| text (string, 6-947k chars) | repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) |
|---|---|---|---|---|---|---|
#!/usr/bin/env python
# -*- coding: ISO-8859-15 -*-
#
# Copyright (C) 2005-2007 David Guerizec <david@guerizec.net>
#
# Last modified: 2006 Sep 02, 01:40:01 by david
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
__plugin_name__ = "Console Extra Commands"
__description__ = """
This plugin offers several new commands for the console session:
- open user@site
Open a shell session on user@site.
- run user@site cmd args...
Run a command remotely on user@site.
"""
def __init_plugin__():
import commands
| OutOfOrder/sshproxy | lib/console_extra/__init__.py | Python | gpl-2.0 | 1,214 | 0.001647 |
import sys
import pytest
py3 = sys.version_info[0] >= 3
class DummyCollector(pytest.collect.File):
def collect(self):
return []
def pytest_pycollect_makemodule(path, parent):
bn = path.basename
if "py3" in bn and not py3 or ("py2" in bn and py3):
return DummyCollector(path, parent=parent)
| paulrouget/servo | tests/wpt/web-platform-tests/tools/third_party/pytest/doc/en/example/py2py3/conftest.py | Python | mpl-2.0 | 324 | 0 |
"""
Tests that skipped rows are properly handled during
parsing for all of the parsers defined in parsers.py
"""
from datetime import datetime
from io import StringIO
import numpy as np
import pytest
from pandas.errors import EmptyDataError
from pandas import (
DataFrame,
Index,
)
import pandas._testing as tm
# XFAIL ME PLS once hanging tests issues identified
pytestmark = pytest.mark.usefixtures("pyarrow_skip")
@pytest.mark.parametrize("skiprows", [list(range(6)), 6])
def test_skip_rows_bug(all_parsers, skiprows):
# see gh-505
parser = all_parsers
text = """#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
1/1/2000,1.,2.,3.
1/2/2000,4,5,6
1/3/2000,7,8,9
"""
result = parser.read_csv(
StringIO(text), skiprows=skiprows, header=None, index_col=0, parse_dates=True
)
index = Index(
[datetime(2000, 1, 1), datetime(2000, 1, 2), datetime(2000, 1, 3)], name=0
)
expected = DataFrame(
np.arange(1.0, 10.0).reshape((3, 3)), columns=[1, 2, 3], index=index
)
tm.assert_frame_equal(result, expected)
def test_deep_skip_rows(all_parsers):
# see gh-4382
parser = all_parsers
data = "a,b,c\n" + "\n".join(
[",".join([str(i), str(i + 1), str(i + 2)]) for i in range(10)]
)
condensed_data = "a,b,c\n" + "\n".join(
[",".join([str(i), str(i + 1), str(i + 2)]) for i in [0, 1, 2, 3, 4, 6, 8, 9]]
)
result = parser.read_csv(StringIO(data), skiprows=[6, 8])
condensed_result = parser.read_csv(StringIO(condensed_data))
tm.assert_frame_equal(result, condensed_result)
def test_skip_rows_blank(all_parsers):
# see gh-9832
parser = all_parsers
text = """#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
#foo,a,b,c
1/1/2000,1.,2.,3.
1/2/2000,4,5,6
1/3/2000,7,8,9
"""
data = parser.read_csv(
StringIO(text), skiprows=6, header=None, index_col=0, parse_dates=True
)
index = Index(
[datetime(2000, 1, 1), datetime(2000, 1, 2), datetime(2000, 1, 3)], name=0
)
expected = DataFrame(
np.arange(1.0, 10.0).reshape((3, 3)), columns=[1, 2, 3], index=index
)
tm.assert_frame_equal(data, expected)
@pytest.mark.parametrize(
"data,kwargs,expected",
[
(
"""id,text,num_lines
1,"line 11
line 12",2
2,"line 21
line 22",2
3,"line 31",1""",
{"skiprows": [1]},
DataFrame(
[[2, "line 21\nline 22", 2], [3, "line 31", 1]],
columns=["id", "text", "num_lines"],
),
),
(
"a,b,c\n~a\n b~,~e\n d~,~f\n f~\n1,2,~12\n 13\n 14~",
{"quotechar": "~", "skiprows": [2]},
DataFrame([["a\n b", "e\n d", "f\n f"]], columns=["a", "b", "c"]),
),
(
(
"Text,url\n~example\n "
"sentence\n one~,url1\n~"
"example\n sentence\n two~,url2\n~"
"example\n sentence\n three~,url3"
),
{"quotechar": "~", "skiprows": [1, 3]},
DataFrame([["example\n sentence\n two", "url2"]], columns=["Text", "url"]),
),
],
)
def test_skip_row_with_newline(all_parsers, data, kwargs, expected):
# see gh-12775 and gh-10911
parser = all_parsers
result = parser.read_csv(StringIO(data), **kwargs)
tm.assert_frame_equal(result, expected)
def test_skip_row_with_quote(all_parsers):
# see gh-12775 and gh-10911
parser = all_parsers
data = """id,text,num_lines
1,"line '11' line 12",2
2,"line '21' line 22",2
3,"line '31' line 32",1"""
exp_data = [[2, "line '21' line 22", 2], [3, "line '31' line 32", 1]]
expected = DataFrame(exp_data, columns=["id", "text", "num_lines"])
result = parser.read_csv(StringIO(data), skiprows=[1])
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"data,exp_data",
[
(
"""id,text,num_lines
1,"line \n'11' line 12",2
2,"line \n'21' line 22",2
3,"line \n'31' line 32",1""",
[[2, "line \n'21' line 22", 2], [3, "line \n'31' line 32", 1]],
),
(
"""id,text,num_lines
1,"line '11\n' line 12",2
2,"line '21\n' line 22",2
3,"line '31\n' line 32",1""",
[[2, "line '21\n' line 22", 2], [3, "line '31\n' line 32", 1]],
),
(
"""id,text,num_lines
1,"line '11\n' \r\tline 12",2
2,"line '21\n' \r\tline 22",2
3,"line '31\n' \r\tline 32",1""",
[[2, "line '21\n' \r\tline 22", 2], [3, "line '31\n' \r\tline 32", 1]],
),
],
)
def test_skip_row_with_newline_and_quote(all_parsers, data, exp_data):
# see gh-12775 and gh-10911
parser = all_parsers
result = parser.read_csv(StringIO(data), skiprows=[1])
expected = DataFrame(exp_data, columns=["id", "text", "num_lines"])
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"lineterminator", ["\n", "\r\n", "\r"] # "LF" # "CRLF" # "CR"
)
def test_skiprows_lineterminator(all_parsers, lineterminator, request):
# see gh-9079
parser = all_parsers
data = "\n".join(
[
"SMOSMANIA ThetaProbe-ML2X ",
"2007/01/01 01:00 0.2140 U M ",
"2007/01/01 02:00 0.2141 M O ",
"2007/01/01 04:00 0.2142 D M ",
]
)
expected = DataFrame(
[
["2007/01/01", "01:00", 0.2140, "U", "M"],
["2007/01/01", "02:00", 0.2141, "M", "O"],
["2007/01/01", "04:00", 0.2142, "D", "M"],
],
columns=["date", "time", "var", "flag", "oflag"],
)
if parser.engine == "python" and lineterminator == "\r":
        mark = pytest.mark.xfail(reason="'CR' not respected by the Python parser yet")
request.node.add_marker(mark)
data = data.replace("\n", lineterminator)
result = parser.read_csv(
StringIO(data),
skiprows=1,
delim_whitespace=True,
names=["date", "time", "var", "flag", "oflag"],
)
tm.assert_frame_equal(result, expected)
def test_skiprows_infield_quote(all_parsers):
# see gh-14459
parser = all_parsers
data = 'a"\nb"\na\n1'
expected = DataFrame({"a": [1]})
result = parser.read_csv(StringIO(data), skiprows=2)
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"kwargs,expected",
[
({}, DataFrame({"1": [3, 5]})),
({"header": 0, "names": ["foo"]}, DataFrame({"foo": [3, 5]})),
],
)
def test_skip_rows_callable(all_parsers, kwargs, expected):
parser = all_parsers
data = "a\n1\n2\n3\n4\n5"
result = parser.read_csv(StringIO(data), skiprows=lambda x: x % 2 == 0, **kwargs)
tm.assert_frame_equal(result, expected)
def test_skip_rows_callable_not_in(all_parsers):
parser = all_parsers
data = "0,a\n1,b\n2,c\n3,d\n4,e"
expected = DataFrame([[1, "b"], [3, "d"]])
result = parser.read_csv(
StringIO(data), header=None, skiprows=lambda x: x not in [1, 3]
)
tm.assert_frame_equal(result, expected)
def test_skip_rows_skip_all(all_parsers):
parser = all_parsers
data = "a\n1\n2\n3\n4\n5"
msg = "No columns to parse from file"
with pytest.raises(EmptyDataError, match=msg):
parser.read_csv(StringIO(data), skiprows=lambda x: True)
def test_skip_rows_bad_callable(all_parsers):
msg = "by zero"
parser = all_parsers
data = "a\n1\n2\n3\n4\n5"
with pytest.raises(ZeroDivisionError, match=msg):
parser.read_csv(StringIO(data), skiprows=lambda x: 1 / 0)
def test_skip_rows_and_n_rows(all_parsers):
# GH#44021
data = """a,b
1,a
2,b
3,c
4,d
5,e
6,f
7,g
8,h
"""
parser = all_parsers
result = parser.read_csv(StringIO(data), nrows=5, skiprows=[2, 4, 6])
expected = DataFrame({"a": [1, 3, 5, 7, 8], "b": ["a", "c", "e", "g", "h"]})
tm.assert_frame_equal(result, expected)
| pandas-dev/pandas | pandas/tests/io/parser/test_skiprows.py | Python | bsd-3-clause | 7,845 | 0.001147 |
#
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import os
import configuration
import sqlplus
def add_subparser(subparsers):
description = """Run a single sqlplus script."""
epilog = """SCRIPT is assumed to be relative to schema directory (default
{0}) unless it begins with '{1}'.""".format(
sqlplus.get_default_schema_dir(), os.path.sep)
parser = subparsers.add_parser("run",
help="run a single sqlplus script",
description=description,
epilog=epilog)
parser.add_argument("user",
metavar="USER",
choices=["sys"] + configuration.USERS,
help="connect to database as: " +
", ".join(["sys"] + configuration.USERS))
parser.add_argument("script",
metavar="SCRIPT",
help="sqlplus script to run")
parser.set_defaults(func=_run)
parser.set_defaults(need_config=True)
parser.set_defaults(need_metadata=True)
def _run(args, metadata, config):
sqlplus.run_steps([(args.user, args.script)],
metadata,
config,
args.schema_dir,
False,
None,
None,
False)
| OpenXT/sync-database | sync_db/run_script.py | Python | gpl-2.0 | 2,130 | 0.002347 |
#!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr
import pmt
def make_lengthtags(lengths, offsets, tagname='length', vlen=1):
tags = []
assert(len(offsets) == len(lengths))
for offset, length in zip(offsets, lengths):
tag = gr.tag_t()
tag.offset = offset // vlen
tag.key = pmt.string_to_symbol(tagname)
tag.value = pmt.from_long(length // vlen)
tags.append(tag)
return tags
def string_to_vector(string):
v = []
for s in string:
v.append(ord(s))
return v
def strings_to_vectors(strings, tsb_tag_key):
vs = [string_to_vector(string) for string in strings]
return packets_to_vectors(vs, tsb_tag_key)
def vector_to_string(v):
s = []
for d in v:
s.append(chr(d))
return ''.join(s)
def vectors_to_strings(data, tags, tsb_tag_key):
packets = vectors_to_packets(data, tags, tsb_tag_key)
return [vector_to_string(packet) for packet in packets]
def count_bursts(data, tags, tsb_tag_key, vlen=1):
lengthtags = [t for t in tags
if pmt.symbol_to_string(t.key) == tsb_tag_key]
lengths = {}
for tag in lengthtags:
if tag.offset in lengths:
raise ValueError(
"More than one tags with key {0} with the same offset={1}."
.format(tsb_tag_key, tag.offset))
lengths[tag.offset] = pmt.to_long(tag.value) * vlen
in_burst = False
in_packet = False
packet_length = None
packet_pos = None
burst_count = 0
for pos in range(len(data)):
if pos in lengths:
if in_packet:
print("Got tag at pos {0} current packet_pos is {1}".format(
pos, packet_pos))
raise Exception("Received packet tag while in packet.")
packet_pos = -1
packet_length = lengths[pos]
in_packet = True
if not in_burst:
burst_count += 1
in_burst = True
elif not in_packet:
in_burst = False
if in_packet:
packet_pos += 1
if packet_pos == packet_length - 1:
in_packet = False
packet_pos = None
return burst_count
def vectors_to_packets(data, tags, tsb_tag_key, vlen=1):
lengthtags = [t for t in tags
if pmt.symbol_to_string(t.key) == tsb_tag_key]
lengths = {}
for tag in lengthtags:
if tag.offset in lengths:
raise ValueError(
"More than one tags with key {0} with the same offset={1}."
.format(tsb_tag_key, tag.offset))
lengths[tag.offset] = pmt.to_long(tag.value) * vlen
if 0 not in lengths:
raise ValueError("There is no tag with key {0} and an offset of 0"
.format(tsb_tag_key))
pos = 0
packets = []
while pos < len(data):
if pos not in lengths:
raise ValueError("There is no tag with key {0} and an offset of {1}."
"We were expecting one."
.format(tsb_tag_key, pos))
length = lengths[pos]
if length == 0:
raise ValueError("Packets cannot have zero length.")
if pos + length > len(data):
raise ValueError("The final packet is incomplete.")
packets.append(data[pos: pos + length])
pos += length
return packets
def packets_to_vectors(packets, tsb_tag_key, vlen=1):
""" Returns a single data vector and a set of tags.
If used with blocks.vector_source_X, this set of data
    and tags will produce a correct tagged stream. """
tags = []
data = []
offset = 0
for packet in packets:
data.extend(packet)
tag = gr.tag_t()
tag.offset = offset // vlen
tag.key = pmt.string_to_symbol(tsb_tag_key)
tag.value = pmt.from_long(len(packet) // vlen)
tags.append(tag)
offset = offset + len(packet)
return data, tags
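# Hedged usage sketch (not part of the original module): the (data, tags) pair returned by
# packets_to_vectors can be fed to a vector source so that downstream blocks receive a correctly
# tagged stream. The vector_source_b call and the 'packet_len' key below are illustrative assumptions.
if __name__ == '__main__':
    from gnuradio import blocks
    demo_data, demo_tags = packets_to_vectors([[1, 2, 3], [4, 5]], 'packet_len')
    demo_src = blocks.vector_source_b(demo_data, False, 1, demo_tags)
    print(demo_data, [pmt.symbol_to_string(t.key) for t in demo_tags])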
| dl1ksv/gnuradio | gnuradio-runtime/python/gnuradio/gr/packet_utils.py | Python | gpl-3.0 | 4,116 | 0.000243 |
'''
Project: Farnsworth
Author: Karandeep Singh Nagra
'''
from django.contrib.auth.models import User, Group, Permission
from django.core.urlresolvers import reverse
from django.db import models
from base.models import UserProfile
class Thread(models.Model):
'''
The Thread model. Used to group messages.
'''
owner = models.ForeignKey(
UserProfile,
help_text="The user who started this thread.",
)
subject = models.CharField(
blank=False,
null=False,
max_length=254,
help_text="Subject of this thread.",
)
start_date = models.DateTimeField(
auto_now_add=True,
help_text="The date this thread was started.",
)
change_date = models.DateTimeField(
auto_now_add=True,
help_text="The last time this thread was modified.",
)
number_of_messages = models.PositiveSmallIntegerField(
default=1,
help_text="The number of messages in this thread.",
)
active = models.BooleanField(
default=True,
help_text="Whether this thread is still active.",
)
views = models.PositiveIntegerField(
default=0,
help_text="The number times this thread has been viewed.",
)
followers = models.ManyToManyField(
User,
blank=True,
null=True,
related_name="following",
help_text="Users following this thread",
)
def __unicode__(self):
return self.subject
class Meta:
ordering = ['-change_date']
def is_thread(self):
return True
def get_view_url(self):
return reverse("threads:view_thread", kwargs={"pk": self.pk})
class Message(models.Model):
'''
The Message model. Contains a body, owner, and post_date, referenced by thread.
'''
body = models.TextField(
blank=False,
null=False,
help_text="Body of this message.",
)
owner = models.ForeignKey(
UserProfile,
help_text="The user who posted this message.",
)
post_date = models.DateTimeField(
auto_now_add=True,
help_text="The date this message was posted.",
)
thread = models.ForeignKey(
Thread,
help_text="The thread to which this message belongs.",
)
edited = models.BooleanField(
default=False,
)
def __str__(self):
return self.__unicode__()
def __unicode__(self):
return self.body
class Meta:
ordering = ['post_date']
def is_message(self):
return True
def pre_save_thread(sender, instance, **kwargs):
thread = instance
thread.number_of_messages = thread.message_set.count()
def post_save_thread(sender, instance, created, **kwargs):
thread = instance
if not created and thread.number_of_messages == 0:
thread.delete()
def post_save_message(sender, instance, created, **kwargs):
message = instance
thread = message.thread
if created:
thread.change_date = message.post_date
thread.save()
def post_delete_message(sender, instance, **kwargs):
message = instance
message.thread.save()
# Connect signals with their respective functions from above.
# When a message is created, update that message's thread's change_date to the post_date of that message.
models.signals.post_save.connect(post_save_message, sender=Message)
models.signals.post_delete.connect(post_delete_message, sender=Message)
models.signals.pre_save.connect(pre_save_thread, sender=Thread)
models.signals.post_save.connect(post_save_thread, sender=Thread)
| knagra/farnsworth | threads/models.py | Python | bsd-2-clause | 3,639 | 0.002473 |
# coding: utf-8
# # L1 - Gradient descent and linear models
# In[1]:
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from scipy.optimize import minimize
import math
get_ipython().magic('matplotlib notebook')
matplotlib.rcParams['figure.figsize'] = '12,8'
matplotlib.rcParams['figure.max_open_warning'] = False
# In[2]:
def setup_plot_figure(xlabel='x', ylabel='y', hline=False, vline=False, equal_axes=False):
f = plt.figure()
if equal_axes:
plt.axis('equal')
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.grid(True, which='both')
if hline:
plt.axhline(color='k', alpha=0.7)
if vline:
plt.axvline(color='k', alpha=0.7)
return f
# In[5]:
SAMPLE_NUM = 10000
positive_class_features = np.random.normal(-1, 1, (SAMPLE_NUM, 2))
negative_class_features = np.random.normal(1.7, 1, (SAMPLE_NUM, 2))
X = np.c_[np.ones(SAMPLE_NUM * 2), np.concatenate((positive_class_features, negative_class_features))]
Y = np.r_[np.ones(SAMPLE_NUM), -np.ones(SAMPLE_NUM)].reshape(SAMPLE_NUM * 2)
w_exact = np.linalg.inv(X.T.dot(X)).dot(X.T).dot(Y)
x_ax = np.linspace(-5, 5, 2)
y_ax_exact = np.array(-(w_exact[0] + w_exact[1] * x_ax) / w_exact[2])
setup_plot_figure(hline=True, vline=True, equal_axes=True)
plt.scatter(positive_class_features[:, 0], positive_class_features[:, 1], c='green', alpha=0.2)
plt.scatter(negative_class_features[:, 0], negative_class_features[:, 1], c='red', alpha=0.2)
plt.plot(x_ax, y_ax_exact, color='b')
plt.show()
# Two matrices of sizes $a\times b$ and $b \times c$ are multiplied in $O(abc)$ operations ($a\cdot c$ cells in the resulting table, each of which takes $b$ operations to compute), an $a \times a$ matrix is inverted in $O(a^3)$ operations, and an $a \times b$ matrix is transposed in $O(ab)$. Knowing this, and assuming the input sample has $n$ rows and $m$ features, we obtain the following asymptotics from performing five operations in sequence: transposing the matrix $X$, multiplying $X_{m\times n}^T$ by $X_{n\times m}$, inverting the resulting $m\times m$ matrix, multiplying the inverted $m\times m$ matrix by $X_{m\times n}^T$, and multiplying the resulting $m\times n$ matrix by $Y_{n \times 1}$. Then
# $$O(m\cdot n)+O(m\cdot n \cdot m)+O(m^3)+O(m\cdot m \cdot n)+O(m\cdot n)=O(m^2n)+O(m^3).$$ If we assume that $m<n$, the expression simplifies to $O(m^2n)$.
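# A minimal illustrative sketch (not part of the original notebook) that spells out the five
# operations whose costs are summed above; the shapes used here are arbitrary demo assumptions.
n_demo, m_demo = 1000, 3
X_demo = np.random.randn(n_demo, m_demo)  # n x m design matrix
Y_demo = np.random.randn(n_demo)          # n-vector of targets
Xt_demo = X_demo.T                        # O(m*n): transpose
XtX_demo = Xt_demo.dot(X_demo)            # O(m*n*m): m x m Gram matrix
XtX_inv_demo = np.linalg.inv(XtX_demo)    # O(m^3): inversion
pinv_demo = XtX_inv_demo.dot(Xt_demo)     # O(m*m*n): m x n product
w_demo = pinv_demo.dot(Y_demo)            # O(m*n): m-vector of weights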
# **Task**
#
# 1. Let $\mathbb{P}\{y=1|x\} = \sigma(wx+b)$, where $\sigma(z) = \frac{1}{1 + \exp(-z)}$. Show that the problem
# $$ \arg\min_{w, b} \sum_{x, y} \ln(1 + \exp(-y(wx + b)))$$
# is nothing other than maximum-likelihood estimation.
# 2. Plot all the loss functionals in the $M \times Q$ axes for a single element.
#
# 1. The likelihood is $\mathbb{P}\{y|x\}=\sigma(y(wx+b))$. If we treat the agreement of the predicted class with the true class as events that are jointly independent across the sample, we can rewrite $\mathbb{P}\{y|x\}$ as $\prod_{i=0}^n\mathbb{P}\{y_i|x_i\}$. Maximizing the likelihood is then the problem $$\underset{w}{\mathrm{argmax}}\prod_{i=0}^n\mathbb{P}\{y_i|x_i\}=\underset{w}{\mathrm{argmax}}\ln\prod_{i=0}^n\mathbb{P}\{y_i|x_i\}=$$ (by monotonicity of the logarithm: wherever the function attains its maximum, so does its logarithm) $$=\underset{w}{\mathrm{argmax}}\sum_{i=0}^n\ln\mathbb{P}\{y_i|x_i\}=\underset{w}{\mathrm{argmax}}\sum_{i=0}^n\ln\sigma(y_i(wx_i+b))=\underset{w}{\mathrm{argmax}}\sum_{i=0}^n\ln(1+e^{-y(wx+b)})^{-1}=$$ (by a property of the logarithm) $$=\underset{w}{\mathrm{argmax}}\left(-\sum_{i=0}^n\ln(1+e^{-y(wx+b)})\right)=\underset{w}{\mathrm{argmin}}\sum_{i=0}^n\ln(1+e^{-y(wx+b)})$$
#
# 2. Since $ M(x, y) = y\cdot(wx)$ (we can prepend a component equal to $1$ to $x$ and a component equal to $b$ to $w$ to recover the original expression), we obtain the following dependencies of $Q$ on $M$:
#
# 1) $Q = [y_{pred} \neq y_{true}]=[y\cdot(wx)<0]=[M < 0]$
#
# 2) $Q = ((wx) - y)^{2}=\frac{1}{y^2}((wx\cdot y) - y^2)^2=\frac{1}{y^2}(M - y^2)^2$. Since $y=\pm 1$, $Q=(M - 1)^2$.
#
# 3) $Q = \max(0, 1 - y\cdot(wx))=\max(0,1-M)$
#
# 4) $Q = \ln(1 + e^{-y\cdot(wx)})=\ln(1+e^{-M})$
# In[7]:
margin = np.linspace(-7, 7, 1000)
@np.vectorize
def exact_loss_m(x):
return 1 if x < 0 else 0
@np.vectorize
def mse_loss_m(x):
return (x - 1) ** 2
@np.vectorize
def hinge_loss_m(x):
return max(0, 1 - x)
@np.vectorize
def logistic_loss_m(x):
return np.log(1 + np.exp(-x))
setup_plot_figure('M', 'Q', True, True, True)
plt.plot(margin, exact_loss_m(margin), label='$[M<0$]')
plt.plot(margin, mse_loss_m(margin), label='$(M - 1)^2$')
plt.plot(margin, hinge_loss_m(margin), label='$max(0,1-M)$')
plt.plot(margin, logistic_loss_m(margin), label='$\ln(1+e^{-M})$')
plt.legend()
plt.xlim(-7, 7)
plt.ylim(-3, 3)
plt.show()
# **Gradient descent**
# In[6]:
def grad_descent(x0, func, grad, learn_rate, iter_num):
steps = np.empty((iter_num, x0.shape[0]))
costs = np.empty(iter_num)
for i in range(iter_num):
costs[i] = func(x0)
steps[i] = x0
x0 -= learn_rate * grad(x0)
return x0, costs, steps
# In[7]:
def simple_func(x): # z=x^2+y^2
return x[0] ** 2 + x[1] ** 2
def simple_grad(x):
return 2 * x
xx = np.arange(-10, 10, 0.01)
yy = np.arange(-10, 10, 0.01)
xgrid, ygrid = np.meshgrid(xx, yy)
zgrid = simple_func((xgrid, ygrid))
setup_plot_figure(hline=True, vline=True, equal_axes=True)
cont = plt.contour(xgrid, ygrid, zgrid)
plt.xlim(-10, 10)
plt.ylim(-7, 7)
cont.clabel(fmt="%.0f")
start_simple = np.random.randn(2) * 10
bestval_simple = simple_func(start_simple)
bestres = start_simple
bestlr = 0
for lr in np.arange(0, 1.5, 1e-4):
res = grad_descent(start_simple, simple_func, simple_grad, lr, 50)
if simple_func(res[0]) < bestval_simple:
bestval_simple = simple_func(res[0])
bestres = res[2]
bestlr = lr
print('Optimal learning rate: ', bestlr)
plt.plot(bestres.T[0], bestres.T[1], 'bo')
plt.show()
# 1. The function is $z=x^2+y^2$; its global minimum of $0$ is attained at the point $(0;0)$.
# 2. See the cell above.
# 3. See the cell above.
# 4. See the cell above.
# 5. All values of $\lambda$ from 0 to 1.5 were tried with a step of $10^{-4}$ (for larger values the gradient can start to grow significantly in some cases), optimizing the value of the function at the point found. It makes sense that the optimal learning rate is close to 0.5, since then a single iteration reaches $(x;y)-0.5\cdot(2x;2y)=(0;0)$, the global minimum; see the quick numeric check below.
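# Quick numeric check of the claim above (illustrative sketch; the starting point is an arbitrary assumption):
# with a learning rate of 0.5 a single step $x - 0.5\cdot 2x$ lands exactly at the minimum of $z=x^2+y^2$.
check_point = np.array([3.0, -7.0])
print(check_point - 0.5 * simple_grad(check_point))  # -> [0. 0.]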
# Some functions are hard for gradient descent. For example, the Rosenbrock function
#
# <center>$f(x, y) = (1-x)^2 + 100(y-x^2)^2$.</center>
# In[11]:
def rosenbrock(x):
return (1 - x[0]) ** 2 + 100 * (x[1] - x[0] ** 2) ** 2
def rosenbrock_grad(x):
return np.array([-2 * (1 - x[0]) - 400 * x[0] * (-x[0] ** 2 + x[1]), 200 * (-x[0] ** 2 + x[1])])
xx = np.arange(-20, 20, 0.1)
yy = np.arange(-20, 20, 0.1)
xgrid, ygrid = np.meshgrid(xx, yy)
zgrid = rosenbrock((xgrid, ygrid))
fig = setup_plot_figure()
ax = fig.gca(projection='3d')
cont = ax.plot_surface(xgrid, ygrid, zgrid, norm=matplotlib.colors.LogNorm(), cmap=plt.cm.jet, linewidth=0, shade=False)
fig.colorbar(cont, shrink=0.5, aspect=5)
start_ros = np.random.randn(2) * 20
res_rosenbrock = grad_descent(start_ros, rosenbrock, rosenbrock_grad, 1e-5, 5000)[2]
z_ros = rosenbrock(res_rosenbrock.T)
ax.plot(xs=res_rosenbrock.T[0], ys=res_rosenbrock.T[1], zs=z_ros)
fig.show()
# 1. See the cell above.
# 2. Note that the function has a region with an almost-zero gradient, a "valley", but outside this valley it grows very quickly. Hence with a small fixed $\lambda$ the method converges very slowly, while with a large one it diverges and the gradient again grows quickly. Moreover, in the neighborhood of the global minimum (inside the "valley") the algorithm moves towards the minimum very slowly because the gradient there is close to zero.
#
# 3. The learning rate can be adjusted dynamically in one of many ways, for example those described below. One can also restart the algorithm a sufficiently large number of times with random starting parameters and then pick the best run. Another option is to normalize the gradient (its length can also be chosen explicitly), so that at every iteration we move in a descent direction but with steps of equal size; a sketch of this variant follows below.
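# A minimal sketch of the normalized-gradient variant mentioned above (an illustrative assumption,
# not code from the original notebook): every step moves a fixed distance `step_len` in the descent
# direction, regardless of how large the gradient itself is.
def grad_descent_normalized(x0, func, grad, step_len, iter_num):
    costs = np.empty(iter_num)
    for i in range(iter_num):
        costs[i] = func(x0)
        g = grad(x0)
        x0 = x0 - step_len * g / (np.linalg.norm(g) + 1e-12)  # unit-length descent direction
    return x0, costs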
# In[7]:
def mse_loss(w, x, y):
return (1 / x.shape[0]) * np.sum((x.dot(w) - y) ** 2)
def mse_grad(w, x, y):
return (2 / x.shape[0]) * x.T.dot(x.dot(w) - y)
# In[8]:
start_2d = np.random.randn(3) * 10
sol_mse = grad_descent(start_2d, lambda w: mse_loss(w, X, Y), lambda w: mse_grad(w, X, Y), 0.01, 300)
yy = np.array(-(sol_mse[0][0] + sol_mse[0][1] * x_ax) / sol_mse[0][2])
setup_plot_figure(hline=True, vline=True, equal_axes=True)
plt.scatter(positive_class_features.T[0], positive_class_features.T[1], c='green', alpha=0.2)
plt.scatter(negative_class_features.T[0], negative_class_features.T[1], c='red', alpha=0.2)
plt.plot(x_ax, yy.T, color='b', label='Gradient descent')
plt.plot(x_ax, y_ax_exact.T, color='black', label='Exact solution')
plt.legend()
plt.show()
print('Difference between solutions norm:', np.linalg.norm(sol_mse[0] - w_exact))
# **Steepest descent**
# In[15]:
def grad_descent_accel(x0, func, grad, iter_num):
costs = np.empty(iter_num)
steps = np.empty((iter_num, x0.shape[0]))
for i in range(iter_num):
q = func(x0)
costs[i] = q
steps[i] = x0
g = grad(x0)
learn_rate = minimize(lambda l: func(x0 - l * g), x0=np.zeros((1,))).x
x0 -= learn_rate * g
return x0, costs, steps
# In[16]:
sol_mse_accel = grad_descent_accel(start_2d, lambda w: mse_loss(w, X, Y), lambda w: mse_grad(w, X, Y), 300)
setup_plot_figure('Number of iterations', 'Loss')
plt.plot(sol_mse[1], color='black', label='Simple descent')
plt.plot(sol_mse_accel[1], color='green', label='Accelerated descent')
plt.legend()
plt.show()
# This plot shows that steepest descent converges considerably faster.
# If some features occur quite rarely, the corresponding column of the design matrix will be sparse.
#
# **Task**
# What is the problem with this?
#
# Since zeros dominate such a column, the gradient with respect to the corresponding variable is quite small relative to the others. With a small learning rate this means potentially very slow convergence of that coordinate to its optimum, while with a large one there is a risk of divergence along the dense columns. Moreover, such features may be important for classification, yet because of the sparsity of their columns they are barely taken into account; a tiny numeric illustration follows below.
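# Tiny numeric illustration of the problem above (synthetic data, an assumption for illustration only):
# for a column that is non-zero in only 1% of the rows, the corresponding component of the MSE gradient
# is far smaller than that of a dense column, so a single global learning rate cannot suit both.
X_sparse_demo = np.c_[np.random.randn(1000), np.zeros(1000)]
X_sparse_demo[:10, 1] = np.random.randn(10)  # the second feature is rarely non-zero
y_sparse_demo = X_sparse_demo.dot(np.array([1.0, 1.0]))
print(np.abs(mse_grad(np.zeros(2), X_sparse_demo, y_sparse_demo)))  # sparse component is much smaller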
# **Stochastic gradient descent**
# In[17]:
def logistic_loss(w, x, y):
return (1 / x.shape[0]) * sum(np.logaddexp(0, -y[i] * np.dot(x[i], w)) for i in range(x.shape[0]))
def logistic_grad(w, x, y):
v = np.empty((x.shape[0], x.shape[1]))
for i in range(x.shape[0]):
val = -y[i] * np.dot(x[i], w)
if np.abs(val) > 1000: # overflow protection
if val > 0:
v[i] = -y[i] * x[i]
else:
v[i] = 0
else:
v[i] = -y[i] * x[i] * (math.exp(val) / (1 + math.exp(val)))
return (1 / x.shape[0]) * np.sum(v, axis=0)
# In[18]:
def sgd(x0, x, y, func, grad, learn_rate, pass_num, batch_size=1):
batch_num = x.shape[0] // batch_size
costs = np.empty(pass_num * batch_num)
steps = np.empty((pass_num * batch_num, x0.shape[0]))
for p in range(pass_num):
for j in range(batch_num):
x_new = x[j * batch_size:(j + 1) * batch_size]
y_new = y[j * batch_size:(j + 1) * batch_size]
costs[p * batch_num + j] = func(x0, x_new, y_new)
steps[p * batch_num + j] = x0
x0 -= learn_rate * grad(x0, x_new, y_new)
return x0, costs, steps
# In[19]:
train = np.loadtxt('train.csv', skiprows=1, delimiter=',')
ones = train[train[:, 0] == 1]
zeroes = train[train[:, 0] == 0]
o_tr, o_test = np.split(ones, 2) # training set is equal in size to test set
z_tr, z_test = np.split(zeroes, 2)
z_tr[:, 0] = 1  # we need the first feature column to contain 1 so that prediction is fully vectorized;
# since this column holds the labels (0) in the original dataset, we can safely overwrite it after
# extracting the labels
trainset = np.concatenate((o_tr, z_tr))
testset = np.concatenate((o_test, z_test))
labels = np.r_[np.ones(o_tr.shape[0]), -np.ones(z_tr.shape[0])].reshape(o_tr.shape[0] + z_tr.shape[0], 1)
testset_labels = list(map(lambda x: 1 if x == 1 else -1, testset[:, 0])) # manually construct labels: 1 for 1, -1 for 0
testset[:, 0] = 1
# In[20]:
start = np.zeros((train.shape[1]))
sol = sgd(start, trainset, labels, logistic_loss, logistic_grad, 1e-6, 10, 4000)
# In[21]:
def calculate_acc(w, test_features, test_labels):
    # 1st return value is the number of wrong predictions, 2nd is accuracy (correct guesses / number of items in the test set)
wrong = sum(np.sign(test_features[i].dot(w)) != test_labels[i] for i in range(test_features.shape[0]))
return wrong, 1 - wrong / testset.shape[0]
# In[22]:
calculate_acc(sol[0], testset, testset_labels)
# In[23]:
setup_plot_figure('Batch size', 'Accuracy')
for b_size in range(10, trainset.shape[0], 25):
plt.plot(b_size,
calculate_acc(sgd(start, trainset, labels, logistic_loss, logistic_grad, 1e-6, 1, b_size)[0], testset,
testset_labels)[1], 'bo')
plt.show()
# Note that the batch size has no noticeable effect on classification quality, except when the sample is split into exactly two batches, one containing only zeros and the other only ones (each of size roughly 2000).
# In[24]:
setup_plot_figure('Batch size', 'Loss')
plt.plot(sgd(start, trainset, labels, logistic_loss, logistic_grad, 1e-6, 400, 400)[1], color='green', label='400')
plt.plot(sgd(start, trainset, labels, logistic_loss, logistic_grad, 1e-6, 10, 10)[1], color='red', label='10')
plt.plot(sgd(start, trainset, labels, logistic_loss, logistic_grad, 1e-6, 1)[1], color='blue', label='1')
plt.legend(title='Batch elements')
plt.show()
# We choose this number of passes for each batch size so that the algorithm runs for the same number of iterations in every case.
# Note that after roughly 4000 iterations the loss curves (even for stochastic descent) more or less stabilize, which suggests that even in the worst case there is little point in making more than one pass over the sample.
# **Exponential smoothing**
# In[25]:
def sgd_smooth(x0, x, y, func, grad, learn_rate, pass_num, batch_size=1, gamma=0):
batch_num = x.shape[0] // batch_size
costs = np.empty(pass_num * batch_num)
steps = np.empty((pass_num * batch_num, x0.shape[0]))
for p in range(pass_num):
for j in range(batch_num):
x_new = x[j * batch_size:(j + 1) * batch_size]
y_new = y[j * batch_size:(j + 1) * batch_size]
if p != 0 or j != 0:
costs[p * batch_num + j] = gamma * costs[p * batch_num + j - 1] + (1 - gamma) * func(x0, x_new, y_new)
else:
costs[p * batch_num + j] = func(x0, x_new, y_new)
steps[p * batch_num + j, :] = x0
x0 -= learn_rate * grad(x0, x_new, y_new)
return x0, costs, steps
# In[26]:
setup_plot_figure('Iteration', 'Loss')
plt.plot(sgd_smooth(start, trainset, labels, logistic_loss, logistic_grad, 1e-7, 100, 100, 1)[1], color='green',
alpha=0.5)
plt.plot(sgd_smooth(start, trainset, labels, logistic_loss, logistic_grad, 1e-7, 100, 100, 0.75)[1], color='red',
alpha=0.5)
plt.plot(sgd_smooth(start, trainset, labels, logistic_loss, logistic_grad, 1e-7, 100, 100, 0.25)[1], color='black',
alpha=0.5)
plt.plot(sgd_smooth(start, trainset, labels, logistic_loss, logistic_grad, 1e-7, 100, 100)[1], color='blue', alpha=0.5)
plt.show()
# Note that with $\gamma=1$ the loss degenerates into the constant value it had on the first batch of the first pass, and with $\gamma=0$ it reduces to the unsmoothed loss. We can also look at particular $Q^k$ and $Q(x_{k+1})$: after a little algebra, $Q^{k+1}$ is a linear function of $\gamma$ that crosses the ordinate axis at $Q(x_{k+1})$. Thus $\gamma$ affects the loss curve as follows: later values start to "take into account" earlier ones, so, as the name suggests, the jumps of the loss between consecutive batches are smoothed out. The most suitable way of computing $Q$ depends on the task: smoothing may not always fit our needs (for example, we may want to analyze the jumps in their raw form).
# **Momentum**
# In[27]:
def grad_descent_momentum(x0, func, grad, learn_rate, iter_num, gamma=0):
steps = np.empty((iter_num, x0.shape[0]))
costs = np.empty(iter_num)
momentum = np.zeros(x0.shape[0])
for i in range(iter_num):
costs[i] = func(x0)
momentum = gamma * momentum + learn_rate * grad(x0)
x0 -= momentum
steps[i] = x0
return x0, costs, steps
# In[28]:
def simple1_func(x): # z=10x^2+y^2
return 10 * x[0] ** 2 + x[1] ** 2
def simple1_grad(x):
return np.array([20 * x[0], 2 * x[1]])
xx = np.arange(-10, 10, 0.01)
yy = np.arange(-10, 10, 0.01)
xgrid, ygrid = np.meshgrid(xx, yy)
zgrid = simple1_func((xgrid, ygrid))
setup_plot_figure()
cont = plt.contour(xgrid, ygrid, zgrid)
cont.clabel(fmt="%.0f")
start_simple_1 = np.random.randn(2) * 10
res_without_momentum = grad_descent_momentum(start_simple_1, simple1_func, simple1_grad, 1e-2, 50)
plt.plot(res_without_momentum[2].T[0], res_without_momentum[2].T[1], 'bo', label='$\gamma=0$ (regular GD)')
res_with_momentum1 = grad_descent_momentum(start_simple_1, simple1_func, simple1_grad, 1e-2, 50, 0.25)
plt.plot(res_with_momentum1[2].T[0], res_with_momentum1[2].T[1], 'ro', label='$\gamma=0.25$')
res_with_momentum2 = grad_descent_momentum(start_simple_1, simple1_func, simple1_grad, 1e-2, 50, 0.5)
plt.plot(res_with_momentum2[2].T[0], res_with_momentum2[2].T[1], 'yo', label='$\gamma=0.5$')
res_with_momentum3 = grad_descent_momentum(start_simple_1, simple1_func, simple1_grad, 1e-2, 50, 0.75)
plt.plot(res_with_momentum3[2].T[0], res_with_momentum3[2].T[1], 'go', label='$\gamma=0.75$')
plt.legend()
plt.show()
# In[29]:
setup_plot_figure('Iteration', 'Loss')
plt.plot(res_without_momentum[1], label='$\gamma=0$ (regular GD)')
plt.plot(res_with_momentum1[1], label='$\gamma=0.25$')
plt.plot(res_with_momentum2[1], label='$\gamma=0.5$')
plt.plot(res_with_momentum3[1], label='$\gamma=0.75$')
plt.ylim(0, 10)
plt.legend()
plt.show()
# It is easy to see that, on this data and with the other parameters fixed, increasing the coefficient speeds up convergence. Four equally spaced values of $\gamma$ were considered in order to show how the curve changes as this parameter varies.
# **Nesterov's accelerated gradient**
# In[30]:
def grad_descent_nesterov(x0, func, grad, learn_rate, iter_num, gamma=0):
steps = np.empty((iter_num, x0.shape[0]))
costs = np.empty(iter_num)
momentum = np.zeros(x0.shape[0])
for i in range(iter_num):
costs[i] = func(x0)
momentum = gamma * momentum + learn_rate * grad(x0 - momentum)
x0 -= momentum
steps[i] = x0
return x0, costs, steps
# In[31]:
start_m = np.random.randn(2) * 10
res_imp = grad_descent_momentum(start_m, rosenbrock, rosenbrock_grad, 1e-5, 100, 0.8)
res_nes = grad_descent_nesterov(start_m, rosenbrock, rosenbrock_grad, 1e-5, 100, 0.8)
setup_plot_figure('Iteration', 'Loss')
plt.plot(res_imp[1], label='Momentum')
plt.plot(res_nes[1], label='Nesterov\'s accelerated gradient')
plt.title('Performance of two gradient descent improvements on Rosenbrock function')
plt.legend()
plt.show()
# In[32]:
xx = np.arange(-20, 20, 0.1)
yy = np.arange(-20, 20, 0.1)
xgrid, ygrid = np.meshgrid(xx, yy)
zgrid = rosenbrock((xgrid, ygrid))
fig = setup_plot_figure()
ax = fig.gca(projection='3d')
cont = ax.plot_surface(xgrid, ygrid, zgrid, norm=matplotlib.colors.LogNorm(), cmap=plt.cm.jet, linewidth=0, shade=False)
fig.colorbar(cont, shrink=0.5, aspect=5)
ax.plot(xs=res_imp[2].T[0], ys=res_imp[2].T[1], zs=rosenbrock(res_imp[2].T), label='Momentum')
ax.plot(xs=res_nes[2].T[0], ys=res_nes[2].T[1], zs=rosenbrock(res_nes[2].T), label='Nesterov\'s accelerated gradient')
plt.legend()
fig.show()
# **Adagrad (2011)**
# In[33]:
def adagrad(x0, func, grad, learn_rate, iter_num):
costs = np.empty(iter_num)
steps = np.empty((iter_num, x0.shape[0]))
eps = 0.01
g = np.zeros((x0.shape[0], x0.shape[0]))
for i in range(iter_num):
costs[i] = func(x0)
steps[i] = x0
gr = grad(x0)
g += np.dot(gr.T, gr)
x0 -= learn_rate * gr / (np.sqrt(np.diag(g)) + eps)
return x0, costs, steps
# In[34]:
sol_sgd = sgd(start, trainset, labels, logistic_loss, logistic_grad, 1e-6, 1, 10)
sol_adagrad = adagrad(start, lambda w: logistic_loss(w, trainset, labels), lambda w: logistic_grad(w, trainset, labels),
1e-6, 440)
setup_plot_figure('Iteration', 'Loss')
plt.plot(sol_sgd[1], label='SGD')
plt.plot(sol_adagrad[1], label='Adagrad')
plt.legend()
plt.title('Convergence on MNIST dataset for logistic loss function')
plt.show()
# **RMSprop**
# In[35]:
def rmsprop(x0, func, grad, learn_rate, iter_num, gamma=1):
costs = np.empty(iter_num)
steps = np.empty((iter_num, x0.shape[0]))
eps = 0.01
g = np.zeros((x0.shape[0], x0.shape[0]))
for i in range(iter_num):
costs[i] = func(x0)
steps[i] = x0
gr = grad(x0)
g = gamma * g + (1 - gamma) * np.dot(gr.T, gr)
x0 -= learn_rate * gr / (np.sqrt(np.diag(g)) + eps)
return x0, costs, steps
# **Adadelta (2012)**
# 1. The [paper](https://arxiv.org/pdf/1212.5701v1.pdf) names as the main advantages of this method over plain Adagrad a fully adaptive learning rate (no starting parameter to tune) and the removal of the problem of the step shrinking after many iterations (the values in the matrix $G$ grow monotonically, since squared gradient components keep being added). The fraction that determines the coefficient in front of the gradient at each step is now $$\frac{RMS[\Delta w]_{t-1}}{RMS[g]_t}$$ where $RMS[a]=\sqrt{E[a^2]+\varepsilon}$ and $E[a^2]_t=\gamma E[a^2]_{t-1}+(1-\gamma)a_t^2$. In other words, at every iteration we maintain running "root mean squares" of the gradients and of the steps, and compute the new step from them.
# 2. The expression in the denominator is the same as in RMSprop, so everything said about RMSprop still applies. The expression in the numerator is added so that the hypothetical "units of measurement" of $w$ and $\Delta w$ match (by analogy with Newton's method we look at the diagonal of the Hessian so that the dimensions in the final expression agree).
# In[36]:
def adadelta(x0, func, grad, iter_num, gamma=1):
costs = np.empty(iter_num)
steps = np.empty((iter_num, x0.shape[0]))
eps = 0.01
dx = np.zeros((x0.shape[0], x0.shape[0]))
g = np.zeros((x0.shape[0], x0.shape[0]))
for i in range(iter_num):
costs[i] = func(x0)
steps[i] = x0
gr = grad(x0)
g = gamma * g + (1 - gamma) * np.dot(gr.T, gr)
update = (np.sqrt(np.diag(dx)) + eps) * gr / (np.sqrt(np.diag(g)) + eps)
dx = gamma * dx + (1 - gamma) * np.dot(update.T, update)
x0 -= update
return x0, costs, steps
# **Adam (2015)**
# In[37]:
def adam(x0, func, grad, learn_rate, iter_num, gamma=1):
costs = np.empty(iter_num)
steps = np.empty((iter_num, x0.shape[0]))
eps = 0.01
g = np.zeros((x0.shape[0], x0.shape[0]))
momentum = np.zeros((x0.shape[0]))
for i in range(iter_num):
costs[i] = func(x0)
steps[i] = x0
gr = grad(x0)
g = gamma * g + (1 - gamma) * np.dot(gr.T, gr)
momentum = gamma * momentum + learn_rate * gr / (np.sqrt(np.diag(g)) + eps)
x0 -= momentum
return x0, costs, steps
# In[38]:
sol_rmsprop = rmsprop(start, lambda w: logistic_loss(w, trainset, labels), lambda w: logistic_grad(w, trainset, labels),
1e-6, 440, 0.5)
sol_adadelta = adadelta(start, lambda w: logistic_loss(w, trainset, labels),
lambda w: logistic_grad(w, trainset, labels), 440, 0.5)
sol_adam = adam(start, lambda w: logistic_loss(w, trainset, labels), lambda w: logistic_grad(w, trainset, labels), 1e-6,
440, 0.5)
setup_plot_figure('Iteration', 'Loss')
plt.plot(sol_adagrad[1], label='Adagrad')
plt.plot(sol_rmsprop[1], label='RMSprop')
plt.plot(sol_adadelta[1], label='Adadelta')
plt.plot(sol_adam[1], label='Adam')
plt.legend()
plt.title('Convergence on MNIST dataset for logistic loss function')
plt.show()
# Note that the convergence rate of Adam is indeed better than that of plain momentum and RMSprop, which can be explained by the fact that the algorithm combines the best of two non-conflicting optimizations.
# **Task**
# 1. Propose a function that clearly demonstrates the differences in behavior of all the proposed methods.
# 2. Make an animation that draws the tracks of all the descents step by step.
# As a benchmark function we can take the already familiar Rosenbrock function, since its properties make it easy to compare convergence (in particular, inside the "valley"); a hedged animation sketch for item 2 is appended after the static plot below.
# In[52]:
bench_start = np.random.randn(2) * 10
lr = 1e-7
iters = 50
bench_basic = grad_descent(bench_start, rosenbrock, rosenbrock_grad, lr, iters)[2]
bench_accel = grad_descent_accel(bench_start, rosenbrock, rosenbrock_grad, iters)[2]
bench_momentum = grad_descent_momentum(bench_start, rosenbrock, rosenbrock_grad, lr, iters, 0.5)[2]
bench_nesterov = grad_descent_nesterov(bench_start, rosenbrock, rosenbrock_grad, lr, iters, 0.5)[2]
bench_adagrad = adagrad(bench_start, rosenbrock, rosenbrock_grad, lr, iters)[2]
bench_rmsprop = rmsprop(bench_start, rosenbrock, rosenbrock_grad, lr, iters)[2]
bench_adadelta = adadelta(bench_start, rosenbrock, rosenbrock_grad, iters)[2]
bench_adam = adam(bench_start, rosenbrock, rosenbrock_grad, lr, iters)[2]
xx = np.arange(-20, 20, 0.01)
yy = np.arange(-20, 20, 0.01)
xgrid, ygrid = np.meshgrid(xx, yy)
zgrid = rosenbrock((xgrid, ygrid))
setup_plot_figure(hline=True, vline=True)
cont = plt.contourf(xgrid, ygrid, zgrid, 1000, cmap=plt.cm.jet)
plt.xlim(-20, 20)
plt.ylim(-15, 15)
plt.plot(bench_basic.T[0], bench_basic.T[1], 'bo', alpha=0.5, label='Basic')
plt.plot(bench_accel.T[0], bench_accel.T[1], 'ro', alpha=0.5, label='Accelerated')
plt.plot(bench_momentum.T[0], bench_momentum.T[1], 'go', alpha=0.5, label='Momentum')
plt.plot(bench_nesterov.T[0], bench_nesterov.T[1], 'co', alpha=0.5, label='Nesterov')
plt.plot(bench_adagrad.T[0], bench_adagrad.T[1], 'mo', alpha=0.5, label='Adagrad')
plt.plot(bench_rmsprop.T[0], bench_rmsprop.T[1], 'yo', alpha=0.5, label='RMSprop')
plt.plot(bench_adadelta.T[0], bench_adadelta.T[1], 'wo', alpha=0.5, label='Adadelta')
plt.plot(bench_adam.T[0], bench_adam.T[1], 'ko', alpha=0.5, label='Adam')
plt.legend()
plt.show()
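# Hedged sketch for task item 2 (an illustrative assumption, not code from the original notebook):
# an animation that reveals the descent tracks step by step using matplotlib's FuncAnimation.
from matplotlib.animation import FuncAnimation
anim_fig = setup_plot_figure(hline=True, vline=True)
plt.contourf(xgrid, ygrid, zgrid, 1000, cmap=plt.cm.jet)
anim_tracks = [('Basic', bench_basic), ('Momentum', bench_momentum),
               ('Nesterov', bench_nesterov), ('Adam', bench_adam)]
anim_lines = [plt.plot([], [], 'o', alpha=0.5, label=name)[0] for name, _ in anim_tracks]
def animate(frame):
    for line, (_, track) in zip(anim_lines, anim_tracks):
        line.set_data(track.T[0][:frame + 1], track.T[1][:frame + 1])
    return anim_lines
anim = FuncAnimation(anim_fig, animate, frames=iters, interval=100, blit=True)
plt.legend()
plt.show()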
| mryab/askme | labs/L1 - Gradient descent and linear models.py | Python | mit | 32,558 | 0.00386 |
class TriangleMaking:
def maxPerimeter(self, a, b, c):
first = a
second = b
third = c
sides = [first, second, third]
for idx, side in enumerate(sides):
one = (idx + 1) % 3
two = (idx + 2) % 3
            total = sides[one] + sides[two]
            # shrink this side until it is strictly shorter than the sum of the
            # other two, so that the triangle inequality is satisfied
            while sides[idx] >= total:
                sides[idx] -= 1
return sum(sides)
| mikefeneley/topcoder | src/SRM-697/triangle_making.py | Python | mit | 423 | 0.002364 |
# -*- coding: utf-8 -*-
"""Factories to help in tests."""
from factory import PostGenerationMethodCall, Sequence
from factory.alchemy import SQLAlchemyModelFactory
from chamberlain.database import db
from chamberlain.user.models import User
class BaseFactory(SQLAlchemyModelFactory):
"""Base factory."""
class Meta:
"""Factory configuration."""
abstract = True
sqlalchemy_session = db.session
class UserFactory(BaseFactory):
"""User factory."""
username = Sequence(lambda n: 'user{0}'.format(n))
email = Sequence(lambda n: 'user{0}@example.com'.format(n))
password = PostGenerationMethodCall('set_password', 'example')
active = True
class Meta:
"""Factory configuration."""
model = User
| sean-abbott/chamberlain | tests/factories.py | Python | bsd-3-clause | 769 | 0 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "juisapp.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| snirp/juis | manage.py | Python | mit | 250 | 0 |
# Copyright 2019 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import flask
import voluptuous
from werkzeug import exceptions as http_exceptions
from cloudkitty.api.v2 import base
from cloudkitty.api.v2 import utils as api_utils
from cloudkitty.common import policy
from cloudkitty import messaging
from cloudkitty import storage_state
from cloudkitty import tzutils
from cloudkitty import validation_utils as vutils
class ScopeState(base.BaseResource):
@classmethod
def reload(cls):
super(ScopeState, cls).reload()
cls._client = messaging.get_client()
cls._storage_state = storage_state.StateManager()
@api_utils.paginated
@api_utils.add_input_schema('query', {
voluptuous.Optional('scope_id', default=[]):
api_utils.MultiQueryParam(str),
voluptuous.Optional('scope_key', default=[]):
api_utils.MultiQueryParam(str),
voluptuous.Optional('fetcher', default=[]):
api_utils.MultiQueryParam(str),
voluptuous.Optional('collector', default=[]):
api_utils.MultiQueryParam(str),
})
@api_utils.add_output_schema({'results': [{
voluptuous.Required('scope_id'): vutils.get_string_type(),
voluptuous.Required('scope_key'): vutils.get_string_type(),
voluptuous.Required('fetcher'): vutils.get_string_type(),
voluptuous.Required('collector'): vutils.get_string_type(),
voluptuous.Required('state'): vutils.get_string_type(),
}]})
def get(self,
offset=0,
limit=100,
scope_id=None,
scope_key=None,
fetcher=None,
collector=None):
policy.authorize(
flask.request.context,
'scope:get_state',
{'tenant_id': scope_id or flask.request.context.project_id}
)
results = self._storage_state.get_all(
identifier=scope_id,
scope_key=scope_key,
fetcher=fetcher,
collector=collector,
offset=offset,
limit=limit,
)
if len(results) < 1:
raise http_exceptions.NotFound(
"No resource found for provided filters.")
return {
'results': [{
'scope_id': r.identifier,
'scope_key': r.scope_key,
'fetcher': r.fetcher,
'collector': r.collector,
'state': r.state.isoformat(),
} for r in results]
}
@api_utils.add_input_schema('body', {
voluptuous.Exclusive('all_scopes', 'scope_selector'):
voluptuous.Boolean(),
voluptuous.Exclusive('scope_id', 'scope_selector'):
api_utils.MultiQueryParam(str),
voluptuous.Optional('scope_key', default=[]):
api_utils.MultiQueryParam(str),
voluptuous.Optional('fetcher', default=[]):
api_utils.MultiQueryParam(str),
voluptuous.Optional('collector', default=[]):
api_utils.MultiQueryParam(str),
voluptuous.Required('state'):
voluptuous.Coerce(tzutils.dt_from_iso),
})
def put(self,
all_scopes=False,
scope_id=None,
scope_key=None,
fetcher=None,
collector=None,
state=None):
policy.authorize(
flask.request.context,
'scope:reset_state',
{'tenant_id': scope_id or flask.request.context.project_id}
)
if not all_scopes and scope_id is None:
raise http_exceptions.BadRequest(
"Either all_scopes or a scope_id should be specified.")
results = self._storage_state.get_all(
identifier=scope_id,
scope_key=scope_key,
fetcher=fetcher,
collector=collector,
)
if len(results) < 1:
raise http_exceptions.NotFound(
"No resource found for provided filters.")
serialized_results = [{
'scope_id': r.identifier,
'scope_key': r.scope_key,
'fetcher': r.fetcher,
'collector': r.collector,
} for r in results]
self._client.cast({}, 'reset_state', res_data={
'scopes': serialized_results, 'state': state.isoformat(),
})
return {}, 202
| stackforge/cloudkitty | cloudkitty/api/v2/scope/state.py | Python | apache-2.0 | 4,899 | 0 |
import frrtest
import pytest
if 'S["SCRIPTING_TRUE"]=""\n' not in open("../config.status").readlines():
class TestFrrlua:
@pytest.mark.skipif(True, reason="Test unsupported")
def test_exit_cleanly(self):
pass
else:
class TestFrrlua(frrtest.TestMultiOut):
program = "./test_frrlua"
TestFrrlua.exit_cleanly()
| freerangerouting/frr | tests/lib/test_frrlua.py | Python | gpl-2.0 | 358 | 0 |
# pylint: disable=C0111,R0902,R0904,R0912,R0913,R0915,E1101
# Smartsheet Python SDK.
#
# Copyright 2018 Smartsheet.com, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import json
from ..types import *
from ..util import serialize
from ..util import deserialize
class AlternateEmail(object):
"""Smartsheet AlternateEmail data model."""
def __init__(self, props=None, base_obj=None):
"""Initialize the AlternateEmail model."""
self._base = None
if base_obj is not None:
self._base = base_obj
self._confirmed = Boolean()
self._email = String()
self._id_ = Number()
if props:
deserialize(self, props)
# requests package Response object
self.request_response = None
self.__initialized = True
def __getattr__(self, key):
if key == 'id':
return self.id_
else:
raise AttributeError(key)
def __setattr__(self, key, value):
if key == 'id':
self.id_ = value
else:
super(AlternateEmail, self).__setattr__(key, value)
@property
def confirmed(self):
return self._confirmed.value
@confirmed.setter
def confirmed(self, value):
self._confirmed.value = value
@property
def email(self):
return self._email.value
@email.setter
def email(self, value):
self._email.value = value
@property
def id_(self):
return self._id_.value
@id_.setter
def id_(self, value):
self._id_.value = value
def to_dict(self):
return serialize(self)
def to_json(self):
return json.dumps(self.to_dict())
def __str__(self):
return self.to_json()
| smartsheet-platform/smartsheet-python-sdk | smartsheet/models/alternate_email.py | Python | apache-2.0 | 2,281 | 0 |
# Example of a script that connects to a PV,
# writes a value, then disconnects from the PV.
#
# This is usually a bad idea.
# It's better to have widgets connect to PVs,
# 1) More efficient. Widget connects once on start, then remains connected.
# Widget subscribes to PV updates instead of polling its value.
# 2) Widget will reflect the connection and alarm state of the PV
# 3) Widget will properly disconnect
#
# pvs[0]: PV with name of PV to which to connect
# pvs[1]: PV with value that will be written to the PV
from org.csstudio.display.builder.runtime.script import PVUtil, ScriptUtil
pv_name = PVUtil.getString(pvs[0])
value = PVUtil.getDouble(pvs[1])
print("Should write %g to %s" % (value, pv_name))
try:
PVUtil.writePV(pv_name, value, 5000)
except:
ScriptUtil.showErrorDialog(widget, "Error writing %g to %s" % (value, pv_name))
| ESSICS/org.csstudio.display.builder | org.csstudio.display.builder.model/examples/script_util/write_any_pv.py | Python | epl-1.0 | 855 | 0.003509 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from os.path import exists as path_exists
from pyscaffold.api import create_project
from pyscaffold.cli import run
from pyscaffold.extensions import travis
def test_create_project_with_travis(tmpfolder):
# Given options with the travis extension,
opts = dict(project="proj",
extensions=[travis.Travis('travis')])
# when the project is created,
create_project(opts)
# then travis files should exist
assert path_exists("proj/.travis.yml")
assert path_exists("proj/tests/travis_install.sh")
def test_create_project_without_travis(tmpfolder):
# Given options without the travis extension,
opts = dict(project="proj")
# when the project is created,
create_project(opts)
# then travis files should not exist
assert not path_exists("proj/.travis.yml")
assert not path_exists("proj/tests/travis_install.sh")
def test_cli_with_travis(tmpfolder):
# Given the command line with the travis option,
sys.argv = ["pyscaffold", "--travis", "proj"]
# when pyscaffold runs,
run()
# then travis files should exist
assert path_exists("proj/.travis.yml")
assert path_exists("proj/tests/travis_install.sh")
def test_cli_without_travis(tmpfolder):
# Given the command line without the travis option,
sys.argv = ["pyscaffold", "proj"]
# when pyscaffold runs,
run()
# then travis files should not exist
assert not path_exists("proj/.travis.yml")
assert not path_exists("proj/tests/travis_install.sh")
| cpaulik/pyscaffold | tests/extensions/test_travis.py | Python | mit | 1,578 | 0 |
#!/usr/bin/env python
from __future__ import unicode_literals
import io
import optparse
import os
import sys
# Import youtube_dl
ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
sys.path.insert(0, ROOT_DIR)
import youtube_dl
def main():
parser = optparse.OptionParser(usage='%prog OUTFILE.md')
options, args = parser.parse_args()
if len(args) != 1:
parser.error('Expected an output filename')
outfile, = args
def gen_ies_md(ies):
for ie in ies:
ie_md = '**{0}**'.format(ie.IE_NAME)
ie_desc = getattr(ie, 'IE_DESC', None)
if ie_desc is False:
continue
if ie_desc is not None:
ie_md += ': {0}'.format(ie.IE_DESC)
if not ie.working():
ie_md += ' (Currently broken)'
yield ie_md
ies = sorted(youtube_dl.gen_extractors(), key=lambda i: i.IE_NAME.lower())
out = '# Supported sites\n' + ''.join(
' - ' + md + '\n'
for md in gen_ies_md(ies))
with io.open(outfile, 'w', encoding='utf-8') as outf:
outf.write(out)
if __name__ == '__main__':
main()
| MarkTheF4rth/youtube-dl | devscripts/make_supportedsites.py | Python | unlicense | 1,152 | 0.001736 |
"""Write initial TrueGrid files for one biplane blade station.
Usage
-----
start an IPython (qt)console with the pylab flag:
$ ipython qtconsole --pylab
or
$ ipython --pylab
Then, from the prompt, run this script:
|> %run biplane_blade_lib/prep_stnXX_mesh.py
or
|> import biplane_blade_lib/prep_stnXX_mesh
Author: Perry Roth-Johnson
Last updated: April 30, 2014
"""
import matplotlib.pyplot as plt
import lib.blade as bl
reload(bl)
import lib.poly_utils as pu
reload(pu)
from shapely.geometry import Polygon
from shapely.affinity import translate
# SET THESE PARAMETERS -----------------
station_num = 18
# --------------------------------------
plt.close('all')
# load the biplane blade
b1 = bl.BiplaneBlade(
'biplane blade, flapwise symmetric, no stagger, rj/R=0.452, g/c=1.25',
'biplane_blade')
# pre-process the station dimensions
station = b1.list_of_stations[station_num-1]
station.airfoil.create_polygon()
station.structure.create_all_layers()
station.structure.save_all_layer_edges()
station.structure.write_all_part_polygons()
# plot the parts
station.plot_parts()
# access the structure and airfoil for this station
st = station.structure
af = station.airfoil
x3_off = af.lower_chord * af.gap_to_chord_ratio * af.gap_fraction
# upper spar cap -----------------------------------------------------------
label = 'upper spar cap'
# create the bounding polygon
usc = st.lower_spar_cap.layer['upper']
is2 = st.lower_internal_surface_2.layer['resin']
points_usc = [
(-0.75, usc.left[0][1]), # lower_SparCap_upper.txt
is2.polygon.interiors[0].coords[-2], # lower_InternalSurface2_resin.txt
is2.polygon.interiors[0].coords[44-32], # lower_InternalSurface2_resin.txt
( 0.75, usc.right[1][1]), # lower_SparCap_upper.txt
( 0.75, 0.0),
(-0.75, 0.0)
]
bounding_polygon = Polygon(points_usc)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'triax', label,
bounding_polygon, airfoil='lower')
# lower spar cap -----------------------------------------------------------
label = 'lower spar cap'
# create the bounding polygon
lsc = st.lower_spar_cap.layer['lower']
points_lsc = [
(-0.75,-6.5),
( 0.75,-6.5),
( 0.75000000, lsc.right[0][1]), # lower_SparCap_lower.txt
is2.polygon.interiors[0].coords[43-32], # lower_InternalSurface2_resin.txt
is2.polygon.interiors[0].coords[-1], # lower_InternalSurface2_resin.txt
(-0.75000000, lsc.left[1][1]) # lower_SparCap_lower.txt
]
bounding_polygon = Polygon(points_lsc)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'triax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, upper 1 ------------------------------------------------
label = 'TE reinforcement, upper 1'
# create the bounding polygon
ter = st.lower_TE_reinforcement.layer['foam']
is4 = st.lower_internal_surface_4.layer['resin']
points_teu1 = [
(ter.top[0][0], -3.5), # TE_Reinforcement_foam.txt
(ter.top[0][0], -4.6), # TE_Reinforcement_foam.txt
is4.polygon.interiors[0].coords[376-150], # InternalSurface4_resin.txt
(is4.polygon.interiors[0].coords[376-150][0], -3.5) # InternalSurface4_resin.txt
]
bounding_polygon = Polygon(points_teu1)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'foam', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, lower 1 ------------------------------------------------
label = 'TE reinforcement, lower 1'
# create the bounding polygon
points_tel1 = [
(ter.bottom[0][0], -5.0), # TE_Reinforcement_foam.txt
(ter.bottom[0][0], -4.6), # TE_Reinforcement_foam.txt
is4.polygon.interiors[0].coords[376-150], # InternalSurface4_resin.txt
(is4.polygon.interiors[0].coords[376-150][0], -5.0) # InternalSurface4_resin.txt
]
bounding_polygon = Polygon(points_tel1)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'foam', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, upper 2 ------------------------------------------------
label = 'TE reinforcement, upper 2'
# create the bounding polygon
points_teu2 = [
points_teu1[-1],
points_teu1[-2],
ter.polygon.exterior.coords[50-3], # lower_TE_reinforcement_foam.txt
(ter.polygon.exterior.coords[50-3][0], -3.5) # lower_TE_reinforcement_foam.txt
]
bounding_polygon = Polygon(points_teu2)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'foam', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, lower 2 ------------------------------------------------
label = 'TE reinforcement, lower 2'
# create the bounding polygon
points_tel2 = [
(points_teu2[0][0], -5.0),
points_teu2[1],
points_teu2[2],
(points_teu2[2][0], -5.0)
]
bounding_polygon = Polygon(points_tel2)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'foam', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, upper 3 ------------------------------------------------
label = 'TE reinforcement, upper 3'
# create the bounding polygon
points_teu3 = [
points_teu2[-1],
points_teu2[-2],
ter.polygon.exterior.coords[0], # TE_Reinforcement_foam.txt
(ter.polygon.exterior.coords[0][0], -3.5) # TE_Reinforcement_foam.txt
]
bounding_polygon = Polygon(points_teu3)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'foam', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, lower 3 ------------------------------------------------
label = 'TE reinforcement, lower 3'
# create the bounding polygon
points_tel3 = [
(points_teu3[0][0], -5.0),
points_teu3[1],
points_teu3[2],
(points_teu3[2][0], -5.0)
]
bounding_polygon = Polygon(points_tel3)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'foam', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, upper 4 ------------------------------------------------
label = 'TE reinforcement, upper 4'
# create the bounding polygon
es = st.lower_external_surface.layer['gelcoat']
teru = st.lower_TE_reinforcement.layer['uniax']
points_teu4 = [
points_teu3[-1],
points_teu3[-2],
(teru.polygon.exterior.coords[-2][0], -4.769), # TE_Reinforcement_uniax.txt
teru.polygon.exterior.coords[-2], # TE_Reinforcement_uniax.txt
es.polygon.exterior.coords[-2],
(teru.polygon.exterior.coords[-2][0], -3.5) # TE_Reinforcement_uniax.txt
]
bounding_polygon = Polygon(points_teu4)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# TE reinforcement, lower 4 ------------------------------------------------
label = 'TE reinforcement, lower 4'
# create the bounding polygon
points_tel4 = [
(points_teu4[0][0], -5.0),
points_teu4[1],
points_teu4[2],
teru.polygon.exterior.coords[-1], # TE_Reinforcement_uniax.txt
es.polygon.exterior.coords[-1],
(points_teu4[2][0], -5.0)
]
bounding_polygon = Polygon(points_tel4)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_TE_reinforcement, 'uniax', label,
bounding_polygon, airfoil='lower')
# LE panel -----------------------------------------------------------------
label = 'LE panel'
# create the bounding polygon
lep = st.lower_LE_panel.layer['foam']
is1 = st.lower_internal_surface_1.layer['resin']
points_le = [
(-3.00,-6.5),
(-0.836,-6.5),
tuple(lep.bottom[0]), # lower_LE_Panel_foam.txt
is1.polygon.interiors[0].coords[-2], # lower_InternalSurface1_resin.txt
(-1.5, -x3_off),
is1.polygon.interiors[0].coords[-1], # lower_InternalSurface1_resin.txt
tuple(lep.top[1]), # lower_LE_Panel_foam.txt
(-0.836, 0.0),
(-3.00, 0.0)
]
bounding_polygon = Polygon(points_le)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_1, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_1, 'triax', label,
bounding_polygon, airfoil='lower')
# upper aft panel 1 -------------------------------------------------------
label = 'upper aft panel 1'
# create the bounding polygon
ap1u = st.lower_aft_panel_1.layer['upper']
is3 = st.lower_internal_surface_3.layer['resin']
points_ap1u = [
(0.836, 0.0),
(ap1u.right[1][0], 0.0), # lower_AftPanel1_upper.txt
tuple(ap1u.right[1]), # lower_AftPanel1_upper.txt
is3.polygon.interiors[0].coords[88-50], # lower_InternalSurface3_resin.txt
(2.0, -4.5),
is3.polygon.interiors[0].coords[-2], # lower_InternalSurface3_resin.txt
tuple(ap1u.left[0]) # lower_AftPanel1_upper.txt
]
bounding_polygon = Polygon(points_ap1u)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'triax', label,
bounding_polygon, airfoil='lower')
# lower aft panel 1 -------------------------------------------------------
label = 'lower aft panel 1'
# create the bounding polygon
ap1l = st.lower_aft_panel_1.layer['lower']
points_ap1l = [
(0.836, -6.5),
(ap1l.right[0][0], -6.5), # lower_AftPanel1_lower.txt
tuple(ap1l.right[0]), # lower_AftPanel1_lower.txt
is3.polygon.interiors[0].coords[87-50], # lower_InternalSurface3_resin.txt
(2.0, -4.5),
is3.polygon.interiors[0].coords[-1], # lower_InternalSurface3_resin.txt
tuple(ap1l.left[1]) # lower_AftPanel1_lower.txt
]
bounding_polygon = Polygon(points_ap1l)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'triax', label,
bounding_polygon, airfoil='lower')
# upper aft panel 2 -------------------------------------------------------
label = 'upper aft panel 2'
# create the bounding polygon
ap2u = st.lower_aft_panel_2.layer['upper']
sw3br = st.lower_shear_web_3.layer['biax, right']
points_ap2u = [
(sw3br.right[0][0], 0.0),
(ap2u.right[1][0], 0.0), # AftPanel2_upper.txt
tuple(ap2u.right[1]), # AftPanel2_upper.txt
is4.polygon.interiors[0].coords[409-150], # InternalSurface4_resin.txt
(3.0, -4.6),
is4.polygon.interiors[0].coords[-2], # InternalSurface4_resin.txt
tuple(ap2u.left[0]) # AftPanel2_upper.txt
]
bounding_polygon = Polygon(points_ap2u)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon)
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon)
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'resin', label,
bounding_polygon)
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'triax', label,
bounding_polygon)
# lower aft panel 2 -------------------------------------------------------
label = 'lower aft panel 2'
# create the bounding polygon
ap2l = st.lower_aft_panel_2.layer['lower']
points_ap2l = [
(sw3br.right[0][0], -5.4),
(ap2l.right[0][0], -5.4), # AftPanel2_lower.txt
tuple(ap2l.right[0]), # AftPanel2_lower.txt
is4.polygon.interiors[0].coords[279-150], # InternalSurface4_resin.txt
(3.0, -4.6),
is4.polygon.interiors[0].coords[-1], # InternalSurface4_resin.txt
tuple(ap2l.left[1]) # AftPanel2_lower.txt
]
bounding_polygon = Polygon(points_ap2l)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon)
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon)
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'resin', label,
bounding_polygon)
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'triax', label,
bounding_polygon)
# above shear web 1 ----------------------------------------------------------
label = 'above shear web 1'
# create the bounding polygon
points_asw1 = [
(-0.75, 0.0),
(-0.75, -4.5),
(-0.836, -4.5),
(-0.836, 0.0)
]
bounding_polygon = Polygon(points_asw1)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
# below shear web 1 ----------------------------------------------------------
label = 'below shear web 1'
# create the bounding polygon
points_bsw1 = [
(-0.75, -6.5),
(-0.75, -5.0),
(-0.836, -5.0),
(-0.836, -6.5)
]
bounding_polygon = Polygon(points_bsw1)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
# above shear web 2 ----------------------------------------------------------
label = 'above shear web 2'
# create the bounding polygon
points_asw2 = [
(0.75, 0.0),
(0.75, -4.5),
(0.836, -4.5),
(0.836, 0.0)
]
bounding_polygon = Polygon(points_asw2)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
# below shear web 2 ----------------------------------------------------------
label = 'below shear web 2'
# create the bounding polygon
points_bsw2 = [
(0.75, -6.5),
(0.75, -5.0),
(0.836, -5.0),
(0.836, -6.5)
]
bounding_polygon = Polygon(points_bsw2)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
# above shear web 3 ----------------------------------------------------------
label = 'above shear web 3'
sw3bl = st.lower_shear_web_3.layer['biax, left']
# create the bounding polygon
points_asw3 = [
(sw3bl.left[0][0], 0.0),
(sw3bl.left[0][0], -4.5),
(sw3br.right[0][0], -4.5),
(sw3br.right[0][0], 0.0)
]
bounding_polygon = Polygon(points_asw3)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
# below shear web 3 ----------------------------------------------------------
label = 'below shear web 3'
# create the bounding polygon
points_bsw3 = [
(sw3bl.left[0][0], -6.5),
(sw3bl.left[0][0], -4.7),
(sw3br.right[0][0], -4.7),
(sw3br.right[0][0], -6.5)
]
bounding_polygon = Polygon(points_bsw3)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'triax', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_external_surface, 'gelcoat', label,
bounding_polygon, airfoil='lower')
# left of shear web 1 -------------------------------------------------------
label = 'left of shear web 1'
# create the bounding polygon
points_lsw1 = points_le[2:-2]
bounding_polygon = Polygon(points_lsw1)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_1, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_1, 'triax', label,
bounding_polygon, airfoil='lower')
# right of shear web 1 -------------------------------------------------------
label = 'right of shear web 1'
# create the bounding polygon
points_rsw1 = [
points_usc[0],
points_usc[1],
(0.0, -x3_off),
points_lsc[-2],
points_lsc[-1]
]
bounding_polygon = Polygon(points_rsw1)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'triax', label,
bounding_polygon, airfoil='lower')
# left of shear web 2 -------------------------------------------------------
label = 'left of shear web 2'
# create the bounding polygon
points_lsw2 = [
points_usc[3],
points_usc[2],
(0.0, -x3_off),
points_lsc[3],
points_lsc[2]
]
bounding_polygon = Polygon(points_lsw2)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_2, 'triax', label,
bounding_polygon, airfoil='lower')
# right of shear web 2 -------------------------------------------------------
label = 'right of shear web 2'
# create the bounding polygon
points_rsw2 = [
points_ap1u[-1],
points_ap1u[-2],
(1.5, -x3_off),
points_ap1l[-2],
points_ap1l[-1]
]
bounding_polygon = Polygon(points_rsw2)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'triax', label,
bounding_polygon, airfoil='lower')
# left of shear web 3 -------------------------------------------------------
label = 'left of shear web 3'
# create the bounding polygon
points_lsw3 = [
points_ap1u[2],
points_ap1u[3],
(2.0, -4.5),
points_ap1l[3],
points_ap1l[2]
]
bounding_polygon = Polygon(points_lsw3)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_3, 'triax', label,
bounding_polygon, airfoil='lower')
# right of shear web 3 -------------------------------------------------------
label = 'right of shear web 3'
# create the bounding polygon
points_rsw3 = [
points_ap2u[-1],
points_ap2u[-2],
(3.0, -4.6),
points_ap2l[-2],
points_ap2l[-1]
]
bounding_polygon = Polygon(points_rsw3)
pu.plot_polygon(bounding_polygon, 'None', '#000000')
# cut the new layer polygons
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'resin', label,
bounding_polygon, airfoil='lower')
pu.cut_plot_and_write_alt_layer(st.lower_internal_surface_4, 'triax', label,
bounding_polygon, airfoil='lower')
# -----------------------------------------------------------------------------
list_of_mesh_layers = []
# translate all the alt layers in each part
for (name, layer) in st.lower_TE_reinforcement.alt_layer.items():
layer.move(x3_off, alt_layer=True)
list_of_mesh_layers.append(layer)
for (name, layer) in st.lower_external_surface.alt_layer.items():
layer.move(x3_off, alt_layer=True)
list_of_mesh_layers.append(layer)
for (name, layer) in st.lower_internal_surface_1.alt_layer.items():
layer.move(x3_off, alt_layer=True)
list_of_mesh_layers.append(layer)
for (name, layer) in st.lower_internal_surface_2.alt_layer.items():
layer.move(x3_off, alt_layer=True)
list_of_mesh_layers.append(layer)
for (name, layer) in st.lower_internal_surface_3.alt_layer.items():
layer.move(x3_off, alt_layer=True)
list_of_mesh_layers.append(layer)
for (name, layer) in st.lower_internal_surface_4.alt_layer.items():
layer.move(x3_off, alt_layer=True)
list_of_mesh_layers.append(layer)
# translate all the remaining regular layers
st.lower_spar_cap.layer['upper'].move(x3_off)
st.lower_spar_cap.layer['lower'].move(x3_off)
st.lower_aft_panel_1.layer['upper'].move(x3_off)
st.lower_aft_panel_1.layer['lower'].move(x3_off)
st.lower_aft_panel_2.layer['upper'].move(x3_off)
st.lower_aft_panel_2.layer['lower'].move(x3_off)
st.lower_LE_panel.layer['foam'].move(x3_off)
st.lower_shear_web_1.layer['biax, left'].move(x3_off)
st.lower_shear_web_1.layer['foam'].move(x3_off)
st.lower_shear_web_1.layer['biax, right'].move(x3_off)
st.lower_shear_web_2.layer['biax, left'].move(x3_off)
st.lower_shear_web_2.layer['foam'].move(x3_off)
st.lower_shear_web_2.layer['biax, right'].move(x3_off)
st.lower_shear_web_3.layer['biax, left'].move(x3_off)
st.lower_shear_web_3.layer['foam'].move(x3_off)
st.lower_shear_web_3.layer['biax, right'].move(x3_off)
list_of_mesh_layers.append(st.lower_spar_cap.layer['upper'])
list_of_mesh_layers.append(st.lower_spar_cap.layer['lower'])
list_of_mesh_layers.append(st.lower_aft_panel_1.layer['upper'])
list_of_mesh_layers.append(st.lower_aft_panel_1.layer['lower'])
list_of_mesh_layers.append(st.lower_aft_panel_2.layer['upper'])
list_of_mesh_layers.append(st.lower_aft_panel_2.layer['lower'])
list_of_mesh_layers.append(st.lower_LE_panel.layer['foam'])
list_of_mesh_layers.append(st.lower_shear_web_1.layer['biax, left'])
list_of_mesh_layers.append(st.lower_shear_web_1.layer['foam'])
list_of_mesh_layers.append(st.lower_shear_web_1.layer['biax, right'])
list_of_mesh_layers.append(st.lower_shear_web_2.layer['biax, left'])
list_of_mesh_layers.append(st.lower_shear_web_2.layer['foam'])
list_of_mesh_layers.append(st.lower_shear_web_2.layer['biax, right'])
list_of_mesh_layers.append(st.lower_shear_web_3.layer['biax, left'])
list_of_mesh_layers.append(st.lower_shear_web_3.layer['foam'])
list_of_mesh_layers.append(st.lower_shear_web_3.layer['biax, right'])
# plot the lower airfoil in the local beam coordinate system
# (translate it up by the appropriate gap distance: x3_off)
fig,ax = plt.subplots()
fmt1 = "Station #{0}, {1}, {2}% span\n"
fmt2 = "lower airfoil in local beam coordinate system (x3-offset = {3:+.4f})"
fmt = fmt1 + fmt2
ax.set_title(fmt.format(station.station_num, station.airfoil.name,
station.coords.x1, x3_off))
lp2 = translate(af.lower_polygon, yoff=x3_off)
(minx, miny, maxx, maxy) = lp2.bounds
ax.set_xlim([minx*1.2,maxx*1.2])
ax.set_ylim([miny*1.2,maxy*1.2])
plt.grid('on')
ax.set_xlabel('x2 [meters]')
ax.set_ylabel('x3 [meters]')
ax.set_aspect('equal')
for layer in list_of_mesh_layers:
station.plot_polygon(layer.polygon, ax, layer.face_color, layer.edge_color,
alpha=0.8)
# show the plots
plt.show()
# write the TrueGrid input file for mesh generation ---------------------
st.write_truegrid_inputfile(
interrupt_flag=True,
additional_layers=[
st.lower_spar_cap.layer['upper'],
st.lower_spar_cap.layer['lower'],
st.lower_aft_panel_1.layer['upper'],
st.lower_aft_panel_1.layer['lower'],
st.lower_aft_panel_2.layer['upper'],
st.lower_aft_panel_2.layer['lower'],
st.lower_LE_panel.layer['foam'],
st.lower_shear_web_1.layer['biax, left'],
st.lower_shear_web_1.layer['foam'],
st.lower_shear_web_1.layer['biax, right'],
st.lower_shear_web_2.layer['biax, left'],
st.lower_shear_web_2.layer['foam'],
st.lower_shear_web_2.layer['biax, right'],
st.lower_shear_web_3.layer['biax, left'],
st.lower_shear_web_3.layer['foam'],
st.lower_shear_web_3.layer['biax, right']
],
alt_TE_reinforcement=True,
soft_warning=True)
|
perryjohnson/biplaneblade
|
biplane_blade_lib/prep_stn18_mesh.py
|
Python
|
gpl-3.0
| 30,755
| 0.006763
|
#!/usr/bin/env python
import sys
import math
def main():
if len(sys.argv) != 3:
print('USAGE: ' + sys.argv[0] + ' <filename> ' + ' <boundary id of interest>')
return
targetString = 'Moment coefficient for body[' + sys.argv[2]
targetTimestep = ': \n'
filename = sys.argv[1]
try:
f = open(filename, 'r')
    except IOError:
print('File does not exist: ' + filename)
return
    for line in f:
        if targetTimestep in line:
            # replace the colon and newline with a comma so we get a CSV file
            line = line.replace(targetTimestep, ', ')
            print(line, end='')
            continue
        if targetString in line:
            # find the last colon and get the string after it, which is the
            # numeric value of the moment coefficient
            pos = line.rfind(':')
            size = len(line)
            print(line[pos+1:size], end='')
            continue
if __name__ == '__main__':
main()
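# Example (log file name and boundary id below are illustrative):
#   python tools/extractCM.py flow.log 2 > cm_body2.csv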
|
ngcurrier/ProteusCFD
|
tools/extractCM.py
|
Python
|
gpl-3.0
| 990
| 0.009091
|
from django.contrib.localflavor.it.forms import (ITZipCodeField, ITRegionSelect,
ITSocialSecurityNumberField, ITVatNumberField)
from django.test import SimpleTestCase
class ITLocalFlavorTests(SimpleTestCase):
def test_ITRegionSelect(self):
f = ITRegionSelect()
out = u'''<select name="regions">
<option value="ABR">Abruzzo</option>
<option value="BAS">Basilicata</option>
<option value="CAL">Calabria</option>
<option value="CAM">Campania</option>
<option value="EMR">Emilia-Romagna</option>
<option value="FVG">Friuli-Venezia Giulia</option>
<option value="LAZ">Lazio</option>
<option value="LIG">Liguria</option>
<option value="LOM">Lombardia</option>
<option value="MAR">Marche</option>
<option value="MOL">Molise</option>
<option value="PMN" selected="selected">Piemonte</option>
<option value="PUG">Puglia</option>
<option value="SAR">Sardegna</option>
<option value="SIC">Sicilia</option>
<option value="TOS">Toscana</option>
<option value="TAA">Trentino-Alto Adige</option>
<option value="UMB">Umbria</option>
<option value="VAO">Valle d\u2019Aosta</option>
<option value="VEN">Veneto</option>
</select>'''
self.assertEqual(f.render('regions', 'PMN'), out)
def test_ITZipCodeField(self):
error_invalid = [u'Enter a valid zip code.']
valid = {
'00100': '00100',
}
invalid = {
' 00100': error_invalid,
}
self.assertFieldOutput(ITZipCodeField, valid, invalid)
def test_ITSocialSecurityNumberField(self):
error_invalid = [u'Enter a valid Social Security number.']
valid = {
'LVSGDU99T71H501L': 'LVSGDU99T71H501L',
'LBRRME11A01L736W': 'LBRRME11A01L736W',
'lbrrme11a01l736w': 'LBRRME11A01L736W',
'LBR RME 11A01 L736W': 'LBRRME11A01L736W',
}
invalid = {
'LBRRME11A01L736A': error_invalid,
'%BRRME11A01L736W': error_invalid,
}
self.assertFieldOutput(ITSocialSecurityNumberField, valid, invalid)
def test_ITVatNumberField(self):
error_invalid = [u'Enter a valid VAT number.']
valid = {
'07973780013': '07973780013',
'7973780013': '07973780013',
7973780013: '07973780013',
}
invalid = {
'07973780014': error_invalid,
'A7973780013': error_invalid,
}
self.assertFieldOutput(ITVatNumberField, valid, invalid)
|
mixman/djangodev
|
tests/regressiontests/localflavor/it/tests.py
|
Python
|
bsd-3-clause
| 2,453
| 0.000815
|
#!/usr/bin/env python
#
# MCP320x
#
# Author: Maurik Holtrop
#
# This module interfaces with the MCP300x or MCP320x family of chips. These
# are 10-bit and 12-bit ADCs respectively. The x number indicates the number
# of multiplexed analog inputs: 2 (MCP3202), 4 (MCP3204) or 8 (MCP3208)
# Communications with this chip are over the SPI protocol.
# See: https://en.wikipedia.org/wiki/Serial_Peripheral_Interface_Bus
#
# This version of the code supports two SPI interfaces: the builtin hardware
# SPI interface on the RPi, or a "bit-banged" GPIO version.
#
# Bit-Bang GPIO:
# We emulate a SPI port in software using the GPIO lines.
# This is a bit slower than the hardware interface, but it is far more
# clear what is going on, plus the RPi has only one SPI device.
# Connections: RPi GPIO to MCP320x
# CS_bar_pin = CS/SHDN
# CLK_pin = CLK
# MOSI_pin = D_in
# MISO_pin = D_out
#
# Hardware SPI:
# This uses the builtin hardware on the RPi. You need to enable this with the
# raspi-config program first. The data rate can be up to 1MHz.
# Connections: RPi pins to MCP320x
# CE0 or CE1 = CS/SHDN (chip select) set CS_bar = 0 or 1
# SCK = CLK set CLK_pin = 1000000 (transfer speed)
# MOSI = D_in set MOSI_pin = 0
# MISO = D_out set MISO_pin = 0
# The SPI protocol simulated here is MODE=0, CPHA=0, which has a positive polarity clock,
# (the clock is 0 at rest, active at 1) and a positive phase (0 to 1 transition) for reading
# or writing the data. Thus corresponds to the specifications of the MCP320x chips.
#
# From MCP3208 datasheet:
# Outgoing data : MCU latches data to A/D converter on rising edges of SCLK
# Incoming data: Data is clocked out of A/D converter on falling edges, so should be read on rising edge.
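#
# Minimal usage sketch (pin numbers below are illustrative assumptions, not
# fixed by this module):
#   adc = MCP320x(cs_bar_pin=0)                 # hardware SPI on CE0, MCP3208
#   volts = adc.read_volts(0)                   # channel 0, converted to volts
#   adc_bb = MCP320x(cs_bar_pin=25, clk_pin=18,
#                    mosi_pin=24, miso_pin=23)  # bit-banged GPIO wiring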
try:
import RPi.GPIO as GPIO
except ImportError as error:
pass
try:
import Adafruit_BBIO as GPIO
except ImportError as error:
pass
try:
import spidev
except ImportError as error:
pass
from DevLib.MyValues import MyValues
class MCP320x:
"""This is an class that implements an interface to the MCP320x ADC chips.
Standard is the MCP3208, but is will also work wiht the MCP3202, MCP3204, MCP3002, MCP3004 and MCP3008."""
def __init__(self, cs_bar_pin, clk_pin=1000000, mosi_pin=0, miso_pin=0, chip='MCP3208',
channel_max=None, bit_length=None, single_ended=True):
"""Initialize the code and set the GPIO pins.
The last argument, ch_max, is 2 for the MCP3202, 4 for the
MCP3204 or 8 for the MCS3208."""
self._CLK = clk_pin
self._MOSI = mosi_pin
self._MISO = miso_pin
self._CS_bar = cs_bar_pin
chip_dictionary = {
"MCP3202": (2, 12),
"MCP3204": (4, 12),
"MCP3208": (8, 12),
"MCP3002": (2, 10),
"MCP3004": (4, 10),
"MCP3008": (8, 10)
}
if chip in chip_dictionary:
self._ChannelMax = chip_dictionary[chip][0]
self._BitLength = chip_dictionary[chip][1]
elif chip is None and (channel_max is not None) and (bit_length is not None):
self._ChannelMax = channel_max
self._BitLength = bit_length
else:
print("Unknown chip: {} - Please re-initialize.")
self._ChannelMax = 0
self._BitLength = 0
return
self._SingleEnded = single_ended
self._Vref = 3.3
self._values = MyValues(self.read_adc, self._ChannelMax)
self._volts = MyValues(self.read_volts, self._ChannelMax)
# This is used to speed up the SPIDEV communication. Send out MSB first.
# control[0] - bit7-3: upper 5 bits 0, because we can only send 8 bit sequences.
# - bit2 : Start bit - starts conversion in ADCs
# - bit1 : Select single_ended=1 or differential=0
# - bit0 : D2 high bit of channel select.
# control[1] - bit7 : D1 middle bit of channel select.
# - bit6 : D0 low bit of channel select.
# - bit5-0 : Don't care.
if self._SingleEnded:
self._control0 = [0b00000110, 0b00100000, 0] # Pre-compute part of the control word.
else:
self._control0 = [0b00000100, 0b00100000, 0] # Pre-compute part of the control word.
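        # Worked example (single-ended): for channel 5 = 0b101, read_adc() builds
        #   control = [0b00000110 + 1, 0b00100000 + (0b01 << 6), 0]
        #           = [0b00000111, 0b01100000, 0]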
        if self._MOSI > 0:  # Bit-bang mode
assert self._MISO != 0 and self._CLK < 32
if GPIO.getmode() != 11:
GPIO.setmode(GPIO.BCM) # Use the BCM numbering scheme
GPIO.setup(self._CLK, GPIO.OUT) # Setup the ports for in and output
GPIO.setup(self._MOSI, GPIO.OUT)
GPIO.setup(self._MISO, GPIO.IN)
GPIO.setup(self._CS_bar, GPIO.OUT)
GPIO.output(self._CLK, 0) # Set the clock low.
GPIO.output(self._MOSI, 0) # Set the Master Out low
GPIO.output(self._CS_bar, 1) # Set the CS_bar high
else:
self._dev = spidev.SpiDev(0, self._CS_bar) # Start a SpiDev device
self._dev.mode = 0 # Set SPI mode (phase)
self._dev.max_speed_hz = self._CLK # Set the data rate
self._dev.bits_per_word = 8 # Number of bit per word. ALWAYS 8
def __del__(self):
""" Cleanup the GPIO before being destroyed """
if self._MOSI > 0:
GPIO.cleanup(self._CS_bar)
GPIO.cleanup(self._CLK)
GPIO.cleanup(self._MOSI)
GPIO.cleanup(self._MISO)
def get_channel_max(self):
"""Return the maximum number of channels"""
return self._ChannelMax
def get_bit_length(self):
"""Return the number of bits that will be read"""
return self._BitLength
def get_value_max(self):
"""Return the maximum value possible for an ADC read"""
return 2 ** self._BitLength - 1
def send_bit(self, bit):
""" Send out a single bit, and pulse clock."""
if self._MOSI == 0:
return
#
# The input is read on the rising edge of the clock.
#
GPIO.output(self._MOSI, bit) # Set the bit.
GPIO.output(self._CLK, 1) # Rising edge sends data
GPIO.output(self._CLK, 0) # Return clock to zero.
def read_bit(self):
""" Read a single bit from the ADC and pulse clock."""
if self._MOSI == 0:
return 0
#
# The output is going out on the falling edge of the clock,
# and is to be read on the rising edge of the clock.
# Clock should be already low, and data should already be set.
GPIO.output(self._CLK, 1) # Set the clock high. Ready to read.
bit = GPIO.input(self._MISO) # Read the bit.
GPIO.output(self._CLK, 0) # Return clock low, next bit will be set.
return bit
def read_adc(self, channel):
"""This reads the actual ADC value, after connecting the analog multiplexer to
the desired channel.
ADC value is returned at a n-bit integer value, with n=10 or 12 depending on the chip.
The value can be converted to a voltage with:
volts = data*Vref/(2**n-1)"""
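        # e.g. for a 12-bit chip with Vref = 3.3 V, data = 2048 corresponds to
        # 3.3 * 2048 / 4095 ≈ 1.65 V.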
if channel < 0 or channel >= self._ChannelMax:
print("Error - chip does not have channel = {}".format(channel))
if self._MOSI == 0:
# SPIdev Code
# This builds up the control word, which selects the channel
            # and sets single/differential mode.
control = [self._control0[0] + ((channel & 0b100) >> 2), self._control0[1]+((channel & 0b011) << 6), 0]
dat = self._dev.xfer(control)
value = (dat[1] << 8)+dat[2] # Unpack the two 8-bit words to a single integer.
return value
else:
# Bit Bang code.
# To read out this chip you need to send:
# 1 - start bit
# 2 - Single ended (1) or differential (0) mode
# 3 - Channel select: 1 bit for x=2 or 3 bits for x=4,8
# 4 - MSB first (1) or LSB first (0)
#
# Start of sequence sets CS_bar low, and sends sequence
#
GPIO.output(self._CLK, 0) # Make sure clock starts low.
GPIO.output(self._MOSI, 0)
GPIO.output(self._CS_bar, 0) # Select the chip.
self.send_bit(1) # Start bit = 1
self.send_bit(self._SingleEnded) # Select single or differential
if self._ChannelMax > 2:
self.send_bit(int((channel & 0b100) > 0)) # Send high bit of channel = DS2
self.send_bit(int((channel & 0b010) > 0)) # Send mid bit of channel = DS1
self.send_bit(int((channel & 0b001) > 0)) # Send low bit of channel = DS0
else:
self.send_bit(channel)
self.send_bit(0) # MSB First (for MCP3x02) or don't care.
# The clock is currently low, and the dummy bit = 0 is on the output of the ADC
#
self.read_bit() # Read the bit.
data = 0
for i in range(self._BitLength):
# Note you need to shift left first, or else you shift the last bit (bit 0)
# to the 1 position.
data <<= 1
bit = self.read_bit()
data += bit
GPIO.output(self._CS_bar, 1) # Unselect the chip.
return data
def read_volts(self, channel):
"""Read the ADC value from channel and convert to volts, assuming that Vref is set correctly. """
return self._Vref * self.read_adc(channel) / self.get_value_max()
def fast_read_adc0(self):
"""This reads the actual ADC value of channel 0, with as little overhead as possible.
Use with SPIDEV ONLY!!!!
returns: The ADC value as an n-bit integer value, with n=10 or 12 depending on the chip."""
dat = self._dev.xfer(self._control0)
value = (dat[1] << 8) + dat[2]
return value
@property
def values(self):
"""ADC values presented as a list."""
return self._values
@property
def volts(self):
"""ADC voltages presented as a list"""
return self._volts
@property
def accuracy(self):
"""The fractional voltage of the least significant bit. """
return self._Vref / float(self.get_value_max())
@property
def vref(self):
"""Reference voltage used by the chip. You need to set this. It defaults to 3.3V"""
return self._Vref
@vref.setter
def vref(self, vr):
self._Vref = vr
def main(argv):
"""Test code for the MCP320x driver. This assumes you are using a MCP3208
If no arguments are supplied, then use SPIdev for CE0 and read channel 0"""
if len(argv) < 3:
print("Args : ", argv)
cs_bar = 0
clk_pin = 1000000
mosi_pin = 0
miso_pin = 0
if len(argv) < 2:
channel = 0
else:
channel = int(argv[1])
elif len(argv) < 6:
print("Please supply: cs_bar_pin clk_pin mosi_pin miso_pin channel")
sys.exit(1)
else:
cs_bar = int(argv[1])
clk_pin = int(argv[2])
mosi_pin = int(argv[3])
miso_pin = int(argv[4])
channel = int(argv[5])
adc_chip = MCP320x(cs_bar, clk_pin, mosi_pin, miso_pin)
try:
while True:
value = adc_chip.read_adc(channel)
print("{:4d}".format(value))
time.sleep(0.1)
except KeyboardInterrupt:
sys.exit(0)
if __name__ == '__main__':
import sys
import time
main(sys.argv)
|
mholtrop/Phys605
|
Python/DevLib/MCP320x.py
|
Python
|
gpl-3.0
| 11,971
| 0.002423
|
# This file is only necessary for the tests to work
|
nathangeffen/tbonline-old
|
tbonlineproject/external/filebrowser/models.py
|
Python
|
mit
| 51
| 0.019608
|
# -*- coding: utf-8 -*-
# Taboot - Client utility for performing deployments with Func.
# Copyright © 2009, Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from func.minion.modules import func_module
class Test(func_module.FuncModule):
pass
|
abutcher/Taboot
|
taboot-func/__init__.py
|
Python
|
gpl-3.0
| 851
| 0
|
# -*- coding: utf-8 -*-
from pytdx.hq import TdxHq_API
from fooltrader.api import technical
from fooltrader.contract.data_contract import KDATA_COLUMN_SINA
from fooltrader.utils.utils import get_exchange
def get_tdx_kdata(security_item, start, end):
api = TdxHq_API()
with api.connect():
# open close high low vol amount date code
# KDATA_COLUMN = ['timestamp', 'code', 'low', 'open', 'close', 'high', 'volume', 'turnover', 'securityId']
df = api.get_k_data(security_item['code'], start, end)
df = df[['date', 'code', 'low', 'open', 'close', 'high', 'vol', 'amount']]
df['securityId'] = df['code'].apply(lambda x: 'stock_{}_{}'.format(get_exchange(x), x))
df['vol'] = df['vol'].apply(lambda x: x * 100)
df.columns = KDATA_COLUMN_SINA
return df
if __name__ == '__main__':
pass
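    # Example usage (the security_item dict shape and date range below are
    # illustrative assumptions):
    #   df = get_tdx_kdata({'code': '000001'}, '2017-01-01', '2017-06-30')
    #   print(df.head())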
|
foolcage/fooltrader
|
fooltrader/datasource/tdx.py
|
Python
|
mit
| 854
| 0.004684
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-24 20:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='LineItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.CharField(max_length=255)),
('amount', models.FloatField(default=0)),
],
),
migrations.CreateModel(
name='Quote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
],
),
migrations.AddField(
model_name='lineitem',
name='quote',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='quotes.Quote'),
),
]
|
moniquehw/quoterizer
|
quotes/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 1,134
| 0.002646
|
def extractMkkbunkotoikemenWordpressCom(item):
'''
Parser for 'mkkbunkotoikemen.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return _buildReleaseMessage(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractMkkbunkotoikemenWordpressCom.py
|
Python
|
bsd-3-clause
| 565
| 0.033628
|
import os
from concourse_common import jsonutil
def post_successful_tests(filepath, payload, sc, total_string):
sc.api_call("chat.postMessage", as_user=True,
channel=jsonutil.get_params_value(payload, "channel"),
attachments=[{"fallback": "Test Results",
"pretext": "Test results of " + os.environ["BUILD_JOB_NAME"] + " in version " + open(
os.path.join(filepath, jsonutil.get_params_value(payload, "version"))).read(),
"color": "good",
"title": "Test Results: ",
"fields": [{"value": total_string,
"short": False}]}])
def post_failed_tests(failed_string, filepath, payload, sc, total_string):
sc.api_call("chat.postMessage", as_user=True,
channel=jsonutil.get_params_value(payload, "channel"),
attachments=[{"fallback": "Test Results",
"pretext": "Test results of " + os.environ["BUILD_JOB_NAME"] + " in version " + open(
os.path.join(filepath, jsonutil.get_params_value(payload, "version"))).read(),
"color": "danger",
"text": total_string,
"title": "Test Results",
"fields": [{"title": "Failures: ",
"value": failed_string,
"short": False}]}])
def post_success_message(filepath, payload, sc):
sc.api_call("chat.postMessage", as_user=True,
channel=jsonutil.get_params_value(payload, "channel"),
attachments=[{"fallback": "Pipeline Success of version " + open(
os.path.join(filepath, jsonutil.get_params_value(payload, "version"))).read(),
"pretext": "Pipeline Success",
"color": "good",
"title": "Success:",
"fields": [
{"value": "Version " + open(os.path.join(filepath,
jsonutil.get_params_value(payload, "version"))).read() +
" successfully finished the Pipeline with Job: " +
os.environ["BUILD_JOB_NAME"],
"short": False}]}])
def post_failure_message(filepath, payload, sc):
sc.api_call("chat.postMessage", as_user=True,
channel=jsonutil.get_params_value(payload, "channel"),
attachments=[{"fallback": "Pipeline Failure in " + jsonutil.get_params_value(payload, "pipeline_step"),
"pretext": "Pipeline Failure",
"color": "danger",
"title": "Failure:",
"fields": [{"value": os.environ["BUILD_JOB_NAME"] + " in version " + open(
os.path.join(filepath, jsonutil.get_params_value(payload, "version"))).read() + "failed",
"short": False}]}])
|
cosee-concourse/slack-upload-resource
|
opt/resource/slack_post.py
|
Python
|
mit
| 3,302
| 0.004543
|
#
# This file is part of HEPData.
# Copyright (C) 2016 CERN.
#
# HEPData is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# HEPData is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HEPData; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
from flask_webpackext import WebpackBundle
search_js = WebpackBundle(
__name__,
'assets',
entry={
'hepdata-search-js': './js/hepdata_search.js',
'hepdata-search-facets-js': './js/hepdata_search_facets.js',
},
dependencies={
"d3": "~3.5.12",
"d3-tip": "~0.6.7",
"typeahead.js": "0.11.1"
}
)
|
HEPData/hepdata
|
hepdata/modules/search/webpack.py
|
Python
|
gpl-2.0
| 1,105
| 0
|
# The following parse_* methods are from bitcoin-abe
import base58
def parse_TxIn(vds):
d = {}
d['prevout_hash'] = vds.read_bytes(32)
d['prevout_n'] = vds.read_uint32()
d['scriptSig'] = vds.read_bytes(vds.read_compact_size())
d['sequence'] = vds.read_uint32()
return d
def parse_TxOut(vds):
d = {}
d['value'] = vds.read_int64()
raw = vds.read_bytes(vds.read_compact_size())
d['scriptPubKey'] = raw
if len(raw) == 25 and raw[0] == '\x76' and raw[1] == '\xa9' and raw[2] == '\x14':
d['address'] = base58.hash_160_to_bc_address(raw[3:-2])
return d
def parse_Transaction(vds):
d = {}
start = vds.read_cursor
d['version'] = vds.read_int32()
n_vin = vds.read_compact_size()
d['txIn'] = []
for i in xrange(n_vin):
d['txIn'].append(parse_TxIn(vds))
n_vout = vds.read_compact_size()
d['txOut'] = []
for i in xrange(n_vout):
d['txOut'].append(parse_TxOut(vds))
d['lockTime'] = vds.read_uint32()
d['tx'] = vds.input[start:vds.read_cursor]
return d
def parse_BlockHeader(vds):
d = {}
blk_magic = vds.read_bytes(4)
#if blk_magic != '\xf9\xbe\xb4\xd9':
# if blk_magic != '\xbf\xfa\xda\xb5':
# raise Exception('Bad magic' + str(blk_magic))
# return d
blk_length = vds.read_int32()
header_start = vds.read_cursor
d['version'] = vds.read_int32()
d['hashPrev'] = vds.read_bytes(32)
d['hashMerkleRoot'] = vds.read_bytes(32)
d['nTime'] = vds.read_uint32()
d['nBits'] = vds.read_uint32()
d['nNonce'] = vds.read_uint32()
header_end = vds.read_cursor
d['__header__'] = vds.input[header_start:header_end]
return d
def parse_Block(vds):
d = parse_BlockHeader(vds)
d['transactions'] = []
nTransactions = vds.read_compact_size()
for i in xrange(nTransactions):
d['transactions'].append(parse_Transaction(vds))
return d
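# Note: each parse_* function above expects a bitcoin-abe style BCDataStream-like
# object ("vds") that exposes read_bytes(), read_uint32(), read_int32(),
# read_int64() and read_compact_size(), plus the raw buffer as `input` and the
# current offset as `read_cursor` (used to slice out the raw transaction bytes).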
|
thandal/passe-partout
|
pp/pp_parse.py
|
Python
|
mit
| 1,809
| 0.028192
|
import pytest
import unittest.mock as mock
import open_cp.network as network
import open_cp.data
import numpy as np
import datetime
def test_PlanarGraphBuilder():
b = network.PlanarGraphBuilder()
assert b.add_vertex(0.2, 0.5) == 0
b.set_vertex(5, 1, 2)
b.add_edge(0, 5)
g = b.build()
assert g.vertices == {0:(0.2,0.5), 5:(1,2)}
assert g.edges == [(0,5)]
b1 = network.PlanarGraphBuilder(g)
assert b1.add_vertex(5,6) == 6
b1.add_edge(5,6)
g1 = b1.build()
assert g1.vertices == {0:(0.2,0.5), 5:(1,2), 6:(5,6)}
assert g1.edges == [(0,5), (5,6)]
# Check haven't mutated g
assert g.vertices == {0:(0.2,0.5), 5:(1,2)}
assert g.edges == [(0,5)]
def test_PlanarGraphBuilder_remove_unused_vertices():
b = network.PlanarGraphBuilder()
b.add_vertex(0.2, 0.5)
b.add_vertex(0.6, 0.4)
b.set_vertex(5, 1, 2)
b.add_edge(0, 5)
assert len(b.vertices) == 3
b.remove_unused_vertices()
assert len(b.vertices) == 2
@pytest.fixture
def planar_graph_geo_builder():
b = network.PlanarGraphGeoBuilder()
b.add_path([(0,0),(1,1),(5.1,1.2)])
b.add_path([(2,0),(1,1),(0,5),(5.1,1.2)])
b.add_path([(0,0),(0,5)])
return b
def test_PlanarGraphGeoBuilder(planar_graph_geo_builder):
b = planar_graph_geo_builder
assert b.coord_nodes == {(0,0):[0], (1,1):[1,4], (5.1,1.2):[2],
(2,0):[3], (0,5):[5]}
assert b.edges == [(0,1), (1,2), (3,4), (4,5), (5,2), (0,5)]
def test_PlanarGraphGeoBuilder_builds(planar_graph_geo_builder):
g = planar_graph_geo_builder.build()
assert g.vertices == {0:(0,0), 1:(1,1), 2:(5.1,1.2), 3:(2,0), 4:(1,1), 5:(0,5)}
assert g.edges == [(0,1), (1,2), (3,4), (4,5), (5,2), (0,5)]
assert g.number_edges == 6
assert g.bounds == (0, 0, 5.1, 5)
@pytest.fixture
def planar_graph_node_builder():
b = network.PlanarGraphNodeBuilder()
b.add_path([(0,0),(1,1),(5.1,1.2)])
b.add_edge(0,0,2,2)
b.add_path([(1,0),(1,1),(2,2)])
return b
def test_PlanarGraphNodeBuilder(planar_graph_node_builder):
b = planar_graph_node_builder
assert b.coord_nodes == [(0,0), (1,1), (5.1,1.2), (2,2), (1,0)]
assert b.edges == [(0,1), (1,2), (0,3), (4,1), (1,3)]
def test_PlanarGraphNodeBuilder_builds(planar_graph_node_builder):
g = planar_graph_node_builder.build()
assert g.vertices == {0:(0,0), 1:(1,1), 2:(5.1,1.2), 3:(2,2), 4:(1,0)}
assert g.edges == [(0,1), (1,2), (0,3), (4,1), (1,3)]
def test_PlanarGraphNodeBuilder_tolerance():
b = network.PlanarGraphNodeBuilder()
b.tolerance = 0.2
assert b.tolerance == pytest.approx(0.2)
b.add_path([(0,0),(1,1),(5.1,1.2)])
b.add_edge(0.1,0.01,2,2)
assert b.coord_nodes == [(0,0), (1,1), (5.1,1.2), (2,2)]
assert b.edges == [(0,1), (1,2), (0,3)]
def test_PlanarGraphNodeOneShot():
nodes = [(0,0), (1,1), (5.1,1.2), (0.1,0.01), (2,2)]
b = network.PlanarGraphNodeOneShot(nodes, 0.2)
r = b.add_path([(0,0),(1,1),(5.1,1.2)])
assert r == [(0,1), (1,2)]
r = b.add_edge(0.1,0.01,2,2)
assert r == (0,3)
g = b.build()
assert set(g.vertices.values()) == {(0,0), (1,1), (5.1,1.2), (2,2)}
assert len(g.edges) == 3
assert [g.vertices[x] for x in g.edges[0]] == [(0,0), (1,1)]
assert [g.vertices[x] for x in g.edges[1]] == [(1,1), (5.1,1.2)]
assert [g.vertices[x] for x in g.edges[2]] == [(0,0), (2,2)]
def test_PlanarGraphNodeOneShot_remove_duplicates():
nodes = [(0,0), (1,1), (5.1,1.2), (0.1,0.01), (2,2)]
b = network.PlanarGraphNodeOneShot(nodes, 0.2)
b.add_path([(0,0),(1,1),(5.1,1.2)])
b.add_edge(0.1,0.01,2,2)
b.add_edge(1,1,1,1)
b.add_edge(0.1,0.01,2,2)
with pytest.raises(ValueError):
b.build()
b.remove_duplicate_edges()
g = b.build()
assert set(g.vertices.values()) == {(0,0), (1,1), (5.1,1.2), (2,2)}
assert len(g.edges) == 3
assert [g.vertices[x] for x in g.edges[0]] == [(0,0), (1,1)]
assert [g.vertices[x] for x in g.edges[1]] == [(1,1), (5.1,1.2)]
assert [g.vertices[x] for x in g.edges[2]] == [(0,0), (2,2)]
def test_PlanarGraph_constructs():
with pytest.raises(ValueError):
network.PlanarGraph([(0,1,2), (0,2,3)], [])
with pytest.raises(ValueError):
network.PlanarGraph([(0,1,2), (1,2,3)], [(0,0)])
g = network.PlanarGraph([(0,1,2), (1,2,3)], [(0,1)])
assert g.vertices == {0:(1,2), 1:(2,3)}
assert g.edges == [(0,1)]
def test_PlanarGraph_projects():
g = network.PlanarGraph([(0,1,2), (1,2,3)], [(0,1)])
def proj(x, y):
return x-1, y-2
gg = g.project(proj)
assert gg.vertices[0] == pytest.approx((0,0))
assert gg.vertices[1] == pytest.approx((1,1))
assert set(gg.vertices.keys()) == {0,1}
assert gg.edges == [(0,1)]
@pytest.fixture
def graph1():
b = network.PlanarGraphGeoBuilder()
b.add_path([(0,0), (10,0)])
b.add_path([(0,1), (5,5), (9,1)])
return b.build()
def test_derived_graph1(graph1):
g = network.to_derived_graph(graph1)
assert g.vertices == { (0,1), (2,3), (3,4) }
assert g.edges == [((2,3), (3,4))]
assert g.lengths == [pytest.approx((np.sqrt(25+16)+np.sqrt(32))/2)]
def test_shortest_edge_paths(graph1):
dists, prevs = network.shortest_edge_paths(graph1, 0)
assert dists == {0:5, 1:5}
assert prevs == {0:0, 1:1}
dists, prevs = network.shortest_edge_paths(graph1, 0, 0.1)
assert dists == {0:1, 1:9}
assert prevs == {0:0, 1:1}
def test_shortest_paths(graph1):
dists, prevs = network.shortest_paths(graph1, 0)
assert dists == {0:0, 1:10, 2:-1, 3:-1, 4:-1}
assert prevs == {0:0, 1:0}
dists, prevs = network.shortest_paths(graph1, 1)
assert prevs == {1:1, 0:1}
assert dists == {0:10, 1:0, 2:-1, 3:-1, 4:-1}
dists, prevs = network.shortest_paths(graph1, 2)
assert dists == {0:-1, 1:-1, 2:0,
3:pytest.approx(np.sqrt(25+16)),
4:pytest.approx(np.sqrt(25+16)+np.sqrt(32))}
assert prevs == {2:2, 3:2, 4:3}
def test_PlanarGraph_lengths(graph1):
assert graph1.length(0) == pytest.approx(10)
assert graph1.length(1) == pytest.approx(np.sqrt(25+16))
assert graph1.length(2) == pytest.approx(np.sqrt(32))
def test_PlanarGraph_as_quads(graph1):
exp = [ (0,0,10,0), (0,1,5,5), (5,5,9,1) ]
x = graph1.as_quads()
np.testing.assert_allclose(x, exp)
def test_PlanarGraph_as_lines(graph1):
exp = [ ((0,0),(10,0)), ((0,1),(5,5)), ((5,5),(9,1)) ]
x = graph1.as_lines()
np.testing.assert_allclose(x, exp)
def test_PlanarGraph_project(graph1):
edge, t = graph1.project_point_to_graph(5,1)
assert edge == (0, 1)
assert t == pytest.approx(0.5)
edge, t = graph1.project_point_to_graph(-0.5, -0.5)
assert edge == (0, 1)
assert t == 0
edge, t = graph1.project_point_to_graph(-0.1, 1)
assert edge == (2, 3)
assert t == 0
edge, t = graph1.project_point_to_graph(5, 5.2)
assert (edge, t) == ((2,3), 1) or (edge, t) == ((3,4), 0)
edge, t = graph1.project_point_to_graph(9, .4)
assert edge == (0, 1)
assert t == pytest.approx(0.9)
edge, t = graph1.project_point_to_graph(9, .6)
assert edge == (3, 4)
assert t == 1
edge, t = graph1.project_point_to_graph(2.5, 2)
assert edge == (2, 3)
assert t == pytest.approx(0.402439024)
def test_io(graph1):
js = graph1.dump_json()
import json
out = json.loads(js)
assert set(out.keys()) == {"keys", "xcoords", "ycoords", "edges"}
g = network.PlanarGraph.from_json(js)
assert network.approximately_equal(graph1, g)
b = graph1.dump_bytes()
g = network.PlanarGraph.from_bytes(b)
assert network.approximately_equal(graph1, g)
@pytest.fixture
def graph2():
b = network.PlanarGraphGeoBuilder()
b.add_path([(0,10), (1,10)])
b.add_path([(1,10), (2,11), (3, 11), (4,10)])
b.add_path([(1,10), (2,9), (3, 9), (4,10)])
b.add_path([(2,9), (2,11)])
b.add_path([(4,10), (5,10)])
return b.build()
def test_graph2(graph2):
assert graph2.vertices == {0:(0,10), 1:(1,10), 2:(2,11), 3:(3,11), 4:(4,10),
5:(2,9), 6:(3,9), 7:(5,10)}
assert graph2.edges == [(0,1), (1,2), (2,3), (3,4), (1,5), (5,6), (6,4), (5,2), (4,7)]
def test_shortest_paths2(graph2):
dists, prevs = network.shortest_paths(graph2, 0)
assert dists == {0:0, 1:1, 2:pytest.approx(1+np.sqrt(2)),
3:pytest.approx(2+np.sqrt(2)), 4:pytest.approx(2+2*np.sqrt(2)),
5:pytest.approx(1+np.sqrt(2)), 6:pytest.approx(2+np.sqrt(2)),
7:pytest.approx(3+2*np.sqrt(2))}
assert prevs == {0:0, 1:0, 2:1, 5:1, 3:2, 6:5, 4:3, 7:4}
dists, prevs = network.shortest_paths(graph2, 2)
assert dists == {0:pytest.approx(1+np.sqrt(2)),
1:pytest.approx(np.sqrt(2)), 2:0, 3:1, 5:2,
6:3, 4:pytest.approx(1+np.sqrt(2)), 7:pytest.approx(2+np.sqrt(2))}
assert prevs == {2:2, 3:2, 1:2, 5:2, 0:1, 4:3, 6:5, 7:4}
def test_shortest_edge_paths2(graph2):
dists, prevs = network.shortest_edge_paths(graph2, 0)
assert dists == {0:0.5, 1:0.5, 2:pytest.approx(0.5+np.sqrt(2)),
3:pytest.approx(1.5+np.sqrt(2)), 4:pytest.approx(1.5+2*np.sqrt(2)),
5:pytest.approx(0.5+np.sqrt(2)), 6:pytest.approx(1.5+np.sqrt(2)),
7:pytest.approx(2.5+2*np.sqrt(2))}
assert prevs == {0:0, 1:1, 2:1, 5:1, 3:2, 6:5, 4:3, 7:4}
dists, prevs = network.shortest_edge_paths(graph2, 2)
assert dists == {2:0.5, 3:0.5, 5:2.5, 1:pytest.approx(np.sqrt(2)+0.5),
4:pytest.approx(np.sqrt(2)+0.5), 0:pytest.approx(np.sqrt(2)+1.5),
6:pytest.approx(np.sqrt(2)*2+0.5),
7:pytest.approx(np.sqrt(2)+1.5)}
assert prevs == {2:2,3:3,1:2,4:3,6:4,7:4,5:2,0:1}
def test_PlanarGraph_find_edge(graph2):
assert graph2.find_edge(0,1) == (0, 1)
assert graph2.find_edge(1,0) == (0, -1)
assert graph2.find_edge(3,4) == (3, 1)
assert graph2.find_edge(4,3) == (3, -1)
with pytest.raises(KeyError):
graph2.find_edge(1,3)
def test_PlanarGraph_neighbours(graph2):
assert graph2.neighbours(0) == [1]
assert graph2.neighbours(1) == [0,2,5]
assert graph2.neighbours(2) == [1,3,5]
assert graph2.neighbours(3) == [2,4]
assert graph2.neighbours(4) == [3,6,7]
assert graph2.neighbours(5) == [1,2,6]
assert graph2.neighbours(6) == [4,5]
assert graph2.neighbours(7) == [4]
def test_PlanarGraph_degree(graph2):
assert graph2.degree(0) == 1
assert graph2.degree(1) == 3
assert graph2.degree(3) == 2
def test_PlanarGraph_neighbourhood_edges(graph2):
assert graph2.neighbourhood_edges(0) == [0]
assert graph2.neighbourhood_edges(1) == [0,1,4]
assert graph2.neighbourhood_edges(2) == [1,2,7]
assert graph2.neighbourhood_edges(3) == [2,3]
assert graph2.neighbourhood_edges(4) == [3,6,8]
assert graph2.neighbourhood_edges(5) == [4,5,7]
assert graph2.neighbourhood_edges(6) == [5,6]
assert graph2.neighbourhood_edges(7) == [8]
def test_PlanarGraph_neighbourhood_paths_between(graph2):
assert list(graph2.paths_between(0,1,10000)) == [[0,1]]
out = [ tuple(x) for x in graph2.paths_between(0,2,10000) ]
assert len(set(out)) == len(out)
assert set(out) == {(0,1,2), (0,1,5,2), (0,1,5,6,4,3,2)}
out = [ tuple(x) for x in graph2.paths_between(0,3,10000) ]
assert len(set(out)) == len(out)
assert set(out) == {(0,1,2,3), (0,1,5,2,3), (0,1,5,6,4,3), (0,1,2,5,6,4,3)}
out = [ tuple(x) for x in graph2.paths_between(0,7,10000) ]
assert len(set(out)) == len(out)
assert set(out) == {(0,1,2,3,4,7), (0,1,2,5,6,4,7), (0,1,5,6,4,7), (0,1,5,2,3,4,7)}
def test_PlanarGraph_neighbourhood_paths_between_length_bound(graph2):
assert list(graph2.paths_between(0,1,1)) == [[0,1]]
assert list(graph2.paths_between(0,1,0.9)) == []
assert list(graph2.paths_between(0,2,2)) == []
assert list(graph2.paths_between(0,2,2.5)) == [[0,1,2]]
assert list(graph2.paths_between(0,6,3)) == []
assert list(graph2.paths_between(0,6,3.5)) == [[0,1,5,6]]
out = [ tuple(x) for x in graph2.paths_between(0,6,5.5) ]
assert len(set(out)) == len(out)
assert set(out) == {(0,1,5,6), (0,1,2,5,6)}
def test_PlanarGraph_paths_between_avoiding(graph2):
assert list(graph2.paths_between_avoiding(0, 2, [(0,1)], 100)) == []
assert list(graph2.paths_between_avoiding(0, 2, [(1,0)], 100)) == []
out = [tuple(x) for x in graph2.paths_between_avoiding(0, 3, [(1,2), (5,2)], 100)]
assert len(set(out)) == len(out)
assert set(out) == {(0,1,5,6,4,3)}
out = [tuple(x) for x in graph2.paths_between_avoiding(0, 7, [(1,2), (4,3)], 100)]
assert len(set(out)) == len(out)
assert set(out) == {(0,1,5,6,4,7)}
def test_PlanarGraph_edge_paths_between(graph2):
out = [ tuple(x) for x in graph2.edge_paths_between((0,1), (1,2), 1000)]
assert len(set(out)) == len(out)
assert set(out) == {(1,), (1,5,2), (1,5,6,4,3,2)}
out = [ tuple(x) for x in graph2.edge_paths_between((0,1), (4,7), 1000)]
assert len(set(out)) == len(out)
assert set(out) == {(1,2,3,4), (1,2,5,6,4), (1,5,6,4), (1,5,2,3,4)}
out = [ tuple(x) for x in graph2.edge_paths_between((3,4), (2,5), 1000)]
assert len(set(out)) == len(out)
assert set(out) == {(3,2), (4,6,5), (3,2,1,5), (4,6,5,1,2)}
out = [ tuple(x) for x in graph2.edge_paths_between((3,4), (5,1), 1000)]
assert len(set(out)) == len(out)
assert set(out) == {(3,2,1), (3,2,5), (4,6,5), (4,6,5,2,1)}
out = [ tuple(x) for x in graph2.edge_paths_between((0,1), (1,2), 0)]
assert len(set(out)) == len(out)
assert set(out) == {(1,)}
out = [ tuple(x) for x in graph2.edge_paths_between((0,1), (1,2), 2)]
assert len(set(out)) == len(out)
assert set(out) == {(1,)}
out = [ tuple(x) for x in graph2.edge_paths_between((0,1), (1,2), 3.5)]
assert len(set(out)) == len(out)
assert set(out) == {(1,), (1,5,2)}
def test_PlanarGraph_walk_from(graph2):
search = graph2.walk_from(0, 1)
assert next(search) == ([0], 0.0)
with pytest.raises(StopIteration):
search.send(True)
search.close()
search = graph2.walk_from(1, 2)
assert next(search) == ([1], 0.0)
assert search.send(True) == ([1,5], pytest.approx(np.sqrt(2)))
assert search.send(True) == ([1,5,2], pytest.approx(np.sqrt(2)+2))
assert search.send(True) == ([1,5,2,3], pytest.approx(np.sqrt(2)+3))
assert search.send(True) == ([1,5,2,3,4], pytest.approx(np.sqrt(2)*2+3))
assert search.send(True) == ([1,5,2,3,4,7], pytest.approx(np.sqrt(2)*2+4))
assert search.send(True) == ([1,5,2,3,4,6], pytest.approx(np.sqrt(2)*3+3))
assert search.send(True) == ([1,5,6], pytest.approx(np.sqrt(2)+1))
assert search.send(True) == ([1,5,6,4], pytest.approx(np.sqrt(2)*2+1))
assert search.send(True) == ([1,5,6,4,7], pytest.approx(np.sqrt(2)*2+2))
assert search.send(True) == ([1,5,6,4,3], pytest.approx(np.sqrt(2)*3+1))
assert search.send(True) == ([1,5,6,4,3,2], pytest.approx(np.sqrt(2)*3+2))
assert search.send(True) == ([1,0], 1)
with pytest.raises(StopIteration):
search.send(True)
search = graph2.walk_from(1, 2)
assert next(search) == ([1], 0.0)
assert search.send(True) == ([1,5], pytest.approx(np.sqrt(2)))
assert search.send(False) == ([1,0], 1)
with pytest.raises(StopIteration):
search.send(True)
search = graph2.walk_from(2, 1)
assert next(search) == ([2], 0.0)
assert search.send(True) == ([2,5], 2)
assert search.send(True) == ([2,5,6], 3)
assert search.send(False) == ([2,5,1], pytest.approx(2+np.sqrt(2)))
assert search.send(True) == ([2,5,1,0], pytest.approx(3+np.sqrt(2)))
assert search.send(True) == ([2,3], 1)
with pytest.raises(StopIteration):
search.send(False)
def test_Graph_walk_with_degrees(graph2):
paths = list(graph2.walk_with_degrees(0, 1, 1000, 1000))
assert paths == [(None,0,0,1)]
paths = list(graph2.walk_with_degrees(0, None, 1000, 1000))
assert paths[0] == (None, 0, 0, 1)
assert paths[1] == (0, 0, pytest.approx(1), 1)
assert paths[2] == (4, 1, pytest.approx(1+np.sqrt(2)), 2)
assert paths[3] == (7, pytest.approx(1+np.sqrt(2)), pytest.approx(3+np.sqrt(2)), 4)
assert paths[4] == (2, pytest.approx(3+np.sqrt(2)), pytest.approx(4+np.sqrt(2)), 8)
assert paths[5] == (3, pytest.approx(4+np.sqrt(2)), pytest.approx(4+2*np.sqrt(2)), 8)
assert paths[6] == (8, pytest.approx(4+2*np.sqrt(2)), pytest.approx(5+2*np.sqrt(2)), 16)
assert paths[7] == (6, pytest.approx(4+2*np.sqrt(2)), pytest.approx(4+3*np.sqrt(2)), 16)
assert paths[8] == (5, pytest.approx(1+np.sqrt(2)), pytest.approx(2+np.sqrt(2)), 4)
# ...
assert len(paths) == 24
paths = list(graph2.walk_with_degrees(0, None, 1.1, 1000))
assert paths == [(None,0,0,1), (0, 0, pytest.approx(1), 1),
(4, 1, pytest.approx(1+np.sqrt(2)), 2),
(1, 1, pytest.approx(1+np.sqrt(2)), 2) ]
paths = list(graph2.walk_with_degrees(0, None, 1, 1000))
assert paths == [(None,0,0,1), (0, 0, pytest.approx(1), 1)]
paths = list(graph2.walk_with_degrees(0, None, 3, 1000))
assert len(paths) == 8
paths = list(graph2.walk_with_degrees(1, None, 1.1, 1000))
assert paths == [(None,0,0,1),
(4, 0, pytest.approx(np.sqrt(2)), 2),
(1, 0, pytest.approx(np.sqrt(2)), 2),
(0, 0, pytest.approx(1), 2)]
paths = list(graph2.walk_with_degrees(1, None, 1000, 2))
assert paths == [(None,0,0,1),
(4, 0, pytest.approx(np.sqrt(2)), 2),
(1, 0, pytest.approx(np.sqrt(2)), 2),
(0, 0, pytest.approx(1), 2)]
def test_TimedNetworkPoints():
times = [datetime.datetime(2017,8,7,12,30), datetime.datetime(2017,8,7,13,45)]
locations = [((1,2), 0.4), ((3,4), 0.1)]
tnp = network.TimedNetworkPoints(times, locations)
expected_times = [(datetime.datetime(2017,1,1) - x).total_seconds() for x in times]
np.testing.assert_allclose(expected_times,
(np.datetime64("2017-01-01") - tnp.timestamps) / np.timedelta64(1, "s"))
np.testing.assert_allclose(tnp.distances, [0.4, 0.1])
np.testing.assert_allclose(tnp.start_keys, [1, 3])
np.testing.assert_allclose(tnp.end_keys, [2, 4])
with pytest.raises(ValueError):
network.TimedNetworkPoints([datetime.datetime(2017,8,7,12,30)], locations)
assert tnp[0] == [np.datetime64("2017-08-07T12:30"), 1, 2, 0.4]
tnpp = tnp[1:]
assert np.all(tnpp.timestamps == [np.datetime64("2017-08-07T13:45")])
assert tnpp.start_keys == [3]
assert tnpp.end_keys == [4]
np.testing.assert_allclose(tnpp.distances, [0.1])
graph = mock.Mock()
graph.edge_to_coords.return_value = (1.3, 2.4)
tp = tnp.to_timed_points(graph)
np.testing.assert_allclose(expected_times,
(np.datetime64("2017-01-01") - tp.timestamps) / np.timedelta64(1, "s"))
np.testing.assert_allclose(tp.xcoords, [1.3, 1.3])
np.testing.assert_allclose(tp.ycoords, [2.4, 2.4])
assert graph.edge_to_coords.call_args_list == [mock.call(1, 2, 0.4), mock.call(3, 4, 0.1)]
def test_TimedNetworkPoints_from_projection():
times = [datetime.datetime(2017,8,7,12,30), datetime.datetime(2017,8,7,13,45)]
xcs = [1.2, 2.3]
ycs = [4.5, 6.7]
tp = open_cp.data.TimedPoints.from_coords(times, xcs, ycs)
graph = mock.Mock()
graph.project_point_to_graph.return_value = ((1,2), 0.3)
tnp = network.TimedNetworkPoints.project_timed_points(tp, graph)
expected_times = [(datetime.datetime(2017,1,1) - x).total_seconds() for x in times]
np.testing.assert_allclose(expected_times,
(np.datetime64("2017-01-01") - tnp.timestamps) / np.timedelta64(1, "s"))
np.testing.assert_allclose(tnp.start_keys, [1, 1])
np.testing.assert_allclose(tnp.end_keys, [2, 2])
np.testing.assert_allclose(tnp.distances, [0.3, 0.3])
    assert graph.project_point_to_graph.call_args_list == [mock.call(1.2, 4.5), mock.call(2.3, 6.7)]
def test_GraphBuilder():
b = network.GraphBuilder()
b.add_edge(0,1)
b.add_edge(1,2)
b.add_edge(3,4)
g = b.build()
assert g.number_edges == 3
assert g.vertices == {0,1,2,3,4}
assert g.edges == [(0,1), (1,2), (3,4)]
b.lengths = [1,2,5]
g = b.build()
assert g.number_edges == 3
assert g.vertices == {0,1,2,3,4}
assert g.edges == [(0,1), (1,2), (3,4)]
assert g.length(0) == 1
assert g.length(1) == 2
assert g.length(2) == 5
b.lengths = [1,2]
with pytest.raises(ValueError):
b.build()
b.vertices.add(7)
b.remove_unused_vertices()
assert b.vertices == {4,3,2,1,0}
assert list(g.paths_between(0, 2)) == [[0,1,2]]
def test_GraphBuilder_duplicate_edges():
b = network.GraphBuilder()
b.add_edge(0,1)
b.add_edge(1,2)
b.add_edge(0,1)
b.add_edge(1,2)
b.add_edge(3,4)
with pytest.raises(ValueError):
g = b.build()
b.remove_duplicate_edges()
g = b.build()
assert g.number_edges == 3
assert g.vertices == {0,1,2,3,4}
@pytest.fixture
def graph3():
b = network.GraphBuilder()
b.add_edge(0,1).add_edge(1,2).add_edge(2,3).add_edge(3,4).add_edge(4,5)
b.add_edge(3,5).add_edge(5,6).add_edge(6,7).add_edge(7,1)
return b.build()
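# graph3 (descriptive note): a pendant edge 0-1 attached to the cycle
# 1-2-3-5-6-7-1, plus a two-edge detour 3-4-5 running parallel to the chord
# 3-5; built with the plain GraphBuilder, so it carries no coordinates.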
def test_Graph_partition_by_segments(graph3):
segs = set(graph3.partition_by_segments())
# Bad test: no reason (1,2,3) couldn't be (3,2,1)
assert segs == {(0,1), (1,2,3), (3,5), (3,4,5), (5,6,7,1)}
def test_simple_reduce_graph(graph3):
g = network.simple_reduce_graph(graph3)
assert g.vertices == {0,1,3,4,5}
assert g.number_edges == 6
f = frozenset
edges = set(f(e) for e in g.edges)
assert edges == {f((0,1)), f((1,3)), f((3,4)), f((3,5)), f((4,5)), f((5,1))}
@pytest.fixture
def graph4():
b = network.GraphBuilder()
b.add_edge(0,1).add_edge(1,2).add_edge(2,3).add_edge(3,4)
b.add_edge(4,5).add_edge(5,0).add_edge(0,6).add_edge(0,7)
return b.build()
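# graph4 (descriptive note): a six-cycle 0-1-2-3-4-5-0 with two pendant edges
# 0-6 and 0-7 hanging off vertex 0.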
def test_simple_reduce_graph2(graph4):
g = network.simple_reduce_graph(graph4)
assert g.number_edges == 5
f = frozenset
edges = set(f(e) for e in g.edges)
# Again, dodgy test...
assert edges == {f((0,6)), f((0,7)), f((0,4)), f((4,5)), f((5,0))}
def test_derived_graph2(graph2):
g = network.to_derived_graph(graph2)
assert g.vertices == {(0,1), (1,2), (2,3), (3,4), (4,7), (5,2), (1,5), (5,6), (6,4)}
assert g.edges[0] == ((0,1), (1,2))
assert g.lengths[0] == pytest.approx((1+np.sqrt(2))/2)
def test_derived_graph4(graph4):
g = network.to_derived_graph(graph4, use_edge_indicies=True)
assert g.vertices == {0,1,2,3,4,5,6,7}
assert g.edges == [(0,5), (0,6), (0,7), (0,1), (1,2), (2,3), (3,4), (4,5), (5,6), (5,7), (6,7)]
assert g.lengths is None
def test_shortest_edge_paths_with_degrees1(graph2):
dists, degrees = network.shortest_edge_paths_with_degrees(graph2, 0)
sq2 = np.sqrt(2)
np.testing.assert_allclose(dists, [0, (1+sq2)/2, 1+sq2, 1.5+sq2+sq2/2, (1+sq2)/2,
1+sq2, 1.5+sq2+sq2/2, 1.5 + sq2, sq2*2+2])
np.testing.assert_allclose(degrees, [1, 2, 4, 4, 2, 4, 4, 4, 8])
dists, degrees = network.shortest_edge_paths_with_degrees(graph2, 2)
np.testing.assert_allclose(dists, [1+sq2, (1+sq2)/2, 0, (1+sq2)/2,
0.5+sq2+sq2/2, 3, 0.5+sq2+sq2/2, 1.5, 1+sq2])
np.testing.assert_allclose(degrees, [4, 2, 1, 1, 4, 4, 2, 2, 2])
def test_segment_graph1(graph1):
got = set(frozenset(k) for k in network.segment_graph(graph1))
assert got == {frozenset({0}), frozenset({1,2})}
def test_segment_graph3(graph3):
got = set(frozenset(k) for k in network.segment_graph(graph3))
assert got == {frozenset(k) for k in [{0}, {1,2}, {3, 4}, {5}, {6,7,8}]}
def test_segment_graph4(graph4):
got = set(frozenset(k) for k in network.segment_graph(graph4))
assert got == {frozenset(k) for k in [{0,1,2,3,4,5}, {6}, {7}]}
def test_ordered_segment_graph1(graph1):
got = set(tuple(x) for x in network.ordered_segment_graph(graph1))
print("Dodgy test, as having (1,0) instead of (0,1) would be fine.")
assert got == {(0,1), (2,3,4)}
def test_ordered_segment_graph3(graph3):
got = set(tuple(x) for x in network.ordered_segment_graph(graph3))
print("Dodgy test...")
assert got == {(0,1), (1,2,3), (3,4,5), (3,5), (5,6,7,1)}
def test_ordered_segment_graph4(graph4):
got = set(tuple(x) for x in network.ordered_segment_graph(graph4))
print("Dodgy test, as having (6,0) instead of (0,6) would be fine.")
assert got == {(3,4,5,0,1,2,3), (0,6), (0,7)}
def test_ordered_segment_cycle():
graph = network.GraphBuilder().add_edge(0,1).add_edge(1,2).add_edge(0,2).build()
got = set(tuple(x) for x in network.ordered_segment_graph(graph))
print("Dodgy test...")
assert got == {(2,0,1,2)}
@pytest.fixture
def graph5():
b = network.GraphBuilder()
b.add_edge(0,1).add_edge(1,2).add_edge(3,4).add_edge(4,5).add_edge(5,3)
b.vertices.add(6)
return b.build()
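# graph5 (descriptive note): three connected components -- a path 0-1-2, a
# triangle 3-4-5, and the isolated vertex 6 added directly to the builder.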
def test_connected_components(graph5):
out = list(network.connected_components(graph5))
assert len(out) == 3
out = {frozenset(x) for x in out}
expected = {frozenset({0,1,2}), frozenset({4,5,3}), frozenset({6})}
assert out == expected
|
QuantCrimAtLeeds/PredictCode
|
tests/network_test.py
|
Python
|
artistic-2.0
| 25,390
| 0.037968
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
version = (0, "5d")
__title__ = "Elevator"
__author__ = "Oleiade"
__license__ = "MIT"
__version__ = '.'.join(map(str, version))
|
oleiade/Elevator
|
elevator/__init__.py
|
Python
|
mit
| 177
| 0
|
import inspect
import itertools
import types
import unittest
from tempfile import NamedTemporaryFile
from tests.test_bears.AllKindsOfSettingsDependentBear import (
AllKindsOfSettingsDependentBear)
from coala_quickstart.generation.Utilities import (
contained_in,
get_hashbang,
get_default_args, get_all_args,
search_for_orig, concatenate, peek,
split_by_language,
get_language_from_hashbang)
from coalib.results.SourcePosition import SourcePosition
from coalib.results.SourceRange import SourceRange
def foo():
pass
def foo_bar(n):
def bar():
return n+1
return bar
class TestAdditionalFunctions(unittest.TestCase):
def second(func):
def wrapper():
return func()
return wrapper
def first():
pass
third = second(first)
fourth = second(second(first))
def test_search_for_orig(self):
self.assertEqual(types.MethodType(search_for_orig(self.third, 'first'),
self), self.first)
self.assertEqual(types.MethodType(search_for_orig(self.fourth,
'first'),
self), self.first)
self.assertEqual(search_for_orig(self.first, 'first'), None)
self.assertEqual(search_for_orig(self.first, "bar"), None)
self.assertEqual(search_for_orig(self.first, "first"), None)
# function without closure
self.assertEqual(search_for_orig(foo, "bar"), None)
self.assertEqual(search_for_orig(foo, "foo"), None)
# function with closure
func = foo_bar(3)
self.assertEqual(search_for_orig(func, "bar"), None)
def test_get_default_args(self):
self.assertEqual(get_default_args(AllKindsOfSettingsDependentBear.run),
{'chars': False,
'dependency_results': {},
'max_line_lengths': 1000,
'no_chars': 79,
'use_spaces': None,
'use_tabs': False})
def test_get_all_args(self):
empty = inspect._empty
self.assertEqual(get_all_args(AllKindsOfSettingsDependentBear.run),
{'self': empty, 'file': empty, 'filename': empty,
'configs': empty,
'use_bears': empty, 'no_lines': empty,
'use_spaces': None,
'use_tabs': False, 'max_line_lengths': 1000,
'no_chars': 79,
'chars': False, 'dependency_results': {}})
class TestHashBang(unittest.TestCase):
def test_missing_file(self):
self.assertIsNone(get_hashbang('does_not_exist'))
def test_with_bash(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('#!bin/bash\n')
temp_file.close()
self.assertEqual(get_hashbang(temp_file.name), '#!bin/bash')
def test_no_eol(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('#!bin/bash')
temp_file.close()
self.assertEqual(get_hashbang(temp_file.name), '#!bin/bash')
def test_with_slash(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('#!/bin/bash\n')
temp_file.close()
self.assertEqual(get_hashbang(temp_file.name), '#!/bin/bash')
def test_with_space(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('#!/bin/bash \n')
temp_file.close()
self.assertEqual(get_hashbang(temp_file.name), '#!/bin/bash')
def test_env(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('#!/bin/env bash\n')
temp_file.close()
self.assertEqual(get_hashbang(temp_file.name), '#!/bin/env bash')
def test_non_unicode_file(self):
with NamedTemporaryFile(mode='w+b', delete=False) as temp_file:
temp_file.write(b'\2000x80')
temp_file.close()
self.assertIsNone(get_hashbang(temp_file.name))
def test_empty_file(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('\n')
temp_file.close()
self.assertIsNone(get_hashbang(temp_file.name))
def test_no_bang(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('#bin/bash')
temp_file.close()
self.assertIsNone(get_hashbang(temp_file.name))
def test_no_hash(self):
with NamedTemporaryFile(mode='w+t', delete=False) as temp_file:
temp_file.write('!bin/bash')
temp_file.close()
self.assertIsNone(get_hashbang(temp_file.name))
def test_get_language_from_hashbang(self):
self.assertEqual(get_language_from_hashbang('#!/usr/bin/env python'),
'python')
self.assertEqual(get_language_from_hashbang('#!bin/bash'),
'bash')
self.assertEqual(get_language_from_hashbang('#!/bin/bash'),
'bash')
def test_split_by_language(self):
with NamedTemporaryFile(delete=False, suffix='.py') as temp_file1, \
NamedTemporaryFile(delete=False, suffix='.txt') as temp_file2, \
NamedTemporaryFile(delete=False, suffix='.txt') as temp_file3:
temp_file3.write(b'#!bin/python')
temp_file3.close()
langs = split_by_language(
[temp_file1.name, temp_file2.name, temp_file3.name])
self.assertCountEqual(
langs,
{
'all': [temp_file1.name, temp_file3.name],
'python': [temp_file1.name, temp_file3.name],
}
)
class TestDataStructuresOperationsFunctions(unittest.TestCase):
def test_concatenate(self):
dict1 = {'1': {'a', 'b', 'c'},
'2': {'d', 'e', 'f'},
'3': {'g', 'h', 'i'}}
dict2 = {'4': {'j', 'k', 'l'},
'2': {'m', 'n', 'o'},
'5': {'p', 'q', 'r'}}
result_dict = {'1': {'a', 'b', 'c'},
'2': {'d', 'e', 'f', 'm', 'n', 'o'},
'3': {'g', 'h', 'i'},
'4': {'j', 'k', 'l'},
'5': {'p', 'q', 'r'}}
ret_val = concatenate(dict1, dict2)
self.assertEqual(ret_val, result_dict)
def test_peek(self):
def give_gen():
for i in range(1, 5):
yield i
def give_empty_gen():
for i in range(1, 1):
yield i
obj = give_gen()
for i in range(1, 5):
num, new_obj = peek(obj)
obj, new_obj = itertools.tee(obj)
self.assertEqual(i, num)
ret_val = peek(obj)
obj = give_empty_gen()
ret_val_1 = peek(obj)
self.assertEqual(ret_val, None)
self.assertEqual(ret_val_1, None)
class TestContainedIn(unittest.TestCase):
def test_contained_in_1(self):
start = SourcePosition('a.py', line=1, column=5)
end = SourcePosition('a.py', line=5, column=1)
smaller = SourceRange(start, end)
start = SourcePosition('a.py', line=1, column=5)
end = SourcePosition('a.py', line=5, column=2)
bigger = SourceRange(start, end)
self.assertTrue(contained_in(smaller, bigger))
start = SourcePosition('a.py', line=1, column=4)
end = SourcePosition('a.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertTrue(contained_in(smaller, bigger))
start = SourcePosition('a.py', line=1, column=5)
end = SourcePosition('a.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertTrue(contained_in(smaller, bigger))
def test_contained_in_2(self):
start = SourcePosition('a.py', line=1, column=5)
end = SourcePosition('a.py', line=5, column=1)
smaller = SourceRange(start, end)
start = SourcePosition('a.py', line=1, column=9)
end = SourcePosition('a.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
start = SourcePosition('a.py', line=1, column=6)
end = SourcePosition('a.py', line=4, column=2)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
start = SourcePosition('b.py', line=1, column=5)
end = SourcePosition('b.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
def test_contained_in_3(self):
start = SourcePosition('a.py', line=1, column=5)
end = SourcePosition('a.py', line=5, column=1)
smaller = SourceRange(start, end)
start = SourcePosition('a.py', line=2, column=5)
end = SourcePosition('a.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
def test_contained_in_4(self):
start = SourcePosition('a.py', line=3, column=5)
end = SourcePosition('a.py', line=5, column=1)
smaller = SourceRange(start, end)
start = SourcePosition('a.py', line=1, column=5)
end = SourcePosition('a.py', line=6, column=1)
bigger = SourceRange(start, end)
self.assertTrue(contained_in(smaller, bigger))
start = SourcePosition('a.py', line=3, column=5)
end = SourcePosition('a.py', line=6, column=1)
bigger = SourceRange(start, end)
self.assertTrue(contained_in(smaller, bigger))
def test_contained_in_5(self):
start = SourcePosition('a.py', line=3, column=5)
end = SourcePosition('a.py', line=5, column=1)
smaller = SourceRange(start, end)
start = SourcePosition('a.py', line=2, column=5)
end = SourcePosition('a.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertTrue(contained_in(smaller, bigger))
start = SourcePosition('a.py', line=3, column=8)
end = SourcePosition('a.py', line=7, column=1)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
def test_contained_in_6(self):
start = SourcePosition('a.py', line=3, column=5)
end = SourcePosition('a.py', line=5, column=7)
smaller = SourceRange(start, end)
start = SourcePosition('a.py', line=3, column=5)
end = SourcePosition('a.py', line=5, column=6)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
start = SourcePosition('a.py', line=2, column=8)
end = SourcePosition('a.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
start = SourcePosition('a.py', line=2, column=None)
end = SourcePosition('a.py', line=5, column=1)
bigger = SourceRange(start, end)
self.assertFalse(contained_in(smaller, bigger))
|
coala-analyzer/coala-quickstart
|
tests/generation/UtilitiesTest.py
|
Python
|
agpl-3.0
| 11,382
| 0.000088
|
"""
timer.py: Request timer statistical tool
Code adapted from Bottle documentation
Copyright 2014-2015, Outernet Inc.
Some rights reserved.
This software is free software licensed under the terms of GPLv3. See COPYING
file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
"""
from __future__ import division
import time
from functools import wraps
try:
import resource
except ImportError:
# Platform does not support ``resources`` module.
resource = None
from bottle import response
def get_mem():
if not resource:
return 0
rss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss # in KB
return round(rss / 1024, 3)
def request_timer(label):
t_header = str('X-%s-Time' % label)
m_header = str('X-%s-Mem' % label)
def _timer(callback):
@wraps(callback)
def wrapper(*args, **kwargs):
start = time.time()
res = callback(*args, **kwargs)
delta = time.time() - start
response.headers[t_header] = str(
round(delta * 1000, 4)) + 'ms'
response.headers[m_header] = str(get_mem())
return res
return wrapper
return _timer
def total_timer_plugin(app):
app.install(request_timer('Total'))
def handler_timer_plugin(app):
app.install(request_timer('Handler'))
|
karanisverma/feature_langpop
|
librarian/utils/timer.py
|
Python
|
gpl-3.0
| 1,356
| 0
|
from django.shortcuts import render, resolve_url
from django.contrib.auth.decorators import login_required
from gui.decorators import profile_required
from gui.utils import collect_view_data
from gui.signals import view_faq
from api.decorators import setting_required
@login_required
@profile_required
def api(request):
"""
API Documentation view (via iframe).
"""
context = collect_view_data(request, 'api_docs')
return render(request, 'gui/docs/api.html', context)
@login_required
@profile_required
def user_guide(request):
"""
User Guide view (via iframe).
"""
context = collect_view_data(request, 'user_guide')
return render(request, 'gui/docs/user_guide.html', context)
@login_required
@profile_required
@setting_required('FAQ_ENABLED', check_settings=False) # FAQ must be enabled only in DC
def faq(request):
"""
Frequently Asked Questions view.
"""
dc_settings = request.dc.settings
context = collect_view_data(request, 'faq')
context['support_email'] = dc_settings.SUPPORT_EMAIL
if dc_settings.SUPPORT_ENABLED:
context['support_section_url'] = resolve_url('add_ticket')
else:
context['support_section_url'] = '#'
view_faq.send(sender='faq', request=request, context=context)
return render(request, 'gui/docs/faq.html', context)
|
erigones/esdc-ce
|
gui/docs/views.py
|
Python
|
apache-2.0
| 1,344
| 0.000744
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-30 00:07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailimages', '0010_change_on_delete_behaviour'),
('recommendations', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='recommendation',
name='displayTitle',
field=wagtail.wagtailcore.fields.RichTextField(blank=True),
),
migrations.AddField(
model_name='recommendation',
name='image',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image'),
),
migrations.AddField(
model_name='recommendation',
name='main_content',
field=wagtail.wagtailcore.fields.RichTextField(blank=True, default=None, null=True),
),
migrations.AddField(
model_name='recommendation',
name='sub_title',
field=wagtail.wagtailcore.fields.RichTextField(blank=True, default=None, null=True),
),
]
|
Ecotrust/F2S-MOI
|
moi/recommendations/migrations/0002_auto_20151230_0007.py
|
Python
|
apache-2.0
| 1,297
| 0.002313
|
"""
To use this, create a settings.py file and make these variables:
TOKEN=<oath token for github>
ORG=<your org in github>
DEST=<Path to download to>
"""
from github import Github
from subprocess import call
import os
from settings import TOKEN, ORG, DEST
def download():
"""Quick and Dirty Download all repos function"""
os.chdir(DEST)
print "Downloading to destination: ", os.getcwd()
g = Github(TOKEN)
repos = []
for repo in g.get_organization(ORG).get_repos():
print "Fetching Repo Name: %s" % repo.name
repos.append("git@github.com:%s/%s.git" % (ORG, repo.name))
total = len(repos)
print "Found %s repos" % total
count = 0
for repo in repos:
count +=1
print "Cloning Repo [%s]/[%s]: %s" % (count, total, repo)
call([u'git', u'clone', repo])
download()
|
sqor/3rdeye
|
fetch_repos.py
|
Python
|
mit
| 789
| 0.032953
|
def count_factor(n, factor=0):
    # Divisors pair up as (i, n // i) for i up to sqrt(n), so each hit adds 2;
    # if n is a perfect square its root would otherwise be counted twice.
    for i in range(1, int(n**0.5)+1):
        if n % i == 0:
            factor += 2
    if int(n**0.5)**2 == n:
        factor -= 1
    return factor
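# nth_triangular_number computes T_n = n*(n+1)/2, written below as n + n*(n-1)/2.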
def nth_triangular_number(n):
return int(n+(n*(n-1))/2)
def find_triangular_number_over(k, n=0):
while count_factor(nth_triangular_number(n)) <= k:
n += 1
return nth_triangular_number(n)
def main():
print(find_triangular_number_over(500))
if __name__ == "__main__":
main()
|
higee/project_euler
|
11-20/12.py
|
Python
|
mit
| 439
| 0.009112
|
# Globals for the directions
# Change the values as you see fit
EAST = None
NORTH = None
WEST = None
SOUTH = None
class Robot:
def __init__(self, direction=NORTH, x_pos=0, y_pos=0):
pass
|
jmluy/xpython
|
exercises/practice/robot-simulator/robot_simulator.py
|
Python
|
mit
| 201
| 0
|
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="scatter.unselected.textfont", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
role=kwargs.pop("role", "style"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/scatter/unselected/textfont/_color.py
|
Python
|
mit
| 470
| 0.002128
|
"""
Document class definition
"""
class Document(object):
"""Represents a document"""
def __init__(self, id, name, type, path):
if '\n' in name:
raise ValueError('The document name cannot contain newline character!')
if '\n' in type:
raise ValueError('The document type cannot contain newline character!')
if '\n' in path:
raise ValueError('The document path cannot contain newline character!')
self._id = id
self._name = name
self._type = type
self._path = path
@property
def id(self):
return self._id
@property
def name(self):
return self._name
@property
def type(self):
return self._type
@property
def path(self):
return self._path
|
piller-imre/grimoire-tk
|
grimoire/document.py
|
Python
|
gpl-3.0
| 806
| 0.003722
|
#!/usr/bin/env python
"""
For a given VDI and import file this script will import a VDI onto an XS host.
This script needs to be run whenever you want to restore a VDI to a previous
version.
example: python cbt_import_whole_vdi.py -ip <host address> -u <host username>
-p <host password> -v <vdi uuid> -f <import VDI filename>
"""
import urllib3
import requests
import XenAPI
import argparse
def create_new_vdi(session, sr, size):
vdi_record = {
"SR": sr,
"virtual_size": size,
"type": "user",
"sharable": False,
"read_only": False,
"other_config": {},
"name_label": "CBT backup"
}
vdi_ref = session.xenapi.VDI.create(vdi_record)
vdi_uuid = session.xenapi.VDI.get_uuid(vdi_ref)
return vdi_uuid
def import_vdi(host, session_id, vdi_uuid, file_format, import_path):
url = ('https://%s/import_raw_vdi?session_id=%s&vdi=%s&format=%s'
% (host, session_id, vdi_uuid, file_format))
with open(import_path, 'r') as filehandle:
# ToDo: Security - We need to verify the SSL certificate here.
# Depends on CP-23051.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
with requests.Session() as session:
request = session.put(url, filehandle, verify=False)
request.raise_for_status()
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-ip', '--host-ip', dest='host')
parser.add_argument('-u', '--username', dest='username')
parser.add_argument('-p', '--password', dest='password')
parser.add_argument('-v', '--vdi-uuid', dest='vdi_uuid')
parser.add_argument('-f', '--filename', dest='path')
parser.add_argument('--as-new-vdi', dest='new_vdi', action='store_const',
const=True, default=False,
help='Create a new VDI for the import')
args = parser.parse_args()
session = XenAPI.Session("https://" + args.host, ignore_ssl=True)
session.login_with_password(args.username, args.password, "0.1",
"CBT example")
try:
vdi_uuid = args.vdi_uuid
if args.new_vdi:
vdi_ref = session.xenapi.VDI.get_by_uuid(args.vdi_uuid)
size = session.xenapi.VDI.get_virtual_size(vdi_ref)
sr_ref = session.xenapi.VDI.get_SR(vdi_ref)
vdi_uuid = create_new_vdi(session, sr_ref, size)
import_vdi(args.host, session._session, vdi_uuid, 'raw',
args.path)
print vdi_uuid
finally:
session.xenapi.session.logout(session)
if __name__ == "__main__":
main()
|
xenserver/xs-cbt-samples
|
cbt_import_whole_vdi.py
|
Python
|
bsd-3-clause
| 2,718
| 0.000368
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Mycroft Logging module.
This module provides the LOG pseudo function for quickly creating a logger
instance for use.
The default log level of the logger created here can ONLY be set in
/etc/mycroft/mycroft.conf or ~/.config/mycroft/mycroft.conf.
At runtime the level can also be changed programmatically by setting the
LOG.level attribute.
"""
import inspect
import logging
import sys
import mycroft
def getLogger(name="MYCROFT"):
"""Depreciated. Use LOG instead"""
return logging.getLogger(name)
def _make_log_method(fn):
@classmethod
def method(cls, *args, **kwargs):
cls._log(fn, *args, **kwargs)
method.__func__.__doc__ = fn.__doc__
return method
class LOG:
"""
Custom logger class that acts like logging.Logger
The logger name is automatically generated by the module of the caller
Usage:
>>> LOG.debug('My message: %s', debug_str)
13:12:43.673 - :<module>:1 - DEBUG - My message: hi
>>> LOG('custom_name').debug('Another message')
13:13:10.462 - custom_name - DEBUG - Another message
"""
_custom_name = None
handler = None
level = logging.getLevelName('INFO')
# Copy actual logging methods from logging.Logger
# Usage: LOG.debug(message)
debug = _make_log_method(logging.Logger.debug)
info = _make_log_method(logging.Logger.info)
warning = _make_log_method(logging.Logger.warning)
error = _make_log_method(logging.Logger.error)
exception = _make_log_method(logging.Logger.exception)
@classmethod
def init(cls):
""" Initializes the class, sets the default log level and creates
the required handlers.
"""
log_message_format = (
'{asctime} | {levelname:8} | {process:5} | {name} | {message}'
)
formatter = logging.Formatter(log_message_format, style='{')
formatter.default_msec_format = '%s.%03d'
cls.handler = logging.StreamHandler(sys.stdout)
cls.handler.setFormatter(formatter)
config = mycroft.configuration.Configuration.get(cache=False,
remote=False)
if config.get('log_format'):
formatter = logging.Formatter(config.get('log_format'), style='{')
cls.handler.setFormatter(formatter)
cls.level = logging.getLevelName(config.get('log_level', 'INFO'))
# Enable logging in external modules
cls.create_logger('').setLevel(cls.level)
@classmethod
def create_logger(cls, name):
logger = logging.getLogger(name)
logger.propagate = False
logger.addHandler(cls.handler)
return logger
def __init__(self, name):
LOG._custom_name = name
@classmethod
def _log(cls, func, *args, **kwargs):
if cls._custom_name is not None:
name = cls._custom_name
cls._custom_name = None
else:
# Stack:
# [0] - _log()
# [1] - debug(), info(), warning(), or error()
# [2] - caller
try:
stack = inspect.stack()
# Record:
# [0] - frame object
# [1] - filename
# [2] - line number
# [3] - function
# ...
record = stack[2]
mod = inspect.getmodule(record[0])
module_name = mod.__name__ if mod else ''
name = module_name + ':' + record[3] + ':' + str(record[2])
except Exception:
# The location couldn't be determined
name = 'Mycroft'
func(cls.create_logger(name), *args, **kwargs)
|
forslund/mycroft-core
|
mycroft/util/log.py
|
Python
|
apache-2.0
| 4,287
| 0
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class WebSiteManagementClientConfiguration(Configuration):
"""Configuration for WebSiteManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Your Azure subscription ID. This is a GUID-formatted string (e.g. 00000000-0000-0000-0000-000000000000).
:type subscription_id: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
super(WebSiteManagementClientConfiguration, self).__init__(**kwargs)
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2020-09-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-web/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
|
Azure/azure-sdk-for-python
|
sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_09_01/aio/_configuration.py
|
Python
|
mit
| 3,315
| 0.004223
|
from docar import Document, Collection
from docar import fields
from docar.backends.http import HttpBackendManager
from libthirty.state import uri, app_uri, service_uri, resource_collection_uri
from libthirty.validators import naming, max_25_chars, naming_with_dashes
import os
HttpBackendManager.SSL_CERT = os.path.join(
os.path.dirname(__file__), "ssl", "StartSSL_CA.pem")
class User(Document):
username = fields.StringField(validators=[naming, max_25_chars])
email = fields.StringField()
is_active = fields.BooleanField()
class Account(Document):
name = fields.StringField(validators=[naming, max_25_chars])
#users = fields.CollectionField(User)
class Meta:
backend_type = 'http'
identifier = 'name'
class CnameRecord(Document):
record = fields.StringField()
class Meta:
backend_type = 'http'
identifier = 'record'
class CnameRecords(Collection):
document = CnameRecord
class EnvironmentVariable(Document):
id = fields.NumberField(render=False, optional=True)
name = fields.StringField()
value = fields.StringField()
class Meta:
backend_type = 'http'
class EnvironmentVariables(Collection):
document = EnvironmentVariable
class Postgres(Document):
name = fields.StringField(validators=[naming_with_dashes, max_25_chars],
read_only=True, optional=True)
label = fields.StaticField(value="postgres")
variant = fields.ChoicesField(choices=['postgres_micro'],
default="postgres_micro")
username = fields.StringField(optional=True, read_only=True)
password = fields.StringField(optional=True, read_only=True)
host = fields.StringField(optional=True, read_only=True)
port = fields.NumberField(optional=True, read_only=True)
published = fields.BooleanField(default=False, read_only=True)
class Meta:
backend_type = 'http'
identifier = 'name'
context = ['account']
def post_uri(self):
return '%s/services' % app_uri()
def uri(self):
return service_uri(service='postgres')
class PostgresCollection(Collection):
document = Postgres
def uri(self):
return resource_collection_uri(label='postgres')
class Mongodb(Document):
name = fields.StringField(validators=[naming_with_dashes, max_25_chars],
read_only=True, optional=True)
label = fields.StaticField(value="mongodb")
variant = fields.ChoicesField(choices=['mongodb_micro'],
default='mongodb_micro')
username = fields.StringField(optional=True, read_only=True)
password = fields.StringField(optional=True, read_only=True)
host = fields.StringField(optional=True, read_only=True)
port = fields.NumberField(optional=True, read_only=True)
published = fields.BooleanField(default=False, read_only=True)
class Meta:
backend_type = 'http'
identifier = 'name'
context = ['account']
def post_uri(self):
return '%s/services' % app_uri()
def uri(self):
return service_uri(service='mongodb')
class MongodbCollection(Collection):
document = Mongodb
def uri(self):
return resource_collection_uri(label='mongodb')
class Repository(Document):
name = fields.StringField(validators=[naming_with_dashes, max_25_chars],
read_only=True, optional=True, render=False)
label = fields.StaticField(value="repository")
variant = fields.ChoicesField(choices=['git'], default='git')
location = fields.StringField()
ssh_key = fields.StringField(optional=True)
class Meta:
backend_type = 'http'
identifier = 'name'
context = ['account']
def post_uri(self):
return '%s/services' % app_uri()
def uri(self):
return service_uri(service='repository')
class RepositoryCollection(Collection):
document = Repository
def uri(self):
return resource_collection_uri(label='repository')
class Worker(Document):
name = fields.StringField(validators=[naming_with_dashes, max_25_chars],
read_only=True, render=False, optional=True)
label = fields.StaticField(value="worker")
variant = fields.ChoicesField(choices=['python'], default='python')
instances = fields.NumberField(default=1)
published = fields.BooleanField(default=False, read_only=True)
envvars = fields.CollectionField(EnvironmentVariables, inline=True)
class Meta:
backend_type = 'http'
identifier = 'name'
context = ['account']
def post_uri(self):
return '%s/services' % app_uri()
def uri(self):
return service_uri(service='worker')
class WorkerCollection(Collection):
document = Worker
def uri(self):
return resource_collection_uri(label='worker')
class App(Document):
name = fields.StringField(validators=[naming, max_25_chars])
label = fields.StaticField(value="app")
variant = fields.ChoicesField(default='python',
choices=['static', 'python'])
repository = fields.ForeignDocument(Repository)
postgres = fields.ForeignDocument(Postgres, optional=True)
mongodb = fields.ForeignDocument(Mongodb, optional=True)
worker = fields.ForeignDocument(Worker, optional=True)
repo_commit = fields.StringField(default='HEAD')
region = fields.ChoicesField(default="eu-nl", choices=['eu-nl', 'ams1'])
instances = fields.NumberField(default=1)
dns_record = fields.StringField(optional=True)
cnames = fields.CollectionField(CnameRecords, inline=True)
published = fields.BooleanField(default=False, read_only=True)
envvars = fields.CollectionField(EnvironmentVariables, inline=True)
class Meta:
backend_type = 'http'
identifier = 'name'
context = ['account']
def post_uri(self):
return '%s/apps' % uri()
def uri(self):
return app_uri(appname=self.name)
class AppCollection(Collection):
document = App
def uri(self):
return '%s/apps' % uri()
|
30loops/libthirty
|
libthirty/documents.py
|
Python
|
bsd-3-clause
| 6,039
| 0.001325
|
import ast
import label
import repository
import os
class IncludeDef:
"""
    Represents a build file include definition like
include_defs("//include/path").
"""
def __init__(self, ast_call: ast.Call) -> None:
self.ast_call = ast_call
def get_location(self) -> str:
"""
Returns an include definition location.
For include_defs("//include/path") it is "//include/path".
"""
return self.ast_call.args[0].s
def get_label(self) -> label.Label:
"""Returns a label identifying a build extension file."""
return label.from_string(self.get_location())
def get_include_path(self, repo: repository.Repository):
"""Returns a path to a file from which symbols should be imported."""
l = self.get_label()
return os.path.join(repo.get_cell_path(l.cell), l.package)
def from_ast_call(ast_call: ast.Call) -> IncludeDef:
"""
IncludeDef factory method that creates instances from ast Call description.
"""
return IncludeDef(ast_call)
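# A short usage sketch (the build-file path below is illustrative):
#
#   import ast
#   call = ast.parse('include_defs("//include/defs")').body[0].value
#   inc = from_ast_call(call)
#   inc.get_location()   # -> "//include/defs"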
|
LegNeato/buck
|
scripts/migrations/include_def.py
|
Python
|
apache-2.0
| 1,059
| 0.000944
|
import relayManager
import dronekit
class ShotManager():
def __init__(self):
# see the shotlist in app/shots/shots.p
print "init"
def Start(self, vehicle):
self.vehicle = vehicle
# Initialize relayManager
self.relayManager = relayManager.RelayManager(self)
target = 'udp:127.0.0.1:14551' #'tcp:127.0.0.1:5760'
print 'Connecting to ' + target + '...'
vehicle = dronekit.connect(target, wait_ready=True)
sm = ShotManager()
sm.Start(vehicle)
|
mapossum/SeymourSolo
|
tester.py
|
Python
|
gpl-3.0
| 495
| 0.006061
|
# coding=utf-8
import datetime
import logging
import time
import uuid
from dateutil.relativedelta import relativedelta
from redis.connection import Connection
import listenbrainz.db.user as db_user
from listenbrainz.db.testing import DatabaseTestCase
from listenbrainz import config
from listenbrainz.listen import Listen
from listenbrainz.webserver.redis_connection import init_redis_connection
from listenbrainz.listenstore.redis_listenstore import RedisListenStore
class RedisListenStoreTestCase(DatabaseTestCase):
def setUp(self):
super(RedisListenStoreTestCase, self).setUp()
self.log = logging.getLogger()
# TODO: Ideally this would use a config from a flask app, but this test case doesn't create an app
self._redis = init_redis_connection(self.log, config.REDIS_HOST, config.REDIS_PORT, config.REDIS_NAMESPACE)
self.testuser = db_user.get_or_create(1, "test")
def tearDown(self):
self._redis.redis.flushdb()
Connection(self._redis.redis).disconnect()
super(RedisListenStoreTestCase, self).tearDown()
def test_get_and_put_playing_now(self):
listen = {
'user_id': self.testuser['id'],
'user_name': self.testuser['musicbrainz_id'],
'listened_at': int(time.time()),
'track_metadata': {
'artist_name': 'The Strokes',
'track_name': 'Call It Fate, Call It Karma',
'additional_info': {},
},
}
self._redis.put_playing_now(listen['user_id'], listen, config.PLAYING_NOW_MAX_DURATION)
playing_now = self._redis.get_playing_now(listen['user_id'])
self.assertIsNotNone(playing_now)
self.assertIsInstance(playing_now, Listen)
self.assertEqual(playing_now.data['artist_name'], 'The Strokes')
self.assertEqual(playing_now.data['track_name'], 'Call It Fate, Call It Karma')
def test_update_and_get_recent_listens(self):
recent = self._redis.get_recent_listens()
self.assertEqual(recent, [])
listens = []
t = int(time.time())
for i in range(RedisListenStore.RECENT_LISTENS_MAX * 3):
listen = Listen(user_id=self.testuser['id'],
user_name = self.testuser['musicbrainz_id'],
timestamp = t - i,
data = {
'artist_name': str(uuid.uuid4()),
'track_name': str(uuid.uuid4()),
'additional_info': {},
}
)
listens.append(listen)
self._redis.update_recent_listens(listens)
recent = self._redis.get_recent_listens()
self.assertEqual(len(recent), RedisListenStore.RECENT_LISTENS_MAX)
self.assertIsInstance(recent[0], Listen)
for i, r in enumerate(recent):
self.assertEqual(r.timestamp, listens[i].timestamp)
recent = self._redis.get_recent_listens(5)
self.assertEqual(len(recent), 5)
for i, r in enumerate(recent):
self.assertEqual(r.timestamp, listens[i].timestamp)
def test_incr_listen_count_for_day(self):
today = datetime.datetime.utcnow()
# get without setting any value, should return None
self.assertIsNone(self._redis.get_listen_count_for_day(today))
# set a value to a key that doesn't exists
self._redis.increment_listen_count_for_day(today, 2)
self.assertEqual(2, self._redis.get_listen_count_for_day(today))
# increment again
self._redis.increment_listen_count_for_day(today, 3)
self.assertEqual(5, self._redis.get_listen_count_for_day(today))
# check for a different day
yesterday = today - relativedelta(days=1)
self.assertIsNone(self._redis.get_listen_count_for_day(yesterday))
self._redis.increment_listen_count_for_day(yesterday, 2)
self.assertEqual(2, self._redis.get_listen_count_for_day(yesterday))
|
Freso/listenbrainz-server
|
listenbrainz/listenstore/tests/test_redislistenstore.py
|
Python
|
gpl-2.0
| 4,058
| 0.002957
|
import subprocess
import tempfile
import random
import os
import shutil
import re
import chirc.replies as replies
from chirc.client import ChircClient
from chirc.types import ReplyTimeoutException
import pytest
import time
class IRCSession():
def __init__(self, chirc_exe = None, msg_timeout = 0.1, randomize_ports = False,
default_port = None, loglevel = -1, debug = False):
if chirc_exe is None:
self.chirc_exe = "../chirc"
else:
self.chirc_exe = chirc_exe
if not (os.path.exists(self.chirc_exe) and os.path.isfile(self.chirc_exe) and os.access(self.chirc_exe, os.X_OK)):
raise RuntimeError("{} does not exist or it is not executable".format(self.chirc_exe))
if default_port is None:
self.default_port = 7776
else:
self.default_port = default_port
self.msg_timeout = msg_timeout
self.randomize_ports = randomize_ports
self.loglevel = loglevel
self.debug = debug
self.oper_password = "foobar"
# Testing functions
def _assert_equals(self, a, b, explanation, irc_msg = None):
if irc_msg is not None:
explanation = explanation + "\n\nMESSAGE: {}".format(irc_msg.raw(bookends=True))
assert a == b, explanation
def _assert_is_none(self, a, explanation, irc_msg = None):
if irc_msg is not None:
explanation = explanation + "\n\nMESSAGE: {}".format(irc_msg.raw(bookends=True))
assert a is None, explanation
def _assert_is_not_none(self, a, explanation, irc_msg = None):
if irc_msg is not None:
explanation = explanation + "\n\nMESSAGE: {}".format(irc_msg.raw(bookends=True))
assert a is not None, explanation
def _assert_in(self, x, l, explanation, irc_msg = None):
if irc_msg is not None:
explanation = explanation + "\n\nMESSAGE: {}".format(irc_msg.raw(bookends=True))
assert x in l, explanation
# Start/end IRC session
def start_session(self):
self.tmpdir = tempfile.mkdtemp()
if self.randomize_ports:
self.port = random.randint(10000,60000)
else:
self.port = self.default_port
if self.randomize_ports:
tries = 10
else:
tries = 1
while tries > 0:
chirc_cmd = [os.path.abspath(self.chirc_exe), "-p", str(self.port), "-o", self.oper_password]
if self.loglevel == -1:
chirc_cmd.append("-q")
elif self.loglevel == 1:
chirc_cmd.append("-v")
elif self.loglevel == 2:
chirc_cmd.append("-vv")
self.chirc_proc = subprocess.Popen(chirc_cmd, cwd = self.tmpdir)
time.sleep(0.01)
rc = self.chirc_proc.poll()
if rc != None:
tries -=1
if tries == 0:
pytest.fail("chirc process failed to start. rc = %i" % rc)
else:
if self.randomize_ports:
self.port = random.randint(10000,60000)
else:
break
self.clients = []
def end_session(self):
for c in self.clients:
self.disconnect_client(c)
rc = self.chirc_proc.poll()
if rc is not None:
if rc != 0:
shutil.rmtree(self.tmpdir)
pytest.fail("chirc process failed during test. rc = %i" % rc)
else:
self.chirc_proc.kill()
self.chirc_proc.wait()
shutil.rmtree(self.tmpdir)
# Client connect/disconnect
def get_client(self, nodelay = False):
c = ChircClient(msg_timeout = self.msg_timeout, port=self.port, nodelay = nodelay)
self.clients.append(c)
return c
def disconnect_client(self, c):
c.disconnect()
self.clients.remove(c)
def connect_user(self, nick, username):
client = self.get_client()
client.send_cmd("NICK %s" % nick)
client.send_cmd("USER %s * * :%s" % (nick, username))
self.verify_welcome_messages(client, nick)
self.verify_lusers(client, nick)
self.verify_motd(client, nick)
return client
def connect_clients(self, numclients, join_channel = None):
clients = []
for i in range(numclients):
nick = "user%i" % (i+1)
username = "User %s" % nick
client = self.connect_user(nick, username)
clients.append( (nick, client) )
if join_channel != None:
self.join_channel(clients, join_channel)
return clients
def connect_and_join_channels(self, channels, aways = [], ircops = [], test_names = False):
users = {}
if None in channels:
for user in channels[None]:
if user not in users:
client = self.connect_user(user, user)
users[user] = client
channelsl = sorted([k for k in channels.keys() if k is not None])
for channel in channelsl:
channelusers = channels[channel]
joined = []
joinedp = []
op = channelusers[0][1:]
if op not in users:
client = self.connect_user(op, op)
users[op] = client
if test_names:
expect_names = [channelusers[0]]
else:
expect_names = None
users[op].send_cmd("JOIN %s" % channel)
self.verify_join(users[op], op, channel, expect_names = expect_names)
joined.append(op)
joinedp.append(channelusers[0])
for user in channelusers[1:]:
if user[0] in ("@", "+"):
nick = user[1:]
else:
nick = user
if nick not in users:
client = self.connect_user(nick, nick)
users[nick] = client
if test_names:
expect_names = joinedp + [nick]
else:
expect_names = None
users[nick].send_cmd("JOIN %s" % channel)
self.verify_join(users[nick], nick, channel, expect_names = expect_names)
for user2 in joined:
self.verify_relayed_join(users[user2], from_nick = None, channel=channel)
joined.append(nick)
joinedp.append(user)
if user[0] in ("@","+"):
if user[0] == "@":
mode = "+o"
elif user[0] == "+":
mode = "+v"
self.set_channel_mode(users[op], op, channel, mode, nick)
for user2 in joined:
self.verify_relayed_mode(users[user2], from_nick=op, channel=channel, mode=mode, mode_nick=nick)
for user in aways:
users[user].send_cmd("AWAY :I'm away")
self.get_reply(users[user], expect_code = replies.RPL_NOWAWAY, expect_nick = user,
expect_nparams = 1, long_param_re = "You have been marked as being away")
for user in ircops:
users[user].send_cmd("OPER %s %s" % (user, self.oper_password))
self.get_reply(users[user], expect_code = replies.RPL_YOUREOPER, expect_nick = user,
expect_nparams = 1, long_param_re = "You are now an IRC operator")
return users
# IRC actions
def join_channel(self, clients, channel):
for (nick, client) in clients:
client.send_cmd("JOIN %s" % channel)
self.verify_join(client, nick, channel)
relayed = len(clients) - 1
for (nick, client) in clients:
for i in range(relayed):
self.verify_relayed_join(client, from_nick = None, channel=channel)
relayed -= 1
def part_channel(self, clients, channel):
clients2 = clients[:]
for (nick1, client1) in clients:
client1.send_cmd("PART #test :%s is out of here!" % nick1)
self.verify_relayed_part(client1, from_nick=nick1, channel=channel, msg="%s is out of here!" % nick1)
clients2.remove( (nick1, client1) )
for (nick2, client2) in clients2:
self.verify_relayed_part(client2, from_nick=nick1, channel=channel, msg="%s is out of here!" % nick1)
def set_user_mode(self, client, nick, nick_mode, mode, expect_wrong_mode=False, expect_relay=True):
client.send_cmd("MODE %s %s" % (nick_mode, mode))
if nick != nick_mode:
self.get_reply(client, expect_code = replies.ERR_USERSDONTMATCH, expect_nick = nick,
expect_nparams = 1,
long_param_re = "Cannot change mode for other users")
return
if expect_wrong_mode:
self.get_reply(client, expect_code = replies.ERR_UMODEUNKNOWNFLAG, expect_nick = nick,
expect_nparams = 1,
long_param_re = "Unknown MODE flag")
else:
if expect_relay:
reply = self.get_message(client, expect_prefix = True, expect_cmd = "MODE",
expect_nparams = 2, expect_short_params = [nick_mode],
long_param_re = mode)
self._assert_equals(reply.prefix.hostname, nick,
explanation = "Expected MODE's prefix to be nick '{}'".format(nick),
irc_msg = reply)
else:
with pytest.raises(ReplyTimeoutException):
self.get_reply(client)
def set_channel_mode(self, client, nick, channel, mode = None, nick_mode = None, expect_mode = None,
expect_wrong_channel=False, expect_wrong_mode = False, expect_ops_needed = False,
expect_not_on_channel=False):
if mode is None and nick_mode is None:
client.send_cmd("MODE %s" % channel)
elif nick_mode is None:
client.send_cmd("MODE %s %s" % (channel, mode))
else:
client.send_cmd("MODE %s %s %s" % (channel, mode, nick_mode))
if expect_wrong_channel:
self.get_reply(client, expect_code = replies.ERR_NOSUCHCHANNEL, expect_nick = nick,
expect_nparams = 2, expect_short_params = [channel],
long_param_re = "No such channel")
return
if mode is None and nick_mode is None:
reply = self.get_reply(client, expect_code = replies.RPL_CHANNELMODEIS, expect_nick = nick,
expect_nparams = 2, expect_short_params = [channel])
mode_string = reply.params[-1]
self._assert_equals(mode_string[0], "+",
explanation = "Returned mode string does not start with '+'",
irc_msg = reply)
mode_string = mode_string[1:]
if expect_mode is not None:
self._assert_equals(len(mode_string), len(expect_mode),
explanation = "Expected mode string to have length {}".format(len(expect_mode)),
irc_msg = reply)
for m in expect_mode:
self._assert_in(m, mode_string,
explanation = "Expected mode string to have '{}', got this instead: {}".format(m, mode_string),
irc_msg = reply)
else:
if expect_wrong_mode:
self.get_reply(client, expect_code = replies.ERR_UNKNOWNMODE, expect_nick = nick,
expect_nparams = 2, expect_short_params = [mode[1]],
long_param_re = "is unknown mode char to me for (?P<channel>.+)",
long_param_values = {"channel":channel})
if expect_ops_needed:
self.get_reply(client, expect_code = replies.ERR_CHANOPRIVSNEEDED, expect_nick = nick,
expect_nparams = 2, expect_short_params = [channel],
long_param_re = "You're not channel operator")
if nick_mode is not None and expect_not_on_channel:
self.get_reply(client, expect_code = replies.ERR_USERNOTINCHANNEL, expect_nick = nick,
expect_nparams = 3, expect_short_params = [nick_mode, channel],
long_param_re = "They aren't on that channel")
# Message/reply getters
def get_reply(self, client, expect_code = None, expect_nick = None, expect_nparams = None,
expect_short_params = None, long_param_re = None, long_param_values = None):
msg = client.get_message()
self.verify_reply(msg, expect_code, expect_nick, expect_nparams, expect_short_params, long_param_re, long_param_values)
return msg
def get_message(self, client, expect_prefix = None, expect_cmd = None, expect_nparams = None,
expect_short_params = None, long_param_re = None, long_param_values = None):
msg = client.get_message()
self.verify_message(msg, expect_prefix, expect_cmd,
expect_nparams, expect_short_params,
long_param_re, long_param_values)
return msg
# Verifiers
def verify_message(self, msg, expect_prefix = None, expect_cmd = None,
expect_nparams = None, expect_short_params = None,
long_param_re = None, long_param_values = None):
if expect_prefix != None and expect_prefix:
assert msg.prefix is not None, "Expected a prefix, but got none.\nMessage: {}".format(msg.raw(bookends=True))
if expect_cmd != None:
self._assert_equals(msg.cmd, expect_cmd,
"Expected command {}, got {} instead".format(expect_cmd, msg.cmd),
irc_msg = msg)
if expect_nparams != None:
nparams = len(msg.params)
self._assert_equals(nparams, expect_nparams,
"Expected {} parameters, got {} instead".format(expect_nparams, nparams),
irc_msg = msg)
if expect_short_params != None:
for i, expect_p, p in zip (range(len(expect_short_params)), expect_short_params, msg.params):
if expect_p is not None:
self._assert_equals(str(p), str(expect_p),
"Expected parameter #{} to be {}, got {} instead".format(str(i+1), str(expect_p), str(p)),
irc_msg = msg)
if long_param_re != None:
lpre = "^:%s$" % long_param_re
lp = msg.params[-1]
match = re.match(lpre, lp)
self._assert_is_not_none(match, "|||%s||| <-- Long parameter does not match regular expression: %s" % (lp, lpre), irc_msg = msg)
if long_param_values != None:
for k,v in long_param_values.items():
self._assert_equals(match.group(k), str(v),
"Expected <{}> in long parameter to be {}, not {} (long parameter regex: {})".format(k, v, match.group(k), lpre),
irc_msg = msg)
def verify_reply(self, msg, expect_code = None, expect_nick = None, expect_nparams = None,
expect_short_params = None, long_param_re = None, long_param_values = None):
if expect_nparams is not None:
nparams = expect_nparams + 1
else:
nparams = expect_nparams
if expect_short_params is not None:
if expect_nick is not None:
short_params = [expect_nick] + expect_short_params
else:
short_params = [None] + expect_short_params
else:
if expect_nick is not None:
short_params = [expect_nick]
else:
short_params = None
self.verify_message(msg, expect_prefix = True, expect_cmd = expect_code,
expect_nparams = nparams, expect_short_params = short_params,
long_param_re = long_param_re, long_param_values = long_param_values)
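    # Note (added for clarity, not in the original): verify_reply treats the
    # reply's first short parameter as the client's nick, so expect_nparams
    # counts only the parameters after the nick; verify_message therefore
    # receives expect_nparams + 1.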
def verify_welcome_messages(self, client, nick, user=None):
r = []
if user is None:
user = nick
reply = self.get_reply(client, expect_code = replies.RPL_WELCOME, expect_nick = nick, expect_nparams = 1,
long_param_re= "Welcome to the Internet Relay Network {}!{}.*".format(nick, user))
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_YOURHOST, expect_nick = nick, expect_nparams = 1)
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_CREATED, expect_nick = nick, expect_nparams = 1)
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_MYINFO, expect_nick = nick, expect_nparams = 4)
r.append(reply)
return r
def verify_lusers(self, client, nick, expect_users = None, expect_ops = None, expect_unknown = None, expect_channels = None, expect_clients = None):
r = []
reply = self.get_reply(client, expect_code = replies.RPL_LUSERCLIENT, expect_nick = nick, expect_nparams = 1)
if expect_users is not None:
self.verify_reply(reply,
long_param_re = "There are (?P<users>\d+) users and 0 services on 1 servers",
long_param_values = {"users":expect_users})
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_LUSEROP, expect_nick = nick,
expect_nparams = 2, long_param_re = "operator\(s\) online")
if expect_ops is not None:
self.verify_reply(reply, expect_short_params = [expect_ops])
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_LUSERUNKNOWN, expect_nick = nick,
expect_nparams = 2, long_param_re = "unknown connection\(s\)")
if expect_unknown is not None:
self.verify_reply(reply, expect_short_params = [expect_unknown])
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_LUSERCHANNELS, expect_nick = nick,
expect_nparams = 2, long_param_re = "channels formed")
if expect_channels is not None:
self.verify_reply(reply, expect_short_params = [expect_channels])
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_LUSERME, expect_nick = nick, expect_nparams = 1)
if expect_clients is not None:
self.verify_reply(reply,
long_param_re = "I have (?P<clients>\d+) clients and (?P<servers>\d+) servers",
long_param_values = {"clients":expect_clients})
r.append(reply)
return r
def verify_motd(self, client, nick, expect_motd = None):
r = []
if expect_motd is None:
reply = self.get_reply(client, expect_code = replies.ERR_NOMOTD, expect_nick = nick,
expect_nparams = 1, long_param_re = "MOTD File is missing")
r.append(reply)
else:
reply = self.get_reply(client, expect_code = replies.RPL_MOTDSTART, expect_nick = nick,
expect_nparams = 1, long_param_re = "- .* Message of the day - ")
r.append(reply)
motd_lines = expect_motd.strip().split("\n")
for l in motd_lines:
reply = self.get_reply(client, expect_code = replies.RPL_MOTD, expect_nick = nick,
expect_nparams = 1, long_param_re = "- " + l)
r.append(reply)
reply = self.get_reply(client, expect_code = replies.RPL_ENDOFMOTD, expect_nick = nick,
expect_nparams = 1, long_param_re = "End of MOTD command")
r.append(reply)
return r
def verify_join(self, client, nick, channel, expect_topic = None, expect_names = None):
self.verify_relayed_join(client, nick, channel)
if expect_topic != None:
self.get_reply(client, expect_code = replies.RPL_TOPIC, expect_nick = nick,
expect_nparams = 2, expect_short_params = [channel], long_param_re=expect_topic)
self.verify_names(client, nick, expect_names = expect_names)
def verify_relayed_join(self, client, from_nick, channel):
reply = self.get_message(client, expect_prefix = True, expect_cmd = "JOIN",
expect_nparams = 1, expect_short_params = [channel])
if from_nick != None:
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected JOIN's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_relayed_part(self, client, from_nick, channel, msg):
if msg != None:
expect_nparams = 2
else:
expect_nparams = 1
reply = self.get_message(client, expect_prefix = True, expect_cmd = "PART",
expect_nparams = expect_nparams, expect_short_params = [channel],
long_param_re = msg)
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected PART's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_relayed_quit(self, client, from_nick, msg):
reply = self.get_message(client, expect_prefix = True, expect_cmd = "QUIT",
expect_nparams = 1, long_param_re = msg)
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected QUIT's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_relayed_nick(self, client, from_nick, newnick):
reply = self.get_message(client, expect_prefix = True, expect_cmd = "NICK",
expect_nparams = 1, long_param_re = newnick)
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected NICK's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_relayed_privmsg(self, client, from_nick, recip, msg):
reply = self.get_message(client, expect_prefix = True, expect_cmd = "PRIVMSG",
expect_nparams = 2, expect_short_params = [recip],
long_param_re = msg)
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected PRIVMSG's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_relayed_topic(self, client, from_nick, channel, topic):
reply = self.get_message(client, expect_prefix = True, expect_cmd = "TOPIC",
expect_nparams = 2, expect_short_params = [channel],
long_param_re = topic)
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected TOPIC's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_relayed_mode(self, client, from_nick, channel, mode, mode_nick = None):
if mode_nick is not None:
expect_nparams = 3
expect_short_params = [channel, mode, mode_nick]
else:
expect_nparams = 2
expect_short_params = [channel, mode]
reply = self.get_message(client, expect_prefix = True, expect_cmd = "MODE",
expect_nparams = expect_nparams, expect_short_params = expect_short_params)
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected MODE's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_relayed_notice(self, client, from_nick, recip, msg):
reply = self.get_message(client, expect_prefix = True, expect_cmd = "NOTICE",
expect_nparams = 2, expect_short_params = [recip],
long_param_re = msg)
self._assert_equals(reply.prefix.nick, from_nick,
explanation = "Expected NOTICE's prefix to have nick '{}'".format(from_nick),
irc_msg = reply)
def verify_names_single(self, reply, nick, expect_channel = None, expect_names = None):
if expect_channel is not None:
if expect_channel == "*":
self._assert_equals(reply.params[1], "*",
explanation = "Expected first parameter to be '*'",
irc_msg = reply)
self._assert_equals(reply.params[2], "*",
explanation = "Expected second parameter to be '*'",
irc_msg = reply)
else:
self._assert_equals(reply.params[1], "=",
explanation = "Expected first parameter to be '='",
irc_msg = reply)
self._assert_equals(reply.params[2], expect_channel,
explanation = "Expected channel in NAMES to be {}".format(expect_channel),
irc_msg = reply)
if expect_names is not None:
names = reply.params[3][1:].split(" ")
self._assert_equals(len(names), len(expect_names),
explanation = "Expected list of names to have {} entries" .format(len(expect_names)),
irc_msg = reply)
for name in expect_names:
self._assert_in(name, names,
explanation = "Expected {} in NAMES".format(name),
irc_msg = reply)
def verify_names(self, client, nick, expect_channel = None, expect_names = None):
reply = self.get_reply(client, expect_code = replies.RPL_NAMREPLY, expect_nick = nick,
expect_nparams = 3)
self.verify_names_single(reply, nick, expect_channel, expect_names)
if expect_channel is not None:
expect_short_params = [expect_channel]
else:
expect_short_params = None
self.get_reply(client, expect_code = replies.RPL_ENDOFNAMES, expect_nick = nick,
expect_short_params = expect_short_params, expect_nparams = 2)
|
loosecannon93/chittyrc
|
tests/chirc/tests/common.py
|
Python
|
apache-2.0
| 28,644
| 0.031804
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
FileSelectionPanel.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import QFileDialog
from qgis.PyQt.QtCore import QSettings
from processing.tools.system import isWindows
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'widgetBaseSelector.ui'))
class FileSelectionPanel(BASE, WIDGET):
def __init__(self, isFolder, ext=None):
super(FileSelectionPanel, self).__init__(None)
self.setupUi(self)
self.ext = ext or '*'
self.isFolder = isFolder
self.btnSelect.clicked.connect(self.showSelectionDialog)
def showSelectionDialog(self):
# Find the file dialog's working directory
settings = QSettings()
text = self.leText.text()
if os.path.isdir(text):
path = text
elif os.path.isdir(os.path.dirname(text)):
path = os.path.dirname(text)
elif settings.contains('/Processing/LastInputPath'):
path = settings.value('/Processing/LastInputPath')
else:
path = ''
if self.isFolder:
folder = QFileDialog.getExistingDirectory(self,
self.tr('Select folder'), path)
if folder:
self.leText.setText(folder)
settings.setValue('/Processing/LastInputPath',
os.path.dirname(folder))
else:
filenames = QFileDialog.getOpenFileNames(self,
self.tr('Select file'), path, '*.' + self.ext)
if filenames:
self.leText.setText(u';'.join(filenames))
settings.setValue('/Processing/LastInputPath',
os.path.dirname(filenames[0]))
def getValue(self):
s = self.leText.text()
if isWindows():
s = s.replace('\\', '/')
return s
def setText(self, text):
self.leText.setText(text)
|
alexbruy/QGIS
|
python/plugins/processing/gui/FileSelectionPanel.py
|
Python
|
gpl-2.0
| 3,127
| 0.00064
|
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name="family",
parent_name="scatter3d.marker.colorbar.title.font",
**kwargs
):
super(FamilyValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
no_blank=kwargs.pop("no_blank", True),
strict=kwargs.pop("strict", True),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/scatter3d/marker/colorbar/title/font/_family.py
|
Python
|
mit
| 558
| 0
|
import re
from gourmet.plugin import ExporterPlugin
from gourmet.convert import seconds_to_timestring, float_to_frac
from . import gxml2_exporter
from gettext import gettext as _
GXML = _('Gourmet XML File')
class GourmetExportChecker:
def check_rec (self, rec, file):
self.txt = file.read()
self.rec = rec
self.check_attrs()
def check_attrs (self):
for attr in ['title','cuisine',
'source','link']:
if getattr(self.rec,attr):
assert re.search(r'<%(attr)s>\s*%(val)s\s*</%(attr)s>'%{
'attr':attr,
'val':getattr(self.rec,attr)
},
self.txt), \
'Did not find %s value %s'%(attr,getattr(self.rec,attr))
if self.rec.yields:
assert re.search(r'<yields>\s*%s\s*%s\s*</yields>'%(
self.rec.yields,
self.rec.yield_unit),
self.txt) or \
re.search(r'<yields>\s*%s\s*%s\s*</yields>'%(
float_to_frac(self.rec.yields),
self.rec.yield_unit),
self.txt), \
'Did not find yields value %s %s'%(self.rec.yields,
self.rec.yield_unit)
for att in ['preptime','cooktime']:
if getattr(self.rec,att):
tstr = seconds_to_timestring(getattr(self.rec,att))
assert re.search(r'<%(att)s>\s*%(tstr)s\s*</%(att)s>'%locals(),self.txt),\
'Did not find %s value %s'%(att,tstr)
class GourmetExporterPlugin (ExporterPlugin):
label = _('Gourmet XML Export')
sublabel = _('Exporting recipes to Gourmet XML file %(file)s.')
    single_completed_string = _('Recipe saved in Gourmet XML file %(file)s.')
filetype_desc = GXML
saveas_filters = [GXML,['text/xml'],['*.grmt','*.xml','*.XML']]
saveas_single_filters = saveas_filters
def get_multiple_exporter (self, args):
return gxml2_exporter.recipe_table_to_xml(
args['rd'],
args['rv'],
args['file'],
)
def do_single_export (self, args) :
gxml2_exporter.recipe_table_to_xml(args['rd'],
[args['rec']],
args['out'],
change_units=args['change_units'],
mult=args['mult']
).run()
def run_extra_prefs_dialog (self):
pass
def check_export (self, rec, file):
gec = GourmetExportChecker()
gec.check_rec(rec,file)
|
thinkle/gourmet
|
gourmet/plugins/import_export/gxml_plugin/gxml_exporter_plugin.py
|
Python
|
gpl-2.0
| 2,889
| 0.014192
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls.defaults import patterns
from django.conf.urls.defaults import url
from openstack_dashboard.dashboards.project.instances.views import DetailView
from openstack_dashboard.dashboards.project.instances.views import IndexView
from openstack_dashboard.dashboards.project.instances.views import \
LaunchInstanceView
from openstack_dashboard.dashboards.project.instances.views import ResizeView
from openstack_dashboard.dashboards.project.instances.views import UpdateView
INSTANCES = r'^(?P<instance_id>[^/]+)/%s$'
VIEW_MOD = 'openstack_dashboard.dashboards.project.instances.views'
urlpatterns = patterns(VIEW_MOD,
url(r'^$', IndexView.as_view(), name='index'),
url(r'^launch$', LaunchInstanceView.as_view(), name='launch'),
url(r'^(?P<instance_id>[^/]+)/$', DetailView.as_view(), name='detail'),
url(INSTANCES % 'update', UpdateView.as_view(), name='update'),
url(INSTANCES % 'console', 'console', name='console'),
url(INSTANCES % 'vnc', 'vnc', name='vnc'),
url(INSTANCES % 'spice', 'spice', name='spice'),
url(INSTANCES % 'resize', ResizeView.as_view(), name='resize'),
)
|
fajoy/horizon-example
|
openstack_dashboard/dashboards/project/instances/urls.py
|
Python
|
apache-2.0
| 1,941
| 0.000515
|
#!/usr/bin/env python
# $Id$
"""
Print detailed information about a process.
"""
import os
import datetime
import socket
import sys
import psutil
from psutil._compat import namedtuple
def convert_bytes(n):
if n == 0:
return '0B'
symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
prefix = {}
for i, s in enumerate(symbols):
prefix[s] = 1 << (i+1)*10
for s in reversed(symbols):
if n >= prefix[s]:
value = float(n) / prefix[s]
                return '%.1f%s' % (value, s)
    return '%sB' % n  # fallback so values under 1 kB are not silently dropped (added)
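# Example (added for illustration): convert_bytes(2048) returns '2.0k' and
# convert_bytes(10 * 1024 ** 3) returns '10.0G'; smaller non-zero values fall
# through to the plain-bytes fallback above.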
def print_(a, b):
if sys.stdout.isatty():
fmt = '\x1b[1;32m%-17s\x1b[0m %s' %(a, b)
else:
fmt = '%-15s %s' %(a, b)
print fmt
def run(pid):
p = psutil.Process(pid)
if p.parent:
parent = '(%s)' % p.parent.name
else:
parent = ''
started = datetime.datetime.fromtimestamp(p.create_time).strftime('%Y-%M-%d %H:%M')
io = p.get_io_counters()
mem = p.get_memory_info()
mem = '%s%% (resident=%s, virtual=%s) ' %(round(p.get_memory_percent(), 1),
convert_bytes(mem.rss),
convert_bytes(mem.vms))
cpu_times = p.get_cpu_times()
cpu_percent = p.get_cpu_percent(0)
children = p.get_children()
files = p.get_open_files()
threads = p.get_threads()
connections = p.get_connections()
print_('pid', p.pid)
print_('name', p.name)
print_('exe', p.exe)
print_('parent', '%s %s' % (p.ppid, parent))
print_('cmdline', ' '.join(p.cmdline))
print_('started', started)
print_('user', p.username)
if os.name == 'posix':
print_('uids', 'real=%s, effective=%s, saved=%s' % p.uids)
print_('gids', 'real=%s, effective=%s, saved=%s' % p.gids)
print_('terminal', p.terminal or '')
if hasattr(p, 'getcwd'):
print_('cwd', p.getcwd())
print_('memory', mem)
print_('cpu', '%s%% (user=%s, system=%s)' % (cpu_percent,
cpu_times.user,
cpu_times.system))
print_('status', p.status)
print_('niceness', p.nice)
print_('num threads', p.get_num_threads())
if hasattr(p, 'get_io_counters'):
print_('I/O', 'bytes-read=%s, bytes-written=%s' % \
(convert_bytes(io.read_bytes),
convert_bytes(io.write_bytes)))
if children:
print_('children', '')
for child in children:
print_('', 'pid=%s name=%s' % (child.pid, child.name))
if files:
print_('open files', '')
for file in files:
print_('', 'fd=%s %s ' % (file.fd, file.path))
if threads:
print_('running threads', '')
for thread in threads:
print_('', 'id=%s, user-time=%s, sys-time=%s' \
% (thread.id, thread.user_time, thread.system_time))
if connections:
print_('open connections', '')
for conn in connections:
type = 'TCP' if conn.type == socket.SOCK_STREAM else 'UDP'
lip, lport = conn.local_address
if not conn.remote_address:
rip, rport = '*', '*'
else:
rip, rport = conn.remote_address
print_('', '%s:%s -> %s:%s type=%s status=%s' \
% (lip, lport, rip, rport, type, conn.status))
def main(argv=None):
if argv is None:
argv = sys.argv
if len(argv) == 1:
sys.exit(run(os.getpid()))
elif len(argv) == 2:
sys.exit(run(int(argv[1])))
else:
sys.exit('usage: %s [pid]' % __file__)
if __name__ == '__main__':
sys.exit(main())
|
jazinga/psutil
|
examples/process_detail.py
|
Python
|
bsd-3-clause
| 3,753
| 0.00373
|
#MenuTitle: Guides through All Selected Nodes
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Creates guides through all selected nodes.
"""
from Foundation import NSPoint
import math
thisFont = Glyphs.font # frontmost font
selectedLayers = thisFont.selectedLayers # active layers of selected glyphs
def angle( firstPoint, secondPoint ):
"""
Returns the angle (in degrees) of the straight line between firstPoint and secondPoint,
0 degrees being the second point to the right of first point.
firstPoint, secondPoint: must be NSPoint or GSNode
"""
xDiff = secondPoint.x - firstPoint.x
yDiff = secondPoint.y - firstPoint.y
return math.degrees(math.atan2(yDiff,xDiff))
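# Quick sanity check (added for illustration, not part of the original script):
# angle(NSPoint(0, 0), NSPoint(1, 1)) returns 45.0, and
# angle(NSPoint(0, 0), NSPoint(-1, 0)) returns 180.0.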
def newGuide( position, angle=0 ):
try:
# GLYPHS 3
newGuide = GSGuide()
except:
# GLYPHS 2
newGuide = GSGuideLine()
newGuide.position = position
newGuide.angle = angle
return newGuide
def isThereAlreadyAGuideWithTheseProperties(thisLayer,guideposition,guideangle):
if guideangle < 0:
guideangle += 180
if guideangle > 180:
guideangle -= 180
for thisGuide in thisLayer.guides:
thisAngle = thisGuide.angle
if thisAngle < 0:
thisAngle += 180
if thisAngle > 180:
thisAngle -= 180
if abs(thisAngle - guideangle) < 0.01 and abs(thisGuide.position.x - guideposition.x) < 0.01 and abs(thisGuide.position.y - guideposition.y) < 0.01:
return True
return False
if len(selectedLayers) == 1:
thisLayer = selectedLayers[0]
thisGlyph = thisLayer.parent
currentPointSelection = [point.position for point in thisLayer.selection if type(point) in (GSNode,GSAnchor)]
# thisGlyph.beginUndo() # undo grouping causes crashes
try:
if len(currentPointSelection) > 1:
# clear selection:
thisLayer.clearSelection()
currentPointSelection.append(currentPointSelection[0])
for i,j in enumerate(range(1,len(currentPointSelection))):
point1 = currentPointSelection[i]
point2 = currentPointSelection[j]
angleBetweenPoints = angle(point1,point2)
middlePoint = addPoints(point1,point2)
middlePoint.x *= 0.5
middlePoint.y *= 0.5
# create guide and add it to layer:
if not isThereAlreadyAGuideWithTheseProperties(thisLayer, middlePoint, angleBetweenPoints):
guideBetweenPoints = newGuide(middlePoint, angleBetweenPoints)
thisLayer.guides.append( guideBetweenPoints )
# select it:
thisLayer.selection.append(guideBetweenPoints)
elif len(currentPointSelection) == 1:
point = currentPointSelection[0]
guide = newGuide(point)
thisLayer.guides.append(guide)
# select only guide:
thisLayer.clearSelection()
thisLayer.selection.append(guide)
except Exception as e:
raise e
# finally:
# thisGlyph.endUndo() # undo grouping causes crashes
|
mekkablue/Glyphs-Scripts
|
Guides/Guides through All Selected Nodes.py
|
Python
|
apache-2.0
| 2,786
| 0.037688
|
"""
Provide basic components for groupby. These definitions
hold the allowlist of methods that are exposed on the
SeriesGroupBy and the DataFrameGroupBy objects.
"""
from __future__ import annotations
import dataclasses
from typing import Hashable
@dataclasses.dataclass(order=True, frozen=True)
class OutputKey:
label: Hashable
position: int
# special case to prevent duplicate plots when catching exceptions when
# forwarding methods from NDFrames
plotting_methods = frozenset(["plot", "hist"])
common_apply_allowlist = (
frozenset(
[
"quantile",
"fillna",
"mad",
"take",
"idxmax",
"idxmin",
"tshift",
"skew",
"corr",
"cov",
"diff",
]
)
| plotting_methods
)
series_apply_allowlist: frozenset[str] = (
common_apply_allowlist
| frozenset(
{"nlargest", "nsmallest", "is_monotonic_increasing", "is_monotonic_decreasing"}
)
) | frozenset(["dtype", "unique"])
dataframe_apply_allowlist: frozenset[str] = common_apply_allowlist | frozenset(
["dtypes", "corrwith"]
)
# cythonized transformations or canned "agg+broadcast", which do not
# require postprocessing of the result by transform.
cythonized_kernels = frozenset(["cumprod", "cumsum", "shift", "cummin", "cummax"])
# List of aggregation/reduction functions.
# These map each group to a single numeric value
reduction_kernels = frozenset(
[
"all",
"any",
"corrwith",
"count",
"first",
"idxmax",
"idxmin",
"last",
"mad",
"max",
"mean",
"median",
"min",
"ngroup",
"nth",
"nunique",
"prod",
# as long as `quantile`'s signature accepts only
# a single quantile value, it's a reduction.
# GH#27526 might change that.
"quantile",
"sem",
"size",
"skew",
"std",
"sum",
"var",
]
)
# List of transformation functions.
# a transformation is a function that, for each group,
# produces a result that has the same shape as the group.
transformation_kernels = frozenset(
[
"backfill",
"bfill",
"cumcount",
"cummax",
"cummin",
"cumprod",
"cumsum",
"diff",
"ffill",
"fillna",
"pad",
"pct_change",
"rank",
"shift",
"tshift",
]
)
# these are all the public methods on Grouper which don't belong
# in either of the above lists
groupby_other_methods = frozenset(
[
"agg",
"aggregate",
"apply",
"boxplot",
# corr and cov return ngroups*ncolumns rows, so they
# are neither a transformation nor a reduction
"corr",
"cov",
"describe",
"dtypes",
"expanding",
"ewm",
"filter",
"get_group",
"groups",
"head",
"hist",
"indices",
"ndim",
"ngroups",
"ohlc",
"pipe",
"plot",
"resample",
"rolling",
"tail",
"take",
"transform",
"sample",
]
)
# Valid values of `name` for `groupby.transform(name)`
# NOTE: do NOT edit this directly. New additions should be inserted
# into the appropriate list above.
transform_kernel_allowlist = reduction_kernels | transformation_kernels
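# Illustrative note (added): because the allowlist is the union above, both
# "cumsum" (a transformation kernel) and "sum" (a reduction kernel) are valid
# names for groupby.transform(name), while a method such as "describe"
# (grouped under groupby_other_methods) is not.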
|
rs2/pandas
|
pandas/core/groupby/base.py
|
Python
|
bsd-3-clause
| 3,488
| 0.000573
|
import collections
from django import forms
from django.forms.fields import MultiValueField, CharField
from django.forms.utils import flatatt
from django.forms.widgets import (
CheckboxInput,
Input,
RadioChoiceInput,
RadioSelect,
RadioFieldRenderer,
TextInput,
MultiWidget,
Widget,
)
from django.template.loader import render_to_string
from django.utils.encoding import force_unicode
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
import json
from django.utils.translation import ugettext_noop
from dimagi.utils.dates import DateSpan
class BootstrapCheckboxInput(CheckboxInput):
def __init__(self, attrs=None, check_test=bool, inline_label=""):
super(BootstrapCheckboxInput, self).__init__(attrs, check_test)
self.inline_label = inline_label
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs, type='checkbox', name=name)
try:
result = self.check_test(value)
except: # Silently catch exceptions
result = False
if result:
final_attrs['checked'] = 'checked'
if value not in ('', True, False, None):
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(value)
return mark_safe(u'<label class="checkbox"><input%s /> %s</label>' %
(flatatt(final_attrs), self.inline_label))
class BootstrapRadioInput(RadioChoiceInput):
def __unicode__(self):
if 'id' in self.attrs:
label_for = ' for="%s_%s"' % (self.attrs['id'], self.index)
else:
label_for = ''
choice_label = conditional_escape(force_unicode(self.choice_label))
return mark_safe(u'<label class="radio"%s>%s %s</label>' % (label_for, self.tag(), choice_label))
class BootstrapRadioFieldRenderer(RadioFieldRenderer):
def render(self):
return mark_safe(u'\n'.join([u'%s'
% force_unicode(w) for w in self]))
def __iter__(self):
for i, choice in enumerate(self.choices):
yield BootstrapRadioInput(self.name, self.value, self.attrs.copy(), choice, i)
class BootstrapRadioSelect(RadioSelect):
renderer = BootstrapRadioFieldRenderer
class BootstrapAddressField(MultiValueField):
"""
The original for this was found here:
http://stackoverflow.com/questions/7437108/saving-a-form-model-with-using-multiwidget-and-a-multivaluefield
"""
def __init__(self,num_lines=3,*args,**kwargs):
fields = tuple([CharField(widget=TextInput(attrs={'class':'input-xxlarge'})) for _ in range(0, num_lines)])
self.widget = BootstrapAddressFieldWidget(widgets=[field.widget for field in fields])
super(BootstrapAddressField,self).__init__(fields=fields,*args,**kwargs)
def compress(self, data_list):
return data_list
class BootstrapAddressFieldWidget(MultiWidget):
def decompress(self, value):
return ['']*len(self.widgets)
def format_output(self, rendered_widgets):
lines = list()
for field in rendered_widgets:
lines.append("<p>%s</p>" % field)
return u'\n'.join(lines)
# def value_from_datadict(self, data, files, name):
# line_list = [widget.value_from_datadict(data,files,name+'_%s' %i) for i,widget in enumerate(self.widgets)]
# try:
# return line_list[0] + ' ' + line_list[1] + ' ' + line_list[2]
# except Exception:
# return ''
class BootstrapDisabledInput(Input):
input_type = 'hidden'
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, type=self.input_type, name=name)
if value != '':
# Only add the 'value' attribute if a value is non-empty.
final_attrs['value'] = force_unicode(self._format_value(value))
return mark_safe(u'<span class="uneditable-input %s">%s</span><input%s />' %
(attrs.get('class', ''), value, flatatt(final_attrs)))
class BootstrapPhoneNumberInput(Input):
input_type = 'text'
def render(self, name, value, attrs=None):
return mark_safe(u"""<div class="input-prepend">
<span class="add-on">+</span>%s
</div>""" % super(BootstrapPhoneNumberInput, self).render(name, value, attrs))
class AutocompleteTextarea(forms.Textarea):
"""
Textarea with auto-complete. Uses a custom extension on top of Twitter
Bootstrap's typeahead plugin.
"""
def render(self, name, value, attrs=None):
if hasattr(self, 'choices') and self.choices:
output = mark_safe("""
<script>
$(function() {
$("#%s").select2({
multiple: true,
tags: %s
});
});
</script>\n""" % (attrs['id'], json.dumps(map(lambda c: {'text': c, 'id': c}, self.choices))))
else:
output = mark_safe("")
output += super(AutocompleteTextarea, self).render(name, value,
attrs=attrs)
return output
class Select2MultipleChoiceWidget(forms.SelectMultiple):
class Media:
css = {
'all': ('select2-3.4.5-legacy/select2.css',)
}
js = ('select2-3.4.5-legacy/select2.js',)
def render(self, name, value, attrs=None, choices=()):
final_attrs = self.build_attrs(attrs)
output = super(Select2MultipleChoiceWidget, self).render(name, value, attrs, choices)
output += """
<script>
$(function() {
$('#%s').select2({ width: 'resolve' });
});
</script>
""" % final_attrs.get('id')
return mark_safe(output)
class Select2Ajax(forms.TextInput):
"""
A Select2 widget that loads its options asynchronously.
You must use `set_url()` to set the url. This will usually be done in the form's __init__() method.
The url is not specified in the form class definition because in most cases the url will be dependent on the
domain of the request.
"""
class Media:
css = {
'all': ('select2-3.5.2-legacy/select2.css', 'select2-3.5.2-legacy/select2-bootstrap.css')
}
js = ('select2-3.5.2-legacy/select2.js',)
def __init__(self, attrs=None, page_size=20):
self.page_size = page_size
super(Select2Ajax, self).__init__(attrs)
def set_url(self, url):
self.url = url
def _clean_initial(self, val):
if isinstance(val, collections.Sequence) and not isinstance(val, (str, unicode)):
return {"id": val[0], "text": val[1]}
else:
return {"id": val, "text": val}
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs)
output = super(Select2Ajax, self).render(name, value, attrs)
output += render_to_string(
'hqstyle/forms/select_2_ajax_widget.html',
{
'id': final_attrs.get('id'),
'initial': self._clean_initial(value),
'endpoint': self.url,
'page_size': self.page_size,
}
)
return mark_safe(output)
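# Hypothetical usage sketch (added; the form, field name and URL are
# illustrative, not from this module): the endpoint is usually bound in the
# form's __init__ because it depends on the request's domain, as the
# Select2Ajax docstring above notes.
#
#     class ExampleForm(forms.Form):
#         owner = forms.CharField(widget=Select2Ajax())
#
#         def __init__(self, domain, *args, **kwargs):
#             super(ExampleForm, self).__init__(*args, **kwargs)
#             self.fields['owner'].widget.set_url(
#                 '/a/%s/example/autocomplete/' % domain)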
class DateRangePickerWidget(Input):
"""SUPPORTS BOOTSTRAP 3 ONLY
Extends the standard input widget to render a Date Range Picker Widget.
Documentation and Demo here: http://www.daterangepicker.com/
usage:
apply the following decorator to your view's dispatch method
@use_daterangepicker
def dispatch(self, request, *args, **kwargs):
super(self, MyView).dispatch(request, *args, **kwargs)
"""
class Range(object):
LAST_7 = 'last_7_days'
LAST_MONTH = 'last_month'
LAST_30_DAYS = 'last_30_days'
range_labels = {
Range.LAST_7: ugettext_noop('Last 7 Days'),
Range.LAST_MONTH: ugettext_noop('Last Month'),
Range.LAST_30_DAYS: ugettext_noop('Last 30 Days'),
}
separator = ugettext_noop(' to ')
def __init__(self, attrs=None, range_labels=None, separator=None,
default_datespan=None):
self.range_labels = range_labels or self.range_labels
self.separator = separator or self.separator
self.default_datespan = default_datespan
super(DateRangePickerWidget, self).__init__(attrs=attrs)
def render(self, name, value, attrs=None):
final_attrs = self.build_attrs(attrs)
output = super(DateRangePickerWidget, self).render(name, value, attrs)
# yes, I know inline html in python is gross, but this is what the
# built in django widgets are doing. :|
output += """
<script>
$(function () {
var separator = '%(separator)s';
var report_labels = JSON.parse('%(range_labels_json)s');
$('#%(elem_id)s').createDateRangePicker(
report_labels, separator, '%(startdate)s',
'%(enddate)s'
);
});
</script>
""" % {
'elem_id': final_attrs.get('id'),
'separator': self.separator,
'range_labels_json': json.dumps(self.range_labels),
'startdate': (self.default_datespan.startdate.strftime('%m/%d/%Y')
if (isinstance(self.default_datespan, DateSpan)
and self.default_datespan.startdate is not None)
else ''),
'enddate': (self.default_datespan.enddate.strftime('%m/%d/%Y')
if (isinstance(self.default_datespan, DateSpan)
and self.default_datespan.enddate is not None)
else ''),
}
output = """
<span class="input-group-addon"><i class="fa fa-calendar"></i></span>
""" + output
output = '<div class="input-group">{}</div>'.format(output)
return mark_safe(output)
|
qedsoftware/commcare-hq
|
corehq/apps/style/forms/widgets.py
|
Python
|
bsd-3-clause
| 10,116
| 0.00257
|
import lassie
from .base import LassieBaseTestCase
class LassieOpenGraphTestCase(LassieBaseTestCase):
def test_open_graph_all_properties(self):
url = 'http://lassie.it/open_graph/all_properties.html'
data = lassie.fetch(url)
self.assertEqual(data['url'], url)
self.assertEqual(data['title'], 'Lassie Open Graph All Properies Test')
self.assertEqual(data['description'], 'Just a test template with OG data!')
self.assertEqual(data['locale'], 'en_US')
self.assertEqual(data['site_name'], 'Lassie')
self.assertEqual(len(data['images']), 1)
image = data['images'][0]
self.assertEqual(image['src'], 'http://i.imgur.com/cvoR7zv.jpg')
self.assertEqual(image['width'], 550)
self.assertEqual(image['height'], 365)
self.assertEqual(image['type'], 'og:image')
self.assertEqual(len(data['videos']), 1)
video = data['videos'][0]
self.assertEqual(video['src'], 'http://www.youtube.com/v/dQw4w9WgXcQ?version=3&autohide=1')
self.assertEqual(video['width'], 640)
self.assertEqual(video['height'], 480)
self.assertEqual(video['type'], 'application/x-shockwave-flash')
def test_open_graph_no_og_title_no_og_url(self):
url = 'http://lassie.it/open_graph/no_og_title_no_og_url.html'
data = lassie.fetch(url)
self.assertEqual(data['url'], url)
self.assertEqual(data['title'], 'Lassie Open Graph Test | No og:title, No og:url')
def test_open_graph_og_image_plus_two_body_images(self):
url = 'http://lassie.it/open_graph/og_image_plus_two_body_images.html'
data = lassie.fetch(url)
# Try without passing "all_images", then pass it
self.assertEqual(len(data['images']), 1)
data = lassie.fetch(url, all_images=True)
self.assertEqual(len(data['images']), 3)
image_0 = data['images'][0]
image_1 = data['images'][1]
image_2 = data['images'][2]
self.assertEqual(image_0['type'], 'og:image')
self.assertEqual(image_1['type'], 'body_image')
self.assertEqual(image_2['type'], 'body_image')
def test_open_graph_og_image_relative_url(self):
url = 'http://lassie.it/open_graph/og_image_relative_url.html'
data = lassie.fetch(url)
self.assertEqual(
data['images'][0]['src'], 'http://lassie.it/open_graph/name.jpg')
|
michaelhelmick/lassie
|
tests/test_open_graph.py
|
Python
|
mit
| 2,426
| 0.001237
|
#!/usr/bin/python3
import sys
import os
# this script allows the loading of palettes from files
# when invoked you must specify a palette
# google-blue.hex
# google-light-blue.hex
# old-blue.rgb
#
# each palette must contain 10 colours for the graduations between 0 and 100 %
# and an 11th colour for the 'incomplete' (current) month
#
# a hex file must be made up with one rgb hex file per line, like so:
#
# chris@Ox1b open-source-stats(master)-> cat palettes/google-light-blue.hex
# E1F5FE
# B3E5FC
# 81D4FA
# 4FC3F7
# 29B6F6
# 03A9F4
# 039BE5
# 0288D1
# 0277BD
# 01579B
# 7C019F
#
# a rgb file takes 3 values per line (r, g and then b)
# these values must be whitespace or comma separated
# leading and trailing whitespace are ignored:
#
# chris@Ox1b open-source-stats(master)-> cat palettes/old-blue.rgb
# 227 242 253
# 187 222 251
# 144 202 249
# 100 181 246
# 66 165 245
# 33 150 243
# 30 136 229
# 25 118 210
# 21 101 192
# 13 71 161
# 124 1 159
#
def hex_to_rgb_list(hex):
r = hex[0:2]
g = hex[2:4]
b = hex[4:6]
return [
"%4d" %(int(r,16)),
"%4d" %(int(g,16)),
"%4d" %(int(b,16))
]
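# Example (added for illustration): hex_to_rgb_list("E1F5FE") returns
# [' 225', ' 245', ' 254'] (each component rendered with "%4d").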
def rgb_str_to_list(rgbstr):
parts = rgbstr.split()
out = []
for part in parts:
part = "%3d" %(int(part))
out.append(part)
print(part)
return out
def parse_palette(path):
colours = []
if not os.path.exists(path):
print("Error: failed to find palette at '" + path + "'")
exit(1)
mode = path[-3:]
if mode != 'hex' and mode != 'rgb':
print("Error: unsupported palette format '" + mode + "'")
exit(1)
lines = []
with open(path, "r") as f:
lines = f.readlines()
if len(lines) != 11:
print("Error: palette did not specify exactly 11 colours")
exit(1)
colours = []
for line in lines:
line = line.lstrip(" ")
line = line.rstrip("\n")
line = line.rstrip(" ")
line = line.replace(",", " ")
colour = None
if mode == "rgb":
colour = rgb_str_to_list(line)
elif mode == "hex":
colour = hex_to_rgb_list(line)
else:
print("Error: unsupported format")
exit(1)
colours.append(colour)
if len(colours) != 11:
print("Error: failed to parse 11 colour lines")
exit(1)
return colours
# example output:
#
# var colour_range = [
# /* 0 = 0 .. 9 % */
# [225, 245, 254]
#
# /* 1 = 10 .. 19 % */
# [179, 229, 252]
#
# /* 2 = 20 .. 29 % */
# [129, 212, 250]
#
# /* 3 = 30 .. 39 % */
# [79, 195, 247]
#
# /* 4 = 40 .. 49 % */
# [41, 182, 246]
#
# /* 5 = 50 .. 59 % */
# [3, 169, 244]
#
# /* 6 = 60 .. 69 % */
# [3, 155, 229]
#
# /* 7 = 70 .. 79 % */
# [2, 136, 209]
#
# /* 8 = 80 .. 89 % */
# [2, 119, 189]
#
# /* 9 = 90 .. 100 % */
# [1, 87, 155]
#
# ];
header = '''
/* FIXME we need to decide on these colour graduations
* the idea is:
* colour_range[0] is for 0 .. 9 %
* colour_range[1] is for 10 .. 19 %
* colour_range[2] is for 20 .. 29 %
* ...
* colour_range[8] is for 80 .. 89 %
* colour_range[9] is for 90 ..100 %
*
* currently using the light blue palette from
* http://www.google.com/design/spec/style/color.html#color-color-palette
*
* FIXME I do really like this blue...
* [ 0, 0, 255],
*/
'''
comments = [
"/* 0 = 0 .. 9 % */",
"/* 1 = 10 .. 19 % */",
"/* 2 = 20 .. 29 % */",
"/* 3 = 30 .. 39 % */",
"/* 4 = 40 .. 49 % */",
"/* 5 = 50 .. 59 % */",
"/* 6 = 60 .. 69 % */",
"/* 7 = 70 .. 79 % */",
"/* 8 = 80 .. 89 % */",
"/* 9 = 90 .. 100 % */",
"// ongoing colour is for the current month, to show that the number is not yet final",
]
footer = '''
function get_colour(percent){
var index = Math.floor(colour_range.length * percent);
index = Math.max(index, 0);
index = Math.min(index, (colour_range.length-1));
if( index in colour_range ){
return colour_range[index];
}
console.log("Error: failed to find colour")
return [0,0,0];
}
'''
def pretty_print(f, colours):
if len(colours) != 11:
print("Error: pretty_print output must be 10 elems")
exit(1)
incomplete_colour = colours[10]
lines = []
# print header
lines.append(header)
lines.append("\n")
# print open
lines.append("var colour_range = [")
for i in range(0, 10):
lines.append(" ")
lines.append(comments[i])
lines.append("\n")
lines.append(" ")
lines.append("[")
for o in colours[i]:
o += ","
lines.append(o)
lines.append("],\n")
# print close
lines.append("];\n")
lines.append("\n")
lines.append(comments[10])
lines.append("\n")
lines.append('''var colour_ongoing = [''')
for o in colours[10]:
o += ","
lines.append(o)
lines.append("];\n")
# print footer
lines.append(footer)
lines.append("\n")
f.writelines(lines)
if __name__ == "__main__":
args = sys.argv
if len(args) < 2:
print("Error: must specify palette file")
exit(1)
if len(args) > 2:
print("Error: too many args, only take palette file")
exit(1)
palette = args[1]
if not os.path.exists(palette):
expanded_palette = os.path.join("palettes", palette)
if not os.path.exists(expanded_palette):
print("Error: failed to find palette, checked at '" + palette + "' and '" + expanded_palette + "'")
print("Please see palettes/ for a list of available palettes")
exit(1)
palette = expanded_palette
colours = parse_palette(palette)
if len(colours) != 11:
print("Error: colour list must have 10 elems")
exit(1)
target = "site/colours.js"
with open(target, "w") as f:
pretty_print(f, colours)
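# Illustrative invocation (added; assumes the script is run from the repository
# root so that palettes/ and site/ resolve):
#
#     python3 scripts/gen_colour_palette.py google-light-blue.hex
#
# which reads palettes/google-light-blue.hex and writes site/colours.js.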
|
mkfifo/open-source-stats
|
scripts/gen_colour_palette.py
|
Python
|
gpl-3.0
| 6,213
| 0.002253
|
from PyOMAPIc import PyOMAPIc
|
stanvit/pyomapic
|
__init__.py
|
Python
|
mit
| 31
| 0.032258
|
from settings_base import *
PORT = 80
SERVER_NAME = 'http://libra.pitomba.org:%s' % PORT
MONGODB_DATABASE_URL = "localhost"
MONGODB_DATABASE_PORT = 27017
MONGODB_DATABASE_USER = "usr_libra"
MONGODB_DATABASE_PWD = "usr_libra"
MONGODB_DATABASE_POOL_SIZE = 50
|
pitomba/libra
|
libra/settings.py
|
Python
|
mit
| 259
| 0
|
def italianhello():
i01.setHandSpeed("left", 0.60, 0.60, 1.0, 1.0, 1.0, 1.0)
i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.65, 0.75)
i01.moveHead(105,78)
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",90,144,60,75)
i01.moveHand("left",112,111,105,102,81,10)
i01.moveHand("right",0,0,0,50,82,180)
ear.pauseListening()
sleep(1)
for w in range(0,3):
i01.setHandSpeed("left", 0.60, 0.60, 1.0, 1.0, 1.0, 1.0)
i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 0.60)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 0.60, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.65, 0.75)
i01.moveHead(83,98)
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",90,157,47,75)
i01.moveHand("left",112,111,105,102,81,10)
i01.moveHand("right",3,0,62,41,117,94)
if w==1:
i01.setHandSpeed("left", 0.60, 0.60, 1.0, 1.0, 1.0, 1.0)
i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 0.60)
i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0)
i01.setArmSpeed("right", 0.65, 1.0, 1.0, 1.0)
i01.setHeadSpeed(0.65, 0.75)
i01.moveHead(83,70)
i01.mouth.speakBlocking("ciao , il mio nome e inmoov one")
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",57,145,50,68)
i01.moveHand("left",100,90,85,80,71,15)
i01.moveHand("right",3,0,31,12,26,45)
sleep(1)
i01.moveHead(83,98)
i01.moveArm("left",78,48,37,11)
i01.moveArm("right",90,157,47,75)
i01.moveHand("left",112,111,105,102,81,10)
i01.moveHand("right",3,0,62,41,117,94)
sleep(1)
i01.setHandSpeed("left", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85)
i01.setHandSpeed("right", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85)
i01.setArmSpeed("right", 0.75, 0.85, 0.95, 0.85)
i01.setArmSpeed("left", 0.95, 0.65, 0.75, 0.75)
i01.setHeadSpeed(0.75, 0.75)
i01.moveHead(79,100)
i01.moveArm("left",5,94,28,15)
i01.moveArm("right",5,82,28,15)
i01.moveHand("left",42,58,42,55,71,35)
i01.moveHand("right",81,50,82,60,105,113)
ear.resumeListening()
|
MyRobotLab/pyrobotlab
|
home/kwatters/harry/gestures/italianhello.py
|
Python
|
apache-2.0
| 2,293
| 0.066289
|
#
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
import json
import re
from itertools import chain
from functools import wraps
from ansible.module_utils._text import to_text
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase
def configure(func):
@wraps(func)
def wrapped(self, *args, **kwargs):
prompt = self._connection.get_prompt()
if not to_text(prompt, errors='surrogate_or_strict').strip().endswith('#'):
self.send_command('configure')
return func(self, *args, **kwargs)
return wrapped
class Cliconf(CliconfBase):
def get_text(self, ele, tag):
try:
return to_text(ele.find(tag).text, errors='surrogate_then_replace').strip()
except AttributeError:
pass
def get_device_info(self):
device_info = dict()
device_info['network_os'] = 'junos'
reply = self.get(command='show version')
data = to_text(reply, errors='surrogate_or_strict').strip()
match = re.search(r'Junos: (\S+)', data)
if match:
device_info['network_os_version'] = match.group(1)
match = re.search(r'Model: (\S+)', data, re.M)
if match:
device_info['network_os_model'] = match.group(1)
match = re.search(r'Hostname: (\S+)', data, re.M)
if match:
device_info['network_os_hostname'] = match.group(1)
return device_info
def get_config(self, source='running', format='text', flags=None):
if source != 'running':
raise ValueError("fetching configuration from %s is not supported" % source)
options_values = self.get_option_values()
if format not in options_values['format']:
raise ValueError("'format' value %s is invalid. Valid values are %s" % (format, ','.join(options_values['format'])))
if format == 'text':
cmd = 'show configuration'
else:
cmd = 'show configuration | display %s' % format
cmd += ' '.join(to_list(flags))
cmd = cmd.strip()
return self.send_command(cmd)
@configure
def edit_config(self, candidate=None, commit=True, replace=None, comment=None):
operations = self.get_device_operations()
self.check_edit_config_capability(operations, candidate, commit, replace, comment)
resp = {}
results = []
requests = []
if replace:
candidate = 'load replace {0}'.format(replace)
for line in to_list(candidate):
if not isinstance(line, collections.Mapping):
line = {'command': line}
cmd = line['command']
results.append(self.send_command(**line))
requests.append(cmd)
diff = self.compare_configuration()
if diff:
resp['diff'] = diff
if commit:
self.commit(comment=comment)
else:
self.discard_changes()
else:
for cmd in ['top', 'exit']:
self.send_command(cmd)
resp['request'] = requests
resp['response'] = results
return resp
def get(self, command, prompt=None, answer=None, sendonly=False, output=None):
if output:
command = self._get_command_with_output(command, output)
return self.send_command(command, prompt=prompt, answer=answer, sendonly=sendonly)
@configure
def commit(self, comment=None, confirmed=False, at_time=None, synchronize=False):
"""
Execute commit command on remote device.
:param comment: Comment to be associated with commit
:param confirmed: Boolean flag to indicate if the previous commit should confirmed
:param at_time: Time at which to activate configuration changes
:param synchronize: Boolean flag to indicate if commit should synchronize on remote peers
:return: Command response received from device
"""
command = 'commit'
if comment:
command += ' comment {0}'.format(comment)
if confirmed:
command += ' confirmed'
if at_time:
command += ' {0}'.format(at_time)
if synchronize:
command += ' peers-synchronize'
command += ' and-quit'
return self.send_command(command)
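    # Illustrative example for commit() above (added, not in the original):
    # commit(comment="save work", confirmed=True) sends the command string
    # "commit comment save work confirmed and-quit" to the device.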
@configure
def discard_changes(self):
command = 'rollback 0'
for cmd in chain(to_list(command), 'exit'):
self.send_command(cmd)
@configure
def validate(self):
return self.send_command('commit check')
@configure
def compare_configuration(self, rollback_id=None):
command = 'show | compare'
if rollback_id is not None:
command += ' rollback %s' % int(rollback_id)
resp = self.send_command(command)
r = resp.splitlines()
if len(r) == 1 and r[0] == '[edit]':
resp = ''
return resp
def get_diff(self, rollback_id=None):
diff = {'config_diff': None}
response = self.compare_configuration(rollback_id=rollback_id)
if response:
diff['config_diff'] = response
return diff
def get_device_operations(self):
return {
'supports_diff_replace': False,
'supports_commit': True,
'supports_rollback': True,
'supports_defaults': False,
'supports_onbox_diff': True,
'supports_commit_comment': True,
'supports_multiline_delimiter': False,
'supports_diff_match': False,
'supports_diff_ignore_lines': False,
'supports_generate_diff': False,
'supports_replace': True
}
def get_option_values(self):
return {
'format': ['text', 'set', 'xml', 'json'],
'diff_match': [],
'diff_replace': [],
'output': ['text', 'set', 'xml', 'json']
}
def get_capabilities(self):
result = dict()
result['rpc'] = self.get_base_rpc() + ['commit', 'discard_changes', 'run_commands', 'compare_configuration', 'validate', 'get_diff']
result['network_api'] = 'cliconf'
result['device_info'] = self.get_device_info()
result['device_operations'] = self.get_device_operations()
result.update(self.get_option_values())
return json.dumps(result)
def _get_command_with_output(self, command, output):
options_values = self.get_option_values()
if output not in options_values['output']:
raise ValueError("'output' value %s is invalid. Valid values are %s" % (output, ','.join(options_values['output'])))
if output == 'json' and not command.endswith('| display json'):
cmd = '%s | display json' % command
elif output == 'xml' and not command.endswith('| display xml'):
cmd = '%s | display xml' % command
elif output == 'text' and (command.endswith('| display json') or command.endswith('| display xml')):
cmd = command.rsplit('|', 1)[0]
else:
cmd = command
return cmd
|
jimi-c/ansible
|
lib/ansible/plugins/cliconf/junos.py
|
Python
|
gpl-3.0
| 7,917
| 0.001768
|
from django.apps import AppConfig
from django.core import checks
from django.utils.translation import ugettext_lazy as _
import xadmin
class XAdminConfig(AppConfig):
"""Simple AppConfig which does not do automatic discovery."""
name = 'xadmin'
verbose_name = _("Administration")
def ready(self):
self.module.autodiscover()
setattr(xadmin, 'site', xadmin.site)
|
why168/PythonProjects
|
MxOnlie/extra_apps/xadmin/apps.py
|
Python
|
artistic-2.0
| 396
| 0
|
#!/usr/bin/env python
from distutils.core import setup
execfile('modlunky/version.py')
with open('requirements.txt') as requirements:
required = requirements.read().splitlines()
kwargs = {
"name": "modlunky",
"version": str(__version__),
"packages": ["modlunky"],
"scripts": ["bin/modlunky"],
"description": "Library and Command Line Tool for Spelunky.",
"author": "Gary M. Josack",
"maintainer": "Gary M. Josack",
"author_email": "gary@byoteki.com",
"maintainer_email": "gary@byoteki.com",
"license": "MIT",
"url": "https://github.com/gmjosack/modlunky",
"download_url": "https://github.com/gmjosack/modlunky/archive/master.tar.gz",
"classifiers": [
"Programming Language :: Python",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
]
}
if required:
kwargs["install_requires"] = required
setup(**kwargs)
|
gmjosack/modlunky
|
setup.py
|
Python
|
mit
| 1,002
| 0.001996
|
#!/usr/bin/python
############################################################################
# tcp2tcp.py #
# v0.1 #
# #
# Copyright (C) 2011 by Boyan Tabakov #
# blade@alslayer.net #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the #
# Free Software Foundation, Inc., #
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. #
############################################################################
import sys,re
inp = sys.stdin
debug = 0
if len(sys.argv) < 2:
dbg ( "Usage: ", sys.argv[0], " <frame file>")
exit(1)
def dbg(*args):
global debug
if not debug:
return
for a in args:
print >> sys.stderr, a,
print >> sys.stderr
frames = []
f_index = 0
fr = open(sys.argv[1], 'r')
first_line = True
for line in inp:
if line == '\n' or line[0] == '#':
continue
if first_line:
first_line = False
print line[:-1] + '\tcurrent_throughput'
continue
l = re.split('\t', line[:-1])
cstart = float(l[8])
cend = float(l[9])
    if cend <= cstart:
        # skip connections with a non-positive duration
        continue
cbytes = 0
fr.seek(f_index)
seeking = True
for f in fr:
ll = len(f)
f = re.split('\t', f[:-1])
ts = float(f[0])
if ts < cstart:
f_index += ll
seeking = False
continue
if ts > cend:
break
cbytes += int(f[1])
thr = float(cbytes) / (cend - cstart)
print line[:-1] + '\t' + str(thr)
|
bladealslayer/nettraf-scripts
|
tcp2tcp.py
|
Python
|
gpl-2.0
| 2,816
| 0.003196
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Ship()
result.template = "object/ship/player/shared_player_z95.iff"
result.attribute_template_id = -1
result.stfName("space_ship","player_z95")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
obi-two/Rebelion
|
data/scripts/templates/object/ship/player/shared_player_z95.py
|
Python
|
mit
| 434
| 0.048387
|
import csv
from django.db.models import Max
from django.utils.translation import ugettext_lazy as _
from urllib2 import URLError
from googlemaps import GoogleMaps, GoogleMapsError
from locations.models import Location
from locations.exceptions import LocationEncodingError
class CsvParseError(csv.Error):
pass
def unicode_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs):
"""From Python csv documentation, used to read non-ASCII data"""
# csv.py doesn't do Unicode; encode temporarily as UTF-8:
csv_reader = csv.reader(utf_8_encoder(unicode_csv_data),
dialect=dialect, **kwargs)
for row in csv_reader:
# decode UTF-8 back to Unicode, cell by cell:
yield [unicode(cell, 'utf-8') for cell in row]
def utf_8_encoder(unicode_csv_data):
for line in unicode_csv_data:
yield unicode(line.decode('cp1252')).encode('utf8')
def get_data_list(csv_reader, has_header=False):
"""
Reads from the csv reader object and creates a list of dictionaries with
    all of the values, separating csv parsing from all other data management.
"""
locations_list = []
# TODO: re-implement this, perhaps after fixing problem of newlines
# inside strings... the problems look like this (next two rows):
# "My string starts
# ", 5, 3
##row_start = 1 if csv.Sniffer().has_header(sample) else 0
# This had to be commented out because the csv reader object is not
# subscriptable!
##for (counter, row) in enumerate(csv_reader[row_start:]):
for (counter, row) in enumerate(csv_reader):
# Postal code is optional
try:
postal_code = row[4].strip()
except IndexError:
postal_code = ''
try:
locations_list.append({
'name': row[0].strip(),
'address': row[1].strip(),
'city': row[2].strip(),
'state': row[3].strip(),
'postal_code': postal_code,
})
        except IndexError:
            raise CsvParseError("Missing a column in row %s" % (counter + 1))
        except Exception, e:
            raise CsvParseError(
                "%s exception in row %s, %s" % (e, (counter + 1), row))
return locations_list
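# Expected column layout for each CSV row (the sample values are illustrative):
#   name, address, city, state, postal code (the postal code is optional)
#   "Cafe Uno","123 Main St","Springfield","IL","62701"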
def locations_from_csv(csv_file, category, has_header=False,
duplicates_field=None):
"""
Something something
"""
sample = csv_file.read(1024)
dialect = csv.Sniffer().sniff(sample)
csv_file.seek(0)
csv_reader = unicode_csv_reader(csv_file, dialect)
messages = {
'errors': True,
'warnings': [],
'created': [],
'skipped': [],
'created_count': 0,
'skipped_count': 0,
'upload_count': 0,
}
try:
location_list = get_data_list(csv_reader)
except CsvParseError, e:
messages['warnings'].append(e)
return messages
counter_query = Location.objects.aggregate(Max('upload_count')).get('upload_count__max', 0)
upload_counter = 0 if counter_query is None else counter_query + 1
for location_row in location_list:
try:
location, created = Location.objects.get_or_create(
original_name=location_row['name'], defaults={
'name': " ".join([word[0].upper() + word[1:].lower() for word in location_row['name'].split()]),
'street_address': " ".join([word[0].upper() + word[1:].lower() for word in location_row['address'].split()]),
'city': " ".join([word[0].upper() + word[1:].lower() for word in location_row['city'].split()]),
'state': location_row['state'].upper(),
'postal_code': location_row['postal_code'],
'upload_count': upload_counter,
})
except Location.MultipleObjectsReturned:
messages['warnings'].append(
"%s is already duplicated in the database" % location_row['name'])
created = False
if created:
location.category.add(category)
location.save()
messages['created'].append(location_row['name'])
else:
# Duplicate, but enforce that it is now active
location.is_active = True
location.save()
messages['skipped'].append(location_row['name'])
messages['errors'] = False
messages['created_count'] = len(messages['created'])
messages['skipped_count'] = len(messages['skipped'])
messages['upload_count'] = upload_counter
return messages
def geopoint_average(points):
"""Takes a list of lat-lng tuples and returns an average"""
count = len(points)
if not count:
return None
lat = 0
lng = 0
for point in points:
lat += point[0]
lng += point[1]
return (lat/count, lng/count)
def get_address_latlng(location):
"""
Requests the latitude and longitude for the given location's address.
Uses the Google Maps API, but could be extended to use a different API.
"""
address = u"%s, %s, %s %s" % (location.street_address, location.city,
location.state, location.postal_code)
try:
return GoogleMaps().address_to_latlng(address)
except GoogleMapsError:
raise LocationEncodingError(_("Google reported an error!"))
except URLError:
raise LocationEncodingError(_("Hmm, network error. Please try again."))
except Exception, e:
raise LocationEncodingError(_("Unknown error: %s, %s" % (
Exception, e)))
def geocode_location(location):
"""
Basically the same as geocode_address but acts on the location.
"""
location.point = get_address_latlng(location)
location.save()
return location
|
bennylope/django-lokoj
|
locations/utils.py
|
Python
|
mit
| 5,917
| 0.00169
|
'''
/*******************************************************************************
*
* Copyright (c) 2015 Fraunhofer FOKUS, All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3.0 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
* AUTHORS: Louay Bassbouss (louay.bassbouss@fokus.fraunhofer.de)
*
******************************************************************************/
'''
from django.template import TemplateSyntaxError, Node, Variable, Library
from django.conf import settings
register = Library()
# I found some tricks in URLNode and url from defaulttags.py:
# https://code.djangoproject.com/browser/django/trunk/django/template/defaulttags.py
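# Example template usage (assuming this templatetag library has been loaded
# with {% load %} in the template):
#   {% value_from_settings "MEDIA_URL" %}
#   {% value_from_settings "MEDIA_URL" as media_url %}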
@register.tag
def value_from_settings(parser, token):
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'%s' takes at least one " \
"argument (settings constant to retrieve)" % bits[0])
settingsvar = bits[1]
settingsvar = settingsvar[1:-1] if settingsvar[0] == '"' else settingsvar
asvar = None
bits = bits[2:]
if len(bits) >= 2 and bits[-2] == 'as':
asvar = bits[-1]
bits = bits[:-2]
if len(bits):
raise TemplateSyntaxError("'value_from_settings' didn't recognise " \
"the arguments '%s'" % ", ".join(bits))
return ValueFromSettings(settingsvar, asvar)
class ValueFromSettings(Node):
def __init__(self, settingsvar, asvar):
self.arg = Variable(settingsvar)
self.asvar = asvar
def render(self, context):
        ret_val = getattr(settings, str(self.arg))
if self.asvar:
context[self.asvar] = ret_val
return ''
else:
return ret_val
|
fraunhoferfokus/fixmycity
|
dummy/templatetags/value_from_settings.py
|
Python
|
lgpl-3.0
| 2,196
| 0.013206
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""RetinaNet (via ResNet) model definition.
Defines the RetinaNet model and loss functions from this paper:
https://arxiv.org/pdf/1708.02002
Uses the ResNet model as a basis.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
_WEIGHT_DECAY = 1e-4
_BATCH_NORM_DECAY = 0.997
_BATCH_NORM_EPSILON = 1e-4
_RESNET_MAX_LEVEL = 5
def batch_norm_relu(inputs,
is_training_bn,
relu=True,
init_zero=False,
data_format='channels_last',
name=None):
"""Performs a batch normalization followed by a ReLU.
Args:
inputs: `Tensor` of shape `[batch, channels, ...]`.
is_training_bn: `bool` for whether the model is training.
relu: `bool` if False, omits the ReLU operation.
init_zero: `bool` if True, initializes scale parameter of batch
normalization with 0 instead of 1 (default).
data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
name: the name of the batch normalization layer
Returns:
A normalized `Tensor` with the same `data_format`.
"""
if init_zero:
gamma_initializer = tf.zeros_initializer()
else:
gamma_initializer = tf.ones_initializer()
if data_format == 'channels_first':
axis = 1
else:
axis = 3
inputs = tf.layers.batch_normalization(
inputs=inputs,
axis=axis,
momentum=_BATCH_NORM_DECAY,
epsilon=_BATCH_NORM_EPSILON,
center=True,
scale=True,
training=is_training_bn,
fused=True,
gamma_initializer=gamma_initializer,
name=name)
if relu:
inputs = tf.nn.relu(inputs)
return inputs
def fixed_padding(inputs, kernel_size, data_format='channels_last'):
"""Pads the input along the spatial dimensions independently of input size.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]` or
`[batch, height, width, channels]` depending on `data_format`.
kernel_size: `int` kernel size to be used for `conv2d` or max_pool2d`
operations. Should be a positive integer.
data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
Returns:
A padded `Tensor` of the same `data_format` with size either intact
(if `kernel_size == 1`) or padded (if `kernel_size > 1`).
"""
pad_total = kernel_size - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
if data_format == 'channels_first':
padded_inputs = tf.pad(
inputs, [[0, 0], [0, 0], [pad_beg, pad_end], [pad_beg, pad_end]])
else:
padded_inputs = tf.pad(
inputs, [[0, 0], [pad_beg, pad_end], [pad_beg, pad_end], [0, 0]])
return padded_inputs
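# For example, kernel_size=3 yields pad_beg=1 and pad_end=1 (one row/column of
# zeros on each spatial side), while kernel_size=7 yields 3 on each side; the
# padding depends only on the kernel size, never on the input dimensions.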
def conv2d_fixed_padding(inputs,
filters,
kernel_size,
strides,
data_format='channels_last'):
"""Strided 2-D convolution with explicit padding.
The padding is consistent and is based only on `kernel_size`, not on the
dimensions of `inputs` (as opposed to using `tf.layers.conv2d` alone).
Args:
inputs: `Tensor` of size `[batch, channels, height_in, width_in]`.
filters: `int` number of filters in the convolution.
kernel_size: `int` size of the kernel to be used in the convolution.
strides: `int` strides of the convolution.
data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
Returns:
A `Tensor` of shape `[batch, filters, height_out, width_out]`.
"""
if strides > 1:
inputs = fixed_padding(inputs, kernel_size, data_format=data_format)
return tf.layers.conv2d(
inputs=inputs,
filters=filters,
kernel_size=kernel_size,
strides=strides,
padding=('SAME' if strides == 1 else 'VALID'),
use_bias=False,
kernel_initializer=tf.variance_scaling_initializer(),
data_format=data_format)
def residual_block(inputs,
filters,
is_training_bn,
strides,
use_projection=False,
data_format='channels_last'):
"""Standard building block for residual networks with BN after convolutions.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]`.
    filters: `int` number of filters for the two convolutions in the block.
is_training_bn: `bool` for whether the model is in training.
strides: `int` block stride. If greater than 1, this block will ultimately
downsample the input.
use_projection: `bool` for whether this block should use a projection
shortcut (versus the default identity shortcut). This is usually `True`
for the first block of a block group, which may change the number of
filters and the resolution.
data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
Returns:
The output `Tensor` of the block.
"""
shortcut = inputs
if use_projection:
# Projection shortcut in first layer to match filters and strides
shortcut = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=1,
strides=strides,
data_format=data_format)
shortcut = batch_norm_relu(
shortcut, is_training_bn, relu=False, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=3,
strides=strides,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training_bn, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=3,
strides=1,
data_format=data_format)
inputs = batch_norm_relu(
inputs,
is_training_bn,
relu=False,
init_zero=True,
data_format=data_format)
return tf.nn.relu(inputs + shortcut)
def bottleneck_block(inputs,
filters,
is_training_bn,
strides,
use_projection=False,
data_format='channels_last'):
"""Bottleneck block variant for residual networks with BN after convolutions.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]`.
filters: `int` number of filters for the first two convolutions. Note that
the third and final convolution will use 4 times as many filters.
is_training_bn: `bool` for whether the model is in training.
strides: `int` block stride. If greater than 1, this block will ultimately
downsample the input.
use_projection: `bool` for whether this block should use a projection
shortcut (versus the default identity shortcut). This is usually `True`
for the first block of a block group, which may change the number of
filters and the resolution.
data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
Returns:
The output `Tensor` of the block.
"""
shortcut = inputs
if use_projection:
# Projection shortcut only in first block within a group. Bottleneck blocks
# end with 4 times the number of filters.
filters_out = 4 * filters
shortcut = conv2d_fixed_padding(
inputs=inputs,
filters=filters_out,
kernel_size=1,
strides=strides,
data_format=data_format)
shortcut = batch_norm_relu(
shortcut, is_training_bn, relu=False, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=1,
strides=1,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training_bn, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=filters,
kernel_size=3,
strides=strides,
data_format=data_format)
inputs = batch_norm_relu(inputs, is_training_bn, data_format=data_format)
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=4 * filters,
kernel_size=1,
strides=1,
data_format=data_format)
inputs = batch_norm_relu(
inputs,
is_training_bn,
relu=False,
init_zero=True,
data_format=data_format)
return tf.nn.relu(inputs + shortcut)
def block_group(inputs,
filters,
block_fn,
blocks,
strides,
is_training_bn,
name,
data_format='channels_last'):
"""Creates one group of blocks for the ResNet model.
Args:
inputs: `Tensor` of size `[batch, channels, height, width]`.
filters: `int` number of filters for the first convolution of the layer.
block_fn: `function` for the block to use within the model
blocks: `int` number of blocks contained in the layer.
strides: `int` stride to use for the first convolution of the layer. If
greater than 1, this layer will downsample the input.
is_training_bn: `bool` for whether the model is training.
name: `str`name for the Tensor output of the block layer.
data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
Returns:
The output `Tensor` of the block layer.
"""
# Only the first block per block_group uses projection shortcut and strides.
inputs = block_fn(
inputs,
filters,
is_training_bn,
strides,
use_projection=True,
data_format=data_format)
for _ in range(1, blocks):
inputs = block_fn(
inputs, filters, is_training_bn, 1, data_format=data_format)
return tf.identity(inputs, name)
def resnet_v1_generator(block_fn, layers, data_format='channels_last'):
"""Generator of ResNet v1 model with classification layers removed.
  Our actual ResNet network. We return the outputs of c2, c3, c4 and c5.
N.B. batch norm is always run with trained parameters, as we use very small
batches when training the object layers.
Args:
block_fn: `function` for the block to use within the model. Either
`residual_block` or `bottleneck_block`.
layers: list of 4 `int`s denoting the number of blocks to include in each
of the 4 block groups. Each group consists of blocks that take inputs of
the same resolution.
data_format: `str` either "channels_first" for `[batch, channels, height,
      width]` or "channels_last" for `[batch, height, width, channels]`.
Returns:
Model `function` that takes in `inputs` and `is_training` and returns the
output `Tensor` of the ResNet model.
"""
def model(inputs, is_training_bn=False):
"""Creation of the model graph."""
inputs = conv2d_fixed_padding(
inputs=inputs,
filters=64,
kernel_size=7,
strides=2,
data_format=data_format)
inputs = tf.identity(inputs, 'initial_conv')
inputs = batch_norm_relu(inputs, is_training_bn, data_format=data_format)
inputs = tf.layers.max_pooling2d(
inputs=inputs,
pool_size=3,
strides=2,
padding='SAME',
data_format=data_format)
inputs = tf.identity(inputs, 'initial_max_pool')
c2 = block_group(
inputs=inputs,
filters=64,
blocks=layers[0],
strides=1,
block_fn=block_fn,
is_training_bn=is_training_bn,
name='block_group1',
data_format=data_format)
c3 = block_group(
inputs=c2,
filters=128,
blocks=layers[1],
strides=2,
block_fn=block_fn,
is_training_bn=is_training_bn,
name='block_group2',
data_format=data_format)
c4 = block_group(
inputs=c3,
filters=256,
blocks=layers[2],
strides=2,
block_fn=block_fn,
is_training_bn=is_training_bn,
name='block_group3',
data_format=data_format)
c5 = block_group(
inputs=c4,
filters=512,
blocks=layers[3],
strides=2,
block_fn=block_fn,
is_training_bn=is_training_bn,
name='block_group4',
data_format=data_format)
return c2, c3, c4, c5
return model
def resnet_v1(resnet_depth, data_format='channels_last'):
"""Returns the ResNet model for a given size and number of output classes."""
model_params = {
18: {'block': residual_block, 'layers': [2, 2, 2, 2]},
34: {'block': residual_block, 'layers': [3, 4, 6, 3]},
50: {'block': bottleneck_block, 'layers': [3, 4, 6, 3]},
101: {'block': bottleneck_block, 'layers': [3, 4, 23, 3]},
152: {'block': bottleneck_block, 'layers': [3, 8, 36, 3]},
200: {'block': bottleneck_block, 'layers': [3, 24, 36, 3]}
}
if resnet_depth not in model_params:
raise ValueError('Not a valid resnet_depth:', resnet_depth)
params = model_params[resnet_depth]
return resnet_v1_generator(
params['block'], params['layers'], data_format)
def nearest_upsampling(data, scale):
"""Nearest neighbor upsampling implementation.
Args:
data: A float32 tensor of size [batch, height_in, width_in, channels].
scale: An integer multiple to scale resolution of input data.
Returns:
data_up: A float32 tensor of size
[batch, height_in*scale, width_in*scale, channels].
"""
with tf.name_scope('nearest_upsampling'):
bs, h, w, c = data.get_shape().as_list()
bs = -1 if bs is None else bs
# Use reshape to quickly upsample the input. The nearest pixel is selected
# implicitly via broadcasting.
data = tf.reshape(data, [bs, h, 1, w, 1, c]) * tf.ones(
[1, 1, scale, 1, scale, 1], dtype=data.dtype)
return tf.reshape(data, [bs, h * scale, w * scale, c])
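# For example, a [2, 8, 8, 256] input with scale=2 is reshaped to
# [2, 8, 1, 8, 1, 256], broadcast against the ones tensor to [2, 8, 2, 8, 2, 256],
# and reshaped back to [2, 16, 16, 256]; every pixel is simply repeated 2x2 times.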
# TODO(b/111271774): Removes this wrapper once b/111271774 is resolved.
def resize_bilinear(images, size, output_type):
"""Returns resized images as output_type.
Args:
images: A tensor of size [batch, height_in, width_in, channels].
size: A 1-D int32 Tensor of 2 elements: new_height, new_width. The new size
for the images.
output_type: The destination type.
Returns:
A tensor of size [batch, height_out, width_out, channels] as a dtype of
output_type.
"""
images = tf.image.resize_bilinear(images, size, align_corners=True)
return tf.cast(images, output_type)
## RetinaNet specific layers
def class_net(images, level, num_classes, num_anchors=6, is_training_bn=False):
"""Class prediction network for RetinaNet."""
for i in range(4):
images = tf.layers.conv2d(
images,
256,
kernel_size=(3, 3),
bias_initializer=tf.zeros_initializer(),
kernel_initializer=tf.random_normal_initializer(stddev=0.01),
activation=None,
padding='same',
name='class-%d' % i)
# The convolution layers in the class net are shared among all levels, but
    # each level has its own batch normalization to capture the statistical
# difference among different levels.
images = batch_norm_relu(images, is_training_bn, relu=True, init_zero=False,
name='class-%d-bn-%d' % (i, level))
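  # The bias of the final prediction layer below is initialized to
  # -log((1 - pi) / pi) with pi = 0.01, so that at the start of training every
  # anchor is scored as foreground with probability ~0.01 (the prior suggested
  # in the RetinaNet/focal-loss paper cited in the module docstring).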
classes = tf.layers.conv2d(
images,
num_classes * num_anchors,
kernel_size=(3, 3),
bias_initializer=tf.constant_initializer(-np.log((1 - 0.01) / 0.01)),
kernel_initializer=tf.random_normal_initializer(stddev=0.01),
padding='same',
name='class-predict')
return classes
def box_net(images, level, num_anchors=6, is_training_bn=False):
"""Box regression network for RetinaNet."""
for i in range(4):
images = tf.layers.conv2d(
images,
256,
kernel_size=(3, 3),
activation=None,
bias_initializer=tf.zeros_initializer(),
kernel_initializer=tf.random_normal_initializer(stddev=0.01),
padding='same',
name='box-%d' % i)
# The convolution layers in the box net are shared among all levels, but
    # each level has its own batch normalization to capture the statistical
# difference among different levels.
images = batch_norm_relu(images, is_training_bn, relu=True, init_zero=False,
name='box-%d-bn-%d' % (i, level))
boxes = tf.layers.conv2d(
images,
4 * num_anchors,
kernel_size=(3, 3),
bias_initializer=tf.zeros_initializer(),
kernel_initializer=tf.random_normal_initializer(stddev=0.01),
padding='same',
name='box-predict')
return boxes
def resnet_fpn(features,
min_level=3,
max_level=7,
resnet_depth=50,
is_training_bn=False,
use_nearest_upsampling=True):
"""ResNet feature pyramid networks."""
# upward layers
with tf.variable_scope('resnet%s' % resnet_depth):
resnet_fn = resnet_v1(resnet_depth)
u2, u3, u4, u5 = resnet_fn(features, is_training_bn)
feats_bottom_up = {
2: u2,
3: u3,
4: u4,
5: u5,
}
with tf.variable_scope('resnet_fpn'):
# lateral connections
feats_lateral = {}
for level in range(min_level, _RESNET_MAX_LEVEL + 1):
feats_lateral[level] = tf.layers.conv2d(
feats_bottom_up[level],
filters=256,
kernel_size=(1, 1),
padding='same',
name='l%d' % level)
# add top-down path
feats = {_RESNET_MAX_LEVEL: feats_lateral[_RESNET_MAX_LEVEL]}
for level in range(_RESNET_MAX_LEVEL - 1, min_level - 1, -1):
if use_nearest_upsampling:
feats[level] = nearest_upsampling(feats[level + 1],
2) + feats_lateral[level]
else:
feats[level] = resize_bilinear(
feats[level + 1], tf.shape(feats_lateral[level])[1:3],
feats[level + 1].dtype) + feats_lateral[level]
# add post-hoc 3x3 convolution kernel
for level in range(min_level, _RESNET_MAX_LEVEL + 1):
feats[level] = tf.layers.conv2d(
feats[level],
filters=256,
strides=(1, 1),
kernel_size=(3, 3),
padding='same',
name='post_hoc_d%d' % level)
# coarser FPN levels introduced for RetinaNet
for level in range(_RESNET_MAX_LEVEL + 1, max_level + 1):
feats_in = feats[level - 1]
if level > _RESNET_MAX_LEVEL + 1:
feats_in = tf.nn.relu(feats_in)
feats[level] = tf.layers.conv2d(
feats_in,
filters=256,
strides=(2, 2),
kernel_size=(3, 3),
padding='same',
name='p%d' % level)
# add batchnorm
for level in range(min_level, max_level + 1):
feats[level] = tf.layers.batch_normalization(
inputs=feats[level],
momentum=_BATCH_NORM_DECAY,
epsilon=_BATCH_NORM_EPSILON,
center=True,
scale=True,
training=is_training_bn,
fused=True,
name='p%d-bn' % level)
return feats
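# The dict returned by resnet_fpn maps each level l in [min_level, max_level] to
# a 256-channel feature map whose spatial size is roughly the input divided by 2**l.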
def retinanet(features,
min_level=3,
max_level=7,
num_classes=90,
num_anchors=6,
resnet_depth=50,
use_nearest_upsampling=True,
is_training_bn=False):
"""RetinaNet classification and regression model."""
# create feature pyramid networks
feats = resnet_fpn(features, min_level, max_level, resnet_depth,
is_training_bn, use_nearest_upsampling)
# add class net and box net in RetinaNet. The class net and the box net are
# shared among all the levels.
with tf.variable_scope('retinanet'):
class_outputs = {}
box_outputs = {}
with tf.variable_scope('class_net', reuse=tf.AUTO_REUSE):
for level in range(min_level, max_level + 1):
class_outputs[level] = class_net(feats[level], level, num_classes,
num_anchors, is_training_bn)
with tf.variable_scope('box_net', reuse=tf.AUTO_REUSE):
for level in range(min_level, max_level + 1):
box_outputs[level] = box_net(feats[level], level,
num_anchors, is_training_bn)
return class_outputs, box_outputs
def remove_variables(variables, resnet_depth=50):
"""Removes low-level variables from the input.
Removing low-level parameters (e.g., initial convolution layer) from training
usually leads to higher training speed and slightly better testing accuracy.
The intuition is that the low-level architecture (e.g., ResNet-50) is able to
capture low-level features such as edges; therefore, it does not need to be
fine-tuned for the detection task.
Args:
variables: all the variables in training
resnet_depth: the depth of ResNet model
Returns:
var_list: a list containing variables for training
"""
var_list = [v for v in variables
if v.name.find('resnet%s/conv2d/' % resnet_depth) == -1]
return var_list
def segmentation_class_net(images,
level,
num_channels=256,
is_training_bn=False):
"""Segmentation Feature Extraction Module.
Args:
images: A tensor of size [batch, height_in, width_in, channels_in].
    level: The FPN level; the feature map's spatial size is the input size
      divided by 2^level.
num_channels: The number of channels in convolution layers
is_training_bn: Whether batch_norm layers are in training mode.
Returns:
images: A feature tensor of size [batch, output_size, output_size,
channel_number]
"""
for i in range(3):
images = tf.layers.conv2d(
images,
num_channels,
kernel_size=(3, 3),
bias_initializer=tf.zeros_initializer(),
kernel_initializer=tf.random_normal_initializer(stddev=0.01),
activation=None,
padding='same',
name='class-%d' % i)
images = batch_norm_relu(images, is_training_bn, relu=True, init_zero=False,
name='class-%d-bn-%d' % (i, level))
images = tf.layers.conv2d(
images,
num_channels,
kernel_size=(3, 3),
bias_initializer=tf.zeros_initializer(),
kernel_initializer=tf.random_normal_initializer(stddev=0.01),
activation=None,
padding='same',
name='class-final')
return images
def retinanet_segmentation(features,
min_level=3,
max_level=5,
num_classes=21,
resnet_depth=50,
use_nearest_upsampling=False,
is_training_bn=False):
"""RetinaNet extension for semantic segmentation.
Args:
features: A tensor of size [batch, height_in, width_in, channels].
min_level: The minimum output feature pyramid level. This input defines the
smallest nominal feature stride = 2^min_level.
max_level: The maximum output feature pyramid level. This input defines the
largest nominal feature stride = 2^max_level.
num_classes: Number of object classes.
resnet_depth: The depth of ResNet backbone model.
use_nearest_upsampling: Whether use nearest upsampling for FPN network.
Alternatively, use bilinear upsampling.
is_training_bn: Whether batch_norm layers are in training mode.
Returns:
A tensor of size [batch, height_l, width_l, num_classes]
representing pixel-wise predictions before Softmax function.
"""
feats = resnet_fpn(features, min_level, max_level, resnet_depth,
is_training_bn, use_nearest_upsampling)
with tf.variable_scope('class_net', reuse=tf.AUTO_REUSE):
for level in range(min_level, max_level + 1):
feats[level] = segmentation_class_net(
feats[level], level, is_training_bn=is_training_bn)
if level == min_level:
fused_feature = feats[level]
else:
if use_nearest_upsampling:
          # Upsample by the resolution ratio between the two pyramid levels.
          scale = 2 ** (level - min_level)
feats[level] = nearest_upsampling(feats[level], scale)
else:
feats[level] = resize_bilinear(
feats[level], tf.shape(feats[min_level])[1:3], feats[level].dtype)
fused_feature += feats[level]
fused_feature = batch_norm_relu(
fused_feature, is_training_bn, relu=True, init_zero=False)
classes = tf.layers.conv2d(
fused_feature,
num_classes,
kernel_size=(3, 3),
bias_initializer=tf.zeros_initializer(),
kernel_initializer=tf.random_normal_initializer(stddev=0.01),
padding='same',
name='class-predict')
return classes
|
mlperf/training_results_v0.5
|
v0.5.0/google/cloud_v3.8/ssd-tpuv3-8/code/ssd/model/tpu/models/official/retinanet/retinanet_architecture.py
|
Python
|
apache-2.0
| 25,570
| 0.004263
|
#! /usr/bin/env python
# -*- coding: latin-1 -*-
# Copyright (C) 2006 Universitat Pompeu Fabra
#
# Permission is hereby granted to distribute this software for
# non-commercial research purposes, provided that this copyright
# notice is included with any such distribution.
#
# THIS SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
# SOFTWARE IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU
# ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
#
# Elaborated by Hector Palacios, hlp@ldc.usb.ve, hectorpal@gmail.com
import sys
import os
name = "s"
if len(sys.argv) < 2 or len(sys.argv) > 3:
print """
usage: %s <n> {cff}
generates files d.pddl and p.pddl at directory %s-<n>
for the problem of sorting <n> numbers
    if {cff} is passed, then PDDL compatible with ConformantFF is generated.
""" % (sys.argv[0], name)
sys.exit(1)
n = int(sys.argv[1])
cff=False
if len(sys.argv) == 3:
if sys.argv[2] == 'cff':
cff = True
else:
print 'Param should be empty or cff:', sys.argv[2]
sys.exit(1)
path=name+"-"+str(n)
name_p="%s-%d" % (name, n)
if cff:
path += "-cff"
name_p += "-cff"
os.system("/bin/rm -rf ./"+path)
os.makedirs(path)
domain = file(path + "/d.pddl", "w")
cs = ''
for i in range(1,n+1):
cs += ' n%d' % i
if cff:
t = ''
else:
t = ' - num'
print >> domain, """
(define (domain %s) """ % name + """
(:requirements :typing :equality)
(:types num)
(:constants %s%s)
(:predicates (foo) (less ?n1 ?n2%s))
""" % (cs,t,t)
def print_act(i,j):
print >> domain, """
(:action cmpswap-%d-%d
:effect (and (less n%d n%d) (not (less n%d n%d))""" % (i, j, i, j, j, i),
for k in range(1,n+1):
if k != i and k != j:
print >> domain, """
(when (less n%d n%d)
(and (less n%d n%d) (not (less n%d n%d))))""" % (
k, i, k, j, j, k),
print >> domain, """
(when (and (less n%d n%d) (not (less n%d n%d)))
(not (less n%d n%d)))""" % (
k, i, k, j, k, i),
print >> domain, """
(when (less n%d n%d)
(and (less n%d n%d) (not (less n%d n%d))))""" % (
j, k, i, k, k, i),
print >> domain, """
(when (and (less n%d n%d) (not (less n%d n%d)))
(not (less n%d n%d)))""" % (
j, k, i, k, j, k),
print >> domain, """
))"""
for i in range(1,n+1):
for j in range(i+1,n+1):
if i != j:
print_act(i,j)
print >> domain, """
)
"""
problem = file(path + "/p.pddl", "w")
if cff:
t = ''
else:
t = '(and '
print >> problem, """
(define (problem s%d)
(:domain %s)
(:init %s""" % (n,name,t),
for i in range(1,n+1):
for j in range(1,n+1):
if i != j:
print >> problem, """
(or (less n%d n%d) (not (less n%d n%d)))""" % (i,j,i,j),
if cff:
print >> problem, """
(unknown (less n%d n%d))""" % (i,j),
if cff:
t = ''
else:
t = ')'
print >> problem, """
%s)
(:goal (and""" % t,
for i in range(1,n):
print >> problem, """
(less n%d n%d)""" % (i,i+1),
print >> problem, """
))
)
"""
|
PlanTool/plantool
|
code/Uncertainty/T0/translator/generators/sortnum.py
|
Python
|
gpl-2.0
| 3,602
| 0.009162
|
import sys
import numpy as np
from flopy.mbase import Package
from flopy.utils import util_2d,util_3d
from flopy.modflow.mfpar import ModflowPar as mfpar
class ModflowUpw(Package):
'Upstream weighting package class\n'
def __init__(self, model, laytyp=0, layavg=0, chani=1.0, layvka=0, laywet=0, iupwcb = 53, hdry=-1E+30, iphdry = 0,\
hk=1.0, hani=1.0, vka=1.0, ss=1e-5, sy=0.15, vkcb=0.0, noparcheck=False, \
extension='upw', unitnumber = 31):
Package.__init__(self, model, extension, 'UPW', unitnumber) # Call ancestor's init to set self.parent, extension, name and unit number
self.heading = '# UPW for MODFLOW-NWT, generated by Flopy.'
self.url = 'upw_upstream_weighting_package.htm'
nrow, ncol, nlay, nper = self.parent.nrow_ncol_nlay_nper
# item 1
self.iupwcb = iupwcb # Unit number for file with cell-by-cell flow terms
self.hdry = hdry # Head in cells that are converted to dry during a simulation
self.npupw = 0 # number of UPW parameters
self.iphdry = iphdry
self.laytyp = util_2d(model,(nlay,),np.int,laytyp,name='laytyp')
self.layavg = util_2d(model,(nlay,),np.int,layavg,name='layavg')
self.chani = util_2d(model,(nlay,),np.int,chani,name='chani')
self.layvka = util_2d(model,(nlay,),np.int,layvka,name='vka')
self.laywet = util_2d(model,(nlay,),np.int,laywet,name='laywet')
self.options = ' '
if noparcheck: self.options = self.options + 'NOPARCHECK '
self.hk = util_3d(model,(nlay,nrow,ncol),np.float32,hk,name='hk',locat=self.unit_number[0])
self.hani = util_3d(model,(nlay,nrow,ncol),np.float32,hani,name='hani',locat=self.unit_number[0])
self.vka = util_3d(model,(nlay,nrow,ncol),np.float32,vka,name='vka',locat=self.unit_number[0])
self.ss = util_3d(model,(nlay,nrow,ncol),np.float32,ss,name='ss',locat=self.unit_number[0])
self.sy = util_3d(model,(nlay,nrow,ncol),np.float32,sy,name='sy',locat=self.unit_number[0])
self.vkcb = util_3d(model,(nlay,nrow,ncol),np.float32,vkcb,name='vkcb',locat=self.unit_number[0])
self.parent.add_package(self)
def write_file(self):
nrow, ncol, nlay, nper = self.parent.nrow_ncol_nlay_nper
# Open file for writing
f_upw = open(self.fn_path, 'w')
# Item 0: text
f_upw.write('%s\n' % self.heading)
# Item 1: IBCFCB, HDRY, NPLPF
f_upw.write('{0:10d}{1:10.3G}{2:10d}{3:10d}{4:s}\n'.format(self.iupwcb,self.hdry,self.npupw,self.iphdry,self.options))
        # LAYTYP array
        f_upw.write(self.laytyp.string)
        # LAYAVG array
        f_upw.write(self.layavg.string)
        # CHANI array
        f_upw.write(self.chani.string)
        # LAYVKA array
        f_upw.write(self.layvka.string)
        # LAYWET array
        f_upw.write(self.laywet.string)
# Item 7: WETFCT, IWETIT, IHDWET
iwetdry = self.laywet.sum()
if iwetdry > 0:
raise Exception, 'LAYWET should be 0 for UPW'
transient = not self.parent.get_package('DIS').steady.all()
for k in range(nlay):
f_upw.write(self.hk[k].get_file_entry())
if self.chani[k] < 1:
f_upw.write(self.hani[k].get_file_entry())
f_upw.write(self.vka[k].get_file_entry())
if transient == True:
f_upw.write(self.ss[k].get_file_entry())
if self.laytyp[k] !=0:
f_upw.write(self.sy[k].get_file_entry())
if self.parent.get_package('DIS').laycbd[k] > 0:
f_upw.write(self.vkcb[k].get_file_entry())
if (self.laywet[k] != 0 and self.laytyp[k] != 0):
f_upw.write(self.laywet[k].get_file_entry())
f_upw.close()
@staticmethod
def load(f, model, ext_unit_dict=None):
"""
Load an existing package.
Parameters
----------
f : filename or file handle
File to load.
model : model object
The model object (of type :class:`flopy.modflow.mf.Modflow`) to
which this package will be added.
ext_unit_dict : dictionary, optional
If the arrays in the file are specified using EXTERNAL,
or older style array control records, then `f` should be a file
handle. In this case ext_unit_dict is required, which can be
constructed using the function
:class:`flopy.utils.mfreadnam.parsenamefile`.
Returns
-------
dis : ModflowUPW object
ModflowLpf object.
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow()
>>> lpf = flopy.modflow.ModflowUpw.load('test.upw', m)
"""
if model.verbose:
sys.stdout.write('loading upw package file...\n')
if type(f) is not file:
filename = f
f = open(filename, 'r')
#dataset 0 -- header
while True:
line = f.readline()
if line[0] != '#':
break
# determine problem dimensions
nrow, ncol, nlay, nper = model.get_nrow_ncol_nlay_nper()
# Item 1: IBCFCB, HDRY, NPLPF - line already read above
if model.verbose:
print ' loading IUPWCB, HDRY, NPUPW, IPHDRY...'
t = line.strip().split()
iupwcb, hdry, npupw, iphdry = int(t[0]), float(t[1]), int(t[2]), int(t[3])
if iupwcb != 0:
model.add_pop_key_list(iupwcb)
iupwcb = 53
# options
noparcheck = False
if len(t) > 3:
for k in xrange(3,len(t)):
if 'NOPARCHECK' in t[k].upper():
noparcheck = True
# LAYTYP array
if model.verbose:
print ' loading LAYTYP...'
line = f.readline()
t = line.strip().split()
laytyp = np.array((t[0:nlay]),dtype=np.int)
# LAYAVG array
if model.verbose:
print ' loading LAYAVG...'
line = f.readline()
t = line.strip().split()
layavg = np.array((t[0:nlay]),dtype=np.int)
# CHANI array
if model.verbose:
print ' loading CHANI...'
line = f.readline()
t = line.strip().split()
chani = np.array((t[0:nlay]),dtype=np.float32)
# LAYVKA array
if model.verbose:
print ' loading LAYVKA...'
line = f.readline()
t = line.strip().split()
layvka = np.array((t[0:nlay]),dtype=np.int)
# LAYWET array
if model.verbose:
print ' loading LAYWET...'
line = f.readline()
t = line.strip().split()
laywet = np.array((t[0:nlay]),dtype=np.int)
# Item 7: WETFCT, IWETIT, IHDWET
wetfct,iwetit,ihdwet = None,None,None
iwetdry = laywet.sum()
if iwetdry > 0:
raise Exception, 'LAYWET should be 0 for UPW'
#--get parameters
par_types = []
if npupw > 0:
            par_types, parm_dict = mfpar.load(f, npupw, model.verbose)
#--get arrays
transient = not model.get_package('DIS').steady.all()
hk = [0] * nlay
hani = [0] * nlay
vka = [0] * nlay
ss = [0] * nlay
sy = [0] * nlay
vkcb = [0] * nlay
for k in range(nlay):
if model.verbose:
print ' loading hk layer {0:3d}...'.format(k+1)
if 'hk' not in par_types:
t = util_2d.load(f, model, (nrow,ncol), np.float32, 'hk',
ext_unit_dict)
else:
line = f.readline()
t = mfpar.parameter_fill(model, (nrow, ncol), 'hk', parm_dict, findlayer=k)
hk[k] = t
if chani[k] < 1:
if model.verbose:
print ' loading hani layer {0:3d}...'.format(k+1)
if 'hani' not in par_types:
t = util_2d.load(f, model, (nrow,ncol), np.float32, 'hani',
ext_unit_dict)
else:
line = f.readline()
t = mfpar.parameter_fill(model, (nrow, ncol), 'hani', parm_dict, findlayer=k)
hani[k] = t
if model.verbose:
print ' loading vka layer {0:3d}...'.format(k+1)
if 'vka' not in par_types and 'vani' not in par_types:
t = util_2d.load(f, model, (nrow,ncol), np.float32, 'vka',
ext_unit_dict)
else:
line = f.readline()
t = mfpar.parameter_fill(model, (nrow, ncol), 'vka', parm_dict, findlayer=k)
vka[k] = t
if transient:
if model.verbose:
print ' loading ss layer {0:3d}...'.format(k+1)
if 'ss' not in par_types:
t = util_2d.load(f, model, (nrow,ncol), np.float32, 'ss',
ext_unit_dict)
else:
line = f.readline()
t = mfpar.parameter_fill(model, (nrow, ncol), 'ss', parm_dict, findlayer=k)
ss[k] = t
if laytyp[k] != 0:
if model.verbose:
print ' loading sy layer {0:3d}...'.format(k+1)
if 'sy' not in par_types:
t = util_2d.load(f, model, (nrow,ncol), np.float32, 'sy',
ext_unit_dict)
else:
line = f.readline()
t = mfpar.parameter_fill(model, (nrow, ncol), 'sy', parm_dict, findlayer=k)
sy[k] = t
if model.get_package('DIS').laycbd[k] > 0:
if model.verbose:
print ' loading vkcb layer {0:3d}...'.format(k+1)
if 'vkcb' not in par_types:
t = util_2d.load(f, model, (nrow,ncol), np.float32, 'vkcb',
ext_unit_dict)
else:
line = f.readline()
t = mfpar.parameter_fill(model, (nrow, ncol), 'vkcb', parm_dict, findlayer=k)
vkcb[k] = t
#--create upw object
upw = ModflowUpw(model, iupwcb=iupwcb, iphdry=iphdry, hdry=hdry,
noparcheck=noparcheck,
laytyp=laytyp, layavg=layavg, chani=chani,
layvka=layvka, laywet=laywet,
hk=hk, hani=hani, vka=vka, ss=ss, sy=sy, vkcb=vkcb)
#--return upw object
return upw
def plot(self):
try:
import pylab as plt
except Exception as e:
print "error importing pylab: " + str(e)
return
#get the bas for ibound masking
bas = self.parent.bas6
if bas is not None:
ibnd = bas.getibound()
else:
ibnd = np.ones((self.parent.nlay, self.parent.nrow,
self.parent.ncol), dtype=np.int)
cmap = plt.cm.winter
cmap.set_bad('w', 1.0)
fs = 5
nlay = self.parent.nlay
#the width and height of each subplot
delt = 2.0
props = [self.hk.array, self.vka.array, self.ss.array, self.sy.array]
names = ["hk", "vk", "ss", "sy"]
shape = (len(names), nlay+1)
fig = plt.figure(figsize=(delt+(nlay*delt), delt * len(names)))
for k in xrange(nlay):
for iname, name in enumerate(names):
ax = plt.subplot2grid(shape, (iname, k), aspect="equal")
p = props[iname][k]
p = np.ma.masked_where(ibnd[k] == 0, p)
ax.imshow(p, cmap=cmap, alpha=0.7,
interpolation="none")
ax.set_title(name + " of layer {0:d} - max,min : {1:G},{2:G}"
.format(k+1, p.max(), p.min()), fontsize=fs)
if k == 0:
ax.set_ylabel("row", fontsize=fs)
ax.set_yticklabels(ax.get_yticks(), fontsize=fs)
else:
ax.set_yticklabels([])
if iname == len(names)-1:
ax.set_xticklabels(ax.get_xticks(), fontsize=fs)
ax.set_xlabel("column", fontsize=fs)
else:
ax.set_xticklabels([])
plt.tight_layout()
plt.show()
|
mjasher/gac
|
original_libraries/flopy-master/flopy/modflow/mfupw.py
|
Python
|
gpl-2.0
| 13,021
| 0.010598
|
from flask import Blueprint
log_analyzer = Blueprint('log_analyzer', __name__)
from . import views
|
DonYum/LogAna
|
app/log_analyzer/__init__.py
|
Python
|
mit
| 101
| 0.009901
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests for Google Compute Engine Driver
"""
import sys
import unittest
import datetime
from libcloud.utils.py3 import httplib
from libcloud.compute.drivers.gce import (GCENodeDriver, API_VERSION,
timestamp_to_datetime,
GCEAddress, GCEBackendService,
GCEFirewall, GCEForwardingRule,
GCEHealthCheck, GCENetwork,
GCENodeImage, GCERoute,
GCETargetHttpProxy, GCEUrlMap,
GCEZone)
from libcloud.common.google import (GoogleBaseAuthConnection,
ResourceNotFoundError, ResourceExistsError,
InvalidRequestError, GoogleBaseError)
from libcloud.test.common.test_google import GoogleAuthMockHttp, GoogleTestCase
from libcloud.compute.base import Node, StorageVolume
from libcloud.test import MockHttpTestCase
from libcloud.test.compute import TestCaseMixin
from libcloud.test.file_fixtures import ComputeFileFixtures
from libcloud.test.secrets import GCE_PARAMS, GCE_KEYWORD_PARAMS
class GCENodeDriverTest(GoogleTestCase, TestCaseMixin):
"""
Google Compute Engine Test Class.
"""
# Mock out a few specific calls that interact with the user, system or
# environment.
GCEZone._now = lambda x: datetime.datetime(2013, 6, 26, 19, 0, 0)
datacenter = 'us-central1-a'
def setUp(self):
GCEMockHttp.test = self
GCENodeDriver.connectionCls.conn_classes = (GCEMockHttp, GCEMockHttp)
GoogleBaseAuthConnection.conn_classes = (GoogleAuthMockHttp,
GoogleAuthMockHttp)
GCEMockHttp.type = None
kwargs = GCE_KEYWORD_PARAMS.copy()
kwargs['auth_type'] = 'IA'
kwargs['datacenter'] = self.datacenter
self.driver = GCENodeDriver(*GCE_PARAMS, **kwargs)
def test_default_scopes(self):
self.assertEqual(self.driver.scopes, None)
def test_timestamp_to_datetime(self):
timestamp1 = '2013-06-26T10:05:19.340-07:00'
datetime1 = datetime.datetime(2013, 6, 26, 17, 5, 19)
self.assertEqual(timestamp_to_datetime(timestamp1), datetime1)
timestamp2 = '2013-06-26T17:43:15.000-00:00'
datetime2 = datetime.datetime(2013, 6, 26, 17, 43, 15)
self.assertEqual(timestamp_to_datetime(timestamp2), datetime2)
def test_get_object_by_kind(self):
obj = self.driver._get_object_by_kind(None)
self.assertIsNone(obj)
obj = self.driver._get_object_by_kind('')
self.assertIsNone(obj)
obj = self.driver._get_object_by_kind(
'https://www.googleapis.com/compute/v1/projects/project_name/'
'global/targetHttpProxies/web-proxy')
self.assertEqual(obj.name, 'web-proxy')
def test_get_region_from_zone(self):
zone1 = self.driver.ex_get_zone('us-central1-a')
expected_region1 = 'us-central1'
region1 = self.driver._get_region_from_zone(zone1)
self.assertEqual(region1.name, expected_region1)
zone2 = self.driver.ex_get_zone('europe-west1-b')
expected_region2 = 'europe-west1'
region2 = self.driver._get_region_from_zone(zone2)
self.assertEqual(region2.name, expected_region2)
def test_find_zone_or_region(self):
zone1 = self.driver._find_zone_or_region('libcloud-demo-np-node',
'instances')
self.assertEqual(zone1.name, 'us-central2-a')
zone2 = self.driver._find_zone_or_region(
'libcloud-demo-europe-np-node', 'instances')
self.assertEqual(zone2.name, 'europe-west1-a')
region = self.driver._find_zone_or_region('libcloud-demo-address',
'addresses', region=True)
self.assertEqual(region.name, 'us-central1')
def test_match_images(self):
project = 'debian-cloud'
image = self.driver._match_images(project, 'debian-7')
self.assertEqual(image.name, 'debian-7-wheezy-v20131120')
image = self.driver._match_images(project, 'backports')
self.assertEqual(image.name, 'backports-debian-7-wheezy-v20131127')
def test_ex_get_serial_output(self):
self.assertRaises(ValueError, self.driver.ex_get_serial_output, 'foo')
node = self.driver.ex_get_node('node-name', 'us-central1-a')
self.assertTrue(self.driver.ex_get_serial_output(node),
'This is some serial\r\noutput for you.')
def test_ex_list(self):
d = self.driver
# Test the default case for all list methods
# (except list_volume_snapshots, which requires an arg)
for list_fn in (d.ex_list_addresses,
d.ex_list_backendservices,
d.ex_list_disktypes,
d.ex_list_firewalls,
d.ex_list_forwarding_rules,
d.ex_list_healthchecks,
d.ex_list_networks,
d.ex_list_project_images,
d.ex_list_regions,
d.ex_list_routes,
d.ex_list_snapshots,
d.ex_list_targethttpproxies,
d.ex_list_targetinstances,
d.ex_list_targetpools,
d.ex_list_urlmaps,
d.ex_list_zones,
d.list_images,
d.list_locations,
d.list_nodes,
d.list_sizes,
d.list_volumes):
full_list = [item.name for item in list_fn()]
li = d.ex_list(list_fn)
iter_list = [item.name for sublist in li for item in sublist]
self.assertEqual(full_list, iter_list)
# Test paging & filtering with a single list function as they require
# additional test fixtures
list_fn = d.ex_list_regions
for count, sublist in zip((2, 1), d.ex_list(list_fn).page(2)):
self.assertTrue(len(sublist) == count)
for sublist in d.ex_list(list_fn).filter('name eq us-central1'):
self.assertTrue(len(sublist) == 1)
self.assertEqual(sublist[0].name, 'us-central1')
def test_ex_list_addresses(self):
address_list = self.driver.ex_list_addresses()
address_list_all = self.driver.ex_list_addresses('all')
address_list_uc1 = self.driver.ex_list_addresses('us-central1')
address_list_global = self.driver.ex_list_addresses('global')
self.assertEqual(len(address_list), 2)
self.assertEqual(len(address_list_all), 5)
self.assertEqual(len(address_list_global), 1)
self.assertEqual(address_list[0].name, 'libcloud-demo-address')
self.assertEqual(address_list_uc1[0].name, 'libcloud-demo-address')
self.assertEqual(address_list_global[0].name, 'lcaddressglobal')
names = [a.name for a in address_list_all]
self.assertTrue('libcloud-demo-address' in names)
def test_ex_list_backendservices(self):
self.backendservices_mock = 'empty'
backendservices_list = self.driver.ex_list_backendservices()
self.assertListEqual(backendservices_list, [])
self.backendservices_mock = 'web-service'
backendservices_list = self.driver.ex_list_backendservices()
web_service = backendservices_list[0]
self.assertEqual(web_service.name, 'web-service')
self.assertEqual(len(web_service.healthchecks), 1)
self.assertEqual(len(web_service.backends), 2)
def test_ex_list_healthchecks(self):
healthchecks = self.driver.ex_list_healthchecks()
self.assertEqual(len(healthchecks), 3)
self.assertEqual(healthchecks[0].name, 'basic-check')
def test_ex_list_firewalls(self):
firewalls = self.driver.ex_list_firewalls()
self.assertEqual(len(firewalls), 5)
self.assertEqual(firewalls[0].name, 'default-allow-internal')
def test_ex_list_forwarding_rules(self):
forwarding_rules = self.driver.ex_list_forwarding_rules()
forwarding_rules_all = self.driver.ex_list_forwarding_rules('all')
forwarding_rules_uc1 = self.driver.ex_list_forwarding_rules(
'us-central1')
self.assertEqual(len(forwarding_rules), 2)
self.assertEqual(len(forwarding_rules_all), 2)
self.assertEqual(forwarding_rules[0].name, 'lcforwardingrule')
self.assertEqual(forwarding_rules_uc1[0].name, 'lcforwardingrule')
names = [f.name for f in forwarding_rules_all]
self.assertTrue('lcforwardingrule' in names)
def test_ex_list_forwarding_rules_global(self):
forwarding_rules = self.driver.ex_list_forwarding_rules(global_rules=True)
self.assertEqual(len(forwarding_rules), 2)
self.assertEqual(forwarding_rules[0].name, 'http-rule')
names = [f.name for f in forwarding_rules]
self.assertListEqual(names, ['http-rule', 'http-rule2'])
def test_list_images(self):
local_images = self.driver.list_images()
all_deprecated_images = self.driver.list_images(ex_include_deprecated=True)
debian_images = self.driver.list_images(ex_project='debian-cloud')
local_plus_deb = self.driver.list_images(['debian-cloud', 'project_name'])
self.assertEqual(len(local_images), 23)
self.assertEqual(len(all_deprecated_images), 156)
self.assertEqual(len(debian_images), 2)
self.assertEqual(len(local_plus_deb), 3)
self.assertEqual(local_images[0].name, 'aws-ubuntu')
self.assertEqual(debian_images[1].name, 'debian-7-wheezy-v20131120')
def test_list_locations(self):
locations = self.driver.list_locations()
self.assertEqual(len(locations), 6)
self.assertEqual(locations[0].name, 'asia-east1-a')
def test_ex_list_routes(self):
routes = self.driver.ex_list_routes()
self.assertEqual(len(routes), 3)
self.assertTrue('lcdemoroute' in [route.name for route in routes])
def test_ex_list_networks(self):
networks = self.driver.ex_list_networks()
self.assertEqual(len(networks), 3)
self.assertEqual(networks[0].name, 'default')
def test_list_nodes(self):
nodes = self.driver.list_nodes()
nodes_all = self.driver.list_nodes(ex_zone='all')
nodes_uc1a = self.driver.list_nodes(ex_zone='us-central1-a')
self.assertEqual(len(nodes), 1)
self.assertEqual(len(nodes_all), 8)
self.assertEqual(len(nodes_uc1a), 1)
self.assertEqual(nodes[0].name, 'node-name')
self.assertEqual(nodes_uc1a[0].name, 'node-name')
names = [n.name for n in nodes_all]
self.assertTrue('node-name' in names)
def test_ex_list_regions(self):
regions = self.driver.ex_list_regions()
self.assertEqual(len(regions), 3)
self.assertEqual(regions[0].name, 'europe-west1')
def test_ex_list_snapshots(self):
snapshots = self.driver.ex_list_snapshots()
self.assertEqual(len(snapshots), 2)
self.assertEqual(snapshots[0].name, 'lcsnapshot')
def test_ex_list_targethttpproxies(self):
target_proxies = self.driver.ex_list_targethttpproxies()
self.assertEqual(len(target_proxies), 2)
self.assertEqual(target_proxies[0].name, 'web-proxy')
names = [t.name for t in target_proxies]
self.assertListEqual(names, ['web-proxy', 'web-proxy2'])
def test_ex_list_targetinstances(self):
target_instances = self.driver.ex_list_targetinstances()
target_instances_all = self.driver.ex_list_targetinstances('all')
target_instances_uc1 = self.driver.ex_list_targetinstances('us-central1-a')
self.assertEqual(len(target_instances), 2)
self.assertEqual(len(target_instances_all), 2)
self.assertEqual(len(target_instances_uc1), 2)
self.assertEqual(target_instances[0].name, 'hello')
self.assertEqual(target_instances_uc1[0].name, 'hello')
names = [t.name for t in target_instances_all]
self.assertTrue('lctargetinstance' in names)
def test_ex_list_targetpools(self):
target_pools = self.driver.ex_list_targetpools()
target_pools_all = self.driver.ex_list_targetpools('all')
target_pools_uc1 = self.driver.ex_list_targetpools('us-central1')
self.assertEqual(len(target_pools), 4)
self.assertEqual(len(target_pools_all), 5)
self.assertEqual(len(target_pools_uc1), 4)
self.assertEqual(target_pools[0].name, 'lctargetpool')
self.assertEqual(target_pools_uc1[0].name, 'lctargetpool')
names = [t.name for t in target_pools_all]
self.assertTrue('www-pool' in names)
def test_list_sizes(self):
sizes = self.driver.list_sizes()
sizes_all = self.driver.list_sizes('all')
self.assertEqual(len(sizes), 22)
self.assertEqual(len(sizes_all), 100)
self.assertEqual(sizes[0].name, 'f1-micro')
self.assertEqual(sizes[0].extra['zone'].name, 'us-central1-a')
names = [s.name for s in sizes_all]
self.assertEqual(names.count('n1-standard-1'), 5)
def test_ex_get_license(self):
license = self.driver.ex_get_license('suse-cloud', 'sles-12')
self.assertEqual(license.name, 'sles-12')
self.assertTrue(license.charges_use_fee)
def test_list_disktypes(self):
disktypes = self.driver.ex_list_disktypes()
disktypes_all = self.driver.ex_list_disktypes('all')
disktypes_uc1a = self.driver.ex_list_disktypes('us-central1-a')
self.assertEqual(len(disktypes), 2)
self.assertEqual(len(disktypes_all), 9)
self.assertEqual(len(disktypes_uc1a), 2)
self.assertEqual(disktypes[0].name, 'pd-ssd')
self.assertEqual(disktypes_uc1a[0].name, 'pd-ssd')
names = [v.name for v in disktypes_all]
self.assertTrue('pd-standard' in names)
self.assertTrue('local-ssd' in names)
def test_ex_list_urlmaps(self):
urlmaps_list = self.driver.ex_list_urlmaps()
web_map = urlmaps_list[0]
self.assertEqual(web_map.name, 'web-map')
self.assertEqual(len(web_map.host_rules), 0)
self.assertEqual(len(web_map.path_matchers), 0)
self.assertEqual(len(web_map.tests), 0)
def test_list_volumes(self):
volumes = self.driver.list_volumes()
volumes_all = self.driver.list_volumes('all')
volumes_uc1a = self.driver.list_volumes('us-central1-a')
self.assertEqual(len(volumes), 2)
self.assertEqual(len(volumes_all), 10)
self.assertEqual(len(volumes_uc1a), 2)
self.assertEqual(volumes[0].name, 'lcdisk')
self.assertEqual(volumes_uc1a[0].name, 'lcdisk')
names = [v.name for v in volumes_all]
self.assertTrue('libcloud-demo-europe-boot-disk' in names)
def test_ex_list_zones(self):
zones = self.driver.ex_list_zones()
self.assertEqual(len(zones), 6)
self.assertEqual(zones[0].name, 'asia-east1-a')
def test_ex_create_address_global(self):
address_name = 'lcaddressglobal'
address = self.driver.ex_create_address(address_name, 'global')
self.assertTrue(isinstance(address, GCEAddress))
self.assertEqual(address.name, address_name)
self.assertEqual(address.region, 'global')
def test_ex_create_address(self):
address_name = 'lcaddress'
address = self.driver.ex_create_address(address_name)
self.assertTrue(isinstance(address, GCEAddress))
self.assertEqual(address.name, address_name)
def test_ex_create_backendservice(self):
backendservice_name = 'web-service'
backendservice = self.driver.ex_create_backendservice(
name=backendservice_name,
healthchecks=['lchealthcheck'])
self.assertTrue(isinstance(backendservice, GCEBackendService))
self.assertEqual(backendservice.name, backendservice_name)
def test_ex_create_healthcheck(self):
healthcheck_name = 'lchealthcheck'
kwargs = {'host': 'lchost',
'path': '/lc',
'port': 8000,
'interval': 10,
'timeout': 10,
'unhealthy_threshold': 4,
'healthy_threshold': 3,
'description': 'test healthcheck'}
hc = self.driver.ex_create_healthcheck(healthcheck_name, **kwargs)
self.assertTrue(isinstance(hc, GCEHealthCheck))
self.assertEqual(hc.name, healthcheck_name)
self.assertEqual(hc.path, '/lc')
self.assertEqual(hc.port, 8000)
self.assertEqual(hc.interval, 10)
self.assertEqual(hc.extra['host'], 'lchost')
self.assertEqual(hc.extra['description'], 'test healthcheck')
def test_ex_create_image(self):
volume = self.driver.ex_get_volume('lcdisk')
image = self.driver.ex_create_image('coreos', volume)
self.assertTrue(isinstance(image, GCENodeImage))
self.assertTrue(image.name.startswith('coreos'))
self.assertEqual(image.extra['description'], 'CoreOS beta 522.3.0')
self.assertEqual(image.extra['family'], 'coreos')
def test_ex_create_firewall(self):
firewall_name = 'lcfirewall'
allowed = [{'IPProtocol': 'tcp', 'ports': ['4567']}]
source_tags = ['libcloud']
firewall = self.driver.ex_create_firewall(firewall_name, allowed,
source_tags=source_tags)
self.assertTrue(isinstance(firewall, GCEFirewall))
self.assertEqual(firewall.name, firewall_name)
def test_ex_create_forwarding_rule(self):
fwr_name = 'lcforwardingrule'
targetpool = 'lctargetpool'
region = 'us-central1'
address = 'lcaddress'
port_range = '8000-8500'
description = 'test forwarding rule'
fwr = self.driver.ex_create_forwarding_rule(fwr_name, targetpool,
region=region,
address=address,
port_range=port_range,
description=description)
self.assertTrue(isinstance(fwr, GCEForwardingRule))
self.assertEqual(fwr.name, fwr_name)
self.assertEqual(fwr.region.name, region)
self.assertEqual(fwr.protocol, 'TCP')
self.assertEqual(fwr.extra['portRange'], port_range)
self.assertEqual(fwr.extra['description'], description)
def test_ex_create_forwarding_rule_global(self):
fwr_name = 'http-rule'
target_name = 'web-proxy'
address = 'lcaddressglobal'
port_range = '80-80'
description = 'global forwarding rule'
for target in (target_name,
self.driver.ex_get_targethttpproxy(target_name)):
fwr = self.driver.ex_create_forwarding_rule(fwr_name, target,
global_rule=True,
address=address,
port_range=port_range,
description=description)
self.assertTrue(isinstance(fwr, GCEForwardingRule))
self.assertEqual(fwr.name, fwr_name)
self.assertEqual(fwr.extra['portRange'], port_range)
self.assertEqual(fwr.extra['description'], description)
def test_ex_create_forwarding_rule_targetpool_keyword(self):
"""Test backwards compatibility with the targetpool kwarg."""
fwr_name = 'lcforwardingrule'
targetpool = 'lctargetpool'
region = 'us-central1'
address = self.driver.ex_get_address('lcaddress')
port_range = '8000-8500'
description = 'test forwarding rule'
fwr = self.driver.ex_create_forwarding_rule(fwr_name,
targetpool=targetpool,
region=region,
address=address,
port_range=port_range,
description=description)
self.assertTrue(isinstance(fwr, GCEForwardingRule))
self.assertEqual(fwr.name, fwr_name)
self.assertEqual(fwr.region.name, region)
self.assertEqual(fwr.protocol, 'TCP')
self.assertEqual(fwr.extra['portRange'], port_range)
self.assertEqual(fwr.extra['description'], description)
def test_ex_create_route(self):
route_name = 'lcdemoroute'
dest_range = '192.168.25.0/24'
priority = 1000
route = self.driver.ex_create_route(route_name, dest_range)
self.assertTrue(isinstance(route, GCERoute))
self.assertEqual(route.name, route_name)
self.assertEqual(route.priority, priority)
self.assertTrue("tag1" in route.tags)
self.assertTrue(route.extra['nextHopInstance'].endswith('libcloud-100'))
self.assertEqual(route.dest_range, dest_range)
def test_ex_create_network(self):
network_name = 'lcnetwork'
cidr = '10.11.0.0/16'
network = self.driver.ex_create_network(network_name, cidr)
self.assertTrue(isinstance(network, GCENetwork))
self.assertEqual(network.name, network_name)
self.assertEqual(network.cidr, cidr)
def test_ex_set_machine_type_notstopped(self):
# get running node, change machine type
zone = 'us-central1-a'
node = self.driver.ex_get_node('node-name', zone)
self.assertRaises(GoogleBaseError, self.driver.ex_set_machine_type,
node, 'custom-4-61440')
def test_ex_set_machine_type_invalid(self):
# get stopped node, change machine type
zone = 'us-central1-a'
node = self.driver.ex_get_node('custom-node', zone)
self.assertRaises(InvalidRequestError, self.driver.ex_set_machine_type,
node, 'custom-1-61440')
def test_ex_set_machine_type(self):
# get stopped node, change machine type
zone = 'us-central1-a'
node = self.driver.ex_get_node('stopped-node', zone)
self.assertEqual(node.size, 'n1-standard-1')
self.assertEqual(node.extra['status'], 'TERMINATED')
self.assertTrue(self.driver.ex_set_machine_type(node, 'custom-4-11264'))
def test_ex_node_start(self):
zone = 'us-central1-a'
node = self.driver.ex_get_node('stopped-node', zone)
self.assertTrue(self.driver.ex_start_node(node))
def test_ex_node_stop(self):
zone = 'us-central1-a'
node = self.driver.ex_get_node('node-name', zone)
self.assertTrue(self.driver.ex_stop_node(node))
# try and stop a stopped node (should work)
zone = 'us-central1-a'
node = self.driver.ex_get_node('stopped-node', zone)
self.assertTrue(self.driver.ex_stop_node(node))
def test_create_node_req(self):
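        """Check the request path and body built by _create_node_req,
        including metadata, tags, the boot disk and the default
        service account."""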
image = self.driver.ex_get_image('debian-7')
size = self.driver.ex_get_size('n1-standard-1')
location = self.driver.zone
network = self.driver.ex_get_network('default')
tags = ['libcloud']
metadata = [{'key': 'test_key', 'value': 'test_value'}]
boot_disk = self.driver.ex_get_volume('lcdisk')
node_request, node_data = self.driver._create_node_req('lcnode', size,
image, location,
network, tags,
metadata,
boot_disk)
self.assertEqual(node_request, '/zones/%s/instances' % location.name)
self.assertEqual(node_data['metadata']['items'][0]['key'], 'test_key')
self.assertEqual(node_data['tags']['items'][0], 'libcloud')
self.assertEqual(node_data['name'], 'lcnode')
self.assertTrue(node_data['disks'][0]['boot'])
self.assertIsInstance(node_data['serviceAccounts'], list)
self.assertIsInstance(node_data['serviceAccounts'][0], dict)
self.assertEqual(node_data['serviceAccounts'][0]['email'], 'default')
self.assertIsInstance(node_data['serviceAccounts'][0]['scopes'], list)
self.assertEqual(len(node_data['serviceAccounts'][0]['scopes']), 1)
def test_create_node_network_opts(self):
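        """Nodes accept the default network, an ex_network object or a raw
        ex_nic_gce_struct; mixing the struct with external_ip or
        ex_network raises ValueError."""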
node_name = 'node-name'
size = self.driver.ex_get_size('n1-standard-1')
image = self.driver.ex_get_image('debian-7')
zone = self.driver.ex_get_zone('us-central1-a')
network = self.driver.ex_get_network('lcnetwork')
address = self.driver.ex_get_address('lcaddress')
ex_nic_gce_struct = [
{
"network": "global/networks/lcnetwork",
"accessConfigs": [
{
"name": "lcnetwork-test",
"type": "ONE_TO_ONE_NAT"
}
]
}
]
# Test using default
node = self.driver.create_node(node_name, size, image)
self.assertEqual(node.extra['networkInterfaces'][0]["name"], 'nic0')
# Test using just the network
node = self.driver.create_node(node_name, size, image, location=zone,
ex_network=network)
self.assertEqual(node.extra['networkInterfaces'][0]["name"], 'nic0')
# Test using just the struct
node = self.driver.create_node(node_name, size, image, location=zone,
ex_nic_gce_struct=ex_nic_gce_struct)
self.assertEqual(node.extra['networkInterfaces'][0]["name"], 'nic0')
# Test both address and struct, should fail
self.assertRaises(ValueError, self.driver.create_node, node_name,
size, image, location=zone, external_ip=address,
ex_nic_gce_struct=ex_nic_gce_struct)
# Test both ex_network and struct, should fail
self.assertRaises(ValueError, self.driver.create_node, node_name,
size, image, location=zone, ex_network=network,
ex_nic_gce_struct=ex_nic_gce_struct)
def test_create_node_disk_opts(self):
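        """A node needs an image, an ex_boot_disk or an ex_disks_gce_struct;
        passing both a boot disk and a disk struct raises ValueError."""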
node_name = 'node-name'
size = self.driver.ex_get_size('n1-standard-1')
image = self.driver.ex_get_image('debian-7')
boot_disk = self.driver.ex_get_volume('lcdisk')
disk_type = self.driver.ex_get_disktype('pd-ssd', 'us-central1-a')
DEMO_BASE_NAME = "lc-test"
gce_disk_struct = [
{
"type": "PERSISTENT",
"deviceName": '%s-gstruct' % DEMO_BASE_NAME,
"initializeParams": {
"diskName": '%s-gstruct' % DEMO_BASE_NAME,
"sourceImage": image.extra['selfLink']
},
"boot": True,
"autoDelete": True
},
{
"type": "SCRATCH",
"deviceName": '%s-gstruct-lssd' % DEMO_BASE_NAME,
"initializeParams": {
"diskType": disk_type.extra['selfLink']
},
"autoDelete": True
}
]
self.assertRaises(ValueError, self.driver.create_node, node_name,
size, None)
node = self.driver.create_node(node_name, size, image)
self.assertTrue(isinstance(node, Node))
node = self.driver.create_node(node_name, size, None,
ex_boot_disk=boot_disk)
self.assertTrue(isinstance(node, Node))
node = self.driver.create_node(node_name, size, None,
ex_disks_gce_struct=gce_disk_struct)
self.assertTrue(isinstance(node, Node))
self.assertRaises(ValueError, self.driver.create_node, node_name,
size, None, ex_boot_disk=boot_disk,
ex_disks_gce_struct=gce_disk_struct)
def test_create_node(self):
node_name = 'node-name'
image = self.driver.ex_get_image('debian-7')
size = self.driver.ex_get_size('n1-standard-1')
node = self.driver.create_node(node_name, size, image)
self.assertTrue(isinstance(node, Node))
self.assertEqual(node.name, node_name)
def test_create_node_req_with_serviceaccounts(self):
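        """Scope aliases such as 'compute-ro' are expanded to full
        googleapis.com scope URLs and the service account email defaults
        to 'default'."""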
image = self.driver.ex_get_image('debian-7')
size = self.driver.ex_get_size('n1-standard-1')
location = self.driver.zone
network = self.driver.ex_get_network('default')
# ex_service_accounts with specific scopes, default 'email'
ex_sa = [{'scopes': ['compute-ro', 'pubsub', 'storage-ro']}]
node_request, node_data = self.driver._create_node_req('lcnode', size,
image, location,
network,
ex_service_accounts=ex_sa)
self.assertIsInstance(node_data['serviceAccounts'], list)
self.assertIsInstance(node_data['serviceAccounts'][0], dict)
self.assertEqual(node_data['serviceAccounts'][0]['email'], 'default')
self.assertIsInstance(node_data['serviceAccounts'][0]['scopes'], list)
self.assertEqual(len(node_data['serviceAccounts'][0]['scopes']), 3)
self.assertTrue('https://www.googleapis.com/auth/devstorage.read_only'
in node_data['serviceAccounts'][0]['scopes'])
self.assertTrue('https://www.googleapis.com/auth/compute.readonly'
in node_data['serviceAccounts'][0]['scopes'])
def test_format_metadata(self):
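        """_format_metadata normalizes lists of dicts, plain dicts and
        GCE-style 'items' dicts into the fingerprint/items structure and
        rejects malformed input."""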
in_md = [{'key': 'k0', 'value': 'v0'}, {'key': 'k1', 'value': 'v1'}]
out_md = self.driver._format_metadata('fp', in_md)
self.assertTrue('fingerprint' in out_md)
self.assertEqual(out_md['fingerprint'], 'fp')
self.assertTrue('items' in out_md)
self.assertEqual(len(out_md['items']), 2)
self.assertTrue(out_md['items'][0]['key'] in ['k0', 'k1'])
self.assertTrue(out_md['items'][0]['value'] in ['v0', 'v1'])
in_md = [{'k0': 'v0'}, {'k1': 'v1'}]
out_md = self.driver._format_metadata('fp', in_md)
self.assertTrue('fingerprint' in out_md)
self.assertEqual(out_md['fingerprint'], 'fp')
self.assertTrue('items' in out_md)
self.assertEqual(len(out_md['items']), 2)
self.assertTrue(out_md['items'][0]['key'] in ['k0', 'k1'])
self.assertTrue(out_md['items'][0]['value'] in ['v0', 'v1'])
in_md = {'key': 'k0', 'value': 'v0'}
out_md = self.driver._format_metadata('fp', in_md)
self.assertTrue('fingerprint' in out_md)
self.assertEqual(out_md['fingerprint'], 'fp')
self.assertTrue('items' in out_md)
self.assertEqual(len(out_md['items']), 1, out_md)
self.assertEqual(out_md['items'][0]['key'], 'k0')
self.assertEqual(out_md['items'][0]['value'], 'v0')
in_md = {'k0': 'v0'}
out_md = self.driver._format_metadata('fp', in_md)
self.assertTrue('fingerprint' in out_md)
self.assertEqual(out_md['fingerprint'], 'fp')
self.assertTrue('items' in out_md)
self.assertEqual(len(out_md['items']), 1)
self.assertEqual(out_md['items'][0]['key'], 'k0')
self.assertEqual(out_md['items'][0]['value'], 'v0')
in_md = {'k0': 'v0', 'k1': 'v1', 'k2': 'v2'}
out_md = self.driver._format_metadata('fp', in_md)
self.assertTrue('fingerprint' in out_md)
self.assertEqual(out_md['fingerprint'], 'fp')
self.assertTrue('items' in out_md)
self.assertEqual(len(out_md['items']), 3)
keys = [x['key'] for x in out_md['items']]
vals = [x['value'] for x in out_md['items']]
keys.sort()
vals.sort()
self.assertEqual(keys, ['k0', 'k1', 'k2'])
self.assertEqual(vals, ['v0', 'v1', 'v2'])
in_md = {'items': [{'key': 'k0', 'value': 'v0'},
{'key': 'k1', 'value': 'v1'}]}
out_md = self.driver._format_metadata('fp', in_md)
self.assertTrue('fingerprint' in out_md)
self.assertEqual(out_md['fingerprint'], 'fp')
self.assertTrue('items' in out_md)
self.assertEqual(len(out_md['items']), 2)
self.assertTrue(out_md['items'][0]['key'] in ['k0', 'k1'])
self.assertTrue(out_md['items'][0]['value'] in ['v0', 'v1'])
in_md = {'items': 'foo'}
self.assertRaises(ValueError, self.driver._format_metadata, 'fp', in_md)
in_md = {'items': {'key': 'k1', 'value': 'v0'}}
self.assertRaises(ValueError, self.driver._format_metadata, 'fp', in_md)
in_md = ['k0', 'v1']
self.assertRaises(ValueError, self.driver._format_metadata, 'fp', in_md)
def test_create_node_with_metadata(self):
node_name = 'node-name'
image = self.driver.ex_get_image('debian-7')
size = self.driver.ex_get_size('n1-standard-1')
zone = self.driver.ex_get_zone('us-central1-a')
# md is a list of dicts, each with 'key' and 'value' for
# backwards compatibility
md = [{'key': 'k0', 'value': 'v0'}, {'key': 'k1', 'value': 'v1'}]
request, data = self.driver._create_node_req(node_name, size, image,
zone, metadata=md)
self.assertTrue('items' in data['metadata'])
self.assertEqual(len(data['metadata']['items']), 2)
# md doesn't contain "items" key
md = {'key': 'key1', 'value': 'value1'}
request, data = self.driver._create_node_req(node_name, size, image,
zone, metadata=md)
self.assertTrue('items' in data['metadata'])
self.assertEqual(len(data['metadata']['items']), 1)
# md contains "items" key
md = {'items': [{'key': 'k0', 'value': 'v0'}]}
request, data = self.driver._create_node_req(node_name, size, image,
zone, metadata=md)
self.assertTrue('items' in data['metadata'])
self.assertEqual(len(data['metadata']['items']), 1)
self.assertEqual(data['metadata']['items'][0]['key'], 'k0')
self.assertEqual(data['metadata']['items'][0]['value'], 'v0')
def test_create_node_existing(self):
node_name = 'libcloud-demo-europe-np-node'
image = self.driver.ex_get_image('debian-7')
size = self.driver.ex_get_size('n1-standard-1', zone='europe-west1-a')
self.assertRaises(ResourceExistsError, self.driver.create_node,
node_name, size, image, location='europe-west1-a')
def test_ex_create_multiple_nodes(self):
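        """Creating multiple nodes appends zero-padded suffixes (-000, -001)
        to the base name."""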
base_name = 'lcnode'
image = self.driver.ex_get_image('debian-7')
size = self.driver.ex_get_size('n1-standard-1')
number = 2
nodes = self.driver.ex_create_multiple_nodes(base_name, size, image,
number)
self.assertEqual(len(nodes), 2)
self.assertTrue(isinstance(nodes[0], Node))
self.assertTrue(isinstance(nodes[1], Node))
self.assertEqual(nodes[0].name, '%s-000' % base_name)
self.assertEqual(nodes[1].name, '%s-001' % base_name)
def test_ex_create_targethttpproxy(self):
proxy_name = 'web-proxy'
urlmap_name = 'web-map'
for urlmap in (urlmap_name, self.driver.ex_get_urlmap(urlmap_name)):
proxy = self.driver.ex_create_targethttpproxy(proxy_name, urlmap)
self.assertTrue(isinstance(proxy, GCETargetHttpProxy))
self.assertEqual(proxy_name, proxy.name)
def test_ex_create_targetinstance(self):
targetinstance_name = 'lctargetinstance'
zone = 'us-central1-a'
node = self.driver.ex_get_node('node-name', zone)
targetinstance = self.driver.ex_create_targetinstance(
targetinstance_name, zone=zone, node=node)
self.assertEqual(targetinstance.name, targetinstance_name)
self.assertEqual(targetinstance.zone.name, zone)
def test_ex_create_targetpool(self):
targetpool_name = 'lctargetpool'
region = 'us-central1'
healthchecks = ['libcloud-lb-demo-healthcheck']
node1 = self.driver.ex_get_node('libcloud-lb-demo-www-000',
'us-central1-b')
node2 = self.driver.ex_get_node('libcloud-lb-demo-www-001',
'us-central1-b')
nodes = [node1, node2]
targetpool = self.driver.ex_create_targetpool(
targetpool_name, region=region, healthchecks=healthchecks,
nodes=nodes)
self.assertEqual(targetpool.name, targetpool_name)
self.assertEqual(len(targetpool.nodes), len(nodes))
self.assertEqual(targetpool.region.name, region)
def test_ex_create_targetpool_session_affinity(self):
targetpool_name = 'lctargetpool-sticky'
region = 'us-central1'
session_affinity = 'CLIENT_IP_PROTO'
targetpool = self.driver.ex_create_targetpool(
targetpool_name, region=region,
session_affinity=session_affinity)
self.assertEqual(targetpool.name, targetpool_name)
self.assertEqual(targetpool.extra.get('sessionAffinity'),
session_affinity)
def test_ex_create_urlmap(self):
urlmap_name = 'web-map'
for service in ('web-service',
self.driver.ex_get_backendservice('web-service')):
urlmap = self.driver.ex_create_urlmap(urlmap_name, service)
self.assertTrue(isinstance(urlmap, GCEUrlMap))
self.assertEqual(urlmap_name, urlmap.name)
def test_ex_create_volume_snapshot(self):
snapshot_name = 'lcsnapshot'
volume = self.driver.ex_get_volume('lcdisk')
snapshot = volume.snapshot(snapshot_name)
self.assertEqual(snapshot.name, snapshot_name)
self.assertEqual(snapshot.size, '10')
def test_create_volume_ssd(self):
volume_name = 'lcdisk'
size = 10
volume = self.driver.create_volume(size, volume_name,
ex_disk_type='pd-ssd')
self.assertTrue(isinstance(volume, StorageVolume))
self.assertEqual(volume.extra['type'], 'pd-ssd')
def test_create_volume(self):
volume_name = 'lcdisk'
size = 10
volume = self.driver.create_volume(size, volume_name)
self.assertTrue(isinstance(volume, StorageVolume))
self.assertEqual(volume.name, volume_name)
def test_ex_update_healthcheck(self):
healthcheck_name = 'lchealthcheck'
healthcheck = self.driver.ex_get_healthcheck(healthcheck_name)
healthcheck.port = 9000
healthcheck2 = self.driver.ex_update_healthcheck(healthcheck)
self.assertTrue(isinstance(healthcheck2, GCEHealthCheck))
def test_ex_update_firewall(self):
firewall_name = 'lcfirewall'
firewall = self.driver.ex_get_firewall(firewall_name)
firewall.source_ranges = ['10.0.0.0/16']
firewall.source_tags = ['libcloud', 'test']
firewall2 = self.driver.ex_update_firewall(firewall)
self.assertTrue(isinstance(firewall2, GCEFirewall))
def test_ex_targetpool_gethealth(self):
targetpool = self.driver.ex_get_targetpool('lb-pool')
health = targetpool.get_health('libcloud-lb-demo-www-000')
self.assertEqual(len(health), 1)
self.assertTrue('node' in health[0])
self.assertTrue('health' in health[0])
self.assertEqual(health[0]['health'], 'UNHEALTHY')
def test_ex_targetpool_with_backup_pool(self):
targetpool = self.driver.ex_get_targetpool('lb-pool')
self.assertTrue('backupPool' in targetpool.extra)
self.assertTrue('failoverRatio' in targetpool.extra)
def test_ex_targetpool_setbackup(self):
targetpool = self.driver.ex_get_targetpool('lb-pool')
backup_targetpool = self.driver.ex_get_targetpool('backup-pool')
self.assertTrue(targetpool.set_backup_targetpool(backup_targetpool,
0.1))
def test_ex_targetpool_remove_add_node(self):
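        """Nodes can be added to or removed from a target pool as Node
        objects or as selfLink URLs; adding a duplicate leaves the pool
        unchanged."""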
targetpool = self.driver.ex_get_targetpool('lctargetpool')
node = self.driver.ex_get_node('libcloud-lb-demo-www-001',
'us-central1-b')
remove_node = self.driver.ex_targetpool_remove_node(targetpool, node)
self.assertTrue(remove_node)
self.assertEqual(len(targetpool.nodes), 1)
add_node = self.driver.ex_targetpool_add_node(targetpool, node.extra['selfLink'])
self.assertTrue(add_node)
self.assertEqual(len(targetpool.nodes), 2)
remove_node = self.driver.ex_targetpool_remove_node(targetpool, node.extra['selfLink'])
self.assertTrue(remove_node)
self.assertEqual(len(targetpool.nodes), 1)
add_node = self.driver.ex_targetpool_add_node(targetpool, node)
self.assertTrue(add_node)
self.assertEqual(len(targetpool.nodes), 2)
# check that duplicates are filtered
add_node = self.driver.ex_targetpool_add_node(targetpool, node.extra['selfLink'])
self.assertTrue(add_node)
self.assertEqual(len(targetpool.nodes), 2)
def test_ex_targetpool_remove_add_healthcheck(self):
targetpool = self.driver.ex_get_targetpool('lctargetpool')
healthcheck = self.driver.ex_get_healthcheck(
'libcloud-lb-demo-healthcheck')
remove_healthcheck = self.driver.ex_targetpool_remove_healthcheck(
targetpool, healthcheck)
self.assertTrue(remove_healthcheck)
self.assertEqual(len(targetpool.healthchecks), 0)
add_healthcheck = self.driver.ex_targetpool_add_healthcheck(
targetpool, healthcheck)
self.assertTrue(add_healthcheck)
self.assertEqual(len(targetpool.healthchecks), 1)
def test_reboot_node(self):
node = self.driver.ex_get_node('node-name')
reboot = self.driver.reboot_node(node)
self.assertTrue(reboot)
def test_ex_set_node_tags(self):
new_tags = ['libcloud']
node = self.driver.ex_get_node('node-name')
set_tags = self.driver.ex_set_node_tags(node, new_tags)
self.assertTrue(set_tags)
def test_attach_volume_invalid_usecase(self):
node = self.driver.ex_get_node('node-name')
self.assertRaises(ValueError, self.driver.attach_volume, node, None)
self.assertRaises(ValueError, self.driver.attach_volume, node, None,
ex_source='foo/bar', device=None)
def test_attach_volume(self):
volume = self.driver.ex_get_volume('lcdisk')
node = self.driver.ex_get_node('node-name')
attach = volume.attach(node)
self.assertTrue(attach)
def test_detach_volume(self):
volume = self.driver.ex_get_volume('lcdisk')
node = self.driver.ex_get_node('node-name')
# This fails since the node is required
detach = volume.detach()
self.assertFalse(detach)
# This should pass
detach = self.driver.detach_volume(volume, node)
self.assertTrue(detach)
def test_ex_destroy_address_global(self):
address = self.driver.ex_get_address('lcaddressglobal', 'global')
self.assertEqual(address.name, 'lcaddressglobal')
self.assertEqual(address.region, 'global')
destroyed = address.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_address(self):
address = self.driver.ex_get_address('lcaddress')
destroyed = address.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_backendservice(self):
backendservice = self.driver.ex_get_backendservice('web-service')
destroyed = backendservice.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_healthcheck(self):
hc = self.driver.ex_get_healthcheck('lchealthcheck')
destroyed = hc.destroy()
self.assertTrue(destroyed)
def test_ex_delete_image(self):
self.assertRaises(ResourceNotFoundError,
self.driver.ex_get_image, 'missing-image')
self.assertRaises(ResourceNotFoundError,
self.driver.ex_delete_image, 'missing-image')
image = self.driver.ex_get_image('debian-7')
deleted = self.driver.ex_delete_image(image)
self.assertTrue(deleted)
def test_ex_deprecate_image(self):
dep_ts = '2064-03-11T20:18:36.194-07:00'
obs_ts = '2074-03-11T20:18:36.194-07:00'
del_ts = '2084-03-11T20:18:36.194-07:00'
image = self.driver.ex_get_image('debian-7-wheezy-v20131014')
deprecated = image.deprecate('debian-7', 'DEPRECATED',
deprecated=dep_ts,
obsolete=obs_ts,
deleted=del_ts)
self.assertTrue(deprecated)
self.assertEqual(image.extra['deprecated']['deprecated'], dep_ts)
self.assertEqual(image.extra['deprecated']['obsolete'], obs_ts)
self.assertEqual(image.extra['deprecated']['deleted'], del_ts)
def test_ex_destroy_firewall(self):
firewall = self.driver.ex_get_firewall('lcfirewall')
destroyed = firewall.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_forwarding_rule(self):
fwr = self.driver.ex_get_forwarding_rule('lcforwardingrule')
destroyed = fwr.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_forwarding_rule_global(self):
fwr = self.driver.ex_get_forwarding_rule('http-rule', global_rule=True)
destroyed = fwr.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_route(self):
route = self.driver.ex_get_route('lcdemoroute')
destroyed = route.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_network(self):
network = self.driver.ex_get_network('lcnetwork')
destroyed = network.destroy()
self.assertTrue(destroyed)
def test_destroy_node(self):
node = self.driver.ex_get_node('node-name')
destroyed = node.destroy()
self.assertTrue(destroyed)
def test_ex_destroy_multiple_nodes(self):
nodes = []
nodes.append(self.driver.ex_get_node('lcnode-000'))
nodes.append(self.driver.ex_get_node('lcnode-001'))
destroyed = self.driver.ex_destroy_multiple_nodes(nodes)
for d in destroyed:
self.assertTrue(d)
def test_destroy_targethttpproxy(self):
proxy = self.driver.ex_get_targethttpproxy('web-proxy')
destroyed = proxy.destroy()
self.assertTrue(destroyed)
def test_destroy_targetinstance(self):
targetinstance = self.driver.ex_get_targetinstance('lctargetinstance')
self.assertEqual(targetinstance.name, 'lctargetinstance')
destroyed = targetinstance.destroy()
self.assertTrue(destroyed)
def test_destroy_targetpool(self):
targetpool = self.driver.ex_get_targetpool('lctargetpool')
destroyed = targetpool.destroy()
self.assertTrue(destroyed)
def test_destroy_urlmap(self):
urlmap = self.driver.ex_get_urlmap('web-map')
destroyed = urlmap.destroy()
self.assertTrue(destroyed)
def test_destroy_volume(self):
disk = self.driver.ex_get_volume('lcdisk')
destroyed = disk.destroy()
self.assertTrue(destroyed)
def test_ex_set_volume_auto_delete(self):
node = self.driver.ex_get_node('node-name')
volume = node.extra['boot_disk']
auto_delete = self.driver.ex_set_volume_auto_delete(
volume, node)
self.assertTrue(auto_delete)
def test_destroy_volume_snapshot(self):
snapshot = self.driver.ex_get_snapshot('lcsnapshot')
destroyed = snapshot.destroy()
self.assertTrue(destroyed)
def test_ex_get_address_global(self):
address_name = 'lcaddressglobal'
address = self.driver.ex_get_address(address_name, 'global')
self.assertEqual(address.name, address_name)
self.assertEqual(address.address, '173.99.99.99')
self.assertEqual(address.region, 'global')
self.assertEqual(address.extra['status'], 'RESERVED')
def test_ex_get_address(self):
address_name = 'lcaddress'
address = self.driver.ex_get_address(address_name)
self.assertEqual(address.name, address_name)
self.assertEqual(address.address, '173.255.113.20')
self.assertEqual(address.region.name, 'us-central1')
self.assertEqual(address.extra['status'], 'RESERVED')
def test_ex_get_backendservice(self):
web_service = self.driver.ex_get_backendservice('web-service')
self.assertEqual(web_service.name, 'web-service')
self.assertEqual(web_service.protocol, 'HTTP')
self.assertEqual(web_service.port, 80)
self.assertEqual(web_service.timeout, 30)
self.assertEqual(web_service.healthchecks[0].name, 'basic-check')
self.assertEqual(len(web_service.healthchecks), 1)
backends = web_service.backends
self.assertEqual(len(backends), 2)
self.assertEqual(backends[0]['balancingMode'], 'RATE')
self.assertEqual(backends[0]['maxRate'], 100)
self.assertEqual(backends[0]['capacityScaler'], 1.0)
web_service = self.driver.ex_get_backendservice('no-backends')
self.assertEqual(web_service.name, 'web-service')
self.assertEqual(web_service.healthchecks[0].name, 'basic-check')
self.assertEqual(len(web_service.healthchecks), 1)
self.assertEqual(len(web_service.backends), 0)
def test_ex_get_healthcheck(self):
healthcheck_name = 'lchealthcheck'
healthcheck = self.driver.ex_get_healthcheck(healthcheck_name)
self.assertEqual(healthcheck.name, healthcheck_name)
self.assertEqual(healthcheck.port, 8000)
self.assertEqual(healthcheck.path, '/lc')
def test_ex_get_firewall(self):
firewall_name = 'lcfirewall'
firewall = self.driver.ex_get_firewall(firewall_name)
self.assertEqual(firewall.name, firewall_name)
self.assertEqual(firewall.network.name, 'default')
self.assertEqual(firewall.source_tags, ['libcloud'])
def test_ex_get_forwarding_rule(self):
fwr_name = 'lcforwardingrule'
fwr = self.driver.ex_get_forwarding_rule(fwr_name)
self.assertEqual(fwr.name, fwr_name)
self.assertEqual(fwr.extra['portRange'], '8000-8500')
self.assertEqual(fwr.targetpool.name, 'lctargetpool')
self.assertEqual(fwr.protocol, 'TCP')
def test_ex_get_forwarding_rule_global(self):
fwr_name = 'http-rule'
fwr = self.driver.ex_get_forwarding_rule(fwr_name, global_rule=True)
self.assertEqual(fwr.name, fwr_name)
self.assertEqual(fwr.extra['portRange'], '80-80')
self.assertEqual(fwr.targetpool.name, 'web-proxy')
self.assertEqual(fwr.protocol, 'TCP')
self.assertEqual(fwr.address, '192.0.2.1')
def test_ex_get_image_license(self):
image = self.driver.ex_get_image('sles-12-v20141023')
self.assertTrue('licenses' in image.extra)
self.assertEqual(image.extra['licenses'][0].name, 'sles-12')
self.assertTrue(image.extra['licenses'][0].charges_use_fee)
def test_ex_get_image(self):
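        """Partial image names are resolved to a concrete image, optionally
        restricted to specific projects; unknown images raise
        ResourceNotFoundError."""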
partial_name = 'debian-7'
image = self.driver.ex_get_image(partial_name)
self.assertEqual(image.name, 'debian-7-wheezy-v20131120')
# A 'debian-7' image exists in the local project
self.assertTrue(image.extra['description'].startswith('Debian'))
partial_name = 'debian-6'
image = self.driver.ex_get_image(partial_name)
self.assertEqual(image.name, 'debian-6-squeeze-v20130926')
self.assertTrue(image.extra['description'].startswith('Debian'))
partial_name = 'debian-7'
image = self.driver.ex_get_image(partial_name, ['debian-cloud'])
self.assertEqual(image.name, 'debian-7-wheezy-v20131120')
partial_name = 'debian-7'
self.assertRaises(ResourceNotFoundError, self.driver.ex_get_image,
partial_name, 'suse-cloud',
ex_standard_projects=False)
def test_ex_copy_image(self):
name = 'coreos'
url = 'gs://storage.core-os.net/coreos/amd64-generic/247.0.0/coreos_production_gce.tar.gz'
description = 'CoreOS beta 522.3.0'
family = 'coreos'
image = self.driver.ex_copy_image(name, url, description=description,
family=family)
self.assertTrue(image.name.startswith(name))
self.assertEqual(image.extra['description'], description)
self.assertEqual(image.extra['family'], family)
def test_ex_get_route(self):
route_name = 'lcdemoroute'
route = self.driver.ex_get_route(route_name)
self.assertEqual(route.name, route_name)
self.assertEqual(route.dest_range, '192.168.25.0/24')
self.assertEqual(route.priority, 1000)
def test_ex_get_network(self):
network_name = 'lcnetwork'
network = self.driver.ex_get_network(network_name)
self.assertEqual(network.name, network_name)
self.assertEqual(network.cidr, '10.11.0.0/16')
self.assertEqual(network.extra['gatewayIPv4'], '10.11.0.1')
self.assertEqual(network.extra['description'], 'A custom network')
def test_ex_get_node(self):
node_name = 'node-name'
zone = 'us-central1-a'
node = self.driver.ex_get_node(node_name, zone)
self.assertEqual(node.name, node_name)
self.assertEqual(node.size, 'n1-standard-1')
removed_node = 'libcloud-lb-demo-www-002'
self.assertRaises(ResourceNotFoundError, self.driver.ex_get_node,
removed_node, 'us-central1-b')
missing_node = 'dummy-node'
self.assertRaises(ResourceNotFoundError, self.driver.ex_get_node,
missing_node, 'all')
def test_ex_get_project(self):
project = self.driver.ex_get_project()
self.assertEqual(project.name, 'project_name')
networks_quota = project.quotas[1]
self.assertEqual(networks_quota['usage'], 3)
self.assertEqual(networks_quota['limit'], 5)
self.assertEqual(networks_quota['metric'], 'NETWORKS')
self.assertTrue('fingerprint' in project.extra['commonInstanceMetadata'])
self.assertTrue('items' in project.extra['commonInstanceMetadata'])
self.assertTrue('usageExportLocation' in project.extra)
self.assertTrue('bucketName' in project.extra['usageExportLocation'])
self.assertEqual(project.extra['usageExportLocation']['bucketName'], 'gs://graphite-usage-reports')
def test_ex_add_access_config(self):
self.assertRaises(ValueError, self.driver.ex_add_access_config,
'node', 'name', 'nic')
node = self.driver.ex_get_node('node-name', 'us-central1-a')
self.assertTrue(self.driver.ex_add_access_config(node, 'foo', 'bar'))
def test_ex_delete_access_config(self):
self.assertRaises(ValueError, self.driver.ex_add_access_config,
'node', 'name', 'nic')
node = self.driver.ex_get_node('node-name', 'us-central1-a')
self.assertTrue(self.driver.ex_delete_access_config(node, 'foo', 'bar'))
def test_ex_set_usage_export_bucket(self):
self.assertRaises(ValueError,
self.driver.ex_set_usage_export_bucket, 'foo')
bucket_name = 'gs://foo'
self.driver.ex_set_usage_export_bucket(bucket_name)
bucket_name = 'https://www.googleapis.com/foo'
self.driver.ex_set_usage_export_bucket(bucket_name)
def test__set_project_metadata(self):
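        """Check how _set_project_metadata preserves, replaces or drops the
        project-level sshKeys entry depending on the supplied metadata and
        the force flag."""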
self.assertEqual(len(self.driver._set_project_metadata(None, False, "")), 0)
# 'delete' metadata, but retain current sshKeys
md = self.driver._set_project_metadata(None, False, "this is a test")
self.assertEqual(len(md), 1)
self.assertEqual(md[0]['key'], 'sshKeys')
self.assertEqual(md[0]['value'], 'this is a test')
# 'delete' metadata *and* any existing sshKeys
md = self.driver._set_project_metadata(None, True, "this is a test")
self.assertEqual(len(md), 0)
        # add new metadata, keep existing sshKeys; since the new value also
        # has 'sshKeys', the final struct should only have one key/value
        # pair for sshKeys, and it should be the "current_keys" value
gce_md = {'items': [{'key': 'foo', 'value': 'one'},
{'key': 'sshKeys', 'value': 'another test'}]}
md = self.driver._set_project_metadata(gce_md, False, "this is a test")
self.assertEqual(len(md), 2, str(md))
sshKeys = ""
count = 0
for d in md:
if d['key'] == 'sshKeys':
count += 1
sshKeys = d['value']
self.assertEqual(sshKeys, 'this is a test')
self.assertEqual(count, 1)
        # add new metadata, overwrite existing sshKeys; in this case the
        # existing 'sshKeys' value should be replaced
gce_md = {'items': [{'key': 'foo', 'value': 'one'},
{'key': 'sshKeys', 'value': 'another test'}]}
md = self.driver._set_project_metadata(gce_md, True, "this is a test")
self.assertEqual(len(md), 2, str(md))
sshKeys = ""
count = 0
for d in md:
if d['key'] == 'sshKeys':
count += 1
sshKeys = d['value']
self.assertEqual(sshKeys, 'another test')
self.assertEqual(count, 1)
        # add new metadata, remove existing sshKeys; in this case there was
        # an existing 'sshKeys' entry, but it is removed entirely
gce_md = {'items': [{'key': 'foo', 'value': 'one'},
{'key': 'nokeys', 'value': 'two'}]}
md = self.driver._set_project_metadata(gce_md, True, "this is a test")
self.assertEqual(len(md), 2, str(md))
sshKeys = ""
count = 0
for d in md:
if d['key'] == 'sshKeys':
count += 1
sshKeys = d['value']
self.assertEqual(sshKeys, '')
self.assertEqual(count, 0)
def test_ex_set_common_instance_metadata(self):
# test non-dict
self.assertRaises(ValueError,
self.driver.ex_set_common_instance_metadata,
['bad', 'type'])
# test standard python dict
pydict = {'key': 'pydict', 'value': 1}
self.driver.ex_set_common_instance_metadata(pydict)
# test GCE badly formatted dict
bad_gcedict = {'items': 'foo'}
self.assertRaises(ValueError,
self.driver.ex_set_common_instance_metadata,
bad_gcedict)
# test gce formatted dict
gcedict = {'items': [{'key': 'gcedict1', 'value': 'v1'},
{'key': 'gcedict2', 'value': 'v2'}]}
self.driver.ex_set_common_instance_metadata(gcedict)
def test_ex_set_node_metadata(self):
node = self.driver.ex_get_node('node-name', 'us-central1-a')
# test non-dict
self.assertRaises(ValueError, self.driver.ex_set_node_metadata,
node, ['bad', 'type'])
# test standard python dict
pydict = {'key': 'pydict', 'value': 1}
self.driver.ex_set_node_metadata(node, pydict)
# test GCE badly formatted dict
bad_gcedict = {'items': 'foo'}
self.assertRaises(ValueError, self.driver.ex_set_node_metadata,
node, bad_gcedict)
# test gce formatted dict
gcedict = {'items': [{'key': 'gcedict1', 'value': 'v1'},
{'key': 'gcedict2', 'value': 'v2'}]}
self.driver.ex_set_node_metadata(node, gcedict)
def test_ex_get_region(self):
region_name = 'us-central1'
region = self.driver.ex_get_region(region_name)
self.assertEqual(region.name, region_name)
self.assertEqual(region.status, 'UP')
self.assertEqual(region.zones[0].name, 'us-central1-a')
def test_ex_get_size(self):
size_name = 'n1-standard-1'
size = self.driver.ex_get_size(size_name)
self.assertEqual(size.name, size_name)
self.assertEqual(size.extra['zone'].name, 'us-central1-a')
self.assertEqual(size.disk, 10)
self.assertEqual(size.ram, 3840)
self.assertEqual(size.extra['guestCpus'], 1)
def test_ex_get_targethttpproxy(self):
targethttpproxy_name = 'web-proxy'
targethttpproxy = self.driver.ex_get_targethttpproxy(
targethttpproxy_name)
self.assertEqual(targethttpproxy.name, targethttpproxy_name)
self.assertEqual(targethttpproxy.urlmap.name, 'web-map')
def test_ex_get_targetinstance(self):
targetinstance_name = 'lctargetinstance'
targetinstance = self.driver.ex_get_targetinstance(targetinstance_name)
self.assertEqual(targetinstance.name, targetinstance_name)
self.assertEqual(targetinstance.zone.name, 'us-central1-a')
def test_ex_get_targetpool(self):
targetpool_name = 'lctargetpool'
targetpool = self.driver.ex_get_targetpool(targetpool_name)
self.assertEqual(targetpool.name, targetpool_name)
self.assertEqual(len(targetpool.nodes), 2)
self.assertEqual(targetpool.region.name, 'us-central1')
def test_ex_get_snapshot(self):
snapshot_name = 'lcsnapshot'
snapshot = self.driver.ex_get_snapshot(snapshot_name)
self.assertEqual(snapshot.name, snapshot_name)
self.assertEqual(snapshot.size, '10')
self.assertEqual(snapshot.status, 'READY')
def test_ex_get_urlmap(self):
urlmap_name = 'web-map'
urlmap = self.driver.ex_get_urlmap(urlmap_name)
self.assertEqual(urlmap.name, urlmap_name)
self.assertEqual(urlmap.default_service.name, 'web-service')
def test_ex_get_volume(self):
volume_name = 'lcdisk'
volume = self.driver.ex_get_volume(volume_name)
self.assertEqual(volume.name, volume_name)
self.assertEqual(volume.size, '10')
self.assertEqual(volume.extra['status'], 'READY')
self.assertEqual(volume.extra['type'], 'pd-ssd')
def test_ex_get_disktype(self):
disktype_name = 'pd-ssd'
disktype_zone = 'us-central1-a'
disktype = self.driver.ex_get_disktype(disktype_name, disktype_zone)
self.assertEqual(disktype.name, disktype_name)
self.assertEqual(disktype.zone.name, disktype_zone)
self.assertEqual(disktype.extra['description'], 'SSD Persistent Disk')
self.assertEqual(disktype.extra['valid_disk_size'], '10GB-10240GB')
self.assertEqual(disktype.extra['default_disk_size_gb'], '100')
def test_ex_get_zone(self):
zone_name = 'us-central1-b'
zone = self.driver.ex_get_zone(zone_name)
self.assertEqual(zone.name, zone_name)
self.assertFalse(zone.time_until_mw)
self.assertFalse(zone.next_mw_duration)
zone_no_mw = self.driver.ex_get_zone('us-central1-a')
self.assertEqual(zone_no_mw.time_until_mw, None)
class GCEMockHttp(MockHttpTestCase):
fixtures = ComputeFileFixtures('gce')
json_hdr = {'content-type': 'application/json; charset=UTF-8'}
def _get_method_name(self, type, use_param, qs, path):
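        """Map request paths to fixture handler names by stripping the API
        version and project prefixes."""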
api_path = '/compute/%s' % API_VERSION
project_path = '/projects/%s' % GCE_KEYWORD_PARAMS['project']
path = path.replace(api_path, '')
# This replace is separate, since there is a call with a different
# project name
path = path.replace(project_path, '')
# The path to get project information is the base path, so use a fake
# '/project' path instead
if not path:
path = '/project'
method_name = super(GCEMockHttp, self)._get_method_name(type,
use_param,
qs, path)
return method_name
def _setUsageExportBucket(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('setUsageExportBucket_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_instances_custom_node(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_custom_node.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_instances_node_name_setMachineType(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_node_name_setMachineType.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_operations_operation_setMachineType_notstopped(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_operations_operation_setMachineType_notstopped.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_instances_custom_node_setMachineType(self, method, url, body, headers):
body = {
"error": {
"errors": [
{
"domain": "global",
"reason": "invalid",
"message": "Invalid value for field 'resource.machineTypes': "
"'projects/project_name/zones/us-central1-a/machineTypes/custom-1-61440'. Resource was not found.",
}
],
"code": 400,
"message": "Invalid value for field 'resource.machineTypes': "
"'projects/project_name/zones/us-central1-a/machineTypes/custom-1-61440'. Resource was not found."
}
}
return (httplib.BAD_REQUEST, body, self.json_hdr, httplib.responses[httplib.BAD_REQUEST])
    def _zones_us_central1_a_instances_stopped_node_setMachineType(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_stopped_node_setMachineType.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_operations_operation_setMachineType(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_operations_operation_setMachineType.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_operations_operation_startnode(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_operations_operation_startnode.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_instances_stopped_node_start(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_stopped_node_start.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _zones_us_central1_a_instances_stopped_node_stop(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_stopped_node_stop.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_stopped_node(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_stopped_node.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_stopnode(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_operations_operation_stopnode.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_stop(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_node_name_stop.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_setMetadata(self, method, url, body, headers):
body = self.fixtures.load('zones_us_central1_a_instances_node_name_setMetadata_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _setCommonInstanceMetadata(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('setCommonInstanceMetadata_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_addresses(self, method, url, body, headers):
body = self.fixtures.load('aggregated_addresses.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_diskTypes(self, method, url, body, headers):
body = self.fixtures.load('aggregated_disktypes.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_disks(self, method, url, body, headers):
body = self.fixtures.load('aggregated_disks.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_forwardingRules(self, method, url, body, headers):
body = self.fixtures.load('aggregated_forwardingRules.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_instances(self, method, url, body, headers):
body = self.fixtures.load('aggregated_instances.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_machineTypes(self, method, url, body, headers):
body = self.fixtures.load('aggregated_machineTypes.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_targetInstances(self, method, url, body, headers):
body = self.fixtures.load('aggregated_targetInstances.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _aggregated_targetPools(self, method, url, body, headers):
body = self.fixtures.load('aggregated_targetPools.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_backendServices(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_backendServices_post.json')
else:
backend_name = getattr(self.test, 'backendservices_mock',
'web-service')
body = self.fixtures.load('global_backendServices-%s.json' %
backend_name)
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_backendServices_no_backends(self, method, url, body, headers):
body = self.fixtures.load('global_backendServices_no_backends.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_backendServices_web_service(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'global_backendServices_web_service_delete.json')
else:
body = self.fixtures.load('global_backendServices_web_service.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_forwardingRules(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_forwardingRules_post.json')
else:
body = self.fixtures.load('global_forwardingRules.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_forwardingRules_http_rule(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load('global_forwardingRules_http_rule_delete.json')
else:
body = self.fixtures.load('global_forwardingRules_http_rule.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_httpHealthChecks(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_httpHealthChecks_post.json')
else:
body = self.fixtures.load('global_httpHealthChecks.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_httpHealthChecks_default_health_check(self, method, url, body, headers):
body = self.fixtures.load('global_httpHealthChecks_basic-check.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_httpHealthChecks_basic_check(self, method, url, body, headers):
body = self.fixtures.load('global_httpHealthChecks_basic-check.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_httpHealthChecks_libcloud_lb_demo_healthcheck(
self, method, url, body, headers):
body = self.fixtures.load(
'global_httpHealthChecks_libcloud-lb-demo-healthcheck.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_httpHealthChecks_lchealthcheck(self, method, url, body,
headers):
if method == 'DELETE':
body = self.fixtures.load(
'global_httpHealthChecks_lchealthcheck_delete.json')
elif method == 'PUT':
body = self.fixtures.load(
'global_httpHealthChecks_lchealthcheck_put.json')
else:
body = self.fixtures.load(
'global_httpHealthChecks_lchealthcheck.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_firewalls(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_firewalls_post.json')
else:
body = self.fixtures.load('global_firewalls.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_firewalls_lcfirewall(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'global_firewalls_lcfirewall_delete.json')
elif method == 'PUT':
body = self.fixtures.load('global_firewalls_lcfirewall_put.json')
else:
body = self.fixtures.load('global_firewalls_lcfirewall.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_images(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_images_post.json')
else:
body = self.fixtures.load('global_images.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_images_debian_7_wheezy_v20131120(
self, method, url, body, headers):
body = self.fixtures.load('global_images_debian_7_wheezy_v20131120_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_images_debian_7_wheezy_v20131014_deprecate(
self, method, url, body, headers):
body = self.fixtures.load('global_images_debian_7_wheezy_v20131014_deprecate.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_routes(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_routes_post.json')
else:
body = self.fixtures.load('global_routes.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_networks(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_networks_post.json')
else:
body = self.fixtures.load('global_networks.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_networks_default(self, method, url, body, headers):
body = self.fixtures.load('global_networks_default.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_networks_libcloud_demo_network(self, method, url, body,
headers):
body = self.fixtures.load('global_networks_libcloud-demo-network.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_networks_libcloud_demo_europe_network(self, method, url, body,
headers):
body = self.fixtures.load(
'global_networks_libcloud-demo-europe-network.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_routes_lcdemoroute(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load('global_routes_lcdemoroute_delete.json')
else:
body = self.fixtures.load('global_routes_lcdemoroute.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_networks_lcnetwork(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load('global_networks_lcnetwork_delete.json')
else:
body = self.fixtures.load('global_networks_lcnetwork.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_snapshots(self, method, url, body, headers):
body = self.fixtures.load('global_snapshots.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_snapshots_lcsnapshot(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'global_snapshots_lcsnapshot_delete.json')
else:
body = self.fixtures.load('global_snapshots_lcsnapshot.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_setUsageExportBucket(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_setUsageExportBucket.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_setCommonInstanceMetadata(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_setCommonInstanceMetadata.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_backendServices_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_backendServices_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_backendServices_web_service_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_backendServices_web_service_delete'
'.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_forwardingRules_http_rule_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_forwardingRules_http_rule_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_forwardingRules_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_forwardingRules_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_httpHealthChecks_lchealthcheck_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_httpHealthChecks_lchealthcheck_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_images_debian7_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_images_debian7_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_httpHealthChecks_lchealthcheck_put(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_httpHealthChecks_lchealthcheck_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_httpHealthChecks_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_httpHealthChecks_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_firewalls_lcfirewall_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_firewalls_lcfirewall_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_firewalls_lcfirewall_put(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_firewalls_lcfirewall_put.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_firewalls_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_firewalls_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_routes_lcdemoroute_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_routes_lcdemoroute_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_networks_lcnetwork_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_networks_lcnetwork_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_routes_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_routes_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_networks_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_networks_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_snapshots_lcsnapshot_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_snapshots_lcsnapshot_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_image_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_image_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_addresses_lcaddressglobal_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_addresses_lcaddressglobal_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_targetHttpProxies_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_targetHttpProxies_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_targetHttpProxies_web_proxy_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_targetHttpProxies_web_proxy_delete'
'.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_urlMaps_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_urlMaps_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_urlMaps_web_map_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_urlMaps_web_map_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_targetHttpProxies(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_targetHttpProxies_post.json')
else:
body = self.fixtures.load('global_targetHttpProxies.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_targetHttpProxies_web_proxy(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'global_targetHttpProxies_web_proxy_delete.json')
else:
body = self.fixtures.load('global_targetHttpProxies_web_proxy.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_urlMaps(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('global_urlMaps_post.json')
else:
body = self.fixtures.load('global_urlMaps.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_urlMaps_web_map(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load('global_urlMaps_web_map_delete.json')
else:
body = self.fixtures.load('global_urlMaps_web_map.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_addresses_lcaddress_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_addresses_lcaddress_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_operations_operation_global_addresses_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_global_addresses_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_addresses_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_addresses_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_forwardingRules_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_forwardingRules_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_forwardingRules_lcforwardingrule_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_forwardingRules_lcforwardingrule_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_deleteAccessConfig(self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node_name_deleteAccessConfig_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_serialPort(self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-a_instances_node_name_getSerialOutput.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_addAccessConfig(self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node_name_addAccessConfig_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_setMetadata_post(self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us_central1_a_node_name_setMetadata_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_targetInstances_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_targetInstances_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_targetPools_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_targetPools_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_node_name_addAccessConfig_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node_name_addAccessConfig_done.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_node_name_deleteAccessConfig_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node_name_deleteAccessConfig_done.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_targetInstances_lctargetinstance_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_targetInstances_lctargetinstance_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_targetPools_lctargetpool_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_targetPools_lctargetpool_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_targetPools_lctargetpool_removeHealthCheck_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_targetPools_lctargetpool_removeHealthCheck_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_targetPools_lctargetpool_addHealthCheck_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_targetPools_lctargetpool_addHealthCheck_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_targetPools_lctargetpool_removeInstance_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_targetPools_lctargetpool_removeInstance_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_targetPools_lb_pool_setBackup_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_targetPools_lb_pool_setBackup_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_operations_operation_regions_us_central1_targetPools_lctargetpool_addInstance_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_regions_us-central1_targetPools_lctargetpool_addInstance_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_disks_lcdisk_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_disks_lcdisk_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_setDiskAutoDelete(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us_central1_a_instances_node_name_setDiskAutoDelete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_volume_auto_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us_central1_a_operations_operation_volume_auto_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_disks_lcdisk_createSnapshot_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_disks_lcdisk_createSnapshot_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_disks_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_disks_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_lcnode_000_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_lcnode-000_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_lcnode_001_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_lcnode-001_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_node_name_delete(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node-name_delete.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_node_name_attachDisk_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node-name_attachDisk_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_node_name_detachDisk_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node-name_detachDisk_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_node_name_setTags_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node-name_setTags_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_node_name_reset_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_node-name_reset_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_operations_operation_zones_europe_west1_a_instances_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_europe-west1-a_instances_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_operations_operation_zones_us_central1_a_instances_post(
self, method, url, body, headers):
body = self.fixtures.load(
'operations_operation_zones_us-central1-a_instances_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _project(self, method, url, body, headers):
body = self.fixtures.load('project.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _projects_windows_cloud_global_licenses_windows_server_2008_r2_dc(self, method, url, body, headers):
body = self.fixtures.load('projects_windows-cloud_global_licenses_windows_server_2008_r2_dc.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _projects_suse_cloud_global_licenses_sles_11(self, method, url, body, headers):
body = self.fixtures.load('projects_suse-cloud_global_licenses_sles_11.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _projects_rhel_cloud_global_licenses_rhel_7_server(self, method, url, body, headers):
body = self.fixtures.load('projects_rhel-cloud_global_licenses_rhel_server.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _projects_suse_cloud_global_licenses_sles_12(self, method, url, body, headers):
body = self.fixtures.load('projects_suse-cloud_global_licenses_sles_12.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_windows_cloud_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_windows-cloud_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_rhel_cloud_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_rhel-cloud_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_gce_nvme_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_gce-nvme_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_coreos_cloud_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_coreos-cloud_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_opensuse_cloud_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_opensuse-cloud_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_google_containers_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_google-containers_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_ubuntu_os_cloud_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_ubuntu-os-cloud_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
    def _projects_centos_cloud_global_images(self, method, url, body, headers):
        body = self.fixtures.load('projects_centos-cloud_global_images.json')
        return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _projects_suse_cloud_global_images(self, method, url, body, headers):
body = self.fixtures.load('projects_suse-cloud_global_images.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _projects_debian_cloud_global_images(self, method, url, body, headers):
body = self.fixtures.load('projects_debian-cloud_global_images.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions(self, method, url, body, headers):
if 'pageToken' in url or 'filter' in url:
body = self.fixtures.load('regions-paged-2.json')
elif 'maxResults' in url:
body = self.fixtures.load('regions-paged-1.json')
else:
body = self.fixtures.load('regions.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_addresses(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load(
'global_addresses_post.json')
else:
body = self.fixtures.load('global_addresses.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_addresses(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load(
'regions_us-central1_addresses_post.json')
else:
body = self.fixtures.load('regions_us-central1_addresses.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _global_addresses_lcaddressglobal(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'global_addresses_lcaddressglobal_delete.json')
else:
body = self.fixtures.load('global_addresses_lcaddressglobal.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_addresses_lcaddress(self, method, url, body,
headers):
if method == 'DELETE':
body = self.fixtures.load(
'regions_us-central1_addresses_lcaddress_delete.json')
else:
body = self.fixtures.load(
'regions_us-central1_addresses_lcaddress.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_forwardingRules(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load(
'regions_us-central1_forwardingRules_post.json')
else:
body = self.fixtures.load(
'regions_us-central1_forwardingRules.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_forwardingRules_libcloud_lb_demo_lb(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_forwardingRules_libcloud-lb-demo-lb.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_forwardingRules_lcforwardingrule(
self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'regions_us-central1_forwardingRules_lcforwardingrule_delete.json')
else:
body = self.fixtures.load(
'regions_us-central1_forwardingRules_lcforwardingrule.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_targetInstances(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load(
'zones_us-central1-a_targetInstances_post.json')
else:
body = self.fixtures.load('zones_us-central1-a_targetInstances.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load(
'regions_us-central1_targetPools_post.json')
else:
body = self.fixtures.load('regions_us-central1_targetPools.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_targetInstances_lctargetinstance(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'zones_us-central1-a_targetInstances_lctargetinstance_delete.json')
else:
body = self.fixtures.load(
'zones_us-central1-a_targetInstances_lctargetinstance.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lb_pool_getHealth(self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lb_pool_getHealth.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lb_pool(self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lb_pool.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lctargetpool(self, method, url,
body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'regions_us-central1_targetPools_lctargetpool_delete.json')
else:
body = self.fixtures.load(
'regions_us-central1_targetPools_lctargetpool.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lctargetpool_sticky(self, method, url,
body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lctargetpool_sticky.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_backup_pool(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_backup_pool.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_libcloud_lb_demo_lb_tp(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_libcloud-lb-demo-lb-tp.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lctargetpool_removeHealthCheck(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lctargetpool_removeHealthCheck_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lctargetpool_addHealthCheck(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lctargetpool_addHealthCheck_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lctargetpool_removeInstance(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lctargetpool_removeInstance_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lb_pool_setBackup(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lb_pool_setBackup_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _regions_us_central1_targetPools_lctargetpool_addInstance(
self, method, url, body, headers):
body = self.fixtures.load(
'regions_us-central1_targetPools_lctargetpool_addInstance_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones(self, method, url, body, headers):
body = self.fixtures.load('zones.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_asia_east_1a(self, method, url, body, headers):
body = self.fixtures.load('zones_asia-east1-a.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_diskTypes(self, method, url, body, headers):
body = self.fixtures.load('zones_us-central1-a_diskTypes.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_diskTypes_pd_standard(self, method, url, body, headers):
body = self.fixtures.load('zones_us-central1-a_diskTypes_pd_standard.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_diskTypes_pd_ssd(self, method, url, body, headers):
body = self.fixtures.load('zones_us-central1-a_diskTypes_pd_ssd.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_disks(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('zones_us-central1-a_disks_post.json')
else:
body = self.fixtures.load('zones_us-central1-a_disks.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_disks_lcdisk(self, method, url, body, headers):
if method == 'DELETE':
body = self.fixtures.load(
'zones_us-central1-a_disks_lcdisk_delete.json')
else:
body = self.fixtures.load('zones_us-central1-a_disks_lcdisk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_disks_lcdisk_createSnapshot(self, method, url,
body, headers):
body = self.fixtures.load(
'zones_us-central1-a_disks_lcdisk_createSnapshot_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_disks_node_name(self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_disks_lcnode_000(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_disks_lcnode_001(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_b_disks_libcloud_lb_demo_www_000(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_b_disks_libcloud_lb_demo_www_001(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_b_disks_libcloud_lb_demo_www_002(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central2_a_disks_libcloud_demo_boot_disk(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central2_a_disks_libcloud_demo_np_node(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central2_a_disks_libcloud_demo_multiple_nodes_000(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central2_a_disks_libcloud_demo_multiple_nodes_001(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_disks(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load('zones_us-central1-a_disks_post.json')
else:
body = self.fixtures.load('zones_us-central1-a_disks.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_disks_libcloud_demo_europe_np_node(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_disks_libcloud_demo_europe_boot_disk(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_disks_libcloud_demo_europe_multiple_nodes_000(
self, method, url, body, headers):
body = self.fixtures.load('generic_disk.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_instances(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load(
'zones_europe-west1-a_instances_post.json')
else:
body = self.fixtures.load('zones_europe-west1-a_instances.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_diskTypes_pd_standard(self, method, url, body, headers):
body = self.fixtures.load('zones_europe-west1-a_diskTypes_pd_standard.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances(self, method, url, body, headers):
if method == 'POST':
body = self.fixtures.load(
'zones_us-central1-a_instances_post.json')
else:
body = self.fixtures.load('zones_us-central1-a_instances.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name(self, method, url, body,
headers):
if method == 'DELETE':
body = self.fixtures.load(
'zones_us-central1-a_instances_node-name_delete.json')
else:
body = self.fixtures.load(
'zones_us-central1-a_instances_node-name.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_attachDisk(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-a_instances_node-name_attachDisk_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_detachDisk(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-a_instances_node-name_detachDisk_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_setTags(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-a_instances_node-name_setTags_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_node_name_reset(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-a_instances_node-name_reset_post.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_lcnode_000(self, method, url, body,
headers):
if method == 'DELETE':
body = self.fixtures.load(
'zones_us-central1-a_instances_lcnode-000_delete.json')
else:
body = self.fixtures.load(
'zones_us-central1-a_instances_lcnode-000.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_instances_lcnode_001(self, method, url, body,
headers):
if method == 'DELETE':
body = self.fixtures.load(
'zones_us-central1-a_instances_lcnode-001_delete.json')
else:
body = self.fixtures.load(
'zones_us-central1-a_instances_lcnode-001.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_b_instances_libcloud_lb_demo_www_000(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-b_instances_libcloud-lb-demo-www-000.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_b_instances_libcloud_lb_demo_www_001(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-b_instances_libcloud-lb-demo-www-001.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_b_instances_libcloud_lb_demo_www_002(
self, method, url, body, headers):
body = self.fixtures.load(
'zones_us-central1-b_instances_libcloud-lb-demo-www-002.json')
return (httplib.NOT_FOUND, body, self.json_hdr,
httplib.responses[httplib.NOT_FOUND])
def _zones_us_central1_a(self, method, url, body, headers):
body = self.fixtures.load('zones_us-central1-a.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_machineTypes(self, method, url, body, headers):
body = self.fixtures.load('zones_us-central1-a_machineTypes.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_europe_west1_a_machineTypes_n1_standard_1(self, method, url,
body, headers):
body = self.fixtures.load(
'zones_europe-west1-a_machineTypes_n1-standard-1.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
def _zones_us_central1_a_machineTypes_n1_standard_1(self, method, url,
body, headers):
body = self.fixtures.load(
'zones_us-central1-a_machineTypes_n1-standard-1.json')
return (httplib.OK, body, self.json_hdr, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
|
curoverse/libcloud
|
libcloud/test/compute/test_gce.py
|
Python
|
apache-2.0
| 123,700
| 0.001156
|
import os
import sys
import unittest
from logilab.common import testlib
from pylint.testutils import make_tests, LintTestUsingFile, cb_test_gen, linter
import ConfigParser
HERE = os.path.dirname(os.path.abspath(__file__))
PLUGINPATH = os.path.join(HERE, "..")
linter.prepare_import_path(PLUGINPATH)
linter.load_plugin_modules(['Playero'])
linter.global_set_option('required-attributes', ()) # remove required __revision__
linter.load_file_configuration(os.path.join(HERE, "..", "config", ".pylintrc"))
convs = ['C0111', 'C0103', 'C0301', 'C0303', 'C0304', 'C0321']
warns = ['W0141', 'W0142', 'W0212', 'W0312', 'W0401', 'W0403', 'W0511', 'W0512', 'W0614', 'W0622']
refac = ['R0903', 'R0904', 'R0913']
for disabled in convs + warns + refac:
linter.disable(disabled)
config = ConfigParser.SafeConfigParser()
config.read(os.path.join(HERE, "..", "config", "playero.cfg"))
PLAYEROPATH = config.get('paths', os.name)
sys.path.append(os.path.join(PLAYEROPATH, "core"))
for scriptdir in ["base", "standard", "extra/StdPy"]:
for pydir in ['records', 'windows', 'reports', 'routines', 'documents','tools']:
sys.path.append(os.path.join(PLAYEROPATH, scriptdir, pydir))
sys.path.append(os.path.join(PLUGINPATH, "corepy", "embedded"))
def tests():
callbacks = [cb_test_gen(LintTestUsingFile)]
input_dir = os.path.join(HERE, 'input')
messages_dir = os.path.join(HERE, 'messages')
return make_tests(input_dir, messages_dir, None, callbacks)
def additional_tests():
suites = unittest.TestSuite()
for fn in os.listdir(os.path.dirname(__file__)):
if fn.endswith('.py') and fn not in ('__init__.py', 'fulltest.py'):
name = os.path.splitext(fn)[0]
module = __import__(name, globals(), locals(), [name])
if hasattr(module, 'test_suite'):
suites.addTests(module.test_suite())
return suites
def suite():
default = [unittest.makeSuite(test, suiteClass=testlib.TestSuite) for test in tests()]
default.append(additional_tests())
return testlib.TestSuite(default)
if __name__ == '__main__':
testlib.unittest_main(defaultTest='suite')
|
ancho85/pylint-playero-plugin
|
tests/fulltest.py
|
Python
|
gpl-2.0
| 2,139
| 0.004675
|
# code source: https://blog.miguelgrinberg.com/post/the-flask-mega-tutorial-part-iv-database
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
api.downgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)
print('Current database version: ' + str(api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)))
# This script downgrades the database by 1 revision every time it runs.
# To downgrade by multiple revisions, run the script as many times as needed.
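# A rough sketch of downgrading several revisions in one go (untested; it only
# reuses the migrate.versioning.api calls already shown above, and TARGET is a
# made-up name for the revision you want to end up at):
#
#   TARGET = 0  # e.g. all the way back to the initial revision
#   while api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO) > TARGET:
#       v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
#       api.downgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)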
|
EddyCodeIt/SPA_Project_2016_Data_Rep-Quering
|
db_downgrade.py
|
Python
|
apache-2.0
| 600
| 0.006667
|
example_template = Template({
'A': RsrcDef({}, []),
'B': RsrcDef({}, []),
'C': RsrcDef({'a': '4alpha'}, ['A', 'B']),
'D': RsrcDef({'c': GetRes('C')}, []),
'E': RsrcDef({'ca': GetAtt('C', 'a')}, []),
})
engine.create_stack('foo', example_template)
engine.noop(3)
engine.rollback_stack('foo')
engine.noop(6)
engine.call(verify, Template())
|
zaneb/heat-convergence-prototype
|
scenarios/basic_create_rollback.py
|
Python
|
apache-2.0
| 358
| 0
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from mozboot.base import BaseBootstrapper
class OpenBSDBootstrapper(BaseBootstrapper):
def __init__(self, version):
BaseBootstrapper.__init__(self)
def install_system_packages(self):
# we use -z because there's no other way to say "any autoconf-2.13"
self.run_as_root(['pkg_add', '-z',
'mercurial',
'llvm',
'autoconf-2.13',
'yasm',
'gtk+2',
'libIDL',
'gmake',
'gtar',
'wget',
'unzip',
'zip'])
def _update_package_manager(self):
self.run_as_root(['port', 'sync'])
def upgrade_mercurial(self, current):
self.run_as_root(['pkg_add', '-u', 'mercurial'])
|
SlateScience/MozillaJS
|
js/src/python/mozboot/mozboot/openbsd.py
|
Python
|
mpl-2.0
| 954
| 0.013627
|
#!/usr/bin/env python3
"""
The goal of this example is to show you the syntax for IR seeking readings. When using
IR-SEEK with a remote control you get both heading and distance data. The code below
shows the syntax for beacon seeking. Additionally, it's good to play with a demo so that
you can see how well (or poorly) a sensor behaves.
To test this module, put the IR Remote into beacon mode by pressing the button at the top
of the remote and making sure the green LED is on. Use channel 1 for this module. Move
the beacon around and watch the values that are printed.
Authors: David Fisher and PUT_YOUR_NAME_HERE. February 2017.
"""
import ev3dev.ev3 as ev3
import time
def main():
print("--------------------------------------------")
print(" Printing beacon seeking data")
print(" Press the touch sensor to exit")
print("--------------------------------------------")
ev3.Sound.speak("Printing beacon seeking").wait()
touch_sensor = ev3.TouchSensor()
ir_sensor = ev3.InfraredSensor()
assert touch_sensor
assert ir_sensor
ir_sensor.mode = "IR-SEEK"
while not touch_sensor.is_pressed:
current_heading = ir_sensor.value(0)
current_distance = ir_sensor.value(1)
print("IR Heading = {} Distance = {}".format(current_heading, current_distance))
time.sleep(0.5)
ev3.Sound.speak("Goodbye")
# ----------------------------------------------------------------------
# Calls main to start the ball rolling.
# ----------------------------------------------------------------------
main()
|
Rosebotics/cwc-projects
|
lego-ev3/examples/analog_sensors/ir_sensor/print_beacon_seeking.py
|
Python
|
gpl-3.0
| 1,582
| 0.003793
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2015 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.misc.split."""
import collections
import pytest
from qutebrowser.misc import split
# Most tests copied from Python's shlex.
# The original test data set was from shellwords, by Hartmut Goebel.
# Format: input/split|output|without|keep/split|output|with|keep/
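# For example, the first data line below, "one two/one|two/one| two/", means:
# input string 'one two', expected result of split() without keep ['one', 'two'],
# and expected result of split(..., keep=True) ['one', ' two'] (whitespace is
# preserved when keeping).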
test_data_str = r"""
one two/one|two/one| two/
one "two three" four/one|two three|four/one| "two three"| four/
one 'two three' four/one|two three|four/one| 'two three'| four/
one "two\" three" four/one|two" three|four/one| "two\" three"| four/
one 'two'\'' three' four/one|two' three|four/one| 'two'\'' three'| four/
one "two three/one|two three/one| "two three/
one 'two three/one|two three/one| 'two three/
one\/one\/one\/
one "two\/one|two\/one| "two\/
one /one/one| /
open -t i/open|-t|i/open| -t| i/
foo bar/foo|bar/foo| bar/
foo bar/foo|bar/ foo| bar/
foo bar /foo|bar/ foo| bar| /
foo bar bla fasel/foo|bar|bla|fasel/foo| bar| bla| fasel/
x y z xxxx/x|y|z|xxxx/x| y| z| xxxx/
\x bar/x|bar/\x| bar/
\ x bar/ x|bar/\ x| bar/
\ bar/ bar/\ bar/
foo \x bar/foo|x|bar/foo| \x| bar/
foo \ x bar/foo| x|bar/foo| \ x| bar/
foo \ bar/foo| bar/foo| \ bar/
foo "bar" bla/foo|bar|bla/foo| "bar"| bla/
"foo" "bar" "bla"/foo|bar|bla/"foo"| "bar"| "bla"/
"foo" bar "bla"/foo|bar|bla/"foo"| bar| "bla"/
"foo" bar bla/foo|bar|bla/"foo"| bar| bla/
foo 'bar' bla/foo|bar|bla/foo| 'bar'| bla/
'foo' 'bar' 'bla'/foo|bar|bla/'foo'| 'bar'| 'bla'/
'foo' bar 'bla'/foo|bar|bla/'foo'| bar| 'bla'/
'foo' bar bla/foo|bar|bla/'foo'| bar| bla/
blurb foo"bar"bar"fasel" baz/blurb|foobarbarfasel|baz/blurb| foo"bar"bar"fasel"| baz/
blurb foo'bar'bar'fasel' baz/blurb|foobarbarfasel|baz/blurb| foo'bar'bar'fasel'| baz/
""//""/
''//''/
foo "" bar/foo||bar/foo| ""| bar/
foo '' bar/foo||bar/foo| ''| bar/
foo "" "" "" bar/foo||||bar/foo| ""| ""| ""| bar/
foo '' '' '' bar/foo||||bar/foo| ''| ''| ''| bar/
\"/"/\"/
"\""/"/"\""/
"foo\ bar"/foo\ bar/"foo\ bar"/
"foo\\ bar"/foo\ bar/"foo\\ bar"/
"foo\\ bar\""/foo\ bar"/"foo\\ bar\""/
"foo\\" bar\"/foo\|bar"/"foo\\"| bar\"/
"foo\\ bar\" dfadf"/foo\ bar" dfadf/"foo\\ bar\" dfadf"/
"foo\\\ bar\" dfadf"/foo\\ bar" dfadf/"foo\\\ bar\" dfadf"/
"foo\\\x bar\" dfadf"/foo\\x bar" dfadf/"foo\\\x bar\" dfadf"/
"foo\x bar\" dfadf"/foo\x bar" dfadf/"foo\x bar\" dfadf"/
\'/'/\'/
'foo\ bar'/foo\ bar/'foo\ bar'/
'foo\\ bar'/foo\\ bar/'foo\\ bar'/
"foo\\\x bar\" df'a\ 'df"/foo\\x bar" df'a\ 'df/"foo\\\x bar\" df'a\ 'df"/
\"foo/"foo/\"foo/
\"foo\x/"foox/\"foo\x/
"foo\x"/foo\x/"foo\x"/
"foo\ "/foo\ /"foo\ "/
foo\ xx/foo xx/foo\ xx/
foo\ x\x/foo xx/foo\ x\x/
foo\ x\x\"/foo xx"/foo\ x\x\"/
"foo\ x\x"/foo\ x\x/"foo\ x\x"/
"foo\ x\x\\"/foo\ x\x\/"foo\ x\x\\"/
"foo\ x\x\\""foobar"/foo\ x\x\foobar/"foo\ x\x\\""foobar"/
"foo\ x\x\\"\'"foobar"/foo\ x\x\'foobar/"foo\ x\x\\"\'"foobar"/
"foo\ x\x\\"\'"fo'obar"/foo\ x\x\'fo'obar/"foo\ x\x\\"\'"fo'obar"/
"foo\ x\x\\"\'"fo'obar" 'don'\''t'/foo\ x\x\'fo'obar|don't/"foo\ x\x\\"\'"fo'obar"| 'don'\''t'/
"foo\ x\x\\"\'"fo'obar" 'don'\''t' \\/foo\ x\x\'fo'obar|don't|\/"foo\ x\x\\"\'"fo'obar"| 'don'\''t'| \\/
foo\ bar/foo bar/foo\ bar/
:-) ;-)/:-)|;-)/:-)| ;-)/
áéíóú/áéíóú/áéíóú/
"""
def _parse_split_test_data_str():
"""
Parse the test data set into a namedtuple to use in tests.
Returns:
A list of namedtuples with str attributes: input, keep, no_keep
"""
tuple_class = collections.namedtuple('TestCase', 'input, keep, no_keep')
for line in test_data_str.splitlines():
if not line:
continue
data = line.split('/')
item = tuple_class(input=data[0], keep=data[1].split('|'),
no_keep=data[2].split('|'))
yield item
yield tuple_class(input='', keep=[], no_keep=[])
class TestSplit:
"""Test split."""
@pytest.fixture(params=_parse_split_test_data_str(), ids=lambda e: e.input)
def split_test_case(self, request):
"""Fixture to automatically parametrize all depending tests.
It will use the test data from test_data_str, parsed using
_parse_split_test_data_str().
"""
return request.param
def test_split(self, split_test_case):
"""Test splitting."""
items = split.split(split_test_case.input)
assert items == split_test_case.keep
def test_split_keep_original(self, split_test_case):
"""Test if splitting with keep=True yields the original string."""
items = split.split(split_test_case.input, keep=True)
assert ''.join(items) == split_test_case.input
def test_split_keep(self, split_test_case):
"""Test splitting with keep=True."""
items = split.split(split_test_case.input, keep=True)
assert items == split_test_case.no_keep
class TestSimpleSplit:
"""Test simple_split."""
TESTS = {
' foo bar': [' foo', ' bar'],
'foobar': ['foobar'],
' foo bar baz ': [' foo', ' bar', ' baz', ' '],
'f\ti\ts\th': ['f', '\ti', '\ts', '\th'],
'foo\nbar': ['foo', '\nbar'],
}
@pytest.mark.parametrize('test', TESTS)
def test_str_split(self, test):
"""Test if the behavior matches str.split."""
assert split.simple_split(test) == test.rstrip().split()
@pytest.mark.parametrize('s, maxsplit',
[("foo bar baz", 1), (" foo bar baz ", 0)])
def test_str_split_maxsplit(self, s, maxsplit):
"""Test if the behavior matches str.split with given maxsplit."""
actual = split.simple_split(s, maxsplit=maxsplit)
expected = s.rstrip().split(maxsplit=maxsplit)
assert actual == expected
@pytest.mark.parametrize('test, expected', TESTS.items())
def test_split_keep(self, test, expected):
"""Test splitting with keep=True."""
assert split.simple_split(test, keep=True) == expected
def test_maxsplit_0_keep(self):
"""Test special case with maxsplit=0 and keep=True."""
s = "foo bar"
assert split.simple_split(s, keep=True, maxsplit=0) == [s]
|
artur-shaik/qutebrowser
|
tests/unit/misc/test_split.py
|
Python
|
gpl-3.0
| 6,878
| 0.000583
|
import re
import sys
class URI():
def __init__( self, root_path ):
super().__init__()
if root_path[-1] != '/' or root_path[0] != '/':
raise ValueError( 'root_path must start and end with "/"' )
self.root_path = root_path
self.uri_regex = re.compile( r'^({0}|/)(([a-zA-Z0-9\-_.!~*<>]+/)*)([a-zA-Z0-9\-_.!~*<>]+)?(:([a-zA-Z0-9\-_.!~*\'<>]*:)*)?(\([a-zA-Z0-9\-_.!~*<>]+\))?$'.format( self.root_path ) )
def split( self, uri, root_optional=False ):
uri_match = self.uri_regex.match( uri )
if not uri_match:
raise ValueError( 'Unable to parse URI "{0}"'.format( uri ) )
( root, namespace, _, model, rec_id, _, action ) = uri_match.groups()
if root != self.root_path and not root_optional:
raise ValueError( 'URI does not start in the root_path' )
if namespace != '':
namespace_list = namespace.rstrip( '/' ).split( '/' )
else:
namespace_list = []
if rec_id is not None:
id_list = rec_id.strip( ':' ).split( ':' )
multi = len( id_list ) > 1
else:
id_list = None # id_list = [] is an empty list of ids, where None means the list is not even present
multi = False
if action is not None:
action = action[ 1:-1 ]
return ( namespace_list, model, action, id_list, multi )
def build( self, namespace=None, model=None, action=None, id_list=None, in_root=True ):
"""
    build a URI. NOTE: if model is None, id_list and action are skipped
"""
if in_root:
result = self.root_path
else:
result = '/'
if namespace is not None:
if not isinstance( namespace, list ):
namespace = [ namespace ]
if len( namespace ) > 0:
result = '{0}{1}/'.format( result, '/'.join( namespace ) )
if model is None:
return result
result = '{0}{1}'.format( result, model )
if id_list is not None and id_list != []:
if not isinstance( id_list, list ):
id_list = [ id_list ]
result = '{0}:{1}:'.format( result, ':'.join( id_list ) )
if action is not None:
result = '{0}({1})'.format( result, action )
return result
def extractIds( self, uri_list ): # TODO: should we make sure the namespace/model do not change in the list?
"""
    extract the record IDs from the URIs in uri_list; can handle some/all/none
    of the URIs already having multiple IDs in them. Does not force uniqueness;
    order should remain intact.
"""
if isinstance( uri_list, str ):
uri_list = [ uri_list ]
if not isinstance( uri_list, list ):
raise ValueError( 'uri_list must be string or list of strings' )
result = []
for uri in uri_list:
uri_match = self.uri_regex.match( uri )
if not uri_match:
raise ValueError( 'Unable to parse URI "{0}"'.format( uri ) )
( _, _, _, _, rec_id, _, _ ) = uri_match.groups()
if rec_id is None:
continue
result += rec_id.strip( ':' ).split( ':' )
return result
def uriListToMultiURI( self, uri_list ):
"""
    runs extractIds on the list, then takes the first URI and applies all
    the ids to it
"""
if not uri_list:
return []
id_list = self.extractIds( uri_list )
if not id_list:
return []
( namespace_list, model, action, _, _ ) = self.split( uri_list[0] )
return self.build( namespace_list, model, action, id_list, True )
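# A minimal usage sketch of the URI helper above (the root path, namespaces,
# model name and ids are made-up illustrative values, not part of this module):
#
#   uri = URI( '/api/v1/' )
#   uri.split( '/api/v1/ns1/ns2/Model:12:34:(action)' )
#   # -> ( [ 'ns1', 'ns2' ], 'Model', 'action', [ '12', '34' ], True )
#   uri.build( namespace=[ 'ns1', 'ns2' ], model='Model', action='action', id_list=[ '12', '34' ] )
#   # -> '/api/v1/ns1/ns2/Model:12:34:(action)'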
# borrowed from https://www.python.org/dev/peps/pep-0257/
def doccstring_prep( docstring ):
if not docstring:
return ''
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
indent = sys.maxsize
for line in lines[ 1: ]:
stripped = line.lstrip()
if stripped:
indent = min( indent, len( line ) - len( stripped ) )
# Remove indentation (first line is special):
trimmed = [ lines[0].strip() ]
if indent < sys.maxsize:
for line in lines[1:]:
trimmed.append( line[ indent: ].rstrip() )
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop( 0 )
# Return a single string:
return '\n'.join( trimmed )
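# For illustration, the trimming above maps a docstring such as
# 'Summary line.\n\n        Indented detail.\n    ' to
# 'Summary line.\n\nIndented detail.' -- the common indentation of the
# continuation lines is removed and leading/trailing blank lines are dropped.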
|
cinp/python
|
cinp/common.py
|
Python
|
apache-2.0
| 4,279
| 0.038093
|
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import tempfile
from units.compat import unittest
from units.compat.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
    except ValueError:
pass
fixture_data[path] = data
return data
class AnsibleExitJson(Exception):
pass
class AnsibleFailJson(Exception):
pass
class TestCnosModule(unittest.TestCase):
def setUp(self):
super(TestCnosModule, self).setUp()
self.test_log = tempfile.mkstemp(prefix='ansible-test-cnos-module-', suffix='.log')[1]
def tearDown(self):
super(TestCnosModule, self).tearDown()
os.remove(self.test_log)
def execute_module(self, failed=False, changed=False, commands=None,
sort=True, defaults=False):
self.load_fixtures(commands)
if failed:
result = self.failed()
self.assertTrue(result['failed'], result)
else:
result = self.changed(changed)
self.assertEqual(result['changed'], changed, result)
if commands is not None:
if sort:
self.assertEqual(sorted(commands), sorted(result['commands']),
result['commands'])
else:
self.assertEqual(commands, result['commands'],
result['commands'])
return result
def failed(self):
def fail_json(*args, **kwargs):
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
with patch.object(basic.AnsibleModule, 'fail_json', fail_json):
with self.assertRaises(AnsibleFailJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'], result)
return result
def changed(self, changed=False):
def exit_json(*args, **kwargs):
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
with patch.object(basic.AnsibleModule, 'exit_json', exit_json):
with self.assertRaises(AnsibleExitJson) as exc:
self.module.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], changed, result)
return result
def load_fixtures(self, commands=None):
pass
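# A hypothetical sketch of a concrete test case built on the helper class above.
# The module name, fixture wiring and assertion are illustrative only and are
# not taken from the real test suite:
#
#   class TestCnosFactsModule(TestCnosModule):
#
#       module = cnos_facts  # the Ansible module under test (imported elsewhere)
#
#       def load_fixtures(self, commands=None):
#           # patch the module's command runner so it returns load_fixture(...) data
#           ...
#
#       def test_facts_gathering(self):
#           result = self.execute_module(changed=False)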
|
sgerhart/ansible
|
test/units/modules/network/cnos/cnos_module.py
|
Python
|
mit
| 3,502
| 0.000571
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gitiles_poller
def Update(config, active_master, c):
master_poller = gitiles_poller.GitilesPoller(
'https://chromium.googlesource.com/external/mojo')
c['change_source'].append(master_poller)
|
eunchong/build
|
masters/master.client.mojo/master_source_cfg.py
|
Python
|
bsd-3-clause
| 386
| 0.005181
|
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets('data', one_hot=True)
mnist_train = mnist.train
mnist_val = mnist.validation
p = 28 * 28
n = 10
h1 = 300
func_act = tf.nn.sigmoid
x_pl = tf.placeholder(dtype=tf.float32, shape=[None, p])
y_pl = tf.placeholder(dtype=tf.float32, shape=[None, n])
w1 = tf.Variable(tf.truncated_normal(shape=[p, h1], stddev=0.1))
b1 = tf.Variable(tf.zeros(shape=[h1]))
w2 = tf.Variable(tf.truncated_normal(shape=[h1, n], stddev=0.1))
b2 = tf.Variable(tf.zeros(shape=[n]))
hidden1 = func_act(tf.matmul(x_pl, w1) + b1)
y_pre = tf.matmul(hidden1, w2) + b2
y_ = tf.nn.softmax(y_pre)
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_pl, logits=y_pre))
correct_prediction = tf.equal(tf.argmax(y_pl, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
eta = 0.3
train_op = tf.train.AdagradOptimizer(learning_rate=eta).minimize(cross_entropy)
batch_size = 50
batch_per_epoch = mnist_train.num_examples // batch_size
epoch = 2
with tf.Session() as sess:
tf.global_variables_initializer().run()
x_val = mnist_val.images
y_val = mnist_val.labels
val_fd = {x_pl: x_val, y_pl: y_val}
for ep in range(epoch):
print(f'Epoch {ep+1}:')
for sp in range(batch_per_epoch):
xtr, ytr = mnist_train.next_batch(batch_size)
loss_value, _ = sess.run([cross_entropy, train_op], feed_dict={x_pl: xtr, y_pl: ytr})
if sp == 0 or (sp + 1) % 100 == 0:
print(f'Loss: {loss_value:.4f}')
acc = sess.run(accuracy, feed_dict=val_fd)
print(f'Validation Acc: {acc:.4f}')
|
bm2-lab/MLClass
|
cgh_deep_learning/mnist_mlp.py
|
Python
|
apache-2.0
| 1,723
| 0.001741
|
import _plotly_utils.basevalidators
class TickvalsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(self, plotly_name="tickvals", parent_name="carpet.baxis", **kwargs):
super(TickvalsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/carpet/baxis/_tickvals.py
|
Python
|
mit
| 407
| 0.002457
|
from django.contrib import admin
from .models import Grant, AccessToken, RefreshToken, get_application_model
class ApplicationAdmin(admin.ModelAdmin):
list_display = ("name", "user", "client_type", "authorization_grant_type")
list_filter = ("client_type", "authorization_grant_type", "skip_authorization")
radio_fields = {
"client_type": admin.HORIZONTAL,
"authorization_grant_type": admin.VERTICAL,
}
raw_id_fields = ("user", )
class GrantAdmin(admin.ModelAdmin):
list_display = ("code", "application", "user", "expires")
raw_id_fields = ("user", )
class AccessTokenAdmin(admin.ModelAdmin):
list_display = ("token", "user", "application", "expires")
raw_id_fields = ("user", )
class RefreshTokenAdmin(admin.ModelAdmin):
list_display = ("token", "user", "application")
raw_id_fields = ("user", "access_token")
Application = get_application_model()
admin.site.register(Application, ApplicationAdmin)
admin.site.register(Grant, GrantAdmin)
admin.site.register(AccessToken, AccessTokenAdmin)
admin.site.register(RefreshToken, RefreshTokenAdmin)
|
StepicOrg/django-oauth-toolkit
|
oauth2_provider/admin.py
|
Python
|
bsd-2-clause
| 1,114
| 0.000898
|
import os
import sys
# Add parent directory to path to make test aware of other modules
srcfolder = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', "src"))
if srcfolder not in sys.path:
sys.path.append(srcfolder)
|
lmotta/Roam
|
tests/__init__.py
|
Python
|
gpl-2.0
| 234
| 0.008547
|
import logging
import os
import re
import select
import subprocess
import threading
import time
__all__ = [
'ExternalService',
'SpawnedService',
]
log = logging.getLogger(__name__)
class ExternalService(object):
def __init__(self, host, port):
log.info("Using already running service at %s:%d", host, port)
self.host = host
self.port = port
def open(self):
pass
def close(self):
pass
class SpawnedService(threading.Thread):
def __init__(self, args=None, env=None):
threading.Thread.__init__(self)
if args is None:
raise TypeError("args parameter is required")
self.args = args
self.env = env
self.captured_stdout = []
self.captured_stderr = []
self.should_die = threading.Event()
self.child = None
self.alive = False
def run(self):
self.run_with_handles()
def _spawn(self):
if self.alive: return
if self.child and self.child.poll() is None: return
self.child = subprocess.Popen(
self.args,
preexec_fn=os.setsid, # to avoid propagating signals
env=self.env,
bufsize=1,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
self.alive = True
def _despawn(self):
if self.child.poll() is None:
self.child.terminate()
self.alive = False
for _ in range(50):
if self.child.poll() is not None:
self.child = None
break
time.sleep(0.1)
else:
self.child.kill()
def run_with_handles(self):
self._spawn()
while True:
(rds, _, _) = select.select([self.child.stdout, self.child.stderr], [], [], 1)
if self.child.stdout in rds:
line = self.child.stdout.readline()
self.captured_stdout.append(line.decode('utf-8'))
if self.child.stderr in rds:
line = self.child.stderr.readline()
self.captured_stderr.append(line.decode('utf-8'))
if self.child.poll() is not None:
self.dump_logs()
self._spawn()
if self.should_die.is_set():
self._despawn()
break
def dump_logs(self):
log.critical('stderr')
for line in self.captured_stderr:
log.critical(line.rstrip())
log.critical('stdout')
for line in self.captured_stdout:
log.critical(line.rstrip())
def wait_for(self, pattern, timeout=30):
t1 = time.time()
while True:
t2 = time.time()
if t2 - t1 >= timeout:
try:
self.child.kill()
except:
log.exception("Received exception when killing child process")
self.dump_logs()
log.error("Waiting for %r timed out after %d seconds", pattern, timeout)
return False
if re.search(pattern, '\n'.join(self.captured_stdout), re.IGNORECASE) is not None:
log.info("Found pattern %r in %d seconds via stdout", pattern, (t2 - t1))
return True
if re.search(pattern, '\n'.join(self.captured_stderr), re.IGNORECASE) is not None:
log.info("Found pattern %r in %d seconds via stderr", pattern, (t2 - t1))
return True
time.sleep(0.1)
def start(self):
threading.Thread.start(self)
def stop(self):
self.should_die.set()
self.join()
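# A minimal usage sketch of SpawnedService (the command line below is made up;
# any long-running process that writes to stdout/stderr would do):
#
#   svc = SpawnedService(args=['my-server', '--port', '9092'])
#   svc.start()
#   if not svc.wait_for(r'started', timeout=30):
#       raise RuntimeError('service did not come up in time')
#   ...
#   svc.stop()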
|
gamechanger/kafka-python
|
test/service.py
|
Python
|
apache-2.0
| 3,648
| 0.003289
|
from django.conf.urls import patterns, include, url
from service_order import views, data_views
urlpatterns = patterns('',
url(r'^$', views.index),
url(r'^order_state_machine/$', views.order_state_machine),
url(r'^make_order/$', views.make_order),
url(r'^make_order2/$', views.make_order2),
url(r'^make_order3/$', views.make_order3),
url(r'^create_order/$', views.create_order),
url(r'^view_refund_sheet/$', views.view_refund_sheet),
url(r'^data/validate_code/', data_views.validate_code),
)
|
yejia/order_system
|
service_order/urls.py
|
Python
|
mit
| 542
| 0.00369
|
"""The tests for the google calendar component."""
# pylint: disable=protected-access
import logging
import unittest
from unittest.mock import patch, Mock
import pytest
import homeassistant.components.calendar as calendar_base
from homeassistant.components.google import calendar
import homeassistant.util.dt as dt_util
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers.template import DATE_STR_FORMAT
from tests.common import get_test_home_assistant, MockDependency
TEST_PLATFORM = {calendar_base.DOMAIN: {CONF_PLATFORM: 'test'}}
_LOGGER = logging.getLogger(__name__)
class TestComponentsGoogleCalendar(unittest.TestCase):
"""Test the Google calendar."""
hass = None # HomeAssistant
# pylint: disable=invalid-name
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.hass.http = Mock()
# Set our timezone to CST/Regina so we can check calculations
# This keeps UTC-6 all year round
dt_util.set_default_time_zone(dt_util.get_time_zone('America/Regina'))
# pylint: disable=invalid-name
def tearDown(self):
"""Stop everything that was started."""
dt_util.set_default_time_zone(dt_util.get_time_zone('UTC'))
self.hass.stop()
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_all_day_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
week_from_today = dt_util.dt.date.today() \
+ dt_util.dt.timedelta(days=7)
event = {
'summary': 'Test All Day Event',
'start': {
'date': week_from_today.isoformat()
},
'end': {
'date': (week_from_today + dt_util.dt.timedelta(days=1))
.isoformat()
},
'location': 'Test Cases',
'description': 'We\'re just testing that all day events get setup '
'correctly',
'kind': 'calendar#event',
'created': '2016-06-23T16:37:57.000Z',
'transparency': 'transparent',
'updated': '2016-06-24T01:57:21.045Z',
'reminders': {'useDefault': True},
'organizer': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True
},
'sequence': 0,
'creator': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True
},
'id': '_c8rinwq863h45qnucyoi43ny8',
'etag': '"2933466882090000"',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
'iCalUID': 'cydrevtfuybguinhomj@google.com',
'status': 'confirmed'
}
mock_next_event.return_value.event = event
device_name = 'Test All Day'
cal = calendar.GoogleCalendarEventDevice(self.hass, None,
'', {'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event['summary'],
'all_day': True,
'offset_reached': False,
'start_time': '{} 00:00:00'.format(event['start']['date']),
'end_time': '{} 00:00:00'.format(event['end']['date']),
'location': event['location'],
'description': event['description'],
}
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_future_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
one_hour_from_now = dt_util.now() \
+ dt_util.dt.timedelta(minutes=30)
event = {
'start': {
'dateTime': one_hour_from_now.isoformat()
},
'end': {
'dateTime': (one_hour_from_now
+ dt_util.dt.timedelta(minutes=60))
.isoformat()
},
'summary': 'Test Event in 30 minutes',
'reminders': {'useDefault': True},
'id': 'aioehgni435lihje',
'status': 'confirmed',
'updated': '2016-11-05T15:52:07.329Z',
'organizer': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True,
},
'created': '2016-11-05T15:52:07.000Z',
'iCalUID': 'dsfohuygtfvgbhnuju@google.com',
'sequence': 0,
'creator': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
},
'etag': '"2956722254658000"',
'kind': 'calendar#event',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
}
mock_next_event.return_value.event = event
device_name = 'Test Future Event'
device_id = 'test_future_event'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event['summary'],
'all_day': False,
'offset_reached': False,
'start_time': one_hour_from_now.strftime(DATE_STR_FORMAT),
'end_time':
(one_hour_from_now + dt_util.dt.timedelta(minutes=60))
.strftime(DATE_STR_FORMAT),
'location': '',
'description': '',
}
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_in_progress_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
middle_of_event = dt_util.now() \
- dt_util.dt.timedelta(minutes=30)
event = {
'start': {
'dateTime': middle_of_event.isoformat()
},
'end': {
'dateTime': (middle_of_event + dt_util.dt
.timedelta(minutes=60))
.isoformat()
},
'summary': 'Test Event in Progress',
'reminders': {'useDefault': True},
'id': 'aioehgni435lihje',
'status': 'confirmed',
'updated': '2016-11-05T15:52:07.329Z',
'organizer': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True,
},
'created': '2016-11-05T15:52:07.000Z',
'iCalUID': 'dsfohuygtfvgbhnuju@google.com',
'sequence': 0,
'creator': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
},
'etag': '"2956722254658000"',
'kind': 'calendar#event',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
}
mock_next_event.return_value.event = event
device_name = 'Test Event in Progress'
device_id = 'test_event_in_progress'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_ON
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event['summary'],
'all_day': False,
'offset_reached': False,
'start_time': middle_of_event.strftime(DATE_STR_FORMAT),
'end_time':
(middle_of_event + dt_util.dt.timedelta(minutes=60))
.strftime(DATE_STR_FORMAT),
'location': '',
'description': '',
}
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_offset_in_progress_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
middle_of_event = dt_util.now() \
+ dt_util.dt.timedelta(minutes=14)
event_summary = 'Test Event in Progress'
event = {
'start': {
'dateTime': middle_of_event.isoformat()
},
'end': {
'dateTime': (middle_of_event + dt_util.dt
.timedelta(minutes=60))
.isoformat()
},
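            # the '!!-15' suffix in the summary requests a 15 minute offset, so offset_reached flips on before the start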
'summary': '{} !!-15'.format(event_summary),
'reminders': {'useDefault': True},
'id': 'aioehgni435lihje',
'status': 'confirmed',
'updated': '2016-11-05T15:52:07.329Z',
'organizer': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True,
},
'created': '2016-11-05T15:52:07.000Z',
'iCalUID': 'dsfohuygtfvgbhnuju@google.com',
'sequence': 0,
'creator': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
},
'etag': '"2956722254658000"',
'kind': 'calendar#event',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
}
mock_next_event.return_value.event = event
device_name = 'Test Event in Progress'
device_id = 'test_event_in_progress'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert cal.offset_reached()
assert cal.device_state_attributes == {
'message': event_summary,
'all_day': False,
'offset_reached': True,
'start_time': middle_of_event.strftime(DATE_STR_FORMAT),
'end_time':
(middle_of_event + dt_util.dt.timedelta(minutes=60))
.strftime(DATE_STR_FORMAT),
'location': '',
'description': '',
}
@pytest.mark.skip
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_all_day_offset_in_progress_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
tomorrow = dt_util.dt.date.today() \
+ dt_util.dt.timedelta(days=1)
event_summary = 'Test All Day Event Offset In Progress'
event = {
'summary': '{} !!-25:0'.format(event_summary),
'start': {
'date': tomorrow.isoformat()
},
'end': {
'date': (tomorrow + dt_util.dt.timedelta(days=1))
.isoformat()
},
'location': 'Test Cases',
'description': 'We\'re just testing that all day events get setup '
'correctly',
'kind': 'calendar#event',
'created': '2016-06-23T16:37:57.000Z',
'transparency': 'transparent',
'updated': '2016-06-24T01:57:21.045Z',
'reminders': {'useDefault': True},
'organizer': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True
},
'sequence': 0,
'creator': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True
},
'id': '_c8rinwq863h45qnucyoi43ny8',
'etag': '"2933466882090000"',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
'iCalUID': 'cydrevtfuybguinhomj@google.com',
'status': 'confirmed'
}
mock_next_event.return_value.event = event
device_name = 'Test All Day Offset In Progress'
device_id = 'test_all_day_offset_in_progress'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert cal.offset_reached()
assert cal.device_state_attributes == {
'message': event_summary,
'all_day': True,
'offset_reached': True,
'start_time': '{} 06:00:00'.format(event['start']['date']),
'end_time': '{} 06:00:00'.format(event['end']['date']),
'location': event['location'],
'description': event['description'],
}
@patch('homeassistant.components.google.calendar.GoogleCalendarData')
def test_all_day_offset_event(self, mock_next_event):
"""Test that we can create an event trigger on device."""
tomorrow = dt_util.dt.date.today() \
+ dt_util.dt.timedelta(days=2)
offset_hours = (1 + dt_util.now().hour)
event_summary = 'Test All Day Event Offset'
event = {
'summary': '{} !!-{}:0'.format(event_summary, offset_hours),
'start': {
'date': tomorrow.isoformat()
},
'end': {
'date': (tomorrow + dt_util.dt.timedelta(days=1))
.isoformat()
},
'location': 'Test Cases',
'description': 'We\'re just testing that all day events get setup '
'correctly',
'kind': 'calendar#event',
'created': '2016-06-23T16:37:57.000Z',
'transparency': 'transparent',
'updated': '2016-06-24T01:57:21.045Z',
'reminders': {'useDefault': True},
'organizer': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True
},
'sequence': 0,
'creator': {
'email': 'uvrttabwegnui4gtia3vyqb@import.calendar.google.com',
'displayName': 'Organizer Name',
'self': True
},
'id': '_c8rinwq863h45qnucyoi43ny8',
'etag': '"2933466882090000"',
'htmlLink': 'https://www.google.com/calendar/event?eid=*******',
'iCalUID': 'cydrevtfuybguinhomj@google.com',
'status': 'confirmed'
}
mock_next_event.return_value.event = event
device_name = 'Test All Day Offset'
device_id = 'test_all_day_offset'
cal = calendar.GoogleCalendarEventDevice(self.hass, None, device_id,
{'name': device_name})
assert cal.name == device_name
assert cal.state == STATE_OFF
assert not cal.offset_reached()
assert cal.device_state_attributes == {
'message': event_summary,
'all_day': True,
'offset_reached': False,
'start_time': '{} 00:00:00'.format(event['start']['date']),
'end_time': '{} 00:00:00'.format(event['end']['date']),
'location': event['location'],
'description': event['description'],
}
@MockDependency("httplib2")
def test_update_false(self, mock_httplib2):
"""Test that the update returns False upon Error."""
mock_service = Mock()
mock_service.get = Mock(
side_effect=mock_httplib2.ServerNotFoundError("unit test"))
cal = calendar.GoogleCalendarEventDevice(self.hass, mock_service, None,
{'name': "test"})
result = cal.data.update()
assert not result
|
PetePriority/home-assistant
|
tests/components/google/test_calendar.py
|
Python
|
apache-2.0
| 16,362
| 0
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from socorro.unittest.cron.setup_configman import (
get_config_manager_for_crontabber,
)
from crontabber.tests import base
class IntegrationTestBase(base.IntegrationTestCaseBase):
@classmethod
def get_standard_config(cls):
"""this method overrides the crontabber version of the same name.
It is not used by Socorro clients directly, but the base crontabber
class uses this method during setup. By overriding the implementation
here, we get a default Socorro configuration file with many of the
standard Socorro defaults already in place: logging, executors, etc.
        This allows the bootstrapping of the integration tests to participate
        fully with the environment variables, commandline arguments, and
        configuration files that the Socorro installation/test system of
Makefiles and shell scripts offers"""
config = get_config_manager_for_crontabber().get_config()
return config
|
AdrianGaudebert/socorro
|
socorro/unittest/cron/jobs/base.py
|
Python
|
mpl-2.0
| 1,178
| 0
|
raise Exception("tests where moved to emzed to avoid circular dependencies")
|
uweschmitt/emzed_optimizations
|
tests/test_sample.py
|
Python
|
bsd-3-clause
| 77
| 0
|
import sys
from craystack import cf
if len(sys.argv) < 4:
print "Usage: %s <key> <subkey> <path>" % sys.argv[0]
sys.exit(2)
_, key, subkey, filename = sys.argv
with open(filename) as f:
content = f.read()
cf.insert(key, {subkey: content})
print "Uploaded %s to %s/%s (%s bytes)" % (filename, key, subkey, len(content))
|
rbranson/craystack
|
upload.py
|
Python
|
bsd-3-clause
| 343
| 0.002915
|
import re
import svgwrite
import math
class LantexBase(object):
def __init__(self):
self.identifier = None
self.description = None
self.properties = [ 'description' ]
def __repr__(self):
out = "{0} {1}:\n".format(type(self).__name__, self.identifier)
for p in self.properties:
val = getattr(self, p)
if val != None:
out += "{0}: {1}\n".format(p, val)
return out + "\n"
def valid_property(self, p):
return p in self.properties
class Drawable(object):
"""
Base class for Drawable things. For simplicity, we're going to use a
rectangle with ports and an identifier as a starting point for every
object.
"""
def __init__(self):
# Holds drawing data
self.drawing = {'margin' : 4,
'port_size': 10}
self.__drawinit__()
def __drawinit__(self):
"""
Adds drawing constants to the drawing dict
"""
raise NotImplementedError("Function hasn't been implemented")
def calc_size(self, env):
"""
Returns the width and height of the object that will be drawn. Used to
work out where to position it. Returns (width, height). If the child
hasn't implemented it we'll just use our base function
"""
return self.calc_size_base(env)
def calc_size_base(self, env):
"""
The width will either be ports_per_row * ports or the width of the
identifier, whichever is larger.
"""
m = self.drawing['margin']
# Identifier width
id_width = len(self.identifier) * env.font.width
id_width_m = m + id_width + m
ppr = None
if 'ports_per_row' in self.drawing:
ppr = self.drawing['ports_per_row']
else:
# Work out the number of ports that can fit on each row if we use
# the identifier width
ppr = math.floor(id_width / (m + self.drawing['port_size']))
self.drawing['ports_per_row'] = ppr
rows = math.ceil(len(self.ports) / ppr)
ports_width = (self.drawing['port_size'] + m) * len(self.ports)
ports_width -= m
port_w = m + ports_width + m
h = m + env.font.height + m + ((self.drawing['port_size'] + m) * rows)
if id_width_m >= port_w:
w = id_width_m
else:
w = port_w
self.drawing['rows'] = rows
self.drawing['w'] = w
self.drawing['h'] = h
self.drawing['ports_width'] = ports_width
return w, h
def draw(self, env):
"""
Env is an instance of DrawEnv. If the child class hasn't implemented the
        draw function then we'll use our base draw function
"""
self.draw_base(env)
def draw_base(self, env):
# Create a group for the object
g = env.dwg.add(env.dwg.g(id='{0}-{1}'.format(self.__class__.__name__,
self.identifier)))
env.dwg_g = g
# Draw the outside rectangle
x, y = env.x, env.y
bgcol = env.colors['bg']['base2'].rgb
stcol = env.colors['bg']['base02'].rgb
g.add(env.dwg.rect(insert=(x, y),
size=(self.drawing['w'], self.drawing['h']),
fill=bgcol,
stroke=stcol))
# Add it's identifier
x += self.drawing['margin']
y += self.drawing['margin'] + env.font.height
g.add(env.dwg.text(self.identifier, insert=(x,y)))
# Work out where to draw first port by working out the width and
# where x needs to be to center it
ports_width = self.drawing['ports_width']
row_startx = env.x + int(round((self.drawing['w'] - ports_width) / 2))
x = row_startx
y += self.drawing['margin']
# Draw each port
for p in self.ports:
            # If we're on a new row then wrap back to the start of the row
if p.identifier % (self.drawing['ports_per_row'] + 1) == 0:
y += (self.drawing['port_size'] + self.drawing['margin'])
x = row_startx
fgcol = env.colors['fg']['green'].rgb
g.add(env.dwg.rect(insert=(x, y),
size=(self.drawing['port_size'],
self.drawing['port_size']),
fill=fgcol,
stroke=stcol))
x += (self.drawing['port_size'] + self.drawing['margin'])
class UnresolvedIdentifier(object):
"""
Represents a variable that might exist in the future but we can't find
it yet.
"""
instance_list = []
@staticmethod
def new(identifier):
"""
If we already have an instance for this identifier, don't make a new
one
"""
for i in UnresolvedIdentifier.instance_list:
if i.identifier == identifier:
return i
# Didn't find it so make a new one
ui = UnresolvedIdentifier(identifier)
UnresolvedIdentifier.instance_list.append(ui)
return ui
@staticmethod
def resolve_all(entities, instance_list):
if len(instance_list) == 0:
raise ValueError("No unresolved identifiers")
for i in instance_list:
found = False
for e in entities:
if i.identifier == e.identifier:
# Found the entity we want
i.resolved = e
found = True
if found == False:
raise ValueError("Couldn't resolve identifier"
" {0}".format(i.identifier))
def __init__(self, identifier):
"""
Should only be called by our static method new
"""
self.identifier = identifier
self.resolved = None
def __repr__(self):
if self.resolved != None:
return "ResolvedIdentifier {0}".format(self.resolved.__repr__())
else:
return "UnresolvedIdentifier {0}".format(self.identifier)
class Connection(object):
"""
Connects a port of an entity to the port of another entity
"""
def __init__(self):
self.from_e = None
self.to_e = None
# 1 based port indexes
self.from_i = None
self.to_i = None
def __repr__(self):
out = "Connection: {0}->{1} : ".format(self.from_e.identifier,
self.from_i)
if self.to_i == None:
out += self.to_e.identifier
else:
out += "{0}->{1}".format(self.to_e.identifier, self.to_i)
return out
def update_ports(self):
"""
Update the port.networks entries for the relevant entity
"""
if self.to_i != None:
self.from_e.ports[self.from_i - 1].networks = self.to_e.ports[self.to_i - 1].networks
else:
self.from_e.ports[self.from_i - 1].networks = [self.to_e]
class Addressable(LantexBase):
def __init__(self):
super().__init__()
self._v4 = None
self._v6 = None
self._v4_range = None
self._v6_range = None
self._v4_gateway = None
self._v6_gateway = None
self.properties.append('v4')
self.properties.append('v6')
self.properties.append('v4_range')
self.properties.append('v6_range')
self.properties.append('v4_gateway')
self.properties.append('v6_gateway')
@property
def v4_gateway(self):
return self._v4_gateway
@v4_gateway.setter
def v4_gateway(self, value):
"""
Can either be an UnresolvedIdentifier or IPv4Addr
"""
try:
ip = IPv4Addr(value)
self._v4_gateway = ip
except ValueError:
self._v4_gateway = UnresolvedIdentifier(value)
@property
def v6_gateway(self):
return self._v6_gateway
@v6_gateway.setter
def v6_gateway(self, value):
"""
Can either be an UnresolvedIdentifier or IPv6Addr
"""
try:
ip = IPv6Addr(value)
self._v6_gateway = ip
except ValueError:
self._v6_gateway = UnresolvedIdentifier(value)
@property
def v4_range(self):
return self._v4_range
@v4_range.setter
def v4_range(self, value):
ip = IPv4Addr(value)
self._v4_range = ip
@property
def v6_range(self):
return self._v6_range
@v6_range.setter
def v6_range(self, value):
ip = IPv6Addr(value)
self._v6_range = ip
@property
def v4(self):
return self._v4
@v4.setter
def v4(self, value):
if self._v4 == None:
self._v4 = {}
network = 'unknown'
if type(value) is tuple:
if type(value[0]) is Network:
network = value[0]
value = value[1]
else:
raise ValueError("Bad tuple {0} for v4 address".format(value))
ip = IPv4Addr(value)
if network in self._v4:
self._v4[network].append(ip)
else:
self._v4[network] = [ip]
@property
def v6(self):
return self._v6
@v6.setter
def v6(self, value):
if self._v6 == None:
self._v6 = {}
network = 'unknown'
if type(value) is tuple:
if type(value[0]) is Network:
network = value[0]
value = value[1]
else:
raise ValueError("Bad tuple {0} for v6 address".format(value))
ip = IPv6Addr(value)
if network in self._v6:
self._v6[network].append(ip)
else:
self._v6[network] = [ip]
class IPAddr(object):
def __init__(self, addr):
self.value = None
m = re.search(self.regex, addr)
if m:
self.value = addr
else:
raise ValueError("Invalid {0} {1}".format(self.__class__.__name__,
addr))
def __repr__(self):
return "{0}: {1}".format(self.__class__.__name__, self.value)
class IPv4Addr(IPAddr):
regex = '(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
def __init__(self, addr):
super().__init__(addr)
class IPv6Addr(IPAddr):
regex = '(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]).){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))'
def __init__(self, addr):
super().__init__(addr)
class Port(LantexBase):
def __init__(self, index):
LantexBase.__init__(self)
self.identifier = index
self.networks = []
self.pvid = None
self.properties.append('networks')
self.properties.append('pvid')
class Ports(object):
"""
Base class for an object with ports. Will be used with multiple inheritance
"""
def __init__(self):
self._ports = None
self.properties.append('ports')
@property
def ports(self):
return self._ports
@ports.setter
def ports(self, number):
try:
number = int(number)
except:
raise ValueError("Can't convert port number {0}"
" to an int".format(number))
self._ports = []
for i in range(0, number):
self._ports.append(Port(i+1))
@property
def networks(self):
"""
        Return a list of all of the networks accessible across the entire
device (i.e. all ports).
"""
networks = {}
for p in self._ports:
for n in p.networks:
if n not in networks:
networks[n] = None
return list(networks.keys())
class Route(object):
def __init__(self, target, via):
self.target = target
self.via = via
def __repr__(self):
out = "Route: {0} via {1}".format(self.target.identifier,
self.via.identifier)
return out
class Routes(object):
def __init__(self):
self.routes = []
self.properties.append('routes')
class Switch(Addressable, Ports, Drawable):
def __init__(self):
Addressable.__init__(self)
Ports.__init__(self)
Drawable.__init__(self)
self._managed = None
self._default_pvid = None
self.properties.append('managed')
self.properties.append('default_pvid')
self.properties.append('network_pmap')
@property
def managed(self):
return self._managed
@managed.setter
def managed(self, value):
# Expecting a string either true or false
if value.lower() == 'true':
self._managed = True
elif value.lower() == 'false':
self._managed = False
else:
raise ValueError("Invalid value {0} for property managed".format(value))
@property
def default_pvid(self):
return self._default_pvid
@default_pvid.setter
def default_pvid(self, pvid):
try:
pvid = int(pvid)
except:
            raise ValueError("Can't convert pvid {0} to an int".format(pvid))
self._default_pvid = pvid
for p in self._ports:
if p.pvid == None:
p.pvid = pvid
@property
def network_pmap(self):
"""
Create a dictionary mapping network names to ports / port ranges
"""
if self._ports == None:
return None
out = {}
for p in self._ports:
for n in p.networks:
if n.identifier in out:
out[n.identifier].append(p.identifier)
else:
out[n.identifier] = [p.identifier]
return out
@network_pmap.setter
def network_pmap(self, map_dict):
for network, ports in map_dict.items():
# Don't care if the network exists for now. We'll try and resolve
# it later.
n = UnresolvedIdentifier.new(network)
# Try and match for a range of numbers like 1-8
m = re.search('(\d+)-(\d+)', ports)
if m:
range_from = int(m.group(1)) - 1
range_to = int(m.group(2))
for port in range(range_from, range_to):
self._ports[port].networks.append(n)
else:
raise ValueError("Not sure what to do with ports"
" {0}".format(ports))
def __drawinit__(self):
self.drawing['ports_per_row'] = 8
class AccessPoint(Addressable, Ports, Drawable):
def __init__(self):
Addressable.__init__(self)
Ports.__init__(self)
Drawable.__init__(self)
self._network_ssidmap = None
self.properties.append('network_ssidmap')
@property
def network_ssidmap(self):
return self._network_ssidmap
@network_ssidmap.setter
def network_ssidmap(self, map_dict):
# Will have a dictionary mapping a Network to an SSID
# Networks will be an unresolved identifier for now
if self._network_ssidmap == None:
self._network_ssidmap = {}
else:
raise ValueError("Network ssidmap already set: {0}".format(
self._network_ssidmap))
for network, ssid in map_dict.items():
n = UnresolvedIdentifier.new(network)
if n not in self._network_ssidmap:
self._network_ssidmap[n] = ssid
else:
raise ValueError("Network {0} already exists in network ssidmap: "
"{1}".format(network, self._network_ssidmap))
def __drawinit__(self):
self.drawing['antenna_width_ratio'] = 0.05
self.drawing['antenna_height_ratio'] = 2
def calc_size(self, env):
w, h = self.calc_size_base(env)
"""
We now have a base rectangle calculated but want to add two extra
rectangles on the top to represent antennas. Width will stay the same
though.
"""
old_h = h
h = old_h * self.drawing['antenna_height_ratio']
self.drawing['h_with_ant'] = h
ant_w = self.drawing['w'] * self.drawing['antenna_width_ratio']
self.drawing['ant_w'] = ant_w
# Increment the starting point so we have enough space for the antennas
h_diff = h - old_h
self.drawing['h_diff'] = h_diff
env.y += h_diff
self.drawing['ant_h'] = h_diff
return w, h
def draw(self, env):
self.draw_base(env)
# Fetch the drawing group from the environment
g = env.dwg_g
# Where we want to start our antennas is higher than where the base
# rect was drawn
ant_starty = env.y - self.drawing['h_diff']
# The antennas will start at 1 * width ratio and
# width - 2 * width ratio
col = env.colors['bg']['base02'].rgb
ant_startx = env.x + self.drawing['ant_w']
g.add(env.dwg.rect(insert=(ant_startx, ant_starty),
size=(self.drawing['ant_w'], self.drawing['ant_h']),
fill=col,
stroke=col))
ant_startx = env.x + self.drawing['w'] - (2 * self.drawing['ant_w'])
g.add(env.dwg.rect(insert=(ant_startx, ant_starty),
size=(self.drawing['ant_w'], self.drawing['ant_h']),
fill=col,
stroke=col))
class Network(Addressable):
def __init__(self):
super().__init__()
self._vlan = None
self.properties.append('vlan')
@property
def vlan(self):
return self._vlan
@vlan.setter
def vlan(self, number):
try:
number = int(number)
except:
raise ValueError("Can't convert vlan number {0} to an int".format(number))
self._vlan = number
class Tunnel(Addressable):
def __init__(self):
super().__init__()
class Host(Addressable, Ports, Routes):
def __init__(self):
Addressable.__init__(self)
Ports.__init__(self)
Routes.__init__(self)
primitives = { 'Switch' : Switch,
'AccessPoint' : AccessPoint,
'Network' : Network,
'Tunnel' : Tunnel,
'Host' : Host }
|
liamfraser/lantex
|
lantex/types.py
|
Python
|
bsd-3-clause
| 19,172
| 0.002921
|
import csv
from datetime import datetime
from django.conf import settings
from django.core.management import BaseCommand
from bustimes.utils import download_if_changed
from ...models import Licence, Registration, Variation
def parse_date(date_string):
if date_string:
return datetime.strptime(date_string, '%d/%m/%y').date()
def download_if_modified(path):
url = f"https://content.mgmt.dvsacloud.uk/olcs.prod.dvsa.aws/data-gov-uk-export/{path}"
return download_if_changed(settings.DATA_DIR / path, url)
class Command(BaseCommand):
@staticmethod
def add_arguments(parser):
parser.add_argument('regions', nargs='?', type=str, default="FBCMKGDH")
def get_rows(self, path):
with open(settings.DATA_DIR / path) as open_file:
yield from csv.DictReader(open_file)
def handle(self, regions, **kwargs):
for region in regions:
modified_1, last_modified_1 = download_if_modified(f"Bus_RegisteredOnly_{region}.csv")
modified_2, last_modified_2 = download_if_modified(f"Bus_Variation_{region}.csv")
if modified_1 or modified_2:
print(region, last_modified_1, last_modified_2)
self.handle_region(region)
def handle_region(self, region):
lics = Licence.objects.filter(traffic_area=region)
lics = lics.in_bulk(field_name="licence_number")
lics_to_update = set()
lics_to_create = []
regs = Registration.objects.filter(licence__traffic_area=region)
regs = regs.in_bulk(field_name="registration_number")
regs_to_update = set()
regs_to_create = []
variations = Variation.objects.filter(registration__licence__traffic_area=region)
variations = variations.select_related('registration').all()
variations_dict = {}
for variation in variations:
reg_no = variation.registration.registration_number
if reg_no in variations_dict:
variations_dict[reg_no][variation.variation_number] = variation
else:
variations_dict[reg_no] = {
variation.variation_number: variation
}
# vars_to_update = set()
vars_to_create = []
# previous_line = None
# cardinals = set()
for line in self.get_rows(f"Bus_Variation_{region}.csv"):
reg_no = line["Reg_No"]
var_no = int(line["Variation Number"])
lic_no = line["Lic_No"]
if lic_no in lics:
licence = lics[lic_no]
if licence.id and licence not in lics_to_update:
licence.trading_name = ''
lics_to_update.add(licence)
else:
licence = Licence(licence_number=lic_no)
lics_to_create.append(licence)
lics[lic_no] = licence
licence.name = line['Op_Name']
# a licence can have multiple trading names
if line['trading_name'] not in licence.trading_name:
if licence.trading_name:
licence.trading_name = f"{licence.trading_name}\n{line['trading_name']}"
else:
licence.trading_name = line['trading_name']
if licence.address != line['Address']:
if licence.address:
print(licence.address, line['Address'])
licence.address = line['Address']
if licence.traffic_area:
assert licence.traffic_area == line['Current Traffic Area']
else:
licence.traffic_area = line['Current Traffic Area']
licence.discs = line['Discs in Possession'] or 0
licence.authorised_discs = line['AUTHDISCS'] or 0
licence.description = line['Description']
licence.granted_date = parse_date(line['Granted_Date'])
licence.expiry_date = parse_date(line['Exp_Date'])
if len(reg_no) > 20:
# PK0000098/PK0000098/364
parts = reg_no.split('/')
assert parts[0] == parts[1]
reg_no = f'{parts[1]}/{parts[2]}'
if reg_no in regs:
registration = regs[reg_no]
if registration.id and registration not in regs_to_update:
regs_to_update.add(registration)
else:
registration = Registration(
registration_number=reg_no,
registered=False
)
regs_to_create.append(registration)
regs[reg_no] = registration
registration.licence = licence
status = line['Registration Status']
registration.registration_status = status
if var_no == 0 and status == 'New':
registration.registered = True
elif status == 'Registered':
registration.registered = True
elif status == 'Cancelled' or status == 'Admin Cancelled' or status == 'Cancellation':
registration.registered = False
registration.start_point = line['start_point']
registration.finish_point = line['finish_point']
registration.via = line['via']
registration.subsidies_description = line['Subsidies_Description']
registration.subsidies_details = line['Subsidies_Details']
registration.traffic_area_office_covered_by_area = line['TAO Covered BY Area']
# a registration can have multiple numbers
if registration.service_number:
if line['Service Number'] not in registration.service_number:
registration.service_number = f"{registration.service_number}\n{line['Service Number']}"
else:
registration.service_number = line['Service Number']
# a registration can have multiple types
if registration.service_type_description:
if line['Service_Type_Description'] not in registration.service_type_description:
registration.service_type_description += f"\n{line['Service_Type_Description']}"
else:
registration.service_type_description = line['Service_Type_Description']
if registration.authority_description:
if line['Auth_Description'] not in registration.authority_description:
registration.authority_description += f"\n{line['Auth_Description']}"
if len(registration.authority_description) > 255:
# some National Express coach services cover many authorities
# print(reg_no)
registration.authority_description = registration.authority_description[:255]
else:
registration.authority_description = line['Auth_Description']
# if previous_line:
# if previous_line["Reg_No"] == reg_no:
# if int(previous_line["Variation Number"]) == var_no:
# for key in line:
# prev = previous_line[key]
# value = line[key]
# if prev != value:
# if key not in (
# 'Auth_Description', 'TAO Covered BY Area',
# 'trading_name', 'Pub_Text', 'Registration Status', 'end_date', 'received_date'
# 'effective_date', 'short_notice', 'Service_Type_Description'
# ):
# print(reg_no)
# print(f"'{key}': '{prev}', '{value}'")
# cardinals.add(key)
# # print(line)
variation = Variation(registration=registration, variation_number=var_no)
if reg_no in variations_dict:
if var_no in variations_dict[reg_no]:
continue # ?
else:
variations_dict[reg_no][var_no] = variation
else:
variations_dict[reg_no] = {var_no: variation}
variation.effective_date = parse_date(line['effective_date'])
variation.date_received = parse_date(line['received_date'])
variation.end_date = parse_date(line['end_date'])
variation.service_type_other_details = line['Service_Type_Other_Details']
variation.registration_status = line['Registration Status']
variation.publication_text = line['Pub_Text']
variation.short_notice = line['Short Notice']
assert not variation.id
if not variation.id:
vars_to_create.append(variation)
# previous_line = line
# previous_line = None
# cardinals = set()
# use this file to work out if a registration has not been cancelled/expired
for line in self.get_rows(f"Bus_RegisteredOnly_{region}.csv"):
reg_no = line["Reg_No"]
reg = regs[reg_no]
if reg.registration_status != line["Registration Status"]:
reg.registration_status = line["Registration Status"]
reg.registered = True
# if previous_line and previous_line["Reg_No"] == reg_no:
# for key in line:
# prev = previous_line[key]
# value = line[key]
# if prev != value:
# cardinals.add(key)
# if key == 'TAO Covered BY Area':
# print(prev, value)
# previous_line = line
# print(cardinals)
Licence.objects.bulk_update(
lics_to_update,
["name", "trading_name", "traffic_area", "discs", "authorised_discs",
"description", "granted_date", "expiry_date", "address"]
)
Licence.objects.bulk_create(lics_to_create)
for registration in regs_to_create:
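            # re-assign so the licence's primary key (set by bulk_create) is copied into registration.licence_id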
registration.licence = registration.licence
Registration.objects.bulk_update(
regs_to_update,
["start_point", "finish_point", "via",
"subsidies_description", "subsidies_details",
"traffic_area_office_covered_by_area",
"service_number", "service_type_description",
"registration_status", "authority_description",
"registered"],
batch_size=1000
)
Registration.objects.bulk_create(regs_to_create)
Variation.objects.bulk_create(vars_to_create)
# Variation.objects.bulk_update(
# vars_to_update,
# ['date_received', 'end_date', 'service_type_other_details', 'registration_status', 'publication_text',
# 'short_notice']
# )
|
jclgoodwin/bustimes.org.uk
|
vosa/management/commands/import_vosa.py
|
Python
|
mpl-2.0
| 11,021
| 0.002087
|
from couchpotato import get_session
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.variable import mergeDicts, randomString
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.core.settings.model import Library
import copy
import traceback
log = CPLog(__name__)
class MovieResultModifier(Plugin):
default_info = {
'tmdb_id': 0,
'titles': [],
'original_title': '',
'year': 0,
'images': {
'poster': [],
'backdrop': [],
'poster_original': [],
'backdrop_original': [],
'actors': {}
},
'runtime': 0,
'plot': '',
'tagline': '',
'imdb': '',
'genres': [],
'mpaa': None,
'actors': [],
'actor_roles': {}
}
def __init__(self):
addEvent('result.modify.info.search', self.returnByType)
addEvent('result.modify.movie.search', self.combineOnIMDB)
addEvent('result.modify.movie.info', self.checkLibrary)
def returnByType(self, results):
new_results = {}
for r in results:
type_name = r.get('type', 'movie') + 's'
if type_name not in new_results:
new_results[type_name] = []
new_results[type_name].append(r)
# Combine movies, needs a cleaner way..
if 'movies' in new_results:
new_results['movies'] = self.combineOnIMDB(new_results['movies'])
return new_results
def combineOnIMDB(self, results):
temp = {}
order = []
# Combine on imdb id
for item in results:
random_string = randomString()
imdb = item.get('imdb', random_string)
imdb = imdb if imdb else random_string
if not temp.get(imdb):
temp[imdb] = self.getLibraryTags(imdb)
order.append(imdb)
# Merge dicts
temp[imdb] = mergeDicts(temp[imdb], item)
# Make it a list again
temp_list = [temp[x] for x in order]
return temp_list
def getLibraryTags(self, imdb):
temp = {
'in_wanted': False,
'in_library': False,
}
# Add release info from current library
db = get_session()
try:
l = db.query(Library).filter_by(identifier = imdb).first()
if l:
# Statuses
active_status, done_status = fireEvent('status.get', ['active', 'done'], single = True)
for movie in l.movies:
if movie.status_id == active_status['id']:
temp['in_wanted'] = fireEvent('media.get', movie.id, single = True)
for release in movie.releases:
if release.status_id == done_status['id']:
temp['in_library'] = fireEvent('media.get', movie.id, single = True)
except:
log.error('Tried getting more info on searched movies: %s', traceback.format_exc())
return temp
def checkLibrary(self, result):
result = mergeDicts(copy.deepcopy(self.default_info), copy.deepcopy(result))
if result and result.get('imdb'):
return mergeDicts(result, self.getLibraryTags(result['imdb']))
return result
|
tmxdyf/CouchPotatoServer
|
couchpotato/core/providers/info/_modifier/main.py
|
Python
|
gpl-3.0
| 3,402
| 0.004409
|
"""Test cltk.prosody."""
__license__ = 'MIT License. See LICENSE.'
from cltk.prosody.latin.scanner import Scansion as ScansionLatin
from cltk.prosody.latin.clausulae_analysis import Clausulae
from cltk.prosody.greek.scanner import Scansion as ScansionGreek
from cltk.prosody.latin.macronizer import Macronizer
import unittest
class TestSequenceFunctions(unittest.TestCase): # pylint: disable=R0904
"""Class for unittest"""
"""greek/scanner.py"""
# Test string for Greek prosody module unit testing
test = "νέος μὲν καὶ ἄπειρος, δικῶν ἔγωγε ἔτι. μὲν καὶ ἄπειρος."
def test_clean_text_greek(self):
"""Test _clean_text method."""
correct = "νέος μὲν καὶ ἄπειρος δικῶν ἔγωγε ἔτι. μὲν καὶ ἄπειρος."
current = ScansionGreek()._clean_text(self.test)
self.assertEqual(current, correct)
def test_clean_accents_greek(self):
"""Test _clean_accents method."""
correct = "νεος μεν και απειρος δικων εγωγε ετι. μεν και απειρος."
current = ScansionGreek()._clean_accents(self.test)
self.assertEqual(current, correct)
def test_tokenize_greek(self):
"""Test _tokenize method."""
correct = [['νεος', 'μεν', 'και', 'απειρος', 'δικων', 'εγωγε', 'ετι.'],
['μεν', 'και', 'απειρος.']]
current = ScansionGreek()._tokenize(self.test)
self.assertEqual(current, correct)
def test_make_syllables_greek(self):
"""Test _make_syllables method."""
correct = [[['νε', 'ος'], ['μεν'], ['και'], ['α', 'πει', 'ρος'],
['δι', 'κων'], ['ε', 'γω', 'γε'], ['ε', 'τι']], [['μεν'],
['και'], ['α', 'πει', 'ρος']]]
current = ScansionGreek()._make_syllables(self.test)
self.assertEqual(current, correct)
def test_scan_text_greek(self):
"""Test scan_text method."""
correct = ['˘¯¯¯˘¯¯˘¯˘¯˘˘x', '¯¯˘¯x']
current = ScansionGreek().scan_text(self.test)
self.assertEqual(current, correct)
"""latin/macronizer.py"""
def test_retrieve_morpheus_entry(self):
""" Text Macronizer()._retrieve_morpheus_tag()"""
correct = [('n-s---fb-', 'puella', 'puellā'), ('n-s---fn-', 'puella', 'puella'), ('n-s---fv-', 'puella', 'puella')]
current = Macronizer("tag_ngram_123_backoff")._retrieve_morpheus_entry("puella")
self.assertEqual(current, correct)
def test_macronize_word(self):
"""Test Macronizer()._macronize_word()"""
correct = ('flumine', 'n-s---nb-', 'flūmine')
current = Macronizer("tag_ngram_123_backoff")._macronize_word(('flumine', 'n-s---nb-'))
self.assertEqual(current, correct)
def test_macronize_tags(self):
"""Test Macronizer().macronize_tags()"""
text = "Quo usque tandem, O Catilina, abutere nostra patientia?"
correct = [('quo', 'd--------', 'quō'), ('usque', 'd--------', 'usque'), ('tandem', 'd--------', 'tandem'), (',', 'u--------', ','), ('o', 'e--------', 'ō'), ('catilina', 'n-s---mb-', 'catilīnā'), (',', 'u--------', ','), ('abutere', 'v2sfip---', 'abūtēre'), ('nostra', 'a-s---fb-', 'nostrā'), ('patientia', 'n-s---fn-', 'patientia'), ('?', None, '?')]
current = Macronizer("tag_ngram_123_backoff").macronize_tags(text)
self.assertEqual(current, correct)
def test_macronize_text(self):
"""Test Macronizer().macronize_text()"""
text = "Quo usque tandem, O Catilina, abutere nostra patientia?"
correct = "quō usque tandem , ō catilīnā , abūtēre nostrā patientia ?"
current = Macronizer("tag_ngram_123_backoff").macronize_text(text)
self.assertEqual(current, correct)
if __name__ == '__main__':
unittest.main()
|
TylerKirby/cltk
|
cltk/tests/test_nlp/test_prosody.py
|
Python
|
mit
| 3,978
| 0.001078
|
from datetime import datetime, time, timedelta
from pandas.compat import range
import sys
import os
import nose
import numpy as np
from pandas import Index, DatetimeIndex, Timestamp, Series, date_range, period_range
import pandas.tseries.frequencies as frequencies
from pandas.tseries.tools import to_datetime
import pandas.tseries.offsets as offsets
from pandas.tseries.period import PeriodIndex
import pandas.compat as compat
import pandas.util.testing as tm
from pandas import Timedelta
def test_to_offset_multiple():
freqstr = '2h30min'
freqstr2 = '2h 30min'
result = frequencies.to_offset(freqstr)
assert(result == frequencies.to_offset(freqstr2))
expected = offsets.Minute(150)
assert(result == expected)
freqstr = '2h30min15s'
result = frequencies.to_offset(freqstr)
expected = offsets.Second(150 * 60 + 15)
assert(result == expected)
freqstr = '2h 60min'
result = frequencies.to_offset(freqstr)
expected = offsets.Hour(3)
assert(result == expected)
freqstr = '15l500u'
result = frequencies.to_offset(freqstr)
expected = offsets.Micro(15500)
assert(result == expected)
freqstr = '10s75L'
result = frequencies.to_offset(freqstr)
expected = offsets.Milli(10075)
assert(result == expected)
freqstr = '2800N'
result = frequencies.to_offset(freqstr)
expected = offsets.Nano(2800)
assert(result == expected)
# malformed
try:
frequencies.to_offset('2h20m')
except ValueError:
pass
else:
assert(False)
def test_to_offset_negative():
freqstr = '-1S'
result = frequencies.to_offset(freqstr)
assert(result.n == -1)
freqstr = '-5min10s'
result = frequencies.to_offset(freqstr)
assert(result.n == -310)
def test_to_offset_leading_zero():
freqstr = '00H 00T 01S'
result = frequencies.to_offset(freqstr)
assert(result.n == 1)
freqstr = '-00H 03T 14S'
result = frequencies.to_offset(freqstr)
assert(result.n == -194)
def test_to_offset_pd_timedelta():
# Tests for #9064
td = Timedelta(days=1, seconds=1)
result = frequencies.to_offset(td)
expected = offsets.Second(86401)
assert(expected==result)
td = Timedelta(days=-1, seconds=1)
result = frequencies.to_offset(td)
expected = offsets.Second(-86399)
assert(expected==result)
td = Timedelta(hours=1, minutes=10)
result = frequencies.to_offset(td)
expected = offsets.Minute(70)
assert(expected==result)
td = Timedelta(hours=1, minutes=-10)
result = frequencies.to_offset(td)
expected = offsets.Minute(50)
assert(expected==result)
td = Timedelta(weeks=1)
result = frequencies.to_offset(td)
expected = offsets.Day(7)
assert(expected==result)
td1 = Timedelta(hours=1)
result1 = frequencies.to_offset(td1)
result2 = frequencies.to_offset('60min')
assert(result1 == result2)
td = Timedelta(microseconds=1)
result = frequencies.to_offset(td)
expected = offsets.Micro(1)
assert(expected == result)
td = Timedelta(microseconds=0)
tm.assertRaises(ValueError, lambda: frequencies.to_offset(td))
def test_anchored_shortcuts():
result = frequencies.to_offset('W')
expected = frequencies.to_offset('W-SUN')
assert(result == expected)
result = frequencies.to_offset('Q')
expected = frequencies.to_offset('Q-DEC')
assert(result == expected)
_dti = DatetimeIndex
class TestFrequencyInference(tm.TestCase):
def test_raise_if_period_index(self):
index = PeriodIndex(start="1/1/1990", periods=20, freq="M")
self.assertRaises(TypeError, frequencies.infer_freq, index)
def test_raise_if_too_few(self):
index = _dti(['12/31/1998', '1/3/1999'])
self.assertRaises(ValueError, frequencies.infer_freq, index)
def test_business_daily(self):
index = _dti(['12/31/1998', '1/3/1999', '1/4/1999'])
self.assertEqual(frequencies.infer_freq(index), 'B')
def test_day(self):
self._check_tick(timedelta(1), 'D')
def test_day_corner(self):
index = _dti(['1/1/2000', '1/2/2000', '1/3/2000'])
self.assertEqual(frequencies.infer_freq(index), 'D')
def test_non_datetimeindex(self):
dates = to_datetime(['1/1/2000', '1/2/2000', '1/3/2000'])
self.assertEqual(frequencies.infer_freq(dates), 'D')
def test_hour(self):
self._check_tick(timedelta(hours=1), 'H')
def test_minute(self):
self._check_tick(timedelta(minutes=1), 'T')
def test_second(self):
self._check_tick(timedelta(seconds=1), 'S')
def test_millisecond(self):
self._check_tick(timedelta(microseconds=1000), 'L')
def test_microsecond(self):
self._check_tick(timedelta(microseconds=1), 'U')
def test_nanosecond(self):
self._check_tick(np.timedelta64(1, 'ns'), 'N')
def _check_tick(self, base_delta, code):
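        # indexes spaced i * base_delta apart should infer '<i><code>', or just the code when i == 1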
b = Timestamp(datetime.now())
for i in range(1, 5):
inc = base_delta * i
index = _dti([b + inc * j for j in range(3)])
if i > 1:
exp_freq = '%d%s' % (i, code)
else:
exp_freq = code
self.assertEqual(frequencies.infer_freq(index), exp_freq)
index = _dti([b + base_delta * 7] +
[b + base_delta * j for j in range(3)])
self.assertIsNone(frequencies.infer_freq(index))
index = _dti([b + base_delta * j for j in range(3)] +
[b + base_delta * 7])
self.assertIsNone(frequencies.infer_freq(index))
def test_weekly(self):
days = ['MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT', 'SUN']
for day in days:
self._check_generated_range('1/1/2000', 'W-%s' % day)
def test_week_of_month(self):
days = ['MON', 'TUE', 'WED', 'THU', 'FRI', 'SAT', 'SUN']
for day in days:
for i in range(1, 5):
self._check_generated_range('1/1/2000', 'WOM-%d%s' % (i, day))
def test_fifth_week_of_month(self):
# Only supports freq up to WOM-4. See #9425
func = lambda: date_range('2014-01-01', freq='WOM-5MON')
self.assertRaises(ValueError, func)
def test_fifth_week_of_month_infer(self):
# Only attempts to infer up to WOM-4. See #9425
index = DatetimeIndex(["2014-03-31", "2014-06-30", "2015-03-30"])
assert frequencies.infer_freq(index) is None
def test_week_of_month_fake(self):
#All of these dates are on same day of week and are 4 or 5 weeks apart
index = DatetimeIndex(["2013-08-27","2013-10-01","2013-10-29","2013-11-26"])
assert frequencies.infer_freq(index) != 'WOM-4TUE'
def test_monthly(self):
self._check_generated_range('1/1/2000', 'M')
def test_monthly_ambiguous(self):
rng = _dti(['1/31/2000', '2/29/2000', '3/31/2000'])
self.assertEqual(rng.inferred_freq, 'M')
def test_business_monthly(self):
self._check_generated_range('1/1/2000', 'BM')
def test_business_start_monthly(self):
self._check_generated_range('1/1/2000', 'BMS')
def test_quarterly(self):
for month in ['JAN', 'FEB', 'MAR']:
self._check_generated_range('1/1/2000', 'Q-%s' % month)
def test_annual(self):
for month in MONTHS:
self._check_generated_range('1/1/2000', 'A-%s' % month)
def test_business_annual(self):
for month in MONTHS:
self._check_generated_range('1/1/2000', 'BA-%s' % month)
def test_annual_ambiguous(self):
rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002'])
self.assertEqual(rng.inferred_freq, 'A-JAN')
def _check_generated_range(self, start, freq):
freq = freq.upper()
gen = date_range(start, periods=7, freq=freq)
index = _dti(gen.values)
if not freq.startswith('Q-'):
self.assertEqual(frequencies.infer_freq(index), gen.freqstr)
else:
inf_freq = frequencies.infer_freq(index)
self.assertTrue((inf_freq == 'Q-DEC' and
gen.freqstr in ('Q', 'Q-DEC', 'Q-SEP', 'Q-JUN',
'Q-MAR'))
or
(inf_freq == 'Q-NOV' and
gen.freqstr in ('Q-NOV', 'Q-AUG', 'Q-MAY', 'Q-FEB'))
or
(inf_freq == 'Q-OCT' and
gen.freqstr in ('Q-OCT', 'Q-JUL', 'Q-APR', 'Q-JAN')))
gen = date_range(start, periods=5, freq=freq)
index = _dti(gen.values)
if not freq.startswith('Q-'):
self.assertEqual(frequencies.infer_freq(index), gen.freqstr)
else:
inf_freq = frequencies.infer_freq(index)
self.assertTrue((inf_freq == 'Q-DEC' and
gen.freqstr in ('Q', 'Q-DEC', 'Q-SEP', 'Q-JUN',
'Q-MAR'))
or
(inf_freq == 'Q-NOV' and
gen.freqstr in ('Q-NOV', 'Q-AUG', 'Q-MAY', 'Q-FEB'))
or
(inf_freq == 'Q-OCT' and
gen.freqstr in ('Q-OCT', 'Q-JUL', 'Q-APR', 'Q-JAN')))
def test_infer_freq(self):
rng = period_range('1959Q2', '2009Q3', freq='Q')
rng = Index(rng.to_timestamp('D', how='e').asobject)
self.assertEqual(rng.inferred_freq, 'Q-DEC')
rng = period_range('1959Q2', '2009Q3', freq='Q-NOV')
rng = Index(rng.to_timestamp('D', how='e').asobject)
self.assertEqual(rng.inferred_freq, 'Q-NOV')
rng = period_range('1959Q2', '2009Q3', freq='Q-OCT')
rng = Index(rng.to_timestamp('D', how='e').asobject)
self.assertEqual(rng.inferred_freq, 'Q-OCT')
def test_infer_freq_tz(self):
freqs = {'AS-JAN': ['2009-01-01', '2010-01-01', '2011-01-01', '2012-01-01'],
'Q-OCT': ['2009-01-31', '2009-04-30', '2009-07-31', '2009-10-31'],
'M': ['2010-11-30', '2010-12-31', '2011-01-31', '2011-02-28'],
'W-SAT': ['2010-12-25', '2011-01-01', '2011-01-08', '2011-01-15'],
'D': ['2011-01-01', '2011-01-02', '2011-01-03', '2011-01-04'],
'H': ['2011-12-31 22:00', '2011-12-31 23:00', '2012-01-01 00:00', '2012-01-01 01:00']
}
# GH 7310
for tz in [None, 'Australia/Sydney', 'Asia/Tokyo', 'Europe/Paris',
'US/Pacific', 'US/Eastern']:
for expected, dates in compat.iteritems(freqs):
idx = DatetimeIndex(dates, tz=tz)
self.assertEqual(idx.inferred_freq, expected)
def test_infer_freq_tz_transition(self):
# Tests for #8772
date_pairs = [['2013-11-02', '2013-11-5'], #Fall DST
['2014-03-08', '2014-03-11'], #Spring DST
['2014-01-01', '2014-01-03']] #Regular Time
freqs = ['3H', '10T', '3601S', '3600001L', '3600000001U', '3600000000001N']
for tz in [None, 'Australia/Sydney', 'Asia/Tokyo', 'Europe/Paris',
'US/Pacific', 'US/Eastern']:
for date_pair in date_pairs:
for freq in freqs:
idx = date_range(date_pair[0], date_pair[1], freq=freq, tz=tz)
print(idx)
self.assertEqual(idx.inferred_freq, freq)
index = date_range("2013-11-03", periods=5, freq="3H").tz_localize("America/Chicago")
self.assertIsNone(index.inferred_freq)
def test_infer_freq_businesshour(self):
# GH 7905
idx = DatetimeIndex(['2014-07-01 09:00', '2014-07-01 10:00', '2014-07-01 11:00',
'2014-07-01 12:00', '2014-07-01 13:00', '2014-07-01 14:00'])
# hourly freq in a day must result in 'H'
self.assertEqual(idx.inferred_freq, 'H')
idx = DatetimeIndex(['2014-07-01 09:00', '2014-07-01 10:00', '2014-07-01 11:00',
'2014-07-01 12:00', '2014-07-01 13:00', '2014-07-01 14:00',
'2014-07-01 15:00', '2014-07-01 16:00',
'2014-07-02 09:00', '2014-07-02 10:00', '2014-07-02 11:00'])
self.assertEqual(idx.inferred_freq, 'BH')
idx = DatetimeIndex(['2014-07-04 09:00', '2014-07-04 10:00', '2014-07-04 11:00',
'2014-07-04 12:00', '2014-07-04 13:00', '2014-07-04 14:00',
'2014-07-04 15:00', '2014-07-04 16:00',
'2014-07-07 09:00', '2014-07-07 10:00', '2014-07-07 11:00'])
self.assertEqual(idx.inferred_freq, 'BH')
idx = DatetimeIndex(['2014-07-04 09:00', '2014-07-04 10:00', '2014-07-04 11:00',
'2014-07-04 12:00', '2014-07-04 13:00', '2014-07-04 14:00',
'2014-07-04 15:00', '2014-07-04 16:00',
'2014-07-07 09:00', '2014-07-07 10:00', '2014-07-07 11:00',
'2014-07-07 12:00', '2014-07-07 13:00', '2014-07-07 14:00',
'2014-07-07 15:00', '2014-07-07 16:00',
'2014-07-08 09:00', '2014-07-08 10:00', '2014-07-08 11:00',
'2014-07-08 12:00', '2014-07-08 13:00', '2014-07-08 14:00',
'2014-07-08 15:00', '2014-07-08 16:00'])
self.assertEqual(idx.inferred_freq, 'BH')
def test_not_monotonic(self):
rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002'])
rng = rng[::-1]
self.assertIsNone(rng.inferred_freq)
def test_non_datetimeindex(self):
rng = _dti(['1/31/2000', '1/31/2001', '1/31/2002'])
vals = rng.to_pydatetime()
result = frequencies.infer_freq(vals)
self.assertEqual(result, rng.inferred_freq)
def test_invalid_index_types(self):
# test all index types
for i in [ tm.makeIntIndex(10),
tm.makeFloatIndex(10),
tm.makePeriodIndex(10) ]:
self.assertRaises(TypeError, lambda : frequencies.infer_freq(i))
for i in [ tm.makeStringIndex(10),
tm.makeUnicodeIndex(10) ]:
self.assertRaises(ValueError, lambda : frequencies.infer_freq(i))
def test_string_datetimelike_compat(self):
# GH 6463
expected = frequencies.infer_freq(['2004-01', '2004-02', '2004-03', '2004-04'])
result = frequencies.infer_freq(Index(['2004-01', '2004-02', '2004-03', '2004-04']))
self.assertEqual(result,expected)
def test_series(self):
# GH6407
# inferring series
# invalid type of Series
for s in [ Series(np.arange(10)),
Series(np.arange(10.))]:
self.assertRaises(TypeError, lambda : frequencies.infer_freq(s))
# a non-convertible string
self.assertRaises(ValueError, lambda : frequencies.infer_freq(Series(['foo','bar'])))
# cannot infer on PeriodIndex
for freq in [None, 'L', 'Y']:
s = Series(period_range('2013',periods=10,freq=freq))
self.assertRaises(TypeError, lambda : frequencies.infer_freq(s))
# DateTimeIndex
for freq in ['M', 'L', 'S']:
s = Series(date_range('20130101',periods=10,freq=freq))
inferred = frequencies.infer_freq(s)
self.assertEqual(inferred,freq)
s = Series(date_range('20130101','20130110'))
inferred = frequencies.infer_freq(s)
self.assertEqual(inferred,'D')
MONTHS = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP',
'OCT', 'NOV', 'DEC']
def test_is_superperiod_subperiod():
assert(frequencies.is_superperiod(offsets.YearEnd(), offsets.MonthEnd()))
assert(frequencies.is_subperiod(offsets.MonthEnd(), offsets.YearEnd()))
assert(frequencies.is_superperiod(offsets.Hour(), offsets.Minute()))
assert(frequencies.is_subperiod(offsets.Minute(), offsets.Hour()))
assert(frequencies.is_superperiod(offsets.Second(), offsets.Milli()))
assert(frequencies.is_subperiod(offsets.Milli(), offsets.Second()))
assert(frequencies.is_superperiod(offsets.Milli(), offsets.Micro()))
assert(frequencies.is_subperiod(offsets.Micro(), offsets.Milli()))
assert(frequencies.is_superperiod(offsets.Micro(), offsets.Nano()))
assert(frequencies.is_subperiod(offsets.Nano(), offsets.Micro()))
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
|
bdh1011/wau
|
venv/lib/python2.7/site-packages/pandas/tseries/tests/test_frequencies.py
|
Python
|
mit
| 16,705
| 0.004071
|
from OpenGLCffi.GL import params
@params(api='gl', prms=['len', 'string'])
def glStringMarkerGREMEDY(len, string):
pass
|
cydenix/OpenGLCffi
|
OpenGLCffi/GL/EXT/GREMEDY/string_marker.py
|
Python
|
mit
| 123
| 0.02439
|
import unittest
from github_person import GithubPerson
class Test_GithubPerson(unittest.TestCase):
def setUp(self):
pass
if __name__ == '__main__':
unittest.main()
|
pepincho/Python101-and-Algo1-Courses
|
Programming-101-v3/week6/1-Who-Follows-You-Back/github_person_test.py
|
Python
|
mit
| 185
| 0
|
import shutil
import os
import hashlib
import dockbot
def gen_hash(data):
return hashlib.sha256(data).hexdigest()
class Image(object):
def __init__(self, root, name, path, platform = None, projects = [],
modes = None, slave = False, remote = False):
self.root = root
self.conf = root.conf
self.name = name
self.qname = self.conf['namespace'] + '-' + name
self.platform = platform
self.modes = modes
if not remote:
self.path = path
self.dir = os.path.dirname(path)
self.context = self.conf.get_list('context', platform, slave)
self.containers = []
if not slave:
self.context += [
dockbot.get_resource('dockbot/data/master/nginx.conf')]
self.containers.append(dockbot.Master(self))
return
# Slave only from here
for mode in modes:
self.containers.append(self.create_slave(mode))
# Slave projects
self.projects = set()
for project in projects:
self.projects.update(self.conf.get_project_deps(project))
# Get project overrides
self.project_overrides = \
self.conf.get_sub_key(platform).get('projects', {})
def __eq__(self, other): return self.name == other.name
def __ne__(self, other): return not self.__eq__(other)
def kind(self): return 'Image'
def create_slave(self, mode): return dockbot.Slave(self, mode)
def is_running(self):
for container in self.containers:
if container.is_running(): return True
return False
def get_context_path(self):
return 'run/docker/' + self.name
def get_hash_path(self):
return self.get_context_path() + '.sha256'
def get_data_hash(self):
path = self.get_hash_path()
if os.path.exists(path):
f = None
try:
f = open(path, 'rt')
return f.read()
finally:
if f is not None: f.close()
def is_dirty(self):
if dockbot.args.force: return False
return self.get_data_hash() != gen_hash(self.gen_dockerfile())
def gen_dockerfile(self):
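        # Preprocess the image definition with m4; each libpath entry becomes an -I include path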
libpath = [os.path.dirname(self.path)]
libpath += self.conf.get('libpath', ['lib'])
libpath += [dockbot.get_resource('dockbot/data/lib')]
cmd = ['m4'] + sum([['-I', x] for x in libpath], []) + [self.path]
ret, out, err = dockbot.system(cmd, True)
if ret:
raise dockbot.Error('Failed to construct Docker file: ' +
err.decode('utf-8'))
return out
def get_project(self, name):
import copy
for project in self.conf.projects:
if project['name'] == name:
p = copy.deepcopy(project)
p.update(self.project_overrides.get(name, {}))
return p
raise dockbot.Error('Project "%s" not found' % name)
def exists(self):
return dockbot.inspect(self.qname) != dockbot.NOT_FOUND
def cmd_delete(self):
if self.exists():
for container in self.containers:
container.cmd_delete()
dockbot.status_line(self.qname, *dockbot.DELETING)
dockbot.system(['docker', 'rmi', '--no-prune', self.qname], True,
'remove image')
def container_exists(self):
for container in self.containers:
if container.exists(): return True
return False
def cmd_status(self):
for container in self.containers:
container.cmd_status()
def cmd_config(self):
for container in self.containers:
container.cmd_config()
def cmd_shell(self):
raise dockbot.Error('Cannot open shell in image')
def cmd_start(self):
for container in self.containers:
container.cmd_start()
def cmd_stop(self):
for container in self.containers:
container.cmd_stop()
def cmd_restart(self):
self.cmd_stop()
self.cmd_start()
def cmd_build(self):
# Check if image is running
if self.is_running():
if dockbot.args.all and (self.is_dirty() or dockbot.args.force):
self.cmd_stop()
else:
dockbot.status_line(self.qname, *dockbot.RUNNING)
return
if self.is_dirty() or dockbot.args.force:
self.cmd_delete() # Delete image if it exists
elif self.exists():
dockbot.status_line(self.qname, *dockbot.BUILT)
return
dockbot.status_line(self.qname, *dockbot.BUILDING)
# Generate Dockerfile
data = self.gen_dockerfile()
data_hash = gen_hash(data)
# Clean up old context
ctx_path = self.get_context_path()
if os.path.exists(ctx_path): shutil.rmtree(ctx_path)
# Construct Dockerfile
os.makedirs(ctx_path)
dockerfile = ctx_path + '/Dockerfile'
f = None
try:
f = open(dockerfile, 'w')
f.write(data.decode('utf-8'))
f.close()
f = open(self.get_hash_path(), 'w')
f.write(data_hash)
finally:
if f is not None: f.close()
# Link context
for path in self.context:
target = os.path.join(ctx_path, os.path.basename(path))
if dockbot.args.verbose: print('%s -> %s' % (path, target))
shutil.copy(path, target)
# Build command
cmd = ['docker', 'build', '--rm', '-t', self.qname]
# Extra args
cmd += dockbot.args.args
# Do build
dockbot.system(cmd + ['.'], False, 'build ' + self.qname,
cwd = ctx_path)
def cmd_trigger(self):
for container in self.containers:
if isinstance(container, dockbot.Slave):
container.cmd_trigger()
def cmd_publish(self):
for container in self.containers:
container.cmd_publish()
|
CauldronDevelopmentLLC/dockbot
|
dockbot/Image.py
|
Python
|
gpl-3.0
| 6,147
| 0.00667
|
#!/bin/env python
# mainly for sys.argv[], sys.argv[0] is the name of the program
import sys
# mainly for arrays
import numpy as np
def newtraph(fx, fxprime, x0=1.0, tol=1e-8, maxit=100):
    # Newton-Raphson iteration x <- x - f(x)/f'(x); the x0/tol/maxit defaults are illustrative, the original stub had no body
    x = x0
    for _ in range(maxit):
        step = fx(x) / fxprime(x)
        x -= step
        if abs(step) < tol:
            break
    return x
if __name__ == '__main__':
    print 'hello'
|
ketancmaheshwari/hello-goog
|
src/python/newtonraphson.py
|
Python
|
apache-2.0
| 209
| 0.004785
|