| id | file_name | file_path | content | size | language | extension | total_lines | avg_line_length | max_line_length | alphanum_fraction | repo_name | repo_stars | repo_forks | repo_open_issues | repo_license | repo_extraction_date |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 11,300 | test_box.py | Kozea_pygal/pygal/test/test_box.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Box chart related tests"""
from pygal.graph.box import Box
def test_quartiles():
"""Test box points for the 1.5IQR computation method"""
a = [-2.0, 3.0, 4.0, 5.0, 8.0] # odd test data
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
a, mode='1.5IQR'
)
assert q1 == 7.0 / 4.0
assert q2 == 4.0
assert q3 == 23 / 4.0
assert q0 == 7.0 / 4.0 - 6.0 # q1 - 1.5 * iqr
assert q4 == 23 / 4.0 + 6.0 # q3 + 1.5 * iqr
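# The expected numbers above are consistent with an interpolated quartile
# scheme (cf. the Wikipedia "Quartile" article, also cited below for the
# tukey mode). For the sorted data a = [-2, 3, 4, 5, 8]:
#   q1 = 0.25 * -2 + 0.75 * 3 = 1.75  (7 / 4)
#   q3 = 0.75 * 5 + 0.25 * 8 = 5.75   (23 / 4)
#   iqr = q3 - q1 = 4, so the whiskers q0 and q4 sit 1.5 * 4 = 6 away.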
b = [1.0, 4.0, 6.0, 8.0] # even test data
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
b, mode='1.5IQR'
)
assert q2 == 5.0
c = [2.0, None, 4.0, 6.0, None] # odd with None elements
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
c, mode='1.5IQR'
)
assert q2 == 4.0
d = [4]
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
d, mode='1.5IQR'
)
assert q0 == 4
assert q1 == 4
assert q2 == 4
assert q3 == 4
assert q4 == 4
def test_quartiles_min_extremes():
"""Test box points for the extremes computation method"""
a = [-2.0, 3.0, 4.0, 5.0, 8.0] # odd test data
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
a, mode='extremes'
)
assert q1 == 7.0 / 4.0
assert q2 == 4.0
assert q3 == 23 / 4.0
assert q0 == -2.0 # min
assert q4 == 8.0 # max
b = [1.0, 4.0, 6.0, 8.0] # even test data
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
b, mode='extremes'
)
assert q2 == 5.0
c = [2.0, None, 4.0, 6.0, None] # odd with None elements
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
c, mode='extremes'
)
assert q2 == 4.0
d = [4]
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
d, mode='extremes'
)
assert q0 == 4
assert q1 == 4
assert q2 == 4
assert q3 == 4
assert q4 == 4
def test_quartiles_tukey():
"""Test box points for the tukey computation method"""
a = [] # empty data
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
a, mode='tukey'
)
assert min_s == q0 == q1 == q2 == q3 == q4 == 0
assert outliers == []
# https://en.wikipedia.org/wiki/Quartile example 1
b = [6, 7, 15, 36, 39, 40, 41, 42, 43, 47, 49]
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
b, mode='tukey'
)
assert min_s == q0 == 6
assert q1 == 20.25
assert q2 == 40
assert q3 == 42.75
assert max_s == q4 == 49
assert outliers == []
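# For this 11-value Wikipedia example the same interpolation appears to give
# q1 = 0.75 * 15 + 0.25 * 36 = 20.25 and q3 = 0.25 * 42 + 0.75 * 43 = 42.75;
# no point falls outside q1 - 1.5 * iqr .. q3 + 1.5 * iqr, hence no outliers.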
# previous test with added outlier 75
c = [6, 7, 15, 36, 39, 40, 41, 42, 43, 47, 49, 75]
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
c, mode='tukey'
)
assert min_s == q0 == 6
assert q1 == 25.5
assert q2 == (40 + 41) / 2.0
assert q3 == 45
assert max_s == 75
assert outliers == [75]
# one more outlier, 77
c = [6, 7, 15, 36, 39, 40, 41, 42, 43, 47, 49, 75, 77]
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
c, mode='tukey'
)
assert min_s == q0 == 6
assert q1 == 30.75
assert q2 == 41
assert q3 == 47.5
assert max_s == 77
assert 75 in outliers
assert 77 in outliers
def test_quartiles_stdev():
"""Test box points for the stdev computation method"""
a = [
35, 42, 35, 41, 36, 6, 12, 51, 33, 27, 46, 36, 44, 53, 75, 46, 16, 51,
45, 29, 25, 26, 54, 61, 27, 40, 23, 34, 51, 37
]
SD = 14.67
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
a, mode='stdev'
)
assert min_s == min(a)
assert max_s == max(a)
assert q2 == 36.5
assert q4 <= q2 + SD
assert q0 >= q2 - SD
assert all(n in outliers for n in [6, 12, 16, 53, 54, 61, 75])
b = [5]  # test for possible zero division
(min_s, q0, q1, q2, q3, q4, max_s), outliers = Box._box_points(
b, mode='stdev'
)
assert min_s == q0 == q1 == q2 == q3 == q4 == max_s == b[0]
assert outliers == []
def test_simple_box():
"""Simple box test"""
box = Box()
box.add('test1', [-1, 2, 3, 3.1, 3.2, 4, 5])
box.add('test2', [2, 3, 5, 6, 6, 4])
box.title = 'Box test'
q = box.render_pyquery()
assert len(q(".axis.y")) == 1
assert len(q(".legend")) == 2
assert len(q(".plot .series rect")) == 2
| 5,213 | Python | .py | 154 | 28.88961 | 79 | 0.562736 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,301 | test_line_log_none_max_solved.py | Kozea_pygal/pygal/test/test_line_log_none_max_solved.py |
# This file is a test file for NoneMaxSolved.
# line.py was modified and the other tests still pass.
# This test checks whether a None value in a Log graph
# is treated as the maximum or not (issue #309).
from __future__ import division
from pygal import Line
chart = Line(title='test', logarithmic=True)
chart.add('test 1', [None, -38, 48, 4422, 35586, 1003452, 225533])
chart.add('test 2', [1, 40, 20, 38, 2937, 20399, 3947])
q = chart.render_pyquery()
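# Expected count (presumably): the None and the negative value in 'test 1'
# cannot be placed on a logarithmic scale, so 5 of its 7 points are drawn,
# plus all 7 points of 'test 2', giving 12 dots.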
assert len(q(".dots")) == 12
| 481 | Python | .py | 11 | 42.454545 | 66 | 0.719486 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,302 | test_formatters.py | Kozea_pygal/pygal/test/test_formatters.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Test formatters"""
from pygal import formatters
def test_human_readable():
"""Test human_readable formatter"""
f = formatters.human_readable
assert f(1) == '1'
assert f(1.) == '1'
assert f(10) == '10'
assert f(12.5) == '12.5'
assert f(1000) == '1k'
assert f(5000) == '5k'
assert f(100000) == '100k'
assert f(1253) == '1.253k'
assert f(1250) == '1.25k'
assert f(0.1) == '100m'
assert f(0.01) == '10m'
assert f(0.001) == '1m'
assert f(0.002) == '2m'
assert f(0.0025) == '2.5m'
assert f(0.0001) == '100µ'
assert f(0.000123) == '123µ'
assert f(0.00001) == '10µ'
assert f(0.000001) == '1µ'
assert f(0.0000001) == '100n'
assert f(0.0000000001) == '100p'
assert f(0) == '0'
assert f(0.) == '0'
assert f(-1337) == '-1.337k'
assert f(-.000000042) == '-42n'
def test_human_readable_custom():
"""Test human_readable formatter option"""
f = formatters.HumanReadable()
assert f(None) == '∅'
f = formatters.HumanReadable(none_char='/')
assert f(None) == '/'
def test_significant():
"""Test significant formatter"""
f = formatters.significant
assert f(1) == '1'
assert f(1.) == '1'
assert f(-1.) == '-1'
assert f(10) == '10'
assert f(10000000000) == '1e+10'
assert f(100000000000) == '1e+11'
assert f(120000000000) == '1.2e+11'
assert f(.1) == '0.1'
assert f(.01) == '0.01'
assert f(.0000000001) == '1e-10'
assert f(-.0000000001) == '-1e-10'
assert f(.0000000001002) == '1.002e-10'
assert f(.0000000001002) == '1.002e-10'
assert f(.12345678912345) == '0.1234567891'
assert f(.012345678912345) == '0.01234567891'
assert f(12345678912345) == '1.234567891e+13'
| 2,535 | Python | .py | 72 | 31.194444 | 79 | 0.638002 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,303 | test_sparktext.py | Kozea_pygal/pygal/test/test_sparktext.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Test sparktext rendering"""
from pygal import Bar, Line
def test_basic_sparktext():
"""Test basic sparktext"""
chart = Line()
chart.add('_', [1, 5, 22, 13, 53])
assert chart.render_sparktext() == '▁▁▃▂█'
def test_all_sparktext():
"""Test all character sparktext"""
chart = Line()
chart.add('_', range(8))
assert chart.render_sparktext() == '▁▂▃▄▅▆▇█'
def test_shifted_sparktext():
"""Test relative_to option in sparktext"""
chart = Line()
chart.add('_', list(map(lambda x: x + 10000, range(8))))
assert chart.render_sparktext() == '▁▂▃▄▅▆▇█'
assert chart.render_sparktext(relative_to=0) == '▇▇▇▇▇▇▇█'
def test_another_sparktext():
"""Test that same data produces same sparktext"""
chart = Line()
chart.add('_', [0, 30, 55, 80, 33, 150])
assert chart.render_sparktext() == '▁▂▃▄▂█'
assert chart.render_sparktext() == chart.render_sparktext()
chart2 = Bar()
chart2.add('_', [0, 30, 55, 80, 33, 150])
assert chart2.render_sparktext() == chart.render_sparktext()
def test_negative_and_float__sparktext():
"""Test negative values"""
"""Test negative values"""
chart = Line()
chart.add('_', [0.1, 0.2, 0.9, -0.5])
assert chart.render_sparktext() == '▁▂█▁'
def test_no_data_sparktext():
"""Test no data sparktext"""
chart2 = Line()
chart2.add('_', [])
assert chart2.render_sparktext() == ''
chart3 = Line()
assert chart3.render_sparktext() == ''
def test_same_max_and_relative_values_sparktext():
"""Test flat sparktexts"""
chart = Line()
chart.add('_', [0, 0, 0, 0, 0])
assert chart.render_sparktext() == '▁▁▁▁▁'
chart2 = Line()
chart2.add('_', [1, 1, 1, 1, 1])
assert chart2.render_sparktext(relative_to=1) == '▁▁▁▁▁'
| 2,660 | Python | .py | 66 | 35.181818 | 79 | 0.652542 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,304 | test_graph.py | Kozea_pygal/pygal/test/test_graph.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Generate tests for different chart types with different data"""
import io
import os
import uuid
import pytest
import pygal
from pygal.graph.map import BaseMap
from pygal.test import make_data
from pygal.util import cut
try:
import cairosvg
except ImportError:
cairosvg = None
def test_multi_render(Chart, datas):
"""Check that a chart always render the same"""
chart = Chart()
chart = make_data(chart, datas)
svg = chart.render()
for i in range(2):
assert svg == chart.render()
def test_render_to_file(Chart, datas):
"""Test in file rendering"""
file_name = '/tmp/test_graph-%s.svg' % uuid.uuid4()
if os.path.exists(file_name):
os.remove(file_name)
chart = Chart()
chart = make_data(chart, datas)
chart.render_to_file(file_name)
with io.open(file_name, encoding="utf-8") as f:
assert 'pygal' in f.read()
os.remove(file_name)
@pytest.mark.skipif(not cairosvg, reason="CairoSVG not installed")
def test_render_to_png(Chart, datas):
"""Test in file png rendering"""
file_name = '/tmp/test_graph-%s.png' % uuid.uuid4()
if os.path.exists(file_name):
os.remove(file_name)
chart = Chart()
chart = make_data(chart, datas)
chart.render_to_png(file_name)
png = chart._repr_png_()
with open(file_name, 'rb') as f:
assert png == f.read()
os.remove(file_name)
def test_metadata(Chart):
"""Test metadata values"""
chart = Chart()
v = range(7)
if Chart in (pygal.Box, ):
return # summary charts cannot display per-value metadata
elif Chart == pygal.XY:
v = list(map(lambda x: (x, x + 1), v))
elif issubclass(Chart, BaseMap):
v = [(k, i) for i, k in enumerate(Chart.x_labels)
if k not in ['oecd', 'nafta', 'eur']]
chart.add(
'Serie with metadata', [
v[0], {
'value': v[1]
}, {
'value': v[2],
'label': 'Three'
}, {
'value': v[3],
'xlink': 'http://4.example.com/'
}, {
'value': v[4],
'xlink': 'http://5.example.com/',
'label': 'Five'
}, {
'value': v[5],
'xlink': {
'href': 'http://6.example.com/'
},
'label': 'Six'
}, {
'value': v[6],
'xlink': {
'href': 'http://7.example.com/',
'target': '_blank'
},
'label': 'Seven'
}
]
)
q = chart.render_pyquery()
for md in ('Three', 'Five', 'Seven'):
assert md in cut(q('desc'), 'text')
for md in ('http://7.example.com/', 'http://4.example.com/'):
assert md in [e.attrib.get('xlink:href') for e in q('a')]
if Chart in (pygal.Pie, pygal.Treemap, pygal.SolidGauge):
# Slices with value 0 are not rendered
assert len(v) - 1 == len(q('.tooltip-trigger').siblings('.value'))
elif not issubclass(Chart, BaseMap):
# Tooltips are not working on maps
assert len(v) == len(q('.tooltip-trigger').siblings('.value'))
def test_empty_lists(Chart):
"""Test chart rendering with an empty serie"""
chart = Chart()
chart.add('A', [1, 2])
chart.add('B', [])
if not chart._dual:
chart.x_labels = ('red', 'green', 'blue')
q = chart.render_pyquery()
assert len(q(".legend")) == 2
def test_empty_lists_with_nones(Chart):
"""Test chart rendering with a None filled serie"""
chart = Chart()
chart.add('A', [None, None])
chart.add('B', [None, 4, 4])
q = chart.render_pyquery()
assert len(q(".legend")) == 2
def test_only_one_value(Chart):
"""Test chart rendering with only one value"""
chart = Chart()
chart.add('S', [1])
q = chart.render_pyquery()
assert len(q(".legend")) == 1
def test_only_one_value_log(Chart):
"""Test logarithmic chart rendering with only one value"""
chart = Chart(logarithmic=True)
chart.add('S', [1])
if not chart._dual:
chart.x_labels = ('single')
q = chart.render_pyquery()
assert len(q(".legend")) == 1
def test_only_one_value_intrp(Chart):
"""Test interpolated chart rendering with only one value"""
chart = Chart(interpolate='cubic')
chart.add('S', [1])
q = chart.render_pyquery()
assert len(q(".legend")) == 1
def test_non_iterable_value(Chart):
"""Test serie as non iterable"""
chart = Chart(no_prefix=True)
chart.add('A', 1)
chart.add('B', 2)
if not chart._dual:
chart.x_labels = ('red', 'green', 'blue')
chart1 = chart.render()
chart = Chart(no_prefix=True)
chart.add('A', [1])
chart.add('B', [2])
if not chart._dual:
chart.x_labels = ('red', 'green', 'blue')
chart2 = chart.render()
assert chart1 == chart2
def test_iterable_types(Chart):
"""Test serie as various iterable"""
chart = Chart(no_prefix=True)
chart.add('A', [1, 2])
chart.add('B', [])
if not chart._dual:
chart.x_labels = ('red', 'green', 'blue')
chart1 = chart.render()
chart = Chart(no_prefix=True)
chart.add('A', (1, 2))
chart.add('B', tuple())
if not chart._dual:
chart.x_labels = ('red', 'green', 'blue')
chart2 = chart.render()
assert chart1 == chart2
def test_values_by_dict(Chart):
"""Test serie as dict"""
chart1 = Chart(no_prefix=True)
chart2 = Chart(no_prefix=True)
if not issubclass(Chart, BaseMap) and not Chart._dual:
chart1.add('A', {'red': 10, 'green': 12, 'blue': 14})
chart1.add('B', {'green': 11, 'red': 7})
chart1.add('C', {'blue': 7})
chart1.add('D', {})
chart1.add('E', {'blue': 2, 'red': 13})
chart1.x_labels = ('red', 'green', 'blue')
chart2.add('A', [10, 12, 14])
chart2.add('B', [7, 11])
chart2.add('C', [None, None, 7])
chart2.add('D', [])
chart2.add('E', [13, None, 2])
chart2.x_labels = ('red', 'green', 'blue')
elif not Chart._dual:
chart1.add('A', {'fr': 10, 'us': 12, 'jp': 14})
chart1.add('B', {'cn': 99})
chart1.add('C', {})
chart2.add('A', [('fr', 10), ('us', 12), ('jp', 14)])
chart2.add('B', [('cn', 99)])
chart2.add('C', [None, (None, None)])
assert chart1.render() == chart2.render()
def test_no_data_with_no_values(Chart):
"""Test no data"""
chart = Chart()
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_no_values_with_include_x_axis(Chart):
"""Test no data and include_x_axis"""
chart = Chart(include_x_axis=True)
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_empty_serie(Chart):
"""Test no data for empty serie"""
chart = Chart()
chart.add('Serie', [])
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_empty_series(Chart):
"""Test no data for 2 empty series"""
chart = Chart()
chart.add('Serie1', [])
chart.add('Serie2', [])
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_none(Chart):
"""Test no data for a None containing serie"""
chart = Chart()
chart.add('Serie', None)
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_list_of_none(Chart):
"""Test no data for a None containing serie"""
chart = Chart()
chart.add('Serie', [None])
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_lists_of_nones(Chart):
"""Test no data for several None containing series"""
chart = Chart()
chart.add('Serie1', [None, None, None, None])
chart.add('Serie2', [None, None, None])
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_unicode_labels_decode(Chart):
"""Test unicode labels"""
chart = Chart()
chart.add(
'Série1', [{
'value': 1,
'xlink': 'http://1/',
'label': '°ijæð©&×&<—×€¿_…'
}, {
'value': 2,
'xlink': {
'href': 'http://6.example.com/'
},
'label': 'æÂ°€≠|€æÂ°€əæ'
}, {
'value': 3,
'label': 'unicode <3'
}]
)
if not chart._dual:
chart.x_labels = ['&œ', '¿?', '††††††††', 'unicode <3']
chart.render_pyquery()
def test_unicode_labels(Chart):
chart = Chart()
chart.add(
'Série1', [{
'value': 1,
'xlink': 'http://1/',
'label': eval("'°ijæð©&×&<—×€¿_…'")
}, {
'value': 2,
'xlink': {
'href': 'http://6.example.com/'
},
'label': eval("'æÂ°€≠|€æÂ°€əæ'")
}, {
'value': 3,
'label': eval("b'unicode <3'")
}]
)
if not chart._dual:
chart.x_labels = eval("['&œ', '¿?', '††††††††', 'unicode <3']")
chart.render_pyquery()
def test_labels_with_links(Chart):
"""Test values with links"""
chart = Chart()
# link on chart and label
chart.add({
'title': 'Red',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Red'
}
}, [{
'value': 2,
'label': 'This is red',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Red'
}
}])
# link on chart only
chart.add(
'Green', [{
'value': 4,
'label': 'This is green',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Green',
'target': '_top'
}
}]
)
# link on label only opens in new tab
chart.add({
'title': 'Yellow',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Yellow',
'target': '_blank'
}
}, 7)
# link on chart only
chart.add(
'Blue', [{
'value': 5,
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Blue',
'target': '_blank'
}
}]
)
# link on label and chart with different behaviours
chart.add({
'title': 'Violet',
'xlink': 'http://en.wikipedia.org/wiki/Violet_(color)'
}, [{
'value': 3,
'label': 'This is violet',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Violet_(color)',
'target': '_self'
}
}])
q = chart.render_pyquery()
links = q('a')
assert len(links) == 7 or isinstance(chart, BaseMap) and len(links) == 3
def test_sparkline(Chart, datas):
"""Test sparkline"""
chart = Chart()
chart = make_data(chart, datas)
assert chart.render_sparkline()
def test_secondary(Chart):
"""Test secondary chart"""
chart = Chart()
rng = [83, .12, -34, 59]
chart.add('First serie', rng)
chart.add('Secondary serie', map(lambda x: x * 2, rng), secondary=True)
assert chart.render_pyquery()
def test_ipython_notebook(Chart, datas):
"""Test ipython notebook"""
chart = Chart()
chart = make_data(chart, datas)
assert chart._repr_svg_()
def test_long_title(Chart, datas):
"""Test chart rendering with a long title"""
chart = Chart(
title="A chart is a graphical representation of data, in which "
"'the data is represented by symbols, such as bars in a bar chart, "
"lines in a line chart, or slices in a pie chart'. A chart can "
"represent tabular numeric data, functions or some kinds of "
"qualitative structure and provides different info."
)
chart = make_data(chart, datas)
q = chart.render_pyquery()
assert len(q('.titles text')) == 5
| 12,894 | Python | .py | 379 | 26.709763 | 79 | 0.557212 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,305 | test_line.py | Kozea_pygal/pygal/test/test_line.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Line chart related tests"""
from __future__ import division
from math import cos, sin
from pygal import Line
from pygal.test.utils import texts
def test_simple_line():
"""Simple line test"""
line = Line()
rng = range(-30, 31, 5)
line.add('test1', [cos(x / 10) for x in rng])
line.add('test2', [sin(x / 10) for x in rng])
line.add('test3', [cos(x / 10) - sin(x / 10) for x in rng])
line.x_labels = map(str, rng)
line.title = "cos sin and cos - sin"
q = line.render_pyquery()
assert len(q(".axis.x")) == 1
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 3
assert len(q(".legend")) == 3
assert len(q(".x.axis .guides")) == 13
assert len(q(".y.axis .guides")) == 13
assert len(q(".dots")) == 3 * 13
assert q(".axis.x text").map(texts) == [
'-30', '-25', '-20', '-15', '-10', '-5', '0', '5', '10', '15', '20',
'25', '30'
]
assert q(".axis.y text").map(texts) == [
'-1.2', '-1', '-0.8', '-0.6', '-0.4', '-0.2', '0', '0.2', '0.4', '0.6',
'0.8', '1', '1.2'
]
assert q(".title").text() == 'cos sin and cos - sin'
assert q(".legend text").map(texts) == ['test1', 'test2', 'test3']
def test_line():
"""Another simple line test"""
line = Line()
rng = [8, 12, 23, 73, 39, 57]
line.add('Single serie', rng)
line.title = "One serie"
q = line.render_pyquery()
assert len(q(".axis.x")) == 0
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 1
assert len(q(".x.axis .guides")) == 0
assert len(q(".y.axis .guides")) == 7
def test_one_dot():
"""Line test with an unique value"""
line = Line()
line.add('one dot', [12])
line.x_labels = ['one']
q = line.render_pyquery()
assert len(q(".axis.x")) == 1
assert len(q(".axis.y")) == 1
assert len(q(".y.axis .guides")) == 1
def test_no_dot():
"""Line test with an empty serie"""
line = Line()
line.add('no dot', [])
q = line.render_pyquery()
assert q(".text-overlay text").text() == 'No data'
def test_no_dot_at_all():
"""Line test with no value"""
q = Line().render_pyquery()
assert q(".text-overlay text").text() == 'No data'
def test_not_equal_x_labels():
"""Test x_labels"""
line = Line()
line.add('test1', range(100))
line.truncate_label = -1
line.x_labels = map(str, range(11))
q = line.render_pyquery()
assert len(q(".dots")) == 100
assert len(q(".axis.x")) == 1
assert q(".axis.x text").map(texts) == [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10'
]
def test_int_x_labels():
"""Test x_labels"""
line = Line()
line.add('test1', range(100))
line.truncate_label = -1
line.x_labels = list(range(11))
q = line.render_pyquery()
assert len(q(".dots")) == 100
assert len(q(".axis.x")) == 1
assert q(".axis.x text").map(texts) == [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10'
]
def test_only_major_dots_every():
"""Test major dots"""
line = Line(show_only_major_dots=True, x_labels_major_every=3)
line.add('test', range(12))
line.x_labels = map(str, range(12))
q = line.render_pyquery()
assert len(q(".dots")) == 4
def test_only_major_dots_no_labels():
"""Test major dots with no labels"""
line = Line(show_only_major_dots=True)
line.add('test', range(12))
q = line.render_pyquery()
assert len(q(".dots")) == 12
def test_only_major_dots_count():
"""Test major dots with a major label count"""
line = Line(show_only_major_dots=True)
line.add('test', range(12))
line.x_labels = map(str, range(12))
line.x_labels_major_count = 2
q = line.render_pyquery()
assert len(q(".dots")) == 2
def test_only_major_dots():
"""Test major dots with specified major labels"""
line = Line(show_only_major_dots=True, )
line.add('test', range(12))
line.x_labels = map(str, range(12))
line.x_labels_major = ['1', '5', '11']
q = line.render_pyquery()
assert len(q(".dots")) == 3
def test_line_secondary():
"""Test line with a secondary serie"""
line = Line()
rng = [8, 12, 23, 73, 39, 57]
line.add('First serie', rng)
line.add('Secondary serie', map(lambda x: x * 2, rng), secondary=True)
line.title = "One serie"
q = line.render_pyquery()
assert len(q(".axis.x")) == 0
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 2
assert len(q(".x.axis .guides")) == 0
assert len(q(".y.axis .guides")) == 7
| 5,352 | Python | .py | 147 | 32.013605 | 79 | 0.589803 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,306 | test_style.py | Kozea_pygal/pygal/test/test_style.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Style related tests"""
from pygal import Line
from pygal.style import (
DarkenStyle,
DesaturateStyle,
LightenStyle,
LightStyle,
RotateStyle,
SaturateStyle,
)
STYLES = LightenStyle, DarkenStyle, SaturateStyle, DesaturateStyle, RotateStyle
def test_parametric_styles():
"""Test that no parametric produce the same result"""
chart = None
for style in STYLES:
line = Line(style=style('#f4e83a'))
line.add('_', [1, 2, 3])
line.x_labels = 'abc'
new_chart = line.render()
assert chart != new_chart
chart = new_chart
def test_parametric_styles_with_parameters():
"""Test a parametric style with parameters"""
line = Line(
style=RotateStyle('#de3804', step=12, max_=180, base_style=LightStyle)
)
line.add('_', [1, 2, 3])
line.x_labels = 'abc'
assert line.render()
| 1,653 | Python | .py | 47 | 31.574468 | 79 | 0.71 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,307 | utils.py | Kozea_pygal/pygal/test/utils.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Tests helpers"""
from pyquery import PyQuery as pq
def texts(i, e):
"""Helper for getting the text of an element"""
return pq(e).text()
| 923 | Python | .py | 23 | 38.652174 | 79 | 0.756968 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,308 | __init__.py | Kozea_pygal/pygal/test/__init__.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Pygal test package"""
from decimal import Decimal
import pygal
from pygal.graph.map import BaseMap
from pygal.util import cut
def get_data(i):
"""Return sample test data for an index"""
return [[(-1, 1), (2, 0), (0, 4)], [(0, 1), (None, 2), (3, 2)],
[(-3, 3), (1, 3), (1, 1)], [(1, 1), (Decimal('1.'), 1),
(1, 1)], [(3, 2), (2, 1), (1., 1)]][i]
def adapt(chart, data):
"""Adapt data to chart type"""
if isinstance(chart, pygal.XY):
return data
data = cut(data)
if isinstance(chart, BaseMap):
return list(
map(lambda x: chart.__class__.x_labels[
int(x) % len(chart.__class__.x_labels)]
if x is not None else None, data))
return data
def make_data(chart, datas):
"""Add sample data to the test chart"""
for i, data in enumerate(datas):
chart.add(data[0], adapt(chart, data[1]), secondary=bool(i % 2))
return chart
| 1,757 | Python | .py | 44 | 35.090909 | 79 | 0.64554 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,309 | test_view.py | Kozea_pygal/pygal/test/test_view.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""View related tests"""
# TODO
def test_all_logarithmic(Chart):
"""Test logarithmic view rendering"""
chart = Chart(logarithmic=True)
chart.add('1', [1, 30, 8, 199, -23])
chart.add('2', [87, 42, .9, 189, 81])
assert chart.render()
| 1,028 | Python | .py | 26 | 37.653846 | 79 | 0.735736 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,310 | test_pie.py | Kozea_pygal/pygal/test/test_pie.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Donut chart related tests"""
from pygal import Pie
def test_donut():
"""Test a donut pie chart"""
chart = Pie(inner_radius=.3, pretty_print=True)
chart.title = 'Browser usage in February 2012 (in %)'
chart.add('IE', 19.5)
chart.add('Firefox', 36.6)
chart.add('Chrome', 36.3)
chart.add('Safari', 4.5)
chart.add('Opera', 2.3)
assert chart.render()
def test_multiseries_donut():
"""Test a donut pie chart with multiserie"""
# this just demos that the multiseries pie does not respect
# the inner_radius
chart = Pie(inner_radius=.3, pretty_print=True)
chart.title = 'Browser usage by version in February 2012 (in %)'
chart.add('IE', [5.7, 10.2, 2.6, 1])
chart.add('Firefox', [.6, 16.8, 7.4, 2.2, 1.2, 1, 1, 1.1, 4.3, 1])
chart.add('Chrome', [.3, .9, 17.1, 15.3, .6, .5, 1.6])
chart.add('Safari', [4.4, .1])
chart.add('Opera', [.1, 1.6, .1, .5])
assert chart.render()
def test_half_pie():
"""Test a half pie chart"""
pie = Pie()
pie.add('IE', 19.5)
pie.add('Firefox', 36.6)
pie.add('Chrome', 36.3)
pie.add('Safari', 4.5)
pie.add('Opera', 2.3)
half = Pie(half_pie=True)
half.add('IE', 19.5)
half.add('Firefox', 36.6)
half.add('Chrome', 36.3)
half.add('Safari', 4.5)
half.add('Opera', 2.3)
assert pie.render() != half.render()
| 2,140 | Python | .py | 57 | 34.017544 | 79 | 0.660723 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,311 | test_xml_filters.py | Kozea_pygal/pygal/test/test_xml_filters.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Xml filter tests"""
from pygal import Bar
class ChangeBarsXMLFilter(object):
"""xml filter that insert a subplot"""
def __init__(self, a, b):
"""Generate data"""
self.data = [b[i] - a[i] for i in range(len(a))]
def __call__(self, T):
"""Apply the filter on the tree"""
subplot = Bar(
legend_at_bottom=True, explicit_size=True, width=800, height=150
)
subplot.add("Difference", self.data)
subplot = subplot.render_tree()
subplot = subplot.findall("g")[0]
T.insert(2, subplot)
T.findall("g")[1].set('transform', 'translate(0,150), scale(1,0.75)')
return T
def test_xml_filters_round_trip():
"""Ensure doing nothing does nothing"""
plot = Bar()
plot.add("A", [60, 75, 80, 78, 83, 90])
plot.add("B", [92, 87, 81, 73, 68, 55])
before = plot.render()
plot.add_xml_filter(lambda T: T)
after = plot.render()
assert before == after
def test_xml_filters_change_bars():
"""Test the use a xml filter"""
plot = Bar(
legend_at_bottom=True, explicit_size=True, width=800, height=600
)
A = [60, 75, 80, 78, 83, 90]
B = [92, 87, 81, 73, 68, 55]
plot.add("A", A)
plot.add("B", B)
plot.add_xml_filter(ChangeBarsXMLFilter(A, B))
q = plot.render_tree()
assert len(q.findall("g")) == 2
assert q.findall("g")[1].attrib["transform"
] == "translate(0,150), scale(1,0.75)"
| 2,264 | Python | .py | 59 | 33.288136 | 79 | 0.64071 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,312 | test_serie_config.py | Kozea_pygal/pygal/test/test_serie_config.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Test per serie configuration"""
from pygal import Line
s1 = [1, 3, 12, 3, 4]
s2 = [7, -4, 10, None, 8, 3, 1]
def test_no_serie_config():
"""Test per serie no configuration"""
chart = Line()
chart.add('1', s1)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 1
assert len(q('.serie-1 .line')) == 1
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
def test_global_config():
"""Test global configuration"""
chart = Line(stroke=False)
chart.add('1', s1)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 0
assert len(q('.serie-1 .line')) == 0
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
def test_serie_config():
"""Test per serie configuration"""
chart = Line()
chart.add('1', s1, stroke=False)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 0
assert len(q('.serie-1 .line')) == 1
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
def test_serie_precedence_over_global_config():
"""Test that per serie configuration overide global configuration"""
chart = Line(stroke=False)
chart.add('1', s1, stroke=True)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 1
assert len(q('.serie-1 .line')) == 0
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
| 2,295 | Python | .py | 62 | 33.532258 | 79 | 0.649123 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,313 | test_util.py | Kozea_pygal/pygal/test/test_util.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Utility functions tests"""
from pytest import raises
from pygal.util import (
_swap_curly,
majorize,
mergextend,
minify_css,
round_to_float,
round_to_int,
template,
truncate,
)
def test_round_to_int():
"""Test round to int function"""
assert round_to_int(154231, 1000) == 154000
assert round_to_int(154231, 10) == 154230
assert round_to_int(154231, 100000) == 200000
assert round_to_int(154231, 50000) == 150000
assert round_to_int(154231, 500) == 154000
assert round_to_int(154231, 200) == 154200
assert round_to_int(154361, 200) == 154400
def test_round_to_float():
"""Test round to float function"""
assert round_to_float(12.01934, .01) == 12.02
assert round_to_float(12.01134, .01) == 12.01
assert round_to_float(12.1934, .1) == 12.2
assert round_to_float(12.1134, .1) == 12.1
assert round_to_float(12.1134, .001) == 12.113
assert round_to_float(12.1134, .00001) == 12.1134
assert round_to_float(12.1934, .5) == 12.0
assert round_to_float(12.2934, .5) == 12.5
def test_swap_curly():
"""Test swap curly function"""
for str in ('foo', 'foo foo foo bar', 'foo béè b¡ð/ijə˘©þß®~¯æ',
'foo béè b¡ð/ijə˘©þß®~¯æ'):
assert _swap_curly(str) == str
assert _swap_curly('foo{bar}baz') == 'foo{{bar}}baz'
assert _swap_curly('foo{{bar}}baz') == 'foo{bar}baz'
assert _swap_curly('{foo}{{bar}}{baz}') == '{{foo}}{bar}{{baz}}'
assert _swap_curly('{foo}{{{bar}}}{baz}') == '{{foo}}{{{bar}}}{{baz}}'
assert _swap_curly('foo{ bar }baz') == 'foo{{ bar }}baz'
assert _swap_curly('foo{ bar}baz') == 'foo{{ bar}}baz'
assert _swap_curly('foo{bar }baz') == 'foo{{bar }}baz'
assert _swap_curly('foo{{ bar }}baz') == 'foo{bar}baz'
assert _swap_curly('foo{{bar }}baz') == 'foo{bar}baz'
assert _swap_curly('foo{{ bar}}baz') == 'foo{bar}baz'
def test_format():
"""Test format function"""
assert template('foo {{ baz }}', baz='bar') == 'foo bar'
with raises(KeyError):
assert template('foo {{ baz }}') == 'foo baz'
class Object(object):
pass
obj = Object()
obj.a = 1
obj.b = True
obj.c = '3'
assert template('foo {{ o.a }} {{o.b}}-{{o.c}}', o=obj) == 'foo 1 True-3'
def test_truncate():
"""Test truncate function"""
assert truncate('1234567890', 50) == '1234567890'
assert truncate('1234567890', 5) == '1234…'
assert truncate('1234567890', 1) == '…'
assert truncate('1234567890', 9) == '12345678…'
assert truncate('1234567890', 10) == '1234567890'
assert truncate('1234567890', 0) == '1234567890'
assert truncate('1234567890', -1) == '1234567890'
def test_minify_css():
"""Test css minifier function"""
css = '''
/*
* Font-sizes from config, override with care
*/
.title {
font-family: sans;
font-size: 12 ;
}
.legends .legend text {
font-family: monospace;
font-size: 14 ;}
'''
assert minify_css(css) == (
'.title{font-family:sans;font-size:12}'
'.legends .legend text{font-family:monospace;font-size:14}'
)
def test_majorize():
"""Test majorize function"""
assert majorize(()) == []
assert majorize((0, )) == []
assert majorize((0, 1)) == []
assert majorize((0, 1, 2)) == []
assert majorize((-1, 0, 1, 2)) == [0]
assert majorize((0, .1, .2, .3, .4, .5, .6, .7, .8, .9, 1)) == [0, .5, 1]
assert majorize((0, .2, .4, .6, .8, 1)) == [0, 1]
assert majorize((-.4, -.2, 0, .2, .4, .6, .8, 1)) == [0, 1]
assert majorize((-1, -.8, -.6, -.4, -.2, 0, .2, .4, .6, .8,
1)) == [-1, 0, 1]
assert majorize((0, .2, .4, .6, .8, 1, 1.2, 1.4, 1.6)) == [0, 1]
assert majorize((0, .2, .4, .6, .8, 1, 1.2, 1.4, 1.6, 1.8, 2)) == [0, 1, 2]
assert majorize((0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110,
120)) == [0, 50, 100]
assert majorize((
0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36
)) == [0, 10, 20, 30]
assert majorize((0, 1, 2, 3, 4, 5)) == [0, 5]
assert majorize((-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5)) == [-5, 0, 5]
assert majorize((-5, 5, -4, 4, 0, 1, -1, 3, -2, 2, -3)) == [-5, 0, 5]
assert majorize((0, 1, 2, 3, 4)) == [0]
assert majorize((3, 4, 5, 6)) == [5]
assert majorize((0, 1, 2, 3, 4, 5, 6, 7, 8)) == [0, 5]
assert majorize((-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5)) == [-5, 0, 5]
assert majorize((-6, -5, -4, -3, -2, -1, 0, 1, 2, 3)) == [-5, 0]
assert majorize((-6, -5, -4, -3)) == [-5]
assert majorize((1, 10, 100, 1000, 10000, 100000)) == []
assert majorize(range(30, 70, 5)) == [30, 40, 50, 60]
assert majorize(range(20, 55, 2)) == [20, 30, 40, 50]
assert majorize(range(21, 83, 3)) == [30, 45, 60, 75]
# TODO: handle crazy cases
# assert majorize(range(20, 83, 3)) == [20, 35, 50, 65, 80]
def test_mergextend():
"""Test mergextend function"""
assert mergextend(['a', 'b'], ['c', 'd']) == ['a', 'b']
assert mergextend([], ['c', 'd']) == []
assert mergextend(['a', 'b'], []) == ['a', 'b']
assert mergextend([Ellipsis], ['c', 'd']) == ['c', 'd']
assert mergextend([Ellipsis, 'b'], ['c', 'd']) == ['c', 'd', 'b']
assert mergextend(['a', Ellipsis], ['c', 'd']) == ['a', 'c', 'd']
assert mergextend(['a', Ellipsis, 'b'],
['c', 'd']) == ['a', 'c', 'd', 'b']
assert mergextend(['a', ..., 'b'], ['c', 'd']) == ['a', 'c', 'd', 'b']
| 6,281 | Python | .py | 148 | 37.486486 | 79 | 0.563385 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,314 | test_stacked.py | Kozea_pygal/pygal/test/test_stacked.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Stacked chart related tests"""
from pygal import StackedLine
def test_stacked_line():
"""Test stacked line"""
stacked = StackedLine()
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
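# The desc values appear to show the cumulated total with the serie's own
# contribution in parentheses, e.g. '11 (+10)' = 1 + 10 from stacking
# ten_twelve on top of one_two.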
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
def test_stacked_line_reverse():
"""Test stack from top stacked line"""
stacked = StackedLine(stack_from_top=True)
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('11 (+1)', '14 (+2)', '10', '12')
)
def test_stacked_line_log():
"""Test logarithmic stacked line"""
stacked = StackedLine(logarithmic=True)
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
def test_stacked_line_interpolate():
"""Test interpolated stacked line"""
stacked = StackedLine(interpolate='cubic')
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
| 2,148 | Python | .py | 56 | 34.625 | 79 | 0.652424 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,315 | test_interpolate.py | Kozea_pygal/pygal/test/test_interpolate.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Interpolations tests"""
from pygal.test import make_data
def test_cubic(Chart, datas):
"""Test cubic interpolation"""
chart = Chart(interpolate='cubic')
chart = make_data(chart, datas)
assert chart.render()
def test_cubic_prec(Chart, datas):
"""Test cubic interpolation precision"""
chart = Chart(interpolate='cubic', interpolation_precision=200)
chart = make_data(chart, datas)
chart_low = Chart(interpolate='cubic', interpolation_precision=5)
chart_low = make_data(chart, datas)
assert len(chart.render()) >= len(chart_low.render())
def test_quadratic(Chart, datas):
"""Test quadratic interpolation"""
chart = Chart(interpolate='quadratic')
chart = make_data(chart, datas)
assert chart.render()
def test_lagrange(Chart, datas):
"""Test lagrange interpolation"""
chart = Chart(interpolate='lagrange')
chart = make_data(chart, datas)
assert chart.render()
def test_trigonometric(Chart, datas):
"""Test trigonometric interpolation"""
chart = Chart(interpolate='trigonometric')
chart = make_data(chart, datas)
assert chart.render()
def test_hermite(Chart, datas):
"""Test hermite interpolation"""
chart = Chart(interpolate='hermite')
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_finite(Chart, datas):
"""Test hermite finite difference interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={'type': 'finite_difference'}
)
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_cardinal(Chart, datas):
"""Test hermite cardinal interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'cardinal',
'c': .75
}
)
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_catmull_rom(Chart, datas):
"""Test hermite catmull rom interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={'type': 'catmull_rom'}
)
chart = make_data(chart, datas)
assert chart.render()
def test_hermite_kochanek_bartels(Chart, datas):
"""Test hermite kochanek bartels interpolation"""
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels',
'b': -1,
'c': 1,
't': 1
}
)
chart = make_data(chart, datas)
assert chart.render()
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels',
'b': -1,
'c': -8,
't': 0
}
)
chart = make_data(chart, datas)
assert chart.render()
chart = Chart(
interpolate='hermite',
interpolation_parameters={
'type': 'kochanek_bartels',
'b': 0,
'c': 10,
't': -1
}
)
chart = make_data(chart, datas)
assert chart.render()
| 3,831 | Python | .py | 114 | 27.894737 | 79 | 0.657096 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,316 | test_config.py | Kozea_pygal/pygal/test/test_config.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Various config options tested on one chart type or more"""
from tempfile import NamedTemporaryFile
from pygal import (
XY,
Bar,
Box,
Config,
DateLine,
DateTimeLine,
Dot,
Funnel,
Gauge,
Histogram,
HorizontalBar,
HorizontalLine,
HorizontalStackedBar,
HorizontalStackedLine,
Line,
Pie,
Pyramid,
Radar,
SolidGauge,
TimeDeltaLine,
TimeLine,
Treemap,
formatters,
)
from pygal.graph.dual import Dual
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.map import BaseMap
from pygal.test.utils import texts
def test_config_behaviours():
"""Test that all different way to set config produce same results"""
line1 = Line()
line1.show_legend = False
line1.fill = True
line1.pretty_print = True
line1.no_prefix = True
line1.x_labels = ['a', 'b', 'c']
line1.add('_', [1, 2, 3])
l1 = line1.render()
q = line1.render_pyquery()
assert len(q(".axis.x")) == 1
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 1
assert len(q(".legend")) == 0
assert len(q(".x.axis .guides")) == 3
assert len(q(".y.axis .guides")) == 11
assert len(q(".dots")) == 3
assert q(".axis.x text").map(texts) == ['a', 'b', 'c']
line2 = Line(
show_legend=False,
fill=True,
pretty_print=True,
no_prefix=True,
x_labels=['a', 'b', 'c']
)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 == l2
class LineConfig(Config):
show_legend = False
fill = True
pretty_print = True
no_prefix = True
x_labels = ['a', 'b', 'c']
line3 = Line(LineConfig)
line3.add('_', [1, 2, 3])
l3 = line3.render()
assert l1 == l3
line4 = Line(LineConfig())
line4.add('_', [1, 2, 3])
l4 = line4.render()
assert l1 == l4
line_config = Config()
line_config.show_legend = False
line_config.fill = True
line_config.pretty_print = True
line_config.no_prefix = True
line_config.x_labels = ['a', 'b', 'c']
line5 = Line(line_config)
line5.add('_', [1, 2, 3])
l5 = line5.render()
assert l1 == l5
l6 = Line(line_config)(1, 2, 3, title='_').render()
assert l1 == l6
def test_config_alterations_class():
"""Assert a config can be changed on config class"""
class LineConfig(Config):
no_prefix = True
show_legend = False
fill = True
pretty_print = True
x_labels = ['a', 'b', 'c']
line1 = Line(LineConfig)
line1.add('_', [1, 2, 3])
l1 = line1.render()
LineConfig.stroke = False
line2 = Line(LineConfig)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 != l2
l1bis = line1.render()
assert l1 == l1bis
def test_config_alterations_instance():
"""Assert a config can be changed on instance"""
class LineConfig(Config):
no_prefix = True
show_legend = False
fill = True
pretty_print = True
x_labels = ['a', 'b', 'c']
config = LineConfig()
line1 = Line(config)
line1.add('_', [1, 2, 3])
l1 = line1.render()
config.stroke = False
line2 = Line(config)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 != l2
l1bis = line1.render()
assert l1 == l1bis
def test_config_alterations_kwargs():
"""Assert a config can be changed with keyword args"""
class LineConfig(Config):
no_prefix = True
show_legend = False
fill = True
pretty_print = True
x_labels = ['a', 'b', 'c']
config = LineConfig()
line1 = Line(config)
line1.add('_', [1, 2, 3])
l1 = line1.render()
line1.stroke = False
l1bis = line1.render()
assert l1 != l1bis
line2 = Line(config)
line2.add('_', [1, 2, 3])
l2 = line2.render()
assert l1 == l2
assert l1bis != l2
line3 = Line(config, title='Title')
line3.add('_', [1, 2, 3])
l3 = line3.render()
assert l3 != l2
l2bis = line2.render()
assert l2 == l2bis
def test_logarithmic():
"""Test logarithmic option"""
line = Line(logarithmic=True)
line.add('_', [1, 10**10, 1])
q = line.render_pyquery()
assert len(q(".axis.x")) == 0
assert len(q(".axis.y")) == 1
assert len(q(".plot .series path")) == 1
assert len(q(".legend")) == 1
assert len(q(".x.axis .guides")) == 0
assert len(q(".y.axis .guides")) == 21
assert len(q(".dots")) == 3
def test_interpolation(Chart):
"""Test interpolation option"""
chart = Chart(interpolate='cubic')
chart.add('1', [1, 3, 12, 3, 4])
chart.add('2', [7, -4, 10, None, 8, 3, 1])
q = chart.render_pyquery()
assert len(q(".legend")) == 2
def test_no_data_interpolation(Chart):
"""Test interpolation option with no data"""
chart = Chart(interpolate='cubic')
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_no_data_with_empty_serie_interpolation(Chart):
"""Test interpolation option with an empty serie"""
chart = Chart(interpolate='cubic')
chart.add('Serie', [])
q = chart.render_pyquery()
assert q(".text-overlay text").text() == "No data"
def test_logarithmic_bad_interpolation():
"""Test interpolation option with a logarithmic chart"""
line = Line(logarithmic=True, interpolate='cubic')
line.add('_', [.001, .00000001, 1])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 41
def test_logarithmic_big_scale():
"""Test logarithmic option with a large range of value"""
line = Line(logarithmic=True)
line.add('_', [10**-10, 10**10, 1])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 21
def test_value_formatter():
"""Test value formatter option"""
line = Line(value_formatter=lambda x: str(x) + '‰')
line.add('_', [10**4, 10**5, 23 * 10**4])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 11
assert q(".axis.y text").map(texts) == list(
map(
lambda x: str(x) + '‰', map(float, range(20000, 240000, 20000))
)
)
def test_logarithmic_small_scale():
"""Test logarithmic with a small range of values"""
line = Line(logarithmic=True)
line.add('_', [1 + 10**10, 3 + 10**10, 2 + 10**10])
q = line.render_pyquery()
assert len(q(".y.axis .guides")) == 11
def test_human_readable():
"""Test human readable option"""
line = Line()
line.add('_', [10**4, 10**5, 23 * 10**4])
q = line.render_pyquery()
assert q(".axis.y text").map(texts) == list(
map(str, range(20000, 240000, 20000))
)
line.value_formatter = formatters.human_readable
q = line.render_pyquery()
assert q(".axis.y text").map(texts) == list(
map(lambda x: '%dk' % x, range(20, 240, 20))
)
def test_show_legend():
"""Test show legend option"""
line = Line()
line.add('_', [1, 2, 3])
q = line.render_pyquery()
assert len(q(".legend")) == 1
line.show_legend = False
q = line.render_pyquery()
assert len(q(".legend")) == 0
def test_show_dots():
"""Test show dots option"""
line = Line()
line.add('_', [1, 2, 3])
q = line.render_pyquery()
assert len(q(".dots")) == 3
line.show_dots = False
q = line.render_pyquery()
assert len(q(".dots")) == 0
def test_no_data():
"""Test no data and no data text option"""
line = Line()
q = line.render_pyquery()
assert q(".text-overlay text").text() == "No data"
line.no_data_text = "þæ®þ怀&ij¿’€"
q = line.render_pyquery()
assert q(".text-overlay text").text() == "þæ®þ怀&ij¿’€"
def test_include_x_axis(Chart):
"""Test x axis inclusion option"""
chart = Chart()
if Chart in (Pie, Treemap, Radar, Funnel, Dot, Gauge, Histogram, Box,
SolidGauge) or issubclass(Chart, BaseMap):
return
if not chart._dual:
data = 100, 200, 150
else:
data = (1, 100), (3, 200), (2, 150)
chart.add('_', data)
q = chart.render_pyquery()
# Select the value axis: y, or x when the chart is horizontal
yaxis = ".axis.%s .guides text" % (
'y' if not getattr(chart, 'horizontal', False) else 'x'
)
if not isinstance(chart, Bar):
assert '0' not in q(yaxis).map(texts)
else:
assert '0' in q(yaxis).map(texts)
chart.include_x_axis = True
q = chart.render_pyquery()
assert '0' in q(yaxis).map(texts)
def test_css(Chart):
"""Test css file option"""
css = "{{ id }}text { fill: #bedead; }\n"
with NamedTemporaryFile('w') as f:
f.write(css)
f.flush()
config = Config()
config.css.append('file://' + f.name)
chart = Chart(config)
chart.add('/', [10, 1, 5])
svg = chart.render().decode('utf-8')
assert '#bedead' in svg
chart = Chart(css=(Ellipsis, 'file://' + f.name))
chart.add('/', [10, 1, 5])
svg = chart.render().decode('utf-8')
assert '#bedead' in svg
def test_inline_css(Chart):
"""Test inline css option"""
css = "{{ id }}text { fill: #bedead; }\n"
config = Config()
config.css.append('inline:' + css)
chart = Chart(config)
chart.add('/', [10, 1, 5])
svg = chart.render().decode('utf-8')
assert '#bedead' in svg
def test_meta_config():
"""Test config metaclass"""
from pygal.config import CONFIG_ITEMS
assert all(c.name != 'Unbound' for c in CONFIG_ITEMS)
def test_label_rotation(Chart):
"""Test label rotation option"""
chart = Chart(x_label_rotation=28, y_label_rotation=76)
chart.add('1', [4, -5, 123, 59, 38])
chart.add('2', [89, 0, 8, .12, 8])
if not chart._dual:
chart.x_labels = ['one', 'twoooooooooooooooooooooo', 'three', '4']
q = chart.render_pyquery()
if Chart in (Line, Bar):
assert len(q('.axis.x text[transform^="rotate(28"]')) == 4
assert len(q('.axis.y text[transform^="rotate(76"]')) == 13
def test_legend_at_bottom(Chart):
"""Test legend at bottom option"""
chart = Chart(legend_at_bottom=True)
chart.add('1', [4, -5, 123, 59, 38])
chart.add('2', [89, 0, 8, .12, 8])
lab = chart.render()
chart.legend_at_bottom = False
assert lab != chart.render()
def test_x_y_title(Chart):
"""Test x title and y title options"""
chart = Chart(
title='I Am A Title',
x_title="I am a x title",
y_title="I am a y title"
)
chart.add('1', [4, -5, 123, 59, 38])
chart.add('2', [89, 0, 8, .12, 8])
q = chart.render_pyquery()
assert len(q('.titles .title')) == 3
def test_range(Chart):
"""Test y label major option"""
if Chart in (Pie, Treemap, Dot, SolidGauge) or issubclass(Chart, BaseMap):
return
chart = Chart()
chart.range = (0, 100)
chart.add('', [1, 2, 10])
q = chart.render_pyquery()
axis = map(str, range(0, 101, 10))
if Chart == Radar:
axis = map(str, range(100, -1, -20))
z = 'x' if getattr(chart, 'horizontal', False) or Chart == Gauge else 'y'
assert [t.text for t in q('.axis.%s .guides text' % z)] == list(axis)
def test_x_label_major(Chart):
"""Test x label major option"""
if Chart in (Pie, Treemap, Funnel, Dot, Gauge, Histogram, Box, SolidGauge,
Pyramid, DateTimeLine, TimeLine, DateLine,
TimeDeltaLine) or issubclass(
Chart, (BaseMap, Dual, HorizontalGraph)):
return
chart = Chart()
chart.add('test', range(12))
chart.x_labels = map(str, range(12))
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 0
chart.x_labels_major = ['1', '5', '11', '1.0', '5.0', '11.0']
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 3
assert len(q(".axis.x text")) == 12
chart.show_minor_x_labels = False
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 3
assert len(q(".axis.x text")) == 3
chart.show_minor_x_labels = True
chart.x_labels_major = None
chart.x_labels_major_every = 2
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 6
assert len(q(".axis.x text")) == 12
chart.x_labels_major_every = None
chart.x_labels_major_count = 4
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 4
assert len(q(".axis.x text")) == 12
chart.x_labels_major_every = None
chart.x_labels_major_count = 78
q = chart.render_pyquery()
assert len(q(".axis.x text.major")) == 12
assert len(q(".axis.x text")) == 12
def test_y_label_major(Chart):
"""Test y label major option"""
if Chart in (Pie, Treemap, Funnel, Dot, Gauge, Histogram, Box, SolidGauge,
HorizontalBar, HorizontalStackedBar, HorizontalStackedLine,
HorizontalLine, Pyramid, DateTimeLine, TimeLine, DateLine,
TimeDeltaLine) or issubclass(Chart, BaseMap):
return
chart = Chart()
data = range(12)
if Chart == XY:
data = list(zip(*[range(12), range(12)]))
chart.add('test', data)
chart.y_labels = range(12)
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 3
chart.y_labels_major = [1.0, 5.0, 11.0]
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 3
assert len(q(".axis.y text")) == 12
chart.show_minor_y_labels = False
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 3
assert len(q(".axis.y text")) == 3
chart.show_minor_y_labels = True
chart.y_labels_major = None
chart.y_labels_major_every = 2
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 6
assert len(q(".axis.y text")) == 12
chart.y_labels_major_every = None
chart.y_labels_major_count = 4
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 4
assert len(q(".axis.y text")) == 12
chart.y_labels_major_every = None
chart.y_labels_major_count = 78
q = chart.render_pyquery()
assert len(q(".axis.y text.major")) == 12
assert len(q(".axis.y text")) == 12
def test_no_y_labels(Chart):
"""Test no y labels chart"""
chart = Chart()
chart.y_labels = []
chart.add('_', [1, 2, 3])
chart.add('?', [10, 21, 5])
assert chart.render_pyquery()
def test_fill(Chart):
"""Test fill option"""
chart = Chart(fill=True)
chart.add('_', [1, 2, 3])
chart.add('?', [10, 21, 5])
assert chart.render_pyquery()
def test_render_data_uri(Chart):
"""Test the render data uri"""
chart = Chart(fill=True)
chart.add('ééé', [1, 2, 3])
chart.add('èèè', [10, 21, 5])
assert chart.render_data_uri(
).startswith('data:image/svg+xml;charset=utf-8;base64,')
def test_formatters(Chart):
"""Test custom formatters"""
if Chart._dual or Chart == Box:
return
chart = Chart(formatter=lambda x, chart, serie: '%s%s$' % (x, serie.title))
chart.add('_a', [1, 2, {'value': 3, 'formatter': lambda x: '%s¥' % x}])
chart.add('_b', [4, 5, 6], formatter=lambda x: '%s€' % x)
chart.x_labels = [2, 4, 6]
chart.x_labels_major = [4]
q = chart.render_pyquery()
assert set(
[v.text for v in q(".value")]
) == set(('4€', '5€', '6€', '1_a$', '2_a$', '3¥') +
(('6_a$', '15€') if Chart in (Pie, SolidGauge) else ()))
def test_classes(Chart):
"""Test classes option"""
chart = Chart()
assert chart.render_pyquery().attr('class') == 'pygal-chart'
chart = Chart(classes=())
assert not chart.render_pyquery().attr('class')
chart = Chart(classes=(Ellipsis, ))
assert chart.render_pyquery().attr('class') == 'pygal-chart'
chart = Chart(classes=('graph', ))
assert chart.render_pyquery().attr('class') == 'graph'
chart = Chart(classes=('pygal-chart', 'graph'))
assert chart.render_pyquery().attr('class') == 'pygal-chart graph'
chart = Chart(classes=(Ellipsis, 'graph'))
assert chart.render_pyquery().attr('class') == 'pygal-chart graph'
chart = Chart(classes=('graph', Ellipsis))
assert chart.render_pyquery().attr('class') == 'graph pygal-chart'
| 17,002 | Python | .py | 485 | 29.461856 | 79 | 0.599768 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,317 | test_histogram.py | Kozea_pygal/pygal/test/test_histogram.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Histogram chart related tests"""
from pygal import Histogram
def test_histogram():
"""Simple histogram test"""
hist = Histogram()
hist.add('1', [(2, 0, 1), (4, 1, 3), (3, 3.5, 5), (1.5, 5, 10)])
hist.add('2', [(2, 2, 8)], secondary=True)
q = hist.render_pyquery()
assert len(q('.rect')) == 5
| 1,095 | Python | .py | 27 | 38.555556 | 79 | 0.713615 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,318 | test_maps.py | Kozea_pygal/pygal/test/test_maps.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Map plugins tests are imported here"""
from importlib_metadata import entry_points
# Load plugins tests
for entry in entry_points(group="pygal.test.test_maps"):
module = entry.load()
for k, v in module.__dict__.items():
if k.startswith('test_'):
globals()['test_maps_' + entry.name + '_' + k[5:]] = v
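The loop above discovers map tests through the pygal.test.test_maps entry-point group. As a minimal sketch, not taken from pygal itself, a map plugin could register its test module in its setup.py like this (package and module names are purely illustrative):

# Hypothetical plugin setup.py; all names below are illustrative only.
from setuptools import setup

setup(
    name='pygal-maps-example',
    packages=['pygal_maps_example'],
    entry_points={
        'pygal.test.test_maps': [
            'example = pygal_maps_example.test',
        ],
    },
)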
| 1,107 | Python | .py | 26 | 40.423077 | 79 | 0.732159 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,319 | test_colors.py | Kozea_pygal/pygal/test/test_colors.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Color utility functions tests"""
from __future__ import division
from pygal.colors import (
darken,
desaturate,
hsl_to_rgb,
lighten,
parse_color,
rgb_to_hsl,
rotate,
saturate,
unparse_color,
)
def test_parse_color():
"""Test color parse function"""
assert parse_color('#123') == (17, 34, 51, 1., '#rgb')
assert parse_color('#cdf') == (204, 221, 255, 1., '#rgb')
assert parse_color('#a3d7') == (170, 51, 221, 119 / 255, '#rgba')
assert parse_color('#584b4f') == (88, 75, 79, 1., '#rrggbb')
assert parse_color('#8cbe22') == (140, 190, 34, 1., '#rrggbb')
assert parse_color('#16cbf055') == (22, 203, 240, 1 / 3, '#rrggbbaa')
assert parse_color('rgb(134, 67, 216)') == (134, 67, 216, 1., 'rgb')
assert parse_color('rgb(0, 111, 222)') == (0, 111, 222, 1., 'rgb')
assert parse_color('rgba(237, 83, 48, .8)') == (237, 83, 48, .8, 'rgba')
assert parse_color('rgba(0, 1, 0, 0.1223)') == (0, 1, 0, .1223, 'rgba')
def test_unparse_color():
"""Test color unparse function"""
assert unparse_color(17, 34, 51, 1., '#rgb') == '#123'
assert unparse_color(204, 221, 255, 1., '#rgb') == '#cdf'
assert unparse_color(170, 51, 221, 119 / 255, '#rgba') == '#a3d7'
assert unparse_color(88, 75, 79, 1., '#rrggbb') == '#584b4f'
assert unparse_color(140, 190, 34, 1., '#rrggbb') == '#8cbe22'
assert unparse_color(22, 203, 240, 1 / 3, '#rrggbbaa') == '#16cbf055'
assert unparse_color(134, 67, 216, 1., 'rgb') == 'rgb(134, 67, 216)'
assert unparse_color(0, 111, 222, 1., 'rgb') == 'rgb(0, 111, 222)'
assert unparse_color(237, 83, 48, .8, 'rgba') == 'rgba(237, 83, 48, 0.8)'
assert unparse_color(0, 1, 0, .1223, 'rgba') == 'rgba(0, 1, 0, 0.1223)'
def test_darken():
"""Test darken color function"""
assert darken('#800', 20) == '#200'
assert darken('#800e', 20) == '#200e'
assert darken('#800', 0) == '#800'
assert darken('#ffffff', 10) == '#e6e6e6'
assert darken('#000000', 10) == '#000000'
assert darken('#f3148a', 25) == '#810747'
assert darken('#f3148aab', 25) == '#810747ab'
assert darken('#121212', 1) == '#0f0f0f'
assert darken('#999999', 100) == '#000000'
assert darken('#99999999', 100) == '#00000099'
assert darken('#1479ac', 8) == '#105f87'
assert darken('rgb(136, 0, 0)', 20) == 'rgb(34, 0, 0)'
assert darken('rgba(20, 121, 172, .13)', 8) == 'rgba(16, 95, 135, 0.13)'
def test_lighten():
"""Test lighten color function"""
assert lighten('#800', 20) == '#e00'
assert lighten('#800', 0) == '#800'
assert lighten('#ffffff', 10) == '#ffffff'
assert lighten('#000000', 10) == '#1a1a1a'
assert lighten('#f3148a', 25) == '#f98dc6'
assert lighten('#121212', 1) == '#151515'
assert lighten('#999999', 100) == '#ffffff'
assert lighten('#1479ac', 8) == '#1893d1'
def test_saturate():
"""Test color saturation function"""
assert saturate('#000', 20) == '#000'
assert saturate('#fff', 20) == '#fff'
assert saturate('#8a8', 100) == '#3f3'
assert saturate('#855', 20) == '#9e3f3f'
def test_desaturate():
"""Test color desaturation function"""
assert desaturate('#000', 20) == '#000'
assert desaturate('#fff', 20) == '#fff'
assert desaturate('#8a8', 100) == '#999'
assert desaturate('#855', 20) == '#726b6b'
def test_rotate():
"""Test color rotation function"""
assert rotate('#000', 45) == '#000'
assert rotate('#fff', 45) == '#fff'
assert rotate('#811', 45) == '#886a11'
assert rotate('#8a8', 360) == '#8a8'
assert rotate('#8a8', 0) == '#8a8'
assert rotate('#8a8', -360) == '#8a8'
def test_hsl_to_rgb_part_0():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_0():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
def test_hsl_to_rgb_part_1():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(-360, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(-300, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(-240, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(-180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(-120, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(-60, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_1():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(255, 0, 0) == (-360, 100, 50)
# assert rgb_to_hsl(255, 255, 0) == (-300, 100, 50)
# assert rgb_to_hsl(0, 255, 0) == (-240, 100, 50)
# assert rgb_to_hsl(0, 255, 255) == (-180, 100, 50)
# assert rgb_to_hsl(0, 0, 255) == (-120, 100, 50)
# assert rgb_to_hsl(255, 0, 255) == (-60, 100, 50)
pass
def test_hsl_to_rgb_part_2():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(360, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(420, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(480, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(540, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(600, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(660, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_2():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(255, 0, 0) == (360, 100, 50)
# assert rgb_to_hsl(255, 255, 0) == (420, 100, 50)
# assert rgb_to_hsl(0, 255, 0) == (480, 100, 50)
# assert rgb_to_hsl(0, 255, 255) == (540, 100, 50)
# assert rgb_to_hsl(0, 0, 255) == (600, 100, 50)
# assert rgb_to_hsl(255, 0, 255) == (660, 100, 50)
pass
def test_hsl_to_rgb_part_3():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(6120, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(-9660, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(99840, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(-900, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(-104880, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(2820, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_3():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(255, 0, 0) == (6120, 100, 50)
# assert rgb_to_hsl(255, 255, 0) == (-9660, 100, 50)
# assert rgb_to_hsl(0, 255, 0) == (99840, 100, 50)
# assert rgb_to_hsl(0, 255, 255) == (-900, 100, 50)
# assert rgb_to_hsl(0, 0, 255) == (-104880, 100, 50)
# assert rgb_to_hsl(255, 0, 255) == (2820, 100, 50)
pass
def test_hsl_to_rgb_part_4():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(12, 100, 50) == (255, 51, 0)
assert hsl_to_rgb(24, 100, 50) == (255, 102, 0)
assert hsl_to_rgb(36, 100, 50) == (255, 153, 0)
assert hsl_to_rgb(48, 100, 50) == (255, 204, 0)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(72, 100, 50) == (204, 255, 0)
assert hsl_to_rgb(84, 100, 50) == (153, 255, 0)
assert hsl_to_rgb(96, 100, 50) == (102, 255, 0)
assert hsl_to_rgb(108, 100, 50) == (51, 255, 0)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
def test_rgb_to_hsl_part_4():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
assert rgb_to_hsl(255, 51, 0) == (12, 100, 50)
assert rgb_to_hsl(255, 102, 0) == (24, 100, 50)
assert rgb_to_hsl(255, 153, 0) == (36, 100, 50)
assert rgb_to_hsl(255, 204, 0) == (48, 100, 50)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
assert rgb_to_hsl(204, 255, 0) == (72, 100, 50)
assert rgb_to_hsl(153, 255, 0) == (84, 100, 50)
assert rgb_to_hsl(102, 255, 0) == (96, 100, 50)
assert rgb_to_hsl(51, 255, 0) == (108, 100, 50)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
def test_hsl_to_rgb_part_5():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(132, 100, 50) == (0, 255, 51)
assert hsl_to_rgb(144, 100, 50) == (0, 255, 102)
assert hsl_to_rgb(156, 100, 50) == (0, 255, 153)
assert hsl_to_rgb(168, 100, 50) == (0, 255, 204)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(192, 100, 50) == (0, 204, 255)
assert hsl_to_rgb(204, 100, 50) == (0, 153, 255)
assert hsl_to_rgb(216, 100, 50) == (0, 102, 255)
assert hsl_to_rgb(228, 100, 50) == (0, 51, 255)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
def test_rgb_to_hsl_part_5():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
assert rgb_to_hsl(0, 255, 51) == (132, 100, 50)
assert rgb_to_hsl(0, 255, 102) == (144, 100, 50)
assert rgb_to_hsl(0, 255, 153) == (156, 100, 50)
assert rgb_to_hsl(0, 255, 204) == (168, 100, 50)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
assert rgb_to_hsl(0, 204, 255) == (192, 100, 50)
assert rgb_to_hsl(0, 153, 255) == (204, 100, 50)
assert rgb_to_hsl(0, 102, 255) == (216, 100, 50)
assert rgb_to_hsl(0, 51, 255) == (228, 100, 50)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
def test_hsl_to_rgb_part_6():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(252, 100, 50) == (51, 0, 255)
assert hsl_to_rgb(264, 100, 50) == (102, 0, 255)
assert hsl_to_rgb(276, 100, 50) == (153, 0, 255)
assert hsl_to_rgb(288, 100, 50) == (204, 0, 255)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
assert hsl_to_rgb(312, 100, 50) == (255, 0, 204)
assert hsl_to_rgb(324, 100, 50) == (255, 0, 153)
assert hsl_to_rgb(336, 100, 50) == (255, 0, 102)
assert hsl_to_rgb(348, 100, 50) == (255, 0, 51)
assert hsl_to_rgb(360, 100, 50) == (255, 0, 0)
def test_rgb_to_hsl_part_6():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
assert rgb_to_hsl(51, 0, 255) == (252, 100, 50)
assert rgb_to_hsl(102, 0, 255) == (264, 100, 50)
assert rgb_to_hsl(153, 0, 255) == (276, 100, 50)
assert rgb_to_hsl(204, 0, 255) == (288, 100, 50)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
assert rgb_to_hsl(255, 0, 204) == (312, 100, 50)
assert rgb_to_hsl(255, 0, 153) == (324, 100, 50)
assert rgb_to_hsl(255, 0, 102) == (336, 100, 50)
assert rgb_to_hsl(255, 0, 51) == (348, 100, 50)
# assert rgb_to_hsl(255, 0, 0) == (360, 100, 50)
def test_hsl_to_rgb_part_7():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 20, 50) == (153, 102, 102)
assert hsl_to_rgb(0, 60, 50) == (204, 51, 51)
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
def test_rgb_to_hsl_part_7():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(153, 102, 102) == (0, 20, 50)
assert rgb_to_hsl(204, 51, 51) == (0, 60, 50)
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
def test_hsl_to_rgb_part_8():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(60, 20, 50) == (153, 153, 102)
assert hsl_to_rgb(60, 60, 50) == (204, 204, 51)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
def test_rgb_to_hsl_part_8():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(153, 153, 102) == (60, 20, 50)
assert rgb_to_hsl(204, 204, 51) == (60, 60, 50)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
def test_hsl_to_rgb_part_9():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(120, 20, 50) == (102, 153, 102)
assert hsl_to_rgb(120, 60, 50) == (51, 204, 51)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
def test_rgb_to_hsl_part_9():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(102, 153, 102) == (120, 20, 50)
assert rgb_to_hsl(51, 204, 51) == (120, 60, 50)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
def test_hsl_to_rgb_part_10():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(180, 20, 50) == (102, 153, 153)
assert hsl_to_rgb(180, 60, 50) == (51, 204, 204)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
def test_rgb_to_hsl_part_10():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(102, 153, 153) == (180, 20, 50)
assert rgb_to_hsl(51, 204, 204) == (180, 60, 50)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
def test_hsl_to_rgb_part_11():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(240, 20, 50) == (102, 102, 153)
assert hsl_to_rgb(240, 60, 50) == (51, 51, 204)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
def test_rgb_to_hsl_part_11():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(102, 102, 153) == (240, 20, 50)
assert rgb_to_hsl(51, 51, 204) == (240, 60, 50)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
def test_hsl_to_rgb_part_12():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(300, 20, 50) == (153, 102, 153)
assert hsl_to_rgb(300, 60, 50) == (204, 51, 204)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
def test_rgb_to_hsl_part_12():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(153, 102, 153) == (300, 20, 50)
assert rgb_to_hsl(204, 51, 204) == (300, 60, 50)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
def test_hsl_to_rgb_part_13():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(0, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(0, 100, 10) == (51, 0, 0)
assert hsl_to_rgb(0, 100, 20) == (102, 0, 0)
assert hsl_to_rgb(0, 100, 30) == (153, 0, 0)
assert hsl_to_rgb(0, 100, 40) == (204, 0, 0)
assert hsl_to_rgb(0, 100, 50) == (255, 0, 0)
assert hsl_to_rgb(0, 100, 60) == (255, 51, 51)
assert hsl_to_rgb(0, 100, 70) == (255, 102, 102)
assert hsl_to_rgb(0, 100, 80) == (255, 153, 153)
assert hsl_to_rgb(0, 100, 90) == (255, 204, 204)
assert hsl_to_rgb(0, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_13():
"""Test rgb to hsl color function"""
assert rgb_to_hsl(0, 0, 0) == (0, 0, 0)
assert rgb_to_hsl(51, 0, 0) == (0, 100, 10)
assert rgb_to_hsl(102, 0, 0) == (0, 100, 20)
assert rgb_to_hsl(153, 0, 0) == (0, 100, 30)
assert rgb_to_hsl(204, 0, 0) == (0, 100, 40)
assert rgb_to_hsl(255, 0, 0) == (0, 100, 50)
assert rgb_to_hsl(255, 51, 51) == (0, 100, 60)
assert rgb_to_hsl(255, 102, 102) == (0, 100, 70)
assert rgb_to_hsl(255, 153, 153) == (0, 100, 80)
assert rgb_to_hsl(255, 204, 204) == (0, 100, 90)
assert rgb_to_hsl(255, 255, 255) == (0, 0, 100)
def test_hsl_to_rgb_part_14():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(60, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(60, 100, 10) == (51, 51, 0)
assert hsl_to_rgb(60, 100, 20) == (102, 102, 0)
assert hsl_to_rgb(60, 100, 30) == (153, 153, 0)
assert hsl_to_rgb(60, 100, 40) == (204, 204, 0)
assert hsl_to_rgb(60, 100, 50) == (255, 255, 0)
assert hsl_to_rgb(60, 100, 60) == (255, 255, 51)
assert hsl_to_rgb(60, 100, 70) == (255, 255, 102)
assert hsl_to_rgb(60, 100, 80) == (255, 255, 153)
assert hsl_to_rgb(60, 100, 90) == (255, 255, 204)
assert hsl_to_rgb(60, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_14():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (60, 100, 0)
assert rgb_to_hsl(51, 51, 0) == (60, 100, 10)
assert rgb_to_hsl(102, 102, 0) == (60, 100, 20)
assert rgb_to_hsl(153, 153, 0) == (60, 100, 30)
assert rgb_to_hsl(204, 204, 0) == (60, 100, 40)
assert rgb_to_hsl(255, 255, 0) == (60, 100, 50)
assert rgb_to_hsl(255, 255, 51) == (60, 100, 60)
assert rgb_to_hsl(255, 255, 102) == (60, 100, 70)
assert rgb_to_hsl(255, 255, 153) == (60, 100, 80)
assert rgb_to_hsl(255, 255, 204) == (60, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (60, 100, 100)
def test_hsl_to_rgb_part_15():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(120, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(120, 100, 10) == (0, 51, 0)
assert hsl_to_rgb(120, 100, 20) == (0, 102, 0)
assert hsl_to_rgb(120, 100, 30) == (0, 153, 0)
assert hsl_to_rgb(120, 100, 40) == (0, 204, 0)
assert hsl_to_rgb(120, 100, 50) == (0, 255, 0)
assert hsl_to_rgb(120, 100, 60) == (51, 255, 51)
assert hsl_to_rgb(120, 100, 70) == (102, 255, 102)
assert hsl_to_rgb(120, 100, 80) == (153, 255, 153)
assert hsl_to_rgb(120, 100, 90) == (204, 255, 204)
assert hsl_to_rgb(120, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_15():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (120, 100, 0)
assert rgb_to_hsl(0, 51, 0) == (120, 100, 10)
assert rgb_to_hsl(0, 102, 0) == (120, 100, 20)
assert rgb_to_hsl(0, 153, 0) == (120, 100, 30)
assert rgb_to_hsl(0, 204, 0) == (120, 100, 40)
assert rgb_to_hsl(0, 255, 0) == (120, 100, 50)
assert rgb_to_hsl(51, 255, 51) == (120, 100, 60)
assert rgb_to_hsl(102, 255, 102) == (120, 100, 70)
assert rgb_to_hsl(153, 255, 153) == (120, 100, 80)
assert rgb_to_hsl(204, 255, 204) == (120, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (120, 100, 100)
def test_hsl_to_rgb_part_16():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(180, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(180, 100, 10) == (0, 51, 51)
assert hsl_to_rgb(180, 100, 20) == (0, 102, 102)
assert hsl_to_rgb(180, 100, 30) == (0, 153, 153)
assert hsl_to_rgb(180, 100, 40) == (0, 204, 204)
assert hsl_to_rgb(180, 100, 50) == (0, 255, 255)
assert hsl_to_rgb(180, 100, 60) == (51, 255, 255)
assert hsl_to_rgb(180, 100, 70) == (102, 255, 255)
assert hsl_to_rgb(180, 100, 80) == (153, 255, 255)
assert hsl_to_rgb(180, 100, 90) == (204, 255, 255)
assert hsl_to_rgb(180, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_16():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (180, 100, 0)
assert rgb_to_hsl(0, 51, 51) == (180, 100, 10)
assert rgb_to_hsl(0, 102, 102) == (180, 100, 20)
assert rgb_to_hsl(0, 153, 153) == (180, 100, 30)
assert rgb_to_hsl(0, 204, 204) == (180, 100, 40)
assert rgb_to_hsl(0, 255, 255) == (180, 100, 50)
assert rgb_to_hsl(51, 255, 255) == (180, 100, 60)
assert rgb_to_hsl(102, 255, 255) == (180, 100, 70)
assert rgb_to_hsl(153, 255, 255) == (180, 100, 80)
assert rgb_to_hsl(204, 255, 255) == (180, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (180, 100, 100)
def test_hsl_to_rgb_part_17():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(240, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(240, 100, 10) == (0, 0, 51)
assert hsl_to_rgb(240, 100, 20) == (0, 0, 102)
assert hsl_to_rgb(240, 100, 30) == (0, 0, 153)
assert hsl_to_rgb(240, 100, 40) == (0, 0, 204)
assert hsl_to_rgb(240, 100, 50) == (0, 0, 255)
assert hsl_to_rgb(240, 100, 60) == (51, 51, 255)
assert hsl_to_rgb(240, 100, 70) == (102, 102, 255)
assert hsl_to_rgb(240, 100, 80) == (153, 153, 255)
assert hsl_to_rgb(240, 100, 90) == (204, 204, 255)
assert hsl_to_rgb(240, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_17():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (240, 100, 0)
assert rgb_to_hsl(0, 0, 51) == (240, 100, 10)
assert rgb_to_hsl(0, 0, 102) == (240, 100, 20)
assert rgb_to_hsl(0, 0, 153) == (240, 100, 30)
assert rgb_to_hsl(0, 0, 204) == (240, 100, 40)
assert rgb_to_hsl(0, 0, 255) == (240, 100, 50)
assert rgb_to_hsl(51, 51, 255) == (240, 100, 60)
assert rgb_to_hsl(102, 102, 255) == (240, 100, 70)
assert rgb_to_hsl(153, 153, 255) == (240, 100, 80)
assert rgb_to_hsl(204, 204, 255) == (240, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (240, 100, 100)
def test_hsl_to_rgb_part_18():
"""Test hsl to rgb color function"""
assert hsl_to_rgb(300, 100, 0) == (0, 0, 0)
assert hsl_to_rgb(300, 100, 10) == (51, 0, 51)
assert hsl_to_rgb(300, 100, 20) == (102, 0, 102)
assert hsl_to_rgb(300, 100, 30) == (153, 0, 153)
assert hsl_to_rgb(300, 100, 40) == (204, 0, 204)
assert hsl_to_rgb(300, 100, 50) == (255, 0, 255)
assert hsl_to_rgb(300, 100, 60) == (255, 51, 255)
assert hsl_to_rgb(300, 100, 70) == (255, 102, 255)
assert hsl_to_rgb(300, 100, 80) == (255, 153, 255)
assert hsl_to_rgb(300, 100, 90) == (255, 204, 255)
assert hsl_to_rgb(300, 100, 100) == (255, 255, 255)
def test_rgb_to_hsl_part_18():
"""Test rgb to hsl color function"""
# assert rgb_to_hsl(0, 0, 0) == (300, 100, 0)
assert rgb_to_hsl(51, 0, 51) == (300, 100, 10)
assert rgb_to_hsl(102, 0, 102) == (300, 100, 20)
assert rgb_to_hsl(153, 0, 153) == (300, 100, 30)
assert rgb_to_hsl(204, 0, 204) == (300, 100, 40)
assert rgb_to_hsl(255, 0, 255) == (300, 100, 50)
assert rgb_to_hsl(255, 51, 255) == (300, 100, 60)
assert rgb_to_hsl(255, 102, 255) == (300, 100, 70)
assert rgb_to_hsl(255, 153, 255) == (300, 100, 80)
assert rgb_to_hsl(255, 204, 255) == (300, 100, 90)
# assert rgb_to_hsl(255, 255, 255) == (300, 100, 100)
| 22,053 | Python | .py | 461 | 43.219089 | 79 | 0.573767 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,320 | test_table.py | Kozea_pygal/pygal/test/test_table.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Box chart related tests"""
from pyquery import PyQuery as pq
from pygal import Pie
def test_pie_table():
"""Test rendering a table for a pie"""
chart = Pie(inner_radius=.3, pretty_print=True)
chart.title = 'Browser usage in February 2012 (in %)'
chart.add('IE', 19.5)
chart.add('Firefox', 36.6)
chart.add('Chrome', 36.3)
chart.add('Safari', 4.5)
chart.add('Opera', 2.3)
q = pq(chart.render_table())
assert len(q('table')) == 1
| 1,247 | Python | .py | 32 | 36.59375 | 79 | 0.723369 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,321 | __init__.py | Kozea_pygal/pygal/maps/__init__.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Maps extensions namespace module"""
| 812 | Python | .py | 19 | 41.736842 | 79 | 0.770492 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,322 | radar.py | Kozea_pygal/pygal/graph/radar.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Radar chart: also known as a Kiviat or spider chart, a polar line chart
useful for multivariate observation.
"""
from math import cos, pi
from pygal.adapters import none_to_zero, positive
from pygal.graph.line import Line
from pygal.util import cached_property, compute_scale, cut, deg, truncate
from pygal.view import PolarLogView, PolarView
class Radar(Line):
"""Rada graph class"""
_adapters = [positive, none_to_zero]
def __init__(self, *args, **kwargs):
"""Init custom vars"""
self._rmax = None
super(Radar, self).__init__(*args, **kwargs)
def _fill(self, values):
"""Add extra values to fill the line"""
return values
@cached_property
def _values(self):
"""Getter for series values (flattened)"""
if self.interpolate:
return [
val[0] for serie in self.series for val in serie.interpolated
]
else:
return super(Line, self)._values
def _set_view(self):
"""Assign a view to current graph"""
if self.logarithmic:
view_class = PolarLogView
else:
view_class = PolarView
self.view = view_class(
self.width - self.margin_box.x, self.height - self.margin_box.y,
self._box
)
def _x_axis(self, draw_axes=True):
"""Override x axis to make it polar"""
if not self._x_labels or not self.show_x_labels:
return
axis = self.svg.node(
self.nodes['plot'],
class_="axis x web%s" %
(' always_show' if self.show_x_guides else '')
)
format_ = lambda x: '%f %f' % x
center = self.view((0, 0))
r = self._rmax
# Can't simply determine truncation
truncation = self.truncate_label or 25
for label, theta in self._x_labels:
major = label in self._x_labels_major
if not (self.show_minor_x_labels or major):
continue
guides = self.svg.node(axis, class_='guides')
end = self.view((r, theta))
self.svg.node(
guides,
'path',
d='M%s L%s' % (format_(center), format_(end)),
class_='%s%sline' %
('axis ' if label == "0" else '', 'major ' if major else '')
)
r_txt = (1 - self._box.__class__.margin) * self._box.ymax
pos_text = self.view((r_txt, theta))
text = self.svg.node(
guides,
'text',
x=pos_text[0],
y=pos_text[1],
class_='major' if major else ''
)
text.text = truncate(label, truncation)
if text.text != label:
self.svg.node(guides, 'title').text = label
else:
self.svg.node(
guides,
'title',
).text = self._x_format(theta)
angle = -theta + pi / 2
if cos(angle) < 0:
angle -= pi
text.attrib['transform'] = 'rotate(%f %s)' % (
self.x_label_rotation or deg(angle), format_(pos_text)
)
def _y_axis(self, draw_axes=True):
"""Override y axis to make it polar"""
if not self._y_labels or not self.show_y_labels:
return
axis = self.svg.node(self.nodes['plot'], class_="axis y web")
for label, r in reversed(self._y_labels):
major = r in self._y_labels_major
if not (self.show_minor_y_labels or major):
continue
guides = self.svg.node(
axis,
class_='%sguides' %
('logarithmic ' if self.logarithmic else '')
)
if self.show_y_guides:
self.svg.line(
guides, [self.view((r, theta)) for theta in self._x_pos],
close=True,
class_='%sguide line' % ('major ' if major else '')
)
x, y = self.view((r, self._x_pos[0]))
x -= 5
text = self.svg.node(
guides, 'text', x=x, y=y, class_='major' if major else ''
)
text.text = label
if self.y_label_rotation:
text.attrib[
'transform'
] = "rotate(%d %f %f)" % (self.y_label_rotation, x, y)
self.svg.node(
guides,
'title',
).text = self._y_format(r)
def _compute(self):
"""Compute r min max and labels position"""
delta = 2 * pi / self._len if self._len else 0
self._x_pos = [.5 * pi + i * delta for i in range(self._len + 1)]
for serie in self.all_series:
serie.points = [(v, self._x_pos[i])
for i, v in enumerate(serie.values)]
if self.interpolate:
extended_x_pos = ([.5 * pi - delta] + self._x_pos)
extended_vals = (serie.values[-1:] + serie.values)
serie.interpolated = list(
map(
tuple,
map(
reversed,
self._interpolate(extended_x_pos, extended_vals)
)
)
)
# x labels space
self._box.margin *= 2
self._rmin = self.zero
self._rmax = self._max or 1
self._box.set_polar_box(self._rmin, self._rmax)
self._self_close = True
def _compute_y_labels(self):
y_pos = compute_scale(
self._rmin, self._rmax, self.logarithmic, self.order_min,
self.min_scale, self.max_scale / 2
)
if self.y_labels:
self._y_labels = []
for i, y_label in enumerate(self.y_labels):
if isinstance(y_label, dict):
pos = self._adapt(y_label.get('value'))
title = y_label.get('label', self._y_format(pos))
elif isinstance(y_label, str):
pos = self._adapt(y_pos[i])
title = y_label
else:
pos = self._adapt(y_label)
title = self._y_format(pos)
self._y_labels.append((title, pos))
self._rmin = min(self._rmin, min(cut(self._y_labels, 1)))
self._rmax = max(self._rmax, max(cut(self._y_labels, 1)))
self._box.set_polar_box(self._rmin, self._rmax)
else:
self._y_labels = list(zip(map(self._y_format, y_pos), y_pos))
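Not part of the module above — a minimal usage sketch for the Radar class (labels and values invented for illustration):

import pygal

radar = pygal.Radar(fill=True)
radar.x_labels = ['speed', 'memory', 'startup', 'size', 'docs']
radar.add('v1', [4, 3, 5, 2, 4])
radar.add('v2', [5, 4, 3, 4, 5])
radar.render_to_file('radar.svg')  # one polygon per series, closed automatically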
| 7,491 | Python | .py | 188 | 28.026596 | 79 | 0.517796 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,323 | pyramid.py | Kozea_pygal/pygal/graph/pyramid.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Pyramid chart: stacked bar chart containing only positive values, split across
two sides, typically by gender, as in an age pyramid.
"""
from pygal.adapters import positive
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.stackedbar import StackedBar
class VerticalPyramid(StackedBar):
"""Vertical Pyramid graph class"""
_adapters = [positive]
def _value_format(self, value):
"""Format value for dual value display."""
return super(VerticalPyramid, self)._value_format(value and abs(value))
def _get_separated_values(self, secondary=False):
"""Separate values between odd and even series stacked"""
series = self.secondary_series if secondary else self.series
positive_vals = map(
sum,
zip(
*[
serie.safe_values for index, serie in enumerate(series)
if index % 2
]
)
)
negative_vals = map(
sum,
zip(
*[
serie.safe_values for index, serie in enumerate(series)
if not index % 2
]
)
)
return list(positive_vals), list(negative_vals)
def _compute_box(self, positive_vals, negative_vals):
"""Compute Y min and max"""
max_ = max(
max(positive_vals or [self.zero]),
max(negative_vals or [self.zero])
)
if self.range and self.range[0] is not None:
self._box.ymin = self.range[0]
else:
self._box.ymin = -max_
if self.range and self.range[1] is not None:
self._box.ymax = self.range[1]
else:
self._box.ymax = max_
def _pre_compute_secondary(self, positive_vals, negative_vals):
"""Compute secondary y min and max"""
self._secondary_max = max(max(positive_vals), max(negative_vals))
self._secondary_min = -self._secondary_max
def _bar(self, serie, parent, x, y, i, zero, secondary=False):
"""Internal stacking bar drawing function"""
if serie.index % 2:
y = -y
return super(VerticalPyramid,
self)._bar(serie, parent, x, y, i, zero, secondary)
class Pyramid(HorizontalGraph, VerticalPyramid):
"""Horizontal Pyramid graph class like the one used by age pyramid"""
| 3,176 | Python | .py | 79 | 32.341772 | 79 | 0.635863 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,324 | horizontalstackedline.py | Kozea_pygal/pygal/graph/horizontalstackedline.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Horizontal Stacked Line graph"""
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.stackedline import StackedLine
class HorizontalStackedLine(HorizontalGraph, StackedLine):
"""Horizontal Stacked Line graph"""
def _plot(self):
"""Draw the lines in reverse order"""
for serie in self.series[::-1]:
self.line(serie)
for serie in self.secondary_series[::-1]:
self.line(serie, True)
| 1,232 | Python | .py | 29 | 39.413793 | 79 | 0.743119 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,325 | solidgauge.py | Kozea_pygal/pygal/graph/solidgauge.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Solid Gauge
For each series a solid gauge is shown on the plot area.
"""
from math import pi, sqrt
from pygal.graph.graph import Graph
from pygal.util import alter, decorate
class SolidGauge(Graph):
def gaugify(self, serie, squares, sq_dimensions, current_square):
serie_node = self.svg.serie(serie)
if self.half_pie:
start_angle = 3 * pi / 2
center = ((current_square[1] * sq_dimensions[0]) -
(sq_dimensions[0] / 2.),
(current_square[0] * sq_dimensions[1]) -
(sq_dimensions[1] / 4))
end_angle = pi / 2
else:
start_angle = 0
center = ((current_square[1] * sq_dimensions[0]) -
(sq_dimensions[0] / 2.),
(current_square[0] * sq_dimensions[1]) -
(sq_dimensions[1] / 2.))
end_angle = 2 * pi
max_value = serie.metadata.get(0, {}).get('max_value', 100)
radius = min([sq_dimensions[0] / 2, sq_dimensions[1] / 2]) * .9
small_radius = radius * serie.inner_radius
self.svg.gauge_background(
serie_node, start_angle, center, radius, small_radius, end_angle,
self.half_pie, self._serie_format(serie, max_value)
)
sum_ = 0
for i, value in enumerate(serie.values):
if value is None:
continue
ratio = min(value, max_value) / max_value
if self.half_pie:
angle = 2 * pi * ratio / 2
else:
angle = 2 * pi * ratio
val = self._format(serie, i)
metadata = serie.metadata.get(i)
gauge_ = decorate(
self.svg, self.svg.node(serie_node['plot'], class_="gauge"),
metadata
)
alter(
self.svg.solid_gauge(
serie_node, gauge_, radius, small_radius, angle,
start_angle, center, val, i, metadata, self.half_pie,
end_angle, self._serie_format(serie, max_value)
), metadata
)
start_angle += angle
sum_ += value
x, y = center
self.svg.node(
serie_node['text_overlay'],
'text',
class_='value gauge-sum',
x=x,
y=y + self.style.value_font_size / 3,
attrib={
'text-anchor': 'middle'
}
).text = self._serie_format(serie, sum_)
def _compute_x_labels(self):
pass
def _compute_y_labels(self):
pass
def _plot(self):
"""Draw all the serie slices"""
squares = self._squares()
sq_dimensions = self.add_squares(squares)
for index, serie in enumerate(self.series):
current_square = self._current_square(squares, index)
self.gaugify(serie, squares, sq_dimensions, current_square)
def _squares(self):
n_series_ = len(self.series)
i = 2
if sqrt(n_series_).is_integer():
_x = int(sqrt(n_series_))
_y = int(sqrt(n_series_))
else:
while i * i < n_series_:
while n_series_ % i == 0:
n_series_ = n_series_ / i
i = i + 1
_y = int(n_series_)
_x = int(len(self.series) / _y)
if len(self.series) == 5:
_x, _y = 2, 3
if abs(_x - _y) > 2:
_sq = 3
while (_x * _y) - 1 < len(self.series):
_x, _y = _sq, _sq
_sq += 1
return (_x, _y)
def _current_square(self, squares, index):
current_square = [1, 1]
steps = index + 1
steps_taken = 0
for i in range(squares[0] * squares[1]):
steps_taken += 1
if steps_taken != steps and steps_taken % squares[0] != 0:
current_square[1] += 1
elif steps_taken != steps and steps_taken % squares[0] == 0:
current_square[1] = 1
current_square[0] += 1
else:
return tuple(current_square)
raise Exception(
'Something went wrong with the current square assignment.'
)
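Not part of the module above — a minimal usage sketch: each value is a dict whose optional max_value metadata feeds the gaugify code above (numbers invented).

import pygal

gauge = pygal.SolidGauge(inner_radius=0.70, half_pie=True)
gauge.add('Series 1', [{'value': 225000, 'max_value': 1275000}])
gauge.add('Series 2', [{'value': 80}])  # max_value defaults to 100
gauge.render_to_file('gauge.svg')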
| 5,100 | Python | .py | 132 | 27.80303 | 79 | 0.530707 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,326 | histogram.py | Kozea_pygal/pygal/graph/histogram.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Histogram chart: like a bar chart, but with data plotted along an x axis
as bars of varying width.
"""
from pygal.graph.bar import Bar
from pygal.graph.dual import Dual
from pygal.util import alter, cached_property, decorate
class Histogram(Dual, Bar):
"""Histogram chart class"""
_series_margin = 0
@cached_property
def _values(self):
"""Getter for secondary series values (flattened)"""
return self.yvals
@cached_property
def _secondary_values(self):
"""Getter for secondary series values (flattened)"""
return [
val[0] for serie in self.secondary_series for val in serie.values
if val[0] is not None
]
@cached_property
def xvals(self):
"""All x values"""
return [
val for serie in self.all_series for dval in serie.values
for val in dval[1:3] if val is not None
]
@cached_property
def yvals(self):
"""All y values"""
return [
val[0] for serie in self.series for val in serie.values
if val[0] is not None
]
def _bar(self, serie, parent, x0, x1, y, i, zero, secondary=False):
"""Internal bar drawing function"""
x, y = self.view((x0, y))
x1, _ = self.view((x1, y))
width = x1 - x
height = self.view.y(zero) - y
series_margin = width * self._series_margin
x += series_margin
width -= 2 * series_margin
r = serie.rounded_bars * 1 if serie.rounded_bars else 0
alter(
self.svg.transposable_node(
parent,
'rect',
x=x,
y=y,
rx=r,
ry=r,
width=width,
height=height,
class_='rect reactive tooltip-trigger'
), serie.metadata.get(i)
)
return x, y, width, height
def bar(self, serie, rescale=False):
"""Draw a bar graph for a serie"""
serie_node = self.svg.serie(serie)
bars = self.svg.node(serie_node['plot'], class_="histbars")
points = serie.points
for i, (y, x0, x1) in enumerate(points):
if None in (x0, x1, y) or (self.logarithmic and y <= 0):
continue
metadata = serie.metadata.get(i)
bar = decorate(
self.svg, self.svg.node(bars, class_='histbar'), metadata
)
val = self._format(serie, i)
bounds = self._bar(
serie, bar, x0, x1, y, i, self.zero, secondary=rescale
)
self._tooltip_and_print_values(
serie_node, serie, bar, i, val, metadata, *bounds
)
def _compute(self):
"""Compute x/y min and max and x/y scale and set labels"""
if self.xvals:
xmin = min(self.xvals)
xmax = max(self.xvals)
xrng = (xmax - xmin)
else:
xrng = None
if self.yvals:
ymin = min(min(self.yvals), self.zero)
ymax = max(max(self.yvals), self.zero)
yrng = (ymax - ymin)
else:
yrng = None
for serie in self.all_series:
serie.points = serie.values
if xrng:
self._box.xmin, self._box.xmax = xmin, xmax
if yrng:
self._box.ymin, self._box.ymax = ymin, ymax
if self.range and self.range[0] is not None:
self._box.ymin = self.range[0]
if self.range and self.range[1] is not None:
self._box.ymax = self.range[1]
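Not part of the module above — a minimal usage sketch: each point is a (height, start, end) tuple, matching the (y, x0, x1) unpacking in bar() (numbers invented).

import pygal

hist = pygal.Histogram()
hist.add('Wide bars', [(5, 0, 10), (4, 5, 13), (2, 0, 15)])
hist.add('Narrow bars', [(10, 1, 2), (12, 4, 4.5), (8, 11, 13)])
hist.render_to_file('histogram.svg')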
| 4,392 | Python | .py | 120 | 27.741667 | 79 | 0.580296 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,327 | treemap.py | Kozea_pygal/pygal/graph/treemap.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Treemap chart: Visualize data using nested recangles"""
from pygal.adapters import none_to_zero, positive
from pygal.graph.graph import Graph
from pygal.util import alter, cut, decorate
class Treemap(Graph):
"""Treemap graph class"""
_adapters = [positive, none_to_zero]
def _rect(self, serie, serie_node, rects, val, x, y, w, h, i):
rx, ry = self.view((x, y))
rw, rh = self.view((x + w, y + h))
rw -= rx
rh -= ry
metadata = serie.metadata.get(i)
val = self._format(serie, i)
rect = decorate(
self.svg, self.svg.node(rects, class_="rect"), metadata
)
alter(
self.svg.node(
rect,
'rect',
x=rx,
y=ry,
width=rw,
height=rh,
class_='rect reactive tooltip-trigger'
), metadata
)
self._tooltip_data(
rect, val, rx + rw / 2, ry + rh / 2, 'centered',
self._get_x_label(i)
)
self._static_value(serie_node, val, rx + rw / 2, ry + rh / 2, metadata)
def _binary_tree(self, data, total, x, y, w, h, parent=None):
if total == 0:
return
if len(data) == 1:
if parent:
i, datum = data[0]
serie, serie_node, rects = parent
self._rect(serie, serie_node, rects, datum, x, y, w, h, i)
else:
datum = data[0]
serie_node = self.svg.serie(datum)
self._binary_tree(
list(enumerate(datum.values)), total, x, y, w, h, (
datum, serie_node,
self.svg.node(serie_node['plot'], class_="rects")
)
)
return
midpoint = total / 2
pivot_index = 1
running_sum = 0
for i, elt in enumerate(data):
if running_sum >= midpoint:
pivot_index = i
break
running_sum += elt[1] if parent else sum(elt.values)
half1 = data[:pivot_index]
half2 = data[pivot_index:]
if parent:
half1_sum = sum(cut(half1, 1))
half2_sum = sum(cut(half2, 1))
else:
half1_sum = sum(map(sum, map(lambda x: x.values, half1)))
half2_sum = sum(map(sum, map(lambda x: x.values, half2)))
pivot_pct = half1_sum / total
if h > w:
y_pivot = pivot_pct * h
self._binary_tree(half1, half1_sum, x, y, w, y_pivot, parent)
self._binary_tree(
half2, half2_sum, x, y + y_pivot, w, h - y_pivot, parent
)
else:
x_pivot = pivot_pct * w
self._binary_tree(half1, half1_sum, x, y, x_pivot, h, parent)
self._binary_tree(
half2, half2_sum, x + x_pivot, y, w - x_pivot, h, parent
)
def _compute_x_labels(self):
pass
def _compute_y_labels(self):
pass
def _plot(self):
total = sum(map(sum, map(lambda x: x.values, self.series)))
if total == 0:
return
gw = self.width - self.margin_box.x
gh = self.height - self.margin_box.y
self.view.box.xmin = self.view.box.ymin = x = y = 0
self.view.box.xmax = w = (total * gw / gh)**.5
self.view.box.ymax = h = total / w
self.view.box.fix()
self._binary_tree(self.series, total, x, y, w, h)
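Not part of the module above — a minimal usage sketch (numbers invented): each value becomes one rectangle, sized proportionally by the recursive binary split in _binary_tree; None entries are adapted to zero.

import pygal

treemap = pygal.Treemap(title='Binary treemap')
treemap.add('A', [2, 1, 12, 4, 2, 1, 1, 3, 12, 3, 4, None, 9])
treemap.add('B', [4, 2, 5, 10, 3, 4, 2])
treemap.render_to_file('treemap.svg')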
| 4,311 | Python | .py | 113 | 28.212389 | 79 | 0.543582 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |

11,328 | horizontalstackedbar.py | Kozea_pygal/pygal/graph/horizontalstackedbar.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Horizontal stacked graph"""
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.stackedbar import StackedBar
class HorizontalStackedBar(HorizontalGraph, StackedBar):
"""Horizontal Stacked Bar graph"""
| 1,000 | Python | .py | 23 | 42.173913 | 79 | 0.784394 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,329 | stackedline.py | Kozea_pygal/pygal/graph/stackedline.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Stacked Line chart: Like a line chart, but with each line stacked
on top of the previous ones. Usually used along with the fill=True option.
"""
from pygal.adapters import none_to_zero
from pygal.graph.line import Line
class StackedLine(Line):
"""Stacked Line graph class"""
_adapters = [none_to_zero]
def __init__(self, *args, **kwargs):
"""Custom variable initialization"""
self._previous_line = None
super(StackedLine, self).__init__(*args, **kwargs)
def _value_format(self, value, serie, index):
"""
Display value and cumulation
"""
sum_ = serie.points[index][1]
if serie in self.series and (
self.stack_from_top
and self.series.index(serie) == self._order - 1
or not self.stack_from_top and self.series.index(serie) == 0):
return super(StackedLine, self)._value_format(value)
return '%s (+%s)' % (self._y_format(sum_), self._y_format(value))
def _fill(self, values):
"""Add extra values to fill the line"""
if not self._previous_line:
self._previous_line = values
return super(StackedLine, self)._fill(values)
new_values = values + list(reversed(self._previous_line))
self._previous_line = values
return new_values
def _points(self, x_pos):
"""
Convert given data values into drawable points (x, y)
and interpolated points if interpolate option is specified
"""
for series_group in (self.series, self.secondary_series):
accumulation = [0] * self._len
for serie in series_group[::-1 if self.stack_from_top else 1]:
accumulation = list(map(sum, zip(accumulation, serie.values)))
serie.points = [(x_pos[i], v)
for i, v in enumerate(accumulation)]
if serie.points and self.interpolate:
serie.interpolated = self._interpolate(x_pos, accumulation)
else:
serie.interpolated = []
def _plot(self):
"""Plot stacked serie lines and stacked secondary lines"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.line(serie)
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.line(serie, True)
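# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API; fill=True matches the module docstring above.
# Series names and values are made up.
if __name__ == '__main__':
    import pygal

    chart = pygal.StackedLine(fill=True, title='Traffic by source (example data)')
    chart.x_labels = ['Jan', 'Feb', 'Mar', 'Apr']
    chart.add('Organic', [820, 870, 910, 990])
    chart.add('Referral', [310, 290, 350, 400])
    chart.render_to_file('stackedline_example.svg')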
| 3,161 | Python | .py | 71 | 36.746479 | 79 | 0.639403 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,330 | horizontalline.py | Kozea_pygal/pygal/graph/horizontalline.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Horizontal line graph"""
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.line import Line
class HorizontalLine(HorizontalGraph, Line):
"""Horizontal Line graph"""
def _plot(self):
"""Draw the lines in reverse order"""
for serie in self.series[::-1]:
self.line(serie)
for serie in self.secondary_series[::-1]:
self.line(serie, True)
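# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API; series names and values are made up.
if __name__ == '__main__':
    import pygal

    chart = pygal.HorizontalLine(title='Monthly visitors (example data)')
    chart.x_labels = ['Jan', 'Feb', 'Mar', 'Apr']
    chart.add('Site A', [1200, 1350, 1280, 1500])
    chart.render_to_file('horizontalline_example.svg')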
| 1,188 | Python | .py | 29 | 37.896552 | 79 | 0.735065 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,331 | bar.py | Kozea_pygal/pygal/graph/bar.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Bar chart that presents grouped data with rectangular bars whose lengths
are proportional to the values they represent.
"""
from pygal.graph.graph import Graph
from pygal.util import alter, decorate, ident, swap
class Bar(Graph):
"""Bar graph class"""
_series_margin = .06
_serie_margin = .06
def _bar(self, serie, parent, x, y, i, zero, secondary=False):
"""Internal bar drawing function"""
width = (self.view.x(1) - self.view.x(0)) / self._len
x, y = self.view((x, y))
series_margin = width * self._series_margin
x += series_margin
width -= 2 * series_margin
width /= self._order
if self.horizontal:
serie_index = self._order - serie.index - 1
else:
serie_index = serie.index
x += serie_index * width
serie_margin = width * self._serie_margin
x += serie_margin
width -= 2 * serie_margin
height = self.view.y(zero) - y
r = serie.rounded_bars * 1 if serie.rounded_bars else 0
alter(
self.svg.transposable_node(
parent,
'rect',
x=x,
y=y,
rx=r,
ry=r,
width=width,
height=height,
class_='rect reactive tooltip-trigger'
), serie.metadata.get(i)
)
return x, y, width, height
def _tooltip_and_print_values(
self, serie_node, serie, parent, i, val, metadata, x, y, width,
height
):
transpose = swap if self.horizontal else ident
x_center, y_center = transpose((x + width / 2, y + height / 2))
x_top, y_top = transpose((x + width, y + height))
x_bottom, y_bottom = transpose((x, y))
if self._dual:
v = serie.values[i][0]
else:
v = serie.values[i]
sign = -1 if v < self.zero else 1
self._tooltip_data(
parent, val, x_center, y_center, "centered", self._get_x_label(i)
)
if self.print_values_position == 'top':
if self.horizontal:
x = x_bottom + sign * self.style.value_font_size / 2
y = y_center
else:
x = x_center
y = y_bottom - sign * self.style.value_font_size / 2
elif self.print_values_position == 'bottom':
if self.horizontal:
x = x_top + sign * self.style.value_font_size / 2
y = y_center
else:
x = x_center
y = y_top - sign * self.style.value_font_size / 2
else:
x = x_center
y = y_center
self._static_value(serie_node, val, x, y, metadata, "middle")
def bar(self, serie, rescale=False):
"""Draw a bar graph for a serie"""
serie_node = self.svg.serie(serie)
bars = self.svg.node(serie_node['plot'], class_="bars")
if rescale and self.secondary_series:
points = self._rescale(serie.points)
else:
points = serie.points
for i, (x, y) in enumerate(points):
if None in (x, y) or (self.logarithmic and y <= 0):
continue
metadata = serie.metadata.get(i)
val = self._format(serie, i)
bar = decorate(
self.svg, self.svg.node(bars, class_='bar'), metadata
)
x_, y_, width, height = self._bar(
serie, bar, x, y, i, self.zero, secondary=rescale
)
self._confidence_interval(
serie_node['overlay'], x_ + width / 2, y_, serie.values[i],
metadata
)
self._tooltip_and_print_values(
serie_node, serie, bar, i, val, metadata, x_, y_, width, height
)
def _compute(self):
"""Compute y min and max and y scale and set labels"""
if self._min:
self._box.ymin = min(self._min, self.zero)
if self._max:
self._box.ymax = max(self._max, self.zero)
self._x_pos = [
x / self._len for x in range(self._len + 1)
] if self._len > 1 else [0, 1] # Center if only one value
self._points(self._x_pos)
self._x_pos = [(i + .5) / self._len for i in range(self._len)]
def _plot(self):
"""Draw bars for series and secondary series"""
for serie in self.series:
self.bar(serie)
for serie in self.secondary_series:
self.bar(serie, True)
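# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API (print_values, rounded_bars and the secondary=True
# keyword of add are existing config options); names and values are made up.
if __name__ == '__main__':
    import pygal

    chart = pygal.Bar(print_values=True, rounded_bars=3,
                      title='Requests vs. errors per day (example data)')
    chart.x_labels = ['Mon', 'Tue', 'Wed']
    chart.add('Requests', [4521, 3892, 5103])
    chart.add('Errors', [42, 31, 57], secondary=True)  # drawn against the secondary y axis
    chart.render_to_file('bar_example.svg')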
| 5,363 | Python | .py | 137 | 29.430657 | 79 | 0.558372 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,332 | gauge.py | Kozea_pygal/pygal/graph/gauge.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Gauge chart representing values as needles on a polar scale"""
from pygal.graph.graph import Graph
from pygal.util import alter, compute_scale, cut, decorate
from pygal.view import PolarThetaLogView, PolarThetaView
class Gauge(Graph):
"""Gauge graph class"""
needle_width = 1 / 20
def _set_view(self):
"""Assign a view to current graph"""
if self.logarithmic:
view_class = PolarThetaLogView
else:
view_class = PolarThetaView
self.view = view_class(
self.width - self.margin_box.x, self.height - self.margin_box.y,
self._box
)
def needle(self, serie):
"""Draw a needle for each value"""
serie_node = self.svg.serie(serie)
for i, theta in enumerate(serie.values):
if theta is None:
continue
def point(x, y):
return '%f %f' % self.view((x, y))
val = self._format(serie, i)
metadata = serie.metadata.get(i)
gauges = decorate(
self.svg, self.svg.node(serie_node['plot'], class_="dots"),
metadata
)
tolerance = 1.15
if theta < self._min:
theta = self._min * tolerance
if theta > self._max:
theta = self._max * tolerance
w = (self._box._tmax - self._box._tmin + self.view.aperture) / 4
if self.logarithmic:
w = min(w, self._min - self._min * 10**-10)
alter(
self.svg.node(
gauges,
'path',
d='M %s L %s A %s 1 0 1 %s Z' % (
point(.85, theta),
point(self.needle_width, theta - w),
'%f %f' % (self.needle_width, self.needle_width),
point(self.needle_width, theta + w),
),
class_='line reactive tooltip-trigger'
), metadata
)
x, y = self.view((.75, theta))
self._tooltip_data(gauges, val, x, y, xlabel=self._get_x_label(i))
self._static_value(serie_node, val, x, y, metadata)
def _y_axis(self, draw_axes=True):
"""Override y axis to plot a polar axis"""
axis = self.svg.node(self.nodes['plot'], class_="axis x gauge")
for i, (label, theta) in enumerate(self._y_labels):
guides = self.svg.node(axis, class_='guides')
self.svg.line(
guides, [self.view((.95, theta)),
self.view((1, theta))],
close=True,
class_='line'
)
self.svg.line(
guides, [self.view((0, theta)),
self.view((.95, theta))],
close=True,
class_='guide line %s' %
('major' if i in (0, len(self._y_labels) - 1) else '')
)
x, y = self.view((.9, theta))
self.svg.node(guides, 'text', x=x, y=y).text = label
self.svg.node(
guides,
'title',
).text = self._y_format(theta)
def _x_axis(self, draw_axes=True):
"""Override x axis to put a center circle in center"""
axis = self.svg.node(self.nodes['plot'], class_="axis y gauge")
x, y = self.view((0, 0))
self.svg.node(axis, 'circle', cx=x, cy=y, r=4)
def _compute(self):
"""Compute y min and max and y scale and set labels"""
self.min_ = self._min or 0
self.max_ = self._max or 0
if self.max_ - self.min_ == 0:
self.min_ -= 1
self.max_ += 1
self._box.set_polar_box(0, 1, self.min_, self.max_)
def _compute_x_labels(self):
pass
def _compute_y_labels(self):
y_pos = compute_scale(
self.min_, self.max_, self.logarithmic, self.order_min,
self.min_scale, self.max_scale
)
if self.y_labels:
self._y_labels = []
for i, y_label in enumerate(self.y_labels):
if isinstance(y_label, dict):
pos = self._adapt(y_label.get('value'))
title = y_label.get('label', self._y_format(pos))
elif isinstance(y_label, str):
pos = self._adapt(y_pos[i])
title = y_label
else:
pos = self._adapt(y_label)
title = self._y_format(pos)
self._y_labels.append((title, pos))
self.min_ = min(self.min_, min(cut(self._y_labels, 1)))
self.max_ = max(self.max_, max(cut(self._y_labels, 1)))
self._box.set_polar_box(0, 1, self.min_, self.max_)
else:
self._y_labels = list(zip(map(self._y_format, y_pos), y_pos))
def _plot(self):
"""Plot all needles"""
for serie in self.series:
self.needle(serie)
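# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API (human_readable and range are existing config
# options); the series names and values are made up.
if __name__ == '__main__':
    import pygal

    gauge = pygal.Gauge(human_readable=True, title='Resource usage (example data)')
    gauge.range = [0, 100]          # min/max of the polar scale
    gauge.add('Disk %', 73)         # one needle per value
    gauge.add('Memory %', 52)
    gauge.render_to_file('gauge_example.svg')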
| 5,811 | Python | .py | 139 | 30.143885 | 79 | 0.527994 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,333 | dual.py | Kozea_pygal/pygal/graph/dual.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Dual chart base. Dual means a chart with 2 scaled axis like xy"""
from pygal.graph.graph import Graph
from pygal.util import compute_scale, cut
class Dual(Graph):
_dual = True
def _value_format(self, value):
"""
Format value for dual value display.
"""
return '%s: %s' % (self._x_format(value[0]), self._y_format(value[1]))
def _compute_x_labels(self):
x_pos = compute_scale(
self._box.xmin, self._box.xmax, self.logarithmic, self.order_min,
self.min_scale, self.max_scale
)
if self.x_labels:
self._x_labels = []
for i, x_label in enumerate(self.x_labels):
if isinstance(x_label, dict):
pos = self._x_adapt(x_label.get('value'))
title = x_label.get('label', self._x_format(pos))
elif isinstance(x_label, str):
pos = self._x_adapt(x_pos[i % len(x_pos)])
title = x_label
else:
pos = self._x_adapt(x_label)
title = self._x_format(pos)
self._x_labels.append((title, pos))
self._box.xmin = min(self._box.xmin, min(cut(self._x_labels, 1)))
self._box.xmax = max(self._box.xmax, max(cut(self._x_labels, 1)))
else:
self._x_labels = list(zip(map(self._x_format, x_pos), x_pos))
def _compute_x_labels_major(self):
        # In case of dual, x labels must go through the x adapters, and so must the majors
self.x_labels_major = self.x_labels_major and list(
map(self._x_adapt, self.x_labels_major)
)
super(Dual, self)._compute_x_labels_major()
def _get_x_label(self, i):
"""Convenience function to get the x_label of a value index"""
return
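# --- Editor's note with an illustrative sketch; not part of the original file. ---
# Dual is a base class for charts with two scaled axes and is not used directly.
# A hypothetical example through its XY subclass, showing explicit x_labels given
# as dicts ({'value': ..., 'label': ...}), which _compute_x_labels above supports:
if __name__ == '__main__':
    import pygal

    chart = pygal.XY(title='Dual-axis labelling (example data)')
    chart.x_labels = [
        {'value': 0, 'label': 'start'},
        {'value': 5, 'label': 'mid'},
        {'value': 10, 'label': 'end'},
    ]
    chart.add('series', [(0, 1), (2, 3), (5, 2), (10, 4)])
    chart.render_to_file('dual_example.svg')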
| 2,595 | Python | .py | 59 | 35.847458 | 79 | 0.61575 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,334 | pie.py | Kozea_pygal/pygal/graph/pie.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Pie chart: A circular chart divided into slices to illustrate proportions.
It can be rendered as a donut or as a half pie.
"""
from math import pi
from pygal.adapters import none_to_zero, positive
from pygal.graph.graph import Graph
from pygal.util import alter, decorate
class Pie(Graph):
"""Pie graph class"""
_adapters = [positive, none_to_zero]
def slice(self, serie, start_angle, total):
"""Make a serie slice"""
serie_node = self.svg.serie(serie)
dual = self._len > 1 and not self._order == 1
slices = self.svg.node(serie_node['plot'], class_="slices")
serie_angle = 0
original_start_angle = start_angle
if self.half_pie:
center = ((self.width - self.margin_box.x) / 2.,
(self.height - self.margin_box.y) / 1.25)
else:
center = ((self.width - self.margin_box.x) / 2.,
(self.height - self.margin_box.y) / 2.)
radius = min(center)
for i, val in enumerate(serie.values):
perc = val / total
if self.half_pie:
angle = 2 * pi * perc / 2
else:
angle = 2 * pi * perc
serie_angle += angle
val = self._format(serie, i)
metadata = serie.metadata.get(i)
slice_ = decorate(
self.svg, self.svg.node(slices, class_="slice"), metadata
)
if dual:
small_radius = radius * .9
big_radius = radius
else:
big_radius = radius * .9
small_radius = radius * serie.inner_radius
alter(
self.svg.slice(
serie_node, slice_, big_radius, small_radius, angle,
start_angle, center, val, i, metadata
), metadata
)
start_angle += angle
if dual:
val = self._serie_format(serie, sum(serie.values))
self.svg.slice(
serie_node, self.svg.node(slices,
class_="big_slice"), radius * .9, 0,
serie_angle, original_start_angle, center, val, i, metadata
)
return serie_angle
def _compute_x_labels(self):
pass
def _compute_y_labels(self):
pass
def _plot(self):
"""Draw all the serie slices"""
total = sum(map(sum, map(lambda x: x.values, self.series)))
if total == 0:
return
if self.half_pie:
current_angle = 3 * pi / 2
else:
current_angle = 0
for index, serie in enumerate(self.series):
angle = self.slice(serie, current_angle, total)
current_angle += angle
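# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API (inner_radius and half_pie are existing config
# options); names and percentages are made up.
if __name__ == '__main__':
    import pygal

    pie = pygal.Pie(inner_radius=.4, title='Browser share (example data)')  # inner_radius > 0 makes a donut
    pie.add('Chrome', 62.9)
    pie.add('Firefox', 18.4)
    pie.add('Other', 18.7)
    pie.render_to_file('pie_example.svg')
    # For a half pie, pass half_pie=True instead of (or along with) inner_radius.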
| 3,561 | Python | .py | 92 | 29.271739 | 79 | 0.578292 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,335 | xy.py | Kozea_pygal/pygal/graph/xy.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
XY Line graph: Plot a set of (x, y) data point couples connected by
straight segments.
"""
from functools import reduce
from pygal.graph.dual import Dual
from pygal.graph.line import Line
from pygal.util import cached_property, compose, ident
class XY(Line, Dual):
"""XY Line graph class"""
_x_adapters = []
@cached_property
def xvals(self):
"""All x values"""
return [
val[0] for serie in self.all_series for val in serie.values
if val[0] is not None
]
@cached_property
def yvals(self):
"""All y values"""
return [
val[1] for serie in self.series for val in serie.values
if val[1] is not None
]
@cached_property
def _min(self):
"""Getter for the minimum series value"""
return (
self.range[0] if (self.range and self.range[0] is not None) else
(min(self.yvals) if self.yvals else None)
)
@cached_property
def _max(self):
"""Getter for the maximum series value"""
return (
self.range[1] if (self.range and self.range[1] is not None) else
(max(self.yvals) if self.yvals else None)
)
def _compute(self):
"""Compute x/y min and max and x/y scale and set labels"""
if self.xvals:
if self.xrange:
x_adapter = reduce(compose, self._x_adapters) if getattr(
self, '_x_adapters', None
) else ident
xmin = x_adapter(self.xrange[0])
xmax = x_adapter(self.xrange[1])
else:
xmin = min(self.xvals)
xmax = max(self.xvals)
xrng = (xmax - xmin)
else:
xrng = None
if self.yvals:
ymin = self._min
ymax = self._max
if self.include_x_axis:
ymin = min(ymin or 0, 0)
ymax = max(ymax or 0, 0)
yrng = (ymax - ymin)
else:
yrng = None
for serie in self.all_series:
serie.points = serie.values
if self.interpolate:
vals = list(
zip(
*sorted(
filter(lambda t: None not in t, serie.points),
key=lambda x: x[0]
)
)
)
serie.interpolated = self._interpolate(vals[0], vals[1])
if self.interpolate:
self.xvals = [
val[0] for serie in self.all_series
for val in serie.interpolated
]
self.yvals = [
val[1] for serie in self.series for val in serie.interpolated
]
if self.xvals:
xmin = min(self.xvals)
xmax = max(self.xvals)
xrng = (xmax - xmin)
else:
xrng = None
# these values can also be 0 (zero), so testing explicitly for None
if xrng is not None:
self._box.xmin, self._box.xmax = xmin, xmax
if yrng is not None:
self._box.ymin, self._box.ymax = ymin, ymax
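# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API (stroke is an existing config option; stroke=False
# gives a scatter-style rendering); the sample points are made up.
if __name__ == '__main__':
    import pygal

    chart = pygal.XY(stroke=False, title='Scatter of (x, y) samples (example data)')
    chart.add('samples', [(0.0, 0.0), (0.5, 1.2), (1.3, 0.9), (2.0, 2.4)])
    chart.render_to_file('xy_example.svg')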
| 4,016 | Python | .py | 112 | 25.794643 | 79 | 0.556242 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,336 | funnel.py | Kozea_pygal/pygal/graph/funnel.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Funnel chart: Represent values as a funnel"""
from pygal.adapters import none_to_zero, positive
from pygal.graph.graph import Graph
from pygal.util import alter, cut, decorate
class Funnel(Graph):
"""Funnel graph class"""
_adapters = [positive, none_to_zero]
def _value_format(self, value):
"""Format value for dual value display."""
return super(Funnel, self)._value_format(value and abs(value))
def funnel(self, serie):
"""Draw a funnel slice"""
serie_node = self.svg.serie(serie)
fmt = lambda x: '%f %f' % x
for i, poly in enumerate(serie.points):
metadata = serie.metadata.get(i)
val = self._format(serie, i)
funnels = decorate(
self.svg, self.svg.node(serie_node['plot'], class_="funnels"),
metadata
)
alter(
self.svg.node(
funnels,
'polygon',
points=' '.join(map(fmt, map(self.view, poly))),
class_='funnel reactive tooltip-trigger'
), metadata
)
            # Poly center, used for label placement
x, y = self.view((
self._center(self._x_pos[serie.index]),
sum([point[1] for point in poly]) / len(poly)
))
self._tooltip_data(
funnels, val, x, y, 'centered', self._get_x_label(serie.index)
)
self._static_value(serie_node, val, x, y, metadata)
def _center(self, x):
return x - 1 / (2 * self._order)
def _compute(self):
"""Compute y min and max and y scale and set labels"""
self._x_pos = [
(x + 1) / self._order for x in range(self._order)
] if self._order != 1 else [.5] # Center if only one value
previous = [[self.zero, self.zero] for i in range(self._len)]
for i, serie in enumerate(self.series):
y_height = -sum(serie.safe_values) / 2
all_x_pos = [0] + self._x_pos
serie.points = []
for j, value in enumerate(serie.values):
poly = []
poly.append((all_x_pos[i], previous[j][0]))
poly.append((all_x_pos[i], previous[j][1]))
previous[j][0] = y_height
y_height = previous[j][1] = y_height + value
poly.append((all_x_pos[i + 1], previous[j][1]))
poly.append((all_x_pos[i + 1], previous[j][0]))
serie.points.append(poly)
val_max = max(list(map(sum, cut(self.series, 'values'))) + [self.zero])
self._box.ymin = -val_max
self._box.ymax = val_max
if self.range and self.range[0] is not None:
self._box.ymin = self.range[0]
if self.range and self.range[1] is not None:
self._box.ymax = self.range[1]
def _compute_x_labels(self):
self._x_labels = list(
zip(
self.x_labels and map(self._x_format, self.x_labels) or [
serie.title['title']
if isinstance(serie.title, dict) else serie.title or ''
for serie in self.series
], map(self._center, self._x_pos)
)
)
def _plot(self):
"""Plot the funnel"""
for serie in self.series:
self.funnel(serie)
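# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API; each added serie becomes one funnel stage.
# Names and values are made up.
if __name__ == '__main__':
    import pygal

    funnel = pygal.Funnel(title='Issue pipeline (example data)')
    funnel.add('Opened', [420, 380, 310])
    funnel.add('Triaged', [300, 270, 240])
    funnel.add('Fixed', [150, 140, 120])
    funnel.render_to_file('funnel_example.svg')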
| 4,183 | Python | .py | 98 | 32.571429 | 79 | 0.566618 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,337 | line.py | Kozea_pygal/pygal/graph/line.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Line chart: Display series of data as markers (dots)
connected by straight segments
"""
from pygal.graph.graph import Graph
from pygal.util import alter, cached_property, decorate
class Line(Graph):
"""Line graph class"""
def __init__(self, *args, **kwargs):
"""Set _self_close as False, it's True for Radar like Line"""
self._self_close = False
super(Line, self).__init__(*args, **kwargs)
@cached_property
def _values(self):
"""Getter for series values (flattened)"""
return [
val[1] for serie in self.series for val in
(serie.interpolated if self.interpolate else serie.points)
if val[1] is not None and (not self.logarithmic or val[1] > 0)
]
@cached_property
def _secondary_values(self):
"""Getter for secondary series values (flattened)"""
return [
val[1] for serie in self.secondary_series for val in
(serie.interpolated if self.interpolate else serie.points)
if val[1] is not None and (not self.logarithmic or val[1] > 0)
]
def _fill(self, values):
"""Add extra values to fill the line"""
zero = self.view.y(min(max(self.zero, self._box.ymin), self._box.ymax))
# Check to see if the data has been padded with "none's"
# Fill doesn't work correctly otherwise
end = len(values) - 1
while end > 0:
x, y = values[end]
if self.missing_value_fill_truncation == "either":
if x is not None and y is not None:
break
elif self.missing_value_fill_truncation == "x":
if x is not None:
break
elif self.missing_value_fill_truncation == "y":
if y is not None:
break
else:
raise ValueError(
"Invalid value ({}) for config key "
"'missing_value_fill_truncation';"
" Use 'x', 'y' or 'either'".format(
self.missing_value_fill_truncation
)
)
end -= 1
return ([(values[0][0], zero)] + values + [(values[end][0], zero)])
def line(self, serie, rescale=False):
"""Draw the line serie"""
serie_node = self.svg.serie(serie)
if rescale and self.secondary_series:
points = self._rescale(serie.points)
else:
points = serie.points
view_values = list(map(self.view, points))
if serie.show_dots:
for i, (x, y) in enumerate(view_values):
if None in (x, y):
continue
if self.logarithmic:
if points[i][1] is None or points[i][1] <= 0:
continue
if (serie.show_only_major_dots and self.x_labels
and i < len(self.x_labels)
and self.x_labels[i] not in self._x_labels_major):
continue
metadata = serie.metadata.get(i)
classes = []
if x > self.view.width / 2:
classes.append('left')
if y > self.view.height / 2:
classes.append('top')
classes = ' '.join(classes)
self._confidence_interval(
serie_node['overlay'], x, y, serie.values[i], metadata
)
dots = decorate(
self.svg,
self.svg.node(serie_node['overlay'], class_="dots"),
metadata
)
val = self._format(serie, i)
alter(
self.svg.transposable_node(
dots,
'circle',
cx=x,
cy=y,
r=serie.dots_size,
class_='dot reactive tooltip-trigger'
), metadata
)
self._tooltip_data(
dots, val, x, y, xlabel=self._get_x_label(i)
)
self._static_value(
serie_node, val, x + self.style.value_font_size,
y + self.style.value_font_size, metadata
)
if serie.stroke:
if self.interpolate:
points = serie.interpolated
if rescale and self.secondary_series:
points = self._rescale(points)
view_values = list(map(self.view, points))
if serie.fill:
view_values = self._fill(view_values)
if serie.allow_interruptions:
# view_values are in form [(x1, y1), (x2, y2)]. We
# need to split that into multiple sequences if a
# None is present here
sequences = []
cur_sequence = []
for x, y in view_values:
if y is None and len(cur_sequence) > 0:
# emit current subsequence
sequences.append(cur_sequence)
cur_sequence = []
elif y is None: # just discard
continue
else:
cur_sequence.append((x, y)) # append the element
if len(cur_sequence) > 0: # emit last possible sequence
sequences.append(cur_sequence)
else:
# plain vanilla rendering
sequences = [view_values]
if self.logarithmic:
for seq in sequences:
for ele in seq[::-1]:
y = points[seq.index(ele)][1]
if y is None or y <= 0:
del seq[seq.index(ele)]
for seq in sequences:
self.svg.line(
serie_node['plot'],
seq,
close=self._self_close,
class_='line reactive' +
(' nofill' if not serie.fill else '')
)
def _compute(self):
"""Compute y min and max and y scale and set labels"""
# X Labels
if self.horizontal:
self._x_pos = [
x / (self._len - 1) for x in range(self._len)
][::-1] if self._len != 1 else [.5] # Center if only one value
else:
self._x_pos = [
x / (self._len - 1) for x in range(self._len)
] if self._len != 1 else [.5] # Center if only one value
self._points(self._x_pos)
if self.include_x_axis:
# Y Label
self._box.ymin = min(self._min or 0, 0)
self._box.ymax = max(self._max or 0, 0)
else:
self._box.ymin = self._min
self._box.ymax = self._max
def _plot(self):
"""Plot the serie lines and secondary serie lines"""
for serie in self.series:
self.line(serie)
for serie in self.secondary_series:
self.line(serie, True)
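# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API (interpolate='cubic' is an existing config option);
# names and values are made up.
if __name__ == '__main__':
    import pygal

    chart = pygal.Line(interpolate='cubic', title='Weekly CPU load (example data)')
    chart.x_labels = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri']
    chart.add('server-1', [31, 42, 28, 35, 39])
    chart.add('server-2', [22, 25, 33, 30, 27])
    chart.render_to_file('line_example.svg')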
| 8,006 | Python | .py | 192 | 27.890625 | 79 | 0.505327 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,338 | time.py | Kozea_pygal/pygal/graph/time.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
XY time extensions: handle conversion of date, time, datetime and timedelta
values into floats for xy plotting, and back to their original type for display
"""
from datetime import date, datetime, time, timedelta
from pygal._compat import timestamp
from pygal.adapters import positive
from pygal.graph.xy import XY
def datetime_to_timestamp(x):
"""Convert a datetime into a utc float timestamp"""
if isinstance(x, datetime):
return timestamp(x)
return x
def datetime_to_time(x):
"""Convert a datetime into a time"""
if isinstance(x, datetime):
return x.time()
return x
def date_to_datetime(x):
"""Convert a date into a datetime"""
if not isinstance(x, datetime) and isinstance(x, date):
return datetime.combine(x, time())
return x
def time_to_datetime(x):
"""Convert a time into a datetime"""
if isinstance(x, time):
return datetime.combine(date(1970, 1, 1), x)
return x
def timedelta_to_seconds(x):
"""Convert a timedelta into an amount of seconds"""
if isinstance(x, timedelta):
return x.total_seconds()
return x
def time_to_seconds(x):
"""Convert a time in a seconds sum"""
if isinstance(x, time):
return ((((x.hour * 60) + x.minute) * 60 + x.second) * 10**6 +
x.microsecond) / 10**6
if isinstance(x, str):
return x
# Clamp to valid time
return x and max(0, min(x, 24 * 3600 - 10**-6))
def seconds_to_time(x):
"""Convert a number of second into a time"""
t = int(x * 10**6)
ms = t % 10**6
t = t // 10**6
s = t % 60
t = t // 60
m = t % 60
t = t // 60
h = t
return time(h, m, s, ms)
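# Worked example (editor's addition): seconds_to_time(3661.5) gives
# t = 3_661_500_000 microseconds -> time(1, 1, 1, 500000), i.e. 01:01:01.5.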
class DateTimeLine(XY):
"""DateTime abscissa xy graph class"""
_x_adapters = [datetime_to_timestamp, date_to_datetime]
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def datetime_to_str(x):
dt = datetime.utcfromtimestamp(x)
return self.x_value_formatter(dt)
return datetime_to_str
class DateLine(DateTimeLine):
"""Date abscissa xy graph class"""
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def date_to_str(x):
d = datetime.utcfromtimestamp(x).date()
return self.x_value_formatter(d)
return date_to_str
class TimeLine(DateTimeLine):
"""Time abscissa xy graph class"""
_x_adapters = [positive, time_to_seconds, datetime_to_time]
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def date_to_str(x):
t = seconds_to_time(x)
return self.x_value_formatter(t)
return date_to_str
class TimeDeltaLine(XY):
"""TimeDelta abscissa xy graph class"""
_x_adapters = [timedelta_to_seconds]
@property
def _x_format(self):
"""Return the value formatter for this graph"""
def timedelta_to_str(x):
td = timedelta(seconds=x)
return self.x_value_formatter(td)
return timedelta_to_str
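# --- Illustrative usage sketch (editor's addition, not part of the original file). ---
# Assumes the public pygal API (x_label_rotation and x_value_formatter are existing
# config options); the timestamps and temperatures are made up.
if __name__ == '__main__':
    from datetime import datetime

    import pygal

    chart = pygal.DateTimeLine(
        x_label_rotation=35,
        x_value_formatter=lambda dt: dt.strftime('%Y-%m-%d %H:%M')
    )
    chart.add('Temperature', [
        (datetime(2024, 3, 1, 6), 4.2),
        (datetime(2024, 3, 1, 12), 9.8),
        (datetime(2024, 3, 1, 18), 6.5),
    ])
    chart.render_to_file('datetimeline_example.svg')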
| 3,878 | Python | .py | 110 | 29.854545 | 79 | 0.660139 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,339 | graph.py | Kozea_pygal/pygal/graph/graph.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Chart properties and drawing"""
from math import ceil, cos, sin, sqrt
from pygal import stats
from pygal._compat import is_list_like
from pygal.graph.public import PublicApi
from pygal.interpolate import INTERPOLATIONS
from pygal.util import (
cached_property,
compute_scale,
cut,
decorate,
filter_kwargs,
get_text_box,
get_texts_box,
majorize,
rad,
reverse_text_len,
split_title,
truncate,
)
from pygal.view import LogView, ReverseView, View, XYLogView
class Graph(PublicApi):
"""Graph super class containing generic common functions"""
_dual = False
def _decorate(self):
"""Draw all decorations"""
self._set_view()
self._make_graph()
self._axes()
self._legend()
self._make_title()
self._make_x_title()
self._make_y_title()
def _axes(self):
"""Draw axes"""
self._y_axis()
self._x_axis()
def _set_view(self):
"""Assign a view to current graph"""
if self.logarithmic:
if self._dual:
view_class = XYLogView
else:
view_class = LogView
else:
view_class = ReverseView if self.inverse_y_axis else View
self.view = view_class(
self.width - self.margin_box.x, self.height - self.margin_box.y,
self._box
)
def _make_graph(self):
"""Init common graph svg structure"""
self.nodes['graph'] = self.svg.node(
class_='graph %s-graph %s' % (
self.__class__.__name__.lower(),
'horizontal' if self.horizontal else 'vertical'
)
)
self.svg.node(
self.nodes['graph'],
'rect',
class_='background',
x=0,
y=0,
width=self.width,
height=self.height
)
self.nodes['plot'] = self.svg.node(
self.nodes['graph'],
class_="plot",
transform="translate(%d, %d)" %
(self.margin_box.left, self.margin_box.top)
)
self.svg.node(
self.nodes['plot'],
'rect',
class_='background',
x=0,
y=0,
width=self.view.width,
height=self.view.height
)
self.nodes['title'] = self.svg.node(
self.nodes['graph'], class_="titles"
)
self.nodes['overlay'] = self.svg.node(
self.nodes['graph'],
class_="plot overlay",
transform="translate(%d, %d)" %
(self.margin_box.left, self.margin_box.top)
)
self.nodes['text_overlay'] = self.svg.node(
self.nodes['graph'],
class_="plot text-overlay",
transform="translate(%d, %d)" %
(self.margin_box.left, self.margin_box.top)
)
self.nodes['tooltip_overlay'] = self.svg.node(
self.nodes['graph'],
class_="plot tooltip-overlay",
transform="translate(%d, %d)" %
(self.margin_box.left, self.margin_box.top)
)
self.nodes['tooltip'] = self.svg.node(
self.nodes['tooltip_overlay'],
transform='translate(0 0)',
style="opacity: 0",
**{'class': 'tooltip'}
)
self.svg.node(
self.nodes['tooltip'],
'rect',
rx=self.tooltip_border_radius,
ry=self.tooltip_border_radius,
width=0,
height=0,
**{'class': 'tooltip-box'}
)
self.svg.node(self.nodes['tooltip'], 'g', class_='text')
def _x_axis(self):
"""Make the x axis: labels and guides"""
if not self._x_labels or not self.show_x_labels:
return
axis = self.svg.node(
self.nodes['plot'],
class_="axis x%s" % (' always_show' if self.show_x_guides else '')
)
truncation = self.truncate_label
if not truncation:
if self.x_label_rotation or len(self._x_labels) <= 1:
truncation = 25
else:
first_label_position = self.view.x(self._x_labels[0][1]) or 0
last_label_position = self.view.x(self._x_labels[-1][1]) or 0
available_space = (last_label_position - first_label_position
) / len(self._x_labels) - 1
truncation = reverse_text_len(
available_space, self.style.label_font_size
)
truncation = max(truncation, 1)
lastlabel = self._x_labels[-1][0]
if 0 not in [label[1] for label in self._x_labels]:
self.svg.node(
axis,
'path',
d='M%f %f v%f' % (0, 0, self.view.height),
class_='line'
)
lastlabel = None
for label, position in self._x_labels:
if self.horizontal:
major = position in self._x_labels_major
else:
major = label in self._x_labels_major
if not (self.show_minor_x_labels or major):
continue
guides = self.svg.node(axis, class_='guides')
x = self.view.x(position)
if x is None:
continue
y = self.view.height + 5
last_guide = (self._y_2nd_labels and label == lastlabel)
self.svg.node(
guides,
'path',
d='M%f %f v%f' % (x or 0, 0, self.view.height),
class_='%s%s%sline' % (
'axis ' if label == "0" else '', 'major '
if major else '', 'guide '
if position != 0 and not last_guide else ''
)
)
y += .5 * self.style.label_font_size + 5
text = self.svg.node(
guides, 'text', x=x, y=y, class_='major' if major else ''
)
text.text = truncate(label, truncation)
if text.text != label:
self.svg.node(guides, 'title').text = label
elif self._dual:
self.svg.node(
guides,
'title',
).text = self._x_format(position)
if self.x_label_rotation:
text.attrib['transform'] = "rotate(%d %f %f)" % (
self.x_label_rotation, x, y
)
if self.x_label_rotation >= 180:
text.attrib['class'] = ' '.join((
text.attrib['class']
and text.attrib['class'].split(' ') or []
) + ['backwards'])
if self._y_2nd_labels and 0 not in [label[1]
for label in self._x_labels]:
self.svg.node(
axis,
'path',
d='M%f %f v%f' % (self.view.width, 0, self.view.height),
class_='line'
)
if self._x_2nd_labels:
secondary_ax = self.svg.node(
self.nodes['plot'],
class_="axis x x2%s" %
(' always_show' if self.show_x_guides else '')
)
for label, position in self._x_2nd_labels:
major = label in self._x_labels_major
if not (self.show_minor_x_labels or major):
continue
                # needed to keep the same structure as the primary axis
guides = self.svg.node(secondary_ax, class_='guides')
x = self.view.x(position)
y = -5
text = self.svg.node(
guides, 'text', x=x, y=y, class_='major' if major else ''
)
text.text = label
if self.x_label_rotation:
text.attrib['transform'] = "rotate(%d %f %f)" % (
-self.x_label_rotation, x, y
)
if self.x_label_rotation >= 180:
text.attrib['class'] = ' '.join((
text.attrib['class']
and text.attrib['class'].split(' ') or []
) + ['backwards'])
def _y_axis(self):
"""Make the y axis: labels and guides"""
if not self._y_labels or not self.show_y_labels:
return
axis = self.svg.node(
self.nodes['plot'],
class_="axis y%s" % (' always_show' if self.show_y_guides else '')
)
if (0 not in [label[1] for label in self._y_labels]
and self.show_y_guides):
self.svg.node(
axis,
'path',
d='M%f %f h%f' % (
0, 0 if self.inverse_y_axis else self.view.height,
self.view.width
),
class_='line'
)
for label, position in self._y_labels:
if self.horizontal:
major = label in self._y_labels_major
else:
major = position in self._y_labels_major
if not (self.show_minor_y_labels or major):
continue
guides = self.svg.node(
axis,
class_='%sguides' %
('logarithmic ' if self.logarithmic else '')
)
x = -5
y = self.view.y(position)
if not y:
continue
if self.show_y_guides:
self.svg.node(
guides,
'path',
d='M%f %f h%f' % (0, y, self.view.width),
class_='%s%s%sline' % (
'axis ' if label == "0" else '', 'major '
if major else '', 'guide ' if position != 0 else ''
)
)
text = self.svg.node(
guides,
'text',
x=x,
y=y + .35 * self.style.label_font_size,
class_='major' if major else ''
)
text.text = label
if self.y_label_rotation:
text.attrib['transform'] = "rotate(%d %f %f)" % (
self.y_label_rotation, x, y
)
if 90 < self.y_label_rotation < 270:
text.attrib['class'] = ' '.join((
text.attrib['class']
and text.attrib['class'].split(' ') or []
) + ['backwards'])
self.svg.node(
guides,
'title',
).text = self._y_format(position)
if self._y_2nd_labels:
secondary_ax = self.svg.node(self.nodes['plot'], class_="axis y2")
for label, position in self._y_2nd_labels:
major = position in self._y_labels_major
if not (self.show_minor_y_labels or major):
continue
                # needed to keep the same structure as the primary axis
guides = self.svg.node(secondary_ax, class_='guides')
x = self.view.width + 5
y = self.view.y(position)
text = self.svg.node(
guides,
'text',
x=x,
y=y + .35 * self.style.label_font_size,
class_='major' if major else ''
)
text.text = label
if self.y_label_rotation:
text.attrib['transform'] = "rotate(%d %f %f)" % (
self.y_label_rotation, x, y
)
if 90 < self.y_label_rotation < 270:
text.attrib['class'] = ' '.join((
text.attrib['class']
and text.attrib['class'].split(' ') or []
) + ['backwards'])
def _legend(self):
"""Make the legend box"""
if not self.show_legend:
return
truncation = self.truncate_legend
if self.legend_at_bottom:
x = self.margin_box.left + self.spacing
y = (
self.margin_box.top + self.view.height + self._x_title_height +
self._x_labels_height + self.spacing
)
cols = self.legend_at_bottom_columns or ceil(sqrt(self._order)
) or 1
if not truncation:
available_space = self.view.width / cols - (
self.legend_box_size + 5
)
truncation = reverse_text_len(
available_space, self.style.legend_font_size
)
else:
x = self.spacing
y = self.margin_box.top + self.spacing
cols = 1
if not truncation:
truncation = 15
legends = self.svg.node(
self.nodes['graph'],
class_='legends',
transform='translate(%d, %d)' % (x, y)
)
h = max(self.legend_box_size, self.style.legend_font_size)
x_step = self.view.width / cols
if self.legend_at_bottom:
secondary_legends = legends # svg node is the same
else:
# draw secondary axis on right
x = self.margin_box.left + self.view.width + self.spacing
if self._y_2nd_labels:
h, w = get_texts_box(
cut(self._y_2nd_labels), self.style.label_font_size
)
x += self.spacing + max(
w * abs(cos(rad(self.y_label_rotation))), h
)
y = self.margin_box.top + self.spacing
secondary_legends = self.svg.node(
self.nodes['graph'],
class_='legends',
transform='translate(%d, %d)' % (x, y)
)
serie_number = -1
i = 0
for titles, is_secondary in ((self._legends, False),
(self._secondary_legends, True)):
if not self.legend_at_bottom and is_secondary:
i = 0
for title in titles:
serie_number += 1
if title is None:
continue
col = i % cols
row = i // cols
legend = self.svg.node(
secondary_legends if is_secondary else legends,
class_='legend reactive activate-serie',
id="activate-serie-%d" % serie_number
)
self.svg.node(
legend,
'rect',
x=col * x_step,
y=1.5 * row * h + (
self.style.legend_font_size - self.legend_box_size
if self.style.legend_font_size > self.legend_box_size
else 0
) / 2,
width=self.legend_box_size,
height=self.legend_box_size,
class_="color-%d reactive" % serie_number
)
if isinstance(title, dict):
node = decorate(self.svg, legend, title)
title = title['title']
else:
node = legend
truncated = truncate(title, truncation)
self.svg.node(
node,
'text',
x=col * x_step + self.legend_box_size + 5,
y=1.5 * row * h + .5 * h + .3 * self.style.legend_font_size
).text = truncated
if truncated != title:
self.svg.node(legend, 'title').text = title
i += 1
def _make_title(self):
"""Make the title"""
if self._title:
for i, title_line in enumerate(self._title, 1):
self.svg.node(
self.nodes['title'],
'text',
class_='title plot_title',
x=self.width / 2,
y=i * (self.style.title_font_size + self.spacing)
).text = title_line
def _make_x_title(self):
"""Make the X-Axis title"""
y = (self.height - self.margin_box.bottom + self._x_labels_height)
if self._x_title:
for i, title_line in enumerate(self._x_title, 1):
text = self.svg.node(
self.nodes['title'],
'text',
class_='title',
x=self.margin_box.left + self.view.width / 2,
y=y + i * (self.style.title_font_size + self.spacing)
)
text.text = title_line
def _make_y_title(self):
"""Make the Y-Axis title"""
if self._y_title:
yc = self.margin_box.top + self.view.height / 2
for i, title_line in enumerate(self._y_title, 1):
text = self.svg.node(
self.nodes['title'],
'text',
class_='title',
x=self._legend_at_left_width,
y=i * (self.style.title_font_size + self.spacing) + yc
)
text.attrib['transform'] = "rotate(%d %f %f)" % (
-90, self._legend_at_left_width, yc
)
text.text = title_line
def _interpolate(self, xs, ys):
"""Make the interpolation"""
x = []
y = []
for i in range(len(ys)):
if ys[i] is not None:
x.append(xs[i])
y.append(ys[i])
interpolate = INTERPOLATIONS[self.interpolate]
return list(
interpolate(
x, y, self.interpolation_precision,
**self.interpolation_parameters
)
)
def _rescale(self, points):
"""Scale for secondary"""
return [(
x, self._scale_diff + (y - self._scale_min_2nd) * self._scale
if y is not None else None
) for x, y in points]
def _tooltip_data(self, node, value, x, y, classes=None, xlabel=None):
"""Insert in desc tags informations for the javascript tooltip"""
self.svg.node(node, 'desc', class_="value").text = value
if classes is None:
classes = []
if x > self.view.width / 2:
classes.append('left')
if y > self.view.height / 2:
classes.append('top')
classes = ' '.join(classes)
self.svg.node(node, 'desc', class_="x " + classes).text = str(x)
self.svg.node(node, 'desc', class_="y " + classes).text = str(y)
if xlabel:
self.svg.node(node, 'desc', class_="x_label").text = str(xlabel)
def _static_value(
self,
serie_node,
value,
x,
y,
metadata,
align_text='left',
classes=None
):
"""Write the print value"""
label = metadata and metadata.get('label')
classes = classes and [classes] or []
if self.print_labels and label:
label_cls = classes + ['label']
if self.print_values:
y -= self.style.value_font_size / 2
self.svg.node(
serie_node['text_overlay'],
'text',
class_=' '.join(label_cls),
x=x,
y=y + self.style.value_font_size / 3
).text = label
y += self.style.value_font_size
if self.print_values or self.dynamic_print_values:
val_cls = classes + ['value']
if self.dynamic_print_values:
val_cls.append('showable')
self.svg.node(
serie_node['text_overlay'],
'text',
class_=' '.join(val_cls),
x=x,
y=y + self.style.value_font_size / 3,
attrib={
'text-anchor': align_text
}
).text = value if self.print_zeroes or value != '0' else ''
def _points(self, x_pos):
"""
Convert given data values into drawable points (x, y)
and interpolated points if interpolate option is specified
"""
for serie in self.all_series:
serie.points = [(x_pos[i], v) for i, v in enumerate(serie.values)]
if serie.points and self.interpolate:
serie.interpolated = self._interpolate(x_pos, serie.values)
else:
serie.interpolated = []
def _compute_secondary(self):
"""Compute secondary axis min max and label positions"""
# secondary y axis support
if self.secondary_series and self._y_labels:
y_pos = list(zip(*self._y_labels))[1]
if self.include_x_axis:
ymin = min(self._secondary_min, 0)
ymax = max(self._secondary_max, 0)
else:
ymin = self._secondary_min
ymax = self._secondary_max
steps = len(y_pos)
left_range = abs(y_pos[-1] - y_pos[0])
right_range = abs(ymax - ymin) or 1
scale = right_range / ((steps - 1) or 1)
self._y_2nd_labels = [(self._y_format(ymin + i * scale), pos)
for i, pos in enumerate(y_pos)]
self._scale = left_range / right_range
self._scale_diff = y_pos[0]
self._scale_min_2nd = ymin
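        # Worked example (editor's addition): with primary y labels at
        # y_pos = [0, 25, 50, 75, 100] (left_range = 100) and secondary values
        # spanning ymin = 0 .. ymax = 10 (right_range = 10), _scale = 10 and
        # _rescale() maps a secondary value y = 5 to 0 + (5 - 0) * 10 = 50,
        # i.e. halfway up the primary axis.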
def _post_compute(self):
"""Hook called after compute and before margin computations and plot"""
pass
def _get_x_label(self, i):
"""Convenience function to get the x_label of a value index"""
if not self.x_labels or not self._x_labels or len(self._x_labels) <= i:
return
return self._x_labels[i][0]
@property
def all_series(self):
"""Getter for all series (nomal and secondary)"""
return self.series + self.secondary_series
@property
def _x_format(self):
"""Return the abscissa value formatter (always unary)"""
return self.x_value_formatter
@property
def _default_formatter(self):
return str
@property
def _y_format(self):
"""Return the ordinate value formatter (always unary)"""
return self.value_formatter
def _value_format(self, value):
"""
Format value for value display.
(Varies in type between chart types)
"""
return self._y_format(value)
def _format(self, serie, i):
"""Format the nth value for the serie"""
value = serie.values[i]
metadata = serie.metadata.get(i)
kwargs = {'chart': self, 'serie': serie, 'index': i}
formatter = ((metadata and metadata.get('formatter'))
or serie.formatter or self.formatter
or self._value_format)
kwargs = filter_kwargs(formatter, kwargs)
return formatter(value, **kwargs)
def _serie_format(self, serie, value):
"""Format an independent value for the serie"""
kwargs = {'chart': self, 'serie': serie, 'index': None}
formatter = (serie.formatter or self.formatter or self._value_format)
kwargs = filter_kwargs(formatter, kwargs)
return formatter(value, **kwargs)
def _compute(self):
"""Initial computations to draw the graph"""
def _compute_margin(self):
"""Compute graph margins from set texts"""
self._legend_at_left_width = 0
for series_group in (self.series, self.secondary_series):
if self.show_legend and series_group:
h, w = get_texts_box(
map(
lambda x: truncate(x, self.truncate_legend or 15), [
serie.title['title']
if isinstance(serie.title, dict) else serie.title
or '' for serie in series_group
]
), self.style.legend_font_size
)
if self.legend_at_bottom:
h_max = max(h, self.legend_box_size)
cols = (
self._order // self.legend_at_bottom_columns
if self.legend_at_bottom_columns else
ceil(sqrt(self._order)) or 1
)
self.margin_box.bottom += self.spacing + h_max * round(
cols - 1
) * 1.5 + h_max
else:
if series_group is self.series:
legend_width = self.spacing + w + self.legend_box_size
self.margin_box.left += legend_width
self._legend_at_left_width += legend_width
else:
self.margin_box.right += (
self.spacing + w + self.legend_box_size
)
self._x_labels_height = 0
if (self._x_labels or self._x_2nd_labels) and self.show_x_labels:
for xlabels in (self._x_labels, self._x_2nd_labels):
if xlabels:
h, w = get_texts_box(
map(
lambda x: truncate(x, self.truncate_label or 25),
cut(xlabels)
), self.style.label_font_size
)
self._x_labels_height = self.spacing + max(
w * abs(sin(rad(self.x_label_rotation))), h
)
if xlabels is self._x_labels:
self.margin_box.bottom += self._x_labels_height
else:
self.margin_box.top += self._x_labels_height
if self.x_label_rotation:
if self.x_label_rotation % 180 < 90:
self.margin_box.right = max(
w * abs(cos(rad(self.x_label_rotation))),
self.margin_box.right
)
else:
self.margin_box.left = max(
w * abs(cos(rad(self.x_label_rotation))),
self.margin_box.left
)
if self.show_y_labels:
for ylabels in (self._y_labels, self._y_2nd_labels):
if ylabels:
h, w = get_texts_box(
cut(ylabels), self.style.label_font_size
)
if ylabels is self._y_labels:
self.margin_box.left += self.spacing + max(
w * abs(cos(rad(self.y_label_rotation))), h
)
else:
self.margin_box.right += self.spacing + max(
w * abs(cos(rad(self.y_label_rotation))), h
)
self._title = split_title(
self.title, self.width, self.style.title_font_size
)
if self.title:
h, _ = get_text_box(self._title[0], self.style.title_font_size)
self.margin_box.top += len(self._title) * (self.spacing + h)
self._x_title = split_title(
self.x_title, self.width - self.margin_box.x,
self.style.title_font_size
)
self._x_title_height = 0
if self._x_title:
h, _ = get_text_box(self._x_title[0], self.style.title_font_size)
height = len(self._x_title) * (self.spacing + h)
self.margin_box.bottom += height
self._x_title_height = height + self.spacing
self._y_title = split_title(
self.y_title, self.height - self.margin_box.y,
self.style.title_font_size
)
self._y_title_height = 0
if self._y_title:
h, _ = get_text_box(self._y_title[0], self.style.title_font_size)
height = len(self._y_title) * (self.spacing + h)
self.margin_box.left += height
self._y_title_height = height + self.spacing
# Inner margin
if self.print_values_position == 'top':
gh = self.height - self.margin_box.y
alpha = 1.1 * (self.style.value_font_size / gh) * self._box.height
if self._max and self._max > 0:
self._box.ymax += alpha
if self._min and self._min < 0:
self._box.ymin -= alpha
def _confidence_interval(self, node, x, y, value, metadata):
if not metadata or 'ci' not in metadata:
return
ci = metadata['ci']
ci['point_estimate'] = value
low, high = getattr(
stats, 'confidence_interval_%s' % ci.get('type', 'manual')
)(**ci)
self.svg.confidence_interval(
node,
x,
            # Respect y modifications made by some charts (pyramid, stackbar)
y + (self.view.y(low) - self.view.y(value)),
y + (self.view.y(high) - self.view.y(value))
)
@cached_property
def _legends(self):
"""Getter for series title"""
return [serie.title for serie in self.series]
@cached_property
def _secondary_legends(self):
"""Getter for series title on secondary y axis"""
return [serie.title for serie in self.secondary_series]
@cached_property
def _values(self):
"""Getter for series values (flattened)"""
return [
val for serie in self.series for val in serie.values
if val is not None
]
@cached_property
def _secondary_values(self):
"""Getter for secondary series values (flattened)"""
return [
val for serie in self.secondary_series for val in serie.values
if val is not None
]
@cached_property
def _len(self):
"""Getter for the maximum series size"""
return max([len(serie.values) for serie in self.all_series] or [0])
@cached_property
def _secondary_min(self):
"""Getter for the minimum series value"""
return (
self.secondary_range[0]
if (self.secondary_range
and self.secondary_range[0] is not None) else
(min(self._secondary_values) if self._secondary_values else None)
)
@cached_property
def _min(self):
"""Getter for the minimum series value"""
return (
self.range[0] if (self.range and self.range[0] is not None) else
(min(self._values) if self._values else None)
)
@cached_property
def _max(self):
"""Getter for the maximum series value"""
return (
self.range[1] if (self.range and self.range[1] is not None) else
(max(self._values) if self._values else None)
)
@cached_property
def _secondary_max(self):
"""Getter for the maximum series value"""
return (
self.secondary_range[1]
if (self.secondary_range
and self.secondary_range[1] is not None) else
(max(self._secondary_values) if self._secondary_values else None)
)
@cached_property
def _order(self):
"""Getter for the number of series"""
return len(self.all_series)
def _x_label_format_if_value(self, label):
if not isinstance(label, str):
return self._x_format(label)
return label
def _compute_x_labels(self):
self._x_labels = self.x_labels and list(
zip(
map(self._x_label_format_if_value, self.x_labels), self._x_pos
)
)
def _compute_x_labels_major(self):
if self.x_labels_major_every:
self._x_labels_major = [
self._x_labels[i][0] for i in
range(0, len(self._x_labels), self.x_labels_major_every)
]
elif self.x_labels_major_count:
label_count = len(self._x_labels)
major_count = self.x_labels_major_count
if (major_count >= label_count):
self._x_labels_major = [label[0] for label in self._x_labels]
else:
self._x_labels_major = [
self._x_labels[int(
i * (label_count - 1) / (major_count - 1)
)][0] for i in range(major_count)
]
else:
self._x_labels_major = self.x_labels_major and list(
map(self._x_label_format_if_value, self.x_labels_major)
) or []
def _compute_y_labels(self):
y_pos = compute_scale(
self._box.ymin, self._box.ymax, self.logarithmic, self.order_min,
self.min_scale, self.max_scale
)
if self.y_labels:
self._y_labels = []
for i, y_label in enumerate(self.y_labels):
if isinstance(y_label, dict):
pos = self._adapt(y_label.get('value'))
title = y_label.get('label', self._y_format(pos))
elif isinstance(y_label, str):
pos = self._adapt(y_pos[i % len(y_pos)])
title = y_label
else:
pos = self._adapt(y_label)
title = self._y_format(pos)
self._y_labels.append((title, pos))
self._box.ymin = min(self._box.ymin, min(cut(self._y_labels, 1)))
self._box.ymax = max(self._box.ymax, max(cut(self._y_labels, 1)))
else:
self._y_labels = list(zip(map(self._y_format, y_pos), y_pos))
def _compute_y_labels_major(self):
if self.y_labels_major_every:
self._y_labels_major = [
self._y_labels[i][1] for i in
range(0, len(self._y_labels), self.y_labels_major_every)
]
elif self.y_labels_major_count:
label_count = len(self._y_labels)
major_count = self.y_labels_major_count
if (major_count >= label_count):
self._y_labels_major = [label[1] for label in self._y_labels]
else:
self._y_labels_major = [
self._y_labels[int(
i * (label_count - 1) / (major_count - 1)
)][1] for i in range(major_count)
]
elif self.y_labels_major:
self._y_labels_major = list(map(self._adapt, self.y_labels_major))
elif self._y_labels:
self._y_labels_major = majorize(cut(self._y_labels, 1))
else:
self._y_labels_major = []
def add_squares(self, squares):
x_lines = squares[0] - 1
y_lines = squares[1] - 1
_current_x = 0
_current_y = 0
for line in range(x_lines):
_current_x += (self.width - self.margin_box.x) / squares[0]
self.svg.node(
self.nodes['plot'],
'path',
class_='bg-lines',
d='M%s %s L%s %s' %
(_current_x, 0, _current_x, self.height - self.margin_box.y)
)
for line in range(y_lines):
_current_y += (self.height - self.margin_box.y) / squares[1]
self.svg.node(
self.nodes['plot'],
'path',
class_='bg-lines',
d='M%s %s L%s %s' %
(0, _current_y, self.width - self.margin_box.x, _current_y)
)
return ((self.width - self.margin_box.x) / squares[0],
(self.height - self.margin_box.y) / squares[1])
def _draw(self):
"""Draw all the things"""
self._compute()
self._compute_x_labels()
self._compute_x_labels_major()
self._compute_y_labels()
self._compute_y_labels_major()
self._compute_secondary()
self._post_compute()
self._compute_margin()
self._decorate()
if self.series and self._has_data() and self._values:
self._plot()
else:
self.svg.draw_no_data()
def _has_data(self):
"""Check if there is any data"""
return any([
len([
v for a in (s[0] if is_list_like(s) else [s])
for v in (a if is_list_like(a) else [a]) if v is not None
]) for s in self.raw_series
])
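# --- Usage sketch (not part of the original module) ---
# Shows how the major-label computation above is driven from the public
# config options: x_labels_major_every keeps every n-th label, while
# x_labels_major_count spreads a fixed number of major labels evenly.
# Assumes a standard pygal install; the output filename is arbitrary.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    import pygal

    chart = pygal.Line(show_minor_x_labels=False)
    chart.x_labels = [str(year) for year in range(2000, 2013)]
    chart.x_labels_major_every = 3  # keep 2000, 2003, 2006, ... as major
    chart.add('Serie', [1, 3, 5, 16, 13, 3, 7, 9, 2, 1, 4, 9, 12])
    chart.render_to_file('major_labels.svg')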
| 37,728 | Python | .py | 937 | 26.3746 | 79 | 0.485306 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,340 | horizontalbar.py | Kozea_pygal/pygal/graph/horizontalbar.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Horizontal bar graph"""
from pygal.graph.bar import Bar
from pygal.graph.horizontal import HorizontalGraph
class HorizontalBar(HorizontalGraph, Bar):
"""Horizontal Bar graph"""
def _plot(self):
"""Draw the bars in reverse order"""
for serie in self.series[::-1]:
self.bar(serie)
for serie in self.secondary_series[::-1]:
self.bar(serie, True)
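# --- Usage sketch (not part of the original module) ---
# HorizontalBar only combines the Bar drawing code with the HorizontalGraph
# mixin, so it is used exactly like pygal.Bar. Assumes a standard pygal
# install; the data and output filename are arbitrary.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    import pygal

    chart = pygal.HorizontalBar()
    chart.title = 'Browser usage (illustrative numbers, in %)'
    chart.add('IE', 19.5)
    chart.add('Firefox', 36.6)
    chart.add('Chrome', 36.3)
    chart.render_to_file('horizontal_bar.svg')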
| 1,179 | Python | .py | 29 | 37.586207 | 79 | 0.732984 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,341 | box.py | Kozea_pygal/pygal/graph/box.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Box plot: a convenient way to display series as boxes with whiskers and outliers
Different box types are available through the box_mode option
"""
from bisect import bisect_left, bisect_right
from pygal.graph.graph import Graph
from pygal.util import alter, decorate
class Box(Graph):
"""
Box plot
For each series, shows the median value, the 25th and 75th percentiles,
and the values within
1.5 times the interquartile range of the 25th and 75th percentiles.
See http://en.wikipedia.org/wiki/Box_plot
"""
_series_margin = .06
def _value_format(self, value, serie):
"""
        Format the box summary (whiskers and quartiles) for value display.
"""
if self.box_mode == "extremes":
return (
'Min: %s\nQ1 : %s\nQ2 : %s\nQ3 : %s\nMax: %s' %
tuple(map(self._y_format, serie.points[1:6]))
)
elif self.box_mode in ["tukey", "stdev", "pstdev"]:
return (
'Min: %s\nLower Whisker: %s\nQ1: %s\nQ2: %s\nQ3: %s\n'
'Upper Whisker: %s\nMax: %s' %
tuple(map(self._y_format, serie.points))
)
elif self.box_mode == '1.5IQR':
# 1.5IQR mode
return 'Q1: %s\nQ2: %s\nQ3: %s' % tuple(
map(self._y_format, serie.points[2:5])
)
else:
return self._y_format(serie.points)
def _compute(self):
"""
Compute parameters necessary for later steps
within the rendering process
"""
for serie in self.series:
serie.points, serie.outliers = \
self._box_points(serie.values, self.box_mode)
self._x_pos = [(i + .5) / self._order for i in range(self._order)]
if self._min:
self._box.ymin = min(self._min, self.zero)
if self._max:
self._box.ymax = max(self._max, self.zero)
def _plot(self):
"""Plot the series data"""
for serie in self.series:
self._boxf(serie)
@property
def _len(self):
"""Len is always 7 here"""
return 7
def _boxf(self, serie):
"""For a specific series, draw the box plot."""
serie_node = self.svg.serie(serie)
        # Note: q0 and q4 are not literally the zeroth and fourth
        # quartiles; they are the whisker end positions (e.g. Q1 - 1.5 * IQR
        # and Q3 + 1.5 * IQR, or the extremes, depending on box_mode).
boxes = self.svg.node(serie_node['plot'], class_="boxes")
metadata = serie.metadata.get(0)
box = decorate(self.svg, self.svg.node(boxes, class_='box'), metadata)
val = self._format(serie, 0)
x_center, y_center = self._draw_box(
box, serie.points[1:6], serie.outliers, serie.index, metadata
)
self._tooltip_data(
box, val, x_center, y_center, "centered",
self._get_x_label(serie.index)
)
self._static_value(serie_node, val, x_center, y_center, metadata)
def _draw_box(self, parent_node, quartiles, outliers, box_index, metadata):
"""
Return the center of a bounding box defined by a box plot.
Draws a box plot on self.svg.
"""
width = (self.view.x(1) - self.view.x(0)) / self._order
series_margin = width * self._series_margin
left_edge = self.view.x(0) + width * box_index + series_margin
width -= 2 * series_margin
# draw lines for whiskers - bottom, median, and top
for i, whisker in enumerate((quartiles[0], quartiles[2],
quartiles[4])):
whisker_width = width if i == 1 else width / 2
shift = (width - whisker_width) / 2
xs = left_edge + shift
xe = left_edge + width - shift
alter(
self.svg.line(
parent_node,
coords=[(xs, self.view.y(whisker)),
(xe, self.view.y(whisker))],
class_='reactive tooltip-trigger',
attrib={'stroke-width': 3}
), metadata
)
# draw lines connecting whiskers to box (Q1 and Q3)
alter(
self.svg.line(
parent_node,
coords=[(left_edge + width / 2, self.view.y(quartiles[0])),
(left_edge + width / 2, self.view.y(quartiles[1]))],
class_='reactive tooltip-trigger',
attrib={'stroke-width': 2}
), metadata
)
alter(
self.svg.line(
parent_node,
coords=[(left_edge + width / 2, self.view.y(quartiles[4])),
(left_edge + width / 2, self.view.y(quartiles[3]))],
class_='reactive tooltip-trigger',
attrib={'stroke-width': 2}
), metadata
)
# box, bounded by Q1 and Q3
alter(
self.svg.node(
parent_node,
tag='rect',
x=left_edge,
y=self.view.y(quartiles[1]),
height=self.view.y(quartiles[3]) - self.view.y(quartiles[1]),
width=width,
class_='subtle-fill reactive tooltip-trigger'
), metadata
)
# draw outliers
for o in outliers:
alter(
self.svg.node(
parent_node,
tag='circle',
cx=left_edge + width / 2,
cy=self.view.y(o),
r=3,
class_='subtle-fill reactive tooltip-trigger'
), metadata
)
return (
left_edge + width / 2,
self.view.y(sum(quartiles) / len(quartiles))
)
@staticmethod
def _box_points(values, mode='extremes'):
"""
Default mode: (mode='extremes' or unset)
Return a 7-tuple of 2x minimum, Q1, Median, Q3,
and 2x maximum for a list of numeric values.
1.5IQR mode: (mode='1.5IQR')
Return a 7-tuple of min, Q1 - 1.5 * IQR, Q1, Median, Q3,
Q3 + 1.5 * IQR and max for a list of numeric values.
Tukey mode: (mode='tukey')
Return a 7-tuple of min, q[0..4], max and a list of outliers
        Outliers are considered values x:
        x < q1 - 1.5 * IQR or x > q3 + 1.5 * IQR
SD mode: (mode='stdev')
Return a 7-tuple of min, q[0..4], max and a list of outliers
Outliers are considered values x: x < q2 - SD or x > q2 + SD
SDp mode: (mode='pstdev')
Return a 7-tuple of min, q[0..4], max and a list of outliers
Outliers are considered values x: x < q2 - SDp or x > q2 + SDp
The iterator values may include None values.
Uses quartile definition from Mendenhall, W. and
Sincich, T. L. Statistics for Engineering and the
Sciences, 4th ed. Prentice-Hall, 1995.
"""
def median(seq):
n = len(seq)
if n % 2 == 0: # seq has an even length
return (seq[n // 2] + seq[n // 2 - 1]) / 2
else: # seq has an odd length
return seq[n // 2]
def mean(seq):
return sum(seq) / len(seq)
def stdev(seq):
m = mean(seq)
l = len(seq)
v = sum((n - m)**2 for n in seq) / (l - 1) # variance
return v**0.5 # sqrt
def pstdev(seq):
m = mean(seq)
l = len(seq)
v = sum((n - m)**2 for n in seq) / l # variance
return v**0.5 # sqrt
outliers = []
# sort the copy in case the originals must stay in original order
s = sorted([x for x in values if x is not None])
n = len(s)
if not n:
return (0, 0, 0, 0, 0, 0, 0), []
elif n == 1:
return (s[0], s[0], s[0], s[0], s[0], s[0], s[0]), []
else:
q2 = median(s)
# See 'Method 3' in http://en.wikipedia.org/wiki/Quartile
if n % 2 == 0: # even
q1 = median(s[:n // 2])
q3 = median(s[n // 2:])
else: # odd
if n == 1: # special case
q1 = s[0]
q3 = s[0]
elif n % 4 == 1: # n is of form 4n + 1 where n >= 1
m = (n - 1) // 4
q1 = 0.25 * s[m - 1] + 0.75 * s[m]
q3 = 0.75 * s[3 * m] + 0.25 * s[3 * m + 1]
else: # n is of form 4n + 3 where n >= 1
m = (n - 3) // 4
q1 = 0.75 * s[m] + 0.25 * s[m + 1]
q3 = 0.25 * s[3 * m + 1] + 0.75 * s[3 * m + 2]
iqr = q3 - q1
min_s = s[0]
max_s = s[-1]
if mode == 'extremes':
q0 = min_s
q4 = max_s
elif mode == 'tukey':
# the lowest datum still within 1.5 IQR of the lower quartile,
# and the highest datum still within 1.5 IQR of the upper
# quartile [Tukey box plot, Wikipedia ]
b0 = bisect_left(s, q1 - 1.5 * iqr)
b4 = bisect_right(s, q3 + 1.5 * iqr)
q0 = s[b0]
q4 = s[b4 - 1]
outliers = s[:b0] + s[b4:]
elif mode == 'stdev':
                # one standard deviation above and below the median (q2)
sd = stdev(s)
b0 = bisect_left(s, q2 - sd)
b4 = bisect_right(s, q2 + sd)
q0 = s[b0]
q4 = s[b4 - 1]
outliers = s[:b0] + s[b4:]
elif mode == 'pstdev':
                # one population standard deviation above and below
                # the median (q2) of the data
sdp = pstdev(s)
b0 = bisect_left(s, q2 - sdp)
b4 = bisect_right(s, q2 + sdp)
q0 = s[b0]
q4 = s[b4 - 1]
outliers = s[:b0] + s[b4:]
elif mode == '1.5IQR':
# 1.5IQR mode
q0 = q1 - 1.5 * iqr
q4 = q3 + 1.5 * iqr
return (min_s, q0, q1, q2, q3, q4, max_s), outliers
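# --- Usage sketch (not part of the original module) ---
# box_mode selects which whisker strategy implemented in _box_points is
# used: 'extremes', '1.5IQR', 'tukey', 'stdev' or 'pstdev'. Assumes a
# standard pygal install; the data and output filename are arbitrary.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    import pygal

    chart = pygal.Box(box_mode='tukey')
    chart.title = 'Benchmark results (illustrative)'
    chart.add('Chrome', [6395, 8212, 7520, 7218, 12464, 1660, 2123, 8607])
    chart.add('Firefox', [7473, 8099, 11700, 2651, 6361, 1044, 3797, 9450])
    chart.render_to_file('box.svg')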
| 11,067 | Python | .py | 271 | 28.911439 | 79 | 0.503159 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,342 | dot.py | Kozea_pygal/pygal/graph/dot.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Dot chart displaying values as a grid of dots: the bigger the value,
the bigger the dot
"""
from math import log10
from pygal.graph.graph import Graph
from pygal.util import alter, cached_property, decorate, safe_enumerate
from pygal.view import ReverseView, View
class Dot(Graph):
"""Dot graph class"""
def dot(self, serie, r_max):
"""Draw a dot line"""
serie_node = self.svg.serie(serie)
view_values = list(map(self.view, serie.points))
for i, value in safe_enumerate(serie.values):
x, y = view_values[i]
if self.logarithmic:
log10min = log10(self._min) - 1
log10max = log10(self._max or 1)
if value != 0:
size = r_max * ((log10(abs(value)) - log10min) /
(log10max - log10min))
else:
size = 0
else:
size = r_max * (abs(value) / (self._max or 1))
metadata = serie.metadata.get(i)
dots = decorate(
self.svg, self.svg.node(serie_node['plot'], class_="dots"),
metadata
)
alter(
self.svg.node(
dots,
'circle',
cx=x,
cy=y,
r=size,
class_='dot reactive tooltip-trigger' +
(' negative' if value < 0 else '')
), metadata
)
val = self._format(serie, i)
self._tooltip_data(
dots, val, x, y, 'centered', self._get_x_label(i)
)
self._static_value(serie_node, val, x, y, metadata)
def _compute(self):
"""Compute y min and max and y scale and set labels"""
x_len = self._len
y_len = self._order
self._box.xmax = x_len
self._box.ymax = y_len
self._x_pos = [n / 2 for n in range(1, 2 * x_len, 2)]
self._y_pos = [n / 2 for n in reversed(range(1, 2 * y_len, 2))]
for j, serie in enumerate(self.series):
serie.points = [(self._x_pos[i], self._y_pos[j])
for i in range(x_len)]
def _compute_y_labels(self):
if self.y_labels:
y_labels = [str(label) for label in self.y_labels]
else:
y_labels = [
(
serie.title['title']
if isinstance(serie.title, dict)
else serie.title
) or ''
for serie in self.series
]
self._y_labels = list(zip(y_labels, self._y_pos))
def _set_view(self):
"""Assign a view to current graph"""
view_class = ReverseView if self.inverse_y_axis else View
self.view = view_class(
self.width - self.margin_box.x, self.height - self.margin_box.y,
self._box
)
@cached_property
def _values(self):
"""Getter for series values (flattened)"""
return [abs(val) for val in super(Dot, self)._values if val != 0]
@cached_property
def _max(self):
"""Getter for the maximum series value"""
return (
self.range[1] if (self.range and self.range[1] is not None) else
(max(map(abs, self._values)) if self._values else None)
)
def _plot(self):
"""Plot all dots for series"""
r_max = min(
self.view.x(1) - self.view.x(0),
(self.view.y(0) or 0) - self.view.y(1)
) / (2 * 1.05)
for serie in self.series:
self.dot(serie, r_max)
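# --- Usage sketch (not part of the original module) ---
# Each serie becomes one row of dots whose radius is proportional to the
# absolute value (log-scaled when logarithmic=True); negative values get
# the 'negative' class. Assumes a standard pygal install; the data and
# output filename are arbitrary.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    import pygal

    chart = pygal.Dot(x_label_rotation=30)
    chart.x_labels = ['Richards', 'DeltaBlue', 'Crypto', 'RayTrace']
    chart.add('Chrome', [6395, 8212, 7520, 7218])
    chart.add('Firefox', [7473, 8099, 11700, 2651])
    chart.render_to_file('dot.svg')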
| 4,445 | Python | .py | 115 | 28.347826 | 79 | 0.545918 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,343 | __init__.py | Kozea_pygal/pygal/graph/__init__.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Graph package containing all builtin charts"""
| 823 | Python | .py | 19 | 42.315789 | 79 | 0.771144 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,344 | base.py | Kozea_pygal/pygal/graph/base.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Base for pygal charts"""
import os
from functools import reduce
from uuid import uuid4
from pygal._compat import is_list_like
from pygal.adapters import decimal_to_float, not_zero, positive
from pygal.config import Config, SerieConfig
from pygal.serie import Serie
from pygal.state import State
from pygal.svg import Svg
from pygal.util import compose, ident
from pygal.view import Box, Margin
class BaseGraph(object):
"""Chart internal behaviour related functions"""
_adapters = []
def __init__(self, config=None, **kwargs):
"""Config preparation and various initialization"""
if config:
if isinstance(config, type):
config = config()
else:
config = config.copy()
else:
config = Config()
config(**kwargs)
self.config = config
self.state = None
self.uuid = str(uuid4())
self.raw_series = []
self.xml_filters = []
def __setattr__(self, name, value):
"""Set an attribute on the class or in the state if there is one"""
if name.startswith('__') or getattr(self, 'state', None) is None:
super(BaseGraph, self).__setattr__(name, value)
else:
setattr(self.state, name, value)
def __getattribute__(self, name):
"""Get an attribute from the class or from the state if there is one"""
if name.startswith('__') or name == 'state' or getattr(
self, 'state',
None) is None or name not in self.state.__dict__:
return super(BaseGraph, self).__getattribute__(name)
return getattr(self.state, name)
def prepare_values(self, raw, offset=0):
"""Prepare the values to start with sane values"""
from pygal import Histogram
from pygal.graph.map import BaseMap
if self.zero == 0 and isinstance(self, BaseMap):
self.zero = 1
if self.x_label_rotation:
self.x_label_rotation %= 360
if self.y_label_rotation:
self.y_label_rotation %= 360
for key in ('x_labels', 'y_labels'):
if getattr(self, key):
setattr(self, key, list(getattr(self, key)))
if not raw:
return
adapters = list(self._adapters) or [lambda x: x]
if self.logarithmic:
for fun in not_zero, positive:
if fun in adapters:
adapters.remove(fun)
adapters = adapters + [positive, not_zero]
adapters = adapters + [decimal_to_float]
self._adapt = reduce(compose, adapters) if not self.strict else ident
self._x_adapt = reduce(
compose, self._x_adapters
) if not self.strict and getattr(self, '_x_adapters', None) else ident
series = []
raw = [(
list(raw_values) if not isinstance(raw_values, dict) else
raw_values, serie_config_kwargs
) for raw_values, serie_config_kwargs in raw]
width = max([len(values)
for values, _ in raw] + [len(self.x_labels or [])])
for raw_values, serie_config_kwargs in raw:
metadata = {}
values = []
if isinstance(raw_values, dict):
if isinstance(self, BaseMap):
raw_values = list(raw_values.items())
else:
value_list = [None] * width
for k, v in raw_values.items():
if k in (self.x_labels or []):
value_list[self.x_labels.index(k)] = v
raw_values = value_list
for index, raw_value in enumerate(raw_values + (
(width - len(raw_values)) * [None] # aligning values
if len(raw_values) < width else [])):
if isinstance(raw_value, dict):
raw_value = dict(raw_value)
value = raw_value.pop('value', None)
metadata[index] = raw_value
else:
value = raw_value
                # TODO: move this value normalisation into the chart
                # classes' own methods
if isinstance(self, Histogram):
if value is None:
value = (None, None, None)
elif not is_list_like(value):
value = (value, self.zero, self.zero)
elif len(value) == 2:
value = (1, value[0], value[1])
value = list(map(self._adapt, value))
elif self._dual:
if value is None:
value = (None, None)
elif not is_list_like(value):
value = (value, self.zero)
if self._x_adapt:
value = (
self._x_adapt(value[0]), self._adapt(value[1])
)
if isinstance(self, BaseMap):
value = (self._adapt(value[0]), value[1])
else:
value = list(map(self._adapt, value))
else:
value = self._adapt(value)
values.append(value)
serie_config = SerieConfig()
serie_config(
**dict((k, v) for k, v in self.state.__dict__.items()
if k in dir(serie_config))
)
serie_config(**serie_config_kwargs)
series.append(
Serie(offset + len(series), values, serie_config, metadata)
)
return series
def setup(self, **kwargs):
"""Set up the transient state prior rendering"""
# Keep labels in case of map
if getattr(self, 'x_labels', None) is not None:
self.x_labels = list(self.x_labels)
if getattr(self, 'y_labels', None) is not None:
self.y_labels = list(self.y_labels)
self.state = State(self, **kwargs)
if isinstance(self.style, type):
self.style = self.style()
self.series = self.prepare_values([
rs for rs in self.raw_series if not rs[1].get('secondary')
]) or []
self.secondary_series = self.prepare_values([
rs for rs in self.raw_series if rs[1].get('secondary')
], len(self.series)) or []
self.horizontal = getattr(self, 'horizontal', False)
self.svg = Svg(self)
self._x_labels = None
self._y_labels = None
self._x_2nd_labels = None
self._y_2nd_labels = None
self.nodes = {}
self.margin_box = Margin(
self.margin_top or self.margin, self.margin_right or self.margin,
self.margin_bottom or self.margin, self.margin_left or self.margin
)
self._box = Box()
self.view = None
if self.logarithmic and self.zero == 0:
# Explicit min to avoid interpolation dependency
positive_values = list(
filter(
lambda x: x > 0, [
val[1] or 1 if self._dual else val
for serie in self.series for val in serie.safe_values
]
)
)
self.zero = min(positive_values or (1, )) or 1
if self._len < 3:
self.interpolate = None
self._draw()
self.svg.pre_render()
def teardown(self):
"""Remove the transient state after rendering"""
if os.getenv('PYGAL_KEEP_STATE'):
return
del self.state
self.state = None
def _repr_svg_(self):
"""Display svg in IPython notebook"""
return self.render(disable_xml_declaration=True)
def _repr_png_(self):
"""Display png in IPython notebook"""
return self.render_to_png()
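# --- Usage sketch (not part of the original module) ---
# BaseGraph keeps configuration either on the instance or in the transient
# State created by setup() and dropped by teardown(), which is why keyword
# arguments can be passed both at construction time and per render call.
# Assumes a standard pygal install.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    import pygal

    chart = pygal.Line(width=800, height=400, fill=True)
    chart.add('Serie', [1, 3, 12, 3, 4, None, 9])
    # The override below only lives for the duration of this render call:
    svg = chart.render(show_legend=False)
    print(len(svg))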
| 8,692 | Python | .py | 206 | 30.373786 | 79 | 0.550077 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,345 | horizontal.py | Kozea_pygal/pygal/graph/horizontal.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Horizontal graph mixin"""
from pygal.graph.graph import Graph
from pygal.view import HorizontalLogView, HorizontalView
class HorizontalGraph(Graph):
"""Horizontal graph mixin"""
def __init__(self, *args, **kwargs):
"""Set the horizontal flag to True"""
self.horizontal = True
super(HorizontalGraph, self).__init__(*args, **kwargs)
def _post_compute(self):
"""After computations transpose labels"""
self._x_labels, self._y_labels = self._y_labels, self._x_labels
self._x_labels_major, self._y_labels_major = (
self._y_labels_major, self._x_labels_major
)
self._x_2nd_labels, self._y_2nd_labels = (
self._y_2nd_labels, self._x_2nd_labels
)
self.show_y_guides, self.show_x_guides = (
self.show_x_guides, self.show_y_guides
)
def _axes(self):
"""Set the _force_vertical flag when rendering axes"""
self.view._force_vertical = True
super(HorizontalGraph, self)._axes()
self.view._force_vertical = False
def _set_view(self):
"""Assign a horizontal view to current graph"""
if self.logarithmic:
view_class = HorizontalLogView
else:
view_class = HorizontalView
self.view = view_class(
self.width - self.margin_box.x, self.height - self.margin_box.y,
self._box
)
def _get_x_label(self, i):
"""Convenience function to get the x_label of a value index"""
if not self.x_labels or not self._y_labels or len(self._y_labels) <= i:
return
return self._y_labels[i][0]
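# --- Usage sketch (not part of the original module) ---
# Horizontal chart variants are built by mixing this class into an existing
# chart, exactly as pygal.graph.horizontalbar does. The class below mirrors
# pygal's own HorizontalStackedBar and is shown only as an illustration of
# the pattern; data and output filename are arbitrary.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    from pygal.graph.stackedbar import StackedBar

    class IllustrativeHorizontalStackedBar(HorizontalGraph, StackedBar):
        """Stacked bars drawn horizontally thanks to the mixin above"""

    chart = IllustrativeHorizontalStackedBar()
    chart.x_labels = ['a', 'b', 'c']
    chart.add('A', [1, 2, 3])
    chart.add('B', [3, 2, 1])
    chart.render_to_file('horizontal_stacked.svg')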
| 2,440 | Python | .py | 59 | 35.050847 | 79 | 0.658094 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,346 | map.py | Kozea_pygal/pygal/graph/map.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
pygal contains no maps, only a base class for creating map extensions;
see the pygal_maps_world package for an example.
https://github.com/Kozea/pygal_maps_world
"""
from pygal.etree import etree
from pygal.graph.graph import Graph
from pygal.util import alter, cached_property, cut, decorate
class BaseMap(Graph):
"""Base class for maps"""
_dual = True
@cached_property
def _values(self):
"""Getter for series values (flattened)"""
return [
val[1] for serie in self.series for val in serie.values
if val[1] is not None
]
def enumerate_values(self, serie):
"""Hook to replace default enumeration on values"""
return enumerate(serie.values)
def adapt_code(self, area_code):
"""Hook to change the area code"""
return area_code
def _value_format(self, value):
"""
Format value for map value display.
"""
return '%s: %s' % (
self.area_names.get(self.adapt_code(value[0]), '?'),
self._y_format(value[1])
)
def _plot(self):
"""Insert a map in the chart and apply data on it"""
map = etree.fromstring(self.svg_map)
map.set('width', str(self.view.width))
map.set('height', str(self.view.height))
for i, serie in enumerate(self.series):
safe_vals = list(
filter(lambda x: x is not None, cut(serie.values, 1))
)
if not safe_vals:
continue
min_ = min(safe_vals)
max_ = max(safe_vals)
for j, (area_code, value) in self.enumerate_values(serie):
area_code = self.adapt_code(area_code)
if value is None:
continue
if max_ == min_:
ratio = 1
else:
ratio = .3 + .7 * (value - min_) / (max_ - min_)
areae = map.findall(
".//*[@class='%s%s %s map-element']" %
(self.area_prefix, area_code, self.kind)
)
if not areae:
continue
for area in areae:
cls = area.get('class', '').split(' ')
cls.append('color-%d' % i)
cls.append('serie-%d' % i)
cls.append('series')
area.set('class', ' '.join(cls))
area.set('style', 'fill-opacity: %f' % ratio)
metadata = serie.metadata.get(j)
if metadata:
node = decorate(self.svg, area, metadata)
if node != area:
area.remove(node)
for g in map:
if area not in g:
continue
index = list(g).index(area)
g.remove(area)
node.append(area)
g.insert(index, node)
for node in area:
cls = node.get('class', '').split(' ')
cls.append('reactive')
cls.append('tooltip-trigger')
cls.append('map-area')
node.set('class', ' '.join(cls))
alter(node, metadata)
val = self._format(serie, j)
self._tooltip_data(area, val, 0, 0, 'auto')
self.nodes['plot'].append(map)
def _compute_x_labels(self):
pass
def _compute_y_labels(self):
pass
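# --- Usage sketch (not part of the original module) ---
# Concrete maps live in separate packages that subclass BaseMap and provide
# svg_map, area_names, area_prefix and kind. With the external
# pygal_maps_world package installed (an assumption, it is not bundled with
# pygal), usage looks like this; the numbers are purely illustrative.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    from pygal.maps.world import World  # provided by pygal_maps_world

    chart = World()
    chart.title = 'Some values per country (illustrative)'
    chart.add('Serie', {'fr': 102, 'de': 82, 'us': 304})
    chart.render_to_file('world.svg')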
| 4,477 | Python | .py | 110 | 28.027273 | 79 | 0.519899 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,347 | public.py | Kozea_pygal/pygal/graph/public.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""pygal public api functions"""
import base64
import io
from pygal._compat import is_list_like
from pygal.graph.base import BaseGraph
class PublicApi(BaseGraph):
"""Chart public functions"""
def add(self, title, values, **kwargs):
"""Add a serie to this graph, compat api"""
if not is_list_like(values) and not isinstance(values, dict):
values = [values]
kwargs['title'] = title
self.raw_series.append((values, kwargs))
return self
def __call__(self, *args, **kwargs):
"""Call api: chart(1, 2, 3, title='T')"""
self.raw_series.append((args, kwargs))
return self
def add_xml_filter(self, callback):
"""Add an xml filter for in tree post processing"""
self.xml_filters.append(callback)
return self
def render(self, is_unicode=False, **kwargs):
"""Render the graph, and return the svg string"""
self.setup(**kwargs)
svg = self.svg.render(
is_unicode=is_unicode, pretty_print=self.pretty_print
)
self.teardown()
return svg
def render_tree(self, **kwargs):
"""Render the graph, and return (l)xml etree"""
self.setup(**kwargs)
svg = self.svg.root
for f in self.xml_filters:
svg = f(svg)
self.teardown()
return svg
def render_table(self, **kwargs):
"""Render the data as a html table"""
# Import here to avoid lxml import
try:
from pygal.table import Table
except ImportError:
raise ImportError('You must install lxml to use render table')
return Table(self).render(**kwargs)
def render_pyquery(self, **kwargs):
"""Render the graph, and return a pyquery wrapped tree"""
from pyquery import PyQuery as pq
return pq(self.render(**kwargs), parser='html')
def render_in_browser(self, **kwargs):
"""Render the graph, open it in your browser with black magic"""
try:
from lxml.html import open_in_browser
except ImportError:
raise ImportError('You must install lxml to use render in browser')
kwargs.setdefault('force_uri_protocol', 'https')
open_in_browser(self.render_tree(**kwargs), encoding='utf-8')
def render_response(self, **kwargs):
"""Render the graph, and return a Flask response"""
from flask import Response
return Response(self.render(**kwargs), mimetype='image/svg+xml')
def render_django_response(self, **kwargs):
"""Render the graph, and return a Django response"""
from django.http import HttpResponse
return HttpResponse(
self.render(**kwargs), content_type='image/svg+xml'
)
def render_data_uri(self, **kwargs):
"""Output a base 64 encoded data uri"""
        # Force the protocol, as data uris have none
kwargs.setdefault('force_uri_protocol', 'https')
return "data:image/svg+xml;charset=utf-8;base64,%s" % (
base64.b64encode(self.render(**kwargs)
).decode('utf-8').replace('\n', '')
)
def render_to_file(self, filename, **kwargs):
"""Render the graph, and write it to filename"""
with io.open(filename, 'w', encoding='utf-8') as f:
f.write(self.render(is_unicode=True, **kwargs))
def render_to_png(self, filename=None, dpi=72, **kwargs):
"""Render the graph, convert it to png and write it to filename"""
import cairosvg
return cairosvg.svg2png(
bytestring=self.render(**kwargs), write_to=filename, dpi=dpi
)
def render_sparktext(self, relative_to=None):
"""Make a mini text sparkline from chart"""
bars = '▁▂▃▄▅▆▇█'
if len(self.raw_series) == 0:
return ''
values = list(self.raw_series[0][0])
if len(values) == 0:
return ''
chart = ''
values = list(map(lambda x: max(x, 0), values))
vmax = max(values)
if relative_to is None:
relative_to = min(values)
if (vmax - relative_to) == 0:
chart = bars[0] * len(values)
return chart
divisions = len(bars) - 1
for value in values:
chart += bars[int(
divisions * (value - relative_to) / (vmax - relative_to)
)]
return chart
def render_sparkline(self, **kwargs):
"""Render a sparkline"""
spark_options = dict(
width=200,
height=50,
show_dots=False,
show_legend=False,
show_x_labels=False,
show_y_labels=False,
spacing=0,
margin=5,
min_scale=1,
max_scale=2,
explicit_size=True,
no_data_text='',
js=(),
classes=(Ellipsis, 'pygal-sparkline'),
)
spark_options.update(kwargs)
return self.render(**spark_options)
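# --- Usage sketch (not part of the original module) ---
# PublicApi is the mixin every chart inherits, so the render_* helpers above
# are available on any chart instance. A few of them, assuming a standard
# pygal install; the output filename is arbitrary.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    import pygal

    chart = pygal.Bar(title='Render targets')
    chart.add('Serie', [1, 5, 3])

    svg = chart.render()                  # raw svg as bytes
    chart.render_to_file('bar.svg')       # write the svg to disk
    uri = chart.render_data_uri()         # base64 data uri for <img src=...>
    spark = chart.render_sparktext()      # tiny unicode sparkline: ▁█▄
    print(len(svg), uri[:30], spark)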
| 5,823 | Python | .py | 146 | 31.287671 | 79 | 0.60777 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,348 | stackedbar.py | Kozea_pygal/pygal/graph/stackedbar.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Stacked Bar chart: Like a bar chart but with all series stacked
on top of one another instead of being displayed side by side.
"""
from pygal.adapters import none_to_zero
from pygal.graph.bar import Bar
class StackedBar(Bar):
"""Stacked Bar graph class"""
_adapters = [none_to_zero]
def _get_separated_values(self, secondary=False):
"""Separate values between positives and negatives stacked"""
series = self.secondary_series if secondary else self.series
transposed = list(zip(*[serie.values for serie in series]))
positive_vals = [
sum([val for val in vals if val is not None and val >= self.zero])
for vals in transposed
]
negative_vals = [
sum([val for val in vals if val is not None and val < self.zero])
for vals in transposed
]
return positive_vals, negative_vals
def _compute_box(self, positive_vals, negative_vals):
"""Compute Y min and max"""
if self.range and self.range[0] is not None:
self._box.ymin = self.range[0]
else:
self._box.ymin = negative_vals and min(
min(negative_vals), self.zero
) or self.zero
if self.range and self.range[1] is not None:
self._box.ymax = self.range[1]
else:
self._box.ymax = positive_vals and max(
max(positive_vals), self.zero
) or self.zero
def _compute(self):
"""Compute y min and max and y scale and set labels"""
positive_vals, negative_vals = self._get_separated_values()
if self.logarithmic:
positive_vals = list(
filter(lambda x: x > self.zero, positive_vals)
)
negative_vals = list(
filter(lambda x: x > self.zero, negative_vals)
)
self._compute_box(positive_vals, negative_vals)
positive_vals = positive_vals or [self.zero]
negative_vals = negative_vals or [self.zero]
self._x_pos = [
x / self._len for x in range(self._len + 1)
] if self._len > 1 else [0, 1] # Center if only one value
self._points(self._x_pos)
self.negative_cumulation = [0] * self._len
self.positive_cumulation = [0] * self._len
if self.secondary_series:
positive_vals, negative_vals = self._get_separated_values(True)
positive_vals = positive_vals or [self.zero]
negative_vals = negative_vals or [self.zero]
self.secondary_negative_cumulation = [0] * self._len
self.secondary_positive_cumulation = [0] * self._len
self._pre_compute_secondary(positive_vals, negative_vals)
self._x_pos = [(i + .5) / self._len for i in range(self._len)]
def _pre_compute_secondary(self, positive_vals, negative_vals):
"""Compute secondary y min and max"""
self._secondary_min = (
negative_vals and min(min(negative_vals), self.zero)
) or self.zero
self._secondary_max = (
positive_vals and max(max(positive_vals), self.zero)
) or self.zero
def _bar(self, serie, parent, x, y, i, zero, secondary=False):
"""Internal stacking bar drawing function"""
if secondary:
cumulation = (
self.secondary_negative_cumulation
if y < self.zero else self.secondary_positive_cumulation
)
else:
cumulation = (
self.negative_cumulation
if y < self.zero else self.positive_cumulation
)
zero = cumulation[i]
cumulation[i] = zero + y
if zero == 0:
zero = self.zero
y -= self.zero
y += zero
width = (self.view.x(1) - self.view.x(0)) / self._len
x, y = self.view((x, y))
y = y or 0
series_margin = width * self._series_margin
x += series_margin
width -= 2 * series_margin
if self.secondary_series:
width /= 2
x += int(secondary) * width
serie_margin = width * self._serie_margin
x += serie_margin
width -= 2 * serie_margin
height = self.view.y(zero) - y
r = serie.rounded_bars * 1 if serie.rounded_bars else 0
self.svg.transposable_node(
parent,
'rect',
x=x,
y=y,
rx=r,
ry=r,
width=width,
height=height,
class_='rect reactive tooltip-trigger'
)
return x, y, width, height
def _plot(self):
"""Draw bars for series and secondary series"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.bar(serie)
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.bar(serie, True)
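# --- Usage sketch (not part of the original module) ---
# Values are accumulated per x position in negative_cumulation /
# positive_cumulation, so each serie is drawn on top of the previous one;
# stack_from_top only reverses the drawing order. Assumes a standard pygal
# install; the data and output filename are arbitrary.
if __name__ == '__main__':  # pragma: no cover - illustrative only
    import pygal

    chart = pygal.StackedBar(stack_from_top=True)
    chart.x_labels = ['2019', '2020', '2021']
    chart.add('Product A', [10, 15, 12])
    chart.add('Product B', [5, 3, 8])
    chart.render_to_file('stacked_bar.svg')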
| 5,711 | Python | .py | 139 | 31.870504 | 79 | 0.597227 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,349 | moulinrouge.py | Kozea_pygal/demo/moulinrouge.py |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
import logging
from moulinrouge import create_app
app = create_app()
try:
from log_colorizer import make_colored_stream_handler
handler = make_colored_stream_handler()
app.logger.handlers = []
app.logger.addHandler(handler)
import werkzeug
werkzeug._internal._log('debug', '<-- I am with stupid')
logging.getLogger('werkzeug').handlers = []
logging.getLogger('werkzeug').addHandler(handler)
handler.setLevel(logging.DEBUG)
app.logger.setLevel(logging.DEBUG)
logging.getLogger('werkzeug').setLevel(logging.DEBUG)
except Exception:
pass
try:
from wdb.ext import WdbMiddleware, add_w_builtin
except ImportError:
pass
else:
add_w_builtin()
app.wsgi_app = WdbMiddleware(app.wsgi_app, start_disabled=True)
app.run(debug=True, threaded=True, host='0.0.0.0', port=21112)
| 1,628 | Python | .py | 44 | 34.409091 | 79 | 0.754753 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,350 | __init__.py | Kozea_pygal/demo/moulinrouge/__init__.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
import pickle
import random
import string
from base64 import urlsafe_b64decode as b64decode
from base64 import urlsafe_b64encode as b64encode
from flask import Flask, Response, render_template, request
import pygal
from pygal.config import Config
from pygal.etree import etree
from pygal.style import parametric_styles, styles
from pygal.util import cut
def get(type):
from importlib import import_module
module = '.'.join(type.split('.')[:-1])
name = type.split('.')[-1]
return getattr(import_module(module), name)
def random_label():
chars = string.ascii_letters + string.digits + u' àéèçêâäëï'
return ''.join([
random.choice(chars) for i in range(random.randrange(4, 30))
])
def random_value(min=0, max=15):
return random.randrange(min, max, 1)
def create_app():
"""Creates the pygal test web app"""
app = Flask(__name__)
@app.before_request
def before_request():
if request.args.get('etree'):
etree.to_etree()
elif request.args.get('lxml'):
etree.to_lxml()
def _random(data, order):
max = 10**order
min = 10**random.randrange(0, order)
series = []
for i in range(random.randrange(1, 10)):
values = [(
random_value((-max, min)[random.randrange(0, 2)], max),
random_value((-max, min)[random.randrange(0, 2)], max)
) for i in range(data)]
series.append((random_label(), values, {}))
return series
def _random_series(type, data, order):
max = 10**order
min = 10**random.randrange(0, order)
with_secondary = bool(random.randint(0, 1))
series = []
for i in range(random.randrange(1, 10)):
if type == 'Pie':
values = random_value(min, max)
elif type == 'XY':
values = [(
random_value((-max, min)[random.randrange(0, 2)], max),
random_value((-max, min)[random.randrange(0, 2)], max)
) for i in range(data)]
else:
values = [
random_value((-max, min)[random.randrange(1, 2)], max)
for i in range(data)
]
config = {
'secondary': with_secondary and bool(random.randint(0, 1))
}
series.append((random_label(), values, config))
return series
from .tests import get_test_routes
links = get_test_routes(app)
@app.route("/")
def index():
return render_template(
'index.jinja2',
styles=styles,
parametric_styles=parametric_styles,
parametric_colors=(
'#ff5995', '#b6e354', '#feed6c', '#8cedff', '#9e6ffe'
),
links=links,
charts_name=pygal.CHARTS_NAMES
)
@app.route("/svg/<type>/<series>/<config>")
def svg(type, series, config):
graph = get(type)(pickle.loads(b64decode(str(config))))
for title, values, serie_config in pickle.loads(b64decode(
str(series))):
graph.add(title, values, **serie_config)
return graph.render_response()
@app.route("/table/<type>/<series>/<config>")
def table(type, series, config):
graph = get(type)(pickle.loads(b64decode(str(config))))
for title, values, serie_config in pickle.loads(b64decode(
str(series))):
graph.add(title, values, **serie_config)
return graph.render_table()
@app.route("/sparkline/<style>")
@app.route("/sparkline/parameric/<style>/<color>")
def sparkline(style, color=None):
if color is None:
style = styles[style]
else:
style = parametric_styles[style](color)
line = pygal.Line(style=style, pretty_print=True)
line.add('_', [random.randrange(0, 10) for _ in range(25)])
return Response(
line.render_sparkline(height=40), mimetype='image/svg+xml'
)
@app.route("/with/table/<type>")
def with_table(type):
chart = pygal.StackedBar(
disable_xml_declaration=True, x_label_rotation=35
)
chart.title = (
'What Linux distro do you primarily use'
' on your server computers? (Desktop'
' users vs Server Users)'
)
if type == 'series':
chart.add('Debian', [1775, 82])
chart.add('Ubuntu', [1515, 80])
chart.add('CentOS', [807, 60])
chart.add('Arch Linux', [549, 12])
chart.add('Red Hat Enterprise Linux', [247, 10])
chart.add('Gentoo', [129, 7])
chart.add('Fedora', [91, 6])
chart.add('Amazon Linux', [60, 0])
chart.add('OpenSUSE', [58, 0])
chart.add('Slackware', [50, 3])
chart.add('Xubuntu', [38, 1])
chart.add('Rasbian', [33, 4])
chart.add('SUSE Linux Enterprise Server', [33, 1])
chart.add('Linux Mint', [30, 4])
chart.add('Scientific Linux', [32, 0])
chart.add('Other', [187, 5])
elif type == 'labels':
chart.x_labels = [
'Debian', 'Ubuntu', 'CentOS', 'Arch Linux',
'Red Hat Enterprise Linux', 'Gentoo', 'Fedora', 'Amazon Linux',
'OpenSUSE', 'Slackware', 'Xubuntu', 'Rasbian',
'SUSE Linux Enterprise Server', 'Linux Mint',
'Scientific Linux', 'Other'
]
chart.add(
'Desktop Users', [
1775, 1515, 807, 549, 247, 129, 91, 60, 58, 50, 38, 33, 33,
30, 32, 187
]
)
chart.add(
'Server Users',
[82, 80, 60, 12, 10, 7, 6, 0, 0, 3, 1, 4, 1, 4, 0, 5]
)
return render_template('table.jinja2', chart=chart)
@app.route("/all")
@app.route("/all/<style>")
@app.route("/all/<style>/<color>")
@app.route("/all/<style>/<color>/<base_style>")
@app.route("/all/interpolate=<interpolate>")
def all(style='default', color=None, interpolate=None, base_style=None):
width, height = 600, 400
data = random.randrange(1, 10)
order = random.randrange(1, 10)
if color is None:
style = styles[style]
else:
style = parametric_styles[style](
color, base_style=styles[base_style or 'default']
)
xy_series = _random(data, order)
other_series = []
for title, values, config in xy_series:
other_series.append((title, cut(values, 1), config))
xy_series = b64encode(pickle.dumps(xy_series))
other_series = b64encode(pickle.dumps(other_series))
config = Config()
config.width = width
config.height = height
config.fill = bool(random.randrange(0, 2))
config.interpolate = interpolate
config.style = style
svgs = []
for chart in pygal.CHARTS:
type = '.'.join((chart.__module__, chart.__name__))
if chart._dual:
config.x_labels = None
else:
config.x_labels = [random_label() for i in range(data)]
svgs.append({
'type': type,
'series': xy_series if chart._dual else other_series,
'config': b64encode(pickle.dumps(config))
})
return render_template(
'svgs.jinja2', svgs=svgs, width=width, height=height
)
@app.route("/rotation")
def rotation():
width, height = 375, 245
config = Config()
config.width = width
config.height = height
config.fill = True
config.style = styles['neon']
data = random.randrange(1, 10)
order = random.randrange(1, 10)
series = b64encode(pickle.dumps(_random_series(type, data, order)))
labels = [random_label() for i in range(data)]
svgs = []
config.show_legend = bool(random.randrange(0, 2))
for angle in range(0, 370, 10):
config.title = "%d rotation" % angle
config.x_labels = labels
config.x_label_rotation = angle
config.y_label_rotation = angle
svgs.append({
'type': 'pygal.Bar',
'series': series,
'config': b64encode(pickle.dumps(config))
})
return render_template(
'svgs.jinja2', svgs=svgs, width=width, height=height
)
@app.route("/interpolation")
def interpolation():
width, height = 600, 400
config = Config()
config.width = width
config.height = height
config.fill = True
config.style = styles['neon']
data = random.randrange(1, 10)
order = random.randrange(1, 10)
series = b64encode(pickle.dumps(_random_series(type, data, order)))
svgs = []
for interpolation in 'quadratic', 'cubic', 'lagrange', 'trigonometric':
config.title = "%s interpolation" % interpolation
config.interpolate = interpolation
svgs.append({
'type': 'pygal.StackedLine',
'series': series,
'config': b64encode(pickle.dumps(config))
})
for params in [{'type': 'catmull_rom'}, {'type': 'finite_difference'},
{'type': 'cardinal',
'c': .25}, {'type': 'cardinal',
'c': .5}, {'type': 'cardinal', 'c': .75},
{'type': 'cardinal',
'c': 1.5}, {'type': 'cardinal',
'c': 2}, {'type': 'cardinal', 'c': 5},
{'type': 'kochanek_bartels', 'b': 1, 'c': 1,
't': 1}, {'type': 'kochanek_bartels', 'b': -1, 'c': 1,
't': 1}, {'type': 'kochanek_bartels', 'b': 1,
'c': -1, 't': 1},
{'type': 'kochanek_bartels', 'b': 1, 'c': 1, 't': -1}, {
'type': 'kochanek_bartels', 'b': -1, 'c': 1, 't': -1
}, {'type': 'kochanek_bartels', 'b': -1, 'c': -1,
't': 1}, {'type': 'kochanek_bartels', 'b': -1,
'c': -1, 't': -1}]:
config.title = "Hermite interpolation with params %r" % params
config.interpolate = 'hermite'
config.interpolation_parameters = params
svgs.append({
'type': 'pygal.StackedLine',
'series': series,
'config': b64encode(pickle.dumps(config))
})
return render_template(
'svgs.jinja2', svgs=svgs, width=width, height=height
)
@app.route("/raw_svgs/")
def raw_svgs():
svgs = []
for color in styles['neon'].colors:
chart = pygal.Pie(
style=parametric_styles['rotate'](color),
width=400,
height=300
)
chart.title = color
chart.disable_xml_declaration = True
chart.explicit_size = True
chart.js = ['http://l:2343/2.0.x/pygal-tooltips.js']
for i in range(6):
chart.add(str(i), 2**i)
svgs.append(chart.render())
return render_template('raw_svgs.jinja2', svgs=svgs)
return app
| 12,397 | Python | .py | 304 | 29.644737 | 79 | 0.53594 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam)
11,351 | tests.py | Kozea_pygal/demo/moulinrouge/tests.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
from datetime import date, datetime
from random import choice, randint
from flask import abort
from pygal import (
CHARTS_BY_NAME,
XY,
Bar,
Box,
Config,
DateLine,
DateTimeLine,
Dot,
Funnel,
Gauge,
Histogram,
Line,
Pie,
Pyramid,
Radar,
SolidGauge,
StackedBar,
StackedLine,
TimeLine,
Treemap,
formatters,
stats,
)
from pygal.colors import rotate
from pygal.graph.horizontal import HorizontalGraph
from pygal.style import RotateStyle, Style, styles
try:
from pygal.maps import world
except ImportError:
world = None
try:
from pygal.maps import fr
except ImportError:
fr = None
try:
from pygal.maps import ch
except ImportError:
ch = None
def get_test_routes(app):
lnk = lambda v, l=None: {
'value': v,
'xlink': 'javascript:alert("Test %s")' % v,
'label': l}
@app.route('/test/unsorted')
def test_unsorted():
bar = Bar(
style=styles['neon'], value_formatter=formatters.human_readable
)
bar.add('A', {'red': 10, 'green': 12, 'blue': 14})
bar.add('B', {'green': 11, 'blue': 7})
bar.add('C', {'blue': 7})
bar.add('D', {})
bar.add('E', {'blue': 2, 'red': 13})
bar.x_labels = ('red', 'green', 'blue')
return bar.render_response()
@app.route('/test/bar_links')
def test_bar_links():
bar = StackedLine(
style=styles['default'](font_family='googlefont:Raleway')
)
bar.js = ('http://l:2343/2.0.x/pygal-tooltips.js', )
bar.title = 'Wow ! Such Chart !'
bar.x_title = 'Many x labels'
bar.y_title = 'Much y labels'
bar.dynamic_print_values = True
bar.add(
'Red serie', [{
'value': 10,
'label': 'Ten',
'xlink': 'http://google.com?q=10'
}, {
'value': 20,
'label':
'Twenty is a good number yada yda yda yada '
'yadaaaaaaaaaaaaaaaaaaaaaa',
'xlink': 'http://google.com?q=20'
}, 30, {
'value': 40,
'label': 'Forty',
'xlink': 'http://google.com?q=40'
}]
)
bar.add(
'Blue serie', [
40, {
'value': 30,
'label': 'Thirty',
'xlink': 'http://google.com?q=30'
}, 20, 10
]
)
bar.x_labels = [
'Yesterday', 'Today or any other day', 'Tomorrow', 'Someday'
]
bar.logarithmic = True
# bar.zero = 1
return bar.render_response()
@app.route('/test/xy_links')
def test_xy_links():
xy = XY(style=styles['neon'], interpolate='cubic')
xy.add(
'1234', [{
'value': (10, 5),
'label': 'Ten',
'xlink': 'http://google.com?q=10'
}, {
'value': (20, 20),
'tooltip': 'Twenty',
'xlink': 'http://google.com?q=20'
}, (30, 15), {
'value': (40, -5),
'label': 'Forty',
'xlink': 'http://google.com?q=40'
}]
)
xy.add(
'4321', [(40, 10), {
'value': (30, 3),
'label': 'Thirty',
'xlink': 'http://google.com?q=30'
}, (20, 10), (10, 21)]
)
xy.x_labels = list(range(1, 50))
xy.y_labels = list(range(1, 50))
return xy.render_response()
@app.route('/test/long_title')
def test_long_title():
bar = Bar()
bar.add('Looooooooooooooooooooooooooooooooooong', [2, None, 12])
bar.title = (
'1 12 123 1234 12345 123456 1234567 12345678 123456789 1234567890 '
'12345678901 123456789012 1234567890123 12345678901234 '
'123456789012345 1234567890123456 12345678901234567 '
'123456789012345678 1234567890123456789 12345678901234567890 '
'123456789012345 1234567890123456 12345678901234567 '
'12345678901 123456789012 1234567890123 12345678901234 '
'1 12 123 1234 12345 123456 1234567 12345678 123456789 1234567890'
)
return bar.render_response()
@app.route('/test/multiline_title')
def test_multiline_title():
bar = Bar()
bar.add('Looooooooooooooooooooooooooooooooooong', [2, None, 12])
bar.title = ('First line \n Second line \n Third line')
return bar.render_response()
@app.route('/test/long_labels')
def test_long_labels():
bar = Bar()
bar.add('Long', [2, None, 12])
bar.title = (
'1 12 123 1234 12345 123456 1234567 12345678 123456789 1234567890'
)
bar.x_labels = 'a' * 100, 'b ' * 50, 'cc ! ' * 20
bar.x_label_rotation = 45
return bar.render_response()
@app.route('/test/none')
def test_bar_none():
bar = Bar()
bar.add('Lol', [2, None, 12])
bar.x_labels = range(1, 4)
return bar.render_response()
@app.route('/test/print_values/<chart>')
def test_print_values_for(chart):
graph = CHARTS_BY_NAME[chart](
print_values=True,
print_labels=True,
print_zeroes=True,
style=styles['default'](
value_font_family='googlefont:Raleway',
value_colors=(None, None, 'blue', 'red', 'green')
)
)
graph.js = ('http://l:2343/2.0.x/pygal-tooltips.js', )
for i in range(12):
graph.add(
'', [{
'value': i + j,
'label': 'abcdefghijklmnopqrstuvwxyz' [i + j]
} for j in range(5)]
)
return graph.render_response()
@app.route('/test/treemap')
def test_treemap():
treemap = Treemap(
style=RotateStyle(
'#ff5995',
opacity=.6,
value_font_size=32,
value_colors=['#ffffff']
)
)
treemap.title = 'Binary TreeMap'
treemap.print_values = True
treemap.print_labels = True
for i in range(1, 5):
treemap.add('', [{'label': 'Area %d' % i, 'value': i}])
treemap.add('', [2])
# treemap.add('A', [2, 1, 12, 4, 2, 1, 1, 3, 12, 3, 4, None, 9])
# treemap.add('B', [4, 2, 5, 10, 3, 4, 2, 7, 4, -10, None, 8, 3, 1])
# treemap.add('C', [3, 8, 3, 3, 5, 3, 3, 5, 4, 12])
# treemap.add('D', [23, 18])
# treemap.add('E', [1, 2, 1, 2, 3, 3, 1, 2, 3,
# 4, 3, 1, 2, 1, 1, 1, 1, 1])
# treemap.add('F', [31])
# treemap.add('G', [5, 9.3, 8.1, 12, 4, 3, 2])
# treemap.add('H', [12, 3, 3])
return treemap.render_response()
@app.route('/test/gauge')
def test_gauge():
gauge = Gauge()
gauge.range = [-10, 10]
gauge.add('Need l', [2.3, 5.12])
gauge.add('Need m', [-4])
gauge.add('Need z', [-10, 10.5])
gauge.add('No', [99, -99])
gauge.y_labels = [{
'label': 'X',
'value': 6
}, {
'label': '><',
'value': -6
}]
return gauge.render_response()
@app.route('/test/solidgauge/')
def test_solidgauge():
gauge = SolidGauge(
half_pie=True,
inner_radius=0.70,
print_values=not True,
human_readable=True
)
gauge.title = 'Hello World!'
percent_formatter = lambda x: '{:.10g}%'.format(x)
dollar_formatter = lambda x: '{:.10g}$'.format(x)
gauge.value_formatter = percent_formatter
gauge.add(
'Series 1', [{
'value': 225000,
'max_value': 1275000
}],
formatter=dollar_formatter
)
gauge.add('Series 2', [{'value': 110, 'max_value': 100}])
gauge.add('Series 3', [{'value': 3}])
gauge.add(
'Series 4', [{
'value': 51,
'max_value': 100
}, {
'value': 12,
'max_value': 100
}]
)
gauge.add('Series 5', [{'value': 79, 'max_value': 100}])
gauge.add('Series 6', 99)
gauge.add('Series 7', [{'value': 100, 'max_value': 100}])
return gauge.render_response()
@app.route('/test/gauge/log')
def test_gauge_log():
gauge = Gauge(logarithmic=True)
gauge.add('Need l', [200.3, 500.12])
gauge.add('Need z', [10, 1000.5])
return gauge.render_response()
@app.route('/test/pyramid')
def test_pyramid():
pyramid = Pyramid()
pyramid.x_labels = ['0-25', '25-45', '45-65', '65+']
pyramid.add('Man single', [2, 4, 2, 1])
pyramid.add('Woman single', [10, 6, 1, 1])
pyramid.add('Man married', [10, 3, 4, 2])
pyramid.add('Woman married', [3, 3, 5, 3])
return pyramid.render_response()
@app.route('/test/funnel')
def test_funnel():
funnel = Funnel()
funnel.add('1', [1, 2, 3])
funnel.add('3', [3, 4, 5])
funnel.add('6', [6, 5, 4])
funnel.add('12', [12, 2, 9])
return funnel.render_response()
@app.route('/test/dot')
def test_dot():
dot = Dot(logarithmic=True)
dot.x_labels = map(str, range(4))
dot.add('a', [1, lnk(3, 'Foo'), 5, 3])
dot.add('b', [2, -2, 0, 2, .1])
dot.add('c', [5, 1, 50, lnk(3, 'Bar')])
dot.add('d', [-5, 5, lnk(0, 'Babar'), 3])
return dot.render_response()
@app.route('/test/<chart>')
def test_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.add('1', [1, 3, 12, 3, 4, None, 9])
graph.add('2', [7, -4, 10, None, 8, 3, 1])
graph.add('3', [7, -14, -10, None, 8, 3, 1])
graph.add('4', [7, 4, -10, None, 8, 3, 1])
graph.x_labels = ('a', 'b', 'c', 'd')
graph.x_label_rotation = 90
return graph.render_response()
@app.route('/test/<chart>')
def test_call_api_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph(1, 3, 12, 3, 4, None, 9, title='1')
graph(7, -4, 10, None, 8, 3, 1, title='2')
graph(7, -14, -10, None, 8, 3, 1, title='3')
graph(7, 4, -10, None, 8, 3, 1, title='4')
graph.x_labels = ('a', 'b', 'c', 'd')
graph.x_label_rotation = 90
return graph.render_response()
@app.route('/test/one/<chart>')
def test_one_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.add('1', [10])
graph.x_labels = 'a',
return graph.render_response()
@app.route('/test/xytitles/<chart>')
def test_xy_titles_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.title = 'My global title'
graph.x_title = 'My X title'
graph.y_title = 'My Y title'
graph.add('My number 1 serie', [1, 3, 12])
graph.add('My number 2 serie', [7, -4, 10])
graph.add('A', [17, -14, 11], secondary=True)
graph.x_label_rotation = 25
graph.legend_at_bottom = not True
graph.x_labels = ('First point', 'Second point', 'Third point')
return graph.render_response()
@app.route('/test/no_data/<chart>')
def test_no_data_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.add('Empty 1', [])
graph.add('Empty 2', [])
graph.x_labels = 'empty'
graph.title = '123456789 ' * 30
return graph.render_response()
@app.route('/test/xy_single')
def test_xy_single():
graph = XY(interpolate='cubic')
graph.add('Single', [(1, 1)])
return graph.render_response()
@app.route('/test/no_data/at_all/<chart>')
def test_no_data_at_all_for(chart):
graph = CHARTS_BY_NAME[chart]()
return graph.render_response()
@app.route('/test/interpolate/<chart>')
def test_interpolate_for(chart):
graph = CHARTS_BY_NAME[chart](
interpolate='lagrange',
interpolation_parameters={
'type': 'kochanek_bartels',
'c': 1,
'b': -1,
't': -1
}
)
graph.add('1', [1, 3, 12, 3, 4])
graph.add('2', [7, -4, 10, None, 8, 3, 1])
return graph.render_response()
@app.route('/test/logarithmic/<chart>')
def test_logarithmic_for(chart):
graph = CHARTS_BY_NAME[chart](logarithmic=True)
if isinstance(graph, CHARTS_BY_NAME['XY']):
graph.add(
'xy', [(.1, .234), (10, 243), (.001, 2), (1000000, 1231)]
)
else:
graph.add('1', [.1, 10, .01, 10000])
graph.add('2', [.234, 243, 2, 2379, 1231])
graph.x_labels = ('a', 'b', 'c', 'd', 'e')
graph.x_label_rotation = 90
return graph.render_response()
@app.route('/test/zero_at_34/<chart>')
@app.route('/test/zero_at_<int:zero>/<chart>')
def test_zero_at_34_for(chart, zero=34):
graph = CHARTS_BY_NAME[chart](fill=True, zero=zero)
graph.add('1', [100, 34, 12, 43, -48])
graph.add('2', [73, -14, 10, None, -58, 32, 91])
return graph.render_response()
@app.route('/test/range/<chart>')
def test_range_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.range = [0, 100]
graph.add('1', [1, 2, 10])
return graph.render_response()
@app.route('/test/fill_with_none/')
def test_fill_with_none():
graph = XY(fill=True)
graph.add('1', [(1, 2), (3, 3), (3.5, 5), (5, 1)])
graph.add('2', [(1, 9), (None, 5), (5, 23)])
return graph.render_response()
@app.route('/test/negative/<chart>')
def test_negative_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.add('1', [10, 0, -10])
return graph.render_response()
@app.route('/test/bar')
def test_bar():
bar = Bar(dynamic_print_values=True, show_minor_x_labels=False)
bar.add('1', [1, 2, 3])
bar.add('2', [4, 5, 6])
bar.x_labels = [2, 4, 6]
bar.x_labels_major = [4]
return bar.render_response()
@app.route('/test/formatters/<chart>')
def test_formatters_for(chart):
chart = CHARTS_BY_NAME[chart](
print_values=True,
formatter=lambda x, chart, serie: '%s%s$' % (x, serie.title)
)
chart.add('_a', [1, 2, {'value': 3, 'formatter': lambda x: '%s¥' % x}])
chart.add('_b', [4, 5, 6], formatter=lambda x: '%s€' % x)
chart.x_labels = [2, 4, 6]
chart.x_labels_major = [4]
return chart.render_response()
@app.route('/test/bar/position')
def test_bar_print_values_position():
bar = StackedBar(
print_values=True,
print_values_position='top',
zero=2,
style=styles['default'](
value_font_family='googlefont:Raleway', value_font_size=46
)
)
bar.add('1', [1, -2, 3])
bar.add('2', [4, -5, 6])
bar.x_labels = [2, 4, 6]
bar.x_labels_major = [4]
return bar.render_response()
@app.route('/test/histogram')
def test_histogram():
hist = Histogram(
print_values=True,
print_values_position='top',
style=styles['neon']
)
hist.add('1', [(2, 0, 1), (4, 1, 3), (3, 3.5, 5), (1.5, 5, 10)])
hist.add('2', [(2, 2, 8)])
hist.x_labels = [0, 3, 6, 9, 12]
return hist.render_response()
@app.route('/test/ylabels')
def test_ylabels():
chart = Bar()
chart.x_labels = 'Red', 'Blue', 'Green'
chart.y_labels = [{
'value': .0001,
'label': 'LOL'
}, {
'value': .0003,
'label': 'ROFL'
}, {
'value': .0004,
'label': 'MAO'
}, {
'value': .00045,
'label': 'LMFAO'
}, {
'value': .0005,
'label': 'GMCB'
}]
chart.add('line', [.0002, .0005, .00035])
return chart.render_response()
@app.route('/test/secondary/<chart>')
def test_secondary_for(chart):
chart = CHARTS_BY_NAME[chart](fill=True)
chart.title = 'LOL ' * 23
chart.x_labels = 'abc'
chart.x_label_rotation = 25
chart.y_label_rotation = 50
chart.add('1', [30, 20, -2])
chart.add('1b', [-4, 50, 6], secondary=True)
chart.add('2b', [None, 10, 20], secondary=True)
chart.add('2', [8, 21, -0])
chart.add('3', [None, 20, 10])
chart.add('3b', [-1, 2, -3], secondary=True)
return chart.render_response()
@app.route('/test/secondary_xy')
def test_secondary_xy():
chart = XY()
chart.add(10 * '1', [(30, 5), (20, 12), (25, 4)])
chart.add(10 * '1b', [(4, 12), (5, 8), (6, 4)], secondary=True)
chart.add(10 * '2b', [(3, 24), (0, 17), (12, 9)], secondary=True)
chart.add(10 * '2', [(8, 23), (21, 1), (5, 0)])
chart.value_formatter = lambda x: str(int(x)) + '+'
return chart.render_response()
@app.route('/test/box')
def test_box():
chart = Box()
# chart.js = ('http://l:2343/2.0.x/pygal-tooltips.js',)
chart.box_mode = '1.5IQR'
chart.add('One', [15, 8, 2, -12, 9, 23])
chart.add('Two', [5, 8, 2, -9, 23, 12])
chart.add('Three', [8, -2, 12, -5, 9, 3])
chart.add('Four', [5, 8, 2, -9, -3, 12])
chart.add('Five', [8, 12, 12, -9, 5, 13])
chart.x_labels = map(str, range(5))
return chart.render_response()
@app.route('/test/stacked')
def test_stacked():
stacked = StackedLine(stack_from_top=True, logarithmic=True)
stacked.add('1', [1, 2])
stacked.add('2', [10, 12])
stacked.x_labels = ['a', 'b', 'c', 'd']
return stacked.render_response()
@app.route('/test/stacked/reverse')
def test_stacked_reverse():
stacked = StackedBar(stack_from_top=True)
stacked.add('1', [1, 2, 3])
stacked.add('2', [4, 5, 6])
return stacked.render_response()
@app.route('/test/show_dots')
def test_show_dots():
line = Line(show_dots=False)
line.add('1', [1, 2, 3])
line.add('2', [4, 5, 6])
return line.render_response()
@app.route('/test/config')
def test_config():
class LolConfig(Config):
js = ['http://l:2343/2.0.x/pygal-tooltips.js']
stacked = StackedBar(LolConfig())
stacked.add('', [1, 2, 3])
stacked.add('My beautiful serie of 2019', [4, 5, 6])
return stacked.render_response()
@app.route('/test/dateline')
def test_dateline():
dateline = DateLine(y_label_rotation=112)
dateline.x_labels = [
date(2013, 1, 1),
date(2013, 7, 1),
date(2014, 1, 1),
date(2014, 7, 1),
date(2015, 1, 1),
date(2015, 7, 1)
]
dateline.x_labels_major = [date(2013, 1, 1), date(2015, 7, 1)]
dateline.add(
"Serie", [(date(2013, 1, 2), 213), (date(2013, 8, 2), 281),
(date(2013, 5, 31), 281), (date(2014, 12, 7), 198),
(date(2014, 9, 6), 198), (date(2015, 3, 21), 120)]
)
return dateline.render_response()
@app.route('/test/timeline')
def test_timexy():
from datetime import time
timeline = TimeLine()
timeline.add(
'1', [(time(1, 12, 29), 2), (time(21, 2, 29), 10),
(time(12, 30, 59), 7)]
)
timeline.add(
'2', [(time(12, 12, 12), 4), (time(), 8), (time(23, 59, 59), 6)]
)
timeline.x_label_rotation = 25
return timeline.render_response()
@app.route('/test/worldmap')
def test_worldmap():
wmap = world.World(
print_values=True, style=choice(list(styles.values()))
)
# wmap.js = ('http://l:2343/2.0.x/pygal-tooltips.js',)
# wmap.add('1st', [('fr', 100), {
# 'value': ('us', 10),
# 'node': {'style': 'fill: red'}
# }
# ])
# wmap.add('2nd', [('jp', 1), ('ru', 7), ('uk', 0)])
# wmap.add('3rd', ['ch', 'cz', 'ca', 'cn'])
# wmap.add('4th', {'jp': 12, 'bo': 1, 'bu': 23, 'fr': 34})
# wmap.add('5th', [{
# 'value': ('tw', 10),
# 'label': 'First label',
# 'xlink': 'http://google.com?q=tw',
# }, {
# 'value': ('bw', 20),
# 'label': 'Second one',
# 'xlink': 'http://google.com?q=bw',
# 'node': {'style': 'fill: blue'}
# }, {
# 'value': ('mw', 40),
# 'label': 'Last'
# }])
wmap.add('_', {'us': 1})
wmap.add('-', {'us': 2})
wmap.add('.', {'us': 3})
wmap.title = 'World Map !!'
wmap.value_formatter = lambda x: '%d%%' % x
return wmap.render_response()
@app.route('/test/supranational')
def test_supranational():
wmap = world.SupranationalWorld(style=choice(list(styles.values())))
v = [('europe', 0), ('oceania', 2), ('antartica', 4),
('south_america', 5), ('africa', 6), ('north_america',
7), ('asia', 8)]
wmap.add(
'Serie with metadata', [
v[0], {
'value': v[1]
}, {
'value': v[2],
'label': 'Three'
}, {
'value': v[3],
'xlink': 'http://4.example.com/'
}, {
'value': v[4],
'xlink': 'http://5.example.com/',
'label': 'Five'
}, {
'value': v[5],
'xlink': {
'href': 'http://6.example.com/'
},
'label': 'Six'
}, {
'value': v[6],
'xlink': {
'href': 'http://7.example.com/',
'target': '_blank'
},
'label': 'Seven'
}
]
)
# wmap.add('Asia', [('asia', 1)])
# wmap.add('Europe', [('europe', 1)])
# wmap.add('Africa', [('africa', 1)])
# wmap.add('North america', [('north_america', 1)])
# wmap.add('South america', [('south_america', 1)])
# wmap.add('Oceania', [('oceania', 1)])
# wmap.add('Antartica', [('antartica', 1)])
wmap.title = 'Supra World Map !!'
return wmap.render_response()
@app.route('/test/frenchmapdepartments')
def test_frenchmapdepartments():
if fr is None:
abort(404)
fmap = fr.Departments(style=choice(list(styles.values())))
fmap.add('', [(i, i) for i in range(1, 100)])
fmap.add('', [(970 + i, i) for i in range(1, 7)])
fmap.add('', [('2A', 1), ('2B', 2)])
fmap.title = 'French map'
return fmap.render_response()
@app.route('/test/swissmap')
def test_swissmap():
smap = ch.Cantons(style=choice(list(styles.values())))
for i in range(10):
smap.add(
's%d' % i, [(choice(list(ch.CANTONS.keys())), randint(0, 100))
for _ in range(randint(1, 5))]
)
smap.add(
'links', [{
'value': ('kt-vs', 10),
'label': r'\o/',
'xlink': 'http://google.com?q=69'
}, {
'value': ('bt', 20),
'label': 'Y',
}]
)
smap.add('6th', [3, 5, 34, 12])
smap.title = 'Swiss map'
return smap.render_response()
@app.route('/test/frenchmapregions')
def test_frenchmapregions():
if fr is None:
abort(404)
fmap = fr.Regions(style=choice(list(styles.values())))
for i in range(10):
fmap.add(
's%d' % i, [(choice(list(fr.REGIONS.keys())), randint(0, 100))
for _ in range(randint(1, 5))]
)
fmap.add(
'links', [{
'value': ('02', 10),
'label': r'\o/',
'xlink': 'http://google.com?q=69'
}, {
'value': ('72', 20),
'label': 'Y',
}]
)
fmap.add('6th', [91, 2, 41])
fmap.title = 'French map'
return fmap.render_response()
@app.route('/test/labels')
def test_labels():
line = Line()
line.add('test1', range(100))
line.x_labels = map(str, range(11))
return line.render_response()
@app.route('/test/64colors')
def test_64_colors():
n = 64
colors = [rotate('#ff0000', i * 360 / n) for i in range(n)]
pie = Pie(style=Style(colors=colors))
for i in range(n):
pie(1, title=str(i) if i % 5 == 1 else None)
return pie.render_response()
@app.route('/test/major_dots')
def test_major_dots():
line = Line(x_labels_major_count=2, show_only_major_dots=True)
line.add('test', range(12))
line.x_labels = [
'lol', 'lol1', 'lol2', 'lol3', 'lol4', 'lol5', 'lol6', 'lol7',
'lol8', 'lol9', 'lol10', 'lol11'
]
# line.x_labels_major = ['lol3']
return line.render_response()
@app.route('/test/x_major_labels/<chart>')
def test_x_major_labels_for(chart):
chart = CHARTS_BY_NAME[chart](show_minor_y_labels=False)
for i in range(12):
chart.add('test', range(12))
chart.x_labels = map(str, range(12))
# chart.x_labels_major_count = 4
# chart.x_labels_major = ['1', '5', '11', 6]
# chart.y_labels_major = [60, 120]
return chart.render_response()
@app.route('/test/y_major_labels/<chart>')
def test_y_major_labels_for(chart):
chart = CHARTS_BY_NAME[chart]()
chart.add('test', range(12))
# chart.add('test', zip(*[range(12), range(12)]))
chart.y_labels = range(12)
# chart.y_labels_major_count = 4
chart.y_labels_major = [1.0, 5.0, 11.0]
return chart.render_response()
@app.route('/test/stroke_config')
def test_stroke_config():
line = Line(stroke_style={'width': .5})
line.add('test_no_line', range(12), stroke=False)
line.add('test', reversed(range(12)), stroke_style={'width': 3})
line.add(
'test_no_dots', [5] * 12,
show_dots=False,
stroke_style={
'width': 2,
'dasharray': '12, 31'
}
)
line.add(
'test_big_dots', [randint(1, 12) for _ in range(12)], dots_size=5
)
line.add(
'test_fill', [randint(1, 3) for _ in range(12)],
fill=True,
stroke_style={
'width': 5,
'dasharray': '4, 12, 7, 20'
}
)
line.x_labels = [
'lol', 'lol1', 'lol2', 'lol3', 'lol4', 'lol5', 'lol6', 'lol7',
'lol8', 'lol9', 'lol10', 'lol11'
]
return line.render_response()
@app.route('/test/radar')
def test_radar():
radar = Radar()
for i in range(10):
radar.add(str(i), [i * j for j in range(8)])
radar.x_labels = [
'lol', 'rofl', 'mao', 'lolroflmao', '12345678901234567890'
]
radar.x_label_rotation = 35
radar.y_label_rotation = 35
radar.y_labels = [{
'label': '500',
'value': 10
}, {
'label': '1000',
'value': 20
}, {
'label': '5000',
'value': 30
}, {
'label': '10000',
'value': 40
}]
return radar.render_response()
@app.route('/test/pie_serie_radius')
def test_pie_serie_radius():
pie = Pie()
pie.js = ('http://a.zi:2343/2.0.x/pygal-tooltips.js', )
for i in range(10):
pie.add(str(i), i, inner_radius=(10 - i) / 10)
return pie.render_response()
@app.route('/test/half_pie')
def test_half_pie():
pie = Pie(half_pie=True)
for i in range(20):
pie.add(str(i), i, inner_radius=.1)
pie.legend_at_bottom = True
pie.legend_at_bottom_columns = 4
return pie.render_response()
@app.route('/test/interpolate/secondary')
def test_interpolate_secondary():
chart = Line(title=u'Some different points', interpolate='cubic')
chart.add('line', [1000, 2000, 7000])
chart.add('other line', [100, 500, 500], secondary=True)
chart.range = 0, 10000
chart.secondary_range = 0, 1000
return chart.render_response()
@app.route('/test/legend_at_bottom/<chart>')
def test_legend_at_bottom_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.add('1', [1, 3, 12, 3, 4, None, 9])
graph.add('2', [7, -4, 10, None, 8, 3, 1])
graph.add('3', [7, -14, -10, None, 8, 3, 1])
graph.add('4', [7, 4, -10, None, 8, 3, 1])
graph.x_labels = ('a', 'b', 'c', 'd', 'e', 'f', 'g')
graph.legend_at_bottom = True
return graph.render_response()
@app.route('/test/custom_metadata/<chart>')
def test_custom_metadata_for(chart):
c = CHARTS_BY_NAME[chart]()
c.add(
'1', [{
'style': 'fill: red',
'value': 1,
'node': {
'r': 12
}
}, {
'color': 'blue',
'value': 2,
'node': {
'width': 12
}
}, {
'style': 'fill: red; stroke: yellow',
'value': 3
}]
)
c.add(
'2', [{
'value': 4,
'xlink': {
'href': 'javascript:alert("-")',
'target': 'top',
'class': 'lol'
}
}, {
'color': 'green',
'value': 5
}, 6]
)
return c.render_response()
@app.route('/test/sparkline/<chart>')
def test_sparkline_for(chart):
graph = CHARTS_BY_NAME[chart](
**dict(
width=200,
height=50,
show_dots=False,
show_legend=False,
show_y_labels=False,
show_x_labels=False,
spacing=0,
margin=5,
explicit_size=True
)
)
graph.add('1', [1, 3, 12, 3, 4, None, 9])
graph.add('2', [7, -4, 10, None, 8, 3, 1])
graph.add('3', [7, -14, -10, None, 8, 3, 1])
graph.add('4', [7, 4, -10, None, 8, 3, 1])
graph.x_labels = ('a', 'b', 'c', 'd', 'e', 'f', 'g')
graph.legend_at_bottom = True
return graph.render_response()
@app.route('/test/sparkline/label/<chart>')
def test_sparkline_label_for(chart):
graph = CHARTS_BY_NAME[chart](
**dict(
width=200,
height=50,
show_dots=False,
show_legend=False,
# show_y_labels=False,
# show_x_labels=False,
spacing=0,
margin=5,
min_scale=2,
max_scale=2,
explicit_size=True
)
)
graph.add('1', [1, 3, 12, 3, 4, None, 9])
graph.add('2', [7, -4, 10, None, 8, 3, 1])
graph.add('3', [7, -14, -10, None, 8, 3, 1])
graph.add('4', [7, 4, -10, None, 8, 3, 1])
graph.x_labels = ('a', 'b', 'c', 'd', 'e', 'f', 'g')
graph.legend_at_bottom = True
return graph.render_response()
@app.route('/test/normal/<chart>')
def test_normal_for(chart):
graph = CHARTS_BY_NAME[chart]()
graph.add('1', [1, 3, 12, 3, 4, None, 9])
graph.add('2', [7, -4, 10, None, 8, 3, 1])
graph.add('3', [7, -14, -10, None, 8, 3, 1])
graph.add('4', [7, 4, -10, None, 8, 3, 1])
graph.x_labels = ('a', 'b', 'c', 'd', 'e', 'f', 'g')
graph.legend_at_bottom = True
return graph.render_response()
@app.route('/test/horizontal_force/<chart>')
def test_horizontal_force_for(chart):
class H(CHARTS_BY_NAME[chart], HorizontalGraph):
pass
graph = H()
graph.add('1', [1, 3, 12, 3, 4, None, 9])
graph.add('2', [7, -4, 10, None, 8, 3, 1])
graph.add('3', [7, -14, -10, None, 8, 3, 1])
graph.add('4', [7, 4, -10, None, 8, 3, 1])
graph.x_labels = ('a', 'b', 'c', 'd', 'e', 'f', 'g')
graph.legend_at_bottom = True
return graph.render_response()
@app.route('/test/inverse_y_axis/<chart>')
def test_inverse_y_axis_for(chart):
graph = CHARTS_BY_NAME[chart](**dict(inverse_y_axis=True))
graph.add('inverse', [1, 2, 3, 12, 24, 36])
return graph.render_response()
@app.route('/test/only_zeroes')
def test_only_zeroes():
line = Line()
line.add('zeroes', [])
line.add('zeroes 2', [0])
return line.render_response()
@app.route('/test/rotations/<chart>')
def test_rotations_for(chart):
graph = CHARTS_BY_NAME[chart]()
# graph.x_label_rotation = 290
# graph.y_label_rotation = 0
graph.add('lalalla al alallaa a 1', [1, 3, 12, 3, 4, None, 9])
graph.add(
'lalalla al alallaa a 2', [7, -4, 10, None, 8, 3, 1],
secondary=True
)
graph.add('lalalla al alallaa a 3', [7, -14, -10, None, 8, 3, 1])
graph.add(
'lalalla al alallaa a 4', [7, 4, -10, None, 8, 3, 1],
secondary=True
)
graph.x_labels = ('a', 'b', 'c', 'd', 'e', 'f', 'g')
# graph.legend_at_bottom = True
return graph.render_response()
@app.route('/test/datetimeline')
def test_datetimeline():
line = DateTimeLine()
from datetime import timedelta, timezone
tz7 = timezone(timedelta(hours=7), 'GMT +7')
tzn4 = timezone(timedelta(hours=-4), 'GMT -4')
line.add(
'dt', [(datetime(2013, 1, 12, 8, tzinfo=tz7), 300),
(datetime(2013, 1, 12, 8), 412),
(datetime(2013, 1, 12, 8, tzinfo=tzn4), 823)]
)
line.x_label_rotation = 45
return line.render_response()
@app.route('/test/datetimeline_with_pytz')
def test_datetimeline_with_pytz():
import pytz
tz = pytz.timezone('US/Eastern')
line = DateTimeLine()
line.add(
'dt', [(tz.localize(datetime(2013, 1, 12, 8)), 300),
(tz.localize(datetime(2013, 1, 12, 10)), 600),
(tz.localize(datetime(2013, 1, 12, 14)), 30),
(tz.localize(datetime(2013, 1, 12, 16)), 200)]
)
from datetime import timezone
line.x_value_formatter = lambda x: (
x.replace(tzinfo=timezone.utc).astimezone(tz)).isoformat()
# line.x_value_formatter = lambda x: tz.normalize(
# x.replace(tzinfo=pytz.utc)).isoformat()
line.x_label_rotation = 45
return line.render_response()
@app.route('/test/order_min')
def test_order_min():
line = Line(order_min=-32)
line.add('_', [1, 32, 12, .4, .009])
return line.render_response()
@app.route('/test/custom_css_file')
def test_custom_css_file():
custom_css = '''
{{ id }}text {
fill: green;
font-family: monospace;
}
{{ id }}.legends .legend text {
font-size: {{ font_sizes.legend }};
}
{{ id }}.axis {
stroke: #666;
}
{{ id }}.axis text {
font-size: {{ font_sizes.label }};
font-family: sans;
stroke: none;
}
{{ id }}.axis.y text {
text-anchor: end;
}
{{ id }}#tooltip text {
font-size: {{ font_sizes.tooltip }};
}
{{ id }}.dot {
fill: yellow;
}
{{ id }}.color-0 {
stroke: #ff1100;
fill: #ff1100;
}
{{ id }}.color-1 {
stroke: #ffee00;
fill: #ffee00;
}
{{ id }}.color-2 {
stroke: #66bb44;
fill: #66bb44;
}
{{ id }}.color-3 {
stroke: #88bbdd;
fill: #88bbdd;
}
{{ id }}.color-4 {
stroke: #0000ff;
fill: #0000ff;
}
'''
custom_css_file = '/tmp/pygal_custom_style.css'
with open(custom_css_file, 'w') as f:
f.write(custom_css)
config = Config(fill=True, interpolate='cubic')
config.css.append(custom_css_file)
chart = StackedLine(config)
chart.add('A', [1, 3, 5, 16, 13, 3, 7])
chart.add('B', [5, 2, 3, 2, 5, 7, 17])
chart.add('C', [6, 10, 9, 7, 3, 1, 0])
chart.add('D', [2, 3, 5, 9, 12, 9, 5])
chart.add('E', [7, 4, 2, 1, 2, 10, 0])
return chart.render_response()
@app.route('/test/legendlink/<chart>')
def test_legend_link_for(chart):
chart = CHARTS_BY_NAME[chart]()
# link on chart and label
chart.add([{
'value': 2,
'label': 'This is red',
'tooltip': 'LOOLLOLOLO',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Red'
}
}],
title={
'title': 'Red',
'tooltip': 'Cramoisi',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Red'
}
})
chart.add({
'title': 'Yellow',
'xlink': {
'href': 'http://en.wikipedia.org/wiki/Yellow',
'target': '_blank'
}
}, 7)
return chart.render_response()
@app.route('/test/gradient/<chart>')
def test_gradient_for(chart):
config = Config()
config.style = styles['dark']
config.defs.append(
'''
<linearGradient id="gradient-0" x1="0" x2="0" y1="0" y2="1">
<stop offset="0%" stop-color="#ff5995" />
<stop offset="100%" stop-color="#feed6c" />
</linearGradient>
'''
)
config.defs.append(
'''
<linearGradient id="gradient-1" x1="0" x2="0" y1="0" y2="1">
<stop offset="0%" stop-color="#b6e354" />
<stop offset="100%" stop-color="#8cedff" />
</linearGradient>
'''
)
config.css.append(
'''inline:
.color-0 {
fill: url(#gradient-0) !important;
stroke: url(#gradient-0) !important;
}'''
)
config.css.append(
'''inline:
.color-1 {
fill: url(#gradient-1) !important;
stroke: url(#gradient-1) !important;
}'''
)
chart = CHARTS_BY_NAME[chart](config)
chart.add('1', [1, 3, 12, 3, 4, None, 9])
chart.add('2', [7, -4, 10, None, 8, 3, 1])
chart.x_labels = ('a', 'b', 'c', 'd', 'e', 'f', 'g')
chart.legend_at_bottom = True
chart.interpolate = 'cubic'
return chart.render_response()
@app.route('/test/erfinv/approx')
def test_erfinv():
from scipy import stats as sstats
chart = Line(show_dots=False)
chart.add('scipy', [sstats.norm.ppf(x / 1000) for x in range(1, 999)])
chart.add('approx', [stats.ppf(x / 1000) for x in range(1, 999)])
# chart.add('approx', [
# special.erfinv(x/1000) - erfinv(x/1000)
# for x in range(-999, 1000)])
return chart.render_response()
@app.route('/test/ci/<chart>')
def test_ci_for(chart):
chart = CHARTS_BY_NAME[chart](
style=styles['default'](
value_font_family='googlefont:Raleway',
value_colors=(None, None, 'blue', 'red', 'green'),
ci_colors=(None, 'magenta')
)
)
chart.add(
'Series 1', [
{
'value': 127.3,
'ci': {
'type': 'continuous',
'sample_size': 3534,
'stddev': 19,
'confidence': .99
}
},
{
'value': 127.3,
'ci': {
'type': 'continuous',
'sample_size': 3534,
'stddev': 19
}
},
{
'value': 127.3,
'ci': {
'type': 'continuous',
'sample_size': 3534,
'stddev': 19,
'confidence': .90
}
},
{
'value': 127.3,
'ci': {
'type': 'continuous',
'sample_size': 3534,
'stddev': 19,
'confidence': .75
}
},
]
)
chart.add(
'Series 2', [
{
'value': 34.5,
'ci': {
'type': 'dichotomous',
'sample_size': 3532
}
},
]
)
chart.add(
'Series 3', [
{
'value': 100,
'ci': {
'low': 50,
'high': 150
}
},
{
'value': 100,
'ci': {
'low': 75,
'high': 175
}
},
{
'value': 50,
'ci': {
'low': 50,
'high': 100
}
},
{
'value': 125,
'ci': {
'low': 120,
'high': 130
}
},
]
)
chart.range = (30, 200)
return chart.render_response()
@app.route('/test/interruptions')
def test_interruptions():
chart = Line(allow_interruptions=True)
chart.add(
'interrupt', [22, 34, 43, 12, None, 12, 55, None, 56],
allow_interruptions=False
)
chart.add(
'not interrupt', [
-a if a else None
for a in (22, 34, 43, 12, None, 12, 55, None, 56)
]
)
return chart.render_response()
return list(
sorted(
filter(
lambda x: x.startswith('test') and not x.endswith('_for'),
locals()
)
)
) + list(
sorted(
filter(
lambda x: x.startswith('test') and x.endswith('_for'), locals()
)
)
)
| 44,064 | Python | .py | 1,250 | 24.2176 | 79 | 0.462005 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,352 | data.py | Kozea_pygal/demo/moulinrouge/data.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
labels = [
'AURSAUTRAUIA', 'dpvluiqhu enuie', 'su sru a nanan a',
'09_28_3023_98120398', u'éàé瀮ð{æə|&'
]
series = {'Female': [4, 2, 3, 0, 2], 'Male': [5, 1, 1, 3, 2]}
| 961 | Python | .py | 23 | 39.913043 | 79 | 0.728942 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,353 | conf.py | Kozea_pygal/docs/conf.py |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# pygal documentation build configuration file, created by
# sphinx-quickstart on Wed Jul 1 14:31:10 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(os.path.abspath('.'), 'ext'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx',
'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'pygal_sphinx_directives'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'pygal'
copyright = '2012-2016, Florian Mounier'
author = 'Florian Mounier'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '3.0'
# The full version, including alpha/beta/rc tags.
release = '3.0.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pygaldoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc, 'pygal.tex', 'pygal Documentation', 'Florian Mounier',
'manual'
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'pygal', 'pygal Documentation', [author], 1)]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc, 'pygal', 'pygal Documentation', author, 'pygal',
'One line description of project.', 'Miscellaneous'
),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
| 9,949 | Python | .py | 225 | 42.448889 | 82 | 0.724277 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,354 | pygal_sphinx_directives.py | Kozea_pygal/docs/ext/pygal_sphinx_directives.py |
# -*- coding: utf-8 -*-
# This file is part of pygal_sphinx_directives
#
# Pygal sphinx integration
# Copyright © 2012-2016 Florian Mounier
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
from traceback import format_exc, print_exc
import docutils.core
from docutils.parsers.rst import Directive
from sphinx.directives.code import CodeBlock
import pygal
# Patch default style
pygal.config.Config.style.value = pygal.style.RotateStyle(
'#2980b9',
background='#fcfcfc',
plot_background='#ffffff',
foreground='#707070',
foreground_strong='#404040',
foreground_subtle='#909090',
opacity='.8',
opacity_hover='.9',
transition='400ms ease-in'
)
class PygalDirective(Directive):
"""Execute the given python file and puts its result in the document."""
required_arguments = 0
optional_arguments = 2
final_argument_whitespace = True
has_content = True
def run(self):
width, height = map(int, self.arguments[:2]
) if len(self.arguments) >= 2 else (600, 400)
if len(self.arguments) == 1:
self.render_fix = bool(self.arguments[0])
elif len(self.arguments) == 3:
self.render_fix = bool(self.arguments[2])
else:
self.render_fix = False
self.content = list(self.content)
content = list(self.content)
if self.render_fix:
content[-1] = 'rv = ' + content[-1]
code = '\n'.join(content)
scope = {'pygal': pygal}
try:
exec(code, scope)
except Exception:
print(code)
print_exc()
return [
docutils.nodes.system_message(
'An exception has occurred during code parsing:'
' \n %s' % format_exc(),
type='ERROR',
source='/',
level=3
)
]
if self.render_fix:
_rv = scope['rv']
else:
chart = None
for key, value in scope.items():
if isinstance(value, pygal.graph.graph.Graph):
chart = value
self.content.append(key + '.render()')
break
if chart is None:
return [
docutils.nodes.system_message(
'No instance of graph found',
level=3,
type='ERROR',
source='/'
)
]
chart.config.width = width
chart.config.height = height
chart.explicit_size = True
try:
svg = '<embed src="%s" />' % chart.render_data_uri()
except Exception:
return [
docutils.nodes.system_message(
'An exception has occurred during graph generation:'
' \n %s' % format_exc(),
type='ERROR',
source='/',
level=3
)
]
return [docutils.nodes.raw('', svg, format='html')]
class PygalWithCode(PygalDirective):
def run(self):
node_list = super(PygalWithCode, self).run()
node_list.extend(
CodeBlock(
self.name, ['python'], self.options, self.content, self.lineno,
self.content_offset, self.block_text, self.state,
self.state_machine
).run()
)
return [docutils.nodes.compound('', *node_list)]
class PygalTable(Directive):
"""Execute the given python file and puts its result in the document."""
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = True
has_content = True
def run(self):
self.content = list(self.content)
content = list(self.content)
content[-1] = 'rv = ' + content[-1]
code = '\n'.join(content)
scope = {'pygal': pygal}
try:
exec(code, scope)
except Exception:
print_exc()
return [
docutils.nodes.system_message(
'An exception has occurred during code parsing:'
' \n %s' % format_exc(),
type='ERROR',
source='/',
level=3
)
]
rv = scope['rv']
return [docutils.nodes.raw('', rv, format='html')]
class PygalTableWithCode(PygalTable):
def run(self):
node_list = super(PygalTableWithCode, self).run()
node_list.extend(
CodeBlock(
self.name, ['python'], self.options, self.content, self.lineno,
self.content_offset, self.block_text, self.state,
self.state_machine
).run()
)
return [docutils.nodes.compound('', *node_list)]
def setup(app):
app.add_directive('pygal', PygalDirective)
app.add_directive('pygal-code', PygalWithCode)
app.add_directive('pygal-table', PygalTable)
app.add_directive('pygal-table-code', PygalTableWithCode)
return {'version': '1.0.1'}
| 5,797 | Python | .py | 159 | 26.106918 | 79 | 0.560463 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,355 | test_stacked.py | Kozea_pygal/pygal/test/test_stacked.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Stacked chart related tests"""
from pygal import StackedLine
def test_stacked_line():
"""Test stacked line"""
stacked = StackedLine()
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
def test_stacked_line_reverse():
"""Test stack from top stacked line"""
stacked = StackedLine(stack_from_top=True)
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('11 (+1)', '14 (+2)', '10', '12')
)
def test_stacked_line_log():
"""Test logarithmic stacked line"""
stacked = StackedLine(logarithmic=True)
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
def test_stacked_line_interpolate():
"""Test interpolated stacked line"""
stacked = StackedLine(interpolate='cubic')
stacked.add('one_two', [1, 2])
stacked.add('ten_twelve', [10, 12])
q = stacked.render_pyquery()
assert set([v.text for v in q("desc.value")]) == set(
('1', '2', '11 (+10)', '14 (+12)')
)
| 2,148 | Python | .tac | 56 | 34.625 | 79 | 0.652424 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,356 | horizontalstackedline.py | Kozea_pygal/pygal/graph/horizontalstackedline.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Horizontal Stacked Line graph"""
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.stackedline import StackedLine
class HorizontalStackedLine(HorizontalGraph, StackedLine):
"""Horizontal Stacked Line graph"""
def _plot(self):
"""Draw the lines in reverse order"""
for serie in self.series[::-1]:
self.line(serie)
for serie in self.secondary_series[::-1]:
self.line(serie, True)
| 1,232 | Python | .tac | 29 | 39.413793 | 79 | 0.743119 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,357 | horizontalstackedbar.py | Kozea_pygal/pygal/graph/horizontalstackedbar.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""Horizontal stacked graph"""
from pygal.graph.horizontal import HorizontalGraph
from pygal.graph.stackedbar import StackedBar
class HorizontalStackedBar(HorizontalGraph, StackedBar):
"""Horizontal Stacked Bar graph"""
| 1,000 | Python | .tac | 23 | 42.173913 | 79 | 0.784394 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,358 | stackedline.py | Kozea_pygal/pygal/graph/stackedline.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Stacked Line chart: Like a line chart but with all lines stacking
on top of the others. Typically used with the fill=True option.
"""
from pygal.adapters import none_to_zero
from pygal.graph.line import Line
class StackedLine(Line):
"""Stacked Line graph class"""
_adapters = [none_to_zero]
def __init__(self, *args, **kwargs):
"""Custom variable initialization"""
self._previous_line = None
super(StackedLine, self).__init__(*args, **kwargs)
def _value_format(self, value, serie, index):
"""
Display value and cumulation
"""
sum_ = serie.points[index][1]
if serie in self.series and (
self.stack_from_top
and self.series.index(serie) == self._order - 1
or not self.stack_from_top and self.series.index(serie) == 0):
return super(StackedLine, self)._value_format(value)
return '%s (+%s)' % (self._y_format(sum_), self._y_format(value))
def _fill(self, values):
"""Add extra values to fill the line"""
if not self._previous_line:
self._previous_line = values
return super(StackedLine, self)._fill(values)
new_values = values + list(reversed(self._previous_line))
self._previous_line = values
return new_values
def _points(self, x_pos):
"""
Convert given data values into drawable points (x, y)
and interpolated points if interpolate option is specified
"""
for series_group in (self.series, self.secondary_series):
accumulation = [0] * self._len
for serie in series_group[::-1 if self.stack_from_top else 1]:
accumulation = list(map(sum, zip(accumulation, serie.values)))
serie.points = [(x_pos[i], v)
for i, v in enumerate(accumulation)]
if serie.points and self.interpolate:
serie.interpolated = self._interpolate(x_pos, accumulation)
else:
serie.interpolated = []
def _plot(self):
"""Plot stacked serie lines and stacked secondary lines"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.line(serie)
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.line(serie, True)
| 3,161 | Python | .tac | 71 | 36.746479 | 79 | 0.639403 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,359 | stackedbar.py | Kozea_pygal/pygal/graph/stackedbar.py |
# -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2016 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Stacked Bar chart: Like a bar chart but with all series stacking
on top of the others instead of being displayed side by side.
"""
from pygal.adapters import none_to_zero
from pygal.graph.bar import Bar
class StackedBar(Bar):
"""Stacked Bar graph class"""
_adapters = [none_to_zero]
def _get_separated_values(self, secondary=False):
"""Separate values between positives and negatives stacked"""
series = self.secondary_series if secondary else self.series
transposed = list(zip(*[serie.values for serie in series]))
positive_vals = [
sum([val for val in vals if val is not None and val >= self.zero])
for vals in transposed
]
negative_vals = [
sum([val for val in vals if val is not None and val < self.zero])
for vals in transposed
]
return positive_vals, negative_vals
def _compute_box(self, positive_vals, negative_vals):
"""Compute Y min and max"""
if self.range and self.range[0] is not None:
self._box.ymin = self.range[0]
else:
self._box.ymin = negative_vals and min(
min(negative_vals), self.zero
) or self.zero
if self.range and self.range[1] is not None:
self._box.ymax = self.range[1]
else:
self._box.ymax = positive_vals and max(
max(positive_vals), self.zero
) or self.zero
def _compute(self):
"""Compute y min and max and y scale and set labels"""
positive_vals, negative_vals = self._get_separated_values()
if self.logarithmic:
positive_vals = list(
filter(lambda x: x > self.zero, positive_vals)
)
negative_vals = list(
filter(lambda x: x > self.zero, negative_vals)
)
self._compute_box(positive_vals, negative_vals)
positive_vals = positive_vals or [self.zero]
negative_vals = negative_vals or [self.zero]
self._x_pos = [
x / self._len for x in range(self._len + 1)
] if self._len > 1 else [0, 1] # Center if only one value
self._points(self._x_pos)
self.negative_cumulation = [0] * self._len
self.positive_cumulation = [0] * self._len
if self.secondary_series:
positive_vals, negative_vals = self._get_separated_values(True)
positive_vals = positive_vals or [self.zero]
negative_vals = negative_vals or [self.zero]
self.secondary_negative_cumulation = [0] * self._len
self.secondary_positive_cumulation = [0] * self._len
self._pre_compute_secondary(positive_vals, negative_vals)
self._x_pos = [(i + .5) / self._len for i in range(self._len)]
def _pre_compute_secondary(self, positive_vals, negative_vals):
"""Compute secondary y min and max"""
self._secondary_min = (
negative_vals and min(min(negative_vals), self.zero)
) or self.zero
self._secondary_max = (
positive_vals and max(max(positive_vals), self.zero)
) or self.zero
def _bar(self, serie, parent, x, y, i, zero, secondary=False):
"""Internal stacking bar drawing function"""
if secondary:
cumulation = (
self.secondary_negative_cumulation
if y < self.zero else self.secondary_positive_cumulation
)
else:
cumulation = (
self.negative_cumulation
if y < self.zero else self.positive_cumulation
)
zero = cumulation[i]
cumulation[i] = zero + y
if zero == 0:
zero = self.zero
y -= self.zero
y += zero
width = (self.view.x(1) - self.view.x(0)) / self._len
x, y = self.view((x, y))
y = y or 0
series_margin = width * self._series_margin
x += series_margin
width -= 2 * series_margin
if self.secondary_series:
width /= 2
x += int(secondary) * width
serie_margin = width * self._serie_margin
x += serie_margin
width -= 2 * serie_margin
height = self.view.y(zero) - y
r = serie.rounded_bars * 1 if serie.rounded_bars else 0
self.svg.transposable_node(
parent,
'rect',
x=x,
y=y,
rx=r,
ry=r,
width=width,
height=height,
class_='rect reactive tooltip-trigger'
)
return x, y, width, height
def _plot(self):
"""Draw bars for series and secondary series"""
for serie in self.series[::-1 if self.stack_from_top else 1]:
self.bar(serie)
for serie in self.secondary_series[::-1 if self.stack_from_top else 1]:
self.bar(serie, True)
| 5,711 | Python | .tac | 139 | 31.870504 | 79 | 0.597227 | Kozea/pygal | 2,634 | 411 | 197 | LGPL-3.0 | 9/5/2024, 5:11:10 PM (Europe/Amsterdam) |
| 11,360 | live-docs.py | NixOS_nixops/live-docs.py |
#!/usr/bin/env nix-shell
# !nix-shell ./shell.nix -i python3
from livereload import Server, shell
server = Server()
build_docs = shell("make html", cwd="doc")
print("Doing an initial build of the docs...")
build_docs()
server.watch("doc/*.rst", build_docs)
server.watch("doc/**/*.rst", build_docs)
server.serve(root="doc/_build/html")
| 340 | Python | .py | 10 | 32.5 | 46 | 0.716923 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,361 | mypy-ratchet.sh | NixOS_nixops/ci/mypy-ratchet.sh |
#!/usr/bin/env bash
set -eu
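# Overview: run ci/run-ratchet.sh on the PR base branch and on HEAD, diff the
# two mypy imprecision reports, then let ci/ratchet.py fail the job if any
# module's imprecision percentage went up.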
cd "${0%/*}/.."
scratch=$(mktemp -d -t tmp.XXXXXXXXXX)
function finish {
rm -rf "$scratch"
}
# trap finish EXIT
cp ci/run-ratchet.sh $scratch/
head=$(git rev-parse HEAD)
base=origin/${GITHUB_BASE_REF:-master}
git fetch origin
printf "Checking base branch at %s, then PR at %s...\n" "$base" "$head"
git checkout "$base"
nix-shell shell.nix --run "$scratch/run-ratchet.sh $scratch base"
git checkout "$head"
nix-shell shell.nix --run "$scratch/run-ratchet.sh $scratch head"
diff --ignore-all-space -u100 -r "$scratch/base/" "$scratch/head/" || true
nix-shell shell.nix --run "mypy ./ci/ratchet.py"
nix-shell shell.nix --run "python3 ./ci/ratchet.py $scratch"
| 701 | Python | .py | 20 | 33.3 | 75 | 0.70597 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,362 | ratchet.py | NixOS_nixops/ci/ratchet.py |
import sys
import re
from typing import Dict, Tuple, Optional, List, TextIO
report: Dict[str, List[Optional[float]]] = {}
extract_line = re.compile(
r"^\|\s+(?P<module>[^\s]*)\s+\|\s+(?P<percent>\d\d?\.\d\d)% imprecise \|"
)
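# Parses rows of mypy's index.txt report, e.g. (illustrative):
# |   nixops.deployment   |   12.34% imprecise |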
def load(idx: int, fp: TextIO):
global report
for line in fp:
r = extract_line.match(line)
if r:
module: str = r.group("module")
percent: float = float(r.group("percent"))
if module not in report:
report[module] = [None, None]
report[module][idx] = percent
with open(f"{sys.argv[1]}/base/index.txt") as fp:
load(0, fp)
with open(f"{sys.argv[1]}/head/index.txt") as fp:
load(1, fp)
good: List[Tuple[str, float, float]] = []
bad: List[Tuple[str, float, float]] = []
for module, values in report.items():
base: Optional[float] = values[0]
head: Optional[float] = values[1]
if base is not None and head is not None:
if base > head:
good.append((module, base, head))
elif base < head:
bad.append((module, base, head))
if len(bad) > 0:
print("Imprecision went up:")
for module, base, head in bad:
print(f"{module}:\t\t{base} -> {head}")
if len(good) > 0:
print("Imprecision went down:")
for module, base, head in good:
print(f"{module}:\t\t{base} -> {head}")
if len(bad) > 0:
sys.exit(1)
| 1,408 | Python | .py | 41 | 28.439024 | 77 | 0.591581 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,363 | __init__.py | NixOS_nixops/tests/__init__.py |
# -*- coding: utf-8 -*-
import os
import sys
import threading
from os import path
import nixops.statefile
_multiprocess_shared_ = True
db_file = "%s/test.nixops" % (path.dirname(__file__))
def setup():
nixops.statefile.StateFile(db_file, writable=True).close()
def destroy(sf, uuid):
depl = sf.open_deployment(uuid)
depl.logger.set_autoresponse("y")
try:
depl.clean_backups(keep=0)
except Exception:
pass
try:
depl.destroy_resources()
except Exception:
pass
depl.delete()
depl.logger.log("deployment ‘{0}’ destroyed".format(uuid))
def teardown():
sf = nixops.statefile.StateFile(db_file, writable=True)
uuids = sf.query_deployments()
threads = []
for uuid in uuids:
threads.append(threading.Thread(target=destroy, args=(sf, uuid)))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
uuids_left = sf.query_deployments()
sf.close()
if not uuids_left:
os.remove(db_file)
else:
sys.stderr.write(
"warning: not all deployments have been destroyed; some resources may still exist!\n"
)
| 1,184 | Python | .py | 41 | 23.487805 | 97 | 0.660477 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,364 | test_parallel.py | NixOS_nixops/tests/unit/test_parallel.py |
import unittest
from typing import Callable, Any
from nixops.parallel import run_tasks, MultipleExceptions
__all__ = ["ParallelTest"]
class ExampleTask:
name: str
todo: Callable[[], Any]
def __init__(self, name, todo):
self.name = name
self.todo = todo
class ComplexException(Exception):
def __init__(self, arg1: str, arg2: str) -> None:
pass
def err(msg: str):
raise Exception(msg)
def complex_err(msg1: str, msg2: str):
raise ComplexException(msg1, msg2)
class ParallelTest(unittest.TestCase):
def test_okay(self):
self.assertEqual(
run_tasks(
1,
[ExampleTask("foo", lambda: "ok"), ExampleTask("bar", lambda: "ok")],
lambda task: task.todo(),
),
["ok", "ok"],
)
def test_one_exception(self):
self.assertRaises(
Exception,
run_tasks,
1,
[
ExampleTask("foo", lambda: "ok"),
ExampleTask("bar", lambda: err("oh no")),
],
lambda task: task.todo(),
)
def test_two_exceptions(self):
self.assertRaises(
MultipleExceptions,
run_tasks,
1,
[
ExampleTask("foo", lambda: err("uh oh")),
ExampleTask("bar", lambda: err("oh no")),
],
lambda task: task.todo(),
)
def test_complicated_exception(self):
self.assertRaises(
ComplexException,
run_tasks,
1,
[ExampleTask("foo", lambda: complex_err("uh", "oh"))],
lambda task: task.todo(),
)
def test_complicated_two_exceptions(self):
self.assertRaises(
MultipleExceptions,
run_tasks,
1,
[
ExampleTask("foo", lambda: complex_err("uh", "oh")),
ExampleTask("baz", lambda: err("oh no")),
],
lambda task: task.todo(),
)
| 2,065 | Python | .py | 68 | 20.235294 | 85 | 0.513636 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,365 | test_logger.py | NixOS_nixops/tests/unit/test_logger.py |
import unittest
from io import StringIO
from nixops.logger import Logger
class RootLoggerTest(unittest.TestCase):
def setUp(self):
self.logfile = StringIO()
self.root_logger = Logger(self.logfile)
def assert_log(self, value):
self.assertEqual(self.logfile.getvalue(), value)
def test_simple(self):
self.root_logger.log("line1")
self.assert_log("line1\n")
self.root_logger.log("line2")
self.assert_log("line1\nline2\n")
def test_prefix(self):
self.root_logger.log_start("xxx: ", "foo")
self.root_logger.log_end("xxx: ", "bar")
self.assert_log("xxx: foobar\n")
def test_prefix_mixed(self):
self.root_logger.log_start("xxx: ", "begin1")
self.root_logger.log_start("yyy: ", "begin2")
self.root_logger.log_end("xxx: ", "end1")
self.root_logger.log_end("yyy: ", "end2")
self.assert_log("xxx: begin1\nyyy: begin2\nxxx: end1\nyyy: end2\n")
class MachineLoggerTest(RootLoggerTest):
def setUp(self):
RootLoggerTest.setUp(self)
self.m1_logger = self.root_logger.get_logger_for("machine1")
self.m2_logger = self.root_logger.get_logger_for("machine2")
def test_simple(self):
self.m2_logger.success("success!")
self.m1_logger.warn("warning!")
self.assert_log("machine2> success!\nmachine1> warning: warning!\n")
def test_continue(self):
self.m1_logger.log_start("Begin...")
for dummy in range(10):
self.m1_logger.log_continue(".")
self.m1_logger.log_end("end.")
self.assert_log("machine1> Begin.............end.\n")
def test_continue_mixed(self):
self.m1_logger.log_start("Begin 1...")
self.m2_logger.log_start("Begin 2...")
for dummy in range(10):
self.m1_logger.log_continue(".")
self.m2_logger.log_continue(".")
self.m1_logger.log_end("end 1.")
self.m2_logger.log_end("end 2.")
self.assert_log(
"machine1> Begin 1...\nmachine2> Begin 2...\n"
"machine1> .\nmachine2> .\nmachine1> .\nmachine2> .\n"
"machine1> .\nmachine2> .\nmachine1> .\nmachine2> .\n"
"machine1> .\nmachine2> .\nmachine1> .\nmachine2> .\n"
"machine1> .\nmachine2> .\nmachine1> .\nmachine2> .\n"
"machine1> .\nmachine2> .\nmachine1> .\nmachine2> .\n"
"machine1> end 1.\nmachine2> end 2.\n"
)
| 2,474 | Python | .py | 56 | 35.553571 | 76 | 0.604661 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,366 | test_device_name_to_boto_expected.py | NixOS_nixops/tests/unit/test_device_name_to_boto_expected.py |
import unittest
from nixops.util import device_name_to_boto_expected
class TestDeviceNameToBotoExpected(unittest.TestCase):
def test_device_name_to_boto_expected(self):
self.assertEqual(
device_name_to_boto_expected("/dev/sdf"),
"/dev/sdf",
)
self.assertEqual(device_name_to_boto_expected("/dev/sdg"), "/dev/sdg")
self.assertEqual(device_name_to_boto_expected("/dev/xvdf"), "/dev/sdf")
self.assertEqual(device_name_to_boto_expected("/dev/xvdg"), "/dev/sdg")
self.assertEqual(device_name_to_boto_expected("/dev/nvme1n1"), "/dev/sdf")
self.assertEqual(device_name_to_boto_expected("/dev/nvme2n1"), "/dev/sdg")
# TODO
# self.assertEqual(
# device_name_to_boto_expected('/dev/nvme26n1'),
# '/dev/sdg'
# )
self.assertEqual(device_name_to_boto_expected("/dev/nvme2n1p1"), "/dev/sdg1")
self.assertEqual(device_name_to_boto_expected("/dev/nvme2n1p6"), "/dev/sdg6")
# TODO
# self.assertEqual(
# device_name_to_boto_expected('/dev/nvme26n1p6'),
# '/dev/sdg6'
# )
| 1,152 | Python | .py | 25 | 37.76 | 85 | 0.620107 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,367 | test_util.py | NixOS_nixops/tests/unit/test_util.py |
from typing import Any, Sequence, Mapping
import json
from nixops.logger import Logger
from io import StringIO
import unittest
import typeguard
from nixops import util
class TestUtilTest(unittest.TestCase):
def setUp(self):
self.logfile = StringIO()
self.root_logger = Logger(self.logfile)
self.logger = self.root_logger.get_logger_for("dummymachine")
def test_assert_logged_exec(self):
msg = "hello"
ret = util.logged_exec(
command=["cat"],
logger=self.logger,
stdin_string=msg,
capture_stdout=True,
)
self.assertEqual(ret, msg)
def test_assert_logged_exec_stdin_none(self):
msg = "hello"
ret = util.logged_exec(
command=["echo", msg],
logger=self.logger,
capture_stdout=True,
)
if not isinstance(ret, str):
raise ValueError("Wrong return type!")
self.assertEqual(ret.strip(), msg)
def test_immutable_dict(self):
d = {
"foo": "bar",
"list": [1, 2, 3],
"nested": {"x": "y"},
"nested_in_list": [{"x": "y"}],
}
# Assert that the shape of the immutable dict is the same as the input dict
i: util.ImmutableMapping[str, Any] = util.ImmutableMapping(d)
self.assertEqual(d["foo"], i["foo"])
tup = i["list"]
self.assertTrue(isinstance(tup, tuple))
self.assertEqual(list(tup), d["list"])
# Ensure our encoder round-trips okay
self.assertEqual(json.dumps(i, cls=util.NixopsEncoder), json.dumps(d))
dic = i["nested"]
self.assertTrue(isinstance(dic, util.ImmutableMapping))
self.assertEqual(
dic["x"],
d["nested"]["x"], # type: ignore
)
dic_l = i["nested_in_list"][0]
self.assertTrue(isinstance(dic_l, util.ImmutableMapping))
# Assert immutability
def _assign():
i["z"] = 1
self.assertRaises(TypeError, _assign)
def test_immutable_object(self):
class SubResource(util.ImmutableValidatedObject):
x: int
class HasSubResource(util.ImmutableValidatedObject):
sub: SubResource
r = HasSubResource(sub={"x": 1})
self.assertTrue(isinstance(r.sub.x, int))
self.assertEqual(r.sub.x, 1)
self.assertRaises(typeguard.TypeCheckError, lambda: SubResource(x="a string"))
def _assign():
r = SubResource(x=1)
r.x = 2
self.assertRaises(AttributeError, _assign)
# Fuzz not passed, should raise TypeError
class MustRaise(util.ImmutableValidatedObject):
fuzz: str
self.assertRaises(typeguard.TypeCheckError, lambda: MustRaise())
class WithDefaults(util.ImmutableValidatedObject):
x: int = 1
self.assertEqual(WithDefaults().x, 1)
# Extensible
class A(util.ImmutableValidatedObject):
x: int
class B(A):
y: int
a = A(x=1)
b = B(a, y=1)
self.assertEqual(a.x, b.x)
self.assertEqual(b.x, 1)
# Test Sequence[ImmutableValidatedObject]
class WithSequence(util.ImmutableValidatedObject):
subs: Sequence[SubResource]
seq = WithSequence(subs=[{"x": 1}, {"x": 2}])
for i in seq.subs:
self.assertIsInstance(i, SubResource)
# Test Mapping[str, ImmutableValidatedObject]
class WithMapping(util.ImmutableValidatedObject):
mapping: Mapping[str, SubResource]
mapped = WithMapping(mapping={"aaa": {"x": 1}, "bbb": {"x": 2}})
for _, v in mapped.mapping.items():
self.assertIsInstance(v, SubResource)
| 3,797 | Python | .py | 99 | 28.757576 | 86 | 0.597325 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,368 | test_split_openssh_args.py | NixOS_nixops/tests/unit/test_split_openssh_args.py |
import unittest
from nixops.ssh_util import SSH
class SplitSSHArgs(unittest.TestCase):
def assert_split(self, orig, expected_flags, expected_command):
flags, command = SSH.split_openssh_args(orig)
self.assertEqual(flags, expected_flags)
self.assertEqual(command, expected_command)
def test_empty(self):
self.assert_split([], [], [])
def test_invalid(self):
self.assert_split(["-o"], ["-o"], [])
self.assert_split(["-xo"], ["-x", "-o"], [])
self.assert_split(["--", "-ox"], [], ["-ox"])
self.assert_split(["-"], ["-"], [])
self.assert_split(["--help"], ["--help"], [])
def test_simple(self):
self.assert_split(["-x12", "command"], ["-x", "-1", "-2"], ["command"])
self.assert_split(["-oOpt", "command"], ["-oOpt"], ["command"])
self.assert_split(["-oOpt", "--", "command"], ["-oOpt"], ["command"])
self.assert_split(["ls", "-l", "--", "x"], [], ["ls", "-l", "--", "x"])
def test_mixed(self):
self.assert_split(["-xoFoo", "xxx"], ["-x", "-oFoo"], ["xxx"])
self.assert_split(["-1_oFoo", "xxx"], ["-1", "-_", "-oFoo"], ["xxx"])
| 1,171 | Python | .py | 23 | 43.521739 | 79 | 0.523225 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,369 | test_nix_expr.py | NixOS_nixops/tests/unit/test_nix_expr.py |
import functools
import unittest
from textwrap import dedent
from nixops.nix_expr import py2nix, nix2py, nixmerge
from nixops.nix_expr import RawValue, Function, Call
__all__ = ["Nix2PyTest", "NixMergeTest"]
class Py2NixTestBase(unittest.TestCase):
def assert_nix(self, nix_expr, expected, maxwidth=80, inline=False):
result = py2nix(nix_expr, maxwidth=maxwidth, inline=inline)
self.assertEqual(
result, expected, "Expected:\n{0}\nGot:\n{1}".format(expected, result)
)
def test_numeric(self):
self.assert_nix(123, "123")
self.assert_nix(-123, "builtins.sub 0 123")
self.assertRaises(ValueError, py2nix, 123.4)
def test_boolean(self):
self.assert_nix(True, "true")
self.assert_nix(False, "false")
def test_null(self):
self.assert_nix(None, "null")
def test_invalid(self):
self.assertRaises(ValueError, py2nix, lambda: 123)
self.assertRaises(ValueError, py2nix, Exception)
def test_empty(self):
self.assert_nix("", '""')
self.assert_nix({}, "{}")
self.assert_nix([], "[]")
def test_string(self):
self.assert_nix("xyz", '"xyz"')
self.assert_nix("a'b\"c", r'''"a'b\"c"''')
self.assert_nix("abc\ndef\nghi", r'"abc\ndef\nghi"')
self.assert_nix("abc\ndef\nghi\n", "''\n abc\n def\n ghi\n''", maxwidth=0)
self.assert_nix("\\foo", r'"\\foo"')
self.assert_nix("xx${yy}zz", r'"xx\${yy}zz"')
self.assert_nix("xx\n${yy}\nzz\n", "''\n xx\n ''${yy}\n zz\n''", maxwidth=0)
self.assert_nix("xx\n''yy\nzz\n", "''\n xx\n '''yy\n zz\n''", maxwidth=0)
def test_raw_value(self):
self.assert_nix(
{"a": RawValue("import <something>")}, "{ a = import <something>; }"
)
self.assert_nix([RawValue("!")], "[ ! ]")
def test_list(self):
self.assert_nix([1, 2, 3], "[ 1 2 3 ]")
self.assert_nix(["a", "b", "c"], '[ "a" "b" "c" ]')
self.assert_nix(["a\na\na\n", "b\nb\n", "c"], r'[ "a\na\na\n" "b\nb\n" "c" ]')
self.assert_nix(
["a\na\na\n", "b\nb\n", "c"],
'[\n "a\\na\\na\\n"\n "b\\nb\\n"\n "c"\n]',
maxwidth=15,
)
def test_nested_list(self):
match = dedent(
"""
[
[ 1 2 3 ]
[ 4 5 6 ]
[
[
6
6
6
]
[
[
7
7
7
]
[
8
8
8
]
[
9
9
9
]
]
]
]
"""
).strip()
self.assert_nix(
[[1, 2, 3], [4, 5, 6], [[6, 6, 6], [[7, 7, 7], [8, 8, 8], [9, 9, 9]]]],
match,
maxwidth=12,
)
def test_nested_singletons(self):
match = dedent(
"""
[ [ [
1
2
[ [ 3 ] ]
] ] ]
"""
).strip()
self.assert_nix([[[1, 2, [[3]]]]], match, maxwidth=12)
def test_attrkeys(self):
self.assert_nix({"aAa": 123}, "{ aAa = 123; }")
self.assert_nix({"a.a": 123}, '{ "a.a" = 123; }')
self.assert_nix({"\\": 123}, r'{ "\\" = 123; }')
self.assert_nix({"a1": 123}, "{ a1 = 123; }")
self.assert_nix({"1a": 123}, '{ "1a" = 123; }')
self.assert_nix({"_aA": 123}, "{ _aA = 123; }")
self.assertRaises(KeyError, py2nix, {"": 123})
self.assertRaises(KeyError, py2nix, {123: 123})
def test_attrvalues(self):
self.assert_nix({"a": "abc"}, '{ a = "abc"; }')
self.assert_nix({"a": "a\nb\nc\n"}, r'{ a = "a\nb\nc\n"; }')
self.assert_nix({"A": [1, 2, 3]}, r"{ A = [ 1 2 3 ]; }")
def test_nested_attrsets(self):
match = dedent(
"""
{
aaa = {
bbb.ccc = 123;
cCc = 456;
};
xxx = [
1
2
3
];
yyy.y1.y2.y3 = [
"a"
"b"
{
c = "d";
}
];
}
"""
).strip()
self.assert_nix(
{
"aaa": {"bbb": {"ccc": 123}, "cCc": 456},
"xxx": [1, 2, 3],
"yyy": {"y1": {"y2": {"y3": ["a", "b", {"c": "d"}]}}},
},
match,
maxwidth=0,
)
self.assert_nix(
{"fileSystems": {"/": {"fsType": "btrfs", "label": "root"}}},
'{ fileSystems."/" = { fsType = "btrfs"; label = "root"; }; }',
)
def test_functions(self):
self.assert_nix(Function("Aaa", RawValue("bbb")), "Aaa: bbb")
self.assert_nix(Function("{ ... }", [1, 2, 3]), "{ ... }: [ 1 2 3 ]")
self.assert_nix(Function("{ ... }", "a\nb\nc\n"), r'{ ... }: "a\nb\nc\n"')
self.assert_nix(
Function("{ ... }", "a\nb\nc\n"),
"{ ... }: ''\n a\n b\n c\n''",
maxwidth=0,
)
self.assert_nix(
Function("xxx", {"a": {"b": "c"}}), 'xxx: {\n a.b = "c";\n}', maxwidth=0
)
def test_nested_functions(self):
match = dedent(
"""
{ config, pkgs, ... }: {
a.b.c = 1;
b.c.d = 2;
d.e = [ "e" "f" ];
e = f: {
x = ''
aaa
bbb
ccc
'';
};
}
"""
).strip()
self.assert_nix(
Function(
"{ config, pkgs, ... }",
{
"a": {"b": {"c": 1}},
"b": {"c": {"d": 2}},
"d": {"e": ["e", "f"]},
"e": Function("f", {"x": "aaa\nbbb\nccc\n"}),
},
),
match,
maxwidth=26,
)
def test_function_call(self):
self.assert_nix(
Call(RawValue("fun_call"), {"a": "b"}), '( fun_call { a = "b"; } )'
)
self.assert_nix(
Call(RawValue("multiline_call"), {"a": "b"}),
'(\n multiline_call\n {\n a = "b";\n }\n)',
maxwidth=0,
)
def test_stacked_attrs(self):
self.assert_nix({("a", "b"): "c", ("d"): "e"}, '{ a.b = "c"; d = "e"; }')
self.assert_nix(
{"a": {("b", "c"): {}}, ("a", "b", "c", "d"): "x"}, '{ a.b.c.d = "x"; }'
)
self.assert_nix(
{("a", "a"): 1, ("a", "b"): 2, "a": {"c": 3}},
"{ a = { a = 1; b = 2; c = 3; }; }",
)
self.assert_nix(
{("a", "b"): [1, 2], "a": {"b": [3, 4]}}, "{ a.b = [ 1 2 3 4 ]; }"
)
# a more real-world example
self.assert_nix(
{
("services", "xserver"): {
"enable": True,
"layout": "dvorak",
("windowManager", "default"): "i3",
("windowManager", "i3"): {
"enable": True,
"configFile": "/somepath",
},
("desktopManager", "default"): "none",
"desktopManager": {"e17": {"enable": True}},
}
},
dedent(
"""
{
services.xserver = {
desktopManager = { default = "none"; e17.enable = true; };
enable = true;
layout = "dvorak";
windowManager = {
default = "i3";
i3 = { configFile = "/somepath"; enable = true; };
};
};
}
"""
).strip(),
)
self.assertRaises(KeyError, py2nix, {(): 1})
self.assertRaises(ValueError, py2nix, {("a", "b"): 1, "a": 2})
def test_inline(self):
self.assert_nix(
{"foo": ["a\nb\nc\n"], "bar": ["d\ne\nf\n"]},
r'{ bar = [ "d\ne\nf\n" ]; foo = [ "a\nb\nc\n" ]; }',
inline=True,
maxwidth=0,
)
self.assert_nix(
{"a\nb": ["c", "d"], "e\nf": ["g", "h"]},
r'{ "a\nb" = [ "c" "d" ]; "e\nf" = [ "g" "h" ]; }',
inline=True,
maxwidth=0,
)
def test_list_compound(self):
self.assert_nix(
[Call(RawValue("123 //"), 456), RawValue("a b c")],
"[ (( 123 // 456 )) (a b c) ]",
)
self.assert_nix(
[
RawValue("a b c"),
{"cde": [RawValue("1,2,3"), RawValue("4 5 6"), RawValue("7\n8\n9")]},
],
"[ (a b c) { cde = [ 1,2,3 (4 5 6) (7\n8\n9) ]; } ]",
)
class Nix2PyTest(unittest.TestCase):
def test_simple(self):
self.assertEqual(py2nix(nix2py("{\na = b;\n}"), maxwidth=0), "{\na = b;\n}")
self.assertEqual(py2nix(nix2py("\n{\na = b;\n}\n"), maxwidth=0), "{\na = b;\n}")
def test_nested(self):
self.assertEqual(
py2nix([nix2py("a\nb\nc")], maxwidth=0), "[\n (a\n b\n c)\n]"
)
self.assertEqual(
py2nix({"foo": nix2py("a\nb\nc"), "bar": nix2py("d\ne\nf")}, maxwidth=0),
# ugly, but probably won't happen in practice
"{\n bar = d\n e\n f;\n foo = a\n b\n c;\n}",
)
class NixMergeTest(unittest.TestCase):
def assert_merge(self, sources, expect):
self.assertEqual(functools.reduce(nixmerge, sources), expect)
def test_merge_list(self):
self.assert_merge(
[[1, 2, 3], [4, 5, 6], [7, 6, 5], ["abc", "def"], ["ghi", "abc"]],
[1, 2, 3, 4, 5, 6, 7, "abc", "def", "ghi"],
)
def test_merge_dict(self):
self.assert_merge(
[
{},
{"a": {"b": {"c": "d"}}},
{"a": {"c": "e"}},
{"b": {"a": ["a"]}},
{"b": {"a": ["b"]}},
{"b": {"A": ["B"]}},
{"e": "f"},
{},
],
{
"a": {"c": "e", "b": {"c": "d"}},
"b": {"a": ["a", "b"], "A": ["B"]},
"e": "f",
},
)
def test_unhashable(self):
self.assertRaises(TypeError, nixmerge, [[1]], [[2]])
self.assertRaises(TypeError, nixmerge, [{"x": 1}], [{"y": 2}])
def test_invalid(self):
self.assertRaises(ValueError, nixmerge, [123], {"a": 456})
self.assertRaises(ValueError, nixmerge, "a", "b")
self.assertRaises(ValueError, nixmerge, 123, 456)
self.assertRaises(ValueError, nixmerge, RawValue("a"), RawValue("b"))
self.assertRaises(
ValueError, nixmerge, Function("aaa", {"a": 1}), Function("ccc", {"b": 2})
)
self.assertRaises(ValueError, nixmerge, Function("aaa", {"a": 1}), {"b": 2})
| 11,036 | Python | .py | 321 | 22.968847 | 88 | 0.393724 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,370 | test_query_deployments.py | NixOS_nixops/tests/functional/test_query_deployments.py |
from tests.functional import DatabaseUsingTest
class TestQueryDeployments(DatabaseUsingTest):
def test_shows_all_deployments(self):
depls = []
for i in range(10):
depls.append(self.sf.create_deployment())
uuids = self.sf.query_deployments()
for depl in depls:
assert any([depl.uuid == uuid for uuid in uuids])
| 372 | Python | .py | 9 | 33.444444 | 61 | 0.66759 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,371 | test_rollback_rollsback.py | NixOS_nixops/tests/functional/test_rollback_rollsback.py |
from os import path
from pytest import raises
from tests.functional.single_machine_test import SingleMachineTest
from nixops.ssh_util import SSHCommandFailed
from nixops.evaluation import NetworkFile
parent_dir = path.dirname(__file__)
has_hello_spec = "%s/single_machine_has_hello.nix" % (parent_dir)
rollback_spec = "%s/single_machine_rollback.nix" % (parent_dir)
class TestRollbackRollsback(SingleMachineTest):
_multiprocess_can_split_ = True
def setup_method(self):
super(TestRollbackRollsback, self).setup_method()
self.depl.network_expr = NetworkFile(rollback_spec)
self.depl.nix_exprs = self.depl.nix_exprs + [rollback_spec] # type: ignore
def run_check(self):
self.depl.deploy()
with raises(SSHCommandFailed):
self.check_command("hello")
self.depl.network_expr = NetworkFile(has_hello_spec)
self.depl.deploy()
self.check_command("hello")
self.depl.rollback(generation=1)
with raises(SSHCommandFailed):
self.check_command("hello")
| 1,066 | Python | .py | 24 | 38.125 | 83 | 0.71581 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,372 | test_stopping_stops.py | NixOS_nixops/tests/functional/test_stopping_stops.py |
from tests.functional import single_machine_test
class TestStoppingStops(single_machine_test.SingleMachineTest):
def run_check(self):
self.depl.deploy()
self.depl.stop_machines()
assert self.depl.active
m = list(self.depl.active.values())[0]
assert m.state == m.STOPPED
| 316 | Python | .py | 8 | 32.75 | 63 | 0.696078 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,373 | test_ssh_key_pair_resource.py | NixOS_nixops/tests/functional/test_ssh_key_pair_resource.py |
from os import path
from tests.functional.generic_deployment_test import GenericDeploymentTest
from nixops.evaluation import NetworkFile
parent_dir = path.dirname(__file__)
ssh_key_pair_spec = "%s/ssh-key-pair-resource.nix" % (parent_dir)
class TestSSHKeyPairResource(GenericDeploymentTest):
def setup_method(self):
super(TestSSHKeyPairResource, self).setup_method()
self.depl.network_expr = NetworkFile(ssh_key_pair_spec)
def test_evaluate(self):
self.depl.evaluate()
assert self.depl.definitions and "ssh-key" in self.depl.definitions
| 585 | Python | .py | 12 | 43.75 | 75 | 0.762832 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,374 | single_machine_test.py | NixOS_nixops/tests/functional/single_machine_test.py |
from os import path
from tests.functional.generic_deployment_test import GenericDeploymentTest
from nixops.evaluation import NetworkFile
parent_dir = path.dirname(__file__)
logical_spec = "{0}/single_machine_logical_base.nix".format(parent_dir)
class SingleMachineTest(GenericDeploymentTest):
_multiprocess_can_split_ = True
def setup_method(self):
super(SingleMachineTest, self).setup_method()
self.depl.network_expr = NetworkFile(logical_spec)
def check_command(self, command):
self.depl.evaluate()
machine = next(iter(self.depl.machines.values()))
return machine.run_command(command)
| 647 | Python | .py | 14 | 40.928571 | 74 | 0.752 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,375 | __init__.py | NixOS_nixops/tests/functional/__init__.py |
from nixops.statefile import StateFile
from tests import db_file
class DatabaseUsingTest(object):
_multiprocess_can_split_ = True
def setup_method(self):
self.sf = StateFile(db_file, writable=True)
def teardown_method(self):
self.sf.close()
| 273 | Python | .py | 8 | 29.125 | 51 | 0.727969 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,376 | test_rebooting_reboots.py | NixOS_nixops/tests/functional/test_rebooting_reboots.py |
from pytest import raises
from tests.functional.single_machine_test import SingleMachineTest
from nixops.ssh_util import SSHCommandFailed
class TestRebootingReboots(SingleMachineTest):
def run_check(self):
self.depl.deploy()
self.check_command("touch /run/not-rebooted")
self.depl.reboot_machines(wait=True)
assert self.depl.active
m = list(self.depl.active.values())[0]
m.check()
assert m.state == m.UP
with raises(SSHCommandFailed):
self.check_command("test -f /run/not-rebooted")
| 567 | Python | .py | 14 | 33.5 | 66 | 0.699454 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,377 | test_send_keys_sends_keys.py | NixOS_nixops/tests/functional/test_send_keys_sends_keys.py |
from os import path
from tests.functional.single_machine_test import SingleMachineTest
parent_dir = path.dirname(__file__)
secret_key_spec = "%s/single_machine_secret_key.nix" % (parent_dir)
elsewhere_key_spec = "%s/single_machine_elsewhere_key.nix" % (parent_dir)
class TestSendKeysSendsKeys(SingleMachineTest):
_multiprocess_can_split_ = True
def setup_method(self):
super(TestSendKeysSendsKeys, self).setup_method()
self.depl.nix_exprs = self.depl.nix_exprs + [ # type: ignore
secret_key_spec,
elsewhere_key_spec,
]
def run_check(self):
self.depl.deploy()
self.check_command("test -f /run/keys/secret.key")
self.check_command("rm -f /run/keys/secret.key")
self.check_command("test -f /new/directory/elsewhere.key")
self.check_command("rm -f /new/directory/elsewhere.key")
self.depl.send_keys()
self.check_command("test -f /run/keys/secret.key")
self.check_command("test -f /new/directory/elsewhere.key")
| 1,039 | Python | .py | 22 | 40.272727 | 73 | 0.678218 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,378 | test_deleting_deletes.py | NixOS_nixops/tests/functional/test_deleting_deletes.py |
from pytest import raises
from tests.functional.single_machine_test import SingleMachineTest
class TestDeletingDeletes(SingleMachineTest):
def run_check(self):
uuid = self.depl.uuid
self.depl.delete()
with raises(Exception):
self.sf.open_deployment(uuid)
| 297 | Python | .py | 8 | 30.875 | 66 | 0.731707 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,379 | test_starting_starts.py | NixOS_nixops/tests/functional/test_starting_starts.py |
from tests.functional.single_machine_test import SingleMachineTest
class TestStartingStarts(SingleMachineTest):
def run_check(self):
self.depl.deploy()
self.depl.stop_machines()
self.depl.start_machines()
assert self.depl.active
m = list(self.depl.active.values())[0]
m.check()
assert m.state == m.UP
| 363 | Python | .py | 10 | 29.1 | 66 | 0.672365 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,380 | test_cloning_clones.py | NixOS_nixops/tests/functional/test_cloning_clones.py |
from tests.functional.single_machine_test import SingleMachineTest
class TestCloningClones(SingleMachineTest):
def run_check(self):
depl = self.depl.clone()
assert depl.network_expr.network == self.depl.network_expr.network
assert depl.nix_path == self.depl.nix_path
assert depl.args == self.depl.args
| 340 | Python | .py | 7 | 42.142857 | 74 | 0.731118 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,381 | test_invalid_identifier.py | NixOS_nixops/tests/functional/test_invalid_identifier.py |
from os import path
from pytest import raises
from tests.functional.generic_deployment_test import GenericDeploymentTest
from nixops.evaluation import NetworkFile
parent_dir = path.dirname(__file__)
logical_spec = "%s/invalid-identifier.nix" % (parent_dir)
class TestInvalidIdentifier(GenericDeploymentTest):
def setup_method(self):
super(TestInvalidIdentifier, self).setup_method()
self.depl.network_expr = NetworkFile(logical_spec)
def test_invalid_identifier_fails_evaluation(self):
with raises(Exception):
self.depl.evaluate()
| 583 | Python | .py | 13 | 39.846154 | 74 | 0.768683 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,382 | test_deploys_nixos.py | NixOS_nixops/tests/functional/test_deploys_nixos.py |
from tests.functional.single_machine_test import SingleMachineTest
class TestDeploysNixos(SingleMachineTest):
def run_check(self):
self.depl.deploy()
self.check_command("test -f /etc/NIXOS")
| 213 | Python | .py | 5 | 37.2 | 66 | 0.752427 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,383 | generic_deployment_test.py | NixOS_nixops/tests/functional/generic_deployment_test.py |
from tests.functional import DatabaseUsingTest
class GenericDeploymentTest(DatabaseUsingTest):
def setup_method(self):
super(GenericDeploymentTest, self).setup_method()
self.depl = self.sf.create_deployment()
self.depl.logger.set_autoresponse("y")
| 278 | Python | .py | 6 | 40.333333 | 57 | 0.755556 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,384 | conf.py | NixOS_nixops/doc/conf.py |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath(".."))
# -- Project information -----------------------------------------------------
project = "NixOps"
copyright = "2020, NixOps Contributors"
author = "NixOps Contributors"
# The full version, including alpha/beta/rc tags
release = "2.0"
# -- General configuration ---------------------------------------------------
# The document name of the “master” document, that is, the document
# that contains the root toctree directive. Default is 'index'.
# Changed in version 2.0: The default is changed to 'index' from
# 'contents'. But, RTD seems to not be using 2.0.
master_doc = "index"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ["sphinx.ext.autodoc"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
def setup(app):
from nixops.plugins.manager import PluginManager
from sphinx.directives.other import TocTree # type: ignore
from docutils.parsers.rst import Directive
from docutils import nodes
class NixopsPluginsDirective(Directive):
has_content = True
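# Emit a "Plugins" section whose toctree pulls in each plugin's own documentation; returns nothing when no plugin ships docs.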
def run(self):
plugin_docs = list(PluginManager.docs())
if not plugin_docs:
return []
ret = [nodes.title("", "Plugins")]
for plugin_name, path in plugin_docs:
ret.extend(
TocTree(
name=plugin_name,
arguments=[],
options={"caption": plugin_name.capitalize(), "maxdepth": 2},
content=[path],
lineno=None,
content_offset=None,
block_text=None,
state=self.state,
state_machine=self.state_machine,
).run()
)
return ret
app.add_directive("nixops_plugins_doc", NixopsPluginsDirective)
| 3,368 | Python | .py | 72 | 39.777778 | 85 | 0.626683 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,385 | ansi.py | NixOS_nixops/nixops/ansi.py |
import sys
from typing import TextIO
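# Each helper wraps the string in an ANSI colour escape only when the target stream is a terminal; otherwise the text is returned unchanged.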
def ansi_highlight(s: str, outfile: TextIO = sys.stderr) -> str:
return "\033[1;35m" + s + "\033[0m" if outfile.isatty() else s
def ansi_warn(s: str, outfile: TextIO = sys.stderr) -> str:
return "\033[1;33m" + s + "\033[0m" if outfile.isatty() else s
def ansi_error(s: str, outfile: TextIO = sys.stderr) -> str:
return "\033[1;31m" + s + "\033[0m" if outfile.isatty() else s
def ansi_success(s: str, outfile: TextIO = sys.stderr) -> str:
return "\033[1;32m" + s + "\033[0m" if outfile.isatty() else s
| 562 | Python | .py | 10 | 52.8 | 66 | 0.654412 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,386 | state.py | NixOS_nixops/nixops/state.py |
import json
import collections
import sqlite3
import nixops.util
from typing import Any, KeysView, List, Iterator, Tuple, NewType
RecordId = NewType("RecordId", str)
class StateDict(collections.abc.MutableMapping):
"""
An implementation of a MutableMapping container providing
a python dict like behavior for the NixOps state file.
"""
# TODO implement __repr__ for convenience e.g debugging the structure
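# A hypothetical usage sketch (key names are illustrative, not part of the API):
#   attrs = StateDict(depl, resource_id)
#   attrs["privateIpv4"] = "10.0.0.1"   # upserted into the ResourceAttrs table
#   ip = attrs["privateIpv4"]           # read back, JSON-decoded where possible
#   del attrs["privateIpv4"]            # removed from the state file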
def __init__(self, depl, id: RecordId):
super(StateDict, self).__init__()
self._db: sqlite3.Connection = depl._db
self.id = id
def __setitem__(self, key: str, value: Any) -> None:
with self._db:
c = self._db.cursor()
if value is None:
c.execute(
"delete from ResourceAttrs where machine = ? and name = ?",
(self.id, key),
)
else:
v = value
if isinstance(value, list) or isinstance(value, dict):
v = json.dumps(value, cls=nixops.util.NixopsEncoder)
c.execute(
"insert or replace into ResourceAttrs(machine, name, value) values (?, ?, ?)",
(self.id, key, v),
)
def __getitem__(self, key: str) -> Any:
with self._db:
c = self._db.cursor()
c.execute(
"select value from ResourceAttrs where machine = ? and name = ?",
(self.id, key),
)
row: Tuple[str] = c.fetchone()
if row is not None:
try:
v = json.loads(row[0])
if isinstance(v, list):
v = tuple(v)
return v
except ValueError:
return row[0]
raise KeyError("couldn't find key {} in the state file".format(key))
def __delitem__(self, key: str) -> None:
with self._db:
c = self._db.cursor()
c.execute(
"delete from ResourceAttrs where machine = ? and name = ?",
(self.id, key),
)
def keys(self) -> KeysView[str]:
# Generally the list of keys per ResourceAttrs is relatively small
# so this should be also relatively fast.
_keys = set()
with self._db:
c = self._db.cursor()
c.execute("select name from ResourceAttrs where machine = ?", (self.id,))
rows: List[Tuple[str]] = c.fetchall()
for row in rows:
_keys.add(row[0])
return _keys # type: ignore
def __iter__(self) -> Iterator[str]:
return iter(self.keys())
def __len__(self) -> int:
return len(self.keys())
| 2,782 | Python | .py | 71 | 27.338028 | 98 | 0.519807 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,387 | script_defs.py | NixOS_nixops/nixops/script_defs.py |
# -*- coding: utf-8 -*-
from nixops.nix_expr import py2nix
from nixops.parallel import run_tasks
from nixops.storage import StorageBackend, StorageInterface
from nixops.locks import LockDriver, LockInterface
import contextlib
import nixops.statefile
import prettytable # type: ignore
from argparse import ArgumentParser, _SubParsersAction, Namespace
import os
import pwd
import re
import sys
import subprocess
import nixops.parallel
import nixops.util
import nixops.known_hosts
import time
import logging
import logging.handlers
import json
from tempfile import TemporaryDirectory
import pipes
from typing import Tuple, List, Optional, Union, Generator, Type, Set, Sequence
import nixops.ansi
from nixops.plugins.manager import PluginManager
from nixops.plugins import get_plugin_manager
from nixops.evaluation import eval_network, NetworkEval, NixEvalError, NetworkFile
from nixops.backends import MachineDefinition
PluginManager.load()
def get_network_file(args: Namespace) -> NetworkFile:
network_dir: str = os.path.abspath(args.network_dir)
if not os.path.exists(network_dir):
raise ValueError(f"{network_dir} does not exist")
classic_path = os.path.join(network_dir, "nixops.nix")
flake_path = os.path.join(network_dir, "flake.nix")
classic_exists: bool = os.path.exists(classic_path)
flake_exists: bool = os.path.exists(flake_path)
if all((flake_exists, classic_exists)):
raise ValueError("Both flake.nix and nixops.nix cannot coexist")
if classic_exists:
return NetworkFile(network=classic_path, is_flake=False)
if flake_exists:
return NetworkFile(network=network_dir, is_flake=True)
raise ValueError(f"Neither flake.nix nor nixops.nix exists in {network_dir}")
def set_common_depl(depl: nixops.deployment.Deployment, args: Namespace) -> None:
network_file = get_network_file(args)
depl.network_expr = network_file
@contextlib.contextmanager
def deployment(
args: Namespace, writable: bool, activityDescription: str
) -> Generator[nixops.deployment.Deployment, None, None]:
with network_state(args, writable, description=activityDescription) as sf:
depl = open_deployment(sf, args)
set_common_depl(depl, args)
yield depl
def get_lock(network: NetworkEval) -> LockInterface:
lock: LockInterface
lock_class: Type[LockDriver]
lock_drivers = PluginManager.lock_drivers()
try:
lock_class = lock_drivers[network.lock.provider]
except KeyError:
sys.stderr.write(
nixops.ansi.ansi_warn(
f"The network requires the '{network.lock.provider}' lock driver, "
"but no plugin provides it.\n"
)
)
raise Exception("Missing lock driver plugin.")
else:
lock_class_options = lock_class.options(**network.lock.configuration)
lock = lock_class(lock_class_options)
return lock
@contextlib.contextmanager
def network_state(
args: Namespace, writable: bool, description: str, doLock: bool = True
) -> Generator[nixops.statefile.StateFile, None, None]:
network = eval_network(get_network_file(args))
storage_backends = PluginManager.storage_backends()
storage_class: Optional[Type[StorageBackend]] = storage_backends.get(
network.storage.provider
)
if storage_class is None:
sys.stderr.write(
nixops.ansi.ansi_warn(
f"The network requires the '{network.storage.provider}' state provider, "
"but no plugin provides it.\n"
)
)
raise Exception("Missing storage provider plugin.")
lock: Optional[LockInterface]
if doLock:
lock = get_lock(network)
else:
lock = None
storage_class_options = storage_class.options(**network.storage.configuration)
storage: StorageInterface = storage_class(storage_class_options)
with TemporaryDirectory("nixops") as statedir:
statefile = statedir + "/state.nixops"
if lock is not None:
lock.lock(description=description, exclusive=writable)
try:
storage.fetchToFile(statefile)
if writable:
state = nixops.statefile.StateFile(statefile, writable, lock=lock)
else:
# Non-mutating commands use the state file as their data
# structure, therefore requiring mutation to work.
# Changes *will* be lost, as tolerating racy writes will be
# even harder to debug than consistently discarding changes.
# TODO: Change the NixOps architecture to separate reading
# and writing cleanly, so we can request a read-only
# statefile here and 'guarantee' no loss of state changes.
state = nixops.statefile.StateFile(statefile, True, lock=lock)
try:
storage.onOpen(state)
yield state
finally:
state.close()
if writable:
storage.uploadFromFile(statefile)
finally:
if lock is not None:
lock.unlock()
def op_list_plugins(args: Namespace) -> None:
pm = get_plugin_manager()
if args.verbose:
tbl = create_table([("Installed Plugins", "c"), ("Plugin Reference", "c")])
else:
tbl = create_table([("Installed Plugins", "c")])
for plugin in sorted(pm.list_name_plugin()):
if args.verbose:
tbl.add_row([plugin[0], plugin[1].__str__()])
else:
tbl.add_row([plugin[0]])
print(tbl)
def create_table(headers: List[Tuple[str, str]]) -> prettytable.PrettyTable:
tbl = prettytable.PrettyTable([name for (name, align) in headers])
for (name, align) in headers:
tbl.align[name] = align
return tbl
def sort_deployments(
depls: List[nixops.deployment.Deployment],
) -> List[nixops.deployment.Deployment]:
return sorted(depls, key=lambda depl: (depl.name or depl.uuid, depl.uuid))
# Handle the --all switch: if --all is given, return all deployments;
# otherwise, return the deployment specified by -d /
# $NIXOPS_DEPLOYMENT.
@contextlib.contextmanager
def one_or_all(
args: Namespace, writable: bool, activityDescription: str
) -> Generator[List[nixops.deployment.Deployment], None, None]:
with network_state(args, writable, description=activityDescription) as sf:
if args.all:
yield sf.get_all_deployments()
else:
yield [open_deployment(sf, args)]
def op_list_deployments(args: Namespace) -> None:
with network_state(args, False, "nixops list") as sf:
tbl = create_table(
[
("UUID", "l"),
("Name", "l"),
("Description", "l"),
("# Machines", "r"),
("Type", "c"),
]
)
for depl in sort_deployments(sf.get_all_deployments()):
set_common_depl(depl, args)
depl.evaluate()
types: Set[str] = set()
n_machines: int = 0
for defn in (depl.definitions or {}).values():
if not isinstance(defn, MachineDefinition):
continue
n_machines += 1
types.add(defn.get_type())
tbl.add_row(
[
depl.uuid,
depl.name or "(none)",
depl.description,
n_machines,
", ".join(types),
]
)
print(tbl)
def open_deployment(
sf: nixops.statefile.StateFile, args: Namespace
) -> nixops.deployment.Deployment:
depl = sf.open_deployment(uuid=args.deployment)
depl.extra_nix_path = sum(args.nix_path or [], [])
for (n, v) in args.nix_options or []:
depl.extra_nix_flags.extend(["--option", n, v])
if args.max_jobs is not None:
depl.extra_nix_flags.extend(["--max-jobs", str(args.max_jobs)])
if args.cores is not None:
depl.extra_nix_flags.extend(["--cores", str(args.cores)])
if args.keep_going:
depl.extra_nix_flags.append("--keep-going")
if args.keep_failed:
depl.extra_nix_flags.append("--keep-failed")
if args.show_trace:
depl.extra_nix_flags.append("--show-trace")
if args.fallback:
depl.extra_nix_flags.append("--fallback")
if args.no_build_output:
depl.extra_nix_flags.append("--no-build-output")
if not args.read_only_mode:
depl.extra_nix_eval_flags.append("--read-write-mode")
return depl
def set_name(depl: nixops.deployment.Deployment, name: Optional[str]) -> None:
if not name:
return
if not re.match(r"^[a-zA-Z_\-][a-zA-Z0-9_\-\.]*$", name): # noqa: W605
raise Exception("invalid deployment name ‘{0}’".format(name))
depl.name = name
def modify_deployment(args: Namespace, depl: nixops.deployment.Deployment) -> None:
set_common_depl(depl, args)
depl.nix_path = [nixops.util.abs_nix_path(x) for x in sum(args.nix_path or [], [])]
def op_create(args: Namespace) -> None:
with network_state(args, True, "nixops create") as sf:
depl = sf.create_deployment()
sys.stderr.write("created deployment ‘{0}’\n".format(depl.uuid))
modify_deployment(args, depl)
# When deployment is created without state "name" does not exist
name: str = args.deployment
if "name" in args:
name = args.name or args.deployment
if name:
set_name(depl, name)
sys.stdout.write(depl.uuid + "\n")
def op_modify(args: Namespace) -> None:
with deployment(args, True, "nixops modify") as depl:
modify_deployment(args, depl)
if args.name:
set_name(depl, args.name)
def op_clone(args: Namespace) -> None:
with deployment(args, True, "nixops clone") as depl:
depl2 = depl.clone()
sys.stderr.write("created deployment ‘{0}’\n".format(depl2.uuid))
set_name(depl2, args.name)
sys.stdout.write(depl2.uuid + "\n")
def op_delete(args: Namespace) -> None:
with one_or_all(args, True, "nixops delete") as depls:
for depl in depls:
depl.delete(force=args.force or False)
def machine_to_key(depl: str, name: str, type: str) -> Tuple[str, str, List[object]]:
xs = [int(x) if x.isdigit() else x for x in re.split("(\d+)", name)] # noqa: W605
return (depl, type, xs)
def op_info(args: Namespace) -> None: # noqa: C901
table_headers = [
("Name", "l"),
("Status", "c"),
("Type", "l"),
("Resource Id", "l"),
("IP address", "l"),
]
def state(
depl: nixops.deployment.Deployment,
d: Optional[nixops.resources.ResourceDefinition],
m: nixops.backends.GenericMachineState,
) -> str:
if d and m.obsolete:
return "Revived"
if d is None and m.obsolete:
return "Obsolete"
if depl.configs_path != m.cur_configs_path:
return "Outdated"
return "Up-to-date"
def do_eval(depl) -> None:
set_common_depl(depl, args)
if not args.no_eval:
try:
depl.evaluate()
except NixEvalError:
sys.stderr.write(
nixops.ansi.ansi_warn(
"warning: evaluation of the deployment specification failed; status info may be incorrect\n\n"
)
)
depl.definitions = None
def print_deployment(depl: nixops.deployment.Deployment) -> None:
definitions = depl.definitions or {}
# Sort machines by type, then name. Sort numbers in machine
# names numerically (e.g. "foo10" comes after "foo9").
def name_to_key(name: str) -> Tuple[str, str, List[object]]:
d: Optional[nixops.resources.ResourceDefinition] = definitions.get(name)
r: Optional[nixops.resources.GenericResourceState] = depl.resources.get(
name
)
if r:
key = machine_to_key(depl.uuid, name, r.get_type())
elif d:
key = machine_to_key(depl.uuid, name, d.get_type())
else:
key = machine_to_key(depl.uuid, name, "")
return key
names = sorted(
set(definitions.keys()) | set(depl.resources.keys()), key=name_to_key
)
for name in names:
d = definitions.get(name)
r = depl.resources.get(name)
resource_state: str = "Missing"
if isinstance(r, nixops.backends.MachineState):
resource_state = "{0} / {1}".format(r.show_state(), state(depl, d, r))
elif r:
resource_state = r.show_state()
user_type: str = "unknown-type"
if r:
user_type = r.show_type()
elif d:
user_type = d.show_type()
public_ipv4: str = ""
private_ipv4: str = ""
if isinstance(r, nixops.backends.MachineState):
public_ipv4 = r.public_ipv4 or ""
private_ipv4 = r.private_ipv4 or ""
if args.plain:
print(
"\t".join(
([depl.uuid, depl.name or "(none)"] if args.all else [])
+ [
name,
resource_state.lower(),
user_type,
r.resource_id or "" if r else "",
public_ipv4,
private_ipv4,
]
)
)
else:
tbl.add_row(
([depl.name or depl.uuid] if args.all else [])
+ [
name,
resource_state,
user_type,
r.resource_id or "" if r else "",
public_ipv4 or private_ipv4,
]
)
if args.all:
with network_state(args, False, "nixops info") as sf:
if not args.plain:
tbl = create_table([("Deployment", "l")] + table_headers)
for depl in sort_deployments(sf.get_all_deployments()):
do_eval(depl)
print_deployment(depl)
if not args.plain:
print(tbl)
else:
with deployment(args, False, "nixops info") as depl:
do_eval(depl)
if args.plain:
print_deployment(depl)
else:
print("Network name:", depl.name or "(none)")
print("Network UUID:", depl.uuid)
print("Network description:", depl.description)
print("Nix expression:", get_network_file(args).network)
if depl.nix_path != []:
print("Nix path:", " ".join(["-I " + x for x in depl.nix_path]))
if depl.rollback_enabled:
print("Nix profile:", depl.get_profile())
if depl.args != {}:
print(
"Nix arguments:",
", ".join([n + " = " + v for n, v in depl.args.items()]),
)
print()
tbl = create_table(table_headers)
print_deployment(depl)
print(tbl)
def op_check(args: Namespace) -> None: # noqa: C901
def highlight(s: str) -> str:
return nixops.ansi.ansi_highlight(s, outfile=sys.stdout)
def warn(s: str) -> str:
return nixops.ansi.ansi_warn(s, outfile=sys.stdout)
def render_tristate(x: bool) -> str:
if x is None:
return "N/A"
elif x:
return nixops.ansi.ansi_success("Yes", outfile=sys.stdout)
else:
return warn("No")
tbl = create_table(
([("Deployment", "l")] if args.all else [])
+ [
("Name", "l"),
("Exists", "l"),
("Up", "l"),
("Reachable", "l"),
("Disks OK", "l"),
("Load avg.", "l"),
("Units", "l"),
("Notes", "l"),
]
)
machines: List[nixops.backends.GenericMachineState] = []
resources: List[nixops.resources.GenericResourceState] = []
def check(depl: nixops.deployment.Deployment) -> None:
for m in depl.active_resources.values():
if not nixops.deployment.should_do(
m, args.include or [], args.exclude or []
):
continue
if isinstance(m, nixops.backends.MachineState):
machines.append(m)
else:
resources.append(m)
# TODO: writable=False?
# Historically, nixops check was allowed to write to the state file.
# With remote state however, this requires an exclusive lock, which may
# not be the best choice.
with one_or_all(args, writable=True, activityDescription="nixops check") as depls:
for depl in depls:
check(depl)
ResourceStatus = Tuple[
str,
Union[
nixops.backends.GenericMachineState,
nixops.resources.GenericResourceState,
],
List[str],
int,
]
# Check all machines in parallel.
def worker(m: nixops.backends.GenericMachineState) -> ResourceStatus:
res = m.check()
unit_lines: List[str] = []
if res.failed_units:
unit_lines.append(
"\n".join(
[warn("{0} [failed]".format(x)) for x in res.failed_units]
)
)
if res.in_progress_units:
unit_lines.append(
"\n".join(
[
highlight("{0} [running]".format(x))
for x in res.in_progress_units
]
)
)
row = ([m.depl.name or m.depl.uuid] if args.all else []) + [
m.name,
render_tristate(res.exists),
render_tristate(res.is_up),
render_tristate(res.is_reachable),
render_tristate(res.disks_ok),
"{0} {1} {2}".format(res.load[0], res.load[1], res.load[2])
if res.load is not None
else "",
"\n".join(unit_lines),
"\n".join([warn(x) for x in res.messages]),
]
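# Encode each kind of failure as a bit; the per-machine statuses are OR-ed together below and become the process exit code.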
status = 0
if res.exists is False:
status |= 1
if res.is_up is False:
status |= 2
if res.is_reachable is False:
status |= 4
if res.disks_ok is False:
status |= 8
if res.failed_units is not None and res.failed_units != []:
status |= 16
return (m.depl.name or m.depl.uuid, m, row, status)
resources_tbl = create_table(
([("Deployment", "l")] if args.all else [])
+ [("Name", "l"), ("Exists", "l")]
)
def resource_worker(
r: nixops.resources.GenericResourceState,
) -> Optional[ResourceStatus]:
if not nixops.deployment.is_machine(r):
r.check()
exist = True if r.state == nixops.resources.ResourceState.UP else False
row = ([r.depl.name or r.depl.uuid] if args.all else []) + [
r.name,
render_tristate(exist),
]
return (r.depl.name or r.depl.uuid, r, row, 0)
return None
results = run_tasks(nr_workers=len(machines), tasks=machines, worker_fun=worker)
resources_results = run_tasks(
nr_workers=len(resources), tasks=resources, worker_fun=resource_worker
)
# Sort the rows by deployment/machine.
status = 0
for res in sorted(
[res for res in results if res is not None],
key=lambda res: machine_to_key(res[0], res[1].name, res[1].get_type()),
):
tbl.add_row(res[2])
status |= res[3]
print(nixops.ansi.ansi_success("Machines state:"))
print(tbl)
for res in sorted(
[res for res in resources_results if res is not None],
key=lambda res: machine_to_key(res[0], res[1].name, res[1].get_type()),
):
resources_tbl.add_row(res[2])
status |= res[3]
print(nixops.ansi.ansi_success("Non machines resources state:"))
print(resources_tbl)
sys.exit(status)
def print_backups(depl, backups) -> None:
tbl = prettytable.PrettyTable(["Backup ID", "Status", "Info"])
for k, v in sorted(backups.items(), reverse=True):
tbl.add_row([k, v["status"], "\n".join(v["info"])])
print(tbl)
def op_clean_backups(args: Namespace) -> None:
if args.keep and args.keep_days:
raise Exception(
"Combining of --keep and --keep-days arguments are not possible, please use one."
)
if not (args.keep or args.keep_days):
raise Exception("Please specify at least --keep or --keep-days arguments.")
with deployment(args, True, "nixops clean-backups") as depl:
depl.clean_backups(args.keep, args.keep_days, args.keep_physical)
def op_remove_backup(args: Namespace) -> None:
with deployment(args, True, "nixops remove-backup") as depl:
depl.remove_backup(args.backupid, args.keep_physical)
def op_backup(args: Namespace) -> None:
with deployment(args, True, "nixops backup") as depl:
def do_backup():
backup_id = depl.backup(
include=args.include or [],
exclude=args.exclude or [],
devices=args.devices or [],
)
print(backup_id)
if args.force:
do_backup()
else:
backups = depl.get_backups(
include=args.include or [], exclude=args.exclude or []
)
backups_status = [b["status"] for _, b in backups.items()]
if "running" in backups_status:
raise Exception(
"There are still backups running, use --force to run a new backup concurrently (not advised!)"
)
else:
do_backup()
def op_backup_status(args: Namespace) -> None:
with deployment(args, False, "nixops backup-status") as depl:
backupid = args.backupid
while True:
backups = depl.get_backups(
include=args.include or [], exclude=args.exclude or []
)
if backupid or args.latest:
sorted_backups = sorted(backups.keys(), reverse=True)
if args.latest:
if len(backups) == 0:
raise Exception("no backups found")
backupid = sorted_backups[0]
if backupid not in backups:
raise Exception("backup ID ‘{0}’ does not exist".format(backupid))
_backups = {}
_backups[backupid] = backups[backupid]
else:
_backups = backups
print_backups(depl, _backups)
backups_status = [b["status"] for _, b in _backups.items()]
if "running" in backups_status:
if args.wait:
print("waiting for 30 seconds...")
time.sleep(30)
else:
raise Exception("backup has not yet finished")
else:
return
def op_restore(args: Namespace) -> None:
with deployment(args, True, "nixops restore") as depl:
depl.restore(
include=args.include or [],
exclude=args.exclude or [],
backup_id=args.backup_id,
devices=args.devices or [],
)
def op_deploy(args: Namespace) -> None:
with deployment(args, True, "nixops deploy") as depl:
if args.confirm:
depl.logger.set_autoresponse("y")
if args.evaluate_only:
raise Exception("--evaluate-only was removed as it's the same as --dry-run")
depl.deploy(
dry_run=args.dry_run,
test=args.test,
boot=args.boot,
build_only=args.build_only,
plan_only=args.plan_only,
create_only=args.create_only,
copy_only=args.copy_only,
include=args.include or [],
exclude=args.exclude or [],
check=args.check,
kill_obsolete=args.kill_obsolete,
allow_reboot=args.allow_reboot,
allow_recreate=args.allow_recreate,
force_reboot=args.force_reboot,
max_concurrent_copy=args.max_concurrent_copy,
sync=not args.no_sync,
always_activate=args.always_activate,
repair=args.repair,
dry_activate=args.dry_activate,
max_concurrent_activate=args.max_concurrent_activate,
)
def op_send_keys(args: Namespace) -> None:
with deployment(args, False, "nixops send-keys") as depl:
depl.send_keys(include=args.include or [], exclude=args.exclude or [])
def op_set_args(args: Namespace) -> None:
with deployment(args, True, "nixops set-args") as depl:
for [n, v] in args.args or []:
depl.set_arg(n, v)
for [n, v] in args.argstrs or []:
depl.set_argstr(n, v)
for [n] in args.unset or []:
depl.unset_arg(n)
def op_destroy(args: Namespace) -> None:
with one_or_all(args, True, "nixops destroy") as depls:
for depl in depls:
if args.confirm:
depl.logger.set_autoresponse("y")
depl.destroy_resources(
include=args.include or [], exclude=args.exclude or [], wipe=args.wipe
)
def op_reboot(args: Namespace) -> None:
with deployment(args, True, "nixops reboot") as depl:
depl.reboot_machines(
include=args.include or [],
exclude=args.exclude or [],
wait=(not args.no_wait),
rescue=args.rescue,
hard=args.hard,
)
def op_delete_resources(args: Namespace) -> None:
with deployment(args, True, "nixops delete-resources") as depl:
if args.confirm:
depl.logger.set_autoresponse("y")
depl.delete_resources(include=args.include or [], exclude=args.exclude or [])
def op_stop(args: Namespace) -> None:
with deployment(args, True, "nixops stop") as depl:
if args.confirm:
depl.logger.set_autoresponse("y")
depl.stop_machines(include=args.include or [], exclude=args.exclude or [])
def op_start(args: Namespace) -> None:
with deployment(args, True, "nixops start") as depl:
depl.start_machines(include=args.include or [], exclude=args.exclude or [])
def op_rename(args: Namespace) -> None:
with deployment(args, True, "nixops rename") as depl:
depl.rename(args.current_name, args.new_name)
def print_physical_backup_spec(
depl: nixops.deployment.Deployment, backupid: str
) -> None:
config = {}
for m in depl.active_machines.values():
config[m.name] = m.get_physical_backup_spec(backupid)
sys.stdout.write(py2nix(config))
def op_show_arguments(cli_args: Namespace) -> None:
with deployment(cli_args, False, "nixops show-arguments") as depl:
tbl = create_table([("Name", "l"), ("Location", "l")])
args = depl.get_arguments()
for arg in sorted(args.keys()):
files = sorted(args[arg])
tbl.add_row([arg, "\n".join(files)])
print(tbl)
def op_show_physical(args: Namespace) -> None:
with deployment(args, False, "nixops show-physical") as depl:
if args.backupid:
print_physical_backup_spec(depl, args.backupid)
return
depl.evaluate()
sys.stdout.write(depl.get_physical_spec())
def op_dump_nix_paths(args: Namespace) -> None:
def get_nix_path(p: Optional[str]) -> Optional[str]:
if p is None:
return None
p = os.path.realpath(os.path.abspath(p))
# FIXME: hardcoded nix store
nix_store = "/nix/store"
if not p.startswith("{0}/".format(nix_store)):
return None
return "/".join(p.split("/")[: len(nix_store.split("/")) + 1])
def strip_nix_path(p: str) -> str:
parts: List[str] = p.split("=")
if len(parts) == 1:
return parts[0]
else:
return parts[1]
def nix_paths(depl: nixops.deployment.Deployment) -> List[str]:
set_common_depl(depl, args)
candidates: Sequence[Optional[str]] = []
candidates = (
[depl.network_expr.network]
+ [strip_nix_path(p) for p in depl.nix_path]
+ [depl.configs_path]
)
candidates = [get_nix_path(p) for p in candidates]
return [p for p in candidates if p is not None]
paths: List[str] = []
with one_or_all(args, False, "nixops dump-nix-paths") as depls:
for depl in depls:
paths.extend(nix_paths(depl))
for p in paths:
print(p)
def op_export(args: Namespace) -> None:
res = {}
with one_or_all(args, False, "nixops export") as depls:
for depl in depls:
res[depl.uuid] = depl.export()
print(json.dumps(res, indent=2, sort_keys=True, cls=nixops.util.NixopsEncoder))
def op_unlock(args: Namespace) -> None:
network = eval_network(get_network_file(args))
lock = get_lock(network)
lock.unlock()
def op_import(args: Namespace) -> None:
with network_state(args, True, "nixops import") as sf:
existing = set(sf.query_deployments())
dump = json.loads(sys.stdin.read())
for uuid, attrs in dump.items():
if uuid in existing:
raise Exception(
"state file already contains a deployment with UUID ‘{0}’".format(
uuid
)
)
with sf._db:
depl = sf.create_deployment(uuid=uuid)
depl.import_(attrs)
sys.stderr.write("added deployment ‘{0}’\n".format(uuid))
if args.include_keys:
for m in depl.active_machines.values():
if nixops.deployment.is_machine(m) and hasattr(
m, "public_host_key"
):
if m.public_ipv4:
nixops.known_hosts.add(m.public_ipv4, m.public_host_key)
if m.private_ipv4:
nixops.known_hosts.add(m.private_ipv4, m.public_host_key)
def parse_machine(
name: str, depl: nixops.deployment.Deployment
) -> Tuple[str, str, nixops.backends.GenericMachineState]:
username: Optional[str]
machine_name: str
if name.find("@") == -1:
username = None
machine_name = name
else:
username, machine_name = name.split("@", 1)
# For nixops mount, split path element
machine_name = machine_name.split(":")[0]
m = depl.machines.get(machine_name)
if not m:
raise Exception("unknown machine ‘{0}’".format(machine_name))
if not username and m.ssh_user:
username = m.ssh_user
if username is None:
username = "root"
return username, machine_name, m
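# Illustrative note (not part of the original nixops source): parse_machine()
# accepts "machine", "user@machine", and (for `nixops mount`) an optional
# ":path" suffix, which is stripped here. When no user is given it falls back
# to the machine's configured ssh_user and finally to "root".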
def op_ssh(args: Namespace) -> None:
with network_state(
args, False, description="nixops ssh", doLock=not args.now
) as sf:
depl = open_deployment(sf, args)
set_common_depl(depl, args)
(username, _, m) = parse_machine(args.machine, depl)
flags, command = m.ssh.split_openssh_args(args.args)
# unlock early, to avoid blocking mutable operations (deploy etc) while
# an interactive session is active.
if sf.lock is not None:
sf.lock.unlock()
sys.exit(
m.ssh.run_command(
command,
flags=flags,
check=False,
logged=False,
allow_ssh_args=True,
user=username,
)
)
def op_ssh_for_each(args: Namespace) -> None:
with one_or_all(args, False, "nixops ssh-for-each") as depls:
for depl in depls:
def worker(m: nixops.backends.GenericMachineState) -> Optional[int]:
if not nixops.deployment.should_do(
m, args.include or [], args.exclude or []
):
return None
return m.ssh.run_command_get_status(
args.args, allow_ssh_args=True, check=False, user=m.ssh_user
)
results: List[int] = [
result
for result in nixops.parallel.run_tasks(
nr_workers=len(depl.machines) if args.parallel else 1,
tasks=iter(depl.active_machines.values()),
worker_fun=worker,
)
if result is not None
]
sys.exit(max([r for r in results if r is not None]) if results != [] else 0)
def scp_loc(user: str, ssh_name: str, remote: str, loc: str) -> str:
return "{0}@{1}:{2}".format(user, ssh_name, loc) if remote else loc
def op_scp(args: Namespace) -> None:
if args.scp_from == args.scp_to:
raise Exception("exactly one of ‘--from’ and ‘--to’ must be specified")
with deployment(args, False, "nixops scp") as depl:
(username, machine, m) = parse_machine(args.machine, depl)
ssh_name = m.get_ssh_name()
from_loc = scp_loc(username, ssh_name, args.scp_from, args.source)
to_loc = scp_loc(username, ssh_name, args.scp_to, args.destination)
print("{0} -> {1}".format(from_loc, to_loc), file=sys.stderr)
flags = ["scp", "-r"] + m.get_ssh_flags() + [from_loc, to_loc]
# Map ssh's ‘-p’ to scp's ‘-P’.
flags = ["-P" if f == "-p" else f for f in flags]
res = subprocess.call(flags)
sys.exit(res)
def op_mount(args: Namespace) -> None:
# TODO: Fixme
with deployment(args, False, "nixops mount") as depl:
(username, rest, m) = parse_machine(args.machine, depl)
try:
remote_path = args.machine.split(":")[1]
except IndexError:
remote_path = "/"
ssh_name = m.get_ssh_name()
ssh_flags = nixops.util.shlex_join(["ssh"] + m.get_ssh_flags())
new_flags = ["-o" f"ssh_command={ssh_flags}"]
for o in args.sshfs_option or []:
new_flags.extend(["-o", o])
# Note: sshfs will go into the background when it has finished
# setting up, so we can safely delete the SSH identity file
# afterwards.
res = subprocess.call(
["sshfs", username + "@" + ssh_name + ":" + remote_path, args.destination]
+ new_flags
)
sys.exit(res)
def op_show_option(args: Namespace) -> None:
with deployment(args, False, "nixops show-option") as depl:
if args.include_physical:
depl.evaluate()
json.dump(
depl.evaluate_option_value(
args.machine,
args.option,
include_physical=args.include_physical,
),
sys.stdout,
indent=2,
)
@contextlib.contextmanager
def deployment_with_rollback(
args: Namespace,
activityDescription: str,
) -> Generator[nixops.deployment.Deployment, None, None]:
with deployment(args, True, activityDescription) as depl:
if not depl.rollback_enabled:
raise Exception(
"rollback is not enabled for this network; please set ‘network.enableRollback’ to ‘true’ and redeploy"
)
yield depl
def op_list_generations(args: Namespace) -> None:
with deployment_with_rollback(args, "nixops list-generations") as depl:
if (
subprocess.call(["nix-env", "-p", depl.get_profile(), "--list-generations"])
!= 0
):
raise Exception("nix-env --list-generations failed")
def op_delete_generation(args: Namespace) -> None:
with deployment_with_rollback(args, "nixops delete-generation") as depl:
if (
subprocess.call(
[
"nix-env",
"-p",
depl.get_profile(),
"--delete-generations",
str(args.generation),
]
)
!= 0
):
raise Exception("nix-env --delete-generations failed")
def op_rollback(args: Namespace) -> None:
with deployment_with_rollback(args, "nixops rollback") as depl:
depl.rollback(
generation=args.generation,
include=args.include or [],
exclude=args.exclude or [],
check=args.check,
allow_reboot=args.allow_reboot,
force_reboot=args.force_reboot,
max_concurrent_copy=args.max_concurrent_copy,
max_concurrent_activate=args.max_concurrent_activate,
sync=not args.no_sync,
)
def op_show_console_output(args: Namespace) -> None:
with deployment(args, False, "nixops show-console-output") as depl:
m = depl.machines.get(args.machine)
if not m:
raise Exception("unknown machine ‘{0}’".format(args.machine))
sys.stdout.write(m.get_console_output())
def op_edit(args: Namespace) -> None:
with deployment(args, False, "nixops edit") as depl:
editor = os.environ.get("EDITOR")
if not editor:
raise Exception("the $EDITOR environment variable is not set")
os.system(
"$EDITOR " + " ".join([pipes.quote(x) for x in depl.network_expr.network])
)
def op_copy_closure(args: Namespace) -> None:
with deployment(args, False, "nixops copy-closure") as depl:
(username, machine, m) = parse_machine(args.machine, depl)
m.copy_closure_to(args.storepath)
# Set up logging of all commands and output
def setup_logging(args: Namespace) -> None:
if os.path.exists("/dev/log") and args.op not in [
op_ssh,
op_ssh_for_each,
op_scp,
op_mount,
op_info,
op_list_deployments,
op_list_generations,
op_backup_status,
op_show_console_output,
op_dump_nix_paths,
op_export,
op_show_physical,
]:
# determine user
try:
user = subprocess.check_output(
["logname"], stderr=subprocess.PIPE, text=True
).strip()
except Exception:
user = pwd.getpwuid(os.getuid())[0]
logger = logging.getLogger("root")
logger.setLevel(logging.INFO)
handler = logging.handlers.SysLogHandler(address="/dev/log")
formatter = logging.Formatter("nixops[{0}]: %(message)s".format(os.getpid()))
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info("User: {0}, Command: {1}".format(user, " ".join(sys.argv)))
# pass all stdout/stderr to the logger as well
nixops.util.TeeStderr()
nixops.util.TeeStdout()
def add_subparser(
subparsers: _SubParsersAction, name: str, help: str
) -> ArgumentParser:
subparser: ArgumentParser
subparser = subparsers.add_parser(name, help=help)
subparser.add_argument(
"--network",
dest="network_dir",
metavar="FILE",
default=os.getcwd(),
help="path to a directory containing either nixops.nix or flake.nix",
)
subparser.add_argument(
"--deployment",
"-d",
dest="deployment",
metavar="UUID_OR_NAME",
default=os.environ.get(
"NIXOPS_DEPLOYMENT", os.environ.get("CHARON_DEPLOYMENT", None)
),
help="UUID or symbolic name of the deployment",
)
subparser.add_argument("--debug", action="store_true", help="enable debug output")
subparser.add_argument(
"--confirm",
action="store_true",
help="confirm dangerous operations; do not ask",
)
# Nix options that we pass along.
subparser.add_argument(
"-I",
nargs=1,
action="append",
dest="nix_path",
metavar="PATH",
help="append a directory to the Nix search path",
)
subparser.add_argument(
"--max-jobs",
"-j",
type=int,
metavar="N",
help="set maximum number of concurrent Nix builds",
)
subparser.add_argument(
"--cores",
type=int,
metavar="N",
help="sets the value of the NIX_BUILD_CORES environment variable in the invocation of builders",
)
subparser.add_argument(
"--keep-going", action="store_true", help="keep going after failed builds"
)
subparser.add_argument(
"--keep-failed",
"-K",
action="store_true",
help="keep temporary directories of failed builds",
)
subparser.add_argument(
"--show-trace",
action="store_true",
help="print a Nix stack trace if evaluation fails, or a python stack trace if nixops fails",
)
subparser.add_argument(
"--fallback", action="store_true", help="fall back on installation from source"
)
subparser.add_argument(
"--no-build-output",
action="store_true",
help="suppress output written by builders",
)
subparser.add_argument(
"--option",
nargs=2,
action="append",
dest="nix_options",
metavar=("NAME", "VALUE"),
help="set a Nix option",
)
subparser.add_argument(
"--read-only-mode",
action="store_true",
help="run Nix evaluations in read-only mode",
)
return subparser
def add_common_deployment_options(subparser: ArgumentParser) -> None:
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="perform deployment actions on the specified machines only",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="do not perform deployment actions on the specified machines",
)
subparser.add_argument(
"--check",
action="store_true",
help="do not assume that the recorded state is correct",
)
subparser.add_argument(
"--allow-reboot", action="store_true", help="reboot machines if necessary"
)
subparser.add_argument(
"--force-reboot", action="store_true", help="reboot machines unconditionally"
)
subparser.add_argument(
"--max-concurrent-copy",
type=int,
default=5,
metavar="N",
help="maximum number of concurrent nix-copy-closure processes",
)
subparser.add_argument(
"--max-concurrent-activate",
type=int,
default=-1,
metavar="N",
help="maximum number of concurrent machine activations",
)
subparser.add_argument(
"--no-sync", action="store_true", help="do not flush buffers to disk"
)
def error(msg: str) -> None:
sys.stderr.write(nixops.ansi.ansi_warn("error: ") + msg + "\n")
def parser_plugin_hooks(parser: ArgumentParser, subparsers: _SubParsersAction) -> None:
PluginManager.parser(parser, subparsers)
| 43,899 | Python | .py | 1,090 | 29.801835 | 118 | 0.574055 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,388 | __main__.py | NixOS_nixops/nixops/__main__.py |
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import os
from typing import Optional
def setup_debugger() -> None:
""" """
import traceback
import pdb
from types import TracebackType
from typing import Type
def hook(
_type: Type[BaseException], value: BaseException, tb: Optional[TracebackType]
) -> None:
if hasattr(sys, "ps1") or not sys.stderr.isatty():
sys.__excepthook__(_type, value, tb)
else:
traceback.print_exception(_type, value, tb)
pdb.post_mortem(tb)
sys.excepthook = hook
# Run check for --pdb as early as possible so it kicks in _before_ plugin loading
# and other dynamic startup happens
if __name__.split(".")[-1] == "__main__":
if "--pdb" in sys.argv:
setup_debugger()
from nixops.parallel import MultipleExceptions
from nixops.script_defs import setup_logging
from nixops.evaluation import NixEvalError
from nixops.script_defs import error
from nixops.args import parser
import nixops
def main() -> None:
if os.path.basename(sys.argv[0]) == "charon":
sys.stderr.write(
nixops.ansi.ansi_warn("warning: ‘charon’ is now called ‘nixops’") + "\n"
)
args = parser.parse_args()
setup_logging(args)
from nixops.exceptions import NixError
try:
nixops.deployment.DEBUG = args.debug
args.op(args)
except NixEvalError:
error("evaluation of the deployment specification failed")
sys.exit(1)
except KeyboardInterrupt:
error("interrupted")
sys.exit(1)
except MultipleExceptions as e:
error(str(e))
if args.debug or args.show_trace or str(e) == "":
e.print_all_backtraces()
sys.exit(1)
except NixError as e:
sys.stderr.write(str(e))
sys.stderr.flush()
sys.exit(1)
if __name__ == "__main__":
main()
| 1,917 | Python | .py | 59 | 26.338983 | 85 | 0.646129 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,389 | util.py | NixOS_nixops/nixops/util.py |
# -*- coding: utf-8 -*-
from __future__ import annotations
import os
import sys
import time
import json
import copy
import fcntl
import base64
import select
import shutil
import tempfile
import subprocess
import logging
import atexit
import re
import typing
import typeguard # type: ignore
import shlex
import collections.abc
from inspect import isclass
from typing import (
Callable,
List,
Optional,
Any,
IO,
Union,
Mapping,
TextIO,
Tuple,
Dict,
Iterator,
TypeVar,
Generic,
Iterable,
)
import nixops.util
from nixops.logger import MachineLogger
from io import StringIO
def shlex_join(split_command: Iterable[str]) -> str:
"""Backport of shlex.join from python 3.8"""
return " ".join(shlex.quote(arg) for arg in split_command)
devnull = open(os.devnull, "r+")
def check_wait(
test: Callable[[], bool],
initial: int = 10,
factor: int = 1,
max_tries: int = 60,
exception: bool = True,
) -> bool:
"""Call function ‘test’ periodically until it returns True or a timeout occurs."""
wait = initial
tries = 0
while tries < max_tries and not test():
wait = wait * factor
tries = tries + 1
if tries == max_tries:
if exception:
raise Exception("operation timed out")
return False
time.sleep(wait)
return True
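# --- Illustrative sketch (not part of the original nixops source) ---------
# check_wait() polls a predicate, waiting between attempts (the wait is
# scaled by `factor`) and giving up after `max_tries` tries. The SSH port
# probe below is an assumed example; host and port are placeholders.
def _example_wait_for_ssh(host: str = "192.0.2.1", port: int = 22) -> bool:
    import socket
    def port_open() -> bool:
        try:
            socket.create_connection((host, port), timeout=1).close()
            return True
        except OSError:
            return False
    # Poll once per second, up to 30 tries; return False instead of raising.
    return check_wait(port_open, initial=1, factor=1, max_tries=30, exception=False)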
class CommandFailed(Exception):
def __init__(self, message: str, exitcode: int) -> None:
self.message = message
self.exitcode = exitcode
def __str__(self) -> str:
return "{0} (exit code {1})".format(self.message, self.exitcode)
K = TypeVar("K")
V = TypeVar("V")
class ImmutableMapping(Generic[K, V], Mapping[K, V]):
"""
    An immutable wrapper around dicts that also turns lists into tuples
"""
def __init__(self, base_dict: Dict):
def _transform_value(value: Any) -> Any:
if isinstance(value, list):
return tuple(_transform_value(i) for i in value)
elif isinstance(value, dict):
return self.__class__(value)
else:
return value
self._dict: Dict[K, V] = {k: _transform_value(v) for k, v in base_dict.items()}
def __getitem__(self, key: K) -> V:
return self._dict[key]
def __iter__(self) -> Iterator[K]:
return iter(self._dict)
def __len__(self) -> int:
return len(self._dict)
def __contains__(self, key: Any) -> bool:
return key in self._dict
def __getattr__(self, key: Any) -> V:
return self[key]
def __repr__(self) -> str:
return "<{} {}>".format(self.__class__.__name__, self._dict)
class ImmutableValidatedObject:
"""
An immutable object that validates input types
It also converts nested dictionaries into new ImmutableValidatedObject
instances (or the annotated subclass).
"""
_frozen: bool
def __init__(self, *args: ImmutableValidatedObject, **kwargs):
kw = {}
for arg in args:
if not isinstance(arg, ImmutableValidatedObject):
                raise TypeError("Argument is not an ImmutableValidatedObject instance")
kw.update(dict(arg))
kw.update(kwargs)
# Support inheritance
anno: Dict = {}
for x in reversed(self.__class__.mro()):
anno.update(typing.get_type_hints(x))
def _transform_value(key: Any, value: Any) -> Any:
ann = anno.get(key)
# Untyped, pass through
if not ann:
return value
if isclass(ann) and issubclass(ann, ImmutableValidatedObject):
value = ann(**value)
# Support containers of ImmutableValidatedObjects
match typing.get_origin(ann):
case collections.abc.Sequence:
sequence: List = []
for v in value:
for subann in typing.get_args(ann):
if isclass(subann) and issubclass(
subann, ImmutableValidatedObject
):
sequence.append(subann(**v))
else:
sequence.append(v)
value = tuple(sequence)
case collections.abc.Mapping:
_, value_ann = typing.get_args(ann)
if isclass(value_ann) and issubclass(
value_ann, ImmutableValidatedObject
):
mapping: Dict = {}
for k, v in value.items():
mapping[k] = value_ann(**v)
value = mapping
typeguard.check_type(value, ann)
return value
for key in set(list(anno.keys()) + list(kwargs.keys())):
if key == "_frozen":
continue
# If a default value:
# class SomeSubClass(ImmutableValidatedObject):
# x: int = 1
#
            # is declared, the attribute is already set on self before __init__ runs
default = getattr(self, key) if hasattr(self, key) else None
value = kw.get(key, default)
setattr(self, key, _transform_value(key, value))
self._frozen = True
def __setattr__(self, name, value) -> None:
if hasattr(self, "_frozen") and self._frozen:
raise AttributeError(f"{self.__class__.__name__} is immutable")
super().__setattr__(name, value)
def __iter__(self):
for attr, value in self.__dict__.items():
if attr == "_frozen":
continue
yield attr, value
def __repr__(self) -> str:
anno: Dict = self.__annotations__
attrs: List[str] = []
for attr, value in self.__dict__.items():
if attr == "_frozen":
continue
ann: str = ""
a = anno.get(attr)
if a and hasattr(a, "__name__"):
ann = f": {a.__name__}"
elif a is not None:
ann = f": {a}"
attrs.append(f"{attr}{ann} = {value}")
return "{}({})".format(self.__class__.__name__, ", ".join(attrs))
class NixopsEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, (ImmutableMapping, ImmutableValidatedObject)):
return dict(obj)
return json.JSONEncoder.default(self, obj)
def logged_exec( # noqa: C901
command: List[str],
logger: MachineLogger,
check: bool = True,
capture_stdout: bool = False,
capture_stderr: Optional[bool] = True,
stdin: Optional[IO[Any]] = None,
stdin_string: Optional[str] = None,
env: Optional[Mapping[str, str]] = None,
preexec_fn: Optional[Callable[[], Any]] = None,
) -> Union[str, int]:
"""
Execute a command with logging using the specified logger.
The command itself has to be an iterable of strings, just like
subprocess.Popen without shell=True. Keywords stdin and env have the same
functionality as well.
When calling with capture_stdout=True, a string is returned, which contains
everything the program wrote to stdout.
When calling with check=False, the return code isn't checked and the
function will return an integer which represents the return code of the
program, otherwise a CommandFailed exception is thrown.
"""
passed_stdin: Union[int, IO[Any]]
if stdin_string is not None:
passed_stdin = subprocess.PIPE
elif stdin is not None:
passed_stdin = stdin
else:
passed_stdin = devnull
fds: List[IO[str]] = []
if capture_stdout:
process = subprocess.Popen(
command,
env=env,
stdin=passed_stdin,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE if capture_stderr else nixops.util.devnull,
preexec_fn=preexec_fn,
text=True,
)
fds = [fd for fd in [process.stdout, process.stderr] if fd]
log_fd_opt = process.stderr
else:
process = subprocess.Popen(
command,
env=env,
stdin=passed_stdin,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT if capture_stderr else nixops.util.devnull,
preexec_fn=preexec_fn,
text=True,
)
if process.stdout:
fds = [process.stdout]
log_fd_opt = process.stdout
if process.stdout is None:
raise ValueError("process.stdout was None")
process_stdout: IO[str] = process.stdout
if log_fd_opt is None:
raise ValueError("log_fd was None")
log_fd: IO[str] = log_fd_opt
# FIXME: this can deadlock if stdin_string doesn't fit in the
# kernel pipe buffer.
if stdin_string is not None:
if process.stdin is None:
raise ValueError("process.stdin was None")
process_stdin: IO[str] = process.stdin
# PIPE_BUF is not the size of the kernel pipe buffer (see
# https://unix.stackexchange.com/questions/11946/how-big-is-the-pipe-buffer)
# but if something fits in PIPE_BUF, it'll fit in the kernel pipe
# buffer.
# So we use PIPE_BUF as the threshold to emit a warning,
# so that if the deadlock described above does happen,
# the user at least knows what the cause is.
if len(stdin_string) > select.PIPE_BUF:
sys.stderr.write(
(
"Warning: Feeding more than PIPE_BUF = {} bytes ({})"
+ " via stdin to a subprocess. This may deadlock."
+ " Please report it as a bug if you see it happen,"
+ " at https://github.com/NixOS/nixops/issues/800\n"
).format(select.PIPE_BUF, len(stdin_string))
)
process_stdin.write(stdin_string)
process_stdin.close()
for fd in fds:
make_non_blocking(fd)
at_new_line = True
stdout = ""
while len(fds) > 0:
# The timeout/poll is to deal with processes (like
# VBoxManage) that start children that go into the
# background but keep the parent's stdout/stderr open,
# preventing an EOF. FIXME: Would be better to catch
# SIGCHLD.
(r, w, x) = select.select(fds, [], [], 1)
if len(r) == 0 and process.poll() is not None:
break
if capture_stdout and process_stdout in r:
data = process_stdout.read()
if data == "":
fds.remove(process_stdout)
else:
stdout += data
if log_fd in r:
data = log_fd.read()
if data == "":
if not at_new_line:
logger.log_end("")
fds.remove(log_fd)
else:
start = 0
while start < len(data):
end = data.find("\n", start)
if end == -1:
logger.log_start(data[start:])
at_new_line = False
else:
s = data[start:end]
if at_new_line:
logger.log(s)
else:
logger.log_end(s)
at_new_line = True
if end == -1:
break
start = end + 1
res = process.wait()
if check and res != 0:
msg = "command ‘{0}’ failed on machine ‘{1}’"
err = msg.format(command, logger.machine_name)
raise CommandFailed(err, res)
return stdout if capture_stdout else res
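# --- Illustrative sketch (not part of the original nixops source) ---------
# logged_exec() streams subprocess output through a MachineLogger and can
# capture stdout; the caller below is handed a logger by surrounding
# deployment code, and ["uname", "-a"] is an assumed example command.
def _example_logged_exec(logger: MachineLogger) -> Union[str, int]:
    # With capture_stdout=True the captured stdout is returned as a string.
    return logged_exec(["uname", "-a"], logger, capture_stdout=True)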
def generate_random_string(length: int = 256) -> str:
"""Generate a base-64 encoded cryptographically strong random string."""
s = os.urandom(length)
assert len(s) == length
return base64.b64encode(s).decode()
def make_non_blocking(fd: IO[Any]) -> None:
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
def wait_for_success(
fn: Callable[[], None],
timeout: Optional[int] = None,
callback: Optional[Callable[[], None]] = None,
) -> bool:
n = 0
while True:
try:
fn()
except Exception:
pass
else:
return True
n = n + 1
if timeout is not None and n >= timeout:
break
if callback:
callback()
time.sleep(1)
return False
def wait_for_fail(
fn: Callable[[], None],
timeout: Optional[int] = None,
callback: Optional[Callable[[], None]] = None,
) -> bool:
n = 0
while True:
try:
fn()
except Exception:
return True
n = n + 1
if timeout is not None and n >= timeout:
break
if callback:
callback()
time.sleep(1)
return False
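# --- Illustrative sketch (not part of the original nixops source) ---------
# wait_for_success() retries a callable once per second until it stops
# raising, giving up after `timeout` attempts; wait_for_fail() is the mirror
# image. The readiness-marker path below is a hypothetical example.
def _example_wait_for_marker(path: str = "/tmp/nixops-example-ready") -> bool:
    def probe() -> None:
        if not os.path.exists(path):
            raise RuntimeError("marker not present yet")
    return wait_for_success(probe, timeout=30)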
def _maybe_abspath(s: str) -> str:
if (
s.startswith("http://")
or s.startswith("https://")
or s.startswith("file://")
or s.startswith("channel:")
):
return s
return os.path.abspath(s)
def abs_nix_path(x: str) -> str:
xs = x.split("=", 1)
if len(xs) == 1:
return _maybe_abspath(x)
return xs[0] + "=" + _maybe_abspath(xs[1])
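# --- Illustrative sketch (not part of the original nixops source) ---------
# abs_nix_path() absolutizes NIX_PATH-style entries while leaving URLs and
# channel: references untouched; the entries below are assumed examples.
def _example_abs_nix_path() -> None:
    assert abs_nix_path("channel:nixos-24.05") == "channel:nixos-24.05"
    assert abs_nix_path("nixpkgs=https://example.org/nixpkgs.tar.gz") == (
        "nixpkgs=https://example.org/nixpkgs.tar.gz"
    )
    # Relative paths become absolute relative to the current directory:
    assert abs_nix_path("nixpkgs=pkgs") == "nixpkgs=" + os.path.abspath("pkgs")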
class Undefined:
pass
undefined = Undefined()
def attr_property(name: str, default: Any, type: Optional[Any] = str) -> Any:
"""Define a property that corresponds to a value in the NixOps state file."""
def get(self) -> Any:
s: Any = self._get_attr(name, default)
if s == undefined:
if default != undefined:
return copy.deepcopy(default)
raise Exception(
"deployment attribute ‘{0}’ missing from state file".format(name)
)
if s is None:
return None
elif type is str:
return s
elif type is int:
return int(s)
elif type is bool:
return True if s == "1" else False
elif type == "json":
return json.loads(s)
else:
assert False
def set(self, x: Any) -> None:
if x == default:
self._del_attr(name)
elif type == "json":
self._set_attr(name, json.dumps(x, cls=NixopsEncoder))
else:
self._set_attr(name, x)
return property(get, set)
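# --- Illustrative sketch (not part of the original nixops source) ---------
# attr_property() builds properties backed by _get_attr/_set_attr/_del_attr.
# Real NixOps state objects persist these in the deployment state file; the
# in-memory stand-in below only illustrates the contract.
class _ExampleState:
    vm_id = attr_property("vmId", None)
    reboots = attr_property("reboots", 0, int)
    def __init__(self) -> None:
        self._attrs: Dict[str, Any] = {}
    def _get_attr(self, name: str, default: Any) -> Any:
        return self._attrs.get(name, default)
    def _set_attr(self, name: str, value: Any) -> None:
        self._attrs[name] = value
    def _del_attr(self, name: str) -> None:
        self._attrs.pop(name, None)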
def create_key_pair(
key_name: str = "NixOps auto-generated key", type: str = "ed25519"
) -> Tuple[str, str]:
key_dir = tempfile.mkdtemp(prefix="nixops-key-tmp")
res = subprocess.call(
["ssh-keygen", "-t", type, "-f", key_dir + "/key", "-N", "", "-C", key_name],
stdout=devnull,
)
if res != 0:
raise Exception("unable to generate an SSH key")
with open(key_dir + "/key") as f:
private = f.read()
with open(key_dir + "/key.pub") as f:
public = f.read().rstrip()
shutil.rmtree(key_dir)
return (private, public)
class SelfDeletingDir(str):
def __init__(self, s: str) -> None:
str.__init__(s)
atexit.register(self._delete)
def _delete(self) -> None:
shutil.rmtree(self)
class TeeStderr(StringIO):
stderr: TextIO
def __init__(self) -> None:
StringIO.__init__(self)
self.stderr = sys.stderr
self.logger = logging.getLogger("root")
sys.stderr = self
def __del__(self) -> None:
sys.stderr = self.stderr
def write(self, data: str) -> int:
ret = self.stderr.write(data)
for line in data.split("\n"):
self.logger.warning(line)
return ret
def fileno(self) -> int:
return self.stderr.fileno()
def isatty(self) -> bool:
return self.stderr.isatty()
def flush(self) -> None:
return self.stderr.flush()
class TeeStdout(StringIO):
stdout: TextIO
def __init__(self) -> None:
StringIO.__init__(self)
self.stdout = sys.stdout
self.logger = logging.getLogger("root")
sys.stdout = self
def __del__(self) -> None:
sys.stdout = self.stdout
def write(self, data: str) -> int:
ret = self.stdout.write(data)
for line in data.split("\n"):
self.logger.info(line)
return ret
def fileno(self) -> int:
return self.stdout.fileno()
def isatty(self) -> bool:
return self.stdout.isatty()
def flush(self) -> None:
return self.stdout.flush()
# Borrowed from http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python.
def which(program: str) -> str:
import os
def is_exe(fpath: str) -> bool:
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
raise Exception(r"program ‘{0}’ not found in \$PATH".format(program)) # noqa: W605
def write_file(path: str, contents: str) -> None:
with open(path, "w") as f:
f.write(contents)
def parse_nixos_version(s: str) -> List[str]:
"""Split a NixOS version string into a list of components."""
return s.split(".")
# sd -> sd
# xvd -> sd
# nvme -> sd
def device_name_to_boto_expected(string: str) -> str:
"""Transforms device name to name, that boto expects."""
m = re.search(r"(.*)\/nvme(\d+)n1p?(\d+)?", string) # noqa: W605
if m is not None:
device = m.group(2)
device_ = int(device) - 1
device_transformed = chr(ord("f") + device_)
partition = m.group(3) or ""
return "{0}/sd{1}{2}".format(m.group(1), device_transformed, partition)
else:
return string.replace("/dev/xvd", "/dev/sd")
# sd -> sd
# xvd -> sd
# nvme -> nvme
def device_name_user_entered_to_stored(string: str) -> str:
return string.replace("/dev/xvd", "/dev/sd")
# sd -> xvd
# xvd -> xvd
# nvme -> nvme
def device_name_stored_to_real(string: str) -> str:
return string.replace("/dev/sd", "/dev/xvd")
| 18,423 | Python | .py | 518 | 26.397683 | 94 | 0.564789 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,390 | evaluation.py | NixOS_nixops/nixops/evaluation.py |
from nixops.nix_expr import RawValue, py2nix
import subprocess
import typing
from typing import Optional, Mapping, Any, List, Dict, TextIO
import json
from nixops.util import ImmutableValidatedObject
from nixops.exceptions import NixError
import itertools
import os.path
import os
from dataclasses import dataclass
class NixEvalError(NixError):
pass
class MalformedNetworkError(NixError):
pass
class GenericStorageConfig(ImmutableValidatedObject):
provider: str
configuration: typing.Mapping[typing.Any, typing.Any]
class GenericLockConfig(ImmutableValidatedObject):
provider: str
configuration: typing.Mapping[typing.Any, typing.Any]
class NetworkEval(ImmutableValidatedObject):
storage: GenericStorageConfig
lock: GenericLockConfig
description: str = "Unnamed NixOps network"
enableRollback: bool = False
class RawNetworkEval(ImmutableValidatedObject):
storage: Optional[Mapping[str, Any]]
lock: Optional[Mapping[str, Any]]
description: Optional[str]
enableRollback: Optional[bool]
class EvalResult(ImmutableValidatedObject):
exists: bool
value: Any
@dataclass
class NetworkFile:
network: str
is_flake: bool = False
def get_expr_path() -> str:
expr_path: str = os.path.realpath(
os.path.dirname(__file__) + "/../../../../share/nix/nixops"
)
if not os.path.exists(expr_path):
expr_path = os.path.realpath(
os.path.dirname(__file__) + "/../../../../../share/nix/nixops"
)
if not os.path.exists(expr_path):
expr_path = os.path.dirname(__file__) + "/../nix"
return expr_path
def eval(
# eval-machine-info args
networkExpr: NetworkFile, # Flake conditional
uuid: str,
deploymentName: str,
networkExprs: List[str] = [],
args: Dict[str, str] = {},
pluginNixExprs: List[str] = [],
checkConfigurationOptions: bool = True,
# Extend internal defaults
nix_path: List[str] = [],
# nix-instantiate args
nix_args: Dict[str, Any] = {},
attr: Optional[str] = None,
extra_flags: List[str] = [],
# Non-propagated args
stderr: Optional[TextIO] = None,
build: bool = False,
) -> Any:
exprs: List[str] = list(networkExprs)
if not networkExpr.is_flake:
exprs.append(networkExpr.network)
base_cmd: List[str] = (
["nix-build"]
if build
else ["nix-instantiate", "--eval-only", "--json", "--strict"]
)
argv: List[str] = (
base_cmd
+ ["--show-trace"]
+ [os.path.join(get_expr_path(), "eval-machine-info.nix")]
+ ["-I", "nixops=" + get_expr_path()]
+ [
"--arg",
"networkExprs",
py2nix([RawValue(x) if x[0] == "<" else x for x in exprs]),
]
+ [
"--arg",
"args",
py2nix({key: RawValue(val) for key, val in args.items()}, inline=True),
]
+ ["--argstr", "uuid", uuid]
+ ["--argstr", "deploymentName", deploymentName]
+ ["--arg", "pluginNixExprs", py2nix(pluginNixExprs)]
+ ["--arg", "checkConfigurationOptions", json.dumps(checkConfigurationOptions)]
+ list(itertools.chain(*[["-I", x] for x in (nix_path + pluginNixExprs)]))
+ extra_flags
)
for k, v in nix_args.items():
argv.extend(["--arg", k, py2nix(v, inline=True)])
if attr:
argv.extend(["-A", attr])
if networkExpr.is_flake:
argv.extend(["--allowed-uris", get_expr_path()])
argv.extend(["--argstr", "flakeUri", networkExpr.network])
try:
ret = subprocess.check_output(argv, stderr=stderr, text=True)
if build:
return ret.strip()
return json.loads(ret)
except OSError as e:
raise Exception("unable to run ‘nix-instantiate’: {0}".format(e))
except subprocess.CalledProcessError:
raise NixEvalError
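# Illustrative note (not part of the original nixops source): a typical call
# evaluates a single attribute of the network, e.g.
#     eval(networkExpr=NetworkFile("./nixops.nix"), uuid="dummy",
#          deploymentName="dummy", attr="info.network")
# which mirrors what eval_network() below does.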
def eval_network(nix_expr: NetworkFile) -> NetworkEval:
try:
result = eval(
networkExpr=nix_expr,
uuid="dummy",
deploymentName="dummy",
attr="info.network",
)
except Exception:
raise NixEvalError("No network attribute found")
if result.get("storage") is None:
raise MalformedNetworkError(
"""
WARNING: NixOps 1.0 -> 2.0 conversion step required
NixOps 2.0 added support for multiple storage backends.
Upgrade steps:
1. Open %s
2. Add:
network.storage.legacy = {
databasefile = "~/.nixops/deployments.nixops";
};
3. Rerun
See https://nixops.readthedocs.io/en/latest/manual/migrating.html#state-location for more guidance.
"""
% nix_expr.network
)
raw_eval = RawNetworkEval(**result)
storage: Mapping[str, Any] = raw_eval.storage or {}
if len(storage) > 1:
raise MalformedNetworkError(
"Invalid property: network.storage can only have one defined storage backend."
)
storage_config: Optional[Mapping[str, Any]]
try:
storage_key = list(storage.keys()).pop()
storage_value = storage[storage_key]
storage_config = {"provider": storage_key, "configuration": storage_value}
except IndexError:
raise MalformedNetworkError(
"Missing property: network.storage has no defined storage backend."
)
lock: Mapping[str, Any] = raw_eval.lock or {}
if len(lock) > 1:
raise MalformedNetworkError(
"Invalid property: network.lock can only have one defined lock backend."
)
lock_config: Optional[Mapping[str, Any]]
try:
lock_key = list(lock.keys()).pop()
lock_config = {
"provider": lock_key,
"configuration": lock[lock_key],
}
except IndexError:
lock_config = {
"provider": "noop",
"configuration": {},
}
return NetworkEval(
enableRollback=raw_eval.enableRollback or False,
description=raw_eval.description or "Unnamed NixOps network",
storage=storage_config,
lock=lock_config,
)
| 6,094 | Python | .py | 178 | 27.55618 | 99 | 0.634043 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,391 | plugin.py | NixOS_nixops/nixops/plugin.py |
from nixops.storage import StorageBackend
from nixops.storage.legacy import LegacyBackend
from nixops.storage.memory import MemoryBackend
import nixops.plugins
from nixops.locks import LockDriver
from nixops.locks.noop import NoopLock
from typing import Dict, Type
class InternalPlugin(nixops.plugins.Plugin):
def storage_backends(self) -> Dict[str, Type[StorageBackend]]:
return {"legacy": LegacyBackend, "memory": MemoryBackend}
def lock_drivers(self) -> Dict[str, Type[LockDriver]]:
return {"noop": NoopLock}
@nixops.plugins.hookimpl
def plugin():
return InternalPlugin()
| 609 | Python | .py | 15 | 37.333333 | 66 | 0.789116 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,392 | parallel.py | NixOS_nixops/nixops/parallel.py |
import threading
import sys
import queue
import traceback
from typing import Dict, TypeVar, List, Iterable, Callable, Tuple, Optional, Any
class MultipleExceptions(Exception):
exceptions: Dict[str, BaseException]
def __init__(self, exceptions: Dict[str, BaseException] = {}) -> None:
self.exceptions = exceptions
def __str__(self) -> str:
err = "Multiple exceptions (" + str(len(self.exceptions)) + "): \n"
for r in sorted(self.exceptions.keys()):
err += " * {}: {}\n".format(r, self.exceptions[r])
return err
def print_all_backtraces(self) -> None:
for k, e in self.exceptions.items():
sys.stderr.write("-" * 30 + "\n")
for line in traceback.format_exception(type(e), e, e.__traceback__):
sys.stderr.write(line)
sys.stderr.flush()
# Once we're on Python 3.8+, use a typing.Protocol instead of the Any alias below:
# class Task(Protocol):
#     name: str
Task = Any
Result = TypeVar("Result")
WorkerResult = Tuple[
Optional[Result], # Result of the execution, None if there is an Exception
Optional[BaseException], # Optional Exception information
str, # The result of `task.name`
]
def run_tasks( # noqa: C901
nr_workers: int, tasks: Iterable[Task], worker_fun: Callable[[Task], Result]
) -> List[Result]:
task_queue: queue.Queue[Task] = queue.Queue()
result_queue: queue.Queue[WorkerResult[Result]] = queue.Queue()
nr_tasks = 0
for t in tasks:
task_queue.put(t)
nr_tasks = nr_tasks + 1
if nr_tasks == 0:
return []
if nr_workers == -1:
nr_workers = nr_tasks
if nr_workers < 1:
raise Exception("number of worker threads must be at least 1")
def thread_fun() -> None:
n = 0
while True:
try:
t = task_queue.get(False)
except queue.Empty:
break
n = n + 1
work_result: WorkerResult[Result]
try:
work_result = (worker_fun(t), None, t.name)
except Exception as e:
work_result = (None, e, t.name)
result_queue.put(work_result)
# sys.stderr.write("thread {0} did {1} tasks\n".format(threading.current_thread(), n))
threads = []
for n in range(nr_workers):
thr = threading.Thread(target=thread_fun)
thr.daemon = True
thr.start()
threads.append(thr)
results: List[Result] = []
exceptions = {}
found_results: int = 0
while found_results < nr_tasks:
try:
# Use a timeout to allow keyboard interrupts to be
# processed. The actual timeout value doesn't matter.
result: WorkerResult[Result] = result_queue.get(True, 1000)
found_results += 1
(res, exc, name) = result
except queue.Empty:
continue
if exc:
exceptions[name] = exc
if res:
results.append(res)
for thr in threads:
thr.join()
if len(exceptions) == 1:
raise list(exceptions.values())[0]
if len(exceptions) > 1:
raise MultipleExceptions(exceptions)
return results
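# --- Illustrative sketch (not part of the original nixops source) ---------
# run_tasks() fans `worker_fun` out over tasks (anything with a `.name`
# attribute), collects non-None results, and re-raises worker exceptions
# once all workers have finished. The task class below is an assumed example.
class _ExampleTask:
    def __init__(self, name: str) -> None:
        self.name = name
def _example_run_tasks() -> List[str]:
    tasks = [_ExampleTask("a"), _ExampleTask("b"), _ExampleTask("c")]
    return run_tasks(nr_workers=2, tasks=tasks, worker_fun=lambda t: t.name.upper())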
| 3,214 | Python | .py | 89 | 28.078652 | 94 | 0.594586 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,393 | ssh_util.py | NixOS_nixops/nixops/ssh_util.py |
# -*- coding: utf-8 -*-
import atexit
import os
import shlex
import subprocess
import sys
import time
import weakref
from tempfile import mkdtemp
from typing import Dict, Any, Optional, Callable, List, Union, Iterable, Tuple, cast
import nixops.util
from nixops.logger import MachineLogger
__all__ = ["SSHConnectionFailed", "SSHCommandFailed", "SSH"]
class SSHConnectionFailed(Exception):
pass
class SSHCommandFailed(nixops.util.CommandFailed):
pass
class SSHMaster(object):
def __init__(
self,
target: str,
logger: MachineLogger,
ssh_flags: List[str] = [],
passwd: Optional[str] = None,
user: Optional[str] = None,
compress: bool = False,
ssh_quiet: Optional[bool] = False,
) -> None:
self._running: bool = False
self._tempdir: nixops.util.SelfDeletingDir = nixops.util.SelfDeletingDir(
mkdtemp(prefix="nixops-ssh-tmp")
)
self._askpass_helper: Optional[str] = None
self._control_socket: str = self._tempdir + "/master-socket"
self._ssh_target: str = target
pass_prompts: int = 0 if "-i" in ssh_flags and user is None else 3
kwargs: Dict[str, Any] = {}
if passwd is not None:
self._askpass_helper = self._make_askpass_helper()
newenv = dict(os.environ)
newenv.update(
{
"DISPLAY": ":666",
"SSH_ASKPASS": self._askpass_helper,
"NIXOPS_SSH_PASSWORD": passwd,
}
)
kwargs["env"] = newenv
kwargs["stdin"] = nixops.util.devnull
kwargs["preexec_fn"] = os.setsid
pass_prompts = 1
if ssh_quiet:
kwargs["capture_stderr"] = False
cmd = (
[
"ssh",
"-x",
self._ssh_target,
"-S",
self._control_socket,
"-M",
"-N",
"-f",
"-oNumberOfPasswordPrompts={0}".format(pass_prompts),
"-oServerAliveInterval=60",
"-oControlPersist=600",
]
+ (["-C"] if compress else [])
+ ssh_flags
)
res = nixops.util.logged_exec(cmd, logger, **kwargs)
if res != 0:
raise SSHConnectionFailed(
"unable to start SSH master connection to " "‘{0}’".format(target)
)
self.opts = ["-oControlPath={0}".format(self._control_socket)]
timeout = 60.0
while not self.is_alive():
if timeout < 0:
raise SSHConnectionFailed(
"could not establish an SSH master socket to "
"‘{0}’ within 60 seconds".format(target)
)
time.sleep(0.1)
timeout -= 0.1
self._running = True
weakself = weakref.ref(self)
def maybe_shutdown() -> None:
realself = weakself()
if realself is not None:
realself.shutdown()
atexit.register(maybe_shutdown)
def is_alive(self) -> bool:
"""
Check whether the control socket is still existing.
"""
return os.path.exists(self._control_socket)
def _make_askpass_helper(self) -> str:
"""
Create a SSH_ASKPASS helper script, which just outputs the contents of
the environment variable NIXOPS_SSH_PASSWORD.
"""
path = os.path.join(self._tempdir, "nixops-askpass-helper")
fd = os.open(path, os.O_WRONLY | os.O_CREAT | os.O_NOFOLLOW, 0o700)
os.write(
fd,
"""#!{0}
import sys
import os
sys.stdout.write(os.environ['NIXOPS_SSH_PASSWORD'])""".format(
sys.executable
).encode(),
)
os.close(fd)
return path
def shutdown(self) -> None:
"""
Shutdown master process and clean up temporary files.
"""
if not self._running:
return
self._running = False
subprocess.call(
["ssh", self._ssh_target, "-S", self._control_socket, "-O", "exit"],
stderr=nixops.util.devnull,
)
def __del__(self) -> None:
self.shutdown()
Command = Union[str, Iterable[str]]
class SSH(object):
def __init__(self, logger: MachineLogger):
"""
Initialize a SSH object with the specified Logger instance, which will
be used to write SSH output to.
"""
self._flag_fun: Callable[[], List[str]] = lambda: []
self._host_fun: Optional[Callable[[], str]] = None
self._passwd_fun: Callable[[], Optional[str]] = lambda: None
self._logger = logger
self._ssh_master: Optional[SSHMaster] = None
self._compress = False
self.privilege_escalation_command: List[str] = []
def register_host_fun(self, host_fun: Callable[[], str]) -> None:
"""
Register a function which returns the hostname or IP to connect to. The
function has to require no arguments.
"""
self._host_fun = host_fun
def _get_target(self, user: str) -> str:
if self._host_fun is None:
raise AssertionError("don't know which SSH host to connect to")
return "{0}@{1}".format(user, self._host_fun())
def register_flag_fun(self, flag_fun: Callable[[], List[str]]) -> None:
"""
Register a function that is used for obtaining additional SSH flags.
The function has to require no arguments and should return a list of
strings, each being a SSH flag/argument.
"""
self._flag_fun = flag_fun
def _get_flags(self) -> List[str]:
return self._flag_fun()
def register_passwd_fun(self, passwd_fun: Callable[[], Optional[str]]) -> None:
"""
Register a function that returns either a string or None and requires
no arguments. If the return value is a string, the returned string is
        used for keyboard-interactive authentication; if it is None, no attempt
is made to inject a password.
"""
self._passwd_fun = passwd_fun
def _get_passwd(self) -> Optional[str]:
return self._passwd_fun()
def reset(self) -> None:
"""
Reset SSH master connection.
"""
if self._ssh_master is not None:
self._ssh_master.shutdown()
self._ssh_master = None
def get_master(
self,
user: str,
flags: List[str] = [],
timeout: Optional[int] = None,
tries: int = 5,
ssh_quiet: Optional[bool] = False,
) -> SSHMaster:
"""
Start (if necessary) an SSH master connection to speed up subsequent
SSH sessions. Returns the SSHMaster instance on success.
"""
flags = flags + self._get_flags()
if self._ssh_master is not None:
master = weakref.proxy(self._ssh_master)
if master.is_alive():
return master # type: ignore
else:
master.shutdown()
tries = tries
if timeout is not None:
flags = flags + ["-o", "ConnectTimeout={0}".format(timeout)]
tries = 1
if self._host_fun is None:
raise AssertionError("don't know which SSH host to connect to")
elif self._host_fun() == "localhost":
tries = 1
sleep_time = 1
while True:
try:
self._ssh_master = SSHMaster(
self._get_target(user),
self._logger,
flags,
self._get_passwd(),
user,
compress=self._compress,
ssh_quiet=ssh_quiet,
)
break
except Exception:
tries = tries - 1
if tries == 0:
raise
msg = "could not connect to ‘{0}’, retrying in {1} seconds..."
self._logger.log(msg.format(self._get_target(user), sleep_time))
time.sleep(sleep_time)
sleep_time = sleep_time * 2
pass
return weakref.proxy(self._ssh_master) # type: ignore
@classmethod
def split_openssh_args(self, args: Iterable[str]) -> Tuple[List[str], Command]:
"""
Splits the specified list of arguments into a tuple consisting of the
list of flags and a list of strings for the actual command.
"""
non_option_args = "bcDEeFIiLlmOopQRSWw"
flags = []
command = list(args)
while len(command) > 0:
arg = command.pop(0)
if arg == "--":
break
elif arg.startswith("-"):
if len(command) > 0 and arg[1] in non_option_args:
flags.append(arg)
if len(arg) == 2:
flags.append(command.pop(0))
elif len(arg) > 2 and arg[1] != "-":
flags.append(arg[:2])
command.insert(0, "-" + arg[2:])
else:
flags.append(arg)
else:
command.insert(0, arg)
break
return (flags, command)
def _format_command(
self,
command: Command,
user: str,
allow_ssh_args: bool,
) -> Iterable[str]:
"""
Helper method for run_command, which essentially prepares and properly
        escapes the command. See run_command() for further description.
"""
# Don't make assumptions about remote login shell
cmd: List[str] = ["bash", "-c"]
if isinstance(command, str):
if allow_ssh_args:
return shlex.split(command)
else:
cmd.append(command)
# iterable
elif allow_ssh_args:
return command
else:
cmd.append(
" ".join(["'{0}'".format(arg.replace("'", r"'\''")) for arg in command])
)
if user and user != "root":
cmd = self.privilege_escalation_command + cmd
return ["--", nixops.util.shlex_join(cmd)]
def run_command(
self,
command: Command,
user: str,
flags: List[str] = [],
timeout: Optional[int] = None,
logged: bool = True,
allow_ssh_args: bool = False,
connection_tries: int = 5,
ssh_quiet: Optional[bool] = False,
**kwargs: Any
) -> Union[str, int]:
"""
Execute a 'command' on the current target host using SSH, passing
'flags' as additional arguments to SSH. The command can be either a
string or an iterable of strings, whereby if it's the latter, it will
be joined with spaces and properly shell-escaped.
If 'allow_ssh_args' is set to True, the specified command may contain
SSH flags.
The 'user' argument specifies the remote user to connect as. If unset
or None, the default is "root".
All keyword arguments except timeout and user are passed as-is to
nixops.util.logged_exec(), though if you set 'logged' to False, the
keyword arguments are passed as-is to subprocess.call() and the command
is executed interactively with no logging.
'timeout' specifies the SSH connection timeout.
'ssh_quiet' spawns a master ssh session, if needed, with stderr suppressed
"""
master = self.get_master(
flags=flags,
timeout=timeout,
user=user,
tries=connection_tries,
ssh_quiet=True if ssh_quiet else False,
)
flags = flags + self._get_flags()
if logged:
flags.append("-x")
cmd = ["ssh"] + master.opts + flags
cmd.append(self._get_target(user))
cmd += self._format_command(command, user=user, allow_ssh_args=allow_ssh_args)
if logged:
try:
return nixops.util.logged_exec(cmd, self._logger, **kwargs)
except nixops.util.CommandFailed as exc:
raise SSHCommandFailed(exc.message, exc.exitcode)
else:
check = kwargs.pop("check", True)
res = subprocess.call(cmd, **kwargs)
if check and res != 0:
msg = "command ‘{0}’ failed on host ‘{1}’"
err = msg.format(cmd, self._get_target(user))
raise SSHCommandFailed(err, res)
else:
return res
def run_command_get_stdout(
self,
command: Command,
user: str,
flags: List[str] = [],
timeout: Optional[int] = None,
logged: bool = True,
allow_ssh_args: bool = False,
**kwargs: Any
) -> str:
assert kwargs.get("capture_stdout", True) is True
kwargs["capture_stdout"] = True
return cast(
str,
self.run_command(
command=command,
flags=flags,
timeout=timeout,
logged=logged,
allow_ssh_args=allow_ssh_args,
user=user,
**kwargs
),
)
def run_command_get_status(
self,
command: Command,
user: str,
flags: List[str] = [],
timeout: Optional[int] = None,
logged: bool = True,
allow_ssh_args: bool = False,
**kwargs: Any
) -> int:
assert kwargs.get("capture_stdout", False) is False
kwargs["capture_stdout"] = False
return cast(
int,
self.run_command(
command=command,
flags=flags,
timeout=timeout,
logged=logged,
allow_ssh_args=allow_ssh_args,
user=user,
**kwargs
),
)
def enable_compression(self) -> None:
self._compress = True
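# --- Illustrative sketch (not part of the original nixops source) ---------
# SSH.split_openssh_args() can be exercised without a live connection; it
# separates leading OpenSSH-style flags from the remote command:
def _example_split_openssh_args() -> None:
    flags, command = SSH.split_openssh_args(["-p", "2222", "ls", "-l"])
    assert flags == ["-p", "2222"]
    assert command == ["ls", "-l"]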
| 14,148 | Python | .py | 387 | 25.589147 | 88 | 0.53854 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,394 | deployment.py | NixOS_nixops/nixops/deployment.py |
# -*- coding: utf-8 -*-
from __future__ import annotations
import sys
import os.path
import subprocess
import tempfile
import threading
from collections import defaultdict
import re
from datetime import datetime, timedelta
from nixops.resources import GenericResourceState
import nixops.statefile
import getpass
import traceback
import glob
import fcntl
import platform
import time
import importlib
from functools import reduce, lru_cache
from typing import (
Callable,
Dict,
Optional,
TextIO,
Set,
List,
DefaultDict,
Any,
Tuple,
Union,
cast,
TypeVar,
Type,
)
import nixops.backends
import nixops.logger
import nixops.parallel
from nixops.plugins.manager import (
DeploymentHooksManager,
MachineHooksManager,
PluginManager,
)
from nixops.nix_expr import RawValue, Function, Call, nixmerge, py2nix
from nixops.ansi import ansi_success
import nixops.evaluation
Definitions = Dict[str, nixops.resources.ResourceDefinition]
class UnknownBackend(Exception):
pass
DEBUG: bool = False
NixosConfigurationType = List[Dict[Tuple[str, ...], Any]]
TypedResource = TypeVar("TypedResource")
TypedDefinition = TypeVar("TypedDefinition")
class Deployment:
"""NixOps top-level deployment manager."""
default_description = "Unnamed NixOps network"
name: Optional[str] = nixops.util.attr_property("name", None)
nix_path = nixops.util.attr_property("nixPath", [], "json")
args: Dict[str, str] = nixops.util.attr_property("args", {}, "json")
description = nixops.util.attr_property("description", default_description)
configs_path = nixops.util.attr_property("configsPath", None)
rollback_enabled: bool = nixops.util.attr_property("rollbackEnabled", False)
# internal variable to mark if network attribute of network has been evaluated (separately)
network_attr_eval: bool = False
network_expr: nixops.evaluation.NetworkFile
_statefile: nixops.statefile.StateFile
def __init__(
self,
statefile,
uuid: str,
log_file: TextIO = sys.stderr,
):
self._statefile = statefile
self._db: nixops.statefile.Connection = statefile._db
self.uuid = uuid
self._last_log_prefix = None
self.extra_nix_path: List[str] = []
self.extra_nix_flags: List[str] = []
self.extra_nix_eval_flags: List[str] = []
self.nixos_version_suffix: Optional[str] = None
self._tempdir: Optional[nixops.util.SelfDeletingDir] = None
self.logger = nixops.logger.Logger(log_file)
self._lock_file_path: Optional[str] = None
self.expr_path = nixops.evaluation.get_expr_path()
self.resources: Dict[str, nixops.resources.GenericResourceState] = {}
with self._db:
c = self._db.cursor()
c.execute(
"select id, name, type from Resources where deployment = ?",
(self.uuid,),
)
for (id, name, type) in c.fetchall():
r = _create_state(self, type, name, id)
self.resources[name] = r
self.logger.update_log_prefixes()
self.definitions: Optional[Definitions] = None
@property
def tempdir(self) -> nixops.util.SelfDeletingDir:
if not self._tempdir:
self._tempdir = nixops.util.SelfDeletingDir(
tempfile.mkdtemp(prefix="nixops-tmp")
)
return self._tempdir
@property
def machines(self) -> Dict[str, nixops.backends.GenericMachineState]:
return _filter_machines(self.resources)
@property
def active(self) -> None:
"""
Legacy alias for active_machines.
Return type is set to None to make mypy fail and let plugin authors
notice that they should not use this legacy name.
"""
return self.active_machines # type: ignore
@property
def active_machines(
self,
) -> Dict[
str, nixops.backends.GenericMachineState
    ]:
return _filter_machines(self.active_resources)
@property
def active_resources(self) -> Dict[str, nixops.resources.GenericResourceState]:
return {n: r for n, r in self.resources.items() if not r.obsolete}
def get_generic_resource(
self, name: str, type_name: str
) -> nixops.resources.GenericResourceState:
res = self.active_resources.get(name, None)
if not res:
raise Exception("resource ‘{0}’ does not exist".format(name))
if res.get_type() != type_name:
raise Exception(
"resource ‘{0}’ is not of type ‘{1}’".format(name, type_name)
)
return res
def get_typed_resource(
self, name: str, type_name: str, type: Type[TypedResource]
) -> TypedResource:
res = self.get_generic_resource(name, type_name)
if not isinstance(res, type):
raise ValueError(f"{res} not of type {type}")
return res
def get_generic_definition(
self, name: str, type_name: str
) -> nixops.resources.ResourceDefinition:
defn = self._definitions().get(name, None)
if not defn:
raise Exception("definition ‘{0}’ does not exist".format(name))
if defn.get_type() != type_name:
raise Exception(
"definition ‘{0}’ is not of type ‘{1}’".format(name, type_name)
)
return defn
def get_typed_definition(
self, name: str, type_name: str, type: Type[TypedDefinition]
) -> TypedDefinition:
defn = self.get_generic_definition(name, type_name)
if not isinstance(defn, type):
raise ValueError(f"{defn} not of type {type}")
return defn
def get_machine(self, name: str, type: Type[TypedResource]) -> TypedResource:
m = self.get_generic_machine(name)
if not isinstance(m, type):
raise ValueError(f"{m} not of type {type}")
return m
def get_generic_machine(self, name: str) -> nixops.resources.GenericResourceState:
res = self.active_resources.get(name, None)
if not res:
raise Exception("machine ‘{0}’ does not exist".format(name))
if not is_machine(res):
raise Exception("resource ‘{0}’ is not a machine".format(name))
return res
def _definitions(self) -> Definitions:
if self.definitions is None:
raise Exception("Bug: Deployment.definitions is None.")
return self.definitions
def _definition_for(
self, name: str
) -> Optional[nixops.resources.ResourceDefinition]:
definitions = self._definitions()
return definitions[name]
def _definition_for_required(
self, name: str
) -> nixops.resources.ResourceDefinition:
defn = self._definition_for(name)
if defn is None:
raise Exception("Bug: Deployment.definitions['{}'] is None.".format(name))
return defn
def _machine_definition_for_required(
self, name: str
) -> nixops.backends.MachineDefinition:
defn = self._definition_for_required(name)
if not isinstance(defn, nixops.backends.MachineDefinition):
raise Exception("Definition named '{}' is not a machine.".format(name))
return defn
def _set_attrs(self, attrs: Dict[str, Optional[str]]) -> None:
"""Update deployment attributes in the state file."""
with self._db:
c = self._db.cursor()
for n, v in attrs.items():
if v is None:
c.execute(
"delete from DeploymentAttrs where deployment = ? and name = ?",
(self.uuid, n),
)
else:
c.execute(
"insert or replace into DeploymentAttrs(deployment, name, value) values (?, ?, ?)",
(self.uuid, n, v),
)
def _set_attr(self, name: str, value: Any) -> None:
"""Update one deployment attribute in the state file."""
self._set_attrs({name: value})
def _del_attr(self, name: str) -> None:
"""Delete a deployment attribute from the state file."""
with self._db:
self._db.execute(
"delete from DeploymentAttrs where deployment = ? and name = ?",
(self.uuid, name),
)
def _get_attr(self, name: str, default: Any = nixops.util.undefined) -> Any:
"""Get a deployment attribute from the state file."""
with self._db:
c = self._db.cursor()
c.execute(
"select value from DeploymentAttrs where deployment = ? and name = ?",
(self.uuid, name),
)
row: List[Optional[Any]] = c.fetchone()
if row is not None:
return row[0]
return nixops.util.undefined
def _create_resource(
self, name: str, type: str
) -> nixops.resources.GenericResourceState:
c = self._db.cursor()
c.execute(
"select 1 from Resources where deployment = ? and name = ?",
(self.uuid, name),
)
if len(c.fetchall()) != 0:
raise Exception("resource already exists in database!")
c.execute(
"insert into Resources(deployment, name, type) values (?, ?, ?)",
(self.uuid, name, type),
)
id = c.lastrowid
if id is None:
raise Exception("internal error: insert did not produce row id?")
r = _create_state(self, type, name, id)
self.resources[name] = r
return r
def export(self) -> Dict[str, Dict[str, Dict[str, str]]]:
with self._db:
c = self._db.cursor()
c.execute(
"select name, value from DeploymentAttrs where deployment = ?",
(self.uuid,),
)
rows = c.fetchall()
res = {row[0]: row[1] for row in rows}
res["resources"] = {r.name: r.export() for r in self.resources.values()}
return res
def import_(self, attrs: Dict[str, Union[str, Dict[str, Dict[str, str]]]]) -> None:
with self._db:
for name, value in attrs.items():
if name == "resources":
continue
self._set_attr(name, value)
if isinstance(attrs["resources"], dict):
for k, v in attrs["resources"].items():
if "type" not in v:
raise Exception("imported resource lacks a type")
r = self._create_resource(k, v["type"])
r.import_(v)
def clone(self) -> Deployment:
with self._db:
new = self._statefile.create_deployment()
self._db.execute(
"insert into DeploymentAttrs (deployment, name, value) "
+ "select ?, name, value from DeploymentAttrs where deployment = ?",
(new.uuid, self.uuid),
)
new.configs_path = None
return new
def _get_deployment_lock(
self,
) -> Any: # FIXME: DeploymentLock is defined inside the function
if self._lock_file_path is None:
lock_dir = os.environ.get("HOME", "") + "/.nixops/locks"
if not os.path.exists(lock_dir):
os.makedirs(lock_dir, 0o700)
self._lock_file_path = lock_dir + "/" + self.uuid
class DeploymentLock(object):
def __init__(self, depl: Deployment):
assert depl._lock_file_path is not None
self._lock_file_path: str = depl._lock_file_path
self._logger: nixops.logger.Logger = depl.logger
self._lock_file: Optional[TextIO] = None
def __enter__(self) -> None:
self._lock_file = open(self._lock_file_path, "w")
fcntl.fcntl(self._lock_file, fcntl.F_SETFD, fcntl.FD_CLOEXEC)
try:
fcntl.flock(self._lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
self._logger.log("waiting for exclusive deployment lock...")
fcntl.flock(self._lock_file, fcntl.LOCK_EX)
def __exit__(self, exception_type, exception_value, exception_traceback):
if self._lock_file is not None:
self._lock_file.close()
return DeploymentLock(self)
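    # Illustrative note (not part of the original nixops source): callers use
    # the returned object as a context manager, e.g.
    #     with depl._get_deployment_lock():
    #         ...mutate deployment state...
    # The flock() above blocks until any concurrent NixOps process releases
    # the per-deployment lock file under ~/.nixops/locks.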
def delete_resource(self, m: nixops.resources.GenericResourceState) -> None:
del self.resources[m.name]
with self._db:
self._db.execute(
"delete from Resources where deployment = ? and id = ?",
(self.uuid, m.id),
)
def delete(self, force: bool = False) -> None:
"""Delete this deployment from the state file."""
with self._db:
if not force and len(self.resources) > 0:
raise Exception(
"cannot delete this deployment because it still has resources"
)
# Delete the profile, if any.
profile = self.get_profile()
assert profile
for p in glob.glob(profile + "*"):
if os.path.islink(p):
os.remove(p)
# Delete the deployment from the database.
self._db.execute("delete from Deployments where uuid = ?", (self.uuid,))
def set_arg(self, name: str, value: str) -> None:
"""Set a persistent argument to the deployment specification."""
assert isinstance(name, str)
assert isinstance(value, str)
args = self.args
args[name] = value
self.args = args
def set_argstr(self, name: str, value: str) -> None:
"""Set a persistent argument to the deployment specification."""
assert isinstance(value, str)
self.set_arg(name, py2nix(value, inline=True))
def unset_arg(self, name: str) -> None:
"""Unset a persistent argument to the deployment specification."""
assert isinstance(name, str)
args = self.args
args.pop(name, None)
self.args = args
def evaluate_args(self) -> Any:
"""Evaluate the NixOps network expression's arguments."""
return self.eval(attr="nixopsArguments")
@lru_cache()
def evaluate_config(self, attr) -> Dict:
return self.eval(checkConfigurationOptions=False, attr=attr) # type: ignore
def evaluate_network(self, action: str = "") -> None:
if not self.network_attr_eval:
# Extract global deployment attributes.
try:
config = self.evaluate_config("info")["network"]
except Exception as e:
if action not in ("destroy", "delete"):
raise e
config = {}
self.description = config.get("description", self.default_description)
self.rollback_enabled = config.get("enableRollback", False)
self.network_attr_eval = True
def evaluate(self) -> None:
"""Evaluate the Nix expressions belonging to this deployment into a deployment specification."""
self.definitions = {}
self.evaluate_network()
config = self.evaluate_config("info")
# Extract machine information.
for name, cfg in config["machines"].items():
defn = _create_definition(name, cfg, cfg["targetEnv"])
self.definitions[name] = defn
# Extract info about other kinds of resources.
for res_type, cfg in config["resources"].items():
for name, y in cfg.items():
defn = _create_definition(
name, config["resources"][res_type][name], res_type
)
self.definitions[name] = defn
def eval(
self,
nix_args: Dict[str, Any] = {},
attr: Optional[str] = None,
include_physical: bool = False,
checkConfigurationOptions: bool = True,
) -> Any:
exprs: List[str] = []
if include_physical:
phys_expr = self.tempdir + "/physical.nix"
with open(phys_expr, "w") as f:
f.write(self.get_physical_spec())
exprs.append(phys_expr)
return nixops.evaluation.eval(
# eval-machine-info args
networkExpr=self.network_expr,
networkExprs=exprs,
uuid=self.uuid,
deploymentName=self.name or "",
args=self.args,
pluginNixExprs=PluginManager.nixexprs(),
# Extend defaults
nix_path=self.extra_nix_path + self.nix_path,
# nix-instantiate args
nix_args=nix_args,
attr=attr,
extra_flags=self.extra_nix_eval_flags,
# Non-propagated args
stderr=self.logger.log_file,
)
def evaluate_option_value(
self,
machine_name: str,
option_name: str,
include_physical: bool = False,
) -> Any:
"""Evaluate a single option of a single machine in the deployment specification."""
return self.eval(
checkConfigurationOptions=False,
include_physical=include_physical,
attr="nodes.{0}.config.{1}".format(machine_name, option_name),
)
def get_arguments(self) -> Any:
try:
return self.evaluate_args()
except Exception:
raise Exception("Could not determine arguments to NixOps deployment.")
def get_physical_spec(self) -> Any:
"""Compute the contents of the Nix expression specifying the computed physical deployment attributes"""
active_machines = self.active_machines
active_resources = self.active_resources
attrs_per_resource: Dict[str, NixosConfigurationType] = {
m.name: [] for m in active_resources.values()
}
authorized_keys: Dict[str, List[str]] = {
m.name: [] for m in active_machines.values()
}
kernel_modules: Dict[str, Set[str]] = {
m.name: set() for m in active_machines.values()
}
trusted_interfaces: Dict[str, Set[str]] = {
m.name: set() for m in active_machines.values()
}
for name, attrs in DeploymentHooksManager.physical_spec(self).items():
attrs_per_resource[name].extend(attrs)
# Hostnames should be accumulated like this:
#
# hosts[local_name][remote_ip] = [name1, name2, ...]
#
        # This makes hosts deterministic and is more in accordance with the
# format in hosts(5), which is like this:
#
# ip_address canonical_hostname [aliases...]
#
# This is critical for example when using host names for access
# control, because the canonical_hostname is returned in reverse
# lookups.
hosts: DefaultDict[str, DefaultDict[str, List[str]]] = defaultdict(
lambda: defaultdict(list)
)
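        # Illustrative example (not from the original source): if machine "web"
        # reaches "db" (alias "database") at 192.168.105.2, the accumulation
        # yields hosts["web"]["192.168.105.2"] == ["db", "database"], which is
        # later rendered as the /etc/hosts line "192.168.105.2 db database".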
def index_to_private_ip(index: int) -> str:
            n = 105 + index // 256  # integer division: one /24 subnet per 256 machines
assert n <= 255
return "192.168.{0}.{1}".format(n, index % 256)
def do_machine(m: nixops.backends.GenericMachineState) -> None:
defn = self._machine_definition_for_required(m.name)
attrs_list = attrs_per_resource[m.name]
private_ipv4: Optional[str] = m.private_ipv4
if private_ipv4:
attrs_list.append({("networking", "privateIPv4"): private_ipv4})
public_ipv4: Optional[str] = m.public_ipv4
if public_ipv4:
attrs_list.append({("networking", "publicIPv4"): public_ipv4})
# Set system.stateVersion if the Nixpkgs version supports it.
nixos_version = nixops.util.parse_nixos_version(defn.config.nixosRelease)
if nixos_version >= ["15", "09"]:
attrs_list.append(
{
("system", "stateVersion"): Call(
RawValue("lib.mkDefault"),
m.state_version or defn.config.nixosRelease,
)
}
)
if self.nixos_version_suffix:
if nixos_version >= ["18", "03"]:
attrs_list.append(
{
(
"system",
"nixos",
"versionSuffix",
): self.nixos_version_suffix
}
)
else:
attrs_list.append(
{("system", "nixosVersionSuffix"): self.nixos_version_suffix}
)
for m in active_machines.values():
do_machine(m)
def emit_resource(r: nixops.resources.GenericResourceState) -> Any:
config: NixosConfigurationType = []
config.extend(attrs_per_resource[r.name])
if is_machine(r):
# Sort the hosts by its canonical host names.
sorted_hosts = sorted(
hosts[r.name].items(), key=lambda item: item[1][0]
)
# Just to remember the format:
# ip_address canonical_hostname [aliases...]
extra_hosts = [
"{0} {1}".format(ip, " ".join(names)) for ip, names in sorted_hosts
]
if authorized_keys[r.name]:
config.append(
{
("users", "extraUsers", "root"): {
("openssh", "authorizedKeys", "keys"): authorized_keys[
r.name
]
},
("services", "openssh"): {
"extraConfig": "PermitTunnel yes\n"
},
}
)
config.append(
{
("boot", "kernelModules"): list(kernel_modules[r.name]),
("networking", "firewall"): {
"trustedInterfaces": list(trusted_interfaces[r.name])
},
("networking", "extraHosts"): "\n".join(extra_hosts) + "\n",
}
)
# Add SSH public host keys for all machines in network.
for m2 in active_machines.values():
if hasattr(m2, "public_host_key") and m2.public_host_key:
                        # Using references to files in the same tempdir for now, until NixOS has
                        # support for adding the keys directly as a string. That way it at least
                        # stays compatible with older versions of NixOS as well.
                        # TODO: after a reasonable amount of time, replace this with a string option
config.append(
{
("services", "openssh", "knownHosts", m2.name): {
"hostNames": [m2.name],
"publicKey": m2.public_host_key,
}
}
)
merged = reduce(nixmerge, config) if len(config) > 0 else {}
physical = r.get_physical_spec()
if len(merged) == 0 and len(physical) == 0:
return {}
else:
return r.prefix_definition(
{
r.name: Function(
"{ config, lib, pkgs, ... }",
{"config": merged, "imports": [physical]},
)
}
)
return (
py2nix(
reduce(
nixmerge, [emit_resource(r) for r in active_resources.values()], {}
)
)
+ "\n"
)
def get_profile(self) -> str:
profile_dir = "/nix/var/nix/profiles/per-user/" + getpass.getuser()
if os.path.exists(profile_dir + "/charon") and not os.path.exists(
profile_dir + "/nixops"
):
os.rename(profile_dir + "/charon", profile_dir + "/nixops")
return profile_dir + "/nixops/" + self.uuid
def create_profile(self) -> str:
profile = self.get_profile()
dir = os.path.dirname(profile)
if not os.path.exists(dir):
os.makedirs(dir, 0o755)
return profile
def build_configs(
self,
include: List[str],
exclude: List[str],
dry_run: bool = False,
repair: bool = False,
) -> str:
"""Build the machine configurations in the Nix store."""
self.logger.log("building all machine configurations...")
# TODO: Use `lib.versionSuffix` from nixpkgs through an eval
# TODO: `lib.versionSuffix` doesn't really work for git repos, fix in nixpkgs.
#
# # Set the NixOS version suffix, if we're building from Git.
# # That way ‘nixos-version’ will show something useful on the
# # target machines.
# nixos_path = str(self.evaluate_config("nixpkgs"))
# get_version_script = nixos_path + "/modules/installer/tools/get-version-suffix"
# if os.path.exists(nixos_path + "/.git") and os.path.exists(get_version_script):
# self.nixos_version_suffix = subprocess.check_output(
# ["/bin/sh", get_version_script] + self._nix_path_flags(), text=True
# ).rstrip()
phys_expr = self.tempdir + "/physical.nix"
p = self.get_physical_spec()
nixops.util.write_file(phys_expr, p)
if DEBUG:
print("generated physical spec:\n" + p, file=sys.stderr)
selected = [
m for m in self.active_machines.values() if should_do(m, include, exclude)
]
names = [m.name for m in selected]
# If we're not running on Linux, then perform the build on the
# target machines. FIXME: Also enable this if we're on 32-bit
# and want to deploy to 64-bit.
if platform.system() != "Linux" and os.environ.get("NIX_REMOTE") != "daemon":
if os.environ.get("NIX_REMOTE_SYSTEMS") is None:
remote_machines = []
for m in sorted(selected, key=lambda m: m.get_index()):
key_file: Optional[str] = m.get_ssh_private_key_file()
if not key_file:
raise Exception(
"do not know private SSH key for machine ‘{0}’".format(
m.name
)
)
# FIXME: Figure out the correct machine type of ‘m’ (it might not be x86_64-linux).
remote_machines.append(
"root@{0} {1} {2} 2 1\n".format(
m.get_ssh_name(), "i686-linux,x86_64-linux", key_file
)
)
# Use only a single machine for now (issue #103).
break
remote_machines_file = "{0}/nix.machines".format(self.tempdir)
with open(remote_machines_file, "w") as f:
f.write("".join(remote_machines))
os.environ["NIX_REMOTE_SYSTEMS"] = remote_machines_file
else:
self.logger.log(
"using predefined remote systems file: {0}".format(
os.environ["NIX_REMOTE_SYSTEMS"]
)
)
# FIXME: Use ‘--option use-build-hook true’ instead of setting
# $NIX_BUILD_HOOK, once Nix supports that.
os.environ["NIX_BUILD_HOOK"] = (
os.path.dirname(os.path.realpath(nixops.util.which("nix-build")))
+ "/../libexec/nix/build-remote.pl"
)
load_dir = "{0}/current-load".format(self.tempdir)
if not os.path.exists(load_dir):
os.makedirs(load_dir, 0o700)
os.environ["NIX_CURRENT_LOAD"] = load_dir
try:
drv: str = self.eval(
include_physical=True,
nix_args={"names": names},
attr="machines.drvPath",
)
argv: List[str] = (
["nix-store", "-r"]
+ self.extra_nix_flags
+ (["--dry-run"] if dry_run else [])
+ (["--repair"] if repair else [])
+ [drv]
)
configs_path = subprocess.check_output(
argv,
text=True,
stderr=self.logger.log_file,
).rstrip()
except subprocess.CalledProcessError:
raise Exception("unable to build all machine configurations")
if self.rollback_enabled and not dry_run:
profile = self.create_profile()
if subprocess.call(["nix-env", "-p", profile, "--set", configs_path]) != 0:
raise Exception("cannot update profile ‘{0}’".format(profile))
return configs_path
def copy_closures(
self,
configs_path: str,
include: List[str],
exclude: List[str],
max_concurrent_copy: int,
) -> None:
"""Copy the closure of each machine configuration to the corresponding machine."""
def worker(m: nixops.backends.GenericMachineState) -> None:
if not should_do(m, include, exclude):
return
m.logger.log("copying closure...")
m.new_toplevel = os.path.realpath(configs_path + "/" + m.name)
if not os.path.exists(m.new_toplevel):
raise Exception("can't find closure of machine ‘{0}’".format(m.name))
m.copy_closure_to(m.new_toplevel)
nixops.parallel.run_tasks(
nr_workers=max_concurrent_copy,
tasks=iter(self.active_machines.values()),
worker_fun=worker,
)
self.logger.log(
ansi_success(
"{0}> closures copied successfully".format(self.name or "unnamed"),
outfile=self.logger._log_file,
)
)
def activate_configs( # noqa: C901
self,
configs_path: str,
include: List[str],
exclude: List[str],
allow_reboot: bool,
force_reboot: bool,
check: bool,
sync: bool,
always_activate: bool,
dry_activate: bool,
test: bool,
boot: bool,
max_concurrent_activate: int,
) -> None:
"""Activate the new configuration on a machine."""
def worker(m: nixops.backends.GenericMachineState) -> Optional[str]:
if not should_do(m, include, exclude):
return None
def set_profile():
# Set the system profile to the new configuration.
daemon_var = "" if m.state == m.RESCUE else "env NIX_REMOTE=daemon "
setprof = (
daemon_var + 'nix-env -p /nix/var/nix/profiles/system --set "{0}"'
)
defn = self._machine_definition_for_required(m.name)
if always_activate or defn.always_activate:
m.run_command(setprof.format(m.new_toplevel))
else:
# Only activate if the profile has changed.
new_profile_cmd = "; ".join(
[
'old_gen="$(readlink -f /nix/var/nix/profiles/system)"',
'new_gen="$(readlink -f "{0}")"',
'[ "x$old_gen" != "x$new_gen" ] || exit 111',
setprof,
]
).format(m.new_toplevel)
ret = m.run_command(new_profile_cmd, check=False)
if ret == 111:
m.log("configuration already up to date")
return None
elif ret != 0:
raise Exception("unable to set new system profile")
try:
if not test:
set_profile()
m.send_keys()
if boot or force_reboot or m.state == m.RESCUE:
switch_method = "boot"
elif dry_activate:
switch_method = "dry-activate"
elif test:
switch_method = "test"
else:
switch_method = "switch"
# Run the switch script. This will also update the
# GRUB boot loader.
res = m.switch_to_configuration(
switch_method,
sync,
command=f"{m.new_toplevel}/bin/switch-to-configuration",
)
if dry_activate:
return None
if res != 0 and res != 100:
raise Exception(
"unable to activate new configuration (exit code {})".format(
res
)
)
if res == 100 or force_reboot or m.state == m.RESCUE:
if not allow_reboot and not force_reboot:
raise Exception(
"the new configuration requires a "
"reboot of '{}' to take effect (hint: use "
"‘--allow-reboot’)".format(m.name)
)
m.reboot_sync()
res = 0
# FIXME: should check which systemd services
# failed to start after the reboot.
if res == 0:
m.success("activation finished successfully")
# Record that we switched this machine to the new
# configuration.
m.cur_configs_path = configs_path
m.cur_toplevel = m.new_toplevel
except Exception:
# This thread shouldn't throw an exception because
# that will cause NixOps to exit and interrupt
# activation on the other machines.
m.logger.error(traceback.format_exc())
return m.name
return None
res = nixops.parallel.run_tasks(
nr_workers=max_concurrent_activate,
tasks=iter(self.active_machines.values()),
worker_fun=worker,
)
failed = [x for x in res if x is not None]
if failed != []:
raise Exception(
"activation of {0} of {1} machines failed (namely on {2})".format(
len(failed),
len(res),
", ".join(["‘{0}’".format(x) for x in failed]),
)
)
def _get_free_resource_index(self) -> int:
index = 0
for r in self.resources.values():
if r.index is not None and index <= r.index:
index = r.index + 1
return index
def get_backups(
self, include: List[str] = [], exclude: List[str] = []
) -> Dict[str, Dict[str, Any]]:
self.evaluate_active(include, exclude) # unnecessary?
machine_backups = {}
for m in self.active_machines.values():
if should_do(m, include, exclude):
machine_backups[m.name] = m.get_backups()
# merging machine backups into network backups
backup_ids = [b for bs in machine_backups.values() for b in bs.keys()]
backups: Dict[str, Dict[str, Any]] = {}
for backup_id in backup_ids:
backups[backup_id] = {}
backups[backup_id]["machines"] = {}
backups[backup_id]["info"] = []
backups[backup_id]["status"] = "complete"
backup = backups[backup_id]
for m in self.active_machines.values():
if should_do(m, include, exclude):
if backup_id in machine_backups[m.name].keys():
backup["machines"][m.name] = machine_backups[m.name][backup_id]
backup["info"].extend(backup["machines"][m.name]["info"])
# status is always running when one of the backups is still running
if (
backup["machines"][m.name]["status"] != "complete"
and backup["status"] != "running"
):
backup["status"] = backup["machines"][m.name]["status"]
else:
backup["status"] = "incomplete"
backup["info"].extend(
["No backup available for {0}".format(m.name)]
)
return backups
def clean_backups(
        self, keep: int, keep_days: int, keep_physical: bool = False
) -> None:
_backups = self.get_backups()
backup_ids = sorted(_backups.keys())
if keep:
index = len(backup_ids) - keep
tbr = backup_ids[:index]
if keep_days:
cutoff = (datetime.now() - timedelta(days=keep_days)).strftime(
"%Y%m%d%H%M%S"
)
print(cutoff)
tbr = [bid for bid in backup_ids if bid < cutoff]
for backup_id in tbr:
print("Removing backup {0}".format(backup_id))
self.remove_backup(backup_id, keep_physical)
def remove_backup(self, backup_id: str, keep_physical: bool = False) -> None:
with self._get_deployment_lock():
def worker(m: nixops.backends.GenericMachineState) -> None:
m.remove_backup(backup_id, keep_physical)
nixops.parallel.run_tasks(
nr_workers=len(self.active_machines),
tasks=iter(self.machines.values()),
worker_fun=worker,
)
def backup(
self, include: List[str] = [], exclude: List[str] = [], devices: List[str] = []
) -> str:
self.evaluate_active(include, exclude)
backup_id = datetime.now().strftime("%Y%m%d%H%M%S")
def worker(m: nixops.backends.GenericMachineState) -> None:
if not should_do(m, include, exclude):
return
if m.state != m.STOPPED:
ssh_name = m.get_ssh_name()
res = subprocess.call(
["ssh", "root@" + ssh_name] + m.get_ssh_flags() + ["sync"]
)
if res != 0:
m.logger.log("running sync failed on {0}.".format(m.name))
m.backup(self._machine_definition_for_required(m.name), backup_id, devices)
nixops.parallel.run_tasks(
nr_workers=5, tasks=iter(self.active_machines.values()), worker_fun=worker
)
return backup_id
def restore(
self,
include: List[str] = [],
exclude: List[str] = [],
backup_id: Optional[str] = None,
devices: List[str] = [],
) -> None:
with self._get_deployment_lock():
self.evaluate_active(include, exclude)
def worker(m: nixops.backends.GenericMachineState) -> None:
if not should_do(m, include, exclude):
return
m.restore(
self._machine_definition_for_required(m.name), backup_id, devices
)
nixops.parallel.run_tasks(
nr_workers=-1,
tasks=iter(self.active_machines.values()),
worker_fun=worker,
)
self.start_machines(include=include, exclude=exclude)
self.logger.warn(
"restore finished; please note that you might need to run ‘nixops deploy’ to fix configuration issues regarding changed IP addresses"
)
def evaluate_active(
self,
include: List[str] = [],
exclude: List[str] = [],
kill_obsolete: bool = False,
) -> None:
self.evaluate()
# Create state objects for all defined resources.
with self._db:
for defn in self._definitions().values():
if defn.name not in self.resources:
self._create_resource(defn.name, defn.get_type())
self.logger.update_log_prefixes()
to_destroy = []
# Determine the set of active resources. (We can't just
# delete obsolete resources from ‘self.resources’ because they
# contain important state that we don't want to forget about.)
for m in self.resources.values():
if m.name in self._definitions():
if m.obsolete:
self.logger.log(
"resource ‘{0}’ is no longer obsolete".format(m.name)
)
m.obsolete = False
else:
self.logger.log("resource ‘{0}’ is obsolete".format(m.name))
if not m.obsolete:
m.obsolete = True
if not should_do(m, include, exclude):
continue
if kill_obsolete:
to_destroy.append(m.name)
if to_destroy:
self._destroy_resources(include=to_destroy)
def _deploy( # noqa: C901
self,
dry_run: bool = False,
test: bool = False,
boot: bool = False,
plan_only: bool = False,
build_only: bool = False,
create_only: bool = False,
copy_only: bool = False,
include: List[str] = [],
exclude: List[str] = [],
check: bool = False,
kill_obsolete: bool = False,
allow_reboot: bool = False,
allow_recreate: bool = False,
force_reboot: bool = False,
max_concurrent_copy: int = 5,
max_concurrent_activate: int = -1,
sync: bool = True,
always_activate: bool = False,
repair: bool = False,
dry_activate: bool = False,
) -> None:
"""Perform the deployment defined by the deployment specification."""
self.evaluate_active(include, exclude, kill_obsolete)
# Assign each resource an index if it doesn't have one.
for r in self.active_resources.values():
if r.index is None:
r.index = self._get_free_resource_index()
# FIXME: Logger should be able to do coloring without the need
# for an index maybe?
r.logger.register_index(r.index)
self.logger.update_log_prefixes()
# Start or update the active resources. Non-machine resources
# are created first, because machines may depend on them
# (e.g. EC2 machines depend on EC2 key pairs or EBS volumes).
# FIXME: would be nice to have a more fine-grained topological
# sort.
if not dry_run and not build_only:
for r in self.active_resources.values():
defn = self._definition_for_required(r.name)
if r.get_type() != defn.get_type():
raise Exception(
"the type of resource ‘{0}’ changed from ‘{1}’ to ‘{2}’, which is currently unsupported".format(
r.name, r.get_type(), defn.get_type()
)
)
r._created_event = threading.Event()
r._errored = False
def plan_worker(r: nixops.resources.DiffEngineResourceState) -> None:
if not should_do(r, include, exclude):
return
r.plan(self._definition_for_required(r.name))
if plan_only:
for r in self.active_resources.values():
if isinstance(r, nixops.resources.DiffEngineResourceState):
plan_worker(r)
else:
r.warn(
"resource type {} doesn't implement a plan operation".format(
r.get_type()
)
)
return
def worker(r: nixops.resources.GenericResourceState):
try:
if not should_do(r, include, exclude):
return
# Sleep until all dependencies of this resource have
# been created.
deps = r.create_after(
iter(self.active_resources.values()),
self._definition_for_required(r.name),
)
for dep in deps:
if dep._created_event:
dep._created_event.wait()
# !!! Should we print a message here?
if dep._errored:
r._errored = True
return
# Now create the resource itself.
if not r.creation_time:
r.creation_time = int(time.time())
r.create(
self._definition_for_required(r.name),
check=check,
allow_reboot=allow_reboot,
allow_recreate=allow_recreate,
)
if is_machine(r):
                    # NOTE: unfortunately mypy doesn't check that
# is_machine calls an isinstance() function
m = cast(nixops.backends.GenericMachineState, r)
# The first time the machine is created,
# record the state version. We get it from
# /etc/os-release, rather than from the
# configuration's state.systemVersion
# attribute, because the machine may have been
# booted from an older NixOS image.
if not m.state_version:
os_release = str(
m.run_command(
"cat /etc/os-release", capture_stdout=True
)
)
match = re.search(
r'VERSION_ID="([0-9]+\.[0-9]+).*"', # noqa: W605
os_release,
)
if match:
m.state_version = match.group(1)
m.log(
"setting state version to {0}".format(
m.state_version
)
)
else:
m.warn("cannot determine NixOS version")
m.wait_for_ssh(check=check)
MachineHooksManager.post_wait(m)
except Exception:
r._errored = True
raise
finally:
if r._created_event:
r._created_event.set()
nixops.parallel.run_tasks(
nr_workers=-1,
tasks=iter(self.active_resources.values()),
worker_fun=worker,
)
if create_only:
return
# Build the machine configurations.
# Record configs_path in the state so that the ‘info’ command
# can show whether machines have an outdated configuration.
self.configs_path = self.build_configs(
dry_run=dry_run, repair=repair, include=include, exclude=exclude
)
if build_only or dry_run:
return
# Copy the closures of the machine configurations to the
# target machines.
self.copy_closures(
self.configs_path,
include=include,
exclude=exclude,
max_concurrent_copy=max_concurrent_copy,
)
if copy_only:
return
        # Activate the configurations.
self.activate_configs(
self.configs_path,
include=include,
exclude=exclude,
allow_reboot=allow_reboot,
force_reboot=force_reboot,
check=check,
sync=sync,
always_activate=always_activate,
dry_activate=dry_activate,
test=test,
boot=boot,
max_concurrent_activate=max_concurrent_activate,
)
if dry_activate:
return
# Trigger cleanup of resources, e.g. disks that need to be detached etc. Needs to be
# done after activation to make sure they are not in use anymore.
def cleanup_worker(r: nixops.resources.GenericResourceState) -> None:
if not should_do(r, include, exclude):
return
            # Run the resource's post-activation cleanup.
r.after_activation(self._definition_for_required(r.name))
nixops.parallel.run_tasks(
nr_workers=-1,
tasks=iter(self.active_resources.values()),
worker_fun=cleanup_worker,
)
self.logger.log(
ansi_success(
"{0}> deployment finished successfully".format(self.name or "unnamed"),
outfile=self.logger._log_file,
)
)
# can generalize notifications later (e.g. emails, for now just hardcode datadog)
def notify_start(self, action: str) -> None:
self.evaluate_network(action)
def notify_success(self, action: str) -> None:
pass
def notify_failed(
self, action: str, e: Union[KeyboardInterrupt, Exception]
) -> None:
pass
def run_with_notify(self, action: str, f: Callable[[], None]) -> None:
self.notify_start(action)
try:
f()
self.notify_success(action)
except KeyboardInterrupt as e:
self.notify_failed(action, e)
raise
except Exception as e:
self.notify_failed(action, e)
raise
def deploy(self, **kwargs: Any) -> None:
with self._get_deployment_lock():
self.run_with_notify("deploy", lambda: self._deploy(**kwargs))
def _rollback(
self,
generation: int,
include: List[str] = [],
exclude: List[str] = [],
check: bool = False,
allow_reboot: bool = False,
force_reboot: bool = False,
max_concurrent_copy: int = 5,
max_concurrent_activate: int = -1,
sync: bool = True,
) -> None:
if not self.rollback_enabled:
raise Exception(
"rollback is not enabled for this network; please set ‘network.enableRollback’ to ‘true’ and redeploy"
)
profile = self.get_profile()
if (
subprocess.call(
["nix-env", "-p", profile, "--switch-generation", str(generation)]
)
!= 0
):
raise Exception("nix-env --switch-generation failed")
self.configs_path = os.path.realpath(profile)
assert os.path.isdir(self.configs_path)
names = set()
for filename in os.listdir(self.configs_path):
if not os.path.islink(self.configs_path + "/" + filename):
continue
if (
should_do_n(filename, include, exclude)
and filename not in self.machines
):
raise Exception(
"cannot roll back machine ‘{0}’ which no longer exists".format(
filename
)
)
names.add(filename)
# Update the set of active machines.
for m in self.machines.values():
if m.name in names:
if m.obsolete:
self.logger.log(
"machine ‘{0}’ is no longer obsolete".format(m.name)
)
m.obsolete = False
else:
self.logger.log("machine ‘{0}’ is obsolete".format(m.name))
m.obsolete = True
self.copy_closures(
self.configs_path,
include=include,
exclude=exclude,
max_concurrent_copy=max_concurrent_copy,
)
self.activate_configs(
self.configs_path,
include=include,
exclude=exclude,
allow_reboot=allow_reboot,
force_reboot=force_reboot,
check=check,
sync=sync,
always_activate=True,
dry_activate=False,
test=False,
boot=False,
max_concurrent_activate=max_concurrent_activate,
)
def rollback(self, **kwargs: Any) -> None:
with self._get_deployment_lock():
self._rollback(**kwargs)
def _destroy_resources(
self, include: List[str] = [], exclude: List[str] = [], wipe: bool = False
) -> None:
for r in self.resources.values():
r._destroyed_event = threading.Event()
r._errored = False
for rev_dep in r.destroy_before(iter(self.resources.values())):
try:
rev_dep._wait_for.append(r)
except AttributeError:
rev_dep._wait_for = [r]
def worker(m: nixops.resources.GenericResourceState) -> None:
try:
if not should_do(m, include, exclude):
return
try:
for dep in m._wait_for:
if dep._destroyed_event:
dep._destroyed_event.wait()
# !!! Should we print a message here?
if dep._errored:
m._errored = True
return
except AttributeError:
pass
if m.destroy(wipe=wipe):
self.delete_resource(m)
except Exception:
m._errored = True
raise
finally:
if m._destroyed_event:
m._destroyed_event.set()
nixops.parallel.run_tasks(
nr_workers=-1, tasks=list(self.resources.values()), worker_fun=worker
)
def destroy_resources(
self, include: List[str] = [], exclude: List[str] = [], wipe: bool = False
) -> None:
"""Destroy all active and obsolete resources."""
with self._get_deployment_lock():
self.run_with_notify(
"destroy", lambda: self._destroy_resources(include, exclude, wipe)
)
# Remove the destroyed machines from the rollback profile.
# This way, a subsequent "nix-env --delete-generations old" or
# "nix-collect-garbage -d" will get rid of the machine
# configurations.
if self.rollback_enabled: # and len(self.active) == 0:
profile = self.create_profile()
attrs = {
m.name: Call(RawValue("builtins.storePath"), m.cur_toplevel)
for m in self.active_machines.values()
if m.cur_toplevel
}
if (
subprocess.call(
[
"nix-env",
"-p",
profile,
"--set",
"*",
"-I",
"nixops=" + self.expr_path,
"-f",
"<nixops/update-profile.nix>",
"--arg",
"machines",
py2nix(attrs, inline=True),
]
)
!= 0
):
raise Exception("cannot update profile ‘{0}’".format(profile))
def delete_resources(
self, include: List[str] = [], exclude: List[str] = []
) -> None:
"""delete all resources state."""
def worker(m: nixops.resources.GenericResourceState) -> None:
if not should_do(m, include, exclude):
return
if m.delete_resources():
self.delete_resource(m)
nixops.parallel.run_tasks(
nr_workers=-1, tasks=list(self.resources.values()), worker_fun=worker
)
def reboot_machines(
self,
include: List[str] = [],
exclude: List[str] = [],
wait: bool = False,
rescue: bool = False,
hard: bool = False,
) -> None:
"""Reboot all active machines."""
def worker(m: nixops.backends.GenericMachineState) -> None:
if not should_do(m, include, exclude):
return
if rescue:
m.reboot_rescue(hard=hard)
elif wait:
m.reboot_sync(hard=hard)
else:
m.reboot(hard=hard)
nixops.parallel.run_tasks(
nr_workers=-1, tasks=iter(self.active_machines.values()), worker_fun=worker
)
def stop_machines(self, include: List[str] = [], exclude: List[str] = []) -> None:
"""Stop all active machines."""
def worker(m: nixops.backends.GenericMachineState) -> None:
if not should_do(m, include, exclude):
return
m.stop()
nixops.parallel.run_tasks(
nr_workers=-1, tasks=iter(self.active_machines.values()), worker_fun=worker
)
def start_machines(self, include: List[str] = [], exclude: List[str] = []) -> None:
"""Start all active machines."""
def worker(m: nixops.backends.GenericMachineState) -> None:
if not should_do(m, include, exclude):
return
m.start()
nixops.parallel.run_tasks(
nr_workers=-1, tasks=iter(self.active_machines.values()), worker_fun=worker
)
def is_valid_resource_name(self, name: str) -> bool:
p = re.compile(r"^[\w-]+$") # noqa: W605
        return p.match(name) is not None
def rename(self, name: str, new_name: str) -> None:
if name not in self.resources:
raise Exception("resource ‘{0}’ not found".format(name))
if new_name in self.resources:
raise Exception("resource with name ‘{0}’ already exists".format(new_name))
if not self.is_valid_resource_name(new_name):
raise Exception("{0} is not a valid resource identifier".format(new_name))
self.logger.log("renaming resource ‘{0}’ to ‘{1}’...".format(name, new_name))
m = self.resources.pop(name)
self.resources[new_name] = m
with self._db:
self._db.execute(
"update Resources set name = ? where deployment = ? and id = ?",
(new_name, self.uuid, m.id),
)
def send_keys(self, include: List[str] = [], exclude: List[str] = []) -> None:
"""Send encryption keys to machines."""
def worker(m: nixops.backends.GenericMachineState) -> None:
if not should_do(m, include, exclude):
return
m.send_keys()
nixops.parallel.run_tasks(
nr_workers=-1, tasks=iter(self.active_machines.values()), worker_fun=worker
)
def should_do(
m: Union[
nixops.resources.GenericResourceState, nixops.backends.GenericMachineState
],
include: List[str],
exclude: List[str],
) -> bool:
return should_do_n(m.name, include, exclude)
def should_do_n(name: str, include: List[str], exclude: List[str]) -> bool:
if name in exclude:
return False
if include == []:
return True
return name in include
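# Illustrative cases (added example, not in the original source):
#   should_do_n("web", [], [])       -> True   (no filters selects everything)
#   should_do_n("web", ["db"], [])   -> False  (a non-empty include list restricts)
#   should_do_n("web", [], ["web"])  -> False  (exclude is checked first and always wins)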
def is_machine(
r: Union[nixops.resources.GenericResourceState, nixops.backends.GenericMachineState]
) -> bool:
# Hack around isinstance checks not working on subscripted generics
# See ./monkey.py
return nixops.backends.MachineState in r.__class__.mro()
def _filter_machines(
resources: Dict[str, nixops.resources.GenericResourceState]
) -> Dict[str, nixops.backends.GenericMachineState]:
return {n: r for n, r in resources.items() if is_machine(r)} # type: ignore
def is_machine_defn(r: nixops.resources.GenericResourceState) -> bool:
return isinstance(r, nixops.backends.MachineDefinition)
def _subclasses(cls: Any) -> List[Any]:
sub = cls.__subclasses__()
return [cls] if not sub else [g for s in sub for g in _subclasses(s)]
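# Behaviour sketch (added example): for a hierarchy A -> B -> C and A -> D,
# _subclasses(A) returns only the leaf classes [C, D]; classes that have
# subclasses of their own are not included themselves.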
def _create_definition(
name: str, config: Dict[str, Any], type_name: str
) -> nixops.resources.ResourceDefinition:
"""Create a resource definition object from the given XML representation of the machine's attributes."""
for cls in _subclasses(nixops.resources.ResourceDefinition):
if type_name == cls.get_resource_type():
return cls(name, nixops.resources.ResourceEval(config)) # type: ignore
raise nixops.deployment.UnknownBackend(
"unknown resource type ‘{0}’".format(type_name)
)
def _create_state(
depl: Deployment, type: str, name: str, id: int
) -> GenericResourceState:
"""Create a resource state object of the desired type."""
for cls in _subclasses(nixops.resources.ResourceState):
try:
if type == cls.get_type():
return cls(depl, name, id) # type: ignore
except NotImplementedError:
pass
raise nixops.deployment.UnknownBackend("unknown resource type ‘{0}’".format(type))
# Automatically load all resource types.
def _load_modules_from(dir: str) -> None:
for module in os.listdir(os.path.dirname(__file__) + "/" + dir):
if module[-3:] != ".py" or module == "__init__.py":
continue
importlib.import_module("nixops." + dir + "." + module[:-3])
_load_modules_from("backends")
_load_modules_from("resources")
| 62,872 | Python | .py | 1,461 | 29.478439 | 149 | 0.529597 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,395 | nix_expr.py | NixOS_nixops/nixops/nix_expr.py |
from abc import abstractmethod
import functools
import string
from typing import Iterable, Optional, Any, List, Sequence, Tuple, Union, Dict
from textwrap import dedent
__all__ = ["py2nix", "nix2py", "nixmerge", "expand_dict", "RawValue", "Function"]
class ValueLike:
@abstractmethod
def indent(self, level: int, inline: bool, maxwidth: int) -> str:
pass
class RawValue(ValueLike):
def __init__(self, value: str) -> None:
self.value: str = value
def get_min_length(self) -> Optional[int]:
return len(self.value)
def is_inlineable(self) -> bool:
return True
def indent(self, level: int = 0, inline: bool = False, maxwidth: int = 80) -> str:
return " " * level + self.value
def __repr__(self) -> str:
return self.value
def __eq__(self, other: Any) -> bool:
return isinstance(other, RawValue) and other.value == self.value
class MultiLineRawValue(RawValue):
def __init__(self, values: List[str]):
self.values: List[str] = values
def get_min_length(self) -> None:
return None
def is_inlineable(self) -> bool:
return False
def indent(self, level: int = 0, inline: bool = False, maxwidth: int = 80) -> str:
return "\n".join([" " * level + value for value in self.values])
class Function(object):
def __init__(self, head: Any, body: Any):
self.head: Any = head
self.body: Any = body
def __repr__(self) -> str:
return "{0} {1}".format(self.head, self.body)
def __eq__(self, other: Any) -> bool:
return (
isinstance(other, Function)
and other.head == self.head
and other.body == self.body
)
class Call(object):
def __init__(self, fun: Any, arg: Any):
self.fun: Any = fun
self.arg: Any = arg
def __repr__(self) -> str:
return "{0} {1}".format(self.fun, self.arg)
def __eq__(self, other) -> bool:
return (
isinstance(other, Call) and other.fun == self.fun and other.arg == self.arg
)
class Container(object):
def __init__(
self,
prefix: str,
children: Sequence[Union[RawValue, "Container"]],
suffix: str,
inline_variant: Optional[ValueLike] = None,
):
self.prefix: str = prefix
self.children = children
self.suffix: str = suffix
self.inline_variant = inline_variant
def get_min_length(self) -> int:
"""
Return the minimum length of this container and all sub-containers.
"""
return (
len(self.prefix)
+ len(self.suffix)
+ 1
+ len(self.children)
+ sum([child.get_min_length() or 0 for child in self.children])
)
def is_inlineable(self) -> bool:
return all([child.is_inlineable() for child in self.children])
def indent(self, level: int = 0, inline: bool = False, maxwidth: int = 80) -> str:
if not self.is_inlineable():
inline = False
elif level * 2 + self.get_min_length() < maxwidth:
inline = True
ind = " " * level
if inline and self.inline_variant is not None:
return self.inline_variant.indent(
level=level, inline=True, maxwidth=maxwidth
)
elif inline:
sep = " "
lines = " ".join(
[child.indent(level=0, inline=True) for child in self.children]
)
suffix_ind = ""
else:
sep = "\n"
lines = "\n".join(
[
child.indent(level + 1, inline=inline, maxwidth=maxwidth)
for child in self.children
]
)
suffix_ind = ind
return ind + self.prefix + sep + lines + sep + suffix_ind + self.suffix
def enclose_node(
node: Any, prefix: str = "", suffix: str = ""
) -> Union[MultiLineRawValue, RawValue, Container]:
if isinstance(node, MultiLineRawValue):
new_values = list(node.values)
new_values[0] = prefix + new_values[0]
new_values[-1] += suffix
return MultiLineRawValue(new_values)
elif isinstance(node, RawValue):
return RawValue(prefix + node.value + suffix)
else:
new_inline: Optional[RawValue]
if node.inline_variant is not None:
new_inline = RawValue(prefix + node.inline_variant.value + suffix)
else:
new_inline = None
return Container(
prefix + node.prefix, node.children, node.suffix + suffix, new_inline
)
def _fold_string(value: str, rules: Iterable[Tuple[str, str]]) -> str:
def folder(val: str, rule: Tuple[str, str]) -> str:
return val.replace(rule[0], rule[1])
return functools.reduce(folder, rules, value)
def py2nix( # noqa: C901
value: Any, initial_indentation: int = 0, maxwidth: int = 80, inline: bool = False
):
"""
Return the given value as a Nix expression string.
    If initial_indentation is set to a specific level (two spaces per level), don't
inline fewer than that. Also, 'maxwidth' specifies the maximum line width
which is enforced whenever it is possible to break an expression. Set to 0
if you want to break on every occasion possible. If 'inline' is set to
True, squash everything into a single line.
"""
def _enc_int(node):
if node < 0:
return RawValue("builtins.sub 0 " + str(-node))
else:
return RawValue(str(node))
def _enc_str(node, for_attribute: bool = False):
encoded = _fold_string(
node,
[
("\\", "\\\\"),
("${", "\\${"),
('"', '\\"'),
("\n", "\\n"),
("\t", "\\t"),
],
)
inline_variant = RawValue('"{0}"'.format(encoded))
if for_attribute:
return inline_variant.value
if node.endswith("\n"):
encoded = _fold_string(
node[:-1], [("''", "'''"), ("${", "''${"), ("\t", "'\\t")]
)
atoms = [RawValue(line) for line in encoded.splitlines()]
return Container("''", atoms, "''", inline_variant=inline_variant)
else:
return inline_variant
def _enc_list(nodes) -> Union[RawValue, Container]:
if len(nodes) == 0:
return RawValue("[]")
pre, post = "[", "]"
while len(nodes) == 1 and isinstance(nodes[0], list):
nodes = nodes[0]
pre, post = pre + " [", post + " ]"
return Container(pre, [_enc(n, inlist=True) for n in nodes], post)
def _enc_key(key):
if not isinstance(key, str):
raise KeyError("key {0} is not a string".format(repr(key)))
elif len(key) == 0:
raise KeyError("key name has zero length")
if (
all(char in string.ascii_letters + string.digits + "_" for char in key)
and not key[0].isdigit()
):
return key
else:
return _enc_str(key, for_attribute=True)
def _enc_attrset(node):
if len(node) == 0:
return RawValue("{}")
nodes = []
for key, value in sorted(node.items()):
encoded_key = _enc_key(key)
# If the children are attrsets as well and only contain one
# attribute, recursively merge them with a dot, like "a.b.c".
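            # For example (illustrative): {"a": {"b": {"c": 1}}} is emitted as
            # the single binding "a.b.c = 1;" instead of nested attribute sets.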
child_key, child_value = key, value
while isinstance(child_value, dict) and len(child_value) == 1:
child_key, child_value = next(iter(child_value.items()))
encoded_key += "." + _enc_key(child_key)
contents = _enc(child_value)
prefix = "{0} = ".format(encoded_key)
suffix = ";"
nodes.append(enclose_node(contents, prefix, suffix))
return Container("{", nodes, "}")
def _enc_function(node):
body = _enc(node.body)
return enclose_node(body, node.head + ": ")
def _enc_call(node):
return Container("(", [_enc(node.fun), _enc(node.arg)], ")")
def _enc(node, inlist=False):
if isinstance(node, RawValue):
if inlist and (
isinstance(node, MultiLineRawValue)
or any(char.isspace() for char in node.value)
):
return enclose_node(node, "(", ")")
else:
return node
elif node is True:
return RawValue("true")
elif node is False:
return RawValue("false")
elif node is None:
return RawValue("null")
elif isinstance(node, int):
return _enc_int(node)
elif isinstance(node, str):
return _enc_str(node)
elif isinstance(node, list):
return _enc_list(node)
elif isinstance(node, dict):
return _enc_attrset(expand_dict(node))
elif isinstance(node, Function):
if inlist:
return enclose_node(_enc_function(node), "(", ")")
else:
return _enc_function(node)
elif isinstance(node, Call):
if inlist:
return enclose_node(_enc_call(node), "(", ")")
else:
return _enc_call(node)
else:
raise ValueError("unable to encode {0}".format(repr(node)))
return _enc(value).indent(initial_indentation, maxwidth=maxwidth, inline=inline)
def expand_dict(unexpanded) -> Dict:
"""
Turns a dict containing tuples as keys into a set of nested dictionaries.
Examples:
>>> expand_dict({('a', 'b', 'c'): 'd'})
{'a': {'b': {'c': 'd'}}}
>>> expand_dict({('a', 'b'): 'c',
... 'a': {('d', 'e'): 'f'}})
{'a': {'b': 'c', 'd': {'e': 'f'}}}
"""
paths, strings = [], {}
for key, val in unexpanded.items():
if isinstance(key, tuple):
if len(key) == 0:
raise KeyError("invalid key {0}".format(repr(key)))
newkey = key[0]
if len(key) > 1:
newval = {key[1:]: val}
else:
newval = val
paths.append({newkey: newval})
else:
strings[key] = val
return {
key: (expand_dict(val) if isinstance(val, dict) else val)
for key, val in functools.reduce(nixmerge, paths + [strings]).items()
}
def nixmerge(expr1, expr2):
"""
    Merge both expressions into one, recursively merging dictionary keys and
    concatenating lists while dropping duplicate elements.
"""
def _merge_dicts(d1, d2) -> Dict:
out = {}
for key in set(d1.keys()).union(d2.keys()):
if key in d1 and key in d2:
out[key] = _merge(d1[key], d2[key])
elif key in d1:
out[key] = d1[key]
else:
out[key] = d2[key]
return out
def _merge(e1, e2) -> Union[Dict, List]:
if isinstance(e1, dict) and isinstance(e2, dict):
return _merge_dicts(e1, e2)
elif isinstance(e1, list) and isinstance(e2, list):
merged = []
seen = set()
for x in e1 + e2:
if x not in seen:
seen.add(x)
merged.append(x)
return merged
else:
err = "unable to merge {0} with {1}".format(type(e1), type(e2))
raise ValueError(err)
return _merge(expr1, expr2)
def nix2py(source: str) -> MultiLineRawValue:
"""
Dedent the given Nix source code and encode it into multiple raw values
    which are used as-is; only indentation is applied when rendering.
"""
return MultiLineRawValue(dedent(source).strip().splitlines())
| 11,862 | Python | .py | 307 | 28.915309 | 87 | 0.54661 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,396 | logger.py | NixOS_nixops/nixops/logger.py |
# -*- coding: utf-8 -*-
from __future__ import annotations
import sys
import threading
from typing import List, Optional, TextIO
from nixops.ansi import ansi_warn, ansi_error, ansi_success
__all__ = ["Logger"]
class Logger(object):
def __init__(self, log_file: TextIO) -> None:
self._last_log_prefix: Optional[str] = None # XXX!
self._log_lock: threading.Lock = threading.Lock()
self._log_file: TextIO = log_file
self._auto_response: Optional[str] = None
self.machine_loggers: List[MachineLogger] = []
@property
def log_file(self) -> TextIO:
# XXX: Remove me soon!
return self._log_file
def isatty(self) -> bool:
return self._log_file.isatty()
def log(self, msg: str) -> None:
with self._log_lock:
if self._last_log_prefix is not None:
self._log_file.write("\n")
self._last_log_prefix = None
self._log_file.write(msg + "\n")
self._log_file.flush()
def log_start(self, prefix: str, msg: str) -> None:
with self._log_lock:
if self._last_log_prefix != prefix:
if self._last_log_prefix is not None:
self._log_file.write("\n")
self._log_file.write(prefix)
self._log_file.write(msg)
self._last_log_prefix = prefix
self._log_file.flush()
def log_end(self, prefix: str, msg: str) -> None:
with self._log_lock:
last = self._last_log_prefix
self._last_log_prefix = None
if last != prefix:
if last is not None:
self._log_file.write("\n")
if msg == "":
return
self._log_file.write(prefix)
self._log_file.write(msg + "\n")
self._log_file.flush()
def get_logger_for(self, machine_name: str) -> MachineLogger:
"""
Returns a logger instance for a specific machine name.
"""
machine_logger = MachineLogger(self, machine_name)
self.machine_loggers.append(machine_logger)
self.update_log_prefixes()
return machine_logger
def set_autoresponse(self, response: str) -> None:
"""
Automatically respond to all confirmations with the response given by
'response'.
"""
self._auto_response = response
def update_log_prefixes(self) -> None:
max_len = max([len(ml.machine_name) for ml in self.machine_loggers] or [0])
for ml in self.machine_loggers:
ml.update_log_prefix(max_len)
def warn(self, msg: str) -> None:
self.log(ansi_warn("warning: " + msg, outfile=self._log_file))
def error(self, msg: str) -> None:
self.log(ansi_error("error: " + msg, outfile=self._log_file))
def confirm_once(self, question: str) -> Optional[bool]:
with self._log_lock:
if self._last_log_prefix is not None:
self._log_file.write("\n")
self._last_log_prefix = None
# XXX: This should be DRY!
self._log_file.write(
ansi_warn(
"warning: {0} (y/N) ".format(question), outfile=self._log_file
)
)
self._log_file.flush()
if self._auto_response is not None:
self._log_file.write("{0}\n".format(self._auto_response))
self._log_file.flush()
return self._auto_response == "y"
response = sys.stdin.readline()
if response == "":
return False
response = response.rstrip().lower()
if response == "y":
return True
if response == "n" or response == "":
return False
return None
def confirm(self, question: str) -> bool:
ret: Optional[bool] = None
while ret is None:
ret = self.confirm_once(question)
return ret
class MachineLogger(object):
def __init__(self, main_logger: Logger, machine_name: str) -> None:
self.main_logger: Logger = main_logger
self.machine_name: str = machine_name
self.index: Optional[int] = None
self.update_log_prefix(0)
def register_index(self, index: int) -> None:
# FIXME Find a good way to do coloring based on machine name only.
self.index = index
def update_log_prefix(self, length: int) -> None:
self._log_prefix = "{0}{1}> ".format(
self.machine_name, "." * (length - len(self.machine_name))
)
if self.main_logger.isatty() and self.index is not None:
self._log_prefix = "\033[1;{0}m{1}\033[0m".format(
31 + self.index % 7, self._log_prefix
)
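        # Example (illustrative): for machine_name "web" and a column width of 6
        # this yields the prefix "web...> ", wrapped in an ANSI colour escape
        # when logging to a terminal and an index has been registered.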
def log(self, msg: str) -> None:
self.main_logger.log(self._log_prefix + msg)
def log_start(self, msg: str) -> None:
self.main_logger.log_start(self._log_prefix, msg)
def log_continue(self, msg: str) -> None:
self.main_logger.log_start(self._log_prefix, msg)
def log_end(self, msg: str) -> None:
self.main_logger.log_end(self._log_prefix, msg)
def warn(self, msg: str) -> None:
self.log(ansi_warn("warning: " + msg, outfile=self.main_logger._log_file))
def error(self, msg: str) -> None:
self.log(ansi_error("error: " + msg, outfile=self.main_logger._log_file))
def success(self, msg: str) -> None:
self.log(ansi_success(msg, outfile=self.main_logger._log_file))
| 5,584 | Python | .py | 131 | 32.381679 | 83 | 0.567084 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,397 | monkey.py | NixOS_nixops/nixops/monkey.py |
from typing import TYPE_CHECKING
__all__ = (
"Protocol",
"runtime_checkable",
)
# ☢️☢️☢️☢️☢️☢️☢️ 2020-05-18 ☢️☢️☢️☢️☢️☢️☢️
# Explicitly subclassed Protocols don't support super().__init__
#
# ... but we need that.
#
# See: https://github.com/python/typing/issues/572 for a description, including
# the below workaround.
#
# Protocol doesn't give us any special run-time behavior (except for
# runtime_checkable,) and can be pretty transparently swapped out for
# Generic at run time.
#
# By using Generic at run-time, we get the expected __init__ behavior.
#
# But, we still want Protocols at type-checking time because Protocol
# is much stricter about assigning to `self` without explicitly defining
# and typing the object variable.
#
# In conclusion, I'm sorry. Hopefully #572 gets fixed and we can delete
# this and go back to the isinstance check in deployment.py.
if not TYPE_CHECKING:
from typing import Generic
Protocol = Generic
def runtime_checkable(f):
return f
else:
from typing_extensions import Protocol, runtime_checkable
| 1,132 | Python | .py | 32 | 31.4375 | 79 | 0.732177 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,398 | args.py | NixOS_nixops/nixops/args.py |
from argparse import ArgumentParser, _SubParsersAction, SUPPRESS, REMAINDER
from nixops.script_defs import (
add_subparser,
op_list_deployments,
op_create,
op_modify,
op_clone,
op_delete,
op_info,
op_check,
op_set_args,
op_deploy,
add_common_deployment_options,
op_send_keys,
op_destroy,
op_delete_resources,
op_stop,
op_start,
op_reboot,
op_show_arguments,
op_show_physical,
op_ssh,
op_ssh_for_each,
op_scp,
op_mount,
op_rename,
op_backup,
op_backup_status,
op_remove_backup,
op_clean_backups,
op_restore,
op_show_option,
op_list_generations,
op_rollback,
op_delete_generation,
op_show_console_output,
op_dump_nix_paths,
op_export,
op_import,
op_edit,
op_copy_closure,
op_list_plugins,
parser_plugin_hooks,
op_unlock,
)
# Set up the parser.
parser = ArgumentParser(description="NixOS cloud deployment tool", prog="nixops")
parser.add_argument("--version", action="version", version="NixOps @version@")
parser.add_argument(
"--pdb", action="store_true", help="Invoke pdb on unhandled exception"
)
subparsers: _SubParsersAction = parser.add_subparsers(
help="sub-command help", metavar="operation", required=True
)
subparser = add_subparser(subparsers, "list", help="list all known deployments")
subparser.set_defaults(op=op_list_deployments)
subparser = add_subparser(subparsers, "create", help="create a new deployment")
subparser.set_defaults(op=op_create)
subparser.add_argument(
"--name", "-n", dest="name", metavar="NAME", help=SUPPRESS
) # obsolete, use -d instead
subparser = add_subparser(subparsers, "modify", help="modify an existing deployment")
subparser.set_defaults(op=op_modify)
subparser.add_argument(
"--name", "-n", dest="name", metavar="NAME", help="new symbolic name of deployment"
)
subparser = add_subparser(subparsers, "clone", help="clone an existing deployment")
subparser.set_defaults(op=op_clone)
subparser.add_argument(
"--name",
"-n",
dest="name",
metavar="NAME",
help="symbolic name of the cloned deployment",
)
subparser = add_subparser(subparsers, "delete", help="delete a deployment")
subparser.add_argument(
"--force", action="store_true", help="force deletion even if resources still exist"
)
subparser.add_argument("--all", action="store_true", help="delete all deployments")
subparser.set_defaults(op=op_delete)
subparser = add_subparser(subparsers, "info", help="show the state of the deployment")
subparser.set_defaults(op=op_info)
subparser.add_argument("--all", action="store_true", help="show all deployments")
subparser.add_argument(
"--plain", action="store_true", help="do not pretty-print the output"
)
subparser.add_argument(
"--no-eval",
action="store_true",
help="do not evaluate the deployment specification",
)
subparser = add_subparser(
subparsers,
"check",
help="check the state of the machines in the network"
" (note that this might alter the internal nixops state to consolidate with the real state of the resource)",
)
subparser.set_defaults(op=op_check)
subparser.add_argument("--all", action="store_true", help="check all deployments")
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="check only the specified machines",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="check all except the specified machines",
)
subparser = add_subparser(
subparsers,
"set-args",
help="persistently set arguments to the deployment specification",
)
subparser.set_defaults(op=op_set_args)
subparser.add_argument(
"--arg",
nargs=2,
action="append",
dest="args",
metavar=("NAME", "VALUE"),
help="pass a Nix expression value",
)
subparser.add_argument(
"--argstr",
nargs=2,
action="append",
dest="argstrs",
metavar=("NAME", "VALUE"),
help="pass a string value",
)
subparser.add_argument(
"--unset",
nargs=1,
action="append",
dest="unset",
metavar="NAME",
help="unset previously set argument",
)
subparser = add_subparser(subparsers, "deploy", help="deploy the network configuration")
subparser.set_defaults(op=op_deploy)
subparser.add_argument(
"--kill-obsolete", "-k", action="store_true", help="kill obsolete virtual machines"
)
subparser.add_argument(
"--dry-run", action="store_true", help="evaluate and print what would be built"
)
subparser.add_argument(
"--dry-activate",
action="store_true",
help="show what will be activated on the machines in the network",
)
subparser.add_argument(
"--test",
action="store_true",
help="build and activate the new configuration; do not enable it in the bootloader. Rebooting the system will roll back automatically.",
)
subparser.add_argument(
"--boot",
action="store_true",
help="build the new configuration and enable it in the bootloader; do not activate it. Upon reboot, the system will use the new configuration.",
)
subparser.add_argument(
"--repair", action="store_true", help="use --repair when calling nix-build (slow)"
)
subparser.add_argument(
"--evaluate-only", action="store_true", help="only call nix-instantiate and exit"
)
subparser.add_argument(
"--plan-only",
action="store_true",
help="show the diff between the configuration and the state and exit",
)
subparser.add_argument(
"--build-only",
action="store_true",
help="build only; do not perform deployment actions",
)
subparser.add_argument(
"--create-only", action="store_true", help="exit after creating missing machines"
)
subparser.add_argument(
"--copy-only", action="store_true", help="exit after copying closures"
)
subparser.add_argument(
"--allow-recreate",
action="store_true",
help="recreate resources machines that have disappeared",
)
subparser.add_argument(
"--always-activate",
action="store_true",
help="activate unchanged configurations as well",
)
add_common_deployment_options(subparser)
subparser = add_subparser(subparsers, "send-keys", help="send encryption keys")
subparser.set_defaults(op=op_send_keys)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="send keys to only the specified machines",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="send keys to all except the specified machines",
)
subparser = add_subparser(
subparsers, "destroy", help="destroy all resources in the specified deployment"
)
subparser.set_defaults(op=op_destroy)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="destroy only the specified machines",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="destroy all except the specified machines",
)
subparser.add_argument(
"--wipe", action="store_true", help="securely wipe data on the machines"
)
subparser.add_argument("--all", action="store_true", help="destroy all deployments")
subparser = add_subparser(
subparsers,
"delete-resources",
help="deletes the resource from the local NixOps state file.",
)
subparser.set_defaults(op=op_delete_resources)
subparser.add_argument(
"--include",
nargs="+",
metavar="RESOURCE-NAME",
help="delete only the specified resources",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="RESOURCE-NAME",
help="delete all resources except the specified resources",
)
subparser = add_subparser(
subparsers, "stop", help="stop all virtual machines in the network"
)
subparser.set_defaults(op=op_stop)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="stop only the specified machines",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="stop all except the specified machines",
)
subparser = add_subparser(
subparsers, "start", help="start all virtual machines in the network"
)
subparser.set_defaults(op=op_start)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="start only the specified machines",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="start all except the specified machines",
)
subparser = add_subparser(
subparsers, "reboot", help="reboot all virtual machines in the network"
)
subparser.set_defaults(op=op_reboot)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="reboot only the specified machines",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="reboot all except the specified machines",
)
subparser.add_argument(
"--no-wait", action="store_true", help="do not wait until the machines are up again"
)
subparser.add_argument(
"--rescue",
action="store_true",
help="reboot machines into the rescue system" " (if available)",
)
subparser.add_argument(
"--hard",
action="store_true",
help="send a hard reset (power switch) to the machines" " (if available)",
)
subparser = add_subparser(
subparsers, "show-arguments", help="print the arguments to the network expressions"
)
subparser.set_defaults(op=op_show_arguments)
subparser = add_subparser(
subparsers, "show-physical", help="print the physical network expression"
)
subparser.add_argument(
"--backup",
dest="backupid",
default=None,
help="print physical network expression for given backup id",
)
subparser.set_defaults(op=op_show_physical)
subparser = add_subparser(
subparsers, "ssh", help="login on the specified machine via SSH"
)
subparser.set_defaults(op=op_ssh)
subparser.add_argument("machine", metavar="MACHINE", help="identifier of the machine")
subparser.add_argument(
"args",
metavar="SSH_ARGS",
nargs=REMAINDER,
help="SSH flags and/or command",
)
subparser.add_argument(
"--now",
dest="now",
action="store_true",
help="do not acquire a lock before fetching the state",
)
subparser = add_subparser(
subparsers, "ssh-for-each", help="execute a command on each machine via SSH"
)
subparser.set_defaults(op=op_ssh_for_each)
subparser.add_argument(
"args", metavar="ARG", nargs="*", help="additional arguments to SSH"
)
subparser.add_argument("--parallel", "-p", action="store_true", help="run in parallel")
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="run command only on the specified machines",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="run command on all except the specified machines",
)
subparser.add_argument(
"--all", action="store_true", help="run ssh-for-each for all deployments"
)
subparser = add_subparser(
subparsers, "scp", help="copy files to or from the specified machine via scp"
)
subparser.set_defaults(op=op_scp)
subparser.add_argument(
"--from",
dest="scp_from",
action="store_true",
help="copy a file from specified machine",
)
subparser.add_argument(
"--to", dest="scp_to", action="store_true", help="copy a file to specified machine"
)
subparser.add_argument("machine", metavar="MACHINE", help="identifier of the machine")
subparser.add_argument("source", metavar="SOURCE", help="source file location")
subparser.add_argument("destination", metavar="DEST", help="destination file location")
subparser = add_subparser(
subparsers,
"mount",
help="mount a directory from the specified machine into the local filesystem",
)
subparser.set_defaults(op=op_mount)
subparser.add_argument(
"machine",
metavar="MACHINE[:PATH]",
help="identifier of the machine, optionally followed by a path",
)
subparser.add_argument("destination", metavar="PATH", help="local path")
subparser.add_argument(
"--sshfs-option",
"-o",
action="append",
metavar="OPTIONS",
help="mount options passed to sshfs",
)
subparser = add_subparser(subparsers, "rename", help="rename machine in network")
subparser.set_defaults(op=op_rename)
subparser.add_argument(
"current_name", metavar="FROM", help="current identifier of the machine"
)
subparser.add_argument("new_name", metavar="TO", help="new identifier of the machine")
subparser = add_subparser(
subparsers,
"backup",
help="make snapshots of persistent disks in network (currently EC2-only)",
)
subparser.set_defaults(op=op_backup)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="perform backup actions on the specified machines only",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="do not perform backup actions on the specified machines",
)
subparser.add_argument(
"--freeze",
dest="freeze_fs",
action="store_true",
help="freeze filesystems for non-root filesystems that support this (e.g. xfs)",
)
subparser.add_argument(
"--force",
dest="force",
action="store_true",
help="start new backup even if previous is still running",
)
subparser.add_argument(
"--devices",
nargs="+",
metavar="DEVICE-NAME",
help="only backup the specified devices",
)
subparser = add_subparser(subparsers, "backup-status", help="get status of backups")
subparser.set_defaults(op=op_backup_status)
subparser.add_argument(
"backupid", default=None, nargs="?", help="use specified backup in stead of latest"
)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="perform backup actions on the specified machines only",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="do not perform backup actions on the specified machines",
)
subparser.add_argument(
"--wait",
dest="wait",
action="store_true",
help="wait until backup is finished",
)
subparser.add_argument(
"--latest",
dest="latest",
action="store_true",
help="show status of latest backup only",
)
subparser = add_subparser(subparsers, "remove-backup", help="remove a given backup")
subparser.set_defaults(op=op_remove_backup)
subparser.add_argument("backupid", metavar="BACKUP-ID", help="backup ID to remove")
subparser.add_argument(
"--keep-physical",
dest="keep_physical",
action="store_true",
help="do not remove the physical backups, only remove backups from nixops state",
)
subparser = add_subparser(subparsers, "clean-backups", help="remove old backups")
subparser.set_defaults(op=op_clean_backups)
subparser.add_argument(
"--keep", dest="keep", type=int, help="number of backups to keep around"
)
subparser.add_argument(
"--keep-days",
metavar="N",
dest="keep_days",
type=int,
help="keep backups newer than N days",
)
subparser.add_argument(
"--keep-physical",
dest="keep_physical",
action="store_true",
help="do not remove the physical backups, only remove backups from nixops state",
)
subparser = add_subparser(
subparsers,
"restore",
help="restore machines based on snapshots of persistent disks in network (currently EC2-only)",
)
subparser.set_defaults(op=op_restore)
subparser.add_argument(
"--backup-id", default=None, help="use specified backup in stead of latest"
)
subparser.add_argument(
"--include",
nargs="+",
metavar="MACHINE-NAME",
help="perform backup actions on the specified machines only",
)
subparser.add_argument(
"--exclude",
nargs="+",
metavar="MACHINE-NAME",
help="do not perform backup actions on the specified machines",
)
subparser.add_argument(
"--devices",
nargs="+",
metavar="DEVICE-NAME",
help="only restore the specified devices",
)
subparser = add_subparser(
subparsers, "show-option", help="print the value of a configuration option"
)
subparser.set_defaults(op=op_show_option)
subparser.add_argument("machine", metavar="MACHINE", help="identifier of the machine")
subparser.add_argument("option", metavar="OPTION", help="option name")
subparser.add_argument(
"--include-physical",
action="store_true",
help="include the physical specification in the evaluation",
)
subparser = add_subparser(
subparsers,
"list-generations",
help="list previous configurations to which you can roll back",
)
subparser.set_defaults(op=op_list_generations)
subparser = add_subparser(
subparsers, "rollback", help="roll back to a previous configuration"
)
subparser.set_defaults(op=op_rollback)
subparser.add_argument(
"generation",
type=int,
metavar="GENERATION",
help="number of the desired configuration (see ‘nixops list-generations’)",
)
add_common_deployment_options(subparser)
subparser = add_subparser(
subparsers, "delete-generation", help="remove a previous configuration"
)
subparser.set_defaults(op=op_delete_generation)
subparser.add_argument(
"generation",
type=int,
metavar="GENERATION",
help="number of the desired configuration (see ‘nixops list-generations’)",
)
add_common_deployment_options(subparser)
subparser = add_subparser(
subparsers,
"show-console-output",
help="print the machine's console output on stdout",
)
subparser.set_defaults(op=op_show_console_output)
subparser.add_argument("machine", metavar="MACHINE", help="identifier of the machine")
add_common_deployment_options(subparser)
subparser = add_subparser(
subparsers, "dump-nix-paths", help="dump Nix paths referenced in deployments"
)
subparser.add_argument(
"--all", action="store_true", help="dump Nix paths for all deployments"
)
subparser.set_defaults(op=op_dump_nix_paths)
add_common_deployment_options(subparser)
subparser = add_subparser(subparsers, "export", help="export the state of a deployment")
subparser.add_argument("--all", action="store_true", help="export all deployments")
subparser.set_defaults(op=op_export)
subparser = add_subparser(
subparsers, "import", help="import deployments into the state file"
)
subparser.add_argument(
"--include-keys",
action="store_true",
help="import public SSH hosts keys to .ssh/known_hosts",
)
subparser.set_defaults(op=op_import)
subparser = add_subparser(
subparsers, "edit", help="open the deployment specification in $EDITOR"
)
subparser.set_defaults(op=op_edit)
subparser = add_subparser(
subparsers, "copy-closure", help="copy closure to a target machine"
)
subparser.add_argument("machine", help="identifier of the machine")
subparser.add_argument("storepath", help="store path of the closure to be copied")
subparser.set_defaults(op=op_copy_closure)
subparser = subparsers.add_parser(
"list-plugins", help="list the available nixops plugins"
)
subparser.set_defaults(op=op_list_plugins)
subparser.add_argument(
"--verbose", "-v", action="store_true", help="Provide extra plugin information"
)
subparser.add_argument("--debug", action="store_true", help="enable debug output")
subparser = add_subparser(subparsers, "unlock", help="Force unlock the deployment lock")
subparser.set_defaults(op=op_unlock)
parser_plugin_hooks(parser, subparsers)
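Editorial note: each subparser above stores its handler through set_defaults(op=op_*), so after parsing, the entry point can dispatch on args.op. Below is a minimal, self-contained sketch of that pattern; op_hello, "example", and NAME are hypothetical names used only for illustration, and the real nixops entry point's locking and error handling may differ.

import argparse
import sys


def op_hello(args) -> None:
    # Hypothetical handler, standing in for op_deploy, op_ssh, and friends.
    print("hello, {0}".format(args.name))


parser = argparse.ArgumentParser(prog="example")
subparsers = parser.add_subparsers(dest="command")

hello = subparsers.add_parser("hello", help="greet the given name")
hello.add_argument("name", metavar="NAME", help="who to greet")
hello.set_defaults(op=op_hello)

args = parser.parse_args()
if not hasattr(args, "op"):
    # No subcommand given: print usage instead of failing on args.op.
    parser.print_help()
    sys.exit(1)
args.op(args)  # dispatch to the handler registered by the chosen subcommand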
| 19,203 | Python | .py | 613 | 28.1354 | 148 | 0.721722 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |
| 11,399 | known_hosts.py | NixOS_nixops/nixops/known_hosts.py |
import os
import threading
import fcntl
from typing import Optional

# Allow only one thread to rewrite known_hosts at a time.
LOCK = threading.Lock()


def _rewrite(ip_address: str, add_ip: bool, public_host_key: str) -> None:
    with LOCK:
        path = os.path.expanduser("~/.ssh/known_hosts")
        # If hosts file doesn't exist, create an empty file
        if not os.path.isfile(path):
            basedir = os.path.dirname(path)
            if not os.path.exists(basedir):
                os.makedirs(basedir)
            open(path, "a").close()

        with open(os.path.expanduser("~/.ssh/.known_hosts.lock"), "w") as lockfile:
            fcntl.flock(
                lockfile, fcntl.LOCK_EX
            )  # unlock is implicit at the end of the with

            with open(path, "r") as f:
                contents = f.read()

            def rewrite(lst: str) -> Optional[str]:
                # A known_hosts line has the form "name1,name2,... key".
                # Lines that do not mention ip_address (or, when removing,
                # whose key differs) are returned unchanged.
                if " " not in lst:
                    return lst
                (first, rest) = lst.split(" ", 1)
                names = first.split(",")
                if ip_address not in names:
                    return lst
                if not add_ip and public_host_key != rest:
                    return lst
                # Drop ip_address from the name list; drop the whole line if
                # no other names remain.
                new_names = [n for n in names if n != ip_address]
                return ",".join(new_names) + " " + rest if new_names != [] else None

            new = [
                line
                for line in [rewrite(line) for line in contents.splitlines()]
                if line is not None
            ]

            if add_ip:
                new.append(ip_address + " " + public_host_key)

            # Write the new contents to a temporary file and rename it over
            # the original so readers never see a half-written known_hosts.
            tmp = "{0}.tmp-{1}".format(path, os.getpid())
            f = open(tmp, "w")
            f.write("\n".join(new + [""]))
            f.close()
            os.rename(tmp, path)


def remove(ip_address: str, public_host_key: str) -> None:
    """Remove a specific known host key."""
    _rewrite(ip_address, False, public_host_key)


def add(ip_address: str, public_host_key: str) -> None:
    """Add a known host key."""
    _rewrite(ip_address, True, public_host_key)


def update(prev_address: str, new_address: str, public_host_key: str) -> None:
    # FIXME: this rewrites known_hosts twice.
    if prev_address != new_address:
        remove(prev_address, public_host_key)
    add(new_address, public_host_key)
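Editorial note: a minimal usage sketch of the helpers defined above, assuming the module is importable as nixops.known_hosts (per the file path shown); the address and key values are hypothetical placeholders, and these calls rewrite the real ~/.ssh/known_hosts on the local machine.

from nixops import known_hosts

# Hypothetical address and host key, purely illustrative.
key = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIExampleExampleExampleExample"
known_hosts.add("192.0.2.10", key)                   # record the key for a new machine
known_hosts.update("192.0.2.10", "192.0.2.20", key)  # the machine changed address
known_hosts.remove("192.0.2.20", key)                # the machine was destroyed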
| 2,353 | Python | .py | 55 | 31.745455 | 84 | 0.546012 | NixOS/nixops | 1,813 | 363 | 328 | LGPL-3.0 | 9/5/2024, 5:11:18 PM (Europe/Amsterdam) |