repo_name stringlengths 6 97 | path stringlengths 3 341 | text stringlengths 8 1.02M |
|---|---|---|
gaybro8777/klio | devtools/src/klio_devtools/commands/develop.py | <gh_stars>100-1000
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import docker
import dockerpty
from klio_cli.commands import base
class DevelopKlioContainer(base.BaseDockerizedPipeline):
    """Run a job's container with local klio source mounted for development.

    Mounts the local klio repo's package directories into the container,
    installs them as editable pip packages, then attaches an interactive
    terminal so changes to local source take effect inside the container.
    """

    DOCKER_LOGGER_NAME = "klio.develop"
    # editable install from the mounted source dirs; the 2020-resolver
    # flag opts into pip's (then-new) dependency resolver
    PIP_CMD = "pip install --use-feature=2020-resolver -e /usr/src/{pkg}"
    # order is important - klio-core doesn't depend on any other
    # internal libraries; klio depends on klio-core; klio-exec depends
    # on klio-core & klio
    PKGS = ("klio-core", "klio", "klio-exec", "klio-audio")

    def __init__(
        self,
        job_dir,
        klio_config,
        docker_runtime_config,
        klio_path,
        exclude_pkgs,
    ):
        """
        Args:
            job_dir: path to the job's directory.
            klio_config: the job's parsed klio configuration.
            docker_runtime_config: docker runtime options for the container.
            klio_path: path to the local klio monorepo checkout.
            exclude_pkgs: package names from ``PKGS`` to skip installing.
        """
        super().__init__(job_dir, klio_config, docker_runtime_config)
        self.klio_path = klio_path
        # filter while preserving PKGS install order (see note above)
        self.install_pkgs = [
            pkg for pkg in self.PKGS if pkg not in exclude_pkgs
        ]
        self.requires_config_file = False

    def _run_docker_container(self, runflags):
        """
        Create & run job container, install klio packages as editable,
        and attach to the container with an interactive terminal.
        """
        container = self._docker_client.containers.create(**runflags)
        container.start()

        def run_and_log(cmd):
            # stream the command's output into the klio.develop logger
            _, output = container.exec_run(cmd, tty=True, stream=True)
            for line in output:
                try:
                    self._docker_logger.info(line.decode("utf-8").strip("\n"))
                except Exception:
                    # sometimes there's a decode error for a log line, but it
                    # shouldn't stop the setup
                    pass

        run_and_log("pip install --upgrade pip setuptools")
        for pkg in self.install_pkgs:
            run_and_log(self.PIP_CMD.format(pkg=pkg))
        # need to use lower-level Docker API client in order to start
        # an interactive terminal inside the running container
        self._docker_logger.info(
            "\nConnecting to job's container. Use CTRL+C to stop."
        )
        pty_client = docker.APIClient(base_url="unix://var/run/docker.sock")
        dockerpty.start(pty_client, container.attrs)

    def _get_volumes(self):
        """Return parent volumes plus mounts for each local klio package."""
        volumes = super(DevelopKlioContainer, self)._get_volumes()
        local_exec_path = os.path.join(self.klio_path, "exec")
        volumes[local_exec_path] = {"bind": "/usr/src/klio-exec", "mode": "rw"}
        local_core_path = os.path.join(self.klio_path, "core")
        volumes[local_core_path] = {"bind": "/usr/src/klio-core", "mode": "rw"}
        local_lib_path = os.path.join(self.klio_path, "lib")
        volumes[local_lib_path] = {"bind": "/usr/src/klio", "mode": "rw"}
        local_audio_path = os.path.join(self.klio_path, "audio")
        volumes[local_audio_path] = {
            "bind": "/usr/src/klio-audio",
            "mode": "rw",
        }
        return volumes

    def _get_docker_runflags(self, *args, **kwargs):
        """Docker run options: an interactive bash shell in the job image."""
        return {
            "image": self._full_image_name,
            "entrypoint": "/bin/bash",
            "volumes": self._get_volumes(),
            "environment": self._get_environment(),
            # keep stdin open and allocate a tty for the interactive shell
            "stdin_open": True,
            "tty": True,
        }
|
gaybro8777/klio | exec/tests/unit/commands/utils/test_profile_utils.py | <filename>exec/tests/unit/commands/utils/test_profile_utils.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import pytest
from klio_exec.commands.utils import profile_utils
def test_get_profiling_data(mocker):
    """_get_profiling_data parses MEM lines into data/timestamp lists."""
    raw = "MEM 100 100001\nMEM 200 100002\nMEM 300 100003\n"
    opener = mocker.mock_open(read_data=raw)
    patched_open = mocker.patch(
        "klio_exec.commands.utils.profile_utils.open", opener
    )

    actual = profile_utils._get_profiling_data("foo.txt")

    patched_open.assert_called_once_with("foo.txt", "r")
    expected = {
        "data": [100.0, 200.0, 300.0],
        "timestamp": [100001.0, 100002.0, 100003.0],
    }
    assert expected == actual
def test_get_profiling_data_raises_no_file(mocker, caplog):
    """A missing file logs an error and raises SystemExit.

    Fix: the original placed ``assert ret is None`` inside the
    ``pytest.raises`` block *after* the raising call, so it could never
    execute; only the raising call belongs inside the context manager.
    """
    with pytest.raises(SystemExit):
        profile_utils._get_profiling_data("i_should_not_exist.txt")

    assert 1 == len(caplog.records)
    assert "ERROR" == caplog.records[0].levelname
    assert "Could not read profiling data" in caplog.records[0].message
def test_get_profiling_data_malformed_data(mocker):
    """Lines that do not parse as ``MEM <data> <ts>`` are skipped."""
    raw = "MEM 100 100001\nfoobar baz\n\nMEM 1 2 3\nMEM 300 100003\n"
    opener = mocker.mock_open(read_data=raw)
    patched_open = mocker.patch(
        "klio_exec.commands.utils.profile_utils.open", opener
    )

    actual = profile_utils._get_profiling_data("foo.txt")

    patched_open.assert_called_once_with("foo.txt", "r")
    assert {"data": [100.0, 300.0], "timestamp": [100001.0, 100003.0]} == actual
def test_get_profiling_data_raises_no_data(mocker, caplog):
    """Empty profiling output logs an error and raises SystemExit.

    Fix: only the raising call goes inside ``pytest.raises``; the
    original's ``assert ret is None`` placed after it in the block was
    unreachable dead code.
    """
    m_open = mocker.mock_open(read_data="\n")
    mock_open = mocker.patch(
        "klio_exec.commands.utils.profile_utils.open", m_open
    )

    with pytest.raises(SystemExit):
        profile_utils._get_profiling_data("foo.txt")

    mock_open.assert_called_once_with("foo.txt", "r")
    assert 1 == len(caplog.records)
    assert "ERROR" == caplog.records[0].levelname
    assert "No samples to parse in" in caplog.records[0].message
def test_plot(mocker, monkeypatch):
    """plot() reads profiling data and renders it via matplotlib."""
    parsed = {
        "data": [100.0, 200.0, 300.0],
        "timestamp": [100001.0, 100002.0, 100003.0],
    }
    fake_get_data = mocker.Mock(return_value=parsed)
    monkeypatch.setattr(profile_utils, "_get_profiling_data", fake_get_data)
    fake_plt = mocker.Mock()
    monkeypatch.setattr(profile_utils, "plt", fake_plt)

    profile_utils.plot(
        "input.txt", "output.png", "x-label", "y-label", "title test"
    )

    fake_get_data.assert_called_once_with("input.txt")
    fake_plt.figure.assert_called_once_with(figsize=(14, 6), dpi=90)

    # np arrays can't be compared via assert_called_with; grab the call
    # args and compare with np.allclose at zero tolerance instead
    exp_values = np.asarray(parsed["data"])
    timestamps = np.asarray(parsed["timestamp"])
    exp_elapsed = timestamps - float(timestamps[0])
    plot_args, plot_kwargs = fake_plt.plot.call_args
    assert 3 == len(plot_args)
    assert not plot_kwargs
    assert np.allclose(exp_elapsed, plot_args[0], rtol=0)
    assert np.allclose(exp_values, plot_args[1], rtol=0)
    assert "+-c" == plot_args[2]

    fake_plt.xlabel.assert_called_once_with("x-label")
    fake_plt.ylabel.assert_called_once_with("y-label")
    fake_plt.title.assert_called_once_with("title test")
    fake_plt.grid.assert_called_once_with()
    fake_plt.savefig.assert_called_once_with("output.png")
|
gaybro8777/klio | lib/tests/unit/metrics/test_dispatcher.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import pytest
from klio.metrics import dispatcher
class GenericDispatcher(dispatcher.BaseMetricDispatcher):
    """Minimal concrete dispatcher used to exercise the abstract base."""

    METRIC_TYPE = "generic"

    def _setup_metric_relay(self, relay_clients):
        # no relay wiring needed for these tests
        pass
@pytest.fixture
def metric_params():
    """Common keyword arguments used to construct dispatchers."""
    params = {
        "name": "my-metric",
        "value": 0,
        "transform": "my-transform",
    }
    params["kwargs"] = {"tags": {"key-tag": "value-tag"}}
    return params
@pytest.fixture
def metric(mocker):
    """A fake per-relay metric object with a stubbed ``update``."""
    fake_metric = mocker.Mock()
    fake_metric.update = mocker.Mock()
    return fake_metric
@pytest.fixture
def relay_client(mocker, metric):
    """A fake relay client whose metric factories all return ``metric``."""
    client = mocker.Mock()
    client.emit = mocker.Mock()
    for factory in (client.counter, client.gauge, client.timer):
        factory.return_value = metric
    return client
@pytest.fixture
def counter_dispatcher(
    relay_client, metric_params, metric, mocker, monkeypatch
):
    """A CounterDispatcher whose ``submit`` is stubbed out."""
    dispatcher_obj = dispatcher.CounterDispatcher(
        relay_clients=[relay_client], **metric_params
    )
    monkeypatch.setattr(dispatcher_obj, "submit", mocker.Mock())
    return dispatcher_obj
@pytest.fixture
def gauge_dispatcher(relay_client, metric_params, metric, mocker, monkeypatch):
    """A GaugeDispatcher whose ``submit`` is stubbed out."""
    dispatcher_obj = dispatcher.GaugeDispatcher(
        relay_clients=[relay_client], **metric_params
    )
    monkeypatch.setattr(dispatcher_obj, "submit", mocker.Mock())
    return dispatcher_obj
@pytest.fixture
def default_timer(mocker, monkeypatch):
    """Patch ``timeit`` so consecutive clock reads report 0.0 then 1.0."""
    fake_timeit = mocker.Mock()
    fake_timeit.default_timer.side_effect = [0.0, 1.0]
    monkeypatch.setattr(dispatcher, "timeit", fake_timeit)
    return fake_timeit
@pytest.fixture
def timer_dispatcher(
    relay_client, metric_params, metric, default_timer, mocker, monkeypatch
):
    """A TimerDispatcher with a patched clock and stubbed ``submit``."""
    dispatcher_obj = dispatcher.TimerDispatcher(
        relay_clients=[relay_client], **metric_params
    )
    monkeypatch.setattr(dispatcher_obj, "submit", mocker.Mock())
    return dispatcher_obj
@pytest.fixture
def generic_dispatcher(relay_client, metric_params):
    """A GenericDispatcher wired to the fake relay client."""
    return GenericDispatcher(relay_clients=[relay_client], **metric_params)
def test_base_metric_dispatcher_raises(relay_client, metric_params):
    """Subclasses must implement ``_setup_metric_relay``."""

    class IncompleteDispatcher(dispatcher.BaseMetricDispatcher):
        pass

    with pytest.raises(NotImplementedError):
        IncompleteDispatcher(relay_clients=[relay_client], **metric_params)
@pytest.mark.parametrize(
    "has_transform,exp_key",
    ((True, "generic_my-metric_my-transform"), (False, "generic_my-metric")),
)
def test_base_metric_setup_metric_key(
    has_transform, exp_key, relay_client, metric_params
):
    """metric_key includes the transform only when one is given."""
    if not has_transform:
        del metric_params["transform"]

    generic = GenericDispatcher(relay_clients=[relay_client], **metric_params)

    assert exp_key == generic.metric_key
def test_base_metric_logger(generic_dispatcher):
    """The logger is created lazily and cached on thread-local storage."""
    cached = getattr(
        generic_dispatcher._thread_local,
        "klio_metrics_dispatcher_logger",
        None,
    )
    assert not cached  # sanity check: nothing cached yet

    logger = generic_dispatcher.logger

    assert logging.getLogger("klio.metrics.dispatcher") == logger
    assert (
        generic_dispatcher._thread_local.klio_metrics_dispatcher_logger
        == logger
    )
def test_base_metric_submit_callback(generic_dispatcher, mocker, caplog):
    """A successfully completed future produces no log records."""
    future = mocker.Mock()
    future.result.return_value = None

    generic_dispatcher._submit_callback(future)

    future.result.assert_called_once_with()
    assert 0 == len(caplog.records)
def test_base_metric_submit_callback_exception(
    generic_dispatcher, mocker, caplog
):
    """A failed future is logged as a warning rather than raised."""
    future = mocker.Mock()
    future.result.side_effect = Exception("fuu")

    generic_dispatcher._submit_callback(future)

    future.result.assert_called_once_with()
    assert 1 == len(caplog.records)
    assert logging.WARNING == caplog.records[0].levelno
def test_base_metric_submit(
    generic_dispatcher, relay_client, mocker, monkeypatch
):
    """submit() hands work to the pool and tags the future with a key."""
    pool, future = mocker.Mock(), mocker.Mock()
    pool.submit.return_value = future
    monkeypatch.setattr(generic_dispatcher, "_thread_pool", pool)

    metric = mocker.Mock()
    metric.name = "my-metric"
    metric.transform = "my-transform"

    generic_dispatcher.submit(relay_client.emit, metric)

    pool.submit.assert_called_once_with(relay_client.emit, metric)
    assert "generic_my-metric_my-transform" == future.metric_key
    future.add_done_callback.assert_called_once_with(
        generic_dispatcher._submit_callback
    )
def test_counter_setup_metric_relay(relay_client, metric, metric_params):
    """A counter metric is created for, and paired with, each relay."""
    counter = dispatcher.CounterDispatcher(
        relay_clients=[relay_client], **metric_params
    )

    assert [(relay_client, metric)] == counter.relay_to_metric
    relay_client.counter.assert_called_once_with(**metric_params)
def test_counter_inc(counter_dispatcher, relay_client, metric):
    """inc() bumps the value, updates the metric, and submits emission."""
    assert 0 == counter_dispatcher.value  # sanity check

    counter_dispatcher.inc()

    assert 1 == counter_dispatcher.value
    metric.update.assert_called_once_with(1)
    counter_dispatcher.submit.assert_called_once_with(
        relay_client.emit, metric
    )
def test_gauge_setup_metric_relay(relay_client, metric, metric_params):
    """A gauge metric is created for, and paired with, each relay."""
    gauge = dispatcher.GaugeDispatcher(
        relay_clients=[relay_client], **metric_params
    )

    assert [(relay_client, metric)] == gauge.relay_to_metric
    relay_client.gauge.assert_called_once_with(**metric_params)
def test_gauge_set(gauge_dispatcher, relay_client, metric):
    """set() records the value, updates the metric, and submits emission."""
    assert 0 == gauge_dispatcher.value  # sanity check

    gauge_dispatcher.set(5)

    assert 5 == gauge_dispatcher.value
    metric.update.assert_called_once_with(5)
    gauge_dispatcher.submit.assert_called_once_with(relay_client.emit, metric)
def test_timer_setup_metric_relay(relay_client, metric, metric_params):
    """A timer metric is created per relay, passing timer_unit through."""
    metric_params["timer_unit"] = "ns"

    timer = dispatcher.TimerDispatcher(
        relay_clients=[relay_client], **metric_params
    )

    assert [(relay_client, metric)] == timer.relay_to_metric
    relay_client.timer.assert_called_once_with(**metric_params)
def test_timer_start(timer_dispatcher, metric):
    """start() records the start time but does not emit anything yet."""
    assert 0 == timer_dispatcher.value  # sanity check
    assert not timer_dispatcher._start_time  # sanity check

    timer_dispatcher.start()

    assert 0.0 == timer_dispatcher._start_time
    metric.update.assert_not_called()
    timer_dispatcher.submit.assert_not_called()
def test_timer_stop(timer_dispatcher, relay_client, metric):
    """stop() computes the elapsed time, updates, and submits emission."""
    assert 0 == timer_dispatcher.value  # sanity check
    assert not timer_dispatcher._start_time  # sanity check

    timer_dispatcher.start()

    assert 0.0 == timer_dispatcher._start_time
    metric.update.assert_not_called()
    timer_dispatcher.submit.assert_not_called()

    timer_dispatcher.stop()

    # default_timer fixture reports 0.0 then 1.0: one second elapsed,
    # converted seconds -> nanoseconds
    assert 1.0 * 1e9 == timer_dispatcher.value
    metric.update.assert_called_once_with(timer_dispatcher.value)
    timer_dispatcher.submit.assert_called_once_with(relay_client.emit, metric)
def test_timer_stop_no_start(timer_dispatcher, metric, caplog):
    """stop() without a prior start() warns and emits nothing."""
    assert not timer_dispatcher._start_time  # sanity check

    timer_dispatcher.stop()

    assert 1 == len(caplog.records)
    assert logging.WARNING == caplog.records[0].levelno
    metric.update.assert_not_called()
    timer_dispatcher.submit.assert_not_called()
def test_timer_context_manager(
    relay_client, metric, metric_params, default_timer, mocker, monkeypatch
):
    """Using the timer as a context manager times the block and submits."""
    timer = dispatcher.TimerDispatcher(
        relay_clients=[relay_client], **metric_params
    )
    monkeypatch.setattr(timer, "submit", mocker.Mock())

    with timer:
        pass

    assert 0 == timer._start_time
    assert 1 * 1e9 == timer.value
    metric.update.assert_called_once_with(timer.value)
    timer.submit.assert_called_once_with(relay_client.emit, metric)
|
gaybro8777/klio | core/tests/test_dataflow.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os
import pytest
from klio_core import dataflow
from klio_core import utils
HERE = os.path.abspath(os.path.join(os.path.abspath(__file__), os.path.pardir))
FIXTURE_PATH = os.path.join(HERE, "fixtures")
@pytest.fixture
def mock_discovery_client(mocker, monkeypatch):
    """Replace ``discovery.build`` so no real API client is created."""
    fake_client = mocker.Mock()
    monkeypatch.setattr(
        dataflow.discovery, "build", lambda service, version: fake_client
    )
    return fake_client
def list_jobs():
    """Load the canned 'list all active jobs' API response fixture."""
    path = os.path.join(FIXTURE_PATH, "list_all_active_jobs.json")
    with open(path, "r") as f:
        return json.load(f)
def list_jobs_with_duplicates():
    """Load the 'list all active jobs' fixture containing duplicates."""
    path = os.path.join(FIXTURE_PATH, "list_all_active_jobs_duplicates.json")
    with open(path, "r") as f:
        return json.load(f)
def get_job_detail():
    """Load the canned 'get job detail' API response fixture."""
    path = os.path.join(FIXTURE_PATH, "get_job_detail.json")
    with open(path, "r") as f:
        return json.load(f)
def get_job_detail_no_kind():
    """Job detail whose single step has an unrecognized kind."""
    step = {"kind": "SomethingElse"}
    return {"steps": [step]}
def get_job_detail_no_username_value():
    """Job detail whose read step has a non-PubSub username property."""
    step = {
        "kind": "ParallelRead",
        "properties": {"username": "SomethingElse"},
    }
    return {"steps": [step]}
def get_job_detail_no_topic():
    """Job detail with a PubSub read step but no topic property."""
    step = {
        "kind": "ParallelRead",
        "properties": {"username": "ReadFromPubSub/Read"},
    }
    return {"steps": [step]}
@pytest.mark.parametrize(
    "region,side_effect,exp_call_count,exp_log_count",
    (
        # no region given: 13 execute calls expected — presumably one
        # list call per known GCP region; verify against DataflowClient
        (None, [list_jobs_with_duplicates(), {}, {}], 13, 11),
        # explicit region: a single list call, no per-region logging
        ("foo-region", [list_jobs(), {}, {}], 1, 0),
        # every lookup raises: one log record per attempted call
        (None, Exception("nojob4u"), 13, 13),
    ),
)
def test_find_job_by_name(
    region,
    side_effect,
    exp_call_count,
    exp_log_count,
    mock_discovery_client,
    caplog,
):
    """find_job_by_name queries one region, or scans all when none given."""
    client = dataflow.DataflowClient()
    # drill into the discovery client's nested resource chain to reach
    # the mocked jobs().list() request object
    locs = mock_discovery_client.projects.return_value.locations.return_value
    request = locs.jobs.return_value.list.return_value
    request.execute.side_effect = side_effect
    client.find_job_by_name("benchmark-beats", "gcp-project", region)
    assert exp_call_count == request.execute.call_count
    assert exp_log_count == len(caplog.records)
@pytest.mark.parametrize(
    "found_job,response,exp_log_count",
    (
        # jobs.get raises: error is logged, None returned
        (True, Exception("foo"), 1),
        # jobs.get succeeds: the raw response is returned
        (True, get_job_detail(), 0),
        # job not found by name: no API call is made
        (False, None, 0),
    ),
)
def test_get_job_detail(
    found_job,
    response,
    exp_log_count,
    mock_discovery_client,
    caplog,
    mocker,
    monkeypatch,
):
    """get_job_detail returns the API response only when the job exists."""
    client = dataflow.DataflowClient()
    mock_find_job_by_name = mocker.Mock()
    monkeypatch.setattr(client, "find_job_by_name", mock_find_job_by_name)
    if found_job:
        mock_find_job_by_name.return_value = list_jobs().get("jobs")[0]
    else:
        mock_find_job_by_name.return_value = None
    # drill into the discovery client's nested resource chain to reach
    # the mocked jobs().get() request object
    locs = mock_discovery_client.projects.return_value.locations.return_value
    request = locs.jobs.return_value.get.return_value
    if isinstance(response, Exception):
        # client is expected to swallow API errors and return None
        request.execute.side_effect = response
        exp_response = None
    else:
        request.execute.return_value = response
        exp_response = response
    ret_response = client.get_job_detail("job-name", "gcp-project")
    mock_find_job_by_name.assert_called_once_with(
        "job-name", "gcp-project", None
    )
    if found_job:
        assert 1 == request.execute.call_count
    assert exp_response == ret_response
    assert exp_log_count == len(caplog.records)
@pytest.mark.parametrize(
    "ret_job_info,exp_topic",
    (
        (lambda: None, None),
        (
            get_job_detail,
            "projects/sigint/topics/benchmark-audio-download-output",
        ),
        (get_job_detail_no_kind, None),
        (get_job_detail_no_username_value, None),
        (get_job_detail_no_topic, None),
    ),
)
def test_get_job_input_topic(
    ret_job_info, exp_topic, mock_discovery_client, mocker, monkeypatch
):
    """The input topic is extracted only from a well-formed job detail."""
    client = dataflow.DataflowClient()
    fake_get_detail = mocker.Mock(return_value=ret_job_info())
    monkeypatch.setattr(client, "get_job_detail", fake_get_detail)

    assert exp_topic == client.get_job_input_topic("job-name", "gcp-project")
@pytest.mark.parametrize(
    "api_version,exp_version", ((None, "v1b3"), ("v2", "v2"))
)
def test_get_dataflow_client(api_version, exp_version, mock_discovery_client):
    """Clients are cached globally per API version; default is v1b3."""
    client = dataflow.get_dataflow_client(api_version)

    assert isinstance(client, dataflow.DataflowClient)
    cached_client = utils.get_global("dataflow_client_%s" % exp_version)
    assert cached_client == client

    # clean up module-level global state so tests stay independent
    delattr(utils, "klio_global_state_dataflow_client_%s" % exp_version)
|
gaybro8777/klio | integration/audio-spectrograms/run.py | <reponame>gaybro8777/klio<gh_stars>0
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import warnings
import apache_beam as beam
from klio_audio.transforms import io as aio
from klio_audio.transforms import audio
import transforms
warnings.simplefilter("ignore")

# Noisy third-party/beam loggers to quiet down for integration output.
loggers_to_mute = (
    "apache_beam.io.filebasedsink",
    "apache_beam.runners.worker.statecache",
    "apache_beam.runners.portability.fn_api_runner",
    "apache_beam.runners.portability.fn_api_runner_transforms",
    "apache_beam.internal.gcp.auth",
    "oauth2client.transport",
    "oauth2client.client",
    # The concurrency logs may be different for every machine, so let's
    # just turn them off
    "klio.concurrency",
)

# Quiet the noisy loggers but keep klio itself fully verbose.
for logger_name in loggers_to_mute:
    logging.getLogger(logger_name).setLevel(logging.ERROR)
logging.getLogger("klio").setLevel(logging.DEBUG)
def run(in_pcol, job_config):
    """Build the audio-spectrogram separation pipeline.

    Loads audio, computes an STFT, separates foreground from background
    via nearest-neighbor filtering and soft masks, and uploads spectrogram
    plots for the full signal and both masks.

    Fix: corrected the "Spectrogam" -> "Spectrogram" typo in the three
    user-facing plot titles.

    Args:
        in_pcol: input PCollection of KlioMessages.
        job_config: the job's configuration (unused here).
    Returns:
        A de-duplicated PCollection of uploaded plot output paths.
    """
    # load 5 seconds of audio and get STFT
    stft = (
        in_pcol
        | aio.GcsLoadBinary()
        | audio.LoadAudio(offset=10, duration=5)
        | audio.GetSTFT()
    )
    # get magnitude of audio
    magnitude = (
        stft
        | "Get magnitude"
        >> beam.ParDo(transforms.GetMagnitude()).with_outputs()
    )
    # map the result to a key (the KlioMessage element)
    # so we can group all results by key
    magnitude_key = (
        magnitude.spectrogram
        | "element to spec" >> beam.Map(transforms.create_key_from_element)
    )
    # get nearest neighbors and map the result to a key (the KlioMessage
    # element)
    nn_filter = (
        magnitude.spectrogram
        | "Get nn filter" >> beam.ParDo(transforms.FilterNearestNeighbors())
        | "element to filter" >> beam.Map(transforms.create_key_from_element)
    )
    # map together the full magnitude with its filter by key (the
    # KlioMessage element)
    merge = (
        {"full": magnitude_key, "nnfilter": nn_filter}
        | "merge" >> beam.CoGroupByKey()
    )
    # calc the difference between full magnitude and the filter
    net = merge | beam.Map(transforms.subtract_filter_from_full)
    # create a mask from the filter minus the difference of full & filter
    first_mask = (
        {"first": nn_filter, "second": net, "full": magnitude_key}
        | "first mask group" >> beam.CoGroupByKey()
        | "first mask" >> beam.ParDo(transforms.GetSoftMask(margin=2))
    )
    # create another mask from the difference of full & filter minus the
    # filter
    second_mask = (
        {"first": net, "second": nn_filter, "full": magnitude_key}
        | "second mask group" >> beam.CoGroupByKey()
        | "second mask" >> beam.ParDo(transforms.GetSoftMask(margin=10))
    )
    # plot the full magnitude spectrogram
    magnitude_out = (
        magnitude.spectrogram
        | "full spec" >> audio.GetSpec()
        | "plot full spec"
        >> audio.SpecToPlot(
            title="Full Spectrogram for {element}", y_axis="log"
        )
        | "save full" >> aio.GcsUploadPlot(suffix="-full")
    )
    # plot the first mask (background) spectrogram
    background_out = (
        first_mask
        | "background spec" >> audio.GetSpec()
        | "plot background spec"
        >> audio.SpecToPlot(
            title="Background Spectrogram for {element}", y_axis="log"
        )
        | "save background" >> aio.GcsUploadPlot(suffix="-background")
    )
    # plot the second mask (foreground) spectrogram
    # NOTE: the step label "plot forground spec" keeps its original
    # (misspelled) name so existing pipeline step names stay stable
    foreground_out = (
        second_mask
        | "foreground spec" >> audio.GetSpec()
        | "plot forground spec"
        >> audio.SpecToPlot(
            title="Foreground Spectrogram for {element}", y_axis="log"
        )
        | "save foreground" >> aio.GcsUploadPlot(suffix="-foreground")
    )
    # fan the three output branches back into one de-duplicated collection
    return (
        (magnitude_out, background_out, foreground_out)
        | "flatten output paths" >> beam.Flatten()
        | "remove dups" >> beam.Distinct()
    )
|
gaybro8777/klio | core/src/klio_core/config/_converters.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
logger = logging.getLogger("klio")
class _UnsetRequiredValue(object):
"""
Sentinel class used on as the default for attributes that require a value.
``_UnsetRequiredValue`` is a singleton. There is only ever one of it.
"""
_singleton = None
def __new__(cls):
if _UnsetRequiredValue._singleton is None:
_UnsetRequiredValue._singleton = super(
_UnsetRequiredValue, cls
).__new__(cls)
return _UnsetRequiredValue._singleton
def __repr__(self):
return "UNSET_REQUIRED_VALUE"
UNSET_REQUIRED_VALUE = _UnsetRequiredValue()
"""
This is set as the default for an attribute when we want no default. This way
attrs itself doesn't raise an exception and instead the value is passed into
our converter which will raise a properly formatted error
"""
class ConfigValueConverter(object):
    """Base class for converters automatically added to any `attrs` `attrib`
    created in a class decorated with `config_object`. These converters
    properly handle raising Exceptions when a required value is not provided,
    as well as more strict type checking and type coercion.
    """

    def __init__(self, key):
        # config key name, used to build readable error messages
        self.key = key

    def validate(self, value):
        """Validate ``value``, raising if a required value was never set."""
        if value == UNSET_REQUIRED_VALUE:
            raise Exception("missing required key: {}".format(self.key))
        if value is None:
            # None is always accepted; subclasses only coerce real values
            return value
        return self._validate_value(value)

    def _validate_value(self, value):
        # default: accept anything as-is; subclasses override to coerce
        return value
class TypeCheckingConverter(ConfigValueConverter):
    """Converter that records its expected type for error reporting."""

    def __init__(self, key, type):
        super().__init__(key)
        self.type = type

    def _wrong_type_msg(self, wrong_value):
        template = "{}: expected value of type '{}', got a '{}' value instead"
        return template.format(
            self.key, self.type.__name__, type(wrong_value).__name__
        )
class SingleValueConverter(TypeCheckingConverter):
    """Converter for scalar config values: rejects dicts and lists.

    Fix: the original called the non-existent ``self._wrong_type_message``
    (the inherited method is ``_wrong_type_msg``) and never raised, so an
    invalid dict/list value crashed with AttributeError instead of
    producing a clear error.
    """

    def validate(self, value):
        if isinstance(value, (dict, list)):
            # container values are never valid for a scalar config key
            raise Exception(self._wrong_type_msg(value))
        return super().validate(value)
class IntConverter(SingleValueConverter):
    """Coerce config values to ``int``."""

    def __init__(self, key_prefix):
        super().__init__(key_prefix, int)

    def _validate_value(self, value):
        try:
            coerced = int(value)
        except Exception:
            raise Exception(
                "{}: expected numeric value, got '{}'".format(self.key, value)
            )
        return coerced
class BoolConverter(SingleValueConverter):
    """Coerce config values to ``bool``, warning on non-bool input."""

    def __init__(self, key_prefix):
        super().__init__(key_prefix, bool)

    def _validate_value(self, value):
        if value is None:
            # defensive: validate() already short-circuits None
            return value
        try:
            coerced = bool(value)
            if not isinstance(value, bool):
                # make silent truthiness coercion visible in the logs
                logger.warning(
                    (
                        "{}: found non-bool value '{}'"
                        ", converting to bool '{}'"
                    ).format(self.key, value, coerced)
                )
            return coerced
        except Exception:
            raise Exception(
                "{}: expected boolean value, got '{}'".format(self.key, value)
            )
class StringConverter(SingleValueConverter):
    """Coerce config values to ``str``, warning on non-string input."""

    def __init__(self, key_prefix):
        super().__init__(key_prefix, str)

    def _validate_value(self, value):
        if value is None:
            # defensive: validate() already short-circuits None
            return value
        if isinstance(value, str):
            return value
        # make the implicit stringification visible in the logs
        logger.warning(
            "{}: found non-string value '{}', converting to str".format(
                self.key, value
            )
        )
        return str(value)
class Converters(object):
    """Factory mapping python types to their converter's validate callable."""

    TYPES = {
        int: IntConverter,
        str: StringConverter,
        bool: BoolConverter,
    }

    @classmethod
    def for_type(cls, type, key):
        """Return a validate callable appropriate for ``type``.

        Falls back to the permissive base converter for unmapped types.
        """
        converter_cls = cls.TYPES.get(type, ConfigValueConverter)
        return converter_cls(key).validate
|
gaybro8777/klio | lib/src/klio/metrics/client.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Metrics registry to manage metrics for all configured relay clients.
The :class:`MetricsRegistry` class is the main client for which to create
and emit metrics. For example:
.. code-block:: python
class MyTransform(KlioBaseTransform):
def __init__(self):
self.my_counter = self._klio.metrics.counter(
name="foo",
value=2,
transform="mytransform",
tags={"tag1": "value1"}
)
def process(self, element):
# user code
# immediately emits a metric
self.my_counter.inc()
"""
import logging
from klio.metrics import dispatcher
class MetricsRegistry(object):
    """Main client to create and emit metrics.

    Args:
        relay_clients (list(klio.metrics.base.AbstractRelayClient)):
            configured relay clients.
        transform_name (str): transform name metrics default to when no
            ``transform`` keyword is given.
    """

    def __init__(self, relay_clients, transform_name):
        self._relays = relay_clients
        self._transform_name = transform_name
        # cache of created dispatchers keyed by "{type}_{name}_{transform}"
        self._registry = {}

    def _get_or_create(self, metric_type, dispatcher_cls, name, value, kwargs):
        """Return a cached dispatcher, creating and caching it on a miss.

        Shared implementation behind :meth:`counter`, :meth:`gauge`, and
        :meth:`timer`, which previously triplicated this logic.

        Args:
            metric_type (str): registry key prefix, e.g. ``"counter"``.
            dispatcher_cls: dispatcher class to instantiate on a miss.
            name (str): name of the metric.
            value (int): starting value of the metric.
            kwargs (dict): extra keyword arguments for the dispatcher;
                ``transform`` is popped from it when present.
        Returns:
            dispatcher.BaseMetricDispatcher: cached or new dispatcher.
        """
        transform_name = kwargs.pop("transform", self._transform_name)
        key = "{}_{}_{}".format(metric_type, name, transform_name)
        if key not in self._registry:
            self._registry[key] = dispatcher_cls(
                relay_clients=self._relays,
                name=name,
                value=value,
                transform=transform_name,
                **kwargs
            )
        return self._registry[key]

    def counter(self, name, value=0, **kwargs):
        """Get or create a counter.

        Creates a new counter if one is not found with a key of
        ``"counter_{name}_{transform}"`` of the given ``transform``.
        New counters will be stored in memory for simple caching.

        Args:
            name (str): name of counter
            value (int): starting value of counter; defaults to 0
            kwargs (dict): keyword arguments passed to each configured
                relay clients' counter object.
        Returns:
            dispatcher.CounterDispatcher: instance of a counter dispatcher
        """
        return self._get_or_create(
            "counter", dispatcher.CounterDispatcher, name, value, kwargs
        )

    def gauge(self, name, value=0, **kwargs):
        """Get or create a gauge.

        Creates a new gauge if one is not found with a key of
        ``"gauge_{name}_{transform}"``.
        New gauges will be stored in memory for simple caching.

        Args:
            name (str): name of gauge
            value (int): starting value of gauge; defaults to 0
            kwargs (dict): keyword arguments passed to each configured
                relay clients' gauge object.
        Returns:
            dispatcher.GaugeDispatcher: instance of a gauge dispatcher
        """
        return self._get_or_create(
            "gauge", dispatcher.GaugeDispatcher, name, value, kwargs
        )

    def timer(self, name, value=0, timer_unit="ns", **kwargs):
        """Get or create a timer.

        Creates a new timer if one is not found with a key of
        ``"timer_{name}_{transform}"``.
        New timers will be stored in memory for simple caching.

        Args:
            name (str): name of timer
            value (int): starting value of timer; defaults to 0
            timer_unit (str): desired unit of time; defaults to ns
            kwargs (dict): keyword arguments passed to each configured
                relay clients' timer object.
        Returns:
            dispatcher.TimerDispatcher: instance of a timer dispatcher
        """
        # fold timer_unit into kwargs so the shared helper can pass it
        # straight through to the dispatcher constructor
        kwargs["timer_unit"] = timer_unit
        return self._get_or_create(
            "timer", dispatcher.TimerDispatcher, name, value, kwargs
        )

    def marshal(self, metric):
        """Create a dictionary-representation of a given metric.

        Used when metric objects need to be pickled.

        Args:
            metric (dispatcher.BaseMetricDispatcher): metric instance to
                marshal.
        Returns:
            dict: the metric's data in dictionary form
        """
        data = {
            "type": metric.METRIC_TYPE,
            "name": metric.name,
            "value": metric.value,
        }
        data.update(metric.kwargs)
        return data

    def unmarshal(self, metric_data):
        """Create a metric instance based off of a dictionary.

        If "type" is not specified or is not one of "counter", "gauge",
        or "timer", it defaults to a gauge-type metric.
        Used when metrics objects need to be unpickled.

        Args:
            metric_data (dict): dictionary-representation of a given
                metric.
        Returns:
            dispatcher.BaseMetricDispatcher: a dispatcher relevant to
                metric type.
        """
        metric_type = metric_data.pop("type", None)
        name = metric_data.pop("name", None)
        value = metric_data.pop("value", None)
        kwargs = metric_data
        metric_method = {
            "counter": self.counter,
            "gauge": self.gauge,
            "timer": self.timer,
        }.get(metric_type)
        if not metric_method:
            msg = "Metric type '{}' not supported. Defaulting to gauge.".format(
                metric_type
            )
            logging.getLogger("klio.metrics").warning(msg)
            metric_method = self.gauge
        return metric_method(name=name, value=value, **kwargs)
|
gaybro8777/klio | lib/tests/unit/utils/test_thread_limiter.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import multiprocessing
import pytest
from klio.utils import _thread_limiter
N_CPU = multiprocessing.cpu_count()
BASE_LOG = "Initial semaphore value:"
@pytest.mark.parametrize(
    "max_thread_count, exp_count_log",
    (
        (1, f"{BASE_LOG} 1"),
        (_thread_limiter.ThreadLimit.NONE, "Using unlimited semaphore"),
        (_thread_limiter.ThreadLimit.DEFAULT, f"{BASE_LOG} {N_CPU}"),
        (lambda: 2 * 3, f"{BASE_LOG} 6"),
    ),
)
def test_thread_limiter(max_thread_count, exp_count_log, caplog):
    """Constructing a ThreadLimiter logs exactly one setup message."""
    _thread_limiter.ThreadLimiter(max_thread_count=max_thread_count)
    logged = caplog.messages
    assert len(logged) == 1
    assert exp_count_log in logged[0]
@pytest.mark.parametrize(
    "max_thread_count", (2, _thread_limiter.ThreadLimit.NONE)
)
def test_thread_limiter_ctx_mgr(max_thread_count, mocker, monkeypatch, caplog):
    """The context manager acquires and releases the semaphore once each."""
    semaphore = mocker.Mock()
    limiter = _thread_limiter.ThreadLimiter(max_thread_count=max_thread_count)
    monkeypatch.setattr(limiter, "_semaphore", semaphore)
    with limiter:
        3 * 3  # arbitrary work inside the limited section
    semaphore.acquire.assert_called_once_with()
    semaphore.release.assert_called_once_with()
    # one log line from __init__ plus one each for acquire/release
    assert len(caplog.messages) == 3
|
gaybro8777/klio | exec/tests/unit/commands/utils/test_plugin_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_exec.commands.utils import plugin_utils
@pytest.fixture
def mock_inspect(mocker, monkeypatch):
    """Patch ``plugin_utils.inspect`` so ``getfile`` returns a fixed path."""
    inspect_mock = mocker.Mock(**{"getfile.return_value": "/path/to/package"})
    monkeypatch.setattr(plugin_utils, "inspect", inspect_mock)
    return inspect_mock
@pytest.fixture
def mock_plugins(mocker):
    """Return two mock entry-point plugins sharing one mock distribution."""
    dist = mocker.Mock()
    dist.project_name = "package-name"
    dist.parsed_version = "1.2.3"
    plugins = []
    for index in range(2):
        loaded = mocker.Mock()
        loaded.AUDIT_STEP_NAME = "step-{}".format(index)
        plugin = mocker.Mock(name=index)
        plugin.load.return_value = loaded
        plugin.dist = dist
        plugins.append(plugin)
    return plugins
@pytest.fixture
def mock_iter_entry_points(mock_plugins, mocker, monkeypatch):
    """Patch ``pkg_resources.iter_entry_points`` to yield the mock plugins."""
    iter_mock = mocker.Mock(return_value=mock_plugins)
    monkeypatch.setattr(
        plugin_utils.pkg_resources, "iter_entry_points", iter_mock
    )
    return iter_mock
def test_load_plugins_by_namespace(mock_plugins, mock_iter_entry_points):
    """Every discovered plugin is loaded and returned, in order."""
    loaded = plugin_utils.load_plugins_by_namespace("a.namespace")
    mock_iter_entry_points.assert_called_once_with("a.namespace")
    expected = []
    for plugin in mock_plugins:
        plugin.load.assert_called_once_with()
        expected.append(plugin.load.return_value)
    assert loaded == expected
@pytest.mark.parametrize(
    "get_desc,docstr,exp_desc",
    (
        ("a description", None, "a description"),
        (None, "a docstring desc", "a docstring desc"),
        (None, None, "No description."),
        ("a description", "a docstring desc", "a description"),
    ),
)
def test_get_plugins_by_namespace(
    get_desc,
    docstr,
    exp_desc,
    mock_inspect,
    mock_plugins,
    mock_iter_entry_points,
    mocker,
):
    """Each loaded plugin is wrapped in a ``KlioPlugin``.

    The description prefers ``get_description()``, then falls back to the
    plugin's ``__doc__``, then to the "No description." placeholder.
    """
    for mp in mock_plugins:
        mp.load.return_value.get_description.return_value = get_desc
        mp.load.return_value.__doc__ = docstr
    exp_plugins = []
    for i in range(2):
        exp_p = plugin_utils.KlioPlugin(
            plugin_name="step-{}".format(i),
            description=exp_desc,
            package_name="package-name",
            package_version="1.2.3",
            module_path="/path/to/package",
        )
        exp_plugins.append(exp_p)
    actual_plugins = list(
        plugin_utils._get_plugins_by_namespace("a.namespace")
    )
    mock_iter_entry_points.assert_called_once_with("a.namespace")
    for plugin in mock_plugins:
        plugin.load.assert_called_once_with()
        # inspect.getfile is expected once per loaded plugin
        exp_calls = [mocker.call(plugin.load.return_value)]
        mock_inspect.getfile.assert_has_calls(exp_calls)
    assert len(mock_plugins) == mock_inspect.getfile.call_count
    assert exp_plugins == actual_plugins
@pytest.mark.parametrize("tw", (True, False))
def test_print_plugins(tw, mock_terminal_writer, mocker, monkeypatch):
    """print_plugins writes the plugin name, package metadata, and description."""
    loaded_plugins = [
        plugin_utils.KlioPlugin(
            plugin_name="plugin_name",
            description="a desc",
            package_name="package-name",
            package_version="1.2.3",
            module_path="/path/to/package",
        )
    ]
    mock_get_plugins_by_ns = mocker.Mock()
    mock_get_plugins_by_ns.return_value = loaded_plugins
    monkeypatch.setattr(
        plugin_utils, "_get_plugins_by_namespace", mock_get_plugins_by_ns
    )
    exp_plugin_meta_str = " -- via package-name (v1.2.3) -- /path/to/package\n"
    exp_plugin_desc = "\ta desc\n\n"
    exp_calls = [
        mocker.call("plugin_name", blue=True, bold=True),
        mocker.call(exp_plugin_meta_str, green=True),
        mocker.call(exp_plugin_desc),
    ]
    # when `tw` is falsy, print_plugins is given tw=None
    kwargs = {"tw": None}
    if tw:
        kwargs["tw"] = mock_terminal_writer
    plugin_utils.print_plugins("a.namespace", **kwargs)
    mock_get_plugins_by_ns.assert_called_once_with("a.namespace")
    mock_terminal_writer.write.assert_has_calls(exp_calls)
    assert 3 == mock_terminal_writer.write.call_count
|
gaybro8777/klio | exec/tests/unit/commands/audit_steps/test_numpy_broken_blas.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_exec.commands.audit_steps import numpy_broken_blas
@pytest.mark.parametrize(
    "worker_threads,platform,has_numpy,version,exp_emit_calls",
    (
        (1, None, False, None, 0),
        (2, "Darwin", False, None, 0),
        (0, "Linux", False, None, 0),
        (2, "Linux", False, None, 0),
        (2, "Linux", True, "1.17.0", 0),
        (2, "Linux", True, "1.16.3", 0),
        (2, "Linux", True, "1.16.2", 1),
    ),
)
def test_numpy_broken_blas_usage(
    worker_threads,
    platform,
    has_numpy,
    version,
    exp_emit_calls,
    klio_config,
    mock_emit_error,
    mocker,
    monkeypatch,
):
    """Per the parametrization, an error is emitted only for the
    Linux + multiple-worker-threads + numpy 1.16.2 combination.
    """
    if worker_threads:
        klio_config.pipeline_options.experiments = [
            "worker_threads={}".format(worker_threads)
        ]
    monkeypatch.setattr(numpy_broken_blas.platform, "system", lambda: platform)
    if has_numpy is False:
        # a None entry in sys.modules makes `import numpy` raise
        mocker.patch.dict("sys.modules", {"numpy": None})
    else:
        monkeypatch.setattr("numpy.version.short_version", version)
    numpy_broken_blas_usage = numpy_broken_blas.NumPyBrokenBLASUsage(
        "job/dir", klio_config, "term_writer"
    )
    numpy_broken_blas_usage.after_tests()
    # don't care about the actual message
    assert exp_emit_calls == mock_emit_error.call_count
|
gaybro8777/klio | exec/tests/unit/commands/utils/test_cpu_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_exec.commands.utils import cpu_utils
@pytest.mark.parametrize("interval,exp_sleep", ((None, 0.1), (0.5, 0.5)))
def test_get_cpu_usage(interval, exp_sleep, mocker, monkeypatch, capsys):
    """CPU samples are printed as ``CPU <pct> <ts>`` lines until the
    monitored process's ``poll`` reports it has finished; the sample
    interval defaults to 0.1s when not given.
    """
    mock_proc = mocker.Mock()
    # wanting at least 50 lines to hit the flush
    proc_side_effect = [None] * 51
    proc_side_effect.append(True)
    mock_proc.poll.side_effect = proc_side_effect
    mock_time = mocker.Mock()
    timestamps = range(1, 53)
    mock_time.time.side_effect = timestamps
    monkeypatch.setattr(cpu_utils, "time", mock_time)
    cpu_measurements = (29.0, 51.4, 78.1, 61.2) * 13
    mock_cpu_percent = mocker.Mock(side_effect=cpu_measurements)
    monkeypatch.setattr(cpu_utils.psutil, "cpu_percent", mock_cpu_percent)
    cpu_utils.get_cpu_usage(mock_proc, interval=interval)
    assert 52 == mock_time.sleep.call_count
    assert mocker.call(exp_sleep) == mock_time.sleep.call_args
    captured = capsys.readouterr()
    data = zip(cpu_measurements, timestamps)
    exp_out = "\n".join("CPU {} {}.0000".format(d[0], d[1]) for d in data)
    exp_out = exp_out + "\n"
    assert exp_out == captured.out
|
gaybro8777/klio | cli/src/klio_cli/options.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import click
from klio_core import options as core_options
def _verify_gcs_uri(ctx, param, value):
if value and not value.startswith("gs://"):
raise click.BadParameter(
"Unsupported location type. Please provide a GCS location with "
"the `gs://` prefix."
)
return value
#####
# common options
#####


def force_build(func):
    """Add the ``--force-build`` flag (rebuild the Docker image)."""
    return click.option(
        "--force-build",
        default=False,
        is_flag=True,
        help="Build Docker image even if you already have it.",
    )(func)


def runtime(func):
    """Bundle the common runtime options: force-build, direct-runner, update."""
    func = force_build(func)
    func = core_options.direct_runner(func)
    func = core_options.update(func)
    return func


def job_name(*args, **kwargs):
    """Add the ``--job-name`` option; usable with or without parentheses."""

    def wrapper(func):
        return click.option("--job-name", **kwargs)(func)

    # allows @options.foo to be used without parens (i.e. no need to do
    # `@options.foo()`) when there are no args/kwargs provided
    if args:
        return wrapper(args[0])
    return wrapper
def region(func):
    """Add the ``--region`` option.

    The region is needed explicitly when it cannot be derived from a job
    directory or a config file.
    """
    return click.option(
        "--region",
        help=(
            # fixed double negative ("is not provided" -> "is provided")
            "Region of job, if neither ``--job-dir`` nor ``--config-file`` "
            "is provided."
        ),
    )(func)
def gcp_project(*args, **kwargs):
    """Add the ``--gcp-project`` option; usable with or without parentheses."""

    def wrapper(func):
        return click.option("--gcp-project", **kwargs)(func)

    # allows @options.foo to be used without parens (i.e. no need to do
    # `@options.foo()`) when there are no args/kwargs provided
    if args:
        return wrapper(args[0])
    return wrapper
#####
# options for `klio job verify`
#####


def create_resources(func):
    """Add the ``--create-resources`` flag for ``klio job verify``."""
    return click.option(
        "--create-resources",
        default=False,
        is_flag=True,
        help=(
            "Create missing GCP resources based on ``klio-info.yaml``."
            " Default: ``False``"
        ),
    )(func)


#####
# options for `klio job audit`
#####


def list_steps(func):
    """Add the ``--list`` flag to list audit steps without running them."""
    return click.option(
        "--list",
        "list_steps",
        is_flag=True,
        default=False,
        help="List available audit steps (does not run any audits).",
    )(func)


#####
# options for `klio job create`
#####


def output(func):
    """Add the ``--output`` option (output directory for job creation)."""
    return click.option(
        "--output",
        help="Output directory. Defaults to current working directory.",
    )(func)


def use_defaults(func):
    """Add the ``--use-defaults`` flag to accept default scaffold values."""
    return click.option(
        "--use-defaults",
        default=False,
        is_flag=True,
        help="Accept default values.",
    )(func)
#####
# common options for `klio job profile <subcommand>`
#####


def input_file(*args, **kwargs):
    """Add the ``-i/--input-file`` option; usable with or without parens.

    Options named in the ``mutex`` kwarg may not be used together with
    this one (enforced by MutuallyExclusiveOption).
    """
    mutually_exclusive = kwargs.pop("mutex", [])

    def wrapper(func):
        return click.option(
            "-i",
            "--input-file",
            type=click.Path(exists=False, dir_okay=False, readable=True),
            cls=core_options.MutuallyExclusiveOption,
            mutually_exclusive=mutually_exclusive,
            **kwargs,
        )(func)

    # allows @options.foo to be used without parens (i.e. no need to do
    # `@options.foo()`) when there are no args/kwargs provided
    if args:
        return wrapper(args[0])
    return wrapper


def output_file(*args, **kwargs):
    """Add the ``-o/--output-file`` option; usable with or without parens.

    Options named in the ``mutex`` kwarg may not be used together with
    this one (enforced by MutuallyExclusiveOption).
    """
    mutually_exclusive = kwargs.pop("mutex", [])

    def wrapper(func):
        return click.option(
            "-o",
            "--output-file",
            type=click.Path(exists=False, dir_okay=False, writable=True),
            cls=core_options.MutuallyExclusiveOption,
            mutually_exclusive=mutually_exclusive,
            **kwargs,
        )(func)

    # allows @options.foo to be used without parens (i.e. no need to do
    # `@options.foo()`) when there are no args/kwargs provided
    if args:
        return wrapper(args[0])
    return wrapper
#####
# options for `klio job profile collect-profiling-data`
#####


def gcs_location(func):
    """Add the ``--gcs-location`` option (validated as a ``gs://`` URI)."""
    return click.option(
        "--gcs-location",
        default=None,
        show_default="`pipeline_options.profile_location` in `klio-job.yaml`",
        help="GCS location of cProfile data.",
        cls=core_options.MutuallyExclusiveOption,
        mutually_exclusive=["job_dir", "input_file", "config_file"],
        callback=_verify_gcs_uri,
    )(func)


def since(func):
    """Add the ``--since`` option (start of the collection time window)."""
    return click.option(
        "--since",
        default=None,
        show_default="1 hour ago",
        help=(
            "Start time, relative or absolute (interpreted by "
            "``dateparser.parse``). "
        ),
    )(func)


def until(func):
    """Add the ``--until`` option (end of the collection time window)."""
    return click.option(
        "--until",
        default=None,
        show_default="now",
        help=(
            "End time, relative or absolute (interpreted by "
            "``dateparser.parse``). "
        ),
    )(func)
# https://docs.python.org/3.6/library/profile.html#pstats.Stats.sort_stats
# unfortunately, can't get this dynamically <3.7 (@lynn)
# Valid sort keys accepted by pstats.Stats.sort_stats.
SORT_STATS_KEY = [
    "calls",
    "cumulative",
    "cumtime",
    "file",
    "filename",
    "module",
    "ncalls",
    "pcalls",
    "line",
    "name",
    "nfl",
    "stdname",
    "time",
    "tottime",
]


def sort_stats(func):
    """Add the repeatable ``--sort-stats`` option (pstats sort keys)."""
    return click.option(
        "--sort-stats",
        multiple=True,
        default=["tottime"],
        show_default=True,
        type=click.Choice(SORT_STATS_KEY, case_sensitive=False),
        help=(
            "Sort output of profiling statistics as supported by "
            "`sort_stats <https://docs.python.org/3/library/profile.html#"
            "pstats.Stats.sort_stats>`_. Multiple ``--sort-stats`` invocations "
            "are supported."
        ),
    )(func)
#####
# options for `klio message publish`
#####


def force(func):
    """Add the ``-f/--force`` flag (reprocess despite existing output)."""
    return click.option(
        "-f",
        "--force",
        default=False,
        is_flag=True,
        help="Force processing even if output data exists.",
    )(func)


def ping(func):
    """Add the ``-p/--ping`` flag (trace an ID without processing it)."""
    return click.option(
        "-p",
        "--ping",
        default=False,
        is_flag=True,
        help="Skip the processing of an ID to trace the path the ID takes.",
    )(func)


def top_down(func):
    """Add the ``-t/--top-down`` flag (trigger apex node plus children)."""
    return click.option(
        "-t",
        "--top-down",
        default=False,
        is_flag=True,
        help=(
            "Trigger an apex node and all child nodes below. Default: "
            "``False``"
        ),
    )(func)


def bottom_up(func):
    """Add the ``-b/--bottom-up`` flag (trigger this job plus parents)."""
    return click.option(
        "-b",
        "--bottom-up",
        # default is actually true, but initially conflicts w top-down logic
        default=False,
        is_flag=True,
        help=(
            "Trigger work for only this job and any required parent jobs, "
            "but no sibling or child jobs. Default: ``True``"
        ),
    )(func)


def non_klio(func):
    """Add the ``-n/--non-klio`` flag (publish a free-form message)."""
    return click.option(
        "-n",
        "--non-klio",
        default=False,
        is_flag=True,
        help=(
            "Publish a free-form, non-Klio message to the targeted job. The "
            "targeted job must also support non-Klio messages. Mutually "
            "exclusive with ``--force``, ``--ping``, and ``--bottom-up``. "
            "Default: ``False``"
        ),
    )(func)
|
gaybro8777/klio | exec/tests/unit/commands/audit_steps/conftest.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_exec.commands.audit_steps import base
@pytest.fixture
def klio_config(mocker):
    """Mock KlioConfig whose ``pipeline_options.experiments`` is empty."""
    conf = mocker.Mock()
    conf.pipeline_options = mocker.Mock()
    conf.pipeline_options.experiments = []
    return conf


@pytest.fixture
def mock_emit_warning(mocker, monkeypatch):
    """Patch ``BaseKlioAuditStep.emit_warning``; return the mock."""
    mock = mocker.Mock()
    monkeypatch.setattr(base.BaseKlioAuditStep, "emit_warning", mock)
    return mock


@pytest.fixture
def mock_emit_error(mocker, monkeypatch):
    """Patch ``BaseKlioAuditStep.emit_error``; return the mock."""
    mock = mocker.Mock()
    monkeypatch.setattr(base.BaseKlioAuditStep, "emit_error", mock)
    return mock
|
gaybro8777/klio | cli/src/klio_cli/commands/job/run.py | <reponame>gaybro8777/klio
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import atexit
import logging
import docker
from klio_cli import __version__ as klio_cli_version
from klio_cli.commands import base
from klio_cli.utils import docker_utils
from klio_cli.utils import stackdriver_utils as sd_utils
GCP_CRED_FILE = "gcloud/application_default_credentials.json"
class RunPipeline(base.BaseDockerizedPipeline):
    """Run a klio job's pipeline inside its dockerized environment."""

    DOCKER_LOGGER_NAME = "klio.job.run"

    def __init__(
        self, job_dir, klio_config, docker_runtime_config, run_job_config
    ):
        """Initialize a RunPipeline command.

        Args:
            job_dir (str): path to the job's directory.
            klio_config: parsed job configuration.
            docker_runtime_config: Docker image/tag runtime settings.
            run_job_config: run-specific settings (``direct_runner``,
                ``update``, ``git_sha``).
        """
        super().__init__(job_dir, klio_config, docker_runtime_config)
        self.run_job_config = run_job_config
        # running a job always requires a parsed config file
        self.requires_config_file = True

    @staticmethod
    def _try_container_kill(container):
        # Best-effort kill (registered via atexit); the container may
        # already be gone, so API errors are only logged at debug level.
        try:
            container.kill()
        except docker.errors.APIError as e:
            logging.debug("Exception while killing container", exc_info=e)

    def _run_docker_container(self, runflags):
        """Run the job container, stream its logs, and return its exit code.

        For non-direct runs that exit cleanly, also attempt to log a link
        to the job's Stackdriver dashboard (purely informational).
        """
        container = self._docker_client.containers.run(**runflags)
        # ensure a direct-runner container doesn't outlive the CLI process
        if self.run_job_config.direct_runner:
            atexit.register(self._try_container_kill, container)

        # TODO: container.logs(stream=True) redirects stderr to stdout.
        # We should use appropriate streams so it's obvious to the user.
        # (@jpvelez)
        for line in container.logs(stream=True):
            self._docker_logger.info(line.decode("utf-8").strip("\n"))

        exit_status = container.wait()["StatusCode"]

        if exit_status == 0 and not self.run_job_config.direct_runner:
            dashboard_name = sd_utils.DASHBOARD_NAME_TPL.format(
                job_name=self.klio_config.job_name,
                region=self.klio_config.pipeline_options.region,
            )
            base_err_msg = (
                "Could not find a Stackdriver dashboard for job '%s' that "
                "matched the name %s"
                % (self.klio_config.job_name, dashboard_name)
            )
            try:
                dashboard_url = sd_utils.get_stackdriver_group_url(
                    self.klio_config.pipeline_options.project,
                    self.klio_config.job_name,
                    self.klio_config.pipeline_options.region,
                )
            except Exception as e:
                # dashboard lookup must never fail the run itself
                logging.warning("%s: %s" % (base_err_msg, e))
            else:
                if dashboard_url:
                    logging.info(
                        "View the job's dashboard on Stackdriver: %s"
                        % dashboard_url
                    )
                else:
                    logging.warning(base_err_msg)

        return exit_status

    def _get_environment(self):
        """Return container env vars extended with project/run metadata."""
        envs = super()._get_environment()
        envs[
            "GOOGLE_CLOUD_PROJECT"
        ] = self.klio_config.pipeline_options.project
        envs["COMMIT_SHA"] = self.run_job_config.git_sha
        envs["KLIO_CLI_VERSION"] = klio_cli_version
        return envs

    def _get_command(self):
        """Build the in-container ``run`` command from the run config."""
        command = ["run"]
        if self.docker_runtime_config.image_tag:
            command.extend(
                ["--image-tag", self.docker_runtime_config.image_tag]
            )
        if self.run_job_config.direct_runner:
            command.append("--direct-runner")
        if self.run_job_config.update is True:
            command.append("--update")
        elif (
            self.run_job_config.update is False
        ):  # don't do anything if `None`
            command.append("--no-update")
        return command

    def _setup_docker_image(self):
        """Build the image (via base) and push it to GCR for Dataflow runs."""
        super()._setup_docker_image()

        if not self.run_job_config.direct_runner:
            logging.info("Pushing worker image to GCR")
            docker_utils.push_image_to_gcr(
                self._full_image_name,
                self.docker_runtime_config.image_tag,
                self._docker_client,
            )
|
gaybro8777/klio | core/tests/config/test_preprocessing.py | <gh_stars>100-1000
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import pytest
import yaml
from klio_core import exceptions
from klio_core.config import _preprocessing
@pytest.fixture
def kcp():
    """The KlioConfigPreprocessor class under test (not an instance)."""
    return _preprocessing.KlioConfigPreprocessor


@pytest.fixture
def job_raw_config():
    """Raw job-config dict with two file event inputs (no templates)."""
    return {
        "allow_non_klio_messages": False,
        "events": {
            "inputs": {
                "file0": {
                    "type": "file",
                    "location": "gs://test/yesterday.txt",
                },
                "file1": {"type": "file", "location": "gs://test/today.txt"},
            }
        },
    }


@pytest.fixture
def job_raw_config_templated():
    """Raw job-config dict containing ``$VAR`` / ``${VAR}`` placeholders."""
    return {
        "events": {
            "inputs": {
                "bigquery0": {
                    "type": "bigquery",
                    "table": "testing.top_tracks_global",
                    "partition": "$YESTERDAY",
                },
                "bigquery1": {
                    "type": "bigquery",
                    "table": "testing.top_artists_global",
                    "partition": "$TODAY",
                },
                "bigquery2": {
                    "type": "bigquery",
                    "table": "testing.${GENRE}_join_$COUNTRY",
                    "partition": "$TODAY",
                },
            }
        }
    }
@pytest.mark.parametrize(
    "overrides_dict,expected",
    (
        # No overrides given - no changes in returned dict
        (
            {},
            {
                "allow_non_klio_messages": False,
                "events": {
                    "inputs": {
                        "file0": {
                            "type": "file",
                            "location": "gs://test/yesterday.txt",
                        },
                        "file1": {
                            "type": "file",
                            "location": "gs://test/today.txt",
                        },
                    }
                },
            },
        ),
        # Override dict given but no changes - no changes in returned dict
        (
            {"allow_non_klio_messages": True},
            {
                "allow_non_klio_messages": True,
                "events": {
                    "inputs": {
                        "file0": {
                            "type": "file",
                            "location": "gs://test/yesterday.txt",
                        },
                        "file1": {
                            "type": "file",
                            "location": "gs://test/today.txt",
                        },
                    }
                },
            },
        ),
        # Override current values
        (
            {
                "events.inputs.file0.location": "gs://test/12-31-2019.txt",
                "events.inputs.file1.location": "gs://test/01-01-2020.txt",
            },
            {
                "allow_non_klio_messages": False,
                "events": {
                    "inputs": {
                        "file0": {
                            "type": "file",
                            "location": "gs://test/12-31-2019.txt",
                        },
                        "file1": {
                            "type": "file",
                            "location": "gs://test/01-01-2020.txt",
                        },
                    }
                },
            },
        ),
        # Add new values
        (
            {
                "events.inputs.file2.location": "gs://test/01-01-2020.txt",
                "events.inputs.file2.type": "file",
            },
            {
                "allow_non_klio_messages": False,
                "events": {
                    "inputs": {
                        "file0": {
                            "type": "file",
                            "location": "gs://test/yesterday.txt",
                        },
                        "file1": {
                            "type": "file",
                            "location": "gs://test/today.txt",
                        },
                        "file2": {
                            "type": "file",
                            "location": "gs://test/01-01-2020.txt",
                        },
                    }
                },
            },
        ),
    ),
)
def test_apply_overrides(kcp, job_raw_config, overrides_dict, expected):
    """Dotted-path override keys update existing values or add new ones."""
    new_dict = kcp._apply_overrides(job_raw_config, overrides_dict)
    assert expected == new_dict
@pytest.mark.parametrize(
    "template_dict,expected",
    (
        # One or more missing templates should raise a KeyError
        (
            {
                "YESTERDAY": "12-31-2019",
                "TODAY": "01-01-2020",
                "COUNTRY": "sto",
            },
            None,
        ),
        (
            {
                "YESTERDAY": "12-31-2019",
                "TODAY": "01-01-2020",
                "GENRE": "rock",
                "COUNTRY": "sto",
            },
            {
                "events": {
                    "inputs": {
                        "bigquery0": {
                            "type": "bigquery",
                            "table": "testing.top_tracks_global",
                            "partition": "12-31-2019",
                        },
                        "bigquery1": {
                            "type": "bigquery",
                            "table": "testing.top_artists_global",
                            "partition": "01-01-2020",
                        },
                        "bigquery2": {
                            "type": "bigquery",
                            "table": "testing.rock_join_sto",
                            "partition": "01-01-2020",
                        },
                    }
                }
            },
        ),
    ),
)
def test_apply_templates(
    kcp, job_raw_config_templated, template_dict, expected
):
    """Template variables are substituted into the raw config string;
    a missing variable raises KlioConfigTemplatingException.
    """
    raw_config_str = json.dumps(job_raw_config_templated)
    if not expected:
        with pytest.raises(
            exceptions.KlioConfigTemplatingException, match=r"'GENRE' .*"
        ):
            kcp._apply_templates(raw_config_str, template_dict)
    else:
        new_dict = kcp._apply_templates(raw_config_str, template_dict)
        # templating operates on, and returns, a string (not a dict)
        assert isinstance(new_dict, str)
        assert expected == json.loads(new_dict)
def test_transform_io(kcp):
    """List-style event/data IO configs become name-keyed dicts.

    Unnamed entries get generated keys like ``bq0``/``bq1`` per type; an
    explicit ``name`` field becomes the key (and is dropped from the value).
    """
    config = {
        "job_config": {
            "events": {
                "inputs": [
                    {"type": "bq", "key": "value"},
                    {"type": "bq", "key": "value2"},
                    {"type": "gcs", "gcskey": "gcsvalue"},
                ],
                "outputs": [
                    {"type": "bq", "key": "value", "name": "mybq"},
                    {"type": "bq", "key": "value2"},
                ],
            },
            "data": {
                "inputs": [],
                "outputs": [
                    {"type": "bq", "key": "value", "name": "mybq"},
                    {"type": "bq", "key": "value2"},
                ],
            },
        }
    }
    expected = {
        "job_config": {
            "events": {
                "inputs": {
                    "bq0": {"type": "bq", "key": "value"},
                    "bq1": {"type": "bq", "key": "value2"},
                    "gcs0": {"type": "gcs", "gcskey": "gcsvalue"},
                },
                "outputs": {
                    "mybq": {"type": "bq", "key": "value"},
                    "bq0": {"type": "bq", "key": "value2"},
                },
            },
            "data": {
                "inputs": {},
                "outputs": {
                    "mybq": {"type": "bq", "key": "value"},
                    "bq0": {"type": "bq", "key": "value2"},
                },
            },
        }
    }
    actual = kcp._transform_io_sections(config)
    assert actual == expected
def test_apply_plugin_preprocessing(mocker, kcp):
    """A preprocessor registered in PLUGIN_PREPROCESSORS is applied by process()."""
    base_config = {
        "version": 1,
        "job_name": "test_job",
        "job_config": {},
    }

    def bump_version(config_dict):
        # plugin hooks receive the parsed dict and return it (mutated)
        config_dict["version"] = 2
        return config_dict

    mocker.patch.object(kcp, "PLUGIN_PREPROCESSORS", [bump_version])

    expected_config = dict(base_config, version=2)
    processed_config = kcp.process(yaml.dump(base_config), [], [])
    assert expected_config == processed_config
|
gaybro8777/klio | cli/tests/commands/job/test_verify.py | <reponame>gaybro8777/klio<filename>cli/tests/commands/job/test_verify.py
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import pytest
from google.api_core import exceptions as api_ex
from google.api_core import page_iterator
from google.cloud import exceptions
from googleapiclient import errors as google_errors
from klio_core import config as kconfig
from klio_cli.commands.job import verify
from klio_cli.utils import stackdriver_utils as sd_utils
@pytest.fixture
def mock_publisher(mocker):
    """Patch ``verify.pubsub_v1.PublisherClient``; return the patch mock."""
    return mocker.patch.object(verify.pubsub_v1, "PublisherClient")


@pytest.fixture
def mock_sub(mocker):
    """Patch ``verify.pubsub_v1.SubscriberClient``; return the patch mock."""
    return mocker.patch.object(verify.pubsub_v1, "SubscriberClient")


@pytest.fixture
def mock_storage(mocker):
    """Patch ``verify.storage.Client``; return the patch mock."""
    return mocker.patch.object(verify.storage, "Client")


@pytest.fixture
def mock_bucket(mocker):
    """Patch ``verify.storage.Client.bucket``; return the patch mock."""
    return mocker.patch.object(verify.storage.Client, "bucket")


@pytest.fixture
def mock_blob(mocker):
    """Patch ``verify.storage.blob``; return the patch mock."""
    return mocker.patch.object(verify.storage, "blob")


@pytest.fixture
def mock_discovery_client(mocker, monkeypatch):
    """Make ``verify.discovery.build`` return one shared MagicMock client."""
    mock = mocker.MagicMock()
    monkeypatch.setattr(verify.discovery, "build", lambda x, y: mock)
    return mock


@pytest.fixture
def mock_iterator(mocker):
    """Patch ``page_iterator.HTTPIterator``; return the patch mock."""
    return mocker.patch.object(page_iterator, "HTTPIterator")
@pytest.fixture
def config():
    """Raw (pre-KlioConfig) job configuration dictionary."""
    return {
        "job_name": "klio-job-name",
        "job_config": {
            "inputs": [
                {
                    "topic": "test-in-topic",
                    "subscription": "foo",
                    "data_location": "gs://test-in-data",
                }
            ],
            "outputs": [
                {
                    "topic": "test-out-topic",
                    "data_location": "gs://test-out-data",
                }
            ],
        },
        "pipeline_options": {
            "streaming": True,
            "worker_harness_container_image": "a-worker-image",
            "experiments": ["beam_fn_api"],
            "project": "test-gcp-project",
            "zone": "europe-west1-c",
            "region": "europe-west1",
            "staging_location": "gs://test-gcp-project-dataflow-tmp/staging",
            "temp_location": "gs://test-gcp-project-dataflow-tmp/temp",
            "max_num_workers": 2,
            "autoscaling_algorithm": "NONE",
            "disk_size_gb": 32,
            "worker_machine_type": "n1-standard-2",
            "runner": "DataflowRunner",
        },
    }
@pytest.fixture
def klio_config(config):
    """KlioConfig built from the raw ``config`` fixture."""
    return kconfig.KlioConfig(config)


@pytest.fixture
def mock_get_sd_group_url(mocker):
    """Patch ``sd_utils.get_stackdriver_group_url``; return the patch mock."""
    return mocker.patch.object(sd_utils, "get_stackdriver_group_url")


@pytest.fixture
def mock_create_sd_group(mocker):
    """Patch ``sd_utils.create_stackdriver_group``; return the patch mock."""
    return mocker.patch.object(sd_utils, "create_stackdriver_group")
@pytest.mark.parametrize(
    "expected,bindings,create_resources",
    [
        (
            True,
            [
                {
                    "role": "roles/monitoring.metricWriter",
                    "members": ["serviceAccount:the-default-svc-account"],
                }
            ],
            False,
        ),
        (
            False,
            [
                {
                    "role": "roles/monitoring.metricWriter",
                    "members": ["serviceAccount:some-other-account"],
                }
            ],
            False,
        ),
        (
            False,
            [
                {
                    "role": "roles/monitoring.metricWriter",
                    "members": ["serviceAccount:some-other-account"],
                }
            ],
            True,
        ),
        (
            False,
            [{"role": "roles/monitoring.metricWriter", "members": []}],
            False,
        ),
        (False, [], False),
    ],
)
def test_verify_iam_roles(
    caplog,
    expected,
    bindings,
    create_resources,
    mock_discovery_client,
    klio_config,
):
    """_verify_iam_roles passes only when the service account holds the
    required role bindings; --create-resources cannot add IAM roles.
    """
    compute_client = mock_discovery_client.build("compute")
    compute_client.projects().get().execute.return_value = {
        "defaultServiceAccount": "the-default-svc-account"
    }
    iam_client = mock_discovery_client.build("cloudresourcemanager")
    job = verify.VerifyJob(klio_config, create_resources)
    job._compute_client = compute_client
    job._iam_client = iam_client
    gcp_project = job.klio_config.pipeline_options.project
    iam_client.projects().getIamPolicy(
        resource=gcp_project, body={}
    ).execute.return_value = {"bindings": bindings}
    result = job._verify_iam_roles()
    if create_resources:
        # IAM roles are out of scope for --create-resources; a warning is
        # expected as the last log record
        assert (
            "--create-resources is not able to add these roles"
            in caplog.records[-1].msg
        )
    compute_client.projects().get(
        project=gcp_project
    ).execute.assert_called_once_with()
    iam_client.projects().getIamPolicy(
        resource=gcp_project, body={}
    ).execute.assert_called_once_with()
    assert result is expected
def test_verify_iam_roles_editor(caplog, klio_config, mock_discovery_client):
    """An editor/owner binding still passes verification, but an
    'unsafe permissions' warning is logged.
    """
    bindings = [
        {
            "role": "roles/monitoring.metricWriter",
            "members": ["serviceAccount:the-default-svc-account"],
        },
        {
            "role": "roles/editor",
            "members": ["serviceAccount:the-default-svc-account"],
        },
    ]
    gcp_project = klio_config.pipeline_options.project
    compute_client = mock_discovery_client.build("compute")
    compute_client.projects().get().execute.return_value = {
        "defaultServiceAccount": "the-default-svc-account"
    }
    iam_client = mock_discovery_client.build("cloudresourcemanager")
    iam_client.projects().getIamPolicy(
        resource=gcp_project, body={}
    ).execute.return_value = {"bindings": bindings}
    job = verify.VerifyJob(klio_config, False)
    job._compute_client = compute_client
    job._iam_client = iam_client
    result = job._verify_iam_roles()
    compute_client.projects().get(
        project=gcp_project
    ).execute.assert_called_once_with()
    iam_client.projects().getIamPolicy(
        resource=gcp_project, body={}
    ).execute.assert_called_once_with()
    assert result is True
    with caplog.at_level(logging.WARNING):
        assert len(caplog.records) == 3
        msg = caplog.records[1].msg
        assert "unsafe project editor or owner permissions" in msg
def test_verify_iam_roles_with_svc_account(klio_config, mock_discovery_client):
    """If the user configures a SA, verify it instead of the default compute SA."""
    job = verify.VerifyJob(klio_config, False)
    job.klio_config.pipeline_options.service_account_email = (
        "<EMAIL>"
    )
    bindings = [
        {
            "role": "roles/monitoring.metricWriter",
            "members": ["serviceAccount:<EMAIL>"],
        },
        {
            "role": "roles/editor",
            "members": ["serviceAccount:the-default-svc-account"],
        },
    ]
    gcp_project = job.klio_config.pipeline_options.project
    compute_client = mock_discovery_client.build("compute")
    compute_client.projects().get().execute.return_value = {
        "defaultServiceAccount": "the-default-svc-account"
    }
    iam_client = mock_discovery_client.build("cloudresourcemanager")
    iam_client.projects().getIamPolicy(
        resource=gcp_project, body={}
    ).execute.return_value = {"bindings": bindings}
    job._compute_client = compute_client
    job._iam_client = iam_client
    result = job._verify_iam_roles()
    # Assert that we don't fetch the default SA since we don't need it
    compute_client.projects().get(
        project=gcp_project
    ).execute.assert_not_called()
    iam_client.projects().getIamPolicy(
        resource=gcp_project, body={}
    ).execute.assert_called_once_with()
    assert result is True
def test_verify_iam_roles_http_error(klio_config, mock_discovery_client):
    """_verify_iam_roles returns False when either GCP API call errors.

    Covers two failure points: fetching the default service account, and
    fetching the project's IAM policy.
    """
    compute_client = mock_discovery_client.build("compute")
    err = google_errors.HttpError
    compute_client.projects().get().execute.side_effect = err(
        "some resp", "some content".encode()
    )
    iam_client = mock_discovery_client.build("cloudresourcemanager")
    job = verify.VerifyJob(klio_config, False)
    job._compute_client = compute_client
    job._iam_client = iam_client
    result = job._verify_iam_roles()
    assert result is False
    # Second scenario: the SA lookup now succeeds but getIamPolicy raises;
    # verification should still fail gracefully rather than propagate.
    compute_client = mock_discovery_client.build("compute")
    compute_client.projects().get().execute.side_effect = None
    compute_client.projects().get().execute.return_value = {
        "defaultServiceAccount": "the-default-svc-account"
    }
    iam_client = mock_discovery_client.build("cloudresourcemanager")
    iam_client.projects().getIamPolicy(
        resource=job.klio_config.pipeline_options.project, body={}
    ).execute.side_effect = google_errors.HttpError(
        "some resp", "some content".encode()
    )
    job._compute_client = compute_client
    job._iam_client = iam_client
    result = job._verify_iam_roles()
    assert result is False
@pytest.mark.parametrize(
    "create_resources,dashboard_url",
    ((True, None), (False, None), (False, "dashboard/url")),
)
def test_verify_stackdriver_dashboard(
    klio_config,
    mock_get_sd_group_url,
    mock_create_sd_group,
    create_resources,
    dashboard_url,
):
    """Dashboard check passes if one exists or --create-resources makes one."""
    mock_get_sd_group_url.return_value = dashboard_url
    job = verify.VerifyJob(klio_config, create_resources)
    actual = job._verify_stackdriver_dashboard()
    mock_get_sd_group_url.assert_called_once_with(
        "test-gcp-project", "klio-job-name", "europe-west1"
    )
    if create_resources:
        # Missing dashboard is created on the fly, so the check passes.
        mock_create_sd_group.assert_called_once_with(
            "test-gcp-project", "klio-job-name", "europe-west1"
        )
        assert actual is True
    elif dashboard_url:
        assert actual is True
    else:
        # No dashboard and no permission to create one -> failure.
        assert actual is False
def test_verify_stackdriver_dashboard_raises(
    klio_config, mock_get_sd_group_url, caplog
):
    """Unexpected errors while fetching the dashboard URL propagate."""
    mock_get_sd_group_url.side_effect = Exception("error")
    with pytest.raises(Exception, match="error"):
        job = verify.VerifyJob(klio_config, False)
        job._verify_stackdriver_dashboard()
    assert 2 == len(caplog.records)
def test_verify_stackdriver_dashboard_errors(
    klio_config, mock_get_sd_group_url, mock_create_sd_group, caplog
):
    """Check fails when the dashboard can be neither found nor created."""
    mock_get_sd_group_url.return_value = None
    mock_create_sd_group.return_value = None
    job = verify.VerifyJob(klio_config, True)
    actual = job._verify_stackdriver_dashboard()
    assert actual is False
    assert 3 == len(caplog.records)
@pytest.mark.parametrize("create_resources", (True, False))
def test_verify_gcs_bucket(klio_config, mock_storage, create_resources):
    """Bucket is created (or just looked up) depending on --create-resources."""
    test_path = "gs://bucket/blob"
    job = verify.VerifyJob(klio_config, create_resources)
    job._storage_client = mock_storage
    actual = job._verify_gcs_bucket(test_path)
    if create_resources:
        mock_storage.create_bucket.assert_called_once_with("bucket")
    else:
        mock_storage.get_bucket.assert_called_once_with("bucket")
    assert actual is True
def test_verify_gcs_bucket_invalid_name(klio_config, mock_storage, caplog):
    """A path that isn't a ``gs://`` URL fails verification with logs."""
    job = verify.VerifyJob(klio_config, True)
    job._storage_client = mock_storage
    assert not job._verify_gcs_bucket("a/b/c")
    assert 2 == len(caplog.records)
def test_verify_gcs_bucket_exists(klio_config, mock_storage):
    """Creating a bucket that already exists (Conflict) still verifies."""
    test_path = "gs://bucket/blob"
    mock_storage.create_bucket.side_effect = api_ex.Conflict("test")
    job = verify.VerifyJob(klio_config, True)
    job._storage_client = mock_storage
    actual = job._verify_gcs_bucket(test_path)
    assert actual is True
@pytest.mark.parametrize("not_found", (True, False))
def test_verify_gcs_bucket_exceptions(klio_config, mock_storage, not_found):
    """Both NotFound and unexpected errors make bucket verification fail."""
    test_path = "gs://bucket/blob"
    if not_found:
        mock_storage.get_bucket.side_effect = exceptions.NotFound("test")
    else:
        mock_storage.get_bucket.side_effect = Exception
    job = verify.VerifyJob(klio_config, False)
    job._storage_client = mock_storage
    actual = job._verify_gcs_bucket(test_path)
    assert actual is False
@pytest.mark.parametrize("create_resources", (True, False))
def test_verify_pub_topic(klio_config, mock_publisher, create_resources):
    """Topic is created (or just fetched) depending on --create-resources."""
    test_topic = "test"
    job = verify.VerifyJob(klio_config, create_resources)
    job._publisher_client = mock_publisher
    # Fix: the second argument is the human-readable topic type used in log
    # messages ("input"/"output"); the original passed the `input` builtin.
    actual = job._verify_pub_topic(test_topic, "input")
    if create_resources:
        mock_publisher.create_topic.assert_called_once_with(
            request={"name": test_topic}
        )
    else:
        mock_publisher.get_topic.assert_called_once_with(
            request={"topic": test_topic}
        )
    assert actual is True
def test_verify_pub_topic_exists(
    klio_config, mock_publisher,
):
    """Creating a topic that already exists still verifies successfully."""
    test_topic = "test"
    mock_publisher.create_topic.side_effect = api_ex.AlreadyExists("test")
    job = verify.VerifyJob(klio_config, True)
    job._publisher_client = mock_publisher
    # Fix: pass the "input" topic-type label, not the `input` builtin.
    actual = job._verify_pub_topic(test_topic, "input")
    assert actual is True
@pytest.mark.parametrize("not_found", (True, False))
def test_verify_pub_topic_exceptions(klio_config, mock_publisher, not_found):
    """Both NotFound and unexpected errors make topic verification fail."""
    test_topic = "test"
    if not_found:
        mock_publisher.get_topic.side_effect = exceptions.NotFound("test")
    else:
        mock_publisher.get_topic.side_effect = Exception
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    # Fix: pass the "input" topic-type label, not the `input` builtin.
    actual = job._verify_pub_topic(test_topic, "input")
    assert actual is False
@pytest.mark.parametrize("create_resources", (True, False))
def test_verify_subscription_and_topic(
    klio_config, mock_publisher, mock_sub, create_resources
):
    """Subscription is created (or just fetched) based on --create-resources."""
    test_sub = "test"
    upstream_topic = "Some"
    job = verify.VerifyJob(klio_config, create_resources)
    job._publisher_client = mock_publisher
    job._subscriber_client = mock_sub
    if create_resources:
        actual = job._verify_subscription_and_topic(test_sub, upstream_topic,)
        mock_sub.create_subscription.assert_called_once_with(
            request={"name": test_sub, "topic": upstream_topic}
        )
    else:
        actual = job._verify_subscription_and_topic(test_sub, upstream_topic,)
        mock_sub.get_subscription.assert_called_once_with(
            request={"subscription": test_sub}
        )
    # Returns (topic_verified, subscription_verified).
    expected = True, True
    assert expected == actual
def test_verify_subscription_and_topic_exists(
    klio_config, mock_publisher, mock_sub
):
    """Creating a subscription that already exists still verifies."""
    test_sub = "test"
    upstream_topic = "Some"
    job = verify.VerifyJob(klio_config, True)
    job._publisher_client = mock_publisher
    job._subscriber_client = mock_sub
    mock_sub.create_subscription.side_effect = api_ex.AlreadyExists("test")
    actual = job._verify_subscription_and_topic(test_sub, upstream_topic)
    expected = True, True
    assert expected == actual
@pytest.mark.parametrize(
    "not_found, no_topic", ((True, False), (False, False), (False, True))
)
def test_verify_subscription_and_topic_exceptions(
    klio_config, mock_publisher, mock_sub, not_found, no_topic
):
    """Missing topic or failing subscription lookups degrade gracefully."""
    test_sub = "test"
    if no_topic:
        upstream_topic = None
        # (topic_verified, subscription_verified)
        expected = False, True
    else:
        upstream_topic = "Some"
        expected = True, False
    if not_found:
        mock_sub.get_subscription.side_effect = exceptions.NotFound("test")
    else:
        mock_sub.get_subscription.side_effect = Exception
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._subscriber_client = mock_sub
    actual = job._verify_subscription_and_topic(test_sub, upstream_topic)
    assert expected == actual
# Module-level event-input fixtures shared by the parametrized tests below.
# GCS file-based event input.
mock_gcs_event_config = {
    "type": "gcs",
    "location": "gs://some/events.txt",
    "io_type": kconfig._io.KlioIOType.EVENT,
    "io_direction": kconfig._io.KlioIODirection.INPUT,
}
mock_gcs_event = kconfig._io.KlioReadFileConfig.from_dict(
    mock_gcs_event_config
)
# BigQuery table event input.
mock_bq_event_config = {
    "type": "bq",
    "project": "sigint",
    "dataset": "test-data",
    "table": "test-table",
    "io_type": kconfig._io.KlioIOType.EVENT,
    "io_direction": kconfig._io.KlioIODirection.INPUT,
}
mock_bq_event = kconfig._io.KlioBigQueryEventInput.from_dict(
    mock_bq_event_config
)
# Avro file event input.
mock_avro_event_config = {
    "type": "avro",
    "io_type": kconfig._io.KlioIOType.EVENT,
    "io_direction": kconfig._io.KlioIODirection.INPUT,
}
mock_avro_event = kconfig._io.KlioReadAvroEventConfig.from_dict(
    mock_avro_event_config
)
@pytest.mark.parametrize(
    "mock_event_input", ((mock_gcs_event), (mock_bq_event), (mock_avro_event),)
)
def test_unverified_event_inputs(
    mocker,
    caplog,
    mock_storage,
    mock_publisher,
    mock_sub,
    klio_config,
    mock_event_input,
):
    """Non-PubSub event inputs are skipped (not verified) without failing."""
    mock_verify_gcs_bucket = mocker.patch.object(
        verify.VerifyJob, "_verify_gcs_bucket"
    )
    mock_verify_sub = mocker.patch.object(
        verify.VerifyJob, "_verify_subscription_and_topic"
    )
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    job._subscriber_client = mock_sub
    job.klio_config.pipeline_options.project = "sigint"
    job.klio_config.job_config.events.inputs = [mock_event_input]
    data_config = {
        "type": "gcs",
        "location": "test",
        "io_type": kconfig._io.KlioIOType.DATA,
        "io_direction": kconfig._io.KlioIODirection.INPUT,
    }
    job.klio_config.job_config.data.inputs = [
        kconfig._io.KlioGCSInputDataConfig.from_dict(data_config)
    ]
    mock_verify_gcs_bucket.return_value = True
    actual = job._verify_inputs()
    # Only PubSub event inputs trigger subscription/topic verification.
    mock_verify_sub.assert_not_called()
    mock_verify_gcs_bucket.assert_called_with("test")
    assert actual
    assert 3 == len(caplog.records)
@pytest.mark.parametrize(
    "unverified_bucket, unverified_topic, unverified_sub",
    (
        (False, False, False),
        (True, False, False),
        (True, True, True),
        (False, True, True),
        (False, False, True),
        (False, True, False),
        (False, False, False),
    ),
)
def test_verify_inputs(
    mocker,
    unverified_bucket,
    unverified_topic,
    unverified_sub,
    klio_config,
    mock_storage,
    mock_publisher,
    mock_sub,
):
    """_verify_inputs only passes when bucket, topic, AND sub all verify."""
    mock_verify_gcs_bucket = mocker.patch.object(
        verify.VerifyJob, "_verify_gcs_bucket"
    )
    mock_verify_sub = mocker.patch.object(
        verify.VerifyJob, "_verify_subscription_and_topic"
    )
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    job._subscriber_client = mock_sub
    job.klio_config.pipeline_options.project = "sigint"
    event_config = {
        "type": "pubsub",
        "topic": "test",
        "subscription": "test",
        "io_type": kconfig._io.KlioIOType.EVENT,
        "io_direction": kconfig._io.KlioIODirection.INPUT,
    }
    job.klio_config.job_config.events.inputs = [
        kconfig._io.KlioPubSubEventInput.from_dict(event_config)
    ]
    data_config = {
        "type": "gcs",
        "location": "test",
        "io_type": kconfig._io.KlioIOType.DATA,
        "io_direction": kconfig._io.KlioIODirection.INPUT,
    }
    job.klio_config.job_config.data.inputs = [
        kconfig._io.KlioGCSInputDataConfig.from_dict(data_config)
    ]
    # _verify_subscription_and_topic returns (topic_verified, sub_verified);
    # any single False anywhere must fail the overall input verification.
    if unverified_topic and unverified_sub and unverified_bucket:
        mock_verify_gcs_bucket.return_value = False
        mock_verify_sub.return_value = False, False
        actual = job._verify_inputs()
        expected = False
        assert expected == actual
    elif unverified_topic and unverified_sub:
        mock_verify_sub.return_value = False, False
        actual = job._verify_inputs()
        expected = False
        assert expected == actual
    elif unverified_topic:
        mock_verify_sub.return_value = False, True
        actual = job._verify_inputs()
        expected = False
        assert expected == actual
    elif unverified_sub:
        mock_verify_sub.return_value = True, False
        actual = job._verify_inputs()
        expected = False
        assert expected == actual
    elif unverified_bucket:
        mock_verify_gcs_bucket.return_value = False
        mock_verify_sub.return_value = True, True
        actual = job._verify_inputs()
        expected = False
        assert expected == actual
    else:
        mock_verify_gcs_bucket.return_value = True
        mock_verify_sub.return_value = True, True
        actual = job._verify_inputs()
        expected = True
        assert expected == actual
    mock_verify_gcs_bucket.assert_called_with("test")
    mock_verify_sub.assert_called_with("test", "test")
@pytest.mark.parametrize(
    "data_dict, event_dict, expected_log_count",
    (
        ({"location": "test"}, {"topic": "test", "subscription": "test"}, 3),
        ({"location": None}, {"topic": "test", "subscription": "test"}, 4),
        ({"location": "test"}, {"topic": "test", "subscription": None}, 4),
        ({"location": None}, {"topic": "test", "subscription": None}, 5),
        (None, None, 5),
    ),
)
def test_verify_inputs_logs(
    data_dict,
    event_dict,
    expected_log_count,
    klio_config,
    mock_storage,
    mock_publisher,
    mock_sub,
    mocker,
    caplog,
):
    """Missing input config values each produce an extra log record."""
    mocker.patch.object(verify.VerifyJob, "_verify_gcs_bucket")
    mock_verify_sub = mocker.patch.object(
        verify.VerifyJob, "_verify_subscription_and_topic"
    )
    mock_verify_sub.return_value = (False, False)
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    job._subscriber_client = mock_sub
    job.klio_config.pipeline_options.project = "sigint"
    if event_dict:
        event_dict["type"] = "pubsub"
        event_dict["io_type"] = kconfig._io.KlioIOType.EVENT
        # NOTE(review): io_direction is OUTPUT even though these configs are
        # assigned to events.inputs -- confirm this is intentional.
        event_dict["io_direction"] = kconfig._io.KlioIODirection.OUTPUT
        job.klio_config.job_config.events.inputs = [
            kconfig._io.KlioPubSubEventInput.from_dict(event_dict)
        ]
    else:
        job.klio_config.job_config.events.inputs = []
    if data_dict:
        data_dict["type"] = "gcs"
        data_dict["io_type"] = kconfig._io.KlioIOType.DATA
        data_dict["io_direction"] = kconfig._io.KlioIODirection.OUTPUT
        job.klio_config.job_config.data.inputs = [
            kconfig._io.KlioGCSOutputDataConfig.from_dict(data_dict)
        ]
    else:
        job.klio_config.job_config.data.inputs = []
    job._verify_inputs()
    assert expected_log_count == len(caplog.records)
# Module-level event-output fixtures for the output-verification tests.
# GCS file-based event output.
mock_gcs_output_event_config = {
    "type": "gcs",
    "location": "gs://some/events.txt",
    "io_type": kconfig._io.KlioIOType.EVENT,
    "io_direction": kconfig._io.KlioIODirection.OUTPUT,
}
# Fix: this previously built the output event from ``mock_gcs_event_config``
# (the *input* config), leaving ``mock_gcs_output_event_config`` unused.
mock_gcs_output_event = kconfig._io.KlioWriteFileConfig.from_dict(
    mock_gcs_output_event_config
)
# BigQuery table event output (with an explicit schema).
mock_bq_output_event_config = {
    "type": "bq",
    "project": "sigint",
    "dataset": "test-data",
    "table": "test-table",
    "schema": {"fields": [{"name": "n", "type": "t", "mode": "m"}]},
    "io_type": kconfig._io.KlioIOType.EVENT,
    "io_direction": kconfig._io.KlioIODirection.OUTPUT,
}
mock_bq_output_event = kconfig._io.KlioBigQueryEventOutput.from_dict(
    mock_bq_output_event_config
)
@pytest.mark.parametrize(
    "mock_event_output", ((mock_gcs_output_event), (mock_bq_output_event),)
)
def test_unverified_event_outputs(
    mocker,
    caplog,
    klio_config,
    mock_event_output,
    mock_storage,
    mock_publisher,
):
    """Non-PubSub event outputs are skipped (not verified) without failing."""
    mock_verify_gcs_bucket = mocker.patch.object(
        verify.VerifyJob, "_verify_gcs_bucket"
    )
    mock_verify_pub_topic = mocker.patch.object(
        verify.VerifyJob, "_verify_pub_topic"
    )
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    job.klio_config.job_config.events.outputs = [mock_event_output]
    data_config = {
        "type": "gcs",
        "location": "test",
        "io_type": kconfig._io.KlioIOType.DATA,
        "io_direction": kconfig._io.KlioIODirection.OUTPUT,
    }
    job.klio_config.job_config.data.outputs = [
        kconfig._io.KlioGCSOutputDataConfig.from_dict(data_config)
    ]
    mock_verify_gcs_bucket.return_value = True
    actual = job._verify_outputs()
    assert actual
    # Only PubSub event outputs trigger topic verification.
    mock_verify_pub_topic.assert_not_called()
    mock_verify_gcs_bucket.assert_called_with("test")
    assert 3 == len(caplog.records)
@pytest.mark.parametrize(
    "unverified_gcs, unverified_topic",
    ((False, False), (True, False), (True, True), (False, True)),
)
def test_verify_outputs(
    mocker,
    klio_config,
    unverified_gcs,
    unverified_topic,
    mock_storage,
    mock_publisher,
):
    """_verify_outputs only passes when both the bucket and topic verify."""
    mock_verify_gcs_bucket = mocker.patch.object(
        verify.VerifyJob, "_verify_gcs_bucket"
    )
    mock_verify_pub_topic = mocker.patch.object(
        verify.VerifyJob, "_verify_pub_topic"
    )
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    event_config = {
        "type": "pubsub",
        "topic": "test",
        "io_type": kconfig._io.KlioIOType.EVENT,
        "io_direction": kconfig._io.KlioIODirection.OUTPUT,
    }
    job.klio_config.job_config.events.outputs = [
        kconfig._io.KlioPubSubEventOutput.from_dict(event_config)
    ]
    data_config = {
        "type": "gcs",
        "location": "test",
        "io_type": kconfig._io.KlioIOType.DATA,
        "io_direction": kconfig._io.KlioIODirection.OUTPUT,
    }
    job.klio_config.job_config.data.outputs = [
        kconfig._io.KlioGCSOutputDataConfig.from_dict(data_config)
    ]
    # Any single unverified resource must fail the overall output check.
    if unverified_gcs and unverified_topic:
        mock_verify_gcs_bucket.return_value = False
        mock_verify_pub_topic.return_value = False
        actual = job._verify_outputs()
        expected = False
        assert expected == actual
    elif unverified_topic:
        mock_verify_gcs_bucket.return_value = True
        mock_verify_pub_topic.return_value = False
        actual = job._verify_outputs()
        expected = False
        assert expected == actual
    elif unverified_gcs:
        mock_verify_gcs_bucket.return_value = False
        mock_verify_pub_topic.return_value = True
        actual = job._verify_outputs()
        expected = False
        assert expected == actual
    else:
        mock_verify_gcs_bucket.return_value = True
        mock_verify_pub_topic.return_value = True
        actual = job._verify_outputs()
        expected = True
        assert expected == actual
    mock_verify_gcs_bucket.assert_called_with("test")
    mock_verify_pub_topic.assert_called_with("test", "output")
@pytest.mark.parametrize(
    "event_topic, data_location, expected_log_count",
    (
        ("test", "test", 3),
        (None, "test", 4),
        ("test", None, 4),
        (None, None, 5),
    ),
)
def test_verify_outputs_logs(
    event_topic,
    data_location,
    expected_log_count,
    mocker,
    klio_config,
    mock_storage,
    mock_publisher,
    caplog,
):
    """Missing output config values each produce an extra log record."""
    mock_verify_gcs = mocker.patch.object(
        verify.VerifyJob, "_verify_gcs_bucket"
    )
    mock_verify_pub = mocker.patch.object(
        verify.VerifyJob, "_verify_pub_topic"
    )
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    event_config = {
        "type": "pubsub",
        "topic": event_topic,
        "io_type": kconfig._io.KlioIOType.EVENT,
        "io_direction": kconfig._io.KlioIODirection.OUTPUT,
    }
    job.klio_config.job_config.events.outputs = [
        kconfig._io.KlioPubSubEventOutput.from_dict(event_config)
    ]
    data_config = {
        "type": "gcs",
        "location": data_location,
        "io_type": kconfig._io.KlioIOType.DATA,
        "io_direction": kconfig._io.KlioIODirection.OUTPUT,
    }
    job.klio_config.job_config.data.outputs = [
        kconfig._io.KlioGCSOutputDataConfig.from_dict(data_config)
    ]
    job._verify_outputs()
    assert expected_log_count == len(caplog.records)
    # Verification helpers are only invoked for configured (non-None) values.
    if data_location is not None:
        mock_verify_gcs.assert_called_with(data_location)
    if event_topic is not None:
        mock_verify_pub.assert_called_with(event_topic, "output")
@pytest.mark.parametrize(
    "unverified_staging, unverified_temp",
    ((False, False), (True, True), (True, False), (False, True)),
)
def test_verify_tmp_files(
    mocker, mock_storage, klio_config, unverified_staging, unverified_temp
):
    """Both staging and temp GCS locations must verify for success."""
    job = verify.VerifyJob(klio_config, True)
    job._storage_client = mock_storage
    job.klio_config.pipeline_options.staging_location = "test"
    job.klio_config.pipeline_options.temp_location = "test2"
    mock_verify_gcs = mocker.patch.object(
        verify.VerifyJob, "_verify_gcs_bucket"
    )
    # side_effect order is (staging result, temp result).
    if unverified_staging and unverified_temp:
        mock_verify_gcs.side_effect = [False, False]
        actual = job._verify_tmp_files()
        assert actual is False
    elif unverified_staging:
        mock_verify_gcs.side_effect = [False, True]
        actual = job._verify_tmp_files()
        assert actual is False
    elif unverified_temp:
        mock_verify_gcs.side_effect = [True, False]
        actual = job._verify_tmp_files()
        assert actual is False
    else:
        mock_verify_gcs.side_effect = [True, True]
        actual = job._verify_tmp_files()
        assert actual is True
    mock_verify_gcs.assert_any_call("test")
    mock_verify_gcs.assert_any_call("test2")
    assert 2 == mock_verify_gcs.call_count
def test_verify(
    mocker, klio_config, mock_storage, mock_publisher, mock_sub, caplog
):
    """verify_job runs all sub-verifications and succeeds when all pass."""
    caplog.set_level(logging.INFO)
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    job._subscriber_client = mock_sub
    mock_verify_inputs = mocker.patch.object(job, "_verify_inputs")
    mock_verify_outputs = mocker.patch.object(job, "_verify_outputs")
    mock_verify_tmp = mocker.patch.object(job, "_verify_tmp_files")
    mock_verify_iam_roles = mocker.patch.object(job, "_verify_iam_roles")
    mock_verify_dashboard = mocker.patch.object(
        job, "_verify_stackdriver_dashboard"
    )
    # Prevent real GCP discovery client construction during verify_job.
    mocker.patch.object(verify, "discovery")
    mock_verify_config = mocker.patch.object(kconfig, "KlioConfig")
    mock_verify_config.return_value = klio_config
    mock_storage.return_value = mock_storage
    mock_publisher.return_value = mock_publisher
    mock_sub.return_value = mock_sub
    mock_verify_inputs.return_value = True
    mock_verify_outputs.return_value = True
    mock_verify_tmp.return_value = True
    mock_verify_iam_roles.return_value = True
    job.verify_job()
    mock_verify_inputs.assert_called_once_with()
    mock_verify_outputs.assert_called_once_with()
    mock_verify_tmp.assert_called_once_with()
    assert mock_verify_iam_roles.called
    mock_verify_dashboard.assert_called_once_with()
    assert 1 == len(caplog.records)
def test_verify_raises_exception_raises_system_exit(mocker, klio_config):
    """An unexpected error inside _verify_all is converted to SystemExit."""
    job = verify.VerifyJob(klio_config, False)
    mocker.patch.object(
        job, "_verify_all", return_value=False, side_effect=Exception
    )
    with pytest.raises(SystemExit):
        job.verify_job()
def test_verify_raises_system_exit(
    mocker, klio_config, mock_storage, mock_publisher, mock_sub, caplog
):
    """verify_job exits non-zero when any sub-verification reports failure."""
    caplog.set_level(logging.INFO)
    job = verify.VerifyJob(klio_config, False)
    job._publisher_client = mock_publisher
    job._storage_client = mock_storage
    job._subscriber_client = mock_sub
    mock_verify_inputs = mocker.patch.object(job, "_verify_inputs")
    mock_verify_outputs = mocker.patch.object(job, "_verify_outputs")
    mock_verify_tmp = mocker.patch.object(job, "_verify_tmp_files")
    mock_verify_iam_roles = mocker.patch.object(job, "_verify_iam_roles")
    mock_verify_dashboard = mocker.patch.object(
        job, "_verify_stackdriver_dashboard"
    )
    # Prevent real GCP discovery client construction during verify_job.
    mocker.patch.object(verify, "discovery")
    mock_verify_config = mocker.patch.object(kconfig, "KlioConfig")
    mock_verify_config.return_value = klio_config
    mock_storage.return_value = mock_storage
    mock_publisher.return_value = mock_publisher
    mock_sub.return_value = mock_sub
    mock_verify_inputs.return_value = True
    # Outputs failing is what should trigger the SystemExit.
    mock_verify_outputs.return_value = False
    mock_verify_iam_roles.return_value = True
    mock_verify_tmp.return_value = True
    with pytest.raises(SystemExit):
        job.verify_job()
    mock_verify_inputs.assert_called_once_with()
    mock_verify_outputs.assert_called_once_with()
    mock_verify_tmp.assert_called_once_with()
    mock_verify_dashboard.assert_called_once_with()
|
gaybro8777/klio | cli/src/klio_cli/utils/docker_utils.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import logging
import os
import docker
import emoji
import requests
from klio_cli.utils import multi_line_terminal_writer
def check_docker_connection(docker_client):
    """Verify the Docker daemon is reachable; exit the process otherwise.

    Args:
        docker_client (docker.DockerClient): Client to ping.

    Raises:
        SystemExit: If the Docker daemon cannot be reached.
    """
    unreachable_errors = (
        docker.errors.APIError,
        requests.exceptions.ConnectionError,
    )
    try:
        docker_client.ping()
    except unreachable_errors:
        logging.error(emoji.emojize("Could not reach Docker! :whale:"))
        logging.error("Is it installed and running?")
        raise SystemExit(1)
def check_dockerfile_present(job_dir):
    """Exit with an error if ``job_dir`` does not contain a Dockerfile.

    Args:
        job_dir (str): Path to the directory expected to hold a Dockerfile.

    Raises:
        SystemExit: If no Dockerfile exists in ``job_dir``.
    """
    dockerfile_path = os.path.join(job_dir, "Dockerfile")
    if not os.path.exists(dockerfile_path):
        logging.error("Klio can't run job without a Dockerfile.")
        # Fix: \033[4m starts underlining and \033[0m resets it; the original
        # ended with \033[4m again, leaving the terminal underlined.
        logging.error("Please supply \033[4m{}\033[0m".format(dockerfile_path))
        raise SystemExit(1)
def docker_image_exists(name, client):
    """Return whether ``name`` is an image already known to Docker.

    Args:
        name (str): Full ``name:tag`` image identifier.
        client (docker.DockerClient): Client used for the lookup.

    Returns:
        bool: True if the image exists locally, False if not found.

    Raises:
        SystemExit: If Docker reports any other API error.
    """
    try:
        client.images.get(name)
    except docker.errors.ImageNotFound:
        return False
    except docker.errors.APIError as e:
        msg = (
            "Docker ran into the error checking if image {}"
            "has already been built:\n{}".format(name, e)
        )
        logging.error(msg)
        raise SystemExit(1)
    return True
def build_docker_image(job_dir, image_name, image_tag, config_file=None):
    """Build given Docker image.

    Note: This uses the python Docker SDK's low-level API in order to capture
        and emit build logs as they are generated by Docker. Using the
        high-level API, you only get access to logs at the end of the build,
        which creates a bad user experience.

    Args:
        job_dir (str): Relative path to directory containing Dockerfile.
        image_name (str): Name to build the image with (forms a ‘name:tag’ pair)
        image_tag (str): Tag to build the image with (forms a ‘name:tag’ pair)
        config_file (str): Klio config filename passed to the image as the
            ``KLIO_CONFIG`` build arg; defaults to ``klio-job.yaml``.

    Raises:
        SystemExit(1) If Docker build errors out, process terminates.
    """

    def clean_logs(log_generator):
        # Loop through lines containing log JSON objects.
        # Example line: {"stream":"Starting build..."}\r\n{"stream":"\\n"}\n
        for line in log_generator:
            if isinstance(line, bytes):
                line = line.decode("utf-8")
            # Some lines contain multiple whitespace-separated objects.
            # Split them so json.loads doesn't choke.
            for log_obj in line.split("\r\n"):
                # Some log objects only wrap newlines.
                # Split sometimes produces '' char.
                # Remove these artifacts.
                if log_obj != '{"stream":"\\n"}' and log_obj != "":
                    yield log_obj

    def print_log(log):
        # Normal build output arrives under "stream"; error details under
        # "error"/"errorDetail". A single object may carry both keys.
        if "stream" in log:
            logging.info(log["stream"].strip("\n"))
        if "error" in log:
            fail_color = "\033[91m"
            end_color = "\033[0m"
            logging.info(
                "{}{}{}".format(
                    fail_color, log["errorDetail"]["message"], end_color
                )
            )
            logging.error("\nDocker hit an error while building job image.")
            logging.error(
                "Please fix your Dockerfile: {}/Dockerfile".format(job_dir)
            )
            raise SystemExit(1)

    build_flag = {
        "path": job_dir,
        "tag": "{}:{}".format(image_name, image_tag),
        "rm": True,  # Remove intermediate build containers.
        "buildargs": {
            "tag": image_tag,
            "KLIO_CONFIG": config_file or "klio-job.yaml",
        },
    }
    # Low-level client so build logs stream back as they are produced.
    logs = docker.APIClient(base_url="unix://var/run/docker.sock").build(
        **build_flag
    )
    for log_obj in clean_logs(logs):
        log = json.loads(log_obj)
        print_log(log)
def _get_layer_id_and_message(clean_line):
line_json = json.loads(clean_line)
layer_id = line_json.get("id")
# very first log message doesn't have an id
msg_pfx = ""
if layer_id:
msg_pfx = "{}: ".format(layer_id)
msg = "{prefix}{status}{progress}".format(
prefix=msg_pfx,
status=line_json.get("status", ""),
progress=line_json.get("progress", ""),
)
return layer_id, msg
def push_image_to_gcr(image, tag, client):
    """Push ``image:tag`` to GCR, rendering per-layer progress lines.

    Args:
        image (str): Image repository to push.
        tag (str): Image tag to push.
        client (docker.DockerClient): Client used to perform the push.
    """
    push_args = {"repository": image, "tag": tag, "stream": True}
    writer = multi_line_terminal_writer.MultiLineTerminalWriter()
    for raw_line in client.images.push(**push_args):
        decoded = raw_line.decode("utf-8").strip("\r\n")
        # A single chunk may carry several JSON objects; emit each one on
        # the terminal line belonging to its layer.
        for line in decoded.split("\r\n"):
            layer_id, msg = _get_layer_id_and_message(line)
            writer.emit_line(layer_id, msg.strip())
def get_docker_image_client(job_dir, image_tag, image_name, force_build):
    """Returns the docker image and client for running klio commands.

    Args:
        job_dir (str): Relative path to directory containing Dockerfile.
        image_tag (str): Tag to build the image with (forms a ‘name:tag’ pair)
        image_name (str): Name to build the image with (forms a ‘name:tag’ pair)
        force_build (bool): Flag to force a new docker image build.

    Returns:
        tuple(str, docker.DockerClient): The ``name:tag`` image string and a
            connected Docker client.

    Raises:
        SystemExit: If Docker is unreachable, the Dockerfile is missing, or
            the image build fails (raised by the helpers called here).
    """
    image = "{}:{}".format(image_name, image_tag)
    client = docker.from_env()
    check_docker_connection(client)
    check_dockerfile_present(job_dir)
    if not docker_image_exists(image, client) or force_build:
        logging.info("Building worker image: {}".format(image))
        build_docker_image(job_dir, image_name, image_tag)
    else:
        logging.info("Found worker image: {}".format(image))
    return image, client
|
gaybro8777/klio | docs/src/_ext/collapsible_admon.py | <reponame>gaybro8777/klio
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Custom Sphinx extension that defines a ``collapsible`` admonition. This
directive will look like a normal admonition with the ability to toggle
its content to show/hide.
Example:
.. collapsible:: A title for my admonition
Some body text. It can include other directives within and will be
parsed like other admonitions, e.g.:
.. code-block:: python
print("I should be properly syntax highlighted")
"""
from docutils.parsers.rst.directives import admonitions
from sphinx.util import docutils
class CollapsibleAdmonition(admonitions.Admonition, docutils.SphinxDirective):
    """Admonition variant tagged with the ``admonition-collapsible`` class.

    The extra CSS class lets the theme's JS/CSS render the admonition with
    a show/hide toggle.
    """

    def run(self):
        existing_classes = self.options.get("class")
        if existing_classes:
            existing_classes.append("admonition-collapsible")
        else:
            self.options["class"] = ["admonition-collapsible"]
        return super().run()
def setup(app):
    """Register the ``collapsible`` directive with Sphinx.

    Args:
        app: The Sphinx application object.

    Returns:
        dict: Extension metadata (version and parallel-safety flags).
    """
    app.add_directive("collapsible", CollapsibleAdmonition)
    metadata = {
        "version": "1.0",
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }
    return metadata
|
gaybro8777/klio | examples/snippets/custom_ext_checks.py | <reponame>gaybro8777/klio
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Example of custom data existence checks implementation.
Please see the full walk-through here:
https://docs.klio.io/en/latest/userguide/
examples/custom_data_ext_checks.html
"""
import apache_beam as beam
from apache_beam import pvalue
from apache_beam.io.gcp import gcsio
from klio.transforms import decorators
class BonjourInputCheck(beam.DoFn):
    """Custom input-existence check: pass through only fully-present inputs.

    An element is yielded downstream only if its expected file exists under
    every configured subdirectory; otherwise it is dropped with a log line.
    """

    @decorators.set_klio_context
    def setup(self):
        # One GCS client per DoFn instance, created at worker startup.
        self.client = gcsio.Client()

    @decorators.handle_klio
    def process(self, data):
        element = data.element.decode("utf-8")
        input_config = self._klio.config.job_config.data.inputs[0]
        # Evaluate every path (no short-circuit) so each existence check runs.
        existence = [
            self.client.exists(
                f"{input_config.location}/{subdir}/{element}"
                f".{input_config.file_suffix}"
            )
            for subdir in ("subdir1", "subdir2")
        ]
        if all(existence):
            yield data
        else:
            self._klio.logger.info(f"Skipping {element}: input data not found")
class BonjourOutputCheck(beam.DoFn):
    """Custom output-existence check: tag elements as found/not_found.

    Elements whose expected output files all exist are tagged ``found``
    (already processed; eligible for the force filter); the rest are tagged
    ``not_found`` (still need processing).
    """

    @decorators.set_klio_context
    def setup(self):
        # One GCS client per DoFn instance, created at worker startup.
        self.client = gcsio.Client()

    @decorators.handle_klio
    def process(self, data):
        element = data.element.decode("utf-8")
        oc = self._klio.config.job_config.data.outputs[0]
        subdirs = ("subdir1", "subdir2")
        outputs_exist = []
        for subdir in subdirs:
            path = f"{oc.location}/{subdir}/{element}.{oc.file_suffix}"
            exists = self.client.exists(path)
            outputs_exist.append(exists)
        # Fix: the tags were inverted -- existing outputs were tagged
        # "not_found" (causing endless reprocessing) and missing outputs
        # "found" (skipping work). Downstream, ``found`` goes through
        # KlioFilterForce and ``not_found`` proceeds to processing.
        if all(outputs_exist):
            yield pvalue.TaggedOutput("found", data)
        else:
            yield pvalue.TaggedOutput("not_found", data)
#####
# example of run.py
#####
from klio.transforms import helpers
def run(input_pcol, config):
    """Wire output- and input-existence checks into the pipeline.

    Messages tagged "found" by the output check go through
    KlioFilterForce (only force-flagged ones continue); those plus the
    "not_found" messages are checked for input data before processing.
    """
    output_data = input_pcol | beam.ParDo(BonjourOutputCheck()).with_outputs()
    output_force = output_data.found | helpers.KlioFilterForce()
    # Merge messages needing processing with force-flagged ones.
    to_input_check = (
        (output_data.not_found, output_force.process)
        | beam.Flatten()
    )
    to_process = to_input_check | beam.ParDo(BonjourInputCheck())
    # continue on with the job-related logic
    output_pcol = to_process | ...
    return output_pcol
|
gaybro8777/klio | cli/src/klio_cli/commands/job/audit.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from klio_cli.commands import base
class AuditPipeline(base.BaseDockerizedPipeline):
    """Dockerized pipeline that runs a klio job audit instead of the job."""

    DOCKER_LOGGER_NAME = "klio.job.audit"

    def _get_environment(self):
        # Audits must never touch live data; force test mode.
        environment = super()._get_environment()
        environment["KLIO_TEST_MODE"] = "true"
        return environment

    def _get_command(self, list_steps):
        # Build the `klioexec audit [--list]` argument vector.
        if list_steps:
            return ["audit", "--list"]
        return ["audit"]
|
gaybro8777/klio | examples/audio_spectrograms/transforms.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Implement custom transforms using utilities for Klio-fication.
"""
import io
import os
import pickle
import apache_beam as beam
from apache_beam import pvalue
import librosa
import numpy as np
from klio_audio import decorators as audio_decorators
from klio_core.proto import klio_pb2
from klio.transforms import decorators as tfm_decorators
####
# Helper funcs for handling klio & numpy de/serialization when working
# with pcolls that are grouped by key
####
def _unpickle_from_klio_message(item):
    """Parse a serialized KlioMessage and unpickle its payload."""
    message = klio_pb2.KlioMessage()
    message.ParseFromString(item)
    return pickle.loads(message.data.payload)
def _dump_to_klio_message(key, payload):
    """Serialize a numpy ``payload`` into a KlioMessage keyed by ``key``."""
    # np.save writes the array (with header) into an in-memory buffer.
    buffer = io.BytesIO()
    np.save(buffer, payload)
    message = klio_pb2.KlioMessage()
    message.data.element = key
    message.data.payload = buffer.getvalue()
    return message.SerializeToString()
#####
# Transforms
#####
class GetMagnitude(beam.DoFn):
    """Get the magnitude of a song given its STFT."""

    @tfm_decorators.handle_klio
    @audio_decorators.handle_binary(load_with_numpy=True)
    def process(self, item):
        track_id = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Computing the magnitude spectrogram for {}".format(track_id)
        )
        # magphase splits the complex STFT into magnitude and phase.
        spectrogram, phase = librosa.magphase(item.payload)
        yield pvalue.TaggedOutput("phase", phase)
        yield pvalue.TaggedOutput("spectrogram", spectrogram)
class FilterNearestNeighbors(beam.DoFn):
    """Apply a nearest-neighbor filter to a magnitude spectrogram."""

    @tfm_decorators.handle_klio
    @audio_decorators.handle_binary
    def process(self, item):
        track_id = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Filtering nearest neighbors for {}".format(track_id)
        )
        spectrogram = item.payload
        filtered = librosa.decompose.nn_filter(
            spectrogram,
            aggregate=np.median,
            metric="cosine",
            width=int(librosa.time_to_frames(2)),
        )
        # The output of the filter shouldn't be greater than the input
        # if we assume signals are additive; taking the pointwise
        # minimum with the input spectrum enforces this.
        yield np.minimum(spectrogram, filtered)
class GetSoftMask(beam.DoFn):
    """Compute a soft mask from grouped spectrogram components."""

    def __init__(self, margin=1, power=2):
        # margin scales the "second" component; power is the softmask exponent.
        self.margin = margin
        self.power = power

    @tfm_decorators.set_klio_context
    def process(self, item):
        key, grouped = item
        # Each tag in the grouped dict holds one serialized numpy array.
        first = _unpickle_from_klio_message(grouped["first"][0])
        second = _unpickle_from_klio_message(grouped["second"][0])
        full = _unpickle_from_klio_message(grouped["full"][0])
        self._klio.logger.debug("Getting softmask for {}".format(key))
        mask = librosa.util.softmask(
            first, self.margin * second, power=self.power
        )
        yield _dump_to_klio_message(key, mask * full)
def create_key_from_element(item):
    """Return ``(element, item)`` so the pcoll can be grouped by element."""
    message = klio_pb2.KlioMessage()
    message.ParseFromString(item)
    return (message.data.element, item)
def subtract_filter_from_full(key_pair):
    """Subtract the nn-filter array from the full array for one element.

    ``key_pair`` looks like
    ``(element, {"full": [<serialized numpy array>],
                 "nnfilter": [<serialized numpy array>]})``.
    Returns ``(element, serialized KlioMessage)`` with the pickled
    difference as the payload.
    """
    key, grouped = key_pair
    full = _unpickle_from_klio_message(grouped["full"][0])
    nn_filter = _unpickle_from_klio_message(grouped["nnfilter"][0])
    result = klio_pb2.KlioMessage()
    result.data.element = key
    result.data.payload = pickle.dumps(full - nn_filter)
    return (key, result.SerializeToString())
|
gaybro8777/klio | cli/tests/commands/job/test_delete.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_core import config as kconfig
from klio_cli.commands.job import delete as delete_job
@pytest.fixture
def config_data():
    # Minimal raw klio-job.yaml contents: one pubsub event input/output
    # and one GCS data input/output for project "foo".
    return {
        "job_name": "test-job",
        "version": 1,
        "pipeline_options": {"project": "foo"},
        "job_config": {
            "events": {
                "inputs": [
                    {
                        "type": "pubsub",
                        "topic": "foo-topic",
                        "subscription": "foo-sub",
                    }
                ],
                "outputs": [{"type": "pubsub", "topic": "foo-topic-output"}],
            },
            "data": {
                "inputs": [{"type": "gcs", "location": "foo-input-location"}],
                "outputs": [
                    {"type": "gcs", "location": "foo-output-location"}
                ],
            },
        },
    }
@pytest.fixture
def klio_config(config_data):
    # Parsed KlioConfig built from the raw config_data dict.
    return kconfig.KlioConfig(config_data)


@pytest.fixture
def delete_job_inst(klio_config):
    # The DeleteJob instance under test.
    return delete_job.DeleteJob(klio_config)


@pytest.fixture
def mock_confirm(mocker):
    # Patch click.confirm so tests never block on an interactive prompt.
    return mocker.patch.object(delete_job.click, "confirm")


@pytest.fixture
def mock_prompt(mocker):
    # Patch click.prompt so tests never block on interactive input.
    return mocker.patch.object(delete_job.click, "prompt")


@pytest.fixture
def mock_publisher(mocker):
    # Autospec'd Pub/Sub publisher client (used for topic deletion).
    return mocker.patch.object(
        delete_job.pubsub_v1, "PublisherClient", autospec=True
    )


@pytest.fixture
def mock_subscriber(mocker):
    # Autospec'd Pub/Sub subscriber client (used for subscription deletion).
    return mocker.patch.object(
        delete_job.pubsub_v1, "SubscriberClient", autospec=True
    )


@pytest.fixture
def mock_storage(mocker):
    # Autospec'd GCS storage client (used for bucket deletion).
    return mocker.patch.object(delete_job.storage, "Client", autospec=True)
@pytest.mark.parametrize(
    "confirmation,prompt,name,expected,effect",
    [
        ([False], [None], {}, False, None),
        ([False], ["foo"], "foo", False, None),
        ([True], ["foo"], "foo", True, None),
        ([True], ["pancake"], "milkshake", None, ValueError),
    ],
)
def test_confirmation_dialog(
    confirmation,
    prompt,
    name,
    expected,
    effect,
    delete_job_inst,
    mock_confirm,
    mock_prompt,
):
    """Dialog returns the confirmation; a mistyped name raises ValueError."""
    mock_confirm.side_effect = confirmation
    mock_prompt.side_effect = prompt
    if effect:
        with pytest.raises(ValueError):
            delete_job_inst._confirmation_dialog("meh", name)
    else:
        assert expected == delete_job_inst._confirmation_dialog("meh", name)
@pytest.mark.parametrize(
    "confirm,expected",
    [
        (
            # do not confirm to delete any resource
            # (2 topics, 1 sub, 2 gcs locations, 1 stackdriver)
            [False, False, False, False, False, False],
            {
                "topic": [],
                "subscription": [],
                "location": [],
                "stackdriver_group": False,
            },
        ),
        (
            # only confirm stackdriver
            # (2 topics, 1 sub, 2 gcs locations, 1 stackdriver)
            [False, False, False, False, False, True],
            {
                "topic": [],
                "subscription": [],
                "location": [],
                "stackdriver_group": True,
            },
        ),
        (
            # gcs buckets & stackdriver
            # (2 topics, 1 sub, 2 gcs locations, 1 stackdriver)
            [False, False, False, True, True, True],
            {
                "topic": [],
                "subscription": [],
                "location": ["foo-input-location", "foo-output-location"],
                "stackdriver_group": True,
            },
        ),
        (
            # output topic, gcs buckets & stackdriver
            # (2 topics, 1 sub, 2 gcs locations, 1 stackdriver)
            [False, False, True, True, True, True],
            {
                "topic": ["foo-topic-output"],
                "subscription": [],
                "location": ["foo-input-location", "foo-output-location"],
                "stackdriver_group": True,
            },
        ),
        (
            # subscription, output topic, gcs buckets & stackdriver
            # (2 topics, 1 sub, 2 gcs locations, 1 stackdriver)
            [False, True, True, True, True, True],
            {
                "topic": ["foo-topic-output"],
                "subscription": ["foo-sub"],
                "location": ["foo-input-location", "foo-output-location"],
                "stackdriver_group": True,
            },
        ),
        (
            # everything!
            # (2 topics, 1 sub, 2 gcs locations, 1 stackdriver)
            [True, True, True, True, True, True],
            {
                "topic": ["foo-topic", "foo-topic-output"],
                "subscription": ["foo-sub"],
                "location": ["foo-input-location", "foo-output-location"],
                # BUGFIX: this key was duplicated in the original dict
                # literal; the second entry silently overrode the first.
                "stackdriver_group": True,
            },
        ),
    ],
)
def test_get_resources(confirm, expected, delete_job_inst, mocker):
    """_get_resources includes only the resources the user confirmed."""
    mock_sd = mocker.patch.object(delete_job.sd_utils, "generate_group_meta")
    mock_sd.return_value = (None, "fake-stackdriver-group")
    # Drive the per-resource confirmation prompts with canned answers.
    mock_confirmation = mocker.patch.object(
        delete_job_inst, "_confirmation_dialog"
    )
    mock_confirmation.side_effect = confirm
    resources = delete_job_inst._get_resources()
    assert resources == expected
@pytest.mark.parametrize("effect,record_count", [(Exception, 6), (None, 3)])
def test_delete_subscription(
    effect, record_count, mocker, mock_subscriber, caplog, delete_job_inst
):
    """Each subscription is deleted; failures are logged, not raised."""
    mock_subscriber.return_value.delete_subscription.side_effect = effect
    delete_job_inst._delete_subscriptions(["a", "b", "c"])
    mock_subscriber.return_value.delete_subscription.assert_has_calls(
        [
            mocker.call(request={"subscription": "a"}),
            mocker.call(request={"subscription": "b"}),
            mocker.call(request={"subscription": "c"}),
        ]
    )
    assert 3 == mock_subscriber.return_value.delete_subscription.call_count
    # failure path logs an extra record per subscription (6 vs 3)
    assert record_count == len(caplog.records)
@pytest.mark.parametrize("effect,record_count", [(Exception, 6), (None, 3)])
def test_delete_topic(
    effect, record_count, mocker, mock_publisher, caplog, delete_job_inst
):
    """Each topic is deleted; failures are logged, not raised."""
    mock_publisher.return_value.delete_topic.side_effect = effect
    delete_job_inst._delete_topics(["a", "b", "c"])
    mock_publisher.return_value.delete_topic.assert_has_calls(
        [
            mocker.call(request={"topic": "a"}),
            mocker.call(request={"topic": "b"}),
            mocker.call(request={"topic": "c"}),
        ]
    )
    assert 3 == mock_publisher.return_value.delete_topic.call_count
    # failure path logs an extra record per topic (6 vs 3)
    assert record_count == len(caplog.records)
@pytest.mark.parametrize("effect,record_count", [(Exception, 6), (None, 4)])
def test_delete_buckets(
    effect, record_count, mocker, mock_storage, caplog, delete_job_inst
):
    """Only gs:// locations are deleted (by bucket name, force=True)."""
    mock_storage.return_value.get_bucket.return_value.delete.side_effect = (
        effect
    )
    # "c" has no gs:// scheme and should be skipped entirely.
    delete_job_inst._delete_buckets(
        "test-project", ["gs://a", "gs://b/d", "c"]
    )
    mock_storage.assert_called_once_with("test-project")
    mock_storage.return_value.get_bucket.assert_has_calls(
        [mocker.call("a"), mocker.call("b")], any_order=True
    )
    assert 2 == mock_storage.return_value.get_bucket.call_count
    mock_storage.return_value.get_bucket.return_value.delete.assert_has_calls(
        [mocker.call(force=True), mocker.call(force=True)]
    )
    assert (
        2
        == mock_storage.return_value.get_bucket.return_value.delete.call_count
    )
    assert record_count == len(caplog.records)
@pytest.mark.parametrize("sd_group", [True, False])
def test_delete(sd_group, mocker, mock_confirm, mock_prompt, klio_config):
    """delete() dispatches each confirmed resource type exactly once."""
    mock_get_resources = mocker.patch.object(
        delete_job.DeleteJob, "_get_resources"
    )
    mock_get_resources.return_value = {
        "topic": ["a", "d"],
        "subscription": ["b", "e"],
        "location": ["c", "f"],
        "stackdriver_group": sd_group,
    }
    mock_delete_subscriptions = mocker.patch.object(
        delete_job.DeleteJob, "_delete_subscriptions"
    )
    mock_delete_topics = mocker.patch.object(
        delete_job.DeleteJob, "_delete_topics"
    )
    mock_delete_buckets = mocker.patch.object(
        delete_job.DeleteJob, "_delete_buckets"
    )
    mock_delete_stackdriver_group = mocker.patch.object(
        delete_job.sd_utils, "delete_stackdriver_group"
    )
    delete_job.DeleteJob(klio_config).delete()
    mock_delete_subscriptions.assert_called_once_with(["b", "e"])
    mock_delete_topics.assert_called_once_with(["a", "d"])
    mock_delete_buckets.assert_called_once_with("foo", ["c", "f"])
    if sd_group:
        # NOTE(review): "europe-west1" presumably comes from a region
        # default when pipeline_options sets none — confirm in DeleteJob.
        mock_delete_stackdriver_group.assert_called_once_with(
            "foo", "test-job", "europe-west1"
        )
    else:
        mock_delete_stackdriver_group.assert_not_called()
|
gaybro8777/klio | integration/read-bq-write-bq/it/common.py | <filename>integration/read-bq-write-bq/it/common.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Variables and functions shared between the stages of integration testing."""
import os
import yaml
from klio_core import config
def get_config():
    """Load and return the job's KlioConfig object.

    Exits the process with status 1 if ``klio-job.yaml`` cannot be read.
    """
    config_path = os.path.join(os.path.dirname(__file__), "..", "klio-job.yaml")
    try:
        with open(config_path) as f:
            cfg_dict = yaml.safe_load(f)
        return config.KlioConfig(cfg_dict)
    except IOError as e:
        # BUGFIX: ``logging`` was referenced without ever being imported,
        # so a read failure raised NameError instead of logging the error.
        import logging

        logging.error(e)
        raise SystemExit(1)
# Entity IDs the integration test feeds through the pipeline stages.
entity_ids = ['1', '2', '3', '4', '5']
|
gaybro8777/klio | exec/tests/unit/commands/utils/test_memory_utils.py | <reponame>gaybro8777/klio<filename>exec/tests/unit/commands/utils/test_memory_utils.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_exec.commands.utils import memory_utils
@pytest.fixture
def k_profiler():
    # A fresh memory line profiler instance for each test.
    return memory_utils.KMemoryLineProfiler()


@pytest.fixture
def mock_show_results(mocker, monkeypatch):
    # Patch memory_profiler's show_results so no report is printed.
    mock = mocker.Mock()
    monkeypatch.setattr(memory_utils.mp, "show_results", mock)
    return mock
def dummy_func(x):
    """Plain-function profiling target: returns ``x + 10``."""
    return 10 + x


def dummy_gen(x):
    """Generator profiling target: yields ``x + 10`` then ``x + 20``."""
    for offset in (10, 20):
        yield x + offset
def test_wrap_per_element(k_profiler, mock_show_results, mocker, monkeypatch):
    """wrap_per_element profiles each call and reports per element."""
    # don't want to mock the whole class since we need to test a class method,
    # just mocking out the creation of a new instance to return what we can
    # control
    mock_new = mocker.Mock(return_value=k_profiler)
    monkeypatch.setattr(memory_utils.KMemoryLineProfiler, "__new__", mock_new)
    wrapper = memory_utils.KMemoryLineProfiler.wrap_per_element(dummy_func)
    # wrapping alone must not create a profiler or emit a report
    mock_new.assert_not_called()
    mock_show_results.assert_not_called()
    assert "dummy_func" == wrapper.__name__
    result = wrapper(10)
    assert 20 == result
    mock_new.assert_called_once_with(
        memory_utils.KMemoryLineProfiler, backend="psutil"
    )
    mock_show_results.assert_called_once_with(k_profiler, stream=None)
    mock_new.reset_mock()
    mock_show_results.reset_mock()
    # same expectations for a generator target
    wrapper = memory_utils.KMemoryLineProfiler.wrap_per_element(dummy_gen)
    mock_new.assert_not_called()
    mock_show_results.assert_not_called()
    assert "dummy_gen" == wrapper.__name__
    result = wrapper(10)
    assert 20 == next(result)
    assert 30 == next(result)
    with pytest.raises(StopIteration):
        next(result)
    mock_new.assert_called_once_with(
        memory_utils.KMemoryLineProfiler, backend="psutil"
    )
    mock_show_results.assert_called_once_with(k_profiler, stream=None)
def test_wrap_maximum(k_profiler, mocker, monkeypatch):
    """wrap_maximum preserves names/results for functions and generators."""
    wrapper = memory_utils.KMemoryLineProfiler.wrap_maximum(
        k_profiler, dummy_func
    )
    # fails without functools.wraps on the wrapper
    assert "dummy_func" == wrapper.__name__
    result = wrapper(10)
    assert 20 == result
    wrapper = memory_utils.KMemoryLineProfiler.wrap_maximum(
        k_profiler, dummy_gen
    )
    assert "dummy_gen" == wrapper.__name__
    result = wrapper(10)
    assert 20 == next(result)
    assert 30 == next(result)
    with pytest.raises(StopIteration):
        next(result)
|
gaybro8777/klio | exec/tests/unit/commands/utils/test_wrappers.py | <filename>exec/tests/unit/commands/utils/test_wrappers.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_exec.commands.utils import wrappers
class DummyTransformGenerator(object):
    """Transform whose process() yields two values."""

    def process(self, x):
        for bump in (10, 20):
            yield x + bump


class DummyTransformGeneratorRaises(object):
    """Generator transform that raises after yielding two values."""

    def process(self, x):
        yield x + 10
        yield x + 20
        raise Exception("catch me")


class DummyTransformFunc(object):
    """Transform whose process() returns a single value."""

    def process(self, x):
        return 10 + x


class DummyTransformFuncRaises(object):
    """Transform whose process() always raises."""

    def process(self, *args):
        raise Exception("catch me")
class TestProfiler(wrappers.KLineProfilerMixin):
    # Minimal concrete profiler: no-op hooks so the mixin's wrapping
    # behavior (__call__) can be exercised in isolation.
    def add_function(self, *args, **kwargs):
        pass
    def enable_by_count(self, *args, **kwargs):
        pass
    def disable_by_count(self, *args, **kwargs):
        pass
@pytest.fixture
def k_profiler():
    # Concrete no-op profiler exercising KLineProfilerMixin.
    return TestProfiler()


@pytest.fixture
def mock_add_func(k_profiler, mocker, monkeypatch):
    # Spy on add_function so calls can be asserted.
    mock = mocker.Mock()
    monkeypatch.setattr(k_profiler, "add_function", mock)
    return mock


@pytest.fixture
def mock_enable_by_count(k_profiler, mocker, monkeypatch):
    # Spy on enable_by_count.
    mock = mocker.Mock()
    monkeypatch.setattr(k_profiler, "enable_by_count", mock)
    return mock


@pytest.fixture
def mock_disable_by_count(k_profiler, mocker, monkeypatch):
    # Spy on disable_by_count.
    mock = mocker.Mock()
    monkeypatch.setattr(k_profiler, "disable_by_count", mock)
    return mock
def dummy_func(x):
    """Plain-function wrapping target: returns ``x + 10``."""
    return 10 + x


def dummy_gen(x):
    """Generator wrapping target: yields ``x + 10`` then ``x + 20``."""
    for offset in (10, 20):
        yield x + offset
@pytest.mark.parametrize(
    "transform,print_msg",
    (
        (DummyTransformGenerator, ""),
        (DummyTransformGeneratorRaises, "WARN: Error caught while profiling"),
    ),
)
def test_print_user_exceptions_generators(transform, print_msg, capsys):
    """Generator errors are swallowed and reported as a printed warning."""
    transform, *_ = list(wrappers.print_user_exceptions([transform]))
    # fails without functools.wraps(func)
    assert "process" == transform.process.__name__
    # first argument is "self" of the process method
    result = transform.process(None, 10)
    assert 20 == next(result)
    assert 30 == next(result)
    with pytest.raises(StopIteration):
        next(result)
    captured = capsys.readouterr()
    assert print_msg in captured.out
@pytest.mark.parametrize(
    "transform,print_msg,exp_ret",
    (
        (DummyTransformFunc, "", 20),
        (DummyTransformFuncRaises, "WARN: Error caught while profiling", None),
    ),
)
def test_print_user_exceptions_funcs(transform, print_msg, exp_ret, capsys):
    """Function errors are swallowed (returning None) and warned about."""
    transform, *_ = list(wrappers.print_user_exceptions([transform]))
    # fails without functools.wraps(func)
    assert "process" == transform.process.__name__
    # first argument is "self" of the process method
    result = transform.process(None, 10)
    assert exp_ret == result
    captured = capsys.readouterr()
    assert print_msg in captured.out
def test_call(
    k_profiler, mock_add_func, mock_enable_by_count, mock_disable_by_count
):
    """__call__ registers the target and brackets execution with
    enable_by_count/disable_by_count for functions and generators."""
    wrapper = k_profiler.__call__(dummy_func)
    mock_add_func.assert_called_once_with(dummy_func)
    # counting only starts once the wrapper is actually invoked
    mock_enable_by_count.assert_not_called()
    mock_disable_by_count.assert_not_called()
    assert "dummy_func" == wrapper.__name__
    result = wrapper(10)
    assert 20 == result
    mock_enable_by_count.assert_called_once_with()
    mock_disable_by_count.assert_called_once_with()
    mock_add_func.reset_mock()
    mock_enable_by_count.reset_mock()
    mock_disable_by_count.reset_mock()
    # same expectations for a generator target
    wrapper = k_profiler.__call__(dummy_gen)
    mock_add_func.assert_called_once_with(dummy_gen)
    mock_enable_by_count.assert_not_called()
    mock_disable_by_count.assert_not_called()
    assert "dummy_gen" == wrapper.__name__
    result = wrapper(10)
    assert 20 == next(result)
    assert 30 == next(result)
    with pytest.raises(StopIteration):
        next(result)
    mock_enable_by_count.assert_called_once_with()
    mock_disable_by_count.assert_called_once_with()
|
gaybro8777/klio | cli/tests/commands/job/utils/fixtures/expected/__init__.py | <filename>cli/tests/commands/job/utils/fixtures/expected/__init__.py
"""
Notice: Do not delete this file! Klio will not be able to run your job!
"""
|
gaybro8777/klio | integration/audio-spectrograms/transforms.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import io
import os
import pickle
import apache_beam as beam
from apache_beam import pvalue
import librosa
import numpy as np
from klio_audio import decorators
from klio_core.proto import klio_pb2
from klio.transforms import decorators as tfm_decorators
####
# Helper funcs for handling klio & numpy de/serialization when working
# with pcolls that are grouped by key
# TODO: it'd prob be helpful to provide utilities for making use of
# beam.(Co)GroupByKey
####
def _load_from_msg(item):
    """Parse a serialized KlioMessage and unpickle its payload."""
    message = klio_pb2.KlioMessage()
    message.ParseFromString(item)
    return pickle.loads(message.data.payload)
def _dump_to_klio_message(key, payload):
    """Serialize a numpy ``payload`` into a KlioMessage keyed by ``key``."""
    # np.save writes the array (with header) into an in-memory buffer.
    buffer = io.BytesIO()
    np.save(buffer, payload)
    message = klio_pb2.KlioMessage()
    message.data.element = key
    message.data.payload = buffer.getvalue()
    return message.SerializeToString()
# Transforms
class GetMagnitude(beam.DoFn):
    """Get the magnitude of a song given its STFT."""
    @tfm_decorators._handle_klio
    @decorators.handle_binary(load_with_numpy=True)
    def process(self, item):
        element = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Computing the magnitude spectrogram for {}".format(element)
        )
        stft = item.payload
        spectrogram, phase = librosa.magphase(stft)
        # yield under the "phase" tag to show multi-yields w/ tagged
        # outputs work, but we're only concerned about the spectrogram in
        # our integration test pipeline — so the spectrogram is
        # deliberately emitted under both tags and the real phase array
        # is unused here.
        yield pvalue.TaggedOutput("phase", spectrogram)
        yield pvalue.TaggedOutput("spectrogram", spectrogram)
class FilterNearestNeighbors(beam.DoFn):
    """Apply a nearest-neighbor filter to a magnitude spectrogram."""
    @tfm_decorators._handle_klio
    @decorators.handle_binary
    def process(self, item):
        element = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Filtering nearest neighbors for {}".format(element)
        )
        spectrogram = item.payload
        # cosine-similarity median filter over ~2 seconds of frames
        nn_filter = librosa.decompose.nn_filter(
            spectrogram,
            aggregate=np.median,
            metric="cosine",
            width=int(librosa.time_to_frames(2)),
        )
        # The output of the filter shouldn't be greater than the input
        # if we assume signals are additive. Taking the pointwise minimium
        # with the input spectrum forces this.
        nn_filter = np.minimum(spectrogram, nn_filter)
        yield nn_filter
# TODO: this could be useful enough to make a generic "group by klio element"
def create_key_from_element(item):
    """Return ``(element, item)`` so a pcoll can be grouped by element."""
    message = klio_pb2.KlioMessage()
    message.ParseFromString(item)
    return (message.data.element, item)
def subtract_filter_from_full(key_pair):
    """Subtract the nn-filter array from the full array for one element.

    ``key_pair`` looks like
    ``(element, {"full": [<serialized numpy array>],
                 "nnfilter": [<serialized numpy array>]})``.
    Returns ``(element, serialized KlioMessage)`` whose payload is the
    pickled difference.
    """
    key, grouped = key_pair
    full = _load_from_msg(grouped["full"][0])
    nn_filter = _load_from_msg(grouped["nnfilter"][0])
    result = klio_pb2.KlioMessage()
    result.data.element = key
    result.data.payload = pickle.dumps(full - nn_filter)
    return (key, result.SerializeToString())
class GetSoftMask(beam.DoFn):
    """Compute a soft mask from grouped "first"/"second"/"full" arrays."""
    def __init__(self, margin=1, power=2):
        # margin scales the "second" component; power is the softmask exponent
        self.margin = margin
        self.power = power
    @tfm_decorators._set_klio_context
    def process(self, item):
        key, data = item
        # each grouped tag holds exactly one serialized numpy array
        first_data = data["first"][0]
        second_data = data["second"][0]
        full_data = data["full"][0]
        first = _load_from_msg(first_data)
        second = _load_from_msg(second_data)
        full = _load_from_msg(full_data)
        self._klio.logger.debug("Getting softmask for {}".format(key))
        mask = librosa.util.softmask(
            first, self.margin * second, power=self.power
        )
        ret = mask * full
        yield _dump_to_klio_message(key, ret)
|
gaybro8777/klio | cli/tests/commands/job/utils/fixtures/expected/test_transforms.py | <reponame>gaybro8777/klio<filename>cli/tests/commands/job/utils/fixtures/expected/test_transforms.py
"""
Notice: the code below is just a simple example of how to write unit
tests for a transform.
Feel free to import what's needed, rewrite tests, etc.
"""
import pytest
from klio_core.proto import klio_pb2
import transforms
@pytest.fixture
def klio_msg():
    # Serialized v2 KlioMessage with element b"hello".
    msg = klio_pb2.KlioMessage()
    msg.data.element = b"hello"
    msg.version = klio_pb2.Version.V2
    return msg.SerializeToString()
def test_process(klio_msg):
    """Assert process method yields expected data."""
    helloklio_dofn = transforms.HelloKlio()
    # BUGFIX: the original called ``helloklio_fn.process`` — an undefined
    # name that raised NameError; call the instance created above.
    output = helloklio_dofn.process(klio_msg)
    assert klio_msg == list(output)[0]
|
gaybro8777/klio | docs/src/conf.py | <filename>docs/src/conf.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import codecs
import os
import re
import sys
sys.path.append(os.path.abspath("./_ext"))
# -- Helper funcs
def read(*parts):
    """
    Build an absolute path from *parts* and return the contents of the
    resulting file. Assume UTF-8 encoding.
    """
    base_dir = os.path.abspath(os.path.dirname(__file__))
    target = os.path.join(base_dir, *parts)
    with codecs.open(target, "rb", "utf-8") as handle:
        return handle.read()
def find_version(*file_paths):
    """
    Build a path from *file_paths* and search for a ``__version__``
    string inside.
    """
    contents = read(*file_paths)
    # match e.g. __version__ = "1.2.3" at the start of a line
    match = re.search(
        r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M
    )
    if not match:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
# -- Project information -----------------------------------------------------
project = "klio"
# -- Project information -----------------------------------------------------
copyright = "2020, Spotify AB"
author = "The klio developers"
# Release version of each klio package, read out of the packages'
# __init__ files (via the find_version helper defined above).
versions = {
    "klio_cli_release": find_version("../../cli/src/klio_cli/__init__.py"),
    "klio_core_release": find_version("../../core/src/klio_core/__init__.py"),
    "klio_devtools_release": find_version("../../devtools/src/klio_devtools/__init__.py"),
    "klio_exec_release": find_version("../../exec/src/klio_exec/__init__.py"),
    "klio_release": find_version("../../lib/src/klio/__init__.py"),
    "klio_audio_release": find_version("../../audio/src/klio_audio/__init__.py"),
}
# Define ``rst_prolog`` to make custom roles available at the beginning of each
# rst file. The `/` is 'relative' to the main source directory / where conf.py
# is, otherwise sphinx will try to do relative includes
rst_prolog = """
.. include:: /.custom_roles.rst
"""
# Define ``rst_epilog`` to make variables globally-available to compiled .rst
# files, e.g. ``|klio-cli-version|`` renders the current klio-cli release.
rst_epilog = """
.. |klio-cli-version| replace:: {klio_cli_release}
.. |klio-version| replace:: {klio_release}
.. |klio-audio-version| replace:: {klio_audio_release}
.. |klio-exec-version| replace:: {klio_exec_release}
.. |klio-core-version| replace:: {klio_core_release}
.. |klio-devtools-version| replace:: {klio_devtools_release}
""".format(**versions)
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.todo",  # highlight TODO items
    "sphinx.ext.intersphinx",  # interlink between other projects w/ sphinx docs
    "sphinx.ext.autodoc",  # auto-generate docs from docstrings
    "sphinx.ext.napoleon",  # handle Google-style docstrings
    "sphinx.ext.autosummary",  # auto-gen summaries
    "collapsible_admon",  # custom extension from _ext dir
    "sphinxcontrib.images",  # thumbnail images
    "sphinxcontrib.spelling",  # spell check
    "sphinx_click",  # auto-docs for Click commands
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# sphinx-build has a "nitpick" mode (used during CI docs workflow and
# `make stricthtml`). We inherit some docs from Apache Beam, and some of
# their docstrings fail to compile in nitpick mode.
# https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-nitpick_ignore
nitpick_ignore = [
    # <-- nitpick docstrings from beam.io.gcp.WriteToBigQuery
    ("py:class", "callable"),
    ("py:class", "ValueProvider"),
    ("py:class", "apache_beam.io.gcp.internal.clients.bigquery.bigquery_v2_messages.TableSchema"),
    ("py:class", "BigQueryDisposition"),
    ("py:attr", "BigQueryDisposition.CREATE_IF_NEEDED"),
    ("py:attr", "BigQueryDisposition.CREATE_NEVER"),
    ("py:attr", "BigQueryDisposition.WRITE_TRUNCATE"),
    ("py:attr", "BigQueryDisposition.WRITE_APPEND"),
    ("py:attr", "BigQueryDisposition.WRITE_EMPTY"),
    # -->
    # <-- nitpick docstrings from beam.io.textio.WriteToText
    ("py:class", "WriteToText"),
    ("py:class", "apache_beam.io.filesystem.CompressionTypes.AUTO"),
    # -->
    # <-- nitpick from beam.io.ReadFromText
    ("py:class", "ReadFromText"),
    # -->
    # <-- nitpick docstrings that reference other Klio objects that are
    # not yet documented
    ("py:class", "klio_core.config.KlioConfig"),
    ("py:class", "klio_core.proto.klio_pb2.KlioMessage"),
    ("py:exc", "klio_core.proto.klio_pb2._message.DecodeError"),
    # -->
]
# sphinx-build -b linkcheck will error out if links in docstrings are broken,
# including inherited docstrings (i.e. Beam)
# https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-linkcheck_ignore
linkcheck_ignore = [
    r"https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load",
    # ignore local links
    r"\./.+\.html",
    # temp ignore reference to keps
    r"https://github.com/spotify/klio/tree/master/docs/src/keps",
    r"https://docs.klio.io/en/latest/keps/",
]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "pydata_sphinx_theme"
html_logo = "_static/images/logo.png"
html_theme_options = {
    "github_url": "https://github.com/spotify/klio",
    "twitter_url": "https://twitter.com/SpotifyEng",
    "show_prev_next": False,
    # when there's group of thumbnails (like User Guide > Pipelines > Transforms)
    # don't navigate to the next page automatically; allow to key left/right
    # to view thumbnail carousel
    "navigation_with_keys": False,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_css_files = ["css/fonts.css", "css/custom.css", "css/colors.css", "css/roles.css"]  # relative to _static
html_js_files = ["js/custom.js"]  # relative to _static
html_favicon = "_static/images/favicon.png"
pygments_style = "vs"
# -- Extension configuration
# TODO ext: https://www.sphinx-doc.org/en/master/usage/extensions/todo.html
todo_include_todos = True
todo_emit_warnings = False
todo_link_only = False
# don't render todos in production docs (READTHEDOCS env var is set by RTD)
if os.environ.get("READTHEDOCS"):
    todo_include_todos = False
# -- Autodoc config
autodoc_default_options = {
    "inherited-members": False,
}
# mock heavy optional deps so autodoc can import klio_audio without them
autodoc_mock_imports = ["librosa", "numpy", "matplotlib"]
autodoc_member_order = "bysource"
# -- intersphinx mapping
# This will auto-generate links to Python's docs when referenced (e.g.
# :func:`pdb.set_trace` will link to the set_trace docs)
intersphinx_mapping = {
    "https://docs.python.org/3": None,
    "https://beam.apache.org/releases/pydoc/current": None,
    "https://librosa.org/doc/latest": None,
    "https://numpy.org/doc/stable/": None,
    "https://matplotlib.org/": None,
    "https://googleapis.dev/python/pubsub/latest/": None,
}
# -- sphinxcontrib.spelling config
# PEP 8: spaces around `=` for a module-level assignment
spelling_word_list_filename = "spelling_wordlist.txt"
|
gaybro8777/klio | cli/src/klio_cli/commands/image/build.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import docker
from klio_cli.utils import docker_utils
def build(job_dir, conf, config_file, image_tag):
    """Build the Docker worker image for a klio job.

    Args:
        job_dir (str): directory containing the job's ``Dockerfile``.
        conf: job configuration exposing ``pipeline_options``.
        config_file (str): optional path to an alternate klio config
            file; its basename is appended to ``image_tag`` so images
            built against different configs don't collide.
        image_tag (str): tag to apply to the built image.
    """
    image_name = conf.pipeline_options.worker_harness_container_image

    if config_file:
        # distinguish images built against a non-default config file
        image_tag = "{}-{}".format(image_tag, os.path.basename(config_file))

    client = docker.from_env()
    docker_utils.check_docker_connection(client)
    docker_utils.check_dockerfile_present(job_dir)
    docker_utils.build_docker_image(
        job_dir, image_name, image_tag, config_file
    )
|
gaybro8777/klio | lib/src/klio/metrics/base.py | <reponame>gaybro8777/klio
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Base classes from which a metrics consumer (i.e. Stackdriver, ffwd, etc.)
will need to implement.
New consumers are required to implement the :class:`AbstractRelayClient`, and
three metrics objects based off of :class:`BaseMetric`: a counter, a gauge, and
a timer.
"""
import abc
import six
class _DummyAttribute(object):
    # Placeholder returned by ``abstract_attr()`` when nothing is being
    # decorated (``FOO_ATTR = abstract_attr()``); exists solely so the
    # ``__isabstractattr__`` flag can be attached to it, mirroring
    # ``__isabstractmethod__`` from ``abc.abstractmethod``. Also supports
    # decorating a ``@property`` method.
    pass
def abstract_attr(obj=None):
    """Set an attribute or a property as abstract.

    Supports class-level attributes as well as methods defined as a
    ``@property``.

    Usage:

    .. code-block:: python

        class Foo(object):
            my_foo_attribute = abstract_attr()

            @property
            @abstract_attr
            def my_foo_property(self):
                pass

    Args:
        obj (callable): Python object to "decorate", i.e. a class method. If
            none is provided, a dummy object is created in order to attach
            the ``__isabstractattr__`` attribute (similar to
            ``__isabstractmethod__`` from ``abc.abstractmethod``).
    Returns:
        The given (or dummy) object with ``__isabstractattr__`` set to
        ``True``.
    """
    # Explicit None check: a falsy-but-real object (e.g. one defining
    # __bool__) must still be decorated, not silently replaced by a dummy.
    if obj is None:
        obj = _DummyAttribute()
    obj.__isabstractattr__ = True
    return obj
def _has_abstract_attributes_implemented(cls, name, bases):
"""Verify a given class has its abstract attributes implemented."""
for base in bases:
abstract_attrs = getattr(base, "_klio_metrics_abstract_attributes", [])
class_attrs = getattr(cls, "_klio_metrics_all_attributes", [])
for attr in abstract_attrs:
if attr not in class_attrs:
err_str = (
"Error instantiating class '{0}'. Implementation of "
"abstract attribute '{1}' from base class '{2}' is "
"required.".format(name, attr, base.__name__)
)
raise NotImplementedError(err_str)
def _get_all_attributes(clsdict):
return [name for name, val in six.iteritems(clsdict) if not callable(val)]
def _get_abstract_attributes(clsdict):
return [
name
for name, val in six.iteritems(clsdict)
if not callable(val) and getattr(val, "__isabstractattr__", False)
]
class _ABCBaseMeta(abc.ABCMeta):
    """Enforce behavior upon implementations of ABC classes.

    ``__new__`` records which attributes of the class body are abstract
    (marked via ``abstract_attr``) and which attributes exist at all;
    ``__init__`` then verifies that every abstract attribute declared by
    a base class is implemented on the concrete class.
    """

    def __init__(cls, name, bases, clsdict):
        _has_abstract_attributes_implemented(cls, name, bases)
        # Bug fix: cooperate with the metaclass MRO — the original never
        # called ABCMeta/type.__init__, breaking metaclass composition.
        super(_ABCBaseMeta, cls).__init__(name, bases, clsdict)

    def __new__(metaclass, name, bases, clsdict):
        # stash attribute inventories on the class for the __init__ check
        clsdict[
            "_klio_metrics_abstract_attributes"
        ] = _get_abstract_attributes(clsdict)
        clsdict["_klio_metrics_all_attributes"] = _get_all_attributes(clsdict)
        return super(_ABCBaseMeta, metaclass).__new__(
            metaclass, name, bases, clsdict
        )
class AbstractRelayClient(six.with_metaclass(_ABCBaseMeta)):
    """Abstract base class for all metric consumer relay clients.

    Each new consumer (i.e. Stackdriver, ffwd, logging-based metrics)
    will need to implement this relay class.

    Attributes:
        RELAY_CLIENT_NAME (str): must match the key in ``klio-job.yaml``
            under ``job_config.metrics``.
    """

    # abstract class-level attribute: _ABCBaseMeta raises
    # NotImplementedError at subclass creation if it is not overridden
    RELAY_CLIENT_NAME = abstract_attr()

    def __init__(self, klio_config):
        # full job configuration, available to subclass implementations
        self.klio_config = klio_config

    @abc.abstractmethod
    def unmarshal(self, metric):
        """Returns a dictionary-representation of the ``metric`` object"""
        pass

    @abc.abstractmethod
    def emit(self, metric):
        """Emit the given metric object to the particular consumer.

        ``emit`` will be run in a threadpool separate from the transform,
        and any errors raised from the method will be logged then ignored.
        """
        pass

    @abc.abstractmethod
    def counter(self, name, value=0, transform=None, **kwargs):
        """Return a newly instantiated counter-type metric specific for
        the particular consumer.

        Callers to the ``counter`` method will store new counter objects
        returned in memory for simple caching.
        """
        pass

    @abc.abstractmethod
    def gauge(self, name, value=0, transform=None, **kwargs):
        """Return a newly instantiated gauge-type metric specific for
        the particular consumer.

        Callers to the ``gauge`` method will store new gauge objects
        returned in memory for simple caching.
        """
        pass

    @abc.abstractmethod
    def timer(self, name, transform=None, **kwargs):
        """Return a newly instantiated timer-type metric specific for
        the particular consumer.

        Callers to the ``timer`` method will store new timer objects
        returned in memory for simple caching.
        """
        pass
class BaseMetric(object):
    """Common state shared by every metric type.

    A consumer must provide a counter metric, a gauge metric, and a
    timer metric — typically by subclassing this.

    Args:
        name (str): name of the metric.
        value (int): initial value. Defaults to ``0``.
        transform (str): name of the transform the metric is associated
            with, if any.
    """

    def __init__(self, name, value=0, transform=None, **kwargs):
        self.transform = transform
        self.value = value
        self.name = name

    def update(self, value):
        """Replace the metric's current value."""
        self.value = value
|
gaybro8777/klio | integration/read-bq-write-bq/it/after.py | <reponame>gaybro8777/klio<gh_stars>100-1000
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Cleanup after each integration test.
Deletes both input and output tables.
"""
import os
from apache_beam.io.gcp import bigquery_tools as beam_bq_tools
import common
def delete_tables():
    """Delete the integration test's BigQuery input and output tables."""
    config = common.get_config()
    wrapper = beam_bq_tools.BigQueryWrapper()
    # the first (and only) configured event input/output are the tables
    # created for this test run
    for table_cfg in (
        config.job_config.events.inputs[0],
        config.job_config.events.outputs[0],
    ):
        wrapper._delete_table(
            table_cfg.project, table_cfg.dataset, table_cfg.table
        )
def restore_original_config():
    """Move the backed-up ``klio-job.yaml.save`` back into place.

    The test setup renamed the job config aside; this undoes it.
    """
    job_dir = os.path.join(os.path.dirname(__file__), "..")
    original = os.path.join(job_dir, "klio-job.yaml")
    backup = os.path.join(job_dir, "klio-job.yaml.save")
    os.rename(backup, original)
if __name__ == '__main__':
    # Tear down the BigQuery tables first, then restore the job config
    # that the test setup swapped out.
    delete_tables()
    restore_original_config()
|
gaybro8777/klio | cli/src/klio_cli/utils/cli_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import subprocess
def get_git_sha(cwd=None, image_tag=None):
    """Return the abbreviated git SHA of the repository at ``cwd``.

    The SHA is used to tag the Docker image built for a job.

    Args:
        cwd (str): directory in which to run ``git``; defaults to the
            current working directory.
        image_tag (str): user-provided tag override. When given, failure
            to resolve a git SHA is non-fatal.
    Returns:
        str: the git SHA, or ``None`` when git fails but ``image_tag``
        was provided.
    Raises:
        SystemExit: when git fails (not a repo, no commits yet, or git
            not installed) and no ``image_tag`` override was given.
    """
    cmd = "git describe --match=NeVeRmAtCh --always --abbrev=8 --dirty"
    try:
        return (
            subprocess.check_output(
                # pipe to devnull to suppress the error msgs from git itself
                cmd.split(),
                cwd=cwd,
                stderr=subprocess.DEVNULL,
            )
            .decode()
            .strip()
        )
    # Bug fix: FileNotFoundError (git not installed, or bad cwd) used to
    # escape as a raw traceback; treat it like any other git failure.
    except (subprocess.CalledProcessError, FileNotFoundError):
        if not image_tag:
            logging.error(
                "The directory from which you are running this is not a git "
                "directory, or has no commits yet. The latest commit is used "
                "to tag the Docker image that is built by this command. "
                "Consider overriding this value using the --image-tag flag "
                "until such a time as commits are available."
            )
            raise SystemExit(1)
# TODO: Move this to KlioConfig validation
# once overriding & templates are done
def validate_dataflow_runner_config(klio_config):
    """Exit with an error when mandatory DataflowRunner options are missing.

    Args:
        klio_config: parsed job config exposing ``pipeline_options``.
    Raises:
        SystemExit: when any of ``project``, ``staging_location``,
            ``temp_location``, or ``region`` is absent or ``None``.
    """
    options = klio_config.pipeline_options.as_dict()
    required = ("project", "staging_location", "temp_location", "region")
    if any(options.get(key) is None for key in required):
        logging.error(
            "Unable to verify the mandatory configuration fields for"
            " DataflowRunner. Please fix job configuration or run via direct"
            "runner."
        )
        raise SystemExit(1)
def is_direct_runner(klio_config, direct_runner):
    """Return ``direct_runner``, validating Dataflow config when it's False.

    Args:
        klio_config: parsed job config.
        direct_runner (bool): whether the job targets the direct runner.
    Returns:
        bool: the ``direct_runner`` flag, unchanged.
    Raises:
        SystemExit: via validation, when targeting Dataflow with an
            incomplete configuration.
    """
    if direct_runner:
        return direct_runner
    # Dataflow target: mandatory GCP options must be present
    validate_dataflow_runner_config(klio_config)
    return direct_runner
|
gaybro8777/klio | cli/tests/utils/test_multi_line_terminal_writer.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from klio_cli.utils import multi_line_terminal_writer
def test_simple_output(capsys):
    """The first emit of a line simply prints it."""
    writer = multi_line_terminal_writer.MultiLineTerminalWriter()
    writer.emit_line("line one", "initial contents of line one")
    assert capsys.readouterr().out == "initial contents of line one\n"
def test_multiline_output(capsys):
    """Re-emitting a known line moves up, rewrites it, and returns down."""
    writer = multi_line_terminal_writer.MultiLineTerminalWriter()
    writer.emit_line("line one", "initial contents of line one")
    writer.emit_line("line two", "initial contents of line two")
    writer.emit_line("line one", "updated line one")

    expected = (
        "initial contents of line one\n"  # Write the first line
        "initial contents of line two\n"  # Write the second line
        "\x1b[2F"  # Move up two lines
        "updated line one\u001b[0K\n"  # Overwrite the first line
        "\x1b[1E"  # Move back down one line (as we already wrote a newline)
    )
    assert capsys.readouterr().out == expected
|
gaybro8777/klio | cli/tests/utils/test_cli_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import subprocess
import pytest
from klio_cli.utils import cli_utils
@pytest.mark.parametrize("job_dir", (None, "/foo/bar/klio-job.yaml"))
def test_get_git_sha(mocker, job_dir):
    """get_git_sha shells out to `git describe` in the given directory."""
    mock_check_output = mocker.patch.object(subprocess, "check_output")
    mock_check_output.return_value = b""

    actual = cli_utils.get_git_sha(job_dir)

    expected_cmd = (
        "git describe --match=NeVeRmAtCh --always --abbrev=8 --dirty"
    )
    mock_check_output.assert_called_once_with(
        expected_cmd.split(), cwd=job_dir, stderr=subprocess.DEVNULL
    )
    assert actual == ""
def test_get_git_sha_called_process_error(mocker):
    """A git failure without an image-tag override exits the CLI."""
    mocker.patch.object(
        subprocess,
        "check_output",
        side_effect=subprocess.CalledProcessError(1, ""),
    )
    with pytest.raises(SystemExit):
        cli_utils.get_git_sha()
@pytest.mark.parametrize(
    "pipeline_options,will_raise",
    [
        # no options at all
        ({}, True),
        # temp_location missing
        (
            {
                "project": "p",
                "staging_location": "s",
                "region": "r",
            },
            True,
        ),
        # fully configured
        (
            {
                "project": "p",
                "staging_location": "s",
                "temp_location": "t",
                "region": "r",
            },
            False,
        ),
    ],
)
def test_validate_dataflow_runner_config(
    mocker, caplog, pipeline_options, will_raise
):
    """Missing mandatory Dataflow options log one error and exit."""
    config = mocker.Mock()
    config.pipeline_options.as_dict.return_value = pipeline_options

    if not will_raise:
        cli_utils.validate_dataflow_runner_config(config)
        return

    with pytest.raises(SystemExit):
        cli_utils.validate_dataflow_runner_config(config)
    assert len(caplog.records) == 1
    assert caplog.records[0].levelname == "ERROR"
@pytest.mark.parametrize("direct_runner", [False, True])
def test_is_direct_runner(mocker, monkeypatch, direct_runner):
    """Dataflow config is validated only when not using the direct runner."""
    mock_klio_cfg = mocker.Mock()
    mock_validate_df_config = mocker.Mock()
    monkeypatch.setattr(
        cli_utils, "validate_dataflow_runner_config", mock_validate_df_config
    )
    # Bug fix: call under test FIRST; the old version asserted on the
    # mock before invoking is_direct_runner at all.
    assert (
        cli_utils.is_direct_runner(mock_klio_cfg, direct_runner)
        == direct_runner
    )
    if not direct_runner:
        # Bug fix: `mock.called_once_with(...)` is not a Mock assertion
        # method — it returns a (truthy) child Mock, so the old assert
        # could never fail. Use assert_called_once_with instead.
        mock_validate_df_config.assert_called_once_with(mock_klio_cfg)
    else:
        mock_validate_df_config.assert_not_called()
|
gaybro8777/klio | exec/src/klio_exec/commands/audit_steps/base.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import traceback
def _get_relevant_frames(tb):
"""Remove up to and including pytest-related frames from tb.
Args:
list(traceback.FrameSummary): complete traceback of code
executed by pytest
Returns:
list(traceback.FrameSummary) of traceback frames not including
the invocation of pytest
"""
last_index = 0
for i, frame in enumerate(reversed(tb)):
if "pytest" in str(frame):
last_index = len(tb) - i
break
return tb[last_index:]
class BaseKlioAuditStep(object):
    """Base class for custom audit steps when auditing Klio jobs.

    Subclasses have access to ``self.job_dir`` and ``self.klio_config``.
    Any other inspection or modification of the Python environment
    should happen using global state, because pytest itself runs in
    between ``before_tests`` and ``after_tests``.
    """

    # subclasses set this to the step's display name used in output
    AUDIT_STEP_NAME = None

    def __init__(self, job_dir, klio_config, tw):
        self.klio_config = klio_config
        self.job_dir = job_dir
        self.tw = tw  # terminal writer used for all output
        self.errored = False
        self.warned = False

    @staticmethod
    def get_description():
        """User-facing description shown in ``klio job audit --list``.

        When not implemented, the audit step class's docstring is used;
        if there is no class-level docstring either, the description
        defaults to "No description".
        """
        pass

    def _emit(self, message, tb=None, **kw):
        # Prefix output with the step name; append a cleaned traceback
        # (pytest frames removed) when one is supplied.
        msg = "[{}]: {}\n".format(self.AUDIT_STEP_NAME, message)
        if tb:
            frames = _get_relevant_frames(tb)
            msg = "{}{}\n".format(msg, "\n".join(traceback.format_list(frames)))
        self.tw.write(msg, **kw)

    def emit_warning(self, warning, tb=None, **kw):
        """Emit an audit warning and set ``self.warned``.

        Excludes any pytest-related stack frames from ``tb``.

        Args:
            warning (str): Warning to emit. Prefixed to ``tb``.
            tb (traceback object): Traceback to emit.
            kw (dict): Attributes passed on to ``TerminalWriter.write``.
        """
        self.warned = True
        # yellow by default; explicit kwargs win
        self._emit(warning, tb=tb, **{"yellow": True, **kw})

    def emit_error(self, error, tb=None, **kw):
        """Emit an audit error and set ``self.errored``.

        Excludes any pytest-related stack frames from ``tb``.

        Args:
            error (str): Error to emit. Prefixed to ``tb``.
            tb (traceback object): Traceback to emit.
            kw (dict): Attributes passed on to ``TerminalWriter.write``.
        """
        self.errored = True
        # red by default; explicit kwargs win
        self._emit(error, tb=tb, **{"red": True, **kw})

    def before_tests(self):
        """Hook for any setup needed before the tests run."""
        pass

    def after_tests(self):
        """Evaluate the results of auditing and emit results.

        Users can use the ``emit_warning`` and ``emit_error`` methods
        here to simplify reporting results. Implementation required.
        """
        raise NotImplementedError("Subclasses must implement after_tests!")
|
gaybro8777/klio | audio/src/klio_audio/transforms/audio.py | <reponame>gaybro8777/klio
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import librosa
import librosa.display
import numpy as np
from klio.transforms import decorators as tfm_decorators
from klio_audio import decorators
from klio_audio.transforms import _base
class LoadAudio(_base.KlioAudioBaseDoFn):
    """Load audio into memory as a :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.load`. It takes in a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` with the payload of the
    ``KlioMessage`` a file-like object or a path to a file, and returns
    a ``PCollection`` of ``KlioMessages`` where the payload is a
    :class:`numpy.ndarray`.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                # other transforms
            )

    Args:
        librosa_kwargs (dict): Instantiate the transform with keyword
            arguments to pass into :func:`librosa.load`.
    """

    def __init__(self, *_, **librosa_kwargs):
        # positional args are deliberately swallowed (*_); only keyword
        # args are forwarded to librosa.load
        self.librosa_kwargs = librosa_kwargs

    @tfm_decorators._handle_klio
    @decorators.handle_binary(save_with_numpy=True)
    def process(self, item):
        # the message element (bytes) is used for logging only
        element = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Loading {} into memory as a numpy array.".format(element)
        )
        # librosa.load returns (audio, sample_rate); the sample rate is
        # intentionally discarded here
        audio, _ = librosa.load(item.payload, **self.librosa_kwargs)
        yield audio
class GetSTFT(_base.KlioAudioBaseDoFn):
    """Calculate Short-time Fourier transform from a :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.stft` and expects a :class:`PCollection
    <apache_beam.pvalue.PCollection>` of :ref:`KlioMessages <klio-message>`
    where the payload is a :class:`numpy.ndarray` and the output is the
    same with the ``stft`` calculation applied.

    The Short-time Fourier transform (STFT) is a Fourier-related
    transform used to determine the sinusoidal frequency and phase
    content of local sections of a signal as it changes over time.
    STFT provides the time-localized frequency information for
    situations in which frequency components of a signal vary over time,
    whereas the standard Fourier transform provides the frequency
    information averaged over the entire signal time interval.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.GetSTFT()
                # other transforms
            )

    Args:
        librosa_kwargs (dict): Instantiate the transform with keyword
            arguments to pass into :func:`librosa.stft`.
    """

    def __init__(self, *_, **librosa_kwargs):
        # keyword args are forwarded to librosa.stft
        self.librosa_kwargs = librosa_kwargs

    @tfm_decorators._handle_klio
    @decorators.handle_binary(load_with_numpy=True, save_with_numpy=True)
    def process(self, item):
        # the message element (bytes) is used for logging only
        element = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Calculating the short-time Fourier transform for {}".format(
                element
            )
        )
        # payload is the audio time series loaded upstream (LoadAudio)
        yield librosa.stft(y=item.payload, **self.librosa_kwargs)
class GetSpec(_base.KlioAudioBaseDoFn):
    """Generate a dB-scaled spectrogram from a :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.amplitude_to_db` and expects a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` where the payload is a
    :class:`numpy.ndarray` and the output is the same with the
    ``amplitude_to_db`` function applied.

    A spectrogram shows the intensity of frequencies over time.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.GetSpec()
                # other transforms
            )

    Args:
        librosa_kwargs (dict): Instantiate the transform with keyword
            arguments to pass into :func:`librosa.amplitude_to_db`.
    """

    def __init__(self, *_, **librosa_kwargs):
        # keyword args are forwarded to librosa.amplitude_to_db
        self.librosa_kwargs = librosa_kwargs

    @tfm_decorators._handle_klio
    @decorators.handle_binary(load_with_numpy=True, save_with_numpy=True)
    def process(self, item):
        # the message element (bytes) is used for logging only
        element = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Generating a spectrogram for {}".format(element)
        )
        # payload is expected to be an STFT matrix (e.g. from GetSTFT);
        # magnitudes are converted to dB relative to the peak magnitude
        stft = item.payload
        yield librosa.amplitude_to_db(
            np.abs(stft), ref=np.max(np.abs(stft)), **self.librosa_kwargs
        )
class GetMelSpec(_base.KlioAudioBaseDoFn):
    """Generate a spectrogram from a :class:`numpy.ndarray` using the mel scale.

    This transform wraps :func:`librosa.feature.melspectrogram` and expects a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` where the payload is a
    :class:`numpy.ndarray` and the output is the same with the
    ``melspectrogram`` function applied.

    The mel scale is a non-linear transformation of frequency scale
    based on the perception of pitches. The mel scale is calculated so
    that two pairs of frequencies separated by a delta in the mel scale
    are perceived by humans as being equidistant.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.GetMelSpec()
                # other transforms
            )

    Args:
        librosa_kwargs (dict): Instantiate the transform with keyword
            arguments to pass into :func:`librosa.feature.melspectrogram`.
    """

    def __init__(self, *_, **librosa_kwargs):
        # keyword args are forwarded to librosa.feature.melspectrogram
        self.librosa_kwargs = librosa_kwargs

    @tfm_decorators._handle_klio
    @decorators.handle_binary(load_with_numpy=True, save_with_numpy=True)
    def process(self, item):
        # the message element (bytes) is used for logging only
        element = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Generating a Mel spectrogram for {}".format(element)
        )
        # payload is the audio time series loaded upstream (LoadAudio)
        yield librosa.feature.melspectrogram(
            y=item.payload, **self.librosa_kwargs
        )
class GetMFCC(_base.KlioAudioBaseDoFn):
    """Calculate MFCCs from a :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.power_to_db` followed by
    :func:`librosa.feature.mfcc` and expects a :class:`PCollection
    <apache_beam.pvalue.PCollection>` of :ref:`KlioMessages <klio-message>`
    where the payload is a :class:`numpy.ndarray` and the output is the same
    with the ``mfcc`` function applied.

    The Mel frequency cepstral coefficients (MFCCs) of a signal are
    a small set of features (usually about 10–20) which describe the
    overall shape of a spectral envelope. It's often used to describe
    timbre or model characteristics of human voice.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.GetMFCC()
                # other transforms
            )

    Args:
        librosa_kwargs (dict): Instantiate the transform with keyword
            arguments to pass into :func:`librosa.feature.mfcc`.
    """

    def __init__(self, *_, **librosa_kwargs):
        # keyword args are forwarded to librosa.feature.mfcc
        self.librosa_kwargs = librosa_kwargs

    @tfm_decorators._handle_klio
    @decorators.handle_binary(load_with_numpy=True, save_with_numpy=True)
    def process(self, item):
        # the message element (bytes) is used for logging only
        element = item.element.decode("utf-8")
        self._klio.logger.debug(
            "Generating Mel frequency cepstral coefficients for {}".format(
                element
            )
        )
        # melspectrogram by default returns a power'ed (**2) spectrogram
        # so we need to convert to decibel units (if it wasn't a power'ed
        # spec, then we'd use amplitude_to_db)
        Sdb = librosa.power_to_db(item.payload, ref=np.max)
        yield librosa.feature.mfcc(S=Sdb, **self.librosa_kwargs)
class SpecToPlot(_base.KlioPlotBaseDoFn):
    """Generate a matplotlib figure of the spectrogram of a
    :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.display.specshow` and expects a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` where the payload is a
    :class:`numpy.ndarray` of a spectrogram and the output is a
    :class:`matplotlib.figure.Figure` instance.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.GetSpec()
                | audio.SpecToPlot()
                # other transforms
            )

    Args:
        title (str): Title of spectrogram plot. Default: ``Spectrogram of
            {KlioMessage.data.element}``.
        plot_args (dict): keyword arguments to pass to
            :func:`librosa.display.specshow`.
    """

    DEFAULT_TITLE = "Spectrogram of {element}"

    def __init__(self, *_, title=None, **plot_args):
        # NOTE(review): `self` is also passed positionally to the base
        # __init__ — presumably swallowed by the base's *_ catch-all;
        # confirm against _base.KlioPlotBaseDoFn's signature.
        super(SpecToPlot, self).__init__(self, title=title, **plot_args)
        # default to a time/linear-frequency display, overridable by caller
        self.plot_args["x_axis"] = self.plot_args.get("x_axis", "time")
        self.plot_args["y_axis"] = self.plot_args.get("y_axis", "linear")

    def _plot(self, item, fig):
        # draw the spectrogram onto the figure's current axes
        librosa.display.specshow(item.payload, ax=fig.gca(), **self.plot_args)
class MelSpecToPlot(_base.KlioPlotBaseDoFn):
    """Generate a matplotlib figure of the mel spectrogram of a
    :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.power_to_db` followed by
    :func:`librosa.display.specshow` and expects a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` where the payload is a
    :class:`numpy.ndarray` of a melspectrogram and the output is a
    :class:`matplotlib.figure.Figure` instance.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.GetMelSpec()
                | audio.MelSpecToPlot()
                # other transforms
            )

    Args:
        title (str): Title of spectrogram plot. Default: ``Mel-freqency
            Spectrogram of {KlioMessage.data.element}``.
        plot_args (dict): keyword arguments to pass to
            :func:`librosa.display.specshow`.
    """

    DEFAULT_TITLE = "Mel-frequency Spectrogram of {element}"

    def __init__(self, *_, title=None, **plot_args):
        # NOTE(review): `self` is also passed positionally to the base
        # __init__ — presumably swallowed by the base's *_ catch-all;
        # confirm against _base.KlioPlotBaseDoFn's signature.
        super(MelSpecToPlot, self).__init__(self, title=title, **plot_args)
        # mel y-axis is forced; x_axis and fmax are overridable defaults
        self.plot_args["y_axis"] = "mel"
        self.plot_args["x_axis"] = self.plot_args.get("x_axis", "time")
        self.plot_args["fmax"] = self.plot_args.get("fmax", 8000)

    def _plot(self, item, fig):
        # melspectrogram payloads are power spectrograms; convert to dB
        # before drawing
        Sdb = librosa.power_to_db(item.payload, ref=np.max)
        librosa.display.specshow(Sdb, ax=fig.gca(), **self.plot_args)
class MFCCToPlot(_base.KlioPlotBaseDoFn):
    """Generate a matplotlib figure of the MFCCs as a :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.display.specshow` and expects a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` where the payload is a
    :class:`numpy.ndarray` of the MFCCs of an audio and the output is a
    :class:`matplotlib.figure.Figure` instance.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.GetMFCC()
                | audio.MFCCToPlot()
                # other transforms
            )

    Args:
        title (str): Title of spectrogram plot. Default: ``MFCCs of
            {KlioMessage.data.element}``.
        plot_args (dict): keyword arguments to pass to
            :func:`librosa.display.specshow`.
    """

    DEFAULT_TITLE = "MFCCs of {element}"

    def __init__(self, *_, title=None, **plot_args):
        # Fix: drop the redundant positional `self` the original passed to
        # the parent initializer (it was absorbed by the parent's `*_`).
        super(MFCCToPlot, self).__init__(title=title, **plot_args)
        # Default to a time x-axis unless the caller overrides it.
        self.plot_args["x_axis"] = self.plot_args.get("x_axis", "time")

    def _plot(self, item, fig):
        # Render the MFCC matrix onto the figure's current axes.
        librosa.display.specshow(item.payload, ax=fig.gca(), **self.plot_args)
class WaveformToPlot(_base.KlioAudioBaseDoFn):
    """Generate a matplotlib figure of the wave form of a
    :class:`numpy.ndarray`.

    This transform wraps :func:`librosa.display.waveplot` and expects a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` where the payload is a
    :class:`numpy.ndarray` of a loaded audio file and the output is a
    :class:`matplotlib.figure.Figure` instance.

    Example:

    .. code-block:: python

        # run.py
        import apache_beam as beam
        from klio.transforms import decorators
        from klio_audio.transforms import audio

        @decorators.handle_klio
        def element_to_filename(ctx, data):
            filename = data.element.decode("utf-8")
            return f"file:///path/to/audio/{filename}.wav"

        def run(in_pcol, job_config):
            return (
                in_pcol
                | beam.Map(element_to_filename)
                | audio.LoadAudio()
                | audio.WaveformToPlot()
                # other transforms
            )

    Args:
        num_samples (int): Number of samples to plot. Default: ``5000``.
        title (str): Title of spectrogram plot. Default: ``Waveplot of
            {KlioMessage.data.element}``.
        plot_args (dict): keyword arguments to pass to
            :func:`librosa.display.waveplot`.
    """

    # NOTE(review): this class subclasses KlioAudioBaseDoFn while the other
    # *ToPlot transforms here subclass KlioPlotBaseDoFn, yet it uses the
    # same DEFAULT_TITLE/_plot/title protocol — confirm the intended base.
    DEFAULT_TITLE = "Waveplot of {element}"

    def __init__(self, *_, num_samples=5000, title=None, **plot_args):
        # Fix: drop the redundant positional `self` the original passed to
        # the parent initializer (it was absorbed by the parent's `*_`).
        super(WaveformToPlot, self).__init__(title=title, **plot_args)
        self.num_samples = num_samples

    def _plot(self, item, fig):
        # Plot only the first `num_samples` samples of the audio payload.
        librosa.display.waveplot(
            item.payload[: self.num_samples], ax=fig.gca(), **self.plot_args
        )
|
gaybro8777/klio | exec/src/klio_exec/commands/audit.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import time
import pytest
from py import io
from klio_exec.commands.utils import plugin_utils
PLUGIN_NAMESPACE = "klio.plugins.audit"
def _run_pytest(tw):
    """Run the job's test suite quietly and report the outcome.

    Args:
        tw: a ``py.io.TerminalWriter`` used for user-facing output.
    Returns:
        bool: True if the pytest run failed, False otherwise.
    """
    tw.write("Running tests for audit validation...\n")
    # Test-style verification: a non-zero exit code means failure.
    failed = pytest.main(["-qq"]) != 0
    if failed:
        tw.write("PyTest failed!\n", yellow=True)
    else:
        tw.write("PyTest passed!\n")
    return failed
def _get_audit_steps(job_dir, config_obj, tw):
    """Instantiate every audit-step plugin registered under the namespace."""
    return [
        step_cls(job_dir, config_obj, tw)
        for step_cls in plugin_utils.load_plugins_by_namespace(PLUGIN_NAMESPACE)
    ]
def list_audit_steps(ctx, param, value):
    """click option callback: print installed audit steps, then exit.

    click automatically passes in the context, the parameter name, and
    the parameter value.
    """
    # Bail out when the flag is absent or click is doing resilient
    # (e.g. shell-completion) parsing.
    if ctx.resilient_parsing or not value:
        return
    writer = io.TerminalWriter()
    writer.hasmarkup = True
    writer.sep("=", "Installed audit steps")
    plugin_utils.print_plugins(PLUGIN_NAMESPACE, writer)
    ctx.exit()
def audit(job_dir, config_obj):
    """Audit a Klio job for known issues.

    Runs the job's test suite, then every installed audit-step plugin,
    and prints a summary of errors and warnings.

    Args:
        job_dir (str): path to the job directory being audited.
        config_obj: the job's parsed Klio configuration.
    Raises:
        SystemExit: if the audit could not run, the tests failed, or any
            audit step reported an error.
    """
    tw = io.TerminalWriter()
    tw.hasmarkup = True
    tw.write("Auditing your Klio job...\n")
    try:
        audit_steps = _get_audit_steps(job_dir, config_obj, tw)
        for step in audit_steps:
            step.before_tests()
        pytest_failed = _run_pytest(tw)
        tw.sep("=", "audit session starts", bold=True)
        start = time.time()
        # TODO: Currently, we're running all of the audit steps in
        # the same Python process and there's no isolation. Do we want
        # to run the tests once per step instead?
        for step in audit_steps:
            step.after_tests()
            if not any([step.errored, step.warned]):
                msg = "[{}]: audit step passed!\n".format(step.AUDIT_STEP_NAME)
                tw.write(msg, green=True)
        end = time.time()
        failed_steps = [step for step in audit_steps if step.errored]
        warned_steps = [step for step in audit_steps if step.warned]
        error_count = len(failed_steps)
        warning_count = len(warned_steps)
        finished_msg = "{0} errors, {1} warnings in {2:.3f} seconds".format(
            error_count, warning_count, end - start
        )
        color = "green"
        if error_count:
            color = "red"
        elif warning_count:
            color = "yellow"
        kwargs = {
            color: True,
            "bold": True,
        }
        tw.sep("=", finished_msg, **kwargs)
    except Exception:
        # use logging instead of tw to easily get traceback info
        logging.error(
            "Unable to run the audit command due to:\n", exc_info=True
        )
        raise SystemExit(1)
    if not pytest_failed and not error_count:
        if not warning_count:
            tw.write("Good job! Your job does not exhibit any known issues.\n")
        else:
            tw.write(
                "Cool! Your job has warnings, but no errors. "
                "Please check the warnings.\n",
                yellow=True,
            )
    else:
        # Fix: the original called tw.sep(msg, red=True), which treats the
        # message as the separator *character* and repeats it across the
        # terminal width; tw.write is the intended call.
        tw.write(
            "You have errors in your job. Please fix and try again.\n",
            red=True,
        )
        raise SystemExit(1)
|
gaybro8777/klio | exec/tests/unit/commands/test_stop.py | <reponame>gaybro8777/klio
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from googleapiclient import errors as gerrors
from klio_exec.commands import stop
@pytest.fixture
def mock_discovery_client(mocker, monkeypatch):
    """Patch googleapiclient's discovery.build to return a mock client."""
    client = mocker.Mock()
    monkeypatch.setattr(stop.discovery, "build", lambda service, version: client)
    return client
@pytest.fixture
def config(mocker):
    """A mock Klio config with a job name and Dataflow pipeline options."""
    options = mocker.Mock(project="test-project", region="europe-west1")
    return mocker.Mock(job_name="test-job", pipeline_options=options)
@pytest.fixture
def jobs_response():
    """A Dataflow jobs.list response: one unrelated job, one matching job."""
    matching_job = {
        "id": "1234",
        "name": "test-job",
        "projectId": "test-project",
        "location": "europe-west1",
    }
    return {"jobs": [{"name": "not-the-test-job"}, matching_job]}
@pytest.fixture
def job(jobs_response):
    """The entry in jobs_response that matches the configured job name."""
    all_jobs = jobs_response["jobs"]
    return all_jobs[1]
@pytest.fixture
def mock_sleep(mocker, monkeypatch):
    """Patch time.sleep in the stop module so tests don't actually wait."""
    sleep_mock = mocker.Mock()
    monkeypatch.setattr(stop.time, "sleep", sleep_mock)
    return sleep_mock
@pytest.mark.parametrize("api_version", (None, "v1b3", "v2"))
def test_set_dataflow_client(mock_discovery_client, api_version):
    """_set_dataflow_client populates the module-level Dataflow client."""
    # NOTE: relies on module-level state (stop._client), so the global
    # must be reset afterwards to avoid leaking into other tests.
    assert stop._client is None
    stop._set_dataflow_client(api_version)
    assert stop._client is not None
    assert mock_discovery_client == stop._client
    # cleanup
    setattr(stop, "_client", None)
# return the desired job, no jobs at all, or no jobs matching job name
@pytest.mark.parametrize("returns_jobs", (True, False, None))
def test_check_job_running(
    mock_discovery_client, returns_jobs, jobs_response, config, monkeypatch
):
    """_check_job_running returns the matching active job, else None."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    # drill into the mocked client to the jobs().list() request object
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.list
    req.return_value.execute.return_value = {}
    if returns_jobs:
        req.return_value.execute.return_value = jobs_response
    elif returns_jobs is False:
        # active jobs exist, but none match the configured job name
        req.return_value.execute.return_value = {
            "jobs": [{"name": "not-the-test-job"}]
        }
    ret = stop._check_job_running(config)
    if returns_jobs:
        assert jobs_response["jobs"][1] == ret
    else:
        assert ret is None
    req.assert_called_once_with(
        projectId="test-project", location="europe-west1", filter="ACTIVE"
    )
    req.return_value.execute.assert_called_once_with()
def test_check_job_running_errors(
    mock_discovery_client, config, monkeypatch, caplog
):
    """_check_job_running logs (but does not raise) on API errors."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.list
    req.return_value.execute.side_effect = Exception("foo")
    stop._check_job_running(config)
    req.assert_called_once_with(
        projectId="test-project", location="europe-west1", filter="ACTIVE"
    )
    req.return_value.execute.assert_called_once_with()
    # the failure is surfaced via exactly two log records
    assert 2 == len(caplog.records)
@pytest.mark.parametrize(
    "state,pyver", (("drain", None), ("cancel", None), (None, 2), (None, 3))
)
def test_update_job_state(
    state, pyver, mock_discovery_client, job, monkeypatch
):
    """_update_job_state requests the given (or mapped default) job state."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    exp_state = state
    if not state:
        # no explicit state: the function falls back to the "default"
        # entry of stop.JOB_STATE_MAP, which is patched here
        if pyver == 2:
            exp_state = "JOB_STATE_DRAINED"
        else:
            exp_state = "JOB_STATE_CANCELLED"
        monkeypatch.setitem(stop.JOB_STATE_MAP, "default", exp_state)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    req.return_value.execute.return_value = None
    stop._update_job_state(job, state)
    # the update request body is the job dict with requestedState set
    job["requestedState"] = exp_state
    req.assert_called_once_with(
        jobId="1234",
        projectId="test-project",
        location="europe-west1",
        body=job,
    )
    req.return_value.execute.assert_called_once_with()
def test_update_job_state_400_error(
    mock_discovery_client, job, mock_sleep, mocker, monkeypatch, caplog
):
    """A 400 response aborts immediately: no retries and no sleeping."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    mock_resp = mocker.Mock(status=400)
    req.return_value.execute.side_effect = gerrors.HttpError(mock_resp, b"foo")
    with pytest.raises(SystemExit):
        stop._update_job_state(job, "drain")
    # a single attempt, one log record, and no backoff sleeps
    assert 1 == req.return_value.execute.call_count
    assert 1 == len(caplog.records)
    assert not mock_sleep.call_count
def test_update_job_state_500_error(
    mock_discovery_client, job, mock_sleep, mocker, monkeypatch, caplog
):
    """A 500 response is retried (4 attempts, 3 sleeps) before exiting."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    mock_resp = mocker.Mock(status=500)
    req.return_value.execute.side_effect = gerrors.HttpError(mock_resp, b"foo")
    with pytest.raises(SystemExit):
        stop._update_job_state(job, "drain")
    assert 4 == req.return_value.execute.call_count
    assert 4 == len(caplog.records)
    assert 3 == mock_sleep.call_count
def test_update_job_state_error(
    mock_discovery_client, job, mock_sleep, monkeypatch, caplog
):
    """Unexpected exceptions are retried like 5xx errors before exiting."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    req.return_value.execute.side_effect = Exception("foo")
    with pytest.raises(SystemExit):
        stop._update_job_state(job, "cancel")
    assert 4 == req.return_value.execute.call_count
    assert 4 == len(caplog.records)
    assert 3 == mock_sleep.call_count
@pytest.mark.parametrize(
    "exec_side_effect",
    (
        (
            {"currentState": "JOB_STATE_CANCELLING"},
            {"currentState": "JOB_STATE_CANCELLED"},
        ),
        (Exception("foo"), {"currentState": "JOB_STATE_CANCELLED"}),
    ),
)
def test_watch_job_state(
    mock_discovery_client,
    mock_sleep,
    monkeypatch,
    caplog,
    job,
    exec_side_effect,
):
    """_watch_job_state polls until a terminal state is reached."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.get
    # first poll is non-terminal or an error; second poll is terminal
    req.return_value.execute.side_effect = exec_side_effect
    stop._watch_job_state(job)
    assert 2 == req.return_value.execute.call_count
    mock_sleep.assert_called_once_with(5)
    assert 1 == len(caplog.records)
def test_watch_job_state_raises(
    mock_discovery_client, monkeypatch, caplog, job
):
    """_watch_job_state exits if the timeout elapses before completion."""
    monkeypatch.setattr(stop, "_client", mock_discovery_client)
    with pytest.raises(SystemExit):
        stop._watch_job_state(job, timeout=0)
    assert 1 == len(caplog.records)
@pytest.mark.parametrize("has_running_job", (True, False))
def test_stop(has_running_job, config, mocker, monkeypatch, job):
    """stop() only updates/watches job state when a matching job is running."""
    mock_set_dataflow_client = mocker.Mock()
    monkeypatch.setattr(stop, "_set_dataflow_client", mock_set_dataflow_client)
    ret_val = None
    if has_running_job:
        ret_val = job
    mock_check_job_running = mocker.Mock(return_value=ret_val)
    monkeypatch.setattr(stop, "_check_job_running", mock_check_job_running)
    mock_update_job_state = mocker.Mock()
    monkeypatch.setattr(stop, "_update_job_state", mock_update_job_state)
    mock_watch_job_state = mocker.Mock()
    monkeypatch.setattr(stop, "_watch_job_state", mock_watch_job_state)
    stop.stop(config, "cancel")
    mock_set_dataflow_client.assert_called_once_with()
    mock_check_job_running.assert_called_once_with(config)
    if has_running_job:
        mock_update_job_state.assert_called_once_with(job, req_state="cancel")
        mock_watch_job_state.assert_called_once_with(job)
    else:
        # no matching running job: nothing to update or watch
        mock_update_job_state.assert_not_called()
        mock_watch_job_state.assert_not_called()
|
gaybro8777/klio | cli/tests/commands/message/test_publish.py | <gh_stars>100-1000
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import functools
import pytest
from google.api_core import exceptions as gapi_exceptions
from klio_core import config
from klio_core.proto.v1beta1 import klio_pb2
from klio_cli.commands.message import publish
@pytest.fixture
def mock_publisher(mocker, monkeypatch):
    """Patch the pubsub PublisherClient class with a mock."""
    publisher_cls = mocker.Mock()
    monkeypatch.setattr(publish.pubsub, "PublisherClient", publisher_cls)
    return publisher_cls
@pytest.fixture
def klio_job_config():
    """A minimal KlioConfig: pubsub event IO plus GCS data IO."""
    events = {
        "inputs": [
            {
                "type": "pubsub",
                "topic": "an-input-topic",
                "subscription": "a-subscription",
            }
        ],
        "outputs": [{"type": "pubsub", "topic": "foo-topic-output"}],
    }
    data = {
        "inputs": [{"type": "gcs", "location": "gs://a-test-input/location"}],
        "outputs": [{"type": "gcs", "location": "foo-output-location"}],
    }
    raw_config = {
        "job_name": "test-job",
        "version": 1,
        "pipeline_options": {"project": "test-gcp-project"},
        "job_config": {"events": events, "data": data},
    }
    return config.KlioConfig(raw_config)
@pytest.fixture
def expected_klio_job(klio_job_config):
    """A KlioJob proto matching the job defined in klio_job_config."""
    expected = klio_pb2.KlioJob()
    expected.job_name = "test-job"
    expected.gcp_project = "test-gcp-project"
    return expected
@pytest.fixture
def expected_klio_message(expected_klio_job):
    """A KlioMessage whose downstream metadata contains the current job."""
    expected = klio_pb2.KlioMessage()
    expected.metadata.downstream.extend([expected_klio_job])
    return expected
def test_create_publisher(mock_publisher):
    """_create_publisher validates the topic and returns a partial publish."""
    client = mock_publisher.return_value
    ret_publisher = publish._create_publisher("a-topic")
    mock_publisher.assert_called_once_with()
    client.get_topic.assert_called_once_with(request={"topic": "a-topic"})
    # the returned callable is functools.partial(client.publish, topic=...)
    expected = functools.partial(client.publish, topic="a-topic")
    assert expected.func == ret_publisher.func
def test_create_publisher_topic_not_found(mock_publisher):
    """A missing topic exits the CLI instead of surfacing NotFound."""
    client = mock_publisher.return_value
    client.get_topic.side_effect = gapi_exceptions.NotFound("foo")
    with pytest.raises(SystemExit):
        publish._create_publisher("a-topic")
    mock_publisher.assert_called_once_with()
    client.get_topic.assert_called_once_with(request={"topic": "a-topic"})
def test_create_publisher_raises(mock_publisher):
    """Unexpected errors while fetching the topic propagate unchanged."""
    client = mock_publisher.return_value
    client.get_topic.side_effect = Exception("foo")
    with pytest.raises(Exception, match="foo"):
        publish._create_publisher("a-topic")
    mock_publisher.assert_called_once_with()
    client.get_topic.assert_called_once_with(request={"topic": "a-topic"})
def test_get_current_klio_job(klio_job_config, expected_klio_job):
    """_get_current_klio_job builds a KlioJob proto from the config."""
    returned_job = publish._get_current_klio_job(klio_job_config)
    assert returned_job == expected_klio_job
@pytest.mark.parametrize(
    "force,ping,top_down,version",
    (
        (True, True, True, 1),
        (True, False, False, 1),
        (False, True, False, 1),
        (False, False, False, 1),
        (False, False, False, 2),
    ),
)
def test_create_pubsub_message(
    force, ping, top_down, version, expected_klio_job
):
    """_create_pubsub_message serializes the expected v1/v2 KlioMessage."""
    entity_id = "s0m3-ent1ty-1D"
    expected_klio_message = klio_pb2.KlioMessage()
    expected_klio_message.metadata.force = force
    expected_klio_message.metadata.ping = ping
    expected_klio_message.version = version
    if version == 1:
        # v1: the element lives in data.entity_id; non-top-down messages
        # carry the current job in metadata.downstream
        expected_klio_message.data.entity_id = entity_id
        if not top_down:
            expected_klio_message.metadata.downstream.extend(
                [expected_klio_job]
            )
    else:
        # v2: the element is bytes; non-top-down messages limit the
        # intended recipients to the current job
        expected_klio_message.data.element = bytes(entity_id, "utf-8")
        if not top_down:
            rec = expected_klio_message.metadata.intended_recipients
            rec.limited.recipients.extend([expected_klio_job])
    ret_msg = publish._create_pubsub_message(
        entity_id, expected_klio_job, force, ping, top_down, version
    )
    assert expected_klio_message.SerializeToString() == ret_msg
@pytest.mark.parametrize(
    "force,ping,top_down,non_klio",
    (
        (True, True, True, False),
        (True, False, False, False),
        (False, True, False, False),
        (False, False, False, False),
        (False, False, True, True),
    ),
)
def test_private_publish_messages(
    klio_job_config,
    mock_publisher,
    expected_klio_job,
    force,
    ping,
    top_down,
    non_klio,
):
    """_publish_messages publishes raw bytes or a serialized KlioMessage."""
    entity_id = "s0m3-ent1ty-1D"
    msg_version = 1
    if non_klio:
        # non-klio mode publishes the raw entity id bytes as-is
        exp_data = bytes(entity_id.encode("utf-8"))
    else:
        expected_klio_message = klio_pb2.KlioMessage()
        expected_klio_message.metadata.force = force
        expected_klio_message.metadata.ping = ping
        expected_klio_message.data.entity_id = entity_id
        expected_klio_message.version = msg_version
        if not top_down:
            expected_klio_message.metadata.downstream.extend(
                [expected_klio_job]
            )
        exp_data = expected_klio_message.SerializeToString()
    ret_success, ret_fail = publish._publish_messages(
        klio_job_config,
        [entity_id],
        force,
        ping,
        top_down,
        non_klio,
        msg_version,
    )
    mock_publisher.return_value.publish.assert_called_once_with(
        topic="an-input-topic", data=exp_data
    )
    assert 1 == len(ret_success)
    assert not len(ret_fail)
def test_private_publish_messages_raises(
    mock_publisher, klio_job_config, caplog
):
    """A failed publish is logged and returned in the failure list."""
    client = mock_publisher.return_value
    client.publish.side_effect = Exception("foo")
    ret_success, ret_fail = publish._publish_messages(
        klio_job_config, ["s0m3-ent1ty-1D"], True, False, False, False, 1
    )
    assert not len(ret_success)
    assert 1 == len(ret_fail)
    assert 1 == len(caplog.records)
def test_publish_messages(
    klio_job_config, expected_klio_message, mock_publisher, caplog
):
    """publish_messages publishes a default v1 message and logs progress."""
    entity_id = "s0m3-ent1ty-1D"
    msg_version = 1
    expected_klio_message.metadata.force = False
    expected_klio_message.metadata.ping = False
    expected_klio_message.data.entity_id = entity_id
    expected_klio_message.version = msg_version
    publish.publish_messages(klio_job_config, [entity_id])
    mock_publisher.return_value.publish.assert_called_once_with(
        data=expected_klio_message.SerializeToString(), topic="an-input-topic"
    )
    # two INFO records for the publish progress
    assert 2 == len(caplog.records)
    assert "INFO" == caplog.records[0].levelname
    assert "INFO" == caplog.records[1].levelname
def test_publish_messages_fails(
    klio_job_config, expected_klio_message, mock_publisher, caplog
):
    """A failed publish surfaces as WARNING logs, not an exception."""
    client = mock_publisher.return_value
    client.publish.side_effect = Exception("foo")
    entity_id = "s0m3-ent1ty-1D"
    msg_version = 1
    expected_klio_message.metadata.force = False
    expected_klio_message.metadata.ping = False
    expected_klio_message.data.entity_id = entity_id
    expected_klio_message.version = msg_version
    publish.publish_messages(klio_job_config, [entity_id])
    mock_publisher.return_value.publish.assert_called_once_with(
        data=expected_klio_message.SerializeToString(), topic="an-input-topic"
    )
    # one INFO for starting, then two WARNINGs for the failure
    assert 3 == len(caplog.records)
    assert "INFO" == caplog.records[0].levelname
    assert "WARNING" == caplog.records[1].levelname
    assert "WARNING" == caplog.records[2].levelname
def test_publish_messages_raises(klio_job_config, monkeypatch, caplog):
    """publish_messages exits when the job has no event inputs configured."""
    monkeypatch.setattr(klio_job_config.job_config.events, "inputs", None)
    with pytest.raises(SystemExit):
        publish.publish_messages(klio_job_config, ["s0m3-ent1ty-1D"])
    assert 1 == len(caplog.records)
|
gaybro8777/klio | lib/src/klio/transforms/io.py | <reponame>gaybro8777/klio<filename>lib/src/klio/transforms/io.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os
import apache_beam as beam
from apache_beam.io import avroio as beam_avroio
from apache_beam.io.gcp import bigquery as beam_bq
from fastavro import parse_schema
from klio_core.proto import klio_pb2
class BaseKlioIOException(Exception):
    """Base IO exception; root of the Klio IO exception hierarchy."""
class KlioMissingConfiguration(BaseKlioIOException):
    """Required configuration is missing for a Klio IO transform."""
class _KlioReadWrapper(beam.io.Read):
    """Klio-ified `beam.io.Read` class.

    Handle the case if a user invokes an already-wrapped Klio IO
    transform (via `_KlioTransformMixin`) with `beam.io.Read`, as this
    is not possible to double-wrap.
    """

    def __init__(self, *args, **kwargs):
        if not len(args):
            # I would think we'd never get here, but this is just in case
            # something real funky happens, like this semi-private class
            # is not instantiated with an IO transform.
            # Fix: return early — the original fell through to `args[0]`
            # below, which is a guaranteed IndexError when args is empty
            # (and would have called super().__init__ twice otherwise).
            super(_KlioReadWrapper, self).__init__(*args, **kwargs)
            return
        # record the wrapped transform's class name for the error message
        # raised in `is_bounded`
        self.__wrapped_transform = args[0].__class__.__name__
        super(_KlioReadWrapper, self).__init__(*args, **kwargs)

    def is_bounded(self, *args, **kwargs):
        # beam.io.Read invokes beam.io.Read.source.is_bounded
        # (beam.io.Read.source == self/this instance) and throw an
        # AttributeError if is_bounded not defined. Here we define
        # the method but raise our own expection telling the user not to
        # invoke an alread-wrapped Klio IO Transform with `beam.io.Read()`
        raise TypeError(
            "Error reading from `{}`. If the transform was wrapped with "
            "`apache_beam.io.Read`, remove that wrapper and try again.".format(
                self.__wrapped_transform
            )
        )
class _KlioWrapIOMetaclass(type):
    """Metaclass that transparently wraps IO transforms in ``beam.io.Read``
    when the transform class requires it (``_REQUIRES_IO_READ_WRAP``).
    """

    def __call__(self, *args, **kwargs):
        # Some IO transforms require invocation to be wrapped with
        # `beam.io.Read()`, and others do not. This allows for the API
        # to be the same no matter what by wrapping `beam.io.Read` if
        # the transform requires it. If a user wraps a Klio IO transform
        # again (i.e. `beam.io.Read(_AlreadyWrappedKlioTransform))`), a
        # human-friendly `TypeError` will raise (see
        # `_KlioReadWrapper.is_bounded`)
        if self._REQUIRES_IO_READ_WRAP:
            return _KlioReadWrapper(
                super(_KlioWrapIOMetaclass, self).__call__(*args, **kwargs)
            )
        return super(_KlioWrapIOMetaclass, self).__call__(*args, **kwargs)
class _KlioTransformMixin(metaclass=_KlioWrapIOMetaclass):
    """Common properties for klio v2 IO transforms."""

    # whether or not the transform needs to be invoked by beam.io.Read();
    # subclasses flip this to True when Beam requires the Read() wrapper
    # (see _KlioWrapIOMetaclass.__call__)
    _REQUIRES_IO_READ_WRAP = False
class _KlioReadFromTextSource(beam.io.textio._TextSource):
    """Parses a text file as newline-delimited elements.

    Supports newline delimiters ``\\n`` and ``\\r\\n``.

    Returns:
        (str) KlioMessage serialized as a string
    """

    def read_records(self, file_name, range_tracker):
        parent_records = super(_KlioReadFromTextSource, self).read_records(
            file_name, range_tracker
        )
        for raw_record in parent_records:
            # wrap each line in a v2 KlioMessage addressed to any
            # downstream consumer
            msg = klio_pb2.KlioMessage()
            msg.version = klio_pb2.Version.V2
            msg.metadata.intended_recipients.anyone.SetInParent()
            msg.data.element = raw_record.encode("utf-8")
            yield msg.SerializeToString()
class KlioReadFromText(beam.io.ReadFromText, _KlioTransformMixin):
    """Read from a local or GCS file with each new line as a
    ``KlioMessage.data.element``.
    """

    # hook used by beam.io.ReadFromText to construct its source; swapped
    # for the Klio source that yields serialized KlioMessages per line
    _source_class = _KlioReadFromTextSource
class _KlioReadFromBigQueryMapper(object):
    """Wrapper class to provide a ``beam.Map`` object that converts a row of a
    ``ReadFromBigQuery`` to a properly formatted ``KlioMessage``.

    Args:
        klio_message_columns (list): optional list of column names whose
            values become ``KlioMessage.data.element``; ``None`` means the
            whole row is JSON-serialized.
    """

    def __init__(self, klio_message_columns=None):
        # name-mangled attribute: read only within this class
        self.__klio_message_columns = klio_message_columns

    def _generate_klio_message(self):
        """Return a new v2 KlioMessage addressed to any downstream job."""
        message = klio_pb2.KlioMessage()
        message.version = klio_pb2.Version.V2
        message.metadata.intended_recipients.anyone.SetInParent()
        # TODO: this is where we should add (relevant) KlioMessage.metadata;
        # (1) One thing to figure out is the klio_pb2.KlioJob definition,
        # particularly the JobInput definition, in light of KlioConfig v2.
        # Once that's figured out, we should at least populate the
        # job audit log.
        # (2) Another thing to figure out is force/ping. In streaming, messages
        # are individually marked as force or ping when needed. However,
        # users aren't able to tag individual messages generated from a row
        # of BQ data as force/ping, and it's probably very difficult for us
        # to provide a way to do that. So, should we allow users to at least
        # globally set force/ping on their event input config in klio-job.yaml?
        # Potentially.
        return message

    def _map_row_element(self, row):
        """Extract the configured column(s) of *row* for the message body.

        Returns the bare column value when exactly one column is
        configured; otherwise a JSON string of the selected (or whole)
        row dict.
        """
        # NOTE: this assumes that the coder being used (default is
        # beam.io.gcp.bigquery_tools.RowAsDictJsonCoder, otherwise set in
        # klio-job.yaml) is JSON serializable (since the default is just
        # a plain dictionary). This assumption might break if someone
        # provides a different coder.
        # NOTE: We need to have the row elements be bytes, so if it is
        # a dictionary, we json.dumps into a str to convert to bytes,
        # but that may need to change if we want to support other coders
        data = {}
        if self.__klio_message_columns:
            if len(self.__klio_message_columns) == 1:
                # single column: the raw value is used, not JSON
                # NOTE(review): assumes the value is a str, since _map_row
                # calls bytes(value, "utf-8") — confirm for non-str columns
                data = row[self.__klio_message_columns[0]]
            else:
                for key, value in row.items():
                    if key in self.__klio_message_columns:
                        data[key] = value
                data = json.dumps(data)
        else:
            data = json.dumps(row)
        return data

    def _map_row(self, row):
        """Convert one BigQuery row into a serialized KlioMessage."""
        message = self._generate_klio_message()
        message.data.element = bytes(self._map_row_element(row), "utf-8")
        return message.SerializeToString()

    def as_beam_map(self):
        """Return a labeled ``beam.Map`` applying :meth:`_map_row`."""
        return "Convert to KlioMessage" >> beam.Map(self._map_row)
# Note: copy-pasting the docstrings of `ReadFromBigQuery` so that we can
# include our added parameter (`klio_message_columns`) in the API
# documentation (via autodoc). If we don't do this, then just the parent
# documentation will be shown, excluding our new parameter.
class KlioReadFromBigQuery(beam.PTransform, _KlioTransformMixin):
"""Read data from BigQuery.
This PTransform uses a BigQuery export job to take a snapshot of the table
on GCS, and then reads from each produced file. File format is Avro by
default.
Args:
table (str, callable, ValueProvider): The ID of the table, or a callable
that returns it. The ID must contain only letters ``a-z``, ``A-Z``,
numbers ``0-9``, or underscores ``_``. If dataset argument is
:data:`None` then the table argument must contain the entire table
reference specified as: ``'DATASET.TABLE'``
or ``'PROJECT:DATASET.TABLE'``.
If it's a callable, it must receive one argument representing an
element to be written to BigQuery, and return
a TableReference, or a string table name as specified above.
dataset (str): The ID of the dataset containing this table or
:data:`None` if the table reference is specified entirely by
the table argument.
project (str): The ID of the project containing this table.
klio_message_columns (list): A list of fields (``str``) that should
be assigned to ``KlioMessage.data.element``.
.. note::
If more than one field is provided, the results including the
column names will be serialized to JSON before assigning to
``KlioMessage.data.element``. (e.g. ``'{"field1": "foo",
"field2": bar"}'``). If only one field is provided, just the
value will be assigned to ``KlioMessage.data.element``.
query (str, ValueProvider): A query to be used instead of arguments
table, dataset, and project.
validate (bool): If :data:`True`, various checks will be done when
source gets initialized (e.g., is table present?).
This should be :data:`True` for most scenarios
in order to catch errors as early as possible
(pipeline construction instead of pipeline execution).
It should be :data:`False` if the table is created during pipeline
execution by a previous step.
coder (~apache_beam.coders.coders.Coder): The coder for the table
rows. If :data:`None`, then the default coder is
_JsonToDictCoder, which will interpret every row as a JSON
serialized dictionary.
use_standard_sql (bool): Specifies whether to use BigQuery's standard
SQL dialect for this query. The default value is :data:`False`.
If set to :data:`True`, the query will use BigQuery's updated SQL
dialect with improved standards compliance.
This parameter is ignored for table inputs.
flatten_results (bool): Flattens all nested and repeated fields in the
query results. The default value is :data:`True`.
kms_key (str): Optional Cloud KMS key name for use when creating new
temporary tables.
gcs_location (str, ValueProvider): The name of the Google Cloud Storage
bucket where the extracted table should be written as a string or
a :class:`~apache_beam.options.value_provider.ValueProvider`. If
:data:`None`, then the temp_location parameter is used.
bigquery_job_labels (dict): A dictionary with string labels to be passed
to BigQuery export and query jobs created by this transform. See:
https://cloud.google.com/bigquery/docs/reference/rest/v2/\
Job#JobConfiguration
use_json_exports (bool): By default, this transform works by exporting
BigQuery data into Avro files, and reading those files. With this
parameter, the transform will instead export to JSON files.
JSON files are slower to read due to their larger size.
When using JSON exports,
the BigQuery types for DATE, DATETIME, TIME, and TIMESTAMP will be
exported as strings.
This behavior is consistent with BigQuerySource.
When using Avro exports,
these fields will be exported as native Python
types (datetime.date, datetime.datetime, datetime.datetime,
and datetime.datetime respectively). Avro exports are recommended.
To learn more about BigQuery types, and Time-related type
representations,
see:
https://cloud.google.com/bigquery/docs/reference/standard-sql/\
data-types
To learn more about type conversions between BigQuery and Avro, see:
https://cloud.google.com/bigquery/docs/loading-data-cloud-\
storage-avro#avro_conversions
"""
    def __init__(self, *args, klio_message_columns=None, **kwargs):
        """Initialize the wrapped BigQuery reader and the KlioMessage mapper.

        All positional and keyword arguments other than
        ``klio_message_columns`` are forwarded untouched to
        ``beam_bq.ReadFromBigQuery``.

        Args:
            klio_message_columns (list): passed through to
                ``_KlioReadFromBigQueryMapper``; presumably the row
                columns to include in the emitted ``KlioMessage`` —
                confirm against the mapper's implementation.
        """
        # Name-mangled private attributes; consumed by ``expand`` below.
        self.__reader = beam_bq.ReadFromBigQuery(*args, **kwargs)
        self.__mapper = _KlioReadFromBigQueryMapper(klio_message_columns)
    def expand(self, pcoll):
        """Read rows from BigQuery, then map each row into a KlioMessage."""
        return pcoll | self.__reader | self.__mapper.as_beam_map()
class KlioWriteToBigQuery(beam.io.WriteToBigQuery, _KlioTransformMixin):
    """Writes to BigQuery table with each row as ``KlioMessage.data.element``."""

    # Note: Not using BigQuerySink due to it only being available for
    # batch. See https://beam.apache.org/releases/pydoc/2.22.0/
    # apache_beam.io.gcp.bigquery.html?highlight=bigquerysink
    # #apache_beam.io.gcp.bigquery.BigQuerySink
    _REQUIRES_IO_READ_WRAP = False

    def __unwrap(self, encoded_element):
        # Deserialize the KlioMessage, then decode its JSON payload into
        # the plain row dict that WriteToBigQuery expects.
        kmsg = klio_pb2.KlioMessage()
        kmsg.ParseFromString(encoded_element)
        return json.loads(kmsg.data.payload)

    def expand(self, pcoll):
        # Convert each serialized KlioMessage into a row dict before
        # handing the collection to the parent transform.
        unwrapped = pcoll | beam.Map(self.__unwrap)
        return super().expand(unwrapped)
class _KlioTextSink(beam.io.textio._TextSink):
    """A :class:`~apache_beam.transforms.ptransform.PTransform`
    for writing to text files. Takes a PCollection of KlioMessages
    and writes the elements to a textfile
    """
    def write_record(self, file_handle, encoded_element):
        """Write a single KlioMessage's element as one text record.

        Args:
            file_handle: open file handle (provided by the parent
                ``_TextSink``) to write the record to.
            encoded_element (bytes): a serialized ``KlioMessage``; only
                its ``data.element`` field is written out.
        """
        message = klio_pb2.KlioMessage()
        message.ParseFromString(encoded_element)
        record = message.data.element
        super(_KlioTextSink, self).write_encoded_record(file_handle, record)
class KlioWriteToText(beam.io.textio.WriteToText):
    """Write to a local or GCS file with each new line as
    ``KlioMessage.data.element``.
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): the parent's __init__ is not called; the
        # Klio-aware sink below replaces the _TextSink the parent would
        # have built from the same arguments. Confirm no other parent
        # initialization (e.g. PTransform labeling) is relied upon.
        self._sink = _KlioTextSink(*args, **kwargs)
# note: fast avro is default for py3 on beam
class _KlioFastAvroSource(beam_avroio._FastAvroSource):
    def read_records(self, file_name, range_tracker):
        """Yield each avro record as a serialized KlioMessage."""
        parent_records = super(_KlioFastAvroSource, self).read_records(
            file_name=file_name, range_tracker=range_tracker
        )
        for rec in parent_records:
            kmsg = klio_pb2.KlioMessage()
            kmsg.version = klio_pb2.Version.V2
            kmsg.metadata.intended_recipients.anyone.SetInParent()
            # Use the record's "element" field when one is present (event
            # reading); otherwise serialize the whole record as JSON and
            # stuff it into the message element.
            if "element" in rec:
                kmsg.data.element = rec["element"]
            else:
                kmsg.data.element = json.dumps(rec).encode("utf-8")
            yield kmsg.SerializeToString()
# define an I/O transform using the klio-specific avro source
# note: fast avro is default for py3 on beam
# Note: copy-pasting the docstrings of `ReadFromAvro` so that we can
# include our added parameter (`location`) in the API
# documentation (via autodoc) and drop `use_fastavro` since we default to
# True. If we don't do this, then just the parent documentation will be shown,
# excluding our new parameter and including an unavailable parameter
# (`location` and `use_fastavro` respectively)
class KlioReadFromAvro(beam.io.ReadFromAvro):
    """Read avro from a local directory or GCS bucket.

    Data from avro is dumped into JSON and assigned to
    ``KlioMessage.data.element``.

    Args:
        file_pattern (str): the file glob to read.
        location (str): local or GCS path of file(s) to read.
        min_bundle_size (int): the minimum size in bytes, to be considered
            when splitting the input into bundles.
        validate (bool): flag to verify that the files exist during the
            pipeline creation time.
    """

    _REQUIRES_IO_READ_WRAP = True

    def __init__(
        self,
        file_pattern=None,
        location=None,
        min_bundle_size=0,
        validate=True,
    ):
        resolved_pattern = self._get_file_pattern(file_pattern, location)
        super(KlioReadFromAvro, self).__init__(
            file_pattern=resolved_pattern,
            min_bundle_size=min_bundle_size,
            validate=validate,
            use_fastavro=True,
        )
        # Swap in the Klio-aware source so each avro record comes out as
        # a serialized KlioMessage.
        self._source = _KlioFastAvroSource(
            resolved_pattern, min_bundle_size, validate=validate
        )

    def _get_file_pattern(self, file_pattern, location):
        """Resolve the effective glob from the two (partially optional) keys.

        Raises:
            KlioMissingConfiguration: if neither key is configured.
        """
        # TODO: this should be a validator in klio_core.config
        if not (file_pattern or location):
            raise KlioMissingConfiguration(
                "Must configure at least one of the following keys when "
                "reading from avro: `file_pattern`, `location`."
            )
        if file_pattern and location:
            return os.path.join(location, file_pattern)
        if file_pattern is None:
            return location
        return file_pattern
# note: fast avro is default for py3 on beam
class _KlioFastAvroSink(beam_avroio._FastAvroSink):
    # Avro sink that unwraps a serialized KlioMessage and writes only its
    # ``data.element`` bytes, wrapped in a single-field record.
    def write_record(self, writer, encoded_element):
        message = klio_pb2.KlioMessage()
        message.ParseFromString(encoded_element)
        # Shape matches KlioWriteToAvro.KLIO_SCHEMA_OBJ: one "element" field.
        record = {"element": message.data.element}
        super(_KlioFastAvroSink, self).write_record(
            writer=writer, value=record
        )
# Note of caution: In the past problems have arisen due to
# changes to internal beam classes.
# If this occurs, consider writing this as a custom PTransform
# that bundles a ``beam.Map`` with the standard ``WriteToAvro``
# Refer to ``KlioReadFromBigQuery`` as an example.
class KlioWriteToAvro(beam.io.WriteToAvro):
    """Write avro to a local directory or GCS bucket.

    ``KlioMessage.data.element`` data is parsed out
    and dumped into arvo format.

    Args:
        file_path_prefix (str): The file path to write to
        location (str): local or GCS path to write to
        schema (str): The schema to use, as returned by avro.schema.parse.
            Defaults to a parsed ``KLIO_SCHEMA_OBJ`` (a single-field
            record holding the ``KlioMessage.data.element`` bytes).
        codec (str): The codec to use for block-level compression.
            defaults to 'deflate'
        file_name_suffix (str): Suffix for the files written.
        num_shards (int): The number of files (shards) used for output.
        shard_name_template (str): template string for shard number and count
        mime_type (str): The MIME type to use for the produced files.
            Defaults to "application/x-avro"
    """

    # Default schema: a single "element" bytes field, matching what
    # _KlioFastAvroSink writes per record.
    KLIO_SCHEMA_OBJ = {
        "namespace": "klio.avro",
        "type": "record",
        "name": "KlioMessage",
        "fields": [{"name": "element", "type": "bytes"}],
    }

    def __init__(
        self,
        file_path_prefix=None,
        location=None,
        schema=None,
        codec="deflate",
        file_name_suffix="",
        num_shards=0,
        shard_name_template=None,
        mime_type="application/x-avro",
    ):
        # Fix: the default schema used to be computed in the signature
        # (`schema=parse_schema(KLIO_SCHEMA_OBJ)`), which runs once at
        # class-definition (import) time and shares one parsed-schema
        # object across every instance relying on the default. Resolve
        # it lazily per call instead. Callers passing an explicit schema
        # are unaffected; an explicit ``schema=None`` now also gets the
        # default rather than passing None through.
        if schema is None:
            schema = parse_schema(self.KLIO_SCHEMA_OBJ)
        file_path = self._get_file_path(file_path_prefix, location)
        super(KlioWriteToAvro, self).__init__(
            file_path_prefix=file_path,
            schema=schema,
            codec=codec,
            file_name_suffix=file_name_suffix,
            num_shards=num_shards,
            shard_name_template=shard_name_template,
            mime_type=mime_type,
            use_fastavro=True,
        )
        # Replace the parent's sink with the Klio-aware avro sink.
        self._sink = _KlioFastAvroSink(
            file_path,
            schema,
            codec,
            file_name_suffix,
            num_shards,
            shard_name_template,
            mime_type,
        )

    def _get_file_path(self, file_path_prefix, location):
        """Resolve the output path from the two (partially optional) keys.

        Raises:
            KlioMissingConfiguration: if neither key is configured.
        """
        # TODO: this should be a validator in klio_core.config
        if not any([file_path_prefix, location]):
            raise KlioMissingConfiguration(
                "Must configure at least one of the following keys when "
                "writing to avro: `file_path_prefix`, `location`."
            )
        if all([file_path_prefix, location]):
            file_path_prefix = os.path.join(location, file_path_prefix)
        elif file_path_prefix is None:
            file_path_prefix = location
        return file_path_prefix
|
gaybro8777/klio | cli/src/klio_cli/utils/multi_line_terminal_writer.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from contextlib import contextmanager
CLEAR_TO_END_OF_LINE = "\u001b[0K"
def _move_n_lines(to_move: int) -> None:
if to_move == 0:
return
# F to move the cursor up w/i the console; E to move
# the cursor down
ansi_code = "E" if to_move > 0 else "F"
print("\033[%d%c" % (abs(to_move), ansi_code), end="", flush=True)
class MultiLineTerminalWriter(object):
    """
    A class to manage writing multi-line output to the terminal,
    with each line tagged with a line_id. Useful for displaying
    progress of multiple concurrent items, like a multi-layer
    Docker push.
    """

    def __init__(self):
        # Maps a caller-chosen line_id to its 0-based row index,
        # in first-seen order.
        self._line_id_to_index = {}
        # Number of distinct rows written so far; the cursor is assumed
        # to rest just below the last of them.
        self._lines_printed = 0

    def emit_line(self, line_id, contents):
        """
        Emit a string at the given line_id. If this line_id has not yet
        been printed, it will be placed at the bottom of the current
        terminal output.
        """
        writing_new_line_at_bottom = line_id not in self._line_id_to_index
        if writing_new_line_at_bottom:
            self._line_id_to_index[line_id] = len(self._line_id_to_index)
        with self._at_line(self._line_id_to_index[line_id]):
            if not writing_new_line_at_bottom:
                # Overwriting an existing row: erase any leftover tail of
                # the previous (possibly longer) contents.
                contents += CLEAR_TO_END_OF_LINE
            print(contents, flush=True)

    @contextmanager
    def _at_line(self, index):
        """
        A context manager that moves the cursor to a given line, assuming
        that the cursor is already placed at the bottom of the list of lines,
        and returns the cursor to the bottom after writing one line.
        """
        distance_from_bottom = self._lines_printed - index
        if distance_from_bottom:
            _move_n_lines(-distance_from_bottom)
        # Print one line.
        yield
        # Assume that a newline was printed when we yielded.
        # If we had to move up to get to this line, we have to move down
        # by one fewer line. If we didn't have to move up to get to this
        # line, we don't need to move down at all as we're already at
        # the new bottom.
        if distance_from_bottom != 0:
            distance_from_bottom -= 1
        _move_n_lines(distance_from_bottom)
        self._lines_printed = max(self._lines_printed, index + 1)
|
gaybro8777/klio | exec/tests/unit/commands/test_profile.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import pytest
from klio_exec.commands import profile as profile_cmd
@pytest.fixture
def config(mocker):
    """A mocked config object, passed to KlioPipeline below."""
    return mocker.Mock()
@pytest.fixture
def klio_pipeline(config):
    """A KlioPipeline built from the mocked config."""
    return profile_cmd.KlioPipeline(config)
class DummyTransformGenerator(object):
    """Transform stub whose ``process`` yields two derived values."""

    def process(self, x):
        for offset in (10, 20):
            yield x + offset
class DummyTransformFunc(object):
    """Transform stub whose ``process`` returns a single derived value."""

    def process(self, x):
        return 10 + x
class DummyTransformFuncRaises(object):
    """Transform stub whose ``process`` always fails."""

    def process(self, *args):
        # Unconditional failure so callers can exercise error handling.
        raise Exception("catch me")
@pytest.fixture
def transforms():
    """The three dummy transform classes defined above, as a list."""
    return [
        DummyTransformGenerator,
        DummyTransformFunc,
        DummyTransformFuncRaises,
    ]
@pytest.mark.parametrize("filename", (None, "-", "input.txt"))
def test_smart_open(filename, mocker):
    """smart_open opens (and closes) real files; None or '-' yields stdout."""
    m_open = mocker.mock_open()
    mock_open = mocker.patch("klio_exec.commands.profile.open", m_open)
    with profile_cmd.smart_open(filename, fmode="r") as act_ret:
        pass
    if filename and filename != "-":
        mock_open.assert_called_once_with(filename, "r")
        # The context manager is responsible for closing the file it opened.
        assert act_ret.closed
    else:
        mock_open.assert_not_called()
        assert act_ret == sys.stdout
@pytest.mark.parametrize(
    "what,output_file,temp_out,exp_ret",
    (
        ("mem", "output.txt", False, "output.png"),
        ("mem", "output.txt", True, "klio_profile_mem_1234.png"),
        ("mem", "output", False, "output.png"),
    ),
)
def test_get_output_png_file(
    what, output_file, temp_out, exp_ret, klio_pipeline, monkeypatch
):
    """PNG name derives from output_file, or from `what` + timestamp if temp."""
    # Pin the timestamp so the temp-output filename is deterministic.
    monkeypatch.setattr(klio_pipeline, "_now_str", "1234")
    monkeypatch.setattr(klio_pipeline, "output_file", output_file)
    act_ret = klio_pipeline._get_output_png_file(what, temp_out)
    assert exp_ret == act_ret
@pytest.mark.parametrize(
    "output_file,plot_graph,exp_temp_output",
    ((None, True, True), (None, False, False), ("output.txt", True, False)),
)
def test_smart_temp_create(
    output_file,
    plot_graph,
    exp_temp_output,
    klio_pipeline,
    mocker,
    monkeypatch,
):
    """A temp file becomes the output only when plotting with no output_file."""
    monkeypatch.setattr(klio_pipeline, "output_file", output_file)
    mock_named_temp_file = mocker.Mock()
    mock_file = mocker.Mock()
    mock_file.name = "temp-1234"
    mock_named_temp_file.return_value = mock_file
    monkeypatch.setattr(
        profile_cmd.tempfile, "NamedTemporaryFile", mock_named_temp_file
    )
    with klio_pipeline._smart_temp_create(
        "mem", plot_graph
    ) as act_temp_output:
        pass
    assert exp_temp_output is act_temp_output
    if exp_temp_output:
        # The pipeline's output_file is redirected to the temp file.
        assert klio_pipeline.output_file == "temp-1234"
@pytest.mark.parametrize(
    "show_logs,input_file", ((False, None), (True, "input.txt"))
)
def test_get_subproc(
    show_logs, input_file, klio_pipeline, mocker, monkeypatch
):
    """_get_subproc builds the expected `klioexec profile run-pipeline` argv."""
    mock_popen = mocker.Mock()
    monkeypatch.setattr(profile_cmd.subprocess, "Popen", mock_popen)
    monkeypatch.setattr(klio_pipeline, "input_file", input_file)
    monkeypatch.setattr(klio_pipeline, "entity_ids", ("foo", "bar"))

    klio_pipeline._get_subproc(show_logs=show_logs)

    expected = ["klioexec", "profile", "run-pipeline"]
    if show_logs:
        expected += ["--show-logs"]
    if input_file:
        # Reading ids from a file takes precedence over explicit ids.
        expected += ["--input-file", input_file]
    else:
        expected += ["foo", "bar"]
    mock_popen.assert_called_once_with(expected)
@pytest.mark.parametrize("output_file", (None, "output.txt"))
def test_profile_wall_time_per_line(
    output_file, klio_pipeline, mocker, monkeypatch
):
    """Line-profiler stats go to output_file when set, else default stream."""
    monkeypatch.setattr(klio_pipeline, "output_file", output_file)
    mock_line_prof = mocker.Mock()
    monkeypatch.setattr(
        klio_pipeline, "_get_cpu_line_profiler", mock_line_prof
    )
    mock_run_pipeline = mocker.Mock()
    monkeypatch.setattr(klio_pipeline, "_run_pipeline", mock_run_pipeline)
    klio_pipeline._profile_wall_time_per_line(iterations=1)
    if output_file:
        mock_line_prof.return_value.print_stats.assert_called_once_with(
            klio_pipeline.output_file, output_unit=1
        )
    else:
        mock_line_prof.return_value.print_stats.assert_called_once_with(
            output_unit=1
        )
@pytest.mark.parametrize("get_maximum,exp_fmode", ((True, "w"), (False, "a")))
def test_profile_memory_per_line(
    get_maximum, exp_fmode, klio_pipeline, mocker, monkeypatch
):
    """Output is truncated ("w") and results shown only when get_maximum."""
    mock_memory_profiler = mocker.Mock()
    monkeypatch.setattr(
        klio_pipeline, "_get_memory_line_profiler", mock_memory_profiler
    )
    mock_memory_wrapper = mocker.Mock()
    monkeypatch.setattr(
        klio_pipeline, "_get_memory_line_wrapper", mock_memory_wrapper
    )
    mock_smart_open = mocker.patch.object(profile_cmd, "smart_open")
    mock_smart_open.return_value.__enter__.return_value = "opened_file"
    mock_run_pipeline = mocker.Mock()
    monkeypatch.setattr(klio_pipeline, "_run_pipeline", mock_run_pipeline)
    mock_show_results = mocker.Mock()
    monkeypatch.setattr(
        profile_cmd.memory_profiler, "show_results", mock_show_results
    )
    klio_pipeline._profile_memory_per_line(get_maximum=get_maximum)
    mock_memory_profiler.assert_called_once_with()
    mock_memory_wrapper.assert_called_once_with(
        mock_memory_profiler.return_value, get_maximum
    )
    mock_smart_open.assert_called_once_with(
        klio_pipeline.output_file, fmode=exp_fmode
    )
    # The opened stream is stashed on the pipeline for later writes.
    assert "opened_file" == klio_pipeline._stream
    mock_run_pipeline.assert_called_once_with()
    if get_maximum:
        mock_show_results.assert_called_once_with(
            mock_memory_profiler.return_value, stream="opened_file"
        )
    else:
        mock_show_results.assert_not_called()
@pytest.mark.parametrize("plot_graph", (True, False))
def test_profile_memory(plot_graph, klio_pipeline, mocker, monkeypatch):
    """_profile_memory returns a PNG path only when plotting is requested."""
    mock_get_subproc = mocker.Mock()
    monkeypatch.setattr(klio_pipeline, "_get_subproc", mock_get_subproc)
    mock_smart_temp_create = mocker.patch.object(
        klio_pipeline, "_smart_temp_create"
    )
    mock_smart_temp_create.return_value.__enter__.return_value = True
    mock_smart_open = mocker.patch.object(profile_cmd, "smart_open")
    mock_smart_open.return_value.__enter__.return_value.name = "bar"
    mock_memory_usage = mocker.Mock()
    monkeypatch.setattr(
        profile_cmd.memory_profiler, "memory_usage", mock_memory_usage
    )
    mock_get_output_png = mocker.Mock()
    monkeypatch.setattr(
        klio_pipeline, "_get_output_png_file", mock_get_output_png
    )
    mock_plot = mocker.Mock()
    monkeypatch.setattr(profile_cmd.profile_utils, "plot", mock_plot)
    kwargs = {"plot_graph": plot_graph}
    act_ret = klio_pipeline._profile_memory(**kwargs)
    if plot_graph:
        mock_get_output_png.assert_called_once_with("memory", True)
        assert mock_get_output_png.return_value == act_ret
    else:
        assert act_ret is None
@pytest.mark.parametrize("plot_graph", (True, False))
def test_profile_cpu(plot_graph, klio_pipeline, mocker, monkeypatch):
    """_profile_cpu returns a PNG path only when plotting is requested."""
    mock_get_subproc = mocker.Mock()
    monkeypatch.setattr(klio_pipeline, "_get_subproc", mock_get_subproc)
    mock_smart_temp_create = mocker.patch.object(
        klio_pipeline, "_smart_temp_create"
    )
    mock_smart_temp_create.return_value.__enter__.return_value = True
    mock_smart_open = mocker.patch.object(profile_cmd, "smart_open")
    mock_smart_open.return_value.__enter__.return_value.name = "bar"
    mock_get_cpu_usage = mocker.Mock()
    monkeypatch.setattr(
        profile_cmd.cpu_utils, "get_cpu_usage", mock_get_cpu_usage
    )
    mock_get_output_png = mocker.Mock()
    monkeypatch.setattr(
        klio_pipeline, "_get_output_png_file", mock_get_output_png
    )
    mock_plot = mocker.Mock()
    monkeypatch.setattr(profile_cmd.profile_utils, "plot", mock_plot)
    kwargs = {"plot_graph": plot_graph}
    act_ret = klio_pipeline._profile_cpu(**kwargs)
    if plot_graph:
        mock_get_output_png.assert_called_once_with("cpu", True)
        assert mock_get_output_png.return_value == act_ret
    else:
        assert act_ret is None
@pytest.mark.parametrize("input_file", (None, "input.txt"))
def test_get_io_mapper(
    input_file, klio_pipeline, transforms, mocker, monkeypatch
):
    """Input comes from a file when configured; otherwise from entity ids."""
    entity_ids = ["id1", "id2", "id3"]
    # Expected elements when reading directly from entity ids.
    serialized_messages = [
        klio_pipeline._entity_id_to_message(i).SerializeToString()
        for i in entity_ids
    ]
    if input_file:
        monkeypatch.setattr(klio_pipeline, "input_file", input_file)
    else:
        monkeypatch.setattr(klio_pipeline, "entity_ids", entity_ids)
    mock_read_from_text = mocker.Mock()
    monkeypatch.setattr(
        profile_cmd.beam.io, "ReadFromText", mock_read_from_text
    )
    mock_create = mocker.Mock()
    monkeypatch.setattr(profile_cmd.beam, "Create", mock_create)
    mock_flatmap = mocker.Mock()
    mock_transform = mocker.MagicMock()
    mock_flatmap.return_value = mock_transform
    monkeypatch.setattr(profile_cmd.beam, "FlatMap", mock_flatmap)
    mapper = klio_pipeline._get_io_mapper(1)
    assert isinstance(mapper, profile_cmd.StubIOMapper)
    if input_file:
        mock_read_from_text.assert_called_once_with(klio_pipeline.input_file)
    else:
        mock_create.assert_called_once_with(serialized_messages)
def test_run_pipeline(klio_pipeline, mocker, monkeypatch):
    """_run_pipeline wires the IO mapper and user config into the pipeline."""
    mock_io_mapper = mocker.Mock()
    monkeypatch.setattr(klio_pipeline, "_get_io_mapper", mock_io_mapper)
    mock_user_pipeline = mocker.Mock()
    monkeypatch.setattr(
        klio_pipeline, "_get_user_pipeline", mock_user_pipeline
    )
    mock_user_config = mocker.Mock()
    monkeypatch.setattr(
        klio_pipeline, "_get_user_config", mock_user_config
    )

    klio_pipeline._run_pipeline()

    mock_io_mapper.assert_called_once_with(1)
    mock_user_config.assert_called_once_with()
    mock_user_pipeline.assert_called_once_with(
        mock_user_config.return_value, mock_io_mapper.return_value
    )
    mock_user_pipeline.return_value.run.assert_called_once_with()
@pytest.mark.parametrize(
    "what", ("run", "cpu", "timeit", "memory", "memory_per_line", "foo")
)
def test_profile(what, klio_pipeline, mocker, monkeypatch):
    """profile() dispatches to exactly one handler per `what` value.

    Unknown values (e.g. "foo") invoke no handler at all.
    """
    handlers = {
        "run": mocker.Mock(),
        "cpu": mocker.Mock(),
        "memory": mocker.Mock(),
        "memory_per_line": mocker.Mock(),
        "timeit": mocker.Mock(),
    }
    monkeypatch.setattr(klio_pipeline, "_run_pipeline", handlers["run"])
    monkeypatch.setattr(klio_pipeline, "_profile_cpu", handlers["cpu"])
    monkeypatch.setattr(
        klio_pipeline, "_profile_memory", handlers["memory"]
    )
    monkeypatch.setattr(
        klio_pipeline,
        "_profile_memory_per_line",
        handlers["memory_per_line"],
    )
    monkeypatch.setattr(
        klio_pipeline,
        "_profile_wall_time_per_line",
        handlers["timeit"],
    )

    klio_pipeline.profile(what)

    for key, handler in handlers.items():
        if key == what:
            handler.assert_called_once_with()
        else:
            handler.assert_not_called()
|
gaybro8777/klio | lib/tests/unit/metrics/test_stackdriver.py | <filename>lib/tests/unit/metrics/test_stackdriver.py
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from googleapiclient import errors as gapi_errors
from klio.metrics import stackdriver as sd_metrics
@pytest.fixture
def mock_stackdriver_client(mocker, monkeypatch):
    """Patch ``discovery.build`` so it always returns a mock client."""
    mock_client = mocker.Mock()
    monkeypatch.setattr(
        sd_metrics.discovery, "build", lambda x, y: mock_client
    )
    return mock_client
@pytest.fixture
def client(klio_config, monkeypatch, mocker, mock_stackdriver_client):
    # Depends on mock_stackdriver_client for its side effect (patching
    # discovery.build) so the real API is never hit. klio_config is
    # presumably provided by a shared conftest — not defined here.
    return sd_metrics.StackdriverLogMetricsClient(klio_config)
@pytest.fixture
def mock_discovery_build(mocker, monkeypatch):
    """Patch ``discovery.build`` with a Mock so calls can be asserted."""
    mock_discovery_build = mocker.Mock()
    monkeypatch.setattr(sd_metrics.discovery, "build", mock_discovery_build)
    return mock_discovery_build
@pytest.fixture
def counter():
    """A StackdriverLogMetricsCounter with fixed name/job/project values."""
    kwargs = {
        "name": "my-counter",
        "job_name": "test-job",
        "project": "test-project",
    }
    return sd_metrics.StackdriverLogMetricsCounter(**kwargs)
def test_stackdriver_client(client, mock_discovery_build):
    """The underlying API client is built lazily and cached per thread."""
    # sanity check
    assert not getattr(client._thread_local, "stackdriver_client", None)
    # Accessing the property triggers the (cached) build.
    client._stackdriver_client
    assert getattr(client._thread_local, "stackdriver_client", None)
    mock_discovery_build.assert_called_once_with("logging", "v2")
@pytest.mark.parametrize("value", (None, 5))
def test_client_counter(client, value, mocker, monkeypatch, caplog):
    """counter() initializes the metric; a starting value logs a warning."""
    mock_init_metric = mocker.Mock()
    monkeypatch.setattr(
        sd_metrics.StackdriverLogMetricsCounter,
        "_init_metric",
        mock_init_metric,
    )
    kwargs = {}
    if value:
        kwargs["value"] = value
    counter = client.counter(name="my-counter", **kwargs)
    mock_init_metric.assert_called_once_with(client._stackdriver_client)
    assert isinstance(counter, sd_metrics.StackdriverLogMetricsCounter)
    if value:
        # Passing a non-default starting value is warned about.
        assert 1 == len(caplog.records)
        assert "WARNING" == caplog.records[0].levelname
def test_client_gauge(client, caplog):
    """gauge() returns a gauge object and logs exactly one warning."""
    gauge = client.gauge(name="my-gauge")
    assert isinstance(gauge, sd_metrics.StackdriverLogMetricsGauge)
    assert 1 == len(caplog.records)
    assert "WARNING" == caplog.records[0].levelname
def test_client_timer(client, caplog):
    """timer() returns a timer object and logs exactly one warning."""
    # Fix: local was previously named `gauge` — a copy-paste leftover
    # from test_client_gauge above.
    timer = client.timer(name="my-timer")
    assert isinstance(timer, sd_metrics.StackdriverLogMetricsTimer)
    assert 1 == len(caplog.records)
    assert "WARNING" == caplog.records[0].levelname
def test_counter_get_filter(counter):
    """_get_filter targets Dataflow worker logs mentioning the counter name."""
    expected = (
        'resource.type="dataflow_step" '
        'logName="projects/test-project/logs/'
        'dataflow.googleapis.com%2Fworker" '
        'jsonPayload.message:"[my-counter]"'
    )
    assert counter._get_filter() == expected
@pytest.mark.parametrize("raises", (False, True))
def test_counter_init_metric(raises, counter, mock_stackdriver_client, caplog):
    """_init_metric issues the create request; failures are logged, not raised."""
    _mock_projects = mock_stackdriver_client.projects.return_value
    mock_req = _mock_projects.metrics.return_value.create
    if raises:
        mock_req.return_value.execute.side_effect = Exception("foo")
    counter._init_metric(mock_stackdriver_client)
    mock_req.assert_called_once_with(parent=counter.parent, body=counter.body)
    mock_req.return_value.execute.assert_called_once_with()
    if raises:
        assert 1 == len(caplog.records)
        assert "ERROR" == caplog.records[0].levelname
@pytest.mark.parametrize(
    "status,exp_log_level", ((409, "DEBUG"), (403, "ERROR"))
)
def test_counter_init_metric_api_error(
    status, exp_log_level, counter, mock_stackdriver_client, mocker, caplog
):
    """HTTP 409 responses are logged at DEBUG; other HTTP errors at ERROR."""
    mock_resp = mocker.Mock()
    mock_resp.status = status
    error = gapi_errors.HttpError(resp=mock_resp, content=b"foo")
    _mock_projects = mock_stackdriver_client.projects.return_value
    mock_req = _mock_projects.metrics.return_value.create
    mock_req.return_value.execute.side_effect = error
    counter._init_metric(mock_stackdriver_client)
    mock_req.assert_called_once_with(parent=counter.parent, body=counter.body)
    mock_req.return_value.execute.assert_called_once_with()
    assert 1 == len(caplog.records)
    assert exp_log_level == caplog.records[0].levelname
|
gaybro8777/klio | audio/src/klio_audio/transforms/io.py | <gh_stars>100-1000
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import io
import os
from apache_beam.io.gcp import gcsio
from klio.transforms import decorators as tfm_decorators
from klio_audio import decorators
from klio_audio.transforms import _base
# TODO: handle multiple data inputs
class GcsLoadBinary(_base.KlioAudioBaseDoFn):
    """Download binary file from GCS into memory.

    This transform uses the ``job_config.data.inputs`` configuration in
    a job's ``klio-job.yaml`` file.

    This transform uses Apache's native :class:`GCS client
    <apache_beam.io.gcp.gcsio.GcsIO>` and expects a
    :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>`, and returns with a payload of the
    downloaded binary as a file-like bytes object.

    Example:

    .. code-block:: python

        # run.py
        from klio_audio.transforms import audio
        from klio_audio.transforms import io

        def run(in_pcol, job_config):
            return (
                in_pcol
                | io.GcsLoadBinary()
                | audio.LoadAudio()
                # other transforms
            )

    .. code-block:: yaml

        # klio-job.yaml
        # <-- snip -->
        job_config:
          events:
            # <-- snip -->
          data:
            inputs:
              - type: file
                location: gs://my-bucket/input
                file_suffix: .ogg
        # <-- snip -->
    """

    def setup(self):
        """Create the GCS client used for downloads."""
        self.client = gcsio.GcsIO()

    @tfm_decorators._handle_klio
    @decorators.handle_binary(skip_load=True)
    def process(self, item):
        """Download ``<element><file_suffix>`` from the configured input.

        Yields:
            io.BytesIO: the downloaded file contents, fully in memory.
        """
        element = item.element.decode("utf-8")
        input_data_config = self._klio.config.job_config.data.inputs
        # raise a runtime error so it actually crashes klio/beam rather than
        # just continue processing elements
        if len(input_data_config) == 0:
            raise RuntimeError(
                "The `klio_audio.transforms.io.GcsLoadBinary` transform "
                "requires a data input to be configured in "
                "`klio-job.yaml::job_config.data.inputs`."
            )
        # raise a runtime error so it actually crashes klio/beam rather than
        # just continue processing elements
        if len(input_data_config) > 1:
            raise RuntimeError(
                "The `klio_audio.transforms.io.GcsLoadBinary` transform "
                "does not support multiple configured inputs in "
                "`klio-job.yaml::job_config.data.inputs`."
            )
        input_data = input_data_config[0]
        # Normalize the configured suffix so it always starts with a dot.
        file_suffix = input_data.file_suffix
        if not file_suffix.startswith("."):
            file_suffix = "." + file_suffix
        filename = element + file_suffix
        input_path = os.path.join(input_data.location, filename)
        self._klio.logger.debug(
            "Downloading {} from {}".format(filename, input_data.location)
        )
        with self.client.open(input_path, "rb") as source:
            out = io.BytesIO(source.read())
        self._klio.logger.debug("Downloaded {}".format(filename))
        yield out
# TODO: handle multiple data outputs
class GcsUploadPlot(_base.KlioAudioBaseDoFn):
    """Upload a matplotlib :class:`figure <matplotlib.figure.Figure>` to GCS.

    This transform uses the ``job_config.data.outputs`` configuration in
    a job's ``klio-job.yaml`` file.

    This transform wraps :class:`savefig <matplotlib.figure.Figure>` and
    expects a :class:`PCollection <apache_beam.pvalue.PCollection>` of
    :ref:`KlioMessages <klio-message>` where the payload is a
    :class:`matplotlib.figure.Figure` and returns with a payload of the
    uploaded file location as ``bytes``.

    Example:

    .. code-block:: python

        # run.py
        from klio_audio.transforms import audio
        from klio_audio.transforms import io

        def run(in_pcol, job_config):
            return (
                in_pcol
                | io.GcsLoadBinary()
                | audio.LoadAudio()
                | audio.GetSpec()
                | audio.SpecToPlot()
                | io.GcsUploadPlot()
            )

    .. code-block:: yaml

        # klio-job.yaml
        # <-- snip -->
        job_config:
          # <-- snip -->
          data:
            inputs:
              # <-- snip -->
            outputs:
              - type: file
                location: gs://my-bucket/output
                file_suffix: .png

    Args:
        prefix (str): filename prefix. Default: ``""``
        suffix (str): filename suffix. Default: ``""``
        file_format (str): plot format (e.g. png). Defaults to the file
            suffix as configured in
            ``klio-job.yaml::job_config.data.outputs[].file_suffix``.
        plt_kwargs (dict): keyword arguments to pass to
            :class:`savefig <matplotlib.figure.Figure>`.
    """

    def __init__(self, prefix="", suffix="", file_format=None, **plt_kwargs):
        # NOTE(review): super().__init__() is not called here; confirm
        # the base DoFn does not require initialization.
        self.prefix = prefix
        self.suffix = suffix
        self.file_format = file_format
        self.plt_kwargs = plt_kwargs

    def setup(self):
        """Create the GCS client used for uploads."""
        self.client = gcsio.GcsIO()

    @tfm_decorators._handle_klio
    @decorators.handle_binary(skip_dump=True)
    def process(self, item):
        """Save the figure payload and upload it to the configured output.

        Yields:
            str: the GCS/local path the plot was uploaded to.
        """
        element = item.element.decode("utf-8")
        output_data_config = self._klio.config.job_config.data.outputs
        # raise a runtime error so it actually crashes klio/beam rather than
        # just continue processing elements
        if len(output_data_config) == 0:
            raise RuntimeError(
                "The `klio_audio.transforms.io.GcsUploadPlot` transform "
                "requires a data output to be configured in "
                "`klio-job.yaml::job_config.data.outputs`."
            )
        # raise a runtime error so it actually crashes klio/beam rather than
        # just continue processing elements
        if len(output_data_config) > 1:
            raise RuntimeError(
                "The `klio_audio.transforms.io.GcsUploadPlot` transform "
                "does not support multiple configured outputs in "
                "`klio-job.yaml::job_config.data.outputs`."
            )
        output_data = output_data_config[0]
        # Normalize the configured suffix so it always starts with a dot.
        file_suffix = output_data.file_suffix
        if not file_suffix.startswith("."):
            file_suffix = "." + file_suffix
        filename = self.prefix + element + self.suffix + file_suffix
        output_path = os.path.join(output_data.location, filename)
        # Render the figure into an in-memory buffer, then upload it.
        source = io.BytesIO()
        fig = item.payload
        fig_format = self.file_format or file_suffix.lstrip(".")
        self._klio.logger.debug(
            "Saving plot as {} for {}".format(fig_format, element)
        )
        fig.savefig(source, format=fig_format, **self.plt_kwargs)
        with self.client.open(output_path, "wb") as out:
            out.write(source.getvalue())
        self._klio.logger.debug("Saved plot to {}".format(output_path))
        yield output_path
|
gaybro8777/klio | exec/src/klio_exec/commands/audit_steps/numpy_broken_blas.py | <reponame>gaybro8777/klio
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import platform
from packaging import version
from klio_exec.commands.audit_steps import base
class NumPyBrokenBLASUsage(base.BaseKlioAuditStep):
    """Detect numpy version for potential threading issues."""

    AUDIT_STEP_NAME = "numpy_broken_blas"
    MINIMUM_NUMPY_VERSION = version.parse("1.16.3")

    @staticmethod
    def get_description():
        """Explain why numpy older than 1.16.3 is risky with threads."""
        # tmp turn off black formatting to skip long URL
        # fmt: off
        return (
            "The 1.16.3 version of numpy links against a newer version of "
            "OpenBLAS that fixes some important threading issues - notably, "
            "the `dot` function that calls into OpenBLAS' _dgemv function, "
            "which on older versions, is non-reentrant and can cause both "
            "incorrect results and deadlocks.\n\n"
            "See:\n"
            "\t- https://github.com/numpy/numpy/blob/2f70544179e24b0ebc0263111f36e6450bbccf94/doc/source/release/1.16.3-notes.rst#numpy-1163-release-notes\n"  # noqa: E501
            "\t- https://github.com/xianyi/OpenBLAS/issues/1844\n"
            "\t- https://github.com/numpy/numpy/issues/12394\n"
            "\t- https://github.com/xianyi/OpenBLAS/pull/1865\n"
        )
        # fmt: on

    @property
    def _is_job_single_threaded_per_container(self):
        # With a single worker thread, OpenBLAS reentrancy cannot bite.
        experiments = self.klio_config.pipeline_options.experiments
        return "worker_threads=1" in experiments

    @staticmethod
    def _get_current_numpy_version():
        # numpy may legitimately be absent from the job's dependencies.
        try:
            import numpy
        except ImportError:
            return None
        return version.parse(numpy.version.short_version)

    def after_tests(self):
        """Emit an audit error for thread-unsafe numpy on Linux."""
        if self._is_job_single_threaded_per_container:
            return
        # The broken OpenBLAS builds only affect Linux.
        if platform.system().lower() != "linux":
            return
        detected = self._get_current_numpy_version()
        if detected is None:
            return
        if detected < NumPyBrokenBLASUsage.MINIMUM_NUMPY_VERSION:
            msg = (
                "Multiple threads are used, but a NumPy version older than %s "
                "was detected. Older versions of NumPy are known to be "
                "thread-unsafe due to a broken OpenBLAS dependency on Linux."
            ) % NumPyBrokenBLASUsage.MINIMUM_NUMPY_VERSION
            self.emit_error(msg)


# NOTE(review): module-level alias; the audit runner appears to discover
# steps via `_init` — confirm against the step loader.
_init = NumPyBrokenBLASUsage
|
gaybro8777/klio | cli/tests/commands/job/utils/fixtures/expected/transforms.py | <filename>cli/tests/commands/job/utils/fixtures/expected/transforms.py
"""
Notice: the code below is just an example of what can be done.
Feel free to import what's needed, including third-party libraries or
other self-written modules.
"""
import apache_beam as beam
from klio.transforms import decorators
class HelloKlio(beam.DoFn):
    """A simple DoFn."""

    @decorators.handle_klio
    def process(self, data):
        """Main entrypoint to a transform.
        Any errors raised here (explicitly or not), Klio will catch and
        log + drop the Klio message.
        For information on the Klio message, see
        https://docs.klio.io/en/latest/userguide/pipeline/message.html
        For information on yielding other information other than ``data``, see
        https://docs.klio.io/en/latest/userguide/pipeline/state.html
        Args:
            data (KlioMessage.data): The data of the Klio message, which
                contains two attributes: ``data.element`` and
                ``data.payload``.
        Yields:
            KlioMessage.data: the same Klio message data object received.
        """
        element = data.element.decode("utf-8")
        input_topic = self._klio.config.job_config.events.inputs[0].topic
        self._klio.logger.info(
            "Received '%s' from Pub/Sub topic '%s'" % (element, input_topic)
        )
        yield data
|
gaybro8777/klio | core/src/klio_core/config/_io.py | <reponame>gaybro8777/klio<gh_stars>1-10
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import enum
import importlib
import logging
import attr
logger = logging.getLogger("klio")
class KlioIODirection(enum.Enum):
    """Direction of an IO configuration: job input vs. job output."""
    INPUT = 1
    OUTPUT = 2
class KlioIOType(enum.Enum):
    """Kind of IO: event (messages triggering work) vs. data (payloads)."""
    EVENT = 1
    DATA = 2
class KlioJobMode(enum.Enum):
    """Execution mode of a Klio job."""
    STREAMING = 1
    BATCH = 2
def supports(*type_directions):
    """Class decorator declaring which IO types/directions a config handles.

    Many config classes can serve both input and output, and both event
    and data IO. Passing the relevant :class:`KlioIOType` and
    :class:`KlioIODirection` members here records that support on the
    class so Klio can pick the right class when parsing config.
    """

    def wrapper(cls):
        # IMPORTANT - rebind fresh list objects so each decorated class
        # owns its own lists; otherwise all subclasses would share (and
        # mutate) the same class-level lists.
        cls.SUPPORTED_TYPES = list(cls.SUPPORTED_TYPES)
        cls.SUPPORTED_DIRECTIONS = list(cls.SUPPORTED_DIRECTIONS)
        for entry in type_directions:
            if isinstance(entry, KlioIOType):
                if entry not in cls.SUPPORTED_TYPES:
                    cls.SUPPORTED_TYPES.append(entry)
            elif isinstance(entry, KlioIODirection):
                if entry not in cls.SUPPORTED_DIRECTIONS:
                    cls.SUPPORTED_DIRECTIONS.append(entry)
        return cls

    return wrapper
@attr.attrs
class KlioIOConfig(object):
    """Base class for all event/data IO configuration objects.

    Subclasses declare their supported types/directions via the
    ``@supports`` decorator and define a class-level ``name`` (the
    user-facing ``type`` key in ``klio-job.yaml``).
    """
    io_type = attr.attrib(type=KlioIOType)
    io_direction = attr.attrib(type=KlioIODirection)
    # these must be filled in by subclasses so Klio knows what supports what
    SUPPORTED_TYPES = []
    SUPPORTED_DIRECTIONS = []
    # NOTICE! any new `attr.attrib`s added to this class should be added in
    # `ATTRIBS_TO_SKIP` below in order to be filtered out when calling
    # `as_dict`, leaving only `attr.attrib`s related to the specific
    # config instance
    ATTRIBS_TO_SKIP = ["io_type", "io_direction"]
    @classmethod
    def supports_type(cls, io_type):
        """Return True if this config class handles the given KlioIOType."""
        return io_type in cls.SUPPORTED_TYPES
    @classmethod
    def supports_direction(cls, io_direction):
        """Return True if this class handles the given KlioIODirection."""
        return io_direction in cls.SUPPORTED_DIRECTIONS
    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        """Build an instance from parsed YAML, dropping the `type` key.

        The `type` key selects the class and is not an attrs attribute.
        """
        copy = config_dict.copy()
        if "type" in copy:
            del copy["type"]
        return cls(*args, **copy, **kwargs)
    def _as_dict(self):
        """Return a dictionary-representation of the config object.
        Useful for instantiating objects where the config keys map 1:1
        to the object init arguments/keyword arguments, i.e.
        KlioReadFromBigQuery(**config.job_config.events.inputs[0].as_dict())
        """
        # filter parameter is used as "to include"/"filter-in",
        # not "to exclude"/"filter-out"
        return attr.asdict(
            self, filter=lambda x, _: x.name not in self.ATTRIBS_TO_SKIP
        )
    def as_dict(self):
        """Return the user-facing dict form, with `type` first.

        `self.name` is the class-level `type` identifier defined by
        subclasses (e.g. "pubsub", "gcs").
        """
        config_dict = self._as_dict()
        # since dicts preserve order by default in py3, let's force
        # type to be first - particularly helpful/useful for dumping
        # config via `klio job config show`
        copy = {"type": self.name}
        copy.update(config_dict)
        return copy
    def to_io_kwargs(self):
        """Return kwargs suitable for the underlying IO transform."""
        return self._as_dict()
@attr.attrs(frozen=True)
class IOFlags(object):
    """Immutable triple describing one IO slot of a job.

    NOTE(review): appears to act as a lookup key when matching config
    classes to a job's IO — confirm against callers.
    """
    io_direction = attr.attrib(type=KlioIODirection)
    io_type = attr.attrib(type=KlioIOType)
    job_mode = attr.attrib(type=KlioJobMode)
@attr.attrs(frozen=True)
class KlioEventInput(KlioIOConfig):
    """Base config for event inputs.

    Adds the ``skip_klio_read`` flag, which tells Klio not to attach its
    own read transform for this input.
    """

    skip_klio_read = attr.attrib(type=bool)

    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        # attrs forbids attributes without defaults after ones that have
        # defaults, so the default for `skip_klio_read` is injected here
        # rather than declared on the attribute itself.
        with_defaults = dict(config_dict)
        with_defaults.setdefault("skip_klio_read", False)
        return super().from_dict(with_defaults, *args, **kwargs)

    def to_io_kwargs(self):
        # `skip_klio_read` is Klio-internal and not a valid kwarg for the
        # underlying IO transform.
        io_kwargs = super().to_io_kwargs()
        io_kwargs.pop("skip_klio_read", None)
        return io_kwargs
@attr.attrs(frozen=True)
class KlioEventOutput(KlioIOConfig):
    """Base config for event outputs.

    Adds the ``skip_klio_write`` flag, which tells Klio not to attach its
    own write transform for this output.
    """

    skip_klio_write = attr.attrib(type=bool)

    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        # attrs forbids attributes without defaults after ones that have
        # defaults, so the default for `skip_klio_write` is injected here
        # rather than declared on the attribute itself.
        with_defaults = dict(config_dict)
        with_defaults.setdefault("skip_klio_write", False)
        return super().from_dict(with_defaults, *args, **kwargs)

    def to_io_kwargs(self):
        # `skip_klio_write` is Klio-internal and not a valid kwarg for the
        # underlying IO transform.
        io_kwargs = super().to_io_kwargs()
        io_kwargs.pop("skip_klio_write", None)
        return io_kwargs
@attr.attrs(frozen=True)
class KlioDataIOConfig(KlioIOConfig):
    """Base config for data IO.

    Adds the ``skip_klio_existence_check`` flag, which disables Klio's
    built-in data existence check for this IO.
    """

    skip_klio_existence_check = attr.attrib(type=bool)

    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        # attrs forbids attributes without defaults after ones that have
        # defaults, so the default for `skip_klio_existence_check` is
        # injected here rather than declared on the attribute itself.
        with_defaults = dict(config_dict)
        with_defaults.setdefault("skip_klio_existence_check", False)
        return super().from_dict(with_defaults, *args, **kwargs)

    def to_io_kwargs(self):
        # `skip_klio_existence_check` is Klio-internal and not a valid
        # kwarg for the underlying IO transform.
        io_kwargs = super().to_io_kwargs()
        io_kwargs.pop("skip_klio_existence_check", None)
        return io_kwargs
@attr.attrs(frozen=True)
class KlioPubSubConfig(object):
    """Shared attributes for Pub/Sub event IO (`type: pubsub`)."""

    name = "pubsub"
    topic = attr.attrib(type=str)

    @staticmethod
    def _from_dict(config_dict):
        # Drop the legacy `data_location` key before attrs parsing.
        cleaned = dict(config_dict)
        cleaned.pop("data_location", None)
        return cleaned
@supports(KlioIODirection.INPUT, KlioIOType.EVENT)
@attr.attrs(frozen=True)
class KlioPubSubEventInput(KlioEventInput, KlioPubSubConfig):
    """Event input consumed from Pub/Sub (`type: pubsub`).

    Requires at least one of `topic` or `subscription`.
    """
    subscription = attr.attrib(type=str)
    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        """Build an instance from parsed YAML, defaulting missing keys."""
        config_dict = super()._from_dict(config_dict)
        # work-around with attrs - since attrs will not allow attributes
        # to be defined without a default after those that have defaults,
        # we're inserting the default value here if the user doesn't have
        # it already in their config
        if "topic" not in config_dict:
            config_dict["topic"] = None
        if "subscription" not in config_dict:
            config_dict["subscription"] = None
        return super().from_dict(config_dict, *args, **kwargs)
    @subscription.validator
    def __assert_topic_subscription(self, attribute, value):
        # either topic or subscription is required
        if not self.topic and not self.subscription:
            raise ValueError("One of 'topic', 'subscription' required")
    def to_io_kwargs(self):
        """Return transform kwargs with only one of topic/subscription."""
        kwargs = super().to_io_kwargs()
        # pubsub only allows either topic or subscription, not both. We'll
        # prioritize subscription
        if kwargs["topic"] is not None and kwargs["subscription"] is None:
            kwargs.pop("subscription", None)
        else:
            kwargs.pop("topic", None)
        return kwargs
@supports(KlioIODirection.OUTPUT, KlioIOType.EVENT)
@attr.attrs(frozen=True)
class KlioPubSubEventOutput(KlioEventOutput, KlioPubSubConfig):
    """Event output published to a Pub/Sub topic (`type: pubsub`)."""

    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        # Strip legacy keys (e.g. `data_location`) before attrs parsing.
        cleaned = super()._from_dict(config_dict)
        return super().from_dict(cleaned, *args, **kwargs)
class KlioFileConfig(object):
    """Mixin providing the config `type` identifier for file-based IO."""
    name = "file"
@attr.attrs(frozen=True)
class KlioReadFileConfig(KlioEventInput, KlioFileConfig):
    """Event input read from a file (`type: file`).

    The user-facing config key is `location`; internally it is stored as
    `file_pattern`, matching the kwarg of file-based read transforms.
    """

    file_pattern = attr.attrib(type=str)

    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        """Build an instance from parsed YAML, mapping `location`.

        Raises:
            KeyError: if the required `location` key is missing.
        """
        if "location" in config_dict:
            copy = config_dict.copy()
            copy["file_pattern"] = copy.pop("location", None)
            return super().from_dict(copy, *args, **kwargs)
        try:
            return super().from_dict(config_dict, *args, **kwargs)
        except TypeError as e:
            if "file_pattern" in str(e):
                raise KeyError(
                    "I/O configuration for `type: file` is missing 'location' "
                    "key."
                ) from e
            # Bug fix: unrelated TypeErrors were previously swallowed here
            # (the method fell through and implicitly returned None).
            raise

    def as_dict(self):
        """Return the dict form using the user-facing `location` key."""
        config_dict = super().as_dict()
        copy = config_dict.copy()
        copy["location"] = copy.pop("file_pattern", None)
        return copy
@attr.attrs(frozen=True)
class KlioWriteFileConfig(KlioEventOutput, KlioFileConfig):
    """Event output written to a file (`type: file`).

    The user-facing config key is `location`; internally it is stored as
    `file_path_prefix`, matching the kwarg of file-based write transforms.
    """

    file_path_prefix = attr.attrib(type=str)

    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        if "location" not in config_dict:
            return super().from_dict(config_dict, *args, **kwargs)
        renamed = dict(config_dict)
        renamed["file_path_prefix"] = renamed.pop("location", None)
        return super().from_dict(renamed, *args, **kwargs)

    def as_dict(self):
        """Return the dict form using the user-facing `location` key."""
        renamed = dict(super().as_dict())
        renamed["location"] = renamed.pop("file_path_prefix", None)
        return renamed
@supports(KlioIODirection.INPUT, KlioIOType.EVENT)
@attr.attrs(frozen=True)
class KlioReadFileEventConfig(KlioReadFileConfig):
    """File-based event input; all behavior lives in the parent class."""
    pass
@supports(KlioIODirection.OUTPUT, KlioIOType.EVENT)
@attr.attrs(frozen=True)
class KlioWriteFileEventConfig(KlioWriteFileConfig):
    """File-based event output; all behavior lives in the parent class."""
    pass
@attr.attrs(frozen=True)
@supports(KlioIODirection.INPUT, KlioIOType.DATA)
class KlioFileInputDataConfig(KlioDataIOConfig, KlioFileConfig):
    """Data input read from local files (`type: file`)."""
    location = attr.attrib(type=str)
    # `ping` / `file_suffix` are optional knobs for the existence check
    ping = attr.attrib(type=bool, default=False)
    file_suffix = attr.attrib(type=str, default="")
@attr.attrs(frozen=True)
@supports(KlioIODirection.OUTPUT, KlioIOType.DATA)
class KlioFileOutputDataConfig(KlioDataIOConfig, KlioFileConfig):
    """Data output written to local files (`type: file`)."""
    location = attr.attrib(type=str)
    file_suffix = attr.attrib(type=str, default="")
    # `force` presumably re-processes existing output — TODO confirm
    force = attr.attrib(type=bool, default=False)
class KlioAvroConfig(object):
    """Mixin providing the config `type` identifier for Avro IO."""
    name = "avro"
@attr.attrs(frozen=True)
class KlioReadAvroConfig(KlioAvroConfig):
    """Attributes for reading Avro files."""
    # TODO: Add validation
    # Potentially could set a general file pattern default, i.e. *.avro, too
    # but either file_pattern or location must be set
    file_pattern = attr.attrib(type=str, default=None)
    location = attr.attrib(type=str, default=None)
    min_bundle_size = attr.attrib(type=int, default=0)
    validate = attr.attrib(type=bool, default=True)
@supports(KlioIODirection.INPUT, KlioIOType.EVENT)
@attr.attrs(frozen=True)
class KlioReadAvroEventConfig(KlioEventInput, KlioReadAvroConfig):
    """Avro-based event input; all behavior lives in the parent classes."""
    pass
# TODO: integrate into @dsimon's converter logic once his PR#154 is merged
def _convert_bigquery_input_coder(coder_str):
# direct runner seems to call this multiple times, prob with pickling;
# subsequent times seem to call with a None value (not sure why...)
if coder_str is None:
return
coder_path_stems = coder_str.split(".")
coder_kls_str = coder_path_stems.pop()
module_str = ".".join(coder_path_stems)
module = importlib.import_module(module_str) # should raise if not avail
return getattr(module, coder_kls_str) # should raise if not avail
@attr.attrs(frozen=True)
class KlioBigQueryConfig(object):
    """Shared attributes for BigQuery event IO (`type: bq`)."""
    name = "bq"
    # All three default to None because event input may alternatively be
    # configured with a raw `query` (see KlioBigQueryEventInput).
    project = attr.attrib(type=str, default=None)
    dataset = attr.attrib(type=str, default=None)
    table = attr.attrib(type=str, default=None)
@attr.attrs(frozen=True)
@supports(KlioIODirection.INPUT, KlioIOType.EVENT)
class KlioBigQueryEventInput(KlioEventInput, KlioBigQueryConfig):
    """Event input read from BigQuery (`type: bq`).

    Configured either via `project`+`dataset`+`table` (optionally with
    `columns`) or via a raw `query` — the two forms are mutually
    exclusive (enforced by the validators below).
    """
    # Set to true by default to fail early (check table existence before
    # starting pipeline) (Beam sets it to false by default but suggests it
    # to be set to true in most scenarios)
    validate = attr.attrib(type=bool, default=True)
    # TODO: use @dsimon's converter logic once his PR#154 is merged in
    coder = attr.attrib(
        type=str, default=None, converter=_convert_bigquery_input_coder
    )
    kms_key = attr.attrib(type=str, default=None)
    # Optional, but only applies to project+dataset+table;
    # Klio handles filtering data after a `SELECT *` query
    klio_message_columns = attr.attrib(type=list, default=None)
    # Mutually exclusive with project+dataset+table & klio_message_columns
    query = attr.attrib(type=str, default=None)
    use_standard_sql = attr.attrib(type=bool, default=False)
    flatten_results = attr.attrib(type=bool, default=True)
    @klio_message_columns.validator
    def __assert_project_dataset_table(self, attribute, value):
        # `columns` only makes sense with a full table reference
        has_project_dataset_table = all(
            [self.project, self.dataset, self.table]
        )
        if value is not None:
            if not has_project_dataset_table:
                raise ValueError(
                    "Must include `project`, `dataset` and `table` if "
                    "selecting `columns` for BigQuery."
                )
    @query.validator
    def __assert_query_or_project_dataset_table(self, attribute, value):
        # reject configs mixing `query` with any table-reference field
        has_project_dataset_table = any(
            [self.project, self.dataset, self.table]
        )
        if value is not None and has_project_dataset_table:
            raise ValueError(
                "`query` is mutually exclusive with `project`, `dataset` "
                "and `table`."
            )
    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        """Build an instance, mapping user-facing `columns` to the
        internal `klio_message_columns` attribute."""
        if "columns" in config_dict:
            copy = config_dict.copy()
            copy["klio_message_columns"] = copy.pop("columns", None)
            return super().from_dict(copy, *args, **kwargs)
        return super().from_dict(config_dict, *args, **kwargs)
    def as_dict(self):
        """Return the dict form using the user-facing `columns` key."""
        config_dict = super().as_dict()
        copy = config_dict.copy()
        copy["columns"] = copy.pop("klio_message_columns", None)
        return copy
@attr.attrs(frozen=True)
@supports(KlioIODirection.OUTPUT, KlioIOType.EVENT)
class KlioBigQueryEventOutput(KlioEventOutput, KlioBigQueryConfig):
    """Event output written to BigQuery (`type: bq`)."""

    # schema field is optional; assumes form of
    # {"fields": [{"name": ...,"type": ..., "mode": ...}, ... ]
    schema = attr.attrib(type=dict, default=None)
    create_disposition = attr.attrib(type=str, default="CREATE_IF_NEEDED")
    write_disposition = attr.attrib(type=str, default="WRITE_EMPTY")

    @schema.validator
    def check(self, attribute, value):
        """Validate the optional user-provided schema dict.

        Raises:
            ValueError: if a provided schema is not of the expected shape.
        """
        # Bug fix: `schema` is optional (defaults to None), but attrs runs
        # validators on default values too; previously `value.get(...)`
        # raised AttributeError whenever the schema was omitted.
        if value is None:
            return

        def valid_field(field_dict):
            is_dict = isinstance(field_dict, dict)
            required_keys = [
                "name",
                "type",
                "mode",
            ]  # are all of these required?
            return is_dict and all(k in field_dict for k in required_keys)

        def contains_invalid_fields(field_list):
            return any(valid_field(f) is False for f in field_list)

        has_fields = value.get("fields")
        if not has_fields or contains_invalid_fields(value.get("fields", [])):
            # Bug fix: the implicit string concatenation was missing a
            # space, producing "a list ofdict" in the error message.
            raise ValueError(
                "Must be a dict with the key `fields` set to a list of "
                "dicts, each of which have the keys "
                "`name`, `type`, and `mode`"
            )
@attr.attrs(frozen=True)
class KlioGCSConfig(KlioIOConfig):
    """Shared attributes for GCS data IO (`type: gcs`)."""

    name = "gcs"
    location = attr.attrib(type=str)

    @staticmethod
    def _from_dict(config_dict):
        # Drop event-only keys and honor the legacy `data_location`
        # spelling as a fallback for `location`.
        cleaned = dict(config_dict)
        cleaned.pop("topic", None)
        cleaned.pop("subscription", None)
        legacy_location = cleaned.pop("data_location", None)
        cleaned.setdefault("location", legacy_location)
        return cleaned
@attr.attrs(frozen=True)
@supports(KlioIODirection.INPUT, KlioIOType.DATA)
class KlioGCSInputDataConfig(KlioDataIOConfig, KlioGCSConfig):
    """Data input read from GCS (`type: gcs`)."""
    file_suffix = attr.attrib(type=str, default="")
    ping = attr.attrib(type=bool, default=False)
    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        """Build an instance after normalizing legacy/extra keys."""
        config_dict = super()._from_dict(config_dict)
        return super().from_dict(config_dict, *args, **kwargs)
@attr.attrs(frozen=True)
@supports(KlioIODirection.OUTPUT, KlioIOType.DATA)
class KlioGCSOutputDataConfig(KlioDataIOConfig, KlioGCSConfig):
    """Data output written to GCS (`type: gcs`)."""
    file_suffix = attr.attrib(type=str, default="")
    # `force` presumably re-processes existing output — TODO confirm
    force = attr.attrib(type=bool, default=False)
    @classmethod
    def from_dict(cls, config_dict, *args, **kwargs):
        """Build an instance after normalizing legacy/extra keys."""
        config_dict = super()._from_dict(config_dict)
        return super().from_dict(config_dict, *args, **kwargs)
|
gaybro8777/klio | lib/tests/unit/transforms/test_decorators.py | <filename>lib/tests/unit/transforms/test_decorators.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_core.proto import klio_pb2
from klio.transforms import decorators
from klio.utils import _thread_limiter
@pytest.fixture
def kmsg():
    """A minimal KlioMessage with only its element populated."""
    message = klio_pb2.KlioMessage()
    message.data.element = b"3l3m3nt"
    return message
def test_retry(kmsg, mocker, mock_config):
    """A function that always fails is attempted exactly `tries` times."""
    call_tracker = mocker.Mock()

    @decorators._handle_klio
    @decorators._retry(tries=2)
    def func(*args, **kwargs):
        call_tracker(*args, **kwargs)
        raise Exception("fuu")

    func(kmsg.SerializeToString())

    assert 2 == call_tracker.call_count
def test_retry_custom_catch(kmsg, mocker, mock_config):
    """_retry only retries the exception type(s) it was configured with."""
    # Assert retry on custom exception
    class CustomCatchException(Exception):
        pass
    mock_function = mocker.Mock()
    # `func` closes over this flag; flipping it later changes which
    # exception the decorated function raises.
    raise_custom = True
    @decorators._handle_klio
    @decorators._retry(tries=3, exception=CustomCatchException)
    def func(*args, **kwargs):
        mock_function(*args, **kwargs)
        if raise_custom:
            raise CustomCatchException("custom fuu")
        raise Exception("fuu")
    func(kmsg.SerializeToString())
    assert 3 == mock_function.call_count
    # Assert retry on multiple custom exceptions
    class AnotherCustomCatchException(Exception):
        pass
    exc_tuple = (CustomCatchException, AnotherCustomCatchException)
    @decorators._handle_klio
    @decorators._retry(tries=3, exception=exc_tuple)
    def func(*args, **kwargs):
        mock_function(*args, **kwargs)
        if raise_custom:
            raise CustomCatchException("custom fuu")
        raise Exception("fuu")
    mock_function.reset_mock()
    func(kmsg.SerializeToString())
    assert 3 == mock_function.call_count
    # Assert retries do not happen for an exception that isn't provided
    mock_function.reset_mock()
    raise_custom = False
    func(kmsg.SerializeToString())
    assert 1 == mock_function.call_count
def test_retry_raises_runtime_parents(kmsg, mocker, mock_config):
    """Using the retry decorator without parentheses is rejected."""
    # Need to call @retry with parens
    with pytest.raises(RuntimeError):
        @decorators._handle_klio
        @decorators.retry
        def func(*args, **kwargs):
            pass
        func(kmsg.SerializeToString())
@pytest.mark.parametrize(
    "invalid_tries", (-2, 0.5, "1", {"a": "dict"}, ["a", "list"], lambda x: x)
)
def test_retry_raises_runtime_invalid_tries(
    invalid_tries, kmsg, mocker, mock_config
):
    """Non-integer or negative `tries` values are rejected."""
    # Assert `tries` as a valid integer
    with pytest.raises(RuntimeError):
        @decorators._handle_klio
        @decorators._retry(tries=invalid_tries)
        def func(*args, **kwargs):
            pass
        func(kmsg.SerializeToString())
@pytest.mark.parametrize(
    "invalid_delay", (-2, {"a": "dict"}, ["a", "list"], lambda x: x)
)
def test_retry_raises_runtime_invalid_delay(
    invalid_delay, kmsg, mocker, mock_config
):
    """Non-numeric or negative `delay` values are rejected."""
    # Assert `delay` as a valid int/float
    with pytest.raises(RuntimeError):
        @decorators._handle_klio
        @decorators._retry(tries=1, delay=invalid_delay)
        def func(*args, **kwargs):
            pass
        func(kmsg.SerializeToString())
@pytest.mark.parametrize(
    "max_thread_count,patch_str",
    (
        (None, "threading.BoundedSemaphore"),
        (_thread_limiter.ThreadLimit.DEFAULT, "threading.BoundedSemaphore"),
        (_thread_limiter.ThreadLimit.NONE, "_DummySemaphore"),
    ),
)
def test_thread_limiting(
    max_thread_count, patch_str, kmsg, mock_config, mocker, monkeypatch
):
    """_handle_klio acquires/releases the expected semaphore kind exactly
    once per invocation, for default, explicit, and disabled limits."""
    mock_function = mocker.Mock()
    mock_semaphore = mocker.Mock()
    # patch the semaphore class the limiter will instantiate internally
    monkeypatch.setattr(
        f"klio.utils._thread_limiter.{patch_str}", mock_semaphore
    )
    kwargs = {}
    if max_thread_count is not None:
        kwargs["max_thread_count"] = max_thread_count
    @decorators._handle_klio(**kwargs)
    def func(*args, **kwargs):
        mock_function(*args, **kwargs)
        return
    func(kmsg.SerializeToString())
    assert 1 == mock_function.call_count
    # `.return_value` because the decorator instantiates the patched class
    mock_semaphore.return_value.acquire.assert_called_once_with()
    mock_semaphore.return_value.release.assert_called_once_with()
def test_thread_limiting_custom_limiter(
    kmsg, mock_config, mocker, monkeypatch
):
    """A user-supplied ThreadLimiter instance is used for acquire/release."""
    mock_function = mocker.Mock()
    mock_semaphore = mocker.Mock()
    limiter = _thread_limiter.ThreadLimiter(max_thread_count=1)
    monkeypatch.setattr(limiter, "_semaphore", mock_semaphore)
    @decorators._handle_klio(thread_limiter=limiter)
    def func(*args, **kwargs):
        mock_function(*args, **kwargs)
        return
    func(kmsg.SerializeToString())
    assert 1 == mock_function.call_count
    mock_semaphore.acquire.assert_called_once_with()
    mock_semaphore.release.assert_called_once_with()
def test_thread_limiting_raises_mutex_args(kmsg, mocker, mock_config):
    """`max_thread_count` and `thread_limiter` are mutually exclusive."""
    limiter = _thread_limiter.ThreadLimiter(max_thread_count=1)
    with pytest.raises(RuntimeError):
        @decorators._handle_klio(max_thread_count=1, thread_limiter=limiter)
        def func(*args, **kwargs):
            pass
        func(kmsg.SerializeToString())
def test_thread_limiting_raises_invalid_limiter(kmsg, mocker, mock_config):
    """`thread_limiter` must be a ThreadLimiter instance."""
    limiter = "not an instance of ThreadLimiter"
    with pytest.raises(RuntimeError):
        @decorators._handle_klio(thread_limiter=limiter)
        def func(*args, **kwargs):
            pass
        func(kmsg.SerializeToString())
@pytest.mark.parametrize(
    "invalid_max_thread_count", (-2, {"a": "dict"}, ["a", "list"])
)
def test_thread_limiting_raises_invalid_max(
    invalid_max_thread_count, kmsg, mocker, mock_config
):
    """Invalid `max_thread_count` values are rejected."""
    with pytest.raises(RuntimeError):
        @decorators._handle_klio(max_thread_count=invalid_max_thread_count)
        def func(*args, **kwargs):
            pass
        func(kmsg.SerializeToString())
|
gaybro8777/klio | lib/src/klio/transforms/core.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import __main__
import glob
import logging
import os
import threading
import yaml
from klio_core import config
from klio_core.proto import klio_pb2
from klio.metrics import client as metrics_client
from klio.metrics import logger as metrics_logger
from klio.metrics import stackdriver
class RunConfig(object):
    """Process-wide access point for the job's KlioConfig.

    Reads the effective config from disk on first access per thread and
    caches it in thread-local storage.
    """
    _thread_local = threading.local()
    @classmethod
    def _load_config_from_file(cls):
        """Load KlioConfig from the effective config file on disk."""
        # [Klio v2] this may get expensive, to always be reading config
        # from a file. Can this be replaced by something in memory
        # that's also globally accessible?
        klio_job_file = "/usr/src/config/.effective-klio-job.yaml"
        # for backwards compatibility, and user is using setup.py and we
        # have to find it somewhere...
        if not os.path.exists(klio_job_file):
            # use iterator so we don't waste time searching everywhere upfront
            files = glob.iglob("/usr/**/klio-job.yaml", recursive=True)
            for f in files:
                klio_job_file = f
                # only grab the first one
                break
        # NOTE: if neither path exists, this open() raises — there is no
        # further fallback.
        with open(klio_job_file, "r") as f:
            all_config_data = yaml.safe_load(f)
        return config.KlioConfig(all_config_data)
    # NOTE: for now this approach is not being used (and may be removed in the
    # future)
    @classmethod
    def _get_via_main_session(cls):
        if hasattr(__main__, "run_config"):
            return __main__.run_config
        else:
            raise Exception(
                "Attempt to access RunConfig before it was set. This likely"
                " means something was imported before RunConfig was set."
            )
    @classmethod
    def _get_via_thread_local(cls):
        """Return the thread-local KlioConfig, loading it on first use."""
        klio_config = getattr(cls._thread_local, "klio_config", None)
        if not klio_config:
            cls._thread_local.klio_config = cls._load_config_from_file()
        return cls._thread_local.klio_config
    @classmethod
    def get(cls):
        """Return the current KlioConfig (thread-local, lazily loaded)."""
        return cls._get_via_thread_local()
    @classmethod
    def set(cls, config):
        """Store the config on __main__ (see _get_via_main_session)."""
        __main__.run_config = config
class KlioContext(object):
    """Context related to the currently running job.
    Available to transforms via one of the :ref:`KlioContext decorators
    <klio-context-decorators>`.
    """
    _thread_local = threading.local()
    def __init__(self):
        # set via the `_transform_name` property by the decorators
        self.__transform_name = None
    def _create_klio_job_obj(self):
        """Build and serialize a KlioJob proto for the current job.

        Returns:
            bytes: the serialized KlioJob message.
        """
        klio_job = klio_pb2.KlioJob()
        klio_job.job_name = self.config.job_name
        klio_job.gcp_project = self.config.pipeline_options.project
        klio_job_str = klio_job.SerializeToString()
        return klio_job_str
    def _get_metrics_registry(self):
        """Create a MetricsRegistry from the job's metrics config.

        On Dataflow, a Stackdriver client is used unless explicitly
        disabled; otherwise a logger-based client is used (possibly
        disabled, so user code touching `_klio.metrics` still works).
        """
        clients = []
        use_logger, use_stackdriver = None, None
        metrics_config = self.config.job_config.metrics
        # use_logger and use_stackdriver could be False (turn off),
        # None (use default config), or a dict of configured values
        use_logger = metrics_config.get("logger")
        use_stackdriver = metrics_config.get("stackdriver_logger")
        # TODO: set runner in OS environment (via klio-exec), since
        # the runner defined in config could be overwritten via
        # `--direct-runner`.
        # i.e.: runner = os.getenv("BEAM_RUNNER", "").lower()
        runner = self.config.pipeline_options.runner
        if "dataflow" in runner.lower():
            # Must explicitly compare to `False` since `None` could be
            # the user accepting default config.
            # If explicitly false, then just disable logger underneath SD
            if use_stackdriver is not False:
                sd_client = stackdriver.StackdriverLogMetricsClient(
                    self.config
                )
                clients.append(sd_client)
            else:
                # if use_stackdriver is explicitly false, then make sure
                # logger client is disabled since the stackdriver client
                # inherits the logger client
                use_logger = False
        if not len(clients):  # setup default client
            disabled = False
            # User might disable the logger, but we still need a relay
            # client if all other relay clients are disabled. This allows
            # folks to silence metrics but not need to remove code that
            # interacts with `_klio.metrics`.
            # Must explicitly compare to `False` since `None` could be
            # the user accepting default config
            if use_logger is False:
                disabled = True
            logger_client = metrics_logger.MetricsLoggerClient(
                self.config, disabled=disabled
            )
            clients.append(logger_client)
        return metrics_client.MetricsRegistry(
            clients, transform_name=self._transform_name
        )
    @property
    def config(self):
        """A ``KlioConfig`` instance representing the job's configuration."""
        return RunConfig.get()
    @property
    def job(self):
        """An instance of :ref:`kliojob` of the current job."""
        # lazily created and cached per thread
        klio_job = getattr(self._thread_local, "klio_job", None)
        if not klio_job:
            self._thread_local.klio_job = self._create_klio_job_obj()
        return self._thread_local.klio_job
    @property
    def logger(self):
        """A namespaced logger.
        Equivalent to ``logging.getLogger("klio")``.
        """
        klio_logger = getattr(self._thread_local, "klio_logger", None)
        if not klio_logger:
            self._thread_local.klio_logger = logging.getLogger("klio")
        return self._thread_local.klio_logger
    @property
    def metrics(self):
        """A metrics registry instance.
        See :ref:`metrics <metrics>` for more information."""
        # lazily created and cached per thread
        metrics_registry = getattr(self._thread_local, "klio_metrics", None)
        if not metrics_registry:
            self._thread_local.klio_metrics = self._get_metrics_registry()
        return self._thread_local.klio_metrics
    # <-- private/internal attributes -->
    @property
    def _transform_name(self):
        # name of the transform this context is attached to (set by the
        # KlioContext decorators); used to namespace metrics
        return self.__transform_name
    @_transform_name.setter
    def _transform_name(self, name):
        self.__transform_name = name
|
gaybro8777/klio | cli/tests/commands/job/test_test.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_cli.commands.job import test as test_job
@pytest.fixture
def mock_os_environ(mocker):
    """Patch os.environ with a minimal HOME/USER mapping."""
    fake_env = {"HOME": "/home", "USER": "cookiemonster"}
    return mocker.patch.dict("os.environ", fake_env)


@pytest.fixture
def test_pipeline(mock_os_environ):
    """A TestPipeline built with placeholder job dir and configs."""
    return test_job.TestPipeline("job/dir", "klio_config", "docker_config")


def test_get_environment(test_pipeline):
    """_get_environment returns the expected container env vars."""
    gcreds = "/usr/gcloud/application_default_credentials.json"
    expected_env = {
        "PYTHONPATH": "/usr/src/app",
        "GOOGLE_APPLICATION_CREDENTIALS": gcreds,
        "USER": "cookiemonster",
        "KLIO_TEST_MODE": "true",
    }
    assert expected_env == test_pipeline._get_environment()


def test_get_command(test_pipeline):
    """_get_command prefixes the given args with the `test` subcommand."""
    assert ["test", "py", "args"] == test_pipeline._get_command(["py", "args"])


def test_requires_config_setting(test_pipeline):
    """TestPipeline must not require a config file."""
    assert not test_pipeline.requires_config_file
|
gaybro8777/klio | core/src/klio_core/dataflow.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Module for interacting with the `Dataflow REST API
<https://cloud.google.com/dataflow/docs/reference/rest>`_."""
import functools
import logging
import random
from googleapiclient import discovery
from klio_core import utils
from klio_core import variables
class DataflowClient(object):
    """Client to interact with Dataflow REST API.

    Args:
        api_version (str): Version of Dataflow REST API. Defaults to
            ``v1b3``.
    """

    def __init__(self, api_version=None):
        _api_version = api_version or "v1b3"
        self.client = discovery.build("dataflow", _api_version)
        self.logger = logging.getLogger("klio")

    def find_job_by_name(self, job_name, gcp_project, region=None):
        """Search Dataflow for a job given its name and GCP project.

        Args:
            job_name (str): Name of Dataflow job.
            gcp_project (str): GCP project in which to search.
            region (str): Region in which to search. Defaults to
                searching all regions in
                :attr:`klio_core.variables.DATAFLOW_REGIONS`.
        Returns:
            dict or None: If found, ``dict`` of job summary results. Otherwise,
            ``None``.
        """
        if not region:
            regions = variables.DATAFLOW_REGIONS
        else:
            regions = (region,)

        base_request = self.client.projects().locations().jobs()
        all_matching_jobs = []
        # TODO: no batch requesting from Google's side, but should add
        # threading to send multiple requests concurrently. @lynn
        for region in regions:
            # Note: the parameter `view="JOB_VIEW_ALL"` does not return
            # the same information in this `.list()` call as it
            # does in the `.get()` call in `get_job_detail` below.
            request = base_request.list(
                projectId=gcp_project, location=region, filter="ACTIVE"
            )
            try:
                response = request.execute()
            # general catch all since the handling would be the same no matter
            # of the exception
            except Exception as e:
                self.logger.warning(
                    "Error listing active jobs in project '%s' in region '%s':"
                    " %s" % (gcp_project, region, e)
                )
                continue

            # iterating directly is safe when "jobs" is missing/empty
            for result in response.get("jobs", []):
                if result["name"] == job_name:
                    all_matching_jobs.append(result)

        # Note: job names are unique within regions, but not across
        # regions :grimace:
        if len(all_matching_jobs) > 1:
            # BUGFIX: message previously contained two '%s' placeholders
            # with no arguments, so the raw template was logged verbatim.
            self.logger.info(
                "More than one parent job found for job name '%s' under "
                "project '%s'. Selecting one at random."
                % (job_name, gcp_project)
            )
            return random.choice(all_matching_jobs)
        if all_matching_jobs:
            return all_matching_jobs[0]

    def get_job_detail(self, job_name, gcp_project, region=None):
        """Get verbose job detail given a job name.

        Args:
            job_name (str): Name of Dataflow job.
            gcp_project (str): GCP project in which to search.
            region (str): Region in which to search. Defaults to
                searching all regions in
                :attr:`klio_core.variables.DATAFLOW_REGIONS`.
        Returns:
            dict or None: If found, ``dict`` of detailed job results.
            Otherwise, ``None``.
        """
        basic_job = self.find_job_by_name(job_name, gcp_project, region)
        if not basic_job:
            return None

        job_id = basic_job["id"]
        job_location = basic_job["location"]
        request = (
            self.client.projects()
            .locations()
            .jobs()
            .get(
                projectId=gcp_project,
                location=job_location,
                jobId=job_id,
                view="JOB_VIEW_ALL",
            )
        )
        try:
            response = request.execute()
        # general catch all since the handling would be the same no matter
        # of the exception
        except Exception as e:
            self.logger.warning(
                "Error getting job detail for '%s' in project '%s' in "
                "region '%s': %s" % (job_name, gcp_project, job_location, e)
            )
            return
        return response

    def get_job_input_topic(self, job_name, gcp_project, region=None):
        """Get input topic of a particular job.

        Args:
            job_name (str): Name of Dataflow job.
            gcp_project (str): GCP project in which to search.
            region (str): Region in which to search. Defaults to
                searching all regions in
                :attr:`klio_core.variables.DATAFLOW_REGIONS`.
        Returns:
            str or None: If found, input topic of job. Otherwise, ``None``.
        """
        # BUGFIX: previously hard-coded `region=None`, which silently
        # ignored a caller-provided region and searched all regions.
        job_info = self.get_job_detail(job_name, gcp_project, region=region)
        if not job_info:
            return None

        read_pubsub_user_name = "ReadFromPubSub/Read"
        for step in job_info.get("steps", []):
            if step.get("kind") == "ParallelRead":
                props = step.get("properties", {})
                user_name = props.get("user_name", {})
                if user_name.get("value") == read_pubsub_user_name:
                    # TODO: support multiple input topics; will need to
                    # see how Google's response json renders it. @lynn
                    return props.get("pubsub_topic", {}).get("value")
def get_dataflow_client(api_version=None):
    """Get an initialized :class:`DataflowClient`.

    Reuses a previously-initialized client stored in the global
    namespace when one exists for the requested API version; otherwise
    a new client is created and cached there to avoid redundant
    initialization.

    Args:
        api_version (str): Version of Dataflow REST API. Defaults to
            ``v1b3``.
    Returns:
        DataflowClient: A client to interact with the Dataflow REST API.
    """
    version = api_version or "v1b3"
    cache_key = "dataflow_client_{}".format(version)
    return utils.get_or_initialize_global(
        cache_key, functools.partial(DataflowClient, version)
    )
|
gaybro8777/klio | cli/tests/commands/job/utils/fixtures/expected/run.py | <gh_stars>100-1000
"""
Notice: the code within `run` is just an example of what can be done.
Feel free to import what's needed, including third-party libraries or
other self-written modules.
"""
import apache_beam as beam
import transforms
def run(input_pcol, config):
    """REQUIRED: Main entrypoint in running a job's transform(s).

    Any Beam transforms that need to happen after a message is consumed
    from PubSub from an upstream job, and before publishing a message to
    a downstream job (if needed/configured).

    Args:
        input_pcol: A Beam PCollection returned from
            ``beam.io.ReadFromPubSub``.
        config (klio.KlioConfig): Configuration as defined in
            ``klio-job.yaml``.
    Returns:
        apache_beam.pvalue.PCollection: PCollection that will be passed to
        the output transform for the configured event output (if any).
    """
    # Apply the example transform; chain additional Klio-based ParDo
    # transforms here as needed.
    return input_pcol | beam.ParDo(transforms.HelloKlio())
|
gaybro8777/klio | cli/tests/utils/test_docker_utils.py | <reponame>gaybro8777/klio
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os
import docker
import pytest
from docker import errors as docker_errors
from requests import exceptions as requests_exceptions
from klio_cli.utils import docker_utils
@pytest.fixture
def mock_docker_client(mocker):
    # Generic stand-in for a high-level docker client instance.
    return mocker.Mock()
@pytest.fixture
def mock_json_loads(mocker):
    # Patch json.loads so parsing of docker build-log lines can be controlled.
    patched = mocker.patch.object(json, "loads")
    return patched
@pytest.fixture
def mock_docker_api_client(mocker):
    # Patch the low-level docker API client constructor.
    patched = mocker.patch.object(docker, "APIClient")
    return patched
@pytest.fixture
def mock_client(mocker):
    # Generic mock used as a docker client in the image-existence tests.
    return mocker.Mock()
@pytest.fixture
def mock_os_getcwd(monkeypatch):
    # Pin the working directory so path-based logic is deterministic.
    fake_cwd = "/test/dir"
    monkeypatch.setattr(os, "getcwd", lambda: fake_cwd)
    return fake_cwd
@pytest.fixture
def mock_os_environ(mocker):
    # Provide a minimal HOME so env-dependent code behaves predictably.
    fake_env = {"HOME": "/home"}
    return mocker.patch.dict("os.environ", fake_env)
def test_check_docker_connection(mock_docker_client):
    """A reachable docker daemon is verified via a single ping."""
    docker_utils.check_docker_connection(mock_docker_client)
    mock_docker_client.ping.assert_called_once_with()
@pytest.mark.parametrize(
    "error",
    [docker_errors.APIError("msg"), requests_exceptions.ConnectionError()],
)
def test_check_docker_connection_with_errors(
    mock_docker_client, error, caplog
):
    """Connection failures log two records and exit the process."""
    mock_docker_client.ping.side_effect = error
    with pytest.raises(SystemExit):
        docker_utils.check_docker_connection(mock_docker_client)
    assert len(caplog.records) == 2
@pytest.mark.parametrize("path_exists", [True, False])
def test_check_dockerfile_present(monkeypatch, path_exists, caplog):
    """A missing Dockerfile logs twice and exits; a present one passes."""
    monkeypatch.setattr(os.path, "exists", lambda x: path_exists)
    job_dir = "my/job/dir"
    if path_exists:
        docker_utils.check_dockerfile_present(job_dir)
    else:
        with pytest.raises(SystemExit):
            docker_utils.check_dockerfile_present(job_dir)
        assert len(caplog.records) == 2
def test_docker_image_exists(mocker, mock_client):
    """A successful image lookup reports the image as existing."""
    # NOTE: the code under test calls `client.images.get(tag)`, so the
    # call lands on the auto-created `get` attribute of this mock.
    mock_client.images = mocker.PropertyMock(return_value=mocker.Mock())
    tag = "gcr.io/sigint/test-image-name"
    exists = docker_utils.docker_image_exists(tag, mock_client)
    assert exists
    mock_client.images.get.assert_called_once_with(tag)
def test_docker_image_exists_with_image_not_found_error(mocker, mock_client):
    """ImageNotFound from the docker client means the image is absent."""
    mock_client.images = mocker.PropertyMock(return_value=mocker.Mock())
    # side effect must be on `images.get`, the attribute actually called
    mock_client.images.get.side_effect = docker_errors.ImageNotFound(
        "pew", "pew"
    )
    tag = "gcr.io/sigint/test-image-name"
    exists = docker_utils.docker_image_exists(tag, mock_client)
    assert not exists
def test_docker_image_exists_with_api_error(mocker, mock_client, caplog):
    """An APIError during lookup logs once and exits the process."""
    mock_client.images = mocker.PropertyMock(return_value=mocker.Mock())
    mock_client.images.get.side_effect = docker_errors.APIError("pew")
    tag = "gcr.io/sigint/test-image-name"
    with pytest.raises(SystemExit):
        docker_utils.docker_image_exists(tag, mock_client)
    assert len(caplog.records) == 1
@pytest.mark.parametrize(
    "config_file,exp_config_file",
    ((None, "klio-job.yaml"), ("klio-job2.yaml", "klio-job2.yaml"),),
)
def test_build_docker_image(
    config_file,
    exp_config_file,
    mocker,
    mock_json_loads,
    mock_docker_api_client,
    caplog,
):
    """A successful build streams docker logs and logs two records.

    When no config file is given, the KLIO_CONFIG build arg falls back
    to `klio-job.yaml`.
    """
    mock_api_client = mocker.Mock()
    mock_docker_api_client.return_value = mock_api_client
    # Simulated build-log stream: bytes, str, empty, and a JSON line must
    # all be handled by the code under test without error.
    mock_api_client.build.return_value = (
        item for item in (b"BYTELOGS", "LOGS", "", '{"stream":"\\n"}')
    )
    mock_json_loads.return_value = {"stream": "SUCCESS"}
    job_dir = "/test/dir/jobs/test_run_job"
    image_name = "gcr.io/sigint/test-image-name"
    image_tag = "foobar"
    image_name_and_tag = "{}:{}".format(image_name, image_tag)
    # Exact kwargs expected to be passed through to docker's low-level
    # build API.
    build_flag = {
        "path": job_dir,
        "tag": image_name_and_tag,
        "rm": True,
        "buildargs": {"tag": image_tag, "KLIO_CONFIG": exp_config_file},
    }
    docker_utils.build_docker_image(
        job_dir, image_name, image_tag, config_file
    )
    docker.APIClient.assert_called_once_with(
        base_url="unix://var/run/docker.sock"
    )
    mock_api_client.build.assert_called_once_with(**build_flag)
    assert 2 == len(caplog.records)
def test_build_docker_image_with_errors(
    mocker, mock_json_loads, mock_docker_api_client, caplog
):
    """A build whose log stream reports an error exits with three logs."""
    mock_api_client = mocker.Mock()
    mock_docker_api_client.return_value = mock_api_client
    mock_api_client.build.return_value = "LOGS"
    # Parsed log line carrying a docker build error payload.
    mock_json_loads.return_value = {
        "error": "FAILURE",
        "errorDetail": {"message": "FAILURE"},
    }
    job_dir = "/test/dir/jobs/test_run_job"
    image_name = "gcr.io/sigint/test-image-name"
    image_tag = "foobar"
    image_name_and_tag = "{}:{}".format(image_name, image_tag)
    # No config file passed, so KLIO_CONFIG defaults to klio-job.yaml.
    build_flag = {
        "path": job_dir,
        "tag": image_name_and_tag,
        "rm": True,
        "buildargs": {"tag": image_tag, "KLIO_CONFIG": "klio-job.yaml"},
    }
    with pytest.raises(SystemExit):
        docker_utils.build_docker_image(job_dir, image_name, image_tag)
    docker.APIClient.assert_called_once_with(
        base_url="unix://var/run/docker.sock"
    )
    mock_api_client.build.assert_called_once_with(**build_flag)
    assert 3 == len(caplog.records)
def test_push_image_to_gcr(mocker, capsys):
    """Push-progress lines from docker are rendered to stdout.

    The push stream yields JSON-encoded byte payloads (some containing
    multiple records separated by ``\\r\\n``); the expected stdout
    includes escape sequences (``\\x1b[1F``, ``\\u001b[0K``) that the
    code under test emits to redraw a layer's progress line in place.
    """
    image_name = "my.img.repo"
    tag = "my-tag"
    mock_client = mocker.Mock()
    mock_push = mocker.Mock()
    mock_client.images.push = mock_push
    mock_push.return_value = [
        b'{"status": "foo", "progress": {}}',
        b'{"id": "layerid1", "status": "bar", "progress": "some progress"}',
        (
            b'{"id": "layerid1", "status": "baz", "progress": "some more '
            b'progress"}\r\n{"id": "layerid2", "status": "foo", "progress": '
            b'"some other progress"}'
        ),
    ]
    exp_stdout = (
        "foo{}\nlayerid1: barsome progress\n\x1b[1Flayerid1: "
        "bazsome more progress\u001b[0K\nlayerid2: foosome other progress\n"
    )
    docker_utils.push_image_to_gcr(image_name, tag, mock_client)
    mock_push.assert_called_once_with(
        repository=image_name, tag=tag, stream=True
    )
    captured = capsys.readouterr()
    assert exp_stdout == captured.out
# Parameter matrix columns: does the image already exist locally; does
# building it raise; does the docker-connection check fail; does the
# Dockerfile check fail; is a rebuild forced.
@pytest.mark.parametrize(
    "docker_image_exists, docker_image_build_raises, check_docker, "
    "check_dockerfile, force_build",
    [
        (True, False, False, False, True),
        (True, False, False, False, False),
        (False, False, False, False, False),
        (False, True, False, False, True),
        (False, True, False, False, False),
        (False, False, True, False, False),
        (False, False, False, True, False),
    ],
)
def test_get_docker_image_client(
    mocker,
    docker_image_exists,
    docker_image_build_raises,
    check_docker,
    check_dockerfile,
    force_build,
    caplog,
):
    """get_docker_image_client builds when needed and propagates failures.

    Any failing pre-check or build exits with code 1; otherwise the
    tagged image name and the docker client are returned, with a build
    triggered when the image is missing or a rebuild is forced.
    """
    mock_docker_from_env = mocker.patch.object(docker, "from_env")
    mock_check_docker_connection = mocker.patch.object(
        docker_utils, "check_docker_connection"
    )
    mock_check_dockerfile_present = mocker.patch.object(
        docker_utils, "check_dockerfile_present"
    )
    mock_docker_image_exists = mocker.patch.object(
        docker_utils, "docker_image_exists"
    )
    mock_build_docker_image = mocker.patch.object(
        docker_utils, "build_docker_image"
    )
    job_dir = "/test/dir/jobs/test_run_job"
    image_name = "gcr.io/sigint/test-image-name"
    image_tag = "foobar"
    image_name_and_tag = "{}:{}".format(image_name, image_tag)
    client = 'A "Docker Client" object.'
    mock_docker_from_env.return_value = client
    mock_docker_image_exists.return_value = docker_image_exists
    if docker_image_build_raises or check_docker or check_dockerfile:
        # Failure path: whichever step is flagged raises SystemExit(1).
        mock_check_docker_connection.side_effect = (
            SystemExit(1) if check_docker else None
        )
        mock_check_dockerfile_present.side_effect = (
            SystemExit(1) if check_dockerfile else None
        )
        mock_build_docker_image.side_effect = (
            SystemExit(1) if docker_image_build_raises else None
        )
        with pytest.raises(SystemExit) as _exec:
            docker_utils.get_docker_image_client(
                job_dir, image_tag, image_name, force_build
            )
        assert 1 == _exec.value.code
    else:
        # Happy path: both pre-checks ran, and a build happened only when
        # necessary (missing image or forced rebuild).
        actual_image, actual_client = docker_utils.get_docker_image_client(
            job_dir, image_tag, image_name, force_build
        )
        mock_check_docker_connection.assert_called_once_with(client)
        mock_check_dockerfile_present.assert_called_once_with(job_dir)
        if not docker_image_exists or force_build:
            assert mock_build_docker_image.called
        else:
            assert 1 == len(caplog.records)
        assert image_name_and_tag == actual_image
        assert client == actual_client
|
gaybro8777/klio | exec/src/klio_exec/commands/utils/cpu_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import time
import line_profiler as lp
import psutil
from klio_exec.commands.utils import wrappers
class KLineProfiler(wrappers.KLineProfilerMixin, lp.LineProfiler):
    # Combines Klio-specific behavior from KLineProfilerMixin with
    # line_profiler's LineProfiler; no additional logic is needed here.
    pass
# adapted from memory_profiler's approach:
# github.com/pythonprofilers/memory_profiler/blob/master/memory_profiler.py
def get_cpu_usage(proc, interval=None, stream=None):
    """Measure the CPU % usage at an interval until ``proc`` exits.

    Args:
        proc: a process handle with a ``poll()`` method (e.g.
            ``subprocess.Popen``); a non-``None`` poll result stops
            the sampling loop.
        interval (float): seconds to sleep between samples.
            Defaults to ``0.1``.
        stream (file-like): destination for sample lines.
            Defaults to ``sys.stdout``.
    """
    interval = interval or 0.1
    stream = stream or sys.stdout

    line_count = 0
    while True:
        # NOTE(review): psutil.cpu_percent() reports system-wide usage,
        # not just `proc`'s — presumably intentional, matching
        # memory_profiler's approach; confirm if per-process is wanted.
        cpu_usage = psutil.cpu_percent()
        timestamp = time.time()
        # Same format as memory_profiler
        stream.write("CPU {:.1f} {:.4f}\n".format(cpu_usage, timestamp))
        time.sleep(interval)
        line_count += 1
        # flush every 50 lines. Make 'tail -f' usable on profile file
        if line_count > 50:
            line_count = 0
            stream.flush()
        if proc.poll() is not None:
            break
    # BUGFIX: flush remaining buffered samples on exit; previously up to
    # 50 trailing lines could be lost when `stream` was a file.
    stream.flush()
|
gaybro8777/klio | exec/src/klio_exec/options.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import click
#####
# options for `klioexec run`
#####
def config_file(func):
    """Decorator attaching the shared ``--config-file`` option."""
    option = click.option(
        "-c",
        "--config-file",
        type=click.Path(exists=True, resolve_path=True, dir_okay=False),
        help=(
            "Path to config filename. Defaults to `klio-job.yaml` in the "
            "current working directory. "
        ),
    )
    return option(func)
def blocking(func):
    """Decorator attaching the ``--blocking/--no-blocking`` flag."""
    option = click.option(
        "--blocking/--no-blocking",
        default=None,
        is_flag=True,
        help="Wait for Dataflow job to finish before returning",
    )
    return option(func)
#####
# options for `klioexec profile`
#####
def input_file(func):
    """Decorator attaching the ``--input-file`` option.

    Mutually exclusive with the entity-ID click arguments.
    """
    option = click.option(
        "-i",
        "--input-file",
        type=click.Path(exists=True, dir_okay=False, readable=True),
        help=(
            "File of entity IDs (separated by a new line character) with "
            "which to profile a Klio job."
        ),
        required=False,
    )
    return option(func)
def output_file(func):
    """Decorator attaching the ``--output-file`` option."""
    option = click.option(
        "-o",
        "--output-file",
        type=click.Path(exists=False, dir_okay=False, writable=True),
        default=None,
        show_default="stdout",
        help="Output file for results.",
    )
    return option(func)
|
gaybro8777/klio | cli/tests/commands/image/test_build.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_core import config
from klio_cli.commands.image import build as build_image
# A non-default config file gets its basename appended to the image tag.
@pytest.mark.parametrize(
    "conf_file,exp_image_tag",
    (
        (None, "v1"),
        ("klio-job2.yaml", "v1-klio-job2.yaml"),
        ("bar/klio-job2.yaml", "v1-klio-job2.yaml"),
    ),
)
def test_build(conf_file, exp_image_tag, mocker, monkeypatch):
    """`build` wires up docker checks and builds the configured image."""
    mock_docker = mocker.Mock()
    mock_client = mocker.Mock()
    mock_docker.from_env.return_value = mock_client
    monkeypatch.setattr(build_image, "docker", mock_docker)
    mock_docker_utils = mocker.Mock()
    monkeypatch.setattr(build_image, "docker_utils", mock_docker_utils)
    # Minimal valid KlioConfig payload; only worker_harness_container_image
    # is consumed by the assertions below.
    mock_config = {
        "job_name": "test-job",
        "version": 1,
        "pipeline_options": {
            "worker_harness_container_image": "gcr.register.io/squad/feature"
        },
        "job_config": {
            "inputs": [
                {
                    "topic": "foo-topic",
                    "subscription": "foo-sub",
                    "data_location": "foo-input-location",
                }
            ],
            "outputs": [
                {
                    "topic": "foo-topic-output",
                    "data_location": "foo-output-location",
                }
            ],
        },
    }
    conf_obj = config.KlioConfig(mock_config)
    job_dir = "jerbs"
    image_tag = "v1"
    build_image.build(job_dir, conf_obj, conf_file, image_tag)
    mock_docker.from_env.assert_called_once_with()
    mock_docker_utils.check_docker_connection.assert_called_once_with(
        mock_client
    )
    mock_docker_utils.check_dockerfile_present.assert_called_once_with(job_dir)
    mock_docker_utils.build_docker_image.assert_called_once_with(
        job_dir,
        conf_obj.pipeline_options.worker_harness_container_image,
        exp_image_tag,
        conf_file,
    )
|
gaybro8777/klio | cli/tests/utils/test_stackdriver_utils.py | <filename>cli/tests/utils/test_stackdriver_utils.py
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import collections
import pytest
from klio_cli.utils import stackdriver_utils as sd_utils
# Lightweight stand-in for the Stackdriver group objects returned by the
# monitoring API; only the fields these tests read.
stackdriver_group = collections.namedtuple(
    "StackdriverGroup", "name display_name"
)
@pytest.fixture
def sd_client(mocker, monkeypatch):
    # Replace the GroupServiceClient class and hand back the instance
    # mock the code under test will receive.
    client_cls = mocker.Mock()
    monkeypatch.setattr(sd_utils.monitoring, "GroupServiceClient", client_cls)
    return client_cls.return_value
@pytest.fixture
def groups():
    # Three fake Stackdriver groups with predictable names/display names.
    name_tpl = "projects/test-gcp-project/groups/123456789{}"
    display_tpl = "test-job-name-{}-test-region-klio-dashboard"
    return [
        stackdriver_group(
            name=name_tpl.format(i), display_name=display_tpl.format(i)
        )
        for i in range(3)
    ]
def test_generate_group_meta():
    """generate_group_meta builds the project path and display name."""
    project_path, display_name = sd_utils.generate_group_meta("a", "b", "c")
    assert project_path == "projects/a"
    assert display_name == "b-c-klio-dashboard"
@pytest.mark.parametrize(
    "job_name,exp_ret", [("test-job-name-1", True), ("test-job-name-4", False)]
)
def test_get_stackdriver_group_url(sd_client, groups, job_name, exp_ret):
    """A dashboard URL is returned only when a matching group exists."""
    sd_client.list_groups.return_value = groups
    actual_url = sd_utils.get_stackdriver_group_url(
        "test-gcp-project", job_name, "test-region"
    )
    if exp_ret:
        expected_url = (
            "https://app.google.stackdriver.com/groups/1234567891/"
            "test-job-name-1-test-region-klio-dashboard?"
            "project=test-gcp-project"
        )
    else:
        expected_url = None
    assert expected_url == actual_url
    sd_client.list_groups.assert_called_once_with(
        request={"name": "projects/test-gcp-project"}
    )
def test_get_stackdriver_group_url_raises(sd_client):
    """Errors from the monitoring API propagate to the caller."""
    sd_client.list_groups.side_effect = Exception("fuuuuu")
    with pytest.raises(Exception):
        sd_utils.get_stackdriver_group_url(
            "test-gcp-project", "test-job-name-1", "test-region"
        )
def test_create_stackdriver_group(sd_client, groups, caplog):
    """Creating a group returns its dashboard URL and logs one record."""
    sd_client.create_group.return_value = groups[0]
    actual_url = sd_utils.create_stackdriver_group(
        "test-gcp-project", "test-job-name-0", "test-region"
    )
    expected_url = (
        "https://app.google.stackdriver.com/groups/1234567890/"
        "test-job-name-0-test-region-klio-dashboard?"
        "project=test-gcp-project"
    )
    assert expected_url == actual_url
    expected_group = {
        "display_name": "test-job-name-0-test-region-klio-dashboard",
        "filter": "resource.metadata.name=starts_with(test-job-name-0)",
    }
    sd_client.create_group.assert_called_once_with(
        request={"name": "projects/test-gcp-project", "group": expected_group}
    )
    assert len(caplog.records) == 1
def test_create_stackdriver_group_errors(sd_client, caplog):
    """Creation failures are swallowed, logged once, and return None."""
    sd_client.create_group.side_effect = Exception("fuuuu")
    actual_url = sd_utils.create_stackdriver_group(
        "test-gcp-project", "test-job-name-0", "test-region"
    )
    assert not actual_url
    assert len(caplog.records) == 1
@pytest.mark.parametrize(
    "job_name,side_effect,exp_log_level",
    (
        ("test-job-name-1", None, "INFO"),
        ("test-job-name-1", Exception("meow"), "ERROR"),
        ("kittehs", None, "WARNING"),
    ),
)
def test_delete_stackdriver_group(
    job_name, side_effect, exp_log_level, groups, sd_client, caplog
):
    """Deletion logs exactly one record at the appropriate level.

    Success -> INFO, API errors -> ERROR, no matching group -> WARNING.
    """
    sd_client.list_groups.return_value = groups
    sd_client.list_groups.side_effect = side_effect
    sd_utils.delete_stackdriver_group(
        "test-gcp-project", job_name, "test-region"
    )
    assert len(caplog.records) == 1
    assert caplog.records[0].levelname == exp_log_level
|
gaybro8777/klio | lib/src/klio/metrics/logger.py | <reponame>gaybro8777/klio<filename>lib/src/klio/metrics/logger.py<gh_stars>100-1000
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Klio ships with a default :class:`klio.metrics.base.AbstractRelayClient`
implementation, which outputs metrics via the standard library ``logging``
module through the :class:`MetricsLoggerClient` below.
This implementation is used by default if no other metrics consumers are
configured. It must be explicitly turned off.
The default configuration in ``klio-info.yaml`` can be overwritten:
.. code-block:: yaml
job_config:
metrics:
logger:
# Logged metrics are emitted at the `debug` level by default.
level: info
# Default timer unit is ns/nanoseconds; available
# options include `s` or `seconds`, `ms` or `milliseconds`,
# `us` or `microseconds`, and `ns` or `nanoseconds`.
timer_unit: s
To turn off logging-based metrics:
.. code-block:: yaml
job_config
metrics:
logger: false
"""
import logging
import threading
from klio.metrics import base
# Long-form unit names accepted in config, paired with their shorthand.
_TIMER_UNIT_PAIRS = (
    ("nanoseconds", "ns"),
    ("microseconds", "us"),
    ("milliseconds", "ms"),
    ("seconds", "s"),
)
# Both the long form and the shorthand itself map to the shorthand.
TIMER_UNIT_MAP = {long_name: short for long_name, short in _TIMER_UNIT_PAIRS}
TIMER_UNIT_MAP.update((short, short) for _, short in _TIMER_UNIT_PAIRS)
"""Map of supported measurement units to shorthand for :class:`LoggerTimer`.
"""
class MetricsLoggerClient(base.AbstractRelayClient):
    """Logging client for transform metrics.

    Intended to be instantiated by
    :class:`klio.metrics.client.MetricsRegistry` and not by itself.

    Args:
        klio_config (klio_core.config.KlioConfig): the job's configuration.
        disabled (bool): whether or not to disable the Python ``logger``
            Default: ``False``.
    """

    RELAY_CLIENT_NAME = "logger"
    DEFAULT_LEVEL = logging.DEBUG
    DEFAULT_TIME_UNIT = "ns"

    # Per-thread storage so each thread lazily builds its own logger ref.
    _thread_local = threading.local()

    def __init__(self, klio_config, disabled=False):
        super(MetricsLoggerClient, self).__init__(klio_config)
        self.logger_config = self.klio_config.job_config.metrics.get(
            "logger", {}
        )
        self.disabled = disabled
        self.log_level = self._set_log_level()
        self.timer_unit = self._set_timer_unit()

    def _set_log_level(self):
        # Fall back to the class default unless config names a known level.
        level = MetricsLoggerClient.DEFAULT_LEVEL
        if isinstance(self.logger_config, dict):
            configured = self.logger_config.get("level")
            if configured:
                level = getattr(logging, configured.upper(), level)
        return level

    def _set_timer_unit(self):
        # Normalize the configured unit (e.g. "seconds" -> "s").
        unit = MetricsLoggerClient.DEFAULT_TIME_UNIT
        if isinstance(self.logger_config, dict):
            configured = self.logger_config.get("timer_unit")
            if configured:
                unit = TIMER_UNIT_MAP.get(configured, unit)
        return unit

    @property
    def logger(self):
        """Python logger associated with the job which this client will use
        to emit metrics.
        """
        cached = getattr(self._thread_local, "klio_metrics_logger", None)
        if cached is None:
            new_logger = logging.getLogger("klio.metrics")
            new_logger.disabled = self.disabled
            self._thread_local.klio_metrics_logger = new_logger
        return self._thread_local.klio_metrics_logger

    def unmarshal(self, metric):
        """Return a dict-representation of a given metric.

        Args:
            metric (LoggerMetric): logger-specific metrics object
        Returns:
            dict(str, str): metric data
        """
        return dict(
            name=metric.name,
            value=metric.value,
            transform=metric.transform,
            tags=metric.tags,
        )

    def emit(self, metric):
        """Log a given metric.

        Args:
            metric (LoggerMetric): logger-specific metrics object
        """
        metric_data = self.unmarshal(metric)
        message = metric.DEFAULT_LOG_FORMAT.format(**metric_data)
        self.logger.log(self.log_level, message)

    def counter(self, name, value=0, transform=None, tags=None, **kwargs):
        """Create a :class:`LoggerCounter` object.

        Args:
            name (str): name of counter
            value (int): starting value of counter; defaults to 0
            transform (str): transform the counter is associated with
            tags (dict): any tags of additional contextual information
                to associate with the counter
        Returns:
            LoggerCounter: a log-based counter
        """
        return LoggerCounter(
            name=name, value=value, transform=transform, tags=tags
        )

    def gauge(self, name, value=0, transform=None, tags=None, **kwargs):
        """Create a :class:`LoggerGauge` object.

        Args:
            name (str): name of gauge
            value (int): starting value of gauge; defaults to 0
            transform (str): transform the gauge is associated with
            tags (dict): any tags of additional contextual information
                to associate with the gauge
        Returns:
            LoggerGauge: a log-based gauge
        """
        return LoggerGauge(
            name=name, value=value, transform=transform, tags=tags
        )

    def timer(
        self,
        name,
        value=0,
        transform=None,
        tags=None,
        timer_unit=None,
        **kwargs
    ):
        """Create a :class:`LoggerTimer` object.

        Args:
            name (str): name of timer
            value (int): starting value of timer; defaults to 0
            transform (str): transform the timer is associated with
            tags (dict): any tags of additional contextual information
                to associate with the timer
            timer_unit (str): timer unit; defaults to configured value
                in `klio-job.yaml`, or "ns". See module-level docs of
                `klio.metrics.logger` for supported values.
        Returns:
            LoggerTimer: a log-based timer
        """
        # An unrecognized explicit unit silently falls back to the
        # client-wide default rather than erroring out.
        unit = self.timer_unit
        if timer_unit:
            unit = TIMER_UNIT_MAP.get(timer_unit, self.timer_unit)
        return LoggerTimer(
            name=name,
            value=value,
            transform=transform,
            tags=tags,
            timer_unit=unit,
        )
class LoggerMetric(base.BaseMetric):
    """Base metric type for loggers.

    Args:
        name (str): name of metric
        value (int): initial value. Default: ``0``.
        transform (str): Name of transform associated with metric, if any.
        tags (dict): Tags to associate with metric.
    """

    # Subclasses must override with a dict; it is merged into instance tags.
    LOGGER_METRIC_TAGS = None
    DEFAULT_LOG_FORMAT = (
        "[{name}] value: {value} transform: '{transform}' tags: {tags}"
    )

    def __init__(self, name, value=0, transform=None, tags=None):
        super(LoggerMetric, self).__init__(
            name, value=value, transform=transform
        )
        # Keep the caller's dict object (not a copy) and merge in the
        # class-level tags for this metric type.
        self.tags = tags or {}
        self.tags.update(self.LOGGER_METRIC_TAGS)
class LoggerCounter(LoggerMetric):
    """Log-based counter metric.

    Args:
        name (str): name of counter
        value (int): initial value. Default: ``0``.
        transform (str): Name of transform associated with counter, if any.
        tags (dict): Tags to associate with counter. Note:
            ``{"metric_type": "counter"}`` will always be an included tag.
    """

    # Merged into every counter instance's tags by LoggerMetric.__init__.
    LOGGER_METRIC_TAGS = {"metric_type": "counter"}
class LoggerGauge(LoggerMetric):
    """Log-based gauge metric.

    Args:
        name (str): name of gauge
        value (int): initial value. Default: ``0``.
        transform (str): Name of transform associated with gauge, if any.
        tags (dict): Tags to associate with gauge. Note:
            ``{"metric_type": "gauge"}`` will always be an included tag.
    """

    # Merged into every gauge instance's tags by LoggerMetric.__init__.
    LOGGER_METRIC_TAGS = {"metric_type": "gauge"}
class LoggerTimer(LoggerMetric):
    """Log-based timer metric.

    Args:
        name (str): name of timer
        value (int): initial value. Default: ``0``.
        transform (str): Name of transform associated with timer, if any.
        tags (dict): Tags to associate with timer. Note:
            ``{"metric_type": "timer"}`` will always be an included tag.
        timer_unit (str): Unit of measurement. Options: :attr:`TIMER_UNIT_MAP`.
            Default: ``ns`` (nanoseconds).
    """

    LOGGER_METRIC_TAGS = {"metric_type": "timer"}

    def __init__(
        self, name, value=0, transform=None, tags=None, timer_unit="ns"
    ):
        super(LoggerTimer, self).__init__(
            name, value=value, transform=transform, tags=tags
        )
        # BUGFIX: previously the *class-level* LOGGER_METRIC_TAGS dict was
        # mutated with this instance's unit, leaking one timer's unit into
        # shared state for all timers (and racing across threads). Tag the
        # instance's own dict instead; resulting instance tags are the same.
        self.tags["unit"] = timer_unit
        self.timer_unit = timer_unit
|
gaybro8777/klio | integration/audio-spectrograms/integration_test.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# To be run after `klio job run --direct-runner` (not within job container)
import os
import unittest
import yaml
from google.cloud import storage
from klio_core import config
HERE = os.path.abspath(os.path.join(os.path.abspath(__file__), os.path.pardir))
EXPECTED_LOGS = os.path.join(HERE, "expected_job_output.txt")
ACTUAL_LOGS = os.path.join(HERE, "job_output.log")
BATCH_IDS = os.path.join(HERE, "batch_track_ids.txt")
class TestExpectedOutput(unittest.TestCase):
    """Verify the job's logged output and the spectrogram files it wrote to GCS."""

    @classmethod
    def _load_klio_config(cls):
        # Parse the job's klio-job.yaml (next to this file) into a KlioConfig.
        config_file_path = os.path.join(os.path.dirname(__file__), "klio-job.yaml")
        with open(config_file_path) as f:
            return config.KlioConfig(yaml.safe_load(f))

    @classmethod
    def setUpClass(self):
        # NOTE(review): conventionally this parameter is named ``cls``;
        # ``self`` here is actually the class object. The local ``config``
        # also shadows the imported ``klio_core.config`` module below.
        config = self._load_klio_config()
        self.gcs_bucket, self.gcs_object_path = os.path.split(config.job_config.data.inputs[0].location)
        # Keep only the bucket name from the gs:// location.
        self.gcs_bucket = self.gcs_bucket.split("/")[-1]
        self.project = config.pipeline_options.project
        self.client = storage.Client(project=self.project)
        # Object prefix: the output location minus the ``gs://bucket/`` part.
        self.output_dir = "/".join(config.job_config.data.outputs[0].location.split("/")[3:])
        with open(EXPECTED_LOGS, "r") as f:
            self.expected_logs = f.readlines()
        if not os.path.exists(ACTUAL_LOGS):
            # tox deletes the file after the test is done so that tests
            # don't pass accidentally from a previously successful run/
            # cached results
            raise Exception(
                "The job's output does not exist. Rerun the job to produce "
                "the required output."
            )
        with open(ACTUAL_LOGS, "r") as f:
            self.actual_logs = f.readlines()
        with open(BATCH_IDS, "r") as f:
            self.batch_ids = f.readlines()
        # Each input ID is expected to produce three plot files in GCS.
        suffixes = ["full.png", "background.png", "foreground.png"]
        self.exp_files = ["-".join([i.strip(), s]) for i in self.batch_ids for s in suffixes]

    def test_expected_output(self):
        """Logged job output matches the expected-log fixture."""
        # sort them since the order of some parts of the pipeline are not
        # deterministic
        self.assertEqual(sorted(self.expected_logs), sorted(self.actual_logs))

    def _exists(self, expected_blob):
        # True iff the blob exists under the configured output prefix.
        bucket = self.client.lookup_bucket(self.gcs_bucket)
        if not bucket:
            return False
        object_path = os.path.join(self.output_dir, expected_blob)
        blob = bucket.get_blob(object_path)
        return blob is not None

    def test_expected_gcs_files(self):
        """Every expected spectrogram file was uploaded to GCS."""
        for exp_file in self.exp_files:
            exists = self._exists(exp_file)
            self.assertTrue(exists, msg=f"{exp_file} does not exist")

    @classmethod
    def tearDownClass(self):
        # Remove generated blobs so a rerun starts from a clean bucket.
        bucket = self.client.get_bucket(self.gcs_bucket)
        for exp_file in self.exp_files:
            object_path = os.path.join(self.output_dir, exp_file)
            bucket.delete_blob(object_path)
# Allow running this module directly, outside of a test runner.
if __name__ == '__main__':
    unittest.main()
|
gaybro8777/klio | lib/tests/unit/transforms/test_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio.transforms import _helpers
from klio.transforms import _utils
class BaseKlassMeta(object):
    # NOTE(review): ``__metaclass__`` is Python 2 syntax and has no effect on
    # Python 3 — here it is just a plain class attribute. If the metaclass is
    # meant to apply, the Python 3 form is
    # ``class BaseKlassMeta(metaclass=_helpers._KlioBaseDataExistenceCheck)``;
    # confirm whether these tests rely on it being inert.
    __metaclass__ = _helpers._KlioBaseDataExistenceCheck
class KlassMeta(BaseKlassMeta):
    """Fixture class providing a ``process`` and a non-``process`` method."""

    def process(self):
        pass

    def not_process(self):
        pass
@pytest.mark.parametrize(
    "clsdict,bases,expected",
    (
        (
            {"process": KlassMeta.process},
            (_helpers._KlioBaseDataExistenceCheck,),
            True,
        ),
        (
            {"process": KlassMeta.not_process},
            (_helpers._KlioBaseDataExistenceCheck,),
            False,
        ),
        (
            {"process": "not a callable"},
            (_helpers._KlioBaseDataExistenceCheck,),
            False,
        ),
        ({"process": KlassMeta.process}, (BaseKlassMeta,), False),
        ({}, (BaseKlassMeta,), False),
    ),
)
def test_is_original_process_func(clsdict, bases, expected):
    """Exercise ``_utils.is_original_process_func`` across combinations of
    class dicts and base tuples; only a genuine ``process`` method on a
    direct subclass of the named base class should return True."""
    actual = _utils.is_original_process_func(
        clsdict, bases, base_class="_KlioBaseDataExistenceCheck"
    )
    assert expected == actual
|
gaybro8777/klio | examples/catvdog/test_transform.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_core import config
from klio_core.proto import klio_pb2
import transforms
@pytest.fixture(autouse=True)
def mock_config(mocker, monkeypatch):
    """Patch a mocked ``KlioConfig`` onto the CatVDog transform for every test."""
    config_mock = mocker.Mock(autospec=config.KlioConfig)
    monkeypatch.setattr(
        transforms.CatVDog, "_klio.config", config_mock, raising=False
    )
    return config_mock
@pytest.fixture(autouse=True)
def mock_gcs_client(mocker, monkeypatch):
    """Replace the GCS I/O client so no test touches real storage."""
    gcs_mock = mocker.Mock()
    monkeypatch.setattr(transforms.gcsio, "GcsIO", gcs_mock)
    return gcs_mock
@pytest.fixture
def mock_model(mocker, monkeypatch):
    """Stub out ``tf.keras.models.load_model`` and return the stub."""
    model_mock = mocker.Mock()
    monkeypatch.setattr(transforms.tf.keras.models, "load_model", model_mock)
    return model_mock
@pytest.fixture
def klio_msg():
    """Return a serialized v2 KlioMessage whose element is ``b"1234"``."""
    message = klio_pb2.KlioMessage()
    message.version = klio_pb2.Version.V2
    message.data.element = b"1234"
    return message.SerializeToString()
@pytest.mark.parametrize("prediction,exp_folder", ((0, "cat"), (1, "dog")))
def test_process(
    prediction, exp_folder, klio_msg, mock_model, mocker, monkeypatch
):
    """End-to-end check of ``CatVDog.process`` with download/load/predict/
    upload all mocked: prediction 0 routes to "cat", 1 to "dog"."""
    mock_model.return_value.predict_classes.return_value = [[prediction]]
    mock_download_image = mocker.Mock()
    mock_download_image.return_value.name = "/tmp/tmp_abcd.jpg"
    monkeypatch.setattr(
        transforms.CatVDog, "download_image", mock_download_image
    )
    mock_load_image = mocker.Mock()
    monkeypatch.setattr(transforms.CatVDog, "load_image", mock_load_image)
    mock_upload_image = mocker.Mock()
    monkeypatch.setattr(transforms.CatVDog, "upload_image", mock_upload_image)
    dofn_inst = transforms.CatVDog()
    dofn_inst.setup()
    # process() is a generator; pull the single yielded message.
    ret_data = next(dofn_inst.process(klio_msg))
    # Filename is derived from the message element (b"1234").
    filename = "1234.jpg"
    mock_download_image.assert_called_once_with(filename)
    mock_load_image.assert_called_once_with(
        mock_download_image.return_value.name
    )
    mock_model.return_value.predict_classes.assert_called_once_with(
        mock_load_image.return_value
    )
    mock_upload_image.assert_called_once_with(
        mock_download_image.return_value, exp_folder, filename
    )
    # The original serialized message is passed through unchanged.
    assert klio_msg == ret_data
|
gaybro8777/klio | examples/catvdog/run.py | <gh_stars>100-1000
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import apache_beam as beam
from klio.transforms import helpers
import transforms
def run(input_pcol, config):
    """Build the job's transform graph.

    Runs the Beam transforms that happen after a message is consumed
    from Pub/Sub and before one is published downstream.

    Args:
        input_pcol: PCollection returned from ``beam.io.ReadFromPubSub``.
        config (klio.KlioConfig): Job configuration from ``klio-job.yaml``.

    Returns:
        The PCollection handed to ``beam.io.WriteToPubSub``.
    """
    # Tag messages by whether their output already exists.
    checked = input_pcol | beam.ParDo(
        transforms.CatVDogOutputCheck()
    ).with_outputs()
    # Messages whose output exists may still be reprocessed when forced.
    force_filtered = checked.found | helpers.KlioFilterForce()
    candidates = (
        (checked.not_found, force_filtered.process) | beam.Flatten()
    )
    # Only messages whose input data exists get processed.
    input_exists = candidates | helpers.KlioGcsCheckInputExists()
    return input_exists.found | beam.ParDo(transforms.CatVDog())
|
gaybro8777/klio | cli/tests/commands/job/test_stop.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from googleapiclient import errors as gerrors
from klio_cli.commands.job import stop as stop_job
@pytest.fixture
def stop_job_inst():
    """Provide a fresh ``StopJob`` instance per test."""
    instance = stop_job.StopJob()
    return instance
@pytest.fixture
def mock_discovery_client(mocker, monkeypatch):
    """Replace the googleapiclient discovery builder with a Mock client."""
    client = mocker.Mock()

    def fake_build(service, version):
        return client

    monkeypatch.setattr(stop_job.discovery, "build", fake_build)
    return client
@pytest.fixture
def job():
    """Minimal Dataflow job description shared by the tests below."""
    return dict(
        id="1234",
        name="test-job",
        projectId="test-project",
        location="europe-west1",
    )
@pytest.fixture
def mock_sleep(mocker, monkeypatch):
    """Stub out ``time.sleep`` so retry/backoff paths run instantly."""
    sleep_mock = mocker.Mock()
    monkeypatch.setattr(stop_job.time, "sleep", sleep_mock)
    return sleep_mock
@pytest.mark.parametrize("api_version", (None, "v1b3", "v2"))
def test_set_dataflow_client(
    mock_discovery_client, api_version, stop_job_inst
):
    """``_set_dataflow_client`` installs a discovery client for any API version."""
    assert stop_job_inst._client is not None
    stop_job_inst._set_dataflow_client(api_version)
    assert stop_job_inst._client is not None
    assert mock_discovery_client == stop_job_inst._client
    # cleanup
    setattr(stop_job_inst, "_client", None)
@pytest.mark.parametrize(
    "jobs_response, result_is_none",
    (
        (
            {
                "jobs": [
                    {
                        "id": "1234",
                        "name": "test-job",
                        "projectId": "test-project",
                        "location": "europe-west1",
                    }
                ]
            },
            False,
        ),
        (
            {
                "jobs": [
                    {
                        "id": "1234",
                        "name": "not-this-job",
                        "projectId": "test-project",
                        "location": "europe-west1",
                    }
                ]
            },
            True,
        ),
        ({"jobs": []}, True),
    ),
)
def test_check_job_running(
    mock_discovery_client,
    jobs_response,
    result_is_none,
    monkeypatch,
    stop_job_inst,
    job,
):
    """``_check_job_running`` returns the matching active job dict, or None
    when no listed job has the requested name."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    # Drill into the discovery client's chained request builders.
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.list
    req.return_value.execute.return_value = jobs_response
    ret = stop_job_inst._check_job_running(
        job["name"], job["projectId"], job["location"]
    )
    exp_ret_val = jobs_response["jobs"][0] if not result_is_none else None
    assert exp_ret_val == ret
    req.assert_called_once_with(
        projectId="test-project", location="europe-west1", filter="ACTIVE"
    )
    req.return_value.execute.assert_called_once_with()
def test_check_job_running_errors(
    mock_discovery_client, monkeypatch, caplog, stop_job_inst, job
):
    """An API error while listing jobs is logged and does not raise."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.list
    req.return_value.execute.side_effect = Exception("foo")
    stop_job_inst._check_job_running(
        job["name"], job["projectId"], job["location"]
    )
    req.assert_called_once_with(
        projectId="test-project", location="europe-west1", filter="ACTIVE"
    )
    req.return_value.execute.assert_called_once_with()
    # One record for the failure, one for the follow-up message.
    assert 2 == len(caplog.records)
@pytest.mark.parametrize(
    "state,pyver", (("drain", None), ("cancel", None), (None, 2), (None, 3))
)
def test_update_job_state(
    state, pyver, mock_discovery_client, job, monkeypatch, stop_job_inst
):
    """``_update_job_state`` requests the given state, falling back to the
    mapped default when no explicit state is passed."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    exp_state = state
    if not state:
        if pyver == 2:
            exp_state = "JOB_STATE_DRAINED"
        else:
            exp_state = "JOB_STATE_CANCELLED"
        monkeypatch.setitem(stop_job.JOB_STATE_MAP, "default", exp_state)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    req.return_value.execute.return_value = None
    stop_job_inst._update_job_state(job, state)
    # The job dict is sent as the request body with the requested state set.
    job["requestedState"] = exp_state
    req.assert_called_once_with(
        jobId="1234",
        projectId="test-project",
        location="europe-west1",
        body=job,
    )
    req.return_value.execute.assert_called_once_with()
def test_update_job_state_400_error(
    mock_discovery_client,
    job,
    mock_sleep,
    mocker,
    monkeypatch,
    caplog,
    stop_job_inst,
):
    """A 4xx HTTP error exits immediately — no retries, no sleeping."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    mock_resp = mocker.Mock(status=400)
    req.return_value.execute.side_effect = gerrors.HttpError(mock_resp, b"foo")
    with pytest.raises(SystemExit):
        stop_job_inst._update_job_state(job, "drain")
    assert 1 == req.return_value.execute.call_count
    assert 1 == len(caplog.records)
    assert not mock_sleep.call_count
def test_update_job_state_500_error(
    mock_discovery_client,
    job,
    mock_sleep,
    mocker,
    monkeypatch,
    caplog,
    stop_job_inst,
):
    """A 5xx HTTP error retries (4 attempts, 3 sleeps) before exiting."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    mock_resp = mocker.Mock(status=500)
    req.return_value.execute.side_effect = gerrors.HttpError(mock_resp, b"foo")
    with pytest.raises(SystemExit):
        stop_job_inst._update_job_state(job, "drain")
    assert 4 == req.return_value.execute.call_count
    assert 4 == len(caplog.records)
    assert 3 == mock_sleep.call_count
def test_update_job_state_error(
    mock_discovery_client, job, mock_sleep, monkeypatch, caplog, stop_job_inst
):
    """A non-HTTP exception is also retried (4 attempts) before exiting."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.update
    req.return_value.execute.side_effect = Exception("foo")
    with pytest.raises(SystemExit):
        stop_job_inst._update_job_state(job, "cancel")
    assert 4 == req.return_value.execute.call_count
    assert 4 == len(caplog.records)
    assert 3 == mock_sleep.call_count
@pytest.mark.parametrize(
    "exec_side_effect",
    (
        (
            {"currentState": "JOB_STATE_CANCELLING"},
            {"currentState": "JOB_STATE_CANCELLED"},
        ),
        (Exception("foo"), {"currentState": "JOB_STATE_CANCELLED"}),
    ),
)
def test_watch_job_state(
    mock_discovery_client,
    mock_sleep,
    monkeypatch,
    caplog,
    job,
    exec_side_effect,
    stop_job_inst,
):
    """``_watch_job_state`` polls until a terminal state, tolerating an
    intermediate in-progress state or a transient exception."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    _projects_req = mock_discovery_client.projects.return_value
    req = _projects_req.locations.return_value.jobs.return_value.get
    req.return_value.execute.side_effect = exec_side_effect
    stop_job_inst._watch_job_state(job)
    # Two polls, separated by one 5-second sleep.
    assert 2 == req.return_value.execute.call_count
    mock_sleep.assert_called_once_with(5)
    assert 1 == len(caplog.records)
def test_watch_job_state_raises(
    mock_discovery_client, monkeypatch, caplog, job, stop_job_inst
):
    """A zero timeout makes ``_watch_job_state`` exit immediately."""
    monkeypatch.setattr(stop_job_inst, "_client", mock_discovery_client)
    with pytest.raises(SystemExit):
        stop_job_inst._watch_job_state(job, timeout=0)
    assert 1 == len(caplog.records)
def test_stop(mocker, monkeypatch, mock_discovery_client, job, stop_job_inst):
    """``stop`` wires up client setup, running-job lookup, state update,
    and state watching, in that order."""
    mock_set_dataflow_client = mocker.Mock()
    monkeypatch.setattr(
        stop_job_inst, "_set_dataflow_client", mock_set_dataflow_client
    )
    mock_check_job_running = mocker.Mock(return_value=job)
    monkeypatch.setattr(
        stop_job_inst, "_check_job_running", mock_check_job_running
    )
    mock_update_job_state = mocker.Mock()
    monkeypatch.setattr(
        stop_job_inst, "_update_job_state", mock_update_job_state
    )
    mock_watch_job_state = mocker.Mock()
    monkeypatch.setattr(
        stop_job_inst, "_watch_job_state", mock_watch_job_state
    )
    stop_job_inst.stop(
        job["name"], job["projectId"], job["location"], "cancel"
    )
    mock_set_dataflow_client.assert_called_once_with(None)
    mock_check_job_running.assert_called_once_with(
        "test-job", "test-project", "europe-west1"
    )
    mock_update_job_state.assert_called_once_with(job, req_state="cancel")
    mock_watch_job_state.assert_called_once_with(job)
def test_stop_no_running_job(
    mocker, monkeypatch, mock_discovery_client, stop_job_inst, job
):
    """When no running job is found, ``stop`` neither updates nor watches state."""
    mock_set_dataflow_client = mocker.Mock()
    monkeypatch.setattr(
        stop_job_inst, "_set_dataflow_client", mock_set_dataflow_client
    )
    mock_check_job_running = mocker.Mock(return_value=None)
    monkeypatch.setattr(
        stop_job_inst, "_check_job_running", mock_check_job_running
    )
    mock_update_job_state = mocker.Mock()
    monkeypatch.setattr(
        stop_job_inst, "_update_job_state", mock_update_job_state
    )
    mock_watch_job_state = mocker.Mock()
    monkeypatch.setattr(
        stop_job_inst, "_watch_job_state", mock_watch_job_state
    )
    stop_job_inst.stop(
        job["name"], job["projectId"], job["location"], "cancel"
    )
    mock_set_dataflow_client.assert_called_once_with(None)
    mock_check_job_running.assert_called_once_with(
        "test-job", "test-project", "europe-west1"
    )
    mock_update_job_state.assert_not_called()
    mock_watch_job_state.assert_not_called()
|
gaybro8777/klio | cli/src/klio_cli/commands/message/publish.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import functools
import logging
import emoji
from google.api_core import exceptions as gapi_exceptions
from google.cloud import pubsub
from klio_core.proto.v1beta1 import klio_pb2
def _create_publisher(topic):
    """Return a callable that publishes messages to ``topic``.

    Verifies the topic exists up front and exits with a friendly error
    message if it does not. Any other error from the Pub/Sub client
    propagates as-is (the previous ``except Exception: raise`` clause was
    a no-op and has been removed).

    Args:
        topic (str): fully-qualified Pub/Sub topic name.

    Raises:
        SystemExit: if the topic does not exist.
    """
    client = pubsub.PublisherClient()
    try:
        client.get_topic(request={"topic": topic})
    except gapi_exceptions.NotFound:
        msg = (
            ":persevere: Topic '{}' not found. Is there a running job "
            "subscribed to this topic? or is there a typo in the configured "
            "topic?".format(topic)
        )
        logging.error(emoji.emojize(msg, use_aliases=True))
        raise SystemExit(1)
    return functools.partial(client.publish, topic=topic)
def _get_current_klio_job(config):
    """Build a ``KlioJob`` proto identifying the currently-configured job."""
    current_job = klio_pb2.KlioJob()
    current_job.job_name = config.job_name
    current_job.gcp_project = config.pipeline_options.project
    return current_job
# [batch dev] TODO: rename entity_id variable in this module
def _create_pubsub_message(entity_id, job, force, ping, top_down, msg_version):
    """Serialize a KlioMessage carrying ``entity_id``.

    Args:
        entity_id (str): ID to embed as the message payload.
        job: KlioJob proto identifying the publishing job.
        force (bool): value for the message's ``force`` metadata flag.
        ping (bool): value for the message's ``ping`` metadata flag.
        top_down (bool): if False, restrict recipients to ``job``;
            otherwise address the message to any job.
        msg_version (int): 1 for v1-style messages, 2 for v2.

    Returns:
        bytes: the serialized KlioMessage.
    """
    kmsg = klio_pb2.KlioMessage()
    kmsg.version = msg_version
    if msg_version == 1:
        # v1: payload lives in ``entity_id``; non-top-down targeting uses
        # the ``downstream`` list.
        kmsg.data.entity_id = entity_id
        if not top_down:
            kmsg.metadata.downstream.extend([job])
    elif msg_version == 2:
        # v2: payload is raw bytes; targeting uses ``intended_recipients``.
        kmsg.data.element = bytes(entity_id, "utf-8")
        if not top_down:
            kmsg.metadata.intended_recipients.limited.recipients.extend([job])
        else:
            kmsg.metadata.intended_recipients.anyone.SetInParent()
    # ping/force flags apply to both message versions.
    kmsg.metadata.ping = ping
    kmsg.metadata.force = force
    return kmsg.SerializeToString()
def _publish_messages(
    config, entity_ids, force, ping, top_down, allow_non_klio, msg_version
):
    """Publish one message per entity ID to the job's input topic.

    NOTE: the ``force``/``ping`` parameters were previously declared in the
    opposite order from how ``publish_messages`` passes them positionally,
    with a second compensating swap at the ``_create_pubsub_message`` call.
    The names now match the actual values flowing through; runtime behavior
    is unchanged (this helper's only caller passes positionally).

    Args:
        config: KlioConfig-like object for the target job.
        entity_ids (list(str)): IDs to publish, one message each.
        force (bool): set each message's ``force`` flag.
        ping (bool): set each message's ``ping`` flag.
        top_down (bool): address messages to any job rather than only this one.
        allow_non_klio (bool): publish raw ID bytes instead of KlioMessages.
        msg_version (int): Klio message version (1 or 2).

    Returns:
        tuple(list, list): entity IDs that published successfully, and
        those that failed.
    """
    current_job = _get_current_klio_job(config)
    publish = _create_publisher(config.job_config.events.inputs[0].topic)
    success_ids = []
    fail_ids = []
    for entity_id in entity_ids:
        if not allow_non_klio:
            message = _create_pubsub_message(
                entity_id, current_job, force, ping, top_down, msg_version
            )
        else:
            # TODO: should rename argument to something more abstract (@lynn)
            message = bytes(entity_id.encode("utf-8"))
        try:
            publish(data=message)
            success_ids.append(entity_id)
        except Exception as e:
            msg = "Failed to publish message for entity '%s': %s" % (
                entity_id,
                e,
            )
            logging.warning(msg)
            fail_ids.append(entity_id)
    return success_ids, fail_ids
def publish_messages(
    config,
    entity_ids,
    force=False,
    ping=False,
    top_down=False,
    allow_non_klio=False,
    msg_version=None,
):
    """Publish Klio (or raw) messages for ``entity_ids`` to the job's input topic.

    Args:
        config: KlioConfig-like object for the target job.
        entity_ids (list(str)): IDs to publish, one message each.
        force (bool): set each message's ``force`` flag.
        ping (bool): set each message's ``ping`` flag.
        top_down (bool): address messages to any job rather than only this one.
        allow_non_klio (bool): publish raw ID bytes instead of KlioMessages.
        msg_version (int): Klio message version; defaults to ``config.version``.

    Raises:
        SystemExit: if the job has no configured event input topics.
    """
    # [batch dev] TODO: if we use KlioConfig, we don't have to find the
    # version
    # [batch dev] maintaining backwards compatibility if this code path
    # is executed by some other way other than via cli.py::publish.
    if msg_version is None:
        msg_version = config.version
    if not config.job_config.events.inputs:
        msg = "No input topics configured for {} :-1:".format(config.job_name)
        logging.error(emoji.emojize(msg, use_aliases=True))
        raise SystemExit(1)
    logging.info(
        "Publishing {} messages to {}'s input topic {}".format(
            len(entity_ids),
            config.job_name,
            # [batch dev] should we support multiple inputs in the future?
            config.job_config.events.inputs[0].topic,
        )
    )
    # NOTE(review): these arguments are positional — verify their order
    # stays in sync with ``_publish_messages``'s parameter list.
    success, fail = _publish_messages(
        config, entity_ids, force, ping, top_down, allow_non_klio, msg_version
    )
    if success:
        msg = ":boom: Successfully published {} messages.".format(len(success))
        logging.info(emoji.emojize(msg, use_aliases=True))
    if fail:
        msg = (
            ":persevere: Failed to publish the following {} entity "
            "IDs: {}".format(len(fail), ", ".join(fail))
        )
        logging.warning(emoji.emojize(msg, use_aliases=True))
|
gaybro8777/klio | integration/read-bq-write-bq/it/before.py | <gh_stars>100-1000
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Setup for integration test
* Rewrites klio-job.yaml so that the input and output BigQuery tables have
names unique to the run
* Creates BigQuery tables that need to exist before the test run starts.
"""
import os
import apache_beam as beam
import yaml
import common
def _append_build_id(base_name):
"""Returns base_name with BQ-friendly `GITHUB_SHA` appended"""
build_id = os.environ.get("GITHUB_SHA", None)
if not build_id:
raise Exception("Unable to get build id; env var GITHUB_SHA not set")
# valid BQ table names only allow underscores and alphanumeric chars
# https://cloud.google.com/bigquery/docs/tables#table_naming
table_name = "{}_{}".format(base_name, build_id.replace("-", "_"))
return table_name
def rewrite_klio_config_yaml():
    """Rewrite `klio-job.yaml` with tablenames that have the GITHUB_SHA appended"""
    job_dir = os.path.join(os.path.dirname(__file__), "..")
    cfg_path = os.path.join(job_dir, "klio-job.yaml")
    backup_path = os.path.join(job_dir, "klio-job.yaml.save")

    with open(cfg_path) as cfg_file:
        cfg = yaml.safe_load(cfg_file)

    # save the original so it can be restored after the test run
    with open(backup_path, "w") as backup_file:
        backup_file.write(yaml.safe_dump(cfg))

    events = cfg["job_config"]["events"]
    events["inputs"][0]["table"] = _append_build_id(
        events["inputs"][0]["table"]
    )
    events["outputs"][0]["table"] = _append_build_id(
        events["outputs"][0]["table"]
    )

    with open(cfg_path, "w") as out_file:
        out_file.write(yaml.safe_dump(cfg))
def populate_bigquery_table():
    """Create & populate the input table configured as the event input in
    klio-job.yaml.

    Must run before ``klio job run`` so the pipeline has rows to read.
    """
    # Single-column schema: one entity_id string per row.
    table_schema = {"fields": [{
        'name': 'entity_id', 'type': 'STRING', 'mode': 'NULLABLE'
    }]}
    klio_cfg = common.get_config()
    input_table_cfg = klio_cfg.job_config.events.inputs[0]
    # The BigQuery sink expects "project:dataset.table".
    table_name = "{}:{}.{}".format(input_table_cfg.project,
                                   input_table_cfg.dataset,
                                   input_table_cfg.table)
    with beam.Pipeline() as p:
        def create_record(v):
            # Wrap each entity ID in a row dict matching the schema.
            return {
                'entity_id': v,
            }
        record_ids = p | 'CreateIDs' >> beam.Create(common.entity_ids)
        records = record_ids | 'CreateRecords' >> beam.Map(lambda x: create_record(x))
        # WRITE_TRUNCATE keeps reruns idempotent; the table is (re)created
        # if needed.
        records | 'write' >> beam.io.WriteToBigQuery(
            table_name,
            schema=table_schema,
            create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
            write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE)
# Rewrite the config first so the populated table name matches it.
if __name__ == '__main__':
    rewrite_klio_config_yaml()
    populate_bigquery_table()
|
gaybro8777/klio | audio/src/klio_audio/transforms/_base.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import apache_beam as beam
import matplotlib.pyplot as plt
from klio.transforms import decorators as tfm_decorators
from klio_audio import decorators
class KlioAudioDoFnMetaclass(type):
    """Enforce behavior upon subclasses of `KlioAudioBaseDoFn`."""

    def __call__(self, *args, **kwargs):
        # automatically wrap DoFn in a beam.ParDo so folks can just do
        # `pcoll | SomeAudioTransform()` rather than
        # `pcoll | beam.ParDo(SomeAudioTransform())`
        # (``self`` here is the class being instantiated, since this is a
        # metaclass ``__call__``.)
        return beam.ParDo(
            super(KlioAudioDoFnMetaclass, self).__call__(*args, **kwargs)
        )
class KlioAudioBaseDoFn(beam.DoFn, metaclass=KlioAudioDoFnMetaclass):
    """Base class for klio-audio DoFns; instantiation is auto-wrapped in
    ``beam.ParDo`` by the metaclass."""
    pass
class KlioPlotBaseDoFn(KlioAudioBaseDoFn):
    """Base DoFn for plot-producing audio transforms.

    Subclasses override ``_plot`` (and optionally ``DEFAULT_TITLE``);
    ``process`` yields a matplotlib Figure per message.
    """

    DEFAULT_TITLE = ""

    def __init__(self, *_, title=None, **plot_args):
        # ``ax`` is reserved: this base class owns the figure lifecycle.
        if "ax" in plot_args:
            raise RuntimeError(
                "Invalid keyword `ax`: Specifying the plot's axes is not "
                "supported."
            )
        # ``{element}`` in the title is filled per-message in ``process``.
        self.title = title or self.DEFAULT_TITLE
        self.plot_args = plot_args

    def _plot(self, *args, **kwargs):
        # Hook for subclasses; draws onto the figure created in ``process``.
        pass

    @tfm_decorators._handle_klio
    @decorators.handle_binary(load_with_numpy=True)
    def process(self, item):
        # ``item`` is the decorator-unwrapped payload; its element names
        # the entity being plotted.
        element = item.element.decode("utf-8")
        title = self.title.format(element=element)
        self._klio.logger.debug("Generating plot '{}'".format(title))
        fig = plt.figure()
        fig.suptitle(title)
        self._plot(item, fig)
        yield fig
|
gaybro8777/klio | cli/tests/commands/job/test_create.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import os
import click
import mock
import pytest
import yaml
from klio_cli.commands.job import create
HERE = os.path.abspath(os.path.join(os.path.abspath(__file__), os.path.pardir))
FIXTURE_PATH = os.path.join(HERE, "utils", "fixtures")
@pytest.fixture
def job():
    """Provide a fresh ``CreateJob`` instance per test."""
    create_job = create.CreateJob()
    return create_job
# pytest prevents monkeypatching datetime directly
class MockDatetime(datetime.datetime):
    """``datetime.datetime`` subclass whose ``now()`` is pinned to 2019-01-01."""

    @classmethod
    def now(cls):
        return datetime.datetime(2019, 1, 1)
def test_get_environment(job):
    """The Jinja environment exposes exactly the expected template files."""
    env = job._get_environment()
    expected_templates = [
        "MANIFEST.in.tpl",
        "README.md.tpl",
        "dockerfile.tpl",
        "init.py.tpl",
        "job-requirements.txt.tpl",
        "klio-job-batch.yaml.tpl",
        "klio-job.yaml.tpl",
        "run.py.tpl",
        "setup.py.tpl",
        "test_transforms.py.tpl",
        "transforms-batch.py.tpl",
        "transforms.py.tpl",
    ]
    assert expected_templates == sorted(env.list_templates())
def test_create_job_directory(tmpdir, job):
    """``_create_job_directory`` creates the target directory."""
    output_base_dir = tmpdir.mkdir("testing")
    output_dir = os.path.join(str(output_base_dir), "test_job")
    job._create_job_directory(output_dir)
    assert os.path.exists(output_dir)
@pytest.mark.parametrize("error_code", (1, 17))
def test_create_job_directory_raises(monkeypatch, tmpdir, error_code, job):
    """OSErrors from mkdir re-raise, except EEXIST (errno 17) which is ignored."""
    output_base_dir = tmpdir.mkdir("testing")
    output_dir = os.path.join(str(output_base_dir), "test_job")
    def mock_mkdir(*args, **kwargs):
        raise OSError(error_code, "some error message", output_dir)
    monkeypatch.setattr(create.os, "mkdir", mock_mkdir)
    if error_code != 17:
        with pytest.raises(OSError) as e:
            job._create_job_directory(output_dir)
        assert e.match("some error message")
    else:
        # should not re-raise if file already exists
        job._create_job_directory(output_dir)
def test_write_template(tmpdir, job):
    """``_write_template`` writes the rendered data to the named file."""
    data = u"this is some test data"
    output_dir = tmpdir.mkdir("testing")
    output_file = "foo.txt"
    job._write_template(output_dir.strpath, output_file, data)
    ret_file = output_dir.join(output_file)
    ret_contents = ret_file.read()
    assert data == ret_contents
@pytest.fixture
def batch_job_context():
    """Job-options context for a batch job (file/GCS events, no topics)."""
    return {
        "inputs": [
            {
                "event_location": "test-job_input_elements.txt",
                "data_location": "test-job-input",
            }
        ],
        "outputs": [
            {
                "event_location": "test-job_output_elements",
                "data_location": "test-job-output",
            }
        ],
    }
@pytest.fixture
def context():
    """Full streaming-job template context, including a parent-job dependency."""
    base_gcs = "gs://test-gcp-project-dataflow-tmp/test-job"
    gcr_url = "gcr.io/test-gcp-project/test-job-worker"
    return {
        "job_name": "test-job",
        "job_type": "streaming",
        "python_version": "3.6",
        "pipeline_options": {
            "project": "test-gcp-project",
            "region": "europe-west1",
            "worker_harness_container_image": gcr_url,
            "experiments": ["beam_fn_api"],
            "staging_location": base_gcs + "/staging",
            "temp_location": base_gcs + "/temp",
            "num_workers": 2,
            "max_num_workers": 2,
            "autoscaling_algorithm": "NONE",
            "disk_size_gb": 32,
            "worker_machine_type": "n1-standard-2",
        },
        "job_options": {
            "inputs": [
                {
                    "topic": (
                        "projects/test-parent-gcp-project/topics/"
                        "test-parent-job-output"
                    ),
                    "subscription": (
                        "projects/test-gcp-project/subscriptions/"
                        "test-parent-job-output-test-job"
                    ),
                    "data_location": (
                        "gs://test-parent-gcp-project-output/test-parent-job"
                    ),
                }
            ],
            "outputs": [
                {
                    "topic": "projects/test-gcp-project/topics/test-job-output",
                    "data_location": "gs://test-gcp-project-output/test-job",
                }
            ],
            "dependencies": [
                {
                    "job_name": "test-parent-job",
                    "gcp_project": "test-parent-gcp-project",
                    "input_topics": [
                        (
                            "projects/test-grandparent-gcp-project/topics/"
                            "test-grandparent-job-output"
                        )
                    ],
                    "region": "us-central1",
                }
            ],
        },
    }
@pytest.fixture
def default_context():
    """Template context matching the defaults (no FnAPI, no dependencies)."""
    base_gcs = "gs://test-gcp-project-dataflow-tmp/test-job"
    gcr_url = "gcr.io/test-gcp-project/test-job-worker"
    return {
        "job_name": "test-job",
        "job_type": "streaming",
        "python_version": "3.6",
        "use_fnapi": False,
        "create_resources": False,
        "pipeline_options": {
            "project": "test-gcp-project",
            "region": "europe-west1",
            "worker_harness_container_image": gcr_url,
            "experiments": [],
            "staging_location": base_gcs + "/staging",
            "temp_location": base_gcs + "/temp",
            "num_workers": 2,
            "max_num_workers": 2,
            "autoscaling_algorithm": "NONE",
            "disk_size_gb": 32,
            "worker_machine_type": "n1-standard-2",
        },
        "job_options": {
            "inputs": [
                {
                    "topic": (
                        "projects/test-gcp-project/topics/test-job-input"
                    ),
                    "subscription": (
                        "projects/test-gcp-project/subscriptions/"
                        "test-job-input-test-job"
                    ),
                    "data_location": ("gs://test-gcp-project-input/test-job"),
                }
            ],
            "outputs": [
                {
                    "topic": "projects/test-gcp-project/topics/test-job-output",
                    "data_location": "gs://test-gcp-project-output/test-job",
                }
            ],
            "dependencies": [],
        },
    }
@pytest.mark.parametrize("job_type", ("batch", "streaming"))
@pytest.mark.parametrize("use_fnapi", (True, False))
def test_create_job_config(
    use_fnapi, context, batch_job_context, tmpdir, monkeypatch, job, job_type
):
    """Rendered klio-job.yaml matches the expected fixture for every
    combination of job type and FnAPI setting."""
    output_dir = (
        tmpdir.mkdir("testing")
        .mkdir("jobs")
        .mkdir("test-job-{}".format(job_type))
    )
    env = job._get_environment()
    if job_type == "batch":
        context["job_options"] = batch_job_context
    context["use_fnapi"] = use_fnapi
    context["job_type"] = job_type
    if not use_fnapi:
        # Without FnAPI there should be no Beam experiments configured.
        monkeypatch.setitem(context["pipeline_options"], "experiments", [])
    job._create_job_config(env, context, output_dir.strpath)
    expected_fixtures = os.path.join(FIXTURE_PATH, "expected")
    is_fnapi_dir = "fnapi" if use_fnapi else "no_fnapi"
    expected_fixtures = os.path.join(expected_fixtures, is_fnapi_dir)
    if job_type == "batch":
        fixture = os.path.join(expected_fixtures, "klio-job-batch.yaml")
    else:
        fixture = os.path.join(expected_fixtures, "klio-job.yaml")
    with open(fixture, "r") as f:
        expected = yaml.safe_load(f)
    ret_file = output_dir.join("klio-job.yaml")
    ret_contents = yaml.safe_load(ret_file.read())
    assert expected == ret_contents
@pytest.mark.parametrize("job_type", ("batch", "streaming"))
def test_create_python_files(tmpdir, mocker, job, job_type):
    """__init__.py, run.py, and transforms.py render to the expected fixtures."""
    output_dir = tmpdir.mkdir("testing").mkdir("jobs").mkdir("test_job")
    env = job._get_environment()
    # Freeze datetime so generated content is deterministic — presumably a
    # timestamp in a template; confirm against the job templates.
    dt_patch = "klio_cli.commands.job.create.datetime.datetime"
    with mock.patch(dt_patch, MockDatetime):
        job._create_python_files(env, "test_job", job_type, output_dir.strpath)
    ret_init_file = output_dir.join("__init__.py")
    ret_init_contents = ret_init_file.read()
    ret_run_file = output_dir.join("run.py")
    ret_run_contents = ret_run_file.read()
    ret_transforms_file = output_dir.join("transforms.py")
    ret_transforms_contents = ret_transforms_file.read()
    expected_fixtures = os.path.join(FIXTURE_PATH, "expected")
    init_fixture = os.path.join(expected_fixtures, "__init__.py")
    run_fixture = os.path.join(expected_fixtures, "run.py")
    if job_type == "batch":
        transforms_fixture = os.path.join(
            expected_fixtures, "transforms-batch.py"
        )
    else:
        transforms_fixture = os.path.join(expected_fixtures, "transforms.py")
    with open(init_fixture, "r") as f:
        expected_init = f.read()
    with open(run_fixture, "r") as f:
        expected_run = f.read()
    with open(transforms_fixture, "r") as f:
        expected_transforms = f.read()
    # Rendered files lack the fixtures' trailing newline.
    assert expected_init == ret_init_contents + "\n"
    assert expected_run == ret_run_contents + "\n"
    assert expected_transforms == ret_transforms_contents + "\n"
def test_create_no_fnapi_files(tmpdir, job):
    """Generated MANIFEST.in and setup.py match the no-FnAPI fixtures."""
    out_dir = tmpdir.mkdir("testing").mkdir("jobs").mkdir("test_job")
    env = job._get_environment()
    job._create_no_fnapi_files(
        env, {"use_fnapi": False, "package_name": "test-job"}, out_dir.strpath
    )
    fixture_dir = os.path.join(FIXTURE_PATH, "expected", "no_fnapi")
    for name in ("MANIFEST.in", "setup.py"):
        with open(os.path.join(fixture_dir, name), "r") as f:
            expected = f.read()
        # Rendered files lack the fixtures' trailing newline.
        assert expected == out_dir.join(name).read() + "\n"
@pytest.mark.parametrize("use_fnapi", (True, False))
def test_create_reqs_file(use_fnapi, tmpdir, job):
    """job-requirements.txt renders identically for FnAPI and non-FnAPI jobs."""
    out_dir = tmpdir.mkdir("testing").mkdir("jobs").mkdir("test_job")
    env = job._get_environment()
    job._create_reqs_file(env, {"use_fnapi": use_fnapi}, out_dir.strpath)
    fixture = os.path.join(FIXTURE_PATH, "expected", "job-requirements.txt")
    with open(fixture, "r") as f:
        expected = f.read()
    # Rendered file lacks the fixture's trailing newline.
    rendered = out_dir.join("job-requirements.txt").read()
    assert expected == rendered + "\n"
@pytest.mark.parametrize("use_fnapi", (True, False))
def test_create_dockerfile(use_fnapi, tmpdir, job):
    """Generated Dockerfile matches the fnapi/no_fnapi fixture."""
    out_dir = tmpdir.mkdir("testing").mkdir("jobs").mkdir("test_job")
    env = job._get_environment()
    context = {
        "pipeline_options": {
            "worker_harness_container_image": "gcr.io/foo/bar",
            "project": "test-gcp-project",
        },
        "python_version": "3.6",
        "use_fnapi": use_fnapi,
        "create_resources": False,
    }
    job._create_dockerfile(env, context, out_dir.strpath)
    variant = "fnapi" if use_fnapi else "no_fnapi"
    fixture = os.path.join(FIXTURE_PATH, "expected", variant, "Dockerfile")
    with open(fixture, "r") as f:
        expected = f.read()
    assert expected == out_dir.join("Dockerfile").read()
def test_create_readme(tmpdir, job):
    """README.md is rendered from the template using the job name."""
    out_dir = tmpdir.mkdir("testing").mkdir("test_job")
    job._create_readme(
        job._get_environment(), {"job_name": "test-job"}, out_dir.strpath
    )
    fixture = os.path.join(FIXTURE_PATH, "expected", "README.md")
    with open(fixture, "r") as f:
        expected = f.read()
    assert expected == out_dir.join("README.md").read()
def test_validate_worker_image(job):
    """Any image value currently passes validation (callable returns falsy)."""
    ret = job._validate_worker_image("foo")
    assert not ret
def test_validate_region(job):
    """A known GCP region is returned unchanged."""
    region = "us-central1"
    assert region == job._validate_region(region)
def test_validate_region_raises(job):
    """An unknown region raises click.BadParameter naming the bad value."""
    bad_region = "not-a-region"
    with pytest.raises(click.BadParameter) as exc_info:
        job._validate_region(bad_region)
    # match() anchors at the start of the exception message.
    assert exc_info.match(
        '"{}" is not a valid region. Available: '.format(bad_region)
    )
@pytest.mark.parametrize(
    "input_version,exp_output_version",
    (
        ("3.6", "3.6"),
        ("3.6.1", "3.6"),
        ("3.7", "3.7"),
        ("3.7.1", "3.7"),
        ("3.8", "3.8"),
        ("3.8.1", "3.8"),
    ),
)
def test_parse_python_version(input_version, exp_output_version, job):
    """Version strings are normalized down to major.minor."""
    assert exp_output_version == job._parse_python_version(input_version)
@pytest.mark.parametrize(
    "input_version,exp_msg",
    (
        ("2", "Klio no longer supports Python 2.7"),
        ("2.7", "Klio no longer supports Python 2.7"),
        ("3", "Invalid Python version given"),
        ("3.3", "Invalid Python version given"),
        ("3.6.7.8", "Invalid Python version given"),
    ),
)
def test_parse_python_version_raises(input_version, exp_msg, job):
    """Python 2 and malformed/unsupported versions raise click.BadParameter."""
    # only matching the start of the error message
    with pytest.raises(click.BadParameter, match=exp_msg):
        job._parse_python_version(input_version)
def test_get_context_from_defaults(default_context, job):
    """With only name + project supplied, defaults fill in everything else."""
    minimal_context = {
        "job_name": "test-job",
        "gcp_project": "test-gcp-project",
    }
    ret_context, ret_create_dockerfile = job._get_context_from_defaults(
        minimal_context
    )
    # The returned context omits job_name (tracked separately by the caller).
    default_context.pop("job_name")
    assert default_context == ret_context
    assert ret_create_dockerfile
@pytest.fixture
def context_overrides():
    """Flat kwargs (the shape Click passes) overriding every default value."""
    return {
        "job_name": "test-job",
        "job_type": "streaming",
        "gcp_project": "test-gcp-project",
        "worker_image": "gcr.io/foo/bar",
        # Comma-separated on input; expected to be split into a list.
        "experiments": "beam_fn_api,another_experiment",
        "region": "us-central1",
        "staging_location": "gs://a-different/staging/location",
        "temp_location": "gs://a-different/temp/location",
        "num_workers": 1000000,
        "max_num_workers": 1000000,
        "autoscaling_algorithm": "THROUGHPUT_BASED",
        "disk_size_gb": 1000000,
        "input_topic": "projects/test-gcp-projects/topics/another-topic",
        "output_topic": "a-different-topic",
        "input_data_location": "gs://test-parent-gcp-project/test-parent-job",
        "output_data_location": "bq://somewhere/over/the/rainbow",
        "dependencies": [
            {"job_name": "parent-job", "gcp_project": "parent-gcp-project"}
        ],
        "python_version": "3.7",
        # "y"/"n" strings mimic raw user answers; expected to become booleans.
        "use_fnapi": "n",
        "create_resources": "n",
    }
@pytest.fixture
def expected_overrides():
    """Nested context expected after the overrides above are processed."""
    return {
        "pipeline_options": {
            "project": "test-gcp-project",
            "worker_harness_container_image": "gcr.io/foo/bar",
            "experiments": ["beam_fn_api", "another_experiment"],
            "region": "us-central1",
            "staging_location": "gs://a-different/staging/location",
            "temp_location": "gs://a-different/temp/location",
            "num_workers": 1000000,
            "max_num_workers": 1000000,
            "autoscaling_algorithm": "THROUGHPUT_BASED",
            "disk_size_gb": 1000000,
            "worker_machine_type": "n4-highmem-l33t",
        },
        "job_options": {
            "inputs": [
                {
                    "topic": "projects/test-gcp-projects/topics/another-topic",
                    # Subscription name is derived from topic + job name.
                    "subscription": (
                        "projects/test-gcp-project/subscriptions/"
                        "another-topic-test-job"
                    ),
                    "data_location": (
                        "gs://test-parent-gcp-project/test-parent-job"
                    ),
                }
            ],
            "outputs": [
                {
                    "topic": "a-different-topic",
                    "data_location": "bq://somewhere/over/the/rainbow",
                }
            ],
            "dependencies": [
                {"job_name": "parent-job", "gcp_project": "parent-gcp-project"}
            ],
        },
        "python_version": "3.7",
        "use_fnapi": False,
        "create_resources": False,
        "job_type": "streaming",
    }
def test_get_context_from_defaults_overrides(
    context_overrides, expected_overrides, job
):
    """User-supplied kwargs win over every default value."""
    # FYI: Click will pass in kwargs as a flat dict
    context_overrides["worker_machine_type"] = "n4-highmem-l33t"
    ret_context, ret_create_dockerfile = job._get_context_from_defaults(
        context_overrides
    )
    assert expected_overrides == ret_context
    # A worker image was provided, so no Dockerfile should be generated.
    assert not ret_create_dockerfile
@pytest.mark.parametrize(
    "inputs,confirmation, regions,expected_dependencies",
    (
        (
            ["my-job", "my-proj", "my-topic", "europe-west1"],
            [False],
            ["europe-west1"],
            [
                {
                    "job_name": "my-job",
                    "gcp_project": "my-proj",
                    "input_topics": ["my-topic"],
                    "region": "europe-west1",
                }
            ],
        ), # full input
        (
            [
                # mock confirm 1
                "my-job",
                "my-proj",
                "my-topic",
                "europe-west1",
                # mock confirm 2
                "my-job2",
                "my-proj",
                "my-topic2",
                "europe-west1",
            ],
            [True, False],
            ["europe-west1", "europe-west1"],
            [
                {
                    "job_name": "my-job",
                    "gcp_project": "my-proj",
                    "input_topics": ["my-topic"],
                    "region": "europe-west1",
                },
                {
                    "job_name": "my-job2",
                    "gcp_project": "my-proj",
                    "input_topics": ["my-topic2"],
                    "region": "europe-west1",
                },
            ],
        ), # multiple full inputs
        (
            ["my-job", "my-proj", "", ""],
            [False],
            [],
            [{"job_name": "my-job", "gcp_project": "my-proj"}],
        ), # use default inputs for input_topics and region
        (
            ["my-job", "my-proj", "my-topic", ""],
            [False],
            [],
            [
                {
                    "job_name": "my-job",
                    "gcp_project": "my-proj",
                    "input_topics": ["my-topic"],
                }
            ],
        ), # use defaults for region
        (
            ["my-job", "my-proj", "", "europe-west1"],
            [False],
            ["europe-west1"],
            [
                {
                    "job_name": "my-job",
                    "gcp_project": "my-proj",
                    "region": "europe-west1",
                }
            ],
        ), # use defaults for input topics
    ),
)
def test_get_dependencies_from_user_inputs(
    mocker,
    mock_prompt,
    mock_confirm,
    inputs,
    confirmation,
    regions,
    expected_dependencies,
    job,
):
    """Prompt answers are collected into upstream-dependency dicts.

    ``inputs`` supplies click.prompt answers — four per dependency (job name,
    GCP project, input topic, region); an empty string means "use default"
    and the key is omitted from the result.  ``confirmation`` answers the
    "add another dependency?" click.confirm calls.  ``regions`` lists the
    non-empty regions expected to be validated, in order.
    """
    mock_prompt.side_effect = inputs
    mock_confirm.side_effect = confirmation
    mock_validate_region = mocker.patch.object(job, "_validate_region")
    actual_dependencies = job._get_dependencies_from_user_inputs()
    assert expected_dependencies == actual_dependencies
    # Only explicitly-entered regions should be validated.
    expected_validate_region_calls = [mocker.call(r) for r in regions]
    assert (
        expected_validate_region_calls == mock_validate_region.call_args_list
    )
@pytest.fixture
def mock_prompt(mocker):
    """Patch click.prompt as seen by the create module."""
    patched_prompt = mocker.patch.object(create.click, "prompt")
    return patched_prompt
@pytest.fixture
def mock_confirm(mocker):
    """Patch click.confirm as seen by the create module."""
    patched_confirm = mocker.patch.object(create.click, "confirm")
    return patched_confirm
def test_get_context_from_user_inputs(
    context, mock_prompt, mock_confirm, mocker, job,
):
    """Interactive prompts build the full job context.

    ``prompt_side_effect`` holds one answer per click.prompt call, in the
    exact order the command prompts for them; ``confirm_side_effect``
    answers the "add a dependency?" confirmations (one yes, then stop).
    Fix over previous revision: removed a stale commented-out assertion
    that duplicated the call_args_list check below.
    """
    # mimicking user inputs for each prompt
    prompt_side_effect = [
        "streaming",
        "europe-west1",
        "Y",
        "n",
        ["beam_fn_api"],
        2,
        2,
        "NONE",
        32,
        "n1-standard-2",
        "",
        "3.6",
        "gs://test-gcp-project-dataflow-tmp/test-job/staging",
        "gs://test-gcp-project-dataflow-tmp/test-job/temp",
        "projects/test-parent-gcp-project/topics/test-parent-job-output",
        "projects/test-gcp-project/topics/test-job-output",
        "gs://test-parent-gcp-project-output/test-parent-job",
        "gs://test-gcp-project-output/test-job",
        # <-- mock_confirm side effect 1 -->
        "test-parent-job",
        "test-parent-gcp-project",
        (
            "projects/test-grandparent-gcp-project/topics/test-grandparent-"
            "job-output"
        ),
        "us-central1",
        # <-- mock_confirm side effect 2-->
    ]
    confirm_side_effect = [True, False]
    mock_prompt.side_effect = prompt_side_effect
    mock_confirm.side_effect = confirm_side_effect
    user_input_context = {
        "job_name": "test-job",
        "gcp_project": "test-gcp-project",
    }
    mock_validate_region = mocker.patch.object(job, "_validate_region")
    mock_validate_worker_image = mocker.patch.object(
        job, "_validate_worker_image"
    )
    ret_context, ret_dockerfile = job._get_context_from_user_inputs(
        user_input_context
    )
    # Every queued answer must have been consumed — no more, no fewer.
    assert len(prompt_side_effect) == mock_prompt.call_count
    assert len(confirm_side_effect) == mock_confirm.call_count
    # Both the job's region and the dependency's region are validated.
    exp_calls = [mocker.call("europe-west1"), mocker.call("us-central1")]
    assert exp_calls == mock_validate_region.call_args_list
    assert 2 == mock_validate_region.call_count
    gcr_url = "gcr.io/test-gcp-project/test-job-worker"
    mock_validate_worker_image.assert_called_once_with(gcr_url)
    context.pop("job_name")
    context["pipeline_options"].pop("project")
    context["use_fnapi"] = True
    context["create_resources"] = False
    assert context == ret_context
    # No worker image was given, so a Dockerfile should be generated.
    assert ret_dockerfile
def test_get_context_from_user_inputs_no_prompts(
    mocker,
    context_overrides,
    expected_overrides,
    mock_prompt,
    mock_confirm,
    job,
):
    """When every value is supplied via kwargs, nothing is prompted."""
    context_overrides["machine_type"] = "n4-highmem-l33t"
    mock_validate_region = mocker.patch.object(job, "_validate_region")
    mock_validate_worker_image = mocker.patch.object(
        job, "_validate_worker_image"
    )
    ret_context, ret_dockerfile = job._get_context_from_user_inputs(
        context_overrides
    )
    expected_overrides["pipeline_options"].pop("project")
    # NOTE(review): the override sets python_version "3.7" yet "3.6" is
    # expected here — looks like this code path falls back to the default;
    # confirm against _get_context_from_user_inputs.
    expected_overrides["python_version"] = "3.6"
    assert not mock_prompt.call_count
    assert not mock_confirm.call_count
    mock_validate_region.assert_called_once_with("us-central1")
    mock_validate_worker_image.assert_called_once_with("gcr.io/foo/bar")
    # A worker image was provided, so no Dockerfile should be generated.
    assert not ret_dockerfile
    assert expected_overrides == ret_context
@pytest.mark.parametrize(
    "users_provided_dependencies,confirmed_dependencies",
    (
        (True, True),
        # (True, False), # not possible
        (False, True),
        (False, False),
    ),
)
def test_get_context_from_user_inputs_dependency_settings(
    context_overrides,
    expected_overrides,
    mock_prompt,
    mock_confirm,
    mocker,
    monkeypatch,
    users_provided_dependencies,
    confirmed_dependencies,
    job,
):
    """Dependencies are only collected after the user confirms, and an
    empty/declined collection yields an empty dependency list."""
    context_overrides["machine_type"] = "n4-highmem-l33t"
    # Remove CLI-provided dependencies so the confirm/prompt path is taken.
    saved_dependencies = context_overrides.pop("dependencies")
    mock_get_dependencies_from_user_inputs = mocker.Mock()
    if users_provided_dependencies:
        mock_get_dependencies_from_user_inputs.return_value = (
            saved_dependencies
        )
    else:
        mock_get_dependencies_from_user_inputs.return_value = None
    monkeypatch.setattr(
        job,
        "_get_dependencies_from_user_inputs",
        mock_get_dependencies_from_user_inputs,
    )
    mock_confirm.side_effect = [confirmed_dependencies]
    mock_validate_region = mocker.patch.object(job, "_validate_region")
    mock_validate_worker_image = mocker.patch.object(
        job, "_validate_worker_image"
    )
    ret_context, ret_dockerfile = job._get_context_from_user_inputs(
        context_overrides
    )
    expected_overrides["pipeline_options"].pop("project")
    # NOTE(review): override says "3.7" but "3.6" is expected — presumably
    # the default wins on this path; confirm in the implementation.
    expected_overrides["python_version"] = "3.6"
    assert not mock_prompt.call_count
    assert 1 == mock_confirm.call_count
    mock_validate_region.assert_called_once_with("us-central1")
    mock_validate_worker_image.assert_called_once_with("gcr.io/foo/bar")
    assert not ret_dockerfile
    if users_provided_dependencies:
        assert expected_overrides == ret_context
    else:
        # No dependencies gathered -> normalized to an empty list.
        expected_overrides["job_options"]["dependencies"] = []
        assert expected_overrides == ret_context
    if confirmed_dependencies:
        assert 1 == mock_get_dependencies_from_user_inputs.call_count
    else:
        assert 0 == mock_get_dependencies_from_user_inputs.call_count
def test_get_context_from_user_inputs_no_prompts_image(
    mocker,
    context_overrides,
    expected_overrides,
    mock_prompt,
    mock_confirm,
    job,
):
    """With no worker image given, only the image is prompted for, and an
    empty answer falls back to the derived GCR URL."""
    # Single prompt: user accepts the default (empty) worker image.
    mock_prompt.side_effect = [""]
    context_overrides.pop("worker_image")
    context_overrides["machine_type"] = "n4-highmem-l33t"
    mock_validate_region = mocker.patch.object(job, "_validate_region")
    mock_validate_worker_image = mocker.patch.object(
        job, "_validate_worker_image"
    )
    ret_context, ret_dockerfile = job._get_context_from_user_inputs(
        context_overrides
    )
    # Default image is derived from the GCP project and job name.
    gcr_url = "gcr.io/test-gcp-project/test-job-worker"
    exp_pipeline_opts = expected_overrides["pipeline_options"]
    exp_pipeline_opts.pop("project")
    exp_pipeline_opts["worker_harness_container_image"] = gcr_url
    assert 1 == mock_prompt.call_count
    assert not mock_confirm.call_count
    mock_validate_region.assert_called_once_with("us-central1")
    mock_validate_worker_image.assert_called_once_with(gcr_url)
    # No explicit image means klio must generate a Dockerfile.
    assert ret_dockerfile
    assert expected_overrides == ret_context
@pytest.mark.parametrize("use_defaults", (True, False))
def test_get_user_input(use_defaults, mocker, job):
    """`use_defaults` selects defaults- vs. prompt-based context building.

    Improvement over previous revision: also asserts the path NOT selected
    is never invoked, so a regression calling both builders is caught.
    """
    ret_context = {"pipeline_options": {}}
    mock_get_context_defaults = mocker.patch.object(
        job, "_get_context_from_defaults"
    )
    mock_get_context_defaults.return_value = (ret_context, True)
    mock_get_context_user = mocker.patch.object(
        job, "_get_context_from_user_inputs"
    )
    mock_get_context_user.return_value = (ret_context, True)
    input_kwargs = {
        "use_defaults": use_defaults,
        "job_name": "test-job",
        "gcp_project": "test-gcp-project",
    }
    job._get_user_input(input_kwargs)
    if use_defaults:
        mock_get_context_defaults.assert_called_once_with(input_kwargs)
        mock_get_context_user.assert_not_called()
    else:
        mock_get_context_user.assert_called_once_with(input_kwargs)
        mock_get_context_defaults.assert_not_called()
@pytest.mark.parametrize(
    "dependencies,expected_dependencies,num_warnings",
    (
        (
            (
                "job_name=parent-job",
                "gcp_project=a-project",
                "region=a-region",
                "input_topic=d",
                "input_topics=a,b,c",
            ),
            {
                "job_name": "parent-job",
                "gcp_project": "a-project",
                "region": "a-region",
                "input_topic": "d",
                "input_topics": ["a", "b", "c"],
            },
            0,
        ),
        (
            (
                "job-name=parent-job",
                "gcp-project=a-project",
                "region=a-region",
                "input-topic=d",
                "input-topics=a,b,c",
            ),
            {
                "job_name": "parent-job",
                "gcp_project": "a-project",
                "region": "a-region",
                "input_topic": "d",
                "input_topics": ["a", "b", "c"],
            },
            0,
        ),
        (
            ("job-name=parent-job", "gcp-project=a-project", "banana=1"),
            {"job_name": "parent-job", "gcp_project": "a-project"},
            1,
        ),
    ),
)
def test_parse_dependency_args(
    dependencies, expected_dependencies, num_warnings, caplog, job,
):
    """key=value pairs parse into a dict: dashes normalize to underscores,
    comma-separated topics become lists, unknown keys log a warning."""
    assert expected_dependencies == job._parse_dependency_args(dependencies)
    assert num_warnings == len(caplog.records)
@pytest.mark.parametrize(
    "unknown_args,expected",
    (
        (("--foo", "foobar"), {"foo": "foobar"}),
        (
            ("--foo", "foobar", "--bar", "barfoo"),
            {"foo": "foobar", "bar": "barfoo"},
        ),
        # Multiple values after one flag are collected into a tuple.
        (("--foo", "bar", "baz"), {"foo": ("bar", "baz")}),
        # --dependency values are parsed into dependency dicts.
        (
            ("--dependency", "job_name=parent-job", "gcp_project=a-project"),
            {
                "dependencies": [
                    {"job_name": "parent-job", "gcp_project": "a-project"}
                ]
            },
        ),
        (
            ("--dependency", "job_name=parent-job"),
            {"dependencies": [{"job_name": "parent-job"}]},
        ),
        # Unknown dependency keys are dropped entirely.
        (("--dependency", "banana=phone"), {},),
    ),
)
def test_parse_unknown_args(unknown_args, expected, job):
    """Unrecognized CLI flags are folded into a kwargs dict."""
    ret_args = job._parse_unknown_args(unknown_args)
    assert expected == ret_args
@pytest.mark.parametrize("create_dockerfile", (True, False))
@pytest.mark.parametrize("use_fnapi", (True, False))
@pytest.mark.parametrize("create_resources", (True, False))
def test_create(
    use_fnapi, create_dockerfile, create_resources, mocker, caplog, job
):
    """End-to-end orchestration of `job.create`: user input, file
    generation, and optional GCP resource creation."""
    context = {
        "job_name": "test-job",
        "use_fnapi": use_fnapi,
        "create_resources": create_resources,
        "job_type": "streaming",
    }
    mock_get_user_input = mocker.patch.object(job, "_get_user_input")
    mock_get_user_input.return_value = (context, create_dockerfile)
    mock_get_environment = mocker.patch.object(job, "_get_environment")
    mock_create_job_dir = mocker.patch.object(job, "_create_job_directory")
    mock_create_job_config = mocker.patch.object(job, "_create_job_config")
    mock_create_no_fnapi_files = mocker.patch.object(
        job, "_create_no_fnapi_files"
    )
    mock_create_python_files = mocker.patch.object(job, "_create_python_files")
    mock_create_reqs_files = mocker.patch.object(job, "_create_reqs_file")
    mock_create_dockerfile = mocker.patch.object(job, "_create_dockerfile")
    mock_create_readme = mocker.patch.object(job, "_create_readme")
    mock_create_topics = mocker.patch.object(create.gcp_setup, "create_topics")
    mock_create_buckets = mocker.patch.object(
        create.gcp_setup, "create_buckets"
    )
    mock_create_stackdriver = mocker.patch.object(
        create.gcp_setup, "create_stackdriver_dashboard"
    )
    unknown_args = ("--foo", "bar")
    known_args = {
        "job_name": "test-job",
        "gcp-project": "test-gcp-project",
    }
    output_dir = "/testing/dir"
    job.create(unknown_args, known_args, output_dir)
    # Unknown CLI args are folded into the known args before prompting.
    known_args["foo"] = "bar"
    mock_get_user_input.assert_called_once_with(known_args)
    mock_get_environment.assert_called_once_with()
    ret_env = mock_get_environment.return_value
    job_name = context["job_name"]
    # Python package names can't contain dashes.
    package_name = job_name.replace("-", "_")
    mock_create_job_dir.assert_called_once_with(output_dir)
    mock_create_job_config.assert_called_once_with(
        ret_env, context, output_dir
    )
    job_type = context.get("job_type")
    mock_create_python_files.assert_called_once_with(
        ret_env, package_name, job_type, output_dir
    )
    if use_fnapi:
        mock_create_no_fnapi_files.assert_not_called()
    else:
        mock_create_no_fnapi_files.assert_called_once_with(
            ret_env, context, output_dir
        )
    # Resources (topics/buckets/dashboard) are only created on request and
    # — per current context — only for streaming jobs.
    # NOTE(review): job_type is always "streaming" here; consider
    # parametrizing it if batch resource behavior should be covered.
    if create_resources:
        if job_type == "streaming":
            mock_create_topics.assert_called_once_with(context)
            mock_create_buckets.assert_called_once_with(context)
            mock_create_stackdriver.assert_called_once_with(context)
    else:
        mock_create_topics.assert_not_called()
        mock_create_buckets.assert_not_called()
        mock_create_stackdriver.assert_not_called()
    mock_create_reqs_files.assert_called_once_with(
        ret_env, context, output_dir
    )
    if create_dockerfile:
        mock_create_dockerfile.assert_called_once_with(
            ret_env, context, output_dir
        )
    mock_create_readme.assert_called_once_with(ret_env, context, output_dir)
    assert 1 == len(caplog.records)
def test_get_batch_user_context(job, mock_prompt):
    """With no batch kwargs given, all four locations are prompted for."""
    mock_prompt.side_effect = [
        "events-in",
        "data-in",
        "events-out",
        "data-out",
    ]
    ret_context = job._get_batch_user_input_job_context(
        {"job_name": "test-job"}
    )
    # One prompt each for event/data input and event/data output.
    assert 4 == mock_prompt.call_count
    assert {
        "inputs": [
            {"event_location": "events-in", "data_location": "data-in"}
        ],
        "outputs": [
            {"event_location": "events-out", "data_location": "data-out"}
        ],
    } == ret_context
def test_get_batch_user_context_no_prompt(job, mock_prompt):
    """Explicit batch kwargs bypass all prompting."""
    kwargs = {
        "job_name": "test-job",
        "batch_event_input": "input_ids.txt",
        "batch_event_output": "output_ids",
        "batch_data_input": "input-data",
        "batch_data_output": "output-data",
    }
    ret_context = job._get_batch_user_input_job_context(kwargs)
    mock_prompt.assert_not_called()
    expected_context = {
        "inputs": [
            {
                "event_location": "input_ids.txt",
                "data_location": "input-data",
            }
        ],
        "outputs": [
            {
                "event_location": "output_ids",
                "data_location": "output-data",
            }
        ],
    }
    assert expected_context == ret_context
def test_get_default_batch_job_context(job):
    """Default batch event/data locations are derived from the job name."""
    ret_context = job._get_default_batch_job_context({"job_name": "test-job"})
    expected = {
        "inputs": [
            {
                "event_location": "test-job_input_elements.txt",
                "data_location": "test-job-input",
            }
        ],
        "outputs": [
            {
                "event_location": "test-job_output_elements",
                "data_location": "test-job-output",
            }
        ],
    }
    assert expected == ret_context
|
gaybro8777/klio | core/tests/test_utils.py | <gh_stars>100-1000
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from unittest import mock
import pytest
from google.api_core import exceptions as gapi_exceptions
from klio_core import utils
def test_set_global():
    """set_global stores values as 'klio_global_state_<name>' attributes."""
    attr = "klio_global_state_set-a-value"
    utils.set_global("set-a-value", "a-value")
    assert "a-value" == getattr(utils, attr, None)
    # Clean up module-level state so other tests are unaffected.
    delattr(utils, attr)
def test_get_global():
    """get_global reads back a 'klio_global_state_<name>' attribute."""
    attr = "klio_global_state_get-a-value"
    setattr(utils, attr, "a-value")
    assert "a-value" == utils.get_global("get-a-value")
    # Clean up module-level state so other tests are unaffected.
    delattr(utils, attr)
@pytest.mark.parametrize("exists", (True, False))
def test_delete_global(exists):
    """delete_global removes the attribute and tolerates absence."""
    attr = "klio_global_state_delete-a-value"
    if exists:
        setattr(utils, attr, "a-value")
    utils.delete_global("delete-a-value")
    assert not getattr(utils, attr, None)
@pytest.mark.parametrize(
    "set_value,callable_init", ((True, False), (False, True), (False, False))
)
def test_get_or_initialize_global(set_value, callable_init, mocker):
    """Existing values are returned as-is; otherwise the initializer
    (a callable, or the value itself) supplies one."""
    attr = "klio_global_state_get-or-init-value"
    if set_value:
        setattr(utils, attr, "a-value")
    initializer = (
        mocker.Mock(return_value="a-value") if callable_init else "a-value"
    )
    actual = utils.get_or_initialize_global("get-or-init-value", initializer)
    assert "a-value" == actual
    if not isinstance(initializer, str):
        # A callable initializer is invoked exactly once, with no args.
        initializer.assert_called_once_with()
    delattr(utils, attr)
@pytest.fixture
def mock_publisher(mocker, monkeypatch):
    """Replace pubsub.PublisherClient with a Mock class."""
    # Named to avoid shadowing the module-level `unittest.mock` import.
    publisher_cls = mocker.Mock()
    monkeypatch.setattr(utils.pubsub, "PublisherClient", publisher_cls)
    return publisher_cls
def test_private_get_publisher(mock_publisher):
    """_get_publisher instantiates a client and creates the topic."""
    client = mock_publisher.return_value
    ret_publisher = utils._get_publisher("a-topic")
    mock_publisher.assert_called_once_with()
    client.create_topic.assert_called_once_with("a-topic")
    assert client == ret_publisher
def test_private_get_publisher_topic_exists(mock_publisher):
    """An AlreadyExists error from create_topic is swallowed."""
    client = mock_publisher.return_value
    client.create_topic.side_effect = gapi_exceptions.AlreadyExists("foo")
    assert client == utils._get_publisher("a-topic")
    mock_publisher.assert_called_once_with()
    client.create_topic.assert_called_once_with("a-topic")
def test_private_get_publisher_raises(mock_publisher):
    """Errors other than AlreadyExists propagate out of _get_publisher."""
    client = mock_publisher.return_value
    client.create_topic.side_effect = Exception("foo")
    with pytest.raises(Exception, match="foo"):
        utils._get_publisher("a-topic")
    mock_publisher.assert_called_once_with()
    client.create_topic.assert_called_once_with("a-topic")
@pytest.mark.parametrize("in_globals", (True, False))
def test_get_publisher(in_globals, mock_publisher):
    """get_publisher returns the cached client or creates and caches one."""
    client = mock_publisher.return_value
    cache_attr = "klio_global_state_publisher_a-topic"
    if in_globals:
        setattr(utils, cache_attr, client)
    assert client == utils.get_publisher("a-topic")
    # In either case the client is now cached under the topic attribute.
    delattr(utils, cache_attr)
#########################
# Config Utils tests
#########################
@pytest.fixture
def patch_os_getcwd(monkeypatch, tmpdir):
    """Point os.getcwd at a throwaway 'testing' directory; return its path."""
    testing_dir = str(tmpdir.mkdir("testing"))
    monkeypatch.setattr(os, "getcwd", lambda: testing_dir)
    return testing_dir
def test_get_config_by_path(mocker, monkeypatch):
    """get_config_by_path opens the given path (YAML parsing is stubbed)."""
    m_open = mocker.mock_open()
    mock_open = mocker.patch("klio_core.utils.open", m_open)
    monkeypatch.setattr(utils.yaml, "safe_load", mocker.Mock())
    path = "path/to/a/file"
    utils.get_config_by_path(path)
    mock_open.assert_called_once_with(path)
def test_get_config_by_path_error(mocker, monkeypatch, caplog):
    """An IOError while opening the config logs one record and exits."""
    m_open = mocker.mock_open()
    mock_open = mocker.patch("klio_core.utils.open", m_open)
    mock_open.side_effect = IOError
    with pytest.raises(SystemExit):
        utils.get_config_by_path("path/to/a/file")
    assert 1 == len(caplog.records)
#########################
# Cli/exec command tests
#########################
@pytest.mark.parametrize(
    "image",
    (
        # Dataflow base images without a python3x suffix imply Python 2.
        "dataflow.gcr.io/v1beta3/python",
        "dataflow.gcr.io/v1beta3/python-base",
        "dataflow.gcr.io/v1beta3/python-fnapi",
    ),
)
def test_warn_if_py2_job(image, patch_os_getcwd, mocker):
    """Dockerfiles based on a Python 2 Dataflow image trigger a UserWarning."""
    dockerfile = (
        '## -*- docker-image-name: "gcr.io/foo/bar" -*-\n'
        "FROM {image}:1.2.3\n"
        'LABEL maintainer "<EMAIL>"\n'
    ).format(image=image)
    m_open = mock.mock_open(read_data=dockerfile)
    mock_open = mocker.patch("klio_core.utils.open", m_open)
    m_is_file = mocker.Mock()
    m_is_file.return_value = True
    mock_is_file = mocker.patch("klio_core.utils.os.path.isfile", m_is_file)
    warn_msg = (
        "Python 2 support in Klio is deprecated. "
        "Please upgrade to Python 3.5+"
    )
    with pytest.warns(UserWarning, match=warn_msg):
        utils.warn_if_py2_job(patch_os_getcwd)
    # The Dockerfile is looked for in the given job directory.
    exp_read_file = os.path.join(patch_os_getcwd, "Dockerfile")
    mock_is_file.assert_called_once_with(exp_read_file)
    mock_open.assert_called_once_with(exp_read_file, "r")
@pytest.mark.parametrize("has_from_line", (True, False))
@pytest.mark.parametrize("file_exists", (True, False))
def test_warn_if_py2_job_no_warn(
    has_from_line, file_exists, patch_os_getcwd, mocker
):
    """No warning for Python 3 images, missing FROM lines, or no Dockerfile."""
    from_line = "\n"
    if has_from_line:
        # A python36 image is Python 3 and must not warn.
        from_line = "FROM dataflow.gcr.io/v1beta3/python36-fnapi:1.2.3\n"
    dockerfile = (
        '## -*- docker-image-name: "gcr.io/foo/bar" -*-\n'
        + from_line
        + 'LABEL maintainer "<EMAIL>"\n'
    )
    m_open = mock.mock_open(read_data=dockerfile)
    mock_open = mocker.patch("klio_core.utils.open", m_open)
    m_is_file = mocker.Mock()
    m_is_file.return_value = file_exists
    mock_is_file = mocker.patch("klio_core.utils.os.path.isfile", m_is_file)
    utils.warn_if_py2_job(patch_os_getcwd)
    exp_read_file = os.path.join(patch_os_getcwd, "Dockerfile")
    mock_is_file.assert_called_once_with(exp_read_file)
    if file_exists:
        # The file is only opened when the isfile check passes.
        mock_open.assert_called_once_with(exp_read_file, "r")
@pytest.mark.parametrize(
    "job_dir,conf_file",
    (
        (None, None),
        (None, "klio-job2.yaml"),
        ("foo/bar", None),
        ("foo/bar", "klio-job2.yaml"),
    ),
)
def test_get_config_job_dir(job_dir, conf_file, patch_os_getcwd):
    """Job dir defaults to cwd; config file defaults to klio-job.yaml."""
    exp_job_dir = patch_os_getcwd
    if job_dir:
        exp_job_dir = os.path.abspath(os.path.join(patch_os_getcwd, job_dir))
    exp_conf_file = conf_file or os.path.join(exp_job_dir, "klio-job.yaml")
    if job_dir and conf_file:
        # An explicit config file stays relative to the given job dir.
        exp_conf_file = os.path.join(job_dir, conf_file)
    ret_job_dir, ret_conf_file = utils.get_config_job_dir(job_dir, conf_file)
    assert exp_job_dir == ret_job_dir
    assert exp_conf_file == ret_conf_file
|
gaybro8777/klio | exec/src/klio_exec/commands/audit_steps/multithreaded_tf.py | <gh_stars>100-1000
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from klio_exec.commands.audit_steps import base
class MultithreadedTFUsage(base.BaseKlioAuditStep):
    """Use caution when running tensorflow in a multithreaded environment."""

    AUDIT_STEP_NAME = "multithreaded_tf"

    @property
    def _is_tensorflow_loaded(self):
        # Whether any imported module's name mentions tensorflow.
        # sys.modules iterates module names; a generator expression avoids
        # materializing an intermediate list just to feed any().
        return any("tensorflow" in module for module in sys.modules)

    @property
    def _is_job_single_threaded_per_container(self):
        # True when the job's config explicitly pins worker_threads to 1
        # via pipeline_options.experiments.
        exps = self.klio_config.pipeline_options.experiments
        return "worker_threads=1" in exps

    def after_tests(self):
        """Emit a warning if TF is loaded but worker_threads is not 1."""
        if not self._is_job_single_threaded_per_container:
            if self._is_tensorflow_loaded:
                self.emit_warning(
                    "TensorFlow usage detected within job, but "
                    "`worker_threads` is not explicitly set to 1 under "
                    "`pipeline_options.experiments` in the job's configuration "
                    "file! This can cause threading issues. Be careful."
                )


# Plugin entry point: the audit framework instantiates this step via _init.
_init = MultithreadedTFUsage
|
gaybro8777/klio | exec/tests/unit/commands/test_audit.py | <reponame>gaybro8777/klio<filename>exec/tests/unit/commands/test_audit.py<gh_stars>100-1000
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
from klio_exec.commands import audit
@pytest.fixture
def mock_pytest(mocker, monkeypatch):
    """Replace the pytest module used by the audit command with a Mock."""
    patched_pytest = mocker.Mock()
    monkeypatch.setattr(audit, "pytest", patched_pytest)
    return patched_pytest
@pytest.fixture
def mock_get_audit_steps(mocker, monkeypatch):
    """Stub audit._get_audit_steps with a Mock."""
    patched_steps = mocker.Mock()
    monkeypatch.setattr(audit, "_get_audit_steps", patched_steps)
    return patched_steps
@pytest.fixture
def mock_run_pytest(mocker, monkeypatch):
    """Stub audit._run_pytest to report that the test run did not fail."""
    patched_run = mocker.Mock(return_value=False)
    monkeypatch.setattr(audit, "_run_pytest", patched_run)
    return patched_run
@pytest.mark.parametrize(
    "exit_code,exp_pytest_failed,exp_msg,exp_kwargs",
    (
        # Non-zero exit: failed, message styled yellow.
        (1, True, "PyTest failed!\n", {"yellow": True}),
        (0, False, "PyTest passed!\n", {}),
    ),
)
def test_run_pytest(
    exit_code,
    exp_pytest_failed,
    exp_msg,
    exp_kwargs,
    mock_pytest,
    mock_terminal_writer,
    mocker,
):
    """_run_pytest maps pytest's exit code to a failed flag and a message."""
    mock_pytest.main.return_value = exit_code
    actual_pytest_failed = audit._run_pytest(mock_terminal_writer)
    assert exp_pytest_failed == actual_pytest_failed
    exp_calls = [
        mocker.call("Running tests for audit validation...\n"),
        mocker.call(exp_msg, **exp_kwargs),
    ]
    mock_terminal_writer.write.assert_has_calls(exp_calls)
def test_get_audit_steps(mock_terminal_writer, mocker, monkeypatch):
    """``_get_audit_steps`` instantiates every loaded audit-step plugin."""
    step_classes = [mocker.Mock() for _ in range(3)]
    loader = mocker.Mock(return_value=step_classes)
    monkeypatch.setattr(
        audit.plugin_utils, "load_plugins_by_namespace", loader
    )

    instances = audit._get_audit_steps("job/dir", "config", mock_terminal_writer)

    loader.assert_called_once_with("klio.plugins.audit")
    # Each returned instance is the result of calling one plugin class
    # with the job dir, config, and terminal writer.
    assert instances == [cls.return_value for cls in step_classes]
    for cls in step_classes:
        cls.assert_called_once_with("job/dir", "config", mock_terminal_writer)
@pytest.mark.parametrize("param_value", (None, "something"))
@pytest.mark.parametrize("res_parsing", (True, False))
def test_list_audit_steps(
    res_parsing, param_value, mock_terminal_writer, mocker, monkeypatch
):
    """``list_audit_steps`` prints plugins and exits only when invoked.

    The click callback must be a no-op when the flag value is falsy or
    click is doing resilient parsing (e.g. shell completion).
    """
    ctx = mocker.Mock()
    ctx.resilient_parsing = res_parsing
    print_plugins_mock = mocker.Mock()
    monkeypatch.setattr(
        audit.plugin_utils, "print_plugins", print_plugins_mock
    )

    audit.list_audit_steps(ctx, None, param_value)

    if param_value and not res_parsing:
        mock_terminal_writer.sep.assert_called_once_with(
            "=", "Installed audit steps"
        )
        print_plugins_mock.assert_called_once_with(
            "klio.plugins.audit", mock_terminal_writer
        )
        ctx.exit.assert_called_once_with()
    else:
        mock_terminal_writer.sep.assert_not_called()
        print_plugins_mock.assert_not_called()
        ctx.exit.assert_not_called()
@pytest.mark.parametrize("steps_passed", (0, 1))
@pytest.mark.parametrize("steps_warned", (0, 1))
def test_audit_steps(
    steps_passed,
    steps_warned,
    mock_terminal_writer,
    mock_get_audit_steps,
    mock_run_pytest,
    mocker,
):
    """``audit`` runs every step and summarizes passed/warned results."""

    def _make_steps(count, prefix, warned):
        # Build mock audit steps with sequentially numbered names.
        return [
            mocker.Mock(
                AUDIT_STEP_NAME="{}{}".format(prefix, idx + 1),
                warned=warned,
                errored=False,
            )
            for idx in range(count)
        ]

    passed_steps = _make_steps(steps_passed, "PassedStep", False)
    warned_steps = _make_steps(steps_warned, "WarnStep", True)
    all_steps = passed_steps + warned_steps
    mock_get_audit_steps.return_value = all_steps

    audit.audit("job/dir", "config")

    mock_get_audit_steps.assert_called_once_with(
        "job/dir", "config", mock_terminal_writer
    )
    for step in all_steps:
        step.before_tests.assert_called_once_with()
    mock_run_pytest.assert_called_once_with(mock_terminal_writer)
    # don't really care about the message strings, just that these funcs have
    # been called
    assert mock_terminal_writer.sep.call_count == 2
    assert mock_terminal_writer.write.call_count == len(passed_steps) + 2
def test_audit_steps_errored(
    mock_terminal_writer, mock_get_audit_steps, mock_run_pytest, mocker
):
    """``audit`` exits non-zero when any audit step reports an error."""
    mock_errored = []
    for i in range(2):
        # Fixed copy-paste from the warned-steps test: these mocks
        # represent *errored* steps, so name them accordingly.
        name = "ErrorStep{}".format(i + 1)
        mock_errored.append(
            mocker.Mock(AUDIT_STEP_NAME=name, warned=False, errored=True)
        )
    mock_get_audit_steps.return_value = mock_errored
    with pytest.raises(SystemExit):
        audit.audit("job/dir", "config")
    mock_get_audit_steps.assert_called_once_with(
        "job/dir", "config", mock_terminal_writer
    )
    for step in mock_errored:
        step.before_tests.assert_called_once_with()
    mock_run_pytest.assert_called_once_with(mock_terminal_writer)
    # don't really care about the message strings, just that these funcs have
    # been called
    assert 3 == mock_terminal_writer.sep.call_count
    assert 1 == mock_terminal_writer.write.call_count
def test_audit_raises(mock_get_audit_steps, mock_terminal_writer, caplog):
    """An unexpected exception during auditing exits with code 1 and logs."""
    mock_get_audit_steps.side_effect = Exception("Blah")

    with pytest.raises(SystemExit) as err_ctx:
        audit.audit("job/dir", "config")

    assert err_ctx.value.code == 1
    # don't really care about the message strings, just that these funcs have
    # been called
    assert mock_terminal_writer.write.call_count == 1
    assert len(caplog.records) == 1
|
gaybro8777/klio | lib/src/klio/metrics/stackdriver.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Klio ships with a Stackdriver Log-based Metrics relay client. The client
creates metrics objects in Stackdriver Monitoring based off of filters in
Stackdriver Logging. For more information on Stackdriver's Log-based Metrics,
see `related documentation <https://cloud.google.com/logging/docs/
logs-based-metrics/>`_.
When running on Dataflow, the client is on by default with no additional
configuration needed. This must be actively turned off in ``klio-job.yaml``
if not wanted (see below).
The log-based metrics client is not available for direct runner.
To explicitly turn off log-based metrics, in ``klio-job.yaml``:
.. code-block:: yaml
job_config:
metrics:
stackdriver_logger: false
"""
import logging
from googleapiclient import discovery
from googleapiclient import errors as gapi_errors
from klio.metrics import logger
class StackdriverLogMetricsClient(logger.MetricsLoggerClient):
    """Stackdriver client for transform metrics.

    Intended to be instantiated by :class:`klio.metrics.client.MetricsRegistry`
    and not by itself.

    Args:
        klio_config (klio_core.config.KlioConfig): the job's configuration.
    """

    # Name under which this relay client is registered.
    RELAY_CLIENT_NAME = "stackdriver_logger"

    def __init__(self, klio_config):
        super(StackdriverLogMetricsClient, self).__init__(klio_config)
        self.job_name = klio_config.job_name

    @property
    def _stackdriver_client(self):
        """Lazily-built, thread-local Stackdriver Logging API client."""
        client = getattr(self._thread_local, "stackdriver_client", None)
        if not client:
            # FYI this does not make a network call
            client = discovery.build("logging", "v2")
            self._thread_local.stackdriver_client = client
        return self._thread_local.stackdriver_client

    # get a list of metrics first and use any already-available metrics
    # to create metrics objects
    def counter(self, name, transform=None, tags=None, **kwargs):
        """Create a :class:`StackdriverLogMetricsCounter` object.

        .. note::

            Stackdriver counts log lines so initializing a
            counter value is not supported .

        Args:
            name (str): name of counter
            transform (str): transform the counter is associated with
            tags (dict): any tags of additional contextual information
                to associate with the counter

        Returns:
            StackdriverLogMetricsCounter: a log-based counter
        """
        # Since stackdriver literally counts loglines, initializing a
        # counter value is not supported
        if kwargs.get("value", 0) > 0:
            self.logger.log(
                logging.WARNING,
                "Initializing Stackdriver log-based counters with a value "
                "other than 0 is not supported. Defaulting to 0.",
            )
        ctr = StackdriverLogMetricsCounter(
            name=name,
            job_name=self.job_name,
            project=self.klio_config.pipeline_options.project,
            transform=transform,
            tags=tags,
        )
        # Registers the log-based metric with Stackdriver Monitoring
        # (no-op if it already exists).
        ctr._init_metric(self._stackdriver_client)
        return ctr

    def gauge(self, *args, **kwargs):
        """Create a :class:`StackdriverLogMetricsGauge` object.

        .. warning::

            Gauges for Stackdriver are not yet supported. This will
            default to standard logging.

        Args:
            name (str): name of gauge
            value (int): starting value of gauge; defaults to 0
            transform (str): transform the gauge is associated with
            tags (dict): any tags of additional contextual information
                to associate with the gauge

        Returns:
            StackdriverLogMetricsGauge: a log-based gauge
        """
        self.logger.log(
            logging.WARNING,
            "Gauge is not supported for Stackdriver log-based metrics, "
            "defaulting to standard logger.",
        )
        return StackdriverLogMetricsGauge(*args, **kwargs)

    def timer(self, *args, **kwargs):
        """Create a :class:`StackdriverLogMetricsTimer` object.

        .. warning::

            Timers for Stackdriver are not yet supported. This will
            default to standard logging.

        Args:
            name (str): name of gauge
            value (int): starting value of gauge; defaults to 0
            transform (str): transform the gauge is associated with
            tags (dict): any tags of additional contextual information
                to associate with the gauge

        Returns:
            StackdriverLogMetricsTimer: a log-based timer
        """
        self.logger.log(
            logging.WARNING,
            "Timer is not supported for Stackdriver log-based metrics, "
            "defaulting to standard logger.",
        )
        return StackdriverLogMetricsTimer(*args, **kwargs)
class StackdriverLogMetricsCounter(logger.LoggerCounter):
    """Stackdriver log-based counter metric.

    .. note::

        Stackdriver counts log lines so initializing a counter value is
        not supported .

    Args:
        name (str): name of counter
        job_name (str): name of Dataflow job
        project (str): name of GCP project associated with Dataflow job
        transform (str): Name of transform associated with counter, if any.
        tags (dict): Tags to associate with counter. Note:
            ``{"metric_type": "counter"}`` will always be an included tag.
    """

    # NOTE: The in-memory value (as kept track in the metric Dispatchers) may
    # be greater than 1, but it doesn't mean anything because:
    # 1. log-based counters in SD count the number log lines, and does not
    #    extract a value from the log line, and
    # 2. state is not maintained within a pipeline, so the in-memory
    #    value will eventually be wiped and re-initialized.
    # Therefore, we're hard-coding counters to the value of 1
    DEFAULT_LOG_FORMAT = (
        "[{name}] value: 1 transform: '{transform}' tags: {tags}"
    )
    # Label key used to attach the transform name to the metric.
    KLIO_TRANSFORM_LABEL_KEY = "klio_transform"

    def __init__(self, name, job_name, project, transform=None, tags=None):
        # Since stackdriver literally counts loglines, initializing a
        # counter value is not supported; defaulting to 0
        super(StackdriverLogMetricsCounter, self).__init__(
            name, value=0, transform=transform, tags=tags
        )
        self.job_name = job_name
        self.project = project
        self.parent = "projects/{}".format(project)
        self.desc = "Klio counter '{}' ".format(self.name)
        # Request body sent to the Stackdriver metrics.create API.
        self.body = self._get_body()

    def _get_filter(self):
        """Build the Stackdriver Logging filter matching this counter's lines."""
        _filter = (
            'resource.type="dataflow_step" '
            'logName="projects/{project}/logs/'
            'dataflow.googleapis.com%2Fworker" '
            # since stackdriver counters do not support regexes, we're
            # including the [] around the name to avoid name collisions like
            # my-counter & my-counter2
            'jsonPayload.message:"[{name}]"'.format(
                project=self.project, name=self.name
            )
        )
        return _filter

    def _get_transform_label_extractor(self):
        """Return the label extractor mapping for the transform name."""
        # Grab the transform name from the log line, i.e.:
        # "[my-counter] value: 1 transform: 'HelloKlio' tags: {'metric_...";
        # Needs to be able to follow valid Python chars for class names:
        # https://stackoverflow.com/a/10120327/1579977
        label_extractor = "\"transform: '([a-zA-Z_][a-zA-Z0-9_]*)' tags:\""

        # docs: https://cloud.google.com/logging/docs/
        # logs-based-metrics/labels#create-label
        label_regex = "REGEXP_EXTRACT(jsonPayload.message, {})".format(
            label_extractor
        )
        return {self.KLIO_TRANSFORM_LABEL_KEY: label_regex}

    def _get_body(self):
        """Assemble the metric-creation request body for the Logging API."""
        labels = [
            {
                "key": self.KLIO_TRANSFORM_LABEL_KEY,
                "valueType": "STRING",
                "description": "Name of Klio-based transform",
            }
        ]

        body = {
            "name": self.name,
            "description": self.desc,
            "filter": self._get_filter(),
            "metricDescriptor": {
                "metricKind": "DELTA",
                "valueType": "INT64",
                "unit": "1",
                "labels": labels,
            },
            "labelExtractors": self._get_transform_label_extractor(),
        }
        return body

    def _init_metric(self, stackdriver_client):
        """Create the log-based metric in Stackdriver, tolerating duplicates.

        Errors are logged but never raised: metric creation failure should
        not break the pipeline.
        """
        req = (
            stackdriver_client.projects()
            .metrics()
            .create(parent=self.parent, body=self.body)
        )
        try:
            req.execute()
        except gapi_errors.HttpError as e:
            # HTTP Conflict - metric already exists
            if e.resp.status == 409:
                logging.getLogger("klio.metrics").debug(
                    "Metric {} already exists".format(self.name)
                )
                return

            logging.getLogger("klio.metrics").error(
                "Error creating metric '{}': {}".format(self.name, e),
                exc_info=True,
            )

        except Exception as e:
            logging.getLogger("klio.metrics").error(
                "Error creating metric '{}': {}".format(self.name, e),
                exc_info=True,
            )
class StackdriverLogMetricsGauge(logger.LoggerGauge):
    """Name-only pass-through gauge.

    Stackdriver log-based metrics do not support gauges, so behavior
    falls back entirely to :class:`klio.metrics.logger.LoggerGauge`.
    """
class StackdriverLogMetricsTimer(logger.LoggerTimer):
    """Name-only pass-through timer.

    Stackdriver log-based metrics do not support timers, so behavior
    falls back entirely to :class:`klio.metrics.logger.LoggerTimer`.
    """
|
gaybro8777/klio | cli/src/klio_cli/commands/job/delete.py | <gh_stars>100-1000
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import click
from google.cloud import pubsub_v1
from google.cloud import storage
from klio_cli.utils import stackdriver_utils as sd_utils
class DeleteJob(object):
    """Interactively delete the GCP resources configured for a klio job.

    Walks the job's configured Pub/Sub topics/subscriptions, GCS
    locations, and Stackdriver dashboard group, asking the user to
    confirm each deletion before performing it.
    """

    # Resource categories collected from event/data config for deletion.
    resource_types = ["topic", "subscription", "location"]

    def __init__(self, config):
        self.config = config
        self._pipeline_options = config.pipeline_options
        self._job_config = config.job_config

    def _confirmation_dialog(self, resource_type, name):
        """Ask the user to confirm deletion of one resource.

        Requires the user to re-type the exact resource name as a
        safety check.

        Returns:
            bool: True if deletion was confirmed.

        Raises:
            ValueError: if the typed confirmation does not match ``name``.
        """
        yes_no = click.confirm(
            "Do you want to delete {} {}? ".format(resource_type, name)
        )
        if yes_no:
            confirmation = click.prompt(
                "Please confirm by typing out "
                "the name of the {}".format(resource_type)
            )
            if confirmation != name:
                raise ValueError(
                    "Invalid match. {} != {} "
                    "Are you sure you meant to delete the resource? "
                    "Please rerun this command and "
                    "choose 'No' next time.".format(confirmation, name)
                )
        else:
            return False
        return True

    def _get_resources(self):
        """Collect the user-confirmed resources to delete.

        Returns:
            dict: lists of confirmed topics/subscriptions/locations plus
            a ``stackdriver_group`` boolean.
        """
        # Go through the list of resources set up
        # in input/output and ask for user input
        # on each one that we should delete.
        # The following are the types of resources we are considering:
        # 1. topics
        # 2. subscriptions
        # 3. buckets
        to_delete = {
            resource_type: [] for resource_type in self.resource_types
        }
        to_delete["stackdriver_group"] = False

        ev_inputs = self._job_config.events.inputs
        ev_outputs = self._job_config.events.outputs
        for resource in ev_inputs + ev_outputs:
            if "pubsub" == resource.name:
                if self._confirmation_dialog("topic", resource.topic):
                    to_delete["topic"].append(resource.topic)
                # Output pubsub configs may not define a subscription.
                if not hasattr(resource, "subscription"):
                    continue
                if self._confirmation_dialog(
                    "subscription", resource.subscription
                ):
                    to_delete["subscription"].append(resource.subscription)

        data_inputs = self._job_config.data.inputs
        data_outputs = self._job_config.data.outputs
        for resource in data_inputs + data_outputs:
            if "gcs" == resource.name:
                if self._confirmation_dialog("location", resource.location):
                    to_delete["location"].append(resource.location)

        # Now lets handle the stackdriver group
        _, dashboard_name = sd_utils.generate_group_meta(
            self._pipeline_options.project,
            self.config.job_name,
            self._pipeline_options.region,
        )
        to_delete["stackdriver_group"] = self._confirmation_dialog(
            "stackdriver dashboard group", dashboard_name
        )
        return to_delete

    def _delete_subscriptions(self, subscriptions):
        """Delete each Pub/Sub subscription; log (not raise) failures."""
        if not subscriptions:
            return
        client = pubsub_v1.SubscriberClient()
        for subscription in subscriptions:
            logging.info("Deleting subscription {}".format(subscription))
            try:
                client.delete_subscription(
                    request={"subscription": subscription}
                )
            except Exception:
                logging.error(
                    "Failed to delete subscription {}".format(subscription),
                    exc_info=True,
                )

    def _delete_topics(self, topics):
        """Delete each Pub/Sub topic; log (not raise) failures."""
        if not topics:
            return
        client = pubsub_v1.PublisherClient()
        for topic in topics:
            logging.info("Deleting topic {}".format(topic))
            try:
                client.delete_topic(request={"topic": topic})
            except Exception:
                logging.error(
                    "Failed to delete topic {}".format(topic), exc_info=True
                )

    def _delete_buckets(self, project, data_locations):
        """Force-delete the GCS buckets behind ``gs://`` data locations.

        Non-GCS locations are skipped with an informational log entry.
        """
        if not data_locations:
            return
        client = storage.Client(project=project)
        for data_location in data_locations:
            logging.info("Deleting data_location {}".format(data_location))
            if data_location.startswith("gs://"):
                try:
                    # "gs://<bucket>/..." -> index 2 is the bucket name.
                    client.get_bucket(data_location.split("/")[2]).delete(
                        force=True
                    )
                except Exception:
                    logging.error(
                        "Failed to delete bucket {}".format(data_location),
                        exc_info=True,
                    )
            else:
                logging.info(
                    "Skipping data location {}: Not a GCS location".format(
                        data_location
                    )
                )

    def delete(self):
        """Prompt for, then delete, all of the job's confirmed resources."""
        to_delete = self._get_resources()
        # Now we will go one by one through each resource and delete them all!
        self._delete_subscriptions(to_delete["subscription"])
        self._delete_topics(to_delete["topic"])
        self._delete_buckets(
            self._pipeline_options.project, to_delete["location"],
        )
        if to_delete["stackdriver_group"]:
            sd_utils.delete_stackdriver_group(
                self._pipeline_options.project,
                self.config.job_name,
                self._pipeline_options.region,
            )
|
gaybro8777/klio | exec/tests/unit/commands/audit_steps/test_tempfile_usage.py | <filename>exec/tests/unit/commands/audit_steps/test_tempfile_usage.py
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tempfile
from klio_exec.commands.audit_steps import tempfile_usage
def test_tempfile_usage(klio_config, mock_emit_error):
    """TempFileUsage patches ``tempfile.TemporaryFile`` and flags its use.

    Dropped the unused ``mocker``/``monkeypatch`` fixture parameters —
    the audit step performs its own patching in ``before_tests``.
    """
    inst = tempfile_usage.TempFileUsage("job/dir", klio_config, "tw")
    assert "TemporaryFile" == tempfile.TemporaryFile.__name__  # sanity check

    inst.before_tests()
    # before_tests swaps in a tracking wrapper for TemporaryFile.
    assert "MockTemporaryFile" == tempfile.TemporaryFile.__name__
    assert inst._tempfile_used is False

    inst.after_tests()
    mock_emit_error.assert_not_called()

    with tempfile.TemporaryFile():
        pass

    assert inst._tempfile_used is True
    inst.after_tests()
    # don't really care about message, just that it was called
    assert 1 == mock_emit_error.call_count
|
gaybro8777/klio | exec/src/klio_exec/commands/utils/profile_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
try:
import matplotlib.pyplot as plt
except ImportError: # pragma: no cover
# can be a problem on MacOS w certain Python & OS versions
# https://stackoverflow.com/a/34583958/1579977
import matplotlib
matplotlib.use("agg")
import matplotlib.pyplot as plt
import numpy as np
def _get_profiling_data(filename):
    """Read a given file and parse its content for profiling data.

    Each sample line is expected to look like
    ``<label> <mem_usage> <timestamp>``; lines with a different number of
    fields are skipped.

    Args:
        filename (str): path to the profiling output file.

    Returns:
        dict: ``{"data": [floats], "timestamp": [floats]}``.

    Raises:
        SystemExit: if the file can't be read or contains no samples.
    """
    data, timestamps = [], []

    try:
        with open(filename, "r") as f:
            file_data = f.readlines()
    except Exception:
        logging.error("Could not read profiling data.", exc_info=True)
        raise SystemExit(1)

    for line in file_data:
        line = line.strip()
        line_data = line.split(" ")
        # Skip blank lines and anything that isn't a 3-field sample.
        if len(line_data) != 3:
            continue
        # Reuse the already-split fields instead of splitting the line a
        # second time (the original called line.split(" ") twice).
        _, mem_usage, timestamp = line_data
        data.append(float(mem_usage))
        timestamps.append(float(timestamp))

    if not data:
        logging.error("No samples to parse in {}.".format(filename))
        raise SystemExit(1)

    return {"data": data, "timestamp": timestamps}
def plot(input_file, output_file, x_label, y_label, title):
    """Plot profiling data.

    Reads samples via ``_get_profiling_data`` and saves a time-series
    figure of memory usage (y) against seconds since the first sample (x).

    Args:
        input_file (str): path to the file of profiling samples.
        output_file (str): path where the rendered figure is saved.
        x_label (str): label for the x axis.
        y_label (str): label for the y axis.
        title (str): title of the plot.
    """
    profile_data = _get_profiling_data(input_file)
    data = np.asarray(profile_data["data"])
    timestamp = np.asarray(profile_data["timestamp"])

    global_start = float(timestamp[0])
    t = timestamp - global_start  # start at 0 rather than a specific time

    plt.figure(figsize=(14, 6), dpi=90)
    plt.plot(t, data, "+-c")  # c is for `cyan`
    plt.xlabel(x_label)
    plt.ylabel(y_label)
    plt.title(title)
    plt.grid()
    plt.savefig(output_file)
|
gaybro8777/klio | lib/tests/unit/metrics/test_logger.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import threading
import pytest
from klio.metrics import logger
@pytest.fixture
def client(klio_config):
    """A MetricsLoggerClient built from the shared klio_config fixture."""
    return logger.MetricsLoggerClient(klio_config)
@pytest.fixture
def metric():
    """A simple counter metric used across the client tests."""
    return logger.LoggerCounter(name="fixture-counter")
@pytest.mark.parametrize(
    "metrics_config,exp_log_level",
    (
        ({}, logging.DEBUG),
        ({"logger": True}, logging.DEBUG),
        ({"logger": {"key": "value"}}, logging.DEBUG),
        ({"logger": {"level": "info"}}, logging.INFO),
        ({"logger": {"level": "INFO"}}, logging.INFO),
        # Unknown level names fall back to DEBUG.
        ({"logger": {"level": "notalevel"}}, logging.DEBUG),
    ),
)
def test_client_set_log_level(metrics_config, exp_log_level, klio_config):
    """The client derives its log level from job_config.metrics."""
    klio_config.job_config.metrics = metrics_config
    client = logger.MetricsLoggerClient(klio_config)
    assert exp_log_level == client.log_level
@pytest.mark.parametrize(
    "metrics_config,exp_timer_unit",
    (
        ({}, "ns"),
        ({"logger": True}, "ns"),
        ({"logger": {"timer_unit": "seconds"}}, "s"),
        # Unknown units fall back to nanoseconds.
        ({"logger": {"timer_unit": "notaunit"}}, "ns"),
    ),
)
def test_client_set_timer_unit(metrics_config, exp_timer_unit, klio_config):
    """The client derives its timer unit from job_config.metrics."""
    klio_config.job_config.metrics = metrics_config
    client = logger.MetricsLoggerClient(klio_config)
    assert exp_timer_unit == client.timer_unit
def test_client_logger(client):
    """The client exposes the shared ``klio.metrics`` logger."""
    assert client.logger == logging.getLogger("klio.metrics")
def test_client_logger_local(client, monkeypatch):
    """A logger already cached on thread-local storage is returned as-is."""
    cached_logger = logging.getLogger("klio.metrics.patch")
    fake_local = threading.local()
    fake_local.klio_metrics_logger = cached_logger
    monkeypatch.setattr(client, "_thread_local", fake_local)
    assert client.logger == cached_logger
def test_client_unmarshal(client, metric):
    """``unmarshal`` flattens a metric object into a plain dict."""
    assert client.unmarshal(metric) == {
        "name": "fixture-counter",
        "value": 0,
        "transform": None,
        "tags": {"metric_type": "counter"},
    }
def test_client_emit(client, metric, caplog):
    """``emit`` writes exactly one formatted log record for the metric."""
    exp_log_record = (
        "[fixture-counter] value: 0 transform: 'None' "
        "tags: {'metric_type': 'counter'}"
    )
    client.emit(metric)
    assert 1 == len(caplog.records)
    assert exp_log_record == caplog.records[0].message
def test_client_counter(client):
    """``counter`` builds a LoggerCounter instance."""
    assert isinstance(client.counter(name="my-counter"), logger.LoggerCounter)
def test_client_gauge(client):
    """``gauge`` builds a LoggerGauge instance."""
    assert isinstance(client.gauge(name="my-gauge"), logger.LoggerGauge)
@pytest.mark.parametrize(
    "timer_unit,config_timer_unit,exp_timer_unit",
    (
        (None, "ns", "ns"),
        (None, None, "ns"),
        ("s", None, "s"),
        ("microseconds", None, "us"),
        # Unknown per-call units fall back to nanoseconds.
        ("notaunit", None, "ns"),
    ),
)
def test_client_timer(
    timer_unit, config_timer_unit, exp_timer_unit, client, monkeypatch
):
    """A per-call timer unit overrides the client's configured unit."""
    if config_timer_unit:
        monkeypatch.setattr(client, "timer_unit", config_timer_unit)

    timer = client.timer(name="my-timer", timer_unit=timer_unit)
    assert isinstance(timer, logger.LoggerTimer)
    assert exp_timer_unit == timer.tags["unit"]
def test_logger_counter():
    """Counters are always tagged with their metric type."""
    counter = logger.LoggerCounter(name="my-counter")
    assert counter.tags == {"metric_type": "counter"}
def test_logger_gauge():
    """Gauges are always tagged with their metric type."""
    gauge = logger.LoggerGauge(name="my-gauge")
    assert gauge.tags == {"metric_type": "gauge"}
def test_logger_timer():
    """Timers carry a metric-type tag plus their unit (default ``ns``)."""
    default_timer = logger.LoggerTimer(name="my-timer")
    assert default_timer.tags == {"metric_type": "timer", "unit": "ns"}

    seconds_timer = logger.LoggerTimer(name="my-timer", timer_unit="s")
    assert seconds_timer.tags == {"metric_type": "timer", "unit": "s"}
# pseudo-integration test
@pytest.mark.parametrize("disabled,exp", ((True, 0), (False, 1)))
def test_logger_disabled(disabled, exp, client, metric, caplog, monkeypatch):
    """A disabled client emits no log records; an enabled one emits one."""
    caplog.set_level(logging.DEBUG, logger="klio.metrics")
    # sanity check / clear out thread local
    client._thread_local.klio_metrics_logger = None
    monkeypatch.setattr(client, "disabled", disabled)
    client.emit(metric)
    assert exp == len(caplog.records)
|
gaybro8777/klio | exec/src/klio_exec/commands/utils/memory_utils.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import functools
import inspect
import memory_profiler as mp
from klio_exec.commands.utils import wrappers
class KMemoryLineProfiler(wrappers.KLineProfilerMixin, mp.LineProfiler):
    """Line-by-line memory profiler for klio transforms.

    Supports two modes: per-element profiling (a fresh profiler per call,
    results shown after every element) and maximum profiling (one shared
    profiler accumulating across calls). Generator transforms get
    dedicated wrappers so yielded values are passed through unchanged.
    """

    @classmethod
    def _wrap_per_element_func(cls, func, stream=None):
        """Wrap a plain function: profile each call, then show results."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # New profiler per element so results are isolated per call.
            prof = cls(backend="psutil")
            try:
                return prof(func)(*args, **kwargs)
            finally:
                mp.show_results(prof, stream=stream)

        return wrapper

    @classmethod
    def _wrap_per_element_gen(cls, func, stream=None):
        """Wrap a generator function: profile until exhaustion, then show."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            prof = cls(backend="psutil")
            try:
                yield from prof(func)(*args, **kwargs)
            finally:
                mp.show_results(prof, stream=stream)

        return wrapper

    @classmethod
    def wrap_per_element(cls, func, **kwargs):
        """Choose the function or generator per-element wrapper for ``func``."""
        if inspect.isgeneratorfunction(func):
            return cls._wrap_per_element_gen(func, **kwargs)
        return cls._wrap_per_element_func(func, **kwargs)

    @staticmethod
    def _wrap_maximum_func(prof, func):
        """Wrap a plain function with a shared (accumulating) profiler."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return prof(func)(*args, **kwargs)

        return wrapper

    @staticmethod
    def _wrap_maximum_gen(prof, func):
        """Wrap a generator function with a shared (accumulating) profiler."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            yield from prof(func)(*args, **kwargs)

        return wrapper

    @classmethod
    def wrap_maximum(cls, prof, func, **_):
        """Choose the function or generator maximum wrapper for ``func``."""
        if inspect.isgeneratorfunction(func):
            return cls._wrap_maximum_gen(prof, func)
        return cls._wrap_maximum_func(prof, func)
|
gaybro8777/klio | audio/src/klio_audio/decorators.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import collections
import functools
import inspect
import io
import pickle
import types
import apache_beam as beam
import numpy as np
from apache_beam import pvalue
from klio.transforms import core
# Actual KlioMessage will not take payload that's not bytes (i.e. numpy
# arrays), so we just re-wrap it into a faked klio-like message (just the
# data part) before handing it off to the user.
# Fields mirror the KlioMessage data block: `element` plus the
# (de)serialized `payload` object.
FakeKlioMsg = collections.namedtuple("FakeKlioMsg", ["element", "payload"])
def __is_method(obj):
    """Return True when *obj* is declared with ``self`` as its first arg.

    ``inspect.ismethod`` can't be used here: at decoration time we see
    the *unbound* function (``Foo.process``), not a bound method
    (``Foo().process``).
    """
    arg_names = inspect.getfullargspec(obj).args
    return bool(arg_names) and arg_names[0] == "self"
def __is_dofn_process_method(self, meth):
    """Return True when *meth* is the ``process`` method of a beam DoFn."""
    return meth.__name__ == "process" and issubclass(
        self.__class__, beam.DoFn
    )
class _BinarySerializer(object):
    """(De)serialize KlioMessage payloads for binary-handling transforms.

    Chooses between pickle (default) and numpy save/load, and supports
    skipping either direction when the payload is not binary data.
    ``beam.pvalue.TaggedOutput`` wrappers are preserved on dump.
    """

    def __init__(self, skip_load, skip_dump, load_with_numpy, save_with_numpy):
        # Flags mirror the `@handle_binary` decorator's keyword arguments.
        self.skip_load = skip_load
        self.skip_dump = skip_dump
        self.load_with_numpy = load_with_numpy
        self.save_with_numpy = save_with_numpy

    @staticmethod
    def _pickle_load(ctx, kmsg):
        """Unpickle the payload, returning a FakeKlioMsg; log and re-raise errors."""
        try:
            payload = pickle.loads(kmsg.payload)
            return FakeKlioMsg(element=kmsg.element, payload=payload)
        except Exception as err:
            ctx.logger.error(
                "Exception occurred when unpickling payload for '%s'.\n"
                "Error: %s" % (kmsg.element, err)
            )
            raise err

    @staticmethod
    def _numpy_load(ctx, kmsg):
        """Load the payload via numpy, returning a FakeKlioMsg; log and re-raise errors."""
        try:
            in_ = io.BytesIO(kmsg.payload)
            in_.seek(0)  # push the read pointer to the beginning
            payload = np.load(in_, allow_pickle=True)
            return FakeKlioMsg(element=kmsg.element, payload=payload)
        except Exception as err:
            ctx.logger.error(
                "Exception occurred when loading numpy payload for '%s'.\n"
                "Error: %s" % (kmsg.element, err)
            )
            raise err

    @staticmethod
    def _pickle_dump(ctx, kmsg, ret):
        """Pickle a return value, re-wrapping TaggedOutput if needed."""
        tagged, tag = False, None
        if isinstance(ret, pvalue.TaggedOutput):
            # Unwrap to pickle the raw value; re-wrap with the same tag below.
            tagged = True
            tag = ret.tag
            ret = ret.value
        try:
            dumped = pickle.dumps(ret)
            if tagged:
                return pvalue.TaggedOutput(tag, dumped)
            return dumped
        except Exception as err:
            ctx.logger.error(
                "Exception occurred when pickling payload for '%s'.\nError: %s"
                % (kmsg.element, err)
            )
            raise err

    @staticmethod
    def _numpy_dump(ctx, kmsg, ret):
        """Serialize a return value with numpy, re-wrapping TaggedOutput if needed."""
        tagged, tag = False, None
        if isinstance(ret, pvalue.TaggedOutput):
            tagged = True
            tag = ret.tag
            ret = ret.value
        try:
            out = io.BytesIO()
            np.save(out, ret)
            dumped = out.getvalue()  # returns the data in `out` in bytes
            if tagged:
                return pvalue.TaggedOutput(tag, dumped)
            return dumped
        except Exception as err:
            ctx.logger.error(
                "Exception occurred when dumping numpy payload for '%s'.\n"
                "Error: %s" % (kmsg.element, err)
            )
            raise err

    def load(self, ctx, kmsg):
        """Deserialize an incoming message per the configured flags."""
        if self.skip_load:
            return kmsg
        if self.load_with_numpy:
            return self._numpy_load(ctx, kmsg)
        return self._pickle_load(ctx, kmsg)

    def dump(self, ctx, kmsg, ret_val):
        """Serialize an outgoing return value per the configured flags."""
        if self.skip_dump:
            return ret_val
        if self.save_with_numpy:
            return self._numpy_dump(ctx, kmsg, ret_val)
        return self._pickle_dump(ctx, kmsg, ret_val)
# A separate function from __handle_binary_generator so we can specifically
# `yield from` it (and exhaust transforms that have multiple yields)
def __yield_from_handle_binary(serializer, self, new_msg, ret_generator):
    """Serialize and re-yield every value produced by a generator transform."""
    for raw_value in ret_generator:
        yield serializer.dump(self._klio, new_msg, raw_value)
def __handle_binary_generator(self, meth, kmsg, serializer, *args, **kwargs):
    """Load the message, call a (possibly generator) method, dump each result."""
    new_msg = serializer.load(self._klio, kmsg)
    # any error should be caught further up (i.e. by `@handle_klio`)
    ret = meth(self, new_msg, *args, **kwargs)
    if not isinstance(ret, types.GeneratorType):
        # Single return value: dump once and yield it.
        ret = serializer.dump(self._klio, new_msg, ret)
        yield ret
    else:
        # Generator: dump each yielded value as it is produced.
        yield from __yield_from_handle_binary(serializer, self, new_msg, ret)
def __handle_binary(ctx, func, kmsg, serializer, *args, **kwargs):
    """Load the message, call a non-generator transform, dump its result.

    ``ctx`` is either a KlioContext (function-based transforms) or the
    DoFn instance itself, in which case the context is read off
    ``._klio``.
    """
    _self = ctx
    if not isinstance(ctx, core.KlioContext):
        ctx = _self._klio
    new_msg = serializer.load(ctx, kmsg)
    # any error should be caught further up (i.e. by `@handle_klio`)
    ret = func(ctx, new_msg, *args, **kwargs)
    ret = serializer.dump(ctx, new_msg, ret)
    return ret
def handle_binary(*decorator_args, **decorator_kwargs):
    """Decorator to handle the required loading/unloading of binary data.

    .. caution::

        The ``@handle_binary`` decorator **must** be used in conjunction with
        the :func:`@handle_klio <klio.transforms.decorators.handle_klio>`
        decorator. As well, ``@handle_binary`` **must** also be applied
        **after** ``@handle_klio``.

    Example usage:

    .. code-block:: python

        class MelSpectrogram(beam.DoFn):
            @handle_klio
            @handle_binary
            def process(self, item):
                self._klio.logger.info(
                    f"Generating specgram for {item.element}"
                )
                audio = item.payload
                yield librosa.feature.melspectrogram(y=audio, sr=22050)

        @handle_klio
        @handle_binary(skip_dump=True)
        def save_plt_as_png(ctx, item):
            fig = item.payload
            output = os.path.join(".", item.element.decode("utf-8") + ".png")
            plt.savefig(output, format="png", transparent=True, pad_inches=0)
            ctx.logger.info(f"Saved spectrogram: {output}")
            return output

        class DownloadAudio(beam.DoFn):
            def setup(self):
                self.client = SomeClient()

            @handle_klio
            @handle_binary(skip_load=True, save_with_numpy=True)
            def process(self, item):
                self._klio.logger.info(f"Downloading {item.element}")
                filename = item.payload.decode("utf-8")
                location = self._klio.config.job_config.data.inputs[0].location
                source_path = os.path.join(location, filename)
                with self.client.open(source_path, "rb") as source:
                    out = io.BytesIO(source.read())
                self._klio.logger.info(f"Downloaded {item.element} to memory")
                yield out

    Args:
        skip_load (bool): Skip loading the ``KlioMessage`` payload via pickle.
            Set this to ``True`` if the incoming ``KlioMessage`` payload is not
            binary data, or otherwise has not been pickled to bytes.
            Default: ``False``
        skip_dump (bool): Skip dumping the ``KlioMessage`` payload via pickle.
            Set this to ``True`` if the outgoing ``KlioMessage`` payload is not
            binary data, or otherwise should not be pickled to bytes.
            Default: ``False``
        load_with_numpy (bool): Use :func:`numpy.load` instead of
            :func:`pickle.loads` to load arrays or pickled numpy objects. This
            is less performant than standard pickling, but uses less memory.
            Default: ``False``.
        save_with_numpy (bool): Use :func:`numpy.save` instead of
            :func:`pickle.dumps` to save arrays as ``bytes``. This is less
            performant than standard pickling, but uses less memory.
            Default: ``False``
    """
    # One serializer instance is shared (via closure) by every invocation
    # of the wrapped function/method.
    skip_load = decorator_kwargs.pop("skip_load", False)
    skip_dump = decorator_kwargs.pop("skip_dump", False)
    load_with_numpy = decorator_kwargs.pop("load_with_numpy", False)
    save_with_numpy = decorator_kwargs.pop("save_with_numpy", False)
    serializer = _BinarySerializer(
        skip_load, skip_dump, load_with_numpy, save_with_numpy
    )

    def inner(func_or_meth):
        @functools.wraps(func_or_meth)
        def method_wrapper(self, kmsg, *args, **kwargs):
            wrapper = __handle_binary
            # Only the process method of a DoFn is a generator - otherwise
            # beam can't pickle a generator
            if __is_dofn_process_method(self, func_or_meth):
                wrapper = __handle_binary_generator
            return wrapper(
                self, func_or_meth, kmsg, serializer, *args, **kwargs
            )

        @functools.wraps(func_or_meth)
        def func_wrapper(ctx, kmsg, *args, **kwargs):
            return __handle_binary(
                ctx, func_or_meth, kmsg, serializer, *args, **kwargs
            )

        # Methods (incl. DoFn.process) get the self-aware wrapper; plain
        # functions receive the KlioContext as their first argument.
        if __is_method(func_or_meth):
            return method_wrapper
        return func_wrapper

    # allows @handle_binary to be used without parens (i.e. no need to do
    # `@handle_binary()`) when there are no args/kwargs provided
    if decorator_args:
        return inner(decorator_args[0])
    return inner
|
gaybro8777/klio | lib/tests/unit/conftest.py | <filename>lib/tests/unit/conftest.py
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import pytest
from klio_core import config
@pytest.fixture
def caplog(caplog):
    """Set global test logging levels."""
    # Shadows pytest's built-in `caplog` so every test captures DEBUG+.
    caplog.set_level(logging.DEBUG)
    return caplog
@pytest.fixture
def job_config_dict():
    """Return a raw job_config mapping used to build a KlioConfig."""
    event_inputs = [
        {
            "topic": "test-parent-job-out",
            "subscription": "test-parent-job-out-sub",
            "data_location": "gs://sigint-output/test-parent-job-out",
        }
    ]
    event_outputs = [
        {
            "topic": "test-job-out",
            "data_location": "gs://sigint-output/test-job-out",
        }
    ]
    return {
        "metrics": {"logger": {}},
        "allow_non_klio_messages": False,
        "number_of_retries": 3,
        "inputs": event_inputs,
        "outputs": event_outputs,
        "dependencies": [
            {"gcp_project": "sigint", "job_name": "test-parent-job"}
        ],
        # Arbitrary user-defined keys should be preserved by KlioConfig.
        "more": "config",
        "that": {"the": "user"},
        "might": ["include"],
    }
@pytest.fixture
def pipeline_config_dict():
    """Return raw Beam/Dataflow pipeline options for tests."""
    opts = {
        "project": "test-project",
        "staging_location": "gs://some/stage",
        "temp_location": "gs://some/temp",
        "worker_harness_container_image": "gcr.io/sigint/foo",
        "streaming": True,
        "experiments": ["beam_fn_api"],
        "region": "us-central1",
    }
    opts.update(
        num_workers=3,
        max_num_workers=5,
        disk_size_gb=50,
        worker_machine_type="n1-standard-4",
        runner="direct",
        autoscaling_algorithm="THROUGHPUT_BASED",
        update=False,
    )
    return opts
@pytest.fixture
def config_dict(job_config_dict, pipeline_config_dict):
    """Assemble a complete klio config mapping from the sub-fixtures."""
    conf = {"job_name": "test-job", "version": 1}
    conf["job_config"] = job_config_dict
    conf["pipeline_options"] = pipeline_config_dict
    return conf
@pytest.fixture
def klio_config(config_dict):
    """Build a real KlioConfig instance from the raw config mapping."""
    return config.KlioConfig(config_dict)
@pytest.fixture
def mock_config(mocker, monkeypatch):
    """Patch RunConfig.get to return a mock KlioConfig with one GCS data input."""
    mconfig = mocker.Mock(name="MockKlioConfig")
    mconfig.job_name = "a-job"
    mconfig.pipeline_options.streaming = True
    mconfig.pipeline_options.project = "not-a-real-project"
    mock_data_input = mocker.Mock(name="MockDataGcsInput")
    mock_data_input.type = "gcs"
    mock_data_input.location = "gs://hopefully-this-bucket-doesnt-exist"
    mock_data_input.file_suffix = ""
    # Skip existence checks so tests never perform real GCS lookups.
    mock_data_input.skip_klio_existence_check = True
    mconfig.job_config.data.inputs = [mock_data_input]
    # Any code calling RunConfig.get() during a test receives this mock.
    monkeypatch.setattr(
        "klio.transforms.core.RunConfig.get", lambda: mconfig,
    )
    return mconfig
|
gaybro8777/klio | cli/src/klio_cli/utils/stackdriver_utils.py | <reponame>gaybro8777/klio<filename>cli/src/klio_cli/utils/stackdriver_utils.py
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from google.cloud import monitoring
# Base URL of the Stackdriver group dashboard UI.
STACKDRIVER_GROUP_BASE_URL = "https://app.google.stackdriver.com/groups"
# URL pattern for a specific group's dashboard page.
STACKDRIVER_GROUP_TPL = (
    "{base_url}/{group_id}/{display_name}?project={project}"
)
# Display-name pattern for the auto-created Klio dashboard group.
DASHBOARD_NAME_TPL = "{job_name}-{region}-klio-dashboard"


def generate_group_meta(project, job_name, region):
    """Return the monitoring parent resource path and dashboard display name."""
    parent = "projects/{}".format(project)
    display_name = DASHBOARD_NAME_TPL.format(job_name=job_name, region=region)
    return parent, display_name
# callees of this function should wrap in try/except since the call to
# client.list_groups does not
def get_stackdriver_group_url(project, job_name, region):
    """Return the dashboard URL for the job's Stackdriver group, if any.

    Returns ``None`` when no group's display name matches the job's
    dashboard name.
    """
    client = monitoring.GroupServiceClient()
    name, dashboard_name = generate_group_meta(project, job_name, region)
    groups = client.list_groups(request={"name": name})
    for group in groups:
        if group.display_name == dashboard_name:
            # The group ID is the last path segment of its resource name.
            group_id = group.name.split("/")[-1]
            return STACKDRIVER_GROUP_TPL.format(
                base_url=STACKDRIVER_GROUP_BASE_URL,
                group_id=group_id,
                display_name=dashboard_name,
                project=project,
            )
# NOTE: this will create a new group with the same name, rather than raise
# an "Already Exists" error.
def create_stackdriver_group(project, job_name, region):
    """Create a Stackdriver group for the job and return its dashboard URL.

    Logs and returns ``None`` on failure (best-effort; never raises).
    """
    client = monitoring.GroupServiceClient()
    name, dashboard_name = generate_group_meta(project, job_name, region)
    group = {
        "display_name": dashboard_name,
        # Group membership: all resources whose name starts with the job name.
        "filter": "resource.metadata.name=starts_with({})".format(job_name),
    }
    try:
        group = client.create_group(request={"name": name, "group": group})
    except Exception as e:
        msg = (
            "Could not create a Stackdriver for job '{}': {}. "
            "Skipping...".format(job_name, e)
        )
        logging.error(msg)
        return
    # The group ID is the last path segment of the created resource name.
    group_id = group.name.split("/")[-1]
    url = STACKDRIVER_GROUP_TPL.format(
        base_url=STACKDRIVER_GROUP_BASE_URL,
        group_id=group_id,
        display_name=dashboard_name,
        project=project,
    )
    msg = "Created dashboard '{}' for job '{}': {}".format(
        dashboard_name, job_name, url
    )
    logging.info(msg)
    return url
# Note: This will attempt to delete a group without retries.
def delete_stackdriver_group(project, job_name, region):
    """Delete the job's Stackdriver group, if one exists.

    Logs a warning when no matching dashboard is found; logs an error and
    returns on any API failure (best-effort; never raises).
    """
    client = monitoring.GroupServiceClient()
    name, dashboard_name = generate_group_meta(project, job_name, region)
    try:
        for group in client.list_groups(request={"name": name}):
            if group.display_name == dashboard_name:
                # `recursive` also removes any child groups.
                client.delete_group(
                    request={"name": group.name, "recursive": True}
                )
                msg = "Deleted dashboard '{}' for job '{}'".format(
                    dashboard_name, job_name
                )
                logging.info(msg)
                return
    except Exception as e:
        msg = (
            "Could not delete a Stackdriver for job '{}': {}. "
            "Skipping...".format(job_name, e)
        )
        logging.error(msg)
        return
    logging.warning(
        "No dashboard for job '{}' could be found. Nothing deleted".format(
            job_name
        )
    )
|
gaybro8777/klio | lib/tests/unit/metrics/test_base.py | <filename>lib/tests/unit/metrics/test_base.py
# Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import six
from klio.metrics import base as base_metrics
class FakeObject:
    """Minimal stand-in object used to exercise ``abstract_attr``."""
@pytest.mark.parametrize("obj", [None, FakeObject()])
def test_abstract_attr(obj):
    # With an object, abstract_attr marks and returns it; with None it
    # returns a dummy placeholder. Either way the abstract marker is set.
    ret_obj = base_metrics.abstract_attr(obj)
    if obj:
        assert isinstance(ret_obj, FakeObject)
    else:
        assert isinstance(ret_obj, base_metrics._DummyAttribute)
    assert hasattr(ret_obj, "__isabstractattr__")
def test__has_abstract_attributes_implemented_decorator():
    # A property marked abstract via the decorator must be satisfiable by
    # a plain class attribute on the subclass.
    class TestMeta(six.with_metaclass(base_metrics._ABCBaseMeta)):
        @property
        @base_metrics.abstract_attr
        def foo(self):
            pass

    class TestABC(TestMeta):
        foo = "bar"

    rc = TestABC()
    assert hasattr(rc, "foo")
def test_has_abstract_attributes_implemented_cls_attr():
    # An abstract class attribute is satisfied by overriding it with a value.
    class TestMeta(six.with_metaclass(base_metrics._ABCBaseMeta)):
        foo = base_metrics.abstract_attr()

    class TestABC(TestMeta):
        foo = "bar"

    rc = TestABC()
    assert hasattr(rc, "foo")
def test_has_abstract_attributes_implemented_raises():
    # Defining a subclass that does NOT provide the abstract attribute
    # should fail at class-creation time.
    class TestMeta(six.with_metaclass(base_metrics._ABCBaseMeta)):
        foo = base_metrics.abstract_attr()

    with pytest.raises(NotImplementedError):

        class TestABC(TestMeta):
            not_foo = "nope"
def test_abstract_relay_client():
    # A concrete subclass implementing every abstract member can be
    # instantiated and exposes the full relay-client interface.
    class RelayClient(base_metrics.AbstractRelayClient):
        RELAY_CLIENT_NAME = "test-relay-client"

        def unmarshal(self, metric):
            return {}

        def emit(self, metric):
            return {}

        def counter(self, name, value=0, transform=None):
            return {}

        def gauge(self, name, value=0, transform=None):
            return {}

        def timer(self, name, value=0, transform=None):
            return {}

    rc = RelayClient({})
    assert hasattr(rc, "RELAY_CLIENT_NAME")
    assert hasattr(rc, "unmarshal")
    assert hasattr(rc, "emit")
    assert hasattr(rc, "counter")
    assert hasattr(rc, "gauge")
    assert hasattr(rc, "timer")
def test_abstract_relay_client_raises():
    # Subclassing without implementing the abstract members must fail.
    with pytest.raises(NotImplementedError):

        class RelayClient(base_metrics.AbstractRelayClient):
            pass
def test_base_metric():
    # BaseMetric subclasses keep name/value/transform and support update().
    class MyMetric(base_metrics.BaseMetric):
        def __init__(self, name, value=0, transform=None, tags=None, **kwargs):
            super(MyMetric, self).__init__(
                name, value=value, transform=transform
            )
            self.tags = tags

    my_metric_inst = MyMetric(name="foo")
    assert isinstance(my_metric_inst, base_metrics.BaseMetric)
    assert hasattr(my_metric_inst, "name")
    assert hasattr(my_metric_inst, "value")
    assert hasattr(my_metric_inst, "transform")
    assert hasattr(my_metric_inst, "update")
    assert 0 == my_metric_inst.value
    my_metric_inst.update(1)
    assert 1 == my_metric_inst.value
|
gaybro8777/klio | integration/multi-event-input-batch/tests/test_run.py | <reponame>gaybro8777/klio
# Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import pytest
from klio_core.proto import klio_pb2
import run
@pytest.fixture
def caplog(caplog):
    """Set global test logging levels."""
    # Shadows pytest's built-in `caplog` so every test captures DEBUG+.
    caplog.set_level(logging.DEBUG)
    return caplog
@pytest.fixture
def klio_msg():
    """Return a minimal v2 KlioMessage with a fixed element."""
    element = b"s0m3_tr4ck_1d"
    msg = klio_pb2.KlioMessage()
    msg.data.element = element
    msg.version = klio_pb2.Version.V2
    return msg
@pytest.mark.parametrize(
    "func_to_test,exp_log_prefix",
    (
        (run.first_func, "[first_func]:"),
        (run.second_func, "[second_func]:"),
        (run.combined_func, "[combined_func]:"),
    )
)
def test_process_funcs(func_to_test, exp_log_prefix, klio_msg, caplog):
    # Each transform should pass the serialized message through unchanged
    # and log exactly one INFO line with its own prefix.
    ret = func_to_test(klio_msg.SerializeToString())
    assert klio_msg.SerializeToString() == ret
    assert 1 == len(caplog.records)
    exp_log_msg = f"{exp_log_prefix} {klio_msg.data.element}"
    assert exp_log_msg == caplog.records[0].message
    assert "INFO" == caplog.records[0].levelname
|
gaybro8777/klio | devtools/src/klio_devtools/cli.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import click
from klio_cli import cli as main_cli
from klio_cli import options
from klio_cli.cli import main
from klio_cli.utils import cli_utils
from klio_core import config
from klio_core import options as core_options
from klio_core import utils as core_utils
from klio_devtools.commands import develop
@main.command(
    "develop",
    short_help="Develop on the klio ecosystem in a job's container.",
    help=(
        "Builds & runs a job's container, mounts the job's code in "
        "`/usr/src/app`, installs klio packages as 'editable' packages "
        "that will automatically pick up local changes, and attaches to "
        "the container with an interactive terminal to enable manual "
        "runs of `klioexec`.\n\nNOTE: It's probably a good idea to locally "
        "bump the versions of the libraries to ensure proper installation."
    ),
)
@core_options.job_dir
@core_options.config_file
@core_options.image_tag(default=None, show_default="``git-sha[dirty?]``")
@options.runtime
@click.option(
    "--klio-path",
    type=click.Path(
        exists=True,
        dir_okay=True,
        file_okay=False,
        readable=True,
        writable=True,
        resolve_path=True,
    ),
    help="Path to klio repo",
    required=True,
)
@click.option(
    "--exclude", help="exclude installing a particular package", multiple=True,
)
def develop_job(job_dir, config_file, **kwargs):
    """Build & attach to a job container with klio libs installed editable."""
    job_dir, config_path = core_utils.get_config_job_dir(job_dir, config_file)
    config_data = core_utils.get_config_by_path(config_path)
    conf = config.KlioConfig(config_data)
    # The image tag falls back to the job repo's git SHA when not provided.
    git_sha = cli_utils.get_git_sha(job_dir, kwargs.get("image_tag"))
    image_tag = kwargs.get("image_tag") or git_sha
    if config_file:
        # Distinguish images built against a non-default config file.
        basename = os.path.basename(config_file)
        image_tag = "{}-{}".format(image_tag, basename)
    runtime_config = main_cli.DockerRuntimeConfig(
        image_tag=image_tag,
        force_build=kwargs.get("force_build"),
        config_file_override=config_file,
    )
    klio_pipeline = develop.DevelopKlioContainer(
        job_dir, conf, runtime_config, kwargs["klio_path"], kwargs["exclude"]
    )
    klio_pipeline.run()
|
gaybro8777/klio | lib/src/klio/transforms/_timeout.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module is heavily inspired by https://github.com/pnpnpn/timeout-decorator
and https://github.com/bitranox/wrapt_timeout_decorator.
It's been simplified for what we need, and adapted to work with generators.
"""
import multiprocessing
import sys
import time
import traceback
import types
import psutil
# Header prepended to re-built user tracebacks so they read like native ones.
TRACEBACK_HEADER = "Traceback (most recent call last):\n"


class KlioTimeoutError(Exception):
    """Transform timed out while processing a KlioMessage."""
def _target(queue, function, *args, **kwargs):
    """Run user's function with args/kwargs and return output via a queue.

    Executed in a child process. Puts a ``(success, value)`` tuple on the
    queue: on success, the function's return value (or the first item if it
    returned a generator); on failure, the exception itself, annotated with
    the user-relevant traceback text under ``_klio_traceback``.
    """
    try:
        ret = function(*args, **kwargs)
        if isinstance(ret, types.GeneratorType):
            # Only the first yielded value can cross the process boundary.
            ret = next(ret)
        queue.put((True, ret))
    except Exception:
        # Note: here we're trying to get the traceback context that's relevant
        # to the user. Without it, the user would see the timeout wrapper
        # context, which won't help the user. So we grab the previous traceback
        # (accessed by tb_next) and attach it to the exception as a new
        # attribute. Then when we catch the exception in the caller code, we
        # can check for that attribute and log it.
        exc_infos = sys.exc_info()
        user_exc = exc_infos[1]
        user_exc_tb = "".join(traceback.format_tb(exc_infos[2].tb_next))
        user_exc_tb = TRACEBACK_HEADER + user_exc_tb
        setattr(user_exc, "_klio_traceback", user_exc_tb)
        queue.put((False, user_exc))
class KlioTimeoutWrapper(object):
    """Wrap a function to execute in a separate process with a timeout.

    Calling the wrapper spawns a child process running ``_target`` and
    polls until either a result arrives on the queue or the deadline
    passes, in which case the child (and its descendants) are terminated
    and a timeout exception is raised.
    """

    DEFAULT_EXC_MSG = "Function '{}' timed out after {} seconds."

    def __init__(
        self, function, seconds, timeout_exception=None, exception_message=None
    ):
        self._function = function
        # Prefer the qualified name for clearer process names / messages.
        self._func_name = getattr(function, "__qualname__", function.__name__)
        self._seconds = seconds
        self._timeout_exception = timeout_exception or KlioTimeoutError
        self._exception_message = exception_message

    def __call__(self, *args, **kwargs):
        # maxsize=1: the child produces exactly one (success, value) tuple.
        self._queue = multiprocessing.Queue(maxsize=1)
        args = (self._queue, self._function) + args
        proc_name = "KlioTimeoutProcess-{}".format(self._func_name)
        self._process = multiprocessing.Process(
            target=_target, name=proc_name, args=args, kwargs=kwargs
        )
        # can't daemonize process in case users nest decorators
        self._process.daemon = False
        self._process.start()
        self._timeout = self._seconds + time.monotonic()
        # Busy-wait (with a short sleep) until a result arrives or
        # `ready` cancels on deadline.
        while not self.ready:
            time.sleep(0.01)
        return self.value

    def _raise_exception(self):
        if self._exception_message is None:
            self._exception_message = self.DEFAULT_EXC_MSG.format(
                self._func_name, self._seconds
            )
        raise self._timeout_exception(self._exception_message)

    def _terminate_child_processes(self):
        # This is to make sure we clean up any spawned child processes,
        # particularly if the user uses `subprocess` in their timeout-
        # decorated code
        proc = psutil.Process(pid=self._process.pid)
        children = proc.children(recursive=True)
        while children:
            # top-down/handle parents first; if we terminate children first,
            # the parents could just respawn
            child = children.pop(0)
            try:
                child.terminate()
            except psutil.NoSuchProcess:
                # Already gone; nothing to clean up.
                pass

    def cancel(self):
        """Terminate any possible execution of the process running the func."""
        if self._process.is_alive():
            self._terminate_child_processes()
            self._process.terminate()
        # join process so the parent process can reap it - no zombies
        self._process.join(timeout=0.1)
        self._raise_exception()

    @property
    def ready(self):
        """Manage the status of "value" property."""
        # Deadline passed: cancel (which raises the timeout exception).
        if self._timeout < time.monotonic():
            self.cancel()
        return self._queue.full() and not self._queue.empty()

    @property
    def value(self):
        """Get data from the processing queue that was added by the func."""
        if self.ready is True:
            is_successful, ret_value = self._queue.get()
            if is_successful:
                return ret_value
            # Child failed: re-raise the user's exception in the parent.
            raise ret_value
|
gaybro8777/klio | integration/read-file/tests/test_transforms.py | # Copyright 2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import logging
import os
import pytest
from klio_core.proto import klio_pb2
import transforms
@pytest.fixture
def caplog(caplog):
    """Set global test logging levels."""
    # Shadows pytest's built-in `caplog` so every test captures DEBUG+.
    caplog.set_level(logging.DEBUG)
    return caplog
@pytest.fixture
def klio_msg():
    """Return a minimal v2 KlioMessage with a fixed element."""
    msg = klio_pb2.KlioMessage()
    msg.data.element = b"s0m3_tr4ck_1d"
    msg.version = klio_pb2.Version.V2
    return msg
@pytest.fixture
def expected_log_messages(klio_msg):
    """Return the ordered log snippets `test_process` expects to observe."""
    return [
        (
            "KlioThreadLimiter(name=LogKlioMessage.process) Blocked – "
            "waiting on semaphore for an available thread (available threads:"
        ),
        (
            "KlioThreadLimiter(name=LogKlioMessage.process) Released "
            "semaphore (available threads:"
        ),
        "Hello, Klio!",
        "Received element {}".format(klio_msg.data.element),
        "Received payload {}".format(klio_msg.data.payload),
    ]
def test_process(klio_msg, expected_log_messages, caplog):
    # The transform must pass the serialized message through unchanged and
    # emit the expected INFO log lines in order.
    helloklio_fn = transforms.LogKlioMessage()
    output = helloklio_fn.process(klio_msg.SerializeToString())
    assert klio_msg.SerializeToString() == list(output)[0]
    assert len(caplog.records) == len(expected_log_messages)
    for index, record in enumerate(caplog.records):
        assert "INFO" == record.levelname
        assert expected_log_messages[index] in record.message
|
gaybro8777/klio | cli/src/klio_cli/commands/job/stop.py | # Copyright 2019-2020 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import logging
import time
import emoji
from googleapiclient import discovery
# Maps the user-facing stop strategy to Dataflow's terminal job states.
JOB_STATE_MAP = {"cancel": "JOB_STATE_CANCELLED", "drain": "JOB_STATE_DRAINED"}
class StopJob(object):
    """Cancel or drain a running Dataflow job via the Dataflow REST API."""

    def __init__(self, api_version=None):
        self._set_dataflow_client(api_version)

    def _set_dataflow_client(self, api_version):
        """Build the Dataflow discovery client (defaults to API v1b3)."""
        if not api_version:
            api_version = "v1b3"
        self._client = discovery.build("dataflow", api_version)

    def _check_job_running(self, job_name, project, region):
        """Return the ACTIVE job dict matching ``job_name``, or ``None``.

        A failed listing is logged and treated as "not running" so a deploy
        can continue (best-effort).
        """
        request = (
            self._client.projects()
            .locations()
            .jobs()
            .list(projectId=project, location=region, filter="ACTIVE",)
        )
        try:
            response = request.execute()
        except Exception as e:
            logging.warning(
                "Could not find running job '{}' in project '{}': {}".format(
                    job_name, project, e
                )
            )
            logging.warning(
                "Continuing to attempt deploying '{}'".format(job_name)
            )
            return
        # Empty "jobs" key -> no active jobs; the loop simply doesn't run.
        for result in response.get("jobs", []):
            if result["name"] == job_name:
                return result

    def _update_job_state(self, job, req_state=None, retries=None):
        """Request that ``job`` move to the cancel/drain state.

        Retries up to 3 times (30s apart) on server-side (5xx) errors;
        exits the process on 4xx errors or when retries are exhausted.
        """
        if retries is None:
            retries = 0
        _req_state = JOB_STATE_MAP.get(req_state, JOB_STATE_MAP["cancel"])
        # Fix: this previously used `is not`, an object-identity check that
        # is effectively always True for strings coming from the API, rather
        # than a value comparison.
        if job.get("requestedState") != _req_state:
            job["requestedState"] = _req_state
        request = (
            self._client.projects()
            .locations()
            .jobs()
            .update(
                jobId=job["id"],
                projectId=job["projectId"],
                location=job["location"],
                body=job,
            )
        )
        try:
            request.execute()
        except Exception as e:
            # generic catch if 4xx error - probably shouldn't retry
            if getattr(e, "resp", None):
                if e.resp.status < 500:
                    msg = "Failed to {} job '{}': {}".format(
                        req_state, job["name"], e
                    )
                    logging.error(msg)
                    raise SystemExit(1)
            if retries > 2:
                msg = "Max retries reached: could not {} job '{}': {}".format(
                    req_state, job["name"], e
                )
                logging.error(msg)
                raise SystemExit(1)
            logging.info(
                "Failed to {} job '{}'. Trying again after 30s...".format(
                    req_state, job["name"]
                )
            )
            retries += 1
            time.sleep(30)
            self._update_job_state(job, req_state, retries)

    def _watch_job_state(self, job, timeout=600):
        """Poll until ``job`` reaches a terminal state or ``timeout`` seconds.

        Exits the process if the job doesn't reach a terminal state in time.
        """
        # Fix: keep the deadline separate from the `timeout` seconds value —
        # previously `timeout` was rebound to a datetime and then formatted
        # into the "'{}' secs" error message below.
        deadline = datetime.datetime.now() + datetime.timedelta(
            seconds=timeout
        )
        request = (
            self._client.projects()
            .locations()
            .jobs()
            .get(
                jobId=job["id"],
                projectId=job["projectId"],
                location=job["location"],
            )
        )
        while datetime.datetime.now() < deadline:
            try:
                resp = request.execute()
            except Exception as e:
                msg = (
                    "Failed to get current status for job '{}'. Error: {}.\n"
                    "Trying again after 5s...".format(job["name"], e)
                )
                logging.info(msg)
                time.sleep(5)
                continue
            if resp["currentState"] in JOB_STATE_MAP.values():
                return
            msg = "Waiting for job '{}' to reach terminal state...".format(
                job["name"]
            )
            logging.info(msg)
            time.sleep(5)
        msg = "Job '{}' did not reach terminal state after '{}' secs.".format(
            job["name"], timeout
        )
        logging.error(msg)
        raise SystemExit(1)

    def stop(self, job_name, project, region, strategy, api_version=None):
        """Stop ``job_name`` using ``strategy`` ('cancel' or 'drain')."""
        self._set_dataflow_client(api_version)
        current_running_job = self._check_job_running(
            job_name, project, region
        )
        if not current_running_job:
            return
        self._update_job_state(current_running_job, req_state=strategy)
        self._watch_job_state(current_running_job)
        verb = "cancelled" if strategy == "cancel" else "drained"
        msg = "Successfully {} job '{}' :smile_cat:".format(verb, job_name)
        logging.info(emoji.emojize(msg, use_aliases=True))
|
harwiltz/mltools | mltools/functional.py | <filename>mltools/functional.py
def Id(x):
    """Identity: return ``x`` unchanged (was a PEP8-discouraged lambda)."""
    return x


def cmp(f, g):
    """Function composition: returns ``h`` with ``h(x) == f(g(x))``."""
    return lambda x: f(g(x))


def mul(f, g):
    """Pointwise product: returns ``h`` with ``h(x) == f(x) * g(x)``."""
    return lambda x: f(x) * g(x)


def mulk(f, k):
    """Scale ``f`` pointwise by the constant ``k``."""
    return mul(f, const(k))


def add(f, g):
    """Pointwise sum: returns ``h`` with ``h(x) == f(x) + g(x)``."""
    return lambda x: f(x) + g(x)


def addk(f, k):
    """Shift ``f`` pointwise by the constant ``k``."""
    return add(f, const(k))


def negate(f):
    """Pointwise negation: returns ``h`` with ``h(x) == -f(x)``."""
    return lambda x: -f(x)


def const(k):
    """Constant function: always returns ``k`` regardless of input."""
    return lambda _: k
|
harwiltz/mltools | examples/memtest.py | <filename>examples/memtest.py
import jax
import jax.numpy as jnp
from haiku import PRNGSequence
from mltools.logging.html import HTMLExperimentLogger
# NOTE(review): `@profile` is not defined in this file — it appears to rely
# on a profiler harness (e.g. line_profiler / memory_profiler) injecting it
# into builtins at run time; running this script bare would raise NameError.
# Confirm this is intentional for a memory-test script.
@profile
def main():
    # Log a few dummy metrics to exercise the HTML logger's memory behavior.
    monitor = HTMLExperimentLogger("memtest",
            template_path="/home/harwiltz/lab/mltools/examples/",
            template_name="html-logger-template.html")
    rngs = PRNGSequence(jax.random.PRNGKey(0))
    i = 0
    monitor.log_metric("dummy", jax.random.uniform(next(rngs), shape=(4,)).sum(), i)
    i += 1
    monitor.log_metric("dummy", jax.random.uniform(next(rngs), shape=(4,)).sum(), i)
    i += 1
    monitor.log_metric("dummy", jax.random.uniform(next(rngs), shape=(4,)).sum(), i)


if __name__ == "__main__":
    main()
|
harwiltz/mltools | mltools/logging/__init__.py | from .experiment_logger import ExperimentLogger
from .experiment_logger import CLIExperimentLogger
|
harwiltz/mltools | mltools/logging/experiment_logger.py | import logging
class ExperimentLogger(object):
    """Base interface for experiment metric/artifact loggers.

    Subclasses implement the ``log_*`` hooks; ``instantiate`` acts as a
    factory for the concrete implementations.
    """

    def __init__(self, exp_name, tags=None, **kwargs):
        # Fix: `tags` previously defaulted to a mutable `[]` shared across
        # all instances.
        self.exp_name = exp_name
        self.epoch = 0
        self.tags = tags if tags is not None else []

    @staticmethod
    def instantiate(logger_type, exp_name, **kwargs):
        """Factory: build a concrete logger from a type name.

        Raises ValueError for unknown types (previously fell through and
        returned ``None`` silently).
        """
        logger_type = logger_type.lower()
        if logger_type in ("default", "cli"):
            return CLIExperimentLogger(exp_name, **kwargs)
        if logger_type == "comet":
            # Fix: CometExperimentLogger lives in a sibling module and was
            # previously referenced without any import (NameError at call
            # time). Imported lazily to avoid a circular import.
            from mltools.logging.comet import CometExperimentLogger
            return CometExperimentLogger(exp_name, **kwargs)
        raise ValueError("Unknown logger type: {!r}".format(logger_type))

    def log_metric(self, tag, value, step, **kwargs):
        raise NotImplementedError

    def log_image(self, tag, img, step, **kwargs):
        raise NotImplementedError

    def log_plt(self, tag, plt, step, **kwargs):
        raise NotImplementedError

    def log_text(self, tag, text, **kwargs):
        raise NotImplementedError

    def log_parameters(self, params, **kwargs):
        raise NotImplementedError

    def start_epoch(self, **kwargs):
        # Optional hook; concrete loggers may override.
        pass

    def end_epoch(self, **kwargs):
        self.epoch += 1

    def end_experiment(self):
        raise NotImplementedError
class CLIExperimentLogger(ExperimentLogger):
    """ExperimentLogger that writes metrics and text to a stdlib logger.

    Images and matplotlib figures are unsupported: the first attempt to log
    one emits a single warning; later attempts are silently ignored.
    """

    def __init__(self, exp_name, log_level=logging.INFO, **kwargs):
        super(CLIExperimentLogger, self).__init__(exp_name, **kwargs)
        self._has_warned_img = False
        self._has_warned_plt = False
        self._logger = logging.getLogger("Experiment Logger")
        self._logger.setLevel(log_level)
        self._logger.debug(f"Instantiated logger for experiment \"{exp_name}\"")

    def log_metric(self, tag, value, step, **kwargs):
        self._logger.info("[METRIC] {:>32} ({:>5}): {:>16.11f}".format(tag, step, value))

    def log_image(self, tag, img, step, **kwargs):
        if self._has_warned_img:
            return
        warn_message_pre = f"[ WARN] {self.__class__} does not support image logging"
        warn_message_suf = f"skipping log of {tag} ({step})"
        # Fix: Logger.warn is deprecated; use Logger.warning.
        self._logger.warning(f"{warn_message_pre} -- {warn_message_suf}")
        self._has_warned_img = True

    def log_plt(self, tag, plt, step, **kwargs):
        if self._has_warned_plt:
            return
        warn_message_pre = f"[ WARN] {self.__class__} does not support plt logging"
        warn_message_suf = f"skipping log of {tag} ({step})"
        # Fix: Logger.warn is deprecated; use Logger.warning.
        self._logger.warning(f"{warn_message_pre} -- {warn_message_suf}")
        self._has_warned_plt = True

    def log_text(self, tag, text, **kwargs):
        # Truncate the tag to at most 6 chars for column alignment
        # (slicing already clamps, so min(6, len(tag)) was redundant).
        tag = tag[:6]
        self._logger.info("[{:>6}] {}".format(tag, text))

    def log_parameters(self, params, **kwargs):
        self._logger.info("<PARAMS>")
        self._logger.info(params)
        self._logger.info("</PARAMS>")

    def start_epoch(self, **kwargs):
        super(CLIExperimentLogger, self).start_epoch()
        self._logger.debug(f"\n{64 * '='}\n")
        self._logger.debug(f"Starting epoch {self.epoch + 1}...")

    def end_experiment(self):
        self._logger.debug(f"\n[ DONE] Experiment \"{self.exp_name}\" has finished.")
|
harwiltz/mltools | mltools/logging/comet.py | from comet_ml import Experiment, OfflineExperiment
from mltools.logging import ExperimentLogger
class CometExperimentLogger(ExperimentLogger):
    """ExperimentLogger backed by a Comet-ML experiment (online or offline)."""

    def __init__(self, project_name, online=True, offline_directory=None, tags=None, **kwargs):
        # Fix: `tags` previously defaulted to a mutable `[]` shared across
        # all instances; normalize None to a fresh list before passing up.
        tags = [] if tags is None else tags
        super(CometExperimentLogger, self).__init__(project_name, tags=tags, **kwargs)
        if online:
            self.comet = Experiment(project_name=project_name,
                                    **kwargs)
        else:
            self.comet = OfflineExperiment(project_name=project_name,
                                           offline_directory=offline_directory,
                                           **kwargs)
        for tag in self.tags:
            self.comet.add_tag(tag)

    def log_metric(self, tag, value, step, **kwargs):
        self.comet.log_metric(tag, value, step=step, **kwargs)

    def log_image(self, tag, img, step, **kwargs):
        self.comet.log_image(img, name=tag, step=step, **kwargs)

    def log_plt(self, tag, plt, step, **kwargs):
        self.comet.log_figure(figure=plt, figure_name=tag, step=step, **kwargs)

    def log_text(self, tag, text, **kwargs):
        self.comet.log_text(text, **kwargs)

    def log_parameters(self, params, **kwargs):
        self.comet.log_parameters(params, **kwargs)

    def start_epoch(self, **kwargs):
        super(CometExperimentLogger, self).start_epoch()

    def end_epoch(self, **kwargs):
        super(CometExperimentLogger, self).end_epoch()
        self.comet.log_epoch_end(self.epoch, **kwargs)

    def end_experiment(self):
        self.comet.end()
|
harwiltz/mltools | test/mltools/test_ops.py | import jax
import jax.numpy as jnp
import unittest
import mltools.ops as ops
from functools import partial
class TestOps(unittest.TestCase):
    """Unit tests for ``mltools.ops.confine``."""

    def setUp(self):
        # `small_vector` has norm 5 (within the bound); `big_vector` has
        # norm 20 (outside the bound).
        self.bound = 10.
        self.small_vector = jnp.array([3.,4.])
        self.big_vector = jnp.array([12, 16.])

    def test_confine_vector(self):
        # Within the bound: unchanged. Outside: rescaled onto the bound
        # while preserving direction (component ratio).
        v = ops.confine(self.bound, self.small_vector)
        self.assertTrue(jnp.allclose(v, self.small_vector))
        V = ops.confine(self.bound, self.big_vector)
        self.assertFalse(jnp.allclose(V, self.big_vector))
        self.assertEqual(jnp.sqrt(jnp.square(V).sum()), self.bound)
        self.assertEqual(V[1] / V[0], self.big_vector[1] / self.big_vector[0])

    def test_confine_matrix(self):
        # Confining a matrix bounds the max row norm; both rows are scaled
        # but keep their directions.
        matrix = jnp.array([self.small_vector, self.big_vector])
        m = ops.confine(self.bound, matrix)
        self.assertTrue(norm(m) <= self.bound)
        self.assertFalse(jnp.allclose(m[0], self.small_vector))
        self.assertTrue(same_ratio(m[0], self.small_vector))
        self.assertFalse(jnp.allclose(m[1], self.big_vector))
        self.assertTrue(same_ratio(m[1], self.big_vector))
        self.assertEqual(norm(m[1]), self.bound)

    def test_confine_batch_vectors(self):
        # vmap applies confine per row: the small row is untouched, the big
        # row is rescaled onto the bound.
        batch = jnp.array([self.small_vector, self.big_vector])
        b = jax.vmap(partial(ops.confine, self.bound))(batch)
        self.assertTrue(jnp.allclose(b[0], self.small_vector))
        self.assertFalse(jnp.allclose(b[1], self.big_vector))
        self.assertEqual(norm(b[1]), self.bound)
        self.assertTrue(same_ratio(b[1], self.big_vector))
def same_ratio(v1, v2, eps=1e-6):
    """True when the two 2-vectors have (nearly) the same y/x slope."""
    slope_a = v1[1] / v1[0]
    slope_b = v2[1] / v2[0]
    return jnp.abs(slope_a - slope_b) < eps
def norm(v):
    """Largest Euclidean norm over the last axis of v.

    For a single vector this is just its norm; for a matrix it is the
    maximum row norm.
    """
    row_norms = jnp.sqrt(jnp.square(v).sum(axis=-1))
    return jnp.max(row_norms)
if __name__ == "__main__":
    # Allow running this test module directly: python test_ops.py
    unittest.main()
|
harwiltz/mltools | mltools/ops.py | <filename>mltools/ops.py
import jax
import jax.numpy as jnp
@jax.custom_vjp
def clip_gradient(lo, hi, x):
    r"""
    Differentiable gradient clipping function.

    Identity on the forward pass; on the backward pass the incoming
    cotangent is clipped elementwise to [lo, hi].

    Taken from https://jax.readthedocs.io/en/latest/notebooks/Custom_derivative_rules_for_Python_code.html
    """
    return x

def clip_gradient_fwd(lo, hi, x):
    # Forward rule: return the primal output and stash (lo, hi) as residuals
    # for the backward pass.
    return x, (lo, hi)

def clip_gradient_bwd(res, g):
    # Backward rule: one output per primal input (lo, hi, x).
    # lo and hi receive no gradient (None); x's gradient is the clipped g.
    lo, hi = res
    return (None, None, jnp.clip(g, lo, hi))

# Register the custom forward/backward rules with the primal function.
clip_gradient.defvjp(clip_gradient_fwd, clip_gradient_bwd)
def confine(bound: float,
            x : jnp.ndarray) -> jnp.ndarray:
    r"""
    Rescale a vector if its norm is too large.

    If the (largest, over the last axis) Euclidean norm of ``x`` exceeds
    ``bound``, ``x`` is uniformly scaled so that norm equals ``bound``;
    otherwise ``x`` is returned unchanged.

    Parameters
    ----------
    bound : float
        The maximum allowable norm
    x : jnp.ndarray
        The vector to rescale
    """
    magnitude = jnp.max(jnp.sqrt(jnp.square(x).sum(axis=-1)))
    rescaled = x * bound / magnitude
    return jnp.where(magnitude > bound, rescaled, x)
|
harwiltz/mltools | mltools/distribution/normal.py | <reponame>harwiltz/mltools
import jax
import jax.numpy as jnp
import jax.scipy.stats.norm as norm
from mltools.distribution import Distribution, Probability
from typing import Any, List, Optional, Union
class Normal(Distribution):
    """ Univariate normal distribution """

    def __init__(self, loc: float, scale: float) -> None:
        # FIX: __init__ was annotated "-> Distribution"; it returns None.
        # loc is the mean, scale the standard deviation.
        self.loc = loc
        self.scale = scale

    def sample(self, rng: jax.random.PRNGKey) -> float:
        """Draw one sample via the reparameterization loc + scale * z."""
        return self.loc + self.scale * jax.random.normal(rng)

    def pdf(self, x: float) -> Probability:
        """Density of N(loc, scale^2) at x."""
        return norm.pdf(x, loc=self.loc, scale=self.scale)

    @staticmethod
    def instantiate(params: str) -> Distribution:
        """Build a Normal from a whitespace-separated "loc scale" string.

        FIX: marked @staticmethod — it takes no self/cls and is invoked as
        Normal.instantiate(...); the decorator also makes instance-bound
        calls safe.
        """
        tokens = params.split()
        assert len(tokens) >= 2, \
            f"{__class__.__name__} requires 2 parameters: loc and scale"
        loc = float(tokens[0])
        scale = float(tokens[1])
        return Normal(loc, scale)
class MixedNormal(Distribution):
    """ Univariate mixture of gaussians """

    def __init__(self,
                 dists: List[Normal],
                 weights: Optional[List[float]]=None) -> None:
        """
        Parameters
        ----------
        dists : List[Normal]
            The mixture components.
        weights : Optional[List[float]]
            Nonnegative mixture weights, normalized to sum to 1.
            Defaults to a uniform mixture.
        """
        self.dists = dists
        self.n = len(dists)
        if weights is None:
            self.weights = jnp.ones(self.n, dtype=jnp.float32) / self.n
        else:
            weights = jnp.array(weights)
            assert jnp.all(weights >= 0), "Mixture weights must be nonnegative"
            assert len(weights) == self.n, \
                "Number of weights does not match number of distributions:\n" + \
                f"Expect {self.n}, got {len(weights)}"
            self.weights = weights / jnp.sum(weights)

    def sample(self, rng: jax.random.PRNGKey) -> float:
        """Sample a component index by weight, then sample from that component."""
        rng, sub = jax.random.split(rng)
        i = jax.random.choice(rng, self.n, p=self.weights)
        return self.dists[i].sample(sub)

    def pdf(self, x: float) -> Probability:
        """Weighted sum of component densities at x."""
        densities = jnp.array([p.pdf(x) for p in self.dists])
        return densities.dot(self.weights)

    @staticmethod
    def instantiate(params: str) -> Distribution:
        """Parse comma-separated "loc scale [weight]" groups.

        Weights must be given for all components or for none.
        """
        args = params.split(",")
        dists = []
        weights = []
        for arg in args:
            tokens = arg.split()
            dists.append(Normal.instantiate(" ".join(tokens[:2])))
            if len(tokens) >= 3:
                weights.append(float(tokens[2]))
            else:
                weights.append(None)
        # BUG FIX: the old check was jnp.all(weights is None), which tests
        # the identity of the whole *list* (always False). It neither caught
        # partially-specified weights nor converted a fully-unspecified list
        # of Nones back to None (which then crashed jnp.array in __init__).
        # Check element-wise instead.
        missing = [w is None for w in weights]
        assert all(missing) or not any(missing), \
            f"Error instantiating {__class__.__name__} with weights {weights}:\n" + \
            "Cannot leave a subset of weights unspecified"
        if all(missing):
            weights = None
        return MixedNormal(dists, weights)
|
harwiltz/mltools | mltools/util.py | import functools
from typing import Mapping
def flatten_hparams(hparams: dict, base_key: str='') -> dict:
    """
    Flattens nested dicts for easier viewing

    Some tools (like comet.ml) require hyperparameters to be in this format
    for logging. Nested keys are joined with '.' — e.g. {"a": {"b": 1}}
    becomes {"a.b": 1}.

    Parameters
    ----------
    hparams: dict
        A dictionary
    base_key: str (optional)
        A prefix to prepend to hyperparameter keys

    Returns
    -------
    dict:
        A dict whose elements are not dicts
    """
    prefix = f"{base_key}." if base_key else ""
    flat = {}
    for key, value in hparams.items():
        qualified = f"{prefix}{key}"
        if isinstance(value, Mapping):
            # Recurse into nested mappings, carrying the dotted prefix.
            flat.update(flatten_hparams(value, qualified))
        else:
            flat[qualified] = value
    return flat
|
harwiltz/mltools | mltools/distribution/__init__.py | from .base import Distribution, Probability
from .normal import (
Normal,
MixedNormal
)
def parse_distribution(dist_type, dist_params):
    """Instantiate a Distribution from a (type name, parameter string) pair.

    The type name is case-insensitive; unknown types raise
    NotImplementedError.
    """
    factories = {
        "normal": Normal.instantiate,
        "mixed_normal": MixedNormal.instantiate,
    }
    key = dist_type.lower()
    if key in factories:
        return factories[key](dist_params)
    raise NotImplementedError(f"Distribution type \"{key}\" not recognized")
|
harwiltz/mltools | mltools/distribution/base.py | <reponame>harwiltz/mltools
import jax
import jax.numpy as jnp
from typing import Any
# Type alias: a probability density/mass value, kept for readable signatures.
Probability = float

class Distribution:
    """
    Distribution base class

    Abstract interface: subclasses implement sample() and pdf().
    """
    def sample(self, rng : jax.random.PRNGKey) -> Any:
        # Draw a single sample using the given PRNG key.
        raise NotImplementedError

    def pdf(self, x: Any) -> Probability:
        # Evaluate the density at x.
        raise NotImplementedError
|
harwiltz/mltools | mltools/logging/html.py | import hashlib
import jinja2
import logging
import matplotlib.pyplot as plt
import numpy as np
import os
from jinja2 import Environment, FileSystemLoader, select_autoescape
from mltools.logging import ExperimentLogger
class HTMLExperimentLogger(ExperimentLogger):
    """Experiment logger that renders metrics, images, plots, text, and
    parameters into a static HTML page (via a jinja2 template), re-rendered
    on every update.
    """

    FILENAME = "index.html"

    def __init__(self,
                 exp_name,
                 root_path=".htmllogs",
                 template_path="./templates",
                 template_name="index.html",
                 metric_smoothing=0.7,
                 metric_figsize=(4, 3),
                 plt_style="seaborn",
                 **kwargs):
        super(HTMLExperimentLogger, self).__init__(exp_name, **kwargs)
        # Everything that has been logged, grouped by kind; consumed by the
        # jinja2 template on each build_page() call.
        self.data = {
            "metrics": {},
            "metrics_rendered": {},
            "images": {},
            "plots": {},
            "text": {},
            "parameters": {},
        }
        self.metric_smoothing = metric_smoothing
        self.metric_figsize = metric_figsize
        self.plt_style = plt_style
        plt.style.use(plt_style)
        self.rootdir = os.path.join(root_path, self.exp_name)
        os.makedirs(self.rootdir, exist_ok=True)
        self.filename = os.path.join(self.rootdir, HTMLExperimentLogger.FILENAME)
        self._env = Environment(
            loader=FileSystemLoader(os.path.abspath(template_path)),
            autoescape=select_autoescape(['html'])
        )
        self._template = self._env.get_template(template_name)
        self._first_write = True

    def build_page(self):
        """Re-render the HTML page from all data logged so far."""
        if self._first_write:
            # Tell the user where to point their browser, once.
            (logging
             .getLogger("HTMLExperimentLogger")
             .info(f"Displaying logs at file://{os.path.abspath(self.filename)}"))
            self._first_write = False
        with open(self.filename, 'w') as f:
            f.write(self._template.render(title=self.exp_name, epoch=self.epoch, data=self.data))

    def log_metric(self, tag, value, step, **kwargs):
        """Append an exponentially-smoothed metric value and re-plot its curve."""
        if tag not in self.data['metrics']:
            self.data['metrics'][tag] = [value]
        else:
            prev = self.data['metrics'][tag][-1]
            # EMA: prev + (1 - smoothing) * (value - prev)
            v = (value - prev) * (1 - self.metric_smoothing) + prev
            self.data['metrics'][tag].append(v)
        metrics = self.data['metrics'][tag]
        fig = plt.figure(figsize=self.metric_figsize)
        plt.plot(metrics)
        fname = self._plot_fname(tag)
        plt.savefig(fname)
        plt.close(fig)
        self.data['metrics_rendered'][tag] = fname
        self.build_page()

    def log_image(self, tag: str, img: str, step: int, **kwargs):
        """Record an image path/URL for the given tag and re-render."""
        self.data['images'][tag] = (img, step)
        self.build_page()

    def log_plt(self, tag, fig, step, **kwargs):
        """Save a matplotlib figure to disk and link it under the tag."""
        fname = self._plot_fname(tag)
        fig.savefig(fname)
        self.data['plots'][tag] = (fname, step)
        self.build_page()

    def log_text(self, tag, text, **kwargs):
        """Append a text entry under the tag and re-render."""
        if tag not in self.data['text']:
            self.data['text'][tag] = []
        self.data['text'][tag].append(text)
        self.build_page()

    def log_parameters(self, params, **kwargs):
        """Record the hyperparameter dict and re-render.

        FIX: previously this was the only log_* method that did not call
        build_page(), so parameters were invisible until the next metric
        arrived.
        """
        self.data['parameters'] = params
        self.build_page()

    def _plot_fname(self, tag):
        # BUG FIX: previously hashed via the builtin hash(), which is
        # randomized per interpreter run (PYTHONHASHSEED), so plot file
        # names were not stable across runs. md5 of the tag is stable.
        fname = hashlib.md5(tag.encode()).hexdigest()
        return os.path.abspath(f"{os.path.join(self.rootdir, fname)}.png")
|
schreven/OMPEval | hand_evaluators.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 22 14:37:11 2019
@author: cyril
"""
""" This file compares the speed of three different methods that compute the equity of a hand:
OMPEval, Deuces, and PyPokerEngine"""
import sys
sys.path.append("..")
import time
##### OMPEVAL #####
print("## OMPEVAL ##")
# Import ctypes, it is native to python
import numpy.ctypeslib as ctl
import ctypes
libname = 'libhandequity.so'
# The path may have to be changed
libdir = '../OMPEval-fork/lib/'
lib = ctl.load_library(libname, libdir)
# Defining the python function from the library
omp_hand_equity = lib.hand_equity
# Determining its arguments and return types
omp_hand_equity.argtypes = [ctypes.c_char_p, ctypes.c_char_p, ctypes.c_int, ctypes.c_int, ctypes.c_double, ctypes.c_bool]
omp_hand_equity.restype = ctypes.c_double
# Defining the arguments
hole_card = "9sKs" # 9 of spades and king of spades
community_card = "8dAhKh" # Number of cards defined here may vary between 0 and 5
nb_players = 2 # Number of players may vary between 2 and 6
nb_board_cards = 5 # Default is 5. If = 3, showdown is at flop
std_err_tol = 10**-3 # Default is 10**-5. This is the std in % at which the hand equity will be returned
verbose = True # Default is False
time_1 = time.time()
# Calling the function (goes from python to C++ and back)
hand_equity = omp_hand_equity(hole_card.encode(), community_card.encode(), nb_players, nb_board_cards, std_err_tol, verbose)
print ("The equity is: " + str(hand_equity*100)+"%")
time_2 = time.time()
print("The total time taken is: "+ str((time_2-time_1)*1000)+ " [ms]")
#print(results)
##### DEUCES #####
print("## DEUCES ##")
from deuces.card import Card
from deuces.evaluator import Evaluator
from deuces.deck import Deck
def deuces_estimate_win_rate(nb_simulation, nb_player, hole_card, board):
    """Monte Carlo equity estimate for a hand using the deuces evaluator."""
    wins = 0
    for _ in range(nb_simulation):
        wins += deuces_montecarlo_simulation(nb_player, hole_card, board)
    return 1.0 * wins / nb_simulation
def deuces_montecarlo_simulation(nb_player, hole_card, board):
    """Play out one random completion of the hand; return 1 on a win, else 0.

    Uses the module-level deuces ``evaluator``.
    """
    deck = Deck()
    # Remove the known (dead) cards: our hole cards and the board so far.
    for card in hole_card + board:
        deck.remove_card(card)
    board_full = board + deck.draw(5 - len(board))
    opponents_hole = [deck.draw(2) for _ in range(nb_player - 1)]
    opponents_score = [evaluator.evaluate(board_full, hole) for hole in opponents_hole]
    my_score = evaluator.evaluate(board_full, hole_card)
    # deuces: lower score = stronger hand; ties count as a loss.
    return 1 if my_score < min(opponents_score) else 0
# Equity estimate: 9s Ks vs. 5 random opponents on an 8d Ah Kh flop.
evaluator = Evaluator()
hole_card = [Card.new('9s'),Card.new('Ks')]
board = [Card.new('8d'),Card.new('Ah'),Card.new('Kh')]
nb_players = 6
nb_simulations = 10000
time_1 = time.time()
hand_equity = deuces_estimate_win_rate(nb_simulations,nb_players,hole_card, board)
print ("The equity is: " + str(hand_equity*100)+"%")
time_2 = time.time()
print("The total time taken is: "+ str((time_2-time_1)*1000)+ " [ms]")
### Getting evals/s
# Raw evaluator throughput on a fixed, fully-dealt board.
evaluator = Evaluator()
hole_card = [Card.new('9s'),Card.new('Ks')]
board_full = [Card.new('8d'),Card.new('Ah'),Card.new('Kh'),Card.new('2s'),Card.new('6h')]
nb_simulations = 100000
time_1 = time.time()
for _ in range (nb_simulations):
    evaluator.evaluate(board_full, hole_card)
time_2 = time.time()
eval_per_sec = nb_simulations / (time_2-time_1)
print("Speed: "+ str(eval_per_sec*10**-3)+" [kEval/s]")
##### PYPOKERENGINE #####
print('### PYPOKERENGINE ###')
from pypokerengine.engine.hand_evaluator import HandEvaluator
from pypokerengine.utils.card_utils import _pick_unused_card, _fill_community_card, gen_cards
def estimate_win_rate(nb_simulation, nb_player, hole_card, community_card=None):
    """Monte Carlo equity estimate using PyPokerEngine's hand evaluator.

    Estimates the ratio of winning games given the current state of the game.
    """
    if not community_card:
        community_card = []
    # Convert string card codes into Card objects.
    community_card = gen_cards(community_card)
    hole_card = gen_cards(hole_card)
    wins = sum(montecarlo_simulation(nb_player, hole_card, community_card)
               for _ in range(nb_simulation))
    return 1.0 * wins / nb_simulation
def montecarlo_simulation(nb_player, hole_card, community_card):
    """Simulate one showdown from the current state; return 1 on a strict win."""
    # Complete the board and deal random hole cards to the opponents.
    community_card = _fill_community_card(community_card, used_card=hole_card + community_card)
    unused_cards = _pick_unused_card((nb_player - 1) * 2, hole_card + community_card)
    opponents_hole = [unused_cards[2 * i:2 * i + 2] for i in range(nb_player - 1)]
    opponents_score = [HandEvaluator.eval_hand(hole, community_card) for hole in opponents_hole]
    my_score = HandEvaluator.eval_hand(hole_card, community_card)
    # PyPokerEngine: higher score = stronger hand; ties count as a loss.
    return 1 if my_score > max(opponents_score) else 0
# Equity estimate: S9 SK vs. 5 random opponents on a D8 HA HK flop.
hole_card = ['S9','SK']
board = ['D8','HA','HK']
nb_players = 6
nb_simulations = 10000
time_1 = time.time()
hand_equity = estimate_win_rate(nb_simulations,nb_players,hole_card, board)
print ("The equity is: " + str(hand_equity*100)+"%")
time_2 = time.time()
print("The total time taken is: "+ str((time_2-time_1)*1000)+ " [ms]")
### Getting evals/s
# Raw evaluator throughput on a fixed, fully-dealt board.
hole_card = ['S9','SK']
board_full = ['D8','HA','HK','S2','H6']
hole_card = gen_cards(hole_card)
board_full = gen_cards(board_full)
nb_simulations = 100000
time_1 = time.time()
for _ in range (nb_simulations):
    HandEvaluator.eval_hand(hole_card, board_full)
time_2 = time.time()
eval_per_sec = nb_simulations / (time_2-time_1)
print("Speed: "+ str(eval_per_sec*10**-3)+" [kEval/s]")
|
retroxsky06/Final_Project | webpage/app.py | <filename>webpage/app.py
from flask import Flask, render_template, request
from run_ml import predictions
app = Flask (__name__)
@app.route("/")
def index():
    """Serve the landing page containing the prediction form."""
    return render_template("index.html")
@app.route('/predict',methods=['POST'])
def predict():
    """Read the form fields, encode them for the model, and render the result.

    Encodings must match those used when the model was trained
    (see run_ml.predictions).
    """
    # Get the data from the POST request.
    if request.method == "POST":
        print(request.form["age"])
        age = float(request.form["age"])
        gender = request.form["gender"]
        hypertension = request.form["hypertension"]
        heart_disease = request.form["heart_disease"]
        ever_married = request.form["ever_married"]
        smoking_status = request.form["smoking_status"]

        # Categorical encodings; unmatched values fall back to 0, matching
        # the original if/elif chains' behavior.
        gender_codes = {"Male": 1, "Female": 0, "Other": 2}
        yes_no = {"No": 0, "Yes": 1}
        smoking_codes = {"former": 0, "never smoked": 1, "smokes": 2, "unknown": 3}

        # BUG FIX: age was parsed above but never forwarded — model_age was
        # initialized to 0 and passed unchanged, so the model always saw
        # age = 0 regardless of the user's input.
        model_age = age
        model_gender = gender_codes.get(gender, 0)
        model_hyper = yes_no.get(hypertension, 0)
        model_heart = yes_no.get(heart_disease, 0)
        model_married = yes_no.get(ever_married, 0)
        model_smoking = smoking_codes.get(smoking_status, 0)

        prediction = predictions(model_age, model_gender, model_hyper, model_heart,
                                 model_married, model_smoking)
        output = prediction[0]
        results = ""
        if output == 0:
            results = "No Stroke - keep up the healthy habits!"
        elif output == 1:
            results = "Stroke - sorry buddy."
        print(output)
        return render_template("results.html", results=results)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.