| id (int64, 0-458k) | file_name (string, 4-119 chars) | file_path (string, 14-227 chars) | content (string, 24-9.96M chars) | size (int64, 24-9.96M) | language (1 class) | extension (14 classes) | total_lines (int64, 1-219k) | avg_line_length (float64, 2.52-4.63M) | max_line_length (int64, 5-9.91M) | alphanum_fraction (float64, 0-1) | repo_name (string, 7-101 chars) | repo_stars (int64, 100-139k) | repo_forks (int64, 0-26.4k) | repo_open_issues (int64, 0-2.27k) | repo_license (12 classes) | repo_extraction_date (433 classes) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 6,100 | models.py | amonapp_amon/amon/apps/metrics/models.py |
from django.db import models
from django.contrib.postgres.fields import JSONField
class Metric(models.Model):
    name = models.CharField(max_length=128)
    type = models.CharField(max_length=10, choices=[('metric', 'metric'), ('check', 'check')], default='metric')  # declared in the 0001_initial migration below and referenced by __unicode__
tags = JSONField()
retention = models.IntegerField()
organization = models.ForeignKey('organizations.Organization', on_delete=models.CASCADE)
class Meta:
unique_together = ("name", "tags", 'organization')
def __unicode__(self):
return u"Metric - {0}/{1}".format(self.name, self.type)
class MetricData(models.Model):
metric = models.ForeignKey('Metric', on_delete=models.CASCADE)
timestamp = models.IntegerField()
value = models.FloatField()
class Meta:
index_together = ["metric", "timestamp"]
def __unicode__(self):
return u"Metric - {0}/{1}".format(self.metric.name)
class MetricDataSummary(models.Model):
metric = models.ForeignKey('Metric', on_delete=models.CASCADE)
timestamp = models.IntegerField()
sum = models.FloatField()
upper = models.FloatField()
lower = models.FloatField()
mean = models.FloatField()
# TODO
# upper_90 = models.FloatField()
# lower_90 = models.FloatField()
# mean_90 = models.FloatField()
# sum_90 = models.FloatField()
count = models.FloatField()
| 1,266 | Python | .py | 32 | 34.25 | 92 | 0.691237 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,101 | 0001_initial.py | amonapp_amon/amon/apps/metrics/migrations/0001_initial.py |
# Generated by Django 2.0.2 on 2018-02-19 21:01
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('organizations', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Metric',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128)),
('type', models.CharField(choices=[('metric', 'metric'), ('check', 'check')], default='metric', max_length=10)),
('tags', django.contrib.postgres.fields.jsonb.JSONField()),
('organization', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='organizations.Organization')),
],
),
migrations.CreateModel(
name='MetricData',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.IntegerField()),
('value', models.FloatField()),
('metric', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='metrics.Metric')),
],
),
migrations.CreateModel(
name='MetricDataChecks',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.IntegerField()),
('value', models.CharField(choices=[('ok', 'ok'), ('warn', 'warn'), ('crit', 'crit'), ('unknown', 'unknown')], default='unknown', max_length=10)),
('message', models.TextField()),
('metric', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='metrics.Metric')),
],
),
migrations.AlterIndexTogether(
name='metricdatachecks',
index_together={('metric', 'timestamp')},
),
migrations.AlterIndexTogether(
name='metricdata',
index_together={('metric', 'timestamp')},
),
migrations.AlterUniqueTogether(
name='metric',
unique_together={('name', 'tags', 'organization')},
),
]
| 2,410 | Python | .py | 52 | 35.076923 | 162 | 0.577381 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,102 | models.py | amonapp_amon/amon/apps/settings/models.py |
from amon.apps.core.basemodel import BaseModel
class DataRetentionModel(BaseModel):
def __init__(self):
super(DataRetentionModel, self).__init__()
self.collection = self.mongo.get_collection('data_retention_settings')
data_retention_model = DataRetentionModel()
| 285 | Python | .py | 6 | 42.666667 | 78 | 0.75 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,103 | urls.py | amonapp_amon/amon/apps/settings/urls.py |
from django.conf.urls import url
from . import views
urlpatterns = (
url(r"^$", views.data, name='settings'),
url(r"^data/$", views.data, name='settings_data'),
url(r"^cleanup/$", views.cleanup, name='settings_cleanup'),
url(r"^api/$", views.api, name='settings_api'),
url(r"^api/delete/(?P<key_id>\w+)/$", views.delete_api_key, name='settings_api_delete_key'),
url(r"^api/history/$", views.api_history, name='api_history'),
)
| 455 | Python | .py | 10 | 41.7 | 96 | 0.650794 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,104 | forms.py | amonapp_amon/amon/apps/settings/forms.py |
from django import forms
from django.conf import settings
from amon.apps.settings.models import data_retention_model
from amon.apps.api.utils import generate_api_key
from amon.apps.api.models import api_key_model
PERIOD_CHOICES = [
(1, '24 hours'),
(15, '15 days'),
(30, '30 days'),
(60, '60 days'),
(3600, 'Forever'),
]
CLEANUP_CHOICES = [
(10800, 'more than 3 hours'),
(21600, '6 hours'),
(86400, '24 hours'),
(259200, '72 hours'),
]
CHECK_CHOICES = [
(15, '15 seconds'),
(30, '30 seconds'),
(60, '1 minute'),
(180, '3 minutes'),
(300, '5 minutes'),
(900, '15 minutes'),
(1800, '30 minutes'),
]
if settings.DEBUG == True:
CHECK_CHOICES = [(5, '5 seconds'), ] + CHECK_CHOICES
class DataRetentionForm(forms.Form):
def __init__(self, *args, **kwargs):
data_retention_rules = data_retention_model.get_one()
super(DataRetentionForm, self).__init__(*args, **kwargs)
self.fields['keep_data'].widget.attrs.update({'select2-dropdown': '', 'data-size': 360})
self.fields['check_every'].widget.attrs.update({'select2-dropdown': '', 'data-size': 360})
self.fields['keep_data'].initial = data_retention_rules.get('keep_data', 30)
self.fields['check_every'].initial = data_retention_rules.get('check_every', 60)
keep_data = forms.TypedChoiceField(choices=PERIOD_CHOICES, label='Keep Data for', coerce=int)
check_every = forms.TypedChoiceField(choices=CHECK_CHOICES, label='Check every', coerce=int)
def save(self):
data_retention_model.delete_all_and_insert(self.cleaned_data)
class CleanupDataForm(forms.Form):
def __init__(self, *args, **kwargs):
super(CleanupDataForm, self).__init__(*args, **kwargs)
self.fields['cleanup_before'].widget.attrs.update({'select2-dropdown': '', 'data-size': 360})
cleanup_before = forms.TypedChoiceField(choices=CLEANUP_CHOICES, label='Period', coerce=int)
def save(self):
data_retention_model.delete_all_and_insert(self.cleaned_data)
TRIAL_LOCATIONS = (
('send', 'Send'),
('full', 'Send and Create'),
)
class ApiKeyForm(forms.Form):
def __init__(self, *args, **kwargs):
super(ApiKeyForm, self).__init__(*args, **kwargs)
label = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder': ''}))
def save(self):
label = self.cleaned_data.get('label')
key = generate_api_key()
data = {'label': label, "key": key, "account_id": settings.ACCOUNT_ID}
api_key_model.add(data=data)
return True
| 2,645 | Python | .py | 62 | 36.516129 | 101 | 0.65207 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,105 | views.py | amonapp_amon/amon/apps/settings/views.py |
from django.shortcuts import render
from django.contrib import messages
from django.shortcuts import redirect
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from amon.apps.settings.forms import DataRetentionForm, ApiKeyForm, CleanupDataForm
from amon.apps.api.models import api_key_model, api_history_model
@login_required
def data(request):
if request.method == 'POST':
form = DataRetentionForm(request.POST)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'Data Retention settings updated')
redirect_url = reverse('settings_data')
return redirect(redirect_url)
else:
form = DataRetentionForm()
return render(request, 'settings/data.html', {
"form": form
})
@login_required
def cleanup(request):
if request.method == 'POST':
form = CleanupDataForm(request.POST)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'Cleaning up')
redirect_url = reverse('settings_cleanup')
return redirect(redirect_url)
else:
form = CleanupDataForm()
return render(request, 'settings/cleanup.html', {
"form": form
})
@login_required
def api(request):
api_keys = api_key_model.get_all()
if request.method == 'POST':
form = ApiKeyForm(request.POST)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'API key created')
redirect_url = reverse('settings_api')
return redirect(redirect_url)
else:
form = ApiKeyForm()
return render(request, 'settings/api.html', {
"form": form,
"api_keys": api_keys
})
@login_required
def api_history(request):
result = api_history_model.get_all()
return render(request, 'settings/api_history.html', {
"api_history": result
})
@login_required
def delete_api_key(request, key_id=None):
api_key_model.delete(key_id)
return redirect(reverse('settings_api'))
| 2,171 | Python | .py | 61 | 28.311475 | 91 | 0.665207 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,106 | forms_test.py | amonapp_amon/amon/apps/settings/tests/forms_test.py |
from django.test.client import Client
from django.urls import reverse
from django.test import TestCase
from nose.tools import *
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.settings.models import data_retention_model
class TestDataRetention(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
User.objects.all().delete()
def test_data_retention_form(self):
url = reverse('settings_data')
response = self.c.post(url, {'check_every': 60, 'keep_data': 30})
result = data_retention_model.get_one()
assert result['check_every'] == 60
assert result['keep_data'] == 30
response = self.c.post(url, {'check_every': 300, 'keep_data': 60})
result = data_retention_model.get_one()
assert result['check_every'] == 300
assert result['keep_data'] == 60
| 1,100 | Python | .py | 25 | 36.76 | 85 | 0.669223 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,107 | models.py | amonapp_amon/amon/apps/api/models.py |
# from amon.apps.core.basemodel import BaseModel
# from datetime import datetime, timedelta
# from amon.utils.dates import unix_utc_now
# from amon.apps.system.models import system_model
# from amon.apps.processes.models import process_model
# from amon.apps.healthchecks.models import health_checks_results_model
# from django.conf import settings
# from amon.apps.plugins.models import plugin_model
# from amon.apps.alerts.alerter import (
# server_alerter,
# process_alerter,
# uptime_alerter,
# plugin_alerter,
# health_check_alerter
# )
# from amon.apps.api.utils import generate_api_key
# # Proxy, testable model for sending data to the backend
# class ApiModel(BaseModel):
# def __init__(self):
# super(ApiModel, self).__init__()
# def save_data_to_backend(self, data=None, server=None):
# if server is None:
# return
# time_now = unix_utc_now()
# date_now = datetime.utcnow()
# expires_days = server.get('keep_data', 30)
# if settings.KEEP_DATA is not None:
# expires_days = settings.KEEP_DATA
# expires_at = date_now + timedelta(days=expires_days)
# system_data = data.get('system')
# process_data = data.get('processes')
# plugin_data = data.get('plugins')
# checks_data = data.get('checks')
# telegraf_data = data.get('series')
# if telegraf_data:
# formated_data = plugin_model.format_telegraf_to_amon(data=telegraf_data)
# if len(formated_data) > 0:
# for name, d in formated_data.items():
# plugin = plugin_model.save_data(
# server=server,
# name=name,
# data=d,
# time=time_now,
# expires_at=expires_at
# )
# if system_data:
# system_model.save_data(
# server=server,
# data=system_data.copy(),
# time=time_now,
# expires_at=expires_at
# )
# server_alerter.check(data=system_data, server=server)
# if process_data:
# data = process_model.save_data(
# server=server,
# data=process_data,
# time=time_now,
# expires_at=expires_at
# )
# process_alerter.check(data=data, server=server)
# uptime_alerter.check(data=data, server=server)
# if plugin_data:
# formated_data = plugin_model.flatten_plugin_data(data=plugin_data)
# for name, data in formated_data.items():
# plugin = plugin_model.save_data(
# server=server,
# name=name,
# data=data,
# time=time_now,
# expires_at=expires_at
# )
# plugin_alerter.check(data=data, plugin=plugin, server=server)
# if checks_data:
# formated_check_data = health_checks_results_model.save(data=checks_data, server=server)
# health_check_alerter.check(data=formated_check_data, server=server)
# class ApiKeyModel(BaseModel):
# def __init__(self):
# super(ApiKeyModel, self).__init__()
# self.collection = self.mongo.get_collection('api_keys')
# def get_or_create(self):
# result = self.collection.find_one(sort=[("created", self.asc)])
# if result is None:
# self.add_initial_data()
# result = self.collection.find_one(sort=[("created", self.asc)])
# return result
# def add_initial_data(self):
# key = generate_api_key()
# data = {'label': "first-key", "key": key}
# self.add(data)
# def add(self, data=None):
# data['created'] = unix_utc_now()
# self.collection.insert(data)
# self.collection.ensure_index([('created', self.desc)], background=True)
# class ApiHistoryModel(BaseModel):
# def __init__(self):
# super(ApiHistoryModel, self).__init__()
# self.collection = self.mongo.get_collection('api_history')
# def get_all(self):
# result = self.collection.find(sort=[("time", self.desc)])
# return result
# def add(self, data):
# date_now = datetime.utcnow()
# expires_at = date_now + timedelta(days=7)
# data["expires_at"] = expires_at
# self.collection.insert(data)
# self.collection.ensure_index([('time', self.desc)], background=True)
# self.collection.ensure_index([('expires_at', 1)], expireAfterSeconds=0)
# api_key_model = ApiKeyModel()
# api_history_model = ApiHistoryModel()
# api_model = ApiModel()
| 4,822 | Python | .py | 111 | 42.054054 | 101 | 0.575407 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,108 | mixins.py | amonapp_amon/amon/apps/api/mixins.py |
from amon.utils.dates import unix_utc_now
from amon.apps.api.models import api_history_model
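# Mixin that records each API request (remote address, method, path, timestamp) in the api_history collection before the response is returned.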
class SaveRequestHistoryMixin(object):
def finalize_response(self, request, response, *args, **kwargs):
request_data = {
'remote_address': request.META['REMOTE_ADDR'],
'request_method': request.method,
'request_path': request.get_full_path(),
'time': unix_utc_now()
}
api_history_model.add(request_data)
return super(SaveRequestHistoryMixin, self).finalize_response(request, response, *args, **kwargs)
| 599 | Python | .py | 12 | 40.25 | 105 | 0.665499 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,109 | urls.py | amonapp_amon/amon/apps/api/urls.py |
from django.conf.urls import url
from rest_framework.documentation import include_docs_urls
urlpatterns = [
url(r'^docs/', include_docs_urls(
title='Amon API Docs',
authentication_classes=[],
permission_classes=[]))
]
# from amon.apps.api.views.core import (
# SystemDataView,
# LegacySystemDataView,
# SystemInfoView,
# TestView,
# CheckIpAddressView
# )
# from amon.apps.api.views.telegraf import TelegrafDataView
# from amon.apps.api.views.collectd import CollectdDataView
# from amon.apps.api.views.servers import (
# ServersListView,
# ServersCreateView,
# ServersDeleteView
# )
# from amon.apps.api.views.tags import (
# TagsListView,
# TagsCreateView,
# TagsDeleteView,
# TagsUpdateView,
# TagGroupsListView,
# TagGroupsCreateView,
# TagGroupsDeleteView,
# TagGroupsUpdateView
# )
# from amon.apps.api.views.alerts import (
# AlertsListView,
# AlertsMuteView,
# AlertsMuteAllView,
# AlertsUnMuteAllView
# )
# urlpatterns = [
# url(r'^test/(?P<server_key>\w+)$', TestView.as_view(), name='api_test'),
# url(r'^checkip$', CheckIpAddressView.as_view(), name='check_ip'),
# url(r'^collectd/(?P<server_key>\w+)$', CollectdDataView.as_view(), name='api_collectd'),
# url(r'^system/(?P<server_key>\w+)$', LegacySystemDataView.as_view(), name='api_system_legacy'),
# url(r'^info/(?P<server_key>\w+)$', SystemInfoView.as_view(), name='api_system_info'),
# url(r'^telegraf/(?P<server_key>\w+)$', TelegrafDataView.as_view(), name='api_telegraf'),
# url(r'^system/v2/$', SystemDataView.as_view(), name='api_system'),
# ]
# server_urls = [
# url(r'^v1/servers/list/$', ServersListView.as_view(), name='api_servers_list'),
# url(r'^v1/servers/create/$', ServersCreateView.as_view(), name='api_servers_create'),
# url(r'^v1/servers/delete/(?P<server_id>\w+)/$', ServersDeleteView.as_view(), name='api_servers_delete'),
# ]
# alerts_urls = [
# url(r'^v1/alerts/list/$', AlertsListView.as_view(), name='api_alerts_list'),
# url(r'^v1/alerts/mute/all/$', AlertsMuteAllView.as_view(), name='api_alerts_mute_all'),
# url(r'^v1/alerts/unmute/all/$', AlertsUnMuteAllView.as_view(), name='api_alerts_unmute_all'),
# url(r'^v1/alerts/mute/(?P<alert_id>\w+)/$', AlertsMuteView.as_view(), name='api_alerts_mute'),
# url(r'^v1/alerts/delete/(?P<alert_id>\w+)/$', AlertsMuteView.as_view(), name='api_alerts_delete'),
# ]
# urlpatterns += server_urls
# urlpatterns += alerts_urls
| 2,544 | Python | .py | 61 | 39.491803 | 110 | 0.672211 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,110 | utils.py | amonapp_amon/amon/apps/api/utils.py |
import uuid
import hmac
from hashlib import sha1
from amon.utils import AmonStruct
from amon.utils.dates import unix_utc_now
import json
from bson.json_util import dumps as bson_dumps
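# Allow an agent check-in if the server has never reported before, or if the time since last_check (plus a 15-second buffer) has reached the server's check_every period.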
def throttle_status(server=None):
result = AmonStruct()
result.allow = False
last_check = server.get('last_check')
server_check_period = server.get('check_every', 60)
if last_check:
period_since_last_check = unix_utc_now() - last_check
# Add 15 seconds buffer, for statsd
period_since_last_check = period_since_last_check + 15
if period_since_last_check >= server_check_period:
result.allow = True
else:
result.allow = True # Never checked
return result
# Data Format
# {u'dstypes': [u'gauge'],
# u'plugin': u'users', u'dsnames': [u'value'],
# u'interval': 10.0, u'host': u'ubuntu', u'values': [7],
# u'time': 1424233591.485, u'plugin_instance': u'',
# u'type_instance': u'', u'type': u'users'}
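# Returns a dict of the form {'collectd.<plugin>': {<value_name>: value}}; only gauge metrics are kept and the 'irq' plugin is ignored.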
def parse_statsd_data(data=None):
plugin_data = {}
ignored_plugins = ['irq']
accepted_types = ['gauge', ]
if len(data) > 0:
for p in data:
plugin_name = p.get('plugin')
plugin_instance = p.get('plugin_instance')
dsnames = p.get('dsnames')
values = p.get('values')
dstypes = p.get('dstypes')
name = "collectd.{0}".format(plugin_name)
accepted_type = all(t in accepted_types for t in dstypes)
if accepted_type:
plugin_data[name] = {}
for dsn, v, dstype in zip(dsnames, values, dstypes):
if plugin_name not in ignored_plugins:
value_name = "{0}.{1}".format(plugin_instance, dsn) if plugin_instance else dsn
value_name = "{0}.{1}".format(plugin_name, value_name) if dsn == 'value' else value_name
plugin_data[name][value_name] = v
return plugin_data
def generate_api_key():
# From tastipie https://github.com/django-tastypie/django-tastypie/blob/master/tastypie/models.py#L49
new_uuid = uuid.uuid4()
key = hmac.new(new_uuid.bytes, digestmod=sha1).hexdigest()
return key
def dict_from_cursor(data=None, keys=None):
filtered_dict = {}
# Convert Uids to str
data = bson_dumps(data)
python_dict = json.loads(data)
for key in keys:
value = python_dict.get(key)
if type(value) is dict:
# Try to get mongo_id
mongo_id = value.get('$oid')
if mongo_id:
value = mongo_id
if key == '_id':
key = 'id'
filtered_dict[key] = value
return filtered_dict
| 2,749 | Python | .py | 69 | 31.115942 | 112 | 0.604286 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,111 | throttle.py | amonapp_amon/amon/apps/api/throttle.py |
from rest_framework.throttling import BaseThrottle
from amon.apps.servers.models import server_model
from amon.apps.api.utils import throttle_status
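# DRF throttle for the data-ingestion views: it looks up the server by key, attaches it to the request for later use in the models, and rejects the request when throttle_status() says the check period has not elapsed yet.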
class CheckPeriodThrottle(BaseThrottle):
def allow_request(self, request, view):
request.server = None
allow = True
view_name = view.get_view_name()
allowed_views = [u'System Data', u'Collectd Data', u'Legacy System Data']
if view_name in allowed_views:
server_key = view.kwargs.get('server_key')
server = server_model.get_server_by_key(server_key)
if server:
request.server = server # Needed in the Models
server_status = throttle_status(server=server)
if server_status.allow == False:
allow = False
return allow
| 871 | Python | .py | 18 | 35.833333 | 81 | 0.645443 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,112 | permissions.py | amonapp_amon/amon/apps/api/permissions.py |
from rest_framework import permissions
from amon.apps.api.models import api_key_model
from amon.utils.dates import unix_utc_now
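# Grants access when a valid api_key query parameter is supplied (updating its last_used timestamp) or when the request comes from an authenticated user.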
class ApiKeyPermission(permissions.BasePermission):
def has_permission(self, request, view):
api_key = request.query_params.get('api_key')
is_valid = False
params = {'key': api_key}
        # get_one returns None when no matching key exists
        result = api_key_model.get_one(params=params) or {}
        key = result.get('key', False)
if key is not False:
is_valid = True
api_key_model.update({'last_used': unix_utc_now()}, result['_id'])
if request.user.is_authenticated:
is_valid = True
return is_valid
| 670 | Python | .py | 16 | 33.8125 | 78 | 0.649612 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,113 | parsers.py | amonapp_amon/amon/apps/api/parsers.py |
from rest_framework import renderers
from rest_framework.parsers import BaseParser
from rest_framework.exceptions import ParseError
from django.conf import settings
from django.utils import six
import bson
class MongoJSONParser(BaseParser):
"""
Parses JSON-serialized data.
"""
media_type = 'application/json'
renderer_class = renderers.JSONRenderer
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as JSON and returns the resulting data.
"""
parser_context = parser_context or {}
encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
try:
data = stream.read().decode(encoding)
return bson.json_util.loads(data)
except ValueError as exc:
raise ParseError('JSON parse error - %s' % six.text_type(exc))
| 884 | Python | .py | 23 | 32.217391 | 78 | 0.701284 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,114 | servers.py | amonapp_amon/amon/apps/api/views/servers.py |
import re
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from amon.utils.haiku import generate_haiku_name
from amon.apps.api.permissions import ApiKeyPermission
from amon.apps.servers.models import server_model
from amon.apps.alerts.models import alerts_model
from amon.apps.api.mixins import SaveRequestHistoryMixin
from amon.apps.tags.models import tags_model
class ServersListView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request):
servers = server_model.get_all()
filtered_servers = []
if servers != None:
for server in servers:
filtered_servers.append({'name': server['name'],
'key': server['key'],
'id': str(server['_id']),
'last_check': server.get('last_check'),
'provider': server.get('provider')}
)
status = settings.API_RESULTS['ok']
return Response({'status': status, 'servers': filtered_servers})
class ServersCreateView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def _create_server(self, name=None, key=None, tags=None):
response = {}
create_server = False
if key:
valid_key_format = bool(re.match('[a-z0-9]{32}$', key))
if valid_key_format:
create_server = True
else:
response['status'] = settings.API_RESULTS['unprocessable']
response['error'] = 'Invalid server key. A random 32 character long, lowercase alpha numeric string is required.'
else:
create_server = True # Default
if create_server:
tag_ids = tags_model.create_and_return_ids(tags=tags)
# Check if a server with this key already exists - provisioning tool, update agent:
server = server_model.get_server_by_key(key)
if server == None:
server_key = server_model.add(name=name, account_id=settings.ACCOUNT_ID, key=key, tags=tag_ids)
else:
server_key = key
data = {'name': name}
                # New tags sent through the API
if len(tag_ids) > 0:
data['tags'] = tag_ids
server_model.update(data, server['_id'])
name = server.get('name')
response = {
'status': settings.API_RESULTS['created'],
'name': name,
'server_key': server_key
}
return response
def get(self, request):
name = request.GET.get('name', generate_haiku_name())
key = request.GET.get('key', False)
tags = request.GET.getlist('tags', False)
response = self._create_server(name=name, key=key, tags=tags)
return Response(response)
def post(self, request):
response = {}
create_server = False
data = request.data
name = data.get('name', generate_haiku_name())
key = data.get('key', False)
tags = data.get('tags', False)
response = self._create_server(name=name, key=key, tags=tags)
return Response(response)
class ServersDeleteView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request, server_id):
alerts_model.delete_server_alerts(server_id)
server_model.delete(server_id)
status = settings.API_RESULTS['ok']
return Response({'status': status})
| 3,709 | Python | .py | 81 | 34.395062 | 129 | 0.609459 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,115 | tags.py | amonapp_amon/amon/apps/api/views/tags.py |
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from amon.apps.api.permissions import ApiKeyPermission
from amon.apps.tags.models import tags_model, tag_groups_model
from amon.apps.api.mixins import SaveRequestHistoryMixin
from amon.apps.api.utils import dict_from_cursor
class TagsListView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request):
tags = tags_model.get_all()
filtered_tags = []
for tag in tags:
data = dict_from_cursor(data=tag, keys=['name', '_id'])
group = tag.get('group')
if group:
data['group'] = dict_from_cursor(data=group, keys=['name', '_id'])
filtered_tags.append(data)
filtered_tags.sort(key=lambda e: e.get('group', {}).get('name', ''))
status = settings.API_RESULTS['ok']
return Response({'status': status, 'tags': filtered_tags})
class TagsUpdateView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def post(self, request):
data = request.data
name = data.get('name')
_id = data.get('id')
group_id = data.get('group', {}).get('id', '')
if name:
update_dict = {'name': name, 'group_id': group_id}
tags_model.update(update_dict, _id)
status = settings.API_RESULTS['ok']
return Response({'status': status})
class TagsCreateView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def post(self, request):
data = request.data
name = data.get('name')
group_id = data.get('group', {}).get('id')
tag_id = tags_model.get_or_create_by_name(name=name)
if group_id:
tags_model.update({'group_id': group_id}, tag_id)
new_tag = tags_model.get_by_id(tag_id)
filtered_tag = dict_from_cursor(data=new_tag, keys=['name', '_id'])
group = new_tag.get('group')
if group != None:
filtered_tag['group'] = dict_from_cursor(data=group, keys=['name', '_id'])
status = settings.API_RESULTS['ok']
return Response({'status': status, 'tag': filtered_tag})
class TagsDeleteView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def post(self, request):
data = request.data
tag_id = data.get('id')
tags_model.delete(tag_id)
status = settings.API_RESULTS['ok']
return Response({'status': status})
class TagGroupsListView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request):
groups = tag_groups_model.get_all()
groups_list = []
for g in groups:
group = dict_from_cursor(data=g, keys=['name', '_id'])
groups_list.append(group)
status = settings.API_RESULTS['ok']
return Response({'status': status, 'groups': groups_list})
class TagGroupsUpdateView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def post(self, request):
data = request.data
name = data.get('name')
_id = data.get('id')
if name:
tag_groups_model.update({'name': name}, _id)
status = settings.API_RESULTS['ok']
return Response({'status': status})
class TagGroupsCreateView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def post(self, request):
data = request.data
name = data.get('name')
_id = tag_groups_model.get_or_create_by_name(name=name)
new_group = tag_groups_model.get_by_id(_id)
new_group_dict = dict_from_cursor(data=new_group, keys=['name', '_id'])
status = settings.API_RESULTS['ok']
return Response({'status': status, 'group': new_group_dict})
class TagGroupsDeleteView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def post(self, request):
data = request.data
tag_id = data.get('id')
tag_groups_model.delete(tag_id)
status = settings.API_RESULTS['ok']
return Response({'status': status})
| 4,333 | Python | .py | 95 | 36.8 | 87 | 0.639003 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,116 | collectd.py | amonapp_amon/amon/apps/api/views/collectd.py |
from django.conf import settings
from datetime import datetime, timedelta
from rest_framework.views import APIView
from rest_framework.response import Response
from amon.apps.plugins.models import plugin_model
from amon.apps.api.throttle import CheckPeriodThrottle
from amon.utils.dates import unix_utc_now
class CollectdDataView(APIView):
throttle_classes = (CheckPeriodThrottle,)
# Data Format
# {u'dstypes': [u'gauge'],
# u'plugin': u'users', u'dsnames': [u'value'],
# u'interval': 10.0, u'host': u'ubuntu', u'values': [7],
# u'time': 1424233591.485, u'plugin_instance': u'',
# u'type_instance': u'', u'type': u'users'}
def post(self, request, server_key):
plugin_dict = {}
date_now = datetime.utcnow()
time_now = unix_utc_now()
ignored_plugins = ['irq']
accepted_types = ['gauge',]
status = settings.API_RESULTS['not-found']
data = request.data
if request.server:
server = request.server
expires_days = server.get('keep_data', 30)
expires_at = date_now + timedelta(days=expires_days)
for p in data:
plugin_name = p.get('plugin')
plugin_instance = p.get('plugin_instance')
dsnames = p.get('dsnames')
values = p.get('values')
dstypes = p.get('dstypes')
name = "collectd.{0}".format(plugin_name)
plugin_dict[name] = {}
for dsn, v, dstype in zip(dsnames, values, dstypes):
if dstype in accepted_types and plugin_name not in ignored_plugins:
value_name = "{0}.{1}".format(plugin_instance, dsn) if plugin_instance else dsn
value_name = "{0}.{1}".format(plugin_name, value_name) if dsn == 'value' else value_name
plugin_dict[name][value_name] = v
if len(plugin_dict) > 0:
                for name, data in plugin_dict.items():  # dict.iteritems() was removed in Python 3
if len(data) > 0:
plugin_data = {'gauges': data, 'counters':{}}
plugin_model.save_data(
server=server,
name=name,
data=plugin_data,
time=time_now,
expires_at=expires_at
)
return Response({'status': status})
| 2,613 | Python | .py | 52 | 34.038462 | 112 | 0.544246 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,117 | healthchecks.py | amonapp_amon/amon/apps/api/views/healthchecks.py |
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.parsers import JSONParser
from amon.apps.api.permissions import ApiKeyPermission
from amon.apps.healthchecks.models import health_checks_model
from amon.apps.api.mixins import SaveRequestHistoryMixin
class HealthChecksListView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request):
checks = health_checks_model.get_all()
checks_json = []
for a in checks:
check_dict = {
'id': str(a.get('_id')),
'paused': a.get('paused', False),
'last_executed': a.get('last_executed'),
'execute_every': a.get('execute_every')
}
command = a.get('command')
if command:
check_dict['command'] = command
filename = a.get('filename')
if filename:
check_dict['filename'] = filename
params = a.get('params')
if params:
check_dict['params'] = params
result = a.get('result')
            if result:
check_dict['result'] = result
checks_json.append(check_dict)
status = settings.API_RESULTS['ok']
return Response({'status': status, 'checks': checks_json})
| 1,446 | Python | .py | 34 | 31.647059 | 66 | 0.605054 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,118 | core.py | amonapp_amon/amon/apps/api/views/core.py |
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from amon.apps.servers.models import server_model
from amon.apps.api.throttle import CheckPeriodThrottle
from amon.apps.api.permissions import ApiKeyPermission
from amon.apps.notifications.sender import send_notifications
from amon.apps.api.models import api_model
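# Returns the caller's public IP, preferring the X-Forwarded-For header when present.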
class CheckIpAddressView(APIView):
def get(self, request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return Response({'ip': ip})
class TestView(APIView):
def get(self, request, server_key):
server = server_model.get_server_by_key(server_key)
response_status = status.HTTP_200_OK if server else status.HTTP_403_FORBIDDEN
return Response(status=response_status)
def post(self, request, server_key):
server = server_model.get_server_by_key(server_key)
response_status = status.HTTP_200_OK if server else status.HTTP_403_FORBIDDEN
return Response(status=response_status)
# New golang agent data, format before saving it
class SystemDataView(APIView):
permission_classes = (ApiKeyPermission,)
def post(self, request):
status = settings.API_RESULTS['not-found']
data = request.data
host_data = data.get('host')
machine_id = host_data.get('machineid')
hostname = host_data.get('host')
# Cloud synced servers
instance_id = host_data.get('instance_id', "")
server = server_model.get_or_create_by_machine_id(machine_id=machine_id,
hostname=hostname,
instance_id=instance_id)
api_model.save_data_to_backend(server=server, data=data)
# Update host data
server_meta = {
'ip_address': host_data.get('ip_address'),
'distro': host_data.get('distro', {}),
}
server_model.update(server_meta, server['_id'])
if settings.DEBUG is True:
send_notifications()
status = settings.API_RESULTS['ok']
return Response({'status': status})
class LegacySystemDataView(APIView):
throttle_classes = (CheckPeriodThrottle,)
def post(self, request, server_key):
status = settings.API_RESULTS['not-found']
data = request.data
if request.server:
api_model.save_data_to_backend(server=request.server, data=data)
if settings.DEBUG is True:
send_notifications()
status = settings.API_RESULTS['ok']
return Response({'status': status})
class SystemInfoView(APIView):
def post(self, request, server_key):
status = settings.API_RESULTS['not-found']
data = request.data
server = server_model.get_server_by_key(server_key)
valid_keys = ['ip_address', 'processor', 'distro', 'uptime']
if server:
if set(data.keys()).issubset(valid_keys):
server_model.update(data, server['_id'])
status = settings.API_RESULTS['ok']
return Response({'status': status})
| 3,280 | Python | .py | 73 | 36.465753 | 85 | 0.663711 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,119 | alerts.py | amonapp_amon/amon/apps/api/views/alerts.py |
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.parsers import JSONParser
from amon.utils.haiku import generate_haiku_name
from amon.apps.api.permissions import ApiKeyPermission
from amon.apps.servers.models import server_model
from amon.apps.alerts.models import alerts_model
from amon.apps.api.mixins import SaveRequestHistoryMixin
class AlertsListView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request):
alerts = alerts_model.get_all(account_id=settings.ACCOUNT_ID)
alerts_json = []
# {
# "_id" : ObjectId("551e60bd1d41c88aa37b91b1"),
# "rule_type" : "process",
# "account_id" : 1,
# "metric_value" : 5,
# "process" : ObjectId("54e4823d1d41c86b22bd50ea"),
# "metric" : "Memory",
# "period" : 30,
# "server" : ObjectId("54ddb9781d41c8feae1a5b78"),
# "metric_type" : "MB",
# "above_below" : "above",
# "email_recepients" : [
# "54ddb8f91d41c8fd7e20264d"
# ],
# "notifications" : [
# "victorops"
# ],
# "webhooks" : [],
# "mute" : true
# }
for a in alerts:
metric_type = "" if a.get('metric_type') == None else a.get('metric_type')
alert_string = "{0} {1} {2}{3} for {4} seconds".format(a.get('metric'),
a.get('above_below'),
a.get('metric_value'),
metric_type,
a.get('period')
)
alerts_json.append({
'id': str(a.get('_id')),
'type': a.get('rule_type'),
'metric': alert_string,
'mute': a.get('mute'),
})
status = settings.API_RESULTS['ok']
return Response({'status': status, 'alerts': alerts_json})
class AlertsMuteView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request, alert_id=None):
alerts_model.mute(alert_id)
alert = alerts_model.get_by_id(alert_id)
muted = alert.get('mute')
status = settings.API_RESULTS['ok']
return Response({'status': status, 'muted': muted})
class AlertsMuteAllView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request):
alerts = alerts_model.mute_all(mute=True)
status = settings.API_RESULTS['ok']
return Response({'status': status})
class AlertsUnMuteAllView(SaveRequestHistoryMixin, APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request):
alerts_model.mute_all(mute=False)
status = settings.API_RESULTS['ok']
return Response({'status': status})
| 2,976 | Python | .py | 70 | 33.5 | 86 | 0.589976 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,120 | telegraf.py | amonapp_amon/amon/apps/api/views/telegraf.py |
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.parsers import JSONParser
from rest_framework import status
from amon.apps.servers.models import server_model
class TelegrafDataView(APIView):
def get(self, request, server_key):
server = server_model.get_server_by_key(server_key)
response_status = status.HTTP_200_OK if server else status.HTTP_403_FORBIDDEN
return Response(status=response_status)
def post(self, request, server_key):
server = server_model.get_server_by_key(server_key)
response_status = status.HTTP_200_OK if server else status.HTTP_403_FORBIDDEN
return Response(status=response_status)
| 767 | Python | .py | 15 | 45.333333 | 85 | 0.770692 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,121 | cloudservers.py | amonapp_amon/amon/apps/api/views/cloudservers.py |
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import renderers
from amon.apps.api.permissions import ApiKeyPermission
# from amon.apps.cloudservers.apicalls import (
# sync_amazon,
# sync_rackspace,
# sync_linode,
# sync_digitalocean
# )
from amon.apps.servers.models import server_model
class CloudServersSyncView(APIView):
permission_classes = (ApiKeyPermission,)
def get(self, request, provider_id):
account_id = settings.ACCOUNT_ID
status = settings.API_RESULTS['ok']
message = ""
# if provider_id == 'amazon':
# sync_amazon(account_id=account_id)
# elif provider_id == 'digitalocean':
# sync_digitalocean(account_id=account_id)
# elif provider_id == 'rackspace':
# sync_rackspace(account_id=account_id)
# elif provider_id == 'linode':
# sync_linode(account_id=account_id)
# else:
# status = settings.API_RESULTS['not-found']
if provider_id in ['amazon', 'rackspace', 'digitalocean', 'linode']:
message = "{0} servers synced".format(provider_id.title())
return Response({'status': status, "message": message})
class PlainTextRenderer(renderers.BaseRenderer):
media_type = 'text/plain'
format = 'txt'
def render(self, data, media_type=None, renderer_context=None):
return data.encode(self.charset)
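# Helper that resolves a cloud instance_id to the stored server key; returns an empty string when no matching server exists.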
def _get_server_key(instance_id=None):
key = ''
server = server_model.collection.find_one({'instance_id': instance_id})
if server:
key = server.get('key')
return key
class CloudServersGetServerKeyView(APIView):
permission_classes = (ApiKeyPermission,)
renderer_classes = (PlainTextRenderer,)
def get(self, request, provider_id):
key = ""
params = request.query_params
instance_id = params.get('instance_id')
if provider_id in ['amazon', 'google', 'digitalocean']:
key = _get_server_key(instance_id=instance_id)
else:
status = settings.API_RESULTS['not-found']
return Response(key)
| 2,204 | Python | .py | 54 | 34.407407 | 76 | 0.664628 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,122 | models_test.py | amonapp_amon/amon/apps/api/tests/models_test.py |
import unittest
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.api.models import api_key_model
from amon.apps.api.utils import generate_api_key
class APIKeyModelTest(unittest.TestCase):
def setUp(self):
self.collection = api_key_model.collection
def tearDown(self):
self.collection.remove()
def _cleanup(self):
self.collection.remove()
def create_test(self):
self._cleanup()
key = generate_api_key()
api_key_model.add({'key': key})
params = {'key': key}
result = api_key_model.get_one(params=params)
assert result['key'] == key
self._cleanup()
| 724 | Python | .py | 20 | 28.15 | 53 | 0.667678 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,123 | core_test.py | amonapp_amon/amon/apps/api/tests/core_test.py |
import json
import os
from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.servers.models import server_model
from amon.apps.system.models import system_model
from amon.apps.processes.models import process_model
from amon.apps.devices.models import interfaces_model, volumes_model
from amon.apps.plugins.models import plugin_model
from amon.apps.alerts.models import alerts_model
from amon.apps.cloudservers.models import cloud_credentials_model
class TestCoreApi(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
def _cleanup(self):
server_model.collection.remove()
system_model.data_collection.remove()
process_model.data_collection.remove()
interfaces_model.collection.remove()
volumes_model.collection.remove()
plugin_model.collection.remove()
alerts_model.collection.remove()
cloud_credentials_model.collection.remove()
def test_with_user_data(self):
self._cleanup()
alerts_model.add_initial_data()
current_directory = os.path.abspath(os.path.dirname(__file__))
agentdata = os.path.join(current_directory, 'agentdata.json')
data = open(agentdata).read()
url = reverse('api_system')
response = self.c.post(url, data, content_type='application/json')
assert server_model.collection.find().count() == 1
assert system_model.data_collection.find().count() == 1
assert process_model.data_collection.find().count() == 1
assert volumes_model.collection.find().count() == 1
assert interfaces_model.collection.find().count() == 1
assert plugin_model.collection.find().count() == 1
def test_get_or_create_server_view(self):
self._cleanup()
# Default, non cloud servers
data = {"host": {
"host": "debian-jessie",
"machineid": "25e1f5e7b26240109d199892e468d529",
"server_key": "",
"distro": {
"version": "8.2",
"name": "debian"
},
"ip_address": "10.0.2.15",
"instance_id": ""
}}
url = reverse('api_system')
# data = open(JSON_PATH).read()
self.c.post(url, json.dumps(data), content_type='application/json')
assert server_model.collection.find().count() == 1
server = server_model.collection.find_one()
assert server['name'] == 'debian-jessie'
assert server['key'] == '25e1f5e7b26240109d199892e468d529'
self._cleanup()
# Not synced cloud server
data = {"host": {
"host": "debian-jessie",
"machineid": "25e1f5e7b26240109d199892e468d529",
"server_key": "",
"distro": {
"version": "8.2",
"name": "debian"
},
"ip_address": "10.0.2.15",
"instance_id": "100"
}}
url = reverse('api_system')
self.c.post(url, json.dumps(data), content_type='application/json')
assert server_model.collection.find().count() == 1
server = server_model.collection.find_one()
assert server['name'] == 'debian-jessie'
assert server['key'] == '25e1f5e7b26240109d199892e468d529'
assert server['instance_id'] == '100'
def test_system_data_view(self):
self._cleanup()
current_directory = os.path.abspath(os.path.dirname(__file__))
agentdata = os.path.join(current_directory, 'agentdata.json')
data = open(agentdata).read()
url = reverse('api_system')
self.c.post(url, data, content_type='application/json')
assert server_model.collection.find().count() == 1
assert system_model.data_collection.find().count() == 1
assert process_model.data_collection.find().count() == 1
assert volumes_model.collection.find().count() == 1
assert interfaces_model.collection.find().count() == 1
assert plugin_model.collection.find().count() == 1
def stress_test_system_data_view(self):
self._cleanup()
current_directory = os.path.abspath(os.path.dirname(__file__))
agentdata = os.path.join(current_directory, 'agentdata.json')
data = open(agentdata).read()
url = reverse('api_system')
for i in range(10):
self.c.post(url, data, content_type='application/json')
assert server_model.collection.find().count() == 1
| 4,817 | Python | .py | 108 | 35.796296 | 85 | 0.630295 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,124 | servers_test.py | amonapp_amon/amon/apps/api/tests/servers_test.py |
from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from nose.tools import *
from amon.apps.servers.models import server_model
import json
from django.contrib.auth import get_user_model
User = get_user_model()
class TestServersApi(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
def _cleanup(self):
server_model.collection.remove()
def test_add_servers(self):
self._cleanup()
url = reverse('api_servers_create')
# Just the name,
data = {'name': 'testserver'}
response = self.c.post(url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['status'] == 201 # Created
assert response['name'] == 'testserver'
assert response['server_key']
self._cleanup()
# Invalid key
data = {'name': 'testserver', 'key': 'BlaBla'}
response = self.c.post(url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['status'] == 422 # Invalid key
assert response['error']
self._cleanup()
# Valid key
# d3vopqnzdnm677keoq3ggsgkg5dw94xg
data = {'name': 'testserver', 'key': 'd3vopqnzdnm677keoq3ggsgkg5dw94xg'}
response = self.c.post(url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['status'] == 201 # Created
assert response['name'] == 'testserver'
assert response['server_key'] == data['key']
# Check for unique keys
data = {'name': 'testserver_one', 'key': 'd3vopqnzdnm677keoq3ggsgkg5dw94xg'}
response = self.c.post(url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['status'] == 201 # Created
assert response['name'] == 'testserver'
assert response['server_key'] == data['key']
def test_list_servers(self):
self._cleanup()
url = reverse('api_servers_list')
response = self.c.get(url)
response = json.loads(response.content.decode('utf-8'))
assert len(response['servers']) == 0
for i in range(0, 10):
server_model.add('test')
response = self.c.get(url)
response = json.loads(response.content.decode('utf-8'))
assert len(response['servers']) == 10
| 2,842 | Python | .py | 62 | 37.112903 | 86 | 0.639114 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,125 | tags_test.py | amonapp_amon/amon/apps/api/tests/tags_test.py |
from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from nose.tools import *
from amon.apps.tags.models import tags_model, tag_groups_model
import json
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.api.utils import dict_from_cursor
class TestTagsApi(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
def _cleanup(self):
tags_model.collection.remove()
tag_groups_model.collection.remove()
def test_add_tag(self):
self._cleanup()
url = reverse('api_tags_create')
data = {'name': 'test'}
response = self.c.post(url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['tag']['name'] == 'test'
self._cleanup()
group_id = tag_groups_model.get_or_create_by_name(name='testgroup')
group = tag_groups_model.get_by_id(group_id)
group_dict = dict_from_cursor(group, keys=['_id', 'name'])
data = {'name': 'test', 'group': group_dict}
response = self.c.post(url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['tag']['name'] == 'test'
assert response['tag']['group']['id'] == str(group['_id'])
def test_update_tag(self):
self._cleanup()
url = reverse('api_tags_create')
update_url = reverse('api_tags_update')
data = {'name': 'test'}
response = self.c.post(url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['tag']['name'] == 'test'
data = {'name': 'updatetest', 'id': response['tag']['id']}
response = self.c.post(update_url, json.dumps(data), content_type='application/json')
response = json.loads(response.content.decode('utf-8'))
assert response['status'] == 200
| 2,277 | Python | .py | 48 | 39.833333 | 94 | 0.642077 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,126 | cloudservers_test.py | amonapp_amon/amon/apps/api/tests/cloudservers_test.py |
from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from django.conf import settings
from amon.apps.servers.models import server_model, cloud_server_model
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.cloudservers.models import cloud_credentials_model
class TestCloudServersApi(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
def _cleanup(self):
server_model.collection.remove()
cloud_credentials_model.collection.remove()
def test_get_instance_key(self):
self._cleanup()
data = {"access_key": settings.AMAZON_ACCESS_KEY, "secret_key": settings.AMAZON_SECRET_KEY, "regions": 'eu-west-1'}
credentials_id = cloud_credentials_model.save(data=data, provider_id='amazon')
valid_credentials = cloud_credentials_model.get_by_id(credentials_id)
instance_id = "instance_id_test"
instance_list = []
instance = {
'name': 'test',
'instance_id': instance_id,
'provider': "amazon",
'credentials_id': credentials_id,
'credentials': 'production',
'region': 'eu-west1',
'type': 't1-micro',
'key': 'testserver-key'
}
instance_list.append(instance)
cloud_server_model.save(instances=instance_list, credentials=valid_credentials)
server = server_model.collection.find_one()
key = server.get('key')
url = reverse('api_cloudservers_get_server_key', kwargs={'provider_id': 'amazon'}, )
url = "{0}?instance_id={1}".format(url, instance_id)
response = self.c.get(url)
assert response.content.decode('utf-8') == key
| 1,988 | Python | .py | 44 | 36.931818 | 123 | 0.654347 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,127 | utils_test.py | amonapp_amon/amon/apps/api/tests/utils_test.py |
from django.test.client import Client
from django.test import TestCase
from nose.tools import *
from nose.tools import nottest
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.api.utils import throttle_status, parse_statsd_data
from amon.utils.dates import unix_utc_now
from amon.apps.servers.models import server_model
class TestThrottle(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
def tearDown(self):
server_model.collection.remove()
self.user.delete()
def _cleanup(self):
server_model.collection.remove()
def throttle_check_period_test(self):
self._cleanup()
buffer_period = 15 # For collectd
# No data - first check
server_key = server_model.add('test_name', account_id=1, check_every=60)
server = server_model.get_server_by_key(server_key)
result = throttle_status(server=server)
assert result.allow == True
self._cleanup()
now = unix_utc_now()
server_key = server_model.add('test_name', account_id=1, check_every=60)
server = server_model.get_server_by_key(server_key)
data = {'last_check': now-61}
server_model.update(data, server['_id'])
server = server_model.get_server_by_key(server_key)
result = throttle_status(server=server)
assert result.allow == True
last_check_plus_buffer = now-54+buffer_period
data = {'last_check': last_check_plus_buffer}
server_model.update(data, server['_id'])
server = server_model.get_server_by_key(server_key)
result = throttle_status(server=server)
assert result.allow == False
self._cleanup()
server_key = server_model.add('test_name', account_id=1, check_every=300)
server = server_model.get_server_by_key(server_key)
data = {'last_check': now-301}
server_model.update(data, server['_id'])
server = server_model.get_server_by_key(server_key)
result = throttle_status(server=server)
assert result.allow == True
self._cleanup()
class TestParseStatsD(TestCase):
def parse_data_test(self):
# First try, ignore derive, only gauges are permitted
data = [{
u'dstypes':[
u'derive',
u'derive'
],
u'plugin':u'disk',
u'dsnames':[
u'read',
u'write'
],
u'interval':10.0,
u'host':u'ubuntu',
u'values':[
2048,
0
],
u'time':1424265912.232,
u'plugin_instance':u'sda2',
u'type_instance':u'',
u'type':u'disk_octets'
},]
result = parse_statsd_data(data)
assert result == {}
data = [{
u'dstypes':[
u'gauge',
u'gauge'
],
u'plugin':u'disk',
u'dsnames':[
u'read',
u'write'
],
u'interval':10.0,
u'host':u'ubuntu',
u'values':[
2048,
0
],
u'time':1424265912.232,
u'plugin_instance':u'sda2',
u'type_instance':u'',
u'type':u'disk_octets'
},]
result = parse_statsd_data(data)
assert result == {'collectd.disk': {'sda2.write': 0, 'sda2.read': 2048}}
data = [{
u'dstypes':[
u'gauge',
u'gauge'
],
u'plugin':u'disk',
u'dsnames':[
u'read',
u'write'
],
u'interval':10.0,
u'host':u'ubuntu',
u'values':[
2048,
0
],
u'time':1424265912.232,
u'plugin_instance':u'',
u'type_instance':u'',
u'type':u'disk_octets'
},]
result = parse_statsd_data(data)
assert result == {'collectd.disk': {'write': 0, 'read': 2048}}
data = [ {
u'dstypes':[
u'gauge'
],
u'plugin':u'processes',
u'dsnames':[
u'value'
],
u'interval':10.0,
u'host':u'ubuntu',
u'values':[
501
],
u'time':1424265912.297,
u'plugin_instance':u'',
u'type_instance':u'sleeping',
u'type':u'ps_state'
}]
result = parse_statsd_data(data)
assert result == {'collectd.processes': {'processes.value': 501,}}
| 4,739 | Python | .py | 142 | 22.774648 | 85 | 0.533542 | amonapp/amon | 1,334 | 108 | 37 | AGPL-3.0 | 9/5/2024, 5:09:37 PM (Europe/Amsterdam) |
| 6,128 | healthchecks_test.py | amonapp_amon/amon/apps/api/tests/healthchecks_test.py |
from django.test.client import Client
from django.test import TestCase
from nose.tools import *
from django.contrib.auth import get_user_model
User = get_user_model()
# Assumed import: health_check_model is used in _cleanup() below but was not imported in the
# original file; the module path is a best guess at the health checks app.
from amon.apps.healthchecks.models import health_check_model
class TestHealthChecksApi(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
def _cleanup(self):
health_check_model.collection.remove()
# def test_add_servers(self):
# self._cleanup()
# url = reverse('api_servers_create')
# # Just the name,
# data = {'name': 'testserver'}
# response = self.c.post(url, json.dumps(data), content_type='application/json')
# response = json.loads(response.content.decode('utf-8'))
# assert response['status'] == 201 # Created
# assert response['name'] == 'testserver'
# assert response['server_key']
# self._cleanup()
# # Invalid key
# data = {'name': 'testserver', 'key': 'BlaBla'}
# response = self.c.post(url, json.dumps(data), content_type='application/json')
# response = json.loads(response.content.decode('utf-8'))
# assert response['status'] == 422 # Invalid key
# assert response['error']
# self._cleanup()
# # Valid key
# # d3vopqnzdnm677keoq3ggsgkg5dw94xg
# data = {'name': 'testserver', 'key': 'd3vopqnzdnm677keoq3ggsgkg5dw94xg'}
# response = self.c.post(url, json.dumps(data), content_type='application/json')
# response = json.loads(response.content.decode('utf-8'))
# assert response['status'] == 201 # Created
# assert response['name'] == 'testserver'
# assert response['server_key'] == data['key']
# # Check for unique keys
# data = {'name': 'testserver_one', 'key': 'd3vopqnzdnm677keoq3ggsgkg5dw94xg'}
# response = self.c.post(url, json.dumps(data), content_type='application/json')
# response = json.loads(response.content.decode('utf-8'))
# assert response['status'] == 201 # Created
# assert response['name'] == 'testserver'
# assert response['server_key'] == data['key']
# def test_list_servers(self):
# self._cleanup()
# url = reverse('api_servers_list')
# response = self.c.get(url)
# response = json.loads(response.content.decode('utf-8'))
# assert len(response['servers']) == 0
# for i in range(0, 10):
# server_model.add('test')
# response = self.c.get(url)
# response = json.loads(response.content.decode('utf-8'))
# assert len(response['servers']) == 10
| size: 2,856 | language: Python | extension: .py | total_lines: 59 | avg_line_length: 42.084746 | max_line_length: 89 | alphanum_fraction: 0.610275 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,129 | file_name: models.py | file_path: amonapp_amon/amon/apps/_system/models.py | content:
import collections
from amon.apps.core.basemodel import BaseModel
from amon.apps.servers.models import server_model
from amon.apps.devices.models import interfaces_model, volumes_model
from amon.utils.charts import select_colors
from amon.utils.dates import utc_unixtime_to_localtime, unix_utc_now
class SystemModel(BaseModel):
def __init__(self):
super(SystemModel, self).__init__()
self.metric_tuple = collections.namedtuple('Metric', 'key, name')
self.data_collection = self.mongo.get_collection('system_data')
self.keys = {
'windows_cpu': [
self.metric_tuple('idle', 'Idle'),
self.metric_tuple('system', 'System'),
self.metric_tuple('user', 'User'),
],
'cpu': [
self.metric_tuple('idle', 'Idle'),
self.metric_tuple('system', 'System'),
self.metric_tuple('user', 'User'),
self.metric_tuple('iowait', 'IOWait'),
self.metric_tuple('steal', 'Steal'),
],
'memory': [
self.metric_tuple('used_mb', 'Used memory'),
self.metric_tuple('total_mb', 'Total memory'),
],
'loadavg': [
self.metric_tuple('minute', '1 minute'),
self.metric_tuple('five_minutes', '5 minutes'),
self.metric_tuple('fifteen_minutes', '15 minutes'),
],
'disk': [
self.metric_tuple('used', 'Used'),
self.metric_tuple('total', 'Total')
],
'network': [
self.metric_tuple('i', 'Inbound'),
self.metric_tuple('o', 'Outbound')
]
}
# Used in Dashboards, soon in Alerts
def get_keys_list(self):
keys_list = []
for key, metric_list in system_model.keys.items():
if key != 'windows_cpu':
for metric in metric_list:
keys_list.append(metric.key)
return keys_list
def generate_charts(self, result=None, keys=None, check=None, timezone='UTC'):
keys_length = len(keys)
data_lists = []
data_lists = [[] for i in range(keys_length)]
for r in result:
time = r.get('time') if r.get('time') else r.get('t')
time = utc_unixtime_to_localtime(time, tz=timezone)
for i in range(keys_length):
data = r.get(check) if r.get(check) else r
key = keys[i].key
value = data.get(key) # Gets to the specific key -> cpu: {'steal': 4}
value = self.format_float(value)
data_lists[i].append({"x": time, "y": value})
series = []
for i in range(keys_length):
data = [] if len(data_lists[i]) == 0 else data_lists[i]
chart_colors = select_colors(i)
metric_type = {
'name': keys[i].name,
'data': data,
'unit': '%'
}
metric_type.update(chart_colors)
series.append(metric_type)
return series
def get_global_data_after(self, timestamp=None, key=None, enddate=None, check=None, timezone='UTC', filtered_servers=None):
params = {"time": {"$gte": int(timestamp)}}
if enddate:
params['time']["$lte"] = int(enddate)
keys = {}
data_lists = {}
for server in filtered_servers:
server_id = str(server['_id'])
keys[server_id] = server.get('name')
data_lists[server_id] = []
server_ids_list = [x.get('_id') for x in filtered_servers]
params['server_id'] = {"$in": server_ids_list}
result = self.data_collection.find(params).sort('time', self.asc)
for r in result:
r_server_id = str(r['server_id'])
time = r.get('time', 0)
time = utc_unixtime_to_localtime(time, tz=timezone)
key = 'used_percent' if check == 'memory' else key
value = r.get(check, {}).get(key, 0)
value = self.format_float(value)
server_list = data_lists.get(r_server_id)
server_list.append({"x": time, "y": value})
            # Don't display empty lines
y_axis_sum = sum([x.get('y', 0) for x in data_lists[r_server_id]])
if y_axis_sum == 0:
data_lists[r_server_id] = []
series = []
for key, name in keys.items():
_index = list(keys.keys()).index(key)
data = [] if len(data_lists[key]) == 0 else data_lists[key]
chart_colors = select_colors(_index)
metric_type = {
'name': name,
'data': data
}
metric_type.update(chart_colors)
if len(data) > 0:
series.append(metric_type)
return series
def get_global_device_data_after(self, timestamp=None, enddate=None, check=None, key=None, timezone="UTC", filtered_servers=None):
params = {"t": {"$gte": int(timestamp)}}
if enddate:
params['t']["$lte"] = int(enddate)
keys = {}
series = []
data_lists = {}
datamodel = volumes_model if check == 'disk' else interfaces_model
devices = datamodel.get_all_for_servers_list(servers=filtered_servers)
for device in devices:
data_collection = datamodel.get_data_collection()
params['device_id'] = device['_id']
result = data_collection.find(params).sort('t', self.asc)
if result.clone().count() > 0:
device_server_id = device['server_id']
device_server = None
for server in filtered_servers:
if server['_id'] == device_server_id:
device_server = server
# Server exists
if device_server != None:
_id = str(device['_id'])
keys[_id] = u"{server}.{device}".format(server=device_server.get('name', ""), device=device.get('name'))
data_lists[_id] = []
for r in result:
time = utc_unixtime_to_localtime(r.get('t', 0), tz=timezone)
value = r.get(key, 0)
value = self.format_float(value)
data_lists[_id].append({"x": time, "y": value})
                    # Don't display empty lines
y_axis_sum = sum([x.get('y', 0) for x in data_lists[_id]])
if y_axis_sum == 0:
data_lists[_id] = []
for _id, name in keys.items():
_index = list(keys.keys()).index(_id)
data = [] if len(data_lists[_id]) == 0 else data_lists[_id]
chart_colors = select_colors(_index)
metric_type = {
'name': name,
'data': data,
}
metric_type.update(chart_colors)
if len(data) > 0:
series.append(metric_type)
return series
def get_data_after(self, timestamp=None, enddate=None, server=None, check=None, timezone='UTC'):
params = {"time": {"$gte": int(timestamp)}, 'server_id': server['_id']}
if enddate:
params['time']["$lte"] = int(enddate)
result = self.data_collection.find(params).sort('time', self.asc)
keys = []
if check in ['cpu', 'memory', 'loadavg']:
keys = self.keys.get(check)
distro = server.get('distro')
if type(distro) is dict:
name = distro.get('name')
if name == 'windows' and check == 'cpu':
keys = self.keys.get('windows_cpu')
series = self.generate_charts(result=result, timezone=timezone, check=check, keys=keys)
return series
def get_device_data_after(self, timestamp=None, enddate=None, server=None, check=None, device_id=None, timezone="UTC"):
params = {"t": {"$gte": int(timestamp)}, 'server_id': server['_id']}
if enddate:
params['t']["$lte"] = int(enddate)
keys = []
if check in ['disk', 'network']:
datamodel = volumes_model if check == 'disk' else interfaces_model
device = datamodel.get_by_id(device_id)
collection = datamodel.get_data_collection()
keys = self.keys.get(check)
if device:
params['device_id'] = device['_id']
result = collection.find(params).sort('t', self.asc)
series = self.generate_charts(result=result, timezone=timezone, keys=keys, check=check)
return series
def get_first_check_date(self, server=None):
"""
Used in the Javascript calendar - doesn't permit checks for dates before this date
Also used to display no data message in the system tab
"""
params = {'server_id': server['_id']}
start_date = self.data_collection.find_one(params, sort=[("time", self.asc)])
if start_date is not None:
start_date = start_date.get('time', 0)
else:
start_date = 0
return start_date
def save_data(self, server=None, data=None, time=None, expires_at=None):
server_id = server['_id']
time = time if time else unix_utc_now()
volumes_model.save_data(server=server, data=data.get('disk'), time=time, expires_at=expires_at)
interfaces_model.save_data(server=server, data=data.get('network'), time=time, expires_at=expires_at)
server_meta = {
'last_check': time,
'uptime': data.get('uptime', ""),
}
server_model.update(server_meta, server_id)
cleaned_data_dict = dict([(k, v) for k,v in data.items() if k not in ['disk', 'network']])
cleaned_data_dict['time'] = time
cleaned_data_dict['server_id'] = server['_id']
cleaned_data_dict["expires_at"] = expires_at
self.data_collection.insert(cleaned_data_dict)
self.data_collection.ensure_index([('time', self.desc)], background=True)
self.data_collection.ensure_index([('server_id', self.desc)], background=True)
self.data_collection.ensure_index([('expires_at', 1)], expireAfterSeconds=0)
def get_check_for_timestamp(self, server, timestamp):
timestamp = int(timestamp)
server_id = server['_id']
params = {'server_id': server_id, 'time':timestamp}
result = self.data_collection.find_one(params)
system_check = result if result != None else {}
return system_check
system_model = SystemModel()
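
SystemModel.get_data_after returns chart-ready series: one dict per metric key, each carrying a name, a data list of {x, y} points and a unit. A short usage sketch, assuming at least one server document exists; the one-hour window is an arbitrary example value.

from amon.utils.dates import unix_utc_now
from amon.apps.servers.models import server_model
from amon.apps.system.models import system_model

server = server_model.collection.find_one()  # any existing server document (assumed to exist)
now = unix_utc_now()
series = system_model.get_data_after(
    timestamp=now - 3600,  # last hour
    enddate=now,
    server=server,
    check='cpu',
    timezone='UTC',
)
for metric in series:
    print(metric['name'], len(metric['data']))  # Idle / System / User / IOWait / Steal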
| size: 10,771 | language: Python | extension: .py | total_lines: 227 | avg_line_length: 34.881057 | max_line_length: 134 | alphanum_fraction: 0.543512 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,130 | file_name: urls.py | file_path: amonapp_amon/amon/apps/_system/urls.py | content:
from django.conf.urls import url
from amon.apps.system import api
from amon.apps.system import views
urlpatterns = (
# AJAX
url(r'^a/get_data_after/$', api.ajax_get_data_after, name='ajax_get_data_after'),
# Views
url(r'^(?P<server_id>\w+)/$', views.system_view, name='server_system'),
)
| size: 307 | language: Python | extension: .py | total_lines: 9 | avg_line_length: 31 | max_line_length: 85 | alphanum_fraction: 0.691525 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,131 | file_name: api.py | file_path: amonapp_amon/amon/apps/_system/api.py | content:
from django.contrib.auth.decorators import login_required
from amon.apps.system.views import get_system_data_after
from rest_framework.decorators import api_view
from rest_framework.response import Response
@login_required
@api_view(['GET'])
def ajax_get_data_after(request):
server_id = request.GET.get('server_id')
timestamp = request.GET.get('timestamp')
check = request.GET.get('check')
enddate = request.GET.get('enddate')
device_id = request.GET.get('device_id')
response = get_system_data_after(
server_id=server_id,
timestamp=timestamp,
check=check,
enddate=enddate,
timezone=request.timezone,
device_id=device_id
)
return Response(response)
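
The endpoint above takes plain GET parameters and returns the payload that get_system_data_after builds. A sketch of calling it through Django's test client; the server_id value is a placeholder, and a prior login is required because of @login_required.

from django.urls import reverse
from django.test.client import Client

c = Client()
# c.login(username=..., password=...) must succeed first for a real request
url = "{0}?server_id={1}&check=cpu&timestamp=1".format(reverse('ajax_get_data_after'), 'SERVER_ID')
response = c.get(url)
# The JSON body carries {'data': [...], 'last_update': ..., 'now_local': ..., 'chart_type': 'line'}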
| size: 754 | language: Python | extension: .py | total_lines: 21 | avg_line_length: 30 | max_line_length: 57 | alphanum_fraction: 0.715893 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,132 | file_name: helpers.py | file_path: amonapp_amon/amon/apps/_system/helpers.py | content:
def check_data_for_period(data_dict):
total = data_dict.get('total', 0)
if total > 0:
return True
else:
return False
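
check_data_for_period simply reports whether a summary dict recorded any activity for the period. A one-line usage sketch; the import path is assumed from the app layout (the directory is _system on disk).

from amon.apps.system.helpers import check_data_for_period  # assumed import path

assert check_data_for_period({'total': 3}) is True
assert check_data_for_period({}) is False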
| size: 145 | language: Python | extension: .py | total_lines: 6 | avg_line_length: 18.5 | max_line_length: 37 | alphanum_fraction: 0.604317 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,133 | file_name: views.py | file_path: amonapp_amon/amon/apps/_system/views.py | content:
from datetime import datetime
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from amon.apps.servers.models import server_model
from amon.apps.devices.models import volumes_model
from amon.apps.system.models import system_model
from amon.apps.plugins.models import plugin_model
from amon.apps.processes.models import process_model
from amon.utils.dates import (
datetime_to_unixtime,
dateformatcharts_local
)
from amon.utils.charts import get_disk_unit
@login_required
def system_view(request, server_id):
enddate = request.GET.get('enddate')
charts = request.GET.get('charts', 'all')
duration = request.GET.get('duration', 10800)
duration = int(duration)
server = server_model.get_by_id(server_id)
tags = server_model.get_tags(server=server)
if tags:
server['tags'] = tags
first_check_date = system_model.get_first_check_date(server)
now_unix = datetime_to_unixtime(request.now)
max_date = now_unix * 1000
if enddate:
date_to = int(enddate)
else:
date_to = now_unix
date_from = date_to - int(duration)
data_url = reverse('ajax_get_data_after')
data_url = "{0}?server_id={1}".format(data_url, server.get('_id'))
charts_metadata = {
'cpu': {'id': 'cpu', 'check': 'cpu', 'name': 'CPU', 'url': data_url, },
'loadavg': {'id': 'loadavg', 'check': 'loadavg', 'name': 'Load Average','url': data_url},
'memory': {'id': 'memory', 'check': 'memory', 'name': 'Memory', 'url': data_url, 'type': 'area'},
'network': [
{'id': 'network.inbound', 'check': 'network.inbound', 'name': 'Network - KB/s Received', 'url': data_url, 'unit': 'kb/s'},
{'id': 'network.outbound', 'check': 'network.outbound', 'name': 'Network - KB/s Sent', 'url': data_url, 'unit': 'kb/s'}
]
}
if charts == 'all':
active_checks = ['cpu', 'memory', 'loadavg', 'disk', 'network']
else:
active_checks = [charts]
selected_charts = []
for check in active_checks:
if check in ['network']:
chart_meta = charts_metadata.get(check)
for i in chart_meta:
selected_charts.append(i)
elif check != 'disk':
chart_meta = charts_metadata.get(check)
selected_charts.append(chart_meta)
volumes = volumes_model.get_all_for_server(server_id)
if 'disk' in active_checks:
unit = get_disk_unit(server)
for device in volumes.clone():
device_id, name = device.get('_id'), device.get('name')
url = "{0}&device_id={1}".format(data_url, device_id)
meta = {'id': device_id, 'check': 'disk', 'name': name, 'url': url , 'unit': unit}
last_update = device.get('last_update')
if last_update > date_from:
selected_charts.append(meta)
all_plugins = plugin_model.get_for_server(server_id=server['_id'], last_check_after=date_from)
all_processes = process_model.get_all_for_server(server_id, last_check_after=date_from)
breadcrumb_url = reverse('server_system', kwargs={'server_id': server['_id']})
breadcrumb_url = "{0}?charts={1}".format(breadcrumb_url, charts)
return render(request, 'system/view.html', {
"enddate": enddate,
"duration": duration,
"all_processes": all_processes,
"all_plugins": all_plugins,
"now": now_unix,
"charts":charts,
"selected_charts" : selected_charts,
"date_from" : date_from,
"date_to" : date_to,
"first_check_date" : first_check_date,
"server" : server,
"max_date" : max_date,
"server_id": server_id,
"breadcrumb_url": breadcrumb_url
})
def get_global_system_data_after(timestamp=None, check=None, key=None, enddate=None, timezone='UTC', filtered_servers=None):
data = []
now = datetime.utcnow()
active_checks = ['memory', 'loadavg', 'cpu', 'disk', 'network']
if check in active_checks and timestamp:
if check in ['disk', 'network']:
data = system_model.get_global_device_data_after(timestamp=timestamp,
enddate=enddate,
timezone=timezone,
check=check,
key=key,
filtered_servers=filtered_servers
)
else:
data = system_model.get_global_data_after(timestamp=timestamp,
enddate=enddate,
check=check,
key=key,
timezone=timezone,
filtered_servers=filtered_servers
)
try:
now_local = dateformatcharts_local(datetime_to_unixtime(now), tz=timezone)
except:
now_local = False
response = {
'data': data,
'last_update': datetime_to_unixtime(now),
'now_local': now_local,
'chart_type': 'line',
}
return response
def get_system_data_after(server_id=None, timestamp=None, check=None, enddate=None, timezone='UTC', device_id=None):
server = server_model.get_by_id(server_id)
data = []
now = datetime.utcnow()
active_checks = ['memory', 'loadavg', 'cpu', 'disk', 'network.inbound', 'network.outbound']
if check in active_checks and timestamp:
if check in ['network.inbound', 'network.outbound']:
key = 'i' if check == 'network.inbound' else 'o'
filtered_servers = [server]
data = system_model.get_global_device_data_after(timestamp=timestamp,
enddate=enddate,
filtered_servers=filtered_servers,
key=key,
timezone=timezone,
check='network'
)
elif check == 'disk':
data = system_model.get_device_data_after(timestamp=timestamp, enddate=enddate, server=server, timezone=timezone,
check=check, device_id=device_id)
else:
data = system_model.get_data_after(timestamp=timestamp, enddate=enddate, server=server, check=check, timezone=timezone)
try:
now_local = dateformatcharts_local(datetime_to_unixtime(now), tz=timezone)
except:
now_local = False
response = {
'data': data,
'last_update': datetime_to_unixtime(now),
'now_local': now_local,
'chart_type': 'line'
}
return response
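
get_global_system_data_after aggregates a single key across several servers and is what the dashboards call. A direct-call sketch, assuming filtered_servers comes from server_model.get_all(); the 30-minute window and the 'system' key are example values.

from amon.utils.dates import unix_utc_now
from amon.apps.servers.models import server_model
from amon.apps.system.views import get_global_system_data_after

now = unix_utc_now()
response = get_global_system_data_after(
    timestamp=now - 1800,
    enddate=now,
    check='cpu',
    key='system',  # one value per server is charted for this key
    timezone='UTC',
    filtered_servers=server_model.get_all(),
)
# response['data'] is a list of per-server series: [{'name': <server name>, 'data': [{'x': .., 'y': ..}, ...]}, ...]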
| size: 6,453 | language: Python | extension: .py | total_lines: 150 | avg_line_length: 34.26 | max_line_length: 134 | alphanum_fraction: 0.609682 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,134 | file_name: common_models.py | file_path: amonapp_amon/amon/apps/_system/common_models.py | content:
from amon.apps.servers.models import server_model
from amon.apps.processes.models import process_model
from amon.apps.plugins.models import plugin_model
| size: 153 | language: Python | extension: .py | total_lines: 3 | avg_line_length: 50 | max_line_length: 52 | alphanum_fraction: 0.86 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,135 | file_name: models_test.py | file_path: amonapp_amon/amon/apps/_system/tests/models_test.py | content:
import unittest
from time import time
from nose.tools import eq_
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.system.models import system_model
from amon.apps.servers.models import server_model
from amon.apps.devices.models import interfaces_model, volumes_model
from datetime import datetime, timedelta
now = int(time())
minute_ago = (now-60)
two_minutes_ago = (now-120)
five_minutes_ago = (now-300)
class SystemModelTest(unittest.TestCase):
def setUp(self):
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.account_id = 1
server_model.collection.remove()
server_key = server_model.add('testserver', account_id=self.account_id)
self.server = server_model.get_server_by_key(server_key)
self.server_id = self.server['_id']
def tearDown(self):
self.user.delete()
User.objects.all().delete()
interfaces_model.collection.remove()
volumes_model.collection.remove()
server_model.collection.remove()
def _cleanup(self):
data_collection = system_model.data_collection
data_collection.remove()
network_collection = interfaces_model.get_data_collection(server_id=self.server['_id'])
network_collection.remove()
disk_collection = volumes_model.get_data_collection(server_id=self.server['_id'])
disk_collection.remove()
def generate_charts_cpu_test(self):
self._cleanup()
collection = system_model.data_collection
for i in range(10, 30):
cpu_dict = {"time": i, "cpu" : { "iowait" : "0.00", "system" : "7.51", "idle" : "91.15", "user" : "1.34", "steal" : "0.00", "nice" : "0.00" }}
collection.insert(cpu_dict)
keys = [
system_model.metric_tuple('idle', 'Idle'),
system_model.metric_tuple('system', 'System'),
system_model.metric_tuple('user', 'User'),
system_model.metric_tuple('iowait', 'IOWait'),
system_model.metric_tuple('steal', 'Steal'),
]
result = collection.find({"time": {"$gte": int(10), "$lte": int(20) }}).sort('time', system_model.asc)
charts = system_model.generate_charts(check='cpu', keys=keys, result=result)
eq_(len(charts), 5)
data = charts[0]['data']
eq_(len(data), 11)
for entry in data:
assert entry['x'] >= 10
assert entry['x'] <= 20
self._cleanup()
def get_data_collection_test(self):
result = system_model.data_collection
eq_(result.name, "system_data")
def get_network_data_test(self):
self._cleanup()
interface = interfaces_model.get_or_create(server_id=self.server['_id'], name='test_interface')
collection = interfaces_model.get_data_collection()
for i in range(0, 30):
collection.insert({'server_id': self.server_id,
'device_id': interface['_id'], 't': i, 'i': 1, 'o': 2})
result = system_model.get_device_data_after(timestamp=10, enddate=20, server=self.server, check='network', device_id=interface['_id'])
inbound = result[0]['data']
eq_(len(inbound), 11)
for entry in inbound:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 1
assert type(entry['y']) is float
outbound = result[1]['data']
eq_(len(outbound), 11)
for entry in outbound:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 2
# Test global data - used in the dashboards
all_servers = server_model.get_all()
result = system_model.get_global_device_data_after(timestamp=10, enddate=20, filtered_servers=all_servers, check='network', key='i')
used_percent = result[0]['data']
eq_(len(used_percent), 11)
assert result[0]['name'] == "{0}.test_interface".format(self.server['name'])
for entry in used_percent:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 1.0
assert type(entry['y']) is float
self._cleanup()
def get_disk_data_test(self):
self._cleanup()
volume = volumes_model.get_or_create(server_id=self.server['_id'], name='test_volume')
collection = volumes_model.get_data_collection()
for i in range(0, 30):
collection.insert({
'server_id': self.server_id,
'device_id': volume['_id'], 't': i, 'total': 12, 'used': 2, 'percent': 60.0})
result = system_model.get_device_data_after(timestamp=10, enddate=20, server=self.server, check='disk', device_id=volume['_id'])
total = result[1]['data']
eq_(len(total), 11)
for entry in total:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 12
assert type(entry['y']) is float
used = result[0]['data']
eq_(len(used), 11)
for entry in used:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 2
# Test global data - used in the dashboards
all_servers = server_model.get_all()
result = system_model.get_global_device_data_after(timestamp=10, enddate=20, filtered_servers=all_servers, check='disk', key='percent')
used_percent = result[0]['data']
eq_(len(used_percent), 11)
assert result[0]['name'] == "{0}.test_volume".format(self.server['name'])
for entry in used_percent:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 60.0
assert type(entry['y']) is float
self._cleanup()
def get_cpu_data_test(self):
self._cleanup()
collection = system_model.data_collection
for i in range(0, 30):
cpu_dict = {"time": i,
"server_id": self.server_id,
"cpu" : { "iowait" : "0.00", "system" : "7.51", "idle" : "91.15", "user" : "1.34", "steal" : "0.00", "nice" : "0.00" }}
collection.insert(cpu_dict)
result = system_model.get_data_after(timestamp=10, enddate=20, server=self.server, check='cpu')
t = ['idle', 'system', 'user', 'iowait' ,'steal']
for i in range(0, 5):
data_dict = result[i]['data']
key = t[i]
eq_(len(data_dict), 11)
for entry in data_dict:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == float(cpu_dict['cpu'][key])
assert type(entry['y']) is float
keys = [
system_model.metric_tuple('idle', 'Idle'),
system_model.metric_tuple('system', 'System'),
system_model.metric_tuple('user', 'User'),
system_model.metric_tuple('iowait', 'IOWait'),
system_model.metric_tuple('steal', 'Steal'),
]
result = collection.find({'server_id': self.server_id,
"time": {"$gte": int(10), "$lte": int(20) }}).sort('time', system_model.asc)
charts = system_model.generate_charts(check='cpu', keys=keys, result=result)
eq_(len(charts), 5)
data = charts[0]['data']
eq_(len(data), 11)
for entry in data:
assert entry['x'] >= 10
assert entry['x'] <= 20
# Test global data - used in the dashboards
all_servers = server_model.get_all()
result = system_model.get_global_data_after(timestamp=10, enddate=20, filtered_servers=all_servers, check='cpu', key='system')
used_percent = result[0]['data']
eq_(len(used_percent), 11)
assert result[0]['name'] == self.server['name']
for entry in used_percent:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 7.51
assert type(entry['y']) is float
self._cleanup()
def get_memory_data_test(self):
self._cleanup()
collection = system_model.data_collection
for i in range(0, 30):
memory_dict = {"time": i,
"server_id": self.server_id,
"memory": {"used_percent": 15, "swap_used_mb": 0, "total_mb": 166, "free_mb": 4.44, "used_mb": 66.55,}}
collection.insert(memory_dict)
result = system_model.get_data_after(timestamp=10, enddate=20, server=self.server, check='memory')
total = result[1]['data']
eq_(len(total), 11)
for entry in total:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 166
assert type(entry['y']) is float
used = result[0]['data']
eq_(len(used), 11)
for entry in used:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 66.55
assert type(entry['y']) is float
keys = [
system_model.metric_tuple('total_mb', 'Total memory'),
system_model.metric_tuple('used_mb', 'Used memory'),
]
result = collection.find({'server_id': self.server_id, "time": {"$gte": int(10), "$lte": int(20) }}).sort('time', system_model.asc)
charts = system_model.generate_charts(check='memory', keys=keys, result=result)
eq_(len(charts), 2)
data = charts[0]['data']
eq_(len(data), 11)
for entry in data:
assert entry['x'] >= 10
assert entry['x'] <= 20
all_servers = server_model.get_all()
# Test global data for memory - used in the dashboards
result = system_model.get_global_data_after(timestamp=10, enddate=20, filtered_servers=all_servers, check='memory')
used_percent = result[0]['data']
eq_(len(used_percent), 11)
assert result[0]['name'] == self.server['name']
for entry in used_percent:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 15.0
assert type(entry['y']) is float
self._cleanup()
def get_loadavg_data_test(self):
self._cleanup()
collection = system_model.data_collection
for i in range(0, 30):
data_dict = {"time": i,
"server_id": self.server_id,
"loadavg" : { "cores" : 4, "fifteen_minutes" : "0.18", "minute" : "0.34", "five_minutes" : "0.27" }}
collection.insert(data_dict)
result = system_model.get_data_after(timestamp=10, enddate=20, server=self.server, check='loadavg')
t = ['minute', 'five_minutes', 'fifteen_minutes']
for i in range(0, 3):
result_dict = result[i]['data']
key = t[i]
eq_(len(result_dict), 11)
for entry in result_dict:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == float(data_dict['loadavg'][key])
assert type(entry['y']) is float
keys = [
system_model.metric_tuple('minute', '1 minute'),
system_model.metric_tuple('five_minutes', '5 minutes'),
system_model.metric_tuple('fifteen_minutes', '15 minutes'),
]
result = collection.find({'server_id': self.server_id, "time": {"$gte": int(10), "$lte": int(20) }}).sort('time', system_model.asc)
charts = system_model.generate_charts(check='loadavg', keys=keys, result=result)
eq_(len(charts), 3)
data = charts[0]['data']
eq_(len(data), 11)
for entry in data:
assert entry['x'] >= 10
assert entry['x'] <= 20
# Test global data - used in the dashboards
all_servers = server_model.get_all()
result = system_model.get_global_data_after(timestamp=10, enddate=20, filtered_servers=all_servers, check='loadavg', key='minute')
used_percent = result[0]['data']
eq_(len(used_percent), 11)
assert result[0]['name'] == self.server['name']
for entry in used_percent:
assert entry['x'] >= 10
assert entry['x'] <= 20
assert entry['y'] == 0.34
assert type(entry['y']) is float
self._cleanup()
def get_first_check_date_test(self):
self._cleanup()
collection = system_model.data_collection
for i in range(11, 100):
collection.insert({'time': i, 'server_id': self.server['_id']})
result = system_model.get_first_check_date(server=self.server)
eq_(result, 11)
def save_data_test(self):
self._cleanup()
expires_at = datetime.utcnow()+timedelta(hours=24)
last_check = 99999
system_data = {u'disk':
{u'sda1':
{u'used': u'21350', u'percent': u'46', u'free': u'25237', u'volume': u'/dev/sda1', u'path': u'/', u'total': u'49086'}},
u'memory':
{u'used_percent': 34, u'swap_used_mb': 0, u'total_mb': 3954, u'free_mb': 2571, u'swap_used_percent': 0,
u'swap_free_mb': 0, u'used_mb': 1383, u'swap_total_mb': 0},
u'loadavg': {u'cores': 4, u'fifteen_minutes': u'0.36', u'minute': u'0.12', u'five_minutes': u'0.31'},
u'network': {u'eth3': {u'inbound': u'6.05', u'outbound': u'1.97'}},
u'cpu': {u'iowait': u'0.00', u'system': u'1.32', u'idle': u'98.68', u'user': u'0.00', u'steal': u'0.00', u'nice': u'0.00'}}
system_model.save_data(self.server, system_data, time=last_check, expires_at=expires_at)
disk_collection = volumes_model.get_data_collection(server_id=self.server['_id'])
disk = volumes_model.get_by_name(server=self.server, name='sda1')
eq_(disk_collection.find().count(), 1)
for r in disk_collection.find():
eq_(r['t'], last_check)
eq_(r['total'], "49086")
eq_(r['used'], "21350")
eq_(r['device_id'], disk['_id'])
eq_(r['expires_at'].date(), expires_at.date())
network_collection = interfaces_model.get_data_collection(server_id=self.server['_id'])
adapter = interfaces_model.get_by_name(server=self.server, name='eth3')
eq_(network_collection.find().count(), 1)
for r in network_collection.find():
eq_(r['t'], last_check)
eq_(r['i'], "6.05")
eq_(r['o'], "1.97")
eq_(r['device_id'], adapter['_id'])
eq_(r['expires_at'].date(), expires_at.date())
data_collection = system_model.data_collection
for r in data_collection.find():
eq_(r['time'], last_check)
eq_(r['memory']['free_mb'], 2571)
eq_(r['loadavg']['fifteen_minutes'], '0.36')
eq_(r['cpu']['system'], '1.32')
eq_(r['expires_at'].date(), expires_at.date())
server_updated = server_model.get_by_id(self.server['_id'])
eq_(server_updated['last_check'], last_check)
self._cleanup()
def get_last_check_test(self):
self._cleanup()
collection = system_model.data_collection
for i in range(11, 100):
collection.insert({'server_id': self.server['_id'], 'time': i })
result = system_model.get_first_check_date(server=self.server)
eq_(result, 11)
self._cleanup()
# def get_global_device_data_after_test(self):
# assert False
# def get_global_data_after_test(self):
# assert False
# def get_device_data_after(self):
# assert False
| size: 16,190 | language: Python | extension: .py | total_lines: 327 | avg_line_length: 37.480122 | max_line_length: 154 | alphanum_fraction: 0.55914 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,136 | file_name: views_test.py | file_path: amonapp_amon/amon/apps/_system/tests/views_test.py | content:
import json
from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from nose.tools import *
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.servers.models import server_model, interfaces_model, volumes_model
from amon.apps.system.models import system_model
class TestSystemViews(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.account_id = 1
self.c.login(username='foo@test.com', password='qwerty')
server_model.add('testserver', account_id=self.account_id)
self.server = server_model.collection.find_one()
url = reverse('ajax_get_data_after')
self.base_ajax_url = "{0}?server_id={1}".format(url, self.server['_id'])
def tearDown(self):
self.c.logout()
self.user.delete()
server_model.collection.remove()
def server_system_test(self):
url = reverse('server_system', kwargs={'server_id': self.server['_id']})
response = self.c.get(url)
assert response.status_code == 200
def ajax_get_data_no_enddate_test(self):
url = "{0}?server_id={1}&check=cpu".format(
reverse('ajax_get_data_after'),
self.server['_id']
)
response = self.c.get(url)
assert response.status_code == 200
url = "{0}?server_id={1}&check=network".format(
reverse('ajax_get_data_after'),
self.server['_id']
)
response = self.c.get(url)
assert response.status_code == 200
def ajax_get_nodata_for_period_test(self):
response = self.c.get(self.base_ajax_url)
assert response.status_code == 200
url = "{0}&check=cpu×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
data = to_json['data']
        eq_(len(data), 5)  # Idle, system, user, iowait, steal
url = "{0}&check=memory×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
data = to_json['data']
eq_(len(data), 2) # Total, Used
url = "{0}&check=disk&device_id=1×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
data = to_json['data']
eq_(len(data), 2) # Total, Used
url = "{0}&check=loadavg×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
data = to_json['data']
eq_(len(data), 3) # 1, 5, 15
def ajax_get_memory_data_after_test(self):
system_collection = system_model.data_collection
memory_dict = {"time": 1,
"server_id": self.server['_id'],
"memory": {"used_percent": 0, "swap_used_mb": 0, "total_mb": 100, "free_mb": 10, "used_mb": 10,}}
system_collection.insert(memory_dict)
url = "{0}&check=memory×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
assert to_json['now_local']
assert to_json['last_update']
data = to_json['data']
total_memory_dict = data[1]
eq_(total_memory_dict['data'], [{u'y': 100, u'x': 1}])
used_memory_dict = data[0]
eq_(used_memory_dict['data'], [{u'y': 10, u'x': 1}])
system_collection.remove()
def ajax_get_loadavg_data_after_test(self):
system_collection = system_model.data_collection
memory_dict = {"time": 10, "server_id": self.server['_id'],
"loadavg" : { "cores" : 4, "fifteen_minutes" : "0.18", "minute" : "0.34", "five_minutes" : "0.27" }}
system_collection.insert(memory_dict)
url = "{0}&check=loadavg×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
assert to_json['now_local']
assert to_json['last_update']
data = to_json['data']
minute = data[0]
eq_(minute['data'], [{u'y': 0.34, u'x': 10}])
five_minutes = data[1]
eq_(five_minutes['data'], [{u'y': 0.27, u'x': 10}])
fifteen_minutes = data[2]
eq_(fifteen_minutes['data'], [{u'y': 0.18, u'x': 10}])
system_collection.remove()
def ajax_get_cpu_data_after_test(self):
system_collection = system_model.data_collection
data_dict = {"time": 10,
"server_id": self.server['_id'],
"cpu" : { "iowait" : "0.00", "system" : "7.51", "idle" : "91.15", "user" : "1.34", "steal" : "0.00", "nice" : "0.00" }}
system_collection.insert(data_dict)
url = "{0}&check=cpu×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
assert to_json['now_local']
assert to_json['last_update']
data = to_json['data']
idle = data[0]
eq_(idle['data'], [{u'y': 91.15, u'x': 10}])
system = data[1]
eq_(system['data'], [{u'y': 7.51, u'x': 10}])
user = data[2]
eq_(user['data'], [{u'y':1.34, u'x': 10}])
iowait = data[3]
eq_(iowait['data'], [{u'y':0.00, u'x': 10}])
steal = data[4]
eq_(steal['data'], [{u'y':0.00, u'x': 10}])
system_collection.remove()
def ajax_get_network_data_after_test(self):
network_collection = interfaces_model.get_data_collection()
adapter = interfaces_model.get_or_create(server_id=self.server['_id'], name='test')
adapter_dict = {"t": 10, "o" : 6.12, "i" : 1.11, "device_id": adapter['_id'], "server_id": self.server['_id'],}
network_collection.insert(adapter_dict)
url = "{0}&check=network.inbound×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
assert to_json['now_local']
assert to_json['last_update']
data = to_json['data']
inbound = data[0]
eq_(inbound['data'], [{u'y': 1.11, u'x': 10}])
url = "{0}&check=network.outbound×tamp=1".format(self.base_ajax_url)
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
assert to_json['now_local']
assert to_json['last_update']
data = to_json['data']
outbound = data[0]
eq_(outbound['data'], [{u'y': 6.12, u'x': 10}])
interfaces_model.delete(adapter['_id'])
network_collection.remove()
def ajax_get_volume_data_after_test(self):
disk_collection = volumes_model.get_data_collection()
volume = volumes_model.get_or_create(server_id=self.server['_id'], name='test')
disk_dict = {"t": 10, "used" : "19060", "percent" : "41", "free" : "27527", "total": 62, "device_id": volume['_id'], "server_id": self.server['_id']}
disk_collection.insert(disk_dict)
url = "{0}&check=disk&device_id={1}×tamp=1".format(self.base_ajax_url, volume['_id'])
response = self.c.get(url)
to_json = json.loads(response.content.decode('utf-8'))
assert to_json['now_local']
assert to_json['last_update']
data = to_json['data']
total = data[1]
eq_(total['data'], [{u'y': 62, u'x': 10}])
free = data[0]
eq_(free['data'], [{u'y': 19060.0, u'x': 10}])
volumes_model.delete(volume['_id'])
disk_collection.remove()
| size: 7,945 | language: Python | extension: .py | total_lines: 161 | avg_line_length: 39.310559 | max_line_length: 157 | alphanum_fraction: 0.584726 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,137 | file_name: models.py | file_path: amonapp_amon/amon/apps/_account/models.py | content:
import random
import string
# from amon.apps.core.basemodel import BaseModel
# class AccountModel(BaseModel):
# def __init__(self):
# super(AccountModel, self).__init__()
# class UserPreferencesModel(BaseModel):
# def __init__(self):
# super(UserPreferencesModel, self).__init__()
# self.collection = self.mongo.get_collection('user_preferences')
# def save_preferences(self, data=None, user_id=None):
# self.collection.update({'user_id': user_id}, {"$set": data}, upsert=True)
# self.collection.ensure_index([('user_id', self.desc)], background=True)
# def get_preferences(self, user_id=None):
# result = self.collection.find_one({'user_id': user_id})
# result = {} if result is None else result
# return result
# class ForgottenPasswordTokensModel(BaseModel):
# def __init__(self):
# super(ForgottenPasswordTokensModel, self).__init__()
# self.collection = self.mongo.get_collection('forgotten_pass_tokens')
# def generate_token(self, size=30, chars=string.ascii_lowercase + string.digits):
# return ''.join(random.choice(chars) for _ in range(size))
# def set_token(self, email=None):
# token = self.generate_token()
# data = {'token': token}
# self.collection.update({'email': email}, {"$set": data}, upsert=True)
# self.collection.ensure_index([('token', self.desc)], background=True)
# return token
# forgotten_pass_tokens_model = ForgottenPasswordTokensModel()
# user_preferences_model = UserPreferencesModel()
# account_model = AccountModel()
| size: 1,639 | language: Python | extension: .py | total_lines: 32 | avg_line_length: 49.21875 | max_line_length: 86 | alphanum_fraction: 0.664975 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,138 | file_name: backends.py | file_path: amonapp_amon/amon/apps/_account/backends.py | content:
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth import get_user_model
User = get_user_model()
class EmailAuthBackend(ModelBackend):
def authenticate(self, email=None, password=None, **kwargs):
if email and password:
try:
user = User.objects.get(email__iexact=email)
if user.check_password(password):
return user
return None
except User.DoesNotExist:
return None
return None
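
For EmailAuthBackend to take effect it has to be listed in AUTHENTICATION_BACKENDS. A settings sketch; the dotted path below assumes the module is importable as amon.apps.account.backends, which this file does not show.

# settings.py (sketch)
AUTHENTICATION_BACKENDS = [
    'amon.apps.account.backends.EmailAuthBackend',  # assumed dotted path
    'django.contrib.auth.backends.ModelBackend',    # keep the stock backend as a fallback
]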
| size: 538 | language: Python | extension: .py | total_lines: 14 | avg_line_length: 27.928571 | max_line_length: 64 | alphanum_fraction: 0.626204 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,139 | file_name: urls.py | file_path: amonapp_amon/amon/apps/_account/urls.py | content:
from django.conf.urls import url
from amon.apps.account import views
urlpatterns = [
url(r'^login/$', views.loginview, name='login'),
url(r'^profile/$', views.view_profile , name='view_profile'),
# url(r'^change_password/$', views.change_password , name='change_password'),
url(r'^forgotten_password/$', views.forgotten_password , name='forgotten_password'),
# url(r'^reset_password/(?P<token>\w+)$', views.reset_password , name='reset_password'),
url(r'^create_admin_user/$', views.create_admin_user , name='create_admin_user'),
url(r'^logout/$', views.logout_user, name='logout'),
# AJAX
# url(r'^api/update_preferences/$', UserPreferencesView.as_view(), name='api_update_user_preferences'),
]
| size: 734 | language: Python | extension: .py | total_lines: 13 | avg_line_length: 52.538462 | max_line_length: 107 | alphanum_fraction: 0.687065 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,140 | file_name: api.py | file_path: amonapp_amon/amon/apps/_account/api.py | content:
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
# from amon.apps.account.models import user_preferences_model
# class UserPreferencesView(APIView):
# def post(self, request):
# sidebar = request.data.get('sidebar','wide')
# data = {'sidebar': sidebar}
# user_preferences_model.save_preferences(user_id=request.user.id, data=data)
# return Response(status=status.HTTP_200_OK)
| size: 492 | language: Python | extension: .py | total_lines: 10 | avg_line_length: 47.3 | max_line_length: 85 | alphanum_fraction: 0.739958 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,141 | file_name: mailer.py | file_path: amonapp_amon/amon/apps/_account/mailer.py | content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.utils import translation
from django.template.loader import render_to_string
from amon.apps.notifications.mail.sender import _send_email
def send_email_forgotten_password(token=None, recipients=None):
subject = translation.ugettext("Amon Password Reset")
html_content = render_to_string('account/emails/reset_password.html',{
'token': token,
})
_send_email(subject=subject,
recipients_list=recipients,
html_content=html_content
)
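
A call sketch for the helper above; the token would normally come from the (currently commented out) forgotten_pass_tokens_model.set_token, so the literal here is only a placeholder.

send_email_forgotten_password(token='reset-token-placeholder', recipients=['user@example.com'])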
| size: 550 | language: Python | extension: .py | total_lines: 14 | avg_line_length: 33.857143 | max_line_length: 74 | alphanum_fraction: 0.732177 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,142 | file_name: middleware.py | file_path: amonapp_amon/amon/apps/_account/middleware.py | content:
from datetime import datetime
from django.conf import settings
# from amon.utils.dates import localtime_utc_timedelta
# from amon.apps.dashboards.models import dashboard_model
# from amon.apps.bookmarks.models import bookmarks_model
from amon import VERSION
from django.utils.deprecation import MiddlewareMixin
class AccountMiddleware(MiddlewareMixin):
def process_request(self, request):
# Defaults
request.now = datetime.utcnow()
request.debug = settings.DEBUG
request.version = VERSION
request.timezone = 'UTC'
if request.user.is_authenticated:
# user_preferences = user_preferences_model.get_preferences(user_id=request.user.id)
# user_timezone = user_preferences.get('timezone', 'UTC')
# request.timezone = str(user_timezone) # Pytz timezone object
# request.timezone_offset = localtime_utc_timedelta(tz=request.timezone)
# request.account_id = settings.ACCOUNT_ID
# request.dashboards = dashboard_model.get_all(account_id=request.account_id)
# request.bookmarks = bookmarks_model.get_all()
# Enable disable minified js and css files
try:
request.devmode = settings.DEVMODE
except:
request.devmode = False
def process_view(self, request, view_func, view_args, view_kwargs):
request.current_page = request.resolver_match.url_name
request.server_pages = ['server_system', 'view_process', 'add_server', 'edit_server']
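
AccountMiddleware reads request.user, so it has to run after Django's session and authentication middleware. A settings sketch; the dotted path is assumed and the default middleware list is trimmed for brevity.

# settings.py (sketch)
MIDDLEWARE = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    # ... other default middleware omitted ...
    'amon.apps.account.middleware.AccountMiddleware',  # assumed dotted path
]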
| size: 1,560 | language: Python | extension: .py | total_lines: 30 | avg_line_length: 43.066667 | max_line_length: 96 | alphanum_fraction: 0.689314 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,143 | file_name: forms.py | file_path: amonapp_amon/amon/apps/_account/forms.py | content:
from django import forms
from django.contrib.auth import authenticate
from django.conf import settings
from django.contrib.auth import get_user_model
# from amon.apps.notifications.models import notifications_model
# from amon.apps.alerts.models import alerts_model
# from amon.apps.account.models import user_preferences_model, forgotten_pass_tokens_model
# from amon.apps.api.models import api_key_model
from timezone_field import TimeZoneFormField
from amon.apps.account.mailer import send_email_forgotten_password
User = get_user_model()
class LoginForm(forms.Form):
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
password = forms.CharField(required=True, widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'}))
remember_me = forms.BooleanField(widget=forms.CheckboxInput(), label='Remember Me', required=False)
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
user = authenticate(email=email, password=password)
if user:
return self.cleaned_data
raise forms.ValidationError("Invalid login details")
def clean_remember_me(self):
remember_me = self.cleaned_data.get('remember_me')
if not remember_me:
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = True
else:
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = False
return remember_me
class AdminUserForm(forms.Form):
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
password = forms.CharField(required=True, widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'}))
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
user = User.objects.filter(email=email).count()
if user:
raise forms.ValidationError("User already exists")
return self.cleaned_data
def save(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
user = User.objects.create_user(email, password)
user.is_admin = True
user.is_staff = True
user.is_superuser = True
user.save()
# notifications_model.save(data={"email": email}, provider_id='email')
# api_key_model.add_initial_data()
class ProfileForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
user_preferences = user_preferences_model.get_preferences(user_id=self.user.id)
user_timezone = user_preferences.get('timezone', 'UTC')
super(ProfileForm, self).__init__(*args, **kwargs)
self.fields['timezone'].widget.attrs.update({'select2-dropdown': '', 'data-size': 360})
self.fields['timezone'].initial = user_timezone
self.fields['email'].initial = self.user.email
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
timezone = TimeZoneFormField()
# Check email uniqueness
def clean_email(self):
email = self.cleaned_data.get('email')
if email:
if self.user.email != email:
unique = User.objects.filter(email__iexact=email).count()
if unique > 0:
                    raise forms.ValidationError(u'A user with this email address already exists.')
return email
def save(self):
data = {'timezone': str(self.cleaned_data['timezone'])}
# user_preferences_model.save_preferences(user_id=self.user.id, data=data)
self.user.email = self.cleaned_data['email']
self.user.save()
class ChangePasswordForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
super(ChangePasswordForm, self).__init__(*args, **kwargs)
current_password = forms.CharField(required=True, widget=(forms.PasswordInput(attrs={'placeholder': 'Password'})))
new_password = forms.CharField(required=True, widget=(forms.PasswordInput(attrs={'placeholder': 'Password'})))
def clean_current_password(self):
password = self.cleaned_data.get('current_password')
if self.user.check_password(password):
return self.cleaned_data
raise forms.ValidationError("Your current password is not correct")
def save(self):
password = self.cleaned_data.get('new_password')
self.user.set_password(password)
self.user.save()
return True
class ForgottenPasswordForm(forms.Form):
def __init__(self, *args, **kwargs):
super(ForgottenPasswordForm, self).__init__(*args, **kwargs)
email = forms.EmailField(required=True, widget=(forms.TextInput(attrs={'placeholder': 'Your Login Email'})))
def clean(self):
email = self.cleaned_data.get('email')
if email:
user = User.objects.filter(email=email).count()
if user == 0:
raise forms.ValidationError("User does not exists")
return self.cleaned_data
def save(self):
email = self.cleaned_data.get('email')
token = forgotten_pass_tokens_model.set_token(email=email)
send_email_forgotten_password(token=token, recipients=[email])
return True
class ResetPasswordForm(forms.Form):
password = forms.CharField(
required=True,
label='Your new password',
widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'})
)
repeat_password = forms.CharField(
required=True,
label='Confirm it',
widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Repeat Password'})
)
def clean(self):
repeat_password = self.cleaned_data.get('repeat_password')
password = self.cleaned_data.get('password')
if repeat_password and password:
if repeat_password != password:
raise forms.ValidationError("Passwords does not match")
return self.cleaned_data
def save(self, user=None):
password = self.cleaned_data.get('password')
user.set_password(password)
user.save()
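
A sketch of driving LoginForm outside a view; the credentials are placeholders and the authenticate call relies on the email-based backend shown earlier in this app.

from django.contrib.auth import authenticate
from amon.apps.account.forms import LoginForm

form = LoginForm({'email': 'foo@test.com', 'password': 'qwerty', 'remember_me': True})
if form.is_valid():
    user = authenticate(email=form.cleaned_data['email'], password=form.cleaned_data['password'])
    # a view would then call django.contrib.auth.login(request, user)
else:
    print(form.errors)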
| size: 6,520 | language: Python | extension: .py | total_lines: 130 | avg_line_length: 40.846154 | max_line_length: 128 | alphanum_fraction: 0.672959 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,144 | file_name: views.py | file_path: amonapp_amon/amon/apps/_account/views.py | content:
from amon.apps.core.views import *
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth import get_user_model
# from amon.apps.account.models import forgotten_pass_tokens_model
from amon.apps.account.forms import LoginForm
from amon.apps.account.forms import (
AdminUserForm,
ChangePasswordForm,
ForgottenPasswordForm,
ProfileForm,
ResetPasswordForm
)
User = get_user_model()
def loginview(request):
all_users = User.objects.all().count()
if all_users == 0:
return redirect(reverse('create_admin_user'))
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
data = form.cleaned_data
user = authenticate(username=data['email'], password=data['password'])
if user:
login(request, user)
redirect_url = reverse('servers')
return redirect(redirect_url)
else:
form = LoginForm()
return render(request, 'account/login.html', {
'form': form,
})
def create_admin_user(request):
all_users = User.objects.all().count()
if all_users > 0:
return redirect(reverse('login'))
if request.method == 'POST':
form = AdminUserForm(request.POST)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'User created')
redirect_url = reverse('login')
return redirect(redirect_url)
else:
form = AdminUserForm()
return render(request, 'account/create_admin_user.html', {
'form': form,
})
@login_required
def logout_user(request):
try:
del request.account_id
except:
pass
logout(request)
return redirect(reverse('login'))
@login_required
def view_profile(request):
form = ProfileForm(user=request.user)
if request.method == 'POST':
form = ProfileForm(request.POST, user=request.user)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'Profile settings updated')
return redirect(reverse('view_profile'))
return render(request, 'account/view_profile.html', {
"form": form,
})
def reset_password(request, token=None):
token_object = forgotten_pass_tokens_model.get_one({'token': token})
if len(token_object) == 0:
raise Http404
try:
user = User.objects.get(email=token_object['email'])
except:
user = None
raise Http404
form = ResetPasswordForm()
if request.method == 'POST':
form = ResetPasswordForm(request.POST)
if form.is_valid():
form.save(user=user)
messages.add_message(request, messages.INFO, 'Your password has been changed.')
return redirect(reverse('login'))
return render(request, 'account/reset_password.html', {
"form": form,
"token": token
})
def forgotten_password(request):
form = ForgottenPasswordForm()
if request.method == 'POST':
form = ForgottenPasswordForm(request.POST)
if form.is_valid():
form.save()
            messages.add_message(request, messages.INFO, 'A password reset email was sent to the specified address.')
return redirect(reverse('forgotten_password'))
return render(request, 'account/forgotten_password.html', {
"form": form,
})
@login_required
def change_password(request):
form = ChangePasswordForm(user=request.user)
if request.method == 'POST':
form = ChangePasswordForm(request.POST, user=request.user)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'Password updated.')
return redirect(reverse('view_profile'))
return render(request, 'account/change_password.html', {
"form": form,
})
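
A sketch of exercising loginview through the test client; the credentials are placeholders. On valid credentials the view redirects to reverse('servers'); otherwise the login template re-renders with form errors.

from django.test.client import Client
from django.urls import reverse

c = Client()
response = c.post(reverse('login'), {'email': 'foo@test.com', 'password': 'qwerty'})
# 302 to reverse('servers') on success, 200 with form errors otherwise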
| size: 3,947 | language: Python | extension: .py | total_lines: 110 | avg_line_length: 28.3 | max_line_length: 115 | alphanum_fraction: 0.643857 | repo_name: amonapp/amon | repo_stars: 1,334 | repo_forks: 108 | repo_open_issues: 37 | repo_license: AGPL-3.0 | repo_extraction_date: 9/5/2024, 5:09:37 PM (Europe/Amsterdam)

| id: 6,145 | file_name: forms_test.py | file_path: amonapp_amon/amon/apps/_account/tests/forms_test.py | content:
from django.test.client import Client
from django.urls import reverse
from django.test import TestCase
from nose.tools import *
from django.contrib.auth import get_user_model
# from amon.apps.account.models import user_preferences_model, forgotten_pass_tokens_model
User = get_user_model()
class TestAccountForms(TestCase):
def setUp(self):
self.c = Client()
self.email = "network-operations@something.com"
def tearDown(self):
User.objects.all().delete()
def _cleanup(self):
User.objects.all().delete()
forgotten_pass_tokens_model.collection.remove()
def test_create_admin_user(self):
url = reverse('create_admin_user')
response = self.c.post(url, {'email': self.email, 'password': '123456'})
user = User.objects.get(email=self.email)
eq_(user.email, self.email)
eq_(user.is_superuser, True)
def test_forgotten_password_form(self):
self._cleanup()
url = reverse('forgotten_password')
response = self.c.post(url, {'email': self.email})
assert response.context['form'].errors
# Create user and reset password
self.user = User.objects.create_user(password='qwerty', email=self.email)
response = self.c.post(url, {'email': self.email})
# assert forgotten_pass_tokens_model.collection.find().count() == 1
response = self.c.post(url, {'email': self.email})
# assert forgotten_pass_tokens_model.collection.find().count() == 1
def test_reset_password_form(self):
self._cleanup()
self.user = User.objects.create_user(self.email, 'qwerty')
# Generate token
url = reverse('forgotten_password')
response = self.c.post(url, {'email': self.email})
assert forgotten_pass_tokens_model.collection.find().count() == 1
token = forgotten_pass_tokens_model.collection.find_one()
url = reverse("reset_password", kwargs={'token': token['token']})
response = self.c.post(url, {'password': 'newpass', 'repeat_password': 'newpasssssss'})
assert response.context['form'].errors
url = reverse("reset_password", kwargs={'token': token['token']})
response = self.c.post(url, {'password': 'newpass', 'repeat_password': 'newpass'})
self.assertFalse(self.c.login(email=self.email, password='qwerty'))
self.assertTrue(self.c.login(email=self.email, password='newpass'))
class TestProfileForms(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(email='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
User.objects.all().delete()
def test_update_password(self):
url = reverse('change_password')
# Test password update with wrong current password
response = self.c.post(url, {'current_password': 'wrongoldpass',
'new_password': '123456'})
errors = dict(response.context['form'].errors.items())
assert 'current_password' in errors
updated_user = User.objects.get(id=self.user.id)
assert_true(updated_user.check_password('qwerty'))
# Test password update
response = self.c.post(url, {'current_password': 'qwerty', 'new_password': '123456'})
self.assertRedirects(response, reverse('view_profile'), fetch_redirect_response=False)
updated_user = User.objects.get(id=self.user.id)
assert_true(updated_user.check_password('123456'))
def test_update_profile(self):
url = reverse('view_profile')
# Test profile update with the same email - Nothing happens
response = self.c.post(url, {'email': 'foo@test.com', 'timezone': 'UTC'})
self.assertRedirects(response, reverse('view_profile'), status_code=302)
user_preferences = user_preferences_model.get_preferences(user_id=self.user.id)
assert user_preferences['timezone'] == 'UTC'
response = self.c.post(url, {'email': 'foo@test.com', 'timezone': 'Europe/Sofia'})
user_preferences = user_preferences_model.get_preferences(user_id=self.user.id)
assert user_preferences['timezone'] == 'Europe/Sofia'
# Test profile update with a new email
response = self.c.post(url, {'email': 'network-operations@maynardnetworks.com', 'timezone': 'UTC'})
self.assertRedirects(response, reverse('view_profile'), status_code=302)
updated_user = User.objects.get(id=self.user.id)
assert updated_user.email == 'network-operations@maynardnetworks.com'
| 4,739
|
Python
|
.py
| 85
| 46.529412
| 107
| 0.666225
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,146
|
models.py
|
amonapp_amon/amon/apps/checks/models.py
|
from django.db import models
from django.contrib.postgres.fields import JSONField
CHECK_CHOICES = (
('ok', 'ok'),
('warn', 'warn'),
('crit', 'crit'),
('unknown', 'unknown'),
)
class Check(models.Model):
name = models.CharField(max_length=128)
tags = JSONField()
organization = models.ForeignKey('organizations.Organization', on_delete=models.CASCADE)
class Meta:
unique_together = ("name", "tags", 'organization')
def __unicode__(self):
return u"Check - {0}".format(self.name)
class CheckData(models.Model):
check = models.ForeignKey('Check', on_delete=models.CASCADE)
timestamp = models.IntegerField()
value = models.CharField(
max_length=10,
choices=CHECK_CHOICES,
default='unknown'
)
message = models.TextField()
class Meta:
index_together = ["check", "timestamp"]
def __unicode__(self):
return u"Metric - {0}/{1}".format(self.metric.name)
| 970
|
Python
|
.py
| 29
| 28.068966
| 92
| 0.652034
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,147
|
models.py
|
amonapp_amon/amon/apps/_servers/models.py
|
from amon.apps.core.basemodel import BaseModel
from amon.utils import generate_random_string
from amon.utils.haiku import generate_haiku_name
from amon.apps.processes.models import process_model
from amon.apps.devices.models import interfaces_model, volumes_model
from amon.apps.tags.models import tags_model, tag_groups_model
from amon.utils.dates import unix_utc_now
class ServerModel(BaseModel):
def __init__(self):
super(ServerModel, self).__init__()
self.collection = self.mongo.get_collection('servers')
self.data_collection = self.mongo.get_collection('system_data')
def server_exists(self, name):
result = self.collection.find({"name": name}).count()
return result
def get_or_create_by_machine_id(
self,
machine_id=None,
hostname=None,
check_every=60,
keep_data=30,
instance_id=None,
tags=[]):
server = self.collection.find_one({"key": machine_id}) # Randomly generated
instance_id = "" if instance_id == None else instance_id
name = hostname if hostname else generate_haiku_name()
# keep_data - in days
# check_every - in seconds
# settings/forms/data retention
data = {
"name": name,
"key": machine_id,
"check_every": check_every,
"keep_data": keep_data,
"date_created": unix_utc_now(),
"tags": tags
}
# Bare metal servers
if server is None and len(instance_id) == 0:
self.collection.insert(data)
server = self.collection.find_one({"key": machine_id})
# Cloud servers
if len(instance_id) > 0:
server = self.collection.find_one({"instance_id": instance_id})
# Cloud server synced and found
if server is not None:
data = {"key": machine_id}
self.collection.update({"instance_id": instance_id}, {"$set": data}, upsert=True)
else:
data["key"] = machine_id
data["instance_id"] = instance_id
self.collection.insert(data)
server = self.collection.find_one({"key": machine_id})
self.collection.ensure_index([('name', self.desc)], background=True)
self.collection.ensure_index([('tags', self.desc)], background=True)
self.collection.ensure_index([('key', self.desc)], background=True)
self.collection.ensure_index([('last_check', self.desc)], background=True)
self.collection.ensure_index([('account_id', self.desc)], background=True)
self.collection.ensure_index([('instance_id', self.desc)], background=True)
return server
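# A minimal usage sketch (not part of the original file); the machine_id / instance_id values are placeholders.
# bare_metal = server_model.get_or_create_by_machine_id(machine_id="d41c8ed3761d", hostname="web-01")
# cloud = server_model.get_or_create_by_machine_id(machine_id="d41c8ed3761d", instance_id="i-0abc123", tags=[])
# Both calls are idempotent: an existing document matching the key (or instance_id) is reused instead of duplicated.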
def get_by_ids(self, id_list=None):
result = []
_ids = [self.object_id(x) for x in id_list]
query = self.collection.find({'_id': {"$in": _ids}})
for r in query:
result.append(r) # Expects list
return result
# Tags is a list
def get_with_tags(self, tags=None):
result = []
tags = [self.object_id(x) for x in tags]
query = self.collection.find({'tags': {"$in": tags}})
for r in query:
result.append(r) # Expects list
return result
def get_tags(self, server=None):
result = []
tags = server.get('tags', None)
if tags:
result = [tags_model.get_by_id(x) for x in tags]
result = list(filter(lambda x: x is not None, result)) # Remove non existing tags
return result
def add(self, name, account_id=None, check_every=60, keep_data=30, tags=[], key=None):
server_key = key if key else generate_random_string(size=32)
# keep_data - in days
# check_every - in seconds
# settings/forms/data retention
data = {"name": name,
"key": server_key,
"account_id": account_id,
"check_every": check_every,
"keep_data": keep_data,
"date_created": unix_utc_now(),
"tags": tags}
self.collection.insert(data)
self.collection.ensure_index([('tags', self.desc)], background=True)
self.collection.ensure_index([('key', self.desc)], background=True)
self.collection.ensure_index([('name', self.desc)], background=True)
self.collection.ensure_index([('last_check', self.desc)], background=True)
self.collection.ensure_index([('account_id', self.desc)], background=True)
self.collection.ensure_index([('instance_id', self.desc)], background=True)
return server_key
# TODO
# Refactor this method to avoid confusion
def get_active_last_five_minutes(self, account_id=None, count=None):
five_minutes_ago = unix_utc_now() - 300
params = {"last_check": {"$gte": five_minutes_ago}}
if account_id:
params['account_id'] = account_id
result = self.collection.find(params)
if count:
result = result.count()
return result
def get_all_ids(self):
id_list = []
result = self.collection.find()
if result.clone().count() > 0:
for r in result:
_id = str(r.get("_id"))
id_list.append(_id)
return id_list
def get_all(self, account_id=None):
server_list = []
count = self.collection.find().count()
if count == 0:
return None
else:
result = self.collection.find(sort=[("name", self.asc), ("last_check", self.asc)])
for server in result:
server['tags'] = self.get_tags(server=server)
server_list.append(server)
return server_list
def get_servers_count(self, account_id):
if account_id:
count = self.collection.find().count()
return count
else:
return 0
def get_server_by_key(self, key):
params = {'key': key}
return self.collection.find_one(params)
def delete_data(self, server_id=None, soft=None):
server_id = self.object_id(server_id)
params = {'server_id': server_id}
process_params = {'server': server_id}
processes = self.mongo.get_collection('processes')
processes.remove(process_params)
volumes = self.mongo.get_collection('volumes')
volumes.remove(params)
interfaces = self.mongo.get_collection('interfaces')
interfaces.remove(params)
plugins_col = self.mongo.get_collection('plugins')
plugin_gauges_col = self.mongo.get_collection('plugin_gauges')
plugins_for_server = plugins_col.find(params)
for p in plugins_for_server:
plugin_gauges_col.remove({'plugin_id': p['_id']})
# Soft delete - removes only the process names, plugin names, network interfaces and disk volumes above.
# The collected time-series data already has a TTL and is deleted automatically according to the 'Keep data' option.
# A hard delete (soft == False) also removes the per-server data collections below.
if soft == False:
process_data_collection = process_model.data_collection
process_data_collection.remove(params)
system_data_collection = self.data_collection
system_data_collection.remove(params)
volume_data_collection = volumes_model.get_data_collection()
volume_data_collection.remove(params)
interface_data_collection = interfaces_model.get_data_collection()
interface_data_collection.remove(params)
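# Sketch of how delete_data is typically invoked (illustrative only, the id is a placeholder):
# server_model.delete_data(server_id="5717dcd3466d1d2a8b45bd1c", soft=True)   # keep time-series data, TTL cleans it up
# server_model.delete_data(server_id="5717dcd3466d1d2a8b45bd1c", soft=False)  # also drop the per-server data documents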
def delete(self, id, soft=None):
server_id = self.object_id(id)
if server_id:
# Enforce default delete mode
if soft is None:
server = self.get_by_id(server_id)
keep_data = server.get('keep_data', 3600)
soft = True if keep_data < 3600 else False # Delete only data kept forever
self.delete_data(server_id=server_id, soft=soft)
self.collection.remove(server_id)
def cleanup(self, server, date_before=None):
process_data_collection = process_model.data_collection
params = {"t": {"$lte": date_before}, 'server_id': server['_id']}
process_data_collection.remove(params)
system_params = {"time": {"$lte": date_before}}
system_data_collection = self.data_collection
system_data_collection.remove(system_params)
volume_data_collection = volumes_model.get_data_collection()
volume_data_collection.remove(params)
interfaces_data_collection = interfaces_model.get_data_collection()
interfaces_data_collection.remove(params)
class CloudServerModel(BaseModel):
def __init__(self):
super(CloudServerModel, self).__init__()
self.collection = self.mongo.get_collection('servers')
def delete_servers_for_credentials(self, credentials_id=None):
params = {'credentials_id': credentials_id}
result = self.collection.find(params)
for r in result:
server_model.delete(r['_id'])
self.collection.remove(params)
def update_server(self, data=None, account_id=None):
instance_id = data['instance_id']
result = self.collection.find_one({"instance_id": instance_id})
if result is None:
data['key'] = generate_random_string(size=32)
data['account_id'] = account_id
self.collection.update({"instance_id": instance_id}, {"$set": data}, upsert=True)
def delete_all_for_provider(self, credentials_id=None):
params = {'credentials_id': credentials_id}
return self.collection.remove(params)
def get_all_for_provider(self, credentials_id=None):
params = {'credentials_id': credentials_id}
return self.collection.find(params)
def get_instance_ids_list(self, credentials_id=None, account_id=None):
instance_list = []
all_servers_for_provider = self.get_all_for_provider(credentials_id=credentials_id)
if all_servers_for_provider.clone().count() > 0:
for s in all_servers_for_provider:
instance_id = s.get("instance_id")
if instance_id:
instance_list.append(instance_id)
return instance_list
def diff_instance_ids(self, new_instances=None, old_instances=None):
# This method should return a list with all the instances that have to be removed
return list(set(old_instances) - set(new_instances))
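# Example of the expected behaviour (illustrative; mirrors diff_instance_ids_test in the repo's tests):
# diff_instance_ids(new_instances=['i-2', 'i-3'], old_instances=['i-1', 'i-2']) -> ['i-1']
# i.e. instances that were synced previously but are no longer reported by the cloud provider.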
def delete_instances(self, instance_ids_list=None):
if len(instance_ids_list) > 0:
for i in instance_ids_list:
server = self.collection.find_one({"instance_id": i})
server_model.delete(server['_id'])
def save(self, instances=None, credentials=None):
credentials_id = credentials.get('_id')
old_instances = self.get_instance_ids_list(credentials_id=credentials_id)
if len(instances) > 0:
new_instances = [i['instance_id'] for i in instances]
instance_diff = self.diff_instance_ids(new_instances=new_instances, old_instances=old_instances)
self.delete_instances(instance_ids_list=instance_diff)
for instance in instances:
synced_tags = instance.get('tags')
# Reset the tags
instance['tags'] = []
# Amazon tags example: {u'application': u'rails', u'type': u'dbserver'}
if type(synced_tags) is dict:
for group, tag in synced_tags.items():
group_id = tag_groups_model.get_or_create_by_name(group)
_id = tags_model.get_or_create(name=tag, group_id=group_id)
instance['tags'].append(_id)
generated_tags_list = ['provider', 'type', 'region', 'zone', 'size', 'credentials']
for auto_tag in generated_tags_list:
value = instance.get(auto_tag)
if value is not None:
group_id = tag_groups_model.get_or_create_by_name(auto_tag)
_id = tags_model.get_or_create(name=value, group_id=group_id)
instance['tags'].append(_id)
self.update_server(data=instance)
else:
# Empty list, clear out all instances for this provider
self.delete_all_for_provider(credentials_id=credentials_id)
self.collection.ensure_index('instance_id', background=True)
self.collection.ensure_index('credentials_id', background=True)
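# Illustrative shape of the instances argument expected by save() (values are placeholders, not from the source):
# cloud_server_model.save(
#     instances=[{'name': 'web-01', 'instance_id': 'i-0abc123', 'provider': 'amazon',
#                 'region': 'us-east-1', 'type': 't2.micro', 'credentials_id': creds['_id'],
#                 'tags': {'application': 'rails'}}],
#     credentials=creds)
# Provider-synced tags and the auto-generated provider/type/region tags are converted to tag ids before saving.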
server_model = ServerModel()
cloud_server_model = CloudServerModel()
| 12,772
|
Python
|
.py
| 260
| 38.073077
| 117
| 0.609026
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,148
|
urls.py
|
amonapp_amon/amon/apps/_servers/urls.py
|
from django.conf.urls import url
from amon.apps.servers import views
urlpatterns = (
url(r'^$', views.all, name='servers'),
url(r'^add/$', views.add_server, name='add_server'),
url(r'^edit/(?P<server_id>\w+)/$', views.edit_server, name='edit_server'),
url(r'^delete/(?P<server_id>\w+)/$', views.delete_server, name='delete_server'),
url(r'^delete-data/(?P<server_id>\w+)/$', views.delete_data, name='delete_data'),
url(r'^delete-plugin/(?P<plugin_id>\w+)/server/(?P<server_id>\w+)/$', views.delete_plugin, name='delete_plugin'),
)
| 557
|
Python
|
.py
| 10
| 52.1
| 117
| 0.647706
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,149
|
utils.py
|
amonapp_amon/amon/apps/_servers/utils.py
|
def filter_tags(server=None, tags=None):
"""
>>> filter_tags(server={'name': 'frosty-mountain-6164', 'tags': ['58824ea91d41c8ed3761d8b0']}, tags='58824ea91d41c8ed3761d8b0')
True
>>> filter_tags(server={'name': 'frosty-mountain-6164', 'tags': ['58824761d8b0']}, tags='58824ea91d41c8ed3761d8b0')
False
"""
show_server = False
if len(tags) > 0:
tags_list = tags.split(',')
server_tags = server.get('tags') # May contain tag documents or plain tag ids
server_tag_ids = []
for x in server_tags:
if type(x) is dict:
value = str(x.get("_id"))
else:
value = str(x)
server_tag_ids.append(value)
if set(tags_list) <= set(server_tag_ids): # all requested tags must be present on the server
show_server = True
else:
show_server = True
return show_server
| 864
|
Python
|
.py
| 23
| 28.652174
| 131
| 0.574365
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,150
|
admin.py
|
amonapp_amon/amon/apps/_servers/admin.py
|
from django.contrib import admin
from django.shortcuts import render
from amon.apps.servers.models import server_model
def servers(request, *args, **kwargs):
account_id = kwargs['account_id']
all_servers = server_model.get_all(account_id=int(account_id))
return render(request, 'admin/servers.html', {
'all_servers': all_servers,
'title': 'Servers'
})
admin.site.register_view(r'servers/(?P<account_id>\d+)', view=servers)
| 480
|
Python
|
.py
| 11
| 37.363636
| 69
| 0.702882
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,151
|
forms.py
|
amonapp_amon/amon/apps/_servers/forms.py
|
from django import forms
from django.conf import settings
from amon.apps.settings.forms import DataRetentionForm
from amon.apps.servers.models import server_model
from amon.apps.tags.models import tags_model
from django.urls import reverse
class ServerForm(DataRetentionForm):
tags = forms.CharField(max_length=256, required=False)
name = forms.CharField(max_length=256, widget=forms.TextInput(attrs={'placeholder': 'Server name'}))
def __init__(self, *args, **kwargs):
self.server = kwargs.pop('server', None)
super(ServerForm, self).__init__(*args, **kwargs)
if self.server:
tags = self.server.get('tags', [])
self.fields['name'].initial = self.server.get('name', "")
self.fields['tags'].initial = ",".join(map(str, tags))
self.fields['keep_data'].initial = self.server.get('keep_data', 30)
self.fields['check_every'].initial = self.server.get('check_every', 60)
self.fields['tags'].widget.attrs.update({
'tags-dropdown': '', 'data-size': 360,
'data-tags-url': reverse('api_tags_get_tags'),
'data-url': reverse('api_tags_get_tags_for_server', kwargs={'server_id': self.server['_id']}),
})
else:
self.fields['tags'].widget.attrs.update({
'tags-dropdown': '', 'data-size': 360,
'data-tags-url': reverse('api_tags_get_tags'),
})
def save(self):
data = self.cleaned_data
tags = data.get('tags', [])
if len(tags) > 0:
data['tags'] = tags_model.get_tags_ids(tags_string=tags)
# Update
if self.server:
server_model.update(data, self.server['_id'])
# Create
else:
server_key = server_model.add(
data.get('name'),
account_id=settings.ACCOUNT_ID,
keep_data=data.get('keep_data'),
check_every=data.get('check_every'),
tags=data['tags']
)
server = server_model.get_server_by_key(server_key)
return server
| 2,167
|
Python
|
.py
| 47
| 35.06383
| 110
| 0.579828
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,152
|
views.py
|
amonapp_amon/amon/apps/_servers/views.py
|
from amon.apps.core.views import *
from amon.apps.system.models import system_model
from amon.apps.servers.models import server_model
from amon.apps.devices.models import volumes_model
from amon.apps.processes.models import process_model
from amon.apps.plugins.models import plugin_model
from amon.apps.alerts.models import alerts_model
from amon.apps.servers.forms import ServerForm
from amon.apps.servers.utils import filter_tags
from amon.apps.api.models import api_key_model
from amon.apps.bookmarks.forms import BookMarkForm
@login_required
def all(request):
api_key = api_key_model.get_or_create()
all_servers = server_model.get_all(account_id=request.account_id)
servers_data = []
form = ServerForm()
tags = request.GET.get('tags', "")
bookmark_id = request.GET.get('bookmark_id')
# now = unix_utc_now()
if all_servers:
for server in all_servers:
append_server = filter_tags(server=server, tags=tags)
active_server = False
key = server.get('key')
last_check = server.get('last_check', 0)
# seconds_since_check = now - last_check
server_dict = {
'server': server,
'key': key,
'last_check': last_check
}
# Don't get data for non active servers, 48 hours as default
# Disable this check for now
# if seconds_since_check < 172800:
server_dict_data = {
'system': system_model.get_check_for_timestamp(server, last_check),
'volume_data': volumes_model.get_check_for_timestamp(server, last_check),
'plugins': plugin_model.get_check_for_timestamp(server, last_check),
'processes': process_model.get_check_for_timestamp(server, last_check),
}
server_dict.update(server_dict_data)
active_server = True
if append_server and active_server is not False:
servers_data.append(server_dict)
servers_data = sorted(servers_data, key=lambda k: k['last_check'], reverse=True)
else:
all_servers = False
bookmark_form = BookMarkForm(initial={'tags': tags})
return render(request, 'servers/view.html', {
"all_servers": all_servers,
"servers_data": servers_data,
"form": form,
"tags": tags,
"bookmark_form": bookmark_form,
"bookmark_id": bookmark_id,
"api_key": api_key
})
@login_required
def delete_data(request, server_id=None):
if server_id:
server_model.delete_data(server_id=server_id)
return redirect(reverse('servers'))
@login_required
def delete_plugin(request, plugin_id=None, server_id=None):
server = server_model.get_by_id(server_id)
plugin = plugin_model.get_by_id(plugin_id)
if server and plugin:
plugin_model.delete(plugin=plugin, server=server)
messages.add_message(request, messages.INFO, 'Plugin deleted')
return redirect(reverse('edit_server', kwargs={"server_id": server_id}))
@login_required
def delete_server(request, server_id=None):
server = server_model.get_by_id(server_id)
if server:
alerts_model.delete_server_alerts(server['_id'])
server_model.delete(server['_id'])
return redirect(reverse('servers'))
@login_required
def edit_server(request, server_id=None):
server = server_model.get_by_id(server_id)
plugins = plugin_model.get_for_server(server_id)
form = ServerForm(server=server)
if request.method == 'POST':
form = ServerForm(request.POST, server=server)
if form.is_valid():
form.save()
messages.add_message(request, messages.INFO, 'Server settings updated.')
return redirect(reverse('servers'))
return render(request, 'servers/edit.html', {
"server": server,
"plugins": plugins,
'form': form,
})
@login_required
def add_server(request):
if request.method == 'POST':
form = ServerForm(request.POST)
if form.is_valid():
server = form.save()
url = reverse('servers')
redirect_to = "{0}#{1}".format(url, server['_id'])
return redirect(redirect_to)
else:
form = ServerForm()
return render(request, "servers/add.html", {
'form': form,
})
| 4,401
|
Python
|
.py
| 108
| 32.787037
| 89
| 0.644235
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,153
|
models_test.py
|
amonapp_amon/amon/apps/_servers/tests/models_test.py
|
import unittest
from time import time
from nose.tools import eq_
from amon.apps.servers.models import (
ServerModel,
cloud_server_model
)
from amon.apps.devices.models import interfaces_model, volumes_model
from amon.apps.processes.models import process_model
from amon.apps.system.models import system_model
from amon.apps.tags.models import tags_model, tag_groups_model
from amon.apps.cloudservers.models import cloud_credentials_model
now = int(time())
minute_ago = (now-60)
two_minutes_ago = (now-120)
five_minutes_ago = (now-300)
class ServerModelTest(unittest.TestCase):
def setUp(self):
self.model = ServerModel()
self.collection = self.model.mongo.get_collection('servers')
def _cleanup(self):
self.collection.remove()
tags_model.collection.remove()
tag_groups_model.collection.remove()
def get_or_create_by_machine_id_test(self):
self._cleanup()
self.collection.insert({"name" : "cloud-server", "key": "somekeyhere", "instance_id": "150"})
self.model.get_or_create_by_machine_id(instance_id="150", machine_id="cloudkey")
result = self.collection.find_one()
assert result["key"] == "cloudkey"
assert result["name"] == "cloud-server"
self._cleanup()
self.collection.insert({"name" : "test", "key": "somekeyhere", "instance_id": ""})
self.model.get_or_create_by_machine_id(instance_id="", machine_id="somekeyhere")
result = self.collection.find_one()
assert result["key"] == "somekeyhere"
assert result["name"] == "test"
self._cleanup()
def get_all_with_tags_test(self):
self._cleanup()
tags = {'rds': 'value', 'ebs': 'volume'}
tags_list = tags_model.create_and_return_ids(tags)
self.collection.insert({"name" : "test", "tags": tags_list})
result = self.model.get_with_tags(tags=[tags_list[0]])
assert len(result) == 1
result = self.model.get_with_tags(tags=tags_list)
assert len(result) == 1
self._cleanup()
tags = {'rds': 'value', 'ebs': 'volume', 'region': 'uswest-1', 'provider': 'amazon'}
tags_list = tags_model.create_and_return_ids(tags)
self.collection.insert({"name" : "test", "tags": tags_list})
result = self.model.get_with_tags(tags=[tags_list[0], tags_list[1], tags_list[2]])
assert len(result) == 1
result = self.model.get_with_tags(tags=tags_list)
assert len(result) == 1
def check_server_exists_test(self):
self.collection.remove()
self.collection.insert({"name" : "test"})
result = self.model.server_exists('test')
eq_(result, 1)
self.collection.remove()
def update_server_test(self):
self.collection.remove()
self.collection.insert({"name" : "test"})
server = self.collection.find_one()
self.model.update({"name": "test_updated", "default": 1 }, server['_id'])
result = self.collection.find_one()
eq_(result['name'],'test_updated')
self.collection.remove()
def add_server_test(self):
self.collection.remove()
self.model.add('test')
result = self.collection.find_one()
eq_(result['name'],'test')
if result['key']:
assert True
self.collection.remove()
def get_server_test(self):
self.collection.remove()
self.collection.insert({"name" : "test"})
server = self.collection.find_one()
result = self.model.get_by_id(server['_id'])
eq_(result['name'],'test')
eq_(result['_id'],server['_id'])
self.collection.remove()
def get_active_last_five_minutes_test(self):
self.collection.remove()
for i in range(0, 100):
self.collection.insert({"name" : "test", 'last_check': now-i})
result = self.model.get_active_last_five_minutes(count=True)
eq_(result, 100)
self.collection.remove()
for i in range(0, 100):
self.collection.insert({"name" : "test", 'last_check': five_minutes_ago-i})
result = self.model.get_active_last_five_minutes(count=True)
eq_(result, 0)
def get_server_by_key_test(self):
self.collection.remove()
self.collection.insert({"name" : "test", "key": "test_me"})
server = self.collection.find_one()
result = self.model.get_server_by_key('test_me')
eq_(result['name'],'test')
eq_(result['key'],'test_me')
eq_(result['_id'],server['_id'])
self.collection.remove()
def delete_server_test(self):
self.collection.remove()
self.collection.insert({"name" : "test", "key": "test_me"})
server = self.collection.find_one()
self.model.delete(server['_id'])
result = self.collection.count()
eq_(result,0)
self.collection.remove()
def get_all_servers_test(self):
self.collection.remove()
for i in range(0, 1000):
name = "test-{0}".format(i)
key = "testkey-{0}".format(i)
self.collection.insert({"name" : name, "key": key, "last_check": minute_ago})
result = self.model.get_all()
eq_(len(result), 1000)
self.collection.remove()
def cleanup_test(self):
self.collection.remove()
self.collection.insert({"name" : "testserver", "key": "test_me"})
server = self.collection.find_one()
date_before = 100
process_collection = process_model.data_collection
process_collection.remove()
system_collection = system_model.data_collection
system_collection.remove()
interface_collection = interfaces_model.get_data_collection()
interface_collection.remove()
volume_collection = volumes_model.get_data_collection()
volume_collection.remove()
for i in range(0, date_before):
process_collection.insert({'i' : 'test', 't': i, 'server_id': server['_id']})
system_collection.insert({'i' : 'test', 'time': i, 'server_id': server['_id']})
interface_collection.insert({'i' : 'test', 't': i, 'server_id': server['_id']})
volume_collection.insert({'i' : 'test', 't': i, 'server_id': server['_id']})
params = {'server_id': server['_id']}
self.model.cleanup(server, date_before=date_before)
process_entries = process_collection.find(params).count()
eq_(process_entries, 0)
system_entries = system_collection.find(params).count()
eq_(system_entries, 0)
interface_entries = interface_collection.find(params).count()
eq_(interface_entries, 0)
volume_entries = volume_collection.find(params).count()
eq_(volume_entries, 0)
system_collection.remove()
process_collection.remove()
interface_collection.remove()
volume_collection.remove()
entries = interface_collection.find().count()
eq_(entries, 0)
for i in range(0, 300):
process_collection.insert({'i' : 'test', 't': i, 'server_id': server['_id']})
system_collection.insert({'i' : 'test', 'time': i, 'server_id': server['_id']})
interface_collection.insert({'i' : 'test', 't': i, 'server_id': server['_id']})
volume_collection.insert({'i' : 'test', 't': i, 'server_id': server['_id']})
process_collection.ensure_index('server_id', background=True)
process_collection.ensure_index('t', background=True)
system_collection.ensure_index('time', background=True)
system_collection.ensure_index('server_id', background=True)
interface_collection.ensure_index('t', background=True)
interface_collection.ensure_index('server_id', background=True)
volume_collection.ensure_index('t', background=True)
volume_collection.ensure_index('server_id', background=True)
self.model.cleanup(server, date_before=date_before)
process_entries = process_collection.find(params).count()
eq_(process_entries, 199)
for p in process_collection.find(sort=[('t', self.model.asc)]):
assert p['t'] > date_before
system_entries = system_collection.find(params).count()
eq_(system_entries, 199)
for p in system_collection.find(sort=[('time', self.model.asc)]):
assert p['time'] > date_before
entries = interface_collection.find(params).count()
eq_(entries, 199)
for p in interface_collection.find(sort=[('t', self.model.asc)]):
assert p['t'] > date_before
entries = volume_collection.find(params).count()
eq_(entries, 199)
for p in volume_collection.find(sort=[('t', self.model.asc)]):
assert p['t'] > date_before
process_collection.drop()
system_collection.drop()
interface_collection.drop()
volume_collection.drop()
class CloudServerModelTest(unittest.TestCase):
def setUp(self):
self.collection = cloud_server_model.mongo.get_collection('servers')
def _cleanup(self):
self.collection.remove()
tags_model.collection.remove()
cloud_credentials_model.collection.remove()
def update_cloud_server_test(self):
self._cleanup()
s = self.collection
s.remove()
s.insert({"account_id": 1, "name" : "test", "key": "server_key_test", "instance_id": 2})
result = s.find_one()
eq_(result['instance_id'], 2)
data = {'instance_id': 2, 'provider': 'amazon'}
cloud_server_model.update_server(data)
result = s.find_one()
eq_(result['provider'], 'amazon')
# Create a new server if it does not exist
self.collection.remove()
data = {"name":"create_server", 'instance_id': 3}
cloud_server_model.update_server(data, account_id=1)
result = s.find_one()
assert(result['key'])
eq_(result['instance_id'], 3)
eq_(result['account_id'], 1)
self._cleanup()
def delete_servers_for_credentials_test(self):
self._cleanup()
credentials_id = "test_credentials"
self.collection.insert({"account_id": 1, "name" : "test", "key": "server_key_test", "credentials_id": credentials_id})
server = self.collection.find_one()
eq_(server['credentials_id'], 'test_credentials')
cloud_server_model.delete_servers_for_credentials(credentials_id=credentials_id)
result = self.collection.find().count()
eq_(result, 0)
self._cleanup()
def delete_all_for_credentials_test(self):
self._cleanup()
data = {'name': 'test', 'token': 'test-token'}
credentials_id = cloud_credentials_model.save(data=data, provider_id='digitalocean')
for i in range(5):
self.collection.insert({"account_id": 1, "name" : "test", "key": "server_key_test", "credentials_id": credentials_id})
cloud_server_model.delete_all_for_provider(credentials_id=credentials_id)
result = self.collection.find().count()
eq_(result, 0)
self._cleanup()
def get_all_for_credentials_test(self):
self._cleanup()
credentials_id = "test_credentials"
for i in range(5):
self.collection.insert({"account_id": 1, "name" : "test", "key": "server_key_test", "credentials_id": credentials_id})
result = cloud_server_model.get_all_for_provider(credentials_id=credentials_id)
eq_(result.count(), 5)
self._cleanup()
def get_instance_ids_list_test(self):
self._cleanup()
credentials_id = "test_credentials"
for i in range(5):
self.collection.insert({"account_id": 1, "name" : "test",
"key": "server_key_test",
"credentials_id": credentials_id,
"instance_id": "instance_id_{0}".format(i)
})
result = cloud_server_model.get_instance_ids_list(credentials_id=credentials_id)
eq_(sorted(result), [u'instance_id_0', u'instance_id_1', u'instance_id_2', u'instance_id_3', u'instance_id_4'] )
self._cleanup()
def diff_instance_ids_test(self):
old_instances = ['test', 'test1', 'test2']
new_instances = ['somethingnew', 'test1']
result = cloud_server_model.diff_instance_ids(old_instances=old_instances, new_instances=new_instances)
eq_(sorted(result), ['test', 'test2']) # These have to be removed
def save_test(self):
self._cleanup()
credentials_id = "test_credentials"
data = {'name': 'test', 'token': 'test-token'}
credentials_id = cloud_credentials_model.save(data=data, provider_id='digitalocean')
credentials = cloud_credentials_model.collection.find_one()
# Empty list
instance_list = []
cloud_server_model.save(instances=instance_list, credentials=credentials)
result = self.collection.find()
eq_(result.count(), 0)
# Normal list
for i in range(5):
instance = {
'name': 'test',
'instance_id': "instance_id_{0}".format(i),
'provider': "rackspace",
'credentials_id': credentials_id,
'region': 'eu-west1',
'type': 't1-micro'
}
instance_list.append(instance)
cloud_server_model.save(instances=instance_list, credentials=credentials)
result = self.collection.find()
for r in result.clone():
assert len(r['tags']) == 3
for tag in r['tags']:
tag_object = tags_model.get_by_id(tag)
name = tag_object.get('name')
group = tag_object.get('group', {}).get('name')
assert name in ['rackspace', 'eu-west1', 't1-micro']
assert group in ['region', 'provider', 'type']
eq_(r['credentials_id'], credentials_id)
eq_(result.count(), 5)
self._cleanup()
# Filter and delete some old instances
for i in range(4):
self.collection.insert({
"account_id": 1,
"name": "test",
"key": "server_key_test",
"credentials_id": credentials_id,
"instance_id": "instance_id_{0}".format(i)
})
result = self.collection.find().count()
eq_(result, 4)
# Check if duplicate tags are being saved
for i in ['rackspace', 'bla']:
tags_model.get_or_create_by_name(name=i)
instance_list = []
for i in range(5, 10):
instance = {
'name': 'test',
'instance_id': i,
'provider': "rackspace",
'credentials_id': credentials_id,
}
instance_list.append(instance)
cloud_server_model.save(instances=instance_list, credentials=credentials)
result = self.collection.find()
eq_(result.count(), 5)
for r in result:
for tag in r['tags']:
tag_object = tags_model.get_by_id(tag)
assert tag_object['name'] in ['rackspace', 'bla']
self.assertTrue(r['key'])
assert r['instance_id'] <= 10
assert r['instance_id'] >= 5
# Filter and delete all instances, the instance list is empty
instance_list = []
cloud_server_model.save(instances=instance_list, credentials=credentials)
result = self.collection.find()
eq_(result.count(), 0)
self._cleanup()
| 16,055
|
Python
|
.py
| 336
| 36.973214
| 130
| 0.600459
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,154
|
views_tests.py
|
amonapp_amon/amon/apps/_servers/tests/views_tests.py
|
from django.test.client import Client
from django.test import TestCase
from django.urls import reverse
from nose.tools import *
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.servers.models import server_model
class TestServerViews(TestCase):
def setUp(self):
User.objects.all().delete()
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
server_model.collection.remove()
def all_servers_test(self):
url = reverse('servers')
response = self.c.get(url)
assert response.status_code == 200
def add_server_test(self):
server_model.collection.remove()
url = reverse('add_server')
response = self.c.get(url)
assert response.status_code == 200
response = self.c.post(url, {'name': 'test', 'check_every': 60,'keep_data': 30})
created_server = server_model.collection.find_one()
eq_(created_server['name'], 'test')
response_url = "{0}#{1}".format(reverse('servers'), created_server['_id'])
self.assertRedirects(response, response_url)
server_model.collection.remove()
def edit_server_test(self):
server_model.collection.remove()
server_model.collection.insert({'name': 'test' , 'check_every': 60,'keep_data': 30, "key": "test"})
server = server_model.collection.find_one()
url = reverse('edit_server', kwargs={'server_id': server['_id']})
response = self.c.get(url)
assert response.status_code == 200
response = self.c.post(url, {'name': 'changetest', 'check_every': 300,'keep_data': 30})
updated_server = server_model.collection.find_one()
self.assertRedirects(response, reverse('servers'))
eq_(updated_server['name'], 'changetest')
eq_(updated_server['check_every'], 300)
server_model.collection.remove()
def delete_server_test(self):
server_model.collection.remove()
server_model.collection.insert({'name': 'test'})
server = server_model.collection.find_one()
url = reverse('delete_server', kwargs={'server_id': server['_id']})
response = self.c.get(url)
self.assertRedirects(response, reverse('servers'))
deleted_server = server_model.collection.find().count()
eq_(deleted_server, 0)
server_model.collection.remove()
| 2,693
|
Python
|
.py
| 55
| 39.145455
| 107
| 0.648627
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,155
|
forms_test.py
|
amonapp_amon/amon/apps/_servers/tests/forms_test.py
|
from django.test import TestCase
from nose.tools import *
from amon.apps.servers.models import server_model
from amon.apps.servers.forms import ServerForm
from amon.apps.tags.models import tags_model
class TestAddServerForm(TestCase):
def setUp(self):
tags_model.collection.remove()
server_model.collection.remove()
def tearDown(self):
tags_model.collection.remove()
server_model.collection.remove()
def test_form(self):
tags_string = u'tag1, tag2'
updated_tags = u'tag1, tag2, tag3'
form_data = {'name': 'test', 'check_every': 60,'keep_data': 30,
'tags':tags_string}
form = ServerForm(data=form_data)
self.assertEqual(form.is_valid(), True)
form.save()
assert tags_model.collection.find().count() == 2
tags = tags_model.get_tags_ids(tags_string=tags_string)
server = server_model.collection.find_one()
assert server.get('tags') == tags
form_data = {'name': 'test', 'check_every': 60,'keep_data': 30,
'tags':updated_tags}
form = ServerForm(data=form_data, server=server)
self.assertEqual(form.is_valid(), True)
form.save()
assert tags_model.collection.find().count() == 3
tags_list = [x.strip() for x in updated_tags.split(',')]
for r in tags_model.collection.find():
assert r['name'] in tags_list
updated_tags_ids = tags_model.get_tags_ids(tags_string=updated_tags)
server = server_model.collection.find_one()
assert server.get('tags') == updated_tags_ids
| 1,627
|
Python
|
.py
| 36
| 36.694444
| 76
| 0.648422
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,156
|
urls.py
|
amonapp_amon/amon/apps/install/urls.py
|
from django.conf.urls import url
from amon.apps.install import views
urlpatterns = [
url(r"^$", views.install_agent, name='install_agent'),
]
| 148
|
Python
|
.py
| 5
| 27.4
| 58
| 0.744681
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,157
|
views.py
|
amonapp_amon/amon/apps/install/views.py
|
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.conf import settings
def install_agent(request):
domain_url = settings.HOST.rstrip('/') # Remove trailing slash
return HttpResponse(render_to_string('install/agent.sh', {
'domain_url': domain_url,
'hostname': settings.HOSTNAME
}), content_type='text/plain; charset=utf-8')
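# Rough usage sketch (assumption, not from the source): if the install app is mounted at /install,
# a new host could bootstrap the agent with something like:
#   curl -sSL http://<amon-host>/install/ | bash
# The rendered install/agent.sh template receives domain_url and hostname from the Django settings.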
| 407
|
Python
|
.py
| 9
| 40.666667
| 67
| 0.738579
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,158
|
urls.py
|
amonapp_amon/amon/apps/charts/urls.py
|
from django.conf.urls import url
from amon.apps.charts import views
urlpatterns = [
# AJAX
url(r'^ajax_localtime_to_unix/$',views.ajax_localtime_to_unix, name='localtime_to_unix'),
]
| 199
|
Python
|
.py
| 6
| 29.5
| 93
| 0.740541
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,159
|
context_processors.py
|
amonapp_amon/amon/apps/charts/context_processors.py
|
from amon.apps.charts.forms import (
DurationForm,
SystemChartsForm,
ProcessChartsForm
)
# from amon.apps.servers.models import server_model
def charts_global_variables(request):
if request.user.is_authenticated:
# all_servers = server_model.get_all(account_id=request.account_id)
global_variables_dict = {
'duration_form': DurationForm(),
'system_charts_form': SystemChartsForm(),
'process_charts_form': ProcessChartsForm(),
# 'all_servers': all_servers
}
else:
global_variables_dict = {
'duration_form': DurationForm(),
}
return global_variables_dict
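# For this context processor to run it has to be listed in the template settings. A sketch of the
# relevant entry (assumed wiring; the project's actual settings module is not shown here):
# TEMPLATES[0]['OPTIONS']['context_processors'].append(
#     'amon.apps.charts.context_processors.charts_global_variables'
# )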
| 697
|
Python
|
.py
| 20
| 26.7
| 76
| 0.638393
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,160
|
forms.py
|
amonapp_amon/amon/apps/charts/forms.py
|
from django import forms
DURATION_CHOICES = (
(1800, 'Last 30 minutes'),
(3600, 'Last 60 minutes'),
(10800, 'Last 3 hours'),
(21600, 'Last 6 hours'),
(43200, 'Last 12 hours'),
(86400, 'Last 24 hours'),
(259200, 'Last 3 days'),
(604800, 'Last 7 days'),
)
class DurationForm(forms.Form):
duration = forms.ChoiceField(choices=DURATION_CHOICES)
SYSTEM_CHARTS = (
('all', 'All'),
('cpu', 'CPU'),
('memory', 'Memory'),
('loadavg', 'Load Average'),
('network', 'Network'),
('disk', 'Disk'),
)
class SystemChartsForm(forms.Form):
charts = forms.ChoiceField(choices=SYSTEM_CHARTS)
PROCESS_CHARTS = (
('cpu', 'CPU'),
('memory', 'Memory'),
)
class ProcessChartsForm(forms.Form):
charts = forms.ChoiceField(choices=PROCESS_CHARTS)
| 809
|
Python
|
.py
| 29
| 23.793103
| 58
| 0.633159
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,161
|
views.py
|
amonapp_amon/amon/apps/charts/views.py
|
from django.contrib.auth.decorators import login_required
from amon.utils.dates import (
datestring_to_utc_datetime,
datetime_to_unixtime,
)
from rest_framework.decorators import api_view
from rest_framework.response import Response
@login_required
@api_view(['GET'])
def ajax_localtime_to_unix(request):
date_to_local = request.GET.get('date_to_local', None)
utc_datetime = datestring_to_utc_datetime(date_to_local, tz=request.timezone)
unixtime = datetime_to_unixtime(utc_datetime)
return Response({'unixtime': unixtime})
| 566
|
Python
|
.py
| 14
| 36.428571
| 81
| 0.773832
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,162
|
models.py
|
amonapp_amon/amon/apps/notifications/models.py
|
from amon.apps.core.basemodel import BaseModel
class NotificationsModel(BaseModel):
def __init__(self):
super(NotificationsModel, self).__init__()
self.collection = self.mongo.get_collection('notifications_settings')
def save(self, data=None, provider_id=None):
data['provider_id'] = provider_id
self.collection.insert(data)
self.collection.ensure_index('provider_id', background=True)
def update(self, data=None, id=None):
id = self.object_id(id)
self.collection.update({"_id": id}, {"$set": data}, upsert=True)
self.collection.ensure_index('provider_id', background=True)
# Used in alerts and health checks
def get_list_of_strings_to_mongo_objects(self, notifications_list=None):
result = []
if len(notifications_list) > 0:
for x in notifications_list:
split_provider_id = x.split(':') # email:id
if len(split_provider_id) == 2:
_id = split_provider_id[1]
cursor = self.get_by_id(_id)
if cursor: # Append if exists
result.append(cursor)
return result
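# Illustrative input/output for the method above (not from the source; the id is a placeholder):
# get_list_of_strings_to_mongo_objects(['email:5717dcd3466d1d2a8b45bd1c', 'bogus'])
# -> [<notification document with _id 5717dcd3466d1d2a8b45bd1c>]   # malformed or missing entries are skipped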
def get_all_formated(self):
result = self.get_all(sort_by='provider_id')
result_list = []
for r in result.clone():
r['formated_id'] = "{0}:{1}".format(r.get('provider_id'), r.get('_id'))
result_list.append(r)
return result_list
def get_all_for_provider(self, provider_id=None):
params = {'provider_id': provider_id}
result = self.collection.find(params)
return result
notifications_model = NotificationsModel()
| 1,713
|
Python
|
.py
| 37
| 35.918919
| 83
| 0.606776
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,163
|
cron.py
|
amonapp_amon/amon/apps/notifications/cron.py
|
import kronos
import logging
logger = logging.getLogger(__name__)
from amon.apps.notifications.sender import send_notifications
@kronos.register('* * * * *')
def send_notifications_task():
send_notifications()
logger.debug('Sending notifications ...')
| 262
|
Python
|
.py
| 8
| 30.5
| 61
| 0.757937
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,164
|
urls.py
|
amonapp_amon/amon/apps/notifications/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^$', views.view, name='notifications_all'),
url(r'^(?P<provider_id>\w+)/$', views.edit, name='notifications_edit'),
url(r'^test/(?P<provider_id>\w+)/$', views.test, name='notifications_test'),
url(r'^add/(?P<provider_id>\w+)/$', views.add, name='notifications_add'),
url(r'^edit/(?P<provider_id>\w+)/(?P<notification_id>\w+)/$', views.edit, name='notifications_edit'),
url(r'^test/(?P<provider_id>\w+)/(?P<notification_id>\w+)/$', views.test, name='notifications_test'),
url(r'^delete/(?P<provider_id>\w+)/(?P<notification_id>\w+)/$', views.delete, name='notifications_delete'),
)
| 686
|
Python
|
.py
| 11
| 58.636364
| 111
| 0.643388
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,165
|
generator.py
|
amonapp_amon/amon/apps/notifications/generator.py
|
import logging
from amon.apps.alerts.models import alerts_model, alerts_history_model, alert_mute_servers_model
from amon.apps.servers.models import server_model
from amon.apps.processes.models import process_model
from amon.apps.plugins.models import plugin_model
from amon.apps.devices.models import volumes_model
from amon.apps.devices.models import interfaces_model
from amon.apps.healthchecks.models import health_checks_results_model, health_checks_model
from amon.utils import AmonStruct
from django.template.loader import render_to_string
logger = logging.getLogger(__name__)
def generate_message(notification=None):
message = 'Amon - Test Notification'
if notification:
alert_type = notification.alert.get('rule_type')
template = False
if alert_type in ['process', 'process_global']:
template = "process_alert.txt"
elif alert_type == 'uptime':
template = "uptime_alert.txt"
elif alert_type in ['system', 'global']:
template = "system_alert.txt"
elif alert_type in ['plugin', 'plugin_global']:
template = "plugin_alert.txt"
elif alert_type == 'notsendingdata':
template = "notsending_alert.txt"
elif alert_type == 'health_check':
template = "health_check_alert.txt"
if template:
try:
message = render_to_string(template, {'notification': notification})
except Exception as e:
logger.exception('Can not generate notification')
return message
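# Hedged usage sketch (not in the original module): with no notification the function falls back to a
# generic test message, otherwise it renders a template chosen from the alert's rule_type.
# generate_message() -> 'Amon - Test Notification'
# generate_message(notification=some_notification) -> rendered "system_alert.txt" (for a 'system' alert)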
def generate_notifications():
notifications_list = []
unsent_alerts = alerts_history_model.get_unsent()
for trigger in unsent_alerts.get('data'):
result = AmonStruct()
result.global_mute = False
metadata = None
timezone = 'UTC'
try:
alert = alerts_model.get_by_id(trigger['alert_id'])
except:
alert = None # Deleted alert here
if alert:
rule_type = alert.get('rule_type', 'system')
metric_type = alert.get('metric', None)
else:
rule_type = 'alert-does-not-exist'
if rule_type in ['global', 'process_global', 'plugin_global', 'process', 'system', 'plugin', 'uptime', 'health_check']:
if rule_type in ['global', 'process_global', 'plugin_global', 'health_check']:
server_id = trigger.get('server_id')
else:
server_id = alert.get('server')
if server_id:
server = server_model.get_by_id(server_id)
result.server = server
result.global_mute = alert_mute_servers_model.check_if_server_is_muted(server=server)
if metric_type:
metric_type = metric_type.lower()
if metric_type in ['cpu', 'memory', 'loadavg']:
trigger_period_from = trigger['from']
trigger_period_to = trigger['time']
metric_type = 'cpu' if metric_type == 'loadavg' else metric_type # Get CPU top consumers for Load average
if server:
metadata = process_model.get_top_consumers_for_period(date_from=trigger_period_from,
date_to=trigger_period_to, server=server, metric_type=metric_type)
# Overwrite rule_type for the new type
if metric_type == 'notsendingdata':
alert['rule_type'] = 'notsendingdata'
if metric_type == 'disk':
volume_id = trigger.get('volume')
metadata = volumes_model.get_by_id(volume_id)
if metric_type in ['network/inbound', 'network/outbound']:
interface_id = trigger.get('interface')
metadata = interfaces_model.get_by_id(interface_id)
if rule_type == 'process_global':
process_name = alert.get('process')
result.process = process_model.get_by_name_and_server_id(server_id=server_id, name=process_name)
if rule_type == 'plugin_global':
gauge_name = alert.get('gauge')
plugin_name = alert.get('plugin')
result.plugin = plugin_model.get_by_name_and_server_id(server_id=server_id, name=plugin_name)
result.gauge = plugin_model.get_gauge_by_name_and_plugin_id(plugin=result.plugin, name=gauge_name)
# Process and Uptime alerts
if rule_type == 'process' or rule_type == 'uptime':
process_dict = alert.get('process')
if process_dict:
result.process = process_model.get_by_id(process_dict.get('_id'))
if rule_type == 'plugin':
result.plugin = alert.get('plugin')
result.gauge = alert.get('gauge')
if rule_type == 'health_check':
health_check_result_id = trigger.get('health_checks_data_id')
health_check_result = health_checks_results_model.get_by_id(health_check_result_id)
if type(health_check_result) is dict:
health_check_id = health_check_result.get('check_id')
health_check = health_checks_model.get_by_id(health_check_id)
result.healthcheck = health_check
result.health_check_result = health_check_result
if alert:
result.alert = alert
result.metadata = metadata
result.timezone = timezone
result.trigger = trigger
result.mute = alert.get('mute', False) # Shortcut
notifications_list.append(result)
return notifications_list
| 5,806
|
Python
|
.py
| 110
| 39.490909
| 127
| 0.603529
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,166
|
utils.py
|
amonapp_amon/amon/apps/notifications/utils.py
|
from amon.apps.notifications.models import notifications_model
def active_notification_providers_list():
providers = ['pagerduty', 'opsgenie', 'pushover','victorops']
active_list = []
for provider_id in providers:
result = notifications_model.get_for_provider(provider_id=provider_id)
if result is not None:
active_list.append(provider_id)
return active_list
| 406
|
Python
|
.py
| 9
| 38.555556
| 79
| 0.710997
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,167
|
sender.py
|
amonapp_amon/amon/apps/notifications/sender.py
|
from amon.apps.notifications.generator import generate_notifications
from amon.apps.notifications.generator import generate_message
from amon.apps.notifications.mail.sender import send_notification_email
from amon.apps.notifications.webhooks.sender import send_webhook_notification
from amon.apps.notifications.pushover.sender import send_pushover_notification
from amon.apps.notifications.victorops.sender import send_victorops_notification
from amon.apps.notifications.pagerduty.sender import send_pagerduty_notification
from amon.apps.notifications.opsgenie.sender import send_opsgenie_notification
from amon.apps.notifications.slack.sender import send_slack_notification
from amon.apps.notifications.telegram.sender import send_telegram_notification
from amon.apps.notifications.hipchat.sender import send_hipchat_notification
from amon.apps.alerts.models import alerts_history_model
def send_notifications():
notifications_to_send = generate_notifications()
for n in notifications_to_send:
if n.mute != True and n.global_mute != True:
message = generate_message(notification=n)
notify = n.alert.get('notifications', [])
# Collect all emails
emails_list = []
for x in notify:
email = x.get('email')
if email:
emails_list.append(email)
if len(emails_list) > 0:
send_notification_email(notification=n, emails=emails_list)
for provider_auth in notify:
provider = provider_auth.get('provider_id')
if provider == 'pushover':
send_pushover_notification(message=message, auth=provider_auth)
if provider == 'opsgenie':
send_opsgenie_notification(message=message, auth=provider_auth)
if provider == 'pagerduty':
send_pagerduty_notification(message=message, auth=provider_auth)
if provider == 'victorops':
send_victorops_notification(message=message, auth=provider_auth)
if provider == 'slack':
send_slack_notification(message=message, auth=provider_auth)
if provider == 'telegram':
send_telegram_notification(message=message, auth=provider_auth)
if provider == 'hipchat':
send_hipchat_notification(message=message, auth=provider_auth)
if provider == 'webhook':
send_webhook_notification(notification=n, auth=provider_auth, message=message)
alerts_history_model.mark_as_sent(n.trigger['_id'])
return notifications_to_send # For the remote command execute
| 2,791
|
Python
|
.py
| 46
| 47.891304
| 98
| 0.678824
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,168
|
legacymodels.py
|
amonapp_amon/amon/apps/notifications/legacymodels.py
|
from amon.apps.core.basemodel import BaseModel
from django.conf import settings
class EmailRecepientModel(BaseModel):
def __init__(self):
super(EmailRecepientModel, self).__init__()
self.collection = self.mongo.get_collection('email_recepients')
def get_all(self):
count = self.collection.find().count()
if count == 0:
return None
else:
return self.collection.find(sort=[("name", self.asc)])
def add_initial_data(self, email=None):
count = self.collection.find().count()
if count == 0:
data = {'account_id': settings.ACCOUNT_ID, 'email': email}
self.collection.insert(data)
class WebHooksModel(BaseModel):
def __init__(self):
super(WebHooksModel, self).__init__()
self.collection = self.mongo.get_collection('webhooks')
webhooks_model = WebHooksModel()
email_recepient_model = EmailRecepientModel()
| 965
|
Python
|
.py
| 23
| 33.73913
| 71
| 0.656798
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,169
|
forms.py
|
amonapp_amon/amon/apps/notifications/forms.py
|
from django import forms
class BaseNotificationForm(forms.Form):
def __init__(self, *args, **kwargs):
kwargs.setdefault('label_suffix', '')
super(BaseNotificationForm, self).__init__(*args, **kwargs)
class WebHookForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(WebHookForm, self).__init__(*args, **kwargs)
try:
self.fields['name'].initial = self.form_data.get('name')
self.fields['url'].initial = self.form_data.get('url')
self.fields['secret'].initial = self.form_data.get('secret')
except:
pass
name = forms.CharField(required=True)
url = forms.URLField(required=True, max_length=256, widget=forms.TextInput(attrs={'placeholder': 'URL'}))
secret = forms.CharField(required=False, widget=forms.TextInput(attrs={'placeholder': 'A secret string passed in the POST data'}))
class EmailForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(EmailForm, self).__init__(*args, **kwargs)
try:
self.fields['email'].initial = self.form_data.get('email')
except:
pass
email = forms.EmailField()
# Valid values: yellow, green, red, purple, gray, random.
HIPCHAT_COLORS = [('gray', 'Gray'), ('yellow', 'Yellow'), ('green', 'Green'), ('red', 'Red'), ('purple', 'Purple')]
class HipChatForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(HipChatForm, self).__init__(*args, **kwargs)
try:
self.fields['name'].initial = self.form_data.get('name')
self.fields['url'].initial = self.form_data.get('url')
self.fields['color'].initial = self.form_data.get('color')
except:
pass
name = forms.CharField(required=True)
url = forms.CharField(required=True, widget=forms.TextInput(attrs={'placeholder': 'HipChat Integration URL'}))
color = forms.ChoiceField(required=False, choices=HIPCHAT_COLORS, widget=forms.RadioSelect(attrs={'class': 'radiolist'}), initial='gray')
class SlackForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(SlackForm, self).__init__(*args, **kwargs)
try:
self.fields['name'].initial = self.form_data.get('name')
self.fields['webhook_url'].initial = self.form_data.get('webhook_url')
except:
pass
name = forms.CharField(required=True)
webhook_url = forms.CharField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Webhook URL'}))
class TelegramForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(TelegramForm, self).__init__(*args, **kwargs)
try:
self.fields['name'].initial = self.form_data.get('name')
self.fields['token'].initial = self.form_data.get('token')
self.fields['chat_id'].initial = self.form_data.get('chat_id')
except:
pass
name = forms.CharField(required=True)
token = forms.CharField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Token'}))
chat_id = forms.CharField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Chat ID'}))
class PushoverForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(PushoverForm, self).__init__(*args, **kwargs)
try:
self.fields['name'].initial = self.form_data.get('name')
self.fields['user_key'].initial = self.form_data.get('user_key')
self.fields['application_api_key'].initial = self.form_data.get('application_api_key')
except:
pass
name = forms.CharField(required=True)
user_key = forms.CharField(required=True, max_length=256, widget=forms.TextInput(attrs={'placeholder': 'User Key'}),
label='User Key')
application_api_key = forms.CharField(required=True, label='Application API Key')
class PagerDutyForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(PagerDutyForm, self).__init__(*args, **kwargs)
try:
self.fields['api_key'].initial = self.form_data.get('api_key')
self.fields['name'].initial = self.form_data.get('name')
self.fields['incident_key'].initial = self.form_data.get('incident_key')
except:
pass
name = forms.CharField(required=True)
incident_key = forms.CharField(required=True)
api_key = forms.CharField(required=True, max_length=256, widget=forms.TextInput(attrs={'placeholder': 'API Key'}))
class OpsGenieForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(OpsGenieForm, self).__init__(*args, **kwargs)
try:
self.fields['name'].initial = self.form_data.get('name')
self.fields['api_key'].initial = self.form_data.get('api_key')
except:
pass
name = forms.CharField(required=True)
api_key = forms.CharField(required=True, max_length=256, widget=forms.TextInput(attrs={'placeholder': 'API Key'}))
class VictorOpsForm(BaseNotificationForm):
def __init__(self, *args, **kwargs):
self.form_data = kwargs.pop('provider_data', None)
super(VictorOpsForm, self).__init__(*args, **kwargs)
try:
self.fields['name'].initial = self.form_data.get('name')
self.fields['rest_endpoint'].initial = self.form_data.get('rest_endpoint')
except:
pass
name = forms.CharField(required=True)
rest_endpoint = forms.URLField(required=True)
| 6,062
|
Python
|
.py
| 115
| 44.008696
| 141
| 0.641069
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,170
|
views.py
|
amonapp_amon/amon/apps/notifications/views.py
|
from django.shortcuts import render
from django.shortcuts import redirect
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from amon.apps.notifications.forms import (
PushoverForm,
PagerDutyForm,
VictorOpsForm,
OpsGenieForm,
SlackForm,
TelegramForm,
HipChatForm,
EmailForm,
WebHookForm
)
from amon.apps.notifications.models import notifications_model
from amon.apps.notifications.pushover.sender import send_pushover_notification
from amon.apps.notifications.pagerduty.sender import send_pagerduty_notification
from amon.apps.notifications.victorops.sender import send_victorops_notification
from amon.apps.notifications.opsgenie.sender import send_opsgenie_notification
from amon.apps.notifications.slack.sender import send_slack_notification
from amon.apps.notifications.telegram.sender import send_telegram_notification
from amon.apps.notifications.hipchat.sender import send_hipchat_notification
from amon.apps.notifications.webhooks.sender import _send_webhook
from amon.apps.notifications.mail.sender import send_test_email
PROVIDERS = {
'pushover': PushoverForm,
'pagerduty': PagerDutyForm,
'victorops': VictorOpsForm,
'opsgenie': OpsGenieForm,
'slack': SlackForm,
'telegram': TelegramForm,
'hipchat': HipChatForm,
'email': EmailForm,
'webhook': WebHookForm
}
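# Dispatch sketch (illustrative comment, not part of the original module):
# the provider_id segment of the URL doubles as the key into PROVIDERS, e.g.
#
#   form_class = PROVIDERS.get('slack', False)    # -> SlackForm
#   form = form_class(request.POST)               # bound form for that provider
#
# Unknown provider ids fall through to False and the views below redirect
# to 'notifications_all'.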
@login_required
def view(request):
return render(request, 'notifications/view.html', {
})
@login_required
def delete(request, provider_id=None, notification_id=None):
notifications_model.delete(notification_id)
return redirect(reverse('notifications_edit', kwargs={'provider_id': provider_id}))
@login_required
def add(request, provider_id=None):
all_for_provider = notifications_model.get_all_for_provider(provider_id=provider_id)
provider_form = PROVIDERS.get(provider_id, False)
if not provider_form:
return redirect(reverse('notifications_all'))
if request.method == "POST":
form = provider_form(request.POST)
if form.is_valid():
data = form.cleaned_data
notifications_model.save(data=data, provider_id=provider_id)
messages.add_message(request, messages.INFO, '{0} settings updated'.format(provider_id.title()))
return redirect(reverse('notifications_edit', kwargs={'provider_id': provider_id}))
else:
form = provider_form()
return render(request, 'notifications/view.html', {
"form": form,
"provider_id": provider_id,
"add_form": True,
"all_for_provider": all_for_provider
})
@login_required
def edit(request, provider_id=None, notification_id=None):
provider_data = notifications_model.get_by_id(notification_id)
all_for_provider = notifications_model.get_all_for_provider(provider_id=provider_id)
provider_form = PROVIDERS.get(provider_id, False)
if not provider_form:
return redirect(reverse('notifications_all'))
if request.method == "POST":
form = provider_form(request.POST)
if form.is_valid():
data = form.cleaned_data
notifications_model.update(data=data, id=notification_id)
if 'test' in request.POST:
redirect_url = reverse('notifications_test', kwargs={'provider_id': provider_id, 'notification_id': notification_id})
else:
redirect_url = reverse('notifications_edit', kwargs={'provider_id': provider_id, 'notification_id': notification_id})
messages.add_message(request, messages.INFO, '{0} settings updated'.format(provider_id.title()))
return redirect(redirect_url)
else:
form = provider_form(provider_data=provider_data)
return render(request, 'notifications/view.html', {
"form": form,
"provider_id": provider_id,
"provider_data": provider_data,
"all_for_provider": all_for_provider,
"notification_id": notification_id
})
@login_required
def test(request, provider_id=None, notification_id=None):
auth = notifications_model.get_by_id(notification_id)
message = 'Test Notification'
if provider_id == 'pushover':
send_pushover_notification(message=message, auth=auth)
elif provider_id == 'pagerduty':
send_pagerduty_notification(message=message, auth=auth)
elif provider_id == 'victorops':
send_victorops_notification(message=message, auth=auth)
elif provider_id == 'opsgenie':
send_opsgenie_notification(message=message, auth=auth)
elif provider_id == 'slack':
send_slack_notification(message=message, auth=auth)
elif provider_id == 'telegram':
send_telegram_notification(message=message, auth=auth)
elif provider_id == 'hipchat':
send_hipchat_notification(message=message, auth=auth)
elif provider_id == 'webhook':
_send_webhook(auth=auth, data={"message": message})
elif provider_id == 'email':
emails = [auth.get('email')]
send_test_email(emails=emails)
else:
return redirect(reverse('notifications_all'))
messages.add_message(request, messages.INFO, 'Sending test notification to {0}'.format(provider_id.title()))
return redirect(reverse('notifications_edit', kwargs={'provider_id': provider_id, 'notification_id': notification_id}))
| 5,418
|
Python
|
.py
| 120
| 39.025
| 133
| 0.717842
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,171
|
compiler.py
|
amonapp_amon/amon/apps/notifications/mail/compiler.py
|
import logging
from django.template.loader import render_to_string
from django.conf import settings
from amon.utils.dates import datetimeformat_local
logger = logging.getLogger(__name__)
def compile_notsendingdata_email(notification=None):
n = notification
html_content = render_to_string('notsendingdata_alert.html', {
'alert': n.alert,
'trigger': n.trigger,
'server': n.server,
'timezone': n.timezone,
'domain_url': settings.HOST,
'notification': n
})
server_name = n.server.get('name', None)
    last_check = n.server.get('last_check')
if last_check:
last_check = datetimeformat_local(last_check)
    subject = 'Server: {server} has not sent data since {last_check}'.format(
server=server_name,
last_check=last_check
)
result = {
'html_content': html_content,
'subject': subject,
}
return result
def compile_uptime_email(notification=None):
n = notification
html_content = render_to_string('process_uptime_alert.html', {
'alert': n.alert,
'process': n.process,
'trigger': n.trigger,
'server': n.server,
'timezone': n.timezone,
'domain_url': settings.HOST
})
process_name = n.process.get('name', None)
server_name = n.server.get('name', None)
    subject = 'Server: {server} / {process} is Down'.format(
server=server_name,
process=process_name,
)
result = {
'html_content': html_content,
'subject': subject,
}
return result
def compile_plugin_email(notification=None):
n = notification
html_content = render_to_string('plugin_alert.html', {
'alert': n.alert,
'plugin': n.plugin,
'gauge': n.gauge,
'trigger': n.trigger,
'server': n.server,
'timezone': n.timezone,
'domain_url': settings.HOST
})
plugin_name = n.plugin.get('name', None)
server_name = n.server.get('name', None)
gauge_name = n.gauge.get('name', '')
above_below_value = ">" if n.alert.get('above_below') == 'above' else "<"
    subject = 'Server: {server} - {plugin}.{gauge_name}.{key} {above_below} {metric_value} (Current value: {current})'.format(
server=server_name,
plugin=plugin_name,
gauge_name=gauge_name,
key=n.alert.get('key'),
above_below=above_below_value,
metric_value=n.alert.get('metric_value'),
current=n.trigger.get('average_value')
)
result = {
'html_content': html_content,
'subject': subject,
}
return result
def compile_process_email(notification=None):
n = notification
html_content = render_to_string('process_alert.html', {
'alert': n.alert,
'process': n.process,
'trigger': n.trigger,
'server': n.server,
'timezone': n.timezone,
'domain_url': settings.HOST
})
metric = n.alert.get('metric')
process_name = n.process.get('name', None)
server_name = n.server.get('name', None)
above_below_value = ">" if n.alert.get('above_below') == 'above' else "<"
    subject = 'Server: {server} - {process}/{metric} {above_below} {metric_value}{metric_type} alert (Current value: {current}{metric_type})'.format(
server=server_name,
process=process_name,
metric=metric,
above_below=above_below_value,
metric_value=n.alert.get('metric_value'),
current=n.trigger.get('average_value'),
metric_type=n.alert.get('metric_type', '')
)
result = {
'html_content': html_content,
'subject': subject,
}
return result
def compile_system_email(notification=None):
n = notification
try:
html_content = render_to_string('system_alert.html', {
'alert': n.alert,
'trigger': n.trigger,
'server': n.server,
'metadata': n.metadata,
'timezone': n.timezone,
'domain_url': settings.HOST
})
except Exception as e:
logger.exception("Can't compile system email")
check = n.alert.get('metric')
above_below_value = ">" if n.alert.get('above_below') == 'above' else "<"
try:
meta = n.metadata.get('name', '')
meta = "{0}/".format(meta) # eth1/100kbs, sda1/100MB
except:
meta = ''
subject = 'Server: {server} - {check} {above_below} {metric_value}{metric_type} alert (Current value: {meta}{current}{metric_type})'.format(
server=n.server.get('name', None),
check=check,
above_below=above_below_value,
metric_value=n.alert.get('metric_value'),
current=n.trigger.get('average_value'),
metric_type=n.alert.get('metric_type', ''),
meta=meta
)
result = {
'html_content': html_content,
'subject': subject,
}
return result
def compile_health_check_email(notification=None):
n = notification
# Alert with param
param = n.alert.get('param')
if not param:
# Global alerts, get the param value from the health check itself
param = n.healthcheck.get('params', "")
param = "" if param == False else param
try:
html_content = render_to_string('health_check_alert.html', {
'alert': n.alert,
'trigger': n.trigger,
'server': n.server,
'timezone': n.timezone,
'domain_url': settings.HOST,
'notification': n,
'param': param
})
except Exception as e:
logger.exception("Can't compile health check alert email")
subject = 'Server: {server} - {command}{param} status is {status}'.format(
server=n.server.get('name', None),
command=n.alert.get('command'),
param=param,
status=n.alert.get('status', "").upper()
)
result = {
'html_content': html_content,
'subject': subject
}
return result
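# Shape of the value each compile_* helper above returns (sketch with assumed
# values; the subject format mirrors the email tests elsewhere in this repo):
#
#   {
#       'subject': 'Server: web01 - CPU > 1% alert (Current value: 1.3%)',
#       'html_content': '<rendered alert template>',
#   }
#
# mail.sender passes these straight into EmailMultiAlternatives.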
| 6,121
|
Python
|
.py
| 175
| 27.245714
| 149
| 0.599249
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,172
|
sender.py
|
amonapp_amon/amon/apps/notifications/mail/sender.py
|
import logging
from django.template.loader import render_to_string
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from amon.apps.notifications.mail.compiler import (
compile_system_email,
compile_process_email,
compile_uptime_email,
compile_plugin_email,
compile_notsendingdata_email,
compile_health_check_email
)
logger = logging.getLogger(__name__)
def _send_email(subject=None, recipients_list=None, html_content=None):
for to in recipients_list:
msg = EmailMultiAlternatives(subject, '', settings.DEFAULT_FROM_EMAIL, [to])
msg.attach_alternative(html_content, "text/html")
msg.send()
def send_test_email(emails=None):
html_content = render_to_string('test.html')
subject = "Amon Test Email"
_send_email(subject=subject, recipients_list=emails, html_content=html_content)
def send_notification_email(notification=None, emails=None):
sent = False
rule_type = notification.alert.get('rule_type', 'system')
if len(emails) > 0:
compile_functions = {
'system': compile_system_email,
'global': compile_system_email,
'process': compile_process_email,
'process_global': compile_process_email,
'uptime': compile_uptime_email,
'plugin': compile_plugin_email,
'plugin_global': compile_plugin_email,
'notsendingdata': compile_notsendingdata_email,
'health_check': compile_health_check_email
}
message = None
        if rule_type in compile_functions:
try:
message = compile_functions[rule_type](notification=notification)
except Exception as e:
logger.exception('Can not generate {0} email notification'.format(rule_type))
if message:
_send_email(subject=message['subject'],
recipients_list=emails,
html_content=message['html_content'])
sent = True
return sent
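# Usage sketch (assumed call site, mirroring the tests in this repo):
#
#   from amon.apps.notifications.generator import generate_notifications
#
#   for n in generate_notifications():
#       send_notification_email(notification=n, emails=['ops@example.com'])
#
# send_notification_email returns True only when a recipient list is given,
# a compiler exists for the notification's rule_type and the template renders.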
| 2,043
|
Python
|
.py
| 49
| 33.469388
| 93
| 0.666667
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,173
|
emails_test.py
|
amonapp_amon/amon/apps/notifications/mail/tests/emails_test.py
|
from django.test.client import Client
from django.test import TestCase
from django.core import mail
from nose.tools import eq_
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.alerts.models import alerts_model, alerts_history_model
from amon.apps.servers.models import server_model
from amon.apps.processes.models import process_model
from amon.apps.alerts.alerter import (
server_alerter,
process_alerter,
uptime_alerter,
plugin_alerter,
health_check_alerter
)
from amon.apps.notifications.mail.sender import send_notification_email
from amon.apps.plugins.models import plugin_model
from amon.apps.healthchecks.models import health_checks_results_model
from amon.apps.notifications.generator import generate_notifications
from amon.apps.notifications.models import notifications_model
class TestAlertEmails(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.account_id = 1
self.c.login(username='foo@test.com', password='qwerty')
server_key = server_model.add('test', account_id=self.account_id)
self.server = server_model.get_server_by_key(server_key)
self.server_id = self.server['_id']
notifications_model.save(data={"email": "foo@test.com"}, provider_id="email")
notifications = notifications_model.get_all_formated()
self.notifications_list = [x['formated_id'] for x in notifications]
self.emails = [x['email'] for x in notifications]
self.process = process_model.get_or_create(server_id=self.server_id, name='testprocess')
self.process_id = self.process['_id']
def tearDown(self):
self.c.logout()
self.user.delete()
server_model.collection.remove()
process_model.collection.remove()
plugin_model.collection.remove()
plugin_model.gauge_collection.remove()
notifications_model.collection.remove()
def _cleanup(self):
alerts_history_model.collection.remove()
alerts_model.collection.remove()
mail.outbox = []
def test_global_emails(self):
self._cleanup()
global_alert = {
"above_below": "above",
"rule_type": "global",
"server": "all",
"account_id": self.account_id,
"period": 0,
"notifications": self.notifications_list
}
# CPU alert
cpu_alert = {**global_alert, 'metric': 'CPU', 'metric_value': 1, 'metric_type': "%"}
alerts_model.collection.insert(cpu_alert)
global_rules = alerts_model.get_global_alerts(account_id=self.account_id)
eq_(len(global_rules), 1)
data = {u'cpu': {u'system': u'1.30', u'idle': u'98.70', u'user': u'0.00', u'steal': u'0.00', u'nice': u'0.00'}}
server_alerter.check(data, self.server)
unsent_alerts = alerts_history_model.get_unsent(server_id=self.server_id)
eq_(unsent_alerts['data'].count(), 1)
notifications = generate_notifications()
for n in notifications:
send_notification_email(notification=n, emails=self.emails)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Server: test - CPU > 1% alert (Current value: 1.3%)')
self.assertEqual(mail.outbox[0].to, ['foo@test.com'])
self._cleanup()
def test_system_emails(self):
self._cleanup()
system_alert = {
"above_below": "above",
"rule_type": "system",
"server": self.server_id,
"account_id": self.account_id,
"period": 0,
"notifications": self.notifications_list
}
# CPU alert
cpu_alert = {**system_alert, 'metric': 'CPU', 'metric_value': 1, 'metric_type': "%"}
alerts_model.collection.insert(cpu_alert)
data = {u'cpu': {u'system': u'1.30', u'idle': u'98.70', u'user': u'0.00', u'steal': u'0.00', u'nice': u'0.00'}}
server_alerter.check(data, self.server)
unsent_alerts = alerts_history_model.get_unsent(server_id=self.server_id)
eq_(unsent_alerts['data'].count(), 1)
notifications = generate_notifications()
for n in notifications:
send_notification_email(notification=n, emails=self.emails)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Server: test - CPU > 1% alert (Current value: 1.3%)')
self.assertEqual(mail.outbox[0].to, ['foo@test.com'])
self._cleanup()
def test_process_emails(self):
self._cleanup()
process_alert = {
"above_below": "above",
"rule_type": "process",
"server": self.server_id,
"process": self.process_id,
"account_id": self.account_id,
"period": 0,
"notifications": self.notifications_list
}
# CPU alert
cpu_alert = {**process_alert, 'metric': 'CPU', 'metric_value': 1, 'metric_type': "%"}
alerts_model.collection.insert(cpu_alert)
data = {'data': [{'p': self.process_id, 'c': 2, 'm': 254.0}]}
process_alerter.check(data, self.server)
unsent_alerts = alerts_history_model.get_unsent()
eq_(unsent_alerts['data'].count(), 1)
notifications = generate_notifications()
for n in notifications:
send_notification_email(notification=n, emails=self.emails)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Server: test - testprocess/CPU > 1% alert (Current value: 2.0%)')
self.assertEqual(mail.outbox[0].to, ['foo@test.com'])
self._cleanup()
def test_plugin_emails(self):
self._cleanup()
plugin = plugin_model.get_or_create(server_id=self.server_id, name='testplugin')
gauge = plugin_model.get_or_create_gauge_by_name(plugin=plugin, name='gauge')
plugin_alert = {
"above_below": "above",
"rule_type": "plugin",
"server": self.server_id,
"gauge": gauge['_id'],
"plugin": plugin['_id'],
"account_id": self.account_id,
"key": "testkey",
"period": 0,
"metric_value": 5,
"notifications": self.notifications_list
}
alert_id = alerts_model.collection.insert(plugin_alert)
key_name = '{0}.testkey'.format(gauge['name'])
data = {'gauges': {'bla.test': 1, key_name: 6}}
plugin_alerter.check(data=data, plugin=plugin)
unsent_alerts = alerts_history_model.get_unsent()
for trigger in unsent_alerts['data']:
assert trigger['alert_id'] == alert_id
assert trigger['average_value'] == 6
eq_(unsent_alerts['data'].count(), 1)
notifications = generate_notifications()
for n in notifications:
send_notification_email(notification=n, emails=self.emails)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Server: test - testplugin.gauge.testkey > 5 (Current value: 6.0)')
self.assertEqual(mail.outbox[0].to, ['foo@test.com'])
self._cleanup()
def test_uptime_emails(self):
self._cleanup()
# GLOBAL ALERT
uptime_alert = {
"above_below": "above",
"rule_type": "uptime",
"server": self.server_id,
"process": self.process_id,
"account_id": self.account_id,
"period": 0,
"notifications": self.notifications_list
}
down_alert = {**uptime_alert, 'metric': 'Down', 'metric_value': 0}
alerts_model.collection.insert(down_alert)
data = {'data': []}
uptime_alerter.check(data, self.server)
unsent_alerts = alerts_history_model.get_unsent(server_id=self.server_id)
eq_(unsent_alerts['data'].count(), 1)
notifications = generate_notifications()
for n in notifications:
send_notification_email(notification=n, emails=self.emails)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Server: test / testprocess is Down')
self.assertEqual(mail.outbox[0].to, ['foo@test.com'])
self._cleanup()
def test_health_check_emails(self):
self._cleanup()
health_check_alert = {
"rule_type": "health_check",
"server": self.server_id,
"status": "critical",
"command": "check-http.rb",
"period": 0,
}
alert_id = alerts_model.collection.insert(health_check_alert)
data = [{
u'command': u'check-http.rb',
u'name': u'',
u'exit_code': 2,
}]
formated_check_data = health_checks_results_model.save(data=data, server=self.server)
health_check_alerter.check(data=formated_check_data, server=self.server)
unsent_alerts = alerts_history_model.get_unsent(server_id=self.server_id)
eq_(unsent_alerts['data'].count(), 1)
notifications = generate_notifications()
for n in notifications:
send_notification_email(notification=n, emails=self.emails)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].subject, 'Server: test - check-http.rb status is CRITICAL')
self.assertEqual(mail.outbox[0].to, ['foo@test.com'])
self._cleanup()
| 9,633
|
Python
|
.py
| 206
| 37.126214
| 120
| 0.617868
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,174
|
sender.py
|
amonapp_amon/amon/apps/notifications/pushover/sender.py
|
import requests
def send_pushover_notification(message=None, auth=None):
url = "https://api.pushover.net/1/messages.json"
data = {
'token': auth.get('application_api_key'),
'user': auth.get('user_key'),
'message': message
}
error = None
try:
r = requests.post(url, data=data, timeout=5)
except Exception as e:
error = e
return error
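# Usage sketch (assumed values; `auth` is the saved provider document):
#
#   auth = {'application_api_key': 'app-token', 'user_key': 'user-token'}
#   error = send_pushover_notification(message='Disk > 90% on web01', auth=auth)
#   if error:
#       print('Pushover delivery failed: {0}'.format(error))
#
# The function returns None on success and the caught exception otherwise.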
| 405
|
Python
|
.py
| 14
| 22.785714
| 56
| 0.617571
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,175
|
sender.py
|
amonapp_amon/amon/apps/notifications/victorops/sender.py
|
import requests
import json
def send_victorops_notification(message=None, auth=None):
sent = False
url = auth.get('rest_endpoint')
# {
# "message_type":"CRITICAL",
# "timestamp":"1383239337",
# "entity_id":"disk space/db01.mycompany.com",
# "state_message":"the disk is really really full"
# }
data = {'message_type': 'CRITICAL',
'entity_id': 'amon',
'state_message': message
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5)
except Exception as e:
error = e
return error
| 641
|
Python
|
.py
| 22
| 23.136364
| 58
| 0.591133
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,176
|
sender.py
|
amonapp_amon/amon/apps/notifications/hipchat/sender.py
|
import requests
import json
def send_hipchat_notification(message=None, auth=None):
sent = False
url = auth.get('url')
color = auth.get('color', 'gray')
data = {'message': message, "message_format": "text", "color": color, "notify": "true"}
headers = {
"content-type": "application/json"
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5, headers=headers)
except Exception as e:
error = e
return error
| 517
|
Python
|
.py
| 17
| 24.882353
| 91
| 0.631365
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,177
|
sender.py
|
amonapp_amon/amon/apps/notifications/telegram/sender.py
|
import requests
def send_telegram_notification(message=None, auth=None):
token = auth.get('token')
chat_id = auth.get('chat_id')
endpoint = '/sendMessage'
url = 'https://api.telegram.org/bot' + token + endpoint
data = {'text': message, 'chat_id': chat_id}
error = None
try:
r = requests.get(url, params=data).json()
except Exception as e:
error = e
return error
| 419
|
Python
|
.py
| 13
| 26.846154
| 59
| 0.638404
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,178
|
sender.py
|
amonapp_amon/amon/apps/notifications/opsgenie/sender.py
|
import requests
import json
from amon.apps.notifications.models import notifications_model
def send_opsgenie_notification(message=None, auth=None):
sent = False
url = "https://api.opsgenie.com/v2/alerts"
headers = {
'Authorization': 'GenieKey '+ auth.get('api_key'),
'Content-Type': 'application/json'
}
# Message is limited to 130 chars
data = {
'message': message,
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5, headers=headers)
except Exception as e:
error = e
return error
| 620
|
Python
|
.py
| 21
| 23.714286
| 69
| 0.648148
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,179
|
generator_test.py
|
amonapp_amon/amon/apps/notifications/tests/generator_test.py
|
from django.test import TestCase
from amon.apps.servers.models import server_model
from amon.apps.processes.models import process_model
from amon.apps.notifications.generator import generate_notifications, generate_message
from amon.apps.alerts.models import alerts_model, alerts_history_model
from amon.apps.alerts.alerter import server_alerter, process_alerter, uptime_alerter
class GeneratorTests(TestCase):
def setUp(self):
self.account_id = 1
server_key = server_model.add('testserver', account_id=self.account_id)
self.server = server_model.get_server_by_key(server_key)
self.server_id = self.server['_id']
self.process = process_model.get_or_create(server_id=self.server_id, name='testprocess')
self.process_id = self.process['_id']
def tearDown(self):
server_model.collection.remove()
process_model.collection.remove()
alerts_history_model.collection.remove()
alerts_model.collection.remove()
def generate_notifications_test(self):
# System alert
system_alert = {
"above_below": "above",
"rule_type": "system",
"server": self.server_id,
"account_id": self.account_id,
"period": 0,
}
# CPU alert
cpu_alert_dict = {**system_alert, 'metric': 'CPU', 'metric_value': 1, 'metric_type': "%"}
alerts_model.collection.insert(cpu_alert_dict)
for r in alerts_model.collection.find():
print(r)
data = {u'cpu': {u'system': u'1.30', u'idle': u'98.70', u'user': u'0.00', u'steal': u'0.00', u'nice': u'0.00'}}
server_alerter.check(data, self.server)
process_alert = {
"above_below": "above",
"rule_type": "process",
"server": self.server_id,
"process": self.process_id,
"account_id": self.account_id,
"period": 0,
}
cpu_alert = {**process_alert, 'metric': 'CPU', 'metric_value': 1, 'metric_type': "%"}
alerts_model.collection.insert(cpu_alert)
data = {'data': [{'p': self.process_id, 'c': 2, 'm': 254.0}] }
process_alerter.check(data, self.server)
uptime_alert = {
"above_below": "above",
"rule_type": "uptime",
"server": self.server_id,
"process": self.process_id,
"account_id": self.account_id,
"period": 0,
}
down_alert = {**uptime_alert, 'metric': 'Down', 'metric_value': 0}
alerts_model.collection.insert(down_alert)
data = {'data': []}
uptime_alerter.check(data, self.server)
result = generate_notifications()
assert len(result) == 3
# Assert notification dict
system_keys = ['alert', 'server', 'metadata', 'timezone', 'trigger', 'mute', 'global_mute']
process_keys = list(system_keys)
process_keys.append('process')
for r in result:
rule_type = r.alert['rule_type']
if rule_type in ['uptime', 'process']:
assert set(r.__dict__.keys()) == set(process_keys)
else:
assert set(r.__dict__.keys()) == set(system_keys)
for r in result:
message = generate_message(notification=r)
if r.alert['rule_type'] == 'process':
assert message == 'Server:testserver/testprocess CPU > 1% for 0 seconds (Current value: 2.0%)'
elif r.alert['rule_type'] == 'system':
assert message == 'Server:testserver CPU>1% for 0 seconds (Current value: 1.3%)'
elif r.alert['rule_type'] == 'uptime':
assert message == 'testprocess on testserver is Down'
| 3,776
|
Python
|
.py
| 79
| 37.189873
| 119
| 0.588542
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,180
|
views_test.py
|
amonapp_amon/amon/apps/notifications/tests/views_test.py
|
from django.test.client import Client
from django.urls import reverse
from django.test import TestCase
from nose.tools import *
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.notifications.models import notifications_model
class TestNotifications(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
notifications_model.collection.remove()
def _cleanup(self):
notifications_model.collection.remove()
def test_add_url(self):
self._cleanup()
url = reverse('notifications_add', kwargs={"provider_id": "pushover"})
response = self.c.post(url,{
'name': 'default',
'user_key': 'somekey',
'application_api_key': 'some'
})
self.assertRedirects(response, reverse('notifications_edit', kwargs={'provider_id': 'pushover'}))
assert notifications_model.collection.find().count() == 1
result = notifications_model.collection.find_one()
edit_url = reverse('notifications_edit', kwargs={"provider_id": "pushover", 'notification_id': result['_id']})
response = self.c.post(edit_url,{
'name': 'default',
'user_key': 'changed_user_key',
'application_api_key': 'changed_data'
})
self.assertRedirects(response, reverse('notifications_edit', kwargs={'provider_id': 'pushover', 'notification_id': result['_id']}))
assert notifications_model.collection.find().count() == 1
for r in notifications_model.collection.find():
assert r['application_api_key'] == 'changed_data'
assert r['user_key'] == 'changed_user_key'
| 1,904
|
Python
|
.py
| 40
| 39.3
| 139
| 0.649675
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,181
|
sender.py
|
amonapp_amon/amon/apps/notifications/webhooks/sender.py
|
import logging
import requests
import json
logger = logging.getLogger(__name__)
def _send_webhook(auth=None, data=None):
url = auth.get('url')
secret = auth.get('secret')
if secret:
data['secret'] = secret
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5)
except Exception as e:
logger.exception('Can not send webhook: {0}'.format(e))
error = e
return error
def generate_webhook_data(notification=None, message=None):
hook_values = {
'server': ['name'],
'custom_metric': ['name'],
'process': ['name'],
'plugin': ['name'],
'trigger': ['from', 'average_value', 'time'],
        'alert': ['rule_type', 'metric', 'period', 'metric_type', 'metric_value', 'above_below', 'key']
}
beautify_names = {
'trigger': {'time': 'to'}
}
values = notification.__dict__
hook_data = {}
for key, values_list in hook_values.items():
value = values.get(key, False)
name_dict = beautify_names.get(key, {})
if value:
hook_data[key] = {}
for inner_key in values_list:
new_key = name_dict.get(inner_key, inner_key) # Get or default
hook_data[key][new_key] = value.get(inner_key)
hook_data['message'] = message
return hook_data
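# Illustrative payload (assumed values) for a system CPU alert; keys whose
# attribute is missing on the notification are simply left out:
#
#   {
#       'server': {'name': 'web01'},
#       'alert': {'rule_type': 'system', 'metric': 'CPU', 'period': 0,
#                 'metric_type': '%', 'metric_value': 1,
#                 'above_below': 'above', 'key': None},
#       'trigger': {'from': 1514764800, 'average_value': 1.3, 'to': 1514764860},
#       'message': 'Server:web01 CPU>1% for 0 seconds (Current value: 1.3%)'
#   }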
def send_webhook_notification(notification=None, auth=None, message=None):
    sent = False
    if auth is not None and notification.mute is False:
        hook_data = generate_webhook_data(notification=notification, message=message)
        error = _send_webhook(auth=auth, data=hook_data)
        sent = error is None
    return sent
| 1,730
|
Python
|
.py
| 47
| 28.702128
| 102
| 0.607564
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,182
|
sender_test.py
|
amonapp_amon/amon/apps/notifications/webhooks/tests/sender_test.py
|
from django.test import TestCase
from nose.tools import eq_
from amon.apps.notifications.webhooks.sender import generate_webhook_data
from amon.utils import AmonStruct
class WebhookGeneratorTests(TestCase):
def generate_notifications_test(self):
notification = AmonStruct()
# Structure
hook_values = {
'server': ['name'],
'process': ['name'],
'message': ['test'],
'trigger': ['from', 'average_value', 'to'],
'alert': ['rule_type', 'metric', 'period','metric_type', 'metric_value', 'above_below', 'key']
}
notification.server = {'name': 'test'}
notification.process = {'name': 'process'}
notification.alert = {'rule_type': 'process', 'metric': 'CPU', 'period': 10, 'above_below': 'above', 'metric_value': "%"}
notification.metadata = {}
notification.timezone = 'UTC'
notification.trigger = {'from': 1, 'time': 10, 'average_value': 100}
notification.mute = False
result = generate_webhook_data(notification=notification)
assert set(result.keys()) == set(hook_values.keys())
for k, v in result.items():
if k != 'message':
assert set(hook_values[k]) == set(result[k].keys())
| 1,299
|
Python
|
.py
| 27
| 38.666667
| 129
| 0.602564
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,183
|
sender.py
|
amonapp_amon/amon/apps/notifications/pagerduty/sender.py
|
import requests
import json
def send_pagerduty_notification(message=None, auth=None):
sent = False
url = "https://events.pagerduty.com/generic/2010-04-15/create_event.json"
incident_key = auth.get('incident_key', 'amon')
# {
# "service_key": "servicekey",
# "incident_key": "disk/local",
# "event_type": "trigger",
# "description": "Disk > 80% for 15 minutes",
# "client": "local",
# "details": {
# "average_value": 63
# }
# }
data = {'service_key': auth.get('api_key'),
'incident_key': incident_key,
'event_type': 'trigger',
'description': message
}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5)
except Exception as e:
error = e
return error
| 884
|
Python
|
.py
| 28
| 24.892857
| 77
| 0.552913
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,184
|
sender.py
|
amonapp_amon/amon/apps/notifications/slack/sender.py
|
import requests
import json
def send_slack_notification(message=None, auth=None):
sent = False
url = auth.get('webhook_url')
data = {'text': message}
data = json.dumps(data)
error = None
try:
r = requests.post(url, data=data, timeout=5)
except Exception as e:
error = e
return error
| 340
|
Python
|
.py
| 13
| 20.769231
| 53
| 0.650943
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,185
|
models.py
|
amonapp_amon/amon/apps/bookmarks/models.py
|
from amon.apps.core.basemodel import BaseModel
class BookmarksModel(BaseModel):
def __init__(self):
super(BookmarksModel, self).__init__()
self.collection = self.mongo.get_collection('bookmarks')
def create(self, data=None):
result = self.insert(data)
self.collection.ensure_index([('type', self.desc)], background=True)
return result
bookmarks_model = BookmarksModel()
| 426
|
Python
|
.py
| 10
| 36
| 76
| 0.693627
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,186
|
urls.py
|
amonapp_amon/amon/apps/bookmarks/urls.py
|
from django.conf.urls import url
from amon.apps.bookmarks import views
urlpatterns = [
url(
r'^add$',
views.add,
name='bookmarks_add'
),
url(
r'^delete/(?P<bookmark_id>\w+)$',
views.delete,
name='bookmarks_delete'
),
]
| 290
|
Python
|
.py
| 14
| 14.642857
| 41
| 0.564576
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,187
|
forms.py
|
amonapp_amon/amon/apps/bookmarks/forms.py
|
from django import forms
from amon.apps.bookmarks.models import bookmarks_model
BOOKMARK_TYPES = [('server', 'Server'), ('metric', 'Metric')]
class BookMarkForm(forms.Form):
def __init__(self, *args, **kwargs):
super(BookMarkForm, self).__init__(*args, **kwargs)
name = forms.CharField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Bookmark Name', 'required': True}))
type = forms.ChoiceField(required=True, choices=BOOKMARK_TYPES, initial='server')
tags = forms.CharField(required=True)
def save(self):
data = self.cleaned_data
bookmark_type = data.get('type')
bookmarks_model.create(data)
return bookmark_type
| 704
|
Python
|
.py
| 14
| 43.714286
| 123
| 0.69494
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,188
|
views.py
|
amonapp_amon/amon/apps/bookmarks/views.py
|
from amon.apps.core.views import *
from amon.apps.bookmarks.models import bookmarks_model
from amon.apps.bookmarks.forms import BookMarkForm
@login_required
def add(request):
form_type = 'servers'
if request.method == 'POST':
form = BookMarkForm(request.POST)
if form.is_valid():
form_type = form.save()
messages.add_message(request, messages.INFO, 'Bookmark created')
else:
messages.add_message(request, messages.INFO, form.errors)
if form_type == 'server':
url = reverse('servers')
else:
url = reverse('metrics')
return redirect(url)
else:
return redirect(reverse('servers'))
@login_required
def delete(request, bookmark_id=None):
bookmark = bookmarks_model.get_by_id(bookmark_id)
bookmark_type = bookmark.get('type')
bookmarks_model.delete(bookmark_id)
if bookmark_type == 'server':
url = reverse('servers')
else:
url = reverse('metrics')
messages.add_message(request, messages.INFO, 'Bookmark deleted')
return redirect(url)
| 1,121
|
Python
|
.py
| 31
| 28.935484
| 76
| 0.66043
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,189
|
views_test.py
|
amonapp_amon/amon/apps/bookmarks/tests/views_test.py
|
from django.test.client import Client
from django.urls import reverse
from django.test import TestCase
from nose.tools import *
from django.contrib.auth import get_user_model
User = get_user_model()
from amon.apps.tags.models import tags_model, tag_groups_model
from amon.apps.bookmarks.models import bookmarks_model
class TestBookmarks(TestCase):
def setUp(self):
self.c = Client()
self.user = User.objects.create_user(password='qwerty', email='foo@test.com')
self.c.login(username='foo@test.com', password='qwerty')
def tearDown(self):
self.c.logout()
self.user.delete()
def _cleanup(self):
tags_model.collection.remove()
tag_groups_model.collection.remove()
bookmarks_model.collection.remove()
def add_delete_bookmark_test(self):
self._cleanup()
url = reverse('bookmarks_add')
tags = {'provider': 'digitalocean', 'credentials': 'production'}
tag_ids = [str(x) for x in tags_model.create_and_return_ids(tags)]
tag_ids_str = ",".join(tag_ids)
form_data = {'name': 'test', 'tags': tag_ids_str, 'type': 'server'}
response = self.c.post(url, form_data)
assert response.status_code == 302
assert bookmarks_model.collection.find().count() == 1
| 1,357
|
Python
|
.py
| 30
| 37
| 85
| 0.675911
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,190
|
models.py
|
amonapp_amon/amon/apps/organizations/models.py
|
from django.db import models
from django.utils import timezone
class Organization(models.Model):
name = models.CharField(max_length=255, unique=True)
date_created = models.DateTimeField(('date created'), default=timezone.now)
def __unicode__(self):
return u"Organization: {0}".format(self.name)
| 317
|
Python
|
.py
| 7
| 41.142857
| 79
| 0.743506
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,191
|
0001_initial.py
|
amonapp_amon/amon/apps/organizations/migrations/0001_initial.py
|
# Generated by Django 2.0.2 on 2018-02-19 20:48
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('date_created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date created')),
],
),
]
| 654
|
Python
|
.py
| 17
| 30.058824
| 119
| 0.62916
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,192
|
_font-stacks.scss
|
amonapp_amon/amon/static/sass/bourbon/addons/_font-stacks.scss
|
@charset "UTF-8";
/// Georgia font stack.
///
/// @type List
$georgia: "Georgia", "Cambria", "Times New Roman", "Times", serif;
/// Helvetica font stack.
///
/// @type List
$helvetica: "Helvetica Neue", "Helvetica", "Roboto", "Arial", sans-serif;
/// Lucida Grande font stack.
///
/// @type List
$lucida-grande: "Lucida Grande", "Tahoma", "Verdana", "Arial", sans-serif;
/// Monospace font stack.
///
/// @type List
$monospace: "Bitstream Vera Sans Mono", "Consolas", "Courier", monospace;
/// Verdana font stack.
///
/// @type List
$verdana: "Verdana", "Geneva", sans-serif;
| 586
|
Python
|
.tac
| 21
| 26.428571
| 74
| 0.664865
|
amonapp/amon
| 1,334
| 108
| 37
|
AGPL-3.0
|
9/5/2024, 5:09:37 PM (Europe/Amsterdam)
|
6,193
|
tasks.py
|
getpelican_pelican/tasks.py
|
import os
from pathlib import Path
from shutil import which
from invoke import task
PKG_NAME = "pelican"
PKG_PATH = Path(PKG_NAME)
DOCS_PORT = os.environ.get("DOCS_PORT", 8000)
BIN_DIR = "bin" if os.name != "nt" else "Scripts"
PTY = os.name != "nt"
ACTIVE_VENV = os.environ.get("VIRTUAL_ENV", None)
VENV_HOME = Path(os.environ.get("WORKON_HOME", "~/virtualenvs"))
VENV_PATH = Path(ACTIVE_VENV) if ACTIVE_VENV else (VENV_HOME / PKG_NAME)
VENV = str(VENV_PATH.expanduser())
VENV_BIN = Path(VENV) / Path(BIN_DIR)
TOOLS = ["pdm", "pre-commit", "psutil"]
PDM = which("pdm") or VENV_BIN / "pdm"
PRECOMMIT = which("pre-commit") or VENV_BIN / "pre-commit"
@task
def docbuild(c):
"""Build documentation"""
c.run(f"{VENV_BIN}/sphinx-build -W docs docs/_build", pty=PTY)
@task(docbuild)
def docserve(c):
"""Serve docs at http://localhost:$DOCS_PORT/ (default port is 8000)"""
from livereload import Server
server = Server()
server.watch("docs/conf.py", lambda: docbuild(c))
server.watch("CONTRIBUTING.rst", lambda: docbuild(c))
server.watch("docs/*.rst", lambda: docbuild(c))
server.serve(port=DOCS_PORT, root="docs/_build")
@task
def tests(c):
"""Run the test suite"""
c.run(f"{VENV_BIN}/pytest", pty=PTY)
@task
def coverage(c):
"""Generate code coverage of running the test suite."""
c.run(
f"{VENV_BIN}/pytest --cov=pelican --cov-report term-missing "
"--cov-fail-under 75",
pty=PTY,
)
c.run(f"{VENV_BIN}/coverage html", pty=PTY)
@task
def format(c, check=False, diff=False):
"""Run Ruff's auto-formatter, optionally with --check or --diff"""
check_flag, diff_flag = "", ""
if check:
check_flag = "--check"
if diff:
diff_flag = "--diff"
c.run(
f"{VENV_BIN}/ruff format {check_flag} {diff_flag} {PKG_PATH} tasks.py", pty=PTY
)
@task
def ruff(c, fix=False, diff=False):
"""Run Ruff to ensure code meets project standards."""
diff_flag, fix_flag = "", ""
if fix:
fix_flag = "--fix"
if diff:
diff_flag = "--diff"
c.run(f"{VENV_BIN}/ruff check {diff_flag} {fix_flag} .", pty=PTY)
@task
def lint(c, fix=False, diff=False):
"""Check code style via linting tools."""
ruff(c, fix=fix, diff=diff)
format(c, check=not fix, diff=diff)
@task
def tools(c):
"""Install tools in the virtual environment if not already on PATH"""
for tool in TOOLS:
if not which(tool):
c.run(f"{VENV_BIN}/python -m pip install {tool}", pty=PTY)
@task
def precommit(c):
"""Install pre-commit hooks to .git/hooks/pre-commit"""
c.run(f"{PRECOMMIT} install", pty=PTY)
@task
def setup(c):
c.run(f"{VENV_BIN}/python -m pip install -U pip", pty=PTY)
tools(c)
c.run(f"{PDM} install", pty=PTY)
precommit(c)
@task
def update_functional_tests(c):
"""Update the generated functional test output"""
c.run(
f"bash -c 'LC_ALL=en_US.utf8 pelican -o {PKG_PATH}/tests/output/custom/ \
-s samples/pelican.conf.py samples/content/'",
pty=PTY,
)
c.run(
f"bash -c 'LC_ALL=fr_FR.utf8 pelican -o {PKG_PATH}/tests/output/custom_locale/ \
-s samples/pelican.conf_FR.py samples/content/'",
pty=PTY,
)
c.run(
f"bash -c 'LC_ALL=en_US.utf8 pelican -o \
{PKG_PATH}/tests/output/basic/ samples/content/'",
pty=PTY,
)
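# Typical invocations (sketch, assuming the virtualenv layout above):
#   invoke setup        # upgrade pip, install tools, pdm install, pre-commit hooks
#   invoke lint --fix   # ruff check --fix followed by ruff format
#   invoke docserve     # live-reloading docs on http://localhost:8000/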
| 3,413
|
Python
|
.py
| 102
| 28.833333
| 88
| 0.636225
|
getpelican/pelican
| 12,478
| 1,806
| 72
|
AGPL-3.0
|
9/5/2024, 5:09:45 PM (Europe/Amsterdam)
|
6,194
|
pelican.conf_FR.py
|
getpelican_pelican/samples/pelican.conf_FR.py
|
AUTHOR = "Alexis Métaireau"
SITENAME = "Alexis' log"
SITEURL = "http://blog.notmyidea.org"
TIMEZONE = "Europe/Paris"
# can be useful in development, but set to False when you're ready to publish
RELATIVE_URLS = True
GITHUB_URL = "http://github.com/ametaireau/"
DISQUS_SITENAME = "blog-notmyidea"
PDF_GENERATOR = False
REVERSE_CATEGORY_ORDER = True
LOCALE = "fr_FR.UTF-8"
DEFAULT_PAGINATION = 4
DEFAULT_DATE = (2012, 3, 2, 14, 1, 1)
DEFAULT_DATE_FORMAT = "%d %B %Y"
ARTICLE_URL = "posts/{date:%Y}/{date:%B}/{date:%d}/{slug}/"
ARTICLE_SAVE_AS = ARTICLE_URL + "index.html"
FEED_ALL_RSS = "feeds/all.rss.xml"
CATEGORY_FEED_RSS = "feeds/{slug}.rss.xml"
LINKS = (
("Biologeek", "http://biologeek.org"),
("Filyb", "http://filyb.info/"),
("Libert-fr", "http://www.libert-fr.com"),
("N1k0", "http://prendreuncafe.com/blog/"),
("Tarek Ziadé", "http://ziade.org/blog"),
("Zubin Mithra", "http://zubin71.wordpress.com/"),
)
SOCIAL = (
("twitter", "http://twitter.com/ametaireau"),
("lastfm", "http://lastfm.com/user/akounet"),
("github", "http://github.com/ametaireau"),
)
# global metadata to all the contents
DEFAULT_METADATA = {"yeah": "it is"}
# path-specific metadata
EXTRA_PATH_METADATA = {
"extra/robots.txt": {"path": "robots.txt"},
}
# static paths will be copied without parsing their contents
STATIC_PATHS = [
"pictures",
"extra/robots.txt",
]
# custom page generated with a jinja2 template
TEMPLATE_PAGES = {"pages/jinja2_template.html": "jinja2_template.html"}
# code blocks with line numbers
PYGMENTS_RST_OPTIONS = {"linenos": "table"}
# foobar will not be used, because it's not in caps. All configuration keys
# have to be in caps
foobar = "barbaz"
| 1,715
|
Python
|
.py
| 49
| 32.734694
| 77
| 0.691283
|
getpelican/pelican
| 12,478
| 1,806
| 72
|
AGPL-3.0
|
9/5/2024, 5:09:45 PM (Europe/Amsterdam)
|
6,195
|
pelican.conf.py
|
getpelican_pelican/samples/pelican.conf.py
|
AUTHOR = "Alexis Métaireau"
SITENAME = "Alexis' log"
SITESUBTITLE = "A personal blog."
SITEURL = "http://blog.notmyidea.org"
TIMEZONE = "Europe/Paris"
# can be useful in development, but set to False when you're ready to publish
RELATIVE_URLS = True
GITHUB_URL = "http://github.com/ametaireau/"
DISQUS_SITENAME = "blog-notmyidea"
REVERSE_CATEGORY_ORDER = True
LOCALE = "C"
DEFAULT_PAGINATION = 4
DEFAULT_DATE = (2012, 3, 2, 14, 1, 1)
FEED_ALL_RSS = "feeds/all.rss.xml"
CATEGORY_FEED_RSS = "feeds/{slug}.rss.xml"
LINKS = (
("Biologeek", "http://biologeek.org"),
("Filyb", "http://filyb.info/"),
("Libert-fr", "http://www.libert-fr.com"),
("N1k0", "http://prendreuncafe.com/blog/"),
("Tarek Ziadé", "http://ziade.org/blog"),
("Zubin Mithra", "http://zubin71.wordpress.com/"),
)
SOCIAL = (
("twitter", "http://twitter.com/ametaireau"),
("lastfm", "http://lastfm.com/user/akounet"),
("github", "http://github.com/ametaireau"),
)
# global metadata to all the contents
DEFAULT_METADATA = {"yeah": "it is"}
# path-specific metadata
EXTRA_PATH_METADATA = {
"extra/robots.txt": {"path": "robots.txt"},
}
# static paths will be copied without parsing their contents
STATIC_PATHS = [
"images",
"extra/robots.txt",
]
# custom page generated with a jinja2 template
TEMPLATE_PAGES = {"pages/jinja2_template.html": "jinja2_template.html"}
# there is no other HTML content
READERS = {"html": None}
# code blocks with line numbers
PYGMENTS_RST_OPTIONS = {"linenos": "table"}
# foobar will not be used, because it's not in caps. All configuration keys
# have to be in caps
foobar = "barbaz"
| 1,635
|
Python
|
.py
| 48
| 31.770833
| 77
| 0.696122
|
getpelican/pelican
| 12,478
| 1,806
| 72
|
AGPL-3.0
|
9/5/2024, 5:09:45 PM (Europe/Amsterdam)
|
6,196
|
conf.py
|
getpelican_pelican/docs/conf.py
|
import datetime
import os
import sys
if sys.version_info >= (3, 11):
import tomllib
else:
import tomli as tomllib
sys.path.append(os.path.abspath(os.pardir))
with open("../pyproject.toml", "rb") as f:
project_data = tomllib.load(f).get("project")
if project_data is None:
raise KeyError("project data is not found")
# -- General configuration ----------------------------------------------------
templates_path = ["_templates"]
locale_dirs = ["locale/"]
gettext_compact = False
gettext_uuid = True
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.extlinks",
"sphinxext.opengraph",
]
source_suffix = ".rst"
master_doc = "index"
project = project_data.get("name").upper()
year = datetime.datetime.now().date().year
copyright = f"2010–{year}" # noqa: RUF001
exclude_patterns = ["_build"]
release = project_data.get("version")
version = ".".join(release.split(".")[:1])
last_stable = project_data.get("version")
rst_prolog = f"""
.. |last_stable| replace:: :pelican-doc:`{last_stable}`
.. |min_python| replace:: {project_data.get('requires-python').split(",")[0]}
"""
extlinks = {"pelican-doc": ("https://docs.getpelican.com/en/latest/%s.html", "%s")}
# -- Options for HTML output --------------------------------------------------
html_theme = "furo"
html_title = f"<strong>{project}</strong> <i>{release}</i>"
html_static_path = ["_static"]
html_theme_options = {
"light_logo": "pelican-logo.svg",
"dark_logo": "pelican-logo.svg",
"navigation_with_keys": True,
}
# Output file base name for HTML help builder.
htmlhelp_basename = "Pelicandoc"
html_use_smartypants = True
# If false, no module index is generated.
html_use_modindex = False
# If false, no index is generated.
html_use_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
def setup(app):
# overrides for wide tables in RTD theme
app.add_css_file("theme_overrides.css") # path relative to _static
# -- Options for LaTeX output -------------------------------------------------
latex_documents = [
("index", "Pelican.tex", "Pelican Documentation", "Justin Mayer", "manual"),
]
# -- Options for manual page output -------------------------------------------
man_pages = [
("index", "pelican", "pelican documentation", ["Justin Mayer"], 1),
(
"pelican-themes",
"pelican-themes",
"A theme manager for Pelican",
["Mickaël Raybaud"],
1,
),
(
"themes",
"pelican-theming",
"How to create themes for Pelican",
["The Pelican contributors"],
1,
),
]
| 2,636
|
Python
|
.py
| 79
| 30.050633
| 83
| 0.619179
|
getpelican/pelican
| 12,478
| 1,806
| 72
|
AGPL-3.0
|
9/5/2024, 5:09:45 PM (Europe/Amsterdam)
|
6,197
|
signals.py
|
getpelican_pelican/pelican/signals.py
|
raise ImportError(
"Importing from `pelican.signals` is deprecated. "
"Use `from pelican import signals` or `import pelican.plugins.signals` instead."
)
| 161
|
Python
|
.py
| 4
| 37.25
| 84
| 0.751592
|
getpelican/pelican
| 12,478
| 1,806
| 72
|
AGPL-3.0
|
9/5/2024, 5:09:45 PM (Europe/Amsterdam)
|
6,198
|
writers.py
|
getpelican_pelican/pelican/writers.py
|
import logging
import os
from posixpath import join as posix_join
from urllib.parse import urljoin
from feedgenerator import Atom1Feed, Rss201rev2Feed, get_tag_uri
from markupsafe import Markup
from pelican.paginator import Paginator
from pelican.plugins import signals
from pelican.utils import (
get_relative_path,
path_to_url,
sanitised_join,
set_date_tzinfo,
)
logger = logging.getLogger(__name__)
class Writer:
def __init__(self, output_path, settings=None):
self.output_path = output_path
self.reminder = {}
self.settings = settings or {}
self._written_files = set()
self._overridden_files = set()
# See Content._link_replacer for details
if self.settings.get("RELATIVE_URLS"):
self.urljoiner = posix_join
else:
self.urljoiner = lambda base, url: urljoin(
base if base.endswith("/") else base + "/", str(url)
)
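        # Joining behaviour sketch (illustrative values):
        #   RELATIVE_URLS on:  posix_join('..', 'feeds/all.atom.xml')
        #                      -> '../feeds/all.atom.xml'
        #   RELATIVE_URLS off: urljoin('https://example.com/blog/', 'feeds/all.atom.xml')
        #                      -> 'https://example.com/blog/feeds/all.atom.xml'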
def _create_new_feed(self, feed_type, feed_title, context):
feed_class = Rss201rev2Feed if feed_type == "rss" else Atom1Feed
if feed_title:
feed_title = context["SITENAME"] + " - " + feed_title
else:
feed_title = context["SITENAME"]
return feed_class(
title=Markup(feed_title).striptags(),
link=(self.site_url + "/"),
feed_url=self.feed_url,
description=context.get("SITESUBTITLE", ""),
subtitle=context.get("SITESUBTITLE", None),
)
def _add_item_to_the_feed(self, feed, item):
title = Markup(item.title).striptags()
link = self.urljoiner(self.site_url, item.url)
if self.settings["FEED_APPEND_REF"]:
link = link + "?ref=feed"
if isinstance(feed, Rss201rev2Feed):
# RSS feeds use a single tag called 'description' for both the full
# content and the summary
content = None
if self.settings.get("RSS_FEED_SUMMARY_ONLY"):
description = item.summary
else:
description = item.get_content(self.site_url)
else:
# Atom feeds have two different tags for full content (called
# 'content' by feedgenerator) and summary (called 'description' by
# feedgenerator).
#
            # It does not make sense to have the summary be the
            # exact same thing as the full content. If we detect that
            # they are, we just remove the summary.
content = item.get_content(self.site_url)
description = item.summary
if description == content:
description = None
categories = []
if hasattr(item, "category"):
categories.append(item.category)
if hasattr(item, "tags"):
categories.extend(item.tags)
feed.add_item(
title=title,
link=link,
unique_id=get_tag_uri(link, item.date),
description=description,
content=content,
categories=categories or None,
author_name=getattr(item, "author", ""),
pubdate=set_date_tzinfo(item.date, self.settings.get("TIMEZONE", None)),
updateddate=set_date_tzinfo(
item.modified, self.settings.get("TIMEZONE", None)
)
if hasattr(item, "modified")
else None,
)
def _open_w(self, filename, encoding, override=False):
"""Open a file to write some content to it.
Exit if we have already written to that file, unless one (and no more
than one) of the writes has the override parameter set to True.
"""
if filename in self._overridden_files:
if override:
raise RuntimeError(f"File {filename} is set to be overridden twice")
logger.info("Skipping %s", filename)
filename = os.devnull
elif filename in self._written_files:
if override:
logger.info("Overwriting %s", filename)
else:
raise RuntimeError(f"File {filename} is to be overwritten")
if override:
self._overridden_files.add(filename)
self._written_files.add(filename)
return open(filename, "w", encoding=encoding)
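    # Override bookkeeping sketch (not in the original source):
    #   w._open_w('index.html', 'utf-8')                 # first write, normal
    #   w._open_w('index.html', 'utf-8', override=True)  # logs "Overwriting", wins
    #   w._open_w('index.html', 'utf-8')                 # redirected to os.devnull
    # Exactly one override is allowed; a second override raises RuntimeError.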
def write_feed(
self,
elements,
context,
path=None,
url=None,
feed_type="atom",
override_output=False,
feed_title=None,
):
"""Generate a feed with the list of articles provided
Return the feed. If no path or output_path is specified, just
return the feed object.
:param elements: the articles to put on the feed.
:param context: the context to get the feed metadata.
:param path: the path to output.
:param url: the publicly visible feed URL; if None, path is used
instead
:param feed_type: the feed type to use (atom or rss)
:param override_output: boolean telling if we can override previous
output with the same name (and if next files written with the same
name should be skipped to keep that one)
        :param feed_title: the title of the feed.
"""
self.site_url = context.get("SITEURL", path_to_url(get_relative_path(path)))
self.feed_domain = context.get("FEED_DOMAIN")
self.feed_url = self.urljoiner(self.feed_domain, url or path)
feed = self._create_new_feed(feed_type, feed_title, context)
# FEED_MAX_ITEMS = None means [:None] to get every element
for element in elements[: self.settings["FEED_MAX_ITEMS"]]:
self._add_item_to_the_feed(feed, element)
signals.feed_generated.send(context, feed=feed)
if path:
complete_path = sanitised_join(self.output_path, path)
try:
os.makedirs(os.path.dirname(complete_path))
except Exception:
pass
with self._open_w(complete_path, "utf-8", override_output) as fp:
feed.write(fp, "utf-8")
logger.info("Writing %s", complete_path)
signals.feed_written.send(complete_path, context=context, feed=feed)
return feed
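    # Usage sketch (assumed call, not part of the original file):
    #   writer.write_feed(articles, context, path='feeds/all.atom.xml',
    #                     feed_type='atom')
    # writes the Atom feed under output_path and returns the feed object;
    # without a path the feed is only built and returned.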
def write_file(
self,
name,
template,
context,
relative_urls=False,
paginated=None,
template_name=None,
override_output=False,
url=None,
**kwargs,
):
"""Render the template and write the file.
:param name: name of the file to output
:param template: template to use to generate the content
:param context: dict to pass to the templates.
:param relative_urls: use relative urls or absolutes ones
:param paginated: dict of article list to paginate - must have the
same length (same list in different orders)
:param template_name: the template name, for pagination
:param override_output: boolean telling if we can override previous
output with the same name (and if next files written with the same
name should be skipped to keep that one)
:param url: url of the file (needed by the paginator)
:param **kwargs: additional variables to pass to the templates
"""
if name is False or name == "":
return
elif not name:
# other stuff, just return for now
return
def _write_file(template, localcontext, output_path, name, override):
"""Render the template write the file."""
# set localsiteurl for context so that Contents can adjust links
if localcontext["localsiteurl"]:
context["localsiteurl"] = localcontext["localsiteurl"]
output = template.render(localcontext)
path = sanitised_join(output_path, name)
try:
os.makedirs(os.path.dirname(path))
except Exception:
pass
with self._open_w(path, "utf-8", override=override) as f:
f.write(output)
logger.info("Writing %s", path)
# Send a signal to say we're writing a file with some specific
# local context.
signals.content_written.send(path, context=localcontext)
def _get_localcontext(context, name, kwargs, relative_urls):
localcontext = context.copy()
localcontext["localsiteurl"] = localcontext.get("localsiteurl", None)
if relative_urls:
relative_url = path_to_url(get_relative_path(name))
localcontext["SITEURL"] = relative_url
localcontext["localsiteurl"] = relative_url
localcontext["output_file"] = name
localcontext.update(kwargs)
return localcontext
if paginated is None:
paginated = {
key: val for key, val in kwargs.items() if key in {"articles", "dates"}
}
# pagination
if paginated and template_name in self.settings["PAGINATED_TEMPLATES"]:
# pagination needed
per_page = (
self.settings["PAGINATED_TEMPLATES"][template_name]
or self.settings["DEFAULT_PAGINATION"]
)
# init paginators
paginators = {
key: Paginator(name, url, val, self.settings, per_page)
for key, val in paginated.items()
}
            # generate the pages and write them
for page_num in range(next(iter(paginators.values())).num_pages):
paginated_kwargs = kwargs.copy()
for key in paginators.keys():
paginator = paginators[key]
previous_page = paginator.page(page_num) if page_num > 0 else None
page = paginator.page(page_num + 1)
next_page = (
paginator.page(page_num + 2)
if page_num + 1 < paginator.num_pages
else None
)
paginated_kwargs.update(
{
f"{key}_paginator": paginator,
f"{key}_page": page,
f"{key}_previous_page": previous_page,
f"{key}_next_page": next_page,
}
)
localcontext = _get_localcontext(
context, page.save_as, paginated_kwargs, relative_urls
)
_write_file(
template,
localcontext,
self.output_path,
page.save_as,
override_output,
)
else:
# no pagination
localcontext = _get_localcontext(context, name, kwargs, relative_urls)
_write_file(template, localcontext, self.output_path, name, override_output)
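# Hedged usage sketch (not part of the original file): generators typically
# call Writer.write_file roughly like this; `writer`, `template`, `context`,
# `articles`, and `dates` are assumed to come from the caller, and the
# concrete names/values are illustrative only:
#
#     writer.write_file("archives.html", template, context,
#                       relative_urls=False,
#                       paginated={"articles": articles},
#                       template_name="archives",
#                       url="archives.html",
#                       articles=articles, dates=dates)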
| 11,122 | Python | .py | 257 | 31.093385 | 88 | 0.575136 | getpelican/pelican | 12,478 | 1,806 | 72 | AGPL-3.0 | 9/5/2024, 5:09:45 PM (Europe/Amsterdam) |
| 6,199 | urlwrappers.py | getpelican_pelican/pelican/urlwrappers.py |
import functools
import logging
import os
import pathlib
from pelican.utils import slugify
logger = logging.getLogger(__name__)
@functools.total_ordering
class URLWrapper:
def __init__(self, name, settings):
self.settings = settings
self._name = name
self._slug = None
self._slug_from_name = True
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._name = name
# if slug wasn't explicitly set, it needs to be regenerated from name
# so, changing name should reset slug for slugification
if self._slug_from_name:
self._slug = None
@property
def slug(self):
if self._slug is None:
class_key = f"{self.__class__.__name__.upper()}_REGEX_SUBSTITUTIONS"
regex_subs = self.settings.get(
class_key, self.settings.get("SLUG_REGEX_SUBSTITUTIONS", [])
)
preserve_case = self.settings.get("SLUGIFY_PRESERVE_CASE", False)
self._slug = slugify(
self.name,
regex_subs=regex_subs,
preserve_case=preserve_case,
use_unicode=self.settings.get("SLUGIFY_USE_UNICODE", False),
)
return self._slug
@slug.setter
def slug(self, slug):
        # if slug is explicitly set, changing name won't alter slug
self._slug_from_name = False
self._slug = slug
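    # Hedged illustration (not part of the original file) of the name/slug
    # interaction above; `settings` is assumed to carry Pelican's usual slug
    # substitution options, and the slug values shown are approximate:
    #
    #     w = URLWrapper("First Name", settings)
    #     w.slug              # derived from the name, e.g. "first-name"
    #     w.slug = "custom"   # explicit slug: renaming no longer resets it
    #     w.name = "Other"
    #     w.slug              # still "custom"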
def as_dict(self):
d = self.__dict__
d["name"] = self.name
d["slug"] = self.slug
return d
def __hash__(self):
return hash(self.slug)
def _normalize_key(self, key):
class_key = f"{self.__class__.__name__.upper()}_REGEX_SUBSTITUTIONS"
regex_subs = self.settings.get(
class_key, self.settings.get("SLUG_REGEX_SUBSTITUTIONS", [])
)
use_unicode = self.settings.get("SLUGIFY_USE_UNICODE", False)
preserve_case = self.settings.get("SLUGIFY_PRESERVE_CASE", False)
return slugify(
key,
regex_subs=regex_subs,
preserve_case=preserve_case,
use_unicode=use_unicode,
)
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.slug == other.slug
if isinstance(other, str):
return self.slug == self._normalize_key(other)
return False
def __ne__(self, other):
if isinstance(other, self.__class__):
return self.slug != other.slug
if isinstance(other, str):
return self.slug != self._normalize_key(other)
return True
def __lt__(self, other):
if isinstance(other, self.__class__):
return self.slug < other.slug
if isinstance(other, str):
return self.slug < self._normalize_key(other)
return False
def __str__(self):
return self.name
def __repr__(self):
return f"<{type(self).__name__} {self._name!r}>"
def _from_settings(self, key, get_page_name=False):
"""Returns URL information as defined in settings.
        When get_page_name=True, returns the URL without anything after
        {slug}; e.g. if CATEGORY_URL="cat/{slug}.html" is in settings, this
        returns "cat/{slug}". Useful for pagination.
"""
setting = f"{self.__class__.__name__.upper()}_{key}"
value = self.settings[setting]
if isinstance(value, pathlib.Path):
value = str(value)
if not isinstance(value, str):
logger.warning("%s is set to %s", setting, value)
return value
elif get_page_name:
return os.path.splitext(value)[0].format(**self.as_dict())
else:
return value.format(**self.as_dict())
page_name = property(
functools.partial(_from_settings, key="URL", get_page_name=True)
)
url = property(functools.partial(_from_settings, key="URL"))
save_as = property(functools.partial(_from_settings, key="SAVE_AS"))
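# Hedged example (not part of the original file) of how the page_name/url/
# save_as properties above resolve against settings; it assumes Pelican's
# default SLUG_REGEX_SUBSTITUTIONS are also present, and the values shown
# are approximate:
#
#     settings = {..., "CATEGORY_URL": "category/{slug}.html",
#                 "CATEGORY_SAVE_AS": "category/{slug}.html"}
#     cat = Category("My Category", settings)
#     cat.url        # -> "category/my-category.html"
#     cat.save_as    # -> "category/my-category.html"
#     cat.page_name  # -> "category/my-category" (extension stripped)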
class Category(URLWrapper):
pass
class Tag(URLWrapper):
def __init__(self, name, *args, **kwargs):
super().__init__(name.strip(), *args, **kwargs)
class Author(URLWrapper):
pass
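# Hedged example (not part of the original file) of the slug-based equality
# implemented in URLWrapper; `settings` is assumed to carry Pelican's usual
# slug substitution options:
#
#     t1 = Tag("  Python  ", settings)   # Tag strips surrounding whitespace
#     t2 = Tag("python", settings)
#     t1 == t2              # True: comparison goes through the slug
#     t1 == "Python"        # True: plain strings are normalized the same way
#     hash(t1) == hash(t2)  # True: __hash__ is also slug-based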
| 4,260 | Python | .py | 114 | 28.631579 | 80 | 0.592233 | getpelican/pelican | 12,478 | 1,806 | 72 | AGPL-3.0 | 9/5/2024, 5:09:45 PM (Europe/Amsterdam) |