| repo_name (string, 6–97 chars) | path (string, 3–341 chars) | text (string, 8–1.02M chars) |
|---|---|---|
murny/jupiter
|
app/helpers/items/draft_helper.rb
|
module Items::DraftHelper
def progress_bar_step_class(wizard_step, draft)
if draft.uncompleted_step?(draft.class.wizard_steps, wizard_step)
'disabled'
elsif wizard_step == step
'active'
else
'visted'
end
end
def header
if @draft.is_a? DraftItem
@is_edit ? t('items.draft.header_edit') : t('items.draft.header')
else
@is_edit ? t('admin.theses.draft.header_edit') : t('admin.theses.draft.header')
end
end
def progress_bar_percentage
((step_index(step).to_f / wizard_steps.size) * 100).to_i
end
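# Worked example for progress_bar_percentage above (illustrative values only):
# with 4 wizard steps and the current step being the second one,
# ((2.0 / 4) * 100).to_i => 50, so the progress bar is rendered at 50%.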
def progress_bar_text
t('items.draft.progress_bar_text', step_index: step_index(step), total_steps: wizard_steps.size)
end
def step_index(wizard_step)
(wizard_steps.index(wizard_step) + 1)
end
def license_accordions
{ most_commonly_used: [:attribution_non_commercial],
other_creative_commons_licenses: [
:attribution,
:attribution_non_commercial_no_derivatives,
:attribution_non_commercial_share_alike,
:attribution_no_derivatives,
:attribution_share_alike,
:cco_universal
],
public_domain: [:public_domain_mark],
link_to_another_license: [:license_text] }
end
def license_accordion_open?(accordion, license, truthy_value = 'true', falsey_value = 'false')
if license_accordions[accordion].include?(license.to_sym)
truthy_value
else
falsey_value
end
end
end
|
murny/jupiter
|
test/services/doi_service_test.rb
|
require 'test_helper'
class DoiServiceTest < ActiveSupport::TestCase
include ActiveJob::TestHelper
EXAMPLE_DOI = 'doi:10.21967/fk2-jaje-4d53'.freeze
test 'DOI state transitions' do
assert_no_enqueued_jobs
Rails.application.secrets.doi_minting_enabled = true
community = Community.new_locked_ldp_object(title: 'Community', owner: 1,
visibility: JupiterCore::VISIBILITY_PUBLIC)
community.unlock_and_fetch_ldp_object(&:save!)
collection = Collection.new_locked_ldp_object(title: 'Collection', owner: 1,
visibility: JupiterCore::VISIBILITY_PUBLIC,
community_id: community.id)
collection.unlock_and_fetch_ldp_object(&:save!)
item = Item.new_locked_ldp_object(title: 'Test Title', owner: 1, visibility: JupiterCore::VISIBILITY_PUBLIC,
created: '2017-02-02',
languages: [CONTROLLED_VOCABULARIES[:language].english],
creators: ['<NAME>'],
subject: ['Things'],
license: CONTROLLED_VOCABULARIES[:license].attribution_4_0_international,
item_type: CONTROLLED_VOCABULARIES[:item_type].book)
item.unlock_and_fetch_ldp_object do |unlocked_item|
unlocked_item.add_to_path(community.id, collection.id)
unlocked_item.save!
end
assert_nil item.doi
assert_enqueued_jobs 1, only: DOICreateJob
clear_enqueued_jobs
VCR.use_cassette('ezid_minting', erb: { id: item.id }, record: :none) do
assert_equal 'unminted', item.doi_state.aasm_state
ezid_identifer = DOIService.new(item).create
assert_not_nil ezid_identifer
assert_equal EXAMPLE_DOI, ezid_identifer.id
assert_equal 'University of Alberta Libraries', ezid_identifer.datacite_publisher
assert_equal 'Test Title', ezid_identifer.datacite_title
assert_equal 'Text/Book', ezid_identifer.datacite_resourcetype
assert_equal '2017', ezid_identifer.datacite_publicationyear
assert_equal Ezid::Status::PUBLIC, ezid_identifer.status
assert_equal 'yes', ezid_identifer.export
assert_not_nil item.doi
assert_equal 'available', item.doi_state.aasm_state
end
VCR.use_cassette('ezid_updating', erb: { id: item.id }, record: :none) do
assert_no_enqueued_jobs
item.unlock_and_fetch_ldp_object do |uo|
uo.title = 'Different Title'
uo.save!
end
assert_enqueued_jobs 1, only: DOIUpdateJob
clear_enqueued_jobs
ezid_identifer = DOIService.new(item).update
assert_not_nil ezid_identifer
assert_equal EXAMPLE_DOI, ezid_identifer.id
assert_equal Ezid::Status::PUBLIC, ezid_identifer.status
assert_equal 'Different Title', ezid_identifer.datacite_title
assert_equal 'yes', ezid_identifer.export
assert_equal 'available', item.doi_state.aasm_state
end
VCR.use_cassette('ezid_updating_unavailable', erb: { id: item.id }, record: :none) do
assert_no_enqueued_jobs
item.unlock_and_fetch_ldp_object do |uo|
uo.visibility = JupiterCore::VISIBILITY_PRIVATE
uo.save!
end
assert_enqueued_jobs 1, only: DOIUpdateJob
clear_enqueued_jobs
ezid_identifer = DOIService.new(item).update
assert_not_nil ezid_identifer
assert_equal EXAMPLE_DOI, ezid_identifer.id
# TODO: will be fixed by cheetoh release 0.10.2 by 'bug on concatenating reason' commit
# see https://github.com/datacite/cheetoh/commit/103699867478d5086a76bfe602efe21be02f2994#diff-4a07abe40929a2b2d94ac79e73c5a0a1
assert_equal 'unavailable | unavailable | not publicly released', ezid_identifer.status
assert_equal 'not_available', item.doi_state.aasm_state
end
VCR.use_cassette('ezid_removal', erb: { id: item.id }, record: :none, allow_unused_http_interactions: false) do
assert_equal 0, Sidekiq::Worker.jobs.size
item.unlock_and_fetch_ldp_object(&:destroy)
assert_enqueued_jobs 1, only: DOIRemoveJob
clear_enqueued_jobs
ezid_identifer = DOIService.remove(item.doi)
assert_not_nil ezid_identifer
assert_equal EXAMPLE_DOI, ezid_identifer.id
# TODO: will be fixed by cheetoh release 0.10.2 bug on concatenating reason
# see https://github.com/datacite/cheetoh/commit/103699867478d5086a76bfe602efe21be02f2994#diff-4a07abe40929a2b2d94ac79e73c5a0a1
assert_equal 'unavailable | unavailable | withdrawn', ezid_identifer.status
assert_equal 'no', ezid_identifer.export
end
Rails.application.secrets.doi_minting_enabled = false
end
end
|
murny/jupiter
|
app/models/thesis.rb
|
class Thesis < JupiterCore::LockedLdpObject
include ObjectProperties
include ItemProperties
include GlobalID::Identification
ldp_object_includes Hydra::Works::WorkBehavior
# Dublin Core attributes
has_attribute :abstract, ::RDF::Vocab::DC.abstract, type: :text, solrize_for: :search
# Note: language is single-valued for Thesis, but languages is multi-valued for Item
# See below for faceting
has_attribute :language, ::RDF::Vocab::DC.language, solrize_for: :search
has_attribute :date_accepted, ::RDF::Vocab::DC.dateAccepted, type: :date, solrize_for: :exact_match
has_attribute :date_submitted, ::RDF::Vocab::DC.dateSubmitted, type: :date, solrize_for: :exact_match
# BIBO
has_attribute :degree, ::RDF::Vocab::BIBO.degree, solrize_for: :exact_match
# SWRC
has_attribute :institution, TERMS[:swrc].institution, solrize_for: :exact_match
# UAL attributes
# This one is faceted in `all_contributors`, along with the Item creators/contributors
has_attribute :dissertant, TERMS[:ual].dissertant, solrize_for: [:search, :sort]
has_attribute :graduation_date, TERMS[:ual].graduation_date, solrize_for: [:search, :sort]
has_attribute :thesis_level, TERMS[:ual].thesis_level, solrize_for: :exact_match
has_attribute :proquest, TERMS[:ual].proquest, solrize_for: :exact_match
has_attribute :unicorn, TERMS[:ual].unicorn, solrize_for: :exact_match
has_attribute :specialization, TERMS[:ual].specialization, solrize_for: :search
has_attribute :departments, TERMS[:ual].department_list, type: :json_array, solrize_for: [:search]
has_attribute :supervisors, TERMS[:ual].supervisor_list, type: :json_array, solrize_for: [:search]
has_multival_attribute :committee_members, TERMS[:ual].committee_member, solrize_for: :exact_match
has_multival_attribute :unordered_departments, TERMS[:ual].department, solrize_for: :search
has_multival_attribute :unordered_supervisors, TERMS[:ual].supervisor, solrize_for: :exact_match
# This gets mixed with the item types for `Item`
additional_search_index :item_type_with_status,
solrize_for: :facet,
as: -> { item_type_with_status_code }
# Dissertants are indexed with the Item creators/contributors
additional_search_index :all_contributors, solrize_for: :facet, as: -> { [dissertant] }
# Index subjects with Item subjects (topical, temporal, etc).
additional_search_index :all_subjects, solrize_for: :facet, as: -> { subject }
# Making `language` consistent with Item `languages`
additional_search_index :languages,
solrize_for: :facet,
as: -> { [language] }
# Present a consistent interface with Item#item_type_with_status_code
def item_type_with_status_code
:thesis
end
def self.from_draft(draft_thesis)
thesis = Thesis.find(draft_thesis.uuid) if draft_thesis.uuid.present?
thesis ||= Thesis.new_locked_ldp_object
thesis.unlock_and_fetch_ldp_object do |unlocked_obj|
unlocked_obj.owner = draft_thesis.user_id if unlocked_obj.owner.blank?
unlocked_obj.title = draft_thesis.title
unlocked_obj.alternative_title = draft_thesis.alternate_title
unlocked_obj.language = draft_thesis.language_as_uri
unlocked_obj.dissertant = draft_thesis.creator
unlocked_obj.abstract = draft_thesis.description
unlocked_obj.graduation_date = if draft_thesis.graduation_term.present?
"#{draft_thesis.graduation_year}-#{draft_thesis.graduation_term}"
else
draft_thesis.graduation_year.to_s
end
# Handle visibility plus embargo logic
unlocked_obj.visibility = draft_thesis.visibility_as_uri
if draft_thesis.embargo_end_date.present?
unlocked_obj.visibility_after_embargo = CONTROLLED_VOCABULARIES[:visibility].public
end
unlocked_obj.embargo_end_date = draft_thesis.embargo_end_date
# Handle rights
unlocked_obj.rights = draft_thesis.rights
# Additional fields
unlocked_obj.date_accepted = draft_thesis.date_accepted
unlocked_obj.date_submitted = draft_thesis.date_submitted
unlocked_obj.degree = draft_thesis.degree
unlocked_obj.thesis_level = draft_thesis.degree_level
unlocked_obj.institution = draft_thesis.institution_as_uri
unlocked_obj.specialization = draft_thesis.specialization
unlocked_obj.subject = draft_thesis.subjects
unlocked_obj.committee_members = draft_thesis.committee_members
unlocked_obj.supervisors = draft_thesis.supervisors
unlocked_obj.departments = draft_thesis.departments
unlocked_obj.member_of_paths = []
draft_thesis.each_community_collection do |community, collection|
unlocked_obj.add_to_path(community.id, collection.id)
end
unlocked_obj.save!
# remove old filesets and attachments and recreate
unlocked_obj.purge_filesets
# NOTE: destroy the attachment record, DON'T use #purge, which will wipe the underlying blob shared with the
# draft item
thesis.files.each(&:destroy) if thesis.files.present?
# add an association between the same underlying blobs the Draft uses and the Item
draft_thesis.files_attachments.each do |attachment|
new_attachment = ActiveStorage::Attachment.create(record: thesis.files_attachment_shim,
blob: attachment.blob, name: :shimmed_files)
FileAttachmentIngestionJob.perform_later(new_attachment.id)
end
thesis.set_thumbnail(thesis.files.find_by(blob_id: draft_thesis.thumbnail.blob.id))
end
draft_thesis.uuid = thesis.id
draft_thesis.save!
thesis
end
unlocked do
before_save :copy_departments_to_unordered_predicate
before_save :copy_supervisors_to_unordered_predicate
validates :dissertant, presence: true
validates :graduation_date, presence: true
validates :sort_year, presence: true
validates :language, uri: { in_vocabulary: :language }
validates :institution, uri: { in_vocabulary: :institution }
type [::Hydra::PCDM::Vocab::PCDMTerms.Object, ::RDF::Vocab::BIBO.Thesis]
before_validation do
# Note: for Item, the sort_year attribute is derived from dcterms:created
begin
self.sort_year = Date.parse(graduation_date).year.to_i if graduation_date.present?
rescue ArgumentError
# date was unparsable, try to pull out the first 4 digit number as a year
capture = graduation_date.scan(/\d{4}/)
self.sort_year = capture[0].to_i if capture.present?
end
end
def copy_departments_to_unordered_predicate
return unless departments_changed?
self.unordered_departments = []
departments.each { |d| self.unordered_departments += [d] }
end
def copy_supervisors_to_unordered_predicate
return unless supervisors_changed?
self.unordered_supervisors = []
supervisors.each { |s| self.unordered_supervisors += [s] }
end
end
end
|
murny/jupiter
|
test/controllers/downloads_controller_test.rb
|
require 'test_helper'
class DownloadsControllerTest < ActionDispatch::IntegrationTest
def before_all
super
community = locked_ldp_fixture(Community, :nice).unlock_and_fetch_ldp_object(&:save!)
collection = locked_ldp_fixture(Collection, :nice).unlock_and_fetch_ldp_object(&:save!)
item = locked_ldp_fixture(Item, :fancy).unlock_and_fetch_ldp_object do |uo|
uo.add_to_path(community.id, collection.id)
uo.save!
end
Sidekiq::Testing.inline! do
File.open(file_fixture('text-sample.txt'), 'r') do |file|
item.add_and_ingest_files([file])
end
end
@file = item.files.first
end
test 'file should be viewable with proper headings' do
get file_view_item_url(id: @file.record.owner.id,
file_set_id: @file.fileset_uuid,
file_name: @file.filename)
assert_response :success
assert_equal @response.content_type, 'text/plain'
assert_equal @response.headers['Content-Disposition'], 'inline'
assert_includes @response.body, 'A nice, brief file, with some great text.'
end
test 'file should be downloadable with proper headings' do
get file_download_item_url(id: @file.record.owner.id,
file_set_id: @file.fileset_uuid)
assert_response :success
assert_equal @response.content_type, 'text/plain'
assert_equal @response.headers['Content-Disposition'], 'attachment'
assert_includes @response.body, 'A nice, brief file, with some great text.'
end
end
|
murny/jupiter
|
app/helpers/page_layout_helper.rb
|
module PageLayoutHelper
def page_title(title = nil)
# title tags should be around 55 characters, so let's truncate them if they're quite long
# With '... | ERA' being appended, we want to aim a bit smaller, around 45 characters
title = truncate(strip_tags(title), length: 45, separator: ' ', omission: '...', escape: false)
@page_title ||= []
@page_title.push(title) if title.present?
@page_title.join(' | ')
end
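# Illustrative usage (hypothetical titles; the ' | ERA' suffix mentioned above is
# assumed to be appended by the layout):
#   page_title('My Item Title')    # => "My Item Title"
#   page_title('Another Segment')  # => "My Item Title | Another Segment"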
# Define or get a description for the current page
#
# description - String (default: nil)
#
# If this helper is called multiple times with an argument, only the last
# description will be returned when called without an argument. Descriptions
# have newlines replaced with spaces and all HTML tags are sanitized.
#
# Examples:
#
# page_description # => "Default Jupiter Welcome Lead"
# page_description("Foo")
# page_description # => "Foo"
#
# page_description("<b>Bar</b>\nBaz")
# page_description # => "Bar Baz"
#
# Returns an HTML-safe String.
def page_description(description = nil)
if description.present?
@page_description = description.squish
elsif @page_description.present?
truncate(strip_tags(@page_description), length: 140, separator: ' ', omission: '...', escape: false)
else
@page_description = t('welcome.index.welcome_lead')
end
end
def page_image
default_url = image_url('era-logo.png')
# We only have images on community and item/thesis show pages
image_url = @community&.thumbnail_url || @item&.thumbnail_url
image_url || default_url
end
def page_type(type = nil)
type || 'website'
end
end
|
murny/jupiter
|
test/system/deposit_item_test.rb
|
require 'application_system_test_case'
class DepositItemTest < ApplicationSystemTestCase
def before_all
super
# Setup a community/collection pair for respective dropdowns
@community = Community.new_locked_ldp_object(title: 'Books', owner: 1).unlock_and_fetch_ldp_object(&:save!)
@collection = Collection.new_locked_ldp_object(title: 'Fantasy Books',
owner: 1,
community_id: @community.id)
.unlock_and_fetch_ldp_object(&:save!)
end
test 'be able to deposit a new item into jupiter successfully' do
user = users(:regular)
login_user(user)
click_link I18n.t('application.navbar.links.new_item')
skip 'This test continues to flap on CI for unknown reasons that should be investigated ASAP' if ENV['TRAVIS']
# 1. Describe Item Form
assert_selector 'h1', text: I18n.t('items.draft.header')
assert_selector 'h2', text: I18n.t('items.draft.describe_item.header')
fill_in I18n.t('items.draft.describe_item.title'),
# Need to narrow down by placeholder since capybara can't differentiate from title and alternate title labels
placeholder: I18n.t('items.draft.describe_item.title_placeholder'),
with: 'A Dance with Dragons'
select 'Book', from: I18n.t('items.draft.describe_item.type_id')
selectize_option '.draft_item_languages', with: 'English'
selectize_set_text '.draft_item_creators', with: '<NAME>'
selectize_set_text '.draft_item_subjects', with: 'A Song of Ice and Fire'
selectize_set_text '.draft_item_subjects', with: 'Fantasy'
select_date '2011/07/12', field_id: 'draft_item_date_created'
fill_in I18n.t('items.draft.describe_item.description'), with: 'A Dance with Dragons Description Goes Here!!!'
select @community.title, from: 'draft_item[community_id][]'
select @collection.title, from: 'draft_item[collection_id][]'
click_on I18n.t('items.draft.save_and_continue')
# 2. Choose License and Visibility Form
assert_selector 'h2', text: I18n.t('items.draft.choose_license_and_visibility.header')
# Open accordion
click_on I18n.t('items.draft.choose_license_and_visibility.license.link_to_another_license')
choose I18n.t('items.draft.choose_license_and_visibility.license.license_text_html')
fill_in 'draft_item_license_text_area', with: 'License Text Goes Here!!!'
choose I18n.t('items.draft.choose_license_and_visibility.visibility.embargo')
select_date '2023/01/01', field_id: 'draft_item_embargo_end_date'
click_on I18n.t('items.draft.save_and_continue')
# 3. Upload File Form
assert_selector 'h2', text: I18n.t('items.draft.upload_files.header')
attach_file_in_dropzone(file_fixture('image-sample.jpeg'))
has_css? '.j-thumbnail'
click_on I18n.t('items.draft.save_and_continue'), wait: 5
# 4. Review and Deposit Form
assert_selector 'h2', text: I18n.t('items.draft.review_and_deposit_item.header')
click_on I18n.t('items.draft.header')
# Success! Deposit Successful
assert_text I18n.t('items.draft.successful_deposit')
assert_selector 'h1', text: Item.last.title
# verify editing
click_on I18n.t('edit')
assert_selector 'h1', text: I18n.t('items.draft.header_edit')
fill_in I18n.t('items.draft.describe_item.title'),
currently_with: 'A Dance with Dragons',
with: 'The Winds of Winter'
click_on I18n.t('items.draft.save_and_continue')
click_on I18n.t('items.draft.save_and_continue')
click_on I18n.t('items.draft.save_and_continue')
click_on I18n.t('items.draft.save_and_deposit_edits')
assert_text I18n.t('items.draft.successful_deposit')
assert_selector 'h1', text: 'The Winds of Winter'
end
test 'should populate community and collection when coming from collection page' do
user = users(:regular)
login_user(user)
# Navigate to collection page
click_link I18n.t('application.navbar.links.communities')
click_link 'Books'
click_link 'Fantasy Books'
# Click deposit button
click_link I18n.t('collections.show.deposit_item')
assert has_select?('draft_item[community_id][]', selected: 'Books')
assert has_select?('draft_item[collection_id][]', selected: 'Fantasy Books')
end
# Helper methods for javascript fields (selectize/dropzone) and date select
# (could be moved and made as generic helpers if these are needed elsewhere)
private
def selectize_set_text(key, with:)
within key do
first('.selectize-input input').set(with)
first('.selectize-dropdown-content .create').click
end
end
def selectize_option(key, with:)
within key do
first('.selectize-input input').click
find('.option', text: with).click
end
end
def select_date(date, field_id:)
date = Date.parse(date)
select date.year.to_s, from: "#{field_id}_1i"
select I18n.l(date, format: '%B'), from: "#{field_id}_2i"
select date.day.to_s, from: "#{field_id}_3i"
end
def attach_file_in_dropzone(file_path)
# Attach the file to the hidden input selector
attach_file(nil, file_path, class: 'dz-hidden-input', visible: false)
end
end
|
PaymentsHubRebels/kafka-boshrelease
|
spec/jobs/sanitytest_spec.rb
|
require 'rspec'
require 'json'
require 'yaml' # todo fix bosh-template
require 'bosh/template/test'
describe 'sanitytest job' do
let(:release) { Bosh::Template::Test::ReleaseDir.new(File.join(File.dirname(__FILE__), '../..')) }
let(:job) { release.job('sanitytest') }
describe "config/kafka_discovery/cluster.yaml template" do
let(:template) { job.template("config/kafka_discovery/cluster.yaml") }
let(:zookeeper_link) {
Bosh::Template::Test::Link.new(
name: 'zookeeper',
instances: [Bosh::Template::Test::LinkInstance.new(name: 'zook-1',
index: 0,
az: 'az4',
address: 'zook-1.example.com',
bootstrap: true),
Bosh::Template::Test::LinkInstance.new(name: 'zook-2',
index: 1,
az: 'az4',
address: 'zook-2.example.com',
bootstrap: false)],
properties: {
"client_port" => 1
}
)
}
def produce_kafka_links(topics = [])
Bosh::Template::Test::Link.new(
name: 'kafka',
instances: [Bosh::Template::Test::LinkInstance.new(name: 'kafka-1',
index: 0,
az: 'az4',
address: 'kafka-1.example.com',
bootstrap: true),
Bosh::Template::Test::LinkInstance.new(name: 'kafka-2',
index: 1,
az: 'az4',
address: 'kafka-2.example.com',
bootstrap: false)],
properties: {
"tls" => {
"ca_certs" => "",
"certificate" => ""
},
"keystore_password" => "",
"enable_sasl_scram" => "",
"jaas_admin" => {
"username" => "",
"password" => ""
},
"topics" => topics,
"listen_port" => 1234
}
)
end
let(:kafka_link) { produce_kafka_links([{
"replication_factor" => 3,
"partition" => 2,
"name" => "aTopic"
}]) }
let(:links) { [ zookeeper_link, kafka_link ]}
describe "with default manifest values" do
it "renders properly" do
expect { template.render({}, consumes: links) }.not_to raise_error
end
it "contains kafka brokers instances" do
kafkahosts = ['kafka-1.example.com', 'kafka-2.example.com']
renderedTemplate = template.render({}, consumes: links)
kafkahosts.each do |khost|
expect(renderedTemplate).to include(khost)
end
end
it "contains zookeeper instances" do
zookeeperhosts = ['zook-1.example.com', 'zook-2.example.com']
renderedTemplate = template.render({}, consumes: links)
zookeeperhosts.each do |zhost|
expect(renderedTemplate).to include(zhost)
end
end
end
end
end
|
PaymentsHubRebels/kafka-boshrelease
|
spec/jobs/reassignpartitions_spec.rb
|
require 'rspec'
require 'json'
require 'yaml' # todo fix bosh-template
require 'bosh/template/test'
describe 'reassignpartitions job' do
let(:release) { Bosh::Template::Test::ReleaseDir.new(File.join(File.dirname(__FILE__), '../..')) }
let(:job) { release.job('reassignpartitions') }
describe "run script" do
let(:template) { job.template("bin/run") }
let(:zookeeper_link) {
Bosh::Template::Test::Link.new(
name: 'zookeeper',
instances: [Bosh::Template::Test::LinkInstance.new()],
properties: {
"client_port" => 1
}
)
}
def produce_kafka_links(number = 1, topics = [])
Bosh::Template::Test::Link.new(
name: 'kafka',
instances: (1..number).map { |n| Bosh::Template::Test::LinkInstance.new() },
properties: {
"tls" => {
"ca_certs" => "",
"certificate" => ""
},
"keystore_password" => "",
"enable_sasl_scram" => "",
"jaas_admin" => {
"username" => "",
"password" => ""
},
"topics" => topics,
"listen_port" => 1
}
)
end
describe "with no topics" do
let(:kafka_link) { produce_kafka_links }
let(:links) { [ zookeeper_link, kafka_link ]}
it "renders properly" do
expect { template.render({}, consumes: links) }.not_to raise_error
end
end
describe "with bad topics" do
let(:kafka_link) { produce_kafka_links }
let(:links) { [ zookeeper_link, kafka_link ]}
let(:property_with_wrong_type) {
{
"topics" => "3"
}
}
let(:nil_property) {
{
"topics" => nil
}
}
it "reacts accordingly" do
expect { template.render(property_with_wrong_type, consumes: links) }.to raise_error(Bosh::Template::EvaluationContext::InvalidTopicsTypeError)
expect { template.render(nil_property, consumes: links) }.not_to raise_error
end
it "does not produce output" do
expect(template.render(nil_property, consumes: links)).not_to include("# processing topic")
end
end
describe "with topics" do
let(:single_kafka_link) { produce_kafka_links }
let(:multiple_kafka_link) { produce_kafka_links(3) }
let(:single_kafka_links) { [ zookeeper_link, single_kafka_link ]}
let(:multiple_kafka_links) { [ zookeeper_link, multiple_kafka_link ]}
let(:one_topic) {
{
"topics" => [
{
"name" => "topic1",
"partitions" => 3,
"replication_factor" => 3
}
]
}
}
let(:multiple_topics) {
{
"topics" => [
{
"name"=> "topic1",
"partitions"=> 3,
"replication_factor"=> 3
},
{
"name"=> "topic2",
"partitions"=> 3,
"replication_factor"=> 2
}
]
}
}
it "render properly" do
expect { template.render(one_topic, consumes: multiple_kafka_links) }.not_to raise_error
expect { template.render(multiple_topics, consumes: multiple_kafka_links) }.not_to raise_error
expect(template.render(one_topic, consumes: multiple_kafka_links)).to include("# processing topic topic1")
expect(template.render(multiple_topics, consumes: multiple_kafka_links)).to include("# processing topic topic1")
expect(template.render(multiple_topics, consumes: multiple_kafka_links)).to include("# processing topic topic2")
end
it "does not render if brokers are few" do
expect { template.render(one_topic, consumes: single_kafka_links) }.to raise_error(Bosh::Template::EvaluationContext::ReplicationFactorTooBigError)
expect { template.render(multiple_topics, consumes: single_kafka_links) }.to raise_error(Bosh::Template::EvaluationContext::ReplicationFactorTooBigError)
end
end
end
end
|
PaymentsHubRebels/kafka-boshrelease
|
spec/jobs/generatetopics_spec.rb
|
require 'rspec'
require 'json'
require 'yaml' # todo fix bosh-template
require 'bosh/template/test'
describe 'generatetopics job' do
let(:release) { Bosh::Template::Test::ReleaseDir.new(File.join(File.dirname(__FILE__), '../..')) }
let(:job) { release.job('generatetopics') }
describe "run script" do
let(:template) { job.template("bin/run") }
let(:zookeeper_link) {
Bosh::Template::Test::Link.new(
name: 'zookeeper',
instances: [Bosh::Template::Test::LinkInstance.new()],
properties: {
"client_port" => 1
}
)
}
def produce_kafka_links(topics = [])
Bosh::Template::Test::Link.new(
name: 'kafka',
instances: [Bosh::Template::Test::LinkInstance.new()],
properties: {
"tls" => {
"ca_certs" => "",
"certificate" => ""
},
"keystore_password" => "",
"enable_sasl_scram" => "",
"jaas_admin" => {
"username" => "",
"password" => ""
},
"topics" => topics,
"listen_port" => 1
}
)
end
describe "with no topics" do
let(:kafka_link) { produce_kafka_links }
let(:links) { [ zookeeper_link, kafka_link ]}
it "renders properly" do
expect { template.render({}, consumes: links) }.not_to raise_error
end
end
describe "with topics" do
let(:kafka_link) { produce_kafka_links([{
"replication_factor" => 3,
"partition" => 2,
"name" => "aTopic"
}]) }
let(:links) { [ zookeeper_link, kafka_link ]}
it "renders properly" do
kafka_topics_parts = [
"kafka-topics.sh",
"--replication-factor 3",
"--partitions 1"
]
rendered_template = template.render({}, consumes: links)
kafka_topics_parts.each do |part|
expect(rendered_template).to include(part)
end
end
end
end
end
|
skipteel/form_builder
|
db/schema.rb
|
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
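# For example, to build the database on a new machine from this schema (standard
# Rails task; assumes config/database.yml is already set up):
#   bin/rake db:schema:load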
ActiveRecord::Schema.define(version: 20090918234029) do
create_table "form_fields", force: :cascade do |t|
t.integer "form_id"
t.string "label"
t.string "tag"
t.text "values"
t.boolean "required", default: false
t.integer "position"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "form_values", force: :cascade do |t|
t.integer "submission_id"
t.integer "form_field_id"
t.string "entry"
t.datetime "entry_datetime"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "forms", force: :cascade do |t|
t.string "name"
t.string "email"
t.text "description"
t.boolean "published", default: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "submissions", force: :cascade do |t|
t.integer "form_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
|
skipteel/form_builder
|
config/routes.rb
|
Rails.application.routes.draw do
resources :forms do
post :sort_fields, :on => :member
resources :submissions do
get :thank_you, :on => :collection
end
end
root 'forms#index'
end
|
skipteel/form_builder
|
app/models/form_field.rb
|
class FormField < ActiveRecord::Base
acts_as_list :scope => :form
belongs_to :form
has_many :form_values
end
|
skipteel/form_builder
|
app/models/notifier.rb
|
class Notifier < ActionMailer::Base
def form_submission(submission, sent_at = Time.now)
subject "Form Submission: #{submission.form.name}"
recipients submission.form.email
from "<EMAIL>"
sent_on sent_at
body :submission => submission
end
end
|
skipteel/form_builder
|
app/helpers/submissions_helper.rb
|
module SubmissionsHelper
def generate_label_and_form_tag(ff)
html = "<p>"
case ff.object.form_field.tag
when "text_field"
html += "<strong>" + (ff.label :entry, "#{ff.object.form_field.label}:") + "</strong>" + "<br />"
html += ff.text_field :entry
when "text_area"
html += "<strong>" + (ff.label :entry, "#{ff.object.form_field.label}:") + "</strong>" + "<br />"
html += ff.text_area :entry, :cols => 50, :rows => 6
when "select"
html += "<strong>" + (ff.label :entry, "#{ff.object.form_field.label}:") + "</strong>" + "<br />"
html += ff.select :entry, ff.object.form_field.values.split("\r\n"), :include_blank => "Select #{ff.object.form_field.label}..."
when "check_box"
html += ff.check_box :entry
html += " "
html += "<strong>" + (ff.label :entry, "#{ff.object.form_field.label}") + "</strong>"
when "date_select"
html += "<strong>" + (ff.label :entry_datetime, "#{ff.object.form_field.label}:") + "</strong>" + "<br />"
html += ff.date_select :entry_datetime, :include_blank => true
#when "radio_button"
# ff.object.form_field.values.split("\n") do
# html += ff.radio_button :entry, ff.object.form_field.values.split("\n").first
# html += ff.label :entry, "#{ff.object.form_field.label}:"
# end.join("<br />")
end
html += "</p>"
end
end
|
skipteel/form_builder
|
app/controllers/submissions_controller.rb
|
class SubmissionsController < ApplicationController
before_filter :find_form
before_filter :find_submission, :only => %w(show edit update destroy)
def index
@submissions = Submission.all
end
def show
end
def new
@submission = @form.submissions.new
@form.form_fields.all.each do |field|
new_field = @submission.form_values.build
new_field.form_field = field
end
end
def create
@submission = @form.submissions.build(params[:submission])
if @submission.save
if @submission.form.email.present?
Notifier.deliver_form_submission(@submission)
end
flash[:notice] = "Successfully created submission."
redirect_to [@form, @submission]
else
render :action => 'new'
end
end
def edit
end
def update
if @submission.update_attributes(params[:submission])
flash[:notice] = "Successfully updated submission."
redirect_to [@form, @submission]
else
render :action => 'edit'
end
end
def destroy
@submission.destroy
flash[:notice] = "Successfully destroyed submission."
redirect_to form_submissions_path(@form)
end
protected
def find_form
@form = Form.find(params[:form_id])
end
def find_submission
@submission = @form.submissions.find(params[:id])
end
end
|
skipteel/form_builder
|
app/models/form.rb
|
class Form < ActiveRecord::Base
has_many :submissions, :dependent => :destroy
has_many :form_fields, -> { order "position" }, :dependent => :destroy
accepts_nested_attributes_for :form_fields, :allow_destroy => true
scope :published, -> { where(published: true) }
end
|
skipteel/form_builder
|
db/migrate/20090918234029_create_form_values.rb
|
class CreateFormValues < ActiveRecord::Migration[5.0]
def self.up
create_table :form_values do |t|
t.integer :submission_id
t.integer :form_field_id
t.string :entry
t.datetime :entry_datetime
t.timestamps
end
end
def self.down
drop_table :form_values
end
end
|
skipteel/form_builder
|
app/models/form_value.rb
|
class FormValue < ActiveRecord::Base
belongs_to :form_field
belongs_to :submission
end
|
skipteel/form_builder
|
app/models/submission.rb
|
class Submission < ActiveRecord::Base
has_many :form_values, :dependent => :destroy
belongs_to :form
accepts_nested_attributes_for :form_values
validate :required_form_values_are_present?
private
def required_form_values_are_present?
form_values.each do |val|
if val.form_field.required?
if (val.form_field.tag == "date_select" || val.form_field.tag == "datetime_select") && val.entry_datetime.blank?
errors.add_to_base("#{val.form_field.label} can't be blank")
end
if (val.form_field.tag != "date_select" && val.form_field.tag != "datetime_select") && val.entry.blank?
errors.add_to_base("#{val.form_field.label} can't be blank")
end
end
end
end
end
|
skipteel/form_builder
|
app/controllers/forms_controller.rb
|
class FormsController < ApplicationController
def index
@forms = Form.all
end
def show
@form = Form.find(params[:id])
end
def new
@form = Form.new
@form.form_fields.build
end
def create
@form = Form.new(form_params)
if @form.save
flash[:notice] = "Successfully created form."
redirect_to @form
else
render :action => 'new'
end
end
def edit
@form = Form.find(params[:id])
end
def update
@form = Form.find(params[:id])
if @form.update_attributes(form_params)
flash[:notice] = "Successfully updated form."
redirect_to @form
else
render :action => 'edit'
end
end
def destroy
@form = Form.find(params[:id])
@form.destroy
flash[:notice] = "Successfully destroyed form."
redirect_to forms_url
end
def sort_fields
@form = Form.find(params[:id])
params[:form_fields].each_with_index do |id, index|
@form.form_fields.where(:id => id).update_all(:position => index + 1)
end
render :nothing => true
end
def form_params
params.require(:form).permit(:name, :email, :description, :published,
:form_fields_attributes => [:label, :tag, :required, :position, :_destroy])
end
end
|
skipteel/form_builder
|
app/helpers/application_helper.rb
|
module ApplicationHelper
def remove_child_link(name)
content_tag(:div,"<span>#{name}</span>".html_safe,
:class => "remove_child")
end
def add_child_link(name, association, target)
content_tag(:button,"<span>#{name}</span>".html_safe,
:class => "add_child",
:"data-association" => association,
:target => target)
end
def new_fields_template(f,association,options={})
options[:object] ||= f.object.class.reflect_on_association(association).klass.new
options[:partial] ||= association.to_s.singularize+"_fields"
options[:template] ||= association.to_s+"_fields"
options[:f] ||= :f
tmpl = content_tag(:div,:id =>"#{options[:template]}") do
tmpl = f.fields_for(association,options[:object], :child_index => "new_#{association}") do |b|
render(:partial=>options[:partial],:locals =>{:f => b})
end
end
tmpl = tmpl.gsub /(?<!\n)\n(?!\n)/, ' '
return "<script> var #{options[:template]} = '#{tmpl.to_s}' </script>".html_safe
end
end
|
speedy32129/entityid-sequence
|
lib/entityid/sequence.rb
|
require "entityid/sequence/version"
module Entityid
module Sequence
class Error < StandardError; end
# Your code goes here...
end
end
|
speedy32129/entityid-sequence
|
test/entityid/sequence_test.rb
|
require "test_helper"
class Entityid::SequenceTest < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::Entityid::Sequence::VERSION
end
def test_it_does_something_useful
assert false
end
end
|
speedy32129/entityid-sequence
|
lib/entityid/sequence/version.rb
|
module Entityid
module Sequence
VERSION = "0.1.0"
end
end
|
abegosum/carhole_minder
|
button_listener.rb
|
require 'rpi_gpio'
BUTTON_PIN = 25
RELAY_PIN = 24
LED_PIN = 2
LOOP_DELAY = 0.01
def initialize_gpio
RPi::GPIO.set_numbering :bcm
RPi::GPIO.setup BUTTON_PIN, :as => :input
RPi::GPIO.setup RELAY_PIN, :as => :output
RPi::GPIO.setup LED_PIN, :as => :output
end
def toggle_relay
if RPi::GPIO.high? RELAY_PIN
RPi::GPIO.set_low RELAY_PIN
else
RPi::GPIO.set_high RELAY_PIN
end
end
def toggle_led
if RPi::GPIO.high? LED_PIN
RPi::GPIO.set_low LED_PIN
else
RPi::GPIO.set_high LED_PIN
end
end
def button_pressed?
RPi::GPIO.low? BUTTON_PIN
end
def initialize_pins
RPi::GPIO.set_high RELAY_PIN
RPi::GPIO.set_low LED_PIN
end
def wait_for_button_release
loop do
button_still_pressed = button_pressed?
puts "Button Released" unless button_still_pressed
break unless button_still_pressed
sleep LOOP_DELAY
end
end
initialize_gpio
initialize_pins
loop do
if button_pressed?
toggle_relay
toggle_led
puts "Button Pressed"
wait_for_button_release
end
sleep LOOP_DELAY
end
|
abegosum/carhole_minder
|
button_listener_service.rb
|
require 'rpi_gpio'
require_relative 'constants'
class ButtonListenerService
attr_reader :button_pin
attr_reader :button_name
attr_accessor :long_press_lambda
attr_accessor :long_press_delay
def initialize(button_pin, button_name)
@button_pin = button_pin
@button_name = button_name
@long_press_delay = LONG_PRESS_SECONDS
end
def initialize_gpio
RPi::GPIO.setup button_pin, :as => :input
@gpio_is_initialized = true
end
def button_pressed?
RPi::GPIO.low? button_pin
end
def wait_for_button_release
start_wait_time = Time.now.to_i
loop do
unless start_wait_time.nil?
seconds_past = Time.now.to_i - start_wait_time
if seconds_past >= @long_press_delay
puts "Long press of #{button_name} detected"
long_press_lambda.call unless long_press_lambda.nil?
start_wait_time = nil
end
end
button_still_pressed = button_pressed?
break unless button_still_pressed
sleep LOOP_DELAY
end
puts "#{button_name} button released"
end
def stop_button_listener
@service_thread[:stop] = true
end
def rejoin_service
@service_thread.join
end
def start_button_listener
initialize_gpio unless @gpio_is_initialized
@service_thread = Thread.new do
current_thread = Thread.current
while ! current_thread[:stop]
if button_pressed?
puts "#{button_name} button pressed"
yield
wait_for_button_release
end
sleep LOOP_DELAY
end
puts "Service stopped"
end
end
private
@gpio_is_initialized = false
@service_running = false
@service_thread = nil
end
|
abegosum/carhole_minder
|
alert_mailer.rb
|
require_relative 'constants'
require 'net/smtp'
require 'date'
class AlertMailer
def self.send_door_long_opened_alert(timestamp_opened)
time_opened = Time.at(timestamp_opened).to_datetime
message = <<~EOF
Subject: Garage Door Open too Long
Your garage door has been open since #{time_opened.strftime("%l:%M %P on %-m/%-d/%Y")}.
If the timer is disabled and this is expected, you may ignore this message. Otherwise, please check the timer on the door and any obstructions.
Thank you,
- <NAME>
EOF
Net::SMTP.start('localhost') do |smtp|
smtp.send_message(message, FROM_EMAIL, ALERT_EMAILS)
end
end
def self.send_door_failed_closing_alert(timestamp_opened, timestamp_attempted_close)
time_opened = Time.at(timestamp_opened).to_datetime
time_attempted_close = Time.at(timestamp_attempted_close).to_datetime
message = <<~EOF
Subject: Garage Door Couldn't Close
An attempt to close your garage at #{time_attempted_close.strftime("%l:%M %P on %-m/%-d/%Y")} failed to close it within #{DOOR_CLOSING_ALERT_DELAY_SECONDS} seconds.
Check to see if the sensor on the door is misaligned. If the sensor is in the proper position, check whether the door is obstructed.
Thank you,
- <NAME>
EOF
Net::SMTP.start('localhost') do |smtp|
smtp.send_message(message, FROM_EMAIL, ALERT_EMAILS)
end
end
end
|
abegosum/carhole_minder
|
daemon_start.rb
|
require_relative 'carhole_minder'
require_relative 'service_frontend'
require_relative 'constants'
require 'drb/drb'
SERVICE_SAFE = 1
daemon_object = CarholeMinder.new
drb_front_object = ServiceFrontend.new(daemon_object)
DRb.start_service("druby://localhost:#{DRB_PORT}", drb_front_object, { :safe_level => SERVICE_SAFE})
#DRb.start_service("druby://localhost:#{DRB_PORT}", drb_front_object)
daemon_object.run!
|
abegosum/carhole_minder
|
service_frontend.rb
|
require_relative 'carhole_minder'
require_relative 'constants'
class ServiceFrontend
def initialize(carhole_minder)
@carhole_minder = carhole_minder
end
def door_open?
@carhole_minder.door_open?
end
def open_or_close_garage_door
if @carhole_minder.door_open?
result = :closing
else
result = :opening
end
@carhole_minder.open_or_close_garage_door
result
end
def timer_settings
@carhole_minder.timer_settings
end
def timer_setting_index
@carhole_minder.get_timer_setting_index
end
def advance_timer_setting
@carhole_minder.advance_timer_setting_and_update
@carhole_minder.get_timer_setting_index
end
def timer_setting_index=(value)
@carhole_minder.set_timer_setting_index value
end
def seconds_since_last_open
@carhole_minder.seconds_since_last_open
end
def door_last_opened_time
@carhole_minder.door_last_opened_time
end
def door_last_closed_time
@carhole_minder.door_last_closed_time
end
def door_close_attempted_time
@carhole_minder.door_close_attempted_time
end
private
@carhole_minder
end
|
abegosum/carhole_minder
|
daemon_control.rb
|
$LOAD_PATH.unshift('.')
require 'daemons'
require 'carhole_minder'
Daemons.run('daemon_start.rb')
|
abegosum/carhole_minder
|
door_open_switch_listener_service.rb
|
require 'rpi_gpio'
require_relative 'constants'
DOOR_DELAYS = [TIMER_SETTING_1_MINUTES, TIMER_SETTING_2_MINUTES, TIMER_SETTING_3_MINUTES]
class DoorOpenSwitchListenerService
attr_reader :timer_setting
def initialize(timer_setting)
@timer_setting = timer_setting
@door_open_lambdas = []
@door_close_lambdas = []
end
def disable_timer
@service_thread[:timer_enabled] = false unless @service_thread.nil?
if @service_thread
puts "Disabling Timer"
else
puts "Service Not Defined"
end
end
def enable_timer
@service_thread[:timer_enabled] = true unless @service_thread.nil?
if @service_thread
puts "Enabling Timer"
else
puts "Service Not Defined"
end
end
def toggle_timer
unless @service_thread.nil?
@service_thread[:timer_enabled] = ! @service_thread[:timer_enabled]
end
end
def initialize_gpio
RPi::GPIO.setup DOOR_OPEN_SWITCH_PIN, :as => :input
RPi::GPIO.setup TIMER_BUTTON_LED_PIN, :as => :output
turn_on_timer_button_led
@gpio_is_initialized = true
end
def turn_on_timer_button_led
RPi::GPIO.set_high TIMER_BUTTON_LED_PIN
end
def turn_off_timer_button_led
RPi::GPIO.set_low TIMER_BUTTON_LED_PIN
end
def start_blinking_timer_button
@button_blink_thread = Thread.new do
current_thread = Thread.current
while ! current_thread[:stop]
turn_off_timer_button_led
sleep TIMER_BUTTON_BLINK_DURATION
turn_on_timer_button_led
sleep TIMER_BUTTON_BLINK_DURATION
end
turn_on_timer_button_led
end if @button_blink_thread.nil?
end
def stop_blinking_timer_button
@button_blink_thread[:stop] = true unless @button_blink_thread.nil?
@button_blink_thread = nil
turn_on_timer_button_led
end
def door_open?
RPi::GPIO.low? DOOR_OPEN_SWITCH_PIN
end
def delay_in_seconds
DOOR_DELAYS[@timer_setting] * 60
end
def get_timer_setting_index
@timer_setting
end
def update_timer_setting(setting_index)
enable_timer unless setting_index == TIMER_DISABLED_INDICATOR
@timer_setting = setting_index
@service_thread[:delay_seconds] = delay_in_seconds unless @service_thread.nil?
end
def reset_timer
@door_open_detected_time = nil
@timer_has_been_tripped = false
end
def notify_door_opened
unless @door_open_detected_time
puts "Door opened"
@door_open_detected_time = Time.now.to_i
@door_open_lambdas.each do |cur_listener|
cur_listener.call(@door_open_detected_time)
end
end
end
def notify_door_closed
unless @door_open_detected_time.nil?
puts "Door closed"
door_closed_time = Time.now.to_i
@door_close_lambdas.each do |cur_listener|
cur_listener.call(door_closed_time)
end
end
end
def add_door_opened_listener(listener)
@door_open_lambdas << listener
end
def add_door_closed_listener(listener)
@door_close_lambdas << listener
end
def start_door_open_switch_listener
initialize_gpio unless @gpio_is_initialized
stop_blinking_timer_button
@service_thread = Thread.new do
# Set up the thread to track whether the timer is enabled; default to true
current_thread = Thread.current
current_thread[:timer_enabled] = true
# Loop listening for the door until the thread is told to stop
while ! current_thread[:stop]
if door_open?
# If we haven't previously detected a time the door opened, the door JUST Opened
#puts "Door opened" unless @door_open_detected_time
start_blinking_timer_button
#@door_open_detected_time = Time.now.to_i unless @door_open_detected_time
notify_door_opened
if current_thread[:delay_seconds] && current_thread[:timer_enabled]
seconds_since_open = Time.now.to_i - @door_open_detected_time
if (seconds_since_open > current_thread[:delay_seconds]) && !@timer_has_been_tripped
@timer_has_been_tripped = true
yield
end
end
else
notify_door_closed
reset_timer
stop_blinking_timer_button
end
sleep LOOP_DELAY
end
puts "Service stopped"
end
update_timer_setting(timer_setting)
end
private
@service_thread = nil
@button_blink_thread = nil
@door_open_detected_time = nil
@gpio_is_initialized = false
@timer_has_been_tripped = false
@door_open_lambdas = []
@door_close_lambdas = []
end
|
abegosum/carhole_minder
|
carhole_minder.rb
|
require_relative 'constants'
require_relative 'button_listener_service'
require_relative 'door_open_switch_listener_service'
require_relative 'alert_mailer'
TIMER_PINS = [ TIMER_SETTING_1_LED, TIMER_SETTING_2_LED, TIMER_SETTING_3_LED ]
class CarholeMinder
attr_reader :timer_setting
attr_reader :door_last_opened_time
attr_reader :door_last_closed_time
attr_reader :door_close_attempted_time
attr_reader :door_long_opened_alert_sent
attr_reader :door_failed_closing_alert_sent
def initialize
@timer_setting = 0
@door_last_opened_time = nil
@door_last_closed_time = nil
@door_long_opened_alert_sent = false
@door_failed_closing_alert_sent = false
end
def init_gpio
puts 'Setting up GPIO in/out settings'
RPi::GPIO.set_numbering :bcm
RPi::GPIO.setup DOOR_BUTTON_LED_PIN, :as => :output
RPi::GPIO.setup READY_LED_PIN, :as => :output
RPi::GPIO.setup RELAY_PIN, :as => :output
RPi::GPIO.setup TIMER_SETTING_1_LED, :as => :output
RPi::GPIO.setup TIMER_SETTING_2_LED, :as => :output
RPi::GPIO.setup TIMER_SETTING_3_LED, :as => :output
end
def init_pins
puts 'Initializing pin states'
turn_off_led READY_LED_PIN
turn_on_led TIMER_SETTING_1_LED
turn_off_led TIMER_SETTING_2_LED
turn_off_led TIMER_SETTING_3_LED
RPi::GPIO.set_high RELAY_PIN
set_default_led_state(DOOR_BUTTON_LED_PIN, DOOR_BUTTON_LED_DEFAULT_STATE)
turn_on_led READY_LED_PIN
end
def set_default_led_state(led_pin, state)
if state == :on
RPi::GPIO.set_high led_pin
else
RPi::GPIO.set_low led_pin
end
end
def turn_on_led(pin_number)
RPi::GPIO.set_high pin_number
end
def turn_off_led(pin_number)
RPi::GPIO.set_low pin_number
end
def blink_door_button_led
relay_thread = Thread.new do
for i in 0..DOOR_OPEN_LIGHT_BLINKS
puts "Turning off"
turn_off_led DOOR_BUTTON_LED_PIN
sleep DOOR_OPEN_LIGHT_BLINK_DURATION
puts "Turning on"
turn_on_led DOOR_BUTTON_LED_PIN
sleep DOOR_OPEN_LIGHT_BLINK_DURATION
end
end
end
def open_or_close_garage_door
if door_open?
@door_close_attempted_time = Time.now.to_i
end
blink_door_button_led
RPi::GPIO.set_low RELAY_PIN
sleep RELAY_OPEN_DELAY
RPi::GPIO.set_high RELAY_PIN
end
def close_garage_door_by_timer
puts "Attempting to close open door"
open_or_close_garage_door
end
def update_timer_led_indicator
highest_index = TIMER_PINS.length - 1
for i in 0..highest_index
if i == @timer_setting
puts "Turning on LED at #{i} - #{TIMER_PINS[i]}"
turn_on_led TIMER_PINS[i]
else
puts "Turning off LED at #{i} - #{TIMER_PINS[i]}"
turn_off_led TIMER_PINS[i]
end
end
end
def advance_timer_setting
if @timer_setting == (TIMER_PINS.length - 1)
@timer_setting = 0
else
@timer_setting += 1
end
update_timer_led_indicator
end
def advance_timer_setting_and_update
advance_timer_setting
@door_open_service.update_timer_setting timer_setting
@door_open_service.reset_timer
end
def disable_timer
@timer_setting = TIMER_DISABLED_INDICATOR
puts "Timer disabled"
update_timer_led_indicator
end
def enable_timer
@timer_setting = 0
puts "Timer enabled"
update_timer_led_indicator
end
def timer_disabled?
@timer_setting == TIMER_DISABLED_INDICATOR
end
def get_timer_setting_index
@door_open_service.get_timer_setting_index
end
def set_timer_setting_index(index)
@door_open_service.update_timer_setting index
@timer_setting = index
update_timer_led_indicator
end
def toggle_timer
if timer_disabled?
enable_timer
else
disable_timer
end
end
def timer_settings
[ TIMER_SETTING_1_MINUTES, TIMER_SETTING_2_MINUTES, TIMER_SETTING_3_MINUTES ]
end
def shutdown_service_leds
turn_off_led READY_LED_PIN
turn_off_led DOOR_BUTTON_LED_PIN
turn_off_led TIMER_BUTTON_LED_PIN
turn_off_led TIMER_SETTING_1_LED
turn_off_led TIMER_SETTING_2_LED
turn_off_led TIMER_SETTING_3_LED
end
def seconds_since_last_open
if door_last_opened_time
time_since_last_open = (Time.now.to_i - door_last_opened_time)
else
0
end
end
def door_long_opened_alert_time_elapsed?
door_open? and seconds_since_last_open >= (DOOR_LONG_OPEN_ALERT_DELAY_MINUTES * 60)
end
def shutdown_computer
system '/usr/bin/sudo /sbin/shutdown -h 0'
end
def door_open?
(! @door_open_service.nil?) && @door_open_service.door_open?
end
def reset_alerts
@door_long_opened_alert_sent = false
@door_failed_closing_alert_sent = false
end
def check_for_alerts_and_send
current_timestamp = Time.now.to_i
if door_open?
if @door_close_attempted_time && ((current_timestamp - @door_close_attempted_time) > DOOR_CLOSING_ALERT_DELAY_SECONDS)
unless @door_failed_closing_alert_sent
puts "Sending door close failure alert"
AlertMailer.send_door_failed_closing_alert(@door_last_opened_time, @door_close_attempted_time)
@door_failed_closing_alert_sent = true
end
end
if @door_last_opened_time && ((current_timestamp - @door_last_opened_time) > (DOOR_LONG_OPEN_ALERT_DELAY_MINUTES * 60))
unless @door_long_opened_alert_sent
puts "Sending door long open alert"
AlertMailer.send_door_long_opened_alert(@door_last_opened_time)
@door_long_opened_alert_sent = true
end
end
end
end
def run!
Thread.abort_on_exception = true
init_gpio
init_pins
door_button_service = ButtonListenerService.new(DOOR_BUTTON_PIN, 'DOOR_BUTTON')
door_button_service.long_press_delay = SHUTDOWN_LONG_PRESS_SECONDS
door_button_service.long_press_lambda = lambda do
shutdown_service_leds
shutdown_computer
end
door_button_service.start_button_listener do
open_or_close_garage_door
end
@door_open_service = DoorOpenSwitchListenerService.new(timer_setting)
@door_open_service.start_door_open_switch_listener do
puts "TIMER REACHED!"
close_garage_door_by_timer
end
@door_open_service.add_door_opened_listener(lambda do |door_opened_time|
@door_last_opened_time = door_opened_time
end)
@door_open_service.add_door_closed_listener(lambda do |door_closed_time|
@door_close_attempted_time = nil
@door_last_closed_time = door_closed_time
reset_alerts
end)
timer_button_service = ButtonListenerService.new(TIMER_BUTTON_PIN, 'TIMER_BUTTON')
timer_button_service.long_press_lambda = lambda do
toggle_timer
@door_open_service.toggle_timer
end
timer_button_service.start_button_listener do
if timer_disabled?
@door_open_service.reset_timer # prevent instantaneous closing on timer enable
enable_timer
else
advance_timer_setting
end
@door_open_service.update_timer_setting timer_setting
@door_open_service.reset_timer
end
Kernel.trap 'SIGTERM' do
puts "Trapped Signal"
shutdown_service_leds
exit(0)
end
begin
while true
check_for_alerts_and_send
sleep MAIN_THREAD_SLEEP_DELAY
end
rescue SignalException => e
puts "Received Signal Exception #{e}"
shutdown_service_leds
exit(0)
end
end
private
@door_open_service = nil
end
|
shoji-k/useful
|
serverspec/spec/newdev/sample_spec.rb
|
require 'spec_helper'
describe package('httpd'), :if => os[:family] == 'redhat' do
it { should be_installed }
end
describe package('apache2'), :if => os[:family] == 'ubuntu' do
it { should be_installed }
end
describe service('httpd'), :if => os[:family] == 'redhat' do
it { should be_enabled }
it { should be_running }
end
describe service('apache2'), :if => os[:family] == 'ubuntu' do
it { should be_enabled }
it { should be_running }
end
describe service('org.apache.httpd'), :if => os[:family] == 'darwin' do
it { should be_enabled }
it { should be_running }
end
describe port(80) do
it { should be_listening }
end
describe file('/etc/passwd') do
it { should be_file }
end
describe file('/var/log/apache2') do
it { should be_directory }
end
describe command('ls -al /') do
its(:stdout) { should match /bin/ }
end
describe package('mysql-server') do
it { should be_installed }
end
describe service('mysql') do
it { should be_enabled }
it { should be_running }
end
describe port(3306) do
it { should be_listening.on('127.0.0.1').with('tcp') }
end
db_user = "user"
db_password = "password"
db_name = "sample"
describe command("mysqlshow -u#{db_user} -p#{db_password}") do
# its (:stdout) { should eq 'user' }
its (:stdout) { should match /Databases/ }
end
# describe command("mysqlshow -u#{db_user} -p#{db_password} #{db_name}") do
# its (:stdout) { should match /Database:\ #{db_name}/ }
# end
describe command("mysqladmin -u#{db_user} -p#{db_password} variables |grep character_set_server") do
its (:stdout) { should match /utf8/ }
end
# check php.ini for cli
# todo: fail 2 parameters
php_values = [
{'default_mimetype' => 'text/html'},
# {'max_execution_time' => 30},
{'memory_limit' => '-1'},
{'post_max_size' => '8M'},
{'upload_max_filesize' => '2M'},
# {'max_input_time' => 60},
{'date.timezone' => 'Asia/Tokyo'}
]
describe 'PHP config parameters' do
php_values.each do |php_value|
context php_config(php_value.keys.first) do
its(:value) { should eq php_value[php_value.keys.first] }
end
end
end
describe command('ruby -v') do
let(:disable_sudo) { true }
its(:stdout) { should match /ruby 2\.3\.0.+/ }
end
|
FPhillips27/lojong
|
spec/controllers/lojong_saying_controller_spec.rb
|
require 'spec_helper'
require 'rails_helper'
RSpec.describe LojongSayingsController, :type => :controller do
describe "GET #index" do
it "should be successful" do
get :index
response.should be_successful
end
end
end
|
FPhillips27/lojong
|
db/migrate/20160514222716_create_lojong_sayings.rb
|
class CreateLojongSayings < ActiveRecord::Migration[4.2]
def change
create_table :lojong_sayings do |t|
t.string :content
t.string :number
t.timestamps null: false
end
end
end
|
FPhillips27/lojong
|
app/controllers/lojong_sayings_controller.rb
|
class LojongSayingsController < ApplicationController
#GET
def index
@lojong_sayings = LojongSaying.randomSaying
end
#GET
def show
end
#GET
def new
end
#GET
def edit
end
#POST
def create
end
#PUT
def update
end
#DELETE
def destroy
end
end
|
FPhillips27/lojong
|
spec/requests/navigation_spec.rb
|
require "rails_helper"
require "spec_helper"
RSpec.describe "Navbar Link", :type => :request do
it "takes to the user to the About page when they click ABOUT" do
visit "#index"
click_on("ABOUT")
current_path.should == "/about"
end
it "takes to the user to the Slogans page when they click SLOGANS" do
visit "#index"
click_on("SLOGANS")
current_path.should == "/lojong_sayings"
end
end
RSpec.describe "Refresh Button", :type => :request do
it "takes to the user to /lojong_sayings when they click it" do
visit "#index"
page.find('#new_slogan_mobile').click
current_path.should == "/lojong_sayings"
end
end
|
FPhillips27/lojong
|
app/controllers/about_controller.rb
|
class AboutController < ApplicationController
def show
end
end
|
FPhillips27/lojong
|
features/step_definitions/lojong_steps.rb
|
Given(/^that I am on the Lojong Slogans page$/) do
visit('/lojong_sayings')
end
Given(/^that I am on the about page$/) do
visit('/about')
end
Then(/^I will see the number of a Lojong saying$/) do
find("p.number")
end
Then(/^I will see the text of a Lojong saying$/) do
page.has_css?('.content')
end
Then(/^I will see the reload button$/) do
page.has_css?('.reload-button')
end
When(/^I click the reload button$/) do
page.find('.refresh-button-mobile.glyphicon.glyphicon-refresh').click
end
Then(/^I will see the text "([^"]*)"$/) do |text|
expect(page).to have_css 'p', text: "#{text}"
end
When(/^I click "([^"]*)"$/) do |text|
click_on(("#{text}"), :match => :first)
end
|
FPhillips27/lojong
|
app/models/lojong_saying.rb
|
class LojongSaying < ActiveRecord::Base
scope :recent, lambda { order('created_at DESC').limit(10) }
# Loads the matching documents and returns one at random, as a single-element array
scope :randomSaying, lambda { order('id DESC').sample(1) }
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/utils/recommendation_util.rb
|
module DomoscioRails
class RecommendationUtil < Resource
include DomoscioRails::HTTPCalls::Util
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails.rb
|
require 'net/https'
require 'cgi/util'
require 'multi_json'
# helpers
require 'domoscio_rails/version'
require 'domoscio_rails/json'
require 'domoscio_rails/errors'
require 'domoscio_rails/authorization_token'
# resources
require 'domoscio_rails/http_calls'
require 'domoscio_rails/resource'
require 'domoscio_rails/data/content.rb'
require 'domoscio_rails/data/event.rb'
require 'domoscio_rails/data/instance.rb'
require 'domoscio_rails/data/recommendation.rb'
require 'domoscio_rails/data/learning_session.rb'
require 'domoscio_rails/data/student.rb'
require 'domoscio_rails/knowledge/knowledge_edge.rb'
require 'domoscio_rails/knowledge/knowledge_graph.rb'
require 'domoscio_rails/knowledge/knowledge_node_content.rb'
require 'domoscio_rails/knowledge/knowledge_node_student.rb'
require 'domoscio_rails/knowledge/knowledge_node.rb'
require 'domoscio_rails/objective/objective_knowledge_node_student.rb'
require 'domoscio_rails/objective/objective_knowledge_node.rb'
require 'domoscio_rails/objective/objective_student.rb'
require 'domoscio_rails/objective/objective.rb'
require 'domoscio_rails/tag/tag_edge.rb'
require 'domoscio_rails/tag/tag_set.rb'
require 'domoscio_rails/tag/tag.rb'
require 'domoscio_rails/tag/tagging.rb'
require 'domoscio_rails/utils/gameplay_util.rb'
require 'domoscio_rails/utils/recommendation_util.rb'
require 'domoscio_rails/utils/review_util.rb'
module DomoscioRails
class Configuration
attr_accessor :root_url, :client_id, :client_passphrase, :temp_dir, :version
# Refers to AdaptiveEngine Version
def version
@version ||= 2
end
def root_url
@root_url ||= ""
end
end
class << self
attr_accessor :configuration
end
def self.configure
self.configuration ||= Configuration.new
yield configuration
end
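# Illustrative configuration (all values below are placeholders, not real credentials or hosts):
#
#   DomoscioRails.configure do |c|
#     c.root_url = 'https://adaptive-engine.example.com'
#     c.client_id = 1
#     c.client_passphrase = 'secret'
#     c.temp_dir = '/tmp/domoscio'
#   end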
def self.api_uri(url='')
URI(configuration.root_url + url)
end
#
# - +method+: HTTP method; lowercase symbol, e.g. :get, :post etc.
# - +url+: the part after Configuration#root_url
# - +params+: hash; entity data for creation, update etc.; will dump it by JSON and assign to Net::HTTPRequest#body
#
# Performs HTTP requests to the Adaptive Engine
# On token issues, will retry once with a new token, then will return a DomoscioRails::ResponseError with details
#
# Returns a DomoscioRails::ResponseError on an Adaptive Engine error status
# Returns a DomoscioRails::ProcessingError on an internal error
#
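# Illustrative call (the '/instances' path and the params are assumptions about the engine's routes):
#
#   DomoscioRails.request(:get, '/instances', per_page: 50)
#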
def self.request(method, url, params={})
store_tokens, headers = request_headers
params.merge!({'per_page': 2000}) unless params[:per_page]
uri = api_uri(url)
response = DomoscioRails.send_request(uri, method, params, headers)
return response if response.kind_of? DomoscioRails::ProcessingError
begin
raise_http_failure(uri, response, params)
data = DomoscioRails::JSON.load(response.body.nil? ? '' : response.body)
DomoscioRails::AuthorizationToken::Manager.storage.store({access_token: response['Accesstoken'], refresh_token: response['Refreshtoken']}) if store_tokens
rescue MultiJson::LoadError => exception
data = ProcessingError.new(uri, 500, exception, response.body, params)
rescue ResponseError => exception
data = exception
end
if response['Total']
pagetotal = (response['Total'].to_i / response['Per-Page'].to_f).ceil
for j in 2..pagetotal
response = DomoscioRails.send_request(uri, method, params.merge({page: j}), headers)
return response if response.kind_of? DomoscioRails::ProcessingError
begin
raise_http_failure(uri, response, params)
body = DomoscioRails::JSON.load(response.body.nil? ? '' : response.body)
data += body
data.flatten!
rescue MultiJson::LoadError => exception
return ProcessingError.new(uri, 500, exception, response.body, params)
rescue ResponseError => exception
return exception
end
end
end
data
end
private
# This function catches usual Http errors during calls
#
def self.send_request(uri, method, params, headers)
begin
response = perform_call(uri, method, params, headers)
response = retry_call_and_store_tokens(uri, method, params, headers) if ['401','403'].include? response.code
response
rescue Timeout::Error, Errno::EINVAL, Errno::ECONNREFUSED, Errno::ECONNRESET, EOFError, Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError, Net::ProtocolError => exception
ProcessingError.new(uri, 500, exception, response)
end
end
# This helper checks the response status and builds the corresponding DomoscioRails::ResponseError
#
def self.raise_http_failure(uri, response, params)
unless response.kind_of? Net::HTTPSuccess
if response.blank?
raise ResponseError.new(uri, 500, {error: {status: 500, message: 'AdaptiveEngine not available'}}, {}, params)
else
raise ResponseError.new(uri, response.code.to_i, DomoscioRails::JSON.load((response.body.nil? ? '' : response.body), :symbolize_keys => true), response.body, params)
end
end
end
# Actual HTTP call is performed here
#
def self.perform_call(uri, method, params, headers)
Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
req = Net::HTTP::const_get(method.capitalize).new(uri.request_uri, headers)
req.body = DomoscioRails::JSON.dump(params)
http.request req
end
end
# This method is called when AdaptiveEngine returns token errors
# On those errors we retry once with freshly requested tokens; the new tokens are then stored
def self.retry_call_and_store_tokens(uri, method, params, headers)
headers = request_new_tokens
response = perform_call(uri, method, params, headers)
DomoscioRails::AuthorizationToken::Manager.storage.store({access_token: response['Accesstoken'], refresh_token: response['Refreshtoken']})
response
end
def self.user_agent
@uname ||= get_uname
{
bindings_version: DomoscioRails::VERSION,
lang: 'ruby',
lang_version: "#{RUBY_VERSION} p#{RUBY_PATCHLEVEL} (#{RUBY_RELEASE_DATE})",
platform: RUBY_PLATFORM,
uname: @uname
}
end
def self.get_uname
`uname -a 2>/dev/null`.strip if RUBY_PLATFORM =~ /linux|darwin/i
rescue Errno::ENOMEM
'uname lookup failed'
end
# Loads and inspects the stored tokens,
# returning a token-store flag and the processed request headers
#
def self.request_headers
begin
auth_token = DomoscioRails::AuthorizationToken::Manager.get_token
if auth_token && auth_token[:access_token] && auth_token[:refresh_token]
[false, send_current_tokens(auth_token)]
else
[true, request_new_tokens]
end
rescue SyntaxError, StandardError
[true, request_new_tokens]
end
end
# If the stored tokens were successfully loaded we build the headers with them
#
def self.send_current_tokens(auth_token)
{
'user_agent' => "#{DomoscioRails.user_agent}",
'AccessToken' => "#{auth_token[:access_token]}",
'RefreshToken' => "#{auth_token[:refresh_token]}",
'Content-Type' => 'application/json'
}
end
# If we can't find tokens, or they are corrupted / expired, we set headers to request new ones
def self.request_new_tokens
{
'user_agent' => "#{DomoscioRails.user_agent}",
'Authorization' => "Token token=#{DomoscioRails.configuration.client_passphrase}",
'Content-Type' => 'application/json'
}
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/version.rb
|
module DomoscioRails
VERSION = "0.3.8a"
end
|
Celumproject/domoscio_rails_v2
|
spec/spec_helper.rb
|
require_relative '../lib/domoscio_rails'
require_relative './lib/domoscio_rails/shared_resources'
require 'fileutils'
require 'pp'
require 'active_support/all'
def reset_domoscio_rails_configuration
DomoscioRails.configure do |c|
c.client_id = 14
c.client_passphrase = '<PASSWORD>'#
c.temp_dir = File.expand_path('../tmp', __FILE__)
FileUtils.mkdir_p(c.temp_dir) unless File.directory?(c.temp_dir)
end
end
reset_domoscio_rails_configuration
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/http_calls.rb
|
module DomoscioRails
module HTTPCalls
module Create
module ClassMethods
def create(*id, params)
id = id.empty? ? nil : id[0]
DomoscioRails.request(:post, url(id), params)
end
end
def self.included(base)
base.extend(ClassMethods)
end
end
module Update
module ClassMethods
def update(id = nil, params = {})
DomoscioRails.request(:put, url(id), params)
end
end
def self.included(base)
base.extend(ClassMethods)
end
end
module UpdateSelf
module ClassMethods
def update_self(params = {})
DomoscioRails.request(:put, url(nil, nil, true), params)
end
end
def self.included(base)
base.extend(ClassMethods)
end
end
module Fetch
module ClassMethods
def fetch(id = nil, params = {})
DomoscioRails.request(:get, url(id), params)
end
end
def self.included(base)
base.extend(ClassMethods)
end
end
module Destroy
module ClassMethods
def destroy(id = nil, params = {})
DomoscioRails.request(:delete, url(id), params)
end
end
def self.included(base)
base.extend(ClassMethods)
end
end
module Util
module ClassMethods
def util(id = nil, util_name = nil, params = {})
DomoscioRails.request(:get, url(id, util_name), params)
end
def util_post(id = nil, util_name = nil, params = {})
DomoscioRails.request(:post, url(id, util_name), params)
end
end
def self.included(base)
base.extend(ClassMethods)
end
end
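# Illustrative use: a resource class mixes in only the verbs it supports, e.g.
# (the url helper is assumed to come from Resource and to build the endpoint path;
# the id 42 is a placeholder)
#
#   class Recommendation < Resource
#     include DomoscioRails::HTTPCalls::Fetch
#   end
#
#   DomoscioRails::Recommendation.fetch        # GET the collection
#   DomoscioRails::Recommendation.fetch(42)    # GET a single record by id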
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/objective/objective_knowledge_node_student.rb
|
module DomoscioRails
class ObjectiveKnowledgeNodeStudent < Resource
include DomoscioRails::HTTPCalls::Create
include DomoscioRails::HTTPCalls::Fetch
include DomoscioRails::HTTPCalls::Destroy
include DomoscioRails::HTTPCalls::Update
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/data/learning_session.rb
|
module DomoscioRails
class LearningSession < Resource
include DomoscioRails::HTTPCalls::Create
include DomoscioRails::HTTPCalls::Fetch
include DomoscioRails::HTTPCalls::Destroy
include DomoscioRails::HTTPCalls::Update
include DomoscioRails::HTTPCalls::Util
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/data/recommendation.rb
|
module DomoscioRails
class Recommendation < Resource
include DomoscioRails::HTTPCalls::Fetch
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/data/instance.rb
|
module DomoscioRails
class Instance < Resource
include DomoscioRails::HTTPCalls::Create
include DomoscioRails::HTTPCalls::Fetch
include DomoscioRails::HTTPCalls::UpdateSelf
include DomoscioRails::HTTPCalls::Destroy
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/errors.rb
|
module DomoscioRails
# Generic error superclass for DomoscioRails-specific errors.
class Error < StandardError
end
# Error Message from AdaptiveEngine
class ResponseError < Error
attr_reader :request_url, :code, :details, :body, :request_params
def initialize(request_url, code, details = {}, body = nil, request_params = {})
@request_url, @code, @details, @body, @request_params = request_url, code, details, body, request_params
super(message) if message
end
def message; (@details.is_a?(Hash) && @details[:error].is_a?(Hash)) ? @details.dig(:error, :message) : @details; end
end
# ProcessingError from DomoscioRails
class ProcessingError < Error
attr_reader :request_url, :code, :details, :body, :request_params
def initialize(request_url, code, details = {}, body = nil, request_params = {})
@request_url, @code, @details, @body, @request_params = request_url, code, details, body, request_params
super(message) if message
end
def message; @details.message; end
end
end
|
Celumproject/domoscio_rails_v2
|
lib/domoscio_rails/utils/gameplay_util.rb
|
module DomoscioRails
class GameplayUtil < Resource
include DomoscioRails::HTTPCalls::Util
end
end
|
xattacker/RxRequiredPropertyChecker
|
RxRequiredPropertyChecker.podspec
|
Pod::Spec.new do |s|
s.name = 'RxRequiredPropertyChecker'
s.version = '1.0.11'
s.license = 'MIT'
s.summary = 'a RxSwift Related component'
s.homepage = 'https://github.com/xattacker/RxRequiredPropertyChecker'
s.authors = { 'Xattacker' => '<EMAIL>' }
s.source = { :git => 'https://github.com/xattacker/RxRequiredPropertyChecker.git', :tag => s.version.to_s }
s.ios.deployment_target = '10.0'
s.swift_version = '5.0'
s.requires_arc = true
s.source_files = 'RxRequiredPropertyChecker/Sources/*.swift'
s.dependency "RxSwift"
s.dependency "RxCocoa"
end
|
khaled/mongoid_acts_as_tree
|
test/models/ordered_category.rb
|
require "mongoid"
require "mongoid/acts_as_tree"
class OrderedCategory
include Mongoid::Document
include Mongoid::Acts::Tree
field :name, :type => String
field :value, :type => Integer
acts_as_tree :order => [['value', 'asc']]
end
|
khaled/mongoid_acts_as_tree
|
test/models/category.rb
|
require "mongoid"
require "mongoid/acts_as_tree"
class Category
include Mongoid::Document
include Mongoid::Acts::Tree
field :name, :type => String
acts_as_tree
end
|
khaled/mongoid_acts_as_tree
|
test/test_order.rb
|
require 'helper'
require 'set'
class TestMongoidActsAsTree < Test::Unit::TestCase
context "Ordered tree" do
setup do
@root_1 = OrderedCategory.create(:name => "Root 1", :value => 2)
@child_1 = OrderedCategory.create(:name => "Child 1", :value => 1)
@child_2 = OrderedCategory.create(:name => "Child 2", :value => 9)
@child_2_1 = OrderedCategory.create(:name => "Child 2.1", :value => 2)
@child_3 = OrderedCategory.create(:name => "Child 3", :value => 5)
@root_2 = OrderedCategory.create(:name => "Root 2", :value => 1)
@root_1.children << @child_1
@root_1.children << @child_2
@root_1.children << @child_3
@child_2.children << @child_2_1
end
should "be in order" do
assert_equal OrderedCategory.roots.to_a, [@root_2, @root_1]
assert_equal @root_1.children, [@child_1, @child_3, @child_2]
assert_equal @root_1.descendants, [@child_1, @child_2_1, @child_3, @child_2]
assert_equal @root_1.self_and_descendants, [@root_1, @child_1, @child_2_1, @child_3, @child_2]
assert_equal @child_2.siblings, [@child_1, @child_3]
assert_equal @child_2.self_and_siblings, [@child_1, @child_3, @child_2]
assert_equal @root_1.self_and_siblings, [@root_2, @root_1]
end
end
end
|
khaled/mongoid_acts_as_tree
|
test/models/sub_category_2.rb
|
class SubCategory2 < SubCategory
end
|
khaled/mongoid_acts_as_tree
|
test/helper.rb
|
require 'rubygems'
require 'test/unit'
require 'shoulda'
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'mongoid'
Mongoid.load!(File.join(File.dirname(__FILE__), "mongoid.yml"), :test)
Dir["#{File.dirname(__FILE__)}/models/*.rb"].each {|file| require file}
class Test::Unit::TestCase
# Drop all collections after each test case.
def teardown
Mongoid.default_session.collections.each(&:drop)
end
# Make sure that each test case has a teardown
# method to clear the db after each test.
def inherited(base)
base.define_method teardown do
super
end
end
def eql_arrays?(first, second)
first.map{|i| i._id}.to_set == second.map{|i| i._id}.to_set
end
end
|
khaled/mongoid_acts_as_tree
|
lib/mongoid/acts_as_tree.rb
|
require "mongoid"
require "mongoid/acts/tree/fields"
require "mongoid/acts/tree/children"
module Mongoid
module Acts
module Tree
def self.included(model)
model.class_eval do
extend InitializerMethods
end
end
module InitializerMethods
def acts_as_tree(options = {})
options = {
:parent_id_field => "parent_id",
:path_field => "path",
:depth_field => "depth",
:class => self
}.merge(options)
class_attribute :acts_as_tree_options
self.acts_as_tree_options = options
include InstanceMethods
include Fields
extend Fields
extend ClassMethods
field parent_id_field, :type => Moped::BSON::ObjectId
field path_field, :type => Array, :default => []
field depth_field, :type => Integer, :default => 0
# These indexes are recommended for classes that use this module:
# index parent_id_field
# index path_field
self.class_eval do
define_method "#{parent_id_field}=" do | new_parent_id |
if new_parent_id.present?
parent = parent_cursor(new_parent_id).only(path_field, depth_field).one
self.write_attribute parent_id_field, parent.id
self.set_parent_information(parent)
else
self.write_attribute parent_id_field, nil
self[path_field] = []
self[depth_field] = 0
end
end
end
before_validation :set_position_information, :if => lambda { |obj|
# TODO: Not a fan of this, but mongoid does not seem to be correctly honoring :on => :create/:update
(obj.new_record? && obj[self.parent_id_field].present?) or (!obj.new_record? && obj["#{self.parent_id_field}_changed?".to_sym])
}
#before_validation :set_position_information#, :on => :create, :unless => lambda { |obj| obj[self.parent_id_field].blank? }
#before_validation :set_position_information, :on => :update, :if => lambda { |obj| obj["#{self.parent_id_field}_changed?".to_sym] }
validate :will_save_tree
after_save :move_children
before_destroy :destroy_descendants
end
end
module ClassMethods
def roots
self.where(parent_id_field => nil).order_by tree_order
end
end
module InstanceMethods
def [](field_name)
self.send field_name
end
def []=(field_name, value)
self.send "#{field_name}=", value
end
def ==(other)
return true if other.equal?(self)
return true if other.kind_of?(acts_as_tree_options[:class]) and other._id == self._id
false
end
def will_save_tree
if @_cyclic
errors.add(:base, "Can't be children of a descendant")
end
end
def parent
@_parent or (self[parent_id_field].nil? ? nil : parent_cursor.one)
end
def parent=(new_parent)
self.send("#{parent_id_field}=".to_sym, new_parent.id)
end
def root?
self[parent_id_field].nil?
end
def root_id
self[path_field].first
end
def root
self[path_field].first.nil? ? self : acts_as_tree_options[:class].find(self[path_field].first)
end
def ancestors
return [] if root?
acts_as_tree_options[:class].where(:_id.in => self[path_field]).order_by("#{depth_field} ASC")
end
def self_and_ancestors
ancestors << self
end
def siblings
acts_as_tree_options[:class].where(:_id.ne => self._id, parent_id_field => self[parent_id_field]).order_by tree_order
end
def self_and_siblings
acts_as_tree_options[:class].where(parent_id_field => self[parent_id_field]).order_by tree_order
end
def children
Children.new self, acts_as_tree_options[:class]
end
def children=(new_children_list)
self.children.replace_with(new_children_list)
end
alias replace children=
def descendants
return [] if new_record?
self.class.all_in(path_field => [self._id]).order_by tree_order
end
def self_and_descendants
[self] + self.descendants
end
def is_ancestor_of?(other)
other[path_field].include?(self._id)
end
def is_or_is_ancestor_of?(other)
(other == self) or is_ancestor_of?(other)
end
def is_descendant_of?(other)
self[path_field].include?(other._id)
end
def is_or_is_descendant_of?(other)
(other == self) or is_descendant_of?(other)
end
def is_sibling_of?(other)
(other != self) and (other[parent_id_field] == self[parent_id_field])
end
def is_or_is_sibling_of?(other)
(other == self) or is_sibling_of?(other)
end
def move_children
if @_will_move
@_will_move = false
self.children.each do | child |
child.set_position_information
child.save
end
@_will_move = true
end
end
def destroy_descendants
self.descendants.each(&:destroy)
end
def set_position_information
if parent.present? && parent.already_exists_in_tree?(self)
self.instance_variable_set :@_cyclic, true
else
self.update_position_information
end
end
def update_position_information
@_will_move = true
parent.nil? ? self.clear_parent_information : self.set_parent_information
end
def clear_parent_information
self.write_attribute parent_id_field, nil
self[path_field] = []
self[depth_field] = 0
end
def clear_parent_information!
self.clear_parent_information
self.save
end
def set_parent_information(parent=self.parent)
self.write_attribute parent_id_field, parent._id
self[path_field] = parent[path_field] + [parent._id]
self[depth_field] = parent[depth_field] + 1
end
def already_exists_in_tree?(root)
tree_ids = root.class.collection.find(root.path_field => root.id).select(_id: 1).map { |x| x['_id'] } + [ root.id ]
tree_ids.include?(self.id)
end
private
def parent_cursor(parent_id=self[parent_id_field])
acts_as_tree_options[:class].where(:_id => parent_id)
end
end
end
end
end
|
khaled/mongoid_acts_as_tree
|
test/test_children.rb
|
require 'helper'
require 'set'
class TestMongoidActsAsTree < Test::Unit::TestCase
context "Create Children Criteria" do
setup do
@category = Category.create(:name => "Root 2")
@children = Mongoid::Acts::Tree::Children.new(@category, Category)
end
should "have initialized Children subtype of Mongoid::Criteria" do
assert_instance_of Mongoid::Acts::Tree::Children, @children
assert_kind_of Mongoid::Criteria, @children
end
should "have set the selector" do
expected_selector = { "parent_id" => @category.id }
assert_equal expected_selector, @children.selector
end
should "have defaulted the ordering to nothing" do
assert_equal Array.new, @children.sort({}).to_a
end
end
context "Build Child" do
setup do
@root = Category.create(:name => "Root")
@child = @root.children.build(:name => "Child 1")
end
should "set @root as the parent for the child" do
assert_equal @root, @child.parent
end
should "not have saved @child1 into @root's children" do
assert_equal Array.new, @root.children.to_a
end
should "still have zero children" do
assert_equal 0, @root.children.size
assert_equal 0, @root.children.count
end
should "not have saved @child" do
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child.id) }
end
end
context "Create Child" do
setup do
@root = Category.create(:name => "Root")
@child = @root.children.create(:name => "Child 1")
end
should "set @root as the parent for the child" do
assert_equal @root, @child.parent
end
should "have saved @child1 into @root's children" do
assert_equal [ @child ], @root.children.to_a
end
should "have 1 child" do
assert_equal 1, @root.children.size
assert_equal 1, @root.children.count
end
end
context "Add Created Child via <<" do
setup do
@root = Category.create(:name => "The Root")
@child = Category.create(:name => "of All Evil")
@root.children << @child
end
should "have updated @child to have @root_1 as it's parent" do
assert_equal @root, @child.parent
assert_equal @root, @child.reload.parent
end
should "be able to access @child through children of @root" do
assert_equal [ @child ], @root.children.to_a
end
should "have 1 child" do
assert_equal 1, @root.children.size
assert_equal 1, @root.children.count
end
end
context "Cyclic Children" do
context "Root adding to Root" do
setup do
@root = Category.create(:name => "Root")
end
should "not save under itself" do
assert_equal false, (@root.children << @root)
end
end
end
context "Replace Children" do
setup do
@root = Category.create(:name => "The Root")
@child1 = @root.children.create(:name => "The Kid 1")
@child2 = @root.children.create(:name => "The Kid 2")
@kid = Category.create(:name => "Another Kid")
end
should "start out with 2 children" do
assert_equal [ @child1, @child2 ], @root.children.to_a
end
should "remove all if passed an empty Array" do
assert @root.children.replace_with([])
assert_equal [], @root.reload.children.to_a
end
should "remove @child2 if 'replaced' with only @child1" do
assert @root.children.replace_with([@child1])
assert_equal [ @child1 ], @root.reload.children.to_a
end
should "add @kid if all are passed in" do
assert @root.children.replace_with([@child1, @child2, @kid])
assert_equal [ @child1, @child2, @kid ], @root.reload.children.to_a
end
should "add @kid and remove @child1" do
assert @root.children.replace_with([@kid, @child2])
assert_equal [ @child2, @kid ], @root.reload.children.to_a
end
end
end
|
khaled/mongoid_acts_as_tree
|
lib/mongoid/acts/tree/fields.rb
|
module Mongoid
module Acts
module Tree
module Fields
def parent_id_field
acts_as_tree_options[:parent_id_field]
end
def path_field
acts_as_tree_options[:path_field]
end
def depth_field
acts_as_tree_options[:depth_field]
end
def tree_order
acts_as_tree_options[:order] or []
end
end
end
end
end
|
khaled/mongoid_acts_as_tree
|
lib/mongoid/acts/tree/children.rb
|
module Mongoid
module Acts
module Tree
class Children < Mongoid::Criteria
def initialize(owner, tree_base_class)
@parent = owner
@tree_base_class = tree_base_class
super(tree_base_class)
self.merge!(tree_base_class.where(@parent.parent_id_field => @parent.id).order_by(@parent.tree_order))
end
alias_method :size, :count
def build(attributes)
child = @parent.class.new(attributes)
child.parent = @parent
child
end
def create(attributes)
child = self.build(attributes)
child.save
child
end
def <<(object)
object.parent = @parent
object.save
end
alias push <<
def replace_with(new_objects)
new_object_ids = new_objects.collect(&:id)
existing_objects = self.to_a
existing_object_ids = existing_objects.collect(&:id)
self.to_a.each { |existing| existing.clear_parent_information! unless new_object_ids.include?(existing.id) }
new_objects.each { |new_object| self << new_object unless existing_object_ids.include?(new_object.id) }
end
#Clear children list
def clear!
self.each(&:destroy)
end
end # Children
end # Tree
end # Acts
end # Mongoid
|
khaled/mongoid_acts_as_tree
|
test/test_tree.rb
|
require 'helper'
require 'set'
$verbose = false
class TestMongoidActsAsTree < Test::Unit::TestCase
context "Tree" do
setup do
@root_1 = Category.create(:name => "Root 1")
@child_1 = Category.create(:name => "Child 1")
@child_2 = Category.create(:name => "Child 2")
@child_2_1 = SubCategory.create(:name => "Child 2.1")
@child_2_1_1 = SubCategory2.create(:name => "Sub Child 2.1.1")
@child_3 = SubCategory.create(:name => "Child 3")
@root_2 = Category.create(:name => "Root 2")
@root_1.children << @child_1
@root_1.children << @child_2
@root_1.children << @child_3
@child_2.children << @child_2_1
@child_2_1.children << @child_2_1_1
end
should "have 3 Children for @root_1" do
assert_equal 3, @root_1.children.count
end
should "have 0 Children for @root_2" do
assert_equal 0, @root_2.children.count
end
should "have 0 Children for @child_1" do
assert_equal 0, @child_1.children.count
end
should "have 1 Child for @child_2" do
assert_equal 1, @child_2.children.count
end
should "have set @child_2's descendants" do
assert_equal [ @child_2_1, @child_2_1_1 ], @child_2.descendants.to_a
end
should "not have a parent for the root nodes" do
assert_nil @root_1.parent
assert_nil @root_2.parent
end
should "have parents for the child nodes" do
assert_equal @root_1, @child_1.parent
assert_equal @child_2, @child_2_1.parent
assert_equal @child_2_1, @child_2_1_1.parent
end
should "have roots" do
assert_same_elements [ @root_1, @root_2 ], Category.roots.to_a
end
should "assign parent_id" do
child = Category.create :name => 'child'
parent = Category.create :name => 'parent'
child.parent_id = parent.id
child.save
assert_equal [ child ], parent.children.to_a
assert_equal parent.children.first.id, child.id
assert_equal parent.id, child.parent_id
assert parent.children.include? child
assert_equal 1, child.depth
assert_equal [parent.id], child.path
more_deep_child = Category.new(
:name => 'more deep child',
:parent_id => child.id
)
assert more_deep_child.new_record?
assert more_deep_child.save
assert !more_deep_child.new_record?
assert_equal child.children.first.id, more_deep_child.id
assert_equal child.id, more_deep_child.parent_id
assert child.children.include? more_deep_child
assert_equal 2, more_deep_child.depth
assert_equal [parent.id, child.id], more_deep_child.path
assert parent.descendants.include? child
assert parent.descendants.include? more_deep_child
assert more_deep_child.ancestors.include? child
assert more_deep_child.ancestors.include? parent
end
should "assign blank parent_id" do
@child_1.parent_id = ''
@child_1.save
assert_nil @child_1.reload.parent_id
assert_equal 0, @child_1.depth
assert_equal [], @child_1.path
@child_1.parent_id = nil
@child_1.save
assert_nil @child_1.reload.parent_id
assert_equal 0, @child_1.depth
assert_equal [], @child_1.path
end
should "replace children list" do
new_children_list = [ Category.create(:name => "test 1"), Category.create(:name => "test 2") ]
@root_1.children = new_children_list
assert_equal new_children_list, @root_1.children.to_a
@root_1.children = []
assert_equal [], @root_1.children.to_a
end
context "Destroying a Childless Top Level Node" do
setup do
@child_1.destroy
end
should "reduce the size of @root_1's children to 2" do
assert_equal 2, @root_1.children.count
end
should "no longer show in Children" do
assert_equal [ @child_2, @child_3 ], @root_1.children.to_a
end
end
context "Destroying a Sub Level Node with Children" do
setup do
@child_2_1.destroy
end
should "not reduce @root_1's children count" do
assert_equal 3, @root_1.children.count
end
should "reduce @child_2's children count" do
assert_equal 0, @child_2.children.count
end
should "have destroyed it's children" do
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child_2_1.id) }
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child_2_1_1.id) }
end
end
context "Clear Children List" do
setup do
@root_1.children.clear!
end
should "have 0 children" do
assert_equal 0, @root_1.children.size
end
should "have destroyed all the children" do
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child_1.id) }
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child_2.id) }
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child_2_1.id) }
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child_2_1_1.id) }
assert_raise(Mongoid::Errors::DocumentNotFound) { Category.find(@child_3.id) }
end
end
context "node" do
should "have a root" do
assert_equal @root_1.root, @root_1
assert_not_equal @root_1.root, @root_2.root
assert_equal @root_1, @child_2_1.root
end
should "have ancestors" do
assert_equal @root_1.ancestors, []
assert_equal @child_2.ancestors, [@root_1]
assert_equal @child_2_1.ancestors, [@root_1, @child_2]
assert_equal @root_1.self_and_ancestors, [@root_1]
assert_equal @child_2.self_and_ancestors, [@root_1, @child_2]
assert_equal @child_2_1.self_and_ancestors, [@root_1, @child_2, @child_2_1]
end
should "have siblings" do
assert eql_arrays?(@root_1.siblings, [@root_2])
assert eql_arrays?(@child_2.siblings, [@child_1, @child_3])
assert eql_arrays?(@child_2_1.siblings, [])
assert eql_arrays?(@root_1.self_and_siblings, [@root_1, @root_2])
assert eql_arrays?(@child_2.self_and_siblings, [@child_1, @child_2, @child_3])
assert eql_arrays?(@child_2_1.self_and_siblings, [@child_2_1])
end
should "set depth" do
assert_equal 0, @root_1.depth
assert_equal 1, @child_1.depth
assert_equal 2, @child_2_1.depth
assert_equal 3, @child_2_1_1.depth
end
should "have children" do
assert_same_elements [ @child_1, @child_2, @child_3 ], @root_1.children.to_a
end
should "have descendants" do
assert_same_elements [ @child_1, @child_2, @child_3, @child_2_1, @child_2_1_1 ], @root_1.descendants.to_a
assert_same_elements [ @child_2_1, @child_2_1_1 ], @child_2.descendants.to_a
assert @child_2_1_1.descendants.empty?
assert_same_elements [ @root_1, @child_1, @child_2, @child_3, @child_2_1, @child_2_1_1 ], @root_1.self_and_descendants.to_a
assert_same_elements [ @child_2, @child_2_1, @child_2_1_1 ], @child_2.self_and_descendants.to_a
assert_same_elements [ @child_2_1, @child_2_1_1 ], @child_2_1.self_and_descendants.to_a
assert_same_elements [ @child_2_1_1 ], @child_2_1_1.self_and_descendants.to_a
end
should "be able to tell if ancestor" do
assert @root_1.is_ancestor_of?(@child_1)
assert !@root_2.is_ancestor_of?(@child_2_1)
assert !@child_2.is_ancestor_of?(@child_2)
assert @root_1.is_or_is_ancestor_of?(@child_1)
assert !@root_2.is_or_is_ancestor_of?(@child_2_1)
assert @child_2.is_or_is_ancestor_of?(@child_2)
end
should "be able to tell if descendant" do
assert !@root_1.is_descendant_of?(@child_1)
assert @child_1.is_descendant_of?(@root_1)
assert !@child_2.is_descendant_of?(@child_2)
assert !@root_1.is_or_is_descendant_of?(@child_1)
assert @child_1.is_or_is_descendant_of?(@root_1)
assert @child_2.is_or_is_descendant_of?(@child_2)
end
should "be able to tell if sibling" do
assert !@root_1.is_sibling_of?(@child_1)
assert !@child_1.is_sibling_of?(@child_1)
assert !@child_2.is_sibling_of?(@child_2)
assert !@root_1.is_or_is_sibling_of?(@child_1)
assert @child_1.is_or_is_sibling_of?(@child_2)
assert @child_2.is_or_is_sibling_of?(@child_2)
end
context "when moving" do
should "recalculate path and depth" do
@child_2.children << @child_3
assert @child_2.is_or_is_ancestor_of?(@child_3)
assert @child_3.is_or_is_descendant_of?(@child_2)
assert @child_2.children.include?(@child_3)
assert @child_2.descendants.include?(@child_3)
assert @child_2_1.is_or_is_sibling_of?(@child_3)
assert_equal 2, @child_3.depth
end
should "move children on save" do
@root_2.children << @child_2
@child_2_1.reload
assert @root_2.is_or_is_ancestor_of?(@child_2_1)
assert @child_2_1.is_or_is_descendant_of?(@root_2)
assert @root_2.descendants.include?(@child_2_1)
end
should "check against cyclic graph" do
assert_equal false, (@child_2_1.children << @root_1)
assert_equal [ "Can't be children of a descendant" ], @root_1.errors[:base]
end
end
should "destroy descendants when destroyed" do
@child_2.destroy
assert_nil Category.where(:id => @child_2_1._id).first
end
end
end
context "A Root" do
setup do
@root = Category.create(:name => "The Root")
end
should "know it is a root" do
assert @root.root?
end
should "return itself for :root" do
assert_equal @root, @root.root
end
context "new Leaf" do
setup do
@leaf = Category.new(:name => 'A Leaf', :parent_id => @root.id)
end
should "not think it is a root" do
assert !@leaf.root?
end
should "have @root as it's :parent" do
assert_equal @root, @leaf.parent
end
should "have @root as its :root" do
assert_equal @root, @leaf.root
end
context "is saved" do
setup do
@leaf.save!
@root.reload
end
should "still not think it is a root" do
assert !@leaf.root?
end
should "still have @root as it's :parent" do
assert_equal @root, @leaf.parent
end
should "still have @root as its :root" do
assert_equal @root, @leaf.root
end
end
end
end
end
|
khaled/mongoid_acts_as_tree
|
test/models/sub_category.rb
|
class SubCategory < Category
end
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
breadcrumbs_rails.gemspec
|
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'breadcrumbs_rails/version'
Gem::Specification.new do |spec|
spec.name = 'breadcrumbs_rails'
spec.version = BreadcrumbsRails::VERSION
spec.authors = %w(EdCordata)
spec.summary = %q{Rails breadcrumbs gem}
spec.description = %q{Rails breadcrumbs gem}
spec.homepage = 'https://github.com/EdCordata-Ruby-Gems/breadcrumbs_rails'
spec.metadata = {
'documentation_uri' => 'https://github.com/EdCordata-Ruby-Gems/breadcrumbs_rails/blob/master/readme.md',
'source_code_uri' => 'https://github.com/EdCordata-Ruby-Gems/breadcrumbs_rails',
'bug_tracker_uri' => 'https://github.com/EdCordata-Ruby-Gems/breadcrumbs_rails/issues'
}
spec.license = 'CC BY 4.0'
spec.files = `git ls-files`.split($/)
spec.require_paths = %w(lib)
spec.required_ruby_version = '>= 1.9.3'
spec.add_development_dependency 'rake'
spec.add_development_dependency 'bundler'
spec.add_development_dependency 'rails', '>= 3.0'
end
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
lib/breadcrumbs_rails/breadcrumb.rb
|
module BreadcrumbsRails
class Breadcrumb
attr_accessor :name, :name_string,
:path, :path_string,
:localize, :locale
def initialize(name_string: nil, path_string: nil, localize: nil, locale: nil)
@locale = locale || 'en'
@localize = localize || false
@name_string = name_string
@name = generate_name
@path_string = path_string
@path = generate_path
end
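# Illustrative construction (:root_path is assumed to be a route helper defined by the host app):
#
#   BreadcrumbsRails::Breadcrumb.new(name_string: 'Home', path_string: :root_path)
#
# #path resolves through Rails url_helpers when path_string names a helper,
# otherwise the string is used verbatim.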
private
def generate_name
if @localize
::I18n.t(@name_string, locale: @locale, default: @name_string)
else
@name_string
end
end
def generate_path
url_helpers = ::Rails.application.routes.url_helpers
if url_helpers.methods.include?(@path_string.to_s.to_sym)
url_helpers.send(@path_string)
else
@path_string
end
end
end
end
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
lib/generators/breadcrumbs/config_generator.rb
|
module Breadcrumbs
module Generators
class ConfigGenerator < Rails::Generators::Base
source_root ::File.expand_path(::File.join(::File.dirname(__FILE__), 'templates/config/initializers'))
def copy_config_file
template 'breadcrumbs_config.rb', 'config/initializers/breadcrumbs.rb'
end
end
end
end
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
lib/generators/breadcrumbs/views_generator.rb
|
module Breadcrumbs
module Generators
class ViewsGenerator < ::Rails::Generators::NamedBase
source_root ::File.expand_path(::File.join(::File.dirname(__FILE__), 'templates/app/views/breadcrumbs'))
desc 'Template engine for the views. Available options are "erb", "haml".'
class_option :format, type: :string, default: 'all'
def copy_default_views
case options['format'].try(:to_s).try(:downcase)
when 'haml'
copy_file("_#{@name}.html.haml", 'app/views/breadcrumbs/_breadcrumbs.html.haml')
when 'erb'
copy_file("_#{@name}.html.erb", 'app/views/breadcrumbs/_breadcrumbs.html.erb')
else
copy_file("_#{@name}.html.haml", 'app/views/breadcrumbs/_breadcrumbs.html.haml')
copy_file("_#{@name}.html.erb", 'app/views/breadcrumbs/_breadcrumbs.html.erb')
end
end
end
end
end
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
lib/breadcrumbs_rails.rb
|
require 'breadcrumbs_rails/version'
require 'breadcrumbs_rails/railtie'
require 'breadcrumbs_rails/breadcrumb'
require 'breadcrumbs_rails/breadcrumbs'
module BreadcrumbsRails
extend ActiveSupport::Concern
# ----------------------------------------------------------
included do |base|
# On older Rails, expose before_action as an alias of before_filter
unless base.respond_to?(:before_action)
base.singleton_class.send(:alias_method, :before_action, :before_filter)
end
helper_method :render_breadcrumbs_title, :render_breadcrumbs
def add_breadcrumb(name, options = {})
self.breadcrumbs_gem.breadcrumbs << ::BreadcrumbsRails::Breadcrumb.new(
name_string: name,
path_string: options[:path],
locale: options[:locale],
localize: options[:localize],
)
self.breadcrumbs_gem.localize = options[:localize] if options.has_key?(:localize)
self.breadcrumbs_gem.locale = options[:locale] if options.has_key?(:locale)
self.breadcrumbs_gem.scope = options[:scope] if options.has_key?(:scope)
self.breadcrumbs_gem.title_string = options[:title] if options.has_key?(:title)
end
def breadcrumbs_gem
@breadcrumbs_gem ||= ::BreadcrumbsRails::Breadcrumbs.new
end
def render_breadcrumbs_title(default = nil)
breadcrumbs_gem.title || default
end
def render_breadcrumbs(template: nil, &block)
if block_given?
yield(breadcrumbs_gem)
else
if template.nil?
partial = 'breadcrumbs/breadcrumbs'
partial = (breadcrumbs_gem.scope + '/' + partial) unless breadcrumbs_gem.scope.nil?
else
partial = "breadcrumbs/#{template}"
end
partial += '.html'
render(partial: partial,
locals: { breadcrumbs: breadcrumbs_gem },
handlers: [:erb, :haml])
end
end
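# Illustrative view usage (the 'custom' partial name is hypothetical):
#
#   <%= render_breadcrumbs %>                       renders app/views/breadcrumbs/_breadcrumbs.html.*
#   <%= render_breadcrumbs(template: 'custom') %>   renders app/views/breadcrumbs/_custom.html.*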
end
# ----------------------------------------------------------
# ----------------------------------------------------------
class_methods do
def add_breadcrumb(name, options = {})
before_action(options) do |controller|
controller.send(:add_breadcrumb, name, options)
end
end
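# Illustrative controller usage (ProductsController, :root_path and :products_path are assumptions;
# the options hash is forwarded both to before_action and to the per-request breadcrumb helper):
#
#   class ProductsController < ApplicationController
#     include BreadcrumbsRails
#     add_breadcrumb 'Home', path: :root_path
#     add_breadcrumb 'Products', path: :products_path, only: [:index, :show]
#   end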
end
# ----------------------------------------------------------
end
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
lib/generators/breadcrumbs/templates/config/initializers/breadcrumbs_config.rb
|
# breadcrumbs config coming soon
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
lib/breadcrumbs_rails/railtie.rb
|
module BreadcrumbsRails
class Railtie < Rails::Railtie
ActiveSupport.on_load(:action_controller) do
views_path = "#{File.dirname(__FILE__)}/../generators/breadcrumbs/templates/app/views"
::ActionController::Base.append_view_path(views_path)
end
end
end
|
EdCordata-Ruby-Gems/breadcrumbs_rails
|
lib/breadcrumbs_rails/breadcrumbs.rb
|
module BreadcrumbsRails
class Breadcrumbs
attr_accessor :breadcrumbs,
:scope, :format,
:title, :title_string,
:localize, :locale
def initialize(breadcrumbs: [], title_string: nil, scope: nil, format: nil, localize: nil, locale: nil)
@breadcrumbs = breadcrumbs || []
@format = format || 'html'
@scope = scope
@title_string = title_string
@title = generate_title
@localize = localize || false
@locale = locale || 'en'
end
def title_string=(title_string)
@title_string = title_string
@title = generate_title
end
private
def generate_title
if @localize
::I18n.t(@title_string, locale: @locale, default: @title_string)
else
@title_string
end
end
end
end
|
shurunxuan/vgm_ripping
|
demux/voxhound/voxhound.rb
|
puts 'voxhound 0.3 by hcs'
def valid_frame?(frame, idx)
# TODO: generally we have expectations and it would
# be nice to check them:
# 4 at the beginning of mono from MFAudio
# 6 at the beginning of stereo from MFAudio
# 7 at the end of a stream from MFAudio (after usable data)
# 2 in the original VOX
# 0 otherwise
flags = frame[idx+1].ord
(frame[idx+0].ord < 0x50 && (flags == 6 || flags == 4 || flags == 2 || flags == 0))
end
if ARGV[0] == '-r'
mode = 'read'
vox_fn = ARGV[1]
vag_fn = ARGV[2]
vox = File.open vox_fn, mode:'rb'
vag = File.open vag_fn, mode:'wb'
puts "extracting audio from #{vox_fn} to #{vag_fn}"
elsif ARGV[0] == '-w'
mode = 'write'
vox_fn = ARGV[1]
vag_fn = ARGV[2]
vox = File.open vox_fn, mode:'r+b'
vag = File.open vag_fn, mode:'rb'
puts "replacing audio in #{vox_fn} with #{vag_fn}"
else
puts <<-HERE
usage:
extract audio: voxhound -r original.vox output.vag
replace audio: voxhound -w original.vox new_audio.vag
HERE
exit
end
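# Each block starts with a 32-bit LE header: the low byte is the block type and
# the upper 24 bits are the block size (including the 4-byte header itself).
# Type 0xF0 marks the end of the file (only zero padding follows), type 1 carries
# audio as 16-byte frames; all other block types are skipped.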
until vox.eof?
# read block header
offset = vox.tell
header = vox.read(4).unpack('L<')[0]
type = header & 0xFF
size = header >> 8
size >= 4 or raise '%08x: block size %x too small for header' % [offset, size]
body_size = size - 4
if type == 0xF0
# end of file, only padding remains
until vox.eof?
vox.read(1).ord == 0 or raise 'nonzero padding after F0 block'
end
elsif type == 1
# audio block
body_size % 16 == 0 or raise 'body not evenly divided into frames'
frames = body_size / 16
if mode == 'read'
body = vox.read body_size
body != nil && body.length == body_size or raise 'short read'
frames.times { |i|
valid_frame? body, 16*i or raise '%08x: doesn\'t look like a valid frame' % (offset + 4 + 16*i)
}
vag.write body
elsif mode == 'write'
body = vag.read body_size
body != nil && body.length == body_size or raise 'short read'
frames.times { |i|
valid_frame? body, 16*i or raise '%s %08x: doesn\'t look like a valid frame' % [vag_fn, vag.tell - body_size + 16*i]
body[16*i+1] = 2.chr # force flags to 2
}
vox.write body
end
else
# skip all other block types
vox.seek body_size, IO::SEEK_CUR
end
# uncomment to show all blocks while parsing
#puts '%08x: %02x %x' % [offset, type, body_size]
end
# currently ignore extra data when replacing, this is often just an extra
# frame with 07 flags indicating end of stream
#if mode == 'write'
# vag.eof? or raise 'extra data in .vag'
#end
vag.close
vox.close
puts 'ok!'
puts
|
shurunxuan/vgm_ripping
|
etc/ddwlg/ddwlg00.rb
|
require "chunky_png"
fn = ARGV[0]
exit unless fn
File.open(fn, mode='rb') do |f|
puts fn
width = ARGV[1].to_i
raise 'bad width' if width == 0
magic, unk1, subfiles = (f.read(16).unpack('a8 L<2'))
raise 'Missing DDWLG00' unless magic == "DDWLG00\x00"
puts "#{subfiles} images"
start = 0x60
f.seek(start, IO::SEEK_SET)
subfiles.times do |n|
out_name = "#{File.basename(fn, File.extname(fn))}_#{n}.png"
fmt, = f.read(2).unpack('S<')
f.seek(0x3e, IO::SEEK_CUR)
packed_size, = f.read(4).unpack('L<')
unpacked_size = 0
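# Pixel data is run-length packed: each 32-bit LE chunk header of 0 ends the image,
# a value >= 0x80000000 encodes a run of (header - 0x80000000) zero pixels,
# and any other value is a literal count of BGRA pixels (reordered here to RGBA).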
StringIO.open('', 'w+b') do |out_data|
while true
header, = f.read(4).unpack('L<')
if header == 0 then
break
end
if header >= 0x80000000 then
zeroes = header - 0x80000000
zeroes.times do
out_data.write "\x00\x00\x00\x00"
end
unpacked_size += zeroes * 4
else
count = header
count.times do
b,g,r,a = f.read(4).unpack('C4')
out_data.write [r,g,b,a].pack('C4')
end
unpacked_size += count * 4
end
end
height = unpacked_size / 4 / width
print "#{out_name}:"
print " #{f.tell.to_s(16)} #{fmt} #{packed_size} -> #{unpacked_size}"
puts " #{width}x#{height}"
out_data.seek(0, IO::SEEK_SET)
image = ChunkyPNG::Canvas.from_rgba_stream(width, height, out_data)
image.save(out_name)
end
end
end
|
shurunxuan/vgm_ripping
|
soundbank/lara/lara.rb
|
for fn in ARGV do
File.open(fn, mode='rb') do |f|
print fn, ': '
magic, data_size, six, zero1, id, neg1, sample_rate, zero2, zero3 =
f.read(0x24).unpack('a4 L<4 l< L<3')
raise 'Missing SECT' unless magic == 'SECT'
raise 'unknown values differ' unless
six == 6 and zero1 == 0 and neg1 == -1 and zero2 == 0 and zero3 == 0
data_size -= 0xc
print data_size, " bytes, ", sample_rate, "Hz\n"
out_name = File.basename(fn, File.extname(fn)) + ".wav"
raise "Output #{out_name} already exists" if File.exists?(out_name)
File.open(out_name, mode='wb') do |out|
codec_id = 0x11 # Microsoft IMA ADPCM
channels = 1
block_size = 0x24
samples_per_block = (block_size - 4) * 2 + 1
byte_rate = sample_rate * block_size / samples_per_block * channels
sample_size = 4
fmt = [codec_id, channels, sample_rate, byte_rate, block_size * channels, sample_size, 2, samples_per_block
].pack('S< S< L< L< S< S< S< S<')
wave = ['WAVE', 'fmt ', fmt.size, fmt, 'data', data_size
].pack('a4 a4 L< a* a4 L<')
out.write( ['RIFF', wave.size + data_size].pack('a4L<' ) )
out.write( wave )
copied = IO.copy_stream(f, out, data_size)
raise 'data truncated' if copied != data_size
raise 'extra data' unless f.eof
end
end
end
|
thaniyarasu/sysenv
|
lib/sysenv.rb
|
require 'sysenv/sysenv'
|
thaniyarasu/sysenv
|
test/test_sysenv.rb
|
require 'minitest/autorun'
require 'active_support'
require 'sysenv'
class SysenvTest < Minitest::Test
def setup
ENV["API_SECRET_KEY"] = "API_SECRET_VALUE" # system defined environment variables
@env = {api:{secret:{key: 'value',cert:'certificate'}}} # app defined environment variables
@sysenv = Sysenv.new
end
def test_parse
result = @sysenv.apply(@env)
assert_equal result[:api][:secret][:key], ENV["API_SECRET_KEY"]
assert_equal result[:api][:secret][:cert], 'certificate'
end
def test_override_nested_hash
overrided_env = @sysenv.override_nested_hash(@env, { "api_secret_key": "API_SECRET_KEY"} )
assert_equal overrided_env[:api][:secret][:key] , ENV["API_SECRET_KEY"]
assert_equal overrided_env[:api][:secret][:cert] , 'certificate'
end
end
|
thaniyarasu/sysenv
|
sysenv.gemspec
|
Gem::Specification.new do |s|
s.name = 'sysenv'
s.version = '0.0.1'
s.date = '2015-02-20'
  s.summary = "Sysenv loads app envs and overrides them with system envs"
s.description = <<-STRING
Most Rails applications keep their env values in a config/envs.yml file, but
sometimes the values defined in the system environment should take precedence.
This gem overrides the project-defined envs with the system envs.
STRING
s.authors = ["<NAME>"]
s.email = '<EMAIL>'
s.files = ["lib/sysenv.rb", "lib/sysenv/sysenv.rb"]
s.homepage = 'https://github.com/thaniyarasu/sysenv'
s.license = 'MIT'
s.add_runtime_dependency 'activesupport', '>= 3.0' , '< 6.0'
end
|
thaniyarasu/sysenv
|
lib/sysenv/sysenv.rb
|
require "active_support"
class Sysenv
# Override project-specific environment variables with system-defined ones
#
# Example:
# >> sysenv = Sysenv.new
# >> sysenv.apply({api:{secret:{key: 'value',cert:'certificate'}}})
# => {api:{secret:{key: 'value',cert:'certificate'}}}
#
# Arguments:
# hash: (Hash) nested hash of application-defined env values
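#
# Illustrative override (mirrors the behaviour exercised in test_sysenv.rb):
# with ENV['API_SECRET_KEY'] set, the nested :api => :secret => :key value is replaced:
#   sysenv.apply(api: { secret: { key: 'from-app' } })
#   #=> { api: { secret: { key: ENV['API_SECRET_KEY'] } } }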
def apply(hash, prefix=[])
envs = {} # hash of { underscorized env keys : normal env keys }
ENV.keys.each { |e| envs[ActiveSupport::Inflector.underscore(e)] = e }
override_nested_hash(hash, envs, prefix)
end
def override_nested_hash(hash, envs={}, prefix=[])
hash.each do |key, value|
if value.class == Hash
hash[key] = override_nested_hash(value, envs, prefix.clone << key)
end
exp_key = (prefix.clone << key).join('_')
index = envs.keys.index(exp_key) || envs.keys.index(exp_key.to_sym)
hash[key] = ENV[envs.values[index]] if index
end
hash
end
end
|
rafasoares/newrelic-perfmon-plugin
|
perfmon_metrics.rb
|
class PerfmonMetrics
attr_accessor :metric_types, :metric_samples, :typeperf_error_msg, :thread_count
def initialize
@metric_samples = 1
@typeperf_error_msg = "Error: No valid counters."
@thread_count = 5
@metric_types = Hash.new("ms")
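# Any counter not listed below reports in milliseconds (the Hash default);
# the assignments that follow override the unit per counter name.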
@metric_types["% 401 HTTP Response Sent"] = "%"
@metric_types["% 403 HTTP Response Sent"] = "%"
@metric_types["% 404 HTTP Response Sent"] = "%"
@metric_types["% 500 HTTP Response Sent"] = "%"
@metric_types["% Managed Processor Time (estimated)"] = "%"
@metric_types["% Time in GC"] = "%"
@metric_types["% Time in Jit"] = "%"
@metric_types["% Time in RT checks"] = "%"
@metric_types["% Time Loading"] = "%"
@metric_types["% Time Sig. Authenticating"] = "%"
@metric_types["Cache % Machine Memory Limit Used"] = "%"
@metric_types["Cache % Process Memory Limit Used"] = "%"
@metric_types["File Cache Hits %"] = "%"
@metric_types["Kernel: URI Cache Hits %"] = "%"
@metric_types["Metadata Cache Hits %"] = "%"
@metric_types["Output Cache Current Hits %"] = "%"
@metric_types["URI Cache Hits %"] = "%"
@metric_types["Current appdomains"] = "appdomains"
@metric_types["Rate of appdomains"] = "appdomains"
@metric_types["Rate of appdomains unloaded"] = "appdomains"
@metric_types["Total Appdomains"] = "appdomains"
@metric_types["Total appdomains unloaded"] = "appdomains"
@metric_types["Applications Running"] = "apps"
@metric_types["ArrivalRate"] = "arrivals"
@metric_types["Current Assemblies"] = "assemblies"
@metric_types["Rate of Assemblies"] = "assemblies"
@metric_types["Total Assemblies"] = "assemblies"
@metric_types["Connection Attempts/sec"] = "attempts"
@metric_types["ConnectionAttempts"] = "attempts"
@metric_types["Logon Attempts/sec"] = "attempts"
@metric_types["Total Connection Attempts (all instances)"] = "attempts"
@metric_types["Total Logon Attempts"] = "attempts"
@metric_types["WebSocket Connection Attempts / Sec"] = "attempts"
@metric_types["# of Sink Blocks in use"] = "blocks"
@metric_types["# Bytes in all Heaps"] = "bytes"
@metric_types["# of IL Bytes Jitted"] = "bytes"
@metric_types["# Total committed Bytes"] = "bytes"
@metric_types["# Total reserved Bytes"] = "bytes"
@metric_types["Allocated Bytes/sec"] = "bytes"
@metric_types["Bytes in Loader Heap"] = "bytes"
@metric_types["Bytes Received"] = "bytes"
@metric_types["Bytes Received/sec"] = "bytes"
@metric_types["Bytes Sent"] = "bytes"
@metric_types["Bytes Sent/sec"] = "bytes"
@metric_types["Bytes Total/sec"] = "bytes"
@metric_types["BytesReceivedRate"] = "bytes"
@metric_types["BytesSentRate"] = "bytes"
@metric_types["BytesTransferredRate"] = "bytes"
@metric_types["Current blocked bandwidth bytes."] = "bytes"
@metric_types["Current File Cache Memory Usage"] = "bytes"
@metric_types["Current Metadata Cached"] = "bytes"
@metric_types["CurrentQueueSize"] = "bytes"
@metric_types["Gen 0 heap size"] = "bytes"
@metric_types["Gen 0 Promoted Bytes/Sec"] = "bytes"
@metric_types["Gen 1 heap size"] = "bytes"
@metric_types["Gen 1 Promoted Bytes/Sec"] = "bytes"
@metric_types["Gen 2 heap size"] = "bytes"
@metric_types["IL Bytes Jitted / sec"] = "bytes"
@metric_types["Large Object Heap size"] = "bytes"
@metric_types["Managed Memory Used (estimated)"] = "bytes"
@metric_types["Maximum File Cache Memory Usage"] = "bytes"
@metric_types["Measured Async I/O Bandwidth Usage"] = "bytes"
@metric_types["Output Cache Current Memory Usage"] = "bytes"
@metric_types["Promoted Finalization-Memory from Gen 0"] = "bytes"
@metric_types["Promoted Memory from Gen 0"] = "bytes"
@metric_types["Promoted Memory from Gen 1"] = "bytes"
@metric_types["Request Bytes In Total"] = "bytes"
@metric_types["Request Bytes In Total (WebSockets)"] = "bytes"
@metric_types["Request Bytes Out Total"] = "bytes"
@metric_types["Request Bytes Out Total (WebSockets)"] = "bytes"
@metric_types["Total # of IL Bytes Jitted"] = "bytes"
@metric_types["Total blocked bandwidth bytes."] = "bytes"
@metric_types["Total Bytes Received"] = "bytes"
@metric_types["Total Bytes Sent"] = "bytes"
@metric_types["Total Bytes Transferred"] = "bytes"
@metric_types["Total Flushed Metadata"] = "bytes"
@metric_types["Total Metadata Cached"] = "bytes"
@metric_types["Call Failed Per Second"] = "calls"
@metric_types["Calls"] = "calls"
@metric_types["Calls Failed"] = "calls"
@metric_types["Calls Failed Per Second"] = "calls"
@metric_types["Calls Faulted"] = "calls"
@metric_types["Calls Faulted Per Second"] = "calls"
@metric_types["Calls Outstanding"] = "calls"
@metric_types["Calls Per Second"] = "calls"
@metric_types["Percent Of Max Concurrent Calls"] = "calls"
@metric_types["Remote Calls/sec"] = "calls"
@metric_types["Security Calls Not Authorized"] = "calls"
@metric_types["Security Calls Not Authorized Per Second"] = "calls"
@metric_types["Total Remote Calls"] = "calls"
@metric_types["# of CCWs"] = "CCWs"
@metric_types["Active Listener Channels"] = "channels"
@metric_types["Channels"] = "channels"
@metric_types["# Link Time Checks"] = "checks"
@metric_types["Total Runtime Checks"] = "checks"
@metric_types["Context-Bound Classes Loaded"] = "classes"
@metric_types["Current Classes Loaded"] = "classes"
@metric_types["Rate of Classes Loaded"] = "classes"
@metric_types["Total Classes Loaded"] = "classes"
@metric_types["# Gen 0 Collections"] = "collections"
@metric_types["# Gen 1 Collections"] = "collections"
@metric_types["# Gen 2 Collections"] = "collections"
@metric_types["# Induced GC"] = "collections"
@metric_types["SqlClient: Total # failed commands"] = "commands"
@metric_types["Compilations Total"] = "compilations"
@metric_types["HardConnectsPerSecond"] = "connects"
@metric_types["SoftConnectsPerSecond"] = "connects"
@metric_types["SqlClient: Total # failed connects"] = "connects"
@metric_types["Connections Accepted over net.pipe"] = "conns"
@metric_types["Connections Accepted over net.tcp"] = "conns"
@metric_types["Connections Dispatched over net.pipe"] = "conns"
@metric_types["Connections Dispatched over net.tcp"] = "conns"
@metric_types["Connections Established"] = "conns"
@metric_types["Current CAL count for SSL connections"] = "conns"
@metric_types["Current Connections"] = "conns"
@metric_types["CurrentConnections"] = "conns"
@metric_types["MaxConnections"] = "conns"
@metric_types["Maximum CAL count for SSL connections"] = "conns"
@metric_types["Maximum Connections"] = "conns"
@metric_types["NumberOfActiveConnections"] = "conns"
@metric_types["NumberOfFreeConnections"] = "conns"
@metric_types["NumberOfNonPooledConnections"] = "conns"
@metric_types["NumberOfPooledConnections"] = "conns"
@metric_types["NumberOfReclaimedConnections"] = "conns"
@metric_types["NumberOfStasisConnections"] = "conns"
@metric_types["Session SQL Server connections total"] = "conns"
@metric_types["Session State Server connections total"] = "conns"
@metric_types["SqlClient: Current # pooled and nonpooled connections"] = "conns"
@metric_types["SqlClient: Current # pooled connections"] = "conns"
@metric_types["SqlClient: Peak # pooled connections"] = "conns"
@metric_types["WebSocket Connections Accepted / Sec"] = "conns"
@metric_types["WebSocket Connections Rejected / Sec"] = "conns"
@metric_types["Contention Rate / sec"] = "contentions"
@metric_types["Total # of Contentions"] = "contentions"
@metric_types["Contexts"] = "contexts"
@metric_types["Datagrams Received"] = "datagrams"
@metric_types["Datagrams Sent"] = "datagrams"
@metric_types["HardDisconnectsPerSecond"] = "disconnects"
@metric_types["SoftDisconnectsPerSecond"] = "disconnects"
@metric_types["Script Engines Cached"] = "engines"
@metric_types["Active Flushed Entries"] = "entries"
@metric_types["Cache API Entries"] = "entries"
@metric_types["Cache Total Entries"] = "entries"
@metric_types["Output Cache Entries"] = "entries"
@metric_types["Errors During Compilation"] = "errors"
@metric_types["Errors During Execution"] = "errors"
@metric_types["Errors During Preprocessing"] = "errors"
@metric_types["Errors During Script Runtime"] = "errors"
@metric_types["Errors From ASP Preprocessor"] = "errors"
@metric_types["Errors From Script Compilers"] = "errors"
@metric_types["Errors Total"] = "errors"
@metric_types["Errors Total/Sec"] = "errors"
@metric_types["Errors Unhandled During Execution"] = "errors"
@metric_types["Errors Unhandled During Execution/Sec"] = "errors"
@metric_types["Errors/Sec"] = "errors"
@metric_types["Locked Errors/sec"] = "errors"
@metric_types["Not Found Errors/sec"] = "errors"
@metric_types["Total Locked Errors"] = "errors"
@metric_types["Total Not Found Errors"] = "errors"
@metric_types["Application Lifetime Events"] = "events"
@metric_types["Application Lifetime Events/Sec"] = "events"
@metric_types["Audit Failure Events Raised"] = "events"
@metric_types["Audit Success Events Raised"] = "events"
@metric_types["Error Events Raised"] = "events"
@metric_types["Error Events Raised/Sec"] = "events"
@metric_types["Events Raised"] = "events"
@metric_types["Events Raised/Sec"] = "events"
@metric_types["Infrastructure Error Events Raised"] = "events"
@metric_types["Infrastructure Error Events Raised/Sec"] = "events"
@metric_types["Request Error Events Raised"] = "events"
@metric_types["Request Error Events Raised/Sec"] = "events"
@metric_types["Request Events Raised"] = "events"
@metric_types["Request Events Raised/Sec"] = "events"
@metric_types["# of Exceps Thrown / sec"] = "exceptions"
@metric_types["# of TLB exports / sec"] = "exports"
@metric_types["Dispatch Failures over net.pipe"] = "failures"
@metric_types["Dispatch Failures over net.tcp"] = "failures"
@metric_types["Forms Authentication Failure"] = "failures"
@metric_types["Membership Authentication Failure"] = "failures"
@metric_types["Protocol Failures over net.pipe"] = "failures"
@metric_types["Protocol Failures over net.tcp"] = "failures"
@metric_types["Rate of Load Failures"] = "failures"
@metric_types["Recent Worker Process Failures"] = "failures"
@metric_types["Security Validation and Authentication Failures"] = "failures"
@metric_types["Security Validation and Authentication Failures Per Second"] = "failures"
@metric_types["Standard Jit Failures"] = "failures"
@metric_types["Time Since Last Worker Process Failure"] = "failures"
@metric_types["Total # of Load Failures"] = "failures"
@metric_types["Total Worker Process Failures"] = "failures"
@metric_types["Total Worker Process Ping Failures"] = "failures"
@metric_types["Total Worker Process Shutdown Failures"] = "failures"
@metric_types["Total Worker Process Startup Failures"] = "failures"
@metric_types["Viewstate MAC Validation Failure"] = "failures"
@metric_types["Current Files Cached"] = "files"
@metric_types["Files Received/sec"] = "files"
@metric_types["Files Sent/sec"] = "files"
@metric_types["Files/sec"] = "files"
@metric_types["Total Files Cached"] = "files"
@metric_types["Total Files Received"] = "files"
@metric_types["Total Files Sent"] = "files"
@metric_types["Total Files Transferred"] = "files"
@metric_types["Total Flushed Files"] = "files"
@metric_types["# of Filters / sec"] = "filters"
@metric_types["# of Finallys / sec"] = "finallys"
@metric_types["File Cache Flushes"] = "flushes"
@metric_types["Kernel: URI Cache Flushes"] = "flushes"
@metric_types["Metadata Cache Flushes"] = "flushes"
@metric_types["Output Cache Total Flushes"] = "flushes"
@metric_types["URI Cache Flushes"] = "flushes"
@metric_types["UriCacheFlushes"] = "flushes"
@metric_types["NumberOfActiveConnectionPoolGroups"] = "groups"
@metric_types["NumberOfInactiveConnectionPoolGroups"] = "groups"
@metric_types["Active Protocol Handlers"] = "handlers"
@metric_types["# GC Handles"] = "handles"
@metric_types["Cache API Hit Ratio"] = "hits"
@metric_types["Cache API Hits"] = "hits"
@metric_types["Cache Total Hit Ratio"] = "hits"
@metric_types["Cache Total Hits"] = "hits"
@metric_types["CacheHitRate"] = "hits"
@metric_types["File Cache Hits"] = "hits"
@metric_types["File Cache Hits / sec"] = "hits"
@metric_types["In Memory Template Cache Hit Rate"] = "hits"
@metric_types["Kernel: URI Cache Hits"] = "hits"
@metric_types["Kernel: Uri Cache Hits/sec"] = "hits"
@metric_types["Metadata Cache Hits"] = "hits"
@metric_types["Metadata Cache Hits / sec"] = "hits"
@metric_types["Output Cache Hit Ratio"] = "hits"
@metric_types["Output Cache Hits"] = "hits"
@metric_types["Output Cache Hits / sec"] = "hits"
@metric_types["Output Cache Total Hits"] = "hits"
@metric_types["Script Engine Cache Hit Rate"] = "hits"
@metric_types["Template Cache Hit Rate"] = "hits"
@metric_types["URI Cache Hits"] = "hits"
@metric_types["Uri Cache Hits / sec"] = "hits"
@metric_types["UriCacheHits"] = "hits"
@metric_types["Process ID"] = "ignore"
@metric_types["# of TLB imports / sec"] = "imports"
@metric_types["Instances"] = "instances"
@metric_types["Instances Created Per Second"] = "instances"
@metric_types["Percent Of Max Concurrent Instances"] = "instances"
@metric_types["Pipeline Instance Count"] = "instances"
@metric_types["Output Cache Current Flushed Items"] = "items"
@metric_types["Output Cache Current Items"] = "items"
@metric_types["Output Cache Total Flushed Items"] = "items"
@metric_types["Assembly Search Length"] = "length"
@metric_types["Current Queue Length"] = "length"
@metric_types["Queue Length / sec"] = "length"
@metric_types["Queue Length Peak"] = "length"
@metric_types["Stack Walk Depth"] = "levels"
@metric_types["Throw To Catch Depth / sec"] = "levels"
@metric_types["# of marshalling"] = "marshalling"
@metric_types["Queued Messages Dropped"] = "messages"
@metric_types["Queued Messages Dropped Per Second"] = "messages"
@metric_types["Queued Messages Rejected"] = "messages"
@metric_types["Queued Messages Rejected Per Second"] = "messages"
@metric_types["Queued Poison Messages"] = "messages"
@metric_types["Queued Poison Messages Per Second"] = "messages"
@metric_types["Reliable Messaging Messages Dropped"] = "messages"
@metric_types["Reliable Messaging Messages Dropped Per Second"] = "messages"
@metric_types["Total Messages Sent to WAS"] = "messages"
@metric_types["Total WAS Messages Received"] = "messages"
@metric_types["# of Methods Jitted"] = "methods"
@metric_types["Other Request Methods/sec"] = "methods"
@metric_types["Total Other Request Methods"] = "methods"
@metric_types["Cache API Misses"] = "misses"
@metric_types["Cache Total Misses"] = "misses"
@metric_types["File Cache Misses"] = "misses"
@metric_types["File Cache Misses / sec"] = "misses"
@metric_types["Kernel: URI Cache Misses"] = "misses"
@metric_types["Metadata Cache Misses"] = "misses"
@metric_types["Metadata Cache Misses / sec"] = "misses"
@metric_types["Output Cache Misses"] = "misses"
@metric_types["Output Cache Misses / sec"] = "misses"
@metric_types["Output Cache Total Misses"] = "misses"
@metric_types["URI Cache Misses"] = "misses"
@metric_types["Uri Cache Misses / sec"] = "misses"
@metric_types["UriCacheMisses"] = "misses"
@metric_types["Average Workflow Load Time"] = "ms"
@metric_types["Average Workflow Persist Time"] = "ms"
@metric_types["Calls Duration"] = "ms"
@metric_types["Current Application Pool Uptime"] = "ms"
@metric_types["Health Ping Reply Latency"] = "ms"
@metric_types["MaxQueueItemAge"] = "ms"
@metric_types["Request Execution Time"] = "ms"
@metric_types["Request Wait Time"] = "ms"
@metric_types["Service Uptime"] = "ms"
@metric_types["Session Duration"] = "ms"
@metric_types["Total Application Pool Uptime"] = "ms"
@metric_types["Engine Flush Notifications"] = "notifications"
@metric_types["Template Notifications"] = "notifications"
@metric_types["# of Pinned Objects"] = "objects"
@metric_types["Context-Bound Objects Alloc / sec"] = "objects"
@metric_types["Transacted Operations Aborted"] = "ops"
@metric_types["Transacted Operations Aborted Per Second"] = "ops"
@metric_types["Transacted Operations Committed"] = "ops"
@metric_types["Transacted Operations Committed Per Second"] = "ops"
@metric_types["Transacted Operations In Doubt"] = "ops"
@metric_types["Transacted Operations In Doubt Per Second"] = "ops"
@metric_types["Total Health Pings."] = "pings"
@metric_types["NumberOfActiveConnectionPools"] = "pools"
@metric_types["NumberOfInactiveConnectionPools"] = "pools"
@metric_types["SqlClient: Current # connection pools"] = "pools"
@metric_types["Current Worker Processes"] = "processes"
@metric_types["Maximum Worker Processes"] = "processes"
@metric_types["Total Worker Processes Created"] = "processes"
@metric_types["Worker Processes Running"] = "processes"
@metric_types["Context Proxies"] = "proxies"
@metric_types["HardProcedureQueries"] = "queries"
@metric_types["SoftProcedureQueries"] = "queries"
@metric_types["Total Runtime Status Queries"] = "queries"
@metric_types["Total Application Pool Recycles"] = "recycles"
@metric_types["Registrations Active for net.pipe"] = "registrations"
@metric_types["Registrations Active for net.tcp"] = "registrations"
@metric_types["RejectionRate"] = "rejections"
@metric_types["Active Requests"] = "requests"
@metric_types["AllRequests"] = "requests"
@metric_types["Anonymous Requests"] = "requests"
@metric_types["Anonymous Requests/Sec"] = "requests"
@metric_types["CGI Requests/sec"] = "requests"
@metric_types["Copy Requests/sec"] = "requests"
@metric_types["Current Blocked Async I/O Requests"] = "requests"
@metric_types["Current CGI Requests"] = "requests"
@metric_types["Current ISAPI Extension Requests"] = "requests"
@metric_types["Debugging Requests"] = "requests"
@metric_types["Delete Requests/sec"] = "requests"
@metric_types["Get Requests/sec"] = "requests"
@metric_types["GetRequests"] = "requests"
@metric_types["Head Requests/sec"] = "requests"
@metric_types["HeadRequests"] = "requests"
@metric_types["HttpWebRequests Aborted/Sec"] = "requests"
@metric_types["HttpWebRequests Average Lifetime"] = "requests"
@metric_types["HttpWebRequests Average Queue Time"] = "requests"
@metric_types["HttpWebRequests Created/Sec"] = "requests"
@metric_types["HttpWebRequests Failed/Sec"] = "requests"
@metric_types["HttpWebRequests Queued/Sec"] = "requests"
@metric_types["ISAPI Extension Requests/sec"] = "requests"
@metric_types["Lock Requests/sec"] = "requests"
@metric_types["Maximum CGI Requests"] = "requests"
@metric_types["Maximum ISAPI Extension Requests"] = "requests"
@metric_types["Mkcol Requests/sec"] = "requests"
@metric_types["Move Requests/sec"] = "requests"
@metric_types["Options Requests/sec"] = "requests"
@metric_types["Post Requests/sec"] = "requests"
@metric_types["Propfind Requests/sec"] = "requests"
@metric_types["Proppatch Requests/sec"] = "requests"
@metric_types["Put Requests/sec"] = "requests"
@metric_types["RejectedRequests"] = "requests"
@metric_types["Requests / Sec"] = "requests"
@metric_types["Requests Current"] = "requests"
@metric_types["Requests Disconnected"] = "requests"
@metric_types["Requests Executing"] = "requests"
@metric_types["Requests Executing (WebSockets)"] = "requests"
@metric_types["Requests Failed"] = "requests"
@metric_types["Requests Failed (WebSockets)"] = "requests"
@metric_types["Requests Failed Total"] = "requests"
@metric_types["Requests In Application Queue"] = "requests"
@metric_types["Requests In Native Queue"] = "requests"
@metric_types["Requests Not Authorized"] = "requests"
@metric_types["Requests Not Found"] = "requests"
@metric_types["Requests Queued"] = "requests"
@metric_types["Requests Rejected"] = "requests"
@metric_types["Requests Succeeded"] = "requests"
@metric_types["Requests Succeeded (WebSockets)"] = "requests"
@metric_types["Requests Timed Out"] = "requests"
@metric_types["Requests Total"] = "requests"
@metric_types["Requests Total (WebSockets)"] = "requests"
@metric_types["Requests/Sec"] = "requests"
@metric_types["Search Requests/sec"] = "requests"
@metric_types["Total Allowed Async I/O Requests"] = "requests"
@metric_types["Total Blocked Async I/O Requests"] = "requests"
@metric_types["Total CGI Requests"] = "requests"
@metric_types["Total Copy Requests"] = "requests"
@metric_types["Total count of failed CAL requests for authenticated users"] = "requests"
@metric_types["Total count of failed CAL requests for SSL connections"] = "requests"
@metric_types["Total Delete Requests"] = "requests"
@metric_types["Total Get Requests"] = "requests"
@metric_types["Total Head Requests"] = "requests"
@metric_types["Total HTTP Requests Served"] = "requests"
@metric_types["Total ISAPI Extension Requests"] = "requests"
@metric_types["Total Lock Requests"] = "requests"
@metric_types["Total Method Requests"] = "requests"
@metric_types["Total Method Requests/sec"] = "requests"
@metric_types["Total Mkcol Requests"] = "requests"
@metric_types["Total Move Requests"] = "requests"
@metric_types["Total Options Requests"] = "requests"
@metric_types["Total Post Requests"] = "requests"
@metric_types["Total Propfind Requests"] = "requests"
@metric_types["Total Proppatch Requests"] = "requests"
@metric_types["Total Put Requests"] = "requests"
@metric_types["Total Rejected Async I/O Requests"] = "requests"
@metric_types["Total Requests Served"] = "requests"
@metric_types["Total Search Requests"] = "requests"
@metric_types["Total Trace Requests"] = "requests"
@metric_types["Total Unlock Requests"] = "requests"
@metric_types["Trace Requests/sec"] = "requests"
@metric_types["Unlock Requests/sec"] = "requests"
@metric_types["WebSocket Active Requests"] = "requests"
@metric_types["Application Restarts"] = "restarts"
@metric_types["Worker Process Restarts"] = "restarts"
@metric_types["Percent Of Max Concurrent Sessions"] = "sessions"
@metric_types["Reliable Messaging Sessions Faulted"] = "sessions"
@metric_types["Reliable Messaging Sessions Faulted Per Second"] = "sessions"
@metric_types["Sessions Abandoned"] = "sessions"
@metric_types["Sessions Active"] = "sessions"
@metric_types["Sessions Current"] = "sessions"
@metric_types["Sessions Timed Out"] = "sessions"
@metric_types["Sessions Total"] = "sessions"
@metric_types["State Server Sessions Abandoned"] = "sessions"
@metric_types["State Server Sessions Active"] = "sessions"
@metric_types["State Server Sessions Timed Out"] = "sessions"
@metric_types["State Server Sessions Total"] = "sessions"
@metric_types["Current Application Pool State"] = "state"
@metric_types["# of Stubs"] = "stubs"
@metric_types["Forms Authentication Success"] = "successes"
@metric_types["Membership Authentication Success"] = "successes"
@metric_types["Finalization Survivors"] = "survivors"
@metric_types["In Memory Templates Cached"] = "templates"
@metric_types["Templates Cached"] = "templates"
@metric_types["# of current logical Threads"] = "threads"
@metric_types["# of current physical Threads"] = "threads"
@metric_types["# of current recognized threads"] = "threads"
@metric_types["# of Exceps Thrown"] = "threads"
@metric_types["# of total recognized threads"] = "threads"
@metric_types["Active Threads Count"] = "threads"
@metric_types["Maximum Threads Count"] = "threads"
@metric_types["rate of recognized threads / sec"] = "threads"
@metric_types["Total Threads"] = "threads"
@metric_types["Transactions Aborted"] = "transactions"
@metric_types["Transactions Committed"] = "transactions"
@metric_types["Transactions Flowed"] = "transactions"
@metric_types["Transactions Flowed Per Second"] = "transactions"
@metric_types["Transactions Pending"] = "transactions"
@metric_types["Transactions Total"] = "transactions"
@metric_types["Transactions/Sec"] = "transactions"
@metric_types["Cache API Trims"] = "trims"
@metric_types["Cache Total Trims"] = "trims"
@metric_types["Output Cache Trims"] = "trims"
@metric_types["Cache API Turnover Rate"] = "turnovers"
@metric_types["Cache Total Turnover Rate"] = "turnovers"
@metric_types["Output Cache Turnover Rate"] = "turnovers"
@metric_types["Current URIs Cached"] = "URIs"
@metric_types["CurrentUrisCached"] = "URIs"
@metric_types["Kernel: Current URIs Cached"] = "URIs"
@metric_types["Kernel: Total Flushed URIs"] = "URIs"
@metric_types["Kernel: Total URIs Cached"] = "URIs"
@metric_types["Total Flushed URIs"] = "URIs"
@metric_types["Total URIs Cached"] = "URIs"
@metric_types["TotalFlushedUris"] = "URIs"
@metric_types["TotalUrisCached"] = "URIs"
@metric_types["Uris Registered for net.pipe"] = "URIs"
@metric_types["Uris Registered for net.tcp"] = "URIs"
@metric_types["Uris Unregistered for net.pipe"] = "URIs"
@metric_types["Uris Unregistered for net.tcp"] = "URIs"
@metric_types["Anonymous Users/sec"] = "users"
@metric_types["Current Anonymous Users"] = "users"
@metric_types["Current CAL count for authenticated users"] = "users"
@metric_types["Current NonAnonymous Users"] = "users"
@metric_types["Maximum Anonymous Users"] = "users"
@metric_types["Maximum CAL count for authenticated users"] = "users"
@metric_types["Maximum NonAnonymous Users"] = "users"
@metric_types["NonAnonymous Users/sec"] = "users"
@metric_types["Total Anonymous Users"] = "users"
@metric_types["Total NonAnonymous Users"] = "users"
@metric_types["Workflows Aborted"] = "workflows"
@metric_types["Workflows Aborted Per Second"] = "workflows"
@metric_types["Workflows Aborted/sec"] = "workflows"
@metric_types["Workflows Completed"] = "workflows"
@metric_types["Workflows Completed Per Second"] = "workflows"
@metric_types["Workflows Completed/sec"] = "workflows"
@metric_types["Workflows Created"] = "workflows"
@metric_types["Workflows Created Per Second"] = "workflows"
@metric_types["Workflows Created/sec"] = "workflows"
@metric_types["Workflows Executing"] = "workflows"
@metric_types["Workflows Idle Per Second"] = "workflows"
@metric_types["Workflows Idle/sec"] = "workflows"
@metric_types["Workflows In Memory"] = "workflows"
@metric_types["Workflows Loaded"] = "workflows"
@metric_types["Workflows Loaded Per Second"] = "workflows"
@metric_types["Workflows Loaded/sec"] = "workflows"
@metric_types["Workflows Pending"] = "workflows"
@metric_types["Workflows Persisted"] = "workflows"
@metric_types["Workflows Persisted Per Second"] = "workflows"
@metric_types["Workflows Persisted/sec"] = "workflows"
@metric_types["Workflows Runnable"] = "workflows"
@metric_types["Workflows Suspended"] = "workflows"
@metric_types["Workflows Suspended Per Second"] = "workflows"
@metric_types["Workflows Suspended/sec"] = "workflows"
@metric_types["Workflows Terminated"] = "workflows"
@metric_types["Workflows Terminated Per Second"] = "workflows"
@metric_types["Workflows Terminated/sec"] = "workflows"
@metric_types["Workflows Unloaded"] = "workflows"
@metric_types["Workflows Unloaded Per Second"] = "workflows"
@metric_types["Workflows Unloaded/sec"] = "workflows"
@metric_types["Free Megabytes"] = "megabytes"
@metric_types["% Free Space"] = "%"
end
end
|
rafasoares/newrelic-perfmon-plugin
|
perfmon_plugin_multithread.rb
|
#!/usr/bin/env ruby
require "rubygems"
require "bundler/setup"
require "newrelic_plugin"
require_relative "perfmon_metrics.rb"
# Fixes SSL cert without monkeying with PEM file!
require "certified"
module PerfmonAgent
class Agent < NewRelic::Plugin::Agent::Base
agent_config_options :local, :hostname, :debug, :testrun
# Change the following agent_guid if you fork and use this as your own plugin
# Visit https://newrelic.com/docs/plugin-dev/ for more information
default_guid = "com.52projects.plugins.perfmon"
agent_version "0.0.2"
# Allow GUID to be set in config file under "newrelic" stanza
if NewRelic::Plugin::Config.config.newrelic['guid'].to_s.empty?
agent_guid default_guid
else
agent_guid NewRelic::Plugin::Config.config.newrelic['guid'].to_s
end
agent_human_labels('Perfmon') do
if hostname.to_s.empty?
if local
"#{Socket.gethostname}"
else
abort("No hostname defined.\nEnter \"hostname: [your_hostname]\" or \"local: true\" in newrelic_plugin.yml")
end
else
"#{hostname}"
end
end
# ENV.key? (used in setup_metrics below) returns true if there is an environment variable with the given name.
# Fixes SSL Connection Error in Windows execution of Ruby
# Based on fix found at: https://gist.github.com/fnichol/867550
# ENV['SSL_CERT_FILE'] = File.expand_path(File.dirname(__FILE__) + "/config/cacert.pem")
# puts("CERT FILE: #{ENV['SSL_CERT_FILE']}")
def setup_metrics
if ENV.key?('OCRA_EXECUTABLE')
fileloc = File.dirname(ENV['OCRA_EXECUTABLE'].gsub(/\\/, "/"))
else
fileloc = File.expand_path(File.dirname(__FILE__))
end
pidfile = fileloc + "/ruby.pid"
if File.exist?(pidfile)
File.delete(pidfile)
end
File.open(pidfile, 'w') { |file| file.write("#{Process.pid}") }
@pm = PerfmonMetrics.new
countersfile = NewRelic::Plugin::Config.config.newrelic['countersfile'].to_s
if countersfile.to_s.empty?
counters_file = File.expand_path(File.dirname(__FILE__)) + "/config/perfmon_totals_counters.txt"
else
counters_file = fileloc + "/config/#{countersfile}"
end
if File.file?(counters_file)
if !countersfile.to_s.empty?
puts("Using Counters File: #{counters_file}")
end
@counters = [] #Array.new(@pm.thread_count, "")
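# Split the counters file across @pm.thread_count slices; each slice is joined into
# one quoted argument string handed to its own typeperf thread in poll_cycle.
# Lines starting with # are treated as comments and skipped.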
File.open(counters_file) do |counters|
lines = counters.readlines
# counters.size would be the file size in bytes, so derive the slice size from the line count instead
size = (lines.size / @pm.thread_count.to_f).ceil
@counters = lines.each_slice(size).map do |slice|
slice.reduce('') do |result, current|
next result unless current =~ /^(?!#).+$/
result + " \"#{current.strip}\""
end
end
end
else
abort("No Perfmon counters file named #{counters_file}.")
end
if !self.local
@typeperf_string = "-s #{self.hostname} -sc #{@pm.metric_samples}"
else
@typeperf_string = "-sc #{@pm.metric_samples}"
end
end
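# poll_cycle runs one `typeperf` process per counter slice, each in its own thread,
# and parses any output that does not contain the typeperf error message.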
def poll_cycle
if self.testrun
File.open("typeperf_test.txt") { |file| get_perf_data(file) }
exit
else
perf_threads = @counters.map do |c|
Thread.new(c) do |cthread|
perf_input = `typeperf #{cthread} #{@typeperf_string}`
if !perf_input.include? @pm.typeperf_error_msg
get_perf_data(perf_input.split("\n"))
elsif self.debug
puts("This path has no valid counters: #{cthread}")
end
end
end
perf_threads.each do |t|
t.join
Signal.trap("TERM") do
puts "Exiting..."
shutdown()
end
end
end
end
private
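# typeperf prints CSV: a quoted header row of counter paths followed by quoted sample
# rows, along the lines of (illustrative only):
#   "(PDH-CSV 4.0)","\\HOST\Web Service(_Total)\Current Connections"
#   "04/01/2017 10:00:00.000","42"
# get_perf_data keeps only the quoted lines, strips the leading \\HOST\ prefix, and
# reports each counter under the unit looked up in PerfmonMetrics#metric_types.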
def get_perf_data(perf_input)
perf_lines = Array.new
perf_input.each do |pl|
if pl.chr.eql?("\"")
perf_lines << pl.gsub(/\"/, "").gsub(/\[/, "(").gsub(/\]/, ")").gsub(/\\\\[^\\]+\\/, "")
end
end
perf_names = perf_lines[0].split(",")
perf_values = perf_lines[1].split(",")
perf_names.each_index do |i|
if !perf_names[i].rindex("\\").nil?
metric_name = perf_names[i].slice(perf_names[i].rindex("\\")+1, perf_names[i].length)
report_metric_check_debug perf_names[i].strip.gsub(/\//," per ").gsub(/\s{2}/," ").gsub(/\\/,"/"), @pm.metric_types[metric_name], perf_values[i]
end
end
end
def report_metric_check_debug(metricname, metrictype, metricvalue)
if self.debug
puts("#{metricname}[#{metrictype}] : #{metricvalue}")
else
report_metric metricname, metrictype, metricvalue
end
end
def thishost
end
end
NewRelic::Plugin::Setup.install_agent :perfmon, self
NewRelic::Plugin::Run.setup_and_run
end
|
AliShahbaj/alispec
|
test/test_helper.rb
|
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "alispec"
require "minitest/autorun"
|
AliShahbaj/alispec
|
lib/alispec.rb
|
require "alispec/version"
module Alispec
# Your code goes here...
end
|
PTC-Global/sensu-plugins-dcos
|
bin/metrics-dcos-system-health.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# metric-dcos-system-health
#
# DESCRIPTION:
# This plugin collects DC/OS system health status as metrics exposed by the system/health/v1/[units|nodes] API endpoints
#
# OUTPUT:
# Metric data
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: uri
# gem: net/http
# gem: socket
# gem: json
#
# USAGE:
# #YELLOW
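# An illustrative invocation (the -u/-s options and their defaults are defined below):
# metrics-dcos-system-health.rb -u 'http://127.0.0.1:1050/system/health/v1' -s 'myhost.dcos.health'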
#
# NOTES:
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'sensu-plugin/metric/cli'
require 'json'
require 'net/http'
require 'uri'
require 'socket'
require 'sensu-plugins-dcos'
class DcosHealthMetrics < Sensu::Plugin::Metric::CLI::Graphite
include Common
option :scheme,
description: 'Metric naming scheme',
short: '-s SCHEME',
long: '--scheme SCHEME',
default: "#{Socket.gethostname}.dcos.health"
option :url,
description: 'URL',
short: '-u URL',
long: '--url URL',
default: 'http://127.0.0.1:1050/system/health/v1'
def run
{ units: ['id'], nodes: %w[role host_ip] }.each do |endpoint, attributes|
url = "#{config[:url]}/#{endpoint}"
resource = get_data(url)
resource[endpoint.to_s].each do |item|
path = attributes.map { |attr| item[attr].tr('.', '-') }.join('.')
output([config[:scheme], endpoint, path].join('.'), item['health'])
end
end
ok
end
end
|
PTC-Global/sensu-plugins-dcos
|
bin/check-dcos-ping.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# check-dcos-ping
#
# DESCRIPTION:
# This plugin checks the status of a DCOS host using the /ping endpoint from the dcos-metrics API
#
# OUTPUT:
# Plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
#
# USAGE:
# This example checks if the host is reporting itself as healthy
# check-dcos-ping.rb -u 'http://127.0.0.1:61001/system/v1/metrics/v0/ping'
#
# NOTES:
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'sensu-plugin/check/cli'
require 'json'
require 'net/http'
require 'uri'
require 'sensu-plugins-dcos'
#
# Check DCOS API
#
class CheckDcosPing < Sensu::Plugin::Check::CLI
include Common
option :url,
description: 'URL',
short: '-u URL',
long: '--url URL',
default: 'http://127.0.0.1:61001/system/v1/metrics/v0/ping'
def run
value = get_data(config[:url])['ok']
message "OK = #{value}"
if value == true
ok
else
critical
end
end
end
|
PTC-Global/sensu-plugins-dcos
|
test/integration/helpers/serverspec/check-dcos-node-health-shared_spec.rb
|
# frozen_string_literal: true
require 'spec_helper'
require 'shared_spec'
gem_path = '/usr/local/bin'
check_name = 'check-dcos-node-health.rb'
check = "#{gem_path}/#{check_name}"
describe 'ruby environment' do
it_behaves_like 'ruby checks', check
end
describe command("#{check} -u http://localhost/system/health/nodes/fail -r master") do
its(:exit_status) { should eq 2 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosNodeHealth CRITICAL: master.nodes.unhealthy = 1'))) }
end
describe command("#{check} -u http://localhost/system/health/nodes") do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosNodeHealth OK: nodes.unhealthy = 0'))) }
end
|
PTC-Global/sensu-plugins-dcos
|
test/integration/helpers/serverspec/metric-dcos-system-health-shared_spec.rb
|
# frozen_string_literal: true
require 'spec_helper'
require 'shared_spec'
gem_path = '/usr/local/bin'
check_name = 'metrics-dcos-system-health.rb'
check = "#{gem_path}/#{check_name}"
describe 'ruby environment' do
it_behaves_like 'ruby checks', check
end
describe command("#{check} -s dcos.health -u http://localhost/system/health") do
its(:exit_status) { should eq 0 }
pattern = 'dcos\.health\.units\.dcos-mesos-slave-public-service 0 \d{10}\n'\
'dcos\.health\.units\.dcos-log-master-socket 0 \d{10}\n'\
'dcos\.health\.units\.dcos-metrics-master-socket 0 \d{10}\n'\
'dcos\.health\.units\.dcos-3dt-socket 0 \d{10}\n'\
'dcos\.health\.nodes\.agent\.10-0-3-118 0 \d{10}\n'\
'dcos\.health\.nodes\.agent\.10-0-3-25 0 \d{10}\n'\
'dcos\.health\.nodes\.agent\.10-0-3-245 0 \d{10}\n'\
'dcos\.health\.nodes\.agent\.10-0-3-201 0 \d{10}\n'\
'dcos\.health\.nodes\.master\.10-0-1-39 0 \d{10}'
its(:stdout) { should match(Regexp.new(pattern)) }
end
|
PTC-Global/sensu-plugins-dcos
|
lib/sensu-plugins-dcos/common.rb
|
# frozen_string_literal: true
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Common
def initialize
super()
end
def get_data(url)
url = URI.parse(url)
response = Net::HTTP.get_response(url)
if response.code == '204'
return {}
end
JSON.parse(response.body)
rescue Errno::ECONNREFUSED
warning 'Connection refused'
rescue JSON::ParserError
critical 'Invalid JSON'
end
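# get_value: pick one datapoint out of the JSON document returned by `url`.
# root_field names the array to search (default 'datapoints'); filter ("tag:value")
# keeps only entries whose tags hash equals that single pair; the entry whose
# name_field matches `metric` has its value_field returned.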
def get_value(url, metric, filter, name_field = 'name', value_field = 'value', root_field = 'datapoints') # rubocop:disable Metrics/ParameterLists
resource = get_data(url)
return {} if resource.nil? || resource.empty?
if filter
filter = filter.split(':')
value = resource[root_field].select { |data| data['tags'] == { filter[0] => filter[1] } }
value.select { |data| data[name_field] == metric }.first[value_field]
else
resource[root_field].select { |data| data[name_field] == metric }.first[value_field]
end
end
end
|
PTC-Global/sensu-plugins-dcos
|
test/integration/helpers/serverspec/check-dcos-component-health-shared_spec.rb
|
# frozen_string_literal: true
require 'spec_helper'
require 'shared_spec'
gem_path = '/usr/local/bin'
check_name = 'check-dcos-component-health.rb'
check = "#{gem_path}/#{check_name}"
describe 'ruby environment' do
it_behaves_like 'ruby checks', check
end
describe command("#{check} -u http://localhost/system/health/units") do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosComponentHealth OK: components.unhealthy = 0'))) }
end
describe command("#{check} -u http://localhost/system/health/units/fail") do
its(:exit_status) { should eq 2 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosComponentHealth CRITICAL: components.unhealthy = 2'))) }
end
describe command("#{check} -u http://localhost/system/health/units -c 'dcos-mesos-slave-public.service'") do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosComponentHealth OK: dcos-mesos-slave-public.service = 0'))) }
end
describe command("#{check} -u http://localhost/system/health/units/fail -c 'dcos-mesos-slave-public.service'") do
its(:exit_status) { should eq 2 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosComponentHealth CRITICAL: dcos-mesos-slave-public.service = 1'))) }
end
|
PTC-Global/sensu-plugins-dcos
|
test/integration/helpers/serverspec/check-dcos-ping-shared_spec.rb
|
# frozen_string_literal: true
require 'spec_helper'
require 'shared_spec'
gem_path = '/usr/local/bin'
check_name = 'check-dcos-ping.rb'
check = "#{gem_path}/#{check_name}"
describe 'ruby environment' do
it_behaves_like 'ruby checks', check
end
describe file(check) do
it { should be_file }
it { should be_executable }
end
describe command("#{check} -u http://localhost/ping") do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosPing OK: OK = true'))) }
end
|
PTC-Global/sensu-plugins-dcos
|
bin/check-dcos-container-metrics.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# check-dcos-container-metrics
#
# DESCRIPTION:
# This plugin checks the value of a metric exposed by the dcos-metrics API across all running containers
#
# OUTPUT:
# Plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
#
# USAGE:
# This example checks if the container is being throttled
# check-dcos-container-metrics.rb -m 'cpus.throttled.time' -W 10 -C 20
#
# NOTES:
# TODO: investigate https://github.com/thirtysixthspan/descriptive_statistics to have more options for mode
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'sensu-plugin/check/cli'
require 'json'
require 'net/http'
require 'uri'
require 'daybreak'
require 'sensu-plugins-dcos'
#
# Check DCOS API
#
class CheckDcosContainersApi < Sensu::Plugin::Check::CLI
include Common
option :url,
description: 'URL',
short: '-u URL',
long: '--url URL',
default: 'http://127.0.0.1:61001/system/v1/metrics/v0/containers'
option :metric,
description: 'Metric Name',
short: '-m METRIC',
long: '--metric METRIC',
default: 'foo'
option :mode,
description: 'min max or avg',
short: '-M MODE',
long: '--mode MODE',
default: 'avg'
option :filter,
description: 'Filter by Tags',
short: '-f TAG_NAME:TAG_VALUE',
long: '--filter TAG_NAME:TAG_VALUE',
default: nil
option :warnhigh,
short: '-W N',
long: '--warnhigh N',
description: 'WARNING HIGH threshold',
proc: proc(&:to_i),
default: 5000
option :crithigh,
short: '-C N',
long: '--crithigh N',
description: 'CRITICAL HIGH threshold',
proc: proc(&:to_i),
default: 9000
option :warnlow,
short: '-w N',
long: '--warnlow N',
description: 'WARNING LOW threshold',
proc: proc(&:to_i),
default: -1
option :critlow,
short: '-c N',
long: '--critlow N',
description: 'CRITICAL LOW threshold',
proc: proc(&:to_i),
default: -1
option :delta,
short: '-d',
long: '--delta',
description: 'Use this flag to compare the metric with the previously retrieved value',
boolean: true
def run
mode = config[:mode]
value = ['all containers', -1]
data = {}
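# With --delta, the last sample for each container is persisted in a local Daybreak
# key/value store (/tmp/dcos-metrics.db) and the reported value is the change since
# the previous poll.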
if config[:delta]
db = Daybreak::DB.new '/tmp/dcos-metrics.db', default: 0
end
containers = get_data(config[:url])
unless containers.nil? || containers.empty?
containers.each do |container|
v = get_value("#{config[:url]}/#{container}", config[:metric], config[:filter])
if config[:delta]
prev_value = db["#{container}_#{config[:metric]}"]
db.lock do
db["#{container}_#{config[:metric]}"] = v
end
v -= prev_value
end
data[container] = v
end
end
if config[:delta]
db.flush
db.compact
db.close
end
if data.empty?
ok 'No containers found'
end
case mode
when 'min'
value = data.min_by { |_k, v| v }
when 'max'
value = data.max_by { |_k, v| v }
when 'avg'
value[1] = data.values.inject(:+).to_f / data.length
end
message "#{mode} #{config[:metric]} = #{value[1]} on #{value[0]}"
if value[1] >= config[:crithigh] || value[1] <= config[:critlow]
critical
elsif value[1] >= config[:warnhigh] || value[1] <= config[:warnlow]
warning
else
ok
end
end
end
|
PTC-Global/sensu-plugins-dcos
|
test/integration/helpers/serverspec/check-dcos-container-metrics-shared_spec.rb
|
# frozen_string_literal: true
require 'spec_helper'
require 'shared_spec'
gem_path = '/usr/local/bin'
check_name = 'check-dcos-container-metrics.rb'
check = "#{gem_path}/#{check_name}"
describe 'ruby environment' do
it_behaves_like 'ruby checks', check
end
describe file(check) do
it { should be_file }
it { should be_executable }
end
describe command("#{check} -u http://localhost/containers -m 'cpus.throttled.time' -d -M max -W 10 -C 20") do
its(:exit_status) { should eq 1 }
regex = Regexp.escape('CheckDcosContainersApi WARNING: max cpus.throttled.time = 10 on A4CB4E86-7730-4071-BF7F-D3AE9010140D')
its(:stdout) { should match(Regexp.new(regex)) }
end
|
PTC-Global/sensu-plugins-dcos
|
lib/sensu-plugins-dcos.rb
|
# frozen_string_literal: true
require 'sensu-plugins-dcos/version'
require 'sensu-plugins-dcos/common'
|
PTC-Global/sensu-plugins-dcos
|
bin/metrics-dcos-host.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# dcos-metrics
#
# DESCRIPTION:
# This plugin extracts the metrics from a dcos server
#
# OUTPUT:
# metric data
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: uri
# gem: net/http
# gem: socket
# gem: json
#
# USAGE:
# #YELLOW
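# An illustrative invocation (these options and their defaults are defined below):
# metrics-dcos-host.rb -h localhost -p 61001 -u '/system/v1/metrics/v0/node' -s 'myhost.dcos'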
#
# NOTES:
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'sensu-plugin/metric/cli'
require 'json'
require 'net/http'
require 'uri'
require 'socket'
require 'sensu-plugins-dcos'
class DCOSMetrics < Sensu::Plugin::Metric::CLI::Graphite
include Common
option :scheme,
description: 'Metric naming scheme',
short: '-s SCHEME',
long: '--scheme SCHEME',
default: "#{Socket.gethostname}.dcos"
option :server,
description: 'DCOS Host',
short: '-h SERVER',
long: '--host SERVER',
default: 'localhost'
option :port,
description: 'DCOS-metrics port',
short: '-p PORT',
long: '--port PORT',
required: false,
default: '61001'
option :uri,
description: 'Endpoint URI',
short: '-u URI',
long: '--uri URI',
default: '/system/v1/metrics/v0/node'
def run
all_metrics = get_data("http://#{config[:server]}:#{config[:port]}#{config[:uri]}")
if all_metrics.key?('datapoints')
all_metrics['datapoints'].each do |metric|
if metric.key?('tags')
metric['tags'].each do |k, v|
metric['name'] = [metric['name'], k, v].join('.')
end
end
metric['name'].tr!('/', '.')
metric['name'].squeeze!('.')
metric['unit'] = 'na' if metric['unit'].empty?
output([config[:scheme], metric['unit'], metric['name']].join('.'), metric['value'])
end
end
ok
end
end
|
PTC-Global/sensu-plugins-dcos
|
test/integration/helpers/serverspec/check-dcos-metrics-shared_spec.rb
|
# frozen_string_literal: true
require 'spec_helper'
require 'shared_spec'
gem_path = '/usr/local/bin'
check_name = 'check-dcos-metrics.rb'
check = "#{gem_path}/#{check_name}"
describe 'ruby environment' do
it_behaves_like 'ruby checks', check
end
describe file(check) do
it { should be_file }
it { should be_executable }
end
describe command("#{check} -u http://localhost/node -m 'process.count' -c 50 -w 100 -C 300 -W 250") do
its(:exit_status) { should eq 0 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosApi OK: process.count = 208'))) }
end
describe command("#{check} -u http://localhost/node -m 'network.in' -f 'interface:dummy1' -c 50 -w 100 -C 300 -W 250") do
its(:exit_status) { should eq 2 }
its(:stdout) { should match(Regexp.new(Regexp.escape('CheckDcosApi CRITICAL: network.in = 0'))) }
end
|
PTC-Global/sensu-plugins-dcos
|
bin/check-dcos-jobs-health.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# check-dcos-jobs-health
#
# DESCRIPTION:
# This plugin checks the health of DC/OS jobs exposed by the Mesos API endpoint /tasks
#
# OUTPUT:
# Plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
#
# USAGE:
# check-dcos-jobs-health.rb -u 'http://leader.mesos:5050/tasks' -p 'cron.jobname' -w 1000.0000
#
# NOTES:
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'sensu-plugin/check/cli'
require 'json'
require 'net/http'
require 'uri'
require 'sensu-plugins-dcos'
#
# Check DCOS System Health API
#
class CheckDcosJobsHealth < Sensu::Plugin::Check::CLI
include Common
option :url,
description: 'URL',
short: '-u URL',
long: '--url URL',
default: 'http://leader.mesos:5050/tasks'
option :pattern,
description: 'Pattern',
short: '-p pattern',
long: '--pattern PATTERN',
default: 'cron'
option :window,
description: 'Window/history for tasks',
short: '-w time',
long: '--window time',
default: 1000.0000
option :threshold,
description: 'Threshold subtracted from the window for a running task',
short: '-t float',
long: '--threshold float',
default: 200.0000
def run
t = Time.now.to_f.round(4)
resource = get_data(config[:url])
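# For every task whose id matches the pattern and whose first status falls inside the
# window, a RUNNING task older than (window - threshold) or a FAILED/KILLED task makes
# the check critical; otherwise the check reports OK.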
resource['tasks'].each do |unit|
if unit['id'].match?(/#{config[:pattern].sub('.', '.*')}/) && unit['statuses'][0]['timestamp'] > t - config[:window].to_f.round(4)
if unit['state'].match?(/RUNNING/)
if t - unit['statuses'][0]['timestamp'] > (config[:window].to_f.round(4) - config[:threshold].to_f.round(4))
message "JOB: #{unit['id']} is taking too long to finish..."
critical
end
elsif unit['state'].match?(/FAILED|KILLED/)
message "JOB: #{unit['id']}"
critical
end
end
end
ok
end
end
|
PTC-Global/sensu-plugins-dcos
|
bin/metrics-dcos-containers.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# dcos-metrics
#
# DESCRIPTION:
# This plugin extracts the container metrics from a dcos server
#
# OUTPUT:
# metric data
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: uri
# gem: net/http
# gem: socket
# gem: json
#
# USAGE:
# #YELLOW
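# An illustrative invocation (options and defaults are defined below; framework_name is
# the dimension handled specially in get_extra_tags):
# metrics-dcos-containers.rb -h localhost -p 61001 -u '/system/v1/metrics/v0/containers' -d 'framework_name'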
#
# NOTES:
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'sensu-plugin/metric/cli'
require 'json'
require 'net/http'
require 'uri'
require 'socket'
require 'sensu-plugins-dcos'
class DCOSMetrics < Sensu::Plugin::Metric::CLI::Graphite
include Common
option :scheme,
description: 'Metric naming scheme',
short: '-s SCHEME',
long: '--scheme SCHEME',
default: "#{Socket.gethostname}.dcos.container"
option :server,
description: 'DCOS Host',
short: '-h SERVER',
long: '--host SERVER',
default: 'localhost'
option :port,
description: 'DCOS-metrics port',
short: '-p PORT',
long: '--port PORT',
required: false,
default: '61001'
option :agent_ip_discovery_command,
description: 'DCOS agent ip discovery command',
long: '--agent-ip-discovery-command COMMAND',
required: false,
default: '/opt/mesosphere/bin/detect_ip'
option :agent_port,
description: 'DCOS agent port',
long: '--agent-port PORT',
required: false,
default: '5051'
option :uri,
description: 'Endpoint URI',
short: '-u URI',
long: '--uri URI',
default: '/system/v1/metrics/v0/containers'
option :dimensions,
description: 'comma-separated list of dimensions to add into the output',
short: '-d DIMENSIONS',
long: '--dimensions DIMENSIONS',
required: false
def mesos_frameworks
# Return the memoized result if exists. This will ensure that the mesos
# state endpoint will be called only once and when needed and return the
# cached result immediately for subsequent calls.
return @mesos_frameworks if @mesos_frameworks
agent_ip = `#{config[:agent_ip_discovery_command]}`.strip # strip the trailing newline so the URL below stays valid
state = get_data("http://#{agent_ip}:#{config[:agent_port]}/state")
@mesos_frameworks = {}
%w[frameworks completed_frameworks].each do |fw_key|
state[fw_key].each do |framework|
@mesos_frameworks[framework['id']] = framework['name']
end
end
@mesos_frameworks
end
def get_extra_tags(dimensions)
extra_tags = []
return extra_tags unless config[:dimensions]
config[:dimensions].tr(' ', '').split(',').each do |d|
# Special case for app metrics, framework_name dimension does not exist
# in app metrics and in some cases app metrics/dimensions are not
# available, see https://jira.mesosphere.com/browse/DCOS_OSS-2043 for
# upstream issue.
if d == 'framework_name' && !dimensions.key?('framework_name')
extra_tags.push(mesos_frameworks[dimensions['framework_id']])
else
extra_tags.push(dimensions[d])
end
end
extra_tags
end
def run
containers = get_data("http://#{config[:server]}:#{config[:port]}#{config[:uri]}")
unless containers.nil? || containers.empty?
containers.each do |container|
container_metrics = get_data("http://#{config[:server]}:#{config[:port]}#{config[:uri]}/#{container}")
if container_metrics.key?('datapoints')
extra_tags = get_extra_tags(container_metrics['dimensions'])
container_metrics['datapoints'].each do |metric|
metric['name'].tr!('/', '.')
metric['name'].squeeze!('.')
output([config[:scheme], extra_tags, container, metric['unit'], metric['name']].compact.join('.'), metric['value'])
end
end
app_metrics = get_data("http://#{config[:server]}:#{config[:port]}#{config[:uri]}/#{container}/app")
next if app_metrics['datapoints'].nil?
app_dimensions = app_metrics['dimensions']
# merge container dimensions into app dimensions since app dimensions contain fewer fields
app_dimensions = container_metrics['dimensions'].merge(app_dimensions) if container_metrics.key?('dimensions')
extra_tags = get_extra_tags(app_dimensions)
app_metrics['datapoints'].each do |metric|
unless metric['tags'].nil?
metric['tags'].each do |k, v|
metric['name'] = [metric['name'], k, v].join('.')
end
end
metric['name'].tr!('/', '.')
metric['name'].squeeze!('.')
metric['unit'] = 'na' if metric['unit'].empty?
output([config[:scheme], extra_tags, container, 'app', metric['unit'], metric['name']].compact.join('.'), metric['value'])
end
end
end
ok
end
end
|
PTC-Global/sensu-plugins-dcos
|
bin/check-dcos-container-count.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# check-dcos-container-count
#
# DESCRIPTION:
# This plugin checks the number of containers exposed by the dcos-metrics API
#
# OUTPUT:
# Plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
#
# USAGE:
# This example checks that the count of running containers is between 150 and 300
# check-dcos-container-count.rb -u 'http://127.0.0.1:61001/system/v1/metrics/v0/containers' -w 150 -c 100 -W 300 -C 350
#
# NOTES:
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'sensu-plugin/check/cli'
require 'json'
require 'net/http'
require 'uri'
require 'sensu-plugins-dcos'
#
# Check DCOS API
#
class CheckDcosContainerCount < Sensu::Plugin::Check::CLI
include Common
option :url,
description: 'URL',
short: '-u URL',
long: '--url URL',
default: 'http://127.0.0.1:61001/system/v1/metrics/v0/containers'
option :metric,
description: 'Metric Name',
short: '-m METRIC',
long: '--metric METRIC',
default: 'foo'
option :filter,
description: 'Filter by Tags',
short: '-f TAG_NAME:TAG_VALUE',
long: '--filter TAG_NAME:TAG_VALUE',
default: nil
option :warnhigh,
short: '-W N',
long: '--warnhigh N',
description: 'WARNING HIGH threshold',
proc: proc(&:to_i),
default: 5000
option :crithigh,
short: '-C N',
long: '--crithigh N',
description: 'CRITICAL HIGH threshold',
proc: proc(&:to_i),
default: 9000
option :warnlow,
short: '-w N',
long: '--warnlow N',
description: 'WARNING LOW threshold',
proc: proc(&:to_i),
default: -1
option :critlow,
short: '-c N',
long: '--critlow N',
description: 'CRITICAL LOW threshold',
proc: proc(&:to_i),
default: -1
def run
value = get_data(config[:url]).length
message "container.count = #{value}"
if value >= config[:crithigh] || value <= config[:critlow]
critical
elsif value >= config[:warnhigh] || value <= config[:warnlow]
warning
else
ok
end
end
end
|
PTC-Global/sensu-plugins-dcos
|
bin/check-dcos-component-health.rb
|
#! /usr/bin/env ruby
# frozen_string_literal: true
#
# check-dcos-component-health
#
# DESCRIPTION:
# This plugin checks the health of DC/OS components exposed by the system/health/v1/units API endpoint
#
# OUTPUT:
# Plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
#
# USAGE:
# check-dcos-component-health.rb -u 'http://127.0.0.1:1050/system/health/v1/units' -c 'exhibitor.service'
#
# You can also run an overall health report to see if there are any failing units:
# check-dcos-component-health.rb -u 'http://127.0.0.1:1050/system/health/v1/units'
#
# NOTES:
#
# LICENCE:
# PTC http://www.ptc.com/
# Copyright 2017 PTC Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'sensu-plugin/check/cli'
require 'json'
require 'net/http'
require 'uri'
require 'sensu-plugins-dcos'
#
# Check DCOS System Health API
#
class CheckDcosComponentHealth < Sensu::Plugin::Check::CLI
include Common
option :url,
description: 'URL',
short: '-u URL',
long: '--url URL',
default: 'http://127.0.0.1:1050/system/health/v1/units'
option :component,
description: 'Component ID',
short: '-c COMPONENT',
long: '--component COMPONENT',
default: nil
option :filter,
description: 'Filter by Tags',
short: '-f TAG_NAME:TAG_VALUE',
long: '--filter TAG_NAME:TAG_VALUE',
default: nil
def run
if config[:component]
value = get_value(config[:url], config[:component], config[:filter], 'id', 'health', 'units')
message "#{config[:component]} = #{value}"
if value.zero?
ok
else
critical
end
else
failed = 0
resource = get_data(config[:url])
resource['units'].each do |unit|
failed += unit['health']
end
message "components.unhealthy = #{failed}"
if failed.zero?
ok
else
critical
end
end
end
end
|
dinesh/mongo-memcached
|
test/test_mongo-memcached.rb
|
require 'helper'
require 'db'
class TestMongoMemcached < Test::Unit::TestCase
context "Testing memcached connection" do
setup do
$config = YAML.load(IO.read(File.join(File.dirname(__FILE__), '/../config/memcache.yml')))['test']
$cache = Memcached.new( Array($config['servers']) )
$cache.flush
DB::clear
end
should "memcached connection should be according to the given servers" do
assert_equal($cache.servers.size, $config['servers'].size )
end
end
end
|
dinesh/mongo-memcached
|
lib/mongo_memcached/membase.rb
|
module MongoMemcached
module Membase
def self.included base
class << base
attr_accessor :repository
delegate :repository, :to => "self.class"
end
end
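# fetch: for an Array of keys, multi-gets them and calls the block with the keys that
# missed so their values can be merged in; for a single key, returns the cached value,
# or stores and returns the block's value (or options[:raw]) when the read fails or
# comes back nil.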
def fetch(keys, options = {}, &block)
case keys
when Array
keys = keys.collect { |key| cache_key(key) }
hits = repository.get(keys)
if (missed_keys = keys - hits.keys).any?
missed_values = block.call(missed_keys)
hits.merge!( missed_keys.zip( Array(missed_values) ).flatten.to_hash )
end
hits
else
begin
value = repository.get(keys)
rescue
options[:owerite] = true
end
repository.set(keys, new_value = options[:raw] || ( block ? block.call : nil ) ) if options[:owerite] or value.nil?
v = new_value || value
puts "\tGET: #{keys} : #{v.inspect}\n"
v
end
end
def get(keys, options = {}, &block)
case keys
when Array
fetch(keys, options, &block)
else
fetch(keys, options) do
if block_given?
set(keys, result = yield(keys), options)
result
end
end
end
end
def add(key, value, options = {})
if repository.add(cache_key(key), value, options[:ttl] || 0, options[:raw]) == "NOT_STORED\r\n"
yield
end
end
def set(key, value, options = {})
key = cache_key(key, options[:primary])
puts "\tSET:#{key.inspect} ==> #{value.inspect}\n"
repository.set(key, value, options[:ttl] || 0)
end
def incr(key, delta = 1, ttl = 0)
repository.incr(cache_key = cache_key(key), delta) || begin
repository.add(cache_key, (result = yield).to_s, ttl, true) { repository.incr(cache_key) }
result
end
end
def decr(key, delta = 1, ttl = 0)
repository.decr(cache_key = cache_key(key), delta) || begin
repository.add(cache_key, (result = yield).to_s, ttl, true) { repository.decr(cache_key) }
result
end
end
def expire(key)
puts "memcache delete: #{key}"
repository.delete(cache_key(key))
end
end
end
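# A minimal usage sketch (class and key names are hypothetical; the including class
# must supply the cache_key helper, which this module calls but does not define):
#
#   class Page
#     include MongoMemcached::Membase
#     self.repository = Memcached.new(['127.0.0.1:11211'])
#
#     def cache_key(key, primary = nil)
#       "page:#{key}"
#     end
#   end
#
#   Page.new.fetch('home') { 'rendered body' }  # stores and returns the block value on a miss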
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.