repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
adamgeorgeson/eventq
|
eventq_aws/lib/eventq_aws/aws_queue_client.rb
|
module EventQ
  module Amazon
    # Thin wrapper around the AWS SQS/SNS SDK clients used by EventQ.
    # Holds account/region configuration and builds queue/topic ARNs.
    class QueueClient
      # options:
      #   :aws_key / :aws_secret        - optional static credentials.
      #   :aws_account_number           - required AWS account id.
      #   :sns_keep_alive_timeout       - SNS http_idle_timeout (default 30).
      #   :sns_continue_timeout         - SNS http_continue_timeout (default 15).
      #   :aws_region                   - region override; otherwise taken from Aws.config.
      def initialize(options = {})
        if options.key?(:aws_key)
          Aws.config[:credentials] = Aws::Credentials.new(options[:aws_key], options[:aws_secret])
        end

        raise ':aws_account_number option must be specified.'.freeze unless options.key?(:aws_account_number)

        @aws_account = options[:aws_account_number]
        @sns_keep_alive_timeout = options[:sns_keep_alive_timeout] || 30
        @sns_continue_timeout = options[:sns_continue_timeout] || 15

        if options.key?(:aws_region)
          @aws_region = options[:aws_region]
          Aws.config[:region] = @aws_region
        else
          @aws_region = Aws.config[:region]
        end
      end

      # Returns the AWS SQS Client (lazily constructed, memoized).
      def sqs
        @sqs ||= Aws::SQS::Client.new
      end

      # Returns the AWS SNS Client (lazily constructed with the configured
      # keep-alive/continue timeouts).
      def sns
        @sns ||= Aws::SNS::Client.new(
          http_idle_timeout: @sns_keep_alive_timeout,
          http_continue_timeout: @sns_continue_timeout
        )
      end

      # Builds the SNS topic ARN for the given event type.
      def get_topic_arn(event_type)
        full_type = EventQ.create_event_type(event_type)
        "arn:aws:sns:#{@aws_region}:#{@aws_account}:#{aws_safe_name(full_type)}"
      end

      # Builds the SQS queue ARN for the given queue.
      def get_queue_arn(queue)
        full_name = EventQ.create_queue_name(queue.name)
        "arn:aws:sqs:#{@aws_region}:#{@aws_account}:#{aws_safe_name(full_name)}"
      end

      # Creates (or fetches, SNS create_topic is idempotent per AWS docs —
      # confirm) the topic and returns its ARN.
      def create_topic_arn(event_type)
        full_type = EventQ.create_event_type(event_type)
        sns.create_topic(name: aws_safe_name(full_type)).topic_arn
      end

      # Returns the URL of the queue. The queue will be created when it does
      #
      # @param queue [EventQ::Queue]
      def get_queue_url(queue)
        full_name = EventQ.create_queue_name(queue.name)
        response = sqs.get_queue_url(
          queue_name: aws_safe_name(full_name),
          queue_owner_aws_account_id: @aws_account,
        )
        response.queue_url
      end

      # Truncates to 80 characters and strips everything except letters,
      # digits, '_' and '-' (the character set SQS/SNS names allow).
      def aws_safe_name(name)
        name[0..79].gsub(/[^a-zA-Z\d_\-]/, '')
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/date_writer.rb
|
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Emits Oj's custom-object ('^O') hash layout for Date values.
        class DateWriter < AttributeWriter
          # Matches plain Date instances only; DateTime is excluded so its
          # own writer can handle it.
          def valid?(obj)
            !obj.is_a?(DateTime) && obj.is_a?(Date)
          end

          # Encodes the date's components, including the calendar-reform
          # start day, as Oj expects for round-tripping a Date.
          def exec(obj)
            { '^O': 'Date' }.merge(
              year: obj.year,
              month: obj.month,
              day: obj.day,
              start: obj.start
            )
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_aws/lib/eventq_aws/version.rb
|
# frozen_string_literal: true
module EventQ
module Amazon
# Gem version of the eventq_aws package.
VERSION = "1.17.0"
end
end
|
adamgeorgeson/eventq
|
eventq_aws/lib/eventq_aws.rb
|
<filename>eventq_aws/lib/eventq_aws.rb<gh_stars>0
require 'aws-sdk-core'
require 'eventq_base'
require 'eventq_aws/version'
require 'eventq_aws/aws_eventq_client'
require 'eventq_aws/aws_queue_client'
require 'eventq_aws/aws_queue_manager'
require 'eventq_aws/aws_subscription_manager'
require_relative 'eventq_aws/aws_status_checker'
if RUBY_PLATFORM =~ /java/
require 'eventq_aws/jruby/aws_queue_worker'
else
require 'eventq_aws/aws_queue_worker'
require 'eventq_aws/aws_queue_worker_v2'
end
module EventQ
  class << self
    # Optional prefix applied to every event type and queue name.
    attr_accessor :namespace
  end

  # Returns the event type, prefixed with the configured namespace when set.
  def self.create_event_type(event_type)
    return event_type if namespace.nil?
    "#{namespace}-#{event_type}"
  end

  # Returns the queue name, prefixed with the configured namespace when set.
  def self.create_queue_name(queue_name)
    return queue_name if namespace.nil?
    "#{namespace}-#{queue_name}"
  end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base.rb
|
<filename>eventq_base/lib/eventq_base.rb<gh_stars>0
require 'securerandom'
require 'redlock'
require 'class_kit'
require 'hash_kit'
require_relative 'eventq_base/version'
require_relative 'eventq_base/eventq_logger'
require_relative 'eventq_base/queue'
require_relative 'eventq_base/exchange'
require_relative 'eventq_base/queue_message'
require_relative 'eventq_base/message_args'
require_relative 'eventq_base/queue_worker_contract'
require_relative 'eventq_base/event_raised_exchange'
require_relative 'eventq_base/event_raised_queue'
require_relative 'eventq_base/subscription_manager_contract'
require_relative 'eventq_base/eventq_client_contract'
require_relative 'eventq_base/configuration'
require_relative 'eventq_base/serialization_providers'
require_relative 'eventq_base/worker_id'
require_relative 'eventq_base/nonce_manager'
require_relative 'eventq_base/signature_providers'
require_relative 'eventq_base/exceptions'
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/rational_writer.rb
|
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Emits Oj's custom-object ('^O') hash layout for Rational values.
        class RationalWriter < AttributeWriter
          # Matches Rational instances.
          def valid?(obj)
            obj.is_a?(Rational)
          end

          # Encodes numerator/denominator so the Rational can be rebuilt.
          def exec(obj)
            { '^O': 'Rational' }.merge(
              numerator: obj.numerator,
              denominator: obj.denominator
            )
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/class_writer.rb
|
<reponame>adamgeorgeson/eventq
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Fallback writer used for arbitrary objects: encodes the object as
        # Oj's '^o' class hash of its instance variables.
        class ClassWriter < AttributeWriter
          # Never self-selects; AttributeWriter.exec falls back to this
          # class explicitly when no other writer matches.
          def valid?(obj)
            false
          end

          # Builds { '^o' => klass, ivar_name => encoded_value, ... }.
          # Ivar names are strings with the leading '@' removed.
          def exec(obj)
            obj.instance_variables.each_with_object('^o': obj.class) do |ivar, hash|
              hash[ivar[1..-1]] = AttributeWriter.exec(obj.instance_variable_get(ivar))
            end
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_rabbitmq/lib/eventq_rabbitmq/rabbitmq_queue_manager.rb
|
<filename>eventq_rabbitmq/lib/eventq_rabbitmq/rabbitmq_queue_manager.rb
module EventQ
  module RabbitMq
    # Declares and wires together the RabbitMQ queues and exchanges that
    # back EventQ subscriber queues, including the retry (dead-letter) path.
    class QueueManager
      X_DEAD_LETTER_EXCHANGE = 'x-dead-letter-exchange'.freeze
      X_MESSAGE_TTL = 'x-message-ttl'.freeze

      # Whether declared queues/exchanges are durable (defaults to true).
      attr_accessor :durable

      def initialize
        @event_raised_exchange = EventQ::EventRaisedExchange.new
        @durable = true
      end

      # Declares the subscriber queue, sets up the retry queue/exchange pair
      # when retries are enabled, binds the queue to its subscriber exchange
      # and returns it.
      def get_queue(channel, queue)
        full_name = EventQ.create_queue_name(queue.name)
        declared = channel.queue(full_name, :durable => @durable)
        subscriber_exchange = get_subscriber_exchange(channel, queue)
        if queue.allow_retry
          get_retry_queue(channel, queue).bind(get_retry_exchange(channel, queue))
        end
        declared.bind(subscriber_exchange)
        declared
      end

      # Pops one message, returning [delivery_tag, payload] or [nil, nil]
      # when the queue is empty. JRuby's AMQP client returns two values from
      # #pop; MRI bunny returns three (headers, properties, payload).
      def pop_message(queue:)
        if RUBY_PLATFORM =~ /java/
          headers, payload = queue.pop({ :ack => true, :block => true })
        else
          headers, _properties, payload = queue.pop({ :manual_ack => true, :block => true })
        end
        return [nil, nil] if headers.nil?
        [headers.delivery_tag, payload]
      end

      def get_queue_exchange(channel, queue)
        channel.fanout("#{EventQ.create_exchange_name(queue.name)}.ex")
      end

      def get_retry_exchange(channel, queue)
        channel.fanout("#{EventQ.create_queue_name(queue.name)}.r.ex")
      end

      def get_subscriber_exchange(channel, queue)
        channel.fanout("#{EventQ.create_queue_name(queue.name)}.ex")
      end

      def get_delay_exchange(channel, queue, delay)
        channel.direct("#{EventQ.create_queue_name(queue.name)}.#{delay}.d.ex")
      end

      # Declares the retry queue: messages parked here dead-letter back to
      # the subscriber exchange once their TTL expires. With back-off
      # enabled the queue-level TTL is the maximum delay; otherwise it is
      # the fixed retry delay.
      def get_retry_queue(channel, queue)
        subscriber_exchange = get_subscriber_exchange(channel, queue)
        full_name = EventQ.create_queue_name(queue.name)
        ttl = queue.allow_retry_back_off == true ? queue.max_retry_delay : queue.retry_delay
        EventQ.logger.debug { "[#{self.class}] - Requesting retry queue. x-dead-letter-exchange: #{subscriber_exchange.name} | x-message-ttl: #{ttl}" }
        channel.queue("#{full_name}.r", :durable => @durable, :arguments => { X_DEAD_LETTER_EXCHANGE => subscriber_exchange.name, X_MESSAGE_TTL => ttl })
      end

      # Declares a per-delay holding queue that dead-letters into dlx_name
      # after `delay` seconds (TTL is specified in milliseconds).
      def create_delay_queue(channel, queue, dlx_name, delay = 0)
        full_name = EventQ.create_queue_name(queue.name)
        channel.queue("#{full_name}.#{delay}.delay", durable: @durable,
                      arguments: { X_DEAD_LETTER_EXCHANGE => dlx_name, X_MESSAGE_TTL => delay * 1000 })
      end

      def get_exchange(channel, exchange)
        channel.direct(EventQ.create_exchange_name(exchange.name), :durable => @durable)
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/array_writer.rb
|
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Converts Array values element-by-element into their Oj-serializable
        # form, producing a new array.
        class ArrayWriter < AttributeWriter
          # Matches Array instances.
          def valid?(obj)
            obj.is_a?(Array)
          end

          # Returns a fresh array of converted elements.
          def exec(obj)
            obj.map { |element| AttributeWriter.exec(element) }
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_aws/lib/eventq_aws/aws_queue_manager.rb
|
module EventQ
  module Amazon
    # Creates, updates and deletes the SQS queues / SNS topics that back
    # EventQ subscriber queues.
    class QueueManager
      VISIBILITY_TIMEOUT = 'VisibilityTimeout'.freeze
      MESSAGE_RETENTION_PERIOD = 'MessageRetentionPeriod'.freeze

      # options:
      #   :client                   - required EventQ::Amazon::QueueClient.
      #   :visibility_timeout       - seconds; defaults to 300 (5 minutes).
      #   :message_retention_period - seconds; defaults to 1209600 (14 days,
      #                               the AWS maximum).
      def initialize(options)
        raise ':client (QueueClient) must be specified.'.freeze if options[:client].nil?

        @client = options[:client]
        @visibility_timeout = options.key?(:visibility_timeout) ? options[:visibility_timeout] : 300
        @message_retention_period = options.key?(:message_retention_period) ? options[:message_retention_period] : 1209600
      end

      # Returns the queue URL, creating the queue when it does not exist and
      # refreshing its attributes when it does.
      def get_queue(queue)
        queue_exists?(queue) ? update_queue(queue) : create_queue(queue)
      end

      # Creates the SQS queue with the configured attributes; returns its URL.
      def create_queue(queue)
        response = @client.sqs.create_queue({
          queue_name: EventQ.create_queue_name(queue.name),
          attributes: queue_attributes
        })
        response.queue_url
      end

      # Deletes the queue (creating it first if missing, since get_queue is
      # used to resolve the URL). Always returns true.
      def drop_queue(queue)
        url = get_queue(queue)
        @client.sqs.delete_queue({ queue_url: url })
        true
      end

      # Deletes the SNS topic for the event type. Always returns true.
      def drop_topic(event_type)
        @client.sns.delete_topic({ topic_arn: @client.get_topic_arn(event_type) })
        true
      end

      # True when fetching the topic's attributes succeeds, false when the
      # lookup raises (e.g. the topic does not exist).
      def topic_exists?(event_type)
        @client.sns.get_topic_attributes({ topic_arn: @client.get_topic_arn(event_type) })
        true
      rescue
        false
      end

      # True when at least one queue URL matches the (namespaced) queue name
      # as a prefix.
      def queue_exists?(queue)
        prefix = EventQ.create_queue_name(queue.name)
        !@client.sqs.list_queues({ queue_name_prefix: prefix }).queue_urls.empty?
      end

      # Re-applies the configured attributes to an existing queue; returns
      # its URL.
      def update_queue(queue)
        url = @client.get_queue_url(queue)
        @client.sqs.set_queue_attributes({
          queue_url: url, # required
          attributes: queue_attributes
        })
        url
      end

      private

      # Shared SQS attribute payload; SQS requires string values.
      def queue_attributes
        {
          VISIBILITY_TIMEOUT => @visibility_timeout.to_s,
          MESSAGE_RETENTION_PERIOD => @message_retention_period.to_s
        }
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_rabbitmq/spec/eventq_rabbitmq/rabbitmq_queue_client_spec.rb
|
# Integration spec for EventQ::RabbitMq::QueueClient / QueueManager.
# Requires a live RabbitMQ broker reachable at host 'rabbitmq'.
RSpec.describe EventQ::RabbitMq::QueueClient do
let(:client) do
return EventQ::RabbitMq::QueueClient.new({ endpoint: 'rabbitmq' })
end
let(:connection) { client.get_connection }
let(:channel) { connection.create_channel }
# Publishes to a bound queue, nacks the message and verifies it is routed
# to the dead-letter exchange/queue.
it 'should use dead-letter exchange' do
manager = EventQ::RabbitMq::QueueManager.new
x = channel.fanout("amq.fanout")
dlx = channel.fanout("bunny.examples.dlx.exchange")
q = channel.queue("subscriber", :durable => true, :arguments => {"x-dead-letter-exchange" => dlx.name}).bind(x, :routing_key => 'post')
# dead letter queue
dlq = channel.queue("subscriber_retry", :exclusive => true).bind(dlx)
x.publish("", :routing_key => 'post')
sleep 0.2
delivery_tag, payload = manager.pop_message(queue: q)
EventQ.logger.debug { "#{dlq.message_count} messages dead lettered so far" }
expect(dlq.message_count).to eq(0)
EventQ.logger.debug { "Rejecting a message" }
channel.nack(delivery_tag)
sleep 0.2
# Re-open a channel: the exclusive dlq above is tied to the old channel,
# so redeclare the dlx/dlq to observe the dead-lettered message.
channel = connection.create_channel
dlx = channel.fanout("bunny.examples.dlx.exchange")
dlq = channel.queue("subscriber_retry", :exclusive => true).bind(dlx)
EventQ.logger.debug { "#{dlq.message_count} messages dead lettered so far" }
expect(dlq.message_count).to eq(1)
dlx.delete
EventQ.logger.debug { "Disconnecting..." }
end
# Verifies a TTL'd retry queue dead-letters expired messages back into the
# subscriber queue.
it 'should use a delay queue correctly' do
retry_exchange = channel.fanout(SecureRandom.uuid)
subscriber_exchange = channel.fanout(SecureRandom.uuid)
retry_queue_def = EventQ::Queue.new
retry_queue_def.name = SecureRandom.uuid
queue_manager = EventQ::RabbitMq::QueueManager.new
retry_queue = channel.queue(retry_queue_def.name, :arguments => { "x-dead-letter-exchange" => subscriber_exchange.name, "x-message-ttl" => 600 }).bind(retry_exchange)
subscriber_queue_name = SecureRandom.uuid
subscriber_queue = channel.queue(subscriber_queue_name).bind(subscriber_exchange)
message = 'Hello World'
retry_exchange.publish(message)
delivery_tag, payload = queue_manager.pop_message(queue: retry_queue)
expect(payload).to eq(message)
retry_queue.purge
retry_exchange.publish(message)
channel = connection.create_channel
subscriber_queue = channel.queue(subscriber_queue_name).bind(subscriber_exchange)
# Wait beyond the 600ms TTL so the message expires into the subscriber queue.
sleep(1)
delivery_tag, payload = queue_manager.pop_message(queue: subscriber_queue)
expect(payload).to eq(message)
channel.acknowledge(delivery_tag, false)
end
it 'should expire message from retry queue back into subscriber queue' do
q = EventQ::Queue.new
q.name = 'retry.test.queue'
q.allow_retry = true
q.retry_delay = 500
qm = EventQ::RabbitMq::QueueManager.new
queue = qm.get_queue(channel, q)
retry_queue = qm.get_retry_queue(channel, q)
retry_exchange = qm.get_retry_exchange(channel, queue)
message = 'Hello World'
retry_exchange.publish(message)
# Wait beyond the 500ms retry delay for the dead-letter round trip.
sleep(2)
delivery_tag, payload = qm.pop_message(queue: queue)
expect(payload).to eq(message)
channel.acknowledge(delivery_tag, false)
end
it 'should deliver a message from a queue' do
manager = EventQ::RabbitMq::QueueManager.new
queue = channel.queue(SecureRandom.uuid, :durable => true)
exchange = channel.fanout(SecureRandom.uuid)
queue.bind(exchange)
exchange.publish('Hello World')
sleep 0.5
delivery_tag, payload = manager.pop_message(queue: queue)
expect(payload).to eq 'Hello World'
end
after do
channel.close if channel.open?
connection.close
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/exceptions.rb
|
require_relative 'exceptions/invalid_signature_exception'
|
adamgeorgeson/eventq
|
eventq_base/spec/eventq_base/serialization_providers/jruby/oj/serializer_spec.rb
|
require_relative 'test_item'
# Spec for the JRuby Oj-compatible serializer: the JSON it emits must be
# loadable by CRuby's real Oj gem (cross-engine compatibility).
RSpec.describe EventQ::SerializationProviders::JRuby::Oj::Serializer do
let(:hash1) do
{ string: 'foo', time: Time.now }
end
let(:hash2) do
{ string: 'bar', datetime: DateTime.now }
end
let(:item1) do
TestItem.new.tap do |e|
e.string = 'foo'
e.number = 10
e.float = 12.5
e.date = Date.today
e.datetime = DateTime.now
e.time = Time.now
end
end
let(:item2) do
TestItem.new.tap do |e|
e.string = 'bar'
e.number = 34
e.float = 50.02
e.date = Date.today
e.datetime = DateTime.now
e.time = Time.now
end
end
# item3 nests hashes, arrays and other TestItems to exercise the recursive
# attribute writers.
let(:item3) do
TestItem.new.tap do |e|
e.string = 'bar'
e.number = 20
e.float = 22.2
e.date = Date.today
e.datetime = DateTime.now
e.time = Time.now
e.hash = hash1.dup
e.array_hash = [hash1.dup, hash2.dup]
e.test_item = item1.dup
e.array_test_item = [item1.dup, item2.dup]
end
end
describe '#dump' do
let(:json) { subject.dump(item3) }
# Only meaningful on CRuby, where the real Oj gem is available to verify
# round-tripping; on JRuby this block is skipped.
unless RUBY_PLATFORM =~ /java/
require 'oj'
it 'creates json that CRuby OJ can deserialize' do
itm = Oj.load(json)
expect(itm).to be_a(TestItem)
expect(itm.string).to eq item3.string
expect(itm.number).to eq item3.number
expect(itm.float).to eq item3.float
expect(itm.date).to eq item3.date
expect(itm.datetime).to eq item3.datetime
# Compare times as floats to avoid sub-second precision mismatches.
expect(itm.time.to_f).to eq item3.time.to_f
expect(itm.hash).to be_a(Hash)
expect(itm.hash['string']).to eq hash1[:string]
expect(itm.hash['time'].to_f).to eq hash1[:time].to_f
expect(itm.array_hash).to be_a(Array)
expect(itm.array_hash.length).to eq 2
expect(itm.test_item).to be_a(TestItem)
expect(itm.test_item.string).to eq item1.string
expect(itm.array_test_item).to be_a(Array)
expect(itm.array_test_item.length).to eq 2
end
end
# NOTE(review): no timing assertion is made here — this only proves #dump
# completes without raising.
it 'serializes to json in a timely manner' do
require 'benchmark'
Benchmark.measure { subject.dump(item3) }
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/queue.rb
|
module EventQ
  # Describes a subscriber queue and its retry/signature policy.
  class Queue
    attr_accessor :name, :allow_retry, :retry_delay, :max_retry_attempts,
                  :allow_retry_back_off, :max_retry_delay, :require_signature

    def initialize
      # Retries are opt-in.
      @allow_retry = false
      # Default retry delay is 30 seconds (value is in milliseconds).
      @retry_delay = 30_000
      # Give up after five attempts by default.
      @max_retry_attempts = 5
      # Fixed-delay retries by default (no back-off).
      @allow_retry_back_off = false
      # Signed messages are not required by default.
      @require_signature = false
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_aws/lib/eventq_aws/aws_subscription_manager.rb
|
<filename>eventq_aws/lib/eventq_aws/aws_subscription_manager.rb
module EventQ
module Amazon
# Wires SQS queues up to SNS topics: grants SNS permission to deliver to
# the queue, then creates the SNS -> SQS subscription.
class SubscriptionManager
# options:
#   :client        - required EventQ::Amazon::QueueClient.
#   :queue_manager - required EventQ::Amazon::QueueManager.
def initialize(options)
if options[:client] == nil
raise "[#{self.class}] - :client (QueueClient) must be specified."
end
@client = options[:client]
if options[:queue_manager] == nil
raise "[#{self.class}] - :queue_manager (QueueManager) must be specified."
end
@manager = options[:queue_manager]
end
# Subscribes the queue to the SNS topic for event_type, creating the topic
# and queue as needed. Returns true.
def subscribe(event_type, queue)
topic_arn = @client.create_topic_arn(event_type)
q = @manager.get_queue(queue)
queue_arn = @client.get_queue_arn(queue)
# Attach an access policy allowing delivery to this queue, scoped to the
# queue's ARN. NOTE(review): the policy JSON is built by string
# concatenation around queue_arn and grants "Principal": "*" for all
# sqs:* actions — confirm this broad grant is intended.
@client.sqs.set_queue_attributes({
queue_url: q,
attributes:{
'Policy'.freeze => '{
"Version": "2012-10-17",
"Id": "SNStoSQS",
"Statement": [
{
"Sid":"rule1",
"Effect": "Allow",
"Principal": "*",
"Action": "sqs:*",
"Resource": "' + queue_arn + '"
}
]
}'
}
})
# Create the SNS -> SQS subscription itself.
@client.sns.subscribe({
topic_arn: topic_arn,
protocol: 'sqs'.freeze,
endpoint: queue_arn
})
EventQ.logger.debug do
"[#{self.class} #subscribe] - Subscribing Queue: #{queue.name} to topic_arn: #{topic_arn}, endpoint: #{queue_arn}"
end
return true
end
# Not supported via the API; always raises.
def unsubscribe(queue)
raise "[#{self.class}] - Not implemented. Please unsubscribe the queue from the topic inside the AWS Management Console."
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/exchange.rb
|
<reponame>adamgeorgeson/eventq
module EventQ
# Simple value object naming a message exchange.
class Exchange
attr_accessor :name
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/attribute_writer.rb
|
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Base class for the Oj attribute writers. Dispatches a value to the
        # first registered subclass whose #valid? accepts it, falling back
        # to ClassWriter for arbitrary objects.
        class AttributeWriter
          # Converts obj using the first matching writer subclass.
          def self.exec(obj)
            writer = descendants.detect { |candidate| candidate.new.valid?(obj) } || ClassWriter
            writer.new.exec(obj)
          end

          # Enumerates every loaded subclass via ObjectSpace. Entries found
          # later in the scan are unshifted to the front, so they take
          # priority in #exec's dispatch.
          def self.descendants
            found = []
            ObjectSpace.each_object(singleton_class) do |klass|
              next if klass.singleton_class? || klass == self
              found.unshift(klass)
            end
            found
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/hash_writer.rb
|
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Converts Hash values entry-by-entry into their Oj-serializable form.
        class HashWriter < AttributeWriter
          # Matches Hash instances.
          def valid?(obj)
            obj.is_a?(Hash)
          end

          # Returns a new hash whose values have been converted via
          # AttributeWriter. The previous implementation wrote converted
          # values back into the caller's hash (obj[key] = ...), mutating
          # serializer input as a side effect; building a fresh hash keeps
          # serialization side-effect free and matches ArrayWriter, which
          # also returns a new collection.
          def exec(obj)
            obj.each_with_object({}) do |(key, value), converted|
              converted[key] = AttributeWriter.exec(value)
            end
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_aws/spec/integration/aws_queue_worker_v2_spec.rb
|
<filename>eventq_aws/spec/integration/aws_queue_worker_v2_spec.rb
require 'spec_helper'
# Integration spec for EventQ::Amazon::QueueWorkerV2. Requires real AWS
# credentials (EventQ.AWS_ACCOUNT_NUMBER) plus, for the NonceManager
# context, a redis instance at redis:6379. Heavy use of sleep() to allow
# SNS -> SQS propagation.
RSpec.describe EventQ::Amazon::QueueWorkerV2, integration: true do
let(:queue_client) do
EventQ::Amazon::QueueClient.new({ aws_account_number: EventQ.AWS_ACCOUNT_NUMBER, aws_region: 'eu-west-1' })
end
let(:queue_manager) do
EventQ::Amazon::QueueManager.new({ client: queue_client })
end
let(:subscription_manager) do
EventQ::Amazon::SubscriptionManager.new({ client: queue_client, queue_manager: queue_manager })
end
let(:eventq_client) do
EventQ::Amazon::EventQClient.new({ client: queue_client })
end
# A fresh, uniquely-named queue per example to avoid cross-test bleed.
let(:subscriber_queue) do
EventQ::Queue.new.tap do |sq|
sq.name = SecureRandom.uuid.to_s
end
end
let(:event_type) { 'queue_worker_event1' }
let(:event_type2) { 'queue_worker_event2' }
let(:message) { 'Hello World' }
let(:message_context) { { 'foo' => 'bar' } }
it 'should receive an event from the subscriber queue' do
subscription_manager.subscribe(event_type, subscriber_queue)
eventq_client.raise_event(event_type, message, message_context)
received = false
context = nil
# wait 1 second to allow the message to be sent and broadcast to the queue
sleep(1)
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
expect(event).to eq(message)
expect(args).to be_a(EventQ::MessageArgs)
context = message_context
received = true
EventQ.logger.debug { "Message Received: #{event}" }
end
sleep(2)
subject.stop
expect(received).to eq(true)
expect(context).to eq message_context
expect(subject.is_running).to eq(false)
end
context 'when queue requires a signature' do
let(:secret) { 'secret' }
before do
EventQ::Configuration.signature_secret = secret
subscriber_queue.require_signature = true
end
context 'and the received message contains a valid signature' do
it 'should process the message' do
subscription_manager.subscribe(event_type, subscriber_queue)
eventq_client.raise_event(event_type, message)
received = false
# wait 1 second to allow the message to be sent and broadcast to the queue
sleep(1)
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
expect(event).to eq(message)
expect(args).to be_a(EventQ::MessageArgs)
received = true
EventQ.logger.debug { "Message Received: #{event}" }
end
sleep(2)
subject.stop
expect(received).to eq(true)
expect(subject.is_running).to eq(false)
end
end
context 'and the received message contains an invalid signature' do
before do
EventQ::Configuration.signature_secret = 'invalid'
end
# NOTE(review): despite the example name, this block asserts
# received == true — confirm whether the expectation matches the
# intended "should NOT process" behavior.
it 'should NOT process the message' do
subscription_manager.subscribe(event_type, subscriber_queue)
eventq_client.raise_event(event_type, message)
received = false
#wait 1 second to allow the message to be sent and broadcast to the queue
sleep(1)
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
expect(event).to eq(message)
expect(args).to be_a(EventQ::MessageArgs)
received = true
EventQ.logger.debug { "Message Received: #{event}" }
end
sleep(2)
subject.stop
expect(received).to eq(true)
expect(subject.is_running).to eq(false)
end
end
end
# Aborting a message (args.abort = true) should requeue it once; the second
# delivery carries retry_attempts == 1.
it 'should receive an event from the subscriber queue and retry it (abort).' do
subscriber_queue.retry_delay = 1000
subscriber_queue.allow_retry = true
subscription_manager.subscribe(event_type, subscriber_queue)
eventq_client.raise_event(event_type, message)
received = false
received_count = 0
received_attribute = 0;
# wait 1 second to allow the message to be sent and broadcast to the queue
sleep(1)
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
expect(event).to eq(message)
expect(args).to be_a(EventQ::MessageArgs)
received = true
received_count += 1
received_attribute = args.retry_attempts
EventQ.logger.debug { "Message Received: #{event}" }
if received_count != 2
args.abort = true
end
end
sleep(4)
subject.stop
expect(received).to eq(true)
expect(received_count).to eq(2)
expect(received_attribute).to eq(1)
expect(subject.is_running).to eq(false)
end
# An error raised in the handler should behave like an abort: one retry.
it 'should receive an event from the subscriber queue and retry it (error).' do
subscriber_queue.retry_delay = 1000
subscriber_queue.allow_retry = true
subscription_manager.subscribe(event_type, subscriber_queue)
eventq_client.raise_event(event_type, message)
received = false
received_count = 0
received_attribute = 0;
# wait 1 second to allow the message to be sent and broadcast to the queue
sleep(1)
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
expect(event).to eq(message)
expect(args).to be_a(EventQ::MessageArgs)
received = true
received_count += 1
received_attribute = args.retry_attempts
EventQ.logger.debug { "Message Received: #{event}" }
if received_count != 2
raise 'fake error'
end
end
sleep(4)
subject.stop
expect(received).to eq(true)
expect(received_count).to eq(2)
expect(received_attribute).to eq(1)
expect(subject.is_running).to eq(false)
end
it 'should receive multiple events from the subscriber queue' do
subscription_manager.subscribe(event_type2, subscriber_queue)
10.times do
eventq_client.raise_event(event_type2, message)
end
received_messages = []
message_count = 0
# The worker is multi-threaded; guard the shared counters.
mutex = Mutex.new
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
expect(event).to eq(message)
expect(args).to be_a(EventQ::MessageArgs)
mutex.synchronize do
EventQ.logger.debug { "Message Received: #{event}" }
message_count += 1
add_to_received_list(received_messages)
EventQ.logger.debug { 'message processed.' }
end
end
sleep(5)
expect(message_count).to eq(10)
subject.stop
expect(subject.is_running).to eq(false)
end
# With back-off enabled, each failed attempt should roughly double the
# redelivery delay (1s, 2s, 3s, 4s sleeps below track the growing gap).
context 'queue.allow_retry_back_off = true' do
before do
subscriber_queue.retry_delay = 1000
subscriber_queue.allow_retry = true
subscriber_queue.allow_retry_back_off = true
subscriber_queue.max_retry_delay = 5000
end
it 'should receive an event from the subscriber queue and retry it.' do
subscription_manager.subscribe(event_type, subscriber_queue)
eventq_client.raise_event(event_type, message)
retry_attempt_count = 0
# wait 1 second to allow the message to be sent and broadcast to the queue
sleep(1)
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
expect(event).to eq(message)
expect(args).to be_a(EventQ::MessageArgs)
retry_attempt_count = args.retry_attempts + 1
raise 'Fail on purpose to send event to retry queue.'
end
sleep(1)
expect(retry_attempt_count).to eq(1)
sleep(2)
expect(retry_attempt_count).to eq(2)
sleep(3)
expect(retry_attempt_count).to eq(3)
sleep(4)
expect(retry_attempt_count).to eq(4)
subject.stop
expect(subject.is_running).to eq(false)
end
end
# Records one counter entry per worker thread that processed a message.
def add_to_received_list(received_messages)
thread_name = Thread.current.object_id
EventQ.logger.debug { "[THREAD] #{thread_name}" }
thread = received_messages.detect { |i| i[:thread] == thread_name }
if thread != nil
thread[:events] += 1
else
received_messages.push({ :events => 1, :thread => thread_name })
end
end
context 'NonceManager' do
context 'when a message has already been processed' do
before do
EventQ::NonceManager.configure(server: 'redis://redis:6379')
end
let(:queue_message) { EventQ::QueueMessage.new }
let(:event_type) { 'queue_worker_event_noncemanager' }
it 'should NOT process the message again' do
subscription_manager.subscribe(event_type, subscriber_queue)
# Force both raised events to share one message id so the second is
# treated as a duplicate by the NonceManager.
allow(eventq_client).to receive(:new_message).and_return(queue_message)
eventq_client.raise_event(event_type, message)
eventq_client.raise_event(event_type, message)
received_count = 0
#wait 1 second to allow the message to be sent and broadcast to the queue
sleep(1)
subject.start(subscriber_queue, { client: queue_client }) do |event, args|
received_count += 1
end
sleep(2.5)
subject.stop
expect(received_count).to eq 1
end
after do
EventQ::NonceManager.reset
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_aws/spec/integration/aws_queue_manager_spec.rb
|
<filename>eventq_aws/spec/integration/aws_queue_manager_spec.rb
require 'spec_helper'
RSpec.describe EventQ::Amazon::QueueManager, integration: true do
let(:queue_client) do
EventQ::Amazon::QueueClient.new({ aws_account_number: EventQ.AWS_ACCOUNT_NUMBER, aws_region: 'eu-west-1' })
end
subject do
EventQ::Amazon::QueueManager.new({ client: queue_client })
end
describe '#get_queue' do
let(:queue) do
EventQ::Queue.new.tap do |queue|
queue.name = SecureRandom.uuid.gsub('-','')
queue.allow_retry = true
queue.max_retry_attempts = 5
queue.retry_delay = 30
end
end
context 'when a queue does not exist' do
it 'should create the queue' do
queue_url = subject.get_queue(queue)
expect(queue_url).not_to be_nil
end
end
context 'when a queue already exists' do
it 'should update the the queue' do
queue_url = subject.create_queue(queue)
expect(queue_url).not_to be_nil
update_url = subject.get_queue(queue)
expect(update_url).to eq(queue_url)
end
end
end
class TestEvent
end
describe '#topic_exists?' do
context 'when a topic exists' do
let(:event_type) { 'test-event' }
before do
queue_client.create_topic_arn(event_type)
end
it 'should return true' do
expect(subject.topic_exists?(event_type)).to be true
end
end
context 'when a topic does NOT exists' do
let(:event_type) { 'unknown-test-event' }
it 'should return true' do
expect(subject.topic_exists?(event_type)).to be false
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_aws/lib/eventq_aws/aws_queue_worker.rb
|
module EventQ
module Amazon
class QueueWorker
include EventQ::WorkerId
APPROXIMATE_RECEIVE_COUNT = 'ApproximateReceiveCount'.freeze
MESSAGE = 'Message'.freeze
attr_accessor :is_running
# Sets up worker state: thread/fork bookkeeping, callback slots,
# serialization/signature helpers and polling/GC tuning defaults.
def initialize
@threads = []
@forks = []
@is_running = false
# Optional callbacks registered via on_retry_exceeded / on_retry / on_error.
@on_retry_exceeded_block = nil
@on_retry_block = nil
@on_error_block = nil
@hash_helper = HashKit::Helper.new
@serialization_provider_manager = EventQ::SerializationProviders::Manager.new
@signature_provider_manager = EventQ::SignatureProviders::Manager.new
# Force a GC cycle at most every @gc_flush_interval seconds (see #gc_flush).
@last_gc_flush = Time.now
@gc_flush_interval = 10
# SQS long-poll wait in seconds, passed to receive_message.
@queue_poll_wait = 10
end
# Starts listening for messages on the given queue. Returns true.
#
# options:
#   :client - required EventQ::Amazon::QueueClient.
#   other keys are consumed by #configure (defined elsewhere in this
#   class — presumably sets @fork_count, @thread_count and @sleep; confirm).
# block - invoked for each received message (content, MessageArgs).
def start(queue, options = {}, &block)
EventQ.logger.info("[#{self.class}] - Preparing to start listening for messages.")
configure(queue, options)
if options[:client] == nil
raise "[#{self.class}] - :client (QueueClient) must be specified."
end
raise "[#{self.class}] - Worker is already running." if running?
client = options[:client]
EventQ.logger.debug do
"[#{self.class} #start] - Listening for messages on queue: #{queue.name}, Queue Url: #{client.get_queue_url(queue)}, Queue arn: #{client.get_queue_arn(queue)}"
end
EventQ.logger.info("[#{self.class}] - Listening for messages.")
@forks = []
# With more than one fork requested, spawn child processes (each running
# its own polling loop) and reap them from a background thread so #start
# itself does not block.
if @fork_count > 1
Thread.new do
@fork_count.times do
pid = fork do
start_process(options, queue, block)
end
@forks.push(pid)
end
@forks.each { |pid| Process.wait(pid) }
end
else
start_process(options, queue, block)
end
return true
end
# Runs the polling loop in the current process: installs INT/TERM traps,
# spins up @thread_count polling threads and optionally joins them.
def start_process(options, queue, block)
# Shut down cleanly on Ctrl-C / kill.
%w'INT TERM'.each do |sig|
Signal.trap(sig) {
stop
exit
}
end
@is_running = true
@threads = []
#loop through each thread count
@thread_count.times do
thr = Thread.new do
client = options[:client]
manager = EventQ::Amazon::QueueManager.new({ client: client })
#begin the queue loop for this thread
while true do
#check if the worker is still allowed to run and break out of thread loop if not
if !@is_running
break
end
has_message_received = thread_process_iteration(client, manager, queue, block)
gc_flush
if !has_message_received
EventQ.logger.debug { "[#{self.class}] - No message received." }
# Back off between empty polls (@sleep set by #configure).
if @sleep > 0
EventQ.logger.debug { "[#{self.class}] - Sleeping for #{@sleep} seconds" }
sleep(@sleep)
end
end
end
end
@threads.push(thr)
end
# Block the caller when asked to wait, or when running as a forked child
# (the child must not fall through and exit while threads are polling).
if (options.key?(:wait) && options[:wait] == true) || (options.key?(:fork_count) && options[:fork_count] > 1)
@threads.each { |thr| thr.join }
end
end
# Runs a full GC cycle when at least @gc_flush_interval seconds have
# elapsed since the previous forced flush; otherwise does nothing.
def gc_flush
  return unless Time.now - last_gc_flush > @gc_flush_interval
  GC.start
  @last_gc_flush = Time.now
end
# Timestamp of the most recent forced GC cycle (see #gc_flush).
def last_gc_flush
@last_gc_flush
end
# One poll cycle: long-polls SQS for a single message and processes it.
# Returns true when a message was received, false otherwise. Errors are
# logged and routed to the on_error callback rather than raised.
def thread_process_iteration(client, manager, queue, block)
#get the queue
q = manager.get_queue(queue)
received = false
begin
# request a message from the queue
response = client.sqs.receive_message({
queue_url: q,
max_number_of_messages: 1,
wait_time_seconds: @queue_poll_wait,
# Needed so process_message can derive the retry count.
attribute_names: [APPROXIMATE_RECEIVE_COUNT]
})
#check that a message was received
if response.messages.length > 0
received = true
begin
# Mark the thread as busy for the duration of processing
# (tag/untag helpers come from EventQ::WorkerId).
tag_processing_thread
process_message(response, client, queue, q, block)
ensure
untag_processing_thread
end
end
rescue => e
EventQ.log(:error, "[#{self.class}] - An unhandled error occurred. Error: #{e} | Backtrace: #{e.backtrace}")
call_on_error_block(error: e)
end
return received
end
# Invokes the registered on_error callback with (error, message), if any.
# A failure inside the callback is logged and swallowed so it cannot kill
# the polling thread.
def call_on_error_block(error:, message: nil)
if @on_error_block
EventQ.logger.debug { "[#{self.class}] - Executing on_error block." }
begin
@on_error_block.call(error, message)
rescue => e
EventQ.logger.error("[#{self.class}] - An error occurred executing the on_error block. Error: #{e}")
end
else
EventQ.logger.debug { "[#{self.class}] - No on_error block specified to execute." }
end
end
# Invokes the registered on_retry_exceeded callback with the message, if
# any. Callback failures are logged and swallowed.
def call_on_retry_exceeded_block(message)
if @on_retry_exceeded_block != nil
EventQ.logger.debug { "[#{self.class}] - Executing on_retry_exceeded block." }
begin
@on_retry_exceeded_block.call(message)
rescue => e
EventQ.logger.error("[#{self.class}] - An error occurred executing the on_retry_exceeded block. Error: #{e}")
end
else
EventQ.logger.debug { "[#{self.class}] - No on_retry_exceeded block specified." }
end
end
# Invokes the user-supplied on_retry callback (if any) with the message and
# whether processing was aborted; callback errors are logged and swallowed
# so they cannot kill the worker loop.
#
# Fix: the second argument previously resolved to Kernel#abort (no local or
# parameter named `abort` existed), which raises SystemExit — not rescued by
# `rescue => e` — terminating the process whenever an on_retry block was
# registered. It is now an explicit parameter defaulting to false, keeping
# existing call-sites compatible.
#
# @param message [EventQ::QueueMessage] the message being retried
# @param abort [Boolean] whether the worker block requested an abort
def call_on_retry_block(message, abort = false)
  if @on_retry_block
    EventQ.logger.debug { "[#{self.class}] - Executing on_retry block." }
    begin
      @on_retry_block.call(message, abort)
    rescue => e
      EventQ.logger.error("[#{self.class}] - An error occurred executing the on_retry block. Error: #{e}")
    end
  else
    EventQ.logger.debug { "[#{self.class}] - No on_retry block specified." }
  end
end
# Signals the worker loop to exit and waits for every consumer thread to
# finish its current iteration.
#
# @return [true]
def stop
  EventQ.logger.info("[#{self.class}] - Stopping.")
  @is_running = false
  @threads.each(&:join)
  true
end
# Registers a callback invoked when a message exceeds its retry limit.
#
# Fix: this previously assigned @retry_exceeded_block, but the reader
# (#call_on_retry_exceeded_block) checks @on_retry_exceeded_block, so a
# registered callback was never invoked. Assign the ivar the reader uses.
# Also returns nil for consistency with #on_retry / #on_error.
def on_retry_exceeded(&block)
  @on_retry_exceeded_block = block
  nil
end
# Registers a callback invoked each time a message is scheduled for retry.
def on_retry(&block)
  @on_retry_block = block
  nil
end

# Registers a callback invoked when an unhandled error occurs.
def on_error(&block)
  @on_error_block = block
  nil
end

# @return [Boolean] whether the worker loop is currently active
def running?
  @is_running
end

# Deserializes a raw payload using the configured serialization provider.
def deserialize_message(payload)
  @serialization_provider_manager
    .get_provider(EventQ::Configuration.serialization_provider)
    .deserialize(payload)
end

# Serializes a message using the configured serialization provider.
def serialize_message(msg)
  @serialization_provider_manager
    .get_provider(EventQ::Configuration.serialization_provider)
    .serialize(msg)
end
private
# Deserializes and dispatches a single received SQS message to the worker
# block, then acknowledges (deletes), rejects, or skips it as a duplicate.
#
# @param response SQS receive_message response containing one message
# @param client client wrapper exposing #sqs
# @param queue [EventQ::Queue]
# @param q [String] the SQS queue URL
# @param block [Proc] user message handler
# @return [Boolean] false when skipped as a duplicate, true otherwise
def process_message(response, client, queue, q, block)
msg = response.messages[0]
# SQS reports 1 for the first delivery, so subtract one to get retries.
retry_attempts = msg.attributes[APPROXIMATE_RECEIVE_COUNT].to_i - 1
#deserialize the message payload
# The raw body is JSON; the MESSAGE element holds the serialized QueueMessage.
payload = JSON.load(msg.body)
message = deserialize_message(payload[MESSAGE])
message_args = EventQ::MessageArgs.new(
type: message.type,
retry_attempts: retry_attempts,
context: message.context,
content_type: message.content_type,
id: message.id,
sent: message.created
)
EventQ.logger.info("[#{self.class}] - Message received. Retry Attempts: #{retry_attempts}")
# Presumably raises when the queue requires a signature and it does not
# match — TODO confirm against the signature provider implementation.
@signature_provider_manager.validate_signature(message: message, queue: queue)
# Nonce-based de-duplication: skip messages already seen.
if(!EventQ::NonceManager.is_allowed?(message.id))
EventQ.logger.info("[#{self.class}] - Duplicate Message received. Ignoring message.")
return false
end
#begin worker block for queue message
begin
block.call(message.content, message_args)
if message_args.abort == true
EventQ.logger.info("[#{self.class}] - Message aborted.")
else
#accept the message as processed
client.sqs.delete_message({ queue_url: q, receipt_handle: msg.receipt_handle })
EventQ.logger.info("[#{self.class}] - Message acknowledged.")
end
rescue => e
EventQ.logger.error("[#{self.class}] - An unhandled error happened while attempting to process a queue message. Error: #{e} | Backtrace: #{e.backtrace}")
# `error` is only assigned here; on the success path Ruby evaluates it as nil.
error = true
call_on_error_block(error: e, message: message)
end
# Aborted or failed messages are marked failed and sent through the retry
# path; successful ones complete the nonce so duplicates get ignored.
if message_args.abort || error
EventQ::NonceManager.failed(message.id)
reject_message(queue, client, msg, q, retry_attempts, message, message_args.abort)
else
EventQ::NonceManager.complete(message.id)
end
return true
end
# Handles a failed or aborted message: permanently removes it when retries
# are disabled or exhausted, otherwise schedules a retry by extending the
# message's SQS visibility timeout. Retry delays are configured on the
# queue in milliseconds and converted to seconds for SQS.
def reject_message(queue, client, msg, q, retry_attempts, message, abort)
if !queue.allow_retry || retry_attempts >= queue.max_retry_attempts
EventQ.logger.info("[#{self.class}] - Message rejected removing from queue. Message: #{serialize_message(message)}")
#remove the message from the queue so that it does not get retried again
client.sqs.delete_message({ queue_url: q, receipt_handle: msg.receipt_handle })
if retry_attempts >= queue.max_retry_attempts
EventQ.logger.info("[#{self.class}] - Message retry attempt limit exceeded.")
call_on_retry_exceeded_block(message)
end
elsif queue.allow_retry
retry_attempts += 1
EventQ.logger.info("[#{self.class}] - Message rejected requesting retry. Attempts: #{retry_attempts}")
if queue.allow_retry_back_off == true
# Linear back-off: delay grows with each attempt, capped at max_retry_delay.
EventQ.logger.debug { "[#{self.class}] - Calculating message back off retry delay. Attempts: #{retry_attempts} * Delay: #{queue.retry_delay}" }
visibility_timeout = (queue.retry_delay * retry_attempts) / 1000
if visibility_timeout > (queue.max_retry_delay / 1000)
EventQ.logger.debug { "[#{self.class}] - Max message back off retry delay reached." }
visibility_timeout = queue.max_retry_delay / 1000
end
else
EventQ.logger.debug { "[#{self.class}] - Setting fixed retry delay for message." }
visibility_timeout = queue.retry_delay / 1000
end
# SQS rejects visibility timeouts above 12 hours.
if visibility_timeout > 43200
EventQ.logger.debug { "[#{self.class}] - AWS max visibility timeout of 12 hours has been exceeded. Setting message retry delay to 12 hours." }
visibility_timeout = 43200
end
EventQ.logger.debug { "[#{self.class}] - Sending message for retry. Message TTL: #{visibility_timeout}" }
client.sqs.change_message_visibility({
queue_url: q, # required
receipt_handle: msg.receipt_handle, # required
visibility_timeout: visibility_timeout.to_s, # required
# NOTE(review): the AWS SDK documents visibility_timeout as an Integer;
# the String here presumably relies on parameter coercion — confirm.
})
call_on_retry_block(message)
end
end
# Applies worker options, falling back to defaults where not supplied.
#
# Options:
#   :thread_count      - consumer threads per process (default 5)
#   :sleep             - seconds to pause when no message was received (default 0)
#   :fork_count        - number of worker processes (default 1)
#   :gc_flush_interval - minimum seconds between manual GC runs
#   :queue_poll_wait   - SQS long-poll wait in seconds (default 15)
#
# @param queue [EventQ::Queue]
# @return [true]
def configure(queue, options = {})
  @queue = queue
  @thread_count = options.fetch(:thread_count, 5)
  @sleep = options.fetch(:sleep, 0)
  @fork_count = options.fetch(:fork_count, 1)
  # Unlike the other settings, the GC flush interval keeps its existing
  # value unless explicitly overridden.
  @gc_flush_interval = options[:gc_flush_interval] if options.key?(:gc_flush_interval)
  @queue_poll_wait = options.fetch(:queue_poll_wait, 15)
  EventQ.logger.info("[#{self.class}] - Configuring. Process Count: #{@fork_count} | Thread Count: #{@thread_count} | Interval Sleep: #{@sleep} | GC Flush Interval: #{@gc_flush_interval} | Queue Poll Wait: #{@queue_poll_wait}.")
  true
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/queue_message.rb
|
module EventQ
# Envelope for a single event placed on a queue. Attribute typing comes
# from the ClassKit DSL; instances are (de)serialized by the configured
# serialization provider.
class QueueMessage
extend ClassKit
# Unique message identifier (UUID).
attr_accessor_type :id, type: String
# Number of delivery attempts already made for this message.
attr_accessor_type :retry_attempts, type: Integer
# Event type name the message was raised for.
attr_accessor_type :type, type: String
# Event payload (any serializable object).
attr_accessor_type :content
# Class name of the payload.
attr_accessor_type :content_type, type: String
# Creation timestamp as a Unix epoch float.
attr_accessor_type :created, type: Float
# Signature of the message (see #sign).
attr_accessor_type :signature, type: String
# Arbitrary metadata propagated alongside the payload.
attr_accessor_type :context, type: Hash
def initialize
@retry_attempts = 0
@created = Time.now.to_f
@id = SecureRandom.uuid
@context = {}
end
# Creates a signature for the message
#
# @param provider [EventQ::SignatureProviders::Sha256SignatureProvider] Signature provider that implements
# a write method
def sign(provider)
# No-op unless a signing secret has been configured globally.
return unless EventQ::Configuration.signature_secret
self.signature = provider.write(message: self, secret: EventQ::Configuration.signature_secret)
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/version.rb
|
module EventqBase
# Gem version for eventq_base.
VERSION = "1.17.2"
end
|
adamgeorgeson/eventq
|
eventq_aws/spec/aws_queue_client_spec.rb
|
require 'spec_helper'
# Verifies the lazily-built AWS client accessors return real SDK clients.
RSpec.describe EventQ::Amazon::QueueClient do
let(:options) { { aws_account_number: '123' } }
subject { described_class.new(options) }
describe '#sqs' do
specify do
expect(subject.sqs).to be_a Aws::SQS::Client
end
end
describe '#sns' do
specify do
expect(subject.sns).to be_a Aws::SNS::Client
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/date_time_writer.rb
|
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Serializes DateTime instances into Oj's object-encoding format
        # (the '^O' class marker) so they can be round-tripped by the Oj
        # deserializer.
        class DateTimeWriter < AttributeWriter
          # @param obj [Object]
          # @return [Boolean] true when obj is a DateTime
          def valid?(obj)
            obj.is_a?(DateTime)
          end

          # Builds the Oj-compatible hash for a DateTime. Seconds (including
          # the fractional part) are encoded as a Rational of nanoseconds
          # over 10**9.
          #
          # @param obj [DateTime]
          # @return [Hash]
          def exec(obj)
            # '%S%N' => two-digit seconds followed by nine-digit nanoseconds.
            # Parse explicitly in base 10: Integer('0…') would otherwise be
            # treated as octal (raising for digits 8/9). The previous code
            # stripped one leading zero to dodge that, which (a) still broke
            # for seconds 00-09 (two leading zeros remained octal-prefixed)
            # and (b) wrongly divided by 10**8 — removing a leading zero does
            # not change the numeric value, so the divisor is always 10**9.
            nanos = Integer(obj.strftime('%S%N'), 10)
            {
              '^O': 'DateTime',
              year: obj.year,
              month: obj.month,
              day: obj.day,
              hour: obj.hour,
              min: obj.min,
              sec: RationalWriter.new.exec(Rational(nanos, 1_000_000_000)),
              offset: RationalWriter.new.exec(obj.offset),
              start: obj.start
            }
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj.rb
|
# adamgeorgeson/eventq
require_relative 'oj/attribute_writer'
require_relative 'oj/class_writer'
require_relative 'oj/rational_writer'
require_relative 'oj/date_time_writer'
require_relative 'oj/date_writer'
require_relative 'oj/time_writer'
require_relative 'oj/array_writer'
require_relative 'oj/hash_writer'
require_relative 'oj/value_writer'
require_relative 'oj/serializer'
|
adamgeorgeson/eventq
|
eventq_base/spec/eventq_base/serialization_providers/manager_spec.rb
|
# eventq_base/spec/eventq_base/serialization_providers/manager_spec.rb
# Ensures the serialization provider manager resolves the OJ provider to the
# platform-appropriate implementation (JRuby uses its own Oj-style writers).
RSpec.describe EventQ::SerializationProviders::Manager do
describe '#get_provider' do
context 'OJ' do
if RUBY_PLATFORM =~ /java/
it 'returns expected provider' do
expect(subject.get_provider(EventQ::SerializationProviders::OJ_PROVIDER))
.to be_a EventQ::SerializationProviders::JRuby::OjSerializationProvider
end
else
it 'returns expected provider' do
expect(subject.get_provider(EventQ::SerializationProviders::OJ_PROVIDER))
.to be_a EventQ::SerializationProviders::OjSerializationProvider
end
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/value_writer.rb
|
# adamgeorgeson/eventq
module EventQ
  module SerializationProviders
    module JRuby
      module Oj
        # Writer for primitive scalar values (strings and numbers) that the
        # Oj serializer can emit without any transformation.
        class ValueWriter < AttributeWriter
          # @return [Boolean] true for String, Integer or Float values
          def valid?(obj)
            case obj
            when String, Integer, Float then true
            else false
            end
          end

          # Primitives serialize as themselves.
          def exec(obj)
            obj
          end
        end
      end
    end
  end
end
|
adamgeorgeson/eventq
|
eventq_rabbitmq/lib/eventq_rabbitmq.rb
|
require 'eventq_base'
if RUBY_PLATFORM =~ /java/
require 'march_hare'
else
require 'bunny'
end
require 'hash_kit'
require_relative '../lib/eventq_rabbitmq/version'
require_relative '../lib/eventq_rabbitmq/rabbitmq_queue_client'
require_relative '../lib/eventq_rabbitmq/rabbitmq_queue_manager'
if RUBY_PLATFORM =~ /java/
require_relative '../lib/eventq_rabbitmq/jruby/rabbitmq_queue_worker'
else
require_relative '../lib/eventq_rabbitmq/rabbitmq_queue_worker'
require_relative '../lib/eventq_rabbitmq/rabbitmq_queue_worker_v2'
end
require_relative '../lib/eventq_rabbitmq/rabbitmq_subscription_manager'
require_relative '../lib/eventq_rabbitmq/rabbitmq_eventq_client'
require_relative '../lib/eventq_rabbitmq/default_queue'
require_relative '../lib/eventq_rabbitmq/rabbitmq_status_checker'
module EventQ
  class << self
    # Optional global prefix applied to event types, queue and exchange names.
    attr_accessor :namespace
  end

  # Prefixes the event type with the configured namespace, if one is set.
  def self.create_event_type(event_type)
    namespace.nil? ? event_type : "#{namespace}-#{event_type}"
  end

  # Prefixes the queue name with the configured namespace, if one is set.
  def self.create_queue_name(queue_name)
    namespace.nil? ? queue_name : "#{namespace}-#{queue_name}"
  end

  # Prefixes the exchange name with the configured namespace, if one is set.
  def self.create_exchange_name(exchange_name)
    namespace.nil? ? exchange_name : "#{namespace}-#{exchange_name}"
  end
end
|
adamgeorgeson/eventq
|
eventq_rabbitmq/spec/eventq_rabbitmq/rabbitmq_queue_worker_v2_spec.rb
|
RSpec.describe EventQ::RabbitMq::QueueWorkerV2 do
let(:client) do
return EventQ::RabbitMq::QueueClient.new({ endpoint: 'rabbitmq' })
end
let(:connection) { client.get_connection }
let(:channel) { connection.create_channel }
after do
begin
channel.close if channel.open?
connection.close if connection.open?
rescue => e
EventQ.logger.error { "Timeout error occurred closing connection. Error: #{e}" }
end
end
describe '#deserialize_message' do
unless RUBY_PLATFORM =~ /java/
context 'when serialization provider is OJ_PROVIDER' do
before do
EventQ::Configuration.serialization_provider = EventQ::SerializationProviders::OJ_PROVIDER
end
context 'when payload is for a known type' do
let(:a) do
A.new.tap do |a|
a.text = 'ABC'
end
end
let(:payload) { Oj.dump(a) }
it 'should deserialize the message into an object of the known type' do
message = subject.deserialize_message(payload)
expect(message).to be_a(A)
expect(message.text).to eq('ABC')
end
end
context 'when payload is for an unknown type' do
let(:a) do
A.new.tap do |a|
a.text = 'ABC'
end
end
let(:payload) do
string = Oj.dump(a)
JSON.load(string.sub('"^o":"A"', '"^o":"B"'))
end
let(:message) do
EventQ::QueueMessage.new.tap do |m|
m.content = payload
end
end
let(:json) do
Oj.dump(message)
end
it 'should deserialize the message into a Hash' do
message = subject.deserialize_message(json)
expect(message.content).to be_a(Hash)
expect(message.content[:text]).to eq('ABC')
end
end
end
end
context 'when serialization provider is JSON_PROVIDER' do
before do
EventQ::Configuration.serialization_provider = EventQ::SerializationProviders::JSON_PROVIDER
end
let(:payload) do
{
content: { text: 'ABC' }
}
end
let(:json) do
JSON.dump(payload)
end
it 'should deserialize payload' do
message = subject.deserialize_message(json)
expect(message).to be_a(EventQ::QueueMessage)
expect(message.content).to be_a(Hash)
expect(message.content[:text]).to eq('ABC')
end
unless RUBY_PLATFORM =~ /java/
after do
EventQ::Configuration.serialization_provider = EventQ::SerializationProviders::OJ_PROVIDER
end
end
end
end
context 'NonceManager' do
context 'when a duplicate message is received' do
let(:queue_message) { EventQ::QueueMessage.new }
before do
EventQ::NonceManager.configure(server: 'redis://redis:6379')
end
it 'should NOT process the message more than once' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
#set queue retry delay to 0.5 seconds
subscriber_queue.retry_delay = 500
subscriber_queue.allow_retry = true
qm = EventQ::RabbitMq::QueueManager.new
q = qm.get_queue(channel, subscriber_queue)
q.delete
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
allow(eqclient).to receive(:new_message).and_return(queue_message)
eqclient.raise_event(event_type, message)
eqclient.raise_event(event_type, message)
subject.configure(subscriber_queue, { sleep: 0 })
received_count = 0
subject.start(subscriber_queue, { client: client, wait: false, sleep: 0, thread_count: 1 }) do |content, args|
received_count += 1
end
sleep(2)
subject.stop
expect(received_count).to eq 1
end
after do
EventQ::NonceManager.reset
end
end
end
# Covers the optional on_error callback plumbing of the worker: the block is
# invoked with (error, message) when registered and never when absent.
describe '#call_on_error_block' do
let(:error) { double }
let(:message) { double }
context 'when a block is specified' do
let(:block) { double }
before do
subject.instance_variable_set(:@on_error_block, block)
allow(block).to receive(:call)
end
it 'should execute the block' do
expect(block).to receive(:call).with(error, message).once
subject.call_on_error_block(error: error, message: message)
end
end
context 'when a block is NOT specified' do
let(:block) { nil }
before do
subject.instance_variable_set(:@on_error_block, block)
end
it 'should NOT execute the block' do
expect(block).not_to receive(:call)
subject.call_on_error_block(error: error, message: message)
end
end
end
describe '#call_on_retry_block' do
let(:error) { double }
let(:message) { double }
context 'when a block is specified' do
let(:block) { double }
before do
subject.instance_variable_set(:@on_retry_block, block)
allow(block).to receive(:call)
end
xit 'should execute the block' do
expect(block).to receive(:call).with(error, message).once
subject.call_on_retry_block(error: error, message: message)
end
end
context 'when a block is NOT specified' do
let(:block) { nil }
before do
subject.instance_variable_set(:@on_retry_block, block)
end
xit 'should NOT execute the block' do
expect(block).not_to receive(:call)
subject.call_on_retry_block(error: error, message: message)
end
end
end
describe '#call_on_retry_exceeded_block' do
let(:error) { double }
let(:message) { double }
context 'when a block is specified' do
let(:block) { double }
before do
subject.instance_variable_set(:@on_retry_exceeded_block, block)
allow(block).to receive(:call)
end
xit 'should execute the block' do
expect(block).to receive(:call).with(error, message).once
subject.call_on_retry_exceeded_block(error: error, message: message)
end
end
context 'when a block is NOT specified' do
let(:block) { nil }
before do
subject.instance_variable_set(:@on_retry_exceeded_block, block)
end
xit 'should NOT execute the block' do
expect(block).not_to receive(:call)
subject.call_on_retry_exceeded_block(error: error, message: message)
end
end
end
it 'should receive an event from the subscriber queue' do
event_type = 'queue.worker.event1'
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = 'queue.worker1'
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({ client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
message_context = { 'foo' => 'bar' }
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
eqclient.raise_event(event_type, message, message_context)
subject.start(subscriber_queue, {:sleep => 1, client: client, thread_count: 1 }) do |event, args|
expect(event).to eq(message)
expect(args.type).to eq(event_type)
expect(args.content_type).to eq message.class.to_s
expect(args.context).to eq message_context
EventQ.logger.debug { "Message Received: #{event}" }
end
sleep(1)
subject.stop
expect(subject.is_running).to eq(false)
end
context 'when queue requires a signature' do
let(:secret) { 'secret' }
before do
EventQ::Configuration.signature_secret = secret
end
context 'and the received message contains a valid signature' do
it 'should process the message' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
subscriber_queue.require_signature = true
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({ client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
eqclient.raise_event(event_type, message)
received = false
subject.start(subscriber_queue, {:sleep => 1, client: client}) do |event, args|
expect(event).to eq(message)
expect(args.type).to eq(event_type)
received = true
EventQ.logger.debug { "Message Received: #{event}" }
end
sleep(1)
expect(received).to eq(true)
subject.stop
expect(subject.is_running).to eq(false)
end
end
context 'and the received message contains an invalid signature' do
it 'should NOT process the message' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
subscriber_queue.require_signature = true
EventQ::Configuration.signature_secret = 'invalid'
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({ client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
eqclient.raise_event(event_type, message)
EventQ::Configuration.signature_secret = secret
received = false
subject.start(subscriber_queue, {:sleep => 1, client: client}) do |event, args|
expect(event).to eq(message)
expect(args.type).to eq(event_type)
received = true
EventQ.logger.debug { "Message Received: #{event}" }
end
sleep(0.5)
expect(received).to eq(false)
subject.stop
expect(subject.is_running).to eq(false)
end
end
end
it 'should receive events in parallel on each thread from the subscriber queue' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
10.times do
eqclient.raise_event(event_type, message)
end
received_messages = []
message_count = 0
mutex = Mutex.new
subject.start(subscriber_queue, {client: client.dup}) do |event, args|
expect(event).to eq(message)
expect(args.type).to eq(event_type)
mutex.synchronize do
EventQ.logger.debug { "Message Received: #{event}" }
message_count += 1
add_to_received_list(received_messages)
EventQ.logger.debug { 'message processed.' }
sleep 0.2
end
end
sleep(2)
expect(message_count).to eq(10)
subject.stop
expect(subject.is_running).to eq(false)
end
it 'should send messages that fail to process to the retry queue and then receive them again after the retry delay' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
#set queue retry delay to 0.5 seconds
subscriber_queue.retry_delay = 500
subscriber_queue.allow_retry = true
qm = EventQ::RabbitMq::QueueManager.new
q = qm.get_queue(channel, subscriber_queue)
q.delete
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
eqclient.raise_event(event_type, message)
retry_attempt_count = 0
subject.start(subscriber_queue, { :thread_count => 1, :sleep => 0.5, client: client}) do |event, args|
if args.retry_attempts == 0
raise 'Fail on purpose to send event to retry queue.'
end
retry_attempt_count = args.retry_attempts
end
sleep(5)
expect(retry_attempt_count).to eq(1)
subject.stop
expect(subject.is_running).to eq(false)
end
context 'queue.allow_retry_back_off = true' do
it 'should send messages that fail to process to the retry queue and then receive them again after the retry delay' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
#set queue retry delay to 0.5 seconds
subscriber_queue.retry_delay = 500
subscriber_queue.allow_retry = true
subscriber_queue.allow_retry_back_off = true
#set to max retry delay to 5 seconds
subscriber_queue.max_retry_delay = 5000
qm = EventQ::RabbitMq::QueueManager.new
q = qm.get_queue(channel, subscriber_queue)
q.delete
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
eqclient.raise_event(event_type, message)
retry_attempt_count = 0
subject.start(subscriber_queue, { :thread_count => 1, :sleep => 0.5, client: client}) do |event, args|
retry_attempt_count = args.retry_attempts
raise 'Fail on purpose to send event to retry queue.'
end
sleep(0.8)
expect(retry_attempt_count).to eq(1)
sleep(1.3)
expect(retry_attempt_count).to eq(2)
sleep(1.8)
expect(retry_attempt_count).to eq(3)
sleep(2.3)
expect(retry_attempt_count).to eq(4)
subject.stop
expect(subject.is_running).to eq(false)
end
end
context 'retry block execution' do
xit 'should execute the #on_retry_exceeded block when a message exceeds its retry limit' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
#set queue retry delay to 0.5 seconds
subscriber_queue.retry_delay = 500
subscriber_queue.allow_retry = true
subscriber_queue.max_retry_attempts = 1
qm = EventQ::RabbitMq::QueueManager.new
q = qm.get_queue(channel, subscriber_queue)
q.delete
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
eqclient.raise_event(event_type, message)
retry_attempt_count = 0
failed_message = nil
subject.on_retry_exceeded do |message|
failed_message = message
end
subject.start(subscriber_queue, { :thread_count => 1, :sleep => 0.5, client: client }) do |event, args|
retry_attempt_count = args.retry_attempts
raise 'Fail on purpose to send event to retry queue.'
end
sleep(5)
expect(retry_attempt_count).to eq(1)
expect(failed_message.content).to eq(message)
expect(failed_message.retry_attempts).to eq(1)
expect(failed_message.type).to eq(event_type)
subject.stop
expect(subject.is_running).to eq(false)
end
xit 'should execute the #on_retry block when a message is retried' do
event_type = SecureRandom.uuid
subscriber_queue = EventQ::Queue.new
subscriber_queue.name = SecureRandom.uuid
#set queue retry delay to 0.5 seconds
subscriber_queue.retry_delay = 500
subscriber_queue.allow_retry = true
subscriber_queue.max_retry_attempts = 1
qm = EventQ::RabbitMq::QueueManager.new
q = qm.get_queue(channel, subscriber_queue)
q.delete
subscription_manager = EventQ::RabbitMq::SubscriptionManager.new({client: client})
subscription_manager.subscribe(event_type, subscriber_queue)
message = 'Hello World'
eqclient = EventQ::RabbitMq::EventQClient.new({client: client, subscription_manager: subscription_manager})
eqclient.raise_event(event_type, message)
retry_attempt_count = 0
failed_message = nil
is_abort = false
subject.on_retry do |message, abort|
failed_message = message
is_abort = abort
end
subject.start(subscriber_queue, { :thread_count => 1, :sleep => 0.5, client: client }) do |event, args|
retry_attempt_count = args.retry_attempts
raise 'Fail on purpose to send event to retry queue.'
end
sleep(1)
subject.stop
expect(retry_attempt_count).to eq(1)
expect(failed_message.content).to eq(message)
expect(failed_message.retry_attempts).to eq(1)
expect(failed_message.type).to eq(event_type)
expect(subject.is_running).to eq(false)
end
end
# Minimal serializable fixture used by the deserialization examples.
class A
attr_accessor :text
end
# Records a received message against the current thread, creating a new
# per-thread counter entry on first sight.
def add_to_received_list(received_messages)
  thread_name = Thread.current.object_id
  EventQ.logger.debug { "[THREAD] #{thread_name}" }
  entry = received_messages.find { |i| i[:thread] == thread_name }
  if entry
    entry[:events] += 1
  else
    received_messages.push(events: 1, thread: thread_name)
  end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby.rb
|
require_relative 'jruby/oj'
require_relative 'jruby/oj_serialization_provider'
|
adamgeorgeson/eventq
|
eventq_rabbitmq/lib/eventq_rabbitmq/rabbitmq_queue_worker.rb
|
module EventQ
module RabbitMq
class QueueWorker
include EventQ::WorkerId
attr_accessor :is_running
# Sets up worker state: thread/fork bookkeeping, user callback slots,
# serialization/signature helpers and the manual GC flush schedule.
def initialize
@threads = []
@forks = []
@is_running = false
@retry_exceeded_block = nil
@on_retry_block = nil
@on_error_block = nil
@hash_helper = HashKit::Helper.new
@serialization_provider_manager = EventQ::SerializationProviders::Manager.new
@signature_provider_manager = EventQ::SignatureProviders::Manager.new
# GC is flushed manually at most every @gc_flush_interval seconds (#gc_flush).
@last_gc_flush = Time.now
@gc_flush_interval = 10
end
# Starts listening for messages on the given queue.
#
# When :fork_count > 1 the worker forks that many child processes, each
# running #start_process; with :wait => true the caller blocks until the
# children exit. Otherwise the loop runs in the current process.
#
# @param queue [EventQ::Queue]
# @param options [Hash] see #configure; :client (QueueClient) is required
# @raise [RuntimeError] if already running or :client is missing
def start(queue, options = {}, &block)
EventQ.logger.info("[#{self.class}] - Preparing to start listening for messages.")
configure(queue, options)
raise "[#{self.class}] - Worker is already running." if running?
if options[:client] == nil
raise "[#{self.class}] - :client (QueueClient) must be specified."
end
EventQ.logger.info("[#{self.class}] - Listening for messages.")
EventQ.logger.debug do
"[#{self.class} #start] - Listening for messages on queue: #{EventQ.create_queue_name(queue.name)}"
end
@forks = []
if @fork_count > 1
@fork_count.times do
pid = fork do
start_process(options, queue, block)
end
@forks.push(pid)
end
if options.key?(:wait) && options[:wait] == true
@forks.each { |pid| Process.wait(pid) }
end
else
start_process(options, queue, block)
end
end
# Runs the consuming loop in the current process: traps INT/TERM for a
# clean shutdown, opens a RabbitMQ connection and spins up @thread_count
# consumer threads that poll the queue until #stop is called.
#
# @param options [Hash] must include :client; :durable defaults to true,
#   :wait => true joins the threads and closes the connection afterwards
# @return [true]
def start_process(options, queue, block)
@is_running = true
# Ensure a clean shutdown on interrupt/terminate signals.
%w'INT TERM'.each do |sig|
Signal.trap(sig) {
stop
exit
}
end
if !options.key?(:durable)
options[:durable] = true
end
client = options[:client]
manager = EventQ::RabbitMq::QueueManager.new
manager.durable = options[:durable]
@connection = client.get_connection
@threads = []
#loop through each thread count
@thread_count.times do
thr = Thread.new do
#begin the queue loop for this thread
while true do
#check if the worker is still allowed to run and break out of thread loop if not
unless running?
break
end
has_received_message = false
begin
# A fresh channel per iteration keeps channel-level errors isolated.
channel = @connection.create_channel
has_received_message = thread_process_iteration(channel, manager, queue, block)
rescue => e
EventQ.logger.error("An unhandled error occurred. Error: #{e} | Backtrace: #{e.backtrace}")
call_on_error_block(error: e)
end
if channel != nil && channel.open?
channel.close
end
gc_flush
if !has_received_message
EventQ.logger.debug { "[#{self.class}] - No message received." }
if @sleep > 0
EventQ.logger.debug { "[#{self.class}] - Sleeping for #{@sleep} seconds" }
sleep(@sleep)
end
end
end
end
@threads.push(thr)
end
if options.key?(:wait) && options[:wait] == true
@threads.each { |thr| thr.join }
@connection.close if @connection.open?
end
return true
end
# Runs the user-supplied on_error callback, if one is registered, shielding
# the worker loop from any exception the callback itself raises.
#
# @param error [Exception] the error that triggered the callback
# @param message [EventQ::QueueMessage, nil] message being processed, if any
def call_on_error_block(error:, message: nil)
  if @on_error_block.nil?
    EventQ.logger.debug { "[#{self.class}] - No on_error block specified to execute." }
  else
    EventQ.logger.debug { "[#{self.class}] - Executing on_error block." }
    begin
      @on_error_block.call(error, message)
    rescue => e
      EventQ.logger.error("[#{self.class}] - An error occurred executing the on_error block. Error: #{e}")
    end
  end
end
# Runs GC.start when at least @gc_flush_interval seconds have elapsed since
# the previous manual flush; otherwise a no-op.
def gc_flush
  return unless Time.now - last_gc_flush > @gc_flush_interval
  GC.start
  @last_gc_flush = Time.now
end

# @return [Time] timestamp of the most recent manual GC flush
def last_gc_flush
  @last_gc_flush
end
# Performs one pop/process cycle against the RabbitMQ queue on the calling
# worker thread.
#
# @param channel open RabbitMQ channel for this iteration
# @param manager [EventQ::RabbitMq::QueueManager]
# @param queue [EventQ::Queue]
# @param block [Proc] user message handler
# @return [Boolean] true when a message was popped (and processed)
def thread_process_iteration(channel, manager, queue, block)
#get the queue
q = manager.get_queue(channel, queue)
retry_exchange = manager.get_retry_exchange(channel, queue)
received = false
begin
delivery_info, payload = manager.pop_message(queue: q)
#check that message was received
if payload != nil
received = true
begin
# Tag/untag the thread so logs can attribute work to this consumer.
tag_processing_thread
process_message(payload, queue, channel, retry_exchange, delivery_info, block)
ensure
untag_processing_thread
end
end
rescue => e
EventQ.logger.error("[#{self.class}] - An error occurred attempting to process a message. Error: #{e} | Backtrace: #{e.backtrace}")
call_on_error_block(error: e)
end
return received
end
# Stops the worker loop and closes the RabbitMQ connection.
# @return [true]
def stop
EventQ.logger.info { "[#{self.class}] - Stopping..." }
@is_running = false
# NOTE(review): this force-exits EVERY thread in the process except the
# caller — including threads not owned by this worker. Confirm this is
# intended rather than exiting/joining only @threads.
Thread.list.each do |thread|
thread.exit unless thread == Thread.current
end
if @connection != nil
begin
@connection.close if @connection.open?
rescue Timeout::Error
EventQ.logger.error { 'Timeout occurred closing connection.' }
end
end
return true
end
# Registers a callback invoked after a message exhausts its retries.
def on_retry_exceeded(&block)
  @retry_exceeded_block = block
  nil
end

# Registers a callback invoked each time a message is scheduled for retry.
def on_retry(&block)
  @on_retry_block = block
  nil
end

# Registers a callback invoked when an unhandled error occurs.
def on_error(&block)
  @on_error_block = block
  nil
end

# @return [Boolean] whether the worker loop is currently active
def running?
  @is_running
end

# Deserializes a raw payload using the configured serialization provider.
def deserialize_message(payload)
  @serialization_provider_manager
    .get_provider(EventQ::Configuration.serialization_provider)
    .deserialize(payload)
end

# Serializes a message using the configured serialization provider.
def serialize_message(msg)
  @serialization_provider_manager
    .get_provider(EventQ::Configuration.serialization_provider)
    .serialize(msg)
end
# Runs the user-supplied retry-exceeded callback, if one is registered,
# shielding the worker loop from any exception the callback raises.
#
# @param message [EventQ::QueueMessage] the message that exhausted retries
def call_on_retry_exceeded_block(message)
  if @retry_exceeded_block.nil?
    EventQ.logger.debug { "[#{self.class}] - No on_retry_exceeded block specified." }
  else
    EventQ.logger.debug { "[#{self.class}] - Executing on_retry_exceeded block." }
    begin
      @retry_exceeded_block.call(message)
    rescue => e
      EventQ.logger.error("[#{self.class}] - An error occurred executing the on_retry_exceeded block. Error: #{e}")
    end
  end
end
# Invokes the user-supplied on_retry callback (if any) with the message and
# whether processing was aborted; callback errors are logged and swallowed
# so they cannot kill the worker loop.
#
# Fix: the second argument previously resolved to Kernel#abort (no local or
# parameter named `abort` existed), which raises SystemExit — not rescued by
# `rescue => e` — terminating the process whenever an on_retry block was
# registered. It is now an explicit parameter defaulting to false, keeping
# existing call-sites compatible.
#
# @param message [EventQ::QueueMessage] the message being retried
# @param abort [Boolean] whether the worker block requested an abort
def call_on_retry_block(message, abort = false)
  if @on_retry_block
    EventQ.logger.debug { "[#{self.class}] - Executing on_retry block." }
    begin
      @on_retry_block.call(message, abort)
    rescue => e
      EventQ.logger.error("[#{self.class}] - An error occurred executing the on_retry block. Error: #{e}")
    end
  else
    EventQ.logger.debug { "[#{self.class}] - No on_retry block specified." }
  end
end
# Handles a message whose processing failed or was aborted: removes it from
# the queue, then either gives up (retry limit hit / retries disabled) or
# republishes it to the retry exchange with a per-message TTL so it
# re-enters the queue after the configured delay (delays in milliseconds).
def reject_message(channel, message, delivery_tag, retry_exchange, queue, abort)
EventQ.logger.info("[#{self.class}] - Message rejected removing from queue.")
#reject the message to remove from queue
channel.reject(delivery_tag, false)
#check if the message retry limit has been exceeded
if message.retry_attempts >= queue.max_retry_attempts
EventQ.logger.info("[#{self.class}] - Message retry attempt limit exceeded. Msg: #{serialize_message(message)}")
call_on_retry_exceeded_block(message)
#check if the message is allowed to be retried
elsif queue.allow_retry
EventQ.logger.debug { "[#{self.class}] - Incrementing retry attempts count." }
message.retry_attempts += 1
if queue.allow_retry_back_off == true
# Linear back-off: delay grows with each attempt, capped at max_retry_delay.
EventQ.logger.debug { "[#{self.class}] - Calculating message back off retry delay. Attempts: #{message.retry_attempts} * Retry Delay: #{queue.retry_delay}" }
message_ttl = message.retry_attempts * queue.retry_delay
if (message.retry_attempts * queue.retry_delay) > queue.max_retry_delay
EventQ.logger.debug { "[#{self.class}] - Max message back off retry delay reached." }
message_ttl = queue.max_retry_delay
end
else
EventQ.logger.debug { "[#{self.class}] - Setting fixed retry delay for message." }
message_ttl = queue.retry_delay
end
EventQ.logger.debug { "[#{self.class}] - Sending message for retry. Message TTL: #{message_ttl}" }
# NOTE(review): routing back to the main queue after expiry presumably
# relies on dead-letter wiring done in get_retry_exchange — confirm.
retry_exchange.publish(serialize_message(message), :expiration => message_ttl)
EventQ.logger.debug { "[#{self.class}] - Published message to retry exchange." }
call_on_retry_block(message)
end
return true
end
# Applies worker tuning options. Unspecified options fall back to
# defaults: thread_count 4, sleep 15 (seconds between polls),
# fork_count 1, gc_flush_interval 10. Returns true.
def configure(queue, options = {})
  @queue = queue
  @thread_count = options.fetch(:thread_count, 4)
  @sleep = options.fetch(:sleep, 15)
  @fork_count = options.fetch(:fork_count, 1)
  @gc_flush_interval = options.fetch(:gc_flush_interval, 10)
  EventQ.logger.info("[#{self.class}] - Configuring. Process Count: #{@fork_count} | Thread Count: #{@thread_count} | Interval Sleep: #{@sleep}.")
  true
end
private
# Full lifecycle for one delivered message: deserialize, validate
# signature, nonce-check for duplicates, run the handler block, then
# ack on success or reject/retry on error or abort.
#
# Returns false when the message is dropped as a duplicate; otherwise the
# return value is unused by callers visible here.
def process_message(payload, queue, channel, retry_exchange, delivery_tag, block)
abort = false
error = false
message = deserialize_message(payload)
EventQ.logger.info("[#{self.class}] - Message received. Retry Attempts: #{message.retry_attempts}")
@signature_provider_manager.validate_signature(message: message, queue: queue)
message_args = EventQ::MessageArgs.new(
type: message.type,
retry_attempts: message.retry_attempts,
context: message.context,
content_type: message.content_type,
id: message.id,
sent: message.created
)
# Duplicate-suppression: messages whose id has already been seen are
# acknowledged (removed) without running the handler.
if(!EventQ::NonceManager.is_allowed?(message.id))
EventQ.logger.info("[#{self.class}] - Duplicate Message received. Dropping message.")
channel.acknowledge(delivery_tag, false)
return false
end
#begin worker block for queue message
begin
block.call(message.content, message_args)
if message_args.abort == true
# Handler asked for the message to be retried rather than acked.
abort = true
EventQ.logger.info("[#{self.class}] - Message aborted.")
else
#accept the message as processed
channel.acknowledge(delivery_tag, false)
EventQ.logger.info("[#{self.class}] - Message acknowledged.")
end
rescue => e
EventQ.logger.error("[#{self.class}] - An unhandled error happened attempting to process a queue message. Error: #{e} | Backtrace: #{e.backtrace}")
error = true
call_on_error_block(error: e, message: message)
end
# Record the nonce outcome and route failures through the retry path.
if error || abort
EventQ::NonceManager.failed(message.id)
reject_message(channel, message, delivery_tag, retry_exchange, queue, abort)
else
EventQ::NonceManager.complete(message.id)
end
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_aws/lib/eventq_aws/aws_eventq_client.rb
|
module EventQ
  module Amazon
    # Implements a general interface to raise an event
    # EventQ::RabbitMq::EventQClient is the sister-class which does the same for RabbitMq
    class EventQClient
      # @param options [Hash] requires :client, an EventQ::Amazon::QueueClient
      # @raise [RuntimeError] when :client is missing
      def initialize(options)
        if options[:client] == nil
          raise ':client (QueueClient) must be specified.'.freeze
        end
        @client = options[:client]
        @serialization_manager = EventQ::SerializationProviders::Manager.new
        @signature_manager = EventQ::SignatureProviders::Manager.new
        # this array is used to record known event types
        @known_event_types = []
      end

      # True once the SNS topic for +event_type+ has been ensured by this
      # client instance (process-local cache, not a remote check).
      def registered?(event_type)
        @known_event_types.include?(event_type)
      end

      # Ensures the SNS topic for +event_type+ exists, memoizing the result
      # so the create call happens at most once per instance. Returns true.
      def register_event(event_type)
        return true if registered?(event_type)
        @client.create_topic_arn(event_type)
        @known_event_types << event_type
        true
      end

      # Keyword-argument alias for #raise_event.
      def publish(topic:, event:, context: {})
        raise_event(topic, event, context)
      end

      # Publishes +event+ to the SNS topic for +event_type+, creating the
      # topic if needed. Returns the id of the published message.
      def raise_event(event_type, event, context = {})
        register_event(event_type)
        with_prepared_message(event_type, event, context) do |message|
          response = @client.sns.publish(
            topic_arn: topic_arn(event_type),
            message: message,
            subject: event_type
          )
          EventQ.logger.debug do
            "[#{self.class} #raise_event] - Published to SNS with topic_arn: #{topic_arn(event_type)} | event_type: #{event_type} | Message: #{message}"
          end
          response
        end
      end

      # Sends +event+ directly to an SQS queue (bypassing SNS), optionally
      # delayed by +delay+ seconds. Returns the id of the sent message.
      def raise_event_in_queue(event_type, event, queue, delay, context = {})
        queue_url = @client.get_queue_url(queue)
        with_prepared_message(event_type, event, context) do |message|
          response = @client.sqs.send_message(
            queue_url: queue_url,
            message_body: sqs_message_body_for(message),
            delay_seconds: delay
          )
          EventQ.logger.debug do
            "[#{self.class} #raise_event_in_queue] - Raised event to SQS queue: #{queue_url} | event_type: #{event_type} | Message: #{message}"
          end
          response
        end
      end

      def new_message
        EventQ::QueueMessage.new
      end

      private

      # Builds, signs (when a signature secret is configured) and serializes
      # a QueueMessage, yields the serialized form to the caller's transport
      # block, and returns the message id from the yielded response.
      def with_prepared_message(event_type, event, context)
        qm = new_message
        qm.content = event
        qm.type = event_type
        qm.context = context
        qm.content_type = event.class.to_s
        if EventQ::Configuration.signature_secret != nil
          provider = @signature_manager.get_provider(EventQ::Configuration.signature_provider)
          qm.signature = provider.write(message: qm, secret: EventQ::Configuration.signature_secret)
        end
        message = serialized_message(qm)
        response = yield(message)
        # Changed from EventQ.log(:debug, ...) to EventQ.logger.debug for
        # consistency with every other log call in this class.
        EventQ.logger.debug { "[#{self.class}] - Raised event. Message: #{message} | Type: #{event_type}." }
        response.message_id
      end

      def serialized_message(queue_message)
        serialization_provider = @serialization_manager.get_provider(EventQ::Configuration.serialization_provider)
        serialization_provider.serialize(queue_message)
      end

      def topic_arn(event_type)
        @client.get_topic_arn(event_type)
      end

      # SQS messages carry the serialized QueueMessage wrapped under the
      # worker's MESSAGE key so the consumer can unwrap it uniformly.
      def sqs_message_body_for(payload_message)
        JSON.dump(EventQ::Amazon::QueueWorker::MESSAGE => payload_message)
      end
    end
  end
end
|
james2m/reports
|
test/dummy4/app/reports/simple_report.rb
|
<reponame>james2m/reports
# Minimal concrete report used by the dummy app's tests; inherits all
# behaviour (title, stream, file, type_name) from Reports::Base.
class SimpleReport < Reports::Base
end
|
james2m/reports
|
lib/reports/scoped.rb
|
<reponame>james2m/reports
module Reports
  # Report whose rows are produced by applying a chain of named scopes
  # (given as options['scopes']) to the report's base model class.
  class Scoped < Base
    def initialize(options = {})
      @scopes = Array(options['scopes'])
      super
    end

    private

    # The relation the scope chain starts from (the report's model class).
    def base_relation
      @base_relation ||= base_class
    end

    # Applies each scope in turn; a scope is a [method_name, *args] array.
    def scoped_rows
      @scoped_rows ||= @scopes.reduce(base_relation) { |relation, scope| relation.send(*scope) }
    end

    # One Row wrapper per record returned by the scoped relation.
    def rows
      @rows ||= scoped_rows.map { |record| new_row(record) }
    end
  end # Scoped
end # Reports
|
james2m/reports
|
lib/reports/railtie.rb
|
<filename>lib/reports/railtie.rb
require 'reports'
require 'rails'
module Reports
  # Hooks Reports.reload into the Rails code-reloading cycle so report
  # classes in app/reports are re-loaded alongside app code.
  class Railtie < Rails::Railtie
    initializer "reports.load", :after => "action_dispatch.configure" do |app|
      # FIX: the previous code called a private railtie method (rails5?)
      # from inside this block, where it is not defined (initializer blocks
      # are executed against the application), raising NoMethodError at
      # boot; it also pinned ActionPack's major version to exactly 5.
      # Feature-detect instead: Rails 5+ provides ActiveSupport::Reloader,
      # older versions only ActionDispatch::Reloader.
      reloader = defined?(ActiveSupport::Reloader) ? ActiveSupport::Reloader : ActionDispatch::Reloader
      if reloader.respond_to?(:to_prepare)
        reloader.to_prepare { Reports.reload }
      elsif reloader.respond_to?(:before)
        reloader.before { Reports.reload }
      end
    end
  end
end
|
james2m/reports
|
lib/reports/base.rb
|
module Reports
  # Abstract base class for CSV reports. Subclasses implement the private
  # #rows and #headers methods; #stream renders the CSV and #file persists
  # it to disk. ActiveModel mixins let reports drive Rails form helpers.
  class Base
    class NotImplementedError < ::NotImplementedError; end

    include ActiveModel::Validations
    include ActiveModel::Conversion
    extend ActiveModel::Naming

    def initialize(options = {})
      @options = options
      options.stringify_keys!
    end

    # Writes the CSV stream to #file_name and returns the (closed) File.
    #
    # FIX: the previous body passed :row_sep to File.open — that is a CSV
    # option, not an IO open option (modern Rubies raise ArgumentError for
    # unknown open options) — and leaked the handle if writing raised.
    # The block form closes the file in all cases.
    def file
      File.open(file_name, "wb") do |f|
        f.puts stream
        f
      end
    end

    def file_name
      "#{title.parameterize}.csv"
    end

    # Memoized CSV body: headers row followed by one line per row, using
    # CRLF row separators.
    def stream
      @stream ||= begin
        CSV.generate(:row_sep => "\r\n") do |csv|
          csv << headers
          rows.each { |row| csv << to_columns(row) }
        end
      end
    end

    # Localized report title, falling back to "<type> report" titleized.
    def title
      @title ||= I18n.t 'title', { :scope => ['reports', type_name], :default => "#{type_name} report".titleize }.merge(locale_params)
    end

    # The model class this report is based on, derived from the class name.
    def base_class
      type_name.classify.constantize
    end

    # e.g. "UserReport" => "user"
    def type_name
      self.class.name.demodulize.underscore.gsub(/_report$/, '')
    end

    def new_row(row)
      Row.new(row)
    end

    # Reports are not persisted; satisfy finder-style callers with nil.
    def self.find(*args); nil; end

    private

    # Extra interpolation params for the I18n title lookup; subclasses may
    # override (see Periodic).
    def locale_params
      @locale_params ||= {}
    end

    def rows
      raise NotImplementedError
    end

    def headers
      raise NotImplementedError
    end

    def to_columns(row)
      row.to_columns
    end

    def new_record?; true; end
    def persisted?; false; end
  end # Base
end # Reports
|
james2m/reports
|
reports.gemspec
|
<gh_stars>0
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "reports/version"
# Gem specification for the reports gem.
# NOTE(review): the date and file lists shell out to `git`, so building the
# gem requires a git checkout; `rubyforge_project` is deprecated and only
# emits a warning on modern RubyGems.
Gem::Specification.new do |s|
s.name = "reports"
s.version = Reports::VERSION
s.date = `git log -1 --format="%cd" --date=short lib/reports/version.rb`
s.authors = ["James McCarthy"]
s.email = ["<EMAIL>"]
s.homepage = "https://github.com/james2m/reports"
s.summary = %q{Adds simple extensible reporting to a Rails application.}
s.description = %q{Adds simple extensible reporting to a Rails application.}
s.rubyforge_project = "reports"
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ["lib"]
s.add_development_dependency "minitest", "> 4"
s.add_development_dependency "rails", "> 4"
end
|
james2m/reports
|
test/test_helper.rb
|
<reponame>james2m/reports<gh_stars>0
# Test bootstrap: loads minitest, boots the dummy Rails app in test/dummy4,
# then requires any support files.
require 'rubygems'
gem 'minitest'
require 'minitest/autorun'
# Configure Rails Environment
# NOTE(review): the `environment` local is never read afterwards; only the
# ENV assignment matters here.
environment = ENV["RAILS_ENV"] = 'test'
rails_root = File.expand_path('../dummy4', __FILE__)
# Load dummy rails app
require File.expand_path('config/environment.rb', rails_root)
Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
|
james2m/reports
|
lib/reports/periodic.rb
|
module Reports
# Scoped report restricted to a time period ('for_month',
# 'between_months', 'year_to_date' or 'all'). The period is translated
# into an extra scope appended to the Scoped scope chain.
class Periodic < Scoped
class InvalidPeriodError < ArgumentError; end
class Period < Struct.new(:type, :title); end
attr_reader :period
attr_accessor :month, :from, :to
class << self
# The selectable periods with localized titles (for form selects).
def periods
@periods ||= ['for_month', 'between_months', 'year_to_date', 'all'].map do |period|
title = I18n.t('title', { scope: ['reports', 'periods', period], default: period.titleize })
Period.new(period, title)
end
end
end
def initialize(options = {})
super
@period = String(options['period'] || 'all')
extract_date_options(options)
if periodic_scopes_on_base_class
@scopes << period_scope
add_period_scopes_to_base_relation?
end
end
def file_name
basename = [title, period].join('-')
"#{basename.parameterize}.csv"
end
# Localized label for the chosen period, interpolating month/to/from
# when they are dates.
def period_name
@period_name ||= begin
options = [:month, :to, :from].inject({}) do |hash, option|
value = send(option)
hash[option] = value.to_formatted_s(:month_and_year) if value.is_a?(DateTime)
hash
end
I18n.t(period, { scope: ['reports', 'periods'], default: "#{period}".titleize }.merge(options))
end
end
# A report is valid when its period has the dates it needs.
def valid?
(period == 'for_month' && month) \
|| (period == 'between_months' && to && from) \
|| (period == 'year_to_date') \
|| (period == 'all')
end
private
# Hook for subclasses to opt out of mixing the period scopes in.
def periodic_scopes_on_base_class
true
end
# Mixes Periodic::Scopes into the base relation, skipping any scope the
# relation already defines.
# NOTE(review): named like a predicate but performs a mutation; renaming
# would change the (private) interface so it is only flagged here.
def add_period_scopes_to_base_relation?
scopes = Scopes.dup
scopes.instance_methods.each do |method_name|
scopes.send(:remove_method, method_name) if base_relation.respond_to?(method_name)
end
base_relation.extend scopes
end
# Coerces the various supported inputs into a Date; unknown inputs fall
# back to today.
def extract_date(attrs)
case attrs
when DateTime then attrs.to_date
when Date then attrs
when ActiveSupport::TimeWithZone then attrs.to_date
when Array then Date.civil(*attrs)
else Date.today
end
end
def extract_date_options(options)
# construct a date for to, from & month using the fields created by select_date view helper or fall back to options[:to] etc
['month', 'to', 'from'].each do |option|
date_options = options.slice("#{option}(1i)", "#{option}(2i)", "#{option}(3i)")
date = date_options.length == 3 ? date_options.sort_by(&:first).map{ |arry| arry.last.to_i } : options[option]
instance_variable_set( "@#{option}", extract_date(date) )
end
end
# Maps the period name onto a [scope_name, *args] array for Scoped.
def period_scope
case @period
when 'all' then ['every']
when 'year_to_date' then [@period]
when 'for_month', 'between_months' then [@period] + period_scope_parameters
else raise InvalidPeriodError, 'must be either :for_month, :between_months, :year_to_date, or :all'
end
end
def period_scope_parameters
period == 'for_month' ? [month] : [from, to]
end
end
# Periodic
end
# Reports
|
james2m/reports
|
lib/report.rb
|
# Facade model wrapping a concrete Reports::* class, exposing it to Rails
# controllers/forms via ActiveModel. The wrapped report is looked up by
# its class name (the 'type' option).
class Report
  include ActiveModel::Model

  attr_accessor :type
  # The wrapped concrete report instance (nil when +type+ is unknown).
  # Replaces the hand-rolled getter with the idiomatic attr_reader.
  attr_reader :report

  delegate :title, :from, :to, :period, :month, :file_name, :stream, to: :report, allow_nil: true

  # One facade instance per registered report class.
  def self.all
    Reports.all.map { |klass| new('type' => String(klass)) }
  end

  def initialize(options = {})
    @type = String(options.delete('type'))
    klass = Reports.all.find { |mod| String(mod) == @type }
    @report = klass.new(options) if klass
  end

  # Reports are never persisted.
  def new_record?; true; end
  def persisted?; false; end
end
|
james2m/reports
|
lib/reports/row.rb
|
<reponame>james2m/reports<gh_stars>0
module Reports
  # Wraps one source record and renders it as an ordered list of cell
  # values. Subclasses declare #columns (a list of accessor names) and
  # define a method per column.
  class Row
    def initialize(row)
      @row = row
    end

    # Memoized cell values, one per entry in #columns, resolved by
    # calling the same-named method on this row.
    def to_columns
      @columns ||= columns.map { |attribute| send(attribute) }
    end

    # Subclasses must return the ordered column accessor names.
    def columns
      raise NotImplementedError
    end
  end
end
|
james2m/reports
|
lib/reports.rb
|
require 'csv'
require 'reports/row'
require 'reports/base'
require 'reports/scoped'
require 'reports/periodic'
require 'reports/periodic/scopes'
require 'report'
require 'reports/version'
require 'reports/railtie' if defined?(Rails) && Rails::VERSION::MAJOR >= 3
module Reports
# All concrete report classes: every Reports::Base descendant except the
# abstract Scoped/Periodic intermediates.
# NOTE(review): @all is memoized but never cleared by .reload — confirm
# whether a stale class list matters after a dev-mode reload.
def self.all
@all ||= Base.descendants.reject { |mod| ["Reports::Scoped", "Reports::Periodic"].include?(mod.to_s) }
end
# Re-loads every report file under app/reports; hooked into the Rails
# code-reloading cycle by Reports::Railtie.
def self.reload #:nodoc:
load_paths = [File.expand_path('app/reports', Rails.root)]
load_paths.each do |path|
Dir[File.join(path, '**', '*.rb')].sort.each do |file|
load file
end
end
end
end
|
james2m/reports
|
lib/reports/periodic/scopes.rb
|
module Reports
class Periodic
# Month-granularity query scopes mixed into a report's base relation by
# Periodic#add_period_scopes_to_base_relation?.
# NOTE(review): DATE_FORMAT is MySQL-specific SQL; these scopes will not
# work on other databases as written.
module Scopes
# The unscoped relation; backs the 'all' period.
def every(*args)
all
end
# Records whose +column+ falls in the same calendar month as +date+.
def for_month(date, column='created_at')
where(["DATE_FORMAT(#{quoted_table_name}.#{connection.quote_column_name column}, '%Y%m') = DATE_FORMAT(?, '%Y%m')", date])
end
# Records whose +column+ month lies between the months of the two dates
# (inclusive); the dates are sorted so argument order does not matter.
def between_months(date1, date2, column='created_at')
query = [
"(DATE_FORMAT(#{quoted_table_name}.#{connection.quote_column_name column}, '%Y%m') >= DATE_FORMAT(?, '%Y%m'))",
"(DATE_FORMAT(#{quoted_table_name}.#{connection.quote_column_name column}, '%Y%m') <= DATE_FORMAT(?, '%Y%m'))"
].join(' AND ')
where([date1, date2].sort.unshift(query))
end
# From January 1st of the current zone year up to now.
def year_to_date(column='created_at')
between_months DateTime.new(Time.zone.now.year), Time.zone.now, column
end
end
end
end
|
james2m/reports
|
test/reports/base_test.rb
|
<reponame>james2m/reports
require 'test_helper'

# Spec for Reports::Base via the dummy app's SimpleReport subclass.
# Removed dead code: an empty `before do end` block and an unused
# `let(:user) { nil }`.
describe Reports::Base do
  describe "new" do
    describe "type" do
      subject { SimpleReport.new }

      it "assign the type to the report" do
        # NOTE(review): Base defines #type_name, not #type — confirm where
        # #type comes from (this expectation may rely on it being added
        # elsewhere).
        subject.type.must_equal 'simple'
      end
    end
  end
end
|
bjjb/adminimum
|
test/test_helper.rb
|
<filename>test/test_helper.rb
# Test bootstrap: put lib/ on the load path so the gem can be required
# without being installed, then load minitest's autorunner.
$LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'adminimum'
require 'minitest/autorun'
|
bjjb/adminimum
|
lib/adminimum.rb
|
require "adminimum/version"
# Top-level namespace for the Adminimum gem; currently only carries the
# VERSION constant loaded above.
module Adminimum
# Your code goes here...
end
|
s-p-k/acg_chef
|
cookbooks/apache/recipes/default.rb
|
<reponame>s-p-k/acg_chef<gh_stars>0
#
# Cookbook:: apache
# Recipe:: default
#
# Copyright:: 2019, The Authors, All Rights Reserved.
#
# Entry point: delegates to the webserver recipe, which installs apache2.
include_recipe 'apache::webserver'
|
s-p-k/acg_chef
|
cookbooks/chef-workstation/recipes/hello-recipe.rb
|
# Installs the cowsay package and writes a greeting file into the demo
# user's home directory.
package 'cowsay'
file '/home/spk/hello.txt' do
action :create
content 'Hello world of mine.'
end
|
s-p-k/acg_chef
|
cookbooks/apache/recipes/webserver.rb
|
<filename>cookbooks/apache/recipes/webserver.rb
#
# Cookbook:: apache
# Recipe:: webserver
#
# Copyright:: 2019, The Authors, All Rights Reserved.
# Installs the apache2 package; service configuration/start is not handled
# here.
package 'apache2' do
action :install
end
|
s-p-k/acg_chef
|
cookbooks/chef-workstation/recipes/default.rb
|
#
# Cookbook:: chef-workstation
# Recipe:: default
#
# Copyright:: 2019, The Authors, All Rights Reserved.
# Entry point: delegates to the hello-recipe recipe.
include_recipe 'chef-workstation::hello-recipe'
|
umbresp/ELang
|
einterpreter.rb
|
<filename>einterpreter.rb
# Translates each whitespace-separated word of +text+ into the character
# whose codepoint equals the word's length, concatenated in order.
# e.g. "ab cde f" => 2.chr + 3.chr + 1.chr
def encode_word_lengths(text)
  text.split(" ").map { |word| word.length.chr }.join
end

# CLI: reads the .e source file named by ARGV[0] and prints its encoding.
if ARGV[0]
  if ARGV[0].end_with? ".e"
    begin
      source = File.read(ARGV[0])
      puts encode_word_lengths(source)
    rescue StandardError => e
      # FIX: was `rescue Exception`, which also swallowed SystemExit and
      # interrupts; StandardError covers the expected file/IO errors.
      puts e.message
    end
  else
    puts "Invalid source file."
  end
else
  puts "No file specified."
end
|
sonots/triglav
|
lib/triglav/error/bad_request.rb
|
<gh_stars>1-10
module Triglav
module Error
# API error representing an HTTP 400 response.
class BadRequest < Triglav::Error::StandardError
# @param message [String, nil] optional override for the default message
def initialize(message = nil)
@code = 400
@message = message || "Bad request."
end
end
end
end
|
sonots/triglav
|
benchmark/client.rb
|
require 'triglav_client'
# Thin convenience wrapper around the generated TriglavClient SDK.
# Handles token-based authentication, transparent re-authentication on
# 401 responses, and translation of SDK errors into Client::Error types.
class Client
  class Error < StandardError
    attr_reader :cause
    def initialize(message, cause)
      @cause = cause
      super(message)
    end
  end
  class AuthenticationError < Error; end
  class ConnectionError < Error; end

  attr_reader :url, :username, :password, :authenticator

  def initialize(
    url: 'http://localhost:7800',
    username: 'triglav_test',
    password: '<PASSWORD>',
    authenticator: 'local',
    timeout: nil,
    debugging: nil
  )
    @url = url
    @username = username
    @password = password
    @authenticator = authenticator
    @timeout = timeout
    @debugging = debugging
    initialize_current_token
    authenticate
  end

  # Send messages
  #
  # @param [Array] events array of event messages
  #
  # {
  #   resource_uri: "hdfs://host:port/path/to/resource",
  #   resource_unit: 'daily',
  #   resource_time: Time.at.to_i,
  #   resource_timezone: "+09:00",
  #   payload: {free: "text"}.to_json,
  # }
  #
  # @see TriglavAgent::MessageRequest
  def send_messages(events)
    messages_api = TriglavClient::MessagesApi.new(api_client)
    handle_error { messages_api.send_messages(events) }
  end

  # Fetch messages
  #
  # @param [Integer] offset
  # @param [Integer] limit
  # @param [Array] resource_uris
  # @return [Array] array of messages
  # @see TriglavClient::MessageEachResponse
  #   id
  #   resource_uri
  #   resource_unit
  #   resource_time
  #   resource_timezone
  #   payload
  def fetch_messages(offset, limit: 100, resource_uris: [])
    messages_api = TriglavClient::MessagesApi.new(api_client)
    handle_error { messages_api.list_messages(offset, {limit: limit, resource_uris: resource_uris}) }
  end

  private

  # Lazily builds and memoizes the underlying SDK client.
  def api_client
    return @api_client if @api_client
    # Block parameter renamed from `config` so it no longer shadows the
    # outer local of the same name.
    config = TriglavClient::Configuration.new do |conf|
      uri = URI.parse(url)
      conf.scheme = uri.scheme
      conf.host = "#{uri.host}:#{uri.port}"
      conf.timeout = @timeout if @timeout
      conf.debugging = @debugging if @debugging
    end
    @api_client = TriglavClient::ApiClient.new(config)
  end

  # Obtains an access token and stores it where the SDK reads it.
  def authenticate
    auth_api = TriglavClient::AuthApi.new(api_client)
    credential = TriglavClient::Credential.new(
      username: username, password: password, authenticator: authenticator
    )
    handle_auth_error do
      result = auth_api.create_token(credential)
      update_current_token(access_token: result.access_token)
    end
  end

  # The SDK keeps a reference to this String; #update_current_token
  # replaces its contents in place so the SDK always sees the latest token.
  def initialize_current_token
    @current_token = {
      access_token: (api_client.config.api_key['Authorization'] = String.new),
    }
  end

  def update_current_token(token)
    @current_token[:access_token].replace(token[:access_token])
  end

  # Error translation for authentication calls.
  def handle_auth_error(&block)
    yield
  rescue TriglavClient::ApiError => e
    if e.code == 0
      # FIX: previously interpolated the undefined method `triglav_url`,
      # which raised NameError instead of the intended ConnectionError.
      raise ConnectionError.new("Could not connect to #{url}", e)
    elsif e.message == 'Unauthorized'.freeze
      raise AuthenticationError.new("Failed to authenticate on triglav API.".freeze, e)
    else
      raise Error.new(e.message, e)
    end
  end

  # Error translation for API calls; re-authenticates once on 401 (an
  # expired token) and retries. Bounded to a single retry so a server that
  # keeps returning 401 cannot cause an infinite loop.
  def handle_error(&block)
    retried = false
    begin
      yield
    rescue TriglavClient::ApiError => e
      if e.code == 0
        raise ConnectionError.new("Could not connect to #{url}", e)
      elsif e.message == 'Unauthorized'.freeze && !retried
        retried = true
        authenticate
        retry
      else
        raise Error.new(e.message, e)
      end
    end
  end
end
|
sonots/triglav
|
config/routes.rb
|
<filename>config/routes.rb
# API routes (versioned under /api/v1). Clusters/resources/jobs use
# explicit routes so ids containing dots/slashes (URIs) match via the
# /.+/ constraints.
Rails.application.routes.draw do
namespace :api do
namespace :v1 do
# Swagger document + token-based auth endpoints.
get '/apidocs' => 'apidocs#index'
post '/auth/token' => 'auth#create'
delete '/auth/token' => 'auth#destroy'
get '/auth/me' => 'auth#me'
resources :users
# Clusters are addressable by numeric id or by name.
get '/clusters' => 'clusters#index'
post '/clusters' => 'clusters#create'
get '/clusters/:id_or_name' => 'clusters#show', constraints: {id_or_name: /.+/}
put '/clusters/:id_or_name' => 'clusters#update', constraints: {id_or_name: /.+/}
patch '/clusters/:id_or_name' => 'clusters#update', constraints: {id_or_name: /.+/}
delete '/clusters/:id_or_name' => 'clusters#destroy', constraints: {id_or_name: /.+/}
# Resources are addressable by numeric id or by full URI.
get '/resources' => 'resources#index'
post '/resources' => 'resources#create'
get '/aggregated_resources' => 'resources#aggregated_resources'
get '/resources/:id_or_uri' => 'resources#show', constraints: {id_or_uri: /.+/}
put '/resources/:id_or_uri' => 'resources#update', constraints: {id_or_uri: /.+/}
patch '/resources/:id_or_uri' => 'resources#update', constraints: {id_or_uri: /.+/}
delete '/resources/:id_or_uri' => 'resources#destroy', constraints: {id_or_uri: /.+/}
post '/jobs' => 'jobs#create'
put '/jobs' => 'jobs#update'
patch '/jobs' => 'jobs#update'
get '/jobs/:id_or_uri' => 'jobs#show', constraints: {id_or_uri: /.+/}
put '/jobs/:id_or_uri' => 'jobs#update', constraints: {id_or_uri: /.+/}
patch '/jobs/:id_or_uri' => 'jobs#update', constraints: {id_or_uri: /.+/}
delete '/jobs/:id_or_uri' => 'jobs#destroy', constraints: {id_or_uri: /.+/}
# Message streams consumed by agents; last_id must precede any id route.
get '/messages' => 'messages#index'
post '/messages' => 'messages#create'
get '/messages/last_id' => 'messages#last_id'
get '/job_messages' => 'job_messages#index'
get '/job_messages/last_id' => 'job_messages#last_id'
end
end
get '/apidocs' => redirect('/swagger/dist/index.html?url=/api/v1/apidocs.json')
# Not Found
match '*anything' => 'application#routing_error', via: :all
get '*anything' => 'application#routing_error' # Need this for HEAD request
end
|
sonots/triglav
|
app/models/job.rb
|
# A job with input resources it consumes and output resources it produces.
# Input resources are combined with AND/OR semantics (logical_op) when
# deciding whether to fire a JobMessage.
class Job < ApplicationRecord
validates :logical_op, inclusion: { in: %w(and or) }
has_many :jobs_input_resources, dependent: :destroy
has_many :jobs_output_resources, dependent: :destroy
has_many :input_resources, through: :jobs_input_resources, source: 'resource'
has_many :output_resources, through: :jobs_output_resources, source: 'resource'
# This method does not support `id_or_uri`, but requires `id` to update `uri` parameter
# This method also requires ids for input and output resources to update them
# `output_resources` would be empty for some jobs which does not transfer data such as argus
def self.create_or_update_with_resources!(params)
if job = self.find_by(id: params['id'])
job.update_with_resources!(params)
job
else
self.create_with_resources!(params)
end
end
# Destroys the job and all of its associated resources atomically.
def destroy_with_resources!
Job.transaction do
input_resources.each {|r| r.destroy! }
output_resources.each {|r| r.destroy! }
self.destroy!
end
end
# private
# Creates the job and its input/output resources in one transaction.
def self.create_with_resources!(params)
Job.transaction do
job = Job.create!(params.except('input_resources', 'output_resources'))
Job.set_input_output_resources!(job, params)
job
end
end
# Syncs resources first, then updates the job's own attributes, atomically.
def update_with_resources!(params)
Job.transaction do
Job.set_input_output_resources!(self, params)
self.update!(params.except('input_resources', 'output_resources'))
end
end
# Marks inputs consumable / outputs not, then syncs both join tables.
def self.set_input_output_resources!(job, params)
params = params.respond_to?(:to_unsafe_h) ? params.to_unsafe_h : params
(params['input_resources'] || []).map {|r| r['consumable'] = true }
(params['output_resources'] || []).map {|r| r['consumable'] = false }
Job.set_resources!(job, JobsInputResource, params['input_resources'])
Job.set_resources!(job, JobsOutputResource, params['output_resources'])
end
# Reconciles the job's resources with the request: resources absent from
# the request (by id) are destroyed, the rest are created or updated and
# linked through +relation_class+.
def self.set_resources!(job, relation_class, resource_params_list)
resource_params_list ||= []
current_ids = relation_class.where(job_id: job.id).pluck(:resource_id)
# destroy
request_ids = resource_params_list.map {|p| p['id'] }.compact
destroy_ids = current_ids - request_ids
if destroy_ids.present?
relation_class.where(job_id: job.id, resource_id: destroy_ids).destroy_all
Resource.where(id: destroy_ids).destroy_all
end
# create or update
(resource_params_list || []).each do |resource_params|
resource = Resource.create_or_update!(resource_params)
relation_class.find_or_create_by!(job_id: job.id, resource_id: resource.id)
end
end
end
|
sonots/triglav
|
lib/triglav/error/standard_error.rb
|
<reponame>sonots/triglav
module Triglav
  module Error
    # Base class for Triglav API errors. Carries an HTTP-style status code
    # (500 here; subclasses override) and a human-readable message.
    class StandardError < ::StandardError
      attr_reader :code, :message

      # @param message [String, nil] optional override for the default message
      def initialize(message = nil)
        @code = 500
        # Fixed the grammar of the default message
        # (was "Some error was occurred.").
        @message = message || "An unexpected error occurred."
      end
    end
  end
end
|
sonots/triglav
|
app/models/user.rb
|
# Locally-authenticated API user. Passwords are stored as bcrypt digests
# (encrypted_password + salt); the plaintext only lives in the transient
# `password` accessor during create/update.
class User < ApplicationRecord
serialize :groups, JSON
ADMIN_ROLE = 'triglav_admin'
# True when the user's groups include the admin role.
def admin?
!!self.groups.try(:include?, ADMIN_ROLE)
end
attr_accessor :password
after_destroy :invalidate_api_keys
# authenticator is reserved for future extension such as LDAP authentication
validates :authenticator, :presence => true,
:inclusion => ['local']
validates :password, :presence => true, on: :create,
if: ->(u) { u.authenticator == 'local' }
validates_length_of :password,
:in => (Settings.authentication.min_password_length .. 127),
if: ->(u) { u.password.present? }
before_save :encrypt_password
after_save :clear_password
# Resolves an access token to its (non-expired) user, extending the
# token's expiry as a side effect; returns nil for unknown/expired tokens.
def self.find_by_access_token(access_token)
return nil unless api_key = ApiKey.find_by(access_token: access_token)
unless api_key.expired?
api_key.extend_expiration
self.find_by(id: api_key.user_id)
else
nil
end
end
# Returns the user on success, false on wrong password, nil when the
# username is unknown (callers can distinguish the two failure modes).
def self.authenticate(sign_in_params)
username, password = sign_in_params[:username], sign_in_params[:password]
user = self.find_by(name: username, authenticator: 'local')
return nil unless user
if user.match_password?(password)
return user
else
return false
end
end
# NOTE(review): String#== is not a constant-time comparison; consider
# BCrypt::Password#== or secure_compare — confirm the threat model.
def match_password?(password="")
encrypted_password == BCrypt::Engine.hash_secret(password, salt)
end
private
# Drop the plaintext once persisted.
def clear_password
self.password = nil
true
end
# Hash the plaintext into encrypted_password with a fresh salt.
def encrypt_password
if password.present?
self.salt = BCrypt::Engine.generate_salt
self.encrypted_password= BCrypt::Engine.hash_secret(password, salt)
end
true
end
# Deleting a user revokes all of their API keys.
def invalidate_api_keys
ApiKey.destroy_for_user(self.id)
true
end
end
|
sonots/triglav
|
app/models/job_message.rb
|
<gh_stars>1-10
# Consumer monitors a JobMessage rather than a Message.
#
# JobMessage is created (fired) if AND/OR conditions are statisfied.
class JobMessage < ApplicationRecord
belongs_to :job
validates :job_id, presence: true
validates :time, presence: true, numericality: { only_integer: true }
validates :timezone, presence: true, format: { with: /\A[+-]\d\d:\d\d\z/ }
# OR conditions
#
# 1) Fire if all input resources of a job for a specific resource_time is set
#
# 1. Created
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            | Created(*) |
# |ResourceB |            |            |
#
# 2. Created => Fire
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            | Created    |
# |ResourceB |            | Created(*) |
#
# 3. Updated => Fire
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            | Updated(*) |
# |ResourceB |            | Created    |
#
# Records the incoming event in JobInternalMessage and fires (creates) a
# JobMessage once every input resource of the job has an event for the
# given resource_time. Returns the new JobMessage, or nil when nothing
# fires (unknown resource, out-of-span time, or inputs still missing).
# NOTE(review): the final `JobMessage.create` is the non-bang form, so a
# validation failure would be silently returned as an unsaved record —
# confirm this is intended.
def self.create_if_orset(params)
job_id = params[:job_id] || raise('job_id is required')
resource_uri = params[:resource_uri] || raise('resource_uri is required')
resource_unit = params[:resource_unit] || raise('resource_unit is required')
resource_time = params[:resource_time] || raise('resource_time is required')
resource_timezone = params[:resource_timezone] || raise('resource_timezone is required')
resource_ids = JobsInputResource.where(job_id: job_id).pluck(:resource_id)
resource = Resource.where(id: resource_ids).where(uri: resource_uri).first
return nil unless resource
if resource.span_in_days
return nil unless resource.in_span?(resource_time)
end
JobInternalMessage.create_with(
resource_unit: resource_unit,
resource_timezone: resource_timezone
).find_or_create_by(
job_id: job_id,
resource_time: resource_time,
resource_uri: resource_uri,
)
input_resources_size = JobsInputResource.where(job_id: job_id).size
time_resources_size = JobInternalMessage.where(job_id: job_id, resource_time: resource_time).size
if time_resources_size == input_resources_size
# Fire
JobMessage.create(job_id: job_id, time: resource_time, timezone: resource_timezone)
else
nil
end
end
# AND conditions
#
# 1) Fire if all input resources of a job for a specific resource_time is set
# 2) Reset all events for the resource_time after fired
#
# 1. Created
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            | Created(*) |
# |ResourceB |            |            |
#
# 2. Created => Fire
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            | Created    |
# |ResourceB |            | Created(*) |
#
# Then, once delete
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            |            |
# |ResourceB |            |            |
#
# 3. Updated
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            | Updated(*) |
# |ResourceB |            |            |
#
# 4. Updated => Fire
#
# |          | 2017-04-16 | 2017-04-17 |
# |:---------|:-----------|:-----------|
# |ResourceA |            | Updated    |
# |ResourceB |            | Updated(*) |
#
# Same as create_if_orset, but clears the per-resource_time events after
# firing so every input must report again before the next fire.
def self.create_if_andset(params)
obj = create_if_orset(params)
return unless obj
JobInternalMessage.where(job_id: params[:job_id], resource_time: params[:resource_time]).destroy_all
obj
end
end
|
sonots/triglav
|
spec/factories/resources.rb
|
<gh_stars>1-10
# Default Resource factory: a consumable, non-notifiable daily HDFS
# resource in JST (+09:00).
FactoryGirl.define do
factory :resource do
description "MyString"
uri "hdfs://localhost/path/to/file.csv.gz"
unit "daily"
timezone "+09:00"
span_in_days nil
consumable true
notifiable false
end
end
|
sonots/triglav
|
spec/rails_helper.rb
|
<filename>spec/rails_helper.rb
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
RSpec.configure do |config|
# Reset FactoryGirl sequences once per example group run.
config.before :all do
FactoryGirl.reload
end
# Start every suite from a clean database; rewind inserts after each
# example.
config.before :suite do
DatabaseRewinder.clean_all
end
config.after :each do
DatabaseRewinder.clean
end
# Generate a table of contents in the autodoc output.
Autodoc.configuration.toc = true
end
|
sonots/triglav
|
spec/factories/job_messages.rb
|
<reponame>sonots/triglav<gh_stars>1-10
# Default JobMessage factory; the epoch time corresponds to 2016-07-01 in
# the +09:00 zone.
FactoryGirl.define do
factory :job_message do
job_id 1
time 1467298800 # "2016-07-01" in +09:00
timezone "+09:00"
end
end
|
sonots/triglav
|
app/serializers/resource_each_serializer.rb
|
# Serializes a single Resource for API responses and documents the
# corresponding Swagger schema (extends AggregatedResourceEachResponse
# with unit/consumable/notifiable).
class ResourceEachSerializer < ActiveModel::Serializer
  attributes :uri, :unit, :timezone, :span_in_days, :consumable, :notifiable

  include Swagger::Blocks

  swagger_schema :ResourceEachResponse do
    allOf do
      schema do
        key :'$ref', :AggregatedResourceEachResponse
      end
      schema do
        property :unit do
          key :type, :string
          # Fixed typo in the published description (was "'singualr'").
          key :description, "'singular' or 'daily' or 'hourly'"
        end
        property :consumable do
          key :type, :boolean
          key :description, 'True if this resource should be consumed'
        end
        property :notifiable do
          key :type, :boolean
          key :description, 'True if a job notifies its end of task to triglav for this resource, that is, monitoring in agent is not necessary'
        end
      end
    end
  end
end
|
sonots/triglav
|
spec/models/api_key_spec.rb
|
<gh_stars>1-10
require 'rails_helper'
# Covers ApiKey's lifecycle: creation defaults, lookup, expiry,
# expiration extension, and per-user revocation.
RSpec.describe ApiKey, type: :model do
describe '.create' do
it do
api_key = ApiKey.create!(user_id: 1)
expect(api_key.access_token).to be_present
expect(api_key.user_id).to be_present
expect(api_key.expires_at).to be_present
expect(api_key.last_accessed_at).to be_present
end
end
describe '.find_by' do
before { @api_key = ApiKey.create(user_id: 1) }
it do
api_key = ApiKey.find_by(access_token: @api_key.access_token)
expect(api_key.access_token).to be_present
expect(api_key.user_id).to be_present
expect(api_key.expires_at).to be_present
expect(api_key.last_accessed_at).to be_present
end
end
describe '.expired?' do
before { @api_key = ApiKey.create!(user_id: 1) }
it do
# Force expiry by back-dating expires_at.
@api_key.tap {|a| a.expires_at = Time.current - 1 }.save!
expect(ApiKey.expired?(access_token: @api_key.access_token)).to be_truthy
end
end
describe 'extend_expiration' do
before { @api_key = ApiKey.create(user_id: 1) }
it do
@api_key.tap {|a| a.expires_at = Time.current - 1 }.save
expires_at = @api_key.expires_at
@api_key.extend_expiration
expect(@api_key.expires_at > expires_at).to be_truthy
end
end
describe '#destroy' do
before { @api_key = ApiKey.create(user_id: 1) }
it do
@api_key.destroy
expect(ApiKey.find_by(access_token: @api_key.access_token)).to be_nil
end
end
describe '.destroy_for_user' do
before { @api_key = ApiKey.create(user_id: 1) }
it do
ApiKey.destroy_for_user(1)
expect(ApiKey.find_by(user_id: 1)).to be_nil
end
end
end
|
sonots/triglav
|
app/controllers/api/v1/apidocs_controller.rb
|
module Api
  module V1
    # Serves the generated Swagger (OpenAPI 2.0) document at GET /api/v1/apidocs.
    # SWAGGERED_CLASSES enumerates every class carrying Swagger::Blocks
    # declarations; it is also reused by lib/tasks/swagger.rake for codegen.
    class ApidocsController < ApplicationController
      SWAGGERED_CLASSES = [
        self,
        Api::V1::AuthController,
        ApiKeySerializer,
        JobSerializer,
        JobEachSerializer,
        Api::V1::JobsController,
        MessageSerializer,
        MessageEachSerializer,
        Api::V1::MessagesController,
        JobMessageEachSerializer,
        Api::V1::JobMessagesController,
        ResourceSerializer,
        ResourceEachSerializer,
        AggregatedResourceEachSerializer,
        Api::V1::ResourcesController,
        UserSerializer,
        UserEachSerializer,
        Api::V1::UsersController
      ].freeze
      include Swagger::Blocks
      # Top-level swagger document metadata (info, auth scheme, tags).
      swagger_root do
        key :swagger, '2.0'
        info version: '1.0.0' do
          key :title, 'Triglav API'
          key :description, 'Triglav API Reference'
          contact do
            key :name, '<NAME>'
            key :email, '<EMAIL>'
          end
          license do
            key :name, 'MIT'
          end
        end
        key :basePath, '/api/v1'
        key :schemes, ['http', 'https']
        key :consumes, ['application/json']
        key :produces, ['application/json']
        # Token-based auth: clients pass the access token in the Authorization header.
        security_definition :api_key, type: :apiKey do
          key :name, 'Authorization'
          key :in, 'header'
        end
        tag name: 'triglav' do
          key :description, 'Triglav operations'
          externalDocs description: 'Find more info here' do
            key :url, 'https://triglav.github.io'
          end
        end
      end
      # Shared error payload schema referenced by every endpoint's error response.
      swagger_schema :ErrorModel do
        key :required, [:error, :backtrace]
        property :error do
          key :type, :string
        end
        property :backtrace do
          key :type, :array
          items do
            key :type, :string
          end
        end
      end
      # GET /api/v1/apidocs — assemble and return the full swagger JSON.
      def index
        render json: Swagger::Blocks.build_root_json(SWAGGERED_CLASSES)
      end
    end
  end
end
|
sonots/triglav
|
spec/factories/jobs.rb
|
<reponame>sonots/triglav
FactoryGirl.define do
  # Bare job with no associated resources.
  factory :job do
    uri "http://localhost/path/to/job?query=parameter"
    logical_op "or"
  end
  # Job wired to 2 input and 2 output resources via the join tables.
  factory :job_with_resources, parent: :job do
    after(:create) do |job|
      2.times do |i|
        resource = create(:resource, uri: "resource://input/uri/#{i}")
        JobsInputResource.create(job_id: job.id, resource_id: resource.id)
      end
      2.times do |i|
        resource = create(:resource, uri: "resource://output/uri/#{i}")
        JobsOutputResource.create(job_id: job.id, resource_id: resource.id)
      end
    end
  end
  # Variants differing only in logical_op ("or": any input suffices,
  # "and": all inputs required — see JobMessage specs).
  factory :job_with_or_resources, parent: :job_with_resources do
    logical_op "or"
  end
  factory :job_with_and_resources, parent: :job_with_resources do
    logical_op "and"
  end
  # Same shape as :job_with_resources but with exactly one input and one output.
  factory :job_with_single_resource, parent: :job do
    after(:create) do |job|
      1.times do |i|
        resource = create(:resource, uri: "resource://input/uri/#{i}")
        JobsInputResource.create(job_id: job.id, resource_id: resource.id)
      end
      1.times do |i|
        resource = create(:resource, uri: "resource://output/uri/#{i}")
        JobsOutputResource.create(job_id: job.id, resource_id: resource.id)
      end
    end
  end
end
|
sonots/triglav
|
lib/tasks/swagger.rake
|
<reponame>sonots/triglav<filename>lib/tasks/swagger.rake
require 'tempfile'
namespace :swagger do
  # NOTE: constant assignment inside a block defines top-level constants, so
  # VERSION and TRIGLAV_CLIENT_*_PATH are also visible to lib/tasks/release.rake.
  VERSION = File.read("#{Rails.root}/VERSION").chomp
  SWAGGER_JSON_PATH = "#{Rails.root}/tmp/swagger.json"
  SWAGGER_CODEGEN_CLI_PATH = "#{Rails.root}/bin/swagger-codegen-cli.jar"
  TRIGLAV_CLIENT_JAVA_PATH = File.expand_path("#{Rails.root}/../triglav-client-java")
  TRIGLAV_CLIENT_RUBY_PATH = File.expand_path("#{Rails.root}/../triglav-client-ruby")
  # Config hashes passed to swagger-codegen via a temp JSON file (-c option).
  SWAGGER_CODEGEN_JAVA_CONFIG = {
    "modelPackage"=>"io.github.triglav_dataflow.client",
    "apiPackage"=>"io.github.triglav_dataflow.client.api",
    "groupId"=>"io.github.triglav_dataflow",
    "artifactId"=>"triglav-client-java",
    "artifactVersion"=>VERSION,
  }
  SWAGGER_CODEGEN_RUBY_CONFIG = {
    "gemName"=>"triglav_client",
    "moduleName"=>"TriglavClient",
    "gemAuthor"=>"<NAME>",
    "gemHomepage"=>"https://github.com/triglav-dataflow/triglav-client-ruby",
    "gemSummary"=>"A ruby client library for Triglav, data-driven workflow tool",
    "gemDescription"=>"A ruby client library for Triglav, data-driven workflow tool",
    "gemVersion"=>VERSION,
  }
  desc "Generate tmp/swagger.json"
  task :generate => :environment do |t, args|
    # Build the swagger document from the same class list the API serves.
    swagger_data = Swagger::Blocks.build_root_json(
      Api::V1::ApidocsController::SWAGGERED_CLASSES
    )
    File.open(SWAGGER_JSON_PATH, 'w') { |file| file.write(swagger_data.to_json) }
    puts "Generated #{SWAGGER_JSON_PATH}"
  end
  desc "Generate ../triglav-client-java"
  task :"codegen-java" => :generate do |t, args|
    # Remove previously generated build files so stale artifacts don't linger.
    %w[
      LICENSE
      README.md
      build.gradle
      build.sbt
      git_push.sh
      gradle.properties
      gradlew.bat
      pom.xml
      settings.gradle
    ].each do |file|
      sh "rm -f #{File.join(TRIGLAV_CLIENT_JAVA_PATH, file)}"
    end
    # Write codegen config to a temp file, run the CLI, then build the result.
    Tempfile.create('triglav') do |fp|
      fp.write SWAGGER_CODEGEN_JAVA_CONFIG.to_json
      fp.close
      sh "java -jar #{SWAGGER_CODEGEN_CLI_PATH} generate" \
        " -i #{SWAGGER_JSON_PATH}" \
        " -l java" \
        " -c #{fp.path}" \
        " -o #{TRIGLAV_CLIENT_JAVA_PATH}"
      sh "cd #{TRIGLAV_CLIENT_JAVA_PATH} && ./gradlew build"
    end
  end
  desc "Generate ../triglav-client-ruby"
  task :"codegen-ruby" => :generate do |t, args|
    Tempfile.create('triglav') do |fp|
      fp.write SWAGGER_CODEGEN_RUBY_CONFIG.to_json
      fp.close
      sh "java -jar #{SWAGGER_CODEGEN_CLI_PATH} generate" \
        " -i #{SWAGGER_JSON_PATH}" \
        " -l ruby" \
        " -c #{fp.path}" \
        " -o #{TRIGLAV_CLIENT_RUBY_PATH}"
    end
  end
  desc "Generate triglav clients"
  task :codegen => [:"codegen-java", :"codegen-ruby"]
end
|
sonots/triglav
|
spec/requests/api/v1/messages_spec.rb
|
<reponame>sonots/triglav
# coding: utf-8
require 'rails_helper'
# Request specs for /api/v1/messages; :autodoc examples feed the generated API docs.
RSpec.describe 'Message resources', :type => :request do
  let(:params) do
    {}
  end
  # Common request headers; authentication is a raw token in Authorization.
  let(:env) do
    {
      'CONTENT_TYPE' => 'application/json',
      'HOST' => 'triglav.analytics.mbga.jp',
      'HTTP_ACCEPT' => 'application/json',
      'HTTP_AUTHORIZATION' => access_token,
    }
  end
  let(:access_token) do
    ApiKey.create(user_id: user.id).access_token
  end
  let(:user) do
    FactoryGirl.create(:user, :triglav_admin)
  end
  let(:message) do
    FactoryGirl.create(:message)
  end
  describe "Send messages", :autodoc do
    let(:description) do
      "Send messages<br/>" \
      "<br/>" \
      "`resource_time` is in unix timestamp<br/>"
    end
    let(:message) do
      FactoryGirl.build(:message)
    end
    # Body is a JSON array of message attributes filtered to the permitted params.
    let(:params) do
      [message.attributes.slice(*MessageSerializer.request_permit_params.map(&:to_s))]
    end
    it "POST /api/v1/messages" do
      post "/api/v1/messages", params: params.to_json, env: env
      expect(response.status).to eq 200
    end
  end
  describe "Fetch messages", :autodoc do
    let(:description) do
      "Fetch messages whose message id is greater than or equal to offset<br/>" \
      "<br/>" \
      "`offset` is required.<br/>" \
      "`resource_uris` are optional, but one resource_uri should be set usually.<br/>" \
      "`limit` is optional, and default is 100.<br/>" \
      "Returned `resource_time` is in unix timestamp<br/>"
    end
    let(:params) do
      {
        offset: message.id,
        resource_uris: [message.resource_uri],
        limit: 100,
      }
    end
    it "GET /api/v1/messages" do
      get "/api/v1/messages", params: params, env: env
      expect(response.status).to eq 200
    end
  end
  describe "Get last message id", :autodoc do
    let(:description) do
      "Get last message id which would be used as a first offset to fetch messages<br/>"
    end
    it "GET /api/v1/messages/last_id" do
      # Create at least one message so a last id exists.
      FactoryGirl.create(:message)
      get "/api/v1/messages/last_id", params: nil, env: env
      expect(response.status).to eq 200
    end
  end
end
|
sonots/triglav
|
spec/models/resource_spec.rb
|
require 'rails_helper'
RSpec.describe Resource, type: :model do
  describe "#set_default" do
    # Both daily and hourly resources default span_in_days to 32 and the
    # timezone to the configured default.
    context 'with daily' do
      let(:resource) do
        FactoryGirl.create(:resource, unit: 'daily')
      end
      it do
        expect(resource.span_in_days).to eq(32)
        expect(resource.timezone).to eq(Settings.resource.default_timezone)
      end
    end
    context 'with hourly' do
      let(:resource) do
        FactoryGirl.create(:resource, unit: 'hourly')
      end
      it do
        expect(resource.span_in_days).to eq(32)
        expect(resource.timezone).to eq(Settings.resource.default_timezone)
      end
    end
  end
  describe 'destroy with job relation' do
    let(:job) do
      FactoryGirl.create(:job_with_resources)
    end
    before { job }
    # Destroying a resource must also remove its join-table rows.
    it do
      input_count_before_destroy = JobsInputResource.all.count
      job.input_resources.first.destroy
      expect(JobsInputResource.all.count).to eq(input_count_before_destroy - 1)
      output_count_before_destroy = JobsOutputResource.all.count
      job.output_resources.first.destroy
      expect(JobsOutputResource.all.count).to eq(output_count_before_destroy - 1)
    end
  end
end
|
sonots/triglav
|
benchmark/send_messages.rb
|
# Benchmark: hammer Triglav's send_messages API from several worker processes
# and report overall throughput.
#
#   -p  number of parallel worker processes (default 2)
#   -d  benchmark duration in seconds       (default 1)
#   -u  Triglav base URL                    (default http://localhost:7800)
require_relative 'client'
require 'parallel'
require 'optparse'

opts = ARGV.getopts('p:d:u:')
workers  = Integer(opts['p'] || 2)
seconds  = Integer(opts['d'] || 1)
base_url = opts['u'] || 'http://localhost:7800'
puts "-p num_parallels: #{workers} -d duration: #{seconds} -u triglav_url: #{base_url}"

api = Client.new(url: base_url)
# Template event; resource_uri is made unique per request below.
template = {
  resource_uri: "hdfs://host:port/%d/path/to/resource",
  resource_unit: 'daily',
  resource_time: Time.now.to_i,
  resource_timezone: "+09:00",
  payload: {free: "text"}.to_json,
}

begun_at = Time.now
totals = Parallel.map(1..workers, in_processes: workers) do |_worker|
  sent = 0
  # Send in bursts of 10 and only check the deadline between bursts, so the
  # clock check adds negligible per-request overhead. do-while: at least one
  # burst is always sent, matching the original loop/break structure.
  begin
    10.times do
      sent += 1
      api.send_messages([template.merge(resource_uri: "hdfs://host:port/#{sent}/path/to/resource")])
    end
  end until Time.now - begun_at > seconds
  sent
end
took = Time.now - begun_at
puts "#{totals.inject(:+) / took.to_f} request / sec"
puts "#{totals.inject(:+) / workers.to_f / took.to_f} request / sec / process"
|
sonots/triglav
|
spec/models/job_spec.rb
|
require 'rails_helper'
RSpec.describe Job, type: :model do
  describe '#create_or_update_with_resources!' do
    context 'create a new job' do
      # Job attributes plus nested input/output resource attribute hashes,
      # mirroring the JSON body accepted by the jobs API.
      let(:params) do
        FactoryGirl.build(:job).attributes.except('id').merge({
          'input_resources' => [
            FactoryGirl.build(:resource, uri: "resource://input/uri/0", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://input/uri/1", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://input/uri/0", unit: "hourly").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://input/uri/1", unit: "hourly").attributes.except('id'),
          ],
          'output_resources' => [
            FactoryGirl.build(:resource, uri: "resource://output/uri/0", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://output/uri/1", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://output/uri/0", unit: "hourly").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://output/uri/1", unit: "hourly").attributes.except('id'),
          ]
        })
      end
      # Each (uri, unit) pair becomes its own Resource: 4 inputs + 4 outputs.
      it do
        job = Job.create_or_update_with_resources!(params)
        expect(job.uri).to eql(params['uri'])
        expect(JobsInputResource.all.size).to eql(4)
        expect(JobsOutputResource.all.size).to eql(4)
        expect(Resource.all.size).to eql(8)
      end
    end
    context 'update a job and append resources' do
      let(:job) do
        FactoryGirl.create(:job)
      end
      let(:params) do
        job.attributes.merge({
          'input_resources' => [
            FactoryGirl.build(:resource, uri: "resource://input/uri/0", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://input/uri/1", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://input/uri/0", unit: "hourly").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://input/uri/1", unit: "hourly").attributes.except('id'),
          ],
          'output_resources' => [
            FactoryGirl.build(:resource, uri: "resource://output/uri/0", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://output/uri/1", unit: "daily").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://output/uri/0", unit: "hourly").attributes.except('id'),
            FactoryGirl.build(:resource, uri: "resource://output/uri/1", unit: "hourly").attributes.except('id'),
          ]
        })
      end
      it do
        Job.create_or_update_with_resources!(params)
        expect(job.uri).to eql(params['uri'])
        expect(JobsInputResource.all.size).to eql(4)
        expect(JobsOutputResource.all.size).to eql(4)
        expect(Resource.all.size).to eql(8)
      end
    end
    context 'update a job and update resources' do
      let(:job) do
        FactoryGirl.create(:job_with_resources)
      end
      # Resources carry ids here, so they are updated in place (unit -> hourly)
      # rather than appended; counts stay at 2/2/4.
      let(:params) do
        job.attributes.merge({
          'input_resources' => job.input_resources.map {|r| r.attributes.merge('unit'=>'hourly') },
          'output_resources' => job.output_resources.map {|r| r.attributes.merge('unit'=>'hourly') },
        })
      end
      it do
        Job.create_or_update_with_resources!(params)
        expect(job.uri).to eql(params['uri'])
        expect(JobsInputResource.all.size).to eql(2)
        expect(JobsOutputResource.all.size).to eql(2)
        expect(Resource.all.size).to eql(4)
      end
    end
    context 'destroy resources' do
      let(:job) do
        FactoryGirl.create(:job_with_resources)
      end
      # Omitting input_resources/output_resources removes the existing ones.
      let(:params) do
        job.attributes
      end
      it do
        Job.create_or_update_with_resources!(params)
        expect(job.uri).to eql(params['uri'])
        expect(JobsInputResource.all.size).to eql(0)
        expect(JobsOutputResource.all.size).to eql(0)
        expect(Resource.all.size).to eql(0)
      end
    end
  end
  describe '#destroy_with_resources!' do
    let(:job) do
      FactoryGirl.create(:job_with_resources)
    end
    # Destroying the job cascades to its associated resources.
    it do
      resource_ids = job.input_resources.map(&:id) + job.output_resources.map(&:id)
      job.destroy_with_resources!
      expect(Resource.where(id: resource_ids).size).to eql(0)
    end
  end
end
|
sonots/triglav
|
lib/tasks/release.rake
|
# Release tasks. VERSION, TRIGLAV_CLIENT_JAVA_PATH and TRIGLAV_CLIENT_RUBY_PATH
# are top-level constants defined in lib/tasks/swagger.rake, which Rake loads
# alongside this file.
namespace :release do
  # Tag and push the server repo itself ("|| true" tolerates an existing tag).
  task :core do
    sh "git tag #{VERSION} || true"
    sh "git push origin #{VERSION}"
  end
  # Commit and push the regenerated java client checkout.
  task :"client-java" do
    Dir.chdir(TRIGLAV_CLIENT_JAVA_PATH) do
      sh "git commit -a -m '#{VERSION}'"
      sh "git push origin master"
    end
  end
  # Commit, push, and gem-release the regenerated ruby client checkout.
  task :"client-ruby" do
    Dir.chdir(TRIGLAV_CLIENT_RUBY_PATH) do
      sh "git commit -a -m '#{VERSION}'"
      sh "git push origin master"
      sh "bundle"
      sh "bundle exec rake release"
    end
  end
  # Full release: regenerate clients first, then release each piece.
  task :all => ['swagger:codegen', 'client-java', 'client-ruby', 'core']
end
|
sonots/triglav
|
app/serializers/resource_serializer.rb
|
# Full serializer for a single Resource (show/create/update responses), plus
# strong-parameter filtering for incoming resource payloads and the Swagger
# request/response schemas.
class ResourceSerializer < ActiveModel::Serializer
  attributes :id,
    :description,
    :uri,
    :unit,
    :timezone,
    :span_in_days,
    :consumable,
    :notifiable,
    :created_at,
    :updated_at
  # Strong parameters for create/update: uri and unit are mandatory,
  # everything else whitelisted via request_permit_params.
  def self.request_params(params)
    params.require(:uri)
    params.require(:unit)
    params.permit(*request_permit_params)
  end
  def self.request_permit_params
    [
      :id,
      :description,
      :uri,
      :unit,
      :timezone,
      :span_in_days,
      :consumable,
      :notifiable
    ]
  end
  include Swagger::Blocks
  # Response schema = request schema + server-set timestamps.
  swagger_schema :ResourceResponse do
    allOf do
      schema do
        key :'$ref', :ResourceRequest
      end
      schema do
        property :created_at do
          key :type, :string
          key :format, :"date-time"
        end
        property :updated_at do
          key :type, :string
          key :format, :"date-time"
        end
      end
    end
  end
  swagger_schema :ResourceRequest, required: [:uri] do
    property :id do
      key :type, :integer
      key :format, :int64
    end
    property :description do
      key :type, :string
    end
    property :uri do
      key :type, :string
      key :description, 'URI of Resource'
    end
    property :unit do
      key :type, :string
      key :description, 'Time unit of resource to monitor such as singular, daily, or hourly'
    end
    property :timezone do
      key :type, :string
      key :description, 'Timezone of resource time, that is, timezone of %Y-%m-%d for hdfs://path/to/%Y-%m-%d such as +09:00'
    end
    property :span_in_days do
      key :type, :integer
      key :format, :int64
      key :description, 'Time span of resource to monitor, default is 32'
    end
    property :consumable do
      key :type, :boolean
      key :description, 'True if this resource should be consumed. Input resources are automatically set to true, and output resources are set to false'
    end
    property :notifiable do
      key :type, :boolean
      key :description, 'True if a job notifies its end of task to triglav for this resource, that is, monitoring in agent is not necessary'
    end
  end
end
|
sonots/triglav
|
spec/factories/messages.rb
|
require 'securerandom'
FactoryGirl.define do
  # Default Message used by specs; attributes mirror db/schema.
  factory :message do
    # Block form so each created record gets a FRESH UUID. The previous bare
    # form (`uuid SecureRandom.uuid`) evaluated SecureRandom.uuid once at
    # factory-definition time, so every message in the suite shared the same
    # uuid — breaking any spec that creates more than one message when uuid
    # must be unique (e.g. Message.create_messages dedupes by uuid).
    uuid { SecureRandom.uuid }
    resource_uri "hdfs://localhost/path/to/file.csv.gz"
    resource_unit "daily"
    resource_time 1467298800 # "2016-07-01" in +09:00
    resource_timezone "+09:00"
    payload '{"foo":"bar"}'
  end
end
|
sonots/triglav
|
spec/requests/api/v1/job_messages_spec.rb
|
# coding: utf-8
require 'rails_helper'
# Request specs for /api/v1/job_messages; :autodoc examples feed the API docs.
RSpec.describe 'JobMessage resources', :type => :request do
  let(:params) do
    {}
  end
  # Common request headers; authentication is a raw token in Authorization.
  let(:env) do
    {
      'CONTENT_TYPE' => 'application/json',
      'HOST' => 'triglav.analytics.mbga.jp',
      'HTTP_ACCEPT' => 'application/json',
      'HTTP_AUTHORIZATION' => access_token,
    }
  end
  let(:access_token) do
    ApiKey.create(user_id: user.id).access_token
  end
  let(:user) do
    FactoryGirl.create(:user, :triglav_admin)
  end
  let(:message) do
    FactoryGirl.create(:job_message)
  end
  describe "Fetch job-messages", :autodoc do
    let(:description) do
      "Fetch job-messages whose message id is greater than or equal to offset<br/>" \
      "<br/>" \
      "`offset` is required.<br/>" \
      "`job_id` is required.<br/>" \
      "`limit` is optional, and default is 100.<br/>" \
      "Returned `time` is in unix timestamp of returned `timestamp`.<br/>"
    end
    let(:params) do
      {
        offset: message.id,
        job_id: message.job_id,
        limit: 100,
      }
    end
    it "GET /api/v1/job_messages" do
      get "/api/v1/job_messages", params: params, env: env
      expect(response.status).to eq 200
    end
  end
  describe "Get last job-message id", :autodoc do
    let(:description) do
      "Get last AND message id which would be used as a first offset to fetch messages<br/>"
    end
    it "GET /api/v1/job_messages/last_id" do
      FactoryGirl.create(:message)
      get "/api/v1/job_messages/last_id", params: nil, env: env
      expect(response.status).to eq 200
    end
  end
end
|
sonots/triglav
|
app/serializers/user_serializer.rb
|
<reponame>sonots/triglav
# Serializer for User records plus the Swagger request/response schemas.
# The request schema additionally accepts a plaintext password field, which is
# never echoed back in responses.
class UserSerializer < ActiveModel::Serializer
  attributes :id, :name, :description, :authenticator, :groups, :email, :created_at, :updated_at
  include Swagger::Blocks
  swagger_schema :UserResponse, required: [:id, :name] do
    property :id do
      key :type, :integer
      key :format, :int64
    end
    property :name do
      key :type, :string
    end
    property :description do
      key :type, :string
    end
    property :authenticator do
      key :type, :string
      key :description, 'local or ldap'
      key :enum, ['local', 'ldap']
    end
    property :groups do
      key :type, :array
      items do
        key :type, :string
      end
    end
    property :email do
      key :type, :string
    end
    property :created_at do
      key :type, :string
      key :format, :"date-time"
    end
    property :updated_at do
      key :type, :string
      key :format, :"date-time"
    end
  end
  swagger_schema :UserRequest do
    # Kept for reference: an allOf composition over a shared :User schema was
    # considered; the properties are declared inline instead.
    # allOf do
    #   schema do
    #     key :'$ref', :User
    #   end
    #   schema do
    #     key :required, [:name]
    #     property :password do
    #       key :type, :string
    #     end
    #   end
    # end
    property :name do
      key :type, :string
    end
    property :description do
      key :type, :string
    end
    property :authenticator do
      key :type, :string
      key :description, 'local or ldap'
      key :enum, ['local', 'ldap']
    end
    property :groups do
      key :type, :array
      items do
        key :type, :string
      end
    end
    property :email do
      key :type, :string
    end
    property :password do
      key :type, :string
    end
  end
end
|
sonots/triglav
|
app/controllers/api/v1/resources_controller.rb
|
<filename>app/controllers/api/v1/resources_controller.rb
module Api
  module V1
    # CRUD + aggregation endpoints for Resource records. Single resources can
    # be addressed either by numeric id or by URI (see #set_resource!).
    # Swagger::Blocks declarations sit next to each action and are collected
    # by ApidocsController::SWAGGERED_CLASSES.
    class ResourcesController < ApplicationController
      include Swagger::Blocks
      # GET /aggregated_resources
      # GET /aggregated_resources.json
      swagger_path '/aggregated_resources' do
        operation :get do
          key :description, 'Returns aggregated resources to be monitored'
          key :operationId, 'listAggregatedResources'
          key :tags, ['resources']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :uri_prefix
            key :in, :query
            key :description, 'Prefix of Resource URI'
            key :required, true
            key :type, :string
          end
          response 200 do
            key :description, 'resource response'
            schema do
              key :type, :array
              items do
                key :'$ref', :AggregatedResourceEachResponse
              end
            end
          end
          response :unprocessable_entity do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # GET /aggregated_resources
      # GET /aggregated_resources.json
      # Aggregation itself lives in Resource.aggregated_resources; uri_prefix
      # is mandatory here (ParameterMissing -> 400 via ApplicationController).
      def aggregated_resources
        resources = Resource.aggregated_resources(uri_prefix: params.require(:uri_prefix))
        render json: resources, each_serializer: AggregatedResourceEachSerializer
      end
      # GET /resources
      # GET /resources.json
      swagger_path '/resources' do
        operation :get do
          key :description, 'Returns all resources'
          key :operationId, 'listResources'
          key :tags, ['resources']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :uri_prefix
            key :in, :query
            key :description, 'Prefix of Resource URI'
            key :required, false
            key :type, :string
          end
          response 200 do
            key :description, 'resource response'
            schema do
              key :type, :array
              items do
                key :'$ref', :ResourceEachResponse
              end
            end
          end
          response :unprocessable_entity do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # GET /resources
      # GET /resources.json
      # Optional uri_prefix narrows the listing via a LIKE prefix match.
      def index
        params.permit(:uri_prefix)
        if uri_prefix = params[:uri_prefix]
          @resources = Resource.where('uri LIKE ?', "#{uri_prefix}%")
        else
          @resources = Resource.all
        end
        render json: @resources, each_serializer: ResourceEachSerializer
      end
      # GET /resources/1
      # GET /resources/1.json
      swagger_path '/resources/{id_or_uri}' do
        operation :get do
          key :description, 'Returns a single resource'
          key :operationId, 'getResource'
          key :tags, ['resources']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id_or_uri
            key :in, :path
            key :description, 'ID or URI of resource to fetch'
            key :required, true
            key :type, :string
          end
          response 200 do
            key :description, 'resource response'
            schema do
              key :'$ref', :ResourceResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # GET /resources/1
      # GET /resources/1.json
      def show
        set_resource!
        render json: @resource
      end
      # POST /resources
      # POST /resources.json
      swagger_path '/resources' do
        operation :post do
          key :description, 'Creates a new resource'
          key :operationId, 'createResource'
          key :tags, ['resources']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :resource
            key :in, :body
            key :description, 'Resource to add'
            key :required, true
            schema do
              key :'$ref', :ResourceRequest
            end
          end
          response 201 do
            key :description, 'resource response'
            schema do
              key :'$ref', :ResourceResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # POST /resources
      # POST /resources.json
      # NOTE(review): the swagger declaration above documents 201, but this
      # renders with the default 200 status — confirm which is intended.
      def create
        @resource = Resource.new(resource_params)
        if @resource.save
          render json: @resource
        else
          render json: @resource.errors, status: :unprocessable_entity
        end
      end
      # PATCH/PUT /resources
      # PATCH/PUT /resources.json
      swagger_path '/resources/{id_or_uri}' do
        operation :patch do
          key :description, 'Updates a single resource'
          key :operationId, 'updateResource'
          key :tags, ['resources']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id_or_uri
            key :in, :path
            key :description, 'ID or URI of resource to fetch'
            key :required, true
            key :type, :string
          end
          parameter do
            key :name, :resource
            key :in, :body
            key :description, 'Resource parameters to update'
            key :required, true
            schema do
              key :'$ref', :ResourceRequest
            end
          end
          response 200 do
            key :description, 'resource response'
            schema do
              key :'$ref', :ResourceResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # PATCH/PUT /resources
      # PATCH/PUT /resources.json
      def update
        set_resource!
        if @resource.update(resource_params)
          render json: @resource
        else
          render json: @resource.errors, status: :unprocessable_entity
        end
      end
      # DELETE /resources/1
      # DELETE /resources/1.json
      swagger_path '/resources/{id_or_uri}' do
        operation :delete do
          key :description, 'Deletes single resource'
          key :operationId, 'deleteResource'
          key :tags, ['resources']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id_or_uri
            key :in, :path
            key :description, 'ID or URI of resource to fetch'
            key :required, true
            key :type, :string
          end
          response 204 do
            key :description, 'deleted'
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # DELETE /resources/1
      # DELETE /resources/1.json
      def destroy
        set_resource!
        @resource.destroy
        head :no_content
      end
      private
      # Resolve {id_or_uri}: Integer() raises on a non-numeric value, in which
      # case we fall back to a URI lookup. find_by! raises RecordNotFound
      # (mapped to 400 by ApplicationController#rescue_exception).
      def set_resource!
        begin
          @resource = Resource.find(Integer(params[:id_or_uri]))
        rescue
          @resource = Resource.find_by!(uri: params[:id_or_uri])
        end
      end
      # Coerce "1"/"true"/"0"/"false" query strings to booleans; anything else
      # is passed through unchanged. NOTE(review): appears unused in this
      # controller as written — confirm before removing.
      def as_boolean(query_param)
        return true if %w[1 true].include?(query_param)
        return false if %w[0 false].include?(query_param)
        query_param
      end
      def resource_params
        ResourceSerializer.request_params(params)
      end
    end
  end
end
|
sonots/triglav
|
app/controllers/application_controller.rb
|
# Base controller: token authentication, operator stamping, access logging,
# and centralized exception-to-JSON error handling for the whole API.
class ApplicationController < ActionController::API
  before_action :set_operator
  after_action :set_access_log_info
  # Make the authenticated user available to RecordWithOperator callbacks.
  def set_operator
    RecordWithOperator.operator = current_user
  end
  # The raw access token is passed directly in the Authorization header
  # (no "Bearer " prefix — see the swagger api_key security definition).
  def current_access_token
    request.env['HTTP_AUTHORIZATION']
  end
  def current_user=(user)
    @current_user = user
  end
  # Memoized lookup of the user owning the presented access token; nil when
  # no token is given or the token matches no user.
  def current_user
    if @current_user.present?
      @current_user
    elsif access_token = current_access_token
      @current_user = User.find_by_access_token(access_token)
    else
      @current_user = nil
    end
  end
  # Raises (-> JSON error response) unless a valid token was presented.
  def authenticate!
    if current_user.present?
      true
    else
      raise Triglav::Error::InvalidAuthenticityToken
    end
  end
  def set_access_log_info
    info = {}
    info[:controller] = params[:controller]
    info[:user_name] = current_user.name if current_user
    request.env['triglav_access_log'] = info
    # access_log is written by Triglav::Rack::AccessLogger rack middleware. See lib/triglav/rack/access_logger
  end
  # Error Handle
  # Catch-all route handler (wired in routes.rb) so unknown paths also get
  # the JSON error format below.
  def routing_error
    raise ActionController::RoutingError, "No route matches #{request.path.inspect}"
  end
  rescue_from StandardError, with: :rescue_exception
  private
  # Map exceptions to a JSON {error:, backtrace:} payload. Known client-side
  # failures become 400; Triglav errors carry their own code; anything else
  # is classified by ActionDispatch::ExceptionWrapper.
  def rescue_exception(e)
    @exception = e
    response = { error: e.message.present? ? e.message : e.class.name }
    status_code =
      case e
      when ActiveRecord::RecordNotFound; 400
      when ActiveRecord::RecordNotUnique; 400
      when ActiveRecord::RecordInvalid; 400
      when JSON::ParserError; 400
      when ActionController::ParameterMissing; 400
      when Triglav::Error::StandardError; e.code
      # NOTE(review): bare `env` here — newer Rails removed the controller
      # `env` delegate in favor of `request.env`; confirm against the Rails
      # version this app pins.
      else; ActionDispatch::ExceptionWrapper.new(env, e).status_code
      end
    # Backtraces are only exposed outside production-like environments.
    if Rails.env.development? or Rails.env.test?
      response[:backtrace] = e.backtrace
    end
    log_exception(e)
    render :json => response, status: status_code
  end
  # Expected client errors get empty `when` branches on purpose: they are
  # NOT logged. Only genuinely unexpected exceptions reach the else branch.
  def log_exception(e)
    case e
    when Triglav::Error::InvalidAuthenticityCredential
    when Triglav::Error::InvalidAuthenticityToken
    when ActiveRecord::RecordNotFound
    when ActiveRecord::RecordInvalid
    when JSON::ParserError
    when ActionController::ParameterMissing
    when ActionController::RoutingError
    when ActionController::InvalidAuthenticityToken
    when ActionDispatch::ParamsParser::ParseError
    else
      Rails.logger.error e
    end
  end
end
|
sonots/triglav
|
app/controllers/api/v1/jobs_controller.rb
|
module Api
  module V1
    # Endpoints for Job records. Jobs are upserted together with their nested
    # input/output resources (Job.create_or_update_with_resources!). Single
    # jobs can be addressed by numeric id or by URI (see #set_job!).
    class JobsController < ApplicationController
      include Swagger::Blocks
      # GET /jobs
      # GET /jobs.json
      swagger_path '/jobs' do
        operation :get do
          key :description, 'Returns all jobs'
          key :operationId, 'listJobs'
          key :tags, ['jobs']
          security do
            key :api_key, []
          end
          response 200 do
            key :description, 'job response'
            schema do
              key :type, :array
              items do
                key :'$ref', :JobEachResponse
              end
            end
          end
          response :unprocessable_entity do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # GET /jobs
      # GET /jobs.json
      def index
        @jobs = Job.all
        render json: @jobs, each_serializer: JobEachSerializer
      end
      # GET /jobs/1
      # GET /jobs/1.json
      swagger_path '/jobs/{id_or_uri}' do
        operation :get do
          key :description, 'Returns a single job'
          key :operationId, 'getJob'
          key :tags, ['jobs']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id_or_uri
            key :in, :path
            key :description, 'ID or URI of job to fetch'
            key :required, true
            key :type, :string
          end
          response 200 do
            key :description, 'job response'
            schema do
              key :'$ref', :JobResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # GET /jobs/1
      # GET /jobs/1.json
      def show
        set_job!
        render json: @job
      end
      # PATCH/PUT /jobs
      # PATCH/PUT /jobs.json
      swagger_path '/jobs' do
        operation :patch do
          key :description, 'Creates or Updates a single job'
          key :operationId, 'createOrUpdateJob'
          key :tags, ['jobs']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :job
            key :in, :body
            key :description, 'Job parameters'
            key :required, true
            schema do
              key :'$ref', :JobRequest
            end
          end
          response 200 do
            key :description, 'job response'
            schema do
              key :'$ref', :JobResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # PATCH/PUT /jobs
      # PATCH/PUT /jobs.json
      # Upsert: delegates creation/update of the job AND its nested resources.
      def update
        @job = Job.create_or_update_with_resources!(job_params)
        render json: @job
      end
      # DELETE /jobs/1
      # DELETE /jobs/1.json
      swagger_path '/jobs/{id_or_uri}' do
        operation :delete do
          key :description, 'Deletes single job'
          key :operationId, 'deleteJob'
          key :tags, ['jobs']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id_or_uri
            key :in, :path
            key :description, 'ID or URI of job to fetch'
            key :required, true
            key :type, :string
          end
          response 204 do
            key :description, 'deleted'
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      # DELETE /jobs/1
      # DELETE /jobs/1.json
      # Idempotent: a missing job is swallowed (`rescue nil`) and 204 is
      # returned either way. Deletion cascades to associated resources.
      def destroy
        set_job! rescue nil
        @job.destroy_with_resources! if @job
        head :no_content
      end
      private
      # Resolve {id_or_uri}: Integer() raises on non-numeric input, falling
      # back to a URI lookup; find_by! raises RecordNotFound on a miss.
      def set_job!
        begin
          @job = Job.find(Integer(params[:id_or_uri]))
        rescue
          @job = Job.find_by!(uri: params[:id_or_uri])
        end
      end
      def job_params
        JobSerializer.request_params(params)
      end
    end
  end
end
|
sonots/triglav
|
spec/models/message_spec.rb
|
require 'rails_helper'
require 'securerandom'
RSpec.describe Message, type: :model do
  let(:message_params) {
    {
      resource_uri: 'hdfs://foo/bar',
      resource_unit: 'daily',
      resource_time: 1356361200,
      resource_timezone: '+09:00',
      payload: '{"path":"hdfs://foo/bar","last_modification_time":1356361200000}',
    }
  }
  # Freeze the clock so Time.now-based resource_time values are deterministic.
  let(:now) { Time.parse('2012-12-28 00:00:00 +0900') }
  before do
    Timecop.freeze(now)
  end
  after do
    Timecop.return
  end
  describe '#validates' do
    it 'valid' do
      expect { Message.create!(message_params) }.not_to raise_error
    end
    # Every resource_* attribute is required, and payload must be valid JSON.
    it 'invalid' do
      expect { Message.create!(message_params.except(:resource_uri)) }.to raise_error(ActiveRecord::RecordInvalid)
      expect { Message.create!(message_params.except(:resource_unit)) }.to raise_error(ActiveRecord::RecordInvalid)
      expect { Message.create!(message_params.except(:resource_time)) }.to raise_error(ActiveRecord::RecordInvalid)
      expect { Message.create!(message_params.except(:resource_timezone)) }.to raise_error(ActiveRecord::RecordInvalid)
      expect { Message.create!(message_params.merge(payload: 'foo')) }.to raise_error(ActiveRecord::RecordInvalid)
    end
  end
  describe 'build_with_job_message' do
    let(:job) do
      FactoryGirl.create(:job_with_single_resource)
    end
    let(:resource) do
      job.input_resources.first
    end
    let(:message_params) {
      {
        resource_uri: resource.uri,
        resource_unit: resource.unit,
        resource_timezone: resource.timezone,
        resource_time: Time.now.to_i,
        payload: '{"path":"hdfs://foo/bar","last_modification_time":1356361200000}',
      }
    }
    # A message matching a job's input resource also produces a JobMessage.
    it do
      subject = Message.build_with_job_message(message_params)
      expect(JobMessage.all.size).to be > 0
    end
  end
  describe 'create_messages' do
    let(:job) do
      FactoryGirl.create(:job_with_single_resource)
    end
    let(:resource) do
      job.input_resources.first
    end
    context 'without uuid' do
      let(:message_params) {
        {
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          resource_timezone: resource.timezone,
          resource_time: Time.now.to_i,
          payload: '{"path":"hdfs://foo/bar","last_modification_time":1356361200000}',
        }
      }
      # Without uuids, identical payloads are NOT deduplicated.
      it do
        result = Message.create_messages([message_params, message_params])
        expect(result[:num_inserts]).to eq(2)
        expect(Message.all.size).to eq(2)
      end
    end
    context 'with duplicated uuid' do
      let(:message_params) {
        {
          uuid: SecureRandom.uuid,
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          resource_timezone: resource.timezone,
          resource_time: Time.now.to_i,
          payload: '{"path":"hdfs://foo/bar","last_modification_time":1356361200000}',
        }
      }
      # Messages sharing a uuid are deduplicated: only one insert happens.
      it do
        result = Message.create_messages([message_params, message_params])
        expect(result[:num_inserts]).to eq(1)
        expect(Message.all.size).to eq(1)
      end
    end
  end
end
|
sonots/triglav
|
app/serializers/api_key_serializer.rb
|
<gh_stars>1-10
# Serializes an ApiKey into its token representation and declares the
# Swagger schemas used by the authentication endpoints.
class ApiKeySerializer < ActiveModel::Serializer
  attributes :access_token, :expires_at

  include Swagger::Blocks

  # Request schema: username/password credential pair.
  # Only the 'local' authenticator value is accepted.
  swagger_schema :"Credential", required: [:username, :password] do
    property :username do
      key :type, :string
    end
    property :password do
      key :type, :string
    end
    property :authenticator do
      key :type, :string
      key :enum, ['local']
    end
  end

  # Response schema: issued access token plus its expiry timestamp.
  swagger_schema :TokenResponse do
    property :access_token do
      key :type, :string
    end
    property :expires_at do
      key :type, :string
      key :format, :"date-time"
    end
  end
end
|
sonots/triglav
|
spec/models/job_message_spec.rb
|
<gh_stars>1-10
require 'rails_helper'
# Specs for JobMessage: a JobMessage is produced when a Job's input
# resource events satisfy its logical condition (OR via create_if_orset,
# AND via create_if_andset), subject to each resource's span_in_days.
RSpec.describe JobMessage, type: :model do
  let(:job_message_params) {
    {
      job_id: 1,
      time: 1356361200,
      timezone: '+09:00',
    }
  }

  describe '#validates' do
    it 'valid' do
      expect { JobMessage.create!(job_message_params) }.not_to raise_error
    end
    it 'invalid' do
      # Each required column must be present.
      expect { JobMessage.create!(job_message_params.except(:job_id)) }.to raise_error(ActiveRecord::RecordInvalid)
      expect { JobMessage.create!(job_message_params.except(:time)) }.to raise_error(ActiveRecord::RecordInvalid)
      expect { JobMessage.create!(job_message_params.except(:timezone)) }.to raise_error(ActiveRecord::RecordInvalid)
    end
  end

  let(:message_params_with_job) {
    {
      job_id: 1,
      resource_uri: 'hdfs://foo/bar',
      resource_unit: 'daily',
      resource_time: 1356361200,
      resource_timezone: '+09:00',
    }
  }
  let(:job_with_single_resource) do
    FactoryGirl.create(:job_with_single_resource)
  end
  let(:job_with_or_resources) do
    FactoryGirl.create(:job_with_or_resources)
  end
  let(:job_with_and_resources) do
    FactoryGirl.create(:job_with_and_resources)
  end

  # Freeze time so span_in_days window checks are deterministic.
  let(:now) { Time.parse('2012-12-28 00:00:00 +0900') }
  before do
    Timecop.freeze(now)
  end
  after do
    Timecop.return
  end

  describe 'create_if_orset' do
    context 'with single input resource' do
      it do
        job = job_with_single_resource
        resource = job.input_resources.first
        params = {
          job_id: job.id,
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          resource_time: Time.now.to_i,
          resource_timezone: resource.timezone
        }
        subject = JobMessage.create_if_orset(params)
        expect(subject).to be_present
        expect(subject.job_id).to eq(params[:job_id])
        expect(subject.time).to eq(params[:resource_time])
        expect(subject.timezone).to eq(params[:resource_timezone])
      end
    end
    context 'with multiple input resources' do
      it do
        job = job_with_or_resources
        resources = job.input_resources
        # No JobMessage until every OR resource has fired once.
        (0...resources.size-1).each do |i|
          resource = resources[i]
          params = {
            job_id: job.id,
            resource_uri: resource.uri,
            resource_unit: resource.unit,
            resource_time: Time.now.to_i,
            resource_timezone: resource.timezone
          }
          subject = JobMessage.create_if_orset(params)
          expect(subject).to be_nil
        end
        resource = resources.last
        params = {
          job_id: job.id,
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          resource_time: Time.now.to_i,
          resource_timezone: resource.timezone
        }
        # all set
        subject = JobMessage.create_if_orset(params)
        expect(subject).to be_present
        expect(subject.job_id).to eq(params[:job_id])
        expect(subject.time).to eq(params[:resource_time])
        expect(subject.timezone).to eq(params[:resource_timezone])
        # once set, a subsequent event fires the next OR event immediately
        subject = JobMessage.create_if_orset(params)
        expect(subject).to be_present
      end
    end
    context 'with resource_time in span_in_days' do
      it do
        job = job_with_single_resource
        resource = job.input_resources.first
        resource.tap {|r| r.span_in_days = 1 }.save!
        params = {
          job_id: job.id,
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          resource_time: Time.now.to_i,
          resource_timezone: resource.timezone
        }
        subject = JobMessage.create_if_orset(params)
        expect(subject).to be_present
        expect(subject.job_id).to eq(params[:job_id])
        expect(subject.time).to eq(params[:resource_time])
        expect(subject.timezone).to eq(params[:resource_timezone])
      end
    end
    context 'with resource_time not in span_in_days' do
      it do
        job = job_with_single_resource
        resource = job.input_resources.first
        resource.tap {|r| r.span_in_days = 1 }.save!
        params = {
          job_id: job.id,
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          # one second older than the 1-day window => event is ignored
          resource_time: Time.now.to_i - 24*3600 -1,
          resource_timezone: resource.timezone
        }
        subject = JobMessage.create_if_orset(params)
        expect(subject).to be_nil
      end
    end
  end

  describe 'create_if_andset' do
    context 'with single input resource' do
      it do
        job = job_with_single_resource
        resource = job.input_resources.first
        params = {
          job_id: job.id,
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          resource_time: Time.now.to_i,
          resource_timezone: resource.timezone
        }
        subject = JobMessage.create_if_andset(params)
        expect(subject).to be_present
        expect(subject.job_id).to eq(params[:job_id])
        expect(subject.time).to eq(params[:resource_time])
        expect(subject.timezone).to eq(params[:resource_timezone])
      end
    end
    context 'with multiple input resources' do
      it do
        job = job_with_and_resources
        resources = job.input_resources
        # No JobMessage until every AND resource has fired.
        (0...resources.size-1).each do |i|
          resource = resources[i]
          params = {
            job_id: job.id,
            resource_uri: resource.uri,
            resource_unit: resource.unit,
            resource_time: Time.now.to_i,
            resource_timezone: resource.timezone
          }
          subject = JobMessage.create_if_andset(params)
          expect(subject).to be_nil
        end
        resource = resources.last
        params = {
          job_id: job.id,
          resource_uri: resource.uri,
          resource_unit: resource.unit,
          resource_time: Time.now.to_i,
          resource_timezone: resource.timezone
        }
        # all set
        subject = JobMessage.create_if_andset(params)
        expect(subject).to be_present
        expect(subject.job_id).to eq(params[:job_id])
        expect(subject.time).to eq(params[:resource_time])
        expect(subject.timezone).to eq(params[:resource_timezone])
        # a subsequent event does not fire the AND event until all
        # events are set again
        subject = JobMessage.create_if_andset(params)
        expect(subject).to be_nil
      end
    end
  end
end
|
sonots/triglav
|
app/controllers/api/v1/users_controller.rb
|
## We may not need this
module Api
  module V1
    # CRUD API for User records, documented inline with swagger-blocks.
    # Only the 'local' authenticator may be managed through this API
    # (see check_authenticator!); other users are created elsewhere.
    class UsersController < ApplicationController
      before_action :authenticate!
      # before_action :admin!
      before_action :check_authenticator!, only: [:create, :update]

      include Swagger::Blocks

      swagger_path '/users' do
        operation :get do
          key :description, 'Returns all users from the system that the user has access to'
          key :operationId, 'listUsers'
          key :tags, ['users']
          security do
            key :api_key, []
          end
          response 200 do
            key :description, 'user response'
            schema do
              key :type, :array
              items do
                key :'$ref', :UserEachResponse
              end
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end

      # GET /users
      def index
        @users = User.all
        render json: @users, each_serializer: UserEachSerializer
      end

      swagger_path '/users/{id}' do
        operation :get do
          key :description, 'Returns a single user'
          key :operationId, 'getUser'
          key :tags, ['users']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id
            key :in, :path
            key :description, 'ID of user to fetch'
            key :required, true
            key :type, :integer
            key :format, :int64
          end
          response 200 do
            key :description, 'user response'
            schema do
              key :'$ref', :UserResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end

      # GET /users/:id
      def show
        set_user!
        render json: @user
      end

      swagger_path '/users' do
        operation :post do
          key :description, 'Creates a new user in the store'
          key :operationId, 'createUser'
          key :tags, ['users']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :user
            key :in, :body
            key :description, 'User to add to the store'
            key :required, true
            schema do
              key :'$ref', :UserRequest
            end
          end
          response 201 do
            key :description, 'user response'
            schema do
              key :'$ref', :UserResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end

      # POST /users
      def create
        @user = User.new(user_create_params)
        if @user.save
          render json: @user, status: 201
        else
          render json: @user.errors, status: :unprocessable_entity
        end
      end

      swagger_path '/users/{id}' do
        operation :patch do
          key :description, 'Updates a single user'
          key :operationId, 'updateUser'
          key :tags, ['users']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id
            key :in, :path
            key :description, 'ID of user to fetch'
            key :required, true
            key :type, :integer
            key :format, :int64
          end
          parameter do
            key :name, :user
            key :in, :body
            key :description, 'User parameters to update'
            key :required, true
            schema do
              key :'$ref', :UserRequest
            end
          end
          response 200 do
            key :description, 'user response'
            schema do
              key :'$ref', :UserResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end

      # PATCH/PUT /users/:id
      # NOTE: :name is intentionally absent from user_update_params,
      # so a user's name cannot be changed here.
      def update
        set_user!
        if @user.update(user_update_params)
          render json: @user
        else
          render json: @user.errors, status: :unprocessable_entity
        end
      end

      swagger_path '/users/{id}' do
        operation :delete do
          key :description, 'Deletes single user'
          key :operationId, 'deleteUser'
          key :tags, ['users']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :id
            key :in, :path
            key :description, 'ID of user to fetch'
            key :required, true
            key :type, :integer
            key :format, :int64
          end
          response 204 do
            key :description, 'deleted'
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end

      # DELETE /users/:id
      def destroy
        set_user!
        @user.destroy!
        head :no_content
      end

      private

      # Loads the user by :id into @user; raises RecordNotFound when missing.
      def set_user!
        @user = User.find(params[:id])
      end

      # Strong parameters for create (includes :name).
      def user_create_params
        params.permit(
          :name,
          :description,
          :authenticator,
          :password,
          :email,
          :groups => [],
        )
      end

      # Strong parameters for update (:name deliberately excluded).
      def user_update_params
        params.permit(
          :description,
          :authenticator,
          :password,
          :email,
          :groups => [],
        )
      end

      # Rejects any authenticator other than 'local' with HTTP 400.
      def check_authenticator!
        unless params[:authenticator] == 'local'
          raise Triglav::Error::BadRequest, 'Invalid authenticator'
        end
      end
    end
  end
end
|
sonots/triglav
|
spec/requests/api/v1/resources_spec.rb
|
# coding: utf-8
require 'rails_helper'
# Request specs for the Resource REST API (aggregated index, index,
# show, create, update, delete). Every request authenticates with an
# ApiKey access token in the Authorization header. Resources can be
# addressed by numeric id or CGI-escaped URI.
RSpec.describe 'Resource resources', :type => :request do
  let(:params) do
    {}
  end
  let(:env) do
    {
      'CONTENT_TYPE' => 'application/json',
      'HOST' => 'triglav.analytics.mbga.jp',
      'HTTP_ACCEPT' => 'application/json',
      'HTTP_AUTHORIZATION' => access_token,
    }
  end
  let(:access_token) do
    ApiKey.create(user_id: user.id).access_token
  end
  let(:user) do
    FactoryGirl.create(:user, :triglav_admin)
  end
  # Mixed fixture set: varying units, span_in_days and notifiable flags
  # to exercise the aggregation query.
  let(:resources) do
    [
      FactoryGirl.create(:resource, uri: 'hdfs://localhost/aaa.csv.gz', unit: 'singular', span_in_days: 32, consumable: true, notifiable: false),
      FactoryGirl.create(:resource, uri: 'hdfs://localhost/aaa.csv.gz', unit: 'daily', span_in_days: 32, consumable: true, notifiable: false),
      FactoryGirl.create(:resource, uri: 'hdfs://localhost/aaa.csv.gz', unit: 'hourly', span_in_days: 16, consumable: true, notifiable: false),
      FactoryGirl.create(:resource, uri: 'hdfs://localhost/aaa.csv.gz', unit: 'hourly', span_in_days: 48, consumable: true, notifiable: false),
      FactoryGirl.create(:resource, uri: 'hdfs://localhost/bbb.csv.gz', unit: 'daily', span_in_days: 32, consumable: true, notifiable: false),
      FactoryGirl.create(:resource, uri: 'hdfs://localhost/ccc.csv.gz', unit: 'daily', span_in_days: 32, consumable: true, notifiable: false),
      FactoryGirl.create(:resource, uri: 'hdfs://localhost/ccc.csv.gz', unit: 'daily', span_in_days: 32, consumable: true, notifiable: true),
    ]
  end
  let(:resource) do
    FactoryGirl.create(:resource, uri: 'hdfs://localhost/aaa.csv.gz')
  end
  describe "Get aggregated_resources", :autodoc do
    let(:description) do
      "Get aggregated resources required to be monitored (i.e., consumable = true and notifiable = false).<br/>" \
      "<br/>" \
      "`resource_prefix` query parameter is required. " \
      "Each returned resource has `uri`, `unit`, `timezone`, `span_in_days` parameters. " \
      "`unit` is `singular` or `daily` or `hourly`, or their combinations such as `daily,hourly` or `daily,hourly,singular`.<br/>" \
      "<br/>" \
      "FYI: Aggregation is operated as following SQL: " \
      "`SELECT uri, GROUP_CONCAT(DISTINCT(unit) order by unit) AS unit, timezone, MAX(span_in_days) AS span_in_days GROUP BY uri`<br/>" \
    end
    before do
      resources
    end
    let(:params) do
      { uri_prefix: 'hdfs://localhost' }
    end
    it "GET /api/v1/aggregated_resources" do
      get "/api/v1/aggregated_resources", params: params, env: env
      json = JSON.parse(response.body)
      expect(response.status).to eq 200
      # ccc.csv.gz has a notifiable row, so only aaa and bbb aggregate.
      expect(json.size).to eq 2
      expect(json[0]['unit']).to eq 'daily,hourly,singular'
    end
  end
  describe "Get resources", :autodoc do
    let(:description) do
      "Get resource index<br/>"
    end
    before do
      resources
    end
    let(:params) do
      { uri_prefix: 'hdfs://localhost' }
    end
    it "GET /api/v1/resources" do
      get "/api/v1/resources", params: params, env: env
      json = JSON.parse(response.body)
      expect(response.status).to eq 200
      expect(json.size).to eq 7
    end
  end
  describe "Get a resource" do
    let(:description) do
      'Get a resource'
    end
    it "GET /api/v1/resources/:resource_id_or_uri", :autodoc do
      get "/api/v1/resources/#{resource.id}", params: params, env: env
      expect(response.status).to eq 200
    end
    it "GET /api/v1/resources/:resource_uri" do
      get "/api/v1/resources/#{CGI.escape(resource.uri)}", params: params, env: env
      expect(response.status).to eq 200
    end
  end
  describe "Create a resource" do
    let(:description) do
      'Create a resource'
    end
    let(:resource) do
      FactoryGirl.build(:resource)
    end
    let(:params) do
      resource.attributes.slice(*ResourceSerializer.request_permit_params.map(&:to_s))
    end
    it "POST /api/v1/resources", :autodoc do
      post "/api/v1/resources", params: params.to_json, env: env
      expect(response.status).to eq 200
      expect(Resource.all.size).to eq(1)
    end
  end
  describe "Update a resource" do
    let(:description) do
      'Update a resource'
    end
    let(:resource) do
      FactoryGirl.create(:resource)
    end
    let(:params) do
      resource.attributes.slice(*ResourceSerializer.request_permit_params.map(&:to_s))
    end
    it "PUT/PATCH /api/v1/resources/:resource_id_or_uri", :autodoc do
      put "/api/v1/resources/#{resource.id}", params: params.to_json, env: env
      expect(response.status).to eq 200
      expect(Resource.all.size).to eq(1)
    end
    it "PUT/PATCH /api/v1/resources/:resource_uri" do
      put "/api/v1/resources/#{CGI.escape(resource.uri)}", params: params.to_json, env: env
      expect(response.status).to eq 200
      expect(Resource.all.size).to eq(1)
    end
  end
  describe "Delete a resource" do
    let(:description) do
      'Delete a resource'
    end
    let(:resource) do
      FactoryGirl.create(:resource)
    end
    it "DELETE /api/v1/resources/:resource_id_or_uri", :autodoc do
      delete "/api/v1/resources/#{resource.id}", params: params, env: env
      expect(response.status).to eq 204
    end
    it "DELETE /api/v1/resources/:resource_uri" do
      delete "/api/v1/resources/#{CGI.escape(resource.uri)}", params: params, env: env
      expect(response.status).to eq 204
    end
  end
end
|
sonots/triglav
|
app/models/job_internal_message.rb
|
<reponame>sonots/triglav
# Internal representation used to track AND/OR conditions of Messages
# while deciding whether to produce a JobMessage.
#
# See job_message.rb (JobMessage.create_if_orset / create_if_andset).
class JobInternalMessage < ApplicationRecord
end
|
sonots/triglav
|
app/models/resource.rb
|
# frozen_string_literal: true

# A Resource represents a monitored data location (e.g. an HDFS path)
# at a given granularity (unit) and timezone. Jobs reference resources
# as inputs/outputs through join models.
class Resource < ApplicationRecord
  has_many :messages, primary_key: 'uri', foreign_key: 'resource_uri'

  validates :unit, inclusion: { in: %w(singular daily hourly) } # monthly, streaming support?
  validates :timezone, presence: true, format: { with: /\A[+-]\d\d:\d\d\z/ }
  before_save :set_default

  has_many :jobs_input_resources, dependent: :destroy
  has_many :jobs_output_resources, dependent: :destroy
  has_many :input_jobs, through: :jobs_input_resources, source: 'job' # reverse of job.input_resources
  has_many :output_jobs, through: :jobs_output_resources, source: 'job' # reverse of job.output_resources

  # Fill in configured defaults before save. Returns true so the
  # before_save callback chain is never halted.
  def set_default
    self.timezone ||= Settings.resource.default_timezone
    self.span_in_days ||= Settings.resource.default_span_in_days
    true
  end

  # Update the record matching params['id'] if it exists, otherwise
  # create a new one.
  #
  # @param params [Hash] attributes (string keys, as from a controller)
  # @return [Resource]
  def self.create_or_update!(params)
    if resource = self.find_by(id: params['id'])
      resource.update!(params)
      resource
    else
      self.create!(params)
    end
  end

  # Whether +time+ falls within the last span_in_days from now,
  # evaluated in this resource's timezone.
  #
  # @param time [#to_i] epoch seconds
  # @return [Boolean, nil] nil when span_in_days is unset
  def in_span?(time)
    return nil if span_in_days.nil?
    now = Time.now.localtime(timezone)
    from = now - (span_in_days * 86400)
    time.to_i >= from.to_i
  end

  # @param [Hash] params
  # @option params [String] :uri_prefix
  # @return [ActiveRecord::Relation] aggregated consumable resources
  #   that are required to be monitored (no notifiable row for the uri)
  #
  # @todo: Using materialized view is better (In mysql, insert select with trigger)
  #
  # EXAMPLE:
  #   uri       unit    span_in_days consumable notifiable
  #   hdfs://a  hourly  10           true       false
  #   hdfs://a  daily   32           true       false
  #   hdfs://b  hourly  32           true       false
  #   hdfs://b  hourly  32           true       true
  #
  #   non_notifiable_uris =>
  #     ['hdfs://a']
  #
  #   aggregated_resources =>
  #     uri       unit          span_in_days
  #     hdfs://a  daily,hourly  32
  def self.aggregated_resources(params)
    # FIX: error message previously read ':uri_prfix' (typo).
    raise ArgumentError, ':uri_prefix is required' unless params[:uri_prefix]
    # URIs where no row is notifiable (BIT_OR over the boolean column).
    non_notifiable_uris = Resource.
      where('uri LIKE ?', "#{params[:uri_prefix]}%").
      group('uri').
      having('BIT_OR(notifiable) = false').
      pluck('uri')
    # Collapse units per uri and take the widest span (the relation is
    # the method's return value; no local assignment needed).
    Resource.
      select('uri, GROUP_CONCAT(DISTINCT(unit) order by unit) AS unit, timezone, MAX(span_in_days) AS span_in_days').
      where(uri: non_notifiable_uris).
      where(consumable: true).
      group('uri', 'timezone').
      order('uri')
  end
end
|
sonots/triglav
|
app/serializers/job_each_serializer.rb
|
# Serializer for Job list entries, plus the matching Swagger schema.
class JobEachSerializer < ActiveModel::Serializer
  attributes :id, :uri, :logical_op, :created_at, :updated_at

  include Swagger::Blocks

  # Response schema for one element of the job index.
  swagger_schema :JobEachResponse do
    property :id do
      key :type, :integer
      key :format, :int64
    end
    property :uri do
      key :type, :string
    end
    property :logical_op do
      key :type, :string
    end
    property :created_at do
      key :type, :string
      key :format, :"date-time"
    end
    property :updated_at do
      key :type, :string
      key :format, :"date-time"
    end
  end
end
|
sonots/triglav
|
lib/triglav/rack/access_logger.rb
|
require 'time'
module Triglav
  module Rack
    # Rack middleware that writes one JSON line per request to the
    # given logger, including timestamp, vhost, method, path, response
    # status/size and request duration (reqtime, seconds as Float).
    class AccessLogger
      PID = Process.pid

      # @param app [#call] downstream Rack application
      # @param logger [#write] log sink; defaults to $stdout
      def initialize(app, logger = nil)
        @app = app
        @logger = logger || $stdout
      end

      # Rack entry point: forwards to the wrapped app, then logs.
      def call(env)
        # FIX: measure duration with the monotonic clock, which is
        # immune to wall-clock jumps (NTP adjustments etc.), instead
        # of the previous Time.now/instance_eval arithmetic.
        began_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
        status, headers, body = @app.call(env)
        now = Time.now
        reqtime = Process.clock_gettime(Process::CLOCK_MONOTONIC) - began_at
        params = {
          time: now.iso8601,
          logtime: now.to_i,
          d: now.strftime("%Y-%m-%d"),
          # host: env["REMOTE_ADDR"] || "-", # useless, nginx
          vhost: env['HTTP_HOST'] || "-",
          # pid: PID,
          # forwardedfor: env['HTTP_X_FORWARDED_FOR'] || "-",
          # user: env["REMOTE_USER"] || "-",
          method: env["REQUEST_METHOD"],
          path: env["PATH_INFO"],
          # query: env["QUERY_STRING"].empty? ? "" : "?"+env["QUERY_STRING"],
          # protocol: env["HTTP_VERSION"],
          ua: env['HTTP_USER_AGENT'] || "-",
          status: status.to_s[0..3],
          size: extract_content_length(headers) || -1,
          reqtime: reqtime,
        }
        # See ApplicationController for 'triglav_access_log'
        params.merge!(env['triglav_access_log']) if env['triglav_access_log']
        @logger.write params.to_json << "\n"
        [status, headers, body]
      end

      private

      # Content-Length header as an Integer, or nil when absent.
      def extract_content_length(headers)
        value = headers['Content-Length'] or return nil
        value.to_i
      end
    end
  end
end
|
sonots/triglav
|
app/controllers/api/v1/messages_controller.rb
|
<filename>app/controllers/api/v1/messages_controller.rb
module Api
  module V1
    # API for producer-sent Messages: fetch a page (index), enqueue in
    # bulk (create), and read the current last id (last_id) which
    # clients use as their initial fetch offset.
    class MessagesController < ApplicationController
      include Swagger::Blocks
      # GET /messages
      # GET /messages.json
      #
      # MEMO: Query can not use schema type
      swagger_path '/messages' do
        operation :get do
          key :description, 'Fetch messages'
          key :operationId, 'fetchMessages'
          key :tags, ['messages']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :offset
            key :in, :query
            key :description, 'Offset (Greater than or equal to) ID for Messages to list from'
            key :required, true
            key :type, :integer
            key :format, :int64
          end
          parameter do
            key :name, :limit
            key :in, :query
            key :description, 'Number of limits'
            key :required, false
            key :type, :integer
            key :format, :int64
          end
          parameter do
            key :name, :resource_uris
            key :in, :query
            key :description, 'URIs of Resource'
            key :required, false
            schema do
              key :type, :array
              items do
                key :type, :string
              end
            end
          end
          response 200 do
            key :description, 'message response'
            schema do
              key :type, :array
              items do
                key :'$ref', :MessageEachResponse
              end
            end
          end
          response :unprocessable_entity do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end

      # GET /messages
      # GET /messages.json
      # Pages through messages by ascending id, optionally filtered by
      # resource_uri; :limit defaults to 100.
      def index
        @messages = Message.where("id >= ?", params.require(:offset))
        if params[:resource_uris].present?
          case params[:resource_uris]
          when ActionController::Parameters
            # swagger client passes an array value as a hash like {"0"=>val} in query parameter
            @messages = @messages.where(resource_uri: params[:resource_uris].to_unsafe_h.values)
          else
            @messages = @messages.where(resource_uri: params[:resource_uris])
          end
        end
        @messages = @messages.order(id: :asc)
        @messages = @messages.limit(params[:limit] || 100)
        render json: @messages, each_serializer: MessageEachSerializer
      end

      # POST /messages
      # POST /messages.json
      swagger_path '/messages' do
        operation :post do
          key :description, 'Enqueues new messages'
          key :operationId, 'sendMessages'
          key :tags, ['messages']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :messages
            key :in, :body
            key :description, 'Messages to enqueue'
            key :required, true
            schema do
              key :type, :array
              items do
                key :'$ref', :MessageRequest
              end
            end
          end
          response 201 do
            key :description, 'bulkinsert response'
            schema do
              key :'$ref', :BulkinsertResponse
            end
          end
          response :default do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      swagger_schema :BulkinsertResponse do
        property :num_inserts do
          key :description, 'Number of inserts'
          key :type, :integer
        end
      end

      # POST /messages
      # POST /messages.json
      # Bulk-inserts the posted messages; DB-level failures surface as
      # 422 with the error class/message and backtrace.
      def create
        begin
          result = Message.create_messages(messages_params)
          render json: {num_inserts: result[:num_inserts]}
        rescue ActiveRecord::StatementInvalid => e
          render json: {error: "#{e.class} #{e.message}", backtrace: e.backtrace}, status: :unprocessable_entity
        end
      end

      swagger_path '/messages/last_id' do
        operation :get do
          key :description, 'Get the current last message id which would be used as a first offset to fetch messages'
          key :operationId, 'getLastMessageId'
          key :tags, ['messages']
          security do
            key :api_key, []
          end
          response 200 do
            key :description, 'last message id response'
            schema do
              key :'$ref', :LastMessageIdResponse
            end
          end
          response :unprocessable_entity do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      swagger_schema :LastMessageIdResponse do
        property :id do
          key :description, 'last message id'
          key :type, :integer
          key :format, :int64
        end
      end

      # Returns 0 when the table is empty.
      def last_id
        last_id = Message.last&.id || 0
        render json: {id: last_id}
      end

      private

      # Permits the JSON array body (_json) of message attributes.
      def messages_params
        params.permit(_json: MessageSerializer.request_permit_params, message: {})[:_json]
      end
    end
  end
end
|
sonots/triglav
|
spec/models/user_spec.rb
|
<reponame>sonots/triglav
require 'rails_helper'
RSpec.describe User, type: :model do
before(:all) { User.delete_all }
describe '#authenticator=' do
context 'with local' do
it do
expect(User.create(authenticator: 'local', password: '<PASSWORD>')).to be_valid
end
end
context 'others' do
it do
expect(User.create(authenticator: 'foo')).not_to be_valid
end
end
end
describe '#password=' do
context 'with valid length' do
it do
expect(User.create(authenticator: 'local', password: '<PASSWORD>')).to be_valid
end
end
context 'shorther length' do
it do
expect(User.create(authenticator: 'local', password: '<PASSWORD>')).not_to be_valid
end
end
context 'longer length' do
it do
expect(User.create(authenticator: 'local', password: 'f' * 1000)).not_to be_valid
end
end
end
describe '#encrypt_password' do
it do
user = User.create(authenticator: 'local', password: '<PASSWORD>')
expect(user.salt).to be_present
expect(user.encrypted_password).to be_present
end
end
describe '#clear_password' do
it do
user = User.create(authenticator: 'local', password: '<PASSWORD>')
expect(user.password).to be_nil
end
end
describe '#match_password?' do
let(:password) { '<PASSWORD>' }
let(:user) { User.create(authenticator: 'local', password: password) }
context 'with valid password' do
it do
expect(user.match_password?(password)).to be_truthy
end
end
context 'with invalid password' do
it do
expect(user.match_password?('something_wrong')).to be_falsey
end
end
end
describe '#authenticate' do
let(:username) { 'foobar' }
let(:password) { '<PASSWORD>' }
let(:user) { User.create(name: username, authenticator: 'local', password: password) }
before { user }
context 'with valid name, password' do
it do
expect(User.authenticate(username: username, password: password).class).to eql(User)
end
end
context 'with invalid name' do
it do
expect(User.authenticate(username: 'something_wrong', password: password)).to be_falsey
end
end
context 'with invalid password' do
it do
expect(User.authenticate(username: username, password: '<PASSWORD>')).to be_falsey
end
end
end
describe '#find_by_access_token' do
before do
@user = User.create(name: 'foobar', authenticator: 'local', password: '<PASSWORD>')
@api_key = ApiKey.create(user_id: @user.id)
end
it do
user = User.find_by_access_token(@api_key.access_token)
expect(user.id).to eql(@user.id)
end
end
describe '#invalidate_api_keys' do
before do
@user = User.create(name: 'foobar', authenticator: 'local', password: '<PASSWORD>')
@api_key = ApiKey.create(user_id: @user.id)
@user.destroy
end
it do
user = User.find_by_access_token(@api_key.access_token)
expect(user).to be_nil
end
end
end
|
sonots/triglav
|
app/models/api_key.rb
|
<gh_stars>1-10
require 'securerandom'
# API access token for a user. Tokens are random hex strings with an
# expiry window configured by Settings.authentication.expire (days).
class ApiKey < ApplicationRecord
  belongs_to :user
  validates :user_id, :presence => true

  before_create :generate_access_token
  before_create :extend_expiration

  scope :expired, -> { where('expires_at < ?', Time.current) }

  # True when this key's expiry time has passed.
  def expired?
    self.expires_at < Time.current
  end

  # Push the expiry forward and record the access time. Returns self.
  def extend_expiration
    self.expires_at = ApiKey.expires_at
    self.last_accessed_at = Time.current
    self
  end

  # True when no key exists for the token, or the key has expired.
  def self.expired?(access_token:)
    api_key = self.find_by(access_token: access_token)
    api_key ? api_key.expired? : true
  end

  # Remove all keys belonging to a user (e.g. on user deletion).
  def self.destroy_for_user(user_id)
    ApiKey.where(user_id: user_id).destroy_all
  end

  # Default expiry timestamp for newly issued/extended keys.
  # FIX: moved above `private` — `private` never applies to
  # `def self.` methods, so its previous placement only implied a
  # visibility the method never had.
  def self.expires_at
    (Time.current + Settings.authentication.expire.days)
  end

  private

  # Generate a unique token, retrying on the (unlikely) collision.
  # `loop`/`break` replaces the begin/end-while do-while idiom.
  def generate_access_token
    loop do
      self.access_token = SecureRandom.hex
      break unless self.class.exists?(access_token: access_token)
    end
    access_token
  end
end
|
sonots/triglav
|
spec/requests/api/v1/users_spec.rb
|
<gh_stars>1-10
# coding: utf-8
require 'rails_helper'
# Request specs for the User REST API. All requests authenticate with
# an ApiKey access token belonging to a triglav_admin user.
RSpec.describe 'User resources', :type => :request do
  let(:params) do
    {}
  end
  let(:env) do
    {
      'CONTENT_TYPE' => 'application/json',
      'HOST' => 'triglav.analytics.mbga.jp',
      'HTTP_ACCEPT' => 'application/json',
      'HTTP_AUTHORIZATION' => access_token,
    }
  end
  let(:access_token) do
    ApiKey.create(user_id: user.id).access_token
  end
  let(:user) do
    FactoryGirl.create(:user, :triglav_admin)
  end
  describe "Get user index", :autodoc do
    let(:description) do
      "Returns user index<br/>" \
      "Group is not included<br/>"
    end
    before do
      FactoryGirl.create(:user, :project_admin, name: 'project_admin')
      FactoryGirl.create(:user, :editor, name: 'editable_user')
      FactoryGirl.create(:user, :read_only, name: 'read_only_user_1')
      FactoryGirl.create(:user, :read_only, name: 'read_only_user_2')
      FactoryGirl.create(:user, :read_only, name: 'read_only_user_3')
    end
    it "GET /api/v1/users" do
      get "/api/v1/users", params: params, env: env
      expect(response.status).to eq 200
    end
  end
  describe "Get a user", :autodoc do
    let(:description) do
      "Get a user"
    end
    it "GET /api/v1/users/:user_id" do
      get "/api/v1/users/#{user.id}", params: params, env: env
      expect(response.status).to eq 200
    end
  end
  describe "Create a user", :autodoc do
    let(:description) do
      "Create a user<br/>" \
      "`authenticator` parameter accepts only `local`<br/>" \
      "`ldap` users are automatically created on authentication<br/>" \
      "Specify an Array for `groups`"
    end
    let(:params) do
      FactoryGirl.attributes_for(:user, :read_only, name: 'new user').merge(password: '<PASSWORD>')
    end
    it "POST /api/v1/users" do
      post "/api/v1/users", params: params.to_json, env: env
      expect(response.status).to eq 201
    end
  end
  describe '#check_authenticator!' do
    let(:params) do
      FactoryGirl.attributes_for(
        :user, :read_only, name: 'new user'
      ).merge(
        password: '<PASSWORD>',
        authenticator: 'something_wrong',
      )
    end
    it "POST /api/v1/users" do
      # Non-'local' authenticators are rejected with HTTP 400.
      post "/api/v1/users", params: params.to_json, env: env
      expect(response.status).to eq 400
    end
  end
  describe "Update user", :autodoc do
    let(:description) do
      "Update a user<br/>" \
      "`authenticator` parameter accepts only `local`<br/>" \
      "`name` cannot be changed (ignored even if specified)<br/>"
    end
    let(:target_user) do
      FactoryGirl.create(:user, :project_admin, name: 'original name')
    end
    let(:params) do
      attrs = FactoryGirl.attributes_for(:user, :read_only, name: 'new user')
      attrs[:name] = 'try to update name'
      attrs[:description] = 'try to update description'
      attrs[:groups] = ['editor', 'group1', 'group2']
      attrs
    end
    it "PUT/PATCH /api/v1/users/:user_id" do
      put "/api/v1/users/#{target_user.id}", params: params.to_json, env: env
      expect(response.status).to eq 200
    end
  end
  describe "Delete a user", :autodoc do
    let(:description) do
      "Delete a user"
    end
    let(:target_user) do
      FactoryGirl.create(:user, :project_admin, name: 'delete target')
    end
    it "DELETE /api/v1/users/:user_id" do
      delete "/api/v1/users/#{target_user.id}", params: params, env: env
      expect(response.status).to eq 204
    end
  end
end
|
sonots/triglav
|
app/models/jobs_output_resource.rb
|
# Join model linking a Job to one of its output Resources.
class JobsOutputResource < ApplicationRecord
  belongs_to :job
  belongs_to :resource
end
|
sonots/triglav
|
app/controllers/api/v1/job_messages_controller.rb
|
<gh_stars>1-10
module Api
  module V1
    # Read-only API for JobMessages: fetch a page (index) and read the
    # current last id (last_id) used as the initial fetch offset.
    class JobMessagesController < ApplicationController
      include Swagger::Blocks
      # GET /job_messages
      # GET /job_messages.json
      #
      # MEMO: Query can not use schema type
      swagger_path '/job_messages' do
        operation :get do
          key :description, 'Fetch Job messages'
          key :operationId, 'fetchJobMessages'
          key :tags, ['jobMessages']
          security do
            key :api_key, []
          end
          parameter do
            key :name, :offset
            key :in, :query
            key :description, 'Offset (Greater than or equal to) ID for Messages to fetch from'
            key :required, true
            key :type, :integer
            key :format, :int64
          end
          parameter do
            key :name, :limit
            key :in, :query
            key :description, 'Number of limits'
            key :required, false
            key :type, :integer
            key :format, :int64
          end
          parameter do
            key :name, :job_id
            key :in, :query
            key :description, 'Job ID'
            key :required, true
            key :type, :integer
            key :format, :int64
          end
          response 200 do
            key :description, 'message response'
            schema do
              key :type, :array
              items do
                key :'$ref', :JobMessageEachResponse
              end
            end
          end
          response :unprocessable_entity do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end

      # GET /job_messages
      # GET /job_messages.json
      # Pages by ascending id for one job; :limit defaults to 100.
      def index
        @messages = JobMessage.where("id >= ?", params.require(:offset))
        @messages = @messages.where(job_id: params.require(:job_id))
        @messages = @messages.order(id: :asc)
        @messages = @messages.limit(params[:limit] || 100)
        render json: @messages, each_serializer: JobMessageEachSerializer
      end

      swagger_path '/job_messages/last_id' do
        operation :get do
          key :description, 'Get the current last message id which would be used as a first offset to fetch messages'
          key :operationId, 'getLastJobMessageId'
          key :tags, ['jobMessages']
          security do
            key :api_key, []
          end
          response 200 do
            key :description, 'last message id response'
            schema do
              key :'$ref', :LastJobMessageIdResponse
            end
          end
          response :unprocessable_entity do
            key :description, 'unexpected error'
            schema do
              key :'$ref', :ErrorModel
            end
          end
        end
      end
      swagger_schema :LastJobMessageIdResponse do
        property :id do
          key :description, 'last message id'
          key :type, :integer
          key :format, :int64
        end
      end

      # Returns 0 when the table is empty.
      def last_id
        last_id = JobMessage.last&.id || 0
        render json: {id: last_id}
      end
    end
  end
end
|
sonots/triglav
|
lib/rails_runner.rb
|
<gh_stars>1-10
# Boots a Rails runner environment programmatically (outside of
# `bin/rails runner`), so scripts can require this file and run with
# the full application loaded.
# cf. http://qiita.com/sonots/items/2bf7f15adb40c012a643
APP_PATH = File.expand_path('../../config/application', __FILE__) unless defined?(APP_PATH)
require_relative '../config/boot'
# Resolve the environment from RAILS_ENV/RACK_ENV, defaulting to development.
options = { environment: (ENV['RAILS_ENV'] || ENV['RACK_ENV'] || "development").dup }
ENV["RAILS_ENV"] = options[:environment]
require APP_PATH
Rails.application.require_environment!
Rails.application.load_runner
|
sonots/triglav
|
spec/factories/users.rb
|
<filename>spec/factories/users.rb
# Factory for User records. Each trait assigns the single group that
# maps to one of the application's permission levels.
FactoryGirl.define do
  factory :user do
    sequence(:name) { |i| "user #{i}" }
    description { "description for #{name}" }
    authenticator { 'local' }
    email { "<EMAIL>" }
    groups { [] }

    # Bypass validations so factories can persist partially-filled users.
    to_create { |instance| instance.save validate: false }

    trait :triglav_admin do
      groups { ["triglav_admin"] }
    end

    trait :project_admin do
      groups { ['project_admin'] }
    end

    trait :editor do
      groups { ['editor'] }
    end

    trait :read_only do
      groups { ['read_only'] }
    end
  end
end
|
sonots/triglav
|
lib/triglav/error/invalid_authenticity_credential.rb
|
<gh_stars>1-10
module Triglav
  module Error
    # Raised when authentication fails because the supplied credential
    # does not match; carries HTTP status 401.
    class InvalidAuthenticityCredential < Triglav::Error::StandardError
      def initialize
        # @code / @message are presumably read by Triglav::Error::StandardError
        # when rendering the API error response — TODO confirm against the base class.
        @code = 401
        @message = 'Not authenticated. Invalid authenticity credential.'
      end
    end
  end
end
|
sonots/triglav
|
app/models/message.rb
|
<reponame>sonots/triglav
# Agent (or Producer) sends a Message as an event
class Message < ApplicationRecord
  belongs_to :resource, primary_key: 'uri', foreign_key: 'resource_uri'

  validates :resource_uri, presence: true
  validates :resource_unit, presence: true, inclusion: { in: %w(singular daily hourly) }
  validates :resource_time, presence: true, numericality: { only_integer: true }
  validates :resource_timezone, presence: true, format: { with: /\A[+-]\d\d:\d\d\z/ }
  validates :payload, json: true

  # Bulk-create messages inside a single transaction.
  #
  # @param params_list [Array<Hash>] attributes for each message to insert
  # @return [Hash] {num_inserts: Integer} — number of rows actually inserted
  #   (duplicate uuids are skipped, so this may be less than params_list.size)
  def self.create_messages(params_list)
    ActiveRecord::Base.transaction do
      before = count # single COUNT(*) query; clearer than `all.size`
      params_list.each do |params|
        record = build_with_job_message(params)
        record.save! if record
      end
      { num_inserts: count - before }
    end
  end

  # Build a Message and fan out JobMessages to every job that subscribes to
  # the resource identified by (uri, unit, timezone).
  #
  # Returns nil (so the caller skips saving) when a message with the same
  # uuid already exists — this makes producer retries idempotent.
  #
  # @param params [Hash] must include :resource_uri, :resource_unit, :resource_timezone
  # @return [Message, nil] unsaved Message instance, or nil on duplicate uuid
  # @raise [RuntimeError] when a required resource attribute is missing
  def self.build_with_job_message(params)
    return nil if params[:uuid] && Message.find_by(uuid: params[:uuid])

    resource_uri      = params[:resource_uri]      || raise('resource_uri is required')
    resource_unit     = params[:resource_unit]     || raise('resource_unit is required')
    resource_timezone = params[:resource_timezone] || raise('resource_timezone is required')

    resource_ids = Resource.where(
      uri: resource_uri,
      unit: resource_unit,
      timezone: resource_timezone
    ).pluck(:id)
    job_ids = JobsInputResource.where(resource_id: resource_ids).pluck(:job_id)

    job_ids.each do |job_id|
      job = Job.find_by(id: job_id)
      params_with_job = params.merge(job_id: job_id)
      # Safe navigation on `job`: guards against the job being deleted between
      # the pluck above and this lookup (the original would raise NoMethodError).
      if job&.logical_op&.downcase == 'and'
        JobMessage.create_if_andset(params_with_job)
      else
        JobMessage.create_if_orset(params_with_job)
      end
    end
    new(params)
  end
end
|
sonots/triglav
|
benchmark/fetch_messages.rb
|
<filename>benchmark/fetch_messages.rb
require_relative 'client'
require 'parallel'
require 'optparse'

# Benchmark: measure fetch_messages throughput against a triglav server.
#
# Options:
#   -p  number of parallel worker processes (default: 2)
#   -d  benchmark duration in seconds       (default: 1)
#   -o  message offset to fetch from        (default: 0)
#   -u  triglav base URL                    (default: http://localhost:7800)
#
# BUG FIX: 'u:' was missing from the getopts spec, so params['u'] was always
# nil and passing -u made getopts abort — the URL option could never be used.
params = ARGV.getopts('p:d:o:u:')
num_parallels = Integer(params['p'] || 2)
duration = Integer(params['d'] || 1)
offset = Integer(params['o'] || 0)
triglav_url = params['u'] || 'http://localhost:7800'
puts "-p (num_parallels) #{num_parallels} -d (duration) #{duration} -u (triglav_url) #{triglav_url} -o (offset) #{offset}"

client = Client.new(url: triglav_url)
started = Time.now

# Each worker process loops, issuing fetches in batches of 10, until the
# duration elapses; it returns its own request count.
counts = Parallel.map(1..num_parallels, in_processes: num_parallels) do |_i|
  count = 0
  loop do
    10.times do
      count += 1
      resource_uri = "hdfs://host:port/#{count}/path/to/resource"
      client.fetch_messages(offset, limit: 100, resource_uris: [resource_uri])
    end
    break if Time.now - started > duration
  end
  count
end

elapsed = Time.now - started
puts "#{counts.inject(:+) / elapsed.to_f} request / sec"
puts "#{counts.inject(:+) / num_parallels.to_f / elapsed.to_f} request / sec / process"
|
sonots/triglav
|
config/puma.rb
|
# Puma configuration, with optional hot-restart support via Server::Starter.
APP_ROOT = File.dirname(File.dirname(__FILE__))
require 'server/starter/puma_listener'
listener = ::Server::Starter::PumaListener

# NOTE(review): status_file is assigned but not referenced in this file;
# presumably kept to document the start_server status path — confirm before removing.
status_file = File.join(APP_ROOT, 'tmp/pids/start_server.stat')
pidfile File.join(APP_ROOT, 'tmp/pids/puma.pid')
state_path File.join(APP_ROOT, 'tmp/pids/puma.state')

# Run puma via start_puma.rb to configure PUMA_INHERIT_\d ENV from SERVER_STARTER_PORT ENV as
# $ bundle exec --keep-file-descriptors start_puma.rb puma -C config/puma.conf.rb config.ru
# (Removed a stray debug `puts ENV['SERVER_STARTER_PORT']` that leaked the raw
# env value to stdout on every boot.)
if ENV['SERVER_STARTER_PORT']
  # Inherit listening sockets from Server::Starter for zero-downtime restarts.
  puma_inherits = listener.listen
  puma_inherits.each do |puma_inherit|
    bind puma_inherit[:url]
  end
else
  puts '[WARN] Fallback to 0.0.0.0:7800 since not running under Server::Starter'
  bind 'tcp://0.0.0.0:7800'
end

# Specifies the `environment` that Puma will run in.
#
environment ENV.fetch("RAILS_ENV") { "development" }

# Puma can serve each request in a thread from an internal thread pool.
# The `threads` method setting takes two numbers a minimum and maximum.
# Any libraries that use thread pools should be configured to match
# the maximum value specified for Puma. Default is set to 5 threads for minimum
# and maximum, this matches the default thread size of Active Record.
#
threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 }.to_i
threads threads_count, threads_count

# Specifies the number of `workers` to boot in clustered mode.
# Workers are forked webserver processes. If using threads and workers together
# the concurrency of the application would be max `threads` * `workers`.
# Workers do not work on JRuby or Windows (both of which do not support
# processes).
#
workers ENV.fetch("WEB_CONCURRENCY") { 2 }

# Use the `preload_app!` method when specifying a `workers` number.
# This directive tells Puma to first boot the application and load code
# before forking the application. This takes advantage of Copy On Write
# process behavior so workers use less memory. If you use this option
# you need to make sure to reconnect any threads in the `on_worker_boot`
# block.
#
# MEMO: Hmm, I get WARNING: Detected 18 Thread(s) started in app boot:
# preload_app!

# Allow puma to be restarted by `rails restart` command.
# plugin :tmp_restart

# Code to run before doing a restart. This code should
# close log files, database connections, etc.
# This can be called multiple times to add code each time.
on_restart do
  puts 'On restart...'
end

# Code to run when a worker boots to setup the process before booting
# the app. This can be called multiple times to add hooks.
on_worker_boot do
  puts 'On worker boot...'
  # Drop inherited DB connections so each forked worker reconnects cleanly.
  defined?(ActiveRecord::Base) and ActiveRecord::Base.connection.disconnect!
end

# Code to run when a worker boots to setup the process after booting
# the app. This can be called multiple times to add hooks.
after_worker_boot do
  puts 'After worker boot...'
  defined?(ActiveRecord::Base) and ActiveRecord::Base.establish_connection
end

# Code to run when a worker shutdown.
on_worker_shutdown do
  puts 'On worker shutdown...'
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.