repo_name (stringlengths 6–97) | path (stringlengths 3–341) | text (stringlengths 8–1.02M)
|---|---|---|
cervinka/jira-worklog
|
lib/jira/worklog/api_client.rb
|
require 'rest-client'
require 'json'
require 'date'
class APIClient
attr_reader :user, :password, :url_base
def initialize(url_base, user, password)
@user = user
@password = password
@url_base = url_base
end
def issues
data = request("search?jql=assignee=#{user}")
data['issues'].map { |issue| {key: issue['key'], id: issue['id'], summary: issue['fields']['summary']} }
end
def delete_worklog(issue, worklog_id)
delete_request("issue/#{issue}/worklog/#{worklog_id}")
end
def add_worklog(worklog)
req = {
comment: worklog[:comment],
started: worklog[:date].strftime('%Y-%m-%d') + 'T07:00:00.000+0100',
timeSpentSeconds: worklog[:duration]
}
data = post_request("issue/#{worklog[:issue]}/worklog", req.to_json)
# puts "added: #{worklog.inspect}"
data['id']
end
def worklogs(issue)
data = request("issue/#{issue}/worklog")
# pp data
data['worklogs'].map { |worklog|
{
duration: worklog['timeSpentSeconds'],
comment: worklog['comment'],
issue_id: worklog['issueId'],
id: worklog['id'],
date: Date.strptime(worklog['started'][0..9], '%Y-%m-%d')
}
}
end
private
def request(path)
url = url_base + path
resp = RestClient::Request.execute method: :get, url: url, user: user, password: password, :verify_ssl => false
JSON.parse resp.body
end
def delete_request(path)
url = url_base + path
RestClient::Request.execute method: :delete, url: url, user: user, password: password, :verify_ssl => false
end
def post_request(path, payload)
url = url_base + path
resp = RestClient::Request.execute method: :post, url: url, user: user, password: password, :verify_ssl => false, payload: payload, headers: {'Content-Type' => 'application/json; charset=utf-8'}
JSON.parse resp.body
end
end
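# A minimal usage sketch for the client above (illustrative values only): it
# assumes url_base already ends with the Jira REST prefix, e.g. ".../rest/api/2/",
# since request paths are concatenated directly onto it.
#
#   client = APIClient.new('https://jira.example.com/rest/api/2/', 'jdoe', 'secret')
#   client.issues.each { |i| puts "#{i[:key]}  #{i[:summary]}" }
#   id = client.add_worklog(issue: 'PROJ-1', comment: 'code review', date: Date.today, duration: 3600)
#   client.delete_worklog('PROJ-1', id)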
|
hswick/sandoz
|
lib/sandoz/version.rb
|
<gh_stars>1-10
module Sandoz
VERSION = "0.1.52"
end
|
hswick/sandoz
|
lib/cli.rb
|
<filename>lib/cli.rb
require 'thor'
module Sandoz
class Cli < Thor
desc "hello", "Say hello"
def hello
puts "Well, I met a girl called Sandoz\nAnd she taught me many, many things\nGood things, very good things, sweet things."
end
desc "new", "Take a hit and create a new sandoz project"
def new
`say Well, I met a girl called Sandoz`
end
end
end
|
hswick/sandoz
|
sandoz.gemspec
|
# coding: utf-8
require File.expand_path('../lib/sandoz/version', __FILE__)
Gem::Specification.new do |s|
s.name = 'sandoz'
s.version = Sandoz::VERSION
s.authors = ["<NAME>"]
s.email = ["<EMAIL>"]
s.summary = "A gem named Sandoz."
s.description = "Ruby p5.js wrapper for trippy visuals."
s.homepage = "https://github.com/hswick/sandoz"
s.license = "MIT"
if s.respond_to?(:metadata)
s.metadata['allowed_push_host'] = "https://rubygems.org"
else
raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
end
s.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
s.bindir = "bin"
s.executables = ["sandoz"]
s.require_paths = ["lib"]
s.add_development_dependency 'bundler', '~> 1.11'
s.add_dependency 'opal', '~> 0.9'
s.add_dependency 'thor', '~> 0.19.1'
end
|
hswick/sandoz
|
lib/sandoz/sandoz.rb
|
<reponame>hswick/sandoz<filename>lib/sandoz/sandoz.rb
module Sandoz
# https://github.com/processing/p5.js/wiki/p5.js-overview#instantiation--namespace
# TODO Add html element argument
def defsketch(id, &block)
sketch = Proc.new do |p|
init(p)
block.call
end
@p5 = `new p5(#{sketch}, #{id})`
end
def view_p
`return #{@p5}`
end
def init(p)
@@p = p
end
def size(w, h)
`#{@@p}.createCanvas(#{w}, #{h})`
end
def background(r, g=nil, b=nil)
if g == nil && b == nil
`#{@@p}.background(#{r})`
else
`#{@@p}.background(#{r}, #{g}, #{b})`
end
end
def fill(r, g=nil, b=nil, a=nil)
if g==nil && b ==nil
`#{@@p}.fill(#{r})`
elsif a == nil
`#{@@p}.fill(#{r}, #{g}, #{b})`
else
`#{@@p}.fill(#{r}, #{g}, #{b}, #{a})`
end
end
def rect(x, y, w, h)
`#{@@p}.rect(#{x}, #{y}, #{w}, #{h})`
end
def ellipse(x, y, w, h)
`#{@@p}.ellipse(#{x}, #{y}, #{w}, #{h})`
end
def width
`#{@@p}.width`
end
def height
`#{@@p}.height`
end
def line(x1, y1, x2, y2)
`#{@@p}.line(#{x1}, #{y1}, #{x2}, #{y2})`
end
def point(x, y)
`#{@@p}.point(#{x}, #{y})`
end
def stroke(r, g=nil, b=nil, a=nil)
if g==nil && b ==nil
`#{@@p}.stroke(#{r})`
elsif a == nil
`#{@@p}.stroke(#{r}, #{g}, #{b})`
else
`#{@@p}.stroke(#{r}, #{g}, #{b}, #{a})`
end
end
def no_stroke
`#{@@p}.noStroke()`
end
def stroke_weight(weight)
`#{@@p}.strokeWeight(#{weight})`
end
def setup(&block)
`#{@@p}.setup = #{block}`
end
def draw(&block)
`#{@@p}.draw = #{block}`
end
def dist(x1, y1, x2, y2)
`return #{@@p}.dist(#{x1}, #{y1}, #{x2}, #{y2})`
end
def random(min, max=nil)
if max
`return #{@@p}.random(#{min}, #{max})`
else
`return #{@@p}.random(#{min})`
end
end
def color(r, g=nil, b=nil, a=nil)
if g==nil && b ==nil
`return #{@@p}.color(#{r})`
elsif a == nil
`return #{@@p}.color(#{r}, #{g}, #{b})`
else
`return #{@@p}.color(#{r}, #{g}, #{b}, #{a})`
end
end
def map(value, start1, stop1, start2, stop2)
`return #{@@p}.map(#{value}, #{start1}, #{stop1}, #{start2}, #{stop2})`
end
def millis
`return #{@@p}.millis();`
end
def no_fill
`#{@@p}.noFill()`
end
def noise(x, y=nil, z=nil)
if y == nil && z == nil
`return #{@@p}.noise(#{x})`
elsif z == nil
`return #{@@p}.noise(#{x}, #{y})`
else
`return #{@@p}.noise(#{x}, #{y}, #{z})`
end
end
def text(text, x, y)
`#{@@p}.text(#{text}, #{x}, #{y})`
end
end
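# A minimal sketch of driving the DSL above from Opal-compiled code; it assumes
# p5.js is loaded on the page and that the sketching class mixes in Sandoz.
# The element id and drawing calls are illustrative.
#
#   class MySketch
#     include Sandoz
#     def start
#       defsketch('sketch-holder') do
#         setup do
#           size(400, 400)
#         end
#         draw do
#           background(220)
#           fill(random(255), 100, 150)
#           ellipse(width / 2, height / 2, 50, 50)
#         end
#       end
#     end
#   end
#   MySketch.new.start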
|
hswick/sandoz
|
lib/sandoz.rb
|
<filename>lib/sandoz.rb
# require all the files, only if Opal is executing
if RUBY_ENGINE == 'opal'
require_relative 'sandoz/sandoz.rb'
require_relative 'sandoz/version'
else
# NOT running inside of opal, set things up
# so opal can find the files.
require 'opal'
Opal.append_path File.expand_path('..', __FILE__).untaint
end
|
rcapozzi/brm-jruby
|
lib/brm_test.rb
|
<filename>lib/brm_test.rb
require 'minitest/autorun'
require 'brm-jruby'
require 'rainbow'
class BrmTest < Minitest::Test
@xop_debug = false
attr_accessor :ctxp
def assert_error_info_result(resp, value)
assert(resp.has_key?("PIN_FLD_ERROR_INFO"), "Expect PIN_FLD_ERROR_INFO key")
assert_equal(value, resp["PIN_FLD_ERROR_INFO"][0]["PIN_FLD_RESULT"], "Expect error_info.result == #{value}")
end
def read_obj_by(klass, fld_name, fld_value, read_obj=0)
resp = find_by(klass, fld_name, fld_value)
if poid = resp["PIN_FLD_RESULTS"][read_obj]
return read_obj(poid)
end
end
def find_by(klass, fld_name, fld_value)
req = <<~_flist_
0 PIN_FLD_POID POID [0] 0.0.0.1 /search -1 0
0 PIN_FLD_FLAGS INT [0] 512
0 PIN_FLD_TEMPLATE STR [0] "select X from #{klass} 1 where 1.F1 = V1 "
0 PIN_FLD_ARGS ARRAY [1]
1 #{fld_name} STR [0] "#{fld_value}"
0 PIN_FLD_RESULTS ARRAY [*]
1 PIN_FLD_POID POID [0]
_flist_
return xop("SEARCH", 0, FList.from_str(req), "#{klass}.#{fld_name}=#{fld_value}")
end
def delete_all(resp)
ary_fld = 'PIN_FLD_RESULTS'
poid_fld = 'PIN_FLD_POID'
return unless resp.has_key?(ary_fld)
resp[ary_fld].each do |k, v|
delete_obj(v[poid_fld])
end
end
def delete_obj(poid)
return xop("DELETE_OBJ", 0, PIN_FLD_POID: poid);
end
def read_obj(poid)
poid = poid["PIN_FLD_POID"] if poid.is_a?(Hash)
return xop("READ_OBJ", 0, PIN_FLD_POID: poid);
end
def get_product_info(poid_or_flist)
poid = poid_or_flist['PIN_FLD_POID'] if poid_or_flist.is_a?(Hash)
poid ||= poid_or_flist[:PIN_FLD_POID] if poid_or_flist.is_a?(Hash)
poid ||= poid_or_flist
resp = xop("PRICE_GET_PRODUCT_INFO", 0, PIN_FLD_POID: poid);
assert_equal(1, resp["PIN_FLD_PRODUCTS"].size, 'Exact match')
return resp["PIN_FLD_PRODUCTS"][0]
end
# filename: File with input flist for set pricelist
def load_product(filename)
doc = File.read(filename)
flist = FList.from_str(doc)
product = flist.to_hash
product_code = product['PIN_FLD_PRODUCTS'][0]['PIN_FLD_CODE']
#|| abort "Bad file: #{filename}"
resp = find_by('/product', 'PIN_FLD_CODE', product_code)
if r = resp["PIN_FLD_RESULTS"]
r.each do |k, v|
prod = read_obj(v["PIN_FLD_POID"])
req = <<~_flist_.strip
0 PIN_FLD_POID POID [0] #{prod['PIN_FLD_POID']}
0 PIN_FLD_PROGRAM_NAME STR [0] "testnap"
0 PIN_FLD_PRODUCTS ARRAY [0]
1 PIN_FLD_DELETED_FLAG INT [0] 1
1 PIN_FLD_CODE STR [0] "#{prod['PIN_FLD_CODE']}"
1 PIN_FLD_NAME STR [0] "#{prod['PIN_FLD_NAME']}"
1 PIN_FLD_TAILORMADE INT [0] #{prod['PIN_FLD_TAILORMADE']}
_flist_
resp = xop('PRICE_SET_PRICE_LIST', 0, req, "Delete #{prod['PIN_FLD_POID']}")
end
# Verify nothing there
resp = find_by('/product', 'PIN_FLD_CODE', product_code)
assert(!resp.has_key?("PIN_FLD_RESULTS"), 'No existing product')
else
puts '# No existing products to delete'
end
# Create the product
resp = xop('PRICE_SET_PRICE_LIST', 0, flist, "create product.#{product_code}")
# Should return following for newly created product
# {"PIN_FLD_RESULT"=>1, "PIN_FLD_PRODUCTS"=>{0=>{"PIN_FLD_PRODUCT_OBJ"=>"0.0.0.1 /product 123 0"}}, "PIN_FLD_POID"=>"0.0.0.1 /dummy 1 0"}
assert_equal(1, resp['PIN_FLD_RESULT'], 'PRICE_SET_PRICE_LIST should return 1');
@product_poid = resp['PIN_FLD_PRODUCTS'][0]['PIN_FLD_PRODUCT_OBJ']
assert(@product_poid.size > 0, 'New product created')
product
end
# flist:: Can be a hash, flist, or string. Always returns a hash
def xop(opcode, flags, flist, label=nil)
unless @ctxp
@ctxp = com.portal.pcm.PortalContext.new
@ctxp.connect
end
flist_hash = case flist
when Hash; flist
when FList; flist.to_hash
when String; FList.from_str(flist).to_hash
else 'Unknown'
end
poid = flist_hash[:PIN_FLD_POID] || flist_hash['PIN_FLD_POID'] || 'Unknown'
if poid =~ / -1 /
str = flist_hash['PIN_FLD_CODE'] || flist_hash['PIN_FLD_NAME']
poid = poid.gsub(/ -1 .*/, '.CODE=' + str) if str
end
puts '# xop %-35s [%s]' % [Rainbow(opcode).yellow, Rainbow(label||poid).magenta]
if @xop_debug == true
puts "# xop %s\n%p" % [Rainbow('input ' + flist.class.to_s).yellow, flist_hash]
end
File.open("pcm_session.log", 'a+') {|f| f.puts "# xop #{opcode} request"; f.puts(FList.from_hash(flist_hash)) }
resp = @ctxp.xop(opcode, flags, flist)
resp = case resp
when Hash; resp
when FList; resp.to_hash
when String; FList.from_str(resp).to_hash
end
if @xop_debug == true
puts "# xop #{ Rainbow('output').yellow }%s (#{__FILE__}:#{__LINE__})\n%p" % [Rainbow(resp.class).yellow, resp]
end
File.open("pcm_session.log", 'a+') {|f| f.puts "# xop #{opcode} response"; f.puts(FList.from_hash(resp)) }
return resp
end
def pre
end
def self.exec
t = self.new("BRMTestX")
`./logcheck.sh zap`
template = "# TEST %s start: %s" % [@name, @descr]
template = "%-70s" % template
puts Rainbow(template).black.bg(:white) if @name
t.pre
t.run
puts Rainbow("# TEST #{@name} end").black.bg(:white) if @name
ensure
#puts Rainbow('# Closing context').gray if @xop_debug
t.ctxp.close(true) if t and t.ctxp
puts `./logcheck.sh check`
end
def dbconnect
# dbuser = prop.getProperty("db.user");
# dbpass = prop.getProperty("db.pass");
# dbhost = prop.getProperty("db.host");
# dbport = prop.getProperty("db.port");
# dbservicename = prop.getProperty("db.servicename");
# try {
# Class.forName("oracle.jdbc.driver.OracleDriver");
# logger.info("Connecting to database...");
# logger.info("jdbc:oracle:thin:" + dbuser + "/" + dbpass + "@//"
# + dbhost + ":" + dbport + "/" + dbservicename);
# dbConn = DriverManager.getConnection(
# "jdbc:oracle:thin:@//" + dbhost + ":" + dbport + "/"
# + dbservicename, dbuser, dbpass);
# } catch (ClassNotFoundException e) {
# e.printStackTrace();
# fail("Error connecting to BRM DB");
# } catch (SQLException e) {
# e.printStackTrace();
# fail("Error connecting to BRM DB");
# }
end
end
|
rcapozzi/brm-jruby
|
lib/brm-jruby.rb
|
#
# JRuby helpers for PCM.
#
# Custom fields should be defined in Infranet.properties.
# infranet.custom.field.package=com.foo
# infranet.custom.field.123=XXX_FLD_FOO
# then require the foo-flds.jar
class BRMJRuby
#VERSION = '0.0.1'
end
require 'jruby'
$LOAD_PATH.unshift 'jars' unless $LOAD_PATH.include? 'jars'
require 'pcm'
require 'pcmext'
require 'commons-logging-1.2'
require 'httpclient-4.5.11'
require 'httpcore-4.4.13'
require 'oraclepki'
require 'osdt_cert'
require 'osdt_core'
include Java
java_import "java.util.Properties"
java_import "com.portal.pcm.FList"
java_import "com.portal.pcm.PortalContext"
java_import "com.portal.pcm.Poid"
java_import "com.portal.pcm.SparseArray"
java_import "com.portal.pcm.Element"
# Import field and name it something we know
java_import ("com.portal.pcm.Field"){|p,c| "PIN_#{c}" }
# Avoid warnings if already required
begin
Kernel.const_get("PIN_FLDT_INT")
rescue
PCM_OP_SDK_GET_FLD_SPECS = 575
PIN_FLDT_UNUSED = 0
PIN_FLDT_INT = 1
PIN_FLDT_UINT = 2
PIN_FLDT_ENUM = 3
PIN_FLDT_NUM = 4
PIN_FLDT_STR = 5
PIN_FLDT_BUF = 6
PIN_FLDT_POID = 7
PIN_FLDT_TSTAMP = 8
PIN_FLDT_ARRAY = 9
PIN_FLDT_SUBSTRUCT = 10
PIN_FLDT_OBJ = 11
PIN_FLDT_BINSTR = 12
PIN_FLDT_ERR = 13
PIN_FLDT_DECIMAL = 14
PIN_FLDT_TIME = 15
PIN_FLDT_TEXTBUF = 16
PIN_FLDT_ERRBUF = PIN_FLDT_ERR
end
def define_constants
fields = com.portal.pcm.Field
fields.constants.each do |c|
puts "#{c} #{com.portal.pcm.Field.const_get(c)}"
end
end
class Hash
def pin_dump(level=0)
buf = []
max = self.keys.inject(0){|max,o| s = o.size; max = max > s ? max : s }
max = max > 30 ? max : 30
format = "%d %-#{max}s %s\n"
for k, v in self
if v.is_a?(Hash)
buf << v.pin_dump(level+1)
else
buf << format % [level, k, v]
end
end
buf
end
end
class Java::ComPortalPcm::PortalContext
# poid: A string version of a poid
def robj(poid)
flist = xop("READ_OBJ", 0, "PIN_FLD_POID" => poid)
end
def rflds(flist)
flist = xop("READ_FLDS", 0, flist)
end
def wflds(flist)
flist = xop("WRITE_FLDS", 0, flist)
end
def get_pvt(flist)
flist = xop("GET_PIN_VIRTUAL_TIME", 0, PIN_FLD_POID: '0.0.0.1 /dummy -1')
end
# Execute an opcode
# opcode: The int or string of the opcode. For example "ACT_FIND_VERIFY"
# flags: An int for flags
# flist: A +Hash+, +String+, or +FList+
# NOTE: Returns the same type as supplied.
def xop(opcode, flags, flist)
if String === opcode
opcode = Java::ComPortalPcm::PortalOp.const_get(opcode.upcase)
end
in_flist = case flist
when Hash
FList.from_hash(flist)
when String
FList.create_from_string(flist)
else
flist
end
ret_flist = self.opcode(opcode, flags, in_flist)
return case flist
when Hash
ret_flist.to_hash
when String
ret_flist.to_s
else
ret_flist
end
end
def self.xconnect
import 'java.io.StringReader'
data = File.read("Infranet.properties")
properties = Properties.new
properties.load(StringReader.new(data))
ctxp = com.portal.pcm.PortalContext.new(properties)
ctxp.connect(properties)
ctxp
rescue => e
puts "XXX Error test_connect_1"
puts e.inspect
nil
end
def self.exec
ctxp = com.portal.pcm.PortalContext.new
ctxp.connect
rv = yield ctxp if block_given?
ctxp.close(true)
rv
ensure
ctxp.close(true) if ctxp and ctxp.is_context_valid?
end
end
class Java::ComPortalPcm::Poid
def inspect
"#<%s:%d %s>" % ['Poid', self.object_id, self.toString() ]
end
class << self
# Convert db, type, id, rev into a poid.
def from_string(str)
ary = str.split
if ary.size == 2
(db, poid_id0, type) = [1,ary[1],ary[0]]
else
(db, poid_id0, type) = [ary[0],ary[2],ary[1]]
end
if (db =~ /\./)
ary = db.split(".")
db = (ary[3].to_i) + (256*ary[2].to_i) + (256*256*ary[1].to_i) + (256*256*256*ary[0].to_i)
end
self.new(db.to_i, poid_id0.to_i, type)
end
alias_method :from_str, :from_string
end
end
class Java::ComPortalPcm::FList
def [](field)
fld = FList.field(field)
get(fld)
end
def []=(key,value)
xset(key, value)
end
def xset(field,value)
fld = self.class.field(field)
case fld.get_pin_type
when /DECIMAL/
value = java.math.BigDecimal.new(value)
when /TSTAMP/
value = java.util.Date.new(value.to_i * 1000)
when /INT|ENUM/
value = value.to_i
when /PIN_FLDT_POID/
value = String === value ? Poid.from_str(value) : value
end
set(fld, value)
self
end
# to_str is the normal way of seeing things. +as_string+ is awkward.
def to_str
self.as_string
end
def to_s
self.as_string
end
def to_hash(convert_buffers=false)
hash = {}
get_fields.each do |fld|
val = self.get(fld)
key = fld.name_string
case fld.type_id
when PIN_FLDT_ARRAY
pairs = val.pairs
while (pairs.hasMoreElements)
pair = pairs.nextPair
idx = pair.key
hash[key] ||= {}
hash[key][idx] = pair.value.to_hash
end
when PIN_FLDT_SUBSTRUCT
hash[key] ||= {}
hash[key][0] = val.to_hash
when PIN_FLDT_POID
hash[key] = val.to_s
when PIN_FLDT_TSTAMP
hash[key] = (val.getTime() / 1000)
when PIN_FLDT_DECIMAL
#hash[key] = val.nil? ? "" : java.math.BigDecimal.new(val.to_string)
hash[key] = val.nil? ? nil : val.to_string
when PIN_FLDT_BUF
buf = val.to_s
buf = FList.create_from_string(buf).to_str rescue buf if convert_buffers == true
hash[key] = buf
else
hash[key] = val
end
end
hash
end
class << self
# Create from a doc/string
def from_str(doc)
create_from_string(doc.gsub(/^\s*/,''))
end
# Converts CamelCase names like BigDeal to BIG_DEAL
def to_pinname(str)
return str if str =~ /_FLD_/
name = str.to_s.
gsub(/([A-Z]+)([A-Z][a-z])/,'\1_\2').
gsub(/([a-z\d])([A-Z])/,'\1_\2').
upcase
"PIN_FLD_#{name}"
end
# Uses the com.portal.pcm.Field class to instantiate
# the singleton for given +field+
# where +field+ is a string or symbol
def field(field)
fld = com.portal.pcm.Field.from_pin_name(field)
fld ||= com.portal.pcm.Field.from_name(field)
fld ||= com.portal.pcm.Field.from_name("Fld" + field)
return fld if fld
pin_name = to_pinname(field)
fld = Java::ComPortalPcm::Field
fld = fld.from_pin_name(pin_name)
return fld if fld
if name = sdk_field(field)
com.portal.pcm.Field.from_pin_name(name)
end
rescue
raise "Cannot load field named #{field}"
end
def sdk_field(field)
field = field.to_s if Symbol === field
ary = @@dd_fields.keys.grep /#{field}/i
if ary.size == 1
ary.first
end
end
# Create a new FList from the supplied Ruby hash.
# PIN_FLDT_TSTAMP | Time | Date | Ruby is seconds. Java is ms.
def from_hash(hash)
flist = com.portal.pcm.FList.new
hash.each do |k,v|
if !field = self.field(k)
raise "Bad load of #{k} => #{v}"
end
case field.type_id
when PIN_FLDT_POID
flist.set(field,Poid.from_string(v))
when PIN_FLDT_STR,
PIN_FLDT_INT,
PIN_FLDT_ENUM
v.nil? ? flist.set(field) : flist.set(field,v)
when PIN_FLDT_TSTAMP
d = java.util.Date.new(v.to_i * 1000)
flist.set(field,d)
when PIN_FLDT_DECIMAL
if v.nil?
flist.set(field)
else
v = "0" if v.is_a?(String) and v.size == 0
flist.set(field,java.math.BigDecimal.new(v))
end
when PIN_FLDT_ARRAY
# Two ways. Use SA and set OR setElement
# sa = SparseArray.new
for key, value in v
# Element.const_get "ELEMID_ANY" => -1
idx = key == "*" ? -1: key
#sa.add(idx, self.from_hash(value))
flist.setElement(field, idx, value ? self.from_hash(value) : nil)
end
#flist.set(field,sa)
when PIN_FLDT_SUBSTRUCT
key = v.keys.first
value = self.from_hash(v[key])
flist.set(field,value)
when PIN_FLDT_BUF
bbuf = Java::com.portal.pcm.ByteBuffer.new
#bbuf.set_bytes(v.sub(/\n\u0000.*?$/,"").to_java_bytes)
bbuf.set_bytes(v.to_java_bytes)
flist.set(field, bbuf)
else
raise "Unknown #{field} #{field.pintype} #{field.type_id} #{v.inspect}"
end
end
flist
end
# Loads fields from the database.
def sdk_fields(ctx)
@@dd_fields
rescue
flist = Java::ComPortalPcm::FList.new
poid = Java::ComPortalPcm::Poid.from_str("0.0.0.1 /dd/objects 1")
flist.set(Java::ComPortalPcmFields::FldPoid.getInst,poid)
out_flist = ctx.opcode(PCM_OP_SDK_GET_FLD_SPECS, flist)
hash = out_flist.to_hash
Struct.send(:remove_const, :PinFld) if Struct.const_defined?("PinFld")
pf = Struct.new("PinFld", :name, :num, :type, :status)
dd_fields = {}
hash["PIN_FLD_FIELD"].each do |i,href|
dd_fields[href["PIN_FLD_FIELD_NAME"]] = pf.new(href["PIN_FLD_FIELD_NAME"], href["PIN_FLD_FIELD_NUM"].to_i, href["PIN_FLD_FIELD_TYPE"].to_i, href["PIN_FLD_STATUS"].to_i)
end
@@dd_fields = dd_fields
end
end
end
if false # __FILE__ == $0
flist = com.portal.pcm.FList.new
poid = com.portal.pcm.Poid.value_of("$DB /service/pcm_client 1", 1)
flist.set(com.portal.pcm.fields.FldPoid.getInst,poid)
flist.set(com.portal.pcm.Field.from_name("FldName"),"Yummy")
flist.set(FList.field("Name"),"Yummy")
fld = com.portal.pcm.Field.from_name("FldName")
hash = {
"PIN_FLD_POID" => "0.0.0.1 /service/pcm_client -1 0",
"PIN_FLD_LOGIN" => "root.0.0.0.1",
"PIN_FLD_PASSWD_CLEAR" => "password",
"PIN_FLD_CM_PTRS" => {0=>{"PIN_FLD_CM_PTR"=>"ip localhost 11960"}},
"PIN_FLD_TYPE" => 1
}
flist = FList.from_hash(hash)
ctx = com.portal.pcm.PortalContext.new(flist)
end
|
rcapozzi/brm-jruby
|
test/test_brm-jruby.rb
|
<reponame>rcapozzi/brm-jruby
%w( $PIN/HOME/jars
lib).each do |path|
$LOAD_PATH.unshift(path) unless $LOAD_PATH.include?(path)
puts "INFO: Added to path #{path}"
end
require 'guard-jruby-minitest'
require 'minitest/autorun'
require "brm-jruby"
class BRMJRubyTest < Minitest::Test
def test_flunk
# $stderr.puts 'Boom start'
# flunk('Boom')
# assert(false == true)
# puts 'Boom end'
end
def test_poid
poid = com.portal.pcm.Poid.from_str('0.0.0.5 /service/pcm_client -1')
assert(poid.db == 5)
assert(poid.type == '/service/pcm_client')
assert(poid.id == -1)
end
def test_flist_set
flist = com.portal.pcm.FList.new
flist.xset("Name", "Bob")
assert(flist["Name"] == "Bob")
actual = flist["Name"] = "Joe"
assert(actual == "Joe")
assert(flist["Name"] == "Joe")
poid = com.portal.pcm.Poid.from_str('0.0.0.5 /service/pcm_client -1')
poidx = com.portal.pcm.Poid.from_str('0.0.5.5 /service/pcm_client -1')
flist.set(com.portal.pcm.fields.FldPoid.getInst,poid)
assert(flist['PIN_FLD_POID'] === poid)
assert(flist['PIN_FLD_POID'] != poidx)
flist['PIN_FLD_POID'] = poid
assert(flist['PIN_FLD_POID'] === poid)
assert(flist['PIN_FLD_POID'] != poidx)
flist.xset('Poid', poid)
assert(flist['PIN_FLD_POID'] === poid)
assert(flist['PIN_FLD_POID'] != poidx)
flist.xset("AccountObj", Poid.from_str('0.0.0.3 /account -123'))
flist['ServiceObj'] = Poid.from_str('0.0.0.4 /account -789')
assert(Java::ComPortalPcm::FList === flist.xset("Poid", "0.0.0.5 /dummy -1"))
end
def test_flist_from_str
v1, v2 = 'Bob', 'Description'
doc = com.portal.pcm.FList.new.xset('Name', v1).xset('Descr', v2).to_s
flist = com.portal.pcm.FList.from_str("\n\t\n" + doc + "\n\n")
assert(flist['PIN_FLD_NAME'] == v1)
assert(flist['PIN_FLD_DESCR'] == v2)
end
def test_flist_buf
doc = <<~_flist_.strip
0 PIN_FLD_SELECTOR BUF [0] flag/size/offset 0x0 293 0 data:
_flist_
flist = FList.from_str(doc)
assert(flist.hash_key?('PIN_FLD_SELECTOR'))
end
def test_flist_from_hash
flist = com.portal.pcm.FList.from_hash("Name" => "Bob", PIN_FLD_DESCR: "Description")
assert(flist['PIN_FLD_NAME'] == 'Bob')
end
# def test_big_flist
# flist = com.portal.pcm.FList.from_hash("Name" => "Bob", PIN_FLD_DESCR: "Description")
# sa = SparseArray.new
# sa.add(idx, self.from_hash("Name" => "Bob", PIN_FLD_DESCR: "Description"))
# end
end
|
rynelaster/brewery
|
config.ru
|
require 'sinatra/base'
require 'sinatra/activerecord'
require './controllers/ApplicationController'
# require './controllers/BreweryController'
# require './controllers/LocationContoller'
map ('/'){
run ApplicationController
}
|
rynelaster/brewery
|
controllers/LocationController.rb
|
class Location < ApplicationController
get '/locations' do
end
end
|
rynelaster/brewery
|
controllers/ApplicationController.rb
|
<reponame>rynelaster/brewery<gh_stars>0
class ApplicationController < Sinatra::Base
require 'http'
require 'brewery_db'
require 'bundler'
Bundler.require()
register Sinatra::CrossOrigin
set :allow_origin, :any
set :all_methods, [:get, :post, :options, :put, :patch, :delete]
set :allow_credentials, true
configure do
enable :cross_origin
end
not_found do
halt 404
end
options '*' do
response.headers['Allow'] = 'HEAD, GET, POST, PUT, PATCH, DELETE'
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Headers'] = 'X-Requested-With, X-HTTP-Method-Override, Content-Type, Cache-Control, Accept'
end
# BreweryDB Gem
brewery_db = BreweryDB::Client.new do |config|
config.api_key = '262ffd433f83ede508850a0a4a1c01fd'
end
use Rack::MethodOverride
set :method_override, true
get '/locations' do
# HTTP.get('http://api.brewerydb.com/v2/locations/?key=262ffd433f83ede508850a0a4a1c01fd').body
locs = brewery_db.locations.all(locality: 'San Francisco')
locs.to_json
end
# get '/chicago' do
# HTTP.get('http://api.brewerydb.com/v2/locations/')
get '/unitedstates' do
HTTP.get('http://api.brewerydb.com/v2/locations/?locality=chicago&key=262ffd433f83ede508850a0a4a1c01fd').body
end
# get '/beers' do
# brewery_db.search.breweries(q: 'IPA');
# end
get '/breweries/in/:city' do
breweries = brewery_db.locations.all(locality: params[:city])
breweries.to_json
end
# get '/random' do
# randoms = brewery_db.beers.random;
# randoms.to_json
# end
end
|
jutonz/dctl
|
lib/dctl.rb
|
<reponame>jutonz/dctl<filename>lib/dctl.rb
require "rainbow"
require "config"
require "dctl/version"
require "dctl/main"
module Dctl
ERROR_COLOR = :red
SUCCESS_COLOR = :green
CMD_COLOR = :dimgray
end
|
jutonz/dctl
|
spec/dctl/main_spec.rb
|
require "spec_helper"
require "tempfile"
require "thor"
RSpec.describe Dctl::Main do
describe "#define_custom_commands" do
it "defines a command on the passed class" do
config = <<~CONFIG
org: jutonz
project: dctl_rb
custom_commands:
single: pwd
multiple: ["pwd", "whoami"]
CONFIG
with_config(config) do |config_path|
dctl = Dctl::Main.new(config: config_path)
klass = Class.new(Thor)
dctl.define_custom_commands(klass)
expect(klass.commands.key?("single")).to be true
expect(klass.commands.key?("multiple")).to be true
end
end
it "is okay if there are no custom commands" do
config = <<~CONFIG
org: jutonz
project: dctl_rb
CONFIG
with_config(config) do |config_path|
dctl = Dctl::Main.new(config: config_path)
klass = Class.new(Thor)
expect {
dctl.define_custom_commands(klass)
}.to_not raise_error
end
end
end
describe "#image_tag" do
it "returns a tag" do
config = <<~CONFIG
org: jutonz
project: dctl_rb
CONFIG
with_config(config) do |config_path|
dctl = Dctl::Main.new(config: config_path)
service = "app"
expect(dctl).to receive(:current_version_for_image).with(service)
.and_return(10)
expect(dctl.image_tag(service)).to eq "jutonz/dctl_rb-dev-app:10"
end
end
it "allows specifying negative version numbers" do
config = <<~CONFIG
org: jutonz
project: dctl_rb
CONFIG
with_config(config) do |config_path|
dctl = Dctl::Main.new(config: config_path)
service = "app"
expect(dctl).to receive(:current_version_for_image).with(service)
.and_return(10)
expect(dctl.image_tag(service, version: -1)).to eq(
"jutonz/dctl_rb-dev-app:9"
)
end
end
it "doesn't mind if version numbers are strings" do
config = <<~CONFIG
org: jutonz
project: dctl_rb
CONFIG
with_config(config) do |config_path|
dctl = Dctl::Main.new(config: config_path)
service = "app"
expect(dctl).to receive(:current_version_for_image).with(service)
.and_return(10)
expect(dctl.image_tag(service, version: "-1")).to eq(
"jutonz/dctl_rb-dev-app:9"
)
end
end
end
end
def with_config(config_str, &block)
Tempfile.open [".dctl", ".yml"] do |tf|
tf.write config_str
tf.flush
yield tf.path
end
end
|
jutonz/dctl
|
lib/dctl/cli/kubernetes.rb
|
require "thor"
require "dctl/kubernetes"
module Dctl::Kubernetes
class Cli < Thor
class_option :namespace, type: :string, aliases: :n
class_option :env, type: :string
desc "live-image", "Returns the active image for the given deployment"
def live_image(service)
puts Dctl::Kubernetes.live_image(service, k8s_opts)
end
desc "is-outdated", "Exit 1 if deployed image would be updated by a deploy, or 0 otherwise."
long_desc <<~LONGDESC
Check whether the currently deployed image is outdated and would be
updated by a new deployment.
This is determined by checking the tag specified in the compose file for
this environment against a random pod in the corresponding k8s deployment.
For example, if the tag in the compose file is `jutonz/app:4` and the live
image is `jutonz/app:3`, this would exit with 0. If the tags matched this
would exit with 1.
This is useful for determining when it is possible to skip building new
images, e.g with a CI/CD setup.
Example:\x5
export DCTL_ENV=prod\x5
if dctl is-outdated app; then\x5
\tdctl build app\x5
\tdctl push app\x5
else\x5
\techo "app is up to date"\x5
fi\x5
LONGDESC
option :verbose, type: :boolean, default: false
def is_outdated(service)
verbose = options[:verbose]
dctl = Dctl::Main.new dctl_opts
compose_tag = dctl.image_tag service
puts "Tag in compose file is #{compose_tag}" if verbose
live_tag = Dctl::Kubernetes.live_image(service, k8s_opts)
puts "Deployed tag is #{live_tag}" if verbose
is_outdated = compose_tag != live_tag
if is_outdated
puts "yes"
exit 0
else
puts "no"
exit 1
end
end
no_commands do
# Transform Thor's HashWithIndifferentAccess to a regular hash so it can
# be passed to methods and treated as named arguments.
def k8s_opts
{ namespace: options["namespace"] }
end
# Transform Thor's HashWithIndifferentAccess to a regular hash so it can
# be passed to methods and treated as named arguments.
def dctl_opts
{ env: dctl_env }
end
# Support both --env and DCTL_ENV, but prefer --env if both are present
def dctl_env
options[:env] || ENV["DCTL_ENV"] || "dev"
end
end
end
end
|
jutonz/dctl
|
lib/dctl/main.rb
|
<filename>lib/dctl/main.rb
module Dctl
class Main
attr_reader :env, :settings
def initialize(env: "dev", config: nil)
@env = env
load_config!(config)
end
##
# Generate the full tag for the given image, concatenating the org,
# project, env, image name, and version.
#
# Pass `version: nil` to exclude the version portion.
#
# @example
# image_tag("app") # => jutonz/dctl-dev-app:1
def image_tag(image, version: current_version_for_image(image))
org = settings.org
project = settings.project
tag = "#{org}/#{project}-#{env}-#{image}"
if !version.nil?
version = version.to_i
tag +=
if version.negative?
current_version = current_version_for_image(image)
":#{current_version.to_i + version}"
else
":#{version}"
end
end
tag
end
def current_version_for_image(image)
versions[image]
end
##
# Returns the path to the given image's data directory (which includes at
# minimum the Dockerfile, plus any other relevant files the user may have
# placed there).
def image_dir(image)
relative = File.join "docker", env, image
File.expand_path relative, Dir.pwd
end
def image_dockerfile(image)
File.expand_path "Dockerfile", image_dir(image)
end
def expand_images(*images)
images = versions.keys if images.empty?
images = Array(images)
images.each { |image| check_image(image) }
images
end
def bump(image)
check_image(image)
parsed = parsed_compose_file
service = parsed.dig "services", image
old_tag = service["image"]
puts "Found existing image #{old_tag}"
version = versions[image].to_i
new_tag = image_tag image, version: version + 1
puts "New tag will be #{new_tag}"
service["image"] = new_tag
print "Updating..."
File.write(compose_file_path, parsed.to_yaml)
puts "done"
# Cache bust
@parsed_compose_file = nil
@versions = nil
puts Rainbow("#{image} is now at version #{version + 1}").fg :green
end
##
# Returns the path to the .dctl.yml file for the current project
def config_path
path = File.expand_path ".dctl.yml", Dir.pwd
unless File.exist? path
error = "Could not find config file at #{path}"
puts Rainbow(error).red
exit 1
end
path
end
##
# Confirms that there is an entry for the given image in the compose file
# for this environment, and that the image tag within is formatted as we
# expect it to be.
#
# Prints a warning if the tag has the wrong name, but errors out if the
# service tag is not present
#
# Expected names look like org/project-env-image:version
def check_image(image)
tag = image_tag(image)
# Check that a service exists for the image
service = parsed_compose_file.dig "services", image
unless service
error = "The service \"#{image}\" is not present in the compose " \
"file for this environment. Please add a service entry for " \
"#{image} to #{compose_file_path}\n"
puts Rainbow(error).fg :red
puts <<~EOL
It might look something like this:
version: '3'
services:
#{image}:
image: #{image_tag(image)}
EOL
exit 1
end
# Check that the image has the correct tag
expected_tag = image_tag(image)
actual_tag = service["image"]
if actual_tag != expected_tag
warning = "Expected the tag for the image \"#{image}\" to be " \
"\"#{expected_tag}\", but it was \"#{actual_tag}\". While not " \
"critical, this can cause issues with some commands."
puts Rainbow(warning).fg :orange
end
end
def versions
@versions ||= begin
images = parsed_compose_file["services"].keys
version_map = {}
images.each do |image|
version_map[image] = parsed_compose_file["services"][image]["image"].split(":").last
end
version_map
end
end
def parsed_compose_file
@parsed_compose_file ||= YAML.load_file compose_file_path
end
##
# If there are user defined commands in .dctl.yml, dynamically add them to
# the passed thor CLI so they may be executed.
def define_custom_commands(klass)
Array(settings.custom_commands).each do |command, args|
klass.send(:desc, command, "[Custom Command] #{command}")
# Concat with string so we can use exec rather than executing multiple
# subshells. Exec allows us to reuse the shell in which dctl is being
# executed, so we get to do things like reuse sudo authorizations
# rather than always having to prompt.
concatenated = Array(args).join(" && ").strip
klass.send(:define_method, command, -> do
stream_output(concatenated, exec: true)
end)
end
end
##
# Ensure the current project's .dctl.yml contains all the requisite keys.
def check_settings!
required_keys = %w(
org
project
)
required_keys.each do |key|
unless Settings.send key
error = "Config is missing required key '#{key}'. Please add it " \
"to #{config_path} and try again."
error += "\n\nFor more info, see https://github.com/jutonz/dctl_rb#required-keys"
puts Rainbow(error).red
exit 1
end
end
end
##
# Load the current project's config file, complaining if it does not exist
# or is malformed.
def load_config!(custom_config_path = nil)
Config.load_and_set_settings(custom_config_path || config_path)
check_settings!
@settings = Settings
end
def compose_file_path
path = File.expand_path "docker/#{env}/docker-compose.yml"
unless File.exist? path
err = "There is no docker compose file for env #{env} (I expected to find it at #{path})"
puts Rainbow(err).red
exit 1
end
path
end
end
end
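# A brief usage sketch for Dctl::Main (illustrative values): it assumes a .dctl.yml
# with org: jutonz and project: dctl_rb in the working directory, and a
# docker/dev/docker-compose.yml whose "app" service is tagged jutonz/dctl_rb-dev-app:3.
#
#   dctl = Dctl::Main.new(env: "dev")
#   dctl.image_tag("app")                #=> "jutonz/dctl_rb-dev-app:3"
#   dctl.image_tag("app", version: nil)  #=> "jutonz/dctl_rb-dev-app"
#   dctl.image_tag("app", version: -1)   #=> "jutonz/dctl_rb-dev-app:2"
#   dctl.bump("app")                     # rewrites the compose file with tag :4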
|
jutonz/dctl
|
lib/dctl/kubernetes.rb
|
<reponame>jutonz/dctl<filename>lib/dctl/kubernetes.rb
module Dctl::Kubernetes
def self.live_image(service, namespace: nil)
# Check if namespace exists
if namespace && `kubectl get ns #{namespace}`.empty?
error = "Could not find namespace #{namespace}"
puts Rainbow(error).fg ERROR_COLOR
exit 1
end
# Check if deployment exists
deploy_check_command = "kubectl get deploy #{service}"
deploy_check_command += " -n #{namespace}" if namespace
if `#{deploy_check_command}`.empty?
error = "Could not find deployment for #{service}"
error += " in namespace #{namespace}" if namespace
puts Rainbow(error).fg ERROR_COLOR
exit 1
end
jsonpath = "{$.spec.template.spec.containers[:1].image}"
live_image_command = "kubectl get deploy #{service}"
live_image_command += " -ojsonpath='#{jsonpath}'"
live_image_command += " -n #{namespace}" if namespace
`#{live_image_command}`
end
end
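# Illustrative call, assuming kubectl is configured and an "app" deployment exists
# in the "production" namespace (the returned tag is an example value):
#
#   Dctl::Kubernetes.live_image("app", namespace: "production")
#   #=> "jutonz/dctl_rb-prod-app:42"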
|
jutonz/dctl
|
spec/dctl/dctl_spec.rb
|
<filename>spec/dctl/dctl_spec.rb<gh_stars>1-10
require "spec_helper"
RSpec.describe Dctl do
it "has a version number" do
expect(Dctl::VERSION).not_to be nil
end
end
|
p/integrity
|
init.rb
|
<filename>init.rb<gh_stars>0
$LOAD_PATH.unshift(File.expand_path("../lib", __FILE__))
begin
require ".bundle/environment"
rescue LoadError
require "bundler/setup"
end
require "integrity"
# Uncomment as appropriate for the notifier you want to use
# = Email
# require "integrity/notifier/email"
# = SES Email
# require "integrity/notifier/ses"
# = Campfire
# require "integrity/notifier/campfire"
# = TCP
# require "integrity/notifier/tcp"
# = HTTP
# require "integrity/notifier/http"
# = AMQP
# require "integrity/notifier/amqp"
# = Shell
# require "integrity/notifier/shell"
# = Co-op
# require "integrity/notifier/coop"
Integrity.configure do |c|
c.database = "sqlite3:db/integrity.db"
# PostgreSQL via the local socket to "integrity" database:
# c.database = "postgres:///integrity"
# PostgreSQL via a more full specification:
# c.database = "postgres://user:pass@host:port/database"
# Heroku
# c.database = ENV['DATABASE_URL']
c.directory = "builds"
# Heroku
# c.directory = File.dirname(__FILE__) + '/tmp/builds'
c.base_url = "http://ci.example.org"
# Heroku - Comment out c.log
c.log = "integrity.log"
c.github_token = "<PASSWORD>"
c.build_all = true
c.trim_branches = false
c.builder = :threaded, 5
c.project_default_build_count = 10
c.build_output_interval = 5
# Use https://github.com/grahamc/git-cachecow to cache repository locally
# c.checkout_proc = Proc.new do |runner, repo_uri, branch, sha1, target_directory|
# runner.run! "git scclone #{repo_uri} #{target_directory} #{sha1}"
# end
end
|
p/integrity
|
lib/app/app.rb
|
<filename>lib/app/app.rb
require "app/helpers"
require 'cgi'
module Integrity
class App < Sinatra::Base
set :root, File.dirname(__FILE__)
enable :methodoverride, :static
disable :build_all
helpers Integrity::Helpers
not_found do
status 404
show :not_found, :title => "lost, are we?"
end
error do
@error = request.env["sinatra.error"]
status 500
show :error, :title => "something has gone terribly wrong"
end
before do
halt 404 if request.path_info.include?("favico")
unless Integrity.config.base_url
Integrity.configure { |c| c.base_url = url_for("/", :full) }
end
end
post "/github/:token" do |token|
unless Integrity.config.github_enabled?
pass
end
unless token == Integrity.config.github_token
halt 403
end
Payload.build(
JSON.parse(params[:payload]),
Integrity.config.build_all?
).to_s
end
get '/?', :provides => 'application/json' do
load_projects
projects_json = @projects.map do |project|
project.attributes_for_json
end
wrapped_projects = {:projects => projects_json}
json wrapped_projects
end
get "/?" do
load_projects
show :home, :title => "projects"
end
def load_projects
@projects = authorized? ? Project.all : Project.all(:public => true)
# we may have no projects defined yet
@status = :blank
# statuses can be thought of as having the following hierarchy:
# success -> pending -> building -> failed
# The overall status of Integrity is the rightmost status of
# any of the displayed projects.
# statuses are listed in lib/integrity/build.rb.
@projects.each do |project|
if project.status == :blank
# project with no builds.
# do not change overall status
next
end
case @status
when :blank
# first project's status unconditionally sets overall status
@status = project.status
when :success
# any status takes precedence over success
if project.status != :success
@status = project.status
end
when :pending
# building and failed take precedence over pending
if project.status != :success && project.status != :pending
@status = project.status
end
when :building
# failed takes precedence over building
if project.status == :failed
@status = :failed
end
else
# overall status is failed, don't change it
break
end
end
end
private :load_projects
get "/login" do
login_required
redirect root_url.to_s
end
get "/new" do
login_required
@project = Project.new
show :new, :title => ["projects", "new project"]
end
post "/?" do
login_required
@project = Project.new(params[:project_data])
if @project.save
update_notifiers_of(@project)
redirect project_url(@project).to_s
else
show :new, :title => ["projects", "new project"]
end
end
get "/:project\.png" do
login_required unless current_project.public?
send_file File.join(File.dirname(__FILE__), 'public', 'status', current_project.status.to_s + '.png')
end
get "/:project\.json" do
@format = :json
login_required unless current_project.public?
json current_project
end
get "/:project" do
login_required unless current_project.public?
if limit = Integrity.config.project_default_build_count
@builds = current_project.sorted_builds.all(:limit => limit + 1)
if @builds.length <= limit
@showing_all_builds = true
else
# we fetched one build more than needed
@builds.pop
end
else
@builds = current_project.sorted_builds
@showing_all_builds = true
end
@status = current_project.status
show :project, :title => ["projects", current_project.name]
end
get "/:project/all" do
login_required unless current_project.public?
@builds = current_project.sorted_builds
@showing_all_builds = true
show :project, :title => ["projects", current_project.name]
end
get "/:project/ping" do
login_required unless current_project.public?
if current_project.status != :success
halt 412, current_project.status.to_s
else
current_project.last_build.sha1
end
end
put "/:project" do
login_required
if current_project.update(params[:project_data])
update_notifiers_of(current_project)
redirect project_url(current_project).to_s
else
show :new, :title => ["projects", current_project.permalink, "edit"]
end
end
delete "/:project" do
login_required
current_project.destroy
redirect root_url.to_s
end
get "/:project/edit" do
login_required
show :new, :title => ["projects", current_project.permalink, "edit"]
end
post "/:project/builds" do
login_required
@build = current_project.build_head
redirect build_url(@build).to_s
end
get "/:project/builds/:build/artifacts/:artifact" do |project, build, artifact|
login_required unless current_project.public?
artifact = CGI.unescape(artifact)
artifact_files = current_build.artifact_files
file = artifact_files.detect do |file|
file[:relative_path] == artifact
end
if file.nil?
halt 404
end
fs_path = current_build.build_directory.join(file[:relative_path])
unless File.exist?(fs_path)
halt 404
end
send_file fs_path, :filename => file[:name]
end
get "/:project/builds/:build\.json" do
@format = :json
login_required unless current_project.public?
json current_build
end
get "/:project/builds/:build" do
login_required unless current_project.public?
@status = current_build.status
show :build, :title => ["projects", current_project.permalink,
current_build.sha1_short]
end
get "/:project/builds/:build/raw" do
login_required unless current_project.public?
content_type :text
current_build.output
end
post "/:project/builds/:build" do
login_required
@build = current_project.build(current_build.commit)
redirect build_url(@build).to_s
end
post "/:project/builds/:build/notify" do
login_required unless current_project.public?
current_build.notify
redirect build_url(current_build).to_s
end
delete "/:project/builds/:build" do
login_required
url = project_url(current_build.project).to_s
current_build.destroy!
redirect url
end
end
end
|
crate/activerecord-crate-adapter
|
spec/dummy/app/models/post.rb
|
<filename>spec/dummy/app/models/post.rb
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
class Post < ActiveRecord::Base
before_create :set_id
private
def set_id
self.id = SecureRandom.uuid
end
end
|
crate/activerecord-crate-adapter
|
lib/active_record/connection_adapters/crate/database_statements.rb
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
module ActiveRecord
module ConnectionAdapters
module DatabaseStatements
def exec_query(sql, name = 'SQL', binds = [])
result = nil
log(sql, name, binds) {
result = do_exec_query(sql, name, binds)
}
fields = result.cols
ActiveRecord::Result.new(fields, result.values)
end
def do_exec_query(sql, name, binds)
params = []
binds.each_with_index do |(column, value), index|
ar_column = column.is_a?(ActiveRecord::ConnectionAdapters::Column)
# only quote where clause values
unless ar_column # && column.sql_type == 'timestamp'
v = value
quoted_value = ar_column ? quote(v, column) : quote(v, nil)
params << quoted_value
else
params << value
end
end
@connection.execute sql, params
end
# Returns the statement identifier for the client side cache
# of statements
def sql_key(sql)
sql
end
# Executes an SQL statement, returning a ResultSet object on success
# or raising a CrateError exception otherwise.
def execute(sql, name = nil)
log(sql, name) do
@connection.execute(sql)
end
end
protected
def select(sql, name, binds)
exec_query(sql, name, binds)
end
end
end
end
|
crate/activerecord-crate-adapter
|
spec/data_types/object_spec.rb
|
<filename>spec/data_types/object_spec.rb
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
require_relative '../spec_helper'
describe "User#object" do
before(:all) do
ActiveRecord::Migration.class_eval do
create_table :users do |t|
t.string :name
t.object :address, object_schema_behaviour: :strict,
object_schema: {street: :string, city: :string, phones: {array: :string}, zip: :integer}
end
end
ensure_status('yellow')
User.reset_column_information
end
after(:all) do
ActiveRecord::Migration.class_eval do
drop_table :users
end
end
describe "#object column type" do
let(:address) {Address.new(street: '1010 W 2nd Ave', city: 'Vancouver', phones: ["123", "987"], zip: 6888)}
let(:user) {@user = User.create!(name: '<NAME>', address: address)}
it 'should store and return an object' do
p = User.find(user.id)
p.address.should be_a Address
p.address.street.should eq address.street
p.address.city.should eq address.city
p.address.zip.should eq address.zip # without an object schema, numbers are converted to strings
p.address.phones.should eq address.phones
end
end
end
|
crate/activerecord-crate-adapter
|
spec/spec_helper.rb
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
require 'activerecord-crate-adapter'
require 'logger'
#require 'debugger'
require 'dummy/app/models/address'
require 'dummy/app/models/post'
require 'dummy/app/models/user'
HOST = "127.0.0.1"
PORT = 44200
RSpec.configure do |config|
config.before(:each) do
end
config.after(:each) do
end
config.before(:suite) do
connect
end
config.after(:all) do
end
end
def connect
ActiveRecord::Base.logger = Logger.new("log/debug.log")
ActiveRecord::Base.logger.level = Logger::DEBUG
ActiveRecord::Base.configurations = {
'arunit' => {
adapter: 'crate',
min_messages: 'warning',
host: HOST,
port: PORT,
}
}
ActiveRecord::Base.establish_connection :arunit
end
# Crate is eventually consistent, therefore we need
# to refresh the table when doing queries, except when we
# query by the primary key
def refresh_posts
Post.connection.raw_connection.refresh_table('posts')
end
# Wait until the table is synced to all shards.
# This should be used after each create_table to prevent flaky tests
def ensure_status(expected_status)
req = Net::HTTP::Get.new("/_cluster/health?wait_for_status=#{expected_status}&timeout=10s")
resp = Net::HTTP.new(HOST, PORT)
response = resp.start { |http| http.request(req) }
actual_status = JSON.parse(response.body)['status']
raise WrongStatusError, "expected status #{expected_status}, got #{actual_status}" if actual_status != expected_status
end
class WrongStatusError < StandardError; end
|
crate/activerecord-crate-adapter
|
lib/active_record/connection_adapters/crate_adapter.rb
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
require 'active_record'
require 'active_record/base'
require 'arel/arel_crate'
require 'arel/visitors/bind_visitor'
require 'active_support/dependencies/autoload'
require 'active_support/callbacks'
require 'active_support/core_ext/string'
require 'active_record/connection_adapters/abstract_adapter'
require 'active_record/connection_adapters/statement_pool'
require 'active_record/connection_adapters/column'
require 'active_record/connection_adapters/crate/schema_statements'
require 'active_record/connection_adapters/crate/database_statements'
require 'active_support/core_ext/kernel'
begin
require 'crate_ruby'
rescue LoadError => e
raise e
end
module ActiveRecord
class Base
def self.crate_connection(config) #:nodoc:
config = config.symbolize_keys
ConnectionAdapters::CrateAdapter.new(nil, logger, nil, config)
end
end
module ConnectionAdapters
class CrateAdapter < AbstractAdapter
class ColumnDefinition < ActiveRecord::ConnectionAdapters::ColumnDefinition
attr_accessor :array, :object
end
include Crate::SchemaStatements
include DatabaseStatements
ADAPTER_NAME = 'Crate'.freeze
def schema_creation # :nodoc:
Crate::SchemaCreation.new self
end
NATIVE_DATABASE_TYPES = {
boolean: {name: "boolean"},
string: {name: "string"},
integer: {name: "integer"},
float: {name: "float"},
binary: {name: "byte"},
datetime: {name: "timestamp"},
timestamp: {name: "timestamp"},
object: {name: "object"},
array: {name: "array"}
}
class BindSubstitution < Arel::Visitors::Crate # :nodoc:
include Arel::Visitors::BindVisitor
end
def initialize(connection, logger, pool, config)
@port = config[:port]
@host = config[:host]
super(connection, logger, pool)
@schema_cache = SchemaCache.new self
@visitor = Arel::Visitors::Crate.new self
@quoted_column_names = {}
connect
end
def adapter_name
ADAPTER_NAME
end
# Adds `:array` option to the default set provided by the
# AbstractAdapter
def prepare_column_options(column, types)
spec = super
spec[:array] = 'true' if column.respond_to?(:array) && column.array
spec
end
# Adds `:array` as a valid migration key
def migration_keys
super + [:array, :object_schema_behaviour, :object_schema]
end
#TODO check what call to use for active
def active?
true
end
#TODO
def clear_cache!
end
#TODO
def reset!
end
def supports_migrations?
true
end
def connect
@connection = CrateRuby::Client.new(["#{@host}:#{@port}"])
end
def columns(table_name) #:nodoc:
cols = @connection.table_structure(table_name).map do |field|
name = dotted_name(field[2])
CrateColumn.new(name, nil, field[4], nil)
end
cols
end
def dotted_name(name)
name.gsub(%r(\[['"]), '.').delete(%{'"]})
end
def tables
@connection.tables
end
# def quote_column_name(name) #:nodoc:
# @quoted_column_names[name] ||= %Q{"#{name.to_s}"}
# end
class CrateColumn < Column
def simplified_type(field_type)
case field_type
when /_array/i
:array
when /object/i
:object
else
super(field_type)
end
end
end
class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
# Crate doesn't support auto incrementing, therefore we need to manually
# set a primary key. You need to ensure that you always provide a unique
# id. This might be done via the
# +SecureRandom.uuid+ method and a +before_save+ callback, for instance.
def primary_key(name, type = :primary_key, options = {})
options[:primary_key] = true
column name, "STRING PRIMARY KEY", options
end
def column(name, type = nil, options = {})
super
column = self[name]
column.array = options[:array]
column.object = options[:object]
self
end
def object(name, options = {})
schema_behaviour = options.delete(:object_schema_behaviour)
type = schema_behaviour ? "object(#{schema_behaviour})" : "object"
schema = options.delete(:object_schema)
type = "#{type} as (#{object_schema_to_string(schema)})" if schema
column name, type, options.merge(object: true)
end
def array(name, options = {})
array_type = options.delete(:array_type)
raise "Array columns must specify an :array_type (e.g. array_type: :string)" unless array_type.present?
column name, "array(#{array_type})", options.merge(array: true)
end
private
def create_column_definition(name, type)
ColumnDefinition.new name, type
end
def object_schema_to_string(s)
ary = []
s.each_pair do |k, v|
if v.is_a?(Symbol)
ary << "#{k} #{v}"
elsif v.is_a?(Hash)
a = "array(#{v[:array]})"
ary << "#{k} #{a}"
end
end
ary.join(', ')
end
end
def create_table_definition(name, temporary, options, as = nil)
TableDefinition.new native_database_types, name, temporary, options, as
end
def native_database_types
NATIVE_DATABASE_TYPES
end
end
end
end
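# A hedged migration sketch using the object and array column helpers defined
# above; the table and column names are illustrative.
#
#   class CreateUsers < ActiveRecord::Migration
#     def change
#       create_table :users do |t|
#         t.string :name
#         t.array  :tags, array_type: :string
#         t.object :address, object_schema_behaviour: :strict,
#                  object_schema: { street: :string, zip: :integer }
#       end
#     end
#   end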
|
crate/activerecord-crate-adapter
|
lib/active_record/attribute_methods/crate_object.rb
|
<filename>lib/active_record/attribute_methods/crate_object.rb
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
module CrateObject
extend ActiveSupport::Concern
module ClassMethods
def load(object)
case object
when String
object.gsub!('=', ':')
object = JSON.parse("{#{object}}")
end
new(object.symbolize_keys)
end
def dump(object)
object ? object.to_hash : nil
end
end
def to_hash
h = {}
instance_variables.each do |var|
h.merge!({"#{var.to_s.gsub(/@/, '')}" => instance_variable_get(var)})
end
h
end
end
|
crate/activerecord-crate-adapter
|
activerecord-crate-adapter.gemspec
|
# coding: utf-8
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'activerecord-crate-adapter/version'
Gem::Specification.new do |spec|
spec.name = "activerecord-crate-adapter"
spec.version = ActiverecordCrateAdapter::VERSION
spec.authors = ["<NAME>", "CRATE Technology GmbH"]
spec.email = ["<EMAIL>"]
spec.summary = "ActiveRecord adapter for Crate"
spec.description = "ActiveRecord adapter for Crate, the distributed database for Docker."
spec.homepage = "https://crate.io"
spec.license = "Apache License, v2.0"
spec.files = `git ls-files -z`.split("\x0")
#spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.metadata = {
"bug_tracker_uri" => "https://github.com/crate/activerecord-crate-adapter/issues",
"changelog_uri" => "https://github.com/crate/activerecord-crate-adapter/blob/master/history.txt",
"source_code_uri" => "https://github.com/crate/activerecord-crate-adapter"
}
spec.add_development_dependency "bundler", "~> 1.5"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec", "~> 2.14"
spec.add_dependency('activerecord', '~> 4.1.0')
spec.add_dependency('arel', '>= 5.0.0')
spec.add_dependency('crate_ruby', '~> 0.0.7')
end
|
crate/activerecord-crate-adapter
|
spec/activerecord/connection_adapters/crate/table_definition_spec.rb
|
<reponame>crate/activerecord-crate-adapter<filename>spec/activerecord/connection_adapters/crate/table_definition_spec.rb
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
require_relative '../../../spec_helper'
describe ActiveRecord::ConnectionAdapters::CrateAdapter::TableDefinition do
describe '#object_schema_to_string' do
let(:td) { ActiveRecord::ConnectionAdapters::CrateAdapter::TableDefinition.new(nil, nil, nil, nil) }
it 'should simply set the keys and values' do
s = {street: :string, city: :string}
str = td.send(:object_schema_to_string, s)
str.should eq "street string, city string"
end
it 'should properly parse an array definition' do
s = {street: :string, city: :string, phones: {array: :string}}
str = td.send(:object_schema_to_string, s)
str.should eq "street string, city string, phones array(string)"
end
end
end
|
crate/activerecord-crate-adapter
|
spec/dummy/app/models/address.rb
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
require 'active_record/attribute_methods/crate_object'
class Address
attr_accessor :street, :city, :phones, :zip
include CrateObject
def initialize(opts)
@street = opts[:street]
@city = opts[:city]
@phones = opts[:phones]
@zip = opts[:zip]
end
end
|
crate/activerecord-crate-adapter
|
lib/active_record/connection_adapters/crate/schema_statements.rb
|
<reponame>crate/activerecord-crate-adapter
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
module ActiveRecord
module ConnectionAdapters
module Crate
class SchemaCreation < AbstractAdapter::SchemaCreation
private
def add_column_options!(sql, options)
if options[:array] || options[:column].try(:array)
sql.gsub!(/(.*)\s(\w+)$/, '\1 array(\2)')
end
super(sql, options)
end
end
module SchemaStatements
def primary_key(table_name)
res = @connection.execute("select constraint_name from information_schema.table_constraints
where table_name = '#{quote_table_name(table_name)}' and constraint_type = 'PRIMARY_KEY'")
res[0].try(:first).try(:first)
end
# overriding as Crate does not support "version primary key" syntax. Need to add the column type.
def initialize_schema_migrations_table
unless table_exists?('schema_migrations')
execute("CREATE TABLE schema_migrations (version string primary key INDEX using plain)")
end
end
def add_index(table_name, column_name, options = {}) #:nodoc:
puts
puts "#########"
puts "Adding indices is currently not supported by Crate"
puts "See issue: https://github.com/crate/crate/issues/733"
puts "#########"
puts
end
def remove_index(table_name, column_name, options = {}) #:nodoc:
puts
puts "#########"
puts "Dropping indices is currently not supported by Crate"
puts "See issue: https://github.com/crate/crate/issues/733"
puts "#########"
puts
end
end
end
end
end
|
crate/activerecord-crate-adapter
|
spec/test_server.rb
|
#!/usr/bin/env ruby
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
require 'net/http'
class TestServer
NAME = "TestCluster"
HOST = "127.0.0.1"
PORT = 44200
TIMEOUT = 30
def initialize(crate_home = '~/crate', run_in_background = false)
@crate_home = crate_home
@run_in_background = run_in_background
end
def start
cmd = "sh #{File.join(@crate_home, 'bin', 'crate')} #{start_params}"
@pid = spawn(cmd, out: "/tmp/crate_test_server.out",
err: "/tmp/crate_test_server.err")
Process.detach(@pid)
puts 'Starting Crate... (this will take a few seconds)'
time_slept = 0
interval = 2
while true
if !alive? and time_slept > TIMEOUT
puts "Crate hasn't started for #{TIMEOUT} seconds. Giving up now..."
exit
end
if alive? and @run_in_background
exit
end
sleep(interval)
time_slept += interval
end
end
private
def start_params
"-Des.index.storage.type=memory " +
"-Des.node.name=#{NAME} " +
"-Des.cluster.name=Testing#{PORT} " +
"-Des.http.port=#{PORT}-#{PORT} " +
"-Des.network.host=localhost " +
"-Des.discovery.zen.ping.multicast.enabled=false " +
"-Des.es.api.enabled=true"
end
def alive?
req = Net::HTTP::Get.new('/')
resp = Net::HTTP.new(HOST, PORT)
begin
response = resp.start { |http| http.request(req) }
response.code == "200" ? true : false
rescue Errno::ECONNREFUSED
false
end
end
end
server = TestServer.new(*ARGV)
server.start
|
crate/activerecord-crate-adapter
|
spec/data_types/array_spec.rb
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
require_relative '../spec_helper'
describe "Post#array" do
before(:all) do
ActiveRecord::Migration.class_eval do
create_table :posts do |t|
t.string :title
t.integer :comment_count
t.array :tags, array_type: :string
t.array :votes, array_type: :integer
t.array :bool_arr, array_type: :boolean
end
end
ensure_status('yellow')
Post.reset_column_information
end
after(:all) do
ActiveRecord::Migration.class_eval do
drop_table :posts
end
end
describe "#array column type" do
let(:array) { %w(hot fresh) }
let(:votes) { [9, 8, 7] }
let(:bool_arr) { [true, false, true] }
let(:post) { Post.create!(title: 'Arrays are awesome', tags: array, votes: votes, bool_arr: bool_arr) }
context 'create' do
it 'should store and return an array' do
p = Post.find(post.id)
p.tags.should be_a Array
p.votes.should be_a Array
p.bool_arr.should be_a Array
p.tags.should eq array
p.votes.should eq votes
p.bool_arr.should eq bool_arr
end
it 'should find the post by array value' do
post = Post.create!(title: 'Arrays are awesome', tags: array, votes: votes)
refresh_posts
Post.where("'fresh' = ANY (tags)").should include(post)
end
end
context '#update' do
it 'should update an existing array value' do
post = Post.create!(title: 'Arrays are awesome', tags: array, votes: votes)
refresh_posts
new_tags = %w(ok)
post.update_attributes!(tags: new_tags)
refresh_posts
post.reload
post.tags.should eq new_tags
end
end
end
end
|
crate/activerecord-crate-adapter
|
spec/models/post_spec.rb
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
require_relative '../spec_helper'
describe Post do
before(:all) do
ActiveRecord::Migration.class_eval do
create_table :posts do |t|
t.string :title
t.integer :views
end
end
ensure_status('yellow')
Post.reset_column_information
end
after(:all) do
ActiveRecord::Migration.class_eval do
drop_table :posts
end
end
let(:params) { {title: "Crate rocks", views: 10000} }
context 'initialization' do
it 'should initialize a post object with all columns' do
post = Post.new(params)
post.should be_a(Post)
post.title.should eq "Crate rocks"
post.views.should eq 10000
end
end
context 'persistence' do
before do
@post = Post.create!(params)
end
after do
@post.destroy
end
it 'should persist the record to the database' do
@post.persisted?.should eq true
refresh_posts
Post.count.should eq 1
end
end
context 'deletion' do
before do
@post = Post.create!(params)
end
it 'should destroy the record in the database' do
@post.destroy
Post.where(id: @post.id).should be_empty
end
end
describe 'existing record manipulation' do
before do
@post = Post.create!(params)
end
after do
@post.destroy
end
context 'find' do
it 'should find the crated record' do
post = Post.where(id: @post.id).first
post.id.should eq(@post.id)
end
it 'should find the crated record by title' do
refresh_posts
Post.where(title: @post.title).count.should eq 1
post = Post.where(title: @post.title).first
post.id.should eq(@post.id)
end
end
context 'update' do
it 'should update the record' do
@post.update_attributes(title: 'Crate Dope')
@post.reload.title.should eq('Crate Dope')
end
end
end
describe 'sql input sanitization' do
before do
@post = Post.create!(params)
end
after do
@post.destroy
end
it 'should not return all records but sanitize string' do
sql = Post.where(id: "#{@post.id} or 1=1").to_sql
sql.should match(/'#{@post.id} or 1=1'/)
end
it 'should not drop the table but sanitize string' do
Post.where(id: "#{@post.title}; DROP TABLE POST")
refresh_posts
Post.last.id.should eq @post.id
end
end
end
|
elia/localizer-rails-plugin
|
localizer.gemspec
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{localizer}
s.version = "0.1.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["<NAME>"]
s.date = %q{2010-01-09}
s.description = %q{Includes localization helpers for: routing, views and controllers.}
s.email = %q{<EMAIL>}
s.extra_rdoc_files = [
"README"
]
s.files = [
"MIT-LICENSE",
"README",
"Rakefile",
"VERSION",
"init.rb",
"install.rb",
"lib/localizer.rb",
"lib/localizer/assets/flags/de.gif",
"lib/localizer/assets/flags/de_big.png",
"lib/localizer/assets/flags/de_medium.png",
"lib/localizer/assets/flags/en.gif",
"lib/localizer/assets/flags/en_big.png",
"lib/localizer/assets/flags/en_medium.png",
"lib/localizer/assets/flags/es.gif",
"lib/localizer/assets/flags/es_big.png",
"lib/localizer/assets/flags/es_medium.png",
"lib/localizer/assets/flags/fr.gif",
"lib/localizer/assets/flags/fr_big.png",
"lib/localizer/assets/flags/fr_medium.png",
"lib/localizer/assets/flags/gb.gif",
"lib/localizer/assets/flags/gb_big.png",
"lib/localizer/assets/flags/gb_medium.png",
"lib/localizer/assets/flags/it.gif",
"lib/localizer/assets/flags/it_big.png",
"lib/localizer/assets/flags/it_medium.png",
"lib/localizer/assets/flags/shadow.gif",
"lib/localizer/assets/flags/us.gif",
"lib/localizer/assets/flags/us_big.png",
"lib/localizer/assets/flags/us_medium.png",
"lib/localizer/config.rb",
"lib/localizer/controller.rb",
"lib/localizer/helper.rb",
"lib/localizer/routes.rb",
"localizer.gemspec",
"tasks/localizer_tasks.rake",
"test/localizer_test.rb",
"test/test_helper.rb",
"uninstall.rb"
]
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.5}
s.summary = %q{Rails Plugin for easing simple sites localization.}
s.test_files = [
"test/localizer_test.rb",
"test/test_helper.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, [">= 1.2.9"])
else
s.add_dependency(%q<rspec>, [">= 1.2.9"])
end
else
s.add_dependency(%q<rspec>, [">= 1.2.9"])
end
end
|
elia/localizer-rails-plugin
|
init.rb
|
# Include hook code here
require 'localizer'
class ActionController::Base
include Localizer::Controller
end
class ActionController::Routing::RouteSet::Mapper
include Localizer::Routes
end
class ActionView::Base
include Localizer::Helper
end
|
elia/localizer-rails-plugin
|
lib/localizer/routes.rb
|
<gh_stars>1-10
module Localizer
module Routes
# Usage:
#
# include Localizer::Routes
# localize_paths_for :site, :index, :about, :contact
# localize_path :site, :home
def localize_path controller, action_name
self.with_options :controller => controller, :action => action_name do |site|
site.with_options :requirements => {:locale => /\w\w/} do |site_with_locale|
site_with_locale.connect ":locale" if action_name.to_s == 'index'
site_with_locale.connect ":locale/#{action_name}"
site_with_locale.connect ":locale/#{action_name}.htm"
site_with_locale.send action_name, ":locale/#{action_name}.html"
end
# site.with_options :locale => 'it' do |site|
site.connect "#{action_name}"
site.connect "#{action_name}.htm"
site.send "#{action_name}_default", "#{action_name}.html"
# end
end
end
def localize_paths_for controller, *action_names
for action_name in action_names
Rails.logger.error "******** #{controller} => #{action_name.to_s}"
localize_path controller, action_name.to_s
end
end
def include_localized_routes!
Localizer.config.controllers.each_pair do |controller, pages|
localize_paths_for controller, *pages
end
end
def localized_resource resource_name, options = {}
options.merge! :path_prefix => ':locale',
:requirements => {:locale => Localizer.config.locales}
resources resource_name, options do |resource|
yield resource if block_given?
end
end
end
end
|
elia/localizer-rails-plugin
|
lib/localizer.rb
|
# Localizer
require 'localizer/config'
require 'localizer/routes'
require 'localizer/controller'
require 'localizer/helper'
|
elia/localizer-rails-plugin
|
tasks/localizer_tasks.rake
|
# desc "Explaining what the task does"
# task :localizer do
# # Task goes here
# end
namespace :localizer do
desc "Install localizer assets in public folder."
task :assets do
require 'fileutils'
localizer_assets = File.join(File.dirname(__FILE__), '..', 'lib', 'localizer', 'assets')
FileUtils.cp_r File.join(localizer_assets, 'flags'), File.join(Rails.public_path, 'images')
end
end
|
elia/localizer-rails-plugin
|
lib/localizer/controller.rb
|
module Localizer
module Controller
module InstanceMethods
private
def set_locale
I18n.locale = params[:locale].try(:to_sym) || I18n.default_locale
end
def locale
I18n.locale
end
end
def self.included(controller)
# controller.extend ClassMethods
controller.send :include, InstanceMethods
controller.send :before_filter, :set_locale
controller.helper_method :locale
end
end
end
|
elia/localizer-rails-plugin
|
lib/localizer/config.rb
|
module Localizer
class Config
attr_accessor :controllers, :locales
end
def self.config
unless @config
@config = Config.new
@config.controllers = {}
end
yield @config if block_given?
return @config
end
def self.localize! *pages
options = pages.last.kind_of?(Hash) ? pages.pop : {}
raise ArgumentError, "Missing option :from => controller" unless options[:from]
config.controllers[options[:from].to_s] = pages
Rails.logger.error "******" + @config.inspect
config.locales = options[:to]
end
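# Illustrative only (not from the original plugin); the controller, page
# names and locales below are hypothetical. This is roughly how the helper
# appears to be meant to be called, e.g. from an initializer:
#
#   Localizer.localize! :index, :about, :contact, :from => :home, :to => %w(it en)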
end
|
elia/localizer-rails-plugin
|
lib/localizer/helper.rb
|
<filename>lib/localizer/helper.rb
module Localizer
module Helper
def locale
params[:locale] || 'it'
end
def localized_root
locale == 'it' ? root_path : local_root_path(locale)
end
def with_locale locale
locale_backup = I18n.locale
I18n.locale = locale
result = yield locale
I18n.locale = locale_backup
return result
end
def when_locale locale
locale_backup = I18n.locale
I18n.locale = locale
result = yield locale
I18n.locale = locale_backup
return result
end
def locales options = {}
locales = Localizer.config.locales
locales = locales - [ options[:exclude] ] if options[:exclude]
locales
end
def link_to_locale locale, name
active = (I18n.locale.to_s == locale.to_s)
link_to image_tag("flags/#{locale}.png", :title => name),
send( controller.action_name+'_path', locale ), :class => (active ? 'active' : '')
end
def localized_current_path locale
url_for(:action => nil, :locale => locale)
end
end
end
|
erikthered/chef-jboss4
|
recipes/default.rb
|
#
# Cookbook Name:: chef-jboss4
# Recipe:: default
#
# Copyright (C) 2013 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "java"
include_recipe "user"
package "unzip" do
action :install
end
jboss_home = node['jboss']['home']
ruby_block "set-env-jboss-home" do
block do
ENV["JBOSS_HOME"] = jboss_home
end
not_if { ENV["JBOSS_HOME"] == jboss_home }
end
file "/etc/profile.d/jboss.sh" do
content "export JBOSS_HOME=#{node['jboss']['home']}"
mode 0755
end
user_account node['jboss']['user'] do
comment "JBoss application server user"
action :create
end
ark 'jboss' do
url node['jboss']['url']
checksum node['jboss']['checksum']
version node['jboss']['version']
owner node['jboss']['user']
group node['jboss']['user']
action :install
end
template '/etc/init.d/jboss' do
source node['jboss']['initscript']
mode '0755'
owner 'root'
group 'root'
end
service 'jboss' do
action [:enable,:start]
end
|
erikthered/chef-jboss4
|
metadata.rb
|
<reponame>erikthered/chef-jboss4
name "jboss"
maintainer "<NAME>"
maintainer_email "<EMAIL>"
license "Apache 2.0"
description "Installs/Configures chef-jboss4"
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version "0.1.0"
depends "ark"
depends "java"
depends "user"
supports "centos"
supports "ubuntu"
|
erikthered/chef-jboss4
|
attributes/default.rb
|
<gh_stars>1-10
default['jboss']['url'] = "http://sourceforge.net/projects/jboss/files/JBoss/JBoss-4.2.3.GA/jboss-4.2.3.GA-jdk6.zip"
default['jboss']['checksum'] = "48797e150ee7dd6e035290fab86f17a5fe7d174b4980e941b833369b80273620"
default['jboss']['version'] = "4.2.3.GA"
default['jboss']['user'] = "jboss"
default['jboss']['home'] = "/usr/local/jboss"
# Runtime options
default['jboss']['host'] = "0.0.0.0"
default['jboss']['configuration'] = "default"
case platform
when "redhat", "centos", "scientific", "fedora", "suse", "amazon"
default['jboss']['initscript'] = "jboss_init_redhat.sh.erb"
when "debian", "ubuntu"
default['jboss']['initscript'] = "jboss_init_ubuntu.sh.erb"
else
default['jboss']['initscript'] = "jboss_init_redhat.sh.erb"
end
|
dtk/dtk-common-core
|
lib/pretty_print_form.rb
|
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module DTK
module Common
module PrettyPrintForm
# opts can have terms:
# :namespace
# :version
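#
# For example (illustrative; the module name, namespace and version are hypothetical):
#
#   module_ref('mysql')                                      # => "mysql"
#   module_ref('mysql', namespace: 'dtk', version: '1.0.0')  # => "dtk/mysql(1.0.0)"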
def self.module_ref(module_name, opts = {})
ret = opts[:namespace] ? "#{opts[:namespace]}/#{module_name}" : module_name
ret << "(#{opts[:version]})" if opts[:version]
ret
end
end
end
end
|
dtk/dtk-common-core
|
lib/log.rb
|
<filename>lib/log.rb<gh_stars>0
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#TODO: bring in a production quality ruby logging capability that gets wrapped here
#TODO: would put this in config
module DTK
module Log
Config = Hash.new
Config[:print_time] = false
Config[:print_method] = false
def self.info(msg, out = $stdout)
out << "info: "
out << format(msg)
end
def self.debug(msg, out = $stdout)
out << "debug: "
out << format(msg)
end
def self.error(msg, out = $stdout)
out << "error: "
out << format(msg)
end
def self.info_pp(obj, out = $stdout)
out << Aux::pp_form(obj)
end
def self.debug_pp(obj, out = $stdout)
out << Aux::pp_form(obj)
obj
end
private
def self.format(msg)
ret = String.new
ret << "#{Time.now}: " if Config[:print_time]
ret << "in fn: #{this_parent_method}: " if Config[:print_method]
ret << msg
ret << "\n"
end
end
end
|
dtk/dtk-common-core
|
lib/errors/errors.rb
|
<filename>lib/errors/errors.rb
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#TODO: should have a Common namespace put in after DTK
module DTK
class Error < NameError
def self.top_error_in_hash()
{:error => :Error}
end
def initialize(msg="",name_or_opts=nil)
name = nil
opts = Hash.new
if name_or_opts.kind_of?(Hash)
opts = name_or_opts
else
name = name_or_opts
end
super(msg,name)
#TODO: might make default to be :log_error => false
unless opts.has_key?(:log_error) and not opts[:log_error]
if caller_info = opts[:caller_info]
caller_depth = (caller_info.kind_of?(Hash) ? caller_info[:depth] : nil)||DefaultCallerDepth
Log.info_pp(caller[CallerOffset,caller_depth])
end
end
end
CallerOffset = 3
DefaultCallerDepth = 3
def to_hash()
if to_s == ""
Error.top_error_in_hash()
elsif name.nil?
{:error => {:Error => {:msg => to_s}}}
else
{:error => {name.to_sym => {:msg => to_s}}}
end
end
end
class R8ParseError < Error
def initialize(msg,calling_obj=nil)
msg = (calling_obj ? "#{msg} in class #{calling_obj.class.to_s}" : msg)
super(msg)
end
end
class ErrorUsage < Error
end
class ErrorConstraintViolations < ErrorUsage
def initialize(violations)
super(msg(violations),:ConstraintViolations)
end
private
def msg(violations)
return ("constraint violation: " + violations) if violations.kind_of?(String)
v_with_text = violations.compact
if v_with_text.size < 2
return "constraint violations"
elsif v_with_text.size == 2
return "constraint violations: #{v_with_text[1]}"
end
ret = "constraint violations: "
ret << (v_with_text.first == :or ? "(at least) one of " : "")
ret << "(#{v_with_text[1..v_with_text.size-1].join(", ")})"
end
end
class ErrorUserInputNeeded < ErrorUsage
def initialize(needed_inputs)
super()
@needed_inputs = needed_inputs
end
def to_s()
ret = "following inputs are needed:\n"
@needed_inputs.each do |k,v|
ret << " #{k}: type=#{v[:type]}; description=#{v[:description]}\n"
end
ret
end
end
class ErrorNotImplemented < Error
def initialize(msg="NotImplemented error")
super("in #{this_parent_parent_method}: #{msg}",:NotImplemented)
end
end
class ErrorNotFound < Error
attr_reader :obj_type,:obj_value
def initialize(obj_type=nil,obj_value=nil)
@obj_type = obj_type
@obj_value = obj_value
end
def to_s()
if obj_type.nil?
"NotFound error:"
elsif obj_value.nil?
"NotFound error: type = #{@obj_type.to_s}"
else
"NotFound error: #{@obj_type.to_s} = #{@obj_value.to_s}"
end
end
def to_hash()
if obj_type.nil?
{:error => :NotFound}
elsif obj_value.nil?
{:error => {:NotFound => {:type => @obj_type}}}
else
{:error => {:NotFound => {:type => @obj_type, :value => @obj_value}}}
end
end
end
class ErrorAMQP < Error
def to_s()
"AMQP error"
end
end
class ErrorAMQPQueueDoesNotExist < ErrorAMQP
attr_reader :queue_name
def initialize(queue_name)
@queue_name = queue_name
end
def to_s()
"queue #{queue_name} does not exist"
end
end
end
|
dtk/dtk-common-core
|
lib/auxiliary.rb
|
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#TODO: will start moving over to using DtkCommon namespace; versions in DtkCommon namespace also in DTK::Common are the upgraded versions
require 'etc'
module DtkCommon
module Aux
def self.dtk_instance_repo_username(tenant_id=nil)
instance_unique_id = get_ec2_instance_id() || get_macaddress().gsub(/:/,'-')
tenant_id ||= ::DTK::Common::Aux.running_process_user()
"dtk-#{instance_unique_id}--#{tenant_id}"
end
end
end
module DTK
module Common
module AuxMixin
def get_ssh_rsa_pub_key()
path = "#{running_process_home_dir()}/.ssh/id_rsa.pub"
begin
File.open(path){|f|f.read}.chomp
rescue Errno::ENOENT
raise Error.new("user (#{ENV['USER']}) does not have a public key under #{path}")
rescue => e
raise e
end
end
def hash_subset(hash,keys_subset,opts={})
keys_subset.inject(Hash.new) do |h,k|
index = k.kind_of?(Hash) ? k.keys.first : k
if opts[:no_non_nil] and hash[index].nil? then h
elsif not hash.has_key?(index) then h
else
key = k.kind_of?(Hash) ? k.values.first : k
val = hash[index]
h.merge(key => val)
end
end
end
def convert_keys_to_symbols(hash)
hash.keys.inject(Hash.new){|h,k|h.merge(k.to_sym => hash[k])}
end
def dtk_instance_repo_username()
#on ec2 mac addresses can change; so selectively pick the instance id on ec2
unique_id = get_ec2_instance_id() || get_macaddress().gsub(/:/,'-')
"dtk-#{unique_id}"
end
def update_ssh_known_hosts(remote_host)
fingerprint = `ssh-keyscan -H -t rsa #{remote_host}`
ssh_known_hosts = "#{running_process_home_dir()}/.ssh/known_hosts"
if File.file?(ssh_known_hosts)
`ssh-keygen -f "#{ssh_known_hosts}" -R #{remote_host}`
end
File.open(ssh_known_hosts,"a"){|f| f << "#{fingerprint}\n"}
end
def get_macaddress()
return @macaddress if @macaddress
#TODO: may just use underlying routines for facter - macaddress
require 'facter'
collection = ::Facter.collection
@macaddress = collection.fact('macaddress').value
end
def get_ec2_public_dns()
get_ec2_meta_data('public-hostname')
end
def get_ec2_instance_id()
# @ec2_instance_id_cached is used because a previous attempt may have fetched this info and gotten a nil result
return @ec2_instance_id if @ec2_instance_id_cached
@ec2_instance_id_cached = true
@ec2_instance_id = get_ec2_meta_data('instance-id')
end
def snake_to_camel_case(snake_case)
snake_case.gsub(/(^|_)(.)/) { $2.upcase }
end
def platform_is_linux?()
RUBY_PLATFORM.downcase.include?("linux")
end
def platform_is_windows?()
RUBY_PLATFORM.downcase.include?("mswin") or RUBY_PLATFORM.downcase.include?("mingw")
end
def running_process_user()
if platform_is_windows?()
Etc.getlogin
else
Etc.getpwuid(Process.uid).name
end
end
def running_process_home_dir()
if platform_is_windows?()
File.expand_path('~')
else
Etc.getpwuid(Process.uid).dir
end
end
private
def get_ec2_meta_data(var)
#Fragments taken from Puppetlabs facter ec2
require 'open-uri'
require 'timeout'
ret = nil
begin
url = "http://169.254.169.254:80/"
Timeout::timeout(WaitSec) {open(url)}
ret = OpenURI.open_uri("http://169.254.169.254/2008-02-01/meta-data/#{var}").read
rescue Timeout::Error
rescue
#TODO: unexpected; write to log what the error is
end
ret
end
WaitSec = 2
end
module Aux
class << self
include AuxMixin
end
end
end
end
|
dtk/dtk-common-core
|
lib/errors/rest_error.rb
|
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#TODO: should have a Common namespace put in after DTK
#When creating these objects, an internal error class is passed to the creation functions
module DTK
class RestError
def self.create(err)
if RestUsageError.match?(err)
RestUsageError.new(err)
elsif NotFound.match?(err)
NotFound.new(err)
else
Internal.new(err)
end
end
def initialize(err)
@code = nil
@message = nil
end
def hash_form()
{:code => code||:error, :message => message||''}
end
private
attr_reader :code, :message
public
#it's either a usage error or an internal (application) bug
class Internal < RestError
def hash_form()
super.merge(:internal => true)
end
private
def initialize(err)
super
@message = "#{err.to_s} (#{err.backtrace.first})"
end
end
class RestUsageError < RestError
def initialize(err)
super
@message = err.to_s
end
def self.match?(err)
err.kind_of?(ErrorUsage)
end
end
class NotFound < RestUsageError
def self.match?(err)
err.kind_of?(::NoMethodError) and is_controller_method(err)
end
def initialize(err)
super
@code = :not_found
@message = "'#{err.name}' was not found"
end
private
def self.is_controller_method(err)
err.to_s =~ /#<XYZ::.+Controller:/
end
end
end
end
|
dtk/dtk-common-core
|
lib/response.rb
|
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'restclient'
require 'json'
module DTK
module Common
module ResponseTokens
StatusOK = "ok"
StatusNotok = "notok"
DataField = "data"
StatusField = "status"
ErrorsField = "errors"
ValidationField = "validation"
ErrorsSubFieldCode = "code"
ErrorsOriginalException = "original_exception"
GenericError = "error"
end
module ResponseConstants
# these terms don't get properly marshalled
NilTerm = '***NIL***'
BooleanTrue = '***TRUE***'
BooleanFalse = '***FALSE***'
end
class Response < Hash
include ResponseTokens
module Term
include ResponseConstants
def self.nil()
NilTerm
end
module Boolean
def self.true()
Term::BooleanTrue
end
def self.false()
Term::BooleanFalse
end
end
end
def initialize(hash={})
super()
replace(hash)
end
def ok?()
self[StatusField] == StatusOK
end
def validation_response?
!self[ValidationField].nil?
end
def validation_message
self[ValidationField]['message']
end
def error_message
self["errors"] ? (self["errors"].map { |e| e["message"]}).join(', ') : nil
end
def validation_actions
return self[ValidationField]['actions_needed']
end
def data(*data_keys)
data = self[DataField]
case data_keys.size
when 0 then data||{}
when 1 then data && data[internal_key_form(data_keys.first)]
else data_keys.map{|key|data && data[internal_key_form(key)]}.compact
end
end
def data_hash_form(*data_keys)
ret = Hash.new
unless data = self[DataField]
return ret
end
if data_keys.size == 0
data.inject(Hash.new){|h,(k,v)|h.merge(external_key_form(k) => v)}
else
data_keys.each do |k|
if v = data[internal_key_form(k)]
ret.merge!(external_key_form(k) => v)
end
end
ret
end
end
def set_data(*data_values)
self[DataField]=data_values
end
def set_data_hash(data_hash)
self[DataField]=data_hash
end
def data_ret_and_remove!(*data_keys)
data = data()
data_keys.map{|key|data.delete(internal_key_form(key))}
end
def add_data_value!(key,value)
data()[key.to_s] = value
self
end
def internal_key_form(key)
key.to_s
end
def external_key_form(key)
key.to_sym
end
private :internal_key_form,:external_key_form
module ErrorMixin
def ok?()
false
end
end
class Error < self
include ErrorMixin
def initialize(hash={})
super(hash)
end
end
class RestClientWrapper
class << self
include ResponseTokens
def get_raw(url,body={},opts={},&block)
error_handling(opts) do
url_with_params = generate_query_params_url(url, body)
raw_response = ::RestClient::Resource.new(url_with_params,opts).get()
block ? block.call(raw_response) : raw_response
end
end
def get(url, body={}, opts={})
get_raw(url,body, opts){|raw_response|Response.new(json_parse_if_needed(raw_response))}
end
def post_raw(url,body={},opts={},&block)
error_handling(opts) do
raw_response = ::RestClient::Resource.new(url,opts).post(body)
block ? block.call(raw_response) : raw_response
end
end
def delete_raw(url,body={},opts={},&block)
error_handling(opts) do
# DELETE method supports only query params
url_with_params = generate_query_params_url(url, body)
raw_response = ::RestClient::Resource.new(url_with_params,opts).delete()
block ? block.call(raw_response) : raw_response
end
end
def post(url,body={},opts={})
post_raw(url,body,opts){|raw_response|Response.new(json_parse_if_needed(raw_response))}
end
def delete(url, body={}, opts={})
delete_raw(url,body,opts){|raw_response|Response.new(json_parse_if_needed(raw_response))}
end
def json_parse_if_needed(item)
item.kind_of?(String) ? JSON.parse(item) : item
end
private
def generate_query_params_url(url, params_hash)
if params_hash.empty?
return url
else
query_params_string = params_hash.map { |k,v| "#{CGI.escape(k.to_s)}=#{CGI.escape(v.to_s)}" }.join('&')
return url.concat('?').concat(query_params_string)
end
end
def safe_json_parse(string)
begin
JSON.parse(string)
rescue => e
nil
end
end
def errors_field(msg=nil)
[msg.nil? ? {} : {'message' => msg}]
end
def error_handling(opts={},&block)
begin
block.call
rescue ::RestClient::ResourceNotFound, RestClient::Request::Unauthorized, RestClient::BadRequest,::RestClient::InternalServerError => e
# with the latest set of changes we treat this as a special case, since most of the legacy code expects a Response class
parsed_response = safe_json_parse(e.response) || {}
errors = parsed_response['errors'] || parsed_response['error'] || errors_field('Server Error')
Response.new(StatusField => StatusNotok, ErrorsField => errors)
rescue ::RestClient::Forbidden => e
return error_response({ErrorsSubFieldCode => RestClientErrors[e.class.to_s]||GenericError, ErrorsOriginalException => e},opts) unless e.inspect.to_s.include?("PG::Error")
errors = {"code" => "pg_error", "message" => e.inspect.to_s.strip, ErrorsOriginalException => e}
error_response(errors)
rescue ::RestClient::ServerBrokeConnection,::RestClient::RequestTimeout, Errno::ECONNREFUSED => e
error_response({ErrorsSubFieldCode => RestClientErrors[e.class.to_s]||GenericError, ErrorsOriginalException => e},opts)
rescue Exception => e
error_response({ErrorsSubFieldCode => RestClientErrors[e.class.to_s], ErrorsOriginalException => e},opts)
end
end
def error_response(error_or_errors,opts={})
errors = error_or_errors.kind_of?(Hash) ? [error_or_errors] : error_or_errors
(opts[:error_response_class]||Error).new(StatusField => StatusNotok, ErrorsField => errors)
end
RestClientErrors = {
"RestClient::Forbidden" => "forbidden",
"RestClient::ServerBrokeConnection" => "broken",
"RestClient::Request::Unauthorized" => "unauthorized",
"RestClient::ServiceUnavailable" => "unavailable",
"RestClient::InternalServerError" => "internal_server_error",
"RestClient::RequestTimeout" => "timeout",
"RestClient::ResourceNotFound" => "resource_not_found",
"Errno::ECONNREFUSED" => "connection_refused"
}
end
end
end
end
end
|
dtk/dtk-common-core
|
dtk-common-core.gemspec
|
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/dtk-common-core/version', __FILE__)
Gem::Specification.new do |gem|
gem.authors = ["<NAME>"]
gem.email = ["<EMAIL>"]
gem.description = %q{DTK Common Core is a shared library used by several DTK components.}
gem.summary = %q{Common libraries used for DTK CLI client.}
gem.homepage = "https://github.com/rich-reactor8/dtk-common-repo"
gem.licenses = ["Apache-2.0"]
gem.files = `git ls-files`.split($\)
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "dtk-common-core"
gem.require_paths = ["lib"]
gem.version = DtkCommonCore::VERSION
gem.add_dependency 'rest-client', '~> 1.7.3'
end
|
dtk/dtk-common-core
|
lib/hash_object.rb
|
#
# Copyright (C) 2010-2016 dtk contributors
#
# This file is part of the dtk project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module DTK
module Common
class SimpleHashObject < Hash
def initialize(initial_val=nil,&block)
block ? super(&block) : super()
if initial_val
replace(initial_val)
end
end
end
# require 'active_support/ordered_hash'
# class SimpleOrderedHash < ::ActiveSupport::OrderedHash
class SimpleOrderedHash < Hash
def initialize(elements=[])
super()
elements = [elements] unless elements.kind_of?(Array)
elements.each{|el|self[el.keys.first] = el.values.first}
end
#set unless value is nil
def set_unless_nil(k,v)
self[k] = v unless v.nil?
end
end
class PrettyPrintHash < SimpleOrderedHash
#field with '?' suffix means optionally add, depending on whether the name is present and non-null in the source
#if block is given then apply to source[name] rather than returning just source[name]
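#
# For example (illustrative; the component object and its fields are hypothetical):
#
#   PrettyPrintHash.new.add(component, :id, :display_name, :description?)
#   PrettyPrintHash.new.add(component, :created_at) { |t| t.to_s }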
def add(model_object,*keys,&block)
keys.each do |key|
#if marked as optional skip if not present
if key.to_s =~ /(^.+)\?$/
key = $1.to_sym
next unless model_object[key]
end
#special treatment of :id
val = (key == :id ? model_object.id : model_object[key])
self[key] = (block ? block.call(val) : val)
end
self
end
def slice(*keys)
keys.inject(self.class.new){|h,k|h.merge(k => self[k])}
end
end
end
end
|
tarimo-lab/colorfulcamels
|
db/migrate/20200615024752_create_posts.rb
|
<filename>db/migrate/20200615024752_create_posts.rb
class CreatePosts < ActiveRecord::Migration[5.2]
def change
create_table :posts do |t|
t.text :title
t.text :body
t.boolean :approved, default: false
t.references :user, foreign_key: true
t.boolean :anonymous, default: false
t.boolean :public, default: false
t.integer :approves, default: 0
t.integer :disapproves, default: 0
t.timestamps
end
end
end
|
tarimo-lab/colorfulcamels
|
app/controllers/home_controller.rb
|
class HomeController < ApplicationController
before_action :check_user
def index
if @user
@posts = Post.approved_all(params[:page])
else
@posts = Post.approved_public(params[:page])
end
#@popular = @posts.order('likes DESC').first(10)
#@latest = @posts.order('id DESC').first(10)
end
def about
end
def search
end
def feedback
end
end
|
tarimo-lab/colorfulcamels
|
app/models/user.rb
|
class User < ApplicationRecord
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :validatable
has_many :posts
has_many :likes
validates :first_name, :last_name, :affiliation, presence: true
validates :username, :email, uniqueness: true, presence: true
auto_strip_attributes :first_name, :last_name, :username, :squish => true
before_save :capitalize_names
def capitalize_names
self.first_name = self.first_name.to_s.capitalize.titleize
self.last_name = self.last_name.to_s.capitalize.titleize
end
def to_s
first_name + " " + last_name
end
end
|
tarimo-lab/colorfulcamels
|
app/models/tag.rb
|
class Tag < ApplicationRecord
has_many :taggings
has_many :posts, through: :taggings
end
|
tarimo-lab/colorfulcamels
|
db/seeds.rb
|
<gh_stars>1-10
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
require 'faker'
%w[BlackLivesMatter AllLivesMatter Inequality Oppression Privilege Racism Camels Conn ConnecticutCollege NewLondon StudentLife FacultyLife \
StaffLife Student Staff Faculty Minority PersonOfColor MinorityGroup BIPOC QTBIPOC].each do |tag|
Tag.create(name:tag)
end
10.times {
User.create(
first_name: Faker::Name.first_name,
last_name: Faker::Name.last_name,
email: Faker::Internet.unique.free_email,
username: Faker::Internet.unique.username,
password: <PASSWORD>.password(min_length: 10, max_length: 20),
image_url: Faker::Avatar.image(size: "50x50"),
role: Faker::Number.within(range: 1..3),
bio: Faker::Lorem.paragraph(sentence_count: 2, supplemental: false, random_sentences_to_add: 4),
affiliation: Faker::Number.within(range: 0..6)
)
}
tag_count = Tag.count;
50.times {
post = Post.create(
title: Faker::Lorem.sentence,
body: Faker::Lorem.paragraphs(number: 5),
approved: Faker::Boolean.boolean,
user_id: Faker::Number.within(range: 0..10),
anonymous: Faker::Boolean.boolean,
approves: Faker::Number.within(range: 0..10),
disapproves: Faker::Number.within(range: 0..5),
public: Faker::Boolean.boolean
)
Faker::Number.within(range: 0..5).times {
Tagging.create(post_id:post.id,tag_id:Faker::Number.within(range: 0..tag_count))
}
Faker::Number.within(range: 0..5).times {
Comment.create(
user_id: Faker::Number.within(range: 0..10),
body: Faker::Lorem.paragraph(sentence_count: 2, supplemental: false, random_sentences_to_add: 4),
commentable_id: post.id,
commentable_type: "Post"
)
comment_count = Comment.count;
Comment.create(
user_id: Faker::Number.within(range: 0..10),
body: Faker::Lorem.paragraph(sentence_count: 2, supplemental: false, random_sentences_to_add: 4),
commentable_id: Faker::Number.within(range: 0..comment_count),
commentable_type: "Comment"
)
}
}
50.times {
Like.create(
user_id: Faker::Number.within(range: 0..10),
post_id: Faker::Number.within(range: 0..50),
)
}
|
tarimo-lab/colorfulcamels
|
app/models/comment.rb
|
class Comment < ApplicationRecord
belongs_to :user
belongs_to :commentable, polymorphic: true
has_many :comments, as: :commentable, dependent: :destroy
end
|
tarimo-lab/colorfulcamels
|
app/controllers/users_controller.rb
|
<reponame>tarimo-lab/colorfulcamels<filename>app/controllers/users_controller.rb
class UsersController < ApplicationController
before_action :authenticate_user!
before_action :set_current_user
def show
@posts = Post.approved_all(params[:page])
end
end
|
tarimo-lab/colorfulcamels
|
config/routes.rb
|
<reponame>tarimo-lab/colorfulcamels
Rails.application.routes.draw do
devise_for :users
mount ActionCable.server => '/cable'
root 'home#index'
get 'home/index'
get 'home/about'
get 'home/search'
get 'home/feedback'
# Provides default path for signed in users
match 'user_root' => 'users#show', via: :all
#match 'users/sign_out' => 'devise/sessions#destroy', via: :all
devise_scope :user do
get 'sign_in' => 'devise/sessions#new'
get 'users/sign_out' => 'devise/sessions#destroy'
end
resources :posts do
resources :comments
end
end
|
FanchenBao/magic-modules
|
api/resource/iam_policy.rb
|
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'api/object'
require 'google/string_utils'
module Api
# An object available in the product
class Resource < Api::Object::Named
# Information about the IAM policy for this resource
# Several GCP resources have IAM policies that are scoped to
# and accessed via their parent resource
# See: https://cloud.google.com/iam/docs/overview
class IamPolicy < Api::Object
# Boolean indicating whether this binding should be generated
attr_reader :exclude
# Character that separates resource identifier from method call in URL
# For example, PubSub subscription uses {resource}:getIamPolicy
# While Compute subnetwork uses {resource}/getIamPolicy
attr_reader :method_name_separator
def validate
super
check :exclude, type: :boolean, default: false
check :method_name_separator, type: String, default: '/'
end
end
end
end
|
FanchenBao/magic-modules
|
google/yaml_validator.rb
|
<filename>google/yaml_validator.rb<gh_stars>0
# Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/logger'
require 'yaml'
module Google
# A helper class to validate contents coming from YAML files.
class YamlValidator
class << self
def parse(content)
# TODO(nelsonjr): Allow specifying which symbols to restrict it further.
# But it requires inspecting all configuration files for symbol sources,
# such as Enum values. Leaving it as a nice-to-have for the future.
YAML.safe_load(content, allowed_classes)
end
def allowed_classes
ObjectSpace.each_object(Class).select do |klass|
klass < Google::YamlValidator
end.concat([Time, Symbol])
end
end
def validate
Google::LOGGER.debug "Validating #{self.class} '#{@name}'"
check_extraneous_properties
end
def set_variable(value, property)
Google::LOGGER.debug "Setting variable of #{value} to #{self}"
instance_variable_set("@#{property}", value)
end
# Does all validation checking for a particular variable.
# options:
# :default - the default value for this variable if it's nil
# :type - the allowed types (single or array) that this value can be
# :item_type - the allowed types that all values in this array should be
# (implied that type == array)
# :allowed - the allowed values that this non-array variable should be.
# :required - is the variable required? (default: false)
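#
# For example (illustrative; the attribute names are hypothetical):
#
#   check :name, type: String, required: true
#   check :exclude, type: :boolean, default: false
#   check :properties, type: Array, item_type: Api::Type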
def check(variable, **opts)
value = instance_variable_get("@#{variable}")
# Set default value.
if !opts[:default].nil? && value.nil?
instance_variable_set("@#{variable}", opts[:default])
value = instance_variable_get("@#{variable}")
end
# Check if value is required. Print nested path if available.
lineage_path = respond_to?('lineage') ? lineage : ''
raise "#{lineage_path} > Missing '#{variable}'" if value.nil? && opts[:required]
return if value.nil?
# Check type
check_property_value(variable, value, opts[:type]) if opts[:type]
# Check item_type
if value.is_a?(Array)
raise "#{lineage_path} > #{variable} must have item_type on arrays" unless opts[:item_type]
value.each_with_index do |o, index|
check_property_value("#{variable}[#{index}]", o, opts[:item_type])
end
end
# Check if value is allowed
return unless opts[:allowed]
raise "#{value} on #{variable} should be one of #{opts[:allowed]}" \
unless opts[:allowed].include?(value)
end
def conflicts(list)
value_checked = false
list.each do |item|
next if instance_variable_get("@#{item}").nil?
raise "#{list.join(',')} cannot be set at the same time" if value_checked
value_checked = true
end
end
private
def check_type(name, object, type)
if type == :boolean
return unless [TrueClass, FalseClass].find_index(object.class).nil?
elsif type.is_a? ::Array
return if type.find_index(:boolean) && [TrueClass, FalseClass].find_index(object.class)
return unless type.find_index(object.class).nil?
elsif object.is_a?(type)
return
end
raise "Property '#{name}' is '#{object.class}' instead of '#{type}'"
end
def log_check_type(object)
if object.respond_to?(:name)
Google::LOGGER.debug "Checking object #{object.name}"
else
Google::LOGGER.debug "Checking object #{object}"
end
end
def check_property_value(property, prop_value, type)
Google::LOGGER.debug "Checking '#{property}' on #{object_display_name}"
check_type property, prop_value, type unless type.nil?
prop_value.validate if prop_value.is_a?(Api::Object)
end
def check_extraneous_properties
instance_variables.each do |variable|
var_name = variable.id2name[1..-1]
next if var_name.start_with?('__')
Google::LOGGER.debug "Validating '#{var_name}' on #{object_display_name}"
raise "Extraneous variable '#{var_name}' in #{object_display_name}" \
unless methods.include?(var_name.intern)
end
end
def set_variables(objects, property)
return if objects.nil?
objects.each do |object|
object.set_variable(self, property) if object.respond_to?(:set_variable)
end
end
def ensure_property_does_not_exist(property)
raise "Conflict of property '#{property}' for object '#{self}'" \
unless instance_variable_get("@#{property}").nil?
end
def object_display_name
"#{@name}<#{self.class.name}>"
end
end
end
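# Illustrative usage sketch (hypothetical subclass, not part of the validator
# itself): concrete config objects inherit from Google::YamlValidator and call
# `check` from their own `validate`, e.g.
#
#   class ExampleConfig < Google::YamlValidator
#     attr_reader :name
#     attr_reader :tags
#
#     def validate
#       super
#       check :name, type: String, required: true
#       check :tags, type: Array, item_type: String, default: []
#     end
#   end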
|
FanchenBao/magic-modules
|
provider/ansible/version_added.rb
|
<filename>provider/ansible/version_added.rb
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
CURRENT_ANSIBLE_VERSION = '2.9'.freeze
module Provider
module Ansible
# All logic involved with altering the version_added yaml file and reading
# values from it.
module VersionAdded
def build_version_added
product_name = @api.name.downcase
versions_file = "products/#{product_name}/ansible_version_added.yaml"
versions = if File.exist?(versions_file)
YAML.safe_load(File.read(versions_file), [Symbol])
else
{}
end
struct = {
facts: {},
regular: {}
}
# Build out paths for regular modules.
@api.objects.reject(&:exclude).each do |obj|
next if obj.not_in_version?(@api.version_obj_or_closest('ga'))
resource = {
version_added: correct_version([:regular, obj.name], versions)
}
# Add properties.
# Only properties that aren't output-only + excluded should get versions.
# These are the only properties that become module fields.
obj.all_user_properties.reject(&:exclude).reject(&:output).each do |prop|
next if prop.min_version > @api.version_obj_or_closest('ga')
resource[prop.name.to_sym] = property_version(prop, [:regular, obj.name], versions)
end
struct[:regular][obj.name.to_sym] = resource
# Add facts modules from facts datasources.
struct[:facts][obj.name.to_sym] = {
version_added: correct_version([:facts, obj.name], versions)
}
end
# Write back to disk.
File.write("products/#{product_name}/ansible_version_added.yaml", struct.to_yaml)
struct
end
# This fetches a version_added from the config file for a Resource or Property.
# While the machine-generated config has every property,
# this function only returns a version_added if it cannot be inferred from
# elsewhere in the module.
def version_added(object, type = :regular)
if object.is_a?(Api::Resource)
correct_version([type, object.name], @version_added)
else
path = [type] + build_path(object)
res_version = correct_version(path[0, 2], @version_added)
prop_version = correct_version(path, @version_added)
# We don't need a version added if it matches the resource.
return nil if res_version == prop_version
# If property is the same as the one above it, ignore it.
return nil if version_path(path).last == version_path(path)[-2]
prop_version
end
end
private
# Builds out property information (with nesting)
def property_version(prop, path, struct)
property_hash = {
version_added: correct_version(path + [prop.name], struct)
}
# Only properties that aren't output-only + excluded should get versions.
# These are the only properties that become module fields.
prop.nested_properties.reject(&:exclude).reject(&:output).each do |nested_p|
property_hash[nested_p.name.to_sym] = property_version(nested_p,
path + [prop.name], struct)
end
property_hash
end
def correct_version(path, struct)
path = path.map(&:to_sym) + [:version_added]
struct.dig(*path) || CURRENT_ANSIBLE_VERSION
end
# Build out the path of resources/properties that this property exists within.
def build_path(prop)
path = []
while prop
path << prop if !path.last || (path.last.name != prop.name)
prop = prop.__parent
end
[path.last.__resource.name] + path.map(&:name).reverse
end
# Given a path of resources/properties, return the same path, but with
# versions substituted for names.
def version_path(path)
version_path = []
(path.length - 1).times.each do |i|
version_path << correct_version(path[0, i + 2], @version_added)
end
version_path
end
end
end
end
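# Illustrative sketch (assumed data shape): `correct_version` digs through the
# symbol-keyed hash written to ansible_version_added.yaml and falls back to
# CURRENT_ANSIBLE_VERSION when a path has no explicit entry, e.g.
#
#   versions = { regular: { Instance: { version_added: '2.6',
#                                       name: { version_added: '2.7' } } } }
#   correct_version([:regular, 'Instance'], versions)           # => '2.6'
#   correct_version([:regular, 'Instance', 'name'], versions)   # => '2.7'
#   correct_version([:regular, 'Instance', 'zone'], versions)   # => '2.9'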
|
FanchenBao/magic-modules
|
overrides/runner.rb
|
# Copyright 2018 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'api/product'
require 'overrides/resources'
require 'overrides/validator'
module Overrides
# This runner takes an Api::Product and applies a set of Overrides::ResourceOverrides
# It does this by building a brand new Api::Product object from scratch, using
# the values from either the original Api::Product or the override values.
# Example usage in a provider.yaml file where you want to extend a resource
# description:
#
# overrides: !ruby/object:Overrides::ResourceOverrides
# SomeResource: !ruby/object:Provider::MyProvider::ResourceOverride
# description: '{{description}} A tool-specific description complement'
# parameters:
# someParameter: !ruby/object:Provider::MyProvider::PropertyOverride
# description: 'foobar' # replaces description
# properties:
# someProperty: !ruby/object:Provider::MyProvider::PropertyOverride
# description: 'foobar' # replaces description
# anotherProperty.someNestedProperty:
# !ruby/object:Provider::MyProvider::PropertyOverride
# description: 'baz'
# anotherProperty[].someNestedPropertyInAnArray:
# !ruby/object:Provider::MyProvider::PropertyOverride
# description: 'baz'
# ...
class Runner
class << self
def initialize(api, overrides,
res_override_class = Overrides::ResourceOverride,
prop_override_class = Overrides::PropertyOverride)
@api = api
@overrides = overrides
@res_override_class = res_override_class
@prop_override_class = prop_override_class
end
def build(api, overrides, res_override_class = Overrides::ResourceOverride,
prop_override_class = Overrides::PropertyOverride)
overrides = Overrides::ResourceOverrides.new if overrides.nil?
validator = Overrides::Validator.new(api, overrides)
validator.run
build_product(api, overrides, resource: res_override_class, property: prop_override_class)
end
def build_single_property(api_property, property_override, prop_override_class)
build_property(api_property, property_override, { property: prop_override_class }, '')
end
private
# Given an old Api::Product and an Overrides::ResourceOverrides,
# returns a new Api::Product with overrides applied
def build_product(old_prod, all_overrides, override_classes)
prod = Api::Product.new
old_prod.instance_variables
.reject { |o| o == :@objects }.each do |var_name|
if (all_overrides['product'] || {})[var_name]
prod.instance_variable_set(var_name, all_overrides['product'][var_name])
else
prod.instance_variable_set(var_name, old_prod.instance_variable_get(var_name))
end
end
prod.instance_variable_set('@objects',
old_prod.objects
.map do |o|
build_resource(o, all_overrides[o.name],
override_classes)
end)
prod
end
# Given an Api::Resource and a Provider::Override::ResourceOverride,
# return a new Api::Resource with overrides applied.
def build_resource(old_resource, res_override, override_classes)
res_override = override_classes[:resource].new if res_override.nil? || res_override.empty?
explicit_overrides = res_override.instance_variables
res_override.validate
res_override.apply old_resource
res = Api::Resource.new
set_additional_values(res, res_override)
variables = (old_resource.instance_variables + res_override.instance_variables).uniq
variables.reject { |o| %i[@properties @parameters].include?(o) }
.each do |var_name|
# Check if the variable is defined on the original api but wasn't explicitly specified
# on the override. This stops override defaults (such as custom_code) from clobbering
# properties that exist on the api already
if old_resource.instance_variables.include?(var_name) \
&& !explicit_overrides.include?(var_name)
res.instance_variable_set(var_name, old_resource.instance_variable_get(var_name))
elsif !res_override[var_name].nil?
res.instance_variable_set(var_name, res_override[var_name])
else
res.instance_variable_set(var_name, old_resource.instance_variable_get(var_name))
end
end
# Using instance_variable_get('@properties') to make sure we get `exclude: true` properties
['@properties', '@parameters'].each do |val|
new_props = (old_resource.instance_variable_get(val) || []).map do |p|
build_property(p, res_override['properties'], override_classes)
end
res.instance_variable_set(val, new_props)
end
res
end
# Given an Api::Type property and a hash of property overrides, create a new Api::Type property
# This will handle NestedObjects, Arrays of NestedObjects of arbitrary length
def build_property(old_property, property_overrides, override_classes, prefix = '')
property_overrides = {} if property_overrides.nil?
new_prop = build_primitive_property(old_property,
property_overrides["#{prefix}#{old_property.name}"],
override_classes)
if old_property.nested_properties?
new_props = old_property.nested_properties.map do |p|
build_property(p, property_overrides, override_classes,
"#{prefix}#{old_property.name}.")
end
if old_property.is_a?(Api::Type::NestedObject)
new_prop.instance_variable_set('@properties', new_props)
elsif old_property.is_a?(Api::Type::Map) && \
old_property.value_type.is_a?(Api::Type::NestedObject)
new_prop.instance_variable_set('@value_type', Api::Type::NestedObject.new)
new_prop.value_type.instance_variable_set('@properties', new_props)
elsif old_property.is_a?(Api::Type::Array) && \
old_property.item_type.is_a?(Api::Type::NestedObject)
new_prop.instance_variable_set('@item_type', Api::Type::NestedObject.new)
new_prop.item_type.instance_variable_set('@properties', new_props)
end
end
new_prop
end
# Given a primitive Api::Type (strings, integers, times, etc.) and an override,
# return a new Api::Type with overrides applied.
# This will be called by build_property, which handles nesting.
def build_primitive_property(old_property, prop_override, override_classes)
prop_override = override_classes[:property].new \
if prop_override.nil? || prop_override.empty?
explicit_overrides = prop_override.instance_variables
prop_override.validate
prop_override.apply old_property
prop = if prop_override['type']
Module.const_get(prop_override['type']).new
else
old_property.class.new
end
set_additional_values(prop, prop_override)
variables = (old_property.instance_variables + prop_override.instance_variables).uniq
# Set api_name with old property so that a potential new name doesn't override it.
prop.instance_variable_set('@api_name', old_property.api_name || old_property.name)
variables.reject { |o| o == :@properties }
.each do |var_name|
# Check if the variable is defined on the original api but wasn't explicitly specified
# on the override. This stops override defaults (such as is_set) from clobbering
# properties that exist on the api already
if old_property.instance_variables.include?(var_name) \
&& !explicit_overrides.include?(var_name)
prop.instance_variable_set(var_name, old_property.instance_variable_get(var_name))
elsif !prop_override[var_name].nil? # and not default
prop.instance_variable_set(var_name, prop_override[var_name])
else
prop.instance_variable_set(var_name, old_property.instance_variable_get(var_name))
end
end
prop
end
# Overrides have additional values inside the override that do not regularly belong
# on the Api::* object. These values need to be set + they need getters so they
# can be accessed properly in the templates.
def set_additional_values(object, override)
override.class.attributes.each do |o|
object.instance_variable_set("@#{o}", override[o])
object.define_singleton_method(o.to_sym) { instance_variable_get("@#{o}") }
end
end
end
end
end
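# Illustrative sketch (assumed caller; the override classes are the ones named
# in the example comment above): a provider generator would typically combine a
# parsed Api::Product with the overrides from its provider.yaml via
#
#   overrides = config['overrides']   # an Overrides::ResourceOverrides
#   new_api = Overrides::Runner.build(api, overrides,
#                                     Provider::MyProvider::ResourceOverride,
#                                     Provider::MyProvider::PropertyOverride)
#
# `build` validates the overrides against the original product and returns a
# brand new Api::Product with the resource and property overrides applied.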
|
FanchenBao/magic-modules
|
api/resource/nested_query.rb
|
<reponame>FanchenBao/magic-modules<gh_stars>1-10
# Copyright 2019 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'api/object'
require 'google/string_utils'
module Api
# An object available in the product
class Resource < Api::Object::Named
# Query information for finding a resource nested in a returned API object
# i.e. fine-grained resources
class NestedQuery < Api::Object
# A list of keys to traverse in order.
# i.e. backendBucket --> cdnPolicy.signedUrlKeyNames
# should be ["cdnPolicy", "signedUrlKeyNames"]
attr_reader :keys
# If true, we expect the nested list to be
# a list of IDs for the nested resource, rather
# than a list of nested resource objects
# i.e. backendBucket.cdnPolicy.signedUrlKeyNames is a list of key names
# rather than a list of the actual key objects
attr_reader :is_list_of_ids
# This is used by Ansible, but may not be necessary.
attr_reader :kind
def validate
super
check :keys, type: Array, item_type: String, required: true
check :is_list_of_ids, type: :boolean, default: false
check :kind, type: String
end
end
end
end
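# Illustrative api.yaml sketch (hypothetical resource): the signed URL key
# names nested under a backendBucket's cdnPolicy could be described as
#
#   nested_query: !ruby/object:Api::Resource::NestedQuery
#     keys:
#       - cdnPolicy
#       - signedUrlKeyNames
#     is_list_of_ids: true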
|
FanchenBao/magic-modules
|
provider/terraform_oics.rb
|
# Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'provider/terraform'
module Provider
# Code generator for runnable Terraform examples that can be run via an
# Open in Cloud Shell link.
class TerraformOiCS < Provider::Terraform
# We don't want *any* static generation, so we override generate to only
# generate objects.
def generate(output_folder, types, version_name, _product_path, _dump_yaml)
generate_objects(output_folder, types, version_name)
end
# Create a directory of examples per resource
def generate_resource(data)
version = @api.version_obj_or_closest(data.version)
examples = data.object.examples
.reject(&:skip_test)
.reject { |e| !e.test_env_vars.nil? && e.test_env_vars.any? }
.reject { |e| version < @api.version_obj_or_closest(e.min_version) }
examples.each do |example|
target_folder = data.output_folder
target_folder = File.join(target_folder, example.name)
FileUtils.mkpath target_folder
data.example = example
data.generate(
'templates/terraform/examples/base_configs/example_file.tf.erb',
File.join(target_folder, 'main.tf'),
self
)
data.generate(
'templates/terraform/examples/base_configs/tutorial.md.erb',
File.join(target_folder, 'tutorial.md'),
self
)
data.generate(
'templates/terraform/examples/base_configs/example_backing_file.tf.erb',
File.join(target_folder, 'backing_file.tf'),
self
)
data.generate(
'templates/terraform/examples/static/motd',
File.join(target_folder, 'motd'),
self
)
end
end
# We don't want to generate anything but the resource.
def generate_resource_tests(data) end
end
end
|
hiroaki-kitagawa/rails-tutorial-local
|
.history/app/helpers/users_helper_20200818213434.rb
|
module UsersHelper
# Returns the Gravatar image for the given user.
def gravatar_for(user)
gravatar_id = Digest::MD5::hexdigest(user.email.downcase)
gravatar_url = "https://secure.gravatar.com/avatar/#{gravatar_id}"
# Assumed completion: render the image tag for the standard Gravatar URL.
image_tag(gravatar_url, alt: user.name, class: "gravatar")
end
end
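# Illustrative usage sketch (assumed ERB view):
#   <%= gravatar_for @user %>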
|
hiroaki-kitagawa/rails-tutorial-local
|
.history/app/controllers/relationships_controller_20200825010447.rb
|
class RelationshipsController < ApplicationController
before_action :logged_in_user
def create
end
def destroy
end
end
|
hiroaki-kitagawa/rails-tutorial-local
|
.history/app/controllers/relationships_controller_20200825005932.rb
|
class RelationshipsController < ApplicationController
end
|
hiroaki-kitagawa/rails-tutorial-local
|
.history/app/models/relationship_20200824220811.rb
|
class Relationship < ApplicationRecord
end
|
camilleg/timeliner
|
movie-soundtrack.rb
|
<filename>movie-soundtrack.rb
#!/usr/bin/env ruby
# Compute wavfile from comments in timeliner-recording.txt.
Src = 'example/mono/marshal/mixed.wav'
SR = 16000
s = `grep ^# timeliner-recording.txt`.split(/\n/)
cFrame = `grep -v ^# timeliner-recording.txt|wc -l`.to_i
def tmp(i) "/run/shm/timeliner/tmp#{i}.wav" end
def tmps() "/run/shm/timeliner/tmp*.wav" end
`rm -f #{tmps}`
if true
# This handles ONLY consecutive start commands.
$secDstPrev = -1.0
$secSrcPrev = -1.0
s.each_with_index {|c,i|
c = c.split
$secSrc = c[8].to_f
$secDst = c[14].to_f
if ($secDstPrev >= 0.0)
$secDur = $secDst - $secDstPrev
# puts "prev dstBgn #$secDstPrev"
# puts "curr dstBgn #{$secDst}"
# puts "previous clip's dur #{$secDur}"
# puts "Thus, clip src start #{$secSrcPrev}, dstBgn #{$secDstPrev}, dur #{$secDur}\n\n"
`sox #{Src} #{tmp(i)} trim #{($secSrcPrev*SR).to_i}s #{($secDur*SR).to_i}s pad #{($secDstPrev*SR).to_i}s`
end
$secDstPrev = $secDst
$secSrcPrev = $secSrc
# puts "clip src start #{$secSrc}, dst #{$secDst}"
}
i = s.size
$secVideo = cFrame/30.0
$secDur = $secVideo - $secDst
# puts " last clip's dur #{$secDur}"
# puts "Thus, clip src start #{$secSrc}, dst #{$secDst}, dur #{$secDur} dstEnd #{$secDst+$secDur}\n\n"
`sox #{Src} #{tmp(i)} trim #{($secSrc*SR).to_i}s #{($secDur*SR).to_i}s pad #{($secDst*SR).to_i}s`
else
# This handles alternating start and stop commands.
# Join each start-stop pair of lines.
clips = []
s.each_slice(2) {|c| clips << c.join}
clips.each_with_index {|c,i|
# parse "# audio playback start from wavfile offset = 7.8 s, at screenshot-recording offset = 1.9 s# audio playback stop at offset = 8.2 s"
c = c.split
secSrc = c[8].to_f
secDst = c[14].to_f
secDur = c[-2].to_f
puts "clip src start #{secSrc}, dst #{secDst}, dur #{secDur} "
`sox #{Src} #{tmp(i)} trim #{(secSrc*SR).to_i}s #{(secDur*SR).to_i}s pad #{(secDst*SR).to_i}s`
}
end
`sox -m #{tmps} /run/shm/timeliner/out.wav gain -n -1`
`rm -f #{tmps}`
|
codesnik/rails-recipes
|
lib/rails-recipes/console.rb
|
unless Capistrano::Configuration.respond_to?(:instance)
abort "rails-recipes requires Capistrano 2"
end
Capistrano::Configuration.instance(:must_exist).load do
namespace :rails do
desc "script/console on a remote server"
task :console do
rails_env = fetch(:rails_env, "production")
server = find_servers(:roles => [:app]).first
run_with_tty server, %W( script/console #{rails_env} )
end
desc "script/dbconsole on a remote server"
task :dbconsole do
rails_env = fetch(:rails_env, "production")
server = find_servers(:roles => [:app]).first
run_with_tty server, %W( script/dbconsole #{rails_env} )
end
set :rake_cmd do
rails_env = fetch(:rails_env, "production")
"cd #{current_path} && rake RAILS_ENV=#{rails_env}"
end
# FIXME run on only one server?
desc "task=command runs rake 'command' on application servers"
task :rake, :roles => [:app] do
if ENV['task']
run "#{rake_cmd} #{ENV['task']}"
else
# FIXME use logger instead of warn?
warn "USAGE: cap rails:rake task=..."
end
end
def run_with_tty server, cmd
# looks like a total mess
command = []
command += %W( ssh -t #{gateway} -l #{self[:gateway_user] || self[:user]} ) if self[:gateway]
command += %W( ssh -t )
command += %W( -p #{server.port}) if server.port
command += %W( -l #{user} #{server.host} )
command += %W( cd #{current_path} )
# have to escape this once if running via double ssh
command += [self[:gateway] ? '\&\&' : '&&']
command += Array(cmd)
system *command
end
end
end
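# Illustrative usage sketch (assumed Capistrano 2 deploy with an :app role):
#   cap rails:console               # script/console on the first app server
#   cap rails:dbconsole             # script/dbconsole on the first app server
#   cap rails:rake task=db:migrate  # runs rake db:migrate with RAILS_ENV set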
|
codesnik/rails-recipes
|
test/rails-recipes_test.rb
|
<filename>test/rails-recipes_test.rb<gh_stars>1-10
require 'test_helper'
class RailsRecipesTest < Test::Unit::TestCase
should "probably rename this file and start testing for real" do
assert "still won't bother myself with it right now"
end
end
|
codesnik/rails-recipes
|
lib/rails-recipes.rb
|
<filename>lib/rails-recipes.rb<gh_stars>1-10
require 'capistrano'
dir = File.dirname(__FILE__)
require File.join(dir, 'rails-recipes', 'console')
|
Metaphysiker/fatfreedemo
|
lib/tasks/sync.rake
|
<gh_stars>0
namespace :sync do
task users: :environment do
getusers
end
task societies: :environment do
getsocieties
end
task totalsync: :environment do
getusers
getsocieties
end
def getusers
url = 'http://localhost:3000/getusers'
uri = URI(url)
response = Net::HTTP.get(uri)
response2 = JSON.parse(response)
response3 = response2.first
users = response3.second
users.each do |u|
user = OpenStruct.new(u)
puts user.firstname
Contact.create(
first_name: user.firstname,
last_name: user.lastname,
access: "Public",
email: user.email,
created_at: user.created_at,
philosophie_id: user.id
)
end
end
def getsocieties
url = 'http://localhost:3000/getsocieties'
uri = URI(url)
response = Net::HTTP.get(uri)
response2 = JSON.parse(response)
response3 = response2.first
societies = response3.second
societies.each do |s|
society = OpenStruct.new(s)
Account.create(
name: society.name,
access: "Public",
created_at: society.created_at,
philosophie_society_id: society.id
)
society.profiles.each do |p|
puts p["id"]
Contact.find_by_philosophie_id(p["id"]).account = Account.find_by_philosophie_society_id(society.id) unless Contact.find_by_philosophie_id(p["id"]).nil?
end
end
end
end
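# Illustrative usage sketch (assumes the source app is reachable on localhost:3000):
#   bin/rake sync:users      # import users from /getusers into Contact records
#   bin/rake sync:societies  # import societies from /getsocieties into Account records
#   bin/rake sync:totalsync  # run both imports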
|
Metaphysiker/fatfreedemo
|
db/migrate/20100928030607_create_contacts.rb
|
<gh_stars>0
# frozen_string_literal: true
class CreateContacts < ActiveRecord::Migration[4.2]
def self.up
create_table :contacts, force: true do |t|
t.string :uuid, limit: 36
t.references :user
t.references :lead
t.integer :assigned_to
t.integer :reports_to
t.integer :philosophie_id
t.string :first_name, limit: 64, null: false, default: ""
t.string :last_name, limit: 64, null: false, default: ""
t.string :access, limit: 8, default: "Public"
t.string :title, limit: 64
t.string :department, limit: 64
t.string :source, limit: 32
t.string :email, limit: 64
t.string :alt_email, limit: 64
t.string :phone, limit: 32
t.string :mobile, limit: 32
t.string :fax, limit: 32
t.string :blog, limit: 128
t.string :linkedin, limit: 128
t.string :facebook, limit: 128
t.string :twitter, limit: 128
t.string :address
t.date :born_on
t.boolean :do_not_call, null: false, default: false
t.datetime :deleted_at
t.timestamps
end
add_index :contacts, %i[user_id last_name deleted_at], unique: true, name: 'id_last_name_deleted'
add_index :contacts, :assigned_to
end
def self.down
drop_table :contacts
end
end
|
atomicjolt/starter_app
|
app/controllers/sessions_controller.rb
|
<filename>app/controllers/sessions_controller.rb
class SessionsController < Devise::SessionsController
def destroy
super
end
end
|
atomicjolt/starter_app
|
app/controllers/admin_controller.rb
|
<gh_stars>1-10
class AdminController < ApplicationController
layout "admin"
def index
end
end
|
atomicjolt/starter_app
|
app/controllers/application_controller.rb
|
<reponame>atomicjolt/starter_app
class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
before_action :configure_permitted_parameters, if: :devise_controller?
helper_method :current_account
protected
rescue_from CanCan::AccessDenied do |exception|
redirect_to root_url, :alert => exception.message
end
def configure_permitted_parameters
devise_parameter_sanitizer.for(:sign_up) << :name
devise_parameter_sanitizer.for(:account_update) << :name
end
# **********************************************
#
# OAuth related functionality:
#
def check_external_identifier(user, only_build=false)
if session[:external_identifier]
exid = user.external_identifiers.build(:identifier => session[:external_identifier], :provider => session[:provider])
exid.save! unless only_build
session[:external_identifier] = nil
session[:provider] = nil
exid
end
end
def find_external_identifier(url)
return nil unless url.present?
@provider = UrlHelper.host(url)
@identifier = params[:user_id]
ExternalIdentifier.find_by(provider: @provider, identifier: @identifier)
end
def create_external_identifier_with_url(auth, user)
json = Yajl::Parser.parse(auth['json_response'])
key = UrlHelper.host(json['info']['url'])
user.external_identifiers.create(:identifier => auth.uid, :provider => key) # If they already have an external identifier this can just fail silently
end
# **********************************************
#
# Account related functionality:
#
def current_account
@current_account ||= Account.find_by(code: request.subdomains.first) || Account.find_by(domain: request.host) || Account.main
end
private
def user_not_authorized
flash[:alert] = "Access denied."
redirect_to(request.referrer || root_path)
end
end
|
atomicjolt/starter_app
|
spec/controllers/api/accounts_controller_spec.rb
|
<reponame>atomicjolt/starter_app
# require 'spec_helper'
# describe Api::AccountsController do
# before do
# @request.host = @account_default.domain
# end
# describe "GET index" do
# context "valid api key" do
# it "returns all accounts as json" do
# account = FactoryGirl.create(:account)
# get :index, internal_api_key: Settings.internal_api_key, format: :json
# result = JSON.parse(response.body)
# result['accounts'].should be_present
# result['accounts'].find{|s| s['id'] == account.id}.should be_present
# end
# end
# context "invalid api key" do
# it "returns unauthorized" do
# get :index, format: :json
# response.status.should == 401
# end
# end
# end
# end
|
atomicjolt/starter_app
|
db/seeds.rb
|
user = CreateAdminService.new.call
puts 'CREATED ADMIN USER: ' << user.email
# Setup default accounts
if Rails.env.production?
accounts = [{
code: 'amasynsoftware',
name: '<NAME>',
domain: 'http://www.amasynsoftware.com',
}]
else
accounts = [{
code: 'amasynsoftware',
name: '<NAME>',
domain: 'http://amasynsoftware.ngrok.com',
}]
end
# Setup accounts
accounts.each do |account|
if a = Account.find_by(code: account[:code])
a.update_attributes!(account)
else
Account.create!(account)
end
end
|
atomicjolt/starter_app
|
app/models/authentication.rb
|
class Authentication < ActiveRecord::Base
belongs_to :user, inverse_of: :authentications
validates :provider, presence: true, uniqueness: {scope: [:uid, :user_id, :provider_url]}
end
|
atomicjolt/starter_app
|
spec/factories/accounts.rb
|
<reponame>atomicjolt/starter_app
FactoryGirl.define do
factory :account do
code { FactoryGirl.generate(:code) }
name { FactoryGirl.generate(:name) }
domain { FactoryGirl.generate(:domain) }
end
end
|
atomicjolt/starter_app
|
spec/factories/authentication.rb
|
<gh_stars>1-10
FactoryGirl.define do
factory :authentication do
user
provider { FactoryGirl.generate(:name) }
factory :authentication_facebook do
provider 'facebook'
uid '12345'
username 'myusername'
provider_avatar 'http://graph.facebook.com/12345/picture?type=large'
end
end
end
|
atomicjolt/starter_app
|
app/models/role.rb
|
class Role < ActiveRecord::Base
has_many :permissions, :dependent => :destroy
has_many :users, :through => :permissions
validates :name, :presence => true
validates :name, :uniqueness => true
scope :by_alpha, -> {order('roles.name ASC')}
# roles can be defined as symbols. We want to store them as strings in the database
def name= val
write_attribute(:name, val.to_s)
end
end
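# Illustrative sketch: thanks to the writer above, symbol role names are stored
# as strings, e.g.
#   Role.new(name: :admin).name  # => "admin"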
|
atomicjolt/starter_app
|
config/routes.rb
|
<reponame>atomicjolt/starter_app
class CustomDomain
def matches?(request)
return false if request.subdomain.length <= 0 || request.subdomain == 'www'
true
end
end
Rails.application.routes.draw do
root :to => "home#index"
devise_for :users, controllers: {
sessions: "sessions",
registrations: "registrations",
omniauth_callbacks: "omniauth_callbacks"
}
resources :users
resources :admin, only: [:index]
mount MailPreview => 'mail_view' if Rails.env.development?
end
|
atomicjolt/starter_app
|
config/unicorn.rb
|
###################################################################################
#
# Use this unicorn configuration for deploying to Heroku
#
worker_processes Integer(ENV["WEB_CONCURRENCY"] || 3)
timeout 15
preload_app true
before_fork do |server, worker|
Signal.trap 'TERM' do
puts 'Unicorn master intercepting TERM and sending myself QUIT instead'
Process.kill 'QUIT', Process.pid
end
defined?(ActiveRecord::Base) and
ActiveRecord::Base.connection.disconnect!
end
after_fork do |server, worker|
Signal.trap 'TERM' do
puts 'Unicorn worker intercepting TERM and doing nothing. Wait for master to send QUIT'
end
defined?(ActiveRecord::Base) and
ActiveRecord::Base.establish_connection
end
###################################################################################
#
# Use this unicorn configuration for zero downtime deployment to other services (ie AWS)
#
# See http://unicorn.bogomips.org/Unicorn/Configurator.html for complete documentation.
# env = ENV["RAILS_ENV"] || "development" # Set environment to development unless something else is specified
# worker_processes 4
# preload_app true # Preload our app for more speed
# timeout 1800 # nuke workers after 30 seconds instead of 60 seconds (the default)
# pid "/tmp/unicorn.amasynsoftware.pid"
# if env == "production" || env == 'staging'
# listen "/tmp/unicorn.amasynsoftware.socket", :backlog => 64
# working_directory "/u/apps/amasynsoftware/current"
# user 'ubuntu', 'ubuntu'
# shared_path = "/u/apps/amasynsoftware/shared"# feel free to point this anywhere accessible on the filesystem
# stderr_path "/u/apps/amasynsoftware/shared/log/unicorn.stderr.log"
# stdout_path "/u/apps/amasynsoftware/shared/log/unicorn.stdout.log"
# before_exec do |server|
# ENV["BUNDLE_GEMFILE"] = "/u/apps/amasynsoftware/current/Gemfile"
# end
# end
# before_fork do |server, worker|
# # The following is highly recomended for Rails + "preload_app true".
# # There's no need for the master process to hold a connection.
# if defined?(ActiveRecord::Base)
# ActiveRecord::Base.connection.disconnect!
# end
# # Before forking, kill the master process that belongs to the .oldbin PID.
# # This enables 0 downtime deploys.
# old_pid = "/tmp/unicorn.amasynsoftware.pid.oldbin"
# if File.exists?(old_pid) && server.pid != old_pid
# begin
# Process.kill("QUIT", File.read(old_pid).to_i)
# rescue Errno::ENOENT, Errno::ESRCH
# # someone else did our job for us
# end
# end
# end
# after_fork do |server, worker|
# # The following is *required* for Rails + "preload_app true",
# if defined?(ActiveRecord::Base)
# ActiveRecord::Base.establish_connection
# end
# end
|
atomicjolt/starter_app
|
spec/support/macros.rb
|
<filename>spec/support/macros.rb
def login_user
@request.env["devise.mapping"] = Devise.mappings[:user]
user = FactoryGirl.create(:user)
user.confirm! # or set a confirmed_at inside the factory. Only necessary if you are using the "confirmable" module
sign_in user
end
|
atomicjolt/starter_app
|
db/schema.rb
|
# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150318170605) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "accounts", force: true do |t|
t.string "name"
t.string "domain"
t.datetime "created_at"
t.datetime "updated_at"
t.string "code"
end
add_index "accounts", ["code"], name: "index_accounts_on_code", using: :btree
add_index "accounts", ["domain"], name: "index_accounts_on_domain", unique: true, using: :btree
create_table "authentications", force: true do |t|
t.integer "user_id"
t.string "token"
t.string "secret"
t.string "provider"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "json_response"
t.string "uid"
t.string "provider_avatar"
t.string "username"
t.string "provider_url", limit: 2048
t.string "refresh_token"
end
add_index "authentications", ["provider", "uid"], name: "index_authentications_on_provider_and_uid", using: :btree
add_index "authentications", ["user_id"], name: "index_authentications_on_user_id", using: :btree
create_table "external_identifiers", force: true do |t|
t.integer "user_id"
t.string "identifier"
t.string "provider"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "external_identifiers", ["identifier", "provider"], name: "index_external_identifiers_on_identifier_and_provider", using: :btree
add_index "external_identifiers", ["user_id"], name: "index_external_identifiers_on_user_id", using: :btree
create_table "permissions", force: true do |t|
t.integer "role_id"
t.integer "user_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "permissions", ["role_id", "user_id"], name: "index_permissions_on_role_id_and_user_id", using: :btree
create_table "profiles", force: true do |t|
t.integer "user_id"
t.string "location"
t.decimal "lat", precision: 15, scale: 10
t.decimal "lng", precision: 15, scale: 10
t.text "about"
t.string "city"
t.integer "state_id"
t.integer "country_id"
t.integer "language_id"
t.integer "profile_views"
t.text "policy"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "website"
t.string "blog"
t.string "twitter"
t.string "facebook"
t.string "linkedin"
end
create_table "roles", force: true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "user_accounts", force: true do |t|
t.integer "user_id"
t.integer "account_id"
t.string "role"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "user_accounts", ["user_id", "account_id"], name: "index_user_accounts_on_user_id_and_account_id", using: :btree
create_table "users", force: true do |t|
t.string "email", default: "", null: false
t.string "encrypted_password", default: "", null: false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0, null: false
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.datetime "created_at"
t.datetime "updated_at"
t.string "name"
t.string "confirmation_token"
t.datetime "confirmed_at"
t.datetime "confirmation_sent_at"
t.string "unconfirmed_email"
t.integer "role"
t.integer "account_id"
t.string "username"
t.string "avatar"
t.string "time_zone", default: "UTC"
t.string "password_salt"
t.string "provider_avatar"
t.string "profile_privacy", default: "private"
t.string "profile_privacy_token"
t.string "active_avatar", default: "none"
end
add_index "users", ["account_id"], name: "index_users_on_account_id", using: :btree
add_index "users", ["email"], name: "index_users_on_email", unique: true, using: :btree
add_index "users", ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true, using: :btree
end
|
atomicjolt/starter_app
|
app/controllers/users_controller.rb
|
<reponame>atomicjolt/starter_app
class UsersController < ApplicationController
before_filter :authenticate_user!
load_and_authorize_resource
after_action :verify_authorized, except: [:show]
def index
end
def show
end
def update
if @user.update_attributes(update_params)
redirect_to users_path, :notice => "User updated."
else
redirect_to users_path, :alert => "Unable to update user."
end
end
def destroy
@user.destroy
redirect_to users_path, :notice => "User deleted."
end
private
def create_params
params.require(:user).permit(:role)
end
def update_params
params.require(:user).permit(:role)
end
end
|
jwedoff/ruby-net-ldap
|
test/test_auth_adapter.rb
|
<reponame>jwedoff/ruby-net-ldap
require 'test_helper'
class TestAuthAdapter < Test::Unit::TestCase
class FakeSocket
include Net::BER::BERParser
def initialize(*args)
end
end
def test_undefined_auth_adapter
conn = Net::LDAP::Connection.new(host: 'ldap.example.com', port: 379, :socket_class => FakeSocket)
assert_raise Net::LDAP::AuthMethodUnsupportedError, "Unsupported auth method (foo)" do
conn.bind(method: :foo)
end
end
end
|
jwedoff/ruby-net-ldap
|
test/test_ssl_ber.rb
|
require_relative 'test_helper'
require 'timeout'
class TestSSLBER < Test::Unit::TestCase
# Transmits str to @to and reads it back from @from.
#
def transmit(str)
Timeout.timeout(1) do
@to.write(str)
@to.close
@from.read
end
end
def setup
@from, @to = IO.pipe
# The production code operates on sockets, which do need #connect called
# on them to work. Pipes are more robust for this test, so we'll skip
# the #connect call since it fails.
#
# TODO: Replace test with real socket
# https://github.com/ruby-ldap/ruby-net-ldap/pull/121#discussion_r18746386
#
# Without this fix, a number of "warning: SSL session is not started yet" messages are emitted on the nonblocking reads.
flexmock(OpenSSL::SSL::SSLSocket)
.new_instances.should_receive(:connect => nil)
@to = Net::LDAP::Connection.wrap_with_ssl(@to)
@from = Net::LDAP::Connection.wrap_with_ssl(@from)
end
def test_transmit_strings
omit_if RUBY_PLATFORM == "java", "JRuby throws an error without a real socket"
assert_equal "foo", transmit("foo")
end
def test_transmit_ber_encoded_numbers
omit_if RUBY_PLATFORM == "java", "JRuby throws an error without a real socket"
@to.write 1234.to_ber
assert_equal 1234, @from.read_ber
end
end
|
jwedoff/ruby-net-ldap
|
test/integration/test_bind.rb
|
require_relative '../test_helper'
class TestBindIntegration < LDAPIntegrationTestCase
INTEGRATION_HOSTNAME = 'ldap.example.org'.freeze
def test_bind_success
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_timeout
@ldap.host = "10.255.255.1" # non-routable IP
error = assert_raise Net::LDAP::Error do
@ldap.bind BIND_CREDS
end
msgs = ['Operation timed out - user specified timeout',
'Connection timed out - user specified timeout']
assert_send([msgs, :include?, error.message])
end
def test_bind_anonymous_fail
refute @ldap.bind(BIND_CREDS.merge(password: '')),
@ldap.get_operation_result.inspect
result = @ldap.get_operation_result
assert_equal Net::LDAP::ResultCodeUnwillingToPerform, result.code
assert_equal Net::LDAP::ResultStrings[Net::LDAP::ResultCodeUnwillingToPerform], result.message
assert_equal "unauthenticated bind (DN with no password) disallowed",
result.error_message
assert_equal "", result.matched_dn
end
def test_bind_fail
refute @ldap.bind(BIND_CREDS.merge(password: "<PASSWORD>")),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_cafile
omit "We need to update our CA cert"
@ldap.host = INTEGRATION_HOSTNAME
@ldap.encryption(
method: :start_tls,
tls_options: TLS_OPTS.merge(ca_file: CA_FILE),
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_bad_hostname_verify_none_no_ca_passes
@ldap.host = INTEGRATION_HOSTNAME
@ldap.encryption(
method: :start_tls,
tls_options: { verify_mode: OpenSSL::SSL::VERIFY_NONE },
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_bad_hostname_verify_none_no_ca_opt_merge_passes
@ldap.host = 'cert.mismatch.example.org'
@ldap.encryption(
method: :start_tls,
tls_options: TLS_OPTS.merge(verify_mode: OpenSSL::SSL::VERIFY_NONE),
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_bad_hostname_verify_peer_ca_fails
omit "We need to update our CA cert"
@ldap.host = 'cert.mismatch.example.org'
@ldap.encryption(
method: :start_tls,
tls_options: { verify_mode: OpenSSL::SSL::VERIFY_PEER,
ca_file: CA_FILE },
)
error = assert_raise Net::LDAP::Error,
Errno::ECONNREFUSED do
@ldap.bind BIND_CREDS
end
assert_equal(
"hostname \"#{@ldap.host}\" does not match the server certificate",
error.message,
)
end
def test_bind_tls_with_bad_hostname_ca_default_opt_merge_fails
omit "We need to update our CA cert"
@ldap.host = 'cert.mismatch.example.org'
@ldap.encryption(
method: :start_tls,
tls_options: TLS_OPTS.merge(ca_file: CA_FILE),
)
error = assert_raise Net::LDAP::Error,
Errno::ECONNREFUSED do
@ldap.bind BIND_CREDS
end
assert_equal(
"hostname \"#{@ldap.host}\" does not match the server certificate",
error.message,
)
end
def test_bind_tls_with_bad_hostname_ca_no_opt_merge_fails
omit "We need to update our CA cert"
@ldap.host = 'cert.mismatch.example.org'
@ldap.encryption(
method: :start_tls,
tls_options: { ca_file: CA_FILE },
)
error = assert_raise Net::LDAP::Error,
Errno::ECONNREFUSED do
@ldap.bind BIND_CREDS
end
assert_equal(
"hostname \"#{@ldap.host}\" does not match the server certificate",
error.message,
)
end
def test_bind_tls_with_valid_hostname_default_opts_passes
omit "We need to update our CA cert"
@ldap.host = INTEGRATION_HOSTNAME
@ldap.encryption(
method: :start_tls,
tls_options: TLS_OPTS.merge(verify_mode: OpenSSL::SSL::VERIFY_PEER,
ca_file: CA_FILE),
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_valid_hostname_just_verify_peer_ca_passes
omit "We need to update our CA cert"
@ldap.host = INTEGRATION_HOSTNAME
@ldap.encryption(
method: :start_tls,
tls_options: { verify_mode: OpenSSL::SSL::VERIFY_PEER,
ca_file: CA_FILE },
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_bogus_hostname_system_ca_fails
@ldap.host = 'cert.mismatch.example.org'
@ldap.encryption(method: :start_tls, tls_options: {})
error = assert_raise Net::LDAP::Error,
Errno::ECONNREFUSED do
@ldap.bind BIND_CREDS
end
assert_equal(
"hostname \"#{@ldap.host}\" does not match the server certificate",
error.message,
)
end
def test_bind_tls_with_multiple_hosts
omit "We need to update our CA cert"
@ldap.host = nil
@ldap.hosts = [[INTEGRATION_HOSTNAME, 389], [INTEGRATION_HOSTNAME, 389]]
@ldap.encryption(
method: :start_tls,
tls_options: TLS_OPTS.merge(verify_mode: OpenSSL::SSL::VERIFY_PEER,
ca_file: CA_FILE),
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_multiple_bogus_hosts
# omit "We need to update our CA cert"
@ldap.host = nil
@ldap.hosts = [['cert.mismatch.example.org', 389], ['bogus.example.com', 389]]
@ldap.encryption(
method: :start_tls,
tls_options: TLS_OPTS.merge(verify_mode: OpenSSL::SSL::VERIFY_PEER,
ca_file: CA_FILE),
)
error = assert_raise Net::LDAP::Error,
Net::LDAP::ConnectionError do
@ldap.bind BIND_CREDS
end
assert_equal("Unable to connect to any given server: ",
error.message.split("\n").shift)
end
def test_bind_tls_with_multiple_bogus_hosts_no_verification
omit "We need to update our CA cert"
@ldap.host = nil
@ldap.hosts = [['cert.mismatch.example.org', 389], ['bogus.example.com', 389]]
@ldap.encryption(
method: :start_tls,
tls_options: TLS_OPTS.merge(verify_mode: OpenSSL::SSL::VERIFY_NONE),
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
def test_bind_tls_with_multiple_bogus_hosts_ca_check_only_fails
@ldap.host = nil
@ldap.hosts = [['cert.mismatch.example.org', 389], ['bogus.example.com', 389]]
@ldap.encryption(
method: :start_tls,
tls_options: { ca_file: CA_FILE },
)
error = assert_raise Net::LDAP::Error,
Net::LDAP::ConnectionError do
@ldap.bind BIND_CREDS
end
assert_equal("Unable to connect to any given server: ",
error.message.split("\n").shift)
end
# This test is CI-only because we can't add the fixture CA
# to the system CA store on people's dev boxes.
def test_bind_tls_valid_hostname_system_ca_on_travis_passes
omit "not sure how to install custom CA cert in travis"
omit_unless ENV['TRAVIS'] == 'true'
@ldap.host = INTEGRATION_HOSTNAME
@ldap.encryption(
method: :start_tls,
tls_options: { verify_mode: OpenSSL::SSL::VERIFY_PEER },
)
assert @ldap.bind(BIND_CREDS),
@ldap.get_operation_result.inspect
end
end
|
jwedoff/ruby-net-ldap
|
lib/net/ldap/version.rb
|
<gh_stars>0
module Net
class LDAP
VERSION = "0.17.0.1.akajw"
end
end
|
jwedoff/ruby-net-ldap
|
lib/net/ber/ber_parser_nonblock.rb
|
# Implements nonblocking and timeout handling routines for BER parsing.
module Net::BER::BERParserNonblock
# Internal: Returns the BER message ID or nil.
def read_ber_id
ber_timeout_getbyte
end
private :read_ber_id
# Internal: specify the BER socket read timeouts, nil by default (no timeout).
attr_accessor :ber_io_deadline
private :ber_io_deadline
##
# sets a timeout of `timeout` seconds for read_ber and ber_timeout_write operations in the provided block; the deadline is only moved into the future if there is not already an earlier deadline set
def with_timeout(timeout)
timeout = timeout.to_f
# don't change deadline if run without timeout
return yield if timeout <= 0
# clear deadline if it is not in the future
self.ber_io_deadline = nil unless ber_io_timeout.to_f > 0
new_deadline = Time.now + timeout
# don't add deadline if current deadline is shorter
return yield if ber_io_deadline && ber_io_deadline < new_deadline
old_deadline = ber_io_deadline
begin
self.ber_io_deadline = new_deadline
yield
ensure
self.ber_io_deadline = old_deadline
end
end
# seconds until ber_io_deadline
def ber_io_timeout
ber_io_deadline ? ber_io_deadline - Time.now : nil
end
private :ber_io_timeout
def read_select!
return if IO.select([self], nil, nil, ber_io_timeout)
raise Errno::ETIMEDOUT, "Timed out reading from the socket"
end
private :read_select!
def write_select!
return if IO.select(nil, [self], nil, ber_io_timeout)
raise Errno::ETIMEDOUT, "Timed out writing to the socket"
end
private :write_select!
# Internal: Replaces `getbyte` with nonblocking implementation.
def ber_timeout_getbyte
read_nonblock(1).ord
rescue IO::WaitReadable
read_select!
retry
rescue IO::WaitWritable
write_select!
retry
rescue EOFError
# nothing to read on the socket (StringIO)
nil
end
private :ber_timeout_getbyte
# Internal: Read `len` bytes, respecting timeout.
def ber_timeout_read(len)
buffer ||= ''.force_encoding(Encoding::ASCII_8BIT)
begin
read_nonblock(len, buffer)
return buffer if buffer.bytesize >= len
rescue IO::WaitReadable, IO::WaitWritable
buffer.clear
rescue EOFError
# nothing to read on the socket (StringIO)
nil
end
block ||= ''.force_encoding(Encoding::ASCII_8BIT)
len -= buffer.bytesize
loop do
begin
read_nonblock(len, block)
rescue IO::WaitReadable
read_select!
retry
rescue IO::WaitWritable
write_select!
retry
rescue EOFError
return buffer.empty? ? nil : buffer
end
buffer << block
len -= block.bytesize
return buffer if len <= 0
end
end
private :ber_timeout_read
##
# Writes val as a plain write would, but respecting the deadline set by with_timeout
def ber_timeout_write(val)
total_written = 0
while val.bytesize > 0
begin
written = write_nonblock(val)
rescue IO::WaitReadable
read_select!
retry
rescue IO::WaitWritable
write_select!
retry
end
total_written += written
val = val.byteslice(written..-1)
end
total_written
end
end
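# Illustrative usage sketch (assumed IO object that also mixes in
# Net::BER::BERParser for read_ber): a caller bounds a request/response
# round-trip with a single deadline, e.g.
#
#   conn.with_timeout(5) do
#     conn.ber_timeout_write(request_pdu)
#     conn.read_ber
#   end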
|
adamgeorgeson/eventq
|
eventq_aws/spec/integration/aws_eventq_client_spec.rb
|
require 'spec_helper'
RSpec.describe EventQ::Amazon::EventQClient, integration: true do
let(:queue_client) do
EventQ::Amazon::QueueClient.new({ aws_account_number: EventQ.AWS_ACCOUNT_NUMBER, aws_region: 'eu-west-1' })
end
let(:queue_manager) do
EventQ::Amazon::QueueManager.new({ client: queue_client })
end
let(:subscription_manager) do
EventQ::Amazon::SubscriptionManager.new({ client: queue_client, queue_manager: queue_manager })
end
let(:eventq_client) do
EventQ::Amazon::EventQClient.new({ client: queue_client })
end
let(:subscriber_queue) do
EventQ::Queue.new.tap do |sq|
sq.name = SecureRandom.uuid.to_s
end
end
let(:class_kit) { ClassKit::Helper.new }
let(:event_type) { 'test_queue1_event1' }
let(:message) { 'Hello World' }
let(:message_context) { { 'foo' => 'bar' } }
describe '#publish' do
it 'should raise an event object and be broadcast to a subscriber queue' do
subscription_manager.subscribe(event_type, subscriber_queue)
id = eventq_client.publish(topic: event_type, event: message, context: message_context)
EventQ.logger.debug { "Message ID: #{id}" }
# sleep briefly to allow the aws message to be sent to the topic and broadcast to subscribers
sleep(1)
q = queue_manager.get_queue(subscriber_queue)
EventQ.logger.debug { '[QUEUE] waiting for message...' }
# request a message from the queue
response = queue_client.sqs.receive_message({
queue_url: q,
max_number_of_messages: 1,
wait_time_seconds: 5,
message_attribute_names: ['ApproximateReceiveCount']
})
expect(response.messages.length).to eq(1)
msg = response.messages[0]
msg_body = JSON.load(msg.body)
payload_hash = JSON.load(msg_body["Message"])
payload = class_kit.from_hash(hash: payload_hash, klass: EventQ::QueueMessage)
EventQ.logger.debug { "[QUEUE] - received message: #{payload}" }
#remove the message from the queue so that it does not get retried
queue_client.sqs.delete_message({ queue_url: q, receipt_handle: msg.receipt_handle })
expect(payload).to_not be_nil
expect(payload.content).to eq(message)
expect(payload.context).to eq(message_context)
end
end
describe '#raise_event' do
shared_examples 'any event raising' do
it 'should raise an event object and be broadcast to a subscriber queue' do
subscription_manager.subscribe(event_type, subscriber_queue)
id = eventq_client.raise_event(event_type, message, message_context)
EventQ.logger.debug { "Message ID: #{id}" }
# sleep briefly to allow the aws message to be sent to the topic and broadcast to subscribers
sleep(1)
q = queue_manager.get_queue(subscriber_queue)
EventQ.logger.debug { '[QUEUE] waiting for message...' }
#request a message from the queue
response = queue_client.sqs.receive_message({
queue_url: q,
max_number_of_messages: 1,
wait_time_seconds: 5,
message_attribute_names: ['ApproximateReceiveCount']
})
expect(response.messages.length).to eq(1)
msg = response.messages[0]
msg_body = JSON.load(msg.body)
payload_hash = JSON.load(msg_body["Message"])
payload = class_kit.from_hash(hash: payload_hash, klass: EventQ::QueueMessage)
EventQ.logger.debug { "[QUEUE] - received message: #{payload}" }
#remove the message from the queue so that it does not get retried
queue_client.sqs.delete_message({ queue_url: q, receipt_handle: msg.receipt_handle })
expect(payload).to_not be_nil
expect(payload.content).to eq(message)
expect(payload.context).to eq(message_context)
end
end
context 'when EventQ.namespace is NOT specified' do
it_behaves_like 'any event raising'
end
context 'when EventQ.namespace is specified' do
before do
EventQ.namespace = 'test'
end
it_behaves_like 'any event raising'
after do
EventQ.namespace = nil
end
end
end
describe '#raise_event_in_queue' do
let(:queue_name) { 'How_do_I_learn_to_queue_like_a_British_person' }
let(:queue) do
EventQ::Queue.new.tap do |queue|
queue.name = queue_name
end
end
let(:delay_seconds) { 3 }
it 'should send a message to SQS with a delay' do
queue_manager.create_queue(queue)
queue_client.sqs.purge_queue(queue_url: queue_client.get_queue_url(queue))
id = eventq_client.raise_event_in_queue(event_type, message, queue, delay_seconds)
EventQ.logger.debug { "Message ID: #{id}" }
EventQ.logger.debug { '[QUEUE] waiting for message...' }
#request a message from the queue
queue_url = queue_client.get_queue_url(queue)
response = queue_client.sqs.receive_message(
queue_url: queue_url,
max_number_of_messages: 1,
wait_time_seconds: 1,
message_attribute_names: ['ApproximateReceiveCount']
)
expect(response.messages.length).to eq(0)
sleep(2)
response = queue_client.sqs.receive_message(
queue_url: queue_url,
max_number_of_messages: 1,
wait_time_seconds: 3,
message_attribute_names: ['ApproximateReceiveCount']
)
expect(response.messages.length).to eq(1)
msg = response.messages[0]
payload_hash = JSON.load(JSON.load(msg.body)[EventQ::Amazon::QueueWorker::MESSAGE])
payload = class_kit.from_hash(hash: payload_hash, klass: EventQ::QueueMessage)
EventQ.logger.debug { "[QUEUE] - received message: #{msg_body}" }
#remove the message from the queue so that it does not get retried
queue_client.sqs.delete_message(queue_url: queue_url, receipt_handle: msg.receipt_handle)
expect(payload.content).to eq(message)
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/serializer.rb
|
module EventQ
module SerializationProviders
module JRuby
module Oj
class Serializer
def dump(obj)
JSON.dump(AttributeWriter.exec(obj))
end
def load(json)
raise NotImplementedError.new("[#{self.class}] - #load method has not yet been implemented.")
end
end
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_base/spec/eventq_base/serialization_providers/jruby/oj/test_item.rb
|
<reponame>adamgeorgeson/eventq
class TestItem
attr_accessor :string
attr_accessor :number
attr_accessor :float
attr_accessor :date
attr_accessor :datetime
attr_accessor :time
attr_accessor :hash
attr_accessor :array_hash
attr_accessor :test_item
attr_accessor :array_test_item
end
|
adamgeorgeson/eventq
|
eventq_base/lib/eventq_base/serialization_providers/jruby/oj/time_writer.rb
|
<filename>eventq_base/lib/eventq_base/serialization_providers/jruby/oj/time_writer.rb
module EventQ
module SerializationProviders
module JRuby
module Oj
class TimeWriter < AttributeWriter
def valid?(obj)
obj.is_a?(Time)
end
def exec(obj)
{
'^t': obj.to_f
}
end
end
end
end
end
end
|
adamgeorgeson/eventq
|
eventq_aws/spec/integration/aws_status_checker_spec.rb
|
require 'spec_helper'
RSpec.describe EventQ::Amazon::StatusChecker, integration: true do
let(:queue_client) do
EventQ::Amazon::QueueClient.new({ aws_account_number: EventQ.AWS_ACCOUNT_NUMBER, aws_region: 'eu-west-1' })
end
let(:queue_manager) do
EventQ::Amazon::QueueManager.new({ client: queue_client })
end
subject do
EventQ::Amazon::StatusChecker.new(queue_manager: queue_manager, client: nil)
end
describe '#queue?' do
let(:queue) do
EventQ::Queue.new.tap do |e|
e.name = SecureRandom.uuid
end
end
context 'when a queue can be connected to' do
before do
queue_manager.create_queue(queue)
end
it 'should return true' do
expect(subject.queue?(queue)).to be true
end
end
context 'when a queue cannot be connected to' do
it 'should return false' do
expect(subject.queue?(queue)).to be false
end
end
end
describe '#event_type?' do
let(:event_type) { SecureRandom.uuid }
context 'when an event_type can be connected to' do
before do
queue_client.create_topic_arn(event_type)
end
it 'should return true' do
expect(subject.event_type?(event_type)).to be true
end
end
context 'when an event_type can NOT be connected to' do
it 'should return false' do
expect(subject.event_type?(event_type)).to be false
end
end
end
end
|