repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
arthurdandrea/sequel-activemodel
|
spec/sequel/activemodel_spec.rb
|
<gh_stars>1-10
require 'spec_helper'
# Smoke test: the gem must expose a VERSION constant.
describe Sequel::ActiveModel do
it 'has a version number' do
expect(Sequel::ActiveModel::VERSION).not_to be nil
end
end
|
arthurdandrea/sequel-activemodel
|
lib/sequel/activemodel.rb
|
require 'sequel/activemodel/version'
# Namespace for the Sequel/ActiveModel integration. The implementation is a
# stub; VERSION is provided by sequel/activemodel/version (required above).
module Sequel
module ActiveModel
# Your code goes here...
end
end
|
rafaelrosafu/mit-rss
|
mit-rss.rb
|
require 'nokogiri'
require 'open-uri'
require 'erb'
# Value object holding the podcast feed metadata that the ERB template reads.
# FIX: :title and :author were each declared twice in the original
# attr_accessor list; the duplicates are removed (behavior unchanged).
class Feed
  attr_accessor :rss_url, :title, :url, :language, :subtitle,
                :author, :summary, :email, :image_url, :category,
                :original_xml_url, :template_file, :output_file
end
# A single podcast episode parsed from the MIT / archive.org file listing.
class Episode
  # NOTE(review): :mp3_duration's reader is shadowed by the method below, so
  # writing mp3_duration= has no visible effect — confirm the writer is unused.
  attr_accessor :title, :description, :mp3_url,
                :mp3_length, :mp3_duration, :tags
  attr_reader :mtime, :length_in_secs

  # Store the file's modification time as an Integer epoch timestamp.
  def mtime=(param)
    @mtime = param.to_i
  end

  # Store the file size in bytes as an Integer.
  # NOTE(review): @size has no reader — confirm whether it is still needed.
  def size=(param)
    @size = param.to_i
  end

  # Store the episode length in seconds as a Float.
  def length_in_secs=(param)
    @length_in_secs = param.to_f
  end

  # Description truncated to at most 250 characters.
  def short_description
    description[0..249]
  end

  # Publication date in ISO 8601 (xmlschema) format.
  def date
    Time.at(mtime).xmlschema
  end

  # Episode length rendered as "MM:SS".
  # FIX: dropped the unused local `duration` the original assigned.
  def mp3_duration
    minutes = (length_in_secs / 60).truncate
    seconds = ((length_in_secs / 60).modulo(1) * 60).truncate
    format("%.2d:%.2d", minutes, seconds)
  end
end
# Renders an ERB template; the template sees `feed` and `episodes` through
# this object's binding (via the attr_readers below).
class Renderer
  attr_reader :erb, :feed, :episodes

  # erb_template_file - path to the ERB template file.
  # feed_info         - Feed metadata object, exposed to the template as `feed`.
  # episode_info      - Array of Episodes, exposed as `episodes`.
  def initialize(erb_template_file, feed_info, episode_info)
    # FIX: File.read closes the handle; the original File.open(...).read
    # leaked the file descriptor until garbage collection.
    @erb = ERB.new(File.read(erb_template_file))
    @feed = feed_info
    @episodes = episode_info
  end

  # Evaluate the template in this instance's binding and return the output.
  def result
    @erb.result binding
  end
end
# Download the archive.org XML file listing at +mit_xml_url+ and convert
# every .mp3 entry into an Episode.
#
# Returns an Array of Episode objects (empty when no mp3 files are listed).
def parse_xml(mit_xml_url)
  uri = URI.parse(mit_xml_url)
  # FIX: use URI.open — Kernel#open no longer accepts URLs on Ruby >= 3.0.
  # (Also removed the unused `file` local the original computed here.)
  xml = Nokogiri::XML(URI.open(mit_xml_url))
  remote_files = xml.xpath('//file')
  result = []
  remote_files.each do |remote_file|
    next unless File.extname(remote_file['name']) == ".mp3"
    # Swap the last path segment for the mp3 file name to build its URL.
    uri.path = uri.path.split('/')[0..-2].concat([remote_file['name']]).join('/')
    episode = Episode.new
    episode.title = remote_file.children.at('title').text
    # NOTE(review): description is copied from <title>; confirm the source
    # XML offers no richer description element.
    episode.description = remote_file.children.at('title').text
    episode.mp3_url = uri.to_s
    episode.mp3_length = remote_file.children.at('size').text
    episode.length_in_secs = remote_file.children.at('length').text
    episode.mtime = remote_file.children.at('mtime').text
    episode.tags = []
    result << episode
  end
  result
end
# Render +episode_data+ through the feed's ERB template and write the RSS
# document to feed_info.output_file. Returns the output file path.
def generate_rss_feed(feed_info, episode_data)
  rss = Renderer.new(feed_info.template_file, feed_info, episode_data).result
  File.open(feed_info.output_file, 'w+') { |out| out.write(rss) }
  feed_info.output_file
end
# End-to-end conversion: fetch and parse the remote XML listing, then render
# and write the RSS feed. Returns the output file path.
def convert_mit_xml_to_rss_feed(feed_info)
  generate_rss_feed(feed_info, parse_xml(feed_info.original_xml_url))
end
# Build and write the RSS feed for MIT OCW CMS.608 (Game Design, Spring 2014).
# All metadata is hard-coded for this one course; returns the output path.
def generate_rss_for_MITCMS_608JS14
feed_info = Feed.new
feed_info.original_xml_url = 'http://ia902606.us.archive.org/10/items/MITCMS.608JS14/MITCMS.608JS14_files.xml'
feed_info.template_file = './feed.xml.erb'
feed_info.output_file = 'MITCMS_608JS14_rss.xml'
feed_info.rss_url = "http://geekout.fm/mit/#{feed_info.output_file}"
feed_info.title = 'MIT OpenCourseWare - Game Design - <NAME>, <NAME> - CMS.608 - Spring 2014'
feed_info.url = 'http://ocw.mit.edu/courses/comparative-media-studies-writing/cms-608-game-design-spring-2014/index.htm'
feed_info.language = 'en-US'
feed_info.subtitle = 'MIT OpenCourseWare - Game Design - CMS.608 - Spring 2014'
feed_info.author = '<NAME>, <NAME>, students'
# Summary is clipped to 250 characters (same limit as Episode#short_description).
feed_info.summary = "This course is built around practical instruction in the design and analysis of non-digital games. It provides students the texts, tools, references, and historical context to analyze and compare game designs across a variety of genres. In teams, students design, develop, and thoroughly test their original games to better understand the interaction and evolution of game rules. Covers various genres and types of games, including sports, game shows, games of chance, card games, schoolyard games, board games, and role-playing games."[0..249]
feed_info.email = ''
feed_info.image_url = 'http://ocw.mit.edu/courses/comparative-media-studies-writing/cms-608-game-design-spring-2014/cms-608s14.jpg'
feed_info.category = 'Games & Hobbies'
convert_mit_xml_to_rss_feed feed_info
end
|
ef4/cardstack
|
packages/boxel/tests/dummy/app/data/fixtures/media-registry/api/RenamePaths.rb
|
<gh_stars>100-1000
#!/usr/bin/env ruby
# Rewrites all_tracks_combined.js in place: every hard-coded cover-art URL is
# replaced with an ES-module import of the image, and matching import/export
# statements are generated.
javascript = IO.read 'all_tracks_combined.js'
# Unique, sorted base names of every referenced cover image.
cover_art_file_names = javascript
  .scan(/"cover_art": "\/boxel\/media-registry\/covers\/(.+)\.jpg"/)
  .map{|match| match[0] }
  .sort
  .uniq
# One import per image size. Module identifiers drop the dashes because
# dashes are not legal in JS identifiers.
# FIX: the four interpolations below had been corrupted to "#(unknown)";
# restored to #{filename} so the generated paths reference the real images.
imports = cover_art_file_names.map do |filename|
  modulename_prefix = filename.gsub('-', '')
  "import #{modulename_prefix}Cover from 'dummy/images/media-registry/covers/#{filename}.jpg';
import #{modulename_prefix}Thumb from 'dummy/images/media-registry/covers/thumb/#{filename}.jpg';
import #{modulename_prefix}Medium from 'dummy/images/media-registry/covers/medium/#{filename}.jpg';
import #{modulename_prefix}Large from 'dummy/images/media-registry/covers/large/#{filename}.jpg';
"
end
# insert imports at top, just before the default export
javascript.gsub!(/(export default \[)/, imports.join("\n") + "\n\/\/ All covers reexpored at bottom of file\n" + '\1')
# replace usages with modulenames
javascript.gsub!(/"cover_art": "\/boxel\/media-registry\/covers\/(.+)\.jpg"/) {
  filename = Regexp.last_match[1]
  modulename_prefix = filename.gsub('-','')
  %Q|"cover_art": #{modulename_prefix}Cover|
}
['thumb', 'medium', 'large'].each do |size|
  javascript.gsub!(/"cover_art_#{size}": "\/boxel\/media-registry\/covers\/#{size}\/(.+)\.jpg"/) {
    filename = Regexp.last_match[1]
    modulename_prefix = filename.gsub('-','')
    %Q|"cover_art_#{size}": #{modulename_prefix}#{size.capitalize}|
  }
end
# generate exports: four identifiers (one per size) for each image
modules = cover_art_file_names.map {|filename| filename.gsub('-', '') }.map{|modulename|
  ["#{modulename}Cover", "#{modulename}Thumb", "#{modulename}Medium", "#{modulename}Large"]
}.flatten
javascript << "\n\nexport { #{modules.join(", ")} };"
# write the rewritten source back out
File.open("all_tracks_combined.js", 'w') do |file|
  file.write(javascript)
end
|
Alexis-Trainee/desafios-ruby
|
challenge_5_two_product_problem.rb
|
<filename>challenge_5_two_product_problem.rb<gh_stars>0
# Two Product Problem
# Solution found on the internet: for each divisor of n, remember the
# complementary factor n / divisor; when a remembered value later appears
# in the input, the pair [n / value, value] multiplies to n.
def two_product(arr, n)
  seen = []
  arr.each do |value|
    next unless (n % value).zero?
    return [n / value, value] if seen.include?(value)
    seen << n / value
  end
  nil
end
puts "Forma da internet"
puts two_product([1, 2, -1, 4, 5], 20)
puts "\n"
# Solution written at home: same idea as above, but with the divisibility
# test as a surrounding conditional instead of a guard clause.
def two_product(arr, n)
  factors = []
  arr.each do |value|
    if n % value == 0
      return [n / value, value] if factors.include?(value)
      factors << n / value
    end
  end
  nil
end
puts "Forma de casa"
puts two_product([1, 2, -1, 4, 5], 20)
puts "\n"
puts two_product([1, 2, 3, 4, 5], 10)
puts "\n"
puts two_product([100, 12, 4, 1, 2], 15)
# Third version, written with Quemia's help: identical logic, computing the
# complementary factor once into a local before using it.
def two_product(arr, n)
  remembered = []
  arr.each do |i|
    next unless n % i == 0
    quotient = n / i
    return [quotient, i] if remembered.include?(i)
    remembered << quotient
  end
  nil
end
puts two_product([10, -1, 4, 5], 20)
puts "\n"
puts two_product([1, 2, 3, 4, 5], 10)
puts "\n"
puts two_product([100, 12, 4, 1, 2], 15)
puts "\n"
puts two_product([100, 12, 5, 5, 2, 3], 25)
|
Alexis-Trainee/desafios-ruby
|
challenge_3_sort_and_rotate.rb
|
<reponame>Alexis-Trainee/desafios-ruby
# Sample inputs for the sort-and-rotate check below.
arr = [3, 4, 5, 1, 2]     # sorted after 3 rotations
check = [7, 9, 11, 12, 5] # sorted after 4 rotations
array = [1, 2, 3]         # already sorted -> "NO"
#puts array.length
# puts "\n"
# Return "YES" when some non-trivial rotation of +arr+ equals its sorted
# order, "NO" otherwise. An already-sorted array answers "NO" (no rotation
# needed), matching the original behavior.
#
# Fixed: the original rotated the caller's array in place with rotate! and
# printed debug output on every step; this version leaves +arr+ untouched
# and is silent.
def check(arr)
  sorted = arr.sort
  return "NO" if arr == sorted
  (1..arr.length).each do |shift|
    return "YES" if arr.rotate(shift) == sorted
  end
  "NO"
end
# puts check(arr)
# puts check(check)
# puts check(array)
|
Alexis-Trainee/desafios-ruby
|
challenge_1_expensive_orders.rb
|
<reponame>Alexis-Trainee/desafios-ruby
# Challenge 1 — Expensive Orders: keep only the entries whose price is
# strictly greater than the threshold +k+.
def expensive_order(hash, k)
  hash.reject { |_name, price| price <= k }
end
puts expensive_order({ "a" => 3000, "b" => 200, "c" => 1050 }, 1000)
######################################
|
Alexis-Trainee/desafios-ruby
|
challenge_4_car_park_exit.rb
|
# Car Park Exit
# Car park grid: 2 marks the car's starting cell and 1 marks a ramp down to
# the next level (see parking_exit below); the exit is the last cell of the
# bottom row.
arr = [
[1, 0, 0, 0, 2],
[0, 0, 0, 0, 0]
]
# Compute the sequence of moves ("R<n>", "L<n>", "D<n>") that drives the car
# (marked 2) from its starting cell to the exit at the far-right cell of the
# bottom level, descending through ramps marked 1.
def parking_exit(arr)
result = []
level = 0; space = 0
# Locate the car: level = row index, space = column index of the 2 marker.
arr.each_with_index {|f, i| (level = i; space = f.index(2)) if f.index(2) != nil}
#############################################################################################################################
#f.index() looks up the index of the value given in parentheses
# arr.each_with_index do |f, i|
#puts "f iterates over each element [[f1], [f2]]"
#puts f #the iterated element
#puts '----------'
# level = i; # i is the index
# space = f.index(2) #acts as a finder, searching the array for the given element
# if f.index(2) != nil
##end
#puts 'level'
#puts level
#puts 'space'
#puts space
#puts '-----------'
#end
#end
############################################################################################################################
begin #start of the do-while loop
#array length - 1; e.g. arr=[[1, 2], [3, 4]] gives 1 — the index of the last row
if level == arr.length - 1
#puts "\n"
#puts "LEVEL #{level}" these puts showed that level equals arr.length - 1 here
if space != arr[0].length - 1 #number of columns minus one; with 5 columns this is 4
result << "R" + (arr[0].length - space - 1).to_s #convert the rightward distance to a string and prefix it with "R"
end
space = arr[0].length - 1 #number of columns - 1 (now at the exit column)
else
if arr[level][space] == 1
count = 0
while (arr[level][space] == 1 && level != arr.length ) do
level += 1;
count += 1
end
result << "D" + count.to_s
else
pos = arr[level].index(1)
d = pos < space ? "L" : "R"
result << d + ((pos-space).abs).to_s
space = pos
end
end
end until (space == arr[0].length - 1 && level == arr.length - 1)
result
end
puts parking_exit(arr)
#####################################################################
# Scratch experiments with each_with_index, kept for reference:
# arr = [ [1, 0, 0, 0, 5], [0, 0, 5, 0, 0],[5, 0, 0, 0, 0]]
#arr.each_with_index(2) do |value, index|
# puts "#{index}: #{value}"
#end
#arr.each_with_index do |value, index|
# puts "#{index}: #{value}"
#end
# arr.each_with_index do |f, i|
# azeite = f
# level = i
# puts f.index(5)
# space = f.index(5) if f.index(5) != nil
# puts "level: #{level}"
# puts "space:#{space}"
# puts "azeite:#{azeite}"
# puts "----------------"
# end
|
Alexis-Trainee/desafios-ruby
|
poquer.rb
|
#RANK (hand rankings):
#High Card: highest-value card (2 through Ace).
#One Pair: two cards of the same value.
#Two Pairs: two different pairs.
#Three of a Kind: three cards of the same value.
#Straight: all cards have consecutive values.
#Flush: all cards of the same suit.
#Full House: three of a kind plus a pair.
#Four of a Kind: four cards of the same value.
#Straight Flush: consecutive values, all of the same suit.
#Royal Flush: 10, Jack, Queen, King, Ace of the same suit.
#Input
#The two hands of cards
#Processing
#Iterate over the hands
#Output
#<NAME>
# Card faces (Ace..2, T = ten) and suits (clubs/diamonds/hearts/spades).
FACES = "AKQJT98765432"
SUITS = "cdhs"
srand
# Build the 52-card deck as two-character strings, e.g. "Ac".
deck = []
FACES.each_byte do |f|
SUITS.each_byte do |s|
deck.push(f.chr + s.chr)
end
end
#shuffle deck three times by random insertion
3.times do
shuf = []
deck.each do |c|
loc = rand(shuf.size + 1)
shuf.insert(loc, c)
end
deck = shuf.reverse
end
# Deal 5 community cards and 8 two-card hole hands from the top of the deck.
common = Array.new(5){deck.pop}
hole = Array.new(8){Array.new(2) {deck.pop}}
hands = []
all_fold = true
# Re-deal until at least one hand combines with all 5 community cards
# (num_common == 5), i.e. not every hand "folded" early.
while all_fold do
hands = []
hole.each do |h|
num_common = [0, 3, 4, 5][rand (4)]
puts " num_common = #{num_common}"
if num_common ==5
all_fold = false
end
if num_common > 0
hand = h + common[0...num_common]
else
hand = h
end
hands.push(hand.join(' '))
end
end
hands.each{|h| puts h}
#puts hole
#puts common
# puts deck
|
rubiojr/playground
|
forcefield.rb
|
<reponame>rubiojr/playground<gh_stars>1-10
require 'rubygems'
require 'net/http'
require 'rack'
require 'delegate'
require 'util'
require 'uri'
require 'thin'
#
# Dumb HTTP Proxy
#
# Initial idea and code taken from http://github.com/mikehale/rat-hole
#
module ForceField
  # Rack application that forwards GET/POST requests to an upstream host and
  # exposes request/response hooks for inspection.
  class Proxy
    # host - optional upstream host; when nil, each request's own Host
    #        header decides where to forward.
    def initialize(host=nil)
      @host = host
      @request_callback = nil
      @response_callback = nil
    end

    # With a block: register the request hook. Without a block: invoke the
    # registered hook with the current request (used internally by #call).
    def on_request(&block)
      if block
        @request_callback = block
      else
        @request_callback.call @req if @request_callback
      end
    end

    # Same dual-purpose pattern as #on_request, for the response side.
    def on_response(&block)
      if block
        @response_callback = block
      else
        @response_callback.call @resp if @response_callback
      end
    end

    # Rack entry point: forward the request upstream and return the
    # upstream response as a Rack response triple.
    def call(env)
      # we don't want to handle compressed stuff ATM
      env.delete('HTTP_ACCEPT_ENCODING')
      @req = Rack::Request.new(env)
      target = @host || @req.host
      # process request hooks
      on_request
      user_headers = request_headers(@req.env)
      uri = URI.parse(env['REQUEST_URI'])
      # FIX: only append "?" when a query string is present; the original
      # produced "/path?" for query-less URLs.
      upath = uri.query ? "#{uri.path}?#{uri.query}" : uri.path
      Net::HTTP.start(target) do |http|
        if @req.get?
          response = http.get(upath, user_headers)
        elsif @req.post?
          post = Net::HTTP::Post.new(upath, user_headers)
          post.form_data = @req.POST
          response = http.request(post)
        else
          # FIX: anything but GET/POST previously crashed later with a
          # NoMethodError on nil; fail with an explicit message instead.
          raise "ForceField::Proxy only supports GET and POST (got #{@req.request_method})"
        end
        code = response.code.to_i
        body = response.body || ''
        @resp = Rack::Response.new(body, code)
        # process response hooks
        on_response
        @resp.finish
      end
    end

    # Rebuild the client's HTTP_* Rack env entries as Camel-Case header
    # names for the upstream request (to_camel_case comes from 'util').
    def request_headers(env)
      env.select{|k,v| k =~ /^HTTP/}.inject({}) do |h, e|
        k,v = e
        h.merge(k.split('_')[1..-1].join('-').to_camel_case => v)
      end
    end
  end
end
# Standalone demo: run the proxy on port 3001, logging each request.
if $0 == __FILE__
require 'loggers'
p = ForceField::Proxy.new
p.on_request do |req|
if req.get?
uri = URI.parse(req.env['REQUEST_URI'])
if not uri.query.nil?
fullpath = "#{uri.path}?#{uri.query}"
else
fullpath = uri.path
end
# Log GETs through Rack's error stream; POSTs fall back to stdout.
req.env['rack.errors'].puts "req: GET '#{fullpath}' [#{req.host}]"
elsif req.post?
puts "req: POST #{req.fullpath} params: #{req.params.inspect} [#{req.host}]"
else
puts 'req: UNKNOWN'
end
end
builder = Rack::Builder.new do
#use Rack::CombinedLogger
run p
end
# NOTE(review): 'thin' is required at the top of the file but the Mongrel
# handler is used here — confirm which server is intended.
Rack::Handler::Mongrel.run builder, :Port => 3001
end
|
riddler/scat-rb
|
lib/statifier.rb
|
require "statifier/version"
module Statifier
  # Base error class; library-specific errors should subclass this.
  class Error < StandardError
  end
end
|
riddler/scat-rb
|
test/statifier_test.rb
|
<gh_stars>0
require "test_helper"
# Smoke test: the gem must define a version constant.
class StatifierTest < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::Statifier::VERSION
end
end
|
sr/sinatra
|
lib/sinatra/tilt.rb
|
<gh_stars>1-10
module Tilt
# Library version string.
VERSION = '0.7'
# Registry backing Tilt.mappings / Tilt.register: lowercase extension or
# filename pattern => template class.
@template_mappings = {}
# Hash of template path pattern => template implementation class mappings.
# Accessor for the module-level registry initialized above.
def self.mappings
@template_mappings
end
# Register +template_class+ under the file extension +ext+. A leading dot
# is stripped and matching is case-insensitive.
def self.register(ext, template_class)
  normalized = ext.to_s.sub(/^\./, '')
  mappings[normalized.downcase] = template_class
end
# Create a template for +file+ using the class registered for the file's
# extension (see Tilt.[]). Fails when no engine matches.
def self.new(file, line=nil, options={}, &block)
  template_class = self[file]
  if template_class
    template_class.new(file, line, options, &block)
  else
    fail "No template engine registered for #{File.basename(file)}"
  end
end
# Lookup a template class for the given filename or file
# extension. Return nil when no implementation is found.
def self.[](file)
# 1) exact match on the full lowercased path
if @template_mappings.key?(pattern = file.to_s.downcase)
@template_mappings[pattern]
# 2) exact match on the basename
elsif @template_mappings.key?(pattern = File.basename(pattern))
@template_mappings[pattern]
else
# 3) strip one leading "name." segment at a time so that e.g.
# "layout.html.erb" falls back to "html.erb", then "erb"
while !pattern.empty?
if @template_mappings.key?(pattern)
return @template_mappings[pattern]
else
pattern = pattern.sub(/^[^.]*\.?/, '')
end
end
nil
end
end
# Mixin allowing template compilation on scope objects.
#
# Including this module in scope objects passed to Template#render
# causes template source to be compiled to methods the first time they're
# used. This can yield significant (5x-10x) performance increases for
# templates that support it (ERB, Erubis, Builder).
#
# It's also possible (though not recommended) to include this module in
# Object to enable template compilation globally. The downside is that
# the template methods will polute the global namespace and could lead to
# unexpected behavior.
module CompileSite
# Marker method: Template#evaluate checks respond_to?(:__tilt__) to decide
# whether the scope supports compiled template methods.
def __tilt__
end
end
# Base class for template implementations. Subclasses must implement
# the #prepare method and one of the #evaluate or #template_source
# methods.
class Template
# Template source; loaded from a file or given directly.
attr_reader :data
# The name of the file where the template data was loaded from.
attr_reader :file
# The line number in #file where template data was loaded from.
attr_reader :line
# A Hash of template engine specific options. This is passed directly
# to the underlying engine and is not used by the generic template
# interface.
attr_reader :options
# Used to determine if this class's initialize_engine method has
# been called yet.
@engine_initialized = false
class << self
attr_accessor :engine_initialized
alias engine_initialized? engine_initialized
end
# Create a new template with the file, line, and options specified. By
# default, template data is read from the file. When a block is given,
# it should read template data and return as a String. When file is nil,
# a block is required.
#
# All arguments are optional.
def initialize(file=nil, line=1, options={}, &block)
@file, @line, @options = nil, 1, {}
# Arguments are detected by duck type (String-like -> file, Integer-like
# -> line, Hash-like -> options) so they may be given in any order.
[options, line, file].compact.each do |arg|
case
when arg.respond_to?(:to_str) ; @file = arg.to_str
when arg.respond_to?(:to_int) ; @line = arg.to_int
when arg.respond_to?(:to_hash) ; @options = arg.to_hash
else raise TypeError
end
end
raise ArgumentError, "file or block required" if (@file || block).nil?
# call the initialize_engine method if this is the very first time
# an instance of this class has been created.
if !self.class.engine_initialized?
initialize_engine
self.class.engine_initialized = true
end
# used to generate unique method names for template compilation
# (object_id plus a timestamp makes collisions practically impossible)
stamp = (Time.now.to_f * 10000).to_i
@_prefix = "__tilt_O#{object_id.to_s(16)}T#{stamp.to_s(16)}"
# load template data and prepare
@reader = block || lambda { |t| File.read(@file) }
@data = @reader.call(self)
prepare
end
# Render the template in the given scope with the locals specified. If a
# block is given, it is typically available within the template via
# +yield+.
def render(scope=Object.new, locals={}, &block)
evaluate scope, locals || {}, &block
end
# The basename of the template file.
def basename(suffix='')
File.basename(file, suffix) if file
end
# The template file's basename with all extensions chomped off.
def name
basename.split('.', 2).first if basename
end
# The filename used in backtraces to describe the template.
def eval_file
file || '(__TEMPLATE__)'
end
protected
# Called once and only once for each template subclass the first time
# the template class is initialized. This should be used to require the
# underlying template library and perform any initial setup.
def initialize_engine
end
# Do whatever preparation is necessary to setup the underlying template
# engine. Called immediately after template data is loaded. Instance
# variables set in this method are available when #evaluate is called.
#
# Subclasses must provide an implementation of this method.
def prepare
if respond_to?(:compile!)
# backward compat with tilt < 0.6; just in case
warn 'Tilt::Template#compile! is deprecated; implement #prepare instead.'
compile!
else
raise NotImplementedError
end
end
# Process the template and return the result. When the scope mixes in
# the Tilt::CompileSite module, the template is compiled to a method and
# reused given identical locals keys. When the scope object
# does not mix in the CompileSite module, the template source is
# evaluated with instance_eval. In any case, template executation
# is guaranteed to be performed in the scope object with the locals
# specified and with support for yielding to the block.
def evaluate(scope, locals, &block)
if scope.respond_to?(:__tilt__)
# compiled methods are keyed on the locals' key set so each distinct
# combination of locals gets its own method
method_name = compiled_method_name(locals.keys.hash)
if scope.respond_to?(method_name)
# fast path
scope.send method_name, locals, &block
else
# compile first and then run
compile_template_method(method_name, locals)
scope.send method_name, locals, &block
end
else
source, offset = local_assignment_code(locals)
source = [source, template_source].join("\n")
scope.instance_eval source, eval_file, line - offset
end
end
# Return a string containing the (Ruby) source code for the template. The
# default Template#evaluate implementation requires this method be
# defined and guarantees correct file/line handling, custom scopes, and
# support for template compilation when the scope object allows it.
def template_source
raise NotImplementedError
end
private
# Ruby source that assigns each local from the `locals` hash, plus the
# number of lines it adds (used to correct backtrace line numbers).
def local_assignment_code(locals)
return ['', 1] if locals.empty?
source = locals.collect { |k,v| "#{k} = locals[:#{k}]" }
[source.join("\n"), source.length]
end
def compiled_method_name(locals_hash)
"#{@_prefix}L#{locals_hash.to_s(16).sub('-', 'n')}"
end
# Define the compiled template as a method on CompileSite so any scope
# that includes the module can call it.
def compile_template_method(method_name, locals)
source, offset = local_assignment_code(locals)
source = [source, template_source].join("\n")
offset += 1
# add the new method
CompileSite.module_eval <<-RUBY, eval_file, line - offset
def #{method_name}(locals)
#{source}
end
RUBY
# setup a finalizer to remove the newly added method
ObjectSpace.define_finalizer self,
Template.compiled_template_method_remover(CompileSite, method_name)
end
def self.compiled_template_method_remover(site, method_name)
proc { |oid| garbage_collect_compiled_template_method(site, method_name) }
end
def self.garbage_collect_compiled_template_method(site, method_name)
site.module_eval do
begin
remove_method(method_name)
rescue NameError
# method was already removed (ruby >= 1.9)
end
end
end
# Require the engine's library on demand, warning when other threads are
# alive since bare require is not thread-safe at that point.
def require_template_library(name)
if Thread.list.size > 1
warn "WARN: tilt autoloading '#{name}' in a non thread-safe way; " +
"explicit require '#{name}' suggested."
end
require name
end
end
# Extremely simple template cache. Call #fetch with any list of hashable
# arguments (such as the arguments to Tilt.new); the block runs only on
# the first lookup for that argument list and its result is memoized:
#
#   cache = Tilt::Cache.new
#   cache.fetch(path, line, options) { Tilt.new(path, line, options) }
#
# Subsequent invocations return the already loaded template object.
class Cache
  def initialize
    @store = {}
  end

  # Memoized lookup keyed by the full argument array. NOTE: uses ||=, so
  # a nil/false value is recomputed on every call.
  def fetch(*key)
    @store[key] ||= yield
  end

  # Forget everything cached so far.
  def clear
    @store = {}
  end
end
# Template Implementations ================================================

# The template source is evaluated as a Ruby string; the #{} interpolation
# syntax can be used to generate dynamic output.
class StringTemplate < Template
  # Wrap the raw template data in a %Q{} literal for later evaluation.
  def prepare
    @code = "%Q{#{data}}"
  end

  def template_source
    @code
  end
end
register 'str', StringTemplate
# ERB template implementation. See:
# http://www.ruby-doc.org/stdlib/libdoc/erb/rdoc/classes/ERB.html
class ERBTemplate < Template
def initialize_engine
require_template_library 'erb' unless defined? ::ERB
end
def prepare
# :outvar may name a local or an instance variable for ERB's buffer
@outvar = (options[:outvar] || '_erbout').to_s
@engine = ::ERB.new(data, options[:safe], options[:trim], @outvar)
end
def template_source
@engine.src
end
def evaluate(scope, locals, &block)
preserve_outvar_value(scope) { super }
end
private
# Retains the previous value of outvar when configured to use
# an instance variable. This allows multiple templates to be rendered
# within the context of an object without overwriting the outvar.
def preserve_outvar_value(scope)
if @outvar[0] == ?@
previous = scope.instance_variable_get(@outvar)
output = yield
scope.instance_variable_set(@outvar, previous)
output
else
yield
end
end
# ERB generates a line to specify the character coding of the generated
# source in 1.9. Account for this in the line offset.
if RUBY_VERSION >= '1.9.0'
def local_assignment_code(locals)
source, offset = super
[source, offset + 1]
end
end
end
%w[erb rhtml].each { |ext| register ext, ERBTemplate }
# Erubis template implementation. See:
# http://www.kuwata-lab.com/erubis/
class ErubisTemplate < ERBTemplate
def initialize_engine
require_template_library 'erubis' unless defined? ::Erubis
end
def prepare
# pre/postamble are disabled because template_source adds its own below
@options.merge!(:preamble => false, :postamble => false)
@outvar = (options.delete(:outvar) || '_erbout').to_s
@engine = ::Erubis::Eruby.new(data, options)
end
def template_source
["#{@outvar} = _buf = ''", @engine.src, "_buf.to_s"].join(";")
end
private
# Erubis doesn't have ERB's line-off-by-one under 1.9 problem. Override
# and adjust back.
if RUBY_VERSION >= '1.9.0'
def local_assignment_code(locals)
source, offset = super
[source, offset - 1]
end
end
end
register 'erubis', ErubisTemplate
# Haml template implementation. See:
# http://haml.hamptoncatlin.com/
class HamlTemplate < Template
def initialize_engine
require_template_library 'haml' unless defined? ::Haml::Engine
end
def prepare
@engine = ::Haml::Engine.new(data, haml_options)
end
# Precompiled Haml source. Taken from the precompiled_with_ambles
# method in Haml::Precompiler:
# http://github.com/nex3/haml/blob/master/lib/haml/precompiler.rb#L111-126
def template_source
@engine.instance_eval do
<<-RUBY
_haml_locals = locals
begin
extend Haml::Helpers
_hamlout = @haml_buffer = Haml::Buffer.new(@haml_buffer, #{options_for_buffer.inspect})
_erbout = _hamlout.buffer
__in_erb_template = true
#{precompiled}
#{precompiled_method_return_value}
ensure
@haml_buffer = @haml_buffer.upper
end
RUBY
end
end
private
# +6 accounts for the preamble lines template_source adds before the
# precompiled Haml body.
def local_assignment_code(locals)
source, offset = super
[source, offset + 6]
end
def haml_options
options.merge(:filename => eval_file, :line => line)
end
end
register 'haml', HamlTemplate
# Sass template implementation. See:
# http://haml.hamptoncatlin.com/
#
# Sass templates do not support object scopes, locals, or yield.
class SassTemplate < Template
def initialize_engine
require_template_library 'sass' unless defined? ::Sass::Engine
end
def prepare
@engine = ::Sass::Engine.new(data, sass_options)
end
# Output is cached after the first render since it cannot vary by scope
# or locals.
def evaluate(scope, locals, &block)
@output ||= @engine.render
end
private
def sass_options
options.merge(:filename => eval_file, :line => line)
end
end
register 'sass', SassTemplate
# Lessscss template implementation. See:
# http://lesscss.org/
#
# Less templates do not support object scopes, locals, or yield.
class LessTemplate < Template
def initialize_engine
require_template_library 'less' unless defined? ::Less::Engine
end
def prepare
@engine = ::Less::Engine.new(data)
end
# Recompiles on every call (unlike SassTemplate, the result is not cached).
def evaluate(scope, locals, &block)
@engine.to_css
end
end
register 'less', LessTemplate
# Builder template implementation. See:
# http://builder.rubyforge.org/
class BuilderTemplate < Template
def initialize_engine
require_template_library 'builder' unless defined?(::Builder)
end
# No preparation needed: the XmlMarkup object is built per evaluation.
def prepare
end
def evaluate(scope, locals, &block)
xml = ::Builder::XmlMarkup.new(:indent => 2)
if data.respond_to?(:to_str)
# string template: expose the builder as the `xml` local and fall back
# to the generic source-evaluation path
locals[:xml] = xml
super(scope, locals, &block)
elsif data.kind_of?(Proc)
# proc template: call it directly with the builder
data.call(xml)
end
xml.target!
end
def template_source
data.to_str
end
end
register 'builder', BuilderTemplate
# Liquid template implementation. See:
# http://liquid.rubyforge.org/
#
# Liquid is designed to be a *safe* template system and threfore
# does not provide direct access to execuatable scopes. In order to
# support a +scope+, the +scope+ must be able to represent itself
# as a hash by responding to #to_h. If the +scope+ does not respond
# to #to_h it will be ignored.
#
# LiquidTemplate does not support yield blocks.
#
# It's suggested that your program require 'liquid' at load
# time when using this template engine.
class LiquidTemplate < Template
def initialize_engine
require_template_library 'liquid' unless defined? ::Liquid::Template
end
def prepare
@engine = ::Liquid::Template.parse(data)
end
def evaluate(scope, locals, &block)
# Liquid wants string keys; explicit locals win over scope values
locals = locals.inject({}){ |h,(k,v)| h[k.to_s] = v ; h }
if scope.respond_to?(:to_h)
scope = scope.to_h.inject({}){ |h,(k,v)| h[k.to_s] = v ; h }
locals = scope.merge(locals)
end
# the block's output is exposed as both 'yield' and 'content'
locals['yield'] = block.nil? ? '' : yield
locals['content'] = locals['yield']
@engine.render(locals)
end
end
register 'liquid', LiquidTemplate
# Discount Markdown implementation. See:
# http://github.com/rtomayko/rdiscount
#
# RDiscount is a simple text filter. It does not support +scope+ or
# +locals+. The +:smart+ and +:filter_html+ options may be set true
# to enable those flags on the underlying RDiscount object.
class RDiscountTemplate < Template
# The subset of supported options that were enabled, as RDiscount flags.
def flags
[:smart, :filter_html].select { |flag| options[flag] }
end
def initialize_engine
require_template_library 'rdiscount' unless defined? ::RDiscount
end
def prepare
@engine = RDiscount.new(data, *flags)
end
def evaluate(scope, locals, &block)
@engine.to_html
end
end
register 'markdown', RDiscountTemplate
register 'mkd', RDiscountTemplate
register 'md', RDiscountTemplate
# RedCloth (Textile) implementation. See:
# http://redcloth.org/
# Simple text filter: scope and locals are ignored.
class RedClothTemplate < Template
def initialize_engine
require_template_library 'redcloth' unless defined? ::RedCloth
end
def prepare
@engine = RedCloth.new(data)
end
def evaluate(scope, locals, &block)
@engine.to_html
end
end
register 'textile', RedClothTemplate
# Mustache template implementation. See:
# http://github.com/defunkt/mustache
#
# When a scope argument is provided to MustacheTemplate#render, the
# instance variables are copied from the scope object to the Mustache
# view.
class MustacheTemplate < Template
attr_reader :engine
def initialize_engine
require_template_library 'mustache' unless defined? ::Mustache
end
def prepare
Mustache.view_namespace = options[:namespace]
Mustache.view_path = options[:view_path] || options[:mustaches]
# view class: explicit :view option, or derived from the template name
@engine = options[:view] || Mustache.view_class(name)
# forward any remaining options the view class has writers for
options.each do |key, value|
next if %w[view view_path namespace mustaches].include?(key.to_s)
@engine.send("#{key}=", value) if @engine.respond_to? "#{key}="
end
end
def evaluate(scope=nil, locals={}, &block)
instance = @engine.new
# copy instance variables from scope to the view
scope.instance_variables.each do |name|
instance.instance_variable_set(name, scope.instance_variable_get(name))
end
# locals get added to the view's context
locals.each do |local, value|
instance[local] = value
end
# if we're passed a block it's a subview. Sticking it in yield
# lets us use {{yield}} in layout.html to render the actual page.
instance[:yield] = block.call if block
instance.template = data unless instance.compiled?
instance.to_html
end
end
register 'mustache', MustacheTemplate
# RDoc template. See:
# http://rdoc.rubyforge.org/
#
# It's suggested that your program require 'rdoc/markup' and
# 'rdoc/markup/to_html' at load time when using this template
# engine.
class RDocTemplate < Template
def initialize_engine
unless defined?(::RDoc::Markup)
require_template_library 'rdoc/markup'
require_template_library 'rdoc/markup/to_html'
end
end
def prepare
# conversion happens up front; @engine holds the finished HTML
markup = RDoc::Markup::ToHtml.new
@engine = markup.convert(data)
end
def evaluate(scope, locals, &block)
@engine.to_s
end
end
register 'rdoc', RDocTemplate
# CoffeeScript info:
# http://jashkenas.github.com/coffee-script/
# Compiled once in #prepare; scope and locals are ignored.
class CoffeeTemplate < Template
def initialize_engine
require_template_library 'coffee-script' unless defined? ::CoffeeScript
end
def prepare
@engine = ::CoffeeScript::compile(data, options)
end
def evaluate(scope, locals, &block)
@engine
end
end
register 'coffee', CoffeeTemplate
end
|
dressupgeekout/rite-club
|
recorder/riteclubrecorder.rb
|
#
# riteclubrecorder
#
# This script implements a TCP server which acts as a front-end to starting
# and stopping a FFmpeg subprocess. We try to determine where Pyre's window
# is being displayed, and invoke FFmpeg to record its footage. It will keep
# recording until explicitly stopped.
#
# The idea is that the Rite Club Companion app will start and stop FFmpeg
# automatically, on the behalf of the player, when interesting events happen
# from within Pyre (namely, when a rite starts and when a rite ends). The
# Companion will rely on this script in order to communicate with FFmpeg.
#
# XXX Really should come up with a way to let the user tailor the video
# encoding settings... what video you're capable of highly depends on the
# player's specs.
#
require 'fileutils'
require 'optparse'
require 'rubygems'
require 'socket'
# Flush stdout/stderr immediately so log lines appear in real time.
$stdout.sync = true
$stderr.sync = true
class App
attr_accessor :ffmpeg # path to the ffmpeg executable
attr_accessor :quiet  # when truthy, pass "-v 0" to ffmpeg
attr_accessor :outdir # output directory (defaults to Dir.pwd)
# Defaults for the TCP control server and the ffmpeg command.
DEFAULT_HOST = "127.0.0.1"
DEFAULT_PORT = 9876
DEFAULT_FFMPEG = "ffmpeg"
# Gem::Platform os values treated as Windows by #windows?.
WINDOWS_FLAVORS = ["windows", "mingw32"]
# Keyword options: :host, :port, :quiet, :ffmpeg, :screen_no, :outdir.
# Unset options fall back to the DEFAULT_* constants / screen 1 / Dir.pwd.
def initialize(**kwargs)
@host = kwargs[:host] || DEFAULT_HOST
@port = kwargs[:port] || DEFAULT_PORT
# OS name (e.g. "darwin", "mingw32"); selects the capture command later.
@platform = Gem::Platform.new(Gem::Platform::CURRENT).os
@quiet = kwargs[:quiet]
@ffmpeg = kwargs[:ffmpeg] || DEFAULT_FFMPEG
@screen_no = kwargs[:screen_no] || 1
@outdir = kwargs[:outdir] || Dir.pwd
end
def windows?
return WINDOWS_FLAVORS.include?(@platform)
end
# We create a new thread which will launch ffmpeg(1).
def start_recording(outfile)
@ffmpeg_pid = -1
@ffmpeg_exitstatus = nil
@capture_thread = Thread.new do
v = @quiet ? "0" : "1"
case @platform
when "darwin"
capture_cmd = [
@ffmpeg, "-v", v, "-y", "-nostdin",
"-f", "avfoundation", "-capture_cursor", "1", "-i", "#{@screen_no.to_s}:",
"-s", "1280x720", "-codec:v", "libx264", "-preset", "ultrafast", outfile
]
when *WINDOWS_FLAVORS
capture_cmd = [
@ffmpeg, "-v", v, "-y", "-nostdin",
"-f", "gdigrab", "-draw_mouse", "1", "-i", "title=Pyre",
"-f", "dshow", "-i", "audio=virtual-audio-capturer",
"-s", "1280x720", "-codec:v", "libx264", "-preset", "ultrafast", outfile
]
end
@ffmpeg_pid = spawn(*capture_cmd)
$stdout.puts("\t- FFMPEG_PID=#{@ffmpeg_pid}")
$stdout.puts("\t- FFMPEG_CMD=#{capture_cmd.join(' ')}")
_, @ffmpeg_exitstatus = Process.wait2(@ffmpeg_pid)
$stdout.puts("\t- " + @ffmpeg_exitstatus.inspect)
# Exit status 255 is normal for FFmpeg, apparently. Additionally, on
# Windows, an exit status of 1 seems acceptable when using 'taskkill /f'.
ok_options = [0, 255]
ok_options << 1 if self.windows?
if ok_options.include?(@ffmpeg_exitstatus.exitstatus)
$stdout.puts("\t- OK (#{@ffmpeg_exitstatus.exitstatus})")
else
$stderr.puts("\t- ERROR: FFmpeg had an error! (#{@ffmpeg_exitstatus})")
end
end
end
def stop_recording
# This only kills the FFmpeg process, it does NOT terminate the thread in which
# it was spawned.
if self.windows?
system("taskkill /pid #{@ffmpeg_pid} /f")
else
Process.kill("INT", @ffmpeg_pid)
end
# So now let's really make sure the thread actually terminates.
@capture_thread.join
@capture_thread = nil
end
def main_loop
$stdout.puts("******************************")
$stdout.puts("*** Rite Club Video Server ***")
$stdout.puts("******************************")
$stdout.puts(">> PID #{Process.pid}")
$stdout.puts(">> Using FFmpeg: #{@ffmpeg}")
$stdout.puts(">> Video directory: #{@outdir}")
$stdout.puts(">> Listening on #{@host}:#{@port.to_s}")
@server = TCPServer.new(@host, @port)
done = false
until done
client = @server.accept
client.sync = true
remote = client.remote_address
$stdout.puts("CONNECTION from #{remote.ip_address}:#{remote.ip_port}")
msg = client.gets.chomp
case msg
when "START"
if @capture_thread
client.puts("ALREADY STARTED")
else
container = "mkv"
basename = "#{Time.now.strftime('%Y%m%d%H%M%S')}.#{container}"
self.start_recording(File.join(@outdir, basename))
client.puts("STARTING")
end
when "STOP"
if @capture_thread
self.stop_recording
client.puts("STOPPING")
else
client.puts("WASNT ALREADY STARTED")
end
when "QUIT"
if @capture_thread
client.puts("STILL RECORDING")
else
done = true
$stdout.puts("(quitting)")
client.puts("QUITTING")
end
else
client.puts("UNKNOWN COMMAND")
end
client.close
end
end
end
########## ########## ##########
# Command-line entry point: parse options, then run the server's accept loop
# (blocks until a QUIT command is received).
ffmpeg = "ffmpeg"
screen_no = nil
quiet = false
outdir = nil
parser = OptionParser.new do |opts|
  opts.on("--video-dir PATH", "Directory to save videos") { |path| outdir = File.expand_path(path) }
  opts.on("--ffmpeg PATH") { |path| ffmpeg = File.expand_path(path) }
  opts.on("--screen N", "for AVFoundation") { |n| screen_no = n.to_i }
  opts.on("-q", "--quiet") { quiet = true }
end
parser.parse!(ARGV)

# A failure in the capture thread should bring the whole server down rather
# than be silently reported.
Thread.abort_on_exception = true

# Host/port are not configurable from the CLI; App's defaults apply.
app = App.new(
  :quiet => quiet,
  :screen_no => screen_no,
  :outdir => outdir,
  :ffmpeg => ffmpeg,
)
app.main_loop
|
dressupgeekout/rite-club
|
recorder/recorderclient.rb
|
#
# Simple client app to control the riteclubrecorder server from the command
# line.
#
require 'optparse'
require 'socket'

# Unbuffered output so replies appear immediately when run interactively.
$stdout.sync = true
$stderr.sync = true

# Program name as shown in the usage banner.
PROGNAME = File.basename($0)
# Sends a single control command (START / STOP / QUIT) to a running
# riteclubrecorder server over TCP and prints the server's one-line reply.
class App
  DEFAULT_HOST = "127.0.0.1"
  DEFAULT_PORT = 9876

  # The commands the server understands.
  COMMANDS = ["START", "STOP", "QUIT"].freeze

  # kwargs:
  #   :host -- server address (default DEFAULT_HOST)
  #   :port -- server TCP port (default DEFAULT_PORT)
  #   :cmd  -- command string; upcased, so "start" and "START" are equivalent
  def initialize(**kwargs)
    @host = kwargs[:host] || DEFAULT_HOST
    @port = kwargs[:port] || DEFAULT_PORT
    @cmd = kwargs[:cmd].upcase
  end

  # Returns a process exit status: 0 on success, 1 for an unknown command.
  def main
    # Validate before opening the socket. (The original assigned an unused
    # local `msg` here; the membership check is all that matters.)
    unless COMMANDS.include?(@cmd)
      $stderr.puts("FATAL: unknown command #{@cmd}")
      return 1
    end
    socket = TCPSocket.new(@host, @port)
    begin
      socket.sync = true
      socket.puts(@cmd)
      # gets returns nil if the server closes without replying; don't crash
      # on that -- just print nothing.
      response = socket.gets
      puts response.chomp if response
    ensure
      # Always release the socket, even if the exchange raises.
      socket.close
    end
    return 0
  end
end
######### ########## ##########
# Command-line entry point: parse -h/-p options, require exactly one command
# argument, and exit with App#main's status.
host = nil
port = nil
cmd = nil
parser = OptionParser.new do |opts|
  opts.banner = "usage: #{PROGNAME} [options] START|STOP|QUIT"
  opts.on("-h", "--host HOST", "default: #{App::DEFAULT_HOST}") { |h| host = h }
  opts.on("-p", "--port PORT", "default: #{App::DEFAULT_PORT.to_s}") { |p| port = p.to_i }
end
parser.parse!(ARGV)

# The first non-option argument is the command.
cmd = ARGV.shift
if not cmd
  $stderr.puts("FATAL: expected a command!")
  $stderr.puts(parser.to_s)
  exit 1
end
exit App.new(host: host, port: port, cmd: cmd).main
|
RisePeopleInc/jsonapi-resource-1
|
test/unit/pagination/offset_paginator_test.rb
|
require File.expand_path('../../../test_helper', __FILE__)
require 'jsonapi-resources'
# Unit tests for jsonapi-resources' OffsetPaginator: parsing of page[limit]/
# page[offset] parameters and generation of first/previous/next/last link
# parameters for various record counts and offsets.
class OffsetPaginatorTest < ActiveSupport::TestCase
  # --- parameter parsing ---

  # No page params: limit falls back to the configured default, offset to 0.
  def test_offset_default_page_params
    params = ActionController::Parameters.new(
      {
      }
    )
    paginator = OffsetPaginator.new(params)
    assert_equal JSONAPI.configuration.default_page_size, paginator.limit
    assert_equal 0, paginator.offset
  end

  def test_offset_parse_page_params_default_offset
    params = ActionController::Parameters.new(
      {
        limit: 20
      }
    )
    paginator = OffsetPaginator.new(params)
    assert_equal 20, paginator.limit
    assert_equal 0, paginator.offset
  end

  def test_offset_parse_page_params
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 7
      }
    )
    paginator = OffsetPaginator.new(params)
    assert_equal 5, paginator.limit
    assert_equal 7, paginator.offset
  end

  # NOTE(review): assumes the configured maximum_page_size is below 50 --
  # this test is coupled to JSONAPI.configuration in the test environment.
  def test_offset_parse_page_params_limit_too_large
    params = ActionController::Parameters.new(
      {
        limit: 50,
        offset: 0
      }
    )
    assert_raises JSONAPI::Exceptions::InvalidPageValue do
      OffsetPaginator.new(params)
    end
  end

  # Unknown page parameter names (here :start) are rejected.
  def test_offset_parse_page_params_not_allowed
    params = ActionController::Parameters.new(
      {
        limit: 50,
        start: 0
      }
    )
    assert_raises JSONAPI::Exceptions::PageParametersNotAllowed do
      OffsetPaginator.new(params)
    end
  end

  def test_offset_parse_page_params_start
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 0
      }
    )
    paginator = OffsetPaginator.new(params)
    assert_equal 5, paginator.limit
    assert_equal 0, paginator.offset
  end

  # --- links_page_params: which of first/previous/next/last are emitted ---

  # Empty result set: only first/last, both at offset 0.
  def test_offset_links_page_params_empty_results
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 0
      }
    )
    paginator = OffsetPaginator.new(params)
    links_params = paginator.links_page_params(record_count: 0)
    assert_equal 2, links_params.size
    assert_equal 5, links_params['first']['limit']
    assert_equal 0, links_params['first']['offset']
    assert_equal 5, links_params['last']['limit']
    assert_equal 0, links_params['last']['offset']
  end

  # Fewer records than one page: still only first/last.
  def test_offset_links_page_params_small_resultsets
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 0
      }
    )
    paginator = OffsetPaginator.new(params)
    links_params = paginator.links_page_params(record_count: 3)
    assert_equal 2, links_params.size
    assert_equal 5, links_params['first']['limit']
    assert_equal 0, links_params['first']['offset']
    assert_equal 5, links_params['last']['limit']
    assert_equal 0, links_params['last']['offset']
  end

  # At the start of a large set: first/next/last (no previous).
  def test_offset_links_page_params_large_data_set_start
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 0
      }
    )
    paginator = OffsetPaginator.new(params)
    links_params = paginator.links_page_params(record_count: 50)
    assert_equal 3, links_params.size
    assert_equal 5, links_params['first']['limit']
    assert_equal 0, links_params['first']['offset']
    assert_equal 5, links_params['next']['limit']
    assert_equal 5, links_params['next']['offset']
    assert_equal 5, links_params['last']['limit']
    assert_equal 45, links_params['last']['offset']
  end

  # Offset within the first page: previous clamps to offset 0.
  def test_offset_links_page_params_large_data_set_before_start
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 2
      }
    )
    paginator = OffsetPaginator.new(params)
    links_params = paginator.links_page_params(record_count: 50)
    assert_equal 4, links_params.size
    assert_equal 5, links_params['first']['limit']
    assert_equal 0, links_params['first']['offset']
    assert_equal 5, links_params['previous']['limit']
    assert_equal 0, links_params['previous']['offset']
    assert_equal 5, links_params['next']['limit']
    assert_equal 7, links_params['next']['offset']
    assert_equal 5, links_params['last']['limit']
    assert_equal 45, links_params['last']['offset']
  end

  # In the middle: all four links present.
  def test_offset_links_page_params_large_data_set_middle
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 27
      }
    )
    paginator = OffsetPaginator.new(params)
    links_params = paginator.links_page_params(record_count: 50)
    assert_equal 4, links_params.size
    assert_equal 5, links_params['first']['limit']
    assert_equal 0, links_params['first']['offset']
    assert_equal 5, links_params['previous']['limit']
    assert_equal 22, links_params['previous']['offset']
    assert_equal 5, links_params['next']['limit']
    assert_equal 32, links_params['next']['offset']
    assert_equal 5, links_params['last']['limit']
    assert_equal 45, links_params['last']['offset']
  end

  # At the last page: first/previous/last (no next).
  def test_offset_links_page_params_large_data_set_end
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 45
      }
    )
    paginator = OffsetPaginator.new(params)
    links_params = paginator.links_page_params(record_count: 50)
    assert_equal 3, links_params.size
    assert_equal 5, links_params['first']['limit']
    assert_equal 0, links_params['first']['offset']
    assert_equal 5, links_params['previous']['limit']
    assert_equal 40, links_params['previous']['offset']
    assert_equal 5, links_params['last']['limit']
    assert_equal 45, links_params['last']['offset']
  end

  # Offset past the end still yields no next link.
  def test_offset_links_page_params_large_data_set_past_end
    params = ActionController::Parameters.new(
      {
        limit: 5,
        offset: 48
      }
    )
    paginator = OffsetPaginator.new(params)
    links_params = paginator.links_page_params(record_count: 50)
    assert_equal 3, links_params.size
    assert_equal 5, links_params['first']['limit']
    assert_equal 0, links_params['first']['offset']
    assert_equal 5, links_params['previous']['limit']
    assert_equal 43, links_params['previous']['offset']
    assert_equal 5, links_params['last']['limit']
    assert_equal 45, links_params['last']['offset']
  end
end
|
RisePeopleInc/jsonapi-resource-1
|
lib/jsonapi/active_record_operations_processor.rb
|
# OperationsProcessor backed by ActiveRecord: wraps multi-operation requests
# in a database transaction and translates common ActiveRecord errors into
# JSONAPI error results.
class ActiveRecordOperationsProcessor < JSONAPI::OperationsProcessor
  private

  # Runs the block inside an ActiveRecord transaction when the request was
  # flagged transactional; otherwise just yields.
  def transaction
    if @transactional
      ActiveRecord::Base.transaction do
        yield
      end
    else
      yield
    end
  end

  # ActiveRecord::Rollback aborts the surrounding transaction without
  # propagating out of it.
  def rollback
    fail ActiveRecord::Rollback if @transactional
  end

  # Catch errors that should be handled before JSONAPI::Exceptions::Error
  # and other unprocessed exceptions
  def process_operation(operation)
    with_default_handling do
      begin
        operation.apply
      rescue ActiveRecord::DeleteRestrictionError => e
        # dependent: :restrict_with_exception -> 423 Locked.
        record_locked_error = JSONAPI::Exceptions::RecordLocked.new(e.message)
        return JSONAPI::ErrorsOperationResult.new(record_locked_error.errors[0].code, record_locked_error.errors)
      rescue ActiveRecord::RecordNotFound
        # Missing record -> 404 with the key that was looked up.
        record_not_found = JSONAPI::Exceptions::RecordNotFound.new(operation.associated_key)
        return JSONAPI::ErrorsOperationResult.new(record_not_found.errors[0].code, record_not_found.errors)
      end
    end
  end
end
|
RisePeopleInc/jsonapi-resource-1
|
lib/jsonapi/error_codes.rb
|
<gh_stars>0
# Numeric error codes used throughout jsonapi-resources. The 1xx range covers
# request/validation problems; the remaining values mirror the HTTP status
# codes they correspond to.
module JSONAPI
  VALIDATION_ERROR = 100
  INVALID_RESOURCE = 101
  FILTER_NOT_ALLOWED = 102
  INVALID_FIELD_VALUE = 103
  INVALID_FIELD = 104
  PARAM_NOT_ALLOWED = 105
  PARAM_MISSING = 106
  INVALID_FILTER_VALUE = 107
  COUNT_MISMATCH = 108
  KEY_ORDER_MISMATCH = 109
  KEY_NOT_INCLUDED_IN_URL = 110
  # NOTE(review): 111 is skipped here -- presumably retired; confirm before
  # reusing it for a new code.
  INVALID_INCLUDE = 112
  RELATION_EXISTS = 113
  INVALID_SORT_CRITERIA = 114
  INVALID_LINKS_OBJECT = 115
  TYPE_MISMATCH = 116
  INVALID_PAGE_OBJECT = 117
  INVALID_PAGE_VALUE = 118
  INVALID_FIELD_FORMAT = 119
  INVALID_FILTERS_SYNTAX = 120
  SAVE_FAILED = 121
  FORBIDDEN = 403
  RECORD_NOT_FOUND = 404
  UNSUPPORTED_MEDIA_TYPE = 415
  LOCKED = 423
  INTERNAL_SERVER_ERROR = 500

  # Human-readable token for each code. Frozen: this lookup table is shared
  # constant data and must not be mutated at runtime.
  TEXT_ERRORS =
    { VALIDATION_ERROR => 'VALIDATION_ERROR',
      INVALID_RESOURCE => 'INVALID_RESOURCE',
      FILTER_NOT_ALLOWED => 'FILTER_NOT_ALLOWED',
      INVALID_FIELD_VALUE => 'INVALID_FIELD_VALUE',
      INVALID_FIELD => 'INVALID_FIELD',
      PARAM_NOT_ALLOWED => 'PARAM_NOT_ALLOWED',
      PARAM_MISSING => 'PARAM_MISSING',
      INVALID_FILTER_VALUE => 'INVALID_FILTER_VALUE',
      COUNT_MISMATCH => 'COUNT_MISMATCH',
      KEY_ORDER_MISMATCH => 'KEY_ORDER_MISMATCH',
      KEY_NOT_INCLUDED_IN_URL => 'KEY_NOT_INCLUDED_IN_URL',
      INVALID_INCLUDE => 'INVALID_INCLUDE',
      RELATION_EXISTS => 'RELATION_EXISTS',
      INVALID_SORT_CRITERIA => 'INVALID_SORT_CRITERIA',
      INVALID_LINKS_OBJECT => 'INVALID_LINKS_OBJECT',
      TYPE_MISMATCH => 'TYPE_MISMATCH',
      INVALID_PAGE_OBJECT => 'INVALID_PAGE_OBJECT',
      INVALID_PAGE_VALUE => 'INVALID_PAGE_VALUE',
      INVALID_FIELD_FORMAT => 'INVALID_FIELD_FORMAT',
      INVALID_FILTERS_SYNTAX => 'INVALID_FILTERS_SYNTAX',
      SAVE_FAILED => 'SAVE_FAILED',
      FORBIDDEN => 'FORBIDDEN',
      RECORD_NOT_FOUND => 'RECORD_NOT_FOUND',
      UNSUPPORTED_MEDIA_TYPE => 'UNSUPPORTED_MEDIA_TYPE',
      LOCKED => 'LOCKED',
      INTERNAL_SERVER_ERROR => 'INTERNAL_SERVER_ERROR'
    }.freeze
end
|
RisePeopleInc/jsonapi-resource-1
|
lib/jsonapi/exceptions.rb
|
<filename>lib/jsonapi/exceptions.rb
module JSONAPI
  # Exception hierarchy for jsonapi-resources. Each exception exposes an
  # #errors method returning an array of JSONAPI::Error objects (code,
  # HTTP status, title, detail) ready to be rendered in a response.
  module Exceptions
    # Base class for all jsonapi-resources exceptions.
    class Error < RuntimeError; end

    # Wraps an unexpected exception as a 500 response. Outside production,
    # the original message and backtrace are exposed under meta.
    class InternalServerError < Error
      attr_accessor :exception
      def initialize(exception)
        @exception = exception
      end
      def errors
        # In production this branch is skipped, so `meta` stays nil (Ruby's
        # parser still defines the local) and no internals leak to clients.
        unless Rails.env.production?
          meta = Hash.new
          meta[:exception] = exception.message
          meta[:backtrace] = exception.backtrace
        end
        [JSONAPI::Error.new(code: JSONAPI::INTERNAL_SERVER_ERROR,
                            status: :internal_server_error,
                            title: 'Internal Server Error',
                            detail: 'Internal Server Error',
                            meta: meta)]
      end
    end

    # Request referenced a resource type that is not defined.
    class InvalidResource < Error
      attr_accessor :resource
      def initialize(resource)
        @resource = resource
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_RESOURCE,
                            status: :bad_request,
                            title: 'Invalid resource',
                            detail: "#{resource} is not a valid resource.")]
      end
    end

    # Lookup by id (or key) found nothing -> 404.
    class RecordNotFound < Error
      attr_accessor :id
      def initialize(id)
        @id = id
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::RECORD_NOT_FOUND,
                            status: :not_found,
                            title: 'Record not found',
                            detail: "The record identified by #{id} could not be found.")]
      end
    end

    # Write request used a Content-Type other than the JSON:API media type.
    class UnsupportedMediaTypeError < Error
      attr_accessor :media_type
      def initialize(media_type)
        @media_type = media_type
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::UNSUPPORTED_MEDIA_TYPE,
                            status: :unsupported_media_type,
                            title: 'Unsupported media type',
                            detail: "All requests that create or update resources must use the '#{JSONAPI::MEDIA_TYPE}' Content-Type. This request specified '#{media_type}.'")]
      end
    end

    # Attempt to add a to-many relationship entry that already exists.
    class HasManyRelationExists < Error
      attr_accessor :id
      def initialize(id)
        @id = id
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::RELATION_EXISTS,
                            status: :bad_request,
                            title: 'Relation exists',
                            detail: "The relation to #{id} already exists.")]
      end
    end

    # Full replacement of a to-many relationship is disabled for this
    # relationship -> 403.
    class ToManySetReplacementForbidden < Error
      def errors
        [JSONAPI::Error.new(code: JSONAPI::FORBIDDEN,
                            status: :forbidden,
                            title: 'Complete replacement forbidden',
                            detail: 'Complete replacement forbidden for this relationship')]
      end
    end

    # The filter parameter could not be parsed at all.
    class InvalidFiltersSyntax < Error
      attr_accessor :filters
      def initialize(filters)
        @filters = filters
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_FILTERS_SYNTAX,
                            status: :bad_request,
                            title: 'Invalid filters syntax',
                            detail: "#{filters} is not a valid syntax for filtering.")]
      end
    end

    # Filter key is not declared on the resource.
    class FilterNotAllowed < Error
      attr_accessor :filter
      def initialize(filter)
        @filter = filter
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::FILTER_NOT_ALLOWED,
                            status: :bad_request,
                            title: 'Filter not allowed',
                            detail: "#{filter} is not allowed.")]
      end
    end

    # Declared filter received a value it cannot handle.
    class InvalidFilterValue < Error
      attr_accessor :filter, :value
      def initialize(filter, value)
        @filter = filter
        @value = value
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_FILTER_VALUE,
                            status: :bad_request,
                            title: 'Invalid filter value',
                            detail: "#{value} is not a valid value for #{filter}.")]
      end
    end

    # Field received a value it cannot handle.
    class InvalidFieldValue < Error
      attr_accessor :field, :value
      def initialize(field, value)
        @field = field
        @value = value
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_FIELD_VALUE,
                            status: :bad_request,
                            title: 'Invalid field value',
                            detail: "#{value} is not a valid value for #{field}.")]
      end
    end

    # fields= query parameter lacked the required type qualifier.
    class InvalidFieldFormat < Error
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_FIELD_FORMAT,
                            status: :bad_request,
                            title: 'Invalid field format',
                            detail: 'Fields must specify a type.')]
      end
    end

    # Relationship payload was not a valid JSON:API links object.
    class InvalidLinksObject < Error
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_LINKS_OBJECT,
                            status: :bad_request,
                            title: 'Invalid Links Object',
                            detail: 'Data is not a valid Links Object.')]
      end
    end

    # Payload's type does not match what the endpoint expects.
    class TypeMismatch < Error
      attr_accessor :type
      def initialize(type)
        @type = type
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::TYPE_MISMATCH,
                            status: :bad_request,
                            title: 'Type Mismatch',
                            detail: "#{type} is not a valid type for this operation.")]
      end
    end

    # Requested field does not exist on the given resource type.
    class InvalidField < Error
      attr_accessor :field, :type
      def initialize(type, field)
        @field = field
        @type = type
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_FIELD,
                            status: :bad_request,
                            title: 'Invalid field',
                            detail: "#{field} is not a valid field for #{type}.")]
      end
    end

    # include= referenced a relationship the resource does not declare.
    class InvalidInclude < Error
      attr_accessor :relationship, :resource
      def initialize(resource, relationship)
        @resource = resource
        @relationship = relationship
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_INCLUDE,
                            status: :bad_request,
                            title: 'Invalid field',
                            detail: "#{relationship} is not a valid relationship of #{resource}")]
      end
    end

    # sort= referenced a criterion the resource does not support.
    class InvalidSortCriteria < Error
      attr_accessor :sort_criteria, :resource
      def initialize(resource, sort_criteria)
        @resource = resource
        @sort_criteria = sort_criteria
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_SORT_CRITERIA,
                            status: :bad_request,
                            title: 'Invalid sort criteria',
                            detail: "#{sort_criteria} is not a valid sort criteria for #{resource}")]
      end
    end

    # One JSONAPI::Error is produced per disallowed parameter.
    class ParametersNotAllowed < Error
      attr_accessor :params
      def initialize(params)
        @params = params
      end
      def errors
        params.collect do |param|
          JSONAPI::Error.new(code: JSONAPI::PARAM_NOT_ALLOWED,
                             status: :bad_request,
                             title: 'Param not allowed',
                             detail: "#{param} is not allowed.")
        end
      end
    end

    # A required request parameter was absent.
    class ParameterMissing < Error
      attr_accessor :param
      def initialize(param)
        @param = param
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::PARAM_MISSING,
                            status: :bad_request,
                            title: 'Missing Parameter',
                            detail: "The required parameter, #{param}, is missing.")]
      end
    end

    # Bulk operation: number of keys and number of objects differ.
    class CountMismatch < Error
      def errors
        [JSONAPI::Error.new(code: JSONAPI::COUNT_MISMATCH,
                            status: :bad_request,
                            title: 'Count to key mismatch',
                            detail: 'The resource collection does not contain the same number of objects as the number of keys.')]
      end
    end

    # Payload supplied a key the URL route does not accept.
    class KeyNotIncludedInURL < Error
      attr_accessor :key
      def initialize(key)
        @key = key
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::KEY_NOT_INCLUDED_IN_URL,
                            status: :bad_request,
                            title: 'Key is not included in URL',
                            detail: "The URL does not support the key #{key}")]
      end
    end

    # Resource object in the payload was missing its key.
    class MissingKey < Error
      def errors
        [JSONAPI::Error.new(code: JSONAPI::KEY_ORDER_MISMATCH,
                            status: :bad_request,
                            title: 'A key is required',
                            detail: 'The resource object does not contain a key.')]
      end
    end

    # Record cannot be modified/deleted (e.g. delete restriction) -> 423.
    class RecordLocked < Error
      attr_accessor :message
      def initialize(message)
        @message = message
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::LOCKED,
                            status: :locked,
                            title: 'Locked resource',
                            detail: "#{message}")]
      end
    end

    # Model validation failures -> one 422 error per message, with a JSON
    # pointer into either /data/attributes or /data/relationships.
    class ValidationErrors < Error
      attr_reader :error_messages, :resource_relationships
      def initialize(resource)
        @error_messages = resource.model_error_messages
        @resource_relationships = resource.class._relationships.keys
        @key_formatter = JSONAPI.configuration.key_formatter
      end
      # Applies the configured key format (e.g. dasherized) to a key.
      def format_key(key)
        @key_formatter.format(key)
      end
      def errors
        error_messages.flat_map do |attr_key, messages|
          messages.map { |message| json_api_error(attr_key, message) }
        end
      end
      private
      def json_api_error(attr_key, message)
        JSONAPI::Error.new(code: JSONAPI::VALIDATION_ERROR,
                           status: :unprocessable_entity,
                           title: message,
                           detail: "#{format_key(attr_key)} - #{message}",
                           source: { pointer: pointer(attr_key) })
      end
      # Relationship names map to /data/relationships/...; everything else
      # is treated as an attribute.
      def pointer(attr_or_relationship_name)
        formatted_attr_or_relationship_name = format_key(attr_or_relationship_name)
        if resource_relationships.include?(attr_or_relationship_name)
          "/data/relationships/#{formatted_attr_or_relationship_name}"
        else
          "/data/attributes/#{formatted_attr_or_relationship_name}"
        end
      end
    end

    # Model save returned false or was cancelled by a callback.
    class SaveFailed < Error
      def errors
        [JSONAPI::Error.new(code: JSONAPI::SAVE_FAILED,
                            status: :unprocessable_entity,
                            title: 'Save failed or was cancelled',
                            detail: 'Save failed or was cancelled')]
      end
    end

    # page= parameter was not structured as expected.
    class InvalidPageObject < Error
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_PAGE_OBJECT,
                            status: :bad_request,
                            title: 'Invalid Page Object',
                            detail: 'Invalid Page Object.')]
      end
    end

    # One JSONAPI::Error per page parameter the paginator does not accept.
    class PageParametersNotAllowed < Error
      attr_accessor :params
      def initialize(params)
        @params = params
      end
      def errors
        params.collect do |param|
          JSONAPI::Error.new(code: JSONAPI::PARAM_NOT_ALLOWED,
                             status: :bad_request,
                             title: 'Page parameter not allowed',
                             detail: "#{param} is not an allowed page parameter.")
        end
      end
    end

    # Page parameter had an unusable value; a custom message may override
    # the default detail text.
    class InvalidPageValue < Error
      attr_accessor :page, :value
      def initialize(page, value, msg = nil)
        @page = page
        @value = value
        @msg = msg || "#{value} is not a valid value for #{page} page parameter."
      end
      def errors
        [JSONAPI::Error.new(code: JSONAPI::INVALID_PAGE_VALUE,
                            status: :bad_request,
                            title: 'Invalid page value',
                            detail: @msg)]
      end
    end
  end
end
|
RisePeopleInc/jsonapi-resource-1
|
lib/jsonapi/resource_serializer.rb
|
module JSONAPI
class ResourceSerializer
attr_reader :link_builder, :key_formatter, :serialization_options, :primary_class_name
# initialize
# Options can include
# include:
# Purpose: determines which objects will be side loaded with the source objects in a linked section
# Example: ['comments','author','comments.tags','author.posts']
# fields:
# Purpose: determines which fields are serialized for a resource type. This encompasses both attributes and
# relationship ids in the links section for a resource. Fields are global for a resource type.
# Example: { people: [:id, :email, :comments], posts: [:id, :title, :author], comments: [:id, :body, :post]}
# key_formatter: KeyFormatter class to override the default configuration
# serializer_options: additional options that will be passed to resource meta and links lambdas
def initialize(primary_resource_klass, options = {})
@primary_class_name = primary_resource_klass._type
@fields = options.fetch(:fields, {})
@include = options.fetch(:include, [])
@include_directives = options[:include_directives]
@key_formatter = options.fetch(:key_formatter, JSONAPI.configuration.key_formatter)
@link_builder = generate_link_builder(primary_resource_klass, options)
@always_include_to_one_linkage_data = options.fetch(:always_include_to_one_linkage_data,
JSONAPI.configuration.always_include_to_one_linkage_data)
@always_include_to_many_linkage_data = options.fetch(:always_include_to_many_linkage_data,
JSONAPI.configuration.always_include_to_many_linkage_data)
@serialization_options = options.fetch(:serialization_options, {})
end
# Converts a single resource, or an array of resources to a hash, conforming to the JSONAPI structure
def serialize_to_hash(source)
is_resource_collection = source.respond_to?(:to_ary)
@included_objects = {}
@include_directives ||= JSONAPI::IncludeDirectives.new(@include)
process_primary(source, @include_directives.include_directives)
included_objects = []
primary_objects = []
@included_objects.each_value do |objects|
objects.each_value do |object|
if object[:primary]
primary_objects.push(object[:object_hash])
else
included_objects.push(object[:object_hash])
end
end
end
primary_hash = { data: is_resource_collection ? primary_objects : primary_objects[0] }
primary_hash[:included] = included_objects if included_objects.size > 0
primary_hash
end
def serialize_to_links_hash(source, requested_relationship)
if requested_relationship.is_a?(JSONAPI::Relationship::ToOne)
data = to_one_linkage(source, requested_relationship)
else
data = to_many_linkage(source, requested_relationship)
end
{
links: {
self: self_link(source, requested_relationship),
related: related_link(source, requested_relationship)
},
data: data
}
end
def query_link(query_params)
link_builder.query_link(query_params)
end
def format_key(key)
@key_formatter.format(key)
end
def format_value(value, format)
value_formatter = JSONAPI::ValueFormatter.value_formatter_for(format)
value_formatter.format(value)
end
private
# Process the primary source object(s). This will then serialize associated object recursively based on the
# requested includes. Fields are controlled fields option for each resource type, such
# as fields: { people: [:id, :email, :comments], posts: [:id, :title, :author], comments: [:id, :body, :post]}
# The fields options controls both fields and included links references.
def process_primary(source, include_directives)
if source.respond_to?(:to_ary)
source.each do |resource|
id = resource.id
if already_serialized?(resource.class._type, id)
set_primary(@primary_class_name, id)
end
add_included_object(id, object_hash(resource, include_directives), true)
end
else
return {} if source.nil?
resource = source
id = resource.id
add_included_object(id, object_hash(source, include_directives), true)
end
end
# Returns a serialized hash for the source model
def object_hash(source, include_directives)
obj_hash = {}
id_format = source.class._attribute_options(:id)[:format]
# protect against ids that were declared as an attribute, but did not have a format set.
id_format = 'id' if id_format == :default
obj_hash['id'] = format_value(source.id, id_format)
obj_hash['type'] = format_key(source.class._type.to_s)
links = relationship_links(source)
obj_hash['links'] = links unless links.empty?
attributes = attribute_hash(source)
obj_hash['attributes'] = attributes unless attributes.empty?
relationships = relationship_data(source, include_directives)
obj_hash['relationships'] = relationships unless relationships.nil? || relationships.empty?
meta = source.meta(custom_generation_options)
if meta.is_a?(Hash) && !meta.empty?
obj_hash['meta'] = meta
end
obj_hash
end
def requested_fields(klass)
return if @fields.nil? || @fields.empty?
if @fields[klass._type]
@fields[klass._type]
elsif klass.superclass != JSONAPI::Resource
requested_fields(klass.superclass)
end
end
def attribute_hash(source)
requested = requested_fields(source.class)
fields = source.fetchable_fields & source.class._attributes.keys.to_a
fields = requested & fields unless requested.nil?
fields.each_with_object({}) do |name, hash|
format = source.class._attribute_options(name)[:format]
unless name == :id
hash[format_key(name)] = format_value(source.public_send(name), format)
end
end
end
def custom_generation_options
{
serializer: self,
serialization_options: @serialization_options
}
end
def relationship_data(source, include_directives)
relationships = source.class._relationships
requested = requested_fields(source.class)
fields = relationships.keys
fields = requested & fields unless requested.nil?
field_set = Set.new(fields)
included_relationships = source.fetchable_fields & relationships.keys
data = {}
relationships.each_with_object(data) do |(name, relationship), hash|
if included_relationships.include? name
ia = include_directives[:include_related][name]
include_linkage = ia && ia[:include]
include_linked_children = ia && !ia[:include_related].empty?
if field_set.include?(name)
hash[format_key(name)] = link_object(source, relationship, include_linkage)
end
type = relationship.type
# If the object has been serialized once it will be in the related objects list,
# but it's possible all children won't have been captured. So we must still go
# through the relationships.
if include_linkage || include_linked_children
if relationship.is_a?(JSONAPI::Relationship::ToOne)
resource = source.public_send(name)
if resource
id = resource.id
type = relationship.type_for_source(source)
relationships_only = already_serialized?(type, id)
if include_linkage && !relationships_only
add_included_object(id, object_hash(resource, ia))
elsif include_linked_children || relationships_only
relationship_data(resource, ia)
end
end
elsif relationship.is_a?(JSONAPI::Relationship::ToMany)
resources = source.public_send(name)
resources.each do |resource|
id = resource.id
relationships_only = already_serialized?(type, id)
if include_linkage && !relationships_only
add_included_object(id, object_hash(resource, ia))
elsif include_linked_children || relationships_only
relationship_data(resource, ia)
end
end
end
end
end
end
end
def relationship_links(source)
links = {}
links[:self] = link_builder.self_link(source)
links
end
def already_serialized?(type, id)
type = format_key(type)
@included_objects.key?(type) && @included_objects[type].key?(id)
end
def self_link(source, relationship)
link_builder.relationships_self_link(source, relationship)
end
def related_link(source, relationship)
link_builder.relationships_related_link(source, relationship)
end
def to_one_linkage(source, relationship)
linkage = {}
linkage_id = foreign_key_value(source, relationship)
if linkage_id
linkage[:type] = format_key(relationship.type_for_source(source))
linkage[:id] = linkage_id
else
linkage = nil
end
linkage
end
def to_many_linkage(source, relationship)
linkage = []
linkage_types_and_values = foreign_key_types_and_values(source, relationship)
linkage_types_and_values.each do |type, value|
linkage.append({type: format_key(type), id: value})
end
linkage
end
def link_object_to_one(source, relationship, include_linkage)
include_linkage = include_linkage | @always_include_to_one_linkage_data | relationship.always_include_linkage_data
link_object_hash = {}
link_object_hash[:links] = {}
link_object_hash[:links][:self] = self_link(source, relationship)
link_object_hash[:links][:related] = related_link(source, relationship)
link_object_hash[:data] = to_one_linkage(source, relationship) if include_linkage
link_object_hash
end
def link_object_to_many(source, relationship, include_linkage)
link_object_hash = {}
link_object_hash[:links] = {}
link_object_hash[:links][:self] = self_link(source, relationship)
link_object_hash[:links][:related] = related_link(source, relationship)
link_object_hash[:data] = to_many_linkage(source, relationship) if include_linkage
link_object_hash
end
def link_object(source, relationship, include_linkage = false)
if relationship.is_a?(JSONAPI::Relationship::ToOne)
link_object_to_one(source, relationship, include_linkage)
elsif relationship.is_a?(JSONAPI::Relationship::ToMany)
link_object_to_many(source, relationship, include_linkage)
end
end
# Extracts the foreign key value for a to_one relationship.
def foreign_key_value(source, relationship)
foreign_key = relationship.foreign_key
value = source.public_send(foreign_key)
IdValueFormatter.format(value)
end
def foreign_key_types_and_values(source, relationship)
if relationship.is_a?(JSONAPI::Relationship::ToMany)
if relationship.polymorphic?
source._model.public_send(relationship.name).pluck(:type, :id).map do |type, id|
[type.pluralize, IdValueFormatter.format(id)]
end
else
source.public_send(relationship.foreign_key).map do |value|
[relationship.type, IdValueFormatter.format(value)]
end
end
end
end
# Sets that an object should be included in the primary document of the response.
def set_primary(type, id)
type = format_key(type)
@included_objects[type][id][:primary] = true
end
# Collects the hashes for all objects processed by the serializer
def add_included_object(id, object_hash, primary = false)
type = object_hash['type']
@included_objects[type] = {} unless @included_objects.key?(type)
if already_serialized?(type, id)
@included_objects[type][id][:object_hash].merge!(object_hash)
set_primary(type, id) if primary
else
@included_objects[type].store(id, primary: primary, object_hash: object_hash)
end
end
# Constructs the LinkBuilder used for every URL this serializer emits,
# falling back to the global configuration for unspecified options.
def generate_link_builder(primary_resource_klass, options)
  base_url = options.fetch(:base_url, '')
  route_formatter = options.fetch(:route_formatter, JSONAPI.configuration.route_formatter)
  LinkBuilder.new(base_url: base_url,
                  route_formatter: route_formatter,
                  primary_resource_klass: primary_resource_klass)
end
end
end
|
RisePeopleInc/jsonapi-resource-1
|
lib/jsonapi/operations_processor.rb
|
module JSONAPI
# Executes the operations of a request sequentially, wrapping the whole set
# in an (optional) transaction and firing per-operation-type callbacks.
# Subclasses provide real transaction semantics (see
# ActiveRecordOperationsProcessor); this base class runs without any.
class OperationsProcessor
include Callbacks
# One callback hook per operation type plus the umbrella :operation/:operations.
define_jsonapi_resources_callbacks :operation,
:operations,
:find_operation,
:show_operation,
:show_relationship_operation,
:show_related_resource_operation,
:show_related_resources_operation,
:create_resource_operation,
:remove_resource_operation,
:replace_fields_operation,
:replace_to_one_relationship_operation,
:replace_polymorphic_to_one_relationship_operation,
:create_to_many_relationship_operation,
:replace_to_many_relationship_operation,
:remove_to_many_relationship_operation,
:remove_to_one_relationship_operation
class << self
# Resolves e.g. :active_record to ActiveRecordOperationsProcessor.
# Returns nil (safe_constantize) when no such class is defined.
def operations_processor_for(operations_processor)
operations_processor_class_name = "#{operations_processor.to_s.camelize}OperationsProcessor"
operations_processor_class_name.safe_constantize
end
end
# Runs all operations of +request+ and returns an OperationResults
# aggregating each operation's result, meta and links.
def process(request)
@results = JSONAPI::OperationResults.new
@request = request
@operations = request.operations
# Use transactions if more than one operation and if one of the operations can be transactional
# Even if transactional transactions won't be used unless the derived OperationsProcessor supports them.
@transactional = false
@operations.each do |operation|
@transactional |= operation.transactional
end
run_callbacks :operations do
transaction do
# Links and meta data global to the set of operations
@operations_meta = {}
@operations_links = {}
@operations.each do |operation|
@operation = operation
# Links and meta data for each operation
@operation_meta = {}
@operation_links = {}
run_callbacks :operation do
@result = nil
# Dispatch to the operation-type-specific callback hook, e.g.
# :create_resource_operation, derived from the operation's class name.
run_callbacks @operation.class.name.demodulize.underscore.to_sym do
@result = process_operation(@operation)
end
@result.meta.merge!(@operation_meta)
@result.links.merge!(@operation_links)
@results.add_result(@result)
# Abort the whole set on the first failed operation; remaining
# operations are still iterated but rollback undoes committed work
# in transactional subclasses.
rollback if @results.has_errors?
end
end
@results.meta = @operations_meta
@results.links = @operations_links
end
end
@results
end
private
# The base OperationsProcessor provides no transaction support
# Override the transaction and rollback methods to provide transaction support.
# For ActiveRecord transactions you can use the ActiveRecordOperationsProcessor
def transaction
yield
end
def rollback
end
# If overriding in child operation processors, call operation.apply and
# catch errors that should be handled before JSONAPI::Exceptions::Error
# and other unprocessed exceptions
def process_operation(operation)
with_default_handling do
operation.apply
end
end
# Runs the block, re-raising JSONAPI errors and whitelisted exception
# classes; everything else is logged, reported to the request's
# server-error callbacks, and converted into a 500 errors result.
def with_default_handling(&block)
yield
rescue JSONAPI::Exceptions::Error => e
raise e
rescue => e
if JSONAPI.configuration.exception_class_whitelist.any? { |k| e.class.ancestors.include?(k) }
raise e
else
@request.server_error_callbacks.each { |callback| safe_run_callback(callback, e) }
internal_server_error = JSONAPI::Exceptions::InternalServerError.new(e)
Rails.logger.error { "Internal Server Error: #{e.message} #{e.backtrace.join("\n")}" }
return JSONAPI::ErrorsOperationResult.new(internal_server_error.errors[0].code, internal_server_error.errors)
end
end
# Invokes a user-supplied error callback, guarding against the callback
# itself raising; a failing callback yields its own 500 errors result.
def safe_run_callback(callback, error)
begin
callback.call(error)
rescue => e
Rails.logger.error { "Error in error handling callback: #{e.message} #{e.backtrace.join("\n")}" }
internal_server_error = JSONAPI::Exceptions::InternalServerError.new(e)
return JSONAPI::ErrorsOperationResult.new(internal_server_error.errors[0].code, internal_server_error.errors)
end
end
end
end
# Minimal concrete processor: inherits the default, non-transactional behavior.
class BasicOperationsProcessor < JSONAPI::OperationsProcessor
end
|
garfieldmoore/Boomerang
|
build/make.rb
|
<filename>build/make.rb<gh_stars>1-10
# Rake build script for Boomerang: clean, restore NuGet packages, compile the
# solution with MSBuild, run NUnit tests, package and zip the installers.
require "rexml/document"
include REXML
# set properties
programFilesPath=""
buildConfiguration = "release"
productName="Boomerang"
VsSolutionFile = "boomerang.host.sln"
workingDir=Dir.pwd
packageDir = "buildArtifacts/bin"
installersPath = "buildArtifacts/installers/Installer.zip"
windirectory = ENV['windir']
msbuild = "#{windirectory}\\Microsoft.NET\\Framework\\v4.0.30319\\msbuild.exe"
nunitconsole="packages_manual/NUnit/tools/nunit-console.exe"
nugetPath= 'packages_manual/nuget.exe'
zipExe = "packages_manual/7zip/7z.exe"
os_platform="x64"

task :default => :build
task :build => [ :clean, :envConfig, :compile]
# NOTE(review): :codeAnalysis is referenced below but not defined in this file
# — confirm it is provided by another rakefile on the build server.
task :commitBuild => [ :clean, :compile, :runTests, :codeAnalysis, :package, :createInstallers]

# Removes and recreates the build output directory.
task :clean do
  # BUG FIX: was rm_rf("buildartifacts") (all lowercase). On a case-sensitive
  # file system that never matched the "buildArtifacts" directory, so the
  # mkdir below raised Errno::EEXIST on every build after the first.
  FileUtils.rm_rf("buildArtifacts")
  Dir::mkdir("buildArtifacts")
end

# Restores NuGet packages for every project's packages.config.
task :envConfig do
  puts "Starting rake task: envConfig"
  nugetParams = '-o packages'
  packages = Dir.glob("source/**/packages.config")+Dir.glob("tests/**/packages.config")
  packages.each do |package |
    sh "#{nugetPath} install #{package} #{nugetParams}"
  end
end

# Compiles the solution into buildArtifacts/bin/Components.
task :compile do
  puts "starting rake task: compile."
  componentName=VsSolutionFile.sub('.sln','')
  outputPath = "Components"
  # NOTE(review): signAssembly was false on every path (the original had a
  # conditional that re-assigned false for test projects — a no-op, removed).
  # If product assemblies were meant to be signed, the default should be true.
  signAssembly = false
  if ! componentName.include? 'Proxies'
    puts "Building:"
    puts "Source: #{componentName}"
    puts "Destination: #{outputPath}"
    params = " /t:Clean /t:Build /nologo /v:m /p:OutputPath=..\\..\\buildArtifacts\\bin\\#{outputPath} /p:Configuration=#{buildConfiguration} /p:StyleCopTreatErrorsAsWarnings=false #{VsSolutionFile} /p:SignAssembly=#{signAssembly} /p:AssemblyOriginatorKeyFile=..\\..\\build\\project.snk /p:DebugSymbols=true /p:DebugType=pdbonly"
    sh "#{msbuild} #{params}"
  end
end

# Runs NUnit over every compiled *tests* assembly except acceptance tests.
# Skipped on TeamCity, which has its own test steps.
task :runTests do
  if (ENV['TEAMCITY_JRE'] !=nil) then
    puts "Skipping unit tests: build server has built in steps for unit tests."
  else
    if ! FileTest::directory?("buildArtifacts/Reports")
      puts "Creating reports folder"
      Dir::mkdir("buildArtifacts/Reports")
    end
    puts "Looking for tests"
    testAssemblies = Dir.glob("buildArtifacts/bin/tests/**/*tests*.dll")
    testAssemblies.delete_if do |c| c.include?(".Tests.Acceptance") end
    testAssemblyCmd = testAssemblies.join(" ")
    puts "Test assemblies found. Running tests in following assemblies;"
    puts testAssemblies
    params='/xml=buildArtifacts/Reports/TestResults.xml /exclude:Acceptance'
    sh "#{nunitconsole}" " #{testAssemblyCmd} #{params}"
  end
end

# Invokes the MSBuild Package target for every project.
task :package do
  projects = Dir.glob('source/**/**.csproj')+Dir.glob('tests/**/*.csproj')
  puts "Projects to package:"
  puts projects
  projects.each do|project|
    params = " #{project} /T:Package /p:configuration=#{buildConfiguration};outputpath=../../buildartifacts/bin/Packages"
    sh "#{msbuild} #{params}"
  end
end

# Zips the packaged binaries into the installer archive, replacing any
# previous archive first.
task :createInstallers do
  if File.file?(installersPath)
    File.delete(installersPath)
  end
  zipFolder("#{zipExe}", "#{packageDir}", "#{installersPath}")
end

# Adds the contents of +source+ to the +target+ zip, then strips unit-test
# artifacts out of the archive.
def zipFolder(zipExe, source, target)
  sh "#{zipExe} a #{target} .\\#{source}\\*"
  sh "#{zipExe} d #{target} tests/**.Tests.Unit"
end
|
garfieldmoore/Boomerang
|
build/bootstrapper.deploy.rb
|
<filename>build/bootstrapper.deploy.rb
task :default => :runDeploy
task :runDeploy do
deployDir="buildArtifacts/deployables/_PublishedWebsites"
sh "rake -f #{deployDir}/deploy.rb -t deploySite[\"#{deployDir}, localHost, sa, Irdog247, C:/inetpub/wwwroot\"]"
end
|
garfieldmoore/Boomerang
|
build/nft.rb
|
# runs non-functional tests
# Placeholder capacity-test task: only logs start/finish so the CI pipeline
# stage exists before real capacity tests are added.
task :default => :capacity
task :capacity do
puts "Running capacity tests..."
puts "finished running capacity tests."
end
|
garfieldmoore/Boomerang
|
build/deploy.rb
|
# Deploys published web packages to a host and applies per-environment
# configuration transforms.
require 'fileutils'
deployDir=""
params=""
webDeployPath="C:/inetpub/wwwroot/baseline.web.mvc3_deploy"
configTool="c:/dev/ConfigTool/imenvcfg.exe"

task :default do
  puts "Deploys the system to a remote host"
end

# Entry point used by the bootstrapper: captures the artifact directory and
# target web path, then runs the :deploy chain.
task :deploySite, :deployablesDir, :webAppPath do |t, args|
  deployDir=args[:deployablesDir]
  webDeployPath=args[:webAppPath]
  Rake::Task["deploy"].invoke
end

task :deploy => [:environmentConfig, :runInstaller]

# environment config (currently a no-op placeholder)
task :environmentConfig do
end

# Executes every generated MSDeploy .deploy.cmd package against localhost.
task :runInstaller do
  puts "Starting task:runInstaller"
  packages=Dir.glob("BuildArtifacts/**/_PublishedWebsites/**/*.deploy.cmd")
  params='/Y /M:localhost'
  packages.each do|package|
    puts "Deploying package:#{package}"
    sh "#{package} #{params}"
  end
end

# Transforms *.settings.env files for the first environment listed in
# ENV['BuildEnvironments'] and copies the results over the deployed configs.
task :applicationConfig do
  puts "Starting task:applicationConfig"
  environments= ENV['BuildEnvironments'].split(';')
  if (environments!=nil)
    environment=environments[0]
    puts "Configuring components for environment:#{environment}"
    configs=Dir.glob("#{webDeployPath}/**/_Env.Config/#{environment}*.settings.env")
    configs.delete_if do |c| c.include?("/bin/") end
    puts "Configurations found at:#{webDeployPath}"
    puts "pattern: #{webDeployPath}/**/_Env.Config/#{environment}*.settings.env"
    configs.each do |config|
      puts "#{config}"
    end
    configs.each do |config|
      target=config.sub("#{environment}.",'')
      target.sub!(".settings.env",'.Template')
      # BUG FIX: File.exists? was deprecated for years and removed in
      # Ruby 3.2; File.exist? is the supported spelling.
      if (File.exist?(target) && File.exist?(config))
        puts "Transforming configuration: Source:#{config}, target:#{target}"
        sh "#{configTool} #{config} #{target}"
        replaceFile=target.sub('_Env.Config/','')
        replaceFile.sub!('.Template','')
        puts "Copying files: Source:#{target}, target:#{replaceFile}"
        FileUtils.cp(target,replaceFile)
      else
        puts "Transform files not found:\n\rsource:#{config} \n\rtarget:#{target}"
      end
    end
  end
end
|
garfieldmoore/Boomerang
|
build/smokeTests.rb
|
# tests application services are working following a deployment
require "net/http"

# Expected database schema version after deployment.
DB_MAJOR="6"
DB_MINOR="0"
DB_REVISION ="12"
DB_BUILD="0"

# Queries the oldest SchemaVersion row via sqlcmd and fails the build when it
# does not match the expected version above.
task :CheckDatabaseAccess do
  puts "Starting task:CheckDatabaseAccess"
  sqlCmd = ENV['SQLCMDPATH']
  # NOTE(review): database credentials are hard-coded here — move them to
  # environment variables or a secrets store.
  params = "-o CheckDataBaseOutput.txt -U SN_ADMIN -P ke%f73Xe5N0 -d IM_MONEY -Q \"SELECT [Major], [Minor], [Revision], [Build] FROM [IM_Money].[dbo].[SchemaVersion] WHERE [Revision] = (select MIN([Revision]) FROM [IM_Money].[dbo].[SchemaVersion]);\""
  sh "#{sqlCmd} #{params}"
  file = File.open('CheckDataBaseOutput.txt')
  # Skip the two header lines of sqlcmd output; the third line holds the row.
  contents = file.readline
  contents = file.readline
  contents = file.readline
  contents.squeeze!(' ')
  schemaVersion = contents.split
  puts "Database schema version:"
  puts "Major:'#{schemaVersion[0]}'"
  puts "Minor:'#{schemaVersion[1]}'"
  puts "Revision:'#{schemaVersion[2]}'"
  puts "Build:'#{schemaVersion[3]}'"
  if (DB_MAJOR != schemaVersion[0] || DB_MINOR != schemaVersion[1] || DB_REVISION !=schemaVersion[2] || DB_BUILD !=schemaVersion[3])
    puts "Smoke tests failed: Unexpected database schema;"
    # exit terminates the process; the unreachable `raise` that followed it
    # in the original has been removed.
    exit(-1)
  end
  puts "database access response:ok"
end

# Hits each semicolon-separated URL under +domain+ and fails unless the HTTP
# status code is at or below +expectedHttpCode+.
task :CheckWebsiteAccess, :domain, :expectedHttpCode, :url do |t, args|
  puts "Starting task:CheckWebsiteAccess"
  puts args
  urlStrings=args[:url].split(';')
  domain=args[:domain]
  expectedHttpCode=args[:expectedHttpCode]
  urlStrings.each do|urlString|
    uri = URI.parse("#{domain}#{urlString}")
    http = Net::HTTP.new(uri.host, uri.port)
    request = Net::HTTP::Get.new(uri.request_uri)
    request.initialize_http_header({"User-Agent" => "Smoke Script"})
    puts "Checking access to #{uri}"
    #check response
    result = -1
    begin
      response = http.request(request)
      puts "Server response: " + response.code
      # BUG FIX: the original compared codes as strings ("99" <= "100" is
      # false lexically, "404" <= "50" is true), so both false positives and
      # false negatives were possible. Compare numerically instead.
      if response.code.to_i <= expectedHttpCode.to_i
        puts "Access ok"
        result = 0
      else
        puts "Request to #{urlString} failed"
      end
    rescue
      puts "Request to #{urlString} failed"
    end
    if result != 0
      # exit terminates the process; the unreachable `raise` after it in the
      # original has been removed.
      exit(result)
    end
  end
end
|
abhaykumarPS/puppetdb
|
docker/spec/puppetdb_spec.rb
|
<filename>docker/spec/puppetdb_spec.rb
# Container smoke tests: bring up the puppetdb compose cluster once for the
# whole suite, then verify the database extensions and the puppetdb service.
include Pupperware::SpecHelpers
# unifies volume naming
ENV['COMPOSE_PROJECT_NAME'] ||= 'puppetdb'
Pupperware::SpecHelpers.load_compose_services = 'postgres,puppet'
RSpec.configure do |c|
c.before(:suite) do
# The image under test must be supplied by the build (require_test_image
# raises otherwise); dependencies are pulled fresh each run.
ENV['PUPPETDB_IMAGE'] = require_test_image
pull_images('puppetdb')
teardown_cluster()
docker_compose_up(preload_certs: true)
end
c.after(:suite) do
# Dump container logs before tearing down so failures are diagnosable.
emit_logs
teardown_cluster()
end
end
describe 'puppetdb container specs' do
it 'should have installed postgres extensions' do
installed_extensions = get_postgres_extensions
expect(installed_extensions).to match(/\s+pg_trgm\s+/)
expect(installed_extensions).to match(/\s+pgcrypto\s+/)
end
it 'should have started puppetdb' do
expect(get_service_container('puppetdb')).to_not be_empty
end
end
|
abhaykumarPS/puppetdb
|
puppet/spec/unit/util/puppetdb_spec.rb
|
<reponame>abhaykumarPS/puppetdb
#!/usr/bin/env rspec
# encoding: UTF-8
require 'spec_helper'
require 'digest/sha1'
require 'puppet/util/puppetdb'
require 'puppet/util/puppetdb/command_names'
require 'puppet/util/puppetdb/http'
require 'json'
# Minimal stand-in for an HTTP response object: exposes only #body.
class FakeHttpResponse
  attr_reader :body

  def initialize(body)
    @body = body
  end
end
# Unit tests for the Puppet::Util::Puppetdb mixin: command submission and
# query plumbing, with Command/Http stubbed out (mocha expectations).
describe Puppet::Util::Puppetdb do
subject { Object.new.extend described_class }
describe "#submit_command" do
let(:payload) { {'resistance' => 'futile', 'opinion' => 'irrelevant'} }
let(:command1) { Puppet::Util::Puppetdb::Command.new("OPEN SESAME", 1, 'foo.localdomain', Time.now.utc,
payload.merge(:uniqueprop => "command1")) }
it "should submit the command" do
# careful here... since we're going to stub Command.new, we need to
# make sure we reference command1 first, because it calls Command.new.
command1.expects(:submit).once
Puppet::Util::Puppetdb::Command.expects(:new).once.returns(command1)
subject.submit_command(command1.certname,
command1.command,
command1.producer_timestamp_utc,
command1.version) { command1.payload }
end
end
describe ".query_puppetdb" do
let(:response) { JSON.generate({'certname' => 'futile', 'status' => 'irrelevant'}) }
let(:query) { ["=", "type", "Foo"] }
let(:http_response) { FakeHttpResponse.new(response) }
it "should query PuppetDB" do
# Only verifies that one HTTP action is performed for a query; the
# response parsing itself is not asserted here.
Puppet::Util::Puppetdb::Http.expects(:action).once.returns(http_response)
Puppet::Util::Puppetdb.query_puppetdb(query)
end
end
end
|
abhaykumarPS/puppetdb
|
puppet/spec/unit/indirector/facts/puppetdb_spec.rb
|
<gh_stars>100-1000
#!/usr/bin/env rspec
require 'spec_helper'
require 'puppet/util/feature'
require 'puppet/indirector/facts/puppetdb'
require 'puppet/util/puppetdb'
require 'puppet/util/puppetdb/command_names'
require 'json'
require 'date'
require 'time'
# Unit tests for the PuppetDB facts terminus: fact submission (#save),
# trusted-data extraction, single-node fact lookup (#find) and fact-based
# node search (#search). HTTP traffic is fully stubbed with mocha.
describe Puppet::Node::Facts::Puppetdb do
CommandReplaceFacts = Puppet::Util::Puppetdb::CommandNames::CommandReplaceFacts
let(:http) { stub 'http' }
before :each do
Puppet::Util::Puppetdb.config.stubs(:server_urls).returns [URI("https://localhost:8282")]
Puppet::Node::Facts.indirection.stubs(:terminus).returns(subject)
Puppet::Network::HttpPool.stubs(:connection).returns(http)
create_environmentdir("my_environment")
end
describe "#save" do
let(:response) { Net::HTTPOK.new('1.1', 200, 'OK') }
let(:facts) { Puppet::Node::Facts.new('foo') }
let(:options) {{
:environment => "my_environment",
}}
before :each do
response.stubs(:body).returns '{"uuid": "a UUID"}'
end
def save
subject.save(Puppet::Node::Facts.indirection.request(:save, facts.name, facts, options))
end
it "should POST the trusted data we tell it to" do
trusted_data = {"foo" => "foobar", "certname" => "testing_posting"}
subject.stubs(:get_trusted_info).returns trusted_data
Puppet[:node_name_value] = "mom"
payload = {
"certname" => facts.name,
"values" => facts.values.merge({"trusted" => trusted_data}),
"environment" => "my_environment",
"producer" => "mom"
}
http.expects(:post).with do |uri, body, headers|
assert_command_req(payload, body)
end.returns response
save
end
it "should retain integer type when submitting" do
facts.values['something'] = 100
sent_payload = nil
http.expects(:post).with do |uri, body, headers|
sent_payload = body
end.returns response
save
message = JSON.parse(sent_payload)
# We shouldn't modify the original instance
facts.values['something'].should == 100
message['values']['something'].should == 100
end
it "should transform the package inventory fact when submitting" do
fact_tuple = ['openssl', '1.0.2g-1ubuntu4.6', 'apt']
inventory_fact_value = { 'packages' => [fact_tuple] }
facts.values['_puppet_inventory_1'] = inventory_fact_value
sent_payload = nil
http.expects(:post).with do |uri, body, headers|
sent_payload = body
end.returns response
save
message = JSON.parse(sent_payload)
# We shouldn't modify the original instance
facts.values['_puppet_inventory_1'].should == inventory_fact_value
message['values']['_puppet_inventory_1'].should be_nil
message['package_inventory'].should == [fact_tuple]
end
it "shouldn't crash with a malformed inventory fact" do
# Only asserts that #save completes; no expectations on the body.
facts.values['_puppet_inventory_1'] = ['foo', 'bar']
sent_payload = nil
http.expects(:post).with do |uri, body, headers|
sent_payload = body
end.returns response
save
end
end
describe "#get_trusted_info" do
# These tests branch on the keys present in the result because the shape
# of trusted data changed across Puppet releases (see inline comments).
it 'should return trusted data' do
node = Puppet::Node.new('my_certname')
trusted = subject.get_trusted_info(node)
# External key added by PUP-9994, Puppet 6.11.0
if trusted.has_key?('external')
expect(trusted).to eq({'authenticated'=>'local', 'certname'=>'testing',
'extensions'=>{}, 'external'=>{"trusted_testhelper"=>true}, 'hostname'=>'testing', 'domain'=>nil})
# Extra keys domain & hostname introduced by PUP-5097, Puppet 4.3.0
elsif trusted.has_key?("domain")
expect(trusted).to eq({'authenticated'=>'local', 'certname'=>'testing',
'extensions'=>{}, 'hostname'=>'testing', 'domain'=>nil})
else
# Puppet 4.2.x and older
expect(trusted).to eq({'authenticated'=>'local', 'certname'=>'testing', 'extensions'=>{}})
end
end
it 'should return trusted data when falling back to the node' do
# This removes :trusted_information from the global context, triggering our fallback code.
if Puppet.methods.include? :rollback_context
Puppet.rollback_context('initial testing state')
else
Puppet.pop_context # puppet 3.5.1
end
node = Puppet::Node.new('my_certname', :parameters => {'clientcert' => 'trusted_certname'})
trusted = subject.get_trusted_info(node)
# External key added by PUP-9994, Puppet 6.11.0
if trusted.has_key?('external')
expect(trusted).to eq({'authenticated'=>'local', 'certname'=>'trusted_certname',
'extensions'=>{}, 'external'=>{}, 'hostname'=>'trusted_certname', 'domain'=>nil})
# Extra keys domainname & hostname introduced by PUP-5097, Puppet 4.3.0
elsif trusted.has_key?("domain")
expect(trusted).to eq({'authenticated'=>'local', 'certname'=>'trusted_certname',
'extensions'=>{}, 'hostname'=>'trusted_certname', 'domain'=>nil})
else
# Puppet 4.2.x and older
expect(trusted).to eq({'authenticated'=>'local', 'certname'=>'trusted_certname', 'extensions'=>{}})
end
# Put the context back the way the test harness expects
Puppet.push_context({}, 'context to make the tests happy')
if Puppet.methods.include? :mark_context
Puppet.mark_context('initial testing state')
end
end
end
describe "#find" do
def find_facts()
Puppet::Node::Facts.indirection.find('some_node')
end
let(:options) { {:metric_id => [:puppetdb, :facts, :find]} }
it "should return the facts if they're found" do
body = [{"certname" => "some_node", "environment" => "production", "name" => "a", "value" => "1"},
{"certname" => "some_node", "environment" => "production", "name" => "b", "value" => "2"}].to_json
response = Net::HTTPOK.new('1.1', 200, 'OK')
response.stubs(:body).returns body
http.stubs(:get).with("/pdb/query/v4/nodes/some_node/facts", subject.headers, options).returns response
result = find_facts
result.should be_a(Puppet::Node::Facts)
result.name.should == 'some_node'
result.values.should include('a' => '1', 'b' => '2')
end
it "should return nil if no facts are found" do
body = {"error" => "No information known about factset some_node"}.to_json
response = Net::HTTPNotFound.new('1.1', 404, 'NotFound')
response.stubs(:body).returns body
http.stubs(:get).with("/pdb/query/v4/nodes/some_node/facts", subject.headers, options).returns response
find_facts.should be_nil
end
it "should fail if an HTTP error code is returned" do
response = Net::HTTPForbidden.new('1.1', 403, "Forbidden")
response.stubs(:body).returns ''
http.stubs(:get).with("/pdb/query/v4/nodes/some_node/facts", subject.headers, options).returns response
expect {
find_facts
}.to raise_error Puppet::Error, /\[403 Forbidden\]/
end
it "should fail if an error occurs" do
http.stubs(:get).with("/pdb/query/v4/nodes/some_node/facts", subject.headers, options).raises Puppet::Error, "Everything is terrible!"
expect {
find_facts
}.to raise_error Puppet::Error, /Everything is terrible!/
end
it "should log a deprecation warning if one is returned from PuppetDB" do
response = Net::HTTPOK.new('1.1', 200, 'OK')
response['x-deprecation'] = "This is deprecated!"
body = [].to_json
response.stubs(:body).returns body
http.stubs(:get).with("/pdb/query/v4/nodes/some_node/facts", subject.headers, options).returns(response)
Puppet.expects(:deprecation_warning).with do |msg|
msg =~ /This is deprecated!/
end
find_facts
end
end
describe "#search" do
def search_facts(query)
Puppet::Node::Facts.indirection.search('facts', query)
end
let(:response) { Net::HTTPOK.new('1.1', 200, 'OK') }
let(:options) { {:metric_id => [:puppetdb, :facts, :search]} }
it "should return the nodes from the response" do
args = {
'facts.kernel.eq' => 'Linux',
}
# NOTE(review): the first stub below is immediately overwritten by the
# second and is dead code.
response.stubs(:body).returns '["foo", "bar", "baz"]'
response.stubs(:body).returns '[{"name": "foo", "deactivated": null, "expired": null, "catalog_timestamp": null, "facts_timestamp": null, "report_timestamp": null},
{"name": "bar", "deactivated": null, "expired": null, "catalog_timestamp": null, "facts_timestamp": null, "report_timestamp": null},
{"name": "baz", "deactivated": null, "expired": null, "catalog_timestamp": null, "facts_timestamp": null, "report_timestamp": null}]'
query = CGI.escape("[\"and\",[\"=\",[\"fact\",\"kernel\"],\"Linux\"]]")
http.stubs(:get).with("/pdb/query/v4/nodes?query=#{query}", subject.headers, options).returns(response)
search_facts(args).should == ['foo', 'bar', 'baz']
end
it "should only allow searches against facts" do
args = {
'facts.kernel.eq' => 'Linux',
'wrong.kernel.eq' => 'Linux',
}
expect do
search_facts(args)
end.to raise_error(Puppet::Error, /Fact search against keys of type 'wrong' is unsupported/)
end
it "should combine multiple terms with 'and'" do
args = {
'facts.kernel.eq' => 'Linux',
'facts.uptime.eq' => '10 days',
}
query = CGI.escape(["and", ["=", ["fact", "kernel"], "Linux"],
["=", ["fact", "uptime"], "10 days"]].to_json)
response.stubs(:body).returns '[]'
http.stubs(:get).with("/pdb/query/v4/nodes?query=#{query}", subject.headers, options).returns(response)
search_facts(args)
end
it "should add 'not' to a != query" do
args = {
'facts.kernel.ne' => 'Linux',
}
query = CGI.escape(["and", ["not", ["=", ["fact", "kernel"], "Linux"]]].to_json)
response.stubs(:body).returns '[]'
http.stubs(:get).with("/pdb/query/v4/nodes?query=#{query}", subject.headers, options).returns(response)
search_facts(args)
end
it "should default the operator to = if one is not specified" do
args = {
'facts.kernel' => 'Linux',
}
query = CGI.escape(["and", ["=", ["fact", "kernel"], "Linux"]].to_json)
response.stubs(:body).returns '[]'
http.stubs(:get).with("/pdb/query/v4/nodes?query=#{query}", subject.headers, options).returns(response)
search_facts(args)
end
# Data-driven cases: each comparison suffix maps to a PQL operator.
{
'gt' => '>',
'lt' => '<',
'ge' => '>=',
'le' => '<='
}.each do |name, operator|
it "should map '#{name}' to #{operator}" do
args = {
"facts.kernel.#{name}" => 'Linux',
}
query = CGI.escape(["and", [operator, ["fact", "kernel"], "Linux"]].to_json)
response.stubs(:body).returns '[]'
http.stubs(:get).with("/pdb/query/v4/nodes?query=#{query}", subject.headers, options).returns(response)
search_facts(args)
end
end
it "should raise an error if a failure occurs" do
response = Net::HTTPBadRequest.new('1.1', 400, 'Bad Request')
response.stubs(:body).returns 'Something bad happened!'
query = CGI.escape(["and"].to_json)
http.stubs(:get).with("/pdb/query/v4/nodes?query=#{query}", subject.headers, options).returns(response)
expect do
search_facts(nil)
end.to raise_error(Puppet::Error, /\[400 Bad Request\] Something bad happened!/)
end
it "should log a deprecation warning if one is returned from PuppetDB" do
response['x-deprecation'] = "This is deprecated!"
response.stubs(:body).returns '[]'
query = CGI.escape(["and"].to_json)
http.stubs(:get).with("/pdb/query/v4/nodes?query=#{query}", subject.headers, options).returns(response)
Puppet.expects(:deprecation_warning).with do |msg|
msg =~ /This is deprecated!/
end
search_facts(nil)
end
end
end
|
abhaykumarPS/puppetdb
|
acceptance/setup/pre_suite/40_install_deps.rb
|
# Beaker pre-suite step: refresh CA certificates on the master (with
# OS-specific package managers) and work around EL6/EL8 repo quirks.
unless (test_config[:skip_presuite_provisioning])
step "Update CA cerificates" do
os = test_config[:os_families][master.name]
case os
when :redhat
if is_el6
# workaround for old ca-certificates package, trick
# yum into looking for a newer redhat 6.y version's package
on master, "rm -f /etc/yum.repos.d/localmirror-extras.repo /etc/yum.repos.d/localmirror-optional.repo && sed -i 's/68/610/' /etc/yum.repos.d/localmirror-os.repo"
end
on master, "yum install -y ca-certificates"
when :fedora
on master, "yum install -y ca-certificates"
when :debian
# NOTE(review): the install runs before apt-get update here — confirm
# that ordering is intentional.
on master, "apt-get install -y ca-certificates libgnutls30"
on master, "apt-get update"
end
end
if is_el8
# work around for testing on rhel8 and the repos on the image not finding the pg packages it needs
step "Install PostgreSQL manually" do
on master, "dnf install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm"
on master, "dnf -qy module disable postgresql"
end
end
end
|
abhaykumarPS/puppetdb
|
puppet/lib/puppet/util/puppetdb/command_names.rb
|
# Wire-protocol command names accepted by PuppetDB, defined in one place so
# every terminus submits the exact same strings.
module Puppet::Util::Puppetdb
module CommandNames
CommandReplaceCatalog = "replace catalog"
CommandReplaceFacts = "replace facts"
CommandDeactivateNode = "deactivate node"
CommandStoreReport = "store report"
CommandReplaceCatalogInputs = "replace catalog inputs"
end
end
|
abhaykumarPS/puppetdb
|
puppet/lib/puppet/indirector/node/puppetdb.rb
|
<filename>puppet/lib/puppet/indirector/node/puppetdb.rb
require 'puppet/node'
require 'puppet/indirector/rest'
require 'puppet/util/puppetdb'
# Node terminus for PuppetDB: only node deactivation is implemented.
class Puppet::Node::Puppetdb < Puppet::Indirector::REST
include Puppet::Util::Puppetdb
# Intentional no-op: PuppetDB is not used to look nodes up.
def find(request)
end
# Intentional no-op: node data reaches PuppetDB via the facts/catalog termini.
def save(request)
end
# Submits a "deactivate node" command (version 3) for the node named by the
# request key, stamped with the current time as producer_timestamp.
def destroy(request)
current_time = Time.now
submit_command(request.key, CommandDeactivateNode, 3, current_time.clone.utc) do
{:certname => request.key,
:producer_timestamp => Puppet::Util::Puppetdb.to_wire_time(current_time)}
end
end
end
|
abhaykumarPS/puppetdb
|
acceptance/setup/pre_suite/70_install_released_puppetdb.rb
|
<reponame>abhaykumarPS/puppetdb
# We skip this step entirely unless we are running in :upgrade mode.
# Installs a previously released PuppetDB (oldest supported or latest
# released, per install_mode) on each database host so the suite can
# exercise the upgrade path.
version = test_config[:package_build_version].to_s
latest_released = get_latest_released(version)
if ([:upgrade_oldest, :upgrade_latest].include? test_config[:install_mode]) \
&& !(test_config[:skip_presuite_provisioning])
# NOTE(review): oldest_supported is a helper/constant defined outside this
# file — confirm it is available in the beaker context.
install_target = test_config[:install_mode] == :upgrade_latest ? latest_released : oldest_supported
step "Install most recent released PuppetDB on the PuppetDB server for upgrade test" do
databases.each do |database|
enable_https_apt_sources(database)
install_puppetdb(database, install_target)
start_puppetdb(database)
install_puppetdb_termini(master, databases, install_target)
end
end
end
|
SpringMT/unicorn-log_error_one_liner
|
spec/spec_helper.rb
|
<reponame>SpringMT/unicorn-log_error_one_liner<gh_stars>1-10
# Test bootstrap: load bundled gems, enable SimpleCov with rcov-style
# output, and put lib/ on the load path.
require 'bundler'
Bundler.setup(:default, :test)
Bundler.require(:default, :test)
require 'simplecov'
require 'simplecov-rcov'
SimpleCov.formatter = SimpleCov::Formatter::RcovFormatter
SimpleCov.start
$TESTING=true
$:.unshift File.join(File.dirname(__FILE__), '..', 'lib')
|
SpringMT/unicorn-log_error_one_liner
|
lib/unicorn/log_error_one_liner.rb
|
<filename>lib/unicorn/log_error_one_liner.rb
require 'unicorn'
module Unicorn
  # Overrides Unicorn's default multi-line error logging so each error is
  # emitted as a single log line (backtrace frames joined with tabs), which
  # keeps line-oriented log collectors working.
  #
  # @param logger [Logger] destination logger
  # @param prefix [String] context label placed before the message
  # @param exc [Exception] exception to log
  def self.log_error(logger, prefix, exc)
    message = exc.message
    # Escape control characters so the message cannot break the line format.
    message = message.dump if /[[:cntrl:]]/ =~ message
    # BUG FIX: Exception#backtrace is nil for exceptions that were never
    # raised; Array() guards against a NoMethodError on #join in that case.
    backtrace_str = Array(exc.backtrace).join("\t")
    logger.error "#{prefix}: #{message} (#{exc.class}) #{backtrace_str}"
  end
end
|
SpringMT/unicorn-log_error_one_liner
|
spec/log_error_one_liner_spec.rb
|
<gh_stars>1-10
require File.dirname(__FILE__) + '/spec_helper'
# Spec for the one-liner patch: a fresh log file gets exactly two lines —
# Logger's "Logfile created" header plus the single error line.
describe Unicorn do
let(:log_dir) { "#{File.dirname(__FILE__)}/log" }
let(:log_file) { "unicorn.log" }
let(:logger) { Logger.new("#{log_dir}/#{log_file}") }
before do
Dir.mkdir(log_dir)
end
after do
FileUtils.rm_rf log_dir
end
describe '.log_error' do
context 'valid' do
before do
begin
raise ArgumentError
rescue => e
Unicorn.log_error(logger, "hoge", e)
end
end
it do
# 2 = Logger's file-creation header line + the one error line.
expect(File.read("#{log_dir}/#{log_file}").count("\n")).to eq 2
end
end
end
end
|
upscent/openapi2ruby
|
lib/openapi2ruby.rb
|
require "openapi2ruby/version"
require 'openapi2ruby/cli'
require 'openapi2ruby/parser'
require 'openapi2ruby/generator'
require 'openapi2ruby/openapi'
require 'openapi2ruby/openapi/schema'
require 'openapi2ruby/openapi/schema/property'
require 'active_support/core_ext/string/inflections'
|
upscent/openapi2ruby
|
spec/openapi2ruby/parser_spec.rb
|
require 'spec_helper'
# Specs for Parser: raw YAML loading (#parse_file) and wrapping the document
# in an Openapi object (#parse / .parse) using the petstore fixture.
RSpec.describe Openapi2ruby::Parser do
subject { Openapi2ruby::Parser.new(path) }
let(:path) { 'spec/fixtures/files/petstore.yaml' }
describe '#parse_file' do
it 'returns parsed from openapi.yaml hash' do
expect(subject.parse_file['openapi']).to eq '3.0.0'
end
end
describe '#parse' do
it 'returns Openapi instance' do
expect(subject.parse).to be_a Openapi2ruby::Openapi
end
end
describe '.parse' do
subject { Openapi2ruby::Parser.parse(path) }
it { is_expected.to be_a Openapi2ruby::Openapi }
end
end
|
upscent/openapi2ruby
|
lib/openapi2ruby/openapi/schema.rb
|
<reponame>upscent/openapi2ruby
module Openapi2ruby
  # Wraps a single OpenAPI component schema: its name and raw definition hash.
  class Openapi::Schema
    # @param content [Hash] :name (String) and :definition (Hash from YAML)
    def initialize(content)
      @name = content[:name]
      @definition = content[:definition]
    end

    # OpenAPI camelcase schema name
    # @return [String]
    def name
      @name.camelcase
    end

    # OpenAPI required properties name
    # @return [Array[String], nil] nil when the schema declares no required list
    def requireds
      @definition['required']
    end

    # OpenAPI schema properties
    # @return [Array[Openapi2ruby::Openapi::Schema::Property]]
    def properties
      return [] if @definition['properties'].nil?
      # Idiom fix: a plain map replaces the original each_with_object append.
      @definition['properties'].map do |key, value|
        Openapi2ruby::Openapi::Schema::Property.new(name: key, definition: value)
      end
    end

    # Whether property is required or not
    # @param [Openapi2ruby::Openapi::Schema::Property] OpenAPI schema property
    # @return [Boolean]
    def required?(property)
      return false if requireds.nil?
      requireds.include?(property.name)
    end

    # Refs of every oneOf property.
    # Idiom fix: the original each_with_object loop carried a dead
    # `else results` branch; select + map is equivalent and clearer.
    # @return [Array]
    def one_ofs
      return [] if properties.empty?
      properties.select(&:one_of?).map(&:one_of_refs)
    end
  end
end
|
upscent/openapi2ruby
|
lib/openapi2ruby/openapi.rb
|
module Openapi2ruby
  # Wraps a parsed OpenAPI document and exposes its component schemas.
  class Openapi
    # @param content [Hash] parsed OpenAPI YAML document
    def initialize(content)
      @content = content
    end

    # Creates OpenAPI Schema array, skipping schemas without properties.
    # @return [Array[Openapi2ruby::Openapi::Schema]]
    def schemas
      raw = @content['components']['schemas']
      wrapped = raw.map do |schema_name, definition|
        Openapi2ruby::Openapi::Schema.new(name: schema_name, definition: definition)
      end
      wrapped.reject { |schema| schema.properties.empty? }
    end
  end
end
|
upscent/openapi2ruby
|
spec/openapi2ruby/openapi_schema_spec.rb
|
require 'spec_helper'
# Specs for Openapi::Schema using the petstore fixture's first schema (Pet):
# name formatting, required fields, property wrapping and required? lookup.
RSpec.describe Openapi2ruby::Openapi::Schema do
subject { openapi.schemas.first }
let(:path) { 'spec/fixtures/files/petstore.yaml' }
let(:content) { YAML.load(File.read(path)) }
let(:openapi) { Openapi2ruby::Openapi.new(content) }
describe '#name' do
it 'returns camelcase schema name' do
expect(subject.name).to eq 'Pet'
end
end
describe '#requires' do
it 'returns required fields' do
expect(subject.requireds).to eq %w(id name)
end
end
describe '#properties' do
it 'returns Openapi::Schema::Property instances' do
expect(subject.properties).to all be_an(Openapi2ruby::Openapi::Schema::Property)
end
end
describe '#required?' do
subject { schema.required?(property) }
let(:schema) { openapi.schemas.first }
context 'when required property' do
let(:property) { schema.properties.first }
it { is_expected.to be true }
end
context 'when not be required property' do
# A property name absent from the schema's required list.
let(:property) do
Openapi2ruby::Openapi::Schema::Property.new(
{ name: 'hoge', definition: {} }
)
end
it { is_expected.to be false }
end
end
end
|
upscent/openapi2ruby
|
lib/openapi2ruby/parser.rb
|
require 'yaml'
module Openapi2ruby
  # Reads an OpenAPI YAML document from disk and wraps it in an Openapi object.
  class Parser
    # Parse openapi.yaml
    # @param path [String] OpenAPI schema file path
    # @return [Openapi2ruby::Openapi]
    def self.parse(path)
      new(path).parse
    end

    def initialize(path)
      @path = path
    end

    # Parse openapi.yaml
    # @return [Openapi2ruby::Openapi]
    def parse
      Openapi.new(parse_file)
    end

    # Raw document as a Hash.
    # NOTE(review): YAML.load instantiates arbitrary tags; consider
    # YAML.safe_load if schema files can come from untrusted sources.
    def parse_file
      YAML.load(File.read(@path))
    end
  end
end
|
upscent/openapi2ruby
|
lib/openapi2ruby/generator.rb
|
<filename>lib/openapi2ruby/generator.rb
require 'active_support/core_ext/string/inflections'
require 'erb'
require 'pathname'
module Openapi2ruby
  # Renders a serializer Ruby class for one OpenAPI schema via an ERB template.
  class Generator
    # Default ERB template shipped with the gem.
    TEMPLATE_PATH = File.expand_path('../templates/serializer.rb.erb', __FILE__)
    # Generate ruby class from OpenAPI schema
    # @param schema [Openapi2ruby::Openapi::Schema] parsed OpenAPI schema
    # @param output_path [String] parsed OpenAPI YAML
    # @param template_path [String] original template path
    def self.generate(schema, output_path, template_path)
      new(schema).generate(output_path, template_path)
    end
    def initialize(schema)
      @schema = schema
    end
    # Generate ruby class from OpenAPI schema
    # Writes "<schema_name_underscored>_serializer.rb" into output_path.
    # @param output_path [String] parsed OpenAPI YAML
    # @param template_path [String] original template path
    # @return [String] path of the file that was written
    def generate(output_path, template_path)
      template_path = TEMPLATE_PATH if template_path.nil?
      template = File.read(template_path)
      # The template reads @schema through `binding`.
      # NOTE(review): ERB.new(src, nil, '-') uses positional args deprecated
      # since Ruby 2.6 (use trim_mode: '-') — confirm target Ruby version.
      generated_class = ERB.new(template, nil, '-').result(binding)
      output_file = Pathname.new(output_path).join("#{@schema.name.underscore}_serializer.rb")
      File.open(output_file.to_s, 'w') { |file| file << generated_class }
      output_file.to_s
    end
  end
end
|
upscent/openapi2ruby
|
lib/openapi2ruby/cli.rb
|
<filename>lib/openapi2ruby/cli.rb
require 'thor'
module Openapi2ruby
  # Thor command-line interface: `parse` inspects a spec, `generate` emits
  # one serializer file per schema.
  class Cli < Thor
    desc 'parse', 'load openapi.yaml'
    # @param path [String] OpenAPI YAML file path
    def parse(path)
      puts 'Loading OpenAPI yaml file...'
      raise "File not found. #{path}" unless File.exist?(path)
      openapi = Openapi2ruby::Parser.parse(path)
      p openapi.schemas
    end
    option :template, type: :string
    option :out, required: true, type: :string
    desc 'generate', 'load openapi.yaml and generate serializer'
    # @param path [String] OpenAPI YAML file path; writes serializers to
    #   --out, optionally using a custom --template
    def generate(path)
      puts 'Loading OpenAPI yaml file...'
      raise "File not found. #{path}" unless File.exist?(path)
      openapi = Openapi2ruby::Parser.parse(path)
      openapi.schemas.each do |schema|
        serializer = Openapi2ruby::Generator.generate(schema, options[:out], options[:template])
        puts "Created: #{serializer}"
      end
    end
  end
end
|
upscent/openapi2ruby
|
lib/openapi2ruby/openapi/schema/property.rb
|
<filename>lib/openapi2ruby/openapi/schema/property.rb
module Openapi2ruby
  # One property of an OpenAPI schema: its type, format, $ref and oneOf info.
  class Openapi::Schema::Property
    attr_reader :name

    # @param content [Hash] { name: String, definition: Hash } — definition is
    #   the raw OpenAPI property hash (string keys).
    def initialize(content)
      @name = content[:name]
      @type = content[:definition]['type']
      @items = content[:definition]['items']
      @format = content[:definition]['format']
      @ref = content[:definition]['$ref']
      if content[:definition]['oneOf']
        @type = 'oneOf'
        # Fix: the inner block parameter previously shadowed the method
        # parameter `content`; renamed to `one_of` for clarity.
        @one_of_refs = content[:definition]['oneOf'].map do |one_of|
          one_of['$ref'].split('/').last
        end
      else
        @one_of_refs = []
      end
    end

    # OpenAPI schema ref property name (last path segment of the $ref)
    # @return [String]
    def ref
      return @items['$ref'].split('/').last if ref_items?
      @ref.split('/').last
    end

    # OpenAPI schema ref property class name
    # @return [String]
    def ref_class
      ref.camelcase
    end

    # Whether property is ref or not
    # @return [Boolean]
    def ref?
      !@ref.nil?
    end

    # Whether property has ref array items
    # @return [Boolean]
    def ref_items?
      @type == 'array' && !@items['$ref'].nil?
    end

    # OpenAPI schema property types
    # @return [Array[Class]] (or [String] ref name when type is absent)
    def types
      return [Hash] if one_of?
      return [ref] if @type.nil?
      converted_types
    end

    # Whether this property is a oneOf union with at least one $ref branch.
    def one_of?
      @type == 'oneOf' && !@one_of_refs.empty?
    end

    # Ref names of the oneOf branches (empty unless one_of?).
    # @return [Array[String]]
    def one_of_refs
      @one_of_refs
    end

    private

    # OpenAPI schema property types in Ruby
    # @return [Array[Class]]
    def converted_types
      case @type
      when 'string', 'integer', 'array'
        [Object.const_get(@type.capitalize)]
      when 'number'
        [Float]
      when 'boolean'
        [TrueClass, FalseClass]
      when 'object'
        [Hash]
      end
    end
  end
end
|
upscent/openapi2ruby
|
spec/openapi2ruby/openapi_schema_property_spec.rb
|
<gh_stars>10-100
# Specs for Openapi2ruby::Openapi::Schema::Property using the link-example
# fixture; pull_req_schema is the last schema in that document.
require 'spec_helper'
RSpec.describe Openapi2ruby::Openapi::Schema::Property do
  let(:path) { 'spec/fixtures/files/link-example.yaml' }
  let(:content) { YAML.load(File.read(path)) }
  let(:openapi) { Openapi2ruby::Openapi.new(content) }
  let(:pull_req_schema) { openapi.schemas.last }
  let(:properties) { pull_req_schema.properties }
  describe '#ref' do
    subject { property.ref }
    let(:property) { properties.last }
    it { is_expected.to eq 'user' }
  end
  describe '#ref_class' do
    subject { property.ref_class }
    let(:property) { properties.last }
    it { is_expected.to eq 'User' }
  end
  describe '#ref?' do
    subject { property.ref? }
    context 'when property has ref type' do
      let(:property) { properties.last }
      it { is_expected.to be true }
    end
    context 'when property has primitive type' do
      let(:property) { properties.first }
      it { is_expected.to be false }
    end
  end
  describe '#ref_items?' do
    subject { property.ref_items? }
    let(:property) { properties.last }
    it { is_expected.to be false }
  end
  describe '#types' do
    subject { properties.first.types }
    it { is_expected.to eq [Integer] }
  end
end
|
upscent/openapi2ruby
|
spec/openapi2ruby/openapi_spec.rb
|
<reponame>upscent/openapi2ruby<filename>spec/openapi2ruby/openapi_spec.rb
# Smoke spec for Openapi2ruby::Openapi#schemas against the petstore fixture.
require 'spec_helper'
RSpec.describe Openapi2ruby::Openapi do
  subject { Openapi2ruby::Openapi.new(content) }
  let(:path) { 'spec/fixtures/files/petstore.yaml' }
  let(:content) { YAML.load(File.read(path)) }
  describe '#schemas' do
    it 'returns Openapi::Schema array' do
      expect(subject.schemas).to all be_an(Openapi2ruby::Openapi::Schema)
    end
  end
end
|
upscent/openapi2ruby
|
spec/fixtures/files/pet_serializer.rb
|
# Fixture: expected generator output for the petstore Pet schema.
# NOTE(review): if any spec compares this file's bytes against freshly
# generated output, this header comment must also appear in the template.
class PetSerializer < ActiveModel::Serializer
  attributes :id, :name, :tag
  def id
    required_check(:id)
    type_check(:id, [Integer])
    object.id
  end
  def name
    required_check(:name)
    type_check(:name, [String])
    object.name
  end
  def tag
    type_check(:tag, [String])
    object.tag
  end
  private
  def required_check(name)
    raise "Required field is nil. #{name}" if object.send(name).nil?
  end
  def type_check(name, types)
    raise "Field type is invalid. #{name}" unless types.include?(object.send(name).class)
  end
end
|
nicolasblanco/basic_active_model
|
basic_active_model.gemspec
|
<reponame>nicolasblanco/basic_active_model
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{basic_active_model}
  s.version = "1.0.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["slainer68"]
  s.date = %q{2010-09-28}
  s.description = %q{BasicActiveModel provides a minimal architecture for a model that may be used in Rails forms.}
  s.email = %q{<EMAIL>}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.textile"
  ]
  s.files = [
    ".gitignore",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE",
    "README.textile",
    "Rakefile",
    "VERSION",
    "init.rb",
    "lib/basic_active_model.rb",
    "spec/basic_active_model_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.homepage = %q{http://github.com/novagile/basic_active_model}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{BasicActiveModel provides a minimal architecture for a model that may be used in Rails forms.}
  s.test_files = [
    "spec/basic_active_model_spec.rb",
    "spec/spec_helper.rb"
  ]
  # Jeweler-era compatibility shim: pick the dependency API matching the
  # RubyGems version available at install time.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>, [">= 2.0.0.beta.22"])
    else
      s.add_dependency(%q<rspec>, [">= 2.0.0.beta.22"])
    end
  else
    s.add_dependency(%q<rspec>, [">= 2.0.0.beta.22"])
  end
end
|
nicolasblanco/basic_active_model
|
lib/basic_active_model.rb
|
# Minimal non-persisted model usable in Rails form helpers: naming,
# validations, mass-assignment security and conversion, but no storage.
class BasicActiveModel
  extend ActiveModel::Naming
  include ActiveModel::Validations
  include ActiveModel::MassAssignmentSecurity
  include ActiveModel::Conversion

  # Instances are never saved, so always report unpersisted.
  def persisted?
    false
  end

  # Assigns sanitized attributes; unknown keys (no matching writer) and
  # protected attributes are silently ignored.
  # @param attributes [Hash] attribute name (Symbol or String) => value
  def initialize(attributes = {})
    return unless attributes.present?
    sanitize_for_mass_assignment(attributes).each do |key, value|
      writer = "#{key}="
      send(writer, value) if respond_to?(writer)
    end
  end
end
|
nicolasblanco/basic_active_model
|
spec/basic_active_model_spec.rb
|
# Behavioral specs for BasicActiveModel via a sample ContactForm subclass.
require "spec_helper"
class ContactForm < BasicActiveModel
  attr_accessor :email, :subject, :body, :admin
  attr_accessible :email, :subject, :body
  validates_presence_of :body
end
describe 'BasicActiveModel' do
  it 'should initialize without arguments' do
    lambda { ContactForm.new }.should_not raise_error
  end
  it 'should initialize with a hash containing symbols and strings keys and set attributes' do
    contact_form = ContactForm.new(:email => "<EMAIL>", "subject" => "lol")
    contact_form.email.should == "<EMAIL>"
    contact_form.subject.should == "lol"
  end
  it 'should ignore invalid attributes in the hash' do
    contact_form = ContactForm.new(:invalid => "oh noes", :body => "plop")
    contact_form.body.should == "plop"
  end
  # :admin is not attr_accessible, so mass assignment must not set it.
  it 'should have mass assignment security' do
    contact_form = ContactForm.new(:email => "<EMAIL>", :admin => true)
    contact_form.admin.should be_nil
  end
  it "should validate" do
    contact_form = ContactForm.new
    contact_form.should_not be_valid
    contact_form.body = "plop"
    contact_form.should be_valid
  end
  it "should respond to some required methods in order to pass it in a form" do
    contact_form = ContactForm.new
    contact_form.should_not be_persisted
    contact_form.to_key.should be_nil
    contact_form.to_param.should be_nil
  end
end
|
nicolasblanco/basic_active_model
|
init.rb
|
require "basic_active_model"
|
nicolasblanco/basic_active_model
|
spec/spec_helper.rb
|
# Spec bootstrap: debugger, RSpec, ActiveModel and the library under test.
require 'ruby-debug'
require 'rspec'
require "active_model"
require File.dirname(__FILE__) + '/../lib/basic_active_model'
RSpec.configure do |config|
  config.mock_with :rspec
end
|
zauzaj/bookshelf
|
app/utils/errors.rb
|
module App
  module Utils
    module Errors
      extend ActiveSupport::Concern

      # Shared Grape error handling: translates common exceptions into
      # error responses with appropriate HTTP status codes.
      included do
        # Unknown record id -> 404 Not Found.
        rescue_from ActiveRecord::RecordNotFound do |e|
          error_response(message: e.message, status: 404)
        end
        # Failed Grape parameter validation -> 406.
        # NOTE(review): 422 is the more conventional status for validation
        # failures — confirm client expectations before changing.
        rescue_from Grape::Exceptions::ValidationErrors do |e|
          error_response(message: e.message, status: 406)
        end
      end
    end
  end
end
|
zauzaj/bookshelf
|
app/grape_token_auth_setup.rb
|
require_relative 'models/user'

# Configures GrapeTokenAuth: maps the :user scope to the User model and sets
# the token-signing secret.
#
# SECURITY(review): the secret was previously hard-coded in source. It is now
# read from the GTA_SECRET environment variable, falling back to the original
# value so existing deployments keep working — set GTA_SECRET and rotate the
# old secret in production.
GrapeTokenAuth.setup! do |config|
  config.mappings = { user: User }
  config.secret = ENV.fetch(
    'GTA_SECRET',
    '3fe397575565365108556c3e5549f139e8078a8ec8fd2675a83de96289b30550a266ac04488d7086322efbe573738e7b3ae005b2e3d9afd718aa337fa5e329cf'
  )
end
|
zauzaj/bookshelf
|
spec/api_spec.rb
|
<gh_stars>1-10
# Integration specs for App::API using Rack::Test.
# Fix: removed a leftover `binding.pry` debugger call in the Elasticsearch
# 'scala' example — it suspends the suite waiting for console input.
require_relative './spec_helper'
require 'grape'
require_relative '../app/api'
require 'pry-rails'
describe App::API do
  include Rack::Test::Methods
  def app
    App::API
  end
  describe 'Simple test' do
    it 'should return array of 3' do
      expect([1,2,3].length).to eq 3
    end
  end
  context 'Book' do
    describe 'All Books' do
      before do
        get '/books'
      end
      it 'should return status 200' do
        expect(last_response.status).to eq 200
      end
      it 'should return list of books' do
        expect(last_response.body.length > 0).to be_truthy
      end
    end
    describe 'Get Book' do
      it 'should return status 200' do
        get '/books/1'
        expect(last_response.status).to eq 200
      end
      it 'should return status 404 - not found' do
        get '/books/1000'
        expect(last_response.status).to eq 404
      end
    end
    describe 'Create Book' do
      it 'should return status 201' do
        post 'users/1/books', title: "Functional Javascript", description: 'Javascript Great Book'
        expect(last_response.status).to eq 201
      end
      it 'should return 404' do
        post 'users/4/books', title: 'Javascript 2', description: 'Another great JS book'
        expect(last_response.status).to eq 404
      end
    end
  end
  context 'Elasticsearch' do
    describe 'Create new book' do
      it 'should create new book' do
        post 'users/1/books', title: "Functional programming", description: 'Scala Great Book'
        expect(last_response.status).to eq 201
      end
    end
    describe 'Search books' do
      it 'should return all books with Ruby' do
        get "/books/search?q='Ruby'"
        expect(JSON.parse(last_response.body).count > 0).to be_truthy
      end
      it 'should return one book with GO' do
        get "/books/search?q='GO'"
        expect(JSON.parse(last_response.body).count ).to eq 1
      end
      it 'should have one scala book' do
        get "/books/search?q='scala'"
        expect(JSON.parse(last_response.body).count ).to eq 1
      end
      it 'should have two functional books' do
        get "/books/search?q='functional'"
        expect(JSON.parse(last_response.body).count ).to eq 2
      end
    end
  end
end
|
zauzaj/bookshelf
|
config.ru
|
<reponame>zauzaj/bookshelf
# Rack entry point: wires Warden/GrapeTokenAuth middleware and mounts the API.
require 'bundler/setup'
require './app/api'
require 'warden'
require 'grape_token_auth'
Bundler.require
GrapeTokenAuth.setup_warden!(self)
run App::API
|
zauzaj/bookshelf
|
db/seeds.rb
|
<filename>db/seeds.rb
# Seed script: wipes books/users and repopulates with sample authors and
# ~1500 books, then rebuilds the Elasticsearch index.
require_relative '../app/models/book'
require_relative '../app/models/user'
require 'pry-rails'
db_config = YAML::load(IO.read('config/database.yml'))['development']
ActiveRecord::Base.establish_connection(db_config)
#Remove all indexes
#curl -XDELETE 'http://localhost:9200/*'
# DESTRUCTIVE: clears all existing data before seeding.
Book.destroy_all
User.destroy_all
author1 = User.create!(first_name: 'Rus', last_name: 'Olsen', email: '<EMAIL>', password: 'password', password_confirmation: 'password')
250.times do |time|
  author1.books.create!(title: "Eloquent Ruby V#{time}", description: 'Ruby programming')
  author1.books.create!(title: "Ruby Pixic V#{time}", description: 'Advance Ruby')
  author1.books.create!(title: "Rails web apps V#{time}", description: 'Ruby on Rails web applications')
end
author2 = User.create!(first_name: 'Obie', last_name: 'Fernandez', email: '<EMAIL>', password: 'password', password_confirmation: 'password')
250.times do |time|
  author2.books.create!(title: "Learn to program V#{time}", description: 'Ruby for newbies')
  author2.books.create!(title: "Ruby pocket reference V#{time}", description: 'Another ruby book')
  author2.books.create!(title: "Ruby on Rails V#{time}", description: 'Ruby on Rails web applications')
end
author2.books.create!(title: "Go Lang in Action", description: 'Start making concurent web apps with GO')
# Rebuild the Elasticsearch index from scratch (force: true drops it first).
Book.import(force: true)
puts "Final number of books is #{Book.count}"
|
zauzaj/bookshelf
|
app/api.rb
|
<reponame>zauzaj/bookshelf
require 'bundler/setup'
require 'active_record'
require_relative './models/book'
require_relative './models/user'
require_relative './utils/errors'
Bundler.require
db_config = YAML::load(IO.read('config/database.yml'))['development']
ActiveRecord::Base.establish_connection(db_config)
module App
  # JSON REST API for books and per-user book creation.
  # Errors are translated to JSON responses by App::Utils::Errors.
  class API < Grape::API
    include App::Utils::Errors
    format :json
    resource :books do
      desc 'Return all books'
      get '/' do
        Book.all
      end
      desc 'Search all books'
      params do
        requires :q, type: String, desc: 'Query string'
      end
      get '/search' do
        # Fix: dropped a useless local assignment (`books = ...`); the search
        # result is the block's value and is serialized by Grape either way.
        Book.search(params[:q])
      end
      desc 'Return specific book'
      params do
        requires :id, type: Integer, desc: 'Book id'
      end
      route_param :id do
        get do
          # Raises ActiveRecord::RecordNotFound -> 404 via Errors concern.
          Book.find(params[:id])
        end
      end
    end
    resource :users do
      desc 'Create book'
      params do
        requires :title, type: String
        requires :description, type: String
      end
      post ':id/books' do
        author = User.find(params[:id])
        author.books.create title: params[:title], description: params[:description]
      end
    end
  end
end
|
zauzaj/bookshelf
|
spec/spec_helper.rb
|
# Spec bootstrap: coverage must start before application code is loaded.
require 'simplecov'
SimpleCov.start
require 'rspec'
require 'rack/test'
RSpec.configure do |config|
  config.color = true
  config.formatter = :documentation
end
|
zauzaj/bookshelf
|
app/models/book.rb
|
<filename>app/models/book.rb
require 'active_record'
require 'elasticsearch/model'

# A book written by a User (stored in author_id); indexed in Elasticsearch,
# with index updates driven by ActiveRecord callbacks.
class Book < ActiveRecord::Base
  include Elasticsearch::Model
  include Elasticsearch::Model::Callbacks

  belongs_to :user, class_name: 'User', foreign_key: 'author_id'

  # Full-text search over title and description.
  # @param query [String] user-supplied query string
  def self.search(query)
    search_body = {
      query: {
        multi_match: { query: query, fields: %w[title description] }
      }
    }
    __elasticsearch__.search(search_body)
  end
end
|
zauzaj/bookshelf
|
db/schema.rb
|
<reponame>zauzaj/bookshelf<filename>db/schema.rb
# Standalone schema definition (ActiveRecord schema-dump style): creates the
# books and users tables against the development database.
require 'active_record'
require 'yaml'
db_config = YAML::load(IO.read('config/database.yml'))['development']
ActiveRecord::Base.establish_connection(db_config)
ActiveRecord::Schema.define(version: 20161108140517) do
  create_table "books", force: :cascade do |t|
    t.string "title", null: false
    t.text "description"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.integer "author_id"
    t.index ["author_id"], name: "index_books_on_author_id"
  end
  # Columns follow the Devise/GrapeTokenAuth user layout (trackable,
  # confirmable, provider/uid and serialized tokens).
  create_table "users", force: :cascade do |t|
    t.string "first_name", null: false
    t.string "last_name", null: false
    t.string 'email', default: ''
    t.string 'encrypted_password', default: '', null: false
    t.datetime 'remember_created_at'
    t.integer 'sign_in_count', default: 0, null: false
    t.datetime 'current_sign_in_at'
    t.datetime 'last_sign_in_at'
    t.string 'current_sign_in_ip'
    t.string 'last_sign_in_ip'
    t.datetime 'created_at'
    t.datetime 'updated_at'
    t.string 'confirmation_token'
    t.datetime 'confirmed_at'
    t.datetime 'confirmation_sent_at'
    t.string 'provider', default: '', null: false
    t.string 'uid', default: '', null: false
    t.string 'nickname', default: '', null: false
    t.string 'name', default: '', null: false
    t.string 'favorite_color', default: '', null: false
    t.text 'tokens'
  end
end
|
zauzaj/bookshelf
|
app/models/user.rb
|
require 'active_record'
require 'grape_token_auth'
# Application user / book author; token-auth columns are managed by
# GrapeTokenAuth. Books reference this model through author_id.
class User < ActiveRecord::Base
  include GrapeTokenAuth::ActiveRecord::TokenAuth
  has_many :books, foreign_key: "author_id"
end
|
yanotec/yano-backbone-rails
|
lib/yano-backbone-rails.rb
|
<reponame>yanotec/yano-backbone-rails<filename>lib/yano-backbone-rails.rb
# Gem entry point: pulls in the jQuery and Handlebars asset gems, then the
# engine/version files.
require 'yano-jquery-rails'
require 'handlebars_assets'
require 'yano/backbone/rails'
|
yanotec/yano-backbone-rails
|
lib/yano/backbone/rails/version.rb
|
<gh_stars>0
module Yano
  module Backbone
    module Rails
      # Gem version.
      VERSION = "2.2.2"

      # Pinned versions of the vendored JavaScript libraries.
      UNDERSCORE_VERSION = "1.8.3"
      BACKBONE_VERSION   = "1.3.3"
      MARIONETTE_VERSION = "3.1.0"
      RADIO_VERSION      = "2.0.0"
    end
  end
end
|
yanotec/yano-backbone-rails
|
lib/yano/backbone/rails.rb
|
<gh_stars>0
# Namespace module: loads the Rails engine and version constants.
require "yano/backbone/rails/engine"
require "yano/backbone/rails/version"
module Yano
  module Backbone
    module Rails
    end
  end
end
|
NizarBerjawi/countries-api
|
.docker/Dockerfile.builder
|
##########################
## Install PHP packages ##
##########################
FROM composer:2 as composer
WORKDIR /app
COPY composer.* /app/
RUN composer install \
    --no-dev \
    --no-scripts \
    --no-suggest \
    --no-interaction \
    --prefer-dist \
    --optimize-autoloader
COPY . .
RUN composer dump-autoload \
    --no-dev \
    --optimize \
    --classmap-authoritative
###########################
## Generate OpenAPI Spec ##
###########################
FROM php:8-fpm-alpine3.13 as documentation
# Because .env file variables are not available during
# build, we have to explicitly set APP_URL environment
# variable to be used when generating the spec
# NOTE(review): `ARG APP_URL ${APP_URL}` is non-standard — the classic
# builder expects `ARG <name>[=<default>]`; confirm this parses as intended
# (likely meant plain `ARG APP_URL`).
ARG APP_URL ${APP_URL}
ENV APP_URL ${APP_URL}
WORKDIR /app
COPY . .
COPY --from=composer /app/vendor ./vendor
RUN php artisan docs:generate
#############################
## Bundle front-end assets ##
#############################
FROM node:14-alpine3.13 as builder
WORKDIR /app
COPY ./package.json ./package-lock.json /app/
COPY ./resources/src /app/resources/src
COPY ./webpack.config.js /app/webpack.config.js
# vendor/ and the generated spec are needed by the front-end build.
COPY --from=composer /app/vendor ./vendor
COPY --from=documentation /app/public/openApi.json ./public/openApi.json
RUN npm ci --verbose
RUN npm run build
|
rafaelrosafu/dm-more
|
dm-types/spec/integration/ip_address_spec.rb
|
<reponame>rafaelrosafu/dm-more<filename>dm-types/spec/integration/ip_address_spec.rb<gh_stars>1-10
# Integration spec for the DataMapper IPAddress custom type: round-trips a
# dotted-quad string through the database and checks typecasting.
require 'pathname'
require Pathname(__FILE__).dirname.parent.expand_path + 'spec_helper'
describe DataMapper::Types::IPAddress do
  before(:all) do
    class ::IPAddressTest
      include DataMapper::Resource
      property :id, Serial
      property :ip, IPAddress
    end
    IPAddressTest.auto_migrate!
  end
  it "should work" do
    DataMapper.repository(:default) do
      IPAddressTest.create(:ip => '127.0.0.1')
    end
    IPAddressTest.first.ip.should == IPAddr.new('127.0.0.1')
  end
  it 'should immediately typecast supplied values' do
    IPAddressTest.new(:ip => '10.0.0.1').ip.should == IPAddr.new('10.0.0.1')
  end
end
|
rafaelrosafu/dm-more
|
dm-validations/lib/dm-validations/uniqueness_validator.rb
|
<reponame>rafaelrosafu/dm-more
module DataMapper
  module Validate
    ##
    # Validates that a field's value is unique, optionally scoped to other
    # properties or relationships.
    #
    # @author <NAME>
    # @since 0.9
    class UniquenessValidator < GenericValidator
      include Extlib::Assertions
      def initialize(field_name, options = {})
        assert_kind_of 'scope', options[:scope], Array, Symbol if options.has_key?(:scope)
        super
        @field_name, @options = field_name, options
        # nil values pass by default; presence is a separate validator's job.
        @options[:allow_nil] = true unless @options.include?(:allow_nil)
      end
      def call(target)
        scope = Array(@options[:scope])
        return true if @options[:allow_nil] && target.send(field_name).blank?
        repository_name = target.repository.name
        # Fetch only the key of any clashing record; no full load needed.
        opts = {
          :fields => target.model.key,
          field_name => target.validation_property_value(field_name),
        }
        # Narrow the lookup by each scope entry: plain properties directly,
        # relationships via their foreign-key properties.
        scope.each do |item|
          if target.model.properties(repository_name).named?(item)
            opts[item] = target.validation_property_value(item)
          elsif target.model.relationships(repository_name).has_key?(item)
            target.validation_association_keys(item).each do |key|
              opts[key] = target.validation_property_value(key)
            end
          end
        end
        resource = DataMapper.repository(repository_name) { target.model.first(opts) }
        return true if resource.nil?
        # A saved record matching itself is not a uniqueness violation.
        return true if target.saved? && resource == target
        error_message = @options[:message] || ValidationErrors.default_error_message(:taken, field_name, {:target => self})
        add_error(target, error_message, field_name)
        false
      end
    end # class UniquenessValidator
    module ValidatesIsUnique
      # Validate the uniqueness of a field
      #
      def validates_is_unique(*fields)
        opts = opts_from_validator_args(fields)
        add_validator_to_context(opts, fields, DataMapper::Validate::UniquenessValidator)
      end
    end # module ValidatesIsUnique
  end # module Validate
end # module DataMapper
|
rafaelrosafu/dm-more
|
dm-validations/lib/dm-validations/validation_errors_i18n.rb
|
# Ensure I18n presence
unless defined?(I18n) then
  require 'rubygems'
  gem 'i18n'
  require 'i18n'
end
module DataMapper
  module Validate
    # Monkey-patches ValidationErrors.default_error_message to look up
    # messages and field names through I18n, falling back to the original
    # implementation when no :target is supplied.
    class ValidationErrors
      class << self
        # Keep the pre-patch implementation reachable as the fallback.
        alias_method :original_default_error_message, :default_error_message
        @@translations_loaded_to_i18n = false
      end
      def self.default_error_message(key, field, *values)
        # A trailing Hash in *values carries options such as :target.
        extra = values.last.is_a?(::Hash) ? values.pop : {}
        if extra[:target] and defined?(I18n) then
          load_default_translations
          klass = Extlib::Inflection.underscore(extra[:target].class.to_s)
          # Translate the field name first (model-specific, then defaults)...
          translated_field = find_translation( field, [
            ["models.#{klass}.properties"],
            ["models._default.properties"],
            ["data_mapper.models.#{klass}.properties"],
            ["data_mapper.models._default.properties"]
          ])
          # ...then the message, interpolating the translated field name.
          return find_translation( key, [
            ["data_mapper.errors.#{klass}.properties.#{field}"],
            ["data_mapper.errors.#{klass}"],
            ["data_mapper.errors.messages"]
          ], {:field => translated_field}.merge(extra) )
        else
          original_default_error_message key, field, values
        end
      end
      private
      # Tries each scope in order; returns the first hit, or the key itself
      # when no scope has a translation.
      def self.find_translation(field, scopes, options = {})
        result = nil
        scopes.each {|scope|
          begin
            result = I18n.translate field, options.merge({ :raise => true, :scope => scope })
            break
          rescue I18n::MissingTranslationData
            next
          end
        }
        result || field
      end
      # Prepends the gem's bundled locale files to I18n's load path (once).
      def self.load_default_translations
        return if @@translations_loaded_to_i18n
        I18n.load_path.insert(0, Dir[File.join(File.dirname(__FILE__), 'locale', '*.{rb,yml}')]).flatten!
        I18n.reload! if I18n.backend.initialized?
        @@translations_loaded_to_i18n = true
      end
    end
  end
end
|
rafaelrosafu/dm-more
|
adapters/dm-rest-adapter/lib/rest_adapter/adapter.rb
|
<gh_stars>1-10
module DataMapperRest
  # TODO: Abstract XML support out from the protocol
  # TODO: Build JSON support
  # All http_"verb" (http_post) method calls use method missing in connection class which uses run_verb
  #
  # REST adapter: maps DataMapper CRUD operations onto HTTP verbs against a
  # remote service, (de)serializing resources as XML via REXML.
  #
  # Fix applied: removed a dead local (`resources_meta = nil`) from #read_one
  # that was assigned and never read.
  class Adapter < DataMapper::Adapters::AbstractAdapter
    include Extlib

    # Lazily-built HTTP connection for the configured URI and format.
    def connection
      @connection ||= Connection.new(normalized_uri, @format)
    end

    # Creates a new resource in the specified repository.
    # TODO: map all remote resource attributes to this resource
    # @return [Integer] number of resources created
    def create(resources)
      created = 0
      resources.each do |resource|
        response = connection.http_post(resource_name(resource), resource.to_xml)
        populate_resource_from_xml(response.body, resource)
        created += 1
      end
      created
    end

    # read_set
    #
    # Examples of query string:
    # A. []
    #    GET /books/
    #
    # B. [[:eql, #<Property:Book:id>, 4200]]
    #    GET /books/4200
    #
    # IN PROGRESS
    # TODO: Need to account for query.conditions (i.e., [[:eql, #<Property:Book:id>, 1]] for books/1)
    def read_many(query)
      model = query.model
      resource_name = Inflection.underscore(query.model.name)
      resources_meta = case query.conditions
        when [] then read_set_all(query, resource_name)
        else read_set_for_condition(query, resource_name)
      end
      resources_meta.map do |resource_meta|
        if resource_meta.has_key?(:associations)
          load_nested_resources_from resource_meta[:associations], query
        end
        model.load(resource_meta[:values], query)
      end
    end

    # Fetches a single resource: either the first of a full listing (when
    # there are no conditions and limit == 1) or GET /<plural>/<id>.
    def read_one(query)
      resource = nil
      resource_name = resource_name_from_query(query)
      if query.conditions.empty? && query.limit == 1
        results = read_set_all(query, resource_name)
        resource_meta = results.first unless results.empty?
      else
        id = query.conditions.first[2]
        # KLUGE: Again, we're assuming below that we're dealing with a pluralized resource mapping
        response = connection.http_get("/#{resource_name.pluralize}/#{id}")
        data = response.body
        resource_meta = parse_resource(data, query.model, query)
      end
      if resource_meta
        if resource_meta.has_key?(:associations)
          load_nested_resources_from resource_meta[:associations], query
        end
        resource = query.model.load(resource_meta[:values], query)
      end
      resource
    end

    # PUTs the updated attributes for a single resource.
    # @return [Integer] 1 on HTTP success, 0 otherwise
    def update(attributes, query)
      # TODO What if we have a compound key?
      raise NotImplementedError.new unless is_single_resource_query? query
      id = query.conditions.first[2]
      resource = nil
      query.repository.scope do
        resource = query.model.get(id)
      end
      attributes.each do |attr, val|
        resource.send("#{attr.name}=", val)
      end
      # KLUGE: Again, we're assuming below that we're dealing with a pluralized resource mapping
      res = connection.http_put("/#{resource_name_from_query(query).pluralize}/#{id}", resource.to_xml)
      # TODO: Raise error if cannot reach server
      res.kind_of?(Net::HTTPSuccess) ? 1 : 0
    end

    # DELETEs a single resource by id.
    # @return [Integer] 1 on HTTP success, 0 otherwise
    def delete(query)
      raise NotImplementedError.new unless is_single_resource_query? query
      id = query.conditions.first[2]
      res = connection.http_delete("/#{resource_name_from_query(query).pluralize}/#{id}")
      res.kind_of?(Net::HTTPSuccess) ? 1 : 0
    end

    protected

    def initialize(*)
      super
      # Serialization format; only 'xml' is implemented today.
      @format = @options.fetch(:format, 'xml')
    end

    # Builds (and memoizes) the base URI from adapter options; everything not
    # a URI component becomes the query string.
    def normalized_uri
      @normalized_uri ||=
        begin
          query = @options.except(:adapter, :user, :password, :host, :port, :path, :fragment)
          query = nil if query.empty?
          Addressable::URI.new(
            :scheme       => 'http',
            :user         => @options[:user],
            :password     => @options[:password],
            :host         => @options[:host],
            :port         => @options[:port],
            :path         => @options[:path],
            :query_values => query,
            :fragment     => @options[:fragment]
          ).freeze
        end
    end

    # Currently a stub: nested association loading is unresolved (see TODO).
    def load_nested_resources_from(nested_resources, query)
      nested_resources.each do |resource_meta|
        # TODO: Houston, we have a problem. Model#load expects a Query. When we're nested, we don't have a query yet...
        #resource_meta[:model].load(resource_meta[:values])
        #if resource_meta.has_key? :associations
        #  load_nested_resources_from resource_meta, query
        #end
      end
    end

    def read_set_all(query, resource_name)
      # TODO: how do we know whether the resource we're talking to is singular or plural?
      res = connection.http_get("#{resource_name.pluralize}")
      data = res.body
      parse_resources(data, query.model, query)
      # TODO: Raise error if cannot reach server
    end

    # GET /books/4200
    def read_set_for_condition(query, resource_name)
      # More complex conditions
      raise NotImplementedError.new
    end

    # query.conditions like [[:eql, #<Property:Book:id>, 4200]]
    def is_single_resource_query?(query)
      query.conditions.length == 1 && query.conditions.first.first == :eql && query.conditions.first[1].name == :id
    end

    # Walks one resource element, splitting child elements into property
    # values (:values, in property order) and nested associations.
    def values_from_rexml(entity_element, dm_model_class)
      resource = {}
      resource[:values] = []
      entity_element.elements.each do |field_element|
        # XML uses dashes where DataMapper property names use underscores.
        attribute = dm_model_class.properties(name).find do |property|
          property.name.to_s == field_element.name.to_s.tr('-', '_')
        end
        if attribute
          resource[:values] << field_element.text
          next
        end
        association = dm_model_class.relationships.find do |name, dm_relationship|
          field_element.name.to_s == Inflection.pluralize(Inflection.underscore(dm_relationship.child_model.to_s))
        end
        if association
          field_element.each_element do |associated_element|
            model = association[1].child_model
            (resource[:associations] ||= []) << {
              :model => model,
              :value => values_from_rexml(associated_element, association[1].child_model)
            }
          end
        end
      end
      resource
    end

    def parse_resource(xml, dm_model_class, query = nil)
      doc = REXML::Document::new(xml)
      # TODO: handle singular resource case as well....
      entity_element = REXML::XPath.first(doc, "/#{resource_name_from_model(dm_model_class)}")
      return nil unless entity_element
      values_from_rexml(entity_element, dm_model_class)
    end

    def parse_resources(xml, dm_model_class, query = nil)
      doc = REXML::Document::new(xml)
      # # TODO: handle singular resource case as well....
      # array = XPath(doc, "/*[@type='array']")
      # if array
      #   parse_resources()
      # else
      resource_name = resource_name_from_model dm_model_class
      doc.elements.collect("#{resource_name.pluralize}/#{resource_name}") do |entity_element|
        values_from_rexml(entity_element, dm_model_class)
      end
    end

    def resource_name_from_model(model)
      Inflection.underscore(model.name)
    end

    def resource_name(resource)
      Inflection.underscore(resource.class.name).pluralize
    end

    def resource_name_from_query(query)
      resource_name_from_model(query.model)
    end

    # Copies matching XML fields back onto the just-created resource.
    def populate_resource_from_xml(xml, resource)
      doc = REXML::Document::new(xml)
      entity_element = REXML::XPath.first(doc, "/#{resource_name_from_model(resource.class)}")
      raise "No root element matching #{resource_name_from_model(resource.class)} in xml" unless entity_element
      entity_element.elements.each do |field_element|
        attribute = resource.class.properties(name).find { |property| property.name.to_s == field_element.name.to_s.tr('-', '_') }
        resource.send("#{attribute.name.to_s}=", field_element.text) if attribute && !field_element.text.nil?
        # TODO: add association saving
      end
      resource
    end
  end
end
|
rafaelrosafu/dm-more
|
adapters/dm-rest-adapter/lib/rest_adapter.rb
|
<gh_stars>1-10
# Loader: pins dm-core, pulls in runtime dependencies and the adapter parts,
# then registers the adapter under DataMapper::Adapters::RestAdapter.
$:.push File.expand_path(File.dirname(__FILE__))
gem 'dm-core', '0.10.0'
require 'dm-core'
require 'extlib'
require 'pathname'
require 'rexml/document'
require 'rubygems'
require 'addressable/uri'
require 'dm-serializer'
require 'rest_adapter/version'
require 'rest_adapter/adapter'
require 'rest_adapter/connection'
require 'rest_adapter/formats'
require 'rest_adapter/exceptions'
DataMapper::Adapters::RestAdapter = DataMapperRest::Adapter
|
rafaelrosafu/dm-more
|
dm-serializer/spec/public/to_json_spec.rb
|
# Specs for DataMapper::Serialize#to_json, including the :only option on a
# hand-built collection of Cow resources.
require 'pathname'
require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper'
describe DataMapper::Serialize, '#to_json' do
  #
  # ==== ajaxy JSON
  #
  before(:all) do
    DataMapper.auto_migrate!
    query = DataMapper::Query.new(DataMapper::repository(:default), Cow)
    resources = [
      [ 1,  2,  'Betsy', 'Jersey'   ],
      [ 10, 20, 'Berta', 'Guernsey' ],
    ]
    @collection = DataMapper::Collection.new(query, resources.map { |r| query.model.load(r, query) })
    # Harness pins the serialization method and how to read its output back.
    @harness = Class.new(SerializerTestHarness) do
      def method_name
        :to_json
      end
      protected
      def deserialize(result)
        JSON.parse(result)
      end
    end.new
  end
  it_should_behave_like "A serialization method"
  it_should_behave_like 'A serialization method that also serializes core classes'
  # With :only => [:composite], every other attribute must be omitted.
  it "handles options given to a collection properly" do
    deserialized_collection = JSON.parse(@collection.to_json(:only => [:composite]))
    betsy = deserialized_collection.first
    berta = deserialized_collection.last
    betsy["id"].should be_nil
    betsy["composite"].should == 2
    betsy["name"].should be_nil
    betsy["breed"].should be_nil
    berta["id"].should be_nil
    berta["composite"].should == 20
    berta["name"].should be_nil
    berta["breed"].should be_nil
  end
  # Pending examples (no bodies yet):
  it "supports :include option for one level depth"
  it "supports :include option for more than one level depth"
  it "has :repository option to override used repository"
end
|
rafaelrosafu/dm-more
|
dm-validations/spec/integration/numeric_validator_spec.rb
|
<gh_stars>1-10
require 'pathname'
require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper'
# Fixture resource: auto-validation is disabled so that only the explicit
# validates_is_number declaration below is exercised.
class Bill # :nodoc:
  include DataMapper::Resource
  property :id, Serial
  property :amount_1, String, :auto_validation => false
  property :amount_2, Float, :auto_validation => false
  validates_is_number :amount_1, :amount_2
end
# Fixture resource whose default value (0.01) should satisfy the numeric
# validation without anything being assigned.
class Hillary # :nodoc:
  include DataMapper::Resource
  property :id, Serial
  property :amount_1, Float, :auto_validation => false, :default => 0.01
  validates_is_number :amount_1
end
describe DataMapper::Validate::NumericValidator do
  it "should validate a floating point value on the instance of a resource" do
    b = Bill.new
    b.should_not be_valid
    b.errors.on(:amount_1).should include('Amount 1 must be a number')
    b.errors.on(:amount_2).should include('Amount 2 must be a number')
    b.amount_1 = 'ABC'
    b.amount_2 = 27.343
    b.should_not be_valid
    b.errors.on(:amount_1).should include('Amount 1 must be a number')
    b.amount_1 = '34.33'
    b.should be_valid
  end

  it "should validate an integer value on the instance of a resource" do
    # Reopen the fixture class: discard its validators and install
    # integer-only numericality checks for this example.
    class ::Bill
      property :quantity_1, String, :auto_validation => false
      property :quantity_2, Integer, :auto_validation => false
      validators.clear!
      validates_is_number :quantity_1, :quantity_2, :integer_only => true
    end
    b = Bill.new
    b.valid?.should_not == true
    b.errors.on(:quantity_1).should include('Quantity 1 must be an integer')
    b.errors.on(:quantity_2).should include('Quantity 2 must be an integer')
    b.quantity_1 = '12.334'
    b.quantity_2 = 27.343
    b.valid?.should_not == true
    b.errors.on(:quantity_1).should include('Quantity 1 must be an integer')
    pending 'dm-core truncates float to integer' do
      # FIXME: The next line should pass, but :quantity_2 has no errors. This is
      # because 27.343 has been truncated to 27 by the time it reaches the
      # validation. Is this a bug?
      b.errors.on(:quantity_2).should include('Quantity 2 must be an integer')
    end
    b.quantity_1 = '34.33'
    b.quantity_2 = 22
    b.valid?.should_not == true
    b.errors.on(:quantity_1).should include('Quantity 1 must be an integer')
    b.quantity_1 = '34'
    b.valid?.should == true
  end

  it "should validate if a default fufills the requirements" do
    h = Hillary.new
    h.should be_valid
  end

  describe 'auto validation' do
    before :all do
      class ::Fish
        include DataMapper::Resource
        property :id, Serial
        property :scales, Integer
      end
    end

    describe 'Float' do
      describe 'with default precision and scale' do
        before :all do
          class ::CloudFish < Fish
            property :average_weight, Float
          end
        end

        before do
          @cloud_fish = CloudFish.new
        end

        it 'should allow up to 10 digits before the decimal' do
          @cloud_fish.average_weight = 0
          @cloud_fish.should be_valid
          @cloud_fish.average_weight = 9_999_999_999
          @cloud_fish.should be_valid
          @cloud_fish.average_weight = 10_000_000_000
          @cloud_fish.should_not be_valid
        end

        it 'should allow 0 digits after the decimal' do
          @cloud_fish.average_weight = 0
          @cloud_fish.should be_valid
        end

        it 'should allow any digits after the decimal' do
          @cloud_fish.average_weight = 1.2
          @cloud_fish.should be_valid
          @cloud_fish.average_weight = 123.456
          @cloud_fish.should be_valid
        end

        it "should only allow up to 10 digits overall" do
          @cloud_fish.average_weight = 1.234567890
          @cloud_fish.should be_valid
          @cloud_fish.average_weight = 1.2345678901
          @cloud_fish.should_not be_valid
        end
      end

      # NOTE(review): the heading reads "scaleof 0" (missing space) in the
      # original; it describes an explicit :scale => 0.
      describe 'with default precision and scaleof 0' do
        before :all do
          class ::RobotFish < Fish
            property :average_weight, Float, :scale => 0
          end
        end

        before do
          @robot_fish = RobotFish.new
        end

        it 'should allow up to 10 digits before the decimal' do
          @robot_fish.average_weight = 0
          @robot_fish.should be_valid
          @robot_fish.average_weight = 9_999_999_999
          @robot_fish.should be_valid
          @robot_fish.average_weight = 10_000_000_000
          @robot_fish.should_not be_valid
        end

        it 'should allow 0 digits after the decimal' do
          @robot_fish.average_weight = 0
          @robot_fish.should be_valid
        end

        it 'should allow 1 digit after the decimal if it is a zero' do
          @robot_fish.average_weight = 0.0
          @robot_fish.should be_valid
          @robot_fish.average_weight = 9_999_999_999.0
          @robot_fish.should be_valid
          @robot_fish.average_weight = 0.1
          @robot_fish.should_not be_valid
        end
      end

      describe 'with a precision of 4 and a scale of 2' do
        before :all do
          class ::GoldFish < Fish
            property :average_weight, Float, :precision => 4, :scale => 2
          end
        end

        before do
          @gold_fish = GoldFish.new
        end

        it "should have scale of 2" do
          @gold_fish.model.average_weight.scale.should == 2
        end

        it 'should allow up to 2 digits before the decimal' do
          @gold_fish.average_weight = 0
          @gold_fish.should be_valid
          @gold_fish.average_weight = 99
          @gold_fish.should be_valid
          @gold_fish.average_weight = -99
          @gold_fish.should be_valid
          @gold_fish.average_weight = 100
          @gold_fish.should_not be_valid
          @gold_fish.average_weight = -100
          @gold_fish.should_not be_valid
        end

        it 'should allow 2 digits after the decimal' do
          @gold_fish.average_weight = 99.99
          @gold_fish.should be_valid
          @gold_fish.average_weight = -99.99
          @gold_fish.should be_valid
          @gold_fish.average_weight = 99.999
          @gold_fish.should_not be_valid
          @gold_fish.average_weight = -99.999
          @gold_fish.should_not be_valid
        end
      end

      describe 'with a precision of 2 and a scale of 2' do
        before :all do
          class ::SilverFish < Fish
            property :average_weight, Float, :precision => 2, :scale => 2
          end
        end

        before do
          @silver_fish = SilverFish.new
        end

        it 'should allow a 0 before the decimal' do
          @silver_fish.average_weight = 0
          @silver_fish.should be_valid
          @silver_fish.average_weight = 0.1
          @silver_fish.should be_valid
          @silver_fish.average_weight = -0.1
          @silver_fish.should be_valid
          @silver_fish.average_weight = 1
          @silver_fish.should_not be_valid
          @silver_fish.average_weight = -1
          @silver_fish.should_not be_valid
        end

        it 'should allow 2 digits after the decimal' do
          @silver_fish.average_weight = 0.99
          @silver_fish.should be_valid
          @silver_fish.average_weight = -0.99
          @silver_fish.should be_valid
          @silver_fish.average_weight = 0.999
          @silver_fish.should_not be_valid
          @silver_fish.average_weight = -0.999
          @silver_fish.should_not be_valid
        end
      end
    end
  end
end
|
rafaelrosafu/dm-more
|
dm-aggregates/lib/dm-aggregates/adapters/data_objects_adapter.rb
|
<reponame>rafaelrosafu/dm-more<filename>dm-aggregates/lib/dm-aggregates/adapters/data_objects_adapter.rb
module DataMapper
module Adapters
# Adds aggregate-function support (COUNT/MIN/MAX/AVG/SUM) to the shared
# DataObjects SQL adapter.
class DataObjectsAdapter < AbstractAdapter
  # Executes the aggregate SELECT described by +query+, typecasting each
  # returned column via the matching operator helper below.
  #
  # Returns an Array: one value per row when a single field was requested,
  # otherwise an Array of row Arrays.
  def aggregate(query)
    # with_reader(select_statement(query), query.bind_values) do |reader|
    #   results = []
    #
    #   while(reader.next!) do
    #     row = query.fields.zip(reader.values).map do |field,value|
    #       if field.respond_to?(:operator)
    #         send(field.operator, field.target, value)
    #       else
    #         field.typecast(value)
    #       end
    #     end
    #
    #     results << (query.fields.size > 1 ? row : row[0])
    #   end
    #
    #   results
    # end
    with_connection do |connection|
      command = connection.create_command(select_statement(query))
      command.set_types(query.fields.map { |p| p.primitive })
      begin
        reader = command.execute_reader(*query.bind_values)
        # NOTE(review): +model+ is assigned but never used below.
        model = query.model
        results = []
        while(reader.next!)
          row = query.fields.zip(reader.values).map do |field,value|
            if field.respond_to?(:operator)
              # Operator fields (count/min/max/avg/sum) are typecast by
              # the private helpers defined below.
              send(field.operator, field.target, value)
            else
              field.typecast(value)
            end
          end
          results << (query.fields.size > 1 ? row : row[0])
        end
        results
      ensure
        # Always release the DataObjects reader, even when iteration raises.
        reader.close if reader
      end
    end
  end

  private

  # COUNT always yields an Integer regardless of the counted property.
  def count(property, value)
    value.to_i
  end

  def min(property, value)
    property.typecast(value)
  end

  def max(property, value)
    property.typecast(value)
  end

  # AVG of an Integer column is fractional, so force a Float in that case.
  def avg(property, value)
    property.type == Integer ? value.to_f : property.typecast(value)
  end

  def sum(property, value)
    property.typecast(value)
  end

  module SQL
    private

    # FIXME Does not find the original method with dm-core 0.10.0 even though it seems to be there
    alias original_property_to_column_name property_to_column_name

    # Extends column-name generation so that aggregate operators render as
    # SQL function calls; everything else is delegated to the original.
    def property_to_column_name(property, qualify)
      case property
      when Query::Operator
        aggregate_field_statement(property.operator, property.target, qualify)
      when Property, Query::Path
        original_property_to_column_name(property, qualify)
      else
        raise ArgumentError, "+property+ must be a DataMapper::Query::Operator, a DataMapper::Property or a Query::Path, but was a #{property.class} (#{property.inspect})"
      end
    end

    # Renders e.g. COUNT(*) or SUM("orders"."total") for the operator.
    def aggregate_field_statement(aggregate_function, property, qualify)
      column_name = if aggregate_function == :count && property == :all
        '*'
      else
        property_to_column_name(property, qualify)
      end

      function_name = case aggregate_function
        when :count then 'COUNT'
        when :min then 'MIN'
        when :max then 'MAX'
        when :avg then 'AVG'
        when :sum then 'SUM'
        else raise "Invalid aggregate function: #{aggregate_function.inspect}"
      end

      "#{function_name}(#{column_name})"
    end
  end # module SQL

  include SQL
end # class DataObjectsAdapter
end # module Adapters
end # module DataMapper
|
rafaelrosafu/dm-more
|
dm-serializer/spec/lib/serialization_method_shared_spec.rb
|
require 'pathname'
require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper'
share_examples_for 'A serialization method that also serializes core classes' do
  # This spec ensures that we don't break any serialization methods attached
  # to core classes, such as Array
  before(:all) do
    # The including spec must provide a @harness that knows how to invoke
    # the serialization method under test and deserialize its output.
    %w[ @harness ].each do |ivar|
      raise "+#{ivar}+ should be defined in before block" unless instance_variable_get(ivar)
    end
    DataMapper.auto_migrate!
  end

  before(:each) do
    # Start every example from an empty database.
    Cow.all.destroy!
    Planet.all.destroy!
    FriendedPlanet.all.destroy!
  end

  it 'serializes an array of extended objects' do
    Cow.create(
      :id => 89,
      :composite => 34,
      :name => 'Berta',
      :breed => 'Guernsey'
    )
    result = @harness.test(Cow.all.to_a)
    result[0].values_at("id", "composite", "name", "breed").should ==
      [89, 34, "Berta", "Guernsey"]
  end

  it 'serializes an array of collections' do
    query = DataMapper::Query.new(DataMapper::repository(:default), Cow)
    resources = [
      [ 1, 2, 'Betsy', 'Jersey' ],
      [ 89, 34, 'Berta', 'Guernsey' ],
    ]
    collection = DataMapper::Collection.new(query, resources.map { |r| query.model.load(r, query) })
    result = @harness.test(collection)
    result[0].values_at("id", "composite", "name", "breed").should == resources[0]
    result[1].values_at("id", "composite", "name", "breed").should == resources[1]
  end
end
share_examples_for 'A serialization method' do
  before(:all) do
    # The including spec must provide a @harness (see SerializerTestHarness).
    %w[ @harness ].each do |ivar|
      raise "+#{ivar}+ should be defined in before block" unless instance_variable_get(ivar)
    end
    DataMapper.auto_migrate!
  end

  before(:each) do
    Cow.all.destroy!
    Planet.all.destroy!
    FriendedPlanet.all.destroy!
  end

  describe '(serializing single resources)' do
    it 'should serialize Model.first' do
      # At the moment this is implied by serializing a resource, but this
      # test ensures the contract even if dm-core changes
      Cow.create(
        :id => 89,
        :composite => 34,
        :name => 'Berta',
        :breed => 'Guernsey'
      )
      result = @harness.test(Cow.first)
      result.values_at("name", "breed").should == ["Berta", "Guernsey"]
    end

    it 'should serialize a resource' do
      cow = Cow.new(
        :id => 89,
        :composite => 34,
        :name => 'Berta',
        :breed => 'Guernsey'
      )
      result = @harness.test(cow)
      result.values_at("id", "composite", "name", "breed").should == [89, 34, 'Berta', 'Guernsey']
    end

    it 'should exclude nil properties' do
      cow = Cow.new(
        :id => 89,
        :name => nil
      )
      result = @harness.test(cow)
      result.values_at("id", "composite").should == [89, nil]
    end

    it "should only includes properties given to :only option" do
      planet = Planet.new(
        :name => "Mars",
        :aphelion => 249_209_300.4
      )
      result = @harness.test(planet, :only => [:name])
      result.values_at("name", "aphelion").should == ["Mars", nil]
    end

    it "should serialize values returned by methods given to :methods option" do
      planet = Planet.new(
        :name => "Mars",
        :aphelion => 249_209_300.4
      )
      result = @harness.test(planet, :methods => [:category, :has_known_form_of_life?])
      # XML currently can't serialize ? at the end of method names
      boolean_method_name = @harness.method_name == :to_xml ? "has_known_form_of_life" : "has_known_form_of_life?"
      result.values_at("category", boolean_method_name).should == ["terrestrial", false]
    end

    # NOTE(review): duplicates the ":only option" example above, modulo
    # wording of the description.
    it "should only include properties given to :only option" do
      planet = Planet.new(
        :name => "Mars",
        :aphelion => 249_209_300.4
      )
      result = @harness.test(planet, :only => [:name])
      result.values_at("name", "aphelion").should == ["Mars", nil]
    end

    it "should exclude properties given to :exclude option" do
      planet = Planet.new(
        :name => "Mars",
        :aphelion => 249_209_300.4
      )
      result = @harness.test(planet, :exclude => [:aphelion])
      result.values_at("name", "aphelion").should == ["Mars", nil]
    end

    it "should give higher precendence to :only option over :exclude" do
      planet = Planet.new(
        :name => "Mars",
        :aphelion => 249_209_300.4
      )
      result = @harness.test(planet, :only => [:name], :exclude => [:name])
      result.values_at("name", "aphelion").should == ["Mars", nil]
    end
  end

  describe "(collections and proxies)" do
    it 'should serialize Model.all' do
      # At the moment this is implied by serializing a collection, but this
      # test ensures the contract even if dm-core changes
      Cow.create(
        :id => 89,
        :composite => 34,
        :name => 'Berta',
        :breed => 'Guernsey'
      )
      result = @harness.test(Cow.all)
      result[0].values_at("name", "breed").should == ["Berta", "Guernsey"]
    end

    it 'should serialize a collection' do
      query = DataMapper::Query.new(DataMapper::repository(:default), Cow)
      resources = [
        [ 1, 2, 'Betsy', 'Jersey' ],
        [ 10, 20, 'Berta', 'Guernsey' ],
      ]
      collection = DataMapper::Collection.new(query, resources.map { |r| query.model.load(r, query) })
      result = @harness.test(collection)
      result[0].values_at("id", "composite", "name", "breed").should == resources[0]
      result[1].values_at("id", "composite", "name", "breed").should == resources[1]
    end

    it 'should serialize an empty collection' do
      query = DataMapper::Query.new(DataMapper::repository(:default), Cow)
      collection = DataMapper::Collection.new(query)
      result = @harness.test(collection)
      result.should be_empty
    end

    it "serializes a one to many relationship" do
      parent = Cow.new(:id => 1, :composite => 322, :name => "Harry", :breed => "Angus")
      baby = Cow.new(:mother_cow => parent, :id => 2, :composite => 321, :name => "Felix", :breed => "Angus")
      parent.save
      baby.save
      result = @harness.test(parent.baby_cows)
      result.should be_kind_of(Array)
      result[0].values_at(*%w{id composite name breed}).should == [2, 321, "Felix", "Angus"]
    end

    it "serializes a many to one relationship" do
      parent = Cow.new(:id => 1, :composite => 322, :name => "Harry", :breed => "Angus")
      baby = Cow.new(:mother_cow => parent, :id => 2, :composite => 321, :name => "Felix", :breed => "Angus")
      parent.save
      baby.save
      result = @harness.test(baby.mother_cow)
      result.should be_kind_of(Hash)
      result.values_at(*%w{id composite name breed}).should == [1, 322, "Harry", "Angus"]
    end

    it "serializes a many to many relationship" do
      pending 'TODO: fix many to many in dm-core' do
        p1 = Planet.create(:name => 'earth')
        p2 = Planet.create(:name => 'mars')
        FriendedPlanet.create(:planet => p1, :friend_planet => p2)
        result = @harness.test(p1.reload.friend_planets)
        result.should be_kind_of(Array)
        result[0]["name"].should == "mars"
      end
    end
  end

  describe "(multiple repositories)" do
    before(:all) do
      QuanTum::Cat.auto_migrate!
      DataMapper.repository(:alternate) { QuanTum::Cat.auto_migrate! }
    end

    it "should use the repsoitory for the model" do
      gerry = QuanTum::Cat.create(:name => "gerry")
      george = DataMapper.repository(:alternate){ QuanTum::Cat.create(:name => "george", :is_dead => false) }
      # The record created in :alternate must not leak is_dead into the
      # :default repository's serialized output.
      @harness.test(gerry )['is_dead'].should be(nil)
      @harness.test(george)['is_dead'].should be(false)
    end
  end
end
|
rafaelrosafu/dm-more
|
dm-types/spec/integration/uri_spec.rb
|
require 'pathname'
require Pathname(__FILE__).dirname.parent.expand_path + 'spec_helper'
describe DataMapper::Types::URI do
  before(:all) do
    class ::URITest
      include DataMapper::Resource
      property :id, Serial
      property :uri, URI
    end
    URITest.auto_migrate!
  end

  it "should work" do
    DataMapper.repository(:default) do
      URITest.create(:uri => 'http://localhost')
    end
    # Stored values are rehydrated as Addressable::URI instances.
    URITest.first.uri.should == Addressable::URI.parse('http://localhost')
  end

  it 'should immediately typecast supplied values' do
    URITest.new(:uri => 'http://localhost').uri.should == Addressable::URI.parse('http://localhost')
  end

  it "should correctly typecast nil values" do
    URITest.new(:uri => nil).uri.should == nil
  end
end
|
rafaelrosafu/dm-more
|
dm-validations/spec/integration/uniqueness_validator_spec.rb
|
<filename>dm-validations/spec/integration/uniqueness_validator_spec.rb
require 'pathname'
require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper'
# These examples require a real database, so they are skipped unless one
# of the supported DataObjects drivers is available.
if HAS_SQLITE3 || HAS_MYSQL || HAS_POSTGRES
  describe DataMapper::Validate::UniquenessValidator do
    before do
      class ::Organisation
        include DataMapper::Resource
        property :id, Serial
        property :name, String
        property :domain, String #, :unique => true
        validates_is_unique :domain, :allow_nil => true
      end

      class ::User
        include DataMapper::Resource
        property :id, Serial
        property :organisation_id, Integer
        property :user_name, String
        belongs_to :organisation #has :organisation, n..1
        # Same property validated under two contexts: one scoped by the
        # association, one by the raw foreign key.
        validates_is_unique :user_name, :when => :testing_association, :scope => [:organisation]
        validates_is_unique :user_name, :when => :testing_property, :scope => [:organisation_id]
      end

      Organisation.auto_migrate!
      User.auto_migrate!

      DataMapper.repository do
        Organisation.new(:id=>1, :name=>'Org One', :domain=>'taken').save
        Organisation.new(:id=>2, :name=>'Org Two', :domain=>'two').save
        User.new(:id=>1, :organisation_id=>1, :user_name=>'guy').save
      end
    end

    it 'should validate the uniqueness of a value on a resource' do
      DataMapper.repository do
        o = Organisation.get!(1)
        o.should be_valid
        # :allow_nil => true lets multiple records share a nil domain.
        o = Organisation.new(:id=>20, :name=>"Org Twenty", :domain=>nil)
        o.should be_valid
        o.save
        o = Organisation.new(:id=>30, :name=>"Org Thirty", :domain=>nil)
        o.should be_valid
      end
    end

    # NOTE(review): despite its name this example exercises the
    # taken/not-taken domain check rather than :allow_nil.
    it "should not even check if :allow_nil is true" do
      DataMapper.repository do
        o = Organisation.get!(1)
        o.should be_valid
        o = Organisation.new(:id=>2, :name=>"Org Two", :domain=>"taken")
        o.should_not be_valid
        o.errors.on(:domain).should include('Domain is already taken')
        o = Organisation.new(:id=>2, :name=>"Org Two", :domain=>"not_taken")
        o.should be_valid
      end
    end

    it 'should validate uniqueness on a string key' do
      class ::Department
        include DataMapper::Resource
        property :name, String, :key => true
        validates_is_unique :name
        auto_migrate!
      end
      hr = Department.create(:name => "HR")
      hr2 = Department.new(:name => "HR")
      hr2.valid?.should == false
    end

    it 'should validate the uniqueness of a value with scope' do
      DataMapper.repository do
        # Same user_name in the same organisation: invalid in both contexts.
        u = User.new(:id => 2, :organisation_id=>1, :user_name => 'guy')
        u.should_not be_valid_for_testing_property
        u.errors.on(:user_name).should include('User name is already taken')
        u.should_not be_valid_for_testing_association
        u.errors.on(:user_name).should include('User name is already taken')
        # Same user_name in a different organisation: valid.
        u = User.new(:id => 2, :organisation_id => 2, :user_name => 'guy')
        u.should be_valid_for_testing_property
        u.should be_valid_for_testing_association
      end
    end
  end
end
|
rafaelrosafu/dm-more
|
dm-constraints/spec/spec_helper.rb
|
<gh_stars>1-10
require 'pathname'
require 'rubygems'
gem 'rspec', '~>1.1.12'
require 'spec'
gem 'dm-core', '0.10.0'
require 'dm-core'
ADAPTERS = []
# Attempts to set up the named DataObjects driver; the connection URI can
# be overridden via e.g. POSTGRES_SPEC_URI. On success the adapter is also
# registered as :default and the name is recorded in ADAPTERS. Returns
# false when the do_* driver gem is not installed (LoadError).
def load_driver(name, default_uri)
  begin
    DataMapper.setup(name, ENV["#{name.to_s.upcase}_SPEC_URI"] || default_uri)
    DataMapper::Repository.adapters[:default] = DataMapper::Repository.adapters[name]
    ADAPTERS << name
  rescue LoadError => e
    warn "Could not load do_#{name}: #{e}"
    false
  end
end
load_driver(:postgres, 'postgres://postgres@localhost/dm_core_test')
load_driver(:mysql, 'mysql://localhost/dm_core_test')
require Pathname(__FILE__).dirname.expand_path.parent + 'lib/dm-constraints'
|
rafaelrosafu/dm-more
|
dm-validations/spec/integration/length_validator/spec_helper.rb
|
# Fixture with no length validation; used as a baseline in the length
# validator specs.
class MotorLaunch
  include DataMapper::Resource
  property :id, Serial
  property :name, String, :auto_validation => false
end
# Fixture whose default name already satisfies the minimum-length rule.
class BoatDock
  include DataMapper::Resource
  property :id, Serial
  property :name, String, :auto_validation => false, :default => "I'm a long string"
  validates_length :name, :min => 3
end
|
rafaelrosafu/dm-more
|
dm-validations/spec/integration/validation_errors_i18n_spec.rb
|
<gh_stars>1-10
require 'pathname'
require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper'
describe DataMapper::Validate::ValidationErrors do
  # NOTE(review): this example is order-dependent -- it relies on running
  # before anything has loaded an I18n constant; confirm if specs are
  # ever randomized.
  it "should not detect the presence of the I18n module the first time it's called" do
    DataMapper::Validate::ValidationErrors.i18n_present?.should == false
  end

  it "should return the message with the humanized field" do
    DataMapper::Validate::ValidationErrors.default_error_message(:absent, 'fake_property').should == 'Fake property must be absent'
  end
end
|
rafaelrosafu/dm-more
|
dm-validations/lib/dm-validations/format_validator.rb
|
<reponame>rafaelrosafu/dm-more<filename>dm-validations/lib/dm-validations/format_validator.rb
#require File.dirname(__FILE__) + '/formats/email'
require 'pathname'
require Pathname(__FILE__).dirname.expand_path + 'formats/email'
require Pathname(__FILE__).dirname.expand_path + 'formats/url'
module DataMapper
module Validate
##
#
# @author <NAME>
# @since 0.9
class FormatValidator < GenericValidator
  # Registry of predefined formats, populated by the included format
  # modules (see formats/email and formats/url).
  FORMATS = {}

  include DataMapper::Validate::Format::Email
  include DataMapper::Validate::Format::Url

  # Raised when :as/:with is neither a Proc, a Regexp, nor a registered
  # predefined format. BUGFIX: this definition had been commented out
  # while `raise UnknownValidationFormat` below still referenced it, so a
  # mis-configured validator raised NameError instead of this error.
  class UnknownValidationFormat < StandardError; end

  def initialize(field_name, options = {}, &b)
    super(field_name, options)
    @field_name, @options = field_name, options
    # Blank values pass by default unless the caller opts out.
    @options[:allow_nil] = true unless @options.include?(:allow_nil)
  end

  # Returns true when the target's value matches the configured format
  # (or is blank and :allow_nil is on); otherwise records an error
  # message on the target and returns false.
  def call(target)
    value = target.validation_property_value(field_name)
    return true if @options[:allow_nil] && value.blank?

    validation = @options[:as] || @options[:with]

    raise "No such predefined format '#{validation}'" if validation.is_a?(Symbol) && !FORMATS.has_key?(validation)
    validator = validation.is_a?(Symbol) ? FORMATS[validation][0] : validation

    valid = case validator
      when Proc then validator.call(value)
      when Regexp then value =~ validator
      else
        raise UnknownValidationFormat, "Can't determine how to validate #{target.class}##{field_name} with #{validator.inspect}"
    end

    return true if valid

    error_message = @options[:message] || ValidationErrors.default_error_message(:invalid, field_name, { :target => self })
    field = Extlib::Inflection.humanize(field_name)
    # Message may be a callable receiving (humanized_field, value).
    error_message = error_message.call(field, value) if error_message.respond_to?(:call)

    add_error(target, error_message, field_name)

    false
  end
end # class FormatValidator
module ValidatesFormat
  ##
  # Validates that the attribute is in the specified format. You may use the
  # :as (or :with, it's an alias) option to specify the pre-defined format
  # that you want to validate against. You may also specify your own format
  # via a Proc or Regexp passed to the :as or :with options.
  #
  # @option :allow_nil<Boolean> true/false (default is true)
  # @option :as<Format, Proc, Regexp> the pre-defined format, Proc or Regexp to validate against
  # @option :with<Format, Proc, Regexp> an alias for :as
  #
  # @details [Pre-defined Formats]
  #   :email_address (format is specified in DataMapper::Validate::Format::Email)
  #   :url (format is specified in DataMapper::Validate::Format::Url)
  #
  # @example [Usage]
  #   require 'dm-validations'
  #
  #   class Page
  #     include DataMapper::Resource
  #
  #     property :email, String
  #     property :zip_code, String
  #
  #     validates_format :email, :as => :email_address
  #     validates_format :zip_code, :with => /^\d{5}$/
  #
  #     # a call to valid? will return false unless:
  #     # email is formatted like an email address
  #     # and
  #     # zip_code is a string of 5 digits
  #
  def validates_format(*fields)
    opts = opts_from_validator_args(fields)
    add_validator_to_context(opts, fields, DataMapper::Validate::FormatValidator)
  end
end # module ValidatesFormat
end # module Validate
end # module DataMapper
|
rafaelrosafu/dm-more
|
dm-validations/spec/integration/within_validator_spec.rb
|
require 'pathname'
require Pathname(__FILE__).dirname.expand_path.parent + 'spec_helper'
describe DataMapper::Validate::WithinValidator do
  before(:all) do
    class ::Telephone
      include DataMapper::Resource
      property :id, Serial
      property :type_of_number, String, :auto_validation => false
      validates_within :type_of_number, :set => ['Home', 'Work', 'Cell']
    end

    class ::Inf
      include DataMapper::Resource
      property :id, Serial
      property :gte, Integer, :auto_validation => false
      property :lte, Integer, :auto_validation => false
      property :between, Integer, :auto_validation => false
      # `n` here is dm-core's open-ended range marker (infinity), giving
      # half-bounded sets.
      validates_within :gte, :set => (10..n)
      validates_within :lte, :set => (-n..10)
      validates_within :between, :set => (10..20)
    end

    class ::Receiver
      include DataMapper::Resource
      property :id, Serial
      property :holder, String, :auto_validation => false, :default => 'foo'
      validates_within :holder, :set => ['foo', 'bar', 'bang']
    end

    class ::Nullable
      include DataMapper::Resource
      property :id, Serial
      property :nullable, Integer, :auto_validation => false
      validates_within :nullable, :set => (1..5), :allow_nil => true
    end
  end

  it "should validate a value on an instance of a resource within a predefined
set of values" do
    tel = Telephone.new
    tel.valid?.should_not == true
    tel.errors.full_messages.first.should == 'Type of number must be one of [Home, Work, Cell]'
    tel.type_of_number = 'Cell'
    tel.valid?.should == true
  end

  it "should validate a value within range with infinity" do
    inf = Inf.new
    inf.should_not be_valid
    inf.errors.on(:gte).first.should == 'Gte must be greater than or equal to 10'
    inf.errors.on(:lte).first.should == 'Lte must be less than or equal to 10'
    inf.errors.on(:between).first.should == 'Between must be between 10 and 20'
    inf.gte = 10
    inf.lte = 10
    inf.between = 10
    inf.valid?.should == true
  end

  it "should validate a value by its default" do
    tel = Receiver.new
    tel.should be_valid
  end

  it "should allow a nil value if :allow_nil is true" do
    nullable = Nullable.new
    nullable.nullable = nil
    nullable.should be_valid
    nullable.nullable = 11
    nullable.should_not be_valid
    nullable.nullable = 3
    nullable.should be_valid
  end
end
|
nuwansh/url-fetcher
|
test/test_url-fetcher.rb
|
require 'helper'
describe Fetch::UrlFetcher do
  before do
    @submit_url = "http://google.com"
    @submit_url_without_schem = "google.com"
    # NOTE(review): @submit_wrong is never used by any example below.
    @submit_wrong = "google"
  end

  describe "when submited correct URL" do
    it "must respond with correct url" do
      uri = Fetch::UrlFetcher.new({:url => @submit_url, :width => 150})
      assert_equal @submit_url, uri.url
    end
  end

  describe "when user submit URLs without schema" do
    it "must respond with correct url" do
      # A missing scheme should be defaulted to http://.
      uri = Fetch::UrlFetcher.new({:url => @submit_url_without_schem, :width => 100})
      assert_equal @submit_url, uri.url
    end
  end

  describe "When system process given correct URL" do
    # NOTE(review): this example performs real network access (scrapes
    # google.com), so it is slow and non-deterministic.
    it "must respond with paths of cannced images" do
      uri = Fetch::UrlFetcher.new({:url => @submit_url, :width => 100})
      uri.find
      assert uri
    end
  end
end
|
nuwansh/url-fetcher
|
lib/url-fetcher.rb
|
<reponame>nuwansh/url-fetcher<gh_stars>0
require 'uri'
require 'open-uri'
module Fetch
# Scrapes a web page for <img> tags and collects the URLs of images whose
# width exceeds the configured minimum.
class UrlFetcher
  attr_accessor :url
  attr_reader :title, :image_urls, :width

  # @param options [Hash] :url (an http:// scheme is prepended when the
  #   given URL has none) and :width (minimum image width, default 100).
  def initialize(options={})
    @url = get_url_params(options[:url]).scheme.nil? ? "http://#{options[:url]}" : options[:url]
    @width = options[:width].nil? ? 100 : options[:width]
  end

  #def custom_error
  #  raise(Fetch::MyCustomException, 'Your custom error message here')
  #end

  # Fetches the page with Mechanize and populates @title and @image_urls.
  # Images without an extension, with unparsable URLs, or whose size
  # cannot be determined are skipped.
  def find
    agent = Mechanize.new
    doc = agent.get(url)
    images = doc.parser.xpath("//img/@src | //a/img/@src").map {|a|
      unless get_url_params(a.value).nil?
        image_uri = a.value
        #if fetch uri is relative, we just add host name
        if is_related_uri?(image_uri)
          image_uri = url+image_uri
        end
        #check this uri has file extension
        unless has_extention?(image_uri).empty?
          image_sizes = calculate_img_size(image_uri)
          if image_sizes
            (image_sizes[0].to_i > width) ? image_uri : nil
          else
            # BUGFIX: this branch used `return nil`, which returns from
            # #find itself -- a single unreadable image aborted the whole
            # scrape. Yield nil for just this entry instead; it is
            # dropped by the compact below.
            nil
          end
        end
      end
    }
    @image_urls = images.compact
    @title = doc.title
  end

  private

  # def get_url_scheme(url)
  #   params = get_url_params(url)
  #   params.scheme.nil? ? "http://" : "#{params.scheme}://"
  #   #TODO: params return nil? system give error message to object
  # end

  # Parses +url+; logs to STDERR and returns nil on malformed input.
  def get_url_params(url)
    begin
      URI.parse(url)
    rescue URI::Error => err
      STDERR.puts err.message
    end
  end

  # True when the URI carries no scheme, i.e. it is relative to the page.
  def is_related_uri?(image_uri)
    params = get_url_params(image_uri)
    if params.scheme.nil?
      return true
    else
      return false
    end
  end

  # Returns the file extension of +uri+ ("" when there is none).
  # NOTE(review): rescuing Exception is overly broad; File.extname only
  # raises on non-string input -- consider narrowing to StandardError.
  def has_extention?(uri)
    begin
      File.extname("#{uri}")
    rescue Exception => err
      STDERR.puts err.message
    end
  end

  # Downloads the image and returns [width, height], or nil on failure.
  def calculate_img_size(uri)
    begin
      open(uri, "rb") do |fh|
        ImageSize.new(fh.read).size
      end
    rescue Exception => err
      STDERR.puts err.message
    end
  end
end
#class MyCustomException < StandardError; end #nodoc
end
|
ploch/rbot
|
plugins/test/plugin_stub.rb
|
if RUBY_PLATFORM =~ /darwin/ then
# fix for scrapi on Mac OS X
require "rubygems"
require "tidy"
Tidy.path = "/usr/lib/libtidy.dylib"
end
require 'rubygems'
require 'scrapi'
require 'yaml'
require 'htmlentities'
require 'ostruct'
require 'awesome_print'
class Array
  # Converts a flat, Perl-style list of alternating keys and values into
  # a Hash: [:a, 1, :b, 2].to_perly_hash #=> { :a => 1, :b => 2 }.
  # A trailing key with no value maps to nil.
  def to_perly_hash()
    h = {}
    # BUGFIX: the block previously opened with `{ |i| g` -- the stray `g`
    # raised NameError on the first iteration, so the method never worked.
    self.each_index { |i|
      next if i % 2 != 0
      h[ self[i] ] = self[i+1]
    }
    return h
  end
end
# Helper mixin for exercising rbot plugins outside the bot: HTTP fetching
# plus small HTML clean-up utilities.
module PluginMethods
  # Decodes HTML entities and removes markup tags from +html+.
  def strip_tags(html)
    HTMLEntities.new.decode(
      html.gsub(/<.+?>/,'').
      gsub(/<br *\/>/m, '')
    )
  end

  # Fetches +url+ (String or URI), following HTTP redirects recursively,
  # and returns the response body.
  def fetchurl(url)
    puts "< fetching: #{url}"
    uri = url.kind_of?(String) ? URI.parse(URI.escape(url)) : url
    http = Net::HTTP.new(uri.host, uri.port)
    http.start do |http|
      req = Net::HTTP::Get.new(uri.path + '?' + (uri.query || ''), {"User-Agent" => "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.5; en-US; rv:1.9.0.10) Gecko/2009042315 Firefox/3.0.10"})
      res = http.request(req)
      if res.key?('location') then
        puts "< following redir: " + res['location']
        return fetchurl(URI.join(uri.to_s, res['location']))
      end
      return res.body
    end
  end

  # alternate to fetchurl() above (no redirect handling, no User-Agent)
  def f(url)
    uri = URI.parse(URI.escape(url))
    res = Net::HTTP.start(uri.host, uri.port) {|http|
      # BUGFIX: guard against a nil query, matching fetchurl above --
      # `uri.path + '?' + uri.query` raised TypeError for URLs that have
      # no query string.
      http.get(uri.path + '?' + (uri.query || ''))
    }
    return res.body
  end

  # Drops HTML entity references ("&amp;" etc.) from +str+ in place and
  # returns it.
  # NOTE(review): the first gsub! replaces a space with a space (a no-op);
  # it looks like an "&nbsp;" literal that was HTML-decoded at some point
  # -- confirm against upstream.
  def strip_html_entities(str)
    str.gsub!(/ /, ' ')
    str.gsub!(/&[#0-9a-z]+;/, '')
    str
  end

  # Strips tags/entities and collapses whitespace in +str+.
  # NOTE(review): the first gsub! removes every space character; as with
  # strip_html_entities this may have originally targeted "&nbsp;" --
  # confirm before relying on it.
  def cleanup_html(str, strip_entities = false)
    str.gsub!(/ /, '')
    str = strip_html_entities(str) if strip_entities
    str = strip_tags(str)
    str.strip!
    str.squeeze!(" \n\r")
    return str
  end
end
# Minimal stand-in for rbot's Plugin base class so plugins can be
# exercised outside of a running bot instance.
class Plugin
  # Route mapping is a no-op in the stub; rbot would register handlers here.
  def map(*args)
  end

  # Each log level simply prefixes its message and prints it to stdout.
  { :debug => 'DEBUG', :log => 'INFO', :warn => 'WARN', :error => 'ERROR' }.each do |level, label|
    define_method(level) do |msg|
      puts "#{label}: #{msg}"
    end
  end

  # Lets tests inject a fake plugin registry.
  attr_writer :registry
end
# Minimal stand-in for an rbot message: echoes replies to stdout and
# reports a fixed source nick/address.
class Msg
  def reply(str)
    puts(format('reply: %s', str))
  end

  def sourcenick
    'crown'
  end

  def sourceaddress
    '<EMAIL>'
  end
end
$: << File.join(File.expand_path(".."))
# Loads a plugin file, mixes the PluginMethods test helpers into the first
# class it defines, and returns a ready-to-use instance with an empty
# in-memory registry. Returns nil when no "class Foo < Bar" line is found.
def load_plugin(file)
  require(file)
  # Re-read the plugin source to discover the class name by convention
  # ("class FooPlugin < Plugin"); $1 captures the name from the regexp.
  File.open(File.join(File.expand_path(".."), "#{file}.rb")).readlines.each{ |l|
    if l =~ /^class (.+?) </ then
      Kernel.const_get($1).class_exec {
        include PluginMethods
      }
      plugin = Kernel.const_get($1).new
      plugin.registry = {}
      return plugin
    end
  }
end
|
ploch/rbot
|
plugins/bitcoin.rb
|
<filename>plugins/bitcoin.rb<gh_stars>1-10
#
# by <NAME> <brian -at- ploch.net> 2013-04-10
#
# retrive bitcoin exchange values from mtgox.com
#
#
# Version 0.1
# Initial script to pull USD only
#
# 0.2
# Add multi-currency support
#
require 'json'
require 'net/http'
require 'uri'
# Replies with Bitcoin exchange rates from the MtGox ticker API.
class BitcoinPricePlugin < Plugin
  def initialize
    super
    @user_agent = "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us) AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16"
    # ISO currency code => human-readable name; doubles as the whitelist
    # of codes accepted by do_btc_price.
    @currency_list = { "USD" => "US Dollar",
      "GBP" => "Great British Pound",
      "EUR" => "Euro",
      "JPY" => "Japanese Yen",
      "AUD" => "Australian Dollar",
      "CAD" => "Canadian Dollar",
      "CHF" => "Swiss Franc",
      "CNY" => "Chinese Yuan",
      "DKK" => "Danish Krone",
      "HKD" => "Hong Kong Dollar",
      "PLN" => "Polish Złoty",
      "RUB" => "Russian Rouble",
      "SEK" => "Swedish Krona",
      "SGD" => "Singapore Dollar",
      "THB" => "Thai Baht "
    }
  end

  def help(plugin, topic="")
    return "btc [CURRENCY] : AUD/CAD [USD Default]"
  end

  # Handler for "btc [:symbol]": fetches the BTC<CUR>/money/ticker feed
  # and replies with last/high/low/volume, or an error line on failure.
  def do_btc_price(m,params)
    cur = params[:symbol].upcase
    if @currency_list.has_key?(cur)
      api_url = "http://data.mtgox.com/api/2/BTC#{cur}/money/ticker"
      uri = URI.parse(api_url)
      http = Net::HTTP.new(uri.host, uri.port)
      # NOTE(review): the block parameter shadows the outer `http` local.
      html = http.start do |http|
        req = Net::HTTP::Get.new(uri.path, {"User-Agent" => @user_agent})
        response = http.request(req)
        response.body
      end
      if html.nil?
        m.reply("Unable to reach MTGOX")
        return
      end
      # NOTE(review): JSON.parse will raise on a malformed response body;
      # only a nil body is handled above.
      data = JSON.parse(html)
      btc_val = data["data"]["last"]["display"]
      btc_high = data["data"]["high"]["display"]
      btc_low = data["data"]["low"]["display"]
      btc_vol = data["data"]["vol"]["display"]
      m.reply sprintf( "#{cur}: %s High: %s Low: %s Volume: %s", btc_val, btc_high, btc_low, btc_vol)
    else
      m.reply("Invalid Currency")
    end
  end
end
plugin = BitcoinPricePlugin.new
plugin.map 'btc [:symbol]', :defaults => {:symbol => "USD"}, :action => 'do_btc_price'
|
ploch/rbot
|
plugins/woot.rb
|
<filename>plugins/woot.rb
# woot!
#
# by <NAME> <brian -at- ploch.net>
#
# lookup current woot.com daily deal
#
# first line is daily woot
# second line is daily sellout, unless its sold out,
# then its a random from the deals.woot page
#
# Version 0.1 2010-06-30
require 'rubygems'
require 'scrapi'
require 'net/http'
# Scrapes woot.com's daily deal and the deals.woot sellout deal.
class WootPlugin < Plugin
  def initialize
    super
    # daily deal page, then the daily "sellout" deal
    @woots = ["http://www.woot.com/", "http://deals.woot.com/sellout/"]
  end

  def help(plugin, topic="")
    return "woot => get current woot deal"
  end

  # Replies with the current woot and sellout deals.
  def woot(m, params)
    deals = get_woot()
    # bail out when scraping failed -- the old code replied but then fell
    # through and crashed formatting a nil price
    if deals[0].nil? || deals[0].price.nil?
      m.reply "unable to get current woots"
      return
    end
    m.reply sprintf("Woot! %s - $%s", deals[0].descrip, deals[0].price)
    m.reply sprintf("Sellout! %s - $%s", deals[1].descrip, deals[1].price) if deals[1]
  end

  # Scrapes each configured woot page; returns an array of scrapi results
  # (one per site, in @woots order) with .descrip and .price accessors.
  def get_woot()
    @woots.map do |site|
      scraper = Scraper.define do
        process "div.productDescription h2.fn", :descrip => :text
        process "div.productDescription h3.price span.amount", :price => :text
        result :descrip, :price
      end
      uri = URI.parse(site)
      # no shadowed block parameter, single start/request round-trip
      body = Net::HTTP.start(uri.host, uri.port) do |conn|
        req = Net::HTTP::Get.new(uri.path, "User-Agent" => "Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/533.4 (KHTML, like Gecko) Chrome/5.0.375.55 Safari/533.4")
        conn.request(req).body
      end
      scraper.scrape(body)
    end
  end
end
plugin = WootPlugin.new
plugin.map 'woot', :action => 'woot'
|
ploch/rbot
|
plugins/dvd_release.rb
|
<reponame>ploch/rbot<gh_stars>1-10
#
# upcoming dvd release list with date
#
# by <NAME> <brian -at- ploch.net>
#
# retrieve upcoming dvd releases with dates from moviefone.com
#
# version 0.1 2010-06-27
#
# TODO:
# add search by title for upcoming movies
#
require 'rubygems'
require 'scrapi'
require 'net/http'
# Lists upcoming (or recently released) DVDs with dates, scraped from
# moviefone.com.
class DVD_ReleasePlugin < Plugin
  Config.register Config::IntegerValue.new("dvd_release.display_limit",
    :default => 5,
    :desc => "Amount of dvds to list")

  def initialize
    super
    @dvd_coming_url = "http://www.moviefone.com/dvd/coming-soon"
    @dvd_recent_url = "http://www.moviefone.com/dvd/dvd-release-date"
    @user_agent = "Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/533.4 (KHTML, like Gecko) Chrome/5.0.375.55 Safari/533.4"
  end

  def help(plugin, topic="")
    return "dvd new: (default) to view a list of upcoming dvd releases; dvd out: to see recently released"
  end

  # Replies with up to dvd_release.display_limit "date: title" lines.
  # params[:type] == "out" lists recent releases, anything else upcoming.
  def get_dvd_release(m, params)
    scraper = Scraper.define do
      array :dvds
      process "div.movie", :dvds => Scraper.define {
        process "a.movieTitle", :name => :text
        process "div.movie div.thisWeekCont div.thisWeek", :date => :text
        result :name, :date
      }
      result :dvds
    end
    url = params[:type] == "out" ? @dvd_recent_url : @dvd_coming_url
    uri = URI.parse(url)
    begin
      # no shadowed block parameter; exceptions are folded into the nil
      # case (the old `unless html` guard never saw raised network errors)
      html = Net::HTTP.start(uri.host, uri.port) do |conn|
        conn.request(Net::HTTP::Get.new(uri.path, "User-Agent" => @user_agent)).body
      end
    rescue StandardError
      html = nil
    end
    unless html
      m.reply "Unable to retrieve movie data"
      return
    end
    # take() replaces the old manual counter loop
    limit = @bot.config['dvd_release.display_limit']
    scraper.scrape(html).take(limit).each do |dvd|
      m.reply "%s: %s" % [dvd.date, dvd.name]
    end
  end
end
plugin = DVD_ReleasePlugin.new
plugin.map 'dvd [:type]', :defaults => {:type => "new"},:action => 'get_dvd_release'
|
ploch/rbot
|
rss/types.rb
|
#-- vim:sw=2:et
#++
# Custom display formats for rbot's rss plugin. rss_type registers a named
# formatter; the block receives `s`, the item/stream hash (:date, :title,
# :link, :author, :handle, :at, ...) supplied by the rss plugin -- both
# rss_type and make_stream are defined by that plugin, not in this file.
rss_type(:twitter) do |s|
# tweets: timestamp plus text only
line1 = "%{date}%{title}"
make_stream(line1, nil, s)
end
rss_type(:tinydefault) do |s|
# like the default format, but with the link shortened via tinyurl first
s[:link] = WWW::ShortURL.shorten(s[:link], :tinyurl)
line1 = "%{handle}%{date}%{title}%{at}%{link}"
line1 << " (by %{author})" if s[:author]
make_stream(line1, nil, s)
end
|
ploch/rbot
|
plugins/steam.rb
|
<reponame>ploch/rbot<filename>plugins/steam.rb<gh_stars>1-10
#
#
# steam profile info
#
# by <NAME> <brian -at- ploch.net>
#
# basic profile information lookup based on profile name or steam64 id
#
#
# Version: 0.2 2010-06-26
# Complete rewrite + made public
#
#
#
# TODO (?)
#
# parse stateMessage and show current game being played
#
# show friend summery for online, offline, games being played + counts
#
# anything else?
#
require 'rexml/document'
# Looks up basic Steam Community profile information by vanity name or
# 64-bit steam id via the community XML API.
class SteamPlugin < Plugin
  include REXML

  def initialize
    super
    # /id/ takes vanity names, /profiles/ takes numeric steam64 ids
    @steam_id_base = "http://steamcommunity.com/id/"
    @steam_profile_base = "http://steamcommunity.com/profiles/"
  end

  def help(plugin, topic="")
    return "steam [user]=> get info on steam user; steam setaccount [account] => set default steam account"
  end

  # Sends the formatted status line plus the recent-games summary.
  def send_steam_info(m, account_name, player_info, game_data)
    m.reply "#{Bold}Steam status for #{NormalText}%s: #{Bold}Currently:#{NormalText} %s\n#{Bold}Game Name:#{NormalText} %s #{Bold}Rating: #{NormalText}%s #{Bold}Playtime (2 weeks): #{NormalText}%s hours" %
      [ account_name , player_info["online_status"], player_info["current_name"], player_info["steam_rating"], player_info["hours_2week"] ]
    top_games = "#{Bold}Most Recent Games #{NormalText}[2 weeks/Total]: "
    game_data.each { |g| top_games += "#{Bold} #{g['game']}#{NormalText} [#{g['time']}h/#{g['total']}h] " }
    m.reply "#{top_games}"
  end

  # Entry point for "steam [account]": uses the given account or the
  # caller's saved default.
  def steam_profile_lookup(m, params)
    account = params[:account]
    if account.nil? and @registry.has_key? "#{m.sourceaddress}_steamuser"
      account = @registry["#{m.sourceaddress}_steamuser"]
    end
    if account.nil?
      m.reply "Please specify an account name or set a default \"steam setaccount <account or id>\""
      return
    end
    games, player_info, friend_info = steam_fetch_profile(account)
    if games.nil?
      # fetch or parse failed -- previously this path raised NameError
      # because steam_fetch_profile tried to reply via an `m` it never had
      m.reply "Unable to fetch steam xml data"
      return
    end
    send_steam_info(m, account, player_info, games)
  end

  # Returns the XML profile URL for an account: numeric steam64 ids go
  # through /profiles/, vanity names through /id/.
  def steam_user_url(account_name)
    base = account_name.is_a?(Numeric) ? @steam_profile_base : @steam_id_base
    # interpolation instead of String#+ -- concatenating a Numeric id
    # raised TypeError in the old code
    "#{base}#{account_name}/?xml=1"
  end

  # Fetches and parses the profile XML for account_name.
  # Returns [gamedata, player_info, friend_info] on success, or nil when
  # the download fails or the XML is missing expected elements.
  def steam_fetch_profile(account_name)
    gamedata = Array.new
    player_info = {}
    friend_info = {}
    file = @bot.httputil.get(steam_user_url(account_name), :cache => false)
    return nil unless file
    begin
      doc = Document.new(file)
      player_info['current_name'] = XPath.first(doc, "//steamID").text
      player_info['online_status'] = XPath.first(doc, "//onlineState").text
      player_info['online_msg'] = XPath.first(doc, "//stateMessage").text
      player_info['member_time'] = XPath.first(doc, "//memberSince").text
      player_info['steam_rating'] = XPath.first(doc, "//steamRating").text
      player_info['hours_2week'] = XPath.first(doc, "//hoursPlayed2Wk").text
      # games played in the last two weeks: name, recent hours, total hours
      doc.elements.each("//mostPlayedGame") { |g|
        gamedata << Hash['game' => g.elements["gameName"].text,
                         'time' => g.elements["hoursPlayed"].text,
                         'total' => g.elements["hoursOnRecord"].text]
      }
      # friends and their status message (summary only)
      doc.elements.each("//friend") { |f|
        friend_info[f.elements["steamID"].text] = f.elements["stateMessage"].text
      }
    rescue StandardError
      # malformed XML or missing elements (e.g. private/unknown profile)
      return nil
    end
    return gamedata, player_info, friend_info
  end

  # Saves params[:account] as the caller's default steam account.
  def set_default_account(m, params)
    if not params[:account] then
      return m.reply "Missing account name for setting a default account."
    end
    @registry["#{m.sourceaddress}_steamuser"] = params[:account]
    m.reply "%s has been set as your steam account." % params[:account]
  end
end
plugin = SteamPlugin.new
plugin.map 'steam setaccount [:account]', :action => 'set_default_account'
plugin.map 'steam [:account]', :action => 'steam_profile_lookup'
|
ploch/rbot
|
plugins/rotten.rb
|
# rotten tomatoes
# by <NAME> <<EMAIL>> 2007-01-23
#
# if given a movie title, finds the rating for that movie, or
# displays ratings for movies opening this week
require 'rexml/document'
require 'uri/common'
require '0lib_rbot'
require 'scrapi'
require 'ostruct'
require 'time'
Struct.new("Movie", :title, :link, :percent, :rating, :desc, :count, :fresh, :rotten, :release)
class RottenPlugin < Plugin
include REXML
include PluginLib
def initialize
super
@rss = "http://i.rottentomatoes.com/syndication/rss/"
@search = "http://www.rottentomatoes.com/search/full_search.php?search="
@site = "http://www.rottentomatoes.com"
end
def help(plugin, topic="")
return "rotten|rt [num] [opening|upcoming|top|current|<movie title>] => ratings for movies opening this week, rt top => ratings for the current top movies, rt upcoming => advance ratings for upcoming movies, rt current => top recent releases, rt <movie title> => lookup rating for a movie"
end
def do_rotten(m, params)
num = params[:num].to_i if params[:num]
movie = params[:movie]
movie = movie.join(" ").downcase if not movie.nil?
if movie.nil? or movie.length == 0 or movie == 'opening' or movie == 'new'
opening m, params, @rss + "opening.xml", num
elsif movie == 'upcoming'
opening m, params, @rss + "upcoming.xml"
elsif movie == 'top'
opening m, params, @rss + "top_movies.xml"
elsif movie == 'current'
opening m, params, @rss + "in_theaters.xml"
else
search m, params, movie
end
end
def search(m, params, movie)
info = nil
# first, search in the complete xml feed to see if its a current movie
begin
info = search_xml(m, movie)
rescue => ex
m.reply "xml search failed: #{ex}"
error ([ex.to_s] + ex.backtrace).join("\n")
end
# try searching the site
begin
info = search_site(m, movie) if info.nil?
rescue => ex
m.reply "site search failed: #{ex}"
error ([ex.to_s] + ex.backtrace).join("\n")
end
# couldn't find anything
return m.reply(sprintf("`%s' not found", movie)) if info.nil?
if info.fresh == 0 and info.total == 0 and info.release_date and info.release_date > Time.new then
# zero ratings and is in the future
return m.reply(sprintf("%s - %s (no reviews) %s", info.title, info.release_date.strftime("%b %d, %Y"), info.link))
end
m.reply(sprintf("%s - %s%% = %s (%s/%s) %s", info.title, info.rating, info.status, info.fresh, info.total, info.link))
end
def search_xml(m, movie)
r = search_xml_feed("#{@rss}opening.xml", m, movie)
r = search_xml_feed("#{@rss}complete_movies.xml", m, movie) if not r
return r
end
def search_xml_feed(feed_url, m, movie)
xml = fetchurl(feed_url)
unless xml
warn "faild to fetch feed #{feed_url}"
return nil
end
doc = Document.new xml
unless doc
m.reply "invalid xml returned from #{feed_url}"
return nil
end
begin
title = percent = rating = link = desc = release = nil
doc.elements.each("rss/channel/item") {|e|
title = e.elements["title"].text.strip
link = e.elements["link"].text
if not e.elements["RTmovie:tomatometer_percent"].text.nil?
# movie has a rating
title = title.slice(title.index(' ')+1, title.length) if title.include? '%'
end
if title.downcase == movie or title.downcase.include? movie
return get_movie_info(m, title, link)
end
}
rescue => ex
error ex.inspect
error ex.backtrace.join("\n")
end
return nil
end
def scrape_page_title(html)
title_scraper = Scraper.define do
process_first "head title", :title => :text
result :title
end
return title_scraper.scrape(html)
end
def search_site(m, movie)
# second, try searching for the movie title
html = fetchurl(@search + movie)
title = scrape_page_title(html)
if title !~ /search/i and title.strip =~ /^(.*?) - Rotten Tomatoes$/ then
# we were redirected to a movie page, use it
return scrape_movie_info(m, $1, nil, html)
end
movie_scraper = Scraper.define do
process "li h3", :title => :text
process "li h3 a", :url => "@href"
process "li span.tMeterScore", :score => :text
result :title, :url, :score
end
movies_scraper = Scraper.define do
array :movies
process "ul#movie_results_ul li", :movies => movie_scraper
result :movies
end
movies = movies_scraper.scrape(html)
movies.each { |_m|
if _m.title.downcase == movie then
return get_movie_info(m, _m.title, @site + _m.url)
end
}
# no exact match, let's use the first result..
return get_movie_info(m, movies[0].title, @site + movies[0].url)
end
def get_movie_info(m, title, link)
html = fetchurl(link)
if html.nil? then
debug "error fetching " + link
return nil
end
return scrape_movie_info(m, title, link, html)
end
def scrape_movie_info(m, title, link, html)
movie_scraper = Scraper.define do
array :info
process "div#all-critics-numbers p.critic_stats", :ratings => :text
process "div#all-critics-numbers span#all-critics-meter", :rating => :text
process "div#top-critics-numbers span#all-critics-meter", :rating_top => :text
process "div#movie_stats span", :info => :text
process "link[rel=canonical]", :link => "@href"
result :link, :ratings, :rating, :rating_top, :info
end
info = movie_scraper.scrape(html)
movie_info = OpenStruct.new({:title => title,
:rating => info.rating.to_i,
:rating_top => info.rating_top.to_i,
:link => link || info.link
})
if info.ratings then
if info.ratings.match(/Reviews Counted: ?(\d+)/) then
movie_info.total = $1.to_i
end
if info.ratings.match(/Fresh: ?(\d+)/) then
movie_info.fresh = $1.to_i
end
if info.ratings.match(/Rotten: ?(\d+)/) then
movie_info.rotten = $1.to_i
end
if info.ratings.match(/Average Rating: ?(.*)/) then
movie_info.average = $1
end
else
movie_info.total = movie_info.fresh = movie_info.rotten = movie_info.average = 0
end
# pull out stats
if info.info then
movie_stats = info.info.to_perly_hash
movie_info.runtime = movie_stats['Runtime:']
movie_info.release_date = movie_stats['Theatrical Release:']
movie_info.box_office = movie_stats['Box Office:']
movie_info.rated = movie_stats['Rated:']
movie_info.genre = movie_stats['Genre:']
end
# cleanup release date
if movie_info.release_date then
begin
rd = movie_info.release_date.split(' ')[0..2].join(' ')
movie_info.release_date = Time.parse(rd)
rescue => ex
error ([ex.to_s] + ex.backtrace).join("\n")
end
end
# double check the rating
begin
r = (movie_info.fresh.to_f / movie_info.total * 100).round
rescue => ex
r = 0
end
movie_info.rating = r if r != movie_info.rating
movie_info.status = movie_info.rating >= 60 ? 'Fresh' : 'Rotten'
return movie_info
end
# print opening movies and their scores
def opening(m, params, url, num = 5)
xml = fetchurl(url)
unless xml
m.reply "faild to fetch feed"
return
end
begin
doc = Document.new xml
rescue => ex
if xml.include? '<html>' then
return m.reply("rottentomatoes rss feeds are currently down")
else
return m.reply("error parsing feed: " + ex)
end
end
begin
movies = []
doc.elements.each("rss/channel/item") {|e|
title = e.elements["title"].text.strip
if not e.elements["RTmovie:tomatometer_percent"].text.nil?
# movie has a rating
title = title.slice(title.index(' ')+1, title.length) if title.include? '%'
percent = e.elements["RTmovie:tomatometer_percent"].text + "%"
rating = e.elements["RTmovie:tomatometer_rating"].text
else
# not yet rated
percent = "n/a"
rating = ""
end
movies << OpenStruct.new({:title => title, :percent => percent, :rating => rating})
}
rescue => ex
error ex.inspect
error ex.backtrace.join("\n")
end
if num < movies.size then
m.reply sprintf("displaying top %s of %s movies opening this weekend. use rt <num> to show more", num, movies.size)
end
num = 5 if num < 0
movies[0,num].each { |t|
m.reply sprintf("%s - %s = %s", t.title, t.percent, t.rating).strip
}
end
end
plugin = RottenPlugin.new
plugin.map 'rotten [:num] *movie', :action => 'do_rotten', :defaults => { :movie => nil, :num => 5 }, :requirements => { :num => %r|\d+| }
plugin.map 'rt [:num] *movie', :action => 'do_rotten', :defaults => { :movie => nil, :num => 5 }, :requirements => { :num => %r|\d+| }
|
ploch/rbot
|
plugins/0lib_rbot.rb
|
<gh_stars>1-10
require 'htmlentities'
require 'net/http'
require 'uri/common'
_lib = File.expand_path( File.dirname(__FILE__) )
$: << _lib if not $:.include? _lib
_lib = File.expand_path( File.dirname(__FILE__) + '/lib' )
$: << _lib if not $:.include? _lib
# Perl-style list-to-hash conversion.
class Array
  # Treats the receiver as a flat [key, value, key, value, ...] list and
  # builds a Hash from it. A trailing key with no value maps to nil;
  # duplicate keys keep the last value.
  def to_perly_hash()
    pairs = {}
    each_slice(2) { |key, value| pairs[key] = value }
    pairs
  end
end
# Shared helper mixin for the pixelcop rbot plugins: URL extraction and
# persistence, HTTP fetching with retries, and HTML cleanup utilities.
# Most methods expect to be mixed into a Plugin instance and use its @bot.
module PluginLib
  # Characters that must be escaped before URI.extract can run over a
  # message. (The old 2nd/3rd arguments `false, 'N'` to Regexp.new were a
  # Ruby 1.8 encoding-ism rejected by modern Rubies; the pattern is pure
  # ASCII, so dropping them preserves the match semantics.)
  OUR_UNSAFE = Regexp.new("[^#{URI::PATTERN::UNRESERVED}#{URI::PATTERN::RESERVED}%# ]")

  # Register the DB settings only when loaded inside rbot.
  # NOTE(review): Module.constants returns Symbols on Ruby >= 1.9, so the
  # String 'Irc' never matches there -- confirm which Ruby this targets.
  if Module.constants.include?('Irc') then
    Irc::Bot::Config.register Irc::Bot::Config::StringValue.new('pixelcop.db.host',
      :default => 'localhost',
      :desc => "MySQL DB hostname or IP")
    Irc::Bot::Config.register Irc::Bot::Config::StringValue.new('pixelcop.db.name',
      :default => 'rbot',
      :desc => "MySQL DB name")
    Irc::Bot::Config.register Irc::Bot::Config::StringValue.new('pixelcop.db.user',
      :default => 'rbot',
      :desc => "MySQL DB username")
    Irc::Bot::Config.register Irc::Bot::Config::StringValue.new('pixelcop.db.pass',
      :default => '<PASSWORD>',
      :desc => "MySQL DB password")
  end

  # Escapes the message text and returns every http/https URL found in it.
  # NOTE(review): URI.escape was removed in Ruby 3.0; this only works on
  # the older Rubies rbot targets.
  def extract_urls(m)
    escaped = URI.escape(m.message, OUR_UNSAFE)
    URI.extract(escaped, ['http', 'https'])
  end

  # Opens a DBI handle to the MySQL database configured under pixelcop.db.*.
  def connect_db
    host = @bot.config['pixelcop.db.host']
    name = @bot.config['pixelcop.db.name']
    user = @bot.config['pixelcop.db.user']
    pass = @bot.config['pixelcop.db.pass']
    str = sprintf('DBI:Mysql:database=%s;host=%s', name, host)
    return DBI.connect(str, user, pass)
  end

  # Schema used by save_url_in_db()
  #
  # CREATE TABLE `urls` (
  #   `id` int(10) NOT NULL auto_increment,
  #   `nick` varchar(255) default NULL,
  #   `source` varchar(255) default NULL,
  #   `url` varchar(255) default NULL,
  #   `url_full` text,
  #   `datetime` timestamp NOT NULL default CURRENT_TIMESTAMP,
  #   `mirror` text,
  #   PRIMARY KEY (`id`)
  # ) ENGINE=MyISAM DEFAULT CHARSET=utf8;

  # Records a url (with nick/channel metadata) unless it is already in the
  # table. Best effort: DB errors are logged and swallowed. The handle is
  # now always disconnected -- it used to leak on the duplicate-row early
  # return and on errors.
  def save_url_in_db(url, orig_url = nil)
    dbh = nil
    begin
      dbh = connect_db()
      # don't insert if url already exists
      q = dbh.prepare('SELECT * from urls where url = ?')
      q.execute(url.url)
      return if q.rows() > 0
      query = dbh.prepare('INSERT into urls (nick, source, url, url_full, mirror) values (?, ?, ?, ?, ?)')
      query.execute(url.nick, url.channel, url.url, url.url, orig_url)
    rescue => ex
      error ex
    ensure
      dbh.disconnect if dbh
    end
  end

  # Downloads uri into filename unless the file already exists.
  def save_url_to_file(uri, filename)
    if not File.exist? filename then
      url = URI.parse(uri)
      req = Net::HTTP::Get.new(url.path)
      res = Net::HTTP.start(url.host, url.port) { |http|
        http.request(req)
      }
      open(filename, "wb") { |file|
        file.write(res.body)
      }
    end
  end

  # Fetches url via the bot's httputil, retrying up to 3 times.
  # Returns the body, or nil when every attempt failed. (The old `for`
  # loop fell through and returned the truthy Range 1..3 on total
  # failure, defeating callers' `unless html` checks.)
  def fetchurl(url, cache = true)
    url = URI.parse(URI.escape(url)) if not url.kind_of? URI
    (1..3).each do |attempt|
      begin
        return @bot.httputil.get(url, :cache => cache)
      rescue => ex
        error sprintf("failure #%d", attempt)
        error ex.inspect
        error ex.backtrace.join("\n")
      end
    end
    nil
  end

  # Removes markup tags from html and decodes character entities.
  def strip_tags(html)
    HTMLEntities.new.decode(
      html.gsub(/<.+?>/,'').
      gsub(/<br *\/>/m, '')
    )
  end

  # Legacy entity-replacement chain kept for reference (patterns here look
  # entity-mangled by the extraction of this file):
  # gsub(/ /,' ').
  # gsub(/&/,'&').
  # gsub(/"/,'"').
  # gsub(/</,'<').
  # gsub(/>/,'>').
  # gsub(/&ellip;/,'...').
  # gsub(/'/, "'").
  # gsub(/\r\n/, ' | ').

  # Strips HTML character entities out of str, mutating it in place.
  # NOTE(review): the first pattern appears entity-mangled (space->space);
  # upstream it was presumably /&nbsp;/ -- confirm.
  def strip_html_entities(str)
    str.gsub!(/ /, ' ')
    str.gsub!(/&[#0-9a-z]+;/, '')
    str
  end

  # Normalises scraped HTML to plain text: optional entity strip, tag
  # removal, trim and whitespace collapse.
  # NOTE(review): first pattern appears entity-mangled (deletes spaces as
  # written) -- presumably /&nbsp;/ upstream; confirm.
  def cleanup_html(str, strip_entities = false)
    str.gsub!(/ /, '')
    str = strip_html_entities(str) if strip_entities
    str = strip_tags(str)
    str.strip!
    str.squeeze!(" \n\r")
    return str
  end

  # Trims str and caps it at 1130 characters (IRC line budget).
  def limit_output(str)
    str.strip[0, 1130]
  end

  # Current wall-clock time in the US Eastern timezone (needs tzinfo).
  def eastern_time()
    return TZInfo::Timezone.get('America/New_York').utc_to_local(Time.new)
  end
end
|
ploch/rbot
|
plugins/wikipedia.rb
|
<reponame>ploch/rbot<gh_stars>1-10
# wikipedia
# by <NAME> <<EMAIL>> 2008-04-07
#
# search wikipedia. similar to core 'search' plugin, but also gets article text.
# also includes a listener for grabbing text of pasted wikipedia links.
require 'rubygems'
require 'json'
require 'net/http'
require 'uri/common'
require '0lib_rbot'
Net::HTTP.version_1_2
class WikiPediaPlugin < Plugin
include PluginLib
Config.register Config::ArrayValue.new("wikipedia.ignore_channels",
:desc => "Don't show info on these channels",
:default => [])
Config.register Config::ArrayValue.new("wikipedia.ignore",
:desc => "Don't show info for urls from users represented as hostmasks on this list. Useful for ignoring other bots, for example.",
:default => [])
def help(plugin, topic="")
return "wikipedia|wp <term>"
end
def lookup(m,params)
if params[:query].length == 0
m.reply 'incorrect usage: ' + help(m.plugin)
return
end
s = params[:query].join(' ')
url = google_search(s + " site:en.wikipedia.org")
return m.reply "No entry found for #{s}" if not url
#debug "got url: #{url}"
# http://en.wikipedia.org/wiki/Java_(programming_language)
url =~ %r|^http://[a-z]+.wikipedia.org/wiki/(.*)$|
title = $1
#debug "found title #{title}"
begin
m.reply "#{title}: " + shorten( title, get_article(title) )
rescue
m.reply $!
end
end
def shorten(title, article)
max_len = 1143 - (title.length + 2)
return article[0, max_len]
end
def listen(m)
return if m.nil? or not m.kind_of?(PrivMessage) or m.channel.nil?
return if @bot.config["wikipedia.ignore_channels"].map { |c| c.nil? ? c : c.downcase }.include? m.channel.downcase
return if @bot.config["wikipedia.ignore"].find { |u| m.source.matches?(u) }
if m.message =~ %r|http://en.wikipedia.org/wiki/([^ ]+)|
# found a wikipedia link
title = $1
return if title =~ /^File:/
begin
m.reply "#{title}: " + shorten( title, get_article(title) )
rescue
m.reply $!
end
end
end
def google_search(str)
searchfor = URI.escape str
query = "/search?q=#{searchfor}&btnI=I%27m%20feeling%20lucky"
result = "not found!"
proxy_host = nil
proxy_port = nil
if(ENV['http_proxy'])
if(ENV['http_proxy'] =~ /^http:\/\/(.+):(\d+)$/)
proxy_host = $1
proxy_port = $2
end
end
http = @bot.httputil.get_proxy(URI.parse("http://www.google.com"))
begin
http.start {|http|
resp = http.get(query)
if resp.code == "302"
result = resp['location']
end
}
rescue => e
p e
if e.response && e.response['location']
result = e.response['location']
else
result = "error!"
end
end
return result
end
def get_article(title)
wp = 'http://en.wikipedia.org/w/index.php?title=%s&action=raw§ion=0'
res = fetchurl(sprintf(wp, title))
raise sprintf('Lookup failed for "%s"', title) if not res
if res =~ /^#REDIRECT \[\[(.*)\]\]/i then
title = $1
res = fetchurl(sprintf(wp, title))
raise sprintf('Lookup failed for "%s"', title) if not res
end
debug res
m = res.match(/^\{\{ *infobox.*^\}\}(.*)/mi)
if m
entry = m[1]
else
entry = res
end
if entry =~ %r|<title>Error</title>| or entry =~ %r|<title>Error</title>| then
return 'Error fetching entry'
end
text = strip_tags( parse(entry) ).
gsub(/\r\n/, ' | ').
gsub(/[\n\r]/, ' ').
strip
return text
end
def parse(raw)
scanner = StringScanner.new(raw)
cursor = 0
categories = Array.new
languages = Hash.new
fulltext = ''
related = Array.new
headings = Array.new
text = ''
seen_heading = false
while cursor < raw.length do
scanner.pos = cursor
## [[ ... ]]
if (substr = scanner.scan(/\G\[\[ *(.*?) *\]\]/)) and substr =~ /\G\[\[ *(.*?) *\]\]/ then
directive = $1
cursor += $&.length - 1
if directive =~ /\:/ then
(type, text) = directive.split(':')
if type.downcase == 'category' then
categories << text
end
# language codes
if type.length == 2 and type.downcase == type then
languages[type] = Array.new if not languages[type]
languages[type] = text
end
elsif directive =~ /\|/ then
(lookup, name) = directive.split('|')
fulltext += name
related << lookup if lookup !~ /^#/
else
fulltext += directive
related << directive
end
## === heading 2 ===
elsif (substr = scanner.scan(/=== *(.*?) *===/)) and substr =~ /=== *(.*?) *===/ then
### don't bother storing these headings
fulltext += $1
cursor += $&.length - 1
next
## == heading 1 ==
elsif (substr = scanner.scan(/== *(.*?) *==/)) and substr =~ /== *(.*?) *==/ then
headings << $1
text = fulltext if not seen_heading
seen_heading = true
fulltext += $1
cursor += $&.length - 1
next
## '' italics '' or ''' bold '''
elsif (substr = scanner.scan(/''' *(.*?) *'''/)) and substr =~ /''' *(.*?) *'''/ then
fulltext += $1
cursor += $&.length
next
## {{ disambig }}
elsif (substr = scanner.scan(/\{\{ *(.*?) *\}\}/)) and substr =~ /\{\{ *(.*?) *\}\}/ then
## ignore for now
cursor += $&.length
next
else
fulltext += raw[cursor,1]
end
cursor += 1
end
return fulltext
end
end
plugin = WikiPediaPlugin.new
plugin.register("wikipedia")
plugin.map 'wikipedia *query', :action => 'lookup'
plugin.map 'wiki *query', :action => 'lookup'
|
ploch/rbot
|
plugins/lastnight-0.0.2.rb
|
#-- vim:sw=2:et
#++
#
# :title: Texts from Last Night for rbot
#
# Author:: <NAME> <<EMAIL>>
# License:: GPL v2
# Homepage:: http://leapster.org/software/lastnight/
#
# Adapted from <NAME> and <NAME>'s Grouphug.rb
# Fetches random "Texts from Last Night" entries from the site's feed.
class LastNightPlugin < Plugin
  # Captures each entry's description text from the raw RSS markup.
  REG = Regexp.new('<description>(.*?)<p><a href', Regexp::MULTILINE)

  def initialize
    super
    # cache of not-yet-shown texts, refilled from the feed when empty
    @texts = Array.new
  end

  def help(plugin, topic="")
    return _("LastNight plugin. Usage: 'lastnight' for random text.")
  end

  # Replies with one cached text, refilling the cache from the feed first
  # when it has run dry. Any failure is reported as a connection problem.
  def lastnight(m, params)
    begin
      if @texts.empty?
        feed = @bot.httputil.get("http://www.textsfromlastnight.com/feed/", { :cache => false })
        feed.scan(REG).each { |captures| @texts << captures[0].ircify_html }
      end
      m.reply @texts.pop
    rescue
      m.reply "failed to connect to textsfromlastnight.com"
    end
  end
end
plugin = LastNightPlugin.new
plugin.default_auth('create', false)
plugin.map "lastnight [:num]",
:thread => true, :action => :lastnight, :requirements => { :num => /\d+/ }
|
ploch/rbot
|
plugins/nba.rb
|
# nba
# by <NAME> <<EMAIL>> 2007-01-23
#
# display nba scores
#
# changelog:
# 2008-05-19 - refactored to use ysports module
require 'uri/common'
require 'yaml'
require 'yahoo_sports'
require '0lib_rbot'
class NbaPlugin < Plugin
include PluginLib
def initialize
super
@mynba = "mynba => get results for your teams, mynba [clear] [ team [team...] ] => save your fav teams"
end
def help(plugin, topic="")
"nba => get last nights results, nba <team> => get results of last game for <team>, mynba => get results for your teams, " + @mynba
end
# get latest results for a specific team
def nba_team(m, params)
info = YahooSports::NBA.get_team_stats(params[:team])
last_game = info.last5[-1]
game_date = last_game.date.strftime('%a %b %d')
ret = sprintf("%s (%s, %s): %s, %s%s - %s",
info.name, info.standing, info.position,
game_date, (last_game.away ? "at " : ""), last_game.team, last_game.status)
return m.reply(ret)
end
def nba_playoffs(m, params)
begin
html = fetchurl('http://sports.yahoo.com/nba/playoffs')
rescue => ex
m.reply 'Error fetching url'
debug ex.inspect
debug ex.backtrace.join("\n")
return
end
if not html then
m.reply 'Error fetching url'
return
end
mteams = html.scan(/<td class=yspscores align=left>(.*?)<\/td>/m)
teams = Array.new
mteams.each { |t|
t = t[0]
t.gsub!(/ /, '')
t.gsub!(/<a.*?>/, '')
t.gsub!(/<\/a>/, '')
t.gsub!(/<b>/, '')
t.gsub!(/<\/b>/, '')
t.squeeze!(' ')
t.strip!
teams << t
}
series = html.scan(/<span class="yspdetailttl">(.*?series.*?)<\/span>/m)
games = html.scan(/<tr class=ysprow\d><td height=16 class=yspscores><span class=yspdetailttl>(\d)\.\*?<\/span>(.*?)<\/td><\/tr>/)
series_start = false
next_games = Array.new
games.each { |g|
if g[1].include? '<b>' then
series_start = true
elsif series_start then
# at the next game in the series
desc = /<a href="\/nba\/.*?">(.*?)<\/a> –(.*)/.match(g[1])
if desc then
next_games << sprintf('Game %s %s %s', g[0], desc[1], desc[2].gsub(/&n.*?;/, '').squeeze(' ').strip)
else
next_games << sprintf('Game %s %s', g[0], g[1].gsub(/&n.*?;/, '').squeeze(' ').strip)
end
series_start = false
end
}
series.each_index { |i|
m.reply sprintf('%s: %s, %s', teams[i], series[i][0].gsub(/ /, '').strip, next_games[i])
}
end
def mynba(m, params)
if not @registry.has_key?(m.sourceaddress)
m.reply("you need to setup your favs! " + @mynba)
return
end
teams = @registry[m.sourceaddress]
if teams.empty? then
m.reply("you need to setup your favs! " + @mynba)
return
end
params = Hash.new
teams.each { |t|
params[:team] = t
nba_team(m, params)
}
end
def set_default(m, params)
teams = params[:teams]
if @registry.has_key?(m.sourceaddress) then
saved_teams = @registry[m.sourceaddress]
else
saved_teams = Array.new
end
if teams[0] == 'clear' then
saved_teams.clear
@registry[m.sourceaddress] = saved_teams
m.reply('done')
return
elsif teams[0] == 'list' then
m.reply('current teams: ' + saved_teams.join(' '))
return
end
saved_teams.clear
teams.each { |t|
(team, html) = YahooSports::NBA.find_team_page(t)
saved_teams.push(t) unless team.nil?
}
@registry[m.sourceaddress] = saved_teams
m.reply('saved')
end
def nba_live(m, params)
games = YahooSports::NBA.get_homepage_games('live')
date = Time.parse(eastern_time().strftime('%Y%m%d'))
show_games(m, games, date, "Live games: ")
end
def nba_today(m, params)
games = YahooSports::NBA.get_homepage_games()
date = Time.parse(eastern_time().strftime('%Y%m%d'))
show_games(m, games, date, "Today's games: ")
end
def nba_yesterday(m, params)
games = YahooSports::NBA.get_homepage_games()
date = Time.parse(eastern_time().strftime('%Y%m%d')) - 86400
show_games(m, games, date, "Yesterday's games: ")
end
def show_games(m, games, date, text, team = '')
scores = []
games.each { |game|
next if not team.nil? and not team.empty? and not
(game.team1.downcase.include? team or
game.team2.downcase.include? team)
next if Time.parse(game.date.strftime('%Y%m%d')) != date
if game.state == 'preview' then
game.status = sprintf("%s, %s", game.status, game.extra) if game.extra
scores << sprintf("%s at %s (%s, %s)", game.team1, game.team2, game.state, game.status)
next
end
if game.state == 'final' then
if game.score1.to_i > game.score2.to_i then
game.team1 += '*'
else
game.team2 += '*'
end
game.status = 'F'
else
# live
game.status = sprintf('%s, %s', game.state, game.status)
end
scores << sprintf("%s %s at %s %s (%s)",
game.team1, game.score1,
game.team2, game.score2,
game.status.strip)
}
return m.reply(text + 'none') if scores.empty?
m.reply(text + scores.join(' / '))
end
end
plugin = NbaPlugin.new
plugin.map 'nba live', :action => 'nba_live'
plugin.map 'nba now', :action => 'nba_live'
plugin.map 'nba today', :action => 'nba_today'
plugin.map 'nba yest', :action => 'nba_yesterday'
plugin.map 'nba yesterday', :action => 'nba_yesterday'
plugin.map 'nba', :action => 'nba_yesterday'
plugin.map 'nba :team', :action => 'nba_team'
plugin.map 'mynba', :action => 'mynba'
plugin.map 'mynba *teams', :action => 'set_default'
plugin.map 'nbap', :action => 'nba_playoffs'
plugin.map 'nbaplayoffs', :action => 'nba_playoffs'
|
ploch/rbot
|
plugins/bo.rb
|
# BoxOffice
# by <NAME> <<EMAIL>> 2007-01-25
#
# display box office numbers
require 'uri/common'
require '0lib_rbot'
# Box office plugin: scrapes the weekend chart from IMDB and per-movie
# lifetime/opening grosses from Box Office Mojo.
# NOTE(review): class name keeps the historical "Offfice" typo because the
# plugin.map registrations reference this exact constant.
class BoxOffficePlugin < Plugin
  include PluginLib

  def help(plugin, topic="")
    "boxoffice|bo => get latest box office numbers (top 5)"
  end

  # Look up box office numbers for a specific movie title.
  # Replies with lifetime and opening gross for the best match and, when
  # the query was not an exact title hit, lists the other candidates.
  def boxoffice_movie(m, params)
    movie = params[:movie].join(' ').downcase
    html = fetchurl('http://www.boxofficemojo.com/search/?q=' + movie)

    # scrapi scrapers: collect <td> text per <tr> from the 5th table in div#body
    row_scraper = Scraper.define do
      array :cols
      process "td", :cols => :text
      result :cols
    end
    table_scraper = Scraper.define do
      array :rows
      process "tr", :rows => row_scraper
      result :rows
    end
    search_scraper = Scraper.define do
      array :table
      process "div#body table:nth-child(5)", :table => table_scraper
      result :table
    end

    ret = search_scraper.scrape(html)
    rows = ret.first

    # verify we got the right table: the header row must start with
    # "Movie Title" or the page layout has changed
    begin
      if rows.first.first.downcase !~ /^movie title/ then
        raise "bad data"
      end
      rows.shift # discard the header row
    rescue StandardError => ex
      # also covers NoMethodError on nil rows; StandardError (not Exception)
      # so signals and SystemExit are not swallowed
      m.reply "error parsing data"
      return
    end

    # prefer an exact (case-insensitive) title match over the first result
    found = nil
    rows.each do |row|
      if row.first.strip.downcase == movie then
        found = row
      end
    end
    match = found || rows.first

    # column layout (per Box Office Mojo search results):
    # [0] title, [1] studio, [2] lifetime gross, [3] theaters,
    # [4] opening, [5] opening theaters, [6] release date, [7] links
    m.reply sprintf("%s (%s) - %s Lifetime Gross / %s Opening", match.first.strip, match[6], match[2], match[4])

    # list the remaining candidates when the query was not an exact hit
    if found.nil? and rows.size > 1 then
      others = []
      rows.each do |row|
        if row.first != match.first then
          others << row.first.strip
        end
      end
      m.reply sprintf("Other matches: %s", others.join(", "))
    end
  end

  # Reply with the top 5 movies from the IMDB weekend box office chart.
  def boxoffice_chart(m, params)
    begin
      html = fetchurl('http://www.imdb.com/boxoffice/')
    rescue => ex
      debug ex.inspect
      debug ex.backtrace.join("\n")
      return
    end

    row_scraper = Scraper.define do
      array :cols
      process "td", :cols => :text
      result :cols
    end
    table_scraper = Scraper.define do
      array :rows
      process "tr", :rows => row_scraper
      result :rows
    end
    chart_scraper = Scraper.define do
      array :table
      process_first "div#main table", :table => table_scraper
      result :table
    end

    ret = chart_scraper.scrape(html)
    rows = ret.first
    m.reply sprintf("Weekend box office")
    count = 0
    rows.each { |bo|
      # bo = [rank, nil, name, weekend gross, total gross, weeks on chart]
      count += 1
      # format: "rank. title - weekend (total)"
      m.reply sprintf("%s. %s - %s (%s)", bo[0], strip_tags(bo[2]), bo[3], bo[4])
      break if count == 5
    }
  end
end
plugin = BoxOffficePlugin.new
# "bo"/"boxoffice" alone show the weekend chart; with a title they search.
# Per alias, the bare-command route is registered before the *movie route,
# matching the original registration precedence.
%w{bo boxoffice}.each do |cmd|
  plugin.map cmd, :action => 'boxoffice_chart'
  plugin.map "#{cmd} *movie", :action => 'boxoffice_movie'
end
|
ploch/rbot
|
plugins/old.rb
|
# old!
# by <NAME> <<EMAIL>> 2008-09-11
#
# never forget! don't paste old links, bitch!@
class OldNewsPlugin < Plugin
  # Canned one-line replies, keyed by the action-method name the command
  # mappings reference. Reply text is reproduced verbatim.
  REPLIES = [
    [:say_old,    "nicca that's so old! http://is.gd/2vj6"],
    [:say_sad,    "sad panda :( http://www.sadtrombone.com/"],
    [:say_zero,   "this is how much I care: http://fw2.org/eomi0w"],
    [:say_kobe,   "http://www.pixelcop.org/~chetan/pics/kobe.jpg"],
    [:say_np,     "no problemo http://www.pixelcop.org/~chetan/pics/youre_welcome.jpg"],
    [:say_yes,    "YES! http://www.pixelcop.org/~chetan/pics/YES.jpg"],
    [:say_notbad, "notbad.jpg http://bit.ly/notbad"],
  ]

  # generate one say_* method per canned reply
  REPLIES.each do |name, text|
    define_method(name) do |m, params|
      m.reply text
    end
  end
end
plugin = OldNewsPlugin.new
# command word => action method; pair list keeps registration order stable
[
  ['old',    'say_old'],
  ['sad',    'say_sad'],
  ['zero',   'say_zero'],
  ['care',   'say_zero'],
  ['kobe',   'say_kobe'],
  ['np',     'say_np'],
  ['yes',    'say_yes'],
  ['notbad', 'say_notbad'],
  ['nb',     'say_notbad'],
].each { |cmd, action| plugin.map cmd, :action => action }
|
ploch/rbot
|
plugins/idlerpg.rb
|
<gh_stars>1-10
require 'rubygems'
require 'open-uri'
require 'rexml/document'
require 'duration'
require 'scrapi'
require '0lib_rbot'
# to pull idle rpg player infoz
#
# Idle RPG player info plugin: fetches per-player XML from rpg.killme.org
# and replies with class, rank, level, online status and time to next level.
class IdleRpgPlugin < Plugin
  include REXML
  include PluginLib

  def help(plugin,topic="")
    "irpg [player]..."
  end

  # Reply with stats for each named player; defaults to the requesting
  # nick when no names are given.
  def do_playerinfo(m, params)
    params[:user] = m.sourcenick.downcase if params[:user].empty?
    params[:user].each { |user|
      url = sprintf("http://rpg.killme.org/xml.php?player=%s", user)
      xml = open(url)
      unless xml
        m.reply "problem getting stats for " + user
        return
      end
      doc = Document.new xml
      unless doc
        m.reply "parse failed (invalid xml) for " + user
        return
      end
      level    = doc.elements["//level"].text
      pclass   = doc.elements["//class"].text
      isonline = doc.elements["//online"].text
      nlvl     = doc.elements["//ttl"].text
      rank     = get_rank(user)
      online_s = isonline == "1" ? "online :)" : "offline :("
      if level != nil
        lvltime = Duration.new(nlvl)
        m.reply sprintf("%s: the %s (rank: %s, level: %s) is %s next level in %s", user, pclass, rank, level, online_s, lvltime)
      end
    }
  end

  # Rank (1-based) of +player+ on the leaderboard, or -1 when the page
  # could not be fetched or the player is not listed.
  def get_rank(player)
    html = fetchurl('http://rpg.killme.org/players.php')
    if not html then
      return -1
    end
    irpg_player = Scraper.define do
      array :players
      process "ol > li > a", :players => :text
      result :players
    end
    players = irpg_player.scrape(html)
    players.each_with_index {|item, index|
      # case-insensitive compare subsumes the exact compare
      return index + 1 if item.downcase == player.downcase
    }
    # bug fix: previously fell through and returned the player array itself
    -1
  end
end
# register the plugin: "irpg" with zero or more player names
plugin = IdleRpgPlugin.new
plugin.map 'irpg [*user]', :action => 'do_playerinfo'
|
ploch/rbot
|
plugins/oil.rb
|
# oil!
# by <NAME> <<EMAIL>> 2008-05-30
#
# oil futures price lookup
require 'rubygems'
require 'scrapi'
require 'net/http'
# Nymex crude oil futures price plugin: scrapes the current price from
# bloomberg.com/energy.
class OilPlugin < Plugin
  def help(plugin, topic="")
    return "oil => get current Nymex Crude Future price"
  end

  # Reply with the current Nymex crude future price, or an error message
  # when the price could not be scraped.
  def crude(m,params)
    price = get_crude_price()
    if price.nil?
      m.reply "unable to get current price"
      # bug fix: previously fell through and also sent "Nymex Crude Future: $"
      return
    end
    m.reply sprintf("Nymex Crude Future: $%s", price)
  end

  # Scrape the first price cell from the Bloomberg energy page.
  # Returns the price string, or nil when scraping yields nothing.
  def get_crude_price()
    bloomberg_energy = Scraper.define do
      array :prices
      process "div#stock_data > table td.value", :prices => :text
      result :prices
    end
    uri = URI.parse(URI.escape('http://www.bloomberg.com/energy/'))
    http = Net::HTTP.new(uri.host, uri.port)
    # renamed the block variable so it no longer shadows the outer `http`
    html = http.start do |conn|
      req = Net::HTTP::Get.new(uri.path, {"User-Agent" => "stickin it to the man"})
      response = conn.request(req)
      response.body
    end
    prices = bloomberg_energy.scrape(html)
    return nil if not prices
    return prices[0]
  end
end
# register the plugin; the bare "oil" alias is intentionally left disabled
plugin = OilPlugin.new
#plugin.map 'oil', :action => 'crude'
plugin.map 'crude', :action => 'crude'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.