| repo_name (string, 6–97 chars) | path (string, 3–341 chars) | text (string, 8–1.02M chars) |
|---|---|---|
dorianmariefr/activejob-limiter
|
lib/activejob/limiter.rb
|
# frozen_string_literal: true
# This file allows Bundler to auto-require
# the library without a separate :require arg
require_relative '../active_job/limiter'
|
dorianmariefr/activejob-limiter
|
spec/spec_helper.rb
|
# frozen_string_literal: true
require 'bundler/setup'
require 'active_job'
require 'active_job/limiter'
require 'byebug'
RSpec.configure do |config|
config.include(ActiveJob::TestHelper)
config.before(:all) do
ActiveJob::Base.queue_adapter = :test
ActiveJob::Base.queue_adapter.perform_enqueued_jobs = true
ActiveJob::Base.queue_adapter.perform_enqueued_at_jobs = false
end
config.before(:each) do
ActiveJob::Base.queue_adapter.performed_jobs = []
end
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = '.rspec_status'
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
|
nlhkh/delay_message_on_rails_dockerized
|
app/views/messages/show.json.jbuilder
|
json.extract! @message, :id, :recipient_email, :text, :delay_until_time, :timezone_offset, :sent, :created_at, :updated_at
|
nlhkh/delay_message_on_rails_dockerized
|
app/mailers/message_mailer.rb
|
class MessageMailer < ApplicationMailer
def default(recipient_email, content)
@content = content
mail to: recipient_email
end
end
|
nlhkh/delay_message_on_rails_dockerized
|
app/workers/delay_message_worker.rb
|
class DelayMessageWorker
include Sidekiq::Worker
def perform(message_id)
# Get the message object
message = Message.find(message_id)
# Send an email
MessageMailer.delay.default(message.recipient_email, message.text)
# Change the status of the Message object to `sent`
message.sent = true
message.save
end
end
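# A hedged enqueue sketch (illustrative only; assumes a booted Rails app with
# Sidekiq available — the attribute values and the use of perform_at are assumptions):
if __FILE__ == $PROGRAM_NAME
  message = Message.create!(
    recipient_email: 'someone@example.com',
    text: 'Hello!',
    delay_until_time: 10.minutes.from_now,
    timezone_offset: 0
  )
  # Ask Sidekiq to run the worker at (roughly) the requested time.
  DelayMessageWorker.perform_at(message.delay_until_time, message.id)
end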
|
nlhkh/delay_message_on_rails_dockerized
|
test/mailers/previews/message_mailer_preview.rb
|
# Preview all emails at http://localhost:3000/rails/mailers/message_mailer
class MessageMailerPreview < ActionMailer::Preview
# Preview this email at http://localhost:3000/rails/mailers/message_mailer/default
def default
MessageMailer.default('user@example.com', 'Sample message body') # sample data so the preview can render
end
end
|
nlhkh/delay_message_on_rails_dockerized
|
config/initializers/sidekiq.rb
|
Sidekiq.configure_server do |config|
config.redis = { url: 'redis://redis:6379/1', namespace: 'delay_message_dev', driver: :hiredis }
end
Sidekiq.configure_client do |config|
config.redis = { url: 'redis://redis:6379/1', namespace: 'delay_message_dev', driver: :hiredis }
end
|
nlhkh/delay_message_on_rails_dockerized
|
app/models/message.rb
|
class Message < ActiveRecord::Base
validates :recipient_email, :text, :delay_until_time, :timezone_offset, presence: true
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i
validates :recipient_email, format: { with: VALID_EMAIL_REGEX, message: 'is not valid' }
end
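# A hedged console example of the validations above (values are illustrative;
# assumes the app and its database are available):
if __FILE__ == $PROGRAM_NAME
  msg = Message.new(recipient_email: 'not-an-email', text: 'hi',
                    delay_until_time: Time.now, timezone_offset: 0)
  msg.valid?                   # => false (fails the email format check)
  msg.errors[:recipient_email] # => ["is not valid"]
end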
|
nlhkh/delay_message_on_rails_dockerized
|
app/views/messages/index.json.jbuilder
|
json.array!(@messages) do |message|
json.extract! message, :id, :recipient_email, :text, :delay_until_time, :timezone_offset, :sent
json.url message_url(message, format: :json)
end
|
nlhkh/delay_message_on_rails_dockerized
|
db/migrate/20150225040826_create_messages.rb
|
class CreateMessages < ActiveRecord::Migration
def change
create_table :messages do |t|
t.string :recipient_email
t.text :text
t.datetime :delay_until_time
t.integer :timezone_offset
t.boolean :sent, default: false # update this line
t.timestamps null: false
end
end
end
|
ShinRyubi/warnme
|
web/app/controllers/application_controller.rb
|
class ApplicationController < ActionController::Base
before_action :configure_permitted_parameters, if: :devise_controller?
before_action :set_locale
protected
def configure_permitted_parameters
devise_parameter_sanitizer.permit(:sign_up, keys: [:email, :name, :city, :address, :photo, :password, :password_confirmation])
end
def set_locale
I18n.locale = params[:locale] || I18n.default_locale
end
end
|
ShinRyubi/warnme
|
web/db/migrate/20181017063711_create_posts.rb
|
class CreatePosts < ActiveRecord::Migration[5.2]
def change
create_table :posts do |t|
t.references :incident, index: true, foreign_key: {on_delete: :nullify}
t.references :user, index: true, foreign_key: {on_delete: :nullify}
t.string :user_pic
t.text :content
t.text :content_local
t.timestamps
end
end
end
|
ShinRyubi/warnme
|
web/app/models/user.rb
|
class User < ApplicationRecord
acts_as_taggable
belongs_to :incident, optional: true
validates_uniqueness_of :email
devise :database_authenticatable, :registerable
#, :recoverable, :rememberable, :trackable, :validatable
#:recoverable, :rememberable, :validatable
end
|
ShinRyubi/warnme
|
web/app/controllers/sessions_controller.rb
|
class SessionsController < Devise::SessionsController
before_action :configure_permitted_parameters #, if: :devise_controller?
protected
def configure_permitted_parameters
devise_parameter_sanitizer.permit(:sign_in,
keys: [:name, :email, :password, :password_confirmation,
:city, :address, :photo, :info
])
end
def failure
redirect_to root_path
end
end
|
ShinRyubi/warnme
|
web/app/views/incidents/_incident.json.jbuilder
|
json.extract! incident, :id, :created_at, :updated_at
json.url incident_url(incident, format: :json)
|
ShinRyubi/warnme
|
web/app/controllers/users_controller.rb
|
class UsersController < ApplicationController
before_action :authenticate_user!, only: [:new, :index, :edit, :update, :destroy, :male, :female, :featured]
before_action :set_user, only: [:show, :edit, :update, :destroy, :upvote, :downvote, :cancel_account]
def index
@users = User.all
@posts = Post.all.order("created_at DESC").limit(7)
end
def show
@tags = User.all.tag_counts_on(:tags)
end
def edit
end
def update
respond_to do |format|
if @user.update(user_params)
format.html { redirect_to @user, notice: t('helpers.successfully_updated') }
format.json { render :show, status: :ok, location: @user }
format.js do
end
else
format.html { render :edit }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end
def destroy
@user.destroy
respond_to do |format|
format.html { redirect_to users_url, notice: t('helpers.successfully_deleted') }
format.json { head :no_content }
end
end
private
def set_user
@user = User.find(params[:id])
end
# Duc's code
# def blank_database(f)
# !current_user.f.blank?
# end
def user_params
params.require(:user).permit(
:name,
:address,
:city,
:password, :tag_list)
end
end
|
ShinRyubi/warnme
|
web/db/migrate/20181018083635_create_donations.rb
|
class CreateDonations < ActiveRecord::Migration[5.2]
def change
create_table :donations do |t|
t.references :user, index: true, foreign_key: {on_delete: :nullify}
t.integer :target_id
t.string :name
t.integer :amount
t.timestamps
end
end
end
|
ShinRyubi/warnme
|
web/app/models/incident.rb
|
class Incident < ApplicationRecord
has_many :posts
has_many :users
has_many_attached :images
acts_as_taggable # Alias for acts_as_taggable_on :tags
end
|
ShinRyubi/warnme
|
web/db/migrate/20181017010705_create_incidents.rb
|
class CreateIncidents < ActiveRecord::Migration[5.2]
def change
create_table :incidents do |t|
t.string :name
t.string :address
t.float :latitude
t.float :longitude
t.text :content
t.text :content_local
t.string :kind
t.string :photo # for url seeding
t.timestamps
end
end
end
|
ShinRyubi/warnme
|
web/app/models/incident_user.rb
|
class IncidentUser < ApplicationRecord
# has_many :users, dependent: :destroy, join_table: :syndications, optional: true
has_many :users, dependent: :destroy # (optional: is only valid on belongs_to associations)
belongs_to :incident, optional: true
end
|
ShinRyubi/warnme
|
web/config/application.rb
|
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module WarnMe
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
config.to_prepare do
DeviseController.respond_to :html, :json
end
config.action_view.raise_on_missing_translations = false
config.active_record.default_timezone = :local
config.active_record.time_zone_aware_types = [:datetime, :time]
config.i18n.default_locale = :'en'
config.i18n.fallbacks = true # use the default locale when nothing can be found
I18n.config.enforce_available_locales = false
end
end
|
ShinRyubi/warnme
|
web/app/controllers/incidents_controller.rb
|
class IncidentsController < ApplicationController
before_action :set_incident, only: [:show, :edit, :update, :destroy]
def index
@incidents = Incident.all
end
def show
@map_hash = Gmaps4rails.build_markers(@incident) do |location, marker|
marker.lat location.latitude
marker.lng location.longitude
end
end
def new
@incident = Incident.new
end
def edit
end
def create
@incident = Incident.new(incident_params)
respond_to do |format|
if @incident.save
format.html { redirect_to @incident, notice: 'Incident was successfully created.' }
format.json { render :show, status: :created, location: @incident }
else
format.html { render :new }
format.json { render json: @incident.errors, status: :unprocessable_entity }
end
end
end
def update
respond_to do |format|
if @incident.update(incident_params)
format.html { redirect_to @incident, notice: 'Incident was successfully updated.' }
format.json { render :show, status: :ok, location: @incident }
else
format.html { render :edit }
format.json { render json: @incident.errors, status: :unprocessable_entity }
end
end
end
def destroy
@incident.destroy
respond_to do |format|
format.html { redirect_to incidents_url, notice: 'Incident was successfully destroyed.' }
format.json { head :no_content }
end
end
private
def set_incident
@incident = Incident.find(params[:id])
end
def incident_params
params.require(:incident).permit(:name, :address, :city, :items, images: [])
end
end
|
ShinRyubi/warnme
|
web/db/seeds.rb
|
require 'open-uri'
puts "Seeding incidents.."
File.open("schema - incidents.csv", "r") do |f|
f.each_with_index do |line, index|
name, address, lat, long, content, content_local, kind, photo = line.chomp.split(",")
Incident.create(name: name, address: address, latitude: lat, longitude: long, content: content, content_local: content_local, kind: kind, photo: photo)
end
end
puts "Seeding users.."
File.open("schema - users.csv", "r") do |f|
f.each_with_index do |line, index|
incident_id, email, password, name, city, address, photo, info = line.chomp.split(",")
User.create(incident_id: incident_id, email: email, password: password, name: name, city: city, address: address, photo: photo, info: info)
end
end
puts "Seeding posts.."
File.open("schema - posts.csv", "r") do |f|
f.each_with_index do |line, index|
incident_id, user_id, user_pic, content, content_local = line.chomp.split(",")
Post.create(incident_id: incident_id, user_id: user_id, user_pic: user_pic, content: content, content_local: content_local)
end
end
|
ShinRyubi/warnme
|
web/app/models/post.rb
|
class Post < ApplicationRecord
belongs_to :incident, optional: true
end
|
ShinRyubi/warnme
|
web/config/routes.rb
|
Rails.application.routes.draw do
devise_for :users, controllers: { sessions: 'sessions', registrations: 'registrations' }
scope "(:locale)", locale: /#{I18n.available_locales.join("|")}/, defaults: {locale: "en"} do
root 'posts#index'
resources :donations
resources :incidents
resources :items
resources :posts
resources :users
end
end
|
ShinRyubi/warnme
|
web/app/controllers/registrations_controller.rb
|
class RegistrationsController < Devise::RegistrationsController
before_action :configure_permitted_parameters
protected
def after_inactive_sign_up_path_for(resource)
posts_path(session[:registration_params])
end
def update_resource(resource, params)
resource.update_without_password(params)
end
end
|
pj4533/mixpanel_statusboard
|
mixpanel_statusboard.rb
|
require 'sinatra'
require 'mixpanel_client'
require 'json'
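# Serves an HTML panel (sized for Panic's Status Board) that polls
# /mixpanel_number every 5 minutes and shows the returned count under the title.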
get '/mixpanel_html' do
api_key = params[:api_key]
api_secret = params[:api_secret]
event = params[:event]
event_type = 'general'
if params[:event_type]
event_type = params[:event_type]
end
title = params[:title]
width = 698
height = 506
if params[:width]
width = params[:width]
end
if params[:height]
height = params[:height]
end
html = "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">
<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\">
<head>
<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf8\" />
<meta http-equiv=\"Cache-control\" content=\"no-cache\" />
<style type=\"text/css\">
@font-face
{
font-family: \"Roadgeek2005SeriesD\";
src: url(\"http://panic.com/fonts/Roadgeek 2005 Series D/Roadgeek 2005 Series D.otf\");
}
body, *
{
}
body,div,dl,dt,dd,ul,ol,li,h1,h2,h3,h4,h5,h6,pre,form,fieldset,input,textarea,p,blockquote,th,td
{
margin: 0;
padding: 0;
}
fieldset,img
{
border: 0;
}
/* Settin' up the page */
html, body, #main
{
overflow: hidden; /* */
}
body
{
color: white;
font-family: 'Roadgeek2005SeriesD', sans-serif;
font-size: 20px;
line-height: 24px;
}
body, html, #main
{
background: transparent !important;
}
#spacepeopleContainer
{
width: #{width}px;
height: #{height}px;
text-align: center;
}
#spacepeopleContainer *
{
font-weight: normal;
}
h1
{
font-size: 240px;
line-height: 240px;
margin-top: 15px;
margin-bottom: 28px;
color: white;
text-shadow:0px -2px 0px black;
text-transform: uppercase;
}
h2
{
width: 180px;
margin: 0px auto;
padding-top: 20px;
font-size: 32px;
line-height: 36px;
color: #7e7e7e;
text-transform: uppercase;
}
</style>
<script type=\"text/javascript\">
function refresh()
{
var req = new XMLHttpRequest();
console.log(\"Refreshing Count...\");
req.onreadystatechange=function() {
if (req.readyState==4 && req.status==200) {
document.getElementById('howmany').innerText = req.responseText;
}
}
req.open(\"GET\", '/mixpanel_number?api_key=#{api_key}&api_secret=#{api_secret}&event=#{event}&event_type=#{event_type}', true);
req.send(null);
}
function init()
{
// Change page background to black if the URL contains \"?desktop\", for debugging while developing on your computer
if (document.location.href.indexOf('desktop') > -1)
{
document.getElementById('spacepeopleContainer').style.backgroundColor = 'black';
}
refresh()
var int=self.setInterval(function(){refresh()},300000);
}
</script>
</head>
<body onload=\"init()\">
<div id=\"main\">
<div id=\"spacepeopleContainer\">
<h2>#{title}</h2>
<h1 id=\"howmany\"></h1>
</div><!-- spacepeopleContainer -->
</div><!-- main -->
</body>
</html>"
html
end
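# Returns a single number as plain text: engage_total/engage_today as profile
# counts, engage_dau/engage_wau as the percentage of profiles seen in the last
# day/week, or the count of the named event over the requested unit/interval.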
get '/mixpanel_number' do
api_key = params[:api_key]
api_secret = params[:api_secret]
event = params[:event]
interval = 1
if params[:interval]
interval = params[:interval]
end
unit = 'day'
if params[:unit]
unit = params[:unit]
end
event_type = 'general'
if params[:event_type]
event_type = params[:event_type]
end
config = {api_key: api_key, api_secret: api_secret}
client = Mixpanel::Client.new(config)
if event == 'engage_total'
data = client.request('engage', {
where: "not properties[\"User Type\"]"
})
"#{data['total']}"
elsif event == 'engage_dau'
data = client.request('engage', {
where: "not properties[\"User Type\"]"
})
total_users = data['total']
data = client.request('engage', {
where: "((datetime(#{Time.now.to_i} - 86400) < properties[\"$last_seen\"])) and (not properties[\"User Type\"])"
})
last_day_users = data['total']
"#{((last_day_users.to_f / total_users.to_f) * 100.0).to_i }"
elsif event == 'engage_wau'
data = client.request('engage', {
where: "not properties[\"User Type\"]"
})
total_users = data['total']
data = client.request('engage', {
where: "((datetime(#{Time.now.to_i} - 604800) < properties[\"$last_seen\"])) and (not properties[\"User Type\"])"
})
last_week_users = data['total']
"#{((last_week_users.to_f / total_users.to_f) * 100.0).to_i }"
elsif event == 'engage_today'
seconds_since_midnight = ((Time.now.hour.to_i) * 60 * 60) + (Time.now.min.to_i * 60) + (Time.now.sec.to_i)
data = client.request('engage', {
where: "((datetime(#{Time.now.to_i} - #{seconds_since_midnight}) < properties[\"$last_seen\"])) and (not properties[\"User Type\"])"
})
"#{data['total']}"
else
t = Time.now.utc - 18000
today_date_string = t.strftime("%Y-%m-%d")
data = client.request('events', {
event: [ event ],
unit: unit,
type: event_type,
interval: interval
})
if interval == 1
date_string = data["data"]["series"][0]
event_hash = data["data"]["values"][event]
"#{event_hash[date_string]}"
else
output_number = 0
data["data"]["values"][event].each do |unit_string,unitvalue|
output_number += unitvalue
end
"#{output_number}"
end
end
end
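# Summarizes a funnel since 2014-07-14: total completions, total starts,
# and the overall completion percentage.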
get '/mixpanel_funnel' do
return unless params[:funnel_id]
api_key = params[:api_key]
api_secret = params[:api_secret]
funnel_id = params[:funnel_id]
config = {api_key: api_key, api_secret: api_secret}
client = Mixpanel::Client.new(config)
today = Time.now.utc - 18000
data = client.request('funnels', {
funnel_id: funnel_id,
from_date: '2014-07-14',
to_date: today.strftime("%Y-%m-%d")
})
dates = data['meta']['dates']
completed = 0
started = 0
dates.each do |date|
completed += data['data'][date]['analysis']['completion']
started += data['data'][date]['analysis']['starting_amount']
end
percentage = (completed.to_f / started.to_f).round(2) * 100
"completed: #{completed}\nstarted: #{started}\npercentage: #{percentage}"
end
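# Builds a Status Board table: today's counts for the event segmented by the
# `on` property, optionally joined with people-profile fields (display_props),
# sorted by count in descending order.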
get '/mixpanel' do
api_key = params[:api_key]
api_secret = params[:api_secret]
event = params[:event]
on_prop = params[:on]
title = params[:title]
display_props = params[:display_props]
layout = "80%,20%"
if params[:layout]
layout = params[:layout]
end
engage_prop = on_prop
if params[:engage_prop]
engage_prop = params[:engage_prop]
end
limit = 50
if params[:limit]
limit = params[:limit]
end
type = 'general'
if params[:type]
type = params[:type]
end
config = {api_key: api_key, api_secret: api_secret}
client = Mixpanel::Client.new(config)
t = Time.now.utc - 18000
today_date_string = t.strftime("%Y-%m-%d")
response = "#{layout}\n#{title},\n"
data = client.request('segmentation', {
event: event,
name: 'feature',
limit: limit,
type: type,
from_date: today_date_string,
to_date: today_date_string,
on: "properties[\"#{on_prop}\"]"
})
date_string = data["data"]["series"][0]
where_string = ""
first_time = true
prop_array = []
launches = Hash[]
data["data"]["values"].each do |prop, value|
prop = prop.gsub(",","")
prop = prop.gsub("\"","")
if first_time
where_string = "(properties[\"#{engage_prop}\"] == \"#{prop}\")"
first_time = false
else
where_string = where_string + " or (properties[\"#{engage_prop}\"] == \"#{prop}\")"
end
prop_array.push(prop)
launches[prop] = value[date_string]
end
array_to_sort = []
if display_props and prop_array.count > 0
engagedata = client.request('engage', {
where: where_string
})
engagedata["results"].each do |person|
response_string = "#{person['$properties'][engage_prop]}"
display_props.each do |display_prop|
response_string += ",#{person['$properties'][display_prop]}"
end
response_string += ",#{launches[person['$properties'][engage_prop]]}\n"
array_to_sort.push(Hash[:response_string => response_string, :launches => launches[person['$properties'][engage_prop]] ] )
end
else
prop_array.each do |prop|
response_string = "#{prop},#{launches[prop]}\n"
array_to_sort.push(Hash[:response_string => response_string, :launches => launches[prop] ])
end
end
array_to_sort.sort_by { |hsh| hsh[:launches] }.reverse.each do |hsh|
response += hsh[:response_string]
end
response
end
|
pj4533/mixpanel_statusboard
|
lib/url_builder.rb
|
#!/usr/bin/env ruby
require 'uri'
require 'cgi'
require 'commander/import'
require 'version'
module MixPanelSB
class URLBuilder
def build
program :help_formatter, :compact
program :name, 'Dolly'
program :version, VERSION
program :description, 'Clone and genetically modify premium apps.'
command :funnel do |c|
c.syntax = 'mixpanel funnel'
c.description = 'Build StatusBoard URL for Funnel'
c.action do |args, options|
puts "Enter URL (no http): "
url = $stdin.gets.chomp.strip
puts "Enter API key: "
apikey = $stdin.gets.chomp.strip
puts "Enter API secret: "
apisecret = $stdin.gets.chomp.strip
puts "Enter MixPanel Funnel ID: "
funnel_id = $stdin.gets.chomp.strip
puts "Enter StatusBoard Title: "
title = $stdin.gets.chomp.strip
url_string = URI.escape("http://#{url}/mixpanel_funnel?api_key=#{apikey}&api_secret=#{apisecret}&title=#{title}&funnel_id=#{funnel_id}&unit=day")
puts "LOCAL:\ncurl \"#{url_string}\"\n\n"
url_string = CGI.escape(url_string)
puts "APP:\npanicboard://?url=#{url_string}&panel=table&sourceDisplayName=MPStatus\n\n"
end
end
command :event do |c|
c.syntax = 'mixpanel event'
c.description = 'Build StatusBoard URL for Event'
c.action do |args, options|
puts "Enter URL (no http): "
url = $stdin.gets.chomp.strip
puts "Enter API key: "
apikey = $stdin.gets.chomp.strip
puts "Enter API secret: "
apisecret = $stdin.gets.chomp.strip
puts "Enter Mixpanel Event: "
event = $stdin.gets.chomp.strip
puts "Enter Mixpanel Event Property: "
prop = $stdin.gets.chomp.strip
puts "Enter StatusBoard Title: "
title = $stdin.gets.chomp.strip
mp_limit = '50'
puts "Enter Limit [default=50]: "
limit = $stdin.gets.chomp.strip
mp_type = 'general'
puts "Enter Type [default=general]: "
type = $stdin.gets.chomp.strip
mp_provider = 'MPStatus'
puts "Enter Provider [default=MPStatus]: "
provider = $stdin.gets.chomp.strip
if limit != ""
mp_limit = limit
end
if type != ""
mp_type = type
end
if provider != ""
mp_provider = provider
end
url_string = URI.escape("http://#{url}/mixpanel?api_key=#{apikey}&api_secret=#{apisecret}&on=#{prop}&title=#{title}&event=#{event}&type=#{mp_type}&limit=#{mp_limit}")
puts "LOCAL:\ncurl \"#{url_string}\"\n\n"
url_string = CGI.escape(url_string)
puts "APP:\npanicboard://?url=#{url_string}&panel=table&sourceDisplayName=#{mp_provider}\n\n"
end
end
end
end
end
|
pj4533/mixpanel_statusboard
|
mixpanel_statusboard.gemspec
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'version'
Gem::Specification.new do |spec|
spec.name = "mixpanel_statusboard"
spec.version = MixPanelSB::VERSION
spec.authors = ["<NAME>"]
spec.email = ["<EMAIL>"]
spec.summary = "Generate StatusBoard URLs for MixPanel"
spec.description = "Generate StatusBoard URLs for MixPanel"
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.5"
spec.add_development_dependency "rake"
spec.add_dependency 'commander'
spec.add_dependency 'sinatra', '1.1.0'
spec.add_dependency 'mixpanel_client'
end
|
PierreR/puppetlabs-java
|
spec/classes/java_spec.rb
|
require 'spec_helper'
describe 'java', :type => :class do
context 'select openjdk for Centos 5.8' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Centos', :operatingsystemrelease => '5.8'} }
it { should contain_package('java').with_name('java-1.6.0-openjdk-devel') }
end
context 'select openjdk for Centos 6.3' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Centos', :operatingsystemrelease => '6.3'} }
it { should contain_package('java').with_name('java-1.7.0-openjdk-devel') }
end
context 'select openjdk for Centos 6.2' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Centos', :operatingsystemrelease => '6.2'} }
it { should contain_package('java').with_name('java-1.6.0-openjdk-devel') }
it { should_not contain_exec('update-java-alternatives') }
end
context 'select Oracle JRE with alternatives for Centos 6.3' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Centos', :operatingsystemrelease => '6.3'} }
let(:params) { { 'package' => 'jre', 'java_alternative' => '/usr/bin/java', 'java_alternative_path' => '/usr/java/jre1.7.0_67/bin/java'} }
it { should contain_package('java').with_name('jre') }
it { should contain_exec('create-java-alternatives').with_command('alternatives --install /usr/bin/java java /usr/java/jre1.7.0_67/bin/java 20000') }
it { should contain_exec('update-java-alternatives').with_command('alternatives --set java /usr/java/jre1.7.0_67/bin/java') }
end
context 'select openjdk for Fedora 20' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Fedora', :operatingsystemrelease => '20'} }
it { should contain_package('java').with_name('java-1.7.0-openjdk-devel') }
end
context 'select openjdk for Fedora 21' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Fedora', :operatingsystemrelease => '21'} }
it { should contain_package('java').with_name('java-1.8.0-openjdk-devel') }
end
context 'select passed value for Fedora 20' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Fedora', :operatingsystemrelease => '20'} }
let(:params) { { 'distribution' => 'jre' } }
it { should contain_package('java').with_name('java-1.7.0-openjdk') }
end
context 'select passed value for Fedora 21' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Fedora', :operatingsystemrelease => '21'} }
let(:params) { { 'distribution' => 'jre' } }
it { should contain_package('java').with_name('java-1.8.0-openjdk') }
end
context 'select passed value for Centos 5.3' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Centos', :operatingsystemrelease => '5.3'} }
let(:params) { { 'package' => 'jdk' } }
it { should contain_package('java').with_name('jdk') }
it { should_not contain_exec('update-java-alternatives') }
end
context 'select default for Centos 5.3' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Centos', :operatingsystemrelease => '5.3'} }
it { should contain_package('java').with_name('java-1.6.0-openjdk-devel') }
it { should_not contain_exec('update-java-alternatives') }
end
context 'select default for Debian Wheezy' do
let(:facts) { {:osfamily => 'Debian', :operatingsystem => 'Debian', :lsbdistcodename => 'wheezy', :operatingsystemrelease => '7.1', :architecture => 'amd64',} }
it { should contain_package('java').with_name('openjdk-7-jdk') }
it { should contain_exec('update-java-alternatives').with_command('update-java-alternatives --set java-1.7.0-openjdk-amd64 --jre') }
end
context 'select Oracle JRE for Debian Wheezy' do
let(:facts) { {:osfamily => 'Debian', :operatingsystem => 'Debian', :lsbdistcodename => 'wheezy', :operatingsystemrelease => '7.1', :architecture => 'amd64',} }
let(:params) { { 'distribution' => 'oracle-jre' } }
it { should contain_package('java').with_name('oracle-j2re1.7') }
it { should contain_exec('update-java-alternatives').with_command('update-java-alternatives --set j2re1.7-oracle --jre') }
end
context 'select OpenJDK JRE for Debian Wheezy' do
let(:facts) { {:osfamily => 'Debian', :operatingsystem => 'Debian', :lsbdistcodename => 'wheezy', :operatingsystemrelease => '7.1', :architecture => 'amd64',} }
let(:params) { { 'distribution' => 'jre' } }
it { should contain_package('java').with_name('openjdk-7-jre-headless') }
it { should contain_exec('update-java-alternatives').with_command('update-java-alternatives --set java-1.7.0-openjdk-amd64 --jre-headless') }
end
context 'select default for Debian Squeeze' do
let(:facts) { {:osfamily => 'Debian', :operatingsystem => 'Debian', :lsbdistcodename => 'squeeze', :operatingsystemrelease => '6.0.5', :architecture => 'amd64',} }
it { should contain_package('java').with_name('openjdk-6-jdk') }
it { should contain_exec('update-java-alternatives').with_command('update-java-alternatives --set java-6-openjdk-amd64 --jre') }
end
context 'select Oracle JRE for Debian Squeeze' do
let(:facts) { {:osfamily => 'Debian', :operatingsystem => 'Debian', :lsbdistcodename => 'squeeze', :operatingsystemrelease => '6.0.5', :architecture => 'amd64',} }
let(:params) { { 'distribution' => 'sun-jre', } }
it { should contain_package('java').with_name('sun-java6-jre') }
it { should contain_exec('update-java-alternatives').with_command('update-java-alternatives --set java-6-sun --jre') }
end
context 'select OpenJDK JRE for Debian Squeeze' do
let(:facts) { {:osfamily => 'Debian', :operatingsystem => 'Debian', :lsbdistcodename => 'squeeze', :operatingsystemrelease => '6.0.5', :architecture => 'amd64',} }
let(:params) { { 'distribution' => 'jre', } }
it { should contain_package('java').with_name('openjdk-6-jre-headless') }
it { should contain_exec('update-java-alternatives').with_command('update-java-alternatives --set java-6-openjdk-amd64 --jre-headless') }
end
context 'select random alternative for Debian Wheezy' do
let(:facts) { {:osfamily => 'Debian', :operatingsystem => 'Debian', :lsbdistcodename => 'wheezy', :operatingsystemrelease => '7.1', :architecture => 'amd64',} }
let(:params) { { 'java_alternative' => 'bananafish' } }
it { should contain_package('java').with_name('openjdk-7-jdk') }
it { should contain_exec('update-java-alternatives').with_command('update-java-alternatives --set bananafish --jre') }
end
context 'select openjdk for Amazon Linux' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Amazon', :operatingsystemrelease => '3.4.43-43.43.amzn1.x86_64'} }
it { should contain_package('java').with_name('java-1.7.0-openjdk-devel') }
end
context 'select passed value for Amazon Linux' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Amazon', :operatingsystemrelease => '5.3.4.43-43.43.amzn1.x86_64'} }
let(:params) { { 'distribution' => 'jre' } }
it { should contain_package('java').with_name('java-1.7.0-openjdk') }
end
context 'select openjdk for Oracle Linux' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'OracleLinux', :operatingsystemrelease => '6.4'} }
it { should contain_package('java').with_name('java-1.7.0-openjdk-devel') }
end
context 'select openjdk for Oracle Linux 6.2' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'OracleLinux', :operatingsystemrelease => '6.2'} }
it { should contain_package('java').with_name('java-1.6.0-openjdk-devel') }
end
context 'select passed value for Oracle Linux' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'OracleLinux', :operatingsystemrelease => '6.3'} }
let(:params) { { 'distribution' => 'jre' } }
it { should contain_package('java').with_name('java-1.7.0-openjdk') }
end
context 'select passed value for Scientific Linux' do
let(:facts) { {:osfamily => 'RedHat', :operatingsystem => 'Scientific', :operatingsystemrelease => '6.4'} }
let(:params) { { 'distribution' => 'jre' } }
it { should contain_package('java').with_name('java-1.7.0-openjdk') }
end
context 'select default for OpenSUSE 12.3' do
let(:facts) { {:osfamily => 'Suse', :operatingsystem => 'OpenSUSE', :operatingsystemrelease => '12.3'}}
it { should contain_package('java').with_name('java-1_7_0-openjdk-devel')}
end
describe 'incompatible OSs' do
[
{
# C14706
:osfamily => 'windows',
:operatingsystem => 'windows',
:operatingsystemrelease => '8.1',
},
{
# C14707
:osfamily => 'Darwin',
:operatingsystem => 'Darwin',
:operatingsystemrelease => '13.3.0',
},
{
# C14708
:osfamily => 'AIX',
:operatingsystem => 'AIX',
:operatingsystemrelease => '7100-02-00-000',
},
{
# C14708
:osfamily => 'AIX',
:operatingsystem => 'AIX',
:operatingsystemrelease => '6100-07-04-1216',
},
{
# C14708
:osfamily => 'AIX',
:operatingsystem => 'AIX',
:operatingsystemrelease => '5300-12-01-1016',
},
].each do |facts|
let(:facts) { facts }
it "should fail on #{facts[:operatingsystem]} #{facts[:operatingsystemrelease]}" do
expect { subject }.to raise_error Puppet::Error, /unsupported platform/
end
end
end
end
|
yaauie/logstash-codec-es_bulk
|
lib/logstash/codecs/es_bulk.rb
|
# encoding: utf-8
require "logstash/codecs/base"
require "logstash/codecs/line"
require "logstash/json"
require 'logstash/plugin_mixins/ecs_compatibility_support'
require 'logstash/plugin_mixins/ecs_compatibility_support/target_check'
require 'logstash/plugin_mixins/validator_support/field_reference_validation_adapter'
require 'logstash/plugin_mixins/event_support/event_factory_adapter'
require 'logstash/plugin_mixins/event_support/from_json_helper'
# This codec will decode the http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-bulk.html[Elasticsearch bulk format]
# into individual events, plus metadata into the `@metadata` field.
#
# Encoding is not supported at this time as the Elasticsearch
# output submits Logstash events in bulk format.
class LogStash::Codecs::ESBulk < LogStash::Codecs::Base
config_name "es_bulk"
include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1)
include LogStash::PluginMixins::ECSCompatibilitySupport::TargetCheck
extend LogStash::PluginMixins::ValidatorSupport::FieldReferenceValidationAdapter
include LogStash::PluginMixins::EventSupport::EventFactoryAdapter
# Defines a target field for placing decoded fields.
# If this setting is omitted, data gets stored at the root (top level) of the event.
#
# NOTE: the target is only relevant while decoding data into a new event.
config :target, :validate => :field_reference
public
def initialize(params={})
super(params)
@lines = LogStash::Codecs::Line.new
@lines.charset = "UTF-8"
@state = :initial
@metadata = Hash.new
@metadata_field = ecs_select[disabled: '[@metadata]', v1: '[@metadata][codec][es_bulk]']
end
def register
end
public
def decode(data)
@lines.decode(data) do |bulk|
begin
line = LogStash::Json.load(bulk.get("message"))
case @state
when :metadata
event = targeted_event_factory.new_event(line)
event.set(@metadata_field, @metadata)
yield event
@state = :initial
when :initial
@metadata = line[line.keys[0]]
@metadata["action"] = line.keys[0].to_s
@state = :metadata
if line.keys[0] == 'delete'
event = targeted_event_factory.new_event
event.set(@metadata_field, @metadata)
yield event
@state = :initial
end
end
rescue LogStash::Json::ParserError => e
@logger.error("JSON parse failure. ES Bulk messages must be in UTF-8 JSON", :error => e, :data => data)
end
end
end # def decode
end # class LogStash::Codecs::ESBulk
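# A hedged decode sketch (illustrative only; requires the Logstash plugin
# environment on the load path, and the bulk payload below is an assumption):
if __FILE__ == $PROGRAM_NAME
  codec = LogStash::Codecs::ESBulk.new
  payload = <<~BULK
    {"index":{"_index":"logs","_id":"1"}}
    {"message":"hello","status":200}
  BULK
  codec.decode(payload) do |event|
    # Source fields land on the event; the action/_index/_id land under @metadata.
    puts event.get("message") # => "hello"
  end
end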
|
UAlbanyArchives/arclight-UAlbany
|
lib/arclight/normalized_date.rb
|
# frozen_string_literal: true
module Arclight
##
# A utility class to normalize dates, typically by joining inclusive and bulk dates
# e.g., "1990-2000, bulk 1990-1999"
# @see http://www2.archivists.org/standards/DACS/part_I/chapter_2/4_date
class NormalizedDate
# @param [Array<String, Array<String>>] `inclusiveHash` inclusive dates from the `unitdate`
# @param [Array<String>, nil] `bulkHash` bulk dates from the `unitdate`
# @param [Array<String>, nil] `otherHash` dates from the `unitdate` when no type is specified
def initialize(inclusiveHash, bulkHash = nil, otherHash = nil)
@inclusive = []
@bulk = []
@other = []
inclusiveHash.each do |inclusive|
if inclusive.is_a? Array # of YYYY-YYYY for ranges
@inclusive << YearRange.new(inclusive.include?('/') ? inclusive : inclusive.map { |v| v.tr('-', '/') }).to_s
elsif inclusive.present?
@inclusive << inclusive.strip
end
end
(bulkHash || []).each do |bulk| # guard against the nil default
@bulk << bulk.strip if bulk.present?
end
(otherHash || []).each do |other| # guard against the nil default
@other << other.strip if other.present?
end
@inclusive = @inclusive.join(", ")
@bulk = @bulk.join(", ")
@other = @other.join(", ")
end
# @return [String] the normalized title/date
def to_s
normalize
end
private
attr_reader :inclusive, :bulk, :other
# @see http://www2.archivists.org/standards/DACS/part_I/chapter_2/4_date for rules
def normalize
if inclusive.present?
if other.present?
result = other.to_s
result << ", #{inclusive}"
result << ", bulk #{bulk}" if bulk.present?
else
result = inclusive.to_s
result << ", bulk #{bulk}" if bulk.present?
end
elsif other.present?
result = other.to_s
else
result = nil
end
return if result.blank?
result.strip
end
end
end
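# A hedged usage sketch (illustrative; assumes the activesupport gem is
# installed, since present?/blank? come from ActiveSupport):
if __FILE__ == $PROGRAM_NAME
  require 'active_support/core_ext/object/blank'
  puts Arclight::NormalizedDate.new(['1990-2000'], ['1990-1999'], []).to_s
  # => "1990-2000, bulk 1990-1999"
end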
|
UAlbanyArchives/arclight-UAlbany
|
config/initializers/licenses.rb
|
LICENCES = YAML.load_file(Rails.root.join('config/licenses.yml'))
|
UAlbanyArchives/arclight-UAlbany
|
lib/arclight/repository.rb
|
# frozen_string_literal: true
module Arclight
#
# Static information about a given repository identified by a unique `slug`
#
class Repository
include ActiveModel::Conversion # for to_partial_path
FIELDS = %i[name
description
tagline
visit_note
building
address1
address2
city
state
zip
country
phone
contact_info
thumbnail_url
google_request_url
google_request_mappings
collection_count].freeze
attr_accessor :slug, *FIELDS
# @param [String] `slug` the unique identifier for the repository
# @param [Hash] `data`
def initialize(slug, data = {})
@slug = slug
FIELDS.each do |field|
value = data[field.to_s]
send("#{field}=", value) if value.present?
end
end
# @return [String] handles the formatting of "city, state zip, country"
def city_state_zip_country
state_zip = state
state_zip += " #{zip}" if zip
[city, state_zip, country].compact.join(', ')
end
# Load repository information from a YAML file
#
# @param [String] `filename`
# @return [Hash<Slug,Repository>]
def self.from_yaml(file)
repos = {}
data = YAML.safe_load(File.read(file))
data.keys.each do |slug|
repos[slug] = new(slug, data[slug])
end
repos
end
# Mimics ActiveRecord's `all` behavior
#
# @return [Array<Repository>]
def self.all
from_yaml(ENV['REPOSITORY_FILE'] || 'config/repositories.yml').values
end
# Mimics ActiveRecord dynamic `find_by` behavior for the slug or name
#
# @param [String] `slug` or `name`
# @return [Repository]
def self.find_by(slug: nil, name: nil)
if slug
all.find { |repo| repo.slug == slug }
elsif name
all.find { |repo| repo.name == name }
else
raise ArgumentError, 'Requires either slug or name parameters to find_by'
end
end
# Mimics ActiveRecord dynamic `find_by!` behavior for the slug or name
#
# @param [String] `slug` or `name` -- same as `find_by`
# @return [Repository]
# @raise [ActiveRecord::RecordNotFound] if cannot find repository
def self.find_by!(*args)
repository = find_by(*args)
raise ActiveRecord::RecordNotFound if repository.blank?
repository
end
end
end
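# A hedged usage sketch (illustrative; assumes the Rails/Arclight environment
# is loaded, and the YAML contents and the slug 'ua-archives' are assumptions):
if __FILE__ == $PROGRAM_NAME
  repos = Arclight::Repository.from_yaml('config/repositories.yml') # slug => Repository
  repo = Arclight::Repository.find_by(slug: 'ua-archives')
  puts repo.city_state_zip_country if repo
end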
|
UAlbanyArchives/arclight-UAlbany
|
lib/arclight/shared_indexing_behavior.rb
|
# frozen_string_literal: true
module Arclight
##
# A mixin intended to share indexing behavior between
# the CustomDocument and CustomComponent classes
module SharedIndexingBehavior
# @see http://eadiva.com/2/unitdate/
# @return [YearRange] all of the years between the given years
def unitdate_for_range
range = YearRange.new
return range if normal_unit_dates.blank?
range << range.parse_ranges(normal_unit_dates)
range
end
def subjects_array(elements, parent:)
xpath_elements = elements.map { |el| "local-name()='#{el}'" }.join(' or ')
subjects = search("//#{parent}/controlaccess/*[#{xpath_elements}]").to_a
clean_facets_array(subjects.flatten.map(&:text))
end
def names_array(elements, parent:)
xpath_elements = elements.map { |el| "local-name()='#{el}'" }.join(' or ')
names = search("//#{parent}/controlaccess/*[#{xpath_elements}]").to_a
clean_facets_array(names.flatten.map(&:text))
end
# Return a cleaned array of facets without marc subfields
#
# E.g. clean_facets_array(
# ['FacetValue1 |z FacetValue2','FacetValue3']
# ) => ['FacetValue1 -- FacetValue2', 'FacetValue3']
def clean_facets_array(facets_array)
Array(facets_array).map { |text| fix_subfield_demarcators(text) }.compact.uniq
end
# Replace MARC style subfield demarcators
#
# Usage: fix_subfield_demarcators("Subject 1 |z Sub-Subject 2") => "Subject 1 -- Sub-Subject 2"
def fix_subfield_demarcators(value)
value.gsub(/\|\w{1}/, '--')
end
# Wrap OM's find_by_xpath for convenience
def search(path)
find_by_xpath(path) # rubocop:disable DynamicFindBy
end
# If a repository slug is provided via an environment variable `REPOSITORY_ID`,
# then use that to lookup the name rather than the parsed out name from the EAD
# @param [String] `repository` the default repository name
def repository_as_configured(repository)
slug = ENV['REPOSITORY_ID']
if slug.present?
begin
Arclight::Repository.find_by(slug: slug).name
rescue => e
raise "The repository slug '#{slug}' was given but it is not found in the Repository configuration data: #{e}"
end
else
repository
end
end
def add_digital_content(prefix:, solr_doc:)
dao = ng_xml.xpath("#{prefix}/dao").to_a
return if dao.blank?
field_name = Solrizer.solr_name('digital_objects', :displayable)
solr_doc[field_name] = digital_objects(dao)
end
def digital_objects(objects)
objects.map do |dao|
label = dao.attributes['title'].try(:value) || dao.xpath('daodesc/p').try(:text)
href = (dao.attributes['href'] || dao.attributes['xlink:href']).try(:value)
Arclight::DigitalObject.new(label: label, href: href).to_json
end
end
def add_date_ranges(solr_doc)
Solrizer.insert_field(solr_doc, 'date_range', unitdate_for_range.years, :facetable)
end
def add_normalized_title(solr_doc)
dates = Arclight::NormalizedDate.new(unitdate_inclusive, unitdate_bulk, unitdate_other).to_s
title = Arclight::NormalizedTitle.new(solr_doc['title_ssm'].try(:first), dates).to_s
solr_doc['normalized_title_ssm'] = [title]
solr_doc['normalized_date_ssm'] = [dates]
title
end
def online_content?
# Use the original way just for collection-level
if self.ng_xml.root.name == "ead"
search('//dao[@href]').present?
end
end
end
end
|
UAlbanyArchives/arclight-UAlbany
|
lib/arclight/solr_ead_indexer_ext.rb
|
# frozen_string_literal: true
module Arclight
##
# A module to extend SolrEad::Indexer behaviors, allowing us to add
# or override behaviors that require knowledge of the entire XML document.
module SolrEadIndexerExt
def additional_component_fields(node, addl_fields = {})
solr_doc = super
add_count_of_child_compontents(node, solr_doc)
add_ancestral_titles(node, solr_doc)
add_ancestral_ids(node, solr_doc)
add_collection_creator_to_component(node, solr_doc)
add_self_or_parents_restrictions(node, solr_doc)
online_content?(node, solr_doc)
add_self_or_parents_terms(node, solr_doc)
solr_doc
end
def delete_all
solr.delete_by_query('*:*')
solr.commit
end
private
# Note that we need to redo what solr_ead does for ids due to our normalization process
def add_ancestral_ids(node, solr_doc)
@parent_id_name ||= Solrizer.solr_name('parent', :stored_sortable)
@parent_ids_field_name ||= Solrizer.solr_name('parent', :displayable)
@parent_ids_search_field_name ||= Solrizer.solr_name('parent', :searchable)
ids = ancestral_ids(node)
solr_doc[@parent_ids_field_name] = ids
solr_doc[@parent_ids_search_field_name] = ids
solr_doc[@parent_id_name] = ids.last
end
# Note that we need to redo what solr_ead does for titles due to our normalization process
def add_ancestral_titles(node, solr_doc)
@parent_titles_field_name ||= Solrizer.solr_name('parent_unittitles', :displayable)
@parent_titles_search_field_name ||= Solrizer.solr_name('parent_unittitles', :searchable)
@collection_facet_name ||= Solrizer.solr_name('collection', :facetable)
@collection_name ||= Solrizer.solr_name('collection', :displayable)
titles = ancestral_titles(node)
solr_doc[@parent_titles_field_name] = titles
solr_doc[@parent_titles_search_field_name] = titles
solr_doc[@collection_name] = [titles.first] # collection is always on top
solr_doc[@collection_facet_name] = [titles.first]
end
def add_count_of_child_compontents(node, solr_doc)
@child_component_count_name ||= Solrizer.solr_name('child_component_count', type: :integer)
solr_doc[@child_component_count_name] = node.xpath('count(c)').to_i
end
def ancestral_ids(node)
ancestral_visit(node, :normalized_component_id, :normalized_collection_id)
end
def ancestral_titles(node)
ancestral_visit(node, :normalized_component_title, :normalized_collection_title)
end
# visit each component's parent and finish with a visit on the collection
def ancestral_visit(node, component_fn, collection_fn, results = [])
while node.parent && node.parent.name == 'c'
parent = node.parent
results << send(component_fn, parent)
node = parent
end
results << send(collection_fn, node)
results.reverse
end
def normalized_component_title(node)
data = extract_title_and_dates(node)
normalize_title(data)
end
def normalized_collection_title(node)
data = extract_title_and_dates(node, '//archdesc/')
normalize_title(data)
end
def normalize_title(data)
Arclight::NormalizedTitle.new(
data[:title],
Arclight::NormalizedDate.new(
data[:unitdate_inclusive],
data[:unitdate_bulk],
data[:unitdate_other]
).to_s
).to_s
end
# TODO: these xpaths should be DRY'd up -- they're in both terminologies
def extract_title_and_dates(node, prefix = nil)
data = {
unitdate_inclusive: [],
unitdate_bulk: [],
unitdate_other: []
}
data[:title] = node.xpath("#{prefix}did/unittitle").text if node.xpath("#{prefix}did/unittitle")
node.xpath("#{prefix}did/unitdate").each do |unitdate|
if unitdate.attr("type")
if unitdate.attr("type").downcase == "inclusive"
data[:unitdate_inclusive] << unitdate.text if unitdate
elsif unitdate.attr("type").downcase == "bulk"
data[:unitdate_bulk] << unitdate.text if unitdate
else
data[:unitdate_other] << unitdate.text if unitdate
end
else
data[:unitdate_other] << unitdate.text if unitdate
end
end
data
end
def normalized_component_id(node)
Arclight::NormalizedId.new(node['id'].to_s).to_s
end
def normalized_collection_id(node)
Arclight::NormalizedId.new(node.document.at_xpath('//eadid').text).to_s
end
# This mimics similar behavior in Arclight::CustomDocument
def add_collection_creator_to_component(node, solr_doc)
field_name = Solrizer.solr_name('collection_creator', :displayable)
repository = solr_doc[Solrizer.solr_name('repository', :displayable)]
creators = node.xpath('//archdesc/did/origination[@label="creator"]/*/text()').map(&:text)
solr_doc[field_name] = creators - [repository]
end
def parent_check_list(node, root_path, element_path, results = [])
original_node = node
#results = node.xpath("#{root_path}/#{element_path}").map(&:text)
# if current restriction return, else go up to parent and check
while node.parent.name == 'c' && results.blank?
parent = node.parent
results = parent.xpath("#{element_path}").map(&:text)
node = parent
end
# If no parental results, check the collection
results = original_node.xpath("//archdesc/#{element_path}").map(&:text) if results.blank?
results.flatten # can't use with flatten! because that returns nil
end
def add_self_or_parents_restrictions(node, solr_doc)
field_name = Solrizer.solr_name('parent_access_restrict', :displayable)
solr_doc[field_name] = parent_check_list(node, './', 'accessrestrict/p/text()')
solr_doc[field_name]
end
def add_self_or_parents_terms(node, solr_doc)
field_name = Solrizer.solr_name('parent_access_terms', :displayable)
solr_doc[field_name] = parent_check_list(node, './', 'userestrict/p/text()')
solr_doc[field_name]
end
def online_content?(node, solr_doc)
dao = node.xpath('.//dao[@href]').present?
Solrizer.set_field(solr_doc, "has_online_content", dao, :symbol)
end
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/lambda_block.rb
|
module LambdaBlock
private
def assert_funcs(fn, block_given)
raise 'Cannot pass both lambda and block expressions' if !fn.nil? && block_given
end
end
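# A hedged sketch of how an including class might use this guard
# (TinySeq is hypothetical and not part of the gem):
if __FILE__ == $PROGRAM_NAME
  class TinySeq
    include LambdaBlock
    def initialize(items)
      @items = items
    end
    # Accept either a lambda or a block, but never both.
    def map(fn = nil, &block)
      assert_funcs(fn, block_given?)
      TinySeq.new(@items.map(&(fn || block)))
    end
  end
  p TinySeq.new([1, 2, 3]).map(->(x) { x * 2 })            # fine: lambda only
  # TinySeq.new([1, 2, 3]).map(->(x) { x * 2 }) { |x| x }  # would raise RuntimeError
end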
|
raymanoz/totally_lazy
|
spec/totally_lazy/functions_spec.rb
|
require_relative '../spec_helper'
describe 'Functions' do
it 'should allow function composition and method chaining' do
add_2 = ->(value) { value+2 }
divide_by_2 = ->(value) { value/2 }
expect(sequence(10).map(divide_by_2 * add_2)).to eq(sequence(6))
expect(sequence(10).map(divide_by_2.and_then(add_2))).to eq(sequence(7))
end
end
|
raymanoz/totally_lazy
|
spec/totally_lazy/sequence_spec.rb
|
require_relative '../spec_helper'
describe 'Sequence' do
it 'should create empty sequence when iterable is nil' do
expect(sequence(nil)).to eq(empty)
end
it 'should support head' do
expect(sequence(1, 2).head).to eq(1)
end
it 'should return same result when head called many times' do
expect(sequence(1, 2).head).to eq(1)
expect(sequence(1, 2).head).to eq(1)
end
it 'should support head_option' do
expect(sequence(1).head_option).to eq(some(1))
expect(empty.head_option).to eq(none)
end
it 'should support reverse' do
expect(sequence(1, 2, 3).reverse).to eq(sequence(3, 2, 1))
end
it 'should support last' do
expect(sequence(1, 2, 3).last).to eq(3)
end
it 'should support last_option' do
expect(sequence(1, 2, 3).last_option).to eq(some(3))
expect(empty.last_option).to eq(none)
end
it 'should support tail' do
expect(sequence(1, 2, 3).tail).to eq(sequence(2, 3))
end
it 'should lazily return tail' do
expect(Sequence.new((1..Float::INFINITY).lazy).tail.head).to eq(2)
expect(range_from(100).tail.head).to eq(101)
end
it 'should return empty tail on sequence with 1 item' do
expect(sequence(1).tail).to eq(empty)
end
it 'should raise NoSuchElementException when getting a tail of empty' do
expect { empty.tail }.to raise_error(NoSuchElementException)
end
it 'should support init' do
expect(sequence(1, 2, 3).init).to eq(sequence(1, 2))
end
it 'should raise NoSuchElementException when getting init of an empty' do
expect { empty.init }.to raise_error(NoSuchElementException)
end
it 'should support map' do
expect(sequence(1, 2, 3).map(->(a) { a*2 })).to eq(sequence(2, 4, 6))
expect(sequence(1, 2, 3).map { |a| a*2 }).to eq(sequence(2, 4, 6))
end
it 'should ensure map is lazy' do
result = sequence(returns(1), call_raises(RuntimeError.new)).map(call_fn)
expect(result.head).to eq(1)
end
it 'should support filter' do
expect(sequence(1, 2, 3, 4).filter(even)).to eq(sequence(2, 4))
expect(sequence(1, 2, 3, 4).filter { |value| even.(value) }).to eq(sequence(2, 4))
end
it 'should ensure filter is lazy' do
result = sequence(returns(1), returns(2), call_raises(RuntimeError.new)).map(call_fn).filter(even)
expect(result.head).to eq(2)
end
it 'should support composite predicates' do
expect(sequence(1, 2, 3, 4).filter(is_not(even))).to eq(sequence(1, 3))
end
it 'should support reject' do
expect(sequence(1, 2, 3, 4).reject(even)).to eq(sequence(1, 3))
expect(sequence(1, 2, 3, 4).reject { |value| even.(value) }).to eq(sequence(1, 3))
end
it 'should support fold (aka fold_left)' do
expect(sequence(1, 2, 3).fold(0, sum)).to eq(6)
expect(sequence(1, 2, 3).fold(0) { |a, b| a + b }).to eq(6)
expect(sequence(1, 2, 3).fold_left(0, sum)).to eq(6)
expect(sequence(1, 2, 3).fold_left(0) { |a, b| a + b }).to eq(6)
expect(sequence('1', '2', '3').fold(0, join)).to eq('0123')
expect(sequence('1', '2', '3').fold_left(0, join)).to eq('0123')
end
it 'should support reduce (aka reduce_left)' do
expect(sequence(1, 2, 3).reduce(sum)).to eq(6)
expect(sequence(1, 2, 3).reduce { |a, b| a + b }).to eq(6)
expect(sequence(1, 2, 3).reduce_left(sum)).to eq(6)
expect(sequence(1, 2, 3).reduce_left { |a, b| a + b }).to eq(6)
expect(sequence('1', '2', '3').reduce(join)).to eq('123')
expect(sequence('1', '2', '3').reduce_left(join)).to eq('123')
end
it 'should support reduce of empty sequence' do
expect(empty.reduce(sum)).to eq(0)
end
it 'should support fold_right' do
expect(empty.fold_right(4, sum)).to eq(4)
expect(sequence(1).fold_right(4, sum)).to eq(5)
expect(sequence(1, 2).fold_right(4, sum)).to eq(7)
expect(sequence(1, 2, 3).fold_right(4, sum)).to eq(10)
expect(sequence(1, 2, 3).fold_right(4) { |a, b| a + b }).to eq(10)
expect(empty.fold_right('4', join)).to eq('4')
expect(sequence('1').fold_right('4', join)).to eq('14')
expect(sequence('1', '2').fold_right('4', join)).to eq('124')
expect(sequence('1', '2', '3').fold_right('4', join)).to eq('1234')
end
it 'should support reduce_right' do
expect(empty.reduce_right(sum)).to eq(0)
expect(sequence(1).reduce_right(sum)).to eq(1)
expect(sequence(1, 2).reduce_right(sum)).to eq(3)
expect(sequence(1, 2, 3).reduce_right(sum)).to eq(6)
expect(sequence(1, 2, 3).reduce_right { |a, b| a+b }).to eq(6)
expect(empty.reduce_right(join)).to eq('')
expect(sequence('1').reduce_right(join)).to eq('1')
expect(sequence('1', '2').reduce_right(join)).to eq('12')
expect(sequence('1', '2', '3').reduce_right(join)).to eq('123')
end
it 'should support find' do
expect(empty.find(even)).to eq(none)
expect(sequence(1, 3, 5).find(even)).to eq(none)
expect(sequence(1, 2, 3).find(even)).to eq(some(2))
expect(sequence(1, 2, 3).find { |value| even.(value) }).to eq(some(2))
end
it 'should support find_index_of' do
expect(sequence(1, 3, 5).find_index_of(even)).to eq(none)
expect(sequence(1, 3, 6).find_index_of(even)).to eq(some(2))
expect(sequence(1, 3, 6).find_index_of { |value| even.(value) }).to eq(some(2))
end
it 'should support finding the first some' do
expect(sequence(none, some(2), some(3)).flat_map(identity).head_option).to eq(some(2))
end
it 'should support zip_with_index' do
expect(sequence('Dan', 'Kings', 'Raymond').zip_with_index).to eq(sequence(pair(0, 'Dan'), pair(1, 'Kings'), pair(2, 'Raymond')))
end
it 'should support zip' do
sequence = sequence(1, 3, 5)
expect(sequence.zip(sequence(2, 4, 6, 8))).to eq(sequence(pair(1, 2), pair(3, 4), pair(5, 6)))
expect(sequence.zip(sequence(2, 4, 6))).to eq(sequence(pair(1, 2), pair(3, 4), pair(5, 6)))
expect(sequence.zip(sequence(2, 4))).to eq(sequence(pair(1, 2), pair(3, 4)))
end
it 'should support take' do
sequence = sequence(1, 2, 3).take(2)
expect(sequence).to eq(sequence(1, 2))
expect(sequence(1).take(2)).to eq(sequence(1))
expect(empty.take(2)).to eq(empty)
end
it 'should not take more than it needs' do
sequence = repeat_fn(-> { raise RuntimeError }).take(0)
expect(sequence.is_empty?).to eq(true)
expect(sequence.size).to eq(0)
end
it 'should support take_while' do
sequence = sequence(1, 3, 5, 6, 8, 1, 3)
expect(sequence.take_while(odd)).to eq(sequence(1, 3, 5))
expect(sequence.take_while { |value| odd.(value) }).to eq(sequence(1, 3, 5))
expect(sequence(1).take_while(odd)).to eq(sequence(1))
expect(empty.take_while(odd)).to eq(empty)
end
it 'should support size' do
expect(range(10000000000, 10000000099).size).to eq(100)
end
it 'should support repeat' do
expect(repeat(10).take(5)).to eq(sequence(10, 10, 10, 10, 10))
expect(repeat_fn(returns(20)).take(5)).to eq(sequence(20, 20, 20, 20, 20))
end
it 'should support is_empty?' do
expect(empty.is_empty?).to be(true)
expect(sequence(1).is_empty?).to be(false)
end
it 'should support flat_map' do
expect(sequence('Hello').flat_map(to_characters)).to eq(sequence('H', 'e', 'l', 'l', 'o'))
expect(sequence(sequence(1, 2), sequence(3, 4)).flat_map { |s| s.map { |v| v+1 } }).to eq(sequence(2, 3, 4, 5))
expect(sequence(sequence(1, 2), sequence(3, 4)).flat_map { |s| s }).to eq(sequence(1, 2, 3, 4))
expect(sequence(pair(1, 2), pair(3, 4)).flat_map { |s| s }).to eq(sequence(1, 2, 3, 4))
expect(sequence(some(1), none, some(2)).flat_map { |s| s }).to eq(sequence(1, 2))
end
it 'should support flatten' do
expect(sequence('Hello').map(to_characters).flatten).to eq(sequence('H', 'e', 'l', 'l', 'o'))
expect(sequence(some(1), none, some(3)).flatten).to eq(sequence(1, 3))
expect(sequence(sequence(1)).flatten).to eq(sequence(1))
end
it 'should allow flattening multiple times' do
sequence = sequence(sequence('1', '2'), empty, sequence('4', '5'))
expect(sequence.flatten).to eq(sequence('1', '2', '4', '5'))
expect(sequence.flatten).to eq(sequence('1', '2', '4', '5'))
end
it 'should support drop' do
expect(sequence(1, 2, 3).drop(2)).to eq(sequence(3))
expect(sequence(1).drop(2)).to eq(empty)
expect(empty.drop(1)).to eq(empty)
end
it 'should support drop_while' do
sequence = sequence(1, 3, 5, 6, 8, 1, 3)
expect(sequence.drop_while(odd)).to eq(sequence(6, 8, 1, 3))
expect(sequence.drop_while { |value| odd.(value) }).to eq(sequence(6, 8, 1, 3))
expect(sequence(1).drop_while(odd)).to eq(empty)
expect(empty.drop_while(odd)).to eq(empty)
end
it 'should support sort' do
expect(sort(sequence(5, 6, 1, 3, 4, 2))).to eq(sequence(1, 2, 3, 4, 5, 6))
expect(sort(sequence('Matt', 'Dan', 'Bob'))).to eq(sequence('Bob', 'Dan', 'Matt'))
end
it 'should support sort descending' do
expect(sort(sequence(5, 6, 1, 3, 4, 2), descending)).to eq(sequence(6, 5, 4, 3, 2, 1))
expect(sequence(5, 6, 1, 3, 4, 2).sort_by(descending)).to eq(sequence(6, 5, 4, 3, 2, 1))
expect(sort(sequence('Bob', 'Dan', 'Matt'), descending)).to eq(sequence('Matt', 'Dan', 'Bob'))
expect(sequence('Bob', 'Dan', 'Matt').sort_by(descending)).to eq(sequence('Matt', 'Dan', 'Bob'))
end
it 'should support contains' do
expect(sequence(1, 3, 5).contains?(2)).to eq(false)
expect(sequence(1, 2, 3).contains?(2)).to eq(true)
end
it 'should support exists' do
expect(sequence(1, 3, 5).exists?(even)).to eq(false)
expect(sequence(1, 3, 5).exists? { |value| even.(value) }).to eq(false)
expect(sequence(1, 2, 3).exists?(even)).to eq(true)
end
it 'should support for_all' do
expect(sequence(1, 3, 5).for_all?(odd)).to eq(true)
expect(sequence(1, 3, 5).for_all? { |value| odd.(value) }).to eq(true)
expect(sequence(1, 2, 3).for_all?(odd)).to eq(false)
end
it 'should support group_by and preserve order' do
groups_fn = sequence(1, 2, 3, 4).group_by(mod(2))
expect(groups_fn.first).to eq(group(1, sequence(1, 3).enumerator))
expect(groups_fn.second).to eq(group(0, sequence(2, 4).enumerator))
groups_block = sequence(1, 2, 3, 4).group_by { |value| mod(2).(value) }
expect(groups_block.first).to eq(group(1, sequence(1, 3).enumerator))
expect(groups_block.second).to eq(group(0, sequence(2, 4).enumerator))
end
it 'should support each' do
sum = 0
sequence(1, 2).each(->(value) { sum = sum + value })
expect(sum).to eq(3)
sequence(3, 4).each { |value| sum = sum + value }
expect(sum).to eq(10)
end
it 'should support map_concurrently' do
strings = sequence(1, 2).map_concurrently(to_string)
expect(strings).to eq(sequence('1', '2'))
strings_block = sequence(1, 2).map_concurrently { |value| to_string.(value) }
expect(strings_block).to eq(sequence('1', '2'))
end
it 'should allow arrays to be converted to sequences' do
expect([1, 2, 3, 4, 5].to_seq).to eq(sequence(1, 2, 3, 4, 5))
end
it 'should allow sequences to be converted to arrays' do
expect(sequence(1, 2, 3, 4, 5).to_a).to eq([1, 2, 3, 4, 5])
end
it 'should be able to display sequence as a string' do
expect(sequence(1, 2, 3, 4, 5).to_s).to eq('[1,2,3,4,5]')
expect(empty.to_s).to eq('[]')
expect(sequence(sequence(1)).flatten.to_s).to eq('[1]')
end
it 'should support join' do
expect(sequence(1, 2, 3).join(sequence(4, 5, 6))).to eq(sequence(1, 2, 3, 4, 5, 6))
end
it 'should support cycle' do
expect(range(1, 3).cycle.take(10)).to eq(sequence(1, 2, 3, 1, 2, 3, 1, 2, 3, 1))
end
it 'should raise exception if you try to use both lambda and block' do
expect { empty.map(->(a) { a+1 }) { |b| b+2 } }.to raise_error(RuntimeError)
expect { empty.map_concurrently(->(a) { a+1 }) { |b| b+2 } }.to raise_error(RuntimeError)
expect { empty.flat_map(->(a) { a+1 }) { |b| b+2 } }.to raise_error(RuntimeError)
expect { empty.fold(0, ->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { empty.fold_left(0, ->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { empty.fold_right(0, ->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { empty.reduce(->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { empty.reduce_left(->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { empty.reduce_right(->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { empty.find(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.find_index_of(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.take_while(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.drop_while(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.exists?(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.for_all?(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.filter(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.reject(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.group_by(->(_) { true }) { |_| true } }.to raise_error(RuntimeError)
expect { empty.each(->(v) { puts(v) }) { |v| puts(v) } }.to raise_error(RuntimeError)
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/numbers.rb
|
module Numbers
private
def sum
monoid(->(a, b) { a + b }, 0)
end
def add(increment)
-> (number) { number + increment }
end
def even
remainder_is(2, 0)
end
def odd
remainder_is(2, 1)
end
def divide(divisor)
->(dividend) { dividend / divisor }
end
def remainder_is(divisor, remainder)
predicate(->(dividend) { remainder(dividend, divisor) == remainder })
end
def remainder(dividend, divisor)
dividend % divisor
end
def range_from(start)
Sequence.new(enumerator_of(increment, start))
end
def range(from, to)
Sequence.new((from..to).lazy)
end
def increment
->(number) { number + 1 }
end
def mod(divisor)
->(number) { number % divisor }
end
def greater_than(right)
predicate(->(left) { left > right })
end
def multiply(y)
monoid(->(x){x * y}, 1)
end
def powers_of(amount)
enumerate(multiply(amount), 1)
end
end
|
raymanoz/totally_lazy
|
spec/totally_lazy/option_spec.rb
|
require_relative '../spec_helper'
describe 'Option' do
it 'should support is_empty? & is_defined?' do
expect(some(1).is_empty?).to eq(false)
expect(some(1).is_defined?).to eq(true)
expect(none.is_empty?).to eq(true)
expect(none.is_defined?).to eq(false)
end
it 'should support contains?' do
expect(option(1).contains?(1)).to eq(true)
expect(option(1).contains?(2)).to eq(false)
expect(none.contains?(2)).to eq(false)
end
it 'should support exists' do
expect(option(1).exists?(greater_than(0))).to eq(true)
expect(option(1).exists? { |a| a > 0 }).to eq(true)
expect(option(1).exists?(greater_than(1))).to eq(false)
expect(none.exists?(greater_than(0))).to eq(false)
end
it 'should support is alias' do
expect(option(1).is?(greater_than(0))).to eq(true)
expect(option(1).is? { |a| a > 0 }).to eq(true)
expect(option(1).is?(greater_than(1))).to eq(false)
expect(none.is?(greater_than(0))).to eq(false)
end
it 'should support fold (aka fold_left)' do
expect(option(1).fold(1, sum)).to eq(2)
expect(option(1).fold_left(1, sum)).to eq(2)
expect(option(1).fold(1) { |a, b| a + b }).to eq(2)
expect(some(1).fold(1, sum)).to eq(2)
expect(none.fold(1, sum)).to eq(1)
end
it 'should support map' do
expect(option(1).map(to_string)).to eq(option('1'))
expect(option(1).map { |value| value.to_s }).to eq(option('1'))
expect(some(2).map(to_string)).to eq(some('2'))
expect(none.map(to_string)).to eq(none)
expect(some(2).map(ignore_and_return(nil))).to eq(none)
end
it 'should support flat_map' do
expect(some(4).flat_map(divide(2).optional)).to eq(some(2))
expect(some(4).flat_map { |v| divide(2).optional.(v) }).to eq(some(2))
expect(some(4).flat_map(divide(0).optional)).to eq(none)
expect(none.flat_map(constant(none))).to eq(none)
expect(none.flat_map(some(4))).to eq(none)
end
it 'should support flatten' do
expect(some(some(1)).flatten).to eq(some(1))
expect(some(none).flatten).to eq(none)
expect(none.flatten).to eq(none)
end
it 'should support size' do
expect(some(1).size).to eq(1)
expect(none.size).to eq(0)
end
it 'should support get' do
expect(some(1).get).to eq(1)
expect { none.get }.to raise_error(NoSuchElementException)
end
it 'should support get_or_else with value (aka or_else)' do
expect(some(1).get_or_else(2)).to eq(1)
expect(some(1).or_else(2)).to eq(1)
expect(none.get_or_else(2)).to eq(2)
expect(option(1).get_or_else(2)).to eq(1)
expect(option(nil).get_or_else(2)).to eq(2)
end
it 'should support get_or_else with a function' do
expect(some(1).get_or_else(returns(2))).to eq(1)
expect(some(1).get_or_else { 2 }).to eq(1)
expect(none.get_or_else(returns(2))).to eq(2)
expect(option(1).get_or_else(returns(2))).to eq(1)
expect(option(nil).get_or_else(returns(2))).to eq(2)
expect { option(nil).get_or_else(call_raises(RuntimeError.new)) }.to raise_error(RuntimeError)
end
it 'should support get_or_nil' do
expect(some(1).get_or_nil).to eq(1)
expect(none.get_or_nil).to eq(nil)
expect(option(1).get_or_nil).to eq(1)
expect(option(nil).get_or_nil).to eq(nil)
end
it 'should support get_or_raise' do
expect(some('bob').get_or_raise(RuntimeError.new)).to eq('bob')
expect{none.get_or_raise(RuntimeError.new)}.to raise_error(RuntimeError)
end
it 'should support converting some to_either as right' do
expect(some('all good').to_either('borked')).to eq(right('all good'))
end
it 'should support converting none to_either as left' do
expect(none.to_either('borked')).to eq(left('borked'))
end
it 'should raise exception if you try to use both lambda and block' do
expect { some(1).exists?(->(a) { a == 1 }) { |b| b == 2 } }.to raise_error(RuntimeError)
expect { none.exists?(->(a) { a == 1 }) { |b| b == 2 } }.to raise_error(RuntimeError)
expect { some(1).is?(->(a) { a == 1 }) { |b| b == 2 } }.to raise_error(RuntimeError)
expect { none.is?(->(a) { a == 1 }) { |b| b == 2 } }.to raise_error(RuntimeError)
expect { some(1).fold(0, ->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { none.fold(0, ->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { some(1).fold_left(0, ->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { none.fold_left(0, ->(a, b) { a+b }) { |a, b| a+b } }.to raise_error(RuntimeError)
expect { some(1).map(->(v) { v.to_s }) { |v| v.to_s } }.to raise_error(RuntimeError)
expect { none.map(->(v) { v.to_s }) { |v| v.to_s } }.to raise_error(RuntimeError)
expect { some(4).flat_map(divide(2).optional) { |v| divide(2).optional.(v) } }.to raise_error(RuntimeError)
expect { some(1).get_or_else(returns(2)) { |value| 3 } }.to raise_error(RuntimeError)
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/strings.rb
|
module Strings
private
def join
monoid(->(a, b) { "#{a}#{b}" }, '')
end
def join_with_sep(separator)
->(a, b) { "#{a}#{separator}#{b}" }
end
def to_characters
->(string) { Sequence.new(character_enumerator(string)) }
end
def to_string
->(value) { value.to_s }
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/pair.rb
|
module Pairs
private
def pair(first, second)
Pair.new(first, second)
end
def first
->(pair) { pair.first }
end
def second
->(pair) { pair.second }
end
end
class Pair
include Comparable
def initialize(first, second)
@first = -> { first }
@second = -> { second }
end
def first
@first.()
end
def second
@second.()
end
def enumerator
Enumerator.new { |y|
y << first
y << second
raise StopIteration.new
}
end
def <=>(other)
(first <=> other.first) <=> (second <=> other.second)
end
def to_s
"(#{first}, #{second})"
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/functions.rb
|
require 'concurrent/executors'
require 'concurrent/promise'
class Proc
def self.compose(f, g)
lambda { |*args| f[g[*args]] }
end
def *(g)
Proc.compose(self, g)
end
def and_then(g)
Proc.compose(g, self)
end
end
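# Added usage sketch (not part of the original file): compose applies the
# right-hand function first, so `*` runs its argument before the receiver,
# while and_then runs the receiver first.
#
#   add_one = ->(x) { x + 1 }
#   double  = ->(x) { x * 2 }
#   (add_one * double).(3)        # => 7  (double, then add_one)
#   add_one.and_then(double).(3)  # => 8  (add_one, then double)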
module Functions
private
def monoid(fn, id)
fn.define_singleton_method(:identity) do
id
end
fn
end
def returns(value)
-> { value }
end
def ignore_and_return(value)
returns1(value)
end
def returns1(value)
constant(value)
end
def constant(value)
->(_) { value }
end
def identity
-> (a) { a }
end
def call_raises(e)
-> { raise e }
end
alias call_throws call_raises
def call_fn
->(fn) { fn.() }
end
def flip(fn)
->(a, b) { fn.(b, a) }
end
def defer_return(fn)
->(value) { defer_apply(fn, value) }
end
def defer_apply(fn, value)
->() { fn.(value) }
end
def call_concurrently(sequence_of_fn)
pool = Concurrent::CachedThreadPool.new
begin
call_concurrently_with_pool(sequence_of_fn, pool)
ensure
pool.shutdown
end
end
def call_concurrently_with_pool(sequence_of_fn, pool)
sequence_of_fn.
map(as_promise).
map(execute_with(pool)).
realise.
map(realise_promise)
end
def as_promise
-> (fn) { Concurrent::Promise.new { fn.() } }
end
def execute_with(pool)
-> (promise) {
pool.post { promise.execute }
promise
}
end
def realise_promise
->(promise) { promise.value! }
end
def get_left
->(either) { either.left_value }
end
def get_right
->(either) { either.right_value }
end
end
|
raymanoz/totally_lazy
|
spec/totally_lazy/either_spec.rb
|
<reponame>raymanoz/totally_lazy
require_relative '../spec_helper'
describe 'Either' do
it 'should support creating rights' do
either = right(3)
expect(either.is_right?).to eq(true)
expect(either.is_left?).to eq(false)
expect(either.right_value).to eq(3)
end
it 'should support creating lefts' do
exception = NoSuchElementException.new
either = left(exception)
expect(either.is_right?).to eq(false)
expect(either.is_left?).to eq(true)
expect(either.left_value).to eq(exception)
end
it 'can be used in filter and map' do
eithers = sequence(left('error'), right(3))
expect(eithers.filter(is_left).map(get_left)).to eq(sequence('error'))
expect(eithers.filter(is_right).map(get_right)).to eq(sequence(3))
end
it 'should support map' do
expect(right(3).map(add(2))).to eq(right(5))
expect(right(3).map { |a| a+2 }).to eq(right(5))
end
it 'should support map_left' do
expect(right(3).map_left(add(2))).to eq(right(3))
expect(right(3).map_left { |a| a+2 }).to eq(right(3))
expect(left(3).map_left(add(2))).to eq(left(5))
expect(left(3).map_left { |a| a+2 }).to eq(left(5))
end
it 'should support flat_map' do
expect(right(4).flat_map(divide(2).or_exception)).to eq(right(2))
expect(right(4).flat_map { right(2) }).to eq(right(2))
result = right(4).flat_map(divide(0).or_exception)
expect(result.is_left?).to eq(true)
expect(result.left_value.class).to eq(ZeroDivisionError)
expect(left(2).flat_map(divide(2).or_exception)).to eq(left(2))
expect(left(2).flat_map { right(5) }).to eq(left(2))
end
it 'should support flatten' do
expect(right(left(ZeroDivisionError.new)).flatten).to eq(left(ZeroDivisionError.new))
expect(right(right(1)).flatten).to eq(right(1))
end
it 'should support fold' do
expect(left(3).fold(2, sum, nil)).to eq(5)
expect(right(3).fold(2, nil, sum)).to eq(5)
end
it 'should allow map to left and right' do
expect(some('foo').map(as_left).get).to eq(left('foo'))
expect(some('bar').map(as_right).get).to eq(right('bar'))
end
it 'should not support getting left from right or right from left' do
expect { left('foo').right_value }.to raise_error(NoSuchElementException)
expect { right('foo').left_value }.to raise_error(NoSuchElementException)
end
it 'should raise exception if you try to use both lambda and block' do
expect { right(1).map(add(2)) { |a| a+2 } }.to raise_error(RuntimeError)
expect { right(1).map_left(add(2)) { |a| a+2 } }.to raise_error(RuntimeError)
expect { left(1).map_left(add(2)) { |a| a+2 } }.to raise_error(RuntimeError)
expect { right(10).flat_map(divide(2)) { |a| a/2 } }.to raise_error(RuntimeError)
expect { left(10).flat_map(divide(2)) { |a| a/2 } }.to raise_error(RuntimeError)
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/either.rb
|
require_relative 'lambda_block'
class Proc
def or_exception
-> (value) {
begin
right(self.(value))
rescue Exception => e
left(e)
end
}
end
end
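# Added usage sketch: or_exception turns a function into one that returns
# right(result) on success and a left wrapping the error if the call raises.
#
#   divide(2).or_exception.(10)  # => right(5)
#   divide(0).or_exception.(10)  # => left(...) wrapping a ZeroDivisionError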
module Eithers
private
def left(value)
Either.left(value)
end
def right(value)
Either.right(value)
end
def is_left
predicate(-> (either) { either.is_left? })
end
def is_right
predicate(-> (either) { either.is_right? })
end
def as_left
->(value) { left(value) }
end
def as_right
->(value) { right(value) }
end
end
class Either
include Comparable
include LambdaBlock
def self.left(value)
Left.new(value)
end
def self.right(value)
Right.new(value)
end
def flatten
flat_map(identity)
end
end
class Left < Either
def initialize(value)
@value = value
end
def is_left?
true
end
def is_right?
false
end
def left_value
@value
end
def right_value
raise NoSuchElementException.new
end
def map_left(fn=nil, &block)
assert_funcs(fn, block_given?)
left(block_given? ? block.call(@value) : fn.(@value))
end
def flat_map(fn=nil, &block) # a function which returns an either
assert_funcs(fn, block_given?)
self
end
def fold(seed, fn_left, fn_right)
fn_left.(seed, @value)
end
def <=>(other)
@value <=> other.left_value
end
def to_s
"left(#{@value})"
end
end
class Right < Either
def initialize(value)
@value = value
end
def is_left?
false
end
def is_right?
true
end
def left_value
raise NoSuchElementException.new
end
def right_value
@value
end
def map(fn=nil, &block)
assert_funcs(fn, block_given?)
right(block_given? ? block.call(@value) : fn.(@value))
end
def map_left(fn=nil, &block)
assert_funcs(fn, block_given?)
self
end
def flat_map(fn=nil, &block) # a function which returns an either
assert_funcs(fn, block_given?)
block_given? ? block.call(@value) : fn.(@value)
end
def fold(seed, fn_left, fn_right)
fn_right.(seed, @value)
end
def <=>(other)
@value <=> other.right_value
end
def to_s
"right(#{@value})"
end
end
|
raymanoz/totally_lazy
|
spec/totally_lazy/maps_spec.rb
|
require_relative '../spec_helper'
describe 'Maps' do
it 'should allow maps to be merged' do
expect(merge(empty)).to eq({})
expect(merge(sequence({1 => 2}, {3 => 4, 5 => 6}))).to eq({1 => 2, 3 => 4, 5 => 6})
expect(merge(sequence({1 => 2, 3 => 3}, {3 => 4, 5 => 6}))).to eq({1 => 2, 3 => 4, 5 => 6})
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy.rb
|
require_relative 'totally_lazy/comparators'
require_relative 'totally_lazy/either'
require_relative 'totally_lazy/enumerators'
require_relative 'totally_lazy/functions'
require_relative 'totally_lazy/maps'
require_relative 'totally_lazy/numbers'
require_relative 'totally_lazy/option'
require_relative 'totally_lazy/pair'
require_relative 'totally_lazy/predicates'
require_relative 'totally_lazy/sequence'
require_relative 'totally_lazy/strings'
include Comparators
include Eithers
include Enumerators
include Functions
include Maps
include Numbers
include Options
include Pairs
include Predicates
include Sequences
include Strings
|
raymanoz/totally_lazy
|
spec/totally_lazy/numbers_spec.rb
|
require_relative '../spec_helper'
describe 'Numbers' do
it 'should support arbitrary multiply' do
expect(sequence(1, 2, 3, 4, 5).map(multiply(5))).to eq(sequence(5, 10, 15, 20, 25))
end
it 'should treat multiply as a monoid' do
expect(empty.reduce(multiply(5))).to eq(1)
end
it 'should be able to get powers_of a number' do
expect(powers_of(3).take(10)).to eq(sequence(1, 3, 9, 27, 81, 243, 729, 2187, 6561, 19683))
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/comparators.rb
|
module Comparators
private
def ascending
-> (a,b) { a <=> b }
end
def descending
-> (a,b) { b <=> a }
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/maps.rb
|
module Maps
private
def merge(sequence_of_maps)
sequence_of_maps.fold({}){|a,b| a.merge(b) }
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/predicates.rb
|
module Predicates
private
def predicate(fn)
def fn.and(other)
-> (value) { self.(value) && other.(value) }
end
def fn.or(other)
-> (value) { self.(value) || other.(value) }
end
fn
end
def is_not(pred)
predicate(-> (bool) { !pred.(bool) })
end
def matches(regex)
predicate(->(value) { !regex.match(value).nil? })
end
def equal_to?(that)
predicate(->(this) { this == that })
end
alias is equal_to?
def where(fn, predicate)
predicate(->(value) { predicate.(fn.(value)) })
end
end
class PredicateFunction < Proc
def and(other)
->(value) { self.(value) && other.(value) }
end
def or(other)
->(value) { self.(value) || other.(value) }
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/option.rb
|
require_relative 'lambda_block'
class Proc
def optional
->(value) {
begin
Option.option(self.(value))
rescue
Option.none
end
}
end
end
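# Added usage sketch: optional turns a function into one that returns
# some(result), or none if the call raises or produces nil.
#
#   divide(2).optional.(4)  # => some(2)
#   divide(0).optional.(4)  # => none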
module Options
private
def option(value)
Option.option(value)
end
def some(value)
Option.some(value)
end
def none
Option.none
end
end
class Option
include Comparable
def self.option(value)
value.nil? ? none : some(value)
end
def self.some(value)
Some.new(value)
end
def self.none
NONE
end
def is_defined?
!is_empty?
end
def is?(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
exists?(block_given? ? ->(value) { block_pred.call(value) } : fn_pred)
end
def flatten
flat_map(identity)
end
end
class Some < Option
include LambdaBlock
attr_reader :value
def initialize(value)
@value = value
end
def contains?(value)
@value == value
end
def exists?(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
block_given? ? block_pred.call(@value) : fn_pred.(@value)
end
def map(fn=nil, &block)
assert_funcs(fn, block_given?)
option(block_given? ? block.call(@value) : fn.(@value))
end
def flat_map(fn=nil, &block) # function should return an option
assert_funcs(fn, block_given?)
block_given? ? block.call(@value) : fn.(@value)
end
def fold(seed, fn=nil, &block)
assert_funcs(fn, block_given?)
block_given? ? block.call(seed, @value) : fn.(seed, @value)
end
alias fold_left fold
def is_empty?
false
end
def size
1
end
def get
@value
end
def get_or_else(value_or_fn=nil, &block)
assert_funcs(value_or_fn, block_given?)
get
end
alias or_else get_or_else
def get_or_nil
get
end
def get_or_raise(error)
get
end
alias get_or_throw get_or_raise
def to_either(left)
right(value)
end
def enumerator
Enumerator.new { |y|
y << @value
raise StopIteration.new
}
end
def <=>(other)
@value <=> other.value
end
def to_s
"some(#{value})"
end
end
class None < Option
include LambdaBlock
def is_empty?
true
end
def contains?(value)
false
end
def exists?(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
false
end
def map(fn=nil, &block)
assert_funcs(fn, block_given?)
none
end
def flat_map(fn=nil, &block) # function should return an option
assert_funcs(fn, block_given?)
none
end
def fold(seed, fn=nil, &block)
assert_funcs(fn, block_given?)
seed
end
alias fold_left fold
def size
0
end
def get
raise NoSuchElementException.new
end
def get_or_else(value_or_fn=nil, &block)
assert_funcs(value_or_fn, block_given?)
if (value_or_fn.respond_to? :call) || block_given?
block_given? ? block.call : value_or_fn.()
else
value_or_fn
end
end
alias or_else get_or_else
def get_or_nil
nil
end
def get_or_raise(error)
raise error
end
alias get_or_throw get_or_raise
def enumerator
Enumerator.new { |y|
raise StopIteration.new
}
end
def to_either(value)
left(value)
end
def <=>(other)
other == NONE
end
def to_s
'none'
end
end
NONE=None.new
|
raymanoz/totally_lazy
|
lib/totally_lazy/enumerators.rb
|
module Enumerators
private
def reverse_enumerator(e)
e.reverse_each
end
def has_next(e)
begin
e.peek
true
rescue StopIteration
false
end
end
def enumerator_of(fn, init)
Enumerator.new do |y|
value = init
y << value
loop do
value = fn.(value)
y << value
end
end.lazy
end
def repeat_fn_enumerator(fn)
Enumerator.new do |y|
loop do
y << fn.()
end
end.lazy
end
def repeat_enumerator(value)
repeat_fn_enumerator(returns(value))
end
def character_enumerator(string)
Enumerator.new do |y|
index = 0
loop do
raise StopIteration.new unless index < string.size
y << string[index]
index = index + 1
end
end.lazy
end
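# Note (added for clarity): flatten_enumerator assumes every element of the
# outer enumerator responds to #enumerator (Sequence, Pair, Some and None all
# do) and lazily splices their elements into a single stream.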
def flatten_enumerator(enumerator)
enumerator.rewind
Enumerator.new do |y|
current_enumerator = empty_enumerator
get_current_enumerator = ->() {
until has_next(current_enumerator)
return empty_enumerator unless has_next(enumerator)
current_enumerator = enumerator.next.enumerator
current_enumerator.rewind
end
current_enumerator
}
loop do
current = get_current_enumerator.()
if has_next(current)
y << current.next
else
raise StopIteration.new
end
end
end.lazy
end
def cycle_enumerator(e)
Enumerator.new do |y|
loop do
e.rewind unless has_next(e)
y << e.next
end
end.lazy
end
def empty_enumerator
[].lazy
end
end
|
raymanoz/totally_lazy
|
spec/totally_lazy/predicates_spec.rb
|
require_relative '../spec_helper'
describe 'Predicates' do
it 'should allow regex matching' do
expect(sequence('Stacy').find(matches(/Stac/))).to eq(some('Stacy'))
expect(sequence('Raymond').find(matches(/NotAwesome/))).to eq(none)
end
it 'should allow is' do
expect(sequence('Stuff').find(is('Stuff'))).to eq(some('Stuff'))
expect(sequence('Stuff').find(equal_to?('Stuff'))).to eq(some('Stuff'))
expect(sequence('Stuff').find(is('Nothing'))).to eq(none)
end
class Person
attr_reader :name
attr_reader :age
def initialize(name, age)
@name = name
@age = age
end
end
raymond = Person.new('Raymond', 41)
mathilda = Person.new('Mathilda', 4)
age = ->(person) { person.age }
it 'should allow where' do
expect(sequence(raymond, mathilda).filter(where(age, greater_than(40)))).to eq(sequence(raymond))
end
it 'should be able to negate other predicates using is_not' do
expect(sequence(raymond).filter(where(age, is_not(greater_than(40))))).to eq(empty)
end
it 'should allow predicates to be composed using logical operations (AND/OR)' do
expect(sequence(1,2,3,4,5).filter(greater_than(2).and(odd))).to eq(sequence(3,5))
expect(sequence(1,2,3,4,5).filter(greater_than(2).or(odd))).to eq(sequence(1,3,4,5))
end
end
|
raymanoz/totally_lazy
|
lib/totally_lazy/sequence.rb
|
require_relative 'lambda_block'
class NoSuchElementException < RuntimeError
end
class Array
def to_seq
Sequence.new(self.lazy)
end
end
module Sequences
def empty
Sequence.empty
end
def sequence(*items)
Sequence.sequence(*items)
end
def drop(sequence, count)
Sequence.drop(sequence, count)
end
def sort(sequence, comparator=ascending)
Sequence.sort(sequence, comparator)
end
def map_concurrently(sequence, fn=nil, &block)
Sequence.map_concurrently(sequence, block_given? ? ->(value) { block.call(value) } : fn)
end
def group(key, enumerator)
Group.new(key, enumerator)
end
def repeat(item)
Sequence.new(repeat_enumerator(item))
end
def repeat_fn(item)
Sequence.new(repeat_fn_enumerator(item))
end
def enumerate(fn, start)
Sequence.new(Enumerator.new do |y|
current = start
loop do
result = current
current = fn.(current)
y << result
end
end.lazy)
end
private
def pair_enumerator(left, right)
Enumerator.new do |y|
left.rewind
right.rewind
loop do
y << pair(left.next, right.next)
end
end.lazy
end
end
class Sequence
include Comparable
include LambdaBlock
attr_reader :enumerator
def self.map_concurrently(sequence, fn=nil, &block)
call_concurrently(sequence.map(defer_return(block_given? ? ->(value) { block.call(value) } : fn)))
end
def self.sort(sequence, comparator=ascending)
Sequence.new(sequence.enumerator.sort { |a, b| comparator.(a, b) }.lazy)
end
def self.drop(sequence, count)
Sequence.new(sequence.enumerator.drop(count))
end
def self.repeat(item)
Sequence.new(repeat_enumerator(item))
end
def self.repeat_fn(item)
Sequence.new(repeat_fn_enumerator(item))
end
def self.take(sequence, count)
Sequence.new(sequence.enumerator.take(count))
end
def self.zip(left, right)
left.zip(right)
end
def self.sequence(*items)
if items.first.nil?
empty
else
Sequence.new(items.lazy)
end
end
def self.empty
EMPTY
end
def initialize(enumerator)
raise "Sequence only accepts Enumerator::Lazy, not #{enumerator.class}" unless (enumerator.class == Enumerator::Lazy)
@enumerator = enumerator
end
def is_empty?
@enumerator.rewind
begin
@enumerator.peek
false
rescue
true
end
end
def size
@enumerator.count
end
def head
@enumerator.first
end
alias first head
def second
tail.head
end
def head_option
option(head)
end
def last
reverse.head
end
def last_option
reverse.head_option
end
def reverse
Sequence.new(reverse_enumerator(@enumerator))
end
def tail
unless has_next(@enumerator)
raise NoSuchElementException.new
end
Sequence.new(@enumerator.drop(1))
end
def init
reverse.tail.reverse
end
def map(fn=nil, &block)
assert_funcs(fn, block_given?)
Sequence.new(@enumerator.map { |value|
block_given? ? block.call(value) : fn.(value)
})
end
def fold(seed, fn=nil, &block)
assert_funcs(fn, block_given?)
@enumerator.inject(seed) { |accumulator, value|
block_given? ? block.call(accumulator, value) : fn.(accumulator, value)
}
end
alias fold_left fold
def fold_right(seed, fn=nil, &block)
assert_funcs(fn, block_given?)
reverse_enumerator(@enumerator).inject(seed) { |accumulator, value|
block_given? ? block.call(value, accumulator) : fn.(value, accumulator)
}
end
def reduce(fn=nil, &block)
assert_funcs(fn, block_given?)
_fn = block_given? ? ->(a, b) { block.call(a, b) } : fn
accumulator = seed(@enumerator, fn)
while has_next(@enumerator)
accumulator = _fn.(accumulator, @enumerator.next)
end
accumulator
end
alias reduce_left reduce
def reduce_right(fn=nil, &block)
assert_funcs(fn, block_given?)
_fn = block_given? ? ->(a, b) { block.call(a, b) } : fn
reversed = reverse_enumerator(@enumerator)
accumulator = seed(reversed, fn)
while has_next(reversed)
accumulator = _fn.(reversed.next, accumulator)
end
accumulator
end
def find(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
@enumerator.rewind
while has_next(@enumerator)
item = @enumerator.next
result = block_given? ? block_pred.call(item) : fn_pred.(item)
if result
return(some(item))
end
end
none
end
def zip(other)
Sequence.new(pair_enumerator(@enumerator, other.enumerator))
end
def zip_with_index
Sequence.zip(range_from(0), self)
end
def find_index_of(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
zip_with_index.find(->(pair) { block_given? ? block_pred.call(pair.second) : fn_pred.(pair.second) }).map(->(pair) { pair.first })
end
def take(count)
Sequence.take(self, count)
end
def take_while(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
Sequence.new(@enumerator.take_while { |value| block_given? ? block_pred.call(value) : fn_pred.(value) })
end
def drop(count)
Sequence.drop(self, count)
end
def drop_while(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
Sequence.new(@enumerator.drop_while { |value| block_given? ? block_pred.call(value) : fn_pred.(value) })
end
def flat_map(fn=nil, &block)
assert_funcs(fn, block_given?)
map(block_given? ? ->(value) { block.call(value) } : fn).flatten
end
def flatten
Sequence.new(flatten_enumerator(enumerator))
end
def sort_by(comparator)
Sequence.sort(self, comparator)
end
def contains?(value)
@enumerator.member?(value)
end
def exists?(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
@enumerator.any? { |value| block_given? ? block_pred.call(value) : fn_pred.(value) }
end
def for_all?(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
@enumerator.all? { |value| block_given? ? block_pred.call(value) : fn_pred.(value) }
end
def filter(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
Sequence.new(@enumerator.select { |value| block_given? ? block_pred.call(value) : fn_pred.(value) })
end
def reject(fn_pred=nil, &block_pred)
assert_funcs(fn_pred, block_given?)
filter(is_not(block_given? ? ->(value) { block_pred.call(value) } : fn_pred))
end
def group_by(fn=nil, &block)
assert_funcs(fn, block_given?)
groups = @enumerator.group_by { |value| block_given? ? block.call(value) : fn.(value) }
Sequence.new(groups.to_a.map { |group| Group.new(group[0], group[1].lazy) }.lazy)
end
def each(fn=nil, &block)
assert_funcs(fn, block_given?)
@enumerator.each { |value| block_given? ? block.call(value) : fn.(value) }
end
def map_concurrently(fn=nil, &block)
assert_funcs(fn, block_given?)
Sequence.map_concurrently(self, block_given? ? ->(value) { block.call(value) } : fn)
end
def realise
Sequence.new(@enumerator.to_a.lazy)
end
def join(other)
Sequence.new(Enumerator.new do |y|
@enumerator.each { |value| y << value }
other.enumerator.each { |value| y << value }
end.lazy)
end
def cycle
Sequence.new(cycle_enumerator(@enumerator))
end
def to_a
@enumerator.to_a
end
def <=>(other)
@enumerator.entries <=> other.enumerator.entries
end
def inspect
to_s
end
def to_s
sample = take(100).to_a.to_seq
"[#{sample.is_empty? ? '' : sample.reduce(join_with_sep(','))}]"
end
private
def seed(enumerator, fn)
enumerator.rewind
!fn.nil? && fn.respond_to?(:identity) ? fn.identity : enumerator.next
end
end
class Group < Sequence
include Comparable
attr_reader :key
def initialize(key, enumerator)
super(enumerator)
@key = key
end
def <=>(other)
(@key <=> other.key) <=> (enumerator.entries<=>(other.enumerator.entries))
end
end
EMPTY=Sequence.new([].lazy)
|
pushcx/rncurses
|
lib/ncurses.rb
|
# ncurses-ruby is a ruby module for accessing the FSF's ncurses library
# (C) 2002, 2003, 2004 <NAME> <<EMAIL>>
# (C) 2004 <NAME> <<EMAIL>>
# (C) 2005 <NAME>
#
# This module is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this module; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# $Id: ncurses.rb,v 1.7 2005/02/26 22:51:29 t-peters Exp $
require "ncurses_bin.so"
# Ncurses constants with leading underscore
def Ncurses._XOPEN_CURSES
Ncurses::XOPEN_CURSES
end
def Ncurses._SUBWIN
Ncurses::SUBWIN
end
def Ncurses._ENDLINE
Ncurses::ENDLINE
end
def Ncurses._FULLWIN
Ncurses::FULLWIN
end
def Ncurses._SCROLLWIN
Ncurses::SCROLLWIN
end
def Ncurses._ISPAD
Ncurses::ISPAD
end
def Ncurses._HASMOVED
Ncurses::HASMOVED
end
def Ncurses._WRAPPED
Ncurses::WRAPPED
end
def Ncurses._NOCHANGE
Ncurses::NOCHANGE
end
def Ncurses._NEWINDEX
Ncurses::NEWINDEX
end
module Ncurses
module Destroy_checker; def destroyed?; @destroyed; end; end
class WINDOW
include Destroy_checker
def method_missing(name, *args)
name = name.to_s
if (name[0,2] == "mv")
test_name = name.dup
test_name[2,0] = "w" # insert "w" after "mv"
if (Ncurses.respond_to?(test_name))
return Ncurses.send(test_name, self, *args)
end
end
test_name = "w" + name
if (Ncurses.respond_to?(test_name))
return Ncurses.send(test_name, self, *args)
end
Ncurses.send(name, self, *args)
end
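# Added illustration: the dispatch above lets window methods delegate to the
# module-level functions, assuming the wrapped ncurses functions exist, e.g.
#   win.addstr("hi")          # delegates to Ncurses.waddstr(win, "hi")
#   win.mvaddstr(1, 2, "hi")  # delegates to Ncurses.mvwaddstr(win, 1, 2, "hi")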
def respond_to?(name)
name = name.to_s
if (name[0,2] == "mv" && Ncurses.respond_to?("mvw" + name[2..-1]))
return true
end
Ncurses.respond_to?("w" + name) || Ncurses.respond_to?(name)
end
def del
Ncurses.delwin(self)
end
alias delete del
def WINDOW.new(*args)
Ncurses.newwin(*args)
end
end
class SCREEN
include Destroy_checker
def del
Ncurses.delscreen(self)
end
alias delete del
end
class MEVENT
attr_accessor :id, :x,:y,:z, :bstate
end
GETSTR_LIMIT = 1024
module Panel
class PANEL; end
end
module Form
class FORM
attr_reader :user_object
# This placeholder replaces the field_userptr function in curses
def user_object=(obj)
@user_object = obj
end
end
class FIELD
attr_reader :user_object
# This placeholder replaces the field_userptr function in curses
def user_object=(obj)
@user_object = obj
end
end
class FIELDTYPE
end
end
end
def Ncurses.inchnstr(str,n)
Ncurses.winchnstr(Ncurses.stdscr, str, n)
end
def Ncurses.inchstr(str)
Ncurses.winchstr(Ncurses.stdscr, str)
end
def Ncurses.mvinchnstr(y,x, str, n)
Ncurses.mvwinchnstr(Ncurses.stdscr, y,x, str, n)
end
def Ncurses.mvinchstr(y,x, str)
Ncurses.mvwinchstr(Ncurses.stdscr, y,x, str)
end
def Ncurses.mvwinchnstr(win, y,x, str, n)
if (Ncurses.wmove(win,y,x) == Ncurses::ERR)
Ncurses::ERR
else
Ncurses.winchnstr(win,str,n)
end
end
def Ncurses.mvwinchstr(win, y,x, str)
maxy = []; maxx = []; getmaxyx(win, maxy,maxx)
return Ncurses::ERR if (maxx[0] == Ncurses::ERR)
Ncurses.mvwinchnstr(win, y,x, str, maxx[0]+1)
end
def Ncurses.winchstr(win, str)
maxy = []; maxx = []; getmaxyx(win, maxy,maxx)
return Ncurses::ERR if (maxx[0] == Ncurses::ERR)
Ncurses.winchnstr(win, str, maxx[0]+1)
end
def Ncurses.getnstr(str,n)
Ncurses.wgetnstr(Ncurses.stdscr, str, n)
end
def Ncurses.mvgetnstr(y,x, str, n)
Ncurses.mvwgetnstr(Ncurses.stdscr, y,x, str, n)
end
def Ncurses.mvwgetnstr(win, y,x, str, n)
if (Ncurses.wmove(win,y,x) == Ncurses::ERR)
Ncurses::ERR
else
Ncurses.wgetnstr(win,str,n)
end
end
def Ncurses.innstr(str,n)
Ncurses.winnstr(Ncurses.stdscr, str, n)
end
def Ncurses.instr(str)
Ncurses.winstr(Ncurses.stdscr, str)
end
def Ncurses.mvinnstr(y,x, str, n)
Ncurses.mvwinnstr(Ncurses.stdscr, y,x, str, n)
end
def Ncurses.mvinstr(y,x, str)
Ncurses.mvwinstr(Ncurses.stdscr, y,x, str)
end
def Ncurses.mvwinnstr(win, y,x, str, n)
if (Ncurses.wmove(win,y,x) == Ncurses::ERR)
Ncurses::ERR
else
Ncurses.winnstr(win,str,n)
end
end
def Ncurses.mvwinstr(win, y,x, str)
maxy = []; maxx = []; getmaxyx(win, maxy,maxx)
return Ncurses::ERR if (maxx[0] == Ncurses::ERR)
Ncurses.mvwinnstr(win, y,x, str, maxx[0]+1)
end
def Ncurses.winstr(win, str)
maxy = []; maxx = []; getmaxyx(win, maxy,maxx)
return Ncurses::ERR if (maxx[0] == Ncurses::ERR)
Ncurses.winnstr(win, str, maxx[0]+1)
end
def Ncurses.mouse_trafo(pY, pX, to_screen)
Ncurses.wmouse_trafo(Ncurses.stdscr, pY, pX, to_screen)
end
def Ncurses.getcurx(win)
x = []; y = []; Ncurses.getyx(win, y,x); x[0]
end
def Ncurses.getcury(win)
x = []; y = []; Ncurses.getyx(win, y,x); y[0]
end
def Ncurses.getbegx(win)
x = []; y = []; Ncurses.getbegyx(win, y,x); x[0]
end
def Ncurses.getbegy(win)
x = []; y = []; Ncurses.getbegyx(win, y,x); y[0]
end
def Ncurses.getmaxx(win)
x = []; y = []; Ncurses.getmaxyx(win, y,x); x[0]
end
def Ncurses.getmaxy(win)
x = []; y = []; Ncurses.getmaxyx(win, y,x); y[0]
end
def Ncurses.getparx(win)
x = []; y = []; Ncurses.getparyx(win, y,x); x[0]
end
def Ncurses.getpary(win)
x = []; y = []; Ncurses.getparyx(win, y,x); y[0]
end
def Ncurses.erase
Ncurses.werase(Ncurses.stdscr)
end
def Ncurses.getstr(str)
Ncurses.getnstr(str, Ncurses::GETSTR_LIMIT)
end
def Ncurses.mvgetstr(y,x, str)
Ncurses.mvgetnstr(y,x, str, Ncurses::GETSTR_LIMIT)
end
def Ncurses.mvwgetstr(win, y,x, str)
Ncurses.mvwgetnstr(win, y,x, str, Ncurses::GETSTR_LIMIT)
end
def Ncurses.wgetstr(win, str)
Ncurses.wgetnstr(win, str, Ncurses::GETSTR_LIMIT)
end
def Ncurses.scanw(format, result)
Ncurses.wscanw(Ncurses.stdscr, format, result)
end
def Ncurses.mvscanw(y,x, format, result)
Ncurses.mvwscanw(Ncurses.stdscr, y,x, format, result)
end
def Ncurses.mvwscanw(win, y,x, format, result)
if (Ncurses.wmove(win, y,x) == Ncurses::ERR)
Ncurses::ERR
else
Ncurses.wscanw(win, format, result)
end
end
def Ncurses.wscanw(win, format, result)
str = ""
if (Ncurses.wgetstr(win, str) == Ncurses::ERR)
Ncurses::ERR
else
require "scanf.rb" # Use ruby's implementation of scanf
result.replace(str.scanf(format))
end
end
def Ncurses.mvprintw(*args)
Ncurses.mvwprintw(Ncurses.stdscr, *args)
end
def Ncurses.mvwprintw(win, y,x, *args)
if (Ncurses.wmove(win,y,x) == Ncurses::ERR)
Ncurses::ERR
else
wprintw(win, *args)
end
end
def Ncurses.printw(*args)
Ncurses.wprintw(Ncurses.stdscr, *args)
end
def Ncurses.touchline(win, start, count)
Ncurses.wtouchln(win, start, count, 1)
end
def Ncurses.touchwin(win)
wtouchln(win, 0, getmaxy(win), 1)
end
module Ncurses
Ncurses = self # for accessing Ncurses from a Module that includes Ncurses
# Some users like to include ncurses names despite namespace pollution
# This module is for them
module Namespace
def self.append_features(target)
# include constants
unless target.ancestors.member?(Ncurses)
target.__send__(:include, Ncurses)
end
# make methods available
unless target.respond_to?(:pre_Ncurses_method_missing)
target.module_eval{
alias pre_Ncurses_method_missing method_missing
def method_missing(name, *args)
if Ncurses.respond_to?(name)
Ncurses.send(name, *args)
else
pre_Ncurses_method_missing(name, *args)
end
end
}
end
end
def self.extend_object(object)
class << object
self
end.__send__(:include, self)
end
end
end
|
pushcx/rncurses
|
test/test_define_key_fails.rb
|
#!/usr/bin/env ruby
require "ncurses"
# call should fail, but must not terminate the ruby interpreter
begin
Ncurses.define_key("Hi!", 22)
rescue Ncurses::Exception, NoMethodError
exit 0
else
exit 1
end
|
pushcx/rncurses
|
test/test_keyok_fails.rb
|
#!/usr/bin/env ruby
require "ncurses"
# call should fail, but must not terminate the ruby interpreter
begin
Ncurses.keyok(22, true)
rescue Ncurses::Exception, NoMethodError
exit 0
else
exit 1
end
|
pushcx/rncurses
|
test/test_newterm_isinit.rb
|
#!/usr/bin/env ruby
require "ncurses"
term = Ncurses.newterm(nil, 1, 0)
Ncurses.start_color
Ncurses.endwin
|
pushcx/rncurses
|
extconf.rb
|
#!/usr/bin/env ruby
# ncurses-ruby is a ruby module for accessing the FSF's ncurses library
# (C) 2002, 2004 <NAME> <<EMAIL>>
# (C) 2005 <NAME>
#
# This module is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this module; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# $Id: extconf.rb,v 1.11 2005/02/26 22:51:45 t-peters Exp $
require "mkmf"
$CFLAGS += " -g"
$CXXFLAGS = $CFLAGS
have_header("unistd.h")
if have_header("ncurses.h")
curses_header = "ncurses.h"
elsif have_header("ncurses/curses.h")
curses_header = "ncurses/curses.h"
elsif have_header("curses.h")
curses_header = "curses.h"
else
raise "ncurses header file not found"
end
if have_library("ncurses", "wmove")
curses_lib = "ncurses"
elsif have_library("pdcurses", "wmove")
curses_lib = "pdcurses"
else
raise "ncurses library not found"
end
have_func("newscr")
have_func("TABSIZE")
have_func("ESCDELAY")
have_func("keybound")
have_func("curses_version")
have_func("tigetstr")
have_func("getwin")
have_func("putwin")
have_func("ungetmouse")
have_func("mousemask")
have_func("wenclose")
have_func("mouseinterval")
have_func("wmouse_trafo")
have_func("mcprint")
have_func("has_key")
have_func("delscreen")
have_func("define_key")
have_func("keyok")
have_func("resizeterm")
have_func("use_default_colors")
have_func("use_extended_names")
have_func("wresize")
have_func("attr_on")
have_func("attr_off")
have_func("attr_set")
have_func("chgat")
have_func("color_set")
have_func("filter")
have_func("intrflush")
have_func("mvchgat")
have_func("mvhline")
have_func("mvvline")
have_func("mvwchgat")
have_func("mvwhline")
have_func("mvwvline")
have_func("noqiflush")
have_func("putp")
have_func("qiflush")
have_func("scr_dump")
have_func("scr_init")
have_func("scr_restore")
have_func("scr_set")
have_func("slk_attr_off")
have_func("slk_attr_on")
have_func("slk_attr")
have_func("slk_attr_set")
have_func("slk_color")
have_func("tigetflag")
have_func("tigetnum")
have_func("use_env")
have_func("vidattr")
have_func("vid_attr")
have_func("wattr_on")
have_func("wattr_off")
have_func("wattr_set")
have_func("wchgat")
have_func("wcolor_set")
have_func("getattrs")
puts "checking which debugging functions to wrap..."
have_func("_tracef")
have_func("_tracedump")
have_func("_nc_tracebits")
have_func("_traceattr")
have_func("_traceattr2")
have_func("_tracechar")
have_func("_tracechtype")
have_func("_tracechtype2")
have_func("_tracemouse")
puts "checking for other functions that appeared after ncurses version 5.0..."
have_func("assume_default_colors")
have_func("attr_get")
puts "checking for the panel library..."
if have_header("panel.h")
have_library("panel", "panel_hidden")
end
puts "checking for the form library..."
if have_header("form.h")
have_library("form", "new_form")
end
create_makefile('ncurses_bin')
|
pushcx/rncurses
|
test/test_start_color_fails.rb
|
#!/usr/bin/env ruby
require "ncurses"
# call should fail, but must not terminate the ruby interpreter
begin
Ncurses.start_color
rescue Ncurses::Exception, NoMethodError
exit 0
else
exit 1
end
|
pushcx/rncurses
|
make_dist.rb
|
#!/usr/bin/env ruby
# $Id: make_dist.rb,v 1.6 2003/08/29 22:50:12 t-peters Exp $
require "fileutils"
def sys(i)
puts("\"#{i}\"")
system(i)
end
dir = File.dirname(__FILE__)
base = File.basename(dir)
base = "ncurses-ruby" if base == "."
files = IO.readlines(dir + "/MANIFEST").collect{|filename|filename.chomp}
Version = File.new("#{dir}/VERSION").readline.chomp!
FileUtils.mkdir "#{base}-#{Version}"
files.each{|filename|
if filename.index "/"
FileUtils.mkdir_p "#{base}-#{Version}/#{File.dirname(filename)}"
end
sys "cp #{dir}/#{filename} #{base}-#{Version}/#{filename}"
}
sys "tar cjf #{base}-#{Version}.tar.bz2 --owner=0 --group=0 #{base}-#{Version}"
# check if we create a binary distribution for a mingw extension
binary_description = `file ncurses.so`
if ((binary_description =~ /\s(windows)\s/i) &&
(binary_description =~ /\s(pe)|(portable executable)\s/i) &&
(binary_description =~ /\s(dll)\s/i))
sys "cp ncurses.so README.binary #{base}-#{Version}"
Dir.glob("#{base}-#{Version}/README*").each{|textfile|
text = IO.readlines(textfile).map{|line|line.chomp + "\r\n"}
File.open(textfile + ".txt", "wb"){|outfd| outfd.write(text.join)}
sys "rm #{textfile}"
}
sys "rm #{base}-#{Version}/{MANIFEST,make_dist.rb}"
sys "zip -9 -r #{base}-#{Version}-i386-mswin32.zip #{base}-#{Version}"
end
sys "rm -r #{base}-#{Version}/"
|
REDNBLACK/preferences
|
homebrew/Formula/lolcat-c.rb
|
class LolcatC < Formula
desc "Faster lolcat implementation in CLang"
homepage "https://github.com/jaseg/lolcat"
url "https://github.com/jaseg/lolcat/archive/refs/tags/v1.2.tar.gz"
sha256 "b6e1a0e24479fbdd4eb907531339e2cafc0c00b78d19caf70e8377b8b7546331"
license "WTFPL"
head "https://github.com/jaseg/lolcat.git", branch: "main"
def install
system "make", "DESTDIR=#{prefix}"
bin.install "lolcat"
end
test do
system "false"
end
end
|
REDNBLACK/preferences
|
homebrew/Casks/openjdk-jmc.rb
|
cask "openjdk-jmc" do
version "8.1.0,07"
sha256 "6719d9e9e22e3d456994e398c47b280090c2eff58dc4cb69f8b3d45713dfc29c"
url "https://download.java.net/java/GA/jmc#{version.major}/#{version.after_comma}/binaries/jmc-#{version.before_comma}_osx-x64.tar.gz"
name "JDK Mission Control"
desc "Tools to manage, monitor, profile and troubleshoot Java applications"
homepage "https://jdk.java.net/jmc/8"
livecheck do
url :homepage
strategy :page_match do |page|
match = page.match(%r{href=.*?/(\d+)/binaries/jmc-(\d+(?:\.\d+)*)_osx-x64.tar\.gz}i)
next if match.blank?
"#{match[2]},#{match[1]}"
end
end
target_app = "jmc-#{version.before_comma}_osx-x64/JDK Mission Control.app".freeze
binary "#{target_app}/Contents/MacOS/jmc"
postflight do
eclipse_dir = "#{staged_path}/#{target_app}/Contents/Eclipse"
config_file = "#{eclipse_dir}/configuration/config.ini"
system_command "/usr/bin/sed", args: ["-i", ".old", "-e", "s;@user.home/.jmc/;@user.home/.config/jmc/;", config_file]
end
zap trash: [
'~/.config/jmc'
]
caveats do
depends_on_java "11"
end
end
|
REDNBLACK/preferences
|
homebrew/Casks/touchbar-nyancat.rb
|
cask "touchbar-nyancat" do
version "0.3.0"
sha256 "c4aff7fbf593860e76def6e8200390d96b3ad9076a38deb28cdfdfc1471d1c88"
name "Touchbar Nyan Cat"
url "https://github.com/avatsaev/touchbar_nyancat/releases/download/#{version}/touchbar_nyancat.app.zip"
desc "Stupid Nyan Cat animation on your +$2k MacBook Pro's Touchbar. Enjoy."
homepage "https://github.com/avatsaev/touchbar_nyancat"
app "touchbar_nyancat.app", target: "Touchbar Nyan Cat.app"
zap trash: [
'~/Library/Caches/com.vatsaev.touchbar-nyancat'
]
end
|
robertdimarco/puzzles
|
stripe-ctf-2/level04-code/srv.rb
|
#!/usr/bin/env ruby
require 'yaml'
require 'set'
require 'rubygems'
require 'bundler/setup'
require 'sequel'
require 'sinatra'
module KarmaTrader
PASSWORD = File.read('password.txt').strip
STARTING_KARMA = 500
KARMA_FOUNTAIN = '<PASSWORD>'
# Only needed in production
URL_ROOT = File.read('url_root.txt').strip rescue ''
module DB
def self.db_file
'karma.db'
end
def self.conn
@conn ||= Sequel.sqlite(db_file)
end
def self.init
return if File.exists?(db_file)
conn.create_table(:users) do
primary_key :id
String :username
String :password
Integer :karma
Time :last_active
end
conn.create_table(:transfers) do
primary_key :id
String :from
String :to
Integer :amount
end
# Karma Fountain has infinite karma, so just set it to -1
conn[:users].insert(
:username => KarmaTrader::KARMA_FOUNTAIN,
:password => <PASSWORD>,
:karma => -1,
:last_active => Time.now.utc
)
end
end
class KarmaSrv < Sinatra::Base
enable :sessions
# Use persistent entropy file
entropy_file = 'entropy.dat'
unless File.exists?(entropy_file)
File.open(entropy_file, 'w') do |f|
f.write(OpenSSL::Random.random_bytes(24))
end
end
set :session_secret, File.read(entropy_file)
helpers do
def absolute_url(path)
KarmaTrader::URL_ROOT + path
end
end
# Hack to make this work with a URL root
def redirect(url)
super(absolute_url(url))
end
def die(msg, view)
@error = msg
halt(erb(view))
end
before do
refresh_state
update_last_active
end
def refresh_state
@user = logged_in_user
@transfers = transfers_for_user
@trusts_me = trusts_me
@registered_users = registered_users
end
def update_last_active
return unless @user
DB.conn[:users].where(:username => @user[:username]).
update(:last_active => Time.now.utc)
end
def logged_in_user
return unless username = session[:user]
DB.conn[:users][:username => username]
end
def transfers_for_user
return [] unless @user
DB.conn[:transfers].where(
Sequel.or(:from => @user[:username], :to => @user[:username])
)
end
def trusts_me
trusts_me = Set.new
return trusts_me unless @user
# Get all the users who have transferred credits to me
DB.conn[:transfers].where(:to => @user[:username]).
join(:users, :username => :from).each do |result|
trusts_me.add(result[:username])
end
trusts_me
end
def registered_users
KarmaTrader::DB.conn[:users].reverse_order(:id)
end
# KARMA_FOUNTAIN gets all the karma it wants. (Part of why getting
# its password would be so great...)
def user_has_infinite_karma?
@user[:username] == KARMA_FOUNTAIN
end
get '/' do
if @user
erb :home
else
erb :login
end
end
get '/register' do
erb :register
end
post '/register' do
username = params[:username]
password = params[:password]
unless username && password
die("Please specify both a username and a password.", :register)
end
unless username =~ /^\w+$/
die("Invalid username. Usernames must match /^\w+$/", :register)
end
unless DB.conn[:users].where(:username => username).count == 0
die("This username is already registered. Try another one.",
:register)
end
DB.conn[:users].insert(
:username => username,
:password => password,
:karma => STARTING_KARMA,
:last_active => Time.now.utc
)
session[:user] = username
redirect '/'
end
get '/login' do
redirect '/'
end
post '/login' do
username = params[:username]
password = params[:password]
user = DB.conn[:users][:username => username, :password => password]
unless user
die('Could not authenticate. Perhaps you meant to register a new' \
' account? (See link below.)', :login)
end
session[:user] = user[:username]
redirect '/'
end
get '/transfer' do
redirect '/'
end
post '/transfer' do
redirect '/' unless @user
from = @user[:username]
to = params[:to]
amount = params[:amount]
die("Please fill out all the fields.", :home) unless amount && to
amount = amount.to_i
die("Invalid amount specified.", :home) if amount <= 0
die("You cannot send yourself karma!", :home) if to == from
unless DB.conn[:users][:username => to]
die("No user with username #{to.inspect} found.", :home)
end
unless user_has_infinite_karma?
if @user[:karma] < amount
die("You only have #{@user[:karma]} karma left.", :home)
end
end
DB.conn[:transfers].insert(:from => from, :to => to, :amount => amount)
DB.conn[:users].where(:username=>from).update(:karma => :karma - amount)
DB.conn[:users].where(:username=>to).update(:karma => :karma + amount)
refresh_state
@success = "You successfully transfered #{amount} karma to" +
" #{to.inspect}."
erb :home
end
get '/logout' do
session.clear
redirect '/'
end
end
end
def main
KarmaTrader::DB.init
KarmaTrader::KarmaSrv.run!
end
if $0 == __FILE__
main
exit(0)
end
|
robertdimarco/puzzles
|
spotify-puzzles/bestbefore/bestbefore.rb
|
#!/usr/bin/env ruby -n
require 'date'
def best_before(str)
valid_dates = []
input = str.split("/").map { |x| x.to_i }
input.permutation { |perm|
begin
year, month, day = perm
year = (year < 1000) ? year + 2000 : year
date = Date.new(year, month, day)
valid_dates << date
rescue ArgumentError
end
}
return (valid_dates.length == 0) ? "#{str} is illegal" : valid_dates.min().to_s
end
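# Worked example (added for illustration): best_before("02/4/67") permutes
# [2, 4, 67]; only 2067-02-04 and 2067-04-02 are valid dates, so the earliest,
# "2067-02-04", is returned.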
puts best_before($_.chop)
|
robertdimarco/puzzles
|
spotify-code-quest-2012/trollhunt/trollhunt.rb
|
#!/usr/bin/env ruby -n
# b - the number of bridges
# k - the number of knights
# g - min knights per group
b, k, g = $_.chop.split(" ").map { |i| i.to_f }
groups = (k / g).floor
days = ((b - 1) / groups).ceil
puts days
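# Worked example (added for illustration): with b=7 bridges, k=5 knights and
# g=2 knights per group, groups = floor(5/2) = 2 and days = ceil(6/2) = 3.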
|
robertdimarco/puzzles
|
spotify-code-quest-2012/collapse/collapse.rb
|
#!/usr/bin/env ruby
# n - number of islands
# t - island survival threshold
# k - number of receiving islands
# j - list of receiving islands with quantity
Pair = Struct.new(:to, :amount)
while input = gets
n = input.chomp!.to_i
surplus, from_to = Array.new(n + 1, 0), Array.new(n + 1)
# initialize list of islands
(1..n).each do |idx|
from_to[idx] = []
end
# load islands thresholds and set up resource dependencies
(1..n).each do |idx|
t, k, *j = gets.chomp!.split(" ").map { |token| token.to_i }
(1..k).each do |ji|
pair = Pair.new(idx, j[2*ji-1])
surplus[idx] += pair.amount
from_to[j[2*ji-2]] << pair
end
surplus[idx] -= t
end
# queue first isle, and remove resources until queue is empty
dead, dead_queue, surplus[1] = 0, [1], -1
while !dead_queue.empty?
dead += 1
dead_isle_id = dead_queue.pop
from_to[dead_isle_id].each do |pair|
if surplus[pair.to] >= 0
surplus[pair.to] -= pair.amount
if surplus[pair.to] < 0
dead_queue << pair.to
end
end
end
end
puts n - dead
end
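# Note (added for clarity): island 1 is forced to collapse first; each collapse
# withdraws its shipments from the islands that depended on it, cascading until
# no further surplus drops below zero, and the survivor count n - dead is
# printed for each test case.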
|
robertdimarco/puzzles
|
spotify-puzzles/lottery/lottery.rb
|
#!/usr/bin/env ruby -n
# binomial coefficient
def nchoosek(n, k)
if k < 0 or k > n
return 0
elsif k > (n - k)
k = n - k
end
c = 1
(0...k).each { |i|
c = c * (n - (k - (i+1)))
c = (c / (i+1))
}
return c
end
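# Sanity check (added): nchoosek(5, 2) # => 10, nchoosek(5, 0) # => 1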
# hypergeometric distribution
def hypergeometric(r, m, n, k)
return nchoosek(m,k) * nchoosek(r-m, n-k) / nchoosek(r, n)
end
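# Illustration (added): hypergeometric(r, m, n, k) computes
# C(m, k) * C(r - m, n - k) / C(r, n), i.e. the probability of drawing exactly
# k marked items when n items are drawn from a population of r containing m
# marked items. The script below passes floats, which keeps the division from
# truncating.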
# m the total number of people who entered the lottery
# n the total number of winners drawn
# t the number of tickets each winner is allowed to buy
# p the number of people in your group
m, n, t, p = $_.chop.split(" ").map { |i| i.to_f }
min_wins = (p / t).ceil.to_i
prob = 0.0
if min_wins <= n
prob = 1.0
(0...min_wins).each { |i|
prob -= hypergeometric(m, p, n, i)
}
end
puts "%.10f" % prob
|
AndrewRLloyd88/sample-app
|
test/helpers/application_helper_test.rb
|
require 'test_helper'
class ApplicationHelperTest < ActionView::TestCase
def setup
@base_title = "Ruby on Rails Tutorial Sample App"
end
test "full title helper" do
assert_equal full_title("Contact"), "Contact | #{@base_title}"
assert_equal full_title("Help"), "Help | #{@base_title}"
assert_equal full_title("Sign up"), "Sign up | #{@base_title}"
end
end
|
AndrewRLloyd88/sample-app
|
app/helpers/application_helper.rb
|
module ApplicationHelper
# Returns the full title on a per-page basis
def full_title(page_title = "")
base_title = "Ruby on Rails Tutorial Sample App"
if page_title.empty?
base_title
else
page_title + " | " + base_title
end
end
# Checks to see if an input string is a palindrome
def palindrome_tester(s)
s == s.reverse
end
# Prints the elements of an array joined into a single uppercase string
def yeller(s)
puts s.map(&:upcase).join
end
# Prints a randomly generated 8-character string
def random_subdomain
puts ("a".."z").to_a.sample(8).join
end
# Shuffles the characters of a string and prints the result
def string_shuffle(s)
puts s.split('').shuffle.join
end
# Returns a hash including a name email and random generated password
def my_hash
{
name: "John",
email: "<EMAIL>",
password: ('a'..'z').to_a.sample(16).join
}
end
end
|
AndrewRLloyd88/sample-app
|
example_user.rb
|
class User
# Creates attribute accessors corresponding to a user's name and email address
#getters and setters
attr_accessor :name, :email
# initialize is called when we execute User.new
def initialize(attributes = {})
@name = attributes[:name]
@email = attributes[:email]
end
def formatted_email
"#{@name} <#{@email}>"
end
end
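# Added usage sketch (hypothetical values):
#   user = User.new(name: "Example User", email: "user@example.com")
#   user.formatted_email  # => "Example User <user@example.com>"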
|
akinomaeni-sandbox/yaffle
|
lib/yaffle.rb
|
require 'yaffle/core_ext'
module Yaffle
end
|
wolfeidau/aws-lambda-python-cfn
|
ec2_required_tags.rb
|
#!/usr/bin/env ruby
require 'bundler/setup'
require 'cloudformation-ruby-dsl/cfntemplate'
require 'cloudformation-ruby-dsl/spotprice'
require 'cloudformation-ruby-dsl/table'
template do
value AWSTemplateFormatVersion: '2010-09-09'
value Description: 'AWS CloudFormation'
resource 'LambdaExecutionRole', Type: 'AWS::IAM::Role', Properties: {
AssumeRolePolicyDocument: {
Version: '2012-10-17',
Statement: [
{
Effect: 'Allow',
Principal: { Service: ['lambda.amazonaws.com'] },
Action: ['sts:AssumeRole']
}
]
},
Path: '/',
ManagedPolicyArns: [
'arn:aws:iam::aws:policy/service-role/AWSConfigRulesExecutionRole',
'arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole'
]
}
resource 'ConfigPermissionToCallLambda', Type: 'AWS::Lambda::Permission', Properties: {
FunctionName: get_att('Ec2TagComplianceCheck', 'Arn'),
Action: 'lambda:InvokeFunction',
Principal: 'config.amazonaws.com'
}
resource 'Ec2TagComplianceCheck', Type: 'AWS::Lambda::Function', Properties: {
Code: {
ZipFile: interpolate(file('ec2_required_tags.py'))
},
Handler: 'index.lambda_handler',
Runtime: 'python2.7',
Timeout: '60',
Role: get_att('LambdaExecutionRole', 'Arn')
}
resource 'ConfigRuleForEc2TagCompliance', Type: 'AWS::Config::ConfigRule', DependsOn: 'ConfigPermissionToCallLambda', Properties: {
ConfigRuleName: 'ConfigRuleForEc2TagCompliance',
Scope: {
ComplianceResourceTypes: ['AWS::EC2::Instance']
},
InputParameters: {
Environment: 'Stage,Dev,Prod'
},
Source: {
Owner: 'CUSTOM_LAMBDA',
SourceDetails: [
{ EventSource: 'aws.config', MessageType: 'ConfigurationItemChangeNotification' }
],
SourceIdentifier: get_att('Ec2TagComplianceCheck', 'Arn')
}
}
end.exec!
|
ccdcoe/alert-visualizer
|
proxy/aggregation_request.rb
|
require 'http'
require 'json'
require 'set'
class AggregationRequest
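# Elasticsearch query body: filters Suricata alert events that have
# alert.source.net_info set, then aggregates them as
# source IP -> source network -> destination IP -> destination network -> alert signature.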
QUERY = {
"query": {
"bool": {
"must": [
{
"range": {
"@timestamp": {
"gte": 0
}
}
}, {
"query_string": {
"query": "event_type:alert AND alert.source.net_info:*",
"analyze_wildcard": true
}
}
]
}
},
"aggs": {
"src_ip": {
"terms": {
"field": "alert.source.ip.keyword",
"size": 40,
"order": {
"_count": "desc"
}
},
"aggs": {
"net_src": {
"terms": {
"field": "alert.source.net_info.keyword",
"size": 2,
"order": {
"_count": "desc"
}
},
"aggs": {
"dest_ip": {
"terms": {
"field": "alert.target.ip.keyword",
"size": 40,
"order": {
"_count": "desc"
}
},
"aggs": {
"net_dest": {
"terms": {
"field": "alert.target.net_info.keyword",
"size": 2,
"order": {
"_count": "desc"
}
},
"aggs": {
"alerts": {
"terms": {
"field": "alert.signature.keyword",
"size": 20,
"order": {
"_count": "desc"
}
}
}
}
}
}
}
}
}
}
}
}
}
ENDPOINT = 'http://ELASTIC:9200/suricata-alert-*/_search'
def initialize
@nodes = []
@ip_list = []
@links = []
@groups = Set.new
end
def perform
@result = HTTP
.headers(content_type: "application/json")
.get(ENV.fetch('ES_ENDPOINT', ENDPOINT), body: QUERY.to_json).parse
empty_result || parsed_result
end
private
def empty_result
return unless @result.dig('hits', 'total').zero?
{
error: "Invalid response from ElasticSearch"
}
end
def parsed_result
valid_sources.each do |src_ip|
destination = src_ip
.dig('net_src', 'buckets')
.detect { |bucket| bucket["key"] != "Blue Team" }
handle_source src_ip, destination
handle_destinations src_ip, destination['dest_ip']['buckets']
end
{
nodes: @nodes,
links: @links,
groups: @groups.to_a
}
end
def valid_sources
@result.dig('aggregations', 'src_ip', 'buckets')
.select do |src_ip|
src_ip.dig 'net_src', 'buckets', 0
end
end
def handle_source(src_ip, destination)
if @ip_list.include? src_ip['key']
set_node_as_source src_ip['key']
else
@groups << destination['key']
add_source_node src_ip['key'], destination['key']
end
end
def handle_destinations(src_ip, dest_ips)
dest_ips.each do |dest_ip|
unless @ip_list.include? dest_ip['key']
group = dest_ip
.dig('net_dest', 'buckets')
.detect { |bucket| bucket["key"] != "Blue Team" }
.dig 'key'
next unless group
@groups << group
add_destination_node dest_ip['key'], group
end
add_link src_ip, dest_ip
end
end
def set_node_as_source(key)
@nodes
.select { |node| node['id'] == key }
.each { |node| node['type'] = :source }
end
def add_node(ip, group, type)
@nodes << { id: ip, group: @groups.to_a.index(group), type: type }
@ip_list << ip
end
def add_source_node(ip, group)
add_node ip, group, :source
end
def add_destination_node(ip, group)
add_node ip, group, :target
end
def add_link(src, dst)
@links << {
source: src['key'],
target: dst['key'],
value: (Math.log(dst['doc_count']) + 1) * 2,
alerts: dst.dig('net_dest', 'buckets', 0, 'alerts', 'buckets')
}
end
end
|
ccdcoe/alert-visualizer
|
proxy/proxy.rb
|
require "bundler"
Bundler.setup(:default)
require "sinatra"
require_relative 'aggregation_request'
set :port, 4567
before do
response.headers['Access-Control-Allow-Origin'] = '*'
end
get '/' do
response.headers['Content-Type'] = "application/json"
result = AggregationRequest.new.perform
status 400 if result[:error]
result.to_json
end
|
royhsu/tiny-kit
|
TinyKit.podspec
|
Pod::Spec.new do |spec|
spec.name = 'TinyKit'
spec.version = '0.11.0'
spec.license = 'MIT'
spec.summary = 'TinyKit provides practical functionalities that will help us to build apps much more quickly.'
spec.homepage = 'https://github.com/royhsu/tiny-kit'
spec.authors = { '<NAME>' => '<EMAIL>' }
spec.source = {
:git => 'https://github.com/royhsu/tiny-kit.git',
:tag => spec.version
}
spec.framework = 'UIKit'
spec.source_files = 'Sources/Core/Sources/*.swift', 'Sources/Core/Sources/**/*.swift', 'Sources/Core/Sources/**/**/*.swift', 'Sources/Core/Sources/**/**/**/*.swift'
spec.ios.source_files = 'Sources/iOS/Sources/*.swift', 'Sources/iOS/Sources/**/*.swift', 'Sources/iOS/Sources/**/**/*.swift', 'Sources/iOS/Sources/**/**/**/*.swift'
spec.ios.deployment_target = '10.0'
spec.swift_version = '5.0'
spec.dependency 'TinyCore', '0.9.0'
spec.dependency 'TinyValidation', '0.3.0'
end
|
Guyutongxue/VSC_ProgrammingGrid
|
scripts/pause-console.rb
|
#!/usr/bin/ruby
###
# https://github.com/Guyutongxue/VSCodeConfigHelper3/blob/main/scripts/pause-console.rb
# Modified for redirecting input
require 'io/console'
if ARGV.length == 0 then
puts "Usage: #{__FILE__} <Executable> [<InputFile>]"
exit
end
command_line = ARGV.map { |arg| %|"#{arg.gsub('"', '\"')}"| }
start_time = Time.now
if ARGV.length == 1 then
system("#{command_line[0]}")
else
system("#{command_line[0]} < #{command_line[1]}")
end
exit_code = $?.exitstatus
end_time = Time.now
elapsed_time = "%.4f" % (end_time - start_time)
puts
print "----------------"
RESET = "\033[0m"
BG_RED = "\033[41m"
BG_GREEN = "\033[42m"
BG_YELLOW_FG_BLACK = "\033[43;30m"
FG_RED = "\033[0;31m"
FG_GREEN = "\033[0;32m"
FG_YELLOW = "\033[0;33m"
# PowerLine Glyphs < and >
GT="\ue0b0"
LT="\ue0b2"
if exit_code == 0 then
exit_fg_color = FG_GREEN
exit_bg_color = BG_GREEN
else
exit_fg_color = FG_RED
exit_bg_color = BG_RED
end
print "#{exit_fg_color}#{LT}#{RESET}"
print "#{exit_bg_color} 返回值 #{exit_code} #{RESET}"
print "#{BG_YELLOW_FG_BLACK} 用时 #{elapsed_time}s #{RESET}"
print "#{FG_YELLOW}#{GT}#{RESET}"
puts "----------------"
puts "进程已退出。按任意键退出..." # "close window" is controlled by Terminal.app preference
STDIN.getch
|
sagarkrkv/Closed-Social-Network
|
ClosedSocialNetwork/db/schema.rb
|
# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20141107161009) do
create_table "activities", :force => true do |t|
t.integer "activity_verb_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "ancestry"
t.integer "author_id"
t.integer "user_author_id"
t.integer "owner_id"
end
add_index "activities", ["activity_verb_id"], :name => "index_activities_on_activity_verb_id"
add_index "activities", ["author_id"], :name => "index_activities_on_author_id"
add_index "activities", ["owner_id"], :name => "index_activities_on_owner_id"
add_index "activities", ["user_author_id"], :name => "index_activities_on_user_author_id"
create_table "activity_actions", :force => true do |t|
t.integer "actor_id"
t.integer "activity_object_id"
t.boolean "follow", :default => false
t.boolean "author", :default => false
t.boolean "user_author", :default => false
t.boolean "owner", :default => false
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "activity_actions", ["activity_object_id"], :name => "index_activity_actions_on_activity_object_id"
add_index "activity_actions", ["actor_id"], :name => "index_activity_actions_on_actor_id"
create_table "activity_object_activities", :force => true do |t|
t.integer "activity_id"
t.integer "activity_object_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "object_type"
end
add_index "activity_object_activities", ["activity_id"], :name => "index_activity_object_activities_on_activity_id"
add_index "activity_object_activities", ["activity_object_id"], :name => "index_activity_object_activities_on_activity_object_id"
create_table "activity_object_audiences", :force => true do |t|
t.integer "activity_object_id"
t.integer "relation_id"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "activity_object_properties", :force => true do |t|
t.integer "activity_object_id"
t.integer "property_id"
t.string "type"
t.boolean "main"
end
add_index "activity_object_properties", ["activity_object_id"], :name => "index_activity_object_properties_on_activity_object_id"
add_index "activity_object_properties", ["property_id"], :name => "index_activity_object_properties_on_property_id"
create_table "activity_objects", :force => true do |t|
t.string "title", :default => ""
t.text "description"
t.datetime "created_at"
t.datetime "updated_at"
t.string "object_type", :limit => 45
t.integer "like_count", :default => 0
t.integer "follower_count", :default => 0
t.integer "visit_count", :default => 0
t.integer "comment_count", :default => 0
end
create_table "activity_verbs", :force => true do |t|
t.string "name", :limit => 45
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "actor_keys", :force => true do |t|
t.integer "actor_id"
t.binary "key_der"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "actor_keys", ["actor_id"], :name => "index_actor_keys_on_actor_id"
create_table "actors", :force => true do |t|
t.string "name"
t.string "email", :default => "", :null => false
t.string "colgid", :default => "", :null => false
t.string "slug"
t.string "subject_type"
t.boolean "notify_by_email", :default => true
t.datetime "created_at"
t.datetime "updated_at"
t.integer "activity_object_id"
t.string "logo_file_name"
t.string "logo_content_type"
t.integer "logo_file_size"
t.datetime "logo_updated_at"
end
add_index "actors", ["activity_object_id"], :name => "index_actors_on_activity_object_id"
add_index "actors", ["colgid"], :name => "index_actors_on_colgid"
add_index "actors", ["email"], :name => "index_actors_on_email"
add_index "actors", ["slug"], :name => "index_actors_on_slug", :unique => true
create_table "admins", :force => true do |t|
t.string "email", :default => "hy", :null => false
t.string "encrypted_password", :default => "hyper451", :null => false
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", :default => 0
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "admins", ["email"], :name => "index_admins_on_email", :unique => true
add_index "admins", ["reset_password_token"], :name => "index_admins_on_reset_password_token", :unique => true
create_table "audiences", :force => true do |t|
t.integer "relation_id"
t.integer "activity_id"
end
add_index "audiences", ["activity_id"], :name => "index_audiences_on_activity_id"
add_index "audiences", ["relation_id"], :name => "index_audiences_on_relation_id"
create_table "authentications", :force => true do |t|
t.integer "user_id"
t.string "provider"
t.string "uid"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "authentications", ["user_id"], :name => "index_authentications_on_user_id"
create_table "comments", :force => true do |t|
t.integer "activity_object_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "comments", ["activity_object_id"], :name => "index_comments_on_activity_object_id"
create_table "contacts", :force => true do |t|
t.integer "sender_id"
t.integer "receiver_id"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "inverse_id"
t.integer "ties_count", :default => 0
end
add_index "contacts", ["inverse_id"], :name => "index_contacts_on_inverse_id"
add_index "contacts", ["receiver_id"], :name => "index_contacts_on_receiver_id"
add_index "contacts", ["sender_id"], :name => "index_contacts_on_sender_id"
create_table "conversations", :force => true do |t|
t.string "subject", :default => ""
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
create_table "documents", :force => true do |t|
t.string "type"
t.integer "activity_object_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "file_file_name"
t.string "file_content_type"
t.string "file_file_size"
t.boolean "file_processing"
end
add_index "documents", ["activity_object_id"], :name => "index_documents_on_activity_object_id"
create_table "events", :force => true do |t|
t.integer "activity_object_id"
t.datetime "start_at"
t.datetime "end_at"
t.boolean "all_day"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "room_id"
t.date "start_date"
t.date "end_date"
t.integer "frequency", :default => 0
t.integer "interval"
t.integer "days", :default => 0
t.integer "interval_flag", :default => 0
end
add_index "events", ["room_id"], :name => "index_events_on_room_id"
create_table "groups", :force => true do |t|
t.integer "actor_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "groups", ["actor_id"], :name => "index_groups_on_actor_id"
create_table "links", :force => true do |t|
t.integer "activity_object_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "url"
t.string "callback_url"
t.string "image"
t.integer "width", :default => 470
t.integer "height", :default => 353
end
add_index "links", ["activity_object_id"], :name => "index_links_on_activity_object_id"
create_table "notifications", :force => true do |t|
t.string "type"
t.text "body"
t.string "subject", :default => ""
t.integer "sender_id"
t.string "sender_type"
t.integer "conversation_id"
t.boolean "draft", :default => false
t.datetime "updated_at", :null => false
t.datetime "created_at", :null => false
t.integer "notified_object_id"
t.string "notified_object_type"
t.string "notification_code"
t.string "attachment"
t.boolean "global", :default => false
t.datetime "expires"
end
add_index "notifications", ["conversation_id"], :name => "index_notifications_on_conversation_id"
create_table "permissions", :force => true do |t|
t.string "action"
t.string "object"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "posts", :force => true do |t|
t.integer "activity_object_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "posts", ["activity_object_id"], :name => "index_posts_on_activity_object_id"
create_table "profiles", :force => true do |t|
t.integer "actor_id"
t.date "birthday"
t.datetime "created_at"
t.datetime "updated_at"
t.string "organization", :limit => 45
t.string "phone", :limit => 45
t.string "mobile", :limit => 45
t.string "fax", :limit => 45
t.string "address"
t.string "city"
t.string "zipcode", :limit => 45
t.string "province", :limit => 45
t.string "country", :limit => 45
t.integer "prefix_key"
t.string "description"
t.string "experience"
t.string "website"
t.string "skype", :limit => 45
t.string "im", :limit => 45
end
add_index "profiles", ["actor_id"], :name => "index_profiles_on_actor_id"
create_table "receipts", :force => true do |t|
t.integer "receiver_id"
t.string "receiver_type"
t.integer "notification_id", :null => false
t.boolean "is_read", :default => false
t.boolean "trashed", :default => false
t.boolean "deleted", :default => false
t.string "mailbox_type", :limit => 25
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "receipts", ["notification_id"], :name => "index_receipts_on_notification_id"
create_table "relation_permissions", :force => true do |t|
t.integer "relation_id"
t.integer "permission_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "relation_permissions", ["permission_id"], :name => "index_relation_permissions_on_permission_id"
add_index "relation_permissions", ["relation_id"], :name => "index_relation_permissions_on_relation_id"
create_table "relations", :force => true do |t|
t.integer "actor_id"
t.string "type"
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.string "sender_type"
t.string "receiver_type"
t.string "ancestry"
end
add_index "relations", ["actor_id"], :name => "index_relations_on_actor_id"
add_index "relations", ["ancestry"], :name => "index_relations_on_ancestry"
create_table "remote_subjects", :force => true do |t|
t.integer "actor_id"
t.string "webfinger_id"
t.text "webfinger_info"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "remote_subjects", ["actor_id"], :name => "index_remote_subjects_on_actor_id"
create_table "rooms", :force => true do |t|
t.integer "actor_id"
t.string "name"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
end
add_index "rooms", ["actor_id"], :name => "index_rooms_on_actor_id"
create_table "sites", :force => true do |t|
t.text "config"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.string "type"
t.integer "actor_id"
end
add_index "sites", ["actor_id"], :name => "index_sites_on_actor_id"
create_table "taggings", :force => true do |t|
t.integer "tag_id"
t.integer "taggable_id"
t.string "taggable_type"
t.integer "tagger_id"
t.string "tagger_type"
t.string "context", :limit => 128
t.datetime "created_at"
end
add_index "taggings", ["tag_id"], :name => "index_taggings_on_tag_id"
add_index "taggings", ["taggable_id", "taggable_type", "context"], :name => "index_taggings_on_taggable_id_and_taggable_type_and_context"
create_table "tags", :force => true do |t|
t.string "name"
end
create_table "ties", :force => true do |t|
t.integer "contact_id"
t.integer "relation_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "ties", ["contact_id"], :name => "index_ties_on_contact_id"
add_index "ties", ["relation_id"], :name => "index_ties_on_relation_id"
create_table "users", :force => true do |t|
t.string "encrypted_password", :limit => 128, :default => "", :null => false
t.string "password_salt"
t.string "reset_password_token"
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", :default => 0
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip"
t.string "last_sign_in_ip"
t.string "authentication_token"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "actor_id"
t.string "language"
t.boolean "connected", :default => false
t.string "status", :default => "available"
t.boolean "chat_enabled", :default => true
t.string "provider"
t.string "uid"
t.string "confirmation_token"
t.datetime "confirmed_at"
t.datetime "confirmation_sent_at"
t.string "unconfirmed_email"
end
add_index "users", ["actor_id"], :name => "index_users_on_actor_id"
add_index "users", ["confirmation_token"], :name => "index_users_on_confirmation_token", :unique => true
add_index "users", ["reset_password_token"], :name => "index_users_on_reset_password_token", :unique => true
end
|
sagarkrkv/Closed-Social-Network
|
ClosedSocialNetwork/db/migrate/20130513154647_sites_as_actor.social_stream_base_engine.rb
|
# This migration comes from social_stream_base_engine (originally 20130125100112)
class SitesAsActor < ActiveRecord::Migration
def change
add_column :sites, :type, :string
add_column :sites, :actor_id, :integer
add_index :sites, :actor_id, name: 'index_sites_on_actor_id'
add_foreign_key :sites, :actors, name: 'index_sites_on_actor_id'
end
end
|
sagarkrkv/Closed-Social-Network
|
ClosedSocialNetwork/db/seeds.rb
|
def h
s = Roo::Excelx.new("#{Dir.getwd}/db/students.xlsx")
s.default_sheet = s.sheets.first
s1 = 1
s2 = s.last_row
(s1..s2).each do |line|
attr_one = s.cell(line, 'A')
attr_two = s.cell(line, 'B')
attr_three = s.cell(line, 'C')
attr_four = s.cell(line, 'D')
user = User.create! :name => attr_one, :email => attr_two, :password => <PASSWORD>, :password_confirmation => <PASSWORD>, :colgid => attr_four
end
end
h()
|
sagarkrkv/Closed-Social-Network
|
ClosedSocialNetwork/db/migrate/20130513154648_main_activity_object_properties.social_stream_base_engine.rb
|
# This migration comes from social_stream_base_engine (originally 20130212092035)
class MainActivityObjectProperties < ActiveRecord::Migration
class APMigration < ActiveRecord::Base
self.table_name = 'activity_object_properties'
self.record_timestamps = false
self.inheritance_column = "other"
end
def up
add_column :activity_object_properties, :main, :boolean
ActivityObjectProperty.reset_column_information
APMigration.where(type: 'ActivityObjectProperty::Poster').all.each do |a|
a.update_attributes! main: true,
type: nil
end
end
end
|
sagarkrkv/Closed-Social-Network
|
ClosedSocialNetwork/db/migrate/20130513154650_create_social_stream_documents.social_stream_documents_engine.rb
|
# This migration comes from social_stream_documents_engine (originally 20120208143721)
class CreateSocialStreamDocuments < ActiveRecord::Migration
def change
create_table "documents", :force => true do |t|
t.string "type"
t.integer "activity_object_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "file_file_name"
t.string "file_content_type"
t.string "file_file_size"
t.boolean "file_processing"
end
add_index "documents", ["activity_object_id"], :name => "index_documents_on_activity_object_id"
add_foreign_key "documents", "activity_objects", :name => "documents_on_activity_object_id"
end
end
|
sagarkrkv/Closed-Social-Network
|
ClosedSocialNetwork/db/migrate/20130513154654_create_social_stream_ostatus.social_stream_ostatus_engine.rb
|
# This migration comes from social_stream_ostatus_engine (originally 20120905145030)
class CreateSocialStreamOstatus < ActiveRecord::Migration
def change
create_table :actor_keys do |t|
t.integer :actor_id
t.binary :key_der
t.timestamps
end
add_index "actor_keys", "actor_id"
create_table :remote_subjects, :force => true do |t|
t.integer :actor_id
t.string :webfinger_id
t.text :webfinger_info
t.timestamps
end
add_index "remote_subjects", "actor_id"
add_foreign_key "actor_keys", "actors", :name => "actor_keys_on_actor_id"
add_foreign_key "remote_subjects", "actors", :name => "remote_subjects_on_actor_id"
end
end
|
mevansam/chef-cookbook-sysutils
|
recipes/default.rb
|
#
# Cookbook Name:: sysutils
# Recipe:: default
#
# Author: <NAME>
# Email: <EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
platform_family = node['platform_family']
# Set up proxies if provided
["http_proxy", "https_proxy", "no_proxy"].each do |proxy_config|
if node["env"].has_key?(proxy_config) &&
!node["env"][proxy_config].nil? &&
!node["env"][proxy_config].empty?
Chef::Config[proxy_config] = ENV[proxy_config] = ENV[proxy_config.upcase] = node["env"][proxy_config]
elsif !Chef::Config[proxy_config].nil? &&
!Chef::Config[proxy_config].empty?
unless ENV[proxy_config] || ENV[proxy_config.upcase]
ENV[proxy_config] = ENV[proxy_config.upcase] = Chef::Config[proxy_config]
end
node.set["env"][proxy_config] = Chef::Config[proxy_config]
node.save
end
end
http_proxy = node["env"]["http_proxy"]
if !http_proxy.nil? && !http_proxy.empty?
sysutils_global_proxy "http proxy" do
http_proxy http_proxy
https_proxy node["env"]["https_proxy"]
ftp_proxy node["env"]["ftp_proxy"]
no_proxy node["env"]["no_proxy"]
end
end
# Check if extra storage was provided and if it was format and mount it
if node["env"].has_key?("data_disk") && !node["env"]["data_disk"].nil? && !node["env"]["data_disk"].empty? &&
node["env"].has_key?("data_path") && !node["env"]["data_path"].nil? && !node["env"]["data_path"].empty?
data_disk = node["env"]["data_disk"]
data_path = node["env"]["data_path"]
script "prepare data disk" do
interpreter "bash"
user "root"
cwd "/tmp"
code <<-EOH
if [ -n "$(lsblk | grep #{data_disk.split("/").last})" ] && \
[ -z "$(blkid | grep #{data_disk})"]; then
echo "**** Formating data disk #{data_disk} with ext4 file system..."
mkfs.ext4 -F #{data_disk}
fi
EOH
end
directory data_path do
recursive true
end
mount data_path do
device data_disk
fstype "ext4"
action [:mount, :enable]
end
end
# Update sysctl settings
execute "reload sysctl" do
command "sysctl -p /etc/sysctl.conf"
action :nothing
end
unless node["env"]["sysctl_remove"].empty?
sysutils_config_file "/etc/sysctl.conf" do
values node["env"]["sysctl_remove"]
format_in Regexp.new('(\S+)\s+=\s+(.+)')
format_out "%s = %s"
daemon_config_dir "/etc/sysctl.d"
action :remove
notifies :run, "execute[reload sysctl]"
end
end
unless node["env"]["sysctl_add"].empty?
sysutils_config_file "/etc/sysctl.conf" do
values node["env"]["sysctl_add"]
format_in Regexp.new('(\S+)\s+=\s+(.+)')
format_out "%s = %s"
daemon_config_dir "/etc/sysctl.d"
action :add
notifies :run, "execute[reload sysctl]"
end
end
# Update ulimit settings
unless node["env"]["ulimit_remove"].empty?
sysutils_config_file "/etc/security/limits.conf" do
values node["env"]["ulimit_remove"]
format_in Regexp.new('(\S+)\s+(\S+)\s+(\S+)\s+(\S+)')
format_out "%-16s%-8s%-16s%s"
daemon_config_dir "/etc/security/limits.d"
action :remove
end
end
unless node["env"]["ulimit_add"].empty?
sysutils_config_file "/etc/security/limits.conf" do
values node["env"]["ulimit_add"]
format_in Regexp.new('(\S+)\s+(\S+)\s+(\S+)\s+(\S+)')
format_out "%-16s%-8s%-16s%s"
daemon_config_dir "/etc/security/limits.d"
action :add
end
end
# Enable/Disable firewall
if !node["env"]["firewall"].nil?
if !node["env"]["firewall"]
case platform_family
when "fedora", "rhel"
script "disable firewall" do
interpreter "bash"
user "root"
code <<-EOH
service iptables save
service iptables stop
chkconfig iptables off
EOH
end
when "debian"
if platform?("ubuntu")
execute '[ -n "$(ufw status | grep inactive)" ] || (ufw disable)'
end
end
end
end
# Setup package repos and install packages
needs_update = false
if !node.attribute?("package_repos_updated") &&
node["env"]["package_repos"].has_key?(platform_family) &&
node["env"]["package_repos"][platform_family].size > 0
package_repos = node['env']['package_repos']['added'] || [ ]
node["env"]["package_repos"][platform_family].each do |repo_detail|
repo_detail_desc = "#{repo_detail}"
if !package_repos.include?(repo_detail_desc)
case platform_family
when "fedora", "rhel"
execute "adding yum repo '#{repo_detail}'" do
command "yum-config-manager --add-repo #{repo_detail}"
end
when "debian"
name = repo_detail[0]
uri = repo_detail[1]
distribution = (repo_detail.size > 2 ? repo_detail[2] : node['lsb']['codename'])
components = (repo_detail.size > 3 ? repo_detail[3].split : [ "main" ])
keyserver = (repo_detail.size > 4 ? repo_detail[4] : nil)
key = (repo_detail.size > 5 ? repo_detail[5] : nil)
apt_repository name do
uri uri
distribution distribution
components components
keyserver keyserver
key key
end
end
package_repos << repo_detail_desc
needs_update = true
end
end
node.set['env']['package_repos']['added'] = package_repos
node.save
end
if needs_update || !node['env']['package_repos']['cache_updated']
case platform_family
when "fedora", "rhel"
execute "update package cache" do
command "yum clean all"
end
ruby_block "refresh chef yum cache" do
block do
yum = Chef::Provider::Package::Yum::YumCache.instance
yum.reload
yum.refresh
end
end
when "debian"
execute "update package cache" do
command "
apt-get -y --force-yes install ubuntu-cloud-keyring;
apt-get -y --force-yes install gplhost-archive-keyring;
apt-get update
"
end
end
node.set["env"]["package_repos"]["cache_updated"] = true
node.save
end
if node["env"]["packages"].has_key?(platform_family)
node["env"]["packages"][platform_family].each do |pkg|
case platform_family
when "debian"
if pkg.kind_of?(Array)
execute "apt-get seed commands" do
command pkg[0]
end
package pkg[1]
else
package pkg
end
else
package pkg
end
end
end
node["env"]["packages"]["pip"].each \
{ |pkg| execute "pip install #{pkg}" } if node["env"]["packages"].has_key?("pip")
# Create additional groups and users
groups = node["env"]["groups"]
if !groups.nil? &&
!groups.empty?
groups.each do |g|
group g
end
end
authorized_keys_file = node["env"]["authorized_keys_file"]
users = node["env"]["users"]
if !users.nil? &&
!users.empty?
users.each do |u|
if !u.kind_of?(Array) || u.size < 4
Chef::Application.fatal!("default[env][users] must be an array of [ user_name, group_name_or_id, home_dir, is_passwordless_sudo ]", 999)
end
user u[0] do
supports :manage_home => true
home u[1]
gid u[2]
shell "/bin/bash"
end
if u[3]
sudo u[0] do
user u[0]
nopasswd true
defaults [ '!requiretty' ]
end
end
sysutils_user_certs u[0] do
cert_data (u.size >= 6 ? u[5] : nil)
authorized_keys (u.size >= 5 ? u[4] : nil)
authorized_keys_file authorized_keys_file
end
end
end
# Setup cron jobs
if node["env"]["cron_jobs"]
# Ensure cron service is installed
include_recipe 'cron::default'
node["env"]["cron_jobs"].each do |name, params|
Chef::Log.info("Adding cron job '#{name}' with params: #{params}")
cron_d name do
predefined_value params["predefined_value"] if params["predefined_value"]
command params["command"]
minute params["minute"] if params["minute"]
hour params["hour"] if params["hour"]
day params["day"] if params["day"]
month params["month"] if params["month"]
weekday params["weekday"] if params["weekday"]
user params["user"] if params["user"]
mailto params["mailto"] if params["mailto"]
path params["path"] if params["path"]
home params["home"] if params["home"]
shell params["shell"] if params["shell"]
comment params["comment"] if params["comment"]
environment params["environment"] if params["environment"]
mode params["mode"] if params["mode"]
end
end
end
# Export directories via NFS
if node["env"]["exports"]
# Ensure nfs server is installed
include_recipe 'nfs::server'
node["env"]["exports"].each do |export|
path = export['path']
directory path do
mode '0777'
recursive true
end
nfs_export path do
network export['network']
writeable export['writeable']
sync export['sync']
options export['options']
end
end
end
if node["env"]["imports"]
# Ensure nfs client is installed
include_recipe 'nfs'
node["env"]["imports"].each do |import|
mount_path = import['mount_path']
directory mount_path do
group import['group'] if import['group']
owner import['owner'] if import['owner']
recursive true
end
mount mount_path do
device "#{import['host']}:#{import['path']}"
fstype "nfs"
options import['options'] || 'rw'
action [:mount, :enable]
end
end
end
|
mevansam/chef-cookbook-sysutils
|
.chef/knife.rb
|
current_dir = File.dirname(__FILE__)
log_level "info"
chef_server_url "http://192.168.50.1:9999"
node_name "cookbook_test"
client_key "#{current_dir}/chef-zero_node.pem"
validation_client_name "chef-zero_validator"
validation_key "#{current_dir}/chef-zero_validator.pem"
|
mevansam/chef-cookbook-sysutils
|
providers/user_certs.rb
|
#
# Author:: <NAME> (<<EMAIL>>)
# Cookbook Name:: sysutils
# Provider: user_certs
#
# Copyright 2014, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ::Chef::Provider # rubocop:disable Documentation
include ::SysUtils::Helper
end
def whyrun_supported?
true
end
action :add do
user = new_resource.name
cert_data = new_resource.cert_data
other_cert_data = new_resource.other_cert_data
authorized_keys = new_resource.authorized_keys
encryption_key = get_encryption_secret(node)
if !encryption_key.nil?
begin
user_data = Chef::EncryptedDataBagItem.load("#{new_resource.data_bag}-#{node.chef_environment}", "#{user}", encryption_key)
if !user_data.nil?
cert_data = user_data["cert_data"] if user_data["cert_data"]
other_cert_data = user_data["other_cert_data"] if user_data["other_cert_data"]
authorized_keys = user_data["authorized_keys"] if user_data["authorized_keys"]
end
rescue
Chef::Log.info("No encrypted data bag with certificate details found for user '#{user}'.")
end
end
if !cert_data.nil? || !other_cert_data.empty? || !authorized_keys.empty?
Chef::Log.debug("Contents of data bag '#{new_resource.data_bag}' item user_data[cert_data]: #{cert_data}")
Chef::Log.debug("Contents of data bag '#{new_resource.data_bag}' item user_data[other_cert_data]: #{other_cert_data}")
Chef::Log.debug("Contents of data bag '#{new_resource.data_bag}' item user_data[authorized_keys]: #{authorized_keys}")
authorized_keys_file_name = new_resource.authorized_keys_file
known_hosts = new_resource.known_hosts
user_home = `echo ~#{user}`.split[0]
if ::Dir.exists?(user_home)
ssh_dir = user_home + "/.ssh/"
id_rsa_file = ssh_dir + "id_rsa"
known_hosts_file = ssh_dir + "known_hosts"
authorized_keys_file = ssh_dir + authorized_keys_file_name
group = `groups #{user}`.split[2]
r = Chef::Resource::Directory.new(ssh_dir, @run_context)
r.owner user
r.group group
r.run_action(:create)
r = Chef::Resource::File.new(id_rsa_file, @run_context)
r.content cert_data
r.owner user
r.group group
r.mode 0400
r.run_action(:create)
r = Chef::Resource::File.new(known_hosts_file, @run_context)
r.owner user
r.group group
r.not_if { ::File.exists?(known_hosts_file) }
r.run_action(:create)
r = Chef::Resource::RubyBlock.new("update known hosts", @run_context)
r.block do
hosts = Set.new
::IO.readlines(known_hosts_file).each do |known_host|
host_fields = known_host.split(/,|\s+/)
hosts << host_fields[0]
hosts << host_fields[1]
end
::File.open(known_hosts_file, 'a') do |file|
known_hosts.each do |host|
unless hosts.include?(host)
Chef::Log.debug("Adding host \"#{host}\" to \"#{known_hosts_file}\".")
file.write(`ssh-keyscan -t rsa #{host}`)
end
end
end
end
r.run_action(:create)
ssh_configs = [ ]
other_cert_data.each do |other_cert|
key_file = ssh_dir + other_cert["name"]
r = Chef::Resource::File.new(key_file, @run_context)
r.content other_cert["data"]
r.owner user
r.group group
r.mode 0400
r.run_action(:create)
if other_cert.has_key?("hosts")
other_cert["hosts"].each do |host|
ssh_configs << [ host, key_file ]
end
end
end
template "#{ssh_dir}config" do
source "ssh_config.erb"
owner user
group group
mode "0644"
variables(
:ssh_configs => ssh_configs
)
end
if ::File.exists?(authorized_keys_file)
public_keys = ::IO.readlines(authorized_keys_file)
authorized_keys.each do |key|
i = 0
while (i<public_keys.size)
break if public_keys[i][/(.*)\n?/, 1]==key
i += 1
end
public_keys << "#{key}\n" if i==public_keys.size
end
else
public_keys = authorized_keys
end
Chef::Log.info("Writing authorized_keys: #{public_keys}")
::File.open(authorized_keys_file, "w") { |f| f.write(public_keys.join) }
else
Chef::Log.warn("User \"#{user}\" does not exist or does not have a home directory.")
end
end
end
action :authorize do
user = new_resource.name
cert_data = new_resource.cert_data
authorized_keys = new_resource.authorized_keys
user_data = data_bag_item(new_resource.data_bag, user)
unless user_data.nil?
cert_data = user_data["cert_data"] if user_data["cert_data"]
authorized_keys = user_data["authorized_keys"] if user_data["authorized_keys"]
end
end
|
mevansam/chef-cookbook-sysutils
|
libraries/helpers.rb
|
#
# Author: <NAME>
# Email: <EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'rubygems'
require 'rubygems/dependency_installer'
require 'mixlib/shellout'
module ::SysUtils # rubocop:disable Documentation
module Helper
def gem_installed?(name, version = Gem::Requirement.default)
version = Gem::Requirement.create version unless version.is_a? Gem::Requirement
Gem::Specification.each.any? { |spec| name == spec.name and version.satisfied_by? spec.version }
end
def install_gem(name, options = {})
version = options.fetch(:version, Gem::Requirement.default)
return if gem_installed? name, version
installed_gems = Gem::DependencyInstaller.new({:document => []}).install name, version
end
def shell(cmd, ignore_error = false)
Chef::Log.debug("Executing shell command: #{cmd}")
sh = Mixlib::ShellOut.new(cmd)
sh.run_command
sh.error! if !ignore_error
return sh.stdout.chomp
end
def shell!(cmd, ignore_error = false)
Chef::Log.debug("Executing shell command: #{cmd}")
sh = Mixlib::ShellOut.new(cmd)
sh.run_command
sh.error! if !ignore_error
end
end
end
|
mevansam/chef-cookbook-sysutils
|
libraries/ssh_helper.rb
|
#
# Author: <NAME>
# Email: <EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include ::SysUtils::Helper
install_gem("net-ssh")
require "net/ssh"
install_gem("net-scp")
require 'net/scp'
module ::SysUtils # rubocop:disable Documentation
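# Example usage (illustrative sketch only -- host, user and credentials below are
# hypothetical, not part of this cookbook):
#
#   ssh = ::SysUtils::SSH.new("192.0.2.10", "deploy", deploy_key_or_password)
#   ssh.copy("/tmp/bundle", "/opt/bundle", true)   # wipe the destination, then scp upload
#   output = ssh.execute("uname -a")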
class SSH
def initialize(host, user, key)
@host = host
@user = user
@key = key
end
def create_ssh_session()
Chef::Log.debug("Starting ssh session to #{@user}@#{@host}.")
if @key.nil?
Chef::Application.fatal!("No password or key data provided for ssh session.", 999)
elsif @key.start_with?("-----BEGIN RSA PRIVATE KEY-----")
Chef::Log.debug("Using ssh key.")
return Net::SSH.start(@host, @user,
{
:key_data => @key,
:user_known_hosts_file => "/dev/null"
} )
else
Chef::Log.debug("Using ssh password.")
return Net::SSH.start(@host, @user,
{
:password => @key,
:user_known_hosts_file => "/dev/null"
} )
end
end
def copy(src = nil, dest = nil, clean = false, verbose = false)
Chef::Log.debug("Executing remote copy from #{src} to #{@host}:#{dest}.")
ssh = create_ssh_session()
ssh.exec!("rm -fr #{dest}") if clean
ssh.scp.upload!(src, dest, :recursive => true, :verbose => verbose)
ensure
ssh.close
end
def execute(cmd)
Chef::Log.debug("Executing remote command on host #{@host}: #{cmd}")
ssh = create_ssh_session()
result = ssh.exec!(cmd)
ensure
ssh.close
end
def execute_ex(cmd, env = {}, src = nil, dest = nil, sudo = false, clean = false, verbose = false)
unless cmd.nil? || cmd.empty?
Chef::Log.debug("Executing remote command on host #{@host}: #{cmd}")
output = StringIO.new
copy(src, dest, clean, verbose) if !src.nil? && !dest.nil?
environment = env.map { |k,v| "export #{k}=#{v}" }.join("; ")
environment += ";" if environment.length > 0
if sudo
tmp_cmd_file = "/tmp/cmd#{SecureRandom.uuid}"
cmd = "#{environment}" \
"echo \"#{cmd}\" > #{tmp_cmd_file} && " \
"chmod 755 #{tmp_cmd_file} && " \
"sudo -E su -c #{tmp_cmd_file} && " \
"rm -f #{tmp_cmd_file}"
else
cmd = "#{environment}#{cmd}"
end
ssh = create_ssh_session()
begin
channel = ssh.open_channel do |ch|
ch.request_pty do |_, success1|
Chef::Application.fatal!("Could not execute command #{command} on remote host #{@primary_hostname}", 999) unless success1
ch.exec(cmd) do |_, success2|
Chef::Application.fatal!("Could not execute command #{command} on remote host #{@primary_hostname}", 999) unless success2
ch.on_data do |_, data|
output.print(data)
data.split("\n").each { |line| puts "#{@host}: #{line}" } if verbose
end
ch.on_extended_data do |_, _, data|
output.print(data)
data.split("\n").each { |line| puts "#{@host}: #{line}" } if verbose
end
end
end
end
channel.wait
ensure
ssh.close
end
result = "#{output.string}"
return result
end
end
end
end
|
mevansam/chef-cookbook-sysutils
|
providers/config_file.rb
|
#
# Author:: <NAME> (<<EMAIL>>)
# Cookbook Name:: sysutils
# Provider: config_file
#
# Copyright 2014, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "fileutils"
class ::Chef::Provider # rubocop:disable Documentation
include ::SysUtils::Helper
end
def whyrun_supported?
true
end
action :add do
file_data = get_updated_config_file_data(
new_resource.values, [],
new_resource.format_in, new_resource.format_out,
new_resource.name, new_resource.daemon_config_dir,
new_resource.comment_format )
unless file_data.empty?
converge_by("Adding #{ @new_resource }") do
write_config_file_data(file_data)
end
end
if !new_resource.owner.nil?
file new_resource.name do
owner new_resource.owner
group new_resource.group
mode "0644"
action :touch
end
end
end
action :remove do
file_data = get_updated_config_file_data(
[], new_resource.values,
new_resource.format_in, new_resource.format_out,
new_resource.name, new_resource.daemon_config_dir,
new_resource.comment_format )
unless file_data.empty?
converge_by("Removing #{ @new_resource }") do
write_config_file_data(file_data)
end
end
end
|
mevansam/chef-cookbook-sysutils
|
metadata.rb
|
name 'sysutils'
maintainer '<NAME>'
maintainer_email '<EMAIL>'
license 'All rights reserved'
description 'Installs/Configures sysutils'
long_description 'Resources for common environment configurations such as sysctl, users, groups, ssh keys, etc.'
version '1.0.0'
depends 'modules', '>= 0.1.2'
depends 'hostsfile', '~> 2.4.5'
depends 'apt', '2.4.0'
depends 'yum', '3.2.2'
depends 'sudo', '2.6.0'
depends 'cron', '>= 1.2.0'
depends 'nfs', '>= 2.1.0'
|
mevansam/chef-cookbook-sysutils
|
resources/user_certs.rb
|
#
# Author:: <NAME> (<<EMAIL>>)
# Cookbook Name:: sysutils
# Resource: user_certs
#
# Copyright 2014, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
actions :add, :authorize
attribute :cert_data, :kind_of => String, :default => nil
attribute :other_cert_data, :kind_of => Array, :default => [ ]
attribute :authorized_keys, :kind_of => Array, :default => [ ]
# If data bag exists for the user then all of the above values are taken from the bag
attribute :data_bag, :kind_of => String, :default => "users"
# The default authorized keys file. This may be overridden if the
# system is configured to use additional authorized key files.
attribute :authorized_keys_file, :kind_of => String, :default => "authorized_keys"
# List of known hosts whose keys will be saved
attribute :known_hosts, :kind_of => Array, :default => [ ]
def initialize(*args)
super
@resource_name = :user_certs
@action = :add
end
|
mevansam/chef-cookbook-sysutils
|
attributes/default.rb
|
# Default attributes for sysutils cookbook
default["env"]["secret_file_path"] = nil
# Array of network interfaces. Each interface hash must map to an attribute
# of the network_interface resource of the network_interface cookbook.
# https://github.com/redguide/network_interfaces/blob/master/resources/default.rb
#
# example:
#
# * note: Each network interface is represented as a hash of key-value pairs
# which will be mapped to attributes of the network_interfaces resource.
#
# "network_interfaces" => [
# {
# device => ...
# .
# .
# }
# ]
#
# Currently this applies to Ubuntu/Debian systems only
#
default["env"]["network_interfaces"] = [ ]
default["env"]["http_proxy"] = nil
default["env"]["https_proxy"] = nil
default["env"]["ftp_proxy"] = nil
default["env"]["no_proxy"] = nil
# Additional block storage to allocate as a data disk
default["env"]["data_disk"] = nil
default["env"]["data_path"] = nil
default["env"]["sysctl_add"] = [ ]
default["env"]["sysctl_remove"] = [ ]
default["env"]["ulimit_add"] = [ ]
default["env"]["ulimit_remove"] = [ ]
# Enable/Disable local firewall
default["env"]["firewall"] = true
# Each repo should be an array of [ name, uri, distribution, components, keyserver, key ]
# Only the name and uri are required and all the other values are optional
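# e.g. (hypothetical values, Debian/Ubuntu style):
#   [ "myrepo", "http://apt.example.com/ubuntu", "trusty", "main restricted", "hkp://keyserver.example.com:80", "ABCD1234" ]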
default["env"]["package_repos"]["rhel"] = [ ]
default["env"]["package_repos"]["debian"] = [ ]
# Indicates if package cache needs to be updated
default["env"]["package_repos"]["cache_updated"] = false
# Each package is either a string or an array of [ cmd, package ] where
# cmd is a list of commands to execute in the shell such as debconf
# selections.
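# e.g. (hypothetical values): "htop", or
#   [ "echo 'mysql-server mysql-server/root_password password changeme' | debconf-set-selections", "mysql-server" ]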
default["env"]["packages"]["rhel"] = [ ]
default["env"]["packages"]["debian"] = [ ]
# Add user groups
default["env"]["groups"] = [ ]
# Add users - array of
#
# - [ user_name, home_dir, group_name_or_id, is_passwordless_sudo, [ authorized_key_1, authorized_key_2, ... ], user_cert ]
#
# * if home_dir is nil then the user's home will be created in the default folder
# * if group_name_or_id is nil then the users default group will be used
# * last two elements of the array are optional
#
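# e.g. (hypothetical values):
# - [ "deploy", "/home/deploy", "deploy", true, [ "ssh-rsa AAAAB3Nza... deploy@workstation" ] ]
#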
default["env"]["users"] = [ ]
# Override the file in which to add authorized public keys for ssh logins
default["env"]["authorized_keys_file"] = "authorized_keys"
# Cron jobs - see 'cron' cookbook
#
# Map of:
#
# name => {
# predefined_value => '',
# command => '',
# minute => '',
# hour => '',
# day => '',
# month => '',
# weekday => '',
# user => '',
# mailto => '',
# path => '',
# home => '',
# shell => '',
# comment => '',
# environment => '',
# mode => ''
# }
#
default["env"]["cron_jobs"] = nil
# Export folders via NFS - see 'nfs' cookbook
#
# Array of:
# path: exported folder
# network: export network CIDR or IPs
#   writeable: true or false
#   sync: true or false
# options: list of nfs options
#
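# e.g. (hypothetical values):
#   { "path" => "/data/shared", "network" => "10.0.0.0/24", "writeable" => true, "sync" => true, "options" => [ "no_root_squash" ] }
#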
default["env"]["exports"] = nil
# Remote NFS shares to mount.
#
# Array of:
# mount_path: local path to mount share
# group: (optional) group id of mount path
# owner: (optional) owner of mount path
# host: remote NFS host
# path: exported path
#   options: mount options (defaults to 'rw')
#
default["env"]["imports"] = nil
|
mevansam/chef-cookbook-sysutils
|
providers/global_proxy.rb
|
#
# Author:: <NAME> (<<EMAIL>>)
# Cookbook Name:: sysutils
# Provider: global_proxy
#
# Copyright 2014, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class ::Chef::Provider # rubocop:disable Documentation
include ::SysUtils::Helper
end
def whyrun_supported?
true
end
action :install do
http_proxy = new_resource.http_proxy
https_proxy = (new_resource.https_proxy.nil? || new_resource.https_proxy.empty? ? http_proxy : new_resource.https_proxy)
ftp_proxy = (new_resource.ftp_proxy.nil? || new_resource.ftp_proxy.empty? ? http_proxy : new_resource.ftp_proxy)
no_proxy = "localhost,127.0.0.1,#{node["hostname"]},#{node["fqdn"]},#{node["ipaddress"]}"
if !new_resource.no_proxy.nil? && !new_resource.no_proxy.empty?
no_proxy += ",#{new_resource.no_proxy}"
end
template "/etc/profile.d/proxy_inits.sh" do
source "proxy_inits.sh.erb"
mode "0755"
variables(
:http_proxy => http_proxy,
:https_proxy => https_proxy,
:no_proxy => no_proxy,
:host_ip => node["ipaddress"]
)
end
script "configure proxy_inits to run for non-login shell sessions" do
interpreter "bash"
user "root"
cwd "/tmp"
code <<-EOH
if [ -e /etc/bashrc ]; then
sed -i '/proxy_inits.sh/d' /etc/bashrc
sed -i '2i [ -r /etc/profile.d/proxy_inits.sh ] && source /etc/profile.d/proxy_inits.sh' /etc/bashrc
elif [ -e /etc/bash.bashrc ]; then
sed -i '/proxy_inits.sh/d' /etc/bash.bashrc
sed -i '2i [ -r /etc/profile.d/proxy_inits.sh ] && source /etc/profile.d/proxy_inits.sh' /etc/bash.bashrc
fi
EOH
end
case
when platform_family?("rhel", "fedora")
sysutils_config_file "/etc/yum.conf" do
values [
[ "proxy", http_proxy ]
]
format_in Regexp.new('(\S+)\s*=\s*(\S+)\s*')
format_out "%s=%s"
action :add
end
when platform_family?("debian")
sysutils_config_file "/etc/apt/apt.conf.d/01proxy" do
values [
[ "Acquire::http::Proxy", http_proxy ],
[ "Acquire::https::Proxy", https_proxy ],
[ "Acquire::ftp::Proxy", ftp_proxy ]
]
format_in Regexp.new('(\S+)\s+\"(\S+)\";')
format_out "%s \"%s\";"
comment_format "//"
daemon_config_dir "/etc/apt/apt.conf.d"
action :add
end
end
end
|
mevansam/chef-cookbook-sysutils
|
resources/global_proxy.rb
|
#
# Author:: <NAME> (<<EMAIL>>)
# Cookbook Name:: sysutils
# Resource: global_proxy
#
# Copyright 2014, <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
actions :install
attribute :http_proxy, :kind_of => String, :required => true
attribute :https_proxy, :kind_of => String
attribute :ftp_proxy, :kind_of => String
attribute :no_proxy, :kind_of => String
def initialize(*args)
super
@resource_name = :global_proxy
@action = :install
end
|
mevansam/chef-cookbook-sysutils
|
libraries/config_file_helper.rb
|
#
# Author: <NAME>
# Email: <EMAIL>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef"
require "fileutils"
module ::SysUtils # rubocop:disable Documentation
module Helper
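# Builds the updated contents of a configuration file (plus any files found in
# daemon_config_dir) without writing anything to disk. Each non-comment line is
# parsed with format_in; lines whose parsed values appear in values_to_remove are
# commented out, lines whose first field matches an entry in values_to_add are
# rewritten with format_out when the remaining fields differ, and any values_to_add
# entries not found anywhere are appended to config_file. Returns a hash of
# { file_path => new_contents } for the files that changed; see
# write_config_file_data below for the actual write.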
def get_updated_config_file_data(values_to_add, values_to_remove, format_in, format_out, config_file, daemon_config_dir = nil, comment_format = "#")
if !File.exists?(config_file)
File.open(config_file, 'w+') { |f| f.write("") }
end
config_files = [ ]
if !daemon_config_dir.nil? && Dir.exists?(daemon_config_dir)
config_files.concat(Dir.entries(daemon_config_dir).select { |e| e != "." && e != ".." }.collect { |e| "#{daemon_config_dir}/#{e}"})
end
unless config_files.include?(config_file)
config_files << config_file
end
file_data = { }
config_file_data = ""
new_values = Array.new(values_to_add)
Chef::Log.debug("Values to add: #{values_to_add}")
Chef::Log.debug("Values to remove: #{values_to_remove}")
config_files.each do |file|
Chef::Log.debug("Checking for updates to configuration file #{file}...")
lines = IO.readlines(file)
changed = false
update = false
for i in 0..(lines.size-1)
line = lines[i]
unless line.nil? || line.start_with?(comment_format)
values = [ ]
line.scan(format_in) { |v| values.concat(v) }
next if values.empty?
Chef::Log.debug(" - Extracted values from file: #{values}")
values_to_remove.each do |v|
if values == v
lines[i] = "#{comment_format}#{line}"
changed = true
break
end
end
(new_values.size-1).downto(0) do |j|
v = new_values[j]
for k in 0..(v.size-1)
if v[k] != values[k]
update = (k > 0)
break
end
end
if k > 0
if update
Chef::Log.debug(" - Updating line in #{file}: \"#{lines[i]}\"")
lines[i] = (format_out % v) + "\n"
Chef::Log.debug(" - Updated line in #{file}: \"#{lines[i]}\"")
update = false
changed = true
end
new_values.delete_at(j)
end
end
end
end
if file == config_file
config_file_data = lines.join.chomp
file_data[file] = config_file_data if changed
else
file_data[file] = lines.join if changed
end
end
Chef::Log.debug("New values remaining to add: #{new_values}")
if new_values.size > 0
if file_data.has_key?(config_file)
file_data[config_file] << "\n" << new_values.collect { |v| format_out % v }.join("\n") << "\n"
elsif !config_file_data.empty?
file_data[config_file] = config_file_data << "\n" << new_values.collect { |v| format_out % v }.join("\n") << "\n"
else
file_data[config_file] = new_values.collect { |v| format_out % v }.join("\n") << "\n"
end
end
return file_data
end
def write_config_file_data(file_data)
file_data.each_pair do |file, data|
Chef::Log.debug("Writing file #{file}: \n#{data}")
::File.open(file, "w") { |f| f.write(data) }
end
end
end
end
|
murny/jupiter
|
lib/jupiter/version.rb
|
module Jupiter
VERSION = '1.2.7'.freeze
end
|
murny/jupiter
|
db/seeds.rb
|
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
if Rails.env.development? || Rails.env.uat?
require 'active_fedora/cleaner'
require "open-uri"
require 'faker'
# For the main community/collections
THINGS = ['cat', 'dog', 'unicorn', 'hamburger', 'librarian'].freeze
# For padding community/collection lists for pagination (need at least 26, a couple uppercase to confirm sort)
EXTRA_THINGS = ['Library', 'DONAIR', 'magpie', 'toque', 'sombrero', 'yeti', 'mimosa', 'ukulele', 'tourtière',
'falafel', 'calculator', 'papusa'].freeze
puts 'Starting seeding of dev database...'
# start fresh
[Announcement, ActiveStorage::Blob, ActiveStorage::Attachment, JupiterCore::AttachmentShim,
Identity, User, Type, Language, Institution].each(&:destroy_all)
ActiveFedora::Cleaner.clean!
# Seed an admin user
admin = User.create(name: '<NAME>', email: '<EMAIL>', admin: true)
admin.identities.create(provider: 'developer', uid: '<EMAIL>')
# Seed a non-admin user
non_admin = User.create(name: '<NAME>', email: '<EMAIL>', admin: false)
non_admin.identities.create(provider: 'developer', uid: '<EMAIL>')
# Seed an suspended admin user
bad_admin = User.create(name: '<NAME>', email: '<EMAIL>', admin: true, suspended: true)
bad_admin.identities.create(provider: 'developer', uid: '<EMAIL>')
# Seed an suspended regular user
bad_user = User.create(name: '<NAME>', email: '<EMAIL>', admin: false, suspended: true)
bad_user.identities.create(provider: 'developer', uid: '<EMAIL>')
# A bunch of non-identity users to manipulate in the admin interface
100.times do
name = Faker::GameOfThrones.unique.character
User.create(name: name, email: "#{<EMAIL>.gsub(/ +/, '.').<EMAIL>", admin: false)
end
# Let's pick 10 prolific creators, 10 contributors
creators = 10.times.map { "#{Faker::Cat.unique.name} #{Faker::Cat.unique.breed.gsub(/[ ,]+/, '-')}" }
contributors = 10.times.map { Faker::FunnyName.unique.name_with_initial }
institutions = [CONTROLLED_VOCABULARIES[:institution].uofa, CONTROLLED_VOCABULARIES[:institution].st_stephens]
THINGS.each_with_index do |thing, idx|
if idx % 2 == 0
title = "The department of #{thing.capitalize}"
else
title = "Special reports about #{thing.pluralize}"
end
community = Community.new_locked_ldp_object(
owner: admin.id,
title: title,
description: Faker::Lorem.sentence(20, false, 0).chop
).unlock_and_fetch_ldp_object(&:save!)
# Attach logos, if possible
filename = File.expand_path(Rails.root + "tmp/#{thing}.png")
unless File.exist?(filename)
unless ENV['SKIP_DOWNLOAD_COMMUNITY_LOGOS'].present?
set = (thing == 'cat') ? 'set4' : 'set1'
url = Faker::Avatar.image(thing, "100x100", "png", set)
File.open(filename, 'wb') do |fo|
fo.write open(url).read
end
end
end
if File.exist?(filename)
community.logo.attach(io: File.open(filename), filename: "#{thing}.png", content_type: "image/png")
end
item_collection = Collection.new_locked_ldp_object(
owner: admin.id,
title: "The annals of '#{thing.capitalize} International'",
community_id: community.id,
description: Faker::Lorem.sentence(40, false, 0).chop
).unlock_and_fetch_ldp_object(&:save!)
thesis_collection = Collection.new_locked_ldp_object(
owner: admin.id,
title: "Theses about #{thing.pluralize}",
community_id: community.id,
description: Faker::Lorem.sentence(40, false, 0).chop
).unlock_and_fetch_ldp_object(&:save!)
# Items
20.times do |i|
seed = rand(10)
seed2 = rand(10)
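# seed drives the creator, language and license picks below; seed2 drives the contributor picks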
base_attributes = {
owner: admin.id,
visibility: JupiterCore::VISIBILITY_PUBLIC,
subject: [thing.capitalize],
doi: "doi:bogus-#{Time.current.utc.iso8601(3)}"
}
# Add an occasional verbose description
description = if i % 10 == 5
Faker::Lorem.sentence(100, false, 0).chop
else
Faker::Lorem.sentence(20, false, 0).chop
end
# Probabilistically about 70% English, 20% French, 10% Ukrainian
languages = if seed % 10 > 2
[CONTROLLED_VOCABULARIES[:language].english]
elsif seed % 10 > 0
[CONTROLLED_VOCABULARIES[:language].french]
else
[CONTROLLED_VOCABULARIES[:language].ukrainian]
end
licence_right = {}
item_attributes = base_attributes.merge({
title: "The effects of #{Faker::Beer.name} on #{thing.pluralize}",
created: rand(20_000).days.ago.to_s,
creators: [creators[seed]],
contributors: [contributors[seed2]],
description: description,
languages: languages,
})
# Add the occasional double-author work
item_attributes[:creators] << creators[(seed + 5) % 10] if i % 7 == 3
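# Roughly 60% CC BY 4.0, 10% public domain mark, 10% legacy CC BY 3.0, and 20% a free-text rights statement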
if seed % 10 < 6
item_attributes[:license] = CONTROLLED_VOCABULARIES[:license].attribution_4_0_international
elsif seed % 10 < 7
item_attributes[:license] = CONTROLLED_VOCABULARIES[:license].public_domain_mark_1_0
elsif seed % 10 < 8
item_attributes[:license] = CONTROLLED_VOCABULARIES[:old_license].attribution_3_0_international
else
item_attributes[:rights] = 'Share my stuff with everybody'
end
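# Cycle item types by community index: published article, draft/submitted article, then report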
if idx % 3 == 0
item_attributes[:item_type] = CONTROLLED_VOCABULARIES[:item_type].article
item_attributes[:publication_status] = [CONTROLLED_VOCABULARIES[:publication_status].published]
elsif idx % 3 == 1
item_attributes[:item_type] = CONTROLLED_VOCABULARIES[:item_type].article
item_attributes[:publication_status] = [CONTROLLED_VOCABULARIES[:publication_status].draft,
CONTROLLED_VOCABULARIES[:publication_status].submitted]
else
item_attributes[:item_type] = CONTROLLED_VOCABULARIES[:item_type].report
end
# Every once in a while, create a mondo-item with full, rich metadata to help view-related work
if i == 8
item_attributes[:title] = item_attributes[:title].gsub(/^The/, 'The complete')
# Throw in a second language occasionally
item_attributes[:languages] << CONTROLLED_VOCABULARIES[:language].other
# Why 3 and 7 below? Neither number shares a divisor with 10, ensuring a unique set
item_attributes[:creators] += 4.times.map { |j| creators[(seed + 3 * j) % 10] }
item_attributes[:contributors] += 3.times.map { |j| contributors[(seed2 + 7 * j) % 10] }
item_attributes[:subject] += ['Mondo']
item_attributes[:spatial_subjects] = ['Vegreville']
item_attributes[:temporal_subjects] = ['1980s']
item_attributes[:alternative_title] = "A full, holistic, #{thing}-tastic approach"
item_attributes[:related_link] = "http://www.example.com/#{thing}"
item_attributes[:is_version_of] = ["The CDROM titled '#{thing.pluralize.capitalize}!'",
'The original laserdisc series from Orange-on-a-Blue-Background studios']
item_attributes[:source] = "Chapter 5 of '#{thing.pluralize.capitalize} and what they drink'"
end
item = Item.new_locked_ldp_object(item_attributes).unlock_and_fetch_ldp_object do |uo|
if i == 8
uo.add_to_path(community.id, item_collection.id)
uo.add_to_path(community.id, thesis_collection.id)
uo.save!
else
uo.add_to_path(community.id, item_collection.id)
uo.save!
end
end
if i == 8
# Attach two files to the mondo-item
File.open(Rails.root + 'app/assets/images/theses.jpg', 'r') do |file1|
File.open(Rails.root + 'test/fixtures/files/image-sample.jpeg', 'r') do |file2|
# Bit of a hack to fake a long file name ...
def file2.original_filename
'wefksdkhvkasdkfjhwekkjahsdkjkajvbkejfkwejfjkdvkhdkfhw&ükefkhoiekldkfhkdfjhiwuegfugksjdcjbsjkdbw.jpeg'
end
item.add_and_ingest_files([file1, file2])
end
end
end
item.set_thumbnail(item.files.first) if item.files.first.present?
field = Faker::Job.field
level = ["Master's", 'Doctorate'][i % 2]
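# The first ten theses in each community go to the UofA institution, the last ten to St. Stephen's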
thesis_attributes = base_attributes.merge({
title: "Thesis about the effects of #{Faker::Beer.name} on #{thing.pluralize}",
graduation_date: "Fall #{rand(20_000).days.ago.year}",
dissertant: creators[seed],
abstract: description,
language: languages.first,
specialization: field,
departments: ["Deparment of #{field}"],
supervisors: ["#{contributors[seed]} (#{field})"],
committee_members: ["#{contributors[seed2]} (#{field})"],
rights: 'Share my stuff with everybody',
thesis_level: level,
degree: "#{level} of #{field}",
institution: institutions[(i / 10) % 2]
})
# Every once in a while, create a mondo-thesis with full, rich metadata to help view-related work
if i == 8
thesis_attributes[:title] = thesis_attributes[:title].gsub(/^Thesis/, 'An über-thesis')
thesis_attributes[:subject] += ['Mondo']
thesis_attributes[:alternative_title] = "A full, holistic, #{thing}-tastic approach"
thesis_attributes[:is_version_of] = ["The CDROM titled '#{thing.pluralize.capitalize}!'",
'The original laserdisc series from Orange-on-a-Blue-Background studios']
department2 = 'Department of Everything'
thesis_attributes[:departments] += [department2]
thesis_attributes[:supervisors] += ["#{contributors[(seed + 3 * seed2) % 10]} (#{department2})"]
thesis_attributes[:committee_members] += ["#{contributors[(seed + 7 * seed2) % 10]} (#{department2})"]
end
thesis = Thesis.new_locked_ldp_object(thesis_attributes).unlock_and_fetch_ldp_object do |uo|
if i == 8
uo.add_to_path(community.id, item_collection.id)
uo.add_to_path(community.id, thesis_collection.id)
uo.save!
else
uo.add_to_path(community.id, thesis_collection.id)
uo.save!
end
end
if i == 8
# To test PCDM/list_source ordering, attach three files to the mondo-thesis!
File.open(Rails.root + 'app/assets/images/theses.jpg', 'r') do |file1|
File.open(Rails.root + 'test/fixtures/files/image-sample.jpeg', 'r') do |file2|
File.open(Rails.root + 'app/assets/images/era-logo.png', 'r') do |file3|
thesis.add_and_ingest_files([file1, file2, file3])
end
end
end
end
thesis.set_thumbnail(thesis.files.first) if thesis.files.first.present?
end
# Add a private item
Item.new_locked_ldp_object(
owner: admin.id,
creators: [creators[rand(10)]],
visibility: JupiterCore::VISIBILITY_PRIVATE,
created: rand(20_000).days.ago.to_s,
title: "Private #{thing.pluralize}, public lives: a survey of social media trends",
description: Faker::Lorem.sentence(20, false, 0).chop,
languages: [CONTROLLED_VOCABULARIES[:language].english],
license: CONTROLLED_VOCABULARIES[:license].attribution_4_0_international,
item_type: CONTROLLED_VOCABULARIES[:item_type].chapter,
subject: [thing.capitalize, 'Privacy'],
doi: "doi:bogus-#{Time.current.utc.iso8601(3)}"
).unlock_and_fetch_ldp_object do |uo|
uo.add_to_path(community.id, item_collection.id)
uo.save!
end
# Add a CCID protected item
Item.new_locked_ldp_object(
owner: admin.id,
creators: [creators[rand(10)]],
visibility: JupiterCore::VISIBILITY_AUTHENTICATED,
created: rand(20_000).days.ago.to_s,
title: "Everything You Need To Know About: University of Alberta and #{thing.pluralize}!",
description: Faker::Lorem.sentence(20, false, 0).chop,
languages: [CONTROLLED_VOCABULARIES[:language].english],
license: CONTROLLED_VOCABULARIES[:license].attribution_4_0_international,
item_type: CONTROLLED_VOCABULARIES[:item_type].report,
subject: [thing.capitalize, 'CCID'],
doi: "doi:bogus-#{Time.current.utc.iso8601(3)}"
).unlock_and_fetch_ldp_object do |uo|
uo.add_to_path(community.id, item_collection.id)
uo.save!
end
# Add a currently embargoed item
Item.new_locked_ldp_object(
owner: admin.id,
creators: [creators[rand(10)]],
visibility: Item::VISIBILITY_EMBARGO,
created: rand(20_000).days.ago.to_s,
title: "Embargo and #{Faker::Address.country}: were the #{thing.pluralize} left behind?",
description: Faker::Lorem.sentence(20, false, 0).chop,
languages: [CONTROLLED_VOCABULARIES[:language].english],
license: CONTROLLED_VOCABULARIES[:license].attribution_4_0_international,
item_type: CONTROLLED_VOCABULARIES[:item_type].conference_workshop_presentation,
subject: [thing.capitalize, 'Embargoes'],
doi: "doi:bogus-#{Time.current.utc.iso8601(3)}"
).unlock_and_fetch_ldp_object do |uo|
uo.add_to_path(community.id, item_collection.id)
uo.embargo_end_date = 20.years.from_now.to_date
uo.visibility_after_embargo = CONTROLLED_VOCABULARIES[:visibility].public
uo.save!
end
# Add a formerly embargoed item
Item.new_locked_ldp_object(
owner: admin.id,
creators: [creators[rand(10)]],
visibility: Item::VISIBILITY_EMBARGO,
created: rand(20_000).days.ago.to_s,
title: "Former embargo of #{Faker::Address.country}: the day the #{thing.pluralize} were free",
description: Faker::Lorem.sentence(20, false, 0).chop,
languages: [CONTROLLED_VOCABULARIES[:language].english],
license: CONTROLLED_VOCABULARIES[:license].attribution_4_0_international,
item_type: CONTROLLED_VOCABULARIES[:item_type].dataset,
subject: [thing.capitalize, 'Freedom'],
doi: "doi:bogus-#{Time.current.utc.iso8601(3)}"
).unlock_and_fetch_ldp_object do |uo|
uo.add_to_path(community.id, item_collection.id)
uo.embargo_end_date = 2.days.ago.to_date
uo.visibility_after_embargo = CONTROLLED_VOCABULARIES[:visibility].public
uo.save!
end
# Add an item owned by non-admin
Item.new_locked_ldp_object(
owner: non_admin.id,
creators: [creators[rand(10)]],
visibility: JupiterCore::VISIBILITY_PUBLIC,
created: rand(20_000).days.ago.to_s,
title: "Impact of non-admin users on #{thing.pluralize}",
description: Faker::Lorem.sentence(20, false, 0).chop,
languages: [CONTROLLED_VOCABULARIES[:language].english],
license: CONTROLLED_VOCABULARIES[:license].attribution_4_0_international,
item_type: CONTROLLED_VOCABULARIES[:item_type].learning_object,
subject: [thing.capitalize, 'Equality'],
# Add a temporal subject
temporal_subjects: ['The 1950s'],
doi: "doi:bogus-#{Time.current.utc.iso8601(3)}"
).unlock_and_fetch_ldp_object do |uo|
uo.add_to_path(community.id, item_collection.id)
uo.save!
end
# Want one multi-collection item per community
Item.new_locked_ldp_object(
owner: admin.id,
creators: [creators[rand(10)]],
visibility: JupiterCore::VISIBILITY_PUBLIC,
created: rand(20_000).days.ago.to_s,
title: "Multi-collection random images of #{thing.pluralize}",
description: Faker::Lorem.sentence(20, false, 0).chop,
# No linguistic content
languages: [CONTROLLED_VOCABULARIES[:language].no_linguistic_content],
license: CONTROLLED_VOCABULARIES[:license].attribution_4_0_international,
item_type: CONTROLLED_VOCABULARIES[:item_type].image,
subject: [thing.capitalize, 'Randomness', 'Pictures'],
# Add a spatial subject
spatial_subjects: ['Onoway'],
doi: "doi:bogus-#{Time.current.utc.iso8601(3)}"
).unlock_and_fetch_ldp_object do |uo|
uo.add_to_path(community.id, item_collection.id)
uo.add_to_path(community.id, thesis_collection.id)
uo.save!
end
end
# Pad with empty communities for pagination (starts with Z for sort order)
EXTRA_THINGS.each do |thing|
Community.new_locked_ldp_object(
owner: admin.id,
title: "Zoo#{thing}ology Institute of North-Eastern Upper Alberta (and Saskatchewan)",
description: Faker::Lorem.sentence(20, false, 0).chop
).unlock_and_fetch_ldp_object(&:save!)
end
# One community with a lot of empty restricted collections
community = Community.new_locked_ldp_object(
owner: admin.id,
title: "The Everything Department",
description: Faker::Lorem.sentence(20, false, 0).chop
).unlock_and_fetch_ldp_object(&:save!)
EXTRA_THINGS.each do |thing|
collection = Collection.new_locked_ldp_object(
owner: admin.id,
title: "Articles about the relationship between #{thing.pluralize} and non-#{thing.pluralize}",
community_id: community.id,
restricted: true,
description: "A restricted collection"
).unlock_and_fetch_ldp_object(&:save!)
end
end
# Types
[:book, :book_chapter, :conference_workshop_poster,
:conference_workshop_presentation, :dataset,
:image, :journal_article_draft, :journal_article_published,
:learning_object, :report, :research_material, :review].each do |type_name|
Type.create(name: type_name)
end
# Languages
[:english, :french, :spanish, :chinese, :german,
:italian, :russian, :ukrainian, :japanese,
:no_linguistic_content, :other].each do |language_name|
Language.create(name: language_name)
end
# Institutions
[:uofa, :st_stephens].each do |institution_name|
Institution.create(name: institution_name)
end
puts 'Database seeded successfully!'
|