_id stringlengths 2 6 | title stringlengths 9 130 | partition stringclasses 3 values | text stringlengths 66 10.5k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q21300 | RConfig.LoadPaths.add_load_path | train | def add_load_path(path)
# Normalize the argument and keep only the first parsed path; the
# assignment-in-condition is deliberate (nil/empty input falls through).
if path = parse_load_paths(path).first # only accept first one.
self.load_paths << path
self.load_paths.uniq! # avoid duplicate entries on repeated adds
return reload(true) # Load Paths have changed so force a reload
end
# Returns false when the argument could not be parsed into a usable path.
false
end | ruby | {
"resource": ""
} |
# Enables or disables automatic config reloading.
#
# @param reload [true, false] whether reloading should be enabled
# @raise [ArgumentError] unless the argument is exactly true or false
def enable_reload=(reload)
  raise ArgumentError, 'Argument must be true or false.' unless [true, false].include?(reload)
  # NOTE: the original wrote `self.enable_reload = reload`, which invokes
  # this very setter again and recurses infinitely. Write the backing
  # instance variable directly instead.
  @enable_reload = reload
end
"resource": ""
} |
# Sets the number of seconds between automatic config reloads.
#
# @param interval [Integer] seconds between reloads; 0 disables reloading
# @raise [ArgumentError] unless the argument is an Integer
def reload_interval=(interval)
  raise ArgumentError, 'Argument must be Integer.' unless interval.kind_of?(Integer)
  # An interval of zero means "never reload", so switch reloading off.
  # NOTE: the original assigned through the setters (`self.enable_reload =`,
  # `self.reload_interval =`), which recurses infinitely; use the ivars.
  @enable_reload = false if interval == 0
  @reload_interval = interval
end
"resource": ""
} |
q21303 | RConfig.Reload.reload | train | def reload(force=false)
# Strictly validate the flag so callers can't pass truthy non-booleans.
raise ArgumentError, 'Argument must be true or false.' unless [true, false].include?(force)
# Reload when forced, or when the reload policy (interval etc.) says it's due.
if force || reload?
flush_cache
return true
end
# Nothing was reloaded.
false
end | ruby | {
"resource": ""
} |
q21304 | IsoDoc::Function.Utils.noko | train | def noko(&block)
doc = ::Nokogiri::XML.parse(NOKOHEAD)
fragment = doc.fragment("")
::Nokogiri::XML::Builder.with fragment, &block
fragment.to_xml(encoding: "US-ASCII").lines.map do |l|
l.gsub(/\s*\n/, "")
end
end | ruby | {
"resource": ""
} |
q21305 | IsoDoc::Function.XrefGen.anchor_names | train | def anchor_names(docxml)
initial_anchor_names(docxml)
back_anchor_names(docxml)
# preempt clause notes with all other types of note
note_anchor_names(docxml.xpath(ns("//table | //example | //formula | "\
"//figure")))
note_anchor_names(docxml.xpath(ns(SECTIONS_XPATH)))
example_anchor_names(docxml.xpath(ns(SECTIONS_XPATH)))
list_anchor_names(docxml.xpath(ns(SECTIONS_XPATH)))
end | ruby | {
"resource": ""
} |
q21306 | IsoDoc::Function.Cleanup.figure_cleanup | train | def figure_cleanup(docxml)
docxml.xpath(FIGURE_WITH_FOOTNOTES).each do |f|
key = figure_get_or_make_dl(f)
f.xpath(".//aside").each do |aside|
figure_aside_process(f, aside, key)
end
end
docxml
end | ruby | {
"resource": ""
} |
q21307 | IsoDoc::Function.XrefSectGen.preface_names | train | def preface_names(clause)
return if clause.nil?
@anchors[clause["id"]] =
{ label: nil, level: 1, xref: preface_clause_name(clause), type: "clause" }
clause.xpath(ns("./clause | ./terms | ./term | ./definitions | ./references")).each_with_index do |c, i|
preface_names1(c, c.at(ns("./title"))&.text, "#{preface_clause_name(clause)}, #{i+1}", 2)
end
end | ruby | {
"resource": ""
} |
q21308 | Strainer.Sandbox.destroy_sandbox | train | def destroy_sandbox
if File.directory?(Strainer.sandbox_path)
Strainer.ui.debug " Destroying sandbox at '#{Strainer.sandbox_path}'"
FileUtils.rm_rf(Strainer.sandbox_path)
else
Strainer.ui.debug " Sandbox does not exist... skipping"
end
end | ruby | {
"resource": ""
} |
q21309 | Strainer.Sandbox.create_sandbox | train | def create_sandbox
unless File.directory?(Strainer.sandbox_path)
Strainer.ui.debug " Creating sandbox at '#{Strainer.sandbox_path}'"
FileUtils.mkdir_p(Strainer.sandbox_path)
end
copy_globals
place_knife_rb
copy_cookbooks
end | ruby | {
"resource": ""
} |
q21310 | Strainer.Sandbox.load_cookbooks | train | def load_cookbooks(cookbook_names)
Strainer.ui.debug "Sandbox#load_cookbooks(#{cookbook_names.inspect})"
cookbook_names.collect{ |cookbook_name| load_cookbook(cookbook_name) }
end | ruby | {
"resource": ""
} |
q21311 | Strainer.Sandbox.load_cookbook | train | def load_cookbook(cookbook_name)
# Locates and loads a single cookbook by name: first from the configured
# cookbooks_paths, then falling back to the Berkshelf cookbook store.
Strainer.ui.debug "Sandbox#load_cookbook('#{cookbook_name.inspect}')"
# Pick the first source directory that actually contains the cookbook.
cookbook_path = cookbooks_paths.find { |path| path.join(cookbook_name).exist? }
cookbook = if cookbook_path
path = cookbook_path.join(cookbook_name)
Strainer.ui.debug "  found cookbook at '#{path}'"
begin
Berkshelf::CachedCookbook.from_path(path)
rescue Berkshelf::CookbookNotFound
# The directory exists but is not a valid cookbook (no metadata.rb).
raise Strainer::Error::CookbookNotFound, "'#{path}' existed, but I could not extract a cookbook. Is there a 'metadata.rb'?"
end
else
Strainer.ui.debug "  did not find '#{cookbook_name}' in any of the sources - resorting to the default cookbook_store..."
Berkshelf.cookbook_store.cookbooks(cookbook_name).last
end
# The store lookup can return nil; surface that as a hard error.
cookbook || raise(Strainer::Error::CookbookNotFound, "Could not find '#{cookbook_name}' in any of the sources.")
end | ruby | {
"resource": ""
} |
q21312 | Strainer.Sandbox.load_self | train | def load_self
Strainer.ui.debug "Sandbox#load_self"
begin
Berkshelf::CachedCookbook.from_path(File.expand_path('.'))
rescue Berkshelf::CookbookNotFound
raise Strainer::Error::CookbookNotFound, "'#{File.expand_path('.')}' existed, but I could not extract a cookbook. Is there a 'metadata.rb'?"
end
end | ruby | {
"resource": ""
} |
q21313 | Strainer.Sandbox.cookbooks_and_dependencies | train | def cookbooks_and_dependencies
loaded_dependencies = Hash.new(false)
dependencies = @cookbooks.dup
dependencies.each do |cookbook|
loaded_dependencies[cookbook.cookbook_name] = true
cookbook.metadata.dependencies.keys.each do |dependency_name|
unless loaded_dependencies[dependency_name]
dependencies << load_cookbook(dependency_name)
loaded_dependencies[dependency_name] = true
end
end
end
end | ruby | {
"resource": ""
} |
q21314 | Strainer.Sandbox.chef_repo? | train | def chef_repo?
# Heuristic: treat the cwd as a Chef repository when it contains more than
# two of the well-known Chef top-level folders. Memoized.
@_chef_repo ||= begin
chef_folders = %w(.chef certificates config cookbooks data_bags environments roles)
# NOTE(review): a false result is not cached by ||= and is recomputed on
# every call; harmless here, but worth knowing.
(root_folders & chef_folders).size > 2
end
end | ruby | {
"resource": ""
} |
# Lists the names of the directories directly under the current working
# directory (dot-directories included, '.' and '..' excluded).
#
# @return [Array<String>] directory basenames; memoized after the first call
def root_folders
  # NOTE: the original chained `.compact!`, which returns nil when there is
  # nothing to remove, so the method (and its memoization) yielded nil
  # whenever every glob entry was a directory. select + map avoids both the
  # interim nils and the bang-method pitfall.
  @root_folders ||= Dir.glob("#{Dir.pwd}/*", File::FNM_DOTMATCH)
                       .select { |f| File.directory?(f) }
                       .map { |f| File.basename(f) }
                       .reject { |dir| %w(. ..).include?(dir) }
end
"resource": ""
} |
q21316 | Strainer.Runner.run! | train | def run!
@cookbooks.each do |name, c|
cookbook = c[:cookbook]
strainerfile = c[:strainerfile]
Strainer.ui.debug "Starting Runner for #{cookbook.cookbook_name} (#{cookbook.version})"
Strainer.ui.header("# Straining '#{cookbook.cookbook_name} (v#{cookbook.version})'")
strainerfile.commands.each do |command|
success = command.run!
@report[cookbook.cookbook_name] ||= {}
@report[cookbook.cookbook_name][command.label] = success
Strainer.ui.debug "Strainer::Runner#report: #{@report.inspect}"
if options[:fail_fast] && !success
Strainer.ui.debug "Run was not successful and --fail-fast was specified"
Strainer.ui.fatal "Exited early because '--fail-fast' was specified. Some tests may have been skipped!"
return false
end
end
end
# Move the logfile back over
if File.exist?(Strainer.sandbox_path.join('strainer.out'))
FileUtils.mv(Strainer.logfile_path, Strainer.sandbox_path.join('strainer.out'))
end
success = @report.values.collect(&:values).flatten.all?
msg = success ? "Strainer marked build OK" : "Strainer marked build as failure"
Strainer.ui.say msg
return success
end | ruby | {
"resource": ""
} |
# Returns the Strainerfile commands to run, honoring the :except and
# :only filter options (matched against each command's label).
# Memoized after the first call.
def commands
  @commands ||= begin
    except = @options[:except]
    only = @options[:only]
    if except
      @all_commands.reject { |cmd| except.include?(cmd.label) }
    elsif only
      @all_commands.select { |cmd| only.include?(cmd.label) }
    else
      @all_commands
    end
  end
end
"resource": ""
} |
q21318 | Strainer.Strainerfile.load! | train | def load!
# Parses the Strainerfile once, expanding $COOKBOOK / $SANDBOX variables
# and building one Command object per non-comment line into @all_commands.
return if @all_commands # already loaded; load! is idempotent
contents = File.read @strainerfile
contents.strip!
# Substitute Strainerfile variables before parsing.
contents.gsub! '$COOKBOOK', @cookbook.cookbook_name
contents.gsub! '$SANDBOX', Strainer.sandbox_path.to_s
# Drop empty lines and comments
lines = contents.split("\n")
lines.reject!{ |line| line.strip.empty? || line.strip.start_with?('#') }
lines.compact!
# NOTE(review): `lines` can never be nil at this point (reject!/compact!
# mutate in place and `lines` keeps its array), so this ||= is a no-op.
lines ||= []
# Parse the line and split it into the label and command parts
#
# @example Example Line
#   foodcritic -f any phantomjs
@all_commands = lines.collect{ |line| Command.new(line, @cookbook, @options) }
end | ruby | {
"resource": ""
} |
q21319 | Strainer.Command.speak | train | def speak(message, options = {})
message.to_s.strip.split("\n").each do |line|
next if line.strip.empty?
line.gsub! Strainer.sandbox_path.to_s, @cookbook.original_path.dirname.to_s
Strainer.ui.say label_with_padding + line, options
end
end | ruby | {
"resource": ""
} |
# Executes the given block with the working directory set to the sandbox,
# restoring ENV['PWD'] afterwards.
#
# @yield runs inside Strainer.sandbox_path
# @return the block's return value
def inside_sandbox(&block)
  Strainer.ui.debug "Changing working directory to '#{Strainer.sandbox_path}'"
  original_pwd = ENV['PWD']
  ENV['PWD'] = Strainer.sandbox_path.to_s
  begin
    Dir.chdir(Strainer.sandbox_path, &block)
  ensure
    # Restore even when the block raises; the original leaked the modified
    # ENV['PWD'] on exceptions.
    ENV['PWD'] = original_pwd
    Strainer.ui.debug "Restored working directory to '#{original_pwd}'"
  end
end
"resource": ""
} |
# Executes the given block with the working directory set to this
# cookbook's folder inside the sandbox, restoring ENV['PWD'] afterwards.
#
# @yield runs inside the sandboxed cookbook directory
# @return the block's return value
def inside_cookbook(&block)
  cookbook_path = File.join(Strainer.sandbox_path.to_s, @cookbook.cookbook_name)
  Strainer.ui.debug "Changing working directory to '#{cookbook_path}'"
  original_pwd = ENV['PWD']
  ENV['PWD'] = cookbook_path
  begin
    Dir.chdir(cookbook_path, &block)
  ensure
    # Restore even when the block raises; the original leaked the modified
    # ENV['PWD'] on exceptions.
    ENV['PWD'] = original_pwd
    Strainer.ui.debug "Restoring working directory to '#{original_pwd}'"
  end
end
"resource": ""
} |
q21322 | Strainer.Command.run_as_pty | train | def run_as_pty(command)
Strainer.ui.debug 'Using PTY'
PTY.spawn(command) do |r, _, pid|
begin
r.sync
r.each_line { |line| speak line }
rescue Errno::EIO => e
# Ignore this. Otherwise errors will be thrown whenever
# the process is closed
ensure
::Process.wait pid
end
end
end | ruby | {
"resource": ""
} |
q21323 | Strainer.UI.error | train | def error(message, color = :red)
Strainer.log.error(message)
return if quiet?
message = set_color(message, *color) if color
super(message)
end | ruby | {
"resource": ""
} |
q21324 | Tr8n.Token.token_value | train | def token_value(object, options, language)
# token is an array
if object.is_a?(Array)
# if you provided an array, it better have some values
if object.empty?
return raise Tr8n::TokenException.new("Invalid array value for a token: #{full_name}")
end
# if the first value of an array is an array handle it here
if object.first.kind_of?(Enumerable)
return token_array_value(object, options, language)
end
# if the first item in the array is an object, process it
return evaluate_token_method_array(object.first, object, options, language)
elsif object.is_a?(Hash)
# if object is a hash, it must be of a form: {:object => {}, :value => "", :attribute => ""}
# either value can be passed, or the attribute. attribute will be used first
if object[:object].nil?
return raise Tr8n::TokenException.new("Hash token is missing an object key for a token: #{full_name}")
end
value = object[:value]
unless object[:attribute].blank?
value = object[:object][object[:attribute]]
end
if value.blank?
return raise Tr8n::TokenException.new("Hash object is missing a value or attribute key for a token: #{full_name}")
end
object = value
end
# simple token
sanitize_token_value(object, object.to_s, options, language)
end | ruby | {
"resource": ""
} |
q21325 | Tr8n.LanguageController.update_rules | train | def update_rules
@rules = rules_by_dependency(parse_language_rules)
unless params[:rule_action]
return render(:partial => "edit_rules")
end
if params[:rule_action].index("add_at")
position = params[:rule_action].split("_").last.to_i
cls = Tr8n::Config.language_rule_dependencies[params[:rule_type]]
@rules[cls.dependency].insert(position, cls.new(:language => tr8n_current_language))
elsif params[:rule_action].index("delete_at")
position = params[:rule_action].split("_").last.to_i
cls = Tr8n::Config.language_rule_dependencies[params[:rule_type]]
@rules[cls.dependency].delete_at(position)
end
render :partial => "edit_rules"
end | ruby | {
"resource": ""
} |
q21326 | Tr8n.LanguageController.update_language_cases | train | def update_language_cases
@cases = parse_language_cases
unless params[:case_action]
return render(:partial => "edit_cases")
end
if params[:case_action].index("add_at")
position = params[:case_action].split("_").last.to_i
@cases.insert(position, Tr8n::LanguageCase.new(:language => tr8n_current_language))
elsif params[:case_action].index("delete_at")
position = params[:case_action].split("_").last.to_i
@cases.delete_at(position)
elsif params[:case_action].index("clear_all")
@cases = []
end
render :partial => "edit_language_cases"
end | ruby | {
"resource": ""
} |
q21327 | Tr8n.LanguageController.update_language_case_rules | train | def update_language_case_rules
cases = parse_language_cases
case_index = params[:case_index].to_i
lcase = cases[case_index]
if params[:case_action].index("add_rule_at")
position = params[:case_action].split("_").last.to_i
rule_data = params[:edit_rule].merge(:language => tr8n_current_language)
lcase.language_case_rules.insert(position, Tr8n::LanguageCaseRule.new(rule_data))
elsif params[:case_action].index("update_rule_at")
position = params[:case_action].split("_").last.to_i
rule_data = params[:edit_rule].merge(:language => tr8n_current_language)
lcase.language_case_rules[position].definition = params[:edit_rule][:definition]
elsif params[:case_action].index("move_rule_up_at")
position = params[:case_action].split("_").last.to_i
temp_node = lcase.language_case_rules[position-1]
lcase.language_case_rules[position-1] = lcase.language_case_rules[position]
lcase.language_case_rules[position] = temp_node
elsif params[:case_action].index("move_rule_down_at")
position = params[:case_action].split("_").last.to_i
temp_node = lcase.language_case_rules[position+1]
lcase.language_case_rules[position+1] = lcase.language_case_rules[position]
lcase.language_case_rules[position] = temp_node
elsif params[:case_action].index("delete_rule_at")
position = params[:case_action].split("_").last.to_i
lcase.language_case_rules.delete_at(position)
elsif params[:case_action].index("clear_all")
lcase.language_case_rules = []
end
render(:partial => "edit_language_case_rules", :locals => {:lcase => lcase, :case_index => case_index})
end | ruby | {
"resource": ""
} |
q21328 | Tr8n.LanguageController.select | train | def select
@inline_translations_allowed = false
@inline_translations_enabled = false
if tr8n_current_user_is_translator?
unless tr8n_current_translator.blocked?
@inline_translations_allowed = true
@inline_translations_enabled = tr8n_current_translator.enable_inline_translations?
end
else
@inline_translations_allowed = Tr8n::Config.open_registration_mode?
end
@inline_translations_allowed = true if tr8n_current_user_is_admin?
@source_url = request.env['HTTP_REFERER']
@source_url.gsub!("locale", "previous_locale") if @source_url
@all_languages = Tr8n::Language.enabled_languages
@user_languages = Tr8n::LanguageUser.languages_for(tr8n_current_user) unless tr8n_current_user_is_guest?
render_lightbox
end | ruby | {
"resource": ""
} |
q21329 | Tr8n.LanguageController.switch | train | def switch
language_action = params[:language_action]
return redirect_to_source if tr8n_current_user_is_guest?
if tr8n_current_user_is_translator? # translator mode
if language_action == "toggle_inline_mode"
if tr8n_current_translator.enable_inline_translations?
language_action = "disable_inline_mode"
else
language_action = "enable_inline_mode"
end
end
if language_action == "enable_inline_mode"
tr8n_current_translator.enable_inline_translations!
elsif language_action == "disable_inline_mode"
tr8n_current_translator.disable_inline_translations!
elsif language_action == "switch_language"
tr8n_current_translator.switched_language!(Tr8n::Language.find_by_locale(params[:locale]))
end
elsif language_action == "switch_language" # non-translator mode
Tr8n::LanguageUser.create_or_touch(tr8n_current_user, Tr8n::Language.find_by_locale(params[:locale]))
elsif language_action == "become_translator" # non-translator mode
Tr8n::Translator.register
elsif language_action == "enable_inline_mode" or language_action == "toggle_inline_mode" # non-translator mode
Tr8n::Translator.register.enable_inline_translations!
end
redirect_to_source
end | ruby | {
"resource": ""
} |
q21330 | Tr8n.LanguageController.parse_language_rules | train | def parse_language_rules
rulz = []
return rulz unless params[:rules]
Tr8n::Config.language_rule_classes.each do |cls|
next unless params[:rules][cls.dependency]
index = 0
while params[:rules][cls.dependency]["#{index}"]
rule_params = params[:rules][cls.dependency]["#{index}"]
rule_definition = params[:rules][cls.dependency]["#{index}"][:definition]
if rule_params.delete(:reset_values) == "true"
rule_definition = {}
end
rule_id = rule_params[:id]
keyword = rule_params[:keyword]
if rule_id.blank?
rulz << cls.new(:keyword => keyword, :definition => rule_definition)
else
rule = cls.find_by_id(rule_id)
rule = cls.new unless rule
rule.keyword = keyword
rule.definition = rule_definition
rulz << rule
end
index += 1
end
end
rulz
end | ruby | {
"resource": ""
} |
q21331 | Tr8n.LanguageCasesController.index | train | def index
@maps = Tr8n::LanguageCaseValueMap.where("language_id = ? and (reported is null or reported = ?)", tr8n_current_language.id, false)
@maps = @maps.where("keyword like ?", "%#{params[:search]}%") unless params[:search].blank?
@maps = @maps.order("updated_at desc").page(page).per(per_page)
end | ruby | {
"resource": ""
} |
q21332 | Tr8n.BaseController.validate_current_translator | train | def validate_current_translator
if tr8n_current_user_is_translator? and tr8n_current_translator.blocked?
trfe("Your translation privileges have been revoked. Please contact the site administrator for more details.")
return redirect_to(Tr8n::Config.default_url)
end
return if Tr8n::Config.current_user_is_translator?
redirect_to("/tr8n/translator/registration")
end | ruby | {
"resource": ""
} |
q21333 | Tr8n.BaseController.validate_language_management | train | def validate_language_management
# admins can do everything
return if tr8n_current_user_is_admin?
if tr8n_current_language.default?
trfe("Only administrators can modify this language")
return redirect_to(tr8n_features_tabs.first[:link])
end
unless tr8n_current_user_is_translator? and tr8n_current_translator.manager?
trfe("In order to manage a language you first must request to become a manager of that language.")
return redirect_to(tr8n_features_tabs.first[:link])
end
end | ruby | {
"resource": ""
} |
q21334 | Elasticsearch.Drain.client | train | def client
return @client unless @client.nil?
@client = ::Elasticsearch::Client.new(
hosts: hosts,
retry_on_failure: true,
log: true,
logger: ::Logger.new('es_client.log', 10, 1_024_000)
)
end | ruby | {
"resource": ""
} |
q21335 | Tr8n.ActionViewExtension.tr8n_client_sdk_tag | train | def tr8n_client_sdk_tag(opts = {})
# opts[:default_source] ||= tr8n_default_client_source
opts[:scheduler_interval] ||= Tr8n::Config.default_client_interval
opts[:enable_inline_translations] = (Tr8n::Config.current_user_is_translator? and Tr8n::Config.current_translator.enable_inline_translations? and (not Tr8n::Config.current_language.default?))
opts[:default_decorations] = Tr8n::Config.default_decoration_tokens
opts[:default_tokens] = Tr8n::Config.default_data_tokens
opts[:rules] = {
:number => Tr8n::Config.rules_engine[:numeric_rule],
:gender => Tr8n::Config.rules_engine[:gender_rule],
:list => Tr8n::Config.rules_engine[:gender_list_rule],
:date => Tr8n::Config.rules_engine[:date_rule]
}
# build a list of actual rules of the language
client_var_name = opts[:client_var_name] || :tr8nProxy
opts.merge!(:enable_tml => Tr8n::Config.enable_tml?)
"<script>Tr8n.SDK.Proxy.init(#{opts.to_json});</script>".html_safe
end | ruby | {
"resource": ""
} |
# Builds the application zip bundle for an Elastic Beanstalk deploy:
# merges all configured .ebextensions folders (rendering *.erb templates)
# and the Dockerrun.aws.json (if present) into a raw archive of the project.
#
# @return [Hash] :label (version label), :file (open File handle on the
#   zip), :message (commit message)
def gen
  log("Creating zip archivement", env_conf.name)
  start = Time.now
  ebextensions = env_conf.ebextensions
  tmp_file = raw_zip_archive
  tmp_folder = Dir.mktmpdir
  begin
    Zip::File.open(tmp_file.path) do |z|
      ebextensions.each do |ex_folder|
        # Every extra config folder is merged into the canonical .ebextensions/.
        z.remove_folder ex_folder unless ex_folder == ".ebextensions"
        Dir.glob("#{ex_folder}/*.config*") do |fname|
          next unless File.file?(fname)
          basename = File.basename(fname)
          source_file = fname
          if fname.match(/\.erb$/)
            # Render ERB templates; templates producing no YAML are skipped.
            next unless y = YAML::load(ErbHelper.new(File.read(fname), env_conf).result)
            basename = basename.gsub(/\.erb$/,'')
            source_file = "#{tmp_folder}/#{basename}"
            File.open(source_file, 'w') do |f|
              f.write y.to_yaml
            end
          end
          target = ".ebextensions/#{basename}"
          # Replace any stale copies already present in the archive.
          z.remove target if z.find_entry target
          z.remove fname if z.find_entry fname
          z.add target, source_file
        end
      end
      dockerrun_file = env_conf.dockerrun || "Dockerrun.aws.json"
      # NOTE: File.exists? was removed in Ruby 3.2; File.exist? is the
      # supported spelling.
      if File.exist?(dockerrun_file)
        dockerrun = JSON.parse ErbHelper.new(File.read(dockerrun_file), env_conf).result
        tmp_dockerrun = "#{tmp_folder}/Dockerrun.aws.json"
        File.open(tmp_dockerrun, 'w') do |f|
          f.write dockerrun.to_json
        end
        z.remove env_conf.dockerrun if z.find_entry env_conf.dockerrun
        z.remove "Dockerrun.aws.json" if z.find_entry "Dockerrun.aws.json"
        z.add "Dockerrun.aws.json", tmp_dockerrun
      end
    end
  ensure
    # Clean the scratch dir even if archive processing raises.
    FileUtils.rm_rf tmp_folder
  end
  log("Zip was created in: #{Time.now - start}s", env_conf.name)
  return {
    label: Time.now.strftime("app_#{env_conf.name}_#{version_label}_%Y%m%d_%H%M%S"),
    file: File.open(tmp_file.path),
    message: message,
  }
end
"resource": ""
} |
q21337 | Liquid.Configuration.reload! | train | def reload!
clear
@mixins.each do |file|
mixin(file)
end
@callbacks.each do |callback|
callback.call(self)
end
end | ruby | {
"resource": ""
} |
# Finds the first stack frame that lies outside the liquid-ext sources and
# returns its location.
#
# @return [Hash] :file and :line (both Strings) of the calling frame
def called_from
  # NOTE: Enumerable#detect treats its positional argument as the *ifnone*
  # fallback and invokes #call on it when nothing matches; the original
  # passed a plain String, which would raise NoMethodError on that path.
  # Wrap the fallback in a proc so it is a valid callable.
  location = caller.detect(proc { 'unknown:0' }) do |line|
    line.match(/\/liquid(-|\/)ext/).nil?
  end
  file, line, _ = location.split(':')
  { :file => file, :line => line }
end
"resource": ""
} |
# Reads the source line referenced by a backtrace frame such as
# "path/to/file.rb:12:in `foo'".
#
# @param frame [String] a single backtrace entry
# @return [String] the corresponding (1-based) line of the file
def read_line(frame)
  path, lineno = frame.split(/:/, 2)
  index = lineno.to_i - 1
  File.readlines(path)[index]
end
"resource": ""
} |
q21340 | Symbiont.PublicTrigger.method | train | def method(method_name)
__context__ = __actual_context__(method_name)
# NOTE:
# block is used cuz #__actual_context__can raise
# ::NoMethodError (ContextNoMethodError) too (and we should raise it)
begin
__context__.method(method_name)
rescue ::NoMethodError
# NOTE:
# this situation is caused when the context object does not respond
# to #method method (BasicObject instances for example). We can extract
# method objects via it's singleton class.
__context_singleton__ = __extract_singleton_class__(__context__)
__context_singleton__.public_instance_method(method_name).bind(__context__)
end
end | ruby | {
"resource": ""
} |
q21341 | Symbiont.Isolator.public_method | train | def public_method(method_name, *required_contexts, direction: default_direction)
public_trigger(*required_contexts, direction: direction).method(method_name)
end | ruby | {
"resource": ""
} |
q21342 | Symbiont.Isolator.private_method | train | def private_method(method_name, *required_contexts, direction: default_direction)
private_trigger(*required_contexts, direction: direction).method(method_name)
end | ruby | {
"resource": ""
} |
q21343 | Symbiont.Isolator.public_trigger | train | def public_trigger(*required_contexts, direction: default_direction)
PublicTrigger.new(*required_contexts, context_direction: direction, &closure)
end | ruby | {
"resource": ""
} |
q21344 | Symbiont.Isolator.private_trigger | train | def private_trigger(*required_contexts, direction: default_direction)
PrivateTrigger.new(*required_contexts, context_direction: direction, &closure)
end | ruby | {
"resource": ""
} |
q21345 | Potracer.Trace.trace | train | def trace(bitmap = nil, params = nil, &block)
if block_given?
do_trace(bitmap || @bitmap, params || @params, &block)
else
do_trace(bitmap || @bitmap, params || @params)
end
end | ruby | {
"resource": ""
} |
q21346 | Railjet.Context.method_missing | train | def method_missing(name, *args, &block)
getter_name = name[0..-2]
if name =~ /^[a-z]+=$/ && !respond_to?(getter_name)
define_accessor(getter_name, args.first)
else
super
end
end | ruby | {
"resource": ""
} |
q21347 | Sprout::Executable.Session.handle_user_input | train | def handle_user_input
while true
begin
break if !wait_for_prompt
input = $stdin.gets.chomp!
execute_action(input, true)
rescue SignalException => e
return false
end
end
wait
end | ruby | {
"resource": ""
} |
q21348 | Sprout::Executable.Session.execute_action | train | def execute_action action, silence=false
action = action.strip
if wait_for_prompt
stdout.puts(action) unless silence
@prompted = false
process_runner.puts action
end
end | ruby | {
"resource": ""
} |
q21349 | GovukMessageQueueConsumer.Consumer.run | train | def run(subscribe_opts: {})
# Opens the RabbitMQ connection and consumes messages forever (block: true).
@rabbitmq_connection.start
# Caller-supplied options win over the defaults; manual_ack means unacked
# messages are redelivered if this process dies.
subscribe_opts = { block: true, manual_ack: true}.merge(subscribe_opts)
queue.subscribe(subscribe_opts) do |delivery_info, headers, payload|
begin
message = Message.new(payload, headers, delivery_info)
@statsd_client.increment("#{@queue_name}.started")
message_consumer.process(message)
@statsd_client.increment("#{@queue_name}.#{message.status}")
# Rescuing Exception (not StandardError) is deliberate here: any failure,
# however fatal, must crash the process so RabbitMQ requeues the message.
rescue Exception => e
@statsd_client.increment("#{@queue_name}.uncaught_exception")
GovukError.notify(e) if defined?(GovukError)
@logger.error "Uncaught exception in processor: \n\n #{e.class}: #{e.message}\n\n#{e.backtrace.join("\n")}"
exit(1) # Ensure rabbitmq requeues outstanding messages
end
end
end | ruby | {
"resource": ""
} |
# Probes the wrapped process by delivering a signal (0 by default, which
# only checks for existence without affecting the process).
#
# @param sig [Integer, String] signal to deliver
# @return [Boolean] true if the process exists, false if it is gone
def update_status sig=0
  target = Integer("#{ @pid }")
  begin
    Process.kill(sig, target)
    true
  rescue Errno::ESRCH
    false
  end
end
"resource": ""
} |
q21351 | RandomSources.HotBits.bytes | train | def bytes(num=10)
num = [[2048, num.to_i].min , 0].max
numbers = []
response = REXML::Document.new( open("https://www.fourmilab.ch/cgi-bin/Hotbits?fmt=xml&nbytes=#{num}"))
status = REXML::XPath.first( response, "//status")
case status.attributes['result'].to_i
when 200
data = REXML::XPath.first( response, "//random-data" ).text.split
data.each{|byte| numbers << byte.hex}
when 503
raise StandardError.new "#{status.text}"
end
numbers
end | ruby | {
"resource": ""
} |
q21352 | Sprout::System.UnixSystem.should_repair_executable | train | def should_repair_executable path
return (File.exists?(path) && !File.directory?(path) && File.read(path).match(/^\#\!\/bin\/sh/))
end | ruby | {
"resource": ""
} |
q21353 | RandomSources.RandomOrg.integers | train | def integers(options = {})
url_params = { max: clean(options[:max]) || 100,
min: clean(options[:min]) || 1,
num: clean(options[:num]) || 10,
base: clean(options[:base]) || 10,
rnd: 'new',
format: 'plain',
col: 1
}
numbers=[]
check_for_http_errors{
response=open("#{@website}integers/?max=#{url_params[:max]}&min=#{url_params[:min]}&base=#{url_params[:base]}&col=#{url_params[:col]}&rnd=#{url_params[:rnd]}&format=#{url_params[:format]}&num=#{url_params[:num]}")
response.each_line{|line| numbers << line.to_i}
}
numbers
end | ruby | {
"resource": ""
} |
q21354 | RandomSources.RandomOrg.sequence | train | def sequence(min, max)
url_params = { max: clean(max) || 10,
min: clean(min) || 1,
rnd: 'new',
format: 'plain',
col: 1
}
sequence_numbers=[]
check_for_http_errors{
response=open("#{@website}sequences/?max=#{url_params[:max]}&min=#{url_params[:min]}&col=#{url_params[:col]}&rnd=#{url_params[:rnd]}&format=#{url_params[:format]}")
response.each_line{|line| sequence_numbers << line.to_i}
}
sequence_numbers
end | ruby | {
"resource": ""
} |
q21355 | RandomSources.RandomOrg.strings | train | def strings(options = {})
url_params = { num: clean(options[:num]) || 10,
len: clean(options[:len]) || 8,
digits: check_on_off(options[:digits]) || 'on',
unique: check_on_off(options[:unique]) || 'on',
upperalpha: check_on_off(options[:upperalpha]) || 'on',
loweralpha: check_on_off(options[:loweralpha]) || 'on',
rnd: 'new',
format: 'plain'
}
strings=[]
check_for_http_errors{
response=open("#{@website}strings/?num=#{url_params[:num]}&len=#{url_params[:len]}&digits=#{url_params[:digits]}&unique=#{url_params[:unique]}&upperalpha=#{url_params[:upperalpha]}&loweralpha=#{url_params[:loweralpha]}&rnd=#{url_params[:rnd]}&format=#{url_params[:format]}")
response.each_line{|line| strings << line.strip}
}
strings
end | ruby | {
"resource": ""
} |
q21356 | Sprout.FileTarget.add_library | train | def add_library name, path
if path.is_a?(Array)
path = path.collect { |p| expand_local_path(p) }
else
path = expand_local_path path
end
library = Sprout::Library.new( :name => name, :path => path, :file_target => self )
libraries << library
library
end | ruby | {
"resource": ""
} |
q21357 | Sprout.FileTarget.add_executable | train | def add_executable name, path
path = expand_local_path path
executables << OpenStruct.new( :name => name, :path => path, :file_target => self )
end | ruby | {
"resource": ""
} |
q21358 | Sprout::System.BaseSystem.execute | train | def execute(tool, options='')
Sprout.stdout.puts("#{tool} #{options}")
runner = get_and_execute_process_runner(tool, options)
error = runner.read_err
result = runner.read
if(result.size > 0)
Sprout.stdout.puts result
end
if(error.size > 0)
raise Sprout::Errors::ExecutionError.new("[ERROR] #{error}")
end
result || error
end | ruby | {
"resource": ""
} |
q21359 | Sprout::System.BaseSystem.execute_thread | train | def execute_thread tool, options='', prompt=nil, &block
t = Thread.new do
Thread.current.abort_on_exception = true
runner = execute_silent(tool, options)
Thread.current['runner'] = runner
out = read_from runner.r, prompt, &block
err = read_from runner.e, prompt, &block
out.join && err.kill
end
# Wait for the runner to be created
# before returning a nil reference
# that never gets populated...
while t['runner'].nil? do
sleep(0.1)
end
if !t.alive?
raise Sprout::Errors::UsageError.new(t['runner'].read_err)
end
t
end | ruby | {
"resource": ""
} |
q21360 | Sprout::System.BaseSystem.get_and_execute_process_runner | train | def get_and_execute_process_runner tool, options=nil
runner = get_process_runner
runner.execute_open4 clean_path(tool), options
runner
end | ruby | {
"resource": ""
} |
q21361 | Sprout::System.WinSystem.get_and_execute_process_runner | train | def get_and_execute_process_runner tool, options=nil
tool = clean_path find_tool(tool)
runner = get_process_runner
runner.execute_win32 tool, options
runner
end | ruby | {
"resource": ""
} |
# Rebuilds an exception as a fresh instance of the same class carrying the
# same message. Falls back to a bare Exception when the original class
# cannot be constructed from a single message argument.
def build_new_exception(e)
  begin
    e.class.new(e.message)
  rescue StandardError
    Exception.new(e.message)
  end
end
"resource": ""
} |
q21363 | GithubStats.User.streak | train | def streak
return [] if streaks.empty?
streaks.last.last.date >= Date.today - 1 ? streaks.last : []
end | ruby | {
"resource": ""
} |
q21364 | GithubStats.User.guess_user | train | def guess_user(names = [])
names << Rugged::Config.global['github.user'] if USE_RUGGED
names << ENV['USER']
names.find { |name| name } || (raise 'Failed to guess username')
end | ruby | {
"resource": ""
} |
q21365 | GithubStats.User.real_streak_rewind | train | def real_streak_rewind(partial_streak)
new_data = download(partial_streak.first.date - 1)
old_data = partial_streak.map(&:to_a)
new_stats = GithubStats::Data.new(new_data + old_data)
partial_streak = new_stats.streaks.last
return partial_streak if partial_streak.first.date != new_stats.start_date
real_streak_rewind partial_streak
end | ruby | {
"resource": ""
} |
q21366 | GithubStats.User.download | train | def download(to_date = nil)
url = to_date ? @url + "?to=#{to_date.strftime('%Y-%m-%d')}" : @url
res = Curl::Easy.perform(url)
code = res.response_code
raise("Failed loading data from GitHub: #{url} #{code}") if code != 200
html = Nokogiri::HTML(res.body_str)
html.css('.day').map do |x|
x.attributes.values_at('data-date', 'data-count').map(&:value)
end
end | ruby | {
"resource": ""
} |
q21367 | Renalware.Patient.to_s | train | def to_s(format = :default)
title_suffix = " (#{title})" if has_title?
formatted_name = "#{family_name.upcase}, #{given_name}#{title_suffix}"
formatted_nhs_number = " (#{nhs_number})" if nhs_number.present?
case format
when :default then formatted_name
when :long then "#{formatted_name}#{formatted_nhs_number}"
else full_name
end
end | ruby | {
"resource": ""
} |
q21368 | CloudProviders.Connections.ssh_cleanup_known_hosts! | train | def ssh_cleanup_known_hosts!(hosts=[host, public_ip])
hosts = [hosts] unless hosts.respond_to? :each
hosts.compact.each do |name|
system_run "ssh-keygen -R %s" % name
end
end | ruby | {
"resource": ""
} |
# Maps each raw data point's date to its contribution score.
#
# @return [Hash] date => score, defaulting to 0 for dates with no data
def to_h
  # Build the hash in place; the original re-merged into a fresh hash for
  # every element, which is O(n^2) in allocations for no benefit.
  @raw.each_with_object(Hash.new(0)) do |point, scores|
    scores[point.date] = point.score
  end
end
"resource": ""
} |
q21370 | GithubStats.Data.streaks | train | def streaks
streaks = @raw.each_with_object(Array.new(1, [])) do |point, acc|
point.score.zero? ? acc << [] : acc.last << point
end
streaks.reject!(&:empty?)
streaks
end | ruby | {
"resource": ""
} |
q21371 | GithubStats.Data.outliers | train | def outliers
return [] if scores.uniq.size < 5
scores.select { |x| ((mean - x) / std_var).abs > GITHUB_MAGIC }.uniq
end | ruby | {
"resource": ""
} |
q21372 | GithubStats.Data.quartiles | train | def quartiles
quartiles = Array.new(5) { [] }
@raw.each_with_object(quartiles) do |elem, acc|
acc[quartile(elem.score)] << elem
end
end | ruby | {
"resource": ""
} |
q21373 | GithubStats.Data.quartile | train | def quartile(score)
return nil if score < 0 || score > max.score
quartile_boundaries.count { |bound| score > bound }
end | ruby | {
"resource": ""
} |
q21374 | GithubStats.Data.pad | train | def pad(fill_value = -1, data = @raw.clone)
data = _pad data, 0, fill_value, 0
_pad data, -1, fill_value, 6
end | ruby | {
"resource": ""
} |
q21375 | Renalware.ApplicationHelper.page_title | train | def page_title(separator = Renalware.config.page_title_spearator)
[
content_for(:page_title),
Renalware.config.site_name
].compact.join(separator)
end | ruby | {
"resource": ""
} |
q21376 | CloudProviders.Ec2Instance.make_image | train | def make_image(opts={})
opts = {:volume => '/',
:size => 6000,
:destination => '/mnt/bundle',
:exclude => nil
}.merge(opts)
image_file = File.join(opts[:destination], opts[:prefix] )
cmds = ["mkdir -p #{opts[:destination]}"]
cmds << "dd if=/dev/zero of=#{image_file} bs=1M count=#{opts[:size]}"
cmds << "mkfs.ext3 -F -j #{image_file}"
cmds << "mkdir -p #{opts[:destination]}/loop"
cmds << "mount -o loop #{image_file} #{opts[:destination]}/loop"
cmds << "rsync -ax #{rsync_excludes(opts[:exclude])} #{opts[:volume]}/ #{opts[:destination]}/loop/"
cmds << "if [[ -f /etc/init.d/ec2-ssh-host-key-gen ]]; then chmod u+x /etc/init.d/ec2-ssh-host-key-gen ;fi"
cmds << "umount #{opts[:destination]}/loop"
self.ssh cmds
image_file
end | ruby | {
"resource": ""
} |
q21377 | CloudProviders.ElasticAutoScaler.teardown | train | def teardown
triggers.each do |trigger|
trigger.teardown
end
if autoscaling_groups.select {|n| n.name == name }.empty?
puts "Cloud #{cloud.name} autoscaling group does not exist"
else
self.minimum_instances = 0
self.maximum_instances = 0
@new_launch_configuration_name = old_launch_configuration_name
puts "Updating autoscaling group: #{@new_launch_configuration_name}"
update_autoscaling_group!
puts "Terminating nodes in autoscaling group: #{name}"
reset!
# cloud.nodes.each {|n| n.terminate! }
delete_autoscaling_group!
delete_launch_configuration!
puts ""
end
end | ruby | {
"resource": ""
} |
q21378 | Sapience.Configuration.map_levels | train | def map_levels
return [] unless defined?(::Logger::Severity)
@@map_levels ||=
::Logger::Severity.constants.each_with_object([]) do |constant, levels|
levels[::Logger::Severity.const_get(constant)] = level_by_index_or_error(constant)
end
end | ruby | {
"resource": ""
} |
q21379 | Renalware.Broadcasting.broadcasting_to_configured_subscribers | train | def broadcasting_to_configured_subscribers
subscribers = Array(Renalware.config.broadcast_subscription_map[self.class.name])
subscribers.each do |subscriber|
# Support String subscribers eg a simple class name as well as Subscriber instances.
subscriber = Subscriber.new(subscriber) unless subscriber.respond_to?(:klass)
subscribe(subscriber.instance, async: subscriber.async?)
end
self
end | ruby | {
"resource": ""
} |
q21380 | PoolParty.Chef.recipe | train | def recipe(recipe_name, hsh={})
_recipes << recipe_name unless _recipes.include?(recipe_name)
head = {}
tail = head
recipe_name.split("::").each do |key|
unless key == "default"
n = {}
tail[key] = n
tail = n
end
end
tail.replace hsh
override_attributes.merge!(head) unless hsh.empty?
end | ruby | {
"resource": ""
} |
q21381 | Sapience.LogMethods.measure | train | def measure(level, message, params = {}, &block)
index = Sapience.config.level_to_index(level)
if level_index <= index
measure_internal(level, index, message, params, &block)
else
yield params if block
end
end | ruby | {
"resource": ""
} |
q21382 | Renalware.PDRegimesHelper.available_pd_treatments_for | train | def available_pd_treatments_for(regime)
scope = "renalware.pd.treatments"
key = regime.capd? ? "capd" : "apd"
I18n.t(key, scope: scope)
end | ruby | {
"resource": ""
} |
q21383 | Sapience.Logger.log | train | def log(log, message = nil, progname = nil, &block)
# Compatibility with ::Logger
return add(log, message, progname, &block) unless log.is_a?(Sapience::Log)
if @@appender_thread
@@appender_thread << lambda do
Sapience.appenders.each do |appender|
next unless appender.valid?
begin
appender.log(log)
rescue StandardError => exc
$stderr.write("Appender thread: Failed to log to appender: #{appender.inspect}\n #{exc.inspect}")
end
end
Sapience.clear_tags!
end
end
end | ruby | {
"resource": ""
} |
q21384 | Sapience.Base.with_payload | train | def with_payload(payload)
current_payload = self.payload
Thread.current[:sapience_payload] = current_payload ? current_payload.merge(payload) : payload
yield
ensure
Thread.current[:sapience_payload] = current_payload
end | ruby | {
"resource": ""
} |
q21385 | Sapience.Base.include_message? | train | def include_message?(log)
return true if @filter.nil?
if @filter.is_a?(Regexp)
!(@filter =~ log.name).nil?
elsif @filter.is_a?(Proc)
@filter.call(log) == true
end
end | ruby | {
"resource": ""
} |
q21386 | Sapience.Base.extract_backtrace | train | def extract_backtrace
stack = caller
while (first = stack.first) && first.include?(SELF_PATTERN)
stack.shift
end
stack
end | ruby | {
"resource": ""
} |
q21387 | DataPackage.Helpers.dereference_descriptor | train | def dereference_descriptor(resource, base_path: nil, reference_fields: nil)
options = {
base_path: base_path,
reference_fields: reference_fields,
}
case resource
when Hash
resource.inject({}) do |new_resource, (key, val)|
if reference_fields.nil? || reference_fields.include?(key)
new_resource[key] = dereference_descriptor(val, **options)
else
new_resource[key] = val
end
new_resource
end
when Enumerable
resource.map{ |el| dereference_descriptor(el, **options)}
when String
begin
resolve_json_reference(resource, deep_dereference: true, base_path: base_path)
rescue Errno::ENOENT
resource
end
else
resource
end
end | ruby | {
"resource": ""
} |
q21388 | CloudProviders.Ec2.describe_instances | train | def describe_instances(id=nil)
begin
@describe_instances = ec2.describe_instances.reservationSet.item.map do |r|
r.instancesSet.item.map do |i|
inst_options = i.merge(r.merge(:cloud => cloud)).merge(cloud.cloud_provider.dsl_options)
Ec2Instance.new(inst_options)
end
end.flatten
rescue AWS::InvalidClientTokenId => e # AWS credentials invalid
puts "Error contacting AWS: #{e}"
raise e
rescue Exception => e
[]
end
end | ruby | {
"resource": ""
} |
q21389 | CloudProviders.Ec2.aws_options | train | def aws_options(opts={})
uri=URI.parse(ec2_url)
{ :access_key_id => access_key,
:secret_access_key=> secret_access_key,
:use_ssl => (uri.scheme=='https'),
:path => uri.path,
:host => uri.host,
:port => uri.port
}.merge(opts)
end | ruby | {
"resource": ""
} |
q21390 | CloudProviders.Ec2.ec2 | train | def ec2
@ec2 ||= begin
AWS::EC2::Base.new( aws_options )
rescue AWS::ArgumentError => e # AWS credentials missing?
puts "Error contacting AWS: #{e}"
raise e
rescue Exception => e
puts "Generic error #{e.class}: #{e}"
end
end | ruby | {
"resource": ""
} |
q21391 | CloudProviders.Ec2.credential_file | train | def credential_file(file=nil)
unless file.nil?
dsl_options[:credential_file]=file
dsl_options.merge!(Ec2.load_keys_from_credential_file(file))
else
fetch(:credential_file)
end
end | ruby | {
"resource": ""
} |
q21392 | CFA.BaseModel.generic_set | train | def generic_set(key, value, tree = data)
modify(key, value, tree) || uncomment(key, value, tree) ||
add_new(key, value, tree)
end | ruby | {
"resource": ""
} |
q21393 | PaperTrailScrapbook.Chapter.story | train | def story
updates = changes
return unless tell_story?(updates)
[preface, (updates unless destroy?)].compact.join("\n")
end | ruby | {
"resource": ""
} |
q21394 | CFA.AugeasWriter.report_error | train | def report_error
return if yield
error = aug.error
# zero is no error, so problem in lense
if aug.error[:code].nonzero?
raise "Augeas error #{error[:message]}. Details: #{error[:details]}."
end
msg = aug.get("/augeas/text/store/error/message")
location = aug.get("/augeas/text/store/error/lens")
raise "Augeas serializing error: #{msg} at #{location}"
end | ruby | {
"resource": ""
} |
q21395 | PaperTrailScrapbook.Changes.change_log | train | def change_log
text =
changes
.map { |k, v| digest(k, v) }
.compact
.join("\n")
text = text.gsub(' id:', ':') if PaperTrailScrapbook.config.drop_id_suffix
text
end | ruby | {
"resource": ""
} |
q21396 | ConfigVar.Context.method_missing | train | def method_missing(name, *args)
value = @values[name]
if value.nil? && !@values.has_key?(name)
address = "<#{self.class.name}:0x00#{(self.object_id << 1).to_s(16)}>"
raise NoMethodError.new("undefined method `#{name}' for ##{address}")
end
value
end | ruby | {
"resource": ""
} |
q21397 | ConfigVar.Context.required_string | train | def required_string(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => value}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | ruby | {
"resource": ""
} |
q21398 | ConfigVar.Context.required_int | train | def required_int(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_int(name, value)}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | ruby | {
"resource": ""
} |
q21399 | ConfigVar.Context.required_bool | train | def required_bool(name)
required_custom(name) do |env|
if value = env[name.to_s.upcase]
{name => parse_bool(name, value)}
else
raise ConfigError.new("A value must be provided for #{name.to_s.upcase}")
end
end
end | ruby | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.