_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q8800
|
Regenerate.WebPage.initializePageObject
|
train
|
# Bind a freshly created page object to this web page and seed it with the
# standard instance variables (file name/paths, relative root link, site)
# before invoking its postInitialize hook.
def initializePageObject(pageObject)
@pageObject = pageObject
# Depth of this page below the site root; used to build a relative root link.
pathDepth = @pathComponents.length-1
rootLinkPath = "../" * pathDepth
setPageObjectInstanceVar("@fileName", @fileName)
setPageObjectInstanceVar("@baseDir", File.dirname(@fileName))
setPageObjectInstanceVar("@baseFileName", File.basename(@fileName))
setPageObjectInstanceVar("@pathDepth", pathDepth)
setPageObjectInstanceVar("@pathComponents", @pathComponents)
setPageObjectInstanceVar("@rootLinkPath", rootLinkPath)
# @baseUrl is the same relative prefix as @rootLinkPath.
setPageObjectInstanceVar("@baseUrl", rootLinkPath)
setPageObjectInstanceVar("@site", @site)
# Snapshot the variables set so far, so later code can tell which ones
# were added afterwards by page scripts.
@initialInstanceVariables = Set.new(@pageObject.instance_variables)
pageObject.postInitialize
end
|
ruby
|
{
"resource": ""
}
|
q8801
|
Regenerate.WebPage.startNewComponent
|
train
|
# Register a new component as the current one on this page, appending it to
# the component list and optionally feeding it the comment that opened it.
def startNewComponent(component, startComment = nil)
  component.parentPage = self
  @currentComponent = component
  @components.push(component)
  component.processStartComment(startComment) if startComment
end
|
ruby
|
{
"resource": ""
}
|
q8802
|
Regenerate.WebPage.writeRegeneratedFile
|
train
|
# Write the regenerated page to outFile, optionally making a backup first
# and optionally verifying the output is unchanged from that backup.
def writeRegeneratedFile(outFile, makeBackup, checkNoChanges)
  puts "writeRegeneratedFile, #{outFile}, makeBackup = #{makeBackup}, checkNoChanges = #{checkNoChanges}"
  backupFileName = makeBackupFile(outFile) if makeBackup
  File.open(outFile, "w") do |f|
    @components.each { |component| f.write(component.output) }
  end
  puts " wrote regenerated page to #{outFile}"
  return unless checkNoChanges
  unless makeBackup
    raise Exception.new("writeRegeneratedFile #{outFile}: checkNoChanges specified, but no backup was made")
  end
  checkAndEnsureOutputFileUnchanged(outFile, backupFileName)
end
|
ruby
|
{
"resource": ""
}
|
q8803
|
Regenerate.WebPage.readFileLines
|
train
|
# Read the source file line by line, dispatching each line either to the
# Regenerate command-line processor or to the plain-text processor.
# Line numbers are tracked (1-based) so embedded Ruby gets correct
# stack traces.
def readFileLines
  puts "Reading source file #{@fileName} ..."
  lineNumber = 0
  # BUG FIX: the original `File.open(@fileName).each_line` never closed the
  # file handle; File.foreach closes it when iteration finishes.
  File.foreach(@fileName) do |line|
    line.chomp!
    lineNumber += 1
    commentLineMatch = COMMENT_LINE_REGEX.match(line)
    if commentLineMatch
      # Looks like a Regenerate command line, but may still be ordinary text.
      parsedCommandLine = ParsedRegenerateCommentLine.new(line, commentLineMatch)
      if parsedCommandLine.isRegenerateCommentLine
        parsedCommandLine.checkIsValid
        processCommandLine(parsedCommandLine, lineNumber)
      else
        processTextLine(line, lineNumber)
      end
    else
      processTextLine(line, lineNumber)
    end
  end
  # After processing all source lines, the only unfinished page component
  # permitted is a static HTML component.
  finishAtEndOfSourceFile
end
|
ruby
|
{
"resource": ""
}
|
q8804
|
Regenerate.WebPage.regenerateToOutputFile
|
train
|
# Execute the page's Ruby components, let the page object post-process, then
# write the regenerated output. checkNoChanges is also passed as makeBackup
# because writeRegeneratedFile requires a backup to compare against.
def regenerateToOutputFile(outFile, checkNoChanges = false)
executeRubyComponents
@pageObject.process
writeRegeneratedFile(outFile, checkNoChanges, checkNoChanges)
end
|
ruby
|
{
"resource": ""
}
|
q8805
|
Regenerate.WebPage.executeRubyComponents
|
train
|
# Evaluate each Ruby component's code in the page object's context,
# chdir'ed to the source file's directory so relative paths resolve.
def executeRubyComponents
  fileDir = File.dirname(@fileName)
  Dir.chdir(fileDir) do
    @rubyComponents.each do |rubyComponent|
      # File name and line number are passed so stack traces point at the
      # original source location of the embedded Ruby.
      @pageObject.instance_eval(rubyComponent.text, @fileName, rubyComponent.lineNumber)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8806
|
Regenerate.PageObject.erb
|
train
|
# Render an ERB template (path relative to the site root) against this
# page object's binding and return the rendered text.
def erb(templateFileName)
  @binding = binding
  fullTemplateFilePath = relative_path(@rootLinkPath + templateFileName)
  File.open(fullTemplateFilePath, "r") do |input|
    template = ERB.new(input.read, nil, nil)
    # Report errors against the template file, not "(erb)".
    template.filename = templateFileName
    template.result(@binding)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8807
|
WhereWasI.Gpx.add_tracks
|
train
|
# Parse @gpx_data (GPX XML) into Track objects, and build synthetic one-hop
# "inter-segment" tracks bridging the gap between each pair of consecutive
# tracks (used for position queries that fall between tracks).
def add_tracks
@tracks = []
doc = Nokogiri::XML(@gpx_data)
doc.css('xmlns|trk').each do |trk|
track = Track.new
trk.css('xmlns|trkpt').each do |trkpt|
# https://en.wikipedia.org/wiki/GPS_Exchange_Format#Units
# decimal degrees, wgs84.
# elevation in meters.
track.add_point(
lat: trkpt.attributes['lat'].text.to_f,
lon: trkpt.attributes['lon'].text.to_f,
elevation: trkpt.at_css('xmlns|ele').text.to_f,
time: Time.parse(trkpt.at_css('xmlns|time').text)
)
end
@tracks << track
end
@intersegments = []
@tracks.each_with_index do |track,i|
next if i == 0
this_track = track
prev_track = @tracks[i-1]
# Bridge track: previous track's end point to this track's start point.
# end_location/start_location are [lat, lon, elevation] triples.
inter_track = Track.new
inter_track.add_point(
lat: prev_track.end_location[0],
lon: prev_track.end_location[1],
elevation: prev_track.end_location[2],
time: prev_track.end_time
)
inter_track.add_point(
lat: this_track.start_location[0],
lon: this_track.start_location[1],
elevation: this_track.start_location[2],
time: this_track.start_time
)
@intersegments << inter_track
end
# Guard flag so callers (e.g. #at) can lazily trigger parsing only once.
@tracks_added = true
end
|
ruby
|
{
"resource": ""
}
|
q8808
|
WhereWasI.Gpx.at
|
train
|
# Return the location recorded at `time` (a Time, Integer epoch, or
# parseable String). If the time falls between tracks, fall back on
# @intersegment_behavior:
#   :interpolate - query the synthetic bridge tracks
#   :nearest     - snap to the closest track start/end point
# Returns nil when no track matches and no fallback applies.
def at(time)
  add_tracks if ! @tracks_added
  time = Time.parse(time) if time.is_a?(String)
  time = time.to_i
  location = nil
  @tracks.each do |track|
    location = track.at(time)
    break if location
  end
  if ! location
    case @intersegment_behavior
    when :interpolate then
      @intersegments.each do |track|
        location = track.at(time)
        break if location
      end
    when :nearest then
      # All start/end points for all segments; hash is built in ascending
      # time order.
      points = {}
      @tracks.each do |t|
        points[t.start_time.to_i] = t.start_location
        points[t.end_time.to_i] = t.end_location
      end
      # BUG FIX: the original used bare `Infinity`, which is not defined in
      # Ruby and raised NameError; Float::INFINITY is the correct sentinel.
      last_diff = Float::INFINITY
      last_time = -1
      points.each do |p_time, p_location|
        this_diff = (p_time.to_i - time).abs
        # Differences shrink as we approach the nearest point; the first
        # larger difference means the previous point was the closest.
        if this_diff > last_diff
          location = Track.array_to_hash(points[last_time])
          break
        else
          last_diff = this_diff
          last_time = p_time
        end
      end
      # No break: time is at or past the last point, so use the last one.
      location = Track.array_to_hash(points[last_time])
    end
  end
  location
end
|
ruby
|
{
"resource": ""
}
|
q8809
|
RsUserPolicy.User.clear_permissions
|
train
|
# Remove all of this user's permissions in the given account.
# Returns a map of permission href => role title. With :dry_run nothing is
# destroyed and the map of would-be-removed permissions is returned.
def clear_permissions(account_href, client, options={})
  options = { :dry_run => false }.merge(options)
  current_permissions = get_api_permissions(account_href)
  return Hash[current_permissions.map { |p| [p.href, p.role_title] }] if options[:dry_run]
  result = RsUserPolicy::RightApi::PermissionUtilities.destroy_permissions(
    current_permissions,
    client
  )
  @permissions.delete(account_href)
  result
end
|
ruby
|
{
"resource": ""
}
|
q8810
|
RsUserPolicy.User.set_api_permissions
|
train
|
# Reconcile this user's permissions in an account with the desired list of
# role titles. Returns [removed, added]: removed maps href => role title,
# added maps role title => href. With :dry_run nothing changes and
# placeholder hrefs (1..n) are fabricated for the additions.
def set_api_permissions(permissions, account_href, client, options={})
options = {:dry_run => false}.merge(options)
existing_api_permissions_response = get_api_permissions(account_href)
existing_api_permissions = Hash[existing_api_permissions_response.map{|p| [p.role_title, p] }]
if permissions.length == 0
# Desired state is "no permissions": clear everything.
removed = clear_permissions(account_href, client, options)
@permissions.delete(account_href)
return removed, {}
else
# Permissions present on the account but not desired are destroyed...
permissions_to_remove = (existing_api_permissions.keys - permissions).map{|p| existing_api_permissions[p]}
remove_response = Hash[permissions_to_remove.map{|p| [p.href, p.role_title]}]
unless options[:dry_run]
remove_response = RsUserPolicy::RightApi::PermissionUtilities.destroy_permissions(permissions_to_remove, client)
end
# ...and desired role titles not yet present are created.
permissions_to_add = {
@href => Hash[(permissions - existing_api_permissions.keys).map{|p| [p,nil]}]
}
add_response = {}
if options[:dry_run]
# Fabricate unique placeholder hrefs so the return shape matches.
href_idx = 0
add_response = {
@href => Hash[(permissions - existing_api_permissions.keys).map{|p| [p,(href_idx += 1)]}]
}
else
add_response = RsUserPolicy::RightApi::PermissionUtilities.create_permissions(permissions_to_add, client)
end
# Refresh the cached permission list from the API after real changes.
@permissions[account_href] = client.permissions.index(:filter => ["user_href==#{@href}"]) unless options[:dry_run]
return remove_response, Hash[add_response[@href].keys.map{|p| [add_response[@href][p],p]}]
end
end
|
ruby
|
{
"resource": ""
}
|
q8811
|
Hosties.UsesAttributes.finish
|
train
|
# Collect every declared attribute into a hash keyed by attribute name,
# raising ArgumentError for any attribute that was never assigned.
def finish
  @attributes.each_with_object({}) do |attr, collected|
    value = instance_variable_get "@#{attr}"
    raise ArgumentError, "Missing attribute #{attr}" if value.nil?
    collected[attr] = value
  end
end
|
ruby
|
{
"resource": ""
}
|
q8812
|
EM::Xmpp.Handler.run_xpath_handlers
|
train
|
# Run each matching xpath handler against the stanza context. Every handler
# must return a Context; handlers that do not request reuse are removed via
# the `remover` method.
def run_xpath_handlers(ctx, handlers, remover)
  handlers.each do |handler|
    next if ctx.done?
    next unless handler.match?(ctx.stanza)
    ctx['xpath.handler'] = handler
    ctx = handler.call(ctx)
    raise RuntimeError, "xpath handlers should return a Context" unless ctx.is_a?(Context)
    send remover, handler unless ctx.reuse_handler?
  end
end
|
ruby
|
{
"resource": ""
}
|
q8813
|
MtaJson.Wrapper.update_params
|
train
|
# Replace the request's parsed form hash and rack input stream with the
# decoded JSON payload, re-encoded as a form query string so downstream
# Rack machinery re-parses it uniformly.
def update_params env, json
env[FORM_HASH] = json
env[BODY] = env[FORM_INPUT] = StringIO.new(Rack::Utils.build_query(json))
end
|
ruby
|
{
"resource": ""
}
|
q8814
|
MtaJson.Wrapper.verify_request_method
|
train
|
# Reject requests whose HTTP method is not allowed; whitelisted callers may
# additionally use the private method set.
def verify_request_method env
  allowed = ALLOWED_METHODS
  allowed |= ALLOWED_METHODS_PRIVATE if whitelisted?(env)
  return if allowed.include?(env[METHOD])
  raise "Request method #{env[METHOD]} not allowed"
end
|
ruby
|
{
"resource": ""
}
|
q8815
|
MtaJson.Wrapper.update_options
|
train
|
# Override the request method from wrapper options, but only with a method
# present in the allowed (public or private) sets.
def update_options env, options
if options[:method] and (ALLOWED_METHODS | ALLOWED_METHODS_PRIVATE).include?(options[:method])
# (possibly) TODO - pass parameters for GET instead of POST in update_params then?
# see https://github.com/rack/rack/blob/master/lib/rack/request.rb -> def GET
env[METHOD] = options[:method]
end
end
|
ruby
|
{
"resource": ""
}
|
q8816
|
MtaJson.Wrapper.add_csrf_info
|
train
|
# For whitelisted non-GET requests, mint a fresh CSRF token and store it in
# both the session and the env so the app's CSRF check will pass.
def add_csrf_info env
env[CSRF_TOKEN] = env[SESSION][:_csrf_token] = SecureRandom.base64(32).to_s if env[METHOD] != 'GET' and whitelisted?(env)
end
|
ruby
|
{
"resource": ""
}
|
q8817
|
Cerealizer.Base.read_keys
|
train
|
# Build a hash mapping every readable key name to its proxied value.
def read_keys
  self.class.keys.each_with_object(Hash.new) do |key, hash|
    hash[key.name] = proxy_reader(key.name) if readable?(key.name)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8818
|
Cerealizer.Base.write_keys
|
train
|
# Assign each writeable attribute through its proxy writer; returns self.
def write_keys(attrs)
  attrs.each do |key, value|
    proxy_writer(key, value) if writeable?(key)
  end
  self
end
|
ruby
|
{
"resource": ""
}
|
q8819
|
Cerealizer.Base.proxy_writer
|
train
|
# Invoke the "#{key}=" writer on self when defined, otherwise forward it to
# the wrapped object.
def proxy_writer(key, *args)
  writer = "#{key}="
  receiver = self.respond_to?(writer) ? self : object
  receiver.send(writer, *args)
end
|
ruby
|
{
"resource": ""
}
|
q8820
|
ActiveRecord.Fixtures.table_rows
|
train
|
# Build the rows to insert for this fixture file, keyed by table name.
# Fills in timestamps, interpolates $LABEL, generates primary keys,
# resolves belongs_to labels into foreign keys (including polymorphic
# "label (Type)" syntax), and collects HABTM join-table rows -- which is
# why the return value is a hash of table name => array of row hashes.
def table_rows
now = ActiveRecord::Base.default_timezone == :utc ? Time.now.utc : Time.now
now = now.to_s(:db)
# allow a standard key to be used for doing defaults in YAML
fixtures.delete('DEFAULTS')
# track any join tables we need to insert later
rows = Hash.new { |h,table| h[table] = [] }
rows[table_name] = fixtures.map do |label, fixture|
row = fixture.to_hash
if model_class && model_class < ActiveRecord::Base
# fill in timestamp columns if they aren't specified and the model is set to record_timestamps
if model_class.record_timestamps
timestamp_column_names.each do |name|
row[name] = now unless row.key?(name)
end
end
# interpolate the fixture label
row.each do |key, value|
row[key] = label if value == "$LABEL"
end
# generate a primary key if necessary
if has_primary_key_column? && !row.include?(primary_key_name)
row[primary_key_name] = ActiveRecord::Fixtures.identify(label)
end
# If STI is used, find the correct subclass for association reflection
reflection_class =
if row.include?(inheritance_column_name)
row[inheritance_column_name].constantize rescue model_class
else
model_class
end
reflection_class.reflect_on_all_associations.each do |association|
case association.macro
when :belongs_to
# Do not replace association name with association foreign key if they are named the same
fk_name = (association.options[:foreign_key] || "#{association.name}_id").to_s
if association.name.to_s != fk_name && value = row.delete(association.name.to_s)
if association.options[:polymorphic] && value.sub!(/\s*\(([^\)]*)\)\s*$/, "")
# support polymorphic belongs_to as "label (Type)"
row[association.foreign_type] = $1
end
# The label is hashed into a deterministic integer id.
row[fk_name] = ActiveRecord::Fixtures.identify(value)
end
when :has_and_belongs_to_many
if (targets = row.delete(association.name.to_s))
targets = targets.is_a?(Array) ? targets : targets.split(/\s*,\s*/)
table_name = association.options[:join_table]
rows[table_name].concat targets.map { |target|
{ association.foreign_key => row[primary_key_name],
association.association_foreign_key => ActiveRecord::Fixtures.identify(target) }
}
end
end
end
end
row
end
rows
end
|
ruby
|
{
"resource": ""
}
|
q8821
|
Mimi.Core.use
|
train
|
# Configure and register a Mimi module. Idempotent: a module is recorded at
# most once. Raises ArgumentError for non-Mimi modules. Returns true.
def use(mod, opts = {})
  unless mod < Mimi::Core::Module
    raise ArgumentError, "#{mod} is not a Mimi module"
  end
  mod.configure(opts)
  used_modules << mod unless used_modules.include?(mod)
  true
end
|
ruby
|
{
"resource": ""
}
|
q8822
|
Mimi.Core.require_files
|
train
|
# Require every file matching `glob` under root_path (defaults to the
# application root).
def require_files(glob, root_path = app_root_path)
  matches = Pathname.glob(root_path.join(glob))
  matches.each { |path| require path.expand_path }
end
|
ruby
|
{
"resource": ""
}
|
q8823
|
RailsIdentity.User.valid_user
|
train
|
# Validation: a user must have either username+password credentials or a
# complete OAuth identity (provider + uid); otherwise add an error.
def valid_user
if (self.username.blank? || self.password_digest.blank?) &&
(self.oauth_provider.blank? || self.oauth_uid.blank?)
errors.add(:username, " and password OR oauth must be specified")
end
end
|
ruby
|
{
"resource": ""
}
|
q8824
|
RailsIdentity.User.issue_token
|
train
|
# Create a fresh one-hour session and store its token on the attribute
# matching the requested kind (:reset_token or :verification_token).
def issue_token(kind)
  session = Session.new(user: self, seconds: 3600)
  session.save
  case kind
  when :reset_token
    self.reset_token = session.token
  when :verification_token
    self.verification_token = session.token
  end
end
|
ruby
|
{
"resource": ""
}
|
q8825
|
VirtualMonkey.ELBRunner.lookup_scripts
|
train
|
# Locate the ELB connect/disconnect operational scripts on the first
# server's ServerTemplate and register them via lookup_scripts_table.
def lookup_scripts
scripts = [
[ 'connect', 'ELB connect' ],
[ 'disconnect', 'ELB disconnect' ]
]
# @scripts_to_run = {}
server = @servers.first
server.settings
st = ServerTemplate.find(server.server_template_href)
lookup_scripts_table(st,scripts)
# @scripts_to_run['connect'] = st.executables.detect { |ex| ex.name =~ /ELB connect/i }
# @scripts_to_run['disconnect'] = st.executables.detect { |ex| ex.name =~ /ELB disconnect/i }
end
|
ruby
|
{
"resource": ""
}
|
q8826
|
VirtualMonkey.ELBRunner.log_rotation_checks
|
train
|
# Force a log rotation on every app server and verify the rotated access
# log exists. NOTE(review): the log path assumes the PHP app layout per the
# comment below -- confirm for other app server types.
def log_rotation_checks
detect_os
# this works for php
app_servers.each do |server|
server.settings
force_log_rotation(server)
log_check(server,"/mnt/log/#{server.apache_str}/access.log.1")
end
end
|
ruby
|
{
"resource": ""
}
|
q8827
|
Guise.Introspection.has_guise?
|
train
|
# True if the record currently has the given guise (class-name form),
# ignoring associated records marked for destruction.
# Raises ArgumentError for a guise not present in guise_options.
def has_guise?(value)
value = value.to_s.classify
unless guise_options.values.include?(value)
raise ArgumentError, "no such guise #{value}"
end
association(guise_options.association_name).reader.any? do |record|
!record.marked_for_destruction? &&
record[guise_options.attribute] == value
end
end
|
ruby
|
{
"resource": ""
}
|
q8828
|
Technologist.YamlParser.instancify
|
train
|
# Build a Rule instance for `technology` by dispatching on the rule's Ruby
# class (e.g. Hash -> parse_rule_of_type_hash) and resolving the matching
# <Name>Rule class under the Rule namespace.
def instancify(technology, rule)
class_name, attributes = send("parse_rule_of_type_#{rule.class.name.downcase}", rule)
Rule.const_get("#{class_name}Rule").new(technology, attributes)
end
|
ruby
|
{
"resource": ""
}
|
q8829
|
FaviconParty.Fetcher.find_favicon_urls_in_html
|
train
|
# Extract favicon candidate URLs from an HTML document, preferring .ico,
# then .png, then anything else, normalized to absolute URLs.
# NOTE(review): URI.encode/URI.decode were removed in Ruby 3.0 -- this code
# requires an older Ruby or a compatibility shim; confirm.
def find_favicon_urls_in_html(html)
doc = Nokogiri.parse html
candidate_urls = doc.css(ICON_SELECTORS.join(",")).map {|e| e.attr('href') }.compact
# Stable preference order: ico (0) before png (1) before others (2).
candidate_urls.sort_by! {|href|
if href =~ /\.ico/
0
elsif href =~ /\.png/
1
else
2
end
}
uri = URI final_url
candidate_urls.map! do |href|
href = URI.encode(URI.decode(href.strip))
if href =~ /\A\/\//
# Protocol-relative URL: inherit the final page's scheme.
href = "#{uri.scheme}:#{href}"
elsif href !~ /\Ahttp/
# Ignore invalid URLS - ex. {http://i50.tinypic.com/wbuzcn.png}
href = URI.join(url_root, href).to_s rescue nil
end
href
end.compact.uniq
end
|
ruby
|
{
"resource": ""
}
|
q8830
|
FaviconParty.Fetcher.final_url
|
train
|
# Resolve @query_url through a HEAD-request redirect and memoize the
# result. Relative redirect locations are joined against the query URL's
# scheme://host root; redirects to localhost fall back to the query URL.
def final_url
return @final_url if !@final_url.nil?
location = final_location(FaviconParty::HTTPClient.head(@query_url))
if !location.nil?
if location =~ /\Ahttp/
@final_url = URI.encode location
else
# Relative redirect -- resolve against the query URL's root.
uri = URI @query_url
root = "#{uri.scheme}://#{uri.host}"
@final_url = URI.encode URI.join(root, location).to_s
end
end
if !@final_url.nil?
if %w( 127.0.0.1 localhost ).any? {|host| @final_url.include? host }
# TODO Exception for invalid final urls
@final_url = @query_url
end
return @final_url
end
# No redirect location found -- the query URL is already final.
@final_url = @query_url
end
|
ruby
|
{
"resource": ""
}
|
q8831
|
Kawaii.RoutingMethods.context
|
train
|
# Create a RouteContext for `path`, evaluate the block inside it, and
# register a route for every HTTP method the block used.
# @todo Is there a better way to keep ordering of routes? An alternative
# would be to enter each route in a context only once (with 'prefix' based
# on containing contexts); registering per method is faster when compiling.
def context(path, &block)
  ctx = RouteContext.new(self, path)
  ctx.instance_eval(&block)
  ctx.methods_used.each { |meth| add_route!(meth, ctx) }
end
|
ruby
|
{
"resource": ""
}
|
q8832
|
Kawaii.RoutingMethods.match
|
train
|
# Find the first route registered for the request's HTTP method that
# matches env; returns the match result or nil.
def match(env)
routes[env[Rack::REQUEST_METHOD]]
.lazy # Lazy to avoid unnecessary calls to #match.
.map { |r| r.match(env) }
.find { |r| !r.nil? }
end
|
ruby
|
{
"resource": ""
}
|
q8833
|
Glass.TimelineItem.insert!
|
train
|
# Insert this timeline item via the Mirror API. Items with attached files
# are uploaded as one multipart request per file; plain items are inserted
# directly. Returns an array of the API results' data.
def insert!(mirror=@client)
  timeline_item = self
  result = []
  if file_upload?
    file_to_upload.each do |file|
      media = Google::APIClient::UploadIO.new(file.contentUrl, file.content_type)
      result << client.execute!(
        :api_method => mirror.timeline.insert,
        :body_object => timeline_item,
        :media => media,
        :parameters => {
          :uploadType => 'multipart',
          :alt => 'json'})
    end
  else
    result << client.execute(
      :api_method => mirror.timeline.insert,
      :body_object => timeline_item)
  end
  # BUG FIX: `result` is an Array, which has no #data method -- the original
  # `result.data` always raised NoMethodError. Return each execution's data.
  result.map(&:data)
end
|
ruby
|
{
"resource": ""
}
|
q8834
|
SycLink.Link.select_defined
|
train
|
# Keep only the recognized ATTRS entries whose value is present (non-nil).
def select_defined(args)
  args.select do |key, value|
    (ATTRS.include? key) && !value.nil?
  end
end
|
ruby
|
{
"resource": ""
}
|
q8835
|
Disqussion.Exports.exportForum
|
train
|
# Trigger a forum export. Expects exactly one positional argument (the
# forum); prints a usage warning otherwise. A trailing hash supplies extra
# API options.
def exportForum(*args)
  options = args.last.is_a?(Hash) ? args.pop : {}
  unless args.size == 1
    return puts "#{Kernel.caller.first}: exports.exportForum expects an arguments: forum"
  end
  options.merge!(:forum => args[0])
  post('exports/exportForum', options)
end
|
ruby
|
{
"resource": ""
}
|
q8836
|
ActiveRecord.Persistence.update_column
|
train
|
# Write a single attribute straight to the database, bypassing validations,
# callbacks, and timestamps. Raises for readonly attributes or unsaved
# records. Returns true when exactly one row was updated.
def update_column(name, value)
name = name.to_s
raise ActiveRecordError, "#{name} is marked as readonly" if self.class.readonly_attributes.include?(name)
raise ActiveRecordError, "can not update on a new record object" unless persisted?
updated_count = self.class.update_all({ name => value }, self.class.primary_key => id)
# Update the in-memory attribute without marking it dirty.
raw_write_attribute(name, value)
updated_count == 1
end
|
ruby
|
{
"resource": ""
}
|
q8837
|
ActiveCopy.Paths.source_path
|
train
|
# Path to this record's markdown source file. With :relative the path is
# relative to the collection; otherwise it is rooted at root_path.
# NOTE(review): the result is memoized on first call, so later calls with
# different options return the first result -- confirm callers never mix
# relative and absolute lookups on one instance.
def source_path options={}
@source_path ||= if options[:relative]
File.join collection_path, "#{self.id}.md"
else
File.join root_path, collection_path, "#{self.id}.md"
end
end
|
ruby
|
{
"resource": ""
}
|
q8838
|
Cathode.UpdateRequest.default_action_block
|
train
|
# Default update action: find the record (through the parent resource for
# singular resources), apply the strong-params-filtered update, and respond
# with the reloaded record -- or 400 when required parameters are missing.
def default_action_block
proc do
begin
record = if resource.singular
# Singular resource: reach the record through its parent model.
parent_model = resource.parent.model.find(parent_resource_id)
parent_model.send resource.name
else
record = model.find(params[:id])
end
record.update(instance_eval(&@strong_params))
body record.reload
rescue ActionController::ParameterMissing => error
body error.message
status :bad_request
end
end
end
|
ruby
|
{
"resource": ""
}
|
q8839
|
GroupDocsSignatureCloud.ApiClient.deserialize
|
train
|
# Convert an HTTP response into the requested return type.
# 'File' returns the tempfile filled by request callbacks; an empty body
# returns nil; 'String' returns the raw body; anything else must be JSON
# and is converted to the target model type.
def deserialize(response, return_type)
body = response.body
# handle file downloading - return the File instance processed in request callbacks
# note that response body is empty when the file is written in chunks in request on_body callback
return @tempfile if return_type == 'File'
return nil if body.nil? || body.empty?
# return response body directly for String return type
return body if return_type == 'String'
# ensuring a default content type
content_type = response.headers['Content-Type'] || 'application/json'
raise "Content-Type is not supported: #{content_type}" unless json_mime?(content_type)
begin
# Wrapping in [] lets bare JSON scalars parse; [0] unwraps again.
data = JSON.parse("[#{body}]", :symbolize_names => true)[0]
rescue JSON::ParserError => e
# Bare strings/dates are not valid JSON -- fall back to the raw body.
if %w[String Date DateTime].include?(return_type)
data = body
else
raise e
end
end
convert_to_type data, return_type
end
|
ruby
|
{
"resource": ""
}
|
q8840
|
RailsIdentity.SessionsController.index
|
train
|
# List the authenticated user's sessions. Expired sessions are queued for
# asynchronous cleanup; only active ones are rendered (secrets omitted).
def index
  @sessions = Session.where(user: @user)
  expired, active = @sessions.partition { |session| session.expired? }
  SessionsCleanupJob.perform_later(*expired.map(&:uuid))
  render json: active, except: [:secret]
end
|
ruby
|
{
"resource": ""
}
|
q8841
|
RailsIdentity.SessionsController.create
|
train
|
# Create a new session. Authentication comes from (in precedence order) an
# OmniAuth hash, an already-authenticated request, or username/password
# credentials (which require a verified email). OAuth logins redirect to
# the configured landing page with the new token; API logins receive the
# session JSON (secret omitted) with status 201.
def create
# See if OAuth is used first. When authenticated successfully, either
# the existing user will be found or a new user will be created.
# Failure will be redirected to this action but will not match this
# branch.
if (omniauth_hash = request.env["omniauth.auth"])
@user = User.from_omniauth_hash(omniauth_hash)
# Then see if the request already has authentication. Note that if the
# user does not have access to the specified session owner, 401 will
# be thrown.
elsif accept_auth
@user = @auth_user
# Otherwise, it's a normal login process. Use username and password to
# authenticate. The user must exist, the password must be vaild, and
# the email must have been verified.
else
@user = User.find_by_username(session_params[:username])
if (@user.nil? || !@user.authenticate(session_params[:password]) ||
!@user.verified)
raise ApplicationController::UNAUTHORIZED_ERROR
end
end
# Finally, create session regardless of the method and store it.
@session = Session.new(user: @user)
if @session.save
if omniauth_hash
# redirect_to the app page that accepts new session token
url = Rails.application.config.oauth_landing_page_url
url = "#{url}?token=#{@session.token}"
render inline: "", status: 302, location: url
else
render json: @session, except: [:secret], status: 201
end
else
# :nocov:
render_errors 400, @session.full_error_messages
# :nocov:
end
end
|
ruby
|
{
"resource": ""
}
|
q8842
|
RailsIdentity.SessionsController.get_session
|
train
|
# Resolve the session named by params[:id] ("current" means the session
# used to authenticate this request), authorize access, and treat expired
# sessions as not found (destroying them on the way out).
def get_session
  session_id = params[:id]
  if session_id == "current"
    raise Repia::Errors::NotFound if @auth_session.nil?
    session_id = @auth_session.id
  end
  @session = find_object(Session, session_id)
  authorize_for!(@session)
  return unless @session.expired?
  @session.destroy
  raise Repia::Errors::NotFound
end
|
ruby
|
{
"resource": ""
}
|
q8843
|
HasEnumeration.ClassMethods.has_enumeration
|
train
|
# Declare an enumerated attribute backed by a plain column. `mapping` is
# either a symbol->raw-value hash or a list of symbols (mapped to their
# string form). Builds a bidirectional mapping value class, binds it as a
# constant scoped to this model, and wires it up via composed_of. On
# ActiveRecord 3.0 the mapping is also registered with Arel for condition
# handling; on other versions an aggregate-conditions override is
# installed once per class.
def has_enumeration(enumeration, mapping, options = {})
unless mapping.is_a?(Hash)
# Recast the mapping as a symbol -> string hash
mapping_hash = {}
mapping.each {|m| mapping_hash[m] = m.to_s}
mapping = mapping_hash
end
# The underlying attribute
attribute = options[:attribute] || enumeration
# ActiveRecord's composed_of method will do most of the work for us.
# All we have to do is cons up a class that implements the bidirectional
# mapping described by the provided hash.
klass = create_enumeration_mapping_class(mapping)
attr_enumeration_mapping_classes[enumeration] = klass
# Bind the class to a name within the scope of this class
mapping_class_name = enumeration.to_s.camelize
const_set(mapping_class_name, klass)
scoped_class_name = [self.name, mapping_class_name].join('::')
composed_of(enumeration,
:class_name => scoped_class_name,
:mapping => [attribute.to_s, 'raw_value'],
:converter => :from_sym,
:allow_nil => true
)
if ActiveRecord::VERSION::MAJOR >= 3 && ActiveRecord::VERSION::MINOR == 0
# Install this attributes mapping for use later when extending
# Arel attributes on the fly.
::Arel::Table.has_enumeration_mappings[table_name][attribute] = mapping
else
# Install our aggregate condition handling override, but only once
unless @aggregate_conditions_override_installed
extend HasEnumeration::AggregateConditionsOverride
@aggregate_conditions_override_installed = true
end
end
end
|
ruby
|
{
"resource": ""
}
|
q8844
|
SimilarityTree.Node.depth_first_recurse
|
train
|
# Yield (node, depth) for this node and every descendant, pre-order.
# With no explicit node, recursion starts from self at depth 0.
def depth_first_recurse(node = nil, depth = 0, &block)
  node = self if node.nil?
  yield node, depth
  node.children.each do |child|
    depth_first_recurse(child, depth + 1, &block)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8845
|
Curl.ThreadPool.perform
|
train
|
# Run every queued request across the client pool, one thread per client.
# Results go into @results keyed by request key, or are yielded to the
# block as (request, body) pairs. With async=true, returns immediately
# without joining; otherwise joins and returns @results (or true when a
# block consumed the responses).
# NOTE(review): the empty?/shift pair below is not atomic across threads;
# the nil re-check papers over that race -- confirm acceptable.
def perform(async=false, &block)
@results = {}
@clients.each do |client|
@threads << Thread.new do
loop do
break if @reqs.empty?
req = @reqs.shift
break if req.nil? # can sometimes reach here due to a race condition. saw it a lot on travis
client.url = req.uri
args = ["http_#{req.method}"]
if [:put, :post].include? req.method
# add body to args for these methods
if req.body then
if req.body.kind_of? Array then
args += req.body
else
args << req.body
end
else
args << ""
end
end
client.send(*args)
if block then
yield(req, client.body_str)
else
@results[req.key] = client.body_str
end
end
end
end
if async then
# don't wait for threads to join, just return
return true
end
join()
return true if block
return @results
end
|
ruby
|
{
"resource": ""
}
|
q8846
|
Curl.ThreadPool.collate_results
|
train
|
# Order a results container keyed by 0...size into a plain array.
def collate_results(results)
  Array.new(results.size) { |index| results[index] }
end
|
ruby
|
{
"resource": ""
}
|
q8847
|
Kelp.XPaths.xpath_row_containing
|
train
|
# Build an XPath expression matching a table row that contains all of the
# given text(s). A single String is treated as a one-element list.
def xpath_row_containing(texts)
  texts = [texts] if texts.class == String
  predicate = texts
    .map { |text| "contains(., #{xpath_sanitize(text)})" }
    .join(' and ')
  ".//tr[#{predicate}]"
end
|
ruby
|
{
"resource": ""
}
|
q8848
|
ResponseFor.ActionController.respond_to_action_responses
|
train
|
# If no respond_to has already run for this action and response blocks are
# registered, execute each registered block inside a respond_to responder.
def respond_to_action_responses
if !respond_to_performed? && action_responses.any?
respond_to do |responder|
action_responses.each {|response| instance_exec(responder, &response) }
end
end
end
|
ruby
|
{
"resource": ""
}
|
q8849
|
SengiriYaml.Loader.load_dir
|
train
|
# Concatenate every *.yml file under src_dir (sorted, YAML document
# markers stripped) and parse the result as one YAML document.
def load_dir(src_dir)
  merged = Pathname.glob("#{src_dir}/*.yml").sort.map do |yaml_path|
    yaml_path.read.gsub(/^---$/, "")
  end.join
  YAML.load(merged)
end
|
ruby
|
{
"resource": ""
}
|
q8850
|
TranslateColumns.InstanceMethods.translation_locale
|
train
|
# The locale translations should be stored under, or nil when it equals the
# default locale (default-locale content lives on the parent record).
def translation_locale
  locale = @translation_locale || I18n.locale.to_s
  locale unless locale == I18n.default_locale.to_s
end
|
ruby
|
{
"resource": ""
}
|
q8851
|
TranslateColumns.InstanceMethods.translation
|
train
|
# Find or build the translation record for the current locale; returns nil
# when translation is disabled. Raises MissingParent for unsaved records,
# since a translation needs a parent id to attach to.
def translation
if translation_enabled?
if !@translation || (@translation.locale != translation_locale)
raise MissingParent, "Cannot create translations without a stored parent" if new_record?
# try to find translation or build a new one
@translation = translations.where(:locale => translation_locale).first || translations.build(:locale => translation_locale)
end
@translation
else
nil
end
end
|
ruby
|
{
"resource": ""
}
|
q8852
|
TranslateColumns.InstanceMethods.attributes_with_locale=
|
train
|
# Mass-assignment override that applies "locale" first (so translated
# column writers see the right locale), then delegates to the original
# attributes= implementation.
def attributes_with_locale=(new_attributes, guard_protected_attributes = true)
return if new_attributes.nil?
attributes = new_attributes.dup
attributes.stringify_keys!
attributes = sanitize_for_mass_assignment(attributes) if guard_protected_attributes
# Set locale before the other attributes so their writes are translated.
send(:locale=, attributes["locale"]) if attributes.has_key?("locale") and respond_to?(:locale=)
send(:attributes_without_locale=, attributes, guard_protected_attributes)
end
|
ruby
|
{
"resource": ""
}
|
q8853
|
DataMapper.ValidationsExt.validate_parents
|
train
|
# Copy validation errors from each invalid parent onto this resource,
# keyed by relationship name, without duplicating an identical error set.
def validate_parents
  parent_relationships.each do |relationship|
    parent = relationship.get(self)
    next if parent.valid?
    next if errors[relationship.name].include?(parent.errors)
    errors[relationship.name] = parent.errors
  end
end
|
ruby
|
{
"resource": ""
}
|
q8854
|
DataMapper.ValidationsExt.validate_children
|
train
|
# Collect validation errors from invalid children of dirty associations
# under errors[<relationship name>], avoiding duplicate error sets.
def validate_children
  child_associations.each do |collection|
    next unless collection.dirty?
    collection.each do |child|
      next if child.valid?
      bucket = (errors[collection.relationship.name] ||= [])
      bucket << child.errors unless bucket.include?(child.errors)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8855
|
NoSequel.Container.method_missing
|
train
|
# Delegate any unknown method to a Hash snapshot of the key/value table, so
# the container quacks like a Hash for reads.
# NOTE(review): respond_to_missing? is not overridden alongside this, so
# respond_to? will not reflect the delegated methods -- confirm intended.
def method_missing(meth, *args, &block)
db.to_hash(:key, :value).send(meth, *args, &block)
end
|
ruby
|
{
"resource": ""
}
|
q8856
|
NoSequel.Container.validate_key
|
train
|
# Ensure a key is a Symbol or String; returns the key unchanged, raising
# ArgumentError otherwise.
def validate_key(key)
  return key if key.is_a?(Symbol) || key.is_a?(String)
  raise ArgumentError, 'Key must be a string or symbol'
end
|
ruby
|
{
"resource": ""
}
|
q8857
|
SimpleInteraction.ClassMethods.run
|
train
|
# Validate that all required parameters are present, then instantiate the
# interaction and invoke its private #run. Returns the interaction.
# NOTE(review): @options here is class-level state, so concurrent runs on
# one class may interleave -- confirm single-threaded use.
def run(**options)
@options = options
fail RequirementsNotMet.new("#{self} requires the following parameters #{requirements}") unless requirements_met?
new(@options).tap do |interaction|
interaction.__send__(:run)
end
end
|
ruby
|
{
"resource": ""
}
|
q8858
|
SimpleInteraction.ClassMethods.run!
|
train
|
# Run the interaction, raising the wrapped interaction error on failure;
# returns the successful result.
def run!(**options)
  # BUG FIX: forward keywords explicitly. `run(options)` passes the hash as
  # a positional argument, which is an ArgumentError under Ruby 3's
  # keyword-argument separation since #run accepts only keywords.
  interaction = run(**options)
  raise error_class.new(interaction.error) unless interaction.success?
  interaction.result
end
|
ruby
|
{
"resource": ""
}
|
q8859
|
ChainOptions.OptionSet.add_option
|
train
|
# Register an option: emit any configuration warnings for its parameters,
# then merge the option (with derived method handlers) into a new copy of
# the chain options.
def add_option(name, parameters)
self.class.handle_warnings(name, **parameters.dup)
chain_options.merge(name => parameters.merge(method_hash(parameters)))
end
|
ruby
|
{
"resource": ""
}
|
q8860
|
ChainOptions.OptionSet.option
|
train
|
# Look up an option's configuration (raising for unknown names) and wrap it
# in an Option, seeded with the stored value when one exists.
def option(name)
config = chain_options[name] || raise_no_option_error(name)
Option.new(config).tap { |o| o.initial_value(values[name]) if values.key?(name) }
end
|
ruby
|
{
"resource": ""
}
|
q8861
|
Brat.Request.set_request_defaults
|
train
|
# Configure class-level request defaults: API endpoint, private token, and
# an optional sudo parameter (removed from the defaults when sudo is nil).
# Raises MissingCredentials when no endpoint is given.
def set_request_defaults(endpoint, private_token, sudo=nil)
raise Error::MissingCredentials.new("Please set an endpoint to API") unless endpoint
@private_token = private_token
self.class.base_uri endpoint
self.class.default_params :sudo => sudo
self.class.default_params.delete(:sudo) if sudo.nil?
end
|
ruby
|
{
"resource": ""
}
|
q8862
|
Spell.Spell.best_match
|
train
|
# Return the candidate word with the highest bigram similarity to
# given_word, usage-weighted when the word list carries usage counts.
def best_match(given_word)
  candidates = (@word_list.is_a? Array) ? @word_list : @word_list.keys
  given_bigrams = bigramate(given_word)
  scores = candidates.each_with_object({}) do |word, acc|
    acc[word] = bigram_compare(given_bigrams, bigramate(word))
  end
  # Weight by word usage, if logical
  scores = apply_usage_weights(scores) if @word_list.is_a? Hash
  scores.max_by { |_word, score| score }.first
end
|
ruby
|
{
"resource": ""
}
|
q8863
|
Spell.Spell.num_matching
|
train
|
# Count matching bigrams between the two lists, recursively consuming the
# matched prefix of whichever list matched earlier (accumulator-style).
def num_matching(one_bigrams, two_bigrams, acc = 0)
  return acc if one_bigrams.empty? || two_bigrams.empty?
  pos_in_one = one_bigrams.index(two_bigrams[0])
  pos_in_two = two_bigrams.index(one_bigrams[0])
  # Neither head occurs in the other list: drop both heads, no match.
  if pos_in_one.nil? && pos_in_two.nil?
    return num_matching(one_bigrams.drop(1), two_bigrams.drop(1), acc)
  end
  # When only one lookup succeeded, mirror it onto the other.
  pos_in_two ||= pos_in_one
  pos_in_one ||= pos_in_two
  if pos_in_one < pos_in_two
    num_matching(one_bigrams.drop(pos_in_one + 1),
                 two_bigrams.drop(1), acc + 1)
  else
    num_matching(one_bigrams.drop(1),
                 two_bigrams.drop(pos_in_two + 1), acc + 1)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8864
|
Spell.Spell.bigram_compare
|
train
|
# Similarity score in [0, 1]: the number of shared bigrams divided by
# the size of the larger bigram list.
def bigram_compare(word1_bigrams, word2_bigrams)
  denominator = [word1_bigrams.count, word2_bigrams.count].max
  matches = num_matching(word1_bigrams, word2_bigrams)
  matches.to_f / denominator
end
|
ruby
|
{
"resource": ""
}
|
q8865
|
Spell.Spell.apply_usage_weights
|
train
|
# Blends each word's bigram score with its relative usage frequency.
# @alpha controls the blend: 0.0 keeps pure bigram scores, 1.0 uses
# pure usage frequency.
#
# @param word_hash [Hash{String=>Float}] word => bigram score
# @return [Hash{String=>Float}] word => weighted score
def apply_usage_weights(word_hash)
  top_usage = @word_list.values.max.to_f
  top_usage = 1 if top_usage == 0 # guard against division by zero
  word_hash.each_with_object({}) do |(word, bigram_score), weighted|
    usage_score = @word_list[word].to_f / top_usage
    weighted[word] = (bigram_score * (1 - @alpha)) + (usage_score * @alpha)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8866
|
Aker::Rack.Failure.call
|
train
|
# Rack endpoint invoked when authentication or authorization fails.
#
# Login required + interactive request: delegates to the configured
# UI-mode Warden strategy's failure page. Login required +
# non-interactive (API) request: responds 401 with one
# WWW-Authenticate challenge per configured API mode. Otherwise the
# user is authenticated but not authorized: responds 403 with an
# HTML denial page.
def call(env)
  conf = configuration(env)
  if login_required?(env)
    if interactive?(env)
      # Let the UI-mode strategy render its own failure response.
      ::Warden::Strategies[conf.ui_mode].new(env).on_ui_failure.finish
    else
      headers = {}
      # One challenge line per API mode, newline-separated.
      headers["WWW-Authenticate"] =
        conf.api_modes.collect { |mode_key|
          ::Warden::Strategies[mode_key].new(env).challenge
        }.join("\n")
      headers["Content-Type"] = "text/plain"
      [401, headers, ["Authentication required"]]
    end
  else
    # Authenticated but not permitted: log and deny.
    log_authorization_failure(env)
    msg = "#{user(env).username} may not use this page."
    Rack::Response.
      new("<html><head><title>Authorization denied</title></head><body>#{msg}</body></html>",
          403,
          "Content-Type" => "text/html").finish
  end
end
|
ruby
|
{
"resource": ""
}
|
q8867
|
Xmms.Client.shuffle_by
|
train
|
# Rebuilds +playlist+ so that entries sharing the same value for
# +field+ (e.g. :artist) are spread out pseudo-randomly instead of
# clumped together. The playlist is cleared and re-populated in place.
#
# NOTE(review): despite the local names ("artists"), the grouping key
# is whatever +field+ is — confirm callers only pass :artist.
def shuffle_by(playlist, field)
  # Snapshot the current entry ids before clearing the playlist.
  pl = playlist.entries.wait.value
  artists = Hash.new
  rnd = Random.new
  playlist.clear.wait
  field = field.to_sym
  # Bucket every entry id by its value for +field+ (newest first).
  pl.each do |id|
    infos = self.medialib_get_info(id).wait.value
    # values are keyed by source; take the value of the top source [0]
    a = infos[field].first[1]
    if artists.has_key?(a)
      artists[a].insert(0,id)
    else
      artists[a] = [id]
    end
  end
  artist_names = artists.keys
  # Re-add one song at a time, each drawn from a randomly chosen bucket.
  for _ in pl
    artist_idx = (rnd.rand * artist_names.length).to_i
    artist = artist_names[artist_idx]
    songs = artists[artist]
    # NOTE(review): song_idx stays a Float (no .to_i as for artist_idx);
    # Array#[] truncates Floats, so this works but is inconsistent.
    song_idx = rnd.rand * songs.length
    song = songs[song_idx]
    playlist.add_entry(song).wait
    songs.delete(song)
    # Drop exhausted buckets so they can no longer be picked.
    if songs.empty?
      artists.delete(artist)
      artist_names.delete(artist)
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8868
|
Xmms.Client.extract_medialib_info
|
train
|
# Fetches medialib info for +id+ and returns a Hash containing only
# the requested +fields+, as UTF-8 strings. :url values are decoded
# from their xmms2 encoding. Returns an empty Hash when the id has
# no medialib info.
def extract_medialib_info(id, *fields)
  infos = self.medialib_get_info(id).wait.value
  result = Hash.new
  return result if infos.nil?
  fields.map(&:to_sym).each do |field|
    values = infos[field]
    next if values.nil?
    value = values.first[1] # actual value from the top source [0]
    value = Xmms::decode_xmms2_url(value) if field == :url
    result[field] = value.to_s.force_encoding("utf-8")
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q8869
|
Releaselog.Change.check_scope
|
train
|
# Filters this change by +scope+.
#
# Returns self unchanged when no scope is requested or the note
# carries no scope tag. When the note's scope tag matches +scope+,
# strips the tag from @note and returns self; otherwise returns nil.
#
# @param scope [String, nil] scope name to match (without brackets)
# @return [Change, nil]
def check_scope(scope = nil)
  # If no scope is requested or the change has no scope include this change unchanged
  return self unless scope
  # Capture the scope name itself: the old code took match[0][1..-2],
  # which included any leading whitespace matched by \s* and therefore
  # never compared equal when the note was indented.
  change_scope = /^\s*\[(\w+)\]/.match(@note)
  return self unless change_scope
  if change_scope[1] == scope
    # Change has the scope that is requested, strip the whole scope tag from the note
    @note = change_scope.post_match.strip
    self
  else
    # Change has a different scope than requested
    nil
  end
end
|
ruby
|
{
"resource": ""
}
|
q8870
|
VcenterLib.Vcenter.vms
|
train
|
# Collects every VirtualMachine across all datacenters by creating a
# recursive container view rooted at each datacenter's vmFolder.
#
# @return [Array] all VirtualMachine managed objects
def vms
  logger.debug "get all VMs in all datacenters: begin"
  result = dcs.flat_map do |dc|
    serviceContent.viewManager.CreateContainerView(
      container: dc.vmFolder,
      type: ['VirtualMachine'],
      recursive: true
    ).view
  end
  logger.debug "get all VMs in all datacenters: end"
  result
end
|
ruby
|
{
"resource": ""
}
|
q8871
|
Sevendigital.Artist.various?
|
train
|
# Heuristically detects "various artists" style records by matching
# the downcased name and appears-as name against known spellings,
# misspellings and aliases.
#
# @return [Boolean]
def various?
  joined_names = "#{name} #{appears_as}".downcase
  # NOTE: the original list had a missing comma ("varios" "vaious"),
  # which concatenated the two literals into one dead entry and left
  # "vaious" duplicated; the list below restores both as entries.
  various_variations = ["vario", "v???????????????rio", "v.a", "vaious", "varios",
                        "varoius", "variuos", "soundtrack", "karaoke",
                        "original cast", "diverse artist"]
  various_variations.any? { |variation| joined_names.include?(variation) }
end
|
ruby
|
{
"resource": ""
}
|
q8872
|
Weechat.Helper.command_callback
|
train
|
# Dispatches a command invocation from WeeChat to the Ruby command
# registered under +id+, wrapping the raw buffer pointer.
def command_callback(id, buffer, args)
  command = Weechat::Command.find_by_id(id)
  command.call(Weechat::Buffer.from_ptr(buffer), args)
end
|
ruby
|
{
"resource": ""
}
|
q8873
|
Weechat.Helper.command_run_callback
|
train
|
# Dispatches a command_run hook invocation to its registered handler,
# wrapping the raw buffer pointer.
def command_run_callback(id, buffer, command)
  hook = Weechat::Hooks::CommandRunHook.find_by_id(id)
  hook.call(Weechat::Buffer.from_ptr(buffer), command)
end
|
ruby
|
{
"resource": ""
}
|
q8874
|
Weechat.Helper.timer_callback
|
train
|
# Fires the timer registered under +id+, passing the remaining run
# count as an Integer.
def timer_callback(id, remaining)
  timer = Weechat::Timer.find_by_id(id)
  timer.call(remaining.to_i)
end
|
ruby
|
{
"resource": ""
}
|
q8875
|
Weechat.Helper.input_callback
|
train
|
# Forwards buffer input from WeeChat to the buffer's registered
# input callback.
def input_callback(method, buffer, input)
  args = [method, buffer, input]
  Weechat::Buffer.call_input_callback(*args)
end
|
ruby
|
{
"resource": ""
}
|
q8876
|
Weechat.Helper.bar_build_callback
|
train
|
# Builds a bar item's content for +window+. The raw +item+ argument
# is unused; the registered build callback is looked up by +id+.
def bar_build_callback(id, item, window)
  args = [id, window]
  Weechat::Bar::Item.call_build_callback(*args)
end
|
ruby
|
{
"resource": ""
}
|
q8877
|
Weechat.Helper.info_callback
|
train
|
# Calls the Info handler registered under +id+ and coerces its result
# to a String, as the WeeChat API requires.
def info_callback(id, info, arguments)
  handler = Weechat::Info.find_by_id(id)
  handler.call(arguments).to_s
end
|
ruby
|
{
"resource": ""
}
|
q8878
|
Weechat.Helper.print_callback
|
train
|
# Normalizes the raw arguments WeeChat passes to a print hook
# (pointer, epoch string, comma-joined tags, 0/1 flags) into a
# PrintedLine and dispatches it to the hook registered under +id+.
def print_callback(id, buffer, date, tags, displayed, highlight, prefix, message)
  line = PrintedLine.new(
    Weechat::Buffer.from_ptr(buffer),
    Time.at(date.to_i),
    tags.split(","),
    Weechat.integer_to_bool(displayed),
    Weechat.integer_to_bool(highlight),
    prefix,
    message
  )
  Weechat::Hooks::Print.find_by_id(id).call(line)
end
|
ruby
|
{
"resource": ""
}
|
q8879
|
Weechat.Helper.signal_callback
|
train
|
# Applies any registered payload transformation for +signal+, then
# dispatches the signal to its registered hook.
def signal_callback(id, signal, data)
  payload = Weechat::Utilities.apply_transformation(signal, data, SignalCallbackTransformations)
  Weechat::Hooks::Signal.find_by_id(id).call(signal, payload)
end
|
ruby
|
{
"resource": ""
}
|
q8880
|
Weechat.Helper.config_callback
|
train
|
# Dispatches a config-change notification to the hook registered
# under +id+, returning the hook's result. (The previous version
# assigned the result to an unused local variable.)
def config_callback(id, option, value)
  Weechat::Hooks::Config.find_by_id(id).call(option, value)
end
|
ruby
|
{
"resource": ""
}
|
q8881
|
Weechat.Helper.process_callback
|
train
|
# Translates WeeChat's process-hook status codes into symbols and
# dispatches output to the Process hook registered under +id+.
# When the hook is collecting, stdout/stderr are buffered and the
# callback only fires once the process is no longer running.
def process_callback(id, command, code, stdout, stderr)
  code = case code
         when Weechat::WEECHAT_HOOK_PROCESS_RUNNING
           :running
         when Weechat::WEECHAT_HOOK_PROCESS_ERROR
           :error
         else
           code # final exit code, passed through unchanged
         end
  process = Weechat::Process.find_by_id(id)
  if process.collect?
    process.buffer(stdout, stderr)
    # The old condition was `code == :error || code != :running`,
    # which reduces to `code != :running` since :error != :running.
    if code != :running
      process.call(code, process.stdout, process.stderr)
    end
  else
    process.call(code, stdout, stderr)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8882
|
Weechat.Helper.modifier_callback
|
train
|
# Dispatches a modifier hook: transforms the modifier data, parses
# the modified string into a Line, invokes the callback (searched
# across all hook classes since modifiers may be registered on any),
# and transforms the return value back into the string WeeChat
# expects.
def modifier_callback(id, modifier, modifier_data, s)
  classes = Weechat::Hook.hook_classes
  # Per-modifier input transformation (e.g. pointer -> object).
  modifier_data = Weechat::Utilities.apply_transformation(modifier, modifier_data, ModifierCallbackTransformations)
  # Normalize to an argument list so single values and tuples unify.
  modifier_data = [modifier_data] unless modifier_data.is_a?(Array)
  args = modifier_data + [Weechat::Line.parse(s)]
  # The id is unique across hook classes; take the first class that knows it.
  callback = classes.map {|cls| cls.find_by_id(id)}.compact.first
  ret = callback.call(*args)
  # Per-modifier return transformation; WeeChat requires a String.
  return Weechat::Utilities.apply_transformation(modifier, ret, ModifierCallbackRTransformations).to_s
end
|
ruby
|
{
"resource": ""
}
|
q8883
|
Basecampeverest.Connect.auth=
|
train
|
# Configures authentication for all subsequent requests.
#
# Accepts a hash with either an :access_token (Bearer token auth) or
# :username and :password (HTTP basic auth); string keys are also
# accepted. Whichever scheme is chosen, the other scheme's leftovers
# are cleared so the two never conflict.
#
# @param authorization [Hash] authentication credentials
# @raise [RuntimeError] when neither scheme's keys are present
def auth=(authorization)
  # Normalize keys so both string and symbol keys are accepted.
  authorization = authorization.each_with_object({}) do |(k, v), h|
    h[k.to_sym] = v
  end
  if authorization.has_key? :access_token
    # Token auth: drop any basic_auth and send a Bearer header.
    self.class.default_options.delete(:basic_auth)
    self.class.headers.merge!("Authorization" => "Bearer #{authorization[:access_token]}")
  elsif authorization.has_key?(:username) && authorization.has_key?(:password)
    # Basic auth: hand off to HTTParty and drop any previously-set
    # Authorization header so a stale Bearer token cannot linger.
    self.class.basic_auth authorization[:username], authorization[:password]
    self.class.headers.delete("Authorization")
  else
    # Neither a token nor a username/password pair was supplied.
    raise "Incomplete Authorization hash. Please check the Authentication Hash."
  end
end
|
ruby
|
{
"resource": ""
}
|
q8884
|
Ponder.UserList.kill_zombie_users
|
train
|
# Removes users that are no longer present in +users+ from @users,
# never removing the Thaum's own user entry. Runs under @mutex to
# stay consistent with concurrent user-list updates.
def kill_zombie_users(users)
  @mutex.synchronize do
    zombies = @users - users - Set.new([@thaum_user])
    zombies.each { |zombie| @users.delete(zombie) }
  end
end
|
ruby
|
{
"resource": ""
}
|
q8885
|
Annotator.Attributes.lines
|
train
|
# Renders the attribute table back into annotation comment lines:
# the header first, then one "# * name [type] - desc" line per
# attribute, wrapped to the line limit with continuation lines
# prefixed as comments.
#
# @return [Array<String>] annotation lines (entries may contain "\n")
def lines
  ret = [Attributes::HEADER]
  # Sort by name, but id goes first
  @attrs.sort_by{|x| x[:name] == 'id' ? '_' : x[:name]}.each do |row|
    line = "# * #{row[:name]} [#{row[:type]}]#{row[:desc].to_s.empty? ? "" : " - #{row[:desc]}"}"
    # split into lines that don't exceed 80 chars
    lt = wrap_text(line, MAX_CHARS_PER_LINE-3).split("\n")
    # Re-prefix wrapped continuation lines so they stay comments.
    line = ([lt[0]] + lt[1..-1].map{|x| "#   #{x}"}).join("\n")
    ret << line
  end
  ret
end
|
ruby
|
{
"resource": ""
}
|
q8886
|
Annotator.Attributes.update!
|
train
|
# Synchronizes the parsed annotation attributes with the model's
# actual database columns: updates changed types, refreshes default
# descriptions, appends new columns and drops columns that no longer
# exist. Progress is reported to stdout with M/A/D markers.
#
# @return [Array<Hash>] the updated attribute rows
def update!
  @model.columns.each do |column|
    if row = @attrs.find {|x| x[:name] == column.name}
      # Column already annotated: refresh type, or (only for rows
      # still carrying the placeholder description) the description.
      if row[:type] != type_str(column)
        puts "  M #{@model}##{column.name} [#{row[:type]} -> #{type_str(column)}]"
        row[:type] = type_str(column)
      elsif row[:desc] == InitialDescription::DEFAULT_DESCRIPTION
        new_desc = InitialDescription.for(@model, column.name)
        if row[:desc] != new_desc
          puts "  M #{@model}##{column.name} description updated"
          row[:desc] = new_desc
        end
      end
    else
      # Column not annotated yet: append a fresh row.
      puts "  A #{@model}##{column.name} [#{type_str(column)}]"
      @attrs << {
        :name => column.name,
        :type => type_str(column),
        :desc => InitialDescription.for(@model, column.name)
      }
    end
  end
  # find columns that no more exist in db
  orphans = @attrs.map{|x| x[:name]} - @model.columns.map(&:name)
  unless orphans.empty?
    orphans.each do |orphan|
      puts "  D #{@model}##{orphan}"
      @attrs = @attrs.select {|x| x[:name] != orphan}
    end
  end
  @attrs
end
|
ruby
|
{
"resource": ""
}
|
q8887
|
Annotator.Attributes.parse
|
train
|
# Parses annotation comment lines into @attrs entries. A line
# matching R_ATTRIBUTE starts a new attribute (name, type, desc);
# a line matching R_ATTRIBUTE_NEXT_LINE extends the previous
# attribute's description.
def parse
  @lines.each do |line|
    if (attr_match = line.match(R_ATTRIBUTE))
      @attrs << { :name => attr_match[1].strip,
                  :type => attr_match[2].strip,
                  :desc => attr_match[4].strip }
    elsif (cont_match = line.match(R_ATTRIBUTE_NEXT_LINE))
      @attrs[-1][:desc] += " #{cont_match[1].strip}"
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8888
|
Annotator.Attributes.truncate_default
|
train
|
# Renders a column default for display: strips surrounding single
# quotes, truncates values longer than 10 characters to a
# 11-char-plus-"..." form, and returns the inspected string.
# Non-string defaults are returned untouched.
def truncate_default(value)
  return value unless value.kind_of?(String)
  # Strip quoting in place (the original also mutates its argument).
  value.sub!(/^'(.*)'$/m, '\1')
  shown = value.size > 10 ? "#{value[0..10]}..." : value
  shown.inspect
end
|
ruby
|
{
"resource": ""
}
|
q8889
|
Annotator.Attributes.type_str
|
train
|
# Builds the human-readable type description for a column, e.g.
# "integer, primary, not null" or "integer, limit=4".
#
# @param c [#type,#primary,#default,#null,#limit] column-like object
# @return [String]
def type_str(c)
  parts = [c.type.to_s]
  parts << "primary" if c.primary
  parts << "default=#{truncate_default(c.default)}" if c.default
  parts << "not null" unless c.null
  # NOTE(review): limit is hidden whenever it equals 255 OR the column
  # is a string — this mirrors the original condition; confirm intent.
  parts << "limit=#{c.limit}" if c.limit && (c.limit != 255 && c.type != :string)
  parts.join(", ")
end
|
ruby
|
{
"resource": ""
}
|
q8890
|
Bitsa.BitsaApp.run
|
train
|
# Application entry point: loads settings, builds the contacts cache
# and executes the requested command.
def run(global_opts, cmd, search_data)
  settings = load_settings(global_opts)
  cache = ContactsCache.new(settings.cache_file_path, settings.auto_check)
  process_cmd(cmd, search_data, settings.login, settings.password, cache)
end
|
ruby
|
{
"resource": ""
}
|
q8891
|
Bitsa.BitsaApp.load_settings
|
train
|
# Builds a Settings object from the config file plus any
# command-line overrides in +global_opts+.
def load_settings(global_opts)
  config = ConfigFile.new(global_opts[:config_file])
  Settings.new.tap { |settings| settings.load(config, global_opts) }
end
|
ruby
|
{
"resource": ""
}
|
q8892
|
Bitsa.BitsaApp.search
|
train
|
# Prints cache matches as "EMAIL<TAB>NAME" lines for mutt's
# query_command protocol.
def search(cache, search_data)
  # mutt ignores the first line of query output, so emit a blank one.
  puts ''
  cache.search(search_data).each do |email, name|
    puts "#{email}\t#{name}"
  end
end
|
ruby
|
{
"resource": ""
}
|
q8893
|
Aker::Form.LoginFormAssetProvider.asset_root
|
train
|
# Absolute path to the bundled form-login assets directory, resolved
# relative to this source file (three levels up, then assets/aker/form).
def asset_root
  relative = File.join('..', '..', '..', 'assets', 'aker', 'form')
  File.expand_path(relative, File.dirname(__FILE__))
end
|
ruby
|
{
"resource": ""
}
|
q8894
|
Aker::Form.LoginFormAssetProvider.login_html
|
train
|
# Renders the login form HTML from the bundled ERB template.
#
# NOTE: the template is evaluated against this method's binding, so
# the locals +login_base+, +template+ and the +options+ parameter are
# visible to the template by name — do not rename them.
def login_html(env, options = {})
  # Form action target: mount point plus the login path.
  login_base = env['SCRIPT_NAME'] + login_path(env)
  template = File.read(File.join(asset_root, 'login.html.erb'))
  ERB.new(template).result(binding)
end
|
ruby
|
{
"resource": ""
}
|
q8895
|
PuppetBox.Result.passed?
|
train
|
# True when every report entry has status PS_OK, false when any
# entry failed, and nil when there are no report entries at all
# (the nil tri-state distinguishes "nothing ran" from "all passed").
def passed?
  result = nil
  @report.each do |entry|
    entry_ok = (entry[:status] == PS_OK)
    result = result.nil? ? entry_ok : (result & entry_ok)
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q8896
|
RailsExceptionLogger.LoggedExceptionsController.get_auth_data
|
train
|
# Extracts HTTP Basic credentials from the request.
#
# Scans the configured @@http_auth_headers for the first header the
# request carries, and decodes it when it is a Basic challenge.
# Returns [username, password], or [nil, nil] when no Basic
# credentials are present.
def get_auth_data
  auth_key  = @@http_auth_headers.detect { |h| request.env.has_key?(h) }
  # Header value is "Basic <base64>", so split yields the scheme and payload.
  auth_data = request.env[auth_key].to_s.split unless auth_key.blank?
  return auth_data && auth_data[0] == 'Basic' ? Base64.decode64(auth_data[1]).split(':')[0..1] : [nil, nil]
end
|
ruby
|
{
"resource": ""
}
|
q8897
|
Mongolicious.Backup.parse_jobfile
|
train
|
# Loads and parses the YAML job file at +jobfile+.
# Logs an error and exits when the file is missing or not valid YAML.
#
# @param jobfile [String] path to the YAML job file
# @return [Object] the parsed YAML document
def parse_jobfile(jobfile)
  YAML.load(File.read(jobfile))
rescue Errno::ENOENT
  # Report the path we actually tried to read (the old message used
  # ARGV[0], which is wrong whenever the path came from elsewhere).
  Mongolicious.logger.error("Could not find job file at #{jobfile}")
  exit
rescue Psych::SyntaxError, ArgumentError => e
  # Psych raises Psych::SyntaxError (not an ArgumentError) for bad
  # YAML; ArgumentError is kept for older YAML engines.
  Mongolicious.logger.error("Could not parse job file #{jobfile} - #{e}")
  exit
end
|
ruby
|
{
"resource": ""
}
|
q8898
|
Mongolicious.Backup.schedule_jobs
|
train
|
# Registers every job with a Rufus scheduler — by cron expression
# when the job defines one, otherwise by fixed interval — and then
# blocks the calling thread so the scheduled jobs keep running.
def schedule_jobs(jobs)
  scheduler = Rufus::Scheduler.start_new
  jobs.each do |job|
    db_name = job['db'].split('/').last
    if job['cron']
      Mongolicious.logger.info("Scheduled new job for #{db_name} with cron: #{job['cron']}")
      scheduler.cron job['cron'] do
        backup(job)
      end
    else
      # Log at scheduling time; the old code logged inside the block,
      # i.e. once per run instead of once when the job is registered.
      Mongolicious.logger.info("Scheduled new job for #{db_name} with interval: #{job['interval']}")
      scheduler.every job['interval'] do
        backup(job)
      end
    end
  end
  scheduler.join
end
|
ruby
|
{
"resource": ""
}
|
q8899
|
Kanpachi.ResourceList.add
|
train
|
# Registers +resource+ in the list, keyed by its route.
#
# @param resource [#route,#http_verb,#url] resource to register
# @raise [DuplicateResource] when a resource with the same route exists
def add(resource)
  route = resource.route
  if @list.key?(route)
    raise DuplicateResource,
          "A resource accessible via #{resource.http_verb} #{resource.url} already exists"
  end
  @list[route] = resource
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.