_id
stringlengths 2
6
| title
stringlengths 9
130
| partition
stringclasses 3
values | text
stringlengths 66
10.5k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q8500
|
FileBlobs.ActionControllerDataStreamingExtensions.send_file_blob
|
train
|
# Streams a file blob to the client, honoring HTTP conditional GET.
#
# When the client's If-None-Match header already matches the blob id,
# responds 304 Not Modified with no body; otherwise sends the blob data
# with an ETag so later requests can be served from the client cache.
def send_file_blob(proxy, options = {})
  if request.get_header(HTTP_IF_NONE_MATCH) == proxy.blob_id
    head :not_modified
    return
  end
  response.headers[ETAG] = proxy.blob_id
  defaults = { type: proxy.mime_type, filename: proxy.original_name }
  send_data proxy.data, defaults.merge!(options)
end
|
ruby
|
{
"resource": ""
}
|
q8501
|
Diversion.Configurable.validate_configuration!
|
train
|
# Validates the configured settings, raising Error::ConfigurationError
# with a descriptive message at the FIRST invalid setting found.
#
# Checks, in order:
# - @host: non-empty String, must NOT end with a trailing slash
# - @path: non-empty String, MUST end with a trailing slash
# - @port: positive Integer
# - @sign_key: String when present (nil is allowed and disables signing)
# - @sign_length: Integer in 0..Signing::MAX_SIGN_LENGTH
# - @encode_uris: non-empty Array of URI schemes
# - @url_encoding / @url_decoding: Modules registered in
#   Encode::ENCODERS / Decode::DECODERS
def validate_configuration!
  unless @host.is_a?(String) && @host.length > 0
    raise(Error::ConfigurationError, "Invalid host specified: Host must contain the host to redirect to.")
  end
  if @host.end_with?('/')
    raise(Error::ConfigurationError, "Invalid host specified: #{@host} should not end with a trailing slash.")
  end
  unless @path.is_a?(String) && @path.length > 0
    raise(Error::ConfigurationError, "Invalid path specified: Path must contain a path to redirect to.")
  end
  unless @path.end_with?('/')
    raise(Error::ConfigurationError, "Invalid path specified: #{@path} should end with a trailing slash.")
  end
  unless @port.is_a?(Integer) && @port > 0
    raise(Error::ConfigurationError, "Invalid port specified: #{@port} must be an integer and non-zero.")
  end
  # nil sign_key is valid; only a present, non-String value is rejected.
  if !@sign_key.nil? && !@sign_key.is_a?(String)
    raise(Error::ConfigurationError, "Invalid sign_key specified: #{@sign_key} must be a String.")
  end
  unless @sign_length.is_a?(Integer) && @sign_length.between?(0, Signing::MAX_SIGN_LENGTH)
    raise(Error::ConfigurationError, "Invalid sign_length specified: #{@sign_length} must be an integer between 0-#{Signing::MAX_SIGN_LENGTH}.")
  end
  unless @encode_uris.is_a?(Array) && @encode_uris.count > 0
    raise(Error::ConfigurationError, "Invalid encode_uris specified: #{@encode_uris} must be an array with at least one URI scheme.")
  end
  unless @url_encoding.is_a?(Module) && Encode::ENCODERS.include?(@url_encoding)
    raise(Error::ConfigurationError, "Invalid url_encoding specified: #{@url_encoding} must be a valid encoder module.")
  end
  unless @url_decoding.is_a?(Module) && Decode::DECODERS.include?(@url_decoding)
    raise(Error::ConfigurationError, "Invalid url_decoding specified: #{@url_decoding} must be a valid decoder module.")
  end
end
|
ruby
|
{
"resource": ""
}
|
q8502
|
Brocade.InstanceMethods.barcode
|
train
|
# Builds a Barby Code128 barcode for this record's barcodable value.
#
# opts[:subset] may force subset 'A', 'B' or 'C'; without it the most
# space-efficient symbology is chosen automatically.
def barcode(opts = {})
  data = format_for_subset_c_if_applicable send(barcodable)
  subset = opts[:subset]
  return most_efficient_barcode_for(data) unless subset
  case subset
  when 'A' then Barby::Code128A.new(data)
  when 'B' then Barby::Code128B.new(data)
  when 'C' then Barby::Code128C.new(data)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8503
|
Brocade.InstanceMethods.create_barcode
|
train
|
# Renders the barcode as a PNG at barcode_path, creating parent
# directories as needed and making the file world read/writable
# (modulo the process umask).
def create_barcode(opts = {})
  png_path = barcode_path
  FileUtils.mkdir_p(File.dirname(png_path))
  png = barcode(opts).to_png(self.class.options.merge(opts))
  File.open(png_path, 'wb') { |io| io.write(png) }
  FileUtils.chmod(0666 & ~File.umask, png_path)
end
|
ruby
|
{
"resource": ""
}
|
q8504
|
Checkdin.Leaderboard.classification_leaderboard
|
train
|
# Fetches the classification leaderboard for the given campaign from
# the API and returns either the parsed body or an error object.
def classification_leaderboard(campaign_id)
  response = connection.get do |req|
    req.url "campaigns/#{campaign_id}/classification_leaderboard"
  end
  return_error_or_body(response)
end
|
ruby
|
{
"resource": ""
}
|
q8505
|
DummyText.Base.character
|
train
|
# Emits dummy character content: records the "c" placeholder for
# out-of-order detection, then renders `count` characters as raw markup.
def character(count, template)
  out_of_order("c", count, template)
  rendered = Character.new.render(count, template)
  raw rendered
end
|
ruby
|
{
"resource": ""
}
|
q8506
|
DummyText.Base.word
|
train
|
# Emits dummy word content: records the "w" placeholder for
# out-of-order detection, then renders `count` words as raw markup.
def word(count, template)
  out_of_order("w", count, template)
  rendered = Word.new.render(count, template)
  raw rendered
end
|
ruby
|
{
"resource": ""
}
|
q8507
|
DummyText.Base.paragraph
|
train
|
# Emits `count` dummy paragraphs, each wrapped in a <p> element.
# Paragraph#render supplies the text used for each index.
def paragraph(count, template)
  out_of_order("p", count, template)
  data = Paragraph.new.render(template)
  html = (0...count).map { |idx| "<p>#{data[idx]}</p>" }.join
  raw html
end
|
ruby
|
{
"resource": ""
}
|
q8508
|
CaRuby.Database.open
|
train
|
# Opens a database session, yields self to the required block, and
# guarantees the session is closed afterwards.
#
# @param user [String, nil] login user (falls back to configured user)
# @param password [String, nil] login password
# @raise [ArgumentError] when no block is given
# @raise [DatabaseError] when the database is already open
def open(user=nil, password=nil)
  raise ArgumentError.new("Database open requires an execution block") unless block_given?
  raise DatabaseError.new("The caRuby application database is already in use.") if open?
  # reset the execution timers
  persistence_services.each { |svc| svc.timer.reset }
  # Start the session.
  start_session(user, password)
  # Call the block and close when done. The original one-line
  # `yield(self) ensure close` is not valid Ruby syntax; `ensure`
  # requires a begin/end (or method-level) clause.
  begin
    yield(self)
  ensure
    close
  end
end
|
ruby
|
{
"resource": ""
}
|
q8509
|
CaRuby.Database.close
|
train
|
# Terminates the application session, if any, and clears the local
# cache. Termination failures are logged but never propagated, so
# close is always safe to call.
def close
  return if @session.nil?
  begin
    @session.terminate_session
  rescue Exception => e
    # Best-effort: a failed logout must not prevent local cleanup.
    logger.error("Session termination unsuccessful - #{e.message}")
  end
  # clear the cache
  clear
  logger.info("Disconnected from application server.")
  @session = nil
end
|
ruby
|
{
"resource": ""
}
|
q8510
|
CaRuby.Database.perform
|
train
|
# Performs a database operation `op` on `obj` within the nested
# operation context, logging entry/exit and delegating the actual work
# to perform_operation with the given block.
#
# The operation is pushed onto @operations for the duration of the
# call; when the stack empties again the transient set is cleared.
# Only the innermost failing operation logs the error (guarded by
# @error), but the exception is always re-raised.
def perform(op, obj, opts=nil, &block)
  op_s = op.to_s.capitalize_first
  pa = Options.get(:attribute, opts)
  attr_s = " #{pa}" if pa
  ag_s = " autogenerated" if Options.get(:autogenerated, opts)
  ctxt_s = " in context #{print_operations}" unless @operations.empty?
  logger.info(">> #{op_s}#{ag_s} #{obj.pp_s(:single_line)}#{attr_s}#{ctxt_s}...")
  # Clear the error flag.
  @error = nil
  # Push the operation on the nested operation stack.
  @operations.push(Operation.new(op, obj, opts))
  begin
    # perform the operation
    result = perform_operation(&block)
  rescue Exception => e
    # If the current operation is the immediate cause, then print the
    # error to the log.
    if @error.nil? then
      msg = "Error performing #{op} on #{obj}:\n#{e.message}\n#{obj.dump}\n#{e.backtrace.qp}"
      logger.error(msg)
      @error = e
    end
    raise e
  ensure
    # the operation is done
    @operations.pop
    # If this is a top-level operation, then clear the transient set.
    if @operations.empty? then @transients.clear end
  end
  logger.info("<< Completed #{obj.qp}#{attr_s} #{op}.")
  result
end
|
ruby
|
{
"resource": ""
}
|
q8511
|
CaRuby.Database.start_session
|
train
|
# Establishes an application service session, falling back to the
# credentials captured at construction when none are given.
#
# @raise [DatabaseError] when either credential is missing
def start_session(user=nil, password=nil)
  user ||= @user
  password ||= @password
  raise DatabaseError.new('The caRuby application is missing the login user') if user.nil?
  raise DatabaseError.new('The caRuby application is missing the login password') if password.nil?
  @session = ClientSession.instance
  connect(user, password)
end
|
ruby
|
{
"resource": ""
}
|
q8512
|
CaRuby.Database.print_operations
|
train
|
# Formats the nested operation stack, innermost first, into a quoted
# print string, e.g. "Create Patient address".
def print_operations
  descriptions = @operations.reverse.map do |operation|
    desc = "#{operation.type.to_s.capitalize_first} #{operation.subject.qp}"
    operation.attribute ? "#{desc} #{operation.attribute}" : desc
  end
  descriptions.qp
end
|
ruby
|
{
"resource": ""
}
|
q8513
|
CaRuby.Database.connect
|
train
|
# Connects the session to the application server.
#
# @param user [String] the login user id
# @param password [String] the login password
# @raise whatever the underlying session start raises on failure
def connect(user, password)
  logger.debug { "Connecting to application server with login id #{user}..." }
  begin
    @session.start_session(user, password)
  rescue Exception => e
    # Log the failure WITHOUT echoing the password; credentials must
    # never be written to the log (the old message included it).
    logger.error("Login of #{user} was unsuccessful - #{e.message}")
    raise e
  end
  logger.info("Connected to application server.")
end
|
ruby
|
{
"resource": ""
}
|
q8514
|
MarkovWords.FileStore.retrieve_data
|
train
|
# Looks up the marshaled value stored under `key` and returns the
# unmarshaled object, or nil when no row matches.
def retrieve_data(key = '')
  key = key.to_s unless key.is_a? String
  rows = @db.execute 'SELECT value FROM data WHERE key = ?', key
  first_row = rows[0]
  Marshal.load(first_row[0]) unless first_row.nil?
end
|
ruby
|
{
"resource": ""
}
|
q8515
|
GitOrgFileScanner.Scanner.setup_client
|
train
|
# Builds an Octokit client for the given OAuth token with automatic
# pagination enabled, so list calls return complete result sets.
def setup_client(token)
  Octokit::Client.new(access_token: token).tap do |client|
    client.auto_paginate = true
  end
end
|
ruby
|
{
"resource": ""
}
|
q8516
|
Derelict.Parser::PluginList.plugins
|
train
|
# Parses the plugin-list output into a Set of plugin objects.
#
# @raise [NeedsReinstall] when the output says plugins need reinstalling
def plugins
  raise NeedsReinstall, output if needs_reinstall?
  plugin_lines.map { |line| parse_line(line.match(PARSE_PLUGIN)) }.to_set
end
|
ruby
|
{
"resource": ""
}
|
q8517
|
Derelict.Parser::PluginList.parse_line
|
train
|
# Converts a single PARSE_PLUGIN MatchData into a Derelict::Plugin
# built from its first two captures.
#
# @raise [InvalidFormat] when the line did not match at all
def parse_line(match)
  raise InvalidFormat.new "Couldn't parse plugin" if match.nil?
  name, version = match.captures[0..1]
  Derelict::Plugin.new(name, version)
end
|
ruby
|
{
"resource": ""
}
|
q8518
|
XiamiSauce.Track.sospa
|
train
|
# De-scrambles an obfuscated track location string.
#
# The first character is a column count; the remainder is distributed
# into columns (the first `remainder` columns get `row+1` characters,
# the rest `row`), read back row-major, URL-unescaped, and '^' mapped
# back to '0'.
#
# NOTE(review): `address = [[nil]*col]*(row+1)` repeats the SAME inner
# array object, but the code then indexes `address` flat
# (`address[col * index + i] = ...`), overwriting the nested arrays —
# confirm this flat-write behavior is intended.
# On any error the location string itself is raised (becoming the
# message of a RuntimeError).
def sospa(location)
  string = location[1..-1]
  col = location[0].to_i
  row = (string.length.to_f / col).floor
  remainder = string.length % col
  address = [[nil]*col]*(row+1)
  sizes = [row+1] * remainder + [row] * (col - remainder)
  pos = 0
  sizes.each_with_index { |size, i|
    size.times { |index| address[col * index + i] = string[pos + index] }
    pos += size
  }
  address = CGI::unescape(address.join).gsub('^', '0')
rescue
  raise location
end
|
ruby
|
{
"resource": ""
}
|
q8519
|
SimpleSolrClient.Client._post_json
|
train
|
# POSTs `object_to_post` as a JSON body to `path` and returns the
# parsed JSON response as plain Ruby data.
def _post_json(path, object_to_post)
  headers = { 'Content-type' => 'application/json' }
  resp = @rawclient.post(url(path), JSON.dump(object_to_post), headers)
  JSON.parse(resp.content)
end
|
ruby
|
{
"resource": ""
}
|
q8520
|
SimpleSolrClient.Client.get
|
train
|
# Issues a GET against the Solr API and wraps the raw response in
# `response_type`, defaulting to a GenericResponse.
def get(path, args = {}, response_type = nil)
  response_type ||= SimpleSolrClient::Response::GenericResponse
  response_type.new(_get(path, args))
end
|
ruby
|
{
"resource": ""
}
|
q8521
|
SimpleSolrClient.Client.post_json
|
train
|
# POSTs a JSON payload to the Solr API and wraps the raw response in
# `response_type`, defaulting to a GenericResponse.
def post_json(path, object_to_post, response_type = nil)
  response_type ||= SimpleSolrClient::Response::GenericResponse
  response_type.new(_post_json(path, object_to_post))
end
|
ruby
|
{
"resource": ""
}
|
q8522
|
SimpleSolrClient.Client.core
|
train
|
# Returns a Core handle for `corename`, verifying first that the
# server actually knows about the core.
def core(corename)
  name = corename.to_s
  raise "Core #{corename} not found" unless cores.include?(name)
  SimpleSolrClient::Core.new(@base_url, name)
end
|
ruby
|
{
"resource": ""
}
|
q8523
|
SimpleSolrClient.Client.new_core
|
train
|
# Creates a brand-new Solr core named `corename`, backed by a fresh
# temp copy of the sample core configuration, and returns its handle.
def new_core(corename)
  instance_dir = temp_core_dir_setup(corename)
  get('admin/cores',
      :wt => 'json',
      :action => 'CREATE',
      :name => corename,
      :instanceDir => instance_dir)
  core(corename)
end
|
ruby
|
{
"resource": ""
}
|
q8524
|
SimpleSolrClient.Client.temp_core_dir_setup
|
train
|
# Copies the sample core configuration into a unique temporary
# directory and returns that directory's path.
def temp_core_dir_setup(corename)
  target = Dir.mktmpdir("simple_solr_#{corename}_#{SecureRandom.uuid}")
  FileUtils.cp_r(File.join(SAMPLE_CORE_DIR, '.'), target)
  target
end
|
ruby
|
{
"resource": ""
}
|
q8525
|
Jimson.ClientHelper.send_single_request
|
train
|
# Sends a single JSON-RPC request for `method` with `args` and returns
# the raw response body.
#
# @raise [Jimson::ClientError::InvalidResponse] on an empty response
#   (re-wrapped into InternalError by the rescue below, as before)
# @raise [Jimson::ClientError::InternalError] wrapping any failure
def send_single_request(method, args)
  post_data = {
    'jsonrpc' => JSON_RPC_VERSION,
    'method'  => method,
    'params'  => args,
    'id'      => self.class.make_id
  }.to_json
  resp = RestClient.post(@url, post_data, content_type: 'application/json', user_agent: "syncano-ruby-#{Syncano::VERSION}")
  if resp.nil? || resp.body.nil? || resp.body.empty?
    raise Jimson::ClientError::InvalidResponse.new
  end
  resp.body
rescue StandardError => e
  # Rescue StandardError, not Exception: signals and exit requests must
  # not be swallowed and re-labelled as an RPC error.
  raise Jimson::ClientError::InternalError.new(e)
end
|
ruby
|
{
"resource": ""
}
|
q8526
|
Jimson.ClientHelper.send_batch_request
|
train
|
# POSTs an already-assembled JSON-RPC batch and returns the raw body.
#
# @raise [Jimson::ClientError::InvalidResponse] when the server returns
#   nothing usable
def send_batch_request(batch)
  resp = RestClient.post(@url,
                         batch.to_json,
                         content_type: 'application/json',
                         user_agent: "syncano-ruby-#{Syncano::VERSION}")
  invalid = resp.nil? || resp.body.nil? || resp.body.empty?
  raise Jimson::ClientError::InvalidResponse.new if invalid
  resp.body
end
|
ruby
|
{
"resource": ""
}
|
q8527
|
Jimson.ClientHelper.send_batch
|
train
|
# Sends the queued batch of requests, matches the parsed responses back
# onto the queued Request objects, resets the queue, and returns the
# original batch entries.
#
# @raise [Jimson::ClientError::InvalidJSON] when the body is not JSON
def send_batch
  batch = @batch.map(&:first) # get the requests
  response = send_batch_request(batch)
  begin
    responses = JSON.parse(response)
  rescue
    # BUG FIX: previously referenced an undefined local `json`, raising
    # NameError instead of InvalidJSON with the offending body.
    raise Jimson::ClientError::InvalidJSON.new(response)
  end
  process_batch_response(responses)
  responses = @batch
  @batch = []
  responses
end
|
ruby
|
{
"resource": ""
}
|
q8528
|
Smsified.Helpers.camelcase_keys
|
train
|
# Returns a copy of `options` with the known snake_case keys renamed to
# the camelCase names the SMSified API expects. Keys with falsy values
# are left untouched (matching the original truthiness checks), and the
# caller's hash is never mutated.
def camelcase_keys(options)
  options = options.clone
  { destination_address: :destinationAddress,
    notify_url:          :notifyURL,
    client_correlator:   :clientCorrelator,
    callback_data:       :callbackData }.each do |snake, camel|
    next unless options[snake]
    options[camel] = options.delete(snake)
  end
  options
end
|
ruby
|
{
"resource": ""
}
|
q8529
|
Smsified.Helpers.build_query_string
|
train
|
# Builds a URL query string from the option hash, camelCasing known
# keys first. The :address key may hold a single String (one address
# per line) or an enumerable of addresses, each emitted as its own
# `address=` pair; every value is CGI-escaped.
def build_query_string(options)
  options = camelcase_keys(options)
  query = ''
  append = lambda do |key, value|
    query += "#{ '&' if query != '' }#{key}=#{CGI.escape value}"
  end
  options.each do |k, v|
    if k == :address
      # The old RUBY_VERSION < 1.9 branch was identical to the
      # non-String branch and dead on every supported Ruby; removed.
      addresses = v.instance_of?(String) ? v.each_line : v
      addresses.each { |address| append.call('address', address) }
    else
      append.call(k.to_s, v)
    end
  end
  query
end
|
ruby
|
{
"resource": ""
}
|
q8530
|
Redlander.Serializing.to
|
train
|
# Serializes the model using librdf.
#
# @param options [Hash]
#   - :format    serializer name (e.g. "rdfxml", "ntriples")
#   - :mime_type optional MIME type hint
#   - :type_uri  optional serializer type URI
#   - :base_uri  optional base URI for relative names
#   - :file      when given, serialize to that file and return
#                true/false (zero? of the C return code); otherwise
#                return the serialized string
# @raise [RedlandError] when the serializer cannot be created
def to(options = {})
  format = options[:format].to_s
  mime_type = options[:mime_type] && options[:mime_type].to_s
  type_uri = options[:type_uri] && options[:type_uri].to_s
  base_uri = options[:base_uri] && options[:base_uri].to_s
  rdf_serializer = Redland.librdf_new_serializer(Redlander.rdf_world, format, mime_type, type_uri)
  raise RedlandError, "Failed to create a new serializer" if rdf_serializer.null?
  begin
    if options[:file]
      Redland.librdf_serializer_serialize_model_to_file(rdf_serializer, options[:file], base_uri, @rdf_model).zero?
    else
      Redland.librdf_serializer_serialize_model_to_string(rdf_serializer, base_uri, @rdf_model)
    end
  ensure
    # Always release the C serializer to avoid leaking native memory.
    Redland.librdf_free_serializer(rdf_serializer)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8531
|
Djinn.Base.start
|
train
|
# Starts the djinn as a background daemon.
#
# NOTE(review): `@config.update(config).update(load_config)` lets the
# loaded config file override the values passed in `config` — confirm
# that precedence is intended (the commented-out line suggests the
# author went back and forth).
#
# Inside the daemonized process: the optional block runs first, TERM
# and INT are trapped to exit cleanly, then either __start! (if the
# including class defines it) or perform(@config) runs the main loop.
def start config={}, &block
  @config.update(config).update(load_config)
  #@config = (config.empty?) ? load_config : config
  log "Starting #{name} in the background.."
  logfile = get_logfile(config)
  daemonize(logfile, get_pidfile(config)) do
    yield(self) if block_given?
    trap('TERM') { handle_exit }
    trap('INT') { handle_exit }
    (respond_to?(:__start!)) ? __start! : perform(@config)
    # If this process doesn't loop or otherwise breaks out of
    # the loop we still want to clean up after ourselves
    handle_exit
  end
end
|
ruby
|
{
"resource": ""
}
|
q8532
|
Djinn.Base.run
|
train
|
# Runs the djinn in the foreground (no daemonization); otherwise the
# same lifecycle as the daemonized variant: trap TERM/INT, run the
# optional block, then __start! or perform(@config), and clean up on
# fall-through.
#
# NOTE(review): as with the background variant, load_config overrides
# the passed-in `config` — confirm the intended precedence.
def run config={}, &block
  @config.update(config).update(load_config)
  # @config = (config.empty?) ? load_config : config
  log "Starting #{name} in the foreground.."
  trap('TERM') { handle_exit }
  trap('INT') { handle_exit }
  yield(self) if block_given?
  (respond_to?(:__start!)) ? __start! : perform(@config)
  # If this process doesn't loop or otherwise breaks out of
  # the loop we still want to clean up after ourselves
  handle_exit
end
|
ruby
|
{
"resource": ""
}
|
q8533
|
BitMagic.BitField.read_bits
|
train
|
# Reads the requested bit positions from the current value, returning a
# Hash that maps each bit index to its bit (0 or 1, via Integer#[]).
def read_bits(*args)
  result = {}
  args.each { |bit| result[bit] = @value[bit] }
  result
end
|
ruby
|
{
"resource": ""
}
|
q8534
|
BitMagic.BitField.read_field
|
train
|
# Packs the named bit positions into a single Integer: the first listed
# bit becomes bit 0 of the result, the second bit 1, and so on.
# Non-Integer entries contribute nothing at their position.
def read_field(*args)
  args.flatten.each_with_index.inject(0) do |acc, (bit, idx)|
    bit.is_a?(Integer) ? acc | ((@value[bit] || 0) << idx) : acc
  end
end
|
ruby
|
{
"resource": ""
}
|
q8535
|
Zcloudjp.Utils.parse_params
|
train
|
# Normalizes request parameters into a body hash: when :path is given
# the body is loaded from that file, otherwise the params are used
# directly; the result is wrapped under `key_word` unless already so.
def parse_params(params, key_word)
  body = params.has_key?(:path) ? load_file(params[:path], key_word) : params
  body.has_key?(key_word.to_sym) ? body : { key_word => body }
end
|
ruby
|
{
"resource": ""
}
|
q8536
|
Zcloudjp.Utils.load_file
|
train
|
# Loads and parses a JSON file into a Hash with symbolized keys.
#
# @param path [String] file path, expanded relative to the cwd
# @param key_word the key expected at the top level of the file
# @raise [RuntimeError] wrapping only the MESSAGE of read/parse
#   failures (`raise e.message` discards the original exception class)
#
# NOTE(review): the `data[key_word].map { |k,v| data[key_word][k] = v }`
# line re-assigns each pair to itself and appears to be a no-op —
# confirm whether a transformation was intended here.
def load_file(path, key_word)
  begin
    data = MultiJson.load(IO.read(File.expand_path(path)), symbolize_keys: true)
  rescue RuntimeError, Errno::ENOENT => e
    raise e.message
  rescue MultiJson::LoadError => e
    raise e.message
  end
  if data.has_key?(key_word)
    data[key_word].map { |k,v| data[key_word][k] = v } if data[key_word].is_a? Hash
  end
  data
end
|
ruby
|
{
"resource": ""
}
|
q8537
|
Pinion.Server.bundle_url
|
train
|
# Resolves the URL(s) for a named asset bundle. Outside production each
# member asset gets its own URL; in production a single fingerprinted
# bundle URL is returned.
#
# @raise [RuntimeError] when the bundle is unknown
def bundle_url(name)
  bundle = Bundle[name]
  raise "No such bundle: #{name}" unless bundle
  unless Pinion.environment == "production"
    return bundle.paths.map { |path| asset_url(path) }
  end
  ["#{@mount_point}/#{bundle.name}-#{bundle.checksum}.#{bundle.extension}"]
end
|
ruby
|
{
"resource": ""
}
|
q8538
|
EasyTag.Taggable.set_tags
|
train
|
# Replaces this record's tags within the given :context/:tagger scope.
#
# @param tag_list the raw tag list (string/array) to apply
# @param options [Hash] :context, :tagger, :downcase, :delimiter
#
# A given block overrides tag_list entirely (it is yielded `klass` and
# must return the tags). Existing taggings for the same context+tagger
# are destroyed before the new ones are created.
#
# NOTE(review): `SimgleTag::InvalidTag` looks like a typo (SimgleTag)
# and would raise NameError if ever reached; `tag` also cannot be nil
# after first_or_create — confirm the intended constant and guard.
def set_tags(tag_list, options = {})
  options.reverse_merge! :context => nil,
    :tagger => nil,
    :downcase => true,
    :delimiter => ','
  if block_given?
    tags = yield(klass)
  else
    tags = EasyTag::Tag.compact_tag_list(tag_list, options.slice(:downcase, :delimiter))
  end
  context = compact_context(options[:context])
  tagger = compact_tagger(options[:tagger])
  # Remove old tags
  self.taggings.where(:tag_context_id => context.try(:id), :tagger_id => tagger.try(:id)).destroy_all
  # TODO: should remove unused tags and contexts
  if tags
    tags.each do |t|
      tag = EasyTag::Tag.where(:name => t).first_or_create
      raise SimgleTag::InvalidTag if tag.nil?
      self.taggings.where(:tagger_id => tagger.try(:id), :tag_context_id => context.try(:id), :tag_id => tag.id).first_or_create
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8539
|
Rlocu2.Venue.external=
|
train
|
# Replaces the venue's external ID list, wrapping each raw hash from
# the API in an Rlocu2::ExternalID value object.
def external=(externals_list)
  @external = externals_list.map do |ext|
    Rlocu2::ExternalID.new(id: ext['id'], url: ext['url'], mobile_url: ext['mobile_url'])
  end
end
|
ruby
|
{
"resource": ""
}
|
q8540
|
FileCharLicker.Licker.forward_lines
|
train
|
# Reads forward from the wrapped file handle until `size` line breaks
# (\r\n, \r or \n) have been collected or EOF is reached; returns the
# accumulated text.
def forward_lines(size = 10)
  handle = @file
  buffer = ""
  until handle.eof? || buffer.scan(/\r\n|\r|\n/).size >= size
    buffer += handle.gets
  end
  buffer
end
|
ruby
|
{
"resource": ""
}
|
q8541
|
Parole.Comment.ensure_valid_role_for_commentable
|
train
|
# Validates the comment's role against the commentable's configuration:
# when roles are declared the role must be one of them; otherwise any
# non-blank role is invalid.
def ensure_valid_role_for_commentable
  allowed_roles = commentable.class.commentable_options[:roles]
  valid = allowed_roles.any? ? allowed_roles.include?(role) : role.blank?
  errors.add(:role, :invalid) unless valid
end
|
ruby
|
{
"resource": ""
}
|
q8542
|
Activr.Dispatcher.route
|
train
|
# Routes a stored activity to every registered timeline that accepts
# it, dispatching an async :timeline_handle hook per (recipient, route)
# pair that the timeline instance agrees to handle.
#
# @param activity the stored activity to route
# @return [Integer] the number of resolved recipient routes (counted
#   even when the timeline instance declines to handle the activity)
# @raise [RuntimeError] when the activity has not been stored yet
def route(activity)
  raise "Activity must be stored before routing: #{activity.inspect}" if !activity.stored?
  result = 0
  activity.run_callbacks(:route) do
    # iterate on all timelines
    Activr.registry.timelines.values.each do |timeline_class|
      # check if timeline refuses that activity
      next unless timeline_class.should_route_activity?(activity)
      # store activity in timelines
      self.recipients_for_timeline(timeline_class, activity).each do |recipient, route|
        result += 1
        timeline = timeline_class.new(recipient)
        if timeline.should_handle_activity?(activity, route)
          Activr::Async.hook(:timeline_handle, timeline, activity, route)
        end
      end
    end
  end
  result
end
|
ruby
|
{
"resource": ""
}
|
q8543
|
Activr.Dispatcher.recipients_for_timeline
|
train
|
# Resolves every route of `timeline_class` for `activity` and returns a
# Hash mapping each recipient to the first route that produced it
# (later routes never override an earlier one for the same recipient).
def recipients_for_timeline(timeline_class, activity)
  seen = {}
  timeline_class.routes_for_activity(activity.class).each do |route|
    route.resolve(activity).each do |recipient|
      rid = timeline_class.recipient_id(recipient)
      # keep only one route per recipient
      seen[rid] ||= { :rcpt => recipient, :route => route }
    end
  end
  seen.values.each_with_object({}) do |infos, memo|
    memo[infos[:rcpt]] = infos[:route]
  end
end
|
ruby
|
{
"resource": ""
}
|
q8544
|
SimpleFormDojo.DojoPropsMethods.get_and_merge_dojo_props!
|
train
|
# Collects dojo options into @dojo_props and attaches them to the
# input element. Persisted records get a dojo-compliant DOM id;
# otherwise the id is cleared so dojo generates its own internal id.
def get_and_merge_dojo_props!
  add_dojo_options_to_dojo_props
  if object.id.present?
    add_dojo_compliant_id
  else
    # Let dojo generate an internal id.
    input_html_options["id"] = nil
  end
  unless @dojo_props.blank?
    input_html_options[:'data-dojo-props'] =
      SimpleFormDojo::FormBuilder.encode_as_dojo_props(@dojo_props)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8545
|
NSISam.Client.store
|
train
|
# Stores `data` via POST, attaching the configured expiry when
# present, and returns the wrapped service response.
def store(data)
  payload = { :value => data }
  payload[:expire] = @expire if @expire
  Response.new(execute_request(prepare_request(:POST, payload.to_json)))
end
|
ruby
|
{
"resource": ""
}
|
q8546
|
NSISam.Client.store_file
|
train
|
# Stores a file's content under the given type key, Base64-encoded,
# together with its filename.
def store_file(file_content, filename, type=:file)
  encoded = Base64.encode64(file_content)
  store(type => encoded, :filename => filename)
end
|
ruby
|
{
"resource": ""
}
|
q8547
|
NSISam.Client.delete
|
train
|
# Deletes the value stored under `key` and returns the wrapped
# service response.
def delete(key)
  payload = { :key => key }.to_json
  Response.new(execute_request(prepare_request(:DELETE, payload)))
end
|
ruby
|
{
"resource": ""
}
|
q8548
|
NSISam.Client.get
|
train
|
# Fetches the value stored under `key`. When `expected_checksum` is
# given, the returned data is verified against it before wrapping.
def get(key, expected_checksum=nil)
  payload = { :key => key }.to_json
  response = execute_request(prepare_request(:GET, payload))
  unless expected_checksum.nil?
    verify_checksum(response["data"], expected_checksum)
  end
  Response.new(response)
end
|
ruby
|
{
"resource": ""
}
|
q8549
|
NSISam.Client.get_file
|
train
|
# Fetches a stored file and returns a Response whose 'file' entry
# holds the Base64-decoded content alongside the original filename.
def get_file(key, type=:file, expected_checksum = nil)
  stored = get(key, expected_checksum)
  Response.new(
    'key' => stored.key,
    'checksum' => stored.checksum,
    'filename' => stored.data['filename'],
    'file' => Base64.decode64(stored.data[type.to_s]),
    'deleted' => stored.deleted?)
end
|
ruby
|
{
"resource": ""
}
|
q8550
|
NSISam.Client.update
|
train
|
# Replaces the value stored under `key`, attaching the configured
# expiry when present, and returns the wrapped service response.
def update(key, value)
  payload = { :key => key, :value => value }
  payload[:expire] = @expire if @expire
  Response.new(execute_request(prepare_request(:PUT, payload.to_json)))
end
|
ruby
|
{
"resource": ""
}
|
q8551
|
NSISam.Client.update_file
|
train
|
# Re-uploads a file's content under an existing key, Base64-encoding
# it and storing the filename alongside.
def update_file(key, type=:file, new_content, filename)
  update(key, type => Base64.encode64(new_content), filename: filename)
end
|
ruby
|
{
"resource": ""
}
|
q8552
|
RubyEdit.CLI.configure
|
train
|
# CLI command entry point: shows help when --help was passed,
# otherwise lazily loads and runs the Configure command.
def configure(*)
  return invoke :help, ['configure'] if options[:help]
  require_relative 'commands/configure'
  RubyEdit::Commands::Configure.new(options).execute
end
|
ruby
|
{
"resource": ""
}
|
q8553
|
Dassets.Server.call!
|
train
|
# Rack entry point: serves the request from the asset pipeline when it
# targets a known asset file, otherwise passes it down the middleware
# stack unchanged.
def call!(env)
  request = Request.new(env)
  if request.for_asset_file?
    Response.new(env, request.asset_file).to_rack
  else
    @app.call(env)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8554
|
Brandish.PathSet.find_all
|
train
|
# Yields every existing path that matches `short` across the path set,
# searching the most recently added path first.
#
# @param short [String, Pathname] the short path to resolve
# @param options [Hash] merged over DEFAULT_FIND_OPTIONS; with
#   :file => true only regular files are yielded
# @return [Enumerator] when no block is given; nil otherwise
def find_all(short, options = {})
  return to_enum(:find_all, short, options) unless block_given?
  short = ::Pathname.new(short)
  options = DEFAULT_FIND_OPTIONS.merge(options)
  # Later-added paths take precedence, hence the reverse scan.
  @paths.reverse.each do |path|
    joined = path_join(path, short, options)
    yield joined if (options[:file] && joined.file?) || joined.exist?
  end
  nil
end
|
ruby
|
{
"resource": ""
}
|
q8555
|
Tay.Builder.get_compiled_file_content
|
train
|
# Renders the file at `path` through Tilt with the spec in scope;
# falls back to the raw file contents when no template engine applies.
def get_compiled_file_content(path)
  Tilt.new(path.to_s).render({}, :spec => spec)
rescue RuntimeError
  File.read(path)
end
|
ruby
|
{
"resource": ""
}
|
q8556
|
Tay.Builder.simple_compile_directory
|
train
|
# Copies (and optionally Tilt-compiles) every file in a source
# directory into the build directory.
#
# @param directory [String, Hash] a plain name (assumed under src/
#   unless it contains a slash) or a hash with :from/:as and an
#   optional :use_tilt flag
def simple_compile_directory(directory)
  if directory.is_a?(String)
    # If we just have a single dirname, assume it's under src
    from_directory = (directory[/\//] ? '' : 'src/') + directory
    directory = {
      :from => from_directory,
      :as => directory
    }
  end
  # Default :use_tilt to true only when unset. The previous
  # `directory[:use_tilt] |= true` forced it to true even when the
  # caller explicitly passed false, making the raw-copy branch dead.
  directory[:use_tilt] = true unless directory.key?(:use_tilt)
  Dir[@base_dir.join(directory[:from], '**/*')].each do |path|
    file_in_path = Pathname.new(path)
    next unless file_in_path.file?
    file_out_path = remap_path_to_build_directory(path, directory)
    if directory[:use_tilt]
      content = get_compiled_file_content(file_in_path)
      file_out_path = asset_output_filename(file_out_path, Tilt.mappings.keys)
    else
      content = File.read(file_in_path)
    end
    FileUtils.mkdir_p(file_out_path.dirname)
    File.open(file_out_path, 'w') do |f|
      f.write content
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8557
|
Tay.Builder.compile_files
|
train
|
# Compiles the given source files into the output directory, running
# each through Sprockets when its extension is registered there and
# copying it verbatim otherwise.
#
# @param files [Array<String>] paths relative to src/ as listed in the
#   tayfile (globbed with a trailing '*' to also catch compiled names)
def compile_files(files)
  files.each do |base_path|
    # We do this second glob in case the path provided in the tayfile
    # references a compiled version
    Dir[@base_dir.join('src', base_path + '*')].each do |path|
      path = Pathname.new(path).relative_path_from(@base_dir.join('src'))
      file_in_path = @base_dir.join('src', path)
      file_out_path = asset_output_filename(@output_dir.join(path), @sprockets.engines.keys)
      if @sprockets.extensions.include?(path.extname)
        content = @sprockets[file_in_path].to_s
      else
        content = File.read(file_in_path)
      end
      FileUtils.mkdir_p(file_out_path.dirname)
      File.open(file_out_path, 'w') do |f|
        f.write content
      end
    end
  end
end
|
ruby
|
{
"resource": ""
}
|
q8558
|
Tay.Builder.write_manifest
|
train
|
# Serializes the extension spec to manifest.json in the output
# directory, pretty-printed for readability.
def write_manifest
  manifest_json = JSON.pretty_generate(ManifestGenerator.new(spec).spec_as_json)
  File.open(@output_dir.join('manifest.json'), 'w') do |f|
    f.write manifest_json
  end
end
|
ruby
|
{
"resource": ""
}
|
q8559
|
Tay.Builder.create_sprockets_environment
|
train
|
# Builds the Sprockets environment, registering each asset source
# directory (most specific first) followed by the project root.
def create_sprockets_environment
  @sprockets = Sprockets::Environment.new
  %w[src/javascripts src/templates src/stylesheets src].each do |dir|
    @sprockets.append_path(@base_dir.join(dir).to_s)
  end
  @sprockets.append_path(@base_dir.to_s)
end
|
ruby
|
{
"resource": ""
}
|
q8560
|
Jabber.Simple.deliver
|
train
|
# Delivers `message` to every contact matching `jid`.
#
# When we are not yet subscribed to a contact, a subscription request
# is sent and the message is queued for deferred delivery instead —
# note the `return` exits deliver entirely, so remaining contacts from
# the same lookup are not processed in that case.
#
# @param message [Jabber::Message, #to_s] a prebuilt Message (re-
#   addressed to the contact) or a plain body
# @param type [Symbol] message type used when a body is given
def deliver(jid, message, type=:chat)
  contacts(jid) do |friend|
    unless subscribed_to? friend
      add(friend.jid)
      return deliver_deferred(friend.jid, message, type)
    end
    if message.kind_of?(Jabber::Message)
      msg = message
      msg.to = friend.jid
    else
      msg = Message.new(friend.jid)
      msg.type = type
      msg.body = message
    end
    send!(msg)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8561
|
Jabber.Simple.status
|
train
|
# Updates our presence and status message on the server, remembering
# both so they can be replayed after a reconnect.
def status(presence, message)
  @presence = presence
  @status_message = message
  send!(Presence.new(@presence, @status_message))
end
|
ruby
|
{
"resource": ""
}
|
q8562
|
Jabber.Simple.send!
|
train
|
# Sends a stanza, transparently reconnecting and retrying (up to three
# retries) on broken pipes, IO errors, and connection resets.
#
# For ECONNRESET the sleep backs off quadratically with the attempt
# number. BUG FIX: the old code used `attempts^2`, which is XOR in
# Ruby, not exponentiation — attempt 2 slept (2^2)=0 minutes + 60s.
def send!(msg)
  attempts = 0
  begin
    attempts += 1
    client.send(msg)
  rescue Errno::EPIPE, IOError => e
    sleep 1
    disconnect
    reconnect
    retry unless attempts > 3
    raise e
  rescue Errno::ECONNRESET => e
    sleep (attempts**2) * 60 + 60
    disconnect
    reconnect
    retry unless attempts > 3
    raise e
  end
end
|
ruby
|
{
"resource": ""
}
|
q8563
|
Jabber.Simple.deliver_deferred
|
train
|
# Queues a message for later delivery (used while waiting for the
# recipient to accept our subscription request).
def deliver_deferred(jid, message, type)
  entry = { :to => jid, :message => message, :type => type }
  queue(:pending_messages) << [entry]
end
|
ruby
|
{
"resource": ""
}
|
q8564
|
Jabber.Simple.start_deferred_delivery_thread
|
train
|
# Spawns the background thread that drains the pending-message queue:
# each popped entry is delivered if the recipient is now subscribed,
# otherwise it is pushed back onto the queue to be retried later.
# The pop blocks, so the loop does not busy-wait on an empty queue.
def start_deferred_delivery_thread #:nodoc:
  Thread.new {
    loop {
      # Entries are queued wrapped in arrays; flatten normalizes both
      # wrapped and bare entries into a flat list of message hashes.
      messages = [queue(:pending_messages).pop].flatten
      messages.each do |message|
        if subscribed_to?(message[:to])
          deliver(message[:to], message[:message], message[:type])
        else
          queue(:pending_messages) << message
        end
      end
    }
  }
end
|
ruby
|
{
"resource": ""
}
|
q8565
|
FentonShell.Key.create
|
train
|
# Generates an SSH key pair from the CLI options; saves a success or
# failure message and returns true/false accordingly.
def create(options)
  status, body = key_create(options)
  unless status
    save_message(body)
    return false
  end
  save_message('Key': ['created!'])
  true
end
|
ruby
|
{
"resource": ""
}
|
q8566
|
FentonShell.Key.key_create
|
train
|
# Generates an SSH key pair and writes the private and public key
# files (both chmod 0600) at options[:private_key] and
# "#{options[:private_key]}.pub".
#
# @return [Array(true, Hash)] a [status, body] pair
#
# NOTE(review): the returned body says 'creation failed' even though
# status is true — the caller only reads the body when status is
# false, so it is currently unreachable, but the message looks like a
# copy/paste mistake; confirm intent.
def key_create(options)
  ssh_key = key_generation(options)
  ssh_private_key_file = options[:private_key]
  ssh_public_key_file = "#{ssh_private_key_file}.pub"
  # TODO: - add to .fenton/config file
  File.write(ssh_private_key_file, ssh_key.private_key)
  File.chmod(0o600, ssh_private_key_file)
  File.write(ssh_public_key_file, ssh_key.ssh_public_key)
  File.chmod(0o600, ssh_public_key_file)
  [true, 'Key': ['creation failed']]
end
|
ruby
|
{
"resource": ""
}
|
q8567
|
FentonShell.Key.key_generation
|
train
|
# Generates an SSHKey with the requested type, bit length and
# passphrase, stamped with a fixed fenton_shell comment.
#
# @param options [Hash] expects :type, :bits and :passphrase
# @return [SSHKey] the freshly generated key pair
def key_generation(options)
  SSHKey.generate(
    type: options[:type],
    bits: options[:bits],
    comment: 'ssh@fenton_shell',
    passphrase: options[:passphrase]
  )
end
|
ruby
|
{
"resource": ""
}
|
q8568
|
Raca.Containers.metadata
|
train
|
# Retrieves account-level storage metadata via a HEAD request and
# returns container/object/byte counts as Integers.
def metadata
  log "retrieving containers metadata from #{storage_path}"
  headers = storage_client.head(storage_path)
  { :containers => "X-Account-Container-Count",
    :objects    => "X-Account-Object-Count",
    :bytes      => "X-Account-Bytes-Used" }.each_with_object({}) do |(key, header), memo|
    memo[key] = headers[header].to_i
  end
end
|
ruby
|
{
"resource": ""
}
|
q8569
|
XS.Context.setctxopt
|
train
|
# Sets a context option via the native xs_setctxopt call.
#
# @param name the native option identifier
# @param value [Integer] the option value, written into a 4-byte
#   native int buffer (all options handled here are int-sized)
# @return the native return code from xs_setctxopt
def setctxopt name, value
  length = 4
  pointer = LibC.malloc length
  pointer.write_int value
  rc = LibXS.xs_setctxopt @context, name, pointer, length
  # Release the native buffer immediately; the library copies the value.
  LibC.free(pointer) unless pointer.nil? || pointer.null?
  rc
end
|
ruby
|
{
"resource": ""
}
|
q8570
|
XS.Context.socket
|
train
|
# Creates a socket of the given type within this context, returning
# nil instead of raising when the context is no longer usable.
def socket type
  Socket.new @context, type
rescue ContextError
  nil
end
|
ruby
|
{
"resource": ""
}
|
q8571
|
Attention.Instance.publish
|
train
|
# Announces this instance on the 'instance' channel, records it in
# Redis with the configured TTL, and starts the keep-alive heartbeat.
def publish
  publisher.publish('instance', added: info) do |redis|
    key = "instance_#{ @id }"
    redis.setex key, Attention.options[:ttl], JSON.dump(info)
  end
  heartbeat
end
|
ruby
|
{
"resource": ""
}
|
q8572
|
Coulda.WebSteps.given_a
|
train
|
# Defines a Given step that builds a factory object and stores it in an
# instance variable named after the factory.
#
# Symbol argument values are resolved against existing instance
# variables of the same name at step-execution time (so earlier steps'
# models can be referenced by symbol).
#
# NOTE(review): `args` is mutated while being iterated with `each` —
# safe here because keys are only re-assigned, never added or removed,
# but worth confirming against the intended semantics.
def given_a(factory_name, args = {})
  Given "a #{factory_name} #{humanize args}" do
    args.each do |key, value|
      if value.is_a? Symbol
        instance_var_named_value = instance_variable_get("@#{value}")
        args[key] = instance_var_named_value if instance_var_named_value
      end
    end
    model = Factory(factory_name.to_sym, args)
    instance_variable_set("@#{factory_name}", model)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8573
|
Octo.Enterprise.setup_notification_categories
|
train
|
# Creates one push-notification template per configured entry for this
# enterprise; does nothing when no templates are configured.
def setup_notification_categories
  templates = Octo.get_config(:push_templates)
  return unless templates
  templates.each do |tmpl|
    Octo::Template.new(
      enterprise_id: self._id,
      category_type: tmpl[:name],
      template_text: tmpl[:text],
      active: true
    ).save!
  end
  Octo.logger.info("Created templates for Enterprise: #{ self.name }")
end
|
ruby
|
{
"resource": ""
}
|
q8574
|
Octo.Enterprise.setup_intelligent_segments
|
train
|
# Creates the configured intelligent segments for this enterprise,
# constantizing the configured type/dimension/operator/value names;
# does nothing when no segments are configured.
def setup_intelligent_segments
  segments = Octo.get_config(:intelligent_segments)
  if segments
    segments.each do |seg|
      args = {
        enterprise_id: self._id,
        name: seg[:name],
        type: seg[:type].constantize,
        dimensions: seg[:dimensions].collect(&:constantize),
        operators: seg[:operators].collect(&:constantize),
        values: seg[:values].collect(&:constantize),
        active: true,
        intelligence: true,
      }
      Octo::Segment.new(args).save!
    end
    # Fixed log-message typo: "segents" -> "segments".
    Octo.logger.info "Created segments for Enterprise: #{ self.name }"
  end
end
|
ruby
|
{
"resource": ""
}
|
q8575
|
FastExt.SessionsController.forgot_password
|
train
|
# Resets the matching user's password to a random 10-character string.
#
# BUG FIX: `where(...)` returns a relation, not a record, so the old
# code assigned the relation to @user and `password=` / `save!` could
# not work; take the first matching record instead.
def forgot_password
  klass = params[:type] || 'FastExt::MPerson'
  @user = klass.constantize.where(username: params[:username]).first
  random_password = Array.new(10).map { (65 + rand(58)).chr }.join
  @user.password = random_password
  @user.save!
  #Mailer.create_and_deliver_password_change(@user, random_password)
end
|
ruby
|
{
"resource": ""
}
|
q8576
|
Sem4rSpecHelper.Hash.except
|
train
|
# Returns a copy of the hash without the given keys, matching either
# the key itself or (when the key supports it) its Symbol form.
#
# BUG FIX: the old predicate `keys.include?(k || k.to_sym)` only ever
# tested `k` — any non-nil key is truthy, so `k.to_sym` was never
# consulted and String keys listed as Symbols were not excluded.
def except(*keys)
  self.reject { |k, _v| keys.include?(k) || (k.respond_to?(:to_sym) && keys.include?(k.to_sym)) }
end
|
ruby
|
{
"resource": ""
}
|
q8577
|
Sem4rSpecHelper.Hash.only
|
train
|
# Returns a copy of the hash containing only the given keys, matching
# either the key itself or (when supported) its Symbol form.
#
# BUG FIX: as with #except, `keys.include?(k || k.to_sym)` never
# consulted `k.to_sym` because any non-nil key is truthy.
def only(*keys)
  self.reject { |k, _v| !(keys.include?(k) || (k.respond_to?(:to_sym) && keys.include?(k.to_sym))) }
end
|
ruby
|
{
"resource": ""
}
|
q8578
|
Brandish.Configure.build
|
train
|
# Yields, for each selected form, a proc that builds that form against
# this configuration; without a block returns an enumerator instead.
def build(which = :all)
  return to_enum(:build, which) unless block_given?
  select_forms(which).each do |form|
    yield proc { form.build(self) }
  end
end
|
ruby
|
{
"resource": ""
}
|
q8579
|
Brandish.Configure.roots
|
train
|
# Lazily parses root nodes by path, memoized in a Hash.
#
# The default block first stores nil under the key BEFORE parsing —
# so a re-entrant lookup of the same key during parse_from sees the
# nil placeholder instead of recursing through the default block again.
def roots
  @_roots ||= ::Hash.new do |h, k|
    h[k] = nil
    h[k] = parse_from(k)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8580
|
Brandish.Configure.parse_from
|
train
|
# Reads and parses the file at `path`, caching the parse result by the
# SHA2 digest of its contents so identical content is parsed once.
#
# @param path [Pathname] the file to parse
# @param short [Pathname] the name reported to the scanner (defaults
#   to `path` relative to the configured root)
def parse_from(path, short = path.relative_path_from(root))
  contents = path.read
  digest = Digest::SHA2.digest(contents)
  cache.fetch(digest) do
    scanner = Scanner.new(contents, short, options)
    parser = Parser.new(scanner.call)
    # Store the freshly parsed result in the cache before returning it.
    parser.call.tap { |r| cache[digest] = r }
  end
end
|
ruby
|
{
"resource": ""
}
|
q8581
|
RDO.Statement.execute
|
train
|
# Executes the prepared statement with the given bind values, records
# the wall-clock execution time on the result set (unless the driver
# already set one), and debug-logs the timing, command and binds.
#
# @return the driver result set
# @raise [RDO::Exception] re-raised after being fatal-logged
def execute(*bind_values)
  t = Time.now
  @executor.execute(*bind_values).tap do |rs|
    rs.info[:execution_time] ||= Time.now - t
    if logger.debug?
      logger.debug(
        "(%.6f) %s %s" % [
          rs.execution_time,
          command,
          ("<Bind: #{bind_values.inspect}>" unless bind_values.empty?)
        ]
      )
    end
  end
rescue RDO::Exception => e
  logger.fatal(e.message) if logger.fatal?
  raise
end
|
ruby
|
{
"resource": ""
}
|
q8582
|
ActiveRecord.AttributeMethods.attributes
|
train
|
# Returns a Hash of attribute name => current value for every known
# attribute of this record.
def attributes
  attribute_names.each_with_object({}) do |name, collected|
    collected[name] = read_attribute(name)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8583
|
Declare.Assertions.EQL?
|
train
|
# Strict eql? assertion helper: verifies eql? symmetry, hash equality,
# and that `sample` matches @it when used as a Hash key.
def EQL?(sample)
  symmetric = @it.eql?(sample) && sample.eql?(@it)
  symmetric && (@it.hash == sample.hash) && ({ @it => true }.has_key? sample)
end
|
ruby
|
{
"resource": ""
}
|
q8584
|
Declare.Assertions.CATCH
|
train
|
# Asserts that the block raises exactly an instance of
# `exception_klass`: passes on a match, records a failure when a
# different exception class is raised OR when no exception is raised
# at all (the `else` clause). Always marks the assertion declared.
def CATCH(exception_klass, &block)
  block.call
rescue ::Exception
  if $!.instance_of? exception_klass
    pass
  else
    failure("Faced a exception, that instance of #{exception_klass}.",
            "Faced a exception, that instance of #{$!.class}.", 2)
  end
else
  failure("Faced a exception, that instance of #{exception_klass}.",
          'The block was not faced any exceptions.', 2)
ensure
  _declared!
end
|
ruby
|
{
"resource": ""
}
|
q8585
|
ActiveHarmony.QueueItem.process_push
|
train
|
# Pushes the locally-stored object referenced by this queue item to
# the remote service via the factory's synchronizer, then marks the
# item as done and persists it.
def process_push
  factory = "::#{object_type}".constantize
  record = factory.find(object_local_id)
  factory.synchronizer.push_object(record)
  self.state = 'done'
  self.save
end
|
ruby
|
{
"resource": ""
}
|
q8586
|
ActiveHarmony.QueueItem.process_pull
|
train
|
# Pulls the remote object referenced by this queue item into the local
# store via the factory's synchronizer, then marks the item as done
# and persists it.
def process_pull
  factory = "::#{object_type}".constantize
  factory.synchronizer.pull_object(object_remote_id)
  self.state = 'done'
  self.save
end
|
ruby
|
{
"resource": ""
}
|
q8587
|
StronglyTyped.Attributes.attribute
|
train
|
# Declares a strongly-typed attribute on the including class.
#
# The name is normalized to a Symbol; duplicate declarations and
# non-Class type arguments are rejected up front.
#
# @param name [Symbol, String] attribute name
# @param type [Class] type enforced by the generated writer
# @return [Symbol] the normalized attribute name
# @raise [NameError] if an attribute with this name already exists
# @raise [TypeError] if +type+ is not a Class, or is Bignum
def attribute(name, type=Object)
  name = name.to_sym #normalize
  raise NameError, "attribute `#{name}` already created" if members.include?(name)
  raise TypeError, "second argument, type, must be a Class but got `#{type.inspect}` insted" unless type.is_a?(Class)
  # Bignum was deprecated in Ruby 2.4 and removed in 3.2 (merged into
  # Integer); guard with defined? so this check no longer raises
  # NameError on modern Rubies while preserving the old behavior.
  raise TypeError, "directly converting to Bignum is not supported, use Integer instead" if defined?(Bignum) && type == Bignum
  new_attribute(name, type)
end
|
ruby
|
{
"resource": ""
}
|
q8588
|
StronglyTyped.Attributes.new_attribute
|
train
|
# Installs the reader and type-checked writer for the attribute and
# records its type.  The three steps are independent of one another.
#
# @return [Symbol] the attribute name
def new_attribute(name, type)
  define_attr_reader(name)
  define_attr_writer(name, type)
  attributes[name] = type
  name
end
|
ruby
|
{
"resource": ""
}
|
q8589
|
StronglyTyped.Attributes.define_attr_writer
|
train
|
# Defines a type-checked writer method ("name=") on the receiver.
#
# The writer accepts values already of +type+ unchanged; otherwise it
# attempts coercion and raises TypeError if the coerced value still is
# not of the required type.
#
# @param name [Symbol, String] attribute name (writer becomes "name=")
# @param type [Class] required type for assigned values
def define_attr_writer(name, type)
  define_method("#{name}=") do |value|
    unless value.kind_of?(type)
      # NOTE(review): coerce is presumably an instance-level helper of
      # the including class — confirm it returns the value (possibly
      # unconverted) rather than raising on failure.
      value = coerce(value, to: type)
      unless value.kind_of?(type)
        raise TypeError, "Attribute `#{name}` only accepts `#{type}` but got `#{value}`:`#{value.class}` instead"
      end
    end
    instance_variable_set("@#{name}", value)
  end
end
|
ruby
|
{
"resource": ""
}
|
q8590
|
Derelict.VirtualMachine.validate!
|
train
|
# Validates that this virtual machine exists, logging the outcome.
#
# @return [Derelict::VirtualMachine] self, for chaining
# @raise [NotFound] when the VM does not exist
def validate!
  logger.debug "Starting validation for #{description}"
  unless exists?
    raise NotFound.new(name, connection)
  end
  logger.info "Successfully validated #{description}"
  self
end
|
ruby
|
{
"resource": ""
}
|
q8591
|
Derelict.VirtualMachine.execute!
|
train
|
# Runs a Vagrant command for this VM through the connection.
#
# @param command [Symbol, String] the vagrant sub-command
# @param options [Hash] :color, :provider, :log_mode, :log toggles
def execute!(command, options)
  # Assemble the argument list for connection.execute!
  args = [command, name]
  args.concat(arguments_for(command))
  args << "--color" if options[:color]
  args.push("--provider", options[:provider]) if options[:provider]
  args << {:mode => options[:log_mode]} if options[:log_mode]

  # Stream output through the shell-logging block only when logging
  # is enabled; otherwise pass no block.
  log_block = options[:log] ? shell_log_block : nil
  connection.execute!(*args, &log_block)
end
|
ruby
|
{
"resource": ""
}
|
q8592
|
Duvet.Cov.total_coverage
|
train
|
# Fraction of all lines that were executed; 0.0 when there are no lines.
#
# @return [Float] ratio in 0.0..1.0
def total_coverage
  denominator = lines.size
  return 0.0 if denominator.zero?
  ran_lines.size.fdiv(denominator)
end
|
ruby
|
{
"resource": ""
}
|
q8593
|
Duvet.Cov.code_coverage
|
train
|
# Fraction of code lines (non-blank/non-comment, per code_lines) that
# were executed; 0.0 when there are no code lines.
#
# @return [Float] ratio in 0.0..1.0
def code_coverage
  denominator = code_lines.size
  return 0.0 if denominator.zero?
  ran_lines.size.fdiv(denominator)
end
|
ruby
|
{
"resource": ""
}
|
q8594
|
StackableFlash.StackLayer.[]=
|
train
|
# Stores +value+ under +key+.  When stacking mode is active the value
# is normalized to an array and wrapped in a FlashStack; otherwise the
# plain flash writer behavior is used untouched.
def []=(key, value)
  # All StackableFlash functionality is completely bypassed
  return super(key, value) unless StackableFlash.stacking

  # Preserves nil values in the result... I suppose that's OK, users can compact if needed :)
  as_array = value.kind_of?(Array) ? value : Array.new(1, value)
  super(key, StackableFlash::FlashStack.new.replace(as_array))
end
|
ruby
|
{
"resource": ""
}
|
q8595
|
ActiveSupport.BufferedLogger.add
|
train
|
# Appends a formatted message to the buffer when +severity+ meets the
# configured level, then triggers an automatic flush.
#
# The message text is resolved in priority order: explicit +message+,
# then the block's return value, then +progname+.
#
# @param severity [Integer] numeric log level of this message
# @param message [String, nil] message text
# @param progname [String, nil] fallback text when no message/block
# @return [String, nil] the buffered line, or nil when below level
def add(severity, message = nil, progname = nil, &block)
  return if @level > severity
  message = (message || (block && block.call) || progname).to_s
  # NOTE(review): number_to_severity is assumed to map the numeric
  # severity to its label for the formatter — confirm the formatter API.
  message = formatter.call(formatter.number_to_severity(severity), Time.now.utc, progname, message)
  # guarantee a trailing newline before buffering
  message = "#{message}\n" unless message[-1] == ?\n
  buffer << message
  auto_flush
  message
end
|
ruby
|
{
"resource": ""
}
|
q8596
|
Bitsa.GmailContactsLoader.load_chunk
|
train
|
# Fetches one page of the Google contacts feed and applies each entry
# to the cache.
#
# @param client [Object] feed client responding to #get (result must
#   respond to #to_xml)
# @param idx [Integer] start index of this page in the feed
# @param cache [Object] contact cache updated per entry
# @param orig_last_modified [String, nil] only entries updated after
#   this timestamp are requested
# @return [Integer] number of child elements of the returned feed.
#   NOTE(review): this counts *all* children, not only <entry>
#   elements — confirm callers expect that for pagination.
def load_chunk(client, idx, cache, orig_last_modified)
  # last_modified = nil
  url = generate_loader_url(idx, orig_last_modified)
  feed = client.get(url).to_xml
  feed.elements.each('entry') do |entry|
    process_entry(cache, entry)
    # last_modified = entry.elements['updated'].text
  end
  feed.elements.count
end
|
ruby
|
{
"resource": ""
}
|
q8597
|
Bitsa.GmailContactsLoader.process_entry
|
train
|
# Applies one feed entry to the cache: removes the contact when the
# entry is marked deleted, otherwise updates it with its title and
# all gd:email addresses.
#
# @param cache [Object] contact cache responding to #delete / #update
# @param entry [REXML::Element] a single <entry> element
def process_entry(cache, entry)
  gmail_id = entry.elements['id'].text
  return cache.delete(gmail_id) if entry.elements['gd:deleted']

  addresses = entry.get_elements('gd:email').map { |e| e.attributes['address'] }
  cache.update(gmail_id, entry.elements['title'].text || '', addresses)
end
|
ruby
|
{
"resource": ""
}
|
q8598
|
Bitsa.GmailContactsLoader.generate_loader_url
|
train
|
# Builds the Google contacts feed URL for one page of results.
#
# @param idx [Integer] start index of the page
# @param cache_last_modified [String, nil] when present, restricts the
#   feed to entries updated after this timestamp (CGI-escaped)
# @return [String] the full feed URL
def generate_loader_url(idx, cache_last_modified)
  # FIXME: Escape variables
  params = [
    'orderby=lastmodified',
    'showdeleted=true',
    "max-results=#{@fetch_size}",
    "start-index=#{idx}"
  ]
  params << "updated-min=#{CGI.escape(cache_last_modified)}" if cache_last_modified
  "https://www.google.com/m8/feeds/contacts/#{@user}/thin?" + params.join('&')
end
|
ruby
|
{
"resource": ""
}
|
q8599
|
MultiGit.Walkable.walk
|
train
|
# Walks the tree in the given traversal order, yielding each node.
# Without a block, returns an Enumerator for the same traversal.
#
# @param mode [Symbol] :pre, :post or :leaves
# @raise [ArgumentError] for any other mode
def walk(mode = :pre, &block)
  unless MODES.include?(mode)
    raise ArgumentError, "Unknown walk mode #{mode.inspect}. Use either :pre, :post or :leaves"
  end
  return to_enum(:walk, mode) unless block

  # MODES restricts mode to exactly :pre/:post/:leaves, so dynamic
  # dispatch is safe here.
  send(:"walk_#{mode}", &block)
end
|
ruby
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.