_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q8200
Optser.OptSet.get!
train
# Like +get+, but raises when the resolved value is nil.
def get!(key, default = nil, &block)
  result = get(key, default, &block)
  raise "Nil value found for option: #{key}, #{default}" if result.nil?
  result
end
ruby
{ "resource": "" }
q8201
Solrizer::Fedora.Indexer.connect
train
# Establishes the RSolr connection used for indexing.
#
# Config source order: Blacklight's solr config, then a Rails config file,
# then a built-in fallback. The :url key is taken from the :fulltext section
# (when full-text indexing is enabled), else the :default section, else a
# pre-existing :url entry; otherwise an error is raised.
#
# @raise [URI::InvalidURIError] when the connection attempt fails at runtime
def connect
  if defined?(Blacklight)
    solr_config = Blacklight.solr_config
  elsif defined?(Rails.root.to_s)
    # NOTE(review): defined?(Rails.root.to_s) is truthy whenever Rails is
    # loaded; presumably the intent is just "running under Rails" — confirm.
    solr_config = load_rails_config
  else
    solr_config = load_fallback_config
  end
  if index_full_text == true && solr_config.has_key?(:fulltext) && solr_config[:fulltext].has_key?('url')
    solr_config[:url] = solr_config[:fulltext]['url']
  elsif solr_config.has_key?(:default) && solr_config[:default].has_key?('url')
    solr_config[:url] = solr_config[:default]['url']
  elsif !solr_config.has_key?(:url)
    raise "Unable to find a solr url in the config file"
  end
  @solr = RSolr.connect solr_config
rescue RuntimeError => e
  logger.debug "Unable to establish SOLR Connection with #{solr_config.inspect}. Failed with #{e.message}"
  raise URI::InvalidURIError
end
ruby
{ "resource": "" }
q8202
Solrizer::Fedora.Indexer.generate_dates
train
# Adds :month_facet and :day_facet entries to +solr_doc+ based on its
# :date_t value. Missing or out-of-range components become "99"; a missing
# :date_t is first seeded with the easy-to-find sentinel "9999-99-99".
def generate_dates(solr_doc)
  if solr_doc[:date_t].nil?
    ::Solrizer::Extractor.insert_solr_field_value(solr_doc, :date_t, "9999-99-99")
  end
  # The date may be stored inside an array; take the first entry if so.
  raw_date = solr_doc[:date_t]
  raw_date = raw_date.first if raw_date.kind_of?(Array)
  parsed = Date._parse(raw_date)
  month = parsed[:mon]
  month_value = (month && month.between?(1, 12)) ? month.to_s.rjust(2, '0') : "99"
  ::Solrizer::Extractor.insert_solr_field_value(solr_doc, :month_facet, month_value)
  day = parsed[:mday]
  day_value = (day && day.between?(1, 31)) ? day.to_s.rjust(2, '0') : "99"
  ::Solrizer::Extractor.insert_solr_field_value(solr_doc, :day_facet, day_value)
  solr_doc
end
ruby
{ "resource": "" }
q8203
Solrizer::Fedora.Indexer.create_document
train
# Builds a solr document Hash for +obj+ by collecting fields from every
# known content model of the object (ActiveFedora::Base fields included
# exactly once).
def create_document(obj)
  solr_doc = {}
  model_klazz_array = ActiveFedora::ContentModel.known_models_for(obj)
  model_klazz_array.delete(ActiveFedora::Base)
  # A bare ActiveFedora::Base contributes only the base fields; any other
  # model instance also contributes its own fields via its to_solr.
  if obj.class == ActiveFedora::Base
    solr_doc = obj.to_solr(solr_doc)
    logger.debug " added base fields from #{obj.class.to_s}"
  else
    solr_doc = obj.to_solr(solr_doc)
    model_klazz_array.delete(obj.class)
    logger.debug " added base fields from #{obj.class.to_s} and model fields from #{obj.class.to_s}"
  end
  # Adapt to each remaining model; :model_only avoids re-adding base fields.
  model_klazz_array.each do |klazz|
    instance = obj.adapt_to(klazz)
    solr_doc = instance.to_solr(solr_doc, :model_only => true)
    logger.debug " added solr fields from #{klazz.to_s}"
  end
  ::Solrizer::Extractor.insert_solr_field_value(solr_doc, :id_t, "#{obj.pid}")
  ::Solrizer::Extractor.insert_solr_field_value(solr_doc, :id, "#{obj.pid}") unless solr_doc[:id]
  solr_doc
end
ruby
{ "resource": "" }
q8204
Megam.API.post_accounts
train
# POSTs a new account payload to /accounts/content; expects HTTP 201.
def post_accounts(new_account)
  @options = { path: '/accounts/content',
               body: Megam::JSONCompat.to_json(new_account) }.merge(@options)
  request(:expects => 201, :method => :post, :body => @options[:body])
end
ruby
{ "resource": "" }
q8205
MIPPeR.LPSolveModel.store_constraint_matrix
train
# Adds a single constraint row to the LPSolve model from the terms of the
# constraint's linear expression.
def store_constraint_matrix(constr, type)
  coefficients = []
  column_indexes = []
  constr.expression.terms.each do |variable, coefficient|
    coefficients << coefficient * 1.0
    column_indexes << variable.index
  end
  row_buffer = build_pointer_array coefficients, :double
  colno_buffer = build_pointer_array column_indexes, :int
  ret = LPSolve.add_constraintex(@ptr, constr.expression.terms.length,
                                 row_buffer, colno_buffer, type, constr.rhs)
  fail if ret != 1
end
ruby
{ "resource": "" }
q8206
IMDB.Person.birthdate
train
# Scrapes the person's birth date from the bio page.
#
# @return [Date, nil] nil when the date cannot be parsed
def birthdate
  # NOTE(review): the first lookup has no rescue — a missing
  # "Date of Birth" cell raises NoMethodError rather than returning nil.
  month_data_element = bio_document.at("td.label[text()*='Date of Birth']").
    next_element.first_element_child
  date_month = month_data_element.inner_text.strip rescue ""
  year = month_data_element.next_element.inner_text.strip rescue ""
  Date.parse("#{date_month} #{year}") rescue nil
end
ruby
{ "resource": "" }
q8207
IMDB.Person.deathdate
train
# Scrapes the person's death date from the bio page.
#
# @return [Date, nil] nil when the markup is absent or unparseable
def deathdate
  date_month = bio_document.at("h5[text()*='Date of Death']").next_element.inner_text.strip rescue ""
  year = bio_document.at("a[@href*='death_date']").inner_text.strip rescue ""
  Date.parse("#{date_month} #{year}") rescue nil
end
ruby
{ "resource": "" }
q8208
IMDB.Person.filmography
train
# Returns the person's filmography as a flat list of Movie objects built
# from the title ids found in ".filmo-row b a" links.
#
# An earlier per-role implementation (writer/actor/director/composer
# returned as a Hash of Movie arrays via the #filmo-head-* anchors) is
# kept below for reference.
def filmography
  #@return [Hash]
  # writer: [Movie]
  # actor: [Movie]
  # director: [Movie]
  # composer: [Movie]
  #as_writer = main_document.at("#filmo-head-Writer").next_element.search('b a').map { |e| e.get_attribute('href')[/tt(\d+)/, 1] } rescue []
  #as_actor = main_document.at("#filmo-head-Actor").next_element.search('b a').map { |e| e.get_attribute('href')[/tt(\d+)/, 1] } rescue []
  #as_director = main_document.at("#filmo-head-Director").next_element.search('b a').map { |e| e.get_attribute('href')[/tt(\d+)/, 1] } rescue []
  #as_composer = main_document.at("#filmo-head-Composer").next_element.search('b a').map { |e| e.get_attribute('href')[/tt(\d+)/, 1] } rescue []
  #{ writer: as_writer.map { |m| Movie.new(m) }, actor: as_actor.map { |m| Movie.new(m) }, director: as_director.map { |m| Movie.new(m) }, composer: as_composer.map { |m| Movie.new(m) } }
  films=main_document.css(".filmo-row b a").map { |e| e.get_attribute('href')[/tt(\d+)/, 1] } rescue []
  films.map { |f| Movie.new(f.to_i) }
end
ruby
{ "resource": "" }
q8209
Tay.Specification.all_javascript_paths
train
# Returns every javascript path in the spec — plain scripts, background
# scripts, and each content script's javascripts — flattened and deduped.
def all_javascript_paths
  paths = [] + @javascripts + @background_scripts
  paths += @content_scripts.map { |script| script.javascripts }.compact
  paths.flatten.uniq
end
ruby
{ "resource": "" }
q8210
Tay.Specification.all_stylesheet_paths
train
# Returns every stylesheet path in the spec, including each content
# script's stylesheets, flattened and deduped.
def all_stylesheet_paths
  paths = [] + @stylesheets
  paths += @content_scripts.map { |script| script.stylesheets }.compact
  paths.flatten.uniq
end
ruby
{ "resource": "" }
q8211
LatoBlog.Category::SerializerHelpers.serialize
train
# Full serialization of the category: basic info plus father, children,
# and other informations.
def serialize
  {
    id: id,
    title: title,
    meta_language: meta_language,
    meta_permalink: meta_permalink,
    category_father: category_father ? category_father.serialize_base : nil,
    category_children: serialize_category_children,
    other_informations: serialize_other_informations
  }
end
ruby
{ "resource": "" }
q8212
LatoBlog.Category::SerializerHelpers.serialize_base
train
# Minimal serialization of the category (id, title, language, permalink).
def serialize_base
  {
    id: id,
    title: title,
    meta_language: meta_language,
    meta_permalink: meta_permalink
  }
end
ruby
{ "resource": "" }
q8213
MarkLogic.Collection.from_criteria
train
# Translates a Mongo-style criteria Hash into a MarkLogic query tree.
# Hash values become AND-ed operator sub-queries ($gt etc.); scalar values
# become simple EQ queries. Multiple top-level criteria are AND-ed.
def from_criteria(criteria)
  queries = []
  criteria.each do |k, v|
    name, operator, index_type, value = nil
    query_options = {}
    if (v.is_a?(Hash))
      name = k.to_s
      query_options.merge!(v.delete(:options) || {})
      sub_queries = []
      v.each do |kk, vv|
        # NOTE(review): the `|| "EQ"` default is dead code — upcase never
        # returns nil.
        operator = kk.to_s.gsub('$', '').upcase || "EQ"
        if @operators.include?(operator)
          value = vv
          value = value.to_s if value.is_a?(MarkLogic::ObjectId)
          sub_queries << build_query(name, operator, value, query_options)
        elsif value.is_a?(Hash)
          # NOTE(review): this branch tests `value` from a PREVIOUS loop
          # iteration (value is only assigned in the branch above) and the
          # inner block shadows kk/vv — looks suspicious; verify intent.
          child_queries = value.map do |kk, vv|
            build_query(kk, vv, query_options)
          end
          sub_queries << Queries::ContainerQuery.new(name, Queries::AndQuery.new(child_queries))
        end
      end
      if sub_queries.length > 1
        queries << Queries::AndQuery.new(sub_queries)
      elsif sub_queries.length == 1
        queries << sub_queries[0]
      end
    else
      name = k.to_s
      value = v
      operator = "EQ"
      queries << build_query(name, operator, value, query_options)
    end
  end
  # Returns nil when criteria produced no queries.
  if queries.length > 1
    MarkLogic::Queries::AndQuery.new(*queries)
  elsif queries.length == 1
    queries[0]
  end
end
ruby
{ "resource": "" }
q8214
RImageAnalysisTools.Drawing.draw
train
# Sets +draw_value+ at every coordinate of +im+ for which the given block
# returns a truthy value.
def draw(im, draw_value = 255.0)
  im.each do |coord|
    im.setValue(coord, draw_value) if yield(coord)
  end
end
ruby
{ "resource": "" }
q8215
RImageAnalysisTools.Drawing.draw_shape
train
# Dispatches to the named shape-drawing method (e.g. :circle) when it
# exists; unknown shapes are silently ignored.
def draw_shape(im, location, shape_name = :circle, shape_parameters = 10)
  send(shape_name, im, location, shape_parameters) if respond_to?(shape_name)
end
ruby
{ "resource": "" }
q8216
RImageAnalysisTools.Drawing.circle
train
# Draws a circle outline of the given radius centered at +center+,
# limiting the image iteration to the circle's bounding box.
def circle(im, center, radius)
  lower = ImageCoordinate[center[0] - radius - 1, center[1] - radius - 1, 0, 0, 0]
  upper = ImageCoordinate[center[0] + radius + 1, center[1] + radius + 1, 0, 0, 0]
  im.box_conservative(lower, upper, [:x, :y])
  draw(im) do |ic|
    dx = ic[:x] - center[0]
    dy = ic[:y] - center[1]
    # A pixel is on the outline when its distance from the center is
    # within sqrt(2) of the radius.
    (Math.hypot(dx, dy) - radius).abs <= Math.sqrt(2)
  end
  lower.recycle
  upper.recycle
end
ruby
{ "resource": "" }
q8217
RImageAnalysisTools.Drawing.ellipse
train
# Draws an ellipse outline around the two +foci+; the "string length" is
# the focal distance plus +radius_inc+.
def ellipse(im, foci, radius_inc)
  min_x, max_x = [foci[0][0], foci[1][0]].minmax
  min_y, max_y = [foci[0][1], foci[1][1]].minmax
  radius = radius_inc + Math.hypot(max_x - min_x, max_y - min_y)
  lower = ImageCoordinate[min_x - radius - 1, min_y - radius - 1, 0, 0, 0]
  upper = ImageCoordinate[max_x + radius + 1, max_y + radius + 1, 0, 0, 0]
  im.box_conservative(lower, upper, [:x, :y])
  draw(im) do |ic|
    dist0 = Math.hypot(ic[:x] - foci[0][0], ic[:y] - foci[0][1])
    dist1 = Math.hypot(ic[:x] - foci[1][0], ic[:y] - foci[1][1])
    # On the outline when the summed focal distances are within sqrt(2)
    # of the target radius.
    (dist0 + dist1 - radius).abs <= Math.sqrt(2)
  end
end
ruby
{ "resource": "" }
q8218
CSS.Annotate.annotate
train
# Parses the Sass/SCSS file, resolves its rules, and caches the annotated
# rows in @rows.
def annotate(filename)
  engine = Sass::Engine.new(IO.read(filename), options.merge(:syntax=>guess_syntax(filename)))
  tree = engine.to_tree
  tree.perform!(Sass::Environment.new)
  resolve_rules tree
  @rows = to_rows(tree)
end
ruby
{ "resource": "" }
q8219
CSS.Annotate.to_html
train
# Renders the annotation template for +filename+; on a Sass syntax error,
# converts the error to CSS and renders the template with it instead.
def to_html(filename)
  rows = annotate(filename)
  ERB.new(IO.read(File.dirname(__FILE__) + "/annotate/template.erb")).result(binding)
rescue Sass::SyntaxError=>error
  error = Sass::SyntaxError.exception_to_css error, @options.merge(:full_exception=>true)
  ERB.new(IO.read(File.dirname(__FILE__) + "/annotate/template.erb")).result(binding)
end
ruby
{ "resource": "" }
q8220
CSS.Annotate.styles
train
# Renders the bundled annotate stylesheet (SCSS) to CSS.
def styles
  Sass::Engine.new(IO.read(File.dirname(__FILE__) + "/annotate/style.scss"), :syntax=>:scss).render
end
ruby
{ "resource": "" }
q8221
Kelp.Visibility.page_contains?
train
# True when the page contains the given String (content match) or Regexp
# (text match against any element). Raises ArgumentError otherwise.
def page_contains?(text_or_regexp)
  if text_or_regexp.instance_of?(String)
    page.has_content?(text_or_regexp)
  elsif text_or_regexp.instance_of?(Regexp)
    page.has_xpath?('.//*', :text => text_or_regexp)
  else
    raise ArgumentError, "Expected String or Regexp, got #{text_or_regexp.class}"
  end
end
ruby
{ "resource": "" }
q8222
Kelp.Visibility.should_see
train
# Raises Kelp::Unexpected unless every given text/regexp appears on the
# page (within the optional scope).
def should_see(texts, scope={})
  in_scope(scope) do
    texts = [texts] if texts.class == String || texts.class == Regexp
    # Collect expected values that are missing from the page
    missing = texts.reject { |text| page_contains?(text) }
    unless missing.empty?
      raise Kelp::Unexpected,
            "Expected to see: #{texts.inspect}\nDid not see: #{missing.inspect}"
    end
  end
end
ruby
{ "resource": "" }
q8223
Kelp.Visibility.should_see_in_same_row
train
# Raises Kelp::Unexpected unless all +texts+ occur in the same table row.
def should_see_in_same_row(texts, scope={})
  in_scope(scope) do
    unless page.has_xpath?(xpath_row_containing(texts))
      raise Kelp::Unexpected,
            "Expected, but did not see: #{texts.inspect} in the same row"
    end
  end
end
ruby
{ "resource": "" }
q8224
VirtualMonkey.EBS.wait_for_snapshots
train
# Polls until no snapshot in the lineage is "pending", giving up after
# 1500 seconds.
def wait_for_snapshots
  timeout = 1500
  step = 10
  while timeout > 0
    puts "Checking for snapshot completed"
    statuses = behavior(:find_snapshots).map { |snap| snap.aws_status }
    break unless statuses.include?("pending")
    sleep step
    timeout -= step
  end
  raise "FATAL: timed out waiting for all snapshots in lineage #{@lineage} to complete" if timeout == 0
end
ruby
{ "resource": "" }
q8225
VirtualMonkey.EBS.find_snapshot_timestamp
train
# Returns the timestamp digits recorded in the tags of the most recent
# snapshot, or nil when no timestamp tag is present.
#
# Fix: the previous version read $1 unconditionally after the detect,
# which could return a stale capture left over from an earlier regexp
# when no tag matched. Guarding on the detect result makes the no-match
# case return nil deterministically.
def find_snapshot_timestamp
  last_snap = behavior(:find_snapshots).last
  matching_tag = last_snap.tags.detect { |t| t["name"] =~ /timestamp=(\d+)$/ }
  matching_tag && Regexp.last_match(1)
end
ruby
{ "resource": "" }
q8226
OpenNamespace.ClassMethods.require_file
train
# Requires the file for +name+ beneath namespace_root. Returns true on
# success, nil when the file is missing; Gem::LoadError is re-raised.
def require_file(name)
  name = name.to_s
  # File.join('', name) forces a leading separator so expand_path cannot
  # escape the namespace root.
  path = File.join(namespace_root, File.expand_path(File.join('', name)))
  begin
    require path
  rescue Gem::LoadError => error
    raise(error)
  rescue ::LoadError
    return nil
  end
  true
end
ruby
{ "resource": "" }
q8227
OpenNamespace.ClassMethods.const_defined?
train
# Checks for a constant, lazily requiring the file that should define it
# before checking a second time.
def const_defined?(name, *inherit)
  return true if super(name, *inherit)
  # Attempt to load the file that might define the constant, then retry.
  require_file(OpenNamespace.const_path(name))
  super(name, *inherit)
end
ruby
{ "resource": "" }
q8228
TodoLint.Todo.relative_path
train
# Returns the todo's path relative to the current working directory.
def relative_path
  working_dir = Pathname.new(File.expand_path("./"))
  Pathname.new(path).relative_path_from(working_dir).to_s
end
ruby
{ "resource": "" }
q8229
TodoLint.Todo.lookup_tag_due_date
train
# Looks up the due date configured for this todo's tag, raising a
# descriptive KeyError when the tag is not in the config file.
def lookup_tag_due_date
  config.fetch(:tags).fetch(match[:tag])
rescue KeyError
  raise KeyError, "#{match[:tag]} tag not defined in config file"
end
ruby
{ "resource": "" }
q8230
DCPU16.Debug.debug
train
# Prints a debug header and +msg+ (Array: line per element; Hash: keyed
# sections; String/Symbol: as-is), then yields to any block. No-op unless
# debugging is enabled.
def debug(msg = nil, &block)
  return unless debug?
  puts "\n[DEBUG] - #{caller.first}"
  case msg
  when Array
    msg.each { |line| puts(line) }
  when Hash
    msg.each do |key, val|
      puts "[#{key.to_s}]"
      if val.is_a?(Array)
        val.each { |line| puts(line) }
      else
        puts val
      end
    end
  when String, Symbol
    puts msg.to_s
  end
  yield if block_given?
  puts "\n"
end
ruby
{ "resource": "" }
q8231
ICU.Result.rateable=
train
# Sets whether this result counts for rating. Forced to false when there
# is no opponent; otherwise only an explicit false turns it off.
def rateable=(rateable)
  if opponent.nil?
    @rateable = false
    return
  end
  @rateable =
    case rateable
    when nil   then true  # default is true
    when false then false # this is the only way to turn it off
    else true
    end
end
ruby
{ "resource": "" }
q8232
GroupDocsStorageCloud.Configuration.auth_settings
train
# Returns the authentication schemes supported by this API client,
# keyed by scheme name.
def auth_settings
  {
    'appsid' => {
      type: 'api_key', in: 'query', key: 'appsid',
      value: api_key_with_prefix('appsid')
    },
    'oauth' => {
      type: 'oauth2', in: 'header', key: 'Authorization',
      value: "Bearer #{access_token}"
    },
    'signature' => {
      type: 'api_key', in: 'query', key: 'signature',
      value: api_key_with_prefix('signature')
    },
  }
end
ruby
{ "resource": "" }
q8233
Bixby.HttpChannel.execute_internal
train
# POSTs +json_request+ to @uri. With a block the body is streamed to the
# block and a generic success response is returned; otherwise the full
# response body is parsed into a JsonResponse.
def execute_internal(json_request, &block)
  # Content-Type is always required when posting to the API
  json_request.headers["Content-Type"] = "application/json" if json_request.respond_to?(:headers)
  req = HTTPI::Request.new(:url => @uri, :body => json_request.to_wire)
  # Merge extra headers (e.g. from a SignedJsonRequest)
  req.headers.merge!(json_request.headers) if json_request.respond_to?(:headers)
  if block
    req.on_body(&block)
    HTTPI.post(req)
    JsonResponse.new("success")
  else
    body = HTTPI.post(req).body
    JsonResponse.from_json(body)
  end
end
ruby
{ "resource": "" }
q8234
ChronuscopClient.Synchronizer.write_last_update
train
# Persists the last-update epoch to tmp/chronuscop.tmp, creating the tmp
# directory when needed.
def write_last_update(last_update_at)
  Dir.mkdir("tmp") unless File.directory?("tmp")
  File.open("tmp/chronuscop.tmp", "w") do |file|
    file.printf("%d", last_update_at.to_i)
  end
end
ruby
{ "resource": "" }
q8235
ChronuscopClient.Synchronizer.xml_time_to_integer
train
# Converts an XML dateTime string such as "2012-03-04T05:06:07Z" into a
# UTC epoch integer.
def xml_time_to_integer(str)
  year, month, day, hour, min, sec = str.gsub(/T|Z|:/, "-").split(/-/)
  Time.utc(year, month, day, hour, min, sec).to_i
end
ruby
{ "resource": "" }
q8236
ChronuscopClient.Synchronizer.sync_it_now
train
# Pulls new/updated translations from the Chronuscop server, stores them
# in redis, and records the newest update time via write_last_update.
def sync_it_now
  puts "Attempt Sync"
  # Getting the last sync value.
  last_update_at = get_last_update_at
  # querying the page.
  page = @mechanize_agent.get("#{ChronuscopClient.configuration_object.chronuscop_server_address}/projects/#{ChronuscopClient.configuration_object.project_number}/translations.xml/?auth_token=#{ChronuscopClient.configuration_object.api_token}&last_update_at=#{last_update_at}")
  # converting the returned xml page into a hash.
  words_hash = XmlSimple.xml_in(page.body)
  # catching the case when no-translations are returned.
  if(!words_hash) then
    puts "Nothing new added."
    return
  end
  # collecting the translations array.
  all_translations = words_hash["translation"]
  # catching the case when no-translations are returned.
  if(!all_translations) then
    puts "Nothing new added."
    return
  end
  all_translations.each do |t|
    # Inserting into the redis store.
    @redis_agent.set "#{t["key"]}","#{t["value"]}"
    # Bad hack used here. Should fix this.
    str = t["updated-at"][0]["content"]
    key_updated_at = xml_time_to_integer(str)
    # Updating the value last_update_at
    if(key_updated_at > last_update_at) then
      last_update_at = key_updated_at
    end
  end
  # Writing the value of last_update_at to the file.
  write_last_update(last_update_at.to_i)
  puts "Finished synchronizing !!!"
end
ruby
{ "resource": "" }
q8237
VimPrinter.CLI.get_input_files
train
# Returns the input file list, either straight from CodeLister options or
# from a shell command when :command is supplied.
def get_input_files(args = {})
  command = args.fetch(:command, nil)
  return CodeLister.files(args) if command.nil?
  # Note: base_dir must be the the same the directory where the command is executed from
  CodeLister.files_from_shell(command, args.fetch(:base_dir, "."))
end
ruby
{ "resource": "" }
q8238
VimPrinter.CLI.execute
train
# Converts the selected input files to xhtml, optionally builds an index
# page, and packages everything into a tar.gz archive.
def execute(options = {})
  input_files = get_input_files(options)
  # we want to avoid printing the binary file
  input_files.delete_if do |file|
    File.binary?(file.gsub(/^\./, options[:base_dir]))
  end
  if input_files.empty?
    puts "No file found for your option: #{options}"
    return
  end
  to_htmls(input_files, options)
  generated_files = input_files.map { |f| "#{f}.xhtml" }
  index_file = "./index.html"
  IndexHtml.htmlify generated_files,
                    base_dir: options[:base_dir],
                    output: index_file,
                    drop_ext: true
  generated_files << index_file if options[:index]
  output_file = "vim_printer_#{File.basename(File.expand_path(options[:base_dir]))}.tar.gz"
  AgileUtils::FileUtil.tar_gzip_files(generated_files, output_file)
  AgileUtils::FileUtil.delete(generated_files)
  FileUtils.rm_rf(index_file) if options[:index]
  puts "Your output file is '#{File.absolute_path(output_file)}'"
end
ruby
{ "resource": "" }
q8239
VimPrinter.CLI.to_htmls
train
# Converts each file to html from within the base directory, logging
# progress as it goes.
def to_htmls(files, options = {})
  FileUtils.chdir(File.expand_path(options[:base_dir]))
  files.each_with_index do |file, position|
    puts "FYI: process file #{position + 1} of #{files.size} : #{file}"
    to_html(file, options)
  end
end
ruby
{ "resource": "" }
q8240
Takeout.Client.substitute_template_values
train
# Renders the Liquid template registered for +endpoint+/+request_type+,
# if any, and returns [rendered_template, remaining_options].
def substitute_template_values(endpoint, request_type, options={})
  templates_for_type = @schemas.fetch(request_type.to_sym, nil)
  template = templates_for_type.fetch(endpoint.to_sym, nil) if templates_for_type
  if template
    extracted_options, options = extract_template_options(options.merge({endpoint: endpoint}), template)
    # Render out the template with the extracted substitution values
    rendered_template = Liquid::Template.parse(template).render(extracted_options)
  end
  return rendered_template, options
end
ruby
{ "resource": "" }
q8241
Regenerate.SiteRegenerator.copySrcToOutputFile
train
# Copies a source file to its output location, optionally backing up the
# existing output file first.
def copySrcToOutputFile(srcFile, outFile, makeBackup)
  makeBackupFile(outFile) if makeBackup
  FileUtils.cp(srcFile, outFile, :verbose => true)
end
ruby
{ "resource": "" }
q8242
BrownPaperTickets.Httpost.handle_deflation
train
# Transparently inflates gzip- or deflate-encoded response bodies in place.
def handle_deflation
  case last_response["content-encoding"]
  when "gzip"
    gzipped = StringIO.new(last_response.body)
    last_response.body.replace Zlib::GzipReader.new(gzipped).read
  when "deflate"
    last_response.body.replace Zlib::Inflate.inflate(last_response.body)
  end
end
ruby
{ "resource": "" }
q8243
Timely.Cell.value_from_redis
train
# Reads the cell value from redis, computing and caching it on a miss.
# Cached values containing "." are returned as floats, otherwise integers.
def value_from_redis
  cached = Timely.redis.hget(redis_hash_key, redis_value_key)
  if cached
    cached.include?(".") ? cached.to_f : cached.to_i
  else
    fresh = value_without_caching
    Timely.redis.hset(redis_hash_key, redis_value_key, fresh)
    fresh
  end
end
ruby
{ "resource": "" }
q8244
UsingYAML.ClassMethods.using_yaml
train
# Declares YAML-backed accessors. Each argument is either a file name
# (Symbol/String) or an options Hash of the form { :path => ... }.
def using_yaml(*args)
  # Include the instance methods which provide accessors and mutators for
  # reading/writing from/to the YAML objects.
  include InstanceMethods
  args.each do |arg|
    case arg
    when Symbol, String
      # Define accessors for this file
      using_yaml_file(arg.to_s)
    when Hash
      # Currently only accepts { :path => ... }
      next unless arg.size == 1 && arg.keys.first == :path
      # Take note of the path
      UsingYAML.path = [self.inspect, arg.values.first]
    end
  end
end
ruby
{ "resource": "" }
q8245
RestlessRouter.Route.url_for
train
# Expands the route's URI template with +options+, or returns the plain
# base path for non-templated routes.
def url_for(options={})
  return base_path unless templated?
  Addressable::Template.new(base_path).expand(options).to_s
end
ruby
{ "resource": "" }
q8246
Serket.FieldDecrypter.decrypt
train
# Decrypts a hybrid-encrypted field: the RSA private key unwraps the AES
# key, which then decrypts the payload. Blank fields return nil.
def decrypt(field)
  return if field !~ /\S/
  iv, encrypted_aes_key, encrypted_text = parse(field)
  private_key = OpenSSL::PKey::RSA.new(File.read(private_key_filepath))
  aes_key = private_key.private_decrypt(Base64.decode64(encrypted_aes_key))
  plaintext = decrypt_data(iv, aes_key, encrypted_text)
  plaintext.force_encoding(encoding)
end
ruby
{ "resource": "" }
q8247
Serket.FieldDecrypter.parse
train
# Splits an encrypted field into [iv, key, message] according to @format
# (:delimited or :json).
def parse(field)
  case @format
  when :delimited
    field.split(field_delimiter)
  when :json
    parts = JSON.parse(field)
    [parts['iv'], parts['key'], parts['message']]
  end
end
ruby
{ "resource": "" }
q8248
TJBootstrapHelper.Helper.page_header
train
# Renders a Bootstrap page header. With a block, the first argument may be
# a heading size (1..6, default 1); otherwise args are (title, size).
def page_header(*args, &block)
  if block_given?
    size = (1..6) === args.first ? args.first : 1
    content_tag :div, :class => "page-header" do
      content_tag("h#{size}") { capture(&block) }
    end
  else
    title = args.first
    size = (1..6) === args.second ? args.second : 1
    content_tag :div, content_tag("h#{size}", title), :class => "page-header"
  end
end
ruby
{ "resource": "" }
q8249
TJBootstrapHelper.Helper.nav_li
train
# Renders an <li> for a nav bar, marked "active" when it links to the
# current page.
def nav_li(*args, &block)
  options = (block_given? ? args.first : args.second) || {}
  url = url_for(options)
  active = "active" if url == request.path || url == request.url
  content_tag :li, :class => active do
    link_to *args, &block
  end
end
ruby
{ "resource": "" }
q8250
Services.Connection.find_servers
train
# Resolves the list of etcd server IPs: node attributes first, then the
# per-run cache, then a Chef search (whose result is cached in run_state).
def find_servers
  # need a run_context to find anything in
  return nil unless run_context
  # If there are already servers in attribs use those
  return node[:etcd][:servers] if node.key?(:etcd) && node[:etcd].key?(:servers)
  # if we have already searched in this run use those
  return node.run_state[:etcd_servers] if node.run_state.key?(:etcd_servers)
  # find nodes, build the array of ip's, and store it in the run_state
  found = search(:node, search_query)
  node.run_state[:etcd_servers] = found.map { |n| n[:ipaddress] }
end
ruby
{ "resource": "" }
q8251
Services.Connection.try_connect
train
# Returns a connected Etcd client for +server+, or nil when the probe
# request fails.
def try_connect(server)
  client = ::Etcd.client(host: server, port: port, allow_redirect: @redirect)
  begin
    client.get '/_etcd/machines'
    client
  rescue
    puts "ETCD: failed to connect to #{client.host}:#{client.port}"
    nil
  end
end
ruby
{ "resource": "" }
q8252
CronR.Cron.run
train
# Enqueues every job runnable at +time+ (defaulting to the cron's own
# clock); one-shot jobs are flagged and removed afterwards.
def run(time = nil)
  puts "[cron] run called #{Time.now}" if @debug
  time = self.time if time.nil?
  each do |cron_job|
    runnable, _details = cron_job.runnable?(time)
    next unless runnable
    @queue.enq(cron_job)
    cron_job[:delete] = true if cron_job.once?
  end
  reject! { |cron_job| cron_job[:delete] }
end
ruby
{ "resource": "" }
q8253
CronR.Cron.start
train
# Starts the cron thread via CronR::Utils (default :every_minute). Each
# tick takes the mutex and either runs the jobs, does nothing while
# suspended, or signals @dead when stopped.
def start debug=false,method=:every_minute,*args
  @stopped = false
  @suspended = false
  @dead = Queue.new
  @thread = CronR::Utils.send(method,debug,*args) {
    time = self.time
    @mutex.synchronize {
      if @stopped then
        # It's important we put something on this queue ONLY AFTER
        # we've acquired the mutex...
        @dead.enq(true)
        true
      elsif @suspended then
      else
        self.run(time)
      end
    }
  }
end
ruby
{ "resource": "" }
q8254
CronR.Cron.stop
train
# Stops the cron loop and yields self to the block once the worker thread
# has acknowledged the stop and died. No-op without a block.
def stop &block
  if block_given? then
    @stopped = true
    @suspended = false
    # Wait till something is put on the dead queue...
    # This stops us from acquiring the mutex until after @thread
    # has processed @stopped set to true.
    sig = @dead.deq
    # The cron thread should be dead now, or wrapping up (with the
    # acquired mutex)...
    @mutex.synchronize {
      while @thread.alive?
        sleep 0.2
      end
      block.call(self)
    }
  end
end
ruby
{ "resource": "" }
q8255
Aker::Form::Middleware.LoginRenderer.provide_login_html
train
# Rack endpoint rendering the login form, passing through the requested
# url and session-expired flag.
def provide_login_html(env)
  request = ::Rack::Request.new(env)
  html = login_html(env,
                    :url => request['url'],
                    :session_expired => request['session_expired'])
  html_response(html).finish
end
ruby
{ "resource": "" }
q8256
ActiveAdminSimpleLife.SimpleElements.filter_for_main_fields
train
# Declares an ActiveAdmin filter for each main field of +klass+; gender
# fields additionally get the genders collection.
def filter_for_main_fields(klass, options ={})
  klass.main_fields.each do |field|
    stripped = ExtensionedSymbol.new(field).cut_id
    if field == :gender
      filter stripped, collection: genders
    else
      filter stripped
    end
  end
end
ruby
{ "resource": "" }
q8257
ActiveAdminSimpleLife.SimpleElements.nested_form_for_main_fields
train
# Builds the main-fields form for +klass+ and nests a has_many sub-form
# for +nested_klass+, one input per nested main field (skipping the
# back-reference to the parent model). Per-field options may be supplied
# under options[nested_table_name][field].
def nested_form_for_main_fields(klass, nested_klass, options={})
  form_for_main_fields(klass,options) do |form_field|
    nested_table_name = nested_klass.to_s.underscore.pluralize.to_sym
    main_model_name = klass.to_s.underscore.to_sym
    form_field.has_many nested_table_name, allow_destroy: true do |form|
      nested_klass.main_fields.map { |f| ExtensionedSymbol.new(f).cut_id }.each do |nested_field|
        current_options = options.fetch(nested_table_name){{}}.fetch(nested_field){{}}
        form.input(nested_field, current_options) unless nested_field == main_model_name
      end
    end
  end
end
ruby
{ "resource": "" }
q8258
Authpwn.HttpBasicControllerInstanceMethods.authenticate_using_http_basic
train
# Signs in via HTTP Basic credentials unless a user is already present.
# Symbol results from authenticate_signin indicate failure and are ignored.
def authenticate_using_http_basic
  return if current_user
  authenticate_with_http_basic do |email, password|
    signin = Session.new email: email, password: password
    result = User.authenticate_signin signin
    self.current_user = result unless result.kind_of? Symbol
  end
end
ruby
{ "resource": "" }
q8259
ActiveRecordTranslatable.ClassMethods.translate
train
# Registers +attributes+ as translatable for this model, deduplicating
# against any previously registered attributes.
def translate(*attributes)
  self._translatable ||= Hash.new { |hash, key| hash[key] = [] }
  self._translatable[base_name] = translatable.concat(attributes).uniq
end
ruby
{ "resource": "" }
q8260
EventMachine.SystemCommand.execute
train
# Spawns the command and attaches stdin/stdout/stderr pipe handlers.
# Yields self to the given block (or @execution_proc). Returns self.
def execute(&block)
  raise 'Previous process still exists' unless pipes.empty?
  # clear callbacks
  @callbacks = []
  @errbacks = []
  pid, stdin, stdout, stderr = POSIX::Spawn.popen4 @command.to_s
  @pid = pid
  @stdin = attach_pipe_handler :stdin, stdin
  @stdout = attach_pipe_handler :stdout, stdout
  @stderr = attach_pipe_handler :stderr, stderr
  if block
    block.call self
  elsif @execution_proc
    @execution_proc.call self
  end
  self
end
ruby
{ "resource": "" }
q8261
EventMachine.SystemCommand.unbind
train
# Called when a pipe detaches; once all pipes are gone, schedules exit
# callbacks and succeeds or fails the deferrable based on exit status.
def unbind(name)
  pipes.delete name
  return unless pipes.empty?
  exit_callbacks.each do |cb|
    EM.next_tick { cb.call status }
  end
  if status.exitstatus == 0
    EM.next_tick { succeed self }
  else
    EM.next_tick { fail self }
  end
end
ruby
{ "resource": "" }
q8262
EventMachine.SystemCommand.kill
train
# Sends +signal+ to the child process, optionally collects its status
# (when +wait+), closes all pipes, and returns the (possibly nil) status.
def kill(signal = 'TERM', wait = false)
  Process.kill signal, self.pid
  result = status if wait
  @stdin.close
  @stdout.close
  @stderr.close
  result
end
ruby
{ "resource": "" }
q8263
AiGames.Parser.run
train
# Main engine loop: reads commands until EOF, parses each non-empty one,
# and writes back any non-empty response.
def run
  AiGames::Logger.info 'Parser.run : Starting loop'
  loop do
    command = read_from_engine
    break if command.nil?
    command.strip!
    next if command.length == 0
    response = parse split_line command
    write_to_engine response if response && response.length >= 1
  end
  AiGames::Logger.info 'Parser.run : Stopping loop'
end
ruby
{ "resource": "" }
q8264
Fried::Typings.EnumeratorOf.valid?
train
# True when +enumerator+ is an Enumerator whose elements all satisfy the
# wrapped element type check.
def valid?(enumerator)
  return false unless Is[Enumerator].valid?(enumerator)
  enumerator.all? { |element| Is[type].valid?(element) }
end
ruby
{ "resource": "" }
q8265
Activr.Activity.to_hash
train
# Serializes the activity to a Hash: _id (when set), timestamp, kind,
# one model id per entity, and stringified meta when present.
def to_hash
  result = {}
  result['_id'] = @_id if @_id
  result['at'] = @at
  result['kind'] = kind.to_s
  @entities.each do |entity_name, entity|
    result[entity_name.to_s] = entity.model_id
  end
  result['meta'] = @meta.stringify_keys unless @meta.blank?
  result
end
ruby
{ "resource": "" }
q8266
Activr.Activity.humanization_bindings
train
# Builds the bindings used to humanize the activity: a humanized string
# and the model for each entity, merged with the meta values.
def humanization_bindings(options = {})
  bindings = {}
  @entities.each do |entity_name, entity|
    bindings[entity_name] = entity.humanize(options.merge(:activity => self))
    bindings["#{entity_name}_model".to_sym] = entity.model
  end
  bindings.merge(@meta)
end
ruby
{ "resource": "" }
q8267
Activr.Activity.humanize
train
# Renders the activity's humanize template with its humanization bindings.
#
# @raise [RuntimeError] when no humanize_tpl is defined
def humanize(options = { })
  raise "No humanize_tpl defined" if self.humanize_tpl.blank?
  Activr.sentence(self.humanize_tpl, self.humanization_bindings(options))
end
ruby
{ "resource": "" }
q8268
Activr.Activity.check!
train
# Ensures every mandatory (non-optional) entity is present, raising
# MissingEntityError otherwise.
def check!
  self.allowed_entities.each do |entity_name, entity_options|
    next if entity_options[:optional]
    if @entities[entity_name].blank?
      raise Activr::Activity::MissingEntityError, "Missing '#{entity_name}' entity in this '#{self.kind}' activity: #{self.inspect}"
    end
  end
end
ruby
{ "resource": "" }
q8269
Confrider.Vault.deep_merge!
train
# Recursively merges +other_hash+ into self in place: nested Hash values
# are deep-merged, everything else is overwritten.
def deep_merge!(other_hash)
  other_hash.each_pair do |key, other_value|
    current = self[key]
    self[key] =
      if current.is_a?(Hash) && other_value.is_a?(Hash)
        self.class.new(current).deep_merge(other_value)
      else
        other_value
      end
  end
  self
end
ruby
{ "resource": "" }
q8270
Easymongo.Query.ids
train
# Normalizes query data for mongo: wraps a bare id string as {'_id'=>...},
# renames 'id' to '_id', and converts 24-hex-char strings to ObjectIds.
def ids(data)
  # Just return if nothing to do
  return data if data and data.empty?
  # Support passing id as string
  # NOTE(review): a nil argument falls through here and becomes
  # {'_id' => nil} — confirm callers rely on that.
  data = {'_id' => data} if !data or data.is_a?(String)
  # Turn all keys to string
  data = data.stringify_keys
  # Convert id to _id for mongo
  data['_id'] = data.delete('id') if data['id']
  # Convert ids to BSON ObjectId
  data.each do |k, v|
    if v.is_a?(String) and v =~ /^[0-9a-fA-F]{24}$/
      data[k] = oid(v)
    end
  end
  # Return data
  data
end
ruby
{ "resource": "" }
q8271
Easymongo.Query.oid
train
# Returns a BSON::ObjectId: a fresh one for nil, otherwise the id parsed
# from +v+, falling back to +v+ itself when parsing fails.
def oid(v = nil)
  return BSON::ObjectId.new if v.nil?
  begin
    BSON::ObjectId.from_string(v)
  rescue
    v
  end
end
ruby
{ "resource": "" }
q8272
EncryptedAttributes.MacroMethods.encrypts
train
# Declares that the given attributes should be stored encrypted.
#
# Each entry in +attr_names+ is an attribute to encrypt; an optional
# trailing Hash configures the behavior:
# * :mode   - cipher mode (default :sha), resolved to "<Mode>Cipher" in
#             EncryptedAttributes or, failing that, EncryptedStrings
# * :to     - attribute that stores the encrypted value (defaults to the
#             source attribute itself)
# * :before / :after - hooks run around encryption
# * :on     - callback event on which encryption happens
#             (default :before_validation)
# * :if / :unless    - conditions passed to that callback
# Any remaining options (or the given block) configure the cipher.
def encrypts(*attr_names, &config)
  base_options = attr_names.last.is_a?(Hash) ? attr_names.pop : {}

  attr_names.each do |attr_name|
    # each attribute gets its own copy since keys are delete()d below
    options = base_options.dup

    attr_name = attr_name.to_s
    to_attr_name = (options.delete(:to) || attr_name).to_s

    # Figure out what cipher is being configured for the attribute
    mode = options.delete(:mode) || :sha
    class_name = "#{mode.to_s.classify}Cipher"
    if EncryptedAttributes.const_defined?(class_name)
      cipher_class = EncryptedAttributes.const_get(class_name)
    else
      cipher_class = EncryptedStrings.const_get(class_name)
    end

    # Define encryption hooks
    define_callbacks("before_encrypt_#{attr_name}", "after_encrypt_#{attr_name}")
    send("before_encrypt_#{attr_name}", options.delete(:before)) if options.include?(:before)
    send("after_encrypt_#{attr_name}", options.delete(:after)) if options.include?(:after)

    # Set the encrypted value on the configured callback
    callback = options.delete(:on) || :before_validation

    # Create a callback method to execute on the callback event
    send(callback, :if => options.delete(:if), :unless => options.delete(:unless)) do |record|
      record.send(:write_encrypted_attribute, attr_name, to_attr_name, cipher_class, config || options)
      true
    end

    # Define virtual source attribute (only when the source is not a
    # real column and encrypts into a different target attribute)
    if attr_name != to_attr_name && !column_names.include?(attr_name)
      attr_reader attr_name unless method_defined?(attr_name)
      attr_writer attr_name unless method_defined?("#{attr_name}=")
    end

    # Define the reader when reading the encrypted attribute from the database
    define_method(to_attr_name) do
      read_encrypted_attribute(to_attr_name, cipher_class, config || options)
    end

    unless included_modules.include?(EncryptedAttributes::InstanceMethods)
      include EncryptedAttributes::InstanceMethods
    end
  end
end
ruby
{ "resource": "" }
q8273
EncryptedAttributes.InstanceMethods.write_encrypted_attribute
train
# Callback target that encrypts the current value of +attr_name+ and
# writes the result to +to_attr_name+, firing the before/after encrypt
# hooks around the operation. Values that are blank or already
# encrypted are left untouched.
def write_encrypted_attribute(attr_name, to_attr_name, cipher_class, options)
  value = send(attr_name)

  # Only encrypt values that actually have content and have not already
  # been encrypted
  unless value.blank? || value.encrypted?
    callback("before_encrypt_#{attr_name}")

    # Create the cipher configured for this attribute
    cipher = create_cipher(cipher_class, options, value)

    # Encrypt the value
    value = cipher.encrypt(value)
    # keep the cipher on the value so later equality checks can reuse it
    value.cipher = cipher

    # Update the value based on the target attribute
    send("#{to_attr_name}=", value)

    callback("after_encrypt_#{attr_name}")
  end
end
ruby
{ "resource": "" }
q8274
EncryptedAttributes.InstanceMethods.read_encrypted_attribute
train
# Reader for a stored encrypted attribute: attaches the configured
# cipher to the value read from the database so equality comparisons can
# encrypt their operand before comparing.
def read_encrypted_attribute(to_attr_name, cipher_class, options)
  value = read_attribute(to_attr_name)

  # Make sure we set the cipher for equality comparison when reading
  # from the database. This should only be done if the value is *not*
  # blank, is *not* encrypted, and hasn't changed since it was read from
  # the database. The dirty checking is important when the encypted value
  # is written to the same attribute as the unencrypted value (i.e. you
  # don't want to encrypt when a new value has been set)
  unless value.blank? || value.encrypted? || attribute_changed?(to_attr_name)
    # Create the cipher configured for this attribute
    value.cipher = create_cipher(cipher_class, options, value)
  end

  value
end
ruby
{ "resource": "" }
q8275
EncryptedAttributes.InstanceMethods.create_cipher
train
# Instantiates the cipher for an attribute. Proc options are evaluated
# against this record; plain option hashes are duplicated so the cipher
# cannot mutate the shared configuration.
def create_cipher(klass, options, value)
  resolved = options.is_a?(Proc) ? options.call(self) : options.dup

  # Only use the contextual information for this plugin's ciphers
  if klass.parent == EncryptedAttributes
    klass.new(value, resolved)
  else
    klass.new(resolved)
  end
end
ruby
{ "resource": "" }
q8276
Edoors.Room.add_iota
train
# Registers a child iota in this room, adopting it when it is still
# parentless. Rejects iotas that belong to another parent as well as
# duplicate names.
def add_iota i
  if !i.parent.nil? && i.parent != self
    raise Edoors::Exception.new "Iota #{i.name} already has #{i.parent.name} as parent"
  end
  if @iotas.has_key? i.name
    raise Edoors::Exception.new "Iota #{i.name} already exists in #{path}"
  end
  i.parent = self if i.parent.nil?
  @iotas[i.name] = i
end
ruby
{ "resource": "" }
q8277
Edoors.Room.add_link
train
# Attaches a link to its source door (looked up by name in this room)
# and indexes the link under that source name.
def add_link l
  door = @iotas[l.src]
  l.door = door
  if door.nil?
    raise Edoors::Exception.new "Link source #{l.src} does not exist in #{path}"
  end
  @links[l.src] ||= []
  @links[l.src] << l
end
ruby
{ "resource": "" }
q8278
Edoors.Room._try_links
train
# Tries to route particle +p+ through the links registered for its
# source door. When several links match, every match except the last
# receives a cloned particle (fanned out via send_p); the original
# particle is routed with the last matching link so no clone is wasted.
# Returns the last matching link, or nil/false when no link applied.
def _try_links p
  puts " -> try_links ..." if @spin.debug_routing
  links = @links[p.src.name]
  return false if links.nil?
  pending_link = nil
  links.each do |link|
    if p.link_with? link
      if pending_link
        # a later link also matches: fan out a clone for the previous one
        p2 = @spin.require_p p.class
        p2.clone_data p
        p2.apply_link! pending_link
        send_p p2
      end
      pending_link = link
    end
  end
  if pending_link
    # route the original particle with the final matching link
    p.apply_link! pending_link
    _send p
  end
  pending_link
end
ruby
{ "resource": "" }
q8279
Edoors.Room._route
train
# Resolves the textual destination of particle +p+ (room path + door
# name) into an actual door pointer, or flags a routing error on the
# particle when the destination does not exist.
def _route p
  if p.room.nil? or p.room==path
    # local delivery: look the door up in this room
    if door = @iotas[p.door]
      p.dst_routed! door
    else
      p.error! Edoors::ERROR_ROUTE_RRWD
    end
  elsif door = @spin.search_world(p.room+Edoors::PATH_SEP+p.door)
    # remote delivery: resolve the full path through the spin
    p.dst_routed! door
  else
    p.error! Edoors::ERROR_ROUTE_DNE
  end
end
ruby
{ "resource": "" }
q8280
Edoors.Room._send
train
# Core routing for particle +p+; +sys+ selects system routing rules.
# Pointer destinations pass through untouched, path destinations are
# resolved via _route, destination-less user particles boomerang back to
# their source or fall back to registered links, and anything still
# unroutable is flagged with the appropriate error code.
def _send p, sys=false
  if not sys and p.src.nil?
    # do not route non system orphan particles !!
    p.error! Edoors::ERROR_ROUTE_NS, @spin
  elsif p.dst
    # direct routing through pointer
    return
  elsif p.door
    # direct routing through path
    _route p
  elsif p.next_dst
    # consume the next queued destination and try again
    p.split_dst!
    if p.door
      _route p
    elsif not sys
      # boomerang
      p.dst_routed! p.src
    elsif p.action
      # system particle with only an action: deliver to the spin itself
      p.dst_routed! @spin
    end
  elsif not sys and _try_links p
    return
  else
    p.error!( sys ? Edoors::ERROR_ROUTE_SND : Edoors::ERROR_ROUTE_NDNL)
  end
end
ruby
{ "resource": "" }
q8281
Edoors.Room.send_p
train
# Routes a user particle through this room and hands it to the spin for
# delivery, tracing the routing decision when debug_routing is on.
def send_p p
  if @spin.debug_routing
    puts " * send_p #{(p.next_dst.nil? ? 'no dst' : p.next_dst)} ..."
  end
  _send p
  if @spin.debug_routing
    puts " -> #{p.dst.path}#{Edoors::ACT_SEP}#{p.action}"
  end
  @spin.post_p p
end
ruby
{ "resource": "" }
q8282
Edoors.Room.send_sys_p
train
# Routes a system particle (system routing rules) through this room and
# hands it to the spin's system queue, tracing when debug_routing is on.
def send_sys_p p
  if @spin.debug_routing
    puts " * send_sys_p #{(p.next_dst.nil? ? 'no dst' : p.next_dst)} ..."
  end
  _send p, true
  if @spin.debug_routing
    puts " -> #{p.dst.path}#{Edoors::ACT_SEP}#{p.action}"
  end
  @spin.post_sys_p p
end
ruby
{ "resource": "" }
q8283
Edoors.Room.process_sys_p
train
# Handles system particles addressed to this room (adding links or
# child rooms), then releases the particle back to the spin.
def process_sys_p p
  case p.action
  when Edoors::SYS_ACT_ADD_LINK
    add_link Edoors::Link.from_particle p
  when Edoors::SYS_ACT_ADD_ROOM
    Edoors::Room.from_particle p, self
  end
  @spin.release_p p
end
ruby
{ "resource": "" }
q8284
Xyml.Document.out_XML
train
# Writes this document as XML to +io+ and closes the stream. With
# +indent+ the DOM writer pretty-prints at that indent width; without
# it, the XML is emitted in one piece followed by a newline.
def out_XML io,indent=nil
  dom = Xyml.rawobj2domobj(@root)
  if indent
    dom.write(io, indent.to_i)
  else
    buffer = StringIO.new
    dom.write(buffer)
    buffer.rewind
    io.print buffer.read, "\n"
  end
  io.close
end
ruby
{ "resource": "" }
q8285
Xyml.Document.load_XYML
train
# Loads an XYML document from +io+: parses it as YAML, converts the
# first root entry into the element tree, and replaces this document's
# content with that root. The stream is closed afterwards.
def load_XYML io
  # NOTE(review): YAML.load can instantiate arbitrary Ruby objects; if
  # +io+ may come from untrusted sources, consider YAML.safe_load.
  raw_yaml=YAML.load(io)
  @root=Xyml.rawobj2element raw_yaml[0]
  self.clear.push @root
  io.close
end
ruby
{ "resource": "" }
q8286
Xyml.Document.out_JSON
train
# Serializes this document (with parent back-references stripped) as
# JSON to +io+, then closes the stream.
def out_JSON io
  io.print JSON.generate(Xyml.remove_parent_rcsv(self))
  io.close
end
ruby
{ "resource": "" }
q8287
Raca.Account.public_endpoint
train
# Returns the public URL for the named service, optionally narrowed to
# a region (case-insensitive region code). The identity service always
# resolves to IDENTITY_URL.
#
# Raises ArgumentError when the service exists in multiple regions and
# no region was given, or when no matching endpoint is found.
def public_endpoint(service_name, region = nil)
  return IDENTITY_URL if service_name == "identity"

  endpoints = service_endpoints(service_name)
  if endpoints.size > 1 && region
    region = region.to_s.upcase
    # Array#select never returns nil, so the old `|| {}` fallback was
    # dead code (and of the wrong type); an empty result falls through
    # to the "No matching services found" error below.
    endpoints = endpoints.select { |e| e["region"] == region }
  elsif endpoints.size > 1 && region.nil?
    raise ArgumentError, "The requested service exists in multiple regions, please specify a region code"
  end

  if endpoints.empty?
    raise ArgumentError, "No matching services found"
  else
    endpoints.first["publicURL"]
  end
end
ruby
{ "resource": "" }
q8288
Raca.Account.refresh_cache
train
# Authenticates against the identity API over HTTPS using the account's
# username/API key and caches the returned service catalog JSON under
# cache_key. Delegates non-2xx responses to raise_on_error.
def refresh_cache
  # Raca::HttpClient depends on Raca::Account, so we intentionally don't use it here
  # to avoid a circular dependency
  Net::HTTP.new(identity_host, 443).tap {|http|
    http.use_ssl = true
  }.start {|http|
    payload = {
      auth: {
        'RAX-KSKEY:apiKeyCredentials' => {
          username: @username,
          apiKey: @key
        }
      }
    }
    response = http.post(
      tokens_path,
      JSON.dump(payload),
      {'Content-Type' => 'application/json'},
    )
    if response.is_a?(Net::HTTPSuccess)
      # cache the parsed service catalog for later endpoint lookups
      cache_write(cache_key, JSON.load(response.body))
    else
      raise_on_error(response)
    end
  }
end
ruby
{ "resource": "" }
q8289
Raca.Account.extract_value
train
# Digs through nested indexable structures (hashes, arrays, anything
# responding to #[]) following +keys+ in order. Returns the value at
# the end of the path, or nil as soon as a step is missing/falsy or the
# current node is not indexable. With no keys, returns +data+ itself.
def extract_value(data, *keys)
  if keys.empty?
    data
  elsif data.respond_to?(:[]) && data[keys.first]
    # drop(1) recurses on the remaining keys; the previous
    # keys.slice(1, 100) silently truncated paths longer than 101 keys
    extract_value(data[keys.first], *keys.drop(1))
  else
    nil
  end
end
ruby
{ "resource": "" }
q8290
Slackdraft.Message.generate_payload
train
# Assembles the Slack webhook payload hash, including only the fields
# that have actually been set on this message (attachments only when
# non-empty).
def generate_payload
  payload = {}
  payload[:channel]     = channel     unless channel.nil?
  payload[:username]    = username    unless username.nil?
  payload[:icon_url]    = icon_url    unless icon_url.nil?
  payload[:icon_emoji]  = icon_emoji  unless icon_emoji.nil?
  payload[:text]        = text        unless text.nil?
  payload[:attachments] = attachments unless attachments.empty?
  payload
end
ruby
{ "resource": "" }
q8291
HTTPAccess2.Client.set_basic_auth
train
# Registers basic-auth credentials for the given URI and resets all
# cached sessions so the new credentials take effect on the next
# request.
def set_basic_auth(uri, user, passwd)
  target = urify(uri)
  @www_auth.basic_auth.set(target, user, passwd)
  reset_all
end
ruby
{ "resource": "" }
q8292
HTTPAccess2.SSLConfig.set_client_cert_file
train
# Loads the client certificate and private key from the given PEM files
# and notifies listeners that the SSL configuration changed.
def set_client_cert_file(cert_file, key_file)
  # File.read closes the handle immediately; the previous
  # File.open(...).read leaked the descriptor until GC.
  @client_cert = OpenSSL::X509::Certificate.new(File.read(cert_file))
  @client_key = OpenSSL::PKey::RSA.new(File.read(key_file))
  change_notify
end
ruby
{ "resource": "" }
q8293
HTTPAccess2.SSLConfig.set_context
train
# Copies this configuration onto the given SSL context: certificate
# store, peer-verification settings, client cert/key and chain, plus
# timeout, protocol options and cipher list.
def set_context(ctx)
  # Verification: Use Store#verify_callback instead of SSLContext#verify*?
  ctx.cert_store = @cert_store
  ctx.verify_mode = @verify_mode
  ctx.verify_depth = @verify_depth if @verify_depth
  # fall back to the built-in callback when none was configured
  ctx.verify_callback = @verify_callback || method(:default_verify_callback)
  # SSL config
  ctx.cert = @client_cert
  ctx.key = @client_key
  ctx.client_ca = @client_ca
  ctx.timeout = @timeout
  ctx.options = @options
  ctx.ciphers = @ciphers
end
ruby
{ "resource": "" }
q8294
HTTPAccess2.BasicAuth.set
train
# Stores Base64-encoded "user:passwd" credentials: as the default
# credential when +uri+ is nil, otherwise keyed by the URI's directory.
def set(uri, user, passwd)
  encoded = ["#{user}:#{passwd}"].pack('m').tr("\n", '')
  if uri.nil?
    @cred = encoded
  else
    @auth[Util.uri_dirname(uri)] = encoded
  end
end
ruby
{ "resource": "" }
q8295
HTTPAccess2.Session.connect
train
# Establishes the transport socket to the destination (or to the proxy
# when one is configured), wrapping it in SSL — with an optional proxy
# CONNECT — for https destinations, then enters the :WAIT state.
#
# On connect timeout the attempt is retried: @connect_retry == 0 means
# retry forever, otherwise at most @connect_retry attempts are made
# before the socket is closed and the error re-raised.
def connect
  site = @proxy || @dest
  # The retry counter must be initialized OUTSIDE the begin block:
  # `retry` re-executes the begin body, and the previous code reset the
  # counter there, which made the @connect_retry limit a no-op
  # (infinite retries on persistent timeouts).
  retry_number = 0
  begin
    timeout(@connect_timeout) do
      @socket = create_socket(site)
      begin
        @src.host = @socket.addr[3]
        @src.port = @socket.addr[1]
      rescue SocketError
        # to avoid IPSocket#addr problem on Mac OS X 10.3 + ruby-1.8.1.
        # cf. [ruby-talk:84909], [ruby-talk:95827]
      end
      if @dest.scheme == 'https'
        @socket = create_ssl_socket(@socket)
        connect_ssl_proxy(@socket) if @proxy
        @socket.ssl_connect
        @socket.post_connection_check(@dest)
        @ssl_peer_cert = @socket.peer_cert
      end
      # Use Ruby internal buffering instead of passing data immediatly
      # to the underlying layer
      # => we need to to call explicitely flush on the socket
      @socket.sync = @socket_sync
    end
  rescue TimeoutError
    if @connect_retry == 0
      retry
    else
      retry_number += 1
      retry if retry_number < @connect_retry
    end
    close
    raise
  end
  @state = :WAIT
  @readbuf = ''
end
ruby
{ "resource": "" }
q8296
HTTPAccess2.Session.read_header
train
# Reads and parses the response status line and headers from the
# socket, draining any pending body data first. Extracts
# Content-Length / chunked transfer-encoding state and the keep-alive
# disposition, and short-circuits the body for HEAD requests.
# Returns [version, status, reason].
def read_header
  if @state == :DATA
    # drain the remainder of the previous response body
    get_data {}
    check_state()
  end
  unless @state == :META
    raise InvalidState, 'state != :META'
  end
  parse_header(@socket)
  @content_length = nil
  @chunked = false
  @headers.each do |line|
    case line
    when /^Content-Length:\s+(\d+)/i
      @content_length = $1.to_i
    when /^Transfer-Encoding:\s+chunked/i
      @chunked = true
      @content_length = true # how?  (truthy sentinel: real length unknown for chunked bodies)
      @chunk_length = 0
    when /^Connection:\s+([\-\w]+)/i, /^Proxy-Connection:\s+([\-\w]+)/i
      case $1
      when /^Keep-Alive$/i
        @next_connection = true
      when /^close$/i
        @next_connection = false
      end
    else
      # Nothing to parse.
    end
  end

  # Head of the request has been parsed.
  @state = :DATA
  req = @requests.shift

  if req.header.request_method == 'HEAD'
    # HEAD responses carry no body; recycle or close the connection now
    @content_length = 0
    if @next_connection
      @state = :WAIT
    else
      close
    end
  end
  # without a known (or chunked) length we cannot keep the connection
  @next_connection = false unless @content_length

  return [@version, @status, @reason]
end
ruby
{ "resource": "" }
q8297
Mongolicious.Storage.upload
train
# Uploads the archive at +path+ to the given bucket/key as a private
# tar object.
def upload(bucket, key, path)
  Mongolicious.logger.info("Uploading archive to #{key}")

  # Block form guarantees the file handle is closed once the upload
  # returns; the previous bare File.open leaked it until GC.
  File.open(path, 'r') do |file|
    @con.put_object(
      bucket, key, file,
      {'x-amz-acl' => 'private', 'Content-Type' => 'application/x-tar'}
    )
  end
end
ruby
{ "resource": "" }
q8298
Mongolicious.Storage.upload_part
train
# Uploads one chunk of a multipart upload and returns the ETag reported
# for that part (needed later to complete the upload).
def upload_part(bucket, key, upload_id, part_number, data)
  @con.upload_part(bucket, key, upload_id, part_number, data).headers['ETag']
end
ruby
{ "resource": "" }
q8299
Mongolicious.Storage.complete_multipart_upload
train
# Finalizes a multipart upload from the collected part ETags and
# returns the provider's response.
def complete_multipart_upload(bucket, key, upload_id, parts)
  @con.complete_multipart_upload(bucket, key, upload_id, parts)
end
ruby
{ "resource": "" }