_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q23000
Dradis::Plugins::ContentService.Issues.issue_cache
train
# Memoized lookup table of all issues, keyed by "<plugin>-<plugin_id>"
# (joined from each issue's fields); built once from all_issues.
def issue_cache @issue_cache ||= begin issues_map = all_issues.map do |issue| cache_key = [ issue.fields['plugin'], issue.fields['plugin_id'] ].join('-') [cache_key, issue] end Hash[issues_map] end end
ruby
{ "resource": "" }
q23001
Bunto.Renderer.render_liquid
train
# Parses `content` through the site's Liquid renderer (cached per `path`),
# logs any template warnings, then renders with the given payload/info.
# The rubocop pragma marks a deliberate rescue of Exception: Liquid errors
# are logged with file context before being re-raised.
def render_liquid(content, payload, info, path = nil) template = site.liquid_renderer.file(path).parse(content) template.warnings.each do |e| Bunto.logger.warn "Liquid Warning:", LiquidRenderer.format_error(e, path || document.relative_path) end template.render!(payload, info) # rubocop: disable RescueException rescue Exception => e Bunto.logger.error "Liquid Exception:", LiquidRenderer.format_error(e, path || document.relative_path) raise e end
ruby
{ "resource": "" }
q23002
Bunto.Renderer.place_in_layouts
train
# Wraps rendered content in its layout chain: renders each layout with the
# accumulated output as payload["content"], deep-merging layout data into
# payload["layout"] (reset to nil first). A Set of used layouts breaks
# cycles, and each layout file is registered as a regeneration dependency
# of writable documents.
def place_in_layouts(content, payload, info) output = content.dup layout = layouts[document.data["layout"]] Bunto.logger.warn( "Build Warning:", "Layout '#{document.data["layout"]}' requested in "\ "#{document.relative_path} does not exist." ) if invalid_layout? layout used = Set.new([layout]) # Reset the payload layout data to ensure it starts fresh for each page. payload["layout"] = nil while layout payload["content"] = output payload["layout"] = Utils.deep_merge_hashes(layout.data, payload["layout"] || {}) output = render_liquid( layout.content, payload, info, layout.relative_path ) # Add layout to dependency tree site.regenerator.add_dependency( site.in_source_dir(document.path), site.in_source_dir(layout.path) ) if document.write? if (layout = layouts[layout.data["layout"]]) break if used.include?(layout) used << layout end end output end
ruby
{ "resource": "" }
q23003
Bunto.Site.reset
train
# Resets per-build site state: site time (from config or Time.now),
# layouts, pages, static files, data, collections, and the regenerator /
# Liquid renderer caches; rejects a negative limit_posts and fires the
# :after_reset site hook.
def reset if config["time"] self.time = Utils.parse_date(config["time"].to_s, "Invalid time in _config.yml.") else self.time = Time.now end self.layouts = {} self.pages = [] self.static_files = [] self.data = {} @collections = nil @regenerator.clear_cache @liquid_renderer.reset if limit_posts < 0 raise ArgumentError, "limit_posts must be a non-negative number" end Bunto::Hooks.trigger :site, :after_reset, self end
ruby
{ "resource": "" }
q23004
Bunto.Site.setup
train
# One-time site setup: guards against source/dest nesting, loads plugins,
# and instantiates all converter and generator subclasses.
def setup ensure_not_in_dest plugin_manager.conscientious_require self.converters = instantiate_subclasses(Bunto::Converter) self.generators = instantiate_subclasses(Bunto::Generator) end
ruby
{ "resource": "" }
q23005
Bunto.Site.ensure_not_in_dest
train
# Raises a FatalException when the destination directory is, or contains,
# the source directory (checked by walking the source path's ancestors).
def ensure_not_in_dest dest_pathname = Pathname.new(dest) Pathname.new(source).ascend do |path| if path == dest_pathname raise( Errors::FatalException, "Destination directory cannot be or contain the Source directory." ) end end end
ruby
{ "resource": "" }
q23006
Bunto.Site.generate
train
# Runs every registered generator against the site, logging each
# generator's wall-clock duration at debug level.
def generate generators.each do |generator| start = Time.now generator.generate(self) Bunto.logger.debug "Generating:", "#{generator.class} finished in #{Time.now - start} seconds." end end
ruby
{ "resource": "" }
q23007
Bunto.Site.render
train
# Renders all documents, then all pages, with :pre_render/:post_render
# site hooks fired around the work and a deprecation pass first.
def render relative_permalinks_are_deprecated payload = site_payload Bunto::Hooks.trigger :site, :pre_render, self, payload render_docs(payload) render_pages(payload) Bunto::Hooks.trigger :site, :post_render, self, payload end
ruby
{ "resource": "" }
q23008
Bunto.Page.dir
train
# Directory portion of this page's URL, always ending in a slash.
# A URL that already ends with "/" is returned as-is; otherwise the
# dirname is taken and a trailing slash appended when missing.
def dir
  return url if url.end_with?(FORWARD_SLASH)

  base = File.dirname(url)
  base.end_with?(FORWARD_SLASH) ? base : "#{base}/"
end
ruby
{ "resource": "" }
q23009
Bunto.Convertible.read_yaml
train
# Reads a file's YAML front matter and body content.
#
# base - the base directory; name - the file name; opts - file read opts
# merged with the site's defaults.
#
# Sets self.content (body after the front-matter block) and self.data
# (parsed front matter), validates both against the filename, and returns
# self.data (defaulted to {} on any read/parse failure).
def read_yaml(base, name, opts = {})
  filename = File.join(base, name)
  begin
    self.content = File.read(@path || site.in_source_dir(base, name),
                             Utils.merged_file_read_opts(site, opts))
    if content =~ Document::YAML_FRONT_MATTER_REGEXP
      self.content = $POSTMATCH
      self.data = SafeYAML.load(Regexp.last_match(1))
    end
  rescue SyntaxError => e
    # BUGFIX: messages previously contained a garbled "#(unknown)" where the
    # computed filename was meant to be interpolated.
    Bunto.logger.warn "YAML Exception reading #{filename}: #{e.message}"
  rescue => e
    Bunto.logger.warn "Error reading file #{filename}: #{e.message}"
  end

  self.data ||= {}

  validate_data! filename
  validate_permalink! filename

  self.data
end
ruby
{ "resource": "" }
q23010
Bunto.Filters.sassify
train
# Liquid filter: renders a Sass string to CSS using the site's registered
# Sass converter instance.
def sassify(input)
  @context.registers[:site]
          .find_converter_instance(Bunto::Converters::Sass)
          .convert(input)
end
ruby
{ "resource": "" }
q23011
Bunto.Filters.scssify
train
# Liquid filter: renders an SCSS string to CSS using the site's registered
# SCSS converter instance.
def scssify(input)
  @context.registers[:site]
          .find_converter_instance(Bunto::Converters::Scss)
          .convert(input)
end
ruby
{ "resource": "" }
q23012
ValidateWebsite.Crawl.extract_imgs_from_page
train
# Collects the set of absolute URIs for every <img> with a src attribute
# on a non-redirect page (redirects yield an empty Set).
# NOTE(review): URI.encode was deprecated and removed in Ruby 3.0 —
# confirm the supported Ruby versions for this gem.
def extract_imgs_from_page(page) return Set[] if page.is_redirect? page.doc.search('//img[@src]').reduce(Set[]) do |result, elem| u = elem.attributes['src'].content result << page.to_absolute(URI.parse(URI.encode(u))) end end
ruby
{ "resource": "" }
q23013
Geos.GeometryCollection.each
train
# Yields each member geometry in index order when a block is given,
# returning self; without a block, returns an Enumerator over the
# member geometries.
def each
  unless block_given?
    return num_geometries.times.collect { |i| get_geometry_n(i) }.to_enum
  end

  num_geometries.times { |i| yield get_geometry_n(i) }
  self
end
ruby
{ "resource": "" }
q23014
Bunto.Collection.read
train
# Reads every filtered entry in the collection directory: files with a
# YAML header become documents, everything else a static file;
# directories are skipped and docs are sorted at the end.
def read filtered_entries.each do |file_path| full_path = collection_dir(file_path) next if File.directory?(full_path) if Utils.has_yaml_header? full_path read_document(full_path) else read_static_file(file_path, full_path) end end docs.sort! end
ruby
{ "resource": "" }
q23015
IndexAsCalendar.DSL.index_as_calendar
train
# ActiveAdmin DSL entry point: registers a fullCalendar-based index view.
# Caller options are deep-merged over the documented defaults; an
# event_mapping helper is defined on the controller that maps records to
# event hashes (via the caller's block, or a default id/title/start/end
# mapping). With :ajax (default) a JSON collection_action filters records
# by the :start_date field over params[:start]...params[:end] plus ransack;
# otherwise events are computed from the collection at render time.
def index_as_calendar( options={}, &block ) default_options = { :ajax => true, # Use AJAX to fetch events. Set to false to send data during render. :model => nil, # Model to be used to fetch events. Defaults to ActiveAdmin resource model. :includes => [], # Eager loading of related models :start_date => :created_at, # Field to be used as start date for events :end_date => nil, # Field to be used as end date for events :block => block, # Block with the model<->event field mappings :fullCalendarOptions => nil, # fullCalendar options to be sent upon initialization :default => false # Set this index view as default } options = default_options.deep_merge(options) # Defines controller for event_mapping model items to events controller do def event_mapping( items, options ) events = items.map do |item| if !options[:block].blank? instance_exec(item, &options[:block]) else { :id => item.id, :title => item.to_s, :start => (options[:start_date].blank? or item.send(options[:start_date]).blank?) ? Date.today.to_s : item.send(options[:start_date]), :end => (options[:end_date].blank? or item.send(options[:end_date]).blank?) ? nil : item.send(options[:end_date]) } end end end end # Setup AJAX if options[:ajax] # Setup fullCalendar to use AJAX calls to retrieve event data index as: :calendar, default: options[:default] do |context| context[:fullCalendarOptions] = options[:fullCalendarOptions] events = { url: "#{collection_path()}/index_as_events.json", type: 'GET', data: params } end # Defines collection_action to get events data collection_action :index_as_events, :method => :get do items = options[:model] || end_of_association_chain items = items.send(params[:scope]) if params[:scope].present? items = items.includes(options[:includes]) unless options[:includes].blank? 
items = items.where(options[:start_date] => params[:start].to_date...params[:end].to_date).ransack(params[:q]).result events = event_mapping(items, options) respond_to do |format| format.json { render :json => events } end end # Return events to be used during partial render else index as: :calendar, default: options[:default] do |context| context[:fullCalendarOptions] = options[:fullCalendarOptions] events = self.controller.event_mapping(context[:collection], options) end end end
ruby
{ "resource": "" }
q23016
Geos.Geometry.union
train
# Union with another geometry when `geom` is given (SRID picked from the
# pair); without an argument falls back to unary_union when available,
# otherwise union_cascaded.
def union(geom = nil) if geom check_geometry(geom) cast_geometry_ptr(FFIGeos.GEOSUnion_r(Geos.current_handle_pointer, ptr, geom.ptr), srid_copy: pick_srid_from_geoms(srid, geom.srid)) else if respond_to?(:unary_union) unary_union else union_cascaded end end end
ruby
{ "resource": "" }
q23017
Geos.Geometry.relate_pattern
train
# True when the DE-9IM intersection matrix of self and `geom` matches the
# given pattern string (delegates to GEOSRelatePattern).
def relate_pattern(geom, pattern) check_geometry(geom) bool_result(FFIGeos.GEOSRelatePattern_r(Geos.current_handle_pointer, ptr, geom.ptr, pattern)) end
ruby
{ "resource": "" }
q23018
Roboto.ContentProvider.contents
train
# Lazily reads and memoizes the file body at `path`. Files with an .erb
# extension are rendered through ERB, evaluated against `custom_binding`
# when supplied (the local binding otherwise).
def contents(custom_binding = nil)
  return @contents unless @contents.nil?

  @contents = File.read(path)
  if path.extname == '.erb'
    @contents = ERB.new(@contents, nil, '>').result(custom_binding || binding)
  end
  @contents
end
ruby
{ "resource": "" }
q23019
Bunto.Regenerator.regenerate?
train
# Dispatches the regeneration decision by document type (Page / Document
# have dedicated checks); anything else is regenerated when its source
# was modified or its destination (if it exposes one) is missing.
def regenerate?(document) case document when Page regenerate_page?(document) when Document regenerate_document?(document) else source_path = document.respond_to?(:path) ? document.path : nil dest_path = if document.respond_to?(:destination) document.destination(@site.dest) end source_modified_or_dest_missing?(source_path, dest_path) end end
ruby
{ "resource": "" }
q23020
Bunto.Regenerator.read_metadata
train
# Loads the regenerator metadata file: tries Marshal first, falls back to
# SafeYAML on TypeError, and defaults to {} when disabled, absent, or
# unreadable (logging a warning on ArgumentError).
def read_metadata @metadata = if !disabled? && File.file?(metadata_file) content = File.binread(metadata_file) begin Marshal.load(content) rescue TypeError SafeYAML.load(content) rescue ArgumentError => e Bunto.logger.warn("Failed to load #{metadata_file}: #{e}") {} end else {} end end
ruby
{ "resource": "" }
q23021
Bunto.FrontmatterDefaults.applies?
train
# A front-matter-defaults scope applies when both its path and its type
# constraints match the given path/type.
def applies?(scope, path, type) applies_path?(scope, path) && applies_type?(scope, type) end
ruby
{ "resource": "" }
q23022
Phcscriptcdn.Script::VersionsController.show
train
# Loads one script version by friendly id plus its recorded version
# history rows for the show view.
def show @script_versions = Script::Version.friendly.find(params[:id]) @versions = Phcscriptcdn::ScriptversionVersions.where(item_id: params[:id], item_type: 'Phcscriptcdn::Script::Version') end
ruby
{ "resource": "" }
q23023
Phcscriptcdn.Script::VersionsController.new
train
# Builds a blank version pre-stamped with the current user's user/org ids.
def new @script_version = Script::Version.new @script_version.user_id = current_user.id @script_version.org_id = current_user.org_id end
ruby
{ "resource": "" }
q23024
Phcscriptcdn.Script::VersionsController.create
train
# Creates a version from permitted params, stamped with the current
# user's ids; redirects on success, re-renders :new on validation failure.
def create @script_version = Script::Version.new(script_version_params) @script_version.user_id = current_user.id @script_version.org_id = current_user.org_id if @script_version.save redirect_to script_versions_url, notice: 'Version was successfully created.' else render :new end end
ruby
{ "resource": "" }
q23025
Nightwing.Metric.for
train
# Builds a dotted metric name from namespace, queue and (optionally) the
# worker, whose class path is underscored with "/" flattened to "_".
def for(queue:, worker: nil)
  parts = [namespace, queue]
  parts << worker.to_s.underscore.tr("/", "_") if worker
  parts.compact.join(".")
end
ruby
{ "resource": "" }
q23026
Phcscriptcdn.Script::UrlsController.show
train
# Loads a URL by friendly id scoped under its parent listing.
def show script_listing = Script::Listing.find(params[:listing_id]) @script_url = script_listing.urls.friendly.find(params[:id]) end
ruby
{ "resource": "" }
q23027
Phcscriptcdn.Script::UrlsController.new
train
# Builds a new URL under its parent listing, pre-stamped with the current
# user's user/org ids.
def new script_listing = Script::Listing.find(params[:listing_id]) @script_url = script_listing.urls.build @script_url.user_id = current_user.id @script_url.org_id = current_user.org_id end
ruby
{ "resource": "" }
q23028
Phcscriptcdn.Script::UrlsController.edit
train
# Loads a URL for editing, scoped under its parent listing.
def edit script_listing = Script::Listing.find(params[:listing_id]) @script_url = script_listing.urls.find(params[:id]) end
ruby
{ "resource": "" }
q23029
Phcscriptcdn.Script::UrlsController.create
train
# Creates a URL under its parent listing.
#
# BUGFIX: previously used `urls.create(...)`, which persisted the record
# immediately — *before* user_id/org_id were assigned — and then saved a
# second time. Building first means the ownership fields are set on the
# single save. Also corrects the copy-pasted 'Author' flash message.
def create
  @script_listing = Script::Listing.find(params[:listing_id])
  @script_url = @script_listing.urls.build(script_url_params)
  @script_url.user_id = current_user.id
  @script_url.org_id = current_user.org_id
  if @script_url.save
    redirect_to script_listing_urls_path, notice: 'Url was successfully created.'
  else
    render :new
  end
end
ruby
{ "resource": "" }
q23030
Phcscriptcdn.Script::UrlsController.destroy
train
# Destroys a URL scoped under its parent listing and redirects back to
# the listing's URL index.
# BUGFIX: the flash message said 'Author' — copy-pasted from the Authors
# controller — on the Urls controller.
def destroy
  @script_listing = Script::Listing.find(params[:listing_id])
  @script_url = @script_listing.urls.find(params[:id])
  @script_url.destroy
  redirect_to script_listing_urls_path, notice: 'Url was successfully destroyed.'
end
ruby
{ "resource": "" }
q23031
Phcscriptcdn.Script::ListingsController.show
train
# Loads one listing by friendly id plus its recorded version history rows.
def show @script_listings = Script::Listing.friendly.find(params[:id]) @versions = Phcscriptcdn::ListingVersions.where(item_id: params[:id], item_type: 'Phcscriptcdn::Script::Listing') end
ruby
{ "resource": "" }
q23032
Phcscriptcdn.Script::ListingsController.create
train
# Creates a listing from permitted params, stamped with the current
# user's ids; redirects on success, re-renders :new on failure.
def create @script_listing = Script::Listing.new(script_listing_params) @script_listing.user_id = current_user.id @script_listing.org_id = current_user.org_id if @script_listing.save redirect_to script_listings_path, notice: 'Listing was successfully created.' else render :new end end
ruby
{ "resource": "" }
q23033
RubyManta.MantaClient.put_object
train
# PUTs `data` to an object path with signed + CORS headers, optional
# positive Durability-Level and String Content-Type; returns
# [true, headers] on 204/304, otherwise raises via raise_error.
def put_object(obj_path, data, opts = {}) url = obj_url(obj_path) opts[:data] = data headers = gen_headers(opts) cors_headers = gen_cors_headers(opts) headers = headers.concat(cors_headers) durability_level = opts[:durability_level] if durability_level raise ArgumentError unless durability_level > 0 headers.push([ 'Durability-Level', durability_level ]) end content_type = opts[:content_type] if content_type raise ArgumentError unless content_type.is_a? String headers.push([ 'Content-Type', content_type ]) end attempt(opts[:attempts]) do result = @client.put(url, data, headers) raise unless result.is_a? HTTP::Message return true, result.headers if [204, 304].include? result.status raise_error(result) end end
ruby
{ "resource": "" }
q23034
RubyManta.MantaClient.get_object
train
# GETs (or HEADs with :head) an object. On 200, verifies the body against
# the Content-MD5 header (CorruptResult on mismatch) and returns
# [body, headers]; on 304 returns [nil, headers].
def get_object(obj_path, opts = {}) url = obj_url(obj_path) headers = gen_headers(opts) attempt(opts[:attempts]) do method = opts[:head] ? :head : :get result = @client.send(method, url, nil, headers) raise unless result.is_a? HTTP::Message if result.status == 200 return true, result.headers if method == :head sent_md5 = result.headers['Content-MD5'] received_md5 = Digest::MD5.base64digest(result.body) raise CorruptResult if sent_md5 != received_md5 return result.body, result.headers elsif result.status == 304 return nil, result.headers end raise_error(result) end end
ruby
{ "resource": "" }
q23035
RubyManta.MantaClient.delete_object
train
# DELETEs an object; returns [true, headers] on 204, otherwise raises
# via raise_error.
def delete_object(obj_path, opts = {}) url = obj_url(obj_path) headers = gen_headers(opts) attempt(opts[:attempts]) do result = @client.delete(url, nil, headers) raise unless result.is_a? HTTP::Message return true, result.headers if result.status == 204 raise_error(result) end end
ruby
{ "resource": "" }
q23036
RubyManta.MantaClient.put_directory
train
# Creates a directory via PUT with the JSON type=directory content type
# and any CORS headers; returns [true, headers] on 204.
def put_directory(dir_path, opts = {}) url = obj_url(dir_path) headers = gen_headers(opts) headers.push([ 'Content-Type', 'application/json; type=directory' ]) cors_headers = gen_cors_headers(opts) headers = headers.concat(cors_headers) attempt(opts[:attempts]) do result = @client.put(url, nil, headers) raise unless result.is_a? HTTP::Message return true, result.headers if result.status == 204 raise_error(result) end end
ruby
{ "resource": "" }
q23037
RubyManta.MantaClient.list_directory
train
# Lists a directory as an array of parsed JSON entries. Supports :limit
# (1..MAX_LIMIT), a String :marker for paging, and :head. Verifies the
# directory-stream content type and that the server honored the limit
# (CorruptResult otherwise).
def list_directory(dir_path, opts = {}) url = obj_url(dir_path) headers = gen_headers(opts) query_parameters = {} limit = opts[:limit] || MAX_LIMIT raise ArgumentError unless 0 < limit && limit <= MAX_LIMIT query_parameters[:limit] = limit marker = opts[:marker] if marker raise ArgumentError unless marker.is_a? String query_parameters[:marker] = marker end attempt(opts[:attempts]) do method = opts[:head] ? :head : :get result = @client.send(method, url, query_parameters, headers) raise unless result.is_a? HTTP::Message if result.status == 200 raise unless result.headers['Content-Type'] == 'application/x-json-stream; type=directory' return true, result.headers if method == :head json_chunks = result.body.split("\n") if json_chunks.size > limit raise CorruptResult end dir_entries = json_chunks.map { |i| JSON.parse(i) } return dir_entries, result.headers end raise_error(result) end end
ruby
{ "resource": "" }
q23038
RubyManta.MantaClient.find
train
# Recursively walks dir_path, returning the full paths of all objects,
# optionally filtered by :regex against each object's name. Returns []
# when the directory does not exist (HEAD probe fails). :head is stripped
# from opts because this method alternates GET/HEAD internally.
def find(dir_path, opts = {}) regex = opts.key?(:regex) ? opts[:regex] : nil # We should always be doing GET because switching between methods is used # within this function. opts.delete(:head) begin exists = list_directory(dir_path, head: true).first rescue exists = false end return [] unless exists response = list_directory(dir_path, opts) listing = response.first listing.inject([]) do |memo, obj| if obj['type'] == 'directory' sub_dir = "#{dir_path}/#{obj['name']}" sub_search = find(sub_dir, regex: regex) memo.push(*sub_search) end if obj['type'] == 'object' file = "#{dir_path}/#{obj['name']}" if !regex || obj['name'].match(regex) memo.push file end end memo end end
ruby
{ "resource": "" }
q23039
RubyManta.MantaClient.delete_directory
train
# DELETEs a directory; returns [true, headers] on 204, otherwise raises
# via raise_error.
def delete_directory(dir_path, opts = {}) url = obj_url(dir_path) headers = gen_headers(opts) attempt(opts[:attempts]) do result = @client.delete(url, nil, headers) raise unless result.is_a? HTTP::Message return true, result.headers if result.status == 204 raise_error(result) end end
ruby
{ "resource": "" }
q23040
RubyManta.MantaClient.put_snaplink
train
# Creates a snaplink: PUTs to link_path with a JSON type=link content
# type and a Location header pointing at orig_path; true on 204.
def put_snaplink(orig_path, link_path, opts = {}) headers = gen_headers(opts) headers.push([ 'Content-Type', 'application/json; type=link' ], [ 'Location', obj_url(orig_path) ]) attempt(opts[:attempts]) do result = @client.put(obj_url(link_path), nil, headers) raise unless result.is_a? HTTP::Message return true, result.headers if result.status == 204 raise_error(result) end end
ruby
{ "resource": "" }
q23041
RubyManta.MantaClient.create_job
train
# POSTs a job (must contain :phases / 'phases') as JSON; on 201 returns
# [Location header, headers], raising if Location is absent.
def create_job(job, opts = {}) raise ArgumentError unless job[:phases] || job['phases'] headers = gen_headers(opts) headers.push([ 'Content-Type', 'application/json; type=job' ]) data = job.to_json attempt(opts[:attempts]) do result = @client.post(job_url(), data, headers) raise unless result.is_a? HTTP::Message if result.status == 201 location = result.headers['Location'] raise unless location return location, result.headers end raise_error(result) end end
ruby
{ "resource": "" }
q23042
RubyManta.MantaClient.get_job
train
# Fetches a job's live status (GET, or HEAD with :head). On 200 verifies
# the JSON content type and returns [parsed job, headers].
def get_job(job_path, opts = {}) url = job_url(job_path, '/live/status') headers = gen_headers(opts) attempt(opts[:attempts]) do method = opts[:head] ? :head : :get result = @client.send(method, url, nil, headers) raise unless result.is_a? HTTP::Message if result.status == 200 raise unless result.headers['Content-Type'] == 'application/json' return true, result.headers if method == :head job = JSON.parse(result.body) return job, result.headers end raise_error(result) end end
ruby
{ "resource": "" }
q23043
RubyManta.MantaClient.get_job_errors
train
# Fetches a job's live error stream and parses each newline-delimited
# JSON chunk; returns [errors, headers] on 200.
def get_job_errors(job_path, opts = {}) url = job_url(job_path, '/live/err') headers = gen_headers(opts) attempt(opts[:attempts]) do method = opts[:head] ? :head : :get result = @client.send(method, url, nil, headers) raise unless result.is_a? HTTP::Message if result.status == 200 raise unless result.headers['Content-Type'] == 'application/x-json-stream; type=job-error' return true, result.headers if method == :head json_chunks = result.body.split("\n") errors = json_chunks.map { |i| JSON.parse(i) } return errors, result.headers end raise_error(result) end end
ruby
{ "resource": "" }
q23044
RubyManta.MantaClient.cancel_job
train
# Cancels a job by POSTing an empty JSON body to live/cancel (explicit
# Content-Type/Length and Accept headers); returns [true, headers] on 202.
def cancel_job(job_path, opts = {}) url = job_url(job_path, 'live/cancel') body = '{}' opts[:data] = body headers = gen_headers(opts) headers << [ 'Accept', 'application/json' ] headers << [ 'Content-Type', 'application/json'] headers << [ 'Content-Length', body.bytesize ] args = { header: headers, body: body } attempt(opts[:attempts]) do result = @client.post(url, args) raise unless result.is_a? HTTP::Message return true, result.headers if result.status == 202 raise_error(result) end end
ruby
{ "resource": "" }
q23045
RubyManta.MantaClient.add_job_keys
train
# Adds input keys to a live job: POSTs the object paths newline-joined
# as text/plain; returns [true, headers] on 204.
def add_job_keys(job_path, obj_paths, opts = {}) url = job_url(job_path, '/live/in') headers = gen_headers(opts) headers.push([ 'Content-Type', 'text/plain' ]) data = obj_paths.join("\n") attempt(opts[:attempts]) do result = @client.post(url, data, headers) raise unless result.is_a? HTTP::Message return true, result.headers if result.status == 204 raise_error(result) end end
ruby
{ "resource": "" }
q23046
RubyManta.MantaClient.end_job_input
train
# Closes a job's input stream (POST to live/in/end); returns
# [true, headers] on 202.
def end_job_input(job_path, opts = {}) url = job_url(job_path, '/live/in/end') headers = gen_headers(opts) attempt(opts[:attempts]) do result = @client.post(url, nil, headers) raise unless result.is_a? HTTP::Message return true, result.headers if result.status == 202 raise_error(result) end end
ruby
{ "resource": "" }
q23047
RubyManta.MantaClient.list_jobs
train
# Lists jobs filtered by state (:all, :running, :done; :all sends no
# state param). Returns [parsed entries, headers] on 200, or [] for an
# empty body. HEAD support is commented out pending the Manta service.
def list_jobs(state, opts = {}) raise ArgumentError unless [:all, :running, :done].include? state state = nil if state == :all headers = gen_headers(opts) attempt(opts[:attempts]) do # method = opts[:head] ? :head : :get method = :get # until added to Manta service result = @client.send(method, job_url(), { :state => state }, headers) raise unless result.is_a? HTTP::Message if result.status == 200 # return true, result.headers if method == :head return [], result.headers if result.body.size == 0 raise unless result.headers['Content-Type'] == 'application/x-json-stream; type=job' json_chunks = result.body.split("\n") job_entries = json_chunks.map { |i| JSON.parse(i) } return job_entries, result.headers end raise_error(result) end end
ruby
{ "resource": "" }
q23048
RubyManta.MantaClient.gen_signed_url
train
# Builds a pre-signed URL: validates method(s) and path, appends expires/
# algorithm/keyId (and a combined method param when several methods are
# allowed), signs "METHOD\nhost\npath\nsorted-encoded-args" with the
# private key, and appends the base64 signature.
# NOTE(review): URI.encode was removed in Ruby 3.0 — confirm supported
# Ruby versions.
def gen_signed_url(expires, method, path, args=[]) methods = method.is_a?(Array) ? method : [method] raise ArgumentError unless (methods - [:get, :put, :post, :delete, :options]).empty? raise ArgumentError unless path =~ OBJ_PATH_REGEX key_id = '/%s/keys/%s' % [user_path, @fingerprint] args.push([ 'expires', expires.to_i ]) args.push([ 'algorithm', @digest_name ]) args.push([ 'keyId', key_id ]) method = methods.map {|m| m.to_s.upcase }.sort.join(",") host = URI.encode(@host.split('/').last) path = URI.encode(path) args.push(['method', method]) if methods.count > 1 encoded_args = args.sort.map do |key, val| # to comply with RFC 3986 CGI.escape(key.to_s) + '=' + CGI.escape(val.to_s) end.join('&') plaintext = "#{method}\n#{host}\n#{path}\n#{encoded_args}" signature = @priv_key.sign(@digest, plaintext) encoded_signature = CGI.escape(Base64.strict_encode64(signature)) host + path + '?' + encoded_args + '&signature=' + encoded_signature end
ruby
{ "resource": "" }
q23049
RubyManta.MantaClient.get_job_state_streams
train
# Fetches a job's :in/:out/:fail path stream, returning [paths, headers]
# on 200 after verifying the text/plain content type. Note the HEAD
# branch is currently unreachable: method is pinned to :get until the
# Manta service supports HEAD here.
def get_job_state_streams(type, path, opts) raise ArgumentError unless [:in, :out, :fail].include? type url = job_url(path, '/live/' + type.to_s) headers = gen_headers(opts) attempt(opts[:attempts]) do #method = opts[:head] ? :head : :get method = :get # until added to Manta service result = @client.send(method, url, nil, headers) raise unless result.is_a? HTTP::Message if result.status == 200 raise unless result.headers['Content-Type'] == 'text/plain' return true, result.headers if method == :head paths = result.body.split("\n") return paths, result.headers end raise_error(result) end end
ruby
{ "resource": "" }
q23050
RubyManta.MantaClient.job_url
train
# Builds a job URL: the job base when called with no args, otherwise the
# joined segments (first segment validated against JOB_PATH_REGEX),
# prefixed with the host and URI-encoded.
def job_url(*args) path = if args.size == 0 @job_base else raise ArgumentError unless args.first =~ JOB_PATH_REGEX args.join('/') end URI.encode(@host + path) end
ruby
{ "resource": "" }
q23051
RubyManta.MantaClient.gen_headers
train
# Builds the common request header set: Date + request signature, agent
# and version headers; conditional date headers (coerced to Time) and
# etag headers (must be Strings); an optional validated Origin; custom
# m_-prefixed headers from opts; and Content-MD5 when opts[:data] is set.
def gen_headers(opts) now = Time.now.httpdate sig = gen_signature('date: ' + now) headers = [[ 'Date', now ], [ 'Authorization', sig ], [ 'User-Agent', HTTP_AGENT ], [ 'Accept-Version', '~1.0' ]] # headers for conditional requests (dates) for arg, conditional in [[:if_modified_since, 'If-Modified-Since' ], [:if_unmodified_since, 'If-Unmodified-Since']] date = opts[arg] next unless date date = Time.parse(date.to_s) unless date.kind_of? Time headers.push([conditional, date]) end # headers for conditional requests (etags) for arg, conditional in [[:if_match, 'If-Match' ], [:if_none_match, 'If-None-Match']] etag = opts[arg] next unless etag raise ArgumentError unless etag.kind_of? String headers.push([conditional, etag]) end origin = opts[:origin] if origin raise ArgumentError unless origin == 'null' || origin =~ CORS_ORIGIN_REGEX headers.push([ 'Origin', origin ]) end custom_headers = opts.keys.select { |key| key.to_s.start_with? 'm_' } unless custom_headers.empty? headers += custom_headers.map do |header_key| [ symbol_to_header(header_key), opts[header_key] ] end end # add md5 hash when sending data data = opts[:data] if data md5 = Digest::MD5.base64digest(data) headers.push([ 'Content-MD5', md5 ]) end return headers end
ruby
{ "resource": "" }
q23052
RubyManta.MantaClient.gen_cors_headers
train
# Validates and normalizes Access-Control-* options into header pairs:
# credentials must be 'true'/'false'; allow/expose header lists are
# downcased and sorted; methods are checked against CORS_METHODS; origin
# must be '*', 'null', or match CORS_ORIGIN_REGEX; max-age a non-negative
# Integer.
def gen_cors_headers(opts) headers = [] allow_credentials = opts[:access_control_allow_credentials] if allow_credentials allow_credentials = allow_credentials.to_s raise ArgumentError unless allow_credentials == 'true' || allow_credentials == 'false' headers.push([ 'Access-Control-Allow-Credentials', allow_credentials ]) end allow_headers = opts[:access_control_allow_headers] if allow_headers raise ArgumentError unless allow_headers =~ CORS_HEADERS_REGEX allow_headers = allow_headers.split(', ').map(&:downcase).sort.join(', ') headers.push([ 'Access-Control-Allow-Headers', allow_headers ]) end allow_methods = opts[:access_control_allow_methods] if allow_methods raise ArgumentError unless allow_methods.kind_of? String unknown_methods = allow_methods.split(', ').reject do |str| CORS_METHODS.include? str end raise ArgumentError unless unknown_methods.size == 0 headers.push([ 'Access-Control-Allow-Methods', allow_methods ]) end allow_origin = opts[:access_control_allow_origin] if allow_origin raise ArgumentError unless allow_origin.kind_of? String raise ArgumentError unless allow_origin == '*' || allow_origin == 'null' || allow_origin =~ CORS_ORIGIN_REGEX headers.push([ 'Access-Control-Allow-Origin', allow_origin ]) end expose_headers = opts[:access_control_expose_headers] if expose_headers raise ArgumentError unless expose_headers =~ CORS_HEADERS_REGEX expose_headers = expose_headers.split(', ').map(&:downcase).sort.join(', ') headers.push([ 'Access-Control-Expose-Headers', expose_headers ]) end max_age = opts[:access_control_max_age] if max_age raise ArgumentError unless max_age.kind_of?(Integer) && max_age >= 0 headers.push([ 'Access-Control-Max-Age', max_age.to_s ]) end headers end
ruby
{ "resource": "" }
q23053
RubyManta.MantaClient.gen_signature
train
# Signs `data` with the client's private key and formats the result into
# the HTTP_SIGNATURE authorization template (user path, key fingerprint,
# digest name, base64 signature). Raises ArgumentError for nil data.
def gen_signature(data)
  raise ArgumentError unless data

  encoded = Base64.strict_encode64(@priv_key.sign(@digest, data))
  HTTP_SIGNATURE % [user_path, @fingerprint, @digest_name, encoded]
end
ruby
{ "resource": "" }
q23054
Phcscriptcdn.Script::AuthorsController.show
train
# Loads one author by friendly id plus its recorded version history rows.
def show @script_authors = Script::Author.friendly.find(params[:id]) @versions = Phcscriptcdn::AuthorVersions.where(item_id: params[:id], item_type: 'Phcscriptcdn::Script::Author') end
ruby
{ "resource": "" }
q23055
Phcscriptcdn.Script::AuthorsController.new
train
# Builds a blank author pre-stamped with the current user's user/org ids.
def new @script_author = Script::Author.new @script_author.user_id = current_user.id @script_author.org_id = current_user.org_id end
ruby
{ "resource": "" }
q23056
Phcscriptcdn.Script::AuthorsController.create
train
# Creates an author from permitted params, stamped with the current
# user's ids; redirects on success, re-renders :new on failure.
def create @script_author = Script::Author.new(script_author_params) @script_author.user_id = current_user.id @script_author.org_id = current_user.org_id if @script_author.save redirect_to script_authors_url, notice: 'Author was successfully created.' else render :new end end
ruby
{ "resource": "" }
q23057
Phcscriptcdn.Script::ExtensionsController.show
train
# Loads one extension by friendly id plus its recorded version history.
def show @script_extensions = Script::Extension.friendly.find(params[:id]) @versions = Phcscriptcdn::ExtensionVersions.where(item_id: params[:id], item_type: 'Phcscriptcdn::Script::Extension') end
ruby
{ "resource": "" }
q23058
Phcscriptcdn.Script::ExtensionsController.new
train
# Builds a blank extension pre-stamped with the current user's ids.
def new @script_extension = Script::Extension.new @script_extension.user_id = current_user.id @script_extension.org_id = current_user.org_id end
ruby
{ "resource": "" }
q23059
Phcscriptcdn.Script::ExtensionsController.create
train
# Creates an extension from permitted params, stamped with the current
# user's ids; redirects on success, re-renders :new on failure.
def create @script_extension = Script::Extension.new(script_extension_params) @script_extension.user_id = current_user.id @script_extension.org_id = current_user.org_id if @script_extension.save redirect_to script_extensions_url, notice: 'Extension was successfully created.' else render :new end end
ruby
{ "resource": "" }
q23060
Phcscriptcdn.Script::LicencesController.show
train
# Loads one licence by friendly id plus its recorded version history.
def show @script_licences = Script::Licence.friendly.find(params[:id]) @versions = Phcscriptcdn::LicenceVersions.where(item_id: params[:id], item_type: 'Phcscriptcdn::Script::Licence') end
ruby
{ "resource": "" }
q23061
Phcscriptcdn.Script::LicencesController.new
train
# Builds a blank licence pre-stamped with the current user's ids.
def new @script_licence = Script::Licence.new @script_licence.user_id = current_user.id @script_licence.org_id = current_user.org_id end
ruby
{ "resource": "" }
q23062
Phcscriptcdn.Script::LicencesController.create
train
# Creates a licence from permitted params, stamped with the current
# user's ids; redirects on success, re-renders :new on failure.
def create @script_licence = Script::Licence.new(script_licence_params) @script_licence.user_id = current_user.id @script_licence.org_id = current_user.org_id if @script_licence.save redirect_to script_licences_url, notice: 'Licence was successfully created.' else render :new end end
ruby
{ "resource": "" }
q23063
HydraAttribute.HydraValue.save
train
# Persists the value for an already-persisted entity: updates when this
# value row exists (returning false if nothing changed), inserts
# otherwise; then snapshots and resets dirty tracking and returns true.
def save raise EntityModelIsNotPersistedError unless entity.persisted? if persisted? return false unless changed? update else create end @previously_changed = changes @changed_attributes.clear true end
ruby
{ "resource": "" }
q23064
HydraAttribute.HydraValue.arel_insert
train
# Builds an Arel INSERT for this value row (entity id, attribute id,
# value, and created/updated timestamps) against the backend-type table.
def arel_insert table = self.class.arel_tables[entity.class.table_name][hydra_attribute.backend_type] fields = {} fields[table[:entity_id]] = entity.id fields[table[:hydra_attribute_id]] = hydra_attribute.id fields[table[:value]] = value fields[table[:created_at]] = Time.now fields[table[:updated_at]] = Time.now table.compile_insert(fields) end
ruby
{ "resource": "" }
q23065
HydraAttribute.HydraValue.arel_update
train
# Builds an Arel UPDATE of value and updated_at for this row, keyed by id,
# against the backend-type table.
def arel_update table = self.class.arel_tables[entity.class.table_name][hydra_attribute.backend_type] arel = table.from(table) arel.where(table[:id].eq(id)).compile_update(table[:value] => value, table[:updated_at] => Time.now) end
ruby
{ "resource": "" }
q23066
ActsAsIndexed.SearchIndex.add_records
train
# Indexes every record that allow_indexing? accepts: condenses each
# record, accumulates atom occurrences keyed by record id, and hands the
# atoms plus the indexed-record count to storage.
def add_records(records) atoms = ActiveSupport::OrderedHash.new records_count = 0 records.each do |record| next unless allow_indexing?(record) records_count += 1 condensed_record = condense_record(record) atoms = add_occurences(condensed_record, record.id, atoms) end @storage.add(atoms, records_count) end
ruby
{ "resource": "" }
q23067
ActsAsIndexed.SearchIndex.remove_record
train
# Removes a record's atom occurrences from storage.
# NOTE(review): add_occurences is called with 2 args here but with 3 in
# add_records — confirm the third parameter has a default.
def remove_record(record) condensed_record = condense_record(record) atoms = add_occurences(condensed_record,record.id) @storage.remove(atoms) end
ruby
{ "resource": "" }
q23068
ActsAsIndexed.SearchIndex.search
train
def search(query) return [] if query.nil? @atoms = @storage.fetch(cleanup_atoms(query), query[/\^/]) queries = parse_query(query.dup) positive = run_queries(queries[:positive]) positive_quoted = run_quoted_queries(queries[:positive_quoted]) negative = run_queries(queries[:negative]) negative_quoted = run_quoted_queries(queries[:negative_quoted]) starts_with = run_queries(queries[:starts_with], true) start_quoted = run_quoted_queries(queries[:start_quoted], true) results = ActiveSupport::OrderedHash.new if queries[:start_quoted].any? results = merge_query_results(results, start_quoted) end if queries[:starts_with].any? results = merge_query_results(results, starts_with) end if queries[:positive_quoted].any? results = merge_query_results(results, positive_quoted) end if queries[:positive].any? results = merge_query_results(results, positive) end negative_results = (negative.keys + negative_quoted.keys) results.delete_if { |r_id, w| negative_results.include?(r_id) } results end
ruby
{ "resource": "" }
q23069
ActsAsIndexed.SearchIndex.record_unchanged?
train
def record_unchanged?(record_new, record_old) # NOTE: Using the dirty state would be great here, but it doesn't keep track of # in-place changes. allow_indexing?(record_old) == allow_indexing?(record_new) && !@fields.map { |field| record_old.send(field) == record_new.send(field) }.include?(false) end
ruby
{ "resource": "" }
q23070
Vmstat.ProcFS.memory
train
def memory @pagesize ||= Vmstat.pagesize has_available = false total = free = active = inactive = pageins = pageouts = available = 0 procfs_file("meminfo") do |file| content = file.read(2048) # the requested information is in the first bytes content.scan(/(\w+):\s+(\d+) kB/) do |name, kbytes| pages = (kbytes.to_i * 1024) / @pagesize case name when "MemTotal" then total = pages when "MemFree" then free = pages when "MemAvailable" available = pages has_available = true when "Active" then active = pages when "Inactive" then inactive = pages end end end procfs_file("vmstat") do |file| content = file.read if content =~ /pgpgin\s+(\d+)/ pageins = $1.to_i end if content =~ /pgpgout\s+(\d+)/ pageouts = $1.to_i end end mem_klass = has_available ? LinuxMemory : Memory mem_klass.new(@pagesize, total-free-active-inactive, active, inactive, free, pageins, pageouts).tap do |mem| mem.available = available if has_available end end
ruby
{ "resource": "" }
q23071
Vmstat.ProcFS.network_interfaces
train
def network_interfaces netifcs = [] procfs_file("net", "dev") do |file| file.read.scan(NET_DATA) do |columns| type = case columns[0] when /^eth/ then NetworkInterface::ETHERNET_TYPE when /^lo/ then NetworkInterface::LOOPBACK_TYPE end netifcs << NetworkInterface.new(columns[0].to_sym, columns[1].to_i, columns[3].to_i, columns[4].to_i, columns[9].to_i, columns[11].to_i, type) end end netifcs end
ruby
{ "resource": "" }
q23072
Vmstat.ProcFS.task
train
def task @pagesize ||= Vmstat.pagesize procfs_file("self", "stat") do |file| data = file.read.split(/ /) Task.new(data[22].to_i / @pagesize, data[23].to_i, data[13].to_i * 1000, data[14].to_i * 1000) end end
ruby
{ "resource": "" }
q23073
Vmstat.ProcFS.boot_time
train
def boot_time raw = procfs_file("uptime") { |file| file.read } Time.now - raw.split(/\s/).first.to_f end
ruby
{ "resource": "" }
q23074
Vmstat.ProcFS.procfs_file
train
def procfs_file(*names, &block) path = File.join(procfs_path, *names) File.open(path, "r", &block) end
ruby
{ "resource": "" }
q23075
ActsAsIndexed.SearchAtom.+
train
def +(other) SearchAtom.new(@records.clone.merge!(other.records) { |key, _old, _new| _old + _new }) end
ruby
{ "resource": "" }
q23076
ActsAsIndexed.SearchAtom.-
train
def -(other) records = @records.clone.reject { |name, records| other.records.include?(name) } SearchAtom.new(records) end
ruby
{ "resource": "" }
q23077
ActsAsIndexed.SearchAtom.preceded_by
train
def preceded_by(former) matches = SearchAtom.new latter = ActiveSupport::OrderedHash.new former.record_ids.each do |rid| latter[rid] = @records[rid] if @records[rid] end # Iterate over each record in latter. latter.each do |record_id,pos| # Iterate over each position. pos.each do |p| # Check if previous position is in former. if former.include_position?(record_id,p-1) matches.add_record(record_id) unless matches.include_record?(record_id) matches.add_position(record_id,p) end end end matches end
ruby
{ "resource": "" }
q23078
ActsAsIndexed.SearchAtom.weightings
train
def weightings(records_size) out = ActiveSupport::OrderedHash.new ## phurni 2012-09-21 when records_size is exactly the @records.size (all records are matches), the Math.log would ## return 0 which means the frequency (pos.size) will have no effect. Cheat to make it like the matching ## record is one less, so that we still can weight on frequency. matching_records_size = (records_size == @records.size ? @records.size - 1 : @records.size) @records.each do |r_id, pos| # Fixes a bug when the records_size is zero. i.e. The only record # contaning the word has been deleted. if records_size < 1 out[r_id] = 0.0 next end # weighting = frequency * log (records.size / records_with_atom) ## parndt 2010/05/03 changed to records_size.to_f to avoid -Infinity Errno::ERANGE exceptions ## which would happen for example Math.log(1 / 20) == -Infinity but Math.log(1.0 / 20) == -2.99573227355399 out[r_id] = pos.size * Math.log(records_size.to_f / matching_records_size) end out end
ruby
{ "resource": "" }
q23079
Alexa.Response.elicit_slot!
train
def elicit_slot!(slot_to_elicit, skip_render: false) directives << { type: "Dialog.ElicitSlot", slotToElicit: slot_to_elicit } if skip_render @slots_to_not_render_elicitation << slot_to_elicit end end
ruby
{ "resource": "" }
q23080
ActsAsIndexed.Storage.fetch
train
def fetch(atom_names, start=false) atoms = ActiveSupport::OrderedHash.new atom_names.uniq.collect{|a| encoded_prefix(a) }.uniq.each do |prefix| pattern = @path.join(prefix.to_s).to_s pattern += '*' if start pattern += INDEX_FILE_EXTENSION Pathname.glob(pattern).each do |atom_file| atom_file.open do |f| atoms.merge!(Marshal.load(f)) end end # Pathname.glob end # atom_names.uniq atoms end
ruby
{ "resource": "" }
q23081
ActsAsIndexed.Storage.operate
train
def operate(operation, atoms) # ActiveSupport always available? atoms_sorted = ActiveSupport::OrderedHash.new # Sort the atoms into the appropriate shards for writing to individual # files. atoms.each do |atom_name, records| (atoms_sorted[encoded_prefix(atom_name)] ||= ActiveSupport::OrderedHash.new)[atom_name] = records end atoms_sorted.each do |e_p, atoms| path = @path.join(e_p.to_s + INDEX_FILE_EXTENSION) lock_file(path) do if path.exist? from_file = path.open do |f| Marshal.load(f) end else from_file = ActiveSupport::OrderedHash.new end atoms = from_file.merge(atoms){ |k,o,n| o.send(operation, n) } write_file(path) do |f| Marshal.dump(atoms,f) end end # end lock. end end
ruby
{ "resource": "" }
q23082
ActsAsIndexed.Storage.lock_file
train
def lock_file(file_path, &block) # :nodoc: @@file_lock.synchronize do # Windows does not support file locking. if !windows? && file_path.exist? file_path.open('r+') do |f| begin f.flock File::LOCK_EX yield ensure f.flock File::LOCK_UN end end else yield end end end
ruby
{ "resource": "" }
q23083
Alexa.Device.location
train
def location @_location ||= begin if Alexa.configuration.location_permission_type == :full_address get_address elsif Alexa.configuration.location_permission_type == :country_and_postal_code get_address(only: :country_and_postal_code) end end end
ruby
{ "resource": "" }
q23084
SimplifyRb.DouglasPeuckerSimplifier.get_sq_seg_dist
train
def get_sq_seg_dist(point, point_1, point_2) x = point_1.x y = point_1.y dx = point_2.x - x dy = point_2.y - y if dx != 0 || dy != 0 t = ((point.x - x) * dx + (point.y - y) * dy) / (dx * dx + dy * dy) if t > 1 x = point_2.x y = point_2.y elsif t > 0 x += dx * t y += dy * t end end dx = point.x - x dy = point.y - y dx * dx + dy * dy end
ruby
{ "resource": "" }
q23085
ActsAsIndexed.ClassMethods.acts_as_indexed
train
def acts_as_indexed(options = {}) class_eval do extend ActsAsIndexed::SingletonMethods end include ActsAsIndexed::InstanceMethods after_create :add_to_index before_update :update_index after_destroy :remove_from_index # scope for Rails 3.x, named_scope for Rails 2.x. if self.respond_to?(:where) scope :with_query, lambda { |query| where("#{table_name}.#{primary_key} IN (?)", search_index(query, {}, {:ids_only => true})) } else named_scope :with_query, lambda { |query| { :conditions => ["#{table_name}.#{primary_key} IN (?)", search_index(query, {}, {:ids_only => true}) ] } } end cattr_accessor :aai_config, :aai_fields self.aai_fields = options.delete(:fields) raise(ArgumentError, 'no fields specified') if self.aai_fields.nil? || self.aai_fields.empty? self.aai_config = ActsAsIndexed.configuration.dup self.aai_config.if_proc = options.delete(:if) options.each do |k, v| self.aai_config.send("#{k}=", v) end # Add the Rails environment and this model's name to the index file path. self.aai_config.index_file = self.aai_config.index_file.join(Rails.env, self.name.underscore) end
ruby
{ "resource": "" }
q23086
ActsAsIndexed.ClassMethods.index_add
train
def index_add(record) return if self.aai_config.disable_auto_indexing build_index index = new_index index.add_record(record) @query_cache = {} end
ruby
{ "resource": "" }
q23087
ActsAsIndexed.ClassMethods.index_update
train
def index_update(record) return if self.aai_config.disable_auto_indexing build_index index = new_index index.update_record(record,find(record.id)) @query_cache = {} end
ruby
{ "resource": "" }
q23088
ActsAsIndexed.ClassMethods.search_index
train
def search_index(query, find_options={}, options={}) # Clear the query cache off if the key is set. @query_cache = {} if options[:no_query_cache] # Run the query if not already in cache. if !@query_cache || !@query_cache[query] build_index (@query_cache ||= {})[query] = new_index.search(query) end if options[:ids_only] find_option_keys = find_options.keys.map{ |k| k.to_sym } find_option_keys -= [:limit, :offset] if find_option_keys.any? raise ArgumentError, 'ids_only can not be combined with find option keys other than :offset or :limit' end end if find_options.include?(:order) part_query = @query_cache[query].map{ |r| r.first } else # slice up the results by offset and limit offset = find_options[:offset] || 0 limit = find_options.include?(:limit) ? find_options[:limit] : @query_cache[query].size part_query = sort(@query_cache[query]).slice(offset,limit).map{ |r| r.first } # Set these to nil as we are dealing with the pagination by setting # exactly what records we want. find_options[:offset] = nil find_options[:limit] = nil end return part_query if options[:ids_only] with_scope :find => find_options do # Doing the find like this eliminates the possibility of errors occuring # on either missing records (out-of-sync) or an empty results array. records = find(:all, :conditions => [ "#{table_name}.#{primary_key} IN (?)", part_query]) if find_options.include?(:order) records # Just return the records without ranking them. else # Results come back in random order from SQL, so order again. ranked_records = ActiveSupport::OrderedHash.new records.each do |r| ranked_records[r] = @query_cache[query][r.id] end sort(ranked_records.to_a).map{ |r| r.first } end end end
ruby
{ "resource": "" }
q23089
ActsAsIndexed.ClassMethods.build_index
train
def build_index return if aai_config.index_file.directory? index = new_index find_in_batches({ :batch_size => 500 }) do |records| index.add_records(records) end end
ruby
{ "resource": "" }
q23090
ActsAsIndexed.ClassMethods.sort
train
def sort(ranked_records) ranked_records.sort { |a, b| a_score = a.last a_id = a.first.is_a?(Fixnum) ? a.first : a.first.id b_score = b.last b_id = b.first.is_a?(Fixnum) ? b.first : b.first.id if a_score == b_score a_id <=> b_id else b_score <=> a_score # We want the records with better relevance first. end } end
ruby
{ "resource": "" }
q23091
MongodbLogger.Base.mongo_fix_session_keys
train
def mongo_fix_session_keys(session = {}) new_session = Hash.new session.to_hash.each do |i, j| new_session[i.gsub(/\./i, "|")] = j.inspect end unless session.empty? new_session end
ruby
{ "resource": "" }
q23092
Panoramic.Resolver.find_templates
train
def find_templates(name, prefix, partial, details, key=nil, locals=[]) return [] if @@resolver_options[:only] && !@@resolver_options[:only].include?(prefix) path = build_path(name, prefix) conditions = { :path => path, :locale => [normalize_array(details[:locale]).first, nil], :format => normalize_array(details[:formats]), :handler => normalize_array(details[:handlers]), :partial => partial || false }.merge(details[:additional_criteria].presence || {}) @@model.find_model_templates(conditions).map do |record| Rails.logger.debug "Rendering template from database: #{path} (#{record.format})" initialize_template(record) end end
ruby
{ "resource": "" }
q23093
Panoramic.Resolver.virtual_path
train
def virtual_path(path, partial) return path unless partial if index = path.rindex("/") path.insert(index + 1, "_") else "_#{path}" end end
ruby
{ "resource": "" }
q23094
Slack.Poster.send_message
train
def send_message(message) body = message.is_a?(String) ? options.merge(text: message) : options.merge(message.as_json) conn = Faraday.new(url: @base_uri) response = conn.post('', payload: body.to_json) response end
ruby
{ "resource": "" }
q23095
MongodbLogger.Logger.record_serializer
train
def record_serializer(rec, nice = true) [:messages, :params].each do |key| if msgs = rec[key] msgs.each do |i, j| msgs[i] = (true == nice ? nice_serialize_object(j) : j.inspect) end end end end
ruby
{ "resource": "" }
q23096
MongodbLogger.ReplicaSetHelper.rescue_connection_failure
train
def rescue_connection_failure(max_retries = 40) success = false retries = 0 while !success begin yield success = true rescue mongo_error_type => e raise e if (retries += 1) >= max_retries sleep 0.25 end end end
ruby
{ "resource": "" }
q23097
POI.Cell.error_value
train
def error_value if poi_cell.cell_type == CELL_TYPE_ERROR error_value_from(poi_cell.error_cell_value) elsif poi_cell.cell_type == CELL_TYPE_FORMULA && poi_cell.cached_formula_result_type == CELL_TYPE_ERROR cell_value = formula_evaluator.evaluate(poi_cell) cell_value && error_value_from(cell_value.error_value) else nil end end
ruby
{ "resource": "" }
q23098
POI.Cell.to_s
train
def to_s(evaluate_formulas=true) return '' if poi_cell.nil? if poi_cell.cell_type == CELL_TYPE_FORMULA && evaluate_formulas == false formula_value else value.to_s end end
ruby
{ "resource": "" }
q23099
Dare.Window.add_mouse_event_listener
train
def add_mouse_event_listener Element.find("##{@canvas.id}").on :mousemove do |event| coords = get_cursor_position(event) @mouse_x = coords.x[:x] @mouse_y = coords.x[:y] end end
ruby
{ "resource": "" }