_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q15400
JSONAPI::Utils::Support::Filter.Default.apply_filter?
train
def apply_filter?(records, options = {}) params[:filter].present? && records.respond_to?(:where) && (options[:filter].nil? || options[:filter]) end
ruby
{ "resource": "" }
q15401
JSONAPI::Utils::Support::Filter.Default.filter_params
train
def filter_params @_filter_params ||= case params[:filter] when Hash, ActionController::Parameters default_filters.each_with_object({}) do |field, hash| unformatted_field = @request.unformat_key(field) hash[unformatted_field] = params[:filter][field] end end end
ruby
{ "resource": "" }
q15402
JSONAPI::Utils::Support.Sort.sort_params
train
def sort_params @_sort_params ||= if params[:sort].present? params[:sort].split(',').each_with_object({}) do |field, hash| unformatted_field = @request.unformat_key(field) desc, field = unformatted_field.to_s.match(/^([-_])?(\w+)$/i)[1..2] hash[field.to_sym] = desc.present? ? :desc : :asc end end end
ruby
{ "resource": "" }
q15403
JSONAPI::Utils.Request.setup_request
train
def setup_request @request ||= JSONAPI::RequestParser.new( params, context: context, key_formatter: key_formatter, server_error_callbacks: (self.class.server_error_callbacks || []) ) end
ruby
{ "resource": "" }
q15404
JSONAPI::Utils.Request.build_params_for
train
def build_params_for(param_type) return {} if @request.operations.empty? keys = %i(attributes to_one to_many) operation = @request.operations.find { |e| e.options[:data].keys & keys == keys } if operation.nil? {} elsif param_type == :relationship operation.options[:data].values_at(:to_one, :to_many).compact.reduce(&:merge) else operation.options[:data][:attributes] end end
ruby
{ "resource": "" }
q15405
HairTrigger.Builder.initialize_copy
train
def initialize_copy(other) @trigger_group = other @triggers = nil @chained_calls = [] @errors = [] @warnings = [] @options = @options.dup @options.delete(:name) # this will be inferred (or set further down the line) @options.each do |key, value| @options[key] = value.dup rescue value end end
ruby
{ "resource": "" }
q15406
ActiveAttr.Typecasting.typecast_attribute
train
def typecast_attribute(typecaster, value) raise ArgumentError, "a typecaster must be given" unless typecaster.respond_to?(:call) return value if value.nil? typecaster.call(value) end
ruby
{ "resource": "" }
q15407
ActiveAttr.Attributes.inspect
train
def inspect attribute_descriptions = attributes.sort.map { |key, value| "#{key}: #{value.inspect}" }.join(", ") separator = " " unless attribute_descriptions.empty? "#<#{self.class.name}#{separator}#{attribute_descriptions}>" end
ruby
{ "resource": "" }
q15408
ActiveAttr.AttributeDefinition.inspect
train
def inspect options_description = options.map { |key, value| "#{key.inspect} => #{value.inspect}" }.sort.join(", ") inspected_options = ", #{options_description}" unless options_description.empty? "attribute :#{name}#{inspected_options}" end
ruby
{ "resource": "" }
q15409
ActiveAttr.AttributeDefaults.apply_defaults
train
def apply_defaults(defaults=attribute_defaults) @attributes ||= {} defaults.each do |name, value| # instance variable is used here to avoid any dirty tracking in attribute setter methods @attributes[name] = value unless @attributes.has_key? name end end
ruby
{ "resource": "" }
q15410
ActiveAttr.AttributeDefaults._attribute_default
train
def _attribute_default(attribute_name) default = self.class.attributes[attribute_name][:default] case when default.respond_to?(:call) then instance_exec(&default) when default.duplicable? then default.dup else default end end
ruby
{ "resource": "" }
q15411
ActiveAttr.MassAssignment.sanitize_for_mass_assignment_with_or_without_role
train
def sanitize_for_mass_assignment_with_or_without_role(new_attributes, options) if method(:sanitize_for_mass_assignment).arity.abs > 1 sanitize_for_mass_assignment new_attributes, options[:as] || :default else sanitize_for_mass_assignment new_attributes end end
ruby
{ "resource": "" }
q15412
Draftsman.Sinatra.user_for_draftsman
train
def user_for_draftsman return unless defined?(current_user) ActiveSupport::VERSION::MAJOR >= 4 ? current_user.try!(:id) : current_user.try(:id) rescue NoMethodError current_user end
ruby
{ "resource": "" }
q15413
RocketJob.DirmonEntry.set_exception
train
def set_exception(worker_name, exc_or_message) if exc_or_message.is_a?(Exception) self.exception = JobException.from_exception(exc_or_message) exception.worker_name = worker_name else build_exception( class_name: 'RocketJob::DirmonEntryException', message: exc_or_message, backtrace: [], worker_name: worker_name ) end end
ruby
{ "resource": "" }
q15414
RocketJob.DirmonEntry.later
train
def later(pathname) job_id = BSON::ObjectId.new archived_file_name = archive_file(job_id, pathname) job = RocketJob::Jobs::UploadFileJob.create!( job_class_name: job_class_name, properties: properties, description: "#{name}: #{pathname.basename}", upload_file_name: archived_file_name.to_s, original_file_name: pathname.to_s, job_id: job_id ) logger.info( message: 'Created RocketJob::Jobs::UploadFileJob', payload: { dirmon_entry_name: name, upload_file_name: archived_file_name.to_s, original_file_name: pathname.to_s, job_class_name: job_class_name, job_id: job_id.to_s, upload_job_id: job.id.to_s } ) job end
ruby
{ "resource": "" }
q15415
RocketJob.DirmonEntry.strip_whitespace
train
def strip_whitespace self.pattern = pattern.strip unless pattern.nil? self.archive_directory = archive_directory.strip unless archive_directory.nil? end
ruby
{ "resource": "" }
q15416
RocketJob.DirmonEntry.archive_file
train
def archive_file(job_id, pathname) target_path = archive_pathname(pathname) target_path.mkpath target_file_name = target_path.join("#{job_id}_#{pathname.basename}") # In case the file is being moved across partitions FileUtils.move(pathname.to_s, target_file_name.to_s) target_file_name.to_s end
ruby
{ "resource": "" }
q15417
RocketJob.DirmonEntry.job_is_a_rocket_job
train
def job_is_a_rocket_job klass = job_class return if job_class_name.nil? || klass&.ancestors&.include?(RocketJob::Job) errors.add(:job_class_name, "Job #{job_class_name} must be defined and inherit from RocketJob::Job") end
ruby
{ "resource": "" }
q15418
RocketJob.DirmonEntry.job_has_properties
train
def job_has_properties klass = job_class return unless klass properties.each_pair do |k, _v| next if klass.public_method_defined?("#{k}=".to_sym) errors.add(:properties, "Unknown Property: Attempted to set a value for #{k.inspect} which is not allowed on the job #{job_class_name}") end end
ruby
{ "resource": "" }
q15419
RocketJob.CLI.boot_standalone
train
def boot_standalone # Try to load bundler if present begin require 'bundler/setup' Bundler.require(environment) rescue LoadError nil end require 'rocketjob' begin require 'rocketjob_enterprise' rescue LoadError nil end # Log to file except when booting rails, when it will add the log file path path = log_file ? Pathname.new(log_file) : Pathname.pwd.join("log/#{environment}.log") path.dirname.mkpath SemanticLogger.add_appender(file_name: path.to_s, formatter: :color) logger.info "Rails not detected. Running standalone: #{environment}" RocketJob::Config.load!(environment, mongo_config, symmetric_encryption_config) self.class.eager_load_jobs(File.expand_path('jobs', File.dirname(__FILE__))) self.class.eager_load_jobs end
ruby
{ "resource": "" }
q15420
RocketJob.CLI.write_pidfile
train
def write_pidfile return unless pidfile pid = $PID File.open(pidfile, 'w') { |f| f.puts(pid) } # Remove pidfile on exit at_exit do File.delete(pidfile) if pid == $PID end end
ruby
{ "resource": "" }
q15421
RocketJob.CLI.parse
train
def parse(argv) parser = OptionParser.new do |o| o.on('-n', '--name NAME', 'Unique Name of this server (Default: host_name:PID)') do |arg| @name = arg end o.on('-w', '--workers COUNT', 'Number of workers (threads) to start') do |arg| @workers = arg.to_i end o.on('-t', '--threads COUNT', 'DEPRECATED') do |arg| warn '-t and --threads are deprecated, use -w or --workers' @workers = arg.to_i end o.on('-F', '--filter REGEXP', 'Limit this server to only those job classes that match this regular expression (case-insensitive). Example: "DirmonJob|WeeklyReportJob"') do |arg| @include_filter = Regexp.new(arg, true) end o.on('-E', '--exclude REGEXP', 'Prevent this server from working on any job classes that match this regular expression (case-insensitive). Example: "DirmonJob|WeeklyReportJob"') do |arg| @exclude_filter = Regexp.new(arg, true) end o.on('-W', '--where JSON', "Limit this server instance to the supplied mongo query filter. Supply as a string in JSON format. Example: '{\"priority\":{\"$lte\":25}}'") do |arg| @where_filter = JSON.parse(arg) end o.on('-q', '--quiet', 'Do not write to stdout, only to logfile. Necessary when running as a daemon') do @quiet = true end o.on('-d', '--dir DIR', 'Directory containing Rails app, if not current directory') do |arg| @directory = arg end o.on('-e', '--environment ENVIRONMENT', 'The environment to run the app on (Default: RAILS_ENV || RACK_ENV || development)') do |arg| @environment = arg end o.on('-l', '--log_level trace|debug|info|warn|error|fatal', 'The log level to use') do |arg| @log_level = arg end o.on('-f', '--log_file FILE_NAME', 'The log file to write to. Default: log/<environment>.log') do |arg| @log_file = arg end o.on('--pidfile PATH', 'Use PATH as a pidfile') do |arg| @pidfile = arg end o.on('-m', '--mongo MONGO_CONFIG_FILE_NAME', 'Path and filename of config file. 
Default: config/mongoid.yml') do |arg| @mongo_config = arg end o.on('-s', '--symmetric-encryption SYMMETRIC_ENCRYPTION_CONFIG_FILE_NAME', 'Path and filename of Symmetric Encryption config file. Default: config/symmetric-encryption.yml') do |arg| @symmetric_encryption_config = arg end o.on('-v', '--version', 'Print the version information') do puts "Rocket Job v#{RocketJob::VERSION}" exit 1 end end parser.banner = 'rocketjob <options>' parser.on_tail '-h', '--help', 'Show help' do puts parser exit 1 end parser.parse! argv end
ruby
{ "resource": "" }
q15422
RocketJob.Worker.run
train
def run Thread.current.name = format('rocketjob %03i', id) logger.info 'Started' until shutdown? wait = RocketJob::Config.instance.max_poll_seconds if process_available_jobs # Keeps workers staggered across the poll interval so that # all workers don't poll at the same time wait = rand(wait * 1000) / 1000 end break if wait_for_shutdown?(wait) end logger.info 'Stopping' rescue Exception => exc logger.fatal('Unhandled exception in job processing thread', exc) ensure ActiveRecord::Base.clear_active_connections! if defined?(ActiveRecord::Base) end
ruby
{ "resource": "" }
q15423
RocketJob.Worker.reset_filter_if_expired
train
def reset_filter_if_expired # Only clear out the current_filter after every `re_check_seconds` time = Time.now return unless (time - @re_check_start) > re_check_seconds @re_check_start = time self.current_filter = filter.dup if current_filter != filter end
ruby
{ "resource": "" }
q15424
RocketJob::Plugins::Rufus.CronLine.previous_time
train
def previous_time(from=ZoTime.now) pt = nil zt = ZoTime.new(from.to_i - 1, @timezone) miny = from.year - NEXT_TIME_MAX_YEARS loop do pt = zt.dup fail RangeError.new( "failed to reach occurrence within " + "#{NEXT_TIME_MAX_YEARS} years for '#{original}'" ) if pt.year < miny unless date_match?(pt) zt.substract(pt.hour * 3600 + pt.min * 60 + pt.sec + 1) next end unless sub_match?(pt, :hour, @hours) zt.substract(pt.min * 60 + pt.sec + 1) next end unless sub_match?(pt, :min, @minutes) zt.substract(pt.sec + 1) next end unless sub_match?(pt, :sec, @seconds) zt.substract(prev_second(pt)) next end break end pt end
ruby
{ "resource": "" }
q15425
RocketJob.WorkerPool.rebalance
train
def rebalance(max_workers, stagger_start = false) count = max_workers.to_i - living_count return 0 unless count > 0 logger.info("#{'Stagger ' if stagger_start}Starting #{count} workers") add_one count -= 1 delay = Config.instance.max_poll_seconds.to_f / max_workers count.times.each do sleep(delay) if stagger_start return -1 if Supervisor.shutdown? add_one end end
ruby
{ "resource": "" }
q15426
RocketJob.Performance.export_results
train
def export_results(results) CSV.open("job_results_#{ruby}_#{servers}s_#{workers}w_v#{version}.csv", 'wb') do |csv| csv << results.first.keys results.each { |result| csv << result.values } end end
ruby
{ "resource": "" }
q15427
EventSourcery.Event.with
train
def with(event_class: self.class, **attributes) if self.class != Event && !attributes[:type].nil? && attributes[:type] != type raise Error, 'When using typed events change the type by changing the event class.' end event_class.new(**to_h.merge!(attributes)) end
ruby
{ "resource": "" }
q15428
EventSourcery.Event.to_h
train
def to_h { id: id, uuid: uuid, aggregate_id: aggregate_id, type: type, body: body, version: version, created_at: created_at, correlation_id: correlation_id, causation_id: causation_id, } end
ruby
{ "resource": "" }
q15429
JsDuck.Parser.parse
train
def parse(contents, filename="", options={}) @doc_processor.filename = @filename = filename parse_js_or_scss(contents, filename, options).map do |docset| expand(docset) end.flatten.map do |docset| merge(docset) end end
ruby
{ "resource": "" }
q15430
JsDuck.Parser.parse_js_or_scss
train
def parse_js_or_scss(contents, filename, options) if filename =~ /\.scss$/ docs = Css::Parser.new(contents, options).parse else docs = Js::Parser.new(contents, options).parse docs = Js::Ast.new(docs).detect_all! end end
ruby
{ "resource": "" }
q15431
JsDuck.Parser.expand
train
def expand(docset) docset[:comment] = @doc_parser.parse(docset[:comment], @filename, docset[:linenr]) docset[:doc_map] = Doc::Map.build(docset[:comment]) docset[:tagname] = BaseType.detect(docset[:doc_map], docset[:code]) if docset[:tagname] == :class # expand class into several docsets, and rebuild doc-maps for all of them. @class_doc_expander.expand(docset).map do |ds| ds[:doc_map] = Doc::Map.build(ds[:comment]) ds end else docset end end
ruby
{ "resource": "" }
q15432
JsDuck.Parser.merge
train
def merge(docset) @doc_processor.linenr = docset[:linenr] docset[:comment] = @doc_processor.process(docset[:tagname], docset[:doc_map]) docset.delete(:doc_map) @merger.merge(docset, @filename, docset[:linenr]) end
ruby
{ "resource": "" }
q15433
JsDuck.Cache.read
train
def read(file_name, file_contents) fname = cache_file_name(file_name, file_contents) if File.exists?(fname) @previous_entry = fname File.open(fname, "rb") {|file| Marshal::load(file) } else @previous_entry = nil nil end end
ruby
{ "resource": "" }
q15434
JsDuck.Cache.write
train
def write(file_name, file_contents, data) fname = cache_file_name(file_name, file_contents) @previous_entry = fname File.open(fname, "wb") {|file| Marshal::dump(data, file) } end
ruby
{ "resource": "" }
q15435
JsDuck.Guides.write
train
def write(dir) FileUtils.mkdir(dir) unless File.exists?(dir) each_item {|guide| write_guide(guide, dir) } end
ruby
{ "resource": "" }
q15436
JsDuck.Guides.fix_icon
train
def fix_icon(dir) if File.exists?(dir+"/icon.png") # All ok elsif File.exists?(dir+"/icon-lg.png") FileUtils.mv(dir+"/icon-lg.png", dir+"/icon.png") else FileUtils.cp(@opts.template+"/resources/images/default-guide.png", dir+"/icon.png") end end
ruby
{ "resource": "" }
q15437
JsDuck.Aggregator.add_class
train
def add_class(cls) old_cls = @classes[cls[:name]] if !old_cls && @alt_names[cls[:name]] old_cls = @alt_names[cls[:name]] warn_alt_name(cls) end if old_cls merge_classes(old_cls, cls) @current_class = old_cls else @current_class = cls @classes[cls[:name]] = cls # Register all alternate names of class for lookup too cls[:alternateClassNames].each do |altname| if cls[:name] == altname # A buggy documentation, warn. warn_alt_name(cls) else @alt_names[altname] = cls # When an alternate name has been used as a class name before, # then this is one crappy documentation, but attempt to handle # it by merging the class with alt-name into this class. if @classes[altname] merge_classes(cls, @classes[altname]) @classes.delete(altname) warn_alt_name(cls) end end end insert_orphans(cls) end end
ruby
{ "resource": "" }
q15438
JsDuck.Aggregator.merge_classes
train
def merge_classes(old, new) # Merge booleans [:extends, :singleton, :private].each do |tag| old[tag] = old[tag] || new[tag] end # Merge arrays [:mixins, :alternateClassNames, :requires, :uses, :files].each do |tag| old[tag] = (old[tag] || []) + (new[tag] || []) end # Merge hashes of arrays [:aliases].each do |tag| new[tag].each_pair do |key, contents| old[tag][key] = (old[tag][key] || []) + contents end end old[:doc] = old[:doc].length > 0 ? old[:doc] : new[:doc] # Additionally the doc-comment can contain configs and constructor old[:members] += new[:members] end
ruby
{ "resource": "" }
q15439
JsDuck.Aggregator.add_member
train
def add_member(node) # Completely ignore member if @ignore used return if node[:ignore] if node[:owner] if @classes[node[:owner]] add_to_class(@classes[node[:owner]], node) else add_orphan(node) end elsif @current_class node[:owner] = @current_class[:name] add_to_class(@current_class, node) else add_orphan(node) end end
ruby
{ "resource": "" }
q15440
JsDuck.Aggregator.insert_orphans
train
def insert_orphans(cls) members = @orphans.find_all {|node| node[:owner] == cls[:name] } members.each do |node| add_to_class(cls, node) @orphans.delete(node) end end
ruby
{ "resource": "" }
q15441
JsDuck.Merger.general_merge
train
def general_merge(h, docs, code) # Add all items in docs not already in result. docs.each_pair do |key, value| h[key] = value unless h[key] end # Add all items in code not already in result and mark them as # auto-detected. But only if the explicit and auto-detected # names don't conflict. if Merger.can_be_autodetected?(docs, code) code.each_pair do |key, value| unless h[key] h[key] = value mark_autodetected(h, key) end end end end
ruby
{ "resource": "" }
q15442
JsDuck.MembersIndex.merge!
train
def merge!(hash1, hash2) hash2.each_pair do |name, m| if m[:hide] if hash1[name] hash1.delete(name) else msg = "@hide used but #{m[:tagname]} #{m[:name]} not found in parent class" Logger.warn(:hide, msg, m[:files][0]) end else if hash1[name] store_overrides(hash1[name], m) end hash1[name] = m end end end
ruby
{ "resource": "" }
q15443
JsDuck.MembersIndex.store_overrides
train
def store_overrides(old, new) # Sometimes a class is included multiple times (like Ext.Base) # resulting in its members overriding themselves. Because of # this, ignore overriding itself. if new[:owner] != old[:owner] new[:overrides] = [] unless new[:overrides] unless new[:overrides].any? {|m| m[:owner] == old[:owner] } # Make a copy of the important properties for us. We can't # just push the actual `old` member itself, because there # can be circular overrides (notably with Ext.Base), which # will result in infinite loop when we try to convert our # class into JSON. new[:overrides] << { :name => old[:name], :owner => old[:owner], } end end end
ruby
{ "resource": "" }
q15444
JsDuck.News.filter_new_members
train
def filter_new_members(cls) members = cls.all_local_members.find_all do |m| visible?(m) && (m[:new] || new_params?(m)) end members = discard_accessors(members) members.sort! {|a, b| a[:name] <=> b[:name] } end
ruby
{ "resource": "" }
q15445
JsDuck.GuideToc.inject!
train
def inject! @html.each_line do |line| if line =~ /^\s*<h([1-6])>(.*?)<\/h[1-6]>$/ level = $1.to_i original_text = $2 text = Util::HTML.strip_tags(original_text) id = title_to_id(text) if include_to_toc?(level) @toc.add(level, id, text) end @new_html << "<h#{level} id='#{id}'>#{original_text}</h#{level}>\n" else @new_html << line end end inject_toc! @new_html.flatten.join end
ruby
{ "resource": "" }
q15446
JsDuck.Class.internal_doc=
train
def internal_doc=(doc) @doc.merge!(doc) do |key, oldval, newval| if key == :members oldval.zip(newval) do |ms| ms[0].merge!(ms[1]) end oldval else newval end end end
ruby
{ "resource": "" }
q15447
JsDuck.Class.lookup
train
def lookup(classname) if @relations[classname] @relations[classname] elsif @relations.ignore?(classname) || classname =~ /\*/ # Ignore explicitly ignored classes and classnames with # wildcards in them. We could expand the wildcard, but that # can result in a very long list of classes, like when # somebody requires 'Ext.form.*', so for now we do the # simplest thing and ignore it. Class.new({:name => classname}, false) else Logger.warn(:extend, "Class #{classname} not found", @doc[:files][0]) # Create placeholder class Class.new({:name => classname}, false) end end
ruby
{ "resource": "" }
q15448
JsDuck.Class.find_members
train
def find_members(query={}) if query[:name] ms = @members_index.global_by_name[query[:name]] || [] ms = ms.find_all {|m| m[:owner] == @doc[:name]} if query[:local] elsif query[:local] ms = @members_index.all_local else ms = @members_index.all_global end if query[:tagname] ms = ms.find_all {|m| m[:tagname] == query[:tagname] } end if query[:static] == true ms = ms.find_all {|m| m[:static] } elsif query[:static] == false ms = ms.reject {|m| m[:static] } end ms end
ruby
{ "resource": "" }
q15449
JsDuck::Tag.MemberTag.process_code
train
def process_code(code) return { :tagname => code[:tagname], # An auto-detected name might be "MyClass.prototype.myMethod" - # for member name we only want the last "myMethod" part. :name => code[:name] ? code[:name].split(/\./).last : nil, :autodetected => code[:autodetected], :inheritdoc => code[:inheritdoc], :static => code[:static], :private => code[:private], :inheritable => code[:inheritable], :linenr => code[:linenr], } end
ruby
{ "resource": "" }
q15450
JsDuck.GroupedAsset.each_item
train
def each_item(group=nil, &block) group = group || @groups group.each do |item| if item["items"] each_item(item["items"], &block) else block.call(item) end end end
ruby
{ "resource": "" }
q15451
JsDuck.BatchProcessor.aggregate
train
def aggregate(parsed_files) agr = Aggregator.new parsed_files.each do |file| Logger.log("Aggregating", file.filename) agr.aggregate(file) end agr.result end
ruby
{ "resource": "" }
q15452
JsDuck.BatchProcessor.pre_process
train
def pre_process(classes_hash, opts) Process::IgnoredClasses.new(classes_hash).process_all! Process::GlobalMembers.new(classes_hash, opts).process_all! Process::Accessors.new(classes_hash).process_all! Process::Ext4Events.new(classes_hash, opts).process_all! Process::Enums.new(classes_hash).process_all! Process::Overrides.new(classes_hash, opts).process_all! classes_hash.values end
ruby
{ "resource": "" }
q15453
JsDuck.BatchProcessor.to_class_objects
train
def to_class_objects(docs, opts) classes = docs.map {|d| Class.new(d) } Relations.new(classes, opts.external) end
ruby
{ "resource": "" }
q15454
JsDuck.BatchProcessor.post_process
train
def post_process(relations, opts) Process::CircularDeps.new(relations).process_all! Process::InheritDoc.new(relations).process_all! Process::Versions.new(relations, opts).process_all! Process::ReturnValues.new(relations).process_all! Process::Fires.new(relations).process_all! Process::Components.new(relations).process_all! Process::Lint.new(relations).process_all! Process::NoDoc.new(relations).process_all! relations end
ruby
{ "resource": "" }
q15455
JsDuck.Columns.split
train
def split(items, n) if n == 1 [items] elsif items.length <= n Array.new(n) {|i| items[i] ? [items[i]] : [] } else min_max = nil min_arr = nil i = 0 while i <= items.length-n i += 1 # Try placing 1, 2, 3, ... items to first chunk. # Calculate the remaining chunks recursively. cols = [items[0,i]] + split(items[i, items.length], n-1) max = max_sum(cols) # Is this the optimal solution so far? Remember it. if !min_max || max < min_max min_max = max min_arr = cols end end min_arr end end
ruby
{ "resource": "" }
q15456
JsDuck.GuideTocEntry.add
train
def add(level, id, text) if level == @min_level @items << GuideTocEntry.new(self) @items.last.label = "#{prefix} <a href='#!/guide/#{id}'>#{text}</a>\n" else if @items.empty? @items << GuideTocEntry.new(self) end @items.last.add(level-1, id, text) end end
ruby
{ "resource": "" }
q15457
JsDuck.ExternalClasses.add
train
def add(name) if name =~ /\*/ @patterns << make_pattern(name) elsif name =~ /^@browser$/i WEB_APIS.each do |cls| @class_names[cls] = true end else @class_names[name] = true end end
ruby
{ "resource": "" }
q15458
JsDuck.Logger.configure_defaults
train
def configure_defaults # Enable all warnings except some. set_warning(:all, true) set_warning(:link_auto, false) set_warning(:param_count, false) set_warning(:fires, false) set_warning(:nodoc, false) end
ruby
{ "resource": "" }
q15459
JsDuck.Logger.configure
train
def configure(opts) self.verbose = true if opts.verbose self.colors = opts.color unless opts.color.nil? begin opts.warnings.each do |w| set_warning(w[:type], w[:enabled], w[:path], w[:params]) end rescue Warning::WarnException => e warn(nil, e.message) end end
ruby
{ "resource": "" }
q15460
JsDuck.Logger.warn
train
def warn(type, msg, file={}, args=[]) if warning_enabled?(type, file[:filename], args) print_warning(msg, file[:filename], file[:linenr]) end return false end
ruby
{ "resource": "" }
q15461
JsDuck.Logger.format
train
def format(filename=nil, line=nil) out = "" if filename out = Util::OS.windows? ? filename.gsub('/', '\\') : filename if line out += ":#{line}:" end end paint(:magenta, out) end
ruby
{ "resource": "" }
q15462
JsDuck.Logger.paint
train
def paint(color_name, msg) if @colors == false || @colors == nil && (Util::OS.windows? || !$stderr.tty?) msg else COLORS[color_name] + msg + CLEAR end end
ruby
{ "resource": "" }
q15463
JsDuck.InlineExamples.add_classes
train
def add_classes(relations) relations.each do |cls| extract(cls[:doc]).each_with_index do |ex, i| @examples << { :id => cls[:name] + "-" + i.to_s, :name => cls[:name] + " example #" + (i+1).to_s, :href => '#!/api/' + cls[:name], :code => ex[:code], :options => ex[:options], } end end self end
ruby
{ "resource": "" }
q15464
JsDuck.InlineExamples.add_guides
train
def add_guides(guides) guides.each_item do |guide| extract(guide[:html]).each_with_index do |ex, i| @examples << { :id => guide["name"] + "-" + i.to_s, :name => guide["title"] + " example #" + (i+1).to_s, :href => '#!/guide/' + guide["name"], :code => ex[:code], :options => ex[:options], } end end self end
ruby
{ "resource": "" }
q15465
JsDuck.InlineExamples.extract
train
def extract(html) examples = [] s = StringScanner.new(html) while !s.eos? do if s.check(/</) if s.check(@begin_example_re) s.scan(@begin_example_re) =~ @begin_example_re options = build_options_hash($1) ex = s.scan_until(@end_example_re).sub(@end_example_re, '') examples << { :code => Util::HTML.unescape(Util::HTML.strip_tags(ex)), :options => options, } else s.skip(/</) end else s.skip(/[^<]+/) end end examples end
ruby
{ "resource": "" }
q15466
JsDuck.ClassDocExpander.expand_comment
train
def expand_comment(docset) groups = { :class => [], :cfg => [], :constructor => [], } # By default everything goes to :class group group_name = :class docset[:comment].each do |tag| tagname = tag[:tagname] if tagname == :cfg || tagname == :constructor group_name = tagname if tagname == :cfg && (tag[:name] !~ /\./ || groups[:cfg].length == 0) groups[:cfg] << [] end end if tagname == :aliases # For backwards compatibility allow @xtype after @constructor groups[:class] << tag elsif group_name == :cfg groups[:cfg].last << tag else groups[group_name] << tag end end groups_to_docsets(groups, docset) end
ruby
{ "resource": "" }
q15467
JsDuck.ClassDocExpander.groups_to_docsets
train
def groups_to_docsets(groups, docset) results = [] results << { :tagname => :class, :type => docset[:type], :comment => groups[:class], :code => docset[:code], :linenr => docset[:linenr], } groups[:cfg].each do |cfg| results << { :tagname => :cfg, :type => docset[:type], :comment => cfg, :code => {}, :linenr => docset[:linenr], } end if groups[:constructor].length > 0 # Remember that a constructor is already found and ignore if a # constructor is detected from code. @constructor_found = true results << { :tagname => :method, :type => docset[:type], :comment => groups[:constructor], :code => {}, :linenr => docset[:linenr], } end results end
ruby
{ "resource": "" }
q15468
JsDuck.ClassDocExpander.expand_code
train
def expand_code(docset) results = [] if docset[:code] (docset[:code][:members] || []).each do |m| results << code_to_docset(m) unless @constructor_found && JsDuck::Class.constructor?(m) end end results end
ruby
{ "resource": "" }
q15469
JsDuck.TagLoader.load
train
def load(path) if File.directory?(path) Dir[path+"/**/*.rb"].each do |file| # Ruby 1.8 doesn't understand that "jsduck/tag/tag" and # "./lib/jsduck/tag/tag.rb" refer to the same file. So # explicitly avoid loading this file (as it's required on # top already) to prevent warnings of constants getting # defined multiple times. require(file) unless file =~ /jsduck\/tag\/tag\.rb$/ end else require(path) end end
ruby
{ "resource": "" }
q15470
Optimist.Parser.rbvmomi_connection_opts
train
def rbvmomi_connection_opts opt :host, "host", :type => :string, :short => 'o', :default => ENV['RBVMOMI_HOST'] opt :port, "port", :type => :int, :short => :none, :default => (ENV.member?('RBVMOMI_PORT') ? ENV['RBVMOMI_PORT'].to_i : 443) opt :"no-ssl", "don't use ssl", :short => :none, :default => (ENV['RBVMOMI_SSL'] == '0') opt :insecure, "don't verify ssl certificate", :short => 'k', :default => (ENV['RBVMOMI_INSECURE'] == '1') opt :user, "username", :short => 'u', :default => (ENV['RBVMOMI_USER'] || 'root') opt :password, "password", :short => 'p', :default => (ENV['RBVMOMI_PASSWORD'] || '') opt :path, "SOAP endpoint path", :short => :none, :default => (ENV['RBVMOMI_PATH'] || '/sdk') opt :debug, "Log SOAP messages", :short => 'd', :default => (ENV['RBVMOMI_DEBUG'] || false) end
ruby
{ "resource": "" }
q15471
RbVmomi.TypeLoader.reload_extensions_dir
train
def reload_extensions_dir path loaded = Set.new(typenames.select { |x| @namespace.const_defined? x }) Dir.open(path) do |dir| dir.each do |file| next unless file =~ /\.rb$/ next unless loaded.member? $` file_path = File.join(dir, file) load file_path end end end
ruby
{ "resource": "" }
q15472
RbVmomi.NewDeserializer.leaf_keyvalue
train
def leaf_keyvalue node h = {} node.children.each do |child| next unless child.element? h[child.name] = child.content end [h['key'], h['value']] end
ruby
{ "resource": "" }
q15473
Squib.Deck.draw_graph_paper
train
def draw_graph_paper(width, height) background color: 'white' grid width: 50, height: 50, stroke_color: '#659ae9', stroke_width: 1.5 grid width: 200, height: 200, stroke_color: '#659ae9', stroke_width: 3, x: 50, y: 50 (50..height).step(200) do |y| text str: "y=#{y}", x: 3, y: y - 18, font: 'Open Sans, Sans 10' end end
ruby
{ "resource": "" }
q15474
Squib.Deck.sample
train
def sample(str) @sample_x ||= 100 @sample_y ||= 100 rect x: 460, y: @sample_y - 40, width: 600, height: 180, fill_color: '#FFD655', stroke_color: 'black', radius: 15 text str: str, x: 460, y: @sample_y - 40, width: 540, height: 180, valign: 'middle', align: 'center', font: 'Times New Roman,Serif 8' yield @sample_x, @sample_y @sample_y += 200 end
ruby
{ "resource": "" }
q15475
Squib.Deck.enable_groups_from_env!
train
def enable_groups_from_env! return if ENV['SQUIB_BUILD'].nil? ENV['SQUIB_BUILD'].split(',').each do |grp| enable_build grp.strip.to_sym end end
ruby
{ "resource": "" }
q15476
Squib.LayoutParser.parents_exist?
train
def parents_exist?(yml, key) exists = true Array(yml[key]['extends']).each do |parent| unless yml.key?(parent) exists = false unless Squib.logger.error "Processing layout: '#{key}' attempts to extend a missing '#{yml[key]['extends']}'" end end return exists end
ruby
{ "resource": "" }
q15477
Squib.Card.compute_carve
train
# Computes the per-step horizontal increment ("carve") for a rule rendered
# over +range+. When the configured box width is :native, the value is taken
# from the asset's intrinsic width; otherwise the configured width is spread
# across the range in Pango units.
# NOTE(review): the :png branch divides the image width by (range.size - 1),
# but the :svg branch returns the raw SVG width undivided — confirm that
# asymmetry is intentional.
def compute_carve(rule, range)
  w = rule[:box].width[@index]
  if w == :native
    file = rule[:file][@index].file
    case rule[:type]
    when :png
      Squib.cache_load_image(file).width.to_f / (range.size - 1)
    when :svg
      svg_data = rule[:svg_args].data[@index]
      # Warn (but proceed) when both a file and inline data were supplied.
      unless file.to_s.empty? || svg_data.to_s.empty?
        Squib.logger.warn 'Both an SVG file and SVG data were specified'
      end
      # Nothing to measure: no file and no inline data.
      return 0 if (file.nil? or file.eql? '') and svg_data.nil?
      # Inline data takes precedence; otherwise read the file's contents.
      svg_data = File.read(file) if svg_data.to_s.empty?
      RSVG::Handle.new_from_data(svg_data).width
    end
  else
    # Fixed width: convert to Pango units and spread across the range.
    rule[:box].width[@index] * Pango::SCALE / (range.size - 1)
  end
end
ruby
{ "resource": "" }
q15478
Squib.Deck.render_hand
train
# Renders the cards in +range+ fanned out like a hand and writes the result
# to sheet.full_filename as a PNG. Each card is rotated by an equal share of
# hand.angle_range around a pivot located hand.radius below the card, clipped
# to the trimmed rounded-corner outline from +sheet+, then the inked extent
# is padded by sheet.margin and flattened onto a sheet.fill_color background.
def render_hand(range, sheet, hand)
  cards = range.collect { |i| @cards[i] }
  # Pivot point: horizontally centered on a card, vertically below it by the fan radius.
  center_x = width / 2.0
  center_y = hand.radius + height
  out_size = 3.0 * center_y # oversized scratch canvas; final size comes from ink_extents below
  angle_delta = (hand.angle_range.last - hand.angle_range.first) / cards.size
  # Record onto an unbounded surface so we can crop to the actual ink extents afterward.
  cxt = Cairo::Context.new(Cairo::RecordingSurface.new(0, 0, out_size, out_size))
  cxt.translate(out_size / 2.0, out_size / 2.0)
  cxt.rotate(hand.angle_range.first)
  cxt.translate(-width, -width)
  cards.each_with_index do |card, i|
    # Rotate about the pivot for each successive card in the fan.
    cxt.translate(center_x, center_y)
    cxt.rotate(angle_delta)
    cxt.translate(-center_x, -center_y)
    card.use_cairo do |card_cxt|
      # Clip to the trimmed, round-cornered card outline before painting the card.
      cxt.rounded_rectangle(sheet.trim, sheet.trim, width - (2 * sheet.trim), height - (2 * sheet.trim), sheet.trim_radius, sheet.trim_radius)
      cxt.clip
      cxt.set_source(card_cxt.target)
      cxt.paint
      cxt.reset_clip
    end
  end
  x, y, w, h = cxt.target.ink_extents # I love Ruby assignment ;)
  # Final PNG: ink extents plus margin on all sides, over the fill color.
  png_cxt = Squib::Graphics::CairoContextWrapper.new(Cairo::Context.new(Cairo::ImageSurface.new(w + 2 * sheet.margin, h + 2 * sheet.margin)))
  png_cxt.set_source_squibcolor(sheet.fill_color)
  png_cxt.paint
  png_cxt.translate(-x + sheet.margin, -y + sheet.margin)
  png_cxt.set_source(cxt.target)
  png_cxt.paint
  png_cxt.target.write_to_png sheet.full_filename
end
ruby
{ "resource": "" }
q15479
Squib.Sprue.parse_crop_line
train
# Normalizes one crop-line hash from the sprue template: merges template
# defaults, converts the width to pixels at @dpi, resolves the color, and
# replaces the style/position fields with their parsed object forms.
def parse_crop_line(line)
  new_line = @crop_line_default.merge line
  new_line['width'] = Args::UnitConversion.parse(new_line['width'], @dpi)
  new_line['color'] = colorify new_line['color']
  # Keep the raw style string for reference; 'style' becomes a dash pattern object.
  new_line['style_desc'] = new_line['style']
  new_line['style'] = Sprues::CropLineDash.new(new_line['style'], @dpi)
  new_line['line'] = Sprues::CropLine.new(
    new_line['type'], new_line['position'],
    sheet_width, sheet_height, @dpi
  )
  new_line
end
ruby
{ "resource": "" }
q15480
Squib.Sprue.parse_card
train
# Returns a copy of +card+ with its x/y converted to pixel units at @dpi
# (normalized to a top-left reference when the template positions cards by
# center) and its rotation resolved against the template-wide default.
def parse_card(card)
  parsed = card.clone
  left = Args::UnitConversion.parse(card['x'], @dpi)
  top = Args::UnitConversion.parse(card['y'], @dpi)
  # A :center reference point gets normalized to top-left coordinates.
  if @template_hash['position_reference'] == :center
    left -= card_width / 2
    top -= card_height / 2
  end
  parsed['x'] = left
  parsed['y'] = top
  # Per-card rotation wins over the template default.
  parsed['rotate'] = parse_rotate_param(card['rotate'] || @template_hash['rotate'])
  parsed
end
ruby
{ "resource": "" }
q15481
PaperTrailAssociationTracking.RecordTrail.save_habtm_associations
train
# Creates a PaperTrail::VersionAssociation row for every id on each tracked
# has_and_belongs_to_many association of @record, tying them to the given
# version's transaction.
def save_habtm_associations(version)
  @record.class.reflect_on_all_associations(:has_and_belongs_to_many).each do |a|
    next unless save_habtm_association?(a)
    habtm_assoc_ids(a).each do |id|
      ::PaperTrail::VersionAssociation.create(
        version_id: version.transaction_id,
        foreign_key_name: a.name,
        foreign_key_id: id,
        # NOTE(review): a.klass is a Class object here — presumably coerced
        # to its name when written to the foreign_type column; confirm.
        foreign_type: a.klass
      )
    end
  end
end
ruby
{ "resource": "" }
q15482
PaperTrailAssociationTracking.RecordTrail.habtm_assoc_ids
train
# Returns the ids this HABTM association had *before* the pending change:
# the ids currently on the association, plus those recorded as removed,
# minus those recorded as added in @record.paper_trail_habtm.
def habtm_assoc_ids(habtm_assoc)
  present_ids = @record.send(habtm_assoc.name).to_a.map(&:id) # TODO: `pluck` would use less memory
  pending = @record.paper_trail_habtm.try(:[], habtm_assoc.name)
  removed_ids = pending.try(:[], :removed) || []
  added_ids = pending.try(:[], :added) || []
  present_ids + removed_ids - added_ids
end
ruby
{ "resource": "" }
q15483
PaperTrailAssociationTracking.RecordTrail.save_bt_association
train
# Records a VersionAssociation for a belongs_to association on @record.
# For polymorphic associations the foreign type comes from the record's
# *_type column; otherwise it is the association's class. A row is only
# created when PaperTrail is enabled for the resolved model (i.e. when a
# branch below filled in :foreign_key_id).
def save_bt_association(assoc, version)
  assoc_version_args = {
    version_id: version.id,
    foreign_key_name: assoc.foreign_key
  }

  if assoc.options[:polymorphic]
    foreign_type = @record.send(assoc.foreign_type)
    # Only track when the polymorphic target is set and its model is enabled.
    if foreign_type && ::PaperTrail.request.enabled_for_model?(foreign_type.constantize)
      assoc_version_args[:foreign_key_id] = @record.send(assoc.foreign_key)
      assoc_version_args[:foreign_type] = foreign_type
    end
  elsif ::PaperTrail.request.enabled_for_model?(assoc.klass)
    assoc_version_args[:foreign_key_id] = @record.send(assoc.foreign_key)
    assoc_version_args[:foreign_type] = assoc.klass
  end

  # Persist only when one of the branches above qualified the association.
  if assoc_version_args.key?(:foreign_key_id)
    ::PaperTrail::VersionAssociation.create(assoc_version_args)
  end
end
ruby
{ "resource": "" }
q15484
PaperTrailAssociationTracking.RecordTrail.save_habtm_association?
train
# True when this HABTM association should be versioned: either its join
# table is explicitly whitelisted on the model, or PaperTrail is enabled
# for the associated class.
def save_habtm_association?(assoc)
  return true if @record.class.paper_trail_save_join_tables.include?(assoc.name)
  ::PaperTrail.request.enabled_for_model?(assoc.klass)
end
ruby
{ "resource": "" }
q15485
PaperTrailAssociationTracking.ModelConfig.assert_concrete_activerecord_class
train
# Raises (with the shared E_HPT_ABSTRACT_CLASS message) when +class_name+
# resolves to an abstract ActiveRecord class; concrete classes pass silently.
def assert_concrete_activerecord_class(class_name)
  klass = class_name.constantize
  return unless klass.abstract_class?
  raise format(::PaperTrail::ModelConfig::E_HPT_ABSTRACT_CLASS, @model_class, class_name)
end
ruby
{ "resource": "" }
q15486
Chef::Provisioning::AWSDriver.AWSProvider.wait_for
train
# Polls an AWS object until its state matches one of the expected responses.
#
# opts:
#   :aws_object         - the AWS SDK resource being watched
#   :query_method       - name of the state-reporting method (used in logging)
#   :expected_responses - state symbol(s) to wait for, e.g. [:available]
#   :acceptable_errors  - error classes to tolerate between polls
#   :tries              - max attempts (default 60)
#   :sleep              - seconds between attempts (default 5)
#
# Yields aws_object to an optional block each attempt, then normalizes the
# current state to a "[:state]" string (each resource type exposes it through
# a different API shape) and raises StatusTimeoutError — which makes
# Retryable retry — until it matches or tries are exhausted.
def wait_for(opts = {})
  aws_object = opts[:aws_object]
  query_method = opts[:query_method]
  expected_responses = [opts[:expected_responses]].flatten
  acceptable_errors = [opts[:acceptable_errors] || []].flatten
  tries = opts[:tries] || 60
  sleep = opts[:sleep] || 5
  Retryable.retryable(tries: tries, sleep: sleep) do |retries, exception|
    action_handler.report_progress "waited #{retries * sleep}/#{tries * sleep}s for <#{aws_object.class}:#{aws_object.id}>##{query_method} state to change to #{expected_responses.inspect}..."
    Chef::Log.debug("Current exception in wait_for is #{exception.inspect}") if exception
    begin
      yield(aws_object) if block_given?
      # Normalize the state to "[:state]" per resource type.
      if aws_object.class.to_s.eql?("Aws::EC2::Vpc")
        vpc = new_resource.driver.ec2.describe_vpcs(vpc_ids: [aws_object.vpc_id]).vpcs
        current_response = "[:#{vpc[0].state}]"
      elsif aws_object.class.to_s.eql?("Aws::EC2::NetworkInterface")
        result = new_resource.driver.ec2_resource.network_interface(aws_object.id)
        current_response = "[:#{result.status}]"
        # The API reports "in-use"; match the Ruby symbol spelling expected by callers.
        current_response = "[:in_use]" if current_response.eql?("[:in-use]")
      elsif aws_object.class.to_s.eql?("Aws::EC2::NatGateway")
        current_response = "[:#{aws_object.state}]"
      end
      Chef::Log.debug("Current response in wait_for from [#{query_method}] is #{current_response}")
      unless expected_responses.to_s.include?(current_response)
        # Raising triggers another attempt via Retryable.
        raise StatusTimeoutError.new(aws_object, current_response, expected_responses)
      end
    rescue *acceptable_errors
      # Tolerated transient errors are swallowed; the next retry re-checks.
    end
  end
end
ruby
{ "resource": "" }
q15487
AES.AES._random_seed
train
# Returns +size+ random bytes, preferring OpenSSL's cryptographically strong
# source when it is loaded; otherwise falls back to random alphanumeric
# characters drawn with Kernel#rand.
def _random_seed(size=32)
  return OpenSSL::Random.random_bytes(size) if defined? OpenSSL::Random
  alphabet = ('a'..'z').to_a + ('A'..'Z').to_a + ('0'..'9').to_a
  Array.new(size) { alphabet[rand(alphabet.size)] }.join
end
ruby
{ "resource": "" }
q15488
AES.AES.b64_d
train
# Splits a "$"-delimited payload (IV and ciphertext) and Base64-decodes each
# segment, returning the decoded parts as an array.
def b64_d(data)
  data.split('$').map { |segment| Base64.decode64(segment) }
end
ruby
{ "resource": "" }
q15489
AES.AES._setup
train
# Prepares the memoized cipher for one operation. +action+ is sent straight
# to the OpenSSL cipher (expected to be :encrypt or :decrypt — TODO confirm
# against callers).
def _setup(action)
  @cipher ||= OpenSSL::Cipher.new(@options[:cipher])
  # Toggles encryption mode
  @cipher.send(action)
  @cipher.padding = @options[:padding]
  # @key is a 64-character hex string: unpack it two hex digits at a time
  # and repack into the 32 raw bytes OpenSSL expects.
  @cipher.key = @key.unpack('a2'*32).map{|x| x.hex}.pack('c'*32)
end
ruby
{ "resource": "" }
q15490
Radiant.Engine.default_load_paths
train
# Builds Radiant's default load-path list, in priority order: environment-
# specific test mocks first (so they shadow real classes), the app's
# controller directory, the standard app/config/lib/vendor directories that
# actually exist, then the built-in and library directories.
def default_load_paths
  paths = ["#{RADIANT_ROOT}/test/mocks/#{environment}"]

  # Add the app's controller directory
  paths.concat(Dir["#{RADIANT_ROOT}/app/controllers/"])

  # Followed by the standard includes.
  paths.concat %w(
    app
    app/metal
    app/models
    app/controllers
    app/helpers
    config
    lib
    vendor
  ).map { |dir| "#{RADIANT_ROOT}/#{dir}" }.select { |dir| File.directory?(dir) }

  paths.concat builtin_directories
  paths.concat library_directories
end
ruby
{ "resource": "" }
q15491
Radiant.Extension.extension_enabled?
train
# True when the named extension's class exists and reports itself enabled.
# "foo_bar" resolves to FooBarExtension; a missing constant means disabled.
def extension_enabled?(extension)
  klass = "#{extension.to_s.camelcase}Extension".constantize
  klass.enabled?
rescue NameError
  false
end
ruby
{ "resource": "" }
q15492
Radiant.ExtensionPath.check_subdirectory
train
# Returns the full path of +subpath+ under this extension's path when it is
# an existing directory, nil otherwise.
def check_subdirectory(subpath)
  candidate = File.join(path, subpath)
  File.directory?(candidate) ? candidate : nil
end
ruby
{ "resource": "" }
q15493
Radiant.ExtensionLoader.load_extension
train
# Resolves the extension class for +name+ (e.g. :blog -> BlogExtension),
# marks it unloadable for Rails code reloading, and attaches its path
# record. Returns nil — after printing the failure to stderr — when the
# constant cannot be resolved or loaded.
def load_extension(name)
  extension_path = ExtensionPath.find(name)
  begin
    constant = "#{name}_extension".camelize
    extension = constant.constantize
    extension.unloadable
    extension.path = extension_path
    extension
  rescue LoadError, NameError => e
    $stderr.puts "Could not load extension: #{name}.\n#{e.inspect}"
    nil
  end
end
ruby
{ "resource": "" }
q15494
Radiant.ApplicationHelper.pagination_for
train
# Renders will_paginate controls for +list+. Returns nil (renders nothing)
# unless the list is paginated, i.e. responds to :total_pages.
#
# options:
#   :depaginate   - when true (default), append a 'show all' link
#   :max_per_page - depagination size limit (default from config, or 500)
# Remaining options are passed through to will_paginate.
#
# NOTE(review): when the collection exceeds the limit, the pagination HTML is
# replaced entirely with a link back to page 1 — presumably for the
# depaginated (pp=all) view; verify against callers.
def pagination_for(list, options={})
  if list.respond_to? :total_pages
    options = {
      max_per_page: detail['pagination.max_per_page'] || 500,
      depaginate: true
    }.merge(options.symbolize_keys)
    depaginate = options.delete(:depaginate)                  # supply depaginate: false to omit the 'show all' link
    depagination_limit = options.delete(:max_per_page)        # supply max_per_page: false to include the 'show all' link no matter how large the collection
    html = will_paginate(list, will_paginate_options.merge(options))
    if depaginate && list.total_pages > 1 && (!depagination_limit.blank? || list.total_entries <= depagination_limit.to_i)
      html << content_tag(:div, link_to(t('show_all'), pp: 'all'), class: 'depaginate')
    elsif depaginate && list.total_entries > depagination_limit.to_i
      html = content_tag(:div, link_to("paginate", p: 1), class: 'pagination')
    end
    html
  end
end
ruby
{ "resource": "" }
q15495
PdfExtract.XmlView.get_xml_attributes
train
# Filters an object's attribute hash for XML output: drops globally-ignored
# keys, then (for parents) the parent-only ignores, then any nested
# Hash/Array values; finally rounds numeric and *_score values to the
# configured precision.
def get_xml_attributes obj, parent=true
  kept = obj.reject { |key, _| @@ignored_attributes.include? key }
  kept = kept.reject { |key, _| @@parent_ignored_attributes.include? key } if parent
  kept = kept.reject { |_, val| val.kind_of?(Hash) || val.kind_of?(Array) }
  kept.each_pair do |key, val|
    next unless @@numeric_attributes.include?(key) || key.to_s =~ /.+_score/
    kept[key] = val.round(@render_options[:round])
  end
  kept
end
ruby
{ "resource": "" }
q15496
Commontator.Thread.clear
train
# Detaches the commontable from this closed thread and re-attaches it —
# along with all of the thread's subscriptions — to a brand-new thread, so a
# fresh discussion can start while this one keeps its history. No-op unless
# a commontable is present and the thread is closed.
def clear
  return if commontable.blank? || !is_closed?
  # Thread here resolves to this model class, not Ruby's ::Thread.
  new_thread = Thread.new
  new_thread.commontable = commontable

  # Row lock keeps the hand-off atomic with respect to concurrent writers.
  with_lock do
    self.commontable = nil
    save!
    new_thread.save!
    subscriptions.each do |s|
      s.thread = new_thread
      s.save!
    end
  end
end
ruby
{ "resource": "" }
q15497
Commontator.Thread.can_be_edited_by?
train
# A user may edit the thread only when it still has a commontable, the user
# is present and a commontator, and the configured moderator check approves.
def can_be_edited_by?(user)
  return false if commontable.nil? || user.nil?
  user.is_commontator && config.thread_moderator_proc.call(self, user)
end
ruby
{ "resource": "" }
q15498
Strings.Truncate.shorten
train
# Walks two parallel character streams — original_chars (text including ANSI
# escape sequences) and chars (the same text with escapes stripped) —
# collecting characters until the display-width budget is spent. Wherever the
# streams disagree, the extra characters in original_chars are ANSI codes and
# are copied through so coloring survives truncation; a reset sequence is
# appended at the end if any code was emitted. Both input arrays are consumed
# destructively (shift).
# NOTE(review): the reset is pushed as a nested array (["\e[0m"]) —
# presumably flattened by a later join; confirm.
def shorten(original_chars, chars, length_without_trailing)
  truncated = []
  char_width = display_width(chars[0])
  while length_without_trailing - char_width > 0
    orig_char = original_chars.shift
    char = chars.shift
    break unless char
    while orig_char != char # consume ansi
      ansi = true
      truncated << orig_char
      orig_char = original_chars.shift
    end
    truncated << char
    char_width = display_width(char)
    length_without_trailing -= char_width
  end
  # ansi is nil (falsy) here when no escape sequence was ever consumed.
  truncated << ["\e[0m"] if ansi
  truncated
end
ruby
{ "resource": "" }
q15499
Strings.Pad.pad
train
# Pads +text+ on all four sides: top/bottom padding becomes full-width filler
# rows, and every content line is padded around per the (parsed) padding
# spec. Rows are joined back together with +separator+.
def pad(text, padding, fill: SPACE, separator: NEWLINE)
  padding = Strings::Padder.parse(padding)
  widest = max_line_length(text, separator)
  filler = fill * widest
  rows = []
  padding.top.times { rows << pad_around(filler, padding, fill: fill) }
  text.dup.split(separator).each do |line|
    rows << pad_around(line, padding, fill: fill)
  end
  padding.bottom.times { rows << pad_around(filler, padding, fill: fill) }
  rows.join(separator)
end
ruby
{ "resource": "" }