_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q20000
Fitgem.Client.remove_subscription
train
# Deletes an existing notification subscription.
#
# Builds the subscription URL (forcing the subscription id into the path),
# issues a raw DELETE with the standard headers, and returns a two-element
# array of [HTTP status, parsed response body].
def remove_subscription(opts)
  url = make_subscription_url(opts.merge(:use_subscription_id => true))
  response = raw_delete(url, make_headers(opts))
  [response.status, extract_response_body(response)]
end
ruby
{ "resource": "" }
q20001
Fitgem.Client.validate_subscription_type
train
# Ensures the supplied subscription type is one of SUBSCRIBABLE_TYPES.
#
# @raise [Fitgem::InvalidArgumentError] when the type is nil or unknown
# @return [true] when the type is valid
def validate_subscription_type(subscription_type)
  valid = subscription_type && SUBSCRIBABLE_TYPES.include?(subscription_type)
  unless valid
    raise Fitgem::InvalidArgumentError, "Invalid subscription type (valid values are #{SUBSCRIBABLE_TYPES.join(', ')})"
  end
  true
end
ruby
{ "resource": "" }
q20002
Fitgem.Client.make_subscription_url
train
# Builds the apiSubscriptions URL for the current user: validates opts[:type]
# first; the :all type adds no path segment while any other type becomes a
# "/<type>" segment. When opts[:use_subscription_id] is set, opts[:subscription_id]
# is required and appended before the ".json" suffix.
def make_subscription_url(opts) validate_subscription_type opts[:type] path = if opts[:type] == :all "" else "/"+opts[:type].to_s end url = "/user/#{@user_id}#{path}/apiSubscriptions" if opts[:use_subscription_id] unless opts[:subscription_id] raise Fitgem::InvalidArgumentError, "Must include options[:subscription_id]" end url += "/#{opts[:subscription_id]}" end url += ".json" end
ruby
{ "resource": "" }
q20003
Fitgem.Client.log_weight
train
# Logs a weight measurement for the current user on the given date.
# Normalizes opts[:time] via format_time when present (mutating opts, as the
# original does), then POSTs the weight/date payload.
def log_weight(weight, date, opts={})
  opts[:time] = format_time(opts[:time]) if opts[:time]
  payload = opts.merge(:weight => weight, :date => format_date(date))
  post("/user/#{@user_id}/body/log/weight.json", payload)
end
ruby
{ "resource": "" }
q20004
Fitgem.Client.log_body_fat
train
# Logs a body-fat percentage measurement on the given date.
# Mutates the supplied opts hash (fat/date always set; time normalized when
# present) exactly as the original did, then POSTs it.
def log_body_fat(fat_percentage, date, opts = {})
  opts[:time] = format_time(opts[:time]) if opts[:time]
  opts[:fat] = fat_percentage
  opts[:date] = format_date(date)
  post("/user/#{@user_id}/body/fat.json", opts)
end
ruby
{ "resource": "" }
q20005
Fitgem.Client.create_or_update_body_weight_goal
train
# Creates or updates the user's body-weight goal.
# POSTs the camelCase keys the API expects (startDate/startWeight/weight).
def create_or_update_body_weight_goal(start_date, start_weight, goal_weight)
  payload = {
    startDate: format_date(start_date),
    startWeight: start_weight,
    weight: goal_weight
  }
  post("/user/#{@user_id}/body/log/weight/goal.json", payload)
end
ruby
{ "resource": "" }
q20006
Fitgem.Client.determine_body_uri
train
# Resolves the body-data URI: a single-date form when opts[:date] is given,
# otherwise a date-range form when opts[:base_date] plus either :period or
# :end_date are present.
#
# @raise [Fitgem::InvalidArgumentError] when neither option set is supplied
def determine_body_uri(base_uri, opts = {})
  if opts[:date]
    "#{base_uri}/date/#{format_date(opts[:date])}.json"
  elsif opts[:base_date] && (opts[:period] || opts[:end_date])
    "#{base_uri}/#{construct_date_range_fragment(opts)}.json"
  else
    raise Fitgem::InvalidArgumentError, "You didn't supply one of the required options."
  end
end
ruby
{ "resource": "" }
q20007
Fitgem.Client.create_or_update_weekly_goal
train
# Creates or updates a weekly activity goal.
# Requires opts[:type] to be one of :steps, :distance, :floors and a
# non-nil opts[:value]; POSTs the opts hash unchanged.
def create_or_update_weekly_goal(opts)
  allowed_types = [:steps, :distance, :floors]
  unless opts[:type] && allowed_types.include?(opts[:type])
    raise InvalidArgumentError, 'Must specify type in order to create or update a weekly goal. One of (:steps, :distance, or :floors) is required.'
  end
  unless opts[:value]
    raise InvalidArgumentError, 'Must specify value of the weekly goal to be created or updated.'
  end
  post("/user/#{@user_id}/activities/goals/weekly.json", opts)
end
ruby
{ "resource": "" }
q20008
Fitgem.Client.update_alarm
train
# Updates an existing tracker alarm. Normalizes opts[:time] to include a
# timezone offset (mutating opts, as the original does) before POSTing.
def update_alarm(alarm_id, device_id, opts)
  opts[:time] = format_time(opts[:time], include_timezone: true)
  url = "/user/#{@user_id}/devices/tracker/#{device_id}/alarms/#{alarm_id}.json"
  post(url, opts)
end
ruby
{ "resource": "" }
q20009
Fitgem.Client.format_time
train
# Normalizes a time value into "HH:MM" (optionally with a "+HH:MM"-style
# timezone suffix when opts[:include_timezone] is set).
#
# Accepts the literal string 'now', an "HH:MM" string, or a DateTime/Time.
# @raise [Fitgem::InvalidTimeArgument] for malformed strings or other types
def format_time(time, opts = {})
  pattern = opts[:include_timezone] ? "%H:%M%:z" : "%H:%M"
  case time
  when String
    return DateTime.now.strftime pattern if time == 'now'
    unless time =~ /^\d{2}\:\d{2}$/
      raise Fitgem::InvalidTimeArgument, "Invalid time (#{time}), must be in HH:mm format"
    end
    if opts[:include_timezone]
      # Append the local offset to the caller-supplied HH:MM string
      [time, DateTime.now.strftime("%:z")].join
    else
      time
    end
  when DateTime, Time
    time.strftime pattern
  else
    raise Fitgem::InvalidTimeArgument, "Date used must be a valid time object or a string in the format HH:mm; supplied argument is a #{time.class}"
  end
end
ruby
{ "resource": "" }
q20010
Fitgem.Client.label_for_measurement
train
# Returns the unit label for a measurement type (e.g. :weight, :distance).
# When respect_user_unit_preferences is true, requires a live connection and
# looks up the user's per-measurement unit system from user_info (keys ending
# in "Unit" — assumes that schema; TODO confirm against the API payload),
# falling back to api_unit_system. The result is validated against the known
# ApiUnitSystem values before indexing unit_mappings.
def label_for_measurement(measurement_type, respect_user_unit_preferences=true) unless [:duration, :distance, :elevation, :height, :weight, :measurements, :liquids, :blood_glucose].include?(measurement_type) raise InvalidMeasurementType, "Supplied measurement_type parameter must be one of [:duration, :distance, :elevation, :height, :weight, :measurements, :liquids, :blood_glucose], current value is :#{measurement_type}" end selected_unit_system = api_unit_system if respect_user_unit_preferences unless connected? raise ConnectionRequiredError, "No connection to Fitbit API; one is required when passing respect_user_unit_preferences=true" end # Cache the unit systems for the current user @unit_systems ||= self.user_info['user'].select {|key, value| key =~ /Unit$/ } case measurement_type when :distance selected_unit_system = @unit_systems["distanceUnit"] when :height selected_unit_system = @unit_systems["heightUnit"] when :liquids selected_unit_system = @unit_systems["waterUnit"] when :weight selected_unit_system = @unit_systems["weightUnit"] when :blood_glucose selected_unit_system = @unit_systems["glucoseUnit"] else selected_unit_system = api_unit_system end end # Fix the METRIC system difference selected_unit_system = Fitgem::ApiUnitSystem.METRIC if selected_unit_system == "METRIC" # Ensure the target unit system is one that we know about unless [ApiUnitSystem.US, ApiUnitSystem.UK, ApiUnitSystem.METRIC].include?(selected_unit_system) raise InvalidUnitSystem, "The select unit system must be one of [ApiUnitSystem.US, ApiUnitSystem.UK, ApiUnitSystem.METRIC], current value is #{selected_unit_system}" end unit_mappings[selected_unit_system][measurement_type] end
ruby
{ "resource": "" }
q20011
GettextI18nRails.ModelAttributesFinder.table_name_to_namespaced_model
train
# Maps a database table name to its model class, trying the plain camelized
# name first and then substituting each underscore (one at a time) with '/'
# so camelcase produces a namespaced constant (Foo::Bar). Returns nil when no
# match is found or when the model is nested more than one module deep.
# Relies on ActiveSupport's singularize/camelcase and the sibling to_class.
def table_name_to_namespaced_model(table_name) # First assume that there are no namespaces model = to_class(table_name.singularize.camelcase) return model if model != nil # If you were wrong, assume that the model is in a namespace. # Iterate over the underscores and try to substitute each of them # for a slash that camelcase() replaces with the scope operator (::). underscore_position = table_name.index('_') while underscore_position != nil namespaced_table_name = table_name.dup namespaced_table_name[underscore_position] = '/' model = to_class(namespaced_table_name.singularize.camelcase) return model if model != nil underscore_position = table_name.index('_', underscore_position + 1) end # The model either is not defined or is buried more than one level # deep in a module hierarchy return nil end
ruby
{ "resource": "" }
q20012
GettextI18nRails.ModelAttributesFinder.to_class
train
# Resolves a constant name to a Class, or nil when the constant is missing,
# fails to load, or is not a Class (e.g. a Module).
#
# Module.const_defined?() is deliberately not used here: with class
# autoloading enabled a constant may be reported as undefined yet resolve
# when actually referenced, so we let constantize raise instead.
def to_class(name)
  begin
    resolved = name.constantize
  rescue NameError
    return nil
  rescue LoadError => e
    $stderr.puts "failed to load '#{name}', ignoring (#{e.class}: #{e.message})"
    return nil
  end
  resolved.is_a?(Class) ? resolved : nil
end
ruby
{ "resource": "" }
q20013
LL.CompiledGrammar.display_messages
train
# Prints every compilation error and warning (in that order) to the
# configured output stream, one message per line.
def display_messages
  %i[errors warnings].each do |collection|
    send(collection).each do |message|
      output.puts(message.to_s)
    end
  end
end
ruby
{ "resource": "" }
q20014
LL.ConfigurationCompiler.generate_rules
train
# Flattens every rule branch of the grammar into a row of the parser driver
# table. Each row starts with an action marker plus a running action index,
# then the branch steps in reverse order encoded as [TYPE, value] pairs:
# terminals are offset by +1, rules/operators map through rule_indices, and
# epsilon encodes as 0. Operators additionally push an add_value_stack marker
# unless their type is listed in SKIP_VALUE_STACK. Returns the array of rows.
def generate_rules(grammar) rules = [] action_index = 0 rule_indices = grammar.rule_indices term_indices = grammar.terminal_indices grammar.rules.each_with_index do |rule, rule_index| rule.branches.each do |branch| row = [TYPES[:action], action_index] action_index += 1 branch.steps.reverse_each do |step| if step.is_a?(Terminal) row << TYPES[:terminal] row << term_indices[step] + 1 elsif step.is_a?(Rule) row << TYPES[:rule] row << rule_indices[step] elsif step.is_a?(Epsilon) row << TYPES[:epsilon] row << 0 elsif step.is_a?(Operator) row << TYPES[step.type] row << rule_indices[step.receiver] unless SKIP_VALUE_STACK.include?(step.type) row << TYPES[:add_value_stack] row << 0 end end end rules << row end end return rules end
ruby
{ "resource": "" }
q20015
LL.GrammarCompiler.warn_for_unused_rules
train
# Adds a warning for every rule that is never referenced. The rule at
# index 0 (the start rule) is exempt.
def warn_for_unused_rules(compiled_grammar)
  compiled_grammar.rules.each_with_index do |rule, idx|
    next if idx == 0 || rule.references > 0
    compiled_grammar.add_warning(
      "Unused rule #{rule.name.inspect}",
      rule.source_line
    )
  end
end
ruby
{ "resource": "" }
q20016
LL.GrammarCompiler.warn_for_unused_terminals
train
# Adds a warning for every terminal that is never referenced by a rule.
def warn_for_unused_terminals(compiled_grammar)
  compiled_grammar.terminals.each do |terminal|
    next if terminal.references > 0
    compiled_grammar.add_warning(
      "Unused terminal #{terminal.name.inspect}",
      terminal.source_line
    )
  end
end
ruby
{ "resource": "" }
q20017
LL.GrammarCompiler.on_grammar
train
# Processes the root grammar node. Rules can be referenced before they are
# defined, so a prototype is created for every rule child first; all children
# are then processed in document order.
def on_grammar(node, compiled_grammar)
  rule_nodes = node.children.select { |child| child.type == :rule }
  rule_nodes.each { |child| on_rule_prototype(child, compiled_grammar) }
  node.children.each { |child| process(child, compiled_grammar) }
end
ruby
{ "resource": "" }
q20018
LL.GrammarCompiler.on_name
train
# Sets the parser name from the name node's children, joined with '::'.
# Warns (but still overwrites) when a name was already set.
def on_name(node, compiled_grammar)
  existing = compiled_grammar.name
  if existing
    compiled_grammar.add_warning(
      "Overwriting existing parser name #{existing.inspect}",
      node.source_line
    )
  end
  segments = node.children.map { |child| process(child, compiled_grammar) }
  compiled_grammar.name = segments.join('::')
end
ruby
{ "resource": "" }
q20019
LL.GrammarCompiler.on_terminals
train
# Registers each terminal declared under the terminals node, recording an
# error instead when a terminal with the same name was already defined.
def on_terminals(node, compiled_grammar)
  node.children.each do |child|
    term_name = process(child, compiled_grammar)
    if compiled_grammar.has_terminal?(term_name)
      compiled_grammar.add_error(
        "The terminal #{term_name.inspect} has already been defined",
        child.source_line
      )
    else
      compiled_grammar.add_terminal(term_name, child.source_line)
    end
  end
end
ruby
{ "resource": "" }
q20020
LL.GrammarCompiler.on_rule
train
# Compiles a rule definition node. Records an error when the rule name
# clashes with a terminal, and bails out (without processing branches) when
# the rule already has branches. Otherwise compiles all branch children and
# appends them to the rule prototype created earlier by on_rule_prototype.
def on_rule(node, compiled_grammar) name = process(node.children[0], compiled_grammar) if compiled_grammar.has_terminal?(name) compiled_grammar.add_error( "the rule name #{name.inspect} is already used as a terminal name", node.source_line ) end if compiled_grammar.has_rule_with_branches?(name) compiled_grammar.add_error( "the rule #{name.inspect} has already been defined", node.source_line ) return end branches = node.children[1..-1].map do |child| process(child, compiled_grammar) end rule = compiled_grammar.lookup_rule(name) rule.branches.concat(branches) end
ruby
{ "resource": "" }
q20021
LL.GrammarCompiler.on_rule_prototype
train
# Creates an empty Rule placeholder for a rule node so later rules can
# reference it before its branches are compiled. No-op when the rule exists.
def on_rule_prototype(node, compiled_grammar)
  rule_name = process(node.children[0], compiled_grammar)
  return if compiled_grammar.has_rule?(rule_name)
  compiled_grammar.add_rule(Rule.new(rule_name, node.source_line))
end
ruby
{ "resource": "" }
q20022
LL.GrammarCompiler.on_branch
train
# Compiles a branch node into a Branch: the first child holds the steps,
# the optional second child holds the associated action code.
def on_branch(node, compiled_grammar)
  steps_node, code_node = node.children
  steps = process(steps_node, compiled_grammar)
  code = code_node ? process(code_node, compiled_grammar) : nil
  Branch.new(steps, node.source_line, code)
end
ruby
{ "resource": "" }
q20023
SNMP.SNMPv2_Trap.sys_up_time
train
# Returns the sysUpTime value from the trap, which must be the first
# varbind in an SNMPv2 trap PDU.
#
# @raise [InvalidTrapVarbind] when the first varbind is missing or not
#   sysUpTime.0
def sys_up_time
  vb = @varbind_list[0]
  unless vb && vb.name == SYS_UP_TIME_OID
    raise InvalidTrapVarbind, "Expected sysUpTime.0, found " + vb.to_s
  end
  vb.value
end
ruby
{ "resource": "" }
q20024
SNMP.SNMPv2_Trap.trap_oid
train
# Returns the snmpTrapOID value from the trap, which must be the second
# varbind in an SNMPv2 trap PDU.
#
# @raise [InvalidTrapVarbind] when the second varbind is missing or not
#   snmpTrapOID.0
def trap_oid
  vb = @varbind_list[1]
  unless vb && vb.name == SNMP_TRAP_OID_OID
    raise InvalidTrapVarbind, "Expected snmpTrapOID.0, found " + vb.to_s
  end
  vb.value
end
ruby
{ "resource": "" }
q20025
SNMP.ObjectId.subtree_of?
train
def subtree_of?(parent_tree) parent_tree = make_object_id(parent_tree) if parent_tree.length > self.length false else parent_tree.each_index do |i| return false if parent_tree[i] != self[i] end true end end
ruby
{ "resource": "" }
q20026
SNMP.ObjectId.index
train
# Returns the portion of this OID below parent_tree as a new ObjectId
# (the "index" part of a table instance OID).
#
# @raise [ArgumentError] when this OID is not under parent_tree, or when
#   the two OIDs are identical (no index portion)
def index(parent_tree)
  parent_tree = make_object_id(parent_tree)
  unless subtree_of?(parent_tree)
    raise ArgumentError, "#{self.to_s} not a subtree of #{parent_tree.to_s}"
  end
  if self.length == parent_tree.length
    raise ArgumentError, "OIDs are the same"
  end
  ObjectId.new(self[parent_tree.length..-1])
end
ruby
{ "resource": "" }
q20027
SNMP.MIB.varbind_list
train
# Normalizes a single OID string, a single varbind-convertible object, or a
# list of either into a VarBindList. The option (e.g. :KeepValue, :NullValue)
# is applied via apply_option to every varbind except bare OID strings passed
# alone. Raises ArgumentError for a nil object_list.
def varbind_list(object_list, option=:KeepValue) raise ArgumentError, "A list of ObjectId or VarBind objects is NilClass" if object_list.nil? vb_list = VarBindList.new if object_list.respond_to? :to_str vb_list << oid(object_list).to_varbind elsif object_list.respond_to? :to_varbind vb_list << apply_option(object_list.to_varbind, option) else object_list.each do |item| if item.respond_to? :to_str varbind = oid(item).to_varbind else varbind = item.to_varbind end vb_list << apply_option(varbind, option) end end vb_list end
ruby
{ "resource": "" }
q20028
SNMP.MIB.name
train
# Translates a numeric OID to its symbolic MIB name. Repeatedly strips the
# last sub-identifier (accumulating it as an instance index) until a name is
# found in @by_oid, then returns "name.index". Falls back to the dotted
# numeric form when nothing matches. Works on a copy, so the caller's oid is
# not mutated (slice! is applied to current_oid only).
def name(oid) current_oid = ObjectId.new(oid) index = [] while current_oid.size > 1 name = @by_oid[current_oid.to_s] if name return index.empty? ? name : "#{name}.#{index.join('.')}" end index.unshift current_oid.slice!(-1) end ObjectId.new(oid).to_s end
ruby
{ "resource": "" }
q20029
SNMP.Manager.get_next
train
# Sends a GetNext request for the given OIDs (values nulled out) and returns
# the response PDU obtained via try_request.
def get_next(object_list) varbind_list = @mib.varbind_list(object_list, :NullValue) request = GetNextRequest.new(@@request_id.next, varbind_list) try_request(request) end
ruby
{ "resource": "" }
q20030
SNMP.Manager.get_bulk
train
# Sends a GetBulk request: the first non_repeaters varbinds are fetched once,
# the remainder up to max_repetitions times. Returns the response PDU.
def get_bulk(non_repeaters, max_repetitions, object_list) varbind_list = @mib.varbind_list(object_list, :NullValue) request = GetBulkRequest.new( @@request_id.next, varbind_list, non_repeaters, max_repetitions) try_request(request) end
ruby
{ "resource": "" }
q20031
SNMP.Manager.set
train
# Sends a Set request with the supplied varbinds (values kept) using the
# write community string. Returns the response PDU.
def set(object_list) varbind_list = @mib.varbind_list(object_list, :KeepValue) request = SetRequest.new(@@request_id.next, varbind_list) try_request(request, @write_community) end
ruby
{ "resource": "" }
q20032
SNMP.Manager.trap_v1
train
# Sends an SNMPv1 trap. Coerces enterprise to an OID, agent_addr to an
# IpAddress, specific_trap to an Integer and timestamp to TimeTicks before
# building the SNMPv1_Trap PDU; fire-and-forget via send_request.
def trap_v1(enterprise, agent_addr, generic_trap, specific_trap, timestamp, object_list=[]) vb_list = @mib.varbind_list(object_list, :KeepValue) ent_oid = @mib.oid(enterprise) agent_ip = IpAddress.new(agent_addr) specific_int = Integer(specific_trap) ticks = TimeTicks.new(timestamp) trap = SNMPv1_Trap.new(ent_oid, agent_ip, generic_trap, specific_int, ticks, vb_list) send_request(trap, @community, @host, @trap_port) end
ruby
{ "resource": "" }
q20033
SNMP.Manager.trap_v2
train
# Sends an SNMPv2 trap (no acknowledgement). The mandatory sysUpTime and
# snmpTrapOID varbinds are prepended by create_trap_vb_list.
def trap_v2(sys_up_time, trap_oid, object_list=[]) vb_list = create_trap_vb_list(sys_up_time, trap_oid, object_list) trap = SNMPv2_Trap.new(@@request_id.next, vb_list) send_request(trap, @community, @host, @trap_port) end
ruby
{ "resource": "" }
q20034
SNMP.Manager.inform
train
# Sends an Inform request (an acknowledged trap) and waits for the response
# via try_request; the mandatory trap varbinds are prepended as in trap_v2.
def inform(sys_up_time, trap_oid, object_list=[]) vb_list = create_trap_vb_list(sys_up_time, trap_oid, object_list) request = InformRequest.new(@@request_id.next, vb_list) try_request(request, @community, @host, @trap_port) end
ruby
{ "resource": "" }
q20035
SNMP.Manager.create_trap_vb_list
train
# Builds the varbind list for v2 traps/informs: sysUpTime.0 and snmpTrapOID.0
# first (as the protocol requires), followed by the caller's varbinds.
def create_trap_vb_list(sys_up_time, trap_oid, object_list) vb_args = @mib.varbind_list(object_list, :KeepValue) uptime_vb = VarBind.new(SNMP::SYS_UP_TIME_OID, TimeTicks.new(sys_up_time.to_int)) trap_vb = VarBind.new(SNMP::SNMP_TRAP_OID_OID, @mib.oid(trap_oid)) VarBindList.new([uptime_vb, trap_vb, *vb_args]) end
ruby
{ "resource": "" }
q20036
SNMP.Manager.walk
train
# Walks a MIB subtree with repeated get_next calls, yielding either a single
# varbind (when object_list was a single OID/varbind) or the whole row.
# Stops at EndOfMibView, when the index column leaves the starting subtree,
# or (unless @ignore_oid_order) when OIDs stop increasing, which indicates a
# misbehaving agent. Rows are aligned via validate_row before being yielded.
def walk(object_list, index_column=0) raise ArgumentError, "expected a block to be given" unless block_given? vb_list = @mib.varbind_list(object_list, :NullValue) raise ArgumentError, "index_column is past end of varbind list" if index_column >= vb_list.length is_single_vb = object_list.respond_to?(:to_str) || object_list.respond_to?(:to_varbind) start_list = vb_list start_oid = vb_list[index_column].name last_oid = start_oid loop do vb_list = get_next(vb_list).vb_list index_vb = vb_list[index_column] break if EndOfMibView == index_vb.value stop_oid = index_vb.name if !@ignore_oid_order && stop_oid <= last_oid warn "OIDs are not increasing, #{last_oid} followed by #{stop_oid}" break end break unless stop_oid.subtree_of?(start_oid) last_oid = stop_oid if is_single_vb yield index_vb else vb_list = validate_row(vb_list, start_list, index_column) yield vb_list end end end
ruby
{ "resource": "" }
q20037
SNMP.Manager.validate_row
train
# Aligns a walked table row: derives the row index from the index column,
# then replaces any varbind whose OID does not match column-OID + row-index
# with a NoSuchInstance placeholder, so holes in sparse tables do not shift
# columns. Returns the (possibly modified) vb_list.
def validate_row(vb_list, start_list, index_column) start_vb = start_list[index_column] index_vb = vb_list[index_column] row_index = index_vb.name.index(start_vb.name) vb_list.each_index do |i| if i != index_column expected_oid = start_list[i].name + row_index if vb_list[i].name != expected_oid vb_list[i] = VarBind.new(expected_oid, NoSuchInstance).with_mib(@mib) end end end vb_list end
ruby
{ "resource": "" }
q20038
SNMP.Manager.get_response
train
# Receives messages from the transport until one whose request_id matches
# the outstanding request arrives (stale responses from earlier timed-out
# requests are discarded). The begin...end until form is a deliberate
# do-while: at least one receive is always attempted.
def get_response(request) begin data = @transport.recv(@max_bytes) message = Message.decode(data, @mib) response = message.pdu end until request.request_id == response.request_id response end
ruby
{ "resource": "" }
q20039
Sauce.Config.is_defined?
train
# Checks whether a top-level constant is defined and, when sub_mod is given,
# whether that constant in turn defines sub_mod.
def is_defined?(top_mod, sub_mod = nil)
  found = Object.const_defined? top_mod
  unless !found || sub_mod.nil?
    found = Object.const_get(top_mod).const_defined? sub_mod
  end
  found
end
ruby
{ "resource": "" }
q20040
Sauce.Job.build!
train
# Populates the job from a decoded JSON hash. String "false" values are
# massaged into real booleans first; the *_time fields are coerced to
# Integer seconds. Raises NoIDError when no id is present.
def build!(options)
  # Massage JSON: the API encodes booleans as the string "false"
  options.each { |key, value| options[key] = false if options[key] == "false" }
  %w[id owner status error name browser browser_version os
     video_url log_url public tags passed].each do |attr|
    instance_variable_set("@#{attr}", options[attr])
  end
  %w[creation_time start_time end_time].each do |attr|
    instance_variable_set("@#{attr}", options[attr].to_i)
  end
  @custom_data = options['custom-data']
  raise NoIDError if @id.nil? or @id.empty?
end
ruby
{ "resource": "" }
q20041
Sprockets.StaticNonDigestGenerator.generate
train
# Produces non-digest copies of all compiled assets. For js/css, the digest
# fingerprints found in the asset body are stripped (only digests known from
# @asset_digests are removed) and a gzipped sibling is written with matching
# mtimes; all other files are copied as-is along with any existing .gz.
# Logical paths named "index.*" are collapsed to their directory name first.
# NOTE(review): the leading-'-' claim in the inline comment depends on
# DIGEST_REGEX, which is defined elsewhere — confirm there.
def generate start_time = Time.now.to_f env.each_logical_path do |logical_path| if File.basename(logical_path)[/[^\.]+/, 0] == 'index' logical_path.sub!(/\/index\./, '.') end next unless compile_path?(logical_path) if digest_path = @digests[logical_path] abs_digest_path = "#{@target}/#{digest_path}" abs_logical_path = "#{@target}/#{logical_path}" # Remove known digests from css & js if abs_digest_path.match(/\.(?:js|css)$/) mtime = File.mtime(abs_digest_path) asset_body = File.read(abs_digest_path) # Find all hashes in the asset body with a leading '-' asset_body.gsub!(DIGEST_REGEX) do |match| # Only remove if known digest $1.in?(@asset_digests.values) ? '' : match end # Write non-digest file File.open abs_logical_path, 'w' do |f| f.write asset_body end # Set modification and access times File.utime(File.atime(abs_digest_path), mtime, abs_logical_path) # Also write gzipped asset File.open("#{abs_logical_path}.gz", 'wb') do |f| gz = Zlib::GzipWriter.new(f, Zlib::BEST_COMPRESSION) gz.mtime = mtime.to_i gz.write asset_body gz.close end env.logger.debug "Stripped digests, copied to #{logical_path}, and created gzipped asset" else # Otherwise, treat file as binary and copy it. # Ignore paths that have no digests, such as READMEs unless !File.exist?(abs_digest_path) || abs_digest_path == abs_logical_path FileUtils.cp_r abs_digest_path, abs_logical_path, :remove_destination => true env.logger.debug "Copied binary asset to #{logical_path}" # Copy gzipped asset if exists if File.exist? "#{abs_digest_path}.gz" FileUtils.cp_r "#{abs_digest_path}.gz", "#{abs_logical_path}.gz", :remove_destination => true env.logger.debug "Copied gzipped asset to #{logical_path}.gz" end end end end end elapsed_time = ((Time.now.to_f - start_time) * 1000).to_i env.logger.debug "Generated non-digest assets in #{elapsed_time}ms" end
ruby
{ "resource": "" }
q20042
Sprockets.AssetWithDependencies.init_with
train
# Restores a serialized asset from a coder hash: source, dependency digest,
# required assets (resolved back through the environment, raising
# UnserializeError for paths outside the load paths; bundling disabled) and
# the dependency file list with mtimes/digests.
def init_with(environment, coder, asset_options = {}) asset_options[:bundle] = false super(environment, coder) @source = coder['source'] @dependency_digest = coder['dependency_digest'] @required_assets = coder['required_paths'].map { |p| p = expand_root_path(p) unless environment.paths.detect { |path| p[path] } raise UnserializeError, "#{p} isn't in paths" end p == pathname.to_s ? self : environment.find_asset(p, asset_options) } @dependency_paths = coder['dependency_paths'].map { |h| DependencyFile.new(expand_root_path(h['path']), h['mtime'], h['digest']) } end
ruby
{ "resource": "" }
q20043
Sprockets.AssetWithDependencies.encode_with
train
# Serializes the asset into a coder hash (the inverse of init_with): source,
# dependency digest, required asset paths and dependency files, with all
# paths relativized against the root and mtimes in ISO-8601.
def encode_with(coder) super coder['source'] = source coder['dependency_digest'] = dependency_digest coder['required_paths'] = required_assets.map { |a| relativize_root_path(a.pathname).to_s } coder['dependency_paths'] = dependency_paths.map { |d| { 'path' => relativize_root_path(d.pathname).to_s, 'mtime' => d.mtime.iso8601, 'digest' => d.digest } } end
ruby
{ "resource": "" }
q20044
Sauce.Connect.extract_config
train
# Copies connection settings out of the options hash, then (unless
# :skip_sauce_config is set) fills any still-nil values from Sauce::Config.
# NOTE(review): @skip_connection_test is overwritten unconditionally from the
# config (not ||= like the others) — confirm whether that is intentional.
def extract_config options @username = options[:username] @access_key = options[:access_key] @cli_options = options[:connect_options] @sc4_executable = options[:sauce_connect_4_executable] @skip_connection_test = options[:skip_connection_test] @quiet = options[:quiet] @timeout = options.fetch(:timeout) { TIMEOUT } unless options.fetch(:skip_sauce_config, false) require 'sauce/config' @config = Sauce::Config.new(options) @username ||= @config.username @access_key ||= @config.access_key @cli_options ||= @config[:connect_options] @sc4_executable ||= @config[:sauce_connect_4_executable] @skip_connection_test = @config[:skip_connection_test] end end
ruby
{ "resource": "" }
q20045
Wongi::Engine.Token.generated?
train
# Returns true when this token (or any descendant token) generated the
# given WME.
def generated? wme
  return true if generated_wmes.include?(wme)
  children.any? { |child| child.generated? wme }
end
ruby
{ "resource": "" }
q20046
Zenaton.Client.website_url
train
# Builds a Zenaton website URL for a resource. Hash params get the api_token
# merged in and are appended via append_params_to_url; otherwise the token is
# inlined as a query string and the app environment is appended. The base URL
# can be overridden through the ZENATON_API_URL environment variable.
def website_url(resource = '', params = {}) api_url = ENV['ZENATON_API_URL'] || ZENATON_API_URL url = "#{api_url}/#{resource}" if params.is_a?(Hash) params[API_TOKEN] = @api_token append_params_to_url(url, params) else add_app_env("#{url}?#{API_TOKEN}=#{@api_token}&", params) end end
ruby
{ "resource": "" }
q20047
Zenaton.Client.start_task
train
# Dispatches a single task to the local worker: POSTs its class name,
# serialized properties, and (when the task defines one) its
# max_processing_time.
def start_task(task) max_processing_time = if task.respond_to?(:max_processing_time) task.max_processing_time end @http.post( worker_url('tasks'), ATTR_PROG => PROG, ATTR_NAME => class_name(task), ATTR_DATA => @serializer.encode(@properties.from(task)), ATTR_MAX_PROCESSING_TIME => max_processing_time ) end
ruby
{ "resource": "" }
q20048
Zenaton.Client.start_workflow
train
# Starts a workflow instance on the worker: POSTs canonical/class names,
# serialized properties, and any custom id parsed from the flow.
def start_workflow(flow) @http.post( instance_worker_url, ATTR_PROG => PROG, ATTR_CANONICAL => canonical_name(flow), ATTR_NAME => class_name(flow), ATTR_DATA => @serializer.encode(@properties.from(flow)), ATTR_ID => parse_custom_id_from(flow) ) end
ruby
{ "resource": "" }
q20049
Zenaton.Client.find_workflow
train
# Looks up a workflow instance by name and custom id. Rebuilds the workflow
# object from the returned name and serialized properties; returns nil when
# the API reports that no instance exists, re-raising any other
# Zenaton::InternalError.
def find_workflow(workflow_name, custom_id) params = { ATTR_ID => custom_id, ATTR_NAME => workflow_name } params[ATTR_PROG] = PROG data = @http.get(instance_website_url(params))['data'] data && @properties.object_from( data['name'], @serializer.decode(data['properties']) ) rescue Zenaton::InternalError => exception return nil if exception.message =~ /No workflow instance found/ raise exception end
ruby
{ "resource": "" }
q20050
Zenaton.Client.send_event
train
# Sends an event to a running workflow instance, identified by workflow name
# and custom id; the event object is serialized along with its class name.
def send_event(workflow_name, custom_id, event) body = { ATTR_PROG => PROG, ATTR_NAME => workflow_name, ATTR_ID => custom_id, EVENT_NAME => event.class.name, EVENT_INPUT => @serializer.encode(@properties.from(event)) } @http.post(send_event_url, body) end
ruby
{ "resource": "" }
q20051
Zenaton.Engine.dispatch
train
# Dispatches a batch of jobs: validates each, runs them locally when no
# processor is configured (process_locally?), otherwise hands the batch to
# the processor (skipping empty batches). Always returns nil.
def dispatch(jobs) jobs.map(&method(:check_argument)) jobs.map(&method(:local_dispatch)) if process_locally?(jobs) @processor&.process(jobs, false) unless jobs.length.zero? nil end
ruby
{ "resource": "" }
q20052
ClubhouseRuby.PathBuilder.method_missing
train
# DSL entry point: unknown method names are interpreted as API actions
# (execute immediately), resources (extend the path being built), or
# exceptions (a fixed path plus action); anything else falls through to the
# default method_missing behavior.
def method_missing(name, *args) if known_action?(name) execute_request(ACTIONS[name], args.first) elsif known_resource?(name) build_path(name, args.first) elsif known_exception?(name) build_path(EXCEPTIONS[name][:path], nil) execute_request(EXCEPTIONS[name][:action], args.first) else super end end
ruby
{ "resource": "" }
q20053
ClubhouseRuby.PathBuilder.respond_to_missing?
train
# Mirrors method_missing: the object responds to any known action, resource
# or exception name, in addition to its normal methods.
def respond_to_missing?(name, include_private = false)
  handled = known_action?(name) || known_resource?(name) || known_exception?(name)
  handled || super
end
ruby
{ "resource": "" }
q20054
ClubhouseRuby.Request.fetch
train
# Performs the HTTPS request: builds the Net::HTTP request class from the
# action name, attaches body and format header, and wraps the raw response.
def fetch Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |https| req = Net::HTTP.const_get(action).new(uri) set_body(req) set_format_header(req) wrap_response(https.request(req)) end end
ruby
{ "resource": "" }
q20055
EventCalendar.CalendarHelper.day_link
train
# Renders a calendar day link: reuses the current request params, overriding
# the action and year/month/day from the given date (Rails link_to helper).
def day_link(text, date, day_action) link_to(text, params.merge(:action => day_action, :year => date.year, :month => date.month, :day => date.day), :class => 'ec-day-link') end
ruby
{ "resource": "" }
q20056
EventCalendar.CalendarHelper.cal_row_heights
train
# Computes a pixel height for each calendar week row: at least the even share
# of the available height, growing to fit the largest number of stacked
# event strips in that week plus the day-number header and margins.
# NOTE(review): the loop runs to strip size + 1, so the last iteration reads
# strip[size] (nil) — presumably harmless padding to close the final week,
# but confirm against a month that ends exactly on a week boundary.
def cal_row_heights(options) # number of rows is the number of days in the event strips divided by 7 num_cal_rows = options[:event_strips].first.size / 7 # the row will be at least this big min_height = (options[:height] - options[:day_names_height]) / num_cal_rows row_heights = [] num_event_rows = 0 # for every day in the event strip... 1.upto(options[:event_strips].first.size+1) do |index| num_events = 0 # get the largest event strip that has an event on this day options[:event_strips].each_with_index do |strip, strip_num| num_events = strip_num + 1 unless strip[index-1].blank? end # get the most event rows for this week num_event_rows = [num_event_rows, num_events].max # if we reached the end of the week, calculate this row's height if index % 7 == 0 total_event_height = options[:event_height] + options[:event_margin] calc_row_height = (num_event_rows * total_event_height) + options[:day_nums_height] + options[:event_margin] row_height = [min_height, calc_row_height].max row_heights << row_height num_event_rows = 0 end end row_heights end
ruby
{ "resource": "" }
q20057
EventCalendar.ClassMethods.event_strips_for_month
train
# Returns the event strips for the month containing shown_date.
#
# Supports the two-argument call style where an options hash is passed in
# place of first_day_of_week; in that case the week starts on Sunday (0).
#
# @param shown_date [Date] any date inside the month to display
# @param first_day_of_week [Integer, Hash] 0-6, or an options hash
# @param find_options [Hash] extra finder options
# @return [Array<Array>] one strip (array of day slots) per stacking level
def event_strips_for_month(shown_date, first_day_of_week=0, find_options = {})
  if first_day_of_week.is_a?(Hash)
    # Use a non-destructive merge: the original merge! mutated the
    # caller-supplied find_options hash as a side effect.
    find_options = find_options.merge(first_day_of_week)
    first_day_of_week = 0
  end
  strip_start, strip_end = get_start_and_end_dates(shown_date, first_day_of_week)
  events = events_for_date_range(strip_start, strip_end, find_options)
  event_strips = create_event_strips(strip_start, strip_end, events)
  event_strips
end
ruby
{ "resource": "" }
q20058
EventCalendar.ClassMethods.get_start_and_end_dates
train
# Computes the first and last dates of the calendar grid for the month
# containing shown_date: the grid opens at the start of the week holding the
# 1st, and closes at the start of next month's first week — or exactly at
# the month boundary when the month ends evenly on the last weekday.
def get_start_and_end_dates(shown_date, first_day_of_week=0)
  start_of_month = Date.civil(shown_date.year, shown_date.month, 1)
  strip_start = beginning_of_week(start_of_month, first_day_of_week)
  next_month = start_of_month.next_month
  strip_end =
    if next_month == beginning_of_week(next_month, first_day_of_week)
      # The month ends exactly at a week boundary
      next_month
    else
      # Extend into next month to complete the final week
      beginning_of_week(next_month + 7, first_day_of_week)
    end
  [strip_start, strip_end]
end
ruby
{ "resource": "" }
q20059
EventCalendar.ClassMethods.events_for_date_range
train
# Finds all events overlapping [start_d, end_d): an event matches when it
# ends at/after the range start and starts before the range end. Uses the
# legacy Rails 2 scoped find with a raw SQL condition; results are ordered
# by start time ascending.
def events_for_date_range(start_d, end_d, find_options = {}) self.scoped(find_options).find( :all, :conditions => [ "(? <= #{self.quoted_table_name}.#{self.end_at_field}) AND (#{self.quoted_table_name}.#{self.start_at_field}< ?)", start_d.to_time.utc, end_d.to_time.utc ], :order => "#{self.quoted_table_name}.#{self.start_at_field} ASC" ) end
ruby
{ "resource": "" }
q20060
EventCalendar.ClassMethods.create_event_strips
train
# Packs events into horizontal strips for calendar rendering. Each strip has
# one slot per visible day; an event occupies the contiguous slots of its
# (clipped) date range, and a new strip is opened whenever no existing strip
# has room.
#
# @param strip_start [Date] first visible day
# @param strip_end [Date] last visible day
# @param events [Array] events responding to #clip_range
# @return [Array<Array>] strips of event slots (nil = empty day)
def create_event_strips(strip_start, strip_end, events)
  # Start with a single empty strip: one nil slot per displayed day.
  event_strips = [[nil] * (strip_end - strip_start + 1)]
  events.each do |event|
    # clip_range bounds the event to the displayed window. (The original
    # also pre-assigned the raw start_at/end_at dates here, but those
    # values were immediately overwritten — dead code, removed.)
    cur_date, end_date = event.clip_range(strip_start, strip_end)
    start_range = (cur_date - strip_start).to_i
    end_range = (end_date - strip_start).to_i
    # Skip events entirely outside the viewing range
    next unless (start_range <= end_range) and (end_range >= 0)
    range = start_range..end_range
    open_strip = space_in_current_strips?(event_strips, range)
    if open_strip.nil?
      # No strip has room on these days; open a fresh one
      new_strip = [nil] * (strip_end - strip_start + 1)
      range.each { |r| new_strip[r] = event }
      event_strips << new_strip
    else
      range.each { |r| open_strip[r] = event }
    end
  end
  event_strips
end
ruby
{ "resource": "" }
q20061
EventCalendar.InstanceMethods.clip_range
train
# Clips this event's date span to the window [start_d, end_d].
# Comparison happens on plain Date objects to avoid timezone artifacts from
# Time-valued start_at/end_at.
def clip_range(start_d, end_d)
  event_start = start_at.to_date
  event_end = end_at.to_date
  # Clip the start only when the event actually overlaps the window start
  clipped_start =
    if event_start < start_d && event_end >= start_d
      start_d
    else
      event_start
    end
  clipped_end = event_end > end_d ? end_d : event_end
  [clipped_start, clipped_end]
end
ruby
{ "resource": "" }
q20062
CouchPotato.Database.view
train
# Executes a view query described by a view spec: builds the design-document
# payload (map/reduce plus optional list function and lib), runs it against
# the CouchRest database, post-processes the results through the spec, and
# injects this database into every resulting document that accepts one.
def view(spec) results = CouchPotato::View::ViewQuery.new( couchrest_database, spec.design_document, {spec.view_name => { :map => spec.map_function, :reduce => spec.reduce_function } }, ({spec.list_name => spec.list_function} unless spec.list_name.nil?), spec.lib, spec.language ).query_view!(spec.view_parameters) processed_results = spec.process_results results processed_results.each do |document| document.database = self if document.respond_to?(:database=) end if processed_results.respond_to?(:each) processed_results end
ruby
{ "resource": "" }
q20063
Sitemap.Generator.load
train
# Configures the generator: each options key is assigned through its writer
# method, and the given block is stored as the routes definition.
def load(options = {}, &block)
  options.each { |attribute, value| send("#{attribute}=", value) }
  self.routes = block
end
ruby
{ "resource": "" }
q20064
Sitemap.Generator.resources
train
# Adds sitemap entries for a whole resource collection: the index path first
# (unless :skip_index), then one path per record. Records come from the
# :objects lambda when given, otherwise from the classified constant of the
# type name, fetched in batches of the configured size; :objects itself is
# excluded from the per-link params.
def resources(type, options = {}) path(type) unless options[:skip_index] link_params = options.reject { |k, v| k == :objects } get_objects = lambda { options[:objects] ? options[:objects].call : type.to_s.classify.constantize } get_objects.call.find_each(:batch_size => Sitemap.configuration.query_batch_size) do |object| path(object, link_params) end end
ruby
{ "resource": "" }
q20065
Sitemap.Generator.render
train
def render(object = "fragment") xml = Builder::XmlMarkup.new(:indent => 2) file = File.read(File.expand_path("../../views/#{object}.xml.builder", __FILE__)) instance_eval file end
ruby
{ "resource": "" }
q20066
Sitemap.Generator.process_fragment!
train
# Renders the current sitemap fragment into a closed Tempfile and appends
# it to the fragment list for later assembly.
def process_fragment!
  fragment = Tempfile.new("sitemap.xml")
  fragment.write(render)
  fragment.close
  self.fragments << fragment
end
ruby
{ "resource": "" }
q20067
Sitemap.Generator.file_url
train
def file_url(path = "sitemap.xml") if context file_path = File.join("/", context, path) else file_path = File.join("/", path) end URI::HTTP.build(:host => host, :path => file_path).to_s end
ruby
{ "resource": "" }
q20068
SippyCup.Scenario.build
train
# Builds the scenario from an array of textual steps. Each step is split
# into an instruction plus an optional quoted-argument tail and dispatched
# via __send__; failures are collected into @errors with 1-based step
# numbers rather than aborting the whole build.
def build(steps) raise ArgumentError, "Must provide scenario steps" unless steps steps.each_with_index do |step, index| begin instruction, args = step.split ' ', 2 args = split_quoted_string args if args && !args.empty? self.__send__ instruction, *args else self.__send__ instruction end rescue => e @errors << {step: index + 1, message: "#{step}: #{e.message}"} end end end
ruby
{ "resource": "" }
q20069
SippyCup.Scenario.register
train
# Registers a SIP user. Without a password a single unauthenticated REGISTER
# is sent; with one, the full challenge flow runs: REGISTER, expect 401,
# authenticated REGISTER, expect 200 OK. Retransmission defaults are applied
# to the send options only.
def register(user, password = nil, opts = {})
  send_opts = opts.dup
  send_opts[:retrans] ||= DEFAULT_RETRANS
  user, domain = parse_user user
  result = send register_message(domain, user), send_opts
  return result unless password
  recv opts.merge(response: 401, auth: true, optional: false)
  send register_auth(domain, user, password), send_opts
  receive_ok opts.merge(optional: false)
end
ruby
{ "resource": "" }
q20070
SippyCup.Scenario.receive_invite
train
# Expects an inbound INVITE and captures addressing state from it: the remote
# URI and tag from the From header, the local URI from the To header, and a
# bracketed call address assembled from the local address. All captures are
# stored in SIPp variables that a later scenario-initiated hangup can use.
def receive_invite(opts = {}) recv(opts.merge(request: 'INVITE', rrs: true)) do |recv| action = doc.create_element('action') do |action| action << doc.create_element('ereg') do |ereg| ereg['regexp'] = '<sip:(.*)>.*;tag=([^;]*)' ereg['search_in'] = 'hdr' ereg['header'] = 'From:' ereg['assign_to'] = 'dummy,remote_addr,remote_tag' end action << doc.create_element('ereg') do |ereg| ereg['regexp'] = '<sip:(.*)>' ereg['search_in'] = 'hdr' ereg['header'] = 'To:' ereg['assign_to'] = 'dummy,local_addr' end action << doc.create_element('assignstr') do |assignstr| assignstr['assign_to'] = "call_addr" assignstr['value'] = "[$local_addr]" end end recv << action end # These variables (except dummy) will only be used if we initiate a hangup @reference_variables += %w(dummy remote_addr remote_tag local_addr call_addr) end
ruby
{ "resource": "" }
q20071
SippyCup.Scenario.receive_answer
train
# Expects the 200 OK that answers our INVITE, recording the route set (rrs)
# and the response time (rtd), and capturing the remote URI and tag from the
# To header into SIPp variables for a later scenario-initiated hangup.
# Caller-supplied options override the rrs/rtd defaults.
def receive_answer(opts = {}) options = { rrs: true, # Record Record Set: Make the Route headers available via [routes] later rtd: true # Response Time Duration: Record the response time } receive_200(options.merge(opts)) do |recv| recv << doc.create_element('action') do |action| action << doc.create_element('ereg') do |ereg| ereg['regexp'] = '<sip:(.*)>.*;tag=([^;]*)' ereg['search_in'] = 'hdr' ereg['header'] = 'To:' ereg['assign_to'] = 'dummy,remote_addr,remote_tag' end end end # These variables will only be used if we initiate a hangup @reference_variables += %w(dummy remote_addr remote_tag) end
ruby
{ "resource": "" }
q20072
SippyCup.Scenario.wait_for_answer
train
# Convenience macro covering the full answer sequence: waits through the
# provisional responses (100 Trying, 180 Ringing, 183 Progress), receives the
# 200 OK answer and ACKs it. Options are forwarded to every step.
def wait_for_answer(opts = {})
  receive_trying(opts)
  receive_ringing(opts)
  receive_progress(opts)
  receive_answer(opts)
  ack_answer(opts)
end
ruby
{ "resource": "" }
q20073
SippyCup.Scenario.receive_message
train
# Expects a MESSAGE request. When +regexp+ is given, the message body is
# matched against it (check_it makes a non-match fail the call) and the
# capture is stored in a freshly numbered message_N variable that is also
# declared as referenced. The request is then answered with a 200 OK.
def receive_message(regexp = nil) recv = Nokogiri::XML::Node.new 'recv', doc recv['request'] = 'MESSAGE' scenario_node << recv if regexp action = Nokogiri::XML::Node.new 'action', doc ereg = Nokogiri::XML::Node.new 'ereg', doc ereg['regexp'] = regexp ereg['search_in'] = 'body' ereg['check_it'] = true var = "message_#{@message_variables += 1}" ereg['assign_to'] = var @reference_variables << var action << ereg recv << action end okay end
ruby
{ "resource": "" }
q20074
SippyCup.Scenario.call_length_repartition
train
# Declares the CallLengthRepartition statistics table, coercing every bound
# and the interval to integers first.
def call_length_repartition(min, max, interval)
  bounds = [min, max, interval].map(&:to_i)
  partition_table('CallLengthRepartition', *bounds)
end
ruby
{ "resource": "" }
q20075
SippyCup.Scenario.response_time_repartition
train
# Declares the ResponseTimeRepartition statistics table, coercing every bound
# and the interval to integers first.
def response_time_repartition(min, max, interval)
  bounds = [min, max, interval].map(&:to_i)
  partition_table('ResponseTimeRepartition', *bounds)
end
ruby
{ "resource": "" }
q20076
SippyCup.Scenario.to_xml
train
# Serializes the scenario document to XML. Media placeholder nodes are either
# removed (no pcap path or no media) or pointed at options[:pcap_path]; any
# referenced variables are declared in a trailing <Reference> element so SIPp
# does not warn about them. Operates on a duplicate, leaving the in-memory
# document untouched.
def to_xml(options = {}) pcap_path = options[:pcap_path] docdup = doc.dup # Not removing in reverse would most likely remove the wrong # nodes because of changing indices. @media_nodes.reverse.each do |nop| nopdup = docdup.xpath(nop.path) if pcap_path.nil? or @media.blank? nopdup.remove else exec = nopdup.xpath("./action/exec").first exec['play_pcap_audio'] = pcap_path end end unless @reference_variables.empty? scenario_node = docdup.xpath('scenario').first scenario_node << docdup.create_element('Reference') do |ref| ref[:variables] = @reference_variables.to_a.join ',' end end docdup.to_xml end
ruby
{ "resource": "" }
q20077
SippyCup.Scenario.compile!
train
# Compiles the scenario to "<filename>.xml" and, when media is present, the
# companion "<filename>.pcap". Progress is reported on stdout. Returns the
# scenario file name.
def compile!
  pcap_filename = "#{@filename}.pcap"
  unless @media.blank?
    print "Compiling media to #{@filename}.pcap..."
    compile_media.to_file filename: pcap_filename
    puts "done."
  end
  scenario_filename = "#{@filename}.xml"
  print "Compiling scenario to #{scenario_filename}..."
  File.open(scenario_filename, 'w') do |f|
    f.write to_xml(:pcap_path => pcap_filename)
  end
  puts "done."
  scenario_filename
end
ruby
{ "resource": "" }
q20078
SippyCup.XMLScenario.to_tmpfiles
train
# Dumps the scenario XML — and the media payload, when present — into rewound
# Tempfiles so callers can read them back immediately.
#
# Returns a Hash with :scenario => Tempfile and :media => Tempfile or nil.
def to_tmpfiles
  write_tmpfile = lambda do |prefix, content|
    tmp = Tempfile.new prefix
    tmp.write content
    tmp.rewind
    tmp
  end
  scenario_file = write_tmpfile.call('scenario', @xml)
  media_file = @media ? write_tmpfile.call('media', @media) : nil
  {scenario: scenario_file, media: media_file}
end
ruby
{ "resource": "" }
q20079
SippyCup.Runner.wait
train
# Blocks until the forked SIPp process exits, closes the stderr/stdout
# capture pipes, and interprets the exit status: full success and
# partial-failure (some calls failed) are both logged, and the stats file
# location is reported when one was requested. Returns the boolean outcome
# from process_exit_status; input tempfiles are cleaned up in all cases.
def wait exit_status = Process.wait2 @sipp_pid.to_i @err_rd.close if @err_rd @stdout_rd.close if @stdout_rd final_result = process_exit_status exit_status, @stderr_buffer if final_result @logger.info "Test completed successfully!" else @logger.info "Test completed successfully but some calls failed." end @logger.info "Statistics logged at #{File.expand_path @scenario_options[:stats_file]}" if @scenario_options[:stats_file] final_result ensure cleanup_input_files end
ruby
{ "resource": "" }
q20080
Machinist.Machinable.blueprint
train
# Defines (when a block is given) or looks up the blueprint called +name+.
# The :master blueprint chains to the superclass's master blueprint, while a
# named blueprint chains to this class's own master.
def blueprint(name = :master, &block)
  @blueprints ||= {}
  if block_given?
    # Master blueprints inherit up the class hierarchy; named ones locally.
    parent = name == :master ? superclass : self
    @blueprints[name] = blueprint_class.new(self, :parent => parent, &block)
  end
  @blueprints[name]
end
ruby
{ "resource": "" }
q20081
Machinist.Machinable.make!
train
# Builds and saves an object (or an array of objects) from a blueprint.
# Raises BlueprintCantSaveError when the blueprint type has no make! support.
def make!(*args)
  decode_args_to_make(*args) do |blueprint, attributes|
    unless blueprint.respond_to?(:make!)
      raise BlueprintCantSaveError.new(blueprint)
    end
    blueprint.make!(attributes)
  end
end
ruby
{ "resource": "" }
q20082
Machinist.Machinable.decode_args_to_make
train
# Decodes the argument list accepted by make/make! into (count, name,
# attributes) — each optional, in that order — then looks up the named
# blueprint and yields it with the attributes.
#
# Returns the single yielded value, or an Array of +count+ yielded values
# when a count was given.
#
# Raises ArgumentError on unrecognised arguments and NoBlueprintError when
# the blueprint is not defined.
def decode_args_to_make(*args) #:nodoc:
  shift_arg = lambda {|klass| args.shift if args.first.is_a?(klass) }
  # Integer instead of Fixnum: Fixnum was folded into Integer in Ruby 2.4
  # and the constant was removed entirely in Ruby 3.2.
  count      = shift_arg[Integer]
  name       = shift_arg[Symbol] || :master
  attributes = shift_arg[Hash]   || {}
  raise ArgumentError.new("Couldn't understand arguments") unless args.empty?

  @blueprints ||= {}
  blueprint = @blueprints[name]
  raise NoBlueprintError.new(self, name) unless blueprint

  if count.nil?
    yield(blueprint, attributes)
  else
    Array.new(count) { yield(blueprint, attributes) }
  end
end
ruby
{ "resource": "" }
q20083
Machinist.Blueprint.make
train
# Constructs an object from this blueprint: a fresh lathe is run through the
# blueprint's own block and then every ancestor blueprint's block, and the
# built object is returned.
def make(attributes = {})
  lathe = lathe_class.new(@klass, new_serial_number, attributes)
  lathe.instance_eval(&@block)
  each_ancestor do |ancestor|
    lathe.instance_eval(&ancestor.block)
  end
  lathe.object
end
ruby
{ "resource": "" }
q20084
Zookeeper.RequestRegistry.get_watcher
train
# Looks up the watcher registered for +req_id+ under the registry mutex.
# The global callback request id always resolves to the default watcher;
# other ids are removed from the table unless opts[:keep] is truthy.
def get_watcher(req_id, opts={})
  @mutex.synchronize do
    case
    when Constants::ZKRB_GLOBAL_CB_REQ == req_id
      { :watcher => @default_watcher, :watcher_context => nil }
    when opts[:keep]
      @watcher_reqs[req_id]
    else
      @watcher_reqs.delete(req_id)
    end
  end
end
ruby
{ "resource": "" }
q20085
Zookeeper.CZookeeper.wait_until_connected
train
# Blocks until the session reaches ZOO_CONNECTED_STATE, the handle becomes
# unhealthy, or +timeout+ seconds elapse (a nil timeout waits indefinitely).
# Bails out early if the event loop fails to start within the timeout.
# Returns the final connected? state.
def wait_until_connected(timeout=10) time_to_stop = timeout ? Time.now + timeout : nil return false unless wait_until_running(timeout) @state_mutex.synchronize do while true if timeout now = Time.now break if (@state == ZOO_CONNECTED_STATE) || unhealthy? || (now > time_to_stop) delay = time_to_stop.to_f - now.to_f @state_cond.wait(delay) else break if (@state == ZOO_CONNECTED_STATE) || unhealthy? @state_cond.wait end end end connected? end
ruby
{ "resource": "" }
q20086
Zookeeper.CZookeeper.submit_and_block
train
# Packages a zookeeper call as a Continuation, queues it for the event thread
# (:state checks go on a dedicated queue ahead of regular pending work),
# wakes the event loop, and blocks until the continuation delivers a value.
# Raises Exceptions::NotConnected when the handle is already unhealthy.
def submit_and_block(meth, *args) @mutex.synchronize do raise Exceptions::NotConnected if unhealthy? end cnt = Continuation.new(meth, *args) @reg.synchronize do |r| if meth == :state r.state_check << cnt else r.pending << cnt end end wake_event_loop! cnt.value end
ruby
{ "resource": "" }
q20087
Zookeeper.Continuation.call
train
# Receives the async result hash for this continuation, extracts the values
# relevant to the originating method, and unblocks the waiting caller.
def call(hash)
  logger.debug { "continuation req_id #{req_id}, got hash: #{hash.inspect}" }
  result_keys = METH_TO_ASYNC_RESULT_KEYS.fetch(meth)
  @rval = hash.values_at(*result_keys)
  logger.debug { "delivering result #{@rval.inspect}" }
  deliver!
end
ruby
{ "resource": "" }
q20088
Zookeeper.ZookeeperBase.close
train
# Shuts the client down: atomically detaches the current czk handle under the
# mutex, then stops the dispatch thread and closes the handle on a separate
# shutdown thread. Joining that thread is skipped when we are already on the
# event-dispatch thread (deadlock avoidance) and is bounded at 30 seconds
# otherwise. Safe to call more than once; always returns nil.
def close sd_thread = nil @mutex.synchronize do return unless @czk inst, @czk = @czk, nil sd_thread = Thread.new(inst) do |_inst| stop_dispatch_thread! _inst.close end end # if we're on the event dispatch thread for some stupid reason, then don't join unless event_dispatch_thread? # hard-coded 30 second delay, don't hang forever if sd_thread.join(30) != sd_thread logger.error { "timed out waiting for shutdown thread to exit" } end end nil end
ruby
{ "resource": "" }
q20089
Zookeeper.ZookeeperBase.create
train
# Forwards a create call to the underlying C client untouched and strips the
# chroot prefix from the returned path before handing it back.
def create(*args)
  rc, new_path = czk.create(*args)
  stripped_path = @req_registry.strip_chroot_from(new_path)
  [rc, stripped_path]
end
ruby
{ "resource": "" }
q20090
Zookeeper.ZookeeperBase.strip_chroot_from
train
# Removes the configured chroot prefix from +path+.
#
# Returns +path+ unchanged when no chroot is configured, when +path+ is nil,
# or when it does not start with the chroot prefix.
def strip_chroot_from(path)
  # `&&` instead of the low-precedence `and` keyword: identical result here,
  # but the keyword operators are a well-known precedence trap in conditions.
  return path unless chrooted? && path && path.start_with?(chroot_path)
  path[chroot_path.length..-1]
end
ruby
{ "resource": "" }
q20091
Zookeeper.SpecHelpers.rm_rf
train
# Recursively deletes +path+ and everything beneath it using the given
# zookeeper client. A ZNONODE return at any level is tolerated (the node is
# already gone); any other non-zero return code raises. Returns the deleted
# path.
def rm_rf(z, path) z.get_children(:path => path).tap do |h| if h[:rc].zero? h[:children].each do |child| rm_rf(z, File.join(path, child)) end elsif h[:rc] == ZNONODE # no-op else raise "Oh noes! unexpected return value! #{h.inspect}" end end rv = z.delete(:path => path) unless (rv[:rc].zero? or rv[:rc] == ZNONODE) raise "oh noes! failed to delete #{path}" end path end
ruby
{ "resource": "" }
q20092
Releaf::Content.Route.params
train
# Resolves a controller method name (or literal path) into route parameters.
# Returns a two-element array: [path, routing options].
def params(method_or_path, options = {})
  target = method_or_path.to_s
  path = path_for(target, options)
  route_options = options_for(target, options)
  [path, route_options]
end
ruby
{ "resource": "" }
q20093
Releaf::Content.Node.maintain_name
train
# Ensures the node name is unique among its siblings by appending "(n)" with
# the smallest n that yields no conflict. The record itself is excluded from
# the uniqueness check.
def maintain_name
  suffix = nil
  attempt = 0
  loop do
    conflicts = self.class.where(parent_id: parent_id, name: "#{name}#{suffix}").where("id != ?", id.to_i)
    break unless conflicts.exists?
    attempt += 1
    suffix = "(#{attempt})"
  end
  self.name = "#{name}#{suffix}" if suffix
end
ruby
{ "resource": "" }
q20094
Releaf::Content.Node.maintain_slug
train
# Ensures the node slug is unique among its siblings by appending "-n" with
# the smallest n that yields no conflict. The record itself is excluded from
# the uniqueness check.
def maintain_slug
  suffix = nil
  attempt = 0
  loop do
    conflicts = self.class.where(parent_id: parent_id, slug: "#{slug}#{suffix}").where("id != ?", id.to_i)
    break unless conflicts.exists?
    attempt += 1
    suffix = "-#{attempt}"
  end
  self.slug = "#{slug}#{suffix}" if suffix
end
ruby
{ "resource": "" }
q20095
ActsAsNode.ClassMethods.acts_as_node
train
# Registers the calling class as a content node and stores its permitted
# params/fields configuration on a class-level accessor.
def acts_as_node(params: nil, fields: nil)
  ActsAsNode.register_class(name)
  # Expose the configuration through a class attribute for later lookup.
  cattr_accessor :acts_as_node_configuration
  self.acts_as_node_configuration = {params: params, fields: fields}
end
ruby
{ "resource": "" }
q20096
RR.Space.verify_ordered_double
train
# Verifies that +double+ is the next expected ordered double: it must carry a
# terminal TimesCalledExpectation and sit at the head of the ordered list,
# otherwise a DoubleOrderError is raised. A fully satisfied double is popped
# from the list. Returns the double.
def verify_ordered_double(double) unless double.terminal? raise RR::Errors.build_error(:DoubleOrderError, "Ordered Doubles cannot have a NonTerminal TimesCalledExpectation") end unless @ordered_doubles.first == double message = Double.formatted_name(double.method_name, double.expected_arguments) message << " called out of order in list\n" message << Double.list_message_part(@ordered_doubles) raise RR::Errors.build_error(:DoubleOrderError, message) end @ordered_doubles.shift unless double.attempt? double end
ruby
{ "resource": "" }
q20097
RR.Space.reset
train
# Restores RR's global state between examples: clears the backtrace and
# error-class overrides, the ordered-double list, all double /
# method_missing / singleton_method_added injections, recorded calls, and
# bound objects.
def reset RR.trim_backtrace = false RR.overridden_error_class = nil reset_ordered_doubles Injections::DoubleInjection.reset reset_method_missing_injections reset_singleton_method_added_injections reset_recorded_calls reset_bound_objects end
ruby
{ "resource": "" }
q20098
Gem.SourceIndex.load_gems_in
train
# Reloads the index from the *.gemspec files found in +spec_dirs+.
# Directories are processed in reverse order, so specs found in earlier
# directories override those from later ones. Returns self.
def load_gems_in(*spec_dirs)
  @gems.clear

  spec_dirs.reverse_each do |spec_dir|
    spec_files = Dir.glob File.join(spec_dir, '*.gemspec')

    spec_files.each do |spec_file|
      # String#untaint was a no-op from Ruby 2.7 and removed outright in
      # Ruby 3.2; guard the call so this still loads on modern interpreters.
      spec_file = spec_file.untaint if spec_file.respond_to?(:untaint)
      gemspec = self.class.load_specification spec_file

      add_spec gemspec if gemspec
    end
  end

  self
end
ruby
{ "resource": "" }
q20099
Gem.SourceIndex.index_signature
train
# Computes a SHA-256 fingerprint of the index: the hex digest of the sorted,
# comma-joined gem keys, so two indexes holding the same gems share a
# signature.
def index_signature
  require 'digest'
  # Class-level hexdigest already returns a String, so the intermediate
  # Digest::SHA256.new instance and the trailing .to_s were redundant.
  Digest::SHA256.hexdigest(@gems.keys.sort.join(','))
end
ruby
{ "resource": "" }