_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q14700
RailsBestPractices.Analyzer.analyze_source_codes
train
# Runs the three analysis phases over every parsed file, driving an optional
# progress bar sized at 3 steps per file.
# NOTE(review): `send(:process, process)` invokes a method literally named
# `process`, passing the phase name as its argument — presumably a dispatcher
# defined elsewhere in Analyzer; confirm it exists before renaming anything.
def analyze_source_codes
  @bar = ProgressBar.create(title: 'Source Code', total: parse_files.size * 3) if display_bar?
  %w[lexical prepare review].each { |process| send(:process, process) }
  @bar.finish if display_bar?
end
ruby
{ "resource": "" }
q14701
Warden.Hooks._run_callbacks
train
# Executes every registered callback of the given kind, in order.
#
# Each registered entry is a [callback, conditions] pair. A callback fires
# only when all of its conditions match the options Hash that callers pass
# as the final argument: an Array-valued condition means set membership,
# anything else is compared with equality.
def _run_callbacks(kind, *args) #:nodoc:
  options = args.last # Last callback arg MUST be a Hash
  send("_#{kind}").each do |callback, conditions|
    mismatch = conditions.any? do |key, expected|
      expected.is_a?(Array) ? !expected.include?(options[key]) : expected != options[key]
    end
    callback.call(*args) unless mismatch
  end
end
ruby
{ "resource": "" }
q14702
Warden.Hooks.after_failed_fetch
train
# Hook registration: runs the given block after a failed session fetch.
#
# @param options [Hash] conditions restricting when the callback fires
# @param method [Symbol] :push appends to the hook list, :unshift prepends
# @raise [BlockNotGiven] when no callback block is supplied
def after_failed_fetch(options = {}, method = :push, &block)
  raise BlockNotGiven unless block
  _after_failed_fetch.send(method, [block, options])
end
ruby
{ "resource": "" }
q14703
Warden.Hooks.before_logout
train
# Hook registration: runs the given block just before a scope is logged out.
#
# @param options [Hash] conditions restricting when the callback fires
# @param method [Symbol] :push appends to the hook list, :unshift prepends
# @raise [BlockNotGiven] when no callback block is supplied
def before_logout(options = {}, method = :push, &block)
  raise BlockNotGiven unless block
  _before_logout.send(method, [block, options])
end
ruby
{ "resource": "" }
q14704
Warden.Hooks.on_request
train
# Hook registration: runs the given block on every request.
#
# @param options [Hash] conditions restricting when the callback fires
# @param method [Symbol] :push appends to the hook list, :unshift prepends
# @raise [BlockNotGiven] when no callback block is supplied
def on_request(options = {}, method = :push, &block)
  raise BlockNotGiven unless block
  _on_request.send(method, [block, options])
end
ruby
{ "resource": "" }
q14705
Warden.Proxy.user
train
# Returns the user for the given scope, memoized in @users.
#
# Accepts either a raw scope value or an options Hash; :scope defaults to
# the configured default scope and :run_callbacks defaults to true.
# On a cache miss the user is loaded from the session serializer; when
# nothing is found, the :after_failed_fetch hooks run (with the nil user).
# A found user is re-registered through set_user with event :fetch,
# otherwise nil is cached for the scope.
def user(argument = {})
  opts = argument.is_a?(Hash) ? argument : { :scope => argument }
  scope = (opts[:scope] ||= @config.default_scope)

  if @users.has_key?(scope)
    @users[scope]
  else
    unless user = session_serializer.fetch(scope)
      run_callbacks = opts.fetch(:run_callbacks, true)
      manager._run_callbacks(:after_failed_fetch, user, self, :scope => scope) if run_callbacks
    end

    @users[scope] = user ? set_user(user, opts.merge(:event => :fetch)) : nil
  end
end
ruby
{ "resource": "" }
q14706
Warden.Proxy.logout
train
# Logs out the given scopes (all scopes when none are given, which also
# resets the whole session at the end). For each scope this runs the
# :before_logout hooks, removes the per-scope session data, and deletes the
# serialized user.
def logout(*scopes)
  if scopes.empty?
    scopes = @users.keys
    reset_session = true
  end

  scopes.each do |scope|
    user = @users.delete(scope)
    manager._run_callbacks(:before_logout, user, self, :scope => scope)

    raw_session.delete("warden.user.#{scope}.session") unless raw_session.nil?
    session_serializer.delete(scope, user)
  end

  reset_session! if reset_session
end
ruby
{ "resource": "" }
q14707
Warden.Proxy._run_strategies_for
train
# Runs authentication strategies for a scope until one halts.
# Nothing runs when the previous winning strategy already halted or when the
# proxy is locked. Without explicit strategy args, the configured defaults
# for the scope (or :_all) are used. Each strategy runs at most once
# (performed? guard) and must be valid? for this request; the last strategy
# to run becomes the winning strategy for the scope.
def _run_strategies_for(scope, args) #:nodoc:
  self.winning_strategy = @winning_strategies[scope]
  return if winning_strategy && winning_strategy.halted?

  # Do not run any strategy if locked
  return if @locked

  if args.empty?
    defaults = @config[:default_strategies]
    strategies = defaults[scope] || defaults[:_all]
  end

  (strategies || args).each do |name|
    strategy = _fetch_strategy(name, scope)
    next unless strategy && !strategy.performed? && strategy.valid?

    strategy._run!
    self.winning_strategy = @winning_strategies[scope] = strategy
    break if strategy.halted?
  end
end
ruby
{ "resource": "" }
q14708
Warden.Proxy._fetch_strategy
train
# Looks up (and memoizes, per scope) a strategy instance by name.
# Unknown names either resolve to nil (when missing strategies are silenced
# via config) or raise a RuntimeError.
def _fetch_strategy(name, scope)
  @strategies[scope][name] ||=
    begin
      klass = Warden::Strategies[name]
      if klass
        klass.new(@env, scope)
      elsif @config.silence_missing_strategies?
        nil
      else
        raise "Invalid strategy #{name}"
      end
    end
end
ruby
{ "resource": "" }
q14709
GooglePlaces.Request.parsed_response
train
# Maps the Places API HTTP response to a value or a typed error:
# 3xx returns the Location header (redirect target), 5xx raises
# APIConnectionError, otherwise the body's 'status' field selects between
# returning the parsed body ('OK'/'ZERO_RESULTS') and raising the matching
# error class.
# NOTE(review): an unrecognized status string falls through and returns nil.
def parsed_response
  return @response.headers["location"] if @response.code >= 300 && @response.code < 400
  raise APIConnectionError.new(@response) if @response.code >= 500 && @response.code < 600

  case @response.parsed_response['status']
  when 'OK', 'ZERO_RESULTS'
    @response.parsed_response
  when 'OVER_QUERY_LIMIT'
    raise OverQueryLimitError.new(@response)
  when 'REQUEST_DENIED'
    raise RequestDeniedError.new(@response)
  when 'INVALID_REQUEST'
    raise InvalidRequestError.new(@response)
  when 'UNKNOWN_ERROR'
    raise UnknownError.new(@response)
  when 'NOT_FOUND'
    raise NotFoundError.new(@response)
  end
end
ruby
{ "resource": "" }
q14710
GooglePlaces.Client.spots
train
# Searches for spots near the given coordinates.
# Options are merged over the client-wide defaults; :detail controls whether
# each result is re-fetched at full detail.
def spots(lat, lng, options = {})
  merged = @options.merge(options)
  detail_flag = merged.delete(:detail)
  collection_detail_level(Spot.list(lat, lng, @api_key, merged), detail_flag)
end
ruby
{ "resource": "" }
q14711
GooglePlaces.Client.spots_by_query
train
# Runs a Places text search for the given query string.
# Options are merged over the client-wide defaults; :detail controls whether
# each result is re-fetched at full detail.
def spots_by_query(query, options = {})
  merged = @options.merge(options)
  detail_flag = merged.delete(:detail)
  collection_detail_level(Spot.list_by_query(query, @api_key, merged), detail_flag)
end
ruby
{ "resource": "" }
q14712
GooglePlaces.Client.spots_by_bounds
train
# Searches for spots within the given bounds.
# Options are merged over the client-wide defaults; :detail controls whether
# each result is re-fetched at full detail.
def spots_by_bounds(bounds, options = {})
  merged = @options.merge(options)
  detail_flag = merged.delete(:detail)
  collection_detail_level(Spot.list_by_bounds(bounds, @api_key, merged), detail_flag)
end
ruby
{ "resource": "" }
q14713
GooglePlaces.Client.spots_by_pagetoken
train
# Fetches the next page of a previous spot search via its page token.
# Options are merged over the client-wide defaults; :detail controls whether
# each result is re-fetched at full detail.
def spots_by_pagetoken(pagetoken, options = {})
  merged = @options.merge(options)
  detail_flag = merged.delete(:detail)
  collection_detail_level(Spot.list_by_pagetoken(pagetoken, @api_key, merged), detail_flag)
end
ruby
{ "resource": "" }
q14714
GooglePlaces.Client.spots_by_radar
train
# Runs a radar search around the given coordinates.
# Options are merged over the client-wide defaults; :detail controls whether
# each result is re-fetched at full detail.
def spots_by_radar(lat, lng, options = {})
  merged = @options.merge(options)
  detail_flag = merged.delete(:detail)
  collection_detail_level(Spot.list_by_radar(lat, lng, @api_key, merged), detail_flag)
end
ruby
{ "resource": "" }
q14715
GooglePlaces.Photo.fetch_url
train
# Returns (and memoizes) the Google Places photo URL for this photo.
#
# @param maxwidth [Integer] maximum width requested for the photo
# @param options [Hash] :language is accepted and removed (not used by the
#   photo endpoint); :retry_options is forwarded to the request layer
# @return [String] the memoized photo URL
#
# Fix: the original bound `options.delete(:language)` to a never-read local
# `language`; the delete side effect on the caller's hash is kept, the dead
# binding is removed.
# NOTE(review): because the URL is memoized, later calls with a different
# maxwidth still return the first URL — presumably intentional; confirm.
def fetch_url(maxwidth, options = {})
  options.delete(:language) # accepted for API symmetry, unused here
  retry_options = options.delete(:retry_options) || {}
  @fetched_url ||= Request.photo_url(
    :maxwidth => maxwidth,
    :photoreference => @photo_reference,
    :key => @api_key,
    :retry_options => retry_options
  )
end
ruby
{ "resource": "" }
q14716
Virtus.ConstMissingExtensions.const_missing
train
# Namespace fallback for unknown constants: first try to resolve the name as
# an attribute type via the builder, then as an Axiom::Types constant, and
# finally defer to the default const_missing behavior.
def const_missing(name)
  Attribute::Builder.determine_type(name) or
    Axiom::Types.const_defined?(name) && Axiom::Types.const_get(name) or
    super
end
ruby
{ "resource": "" }
q14717
Virtus.AttributeSet.each
train
# Iterates over the attributes stored under Symbol names.
# Returns an Enumerator when no block is given; otherwise yields each
# attribute and returns self.
def each
  return to_enum unless block_given?
  @index.each_pair do |name, attribute|
    yield(attribute) if name.is_a?(Symbol)
  end
  self
end
ruby
{ "resource": "" }
q14718
Virtus.AttributeSet.define_reader_method
train
# Defines a reader method that delegates to attribute.get(self), then applies
# the requested visibility (:public/:private/:protected) via send.
def define_reader_method(attribute, method_name, visibility)
  define_method(method_name) { attribute.get(self) }
  send(visibility, method_name)
end
ruby
{ "resource": "" }
q14719
Virtus.AttributeSet.define_writer_method
train
# Defines a writer method that delegates to attribute.set(self, value), then
# applies the requested visibility (:public/:private/:protected) via send.
def define_writer_method(attribute, method_name, visibility)
  define_method(method_name) { |value| attribute.set(self, value) }
  send(visibility, method_name)
end
ruby
{ "resource": "" }
q14720
Virtus.AttributeSet.get
train
# Builds a Hash of attribute name => value read from the given object,
# including only attributes that expose a public reader.
def get(object)
  each_with_object({}) do |attribute, result|
    key = attribute.name
    result[key] = object.__send__(key) if attribute.public_reader?
  end
end
ruby
{ "resource": "" }
q14721
Virtus.AttributeSet.set
train
# Mass assignment: coerces the attributes hash and invokes each writer that
# the target object explicitly allows; disallowed writers are skipped.
def set(object, attributes)
  coerce(attributes).each do |name, value|
    writer = "#{name}="
    object.__send__(writer, value) if object.allowed_writer_methods.include?(writer)
  end
end
ruby
{ "resource": "" }
q14722
Virtus.AttributeSet.set_defaults
train
# Assigns default values for every attribute on the object, skipping those
# rejected by the filter callable (defaults to #skip_default?).
def set_defaults(object, filter = method(:skip_default?))
  each do |attribute|
    attribute.set_default_value(object) unless filter.call(object, attribute)
  end
end
ruby
{ "resource": "" }
q14723
Virtus.TypeLookup.determine_type_from_primitive
train
# Finds the most specific descendant type whose primitive is an ancestor of
# (or equal to) the given primitive class; nil when nothing matches.
# Descendants without a primitive are ignored.
def determine_type_from_primitive(primitive)
  best = nil
  descendants.select(&:primitive).reverse_each do |candidate|
    candidate_primitive = candidate.primitive
    next unless primitive <= candidate_primitive
    best = candidate if best.nil? || best.primitive > candidate_primitive
  end
  best
end
ruby
{ "resource": "" }
q14724
Virtus.TypeLookup.determine_type_from_string
train
# Resolves a type constant from its string name, but only when the string
# matches TYPE_FORMAT and the constant actually exists; nil otherwise.
def determine_type_from_string(string)
  return unless string =~ TYPE_FORMAT
  const_get(string, *EXTRA_CONST_ARGS) if const_defined?(string, *EXTRA_CONST_ARGS)
end
ruby
{ "resource": "" }
q14725
Virtus.ModuleExtensions.extended
train
# Module hook: when this module extends an object, also extend the recorded
# inclusions onto it, define the attributes, and populate their defaults.
def extended(object)
  super
  @inclusions.each { |inclusion| object.extend(inclusion) }
  define_attributes(object)
  object.set_default_attributes
end
ruby
{ "resource": "" }
q14726
Virtus.ModuleExtensions.included
train
# Module hook. When included into a Class, any recorded modules that are not
# already ancestors get included and the attributes are defined directly.
# When included into another Module, the accumulated inclusions and
# attribute definitions are forwarded by installing ModuleExtensions on it.
def included(object)
  super
  if Class === object
    @inclusions.reject do |mod|
      object.ancestors.include?(mod)
    end.each do |mod|
      object.send(:include, mod)
    end
    define_attributes(object)
  else
    object.extend(ModuleExtensions)
    ModuleExtensions.setup(object, @inclusions, @attribute_definitions)
  end
end
ruby
{ "resource": "" }
q14727
Virtus.Builder.add_included_hook
train
# Installs an `included` hook on the target module that: queues the includer
# for finalization unless the builder context is already finalized, includes
# the context's modules into it, and defines its `attribute` class method.
# The builder state is captured through with_hook_context.
def add_included_hook
  with_hook_context do |context|
    mod.define_singleton_method :included do |object|
      Builder.pending << object unless context.finalize?
      context.modules.each { |mod| object.send(:include, mod) }
      object.define_singleton_method(:attribute, context.attribute_method)
    end
  end
end
ruby
{ "resource": "" }
q14728
Virtus.Options.options
train
# Returns a Hash of this object's accepted options mapped to their current
# values, omitting only options whose value is nil (false is kept).
def options
  accepted_options.each_with_object({}) do |name, collected|
    value = send(name)
    collected[name] = value unless value.nil?
  end
end
ruby
{ "resource": "" }
q14729
Virtus.Options.accept_options
train
# Declares newly accepted options: records them, defines an accessor method
# for each, and propagates the declaration to all descendants. Returns self.
def accept_options(*new_options)
  add_accepted_options(new_options)
  new_options.each do |name|
    define_option_method(name)
  end
  descendants.each do |descendant|
    descendant.add_accepted_options(new_options)
  end
  self
end
ruby
{ "resource": "" }
q14730
ROTP.TOTP.verify
train
# Verifies a TOTP, allowing clock drift in both directions and optionally
# rejecting codes at or before the `after` timestamp (prevents token reuse).
# Returns the matching timestamp (timecode * interval) or nil.
def verify(otp, drift_ahead: 0, drift_behind: 0, after: nil, at: Time.now)
  candidates = get_timecodes(at, drift_behind, drift_ahead)
  candidates = candidates.select { |t| t > timecode(after) } if after
  verified_at = nil
  candidates.each do |t|
    verified_at = t * interval if super(otp, generate_otp(t))
  end
  verified_at
end
ruby
{ "resource": "" }
q14731
ROTP.TOTP.get_timecodes
train
# Returns the inclusive list of timecodes covering `at` minus drift_behind
# through `at` plus drift_ahead (both drifts in seconds).
def get_timecodes(at, drift_behind, drift_ahead)
  now = timeint(at)
  first = timecode(now - drift_behind)
  last = timecode(now + drift_ahead)
  (first..last).to_a
end
ruby
{ "resource": "" }
q14732
ROTP.OTP.encode_params
train
# Appends the truthy params to the URI as a URL-escaped query string.
# Returns the URI unchanged when no param survives the truthiness filter.
def encode_params(uri, params)
  pairs = params.map { |k, v| "#{k}=#{CGI.escape(v.to_s)}" if v }.compact
  return uri if pairs.empty?
  "#{uri}?#{pairs.join('&')}"
end
ruby
{ "resource": "" }
q14733
ROTP.OTP.time_constant_compare
train
# Constant-time string comparison: XOR-accumulates every byte difference so
# the runtime does not depend on where the strings first differ. Empty or
# length-mismatched inputs compare false immediately.
def time_constant_compare(a, b)
  return false if a.empty? || b.empty? || a.bytesize != b.bytesize
  diff = 0
  a.bytes.zip(b.bytes) { |x, y| diff |= x ^ y }
  diff.zero?
end
ruby
{ "resource": "" }
q14734
ROTP.HOTP.verify
train
# Verifies an HOTP against the counter, optionally looking ahead `retries`
# extra counter values. Returns the first matching counter or nil.
def verify(otp, counter, retries: 0)
  (counter..counter + retries).find { |c| super(otp, at(c)) }
end
ruby
{ "resource": "" }
q14735
Fluent.GoogleCloudOutput.ec2_metadata
train
# Lazily fetches and memoizes the EC2 instance identity document. Must only
# be called when running on the EC2 platform.
# NOTE(review): Kernel#open on a URL relies on open-uri and would execute a
# command for a string starting with '|'; the host here is a constant, but
# Net::HTTP would be safer.
def ec2_metadata
  raise "Called ec2_metadata with platform=#{@platform}" unless @platform == Platform::EC2

  unless @ec2_metadata
    # See http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html
    open('http://' + METADATA_SERVICE_ADDR + '/latest/dynamic/instance-identity/document') do |f|
      contents = f.read
      @ec2_metadata = JSON.parse(contents)
    end
  end

  @ec2_metadata
end
ruby
{ "resource": "" }
q14736
Fluent.GoogleCloudOutput.set_required_metadata_variables
train
# Resolves project_id, vm id/name and location, then fails fast with a
# Fluent::ConfigError naming every still-missing parameter (zone and vm_id
# are only required on known cloud platforms).
def set_required_metadata_variables
  set_project_id
  set_vm_id
  set_vm_name
  set_location

  # All metadata parameters must now be set.
  missing = []
  missing << 'project_id' unless @project_id
  if @platform != Platform::OTHER
    missing << 'zone' unless @zone
    missing << 'vm_id' unless @vm_id
  end
  return if missing.empty?

  raise Fluent::ConfigError,
        "Unable to obtain metadata parameters: #{missing.join(' ')}"
end
ruby
{ "resource": "" }
q14737
Fluent.GoogleCloudOutput.set_vm_id
train
# Fills @vm_id from GCE or EC2 instance metadata, keeping any already
# configured value. Failures are logged rather than raised.
def set_vm_id
  @vm_id ||= fetch_gce_metadata('instance/id') if @platform == Platform::GCE
  @vm_id ||= ec2_metadata['instanceId'] if @platform == Platform::EC2
rescue StandardError => e
  @log.error 'Failed to obtain vm_id: ', error: e
end
ruby
{ "resource": "" }
q14738
Fluent.GoogleCloudOutput.set_location
train
# Fills @zone, keeping any configured value: on GCE from instance metadata,
# on EC2 from the availability zone or region (per @use_aws_availability_zone)
# prefixed with 'aws:'. Failures are logged rather than raised.
def set_location
  # Response format: "projects/<number>/zones/<zone>"
  @zone ||= fetch_gce_metadata('instance/zone').rpartition('/')[2] if @platform == Platform::GCE
  aws_location_key = if @use_aws_availability_zone
                       'availabilityZone'
                     else
                       'region'
                     end
  @zone ||= 'aws:' + ec2_metadata[aws_location_key] if @platform == Platform::EC2 && ec2_metadata.key?(aws_location_key)
rescue StandardError => e
  @log.error 'Failed to obtain location: ', error: e
end
ruby
{ "resource": "" }
q14739
Fluent.GoogleCloudOutput.determine_agent_level_monitored_resource_via_legacy
train
# Builds the agent-level MonitoredResource along the legacy (non-metadata-
# agent) path: resolve the resource type first, then the labels required by
# that type.
def determine_agent_level_monitored_resource_via_legacy
  resource = Google::Apis::LoggingV2::MonitoredResource.new(labels: {})
  resource.type = determine_agent_level_monitored_resource_type
  resource.labels = determine_agent_level_monitored_resource_labels(resource.type)
  resource
end
ruby
{ "resource": "" }
q14740
Fluent.GoogleCloudOutput.determine_agent_level_monitored_resource_type
train
# Picks the agent-level monitored resource type for the current platform:
# unknown platforms default to GCE instance; EC2 maps to the EC2 type; on
# GCE an explicit @subservice_name wins, then optional subservice detection
# from instance attributes, then the plain GCE instance type.
def determine_agent_level_monitored_resource_type
  case @platform
  when Platform::OTHER
    # Unknown platform will be defaulted to GCE instance.
    return COMPUTE_CONSTANTS[:resource_type]
  when Platform::EC2
    return EC2_CONSTANTS[:resource_type]
  when Platform::GCE
    # Resource types determined by @subservice_name config.
    return SUBSERVICE_MAP[@subservice_name] if @subservice_name
    # Resource types determined by @detect_subservice config.
    if @detect_subservice
      begin
        attributes = fetch_gce_metadata('instance/attributes/').split.to_set
        SUBSERVICE_METADATA_ATTRIBUTES.each do |resource_type, expected|
          return resource_type if attributes.superset?(expected)
        end
      rescue StandardError => e
        @log.error 'Failed to detect subservice: ', error: e
      end
    end
    # GCE instance.
    return COMPUTE_CONSTANTS[:resource_type]
  end
end
ruby
{ "resource": "" }
q14741
Fluent.GoogleCloudOutput.determine_agent_level_monitored_resource_labels
train
# Builds the label set required by the given agent-level resource type from
# GCE/EC2 metadata and instance state. Raises internally — then logs and
# returns {} — when vm_id/zone are required but missing; unknown types also
# yield {}.
def determine_agent_level_monitored_resource_labels(type)
  case type
  # GAE app.
  when APPENGINE_CONSTANTS[:resource_type]
    return {
      'module_id' => fetch_gce_metadata('instance/attributes/gae_backend_name'),
      'version_id' => fetch_gce_metadata('instance/attributes/gae_backend_version')
    }
  # GCE.
  when COMPUTE_CONSTANTS[:resource_type]
    raise "Cannot construct a #{type} resource without vm_id and zone" \
      unless @vm_id && @zone
    return {
      'instance_id' => @vm_id,
      'zone' => @zone
    }
  # GKE container.
  when GKE_CONSTANTS[:resource_type]
    raise "Cannot construct a #{type} resource without vm_id and zone" \
      unless @vm_id && @zone
    return {
      'instance_id' => @vm_id,
      'zone' => @zone,
      'cluster_name' => fetch_gce_metadata('instance/attributes/cluster-name')
    }
  # Cloud Dataproc.
  when DATAPROC_CONSTANTS[:resource_type]
    return {
      'cluster_uuid' => fetch_gce_metadata('instance/attributes/dataproc-cluster-uuid'),
      'cluster_name' => fetch_gce_metadata('instance/attributes/dataproc-cluster-name'),
      'region' => fetch_gce_metadata('instance/attributes/dataproc-region')
    }
  # EC2.
  when EC2_CONSTANTS[:resource_type]
    raise "Cannot construct a #{type} resource without vm_id and zone" \
      unless @vm_id && @zone
    labels = {
      'instance_id' => @vm_id,
      'region' => @zone
    }
    labels['aws_account'] = ec2_metadata['accountId'] if ec2_metadata.key?('accountId')
    return labels
  end
  {}
rescue StandardError => e
  @log.error "Failed to set monitored resource labels for #{type}: ", error: e
  {}
end
ruby
{ "resource": "" }
q14742
Fluent.GoogleCloudOutput.determine_agent_level_common_labels
train
# Assembles the agent-level common labels: user-configured @labels plus the
# compute resource id/name/zone labels appropriate to the resource type.
def determine_agent_level_common_labels
  labels = {}
  # User can specify labels via config. We want to capture those as well.
  labels.merge!(@labels) if @labels
  case @resource.type
  # GAE, Cloud Dataflow, Cloud Dataproc and Cloud ML.
  when APPENGINE_CONSTANTS[:resource_type], DATAFLOW_CONSTANTS[:resource_type],
       DATAPROC_CONSTANTS[:resource_type], ML_CONSTANTS[:resource_type]
    labels.merge!(
      "#{COMPUTE_CONSTANTS[:service]}/resource_id" => @vm_id,
      "#{COMPUTE_CONSTANTS[:service]}/resource_name" => @vm_name,
      "#{COMPUTE_CONSTANTS[:service]}/zone" => @zone
    )
  # GCE instance and GKE container.
  when COMPUTE_CONSTANTS[:resource_type], GKE_CONSTANTS[:resource_type]
    labels["#{COMPUTE_CONSTANTS[:service]}/resource_name"] = @vm_name
  # EC2.
  when EC2_CONSTANTS[:resource_type]
    labels["#{EC2_CONSTANTS[:service]}/resource_name"] = @vm_name
  end
  labels
end
ruby
{ "resource": "" }
q14743
Fluent.GoogleCloudOutput.group_log_entries_by_tag_and_local_resource_id
train
# Groups a buffered chunk's entries by [sanitized_tag, local_resource_id],
# dropping (and logging) non-Hash records and records whose tag fails
# sanitization. The local resource id is removed from each record; a nil id
# means "fall back to the legacy resource".
def group_log_entries_by_tag_and_local_resource_id(chunk)
  groups = {}
  chunk.msgpack_each do |tag, time, record|
    unless record.is_a?(Hash)
      @log.warn 'Dropping log entries with malformed record: ' \
                "'#{record.inspect}'. " \
                'A log record should be in JSON format.'
      next
    end
    sanitized_tag = sanitize_tag(tag)
    if sanitized_tag.nil?
      @log.warn "Dropping log entries with invalid tag: '#{tag.inspect}'." \
                ' A tag should be a string with utf8 characters.'
      next
    end
    local_resource_id = record.delete(LOCAL_RESOURCE_ID_KEY)
    # A nil local_resource_id means "fall back to legacy".
    hash_key = [sanitized_tag, local_resource_id].freeze
    groups[hash_key] ||= []
    groups[hash_key].push([time, record])
  end
  groups
end
ruby
{ "resource": "" }
q14744
Fluent.GoogleCloudOutput.determine_group_level_monitored_resource_and_labels
train
# Derives the group-level monitored resource and common labels for one
# [tag, local_resource_id] group: applies @tag_regexp_list type overrides,
# prefers a resource resolved from the local_resource_id (Metadata Agent or
# local construction), then fills type-specific resource/common labels for
# Cloud Functions, GKE, Docker, K8s, Dataflow and ML. Returns the frozen
# [resource, common_labels] pair.
def determine_group_level_monitored_resource_and_labels(tag, local_resource_id)
  resource = @resource.dup
  resource.labels = @resource.labels.dup
  common_labels = @common_labels.dup
  # Change the resource type and set matched_regexp_group if the tag matches
  # certain regexp.
  matched_regexp_group = nil # @tag_regexp_list can be an empty list.
  @tag_regexp_list.each do |derived_type, tag_regexp|
    matched_regexp_group = tag_regexp.match(tag)
    if matched_regexp_group
      resource.type = derived_type
      break
    end
  end
  # Determine the monitored resource based on the local_resource_id.
  # Different monitored resource types have unique ids in different format.
  # We will query Metadata Agent for the monitored resource. Return the
  # legacy monitored resource (either the instance resource or the resource
  # inferred from the tag) if failed to get a monitored resource from
  # Metadata Agent with this key.
  #
  # Examples:
  # "container.<container_id>" // Docker container.
  # "k8s_pod.<namespace_name>.<pod_name>" // GKE pod.
  if local_resource_id
    converted_resource = monitored_resource_from_local_resource_id(
      local_resource_id)
    resource = converted_resource if converted_resource
  end
  # Once the resource type is settled down, determine the labels.
  case resource.type
  # Cloud Functions.
  when CLOUDFUNCTIONS_CONSTANTS[:resource_type]
    resource.labels.merge!(
      'region' => @gcf_region,
      'function_name' => decode_cloudfunctions_function_name(
        matched_regexp_group['encoded_function_name'])
    )
    instance_id = resource.labels.delete('instance_id')
    common_labels.merge!(
      "#{GKE_CONSTANTS[:service]}/instance_id" => instance_id,
      "#{COMPUTE_CONSTANTS[:service]}/resource_id" => instance_id,
      "#{GKE_CONSTANTS[:service]}/cluster_name" => resource.labels.delete('cluster_name'),
      "#{COMPUTE_CONSTANTS[:service]}/zone" => resource.labels.delete('zone')
    )
  # GKE container.
  when GKE_CONSTANTS[:resource_type]
    if matched_regexp_group
      # We only expect one occurrence of each key in the match group.
      resource_labels_candidates =
        matched_regexp_group.names.zip(matched_regexp_group.captures).to_h
      common_labels_candidates = resource_labels_candidates.dup
      resource.labels.merge!(
        delete_and_extract_labels(
          resource_labels_candidates,
          # The kubernetes_tag_regexp is poorly named. 'namespace_name' is
          # in fact 'namespace_id'. 'pod_name' is in fact 'pod_id'.
          # TODO(qingling128): Figure out how to put this map into
          # constants like GKE_CONSTANTS[:extra_resource_labels].
          'container_name' => 'container_name',
          'namespace_name' => 'namespace_id',
          'pod_name' => 'pod_id'))
      common_labels.merge!(
        delete_and_extract_labels(
          common_labels_candidates,
          GKE_CONSTANTS[:extra_common_labels]
            .map { |l| [l, "#{GKE_CONSTANTS[:service]}/#{l}"] }.to_h))
    end
  # Docker container.
  # TODO(qingling128): Remove this logic once the resource is retrieved at a
  # proper time (b/65175256).
  when DOCKER_CONSTANTS[:resource_type]
    common_labels.delete("#{COMPUTE_CONSTANTS[:service]}/resource_name")
  # TODO(qingling128): Temporary fallback for metadata agent restarts.
  # K8s resources.
  when K8S_CONTAINER_CONSTANTS[:resource_type],
       K8S_POD_CONSTANTS[:resource_type],
       K8S_NODE_CONSTANTS[:resource_type]
    common_labels.delete("#{COMPUTE_CONSTANTS[:service]}/resource_name")
  end
  # Cloud Dataflow and Cloud ML.
  # These labels can be set via the 'labels' option.
  # Report them as monitored resource labels instead of common labels.
  # e.g. "dataflow.googleapis.com/job_id" => "job_id"
  [DATAFLOW_CONSTANTS, ML_CONSTANTS].each do |service_constants|
    next unless resource.type == service_constants[:resource_type]
    resource.labels.merge!(
      delete_and_extract_labels(
        common_labels,
        service_constants[:extra_resource_labels]
          .map { |l| ["#{service_constants[:service]}/#{l}", l] }.to_h))
  end
  resource.freeze
  resource.labels.freeze
  common_labels.freeze
  [resource, common_labels]
end
ruby
{ "resource": "" }
q14745
Fluent.GoogleCloudOutput.monitored_resource_from_local_resource_id
train
# Resolves a monitored resource for a local_resource_id: via the Metadata
# Agent when enabled (renaming legacy 'gke_container' to 'container'), else
# — or on a failed lookup — by constructing a k8s resource locally.
def monitored_resource_from_local_resource_id(local_resource_id)
  return unless local_resource_id
  if @enable_metadata_agent
    @log.debug 'Calling metadata agent with local_resource_id: ' \
               "#{local_resource_id}."
    resource = query_metadata_agent_for_monitored_resource(
      local_resource_id)
    @log.debug 'Retrieved monitored resource from metadata agent: ' \
               "#{resource.inspect}."
    if resource
      # TODO(qingling128): Fix this temporary renaming from 'gke_container'
      # to 'container'.
      resource.type = 'container' if resource.type == 'gke_container'
      return resource
    end
  end
  # Fall back to constructing monitored resource locally.
  # TODO(qingling128): This entire else clause is temporary until we
  # implement buffering and caching.
  @log.debug('Failed to retrieve monitored resource from Metadata' \
             " Agent with local_resource_id #{local_resource_id}.")
  construct_k8s_resource_locally(local_resource_id)
end
ruby
{ "resource": "" }
q14746
Fluent.GoogleCloudOutput.determine_entry_level_monitored_resource_and_labels
train
# Refines the group-level resource and common labels for one record:
# extracts the Cloud Functions execution id, GKE stream/kubernetes metadata
# (user pod labels get a 'label/' prefix), applies the user @label_map, and
# moves Dataflow/ML service labels onto the resource. Consumed fields are
# deleted from the record; returns [resource, common_labels].
def determine_entry_level_monitored_resource_and_labels(
    group_level_resource, group_level_common_labels, record)
  resource = group_level_resource.dup
  resource.labels = group_level_resource.labels.dup
  common_labels = group_level_common_labels.dup
  case resource.type
  # Cloud Functions.
  when CLOUDFUNCTIONS_CONSTANTS[:resource_type]
    if record.key?('log')
      @cloudfunctions_log_match =
        @compiled_cloudfunctions_log_regexp.match(record['log'])
      common_labels['execution_id'] = @cloudfunctions_log_match['execution_id'] if @cloudfunctions_log_match && @cloudfunctions_log_match['execution_id']
    end
  # GKE container.
  when GKE_CONSTANTS[:resource_type]
    # Move the stdout/stderr annotation from the record into a label.
    common_labels.merge!(
      delete_and_extract_labels(
        record, 'stream' => "#{GKE_CONSTANTS[:service]}/stream"))
    # If the record has been annotated by the kubernetes_metadata_filter
    # plugin, then use that metadata. Otherwise, rely on commonLabels
    # populated from the group's tag.
    if record.key?('kubernetes')
      resource.labels.merge!(
        delete_and_extract_labels(
          record['kubernetes'],
          GKE_CONSTANTS[:extra_resource_labels]
            .map { |l| [l, l] }.to_h))
      common_labels.merge!(
        delete_and_extract_labels(
          record['kubernetes'],
          GKE_CONSTANTS[:extra_common_labels]
            .map { |l| [l, "#{GKE_CONSTANTS[:service]}/#{l}"] }.to_h))
      # Prepend label/ to all user-defined labels' keys.
      if record['kubernetes'].key?('labels')
        common_labels.merge!(
          delete_and_extract_labels(
            record['kubernetes']['labels'],
            record['kubernetes']['labels']
              .map { |key, _| [key, "label/#{key}"] }.to_h))
      end
      # We've explicitly consumed all the fields we care about -- don't
      # litter the log entries with the remaining fields that the kubernetes
      # metadata filter plugin includes (or an empty 'kubernetes' field).
      record.delete('kubernetes')
      record.delete('docker')
    end
  end
  # If the name of a field in the record is present in the @label_map
  # configured by users, report its value as a label and do not send that
  # field as part of the payload.
  common_labels.merge!(delete_and_extract_labels(record, @label_map))
  # Cloud Dataflow and Cloud ML.
  # These labels can be set via the 'labels' or 'label_map' options.
  # Report them as monitored resource labels instead of common labels.
  # e.g. "dataflow.googleapis.com/job_id" => "job_id"
  [DATAFLOW_CONSTANTS, ML_CONSTANTS].each do |service_constants|
    next unless resource.type == service_constants[:resource_type]
    resource.labels.merge!(
      delete_and_extract_labels(
        common_labels,
        service_constants[:extra_resource_labels]
          .map { |l| ["#{service_constants[:service]}/#{l}", l] }.to_h))
  end
  [resource, common_labels]
end
ruby
{ "resource": "" }
q14747
Fluent.GoogleCloudOutput.query_metadata_agent
train
# GETs a path from the Metadata Agent and returns the parsed JSON Hash; nil
# when the response is not valid JSON or on any error (both logged).
# NOTE(review): Kernel#open on a URL relies on open-uri; see ec2_metadata.
def query_metadata_agent(path)
  url = "#{@metadata_agent_url}/#{path}"
  @log.debug("Calling Metadata Agent: #{url}")
  open(url) do |f|
    response = f.read
    parsed_hash = parse_json_or_nil(response)
    if parsed_hash.nil?
      @log.error 'Response from Metadata Agent is not in valid json ' \
                 "format: '#{response.inspect}'."
      return nil
    end
    @log.debug "Response from Metadata Agent: #{parsed_hash}"
    return parsed_hash
  end
rescue StandardError => e
  @log.error "Error calling Metadata Agent at #{url}.", error: e
  nil
end
ruby
{ "resource": "" }
q14748
Fluent.GoogleCloudOutput.parse_labels
train
# Extracts and removes the labels hash stored under @labels_key from the
# record. Returns nil (logging the reason) when the value is absent, not a
# Hash, or contains non-String keys or values.
def parse_labels(record)
  labels = record.delete(@labels_key)
  return nil unless labels
  unless labels.is_a?(Hash)
    @log.error "Invalid value of '#{@labels_key}' in the payload: " \
               "#{labels}. Labels need to be a JSON object."
    return nil
  end
  bad_keys = labels.reject { |k, v| k.is_a?(String) && v.is_a?(String) }.keys
  unless bad_keys.empty?
    @log.error "Invalid value of '#{@labels_key}' in the payload: " \
               "#{labels}. Labels need string values for all " \
               "keys; keys #{bad_keys} don't."
    return nil
  end
  labels
rescue StandardError => err
  @log.error "Failed to extract '#{@labels_key}' from payload.", err
  return nil
end
ruby
{ "resource": "" }
q14749
Fluent.GoogleCloudOutput.sanitize_tag
train
# Normalizes a Fluentd tag to a valid UTF-8 string ('_' when empty).
# When @require_valid_tags is set, returns nil instead of repairing
# non-string, empty, or non-UTF-8 tags.
def sanitize_tag(tag)
  if @require_valid_tags
    return nil unless tag.is_a?(String) && tag != '' && convert_to_utf8(tag) == tag
  end
  sanitized = convert_to_utf8(tag.to_s)
  sanitized.empty? ? '_' : sanitized
end
ruby
{ "resource": "" }
q14750
Fluent.GoogleCloudOutput.delete_and_extract_labels
train
# Moves entries out of `hash` according to `label_map` (original key =>
# extracted label name), converting values to UTF-8. Mapped keys are deleted
# from `hash` even when their value is falsy, but only truthy values appear
# in the result. Nil/non-Hash arguments yield {}.
def delete_and_extract_labels(hash, label_map)
  return {} unless label_map.is_a?(Hash) && hash.is_a?(Hash)
  extracted = {}
  label_map.each do |original_label, new_label|
    value = hash.delete(original_label)
    extracted[new_label] = convert_to_utf8(value.to_s) if value
  end
  extracted
end
ruby
{ "resource": "" }
q14751
Fluent.GoogleCloudOutput.convert_to_utf8
train
# Re-encodes the input string as UTF-8. With @coerce_to_utf8 enabled,
# invalid/unconvertible bytes are replaced by @non_utf8_replacement_string;
# otherwise an encoding failure is logged and re-raised.
def convert_to_utf8(input)
  if @coerce_to_utf8
    input.encode(
      'utf-8',
      invalid: :replace,
      undef: :replace,
      replace: @non_utf8_replacement_string)
  else
    begin
      input.encode('utf-8')
    rescue EncodingError
      @log.error 'Encountered encoding issues potentially due to non ' \
                 'UTF-8 characters. To allow non-UTF-8 characters and ' \
                 'replace them with spaces, please set "coerce_to_utf8" ' \
                 'to true.'
      raise
    end
  end
end
ruby
{ "resource": "" }
q14752
Fluent.GoogleCloudOutput.construct_k8s_resource_locally
train
# Locally constructs a k8s_container / k8s_pod / k8s_node MonitoredResource
# by parsing the local_resource_id; cluster name/location come from config
# or GCE metadata. Returns nil when the id matches no known format, or
# (after logging) when cluster name/location cannot be determined.
# NOTE(review): the failure log passes `error: e`, but `e` is only bound by
# the earlier rescue clause, so it is nil whenever the metadata fetch
# succeeded — confirm this is intended.
def construct_k8s_resource_locally(local_resource_id)
  return unless /^ (?<resource_type>k8s_container) \.(?<namespace_name>[0-9a-z-]+) \.(?<pod_name>[.0-9a-z-]+) \.(?<container_name>[0-9a-z-]+)$/x =~ local_resource_id ||
                /^ (?<resource_type>k8s_pod) \.(?<namespace_name>[0-9a-z-]+) \.(?<pod_name>[.0-9a-z-]+)$/x =~ local_resource_id ||
                /^ (?<resource_type>k8s_node) \.(?<node_name>[0-9a-z-]+)$/x =~ local_resource_id

  # Clear name and location if they're explicitly set to empty.
  @k8s_cluster_name = nil if @k8s_cluster_name == ''
  @k8s_cluster_location = nil if @k8s_cluster_location == ''

  begin
    @k8s_cluster_name ||= fetch_gce_metadata(
      'instance/attributes/cluster-name')
    @k8s_cluster_location ||= fetch_gce_metadata(
      'instance/attributes/cluster-location')
  rescue StandardError => e
    @log.error 'Failed to retrieve k8s cluster name and location.', \
               error: e
  end
  case resource_type
  when K8S_CONTAINER_CONSTANTS[:resource_type]
    labels = {
      'namespace_name' => namespace_name,
      'pod_name' => pod_name,
      'container_name' => container_name,
      'cluster_name' => @k8s_cluster_name,
      'location' => @k8s_cluster_location
    }
    fallback_resource = GKE_CONSTANTS[:resource_type]
  when K8S_POD_CONSTANTS[:resource_type]
    labels = {
      'namespace_name' => namespace_name,
      'pod_name' => pod_name,
      'cluster_name' => @k8s_cluster_name,
      'location' => @k8s_cluster_location
    }
    fallback_resource = GKE_CONSTANTS[:resource_type]
  when K8S_NODE_CONSTANTS[:resource_type]
    labels = {
      'node_name' => node_name,
      'cluster_name' => @k8s_cluster_name,
      'location' => @k8s_cluster_location
    }
    fallback_resource = COMPUTE_CONSTANTS[:resource_type]
  end
  unless @k8s_cluster_name && @k8s_cluster_location
    @log.error "Failed to construct #{resource_type} resource locally." \
               ' Falling back to writing logs against' \
               " #{fallback_resource} resource.", error: e
    return
  end
  constructed_resource = Google::Apis::LoggingV2::MonitoredResource.new(
    type: resource_type,
    labels: labels)
  @log.debug("Constructed #{resource_type} resource locally: " \
             "#{constructed_resource.inspect}")
  constructed_resource
end
ruby
{ "resource": "" }
q14753
Monitoring.PrometheusMonitoringRegistry.counter
train
# Registers a counter on the Prometheus registry, or fetches the existing
# one when it was already registered.
def counter(name, desc)
  @registry.counter(name, desc)
rescue Prometheus::Client::Registry::AlreadyRegisteredError
  @registry.get(name)
end
ruby
{ "resource": "" }
q14754
ServiceWorker.Middleware.call
train
# Rack entry point: for GET/HEAD requests that match a service-worker route,
# respond with the matched route; everything else falls through to the
# wrapped app.
def call(env)
  case env[REQUEST_METHOD]
  when GET, HEAD
    matched = @router.match_route(env)
    return respond_to_match(matched, env) if matched
  end
  @app.call(env)
end
ruby
{ "resource": "" }
q14755
I18n::Tasks.UsedKeys.used_tree
train
# Builds the tree of keys used in source code. Reference usages are split
# into raw / resolved / final-key nodes (each tagged via :ref_type); raw
# reference nodes are kept only when include_raw_references is set, and the
# resolved and key nodes are merged back into the returned 'used' tree.
def used_tree(key_filter: nil, strict: nil, include_raw_references: false)
  src_tree = used_in_source_tree(key_filter: key_filter, strict: strict)
  raw_refs, resolved_refs, used_refs = process_references(src_tree['used'].children)
  raw_refs.leaves { |node| node.data[:ref_type] = :reference_usage }
  resolved_refs.leaves { |node| node.data[:ref_type] = :reference_usage_resolved }
  used_refs.leaves { |node| node.data[:ref_type] = :reference_usage_key }
  src_tree.tap do |result|
    tree = result['used'].children
    tree.subtract_by_key!(raw_refs)
    tree.merge!(raw_refs) if include_raw_references
    tree.merge!(used_refs).merge!(resolved_refs)
  end
end
ruby
{ "resource": "" }
q14756
I18n::Tasks::Scanners.RubyKeyLiterals.strip_literal
train
# Normalizes a Ruby literal source fragment to its bare key text: removes a
# leading ':' from symbol literals and surrounding single/double quotes from
# string literals. Anything else passes through unchanged.
def strip_literal(literal)
  result = literal
  result = result[1..-1] if result.start_with?(':')
  result = result[1..-2] if result.start_with?("'", '"')
  result
end
ruby
{ "resource": "" }
q14757
I18n::Tasks.MissingKeys.load_rails_i18n_pluralization!
train
# Loads the rails-i18n pluralization rules for the locale by eval'ing the
# gem's rule file in the current binding; returns the evaluated result.
# NOTE(review): eval of a file shipped inside the installed rails-i18n gem —
# trusted input by assumption (hence the rubocop disable).
def load_rails_i18n_pluralization!(locale)
  path = File.join(Gem::Specification.find_by_name('rails-i18n').gem_dir,
                   'rails', 'pluralization', "#{locale}.rb")
  eval(File.read(path), binding, path) # rubocop:disable Security/Eval
end
ruby
{ "resource": "" }
q14758
I18n::Tasks.MissingKeys.missing_diff_tree
train
# Returns the tree of keys present in compared_to (default: base locale) but
# missing from locale, re-rooted under the target locale with type
# :missing_diff. Node data is rewritten to the target locale: the original
# locale is kept under :missing_diff_locale and :path is locale-adjusted.
def missing_diff_tree(locale, compared_to = base_locale)
  data[compared_to].select_keys do |key, _node|
    locale_key_missing? locale, depluralize_key(key, compared_to)
  end.set_root_key!(locale, type: :missing_diff).keys do |_key, node|
    # change path and locale to base
    data = { locale: locale, missing_diff_locale: node.data[:locale] }
    if node.data.key?(:path)
      data[:path] = LocalePathname.replace_locale(node.data[:path], node.data[:locale], locale)
    end
    node.data.update data
  end
end
ruby
{ "resource": "" }
q14759
I18n::Tasks.MissingKeys.missing_used_tree
train
# Returns the tree of keys used in code (strict scan) that are missing from
# the given locale, rooted under that locale with type :missing_used.
def missing_used_tree(locale)
  used_tree(strict: true)
    .select_keys { |key, _node| locale_key_missing?(locale, key) }
    .set_root_key!(locale, type: :missing_used)
end
ruby
{ "resource": "" }
q14760
I18n::Tasks::Scanners.PatternScanner.scan_file
train
def scan_file(path) keys = [] text = read_file(path) text.scan(@pattern) do |match| src_pos = Regexp.last_match.offset(0).first location = occurrence_from_position(path, text, src_pos, raw_key: strip_literal(match[0])) next if exclude_line?(location.line, path) key = match_to_key(match, path, location) next unless key key += ':' if key.end_with?('.') next unless valid_key?(key) keys << [key, location] end keys rescue Exception => e # rubocop:disable Lint/RescueException raise ::I18n::Tasks::CommandError.new(e, "Error scanning #{path}: #{e.message}") end
ruby
{ "resource": "" }
q14761
I18n::Tasks::Translators.GoogleTranslator.to_google_translate_compatible_locale
train
def to_google_translate_compatible_locale(locale) return locale unless locale.include?('-') && !SUPPORTED_LOCALES_WITH_REGION.include?(locale) locale.split('-', 2).first end
ruby
{ "resource": "" }
q14762
I18n::Tasks::Scanners.RubyAstScanner.scan_file
train
def scan_file(path) @parser.reset ast, comments = @parser.parse_with_comments(make_buffer(path)) results = @call_finder.collect_calls ast do |send_node, method_name| send_node_to_key_occurrence(send_node, method_name) end magic_comments = comments.select { |comment| comment.text =~ MAGIC_COMMENT_PREFIX } comment_to_node = Parser::Source::Comment.associate_locations(ast, magic_comments).tap do |h| # transform_values is only available in ActiveSupport 4.2+ h.each { |k, v| h[k] = v.first } end.invert results + (magic_comments.flat_map do |comment| @parser.reset associated_node = comment_to_node[comment] @call_finder.collect_calls( @parser.parse(make_buffer(path, comment.text.sub(MAGIC_COMMENT_PREFIX, '').split(/\s+(?=t)/).join('; '))) ) do |send_node, _method_name| # method_name is not available at this stage send_node_to_key_occurrence(send_node, nil, location: associated_node || comment.location) end end) rescue Exception => e # rubocop:disable Lint/RescueException raise ::I18n::Tasks::CommandError.new(e, "Error scanning #{path}: #{e.message}") end
ruby
{ "resource": "" }
q14763
I18n::Tasks::Scanners.RubyAstScanner.extract_hash_pair
train
def extract_hash_pair(node, key) node.children.detect do |child| next unless child.type == :pair key_node = child.children[0] %i[sym str].include?(key_node.type) && key_node.children[0].to_s == key end end
ruby
{ "resource": "" }
q14764
I18n::Tasks::Scanners.RubyAstScanner.extract_array_as_string
train
def extract_array_as_string(node, array_join_with:, array_flatten: false, array_reject_blank: false) children_strings = node.children.map do |child| if %i[sym str int true false].include?(child.type) # rubocop:disable Lint/BooleanSymbol extract_string child else # ignore dynamic argument in strict mode return nil if config[:strict] if %i[dsym dstr].include?(child.type) || (child.type == :array && array_flatten) extract_string(child, array_join_with: array_join_with) else "\#{#{child.loc.expression.source}}" end end end if array_reject_blank children_strings.reject! do |x| # empty strings and nils in the scope argument are ignored by i18n x == '' end end children_strings.join(array_join_with) end
ruby
{ "resource": "" }
q14765
I18n::Tasks::Data::Tree.Siblings.add_ancestors_that_only_contain_nodes!
train
def add_ancestors_that_only_contain_nodes!(nodes) levels.reverse_each do |level_nodes| level_nodes.each { |node| nodes << node if node.children? && node.children.all? { |c| nodes.include?(c) } } end end
ruby
{ "resource": "" }
q14766
I18n::Tasks::Reports.Base.sort_by_attr!
train
def sort_by_attr!(objects, order = { locale: :asc, key: :asc }) order_keys = order.keys objects.sort! do |a, b| by = order_keys.detect { |k| a[k] != b[k] } order[by] == :desc ? b[by] <=> a[by] : a[by] <=> b[by] end objects end
ruby
{ "resource": "" }
q14767
I18n::Tasks.Data.data
train
def data @data ||= begin data_config = (config[:data] || {}).deep_symbolize_keys data_config[:base_locale] = base_locale data_config[:locales] = config[:locales] adapter_class = data_config[:adapter].presence || data_config[:class].presence || DATA_DEFAULTS[:adapter] adapter_class = adapter_class.to_s adapter_class = 'I18n::Tasks::Data::FileSystem' if adapter_class == 'file_system' data_config.except!(:adapter, :class) ActiveSupport::Inflector.constantize(adapter_class).new data_config end end
ruby
{ "resource": "" }
q14768
I18n::Tasks.References.merge_reference_trees
train
def merge_reference_trees(roots) roots.inject(empty_forest) do |forest, root| root.keys do |full_key, node| if full_key == node.value.to_s log_warn( "Self-referencing key #{node.full_key(root: false).inspect} in #{node.data[:locale].inspect}" ) end end forest.merge!( root.children, on_leaves_merge: lambda do |node, other| if node.value != other.value log_warn( 'Conflicting references: '\ "#{node.full_key(root: false)} ⮕ #{node.value} in #{node.data[:locale]},"\ " but ⮕ #{other.value} in #{other.data[:locale]}" ) end end ) end end
ruby
{ "resource": "" }
q14769
I18n::Tasks::Scanners::Files.FileReader.read_file
train
def read_file(path) result = nil File.open(path, 'rb', encoding: 'UTF-8') { |f| result = f.read } result end
ruby
{ "resource": "" }
q14770
Daru.CategoricalIndex.pos
train
def pos *indexes positions = indexes.map do |index| if include? index @cat_hash[index] elsif index.is_a?(Numeric) && index < @array.size index else raise IndexError, "#{index.inspect} is neither a valid category"\ ' nor a valid position' end end positions.flatten! positions.size == 1 ? positions.first : positions.sort end
ruby
{ "resource": "" }
q14771
Daru.CategoricalIndex.subset
train
def subset *indexes positions = pos(*indexes) new_index = positions.map { |pos| index_from_pos pos } Daru::CategoricalIndex.new new_index.flatten end
ruby
{ "resource": "" }
q14772
Daru.CategoricalIndex.at
train
def at *positions positions = preprocess_positions(*positions) validate_positions(*positions) if positions.is_a? Integer index_from_pos(positions) else Daru::CategoricalIndex.new positions.map(&method(:index_from_pos)) end end
ruby
{ "resource": "" }
q14773
Daru.MultiIndex.validate_name
train
def validate_name names, levels error_msg = "'names' and 'levels' should be of same size. Size of the "\ "'name' array is #{names.size} and size of the MultiIndex 'levels' and "\ "'labels' is #{labels.size}." suggestion_msg = "If you don\'t want to set name for particular level " \ "(say level 'i') then put empty string on index 'i' of the 'name' Array." raise SizeError, error_msg if names.size > levels.size raise SizeError, [error_msg, suggestion_msg].join("\n") if names.size < levels.size end
ruby
{ "resource": "" }
q14774
Daru.Vector.[]
train
def [](*input_indexes) # Get array of positions indexes positions = @index.pos(*input_indexes) # If one object is asked return it return @data[positions] if positions.is_a? Numeric # Form a new Vector using positional indexes Daru::Vector.new( positions.map { |loc| @data[loc] }, name: @name, index: @index.subset(*input_indexes), dtype: @dtype ) end
ruby
{ "resource": "" }
q14775
Daru.Vector.at
train
def at *positions # to be used to form index original_positions = positions positions = coerce_positions(*positions) validate_positions(*positions) if positions.is_a? Integer @data[positions] else values = positions.map { |pos| @data[pos] } Daru::Vector.new values, index: @index.at(*original_positions), dtype: dtype end end
ruby
{ "resource": "" }
q14776
Daru.Vector.set_at
train
def set_at positions, val validate_positions(*positions) positions.map { |pos| @data[pos] = val } update_position_cache end
ruby
{ "resource": "" }
q14777
Daru.Vector.concat
train
def concat element, index raise IndexError, 'Expected new unique index' if @index.include? index @index |= [index] @data[@index[index]] = element update_position_cache end
ruby
{ "resource": "" }
q14778
Daru.Vector.cast
train
def cast opts={} dt = opts[:dtype] raise ArgumentError, "Unsupported dtype #{opts[:dtype]}" unless %i[array nmatrix gsl].include?(dt) @data = cast_vector_to dt unless @dtype == dt end
ruby
{ "resource": "" }
q14779
Daru.Vector.delete_at
train
def delete_at index @data.delete_at @index[index] @index = Daru::Index.new(@index.to_a - [index]) update_position_cache end
ruby
{ "resource": "" }
q14780
Daru.Vector.index_of
train
def index_of element case dtype when :array then @index.key(@data.index { |x| x.eql? element }) else @index.key @data.index(element) end end
ruby
{ "resource": "" }
q14781
Daru.Vector.uniq
train
def uniq uniq_vector = @data.uniq new_index = uniq_vector.map { |element| index_of(element) } Daru::Vector.new uniq_vector, name: @name, index: new_index, dtype: @dtype end
ruby
{ "resource": "" }
q14782
Daru.Vector.sort_by_index
train
def sort_by_index opts={} opts = {ascending: true}.merge(opts) _, new_order = resort_index(@index.each_with_index, opts).transpose reorder new_order end
ruby
{ "resource": "" }
q14783
Daru.Vector.recode!
train
def recode! dt=nil, &block return to_enum(:recode!) unless block_given? @data.map!(&block).data @data = cast_vector_to(dt || @dtype) self end
ruby
{ "resource": "" }
q14784
Daru.Vector.delete_if
train
def delete_if return to_enum(:delete_if) unless block_given? keep_e, keep_i = each_with_index.reject { |n, _i| yield(n) }.transpose @data = cast_vector_to @dtype, keep_e @index = Daru::Index.new(keep_i) update_position_cache self end
ruby
{ "resource": "" }
q14785
Daru.Vector.verify
train
def verify (0...size) .map { |i| [i, @data[i]] } .reject { |_i, val| yield(val) } .to_h end
ruby
{ "resource": "" }
q14786
Daru.Vector.lag
train
def lag k=1 case k when 0 then dup when 1...size copy([nil] * k + data.to_a) when -size..-1 copy(data.to_a[k.abs...size]) else copy([]) end end
ruby
{ "resource": "" }
q14787
Daru.Vector.to_matrix
train
def to_matrix axis=:horizontal if axis == :horizontal Matrix[to_a] elsif axis == :vertical Matrix.columns([to_a]) else raise ArgumentError, "axis should be either :horizontal or :vertical, not #{axis}" end end
ruby
{ "resource": "" }
q14788
Daru.Vector.to_nmatrix
train
def to_nmatrix axis=:horizontal unless numeric? && !include?(nil) raise ArgumentError, 'Can not convert to nmatrix'\ 'because the vector is numeric' end case axis when :horizontal NMatrix.new [1, size], to_a when :vertical NMatrix.new [size, 1], to_a else raise ArgumentError, 'Invalid axis specified. '\ 'Valid axis are :horizontal and :vertical' end end
ruby
{ "resource": "" }
q14789
Daru.Vector.object_summary
train
def object_summary nval = count_values(*Daru::MISSING_VALUES) summary = "\n factors: #{factors.to_a.join(',')}" \ "\n mode: #{mode.to_a.join(',')}" \ "\n Distribution\n" data = frequencies.sort.each_with_index.map do |v, k| [k, v, '%0.2f%%' % ((nval.zero? ? 1 : v.quo(nval))*100)] end summary + Formatters::Table.format(data) end
ruby
{ "resource": "" }
q14790
Daru.Vector.numeric_summary
train
def numeric_summary summary = "\n median: #{median}" + "\n mean: %0.4f" % mean if sd summary << "\n std.dev.: %0.4f" % sd + "\n std.err.: %0.4f" % se end if count_values(*Daru::MISSING_VALUES).zero? summary << "\n skew: %0.4f" % skew + "\n kurtosis: %0.4f" % kurtosis end summary end
ruby
{ "resource": "" }
q14791
Daru.Vector.inspect
train
def inspect spacing=20, threshold=15 row_headers = index.is_a?(MultiIndex) ? index.sparse_tuples : index.to_a "#<#{self.class}(#{size})#{':category' if category?}>\n" + Formatters::Table.format( to_a.lazy.map { |v| [v] }, headers: @name && [@name], row_headers: row_headers, threshold: threshold, spacing: spacing ) end
ruby
{ "resource": "" }
q14792
Daru.Vector.reindex!
train
def reindex! new_index values = [] each_with_index do |val, i| values[new_index[i]] = val if new_index.include?(i) end values.fill(nil, values.size, new_index.size - values.size) @data = cast_vector_to @dtype, values @index = new_index update_position_cache self end
ruby
{ "resource": "" }
q14793
Daru.Vector.only_valid
train
def only_valid as_a=:vector, _duplicate=true # FIXME: Now duplicate is just ignored. # There are no spec that fail on this case, so I'll leave it # this way for now - zverok, 2016-05-07 new_index = @index.to_a - indexes(*Daru::MISSING_VALUES) new_vector = new_index.map { |idx| self[idx] } if as_a == :vector Daru::Vector.new new_vector, index: new_index, name: @name, dtype: dtype else new_vector end end
ruby
{ "resource": "" }
q14794
Daru.Vector.only_numerics
train
def only_numerics numeric_indexes = each_with_index .select { |v, _i| v.is_a?(Numeric) || v.nil? } .map(&:last) self[*numeric_indexes] end
ruby
{ "resource": "" }
q14795
Daru.Vector.db_type
train
def db_type # first, detect any character not number case when @data.any? { |v| v.to_s =~ DATE_REGEXP } 'DATE' when @data.any? { |v| v.to_s =~ /[^0-9e.-]/ } 'VARCHAR (255)' when @data.any? { |v| v.to_s =~ /\./ } 'DOUBLE' else 'INTEGER' end end
ruby
{ "resource": "" }
q14796
Daru.Vector.to_category
train
def to_category opts={} dv = Daru::Vector.new to_a, type: :category, name: @name, index: @index dv.ordered = opts[:ordered] || false dv.categories = opts[:categories] if opts[:categories] dv end
ruby
{ "resource": "" }
q14797
Daru.Vector.cut
train
def cut partitions, opts={} close_at, labels = opts[:close_at] || :right, opts[:labels] partitions = partitions.to_a values = to_a.map { |val| cut_find_category partitions, val, close_at } cats = cut_categories(partitions, close_at) dv = Daru::Vector.new values, index: @index, type: :category, categories: cats # Rename categories if new labels provided if labels dv.rename_categories Hash[cats.zip(labels)] else dv end end
ruby
{ "resource": "" }
q14798
Daru.Vector.valid_value?
train
def valid_value?(v) v.respond_to?(:nan?) && v.nan? || v.nil? ? false : true end
ruby
{ "resource": "" }
q14799
Daru.Vector.prepare_bootstrap
train
def prepare_bootstrap(estimators) h_est = estimators h_est = [h_est] unless h_est.is_a?(Array) || h_est.is_a?(Hash) if h_est.is_a? Array h_est = h_est.map do |est| [est, ->(v) { Daru::Vector.new(v).send(est) }] end.to_h end bss = h_est.keys.map { |v| [v, []] }.to_h [h_est, h_est.keys, bss] end
ruby
{ "resource": "" }