_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q16700
Kontena.Client.get_stream
train
# Performs a streaming GET: response body chunks are handed to
# +response_block+ as they arrive. Gzip is disabled so chunks can be
# consumed incrementally instead of buffered for decompression.
def get_stream(path, response_block, params = nil, headers = {}, auth = true)
  request(
    path: path,
    query: params,
    headers: headers,
    response_block: response_block,
    auth: auth,
    gzip: false
  )
end
ruby
{ "resource": "" }
q16701
Kontena.Client.request
train
# Performs an HTTP request against the master/cloud API.
#
# Handles: pre-flight token expiry check, body encoding, absolute-URL
# paths, one automatic retry after a token refresh on 401, and gzip
# decompression of error bodies.
def request(http_method: :get, path:'/', body: nil, query: {}, headers: {}, response_block: nil, expects: [200, 201, 204], host: nil, port: nil, auth: true, gzip: true)
  # ||= so the flag survives the `retry` jump back to the method start.
  retried ||= false
  if auth && token_expired?
    raise Excon::Error::Unauthorized, "Token expired or not valid, you need to login again, use: kontena #{token_is_for_master? ? "master" : "cloud"} login"
  end
  request_headers = request_headers(headers, auth: auth, gzip: gzip)
  if body.nil?
    body_content = ''
    request_headers.delete(CONTENT_TYPE)
  else
    body_content = encode_body(body, request_headers[CONTENT_TYPE])
    request_headers.merge!('Content-Length' => body_content.bytesize)
  end
  # An absolute URL in +path+ overrides host/port/scheme.
  uri = URI.parse(path)
  host_options = {}
  if uri.host
    host_options[:host] = uri.host
    host_options[:port] = uri.port
    host_options[:scheme] = uri.scheme
    path = uri.request_uri
  else
    host_options[:host] = host if host
    host_options[:port] = port if port
  end
  request_options = {
    method: http_method,
    expects: Array(expects),
    path: path_with_prefix(path),
    headers: request_headers,
    body: body_content,
    query: query
  }.merge(host_options)
  request_options.merge!(response_block: response_block) if response_block
  # Store the response into client.last_response
  @last_response = http_client.request(request_options)
  parse_response(@last_response)
rescue Excon::Error::Unauthorized
  if token
    debug { 'Server reports access token expired' }
    # NOTE(review): `!token` is always false here (guarded by `if token`).
    if retried || !token || !token['refresh_token']
      raise Kontena::Errors::StandardError.new(401, 'The access token has expired and needs to be refreshed')
    end
    retried = true
    # Retry the whole request once with the freshly refreshed token.
    retry if refresh_token
  end
  raise Kontena::Errors::StandardError.new(401, 'Unauthorized')
rescue Excon::Error::HTTPStatus => error
  # Excon does not transparently decompress error bodies.
  if error.response.headers['Content-Encoding'] == 'gzip'
    error.response.body = Zlib::GzipReader.new(StringIO.new(error.response.body)).read
  end
  debug { "Request #{error.request[:method].upcase} #{error.request[:path]}: #{error.response.status} #{error.response.reason_phrase}: #{error.response.body}" }
  handle_error_response(error.response)
end
ruby
{ "resource": "" }
q16702
Kontena.Client.token_account
train
# Resolves the account associated with the current token.
#
# Returns {} when there is no token or no account can be determined,
# and false when resolving raised (mirrors refresh_token's error shape).
#
# Fix: the rescue log message said "Access token refresh exception",
# copy-pasted from refresh_token — corrected to describe this method.
def token_account
  return {} unless token
  if token.respond_to?(:account)
    token.account
  elsif token.kind_of?(Hash) && token['account'].kind_of?(String)
    # Token only names the account; look it up from the config.
    config.find_account(token['account'])
  else
    {}
  end
rescue => ex
  error { "Token account resolve exception" }
  error { ex }
  false
end
ruby
{ "resource": "" }
q16703
Kontena.Client.refresh_token
train
# Exchanges the stored refresh_token for a new access token at the
# account's token endpoint. Returns true on success, false on any
# failure (including exceptions, which are logged and swallowed).
def refresh_token
  debug { "Performing token refresh" }
  return false if token.nil?
  return false if token['refresh_token'].nil?
  # The token endpoint may live on a different host/port than the API.
  uri = URI.parse(token_account['token_endpoint'])
  endpoint_data = { path: uri.path }
  endpoint_data[:host] = uri.host if uri.host
  endpoint_data[:port] = uri.port if uri.port
  debug { "Token refresh endpoint: #{endpoint_data.inspect}" }
  return false unless endpoint_data[:path]
  # auth: false — we are fetching credentials, so don't send the
  # (expired) bearer token. 400/401/403 are "expected" so the error
  # body can be inspected instead of raising.
  response = request(
    {
      http_method: token_account['token_method'].downcase.to_sym,
      body: refresh_request_params,
      headers: {
        CONTENT_TYPE => token_account['token_post_content_type']
      }.merge(
        token_account['code_requires_basic_auth'] ? basic_auth_header : {}
      ),
      expects: [200, 201, 400, 401, 403],
      auth: false
    }.merge(endpoint_data)
  )
  if response && response['access_token']
    debug { "Got response to refresh request" }
    token['access_token'] = response['access_token']
    token['refresh_token'] = response['refresh_token']
    # Server reports a relative TTL; store an absolute epoch timestamp.
    token['expires_at'] = in_to_at(response['expires_in'])
    # Persist immediately if the token object is config-backed.
    token.config.write if token.respond_to?(:config)
    true
  else
    debug { "Got null or bad response to refresh request: #{last_response.inspect}" }
    false
  end
rescue => ex
  error { "Access token refresh exception" }
  error { ex }
  false
end
ruby
{ "resource": "" }
q16704
Kontena.Client.encode_body
train
# Serializes a request body according to the outgoing content type:
# JSON-ish types are dumped as JSON, urlencoded hashes as form data,
# everything else passes through untouched.
def encode_body(body, content_type)
  case
  when content_type =~ JSON_REGEX # vnd.api+json should pass as json
    dump_json(body)
  when content_type == CONTENT_URLENCODED && body.kind_of?(Hash)
    URI.encode_www_form(body)
  else
    body
  end
end
ruby
{ "resource": "" }
q16705
Kontena.Client.in_to_at
train
# Converts a relative "expires_in" TTL (seconds) into an absolute UTC
# epoch timestamp. Non-positive / non-numeric input maps to 0.
def in_to_at(expires_in)
  seconds = expires_in.to_i
  return 0 if seconds < 1
  Time.now.utc.to_i + seconds
end
ruby
{ "resource": "" }
q16706
Kontena.RpcClient.request
train
# Sends an RPC request over the websocket and synchronously waits for
# the matching response (or error) up to +timeout+ seconds.
def request(method, params, timeout: 30)
  # Block until the websocket is up; bail out with a 500-style timeout
  # error if it never connects within the window.
  if !wait_until("websocket client is connected", timeout: timeout, threshold: 10.0, interval: 0.1) { connected? }
    raise TimeoutError.new(500, 'WebsocketClient is not connected')
  end
  id = request_id
  # Register the observable BEFORE sending, so the response cannot race
  # past us.
  observable = @requests[id] = RequestObservable.new(method, id)
  websocket_client.send_request(id, method, params)
  begin
    result, error = observe(observable, timeout: timeout)
  rescue Timeout::Error => exc
    raise TimeoutError.new(500, exc.message)
  end
  @requests.delete(id)
  if error
    raise Error.new(error['code'], error['message'])
  else
    return result
  end
rescue => exc
  # Any failure is fatal for this actor: log and abort.
  warn exc
  abort exc
end
ruby
{ "resource": "" }
q16707
Agent.NodePlugger.reject!
train
# Records a rejected websocket connection on the node and logs it.
# Errors while persisting are logged, never raised to the caller.
def reject!(connected_at, code, reason)
  # Details recorded on the node about why the websocket was refused.
  connection_state = {
    opened: false,
    close_code: code,
    close_reason: reason,
  }
  self.update_node!(connected_at,
    connected: false,
    updated: false,
    websocket_connection: connection_state,
  )
  info "Rejected connection for node #{@node.to_path} at #{connected_at} with code #{code}: #{reason}"
rescue => exc
  error exc
end
ruby
{ "resource": "" }
q16708
Stacks.SortHelper.sort_services
train
# Topologically orders services so that linked services come before the
# services that link to them. Raises MissingLinkError for links to
# unknown services and RecursiveLinkError for link cycles.
def sort_services(services)
  # Map of service name to array of deep links, including links of linked services
  service_links = {}
  # Build hash of service name to shallow array of linked service names
  # {service => [linked_service]}
  services.each do |service|
    service_links[service[:name]] = __links_for_service(service)
  end
  # Mutate each service's array to add a deep reference to each linked service's own array of linked services
  # {service => [linked_service, [linked_service_links, [...]]]}
  service_links.each do |service, links|
    # .dup because we append to the same array we are iterating.
    links.dup.each do |linked_service|
      if linked_service_links = service_links[linked_service]
        service_links[service] << linked_service_links
      else
        raise MissingLinkError.new(service, linked_service)
      end
    end
  end
  # Flatten the deep array references to a flat array
  # In case of recursive references, the Array#flatten! will fail with ArgumentError: tried to flatten recursive array
  # {service => [linked_service, linked_service_link, ...]}
  service_links.each do |service, links|
    begin
      service_links[service] = service_links[service].flatten
    rescue ArgumentError
      raise RecursiveLinkError.new(service, links)
    end
  end
  # Sort using deep service links
  services.sort{ |a, b|
    a_links = service_links[a[:name]]
    b_links = service_links[b[:name]]
    if a_links.include? b[:name]
      # a depends on b -> a sorts after b
      1
    elsif b_links.include? a[:name]
      -1
    else
      # Unrelated services: fewer dependencies first.
      a_links.size <=> b_links.size
    end
  }
end
ruby
{ "resource": "" }
q16709
Kontena::Cli::Helpers.HealthHelper.grid_health
train
# Classifies grid health from how many initial members are connected:
# :error below etcd quorum, :warning below full initial size, else :ok.
def grid_health(grid, nodes)
  initial = grid['initial_size']
  quorum = grid['initial_size'] / 2 + 1 # a majority is required for etcd quorum
  connected_members = nodes.count { |node| node['initial_member'] && node['connected'] }
  return :error if connected_members < quorum
  return :warning if connected_members < initial
  :ok
end
ruby
{ "resource": "" }
q16710
Kontena::Cli::Stacks.Common.stack
train
# Memoized stack read via the reader; parent name and values are only
# supplied when the including command actually defines them.
def stack
  return @stack if @stack
  parent = self.respond_to?(:parent_name) ? self.parent_name : nil
  values = self.respond_to?(:values_from_options) ? self.values_from_options : {}
  @stack = reader.execute(name: stack_name, parent_name: parent, values: values)
end
ruby
{ "resource": "" }
q16711
Kontena::Cli::Stacks.Common.set_env_variables
train
# Exposes the current stack/grid/platform names to child processes via
# ENV. Platform defaults to the grid name.
def set_env_variables(stack, grid, platform = grid)
  env_pairs = { 'STACK' => stack, 'GRID' => grid, 'PLATFORM' => platform }
  env_pairs.each { |key, value| ENV[key] = value }
  # Preserve the original return value (the platform string).
  platform
end
ruby
{ "resource": "" }
q16712
Kontena::Cli::Stacks.Common.stacks_client
train
# Memoized stacks registry client built from the current account's
# endpoint, token and read-auth policy.
def stacks_client
  @stacks_client ||= begin
    account = current_account
    Kontena::StacksClient.new(account.stacks_url, account.token,
                              read_requires_token: account.stacks_read_authentication)
  end
end
ruby
{ "resource": "" }
q16713
Kontena::Stacks.ChangeResolver.stack_upgraded?
train
# True when the named stack changed between the old and new data sets:
# root stacks always count as upgraded; otherwise any difference in
# version, stack_name or variables does.
def stack_upgraded?(name)
  old_stack = old_data.stack(name)
  new_stack = new_data.stack(name)
  return true if new_stack.root?
  %i[version stack_name variables].any? do |attr|
    old_stack.public_send(attr) != new_stack.public_send(attr)
  end
end
ruby
{ "resource": "" }
q16714
Kontena::Workers.LogWorker.process_queue
train
# Endless worker loop that drains queued log entries in batches and
# ships them to the master over RPC.
def process_queue
  loop do
    # Park (polling once a second) while streaming is paused.
    sleep 1 until processing?
    buffer = @queue.shift(BATCH_SIZE)
    if buffer.size > 0
      rpc_client.notification('/containers/log_batch', [buffer])
      # Tiny pause so a full queue doesn't starve other work.
      sleep 0.01
    else
      # Queue empty — back off before polling again.
      sleep 1
    end
  end
end
ruby
{ "resource": "" }
q16715
Kontena::Workers.LogWorker.start_streaming
train
# Begins tailing logs for every running container, then flips the
# streaming flag. Per-container failures are logged and skipped.
def start_streaming
  info 'start streaming logs from containers'
  Docker::Container.all.each do |container|
    stream_container_logs(container) unless container.skip_logs?
  rescue Docker::Error::NotFoundError => not_found
    # skip_logs? lazily loads container details, so the container may
    # already be gone by the time we inspect it.
    warn not_found.message
  rescue => err
    error err
  end
  @streaming = true
end
ruby
{ "resource": "" }
q16716
Kontena::Workers.LogWorker.stop_streaming
train
# Stops all per-container log streams, remembering per container the
# timestamp to resume from (the oldest still-queued entry, or "now").
def stop_streaming
  @streaming = false
  info 'stop log streaming'
  @workers.keys.dup.each do |worker_id|
    pending = @queue.find { |item| item[:id] == worker_id }
    resume_at = pending ? Time.parse(pending[:time]).to_i : Time.now.to_i
    stop_streaming_container_logs(worker_id)
    mark_timestamp(worker_id, resume_at)
  end
end
ruby
{ "resource": "" }
q16717
Kontena::Cli::Stacks.UpgradeCommand.process_data
train
# Validates and assembles old/new stack data sets and resolves the
# changes an upgrade would make. Returns a ChangeResolver.
def process_data(old_data, new_data)
  logger.debug { "Master stacks: #{old_data.keys.join(",")} YAML stacks: #{new_data.keys.join(",")}" }
  # reverse_each: process dependencies before the stacks that use them.
  new_data.reverse_each do |stackname, data|
    spinner "Processing stack #{pastel.cyan(stackname)}"
    process_stack_data(stackname, data, old_data)
    hint_on_validation_notifications(reader.notifications, reader.loader.source)
    # Aborts the whole command on validation errors.
    abort_on_validation_errors(reader.errors, reader.loader.source)
  end
  old_set = Kontena::Stacks::StackDataSet.new(old_data)
  new_set = Kontena::Stacks::StackDataSet.new(new_data)
  if skip_dependencies?
    [old_set, new_set].each(&:remove_dependencies)
  end
  spinner "Analyzing upgrade" do
    Kontena::Stacks::ChangeResolver.new(old_set, new_set)
  end
end
ruby
{ "resource": "" }
q16718
Kontena.LocalhostWebServer.serve_one
train
# Accepts a single HTTP connection on the local callback server and
# parses the OAuth callback query string into a params hash.
# Non-/cb requests get a 400 and the method recurses to wait for the
# real callback.
def serve_one
  Kontena.logger.debug("LHWS") { "Waiting for connection on port #{port}.." }
  socket = server.accept
  # Read one request (up to 2048 bytes) and split into lines.
  content = socket.recvfrom(2048).first.split(/(?:\r)?\n/)
  request = content.shift
  headers = {}
  # Header section ends at the first blank line.
  while line = content.shift
    break if line.nil?
    break if line == ''
    header, value = line.chomp.split(/:\s{0,}/, 2)
    headers[header] = value
  end
  body = content.join("\n")
  Kontena.logger.debug("LHWS") { "Got request: \"#{request.inspect}\n Headers: #{headers.inspect}\n Body: #{body}\"" }
  # Only GET /cb... requests are treated as the OAuth callback.
  get_request = request[/GET (\/cb.+?) HTTP/, 1]
  if get_request
    if success_response
      socket.print [
        'HTTP/1.1 200 OK',
        'Content-Type: text/html',
        "Content-Length: #{success_response.bytesize}",
        "Connection: close",
        '',
        success_response
      ].join("\r\n")
    else
      # No custom success page: redirect the browser to SUCCESS_URL.
      socket.print [
        'HTTP/1.1 302 Found',
        "Location: #{SUCCESS_URL}",
        "Referrer-Policy: no-referrer",
        "Connection: close",
        ''
      ].join("\r\n")
    end
    socket.close
    # Callback received — no more connections expected.
    server.close
    uri = URI.parse("http://localhost#{get_request}")
    Kontena.logger.debug("LHWS") { " * Parsing params: \"#{uri.query}\"" }
    params = {}
    # Drop empty values; coerce purely numeric values to Integer.
    URI.decode_www_form(uri.query).each do |key, value|
      if value.to_s == ''
        next
      elsif value.to_s =~ /\A\d+\z/
        params[key] = value.to_i
      else
        params[key] = value
      end
    end
    params
  else
    # Unless it's a query to /cb, send an error message and keep listening,
    # it might have been something funny like fetching favicon.ico
    socket.print [
      'HTTP/1.1 400 Bad request',
      'Content-Type: text/plain',
      "Content-Length: #{error_response.bytesize}",
      'Connection: close',
      '',
      error_response
    ].join("\r\n")
    socket.close
    serve_one # serve more, this one was not proper.
  end
end
ruby
{ "resource": "" }
q16719
Contracts.MethodHandler.handle
train
# Applies any pending contract decorators to the method being defined.
# No-op when the contracts engine is absent or nothing was decorated.
def handle
  return unless engine?
  return if decorators.empty?
  validate_decorators!
  validate_pattern_matching!
  # Register the decorator, then swap the original method for the
  # contract-checking wrapper (keeping an alias to the original).
  engine.add_method_decorator(method_type, method_name, decorator)
  mark_pattern_matching_decorators
  method_reference.make_alias(target)
  redefine_method
end
ruby
{ "resource": "" }
q16720
Contracts.MethodReference.make_definition
train
# Defines the method from +blk+ on the alias target, preserving the
# visibility (private/protected) the method had before redefinition.
def make_definition(this, &blk)
  was_private = private?(this)
  was_protected = protected?(this)
  alias_target(this).send(:define_method, name, &blk)
  # define_method resets visibility, so restore what it had before.
  make_private(this) if was_private
  make_protected(this) if was_protected
end
ruby
{ "resource": "" }
q16721
Contracts.MethodReference.make_alias
train
# Creates the backup alias for the original method on the alias target.
def make_alias(this)
  # Capture into locals: the class_eval block runs with a different
  # self, where these reader methods are not available.
  new_name = aliased_name
  current_name = name
  alias_target(this).class_eval do
    alias_method new_name, current_name
  end
end
ruby
{ "resource": "" }
q16722
Parslet::Atoms.CanFlatten.flatten
train
# Recursively collapses a parse-result s-expression into its flat form.
# Non-array values pass through unchanged.
def flatten(value, named=false)
  return value unless value.instance_of? Array
  # An s-expression is [tag, *children].
  tag, *children = value
  flat_children = children.map { |child| flatten(child) }
  case tag
  when :sequence
    flatten_sequence(flat_children)
  when :maybe
    # Unnamed maybes degrade to '' when absent; named ones stay nil.
    named ? flat_children.first : flat_children.first || ''
  when :repetition
    flatten_repetition(flat_children, named)
  else
    fail "BUG: Unknown tag #{tag.inspect}."
  end
end
ruby
{ "resource": "" }
q16723
Parslet::Atoms.CanFlatten.foldl
train
# Left fold of +list+ with +block+, using the first element as the
# seed. An empty list folds to ''.
def foldl(list, &block)
  return '' if list.empty?
  head, *rest = list
  rest.inject(head, &block)
end
ruby
{ "resource": "" }
q16724
Parslet::Atoms.CanFlatten.flatten_sequence
train
# Merges the flattened elements of a :sequence node, dropping nils.
def flatten_sequence(list)
  present = list.compact
  foldl(present) { |accum, elem| merge_fold(accum, elem) } # and then merge flat elements
end
ruby
{ "resource": "" }
q16725
Parslet::Atoms.CanFlatten.flatten_repetition
train
# Flattens a :repetition node. Keyed subtrees (hashes) dominate and
# plain strings between them are dropped; nested arrays are hoisted
# one level; otherwise the remaining strings are concatenated.
def flatten_repetition(list, named)
  keyed = list.select { |e| e.instance_of?(Hash) }
  # If keyed subtrees are in the array, we'll want to discard all
  # strings inbetween. To keep them, name them.
  return keyed unless keyed.empty?
  nested = list.select { |e| e.instance_of?(Array) }
  # If any arrays are nested in this array, flatten all arrays to this level.
  return nested.flatten(1) unless nested.empty?
  # Consistent handling of empty lists, when we act on a named result
  return [] if named && list.empty?
  # If there are only strings, concatenate them and return that.
  foldl(list.compact) { |acc, piece| acc + piece }
end
ruby
{ "resource": "" }
q16726
Parslet.ClassMethods.rule
train
# DSL entry point: defines a parser rule as a memoized method that
# wraps +definition+ in an Entity atom (enabling recursive grammars).
def rule(name, opts={}, &definition)
  # Allow redefining a rule without Ruby's method-redefinition warning.
  undef_method name if method_defined? name
  define_method(name) do
    @rules ||= {} # <name, rule> memoization
    return @rules[name] if @rules.has_key?(name)
    # Capture the self of the parser class along with the definition.
    definition_closure = proc { self.instance_eval(&definition) }
    @rules[name] = Atoms::Entity.new(name, opts[:label], &definition_closure)
  end
end
ruby
{ "resource": "" }
q16727
Parslet.Cause.raise
train
# Raises +exception_klass+ (default Parslet::ParseFailed), built from
# this cause's message with self attached as the structured cause.
def raise(exception_klass=Parslet::ParseFailed)
  # Kernel#raise must be addressed explicitly: this method shadows it.
  Kernel.raise exception_klass.new(self.to_s, self)
end
ruby
{ "resource": "" }
q16728
Parslet.Source.consume
train
# Reads +n+ characters from the input and wraps them in a Slice that
# records the position/line information they came from.
def consume(n)
  start_position = pos
  matched = @str.scan(@re_cache[n])
  Parslet::Slice.new(start_position, matched, @line_cache)
end
ruby
{ "resource": "" }
q16729
Surrealist.Builder.construct_collection
train
# Serializes a has-many style attribute: maps each member of the
# instance's collection through the nested schema (deep-copied so each
# member gets a pristine template).
def construct_collection(schema, instance, key, value)
  collection = instance.send(key)
  schema[key] = collection.map { |member| call(Copier.deep_copy(value), member) }
end
ruby
{ "resource": "" }
q16730
Surrealist.InstanceMethods.surrealize
train
# Serializes the object to a JSON string, delegating to an explicit
# serializer (args[:serializer]) or a tag-registered one (args[:for])
# when available.
def surrealize(**args)
  explicit = args[:serializer]
  return explicit.new(self).surrealize(args) if explicit
  tagged = find_serializer(args[:for])
  return tagged.new(self).surrealize(args) if tagged
  Oj.dump(Surrealist.build_schema(instance: self, **args), mode: :compat)
end
ruby
{ "resource": "" }
q16731
Surrealist.InstanceMethods.build_schema
train
# Builds the schema hash for the object, delegating to an explicit
# serializer (args[:serializer]) or a tag-registered one (args[:for])
# when available.
def build_schema(**args)
  explicit = args[:serializer]
  return explicit.new(self).build_schema(args) if explicit
  tagged = find_serializer(args[:for])
  return tagged.new(self).build_schema(args) if tagged
  Surrealist.build_schema(instance: self, **args)
end
ruby
{ "resource": "" }
q16732
Surrealist.Serializer.build_schema
train
# Builds a schema for the wrapped object; collections are serialized
# element-by-element.
def build_schema(**args)
  return build_collection_schema(args) if Helper.collection?(object)
  Surrealist.build_schema(instance: self, **args)
end
ruby
{ "resource": "" }
q16733
Surrealist.Serializer.build_collection_schema
train
# Builds a schema for each element of the wrapped collection by
# instantiating a fresh serializer (with the shared context) per item.
#
# Fix: the block parameter was named `object`, shadowing the `object`
# reader it iterates over — renamed to `item` (behavior unchanged).
def build_collection_schema(**args)
  object.map { |item| self.class.new(item, context).build_schema(args) }
end
ruby
{ "resource": "" }
q16734
Surrealist.Carrier.parameters
train
# Snapshot of all carrier options as a keyword hash.
def parameters
  %i[camelize include_root include_namespaces root namespaces_nesting_level]
    .each_with_object({}) { |option, hash| hash[option] = send(option) }
end
ruby
{ "resource": "" }
q16735
Surrealist.Carrier.check_booleans!
train
# Validates that every boolean-ish option is true, false or nil,
# raising ArgumentError on the first offender.
def check_booleans!
  booleans_hash.each do |option, supplied|
    next if BOOLEANS.include?(supplied)
    raise ArgumentError, "Expected `#{option}` to be either true, false or nil, got #{supplied}"
  end
end
ruby
{ "resource": "" }
q16736
Surrealist.Carrier.check_root!
train
# Validates the :root option: nil, a Symbol, or a non-blank String are
# accepted; anything else raises via the ExceptionRaiser.
def check_root!
  valid = root.nil? ||
          root.is_a?(Symbol) ||
          (root.is_a?(String) && !root.strip.empty?)
  Surrealist::ExceptionRaiser.raise_invalid_root!(root) unless valid
end
ruby
{ "resource": "" }
q16737
Surrealist.ClassMethods.delegate_surrealization_to
train
# Copies another Surrealist class's schema onto this class.
# Raises TypeError for non-Class arguments and an invalid-delegation
# error when +klass+ does not include Surrealist.
def delegate_surrealization_to(klass)
  raise TypeError, "Expected type of Class got #{klass.class} instead" unless klass.is_a?(Class)
  Surrealist::ExceptionRaiser.raise_invalid_schema_delegation! unless Helper.surrealist?(klass)
  schema = Surrealist::VarsHelper.find_schema(klass)
  Surrealist::VarsHelper.set_schema(self, schema)
end
ruby
{ "resource": "" }
q16738
Surrealist.ClassMethods.surrealize_with
train
# Registers +klass+ as this class's serializer under +tag+ and adopts
# its defined schema. Only Surrealist::Serializer subclasses qualify.
def surrealize_with(klass, tag: Surrealist::VarsHelper::DEFAULT_TAG)
  unless klass < Surrealist::Serializer
    raise ArgumentError, "#{klass} should be inherited from Surrealist::Serializer"
  end
  Surrealist::VarsHelper.add_serializer(self, klass, tag: tag)
  instance_variable_set(VarsHelper::PARENT_VARIABLE, klass.defined_schema)
end
ruby
{ "resource": "" }
q16739
Faktory.Client.fetch
train
# Fetches the next job from the given queues. Returns the parsed job
# hash, or nil when no job was available.
def fetch(*queues)
  payload = nil
  transaction do
    command("FETCH", *queues)
    payload = result!
  end
  payload && JSON.parse(payload)
end
ruby
{ "resource": "" }
q16740
Faktory.Client.beat
train
# Sends a heartbeat for this worker process. Returns "OK" when the
# server is happy, otherwise the requested state from the JSON reply
# (e.g. "quiet" or "terminate").
def beat
  transaction do
    command("BEAT", %Q[{"wid":"#{@@random_process_wid}"}])
    reply = result!
    if reply == "OK"
      reply
    else
      JSON.parse(reply)["state"]
    end
  end
end
ruby
{ "resource": "" }
q16741
SimpleDiscussion.BootstrapLinkRenderer.url
train
# Builds the pagination URL for +page+. The non-page parameters are
# computed once and memoized; each call only layers the page parameter
# on a copy.
def url(page)
  unless @base_url_params
    base = merge_get_params(default_url_params)
    @base_url_params = merge_optional_params(base)
  end
  page_params = @base_url_params.dup
  add_current_page_param(page_params, page)
  # Add optional url_builder support
  (@options[:url_builder] || @template).url_for(page_params)
end
ruby
{ "resource": "" }
q16742
Gemirro.Source.fetch_prerelease_versions
train
# Downloads the prerelease versions index file from this source.
def fetch_prerelease_versions
  file = Configuration.prerelease_versions_file
  Utils.logger.info("Fetching #{file} on #{@name} (#{@host})")
  Http.get(host + '/' + file).body
end
ruby
{ "resource": "" }
q16743
Gemirro.MirrorDirectory.add_file
train
# Creates a file with +content+ inside this mirror directory and
# returns the MirrorFile wrapping it.
def add_file(name, content)
  destination = File.join(@path, name)
  mirror_file = MirrorFile.new(destination)
  mirror_file.write(content)
  mirror_file
end
ruby
{ "resource": "" }
q16744
Gemirro.GemsFetcher.versions_for
train
# Selects which versions of +gem+ should be mirrored: just the newest
# for only-latest gems, otherwise every version satisfying the gem's
# requirement (falling back to the newest if none match).
def versions_for(gem)
  available = @versions_file.versions_for(gem.name)
  return [available.last] if gem.only_latest?
  matching = available.select { |ver| gem.requirement.satisfied_by?(ver[0]) }
  matching.empty? ? [available.last] : matching
end
ruby
{ "resource": "" }
q16745
Gemirro.GemsFetcher.fetch_gemspec
train
# Fetches the gemspec (.gemspec.rz) for +gem+ at +version+ from the
# source, unless it is already mirrored or the version does not satisfy
# the gem's requirement.
#
# Fix: the skip/fetch log messages were garbled ("#(unknown)") —
# restored to interpolate the gemspec filename.
def fetch_gemspec(gem, version)
  filename = gem.gemspec_filename(version)
  # Only-latest gems skip the requirement check entirely.
  satisfied = gem.only_latest? || gem.requirement.satisfied_by?(version)
  if gemspec_exists?(filename) || !satisfied
    Utils.logger.debug("Skipping #{filename}")
    return
  end
  Utils.logger.info("Fetching #{filename}")
  fetch_from_source(filename, gem, version, true)
end
ruby
{ "resource": "" }
q16746
Gemirro.GemsFetcher.fetch_gem
train
# Fetches the .gem file for +gem+ at +version+ from the source, unless
# it already exists locally, is on the ignore list, or does not satisfy
# the gem's requirement. Successfully scheduled gems are added to the
# ignore list so they are not fetched again.
#
# Fix: the skip/fetch log messages were garbled ("#(unknown)") —
# restored to interpolate the gem filename.
def fetch_gem(gem, version)
  filename = gem.filename(version)
  # Only-latest gems skip the requirement check entirely.
  satisfied = gem.only_latest? || gem.requirement.satisfied_by?(version)
  name = gem.name
  if gem_exists?(filename) || ignore_gem?(name, version, gem.platform) || !satisfied
    Utils.logger.debug("Skipping #{filename}")
    return
  end
  # Remember this gem/version so it won't be fetched twice.
  Utils.configuration.ignore_gem(gem.name, version, gem.platform)
  Utils.logger.info("Fetching #{filename}")
  fetch_from_source(filename, gem, version)
end
ruby
{ "resource": "" }
q16747
Gemirro.VersionsFetcher.read_file
train
# Returns the contents of a versions index file from the mirror
# destination, downloading it from the source first when the local
# copy does not exist yet.
def read_file(file, prerelease = false)
  destination = Gemirro.configuration.destination
  file_dst = File.join(destination, file)
  unless File.exist?(file_dst)
    # Seed the local copy from the remote source on first access.
    content = prerelease ? @source.fetch_prerelease_versions : @source.fetch_versions
    File.write(file_dst, content)
  end
  File.read(file_dst)
end
ruby
{ "resource": "" }
q16748
Gemirro.Gem.filename
train
# Builds the .gem file name for this gem, defaulting to the gem's own
# version when none is given.
def filename(gem_version = nil)
  gem_version ||= version.to_s
  parts = [name, gem_version]
  # Platform suffix only for non-default (non-ruby) platforms.
  parts << @platform if @platform != 'ruby'
  "#{parts.join('-')}.gem"
end
ruby
{ "resource": "" }
q16749
Gemirro.Gem.gemspec_filename
train
# Builds the .gemspec.rz file name for this gem, defaulting to the
# gem's own version when none is given.
def gemspec_filename(gem_version = nil)
  gem_version ||= version.to_s
  parts = [name, gem_version]
  # Platform suffix only for non-default (non-ruby) platforms.
  parts << @platform if @platform != 'ruby'
  "#{parts.join('-')}.gemspec.rz"
end
ruby
{ "resource": "" }
q16750
Gemirro.Server.fetch_gem
train
# On-demand fetch: when a client requests a gem/gemspec that is not
# mirrored yet, parse the resource name and pull it from the source.
def fetch_gem(resource)
  return unless Utils.configuration.fetch_gem
  name = File.basename(resource)
  result = name.match(URI_REGEXP)
  return unless result
  gem_name, gem_version, gem_platform, gem_type = result.captures
  return unless gem_name && gem_version
  begin
    gem = Utils.stored_gem(gem_name, gem_version, gem_platform)
    gem.gemspec = true if gem_type == GEMSPEC_TYPE
    # Already mirrored? Nothing to do.
    # rubocop:disable Metrics/LineLength
    return if Utils.gems_fetcher.gem_exists?(gem.filename(gem_version)) && gem_type == GEM_TYPE
    return if Utils.gems_fetcher.gemspec_exists?(gem.gemspec_filename(gem_version)) && gem_type == GEMSPEC_TYPE
    # rubocop:enable Metrics/LineLength
    Utils.logger
         .info("Try to download #{gem_name} with version #{gem_version}")
    # The fetcher works off its source's gem list; replace it with just
    # this one gem.
    Utils.gems_fetcher.source.gems.clear
    Utils.gems_fetcher.source.gems.push(gem)
    Utils.gems_fetcher.fetch
    update_indexes if Utils.configuration.update_on_fetch
  rescue StandardError => e
    # Best effort: a failed on-demand fetch must not break serving.
    Utils.logger.error(e)
  end
end
ruby
{ "resource": "" }
q16751
Gemirro.Server.update_indexes
train
# Regenerates the mirror's gem indexes and invalidates the cache entry
# of every gem the index run touched.
def update_indexes
  indexer = Gemirro::Indexer.new(Utils.configuration.destination)
  indexer.only_origin = true
  indexer.ui = ::Gem::SilentUI.new
  Utils.logger.info('Generating indexes')
  indexer.update_index
  indexer.updated_gems.each { |gem_path| Utils.cache.flush_key(File.basename(gem_path)) }
rescue SystemExit => e
  # RubyGems' indexer may exit instead of raising; log and carry on.
  Utils.logger.info(e.message)
end
ruby
{ "resource": "" }
q16752
Gemirro.Server.query_gems_list
train
# Resolves dependency info for all queried gems in parallel, dropping
# empty results.
def query_gems_list
  Utils.gems_collection(false) # load collection
  dependency_sets = Parallel.map(query_gems, in_threads: 4) do |queried|
    gem_dependencies(queried)
  end
  dependency_sets.flatten!
  dependency_sets.reject!(&:empty?)
  dependency_sets
end
ruby
{ "resource": "" }
q16753
Gemirro.Server.gem_dependencies
train
# Resolves the runtime-dependency descriptors for every version of
# +gem_name+, cached under the gem's name.
def gem_dependencies(gem_name)
  Utils.cache.cache(gem_name) do
    gems = Utils.gems_collection(false)
    gem_collection = gems.find_by_name(gem_name)
    # NOTE(review): this `return` exits the whole method from inside the
    # cache block — presumably the '' result bypasses cache storage;
    # verify against Utils.cache's contract.
    return '' if gem_collection.nil?
    # Pair each version with its loaded spec (parallelized).
    gem_collection = Parallel.map(gem_collection, in_threads: 4) do |gem|
      [gem, spec_for(gem.name, gem.number, gem.platform)]
    end
    # Drop versions whose spec could not be loaded.
    gem_collection.reject! do |_, spec|
      spec.nil?
    end
    Parallel.map(gem_collection, in_threads: 4) do |gem, spec|
      # Only runtime dependencies are served (development ones are not
      # needed by resolvers).
      dependencies = spec.dependencies.select do |d|
        d.type == :runtime
      end
      dependencies = Parallel.map(dependencies, in_threads: 4) do |d|
        [d.name.is_a?(Array) ? d.name.first : d.name, d.requirement.to_s]
      end
      {
        name: gem.name,
        number: gem.number,
        platform: gem.platform,
        dependencies: dependencies
      }
    end
  end
end
ruby
{ "resource": "" }
q16754
Gemirro.Indexer.download_from_source
train
# Downloads +file+ (by basename) from the configured source host.
# Returns the body on HTTP 200, nil otherwise.
def download_from_source(file)
  source_host = Gemirro.configuration.source.host
  Utils.logger.info("Download from source: #{file}")
  response = Http.get("#{source_host}/#{File.basename(file)}")
  response.body if response.code == 200
end
ruby
{ "resource": "" }
q16755
Gemirro.Indexer.map_gems_to_specs
train
# Loads a Gem::Specification for every .gem file path in +gems+,
# skipping (with a log line) empty, misnamed or invalid-version gems.
# Returns the compacted list of specs.
def map_gems_to_specs(gems)
  gems.map.with_index do |gemfile, index|
    # rubocop:disable Metrics/LineLength
    Utils.logger.info("[#{index + 1}/#{gems.size}]: Processing #{gemfile.split('/')[-1]}")
    # rubocop:enable Metrics/LineLength
    if File.size(gemfile).zero?
      Utils.logger.warn("Skipping zero-length gem: #{gemfile}")
      next
    end
    begin
      # Gem::Package.open is the legacy (pre-2.x RubyGems) API.
      spec = if ::Gem::Package.respond_to? :open
               ::Gem::Package
                 .open(File.open(gemfile, 'rb'), 'r', &:metadata)
             else
               ::Gem::Package.new(gemfile).spec
             end
      spec.loaded_from = gemfile
      # HACK: this misnamed-gem check breaks all tests that use pl1
      if File.basename(gemfile, '.gem') != spec.original_name
        exp = spec.full_name
        exp << " (#{spec.original_name})" if spec.original_name != spec.full_name
        msg = "Skipping misnamed gem: #{gemfile} should be named #{exp}"
        Utils.logger.warn(msg)
        next
      end
      # Require a plain x.y.z[.whatever] version string.
      version = spec.version.version
      unless version =~ /^\d+\.\d+\.\d+.*/
        msg = "Skipping gem #{spec.full_name} - invalid version #{version}"
        Utils.logger.warn(msg)
        next
      end
      # Trim the spec for index storage; API moved in RubyGems 2.5.
      if ::Gem::VERSION >= '2.5.0'
        spec.abbreviate
        spec.sanitize
      else
        abbreviate spec
        sanitize spec
      end
      spec
    rescue SignalException
      msg = 'Received signal, exiting'
      Utils.logger.error(msg)
      raise
    rescue StandardError => e
      # A single broken gem must not abort indexing; log and skip it.
      msg = ["Unable to process #{gemfile}",
             "#{e.message} (#{e.class})",
             "\t#{e.backtrace.join "\n\t"}"].join("\n")
      Utils.logger.debug(msg)
    end
  end.compact
end
ruby
{ "resource": "" }
q16756
Gemirro.GemVersionCollection.by_name
train
def by_name(&block) if @grouped.nil? @grouped = @gems.group_by(&:name).map do |name, collection| [name, GemVersionCollection.new(collection)] end @grouped.reject! do |name, _collection| name.nil? end @grouped.sort_by! do |name, _collection| name.downcase end end if block_given? @grouped.each(&block) else @grouped end end
ruby
{ "resource": "" }
q16757
Gemirro.GemVersionCollection.find_by_name
train
# Returns the version collection for +gemname+, or nil when the gem is
# unknown.
def find_by_name(gemname)
  matches = by_name.select { |name, _collection| name == gemname }
  matches.first.last if matches.any?
end
ruby
{ "resource": "" }
q16758
Gemirro.Configuration.logger_level=
train
# Sets the logger level from a symbolic name, ignoring unknown names.
# Returns the logger.
def logger_level=(level)
  mapped = LOGGER_LEVEL[level]
  logger.level = mapped if LOGGER_LEVEL.key?(level)
  logger
end
ruby
{ "resource": "" }
q16759
Gemirro.Configuration.ignore_gem
train
# Marks a gem version as ignored for a given platform, creating the
# nested platform/name buckets on demand.
def ignore_gem(name, version, platform)
  platform_gems = (ignored_gems[platform] ||= {})
  versions = (platform_gems[name] ||= [])
  versions << version
end
ruby
{ "resource": "" }
q16760
Gemirro.Configuration.ignore_gem?
train
# Whether the given gem/version is on the ignore list for +platform+.
#
# Fix: the original indexed ignored_gems[platform][name] directly and
# raised NoMethodError when the platform had never been registered;
# now an unknown platform simply answers false.
def ignore_gem?(name, version, platform)
  platform_gems = ignored_gems[platform]
  return false unless platform_gems
  versions = platform_gems[name]
  versions ? versions.include?(version) : false
end
ruby
{ "resource": "" }
q16761
Gemirro.Configuration.define_source
train
# DSL: configures the mirror source. The block is instance_eval'd on
# the new Source before it is stored.
def define_source(name, url, &block)
  @source = Source.new(name, url).tap { |src| src.instance_eval(&block) }
end
ruby
{ "resource": "" }
q16762
Gemirro.MirrorFile.read
train
# Returns the full contents of the mirrored file.
#
# Fix: uses File.read, which always closes the handle — the original
# open/read/close sequence leaked the handle if #read raised.
def read
  File.read(@path)
end
ruby
{ "resource": "" }
q16763
Upton.Scraper.next_index_page_url
train
# Computes the URL of index page +pagination_index+ by rewriting the
# pagination query parameter. Returns the URL untouched for
# non-paginated scrapers, and EMPTY_STRING past the page limit.
def next_index_page_url(url, pagination_index)
  return url unless @paginated
  if pagination_index > @pagination_max_pages
    puts "Exceeded pagination limit of #{@pagination_max_pages}" if @verbose
    return EMPTY_STRING
  end
  uri = URI.parse(url)
  params = uri.query ? Hash[URI.decode_www_form(uri.query)] : {}
  # update the pagination query string parameter
  params[@pagination_param] = pagination_index
  uri.query = URI.encode_www_form(params)
  puts "Next index pagination url is #{uri}" if @verbose
  uri.to_s
end
ruby
{ "resource": "" }
q16764
Upton.Scraper.scrape_to_csv
train
# Scrapes every instance page and writes the results to +filename+ as
# CSV. A document that is a list of rows is written row-by-row; a flat
# document becomes a single row.
def scrape_to_csv filename, &blk
  require 'csv'
  self.url_array = self.get_index unless self.url_array
  CSV.open filename, 'wb' do |csv|
    #this is a conscious choice: each document is a list of things, either single elements or rows (as lists).
    self.scrape_from_list(self.url_array, blk).compact.each do |document|
      # Row-of-rows detection: if the first element maps, treat each
      # element as its own CSV row.
      if document[0].respond_to? :map
        document.each{|row| csv << row }
      else
        csv << document
      end
    end
    #self.scrape_from_list(self.url_array, blk).compact.each{|document| csv << document }
  end
end
ruby
{ "resource": "" }
q16765
Upton.Scraper.get_page
train
# Downloads (or reads from cache) a single page. Returns the response
# body, or EMPTY_STRING for a missing/blank URL. Sleeps between real
# network fetches to rate-limit the scraper.
def get_page(url, stash=false, options={})
  return EMPTY_STRING if url.nil? || url.empty? #url is nil if the <a> lacks an `href` attribute.
  global_options = {
    :cache => stash,
    :verbose => @verbose
  }
  if @readable_filenames
    global_options[:readable_filenames] = true
  end
  if @stash_folder
    # A custom stash folder implies readable cache filenames.
    global_options[:readable_filenames] = true
    global_options[:cache_location] = @stash_folder
  end
  # Per-call options override the scraper-level defaults.
  resp_and_cache = Downloader.new(url, global_options.merge(options)).get
  if resp_and_cache[:from_resource]
    # Only sleep when we actually hit the network (not the cache).
    puts "sleeping #{@sleep_time_between_requests} secs" if @verbose
    sleep @sleep_time_between_requests
  end
  resp_and_cache[:resp]
end
ruby
{ "resource": "" }
q16766
Upton.Scraper.get_instance
train
# Fetches an instance page plus its pagination continuations, returning
# the array of page bodies.
def get_instance(url, pagination_index=0, options={})
  resps = [self.get_page(url, @debug, options)]
  # nil-safe: callers may pass nil, which coerces to 0.
  pagination_index = pagination_index.to_i
  prev_url = url
  while !resps.last.empty?
    # NOTE(review): pagination_index is never incremented inside the
    # loop, so each iteration asks for the same "next" page; the
    # prev_url comparison then stops the loop after one extra page.
    # Looks like a missing `pagination_index += 1` — verify upstream.
    next_url = self.next_instance_page_url(url, pagination_index + 1)
    break if next_url == prev_url || next_url.empty?
    next_resp = self.get_page(next_url, @debug, options)
    prev_url = next_url
    resps << next_resp
  end
  resps
end
ruby
{ "resource": "" }
q16767
Upton.Scraper.scrape_from_list
train
# Runs +blk+ against every page of every instance URL in +list+,
# returning the per-page results in one flat array.
def scrape_from_list(list, blk)
  puts "Scraping #{list.size} instances" if @verbose
  list.each_with_index.map do |instance_url, instance_index|
    pages = get_instance instance_url, nil, :instance_index => instance_index
    pages.each_with_index.map do |page_resp, pagination_index|
      blk.call(page_resp, instance_url, instance_index, pagination_index)
    end
  end.flatten(1)
end
ruby
{ "resource": "" }
q16768
CF::UAA.Info.varz
train
# Fetches the UAA /varz monitoring endpoint using HTTP basic auth.
def varz(name, pwd)
  auth_header = { "authorization" => Http.basic_auth(name, pwd) }
  json_get(target, "/varz", key_style, auth_header)
end
ruby
{ "resource": "" }
q16769
CF::UAA.Info.server
train
# Fetches the target's /login info. A reply without login prompts is
# treated as invalid and raises BadResponse.
def server
  reply = json_get(target, '/login', key_style)
  has_prompts = reply && (reply[:prompts] || reply['prompts'])
  return reply if has_prompts
  raise BadResponse, "Invalid response from target #{target}"
end
ruby
{ "resource": "" }
q16770
CF::UAA.Info.discover_uaa
train
# Resolves the UAA endpoint from the login server's "links" section,
# falling back to the configured target when absent.
def discover_uaa
  info = server
  links = info['links'] || info[:links]
  uaa_url = links && (links['uaa'] || links[:uaa])
  uaa_url || target
end
ruby
{ "resource": "" }
q16771
CF::UAA.Info.validation_key
train
# Fetches the token validation key; sends client basic-auth only when
# both id and secret are supplied.
def validation_key(client_id = nil, client_secret = nil)
  hdrs = {}
  if client_id && client_secret
    hdrs["authorization"] = Http.basic_auth(client_id, client_secret)
  end
  json_get(target, "/token_key", key_style, hdrs)
end
ruby
{ "resource": "" }
q16772
CF::UAA.Info.password_strength
train
# Asks the UAA to score a candidate password's strength.
def password_strength(password)
  reply = request(target, :post, '/password/score',
                  Util.encode_form(:password => password),
                  "content-type" => Http::FORM_UTF8,
                  "accept" => Http::JSON_UTF8)
  json_parse_reply(key_style, *reply)
end
ruby
{ "resource": "" }
q16773
CF::UAA.Scim.type_info
train
# Looks up schema metadata for a SCIM resource type.
#
# type - one of :user, :group, :client, :user_id, :group_mapping.
# elem - the schema element to fetch, e.g. :path, :name_attr, :origin_attr.
#
# Returns the requested value; raises ArgumentError for an unknown type
# or an element the type does not define.
def type_info(type, elem)
  schema = {
    user: { path: '/Users', name_attr: 'userName', origin_attr: 'origin' },
    group: { path: '/Groups', name_attr: 'displayName', origin_attr: 'zoneid' },
    client: { path: '/oauth/clients', name_attr: 'client_id' },
    user_id: { path: '/ids/Users', name_attr: 'userName', origin_attr: 'origin' },
    group_mapping: { path: '/Groups/External', name_attr: 'externalGroup', origin_attr: 'origin' }
  }
  entry = schema[type]
  unless entry
    raise ArgumentError, "scim resource type must be one of #{schema.keys.inspect}"
  end
  attr_value = entry[elem]
  unless attr_value
    raise ArgumentError, "scim schema element must be one of #{entry.keys.inspect}"
  end
  attr_value
end
ruby
{ "resource": "" }
q16774
CF::UAA.Scim.add
train
# Creates a SCIM resource of the given +type+ from +info+ and returns
# the parsed server reply.
def add(type, info)
  endpoint = type_info(type, :path)
  payload = force_case(info)
  reply = json_parse_reply(@key_style, *json_post(@target, endpoint, payload, headers))
  # Client replies are not quite SCIM; mask the client_id as an id.
  fake_client_id(reply) if type == :client
  reply
end
ruby
{ "resource": "" }
q16775
CF::UAA.Scim.put
train
# Replaces a SCIM resource of the given +type+ with +info+ (PUT).
# The info hash must carry the resource id ('client_id' for clients,
# 'id' otherwise); an ETag from info['meta']['version'] is sent as
# an if-match header when present.
def put(type, info)
  endpoint = type_info(type, :path)
  payload = force_case(info)
  id_key = type == :client ? 'client_id' : 'id'
  resource_id = payload[id_key]
  raise ArgumentError, "info must include #{id_key}" unless resource_id
  request_headers = headers
  etag = payload && payload['meta'] && payload['meta']['version']
  request_headers.merge!('if-match' => etag) if etag
  reply = json_parse_reply(@key_style,
                           *json_put(@target, "#{endpoint}/#{URI.encode(resource_id)}", payload, request_headers))
  # Client endpoints are not quite SCIM compatible: they reply with no
  # body, so re-fetch the client to return one.
  if type == :client && !reply
    get(type, payload['client_id'])
  else
    reply
  end
end
ruby
{ "resource": "" }
q16776
CF::UAA.Scim.query
train
# Queries the SCIM endpoint for resources of +type+.
#
# type  - SCIM resource type understood by #type_info.
# query - optional query hash; recognized keys include 'attributes'
#         (normalized per-attribute) and 'filter'. Nil values are dropped.
#
# Returns a hash whose :resources / "resources" key holds an array of
# matching resources. Raises BadResponse on a malformed reply.
def query(type, query = {})
  query = force_case(query).reject {|k, v| v.nil? }
  # Normalize the requested attribute names into the server's casing.
  if attrs = query['attributes']
    attrs = Util.arglist(attrs).map {|a| force_attr(a)}
    query['attributes'] = Util.strlist(attrs, ",")
  end
  qstr = query.empty?? '': "?#{Util.encode_form(query)}"
  info = json_get(@target, "#{type_info(type, :path)}#{qstr}", @key_style, headers)
  unless info.is_a?(Hash) && info[rk = jkey(:resources)].is_a?(Array)
    # hide client endpoints that are not yet scim compatible
    # The client endpoint returns a plain hash of clients rather than a
    # SCIM list; rewrite each entry and wrap it in a resources array.
    if type == :client && info.is_a?(Hash)
      info = info.each{ |k, v| fake_client_id(v) }.values
      # Emulate a SCIM "client_id eq" filter locally (case-insensitive),
      # since the client endpoint does not support filtering.
      if m = /^client_id\s+eq\s+"([^"]+)"$/i.match(query['filter'])
        idk = jkey(:client_id)
        info = info.select { |c| c[idk].casecmp(m[1]) == 0 }
      end
      return {rk => info}
    end
    raise BadResponse, "invalid reply to #{type} query of #{@target}"
  end
  info
end
ruby
{ "resource": "" }
q16777
CF::UAA.Scim.get
train
# Fetches a single SCIM resource of the given +type+ by its +id+.
def get(type, id)
  resource_path = "#{type_info(type, :path)}/#{URI.encode(id)}"
  info = json_get(@target, resource_path, @key_style, headers)
  # Client replies are not quite SCIM; mask the client_id as an id.
  fake_client_id(info) if type == :client
  info
end
ruby
{ "resource": "" }
q16778
CF::UAA.TokenIssuer.implicit_grant_with_creds
train
# Performs an OAuth2 implicit grant by posting the user's +credentials+
# directly to the authorization endpoint and parsing the token out of the
# redirect's URI fragment.
#
# credentials - hash of login credentials (e.g. username/password).
# scope       - optional requested scope; "openid" adds an id_token.
#
# Returns the parsed implicit-grant params; raises BadResponse when the
# server does not reply with the expected 302 redirect back to the
# manufactured redirect_uri.
def implicit_grant_with_creds(credentials, scope = nil)
  # this manufactured redirect_uri is a convention here, not part of OAuth2
  redir_uri = "https://uaa.cloudfoundry.com/redirect/#{@client_id}"
  response_type = "token"
  response_type = "#{response_type} id_token" if scope && (scope.include? "openid")
  # NOTE: `state = random_state` assigns the CSRF state inline; it is
  # checked again when parsing the fragment below.
  uri = authorize_path_args(response_type, redir_uri, scope, state = random_state)

  # the accept header is only here so the uaa will issue error replies in json to aid debugging
  headers = {'content-type' => FORM_UTF8, 'accept' => JSON_UTF8 }
  body = Util.encode_form(credentials.merge(:source => 'credentials'))
  status, body, headers = request(@target, :post, uri, body, headers)
  raise BadResponse, "status #{status}" unless status == 302
  # The token comes back in the fragment of the Location header; the rest
  # of the location must match the redirect_uri we supplied.
  req_uri, reply_uri = URI.parse(redir_uri), URI.parse(headers['location'])
  fragment, reply_uri.fragment = reply_uri.fragment, nil
  raise BadResponse, "bad location header" unless req_uri == reply_uri
  parse_implicit_params(fragment, state)
rescue URI::Error => e
  raise BadResponse, "bad location header in reply: #{e.message}"
end
ruby
{ "resource": "" }
q16779
CF::UAA.TokenIssuer.implicit_uri
train
# Builds the authorization URI for an implicit grant. Requesting the
# "openid" scope additionally asks for an id_token.
def implicit_uri(redirect_uri, scope = nil)
  wants_openid = scope && scope.include?("openid")
  response_type = wants_openid ? "token id_token" : "token"
  @target + authorize_path_args(response_type, redirect_uri, scope)
end
ruby
{ "resource": "" }
q16780
CF::UAA.TokenIssuer.implicit_grant
train
# Completes an implicit grant: verifies that the original authorize
# request (implicit_uri) carried state and redirect_uri, then parses the
# token out of the callback's URI fragment.
def implicit_grant(implicit_uri, callback_fragment)
  raw_query = URI.parse(implicit_uri).query
  request_params = Util.decode_form(raw_query)
  if request_params['state'] && request_params['redirect_uri']
    parse_implicit_params(callback_fragment, request_params['state'])
  else
    raise ArgumentError, "redirect must happen before implicit grant"
  end
end
ruby
{ "resource": "" }
q16781
CF::UAA.TokenIssuer.autologin_uri
train
# Obtains a one-time autologin code for +credentials+ from the /autologin
# endpoint and returns an authorization-code URI embedding that code.
def autologin_uri(redirect_uri, credentials, scope = nil)
  request_headers = {
    'content-type' => FORM_UTF8,
    'accept' => JSON_UTF8,
    'authorization' => Http.basic_auth(@client_id, @client_secret)
  }
  form_body = Util.encode_form(credentials)
  reply = json_parse_reply(nil, *request(@target, :post, "/autologin", form_body, request_headers))
  raise BadResponse, "no autologin code in reply" unless reply['code']
  @target + authorize_path_args('code', redirect_uri, scope, random_state, :code => reply['code'])
end
ruby
{ "resource": "" }
q16782
CF::UAA.TokenIssuer.authcode_grant
train
# Completes an authorization-code grant. Validates that the original
# authorize request (authcode_uri) carried state and redirect_uri, checks
# the callback's state against it, extracts the code, and exchanges it
# for a token.
def authcode_grant(authcode_uri, callback_query)
  initial_params = Util.decode_form(URI.parse(authcode_uri).query)
  unless initial_params['state'] && initial_params['redirect_uri']
    raise ArgumentError, "authcode redirect must happen before authcode grant"
  end
  begin
    callback_params = Util.decode_form(callback_query)
    authcode = callback_params['code']
    # State mismatch or missing code means the callback cannot be trusted.
    raise BadResponse unless callback_params['state'] == initial_params['state'] && authcode
  rescue URI::InvalidURIError, ArgumentError, BadResponse
    raise BadResponse, "received invalid response from target #{@target}"
  end
  request_token(:grant_type => 'authorization_code',
                :code => authcode,
                :redirect_uri => initial_params['redirect_uri'])
end
ruby
{ "resource": "" }
q16783
FiniteMachine.EventsMap.choice_transition?
train
# Returns true when the event +name+ has more than one transition that
# can fire from +from_state+ (i.e. a choice pseudo-state).
def choice_transition?(name, from_state)
  matching = find(name).count { |transition| transition.matches?(from_state) }
  matching > 1
end
ruby
{ "resource": "" }
q16784
FiniteMachine.EventsMap.match_transition
train
# Returns the first transition for event +name+ that matches +from_state+,
# or nil when none does.
def match_transition(name, from_state)
  find(name).detect { |transition| transition.matches?(from_state) }
end
ruby
{ "resource": "" }
q16785
FiniteMachine.EventsMap.match_transition_with
train
# Returns the first transition for event +name+ that both matches
# +from_state+ and passes its guard conditions, or nil when none does.
def match_transition_with(name, from_state, *conditions)
  find(name).detect do |transition|
    transition.matches?(from_state) && transition.check_conditions(*conditions)
  end
end
ruby
{ "resource": "" }
q16786
FiniteMachine.EventsMap.select_transition
train
# Picks the transition to fire for event +name+ from +from_state+:
# when several candidates exist, guard conditions decide; otherwise the
# single matching transition wins.
def select_transition(name, from_state, *conditions)
  return match_transition(name, from_state) unless choice_transition?(name, from_state)
  match_transition_with(name, from_state, *conditions)
end
ruby
{ "resource": "" }
q16787
FiniteMachine.EventsMap.move_to
train
# Resolves the target state for firing event +name+ from +from_state+.
# Falls back to an UndefinedTransition when nothing matches.
def move_to(name, from_state, *conditions)
  transition = select_transition(name, from_state, *conditions) || UndefinedTransition.new(name)
  transition.to_state(from_state)
end
ruby
{ "resource": "" }
q16788
FiniteMachine.EventsMap.to_s
train
# Renders the events map as the string form of a plain Hash snapshot
# of event name => transitions.
def to_s
  @events_map.each_pair.each_with_object({}) do |(name, transitions), snapshot|
    snapshot[name] = transitions
  end.to_s
end
ruby
{ "resource": "" }
q16789
FiniteMachine.Transition.make_conditions
train
# Wraps the :if guards as Callables and the :unless guards as inverted
# Callables, returning them as a single array (:if guards first).
def make_conditions
  positive = @if.map { |condition| Callable.new(condition) }
  negative = @unless.map { |condition| Callable.new(condition).invert }
  positive + negative
end
ruby
{ "resource": "" }
q16790
FiniteMachine.Transition.matches?
train
# Returns true when this transition can fire from +from+ — i.e. one of
# its source states is +from+ or the ANY_STATE wildcard.
def matches?(from)
  candidates = [ANY_STATE, from]
  states.keys.any? { |state| candidates.include?(state) }
end
ruby
{ "resource": "" }
q16791
FiniteMachine.ChoiceMerger.choice
train
# Adds a choice branch: builds a transition from the pending :from state
# to +to+, merging any guard +conditions+ into the transition attributes.
def choice(to, **conditions)
  merged_attributes = @transitions.merge(conditions)
  builder = TransitionBuilder.new(@machine, @name, merged_attributes)
  builder.call(@transitions[:from] => to)
end
ruby
{ "resource": "" }
q16792
FiniteMachine.StateDefinition.define_state_query_method
train
# Defines a predicate like machine.green? for +state+ unless the machine
# already responds to it.
def define_state_query_method(state)
  query_name = "#{state}?"
  return if machine.respond_to?(query_name)
  machine.send(:define_singleton_method, query_name) do
    machine.is?(state.to_sym)
  end
end
ruby
{ "resource": "" }
q16793
FiniteMachine.Observer.on
train
# Registers +callback+ for +hook_type+ on the given state or event name.
# A nil name registers for any state/event of that hook type; passing
# :async makes the callback run asynchronously.
def on(hook_type, state_or_event_name = nil, async = nil, &callback)
  sync_exclusive do
    name = state_or_event_name.nil? ? HookEvent.any_state_or_event(hook_type) : state_or_event_name
    run_mode = async.nil? ? false : async
    ensure_valid_callback_name!(hook_type, name)
    callback.extend(Async) if run_mode == :async
    hooks.register(hook_type, name, callback)
  end
end
ruby
{ "resource": "" }
q16794
FiniteMachine.Observer.off
train
# Unregisters +callback+ for +hook_type+ on +name+ (defaults to any state).
def off(hook_type, name = ANY_STATE, &callback)
  sync_exclusive { hooks.unregister(hook_type, name, callback) }
end
ruby
{ "resource": "" }
q16795
FiniteMachine.Observer.emit
train
# Fires all hooks registered for +event+: first the wildcard
# (any-state/any-event) hooks for the event's type, then the hooks bound
# to the event's specific name. One-shot (Once) hooks are removed after
# they run.
def emit(event, *data)
  sync_exclusive do
    [event.type].each do |hook_type|
      any_state_or_event = HookEvent.any_state_or_event(hook_type)
      [any_state_or_event, event.name].each do |event_name|
        hooks[hook_type][event_name].each do |hook|
          handle_callback(hook, event, *data)
          # Once hooks are unregistered immediately after firing; note
          # this mutates the hook list while it is being iterated, so the
          # statement order here matters.
          off(hook_type, event_name, &hook) if hook.is_a?(Once)
        end
      end
    end
  end
end
ruby
{ "resource": "" }
q16796
FiniteMachine.Observer.handle_callback
train
# Invokes a single +hook+ for +event+: resolves the destination state,
# builds the TransitionEvent passed to the callback, and either runs the
# callback inline or defers it when the hook is asynchronous.
def handle_callback(hook, event, *data)
  destination = machine.events_map.move_to(event.event_name, event.from, *data)
  transition_event = TransitionEvent.new(event.event_name, event.from, destination)
  callable = create_callable(hook)
  return defer(callable, transition_event, *data) if hook.is_a?(Async)
  callable.call(transition_event, *data)
end
ruby
{ "resource": "" }
q16797
FiniteMachine.Observer.defer
train
# Queues +callable+ for asynchronous execution, starting the callback
# queue first if it is not already running.
def defer(callable, trans_event, *data)
  queued_call = AsyncCall.new(machine, callable, trans_event, *data)
  callback_queue.start unless callback_queue.running?
  callback_queue << queued_call
end
ruby
{ "resource": "" }
q16798
FiniteMachine.Observer.create_callable
train
# Wraps +hook+ in a Callable whose invocation instance_execs the hook in
# the machine's context, passing through the transition event and data.
def create_callable(hook)
  Callable.new(proc { |trans_event, *data| machine.instance_exec(trans_event, *data, &hook) })
end
ruby
{ "resource": "" }
q16799
FiniteMachine.HookEvent.notify
train
# Delivers this hook event to +subscriber+ via the MESSAGE method,
# doing nothing when the subscriber does not implement it.
def notify(subscriber, *data)
  if subscriber.respond_to?(MESSAGE)
    subscriber.public_send(MESSAGE, self, *data)
  end
end
ruby
{ "resource": "" }