_id
stringlengths
2
6
title
stringlengths
9
130
partition
stringclasses
3 values
text
stringlengths
66
10.5k
language
stringclasses
1 value
meta_information
dict
q15700
Honeybadger.Agent.notify
train
# Report an exception (or an error message / options hash) to Honeybadger.
#
# exception_or_opts - an Exception, a Hash-like of notice options, or any
#   object whose #to_s becomes the error message
# opts - additional notice options merged with the above
#
# Returns the notice id on success, false when the notice was skipped
# (missing API key, ignored, or halted by a before_notify hook).
def notify(exception_or_opts, opts = {})
  # Normalize the first argument into the opts hash.
  if exception_or_opts.is_a?(Exception)
    opts[:exception] = exception_or_opts
  elsif exception_or_opts.respond_to?(:to_hash)
    opts.merge!(exception_or_opts.to_hash)
  else
    opts[:error_message] = exception_or_opts.to_s
  end

  validate_notify_opts!(opts)

  # Fall back to request/context data captured earlier on this thread.
  opts[:rack_env] ||= context_manager.get_rack_env
  opts[:global_context] ||= context_manager.get_context

  notice = Notice.new(config, opts)

  # before_notify hooks may mutate or halt the notice; errors raised by a
  # hook are contained by with_error_handling.
  config.before_notify_hooks.each do |hook|
    break if notice.halted?
    with_error_handling { hook.call(notice) }
  end

  unless notice.api_key =~ NOT_BLANK
    error { sprintf('Unable to send error report: API key is missing. id=%s', notice.id) }
    return false
  end

  # :force bypasses the ignore check.
  if !opts[:force] && notice.ignore?
    debug { sprintf('ignore notice feature=notices id=%s', notice.id) }
    return false
  end

  if notice.halted?
    debug { 'halted notice feature=notices' }
    return false
  end

  info { sprintf('Reporting error id=%s', notice.id) }

  # :sync reports on the current thread; otherwise enqueue for the worker.
  if opts[:sync]
    send_now(notice)
  else
    push(notice)
  end

  notice.id
end
ruby
{ "resource": "" }
q15701
Honeybadger.Agent.check_in
train
# Report a check-in to the backend. Accepts either a bare check-in id or
# a full check-in URL: the id is the last path segment after trimming
# whitespace and any trailing slash.
#
# Returns true when the backend acknowledged the check-in.
def check_in(id)
  token = id.to_s.strip.gsub(/\/$/, '').split('/').last
  result = backend.check_in(token)
  result.success?
end
ruby
{ "resource": "" }
q15702
Honeybadger.Backtrace.to_ary
train
# Serialize at most the first 1000 backtrace lines into an array of
# hashes with the filtered number/file/method plus raw source.
def to_ary
  capped = lines.take(1000)
  capped.map do |line|
    {
      :number => line.filtered_number,
      :file => line.filtered_file,
      :method => line.filtered_method,
      :source => line.source
    }
  end
end
ruby
{ "resource": "" }
q15703
Gzr.Command.keys_to_keep
train
# Resolve the writable body keys for an SDK operation: locate its single
# body parameter, follow the schema $ref into the swagger document, and
# return the non-readOnly property names.
#
# Returns [] (after reporting an error) when the operation is unknown.
def keys_to_keep(operation)
  op = @sdk.operations[operation]
  unless op
    say_error "Operation #{operation} not found"
    return []
  end

  body_params = op[:info][:parameters].select { |p| p[:in] == "body" && p[:schema] }
  unless body_params.length == 1
    say_warning "Expecting exactly one body parameter with a schema for operation #{operation}"
  end

  # $ref looks like "#/definitions/Name"; walk the swagger doc to that schema.
  ref_path = body_params[0][:schema][:$ref].split(/\//)
  schema = @sdk.swagger[ref_path[1].to_sym][ref_path[2].to_sym]
  schema[:properties].reject { |_name, spec| spec[:readOnly] }.keys
end
ruby
{ "resource": "" }
q15704
Gzr.Command.render_csv
train
# Render a table object (responding to #header and #each) as CSV text
# with CRLF line endings. @options[:plain] suppresses the header row.
def render_csv(t)
  # CSV-quote a cell: wrap in double quotes and double embedded quotes;
  # nil/false cells render as empty fields.
  quote = ->(v) { v ? "\"#{v.to_s.gsub(/"/, '""')}\"" : "" }

  buffer = StringIO.new
  buffer.puts(t.header.collect { |cell| quote.call(cell) }.join(',')) unless @options[:plain]
  t.each do |row|
    # Table iteration may yield the header row again; skip it.
    next if row === t.header
    buffer.puts(row.collect { |cell| quote.call(cell) }.join(','))
  end
  buffer.rewind
  # Normalize to CRLF line endings.
  buffer.gets(nil).encode(crlf_newline: true)
end
ruby
{ "resource": "" }
q15705
Gzr.Command.field_names
train
# Parse a fields expression like "id,user(name,email)" into dotted field
# paths: ["id", "user.name", "user.email"]. Parentheses nest sub-fields
# under the preceding token.
#
# @param opt_fields [String] comma-separated field list, possibly nested
# @return [Array<String>] fully-qualified dotted field names
def field_names(opt_fields)
  fields = []
  token_stack = []
  last_token = false
  tokens = opt_fields.split(/(\(|,|\))/)
  tokens << nil # sentinel so the final pending token gets flushed
  tokens.each do |t|
    if t.nil? then
      fields << (token_stack + [last_token]).join('.') if last_token
    elsif t.empty? then
      next
    elsif t == ',' then
      fields << (token_stack + [last_token]).join('.') if last_token
      # Reset so a trailing/duplicate comma cannot emit the field twice.
      last_token = false
    elsif t == '(' then
      token_stack.push(last_token)
      # Reset so "a()" cannot emit the spurious field "a.a".
      last_token = false
    elsif t == ')' then
      fields << (token_stack + [last_token]).join('.') if last_token
      token_stack.pop
      last_token = false
    else
      last_token = t
    end
  end
  fields
end
ruby
{ "resource": "" }
q15706
Traject::Macros.Marc21Semantics.oclcnum
train
# Macro: returns an indexing step that extracts OCLC numbers from the
# given MARC fields (default "035a"), normalized through
# Marc21Semantics.oclcnum_extract and de-duplicated.
def oclcnum(extract_fields = "035a")
  extractor = MarcExtractor.new(extract_fields, :separator => nil)
  lambda do |record, accumulator|
    # Normalize each raw value; entries oclcnum_extract rejects become
    # nil and are compacted away.
    list = extractor.extract(record).collect! do |o|
      Marc21Semantics.oclcnum_extract(o)
    end.compact
    accumulator.concat list.uniq if list
  end
end
ruby
{ "resource": "" }
q15707
Traject::Macros.Marc21Semantics.marc_sortable_title
train
# Macro: returns an indexing step that appends the record's sortable
# title (as derived by Marc21Semantics.get_sortable_title), when present.
def marc_sortable_title
  ->(record, accumulator) do
    sortable = Marc21Semantics.get_sortable_title(record)
    accumulator << sortable if sortable
  end
end
ruby
{ "resource": "" }
q15708
Traject::Macros.Marc21Semantics.marc_publication_date
train
# Macro: returns an indexing step that derives a single publication year
# from the record via Marc21Semantics.publication_date.
#
# options:
#   :estimate_tolerance - max spread for estimated date ranges (default 15)
#   :min_year           - reject years below this (default 500)
#   :max_year           - reject years above this (default 6 years out)
def marc_publication_date(options = {})
  estimate_tolerance = options[:estimate_tolerance] || 15
  min_year = options[:min_year] || 500
  max_year = options[:max_year] || (Time.new.year + 6)
  ->(record, accumulator) do
    year = Marc21Semantics.publication_date(record, estimate_tolerance, min_year, max_year)
    accumulator << year if year
  end
end
ruby
{ "resource": "" }
q15709
Traject::Macros.Marc21Semantics.marc_geo_facet
train
# Macro: returns an indexing step that builds geographic facet values
# from 043a codes (via the marc_geographic translation map), LCSH
# geographic headings ($a), and geographic subdivisions ($z).
def marc_geo_facet(options = {})
  marc_geo_map = Traject::TranslationMap.new("marc_geographic")
  a_fields_spec = options[:geo_a_fields] || "651a:691a"
  z_fields_spec = options[:geo_z_fields] || "600:610:611:630:648:650:654:655:656:690:651:691"
  extractor_043a = MarcExtractor.new("043a", :separator => nil)
  extractor_a_fields = MarcExtractor.new(a_fields_spec, :separator => nil)
  extractor_z_fields = MarcExtractor.new(z_fields_spec)
  lambda do |record, accumulator|
    accumulator.concat(
      extractor_043a.extract(record).collect do |code|
        # remove any trailing hyphens, then map
        marc_geo_map[code.gsub(/\-+\Z/, '')]
      end.compact
    )
    #LCSH 651a and 691a go in more or less normally.
    accumulator.concat(
      extractor_a_fields.extract(record).collect do |s|
        # remove trailing periods, which they sometimes have if they were
        # at end of LCSH.
        # NOTE(review): /\. */ is unanchored, so this strips the FIRST
        # period+spaces anywhere in the string, unlike the \Z-anchored
        # gsub used for $z below — confirm whether it should be anchored.
        s.sub(/\. */, '')
      end
    )
    # fields we take z's from have a bit more normalization
    extractor_z_fields.each_matching_line(record) do |field, spec, extractor|
      z_fields = field.subfields.find_all {|sf| sf.code == "z"}.collect {|sf| sf.value }
      # depending on position in total field, may be a period on the end
      # we want to remove.
      z_fields.collect! {|s| s.gsub(/\. *\Z/, '')}
      if z_fields.length == 2
        # normalize subdivision as parenthetical
        accumulator << "#{z_fields[1]} (#{z_fields[0]})"
        # and 'post up'
        accumulator << z_fields[0]
      else
        # just add all the z's if there's 1 or more than 2.
        accumulator.concat z_fields
      end
    end
    accumulator.uniq!
  end
end
ruby
{ "resource": "" }
q15710
Traject::Macros.Marc21Semantics.marc_lcsh_formatted
train
# Macro: returns an indexing step that assembles pre-composed LCSH
# heading strings from subject fields, joining subdivisions with
# subd_separator and other subfields with other_separator.
#
# NOTE(review): the option key :subdivison_separator is misspelled but
# appears to be the accepted public key — do not rename without keeping
# the old key working.
def marc_lcsh_formatted(options = {})
  spec = options[:spec] || "600:610:611:630:648:650:651:654:662"
  subd_separator = options[:subdivison_separator] || " — "
  other_separator = options[:other_separator] || " "
  extractor = MarcExtractor.new(spec)
  return lambda do |record, accumulator|
    accumulator.concat(
      extractor.collect_matching_lines(record) do |field, spec|
        Marc21Semantics.assemble_lcsh(field, subd_separator, other_separator)
      end)
  end
end
ruby
{ "resource": "" }
q15711
Traject.TranslationMap.translate_array
train
# Translate every element of the array through this map's single-value
# lookup. A lookup may yield an Array (spliced into the output), nil
# (dropped), or a single value (appended).
def translate_array(array)
  result = []
  array.each do |element|
    translated = self.map(element)
    case translated
    when nil then next
    when Array then result.concat(translated)
    else result << translated
    end
  end
  result
end
ruby
{ "resource": "" }
q15712
Traject.TranslationMap.merge
train
# Combine two translation maps into a new one; entries from other_map
# win on key collisions, as does its default (falling back to ours).
def merge(other_map)
  chosen_default = other_map.default || self.default
  combined = self.to_hash.merge(other_map.to_hash)
  TranslationMap.new(combined, :default => chosen_default)
end
ruby
{ "resource": "" }
q15713
Traject::Macros.Marc21.extract_all_marc_values
train
# Macro: returns an indexing step that collects all subfield values of
# every data field whose tag falls within [:from, :to] (string compare,
# defaults "100".."899"), joined by :separator or concatenated raw.
#
# Raises RuntimeError on unknown option keys and ArgumentError when
# :from/:to are not strings.
def extract_all_marc_values(options = {})
  unless (options.keys - EXTRACT_ALL_MARC_VALID_OPTIONS).empty?
    raise RuntimeError.new("Illegal/Unknown argument '#{(options.keys - EXTRACT_ALL_MARC_VALID_OPTIONS).join(', ')}' in extract_all_marc at #{Traject::Util.extract_caller_location(caller.first)}")
  end
  options = {:from => "100", :to => "899", :separator => ' '}.merge(options)
  if [options[:from], options[:to]].map{|x| x.is_a? String}.any?{|x| x == false}
    raise ArgumentError.new("from/to options to extract_all_marc_values must be strings")
  end
  lambda do |record, accumulator, context|
    record.each do |field|
      # Lexicographic tag comparison matches numeric order for 3-digit
      # MARC tags.
      next unless field.tag >= options[:from] && field.tag <= options[:to]
      subfield_values = field.subfields.collect {|sf| sf.value}
      next unless subfield_values.length > 0
      if options[:separator]
        accumulator << subfield_values.join( options[:separator])
      else
        accumulator.concat subfield_values
      end
    end
  end
end
ruby
{ "resource": "" }
q15714
Traject.OaiPmhNokogiriReader.http_client
train
# Lazily build (and memoize) the http.rb client used to fetch OAI-PMH
# pages, honoring the oai_pmh.try_gzip and oai_pmh.http_persistent
# settings.
def http_client
  @http_client ||= begin
    # timeout setting on http.rb seems to be a mess.
    # https://github.com/httprb/http/issues/488
    client = HTTP.timeout(:global, write: timeout / 3, connect: timeout / 3, read: timeout / 3)

    if settings["oai_pmh.try_gzip"]
      client = client.use(:auto_inflate).headers("accept-encoding" => "gzip;q=1.0, identity;q=0.5")
    end

    if settings["oai_pmh.http_persistent"]
      # Persistent connections are keyed to the start URL's origin.
      parsed_uri = URI.parse(start_url)
      client = client.persistent("#{parsed_uri.scheme}://#{parsed_uri.host}")
    end

    client
  end
end
ruby
{ "resource": "" }
q15715
Inky.ComponentFactory._transform_columns
train
# Transform an Inky column element into a table-based <th> column.
# Width classes come from the small/large attributes; the small default
# is the full grid width and the large default is an equal share of the
# parent row's columns.
def _transform_columns(component, inner)
  col_count = component.parent.elements.size

  small_val = component.attr('small')
  large_val = component.attr('large')

  small_size = small_val || column_count
  large_size = large_val || small_val || (column_count / col_count).to_i
  classes = _combine_classes(component, "small-#{small_size} large-#{large_size} columns")
  # Mark the outermost columns so gutters can be styled.
  classes << ' first' unless component.previous_element
  classes << ' last' unless component.next_element
  subrows = component.elements.css(".row").to_a.concat(component.elements.css("row").to_a)
  # Expander cell is only emitted for a full-width column with no nested
  # rows — presumably to keep the column stretched in table layouts;
  # confirm against Inky's column spec.
  expander = %{<th class="expander"></th>} if large_size.to_i == column_count && subrows.empty?
  %{<#{INTERIM_TH_TAG} class="#{classes}" #{_pass_through_attributes(component)}><table><tr><th>#{inner}</th>#{expander}</tr></table></#{INTERIM_TH_TAG}>}
end
ruby
{ "resource": "" }
q15716
Gollum.Committer.index
train
# Lazily build (and memoize) the git index for this commit, pre-loaded
# from options[:tree] when given, otherwise from the first parent's tree.
def index
  @index ||= begin
    idx = @wiki.repo.index
    if (tree = options[:tree])
      idx.read_tree(tree)
    elsif (parent = parents.first)
      idx.read_tree(parent.tree.id)
    end
    idx
  end
end
ruby
{ "resource": "" }
q15717
Gollum.Committer.add_to_index
train
# Stage a new page file in the git index.
#
# dir    - page directory; spaces are replaced in place with dashes
# name   - page name; spaces are replaced in place with dashes
# format - page format used to derive the on-disk file name
# data   - raw page contents
# allow_same_ext - when true, an existing page with the same name AND the
#   same extension is tolerated; otherwise any same-name page conflicts
#
# Raises DuplicatePageError when a conflicting page already exists.
def add_to_index(dir, name, format, data, allow_same_ext = false)
  # spaces must be dashes
  dir.gsub!(' ', '-')
  name.gsub!(' ', '-')
  path = @wiki.page_file_name(name, format)
  dir = '/' if dir.strip.empty?
  fullpath = ::File.join(*[dir, path])
  fullpath = fullpath[1..-1] if fullpath =~ /^\//
  # Locate the target subtree (under page_file_dir, then dir) in the
  # current index tree, if it exists yet.
  if index.current_tree && (tree = index.current_tree / (@wiki.page_file_dir || '/'))
    tree = tree / dir unless tree.nil?
  end
  if tree
    downpath = path.downcase.sub(/\.\w+$/, '')
    tree.blobs.each do |blob|
      # A file pending deletion in this commit cannot conflict.
      next if page_path_scheduled_for_deletion?(index.tree, fullpath)
      existing_file = blob.name.downcase.sub(/\.\w+$/, '')
      existing_file_ext = ::File.extname(blob.name).sub(/^\./, '')
      new_file_ext = ::File.extname(path).sub(/^\./, '')
      if downpath == existing_file && !(allow_same_ext && new_file_ext == existing_file_ext)
        raise DuplicatePageError.new(dir, blob.name, path)
      end
    end
  end
  # The index stores raw bytes.
  fullpath = fullpath.force_encoding('ascii-8bit') if fullpath.respond_to?(:force_encoding)
  begin
    data = @wiki.normalize(data)
  rescue ArgumentError => err
    # Swallow errors that arise from data being binary
    raise err unless err.message.include?('invalid byte sequence')
  end
  index.add(fullpath, data)
end
ruby
{ "resource": "" }
q15718
Gollum.Committer.commit
train
# Write the staged index as a commit on the wiki's ref, then notify all
# registered post-commit callbacks with (committer, sha).
#
# Returns the new commit sha.
def commit
  sha1 = index.commit(@options[:message], parents, actor, nil, @wiki.ref)
  @callbacks.each { |callback| callback.call(self, sha1) }
  sha1
end
ruby
{ "resource": "" }
q15719
Gollum.Committer.file_path_scheduled_for_deletion?
train
# Walk the nested tree map along the slash-separated path; the file is
# scheduled for deletion when its leaf entry maps to a falsy value.
def file_path_scheduled_for_deletion?(map, path)
  head, *tail = path.split('/')
  if tail.empty?
    map.any? { |key, value| key == head && !value }
  else
    subtree = map[head]
    subtree ? file_path_scheduled_for_deletion?(subtree, tail.join('/')) : false
  end
end
ruby
{ "resource": "" }
q15720
Gollum.Committer.method_missing
train
# Proxy unknown calls to the underlying git index, first coercing any
# string-like arguments to binary (ASCII-8BIT) since the index operates
# on raw bytes.
def method_missing(name, *args)
  coerced = args.map! do |arg|
    arg.respond_to?(:force_encoding) ? arg.force_encoding('ascii-8bit') : arg
  end
  index.send(name, *coerced)
end
ruby
{ "resource": "" }
q15721
Gollum.File.find
train
# Locate a file by exact (case-insensitive) path at the given version
# and populate this object with it.
#
# try_on_disk - when true and a disk reference resolves, mark @on_disk
#   instead of loading the blob from git.
#
# Returns self when found, nil otherwise.
def find(name, version, try_on_disk = false)
  checked = name.downcase
  map = @wiki.tree_map_for(version)
  # Accept either a commit object or a ref/sha that resolves to one.
  commit = version.is_a?(Gollum::Git::Commit) ? version : @wiki.commit_for(version)
  if (result = map.detect { |entry| entry.path.downcase == checked })
    @path = name
    @version = commit
    if try_on_disk && get_disk_reference(name, commit)
      @on_disk = true
    else
      @blob = result.blob(@wiki.repo)
    end
    self
  end
end
ruby
{ "resource": "" }
q15722
Gollum.BlobEntry.page
train
# Materialize this blob entry as a wiki Page bound to the given commit.
def page(wiki, commit)
  raw = self.blob(wiki.repo)
  built = wiki.page_class.new(wiki).populate(raw, self.dir)
  built.version = commit
  built
end
ruby
{ "resource": "" }
q15723
Gollum.BlobEntry.file
train
# Materialize this blob entry as a wiki File bound to the given commit.
def file(wiki, commit)
  raw = self.blob(wiki.repo)
  built = wiki.file_class.new(wiki).populate(raw, self.dir)
  built.version = commit
  built
end
ruby
{ "resource": "" }
q15724
Gollum.Markup.render_default
train
# Render raw data through a default filter chain, without needing a wiki
# or page object.
def render_default(data, format=:markdown, name='render_default.md')
  # set instance vars so we're able to render data without a wiki or page.
  @format = format
  @name = name

  default_chain = [:Metadata, :PlainText, :Emoji, :TOC, :RemoteCode, :Code, :Sanitize, :WSD, :Tags, :Render]
  filters = default_chain.map { |sym| Gollum::Filter.const_get(sym).new(self) }

  process_chain data, filters
end
ruby
{ "resource": "" }
q15725
Gollum.Markup.process_chain
train
# Run data through the filter chain: extract forward, process backward.
def process_chain(data, filter_chain)
  # Extract pass: each filter pulls out content later filters must not
  # touch.
  filter_chain.each { |filter| data = filter.extract(data) }

  # Process pass, in reverse, so each filter re-inserts what it extracted.
  filter_chain.reverse_each { |filter| data = filter.process(data) }

  # Finally, a little bit of cleanup: drop empty paragraphs.
  data.gsub!(/<p><\/p>/) do
    ''
  end

  data
end
ruby
{ "resource": "" }
q15726
Gollum.Markup.render
train
# Render this markup to HTML through the wiki's configured filter chain.
#
# no_follow      - use the history sanitizer when true
# encoding       - forced output encoding, or nil
# include_levels - max depth for included pages
#
# Yields a Nokogiri fragment when a block is given.
# NOTE(review): the fragment is parsed from the UNPROCESSED data copy —
# the comment below suggests post-extract HTML was intended; confirm.
def render(no_follow = false, encoding = nil, include_levels = 10)
  @sanitize = no_follow ? @wiki.history_sanitizer : @wiki.sanitizer

  @encoding = encoding
  @include_levels = include_levels

  data = @data.dup
  filter_chain = @wiki.filter_chain.map do |r|
    Gollum::Filter.const_get(r).new(self)
  end

  # Since the last 'extract' action in our chain *should* be the markup
  # to HTML converter, we now have HTML which we can parse and yield, for
  # anyone who wants it
  if block_given?
    yield Nokogiri::HTML::DocumentFragment.parse(data)
  end

  process_chain data, filter_chain
end
ruby
{ "resource": "" }
q15727
Gollum.Markup.find_file
train
# Resolve a file reference relative to this page's directory; a leading
# slash means wiki-root-relative.
def find_file(name, version=@version)
  if name =~ /^\//
    @wiki.file(name[1..-1], version)
  else
    relative = @dir == '.' ? name : ::File.join(@dir, name)
    @wiki.file(relative, version)
  end
end
ruby
{ "resource": "" }
q15728
Gollum.Wiki.remove_filter
train
# Remove a named filter from this wiki's filter chain.
#
# @raise [ArgumentError] if name is not a Symbol, or is not in the chain
def remove_filter(name)
  raise ArgumentError, "Invalid filter name #{name.inspect} (must be a symbol)" unless name.is_a? Symbol
  raise ArgumentError, "#{name.inspect} not found in filter chain" unless @filter_chain.delete(name)
end
ruby
{ "resource": "" }
q15729
Gollum.Wiki.tree_list
train
# List all valid pages reachable at the given ref.
#
# Returns an Array of Page objects, or [] when the ref cannot be
# resolved to a sha.
def tree_list(ref)
  if (sha = @access.ref_to_sha(ref))
    commit = @access.commit(sha)
    tree_map_for(sha).inject([]) do |list, entry|
      # Skip blobs whose names are not valid page names.
      next list unless @page_class.valid_page_name?(entry.name)
      list << entry.page(self, commit)
    end
  else
    []
  end
end
ruby
{ "resource": "" }
q15730
Gollum.Wiki.file_list
train
# List all non-page files at the given ref. Underscore-prefixed names
# and valid page names are excluded.
#
# Returns an Array of File objects, or [] when the ref cannot be
# resolved to a sha.
def file_list(ref)
  if (sha = @access.ref_to_sha(ref))
    commit = @access.commit(sha)
    tree_map_for(sha).inject([]) do |list, entry|
      next list if entry.name.start_with?('_')
      next list if @page_class.valid_page_name?(entry.name)
      list << entry.file(self, commit)
    end
  else
    []
  end
end
ruby
{ "resource": "" }
q15731
Gollum.Wiki.tree_map_for
train
# Fetch the BlobEntry tree listing for a ref. When asked to ignore the
# configured page_file_dir, a separate repository-root GitAccess is used
# (and memoized) so paths are relative to the repo root.
#
# Returns [] when the ref does not resolve to a known sha.
def tree_map_for(ref, ignore_page_file_dir=false)
  if ignore_page_file_dir && !@page_file_dir.nil?
    @root_access ||= GitAccess.new(path, nil, @repo_is_bare)
    @root_access.tree(ref)
  else
    @access.tree(ref)
  end
rescue Gollum::Git::NoSuchShaFound
  []
end
ruby
{ "resource": "" }
q15732
Gollum.GitAccess.get_cache
train
# Read-through cache lookup against the @<name>_map instance variable.
# On a miss, the block (if given) computes the value, which is stored
# via set_cache. A stored :_nil sentinel stands for a cached nil.
def get_cache(name, key)
  store = instance_variable_get("@#{name}_map")
  value = store[key]
  if value.nil? && block_given?
    value = yield
    set_cache(name, key, value)
  end
  value == :_nil ? nil : value
end
ruby
{ "resource": "" }
q15733
Gollum.GitAccess.parse_tree_line
train
# Parse one `git ls-tree`-style output line into a BlobEntry.
# Fields are: mode, type, sha, size, then the name (which may itself
# contain spaces; runs of whitespace in the name collapse to one space).
def parse_tree_line(line)
  mode, _type, sha, size, *name_parts = line.split(/\s+/)
  BlobEntry.new(sha, name_parts.join(' '), size.to_i, mode.to_i(8))
end
ruby
{ "resource": "" }
q15734
Gollum.Page.find
train
# Find a page by name at the given version.
#
# dir   - optional directory to restrict the search to
# exact - require an exact directory match when true
#
# Returns the populated Page, or nil when not found; an unknown sha also
# yields nil via the trailing rescue.
def find(name, version, dir = nil, exact = false)
  map = @wiki.tree_map_for(version.to_s)
  if (page = find_page_in_tree(map, name, dir, exact))
    page.version = version.is_a?(Gollum::Git::Commit) ? version : @wiki.commit_for(version)
    # Flag set when the requested version string equals the resolved
    # commit's string form (i.e. a concrete sha was requested).
    page.historical = page.version.to_s == version.to_s
    page
  end
rescue Gollum::Git::NoSuchShaFound
end
ruby
{ "resource": "" }
q15735
Gollum.Page.find_page_in_tree
train
# Search a BlobEntry map for a page whose (directory-qualified) path
# matches `name` according to page_match.
#
# Returns the populated Page, or nil when nothing matches.
def find_page_in_tree(map, name, checked_dir = nil, exact = false)
  return nil if !map || name.to_s.empty?

  checked_dir = BlobEntry.normalize_dir(checked_dir)
  # exact + nil dir means "root only" rather than "match anywhere".
  checked_dir = '' if exact && checked_dir.nil?
  name = ::File.join(checked_dir, name) if checked_dir

  map.each do |entry|
    next if entry.name.to_s.empty?
    path = checked_dir ? ::File.join(entry.dir, entry.name) : entry.name
    next unless page_match(name, path)
    return entry.page(@wiki, @version)
  end

  return nil # nothing was found
end
ruby
{ "resource": "" }
q15736
Gollum.Page.tree_path
train
# Build the slash-separated path of `tree` by following parent links in
# treemap until the root (which has no parent entry).
def tree_path(treemap, tree)
  parent = treemap[tree]
  parent ? tree_path(treemap, parent) + '/' + tree.name : ''
end
ruby
{ "resource": "" }
q15737
Gollum.Page.page_match
train
# Compare a requested page name against a candidate tree path: the path
# must be a valid page filename, and its canonical name must equal the
# requested name (case-insensitively) under some configured whitespace
# substitution.
def page_match(name, path)
  if (match = self.class.valid_filename?(path))
    @wiki.ws_subs.each do |sub|
      return true if Page.cname(name).downcase == Page.cname(match, sub).downcase
    end
  end
  false
end
ruby
{ "resource": "" }
q15738
ScimRails.Response.find_value
train
# Recursively resolve a SCIM response-mapping template against a user:
# Hashes/Arrays are walked, Symbols become user attribute reads, and any
# other value is returned verbatim.
def find_value(user, object)
  case object
  when Hash
    object.each_with_object({}) do |(key, value), resolved|
      resolved[key] = find_value(user, value)
    end
  when Array
    object.map { |value| find_value(user, value) }
  when Symbol
    user.public_send(object)
  else
    object
  end
end
ruby
{ "resource": "" }
q15739
ScimRails.ScimUsersController.path_for
train
# Depth-first search through the (nested Hash/Array) mutable-attributes
# schema for the location of `attribute`.
#
# Returns the dig-path (Array of keys/indices) to the attribute, or nil
# when it does not appear.
def path_for(attribute, object = ScimRails.config.mutable_user_attributes_schema, path = [])
  node = path.empty? ? object : object.dig(*path)
  return path if node == attribute

  case node
  when Hash
    node.each_key do |key|
      found = path_for(attribute, object, [*path, key])
      return found if found
    end
    nil
  when Array
    node.each_index do |index|
      found = path_for(attribute, object, [*path, index])
      return found if found
    end
    nil
  end
end
ruby
{ "resource": "" }
q15740
DocusignRest.Client.send_envelope
train
# Transition a draft envelope to 'sent' via a PUT to the envelope
# resource.
#
# Returns the parsed JSON response body.
# NOTE(review): unlike the sibling endpoint methods, this one does not
# call generate_log — confirm whether that omission is intentional.
def send_envelope(envelope_id)
  content_type = { 'Content-Type' => 'application/json' }
  post_body = { status: 'sent' }.to_json

  uri = build_uri("/accounts/#{acct_id}/envelopes/#{envelope_id}")

  http = initialize_net_http_ssl(uri)
  request = Net::HTTP::Put.new(uri.request_uri, headers(content_type))
  request.body = post_body

  response = http.request(request)
  JSON.parse(response.body)
end
ruby
{ "resource": "" }
q15741
DocusignRest.Client.get_recipient_view
train
# Request an embedded-signing (recipient view) URL for an envelope
# recipient.
#
# options: :envelope_id, :email, :name, :return_url, optional :client_id
# (falls back to :email) and optional :headers merged into the request
# headers.
#
# Returns the parsed JSON response body.
def get_recipient_view(options={})
  content_type = { 'Content-Type' => 'application/json' }
  # merge! (not merge) so caller-supplied headers are actually applied;
  # the previous non-destructive merge discarded its result.
  content_type.merge!(options[:headers]) if options[:headers]

  post_body = {
    authenticationMethod: 'email',
    clientUserId: options[:client_id] || options[:email],
    email: options[:email],
    returnUrl: options[:return_url],
    userName: options[:name]
  }.to_json

  uri = build_uri("/accounts/#{acct_id}/envelopes/#{options[:envelope_id]}/views/recipient")

  http = initialize_net_http_ssl(uri)
  request = Net::HTTP::Post.new(uri.request_uri, headers(content_type))
  request.body = post_body

  response = http.request(request)
  generate_log(request, response, uri)
  JSON.parse(response.body)
end
ruby
{ "resource": "" }
q15742
DocusignRest.Client.get_envelope_recipients
train
# Fetch the recipients of an envelope, optionally including tab and
# extended recipient data.
#
# options: :envelope_id, optional :include_tabs / :include_extended
# booleans, optional :headers merged into the request headers.
#
# Returns the parsed JSON response body.
def get_envelope_recipients(options={})
  content_type = { 'Content-Type' => 'application/json' }
  # merge! so caller-supplied headers take effect; the previous
  # non-destructive merge discarded its result.
  content_type.merge!(options[:headers]) if options[:headers]

  include_tabs = options[:include_tabs] || false
  include_extended = options[:include_extended] || false
  uri = build_uri("/accounts/#{acct_id}/envelopes/#{options[:envelope_id]}/recipients?include_tabs=#{include_tabs}&include_extended=#{include_extended}")

  http = initialize_net_http_ssl(uri)
  request = Net::HTTP::Get.new(uri.request_uri, headers(content_type))

  response = http.request(request)
  generate_log(request, response, uri)
  JSON.parse(response.body)
end
ruby
{ "resource": "" }
q15743
DocusignRest.Client.get_page_image
train
# Fetch the rendered page image for one page of one document in an
# envelope.
#
# options: :envelope_id, :document_id, :page_number
#
# Returns the raw response body (image bytes).
def get_page_image(options={})
  env_id = options[:envelope_id]
  doc_id = options[:document_id]
  page_no = options[:page_number]

  uri = build_uri("/accounts/#{acct_id}/envelopes/#{env_id}/documents/#{doc_id}/pages/#{page_no}/page_image")

  http = initialize_net_http_ssl(uri)
  request = Net::HTTP::Get.new(uri.request_uri, headers)
  response = http.request(request)
  generate_log(request, response, uri)
  response.body
end
ruby
{ "resource": "" }
q15744
DocusignRest.Client.get_document_from_envelope
train
# Download one document from an envelope, either returning the raw bytes
# (:return_stream) or writing them to :local_save_path (creating parent
# directories as needed).
#
# options: :envelope_id, :document_id, :return_stream or
# :local_save_path, optional :headers merged into the request headers.
def get_document_from_envelope(options={})
  content_type = { 'Content-Type' => 'application/json' }
  # merge! so caller-supplied headers take effect; the previous
  # non-destructive merge discarded its result.
  content_type.merge!(options[:headers]) if options[:headers]

  uri = build_uri("/accounts/#{acct_id}/envelopes/#{options[:envelope_id]}/documents/#{options[:document_id]}")

  http = initialize_net_http_ssl(uri)
  request = Net::HTTP::Get.new(uri.request_uri, headers(content_type))

  response = http.request(request)
  generate_log(request, response, uri)
  return response.body if options[:return_stream]

  split_path = options[:local_save_path].split('/')
  split_path.pop #removes the document name and extension from the array
  path = split_path.join("/") #rejoins the array to form path to the folder that will contain the file

  FileUtils.mkdir_p(path)
  File.open(options[:local_save_path], 'wb') do |output|
    output << response.body
  end
end
ruby
{ "resource": "" }
q15745
DocusignRest.Client.delete_envelope_recipient
train
# Remove a signer recipient from an envelope.
#
# options: :envelope_id, :recipient_id, optional :headers merged into
# the request headers.
#
# Returns the parsed JSON response body.
def delete_envelope_recipient(options={})
  content_type = {'Content-Type' => 'application/json'}
  # merge! so caller-supplied headers take effect; the previous
  # non-destructive merge discarded its result.
  content_type.merge!(options[:headers]) if options[:headers]

  uri = build_uri("/accounts/#{@acct_id}/envelopes/#{options[:envelope_id]}/recipients")

  post_body = "{ \"signers\" : [{\"recipientId\" : \"#{options[:recipient_id]}\"}] }"

  http = initialize_net_http_ssl(uri)
  request = Net::HTTP::Delete.new(uri.request_uri, headers(content_type))
  request.body = post_body

  response = http.request(request)
  generate_log(request, response, uri)
  JSON.parse(response.body)
end
ruby
{ "resource": "" }
q15746
DocusignRest.Client.void_envelope
train
# Void an in-flight envelope, with an optional reason.
#
# options: :envelope_id, optional :voided_reason, optional :headers
# merged into the request headers.
#
# Returns the raw Net::HTTP response.
def void_envelope(options = {})
  content_type = { 'Content-Type' => 'application/json' }
  # merge! so caller-supplied headers take effect; the previous
  # non-destructive merge discarded its result.
  content_type.merge!(options[:headers]) if options[:headers]

  post_body = {
    "status" =>"voided",
    "voidedReason" => options[:voided_reason] || "No reason provided."
  }.to_json

  uri = build_uri("/accounts/#{acct_id}/envelopes/#{options[:envelope_id]}")

  http = initialize_net_http_ssl(uri)
  request = Net::HTTP::Put.new(uri.request_uri, headers(content_type))
  request.body = post_body

  response = http.request(request)
  generate_log(request, response, uri)
  response
end
ruby
{ "resource": "" }
q15747
Ethereum.Block.validate_uncles
train
# Validate this block's uncle headers: uncles_hash integrity, count
# limit, known parents, correct difficulty/number/timestamp/PoW, an
# eligible ancestor, and no duplicates.
#
# Returns true when all uncles are valid, false otherwise. Raises
# InvalidUncles when an uncle's prevhash is unknown and ValueError when
# the ancestor chain length is inconsistent.
def validate_uncles
  return false if Utils.keccak256_rlp(uncles) != uncles_hash
  return false if uncles.size > config[:max_uncles]

  uncles.each do |uncle|
    raise InvalidUncles, "Cannot find uncle prevhash in db" unless db.include?(uncle.prevhash)
    if uncle.number == number
      logger.error "uncle at same block height", block: self
      return false
    end
  end

  max_uncle_depth = config[:max_uncle_depth]
  ancestor_chain = [self] + get_ancestor_list(max_uncle_depth+1)
  raise ValueError, "invalid ancestor chain" unless ancestor_chain.size == [number+1, max_uncle_depth+2].min

  # Uncles of this block cannot be direct ancestors and cannot also be
  # uncles included 1-6 blocks ago.
  ineligible = []
  ancestor_chain.safe_slice(1..-1).each {|a| ineligible.concat a.uncles }
  ineligible.concat(ancestor_chain.map {|a| a.header })

  # An uncle's parent must be an ancestor at depth >= 2.
  eligible_ancestor_hashes = ancestor_chain.safe_slice(2..-1).map(&:full_hash)

  uncles.each do |uncle|
    parent = Block.find env, uncle.prevhash
    return false if uncle.difficulty != Block.calc_difficulty(parent, uncle.timestamp)
    return false if uncle.number != parent.number + 1
    return false if uncle.timestamp < parent.timestamp
    return false unless uncle.check_pow

    unless eligible_ancestor_hashes.include?(uncle.prevhash)
      eligible = eligible_ancestor_hashes.map {|h| Utils.encode_hex(h) }
      logger.error "Uncle does not have a valid ancestor", block: self, eligible: eligible, uncle_prevhash: Utils.encode_hex(uncle.prevhash)
      return false
    end

    if ineligible.include?(uncle)
      logger.error "Duplicate uncle", block: self, uncle: Utils.encode_hex(Utils.keccak256_rlp(uncle))
      return false
    end

    # FIXME: what if uncles include previously rewarded uncle?
    ineligible.push uncle
  end

  true
end
ruby
{ "resource": "" }
q15748
Ethereum.Block.add_transaction_to_list
train
# Append a transaction (and its freshly built receipt) to this block's
# transaction/receipt tries, folding the receipt's bloom into the block
# bloom and bumping the transaction count.
def add_transaction_to_list(tx)
  # Both tries are keyed by the RLP-encoded transaction index.
  k = RLP.encode @transaction_count
  @transactions[k] = RLP.encode(tx)

  r = mk_transaction_receipt tx
  @receipts[k] = RLP.encode(r)

  self.bloom |= r.bloom
  @transaction_count += 1
end
ruby
{ "resource": "" }
q15749
Ethereum.Block.get_transaction
train
# Look up the num-th transaction in this block's transaction trie.
#
# @raise [IndexError] when no transaction exists at that index
def get_transaction(num)
  key = RLP.encode num
  encoded = @transactions.get key
  raise IndexError, "Transaction does not exist" if encoded == Trie::BLANK_NODE
  RLP.decode encoded, sedes: Transaction
end
ruby
{ "resource": "" }
q15750
Ethereum.Block.finalize
train
# Apply the miner's block reward (plus a nephew reward per included
# uncle) and each uncle miner's depth-scaled reward, then commit the
# account state.
def finalize
  delta = @config[:block_reward] + @config[:nephew_reward] * uncles.size
  delta_balance coinbase, delta
  self.ether_delta += delta

  uncles.each do |uncle|
    # Uncle reward shrinks linearly with depth below this block.
    r = @config[:block_reward] * (@config[:uncle_depth_penalty_factor] + uncle.number - number) / @config[:uncle_depth_penalty_factor]
    delta_balance uncle.coinbase, r
    self.ether_delta += r
  end

  commit_state
end
ruby
{ "resource": "" }
q15751
Ethereum.Block.to_h
train
# Serialize this block into a Hash for inspection.
#
# with_state         - include a dump of all account state
# full_transactions  - embed full transaction dicts instead of tx hashes
# with_storage_roots - include each account's storage root (state dump)
# with_uncles        - include decoded uncle headers
def to_h(with_state: false, full_transactions: false, with_storage_roots: false, with_uncles: false)
  b = { header: header.to_h }

  txlist = []
  get_transactions.each_with_index do |tx, i|
    receipt_rlp = @receipts[RLP.encode(i)]
    receipt = RLP.decode receipt_rlp, sedes: Receipt
    txjson = full_transactions ? tx.to_h : tx.full_hash
    logs = receipt.logs.map {|l| Log.serialize(l) }
    txlist.push(
      tx: txjson,
      # Intermediate state root recorded after this transaction.
      medstate: Utils.encode_hex(receipt.state_root),
      gas: receipt.gas_used.to_s,
      logs: logs,
      bloom: Sedes.int256.serialize(receipt.bloom)
    )
  end
  b[:transactions] = txlist

  if with_state
    state_dump = {}
    @state.each do |address, v|
      state_dump[Utils.encode_hex(address)] = account_to_dict(address, with_storage_root: with_storage_roots)
    end
    b[:state] = state_dump
  end

  if with_uncles
    b[:uncles] = uncles.map {|u| RLP.decode(u, sedes: BlockHeader) }
  end

  b
end
ruby
{ "resource": "" }
q15752
Ethereum.Block.get_parent
train
# Fetch this block's parent.
#
# Raises UnknownParentError for the genesis block, and also when
# Block.find cannot resolve prevhash (its KeyError is translated).
def get_parent
  raise UnknownParentError, "Genesis block has no parent" if number == 0
  Block.find env, prevhash
rescue KeyError
  raise UnknownParentError, Utils.encode_hex(prevhash)
end
ruby
{ "resource": "" }
q15753
Ethereum.Block.chain_difficulty
train
# Total difficulty of the chain up to and including this block, computed
# recursively from the parent and cached in the db per block hash.
def chain_difficulty
  return difficulty if genesis?

  k = "difficulty:#{Utils.encode_hex(full_hash)}"
  return Utils.decode_int(db.get(k)) if db.has_key?(k)

  o = difficulty + get_parent.chain_difficulty
  # Stored temporarily so the value survives within this batch of work.
  @state.db.put_temporarily k, Utils.encode_int(o)
  o
end
ruby
{ "resource": "" }
q15754
Ethereum.Block.account_is_empty
train
# An account is "empty" when it has zero balance, no code and a zero
# nonce (presumably the EIP-161 definition — confirm against spec).
def account_is_empty(address)
  get_balance(address) == 0 &&
    get_code(address) == Constant::BYTE_EMPTY &&
    get_nonce(address) == 0
end
ruby
{ "resource": "" }
q15755
Ethereum.Block.snapshot
train
# Capture a snapshot of mutable block state for a later revert. Several
# entries hold live references (tries, journal) paired with a recorded
# size so revert can roll them back to this point.
def snapshot
  { state: @state.root_hash,
    gas: gas_used,
    txs: @transactions,
    txcount: @transaction_count,
    refunds: refunds,
    suicides: suicides,
    suicides_size: suicides.size,
    logs: logs,
    logs_size: logs.size,
    journal: @journal, # pointer to reference, so is not static
    journal_size: @journal.size,
    ether_delta: ether_delta }
end
ruby
{ "resource": "" }
q15756
Ethereum.Block.revert
train
# Roll block state back to a previously captured snapshot: undo journal
# entries recorded since the snapshot, truncate suicides/logs to their
# recorded sizes, restore counters, tries and the state root.
def revert(mysnapshot)
  logger.debug "REVERTING"

  @journal = mysnapshot[:journal]
  # if @journal changed after snapshot
  while @journal.size > mysnapshot[:journal_size]
    cache, index, prev, post = @journal.pop
    logger.debug "revert journal", cache: cache, index: index, prev: prev, post: post
    if prev
      @caches[cache][index] = prev
    else
      # Entry did not exist before; remove it entirely.
      @caches[cache].delete index
    end
  end

  self.suicides = mysnapshot[:suicides]
  suicides.pop while suicides.size > mysnapshot[:suicides_size]

  self.logs = mysnapshot[:logs]
  logs.pop while logs.size > mysnapshot[:logs_size]

  self.refunds = mysnapshot[:refunds]
  self.gas_used = mysnapshot[:gas]
  self.ether_delta = mysnapshot[:ether_delta]

  @transactions = mysnapshot[:txs]
  @transaction_count = mysnapshot[:txcount]

  @state.set_root_hash mysnapshot[:state]
  # Invalidate the decoded-transactions cache.
  @get_transactions_cache = []
end
ruby
{ "resource": "" }
q15757
Ethereum.Block.get_receipt
train
# Fetch the num-th transaction receipt from this block's receipts trie.
#
# @raise [IndexError] when no receipt exists at that index
def get_receipt(num)
  key = RLP.encode num
  encoded = @receipts[key]
  raise IndexError, "Receipt does not exist" if encoded == Trie::BLANK_NODE
  RLP.decode encoded, sedes: Receipt
end
ruby
{ "resource": "" }
q15758
Ethereum.Block.get_receipts
train
# Collect all receipts by probing successive indices until the trie
# raises IndexError.
def get_receipts
  collected = []
  index = 0
  begin
    loop do
      collected.push get_receipt(index)
      index += 1
    end
  rescue IndexError
    collected
  end
end
ruby
{ "resource": "" }
q15759
Ethereum.Block.transfer_value
train
# Debit `from` and credit `to` by `value`. The credit only happens when
# the debit succeeded (an insufficient balance makes delta_balance
# return false and short-circuits).
#
# @raise [ArgumentError] when value is negative
def transfer_value(from, to, value)
  raise ArgumentError, "value must be greater or equal than zero" if value < 0
  delta_balance(from, -value) && delta_balance(to, value)
end
ruby
{ "resource": "" }
q15760
Ethereum.Block.get_storage
train
# Return the secure storage trie for an account, rooted at the
# account's :storage field.
def get_storage(address)
  storage_root = get_account_item address, :storage
  SecureTrie.new PruningTrie.new(db, storage_root)
end
ruby
{ "resource": "" }
q15761
Ethereum.Block.get_storage_data
train
# Read one storage slot of an account. Pending writes in the per-account
# cache win; otherwise the RLP value is decoded from the storage trie.
# Returns 0 for unset slots.
def get_storage_data(address, index)
  address = Utils.normalize_address address

  cache = @caches["storage:#{address}"]
  return cache[index] if cache && cache.has_key?(index)

  # Storage slot keys are 32-byte, zero-padded big-endian.
  key = Utils.zpad Utils.coerce_to_bytes(index), 32
  value = get_storage(address)[key]

  value.true? ? RLP.decode(value, sedes: Sedes.big_endian_int) : 0
end
ruby
{ "resource": "" }
q15762
Ethereum.Block.set_storage_data
train
# Write one storage slot into the per-account cache (journaled). The
# first write to an address also journals an :all entry marking the
# account as touched.
def set_storage_data(address, index, value)
  address = Utils.normalize_address address

  cache_key = "storage:#{address}"
  unless @caches.has_key?(cache_key)
    @caches[cache_key] = {}
    set_and_journal :all, address, true
  end

  set_and_journal cache_key, index, value
end
ruby
{ "resource": "" }
q15763
Ethereum.Block.account_to_dict
train
# Dump one account as a Hash of printable strings, overlaying any
# uncommitted cached values on top of the persisted account.
#
# Raises ArgumentError when a storage root is requested while journaled
# changes are pending (the root would be stale).
def account_to_dict(address, with_storage_root: false, with_storage: true)
  address = Utils.normalize_address address

  # if there are uncommited account changes the current storage root is
  # meaningless
  raise ArgumentError, "cannot include storage root with uncommited account changes" if with_storage_root && !@journal.empty?

  h = {}
  account = get_account address

  h[:nonce] = (@caches[:nonce][address] || account.nonce).to_s
  h[:balance] = (@caches[:balance][address] || account.balance).to_s

  code = @caches[:code][address] || account.code
  h[:code] = "0x#{Utils.encode_hex code}"

  storage_trie = SecureTrie.new PruningTrie.new(db, account.storage)
  h[:storage_root] = Utils.encode_hex storage_trie.root_hash if with_storage_root
  if with_storage
    h[:storage] = {}
    sh = storage_trie.to_h

    cache = @caches["storage:#{address}"] || {}
    keys = cache.keys.map {|k| Utils.zpad Utils.coerce_to_bytes(k), 32 }

    # Union of persisted and pending slot keys.
    (sh.keys + keys).each do |k|
      hexkey = "0x#{Utils.encode_hex Utils.zunpad(k)}"

      v = cache[Utils.big_endian_to_int(k)]
      if v.true?
        # Pending (cached) value wins over the trie's.
        h[:storage][hexkey] = "0x#{Utils.encode_hex Utils.int_to_big_endian(v)}"
      else
        v = sh[k]
        h[:storage][hexkey] = "0x#{Utils.encode_hex RLP.decode(v)}" if v
      end
    end
  end

  h
end
ruby
{ "resource": "" }
q15764
Ethereum.Block.get_ancestor_list
train
# Return up to n ancestors of this block, nearest first. The genesis
# block has no ancestors.
#
# @raise [ArgumentError] when n is negative
def get_ancestor_list(n)
  raise ArgumentError, "n must be greater or equal than zero" if n < 0
  return [] if n == 0 || number == 0
  parent = get_parent
  [parent] + parent.get_ancestor_list(n - 1)
end
ruby
{ "resource": "" }
q15765
Ethereum.Block.validate_fields
train
# Round-trip this block's serialized form through RLP; the fields are
# valid iff encoding then decoding reproduces the serialization exactly.
def validate_fields
  serialized = Block.serialize self
  RLP.decode(RLP.encode(serialized)) == serialized
end
ruby
{ "resource": "" }
q15766
Ethereum.Block.delta_account_item
train
# Apply a signed delta to an account field. Returns false (writing
# nothing) when the result would go negative; otherwise stores the new
# value modulo 2**256 and returns true.
def delta_account_item(address, param, value)
  updated = get_account_item(address, param) + value
  return false if updated < 0
  set_account_item(address, param, updated % 2**256)
  true
end
ruby
{ "resource": "" }
q15767
Ethereum.Block.get_account_item
train
# Read an account field (e.g. :nonce, :balance), preferring the per-field
# cache and populating it on a miss.
#
# @param address [String] account address (normalized below; blank allowed)
# @param param [Symbol] account attribute name
# @return [Object] the cached or freshly loaded field value
def get_account_item(address, param)
  address = Utils.normalize_address address, allow_blank: true

  cache = @caches[param]
  return cache[address] if cache.has_key?(address)

  value = get_account(address).send(param)
  cache[address] = value
  value
end
ruby
{ "resource": "" }
q15768
Ethereum.Block.set_account_item
train
# Write an account field through the journal. Accepts a 20-byte binary
# address or a 40-char hex address (decoded here). Also journals the
# :all flag so the account is known to have been touched.
#
# @raise [ArgumentError] if the address is neither 20 nor 40 chars long
def set_account_item(address, param, value)
  unless [20, 40].include?(address.size)
    raise ArgumentError, "invalid address: #{address}"
  end
  address = Utils.decode_hex(address) if address.size == 40

  set_and_journal(param, address, value)
  set_and_journal(:all, address, true)
end
ruby
{ "resource": "" }
q15769
Ethereum.Block.get_account
train
# Load the account stored at +address+ from the state trie.
#
# Returns a blank account (with the configured initial nonce) when the
# trie has no entry. Decoded accounts are made mutable and their cached
# RLP is cleared so later in-place edits re-serialize correctly.
#
# @param address [String] account address (normalized; blank allowed)
# @return [Account]
def get_account(address)
  address = Utils.normalize_address address, allow_blank: true
  rlpdata = @state[address]

  if rlpdata == Trie::BLANK_NODE
    Account.build_blank db, config[:account_initial_nonce]
  else
    RLP.decode(rlpdata, sedes: Account, db: db).tap do |acct|
      acct.make_mutable!
      acct._cached_rlp = nil
    end
  end
end
ruby
{ "resource": "" }
q15770
Ethereum.Transaction.sign
train
# Sign this transaction in place with the given private key and return self.
#
# Computes the signing hash over the :sign serialization, produces a
# recoverable secp256k1 signature, and stores v/r/s plus the derived
# sender address on the transaction.
#
# @param key [String] private key (any format PrivateKey accepts)
# @return [Transaction] self, now signed
# @raise [InvalidTransaction] if the key is a zero/blank private key
def sign(key)
  raise InvalidTransaction, "Zero privkey cannot sign" if [0, '', Constant::PRIVKEY_ZERO, Constant::PRIVKEY_ZERO_HEX].include?(key)

  rawhash = Utils.keccak256 signing_data(:sign)
  key = PrivateKey.new(key).encode(:bin)

  vrs = Secp256k1.recoverable_sign rawhash, key
  self.v = encode_v(vrs[0])
  self.r = vrs[1]
  self.s = vrs[2]

  self.sender = PrivateKey.new(key).to_address

  self
end
ruby
{ "resource": "" }
q15771
Ethereum.Transaction.creates
train
# Address of the contract this transaction creates, or nil when it is a
# plain message call (i.e. +to+ is a real destination address).
#
# @return [String, nil] the derived contract address, or nil
def creates
  return unless [Address::BLANK, Address::ZERO].include?(to)
  Utils.mk_contract_address(sender, nonce)
end
ruby
{ "resource": "" }
q15772
Ethereum.Miner.mine
train
# Attempt proof-of-work on the wrapped block for a bounded number of rounds.
# On success the block's mixhash and nonce are filled in and the block is
# returned; otherwise returns nil.
#
# @param rounds [Integer] number of nonces to try
# @param start_nonce [Integer] nonce to start searching from
# @return [Block, nil]
def mine(rounds=1000, start_nonce=0)
  blk = @block

  nonce_bin, mix = _mine(blk.number, blk.difficulty, blk.mining_hash, start_nonce, rounds)
  return unless nonce_bin.true?

  blk.mixhash = mix
  blk.nonce = nonce_bin
  blk
end
ruby
{ "resource": "" }
q15773
Ethereum.Trie.root_hash
train
# Keccak-256 hash of the RLP-encoded root node, i.e. the trie's state root.
#
# Side effects: persists the encoded root node under its hash in the db and
# registers the node with SPV proof grabbing. Returns BLANK_ROOT for an
# empty trie.
#
# @return [String] 32-byte root hash
# @raise [InvalidNode] if the root node is not an Array
def root_hash
  # TODO: can I memoize computation below?
  return BLANK_ROOT if @root_node == BLANK_NODE

  raise InvalidNode, "invalid root node" unless @root_node.instance_of?(Array)

  val = FastRLP.encode @root_node
  key = Utils.keccak256 val

  @db.put key, val
  SPV.grabbing @root_node

  key
end
ruby
{ "resource": "" }
q15774
Ethereum.Trie.[]=
train
# Store +value+ under +key+, rebuilding the affected path of the trie and
# refreshing the cached root hash.
#
# @param key [String] raw key bytes
# @param value [String] raw value bytes
# @raise [ArgumentError] unless both key and value are Strings
def []=(key, value)
  raise ArgumentError, "key must be string" unless key.instance_of?(String)
  raise ArgumentError, "value must be string" unless value.instance_of?(String)

  nibbles = NibbleKey.from_string(key)
  @root_node = update_and_delete_storage(@root_node, nibbles, value)

  update_root_hash
end
ruby
{ "resource": "" }
q15775
Ethereum.Trie.delete
train
# Remove +key+ from the trie (a no-op path if absent), then refresh the
# cached root hash.
#
# @param key [String] raw key bytes, at most 32 bytes long
# @raise [ArgumentError] unless key is a String of size <= 32
def delete(key)
  raise ArgumentError, "key must be string" unless key.instance_of?(String)
  raise ArgumentError, "max key size is 32" if key.size > 32

  nibbles = NibbleKey.from_string(key)
  @root_node = delete_and_delete_storage(@root_node, nibbles)

  update_root_hash
end
ruby
{ "resource": "" }
q15776
Ethereum.Trie.to_h
train
# Dump the whole trie as a flat Hash of raw string keys to values.
# Nibble keys are stripped of their terminator before conversion.
#
# @return [Hash{String => String}]
def to_h
  to_hash(@root_node).each_with_object({}) do |(nibble_key, value), result|
    result[nibble_key.terminate(false).to_string] = value
  end
end
ruby
{ "resource": "" }
q15777
Ethereum.Trie.find
train
# Recursively resolve the value stored under nibble key +nbk+ starting from
# +node+. Returns BLANK_NODE when the key is absent.
#
# @param node [Array, String] current trie node
# @param nbk [NibbleKey] remaining nibbles of the key being looked up
# @return [String] the stored value, or BLANK_NODE if not found
# @raise [InvalidNodeType] on an unrecognized node type
def find(node, nbk)
  node_type = get_node_type node

  case node_type
  when :blank
    BLANK_NODE
  when :branch
    # empty key means the value slot (index 16) of this branch node
    return node.last if nbk.empty?

    # descend into the child selected by the first nibble
    sub_node = decode_to_node node[nbk[0]]
    find sub_node, nbk[1..-1]
  when :leaf
    # leaf matches only on exact key equality
    node_key = NibbleKey.decode(node[0]).terminate(false)
    nbk == node_key ? node[1] : BLANK_NODE
  when :extension
    # extension matches when its key is a prefix of the lookup key;
    # consume the prefix and continue in the referenced subtree
    node_key = NibbleKey.decode(node[0]).terminate(false)
    if node_key.prefix?(nbk)
      sub_node = decode_to_node node[1]
      find sub_node, nbk[node_key.size..-1]
    else
      BLANK_NODE
    end
  else
    raise InvalidNodeType, "node type must be in #{NODE_TYPES}, given: #{node_type}"
  end
end
ruby
{ "resource": "" }
q15778
Ethereum.Trie.update_node
train
# Insert/overwrite +value+ at nibble +key+ below +node+ and return the
# (possibly new) node that replaces it in the trie.
#
# @param node [Array, String] current trie node (may be BLANK_NODE)
# @param key [NibbleKey] remaining key nibbles
# @param value [String] value to store
# @return [Array] the updated node
def update_node(node, key, value)
  node_type = get_node_type node

  case node_type
  when :blank
    # empty slot: becomes a leaf holding the whole remaining key
    [key.terminate(true).encode, value]
  when :branch
    if key.empty?
      # key consumed: store in this branch's value slot
      node.last = value
    else
      # recurse into the child for the first nibble, re-link the result
      new_node = update_and_delete_storage(
        decode_to_node(node[key[0]]),
        key[1..-1],
        value
      )
      node[key[0]] = encode_node new_node
    end
    node
  when :leaf
    update_leaf_node(node, key, value)
  else
    update_extension_node(node, key, value)
  end
end
ruby
{ "resource": "" }
q15779
Ethereum.Trie.delete_node
train
# Delete nibble +key+ from the subtree rooted at +node+, dispatching on
# the node type; kv-type nodes (leaf/extension) share one handler.
#
# @return [Array, String] the node that replaces +node+ after deletion
def delete_node(node, key)
  node_type = get_node_type(node)
  if node_type == :blank
    BLANK_NODE
  elsif node_type == :branch
    delete_branch_node(node, key)
  else
    # kv type (leaf or extension)
    delete_kv_node(node, key)
  end
end
ruby
{ "resource": "" }
q15780
Ethereum.Trie.delete_node_storage
train
# Intended to remove a node's persisted encoding from the db. Currently a
# deliberate no-op for hashed (>= 32 byte) nodes: two trie nodes may share
# an identical subtree, so deleting here could corrupt another path.
# Inline (< 32 byte) nodes are never stored in the db, hence the early out.
#
# @param node [Array, BLANK_NODE] node whose storage would be deleted
# @raise [ArgumentError] if node is neither Array nor BLANK_NODE
def delete_node_storage(node)
  return if node == BLANK_NODE
  raise ArgumentError, "node must be Array or BLANK_NODE" unless node.instance_of?(Array)

  encoded = encode_node node
  return if encoded.size < 32

  # FIXME: in current trie implementation two nodes can share identical
  # subtree thus we can not safely delete nodes for now
  #
  # \@db.delete encoded
end
ruby
{ "resource": "" }
q15781
Ethereum.Trie.get_node_type
train
# Classify a trie node as :blank, :leaf, :extension or :branch.
# A 2-item [key, value] node is a leaf when its encoded key carries the
# terminator flag, an extension otherwise; a 17-item node is a branch.
#
# @raise [InvalidNode] if the array size matches neither node shape
def get_node_type(node)
  return :blank if node == BLANK_NODE

  if node.size == KV_WIDTH
    # [k, v] node: terminator flag distinguishes leaf from extension
    NibbleKey.decode(node[0]).terminate? ? :leaf : :extension
  elsif node.size == BRANCH_WIDTH
    # [k0, ..., k15, v]
    :branch
  else
    raise InvalidNode, "node size must be #{KV_WIDTH} or #{BRANCH_WIDTH}"
  end
end
ruby
{ "resource": "" }
q15782
Ethereum.VM.preprocess_code
train
# Translate raw EVM bytecode into an array of decoded op entries, one per
# byte position. Each entry is [name, gas, in, out, opcode_byte, push_arg].
# The immediate bytes following a PUSHn are packed big-endian into the
# PUSH entry's last slot; each consumed immediate byte also emits an
# INVALID placeholder so array indices keep matching byte offsets (these
# placeholders are skipped during execution).
#
# @param code [String] raw bytecode
# @return [Array<Array>] decoded ops, aligned with byte positions
def preprocess_code(code)
  code = Utils.bytes_to_int_array code

  ops = []
  i = 0
  while i < code.size
    # unknown opcodes decode to INVALID
    o = Opcodes::TABLE.fetch(code[i], [:INVALID, 0, 0, 0]) + [code[i], 0]
    ops.push o

    if o[0][0,Opcodes::PREFIX_PUSH.size] == Opcodes::PREFIX_PUSH
      # n = number of immediate bytes this PUSH consumes
      n = o[0][Opcodes::PREFIX_PUSH.size..-1].to_i
      n.times do |j|
        i += 1
        # missing trailing bytes are treated as zero
        byte = i < code.size ? code[i] : 0
        # accumulate the push argument big-endian
        o[-1] = (o[-1] << 8) + byte

        # polyfill, these INVALID ops will be skipped in execution
        ops.push [:INVALID, 0, 0, 0, byte, 0] if i < code.size
      end
    end

    i += 1
  end

  ops
end
ruby
{ "resource": "" }
q15783
Ethereum.BlockHeader.check_pow
train
# Validate this header's proof of work via the Miner, optionally against
# an explicit nonce instead of the header's own.
#
# @param nonce [String, nil] nonce to check; defaults to self.nonce
# @return [Boolean]
def check_pow(nonce=nil)
  logger.debug "checking pow", block: full_hash_hex[0,8]
  nonce ||= self.nonce
  Miner.check_pow(number, mining_hash, mixhash, nonce, difficulty)
end
ruby
{ "resource": "" }
q15784
Ethereum.BlockHeader.to_h
train
# Render the header as a Hash of strings: byte fields with/without a 0x
# prefix, numeric fields as decimal strings, bloom as serialized hex.
#
# @return [Hash{Symbol => String}]
def to_h
  h = {}

  prefixed_hex_fields = %i(prevhash uncles_hash extra_data nonce mixhash)
  plain_hex_fields = %i(state_root tx_list_root receipts_root coinbase)
  numeric_fields = %i(number difficulty gas_limit gas_used timestamp)

  prefixed_hex_fields.each {|field| h[field] = "0x#{Utils.encode_hex(send field)}" }
  plain_hex_fields.each {|field| h[field] = Utils.encode_hex send(field) }
  numeric_fields.each {|field| h[field] = send(field).to_s }

  h[:bloom] = Utils.encode_hex Sedes.int256.serialize(bloom)
  h
end
ruby
{ "resource": "" }
q15785
Ethereum.Chain.get_brothers
train
# Collect "brother" blocks (siblings of this block's ancestors) up to the
# configured max uncle depth — i.e. the pool of potential uncles.
#
# @param block [Block] starting block
# @return [Array<Block>] candidate uncle blocks
def get_brothers(block)
  brothers = []
  depth = 0

  while block.has_parent? && depth < @env.config[:max_uncle_depth]
    parent = block.get_parent
    # siblings: children of the parent other than the block itself
    brothers.concat(get_children(parent).reject {|c| c == block })
    block = parent
    depth += 1
  end

  brothers
end
ruby
{ "resource": "" }
q15786
Ethereum.Chain.add_block
train
# Validate and persist +block+, updating the chain head when it extends
# the heaviest (most-work) chain.
#
# Fixes: `log.fatal` referenced an undefined `log` (every other call in
# this method uses `logger`), and the warn call misspelled the
# `block_hash:` keyword as `block_has:`.
#
# @param block [Block] block to add
# @param forward_pending_transaction [Boolean] re-add head-candidate txs
#   to the new candidate when the head changes
# @return [Boolean] true if the block was stored, false if rejected
def add_block(block, forward_pending_transaction=true)
  # structural validation: parentage, uncles, proof of work
  unless block.has_parent? || block.genesis?
    logger.debug "missing parent", block_hash: block
    return false
  end

  unless block.validate_uncles
    logger.debug "invalid uncles", block_hash: block
    return false
  end

  unless block.header.check_pow || block.genesis?
    logger.debug "invalid nonce", block_hash: block
    return false
  end

  if block.has_parent?
    begin
      Block.verify(block, block.get_parent)
    rescue InvalidBlock => e
      # dump the offending block for post-mortem analysis
      logger.fatal "VERIFICATION FAILED", block_hash: block, error: e
      f = File.join Utils.data_dir, 'badblock.log'
      File.write(f, Utils.encode_hex(RLP.encode(block)))
      return false
    end
  end

  if block.number < head.number
    logger.debug "older than head", block_hash: block, head_hash: head
  end

  @index.add_block block
  store_block block

  # set to head if this makes the longest chain w/ most work for that number
  if block.chain_difficulty > head.chain_difficulty
    logger.debug "new head", block_hash: block, num_tx: block.transaction_count
    update_head block, forward_pending_transaction
  elsif block.number > head.number
    logger.warn "has higher blk number than head but lower chain_difficulty", block_hash: block, head_hash: head, block_difficulty: block.chain_difficulty, head_difficulty: head.chain_difficulty
  end

  # Refactor the long calling chain
  block.transactions.clear_all
  block.receipts.clear_all
  block.state.db.commit_refcount_changes block.number
  block.state.db.cleanup block.number

  commit # batch commits all changes that came with the new block

  true
end
ruby
{ "resource": "" }
q15787
Ethereum.Chain.add_transaction
train
# Apply +transaction+ to the current head candidate, reverting its
# finalization first and re-finalizing afterwards.
#
# Fixes typos: "candiate" -> "candidate" in the raise message and
# "fullfilled" -> "fulfilled" in the comment.
#
# @param transaction [Transaction]
# @return [Boolean, nil] true on success, false if invalid, nil if the
#   transaction is already included
def add_transaction(transaction)
  raise AssertError, "head candidate cannot be nil" unless @head_candidate

  hc = @head_candidate
  logger.debug "add tx", num_txs: transaction_count, tx: transaction, on: hc

  if @head_candidate.include_transaction?(transaction.full_hash)
    logger.debug "known tx"
    return
  end

  old_state_root = hc.state_root
  # revert finalization
  hc.state_root = @pre_finalize_state_root
  begin
    success, output = hc.apply_transaction(transaction)
  rescue InvalidTransaction => e
    # if unsuccessful the prerequisites were not fulfilled and the tx is
    # invalid, state must not have changed
    logger.debug "invalid tx", error: e
    hc.state_root = old_state_root
    return false
  end
  logger.debug "valid tx"

  # we might have a new head_candidate (due to ctx switches in up layer)
  if @head_candidate != hc
    logger.debug "head_candidate changed during validation, trying again"
    return add_transaction(transaction)
  end

  @pre_finalize_state_root = hc.state_root
  hc.finalize
  logger.debug "tx applied", result: output

  raise AssertError, "state root unchanged!" unless old_state_root != hc.state_root
  true
end
ruby
{ "resource": "" }
q15788
Ethereum.Chain.get_chain
train
# Walk backwards from +start+ (or from the head when start is blank) and
# return up to +count+ blocks. Returns [] when start is unknown or not on
# the main branch.
#
# @param start [String] hash of the block to begin at, '' for head
# @param count [Integer] maximum number of blocks to return
# @return [Array<Block>]
def get_chain(start: '', count: 10)
  logger.debug "get_chain", start: Utils.encode_hex(start), count: count

  if start.true?
    return [] unless @index.db.include?(start)

    block = get start
    return [] unless in_main_branch?(block)
  else
    block = head
  end

  chain = []
  count.times do
    chain << block
    break if block.genesis?
    block = block.get_parent
  end

  chain
end
ruby
{ "resource": "" }
q15789
Ethereum.Chain.update_head_candidate
train
# Build a fresh head candidate block on top of the current head: collect
# eligible uncles, create the block via Block.build_from_parent on an
# overlay db, finalize it, and re-add pending transactions from the
# previous candidate. Sets @head_candidate and @pre_finalize_state_root.
#
# @param forward_pending_transaction [Boolean] whether to re-apply the old
#   candidate's pending transactions to the new candidate
def update_head_candidate(forward_pending_transaction=true)
  logger.debug "updating head candidate", head: head

  # collect uncles
  blk = head # parent of the block we are collecting uncles for
  uncles = get_brothers(blk).map(&:header).uniq

  # drop any brother already used as an uncle within the relevant depth
  (@env.config[:max_uncle_depth]+2).times do |i|
    blk.uncles.each {|u| uncles.delete u }
    blk = blk.get_parent if blk.has_parent?
  end
  raise "strange uncle found!" unless uncles.empty? || uncles.map(&:number).max <= head.number

  uncles = uncles[0, @env.config[:max_uncles]]

  # create block
  ts = [Time.now.to_i, head.timestamp+1].max
  # overlay db so candidate-building never mutates committed state
  _env = Env.new DB::OverlayDB.new(head.db), config: @env.config, global_config: @env.global_config
  hc = Block.build_from_parent head, @coinbase, timestamp: ts, uncles: uncles, env: _env
  raise ValidationError, "invalid uncles" unless hc.validate_uncles

  # remember the pre-finalize root so add_transaction can revert to it
  @pre_finalize_state_root = hc.state_root
  hc.finalize

  # add transactions from previous head candidate
  old_hc = @head_candidate
  @head_candidate = hc

  if old_hc
    # carry over transactions that did not make it into the new head
    tx_hashes = head.get_transaction_hashes
    pending = old_hc.get_transactions.select {|tx| !tx_hashes.include?(tx.full_hash) }

    if pending.true?
      if forward_pending_transaction
        logger.debug "forwarding pending transaction", num: pending.size
        pending.each {|tx| add_transaction tx }
      else
        logger.debug "discarding pending transaction", num: pending.size
      end
    end
  end
end
ruby
{ "resource": "" }
q15790
Ethereum.Index.update_blocknumbers
train
# Re-point the number->hash index at +blk+ and walk back through its
# ancestors, rewriting entries until an ancestor is already indexed
# correctly (or genesis is reached). Genesis (number 0) is stored
# permanently; other numbers are stored temporarily (refcounted).
#
# @param blk [Block] new tip whose chain should own the number index
def update_blocknumbers(blk)
  loop do
    if blk.number > 0
      @db.put_temporarily block_by_number_key(blk.number), blk.full_hash
    else
      @db.put block_by_number_key(blk.number), blk.full_hash
    end
    @db.commit_refcount_changes blk.number

    break if blk.number == 0

    blk = blk.get_parent()
    # stop once the rest of the ancestry is already indexed correctly
    break if has_block_by_number(blk.number) && get_block_by_number(blk.number) == blk.full_hash
  end
end
ruby
{ "resource": "" }
q15791
JSS.Package.install_if_reported_available=
train
# Set whether this package auto-installs when reported available.
# Blank-ish values coerce to false; anything else must be true/false.
# Marks the object as needing a server update when the value changes.
#
# @param new_val [Boolean] the new setting
# @raise [JSS::InvalidDataError] unless new_val is boolean
def install_if_reported_available=(new_val)
  return nil if new_val == @install_if_reported_available

  new_val = false if new_val.to_s.empty?
  unless JSS::TRUE_FALSE.include? new_val
    raise JSS::InvalidDataError, 'install_if_reported_available must be boolean true or false'
  end

  @install_if_reported_available = new_val
  @need_to_update = true
end
ruby
{ "resource": "" }
q15792
JSS.Package.priority=
train
# Set the package installation priority (1-20). Blank-ish values fall
# back to DEFAULT_PRIORITY. Marks the object as needing a server update.
#
# @param new_val [Integer] the new priority
# @raise [JSS::InvalidDataError] unless new_val is within PRIORITIES
def priority=(new_val)
  return nil if new_val == @priority

  new_val = DEFAULT_PRIORITY if new_val.to_s.empty?
  unless PRIORITIES.include? new_val
    raise JSS::InvalidDataError, ':priority must be an integer from 1-20'
  end

  @priority = new_val
  @need_to_update = true
end
ruby
{ "resource": "" }
q15793
JSS.Package.required_processor=
train
# Set the CPU type this package requires. Blank-ish values fall back to
# DEFAULT_PROCESSOR. Marks the object as needing a server update.
#
# @param new_val [String] one of CPU_TYPES
# @raise [JSS::InvalidDataError] unless new_val is a known CPU type
def required_processor=(new_val)
  return nil if new_val == @required_processor

  new_val = DEFAULT_PROCESSOR if new_val.to_s.empty?
  unless CPU_TYPES.include? new_val
    raise JSS::InvalidDataError, "Required_processor must be one of: #{CPU_TYPES.join ', '}"
  end

  @required_processor = new_val
  @need_to_update = true
end
ruby
{ "resource": "" }
q15794
JSS.Package.calculate_checksum
train
# Compute the checksum of this package's file, either from a local copy
# or by mounting the master distribution point (read-write or read-only)
# and hashing the file in place.
#
# Side effect: may mount the master distribution point; unmounts it
# afterwards when +unmount+ is true.
#
# @param type [Symbol, nil] hash type; defaults to DEFAULT_CHECKSUM_HASH_TYPE
# @param local_file [String, Pathname, nil] local path to hash instead of
#   mounting the distribution point
# @param rw_pw [String, nil] read-write password for mounting
# @param ro_pw [String, nil] read-only password for mounting
# @param unmount [Boolean] unmount the dist point when done
# @return [String] the computed checksum
# @raise [ArgumentError] if no local file and neither password is given
def calculate_checksum(type: nil, local_file: nil, rw_pw: nil, ro_pw: nil, unmount: true )
  type ||= DEFAULT_CHECKSUM_HASH_TYPE
  mdp = JSS::DistributionPoint.master_distribution_point api: @api

  if local_file
    file_to_calc = local_file
  else
    # no local copy: mount the master dist point with whichever password
    # was provided (read-write preferred)
    if rw_pw
      dppw = rw_pw
      mnt = :rw
    elsif ro_pw
      dppw = ro_pw
      mnt = :ro
    else
      raise ArgumentError, 'Either rw_pw: or ro_pw: must be provided'
    end
    file_to_calc = mdp.mount(dppw, mnt) + "#{DIST_POINT_PKGS_FOLDER}/#{@filename}"
  end
  new_checksum = self.class.calculate_checksum(file_to_calc, type)
  mdp.unmount if unmount && mdp.mounted?
  new_checksum
end
ruby
{ "resource": "" }
q15795
JSS.Package.checksum_valid?
train
# Does the package file's current checksum match the stored one?
# Recomputes using the stored checksum type; false when no checksum is
# stored at all.
#
# @return [Boolean]
def checksum_valid?(local_file: nil, rw_pw: nil, ro_pw: nil, unmount: true)
  return false unless @checksum

  @checksum == calculate_checksum(
    type: @checksum_type,
    local_file: local_file,
    rw_pw: rw_pw,
    ro_pw: ro_pw,
    unmount: unmount
  )
end
ruby
{ "resource": "" }
q15796
JSS.Package.update_master_filename
train
# Rename this package's file on the master distribution point.
#
# Side effects: mounts the master distribution point read-write; renames
# the file; unmounts afterwards when +unmount+ is true. If the new name
# has no extension, the old file's extension is carried over.
#
# @param old_file_name [String] current filename on the dist point
# @param new_file_name [String] desired filename
# @param rw_pw [String] read-write password for mounting
# @param unmount [Boolean] unmount the dist point when done
# @return [nil]
# @raise [JSS::NoSuchItemError] if the pkg isn't in the JSS or the old
#   file is missing on the dist point
def update_master_filename(old_file_name, new_file_name, rw_pw, unmount = true)
  raise JSS::NoSuchItemError, "#{old_file_name} does not exist in the jss." unless @in_jss
  mdp = JSS::DistributionPoint.master_distribution_point api: @api
  pkgs_dir = mdp.mount(rw_pw, :rw) + DIST_POINT_PKGS_FOLDER.to_s
  old_file = pkgs_dir + old_file_name
  raise JSS::NoSuchItemError, "File not found on the master distribution point at #{DIST_POINT_PKGS_FOLDER}/#{old_file_name}." unless \
    old_file.exist?

  new_file = pkgs_dir + new_file_name
  # use the extension of the original file.
  new_file = pkgs_dir + (new_file_name + old_file.extname) if new_file.extname.empty?

  old_file.rename new_file
  mdp.unmount if unmount
  nil
end
ruby
{ "resource": "" }
q15797
JSS.Package.delete
train
# Delete the package record from the JSS, optionally also removing the
# file from the master distribution point.
#
# @param delete_file [Boolean] also delete the file on the dist point
# @param rw_pw [String, nil] read-write password for mounting
# @param unmount [Boolean] unmount the dist point when done
def delete(delete_file: false, rw_pw: nil, unmount: true)
  super()
  return unless delete_file
  delete_master_file(rw_pw, unmount)
end
ruby
{ "resource": "" }
q15798
JSS.Package.uninstall
train
# Uninstall this package from the local machine by shelling out to the
# jamf binary. Requires root and an indexed (removable) package.
#
# @param args [Hash] options:
#   :target  [String] root volume of the uninstall, defaults to '/'
#   :feu     [Boolean] pass -feu (fill existing users)
#   :fut     [Boolean] pass -fut (fill user template)
#   :verbose [Boolean] verbose output from the jamf binary
# @return [Process::Status] exit status of the jamf uninstall run
# @raise [JSS::UnsupportedError] if not removable or not running as root
def uninstall(args = {})
  unless removable?
    raise JSS::UnsupportedError, \
      'This package cannot be uninstalled. Please use CasperAdmin to index it and allow uninstalls'
  end
  raise JSS::UnsupportedError, 'You must have root privileges to uninstall packages' unless JSS.superuser?
  args[:target] ||= '/'

  # are we doing "fill existing users" or "fill user template"?
  do_feu = args[:feu] ? '-feu' : ''
  do_fut = args[:fut] ? '-fut' : ''

  # use jamf binary to uninstall the pkg
  jamf_opts = "-target '#{args[:target]}' -id '#{@id}' #{do_feu} #{do_fut}"

  # run it via a client
  JSS::Client.run_jamf 'uninstall', jamf_opts, args[:verbose]

  $CHILD_STATUS
end
ruby
{ "resource": "" }
q15799
JSS.DistributionPoint.rest_xml
train
# Build the REST XML representation of this distribution point.
# Elements are emitted in the same fixed order as before; ssh_password is
# only included when it has a value.
#
# @return [String] the XML document as a string
def rest_xml
  doc = REXML::Document.new
  dp = doc.add_element "distribution_point"

  # one child element per attribute, in the order the API expects
  %i(
    name ip_address local_path enable_load_balancing failover_point
    is_master connection_type share_port share_name read_write_username
    read_write_password read_only_username read_only_password
    workgroup_or_domain http_downloads_enabled protocol port context
    no_authentication_required certificate_required
    username_password_required http_username certificate http_url
    failover_point_url ssh_username
  ).each do |attrib|
    dp.add_element(attrib.to_s).text = instance_variable_get("@#{attrib}")
  end

  dp.add_element("ssh_password").text = @ssh_password if @ssh_password

  doc.to_s
end
ruby
{ "resource": "" }