hash      stringlengths   40..40
diff      stringlengths   131..114k
message   stringlengths   7..980
project   stringlengths   5..67
split     stringclasses   1 value
c1979a2aa6e0ddd776378443b0a9b834fbfc9ac2
diff --git a/src/frontend/org/voltdb/utils/CSVLoader.java b/src/frontend/org/voltdb/utils/CSVLoader.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/utils/CSVLoader.java +++ b/src/frontend/org/voltdb/utils/CSVLoader.java @@ -238,6 +238,10 @@ public class CSVLoader { public static void main(String[] args) throws IOException, InterruptedException { start = System.currentTimeMillis(); + long parsingTimeStart = start; + long parsingTimeEnd = start; + long insertTimeStart = start; + long insertTimeEnd = start; int waits = 0; int shortWaits = 0; @@ -250,6 +254,7 @@ public class CSVLoader { ICsvListReader listReader = null; try { + long st = System.currentTimeMillis(); if (CSVLoader.standin) { tokenizer = new Tokenizer(new BufferedReader( new InputStreamReader(System.in)), csvPreference, config.strictquotes, config.escape, config.columnsizelimit, @@ -262,6 +267,8 @@ public class CSVLoader { config.skip) ; listReader = new CsvListReader(tokenizer, csvPreference); } + long end = System.currentTimeMillis(); + parsingTimeEnd += (end - st); } catch (FileNotFoundException e) { m_log.error("CSV file '" + config.file + "' could not be found."); System.exit(-1); @@ -339,7 +346,11 @@ public class CSVLoader { totalLineCount.set(cfg.skip); else totalLineCount.set( listReader.getLineNumber() ); + long st = System.currentTimeMillis(); lineList = listReader.read(); + long end = System.currentTimeMillis(); + parsingTimeEnd += (end - st); + //EOF if(lineList == null) { if( totalLineCount.get() > listReader.getLineNumber() ) @@ -389,12 +400,19 @@ public class CSVLoader { if (csvClient != null) { csvClient.drain(); } + insertTimeEnd = System.currentTimeMillis(); } catch (Exception e) { e.printStackTrace(); } - m_log.info("Inserted " + outCount.get() + " and acknowledged " - + inCount.get() + " rows (final)"); + m_log.info("Parsing CSV file took " + (parsingTimeEnd - parsingTimeStart) + " milliseconds."); + if (!config.check) { + m_log.info("Inserting Data took " + ((insertTimeEnd - insertTimeStart) - (parsingTimeEnd - parsingTimeStart)) + " milliseconds."); + m_log.info("Inserted " + outCount.get() + " and acknowledged " + + inCount.get() + " rows (final)"); + } else { + m_log.info("Verification of CSV input completed."); + } if (waits > 0) { m_log.info("Waited " + waits + " times"); if (shortWaits > 0) { @@ -403,7 +421,9 @@ public class CSVLoader { } } - produceFiles(); + if (!config.check) { + produceFiles(); + } close_cleanup(); listReader.close(); if (csvClient != null) { @@ -512,7 +532,7 @@ public class CSVLoader { private static void produceFiles() { latency = System.currentTimeMillis() - start; - m_log.info("CSVLoader elapsed: " + latency / 1000F + m_log.info("CSVLoader elapsed: " + latency + " seconds"); int bulkflush = 300; // by default right now
Add capturing of timings for reading/parsing the file and inserting to the DB.
VoltDB_voltdb
train
1793c3775329f38c5f500795ba626c6e1fff5f20
diff --git a/src/directives/leaflet.js b/src/directives/leaflet.js index <HASH>..<HASH> 100644 --- a/src/directives/leaflet.js +++ b/src/directives/leaflet.js @@ -17,7 +17,8 @@ angular.module("leaflet-directive", []).directive('leaflet', function ($q, leafl controls: '=controls', eventBroadcast: '=eventBroadcast' }, - template: '<div class="angular-leaflet-map"></div>', + transclude: true, + template: '<div><div class="angular-leaflet-map"></div><div ng-transclude></div></div>', controller: function ($scope) { _leafletMap = $q.defer(); this.getMap = function () { @@ -33,7 +34,8 @@ angular.module("leaflet-directive", []).directive('leaflet', function ($q, leafl var isDefined = leafletHelpers.isDefined, defaults = leafletMapDefaults.setDefaults(scope.defaults, attrs.id), genDispatchMapEvent = leafletEvents.genDispatchMapEvent, - mapEvents = leafletEvents.getAvailableMapEvents(); + mapEvents = leafletEvents.getAvailableMapEvents(), + mapElement = angular.element(element[0].children[0]); // Set width and height if they are defined if (isDefined(attrs.width)) { @@ -42,6 +44,7 @@ angular.module("leaflet-directive", []).directive('leaflet', function ($q, leafl } else { element.css('width', attrs.width + 'px'); } + mapElement.css('width', element.css('width')); } if (isDefined(attrs.height)) { if (isNaN(attrs.height)) { @@ -49,10 +52,11 @@ angular.module("leaflet-directive", []).directive('leaflet', function ($q, leafl } else { element.css('height', attrs.height + 'px'); } + mapElement.css('height', element.css('height')); } // Create the Leaflet Map Object with the options - var map = new L.Map(element[0], leafletMapDefaults.getMapCreationDefaults(attrs.id)); + var map = new L.Map(mapElement[0], leafletMapDefaults.getMapCreationDefaults(attrs.id)); _leafletMap.resolve(map); if (!isDefined(attrs.center)) {
* modified leaflet-directive to accept transcluded elements
tombatossals_angular-leaflet-directive
train
2151492decd55dedbe06e998f49803a5f81a5ee6
diff --git a/drivers/virtualbox/virtualbox.go b/drivers/virtualbox/virtualbox.go index <HASH>..<HASH> 100644 --- a/drivers/virtualbox/virtualbox.go +++ b/drivers/virtualbox/virtualbox.go @@ -177,17 +177,14 @@ func (d *Driver) Create() error { if err := os.Mkdir(imgPath, 0700); err != nil { return err } - } if d.Boot2DockerURL != "" { isoURL = d.Boot2DockerURL log.Infof("Downloading %s from %s...", isoFilename, isoURL) - if err := b2dutils.DownloadISO(commonIsoPath, isoFilename, isoURL); err != nil { + if err := b2dutils.DownloadISO(imgPath, isoFilename, isoURL); err != nil { return err - } - } else { // todo: check latest release URL, download if it's new // until then always use "latest" @@ -202,11 +199,11 @@ func (d *Driver) Create() error { return err } } + } - isoDest := filepath.Join(d.storePath, isoFilename) - if err := utils.CopyFile(commonIsoPath, isoDest); err != nil { - return err - } + isoDest := filepath.Join(d.storePath, isoFilename) + if err := utils.CopyFile(commonIsoPath, isoDest); err != nil { + return err } log.Infof("Creating SSH key...") diff --git a/utils/b2d.go b/utils/b2d.go index <HASH>..<HASH> 100644 --- a/utils/b2d.go +++ b/utils/b2d.go @@ -7,6 +7,7 @@ import ( "io/ioutil" "net" "net/http" + "net/url" "os" "path/filepath" "time" @@ -84,29 +85,46 @@ func (b *B2dUtils) GetLatestBoot2DockerReleaseURL() (string, error) { } // Download boot2docker ISO image for the given tag and save it at dest. -func (b *B2dUtils) DownloadISO(dir, file, url string) error { - client := getClient() - rsp, err := client.Get(url) - if err != nil { - return err +func (b *B2dUtils) DownloadISO(dir, file, isoUrl string) error { + u, err := url.Parse(isoUrl) + var src io.ReadCloser + if u.Scheme == "file" { + s, err := os.Open(u.Path) + if err != nil { + return err + } + src = s + } else { + client := getClient() + s, err := client.Get(isoUrl) + if err != nil { + return err + } + src = s.Body } - defer rsp.Body.Close() + + defer src.Close() // Download to a temp file first then rename it to avoid partial download. f, err := ioutil.TempFile(dir, file+".tmp") if err != nil { return err } + defer os.Remove(f.Name()) - if _, err := io.Copy(f, rsp.Body); err != nil { + + if _, err := io.Copy(f, src); err != nil { // TODO: display download progress? return err } + if err := f.Close(); err != nil { return err } + if err := os.Rename(f.Name(), filepath.Join(dir, file)); err != nil { return err } + return nil }
fix regression for custom b2d url paths
docker_machine
train
92c945cbd205e1764f3fe404c889ead0e2f30246
diff --git a/test/error-handling.tap.js b/test/error-handling.tap.js index <HASH>..<HASH> 100644 --- a/test/error-handling.tap.js +++ b/test/error-handling.tap.js @@ -14,9 +14,9 @@ test("continuation-local storage glue with a throw in the continuation chain", d.on('error', function (blerg) { t.equal(blerg.message, "explicitly nonlocal exit", "got the expected exception"); - t.notOk(namespace.get('outer'), "outer context should have been exited by throw"); + t.ok(namespace.get('outer'), "outer context is still active"); t.notOk(namespace.get('inner'), "inner context should have been exited by throw"); - t.equal(namespace._set.length, 0, "should be back to global state"); + t.equal(namespace._set.length, 1, "should be back to outer state"); cls.destroyNamespace('test'); t.end();
test: fixed for current exit semantics. Not sure when this changed, but it's been like this for a while.
othiym23_node-continuation-local-storage
train
0d6c43a75eed9278349e62330380787dad6a51db
diff --git a/package/Compiler.php b/package/Compiler.php index <HASH>..<HASH> 100644 --- a/package/Compiler.php +++ b/package/Compiler.php @@ -27,18 +27,37 @@ class Compiler $phar->startBuffering(); - $finder = new Finder(); - $finder->files() - ->ignoreVCS(true) - ->name('*.php') - ->name('*.tpl.dist') - ->name('*.html.dist') - ->in($this->compileDir . '/src') - ; - - foreach ($finder as $file) { - $this->addFile($phar, $file); - } + $finder = new Finder(); + $finder->files() + ->ignoreVCS(true) + ->name('*.php') + ->name('*.tpl.dist') + ->name('*.html.dist') + ->in($this->compileDir . '/src'); + + foreach ($finder as $file) { + $this->addFile($phar, $file); + } + + $finder = new Finder(); + $finder + ->files() + ->ignoreVCS(true) + ->name('*.php') + ->name('*.js') + ->name('*.css') + ->name('*.png') + ->name('*.tpl.dist') + ->name('*.html.dist') + ->exclude('Tests') + ->exclude('tests') + ->exclude('benchmark') + ->exclude('demo') + ->in($this->compileDir.'/plugins/frameworks'); + + foreach($finder as $file) { + $this->addFile($phar, $file); + } $finder = new Finder(); $finder @@ -79,7 +98,7 @@ class Compiler $phar = $phar->compress(\Phar::NONE); } - + unset($phar); }
Added yii plugins dirs to the compiler
Codeception_Codeception
train
e42fad2187b685b2eb558e4e3622ba8b50efd6a6
diff --git a/safe_qgis/message_viewer.py b/safe_qgis/message_viewer.py index <HASH>..<HASH> 100644 --- a/safe_qgis/message_viewer.py +++ b/safe_qgis/message_viewer.py @@ -49,9 +49,6 @@ class MessageViewer(QtWebKit.QWebView): self.dynamic_messages = [] #self.show() - # Read the header and footer html snippets - self.header = htmlHeader() - self.footer = htmlFooter() #base_dir = os.path.dirname(__file__) #self.header = header.replace('PATH', base_dir) @@ -134,7 +131,7 @@ class MessageViewer(QtWebKit.QWebView): def show_messages(self): """Show all messages.""" self.setUrl(QtCore.QUrl('')) - string = self.header + string = htmlHeader() if self.static_message is not None: string += self.static_message.to_html() @@ -149,8 +146,7 @@ class MessageViewer(QtWebKit.QWebView): if html is not None: string += html - string += self.footer - self.setHtml(string) + string += htmlFooter() # scroll-to logic would work something like this # see resources/js/inasafe.js and also diff --git a/safe_qgis/tests/test_qgis_environment.py b/safe_qgis/tests/test_qgis_environment.py index <HASH>..<HASH> 100644 --- a/safe_qgis/tests/test_qgis_environment.py +++ b/safe_qgis/tests/test_qgis_environment.py @@ -49,10 +49,11 @@ class QGISTest(unittest.TestCase): see https://github.com/AIFDR/inasafe/issues/349 """ myCrs = QgsCoordinateReferenceSystem() - myProj4 = ('GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",' - 'SPHEROID["WGS_1984",6378137.0,298.257223563]],' - 'PRIMEM["Greenwich",0.0],UNIT["Degree",' - '0.0174532925199433]]') + myProj4 = ( + 'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",' + 'SPHEROID["WGS_1984",6378137.0,298.257223563]],' + 'PRIMEM["Greenwich",0.0],UNIT["Degree",' + '0.0174532925199433]]') myCrs.createFromWkt(myProj4) myAuthId = myCrs.authid() myExpectedAuthId = 'EPSG:4326'
Fixed issue with messages being duplicated in messageviewer
inasafe_inasafe
train
b85f761a01b6b1c2badeec4484c96d6ae78a88ec
diff --git a/lib/eye/process/config.rb b/lib/eye/process/config.rb index <HASH>..<HASH> 100644 --- a/lib/eye/process/config.rb +++ b/lib/eye/process/config.rb @@ -37,6 +37,7 @@ module Eye::Process::Config h[:stdout] = Eye::System.normalized_file(h[:stdout], h[:working_dir]) if h[:stdout] h[:stderr] = Eye::System.normalized_file(h[:stderr], h[:working_dir]) if h[:stderr] + h[:stdall] = Eye::System.normalized_file(h[:stdall], h[:working_dir]) if h[:stdall] h[:environment] = Eye::System.prepare_env(h)
process: normalize stdall too
kostya_eye
train
e527650a0ca0c276f7ed4c6fcc785575b5f2619f
diff --git a/lib/docusign_rest/client.rb b/lib/docusign_rest/client.rb index <HASH>..<HASH> 100644 --- a/lib/docusign_rest/client.rb +++ b/lib/docusign_rest/client.rb @@ -221,7 +221,9 @@ module DocusignRest roleName: signer[:role_name], tabs: { textTabs: get_signer_tabs(signer[:text_tabs]), - checkboxTabs: get_signer_tabs(signer[:checkbox_tabs]) + checkboxTabs: get_signer_tabs(signer[:checkbox_tabs]), + fullNameTabs: get_signer_tabs(signer[:fullname_tabs]), + dateTabs: get_signer_tabs(signer[:date_tabs]) } } @@ -492,6 +494,40 @@ module DocusignRest end + # Internal: takes in an array of server template ids and an array of the signers + # and sets up the composite template + # + # Returns an array of server template hashes + def get_composite_template(server_template_ids, signers) + composite_array = [] + index = 0 + server_template_ids.each do |template_id| + server_template_hash = Hash[:sequence, index += 1, \ + :templateId, template_id] + templates_hash = Hash[:serverTemplates, [server_template_hash], \ + :inlineTemplates, get_inline_signers(signers, index += 1)] + composite_array << templates_hash + end + composite_array + end + + # Internal: takes signer info and the inline template sequence number + # and sets up the inline template + # + # Returns an array of signers + def get_inline_signers(signers, sequence) + signers_array = [] + signers.each do |signer| + signers_hash = Hash[:email, signer[:email], :name, signer[:name], \ + :recipientId, signer[:recipient_id], :roleName, signer[:role_name], \ + :clientUserId, signer[:email]] + signers_array << signers_hash + end + template_hash = Hash[:sequence, sequence, :recipients, { signers: signers_array }] + [template_hash] + end + + # Internal sets up the Net::HTTP request # # uri - The fully qualified final URI @@ -699,6 +735,50 @@ module DocusignRest end + # Public: create an envelope for delivery from a composite template + # + # headers - Optional hash of headers to merge into the existing + # required headers for a POST request. + # status - Options include: 'sent', or 'created' and + # determine if the envelope is sent out immediately or + # stored for sending at a later time + # email/body - Sets the text in the email body + # email/subject - Sets the text in the email subject line + # template_roles - See the get_template_roles method definition for a list + # of options to pass. Note: for consistency sake we call + # this 'signers' and not 'templateRoles' when we build up + # the request in client code. + # headers - Optional hash of headers to merge into the existing + # required headers for a multipart request. + # server_template_ids - Array of ids for templates uploaded to DocuSign. Templates + # will be added in the order they appear in the array. + # + # Returns a JSON parsed response body containing the envelope's: + # envelopeId - autogenerated ID provided by Docusign + # uri - the URI where the template is located on the DocuSign servers + # statusDateTime - The date/time the envelope was created + # status - Sent, created, or voided + def create_envelope_from_composite_template(options={}) + content_type = { 'Content-Type' => 'application/json' } + content_type.merge(options[:headers]) if options[:headers] + + post_body = { + status: options[:status], + compositeTemplates: get_composite_template(options[:server_template_ids], options[:signers]) + }.to_json + + uri = build_uri("/accounts/#{acct_id}/envelopes") + + http = initialize_net_http_ssl(uri) + + request = Net::HTTP::Post.new(uri.request_uri, headers(content_type)) + request.body = post_body + + response = http.request(request) + JSON.parse(response.body) + end + + # Public returns the names specified for a given email address (existing docusign user) # # email - the email of the recipient @@ -1052,6 +1132,19 @@ module DocusignRest JSON.parse(http.request(request).body) end + # Public: Retrieves a list of templates used in an envelope + # + # Returns templateId, name and uri for each template found. + # + # envelope_id - DS id of envelope with templates. + def get_templates_in_envelope(envelope_id) + uri = build_uri("/accounts/#{acct_id}/envelopes/#{envelope_id}/templates") + + http = initialize_net_http_ssl(uri) + request = Net::HTTP::Get.new(uri.request_uri, headers({ 'Content-Type' => 'application/json' })) + JSON.parse(http.request(request).body) + end + # Grabs envelope data. # Equivalent to the following call in the API explorer:
Add creation of an envelope from a composite template of server templates
jondkinney_docusign_rest
train
3b30776df67bf94276ca87773bb0ac3cf5546070
diff --git a/goatools/obo_parser.py b/goatools/obo_parser.py index <HASH>..<HASH> 100755 --- a/goatools/obo_parser.py +++ b/goatools/obo_parser.py @@ -221,44 +221,8 @@ class GODag(dict): return wrapped_label - def draw_lineage(self, rec, nodecolor="mediumseagreen", - edgecolor="lightslateblue", dpi=96, verbose=False): - # draw AMIGO style network, lineage containing one query record - try: - import pygraphviz as pgv - except: - print >>sys.stderr, "pygraphviz not installed, lineage not drawn!" - print >>sys.stderr, "try `easy_install pygraphviz`" - return - - G = pgv.AGraph() - edgeset = rec.get_all_parent_edges() | rec.get_all_child_edges() - edgeset = [(self._label_wrap(a), self._label_wrap(b)) for (a, b) in edgeset] - for src, target in edgeset: - # default layout in graphviz is top->bottom, so we invert the direction - # and plot using dir="back" - G.add_edge(target, src) - - G.graph_attr.update(dpi="%d" % dpi) - G.node_attr.update(shape="box", style="rounded,filled", - fillcolor="beige", color=nodecolor) - G.edge_attr.update(shape="normal", color=edgecolor, dir="back", label="is_a") - # highlight the query term - q = G.get_node(self._label_wrap(rec.id)) - q.attr.update(fillcolor="plum") - - if verbose: - print >>sys.stderr, G.to_string() - - lineage_img = "%s.png" % rec.id.replace(":", "_") - print >>sys.stderr, "lineage info for term %s written to %s" %\ - (rec.id, lineage_img) - - G.draw(lineage_img, prog="dot") - - - def draw_lineages(self, recs, nodecolor="mediumseagreen", - edgecolor="lightslateblue", dpi=96, verbose=False, lineage_img="GO_lineage,png"): + def draw_lineage(self, recs, nodecolor="mediumseagreen", + edgecolor="lightslateblue", dpi=96, verbose=False, lineage_img="GO_lineage.png"): # draw AMIGO style network, lineage containing one query record try: import pygraphviz as pgv @@ -271,7 +235,8 @@ class GODag(dict): edgeset = set() for rec in recs: edgeset.update(rec.get_all_parent_edges()) - #G.add_node(self._label_wrap(rec.id)) + edgeset.update(rec.get_all_child_edges()) + edgeset = [(self._label_wrap(a), self._label_wrap(b)) for (a, b) in edgeset] for src, target in edgeset: # default layout in graphviz is top->bottom, so we invert the direction @@ -281,7 +246,7 @@ class GODag(dict): G.graph_attr.update(dpi="%d" % dpi) G.node_attr.update(shape="box", style="rounded,filled", fillcolor="beige", color=nodecolor) - G.edge_attr.update(shape="normal", color=edgecolor, dir="back") + G.edge_attr.update(shape="normal", color=edgecolor, dir="back", label="is_a") # highlight the query terms for rec in recs: try: @@ -292,7 +257,8 @@ class GODag(dict): if verbose: print >>sys.stderr, G.to_string() - print >>sys.stderr, "lineage info written to %s" % lineage_img + print >>sys.stderr, "lineage info for terms %s written to %s" % \ + ([rec.id for rec in recs], lineage_img) G.draw(lineage_img, prog="dot") diff --git a/scripts/plot_go_term.py b/scripts/plot_go_term.py index <HASH>..<HASH> 100755 --- a/scripts/plot_go_term.py +++ b/scripts/plot_go_term.py @@ -37,5 +37,5 @@ if __name__ == '__main__': # run a test case if options.term is not None: rec = g.query_term(options.term, verbose=True) - g.draw_lineage(rec, dpi=50, verbose=True) + g.draw_lineage([rec], verbose=True)
avoid code dup in obo_parser.draw_lineage
tanghaibao_goatools
train
1a06288b654422e010250c9b184118cc9ca82cae
diff --git a/cmd2.py b/cmd2.py index <HASH>..<HASH> 100755 --- a/cmd2.py +++ b/cmd2.py @@ -1136,14 +1136,16 @@ class Cmd(cmd.Cmd): self.stdout.write(""" Commands are case-sensitive: {} Commands may be terminated with: {} - Command-line arguments allowed: {} + Arguments at invocation allowed: {} Output redirection and pipes allowed: {} Parsing of @options commands: - Use POSIX-style argument parser (vs Windows): {} - Strip Quotes when using Windows-style argument parser: {} - Use a list of arguments instead of a single argument string: {} + Shell lexer mode for command argument splitting: {} + Strip Quotes after splitting arguments: {} + Argument type: {} \n""".format(not self.case_insensitive, str(self.terminators), self.allow_cli_args, self.allow_redirection, - POSIX_SHLEX, STRIP_QUOTES_FOR_NON_POSIX, USE_ARG_LIST)) + "POSIX" if POSIX_SHLEX else "non-POSIX", + "True" if STRIP_QUOTES_FOR_NON_POSIX and not POSIX_SHLEX else "False", + "List of argument strings" if USE_ARG_LIST else "string of space-separated arguments")) def do_help(self, arg): """List available commands with "help" or detailed help with "help cmd"."""
Reword output for cmdenvironment. This is an attempt at making the cmdenvironment output more informative and easier to understand.
python-cmd2_cmd2
train
1b66316072f8a9962e96ea5f8c74fa3603fa2101
diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py index <HASH>..<HASH> 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -135,7 +135,6 @@ class _CLAVRxHelper: factor = attrs.pop('scale_factor', None) offset = attrs.pop('add_offset', None) valid_range = attrs.pop('valid_range', None) - print(attrs) if factor is not None and offset is not None: def scale_inplace(data): @@ -276,24 +275,8 @@ class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper): filetype_info) - self.sensor = self.get_sensor(self['/attr/sensor']) - self.platform = self.get_platform(self['/attr/platform']) - - @property - def get_sensor(sensor): - """Get the sensor.""" - for k, v in SENSORS.items(): - if k in sensor: - return v - raise ValueError("Unknown sensor '{}'".format(sensor)) - - @property - def get_platform(platform): - """Get the platform.""" - for k, v in PLATFORMS.items(): - if k in platform: - return v - return platform + self.sensor = self.get_sensor(self.file_content.get('/attr/sensor')) + self.platform = self.get_platform(self.file_content.get('/attr/platform')) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" @@ -371,7 +354,7 @@ class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper): return super(CLAVRXHDF4FileHandler, self).get_area_def(key) l1b_att = str(self.file_content.get('/attr/L1B', None)) - return self.helper._read_axi_fixed_grid(l1b_att) + return self._read_axi_fixed_grid(l1b_att) class CLAVRXNetCDFFileHandler(_CLAVRxHelper, BaseFileHandler):
Removed replicated code and fixed an old reference
pytroll_satpy
train
d8a6d802d881417c83c09c128991182465441a68
diff --git a/lib/deep_cover/core_ext/require_overrides.rb b/lib/deep_cover/core_ext/require_overrides.rb index <HASH>..<HASH> 100644 --- a/lib/deep_cover/core_ext/require_overrides.rb +++ b/lib/deep_cover/core_ext/require_overrides.rb @@ -1,5 +1,8 @@ # These are the monkeypatches to replace the default #require and # #require_relative in order to instrument the code before it gets run. +# Kernel.require and Kernel#require must both have their version because +# each can have been already overwritten individually. (Rubygems only +# overrides Kernel#require) class << Kernel alias_method :require_without_coverage, :require @@ -22,8 +25,14 @@ class << Kernel end module Kernel + alias_method :require_without_coverage, :require def require(path) - Kernel.require(path) + result = DeepCover.custom_requirer.require(path) + if [:not_found, :cover_failed, :not_supported].include?(result) + require_without_coverage(path) + else + result + end end def require_relative(path) diff --git a/spec/full_usage_spec.rb b/spec/full_usage_spec.rb index <HASH>..<HASH> 100644 --- a/spec/full_usage_spec.rb +++ b/spec/full_usage_spec.rb @@ -35,4 +35,16 @@ end RSpec.describe 'DeepCover usage' do it { 'simple/simple.rb'.should run_successfully.and_output('Done') } it { 'with_configure/test.rb'.should run_successfully.and_output('[1, 0, 2, 0, nil, 2, nil, nil]') } + + it 'Can still require gems when there is no bundler' do + ignore_output = {in: File::NULL, out: File::NULL, err: File::NULL} + Bundler.with_clean_env do + install_success = system("gem install --local spec/full_usage/tiny_gem-0.1.0.gem", ignore_output) + install_success.should be true + + require_success = system(%(ruby -e 'require "./lib/deep_cover"; DeepCover.start; require "tiny_gem"'), ignore_output) + require_success.should be true + end + end + end
Duplicate require's override. Rubygems only overrides one of them, so things mess up when Kernel.require is called instead.
deep-cover_deep-cover
train
56a7b534b60a3860b766c2d224bc836c79853486
diff --git a/src/sesame/packers.py b/src/sesame/packers.py index <HASH>..<HASH> 100644 --- a/src/sesame/packers.py +++ b/src/sesame/packers.py @@ -15,6 +15,9 @@ __all__ = [ "LongLongPacker", "UnsignedLongLongPacker", "UUIDPacker", + "BytesPacker", + "StrPacker", + "packer", ]
Add missing entries in __all__.
aaugustin_django-sesame
train
1aeee46cfcf98775564423b15d79cf643aa3dd48
diff --git a/salt/roster/scan.py b/salt/roster/scan.py index <HASH>..<HASH> 100644 --- a/salt/roster/scan.py +++ b/salt/roster/scan.py @@ -1,21 +1,13 @@ -# -*- coding: utf-8 -*- """ Scan a netmask or ipaddr for open ssh ports """ -# Import python libs -from __future__ import absolute_import, print_function, unicode_literals - import copy import logging import socket -# Import salt libs import salt.utils.network from salt._compat import ipaddress -from salt.ext import six - -# Import 3rd-party libs from salt.ext.six.moves import map # pylint: disable=import-error,redefined-builtin log = logging.getLogger(__name__) @@ -30,7 +22,7 @@ def targets(tgt, tgt_type="glob", **kwargs): return rmatcher.targets() -class RosterMatcher(object): +class RosterMatcher: """ Matcher for the roster data structure """ @@ -49,7 +41,7 @@ class RosterMatcher(object): ports = __opts__["ssh_scan_ports"] if not isinstance(ports, list): # Comma-separate list of integers - ports = list(map(int, six.text_type(ports).split(","))) + ports = list(map(int, str(ports).split(","))) if self.tgt_type == "list": tgts = self.tgt else: @@ -63,7 +55,7 @@ class RosterMatcher(object): except ValueError: pass for addr in addrs: - addr = six.text_type(addr) + addr = str(addr) ret[addr] = copy.deepcopy(__opts__.get("roster_defaults", {})) log.trace("Scanning host: %s", addr) for port in ports: @@ -75,6 +67,6 @@ class RosterMatcher(object): sock.shutdown(socket.SHUT_RDWR) sock.close() ret[addr].update({"host": addr, "port": port}) - except socket.error: + except OSError: pass return ret diff --git a/tests/unit/roster/test_scan.py b/tests/unit/roster/test_scan.py index <HASH>..<HASH> 100644 --- a/tests/unit/roster/test_scan.py +++ b/tests/unit/roster/test_scan.py @@ -1,19 +1,11 @@ -# -*- coding: utf-8 -*- - """ Test the scan roster. """ -# Import Python libs -from __future__ import absolute_import, print_function, unicode_literals - import socket -# Import Salt Libs import salt.roster.scan as scan_ from tests.support import mixins - -# Import Salt Testing Libs from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase
Ran pre-commit on the changed files
saltstack_salt
train
eb22f3bb6a7ec23443d4118ab8bdcb50f44938de
diff --git a/absl/logging/__init__.py b/absl/logging/__init__.py index <HASH>..<HASH> 100644 --- a/absl/logging/__init__.py +++ b/absl/logging/__init__.py @@ -50,6 +50,12 @@ is printed to the log. To avoid this, use the level_debug() function: if logging.level_debug(): logging.debug('Thing: %s', thing.ExpensiveOp()) +Per file level logging is supported by logging.vlog() and +logging.vlog_is_on(). For example: + + if logging.vlog_is_on(2): + logging.vlog(2, very_expensive_debug_message()) + Notes on Unicode: The log output is encoded as UTF-8. Don't pass data in other encodings in @@ -484,6 +490,31 @@ def vlog(level, msg, *args, **kwargs): log(level, msg, *args, **kwargs) +def vlog_is_on(level): + """Checks if vlog is enabled for the given level in caller's source file. + + Args: + level: int, the C++ verbose logging level at which to log the message, + e.g. 1, 2, 3, 4... While absl level constants are also supported, + callers should prefer level_debug|level_info|... calls for + checking those. + + Returns: + True if logging is turned on for that level. + """ + + if level > converter.ABSL_DEBUG: + # Even though this function supports level that is greater than 1, users + # should use logging.vlog instead for such cases. + # Treat this as vlog, 1 is equivalent to DEBUG. + standard_level = converter.STANDARD_DEBUG - (level - 1) + else: + if level < converter.ABSL_FATAL: + level = converter.ABSL_FATAL + standard_level = converter.absl_to_standard(level) + return _absl_logger.isEnabledFor(standard_level) + + def flush(): """Flushes all log files.""" get_absl_handler().flush() diff --git a/absl/logging/tests/logging_functional_test.py b/absl/logging/tests/logging_functional_test.py index <HASH>..<HASH> 100755 --- a/absl/logging/tests/logging_functional_test.py +++ b/absl/logging/tests/logging_functional_test.py @@ -44,6 +44,7 @@ I1231 23:59:59.000000 12345 logging_functional_test_helper.py:62] This line is V _PY_VLOG2_LOG_MESSAGE = """\ I1231 23:59:59.000000 12345 logging_functional_test_helper.py:64] This line is VLOG level 2 I1231 23:59:59.000000 12345 logging_functional_test_helper.py:64] This line is log level 2 +I1231 23:59:59.000000 12345 logging_functional_test_helper.py:64] VLOG level 1, but only if VLOG level 2 is active """ # VLOG1 is the same as DEBUG logs. diff --git a/absl/logging/tests/logging_functional_test_helper.py b/absl/logging/tests/logging_functional_test_helper.py index <HASH>..<HASH> 100755 --- a/absl/logging/tests/logging_functional_test_helper.py +++ b/absl/logging/tests/logging_functional_test_helper.py @@ -50,6 +50,8 @@ def _test_do_logging(): logging.vlog(3, 'This line is VLOG level 3') logging.vlog(2, 'This line is VLOG level 2') logging.log(2, 'This line is log level 2') + if logging.vlog_is_on(2): + logging.log(1, 'VLOG level 1, but only if VLOG level 2 is active') logging.vlog(1, 'This line is VLOG level 1') logging.log(1, 'This line is log level 1')
Expose vlog_is_on() to allow avoiding expensive computations for a vlog() call. A common pattern for higher logging levels is to check if the level is "on" because the messages can be expensive to generate, so one wants to condition their generation on the level being on. PiperOrigin-RevId: <I>
abseil_abseil-py
train
6ef2e587bcf6b02a9b197b6b50dc96b232f32ab3
diff --git a/choix/utils.py b/choix/utils.py index <HASH>..<HASH> 100644 --- a/choix/utils.py +++ b/choix/utils.py @@ -183,7 +183,7 @@ def log_likelihood_network( for i in range(len(traffic_in)): loglik += traffic_in[i] * params[i] if digraph.out_degree(i) > 0: - neighbors = digraph.successors(i) + neighbors = list(digraph.successors(i)) if weight is None: loglik -= traffic_out[i] * logsumexp(params.take(neighbors)) else:
Fix bug that appeared with NetworkX <I>. Starting with the <I> release of NetworkX, `DiGraph.successors` returns an iterator instead of a list. This caused `log_likelihood_network` to crash.
lucasmaystre_choix
train
c7542292c9d50f394aaad41db239f4096b726ed4
diff --git a/lib/Alchemy/Phrasea/Controller/Datafiles.php b/lib/Alchemy/Phrasea/Controller/Datafiles.php index <HASH>..<HASH> 100644 --- a/lib/Alchemy/Phrasea/Controller/Datafiles.php +++ b/lib/Alchemy/Phrasea/Controller/Datafiles.php @@ -26,6 +26,8 @@ class Datafiles extends AbstractDelivery { public function connect(Application $app) { + $app['controller.datafiles'] = $this; + $controllers = $app['controllers_factory']; $that = $this; diff --git a/lib/Alchemy/Phrasea/Controller/Lightbox.php b/lib/Alchemy/Phrasea/Controller/Lightbox.php index <HASH>..<HASH> 100644 --- a/lib/Alchemy/Phrasea/Controller/Lightbox.php +++ b/lib/Alchemy/Phrasea/Controller/Lightbox.php @@ -25,6 +25,8 @@ class Lightbox implements ControllerProviderInterface { public function connect(SilexApplication $app) { + $app['controller.lightbox'] = $this; + $controllers = $app['controllers_factory']; $controllers->before(function(Request $request) use ($app) { diff --git a/lib/Alchemy/Phrasea/Controller/Minifier.php b/lib/Alchemy/Phrasea/Controller/Minifier.php index <HASH>..<HASH> 100644 --- a/lib/Alchemy/Phrasea/Controller/Minifier.php +++ b/lib/Alchemy/Phrasea/Controller/Minifier.php @@ -21,6 +21,8 @@ class Minifier implements ControllerProviderInterface { public function connect(Application $app) { + $app['controller.minifier'] = $this; + $controllers = $app['controllers_factory']; $controllers->get('/', function (Application $app, Request $request) { diff --git a/lib/Alchemy/Phrasea/Controller/Permalink.php b/lib/Alchemy/Phrasea/Controller/Permalink.php index <HASH>..<HASH> 100644 --- a/lib/Alchemy/Phrasea/Controller/Permalink.php +++ b/lib/Alchemy/Phrasea/Controller/Permalink.php @@ -24,9 +24,10 @@ use Symfony\Component\HttpKernel\Exception\NotFoundHttpException; */ class Permalink extends AbstractDelivery { - public function connect(Application $app) { + $app['controller.permalink'] = $this; + $controllers = $app['controllers_factory']; $that = $this; diff --git a/lib/Alchemy/Phrasea/Controller/Setup.php b/lib/Alchemy/Phrasea/Controller/Setup.php index <HASH>..<HASH> 100644 --- a/lib/Alchemy/Phrasea/Controller/Setup.php +++ b/lib/Alchemy/Phrasea/Controller/Setup.php @@ -26,10 +26,10 @@ class Setup implements ControllerProviderInterface { public function connect(SilexApplication $app) { - $controllers = $app['controllers_factory']; - $app['controller.setup'] = $this; + $controllers = $app['controllers_factory']; + $controllers->get('/', function(Application $app) { return $app->redirectPath('install_root'); })->bind('setup');
Declare root controllers as services, remove "call" methods
alchemy-fr_Phraseanet
train
1185c48f58fb13ea2b46aaf564a5d648fb8df6a9
diff --git a/rpcserver.go b/rpcserver.go index <HASH>..<HASH> 100644 --- a/rpcserver.go +++ b/rpcserver.go @@ -3540,6 +3540,7 @@ func (r *rpcServer) SubscribeTransactions(req *lnrpc.GetTransactionsRequest, BlockHash: tx.BlockHash.String(), TimeStamp: tx.Timestamp, TotalFees: tx.TotalFees, + RawTxHex: hex.EncodeToString(tx.RawTx), } if err := updateStream.Send(detail); err != nil { return err @@ -3551,6 +3552,7 @@ func (r *rpcServer) SubscribeTransactions(req *lnrpc.GetTransactionsRequest, Amount: int64(tx.Value), TimeStamp: tx.Timestamp, TotalFees: tx.TotalFees, + RawTxHex: hex.EncodeToString(tx.RawTx), } if err := updateStream.Send(detail); err != nil { return err
rpc: set new raw tx hex in SubscribeTransactions resp
lightningnetwork_lnd
train
cfc31f7a15603f4bb9662e771fb4753861315102
diff --git a/jetstream/jsm/jsm.go b/jetstream/jsm/jsm.go index <HASH>..<HASH> 100644 --- a/jetstream/jsm/jsm.go +++ b/jetstream/jsm/jsm.go @@ -398,7 +398,7 @@ func getMsgSetInfo(nc *nats.Conn, name string) { cfg := &msi.Config log.Println() log.Printf("Subjects: %+v", cfg.Subjects) - log.Printf("Retention: %s", cfg.Retention) + log.Printf("Retention: %s - %s", cfg.Storage, cfg.Retention) log.Printf("TTL: %v", cfg.MaxAge) log.Printf("Messages: %s of %s", humanize.Comma(int64(mstats.Msgs)), diff --git a/server/jetstream.go b/server/jetstream.go index <HASH>..<HASH> 100644 --- a/server/jetstream.go +++ b/server/jetstream.go @@ -133,6 +133,9 @@ const ( // JetStreamRequestNextPre is the prefix for the request next message(s) for an observable in worker/pull mode. JetStreamRequestNextPre = "$JS.RN" + + // JetStreamMsgBySeq is the prefix for direct requests for a message by message set sequence number + JetStreamMsgBySeqPre = "$JS.BYSEQ" ) // This is for internal accounting for JetStream for this server. diff --git a/server/msgset.go b/server/msgset.go index <HASH>..<HASH> 100644 --- a/server/msgset.go +++ b/server/msgset.go @@ -14,6 +14,7 @@ package server import ( + "encoding/json" "fmt" "path" "strconv" @@ -221,6 +222,11 @@ func (mset *MsgSet) subscribeToMsgSet() error { return err } } + // Now subscribe for direct access + subj := fmt.Sprintf("%s.%s", JetStreamMsgBySeqPre, mset.config.Name) + if _, err := mset.subscribeInternal(subj, mset.processMsgBySeq); err != nil { + return err + } return nil } @@ -285,6 +291,49 @@ func (mset *MsgSet) setupStore(storeDir string) error { return nil } +// processMsgBySeq will return the message at the given sequence, or an -ERR if not found. +func (mset *MsgSet) processMsgBySeq(_ *subscription, _ *client, subject, reply string, msg []byte) { + mset.mu.Lock() + store := mset.store + c := mset.client + name := mset.config.Name + mset.mu.Unlock() + + if c == nil { + return + } + var response []byte + + if len(msg) == 0 { + c.Debugf("JetStream request for message from message set: %q - %q no sequence arg", c.acc.Name, name) + response = []byte("-ERR 'sequence argument missing'") + mset.sendq <- &jsPubMsg{reply, _EMPTY_, _EMPTY_, response, nil, 0} + return + } + seq, err := strconv.ParseUint(string(msg), 10, 64) + if err != nil { + c.Debugf("JetStream request for message from message: %q - %q bad sequence arg %q", c.acc.Name, name, msg) + response = []byte("-ERR 'bad sequence argument'") + mset.sendq <- &jsPubMsg{reply, _EMPTY_, _EMPTY_, response, nil, 0} + return + } + + subj, msg, ts, err := store.LoadMsg(seq) + if err != nil { + c.Debugf("JetStream request for message: %q - %q - %d error %v", c.acc.Name, name, seq, err) + response = []byte("-ERR 'bad sequence argument'") + mset.sendq <- &jsPubMsg{reply, _EMPTY_, _EMPTY_, response, nil, 0} + return + } + sm := &StoredMsg{ + Subject: subj, + Data: msg, + Time: time.Unix(0, ts), + } + response, _ = json.MarshalIndent(sm, "", " ") + mset.sendq <- &jsPubMsg{reply, _EMPTY_, _EMPTY_, response, nil, 0} +} + // processInboundJetStreamMsg handles processing messages bound for a message set. func (mset *MsgSet) processInboundJetStreamMsg(_ *subscription, _ *client, subject, reply string, msg []byte) { mset.mu.Lock() @@ -357,6 +406,12 @@ type jsPubMsg struct { seq uint64 } +type StoredMsg struct { + Subject string + Data []byte + Time time.Time +} + // TODO(dlc) - Maybe look at onering instead of chan - https://github.com/pltr/onering const msetSendQSize = 1024 diff --git a/server/store.go b/server/store.go index <HASH>..<HASH> 100644 --- a/server/store.go +++ b/server/store.go @@ -17,6 +17,7 @@ import ( "encoding/json" "errors" "fmt" + "strings" "time" ) @@ -140,6 +141,17 @@ const ( fileStorageString = "file" ) +func (st StorageType) String() string { + switch st { + case MemoryStorage: + return strings.Title(memoryStorageString) + case FileStorage: + return strings.Title(fileStorageString) + default: + return "Unknown Storage Type" + } +} + func (st StorageType) MarshalJSON() ([]byte, error) { switch st { case MemoryStorage:
Allow direct lookup of message from msgset
nats-io_gnatsd
train
990c19dce2485ae54883d54cdd0ffd636cf1798f
diff --git a/aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/model/S3ObjectInputStream.java b/aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/model/S3ObjectInputStream.java index <HASH>..<HASH> 100644 --- a/aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/model/S3ObjectInputStream.java +++ b/aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/model/S3ObjectInputStream.java @@ -28,12 +28,12 @@ import com.amazonaws.metrics.MetricFilterInputStream; import com.amazonaws.services.s3.metrics.S3ServiceMetric; import com.amazonaws.util.IOUtils; -/** - * Input stream representing the content of an {@link S3Object}. In addition to - * the methods supplied by the {@link InputStream} class, - * {@link S3ObjectInputStream} supplies the abort() method, which will terminate - * an HTTP connection to the S3 object. - */ +/** + * Input stream representing the content of an {@link S3Object}. In addition to + * the methods supplied by the {@link InputStream} class, + * {@link S3ObjectInputStream} supplies the abort() method, which will terminate + * an HTTP connection to the S3 object. + */ public class S3ObjectInputStream extends SdkFilterInputStream { private final HttpRequestBase httpRequest; @@ -71,23 +71,22 @@ public class S3ObjectInputStream extends SdkFilterInputStream { return true; // this is a raw input stream so metric wrapping is necessary } - /** - * {@inheritDoc} - * - * Aborts the underlying http request without reading any more data and - * closes the stream. - * <p> - * By default Apache {@link HttpClient} tries to reuse http connections by - * reading to the end of an attached input stream on - * {@link InputStream#close()}. This is efficient from a socket pool - * management perspective, but for objects with large payloads can incur - * significant overhead while bytes are read from s3 and discarded. It's up - * to clients to decide when to take the performance hit implicit in not - * reusing an http connection in order to not read unnecessary information - * from S3. - * - * @see EofSensorInputStream - */ + /** + * {@inheritDoc} + * + * Aborts the underlying HTTP request without reading any more data and + * closes the stream. + * <p> + * By default Apache {@link HttpClient} tries to reuse http connections by + * reading to the end of an attached input stream on {@link + * InputStream#close()}. This is efficient from a socket pool management + * perspective, but objects with large payloads can incur significant + * overhead while bytes are read from s3 and discarded. It's up to clients + * to decide when to take the performance hit implicit in not reusing an + * http connection in order to not read unnecessary information from S3. + * + * @see EofSensorInputStream + */ @Override public void abort() { doAbort(); @@ -111,16 +110,16 @@ public class S3ObjectInputStream extends SdkFilterInputStream { return httpRequest; } - /** - * Returns super.available() if the value is not zero or else always returns - * 1. This is necessary to get around a GZIPInputStream bug which would - * mis-behave in some edge cases upon zero returned from available(), - * causing file truncation. - * <p> - * http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144 - * <p> - * Reference TT: 0034867351 - */ + /** + * Returns the value of super.available() if the result is nonzero, or 1 + * otherwise. + * <p> + * This is necessary to work around a known bug in + * GZIPInputStream.available(), which returns zero in some edge cases, + * causing file truncation. + * <p> + * Ref: http://bugs.java.com/bugdatabase/view_bug.do?bug_id=7036144 + */ @Override public int available() throws IOException { int estimate = super.available(); @@ -168,14 +167,14 @@ public class S3ObjectInputStream extends SdkFilterInputStream { eof = false; } - /** - * {@inheritDoc} - * - * Delegate to {@link S3ObjectInputStream#abort()} if there is data remaining in the stream. If the stream has been - * read completely, with no data remaining, safely close the stream. - * - * @see {@link S3ObjectInputStream#abort()} - */ + /** + * {@inheritDoc} + * + * Delegates to {@link S3ObjectInputStream#abort()} if there is any data + * remaining in the stream. Otherwise, it safely closes the stream. + * + * @see {@link S3ObjectInputStream#abort()} + */ @Override public void close() throws IOException { if (eof) {
Fixes for a few javadoc comments.
aws_aws-sdk-java
train
511c6f79859a38e8c9da48744c11f267298ae772
diff --git a/pywb/rewrite/regex_rewriters.py b/pywb/rewrite/regex_rewriters.py index <HASH>..<HASH> 100644 --- a/pywb/rewrite/regex_rewriters.py +++ b/pywb/rewrite/regex_rewriters.py @@ -103,9 +103,9 @@ if (thisObj && thisObj._WB_wombat_obj_proxy) return thisObj._WB_wombat_obj_proxy rules = [ # rewriting 'eval(....)' - invocation - (r'\beval\s*\(', self.add_prefix('WB_wombat_runEval(function _____evalIsEvil(_______eval_arg$$) { return eval(_______eval_arg$$); }.bind(this)).'), 0), + (r'(?<![$])\beval\s*\(', self.add_prefix('WB_wombat_runEval(function _____evalIsEvil(_______eval_arg$$) { return eval(_______eval_arg$$); }.bind(this)).'), 0), # rewriting 'x = eval' - no invocation - (r'\beval\b', self.add_prefix('WB_wombat_'), 0), + (r'(?<![$])\beval\b', self.add_prefix('WB_wombat_'), 0), (r'(?<=\.)postMessage\b\(', self.add_prefix('__WB_pmw(self).'), 0), (r'(?<![$.])\s*location\b\s*[=]\s*(?![=])', self.add_suffix(check_loc), 0), # rewriting 'return this' diff --git a/pywb/rewrite/test/test_regex_rewriters.py b/pywb/rewrite/test/test_regex_rewriters.py index <HASH>..<HASH> 100644 --- a/pywb/rewrite/test/test_regex_rewriters.py +++ b/pywb/rewrite/test/test_regex_rewriters.py @@ -212,7 +212,23 @@ r""" >>> _test_js_obj_proxy(r'this. location = http://example.com/') 'this. location = ((self.__WB_check_loc && self.__WB_check_loc(location)) || {}).href = http://example.com/' +>>> _test_js_obj_proxy('eval(a)') +'WB_wombat_runEval(function _____evalIsEvil(_______eval_arg$$) { return eval(_______eval_arg$$); }.bind(this)).eval(a)' +>>> _test_js_obj_proxy('this.$eval(a)') +'this.$eval(a)' + +>>> _test_js_obj_proxy('x = this.$eval; x(a);') +'x = this.$eval; x(a);' + +>>> _test_js_obj_proxy('x = eval; x(a);') +'x = WB_wombat_eval; x(a);' + +>>> _test_js_obj_proxy('$eval = eval; $eval(a);') +'$eval = WB_wombat_eval; $eval(a);' + +>>> _test_js_obj_proxy('window.eval(a);') +'window.WB_wombat_runEval(function _____evalIsEvil(_______eval_arg$$) { return eval(_______eval_arg$$); }.bind(this)).eval(a);' #================================================================= # XML Rewriting
Ensured that the regular expressions for rewriting JavaScript eval usage do not match "$eval", only the "eval" identifier (#<I>). Added tests for the new JS eval rewriting regex tweaks.
webrecorder_pywb
train
ee842bb5f381cdc984f064f57cb721c75f145b55
diff --git a/lib/sproutcore-handlebars/tests/handlebars_test.js b/lib/sproutcore-handlebars/tests/handlebars_test.js index <HASH>..<HASH> 100644 --- a/lib/sproutcore-handlebars/tests/handlebars_test.js +++ b/lib/sproutcore-handlebars/tests/handlebars_test.js @@ -617,23 +617,23 @@ test("Child views created using the view helper should have their IDs registered equals(SC.View.views[id], childView, 'childView with passed ID is registered with SC.View.views so that it can properly receive events from RootResponder'); }); -// test("Collection views that specify an example view class have their children be of that class", function() { -// TemplateTests.ExampleViewCollection = SC.TemplateCollectionView.create({ -// itemView: SC.View.extend({ -// isCustom: YES -// }), +test("Collection views that specify an example view class have their children be of that class", function() { + TemplateTests.ExampleViewCollection = SC.CollectionView.extend({ + itemViewClass: SC.View.extend({ + isCustom: true + }), -// content: ['foo'] -// }); + content: ['foo'] + }); -// var parentView = SC.View.create({ -// template: SC.Handlebars.compile('{{#collection "TemplateTests.ExampleViewCollection"}}OHAI{{/collection}}') -// }); + var parentView = SC.View.create({ + template: SC.Handlebars.compile('{{#collection "TemplateTests.ExampleViewCollection"}}OHAI{{/collection}}') + }); -// parentView.createElement(); + parentView.createElement(); -// ok(parentView.childViews[0].childViews[0].isCustom, "uses the example view class"); -// }); + ok(parentView.childViews[0].childViews[0].isCustom, "uses the example view class"); +}); test("should update boundIf blocks if the conditional changes", function() { var templates = SC.Object.create({ @@ -881,3 +881,4 @@ test("should be able to add multiple classes using {{bindAttr class}}", function ok(!view.$('div').hasClass('is-awesome-sauce'), "removes dasherized class when property is set to false"); }); +
Uncomment Handlebars unit test.
emberjs_ember.js
train
e870ae095b18caaca6d90c495c0d68de7b85b70d
diff --git a/Resources/public/js/banner.js b/Resources/public/js/banner.js index <HASH>..<HASH> 100644 --- a/Resources/public/js/banner.js +++ b/Resources/public/js/banner.js @@ -116,19 +116,15 @@ bannerBackgroundImagePositionField.val(bannerBackgroundImagePosition); - updateBannerBackgroundImagePosition(); + updateBannerBackgroundImage(); }); - function updateBannerBackgroundImagePosition() - { - banner.css('background-position', bannerBackgroundImagePositionField.val()); - } - function updateBannerBackgroundImage() { var repeatString = bannerBackgroundImageRepeatField.val(); var selectedPosition = $(".orientation_btn.selected", bannerBackgroundImagePositionBlock).data('value').split(" "); banner.css('background-repeat', repeatString); + banner.css('background-position', bannerBackgroundImagePositionField.val()); } })(jQuery); \ No newline at end of file
[BlogBundle] Updating the position of the image is now done in the updateBannerBackgroundImage method
claroline_Distribution
train
f96faf11fde64080437f7caf99b0d2d4097aee9d
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -482,7 +482,7 @@ class Offline { // this.serverlessLog(protectedRoutes); // Check for APIKey - if (_.includes(protectedRoutes, `${routeMethod}#${fullPath}`) || _.includes(protectedRoutes, `ANY#${fullPath}`)) { + if ((_.includes(protectedRoutes, `${routeMethod}#${fullPath}`) || _.includes(protectedRoutes, `ANY#${fullPath}`)) && !this.options.noAuth) { const errorResponse = response => response({ message: 'Forbidden' }).code(403).type('application/json').header('x-amzn-ErrorType', 'ForbiddenException'); if ('x-api-key' in request.headers) { const requestToken = request.headers['x-api-key'];
Fix protected routes requiring API key even if noAuth option is set
dherault_serverless-offline
train
0376c1aadea0407ad99b3156ae53f57deac68144
diff --git a/bcbio/install.py b/bcbio/install.py index <HASH>..<HASH> 100644 --- a/bcbio/install.py +++ b/bcbio/install.py @@ -325,7 +325,7 @@ def _install_gemini(tooldir, datadir, args): else: url = "https://raw.github.com/arq5x/gemini/master/gemini/scripts/gemini_install.py" script = os.path.basename(url) - subprocess.check_call(["wget", "-O", script, url]) + subprocess.check_call(["wget", "-O", script, url, "--no-check-certificate"]) cmd = [sys.executable, "-E", script, tooldir, datadir, "--notools", "--nodata", "--sharedpy"] if not args.sudo: cmd.append("--nosudo")
Add "--no-check-certificate" to the wget call for gemini_install.py. Not sure if everyone has the same issue, though.
bcbio_bcbio-nextgen
train
c5c3b639601022322621d77f6fc81b0bbc6309ba
diff --git a/app/preparation/mzidplus.py b/app/preparation/mzidplus.py index <HASH>..<HASH> 100644 --- a/app/preparation/mzidplus.py +++ b/app/preparation/mzidplus.py @@ -33,8 +33,9 @@ def add_percolator_to_mzidtsv(mzidfn, tsvfn, multipsm, seqdb=None): """Takes a MSGF+ tsv and adds percolator data from corresponding mzIdentML that has been generated with pout2mzid. """ - specfnids = readers.get_mzid_specfile_ids(mzidfn) - specresults = readers.mzid_spec_result_generator(mzidfn) + namespace = readers.get_mzid_namespace(mzidfn) + specfnids = readers.get_mzid_specfile_ids(mzidfn, namespace) + specresults = readers.mzid_spec_result_generator(mzidfn, namespace) with open(tsvfn) as mzidfp: # skip header next(mzidfp) diff --git a/app/readers/basereader.py b/app/readers/basereader.py index <HASH>..<HASH> 100644 --- a/app/readers/basereader.py +++ b/app/readers/basereader.py @@ -2,6 +2,11 @@ from lxml import etree from app import formatting +def get_namespace_from_top(fn, key='xmlns'): + ac, el = etree.iterparse(fn).next() + return {'xmlns': el.nsmap[key]} + + def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None): """ Base generator for percolator xml psm, peptide, protein output. @@ -14,7 +19,8 @@ def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None): for ac, el in etree.iterparse(fn): if el.tag == '{0}{1}'.format(xmlns, tag): yield el - elif el.tag in ['{0}{1}'.format(xmlns, x) for x in ignore_tags]: + elif el.tag not in ['{0}{1}'.format(xmlns, x) for x in + ignore_tags]: formatting.clear_el(el) @@ -25,5 +31,3 @@ def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags): """ for el in generate_tags_multiple_files(input_files, tag, ignore_tags, ns): yield formatting.string_and_clear(el, ns) - - diff --git a/app/readers/mzidplus.py b/app/readers/mzidplus.py index <HASH>..<HASH> 100644 --- a/app/readers/mzidplus.py +++ b/app/readers/mzidplus.py @@ -4,9 +4,13 @@ import basereader import ml -def mzid_spec_result_generator(mzidfile): +def get_mzid_namespace(mzidfile): + return basereader.get_namespace_from_top(mzidfile, None) + + +def mzid_spec_result_generator(mzidfile, namespace): return basereader.generate_tags_multiple_files( - [mzidfile], + [mzidfile], namespace, 'SpectrumIdentificationResult', ['MzIdentML', 'DataCollection', @@ -14,20 +18,20 @@ def mzid_spec_result_generator(mzidfile): 'SpectrumIdentificationList']) -def mzid_specdata_generator(mzidfile): +def mzid_specdata_generator(mzidfile, namespace): return basereader.generate_tags_multiple_files( - [mzidfile], + [mzidfile], namespace, 'SpectraData', ['MzIdentML', 'DataCollection', 'Inputs']) -def get_mzid_specfile_ids(mzidfn): +def get_mzid_specfile_ids(mzidfn, namespace): """Returns mzid spectra data filenames and their IDs used in the mzIdentML file as a dict. Keys == IDs, values == fns""" sid_fn = {} - for specdata in mzid_spec_result_generator(mzidfn): + for specdata in mzid_spec_result_generator(mzidfn, namespace): sid_fn[specdata.attrib['id']] = specdata.attrib['name'] return sid_fn
Realized mzIdentML also has a namespace and started adding support for it
glormph_msstitch
train
95e2e84cbd81ba36abb13629390121154de07d52
diff --git a/tests/src/ColumnTest.php b/tests/src/ColumnTest.php index <HASH>..<HASH> 100644 --- a/tests/src/ColumnTest.php +++ b/tests/src/ColumnTest.php @@ -62,10 +62,20 @@ class ColumnTest extends \PHPUnit_Framework_TestCase $info['primary'] ); - eval('$actual = ' . var_export($col, true) . ';'); - - foreach ($info as $key => $expect) { - $this->assertSame($expect, $actual->$key); - } + $actual = var_export($col, true); + $expect = <<<EXPECT +Aura\Sql_Schema\Column::__set_state(array( + 'name' => 'cost', + 'type' => 'numeric', + 'size' => 10, + 'scale' => 2, + 'notnull' => true, + 'default' => NULL, + 'autoinc' => false, + 'primary' => false, +)) +EXPECT; + + $this->assertSame($expect, $actual); } }
Try fix for HHVM
auraphp_Aura.SqlSchema
train
e879f9b92f55ffd9a02bac77158621d225133bb1
diff --git a/lib/queryBuilder/operations/InsertOperation.js b/lib/queryBuilder/operations/InsertOperation.js index <HASH>..<HASH> 100644 --- a/lib/queryBuilder/operations/InsertOperation.js +++ b/lib/queryBuilder/operations/InsertOperation.js @@ -2,7 +2,7 @@ const { QueryBuilderOperation } = require('./QueryBuilderOperation'); const { mapAfterAllReturn } = require('../../utils/promiseUtils'); -const { isPostgres, isSqlite } = require('../../utils/knexUtils'); +const { isPostgres, isSqlite, isMySql } = require('../../utils/knexUtils'); const { isObject } = require('../../utils/objectUtils'); // Base class for all insert operations. @@ -38,7 +38,7 @@ class InsertOperation extends QueryBuilderOperation { } onBuildKnex(knexBuilder, builder) { - if (!isSqlite(builder.knex()) && !builder.has(/returning/)) { + if (!isSqlite(builder.knex() && !isMySql(builder.knex())) && !builder.has(/returning/)) { // If the user hasn't specified a `returning` clause, we make sure // that at least the identifier is returned. knexBuilder.returning(builder.modelClass().getIdColumn());
Exclude MySQL from returning clause as well
Vincit_objection.js
train
8ac6b4025a3cc7a5bbd5b4dbbc7dab60c08891b7
diff --git a/pytuya/const.py b/pytuya/const.py index <HASH>..<HASH> 100644 --- a/pytuya/const.py +++ b/pytuya/const.py @@ -1,3 +1,3 @@ -version_tuple = (7, 0, 6) +version_tuple = (7, 0, 7) version = __version__ = '%d.%d.%d' % version_tuple __author__ = 'clach04'
Version bump in prep for <I>
clach04_python-tuya
train
834b1a343279bc3468b950bb57cab2250f22d644
diff --git a/lib/rules/_base.js b/lib/rules/_base.js index <HASH>..<HASH> 100644 --- a/lib/rules/_base.js +++ b/lib/rules/_base.js @@ -14,7 +14,6 @@ const MODULE_NAME = Symbol('_moduleName'); // array so only the first one will log the error, and subsequent rules will // see it already in the array and not log it. const loggedNodes = []; -const loggedRules = new Set(); const reLines = /(.*?(?:\r\n?|\n|$))/gm; const reWhitespace = /\s+/; @@ -517,25 +516,21 @@ module.exports = class { rule: this.ruleName, severity: this.severity, }; - let hasAllLocProps = ['line', 'column', 'endLine', 'endColumn'].every((prop) => prop in result); + const REQUIRED_LOC_PROPERTIES = ['line', 'column', 'endLine', 'endColumn']; + let hasAllLocProps = REQUIRED_LOC_PROPERTIES.every((prop) => prop in result); if (this.filePath) { defaults.filePath = this.filePath; } if (!result.node && !hasAllLocProps) { - if (!loggedRules.has(this.ruleName)) { - let message = `ember-template-lint: (${this.ruleName}) Calling the log method without passing all required loc (line, column, endLine, endColumn) -properties or the node property is deprecated. Please ensure you pass either the loc properties or the node in the log method's result.`; - - if (process.env.EMBER_TEMPLATE_LINT_DEV_MODE === '1') { - throw new Error(message); - } else { - this._console.log(message); - - loggedRules.add(this.ruleName); - } - } + throw new Error( + `ember-template-lint: (${ + this.ruleName + }) Must pass the node or all loc properties (${REQUIRED_LOC_PROPERTIES.join( + ', ' + )}) when calling log.` + ); } // perform the node property expansion only if those properties don't exist in result diff --git a/test/jest-setup.js b/test/jest-setup.js index <HASH>..<HASH> 100644 --- a/test/jest-setup.js +++ b/test/jest-setup.js @@ -1,4 +1,2 @@ // eslint-disable-next-line import/no-unassigned-import require('@microsoft/jest-sarif'); - -process.env.EMBER_TEMPLATE_LINT_DEV_MODE = '1';
breaking: when logging a rule violation, require passing node or all loc properties
ember-template-lint_ember-template-lint
train
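To make the stricter contract concrete, here is a minimal sketch of a rule body that satisfies it. The rule name, message, and visitor shape are hypothetical, and the base Rule import is assumed to match the package's public API; only the log() call shape is taken from the diff.

// Hypothetical rule illustrating the new log() requirement (sketch only).
const { Rule } = require('ember-template-lint'); // assumed export

module.exports = class NoFooAttribute extends Rule {
  visitor() {
    return {
      AttrNode: (node) => {
        if (node.name === 'foo') {
          // Either pass the node (loc properties are expanded from it)...
          this.log({ message: 'Do not use the foo attribute', node });
          // ...or spell out all four loc properties; anything less now throws.
          this.log({
            message: 'Do not use the foo attribute',
            line: node.loc.start.line,
            column: node.loc.start.column,
            endLine: node.loc.end.line,
            endColumn: node.loc.end.column,
          });
        }
      },
    };
  }
};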
4d0114d8356da15d4a7e223eb837fd77032b8515
diff --git a/tests/BaseTest.php b/tests/BaseTest.php index <HASH>..<HASH> 100644 --- a/tests/BaseTest.php +++ b/tests/BaseTest.php @@ -660,7 +660,7 @@ abstract class BaseTest extends PHPUnit_Framework_TestCase return [ [ - 'https://github.com/vaites/php-apache-tika/raw/master/samples/sample6.pdf' + 'https://raw.githubusercontent.com/vaites/php-apache-tika/master/samples/sample6.pdf' ] ]; }
Fixed remote sample URL (again)
vaites_php-apache-tika
train
b7323b4bb9ebe6ebc8eb4dc29156cc2195c50526
diff --git a/openquake/hazardlib/probability_map.py b/openquake/hazardlib/probability_map.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/probability_map.py +++ b/openquake/hazardlib/probability_map.py @@ -81,10 +81,10 @@ class ProbabilityCurve(object): :param imtls: DictArray instance :param idx: extract the data corresponding to the given inner index """ - curve = numpy.zeros(1, imtls.imt_dt)[0] + curve = numpy.zeros(1, imtls.imt_dt) for imt in imtls: curve[imt] = self.array[imtls.slicedic[imt], idx] - return curve + return curve[0] class ProbabilityMap(dict):
Worked around a bug in numpy <I>
gem_oq-engine
train
ef88a797b5f60119f7b08a09bda56b343f4fc4e2
diff --git a/code/src/main/com/lmax/disruptor/GroupSequence.java b/code/src/main/com/lmax/disruptor/GroupSequence.java index <HASH>..<HASH> 100644 --- a/code/src/main/com/lmax/disruptor/GroupSequence.java +++ b/code/src/main/com/lmax/disruptor/GroupSequence.java @@ -54,6 +54,11 @@ public final class GroupSequence extends Sequence return minimum; } + /** + * Set all {@link Sequence}s in the group to a given value. + * + * @param value to set the group of sequences to. + */ @Override public void set(final long value) { @@ -74,13 +79,14 @@ public final class GroupSequence extends Sequence } /** - * Remove a {@link Sequence} from this aggregate. + * Remove the first occurrence of the {@link Sequence} from this aggregate. * * @param sequence to be removed from this aggregate. + * @return true if the sequence was removed otherwise false. */ - public void remove(final Sequence sequence) + public boolean remove(final Sequence sequence) { - aggregateSequences.remove(sequence); + return aggregateSequences.remove(sequence); } /** diff --git a/code/src/test/com/lmax/disruptor/GroupSequenceTest.java b/code/src/test/com/lmax/disruptor/GroupSequenceTest.java index <HASH>..<HASH> 100644 --- a/code/src/test/com/lmax/disruptor/GroupSequenceTest.java +++ b/code/src/test/com/lmax/disruptor/GroupSequenceTest.java @@ -18,6 +18,7 @@ package com.lmax.disruptor; import org.junit.Test; import static junit.framework.Assert.assertEquals; +import static junit.framework.Assert.assertTrue; public final class GroupSequenceTest { @@ -75,7 +76,7 @@ public final class GroupSequenceTest assertEquals(sequenceThree.get(), groupSequence.get()); - groupSequence.remove(sequenceThree); + assertTrue(groupSequence.remove(sequenceThree)); assertEquals(sequenceSeven.get(), groupSequence.get()); assertEquals(1, groupSequence.size()); }
Added the ability to have a dynamic group of sequences that can be used in place of a single sequence. This is useful when event processors can dynamically come and go.
LMAX-Exchange_disruptor
train
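A brief Java sketch of the dynamic group described in the message; the sequence values are illustrative, the Sequence(long) constructor and the group's add(Sequence) method are assumed from this version of the library, and only remove()'s boolean return comes directly from the diff.

import com.lmax.disruptor.GroupSequence;
import com.lmax.disruptor.Sequence;

public class GroupSequenceExample {
    public static void main(String[] args) {
        GroupSequence group = new GroupSequence();

        // Event processors joining dynamically: add their sequences.
        Sequence a = new Sequence(7L);
        Sequence b = new Sequence(3L);
        group.add(a);
        group.add(b);

        // get() reports the minimum sequence in the group (3 here).
        System.out.println(group.get());

        // An event processor leaving: remove() now reports success/failure.
        boolean removed = group.remove(b);
        System.out.println(removed + " -> " + group.get()); // true -> 7
    }
}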
3c650d124afda1dab30191d0554d4e63f0d91ee7
diff --git a/lib/genesis/seeder.rb b/lib/genesis/seeder.rb index <HASH>..<HASH> 100644 --- a/lib/genesis/seeder.rb +++ b/lib/genesis/seeder.rb @@ -124,7 +124,7 @@ module Genesis end def self.run_seed( version, metadata ) - class_name = metadata[0].camelcase + class_name = metadata[0].camelize file_name = metadata[1] load( file_name ) klass = class_name.constantize
Updated a camelcase method call to camelize, as camelcase was having unpredictable results on Linux machines.
midas_genesis
train
e978133b85d8e9ee8c2127f684848d0ad156e63b
diff --git a/test_rendering/slimerjs-profile/prefs.js b/test_rendering/slimerjs-profile/prefs.js index <HASH>..<HASH> 100644 --- a/test_rendering/slimerjs-profile/prefs.js +++ b/test_rendering/slimerjs-profile/prefs.js @@ -1,3 +1,4 @@ user_pref("webgl.force-enabled", true); user_pref("webgl.disabled", false); user_pref("webgl.msaa-force", true); +user_pref("layout.css.devPixelsPerPx", "1.0"); diff --git a/test_rendering/spec/ol/reproj/tile.test.js b/test_rendering/spec/ol/reproj/tile.test.js index <HASH>..<HASH> 100644 --- a/test_rendering/spec/ol/reproj/tile.test.js +++ b/test_rendering/spec/ol/reproj/tile.test.js @@ -18,8 +18,7 @@ describe('ol.rendering.reproj.Tile', function() { tile.listen('change', function(e) { if (tile.getState() == ol.TileState.LOADED) { expect(tilesRequested).to.be(expectedRequests); - resembleCanvas(tile.getImage(), expectedUrl, - IMAGE_TOLERANCE, done); + resembleCanvas(tile.getImage(), expectedUrl, 7.5, done); } }); tile.load();
Make render tests work on more devices By setting a device pixel ratio of <I> and increasing the tolerance for raster reprojection tests, the render tests now also run from the console on OSX devices with retina display.
openlayers_openlayers
train
a7b11debd30e4b56f09af13f43d0fc57e3c58724
diff --git a/lib/components/narrative/default/default-itinerary.js b/lib/components/narrative/default/default-itinerary.js index <HASH>..<HASH> 100644 --- a/lib/components/narrative/default/default-itinerary.js +++ b/lib/components/narrative/default/default-itinerary.js @@ -134,7 +134,7 @@ const ITINERARY_ATTRIBUTES = [ const fareInCents = getTotalFare(itinerary, options.configCosts, defaultFareKey) const fareCurrency = itinerary.fare?.fare?.regular?.currency?.currencyCode const fare = fareInCents === null ? null : fareInCents / 100 - if (fare === null) return <FormattedMessage id="common.itineraryDescriptions.noTransitFareProvided" /> + if (fare === null || fare < 0) return <FormattedMessage id="common.itineraryDescriptions.noTransitFareProvided" /> return ( <FormattedNumber // Currency from itinerary fare or from config.
fix(default-itinerary): don't display fares explicitly set < 0
opentripplanner_otp-react-redux
train
14db079e673e3a522962ea09f5c8a1aae7ee8455
diff --git a/src/main/java/com/github/tomakehurst/wiremock/common/Urls.java b/src/main/java/com/github/tomakehurst/wiremock/common/Urls.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/github/tomakehurst/wiremock/common/Urls.java +++ b/src/main/java/com/github/tomakehurst/wiremock/common/Urls.java @@ -68,9 +68,8 @@ public class Urls { return nodeCount > 0 ? Joiner.on("-") - .join(from(uriPathNodes) - .skip(nodeCount - Math.min(nodeCount, 2)) - ): + .join(from(uriPathNodes)) + : ""; } diff --git a/src/test/java/com/github/tomakehurst/wiremock/common/UrlsTest.java b/src/test/java/com/github/tomakehurst/wiremock/common/UrlsTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/github/tomakehurst/wiremock/common/UrlsTest.java +++ b/src/test/java/com/github/tomakehurst/wiremock/common/UrlsTest.java @@ -76,4 +76,10 @@ public class UrlsTest { String pathParts = Urls.urlToPathParts(URI.create("/foo/bar/?param=value")); assertThat(pathParts, is("foo-bar")); } + + @Test + public void returnsNonDelimitedStringForUrlWithMoreThanTwoPathParts() { + String pathParts = Urls.urlToPathParts(URI.create("/foo/bar/zoo/wire/mock?param=value")); + assertThat(pathParts, is("foo-bar-zoo-wire-mock")); + } }
Resolve scenario name conflict caused by restricting to two node parts
tomakehurst_wiremock
train
fd9c68deccdab065d4105b1fd6feaa490e111594
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -2,7 +2,6 @@ var exec = require('child_process').exec var path = require('path') var getOptions = require('loader-utils').getOptions var defaults = require('lodash.defaults') -var util = require('util') function pushAll (dest, src) { Array.prototype.push.apply(dest, src) @@ -90,15 +89,6 @@ function transformSource (runner, engine, source, map, callback) { child.stdin.end() } -function setRailsRunner (config) { - config.runner = config.rails + ' runner' -} - -var deprecatedSetRailsRunner = util.deprecate(setRailsRunner, - 'The rails-erb-loader config option `rails` is deprecated. ' + - 'Please use `runner` instead.' -) - module.exports = function railsErbLoader (source, map) { var loader = this @@ -116,11 +106,6 @@ module.exports = function railsErbLoader (source, map) { engine: 'erubis' }) - // Handle `rails` config option. This is the path to the rails binary. - if ('rails' in config) { - deprecatedSetRailsRunner(config) - } - // loader-utils does not support parsing arrays, so we might have to do it // ourselves. if (typeof config.dependencies === 'string') {
Remove deprecated `rails` option
usabilityhub_rails-erb-loader
train
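Since the deprecated `rails` option is gone, configs must name the runner command directly. A hedged webpack config sketch follows; the rule shape is webpack 2+ style and the option values are assumptions based on this file (the `runner` and `engine` option names do appear in the loader source above), not a verified example from the project docs.

// webpack.config.js (sketch)
module.exports = {
  module: {
    rules: [
      {
        test: /\.erb$/,
        use: [
          {
            loader: 'rails-erb-loader',
            options: {
              // Previously: rails: 'bin/rails' (removed in this commit).
              // Now pass the full runner command instead:
              runner: 'bin/rails runner',
              engine: 'erubis',
            },
          },
        ],
      },
    ],
  },
};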
c42a20ad45e0bbec161ea76cc5cb5a17733fb117
diff --git a/lib/veewee/environment.rb b/lib/veewee/environment.rb index <HASH>..<HASH> 100644 --- a/lib/veewee/environment.rb +++ b/lib/veewee/environment.rb @@ -53,7 +53,9 @@ module Veewee def initialize(options={}) - cwd= options.has_key?(:cwd) ? options[:cwd] : Dir.pwd + cwd = ENV['VEEWEE_DIR'] || Dir.pwd + # If a cwd was provided as option it overrules the default + cwd = options[:cwd] if options.has_key?(:cwd) defaults={ :cwd => cwd,
provided support for VEEWEE_DIR to set the current veewee working directory
jedi4ever_veewee
train
670f54504e006663b6b20308331a1db12d24b679
diff --git a/lib/nsq/connection.rb b/lib/nsq/connection.rb index <HASH>..<HASH> 100644 --- a/lib/nsq/connection.rb +++ b/lib/nsq/connection.rb @@ -385,6 +385,7 @@ module Nsq if @tls_options[:ca_certificate] context.ca_file = @tls_options[:ca_certificate] end + context.verify_mode = @tls_options[:verify_mode] || OpenSSL::SSL::VERIFY_NONE context end diff --git a/spec/lib/nsq/tls_connection_spec.rb b/spec/lib/nsq/tls_connection_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/nsq/tls_connection_spec.rb +++ b/spec/lib/nsq/tls_connection_spec.rb @@ -58,6 +58,22 @@ describe Nsq::Connection do end end + describe 'when verify_mode is passed' do + it 'fails if certificates do not verify' do + tls_options = tls_options_fixture.merge(verify_mode: OpenSSL::SSL::VERIFY_PEER) + tls_options.delete(:ca_certificate) + + params = { + tls_v1: true, + tls_options: tls_options + } + + expect { + new_producer(@nsqd, params) + }.to raise_error(OpenSSL::SSL::SSLError, /certificate verify failed/) + end + end + describe 'when using a simple tls connection' do it 'can write a message onto the queue and read it back off again' do producer = new_producer(@nsqd, tls_v1: true)
add :verify_mode to tls_options This allows a simple passthrough of the various OpenSSL::SSL verify modes. This is needed because the default behaviour is to use OpenSSL::SSL::VERIFY_NONE...
wistia_nsq-ruby
train
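A minimal Ruby sketch of the new option in use. The host and certificate paths are placeholders; the Producer keyword arguments and the tls_v1/tls_options keys are assumed from the project's documented API and the test helpers above.

require 'nsq'
require 'openssl'

producer = Nsq::Producer.new(
  nsqd: '127.0.0.1:4150',          # placeholder host:port
  topic: 'events',
  tls_v1: true,
  tls_options: {
    key: '/path/to/client.key',         # placeholder
    certificate: '/path/to/client.crt', # placeholder
    ca_certificate: '/path/to/ca.crt',  # placeholder
    # New in this commit: without it the connection defaults to VERIFY_NONE.
    verify_mode: OpenSSL::SSL::VERIFY_PEER
  }
)

producer.write('hello')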
7222b2316ca95cbf5d741a6e8df33978e95d5d30
diff --git a/src/Picqer/Financials/Exact/Connection.php b/src/Picqer/Financials/Exact/Connection.php index <HASH>..<HASH> 100644 --- a/src/Picqer/Financials/Exact/Connection.php +++ b/src/Picqer/Financials/Exact/Connection.php @@ -179,9 +179,9 @@ class Connection } /** - * Insert a Middleware for the Guzzle Client. + * Insert a Middleware for the Guzzle-Client. * - * @param $middleWare + * @param callable $middleWare */ public function insertMiddleWare($middleWare) { @@ -310,7 +310,7 @@ class Connection } /** - * @param string $url + * @param string $topic * @param mixed $body * * @throws ApiException diff --git a/src/Picqer/Financials/Exact/Model.php b/src/Picqer/Financials/Exact/Model.php index <HASH>..<HASH> 100644 --- a/src/Picqer/Financials/Exact/Model.php +++ b/src/Picqer/Financials/Exact/Model.php @@ -211,7 +211,7 @@ abstract class Model implements \JsonSerializable /** * Refresh deferred item by clearing and then lazy loading it. * - * @param $key + * @param mixed $key * * @return mixed */ @@ -249,7 +249,7 @@ abstract class Model implements \JsonSerializable if ($withDeferred) { foreach ($this->deferred as $attribute => $collection) { if (empty($collection)) { - continue; // Leave orriginal array with __deferred key + continue; // Leave original array with __deferred key } $attributes[$attribute] = []; diff --git a/src/Picqer/Financials/Exact/Query/Findable.php b/src/Picqer/Financials/Exact/Query/Findable.php index <HASH>..<HASH> 100644 --- a/src/Picqer/Financials/Exact/Query/Findable.php +++ b/src/Picqer/Financials/Exact/Query/Findable.php @@ -58,7 +58,7 @@ trait Findable * @param string $code the value to search for * @param string $key the key being searched (defaults to 'Code') * - * @return string (guid) + * @return string|void (guid) */ public function findId($code, $key = 'Code') {
Apply phpstan suggestions - Correct @param phpdoc - Correct some typos - Add missing return type based on implementation
picqer_exact-php-client
train
1ecdc86342bd609560cfcfdeeb4ea855d190e6bd
diff --git a/scripts/build-mock-doc.js b/scripts/build-mock-doc.js index <HASH>..<HASH> 100644 --- a/scripts/build-mock-doc.js +++ b/scripts/build-mock-doc.js @@ -53,6 +53,7 @@ async function mergeDts(srcDir, destDir) { fileContent = fileContent.replace(/\, d\./g, ', '); fileContent = fileContent.replace(/=> d\./g, '=> '); fileContent = fileContent.replace(/\| d\./g, '| '); + fileContent = fileContent.replace(/= d\./g, '= '); fileContent = fileContent.replace(/extends d\./g, 'extends '); fileContent = fileContent.trim(); diff --git a/scripts/build-runtime.js b/scripts/build-runtime.js index <HASH>..<HASH> 100644 --- a/scripts/build-runtime.js +++ b/scripts/build-runtime.js @@ -134,6 +134,7 @@ async function createPublicTypeExports() { fileContent = fileContent.replace(/\, d\./g, ', '); fileContent = fileContent.replace(/=> d\./g, '=> '); fileContent = fileContent.replace(/\| d\./g, '| '); + fileContent = fileContent.replace(/= d\./g, '= '); fileContent = fileContent.replace(/extends d\./g, 'extends '); fileContent = fileContent.trim();
chore(build): fix dts merging
ionic-team_stencil
train
dd8cd8cabec635cc515cdaf5f6226e631c2596f0
diff --git a/events.py b/events.py index <HASH>..<HASH> 100644 --- a/events.py +++ b/events.py @@ -75,7 +75,9 @@ class Event(object): Fire an Event (SomeEvent.fire()) from the code that causes the interesting event to occur. Fire it any time the event *might* have occurred. The Event will determine whether conditions are right to actually send notifications; - don't succumb to the temptation to do these tests outside the Event. + don't succumb to the temptation to do these tests outside the Event, + because you'll end up repeating yourself if the event is ever fired from + more than one place. Event subclasses can optionally represent a more limited scope of interest by populating the Watch.content_type field and/or adding related diff --git a/models.py b/models.py index <HASH>..<HASH> 100644 --- a/models.py +++ b/models.py @@ -1,12 +1,9 @@ -import hashlib - from django.db import models, connections, router from django.contrib.auth.models import User, AnonymousUser from django.contrib.contenttypes import generic from django.contrib.contenttypes.models import ContentType -from sumo.models import ModelBase, LocaleField -from sumo.urlresolvers import reverse +from sumo.models import ModelBase def multi_raw(query, params, models): @@ -32,7 +29,13 @@ def multi_raw(query, params, models): class Watch(ModelBase): - """Watch events.""" + """The registration of a user's interest in a certain event + + At minimum, specifies an event_type and thereby an Event subclass. May also + specify a content type and/or object ID and, indirectly, any number of + WatchFilters. + + """ # Key used by an Event to find watches it manages: event_type = models.CharField(max_length=30, db_index=True) @@ -94,8 +97,9 @@ class NotificationsMixin(models.Model): So we get cascading deletes for free, yay! """ - watches = generic.GenericRelation(Watch, - related_name='%(app_label)s_%(class)s_watches') + watches = generic.GenericRelation( + Watch, + related_name='%(app_label)s_%(class)s_watches') class Meta(object): abstract = True @@ -107,7 +111,6 @@ class EmailUser(AnonymousUser): To test whether a returned user is an anonymous user, call is_anonymous(). """ - def __init__(self, email=''): self.email = email diff --git a/tests/test_events.py b/tests/test_events.py index <HASH>..<HASH> 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -234,6 +234,7 @@ class EventUnionTests(TestCase): class OneEvent(object): def _users_watching(self): return [(user(email='HE@LLO.COM'), watch())] + class AnotherEvent(object): def _users_watching(self): return [(user(email='he@llo.com'), watch()),
Spruce up some docs, formatting, and PEP 8 deviations.
mozilla_django-tidings
train
446f39e73a66bdaaab79681fdea32a5e704d03b0
diff --git a/lib/puppet/util/tag_set.rb b/lib/puppet/util/tag_set.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/util/tag_set.rb +++ b/lib/puppet/util/tag_set.rb @@ -1,6 +1,9 @@ require 'set' +require 'puppet/network/format_support' class Puppet::Util::TagSet < Set + include Puppet::Network::FormatSupport + def self.from_yaml(yaml) self.new(YAML.load(yaml)) end @@ -13,14 +16,18 @@ class Puppet::Util::TagSet < Set self.new(data) end + def to_data_hash + to_a + end + def to_pson(*args) - to_a.to_pson + to_data_hash.to_pson end # this makes puppet serialize it as an array for backwards # compatibility def to_zaml(z) - to_a.to_zaml(z) + to_data_hash.to_zaml(z) end def join(*args) diff --git a/spec/unit/network/formats_spec.rb b/spec/unit/network/formats_spec.rb index <HASH>..<HASH> 100755 --- a/spec/unit/network/formats_spec.rb +++ b/spec/unit/network/formats_spec.rb @@ -48,6 +48,29 @@ describe "Puppet Network Format" do @msgpack.intern_multiple(Hash, MessagePack.pack(["foo"])) end.to raise_error(NoMethodError) end + + it "should be able to serialize a catalog" do + cat = Puppet::Resource::Catalog.new('foo') + cat.add_resource(Puppet::Resource.new(:file, 'my_file')) + catunpack = MessagePack.unpack(cat.to_msgpack) + catunpack.should include( + "tags"=>[], + "name"=>"foo", + "version"=>nil, + "environment"=>"", + "edges"=>[], + "classes"=>[] + ) + catunpack["resources"][0].should include( + "type"=>"File", + "title"=>"my_file", + "exported"=>false + ) + catunpack["resources"][0]["tags"].should include( + "file", + "my_file" + ) + end end it "should include a yaml format" do
(PUP-<I>) Fix serialization of TagSet The TagSet class didn't include FormatSupport and didn't have a to_data_hash method, so it couldn't be serialized to msgpack.
puppetlabs_puppet
train
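The pattern behind this fix — a Set subclass that serializes as a plain array by exposing to_data_hash — can be sketched outside Puppet. The class below is a simplified stand-in: Puppet::Network::FormatSupport is assumed to delegate serialization to to_data_hash, as the diff indicates, so here the delegation is written out by hand.

require 'set'
require 'msgpack'

# Simplified stand-in for Puppet::Util::TagSet with FormatSupport inlined.
class TagSet < Set
  # Serialize as a plain array for backwards compatibility.
  def to_data_hash
    to_a
  end

  def to_msgpack(*args)
    to_data_hash.to_msgpack(*args)
  end
end

tags = TagSet.new(%w[file my_file])
packed = tags.to_msgpack
p MessagePack.unpack(packed) # => ["file", "my_file"]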
917bbdf7204770ac4d642eed2a5a7ceed86ab370
diff --git a/tests/bc_transaction_test.py b/tests/bc_transaction_test.py index <HASH>..<HASH> 100755 --- a/tests/bc_transaction_test.py +++ b/tests/bc_transaction_test.py @@ -110,10 +110,11 @@ import binascii from pycoin.convention import SATOSHI_PER_COIN from pycoin.intbytes import bytes_from_int, byte_to_int from pycoin.serialize import h2b_rev, h2b -from pycoin.tx.Tx import Tx, TxIn +from pycoin.tx import Tx, TxIn, ValidationFailureError from pycoin.tx.Spendable import Spendable from pycoin.tx.script.opcodes import OPCODE_TO_INT + TX_VALID_JSON = os.path.dirname(__file__) + '/data/tx_valid.json' TX_INVALID_JSON = os.path.dirname(__file__) + '/data/tx_invalid.json' @@ -171,7 +172,24 @@ def txs_from_json(path): for flag in tvec[2].split(','): assert flag in FLAGS flags.add(flag) - yield (prevouts, tx_hex, flags) + + try: + tx = Tx.from_hex(tx_hex) + except: + print("Cannot parse tx_hex: %s" % tx_hex) + raise + + spendable_db = {} + blank_spendable = Spendable(0, b'', b'\0' * 32, 0) + for prevout in prevouts: + spendable = Spendable(coin_value=1000000, + script=compile_script(prevout[2]), + tx_hash=h2b_rev(prevout[0]), tx_out_index=prevout[1]) + spendable_db[(spendable.tx_hash, spendable.tx_out_index)] = spendable + unspents = [spendable_db.get((tx_in.previous_hash, tx_in.previous_index), blank_spendable) for tx_in in tx.txs_in] + tx.set_unspents(unspents) + + yield (tx, flags) def check_transaction(tx): @@ -211,49 +229,42 @@ def check_transaction(tx): class TestTx(unittest.TestCase): + pass - def test_is_valid(self): - for (prevouts, tx_hex, flags) in txs_from_json(TX_VALID_JSON): - try: - tx = Tx.from_hex(tx_hex) - except: - self.fail("Cannot parse tx_hex: " + tx_hex) - if not check_transaction(tx): - self.fail("check_transaction(tx) = False for valid tx: " + - tx_hex) +def make_f(tx, expect_ok=True): + tx_hex = tx.as_hex(include_unspents=True) + def test_f(self): + why = None + try: + check_transaction(tx) + except ValidationFailureError as ex: + why = str(ex) + bs = tx.bad_signature_count() + if bs > 0: + why = "bad sig count = %d" % bs + if (why != None) == expect_ok: + why = why or "tx unexpectedly validated" + self.fail("fail on %s because of %s with hex %s" % (tx.id(), why, tx_hex)) + return test_f - unspents = [Spendable(coin_value=1000000, - script=compile_script(prevout[2]), - tx_hash=h2b(prevout[0]), tx_out_index=prevout[1]) - for prevout in prevouts] - tx.set_unspents(unspents) - bs = tx.bad_signature_count() - if bs > 0: - msg = str(tx.as_hex(include_unspents=True)) + " bad_signature_count() = " + str(bs) - self.fail(msg) +def inject(): + for (tx, flags) in txs_from_json(TX_VALID_JSON): + name_of_f = "test_valid_%s" % tx.id() + setattr(TestTx, name_of_f, make_f(tx)) + print("adding %s" % name_of_f) + + for (tx, flags) in txs_from_json(TX_INVALID_JSON): + name_of_f = "test_invalid_%s" % tx.id() + setattr(TestTx, name_of_f, make_f(tx, expect_ok=False)) + print("adding %s" % name_of_f) + + +inject() - def test_is_invalid(self): - for (prevouts, tx_hex, flags) in txs_from_json(TX_INVALID_JSON): - try: - tx = Tx.from_hex(tx_hex) - if not check_transaction(tx): - continue - unspents = [Spendable(coin_value=1000000, - script=compile_script(prevout[2]), - tx_hash=h2b_rev(prevout[0]), tx_out_index=prevout[1]) - for prevout in prevouts] - tx.set_unspents(unspents) - - bs = tx.bad_signature_count() - self.assertEqual(bs, 0) - except: - continue - self.fail("Invalid transaction: " + tx.id() + - " appears to be valid.") if __name__ == '__main__':
Improve bc_transaction_test test.
richardkiss_pycoin
train
dfa86bb81c0665593da623f12b7772bc71c30497
diff --git a/src/record-service.js b/src/record-service.js index <HASH>..<HASH> 100644 --- a/src/record-service.js +++ b/src/record-service.js @@ -81,11 +81,13 @@ var RecordService = function (provider, type, id) { * @returns {Promise} */ this.save = function () { + var record = this; if (!_data.createdAt) { _data.createdAt = new Date(); } _data.lastUpdatedAt = new Date(); return _provider.save(pluralize(toCamelCase(_type)), _id, cloneProperties(_data)) .then(function (id) { _id = id; + return record; }); }; /**
feat(RecordService): resolve save() promise with saved record
castle-dev_le-storage-service
train
26885fff41e616ad51f07d3c355073808be09d14
diff --git a/fastlane/lib/fastlane/actions/testfairy.rb b/fastlane/lib/fastlane/actions/testfairy.rb index <HASH>..<HASH> 100644 --- a/fastlane/lib/fastlane/actions/testfairy.rb +++ b/fastlane/lib/fastlane/actions/testfairy.rb @@ -2,6 +2,8 @@ module Fastlane module Actions module SharedValues TESTFAIRY_BUILD_URL = :TESTFAIRY_BUILD_URL + TESTFAIRY_DOWNLOAD_URL = :TESTFAIRY_DOWNLOAD_URL + TESTFAIRY_LANDING_PAGE = :TESTFAIRY_LANDING_PAGE end class TestfairyAction < Action @@ -107,6 +109,8 @@ module Fastlane response = self.upload_build(params[:upload_url], path, client_options, params[:timeout]) if parse_response(response) UI.success("Build URL: #{Actions.lane_context[SharedValues::TESTFAIRY_BUILD_URL]}") + UI.success("Download URL: #{Actions.lane_context[SharedValues::TESTFAIRY_DOWNLOAD_URL]}") + UI.success("Landing Page URL: #{Actions.lane_context[SharedValues::TESTFAIRY_LANDING_PAGE]}") UI.success("Build successfully uploaded to TestFairy.") else UI.user_error!("Error when trying to upload ipa to TestFairy") @@ -120,8 +124,12 @@ module Fastlane def self.parse_response(response) if response.body && response.body.key?('status') && response.body['status'] == 'ok' build_url = response.body['build_url'] + app_url = response.body['app_url'] + landing_page_url = response.body['landing_page_url'] Actions.lane_context[SharedValues::TESTFAIRY_BUILD_URL] = build_url + Actions.lane_context[SharedValues::TESTFAIRY_DOWNLOAD_URL] = app_url + Actions.lane_context[SharedValues::TESTFAIRY_LANDING_PAGE] = landing_page_url return true else @@ -244,7 +252,12 @@ module Fastlane api_key: "...", ipa: "./ipa_file.ipa", comment: "Build #{lane_context[SharedValues::BUILD_NUMBER]}", - )' + )', + 'testfairy( + api_key: "...", + apk: "../build/app/outputs/apk/qa/release/app-qa-release.apk", + comment: "Build #{lane_context[SharedValues::BUILD_NUMBER]}", + )' ] end @@ -254,12 +267,14 @@ module Fastlane def self.output [ - ['TESTFAIRY_BUILD_URL', 'URL of the newly uploaded build'] + ['TESTFAIRY_BUILD_URL', 'URL for the sessions of the newly uploaded build'], + ['TESTFAIRY_DOWNLOAD_URL', 'URL directly to the newly uploaded build'], + ['TESTFAIRY_LANDING_PAGE', 'URL of the build\'s landing page'] ] end def self.authors - ["taka0125", "tcurdt", "vijaysharm"] + ["taka0125", "tcurdt", "vijaysharm", "cdm2012"] end def self.is_supported?(platform)
Updating TestFairy action, based upon documentation and endpoint test. (#<I>)
fastlane_fastlane
train
305740c89c249c8299652ed57c7fdf7566cf92e4
diff --git a/src/utilities/HelperProcessor.js b/src/utilities/HelperProcessor.js index <HASH>..<HASH> 100644 --- a/src/utilities/HelperProcessor.js +++ b/src/utilities/HelperProcessor.js @@ -319,7 +319,8 @@ var HelperProcessor = jsface.Class({ _getRequestBody: function (request) { var numParams, params, retVal; var i; - if (request.method.toLowerCase() === "post" || request.method.toLowerCase() === "put") { + if (request.method.toLowerCase() === "post" || request.method.toLowerCase() === "put" || + request.method.toLowerCase() === "delete" || request.method.toLowerCase() === "patch") { if (request.dataMode === "urlencoded") { if (!request.transformed.data || (!request.transformed.data.length)) { return '';
allow patch requests to have a body
postmanlabs_newman
train
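As the list of body-carrying methods grows, the chained comparisons get unwieldy; a hypothetical tidier form (not part of the commit) keeps the same behavior with a single lookup.

// Hypothetical refactor of the condition added above (ES5 style).
var METHODS_WITH_BODY = ['post', 'put', 'delete', 'patch'];

function methodAllowsBody(request) {
    return METHODS_WITH_BODY.indexOf(request.method.toLowerCase()) !== -1;
}

// e.g. methodAllowsBody({ method: 'PATCH' }) === true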
9e5e68b6372fb8f7ba99272c2eea51af06610967
diff --git a/source/core/oxconfig.php b/source/core/oxconfig.php index <HASH>..<HASH> 100644 --- a/source/core/oxconfig.php +++ b/source/core/oxconfig.php @@ -2045,7 +2045,7 @@ class oxConfig extends oxSuperCfg if ( count( $this->_aActiveViews ) ) { $oActView = end( $this->_aActiveViews ); } - if ( $oActView == null ) { + if ( !isset($oActView) || $oActView == null ) { $oActView = oxNew( 'oxubase' ); $this->_aActiveViews[] = $oActView; }
undefined variable notice removed from oxconfig. (Notice: Undefined variable: oActView in oxconfig.php line <I>)
OXID-eSales_oxideshop_ce
train
674a97d06cbe4c511ec67485662221128f36f793
diff --git a/fetch.go b/fetch.go
index <HASH>..<HASH> 100644
--- a/fetch.go
+++ b/fetch.go
@@ -158,11 +158,11 @@ func (this *FetchResponseData) Read(decoder Decoder) *DecodingError {
 }
 
 var (
-	reason_InvalidBlocksLength = "Invalid length for Blocks field"
-	reason_InvalidBlockTopic = "Invalid topic in block"
-	reason_InvalidFetchResponseDataLength = "Invalid length for FetchResponseData field"
-	reason_InvalidFetchResponseDataPartition = "Invalid partition in FetchResponseData"
-	reason_InvalidFetchResponseDataErrorCode = "Invalid error code in FetchResponseData"
+	reason_InvalidBlocksLength                         = "Invalid length for Blocks field"
+	reason_InvalidBlockTopic                           = "Invalid topic in block"
+	reason_InvalidFetchResponseDataLength              = "Invalid length for FetchResponseData field"
+	reason_InvalidFetchResponseDataPartition           = "Invalid partition in FetchResponseData"
+	reason_InvalidFetchResponseDataErrorCode           = "Invalid error code in FetchResponseData"
 	reason_InvalidFetchResponseDataHighwaterMarkOffset = "Invalid highwater mark offset in FetchResponseData"
-	reason_InvalidMessageSetLength = "Invalid MessageSet length"
+	reason_InvalidMessageSetLength                     = "Invalid MessageSet length"
 )
diff --git a/messages.go b/messages.go
index <HASH>..<HASH> 100644
--- a/messages.go
+++ b/messages.go
@@ -96,10 +96,10 @@ type Message struct {
 
 var (
 	reason_InvalidMessageAndOffsetOffset = "Invalid offset in MessageAndOffset"
-	reason_InvalidMessageLength = "Invalid Message length"
-	reason_InvalidMessageCRC = "Invalid Message CRC"
-	reason_InvalidMessageMagicByte = "Invalid Message magic byte"
-	reason_InvalidMessageAttributes = "Invalid Message attributes"
-	reason_InvalidMessageKey = "Invalid Message key"
-	reason_InvalidMessageValue = "Invalid Message value"
+	reason_InvalidMessageLength          = "Invalid Message length"
+	reason_InvalidMessageCRC             = "Invalid Message CRC"
+	reason_InvalidMessageMagicByte       = "Invalid Message magic byte"
+	reason_InvalidMessageAttributes      = "Invalid Message attributes"
+	reason_InvalidMessageKey             = "Invalid Message key"
+	reason_InvalidMessageValue           = "Invalid Message value"
 )
diff --git a/offset.go b/offset.go
index <HASH>..<HASH> 100644
--- a/offset.go
+++ b/offset.go
@@ -133,11 +133,11 @@ func (this *PartitionOffsets) Read(decoder Decoder) *DecodingError {
 }
 
 var (
-	reason_InvalidOffsetsLength = "Invalid length for Offsets field"
-	reason_InvalidOffsetTopic = "Invalid topic in offset map"
-	reason_InvalidPartitionOffsetsLength = "Invalid length for partition offsets field"
-	reason_InvalidPartitionOffsetsPartition = "Invalid partition in partition offset"
-	reason_InvalidPartitionOffsetsErrorCode = "Invalid error code in partition offset"
+	reason_InvalidOffsetsLength                 = "Invalid length for Offsets field"
+	reason_InvalidOffsetTopic                   = "Invalid topic in offset map"
+	reason_InvalidPartitionOffsetsLength        = "Invalid length for partition offsets field"
+	reason_InvalidPartitionOffsetsPartition     = "Invalid partition in partition offset"
+	reason_InvalidPartitionOffsetsErrorCode     = "Invalid error code in partition offset"
 	reason_InvalidPartitionOffsetsOffsetsLength = "Invalid length for offsets field in partition offset"
-	reason_InvalidPartitionOffset = "Invalid offset in partition offset"
+	reason_InvalidPartitionOffset               = "Invalid offset in partition offset"
 )
diff --git a/request_response.go b/request_response.go
index <HASH>..<HASH> 100644
--- a/request_response.go
+++ b/request_response.go
@@ -58,7 +58,7 @@ type Response interface {
 }
 
 type DecodingError struct {
-	err error
+	err    error
 	reason string
 }
 
diff --git a/topic_metadata.go b/topic_metadata.go
index <HASH>..<HASH> 100644
--- a/topic_metadata.go
+++ b/topic_metadata.go
@@ -203,19 +203,19 @@ func (this *PartitionMetadata) Read(decoder Decoder) *DecodingError {
 }
 
 var (
-	reason_InvalidBrokersLength = "Invalid length for Brokers field"
-	reason_InvalidMetadataLength = "Invalid length for TopicMetadata field"
-	reason_InvalidBrokerNodeId = "Invalid broker node id"
-	reason_InvalidBrokerHost = "Invalid broker host"
-	reason_InvalidBrokerPort = "Invalid broker port"
-	reason_InvalidTopicMetadataErrorCode = "Invalid topic metadata error code"
-	reason_InvalidTopicMetadataTopicName = "Invalid topic metadata topic name"
-	reason_InvalidPartitionMetadataLength = "Invalid length for Partition Metadata field"
-	reason_InvalidPartitionMetadataErrorCode = "Invalid partition metadata error code"
-	reason_InvalidPartitionMetadataPartition = "Invalid partition in partition metadata"
-	reason_InvalidPartitionMetadataLeader = "Invalid leader in partition metadata"
+	reason_InvalidBrokersLength                   = "Invalid length for Brokers field"
+	reason_InvalidMetadataLength                  = "Invalid length for TopicMetadata field"
+	reason_InvalidBrokerNodeId                    = "Invalid broker node id"
+	reason_InvalidBrokerHost                      = "Invalid broker host"
+	reason_InvalidBrokerPort                      = "Invalid broker port"
+	reason_InvalidTopicMetadataErrorCode          = "Invalid topic metadata error code"
+	reason_InvalidTopicMetadataTopicName          = "Invalid topic metadata topic name"
+	reason_InvalidPartitionMetadataLength         = "Invalid length for Partition Metadata field"
+	reason_InvalidPartitionMetadataErrorCode      = "Invalid partition metadata error code"
+	reason_InvalidPartitionMetadataPartition      = "Invalid partition in partition metadata"
+	reason_InvalidPartitionMetadataLeader         = "Invalid leader in partition metadata"
 	reason_InvalidPartitionMetadataReplicasLength = "Invalid length for Replicas field"
-	reason_InvalidPartitionMetadataReplica = "Invalid replica in partition metadata"
-	reason_InvalidPartitionMetadataIsrLength = "Invalid length for Isr field"
-	reason_InvalidPartitionMetadataIsr = "Invalid isr in partition metadata"
+	reason_InvalidPartitionMetadataReplica        = "Invalid replica in partition metadata"
+	reason_InvalidPartitionMetadataIsrLength      = "Invalid length for Isr field"
+	reason_InvalidPartitionMetadataIsr            = "Invalid isr in partition metadata"
 )
go fmt changes, re #6
elodina_siesta
train
f82923dd0b3f9cb69da23a236a1ed2877ae25fbb
diff --git a/spec/jruby_csv_spec.rb b/spec/jruby_csv_spec.rb index <HASH>..<HASH> 100644 --- a/spec/jruby_csv_spec.rb +++ b/spec/jruby_csv_spec.rb @@ -36,19 +36,19 @@ describe "a problem with jruby?" do csv_full_contents.split("\n")[range].join("\n") end - it "this actually does raise an exception, so this spec fails" do + xit "this actually does raise an exception, so this spec fails" do expect{ CSV.parse(csv_full_contents) }.to_not raise_exception end - it "using the first 15 lines still works ok" do + xit "using the first 15 lines still works ok" do expect{ CSV.parse(lines(0..15))}.to_not raise_exception end - it "from line 16 on there's a problem" do + xit "from line 16 on there's a problem" do expect{ CSV.parse(lines(0..16))}.to_not raise_exception end - it "but line 16 itself isn't the culprit" do + xit "but line 16 itself isn't the culprit" do expect{ CSV.parse(lines(3..18))}.to_not raise_exception end end
Disable spec that demonstrates JRuby problem with CSV
plexus_ting
train
a8c3cdb0f1b4824909dcf14bfecd508223d00cba
diff --git a/lib/rollbar/request_data_extractor.rb b/lib/rollbar/request_data_extractor.rb index <HASH>..<HASH> 100644 --- a/lib/rollbar/request_data_extractor.rb +++ b/lib/rollbar/request_data_extractor.rb @@ -119,11 +119,12 @@ module Rollbar end def rollbar_filtered_params(sensitive_params, params) + sensitive_params_regexp = Regexp.new(sensitive_params.map(&:to_s).join('|'), true) if params.nil? {} else params.to_hash.inject({}) do |result, (key, value)| - if sensitive_params.include?(key.to_sym) + if sensitive_params_regexp =~ key.to_s result[key] = rollbar_scrubbed(value) elsif value.is_a?(Hash) result[key] = rollbar_filtered_params(sensitive_params, value)
Better coherence with Rails param scrubbing behavior Also eliminates the potential DOS vulnerability of calling `to_sym` on all params (symbols aren't garbage collected).
rollbar_rollbar-gem
train
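The behavioral difference is easiest to see in isolation: the old include? check only scrubbed exact (case-sensitive) key matches, while the new case-insensitive regexp scrubs any key containing a sensitive substring, mirroring how Rails' filter_parameters behaves. A standalone sketch follows; the parameter names are invented for illustration, but the regexp construction is copied from the diff.

sensitive_params = [:password, :secret]
regexp = Regexp.new(sensitive_params.map(&:to_s).join('|'), true)

params = {
  'password'              => 'hunter2',
  'password_confirmation' => 'hunter2', # missed by the old include? check
  'PASSWORD'              => 'hunter2', # also missed: include? was case-sensitive
  'username'              => 'alice'
}

params.each do |key, value|
  puts "#{key}: #{regexp =~ key ? '***' : value}"
end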
3c081b60952616cb5fad858badabd5f33804db99
diff --git a/law/decorator.py b/law/decorator.py index <HASH>..<HASH> 100644 --- a/law/decorator.py +++ b/law/decorator.py @@ -122,9 +122,9 @@ def log(fn, opts, task, *args, **kwargs): sys.stderr = tee try: ret = fn(task, *args, **kwargs) - except Exception as e: + except: traceback.print_exc(file=tee) - raise e + raise finally: sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__
Fix re-raising in log decorator.
riga_law
train
3906ca2f9f28009bacf1a15c918221a1bcba99cd
diff --git a/dist/abortcontroller.js b/dist/abortcontroller.js index <HASH>..<HASH> 100644 --- a/dist/abortcontroller.js +++ b/dist/abortcontroller.js @@ -84,7 +84,15 @@ function _classCallCheck(instance, Constructor) { if (!(instance instanceof Cons var realFetch = fetch; var abortableFetch = function abortableFetch(input, init) { if (init && init.signal) { - var abortError = new DOMException('Aborted', 'AbortError'); + var abortError = void 0; + try { + abortError = new DOMException('Aborted', 'AbortError'); + } catch (err) { + // IE 11 does not support calling the DOMException constructor, use a + // regular error object on it instead. + abortError = new Error('Aborted'); + abortError.name = 'AbortError'; + } // Return early if already aborted, thus avoiding making an HTTP request if (init.signal.aborted) { diff --git a/src/abortcontroller.js b/src/abortcontroller.js index <HASH>..<HASH> 100644 --- a/src/abortcontroller.js +++ b/src/abortcontroller.js @@ -49,7 +49,15 @@ const realFetch = fetch; const abortableFetch = (input, init) => { if (init && init.signal) { - const abortError = new DOMException('Aborted', 'AbortError'); + let abortError; + try { + abortError = new DOMException('Aborted', 'AbortError'); + } catch (err) { + // IE 11 does not support calling the DOMException constructor, use a + // regular error object on it instead. + abortError = new Error('Aborted'); + abortError.name = 'AbortError'; + } // Return early if already aborted, thus avoiding making an HTTP request if (init.signal.aborted) {
Add IE<I> compatibility IE<I> does not support the DOMException constructor, fall back to a regular Error for it.
mo_abortcontroller-polyfill
train
3c1dd6b839b7c0e2cbc85074bb5840ebded6097c
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -1958,6 +1958,9 @@ resources are accessed.
 The `MemberAuthenticator` class is configured as the default option for authentication,
 and will attempt to use the current CMS `Member` session for authentication context.
 
+**If you are using the default session-based authentication, please be sure that you have
+the [CSRF Middleware](#csrf-tokens-required-for-mutations) enabled. (It is by default).**
+
 ### HTTP basic authentication
 
 Silverstripe has built in support for [HTTP basic authentication](https://en.wikipedia.org/wiki/Basic_access_authentication).
@@ -1967,6 +1970,9 @@ authenticator because GraphQL needs to use the successfully authenticated membe
 for CMS permission filtering, whereas the global `BasicAuth` does not log the member
 in or use it for model security.
 
+When using HTTP basic authentication, you can feel free to remove the [CSRF Middleware](#csrf-tokens-required-for-mutations),
+as it just adds unnecessary overhead to the request.
+
 #### In GraphiQL
 
 If you want to add basic authentication support to your GraphQL requests you can
@@ -2033,12 +2039,19 @@ the `SecurityToken` API, using `SecurityToken::inst()->getValue()`.
 
 Queries do not require CSRF tokens.
 
+### Disabling CSRF protection (for token-based authentication only)
+
+If you are using HTTP basic authentication or a token-based system like OAuth or [JWT](https://github.com/Firesphere/silverstripe-graphql-jwt),
+you will want to remove the CSRF protection, as it just adds unnecessary overhead. You can do this by setting
+the middleware to `false`.
+
 ```yaml
-  SilverStripe\GraphQL\Manager:
+  SilverStripe\GraphQL\Manager.default:
     properties:
       Middlewares:
         CSRFMiddleware: false
 ```
+
 ## Cross-Origin Resource Sharing (CORS)
 
 By default [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/Access_control_CORS) is disabled in the GraphQL Server. This can be easily enabled via YAML:
diff --git a/src/Middleware/CSRFMiddleware.php b/src/Middleware/CSRFMiddleware.php
index <HASH>..<HASH> 100644
--- a/src/Middleware/CSRFMiddleware.php
+++ b/src/Middleware/CSRFMiddleware.php
@@ -3,6 +3,10 @@
 namespace SilverStripe\GraphQL\Middleware;
 
 use GraphQL\Schema;
+use GraphQL\Language\Parser;
+use GraphQL\Language\Source;
+use GraphQL\Language\AST\NodeKind;
+use SilverStripe\GraphQL\Manager;
 use SilverStripe\GraphQL\Middleware\QueryMiddleware;
 use SilverStripe\Security\SecurityToken;
 use Exception;
@@ -11,7 +15,7 @@ class CSRFMiddleware implements QueryMiddleware
 {
     public function process(Schema $schema, $query, $context, $params, callable $next)
     {
-        if (preg_match('/^\s*mutation/', $query)) {
+        if ($this->isMutation($query)) {
             if (empty($context['token'])) {
                 throw new Exception('Mutations must provide a CSRF token in the X-CSRF-TOKEN header');
             }
@@ -24,4 +28,39 @@ class CSRFMiddleware implements QueryMiddleware
 
         return $next($schema, $query, $context, $params);
     }
+
+    /**
+     * @param string $query
+     * @return bool
+     */
+    protected function isMutation($query)
+    {
+        // Simple string matching as a first check to prevent unnecessary static analysis
+        if (stristr($query, Manager::MUTATION_ROOT) === false) {
+            return false;
+        }
+
+        // If "mutation" is the first expression in the query, then it's a mutation.
+        if (preg_match('/^\s*'.preg_quote(Manager::MUTATION_ROOT, '/').'/', $query)) {
+            return true;
+        }
+
+        // Otherwise, bring in the big guns.
+        $document = Parser::parse(new Source($query ?: 'GraphQL'));
+        $defs = $document->definitions;
+        foreach ($defs as $statement) {
+            $options = [
+                NodeKind::OPERATION_DEFINITION,
+                NodeKind::OPERATION_TYPE_DEFINITION
+            ];
+            if (!in_array($statement->kind, $options, true)) {
+                continue;
+            }
+            if ($statement->operation === Manager::MUTATION_ROOT) {
+                return true;
+            }
+        }
+
+        return false;
+    }
 }
diff --git a/tests/Middleware/CSRFMiddlewareTest.php b/tests/Middleware/CSRFMiddlewareTest.php
index <HASH>..<HASH> 100644
--- a/tests/Middleware/CSRFMiddlewareTest.php
+++ b/tests/Middleware/CSRFMiddlewareTest.php
@@ -30,7 +30,42 @@ class CSRFMiddlewareTest extends MiddlewareProcessTest
             ' mutation someMutation { tester }'
         );
         $this->assertNotEquals('resolved', $result);
+        $result = $this->simulateMiddlewareProcess(
+            new CSRFMiddleware(),
+            ' mutation someMutation { tester }'
+        );
+        $this->assertNotEquals('resolved', $result);
+        $graphql = <<<GRAPHQL
mutation MyMutation(\$SomeArg:string!) {
  someMutation(Foo:\$SomeArg) {
    tester
  }
}
GRAPHQL;
+
+        $result = $this->simulateMiddlewareProcess(
+            new CSRFMiddleware(),
+            $graphql
+        );
+        $this->assertNotEquals('resolved', $result);
+        $graphql = <<<GRAPHQL
fragment myFragment on File {
  id
  width
}
mutation someMutation {
  tester
  }
}
GRAPHQL;
+
+        $result = $this->simulateMiddlewareProcess(
+            new CSRFMiddleware(),
+            $graphql
+        );
+        $this->assertNotEquals('resolved', $result);
+    }
+
     public function testItThrowsIfTokenIsInvalid()
     {
[CVE-<I>-<I>] Cross Site Request Forgery (CSRF) Protection Bypass
silverstripe_silverstripe-graphql
train
2528e2b05de406fdcebdacd75d19b09b8ecba875
diff --git a/engine/src/main/java/org/camunda/bpm/engine/impl/TaskQueryImpl.java b/engine/src/main/java/org/camunda/bpm/engine/impl/TaskQueryImpl.java index <HASH>..<HASH> 100644 --- a/engine/src/main/java/org/camunda/bpm/engine/impl/TaskQueryImpl.java +++ b/engine/src/main/java/org/camunda/bpm/engine/impl/TaskQueryImpl.java @@ -17,6 +17,7 @@ import static org.camunda.bpm.engine.impl.util.EnsureUtil.ensureNotNull; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; @@ -920,16 +921,16 @@ public class TaskQueryImpl extends AbstractQuery<TaskQuery, Task> implements Tas return null; } - if (candidateGroup!=null && candidateGroups != null) { - //get intersection of candidateGroups and candidateGroup - ArrayList result = new ArrayList(candidateGroups); - result.retainAll(Arrays.asList(candidateGroup)); - return result; - } else if (candidateGroup!=null) { + if (candidateGroup != null && candidateGroups != null) { + //get intersection of candidateGroups and candidateGroup + ArrayList result = new ArrayList(candidateGroups); + result.retainAll(Arrays.asList(candidateGroup)); + return result; + } else if (candidateGroup != null) { return Arrays.asList(candidateGroup); } else if (candidateUser != null) { return getGroupsForCandidateUser(candidateUser); - } else if(candidateGroups != null) { + } else if (candidateGroups != null) { return candidateGroups; } return null; @@ -1251,6 +1252,12 @@ public class TaskQueryImpl extends AbstractQuery<TaskQuery, Task> implements Tas ensureOrExpressionsEvaluated(); ensureVariablesInitialized(); checkQueryOk(); + + //check if candidateGroup and candidateGroups intersect + if (getCandidateGroup() != null && getCandidateGroupsInternal() != null && getCandidateGroups().isEmpty()) { + return Collections.emptyList(); + } + List<Task> taskList = commandContext .getTaskManager() .findTasksByQueryCriteria(this); @@ -1270,6 +1277,10 @@ public class TaskQueryImpl extends AbstractQuery<TaskQuery, Task> implements Tas ensureOrExpressionsEvaluated(); ensureVariablesInitialized(); checkQueryOk(); + //check if candidateGroup and candidateGroups intersect + if (getCandidateGroup() != null && getCandidateGroupsInternal() != null && getCandidateGroups().isEmpty()) { + return 0; + } return commandContext .getTaskManager() .findTaskCountByQueryCriteria(this); diff --git a/engine/src/test/java/org/camunda/bpm/engine/test/api/task/TaskQueryTest.java b/engine/src/test/java/org/camunda/bpm/engine/test/api/task/TaskQueryTest.java index <HASH>..<HASH> 100644 --- a/engine/src/test/java/org/camunda/bpm/engine/test/api/task/TaskQueryTest.java +++ b/engine/src/test/java/org/camunda/bpm/engine/test/api/task/TaskQueryTest.java @@ -585,6 +585,14 @@ public class TaskQueryTest extends PluggableProcessEngineTestCase { assertEquals(0, query.list().size()); } + public void testQueryByCandidateGroupInAndCandidateGroupNotIntersected() { + List<String> groups = Arrays.asList("accountancy"); + String candidateGroup = "management"; + TaskQuery query = taskService.createTaskQuery().taskCandidateGroupIn(groups).taskCandidateGroup(candidateGroup); + assertEquals(0, query.count()); + assertEquals(0, query.list().size()); + } + public void testQueryByNullCandidateGroupIn() { try { taskService.createTaskQuery().taskCandidateGroupIn(null).list();
fix(engine): fix case when candidate group and candidate groups don't intersect Related to CAM-<I>
camunda_camunda-bpm-platform
train
cb3ddfdf76d9fe247db0729a1967defad8ad1724
diff --git a/src/Extractor/Markdown.php b/src/Extractor/Markdown.php index <HASH>..<HASH> 100644 --- a/src/Extractor/Markdown.php +++ b/src/Extractor/Markdown.php @@ -4,8 +4,7 @@ declare(strict_types=1); namespace Rusty\Extractor; -use League\CommonMark\Block\Element\AbstractBlock; -use League\CommonMark\Block\Element\FencedCode; +use League\CommonMark\Block\Element; use League\CommonMark\DocParser; use League\CommonMark\Environment; use Rusty\CodeSample; @@ -38,11 +37,11 @@ class Markdown implements SampleExtractor continue; } - if (!$node instanceof AbstractBlock || !$node->isCode()) { + if (!$node instanceof Element\AbstractBlock || !$node->isCode()) { continue; } - if ($node instanceof FencedCode) { + if ($node instanceof Element\FencedCode) { $infoWords = array_map('strtolower', array_filter(array_map('trim', $node->getInfoWords()))); // filter code blocks that are not explicitly declared as PHP @@ -51,12 +50,15 @@ class Markdown implements SampleExtractor } } - /** @var AbstractBlock $node */ + yield $this->buildCodeSample($file, $node); + } + } - $content = $node->getStringContent(); - $pragmaDirectives = $this->pragmaParser->getPragmaDirectives($content); + private function buildCodeSample(\SplFileInfo $file, Element\AbstractBlock $node) + { + $content = $node->getStringContent(); + $pragmaDirectives = $this->pragmaParser->getPragmaDirectives($content); - yield new CodeSample($file, $node->getStartLine(), $content, $pragmaDirectives); - } + yield new CodeSample($file, $node->getStartLine(), $content, $pragmaDirectives); } }
Clean up the Markdown extractor a bit
K-Phoen_Rusty
train
248124d1f5a676f30999178408835a8561caeacc
diff --git a/integration/v6/experimental/v3_scale_command_test.go b/integration/v6/experimental/v3_scale_command_test.go index <HASH>..<HASH> 100644 --- a/integration/v6/experimental/v3_scale_command_test.go +++ b/integration/v6/experimental/v3_scale_command_test.go @@ -185,7 +185,7 @@ var _ = Describe("v3-scale command", func() { Eventually(session).Should(Exit(0)) appTable := helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(len(appTable.Processes)).To(Equal(3)) + Expect(len(appTable.Processes)).To(Equal(2)) processSummary := appTable.Processes[0] Expect(processSummary.Type).To(Equal("web")) @@ -197,9 +197,6 @@ var _ = Describe("v3-scale command", func() { Expect(appTable.Processes[1].Type).To(Equal("console")) Expect(appTable.Processes[1].InstanceCount).To(Equal("0/0")) - - Expect(appTable.Processes[2].Type).To(Equal("rake")) - Expect(appTable.Processes[2].InstanceCount).To(Equal("0/0")) }) }) @@ -209,7 +206,7 @@ var _ = Describe("v3-scale command", func() { session := helpers.CF("v3-scale", appName) Eventually(session).Should(Exit(0)) appTable := helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(appTable.Processes).To(HaveLen(3)) + Expect(appTable.Processes).To(HaveLen(2)) By("scaling to 3 instances") session = helpers.CF("v3-scale", appName, "-i", "3") @@ -220,7 +217,7 @@ var _ = Describe("v3-scale command", func() { Eventually(session).Should(Exit(0)) updatedAppTable := helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(updatedAppTable.Processes).To(HaveLen(3)) + Expect(updatedAppTable.Processes).To(HaveLen(2)) processSummary := updatedAppTable.Processes[0] instanceSummary := processSummary.Instances[0] @@ -240,7 +237,7 @@ var _ = Describe("v3-scale command", func() { Eventually(session).Should(Exit(0)) updatedAppTable = helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(updatedAppTable.Processes).To(HaveLen(3)) + Expect(updatedAppTable.Processes).To(HaveLen(2)) processSummary = updatedAppTable.Processes[0] instanceSummary = processSummary.Instances[0] @@ -260,7 +257,7 @@ var _ = Describe("v3-scale command", func() { Eventually(session).Should(Exit(0)) updatedAppTable = helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(updatedAppTable.Processes).To(HaveLen(3)) + Expect(updatedAppTable.Processes).To(HaveLen(2)) processSummary = updatedAppTable.Processes[0] instanceSummary = processSummary.Instances[0] @@ -312,7 +309,7 @@ var _ = Describe("v3-scale command", func() { Eventually(session).Should(Exit(0)) appTable := helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(appTable.Processes).To(HaveLen(3)) + Expect(appTable.Processes).To(HaveLen(2)) processSummary := appTable.Processes[0] instanceSummary := processSummary.Instances[0] @@ -336,7 +333,7 @@ var _ = Describe("v3-scale command", func() { Eventually(session).Should(Exit(0)) appTable := helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(appTable.Processes).To(HaveLen(3)) + Expect(appTable.Processes).To(HaveLen(2)) processSummary := appTable.Processes[0] instanceSummary := processSummary.Instances[0] @@ -396,7 +393,7 @@ var _ = Describe("v3-scale command", func() { Eventually(session).Should(Exit(0)) appTable := helpers.ParseV3AppProcessTable(session.Out.Contents()) - Expect(appTable.Processes).To(HaveLen(3)) + Expect(appTable.Processes).To(HaveLen(2)) processSummary := appTable.Processes[1] instanceSummary := processSummary.Instances[0]
Fix experimental integrations after cf-deployment upgrade Recent updates to cf deployment caused our environments to stop showing a "rake" process on the /v3/apps endpoint. We needed to update our tests accordingly. [#<I>]
cloudfoundry_cli
train
e2d249ab3b879f656987609b3f170f698932c08c
diff --git a/libs/files/mobile.js b/libs/files/mobile.js index <HASH>..<HASH> 100644 --- a/libs/files/mobile.js +++ b/libs/files/mobile.js @@ -16,7 +16,6 @@ JS.core = [ "src/mobile/js/ch.events.js", "src/ui/js/ch.events.js", "src/shared/js/ch.factory.js", - "src/ui/js/ch.shortcuts.js", "src/mobile/js/ch.init.js" ]; diff --git a/libs/files/ui.js b/libs/files/ui.js index <HASH>..<HASH> 100644 --- a/libs/files/ui.js +++ b/libs/files/ui.js @@ -14,7 +14,6 @@ JS.core = [ "src/shared/js/ch.events.js", "src/ui/js/ch.events.js", "src/shared/js/ch.factory.js", - "src/ui/js/ch.shortcuts.js", "src/ui/js/ch.init.js" ]; @@ -27,7 +26,8 @@ JS.abilities = [ "src/shared/js/ch.Closable.js", "src/shared/js/ch.Collapsible.js", "src/shared/js/ch.Viewport.js", - "src/shared/js/ch.Positioner.js" + "src/shared/js/ch.Positioner.js", + "src/ui/js/ch.shortcuts.js" ]; /*
Move ch.shortcuts source file from core to abilities.
mercadolibre_chico
train
0ec406d38646650459eb38320e7e3a20f673d07e
diff --git a/classes/Boom/Environment/Development.php b/classes/Boom/Environment/Development.php index <HASH>..<HASH> 100644 --- a/classes/Boom/Environment/Development.php +++ b/classes/Boom/Environment/Development.php @@ -4,6 +4,8 @@ namespace Boom\Environment; class Development extends Environment { + protected $requiresLogin = true; + public function isDevelopment() { return true; diff --git a/classes/Boom/Environment/Environment.php b/classes/Boom/Environment/Environment.php index <HASH>..<HASH> 100644 --- a/classes/Boom/Environment/Environment.php +++ b/classes/Boom/Environment/Environment.php @@ -4,6 +4,12 @@ namespace Boom\Environment; abstract class Environment { + /** + * + * @var boolean + */ + protected $requiresLogin = false; + public function isDevelopment() { return false; @@ -18,4 +24,9 @@ abstract class Environment { return false; } + + public function requiresLogin() + { + return $this->requiresLogin; + } } \ No newline at end of file diff --git a/tests/Boom/EnvironmentTest.php b/tests/Boom/EnvironmentTest.php index <HASH>..<HASH> 100644 --- a/tests/Boom/EnvironmentTest.php +++ b/tests/Boom/EnvironmentTest.php @@ -2,15 +2,15 @@ class EnvironmentTest extends PHPUnit_Framework_TestCase { + private $environments = ['development', 'staging', 'production']; + public function testIsMethods() { - $environments = ['development', 'staging', 'production']; - - foreach($environments as $env) { + foreach($this->environments as $env) { $className = "Boom\\Environment\\" . ucfirst($env); $class = new $className; - foreach($environments as $env2) { + foreach($this->environments as $env2) { $method = "is" . ucfirst($env2); if ($env === $env2) { @@ -21,4 +21,18 @@ class EnvironmentTest extends PHPUnit_Framework_TestCase } } } + + public function testRequireLoginForDevelopmentOnly() + { + foreach($this->environments as $env) { + $className = "Boom\\Environment\\" . ucfirst($env); + $class = new $className; + + if ($env === 'development') { + $this->assertTrue($class->requiresLogin(), $className); + } else { + $this->assertFalse($class->requiresLogin(), $className); + } + } + } } \ No newline at end of file
Added requiresLogin() methods to environment classes
boomcms_boom-core
train
25db611d89db1a01038de0140d981e9326bd11ad
diff --git a/perf/_runner.py b/perf/_runner.py index <HASH>..<HASH> 100644 --- a/perf/_runner.py +++ b/perf/_runner.py @@ -599,7 +599,7 @@ class Runner: return cmd - def _spawn_worker_suite(self, calibrate=False): + def _spawn_worker(self, calibrate=False): rpipe, wpipe = pipe_cloexec() if six.PY3: rfile = open(rpipe, "r", encoding="utf8") @@ -627,15 +627,6 @@ class Runner: return _load_suite_from_pipe(bench_json) - def _spawn_worker_bench(self, calibrate=False): - suite = self._spawn_worker_suite(calibrate) - - benchmarks = suite.get_benchmarks() - if len(benchmarks) != 1: - raise ValueError("worker produced %s benchmarks instead of 1" - % len(benchmarks)) - return benchmarks[0] - def _display_result(self, bench, checks=True): args = self.args @@ -695,7 +686,13 @@ class Runner: print() for process in range(1, nprocess + 1): - worker_bench = self._spawn_worker_bench(calibrate) + suite = self._spawn_worker(calibrate) + + benchmarks = suite.get_benchmarks() + if len(benchmarks) != 1: + raise ValueError("worker produced %s benchmarks instead of 1" + % len(benchmarks)) + worker_bench = benchmarks[0] if verbose: run = worker_bench.get_runs()[-1]
Inline Runner._spawn_worker_bench() Rename _spawn_worker_suite() to _spawn_worker().
vstinner_perf
train
86321a008c095dd1b6681128a5278efafb84c69a
diff --git a/src/Sulu/Bundle/AdminBundle/Resources/public/js/components/type-overlay/main.js b/src/Sulu/Bundle/AdminBundle/Resources/public/js/components/type-overlay/main.js
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/AdminBundle/Resources/public/js/components/type-overlay/main.js
+++ b/src/Sulu/Bundle/AdminBundle/Resources/public/js/components/type-overlay/main.js
@@ -120,13 +120,7 @@ define([], function() {
 
             this.options.overlay.data = this.sandbox.util.template(this.options.template, {data:this.options.data});
             this.startOverlayComponent(this.options.overlay);
-
-            this.$overlay = this.sandbox.dom.find(this.options.overlay.container);
-            this.$overlayContent = this.sandbox.dom.find(constants.overlayContentSelector);
-
             this.bindCustomEvents();
-            this.bindDomEvents();
-
             this.sandbox.emit(INITIALZED.call(this));
         },
 
         /**
@@ -237,6 +231,15 @@ define([], function() {
                     this.saveNewData(data);
                 }
             }.bind(this));
+
+            // use open event because initialized is too early
+            this.sandbox.on('husky.overlay.'+this.options.overlay.instanceName+'.opened', function(){
+                this.$overlay = this.sandbox.dom.find(this.options.overlay.el);
+                this.$overlayContent = this.sandbox.dom.find(constants.overlayContentSelector, this.$overlay);
+
+                this.bindDomEvents();
+                this.sandbox.emit(INITIALZED.call(this));
+            }.bind(this));
         },
 
         /**
added event listener for fully initialized overlay component
sulu_sulu
train
138ddb565374cd79e2ee4ae4a3abe8308b769a90
diff --git a/lib/gpg.js b/lib/gpg.js
index <HASH>..<HASH> 100644
--- a/lib/gpg.js
+++ b/lib/gpg.js
@@ -124,7 +124,7 @@ var GPG = {
       fn.call(null, null, buffer);
     });
 
-    gpg.stdin.end(str, 'utf8');
+    gpg.stdin.end(str);
   },
 
   /**
revert unintended change in <I>a8b<I>faa<I>a<I>c<I>fbd<I>b<I>cb3
drudge_node-gpg
train
ddf2bdf8e50734028b24754cb9516a28c37099bd
diff --git a/blockchain.go b/blockchain.go
index <HASH>..<HASH> 100644
--- a/blockchain.go
+++ b/blockchain.go
@@ -1,7 +1,6 @@
 package spvwallet
 
 import (
-	"errors"
 	"fmt"
 	"github.com/btcsuite/btcd/blockchain"
 	"github.com/btcsuite/btcd/chaincfg"
diff --git a/schema.go b/schema.go
index <HASH>..<HASH> 100644
--- a/schema.go
+++ b/schema.go
@@ -1,5 +1,7 @@
 package spvwallet
 
+import "time"
+
 // TODO: Eventually we will like to move of this file to a separate interface repo which this wallet
 // TODO: and others (such as the openbazaar-go bitcoind wallet) can share.
 
@@ -28,13 +30,14 @@ const (
 
 // This callback is passed to any registered transaction listeners when a transaction is detected
 // for the wallet.
-// TODO: we can maybe get rid of this and just use the btcd msgTx, but we do need the linked scriptPubkey
-// TODO: which is not included in that object. We could possibly re-purpose the signature field.
 type TransactionCallback struct {
-	Txid    []byte
-	Outputs []TransactionOutput
-	Inputs  []TransactionInput
-	Height  int32
+	Txid      []byte
+	Outputs   []TransactionOutput
+	Inputs    []TransactionInput
+	Height    int32
+	Timestamp time.Time
+	Value     int64
+	WatchOnly bool
 }
 
 type TransactionOutput struct {
Add timestamp, value, and watchonly to tx callback
OpenBazaar_spvwallet
train
e95cc4e019b78255567baa85fee35bbfef1d8378
diff --git a/src/adapters/sqlite/encodeSchema/index.js b/src/adapters/sqlite/encodeSchema/index.js
index <HASH>..<HASH> 100644
--- a/src/adapters/sqlite/encodeSchema/index.js
+++ b/src/adapters/sqlite/encodeSchema/index.js
@@ -23,14 +23,14 @@ const encodeCreateTable: TableSchema => SQL = ({ name, columns }) => {
 const encodeIndex: (ColumnSchema, TableName<any>) => SQL = (column, tableName) =>
   `create index ${tableName}_${column.name} on ${encodeName(tableName)} (${encodeName(
     column.name,
-  )})`
+  )});`
 
 const encodeTableIndicies: TableSchema => SQL = ({ name: tableName, columns }) =>
   values(columns)
     .filter(column => column.isIndexed)
     .map(column => encodeIndex(column, tableName))
-    .concat([`create index ${tableName}__status on ${encodeName(tableName)} ("_status")`])
-    .join(';')
+    .concat([`create index ${tableName}__status on ${encodeName(tableName)} ("_status");`])
+    .join('')
 
 const encodeTable: TableSchema => SQL = table =>
   encodeCreateTable(table) + encodeTableIndicies(table)
@@ -38,11 +38,30 @@ const encodeTable: TableSchema => SQL = table =>
 export const encodeSchema: AppSchema => SQL = ({ tables }) =>
   values(tables)
     .map(encodeTable)
-    .concat([''])
-    .join(';')
+    .join('')
 
-//
+// TODO: Default values, indexes
-const encodeCreateColumn: (ColumnSchema, TableName<any>) => SQL = (column, tableName) =>
-//
+  `alter table ${encodeName(tableName)} add ${encodeName(column.name)}`
+const encodeCreateTableMigrationStep: CreateTableMigrationStep => SQL = ({ name, columns }) =>
+  encodeTable({ name, columns })
-export const encodeMigrationSteps: (MigrationStep[]) => SQL = steps => ''
+const encodeAddColumnsMigrationStep: AddColumnsMigrationStep => SQL = ({ table, columns }) =>
+  columns
+    .map(
+      column =>
+        `alter table ${encodeName(table)} add ${encodeName(column.name)};${
+          column.isIndexed ? encodeIndex(column, table) : ''
+        }`,
+    )
+    .join('')
+
+export const encodeMigrationSteps: (MigrationStep[]) => SQL = steps =>
+  steps
+    .map(step => {
+      if (step.type === 'create_table') {
+        return encodeCreateTableMigrationStep(step)
+      } else if (step.type === 'add_columns') {
+        return encodeAddColumnsMigrationStep(step)
+      }
+
+      return ''
+    })
+    .join('')
diff --git a/src/adapters/sqlite/encodeSchema/test.js b/src/adapters/sqlite/encodeSchema/test.js
index <HASH>..<HASH> 100644
--- a/src/adapters/sqlite/encodeSchema/test.js
+++ b/src/adapters/sqlite/encodeSchema/test.js
@@ -31,20 +31,13 @@ describe('encodeSchema', () => {
       'create table "comments" ("id" primary key, "_changed", "_status", "last_modified", "is_ended", "reactions");' +
       'create index comments__status on "comments" ("_status");'
 
-    expect(encodeSchema(testSchema)).toEqual(expectedSchema)
+    expect(encodeSchema(testSchema)).toBe(expectedSchema)
   })
   it('encodes migrations', () => {
     const migrationSteps = [
       addColumns({
         table: 'posts',
-        columns: [
-          { name: 'subtitle', type: 'string', isOptional: true },
-          { name: 'is_pinned', type: 'bool' },
-        ],
-      }),
-      addColumns({
-        table: 'posts',
-        columns: [{ name: 'author_id', type: 'string', isIndexed: true }],
+        columns: [{ name: 'subtitle', type: 'string', isOptional: true }],
       }),
       createTable({
         name: 'comments',
@@ -53,8 +46,25 @@ describe('encodeSchema', () => {
           { name: 'body', type: 'string' },
         ],
       }),
+      addColumns({
+        table: 'posts',
+        columns: [
+          { name: 'author_id', type: 'string', isIndexed: true },
+          { name: 'is_pinned', type: 'bool', isIndexed: true },
+        ],
+      }),
     ]
-    expect(encodeMigrationSteps(migrationSteps)).toEqual('blabla')
+    const expectedSQL =
+      'alter table "posts" add "subtitle";' +
+      'create table "comments" ("id" primary key, "_changed", "_status", "last_modified", "post_id", "body");' +
+      'create index comments_post_id on "comments" ("post_id");' +
+      'create index comments__status on "comments" ("_status");' +
+      'alter table "posts" add "author_id";' +
+      'create index posts_author_id on "posts" ("author_id");' +
+      'alter table "posts" add "is_pinned";' +
+      'create index posts_is_pinned on "posts" ("is_pinned");'
+
+    expect(encodeMigrationSteps(migrationSteps)).toBe(expectedSQL)
   })
 })
[migrations][sqlite] Encode migration SQL
Nozbe_WatermelonDB
train
a2c8d1c061c8385c0c07f32471f29b16419d14dd
diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,7 +18,7 @@
 
 * Added new network utilities to fetch IP and country information from a host.
 
-  The country information is fetched using the free `ip-api.com` service. NOTE! This service is rate limited to 150 requests per minute and requires a paid plan for commercial usage. Please make sure to respect the terms.
+  The country information is fetched using the free `ipdata.co` service. NOTE! This service is rate limited to 1500 requests per day.
 
 ### Changed
diff --git a/federation/tests/utils/test_network.py b/federation/tests/utils/test_network.py
index <HASH>..<HASH> 100644
--- a/federation/tests/utils/test_network.py
+++ b/federation/tests/utils/test_network.py
@@ -8,17 +8,14 @@ from federation.utils.network import (
     fetch_document, USER_AGENT, send_document, fetch_country_by_ip, fetch_host_ip_and_country)
 
 
-@patch('federation.utils.network.requests.get', autospec=True, return_value=Mock(
-    status_code=200, json=Mock(return_value={'countryCode': 'FI'}),
-))
+@patch('federation.utils.network.ipdata', autospec=True)
 class TestFetchCountryByIp:
-    def test_calls_ip_api_endpoint(self, mock_get):
-        fetch_country_by_ip('127.0.0.1')
-        mock_get.assert_called_once_with('http://ip-api.com/json/127.0.0.1"')
-
-    def test_returns_country_code(self, mock_get):
-        result = fetch_country_by_ip('127.0.0.1')
-        assert result == 'FI'
+    def test_calls_ip_api_endpoint(self, mock_ipdata):
+        mock_lookup = Mock(lookup=Mock(return_value={'status': 200, 'response': {'country_code': 'DE'}}))
+        mock_ipdata.ipdata.return_value = mock_lookup
+        country = fetch_country_by_ip('127.0.0.1')
+        mock_lookup.lookup.assert_called_once_with('127.0.0.1')
+        assert country == 'DE'
 
 
 class TestFetchDocument:
diff --git a/federation/utils/network.py b/federation/utils/network.py
index <HASH>..<HASH> 100644
--- a/federation/utils/network.py
+++ b/federation/utils/network.py
@@ -2,6 +2,7 @@ import logging
 import socket
 
 import requests
+from ipdata import ipdata
 from requests.exceptions import RequestException, HTTPError, SSLError
 from requests.exceptions import ConnectionError
 from requests.structures import CaseInsensitiveDict
@@ -19,19 +20,18 @@ def fetch_country_by_ip(ip):
 
     Returns empty string if the request fails in non-200 code.
 
-    Uses the ip-api.com service which has the following rules:
+    Uses the ipdata.co service which has the following rules:
 
-    * Max 150 requests per minute
-    * Non-commercial use only without a paid plan!
+    * Max 1500 requests per day
 
-    See: http://ip-api.com/docs/api:json
+    See: https://ipdata.co/docs.html#python-library
     """
-    result = requests.get("http://ip-api.com/json/%s" % ip)
-    if result.status_code != 200:
+    iplookup = ipdata.ipdata()
+    data = iplookup.lookup(ip)
+    if data.get('status') != 200:
         return ''
 
-    result = result.json()
-    return result['countryCode']
+    return data.get('response', {}).get('country_code', '')
 
 
 def fetch_document(url=None, host=None, path="/", timeout=10, raise_ssl_errors=True):
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -30,6 +30,7 @@ setup(
         "cssselect>=0.9.2",
         "dirty-validators>=0.3.0",
         "lxml>=3.4.0",
+        "ipdata>=2.6",
        "jsonschema>=2.0.0",
         "pycrypto>=2.6.0",
         "python-dateutil>=2.4.0",
@@ -52,5 +53,5 @@ setup(
         'Topic :: Internet',
         'Topic :: Software Development :: Libraries :: Python Modules',
     ],
-    keywords='federation diaspora federate social',
+    keywords='federation diaspora activitypub federate social',
 )
Switch to ipdata.co service for country lookups
jaywink_federation
train
d626c1ee6899e03d0fa7a509e64ffc7612d17e54
diff --git a/serial_test.go b/serial_test.go
index <HASH>..<HASH> 100644
--- a/serial_test.go
+++ b/serial_test.go
@@ -7,8 +7,8 @@ import (
 
 const (
 	// socat -d -d pty,raw,echo=0 pty,raw,echo=0
-	pty1 = "/dev/pts/5"
-	pty2 = "/dev/pts/6"
+	pty1 = "/dev/ttys009"
+	pty2 = "/dev/ttys010"
 )
 
 func TestReadWrite(t *testing.T) {
changed pty endpoints in test code
goburrow_serial
train
e19cdf265a4d0c009869acc9beb73a377dcd1d6e
diff --git a/src/shogun2-init/src/main/java/de/terrestris/shogun2/init/ContentInitializer.java b/src/shogun2-init/src/main/java/de/terrestris/shogun2/init/ContentInitializer.java
index <HASH>..<HASH> 100644
--- a/src/shogun2-init/src/main/java/de/terrestris/shogun2/init/ContentInitializer.java
+++ b/src/shogun2-init/src/main/java/de/terrestris/shogun2/init/ContentInitializer.java
@@ -1,6 +1,10 @@
 package de.terrestris.shogun2.init;
 
 import java.sql.SQLException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
 
 import javax.sql.DataSource;
 
@@ -17,14 +21,25 @@ import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
 
 import de.terrestris.shogun2.model.Application;
+import de.terrestris.shogun2.model.BorderLayout;
+import de.terrestris.shogun2.model.Header;
+import de.terrestris.shogun2.model.LayerTree;
+import de.terrestris.shogun2.model.Layout;
+import de.terrestris.shogun2.model.Module;
 import de.terrestris.shogun2.model.User;
+import de.terrestris.shogun2.model.Viewport;
 import de.terrestris.shogun2.security.acl.AclUtil;
 import de.terrestris.shogun2.service.InitializationService;
 
 /**
- * @author Nils Bühner
+ * Class to initialize some kind of content.
+ *
+ * <b>ATTENTION:</b> This class is currently used to provide some demo content.
+ * In future, certain entities (like some default {@link Layout}s or
+ * {@link Module} s should be created on the base of (configurable) bean
+ * definitions.
  *
- * Class to initialize some kind of content
+ * @author Nils Bühner
  *
 */
 public class ContentInitializer {
@@ -32,8 +47,7 @@ public class ContentInitializer {
    /**
     * The Logger
     */
-   private static final Logger LOG = Logger
-           .getLogger(ContentInitializer.class);
+   private static final Logger LOG = Logger.getLogger(ContentInitializer.class);
 
    /**
     * Flag symbolizing if content initialization should be active on startup
@@ -87,11 +101,13 @@ public class ContentInitializer {
 
    /**
     * The method called on initialization
+    *
+    * THIS WILL CURRENTLY PRODUCE SOME DEMO CONTENT
     */
    public void initializeDatabaseContent() {
 
        if (this.shogunInitEnabled.equals(true)) {
-           LOG.info("Initializing some SHOGun content!");
+           LOG.info("Initializing some SHOGun demo content!");
 
            LOG.info("Cleaning up ACL tables...");
            cleanupAclTables();
@@ -109,13 +125,36 @@ public class ContentInitializer {
            Application adminApp = new Application("AdminApp", null);
            Application userApp = new Application("UserApp", null);
 
-           adminApp = initService.createApplication(adminApp);
-           userApp = initService.createApplication(userApp);
-
-           LOG.info("Created an admin app and a user app.");
+           // CREATE AND ADD A VIEWPORT MODULE WITH A BORDER LAYOUT
+           BorderLayout borderLayout = new BorderLayout();
+           borderLayout.setRegions(Arrays.asList("north", "west"));
+           borderLayout.setPropertyHints(new HashSet<String>(Arrays.asList("height", "border")));
+           borderLayout.setPropertyMusts(new HashSet<String>(Arrays.asList("width")));
+
+           Map<String, String> properties = new HashMap<String, String>();
+           properties.put("width", "200");
+           properties.put("border", "2");
+
+           Viewport vp = new Viewport();
+
+           vp.setLayout(borderLayout);
+           vp.setProperties(properties);
+
+           Header headerModule = new Header();
+           vp.addModule(headerModule);
+
+           LayerTree layerTreeModule = new LayerTree();
+           vp.addModule(layerTreeModule);
+
+           adminApp.setViewport(vp);
 
+           // MANAGE SECURITY/ACL
+           adminApp = initService.createApplication(adminApp);
+           userApp = initService.createApplication(userApp);
+
            logInUser(admin);
 
            aclSecurityUtil.addPermission(adminApp, admin, BasePermission.READ);
@@ -137,8 +176,7 @@ public class ContentInitializer {
     * @param user
     */
    private void logInUser(User user) {
-       Authentication authRequest = new UsernamePasswordAuthenticationToken(
-               user.getAccountName(), user.getPassword());
+       Authentication authRequest = new UsernamePasswordAuthenticationToken(user.getAccountName(), user.getPassword());
 
        Authentication authResult = authenticationProvider.authenticate(authRequest);
        SecurityContextHolder.getContext().setAuthentication(authResult);
Extend the initialization of demo content
terrestris_shogun-core
train
b4daeedd1624335268e6780200c6bdae0563ecfa
diff --git a/src/graphql.js b/src/graphql.js
index <HASH>..<HASH> 100644
--- a/src/graphql.js
+++ b/src/graphql.js
@@ -4,7 +4,6 @@ import { ZalgoPromise } from 'zalgo-promise/src';
 import { request, stringifyError } from 'belter/src';
 import { CURRENCY } from '@paypal/sdk-constants/src';
 
-
 import { getLogger } from './logger';
 import {
     getIntent,
@@ -17,6 +16,8 @@ import {
     getDisableCard,
     getBuyerCountry
 } from './script';
+import { buildPayPalUrl } from './config';
+
 
 type FundingEligibilityParams = {|
     clientID : string,
@@ -58,7 +59,7 @@ function buildFundingEligibilityVariables() : FundingEligibilityParams {
 
 export function callGraphQL<T, V>({ query, variables = {}, headers = {} } : {| query : string, variables : V, headers? : { [string] : string } |}) : ZalgoPromise<T> {
     return request({
-        url:    GRAPHQL_URI,
+        url:    buildPayPalUrl(GRAPHQL_URI),
         method: 'POST',
         json:   {
             query,
add domain into graphql url (#<I>)

* enable getting merchant Ids from data-merchant-id.
* support email address or * as query value.
* use const.
* fixed lint error.
* support fields as param.
* log error.
paypal_paypal-sdk-client
train
d3ce7cc45dcb839a9b9dee5570bfc31d67e3ddc5
diff --git a/cirq/testing/random_circuit.py b/cirq/testing/random_circuit.py
index <HASH>..<HASH> 100644
--- a/cirq/testing/random_circuit.py
+++ b/cirq/testing/random_circuit.py
@@ -96,8 +96,8 @@ def random_circuit(qubits: Union[Sequence[ops.Qid], int],
         raise ValueError('At least one qubit must be specified.')
     gate_domain = {k: v for k, v in gate_domain.items() if v <= n_qubits}
     if not gate_domain:
-        raise ValueError(f'After removing gates that act on less that '
-                         '{n_qubits}, gate_domain had no gates.')
+        raise ValueError(f'After removing gates that act on less than '
+                         f'{n_qubits} qubits, gate_domain had no gates.')
     max_arity = max(gate_domain.values())
 
     prng = value.parse_random_state(random_state)
diff --git a/cirq/testing/random_circuit_test.py b/cirq/testing/random_circuit_test.py
index <HASH>..<HASH> 100644
--- a/cirq/testing/random_circuit_test.py
+++ b/cirq/testing/random_circuit_test.py
@@ -37,21 +37,44 @@ def test_random_circuit_errors():
     with pytest.raises(ValueError, match='At least one'):
         _ = cirq.testing.random_circuit(qubits=(), n_moments=5, op_density=0.5)
 
-    with pytest.raises(ValueError, match='had no gates'):
+    with pytest.raises(
+            ValueError,
+            match=
+            'After removing gates that act on less than 1 qubits, gate_domain '
+            'had no gates'):
         _ = cirq.testing.random_circuit(qubits=1,
                                         n_moments=5,
                                         op_density=0.5,
                                         gate_domain={cirq.CNOT: 2})
 
 
+def _cases_for_random_circuit():
+    i = 0
+    while i < 10:
+        n_qubits = random.randint(1, 20)
+        n_moments = random.randint(1, 10)
+        op_density = random.random()
+        if random.randint(0, 1):
+            gate_domain = dict(
+                random.sample(
+                    tuple(cirq.testing.DEFAULT_GATE_DOMAIN.items()),
+                    random.randint(1, len(cirq.testing.DEFAULT_GATE_DOMAIN))))
+            # Sometimes we generate gate domains whose gates all act on a
+            # number of qubits greater that the number of qubits for the
+            # circuit. In this case, try again.
+            if all(n > n_qubits for n in gate_domain.values()):
+                # coverage: ignore
+                continue
+        else:
+            gate_domain = None
+            pass
+        pass_qubits = random.choice((True, False))
+        yield (n_qubits, n_moments, op_density, gate_domain, pass_qubits)
+        i += 1
+
+
 @pytest.mark.parametrize(
     'n_qubits,n_moments,op_density,gate_domain,pass_qubits',
-    [(random.randint(1, 20), random.randint(1, 10), random.random(),
-      (None if random.randint(0, 1) else dict(
-          random.sample(
-              tuple(cirq.testing.DEFAULT_GATE_DOMAIN.items()),
-              random.randint(1, len(cirq.testing.DEFAULT_GATE_DOMAIN))))),
-      random.choice((True, False))) for _ in range(10)])
+    _cases_for_random_circuit())
 def test_random_circuit(n_qubits: Union[int, Sequence[cirq.Qid]],
                         n_moments: int,
                         op_density: float,
Fix flakes in random_circuit_test (#<I>)
quantumlib_Cirq
train
e8a33ded395fd4f1285f1a4bad1ddc2d6f5f0870
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -37,11 +37,16 @@ module.exports = function(options) {
       requireName = name.replace(options.replaceExp, '');
       requireName = options.camelize ? camelize(requireName) : requireName;
     }
-    Object.defineProperty(container, requireName, options.lazy ? {
-      get: function() {
-        return options.requireFn(name);
-      }
-    } : options.requireFn(name));
+    if (options.lazy) {
+      Object.defineProperty(container, requireName, {
+        get: function() {
+          return options.requireFn(name);
+        }
+      });
+    }
+    else {
+      container[requireName] = options.requireFn(name);
+    }
   });
 
   return container;
fix require issue when options.lazy is false
simbo_auto-plug
train
09e4a6afe6167fedb427ff4c979ff3b9aa0b2f0c
diff --git a/extras/foreman_callback.py b/extras/foreman_callback.py
index <HASH>..<HASH> 100755
--- a/extras/foreman_callback.py
+++ b/extras/foreman_callback.py
@@ -1,51 +1,97 @@
 import os
 from datetime import datetime
+from collections import defaultdict
 import json
 import uuid
 import requests
 
-FOREMAN_URL = 'http://localhost:3000'
+FOREMAN_URL = "http://localhost:3000"
 FOREMAN_HEADERS = {
-    'Content-Type': 'application/json',
-    'Accept': 'application/json'
+    "Content-Type": "application/json",
+    "Accept": "application/json"
 }
 TIME_FORMAT="%Y-%m-%d_%H%M%S_%f"
-FILE_NAME_FORMAT="%(now)s-%(host)s.json"
-MSG_FORMAT='{"name":"%(host)s","_timestamp":"%(now)s","category":"%(category)s", "facts": %(data)s}' + "\n"
-LOG_DIR="/tmp/ansible/events"
-AGGREGATION_KEY = uuid.uuid4().hex
-
-if not os.path.exists(LOG_DIR):
-    os.makedirs(LOG_DIR)
+FACTS_FORMAT="""
+{
+    "name":"%(host)s",
+    "_timestamp":"%(now)s",
+    "facts": %(data)s
+}
+"""
+REPORT_FORMAT="""{
+"report":
+  {
+    "host":"%(host)s",
+    "reported_at":"%(now)",
+    "status":"",
+    "metrics":"",
+    "logs":""
+  }
+}
+"""
 
 class CallbackModule(object):
     """
-    logs playbook results, per host, in LOG_DIR
-    sends request to Foreman with ansible setup module facts
+    Sends Ansible facts (if ansible -m setup ran) and reports
     """
 
     def log(self, host, category, data):
         if type(data) != dict:
             data = dict(msg=data)
 
-        if not 'ansible_facts' in data:
-            return
+        data['category'] = category
+        if 'ansible_facts' in data:
+            self.send_facts(host, data)
+
+        self.send_report(host, data)
+
+    """
+    Sends facts to Foreman, to be parsed by foreman_ansible fact parser.
+    The default fact importer should import these facts properly.
+    """
+
+    def send_facts(self, host, data):
         data["_type"] = "ansible"
         data = json.dumps(data)
-        dir_path = os.path.join(LOG_DIR, AGGREGATION_KEY)
-        if not os.path.exists(dir_path):
-            os.makedirs(dir_path)
-        now = datetime.now().strftime(TIME_FORMAT)
-        path = os.path.join(dir_path, FILE_NAME_FORMAT % dict(now=now, host=host))
-        facts_json = MSG_FORMAT % dict(host=host, now=now, category=category, data=data)
-        fd = open(path, "w")
-        fd.write(facts_json)
-        fd.close()
+        facts_json = FACTS_FORMAT % dict(host=host,
+                                         now=datetime.now().strftime(TIME_FORMAT),
+                                         data=data)
+        print facts_json
         requests.post(url=FOREMAN_URL + '/api/v2/hosts/facts',
                       data=facts_json,
                       headers=FOREMAN_HEADERS,
                       verify=False)
 
+    """
+    TODO
+    Send reports to Foreman, to be parsed by Foreman config report importer.
+    I want to follow chef-handler-foreman strategy here and massage the data
+    to get a report json that Foreman can handle without writing another
+    report importer.
+    """
+
+    def send_report(self, host, data):
+        status = defaultdict(lambda:0)
+        failed_report_category = ["FAILED", "UNREACHABLE", "ASYNC_FAILED"]
+        success_report_category = ["OK", "SKIPPED", "ASYNC_OK"]
+        if data['category'] in failed_report_category:
+            status['failed'] = 1
+        if data['category'] in success_report_category:
+            status['failed'] = 1
+        if data['changed'] == 'true':
+            status['changed'] = 1
+#        print data
+#        data = json.dumps(data)
+#        report_json = REPORT_FORMAT % dict(host=host,
+#                                           now=datetime.now().strftime(TIME_FORMAT),
+#                                           status=status,
+#                                           metrics=metrics,
+#                                           logs=logs)
+#        requests.post(url=FOREMAN_URL + '/api/v2/reports',
+#                      data=report_json,
+#                      headers=FOREMAN_HEADERS,
+#                      verify=False)
+#
     def on_any(self, *args, **kwargs):
         pass
Small refactor of callback - sketch reports support
theforeman_foreman_ansible
train
b801ab04e3cd30092694c94fc687df31f42cb4bc
diff --git a/src/main/java/org/gitlab4j/api/RepositoryFileApi.java b/src/main/java/org/gitlab4j/api/RepositoryFileApi.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/gitlab4j/api/RepositoryFileApi.java
+++ b/src/main/java/org/gitlab4j/api/RepositoryFileApi.java
@@ -61,7 +61,7 @@ public class RepositoryFileApi extends AbstractApi {
     public RepositoryFile getFileInfo(Object projectIdOrPath, String filePath, String ref) throws GitLabApiException {
 
         Form form = new Form();
-        addFormParam(form, "ref", ref, true);
+        addFormParam(form, "ref", (ref != null ? urlEncode(ref) : null), true);
 
         Response response = head(Response.Status.OK, form.asMap(),
                 "projects", getProjectIdOrPath(projectIdOrPath), "repository", "files", urlEncode(filePath));
@@ -162,7 +162,7 @@ public class RepositoryFileApi extends AbstractApi {
         }
 
         Form form = new Form();
-        addFormParam(form, "ref", ref, true);
+        addFormParam(form, "ref", (ref != null ? urlEncode(ref) : null), true);
         Response response = get(Response.Status.OK, form.asMap(), "projects", getProjectIdOrPath(projectIdOrPath), "repository", "files", urlEncode(filePath));
         return (response.readEntity(RepositoryFile.class));
     }
@@ -325,7 +325,8 @@ public class RepositoryFileApi extends AbstractApi {
         }
 
         Form form = new Form();
-        addFormParam(form, isApiVersion(ApiVersion.V3) ? "branch_name" : "branch", branchName, true);
+        addFormParam(form, isApiVersion(ApiVersion.V3) ? "branch_name" : "branch",
+                (branchName != null ? urlEncode(branchName) : null), true);
         addFormParam(form, "commit_message", commitMessage, true);
 
         Response.Status expectedStatus = (isApiVersion(ApiVersion.V3) ? Response.Status.OK : Response.Status.NO_CONTENT);
Now URL encodes refs and branch names (#<I>)
gmessner_gitlab4j-api
train
e8076443d2270289c3f4593d0e78332b88c05dfe
diff --git a/molgenis-core-ui/src/main/resources/js/component/Form.js b/molgenis-core-ui/src/main/resources/js/component/Form.js
index <HASH>..<HASH> 100644
--- a/molgenis-core-ui/src/main/resources/js/component/Form.js
+++ b/molgenis-core-ui/src/main/resources/js/component/Form.js
@@ -598,7 +598,10 @@
				};
 
				if (attr.fieldType === 'COMPOUND') {
-					controlProps['errorMessages'] = this.props.errorMessages;
+					_.extend(controlProps, {
+						errorMessages : this.props.errorMessages,
+						hideOptional : this.props.hideOptional
+					});
				} else {
					controlProps['errorMessage'] = this.props.errorMessages[attr.name];
				}
diff --git a/molgenis-core-ui/src/main/resources/js/component/FormControlGroup.js b/molgenis-core-ui/src/main/resources/js/component/FormControlGroup.js
index <HASH>..<HASH> 100644
--- a/molgenis-core-ui/src/main/resources/js/component/FormControlGroup.js
+++ b/molgenis-core-ui/src/main/resources/js/component/FormControlGroup.js
@@ -2,7 +2,7 @@
 (function(_, React, molgenis) {
	"use strict";
 
-	var div = React.DOM.div, p = React.DOM.p;
+	var div = React.DOM.div, p = React.DOM.p, fieldset = React.DOM.fieldset, legend = React.DOM.legend;;
 
	/**
	 * @memberOf component
@@ -38,10 +38,11 @@
		// add control for each attribute
		var foundFocusControl = false;
		var controls = [];
+		var hasVisible = false;
		for(var i = 0; i < attributes.length; ++i) {
			var attr = attributes[i];
			if ((attr.visibleExpression === undefined) || (this.props.entity.allAttributes[attr.name].visible === true)) {
-				var Control = attr.fieldType === 'COMPOUND' ? molgenis.ui.FormControlGroup : molgenis.ui.FormControl;
+				var ControlFactory = attr.fieldType === 'COMPOUND' ? molgenis.ui.FormControlGroup : molgenis.ui.FormControl;
				var controlProps = {
					entity : this.props.entity,
					attr : attr,
@@ -58,7 +59,10 @@
				};
 
				if (attr.fieldType === 'COMPOUND') {
-					controlProps['errorMessages'] = this.props.errorMessages;
+					_.extend(controlProps, {
+						errorMessages : this.props.errorMessages,
+						hideOptional : this.props.hideOptional
+					});
				} else {
					controlProps['errorMessage'] = this.props.errorMessages[attr.name];
				}
@@ -68,25 +72,30 @@
					foundFocusControl = true;
				}
 
-				controls.push(Control(controlProps));
+				var Control = ControlFactory(controlProps);
+				if(attr.nillable === true && this.props.hideOptional === true) {
+					Control = div({className: 'hide'}, Control);
+				} else {
+					hasVisible = true;
+				}
+				controls.push(Control);
			}
		}
 
-		return (
-//			div({className: 'panel panel-default'},
-//				div({className: 'panel-body'},
-					React.DOM.fieldset({},
-						React.DOM.legend({}, this.props.attr.label),
-						p({}, this.props.attr.description),
-						div({className: 'row'},
-							div({className: 'col-md-offset-1 col-md-11'},
-								controls
-							)
-						)
-					)
-//				)
-//			)
+		var Fieldset = fieldset({},
+			legend({}, this.props.attr.label),
+			p({}, this.props.attr.description),
+			div({className: 'row'},
+				div({className: 'col-md-offset-1 col-md-11'},
+					controls
+				)
+			)
		);
+
+		if(!hasVisible) {
+			Fieldset = div({className: 'hide'}, Fieldset);
+		}
+		return Fieldset;
	}
 });
Fix #<I> Forms: nillable compound attributes are not hidden
molgenis_molgenis
train
cdb03bdf7dba8f92f830a9a147df6af4a0c3c821
diff --git a/ui/src/components/table/table-header.js b/ui/src/components/table/table-header.js
index <HASH>..<HASH> 100644
--- a/ui/src/components/table/table-header.js
+++ b/ui/src/components/table/table-header.js
@@ -51,9 +51,7 @@ export default {
           ? slot(props)
           : h(QTh, {
             key: col.name,
-            props,
-            style: col.headerStyle,
-            class: col.headerClasses
+            props
           }, () => col.label)
       })
fix(QTable): duplicate headerClasses added #<I>
quasarframework_quasar
train
a1684b1f932e828e979e0e87df90e5cdaa35226c
diff --git a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java b/server/src/main/java/com/netflix/conductor/server/ConductorServer.java
index <HASH>..<HASH> 100644
--- a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java
+++ b/server/src/main/java/com/netflix/conductor/server/ConductorServer.java
@@ -18,9 +18,7 @@
  */
 package com.netflix.conductor.server;
 
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Paths;
+import java.io.InputStream;
 import java.util.Collection;
 import java.util.EnumSet;
 import java.util.LinkedList;
@@ -211,7 +209,6 @@ public class ConductorServer {
	}
 
	private static void createKitchenSink(int port) throws Exception {
-
		List<TaskDef> taskDefs = new LinkedList<>();
		for(int i = 0; i < 40; i++) {
			taskDefs.add(new TaskDef("task_" + i, "task_" + i, 1, 0));
@@ -222,15 +219,11 @@ public class ConductorServer {
		ObjectMapper om = new ObjectMapper();
		client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(om.writeValueAsString(taskDefs));
 
-		URL template = Main.class.getClassLoader().getResource("kitchensink.json");
-		byte[] source = Files.readAllBytes(Paths.get(ClassLoader.getSystemResource(template.getFile()).toURI()));
-		String json = new String(source);
-		client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(json);
+		InputStream stream = Main.class.getResourceAsStream("/kitchensink.json");
+		client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream);
 
-		template = Main.class.getClassLoader().getResource("sub_flow_1.json");
-		source = Files.readAllBytes(Paths.get(ClassLoader.getSystemResource(template.getFile()).toURI()));
-		json = new String(source);
-		client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(json);
+		stream = Main.class.getResourceAsStream("/sub_flow_1.json");
+		client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream);
 
		logger.info("Kitchen sink workflows are created!");
	}
loading the resources as a stream to avoid reading them as file - and OS level compatibility issues.
Netflix_conductor
train
e7b3410626703b9321f8c05a662c35f0d378fb73
diff --git a/js/wee.chain.js b/js/wee.chain.js
index <HASH>..<HASH> 100644
--- a/js/wee.chain.js
+++ b/js/wee.chain.js
@@ -106,7 +106,7 @@
		 * @returns {$}
		 */
		reverse: function() {
-			var copy = W.$extend({}, this),
+			var copy = W.$copy(this),
				x = this.length,
				total = x,
				i = 0;
Use shorter $copy method in chained reverse method
weepower_wee-core
train
635a2c547041ca8470dcb9ee1f85c053f78a2fec
diff --git a/salt/cli/caller.py b/salt/cli/caller.py
index <HASH>..<HASH> 100644
--- a/salt/cli/caller.py
+++ b/salt/cli/caller.py
@@ -151,6 +151,8 @@ class BaseCaller(object):
             # _retcode will be available in the kwargs of the outputter function
             if self.opts.get('retcode_passthrough', False):
                 sys.exit(ret['retcode'])
+            elif ret['retcode'] != salt.defaults.exitcodes.EX_OK:
+                sys.exit(salt.defaults.exitcodes.EX_GENERIC)
         except SaltInvocationError as err:
             raise SystemExit(err)
 
@@ -390,10 +392,12 @@ class RAETCaller(BaseCaller):
                 {'local': print_ret},
                 out=ret.get('out', 'nested'),
                 opts=self.opts,
-                _retcode=ret.get('retcode', 0))
+                _retcode=ret.get('retcode', salt.defaults.exitcodes.EX_OK))
             # _retcode will be available in the kwargs of the outputter function
             if self.opts.get('retcode_passthrough', False):
                 sys.exit(ret['retcode'])
+            elif ret['retcode'] != salt.defaults.exitcodes.EX_OK:
+                sys.exit(salt.defaults.exitcodes.EX_GENERIC)
 
         except SaltInvocationError as err:
             raise SystemExit(err)
Normalize to EX_GENERIC for error conditions in salt-call CLI
saltstack_salt
train
55984d4ea2285aaa7846ccd332ec48a4cc323f0f
diff --git a/integration/e2e/acl_test.go b/integration/e2e/acl_test.go
index <HASH>..<HASH> 100644
--- a/integration/e2e/acl_test.go
+++ b/integration/e2e/acl_test.go
@@ -119,6 +119,66 @@ var _ = Describe("EndToEndACL", func() {
		execute(adminRunner)
		Eventually(adminRunner.Err()).Should(gbytes.Say(`\Qdeliver completed with status (FORBIDDEN)\E`))
	})
+
+	Context("when the ACL policy for Deliver is satisfied", func() {
+		By("setting the block event ACL policy to Org1/Admins")
+		policyName := resources.Event_Block
+		policy := "/Channel/Application/Org1/Admins"
+		SetACLPolicy(&w, policyName, policy)
+
+		By("setting the log level for deliver to debug")
+		logRun := w.Components.Peer()
+		logRun.ConfigDir = filepath.Join(w.Rootpath, "org1.example.com_0")
+		logRun.MSPConfigPath = filepath.Join(w.Rootpath, "crypto", "peerOrganizations", "org1.example.com", "users", "Admin@org1.example.com", "msp")
+		lRunner := logRun.SetLogLevel("common/deliver", "debug")
+		execute(lRunner)
+		Expect(lRunner.Err()).To(gbytes.Say("Log level set for peer modules matching regular expression 'common/deliver': DEBUG"))
+
+		By("fetching the latest block from the peer")
+		fetchRun := w.Components.Peer()
+		fetchRun.ConfigDir = filepath.Join(w.Rootpath, "org1.example.com_0")
+		fetchRun.MSPConfigPath = filepath.Join(w.Rootpath, "crypto", "peerOrganizations", "org1.example.com", "users", "Admin@org1.example.com", "msp")
+		fRunner := fetchRun.FetchChannel(w.Deployment.Channel, filepath.Join(testDir, "newest_block.pb"), "newest", "")
+		execute(fRunner)
+		Expect(fRunner.Err()).To(gbytes.Say("Received block: "))
+		// TODO - enable this once the peer's logs are available here
+		// Expect(peerRunner.Err()).To(gbytes.Say(`\Q[channel: testchannel] Done delivering \E`))
+
+		By("setting the log level for deliver to back to info")
+		lRunner = logRun.SetLogLevel("common/deliver", "info")
+		execute(lRunner)
+		Expect(lRunner.Err()).To(gbytes.Say("Log level set for peer modules matching regular expression 'common/deliver': INFO"))
+	})
+
+	Context("tests when the ACL policy for Deliver is not satisifed", func() {
+		By("setting the block event ACL policy to Org2/Admins")
+		policyName := resources.Event_Block
+		policy := "/Channel/Application/Org2/Admins"
+		SetACLPolicy(&w, policyName, policy)
+
+		By("setting the log level for deliver to debug")
+		logRun := w.Components.Peer()
+		logRun.ConfigDir = filepath.Join(w.Rootpath, "org1.example.com_0")
+		logRun.MSPConfigPath = filepath.Join(w.Rootpath, "crypto", "peerOrganizations", "org1.example.com", "users", "Admin@org1.example.com", "msp")
+		lRunner := logRun.SetLogLevel("common/deliver", "debug")
+		execute(lRunner)
+		Expect(lRunner.Err()).To(gbytes.Say("Log level set for peer modules matching regular expression 'common/deliver': DEBUG"))
+
+		By("fetching the latest block from the peer")
+		fetchRun := w.Components.Peer()
+		fetchRun.ConfigDir = filepath.Join(w.Rootpath, "org1.example.com_0")
+		fetchRun.MSPConfigPath = filepath.Join(w.Rootpath, "crypto", "peerOrganizations", "org1.example.com", "users", "Admin@org1.example.com", "msp")
+		fRunner := fetchRun.FetchChannel(w.Deployment.Channel, filepath.Join(testDir, "newest_block.pb"), "newest", "")
+		execute(fRunner)
+		Expect(fRunner.Err()).To(gbytes.Say("can't read the block: &{FORBIDDEN}"))
+		// TODO - enable this once the peer's logs are available here
+		// Expect(peerRunner.Err()).To(gbytes.Say(`\Q[channel: testchannel] Done delivering \Q`))
+
+		By("setting the log level for deliver to back to info")
+		lRunner = logRun.SetLogLevel("common/deliver", "info")
+		execute(lRunner)
+		Expect(lRunner.Err()).To(gbytes.Say("Log level set for peer modules matching regular expression 'common/deliver': INFO"))
+	})
 })
[FAB-<I>] Function test for block ACL

This CR adds function tests for the block event ACL policy.

Change-Id: Ic<I>c5fdfd9af<I>b9facf<I>c<I>a4fbb1da4ecea0
hyperledger_fabric
train
7ae0c748a5c4519c18a703d61315d6695e81212f
diff --git a/jlib-core/src/main/java/org/jlib/core/array/ArrayUtility.java b/jlib-core/src/main/java/org/jlib/core/array/ArrayUtility.java
index <HASH>..<HASH> 100644
--- a/jlib-core/src/main/java/org/jlib/core/array/ArrayUtility.java
+++ b/jlib-core/src/main/java/org/jlib/core/array/ArrayUtility.java
@@ -37,6 +37,11 @@ public final class ArrayUtility {
     /** empty array of Objects */
     public static final Object[] EMPTY_ARRAY = new Object[0];
 
+    @SuppressWarnings("unchecked")
+    public <Item> Item[] emptyArray() {
+        return (Item[]) EMPTY_ARRAY;
+    }
+
     /**
      * Returns a new {@link Iterable} adapter for the specified Items.
      *
ArrayUtility.emptyArray() created
jlib-framework_jlib-operator
train
bd8c5c192ea46e0d723e2da393609ecd4308c187
diff --git a/demo/component/RadialBarChart.js b/demo/component/RadialBarChart.js
index <HASH>..<HASH> 100644
--- a/demo/component/RadialBarChart.js
+++ b/demo/component/RadialBarChart.js
@@ -14,9 +14,8 @@ export default React.createClass({
    ];
 
    const style = {
-      top: 0,
-      left: 350,
-      lineHeight: '24px'
+      lineHeight: '24px',
+      left: 300,
    };
 
    const label = {orient: 'outer'};
diff --git a/src/component/DefaultLegendContent.js b/src/component/DefaultLegendContent.js
index <HASH>..<HASH> 100644
--- a/src/component/DefaultLegendContent.js
+++ b/src/component/DefaultLegendContent.js
@@ -10,7 +10,6 @@ class DefaultLegendContent extends React.Component {
 
  static propTypes = {
    content: PropTypes.element,
-    wrapperStyle: PropTypes.object,
    iconSize: PropTypes.number,
    layout: PropTypes.oneOf(['horizontal', 'vertical']),
    align: PropTypes.oneOf(['center', 'left', 'right']),
@@ -87,23 +86,18 @@ class DefaultLegendContent extends React.Component {
  }
 
  render() {
-    const { payload, layout, align, wrapperStyle } = this.props;
+    const { payload, layout, align } = this.props;
 
    if (!payload || !payload.length) { return null; }
 
-    let finalStyle = {
+    const finalStyle = {
      padding: 0,
      margin: 0,
      textAlign: layout === 'horizontal' ? align : 'left',
    };
 
-    if (layout === 'vertical') {
-      finalStyle.position = 'absolute';
-    }
-
-    finalStyle = { ...finalStyle, ...wrapperStyle };
-
    return (
      <ul className="recharts-default-legend" style={finalStyle}>
        {this.renderItems()}
diff --git a/src/component/Legend.js b/src/component/Legend.js
index <HASH>..<HASH> 100644
--- a/src/component/Legend.js
+++ b/src/component/Legend.js
@@ -47,23 +47,36 @@ class Legend extends React.Component {
    }
  }
 
-  getDefaultPosition() {
+  getDefaultPosition(style) {
    const { layout, align, verticalAlign } = this.props;
+    let hPos;
+    let vPos;
 
-    if (layout === 'vertical') {
-      return align === 'right' ? { right: 0 } : { left: 0 };
+    if (!style || ((style.left === undefined || style.left === null) && (
+      style.right === undefined || style.right === null))) {
+      hPos = align === 'right' ? { right: 0 } : { left: 0 }
+    }
+
+    if (!style || ((style.top === undefined || style.top === null) && (
+      style.bottom === undefined || style.bottom === null))) {
+      if (layout === 'vertical') {
+        vPos = verticalAlign === 'bottom' ? { bottom : 0} : { top: 0 };
+      } else {
+        vPos = verticalAlign === 'top' ? { top: 0 } : { bottom: 0 };
+      }
    }
 
-    return verticalAlign === 'top' ? { top: 0 } : { bottom: 0 };
+    return { ...hPos, ...vPos };
  }
 
  render() {
-    const { content, width, height, layout } = this.props;
+    const { content, width, height, layout, wrapperStyle } = this.props;
    const outerStyle = {
      position: 'absolute',
      width: width || 'auto',
      height: height || 'auto',
-      ...this.getDefaultPosition(),
+      ...this.getDefaultPosition(wrapperStyle),
+      ...wrapperStyle,
    };
 
    return (
fix(Legend): fix the location method of legend
recharts_recharts
train
d6bd5e228a820c234a614bfe6c9b88176b6bed45
diff --git a/public/examples/boomboom/scripts/boomboomcontroller.js b/public/examples/boomboom/scripts/boomboomcontroller.js
index <HASH>..<HASH> 100644
--- a/public/examples/boomboom/scripts/boomboomcontroller.js
+++ b/public/examples/boomboom/scripts/boomboomcontroller.js
@@ -53,6 +53,7 @@ var main = function(
 
  var globals = {
    debug: false,
+    forceController: false,
  };
  Misc.applyUrlSettings(globals);
  MobileHacks.fixHeightHack();
@@ -156,15 +157,19 @@ var main = function(
    ctx.drawImage(frame, 0, 0);
  };
 
-  g_client.addEventListener('score', handleScore);
-  g_client.addEventListener('start', handleStart);
-  g_client.addEventListener('tied', handleTie);
-  g_client.addEventListener('died', handleDeath);
-  g_client.addEventListener('winner', handleWinner);
+  if (globals.forceController) {
+    hideMsg();
+  } else {
+    g_client.addEventListener('score', handleScore);
+    g_client.addEventListener('start', handleStart);
+    g_client.addEventListener('tied', handleTie);
+    g_client.addEventListener('died', handleDeath);
+    g_client.addEventListener('winner', handleWinner);
+    g_client.addEventListener('waitForStart', handleWaitForStart);
+    g_client.addEventListener('waitForNextGame', handleWaitForNextGame);
+    g_client.addEventListener('waitForMorePlayers', handleWaitForMorePlayers);
+  }
  g_client.addEventListener('setColor', handleSetColor);
-  g_client.addEventListener('waitForStart', handleWaitForStart);
-  g_client.addEventListener('waitForNextGame', handleWaitForNextGame);
-  g_client.addEventListener('waitForMorePlayers', handleWaitForMorePlayers);
 
  var sounds = {};
  g_audioManager = new AudioManager(sounds);
@@ -203,15 +208,17 @@ var main = function(
  Touch.setupButtons({
    inputElement: $("buttons"),
    buttons: [
-      { element: $("abutton"), callback: function(e) { handleAbutton(e.pressed); }, },
-      { element: $("avatarinput"), callback: function(e) { handleShow(e.pressed); }, },
+      { element: $("abuttoninput"), callback: function(e) { handleAbutton(e.pressed); }, },
+      { element: $("avatarinput"),  callback: function(e) { handleShow(e.pressed); }, },
    ],
  });
 
  Touch.setupVirtualDPads({
    inputElement: $("dpadleftinput"),
    callback: handleDPad,
-    fixedCenter: false,
+    fixedCenter: true,
+    deadSpaceRadius: 15,
+    axisSize: 35,
    pads: [
      { referenceElement: $("dpadleft"),
use axis division instead of angle division for boomboom dpad
greggman_HappyFunTimes
train
08f374ead0487c330992ec94aca26400cb216e60
diff --git a/src/sap.ui.rta/src/sap/ui/rta/toolbar/Adaptation.js b/src/sap.ui.rta/src/sap/ui/rta/toolbar/Adaptation.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.rta/src/sap/ui/rta/toolbar/Adaptation.js
+++ b/src/sap.ui.rta/src/sap/ui/rta/toolbar/Adaptation.js
@@ -126,8 +126,9 @@ function(
	function _setButtonProperties(sButtonName, sIcon, sTextKey, sToolTipKey) {
		var oButton = this.getControl(sButtonName);
		var sText = this.getTextResources().getText(sTextKey);
+		var sToolTip = this.getTextResources().getText(sToolTipKey);
		oButton.setText(sText || "");
-		oButton.setTooltip(sToolTipKey || "");
+		oButton.setTooltip(sToolTip || "");
		oButton.setIcon(sIcon || "");
	}
 
@@ -149,7 +150,7 @@ function(
		this.getControl("draftLabel").setVisible(false);
		this.getControl("iconBox").setVisible(false);
		this._showButtonIcon("adaptationSwitcherButton", "sap-icon://wrench", "BTN_ADAPTATION");
-		this._showButtonIcon("navigationSwitcherButton", "sap-icon://split", "BTN_NAVIGATION");
+		this._showButtonIcon("navigationSwitcherButton", "sap-icon://explorer", "BTN_NAVIGATION");
		this.getControl("iconBox").setVisible(false);
		this.getControl("iconSpacer").setVisible(false);
		this._showButtonIcon("exit", "sap-icon://decline", "BTN_EXIT");
[INTERNAL] sap.ui.rta.toolbar.Adaptation changes the switcher section

* changing the "Navigation" icon in lower resolutions
* FIX: the tooltips for the icons are translated

Change-Id: I1d<I>c<I>a<I>c<I>eded<I>
SAP_openui5
train
eefd07b2696a8b04fdd99c3e948ca99c5f83e904
diff --git a/scot/builtin/binica.py b/scot/builtin/binica.py
index <HASH>..<HASH> 100644
--- a/scot/builtin/binica.py
+++ b/scot/builtin/binica.py
@@ -11,7 +11,7 @@ import subprocess
 
 import numpy as np
 
-if not hasattr(__builtin__, 'FileNotFoundError'):
+if not hasattr(__builtins__, 'FileNotFoundError'):
     # PY27: subprocess.Popen raises OSError instead of FileNotFoundError
     FileNotFoundError = OSError
 
@@ -91,11 +91,12 @@ def binica(data, binary=binica_binary):
     if os.path.exists(binary):
         with open(scriptfile) as sc:
             try:
-                with subprocess.Popen(binary, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=sc) as proc:
-                    print('waiting for binica to finish...')
-                    proc.wait()
-                    print('binica output:')
-                    print(proc.stdout.read().decode())
+                proc = subprocess.Popen(binary, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=sc)
+                print('waiting for binica to finish...')
+                proc.wait()
+                #print('binica output:')
+                #print(proc.stdout.read().decode())
+                proc.stdout.close()
             except FileNotFoundError:
                 raise RuntimeError('The BINICA binary ica_linux exists in the file system but could not be executed. '
                                    'This indicates that 32 bit libraries are not installed on the system.')
PY<I>: don't use subprocess.Popen in with statement
scot-dev_scot
train
aa4156d898d2cb1ea456bd6e5b955bf10d1c4c57
diff --git a/shared/actions/teams.js b/shared/actions/teams.js
index <HASH>..<HASH> 100644
--- a/shared/actions/teams.js
+++ b/shared/actions/teams.js
@@ -335,12 +335,13 @@ const _getDetails = function*(action: TeamsGen.GetDetailsPayload): Saga.SagaGene
    }
    types.forEach(type => {
      const members = details.members[type] || []
-      members.forEach(({username, active}) => {
+      members.forEach(({active, fullName, username}) => {
        infos.push(
          Constants.makeMemberInfo({
+            active,
+            fullName,
            type: typeMap[type],
            username,
-            active,
          })
        )
        memberNames = memberNames.add(username)
diff --git a/shared/constants/teams.js b/shared/constants/teams.js
index <HASH>..<HASH> 100644
--- a/shared/constants/teams.js
+++ b/shared/constants/teams.js
@@ -18,9 +18,10 @@ export const makeChannelInfo: I.RecordFactory<Types._ChannelInfo> = I.Record({
 })
 
 export const makeMemberInfo: I.RecordFactory<Types._MemberInfo> = I.Record({
+  active: true,
+  fullName: '',
  type: null,
  username: '',
-  active: true,
 })
 
 export const makeInviteInfo: I.RecordFactory<Types._InviteInfo> = I.Record({
diff --git a/shared/constants/types/teams.js b/shared/constants/types/teams.js
index <HASH>..<HASH> 100644
--- a/shared/constants/types/teams.js
+++ b/shared/constants/types/teams.js
@@ -33,9 +33,10 @@ export type _ChannelInfo = {
 export type ChannelInfo = I.RecordOf<_ChannelInfo>
 
 export type _MemberInfo = {
+  active: boolean,
+  fullName: string,
  type: ?TeamRoleType,
  username: string,
-  active: boolean,
 }
 export type MemberInfo = I.RecordOf<_MemberInfo>
 
diff --git a/shared/teams/team/index.js b/shared/teams/team/index.js
index <HASH>..<HASH> 100644
--- a/shared/teams/team/index.js
+++ b/shared/teams/team/index.js
@@ -274,6 +274,7 @@ class Team extends React.PureComponent<Props> {
 
    // massage data for rowrenderers
    const memberProps = members.map(member => ({
+      fullName: member.fullName,
      username: member.username,
      teamname: name,
      active: member.active,
diff --git a/shared/teams/team/member-row/container.js b/shared/teams/team/member-row/container.js
index <HASH>..<HASH> 100644
--- a/shared/teams/team/member-row/container.js
+++ b/shared/teams/team/member-row/container.js
@@ -10,9 +10,10 @@ import {navigateAppend} from '../../../actions/route-tree'
 import type {TypedState} from '../../../constants/reducer'
 
 type OwnProps = {
+  active: boolean,
+  fullName: string,
  username: string,
  teamname: string,
-  active: boolean,
 }
 
 type StateProps = {
@@ -22,11 +23,15 @@ type StateProps = {
  _members: I.Set<Types.MemberInfo>,
 }
 
-const mapStateToProps = (state: TypedState, {teamname, username, active}: OwnProps): StateProps => ({
+const mapStateToProps = (
+  state: TypedState,
+  {active, fullName, teamname, username}: OwnProps
+): StateProps => ({
+  _members: state.entities.getIn(['teams', 'teamNameToMembers', teamname], I.Set()),
+  active,
  following: amIFollowing(state, username),
+  fullName: state.config.username === username ? 'You' : fullName,
  you: state.config.username,
-  active,
-  _members: state.entities.getIn(['teams', 'teamNameToMembers', teamname], I.Set()),
 })
 
 type DispatchProps = {
diff --git a/shared/teams/team/member-row/index.js b/shared/teams/team/member-row/index.js
index <HASH>..<HASH> 100644
--- a/shared/teams/team/member-row/index.js
+++ b/shared/teams/team/member-row/index.js
@@ -7,13 +7,13 @@ import {typeToLabel} from '../../../constants/teams'
 import {type TypeMap} from '../../../constants/types/teams'
 
 export type Props = {
-  username: string,
-  following: boolean,
-  teamname: string,
-  you: ?string,
-  type: ?string,
  active: boolean,
+  following: boolean,
+  fullName: string,
  onClick: () => void,
+  type: ?string,
+  username: string,
+  you: ?string,
 }
 
 const showCrown: TypeMap = {
@@ -24,7 +24,7 @@ const showCrown: TypeMap = {
 }
 
 export const TeamMemberRow = (props: Props) => {
-  const {username, onClick, you, following, type, active} = props
+  const {active, following, fullName, onClick, type, username, you} = props
  return (
    <ClickableBox
      style={{
@@ -53,6 +53,12 @@ export const TeamMemberRow = (props: Props) => {
        )}
      </Box>
      <Box style={globalStyles.flexBoxRow}>
+        {!!fullName &&
+          active && (
+            <Text style={{marginRight: globalMargins.xtiny}} type="BodySmall">
+              {fullName} •
+            </Text>
+          )}
        {type &&
          !!showCrown[type] && (
            <Icon
Use full names on Members tab (#<I>)

* Use full names on Members tab
* lint
keybase_client
train
ca47f293c06d5d60a591502029048772fa385921
diff --git a/src/main/java/org/dynjs/runtime/builtins/types/regexp/DynRegExp.java b/src/main/java/org/dynjs/runtime/builtins/types/regexp/DynRegExp.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dynjs/runtime/builtins/types/regexp/DynRegExp.java
+++ b/src/main/java/org/dynjs/runtime/builtins/types/regexp/DynRegExp.java
@@ -42,10 +42,15 @@ public class DynRegExp extends DynObject {
         setPatternAndFlags(null, pattern, flags);
     }
 
-    public void setPatternAndFlags(ExecutionContext context, final String pattern, final String flags) {
+    public void setPatternAndFlags(ExecutionContext context, String pattern, final String flags) {
         checkSyntaxOfFlags(context, flags);
 
         PropertyDescriptor sourceDesc = new PropertyDescriptor();
+        // 15.10.4.1:
+        // If P is the empty String, this specification can be met by letting S be "(?:)".
+        if (pattern.equals("")) {
+            pattern = "(?:)";
+        }
         sourceDesc.set(Names.VALUE, pattern);
         sourceDesc.set(Names.WRITABLE, false);
         sourceDesc.set(Names.CONFIGURABLE, false);
Regexp constructed with an empty string use the pattern "(?:)"

<I> - If P is the empty String, this specification can be met by letting S be "(?:)".
dynjs_dynjs
train
0691c5d75601bf3a35d4a9ddbc3fcd8b1ad1a95a
diff --git a/openshift/openshift.go b/openshift/openshift.go
index <HASH>..<HASH> 100644
--- a/openshift/openshift.go
+++ b/openshift/openshift.go
@@ -290,6 +290,10 @@ func (d *openshiftImageDestination) SupportedManifestMIMETypes() []string {
 }
 
 func (d *openshiftImageDestination) PutManifest(m []byte) error {
+	// FIXME? Can this eventually just call d.docker.PutManifest()?
+	// Right now we need this as a skeleton to attach signatures to, and
+	// to workaround our inability to change tags when uploading v2s1 manifests.
+
	// Note: This does absolutely no kind/version checking or conversions.
	manifestDigest, err := manifest.Digest(m)
	if err != nil {
@@ -327,7 +331,7 @@ func (d *openshiftImageDestination) PutManifest(m []byte) error {
		return err
	}
 
-	return d.docker.PutManifest(m)
+	return nil
 }
 
 func (d *openshiftImageDestination) PutBlob(digest string, stream io.Reader) error {
Fix manifest upload to recent versions of OpenShift

Uploading the manifest through the Docker registry API is redundant to the upload we have just done via an ImageStreamMapping, and recent versions reject it because we upload through a digest and they want a tag to use for the ImageStream.
containers_image
train
7ddbeb71e30693efaf00866b07afddbd0c130f6d
diff --git a/pymatgen/ext/tests/test_matproj.py b/pymatgen/ext/tests/test_matproj.py
index <HASH>..<HASH> 100644
--- a/pymatgen/ext/tests/test_matproj.py
+++ b/pymatgen/ext/tests/test_matproj.py
@@ -65,24 +65,25 @@ class MPResterTest(unittest.TestCase):
                 "is_compatible", "task_ids",
                 "density", "icsd_ids", "total_magnetization"]
        # unicode literals have been reintroduced in py>3.2
-        expected_vals = [-191.33812137, -6.833504334642858, -2.551358929370749,
-                         28, {k: v for k, v in {'P': 4, 'Fe': 4, 'O': 16, 'Li': 4}.items()},
+
+        expected_vals = [-191.3359011, -6.833425039285714, -2.5515769497278913,
+                         28, {'P': 4, 'Fe': 4, 'O': 16, 'Li': 4},
                         "LiFePO4", True, ['Li', 'O', 'P', 'Fe'], 4, 0.0,
-                         {k: v for k, v in {'Fe': 5.3, 'Li': 0.0, 'O': 0.0, 'P': 0.0}.items()}, True,
+                         {'Fe': 5.3, 'Li': 0.0, 'O': 0.0, 'P': 0.0}, True,
                         [u'mp-601412', u'mp-19017', u'mp-796535',
                          u'mp-797820', u'mp-540081', u'mp-797269'],
-                         3.4662026991351147,
+                         3.464840709092822,
                         [159107, 154117, 160776, 99860, 181272, 166815,
                          260571, 92198, 165000, 155580, 38209, 161479,
                          153699, 260569, 260570, 200155, 260572, 181341,
                          181342, 72545, 56291, 97764, 162282, 155635],
-                         16.0002716]
+                         15.9996841]
 
        for (i, prop) in enumerate(props):
            if prop not in ['hubbards', 'unit_cell_formula', 'elements',
                            'icsd_ids', 'task_ids']:
                val = self.rester.get_data("mp-19017", prop=prop)[0][prop]
-                self.assertAlmostEqual(expected_vals[i], val)
+                self.assertAlmostEqual(expected_vals[i], val, places=2)
            elif prop in ["elements", "icsd_ids", "task_ids"]:
                self.assertEqual(set(expected_vals[i]),
                                 set(self.rester.get_data("mp-19017",
Update db entry values. Relax places=7 constraint on assertion.
materialsproject_pymatgen
train
9fa398f5fc48b317d91e0083f82255f32794d5b8
diff --git a/spec/orthography_tokenizer_spec.js b/spec/orthography_tokenizer_spec.js
index <HASH>..<HASH> 100644
--- a/spec/orthography_tokenizer_spec.js
+++ b/spec/orthography_tokenizer_spec.js
@@ -21,7 +21,6 @@
 */
 
 var OrthographyTokenizer = require('../lib/natural/tokenizers/regexp_tokenizer').OrthographyTokenizer;
-console.log(OrthographyTokenizer);
 
 var sentencesInFinnish = [
   ["Mikä sinun nimesi on?", [ 'Mikä', 'sinun', 'nimesi', 'on' ]],
@@ -31,7 +30,6 @@ var sentencesInFinnish = [
 
 describe("The orthography tokenizer tokenizes sentences in Finnish", function() {
   var tokenizer = new OrthographyTokenizer({language: "fi"});
-  console.log(tokenizer);
   sentencesInFinnish.forEach(function(sentencePlusResult) {
     it("It should correctly tokenize the following sentence: " + sentencePlusResult[0], function() {
       //console.log(tokenizer.tokenize(sentencePlusResult[0]));
Removes some loggings from spec
NaturalNode_natural
train
e19264dae75ccd764967e95a2d80cea316cd362a
diff --git a/src/Adapters/Adapter.php b/src/Adapters/Adapter.php
index <HASH>..<HASH> 100644
--- a/src/Adapters/Adapter.php
+++ b/src/Adapters/Adapter.php
@@ -430,11 +430,18 @@ abstract class Adapter implements DataInterface
         if ($bigger) {
             $sizes = [];
 
-            foreach ($images as $image) {
-                $sizes[$image['url']] = $image['size'];
+            foreach ($images as $img) {
+                $sizes[$img['url']] = $img['size'];
             }
 
-            $image = static::getBigger($sizes);
+            $biggest = static::getBigger($sizes);
+
+            foreach ($images as $img) {
+                if ($biggest == $img['url']) {
+                    $image = $biggest;
+                    break;
+                }
+            }
         } else {
             reset($images);
             $image = current($images);
fix option 'choose_bigger_image'
oscarotero_Embed
train
17b0c9ddecf72e77654549363f4895984bc524df
diff --git a/example/lib/controllers/football.js b/example/lib/controllers/football.js
index <HASH>..<HASH> 100644
--- a/example/lib/controllers/football.js
+++ b/example/lib/controllers/football.js
@@ -5,7 +5,8 @@ const footballDb = require('./../db/footballDb')
 
 module.exports = {
     leagues_id_table, // binds to /leagues/:id/table
     leagues,          // binds to /leagues
-    index             // binds to /
+    index,            // binds to /
+    index_id          // binds to /:id
 }
 
 /**
@@ -31,7 +32,7 @@ function leagues(name) {
         .map(addLeaguePath))
 
     function addLeaguePath(league) {
-        league.leagueHref = "/football/leagues/" + league.id + "/table"
+        league.leagueHref = '/football/leagues/' + league.id + '/table'
        return league
     }
 }
@@ -41,8 +42,23 @@ function leagues(name) {
 * gets out of the way and delegates on that action the responsibility of
 * sending the response.
 * So whenever you want to do something different from the default behavior
- * you just have to append res to your parameters.
+ * you just have to append `res` to your parameters.
 */
-function index() {
-    return '/football/leagues'
-}
\ No newline at end of file
+function index(res) {
+    /**
+     * If this controller is loaded with an options object set with
+     * the property `redirectOnStringResult` then this is equivalent
+     * to removing the `res` parameter and just return the destination
+     * string path '/football/leagues'.
+     */
+    res.redirect('/football/leagues')
+}
+
+/**
+ * If this controller is loaded with an options object set with the property
+ * `redirectOnStringResult` then this action method redirects to
+ * `/football/leagues/:id/table`.
+ */
+function index_id(id) {
+    return '/football/leagues/' + id + '/table'
+}
Update web app example with a new endpoint that redirects through the returned string path.
CCISEL_connect-controller
train
ab50a7efb6dc637e855fcf41488b6577a3bd8727
diff --git a/ontquery/plugins/services/rdflib.py b/ontquery/plugins/services/rdflib.py
index <HASH>..<HASH> 100644
--- a/ontquery/plugins/services/rdflib.py
+++ b/ontquery/plugins/services/rdflib.py
@@ -1,4 +1,5 @@
 import rdflib
+import requests
 import ontquery as oq
 import ontquery.exceptions as exc
 from ontquery.utils import log, red
diff --git a/test/test_services.py b/test/test_services.py
index <HASH>..<HASH> 100644
--- a/test/test_services.py
+++ b/test/test_services.py
@@ -154,3 +154,12 @@ class TestSciGraph(ServiceBase, unittest.TestCase):
 
 class TestRdflib(ServiceBase, unittest.TestCase):
     remote = oq.plugin.get('rdflib')(test_graph)
+
+
+@skipif_no_net
+class TestGitHub(ServiceBase, unittest.TestCase):
+    remote = oq.plugin.get('GitHub')('SciCrunch', 'NIF-Ontology', 'ttl/bridge/uberon-bridge.ttl', 'ttl/NIF-GrossAnatomy.ttl', branch='dev')
+
+    def test_ontid(self):
+        t = self.OntTerm(OntId('BIRNLEX:796'))
+        assert t.label, repr(t)
services rdflib: add requests import and a test for GitHub
tgbugs_ontquery
train
e5a986968a0844549283d14ae77d60a98d2987a1
diff --git a/chatterbot/adapters/storage/mongodb.py b/chatterbot/adapters/storage/mongodb.py index <HASH>..<HASH> 100644 --- a/chatterbot/adapters/storage/mongodb.py +++ b/chatterbot/adapters/storage/mongodb.py @@ -139,11 +139,13 @@ class MongoDatabaseAdapter(StorageAdapter): self.statements.replace_one({'text': statement.text}, data, True) # Make sure that an entry for each response is saved - for response_statement in statement.in_response_to: - response = self.find(response_statement.text) - if not response: - response = Statement(response_statement.text) - self.update(response) + for response in statement.in_response_to: + # $setOnInsert does nothing if the document is not created + self.statements.update_one( + {'text': response.text}, + {'$setOnInsert': {'in_response_to': []}}, + upsert=True + ) return statement
Increased efficiency of update method. This changes the method to use the upsert option in MongoDB so that documents are only created if they do not already exist. This change removes the need to call the find method to check if a statement already exists in the database, which reduces the number of database transactions required to complete the operation.
gunthercox_ChatterBot
train
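The key idea in this commit is $setOnInsert combined with upsert=True: MongoDB inserts the document only when no match exists and leaves existing documents untouched, so the previous find-then-create round trip disappears. A rough pymongo sketch of the same call — the connection details and collection names are illustrative only:

from pymongo import MongoClient

# Connection details are placeholders for illustration.
statements = MongoClient('mongodb://localhost:27017')['chatterbot']['statements']

def ensure_statement(text):
    """Create a statement document only if none with this text exists.

    $setOnInsert applies its fields solely when the upsert actually
    inserts; an existing document is left untouched, so no preceding
    find() round trip is needed.
    """
    statements.update_one(
        {'text': text},
        {'$setOnInsert': {'in_response_to': []}},
        upsert=True,
    )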
ebcaba64f366cbd704b862494a7c1894a32b14d9
diff --git a/packages/terafoundation/index.js b/packages/terafoundation/index.js index <HASH>..<HASH> 100644 --- a/packages/terafoundation/index.js +++ b/packages/terafoundation/index.js @@ -1,17 +1,18 @@ 'use strict'; const SimpleContext = require('./lib/simple-context'); +const { getArgs } = require('./lib/sysconfig'); +const validateConfigs = require('./lib/validate-configs'); +const master = require('./lib/master'); +const api = require('./lib/api'); // this module is not really testable /* istanbul ignore next */ module.exports = function clusterContext(config) { const domain = require('domain'); - const primary = domain.create(); const cluster = require('cluster'); - const { getArgs } = require('./lib/sysconfig'); - const validateConfigs = require('./lib/validate_configs'); - const api = require('./lib/api'); + const primary = domain.create(); const name = config.name ? config.name : 'terafoundation'; @@ -128,7 +129,7 @@ module.exports = function clusterContext(config) { } } - require('./lib/master')(context, config); + master(context, config); // If there's a master plugin defined, pass it on. if (config.master) {
Detect more errors in tests by not nesting requires
terascope_teraslice
train
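The rationale here — hoisting requires to the top of the module so a broken dependency fails at load time instead of deep inside some code path — applies equally to Python imports. A small illustrative sketch (the function names are made up for the example):

# Preferred: top-level imports are evaluated when the test runner loads
# the module, so a missing or broken dependency fails immediately.
import json

def load_config(path):
    with open(path) as fh:
        return json.load(fh)

# Avoided: a nested import defers the failure to the first call, letting
# a broken dependency hide behind any code path the tests never reach.
def load_config_lazy(path):
    import json
    with open(path) as fh:
        return json.load(fh)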
c13490b26979b0ced8eaeb8ef85c343777aa0419
diff --git a/Generator/VariantGenerator.php b/Generator/VariantGenerator.php index <HASH>..<HASH> 100644 --- a/Generator/VariantGenerator.php +++ b/Generator/VariantGenerator.php @@ -13,11 +13,10 @@ namespace Sylius\Component\Variation\Generator; use Sylius\Component\Resource\Factory\FactoryInterface; use Sylius\Component\Variation\Model\VariableInterface; -use Sylius\Component\Variation\Model\VariantInterface; use Sylius\Component\Variation\SetBuilder\SetBuilderInterface; /** - * Abstract variant generator service implementation. + * Variant generator service implementation. * * It is used to create all possible combinations of object options * and create Variant models from them. @@ -33,8 +32,6 @@ use Sylius\Component\Variation\SetBuilder\SetBuilderInterface; class VariantGenerator implements VariantGeneratorInterface { /** - * Variant manager. - * * @var FactoryInterface */ protected $variantFactory; @@ -45,8 +42,6 @@ class VariantGenerator implements VariantGeneratorInterface private $setBuilder; /** - * Constructor. - * * @param FactoryInterface $variantFactory * @param SetBuilderInterface $setBuilder */ @@ -68,9 +63,9 @@ class VariantGenerator implements VariantGeneratorInterface $optionSet = []; $optionMap = []; - foreach ($variable->getOptions() as $k => $option) { + foreach ($variable->getOptions() as $key => $option) { foreach ($option->getValues() as $value) { - $optionSet[$k][] = $value->getId(); + $optionSet[$key][] = $value->getId(); $optionMap[$value->getId()] = $value; } } @@ -91,18 +86,6 @@ class VariantGenerator implements VariantGeneratorInterface } $variable->addVariant($variant); - - $this->process($variable, $variant); } } - - /** - * Override if needed. - * - * @param VariableInterface $variable - * @param VariantInterface $variant - */ - protected function process(VariableInterface $variable, VariantInterface $variant) - { - } }
[Variant] Change variant generation method
Sylius_Variation
train
eeb61825a31aca8575edeb45b643d609843743f3
diff --git a/sos/report/plugins/openstack_nova.py b/sos/report/plugins/openstack_nova.py index <HASH>..<HASH> 100644 --- a/sos/report/plugins/openstack_nova.py +++ b/sos/report/plugins/openstack_nova.py @@ -51,6 +51,10 @@ class OpenStackNova(Plugin): "nova-manage " + nova_config + " floating list", suggest_filename="nova-manage_floating_list" ) + self.add_cmd_output( + "nova-status " + nova_config + " upgrade check", + suggest_filename="nova-status_upgrade_check" + ) vars_all = [p in os.environ for p in [ 'OS_USERNAME', 'OS_PASSWORD']]
[openstack_nova] Add nova-status upgrade check output. nova-status upgrade check [1] is a tool that performs release-specific checks ahead of an upgrade. [1] <URL>
sosreport_sos
train
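nova-status upgrade check communicates through its exit code as well as its report text, which is what makes capturing it in a diagnostic plugin worthwhile. A hedged sketch of invoking it directly from Python — the --config-file flag follows the usual oslo.config convention, and the exit-code meanings below should be verified against your Nova release:

import subprocess

def run_upgrade_check(config='/etc/nova/nova.conf'):
    """Run `nova-status upgrade check` and capture its verdict.

    Exit codes as documented for recent Nova releases (verify for
    yours): 0 = all checks passed, 1 = warnings, 2 = failures.
    """
    result = subprocess.run(
        ['nova-status', '--config-file', config, 'upgrade', 'check'],
        capture_output=True, text=True,
    )
    return result.returncode, result.stdout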
aac8c623bf3125fc50a1fe434a431848c0976c88
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -8,6 +8,7 @@ curdir = os.path.dirname(os.path.abspath(__file__)) setup( name='rest_framework_nested', description='Nested resources for the Django Rest Framework', + long_description=open('README.md').read(), license='Apache', version='0.1.0', author='Alan Justino and Oscar Vilaplana',
Preparing long_description for PyPI
alanjds_drf-nested-routers
train
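One caveat with reading a Markdown README into long_description: PyPI assumes reStructuredText unless the content type says otherwise. A hedged sketch of the more defensive variant — the package name is a placeholder, and long_description_content_type needs a reasonably recent setuptools (38.6 or later, if memory serves):

from setuptools import setup

setup(
    name='example-package',  # placeholder name
    version='0.1.0',
    long_description=open('README.md', encoding='utf-8').read(),
    # PyPI otherwise assumes reStructuredText and may reject or
    # mis-render a Markdown README.
    long_description_content_type='text/markdown',
)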
722c8324bf87d5ac1e64afd8e6ba3407ab96a4d7
diff --git a/client/src/main/java/com/github/gumtreediff/client/Run.java b/client/src/main/java/com/github/gumtreediff/client/Run.java index <HASH>..<HASH> 100644 --- a/client/src/main/java/com/github/gumtreediff/client/Run.java +++ b/client/src/main/java/com/github/gumtreediff/client/Run.java @@ -99,11 +99,11 @@ public class Run { } public static void main(String[] args) { - initClients(); - Options opts = new Options(); args = Option.processCommandLine(args, opts); + initClients(); + Registry.Factory<? extends Client> client; if (args.length == 0) { System.err.println("** No command given.");
Initialize clients after the options have been read. This makes it possible to use the options during client initialization.
GumTreeDiff_gumtree
train
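The fix is purely an ordering change: parse the command line first, then initialize the client registry, so registration code can consult the parsed options. The same shape in Python, with hypothetical names:

import argparse

registry = []

def init_clients(opts):
    # Registration can now consult the parsed options.
    registry.append({'verbose': opts.verbose})

def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', action='store_true')
    opts = parser.parse_args(argv)  # read the options first...
    init_clients(opts)              # ...then initialize the clients
    return registry

main(['--verbose'])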
d14eccc5782eeed8a8f6b1a205c53d562c6c123e
diff --git a/lib/will_paginate/data_mapper.rb b/lib/will_paginate/data_mapper.rb index <HASH>..<HASH> 100644 --- a/lib/will_paginate/data_mapper.rb +++ b/lib/will_paginate/data_mapper.rb @@ -21,11 +21,15 @@ module WillPaginate options = options.dup pagenum = options.fetch(:page) { raise ArgumentError, ":page parameter required" } per_page = options.delete(:per_page) || self.per_page + total = options.delete(:total_entries) options.delete(:page) options[:limit] = per_page.to_i - all(options).page(pagenum) + + col = all(options).page(pagenum) + col.total_entries = total.to_i unless total.nil? || (total.kind_of?(String) && total.strip.empty?) + col end end @@ -33,6 +37,7 @@ module WillPaginate include WillPaginate::CollectionMethods attr_accessor :current_page + attr_writer :total_entries def paginated? !current_page.nil? diff --git a/spec/finders/active_record_spec.rb b/spec/finders/active_record_spec.rb index <HASH>..<HASH> 100644 --- a/spec/finders/active_record_spec.rb +++ b/spec/finders/active_record_spec.rb @@ -498,64 +498,4 @@ describe WillPaginate::ActiveRecord do Project.page(307445734561825862) }.should raise_error(WillPaginate::InvalidPage, "invalid offset: 9223372036854775830") end - - protected - - def ignore_deprecation - ActiveSupport::Deprecation.silence { yield } - end - - def run_queries(num) - QueryCountMatcher.new(num) - end - - def show_queries(&block) - counter = QueryCountMatcher.new(nil) - counter.run block - ensure - queries = counter.performed_queries - if queries.any? - puts queries - else - puts "no queries" - end - end - -end - -class QueryCountMatcher - def initialize(num) - @expected_count = num - end - - def matches?(block) - run(block) - - if @expected_count.respond_to? :include? - @expected_count.include? @count - else - @count == @expected_count - end - end - - def run(block) - $query_count = 0 - $query_sql = [] - block.call - ensure - @queries = $query_sql.dup - @count = $query_count - end - - def performed_queries - @queries - end - - def failure_message - "expected #{@expected_count} queries, got #{@count}\n#{@queries.join("\n")}" - end - - def negative_failure_message - "expected query count not to be #{@expected_count}" - end -end + end diff --git a/spec/finders/data_mapper_spec.rb b/spec/finders/data_mapper_spec.rb index <HASH>..<HASH> 100644 --- a/spec/finders/data_mapper_spec.rb +++ b/spec/finders/data_mapper_spec.rb @@ -80,6 +80,19 @@ describe WillPaginate::DataMapper do Animal.all(:conditions => ['1=2']).page(1).total_pages.should == 1 end + it "overrides total_entries count with a fixed value" do + lambda { + animals = Animal.paginate :page => 1, :per_page => 3, :total_entries => 999 + animals.total_entries.should == 999 + }.should run_queries(0) + end + + it "supports a non-int for total_entries" do + topics = Animal.paginate :page => 1, :per_page => 3, :total_entries => "999" + topics.total_entries.should == 999 + end + + it "can iterate and then call WP methods" do animals = Animal.all(:limit => 2).page(1) animals.each { |a| } diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -17,8 +17,32 @@ RSpec.configure do |config| def have_deprecation(msg) DeprecationMatcher.new(msg) end + + def run_queries(num) + QueryCountMatcher.new(num) + end + + def ignore_deprecation + ActiveSupport::Deprecation.silence { yield } + end + + def run_queries(num) + QueryCountMatcher.new(num) + end + + def show_queries(&block) + counter = QueryCountMatcher.new(nil) + counter.run block 
+ ensure + queries = counter.performed_queries + if queries.any? + puts queries + else + puts "no queries" + end + end } - + config.mock_with :mocha end @@ -69,3 +93,40 @@ class DeprecationMatcher $stderr = err end end + +class QueryCountMatcher + def initialize(num) + @expected_count = num + end + + def matches?(block) + run(block) + + if @expected_count.respond_to? :include? + @expected_count.include? @count + else + @count == @expected_count + end + end + + def run(block) + $query_count = 0 + $query_sql = [] + block.call + ensure + @queries = $query_sql.dup + @count = $query_count + end + + def performed_queries + @queries + end + + def failure_message + "expected #{@expected_count} queries, got #{@count}\n#{@queries.join("\n")}" + end + + def negative_failure_message + "expected query count not to be #{@expected_count}" + end +end
Support paginate(:total_entries => n) for dm
mislav_will_paginate
train
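The point of :total_entries is to let a caller hand over a precomputed row count so the paginator can skip its own COUNT query — including tolerating a numeric string, as the new spec checks. A toy Python paginator illustrating the idea (this is not will_paginate's API, just the shape of it):

class Page:
    def __init__(self, items, page, per_page, total_entries=None):
        self.items = items
        self.page = page
        self.per_page = per_page
        # A caller-supplied total (possibly a numeric string, as in the
        # new spec) removes the need to count rows again.
        self._total = int(total_entries) if total_entries not in (None, '') else None

    @property
    def total_entries(self):
        if self._total is None:
            # A real adapter would issue a COUNT query here.
            self._total = len(self.items)
        return self._total

page = Page(items=['a', 'b', 'c'], page=1, per_page=3, total_entries='999')
assert page.total_entries == 999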
8ce1f219014f7a80a8c7d1a8a0f344864b64eee6
diff --git a/Neos.Media/Migrations/Postgresql/Version20200823164701.php b/Neos.Media/Migrations/Postgresql/Version20200823164701.php index <HASH>..<HASH> 100644 --- a/Neos.Media/Migrations/Postgresql/Version20200823164701.php +++ b/Neos.Media/Migrations/Postgresql/Version20200823164701.php @@ -37,7 +37,7 @@ class Version20200823164701 extends AbstractMigration foreach (self::TYPES as $type) { $this->addSql(sprintf( - 'UPDATE neos_media_domain_model_thumbnail SET staticresource = "resource://Neos.Media/Public/IconSets/vivid/%s.svg" WHERE (staticresource = "resource://Neos.Media/Public/Icons/512px/%s.png")', + "UPDATE neos_media_domain_model_thumbnail SET staticresource = 'resource://Neos.Media/Public/IconSets/vivid/%s.svg' WHERE (staticresource = 'resource://Neos.Media/Public/Icons/512px/%s.png')", $type, $type ));
TASK: Fix quotes used in PostgreSQL migration
neos_neos-development-collection
train
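The rule behind this one-line fix: PostgreSQL reserves double quotes for identifiers (table and column names) and single quotes for string literals, so the double-quoted resource paths in the original migration would have been parsed as column references. A small sketch building the corrected statement — the table and column names come from the diff, the type value is a stand-in:

# PostgreSQL parses "double-quoted" tokens as identifiers and
# 'single-quoted' tokens as string literals, so the original
# double-quoted values would have been resolved as column names.
stmt = (
    "UPDATE neos_media_domain_model_thumbnail "
    "SET staticresource = 'resource://Neos.Media/Public/IconSets/vivid/{0}.svg' "
    "WHERE staticresource = 'resource://Neos.Media/Public/Icons/512px/{0}.png'"
).format('image')  # 'image' stands in for one of the migrated types
print(stmt)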