diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/src/_pytest/mark/structures.py b/src/_pytest/mark/structures.py index <HASH>..<HASH> 100644 --- a/src/_pytest/mark/structures.py +++ b/src/_pytest/mark/structures.py @@ -476,9 +476,9 @@ class MarkGenerator: skip = _SkipMarkDecorator(Mark("skip", (), {})) skipif = _SkipifMarkDecorator(Mark("skipif", (), {})) xfail = _XfailMarkDecorator(Mark("xfail", (), {})) - parametrize = _ParametrizeMarkDecorator(Mark("parametrize ", (), {})) - usefixtures = _UsefixturesMarkDecorator(Mark("usefixtures ", (), {})) - filterwarnings = _FilterwarningsMarkDecorator(Mark("filterwarnings ", (), {})) + parametrize = _ParametrizeMarkDecorator(Mark("parametrize", (), {})) + usefixtures = _UsefixturesMarkDecorator(Mark("usefixtures", (), {})) + filterwarnings = _FilterwarningsMarkDecorator(Mark("filterwarnings", (), {})) def __getattr__(self, name: str) -> MarkDecorator: if name[0] == "_":
mark: fix extraneous spaces in dummy type-checking marks (cherry picked from commit <I>e<I>b<I>b0cddb1ec<I>c<I>cbd7f<I>)
diff --git a/includes/class-freemius.php b/includes/class-freemius.php index <HASH>..<HASH> 100755 --- a/includes/class-freemius.php +++ b/includes/class-freemius.php @@ -10871,7 +10871,7 @@ * @return number[] */ private function get_plans_ids_associated_with_installs() { - if ( ! $this->_is_network_active ) { + if ( ! is_multisite() ) { if ( ! is_object( $this->_site ) || ! FS_Plugin_Plan::is_valid_id( $this->_site->plan_id ) ) {
[plans] Fixed an issue with the retrieving of plan IDs that are still associated with installs on a multisite environment.
diff --git a/lib/launchy/descendant_tracker.rb b/lib/launchy/descendant_tracker.rb index <HASH>..<HASH> 100644 --- a/lib/launchy/descendant_tracker.rb +++ b/lib/launchy/descendant_tracker.rb @@ -40,8 +40,9 @@ module Launchy # and passing all the rest of the parameters to that method in # each child def find_child( method, *args ) - klass = children.find do |klass| - klass.send( method, *args ) + klass = children.find do |child| + Launchy.log "Checking if class #{child} is the one for #{method}(#{args.join(', ')})}" + child.send( method, *args ) end end end
Add in some debug output for finding children
diff --git a/tests/test_eval.py b/tests/test_eval.py index <HASH>..<HASH> 100644 --- a/tests/test_eval.py +++ b/tests/test_eval.py @@ -44,6 +44,15 @@ class Test(unittest.TestCase): self.assertEqual(c2.eval('(x)'), 2) self.assertEqual(c3.eval('(x)'), 3) + @unittest.skip("Support for exception is not yet present") + def test_exception(self): + context = py_mini_racer.MiniRacer() + + js_source = "var f = function() {throw 'error'};" + + context.eval(js_source) + context.eval("f()") + if __name__ == '__main__': import sys
Add a skipped test for exception handling
diff --git a/core-bundle/contao/elements/ContentTable.php b/core-bundle/contao/elements/ContentTable.php index <HASH>..<HASH> 100644 --- a/core-bundle/contao/elements/ContentTable.php +++ b/core-bundle/contao/elements/ContentTable.php @@ -115,12 +115,12 @@ class ContentTable extends \ContentElement if ($j == 0) { - $class_tr = ' row_first'; + $class_tr .= ' row_first'; } if ($j == ($limit - 1)) { - $class_tr = ' row_last'; + $class_tr .= ' row_last'; } $class_eo = (($j % 2) == 0) ? ' even' : ' odd'; @@ -131,12 +131,12 @@ class ContentTable extends \ContentElement if ($i == 0) { - $class_td = ' col_first'; + $class_td .= ' col_first'; } if ($i == (count($rows[$j]) - 1)) { - $class_td = ' col_last'; + $class_td .= ' col_last'; } $arrBody['row_' . $j . $class_tr . $class_eo][] = array
[Core] The table content element did not assign the correct CSS class names when there was only one row and one column (see #<I>)
diff --git a/publ/__init__.py b/publ/__init__.py index <HASH>..<HASH> 100755 --- a/publ/__init__.py +++ b/publ/__init__.py @@ -10,7 +10,7 @@ import werkzeug.exceptions from . import config, rendering, model, index, caching, view, utils from . import maintenance, image -__version__ = '0.3.22' +__version__ = '0.3.22.1' class _PublApp(flask.Flask): diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ """Setup for Publ packaging""" # Always prefer setuptools over distutils -from setuptools import setup +from setuptools import setup, find_packages from os import path import publ @@ -52,7 +52,7 @@ setup( keywords='website cms publishing blog photogallery sharing', - packages=['publ'], + packages=find_packages(), install_requires=[ 'Flask',
Fix setup.py to include subdirs
diff --git a/lnwallet/interface_test.go b/lnwallet/interface_test.go index <HASH>..<HASH> 100644 --- a/lnwallet/interface_test.go +++ b/lnwallet/interface_test.go @@ -1283,6 +1283,13 @@ func TestLightningWallet(t *testing.T) { t.Fatalf("unable to set up mining node: %v", err) } + // Next mine enough blocks in order for segwit and the CSV package + // soft-fork to activate on SimNet. + numBlocks := netParams.MinerConfirmationWindow * 2 + if _, err := miningNode.Node.Generate(numBlocks); err != nil { + t.Fatalf("unable to generate blocks: %v", err) + } + rpcConfig := miningNode.RPCConfig() chainNotifier, err := btcdnotify.New(&rpcConfig)
lnwallet: mine enough blocks to activate CSV+segwit in reservation tests
diff --git a/systemd/test/test_daemon.py b/systemd/test/test_daemon.py index <HASH>..<HASH> 100644 --- a/systemd/test/test_daemon.py +++ b/systemd/test/test_daemon.py @@ -353,7 +353,7 @@ def test_daemon_notify_memleak(): try: notify('', True, 0, fds) - except ConnectionRefusedError: + except connection_error: pass assert sys.getrefcount(fd) <= ref_cnt, 'leak'
tests: python2-compat in another place
diff --git a/packages/babel-core/src/config/option-manager.js b/packages/babel-core/src/config/option-manager.js index <HASH>..<HASH> 100644 --- a/packages/babel-core/src/config/option-manager.js +++ b/packages/babel-core/src/config/option-manager.js @@ -303,7 +303,7 @@ const loadDescriptor = makeWeakCache( }); try { - item = value(api, options, { dirname }); + item = value(api, options, dirname); } catch (e) { if (alias) { e.message += ` (While processing: ${JSON.stringify(alias)})`;
Simplify dirname option in plugins/presets? (#<I>)
diff --git a/lib/ezdb/classes/ezdbinterface.php b/lib/ezdb/classes/ezdbinterface.php index <HASH>..<HASH> 100644 --- a/lib/ezdb/classes/ezdbinterface.php +++ b/lib/ezdb/classes/ezdbinterface.php @@ -104,7 +104,15 @@ class eZDBInterface $this->OutputTextCodec = null; $this->InputTextCodec = null; - if ( $this->UseBuiltinEncoding ) +/* + This is pseudocode, there is no such function as + mysql_supports_charset() of course + if ( $this->UseBuiltinEncoding and mysql_supports_charset( $charset ) ) + { + mysql_session_set_charset( $charset ); + } + else +*/ { include_once( "lib/ezi18n/classes/eztextcodec.php" ); $this->OutputTextCodec =& eZTextCodec::instance( $charset, false, false );
- Clarify code portion by adding pseudo code. Related to: <URL>
diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -25,7 +25,6 @@ extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', - 'sphinxcontrib.email', ] # Add any paths that contain templates here, relative to this directory.
Remove sphinxcontrib.email extension
diff --git a/graylog_hook.go b/graylog_hook.go index <HASH>..<HASH> 100644 --- a/graylog_hook.go +++ b/graylog_hook.go @@ -1,4 +1,4 @@ -package graylog // import "gopkg.in/gemnasium/logrus-graylog-hook.v1" +package graylog // import "github.com/raphyot/logrus-graylog-hook" import ( "bytes"
Changed package def tobe usable
diff --git a/lib/ooor/base.rb b/lib/ooor/base.rb index <HASH>..<HASH> 100644 --- a/lib/ooor/base.rb +++ b/lib/ooor/base.rb @@ -289,7 +289,7 @@ module Ooor self.class.object_service(:execute, object_db, object_uid, object_pass, self.class.openerp_model, method, *args) end - def load(attributes, remove_root=false)#an attribute might actually be a association too, will be determined here + def load(attributes, remove_root=false, persisted=false)#an attribute might actually be a association too, will be determined here self.class.reload_fields_definition(false, object_session) raise ArgumentError, "expected an attributes Hash, got #{attributes.inspect}" unless attributes.is_a?(Hash) @prefix_options, attributes = split_options(attributes)
load API closer to ActiveResource one; should fix broken test
diff --git a/salt/daemons/flo/core.py b/salt/daemons/flo/core.py index <HASH>..<HASH> 100644 --- a/salt/daemons/flo/core.py +++ b/salt/daemons/flo/core.py @@ -403,6 +403,7 @@ class SaltManorLaneSetup(ioflo.base.deeding.Deed): worker_seed.append('yard{0}'.format(ind + 1)) self.workers.value = itertools.cycle(worker_seed) + class SaltRaetLaneStackCloser(ioflo.base.deeding.Deed): # pylint: disable=W0232 ''' Closes lane stack server socket connection
Fix missing line lint issue
diff --git a/network.js b/network.js index <HASH>..<HASH> 100644 --- a/network.js +++ b/network.js @@ -1631,7 +1631,7 @@ eventBus.on('aa_definition_saved', function (payload, unit) { return; storage.readJoint(db, unit, { ifNotFound: function () { - throw Error('recently saved unit ' + unit + ' not found'); + console.log('recently saved unit ' + unit + ' not found'); }, ifFound: function (objJoint) { arrWses.forEach(function (ws) {
fixed: error when executing dry_run_aa
diff --git a/src/Component/JsonSchema/Console/Loader/SchemaLoader.php b/src/Component/JsonSchema/Console/Loader/SchemaLoader.php index <HASH>..<HASH> 100644 --- a/src/Component/JsonSchema/Console/Loader/SchemaLoader.php +++ b/src/Component/JsonSchema/Console/Loader/SchemaLoader.php @@ -40,6 +40,7 @@ class SchemaLoader implements SchemaLoaderInterface 'use-cacheable-supports-method', 'skip-null-values', 'skip-required-fields', + 'custom-string-format-mapping' ]; }
fix: missing declaration of new config option
diff --git a/lib/loglevel.js b/lib/loglevel.js index <HASH>..<HASH> 100644 --- a/lib/loglevel.js +++ b/lib/loglevel.js @@ -19,7 +19,7 @@ // Slightly dubious tricks to cut down minimized file size var noop = function() {}; var undefinedType = "undefined"; - var isIE = ( + var isIE = (typeof window !== undefinedType) && ( window.navigator.userAgent.indexOf('Trident/') >= 0 || window.navigator.userAgent.indexOf('MSIE ') >= 0 );
Fix bug in IE test that breaks logging for node.js
diff --git a/admin/jqadm/templates/product/item-category-standard.php b/admin/jqadm/templates/product/item-category-standard.php index <HASH>..<HASH> 100644 --- a/admin/jqadm/templates/product/item-category-standard.php +++ b/admin/jqadm/templates/product/item-category-standard.php @@ -55,6 +55,9 @@ $keys = [ <input class="item-listid" type="hidden" v-model="items['catalog.lists.id'][idx]" name="<?= $enc->attr( $this->formparam( array( 'category', 'catalog.lists.id', '' ) ) ); ?>" /> + <input class="item-label" type="hidden" v-model="items['catalog.code'][idx]" + name="<?= $enc->attr( $this->formparam( array( 'category', 'catalog.code', '' ) ) ); ?>" /> + <input class="item-label" type="hidden" v-model="items['catalog.label'][idx]" name="<?= $enc->attr( $this->formparam( array( 'category', 'catalog.label', '' ) ) ); ?>" />
Bugfix for JS error if product couldn't be saved
diff --git a/lib/cratus/group.rb b/lib/cratus/group.rb index <HASH>..<HASH> 100644 --- a/lib/cratus/group.rb +++ b/lib/cratus/group.rb @@ -27,7 +27,7 @@ module Cratus # TODO make this work with more things... unless @raw_ldap_data - puts "WARNING: Group '#{@name}' appears to be invalid or beyond the search scope!" + STDERR.puts "WARNING: Group '#{@name}' appears to be invalid or beyond the search scope!" return [] end
Outputing group warning to STDERR
diff --git a/workbench/workers/view_zip.py b/workbench/workers/view_zip.py index <HASH>..<HASH> 100644 --- a/workbench/workers/view_zip.py +++ b/workbench/workers/view_zip.py @@ -6,7 +6,7 @@ import pprint class ViewZip(object): ''' ViewZip: Generates a view for Zip files ''' - dependencies = ['meta', 'unzip'] + dependencies = ['meta', 'unzip', 'yara_sigs'] def __init__(self): self.workbench = zerorpc.Client(timeout=300, heartbeat=60) @@ -21,6 +21,7 @@ class ViewZip(object): view = {} view['payload_md5s'] = input_data['unzip']['payload_md5s'] + view['yara_sigs'] = input_data['yara_sigs']['matches'].keys() view.update(input_data['meta']) # Okay this view is going to also give the meta data about the payloads
adding yara_sigs to the zip view
diff --git a/lib/ffi-geos/prepared_geometry.rb b/lib/ffi-geos/prepared_geometry.rb index <HASH>..<HASH> 100644 --- a/lib/ffi-geos/prepared_geometry.rb +++ b/lib/ffi-geos/prepared_geometry.rb @@ -3,7 +3,7 @@ module Geos class PreparedGeometry include Geos::Tools - attr_reader :ptr + attr_reader :ptr, :geometry undef :clone, :dup @@ -12,7 +12,7 @@ module Geos FFIGeos.GEOSPrepare_r(Geos.current_handle, geom.ptr), auto_free ? self.class.method(:release) : self.class.method(:no_release) ) - @geom = geom + @geometry = geom if !auto_free @ptr.autorelease = false
Add an attr_reader to get to the geometry cmeiklejohn's patch.
diff --git a/lib/autocomplete-manager.js b/lib/autocomplete-manager.js index <HASH>..<HASH> 100644 --- a/lib/autocomplete-manager.js +++ b/lib/autocomplete-manager.js @@ -215,11 +215,11 @@ class AutocompleteManager { this.findSuggestions(activatedManually) }, 'autocomplete-plus:navigate-to-description-more-link': () => { - let suggestionListView = atom.views.getView(this.editor); - let descriptionContainer = suggestionListView.querySelector('.suggestion-description'); + let suggestionListView = atom.views.getView(this.editor) + let descriptionContainer = suggestionListView.querySelector('.suggestion-description') if (descriptionContainer.style.display === 'block') { - let descriptionMoreLink = descriptionContainer.querySelector('.suggestion-description-more-link'); - require('shell').openExternal(descriptionMoreLink.href); + let descriptionMoreLink = descriptionContainer.querySelector('.suggestion-description-more-link') + require('shell').openExternal(descriptionMoreLink.href) } } }))
removed semicolons in autocomplete-manager.js
diff --git a/imgaug/augmenters/size.py b/imgaug/augmenters/size.py index <HASH>..<HASH> 100644 --- a/imgaug/augmenters/size.py +++ b/imgaug/augmenters/size.py @@ -1439,8 +1439,8 @@ class CropToFixedSize(Augmenter): keypoints_on_image = keypoints_on_images[i] ih, iw = keypoints_on_image.shape[:2] - offset_x = int(offset_xs[i]*(iw-w+1)) if iw > w else 0 - offset_y = int(offset_ys[i]*(ih-h+1)) if ih > h else 0 + offset_y = int(offset_ys[i] * (ih - h)) if ih > h else 0 + offset_x = int(offset_xs[i] * (iw - w)) if iw > w else 0 keypoints_cropped = keypoints_on_image.shift(x=-offset_x, y=-offset_y) keypoints_cropped.shape = (min(ih, h), min(iw, w)) + keypoints_cropped.shape[2:]
Fix off by one error in CropToFixedSize
diff --git a/openquake/commands/with_tiles.py b/openquake/commands/with_tiles.py index <HASH>..<HASH> 100644 --- a/openquake/commands/with_tiles.py +++ b/openquake/commands/with_tiles.py @@ -18,6 +18,7 @@ from __future__ import division import os import time +import logging from openquake.baselib import sap, general, parallel from openquake.hazardlib import valid from openquake.commonlib import readinput, datastore, logs @@ -48,6 +49,8 @@ def with_tiles(num_tiles, job_ini, poolsize=0): parent_child[0] = calc_id parent_child[1] = calc_id logs.dbcmd('update_parent_child', parent_child) + logging.warn('Finished calculation %d of %d', + len(calc_ids) + 1, num_tiles) return calc_ids + [calc_id] calc_ids = Starmap(engine.run_tile, task_args, poolsize).reduce(agg, []) for calc_id in calc_ids:
Restored logging [skip CI] Former-commit-id: <I>dc8e<I>ee<I>bec<I>e<I>fb<I>b
diff --git a/lib/chanko/invoker/function_finder.rb b/lib/chanko/invoker/function_finder.rb index <HASH>..<HASH> 100644 --- a/lib/chanko/invoker/function_finder.rb +++ b/lib/chanko/invoker/function_finder.rb @@ -31,7 +31,11 @@ module Chanko end def active? - unit.try(:active?, context, active_if_options) + if unit + unit.active?(context, active_if_options) + else + false + end end end end
Fix problem that the unit may be false
diff --git a/ait/core/server/plugin.py b/ait/core/server/plugin.py index <HASH>..<HASH> 100644 --- a/ait/core/server/plugin.py +++ b/ait/core/server/plugin.py @@ -140,10 +140,15 @@ class TelemetryLimitMonitor(Plugin): log.info('Starting telemetry limit monitoring') def process(self, input_data, topic=None, **kwargs): - split = input_data[1:-1].split(',', 1) - pkt_id, pkt_data = int(split[0]), split[1] - packet = self.packet_dict[pkt_id] - decoded = tlm.Packet(packet, data=bytearray(pkt_data)) + try: + split = input_data[1:-1].split(',', 1) + pkt_id, pkt_data = int(split[0]), split[1] + packet = self.packet_dict[pkt_id] + decoded = tlm.Packet(packet, data=bytearray(pkt_data)) + except Exception as e: + log.error('TelemetryLimitMonitor: {}'.format(e)) + log.error('TelemetryLimitMonitor received input_data that it is unable to process. Skipping input ...') + return if packet.name in self.limit_dict: for field, defn in self.limit_dict[packet.name].iteritems():
Make TelemetryLimitMonitor plugin robust to poorly formed input Update the TelemetryLimitMonitor plugin so exceptions raised when handling potentially malformed data are caught, logged, and ignored. This ensures that the greenlet doesn't terminate when bad input is received.
diff --git a/drivers/python/rethinkdb.py b/drivers/python/rethinkdb.py index <HASH>..<HASH> 100644 --- a/drivers/python/rethinkdb.py +++ b/drivers/python/rethinkdb.py @@ -213,6 +213,14 @@ def gt(*terms): def gte(*terms): return Comparison(list(terms), p.GE) +def and_eq(_hash): + terms = [] + for key in _hash.iterkeys(): + val = _hash[key] + terms.append(eq(key, val)) + return Conjunction(terms) + + class Arithmetic(Term): def __init__(self, terms, op_type): if not terms:
Adding support for and_eq (to be used in filter later as sugar)
diff --git a/prow/config/branch_protection.go b/prow/config/branch_protection.go index <HASH>..<HASH> 100644 --- a/prow/config/branch_protection.go +++ b/prow/config/branch_protection.go @@ -362,10 +362,11 @@ func (c *Config) unprotectedBranches(presubmits map[string][]Presubmit) []string // because any branches not explicitly specified in the configuration will be unprotected. func (c *Config) BranchProtectionWarnings(logger *logrus.Entry, presubmits map[string][]Presubmit) { if warnings := c.reposWithDisabledPolicy(); len(warnings) > 0 { - logger.Warnf("The following repos define a policy, but have protect: false: %s", strings.Join(warnings, ",")) + + logger.WithField("repos", strings.Join(warnings, ",")).Warn("The following repos define a policy, but have protect: false") } if warnings := c.unprotectedBranches(presubmits); len(warnings) > 0 { - logger.Warnf("The following repos define a policy or require context(s), but have one or more branches with protect: false: %s", strings.Join(warnings, ",")) + logger.WithField("repos", strings.Join(warnings, ",")).Warn("The following repos define a policy or require context(s), but have one or more branches with protect: false") } }
Tide: Do not use printf logging for branchtotection warning
diff --git a/vm/src/jit.js b/vm/src/jit.js index <HASH>..<HASH> 100644 --- a/vm/src/jit.js +++ b/vm/src/jit.js @@ -488,7 +488,7 @@ if (canLoop) { loopVar = 'R[' + (a + 3) + ']'; - this.code[pc - 1] = 'for(' + loopVar + '=R[' + a + '],' + limitVar + '=' + limit + ';' + loopVar + (forward? '<' : '>') + '=' + limitVar + ';' + loopVar + '+=' + step +'){'; + this.code[pc - 1] = 'for(' + loopVar + '=R[' + a + '],' + limitVar + '=' + limit + ';' + loopVar + (step > 0? '<' : '>') + '=' + limitVar + ';' + loopVar + '+=' + step +'){'; delete this.jumpDestinations[this.pc]; return '}'; }
Fixes JIT output for negative for-loops.
diff --git a/pyontutils/blackfynn_api.py b/pyontutils/blackfynn_api.py index <HASH>..<HASH> 100644 --- a/pyontutils/blackfynn_api.py +++ b/pyontutils/blackfynn_api.py @@ -184,9 +184,9 @@ def mvp(): def process_files(bf, files): - ns = [nifti1.load(f.as_posix()) for f in files if '.nii' in f.suffixes] - ms = [loadmat(f.as_posix()) for f in files if '.mat' in f.suffixes] - dcs = [dcmread(f.as_posix()) for f in files if '.dcm' in f.suffixes] # loaded dicom files + niftis = [nifti1.load(f.as_posix()) for f in files if '.nii' in f.suffixes] + mats = [loadmat(f.as_posix()) for f in files if '.mat' in f.suffixes] + dicoms = [dcmread(f.as_posix()) for f in files if '.dcm' in f.suffixes] # loaded dicom files embed() # XXX you will drop into an interactive terminal in this scope
bfapi clarified naming of file type collections
diff --git a/src/geo/gmaps/gmaps.geometry.js b/src/geo/gmaps/gmaps.geometry.js index <HASH>..<HASH> 100644 --- a/src/geo/gmaps/gmaps.geometry.js +++ b/src/geo/gmaps/gmaps.geometry.js @@ -35,7 +35,9 @@ function PointView(geometryModel) { icon: { url: '/assets/layout/default_marker.png', anchor: {x: 10, y: 10} - } + }, + raiseOnDrag: false, + crossOnDrag: false } );
removed animation when drag marker under GMaps
diff --git a/android/src/main/java/com/imagepicker/Utils.java b/android/src/main/java/com/imagepicker/Utils.java index <HASH>..<HASH> 100644 --- a/android/src/main/java/com/imagepicker/Utils.java +++ b/android/src/main/java/com/imagepicker/Utils.java @@ -8,7 +8,6 @@ import android.content.ContentValues; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; -import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.hardware.camera2.CameraCharacteristics; @@ -17,7 +16,6 @@ import android.net.Uri; import android.os.Build; import android.os.ParcelFileDescriptor; import android.provider.MediaStore; -import android.provider.OpenableColumns; import android.util.Base64; import android.util.Log; import android.webkit.MimeTypeMap; @@ -38,10 +36,6 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.nio.file.*; -import java.nio.file.attribute.BasicFileAttributes; -import java.nio.file.attribute.FileTime; -import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays;
fix(deps): remove unused Android Java dependancies (#<I>)
diff --git a/tests/scripts/unit/wee-screen.js b/tests/scripts/unit/wee-screen.js index <HASH>..<HASH> 100644 --- a/tests/scripts/unit/wee-screen.js +++ b/tests/scripts/unit/wee-screen.js @@ -258,7 +258,7 @@ describe('Screen', () => { resetState(); setScreenSize(3); expect(state.one).to.equal(false); - }) + }); }); describe('args', () => { @@ -269,11 +269,15 @@ describe('Screen', () => { callback(arg, one, two) { expect(one).to.equal('one'); expect(two).to.equal('two'); + + state.one = true; } }); setScreenSize(3); - }) + + expect(state.one).to.be.true; + }); }); describe('scope', () => { @@ -287,10 +291,14 @@ describe('Screen', () => { scope: obj, callback() { expect(this.one).to.equal('one'); + + state.one = true; } }); setScreenSize(3); + + expect(state.one).to.be.true; }); }); @@ -377,6 +385,9 @@ describe('Screen', () => { it('should return the current screen size', () => { setScreenSize(2); expect($screen.size()).to.equal(2); + + setScreenSize(3); + expect($screen.size()).to.equal(3); }); });
Add semi-colons and additional assertions
diff --git a/src/test/java/org/jpmml/sklearn/ClassifierTest.java b/src/test/java/org/jpmml/sklearn/ClassifierTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/jpmml/sklearn/ClassifierTest.java +++ b/src/test/java/org/jpmml/sklearn/ClassifierTest.java @@ -132,7 +132,7 @@ public class ClassifierTest extends SkLearnTest { @Test public void evaluateLGBMAudit() throws Exception { - evaluate("LGBM", "Audit", new RealNumberEquivalence(1)); + evaluate("LGBM", "Audit", new RealNumberEquivalence(2)); } @Test
Fixed the build for Java <I>
diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py index <HASH>..<HASH> 100644 --- a/salt/modules/win_pkg.py +++ b/salt/modules/win_pkg.py @@ -328,7 +328,7 @@ def version(*names, **kwargs): ''' saltenv = kwargs.get('saltenv', 'base') - installed_pkgs = list_pkgs(refresh=kwargs.get('refresh', False)) + installed_pkgs = list_pkgs(saltenv=saltenv, refresh=kwargs.get('refresh', False)) available_pkgs = get_repo_data(saltenv).get('repo') ret = {}
<I>/develop version() was ignoring saltenv setting.
diff --git a/src/Export/ExportCsv.php b/src/Export/ExportCsv.php index <HASH>..<HASH> 100644 --- a/src/Export/ExportCsv.php +++ b/src/Export/ExportCsv.php @@ -46,7 +46,11 @@ class ExportCsv extends Export array $data, DataGrid $grid ) use ($name, $outputEncoding, $delimiter, $includeBom): void { - $columns = $this->getColumns() ?? $this->grid->getColumns(); + $columns = $this->getColumns(); + + if ($columns === []) { + $columns = $this->grid->getColumns(); + } $csvDataModel = new CsvDataModel($data, $columns, $this->grid->getTranslator());
Fix ExportCsv blank output Regression from a<I>c9d<I>, which results in rows with no columns (since `getColumns` never returns `null`).
diff --git a/lib/component.js b/lib/component.js index <HASH>..<HASH> 100644 --- a/lib/component.js +++ b/lib/component.js @@ -90,7 +90,7 @@ }, /** * Hook called by React when the component is mounted on a DOM element. - Overriding this set this.el and this.$el (if jQuery available) on the + Overriding this to set this.el and this.$el (if jQuery available) on the component. Also starts component listeners. */ componentDidMount: function () { @@ -186,7 +186,8 @@ * @returns {this} */ remove: function () { - if (this.unmount()) this.el.remove(); + this.unmount(); + this.el.remove(); return this; }, /**
fixed remove method that was expecting old unmount return value
diff --git a/lib/tty/command/execute.rb b/lib/tty/command/execute.rb index <HASH>..<HASH> 100644 --- a/lib/tty/command/execute.rb +++ b/lib/tty/command/execute.rb @@ -30,9 +30,9 @@ module TTY # redirect fds opts = ({ - :in => in_rd, in_wr => :close, - :out => out_wr, out_rd => :close, - :err => err_wr, err_rd => :close + :in => in_rd, # in_wr => :close, + :out => out_wr,# out_rd => :close, + :err => err_wr,# err_rd => :close }).merge(@process_options) pid = Process.spawn(cmd.to_command, opts)
Change to stop closing pipes in child process.
diff --git a/lib/infer.js b/lib/infer.js index <HASH>..<HASH> 100644 --- a/lib/infer.js +++ b/lib/infer.js @@ -888,14 +888,15 @@ case "in": case "instanceof": return true; } } - function literalType(val) { - switch (typeof val) { + function literalType(node) { + if (node.regex) return getInstance(cx.protos.RegExp); + switch (typeof node.value) { case "boolean": return cx.bool; case "number": return cx.num; case "string": return cx.str; case "object": case "function": - if (!val) return ANull; + if (!node.value) return ANull; return getInstance(cx.protos.RegExp); } } @@ -1091,7 +1092,7 @@ return scope.fnType ? scope.fnType.self : cx.topScope; }), Literal: ret(function(node) { - return literalType(node.value); + return literalType(node); }) }; @@ -1338,7 +1339,7 @@ return scope.fnType ? scope.fnType.self : cx.topScope; }, Literal: function(node) { - return literalType(node.value); + return literalType(node); } };
Fix type inference for unsyntactic regexps They get a .value of undefined, which would confuse literalType Issue #<I>
diff --git a/src/main/java/org/cactoos/collection/Immutable.java b/src/main/java/org/cactoos/collection/Immutable.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/cactoos/collection/Immutable.java +++ b/src/main/java/org/cactoos/collection/Immutable.java @@ -54,8 +54,8 @@ public final class Immutable<X> implements Collection<X> { * Ctor. * @param src Source collection */ - public Immutable(final Collection<X> src) { - this.col = src; + public Immutable(final Collection<? extends X> src) { + this.col = (Collection<X>) src; } @Override
(#<I>) Generified Immutable with unchecked cast
diff --git a/src/oidcendpoint/scopes.py b/src/oidcendpoint/scopes.py index <HASH>..<HASH> 100644 --- a/src/oidcendpoint/scopes.py +++ b/src/oidcendpoint/scopes.py @@ -73,3 +73,7 @@ class Scopes: else: return available_scopes(endpoint_context) return [] + + def filter_scopes(self, client_id, endpoint_context, scopes): + allowed_scopes = self.allowed_scopes(client_id, endpoint_context) + return [s for s in scopes if s in allowed_scopes]
Added method filer_scopes().
diff --git a/internal/service/redshift/cluster_data_source.go b/internal/service/redshift/cluster_data_source.go index <HASH>..<HASH> 100644 --- a/internal/service/redshift/cluster_data_source.go +++ b/internal/service/redshift/cluster_data_source.go @@ -37,6 +37,16 @@ func DataSourceCluster() *schema.Resource { Computed: true, }, + "availability_zone_relocation": { + Type: schema.TypeBool, + Computed: true, + }, + + "availability_zone_relocation_status": { + Type: schema.TypeString, + Computed: true, + }, + "bucket_name": { Type: schema.TypeString, Computed: true,
#<I> cluster_data_source: add computed schema
diff --git a/ddsc/core/filedownloader.py b/ddsc/core/filedownloader.py index <HASH>..<HASH> 100644 --- a/ddsc/core/filedownloader.py +++ b/ddsc/core/filedownloader.py @@ -14,7 +14,7 @@ DOWNLOAD_FILE_CHUNK_SIZE = 20 * 1024 * 1024 MIN_DOWNLOAD_CHUNK_SIZE = DOWNLOAD_FILE_CHUNK_SIZE PARTIAL_DOWNLOAD_RETRY_TIMES = 5 -PARTIAL_DOWNLOAD_RETRY_SECONDS = 1 +PARTIAL_DOWNLOAD_RETRY_SECONDS = 20 class FileDownloader(object): diff --git a/ddsc/core/fileuploader.py b/ddsc/core/fileuploader.py index <HASH>..<HASH> 100644 --- a/ddsc/core/fileuploader.py +++ b/ddsc/core/fileuploader.py @@ -13,7 +13,7 @@ import traceback import sys SEND_EXTERNAL_PUT_RETRY_TIMES = 5 -SEND_EXTERNAL_RETRY_SECONDS = 1 +SEND_EXTERNAL_RETRY_SECONDS = 20 RESOURCE_NOT_CONSISTENT_RETRY_SECONDS = 2
Update swift retry settings based on oit feedback Changes to retry recommendations to <I> seconds pause based on a OIT ticket. > ...wait longer than 1 second between retries, perhaps <I> or <I> seconds may yield more successful results ...
diff --git a/src/redis_lock/__init__.py b/src/redis_lock/__init__.py index <HASH>..<HASH> 100644 --- a/src/redis_lock/__init__.py +++ b/src/redis_lock/__init__.py @@ -202,7 +202,9 @@ class Lock(object): logger.warn("UNLOCK_SCRIPT not cached.") self._client.eval(UNLOCK_SCRIPT, 2, self._name, self._signal, self._id) self._held = False - release = __exit__ + + def release(self, force=False): + return self.__exit__(force=force) class InterruptableThread(threading.Thread):
Only allow `force` argument in release method. The other arguments are unrelated (specific to context managers).
diff --git a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jdbc/DataSourceAutoConfiguration.java b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jdbc/DataSourceAutoConfiguration.java index <HASH>..<HASH> 100644 --- a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jdbc/DataSourceAutoConfiguration.java +++ b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/jdbc/DataSourceAutoConfiguration.java @@ -148,6 +148,7 @@ public class DataSourceAutoConfiguration { @ConditionalOnProperty(prefix = "spring.datasource", name = "jmx-enabled", havingValue = "true", matchIfMissing = true) @ConditionalOnClass(name = "org.apache.tomcat.jdbc.pool.DataSourceProxy") @Conditional(DataSourceAutoConfiguration.DataSourceAvailableCondition.class) + @ConditionalOnMissingBean(name = "dataSourceMBean") protected static class TomcatDataSourceJmxConfiguration { @Bean
Protect against duplicate datasource MBeans See gh-<I>
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -5,13 +5,15 @@ Bundler.require(:default) require 'active_record' -unless ENV['TRAVIS'] && ENV['QUALITY'] == 'false' - begin - require './spec/support/simplecov_helper' - include SimpleCovHelper - start_simple_cov("unit-#{RUBY_VERSION}") - rescue LoadError - puts "Coverage disabled." +unless ENV['TRAVIS'] + unless ENV['TRAVIS'] && ENV['QUALITY'] == 'false' + begin + require './spec/support/simplecov_helper' + include SimpleCovHelper + start_simple_cov("unit-#{RUBY_VERSION}") + rescue LoadError + puts "Coverage disabled." + end end end
Disable coverage on Travis CI for now.
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ reqs = [str(ir.req) for ir in install_reqs] setup( name="napalm-base", - version='0.23.2', + version='0.23.3', packages=find_packages(), author="David Barroso", author_email="dbarrosop@dravetech.com",
napalm-base release <I>
diff --git a/src/webcams/FSWebcam.js b/src/webcams/FSWebcam.js index <HASH>..<HASH> 100644 --- a/src/webcams/FSWebcam.js +++ b/src/webcams/FSWebcam.js @@ -114,6 +114,7 @@ FSWebcam.prototype.generateSh = function( location ) { var setValues = scope.getControlSetString( scope.opts.setValues ); + var verbose = scope.opts.verbose ? "" : " -q" // Use memory if null location @@ -121,7 +122,8 @@ FSWebcam.prototype.generateSh = function( location ) { ? "- -" : location; - var sh = scope.bin + " -q " + var sh = scope.bin + + + verbose + " " + resolution + " " + output + " " + quality + " "
Fixed verbose toggle for fswebcam
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -22,9 +22,9 @@ tests_require = [ 'flake8==3.2.1', 'hypothesis==3.11.3', 'hypothesis-pytest==0.19.0', - 'py==1.4.31', + 'py==1.4.34', 'pydocstyle==2.0.0', - 'pytest==3.1.1', + 'pytest==3.1.2', 'pytest-benchmark==3.1.0a2', 'pytest-cov==2.5.1', 'Sphinx==1.6.2',
update to latest pytest (<I>) and py (<I>)
diff --git a/cmd/ctr/run.go b/cmd/ctr/run.go index <HASH>..<HASH> 100644 --- a/cmd/ctr/run.go +++ b/cmd/ctr/run.go @@ -42,6 +42,10 @@ var runCommand = cli.Command{ Usage: "runtime name (linux, windows, vmware-linux)", Value: "linux", }, + cli.StringFlag{ + Name: "runtime-config", + Usage: "set the OCI config file for the container", + }, cli.BoolFlag{ Name: "readonly", Usage: "set the containers filesystem as readonly",
Add back the runtime-config flag in ctr run This flag was already implemented but could not be specified any more.
diff --git a/pkg/kvstore/etcd.go b/pkg/kvstore/etcd.go index <HASH>..<HASH> 100644 --- a/pkg/kvstore/etcd.go +++ b/pkg/kvstore/etcd.go @@ -544,8 +544,6 @@ func (e *etcdClient) statusChecker() { newStatus := []string{} ok := 0 - e.getLogger().Debugf("Performing status check to etcd") - endpoints := e.client.Endpoints() for _, ep := range endpoints { st, err := e.determineEndpointStatus(ep)
etcd: Remove duplicate debug message The message is very noisy as it is in the background and is completely useless
diff --git a/src/main/java/com/googlecode/lanterna/screen/Screen.java b/src/main/java/com/googlecode/lanterna/screen/Screen.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/googlecode/lanterna/screen/Screen.java +++ b/src/main/java/com/googlecode/lanterna/screen/Screen.java @@ -90,7 +90,7 @@ public class Screen this.terminal.addResizeListener(new TerminalResizeListener()); - ScreenCharacter background = new ScreenCharacter(new ScreenCharacter(' ')); + ScreenCharacter background = new ScreenCharacter(' '); for(int y = 0; y < terminalHeight; y++) { for(int x = 0; x < terminalWidth; x++) { visibleScreen[y][x] = new ScreenCharacter(background);
What was the purpose of that?!?
diff --git a/lib/chronic/time_zone.rb b/lib/chronic/time_zone.rb index <HASH>..<HASH> 100644 --- a/lib/chronic/time_zone.rb +++ b/lib/chronic/time_zone.rb @@ -7,10 +7,11 @@ module Chronic end def self.scan_for_all(token) - scanner = {/[PMCE][DS]T/i => :tz, - /(tzminus)?\d{4}/ => :tz} - scanner.keys.each do |scanner_item| - return self.new(scanner[scanner_item]) if scanner_item =~ token.word + { + /[PMCE][DS]T/i => :tz, + /(tzminus)?\d{4}/ => :tz + }.each do |item, symbol| + return self.new(symbol) if item =~ token.word end return nil end
no need for an lvar to hold symbols
diff --git a/salt/modules/mount.py b/salt/modules/mount.py index <HASH>..<HASH> 100644 --- a/salt/modules/mount.py +++ b/salt/modules/mount.py @@ -332,6 +332,8 @@ def is_fuse_exec(cmd): # No point in running ldd on a command that doesn't exist if not cmd_path: return False + elif not _which('ldd'): + raise CommandNotFoundError('ldd') out = __salt__['cmd.run']('ldd {0}'.format(cmd_path)) return 'libfuse' in out
mount module: Raise an exception when ldd isn't available in is_fuse_exec()
diff --git a/cmsplugin_cascade/plugin_base.py b/cmsplugin_cascade/plugin_base.py index <HASH>..<HASH> 100644 --- a/cmsplugin_cascade/plugin_base.py +++ b/cmsplugin_cascade/plugin_base.py @@ -8,6 +8,7 @@ from .widgets import JSONMultiWidget class CascadePluginBase(CMSPluginBase): tag_type = 'div' render_template = 'cms/plugins/generic.html' + glossary_variables = [] _cached_child_classes = None class Media: @@ -117,3 +118,11 @@ class CascadePluginBase(CMSPluginBase): form.base_fields['glossary'].validators.append(field.run_validators) setattr(form, 'glossary_fields', self.glossary_fields) return form + + def save_model(self, request, obj, form, change): + if self.glossary_variables: + # transfer listed glossary variables from the current object to the new object + old_obj = super(CascadePluginBase, self).get_object(request, form.instance.id) + variables = dict((k, v) for k, v in old_obj.glossary.items() if k in self.glossary_variables) + obj.glossary.update(variables) + super(CascadePluginBase, self).save_model(request, obj, form, change)
CascadePluginBase got new attr: list glossary_variables can be overloaded to store arbitrary data in glossary from request to request
diff --git a/pynos/versions/base/interface.py b/pynos/versions/base/interface.py index <HASH>..<HASH> 100644 --- a/pynos/versions/base/interface.py +++ b/pynos/versions/base/interface.py @@ -760,14 +760,14 @@ class Interface(object): >>> auth = ('admin', 'password') >>> dev = pynos.device.Device(conn=conn, auth=auth) >>> int_type = 'tengigabitethernet' - >>> name = '225/0/38' + >>> name = '225/0/37' >>> enabled = True >>> output = dev.interface.enable_switchport(int_type, name) >>> output = dev.interface.spanning_tree_state(int_type=int_type, ... name=name, enabled=enabled) >>> enabled = False >>> output = dev.interface.spanning_tree_state(int_type=int_type, - ... name=name, enable=enabled) + ... name=name, enabled=enabled) >>> int_type = 'vlan' >>> name = '102' >>> enabled = False
Changed the doctests to ensure that they all work. Change-Id: I9d<I>ead<I>d<I>f<I>aacb5f<I>d<I>f7ad<I>
diff --git a/libkbfs/folder_branch_ops.go b/libkbfs/folder_branch_ops.go index <HASH>..<HASH> 100644 --- a/libkbfs/folder_branch_ops.go +++ b/libkbfs/folder_branch_ops.go @@ -1628,8 +1628,14 @@ func (fbo *folderBranchOps) SetInitialHeadFromServer( }() if md.IsReadable() && fbo.config.Mode().PrefetchWorkers() > 0 { - // We `Get` the root block to ensure downstream prefetches occur. - _ = fbo.config.BlockOps().BlockRetriever().Request(ctx, + // We `Get` the root block to ensure downstream prefetches + // occur. Use a fresh context, in case `ctx` is canceled by + // the caller before we complete. + prefetchCtx := fbo.ctxWithFBOID(context.Background()) + fbo.log.CDebugf(ctx, + "Prefetching root block with a new context: FBOID=%s", + prefetchCtx.Value(CtxFBOIDKey)) + _ = fbo.config.BlockOps().BlockRetriever().Request(prefetchCtx, defaultOnDemandRequestPriority, md, md.data.Dir.BlockPointer, &DirBlock{}, TransientEntry) } else {
folder_branch_ops: use fresh ctx when prefetching TLF root block Otherwise the caller could cancel `ctx` after we return, and the block retriever will cancel the block fetch, and the bserver will log a warning about a context being canceled.
diff --git a/src/Query.php b/src/Query.php index <HASH>..<HASH> 100644 --- a/src/Query.php +++ b/src/Query.php @@ -44,14 +44,26 @@ class Query { } /** + * @param array? $bindings Bindings for a prepared statement. * @return Row[] */ - public function execute(): array { + public function execute(?array $bindings = null): array { if (! is_string($this->query)) { throw new Exception\QueryNotBuiltException; } - $results = static::$database->query($this->query); + try { + $statement = static::$database->prepare($this->query); + } catch (\PDOException $e) { + // do nothing -- we'll check for the return val of $statement + // this is just to prevent a PDOException from stopping execution + } + + if (! $statement) { + return []; + } + + $results = $statement->execute($bindings); /** @todo: Fix this to throw an Exception (?) */ if (! $results) {
Changes Query::execute() to use prepared statements
diff --git a/Kwf/Component/Generator/Abstract.php b/Kwf/Component/Generator/Abstract.php index <HASH>..<HASH> 100644 --- a/Kwf/Component/Generator/Abstract.php +++ b/Kwf/Component/Generator/Abstract.php @@ -67,6 +67,11 @@ abstract class Kwf_Component_Generator_Abstract foreach ($this->_settings['component'] as $k=>$i) { if (!$i) unset($this->_settings['component'][$k]); } + + if (array_key_exists('addUrlPart', $this->_settings)) { + $this->_addUrlPart = (bool)$this->_settings['addUrlPart']; + unset($this->_settings['addUrlPart']); + } } protected function _getModel() @@ -590,11 +595,7 @@ abstract class Kwf_Component_Generator_Abstract public function getAddUrlPart() { - if (isset($this->_settings['addUrlPart'])) { - return $this->_settings['addUrlPart']; - } else { - return $this->_addUrlPart; - } + return $this->_addUrlPart; } abstract public function getChildData($parentData, $select = array());
simplify code: don't have two places where addUrlPart setting is stored
diff --git a/packages/cozy-konnector-libs/src/libs/linkBankOperations.js b/packages/cozy-konnector-libs/src/libs/linkBankOperations.js index <HASH>..<HASH> 100644 --- a/packages/cozy-konnector-libs/src/libs/linkBankOperations.js +++ b/packages/cozy-konnector-libs/src/libs/linkBankOperations.js @@ -92,6 +92,7 @@ class Linker { /* Commit updates */ commitChanges() { + log('debug', `linkBankOperations: commiting ${this.toUpdate.length} changes`) return cozyClient.fetchJSON( 'POST', `/data/${DOCTYPE_OPERATIONS}/_bulk_docs`,
chore: Add log to know how many changes have been committed
diff --git a/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php b/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php index <HASH>..<HASH> 100644 --- a/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php +++ b/Tests/OAuth/ResourceOwner/FacebookResourceOwnerTest.php @@ -12,6 +12,7 @@ namespace HWI\Bundle\OAuthBundle\Tests\OAuth\ResourceOwner; use HWI\Bundle\OAuthBundle\OAuth\ResourceOwner\FacebookResourceOwner; +use Symfony\Component\HttpFoundation\Request; class FacebookResourceOwnerTest extends GenericOAuth2ResourceOwnerTest { @@ -28,6 +29,17 @@ json; 'realname' => 'name', ); + /** + * @expectedException \Symfony\Component\Security\Core\Exception\AuthenticationException + */ + public function testGetAccessTokenFailedResponse() + { + $this->mockBuzz('{"error": {"message": "invalid"}}', 'application/json; charset=utf-8'); + $request = new Request(array('code' => 'code')); + + $this->resourceOwner->getAccessToken($request, 'http://redirect.to/'); + } + protected function setUpResourceOwner($name, $httpUtils, array $options) { $options = array_merge(
Add Facebook specific test to cover previous merges
diff --git a/lib/Doctrine/ORM/UnitOfWork.php b/lib/Doctrine/ORM/UnitOfWork.php index <HASH>..<HASH> 100644 --- a/lib/Doctrine/ORM/UnitOfWork.php +++ b/lib/Doctrine/ORM/UnitOfWork.php @@ -471,19 +471,21 @@ class UnitOfWork implements PropertyChangedListener */ public function computeChangeSet(ClassMetadata $class, $entity) { - if ( ! $class->isInheritanceTypeNone()) { - $class = $this->em->getClassMetadata(get_class($entity)); - } - $oid = spl_object_hash($entity); if (isset($this->readOnlyObjects[$oid])) { return; } + if ( ! $class->isInheritanceTypeNone()) { + $class = $this->em->getClassMetadata(get_class($entity)); + } + $actualData = array(); + foreach ($class->reflFields as $name => $refProp) { $value = $refProp->getValue($entity); + if (isset($class->associationMappings[$name]) && ($class->associationMappings[$name]['type'] & ClassMetadata::TO_MANY) && $value !== null
Micro optimization in computeChangeSet when using readOnly entities.
diff --git a/trimesh/path/polygons.py b/trimesh/path/polygons.py index <HASH>..<HASH> 100644 --- a/trimesh/path/polygons.py +++ b/trimesh/path/polygons.py @@ -1,7 +1,5 @@ import numpy as np - -from shapely import vectorized from shapely.geometry import Polygon from rtree import Rtree @@ -331,6 +329,7 @@ def medial_axis(polygon, Vertex positions in space """ from scipy.spatial import Voronoi + from shapely import vectorized if resolution is None: resolution = np.reshape(polygon.bounds, (2, 2)).ptp(axis=0).max() / 100 @@ -485,6 +484,8 @@ def sample(polygon, count, factor=1.5, max_iter=10): Random points inside polygon where n <= count """ + from shapely import vectorized + bounds = np.reshape(polygon.bounds, (2, 2)) extents = bounds.ptp(axis=0)
import shapely.vectorized in function
diff --git a/src/createHTMLAudioElementWithLoopEvent.js b/src/createHTMLAudioElementWithLoopEvent.js index <HASH>..<HASH> 100644 --- a/src/createHTMLAudioElementWithLoopEvent.js +++ b/src/createHTMLAudioElementWithLoopEvent.js @@ -1,23 +1,13 @@ const loopchange = new Event('loopchange'); -class HTMLAudioElementWithLoopEvent extends HTMLAudioElement { - get loop () { - return super.loop; - } - - set loop (value) { - super.loop = value; - this.dispatchEvent(loopchange); - } -} - -document.registerElement('audio-with-loop-event', { - prototype: HTMLAudioElementWithLoopEvent.prototype, - extends: 'audio' -}); - function createHTMLAudioElementWithLoopEvent () { - return document.createElement('audio', 'audio-with-loop-event'); + const audio = document.createElement('audio'); + new MutationObserver(() => { + audio.dispatchEvent(loopchange); + }).observe(audio, { + attributeFilter: ['loop'] + }); + return audio; } module.exports = createHTMLAudioElementWithLoopEvent;
Custom audio element is now actually a native audio element with a MutationObserver on the loop attribute.
diff --git a/src/immer.js b/src/immer.js index <HASH>..<HASH> 100644 --- a/src/immer.js +++ b/src/immer.js @@ -49,18 +49,14 @@ export function produce(baseState, producer, patchListener) { if (patchListener !== undefined && typeof patchListener !== "function") throw new Error("the third argument of a producer should not be set or a function") } - // if state is a primitive, don't bother proxying at all - if (typeof baseState !== "object" || baseState === null) { + // avoid proxying anything except plain objects and arrays + if (!isProxyable(baseState)) { const returnValue = producer(baseState) return returnValue === undefined ? baseState : normalizeResult(returnValue) } - if (!isProxyable(baseState)) - throw new Error( - `the first argument to an immer producer should be a primitive, plain object or array, got ${typeof baseState}: "${baseState}"` - ) return normalizeResult( getUseProxies() ? produceProxy(baseState, producer, patchListener)
fix(produce): avoid throwing on unproxyable objects
diff --git a/concrete/src/Url/Resolver/Manager/ResolverManager.php b/concrete/src/Url/Resolver/Manager/ResolverManager.php index <HASH>..<HASH> 100644 --- a/concrete/src/Url/Resolver/Manager/ResolverManager.php +++ b/concrete/src/Url/Resolver/Manager/ResolverManager.php @@ -23,6 +23,7 @@ class ResolverManager implements ResolverManagerInterface */ public function __construct($default_handle = '', UrlResolverInterface $default_resolver = null) { + $this->priorityTree = []; if ($default_resolver) { $this->addResolver($default_handle, $default_resolver); }
Initialize ResolverManager::priorityTree Otherwise we risk to iterate on null
diff --git a/ObjJAcornCompiler.js b/ObjJAcornCompiler.js index <HASH>..<HASH> 100644 --- a/ObjJAcornCompiler.js +++ b/ObjJAcornCompiler.js @@ -1017,9 +1017,9 @@ LabeledStatement: function(node, st, c, format) { c(node.label, st, "IdentifierName"); if (format) { buffer.concat(":"); + buffer.concatFormat(format.afterColon); } else { buffer.concat(": "); - buffer.concatFormat(format.afterColon); } } c(node.body, st, "Statement");
Fixed: Minor format issue on label statement
diff --git a/lcdproc/server.py b/lcdproc/server.py index <HASH>..<HASH> 100644 --- a/lcdproc/server.py +++ b/lcdproc/server.py @@ -49,7 +49,7 @@ class Server(object): if "success" in response: # Normal successful reply break if "huh" in response: # Something went wrong - break + break if "connect" in response: # Special reply to "hello" break # TODO Keep track of which screen is displayed
Update lcdproc/server.py Removed trailing space
diff --git a/tests/index.php b/tests/index.php index <HASH>..<HASH> 100644 --- a/tests/index.php +++ b/tests/index.php @@ -583,6 +583,8 @@ function showDebugData(\Delight\Auth\Auth $auth, $result) { echo 'Session name' . "\t\t\t\t"; \var_dump(\session_name()); + echo 'Auth::createRememberCookieName()' . "\t"; + \var_dump(\Delight\Auth\Auth::createRememberCookieName()); echo "\n"; echo 'Auth::createCookieName(\'session\')' . "\t";
Add tests for method 'createRememberCookieName' from class 'Auth'
diff --git a/test/PHPMailer/MimeTypesTest.php b/test/PHPMailer/MimeTypesTest.php index <HASH>..<HASH> 100644 --- a/test/PHPMailer/MimeTypesTest.php +++ b/test/PHPMailer/MimeTypesTest.php @@ -23,10 +23,31 @@ final class MimeTypesTest extends TestCase { /** - * Miscellaneous calls to improve test coverage and some small tests. + * Test mime type mapping. + * + * @dataProvider dataMime_Types + * + * @param string $input Input text string. + * @param string $expected Expected funtion output. */ - public function testMiscellaneous() + public function testMime_Types($input, $expected) { - self::assertSame('application/pdf', PHPMailer::_mime_types('pdf'), 'MIME TYPE lookup failed'); + $result = PHPMailer::_mime_types($input); + self::assertSame($expected, $result, 'MIME TYPE lookup failed'); + } + + /** + * Data provider. + * + * @return array + */ + public function dataMime_Types() + { + return [ + 'Extension: pdf (lowercase)' => [ + 'input' => 'pdf', + 'expected' => 'application/pdf', + ], + ]; } }
MimeTypesTest: reorganize to use data provider * Maintains the same test code and test case. * Makes it easier to add additional test cases in the future.
diff --git a/lib/arel.rb b/lib/arel.rb index <HASH>..<HASH> 100644 --- a/lib/arel.rb +++ b/lib/arel.rb @@ -21,7 +21,7 @@ require 'arel/delete_manager' require 'arel/nodes' module Arel - VERSION = '7.1.1' + VERSION = '7.2.0' def self.sql raw_sql Arel::Nodes::SqlLiteral.new raw_sql
Arel master is alre <I> closes #<I>
diff --git a/lntest/timeouts.go b/lntest/timeouts.go index <HASH>..<HASH> 100644 --- a/lntest/timeouts.go +++ b/lntest/timeouts.go @@ -1,4 +1,4 @@ -// +build !darwin, !kvdb_etcd +// +build !darwin,!kvdb_etcd package lntest diff --git a/lntest/timeouts_etcd.go b/lntest/timeouts_etcd.go index <HASH>..<HASH> 100644 --- a/lntest/timeouts_etcd.go +++ b/lntest/timeouts_etcd.go @@ -1,4 +1,4 @@ -// +build !darwin, kvdb_etcd +// +build !darwin,kvdb_etcd package lntest
lntest/timeouts: remove spaces from build predicates Otherwise this breaks various conditional compilation targets and the linter.
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,4 +1,4 @@ -if ENV['CODECLIMATE_REPO_TOKEN'] +if (RUBY_VERSION >= '1.9') && ENV['CODECLIMATE_REPO_TOKEN'] require 'codeclimate-test-reporter' CodeClimate::TestReporter.start end
Only load coverage for Ruby <I>+ Mostly b/c SimpleCov (used by the test reporter) is <I>+
diff --git a/subliminal/plugins/TheSubDB.py b/subliminal/plugins/TheSubDB.py index <HASH>..<HASH> 100644 --- a/subliminal/plugins/TheSubDB.py +++ b/subliminal/plugins/TheSubDB.py @@ -33,7 +33,7 @@ class TheSubDB(PluginBase.PluginBase): site_name = 'SubDB' server_url = 'http://api.thesubdb.com' # for testing purpose, use http://sandbox.thesubdb.com instead api_based = True - user_agent = 'SubDB/1.0 (Subliminal/0.1; https://github.com/Diaoul/subliminal)' # defined by the API + user_agent = 'SubDB/1.0 (Subliminal/0.3; https://github.com/Diaoul/subliminal)' # defined by the API _plugin_languages = {'cs': 'cs', # the whole list is available with the API: http://sandbox.thesubdb.com/?action=languages 'da': 'da', 'de': 'de',
Fix wrong subliminal version in TheSubDB
diff --git a/activesupport/lib/active_support/dependencies.rb b/activesupport/lib/active_support/dependencies.rb index <HASH>..<HASH> 100644 --- a/activesupport/lib/active_support/dependencies.rb +++ b/activesupport/lib/active_support/dependencies.rb @@ -255,12 +255,10 @@ module ActiveSupport #:nodoc: end def load_dependency(file) - Dependencies.load_interlock do - if Dependencies.load? && ActiveSupport::Dependencies.constant_watch_stack.watching? - Dependencies.new_constants_in(Object) { yield } - else - yield - end + if Dependencies.load? && ActiveSupport::Dependencies.constant_watch_stack.watching? + Dependencies.new_constants_in(Object) { yield } + else + yield end rescue Exception => exception # errors from loading file exception.blame_file! file if exception.respond_to? :blame_file!
Don't apply locking around basic #load / #require That's outside our remit, and dangerous... if a caller has their own locking to protect against the natural race danger, we'll deadlock against it.
diff --git a/test/unit/transports/azure_test.rb b/test/unit/transports/azure_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/transports/azure_test.rb +++ b/test/unit/transports/azure_test.rb @@ -2,6 +2,9 @@ require 'helper' +# Required because this test file acesses classes under Azure:: +require 'azure_mgmt_resources' + describe 'azure transport' do def transport(options = nil) ENV['AZURE_TENANT_ID'] = 'test_tenant_id'
Add require for Azure (#<I>)
diff --git a/index.js b/index.js index <HASH>..<HASH> 100755 --- a/index.js +++ b/index.js @@ -87,7 +87,7 @@ async function startProcessEngine(sqlitePath) { .send(JSON.stringify(packageInfo, null, 2)); }); - const iamConfigPath = path.join(process.env.CONFIG_PATH, 'sqlite', 'iam', 'iam_service.json'); + const iamConfigPath = path.join(process.env.CONFIG_PATH, process.env.NODE_ENV, 'iam', 'iam_service.json'); // eslint-disable-next-line global-require const iamConfig = require(iamConfigPath);
:lipstick: Obtain the config path with the NODE_ENV environment variable
diff --git a/lib/eye/dsl/main.rb b/lib/eye/dsl/main.rb index <HASH>..<HASH> 100644 --- a/lib/eye/dsl/main.rb +++ b/lib/eye/dsl/main.rb @@ -41,7 +41,7 @@ module Eye::Dsl::Main opts = Eye::Dsl::ConfigOpts.new opts.instance_eval(&block) - @parsed_config.settings.merge!(opts.config) + Eye::Utils.deep_merge!(@parsed_config.settings, opts.config) Eye::Dsl.debug '<= config' end diff --git a/lib/eye/dsl/process_opts.rb b/lib/eye/dsl/process_opts.rb index <HASH>..<HASH> 100644 --- a/lib/eye/dsl/process_opts.rb +++ b/lib/eye/dsl/process_opts.rb @@ -4,7 +4,7 @@ class Eye::Dsl::ProcessOpts < Eye::Dsl::Opts opts = Eye::Dsl::ChildProcessOpts.new opts.instance_eval(&block) if block @config[:monitor_children] ||= {} - @config[:monitor_children].merge!(opts.config) + Eye::Utils.deep_merge!(@config[:monitor_children], opts.config) end alias xmonitor_children nop
deep merge in other places in dsl
diff --git a/scraper/util.py b/scraper/util.py index <HASH>..<HASH> 100644 --- a/scraper/util.py +++ b/scraper/util.py @@ -20,6 +20,14 @@ def execute(command, cwd=None): process = Popen(command, cwd=cwd, stdout=PIPE, stderr=STDOUT, shell=False) # nosec out, err = process.communicate() + + if process.returncode: + logging.error( + "Error Executing: command=%s, returncode=%d", + " ".join(command), + process.returncode, + ) + return str(out), str(err) @@ -130,7 +138,7 @@ def git_repo_to_sloc(url): cloc_json = json.loads(json_blob) sloc = cloc_json["SUM"]["code"] except json.decoder.JSONDecodeError: - logger.debug("Error Decoding: url=%s, out=%s", url, out) + logger.error("Error Decoding: url=%s, out=%s", url, out) sloc = 0 logger.debug("SLOC: url=%s, sloc=%d", url, sloc)
Check the return code of executed commands Add a check of the returncode of the command executed in scraper.util.execute() and output an error message if it is not zero. Additionally change the logging level from DEBUG to ERROR for failures to process the JSON output from cloc. These combined will make it more clear when failures in core functionality are occurring.
diff --git a/public/javascripts/promotion.js b/public/javascripts/promotion.js index <HASH>..<HASH> 100644 --- a/public/javascripts/promotion.js +++ b/public/javascripts/promotion.js @@ -98,7 +98,7 @@ var promotion_page = (function($){ if (adding) { button.html(i18n.remove).addClass("remove_" + type).removeClass('add_'+type); if( type !== 'product'){ - if( changeset.productCount() === 0 ){ + if( changeset.products[product_id] === undefined ){ add_product_breadcrumbs(changeset.id, product_id, product_name); } }
fixing issue where adding a partial product after a full product would result in not being able to browse the partial product
diff --git a/addon/serializers/rest-serializer.js b/addon/serializers/rest-serializer.js index <HASH>..<HASH> 100644 --- a/addon/serializers/rest-serializer.js +++ b/addon/serializers/rest-serializer.js @@ -2,6 +2,7 @@ import ActiveModelSerializer from './active-model-serializer'; import { camelize, singularize, pluralize } from '../utils/inflector'; export default ActiveModelSerializer.extend({ + serializeIds: 'always', keyForModel(type) { return camelize(type); diff --git a/tests/unit/serializers/rest-serializer-test.js b/tests/unit/serializers/rest-serializer-test.js index <HASH>..<HASH> 100644 --- a/tests/unit/serializers/rest-serializer-test.js +++ b/tests/unit/serializers/rest-serializer-test.js @@ -29,3 +29,8 @@ test('it hyphenates camelized words', function(assert) { } }); }); + +test('serializeIds defaults to "always"', function(assert) { + let defaultState = new RestSerializer; + assert.equal(defaultState.serializeIds, 'always'); +});
RestSerializer should always serialize ids Override the default Serializer behavior in the RestSerializer to convert ids in the expected way.
diff --git a/src/gluon.js b/src/gluon.js index <HASH>..<HASH> 100644 --- a/src/gluon.js +++ b/src/gluon.js @@ -2,7 +2,7 @@ * @license * MIT License * - * Copyright (c) 2017 Goffert van Gool + * Copyright (c) 2018 Goffert van Gool * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal @@ -23,8 +23,8 @@ * SOFTWARE. */ -import { render } from '../lit-html/lib/shady-render.js'; -export { html } from '../lit-html/lib/shady-render.js'; +import { render } from '../../lit-html/lib/shady-render.js'; +export { html } from '../../lit-html/lib/shady-render.js'; // Key to store the HTML tag in a custom element class const TAG = Symbol('tag'); @@ -120,7 +120,7 @@ export class GluonElement extends HTMLElement { } if (this[NEEDSRENDER]) { this[NEEDSRENDER] = false; - render(this.template, this.renderRoot, this.constructor.is); + render(this.template, this.renderRoot, { scopeName: this.constructor.is, eventContext: this }); } } }
Update to lit-html <I>, fix relative imports
diff --git a/dist/pptxgen.js b/dist/pptxgen.js index <HASH>..<HASH> 100644 --- a/dist/pptxgen.js +++ b/dist/pptxgen.js @@ -5505,6 +5505,11 @@ var PptxGenJS = function(){ color: rgbToHex( Number(arrRGB1[0]), Number(arrRGB1[1]), Number(arrRGB1[2]) ), fill: rgbToHex( Number(arrRGB2[0]), Number(arrRGB2[1]), Number(arrRGB2[2]) ) }; + var fontFamily = jQuery(cell).css('font-family').replace(/["]/gi,'').split(", ")[0]; + if (fontFamily !== "" && fontFamily !== "inherit" && fontFamily !== "initial") { + objOpts.fontFace = fontFamily; + } + if ( ['left','center','right','start','end'].indexOf(jQuery(cell).css('text-align')) > -1 ) objOpts.align = jQuery(cell).css('text-align').replace('start','left').replace('end','right'); if ( ['top','middle','bottom'].indexOf(jQuery(cell).css('vertical-align')) > -1 ) objOpts.valign = jQuery(cell).css('vertical-align');
Add support for font family css when export HTML table to slide.
diff --git a/library/WT/MenuBar.php b/library/WT/MenuBar.php index <HASH>..<HASH> 100644 --- a/library/WT/MenuBar.php +++ b/library/WT/MenuBar.php @@ -51,12 +51,6 @@ class WT_MenuBar { $menu->addSubmenu($submenu); } } - //-- Welcome Menu customization - $filename = WT_ROOT.'includes/extras/custom_welcome_menu.php'; - if (file_exists($filename)) { - require $filename; - } - return $menu; }
Do not load includes/extras/custom_welcome_menu.php; it is most likely an old PGV menu that will not work. If we want to allow this sort of behaviour, we will do it a different way.
diff --git a/lib/handlers/bin.js b/lib/handlers/bin.js index <HASH>..<HASH> 100644 --- a/lib/handlers/bin.js +++ b/lib/handlers/bin.js @@ -1050,6 +1050,7 @@ module.exports = Observable.extend({ processors: bin.settings.processors || {}, checksum: options.checksum || null, metadata: options.metadata, + latest: bin.latest || false }, settings: options.settings ? _.extend(options.settings, { panels: panels }) : { panels: panels } };
Pass through "latest" in jsbin.state on client
diff --git a/app.go b/app.go index <HASH>..<HASH> 100644 --- a/app.go +++ b/app.go @@ -42,6 +42,7 @@ type Application struct { *flagGroup *argGroup *cmdGroup + initialized bool commandHelp *string Name string Help string @@ -99,6 +100,9 @@ func (a *Application) Version(version string) *Application { } func (a *Application) init() error { + if a.initialized { + return nil + } if a.cmdGroup.have() && a.argGroup.have() { return fmt.Errorf("can't mix top-level Arg()s with Command()s") } @@ -125,6 +129,7 @@ func (a *Application) init() error { return err } } + a.initialized = true return nil }
Don't allow multiple initialization.
diff --git a/lib/caracal/core/models/table_cell_model.rb b/lib/caracal/core/models/table_cell_model.rb index <HASH>..<HASH> 100644 --- a/lib/caracal/core/models/table_cell_model.rb +++ b/lib/caracal/core/models/table_cell_model.rb @@ -76,8 +76,10 @@ module Caracal # because paragraph-level styles don't seem to # affect runs within tables. weirdsies. if model.respond_to?(:runs) - options.each do |k,v| - model.send(k, v) if model.respond_to?(k) + runs.each do |run| + options.each do |k,v| + run.send(k, v) if run.respond_to?(k) + end end end end
Fixed issue where cell style options were not being applied to runs correctly.
diff --git a/kitnirc/client.py b/kitnirc/client.py index <HASH>..<HASH> 100644 --- a/kitnirc/client.py +++ b/kitnirc/client.py @@ -629,7 +629,7 @@ def _parse_msg(client, command, actor, args): recipient, _, message = args.partition(' :') chantypes = client.server.features.get("CHANTYPES", "#") if recipient[0] in chantypes: - recipient = client.server.channels.get(recipient.lower()) or recipient + recipient = client.server.get_channel(recipient) or recipient.lower() else: recipient = User(recipient) client.dispatch_event(command, actor, recipient, message)
Convert one more instance to get_channel
diff --git a/concrete/blocks/youtube/view.php b/concrete/blocks/youtube/view.php index <HASH>..<HASH> 100644 --- a/concrete/blocks/youtube/view.php +++ b/concrete/blocks/youtube/view.php @@ -41,7 +41,7 @@ if (isset($iv_load_policy) && $iv_load_policy > 0) { $params[] = 'iv_load_policy=' . $iv_load_policy; } -if (isset($loop) && $loop) { +if (isset($loopEnd) && $loopEnd) { $params[] = 'loop=1'; } @@ -79,4 +79,4 @@ if (Page::getCurrentPage()->isEditMode()) { ?>" frameborder="0" allowfullscreen></iframe> </div> <?php -} ?> \ No newline at end of file +} ?>
loop Setting not working in youtube block (#<I>) The variable for the loop setting in the controller is $loopEnd (instead of $loop). Former-commit-id: 1acc<I>b<I>a<I>effe<I>ef<I>bf Former-commit-id: cb5ac7cba<I>d9ada2b5bbb9d3c2c<I>abd
diff --git a/__init__.py b/__init__.py index <HASH>..<HASH> 100644 --- a/__init__.py +++ b/__init__.py @@ -20,5 +20,5 @@ __revision__ = "$Id$" # #--start constants-- -__version__ = "3.0a4" +__version__ = "3.0a5" #--end constants--
Bumping versions for <I>a5
diff --git a/lib/mohawk/adapters/uia/table.rb b/lib/mohawk/adapters/uia/table.rb index <HASH>..<HASH> 100644 --- a/lib/mohawk/adapters/uia/table.rb +++ b/lib/mohawk/adapters/uia/table.rb @@ -2,7 +2,19 @@ module Mohawk module Adapters module UIA class Table < Control - include ElementLocator + class Row + attr_reader :index + + def initialize(table, element, index) + @table, @element, @index = table, element, index + end + + def to_hash + {text: element.name, row: index} + end + + end + include ElementLocator, Enumerable def select(which) find_row_with(which).select @@ -16,21 +28,21 @@ module Mohawk find_row_with(which).remove_from_selection end - def count - table.row_count - end - def headers table.headers.map &:name end + def each + all_items.each_with_index.map { |el, index| yield Row.new self, el, index } + end + def find_row_with(row_info) found_row = case row_info - when Hash - find_by_hash(row_info) - else - find(row_info) - end + when Hash + find_by_hash(row_info) + else + find(row_info) + end raise "A row with #{row_info} was not found" unless found_row found_row end
Table is Enumerable; returns Rows in uia adapter
diff --git a/lib/Less/Parser.php b/lib/Less/Parser.php index <HASH>..<HASH> 100644 --- a/lib/Less/Parser.php +++ b/lib/Less/Parser.php @@ -1042,7 +1042,7 @@ class Parser { private function parseOperand () { $negate = false; - $p = $this->input[$this->pos + 1]; + $p = isset($this->input[$this->pos + 1]) ? $this->input[$this->pos + 1] : ''; if ($this->peek('-') && ($p === '@' || $p === '(')) { $negate = $this->match('-'); }
Added test for string length to operator parser function. Fixes #5.
diff --git a/mutagen/id3.py b/mutagen/id3.py index <HASH>..<HASH> 100644 --- a/mutagen/id3.py +++ b/mutagen/id3.py @@ -116,7 +116,7 @@ class ID3(mutagen.Metadata): if (2,2,0) <= self.version < (2,3,0): perframe = 6 if frames is None: frames = Frames_2_2 - while self.__readbytes+perframe < self.__size: + while self.__readbytes+perframe < self.__size+10: try: name, tag = self.load_frame(frames=frames) except EOFError: break
Handle short tags that hit the end - remember to count id3 header size. Test not included; unsure how best to test this.
diff --git a/salt/config.py b/salt/config.py index <HASH>..<HASH> 100644 --- a/salt/config.py +++ b/salt/config.py @@ -2395,7 +2395,7 @@ def is_profile_configured(opts, provider, profile_name): alias, driver = provider.split(':') # Most drivers need a size, but some do not. - non_size_drivers = ['parallels', 'softlayer', 'softlayer_hw'] + non_size_drivers = ['opennebula', 'parallels', 'softlayer', 'softlayer_hw'] if driver not in non_size_drivers: required_keys.append('size')
Don't require opennebula profiles to have a size
diff --git a/src/protocol/recordBatch/v0/index.js b/src/protocol/recordBatch/v0/index.js index <HASH>..<HASH> 100644 --- a/src/protocol/recordBatch/v0/index.js +++ b/src/protocol/recordBatch/v0/index.js @@ -4,6 +4,9 @@ const crc32C = require('../crc32C') const { Types: Compression, lookupCodec } = require('../../message/compression') const MAGIC_BYTE = 2 +const COMPRESSION_MASK = 11 // The lowest 3 bits +const TIMESTAMP_MASK = 0 // The fourth lowest bit, always set this bit to 0 (since 0.10.0) +const TRANSACTIONAL_MASK = 16 // The fifth lowest bit /** * v0 @@ -30,13 +33,17 @@ const RecordBatch = async ({ maxTimestamp = Date.now(), partitionLeaderEpoch = 0, lastOffsetDelta = 0, + transactional = false, producerId = Long.fromValue(-1), // for idempotent messages producerEpoch = 0, // for idempotent messages firstSequence = 0, // for idempotent messages records = [], }) => { + const attributes = + (compression & COMPRESSION_MASK) | TIMESTAMP_MASK | transactional ? TRANSACTIONAL_MASK : 0 + const batchBody = new Encoder() - .writeInt16(compression & 0x3) + .writeInt16(attributes) .writeInt32(lastOffsetDelta) .writeInt64(firstTimestamp) .writeInt64(maxTimestamp)
Accept transactional flag on RecordBatch
diff --git a/lib/distribot/workflow.rb b/lib/distribot/workflow.rb index <HASH>..<HASH> 100644 --- a/lib/distribot/workflow.rb +++ b/lib/distribot/workflow.rb @@ -104,6 +104,7 @@ module Distribot end def pause! + raise "Cannot pause unless running" unless self.running? self.add_transition(to: 'paused', timestamp: Time.now.utc.to_f) end @@ -121,6 +122,7 @@ module Distribot end def cancel! + raise "Cannot cancel unless running" unless self.running? self.add_transition(to: 'canceled', timestamp: Time.now.utc.to_f) end @@ -128,6 +130,10 @@ module Distribot self.current_phase == 'canceled' end + def running? + ! ( self.paused? || self.canceled? || self.finished? ) + end + def redis_id @redis_id ||= Distribot.redis_id("workflow", self.id) end
Disallow pause or cancel unless running.
diff --git a/lib/common.js b/lib/common.js index <HASH>..<HASH> 100644 --- a/lib/common.js +++ b/lib/common.js @@ -18,7 +18,6 @@ var config = require('getset').loadSync(config_file); // exports common.root_path = root_path; -common.script_path = script_path; common.version = version; common.program = program; common.os_name = os_name;
Removed script_path from lib/common.
diff --git a/internetarchive/iarequest.py b/internetarchive/iarequest.py index <HASH>..<HASH> 100644 --- a/internetarchive/iarequest.py +++ b/internetarchive/iarequest.py @@ -300,15 +300,6 @@ def prepare_metadata(metadata, source_metadata=None, append=False): # Index all items which contain an index. for key in metadata: - # Parse string bools to proper bools. - try: - if metadata[key].lower() == 'true': - metadata[key] = True - elif metadata[key].lower() == 'false': - metadata[key] = False - except AttributeError: - pass - # Insert values from indexed keys into prepared_metadata dict. if (rm_index(key) in indexed_keys): try:
Don't convert "true"/"false" to True/False in upload.
diff --git a/packages/openneuro-server/src/graphql/resolvers/snapshots.js b/packages/openneuro-server/src/graphql/resolvers/snapshots.js index <HASH>..<HASH> 100644 --- a/packages/openneuro-server/src/graphql/resolvers/snapshots.js +++ b/packages/openneuro-server/src/graphql/resolvers/snapshots.js @@ -186,7 +186,7 @@ export const filterLatestSnapshot = snapshots => { snapshots, snapshotCreationComparison, ) - return sortedSnapshots[0].tag + return sortedSnapshots[sortedSnapshots.length - 1].tag } else { return null }
fix(api): Fix inverted sort order for latestSnapshot
diff --git a/spec/yelp/responses/base_spec.rb b/spec/yelp/responses/base_spec.rb index <HASH>..<HASH> 100644 --- a/spec/yelp/responses/base_spec.rb +++ b/spec/yelp/responses/base_spec.rb @@ -9,8 +9,8 @@ describe Yelp::Response::Base do it { is_expected.to be_a Yelp::Response::Base } it 'should create variables' do - expect(base.instance_variable_get('@a')).to eq 10 - expect(base.instance_variable_get('@b')).to eq 20 + expect(base.instance_variable_get(:@a)).to eq 10 + expect(base.instance_variable_get(:@b)).to eq 20 end end
Uses symbols for instance_variable_get (more idiomatic)