diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/salt/modules/file.py b/salt/modules/file.py index <HASH>..<HASH> 100644 --- a/salt/modules/file.py +++ b/salt/modules/file.py @@ -1896,7 +1896,7 @@ def get_selinux_context(path): try: ret = re.search('\w+:\w+:\w+:\w+', out).group(0) except AttributeError: - ret = "No selinux context information is available for {0}".format(path) + ret = 'No selinux context information is available for {0}'.format(path) return ret
Modified string to use single quotes - looks like this is the convention?
diff --git a/spec/nrser/tree/leaves_spec.rb b/spec/nrser/tree/leaves_spec.rb index <HASH>..<HASH> 100644 --- a/spec/nrser/tree/leaves_spec.rb +++ b/spec/nrser/tree/leaves_spec.rb @@ -5,8 +5,17 @@ describe "NRSER.leaves" do it_behaves_like "function", mapping: { + # flat hash [{a: 1, b: 2}] => {[:a] => 1, [:b] => 2}, + # flat array + [ [:a, :b, :c] ] => { + [0] => :a, + [1] => :b, + [2] => :c, + }, + + # Nested, all hashes [{ a: { x: 'ex',
More NRSER.leaves tests
diff --git a/includes/class-freemius.php b/includes/class-freemius.php index <HASH>..<HASH> 100644 --- a/includes/class-freemius.php +++ b/includes/class-freemius.php @@ -10361,7 +10361,8 @@ $plan_title = $this->_site->plan->title; } - $deactivation_step = version_compare( $this->version, '1.2.2', '>' ) ? + // @since 1.2.1.5 The free version is auto deactivated. + $deactivation_step = version_compare( $this->version, '1.2.1.5', '>=' ) ? ( '<li>' . __fs( 'deactivate-free-version', $this->_slug ) . '.</li>' ) : '';
[auto-deactivation] Since we already have themes deployed with the beta version of the themes SDK that is tagged with <I>, if we release the new SDK with <I>, it can conflict with the themes version. Therefore, changed the version to <I>.
diff --git a/src/services/olHelpers.js b/src/services/olHelpers.js index <HASH>..<HASH> 100644 --- a/src/services/olHelpers.js +++ b/src/services/olHelpers.js @@ -152,6 +152,25 @@ angular.module('openlayers-directive').factory('olHelpers', function($q, $log, $ var oSource; switch (source.type) { + case 'MapBox': + if (!source.mapId || !source.accessToken) { + $log.error('[AngularJS - Openlayers] - MapBox layer requires the map id and the access token'); + return; + } + var url = 'http://api.tiles.mapbox.com/v4/' + source.mapId + '/{z}/{x}/{y}.png?access_token=' + + source.accessToken; + + var pixelRatio = window.goog.dom.getPixelRatio(); + + if (pixelRatio > 1) { + url = url.replace('.png', '@2x.png'); + } + + oSource = new ol.source.XYZ({ + url: url, + tilePixelRatio: pixelRatio > 1 ? 2 : 1 + }); + break; case 'ImageWMS': if (!source.url || !source.params) { $log.error('[AngularJS - Openlayers] - ImageWMS Layer needs ' +
Added MapBox as tile source type; Using high DPI tile set if pixelRatio > 1
diff --git a/km3pipe/io/__init__.py b/km3pipe/io/__init__.py index <HASH>..<HASH> 100644 --- a/km3pipe/io/__init__.py +++ b/km3pipe/io/__init__.py @@ -67,14 +67,17 @@ def GenericPump(filenames, use_jppy=False, name="GenericPump", **kwargs): return io[extension](filenames=filenames, name=name, **kwargs) -def df_to_h5(df, filename, tabname, filemode='a', where='/', complevel=5,): +def df_to_h5(df, filename, where, filemode='a', complevel=5,): """Write pandas dataframes with proper columns. The main 2 ways pandas writes dataframes suck bigly. """ + loc, tabname = os.path.split(where) + if loc == '': + loc = '/' with tb.open_file(filename, filemode) as h5: - filt = tb.Filters(complevel=complevel, shuffle=True) - h5.create_table(where, tabname, obj=df.to_records(index=False), + filt = tb.Filters(complevel=complevel, shuffle=True, fletcher32=True) + h5.create_table(loc, tabname, obj=df.to_records(index=False), filters=filt)
simplify df_to_h5 signature
diff --git a/tests/__init.test.js b/tests/__init.test.js index <HASH>..<HASH> 100644 --- a/tests/__init.test.js +++ b/tests/__init.test.js @@ -9,7 +9,8 @@ navigator.getVRDisplays = function () { var resolvePromise = Promise.resolve(); var mockVRDisplay = { requestPresent: resolvePromise, - exitPresent: resolvePromise + exitPresent: resolvePromise, + requestAnimationFrame: function () { return 1; } }; return Promise.resolve([mockVRDisplay]); }; diff --git a/tests/core/a-entity.test.js b/tests/core/a-entity.test.js index <HASH>..<HASH> 100644 --- a/tests/core/a-entity.test.js +++ b/tests/core/a-entity.test.js @@ -418,11 +418,13 @@ suite('a-entity', function () { }); test('removes itself from scene parent', function (done) { + var count; var el = this.el; var parentEl = el.parentNode; + count = parentEl.object3D.children.length; parentEl.removeChild(el); process.nextTick(function () { - assert.equal(parentEl.object3D.children.length, 3); + assert.equal(parentEl.object3D.children.length, count - 1); done(); }); });
fix tests from vreffect bump
diff --git a/components/menu/__tests__/index.test.js b/components/menu/__tests__/index.test.js index <HASH>..<HASH> 100644 --- a/components/menu/__tests__/index.test.js +++ b/components/menu/__tests__/index.test.js @@ -940,14 +940,16 @@ describe('Menu', () => { it('should support ref', async () => { const ref = React.createRef(); - const wrapper = mount( + const { container } = render( <Menu ref={ref}> <SubMenu key="sub1" title="Navigation One"> <Menu.Item key="1">Option 1</Menu.Item> </SubMenu> </Menu>, ); - expect(ref.current?.menu?.list).toBe(wrapper.find('ul').first().getDOMNode()); + expect(ref.current?.menu?.list).toBe(container.querySelector('ul')); + ref.current?.focus(); + expect(document.activeElement).toBe(container.querySelector('ul')); }); it('expandIcon', () => {
test: add menu focus test (#<I>)
diff --git a/test/unit/exception_handling/log_error_stub_test.rb b/test/unit/exception_handling/log_error_stub_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/exception_handling/log_error_stub_test.rb +++ b/test/unit/exception_handling/log_error_stub_test.rb @@ -41,7 +41,7 @@ module ExceptionHandling assert_equal exception_pattern, exception_whitelist[0][0] begin ExceptionHandling.log_error("This is a test error") - rescue Exception + rescue StandardError flunk # Shouldn't raise an error in this case end end @@ -53,7 +53,7 @@ module ExceptionHandling assert_equal exception_pattern, exception_whitelist[0][0] begin ExceptionHandling.log_error("This is a test error") - rescue Exception + rescue StandardError flunk # Shouldn't raise an error in this case end end diff --git a/test/unit/exception_handling_test.rb b/test/unit/exception_handling_test.rb index <HASH>..<HASH> 100644 --- a/test/unit/exception_handling_test.rb +++ b/test/unit/exception_handling_test.rb @@ -14,7 +14,7 @@ class ExceptionHandlingTest < ActiveSupport::TestCase data[:user_details] = {} data[:user_details][:username] = "CaryP" data[:user_details][:organization] = "Invoca Engineering Dept." - rescue Exception + rescue StandardError # don't let these out! end
TECH-<I>-Upgrade Ruby 2 4: fix exception class
diff --git a/servlet/src/main/java/io/undertow/servlet/spec/AsyncContextImpl.java b/servlet/src/main/java/io/undertow/servlet/spec/AsyncContextImpl.java index <HASH>..<HASH> 100644 --- a/servlet/src/main/java/io/undertow/servlet/spec/AsyncContextImpl.java +++ b/servlet/src/main/java/io/undertow/servlet/spec/AsyncContextImpl.java @@ -195,6 +195,9 @@ public class AsyncContextImpl implements AsyncContext { Connectors.executeRootHandler(new HttpHandler() { @Override public void handleRequest(final HttpServerExchange exchange) throws Exception { + ServletRequestContext src = exchange.getAttachment(ServletRequestContext.ATTACHMENT_KEY); + src.setServletRequest(servletRequest); + src.setServletResponse(servletResponse); servletDispatcher.dispatchToPath(exchange, pathInfo, DispatcherType.ASYNC); } }, exchange);
UNDERTOW-<I> Make sure async dispatch happens with current requst/response
diff --git a/spyderlib/widgets/varexp/utils.py b/spyderlib/widgets/varexp/utils.py index <HASH>..<HASH> 100644 --- a/spyderlib/widgets/varexp/utils.py +++ b/spyderlib/widgets/varexp/utils.py @@ -5,7 +5,7 @@ # (see spyderlib/__init__.py for details) """ -Utilities for the Dictionary Editor Widget and Dialog based on Qt +Utilities for the Collections editor widget and dialog """ from __future__ import print_function
Variable Explorer: Improve another docstring
diff --git a/intranet/utils/helpers.py b/intranet/utils/helpers.py index <HASH>..<HASH> 100644 --- a/intranet/utils/helpers.py +++ b/intranet/utils/helpers.py @@ -66,7 +66,7 @@ def get_current_commit_long_hash(workdir): def get_current_commit_info(): - cmd = ["git", "show", "-s", "--format=Commit %h\n%ad", "HEAD"] + cmd = ["git", "show", "-s", "--format='Commit %h\n%ad'", "HEAD"] return subprocess.check_output(cmd, universal_newlines=True).strip()
fix(utils): fix current commit info invocation
diff --git a/src/TwigBridge/View/Environment.php b/src/TwigBridge/View/Environment.php index <HASH>..<HASH> 100644 --- a/src/TwigBridge/View/Environment.php +++ b/src/TwigBridge/View/Environment.php @@ -20,7 +20,7 @@ class Environment extends \Illuminate\View\Environment public function make($view, $data = array(), $mergeData = array()) { $path = $this->finder->find($view); - $data = array_merge($data, $mergeData); + $data = array_merge($mergeData, $this->parseData($data)); return new View($this, $this->getEngineFromPath($path), $view, $path, $data); }
Updated Environment class to reflect changes in Laravel.
diff --git a/queryx_test.go b/queryx_test.go index <HASH>..<HASH> 100644 --- a/queryx_test.go +++ b/queryx_test.go @@ -5,8 +5,10 @@ package gocqlx import ( + "reflect" "testing" + "github.com/gocql/gocql" "github.com/google/go-cmp/cmp" ) @@ -149,3 +151,25 @@ func TestQueryxBindMap(t *testing.T) { } }) } + +func TestQyeryxAllWrapped(t *testing.T) { + var ( + gocqlQueryPtr = reflect.TypeOf((*gocql.Query)(nil)) + queryxPtr = reflect.TypeOf((*Queryx)(nil)) + ) + + for i := 0; i < gocqlQueryPtr.NumMethod(); i++ { + m, ok := queryxPtr.MethodByName(gocqlQueryPtr.Method(i).Name) + if !ok { + t.Fatalf("Queryx missing method %s", gocqlQueryPtr.Method(i).Name) + } + + t.Log(m.Name) + + for j := 0; j < m.Type.NumOut(); j++ { + if m.Type.Out(j) == gocqlQueryPtr { + t.Errorf("Queryx method %s not wrapped", m.Name) + } + } + } +}
queryx: Add test to check that all methods are wrapped
diff --git a/compliance_checker/cf/util.py b/compliance_checker/cf/util.py index <HASH>..<HASH> 100644 --- a/compliance_checker/cf/util.py +++ b/compliance_checker/cf/util.py @@ -2,10 +2,10 @@ import os import os.path import itertools from lxml import etree -#try: -from udunitspy import Unit, UdunitsError, Converter -#except ImportError: - #pass #disabled as CF is not working and udunits won't install on centos/rhel yet +try: + from udunitspy import Unit, UdunitsError, Converter +except ImportError: + pass #disabled as CF is not working and udunits won't install on centos/rhel yet from netCDF4 import Dimension, Variable from pkgutil import get_data diff --git a/compliance_checker/suite.py b/compliance_checker/suite.py index <HASH>..<HASH> 100644 --- a/compliance_checker/suite.py +++ b/compliance_checker/suite.py @@ -31,6 +31,7 @@ class CheckSuite(object): if isinstance(val, list): return [fix_return_value(v, check_method.im_func.func_name) for v in val] + return [fix_return_value(val, check_method.im_func.func_name)] def _get_valid_checkers(self, ds, checker_names):
Reverted Minor changes in suite.py and util.py
diff --git a/mithril.js b/mithril.js index <HASH>..<HASH> 100644 --- a/mithril.js +++ b/mithril.js @@ -557,6 +557,7 @@ var m = (function app(window, undefined) { var controller = function() { return (component.controller || noop).apply(this, args) || this; }; + controller.prototype = component.controller.prototype var view = function(ctrl) { if (arguments.length > 1) args = args.concat([].slice.call(arguments, 1)); return component.view.apply(component, args ? [ctrl].concat(args) : [ctrl]);
make controller inherit from prototype if in m.component
diff --git a/test/app.js b/test/app.js index <HASH>..<HASH> 100644 --- a/test/app.js +++ b/test/app.js @@ -484,3 +484,22 @@ test('edge-case 1', function *(t) { } ]); }); +test('edge-case 2', function *(t) { + yield makeTest(t, {trim: true, case: true}, [ + { handler: { url: '/?a&b', method: 'GET', headers: {} }, + match : [ + { url: '/?a=1&b=2', method: 'GET', headers: {}, args: ['1', '2'] }, + ] + }, { + handler: { url: '/?a&c', method: 'GET', headers: {} }, + match : [ + { url: '/?a=1&c=3', method: 'GET', headers: {}, args: ['1', '3'] }, + ] + }, { + catch : { url: '404', method: 'GET', headers: {} }, + match : [ + { url: '/?a=1', method: 'GET', headers: {}, args: [/Not Found/] }, + ] + } + ]); +});
add extra edge-case test for nested queries
diff --git a/sherlock/transient_classifier.py b/sherlock/transient_classifier.py index <HASH>..<HASH> 100644 --- a/sherlock/transient_classifier.py +++ b/sherlock/transient_classifier.py @@ -1329,6 +1329,8 @@ class transient_classifier(): # start_time = time.time() # print "COLLECTING TRANSIENTS WITH NO ANNOTATIONS" + sd + if updatePeakMagnitudes: sqlQuery = u""" SELECT * from sherlock_crossmatches cm, sherlock_classifications cl where rank =1 and cl.transient_object_id=cm.transient_object_id
fixing memory issue with query that updates the annotations
diff --git a/lib/log4jruby/logger_for_class.rb b/lib/log4jruby/logger_for_class.rb index <HASH>..<HASH> 100644 --- a/lib/log4jruby/logger_for_class.rb +++ b/lib/log4jruby/logger_for_class.rb @@ -9,6 +9,14 @@ module Log4jruby def klass.logger @logger ||= Logger.get(name) end + + def klass.logger=(logger) + @logger = logger + end + + def klass.set_logger(name, options = {}) + @logger = Logger.get(name, options) + end end def logger
Added logger = and set_logger(name, options)
diff --git a/pkg/models/fundingoffer/fundingoffer.go b/pkg/models/fundingoffer/fundingoffer.go index <HASH>..<HASH> 100644 --- a/pkg/models/fundingoffer/fundingoffer.go +++ b/pkg/models/fundingoffer/fundingoffer.go @@ -124,6 +124,15 @@ func FromRaw(raw []interface{}) (o *Offer, err error) { return } +func CancelFromRaw(raw []interface{}) (Cancel, error) { + o, err := FromRaw(raw) + if err != nil { + return Cancel{}, err + } + + return Cancel(*o), nil +} + func SnapshotFromRaw(raw []interface{}) (snap *Snapshot, err error) { if len(raw) == 0 { return snap, fmt.Errorf("data slice too short for offer: %#v", raw)
ability to explicitly decode raw data to funding offer Cancel structure
diff --git a/llvmlite/binding/targets.py b/llvmlite/binding/targets.py index <HASH>..<HASH> 100644 --- a/llvmlite/binding/targets.py +++ b/llvmlite/binding/targets.py @@ -152,7 +152,8 @@ class TargetData(ffi.ObjectRef): RELOC = frozenset(['default', 'static', 'pic', 'dynamicnopic']) -CODEMODEL = frozenset(['default', 'small', 'kernel', 'medium', 'large']) +CODEMODEL = frozenset(['default', 'defaultjit', 'small', 'kernel', 'medium', + 'large']) class Target(ffi.ObjectRef):
Add missing code model to those accepted. As title.
diff --git a/lib/dnsimple/client.rb b/lib/dnsimple/client.rb index <HASH>..<HASH> 100644 --- a/lib/dnsimple/client.rb +++ b/lib/dnsimple/client.rb @@ -173,7 +173,7 @@ module Dnsimple elsif access_token options[:headers][HEADER_AUTHORIZATION] = "Bearer #{access_token}" else - raise Error, 'A password, domain API token or OAuth access token is required for all API requests.' + raise Error, 'A password, domain API token or access token is required for all API requests.' end options diff --git a/spec/dnsimple/client_spec.rb b/spec/dnsimple/client_spec.rb index <HASH>..<HASH> 100644 --- a/spec/dnsimple/client_spec.rb +++ b/spec/dnsimple/client_spec.rb @@ -59,7 +59,7 @@ describe Dnsimple::Client do expect { subject.execute(:get, "test", {}) - }.to raise_error(Dnsimple::Error, "A password, domain API token or OAuth access token is required for all API requests.") + }.to raise_error(Dnsimple::Error, "A password, domain API token or access token is required for all API requests.") end end
Remove specific mention to Oauth from the message OAuth token occurrences were renamed to access token.
diff --git a/lib/sidekiq_unique_jobs/version.rb b/lib/sidekiq_unique_jobs/version.rb index <HASH>..<HASH> 100644 --- a/lib/sidekiq_unique_jobs/version.rb +++ b/lib/sidekiq_unique_jobs/version.rb @@ -3,5 +3,5 @@ module SidekiqUniqueJobs # # @return [String] the current SidekiqUniqueJobs version - VERSION = "7.1.26" + VERSION = "7.1.27" end
Bump sidekiq-unique-jobs to <I>
diff --git a/erizo_controller/erizoClient/src/Stream.js b/erizo_controller/erizoClient/src/Stream.js index <HASH>..<HASH> 100644 --- a/erizo_controller/erizoClient/src/Stream.js +++ b/erizo_controller/erizoClient/src/Stream.js @@ -247,10 +247,15 @@ Erizo.Stream = function (spec) { return; if (that.pc){ that.checkOptions(config, true); - if(that.room.p2p){ - for (var index in that.pc){ - that.pc[index].updateSpec(config, callback); + if (that.local){ + if(that.room.p2p){ + for (var index in that.pc){ + that.pc[index].updateSpec(config, callback); + } + }else{ + that.pc.updateSpec(config, callback); } + }else{ that.pc.updateSpec(config, callback); }
Wont apply p2p updateConfiguration to remote streams
diff --git a/pycbc/waveform/spa_tmplt.py b/pycbc/waveform/spa_tmplt.py index <HASH>..<HASH> 100644 --- a/pycbc/waveform/spa_tmplt.py +++ b/pycbc/waveform/spa_tmplt.py @@ -139,12 +139,12 @@ def spa_distance(psd, mass1, mass2, lower_frequency_cutoff, snr=8): """ Return the distance at a given snr (default=8) of the SPA TaylorF2 template. """ - kend = spa_tmplt_end(mass1=mass1, mass2=mass2) / psd.delta_f + kend = int(spa_tmplt_end(mass1=mass1, mass2=mass2) / psd.delta_f) norm1 = spa_tmplt_norm(psd, len(psd), psd.delta_f, lower_frequency_cutoff) norm2 = (spa_amplitude_factor(mass1=mass1, mass2=mass2)) ** 2.0 if kend >= len(psd): - kend = len(psd) - 1 + kend = len(psd) - 2 return sqrt(norm1[kend] * norm2) / snr @schemed("pycbc.waveform.spa_tmplt_")
fixes to spa for python3 (#<I>) * fixes to spa for python3 * fixes
diff --git a/agent/engine/common_test.go b/agent/engine/common_test.go index <HASH>..<HASH> 100644 --- a/agent/engine/common_test.go +++ b/agent/engine/common_test.go @@ -184,11 +184,7 @@ func addTaskToEngine(t *testing.T, createStartEventsReported sync.WaitGroup) { // steadyStateCheckWait is used to force the test to wait until the steady-state check // has been invoked at least once - steadyStateVerify := make(chan time.Time, 1) mockTime.EXPECT().Now().Return(time.Now()).AnyTimes() - gomock.InOrder( - mockTime.EXPECT().After(steadyStateTaskVerifyInterval).Return(steadyStateVerify).AnyTimes(), - ) err := taskEngine.Init(ctx) assert.NoError(t, err)
engine: remove mock validations for time.After()
diff --git a/salt/cloud/clouds/joyent.py b/salt/cloud/clouds/joyent.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/joyent.py +++ b/salt/cloud/clouds/joyent.py @@ -107,6 +107,8 @@ VALID_RESPONSE_CODES = [ http_client.NO_CONTENT ] +DEFAULT_NETWORKS = ['Joyent-SDC-Public'] + # Only load in this module if the Joyent configurations are in place def __virtual__(): @@ -281,7 +283,7 @@ def create(vm_): salt.utils.cloud.check_name(vm_['name'], 'a-zA-Z0-9-.') kwargs = { 'name': vm_['name'], - 'networks': vm_['networks'], + 'networks': vm_.get('networks', DEFAULT_NETWORKS), 'image': get_image(vm_), 'size': get_size(vm_), 'location': vm_.get('location', DEFAULT_LOCATION)
Added safer .get method to vm_ 'networks' dictionary
diff --git a/src/Select/partials/SelectInputFieldSize.js b/src/Select/partials/SelectInputFieldSize.js index <HASH>..<HASH> 100644 --- a/src/Select/partials/SelectInputFieldSize.js +++ b/src/Select/partials/SelectInputFieldSize.js @@ -1,7 +1,7 @@ import styled from 'styled-components' import SelectInputField from './SelectInputField'; -export default SelectInputField.withComponent('div').extend` +export default styled(SelectInputField.withComponent('div'))` position: absolute; top: 0px; left: 0px;
Remove use of extend API To prevent warning `Warning: The "extend" API will be removed in the upcoming <I> release. Use styled(StyledComponent) instead. You can find more information here: <URL>
diff --git a/intake/source/cache.py b/intake/source/cache.py index <HASH>..<HASH> 100644 --- a/intake/source/cache.py +++ b/intake/source/cache.py @@ -60,12 +60,12 @@ class BaseCache(object): """ self._driver = driver self._spec = spec - cd = cache_dir or spec.get('cache_dir', conf['cache_dir']) + cd = cache_dir or conf['cache_dir'] if cd == 'catdir': if catdir is None: raise TypeError('cache_dir="catdir" only allowed when loaded' 'from a catalog file.') - cd = catdir + cd = os.path.join(catdir, 'intake_cache') self._cache_dir = cd self._storage_options = storage_options
Don't allow cachedir in spec, only config
diff --git a/lib/filterlib.php b/lib/filterlib.php index <HASH>..<HASH> 100644 --- a/lib/filterlib.php +++ b/lib/filterlib.php @@ -199,4 +199,30 @@ function filter_phrases ($text, $link_array, $ignoretagsopen=NULL, $ignoretagscl } + + +function filter_remove_duplicates($linkarray) { + + $concepts = array(); // keep a record of concepts as we cycle through + $lconcepts = array(); // a lower case version for case insensitive + + $cleanlinks = array(); + + foreach ($linkarray as $key=>$filterobject) { + if ($filterobject->casesensitive) { + $exists = in_array($filterobject->phrase, $concepts); + } else { + $exists = in_array(strtolower($filterobject->phrase), $lconcepts); + } + + if (!$exists) { + $cleanlinks[] = $filterobject; + $concepts[] = $filterobject->phrase; + $lconcepts[] = strtolower($filterobject->phrase); + } + } + + return $cleanlinks; +} + ?>
New function to remove duplicate entries from an array of filterobjects
diff --git a/mod/forum/lib.php b/mod/forum/lib.php index <HASH>..<HASH> 100644 --- a/mod/forum/lib.php +++ b/mod/forum/lib.php @@ -3317,7 +3317,8 @@ function forum_tp_count_forum_unread_posts($userid, $forumid, $groupid=false) { } $sql = 'SELECT COUNT(p.id) '. - 'FROM '.$CFG->prefix.'forum_posts p JOIN '.$CFG->prefix.'forum_discussions d ON p.discussion = d.id '. + 'FROM '.$CFG->prefix.'forum_posts p '. + 'LEFT JOIN '.$CFG->prefix.'forum_discussions d ON p.discussion = d.id '. 'LEFT JOIN '.$CFG->prefix.'forum_read r ON r.postid = p.id AND r.userid = '.$userid.' '. 'WHERE d.forum = '.$forumid.$groupsel. ' AND p.modified >= '.$cutoffdate.' AND r.id is NULL';
Corrected a typo that was throwing errors.
diff --git a/src/client.js b/src/client.js index <HASH>..<HASH> 100644 --- a/src/client.js +++ b/src/client.js @@ -245,7 +245,9 @@ function makeUnaryRequestFunction(method, serialize, deserialize) { return; } if (response.status.code !== grpc.status.OK) { - callback(response.status); + var error = new Error(response.status.details); + error.code = response.status.code; + callback(error); return; } emitter.emit('status', response.status); @@ -314,7 +316,9 @@ function makeClientStreamRequestFunction(method, serialize, deserialize) { return; } if (response.status.code !== grpc.status.OK) { - callback(response.status); + var error = new Error(response.status.details); + error.code = response.status.code; + callback(error); return; } stream.emit('status', response.status);
Return error status as actual errors to client callbacks
diff --git a/dynamic_scraper/utils/processors.py b/dynamic_scraper/utils/processors.py index <HASH>..<HASH> 100644 --- a/dynamic_scraper/utils/processors.py +++ b/dynamic_scraper/utils/processors.py @@ -1,7 +1,7 @@ #Stage 2 Update (Python 3) from __future__ import unicode_literals from builtins import str -import datetime +import datetime, re from scrapy import log @@ -10,7 +10,14 @@ def string_strip(text, loader_context): text = str(text) chars = loader_context.get('string_strip', ' \n\t\r') return text.strip(chars) + + +def remove_chars(text, loader_context): + pattern = loader_context.get('remove_chars', '') + result_str = re.sub(pattern, '', str(text)) + return result_str + def pre_string(text, loader_context): pre_str = loader_context.get('pre_string', '')
New processor remove_chars for removing characters or character pattern from a scraped string
diff --git a/source/php/Module/Posts/TemplateController/ExpandableListTemplate.php b/source/php/Module/Posts/TemplateController/ExpandableListTemplate.php index <HASH>..<HASH> 100644 --- a/source/php/Module/Posts/TemplateController/ExpandableListTemplate.php +++ b/source/php/Module/Posts/TemplateController/ExpandableListTemplate.php @@ -101,6 +101,9 @@ class ExpandableListTemplate } else { $accordion[$index]['heading'] = apply_filters('the_title', $post->post_title); } + + } else { + $accordion[$index]['heading'] = apply_filters('the_title', $post->post_title); } $accordion[$index]['content'] = apply_filters('the_content', $post->post_content);
Fixes missing titles in accordion view
diff --git a/test/index.js b/test/index.js index <HASH>..<HASH> 100644 --- a/test/index.js +++ b/test/index.js @@ -88,7 +88,7 @@ kill.hooks = []; // // Start-up a small static file server so we can download files and fixtures - // inside our tests. + // inside our PhantomJS test. // require('./static'),
Remove unnecessary change in test/index.js
diff --git a/src/mui/list/FilterButton.js b/src/mui/list/FilterButton.js index <HASH>..<HASH> 100644 --- a/src/mui/list/FilterButton.js +++ b/src/mui/list/FilterButton.js @@ -52,7 +52,7 @@ export class FilterButton extends Component { render() { const hiddenFilters = this.getHiddenFilters(); - return (hiddenFilters.length > 0 && <span> + return (hiddenFilters.length > 0 && <div style={{ display: 'inline-block' }}> <FlatButton primary label="Add Filter" icon={<ContentFilter />} onTouchTap={this.handleTouchTap} /> <Popover open={this.state.open} @@ -67,7 +67,7 @@ export class FilterButton extends Component { )} </Menu> </Popover> - </span>); + </div>); } }
Fix regression in material ui Popover component
diff --git a/MQ2/plugins/csv_plugin.py b/MQ2/plugins/csv_plugin.py index <HASH>..<HASH> 100644 --- a/MQ2/plugins/csv_plugin.py +++ b/MQ2/plugins/csv_plugin.py @@ -67,12 +67,13 @@ def get_qtls_from_rqtl_data(matrix, lod_threshold): # row 0: markers # row 1: chr # row 2: pos - for row in t_matrix[4:]: + for row in t_matrix[3:]: lgroup = None max_lod = None peak = None cnt = 1 while cnt < len(row): + print row[0] if lgroup is None: lgroup = t_matrix[1][cnt] diff --git a/MQ2/plugins/xls_plugin.py b/MQ2/plugins/xls_plugin.py index <HASH>..<HASH> 100644 --- a/MQ2/plugins/xls_plugin.py +++ b/MQ2/plugins/xls_plugin.py @@ -102,7 +102,7 @@ def get_qtls_from_rqtl_data(matrix, lod_threshold): # row 0: markers # row 1: chr # row 2: pos - for row in t_matrix[4:]: + for row in t_matrix[3:]: lgroup = None max_lod = None peak = None
The trait information starts on the fourth column of the file, not the fifth...
diff --git a/test/CrateTest/PDO/PDOTest.php b/test/CrateTest/PDO/PDOTest.php index <HASH>..<HASH> 100644 --- a/test/CrateTest/PDO/PDOTest.php +++ b/test/CrateTest/PDO/PDOTest.php @@ -143,6 +143,17 @@ class PDOTest extends TestCase /** * @covers ::getAttribute + * @covers ::setAttribute + */ + public function testGetAndSetStatementClass() + { + $this->assertEquals(PDOStatement::class, $this->pdo->getAttribute(PDO::ATTR_STATEMENT_CLASS)); + $this->pdo->setAttribute(PDO::ATTR_STATEMENT_CLASS, 'Doctrine\DBAL\Driver\PDO\Statement'); + $this->assertEquals('Doctrine\DBAL\Driver\PDO\Statement', $this->pdo->getAttribute(PDO::ATTR_STATEMENT_CLASS)); + } + + /** + * @covers ::getAttribute */ public function testGetVersion() {
Add test case for ATTR_STATEMENT_CLASS
diff --git a/src/python/grpcio/grpc/experimental/aio/_interceptor.py b/src/python/grpcio/grpc/experimental/aio/_interceptor.py index <HASH>..<HASH> 100644 --- a/src/python/grpcio/grpc/experimental/aio/_interceptor.py +++ b/src/python/grpcio/grpc/experimental/aio/_interceptor.py @@ -53,6 +53,7 @@ class UnaryUnaryClientInterceptor(metaclass=ABCMeta): client_call_details: ClientCallDetails, request: RequestType) -> Union[UnaryUnaryCall, ResponseType]: """Intercepts a unary-unary invocation asynchronously. + Args: continuation: A coroutine that proceeds with the invocation by executing the next interceptor in chain or invoking the @@ -65,8 +66,10 @@ class UnaryUnaryClientInterceptor(metaclass=ABCMeta): client_call_details: A ClientCallDetails object describing the outgoing RPC. request: The request value for the RPC. + Returns: - An object with the RPC response. + An object with the RPC response. + Raises: AioRpcError: Indicating that the RPC terminated with non-OK status. asyncio.CancelledError: Indicating that the RPC was canceled.
Fix lack of empty line in the docstring
diff --git a/PhpAmqpLib/Wire/IO/StreamIO.php b/PhpAmqpLib/Wire/IO/StreamIO.php index <HASH>..<HASH> 100644 --- a/PhpAmqpLib/Wire/IO/StreamIO.php +++ b/PhpAmqpLib/Wire/IO/StreamIO.php @@ -309,7 +309,7 @@ class StreamIO extends AbstractIO $buffer = fwrite($this->sock, mb_substr($data, $written, 8192, 'ASCII'), 8192); $this->cleanup_error_handler(); } catch (\ErrorException $e) { - throw new AMQPRuntimeException($e->getMessage(). $e->getCode(), $e); + throw new AMQPRuntimeException($e->getMessage(), $e->getCode(), $e); } if ($buffer === false) {
Fixed typo that may cause fatal error in runtime due to incorrect arguments being passed. Fixes #<I>
diff --git a/tasks/iisexpress.js b/tasks/iisexpress.js index <HASH>..<HASH> 100644 --- a/tasks/iisexpress.js +++ b/tasks/iisexpress.js @@ -23,7 +23,9 @@ module.exports = function(grunt) { cmd: options.cmd, args: args, opts: spawnOptions - }, function() {}); + }, function(error, result, code) { + grunt.event.emit('iisexpress.done', error, result, code); + }); spawn.stdout.on('data', function (data) { grunt.log.write('IIS Express: ' + data); @@ -45,4 +47,4 @@ module.exports = function(grunt) { }); } }); -}; \ No newline at end of file +};
Emit event when child process exits.
diff --git a/pkg/archive/changes.go b/pkg/archive/changes.go index <HASH>..<HASH> 100644 --- a/pkg/archive/changes.go +++ b/pkg/archive/changes.go @@ -187,10 +187,15 @@ func changes(layers []string, rw string, dc deleteChange, sc skipChange) ([]Chan } if change.Kind == ChangeAdd || change.Kind == ChangeDelete { parent := filepath.Dir(path) - if _, ok := changedDirs[parent]; !ok && parent != "/" { - changes = append(changes, Change{Path: parent, Kind: ChangeModify}) - changedDirs[parent] = struct{}{} + tail := []Change{} + for parent != "/" { + if _, ok := changedDirs[parent]; !ok && parent != "/" { + tail = append([]Change{{Path: parent, Kind: ChangeModify}}, tail...) + changedDirs[parent] = struct{}{} + } + parent = filepath.Dir(parent) } + changes = append(changes, tail...) } // Record change
Extend the fix for #<I> to cover the whole path Extend the fix for #<I> to cover every component of an added or removed file's pathname.
diff --git a/lib/gds_api/test_helpers/publishing_api_v2.rb b/lib/gds_api/test_helpers/publishing_api_v2.rb index <HASH>..<HASH> 100644 --- a/lib/gds_api/test_helpers/publishing_api_v2.rb +++ b/lib/gds_api/test_helpers/publishing_api_v2.rb @@ -154,6 +154,12 @@ module GdsApi stub_request(:get, url).to_return(status: 200, body: item.to_json, headers: {}) end + def publishing_api_has_links(links) + links = links.with_indifferent_access + url = PUBLISHING_API_V2_ENDPOINT + "/links/" + links[:content_id] + stub_request(:get, url).to_return(status: 200, body: links.to_json, headers: {}) + end + private def stub_publishing_api_put(content_id, body, resource_path, override_response_hash = {}) response_hash = {status: 200, body: '{}', headers: {"Content-Type" => "application/json; charset=utf-8"}}
Add a publishing_api_has_links helper This is to stub that the Publishing API V2 contains a links payload.
diff --git a/kdtree/kdtree.py b/kdtree/kdtree.py index <HASH>..<HASH> 100644 --- a/kdtree/kdtree.py +++ b/kdtree/kdtree.py @@ -404,7 +404,7 @@ class KDNode(Node): return best @require_axis - def search_nn_dist(self, point, distance, best=[]): + def search_nn_dist(self, point, distance, best=None): """ Search the n nearest nodes of the given point which are within given distance @@ -413,6 +413,9 @@ class KDNode(Node): nodes to the point within the distance will be returned. """ + if best is None: + best = [] + # consider the current node if self.dist(point) < distance: best.append(self) @@ -471,7 +474,7 @@ class KDNode(Node): -def create(point_list=[], dimensions=None, axis=0, sel_axis=None): +def create(point_list=None, dimensions=None, axis=0, sel_axis=None): """ Creates a kd-tree from a list of points All points in the list must be of the same dimensionality.
Fixing default argument issue in search_nn_dist. This was causing nodes to be duplicated in the array returned by the method.
diff --git a/plugin/pkg/scheduler/algorithmprovider/defaults/defaults.go b/plugin/pkg/scheduler/algorithmprovider/defaults/defaults.go index <HASH>..<HASH> 100644 --- a/plugin/pkg/scheduler/algorithmprovider/defaults/defaults.go +++ b/plugin/pkg/scheduler/algorithmprovider/defaults/defaults.go @@ -238,7 +238,7 @@ func GetEquivalencePod(pod *v1.Pod) interface{} { // to be equivalent if len(pod.OwnerReferences) != 0 { for _, ref := range pod.OwnerReferences { - if *ref.Controller && isValidControllerKind(ref.Kind) { + if *ref.Controller { equivalencePod.ControllerRef = ref // a pod can only belongs to one controller break @@ -248,17 +248,6 @@ func GetEquivalencePod(pod *v1.Pod) interface{} { return &equivalencePod } -// isValidControllerKind checks if a given controller's kind can be applied to equivalence pod algorithm. -func isValidControllerKind(kind string) bool { - switch kind { - // list of kinds that we cannot handle - case StatefulSetKind: - return false - default: - return true - } -} - // EquivalencePod is a group of pod attributes which can be reused as equivalence to schedule other pods. type EquivalencePod struct { ControllerRef metav1.OwnerReference
Remove special case for StatefulSets in scheduler
diff --git a/timer.js b/timer.js index <HASH>..<HASH> 100644 --- a/timer.js +++ b/timer.js @@ -111,14 +111,10 @@ var hours = document.createElement('div'); hours.className = 'hours'; - var days = document.createElement('div'); - days.className = 'days'; - var clearDiv = document.createElement('div'); clearDiv.className = 'clearDiv'; return timerBoxElement. - append(days). append(hours). append(minutes). append(seconds). @@ -135,6 +131,10 @@ that.onComplete(); }); + that.on('complete', function(){ + that.addClass('timeout'); + }); + }; })(jQuery);
Add timeout class on complete event. Remove days element.
diff --git a/closure/goog/editor/plugins/linkdialogplugin.js b/closure/goog/editor/plugins/linkdialogplugin.js index <HASH>..<HASH> 100644 --- a/closure/goog/editor/plugins/linkdialogplugin.js +++ b/closure/goog/editor/plugins/linkdialogplugin.js @@ -332,15 +332,14 @@ goog.editor.plugins.LinkDialogPlugin.prototype.handleOk = function(e) { this.touchUpAnchorOnOk_(extraAnchors[i], e); } - // Place cursor to the right of the modified link, and immediately dispatch a - // selectionChange event. + // Place cursor to the right of the modified link. this.currentLink_.placeCursorRightOf(); - this.getFieldObject().dispatchSelectionChangeEvent(); - - this.getFieldObject().dispatchChange(); this.getFieldObject().focus(); + this.getFieldObject().dispatchSelectionChangeEvent(); + this.getFieldObject().dispatchChange(); + this.eventHandler_.removeAll(); };
Automated g4 rollback *** Reason for rollback *** Didn't help fix *** Original change description *** Have LinkDialogPlugin fire selectionChange event immediately after moving cursor by moving the focus call to after the dispatch call (as well as after the change event dispatch call) *** ------------- Created by MOE: <URL>
diff --git a/ansuz.js b/ansuz.js index <HASH>..<HASH> 100644 --- a/ansuz.js +++ b/ansuz.js @@ -136,7 +136,10 @@ var exists=ansuz.exists=function (A,e){ if the provided argument is an object, instead test if one of the keys corresponds to such a value */ - if(isArray(A)) return (A.indexOf(e)!==-1)?true:false; + if(typeof e === 'undefined'){ + return A; + } + if(isArray(A)) return (A.indexOf(e)!==-1); if(typeof A==='object'){ for(a in A){ if(A[a] == e){ @@ -186,7 +189,7 @@ var log=ansuz.log=function(a,b){ }; var is=ansuz.is=function (a,b){ -/* alias for equality, for when you want to curry */ + /* alias for equality, for when you want to curry */ return a === b; };
ansuz.js : checks for truthiness if only one argument is passed
diff --git a/src/scs_core/aws/client/mqtt_client.py b/src/scs_core/aws/client/mqtt_client.py index <HASH>..<HASH> 100644 --- a/src/scs_core/aws/client/mqtt_client.py +++ b/src/scs_core/aws/client/mqtt_client.py @@ -31,9 +31,9 @@ class MQTTClient(object): __QUEUE_DROP_BEHAVIOUR = MQTTLib.DROP_OLDEST # not required for infinite queue __QUEUE_DRAINING_FREQUENCY = 2 # recommended: 2 (Hz) - __RECONN_BASE = 2 # recommended: 1 (sec) - __RECONN_MAX = 64 # recommended: 32 (sec) - __RECONN_STABLE = 40 # recommended: 20 (sec) + __RECONN_BASE = 1 # recommended: 1 (sec) + __RECONN_MAX = 32 # recommended: 32 (sec) + __RECONN_STABLE = 20 # recommended: 20 (sec) __DISCONNECT_TIMEOUT = 20 # recommended: 10 (sec) __OPERATION_TIMEOUT = 10 # recommended: 5 (sec)
Changed connect times in MQTTClient.
diff --git a/OpenSSL/crypto.py b/OpenSSL/crypto.py index <HASH>..<HASH> 100644 --- a/OpenSSL/crypto.py +++ b/OpenSSL/crypto.py @@ -1192,11 +1192,11 @@ def dump_certificate_request(type, req): if type == FILETYPE_PEM: result_code = _api.PEM_write_bio_X509_REQ(bio, req._req) elif type == FILETYPE_ASN1: - pass + result_code = _api.i2d_X509_REQ_bio(bio, req._req) elif type == FILETYPE_TEXT: - pass + result_code = _api.X509_REQ_print_ex(bio, req._req, 0, 0) else: - 1/0 + raise ValueError("type argument must be FILETYPE_PEM, FILETYPE_ASN1, or FILETYPE_TEXT") if result_code == 0: 1/0 @@ -1220,7 +1220,7 @@ def load_certificate_request(type, buffer): if type == FILETYPE_PEM: req = _api.PEM_read_bio_X509_REQ(bio, _api.NULL, _api.NULL, _api.NULL) elif type == FILETYPE_ASN1: - pass + req = _api.d2i_X509_REQ_bio(bio, _api.NULL) else: 1/0
Make another FunctionTests case pass by implementing some more proper error handling
diff --git a/source/clique/collection.py b/source/clique/collection.py index <HASH>..<HASH> 100644 --- a/source/clique/collection.py +++ b/source/clique/collection.py @@ -251,15 +251,25 @@ class Collection(object): else: data['padding'] = '%d' - if self.indexes: + if '{holes}' in pattern: data['holes'] = self.holes().format('{ranges}') + if '{range}' in pattern or '{ranges}' in pattern: indexes = list(self.indexes) - if len(indexes) == 1: + indexes_count = len(indexes) + + if indexes_count == 0: + data['range'] = '' + + elif indexes_count == 1: data['range'] = '{0}'.format(indexes[0]) + else: - data['range'] = '{0}-{1}'.format(indexes[0], indexes[-1]) + data['range'] = '{0}-{1}'.format( + indexes[0], indexes[-1] + ) + if '{ranges}' in pattern: separated = self.separate() if len(separated) > 1: ranges = [collection.format('{range}') @@ -270,11 +280,6 @@ class Collection(object): data['ranges'] = ', '.join(ranges) - else: - data['holes'] = '' - data['range'] = '' - data['ranges'] = '' - return pattern.format(**data) def is_contiguous(self):
[#<I>] Avoid redundant and potentially infinitely recursive computation in Collection.format.
diff --git a/html5lib/_trie/_base.py b/html5lib/_trie/_base.py index <HASH>..<HASH> 100644 --- a/html5lib/_trie/_base.py +++ b/html5lib/_trie/_base.py @@ -1,6 +1,9 @@ from __future__ import absolute_import, division, unicode_literals -from collections import Mapping +try: + from collections.abc import Mapping +except ImportError: # Python 2.7 + from collections import Mapping class Trie(Mapping): diff --git a/html5lib/treebuilders/dom.py b/html5lib/treebuilders/dom.py index <HASH>..<HASH> 100644 --- a/html5lib/treebuilders/dom.py +++ b/html5lib/treebuilders/dom.py @@ -1,7 +1,10 @@ from __future__ import absolute_import, division, unicode_literals -from collections import MutableMapping +try: + from collections.abc import MutableMapping +except ImportError: # Python 2.7 + from collections import MutableMapping from xml.dom import minidom, Node import weakref
Try to import MutableMapping from collections.abc (#<I>) Note that collections.abc has been added in Python <I>. Fixes #<I>
diff --git a/lib/virtualmonkey/deployment_runner.rb b/lib/virtualmonkey/deployment_runner.rb index <HASH>..<HASH> 100644 --- a/lib/virtualmonkey/deployment_runner.rb +++ b/lib/virtualmonkey/deployment_runner.rb @@ -261,13 +261,12 @@ module VirtualMonkey end raise "Fatal: Failed to verify that monitoring is operational" unless response #TODO: pass in some list of plugin info to check multiple values. For now just -# hardcoding the df check +# hardcoding the cpu check sleep 60 # This is to allow monitoring data to accumulate - monitor=server.get_sketchy_data({'start'=>-60,'end'=>-20,'plugin_name'=>"df",'plugin_type'=>"df-mnt"}) - data=monitor['data'] - free=data['free'] - raise "No df free data" unless free.length > 0 - raise "DF not free" unless free[0] > 0 + monitor=server.get_sketchy_data({'start'=>-60,'end'=>-20,'plugin_name'=>"cpu-0",'plugin_type'=>"cpu-idle"}) + idle_values = monitor['data']['value'] + raise "No cpu idle data" unless idle_values.length > 0 + raise "No idle time" unless idle_values[0] > 0 puts "Monitoring is OK for #{server.nickname}" end end
Jon's monitoring check changes (compat w/ Win & Linux)
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -191,10 +191,20 @@ exports.argmentApp = function (app, opts) { app.engine('html', exports.engine); app.locals.appRoot = opts.appRoot; app.locals.assetsDirName = opts.assetsDirName; - var _render = app.response.render; - app.response.render = function () { - this.locals.partials = this.locals.partials || getPartials(opts.appRoot); - _render.apply(this, arguments); - }; + app.use(function (req, res, next) { + var _render = res.render; + res.render = function (view, options, fn) { + if ('function' === typeof options) { + fn = options; + options = {}; + } + this.locals.partials = this.locals.partials || getPartials(opts + .appRoot); + options = assign({}, app.locals, this.locals, options); + this.locals = {}; + _render.call(this, view, options, fn); + }; + next(); + }); app.use(exports.middleware(opts)); -}; +}; \ No newline at end of file
fix render monkey patch for express-promise
diff --git a/src/main/resources/META-INF/resources/primefaces/datepicker/0-datepicker.js b/src/main/resources/META-INF/resources/primefaces/datepicker/0-datepicker.js index <HASH>..<HASH> 100644 --- a/src/main/resources/META-INF/resources/primefaces/datepicker/0-datepicker.js +++ b/src/main/resources/META-INF/resources/primefaces/datepicker/0-datepicker.js @@ -1802,7 +1802,9 @@ : ((((this.isMultipleSelection() || this.isRangeSelection()) && this.value instanceof Array) ? this.value[0] : this.value) || this.parseValue(new Date())); - this.updateViewDate(null, viewDate); + if(viewDate instanceof Date) { + this.updateViewDate(null, viewDate); + } } },
Fix primefaces#<I>: Only update viewDate if it is really a date (#<I>)
diff --git a/cmd/prometheus/main.go b/cmd/prometheus/main.go index <HASH>..<HASH> 100644 --- a/cmd/prometheus/main.go +++ b/cmd/prometheus/main.go @@ -500,12 +500,10 @@ func main() { case <-term: level.Warn(logger).Log("msg", "Received SIGTERM, exiting gracefully...") reloadReady.Close() - case <-webHandler.Quit(): level.Warn(logger).Log("msg", "Received termination request via web service, exiting gracefully...") case <-cancel: reloadReady.Close() - break } return nil },
remove unwanted break (#<I>)
diff --git a/salt/master.py b/salt/master.py index <HASH>..<HASH> 100644 --- a/salt/master.py +++ b/salt/master.py @@ -697,7 +697,11 @@ class AESFuncs(object): self.serial.dump( clear_load, open( os.path.join( - salt.utils.jid_dir(jid), + salt.utils.jid_dir( + jid, + self.opts['cachedir'], + self.opts['hash_type'] + ), '.load.p' ), 'w+')
Ensure the correct args are passed to jid_dir
diff --git a/Validator/UniqueValidator.php b/Validator/UniqueValidator.php index <HASH>..<HASH> 100644 --- a/Validator/UniqueValidator.php +++ b/Validator/UniqueValidator.php @@ -60,6 +60,7 @@ class UniqueValidator extends ConstraintValidator if ( $id !== null && count($entities) === 1 + && $accessor->getValue($object, $id) !== null && $entityManager->getReference($class, $accessor->getValue($object, $id)) === current($entities) ) { return;
Ignore ID attribute when it is null in Unique validator. (#<I>) Ignore ID attribute when it is null in Unique validator
diff --git a/furious/context.py b/furious/context.py index <HASH>..<HASH> 100644 --- a/furious/context.py +++ b/furious/context.py @@ -57,6 +57,18 @@ def new(): return new_context +def init_context_with_async(async): + """Instantiate a new JobContext and store a reference to it in the global + async context to make later retrieval easier.""" + if not _local_context._executing_async_context: + raise ContextExistsError + + _init() + job_context = JobContext(async) + _local_context._executing_async_context = job_context + return job_context + + def get_current_async(): """Return a reference to the currently executing Async job object or None if not in an Async job. @@ -178,7 +190,14 @@ def _init(): if hasattr(_local_context, '_initialized'): return + # Used to track the context object stack. _local_context.registry = [] + + # Used to provide easy access to the currently running Async job. + _local_context._executing_async_context = None + _local_context._executing_async = None + + # So that we do not inadvertently reinitialize the local context. _local_context._initialized = True return _local_context
Add setup and helper to initialize the environ with a job context.
diff --git a/src/PhpFlo/Port.php b/src/PhpFlo/Port.php index <HASH>..<HASH> 100644 --- a/src/PhpFlo/Port.php +++ b/src/PhpFlo/Port.php @@ -134,12 +134,6 @@ final class Port extends AbstractPort throw new PortException("This port is not connected"); } - if (false == $this->hasType($data, 'send')) { - throw new InvalidTypeException( - 'Port tries to send invalid data type "' . gettype($data) . '"!' - ); - } - if ($this->isConnected()) { return $this->socket->send($data); }
Fix regression in port type check on initialization
diff --git a/salt/cloud/clouds/msazure.py b/salt/cloud/clouds/msazure.py index <HASH>..<HASH> 100644 --- a/salt/cloud/clouds/msazure.py +++ b/salt/cloud/clouds/msazure.py @@ -63,7 +63,7 @@ try: import salt.utils.msazure from salt.utils.msazure import object_to_dict HAS_LIBS = True -except ImportError, e: +except ImportError: pass __virtualname__ = 'azure' @@ -674,9 +674,9 @@ def create(vm_): ret['Attached Volumes'] = created data = show_instance(vm_['name'], call='action') - log.info('Created Cloud VM {0[name]!r}'.format(vm_)) + log.info('Created Cloud VM \'{0[name]}\''.format(vm_)) log.debug( - '{0[name]!r} VM creation details:\n{1}'.format( + '\'{0[name]}\' VM creation details:\n{1}'.format( vm_, pprint.pformat(data) ) )
Correcting lint error, reverting !r
diff --git a/tests/test_reading.py b/tests/test_reading.py index <HASH>..<HASH> 100755 --- a/tests/test_reading.py +++ b/tests/test_reading.py @@ -12,10 +12,10 @@ def test_from_local_file(): def test_from_analysis_id(): - mwtabfile_generator = mwtab.read_files("5") + mwtabfile_generator = mwtab.read_files("2") mwtabfile = next(mwtabfile_generator) - assert mwtabfile.study_id == "ST000004" - assert mwtabfile.analysis_id == "AN000005" + assert mwtabfile.study_id == "ST000002" + assert mwtabfile.analysis_id == "AN000002" @pytest.mark.parametrize("files_source", [
Fixes `test_reading.py` module, which tests the mwtab package's ability to read and generate `~mwtab.MWTabFile` objects.
diff --git a/spacy/about.py b/spacy/about.py index <HASH>..<HASH> 100644 --- a/spacy/about.py +++ b/spacy/about.py @@ -1,6 +1,6 @@ # fmt: off __title__ = "spacy" -__version__ = "2.2.0.dev16" +__version__ = "2.2.0.dev17" __release__ = True __download_url__ = "https://github.com/explosion/spacy-models/releases/download" __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json"
Set version to <I>.de<I>
diff --git a/Source/classes/HUD.js b/Source/classes/HUD.js index <HASH>..<HASH> 100644 --- a/Source/classes/HUD.js +++ b/Source/classes/HUD.js @@ -11,10 +11,10 @@ Garnish.HUD = Garnish.Base.extend({ this.$trigger = $(trigger); this.setSettings(settings, Garnish.HUD.defaults); - if (typeof Garnish.HUD.activeHUDs == "undefined") - { - Garnish.HUD.activeHUDs = {}; - } + if (typeof Garnish.HUD.activeHUDs == "undefined") + { + Garnish.HUD.activeHUDs = {}; + } this.showing = false; @@ -42,11 +42,11 @@ Garnish.HUD = Garnish.Base.extend({ return; } - if (Garnish.HUD.activeHUDs.length && !this.settings.closeOtherHUDs) + if (this.settings.closeOtherHUDs) { for (var hudID in Garnish.HUD.activeHUDs) { - Garnish.HUD.activeHUDs[hudID].hide(); - } + Garnish.HUD.activeHUDs[hudID].hide(); + } } this.$hud.show(); @@ -256,6 +256,6 @@ Garnish.HUD = Garnish.Base.extend({ onShow: $.noop, onHide: $.noop, closeBtn: null, - closeOtherHUDs: false + closeOtherHUDs: true } });
Fix some broken-logic naming for HUD.
diff --git a/lib/Resque/Worker.php b/lib/Resque/Worker.php index <HASH>..<HASH> 100644 --- a/lib/Resque/Worker.php +++ b/lib/Resque/Worker.php @@ -69,13 +69,8 @@ class Resque_Worker } $this->queues = $queues; - if(function_exists('gethostname')) { - $hostname = gethostname(); - } - else { - $hostname = php_uname('n'); - } - $this->hostname = $hostname; + $this->hostname = php_uname('n'); + $this->id = $this->hostname . ':'.getmypid() . ':' . implode(',', $this->queues); }
gethostname() doesn't work on Amazon's EC2
diff --git a/condoor/version.py b/condoor/version.py index <HASH>..<HASH> 100644 --- a/condoor/version.py +++ b/condoor/version.py @@ -1,3 +1,3 @@ """Version information.""" -__version__ = '1.0.5.dev2' +__version__ = '1.0.6'
Bumping version number to <I>
diff --git a/app/controllers/renalware/events_controller.rb b/app/controllers/renalware/events_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/renalware/events_controller.rb +++ b/app/controllers/renalware/events_controller.rb @@ -5,14 +5,11 @@ module Renalware def new @event = Event.new - authorize @event @event_type = EventType.new end def create @event = @patient.events.new(event_params) - authorize @event - if @event.save redirect_to patient_events_path(@patient), :notice => "You have successfully added an encounter/event." else @@ -22,7 +19,6 @@ module Renalware def index @events = @patient.events - authorize @events end private
Removed pundit 'authorize' from events_controller's patient actions.
diff --git a/pysc2/env/sc2_env.py b/pysc2/env/sc2_env.py index <HASH>..<HASH> 100644 --- a/pysc2/env/sc2_env.py +++ b/pysc2/env/sc2_env.py @@ -511,7 +511,8 @@ class SC2Env(environment.Base): wait_time = next_step_time - time.time() if wait_time > 0.0: - time.sleep(wait_time) + with sw("wait_on_step_mul"): + time.sleep(wait_time) # Note that we use the targeted next_step_time here, not the actual # time. This is so that we advance our view of the SC2 game clock in @@ -557,7 +558,8 @@ class SC2Env(environment.Base): self._target_step, game_loop) - time.sleep(REALTIME_GAME_LOOP_SECONDS) + with sw("wait_on_game_loop"): + time.sleep(REALTIME_GAME_LOOP_SECONDS) else: # We're beyond our target now. if needed_to_wait:
Wrap realtime waits with stopwatch. PiperOrigin-RevId: <I>
diff --git a/src/ocrmypdf/pdfinfo/info.py b/src/ocrmypdf/pdfinfo/info.py index <HASH>..<HASH> 100644 --- a/src/ocrmypdf/pdfinfo/info.py +++ b/src/ocrmypdf/pdfinfo/info.py @@ -409,15 +409,10 @@ class ImageInfo: return _get_dpi(self._shorthand, (self._width, self._height)) def __repr__(self): - class_locals = { - attr: getattr(self, attr, None) - for attr in dir(self) - if not attr.startswith('_') - } return ( - "<ImageInfo '{name}' {type_} {width}x{height} {color} " - "{comp} {bpc} {enc} {dpi}>" - ).format(**class_locals) + f"<ImageInfo '{self.name}' {self.type_} {self.width}x{self.height} " + f"{self.color} {self.comp} {self.bpc} {self.enc} {self.dpi}>" + ) def _find_inline_images(contentsinfo: ContentsInfo) -> Iterator[ImageInfo]:
info: replace introspection with explicit f-string
diff --git a/core/codegen-runtime/src/main/java/org/overture/codegen/runtime/Utils.java b/core/codegen-runtime/src/main/java/org/overture/codegen/runtime/Utils.java index <HASH>..<HASH> 100644 --- a/core/codegen-runtime/src/main/java/org/overture/codegen/runtime/Utils.java +++ b/core/codegen-runtime/src/main/java/org/overture/codegen/runtime/Utils.java @@ -102,6 +102,10 @@ public class Utils return Double.toString(n.doubleValue()); } } + else if(obj instanceof Character) + { + return "'" + obj + "'"; + } else if(obj instanceof String) { return "\"" + obj.toString() + "\"";
Small fix to the 'toString(..)' method in the codegen runtime Utils class
diff --git a/learning.py b/learning.py index <HASH>..<HASH> 100644 --- a/learning.py +++ b/learning.py @@ -159,8 +159,8 @@ class NaiveBayesLearner(Learner): def train(self, dataset): """Just count the target/attr/val occurrences. Count how many times each value of each attribute occurs. - Store count in N[targetvalue][attr][val]. Let N[attr][None] be the - sum over all vals.""" + Store count in N[targetvalue][attr][val]. Let + N[targetvalue][attr][None] be the sum over all vals.""" self.dataset = dataset N = {} ## Initialize to 0 @@ -168,6 +168,7 @@ class NaiveBayesLearner(Learner): N[gv] = {} for attr in self.dataset.attrs: N[gv][attr] = {} + assert None not in self.dataset.values[attr] for val in self.dataset.values[attr]: N[gv][attr][val] = 0 N[gv][attr][None] = 0
NaiveBayesLearner: Fixed doc comment and added an assertion.
diff --git a/provision/docker/provisioner.go b/provision/docker/provisioner.go index <HASH>..<HASH> 100644 --- a/provision/docker/provisioner.go +++ b/provision/docker/provisioner.go @@ -42,7 +42,11 @@ func getRouterForApp(app provision.App) (router.Router, error) { type dockerProvisioner struct{} func (p *dockerProvisioner) Initialize() error { - return initDockerCluster() + err := initDockerCluster() + if err != nil { + return err + } + return migrateImages() } // Provision creates a route for the container
provision/docker: call migrateImages on Initialize Related to #<I> (only missing some tests, before I close it).
diff --git a/lhc/binf/genomic_feature.py b/lhc/binf/genomic_feature.py index <HASH>..<HASH> 100644 --- a/lhc/binf/genomic_feature.py +++ b/lhc/binf/genomic_feature.py @@ -5,6 +5,8 @@ from lhc.collections.sorted_list import SortedList class GenomicFeature(Interval): + __slots__ = ('_chr', 'children', 'name', 'type') + def __init__(self, name, type=None, interval=None, data=None): self._chr = None self.children = SortedList() @@ -94,3 +96,9 @@ class GenomicFeature(Interval): if depth == 0: return res if self.strand == '+' else revcmp(res) return res + + def __getstate__(self): + return self._chr, self.children, self.name, self.type, self.start, self.stop, self.data, self.strand, self.type + + def __setstate__(self, state): + self._chr, self.children, self.name, self.type, self.start, self.stop, self.data, self.strand, self.type = state
added get and set state to genomic feature
diff --git a/app/scripts/HorizontalGeneAnnotationsTrack.js b/app/scripts/HorizontalGeneAnnotationsTrack.js index <HASH>..<HASH> 100644 --- a/app/scripts/HorizontalGeneAnnotationsTrack.js +++ b/app/scripts/HorizontalGeneAnnotationsTrack.js @@ -674,6 +674,7 @@ class HorizontalGeneAnnotationsTrack extends HorizontalTiled1DPixiTrack { } else { r.setAttribute('fill', this.options.minusStrandColor); } + r.setAttribute('opacity', '0.3'); gTile.appendChild(r); });
<I>% opacity on SVG export gene annotations
diff --git a/src/Medoo.php b/src/Medoo.php index <HASH>..<HASH> 100644 --- a/src/Medoo.php +++ b/src/Medoo.php @@ -530,7 +530,7 @@ class Medoo if ($single_condition != []) { - $condition = $this->data_implode($single_condition, ''); + $condition = $this->data_implode($single_condition, ' AND'); if ($condition != '') {
[feature] Connect conditions with AND keyword by default
diff --git a/interact.js b/interact.js index <HASH>..<HASH> 100644 --- a/interact.js +++ b/interact.js @@ -98,7 +98,6 @@ maxPerElement: 1, snap: { - actions : ['drag'], enabled : false, mode : 'grid', endOnly : false, @@ -121,7 +120,6 @@ }, inertia: { - actions : ['drag'], enabled : false, resistance : 10, // the lambda in exponential decay minSpeed : 100, // target speed must be above this for inertia to start @@ -1961,7 +1959,6 @@ // check if inertia should be started inertiaPossible = (options[this.action].inertia.enabled && this.action !== 'gesture' - && contains(inertiaOptions.actions, this.action) && event !== inertiaStatus.startEvent); inertia = (inertiaPossible @@ -5189,7 +5186,6 @@ resistance: inertia.resistance, minSpeed: inertia.minSpeed, endSpeed: inertia.endSpeed, - actions: inertia.actions, allowResume: inertia.allowResume, zeroResumeDelta: inertia.zeroResumeDelta };
Remove actions:[] from snap and inertia options
diff --git a/example/main.go b/example/main.go index <HASH>..<HASH> 100644 --- a/example/main.go +++ b/example/main.go @@ -33,6 +33,29 @@ func main() { } func publish(written, read *disruptor.Cursor) { + + // sequence := disruptor.InitialSequenceValue + // writer := &disruptor.Writer2{} + + // // writer := disruptor.NewWriter(written, read, BufferSize) + // for sequence < Iterations { + // sequence = writer.Reserve() + // } + + // sequence := disruptor.InitialSequenceValue + // writer := disruptor.NewWriter(written, read, BufferSize) + + // for sequence <= Iterations { + // sequence = writer.Reserve() + // ringBuffer[sequence&BufferMask] = sequence + // written.Sequence = sequence + // // writer.Commit(sequence) + // } + + // fmt.Println(writer.Gating()) + + gating := 0 + previous := disruptor.InitialSequenceValue gate := disruptor.InitialSequenceValue @@ -42,12 +65,15 @@ func publish(written, read *disruptor.Cursor) { for wrap > gate { gate = read.Sequence + gating++ } ringBuffer[next&BufferMask] = next written.Sequence = next previous = next } + + fmt.Println("Gating", gating) } type SampleConsumer struct{}
Experimenting with different writing techniques.
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -15,7 +15,10 @@ module.exports = function download (opts, cb) { var symbols = opts.symbols || false if (!version) return cb(new Error('must specify version')) var filename = 'electron-v' + version + '-' + platform + '-' + arch + (symbols ? '-symbols' : '') + '.zip' - var url = process.env.ELECTRON_MIRROR || opts.mirror || 'https://github.com/atom/electron/releases/download/v' + var url = process.env.NPM_CONFIG_ELECTRON_MIRROR || + process.env.ELECTRON_MIRROR || + opts.mirror || + 'https://github.com/atom/electron/releases/download/v' url += version + '/electron-v' + version + '-' + platform + '-' + arch + (symbols ? '-symbols' : '') + '.zip' var homeDir = homePath() var cache = opts.cache || path.join(homeDir, './.electron')
Added npm config support Added support for electron mirror in npm config
diff --git a/src/Model/Table/ImportsTable.php b/src/Model/Table/ImportsTable.php index <HASH>..<HASH> 100644 --- a/src/Model/Table/ImportsTable.php +++ b/src/Model/Table/ImportsTable.php @@ -65,10 +65,26 @@ class ImportsTable extends Table ->notEmpty('filename'); $validator + ->requirePresence('status', 'create') + ->notEmpty('status'); + + $validator + ->requirePresence('model_name', 'create') + ->notEmpty('model_name'); + + $validator + ->requirePresence('attempts', 'create') + ->notEmpty('attempts'); + + $validator ->requirePresence('options', 'update') ->notEmpty('options', 'update'); $validator + ->requirePresence('attempted_date', 'update') + ->notEmpty('attempted_date', 'update'); + + $validator ->dateTime('trashed') ->allowEmpty('trashed');
Added validation rules for new fields (task #<I>)
diff --git a/ignite/contrib/handlers/neptune_logger.py b/ignite/contrib/handlers/neptune_logger.py index <HASH>..<HASH> 100644 --- a/ignite/contrib/handlers/neptune_logger.py +++ b/ignite/contrib/handlers/neptune_logger.py @@ -7,7 +7,6 @@ import torch import torch.nn as nn from torch.optim import Optimizer -import ignite import ignite.distributed as idist from ignite.contrib.handlers.base_logger import ( BaseLogger,
remove unused imports in contrib/handlers (#<I>)
diff --git a/currencies/__init__.py b/currencies/__init__.py index <HASH>..<HASH> 100644 --- a/currencies/__init__.py +++ b/currencies/__init__.py @@ -1 +1 @@ -__version__ = '0.3.0' +__version__ = '0.3.2'
pushed to version <I>.
diff --git a/demo_timeout.py b/demo_timeout.py index <HASH>..<HASH> 100644 --- a/demo_timeout.py +++ b/demo_timeout.py @@ -7,7 +7,7 @@ my_agent = 'mwapi demo script <ahalfaker@wikimedia.org>' session = mwapi.Session('https://10.11.12.13', user_agent=my_agent, timeout=0.5) -print("Making a request that should hang for 2 seconds and then timeout.") +print("Making a request that should hang for 0.5 seconds and then timeout.") try: session.get(action="fake") except mwapi.errors.TimeoutError as e:
Fixes print statement for demo_timeout. Timeout is <I> seconds.
diff --git a/lib/active_annotations/rdf_annotation.rb b/lib/active_annotations/rdf_annotation.rb index <HASH>..<HASH> 100644 --- a/lib/active_annotations/rdf_annotation.rb +++ b/lib/active_annotations/rdf_annotation.rb @@ -142,7 +142,9 @@ module ActiveAnnotations def start_time value = fragment_value.nil? ? nil : fragment_value.object.value.scan(/^t=(.*)$/).flatten.first.split(/,/)[0] - value.nil? ? nil : value.to_f + Float(value) + rescue + value end def start_time=(value) @@ -151,7 +153,9 @@ module ActiveAnnotations def end_time value = fragment_value.nil? ? nil : fragment_value.object.value.scan(/^t=(.*)$/).flatten.first.split(/,/)[1] - value.nil? ? nil : value.to_f + Float(value) + rescue + value end def end_time=(value)
Try casting start and end time into Floats but fall back to actual value
diff --git a/src/connection.js b/src/connection.js index <HASH>..<HASH> 100644 --- a/src/connection.js +++ b/src/connection.js @@ -27,11 +27,8 @@ class Connection { */ async setClient (skipListeners) { let sioConfig = this.auth.access_token ? { - query: 'bearer=' + this.auth.access_token, - reconnection: false - } : { - reconnection: false - } + query: 'bearer=' + this.auth.access_token + } : {} // Connect to parent namespace this.client = io.connect(this.options.api_url + this.options.namespace, sioConfig)
fix: Keep original reconnect behaviour alive, so long-term disconnects won't cause issues.
diff --git a/src/main/java/org/aludratest/impl/log4testing/data/TestObject.java b/src/main/java/org/aludratest/impl/log4testing/data/TestObject.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/aludratest/impl/log4testing/data/TestObject.java +++ b/src/main/java/org/aludratest/impl/log4testing/data/TestObject.java @@ -133,9 +133,4 @@ public abstract class TestObject { return comment; } - @Override - public int hashCode() { - return id == null ? 0 : id.hashCode(); - } - }
Remove useless hashCode() method Method would only be meaningful if equals() could also be implemented. As hashCode() falls back to an object-unique ID, it is no better than Object#hashCode().
diff --git a/eZ/Publish/Core/REST/Client/Input/Parser/FieldDefinition.php b/eZ/Publish/Core/REST/Client/Input/Parser/FieldDefinition.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/Core/REST/Client/Input/Parser/FieldDefinition.php +++ b/eZ/Publish/Core/REST/Client/Input/Parser/FieldDefinition.php @@ -68,11 +68,18 @@ class FieldDefinition extends Parser 'names' => $this->parserTools->parseTranslatableList( $data['names'] ), 'descriptions' => $this->parserTools->parseTranslatableList( $data['descriptions'] ), - // TODO: Call fromHash() here 'defaultValue' => $this->fieldTypeParser->parseValue( $data['fieldType'], $data['defaultValue'] ), + 'fieldSettings' => $this->fieldTypeParser->parseFieldSettings( + $data['fieldType'], + $data['fieldSettings'] + ), + 'validators' => $this->fieldTypeParser->parseFieldSettings( + $data['fieldType'], + $data['validatorConfiguration'] + ), ) ); } }
Implemented: Parsing of fieldSettings and validatorConfiguration.
diff --git a/lib/player.js b/lib/player.js index <HASH>..<HASH> 100644 --- a/lib/player.js +++ b/lib/player.js @@ -2856,7 +2856,8 @@ function grooveFileToDbFile(file, filenameHintWithoutPath, object) { object.albumArtistName = (file.getMetadata("album_artist") || "").trim(); object.albumName = (file.getMetadata("album") || "").trim(); object.compilation = !!(parseInt(file.getMetadata("TCP"), 10) || - parseInt(file.getMetadata("TCMP"), 10)); + parseInt(file.getMetadata("TCMP"), 10) || + parseInt(file.getMetadata("TPOS"))); object.track = parsedTrack.value; object.trackCount = parsedTrack.total; object.disc = parsedDisc.value;
TPOS tag indicates song is part of compilation
diff --git a/fabfile.py b/fabfile.py index <HASH>..<HASH> 100644 --- a/fabfile.py +++ b/fabfile.py @@ -5,7 +5,7 @@ import errno import os -from fabric.api import * +from fabric.api import (env, local, task) env.projname = local("python setup.py --name", capture=True) env.version = local("python setup.py --version", capture=True)
fabfile: Import only the needed symbols to please pyflakes.
diff --git a/example/extract-rest-api-id.js b/example/extract-rest-api-id.js index <HASH>..<HASH> 100644 --- a/example/extract-rest-api-id.js +++ b/example/extract-rest-api-id.js @@ -22,7 +22,7 @@ stdin.on('data', function (chunk) { }) stdin.on('end', function () { - let inputJSON = inputChunks.join() + let inputJSON = inputChunks.join('') let parsedData = JSON.parse(inputJSON) parsedData.items.forEach(function (curr) { if (curr.name === targetRestApiName) {
Update extract-rest-api-id.js (#<I>) Pass an empty string to the chunks join to avoid the default ',' (comma) separator used by JS.
diff --git a/java/client/test/org/openqa/selenium/ExecutingAsyncJavascriptTest.java b/java/client/test/org/openqa/selenium/ExecutingAsyncJavascriptTest.java index <HASH>..<HASH> 100644 --- a/java/client/test/org/openqa/selenium/ExecutingAsyncJavascriptTest.java +++ b/java/client/test/org/openqa/selenium/ExecutingAsyncJavascriptTest.java @@ -49,7 +49,7 @@ public class ExecutingAsyncJavascriptTest extends JUnit4TestBase { public void setUp() { assumeTrue(driver instanceof JavascriptExecutor); executor = (JavascriptExecutor) driver; - driver.manage().timeouts().setScriptTimeout(0, TimeUnit.MILLISECONDS); + driver.manage().timeouts().setScriptTimeout(5000, TimeUnit.MILLISECONDS); } @Test @@ -202,7 +202,6 @@ public class ExecutingAsyncJavascriptTest extends JUnit4TestBase { @Test public void shouldNotTimeoutWithMultipleCallsTheFirstOneBeingSynchronous() { driver.get(pages.ajaxyPage); - driver.manage().timeouts().setScriptTimeout(10, TimeUnit.MILLISECONDS); assertThat((Boolean) executor.executeAsyncScript("arguments[arguments.length - 1](true);")) .isTrue(); assertThat((Boolean) executor.executeAsyncScript(
[java] Setting script timeout to zero is nonsense, but default (<I> seconds) is too long for tests
diff --git a/client/objectstore.go b/client/objectstore.go index <HASH>..<HASH> 100644 --- a/client/objectstore.go +++ b/client/objectstore.go @@ -27,7 +27,7 @@ var ( backupListCmd = cli.Command{ Name: "list", - Usage: "list volume in objectstore: list <dest>", + Usage: "list backups in objectstore: list <dest>", Flags: []cli.Flag{ cli.StringFlag{ Name: "volume-uuid", diff --git a/client/volume.go b/client/volume.go index <HASH>..<HASH> 100644 --- a/client/volume.go +++ b/client/volume.go @@ -54,7 +54,7 @@ var ( volumeMountCmd = cli.Command{ Name: "mount", - Usage: "mount a volume to an specific path: mount <volume> [options]", + Usage: "mount a volume: mount <volume> [options]", Flags: []cli.Flag{ cli.StringFlag{ Name: "mountpoint",
cli: correct "mount" and "backup list" help text
diff --git a/umap/parametric_umap.py b/umap/parametric_umap.py index <HASH>..<HASH> 100644 --- a/umap/parametric_umap.py +++ b/umap/parametric_umap.py @@ -510,15 +510,15 @@ class ParametricUMAP(UMAP): print("Keras full model saved to {}".format(parametric_model_output)) # # save model.pkl (ignoring unpickleable warnings) - # with catch_warnings(): - # filterwarnings("ignore") + with catch_warnings(): + filterwarnings("ignore") # work around optimizers not pickling anymore (since tf 2.4) - self._optimizer_dict = self.optimizer.get_config() - model_output = os.path.join(save_location, "model.pkl") - with open(model_output, "wb") as output: - pickle.dump(self, output, pickle.HIGHEST_PROTOCOL) - if verbose: - print("Pickle of ParametricUMAP model saved to {}".format(model_output)) + self._optimizer_dict = self.optimizer.get_config() + model_output = os.path.join(save_location, "model.pkl") + with open(model_output, "wb") as output: + pickle.dump(self, output, pickle.HIGHEST_PROTOCOL) + if verbose: + print("Pickle of ParametricUMAP model saved to {}".format(model_output)) def get_graph_elements(graph_, n_epochs):
Finally fixed; extended test to check model loads correctly.
diff --git a/lib/godot/producer/producer.js b/lib/godot/producer/producer.js index <HASH>..<HASH> 100644 --- a/lib/godot/producer/producer.js +++ b/lib/godot/producer/producer.js @@ -8,10 +8,11 @@ var stream = require('stream'), ip = require('ip'), utile = require('utile'), - uuid = require('node-uuid'); + uuid = require('node-uuid'), + tick = typeof setImmediate == 'undefined' + ? process.nextTick + : setImmediate; -// -// ### function Producer (options) // #### @options {Object} Options for this producer. // Constructor function for the Producer object responsible // for creating events to process. @@ -107,7 +108,7 @@ Object.keys(Producer.prototype.types).forEach(function (key) { // if (value === 0) { return (function tickProduce() { - process.nextTick(function () { + tick(function () { self.produce(); tickProduce(); });
[fix] use setImmediate and do some fancy things to ensure we have backward compat
diff --git a/test/integration/projects.js b/test/integration/projects.js index <HASH>..<HASH> 100644 --- a/test/integration/projects.js +++ b/test/integration/projects.js @@ -72,10 +72,11 @@ module.exports = [ 'aio/content/examples/router/src/app/app-routing.module.9.ts' ] }, - { - repository: 'https://github.com/microsoft/typescript', - extraArguments: typescriptArguments - }, + // TODO: enable this when ``@typescript-eslint/parser` support typescript 4.0 + // { + // repository: 'https://github.com/microsoft/typescript', + // extraArguments: typescriptArguments + // }, { repository: 'https://github.com/microsoft/vscode', extraArguments: typescriptArguments
Temporarily disable integration tests for the TypeScript project (#<I>)
diff --git a/src/default_panels/GraphTransformsPanel.js b/src/default_panels/GraphTransformsPanel.js index <HASH>..<HASH> 100644 --- a/src/default_panels/GraphTransformsPanel.js +++ b/src/default_panels/GraphTransformsPanel.js @@ -46,6 +46,8 @@ export class Aggregations extends Component { {label: _('Max'), value: 'max'}, {label: _('First'), value: 'first'}, {label: _('Last'), value: 'last'}, + {label: _('Change'), value: 'change'}, + {label: _('Range'), value: 'range'}, ]} clearable={false} />
New aggregate functions (#<I>)
diff --git a/product/selectors/product.js b/product/selectors/product.js index <HASH>..<HASH> 100644 --- a/product/selectors/product.js +++ b/product/selectors/product.js @@ -157,12 +157,9 @@ export const getPopulatedProductsResult = (state, hash, result) => { let products = []; let totalProductCount = !hash ? 0 : null; - if (result) { + if (result && result.products) { totalProductCount = result.totalResultCount; - - if (result.products) { - products = result.products.map(id => getProductById(state, id).productData); - } + products = result.products.map(id => getProductById(state, id).productData); } return {
CON-<I>: Bugfix for the product selectors
diff --git a/client/state/home/reducer.js b/client/state/home/reducer.js index <HASH>..<HASH> 100644 --- a/client/state/home/reducer.js +++ b/client/state/home/reducer.js @@ -24,7 +24,7 @@ const schema = { export const quickLinksToggleStatus = withSchemaValidation( schema, - ( state = 'collapsed', action ) => { + ( state = 'expanded', action ) => { switch ( action.type ) { case HOME_QUICK_LINKS_EXPAND: return 'expanded'; diff --git a/client/state/selectors/is-home-quick-links-expanded.js b/client/state/selectors/is-home-quick-links-expanded.js index <HASH>..<HASH> 100644 --- a/client/state/selectors/is-home-quick-links-expanded.js +++ b/client/state/selectors/is-home-quick-links-expanded.js @@ -4,4 +4,4 @@ * @param {object} state Global state tree * @returns {object} Whether Quick Links are expanded */ -export default ( state ) => state.home?.quickLinksToggleStatus === 'expanded'; +export default ( state ) => state.home?.quickLinksToggleStatus !== 'collapsed';
My Home: Quick links expanded by default (#<I>)
diff --git a/Classes/Page/Part/MetatagPart.php b/Classes/Page/Part/MetatagPart.php index <HASH>..<HASH> 100644 --- a/Classes/Page/Part/MetatagPart.php +++ b/Classes/Page/Part/MetatagPart.php @@ -378,7 +378,7 @@ class MetatagPart extends AbstractPart * * @param array $tsConfig TypoScript config setup * - * @return string Page Id or url + * @return null|array of (linkParam, linkConf, linkMpMode) */ protected function detectCanonicalPage(array $tsConfig = array()) { @@ -1296,7 +1296,7 @@ class MetatagPart extends AbstractPart */ protected function generateCanonicalUrl() { - //User has specified a canonical URL in the backend + //User has specified a canonical URL in the page properties if (!empty($this->pageRecord['tx_metaseo_canonicalurl'])) { return $this->pageRecord['tx_metaseo_canonicalurl']; }
[TASK] fix doc: return data type Issue #<I>
diff --git a/test/host/loader.js b/test/host/loader.js index <HASH>..<HASH> 100644 --- a/test/host/loader.js +++ b/test/host/loader.js @@ -6,7 +6,7 @@ /*global run, exit:true*/ // From bdd.js /*global __coverage__*/ // Coverage data - var Func = Function, + var safeFunc = Function, sources, context = (function () { /*global global*/ // NodeJS global @@ -17,7 +17,7 @@ if ("undefined" !== typeof window) { return window; } - return this; //eslint-disable-line no-invalid-this + return safeFunc("return this;")(); }()), _resolvePath = function (configuration, relativePath) { @@ -94,7 +94,7 @@ } // Load the list of modules var sourcesJson = configuration.read("src/sources.json"); - sources = new Func("return " + sourcesJson + ";")(); + sources = safeFunc("return " + sourcesJson + ";")(); }, _loadBDD = function (configuration, verbose) {
Support Nashorn: access to global (#<I>)
diff --git a/tests/test_error.py b/tests/test_error.py index <HASH>..<HASH> 100644 --- a/tests/test_error.py +++ b/tests/test_error.py @@ -13,15 +13,21 @@ class StripeErrorTests(StripeTestCase): self.assertEqual(u'öre', six.text_type(err)) if six.PY2: self.assertEqual('\xc3\xb6re', str(err)) + else: + self.assertEqual(u'öre', str(err)) def test_formatting_with_request_id(self): err = StripeError(u'öre', headers={'request-id': '123'}) self.assertEqual(u'Request 123: öre', six.text_type(err)) if six.PY2: self.assertEqual('Request 123: \xc3\xb6re', str(err)) + else: + self.assertEqual(u'Request 123: öre', str(err)) def test_formatting_with_none(self): err = StripeError(None, headers={'request-id': '123'}) self.assertEqual(u'Request 123: <empty message>', six.text_type(err)) if six.PY2: self.assertEqual('Request 123: <empty message>', str(err)) + else: + self.assertEqual('Request 123: <empty message>', str(err))
Adds a couple more error assertions for Python 3 This is basically a no-op, but just adds a few more test case branches for Python 3 to demonstrate what `str(...)` on an error should be expected to return in that version (instead of just on Python 2).
diff --git a/stanza/utils/training/run_pos.py b/stanza/utils/training/run_pos.py index <HASH>..<HASH> 100644 --- a/stanza/utils/training/run_pos.py +++ b/stanza/utils/training/run_pos.py @@ -1,6 +1,7 @@ import logging +import os from stanza.models import tagger @@ -24,6 +25,10 @@ def run_treebank(mode, paths, treebank, short_name, test_pred_file = temp_output_file if temp_output_file else f"{pos_dir}/{short_name}.test.pred.conllu" if mode == Mode.TRAIN: + if not os.path.exists(train_file): + logger.error("TRAIN FILE NOT FOUND: %s ... skipping" % train_file) + return + # some languages need reduced batch size if short_name == 'de_hdt': # 'UD_German-HDT'
Warn rather than crash if a training file is missing