Columns: diff (string, lengths 65 to 26.7k); message (string, lengths 7 to 9.92k)
diff --git a/browscap/Browscap.php b/browscap/Browscap.php index <HASH>..<HASH> 100644 --- a/browscap/Browscap.php +++ b/browscap/Browscap.php @@ -680,11 +680,11 @@ class Browscap * Browscap.ini parsing class exception * * @package Browscap - * @author Jonathan Stoppani <st.jonathan@gmail.com> - * @copyright Copyright (c) 2006-2008 Jonathan Stoppani - * @license http://www.gnu.org/licenses/lgpl.html GNU Lesser General Public License - * @link http://garetjax.info/projects/browscap/ - */ + * @author Jonathan Stoppani <jonathan@stoppani.name> + * @copyright Copyright (c) 2006-2012 Jonathan Stoppani + * @version 0.7 + * @license http://www.opensource.org/licenses/MIT MIT License + * @link https://github.com/GaretJax/phpbrowscap/*/ class Browscap_Exception extends Exception {}
Updated docblock for the exception class. All docblocks now contain coherent information regarding website, copyright, license and author.
diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index <HASH>..<HASH> 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -546,7 +546,9 @@ class LoadData(SqlAlchemyMixin, BaseSalesforceApiTask): ) def _filter_out_person_account_records(self, query, model): - return query.filter(model.__table__.columns.get("IsPersonAccount") == "false") + return query.filter( + func.lower(model.__table__.columns.get("IsPersonAccount")) == "false" + ) def _generate_contact_id_map_for_person_accounts( self, contact_mapping, account_id_lookup, conn
Fix handling of Boolean in Person Account loads
diff --git a/bread/__init__.py b/bread/__init__.py index <HASH>..<HASH> 100644 --- a/bread/__init__.py +++ b/bread/__init__.py @@ -1,5 +1,5 @@ __title__ = 'bread' -__version__ = '2.1.0' +__version__ = '2.1.1' __author__ = 'Alex Rasmussen' __license__ = 'MIT' __copyright__ = 'Copyright 2015 Alex Rasmussen' diff --git a/docs/source/conf.py b/docs/source/conf.py index <HASH>..<HASH> 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -48,9 +48,9 @@ copyright = u'2013, Alex Rasmussen' # built documents. # # The short X.Y version. -version = '2.1.0' +version = '2.1.1' # The full version, including alpha/beta/rc tags. -release = '2.1.0' +release = '2.1.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup setup(name='bread', - version='2.1.0', + version='2.1.1', description='Binary format parsing made easier', url='https://github.com/alexras/bread', author='Alex Rasmussen',
Bumping version to <I>
diff --git a/Resources/public/js/controllers/portfolioController.js b/Resources/public/js/controllers/portfolioController.js index <HASH>..<HASH> 100644 --- a/Resources/public/js/controllers/portfolioController.js +++ b/Resources/public/js/controllers/portfolioController.js @@ -8,7 +8,7 @@ portfolioApp $scope.widgets = widgetsManager.widgets; }); - $scope.widgetTypes = widgetsConfig.getTypes(true); + $scope.widgetTypes = widgetsConfig.getTypes(true); $scope.assetPath = assetPath; $scope.displayComment = true; $scope.comment = "";
[PortfolioBundle] CS fix
diff --git a/clientv3/watch.go b/clientv3/watch.go index <HASH>..<HASH> 100644 --- a/clientv3/watch.go +++ b/clientv3/watch.go @@ -164,8 +164,12 @@ type watcherStream struct { } func NewWatcher(c *Client) Watcher { + return NewWatchFromWatchClient(pb.NewWatchClient(c.conn)) +} + +func NewWatchFromWatchClient(wc pb.WatchClient) Watcher { return &watcher{ - remote: pb.NewWatchClient(c.conn), + remote: wc, streams: make(map[string]*watchGrpcStream), } }
clientv3: support creating a Watch from a WatchClient
diff --git a/test/dataretriever.js b/test/dataretriever.js index <HASH>..<HASH> 100644 --- a/test/dataretriever.js +++ b/test/dataretriever.js @@ -156,6 +156,18 @@ experiment('RBAC internal modular information retrieval', () => { }); }); + test('should not allow using get with context and without callback', (done) => { + + expect(dataRetriever.get.bind(null, 'get-with-context-without-callback:x', {})).to.throw(Error); + done() + }); + + test('should not allow using get without context and without callback', (done) => { + + expect(dataRetriever.get.bind(null, 'get-with-context-without-callback:x', {})).to.throw(Error); + done() + }); + test('should return err in callback when an error is thrown (sync)', (done) => { const retriever = (source, key, context) => {
Created tests to try different combinations of missing callback parameter on get
diff --git a/tests/src/Registry/RegistryTest.php b/tests/src/Registry/RegistryTest.php index <HASH>..<HASH> 100644 --- a/tests/src/Registry/RegistryTest.php +++ b/tests/src/Registry/RegistryTest.php @@ -63,6 +63,8 @@ class RegistryTest extends Bluz\Tests\TestCase */ public function testSetGet() { + $this->assertNull($this->registry->foo); + $this->registry->foo = 'baz'; $this->assertEquals('baz', $this->registry->foo);
Added tests for Bluz\Registry package
diff --git a/Event.php b/Event.php index <HASH>..<HASH> 100644 --- a/Event.php +++ b/Event.php @@ -38,7 +38,7 @@ class Event { if (!is_array($json)) { - $json = json_decode($json); + $json = json_decode($json, true); } $this->json = $json;
Fatal error: Cannot use object of type stdClass as array
diff --git a/lib/flapjack/gateways/pagerduty.rb b/lib/flapjack/gateways/pagerduty.rb index <HASH>..<HASH> 100644 --- a/lib/flapjack/gateways/pagerduty.rb +++ b/lib/flapjack/gateways/pagerduty.rb @@ -37,7 +37,7 @@ module Flapjack def start @logger.info("starting") - while not test_pagerduty_connection do + while not test_pagerduty_connection and not @should_quit do @logger.error("Can't connect to the pagerduty API, retrying after 10 seconds") EM::Synchrony.sleep(10) end
don't block quit if pagerduty has never connected
diff --git a/src/components/MegadraftEditor.js b/src/components/MegadraftEditor.js index <HASH>..<HASH> 100644 --- a/src/components/MegadraftEditor.js +++ b/src/components/MegadraftEditor.js @@ -67,7 +67,6 @@ export default class MegadraftEditor extends Component { }; this.onChange = this.onChange.bind(this); - this.onTab = this.onTab.bind(this); this.mediaBlockRenderer = this.mediaBlockRenderer.bind(this); @@ -158,12 +157,6 @@ export default class MegadraftEditor extends Component { return getDefaultKeyBinding(e); } - onTab(event) { - if (this.props.onTab) { - this.props.onTab(event); - } - } - handleKeyCommand(command) { // external key bindings if (this.keyBindings.length) { @@ -558,7 +551,6 @@ export default class MegadraftEditor extends Component { readOnly={this.state.readOnly} blockRendererFn={this.mediaBlockRenderer} blockStyleFn={this.props.blockStyleFn || this.blockStyleFn} - onTab={this.onTab} handleKeyCommand={this.handleKeyCommand} handleReturn={this.props.handleReturn || this.handleReturn} keyBindingFn={this.externalKeyBindings}
refactor: remove deprecated method (#<I>)
diff --git a/hawtio-maven-indexer/src/main/java/io/hawt/maven/indexer/MavenIndexerFacade.java b/hawtio-maven-indexer/src/main/java/io/hawt/maven/indexer/MavenIndexerFacade.java index <HASH>..<HASH> 100644 --- a/hawtio-maven-indexer/src/main/java/io/hawt/maven/indexer/MavenIndexerFacade.java +++ b/hawtio-maven-indexer/src/main/java/io/hawt/maven/indexer/MavenIndexerFacade.java @@ -73,8 +73,7 @@ public class MavenIndexerFacade extends MBeanSupport implements MavenIndexerFaca private boolean updateIndexOnStartup = true; private int maximumIndexersPerMachine = 1000; private String[] repositories = { - // TODO seems there is no maven index any more here :( - // "http://repo.fusesource.com/nexus/content/repositories/releases@id=fusesource.release.repo", + "http://repository.jboss.org/nexus/content/repositories/ea@id=ea.jboss..release.repo", "http://repo1.maven.org/maven2@central" }; private String cacheDirName;
added back the Fuse EA repo
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ except ImportError: setup( name='threadloop', - version='0.3.0', + version='0.3.1.dev0', author='Grayson Koonce', author_email='breerly@gmail.com', description='Tornado IOLoop Backed Concurrent Futures',
Bump to <I>.dev0
diff --git a/go/pools/id_pool.go b/go/pools/id_pool.go index <HASH>..<HASH> 100644 --- a/go/pools/id_pool.go +++ b/go/pools/id_pool.go @@ -25,7 +25,7 @@ import ( // contains any duplicates. The IDs start at 1 and increase without bound, but // will never be larger than the peak number of concurrent uses. // -// IDPool's Get() and Set() methods can be used concurrently. +// IDPool's Get() and Put() methods can be used concurrently. type IDPool struct { sync.Mutex
doc: fix comment for IDPool
diff --git a/js/language/de-DE.js b/js/language/de-DE.js index <HASH>..<HASH> 100644 --- a/js/language/de-DE.js +++ b/js/language/de-DE.js @@ -60,13 +60,18 @@ window.calendar_languages['de-DE'] = { holidays: { '01-01': 'Neujahr', + '06-01': 'Heilige Drei Könige', + 'easter-3': 'Gründonnerstag', 'easter-2': 'Karfreitag', + 'easter': 'Ostersonntag', 'easter+1': 'Ostermontag', - '01-05': 'Erster Mai', + '01-05': 'Tag der Arbeit', 'easter+39': 'Himmelfahrt', 'easter+49': 'Pfingstsonntag', 'easter+50': 'Pfingstmontag', + '15-08': 'Mariä Himmelfahrt', '03-10': 'Tag der Deutschen Einheit', + '01-11': 'Allerheiligen', '25-12': 'Erster Weihnachtsfeiertag', '26-12': 'Zweiter Weihnachtsfeiertag', }
Updated German holidays (copied some from de-AT)
diff --git a/epab/utils/_pipenv.py b/epab/utils/_pipenv.py index <HASH>..<HASH> 100644 --- a/epab/utils/_pipenv.py +++ b/epab/utils/_pipenv.py @@ -56,4 +56,4 @@ def write_reqs(ctx, auto_commit: bool): files_to_add = ['Pipfile', 'Pipfile.lock', 'requirements.txt', 'requirements-dev.txt'] repo_commit( - ctx, 'chg: dev: update requirements [auto] [skip ci]', files_to_add=files_to_add) + ctx, 'chg: dev: update requirements [auto]', files_to_add=files_to_add)
chg: reqs update should not skip ci
diff --git a/src/setup.py b/src/setup.py index <HASH>..<HASH> 100644 --- a/src/setup.py +++ b/src/setup.py @@ -38,7 +38,7 @@ setup(name='supy', 'f90nml', 'matplotlib', 'seaborn', - 'supy_driver>=2018b15' # a separate f2py-based driver + 'supy_driver>=2018b16' # a separate f2py-based driver ], include_package_data=True, test_suite='nose.collector', diff --git a/src/supy/version.py b/src/supy/version.py index <HASH>..<HASH> 100644 --- a/src/supy/version.py +++ b/src/supy/version.py @@ -2,7 +2,7 @@ ver_milestone = 2018 ver_major = 12 -ver_minor = 15 +ver_minor = 21 ver_remark = '' __version__ = '{ver_milestone}.{ver_major}.{ver_minor}{ver_remark}'.format( ver_milestone=ver_milestone,
loosened python requirement to <I>
diff --git a/lib/runcible/version.rb b/lib/runcible/version.rb index <HASH>..<HASH> 100644 --- a/lib/runcible/version.rb +++ b/lib/runcible/version.rb @@ -1,3 +1,3 @@ module Runcible - VERSION = '1.2.0' + VERSION = '1.3.0' end
Bumped the version for docker tag changes
diff --git a/github-updater.php b/github-updater.php index <HASH>..<HASH> 100644 --- a/github-updater.php +++ b/github-updater.php @@ -12,7 +12,7 @@ Plugin Name: GitHub Updater Plugin URI: https://github.com/afragen/github-updater Description: A plugin to automatically update GitHub, Bitbucket or GitLab hosted plugins and themes. It also allows for remote installation of plugins or themes into WordPress. -Version: 5.2.0.7 +Version: 5.3.0 Author: Andy Fragen License: GNU General Public License v2 License URI: http://www.gnu.org/licenses/gpl-2.0.html
I'm very thankful for all the people putting their trust in my code.
diff --git a/chui/chocolatechip.js b/chui/chocolatechip.js index <HASH>..<HASH> 100644 --- a/chui/chocolatechip.js +++ b/chui/chocolatechip.js @@ -53,12 +53,12 @@ Version 1.3.6 return O; } else { Object.keys(P).forEach(function(p) { - var enumberale = iterable || false; + var enumerable = iterable || false; if (P.hasOwnProperty(p)) { Object.defineProperty(O, p, { value: P[p], writable: true, - enumerable: enumberale, + enumerable: enumerable, configurable: true }); }
Corrected typo of enumerable.
diff --git a/src/js/form.js b/src/js/form.js index <HASH>..<HASH> 100644 --- a/src/js/form.js +++ b/src/js/form.js @@ -121,7 +121,7 @@ ch.form = function(conf) { // Doc: http://wiki.ml.com/display/ux/Mensajes+de+error if (childrenError[0].element.tagName === "DIV") { $(childrenError[0].element).find("input:first").focus(); - } else { + } else if (childrenError[0].element.type !== "hidden") { childrenError[0].element.focus(); } } else {
#<I> Watchers: When the first error is from a hidden input, it's doing focus()
diff --git a/src/instance/methods.js b/src/instance/methods.js index <HASH>..<HASH> 100644 --- a/src/instance/methods.js +++ b/src/instance/methods.js @@ -39,14 +39,6 @@ Moon.prototype.get = function(key) { } /** -* Calls a method -* @param {String} method -*/ -Moon.prototype.method = function(method) { - this.$methods[method](); -} - -/** * Destroys Moon Instance */ Moon.prototype.destroy = function() {
remove `method` function used to call a method
diff --git a/Tests/Auth/OpenID/Server.php b/Tests/Auth/OpenID/Server.php index <HASH>..<HASH> 100644 --- a/Tests/Auth/OpenID/Server.php +++ b/Tests/Auth/OpenID/Server.php @@ -74,4 +74,19 @@ class Tests_Auth_OpenID_Server extends PHPUnit_TestCase { $this->assertEquals(Auth_OpenID_DO_ABOUT, $status); } + + function test_postError() + { + $args = array( + 'openid.mode' => 'pandadance', + 'openid.identity' => $this->id_url, + ); + + list($status, $info) = $this->server->getOpenIDResponse( + $this->noauth, 'POST', $args); + + $this->assertEquals(Auth_OpenID_REMOTE_ERROR, $status); + $resultArgs = Auth_OpenID_KVForm::kvToArray($info); + $this->assertTrue(array_key_exists('error', $resultArgs)); + } }
[project @ Added bad mode for post server test]
diff --git a/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/DatabaseTest.java b/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/DatabaseTest.java index <HASH>..<HASH> 100644 --- a/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/DatabaseTest.java +++ b/android/CouchbaseLite/src/androidTest/java/com/couchbase/lite/DatabaseTest.java @@ -1487,12 +1487,12 @@ public class DatabaseTest extends BaseTest { @Override public void run() { // just create 100 documents - for (int i = 0; i < 1000; i++) { + for (int i = 0; i < 100; i++) { Document doc = new Document(); // each doc has 10 items doc.setInt("index", i); - for (int j = 0; j < 9; j++) + for (int j = 0; j < 10; j++) doc.setInt("item_" + j, j); try {
to avoid `Excessive JNI global references` error with Android Emulator API <I>/<I>
diff --git a/modin/pandas/series.py b/modin/pandas/series.py index <HASH>..<HASH> 100644 --- a/modin/pandas/series.py +++ b/modin/pandas/series.py @@ -323,6 +323,9 @@ class Series(BasePandasDataset): self._create_or_update_from_compiler( self._query_compiler.setitem(1, key, value), inplace=True ) + # Propagate changes back to parent so that column in dataframe had the same contents + if self._parent is not None: + self._parent[self.name] = self def __sub__(self, right): return self.sub(right) diff --git a/modin/pandas/test/test_dataframe.py b/modin/pandas/test/test_dataframe.py index <HASH>..<HASH> 100644 --- a/modin/pandas/test/test_dataframe.py +++ b/modin/pandas/test/test_dataframe.py @@ -5111,6 +5111,11 @@ class TestDataFrameIndexing: df_equals(modin_df, pandas_df) + modin_df[modin_df.columns[0]][modin_df.index[0]] = 12345 + pandas_df[pandas_df.columns[0]][pandas_df.index[0]] = 12345 + + df_equals(modin_df, pandas_df) + def test_setitem_on_empty_df(self): columns = ["id", "max_speed", "health"] modin_df = pd.DataFrame(columns=columns)
Changes in a column are propagated to the dataframe if the column belongs to one. Fixed bug #<I>.
diff --git a/airflow/gcp/hooks/dataflow.py b/airflow/gcp/hooks/dataflow.py index <HASH>..<HASH> 100644 --- a/airflow/gcp/hooks/dataflow.py +++ b/airflow/gcp/hooks/dataflow.py @@ -259,7 +259,7 @@ class _DataflowJobsController(LoggingMixin): if not self._jobs: self._refresh_jobs() if not self._jobs: - raise ValueError("Could nit read _jobs") + raise ValueError("Could not read _jobs") return self._jobs
[AIRFLOW-XXXX] Fix typo in error message when getting Dataflow jobs (#<I>)
diff --git a/src/PhpGitHooks/Module/Git/Service/PreCommitTool.php b/src/PhpGitHooks/Module/Git/Service/PreCommitTool.php index <HASH>..<HASH> 100644 --- a/src/PhpGitHooks/Module/Git/Service/PreCommitTool.php +++ b/src/PhpGitHooks/Module/Git/Service/PreCommitTool.php @@ -154,15 +154,15 @@ class PreCommitTool $configurationData->getErrorMessage() ) ); - } - if (true === $configurationData->isPhpunitStrictCoverage()) { - $this->commandBus->handle( - new StrictCoverageCommand( - $configurationData->getMinimum(), - $configurationData->getErrorMessage() - ) - ); + if (true === $configurationData->isPhpunitStrictCoverage()) { + $this->commandBus->handle( + new StrictCoverageCommand( + $configurationData->getMinimum(), + $configurationData->getErrorMessage() + ) + ); + } } } }
Execute strict coverage command if phpunit is enabled
diff --git a/simple_history/tests/tests/test_admin.py b/simple_history/tests/tests/test_admin.py index <HASH>..<HASH> 100644 --- a/simple_history/tests/tests/test_admin.py +++ b/simple_history/tests/tests/test_admin.py @@ -185,6 +185,19 @@ class AdminSiteTest(WebTest): self.assertEqual(historical_poll.history_user, self.user, "Middleware should make the request available to " "retrieve history_user.") + + def test_middleware_anonymous_user(self): + overridden_settings = { + 'MIDDLEWARE_CLASSES': + settings.MIDDLEWARE_CLASSES + + ['simple_history.middleware.HistoryRequestMiddleware'], + } + with override_settings(**overridden_settings): + poll = Poll.objects.create(question="why?", pub_date=today) + historical_poll = poll.history.all()[0] + self.assertEqual(historical_poll.history_user, None, + "Middleware request user should be able to " + "be anonymous.") def test_other_admin(self): """Test non-default admin instances.
Update test_admin.py: added test for allowing an anonymous user when using HistoryRequestMiddleware.
diff --git a/src/pyclts/models.py b/src/pyclts/models.py index <HASH>..<HASH> 100644 --- a/src/pyclts/models.py +++ b/src/pyclts/models.py @@ -20,7 +20,22 @@ __all__ = [ 'Diphthong', 'Cluster', 'UnknownSound'] -EXCLUDE_FEATURES = ['apical', 'laminal', 'ejective'] +EXCLUDE_FEATURES = [ + 'apical', + 'laminal', + 'ejective', + 'with_falling_tone', + 'with_extra_low_tone', + 'with_extra-high_tone', + 'with_falling_tone', + 'with_low_tone', + 'with_global_fall', + 'with_global_rise', + 'with_high_tone', + 'with_mid_tone', + 'with_rising_tone', + 'with_upstep' +] def is_valid_sound(sound, ts):
Implemented change by @lingulist in <URL>
diff --git a/tests/Unit/Drivers/TextDriverTest.php b/tests/Unit/Drivers/TextDriverTest.php index <HASH>..<HASH> 100644 --- a/tests/Unit/Drivers/TextDriverTest.php +++ b/tests/Unit/Drivers/TextDriverTest.php @@ -29,7 +29,7 @@ EOF)); } /** @test */ - public function it_can_serialize_when_given_windows_line_endings() + public function it_can_serialize_when_given_OS_dependant_line_endings() { $driver = new TextDriver();
adjust test name; also hope this retriggers tests and fixes GitHub
diff --git a/openquake/commonlib/nrml.py b/openquake/commonlib/nrml.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/nrml.py +++ b/openquake/commonlib/nrml.py @@ -136,7 +136,7 @@ def node_to_nrml(node, output=sys.stdout): Convert a node into a NRML file. output must be a file object open in write mode. If you want to perform a consistency check, open it in read-write mode, then it will - be read after creation and checked against the NRML schema. + be read after creation and validated. :params node: a Node object :params output: a file-like object in write or read-write mode @@ -148,7 +148,7 @@ def node_to_nrml(node, output=sys.stdout): node_to_xml(root, output) if hasattr(output, 'mode') and '+' in output.mode: # read-write mode output.seek(0) - # node_from_nrml(output) # validate the written file + node_from_nrml(output) # validate the written file if __name__ == '__main__':
I restore validation after writing in node_to_nrml
diff --git a/jstore.php b/jstore.php index <HASH>..<HASH> 100644 --- a/jstore.php +++ b/jstore.php @@ -40,6 +40,9 @@ class jstore public function admin($key){ $default = $this->get($key)->toArray(); + foreach($default as $arraykey => $entry){ + $default[$arraykey] = json_encode($entry); + } include('admintemplate.php'); } }
Pass defaults to admin template already JSON-encoded for direct adding to the schema
diff --git a/dpark/moosefs/__init__.py b/dpark/moosefs/__init__.py index <HASH>..<HASH> 100644 --- a/dpark/moosefs/__init__.py +++ b/dpark/moosefs/__init__.py @@ -136,9 +136,6 @@ class File(object): self.master = master self.cscache = {} - def __len__(self): - return (self.length - 1) / CHUNKSIZE + 1 - def get_chunk(self, i): chunk = self.cscache.get(i) if not chunk: @@ -148,8 +145,9 @@ class File(object): def locs(self, i=None): if i is None: + n = (self.length - 1) / CHUNKSIZE + 1 return [[host for host, _ in self.get_chunk(i).addrs] - for i in range(len(self))] + for i in range(n)] return [host for host, _ in self.get_chunk(i).addrs]
bugfix: File is an iterator, should not have __len__
diff --git a/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java b/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java index <HASH>..<HASH> 100644 --- a/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java +++ b/oauth2/oauth2-client/src/main/java/org/springframework/security/oauth2/client/web/server/OAuth2AuthorizationCodeGrantWebFilter.java @@ -124,7 +124,7 @@ public class OAuth2AuthorizationCodeGrantWebFilter implements WebFilter { Assert.notNull(authorizedClientRepository, "authorizedClientRepository cannot be null"); this.authenticationManager = authenticationManager; this.authorizedClientRepository = authorizedClientRepository; - this.requiresAuthenticationMatcher = new PathPatternParserServerWebExchangeMatcher("/authorize/oauth2/code/{registrationId}"); + this.requiresAuthenticationMatcher = new PathPatternParserServerWebExchangeMatcher("/{action}/oauth2/code/{registrationId}"); this.authenticationConverter = authenticationConverter; this.authenticationSuccessHandler = new RedirectServerAuthenticationSuccessHandler(); this.authenticationFailureHandler = (webFilterExchange, exception) -> Mono.error(exception);
Fix OAuth2AuthorizationCodeGrantWebFilter to work with /{action}/ Issue: gh-<I>
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ install_requires = [ 'IDUtils~=0.0,>=0.2.4', 'autosemver~=0.0,>=0.5.1', 'dojson~=1.0,>=1.3.1', - 'inspire-schemas~=41.0,>=41.0.0', + 'inspire-schemas~=42.0,>=42.0.0', 'langdetect~=1.0,>=1.0.7', 'pycountry~=17.0,>=17.5.4', ]
setup: bump inspire-schemas to version ~<I>
diff --git a/unitest-restful.py b/unitest-restful.py index <HASH>..<HASH> 100755 --- a/unitest-restful.py +++ b/unitest-restful.py @@ -112,7 +112,7 @@ class TestGlances(unittest.TestCase): req = requests.get("%s/%s/%s" % (URL, method, i)) self.assertTrue(req.ok) self.assertIsInstance(req.json(), dict) - print req.json()[i] + print(req.json()[i]) self.assertIsInstance(req.json()[i], numbers.Number) def test_005_values(self):
Correct unitest RESTful JSON to be Python 3 compatible
diff --git a/libcontainer/specconv/spec_linux.go b/libcontainer/specconv/spec_linux.go index <HASH>..<HASH> 100644 --- a/libcontainer/specconv/spec_linux.go +++ b/libcontainer/specconv/spec_linux.go @@ -325,7 +325,7 @@ func createCgroupConfig(opts *CreateOpts) (*configs.Cgroup, error) { // for e.g. "system.slice:docker:1234" parts := strings.Split(myCgroupPath, ":") if len(parts) != 3 { - return nil, fmt.Errorf("expected cgroupsPath to be of format \"slice:prefix:name\" for systemd cgroups") + return nil, fmt.Errorf("expected cgroupsPath to be of format \"slice:prefix:name\" for systemd cgroups, got %q instead", myCgroupPath) } c.Parent = parts[0] c.ScopePrefix = parts[1]
Add cgroup name to error message. More information should help troubleshoot an issue when this error occurs.
diff --git a/test/index.js b/test/index.js index <HASH>..<HASH> 100644 --- a/test/index.js +++ b/test/index.js @@ -659,7 +659,7 @@ function testRenderOutput() { } function testDefaultRenderImpl() { - var ht = new (Hogan.Template || HoganTemplate)(); + var ht = new Hogan.Template(); is(ht.render() === '', true, 'default renderImpl returns an array.'); }
Adjust test to reflect new API consistency.
diff --git a/timepiece/views.py b/timepiece/views.py index <HASH>..<HASH> 100644 --- a/timepiece/views.py +++ b/timepiece/views.py @@ -113,9 +113,11 @@ def view_entries(request): user=request.user, end_time__isnull=True, ) - for entry in my_active_entries: - active_hours = get_active_hours(entry) - current_total += active_hours + + if current_total: + current_total += sum([get_active_hours(entry) \ + for entry in my_active_entries]) or 0 + # temporarily disabled until the allocations represent accurate goals # -TM 6/27 allocations = []
refs #<I> - Updated total count loop
diff --git a/lib/puppet-lint/bin.rb b/lib/puppet-lint/bin.rb index <HASH>..<HASH> 100644 --- a/lib/puppet-lint/bin.rb +++ b/lib/puppet-lint/bin.rb @@ -76,6 +76,13 @@ class PuppetLint::Bin opts.separator "" opts.separator " Disable checks:" + opts.on('--only-check CHECKS', 'Provide a comma seperated list of checks that should be run') do |check_list| + enable_checks = check_list.split(',').map { |check| check.to_sym } + (PuppetLint.configuration.checks - enable_checks).each do |check| + PuppetLint.configuration.send("disable_#{check}") + end + end + PuppetLint.configuration.checks.each do |check| opts.on("--no-#{check}-check", "Skip the #{check} check") do PuppetLint.configuration.send("disable_#{check}")
Allow specifying a list of checks that should run. This reverses the default assumption of "everything enabled"
diff --git a/public/js/clients/chrome.js b/public/js/clients/chrome.js index <HASH>..<HASH> 100644 --- a/public/js/clients/chrome.js +++ b/public/js/clients/chrome.js @@ -4,7 +4,7 @@ const { connect } = require("../lib/chrome-remote-debug-protocol"); const defer = require("../utils/defer"); const { Tab } = require("../types"); const { isEnabled, getValue } = require("../feature"); -const { networkRequest } = require("../utils/networkRequest"); +const networkRequest = require("../utils/networkRequest"); const { setupCommands, clientCommands } = require("./chrome/commands"); const { setupEvents, clientEvents, pageEvents } = require("./chrome/events");
Change the require call in the chrome client file to resolve to the `networkRequest` function.
diff --git a/nextTick.js b/nextTick.js index <HASH>..<HASH> 100644 --- a/nextTick.js +++ b/nextTick.js @@ -8,7 +8,7 @@ module.exports = (function(global) { function makeTicker(tickFn) { return function nextTick(callback) { - tickFn(callback) + tickFn(function() { callback() }) // Do not pass through arguments from setTimeout/requestAnimationFrame } } }(this))
do not pass through arguments from setTimeout/requestAnimationFrame in nextTick
diff --git a/codebird.es7.js b/codebird.es7.js index <HASH>..<HASH> 100644 --- a/codebird.es7.js +++ b/codebird.es7.js @@ -604,7 +604,7 @@ } else { key = eval(evalStr + ".push([]);") - 1; } - evalStr += `[${key}']`; + evalStr += `[${key}]`; if (j !== keys.length - 1 && eval("typeof " + evalStr) === "undefined") { eval(evalStr + " = [];"); }
Removed extra ' on line <I>. Removes SyntaxError (Unexpected string) caused when calling cb.__call() on an app-only auth request.
diff --git a/lib/actions/user.js b/lib/actions/user.js index <HASH>..<HASH> 100644 --- a/lib/actions/user.js +++ b/lib/actions/user.js @@ -298,7 +298,7 @@ export function verifyPhoneNumber (code) { } /** - * Check itinerary availability (existence) for the given monitored trip. + * Check itinerary existence for the given monitored trip. */ export function checkItineraryExistence (trip) { return async function (dispatch, getState) { @@ -315,7 +315,7 @@ export function checkItineraryExistence (trip) { if (status === 'success' && data) { dispatch(setitineraryExistence(data)) } else { - alert('Error checking the availability of your selected trip.') + alert('Error checking whether your selected trip is possible.') } } }
refactor(actions/user): Tweak comment/error msg for itinerary check.
diff --git a/lib/api/express.js b/lib/api/express.js index <HASH>..<HASH> 100644 --- a/lib/api/express.js +++ b/lib/api/express.js @@ -7,7 +7,7 @@ 'use strict'; module.exports = function exported(_) { - return function express(context, pattern, done) { + return function express(context, pattern, done) { // eslint-disable-line consistent-return var api = this; var handler = api.views.lookup(pattern); var callback = done ? done : _.noop; @@ -15,7 +15,5 @@ module.exports = function exported(_) { if (handler && _.isFunction(handler)) { return handler.call(api, context, pattern, callback); } - - return null; }; };
test: disable `consistent-return` for api#express
diff --git a/app/controllers/mailkick/subscriptions_controller.rb b/app/controllers/mailkick/subscriptions_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/mailkick/subscriptions_controller.rb +++ b/app/controllers/mailkick/subscriptions_controller.rb @@ -1,5 +1,7 @@ module Mailkick class SubscriptionsController < ActionController::Base + protect_from_forgery with: :exception + before_action :set_email def show
Added protect_from_forgery - not an issue since all requests are GET, but safer for the future
diff --git a/src/Enum/Fields/FulfillmentFields.php b/src/Enum/Fields/FulfillmentFields.php index <HASH>..<HASH> 100644 --- a/src/Enum/Fields/FulfillmentFields.php +++ b/src/Enum/Fields/FulfillmentFields.php @@ -14,6 +14,7 @@ class FulfillmentFields extends AbstractObjectEnum const TRACKING_COMPANY = 'tracking_company'; const TRACKING_NUMBERS = 'tracking_numbers'; const TRACKING_URLS = 'tracking_urls'; + const LOCATION_ID = 'location_id'; const UPDATED_AT = 'updated_at'; const VARIANT_INVENTORY_MANAGEMENT = 'variant_inventory_management'; @@ -30,6 +31,7 @@ class FulfillmentFields extends AbstractObjectEnum 'tracking_company' => 'string', 'tracking_numbers' => 'array', 'tracking_urls' => 'array', + 'location_id' => 'integer', 'updated_at' => 'DateTime', 'variant_inventory_management' => 'string' );
Add location_id to FulfillmentFields (#<I>)
diff --git a/src/adapters/pulsepoint.js b/src/adapters/pulsepoint.js index <HASH>..<HASH> 100644 --- a/src/adapters/pulsepoint.js +++ b/src/adapters/pulsepoint.js @@ -50,6 +50,10 @@ var PulsePointAdapter = function PulsePointAdapter() { bid.width = adSize[0]; bid.height = adSize[1]; bidmanager.addBidResponse(bidRequest.placementCode, bid); + } else { + var passback = bidfactory.createBid(2); + passback.bidderCode = bidRequest.bidder; + bidmanager.addBidResponse(bidRequest.placementCode, passback); } }
Pulsepoint: Registering passback
diff --git a/packages/cli/src/download-npm-package.js b/packages/cli/src/download-npm-package.js index <HASH>..<HASH> 100644 --- a/packages/cli/src/download-npm-package.js +++ b/packages/cli/src/download-npm-package.js @@ -15,7 +15,7 @@ export default async (pkg, dest) => { const tmpObj = tmp.dirSync({ unsafeCleanup: true }); // step 2: download package from npm - const result = await spawn.sync("npm", ["pack", pkg], { + const result = spawn.sync("npm", ["pack", pkg], { stdio: "ignore", cwd: tmpObj.name, });
fix: cli await on spawn bug
diff --git a/api/opentrons/robot/robot_configs.py b/api/opentrons/robot/robot_configs.py index <HASH>..<HASH> 100755 --- a/api/opentrons/robot/robot_configs.py +++ b/api/opentrons/robot/robot_configs.py @@ -110,12 +110,11 @@ def load(filename=None): def save(config, filename=None): filename = filename or environment.get_path('OT_CONFIG_FILE') - default_dict = dict(children(default._asdict())) + _default = children(default._asdict()) diff = build([ - (key, value) - for key, value in children(config._asdict()) - if default_dict[key] != value + item for item in children(config._asdict()) + if item not in _default ]) with open(filename, 'w') as file:
shorten listcomp that produces diff in save to make it more readable
diff --git a/core/engine.go b/core/engine.go index <HASH>..<HASH> 100644 --- a/core/engine.go +++ b/core/engine.go @@ -285,7 +285,9 @@ func (e *Engine) processMetrics(globalCtx context.Context, processMetricsAfterRu case <-processMetricsAfterRun: e.logger.Debug("Processing metrics and thresholds after the test run has ended...") processSamples() - e.processThresholds() + if !e.NoThresholds { + e.processThresholds() + } processMetricsAfterRun <- struct{}{} case sc := <-e.Samples:
Check if thresholds are enabled before processing them
diff --git a/server.go b/server.go index <HASH>..<HASH> 100644 --- a/server.go +++ b/server.go @@ -1125,10 +1125,17 @@ func (s *server) handleConnectPeer(msg *connectPeerMsg) { // persistent connection to the peer. srvrLog.Debugf("Connecting to %v", addr) if msg.persistent { - go s.connMgr.Connect(&connmgr.ConnReq{ + connReq := &connmgr.ConnReq{ Addr: addr, Permanent: true, - }) + } + + s.pendingConnMtx.Lock() + s.persistentConnReqs[targetPub] = append(s.persistentConnReqs[targetPub], + connReq) + s.pendingConnMtx.Unlock() + + go s.connMgr.Connect(connReq) msg.err <- nil } else { // If we're not making a persistent connection, then we'll
server: fix regression in handling persistent connection requests This commit fixes a bug that was introduced when the concurrent connection handling logic was re-written: we didn't properly add the persistent outbound connection to the persistent conn reqs map. The fix is easy: add the pending conn req to the proper map.
diff --git a/lib/Alchemy/Phrasea/Model/Entities/User.php b/lib/Alchemy/Phrasea/Model/Entities/User.php index <HASH>..<HASH> 100644 --- a/lib/Alchemy/Phrasea/Model/Entities/User.php +++ b/lib/Alchemy/Phrasea/Model/Entities/User.php @@ -418,19 +418,26 @@ class User /** * @param string $gender * + * @return $this * @throws InvalidArgumentException */ public function setGender($gender) { - if (null !== $gender && !in_array($gender, [ - self::GENDER_MISS, - self::GENDER_MR, - self::GENDER_MRS + if (null !== $gender) { + $gender = (string)$gender; + + } + + if (!in_array($gender, [ + null, + (string)self::GENDER_MISS, + (string)self::GENDER_MR, + (string)self::GENDER_MRS, ], true)) { throw new InvalidArgumentException(sprintf("Invalid gender %s.", $gender)); } - $this->gender = $gender; + $this->gender = $gender ? (int)$gender : null; return $this; }
Avoid BC break in User. Convert gender to string then back to int after strict check. The other way does not work as false is considered equal to 0 without strict mode.
diff --git a/addok/helpers/results.py b/addok/helpers/results.py index <HASH>..<HASH> 100644 --- a/addok/helpers/results.py +++ b/addok/helpers/results.py @@ -75,6 +75,8 @@ def score_by_autocomplete_distance(helper, result): def _score_by_str_distance(helper, result, scale=1.0): + if helper.lat is not None and helper.lon is not None: + scale = scale * 0.9 result.add_score('str_distance', compare_str(result.labels[0], helper.query) * scale, ceiling=1.0)
reduce str_distance when lat/lon in query
diff --git a/lib/weblib.php b/lib/weblib.php index <HASH>..<HASH> 100644 --- a/lib/weblib.php +++ b/lib/weblib.php @@ -4498,8 +4498,12 @@ function page_id_and_class(&$getid, &$getclass) { static $class = NULL; static $id = NULL; - if(empty($class) || empty($id)) { - $path = str_replace($CFG->httpswwwroot.'/', '', $ME); //Because the page could be HTTPSPAGEREQUIRED + if (empty($CFG->pagepath)) { + $CFG->pagepath = $ME; + } + + if (empty($class) || empty($id)) { + $path = str_replace($CFG->httpswwwroot.'/', '', $CFG->pagepath); //Because the page could be HTTPSPAGEREQUIRED $path = str_replace('.php', '', $path); if (substr($path, -1) == '/') { $path .= 'index'; @@ -4509,7 +4513,6 @@ function page_id_and_class(&$getid, &$getclass) { $class = 'course'; } else if (substr($path, 0, 5) == 'admin') { $id = str_replace('/', '-', $path); - $id = str_replace('admin2', 'admin', $id); $class = 'admin'; } else { $id = str_replace('/', '-', $path);
Take $CFG->pagepath into account for page_id_and_class so "weird" pages can end up with nice page classes and ids
diff --git a/tests/Objects/DumbModelTest.php b/tests/Objects/DumbModelTest.php index <HASH>..<HASH> 100644 --- a/tests/Objects/DumbModelTest.php +++ b/tests/Objects/DumbModelTest.php @@ -52,10 +52,11 @@ class DumbModelTest extends \PHPUnit_Framework_TestCase { /** * @expectedException \Thru\ActiveRecord\DatabaseLayer\TableDoesntExistException - * @expectedExceptionMessage 42S02: SQLSTATE[42S02]: Base table or view not found: 1146 Table 'active_record_test.doesntexist' doesn't exist + * @expectedExceptionMessageRegExp /42S02: SQLSTATE\[42S02\]: Base table or view not found: 1146 Table '(.*).doesntexist' doesn't exist/ */ public function testTableExistsGotcha(){ $result = DumbModel::queryOne("SELECT * FROM doesntexist", 'Thru\ActiveRecord\Test\Models\NotStdClass'); } } +
Make test regexp a bit less picky.
diff --git a/openupgradelib/openupgrade_120.py b/openupgradelib/openupgrade_120.py index <HASH>..<HASH> 100644 --- a/openupgradelib/openupgrade_120.py +++ b/openupgradelib/openupgrade_120.py @@ -362,7 +362,7 @@ def _convert_field_bootstrap_3to4_orm(env, model_name, field_name, update_field_multilang( records, field_name, - lambda old, *a, **k: convert_field_bootstrap_3to4(old), + lambda old, *a, **k: convert_string_bootstrap_3to4(old), )
[FIX] typo in <I> tools Fix typo in openupgrade_<I> in `_convert_field_bootstrap_3to4_orm` method. It has to call the string conversion.
diff --git a/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py b/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py index <HASH>..<HASH> 100644 --- a/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py +++ b/virtualchain/lib/blockchain/bitcoin_blockchain/multisig.py @@ -129,7 +129,7 @@ def make_multisig_segwit_info( m, pks ): # 1 pubkey means p2wpkh key_hash = hashing.bin_hash160(pubs[0].decode('hex')).encode('hex') - script = '0014' + key_hash + script = '160014' + key_hash addr = btc_make_p2sh_address(script) else:
script for singlesig segwit should start with 0x<I>
diff --git a/src/main/java/org/osgl/mvc/result/RenderBinary.java b/src/main/java/org/osgl/mvc/result/RenderBinary.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/osgl/mvc/result/RenderBinary.java +++ b/src/main/java/org/osgl/mvc/result/RenderBinary.java @@ -168,7 +168,7 @@ public class RenderBinary extends Result { * readable file to send back */ public RenderBinary(File file) { - this(file, file.getName(), true); + this(file, file.getName(), false); } /**
RenderBinary(File) shall default to attachment disposition
diff --git a/manifest.php b/manifest.php index <HASH>..<HASH> 100755 --- a/manifest.php +++ b/manifest.php @@ -45,10 +45,7 @@ return array( array('type' => 'CheckFileSystemComponent', 'value' => array('id' => 'fs_taoSubjects_includes', 'location' => 'taoSubjects/includes', 'rights' => 'r')) ) ), - 'managementRole' => 'http://www.tao.lu/Ontologies/TAOSubject.rdf#SubjectsManagerRole', - 'optimizableClasses' => array( - 'http://www.tao.lu/Ontologies/TAOSubject.rdf#Subject' - ), + 'managementRole' => 'http://www.tao.lu/Ontologies/TAOSubject.rdf#SubjectsManagerRole', 'constants' => array( # actions directory "DIR_ACTIONS" => $extpath."actions".DIRECTORY_SEPARATOR,
The whole User class hierarchy is considered optimizable by the generis extension. No need to add the Test Taker class independently. git-svn-id: <URL>
diff --git a/airflow/hooks/druid_hook.py b/airflow/hooks/druid_hook.py index <HASH>..<HASH> 100644 --- a/airflow/hooks/druid_hook.py +++ b/airflow/hooks/druid_hook.py @@ -85,8 +85,6 @@ class DruidHook(BaseHook): self.log.info("Job still running for %s seconds...", sec) - sec = sec + 1 - if self.max_ingestion_time and sec > self.max_ingestion_time: # ensure that the job gets killed if the max ingestion time is exceeded requests.post("{0}/{1}/shutdown".format(url, druid_task_id)) @@ -95,6 +93,8 @@ class DruidHook(BaseHook): time.sleep(self.timeout) + sec = sec + self.timeout + status = req_status.json()['status']['status'] if status == 'RUNNING': running = True
[AIRFLOW-<I>] DruidHook: time check is wrong (#<I>)
diff --git a/lib/airbrake-ruby/filters/gem_root_filter.rb b/lib/airbrake-ruby/filters/gem_root_filter.rb index <HASH>..<HASH> 100644 --- a/lib/airbrake-ruby/filters/gem_root_filter.rb +++ b/lib/airbrake-ruby/filters/gem_root_filter.rb @@ -24,7 +24,7 @@ module Airbrake # If the frame is unparseable, then 'file' is nil, thus nothing to # filter (all frame's data is in 'function' instead). next unless (file = frame[:file]) - file.sub!(/\A#{gem_path}/, GEM_ROOT_LABEL) + frame[:file] = file.sub(/\A#{gem_path}/, GEM_ROOT_LABEL) end end end
filters/gem_root: do not mutate file to avoid side effects. Mutating `file` with `GEM_ROOT` causes problems with code hunks. They rely on the pristine value.
diff --git a/src/server/cmd/pachctl/cmd/cmd.go b/src/server/cmd/pachctl/cmd/cmd.go index <HASH>..<HASH> 100644 --- a/src/server/cmd/pachctl/cmd/cmd.go +++ b/src/server/cmd/pachctl/cmd/cmd.go @@ -175,7 +175,7 @@ kubectl %v port-forward "$pod" %d:8081 } portForward.Flags().IntVarP(&port, "port", "p", 30650, "The local port to bind to.") portForward.Flags().IntVarP(&uiPort, "ui-port", "u", 38080, "The local port to bind to.") - portForward.Flags().IntVarP(&uiWebsocketPort, "proxy-port", "x", 32082, "The local port to bind to.") + portForward.Flags().IntVarP(&uiWebsocketPort, "proxy-port", "x", 32081, "The local port to bind to.") portForward.Flags().StringVarP(&kubeCtlFlags, "kubectlflags", "k", "", "Any kubectl flags to proxy, e.g. --kubectlflags='--kubeconfig /some/path/kubeconfig'") rootCmd.AddCommand(version)
Fix websocket port forward port <I>-><I> Makes it consistent with internal/external port exposed by container (<I>;<I>)
diff --git a/spyderlib/app/spyder.py b/spyderlib/app/spyder.py index <HASH>..<HASH> 100644 --- a/spyderlib/app/spyder.py +++ b/spyderlib/app/spyder.py @@ -1020,6 +1020,7 @@ class MainWindow(QMainWindow): def add_ipm_action(text, path): """Add installed Python module doc action to help submenu""" # QAction.triggered works differently for PySide and PyQt + path = file_uri(path) if not API == 'pyside': slot=lambda _checked, path=path: programs.start_file(path) else:
Fix little removal after PR #<I> [ci skip]
diff --git a/local_modules/cubemap-to-octmap/test.js b/local_modules/cubemap-to-octmap/test.js index <HASH>..<HASH> 100644 --- a/local_modules/cubemap-to-octmap/test.js +++ b/local_modules/cubemap-to-octmap/test.js @@ -11,8 +11,8 @@ mediump vec2 octahedralProjection(mediump vec3 dir) { } */ -var Vec3 = require('pex-math/Vec3') -var Vec2 = require('pex-math/Vec2') +var Vec3 = require('pex-math/vec3') +var Vec2 = require('pex-math/vec2') var abs = Math.abs var random = Math.random
Fix pex-math casing in octomap test
diff --git a/lib/hacker_term/version.rb b/lib/hacker_term/version.rb index <HASH>..<HASH> 100644 --- a/lib/hacker_term/version.rb +++ b/lib/hacker_term/version.rb @@ -1,3 +1,3 @@ module HackerTerm - VERSION = "0.0.6" + VERSION = "0.1.0" end
Bump to <I>.
diff --git a/lib/alchemy/seeder.rb b/lib/alchemy/seeder.rb index <HASH>..<HASH> 100644 --- a/lib/alchemy/seeder.rb +++ b/lib/alchemy/seeder.rb @@ -78,8 +78,8 @@ module Alchemy def create_default_site desc "Creating default site" site = Alchemy::Site.find_or_initialize_by_host( - :name => 'Default', - :host => 'default' + :name => 'Default Site', + :host => '*' ) if site.new_record? site.save! diff --git a/lib/alchemy/upgrader.rb b/lib/alchemy/upgrader.rb index <HASH>..<HASH> 100644 --- a/lib/alchemy/upgrader.rb +++ b/lib/alchemy/upgrader.rb @@ -20,12 +20,26 @@ module Alchemy copy_new_config_file removed_richmedia_essences_notice convert_picture_storage + upgrade_to_sites display_todos end private + def upgrade_to_sites + desc "Creating default site and migrating existing languages to it" + if Site.count == 0 + Alchemy::Site.transaction do + site = Alchemy::Site.create!(host: '*', name: 'Default Site') + Alchemy::Language.update_all(site_id: site.id) + log "Done." + end + else + log "Site(s) already present.", :skip + end + end + # Creates Language model if it does not exist (Alchemy CMS prior v1.5) # Also creates missing associations between pages and languages def upgrade_to_language
Add creation of default site to alchemy:upgrade task.
diff --git a/javascript/WorkflowGridField.js b/javascript/WorkflowGridField.js index <HASH>..<HASH> 100644 --- a/javascript/WorkflowGridField.js +++ b/javascript/WorkflowGridField.js @@ -7,13 +7,6 @@ onmatch: function(e) { var ele = $(this); var row = ele.closest('tr'); - row.on('click', function(e) { - /* - * Prevent a precursor POST to gridfield record URLs (all GridFields) when clicking on target-object's - * hyperlinks, which results in a 404. - */ - e.stopPropagation(); - }); if(ele.find('.col-buttons.disabled').length) { row @@ -23,7 +16,7 @@ return (e.target.nodeName === 'A' && e.target.className.match(/edit-link/) === null); }); ele.find('a.edit-link').attr('title', ''); - } + } } }); @@ -34,6 +27,16 @@ } }); + /* + * Prevent a precursor POST to gridfield record URLs (all Pending/Submitted GridFields) + * when clicking on target-object's hyperlinks, which results in a 404. + */ + $('.ss-gridfield .ss-gridfield-item td.col-Title a').entwine({ + onclick: function(e) { + e.stopPropagation(); + } + }); + }); }(jQuery));
FIX: Fixes regression in <I>eaf1d. Incorrect disabling JS logic order.
diff --git a/lib/determine-basal/determine-basal.js b/lib/determine-basal/determine-basal.js index <HASH>..<HASH> 100644 --- a/lib/determine-basal/determine-basal.js +++ b/lib/determine-basal/determine-basal.js @@ -210,8 +210,9 @@ var determine_basal = function determine_basal(glucose_status, currenttemp, iob_ iobArray.forEach(function(iobTick) { //console.error(iobTick); predBGI = Math.round(( -iobTick.activity * sens * 5 )*100)/100; - // predicted carb impact drops from current carb impact down to zero over duration of carb impact - predCI = Math.max(0, ci * ( 1 - predBGs.length/Math.max(cid,1) ) ); + // predicted carb impact drops linearly from current carb impact down to zero + // eventually accounting for all carbs (if they can be absorbed over DIA) + predCI = Math.max(0, ci * ( 1 - predBGs.length/Math.max(cid*2,1) ) ); predBG = predBGs[predBGs.length-1] + predBGI + predCI; console.error(predBGI, predCI, predBG); predBGs.push(predBG);
extend carb impact to account for all carbs (if they can be absorbed over DIA)
diff --git a/rb/lib/selenium/webdriver/firefox/bridge.rb b/rb/lib/selenium/webdriver/firefox/bridge.rb index <HASH>..<HASH> 100644 --- a/rb/lib/selenium/webdriver/firefox/bridge.rb +++ b/rb/lib/selenium/webdriver/firefox/bridge.rb @@ -41,9 +41,9 @@ module Selenium def quit super - @launcher.quit - nil + ensure + @launcher.quit end private
JariBakken: Make sure Firefox shutdown happens even if the RPC fails. r<I>
diff --git a/lib/services/db.js b/lib/services/db.js index <HASH>..<HASH> 100644 --- a/lib/services/db.js +++ b/lib/services/db.js @@ -199,14 +199,11 @@ _.each(['put', 'batch'], function (key) { var args = _.slice(arguments, 1), result = fn.apply(module.exports, args); - if (_.isObject(result) && _.isFunction(result.then)) { - return result.then(function (result) { - module.exports.trigger.apply(module.exports, [key].concat(args)); + return result.then(function (result) { + module.exports.trigger.apply(module.exports, [key].concat(args)); - // don't wait - return result; - }); - } - return result; + // don't wait + return result; + }); }); });
remove check if promise, because it's always a promise
diff --git a/myawis/__init__.py b/myawis/__init__.py index <HASH>..<HASH> 100644 --- a/myawis/__init__.py +++ b/myawis/__init__.py @@ -12,6 +12,11 @@ try: except ImportError: from urllib.parse import quote, urlencode +URLINFO_RESPONSE_GROUPS = ",".join( + ["RelatedLinks", "Categories", "Rank", "ContactInfo", "RankByCountry", + "UsageStats", "Speed", "Language", "OwnedDomains", "LinksInCount", + "SiteData", "AdultContent"]) + class CallAwis(object):
Add constant with UrlInfo response groups
diff --git a/th_github/my_github.py b/th_github/my_github.py index <HASH>..<HASH> 100644 --- a/th_github/my_github.py +++ b/th_github/my_github.py @@ -35,7 +35,7 @@ from th_github.models import Github logger = getLogger('django_th.trigger_happy') -cache = caches['th_github'] +cache = caches['django_th'] class ServiceGithub(ServicesMgr):
Replaced cache variable: replaced cache = caches['th_<service>'] with cache = caches['django_th']
diff --git a/switchyard/lib/packet/packet.py b/switchyard/lib/packet/packet.py index <HASH>..<HASH> 100644 --- a/switchyard/lib/packet/packet.py +++ b/switchyard/lib/packet/packet.py @@ -171,7 +171,7 @@ class Packet(object): def _checkidx(self, index): if isinstance(index, int): if index < 0: - index = len(self._headers) - index + index = len(self._headers) + index if not (0 <= index < len(self._headers)): raise IndexError("Index out of range") return index
Fix packet lib negative indexing to access header
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup setup( name='slackdown', - version='0.0.2', + version='0.0.3', description='A simple Slack message text formatting to HTML code converter.', author='Andrew Briz', author_email='briz.andrew@gmail.com',
Updated PyPI version number.
diff --git a/lib/natural/tokenizers/tokenizer_ja.js b/lib/natural/tokenizers/tokenizer_ja.js index <HASH>..<HASH> 100644 --- a/lib/natural/tokenizers/tokenizer_ja.js +++ b/lib/natural/tokenizers/tokenizer_ja.js @@ -68,10 +68,10 @@ var Tokenizer = require('./tokenizer'), */ var TokenizerJa = function() { this.chartype_ = [ - [/[一二三四五六七八九十百千万億兆]/, 'M'], - [/[一-龠〆ヵヶ]/, 'H'], - [/[ぁ-ゔ]/, 'I'], - [/[ァ-ヴー]/, 'K'], + [/[〇一二三四五六七八九十百千万億兆]/, 'M'], + [/[一-龠〆]/, 'H'], + [/[ぁ-ゖ]/, 'I'], + [/[ァ-ヶー]/, 'K'], [/[a-zA-Z]/, 'A'], [/[0-9]/, 'N'] ];
Harmonize regexp with normalizer and add missing characters
diff --git a/lib/container/docker/template.js b/lib/container/docker/template.js index <HASH>..<HASH> 100644 --- a/lib/container/docker/template.js +++ b/lib/container/docker/template.js @@ -133,6 +133,7 @@ const engine = { update.version = deployment.Version.Index; if (!update.TaskTemplate) update.TaskTemplate = {}; + update.TaskTemplate.ForceUpdate = 1; if (!update.TaskTemplate.ContainerSpec) update.TaskTemplate.ContainerSpec = {}; if (!update.TaskTemplate.ContainerSpec.Env) update.TaskTemplate.ContainerSpec.Env = []; update.TaskTemplate.ContainerSpec.Env.push('SOAJS_REDEPLOY_TRIGGER=true'); @@ -166,7 +167,7 @@ const engine = { options.params.newBuild.variables[i] = options.params.newBuild.variables[i].replace("$SOAJS_HA_NAME", "{{.Task.Name}}"); } } - + update.TaskTemplate.ForceUpdate = 1; update.TaskTemplate.ContainerSpec.Env = options.params.newBuild.variables; update.TaskTemplate.ContainerSpec.Image = options.params.newBuild.image; update.TaskTemplate.ContainerSpec.Command = options.params.newBuild.command;
added force true when updating a service
diff --git a/user/profile.php b/user/profile.php index <HASH>..<HASH> 100644 --- a/user/profile.php +++ b/user/profile.php @@ -57,7 +57,7 @@ $context = $usercontext = get_context_instance(CONTEXT_USER, $userid, MUST_EXIST if (!$currentuser && !empty($CFG->forceloginforprofiles) && - !has_capability('moodle/user:viewdetails', $context) && + !has_capability('moodle/user:viewdetails', $context) && !has_coursecontact_role($userid)) { // Course managers can be browsed at site level. If not forceloginforprofiles, allow access (bug #4366) $struser = get_string('user'); @@ -96,7 +96,11 @@ if (isguestuser()) { // Guests can never edit their profile } } - +if (has_capability('moodle/user:viewhiddendetails', $context)) { + $hiddenfields = array(); +} else { + $hiddenfields = array_flip(explode(',', $CFG->hiddenuserfields)); +} // Start setting up the page $strpublicprofile = get_string('publicprofile');
NOBUG Added missing hiddenfields var from user/profile.php
diff --git a/src/instrumentation/index.js b/src/instrumentation/index.js index <HASH>..<HASH> 100644 --- a/src/instrumentation/index.js +++ b/src/instrumentation/index.js @@ -100,6 +100,8 @@ function getRawGroupedTracesTimings (traces, groupedTraces) { logger.log('%c -- opbeat.instrumentation.getRawGroupedTracesTimings.error.relativeTraceStartLargerThanTransactionDuration', 'color: #ff0000', relativeTraceStart, transaction._start, transaction.duration(), { trace: trace, transaction: transaction }) } else if (relativeTraceStart < 0) { logger.log('%c -- opbeat.instrumentation.getRawGroupedTracesTimings.error.negativeRelativeTraceStart!', 'color: #ff0000', relativeTraceStart, trace._start, transaction._start, trace) + } else if (trace.duration() > transaction.duration()) { + logger.log('%c -- opbeat.instrumentation.getRawGroupedTracesTimings.error.traceDurationLargerThanTranscationDuration', 'color: #ff0000', trace.duration(), transaction.duration(), { trace: trace, transaction: transaction }) } else { data.push([groupIndex, relativeTraceStart, trace.duration()]) }
Don’t send traces where the duration is longer than the transaction duration
diff --git a/client/webpack/webpack.config.dev.js b/client/webpack/webpack.config.dev.js index <HASH>..<HASH> 100644 --- a/client/webpack/webpack.config.dev.js +++ b/client/webpack/webpack.config.dev.js @@ -3,6 +3,9 @@ const webpack = require('webpack'); const config = require('./webpack.config.base'); module.exports = Object.assign({}, config, { + // See http://webpack.github.io/docs/configuration.html#devtool + devtool: 'inline-source-map', + plugins: config.plugins.concat([ new webpack.DefinePlugin({ 'process.env': {
Add missing source maps in dev mode
diff --git a/ci/publishElmRelease.js b/ci/publishElmRelease.js index <HASH>..<HASH> 100644 --- a/ci/publishElmRelease.js +++ b/ci/publishElmRelease.js @@ -19,7 +19,12 @@ async function tagElmRelease(config, context) { exec(`elm-package publish`); - return true; + return { + name: 'Elm release', + url: + 'http://package.elm-lang.org/packages/cultureamp/elm-css-modules-loader/' + + elmPackageJson.version, + }; } module.exports = tagElmRelease;
fix(ci): Fix semantic-release plugin. Previously this returned true, but it should return an object with information about the release. Fixes #<I>
diff --git a/pygooglechart.py b/pygooglechart.py index <HASH>..<HASH> 100644 --- a/pygooglechart.py +++ b/pygooglechart.py @@ -1,5 +1,5 @@ """ -PyGoogleChart - A Python wrapper for the Google Chart API +PyGoogleChart - A complete Python wrapper for the Google Chart API http://pygooglechart.slowchop.com/
branched to <I>
diff --git a/salt/modules/ipset.py b/salt/modules/ipset.py index <HASH>..<HASH> 100644 --- a/salt/modules/ipset.py +++ b/salt/modules/ipset.py @@ -13,7 +13,6 @@ if six.PY3: import ipaddress else: import salt.ext.ipaddress as ipaddress -from salt.ext.six.moves import range # pylint: disable=import-error,redefined-builtin # Fix included in py2-ipaddress for 32bit architectures
No longer using this range function, so don't import it.
diff --git a/src/Task/Task.php b/src/Task/Task.php index <HASH>..<HASH> 100644 --- a/src/Task/Task.php +++ b/src/Task/Task.php @@ -156,7 +156,7 @@ abstract class Task $this->file->set('log', $logFile); $this->setStatus(self::STATE_RUNNING); - $this->addOutput('Task started.' . "\n"); + $this->addOutput('Task ' . $this->getId() . ' started.' . "\n"); $this->doPerform(); } catch (\Exception $exception) {
Show id in task log file
diff --git a/go/test/endtoend/vtgate/main_test.go b/go/test/endtoend/vtgate/main_test.go index <HASH>..<HASH> 100644 --- a/go/test/endtoend/vtgate/main_test.go +++ b/go/test/endtoend/vtgate/main_test.go @@ -129,12 +129,6 @@ create table t7_fk( CONSTRAINT t7_fk_ibfk_1 foreign key (t7_uid) references t7_xxhash(uid) on delete set null on update cascade ) Engine=InnoDB; - -create table t8( - id8 bigint, - testId bigint, - primary key(id8) -) Engine=InnoDB; ` VSchema = ` @@ -369,14 +363,6 @@ create table t8( "name": "unicode_loose_xxhash" } ] - }, - "t8": { - "column_vindexes": [ - { - "column": "id8", - "name": "hash" - } - ] } } }`
Removed unrequired table from vtgate's main E2E test
diff --git a/src/MadeYourDay/Contao/ThemeAssistant.php b/src/MadeYourDay/Contao/ThemeAssistant.php index <HASH>..<HASH> 100644 --- a/src/MadeYourDay/Contao/ThemeAssistant.php +++ b/src/MadeYourDay/Contao/ThemeAssistant.php @@ -574,10 +574,15 @@ class ThemeAssistant extends \Backend } } elseif ($data['templateVars'][$key]['type'] === 'length') { - if ($value && isset($value['value']) && isset($value['unit'])) { - $value = (trim($value['value']) ? trim($value['value']) : '0') . trim($value['unit']); + if (is_array($value)) { + $value['value'] = empty($value['value']) ? '0' : $value['value']; + $value['unit'] = empty($value['unit']) ? '' : $value['unit']; + $value = trim($value['value']) . trim($value['unit']); } - if (! $value) { + else if (is_string($value) && trim($value)) { + $value = trim($value); + } + else { $value = '0'; } }
Fixed bug with missing unit in inputUnit fields
diff --git a/lib/collection.js b/lib/collection.js index <HASH>..<HASH> 100644 --- a/lib/collection.js +++ b/lib/collection.js @@ -15,6 +15,19 @@ var Static = require('./static') , model = require('./model'); /** + * Create a collection of sub documents. + * + * @param {Constructor} Child sub document constructor + * @param {Object} parent main document + * @return {Object} constructed sub document + * @api private + */ +function createSubCollection(Child, parent) { + Child.prototype.save = Child.prototype.save.bind(parent); + return new Child; +} + +/** * Default model definition of fossa. * * Options: @@ -41,6 +54,17 @@ module.exports = function collection(options) { * @api public */ , initialize: function initialize() { + // Initialize subdocuments. + var subdocs = options.subdocs; + if (subdocs) { + Object.keys(subdocs).forEach(function addSubDocs(key) { + this[key] = createSubCollection(subdocs[key], this); + }.bind(this)); + + // Remove the key from the options. + delete options.subdocs; + } + // Store references the collection name, database and fossa. _.extend(this, options); }
[minor] bind subdocuments to the collection; save routes to the main document
diff --git a/ospd/command/command.py b/ospd/command/command.py index <HASH>..<HASH> 100644 --- a/ospd/command/command.py +++ b/ospd/command/command.py @@ -71,7 +71,7 @@ class BaseCommand(metaclass=InitSubclassMeta): 'name': self.get_name(), 'attributes': self.get_attributes(), 'description': self.get_description(), - 'element': self.get_elements(), + 'elements': self.get_elements(), } def __repr__(self):
Fix typo in BaseCommand as_dict: it must be elements instead of element.
diff --git a/btb/benchmark/challenges/atmchallenge.py b/btb/benchmark/challenges/atmchallenge.py index <HASH>..<HASH> 100644 --- a/btb/benchmark/challenges/atmchallenge.py +++ b/btb/benchmark/challenges/atmchallenge.py @@ -113,7 +113,7 @@ class ATMChallenge(MLChallenge): loaded_challenges = [] for dataset in datasets: try: - loaded_challenges.append(cls(dataset=dataset)) + loaded_challenges.append(cls(dataset)) LOGGER.info('Dataset %s loaded', dataset) except Exception as ex: LOGGER.warn('Dataset: %s could not be loaded. Error: %s', dataset, ex) @@ -139,4 +139,4 @@ class ATMChallenge(MLChallenge): super().__init__(**kwargs) def __repr__(self): - return '{}({})'.format(self.__class__.__name__, self.dataset) + return "{}('{}')".format(self.__class__.__name__, self.dataset)
Fix lint / details.
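A minimal sketch of the repr change above, using a stripped-down stand-in class (the real ATMChallenge takes more constructor arguments, and 'some_dataset' is just a placeholder name); quoting the dataset makes the repr read as a valid constructor call:

class ATMChallenge:
    # Stand-in with only the repr logic; everything else is omitted.
    def __init__(self, dataset):
        self.dataset = dataset

    def __repr__(self):
        # Same format string as the patched method: the dataset name is quoted.
        return "{}('{}')".format(self.__class__.__name__, self.dataset)

print(repr(ATMChallenge('some_dataset')))  # ATMChallenge('some_dataset')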
diff --git a/bids/variables/io.py b/bids/variables/io.py index <HASH>..<HASH> 100644 --- a/bids/variables/io.py +++ b/bids/variables/io.py @@ -303,10 +303,8 @@ def _load_time_variables(layout, dataset=None, columns=None, scan_length=None, if regressors: sub_ents = {k: v for k, v in entities.items() if k in BASE_ENTITIES} - # .tsv.gz is not strictly BIDS compatible, but our test data is gzipped, - # so we'll be forgiving here confound_files = layout.get(suffix=['regressors', 'timeseries'], - scope=scope, extension=['.tsv', '.tsv.gz'], + scope=scope, extension='.tsv', **sub_ents) for cf in confound_files: _data = pd.read_csv(cf.path, sep='\t', na_values='n/a')
FIX: Go back to .tsv only
diff --git a/lib/gxapi/google_analytics.rb b/lib/gxapi/google_analytics.rb index <HASH>..<HASH> 100644 --- a/lib/gxapi/google_analytics.rb +++ b/lib/gxapi/google_analytics.rb @@ -100,9 +100,13 @@ module Gxapi @client ||= begin client = Google::APIClient.new # key stuff is hardcoded for now - key = Google::APIClient::KeyUtils.load_from_pkcs12( - Gxapi.config.google.private_key_path, 'notasecret' - ) + if encoded_key = Gxapi.config.google.private_key + key = OpenSSL::PKey::RSA.new Base64.decode64(encoded_key), 'notasecret' + else + key = Google::APIClient::KeyUtils.load_from_pkcs12( + Gxapi.config.google.private_key_path, 'notasecret' + ) + end client.authorization = Signet::OAuth2::Client.new( token_credential_uri: 'https://accounts.google.com/o/oauth2/token', audience: 'https://accounts.google.com/o/oauth2/token',
Allow private key to be passed as a Base<I> string
diff --git a/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go b/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go index <HASH>..<HASH> 100644 --- a/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go +++ b/upup/pkg/fi/cloudup/bootstrapchannelbuilder.go @@ -203,7 +203,13 @@ func (b *BootstrapChannelBuilder) buildManifest() (*channelsapi.Addons, map[stri if b.cluster.Spec.Networking.Weave != nil { key := "networking.weave" - version := "1.9.4" + var version string + switch { + case kv.Major == 1 && kv.Minor <= 5: + version = "1.9.3" + default: + version = "1.9.4" + } // TODO: Create configuration object for cni providers (maybe create it but orphan it)? location := key + "/v" + version + ".yaml"
Use a different Weave version for different k8s versions
diff --git a/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java b/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java index <HASH>..<HASH> 100644 --- a/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java +++ b/src/main/java/de/btobastian/javacord/entities/message/MessageDecoration.java @@ -19,20 +19,16 @@ package de.btobastian.javacord.entities.message; /** - * All decorations available in discord. + * All basic decorations available in discord. */ public enum MessageDecoration { ITALICS("*"), BOLD("**"), - BOLD_ITALICS("***"), STRIKEOUT("~~"), CODE_SIMPLE("`"), CODE_LONG("```"), - UNDERLINE("__"), - UNDERLINE_ITALICS("__*"), - UNDERLINE_BOLD("__**"), - UNDERLINE_BOLD_ITALICS("__***"); + UNDERLINE("__"); private final String prefix; private final String suffix;
MessageDecoration no longer contains combined decorations (e.g. BOLD_ITALICS)
diff --git a/thunder/series/series.py b/thunder/series/series.py index <HASH>..<HASH> 100755 --- a/thunder/series/series.py +++ b/thunder/series/series.py @@ -881,18 +881,15 @@ class Series(Data): def convolve(self, signal, mode='full'): """ - Conolve series data against another signal. + Convolve series data against another signal. Parameters ---------- signal : array Signal to convolve with (must be 1D) - var : str - Variable name if loading from a MAT file - mode : str, optional, default='full' - Mode of convolution, options are 'full', 'same', and 'same' + Mode of convolution, options are 'full', 'same', and 'valid' """ from numpy import convolve
Doc fixes in series.convolve
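The corrected docstring mirrors the modes of numpy.convolve (note the from numpy import convolve line in the same hunk); a small sketch with arbitrary arrays shows what each mode returns:

import numpy as np

a = np.array([1.0, 2.0, 3.0])
v = np.array([0.0, 1.0, 0.5])

print(np.convolve(a, v, mode='full'))   # [0.  1.  2.5 4.  1.5]  length M + N - 1
print(np.convolve(a, v, mode='same'))   # [1.  2.5 4. ]          length max(M, N)
print(np.convolve(a, v, mode='valid'))  # [2.5]                  length max(M, N) - min(M, N) + 1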
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -287,9 +287,27 @@ module.exports = function(grunt) { return good; }); - grunt.registerTask('docs', ['eslint:examples', 'clean:docs', 'jsdoc', 'makeindex']); - grunt.registerTask('build', ['eslint:lib', 'clean:dist', 'requirejs', /*'concat',*/ 'uglify']); - grunt.registerTask('publish', ['bumpversion', 'build', 'browserify', 'docs']); + grunt.registerTask('docs', [ + 'eslint:examples', + 'clean:docs', + 'jsdoc', + 'makeindex', + ]); + grunt.registerTask('build', [ + 'eslint:lib', + 'clean:dist', + 'requirejs', + /*'concat',*/ + 'uglify', + ]); + grunt.registerTask('publish', [ + 'eslint:lib', + 'eslint:examples', + 'bumpversion', + 'build', + 'browserify', + 'docs', + ]); grunt.registerTask('default', 'build'); setLicense();
run lint before bumping version
diff --git a/h5p-default-storage.class.php b/h5p-default-storage.class.php index <HASH>..<HASH> 100644 --- a/h5p-default-storage.class.php +++ b/h5p-default-storage.class.php @@ -297,13 +297,10 @@ class H5PDefaultStorage implements \H5PFileStorage { * Recursive function that makes sure the specified directory exists and * is writable. * - * TODO: Will be made private when the editor file handling is done by this - * class! - * * @param string $path * @return bool */ - public static function dirReady($path) { + private static function dirReady($path) { if (!file_exists($path)) { $parent = preg_replace("/\/[^\/]+\/?$/", '', $path); if (!self::dirReady($parent)) {
Made dir handling private to prevent abuse. h5p/h5p-moodle-plugin#<I> HFJ-<I>
diff --git a/lib/punchblock/translator/asterisk/call.rb b/lib/punchblock/translator/asterisk/call.rb index <HASH>..<HASH> 100644 --- a/lib/punchblock/translator/asterisk/call.rb +++ b/lib/punchblock/translator/asterisk/call.rb @@ -37,9 +37,9 @@ module Punchblock when 'Hangup' pb_logger.debug "Received a Hangup AMI event. Sending End event." send_pb_event Event::End.new(:reason => :hangup) - when 'AGIExec' - if component = component_with_id(ami_event['CommandId']) + when 'AsyncAGI' pb_logger.debug "Received an AsyncAGI event. Looking for matching AGICommand component." + if component = component_with_id(ami_event['CommandID']) pb_logger.debug "Found component #{component.id} for event. Forwarding event..." component.handle_ami_event! ami_event end
Calls should send AsyncAGI events to components, not AGIExec events
diff --git a/json-formatter/go/message_lookup.go b/json-formatter/go/message_lookup.go index <HASH>..<HASH> 100644 --- a/json-formatter/go/message_lookup.go +++ b/json-formatter/go/message_lookup.go @@ -1,7 +1,6 @@ package json import ( - "fmt" messages "github.com/cucumber/cucumber-messages-go/v6" ) @@ -40,10 +39,7 @@ func (self *MessageLookup) ProcessMessage(envelope *messages.Envelope) (err erro scenario := child.GetScenario() if scenario != nil { - fmt.Println("Adding scenario", scenario.Name) - fmt.Println("Scenario ID", scenario.Id) self.scenarioByID[scenario.Id] = scenario - fmt.Println("scenarios stored:", len(self.scenarioByID)) for _, step := range scenario.Steps { self.stepByID[step.Id] = step }
Remove STDOUT printing which breaks JSON output
diff --git a/assets/shiny-server.js b/assets/shiny-server.js index <HASH>..<HASH> 100644 --- a/assets/shiny-server.js +++ b/assets/shiny-server.js @@ -23,7 +23,7 @@ store = window.localStorage; whitelistStr = store["shiny.whitelist"]; if (!whitelistStr || whitelistStr === ""){ - whitelist = []; + whitelist = availableOptions; } else{ whitelist = JSON.parse(whitelistStr); } @@ -66,7 +66,8 @@ $(document).keydown(function(event){ - if (event.shiftKey && event.ctrlKey && event.keyCode == 65){ + console.log(event); + if (event.shiftKey && event.ctrlKey && event.altKey && event.keyCode == 65){ ShinyServer.toggleNetworkSelector(); } });
Changed keyboard shortcut to ctrl+shift+alt+a
diff --git a/public/app/plugins/datasource/opentsdb/datasource.js b/public/app/plugins/datasource/opentsdb/datasource.js index <HASH>..<HASH> 100644 --- a/public/app/plugins/datasource/opentsdb/datasource.js +++ b/public/app/plugins/datasource/opentsdb/datasource.js @@ -173,11 +173,7 @@ function (angular, _, kbn) { }; OpenTSDBDatasource.prototype.performAggregatorsQuery = function() { - var options = { - method: 'GET', - url: this.url + '/api/aggregators' - }; - return $http(options).then(function(result) { + return this._get('/api/aggregators', {}).then(function(result) { if (result.data instanceof Array) { return result.data.sort(); } else {
Fix for a change in the datasource object
diff --git a/src/frontend/org/voltdb/planner/AbstractParsedStmt.java b/src/frontend/org/voltdb/planner/AbstractParsedStmt.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/planner/AbstractParsedStmt.java +++ b/src/frontend/org/voltdb/planner/AbstractParsedStmt.java @@ -329,6 +329,10 @@ public abstract class AbstractParsedStmt { String type = attrs.getNamedItem("type").getNodeValue(); ExpressionType exprType = ExpressionType.get(type); AbstractExpression expr = null; + + if (exprType == ExpressionType.INVALID) { + throw new PlanningErrorException("Unsupported operation type '" + type + "'"); + } try { expr = exprType.getExpressionClass().newInstance(); } catch (Exception e) {
Provide more informative message when invalid operation is encountered during planning.