diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/spec/dummy/config/application.rb b/spec/dummy/config/application.rb index <HASH>..<HASH> 100644 --- a/spec/dummy/config/application.rb +++ b/spec/dummy/config/application.rb @@ -7,6 +7,8 @@ require "carnival" module Carnival class Application < Rails::Application + config.i18n.available_locales = :en + config.i18n.default_locale = :en console { config.console = Pry } end end
Add locale setup do application.rb in the dummy app
diff --git a/packages/react/src/components/DataTable/DataTable-story.js b/packages/react/src/components/DataTable/DataTable-story.js index <HASH>..<HASH> 100644 --- a/packages/react/src/components/DataTable/DataTable-story.js +++ b/packages/react/src/components/DataTable/DataTable-story.js @@ -349,7 +349,6 @@ export const WithCheckmarkColumns = () => { className={`la-${cell.info.header}`}> <Checkbox id={'check-' + cell.id} - checked={cell.value} hideLabel labelText="checkbox" />
docs(data-table): switch checkboxes to uncontrolled (#<I>)
diff --git a/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java b/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java index <HASH>..<HASH> 100644 --- a/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java +++ b/xprocspec-runner/src/main/java/org/daisy/maven/xproc/xprocspec/XProcSpecRunner.java @@ -243,11 +243,12 @@ public class XProcSpecRunner { */ private static Collection<File> listXProcSpecFilesRecursively(File directory) { ImmutableList.Builder<File> builder = new ImmutableList.Builder<File>(); - for (File file : directory.listFiles()) { - if (file.isDirectory()) - builder.addAll(listXProcSpecFilesRecursively(file)); - else if (file.getName().endsWith(".xprocspec")) - builder.add(file); } + if (directory.isDirectory()) + for (File file : directory.listFiles()) { + if (file.isDirectory()) + builder.addAll(listXProcSpecFilesRecursively(file)); + else if (file.getName().endsWith(".xprocspec")) + builder.add(file); } return builder.build(); }
Don't fail when src/test/xprocspec directory does not exist see issue #<I>
diff --git a/packaging/create-linux-packages.py b/packaging/create-linux-packages.py index <HASH>..<HASH> 100755 --- a/packaging/create-linux-packages.py +++ b/packaging/create-linux-packages.py @@ -10,7 +10,7 @@ create-linux-packages.py <release-version> import sys from shutil import rmtree from subprocess import run -from pathlib2 import Path +from pathlib import Path THIS_DIRECTORY = Path(__file__).absolute().parent diff --git a/packaging/upload-linux-packages.py b/packaging/upload-linux-packages.py index <HASH>..<HASH> 100755 --- a/packaging/upload-linux-packages.py +++ b/packaging/upload-linux-packages.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 import sys -from pathlib2 import Path +from pathlib import Path from subprocess import run PACKAGES = Path(__file__).absolute().parent / "out"
switch back to pathlib for the deployment scripts
diff --git a/test/system/__init__.py b/test/system/__init__.py index <HASH>..<HASH> 100644 --- a/test/system/__init__.py +++ b/test/system/__init__.py @@ -42,8 +42,7 @@ def get_client(): client = get_client() -import tempfile -_, pid_fname = tempfile.mkstemp() +pid_fname = "system_test.pid" def pid(): return int(open(pid_fname).read()) @@ -55,6 +54,11 @@ class CassandraTester(object): def setUp(self): if self.runserver: + if os.path.exists(pid_fname): + pid_path = os.path.join(root, pid_fname) + print "Unclean shutdown detected, (%s found)" % pid_path + sys.exit() + # clean out old stuff import shutil # todo get directories from conf/storage-conf.xml @@ -95,3 +99,5 @@ class CassandraTester(object): # TODO kill server with SIGKILL if it's still alive time.sleep(0.5) # TODO assert server is Truly Dead + +# vim:ai sw=4 ts=4 tw=0 et
abort system tests if previous run shutdown uncleanly Patch by eevans; reviewed by Michael Greene for CASSANDRA-<I> git-svn-id: <URL>
diff --git a/lib/dm-validations/validation_errors.rb b/lib/dm-validations/validation_errors.rb index <HASH>..<HASH> 100644 --- a/lib/dm-validations/validation_errors.rb +++ b/lib/dm-validations/validation_errors.rb @@ -84,6 +84,17 @@ module DataMapper end end + # Return validation errors for a particular field name or an empty array + # + # This method is a necessary requirement for active_model compatibility. + # + # @param [Symbol] field_name the name of the field you want an error for + # @return [Array<Array<String>>] + # array of validation errors or empty array, if there are no errors on given field + def [](field_name) + errors[field_name] ||= [] + end + # Return validation errors for a particular field_name. # # @param [Symbol] field_name the name of the field you want an error for
[dm-validations] Improved active_model compatibility With this change, dm-validations is compatible with active_model's requirements regarding the #errors and the #valid? method. The remaining aspects of of active_model compatibility are handled inside the dm-active_model plugin.
diff --git a/runtime/v2/shim/util_windows.go b/runtime/v2/shim/util_windows.go index <HASH>..<HASH> 100644 --- a/runtime/v2/shim/util_windows.go +++ b/runtime/v2/shim/util_windows.go @@ -20,6 +20,7 @@ import ( "context" "fmt" "net" + "os" "syscall" "time" @@ -48,7 +49,25 @@ func SocketAddress(ctx context.Context, id string) (string, error) { // AnonDialer returns a dialer for a npipe func AnonDialer(address string, timeout time.Duration) (net.Conn, error) { - return winio.DialPipe(address, &timeout) + var c net.Conn + var lastError error + start := time.Now() + for { + remaining := timeout - time.Now().Sub(start) + if remaining <= 0 { + lastError = errors.Errorf("timed out waiting for npipe %s", address) + break + } + c, lastError = winio.DialPipe(address, &remaining) + if lastError == nil { + break + } + if !os.IsNotExist(lastError) { + break + } + time.Sleep(10 * time.Millisecond) + } + return c, lastError } // NewSocket returns a new npipe listener
Adds retry support to Windows AnonDialer Adds retry support to AnonDialer if the pipe does not exist. This will retry up to the timeout for the pipe to exist and connect. This solves the race between the containerd-shim-* start command and the reinvocation.
diff --git a/les/vflux/client/fillset_test.go b/les/vflux/client/fillset_test.go index <HASH>..<HASH> 100644 --- a/les/vflux/client/fillset_test.go +++ b/les/vflux/client/fillset_test.go @@ -34,16 +34,20 @@ type testIter struct { } func (i *testIter) Next() bool { - i.waitCh <- struct{}{} + if _, ok := <-i.waitCh; !ok { + return false + } i.node = <-i.nodeCh - return i.node != nil + return true } func (i *testIter) Node() *enode.Node { return i.node } -func (i *testIter) Close() {} +func (i *testIter) Close() { + close(i.waitCh) +} func (i *testIter) push() { var id enode.ID @@ -53,7 +57,7 @@ func (i *testIter) push() { func (i *testIter) waiting(timeout time.Duration) bool { select { - case <-i.waitCh: + case i.waitCh <- struct{}{}: return true case <-time.After(timeout): return false
les/vflux/client: fix goroutine leak in testIter (#<I>)
diff --git a/core/src/main/java/io/neba/core/logviewer/LogfileViewerConsolePlugin.java b/core/src/main/java/io/neba/core/logviewer/LogfileViewerConsolePlugin.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/io/neba/core/logviewer/LogfileViewerConsolePlugin.java +++ b/core/src/main/java/io/neba/core/logviewer/LogfileViewerConsolePlugin.java @@ -140,7 +140,7 @@ public class LogfileViewerConsolePlugin extends AbstractWebConsolePlugin impleme StringBuilder options = new StringBuilder(1024); for (File logFile : resolveLogFiles()) { String fileIdentifier = getNormalizedFilePath(logFile); - options.append("<option value=\"").append(fileIdentifier).append("\" id=\"logfile\" ") + options.append("<option value=\"").append(fileIdentifier).append("\" ") .append("title=\"").append(fileIdentifier).append("\">") .append(logFile.getParentFile().getName()).append('/').append(logFile.getName()) .append("</option>");
Review: Removed erroneous id="..." attribute of <option>'s.
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/executor/OrderByStep.java b/core/src/main/java/com/orientechnologies/orient/core/sql/executor/OrderByStep.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/orientechnologies/orient/core/sql/executor/OrderByStep.java +++ b/core/src/main/java/com/orientechnologies/orient/core/sql/executor/OrderByStep.java @@ -110,6 +110,7 @@ public class OrderByStep extends AbstractExecutionStep { try { cachedResult.add(item); if (maxElementsAllowed >= 0 && maxElementsAllowed < cachedResult.size()) { + this.cachedResult.clear(); throw new OCommandExecutionException("Limit of allowed elements for in-heap ORDER BY in a single query exceeded (" + maxElementsAllowed + ") . You can set " + OGlobalConfiguration.QUERY_MAX_HEAP_ELEMENTS_ALLOWED_PER_OP.getKey() + " to increase this limit"); }
Improve memory management in case of query abort for exceeded limit of records in ORDER BY
diff --git a/looptools/timer.py b/looptools/timer.py index <HASH>..<HASH> 100644 --- a/looptools/timer.py +++ b/looptools/timer.py @@ -39,16 +39,22 @@ class Timer: def function_timer(*args, **kwargs): """A nested function for timing other functions.""" + # Capture start time start = time.time() + + # Execute function with arguments value = func(*args, **kwargs) + + # Capture end time end = time.time() + + # Calculate run time runtime = end - start if runtime < 60: runtime = str('sec: ' + str('{:f}'.format(runtime))) else: runtime = str('min: ' + str('{:f}'.format(runtime / 60))) - msg = '{func:30} --> {time}' - print(msg.format(func=func.__name__, time=runtime)) + print('{func:50} --> {time}'.format(func=func.__qualname__, time=runtime)) return value return function_timer diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from setuptools import setup, find_packages setup( name='looptools', - version='1.1.1', + version='1.1.2', packages=find_packages(), install_requires=[], url='https://github.com/mrstephenneal/looptools',
FIX Timer.decorator to also display the calling functions class name
diff --git a/lib/modules/apostrophe-areas/public/js/editor.js b/lib/modules/apostrophe-areas/public/js/editor.js index <HASH>..<HASH> 100644 --- a/lib/modules/apostrophe-areas/public/js/editor.js +++ b/lib/modules/apostrophe-areas/public/js/editor.js @@ -369,7 +369,7 @@ apos.define('apostrophe-area-editor', { }; self.enhanceWidgetControls = function($widget) { - $controls = $widget.find('[data-apos-widget-controls]'); + $controls = $widget.findSafe('[data-apos-widget-controls]', '[data-area]'); if (self.options.limit == 1) { $controls.addClass('apos-limit-one') }
Fix issue where 'apos-limit-one' class would be applied to all nested widget controls, even if it wasn't true.
diff --git a/src/MetaModels/DcGeneral/Data/Model.php b/src/MetaModels/DcGeneral/Data/Model.php index <HASH>..<HASH> 100644 --- a/src/MetaModels/DcGeneral/Data/Model.php +++ b/src/MetaModels/DcGeneral/Data/Model.php @@ -146,7 +146,10 @@ class Model implements ModelInterface */ public function setId($mixID) { - $this->getItem()->set('id', $mixID); + if ($this->getId() == null) { + $this->getItem()->set('id', $mixID); + $this->setMeta(static::IS_CHANGED, true); + } } /**
Mark model as changed when the id has been set.
diff --git a/holoviews/core/spaces.py b/holoviews/core/spaces.py index <HASH>..<HASH> 100644 --- a/holoviews/core/spaces.py +++ b/holoviews/core/spaces.py @@ -463,13 +463,13 @@ class DynamicMap(HoloMap): """) def __init__(self, callback, initial_items=None, **params): + super(DynamicMap, self).__init__(initial_items, callback=callback, **params) # Set source to self if not already specified - for stream in params.get('streams',[]): + for stream in self.streams: if stream.source is None: stream.source = self - super(DynamicMap, self).__init__(initial_items, callback=callback, **params) self.counter = 0 if self.callback is None: raise Exception("A suitable callback must be "
Setting stream sources in DynamicMap after setting the parameters
diff --git a/src/module-elasticsuite-catalog/Plugin/Search/RequestMapperPlugin.php b/src/module-elasticsuite-catalog/Plugin/Search/RequestMapperPlugin.php index <HASH>..<HASH> 100644 --- a/src/module-elasticsuite-catalog/Plugin/Search/RequestMapperPlugin.php +++ b/src/module-elasticsuite-catalog/Plugin/Search/RequestMapperPlugin.php @@ -234,6 +234,10 @@ class RequestMapperPlugin */ private function getCurrentCategoryId(ContainerConfigurationInterface $containerConfiguration, SearchCriteriaInterface $searchCriteria) { + if ($this->searchContext->getCurrentCategory() && $this->searchContext->getCurrentCategory()->getId()) { + return $this->searchContext->getCurrentCategory()->getId(); + } + $store = $this->storeManager->getStore($containerConfiguration->getStoreId()); $categoryId = $this->storeManager->getGroup($store->getStoreGroupId())->getRootCategoryId();
Trust any previously set current category for search context.
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ setup( author='The Graphistry Team', author_email='pygraphistry@graphistry.com', setup_requires=['numpy'], - install_requires=['numpy', 'pandas', 'requests', 'future'], + install_requires=['numpy', 'pandas', 'requests', 'future >= 0.15.0'], extras_require={ 'igraph': ['python-igraph'], 'networkx': ['networkx'],
Depend on future >= <I>. Closes #<I> (hopefully)
diff --git a/shoebox/disk_storage.py b/shoebox/disk_storage.py index <HASH>..<HASH> 100644 --- a/shoebox/disk_storage.py +++ b/shoebox/disk_storage.py @@ -83,6 +83,11 @@ class Version1(Version0): self.header_schema = "iii" self.header_size = struct.calcsize(self.header_schema) + def _encode(self, s): + if isinstance(s, unicode): + return s.encode('utf-8') + return s + def pack(self, notification, metadata): nsize = len(notification) raw_block_schema = "i%ds" % nsize @@ -91,12 +96,16 @@ class Version1(Version0): metadata_items = ["i"] # appended with N "%ds"'s metadata_values = [len(metadata) * 4] # [n]=key, [n+1]=value for key, value in metadata.iteritems(): + key = self._encode(key) + value = self._encode(value) metadata_items.append("i") metadata_items.append("i") metadata_values.append(len(key)) metadata_values.append(len(value)) for key, value in metadata.iteritems(): + key = self._encode(key) + value = self._encode(value) metadata_items.append("%ds" % len(key)) metadata_values.append(key) metadata_items.append("%ds" % len(value))
Proper unicode support for metadata. Json doesn't need it
diff --git a/lib/mongodb/collection.js b/lib/mongodb/collection.js index <HASH>..<HASH> 100644 --- a/lib/mongodb/collection.js +++ b/lib/mongodb/collection.js @@ -305,6 +305,7 @@ Collection.prototype.findAndModify = function(query, sort, update, options, call queryObject['new'] = options['new'] ? 1 : 0; queryObject['remove'] = options.remove ? 1 : 0; queryObject['upsert'] = options.upsert ? 1 : 0; + if (options.fields) queryObject.fields = options.fields; // Set up the update if it exists if(update) queryObject['update'] = update;
Added fields support for findAndModify. This allows us to have findAndModify return a subset of the fields.
diff --git a/interact.js b/interact.js index <HASH>..<HASH> 100644 --- a/interact.js +++ b/interact.js @@ -748,7 +748,7 @@ // get the most apprpriate dropzone based on DOM depth and order drop = resolveDrops(elements); - dropTarget = drop? dropzones[drop.index]: null; + dropTarget = drop? drops[drop.index]: null; if (selectorDZs.length) { var draggableElement = target._element;
Fix bug in getDrops the `dropzones` collection of dropzones was being used to get a qualifying dropzone instead of the drops passing Interactable#dropCheck. Closes #3
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -51,7 +51,7 @@ copyright = u'2012, coagulant' # The short X.Y version. version = '0.2.3' # The full version, including alpha/beta/rc tags. -release = '0.2.3dev' +release = '0.2.3' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages.
Bumped version to <I>
diff --git a/describe.js b/describe.js index <HASH>..<HASH> 100644 --- a/describe.js +++ b/describe.js @@ -77,8 +77,6 @@ try { fun.call({ expect: function(a,b) { - done = true; - clearTimeout(timer); return expect(a,b,callback,options); } });
Fixed silly timing error (expectations shouldn't clear the timeout).
diff --git a/docs/examples/example_advanced_saml11.php b/docs/examples/example_advanced_saml11.php index <HASH>..<HASH> 100644 --- a/docs/examples/example_advanced_saml11.php +++ b/docs/examples/example_advanced_saml11.php @@ -42,9 +42,13 @@ phpCAS::handleLogoutRequests(true, $cas_real_hosts); // Force CAS authentication on any page that includes this file phpCAS::forceAuthentication(); - ?> -<h2>Secure Page</h2> +<html> + <head> + <title>Advanced SAML 1.1 example</title> + </head> + <body> +<h2>Advanced SAML 1.1 example</h2> <?php require 'script_info.php' ?> Authentication succeeded for user @@ -61,8 +65,11 @@ foreach (phpCAS::getAttributes() as $key => $value) { } echo '</ol></li>'; } else { - echo '<li>', $key, ': <strong>', $value, '</strong></li>'; + echo '<li>', $key, ': <strong>', $value, '</strong></li>' . PHP_EOL; } } ?> -</ul> \ No newline at end of file +</ul> +<p><a href="?logout=">Logout</a></p> +</body> +</html> \ No newline at end of file
Polish the SAML example a bit with nicer html output and a logout button.
diff --git a/werkzeug/serving.py b/werkzeug/serving.py index <HASH>..<HASH> 100644 --- a/werkzeug/serving.py +++ b/werkzeug/serving.py @@ -69,6 +69,7 @@ class BaseRequestHandler(BaseHTTPRequestHandler, object): 'wsgi.multithread': self.server.multithread, 'wsgi.multiprocess': self.server.multiprocess, 'wsgi.run_once': False, + 'SERVER_SOFTWARE': self.server_version, 'REQUEST_METHOD': self.command, 'SCRIPT_NAME': '', 'PATH_INFO': unquote(path_info),
Server now sets the `SERVER_SOFTWARE` environment key.
diff --git a/core/src/main/java/org/sql2o/converters/AbstractDateConverter.java b/core/src/main/java/org/sql2o/converters/AbstractDateConverter.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/sql2o/converters/AbstractDateConverter.java +++ b/core/src/main/java/org/sql2o/converters/AbstractDateConverter.java @@ -35,7 +35,7 @@ public abstract class AbstractDateConverter<E extends Date> implements Converter throw new ConverterException("Cannot convert type " + val.getClass().toString() + " to java.util.Date"); } - public Timestamp toDatabaseParam(Date val) { + public Object toDatabaseParam(Date val) { if(val==null) return null; return (val instanceof Timestamp) ? (Timestamp) val
-Changed the return type to Object to be more consistent with the other Converters.
diff --git a/lib/puppet/indirector/indirection.rb b/lib/puppet/indirector/indirection.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/indirector/indirection.rb +++ b/lib/puppet/indirector/indirection.rb @@ -196,7 +196,12 @@ class Puppet::Indirector::Indirection result.expiration ||= self.expiration if result.respond_to?(:expiration) if cache? Puppet.info "Caching #{self.name} for #{request.key}" - cache.save request(:save, key, result, options) + begin + cache.save request(:save, key, result, options) + rescue => detail + Puppet.log_exception(detail) + raise detail + end end filtered = result diff --git a/spec/unit/indirector/indirection_spec.rb b/spec/unit/indirector/indirection_spec.rb index <HASH>..<HASH> 100755 --- a/spec/unit/indirector/indirection_spec.rb +++ b/spec/unit/indirector/indirection_spec.rb @@ -393,6 +393,17 @@ describe Puppet::Indirector::Indirection do @indirection.find("/my/key") end + + it "should fail if saving to the cache fails but log the exception" do + @cache.stubs(:find).returns nil + + @terminus.stubs(:find).returns(@instance) + @cache.stubs(:save).raises RuntimeError + + Puppet.expects(:log_exception) + + expect { @indirection.find("/my/key") }.to raise_error + end end end
(PUP-<I>) Log exceptions raised by saving to cache Failing to cache requests could lead to silent exits. For example if the apply application failed to cache to PuppetDB catalog terminus. This will log the exception and continue.
diff --git a/core-bundle/src/Resources/contao/library/Contao/Environment.php b/core-bundle/src/Resources/contao/library/Contao/Environment.php index <HASH>..<HASH> 100644 --- a/core-bundle/src/Resources/contao/library/Contao/Environment.php +++ b/core-bundle/src/Resources/contao/library/Contao/Environment.php @@ -548,7 +548,7 @@ class Environment } // Android tablets are not mobile (see #4150) - if ($os == 'Android' && stripos('mobile', $ua) === false) + if ($os == 'android' && stripos($ua, 'mobile') === false) { $mobile = false; }
[Core] Correctly detect Android tablets in the `Environment` class (see #<I>)
diff --git a/jchart/templatetags/jchart.py b/jchart/templatetags/jchart.py index <HASH>..<HASH> 100644 --- a/jchart/templatetags/jchart.py +++ b/jchart/templatetags/jchart.py @@ -2,7 +2,7 @@ import uuid from django import template from django.template.loader import render_to_string -from django.core.urlresolvers import reverse +from django.urls import reverse from .. import Chart
Fix deprecation notice ```.../jchart/templatetags/jchart.py:5: RemovedInDjango<I>Warning: Importing from django.core.urlresolvers is deprecated in favor of django.urls. from django.core.urlresolvers import reverse```
diff --git a/synapse/tests/test_link.py b/synapse/tests/test_link.py index <HASH>..<HASH> 100644 --- a/synapse/tests/test_link.py +++ b/synapse/tests/test_link.py @@ -73,8 +73,9 @@ class LinkTest(unittest.TestCase): self.assertEqual(link[1]['authinfo'].get('user'),'visi') self.assertEqual(link[1]['authinfo'].get('passwd'),'secret') - def test_link_ssl_basic(self): + def newp_link_ssl_basic(self): + # FIXME some kind of cert validation diffs in *py* vers killed us cafile = getTestPath('ca.pem') keyfile = getTestPath('server.key') certfile = getTestPath('server.pem')
temp disable real ssl test due to behav diffs in py vers. TODO
diff --git a/src/Options.php b/src/Options.php index <HASH>..<HASH> 100644 --- a/src/Options.php +++ b/src/Options.php @@ -30,15 +30,13 @@ class Options public function expandTargetDir($target) { - $options = $this->options; - - return rtrim(preg_replace_callback('{%(.+?)%}', function ($matches) use ($options) { + return preg_replace_callback('{%(.+?)%}', function ($matches) { $option = str_replace('_', '-', strtolower($matches[1])); - if (!isset($options[$option])) { + if (!isset($this->options[$option])) { throw new \InvalidArgumentException(sprintf('Placeholder "%s" does not exist.', $matches[1])); } - return $options[$option]; - }, $target), '/'); + return rtrim($this->options[$option], '/'); + }, $target); } }
Simplified the code of Options::expandTargetDir()
diff --git a/lib/blockscore/connection.rb b/lib/blockscore/connection.rb index <HASH>..<HASH> 100644 --- a/lib/blockscore/connection.rb +++ b/lib/blockscore/connection.rb @@ -5,12 +5,6 @@ require 'httparty' require 'blockscore/error' require 'blockscore/errors/invalid_request_error' -HEADERS = { - 'Accept' => 'application/vnd.blockscore+json;version=4', - 'User-Agent' => 'blockscore-ruby/4.1.0 (https://github.com/BlockScore/blockscore-ruby)', - 'Content-Type' => 'application/json' -} - module BlockScore module Connection mattr_accessor :api_key @@ -37,6 +31,14 @@ module BlockScore private + def headers + @@headers ||= { + 'Accept' => 'application/vnd.blockscore+json;version=4', + 'User-Agent' => 'blockscore-ruby/4.1.0 (https://github.com/BlockScore/blockscore-ruby)', + 'Content-Type' => 'application/json' + } + end + def request(method, path, params) response = execute_request(method, path, params) @@ -58,7 +60,6 @@ module BlockScore def execute_request(method, path, params) auth = { :username => @@api_key, :password => '' } - headers = HEADERS options = { :basic_auth => auth, :headers => headers, :body => params.to_json }
make headers a memoized method
diff --git a/mamba/formatters.py b/mamba/formatters.py index <HASH>..<HASH> 100644 --- a/mamba/formatters.py +++ b/mamba/formatters.py @@ -91,8 +91,6 @@ class DocumentationFormatter(object): with indent(3): puts(colored.red('Failure/Error: %s' % self.format_failing_expectation(failed))) puts() - puts(colored.red(str(failed.exception))) - puts() puts('Traceback:') puts(colored.red(self.format_traceback(failed))) puts() @@ -110,11 +108,10 @@ class DocumentationFormatter(object): return ' '.join(result) def format_failing_expectation(self, spec_): - error = traceback.format_tb(spec_.traceback)[1:2][0] - return error.split('\n')[-2].strip() + return str(spec_.exception) def format_traceback(self, spec_): - return ''.join(traceback.format_tb(spec_.traceback)[1:]) + return ''.join([message[2:] for message in traceback.format_tb(spec_.traceback)[1:]]) def format_seconds(self, seconds): return '%.4f seconds' % seconds
Avoid fails when formatting traceback and it does not exists
diff --git a/bottle_sqlalchemy.py b/bottle_sqlalchemy.py index <HASH>..<HASH> 100644 --- a/bottle_sqlalchemy.py +++ b/bottle_sqlalchemy.py @@ -47,7 +47,7 @@ Usage Example:: db.add(entity) -It is up to you create engine, and metadata, because SQLAlchemy has +It is up to you create engine and metadata, because SQLAlchemy has a lot of options to do it. The plugin just handles the SQLAlchemy session.
Removed a comma So very OCD...
diff --git a/tests/integration/runners/test_jobs.py b/tests/integration/runners/test_jobs.py index <HASH>..<HASH> 100644 --- a/tests/integration/runners/test_jobs.py +++ b/tests/integration/runners/test_jobs.py @@ -9,6 +9,7 @@ from __future__ import absolute_import, print_function, unicode_literals from tests.support.case import ShellCase from tests.support.unit import skipIf + class ManageTest(ShellCase): ''' Test the manage runner @@ -35,7 +36,7 @@ class ManageTest(ShellCase): ''' ret = self.run_run_plus('jobs.lookup_jid') expected = 'Passed invalid arguments:' - self.self.assertIn(expected, ret['return']) + self.assertIn(expected, ret['return']) @skipIf(True, 'to be re-enabled when #23623 is merged') def test_list_jobs(self):
Fixing typo in test_jobs. Fixing lint.
diff --git a/master/buildbot/reporters/bitbucket.py b/master/buildbot/reporters/bitbucket.py index <HASH>..<HASH> 100644 --- a/master/buildbot/reporters/bitbucket.py +++ b/master/buildbot/reporters/bitbucket.py @@ -115,6 +115,8 @@ class BitbucketStatusPush(http.HttpStatusPushBase): if path.endswith('.git'): path = path[:-4] + while path.endswith('/'): + path = path[:-1] parts = path.split('/')
remove trailing / in the repo url
diff --git a/spec/file_spec.rb b/spec/file_spec.rb index <HASH>..<HASH> 100644 --- a/spec/file_spec.rb +++ b/spec/file_spec.rb @@ -12,7 +12,7 @@ describe Cap2::File do context 'when the file does have the given capability' do before(:each) do - system %{sudo setcap "cap_dac_override+p" #{file.path}} + run_as_root('permit(:dac_override)') end it { should be_permitted(:dac_override) } @@ -26,7 +26,7 @@ describe Cap2::File do context 'when the file does have the given capability' do before(:each) do - system %{sudo setcap "cap_dac_override+pe" #{file.path}} + run_as_root('permit(:dac_override)', 'enable_on_exec(:dac_override)') end it { should be_effective(:dac_override) } @@ -40,7 +40,7 @@ describe Cap2::File do context 'when the file does have the given capability' do before(:each) do - system %{sudo setcap "cap_dac_override+i" #{file.path}} + run_as_root('allow_inherit(:dac_override)') end it { should be_inheritable(:dac_override) }
Use `run_as_root` rather than `system 'sudo...` in file_spec.rb
diff --git a/test/test_bulk.py b/test/test_bulk.py index <HASH>..<HASH> 100644 --- a/test/test_bulk.py +++ b/test/test_bulk.py @@ -1043,7 +1043,7 @@ class TestBulkWriteConcern(BulkTestBase): 'op': {'_id': '...', 'a': 1}}]}, result) - self.assertEqual(2, len(result['writeConcernErrors'])) + self.assertTrue(len(result['writeConcernErrors']) > 1) failed = result['writeErrors'][0] self.assertTrue("duplicate" in failed['errmsg'])
Fix a bulk operations test for MongoDB <I> behavior change
diff --git a/filter-widget/Filter.js b/filter-widget/Filter.js index <HASH>..<HASH> 100644 --- a/filter-widget/Filter.js +++ b/filter-widget/Filter.js @@ -57,8 +57,8 @@ export const Filter = DefineMap.extend('Filter', { }); export const FilterList = DefineList.extend('FilterList', { - '#': Filter -}); + DefineMap: Filter +}, {}); export const FilterOptions = [{ label: 'Does not contain',
workaround can-define issue with '#' and concat
diff --git a/lib/hako/schedulers/ecs.rb b/lib/hako/schedulers/ecs.rb index <HASH>..<HASH> 100644 --- a/lib/hako/schedulers/ecs.rb +++ b/lib/hako/schedulers/ecs.rb @@ -139,7 +139,10 @@ module Hako unless page.service_arns.empty? @ecs.describe_services(cluster: @cluster, services: page.service_arns).services.each do |s| if s.status != 'INACTIVE' - max_port = [max_port, find_front_port(s)].max + port = find_front_port(s) + if port + max_port = [max_port, port].max + end end end end @@ -158,7 +161,9 @@ module Hako task_definition.container_definitions.each do |c| container_definitions[c.name] = c end - container_definitions['front'].port_mappings[0].host_port + if container_definitions.size == 2 && container_definitions['front'] && container_definitions['app'] + container_definitions['front'].port_mappings[0].host_port + end end def task_definition_changed?(front, app)
Ignore non-hako services
diff --git a/pykube/objects.py b/pykube/objects.py index <HASH>..<HASH> 100644 --- a/pykube/objects.py +++ b/pykube/objects.py @@ -4,6 +4,7 @@ import time import jsonpatch +from urllib import urlencode from .exceptions import ObjectDoesNotExist from .query import ObjectManager @@ -175,6 +176,21 @@ class Pod(NamespacedAPIObject): condition = next((c for c in cs if c["type"] == "Ready"), None) return condition is not None and condition["status"] == "True" + def get_logs(self, container=None): + url = "logs" + params = {} + if container is not None: + params["container"] = container + query_string = urlencode(params) + url += "?{}".format(query_string) if query_string else "" + kwargs = {'url': url, + 'pods': self.name, + 'namespace': self.namespace, + 'version': self.version} + r = self.api.get(**kwargs) + r.raise_for_status() + return r.json() + class ReplicationController(NamespacedAPIObject, ReplicatedAPIObject):
Adds implementation to get logs from the Pod object itself
diff --git a/ehforwarderbot/__main__.py b/ehforwarderbot/__main__.py index <HASH>..<HASH> 100644 --- a/ehforwarderbot/__main__.py +++ b/ehforwarderbot/__main__.py @@ -1,5 +1,5 @@ # coding=utf-8 - +import signal import threading import logging import argparse @@ -265,6 +265,8 @@ def main(): setup_telemetry(conf['telemetry']) atexit.register(stop_gracefully) + signal.signal(signal.SIGTERM, stop_gracefully) + signal.signal(signal.SIGINT, stop_gracefully) init(conf) poll() diff --git a/ehforwarderbot/__version__.py b/ehforwarderbot/__version__.py index <HASH>..<HASH> 100644 --- a/ehforwarderbot/__version__.py +++ b/ehforwarderbot/__version__.py @@ -1,3 +1,3 @@ # coding=utf-8 -__version__ = "2.0.0b21.dev1" +__version__ = "2.0.0b21.dev2"
Attempt to address #<I>: catch SIGTERM and SIGINT for graceful exit.
diff --git a/src/Form/FieldTextarea.js b/src/Form/FieldTextarea.js index <HASH>..<HASH> 100644 --- a/src/Form/FieldTextarea.js +++ b/src/Form/FieldTextarea.js @@ -11,7 +11,9 @@ class FieldTextarea extends FieldInput { constructor() { super(...arguments); - this.state = {height: 0}; + this.state = {height: this.props.minHeight}; + + console.log(this.props); this.updateTextareaHeight = Util.throttle(this.updateTextareaHeight, 100); this.handleContentEditableBlur = this.handleContentEditableBlur.bind(this); @@ -21,10 +23,6 @@ class FieldTextarea extends FieldInput { this.handleContentEditableFocus.bind(this); } - componentWillMount() { - this.setState({height: this.props.minHeight}); - } - componentDidMount() { super.componentDidMount(...arguments);
Assign minHeight in the constructor
diff --git a/go/cmd/vtctld/vtctld.go b/go/cmd/vtctld/vtctld.go index <HASH>..<HASH> 100644 --- a/go/cmd/vtctld/vtctld.go +++ b/go/cmd/vtctld/vtctld.go @@ -269,6 +269,7 @@ func (ar *ActionRepository) Apply(actionName string, zkPath string, r *http.Requ output, err := action(ar.wrangler, zkPath, r) if err != nil { result.error(err.Error()) + return result } result.Output = output return result
vtctld now will show the error.
diff --git a/devices.js b/devices.js index <HASH>..<HASH> 100644 --- a/devices.js +++ b/devices.js @@ -1676,6 +1676,13 @@ const devices = [ extend: hue.light_onoff_brightness_colortemp, }, { + zigbeeModel: ['LTC021'], + model: '3435011P7', + vendor: 'Philips', + description: 'Hue white ambiance bathroom ceiling light Adore', + extend: hue.light_onoff_brightness_colortemp, + }, + { zigbeeModel: ['LTD003'], model: '4503848C5', vendor: 'Philips',
Add support for LTC<I> Philips Adore bathroom light (#<I>)
diff --git a/gen/strings_test.go b/gen/strings_test.go index <HASH>..<HASH> 100644 --- a/gen/strings_test.go +++ b/gen/strings_test.go @@ -82,7 +82,7 @@ func TestAlphaString(t *testing.T) { t.Errorf("Invalid string: %#v", v) } } - if result.Sieve == nil || result.Sieve("01") { + if result.Sieve != nil && result.Sieve("01") { t.Error("Invalid sieve") } } @@ -106,7 +106,7 @@ func TestNumString(t *testing.T) { t.Errorf("Invalid string: %#v", v) } } - if result.Sieve == nil || result.Sieve("abc") { + if result.Sieve != nil && result.Sieve("abc") { t.Error("Invalid sieve") } } @@ -136,7 +136,7 @@ func TestIdentifier(t *testing.T) { t.Errorf("Invalid string: %#v", v) } } - if result.Sieve == nil || result.Sieve("0ab") || result.Sieve("ab\n") { + if result.Sieve != nil && (result.Sieve("0ab") || result.Sieve("ab\n")) { t.Error("Invalid sieve") } }
Sometimes Sieve is legitimately nil.
diff --git a/expynent/patterns.py b/expynent/patterns.py index <HASH>..<HASH> 100644 --- a/expynent/patterns.py +++ b/expynent/patterns.py @@ -288,3 +288,9 @@ PHONE_NUMBER = { # RegEx pattern to match Taiwan phone numbers 'TW': r'^(?:\+886|0)((?:9\d{8})|(?:[2-8]\d{7,8}))$' } + +# List of RegEx patterns for license plates +LICENSE_PLATE = { + # Regex pattern to match Taiwanese license plates + 'TW': r'(^[A-Z0-9]{2,3}-\d{2,4}$)|(^\d{2,3}-[A-Z0-9]{2,3}$)|(^\d{4}-[A-Z0-9]{2}$)|' + } \ No newline at end of file
added Taiwanese license plate regex
diff --git a/lib/discordrb/bot.rb b/lib/discordrb/bot.rb index <HASH>..<HASH> 100644 --- a/lib/discordrb/bot.rb +++ b/lib/discordrb/bot.rb @@ -660,22 +660,6 @@ module Discordrb token_cache.store_token(email, password, token) token - rescue Exception => e - response_code = login_response.nil? ? 0 : login_response.code ######## mackmm145 - if login_attempts < 100 && (e.inspect.include?('No such host is known.') || response_code == 523) - debug("Login failed! Reattempting in 5 seconds. #{100 - login_attempts} attempts remaining.") - debug("Error was: #{e.inspect}") - sleep 5 - login_attempts += 1 - retry - else - debug("Login failed permanently after #{login_attempts + 1} attempts") - - # Apparently we get a 400 if the password or username is incorrect. In that case, tell the user - debug("Are you sure you're using the correct username and password?") if e.class == RestClient::BadRequest - log_exception(e) - raise $ERROR_INFO - end end def retrieve_token(email, password, token_cache)
Completely redo the user_login error handling
diff --git a/browser/environment/helpers.js b/browser/environment/helpers.js index <HASH>..<HASH> 100644 --- a/browser/environment/helpers.js +++ b/browser/environment/helpers.js @@ -121,6 +121,16 @@ window.flush = function flush(callback) { }; /** + * Advances a single animation frame. + * @param {function()} callback + */ +window.animationFrameFlush = function animationFrameFlush(callback) { + requestAnimationFrame(function() { + flush(callback); + }); +} + +/** * DEPRECATED: Use `flush`. * @param {function} callback */
Add animationFrameFlush.
diff --git a/pushy/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java b/pushy/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java index <HASH>..<HASH> 100644 --- a/pushy/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java +++ b/pushy/src/test/java/com/relayrides/pushy/apns/ApnsClientTest.java @@ -240,6 +240,7 @@ public class ApnsClientTest { this.tokenAuthenticationClient = new ApnsClientBuilder() .setTrustedServerCertificateChain(CA_CERTIFICATE) .setEventLoopGroup(EVENT_LOOP_GROUP) + .setSslProvider(this.preferredSslProvider) .build(); this.tlsAuthenticationClient.connect(HOST, PORT).await();
Fixed a goof where the token-auth client wasn't paying attention to the preferred SSL provider.
diff --git a/aiogram/types/base.py b/aiogram/types/base.py index <HASH>..<HASH> 100644 --- a/aiogram/types/base.py +++ b/aiogram/types/base.py @@ -211,6 +211,15 @@ class TelegramObject(ContextInstanceMixin, metaclass=MetaTelegramObject): """ return self.as_json() + def __repr__(self) -> str: + """ + Return object readable representation. + + Example: <ObjectName {"id": 123456}> + :return: object class name and object data as a string + """ + return f"<{type(self).__name__} {self}>" + def __getitem__(self, item: typing.Union[str, int]) -> typing.Any: """ Item getter (by key)
feat: TelegramObject readable representation (#<I>)
diff --git a/unit/manager.go b/unit/manager.go index <HASH>..<HASH> 100644 --- a/unit/manager.go +++ b/unit/manager.go @@ -161,6 +161,10 @@ func (m *SystemdManager) removeUnit(name string) { log.Printf("Unlinking systemd unit %s from target %s", name, m.Target.Name()) link := m.getLocalPath(path.Join(m.Target.Name()+".wants", name)) syscall.Unlink(link) + + file := m.getLocalPath(name) + log.Printf("Removing systemd unit file %s", file) + syscall.Unlink(file) } func (m *SystemdManager) readUnit(name string) (string, error) {
fix(agent): Unlink unit files when jobs are stopped
diff --git a/lib/launchy/applications/browser.rb b/lib/launchy/applications/browser.rb index <HASH>..<HASH> 100644 --- a/lib/launchy/applications/browser.rb +++ b/lib/launchy/applications/browser.rb @@ -20,7 +20,7 @@ class Launchy::Application end def nix_app_list - nix_de = Launchy::Detect::NixDekstopEnvironment.browser + nix_de = Launchy::Detect::NixDesktopEnvironment.detect app_list = %w[ xdg-open ] app_list << nix_de.browser app_list << nix_de.fallback_browsers diff --git a/lib/launchy/detect/runner.rb b/lib/launchy/detect/runner.rb index <HASH>..<HASH> 100644 --- a/lib/launchy/detect/runner.rb +++ b/lib/launchy/detect/runner.rb @@ -29,7 +29,7 @@ module Launchy::Detect def shell_commands( cmd, args ) cmdline = [ cmd.shellsplit ] cmdline << args.collect{ |a| a.to_s.shellescape } - return commanddline_normalize( cmdline ) + return commandline_normalize( cmdline ) end def commandline_normalize( cmdline )
Fix browser opening in *nix; fixes gh-<I>, gh-<I>
diff --git a/spring-jdbc-oracle/src/main/java/com/github/ferstl/spring/jdbc/oracle/SqlOracleArrayValue.java b/spring-jdbc-oracle/src/main/java/com/github/ferstl/spring/jdbc/oracle/SqlOracleArrayValue.java index <HASH>..<HASH> 100644 --- a/spring-jdbc-oracle/src/main/java/com/github/ferstl/spring/jdbc/oracle/SqlOracleArrayValue.java +++ b/spring-jdbc-oracle/src/main/java/com/github/ferstl/spring/jdbc/oracle/SqlOracleArrayValue.java @@ -19,6 +19,8 @@ import java.sql.Array; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; +import java.util.Arrays; + import org.springframework.dao.CleanupFailureDataAccessException; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.jdbc.core.JdbcTemplate; @@ -111,4 +113,12 @@ public final class SqlOracleArrayValue implements NamedSqlValue { } } + /** + * {@inheritDoc} + */ + @Override + public String toString() { + return Arrays.toString(this.values); + } + }
Implement SqlOracleArrayValue#toString Implement SqlOracleArrayValue#toString for debug purposes. It is extremely helpful to have a good #toString while debugging in the IDE.
diff --git a/lib/Sabre/HTTP/Request.php b/lib/Sabre/HTTP/Request.php index <HASH>..<HASH> 100644 --- a/lib/Sabre/HTTP/Request.php +++ b/lib/Sabre/HTTP/Request.php @@ -35,9 +35,8 @@ class Sabre_HTTP_Request { if (is_null($this->body)) { $this->body = file_get_contents('php://input'); - } else { - return $this->body; - } + } + return $this->body; }
Path by Andreas Gohr, Thanks!
diff --git a/src/BoomCMS/Chunk/Linkset.php b/src/BoomCMS/Chunk/Linkset.php index <HASH>..<HASH> 100644 --- a/src/BoomCMS/Chunk/Linkset.php +++ b/src/BoomCMS/Chunk/Linkset.php @@ -25,10 +25,7 @@ class Linkset extends BaseChunk 'limit' => 0, ]; - /** - * @return array - */ - public function attributes() + public function attributes(): array { foreach ($this->options as $key => $value) { $attrs[$this->attributePrefix.$key] = (int) $value; @@ -77,17 +74,15 @@ class Linkset extends BaseChunk /** * Returns true if the linkset contains any links. - * - * @return bool */ - public function hasContent() + public function hasContent(): bool { return count($this->getLinks()) > 0; } - public function getTitle() + public function getTitle(): string { - return isset($this->attrs['title']) ? $this->attrs['title'] : ''; + return $this->attrs['title'] ?? ''; } /**
Chunk linkset: Added PHP7 features to Linkset class
diff --git a/packages/core/src/index.js b/packages/core/src/index.js index <HASH>..<HASH> 100644 --- a/packages/core/src/index.js +++ b/packages/core/src/index.js @@ -9,7 +9,7 @@ import { import Component from './component' import Render, { Router } from './render' -import { State, Effect, Hook, Actions, StateDefaults } from 'jumpstate' +import { State, Effect, Hook, Actions, StateDefaults, getState, dispatch } from 'jumpstate' import Goto from './routing' import { Middleware } from './reducer' @@ -34,6 +34,8 @@ module.exports = { /* Redux */ Middleware, + getState, + dispatch, /* Other */ StateDefaults
Expose getState and dispatch from jumpstate
diff --git a/lib/odata/query/criteria.rb b/lib/odata/query/criteria.rb index <HASH>..<HASH> 100644 --- a/lib/odata/query/criteria.rb +++ b/lib/odata/query/criteria.rb @@ -1,21 +1,32 @@ module OData class Query + # Represents a discreet detail about an OData query. It also validates + # the criteria based on what the gem knows how to support. class Criteria + # Defines the options required to create a new OData::Query::Criteria. REQUIRED_OPTIONS = [:operation, :argument] + + # Defines the operations the OData gem knows how to support. SUPPORTED_OPERATIONS = [ :filter, :order_by, :skip, :top, :select, :expand, :inlinecount ] + # Creates a new OData::Query::Criteria with the supplied options. + # @param options [Hash] def initialize(options = {}) @options = process_options(options) validate_required_options validate_supported_operation end + # The query operation of a particular criteria. + # @return [Symbol] def operation options[:operation] end + # The query argument of a particular criteria. + # @return [String] def argument options[:argument] end
Updated YARD documentation. [ci skip]
diff --git a/src/Cascader/Dropdown.js b/src/Cascader/Dropdown.js index <HASH>..<HASH> 100644 --- a/src/Cascader/Dropdown.js +++ b/src/Cascader/Dropdown.js @@ -296,16 +296,17 @@ class Dropdown extends React.Component<Props, State> { handleSearchRowSelect = (item: Object, event: DefaultEvent) => { const { valueKey, onChange, onSelect } = this.props; const value = item[valueKey]; + const nextState = getDerivedStateForCascade(this.props, this.state, value); this.closeDropdown(); this.setState({ - ...getDerivedStateForCascade(this.props, this.state, value), + ...nextState, selectNode: item, searchKeyword: '', value }); - onSelect && onSelect(item, null, null, event); + onSelect && onSelect(item, nextState.activePaths, null, event); onChange && onChange(value, event); };
Fixed an issue with Cascader select search result. (#<I>)
diff --git a/pymatgen/io/vasp/sets.py b/pymatgen/io/vasp/sets.py index <HASH>..<HASH> 100644 --- a/pymatgen/io/vasp/sets.py +++ b/pymatgen/io/vasp/sets.py @@ -370,7 +370,7 @@ class DictSet(VaspInputSet): for mag in incar['MAGMOM']]) incar['NUPDOWN'] = nupdown - if self.use_structure_charge and self.structure.charge: + if self.use_structure_charge: incar["NELECT"] = self.nelect return incar @@ -392,7 +392,11 @@ class DictSet(VaspInputSet): if ps.element in site_symbols: site_symbols.remove(ps.element) nelect += self.structure.composition.element_composition[ps.element] * ps.ZVAL - return int(round(nelect)) - self.structure.charge + + if self.use_structure_charge: + return int(round(nelect)) - self.structure.charge + else: + return int(round(nelect)) @property def kpoints(self):
Fix setting NELECT only when told to
diff --git a/uncompyle6/parsers/reducecheck/or_check.py b/uncompyle6/parsers/reducecheck/or_check.py index <HASH>..<HASH> 100644 --- a/uncompyle6/parsers/reducecheck/or_check.py +++ b/uncompyle6/parsers/reducecheck/or_check.py @@ -38,6 +38,7 @@ def or_check(self, lhs, n, rule, ast, tokens, first, last): last_token = tokens[last] last_token_offset = last_token.off2int() + # FIXME: use instructions for all of this if jmp_true_target < first_offset: return False elif jmp_true_target < last_token_offset: @@ -49,8 +50,10 @@ def or_check(self, lhs, n, rule, ast, tokens, first, last): # For a backwards loop, well compare to the instruction *after* # then POP_JUMP... last_token = tokens[last + 1] + # HACK alert 3 is the Python < 3.6ish thing. + # Convert to using instructions return not ( - (last_token_offset <= jmp_true_target <= last_token_offset + 2) + (last_token_offset <= jmp_true_target <= last_token_offset + 3) or jmp_true_target < tokens[first].off2int() ) elif last_token == "JUMP_FORWARD" and expr_jt.kind != "expr_jitop":
git commit -m'Adjust "or" offset check ... for Python < <I> hopefully it doesn't break Python <I>+
diff --git a/libs/render.js b/libs/render.js index <HASH>..<HASH> 100644 --- a/libs/render.js +++ b/libs/render.js @@ -7,6 +7,10 @@ var isEngine = function(engine) { return typeof(engine) === 'function'; } +var isRemote = function(dir) { + return dir && (dir.indexOf('http') === 0 || dir.indexOf('https') === 0); +} + // 根据给定的主题名称或者文件名称渲染邮件 // 不指定引擎渲染的话会自动寻找支持的模板引擎 // e.g: exports.render('mails-flat/message', {...}, callback); @@ -22,7 +26,10 @@ module.exports = function(template, data, callback, e) { var engine = {}; var dest = file.exist ? file.dir : file.availables[0]; engine.name = theme['view engine']; + // 主题相关信息 data.Theme = theme; + // 渲染页面时要进行 #{static} 变量的替换,这里就是替换成相应主题在 public 下的目录 + data.static = isRemote(theme.static) ? theme.static : '/' + theme.name; try { engine._engine = isEngine(e) ? e : require(theme['view engine']); } catch (err) {
<I>: move static locals in
diff --git a/pandas/tests/dtypes/test_common.py b/pandas/tests/dtypes/test_common.py index <HASH>..<HASH> 100644 --- a/pandas/tests/dtypes/test_common.py +++ b/pandas/tests/dtypes/test_common.py @@ -20,6 +20,7 @@ from pandas.core.dtypes.missing import isna import pandas as pd import pandas._testing as tm +from pandas.api.types import pandas_dtype from pandas.arrays import SparseArray @@ -400,6 +401,23 @@ def test_is_int64_dtype(dtype): assert com.is_int64_dtype(dtype) +def test_type_comparison_with_numeric_ea_dtype(any_numeric_ea_dtype): + # GH#43038 + assert pandas_dtype(any_numeric_ea_dtype) == any_numeric_ea_dtype + + +def test_type_comparison_with_real_numpy_dtype(any_real_numpy_dtype): + # GH#43038 + assert pandas_dtype(any_real_numpy_dtype) == any_real_numpy_dtype + + +def test_type_comparison_with_signed_int_ea_dtype_and_signed_int_numpy_dtype( + any_signed_int_ea_dtype, any_signed_int_numpy_dtype +): + # GH#43038 + assert not pandas_dtype(any_signed_int_ea_dtype) == any_signed_int_numpy_dtype + + @pytest.mark.parametrize( "dtype", [
tests added for dtype comparison with fixture (#<I>)
diff --git a/git_ready.py b/git_ready.py index <HASH>..<HASH> 100644 --- a/git_ready.py +++ b/git_ready.py @@ -48,7 +48,8 @@ def main(branch): # Ensure that we're in a git repository. This command is silent unless # you're not actually in a git repository, in which case, you receive a # "Not a git repository" error message. - sys.stdout.write(subprocess.check_output(['git', 'rev-parse'])) + output = subprocess.check_output(['git', 'rev-parse']).decode('utf-8') + sys.stdout.write(output) except subprocess.CalledProcessError: # Bail if we're not in a git repository. return
Decode bytes from stdout
diff --git a/app/models/chouette/route.rb b/app/models/chouette/route.rb index <HASH>..<HASH> 100644 --- a/app/models/chouette/route.rb +++ b/app/models/chouette/route.rb @@ -16,7 +16,7 @@ class Chouette::Route < Chouette::TridentActiveRecord def find_by_stop_area(stop_area) stop_area_ids = Integer === stop_area ? [stop_area] : (stop_area.children_in_depth + [stop_area]).map(&:id) where( :stop_area_id => stop_area_ids).first or - raise ActiveRecord::RecordNotFound.new("Can't find a StopArea #{stop_area.inspect} in Route #{owner.id.inspect}'s StopPoints") + raise ActiveRecord::RecordNotFound.new("Can't find a StopArea #{stop_area.inspect} in Route #{proxy_owner.id.inspect}'s StopPoints") end def between(departure, arrival) @@ -40,7 +40,7 @@ class Chouette::Route < Chouette::TridentActiveRecord departure, arrival = [departure, arrival].collect do |endpoint| String === endpoint ? Chouette::StopArea.find_by_objectid(endpoint) : endpoint end - proxy_owner.stop_points.between(departure, arrival).includes(:stop_areas).collect(&:stop_area) + proxy_owner.stop_points.between(departure, arrival).includes(:stop_area).collect(&:stop_area) end end
Use stop_area relation in place of stop_areas for stop_points
diff --git a/djcelery/management/commands/celeryctl.py b/djcelery/management/commands/celeryctl.py index <HASH>..<HASH> 100644 --- a/djcelery/management/commands/celeryctl.py +++ b/djcelery/management/commands/celeryctl.py @@ -22,4 +22,7 @@ class Command(CeleryCommand): def run_from_argv(self, argv): util = celeryctl(app=app) - util.execute_from_commandline(argv[1:]) + + util.execute_from_commandline(self.handle_default_options(argv)[1:]) + +
Fixing #<I> (--settings not supported with celeryctl). Code was already there. One just has to call it
diff --git a/src/ossos/core/ossos/wcs.py b/src/ossos/core/ossos/wcs.py index <HASH>..<HASH> 100644 --- a/src/ossos/core/ossos/wcs.py +++ b/src/ossos/core/ossos/wcs.py @@ -106,6 +106,10 @@ class WCS(astropy_wcs.WCS): return pos[0] * units.degree, pos[1] * units.degree def sky2xy(self, ra, dec, usepv=True): + if isinstance(ra, Quantity): + ra = ra.to(units.degree).value + if isinstance(dec, Quantity): + dec = dec.to(units.degree).value try: if usepv: return sky2xypv(ra=ra, @@ -120,10 +124,7 @@ class WCS(astropy_wcs.WCS): except Exception as ex: logger.warning("sky2xy raised exception: {0}".format(ex)) logger.warning("Reverted to CD-Matrix WCS to convert: {0} {1} ".format(ra, dec)) - if isinstance(ra, Quantity): - ra = ra.to(units.degree).value - if isinstance(dec, Quantity): - dec = dec.to(units.degree).value + print ex pos = self.wcs_world2pix([[ra, dec], ], 1) return pos[0][0], pos[0][1]
the sky2xy function expects to be using values not quanities.. so set them to values here.
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -17,7 +17,6 @@ require 'alchemy/seeder' require 'alchemy/test_support/auth_helpers' require 'alchemy/test_support/controller_requests' require 'alchemy/test_support/integration_helpers' -require 'alchemy/test_support/essence_shared_examples' require 'alchemy/test_support/factories' require 'capybara/poltergeist' require 'capybara/rails' @@ -29,6 +28,8 @@ require 'rspec/rails' require_relative "support/hint_examples.rb" require_relative "support/transformation_examples.rb" +require 'alchemy/test_support/essence_shared_examples' + ActionMailer::Base.delivery_method = :test ActionMailer::Base.perform_deliveries = true ActionMailer::Base.default_url_options[:host] = "test.com"
Move require `essence_shared_examples` after rspec in spec_helper. .. as `shared_examples_for` is an rspec method, which needs to be defined beforehand.
diff --git a/lib/neo4j/active_node/labels/reloading.rb b/lib/neo4j/active_node/labels/reloading.rb index <HASH>..<HASH> 100644 --- a/lib/neo4j/active_node/labels/reloading.rb +++ b/lib/neo4j/active_node/labels/reloading.rb @@ -14,6 +14,7 @@ module Neo4j::ActiveNode::Labels associations.each_value(&:queue_model_refresh!) MODELS_FOR_LABELS_CACHE.clear WRAPPED_CLASSES.each { |c| MODELS_TO_RELOAD << c.name } + WRAPPED_CLASSES.clear end end end
Put this back, because of course it broke things
diff --git a/abodepy/devices/light.py b/abodepy/devices/light.py index <HASH>..<HASH> 100644 --- a/abodepy/devices/light.py +++ b/abodepy/devices/light.py @@ -1,6 +1,7 @@ """Abode light device.""" import json import logging +import math from abodepy.exceptions import AbodeException @@ -74,8 +75,11 @@ class AbodeLight(AbodeSwitch): if response_object['idForPanel'] != self.device_id: raise AbodeException((ERROR.SET_STATUS_DEV_ID)) - if (response_object['hue'] != int(hue) or - response_object['saturation'] != int(saturation)): + # Abode will sometimes return hue value off by 1 (rounding error) + hue_comparison = math.isclose(response_object["hue"], + int(hue), abs_tol=1) + if not hue_comparison or (response_object["saturation"] + != int(saturation)): _LOGGER.warning( ("Set color mismatch for device %s. " "Request val: %s, Response val: %s "),
Fix for hue logging issue (#<I>) * Fix for hue logging issue * Fix for saturation comparison * Yet another fix for comparison confusion
diff --git a/termdown.py b/termdown.py index <HASH>..<HASH> 100755 --- a/termdown.py +++ b/termdown.py @@ -9,7 +9,9 @@ import click from dateutil.parser import parse from pyfiglet import Figlet -TIMEDELTA_REGEX = re.compile(r'((?P<hours>\d+)h ?)?' +TIMEDELTA_REGEX = re.compile(r'((?P<years>\d+)y ?)?' + r'((?P<days>\d+)d ?)?' + r'((?P<hours>\d+)h ?)?' r'((?P<minutes>\d+)m ?)?' r'((?P<seconds>\d+)s ?)?') @@ -65,6 +67,11 @@ def parse_timedelta(deltastr): for name, value in matches.groupdict().items(): if value: components[name] = int(value) + for period, hours in (('days', 24), ('years', 8766)): + if period in components: + components['hours'] = components.get('hours', 0) + \ + components[period] * hours + del components[period] return int(timedelta(**components).total_seconds())
allow days and years in timedelta STARTs
diff --git a/synapse/cores/ram.py b/synapse/cores/ram.py index <HASH>..<HASH> 100644 --- a/synapse/cores/ram.py +++ b/synapse/cores/ram.py @@ -19,11 +19,11 @@ class Cortex(common.Cortex): def _sizeByRange(self, prop, valu, limit=None): # HACK: for speed - data = dict(size=0) + data = [0] def inc(): - data['size'] += 1 + data[0] += 1 [ inc() for r in self.rowsbyprop.get(prop,()) if r[2] >= valu[0] and r[2] < valu[1] ] - return data['size'] + return data[0] def _rowsByRange(self, prop, valu, limit=None): # HACK: for speed
optimized sizeByRange for ram cortex
diff --git a/session.go b/session.go index <HASH>..<HASH> 100644 --- a/session.go +++ b/session.go @@ -227,18 +227,26 @@ func (s *Session) init() error { } hosts = hosts[:0] + + var wg sync.WaitGroup for _, host := range hostMap { - host = s.ring.addOrUpdate(host) + host := s.ring.addOrUpdate(host) if s.cfg.filterHost(host) { continue } host.setState(NodeUp) - s.pool.addHost(host) - hosts = append(hosts, host) + + wg.Add(1) + go func() { + defer wg.Done() + s.pool.addHost(host) + }() } + wg.Wait() + type bulkAddHosts interface { AddHosts([]*HostInfo) }
Init host pools in parallel. (#<I>) Creating a session can take a long time if your cluster is big and there's latency between you and the cluster. We are seeing it take ~<I>s to create a session to a <I> node cluster with ~<I>ms of latency. This is somewhat related to #<I> and #<I>, but this change tries to do the simplest thing to speed up initial connection without changing any behavior.
diff --git a/Application.php b/Application.php index <HASH>..<HASH> 100644 --- a/Application.php +++ b/Application.php @@ -169,6 +169,11 @@ class Application extends HttpKernel */ private function throwUncaughtInfrastructureException(Exception $exception) { + (new ApplicationLogService())->getLogger()->critical( + 'Error: ' . $exception->getMessage(), + $exception->getTrace() + ); + if (getenv('ENV') == self::ENV_PROD) { throw new HttpException(Response::HTTP_INTERNAL_SERVER_ERROR, 'Internal server error'); }
[update] add the logging for uncaught infrastructure exceptions
diff --git a/app/controllers/marty/diagnostic/controller.rb b/app/controllers/marty/diagnostic/controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/marty/diagnostic/controller.rb +++ b/app/controllers/marty/diagnostic/controller.rb @@ -5,7 +5,7 @@ module Marty def self.inherited(klass) namespace = klass.name.deconstantize.split('::')[0] rescue '' - Marty::Diagnostic::Reporter.namespaces.unshift(namespace) + Reporter.namespaces.unshift(namespace) super end
revert changes to diag controller
diff --git a/lib/Controller/Data.php b/lib/Controller/Data.php index <HASH>..<HASH> 100644 --- a/lib/Controller/Data.php +++ b/lib/Controller/Data.php @@ -87,10 +87,10 @@ abstract class Controller_Data extends AbstractController { // abstract function loadByConditions($model); // abstract function deleteAll($model); - /** Create a new cursor and load model with the first entry */ + /** Create a new cursor and load model with the first entry. Returns cursor */ abstract function prefetchAll($model); - /** Provided that rewind was called before, load next data entry */ - abstract function loadCurrent($model); + /** Load next data row from cursor */ + abstract function loadCurrent($model,&$cursor); } diff --git a/lib/Model.php b/lib/Model.php index <HASH>..<HASH> 100644 --- a/lib/Model.php +++ b/lib/Model.php @@ -766,14 +766,15 @@ class Model extends AbstractModel implements ArrayAccess,Iterator,Countable { // }}} // {{{ Iterator support + public $_cursor=null; function rewind() { $this->unload(); - $this->controller->prefetchAll($this); + $this->_cursor = $this->controller->prefetchAll($this); $this->next(); } function next() { $this->hook('beforeLoad', array('iterating')); - $this->controller->loadCurrent($this); + $this->controller->loadCurrent($this,$this->_cursor); if($this->loaded()) { $this->hook('afterLoad'); }
Store "cursor" inside model instead of controller, when iterating through adta.
diff --git a/support/cas-server-support-aws-cognito-authentication/src/main/java/org/apereo/cas/authentication/AmazonCognitoAuthenticationAuthenticationHandler.java b/support/cas-server-support-aws-cognito-authentication/src/main/java/org/apereo/cas/authentication/AmazonCognitoAuthenticationAuthenticationHandler.java index <HASH>..<HASH> 100644 --- a/support/cas-server-support-aws-cognito-authentication/src/main/java/org/apereo/cas/authentication/AmazonCognitoAuthenticationAuthenticationHandler.java +++ b/support/cas-server-support-aws-cognito-authentication/src/main/java/org/apereo/cas/authentication/AmazonCognitoAuthenticationAuthenticationHandler.java @@ -82,8 +82,8 @@ public class AmazonCognitoAuthenticationAuthenticationHandler extends AbstractUs } val userResult = cognitoIdentityProvider.adminGetUser(AdminGetUserRequest.builder() - .userPoolId(credential.getUsername()) - .userPoolId(properties.getUserPoolId()).build()); + .userPoolId(properties.getUserPoolId()) + .username(credential.getUsername()).build()); val attributes = new LinkedHashMap<String, List<Object>>(); attributes.put("userStatus", CollectionUtils.wrap(userResult.userStatusAsString()));
Amazon Cognito Authentication: fix adminGetUser (#<I>) (cherry picked from commit <I>f<I>aaaff<I>f<I>a2f<I>f<I>bd<I>fc0b)
diff --git a/src/processors/DropProcessor.php b/src/processors/DropProcessor.php index <HASH>..<HASH> 100644 --- a/src/processors/DropProcessor.php +++ b/src/processors/DropProcessor.php @@ -113,13 +113,13 @@ class DropProcessor extends AbstractProcessor { $object['expr_type'] = $objectType; if ($objectType === ExpressionType::TABLE || $objectType === ExpressionType::TEMPORARY_TABLE) { $object['table'] = $trim; + $object['no_quotes'] = false; $object['alias'] = false; - } else { - $object['base_expr'] = $trim; } - $object['no_quote'] = $this->revokeQuotation($trim); + $object['base_expr'] = $trim; + $object['no_quotes'] = $this->revokeQuotation($trim); $object['delim'] = false; - + $objectList[] = $object; continue 2; }
CHG: the property no_quote has been renamed into no_quotes. CHG: the property order within the table part has been changed. git-svn-id: <URL>
diff --git a/webwhatsapi/__init__.py b/webwhatsapi/__init__.py index <HASH>..<HASH> 100644 --- a/webwhatsapi/__init__.py +++ b/webwhatsapi/__init__.py @@ -23,7 +23,7 @@ class WhatsAPIDriver(object): _SELECTORS = { 'firstrun': "#wrapper", - 'qrCode': ".qrcode > img:nth-child(4)", + 'qrCode': "img[alt=\"Scan me !\"]", 'mainPage': ".app.two", 'chatList': ".infinite-list-viewport", 'messageList': "#main > div > div:nth-child(1) > div > div.message-list",
Fix qrCode css selector QRCode css selector changed to search for its alt text rather than searching almost randomly.
diff --git a/test/test.py b/test/test.py index <HASH>..<HASH> 100755 --- a/test/test.py +++ b/test/test.py @@ -36,10 +36,9 @@ from pandocxnos import insert_secnos_factory from pandocxnos import repair_refs, process_refs_factory, replace_refs_factory PANDOCVERSION = '2.8.1' -PANDOC = 'pandoc-2.9.2.1' +PANDOC = 'pandoc-2.10.1' PANDOC1p15 = 'pandoc-1.15.2' # pylint: disable=invalid-name - -PANDOC_API_VERSION = '1,20' +PANDOC_API_VERSION = '1,21' #-----------------------------------------------------------------------------
Updated for pandoc-<I>.
diff --git a/GPy/kern/constructors.py b/GPy/kern/constructors.py index <HASH>..<HASH> 100644 --- a/GPy/kern/constructors.py +++ b/GPy/kern/constructors.py @@ -5,6 +5,23 @@ import numpy as np from kern import kern import parts + +def rbf_inv(input_dim,variance=1., inv_lengthscale=None,ARD=False): + """ + Construct an RBF kernel + + :param input_dim: dimensionality of the kernel, obligatory + :type input_dim: int + :param variance: the variance of the kernel + :type variance: float + :param lengthscale: the lengthscale of the kernel + :type lengthscale: float + :param ARD: Auto Relevance Determination (one lengthscale per dimension) + :type ARD: Boolean + """ + part = parts.rbf_inv.RBFInv(input_dim,variance,inv_lengthscale,ARD) + return kern(input_dim, [part]) + def rbf(input_dim,variance=1., lengthscale=None,ARD=False): """ Construct an RBF kernel diff --git a/GPy/kern/parts/__init__.py b/GPy/kern/parts/__init__.py index <HASH>..<HASH> 100644 --- a/GPy/kern/parts/__init__.py +++ b/GPy/kern/parts/__init__.py @@ -20,3 +20,4 @@ import spline import symmetric import white import hierarchical +import rbf_inv
Added rbf_inv.py kernel which is parametrised with the variances
diff --git a/color.js b/color.js index <HASH>..<HASH> 100644 --- a/color.js +++ b/color.js @@ -301,9 +301,11 @@ if (!net.brehaut) { net.brehaut = {}; } }, function ( css ) { - if (css in css_colors) { - css = css_colors[css.toLowerCase()]; + var lower = css.toLowerCase(); + if (lower in css_colors) { + css = css_colors[lower]; } + css = css.replace(/^#/,''); if (css.length === 0 ||
CSS color names are not case-sensitive any more
diff --git a/src/Textfield.js b/src/Textfield.js index <HASH>..<HASH> 100644 --- a/src/Textfield.js +++ b/src/Textfield.js @@ -64,7 +64,7 @@ class Textfield extends React.Component { const { className, inputClassName, id, error, expandable, expandableIcon, floatingLabel, label, maxRows, - rows, style, ...otherProps } = this.props; + rows, style, children, ...otherProps } = this.props; const hasRows = !!rows; const customId = id || `textfield-${label.replace(/[^a-z0-9]/gi, '')}`; @@ -97,12 +97,14 @@ class Textfield extends React.Component { {labelContainer} {errorContainer} </div> + {children} </div> ) : ( <div className={containerClasses} style={style}> {input} {labelContainer} {errorContainer} + {children} </div> ); }
feat(Textfield): Render `children` as last element to ease customization of the Textfield by client code. (#<I>) SEE Make Textfield render children #<I>
diff --git a/alot/account.py b/alot/account.py index <HASH>..<HASH> 100644 --- a/alot/account.py +++ b/alot/account.py @@ -161,7 +161,8 @@ class Account(object): :param mail: the mail to send :type mail: :class:`email.message.Message` or string - :raises: :class:`alot.account.SendingMailFailed` if an error occured + :returns: a `Deferred` that errs back with a class:`SendingMailFailed`, + containing a reason string if an error occured. """ return 'not implemented'
adjust docstring for Account.send_mail mention the returned Deferred
diff --git a/src/Concerns/MakesHttpRequests.php b/src/Concerns/MakesHttpRequests.php index <HASH>..<HASH> 100644 --- a/src/Concerns/MakesHttpRequests.php +++ b/src/Concerns/MakesHttpRequests.php @@ -376,7 +376,7 @@ trait MakesHttpRequests } if (! Str::startsWith($uri, 'http')) { - $uri = config('app.url').'/'.$uri; + $uri = $this->baseUrl.'/'.$uri; } return trim($uri, '/');
use configured baseUrl instead of app url
diff --git a/lib/crash_log/configuration.rb b/lib/crash_log/configuration.rb index <HASH>..<HASH> 100644 --- a/lib/crash_log/configuration.rb +++ b/lib/crash_log/configuration.rb @@ -1,5 +1,6 @@ require 'hashr' require 'multi_json' +require 'logger' module CrashLog class Configuration < Hashr
Ensure we've loaded Logger in config
diff --git a/rapidoid-integration-tests/src/test/java/org/rapidoid/http/HttpLoginTest.java b/rapidoid-integration-tests/src/test/java/org/rapidoid/http/HttpLoginTest.java index <HASH>..<HASH> 100644 --- a/rapidoid-integration-tests/src/test/java/org/rapidoid/http/HttpLoginTest.java +++ b/rapidoid-integration-tests/src/test/java/org/rapidoid/http/HttpLoginTest.java @@ -26,6 +26,8 @@ import org.rapidoid.annotation.Since; import org.rapidoid.commons.Err; import org.rapidoid.commons.Rnd; import org.rapidoid.ctx.Contextual; +import org.rapidoid.log.Log; +import org.rapidoid.log.LogLevel; import org.rapidoid.security.Role; import org.rapidoid.setup.On; import org.rapidoid.u.U; @@ -37,10 +39,12 @@ import java.util.List; @Since("5.1.0") public class HttpLoginTest extends IsolatedIntegrationTest { - volatile boolean ready = false; + private volatile boolean ready = false; @Test public void testLogin() { + Log.setLogLevel(LogLevel.ERROR); + On.get("/user").json(() -> U.list(Contextual.username(), Contextual.roles())); On.get("/profile").roles(Role.LOGGED_IN).json(Contextual::username);
Hide the warnings for the failed HTTP logins in HttpLoginTest.
diff --git a/src/measurement/position_measurement.js b/src/measurement/position_measurement.js index <HASH>..<HASH> 100644 --- a/src/measurement/position_measurement.js +++ b/src/measurement/position_measurement.js @@ -427,7 +427,7 @@ export function coordsChar(cm, x, y) { function wrappedLineExtent(cm, lineObj, preparedMeasure, y) { let measure = ch => intoCoordSystem(cm, lineObj, measureCharPrepared(cm, preparedMeasure, ch), "line") let end = lineObj.text.length - let begin = findFirst(ch => measure(ch).bottom <= y, end - 1, 0) + 1 + let begin = findFirst(ch => measure(ch - 1).bottom <= y, end, 0) end = findFirst(ch => measure(ch).top > y, begin, end) return {begin, end} } @@ -450,7 +450,7 @@ function coordsCharInner(cm, lineObj, lineNo, x, y) { prevDiff = diff let prevPos = pos pos = moveVisually(cm, lineObj, pos, dir) - if (pos == null || pos.ch < begin || end <= pos.ch) { + if (pos == null || pos.ch < begin || end <= (pos.sticky == "before" ? pos.ch - 1 : pos.ch)) { pos = prevPos break }
Fix coordsChar at end and beginning of wrapped bidi lines
diff --git a/visidata/vdtui/__init__.py b/visidata/vdtui/__init__.py index <HASH>..<HASH> 100755 --- a/visidata/vdtui/__init__.py +++ b/visidata/vdtui/__init__.py @@ -435,7 +435,7 @@ class Extensible: def global_api(cls, func): setattr(cls, func.__name__, func) def _vdfunc(*args, **kwargs): - return func(vd, *args, **kwargs) + return getattr(vd, func.__name__)(*args, **kwargs) # getattr in case of replacement return _vdfunc @classmethod
[api] global_api global func calls VisiData peer by name, to allow for patching
diff --git a/ui/src/dashboards/constants/index.js b/ui/src/dashboards/constants/index.js index <HASH>..<HASH> 100644 --- a/ui/src/dashboards/constants/index.js +++ b/ui/src/dashboards/constants/index.js @@ -26,23 +26,23 @@ export const NEW_DEFAULT_DASHBOARD_CELL = { } const getMostCommonValue = values => { - const distribution = {} - let max = 0 - let result = 0 - - values.forEach(value => { - distribution[value] = (distribution[value] || 0) + 1 - if (distribution[value] > max) { - max = distribution[value] - result = [value] - return - } - if (distribution[value] === max) { - result.push(value) - } - }) + const results = values.reduce( + (acc, value) => { + const {distribution, mostCommonCount} = acc + distribution[value] = (distribution[value] || 0) + 1 + if (distribution[value] > mostCommonCount) { + return { + distribution, + mostCommonCount: distribution[value], + mostCommonValue: value, + } + } + return acc + }, + {distribution: {}, mostCommonCount: 0} + ) - return result[0] + return results.mostCommonValue } export const generateNewDashboardCell = dashboard => {
Use Iris' improved approach for determining most common cell sizes
diff --git a/py3status/modules/do_not_disturb.py b/py3status/modules/do_not_disturb.py index <HASH>..<HASH> 100644 --- a/py3status/modules/do_not_disturb.py +++ b/py3status/modules/do_not_disturb.py @@ -87,7 +87,7 @@ class Py3status: self.is_on = not self.is_on if self._is_dunst(): new_flag = "--signal {}".format(self.dunst_signal_on - if self.is_on else self.dunst_signal_off) + if self.is_on else self.dunst_signal_off) system("killall {} dunst".format(new_flag)) else: if self.is_on:
Revise indentation to make flake8 happy
diff --git a/satpy/tests/reader_tests/test_nucaps.py b/satpy/tests/reader_tests/test_nucaps.py index <HASH>..<HASH> 100644 --- a/satpy/tests/reader_tests/test_nucaps.py +++ b/satpy/tests/reader_tests/test_nucaps.py @@ -60,7 +60,7 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): """Mimic reader input file content.""" file_content = { '/attr/time_coverage_start': filename_info['start_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ'), - '/attr/time_coverage_end': filename_info['end_time'].strftime('%Y-%m-%dT%H:%M:%S.%fZ'), + '/attr/time_coverage_end': filename_info['end_time'].strftime('%Y-%m-%dT%H:%M:%SZ'), '/attr/start_orbit_number': 1, '/attr/end_orbit_number': 2, '/attr/platform_name': 'NPP',
Add a test for alternate form of date found in <I> files, using end_time to perform that test while keeping original format for start_time test.
diff --git a/i3pystatus/openstack_vms.py b/i3pystatus/openstack_vms.py index <HASH>..<HASH> 100644 --- a/i3pystatus/openstack_vms.py +++ b/i3pystatus/openstack_vms.py @@ -17,12 +17,10 @@ class Openstack_vms(IntervalModule): ("username", "Username for OpenStack authentication (OS_USERNAME)"), ("password", "Password for Openstack authentication (OS_PASSWORD)"), ("tenant_name", "Tenant/Project name to view (OS_TENANT_NAME)"), - ("color", "Display color when non-active VMs are =< `threshold` " - "(default: #00FF00"), - ("crit_color", "Display color when non-active VMs are => `threshold` " - "(default: #FF0000"), + ("color", "Display color when non-active VMs are =< `threshold`"), + ("crit_color", "Display color when non-active VMs are => `threshold`"). ("threshold", "Set critical indicators when non-active VM pass this " - "number (default: 0)"), + "number"), ("horizon_url", "When clicked, open this URL in a browser") ) required = ("auth_url", "password", "tenant_name", "username")
Remove defaults documentation, as they are auto-gen
diff --git a/src/post.js b/src/post.js index <HASH>..<HASH> 100644 --- a/src/post.js +++ b/src/post.js @@ -106,7 +106,7 @@ return function () if (line == "quit") { process.exit(); } - stockfish.postMessage(line); + stockfish.postMessage(line, true); } });
Don't delay for command line either.
diff --git a/app/components/ninetails/section.rb b/app/components/ninetails/section.rb index <HASH>..<HASH> 100644 --- a/app/components/ninetails/section.rb +++ b/app/components/ninetails/section.rb @@ -5,7 +5,10 @@ module Ninetails attr_accessor :elements_instances def self.new_from_filename(filename) - name = File.basename(filename, ".rb") + new_from_name File.basename(filename, ".rb") + end + + def self.new_from_name(name) "Section::#{name.camelize}".safe_constantize.new end diff --git a/app/controllers/ninetails/sections_controller.rb b/app/controllers/ninetails/sections_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/ninetails/sections_controller.rb +++ b/app/controllers/ninetails/sections_controller.rb @@ -10,7 +10,7 @@ module Ninetails end def show - render json: empty_section_from_name(params[:id]) + render json: Section.new_from_name(params[:id]).serialize end def validate
Split new_from_filename into two methods. One that handles converting a lowercase name to an instance of the class. Another that converts a full filename and then uses the aforementioned method.
diff --git a/maildump/static/js/maildump.js b/maildump/static/js/maildump.js index <HASH>..<HASH> 100644 --- a/maildump/static/js/maildump.js +++ b/maildump/static/js/maildump.js @@ -4,6 +4,9 @@ $(document).ready(function() { // Misc stuff and initialization $('.resizer').on('mousedown', function(e) { + if(e.button != 0) { + return; + } var $this = $(this); var target = $this.data('sibling') == 'prev' ? $this.prev() : $this.next(); e.preventDefault();
Only allow resizing with the left mouse button
diff --git a/meshio/h5m_io.py b/meshio/h5m_io.py index <HASH>..<HASH> 100644 --- a/meshio/h5m_io.py +++ b/meshio/h5m_io.py @@ -44,16 +44,17 @@ def read(filename): 'Tet4': 'tetra' } cells = {} + cell_data = {} for h5m_type, data in dset['elements'].iteritems(): meshio_type = h5m_to_meshio_type[h5m_type] conn = data['connectivity'] # Note that the indices are off by 1 in h5m. cells[meshio_type] = conn[()] - 1 - cell_data = {} - if 'tags' in data: - for name, dataset in data['tags'].items(): - cell_data[name] = dataset[()] + # TODO bring cell data back + # if 'tags' in data: + # for name, dataset in data['tags'].items(): + # cell_data[name] = dataset[()] # The `sets` in H5M are special in that they represent a segration of data # in the current file, particularly by a load balancer (Metis, Zoltan,
h5m: don't read cell data for now
diff --git a/promptly/inputs.py b/promptly/inputs.py index <HASH>..<HASH> 100644 --- a/promptly/inputs.py +++ b/promptly/inputs.py @@ -120,11 +120,11 @@ class ChoiceInput(BaseInput): return prompt def process_data(self, data): - try: - self.value = int(data) - except ValueError: - self.value = None - raise + + if not [x for x in choices if str(x[0]) == data]: + raise ValueError + + self.value = data diff --git a/tests/test_promptly.py b/tests/test_promptly.py index <HASH>..<HASH> 100644 --- a/tests/test_promptly.py +++ b/tests/test_promptly.py @@ -28,6 +28,7 @@ class TestPromptly(unittest.TestCase): form.prompt() values = form.values() + for v in values: print(v)
fixed issue with BooleanInput
diff --git a/xhtml2pdf/document.py b/xhtml2pdf/document.py index <HASH>..<HASH> 100644 --- a/xhtml2pdf/document.py +++ b/xhtml2pdf/document.py @@ -74,16 +74,22 @@ def pisaStory(src, path=None, link_callback=None, debug=0, default_css=None, def pisaDocument(src, dest=None, path=None, link_callback=None, debug=0, default_css=None, xhtml=False, encoding=None, xml_output=None, - raise_exception=True, capacity=100 * 1024, **kw): - log.debug("pisaDocument options:\n src = %r\n dest = %r\n path = %r\n link_callback = %r\n xhtml = %r", + raise_exception=True, capacity=100 * 1024, context_meta=None, + **kw): + log.debug("pisaDocument options:\n src = %r\n dest = %r\n path = %r\n link_callback = %r\n xhtml = %r\n context_meta = %r", src, dest, path, link_callback, - xhtml) + xhtml, + context_meta) # Prepare simple context context = pisaContext(path, debug=debug, capacity=capacity) + + if context_meta is not None: + context.meta.update(context_meta) + context.pathCallback = link_callback # Build story
Add optional pisaDocument argument to set metadata. Without this the functionality of pisaDocument would need to be recreated in order to set metadata such as the document author.
diff --git a/test/helper/Appium_test.js b/test/helper/Appium_test.js index <HASH>..<HASH> 100644 --- a/test/helper/Appium_test.js +++ b/test/helper/Appium_test.js @@ -14,6 +14,7 @@ describe('Appium', function () { this.timeout(0); before(() => { + global.codecept_dir = path.join(__dirname, '/../data'); app = new Appium({ app: apk_path, desiredCapabilities: { @@ -584,15 +585,15 @@ describe('Appium', function () { }); }); - describe('#saveScreenshot', () => { + describe('#saveScreenshot @quick', () => { beforeEach(() => { global.output_dir = path.join(global.codecept_dir, 'output'); }); it('should create a screenshot file in output dir', async () => { const sec = (new Date()).getUTCMilliseconds(); - await app.saveScreenshot(`screenshot_${sec}`); - assert.ok(fileExists(path.join(output_dir, `screenshot_${sec}`)), null, 'file does not exists'); + await app.saveScreenshot(`screenshot_${sec}.png`); + assert.ok(fileExists(path.join(global.output_dir, `screenshot_${sec}.png`)), null, 'file does not exists'); }); });
re-enabled saveScreenshot for Appium tests
diff --git a/lib/containers.js b/lib/containers.js index <HASH>..<HASH> 100644 --- a/lib/containers.js +++ b/lib/containers.js @@ -27,9 +27,10 @@ var defaultDefinitions = [{ }, { 'require': 'process-container', 'type': 'process' -}, { - 'require': 'process-monitor-container', - 'type': 'processmonitor' +// TODO re-enable +//}, { +// 'require': 'process-monitor-container', +// 'type': 'processmonitor' }, { 'require': 'aws-elb-container', 'type': 'aws-elb'
Commented out process-monitor-container require in containers.
diff --git a/command/apply_destroy_test.go b/command/apply_destroy_test.go index <HASH>..<HASH> 100644 --- a/command/apply_destroy_test.go +++ b/command/apply_destroy_test.go @@ -195,7 +195,7 @@ func TestApply_destroyTargeted(t *testing.T) { Mode: addrs.ManagedResourceMode, Type: "test_instance", Name: "foo", - }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance), + }.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance), &states.ResourceInstanceObjectSrc{ AttrsJSON: []byte(`{"id":"i-ab123"}`), Status: states.ObjectReady, @@ -212,8 +212,9 @@ func TestApply_destroyTargeted(t *testing.T) { Name: "foo", }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance), &states.ResourceInstanceObjectSrc{ - AttrsJSON: []byte(`{"id":"i-abc123"}`), - Status: states.ObjectReady, + AttrsJSON: []byte(`{"id":"i-abc123"}`), + Dependencies: []addrs.AbsResource{mustResourceAddr("test_instance.foo")}, + Status: states.ObjectReady, }, addrs.AbsProviderConfig{ Provider: addrs.NewLegacyProvider("test"),
add missing deps to targeted destroy test