diff: string, lengths 65 to 26.7k
message: string, lengths 7 to 9.92k
diff --git a/src/Tokenly/XChainClient/Client.php b/src/Tokenly/XChainClient/Client.php index <HASH>..<HASH> 100644 --- a/src/Tokenly/XChainClient/Client.php +++ b/src/Tokenly/XChainClient/Client.php @@ -56,6 +56,16 @@ class Client } /** + * destroys the payment address + * @param string $uuid id of the paymehnt address + * @return array an empty array + */ + public function destroyPaymentAddress($uuid) { + $result = $this->newAPIRequest('DELETE', '/addresses/'.$uuid); + return $result; + } + + /** * monitor a new address * @param string $address bitcoin/counterparty address * @param string $webhook_endpoint webhook callback URL diff --git a/src/Tokenly/XChainClient/Mock/MockBuilder.php b/src/Tokenly/XChainClient/Mock/MockBuilder.php index <HASH>..<HASH> 100644 --- a/src/Tokenly/XChainClient/Mock/MockBuilder.php +++ b/src/Tokenly/XChainClient/Mock/MockBuilder.php @@ -206,6 +206,11 @@ class MockBuilder } + // handle delete message with an empty array + if ($method == 'DELETE') { + return []; + } + throw new Exception("No sample method for $method $path", 1); }));
Added destroy payment address and mock handler
diff --git a/Webservice/Interfaces/ISchema.php b/Webservice/Interfaces/ISchema.php index <HASH>..<HASH> 100644 --- a/Webservice/Interfaces/ISchema.php +++ b/Webservice/Interfaces/ISchema.php @@ -10,7 +10,7 @@ interface ISchema /** * Gets the method argument that was passed, using the param name * @param string $method - * @param string $paranName + * @param string $paramName * @return string */ function MethodArgument($method, $paramName); diff --git a/Wording/Worder.php b/Wording/Worder.php index <HASH>..<HASH> 100644 --- a/Wording/Worder.php +++ b/Wording/Worder.php @@ -55,7 +55,7 @@ class Worder } /** - * Replaces + * Replaces the placeholder using a given realizer and optional arguments * @param string $placeholder * @param Interfaces\IRealizer $realizer * @param $realizer,... Optional strings inserted via String::Format @@ -63,7 +63,6 @@ class Worder */ static function ReplaceUsing($placeholder, Interfaces\IRealizer $realizer = null) { - echo($placeholder); $args = func_get_args(); array_shift($args); if (count($args) >= 2)
- Corrections in comments and removal of test code in Worder
diff --git a/pkg/pod/pods.go b/pkg/pod/pods.go index <HASH>..<HASH> 100644 --- a/pkg/pod/pods.go +++ b/pkg/pod/pods.go @@ -269,7 +269,7 @@ func getPod(dataDir string, uuid *types.UUID) (*Pod, error) { p.FileLock = l - if p.isRunning() { + if p.isRunning() || p.isExit() { cfd, err := p.Fd() if err != nil { return nil, errwrap.Wrap(fmt.Errorf("error acquiring pod %v dir fd", uuid), err) @@ -1200,6 +1200,11 @@ func (p *Pod) IsFinished() bool { return p.isExited || p.isAbortedPrepare || p.isGarbage || p.isGone } +// isExit returns true if the pod is in exited states +func (p *Pod) isExit() bool { + return p.isExited +} + // AppExitCode returns the app's exit code. // It returns an error if the exit code file doesn't exit or the content of the file is invalid. func (p *Pod) AppExitCode(appName string) (int, error) {
list: add ip of non running pods to the json format
diff --git a/opinel/utils_s3.py b/opinel/utils_s3.py index <HASH>..<HASH> 100644 --- a/opinel/utils_s3.py +++ b/opinel/utils_s3.py @@ -4,6 +4,23 @@ from opinel.utils import * ######################################## +##### S3-related arguments +######################################## + +# +# Add an S3-related argument to a recipe +# +def add_s3_argument(parser, default_args, argument_name): + if argument_name == 'bucket-name': + parser.add_argument('--bucket-name', + dest='bucket_name', + default=[], + nargs='+', + help='Name of S3 buckets that the script will iterate through.') + else: + raise Exception('Invalid parameter name: %s' % argument_name) + +######################################## ##### Helpers ########################################
Add --bucket-name as a common S3 argument
diff --git a/georasters/georasters.py b/georasters/georasters.py index <HASH>..<HASH> 100755 --- a/georasters/georasters.py +++ b/georasters/georasters.py @@ -679,9 +679,13 @@ class GeoRaster(object): geo.map_pixel(point_x, point_y) Return value of raster in location + Note: (point_x, point_y) must belong to the geographic coordinate system and the coverage of the raster ''' row, col =map_pixel(point_x, point_y, self.x_cell_size, self.y_cell_size, self.xmin, self.ymax) - return self.raster[row, col] + try: + return self.raster[row, col] + except: + raise Exception('There has been an error. Make sure the point belongs to the raster coverage and it is in the correct geographic coordinate system.') def map_pixel_location(self, point_x, point_y): ''' diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ def readme(): return f.read() setup(name='georasters', - version='0.5', + version='0.5.1', description='Tools for working with Geographical Information System Rasters', url='http://github.com/ozak/georasters', author='Ömer Özak',
Included error message for map_pixel
diff --git a/actionmailer/lib/action_mailer/test_case.rb b/actionmailer/lib/action_mailer/test_case.rb index <HASH>..<HASH> 100644 --- a/actionmailer/lib/action_mailer/test_case.rb +++ b/actionmailer/lib/action_mailer/test_case.rb @@ -68,6 +68,15 @@ module ActionMailer ActionMailer::Base.deliveries.clear end + def set_delivery_method(method) + @old_delivery_method = ActionMailer::Base.delivery_method + ActionMailer::Base.delivery_method = method + end + + def restore_delivery_method + ActionMailer::Base.delivery_method = @old_delivery_method + end + def set_expected_mail @expected = Mail.new @expected.content_type ["text", "plain", { "charset" => charset }]
Add `set_delivery_method` and `restore_delivery_method` to `ActionMailer::TestCase`. This way these methods are available outside the ActionMailer test suite, but they are still duplicated inside `test/abstract_unit` for test cases that don't inherit from the `ActionMailer::TestCase` class.
diff --git a/src/mol.js b/src/mol.js index <HASH>..<HASH> 100644 --- a/src/mol.js +++ b/src/mol.js @@ -569,17 +569,17 @@ Chain.prototype._cacheBackboneTraces = function() { if (!residue.isAminoacid()) { if (stretch.length() > 1) { this._cachedTraces.push(stretch); - stretch = new BackboneTrace(); } + stretch = new BackboneTrace(); continue; } if (stretch.length() === 0) { stretch.push(residue); continue; } - var ca_prev = this._residues[i-1].atom('C'); - var n_this = residue.atom('N'); - if (Math.abs(vec3.sqrDist(ca_prev.pos(), n_this.pos()) - 1.5*1.5) < 1) { + var caPrev = this._residues[i-1].atom('C'); + var nThis = residue.atom('N'); + if (Math.abs(vec3.sqrDist(caPrev.pos(), nThis.pos()) - 1.5*1.5) < 1) { stretch.push(residue); } else { if (stretch.length() > 1) {
fix backbone trace creation The backbone trace creation was failing when a single amino acid was followed by a non-aminoacid residue, which was then again followed by an amino acid.
diff --git a/nanoplot/version.py b/nanoplot/version.py index <HASH>..<HASH> 100644 --- a/nanoplot/version.py +++ b/nanoplot/version.py @@ -1 +1 @@ -__version__ = "1.36.2" +__version__ = "1.37.0"
bumping version, after Ilias readded the output format of figures as requested in <URL>
diff --git a/src/Client/Signature/HmacSha1Signature.php b/src/Client/Signature/HmacSha1Signature.php index <HASH>..<HASH> 100644 --- a/src/Client/Signature/HmacSha1Signature.php +++ b/src/Client/Signature/HmacSha1Signature.php @@ -2,7 +2,8 @@ namespace League\OAuth1\Client\Signature; -use Guzzle\Http\Url; +use GuzzleHttp\Psr7; +use GuzzleHttp\Psr7\Uri; class HmacSha1Signature extends Signature implements SignatureInterface { @@ -35,7 +36,7 @@ class HmacSha1Signature extends Signature implements SignatureInterface */ protected function createUrl($uri) { - return Url::factory($uri); + return Psr7\uri_for($uri); } /** @@ -48,11 +49,11 @@ class HmacSha1Signature extends Signature implements SignatureInterface * * @return string */ - protected function baseString(Url $url, $method = 'POST', array $parameters = array()) + protected function baseString(\GuzzleHttp\Psr7\Uri $url, $method = 'POST', array $parameters = array()) { $baseString = rawurlencode($method).'&'; - $schemeHostPath = Url::buildUrl(array( + $schemeHostPath = Uri::fromParts(array( 'scheme' => $url->getScheme(), 'host' => $url->getHost(), 'path' => $url->getPath(),
migrate to Guzzlehttp 6 for compatibility with oauth2-client
diff --git a/tests/utils/test_utils.py b/tests/utils/test_utils.py index <HASH>..<HASH> 100644 --- a/tests/utils/test_utils.py +++ b/tests/utils/test_utils.py @@ -265,6 +265,11 @@ def test_cross_validator_with_predictor_and_kwargs(): assert len(results_06) == 3 +def test_cross_validator_with_stratified_cox_model(): + cf = CoxPHFitter(strata=['race']) + utils.k_fold_cross_validation(cf, load_rossi(), duration_col='week', event_col='arrest') + + def test_cross_validator_with_specific_loss_function(): def square_loss(y_actual, y_pred): return ((y_actual - y_pred) ** 2).mean() @@ -275,6 +280,7 @@ def test_cross_validator_with_specific_loss_function(): results_con = utils.k_fold_cross_validation(cf, load_regression_dataset(), duration_col='T', event_col='E') assert list(results_sq) != list(results_con) + def test_concordance_index(): size = 1000 T = np.random.normal(size=size)
example on how to use strata in k_fold
diff --git a/packages/qix/src/utils/makeReactNativeConfig.js b/packages/qix/src/utils/makeReactNativeConfig.js index <HASH>..<HASH> 100644 --- a/packages/qix/src/utils/makeReactNativeConfig.js +++ b/packages/qix/src/utils/makeReactNativeConfig.js @@ -77,6 +77,9 @@ const getDefaultConfig = ({ exclude: /node_modules\/(?!react|@expo|pretty-format|qix)/, use: [ { + loader: require.resolve('thread-loader'), + }, + { loader: require.resolve('babel-loader'), options: Object.assign({}, getBabelConfig(root), { /**
chore: use `thread-loader`
diff --git a/web/concrete/src/Localization/Localization.php b/web/concrete/src/Localization/Localization.php index <HASH>..<HASH> 100755 --- a/web/concrete/src/Localization/Localization.php +++ b/web/concrete/src/Localization/Localization.php @@ -198,6 +198,8 @@ class Localization */ public static function clearCache() { + $locale = static::activeLocale(); self::getCache()->flush(); + static::changeLocale($locale); } }
Make sure we reload the translation files immediately on cache clear Former-commit-id: a<I>fadf<I>ed<I>d<I>a<I>d<I>a<I>c9c<I>
diff --git a/staging/src/k8s.io/apiserver/pkg/server/mux/pathrecorder.go b/staging/src/k8s.io/apiserver/pkg/server/mux/pathrecorder.go index <HASH>..<HASH> 100644 --- a/staging/src/k8s.io/apiserver/pkg/server/mux/pathrecorder.go +++ b/staging/src/k8s.io/apiserver/pkg/server/mux/pathrecorder.go @@ -103,10 +103,11 @@ func (m *PathRecorderMux) ListedPaths() []string { } func (m *PathRecorderMux) trackCallers(path string) { + stack := string(debug.Stack()) if existingStack, ok := m.pathStacks[path]; ok { - utilruntime.HandleError(fmt.Errorf("registered %q from %v", path, existingStack)) + utilruntime.HandleError(fmt.Errorf("duplicate path registration of %q: original registration from %v\n\nnew registration from %v", path, existingStack, stack)) } - m.pathStacks[path] = string(debug.Stack()) + m.pathStacks[path] = stack } // refreshMuxLocked creates a new mux and must be called while locked. Otherwise the view of handlers may
Improve pathrecorder duplicate registration info Print information from both the original path registration and the new path registration stack traces when encountering a duplicate. This helps the developer determine where the duplication is coming from and makes it much easier to resolve.
diff --git a/bcbio/bam/highdepth.py b/bcbio/bam/highdepth.py index <HASH>..<HASH> 100644 --- a/bcbio/bam/highdepth.py +++ b/bcbio/bam/highdepth.py @@ -14,6 +14,7 @@ import yaml from bcbio import utils from bcbio.distributed.transaction import file_transaction +from bcbio.log import logger from bcbio.pipeline import datadict as dd from bcbio.provenance import do @@ -41,9 +42,12 @@ def identify(data): "--window-size {window_size} {work_bam} " "| head -n {sample_size} " """| cut -f 5 | {py_cl} -l 'numpy.median([float(x) for x in l if not x.startswith("mean")])'""") + median_depth_out = subprocess.check_output(cmd.format(**locals()), shell=True) try: - median_cov = float(subprocess.check_output(cmd.format(**locals()), shell=True)) + median_cov = float(median_depth_out) except ValueError: + logger.info("Skipping high coverage region detection; problem calculating median depth: %s" % + median_depth_out) median_cov = None if median_cov and not numpy.isnan(median_cov): high_thresh = int(high_multiplier * median_cov)
Provide better debugging of failed high depth runs Help assist with #<I>
diff --git a/src/toil/test/src/fileStoreTest.py b/src/toil/test/src/fileStoreTest.py index <HASH>..<HASH> 100644 --- a/src/toil/test/src/fileStoreTest.py +++ b/src/toil/test/src/fileStoreTest.py @@ -639,13 +639,12 @@ class hidden(object): F.addChild(G) Job.Runner.startToil(A, self.options) except FailedJobsException as err: - self.assertEqual(err.numberOfFailedJobs, 1) with open(self.options.logFile) as f: logContents = f.read() if CacheUnbalancedError.message in logContents: self.assertEqual(expectedResult, 'Fail') else: - self.fail('Toil did not raise the expected AssertionError') + self.fail('Toil did not raise the expected CacheUnbalancedError but failed for some other reason') @staticmethod def _writeFileToJobStoreWithAsserts(job, isLocalFile, nonLocalDir=None, fileMB=1):
Make _testCacheEviction not care about the total failed job count
diff --git a/includes/functions.php b/includes/functions.php index <HASH>..<HASH> 100644 --- a/includes/functions.php +++ b/includes/functions.php @@ -421,11 +421,6 @@ function add_network( $args = array() ) { update_network_option( $new_network_id, 'site_name', $network_name ); - switch_to_network( $new_network_id ); - add_site_option( 'site_admins', array() ); - grant_super_admin( $r['network_admin_id']); - restore_current_network(); - /** * Fix upload_path for main sites on secondary networks * This applies only to new installs (WP 3.5+) @@ -508,6 +503,14 @@ function add_network( $args = array() ) { restore_current_network(); } + switch_to_network( $new_network_id ); + // Grant super admin adds 'admin' as network admin if the 'site_admins' option does not exist. + if ( empty( $r['clone_network'] ) ) { + add_site_option( 'site_admins', array() ); + } + grant_super_admin( $r['network_admin_id']); + restore_current_network(); + // Clean network cache clean_network_cache( array() );
Grant super admin after network options cloning so network admin argument is respected #fix-<I>
diff --git a/src/Classes/Bulk.php b/src/Classes/Bulk.php index <HASH>..<HASH> 100644 --- a/src/Classes/Bulk.php +++ b/src/Classes/Bulk.php @@ -41,6 +41,12 @@ class Bulk */ public $body = []; + /** + * Number of pending operations + * @var int + */ + public $operationCount = 0; + /** * Bulk constructor. @@ -177,6 +183,8 @@ class Bulk $this->body["body"][] = $data; } + $this->operationCount++; + $this->reset(); } @@ -203,5 +211,15 @@ class Bulk } + /** + * Commit all pending operations + */ + public function commit() + { + $this->query->connection->bulk($this->body); + $this->operationCount = 0; + $this->body = []; + + } }
Add commit method to Bulk class for easier use outside of the Query class
diff --git a/src/utils/convertToNumberWithinIntervalBounds.js b/src/utils/convertToNumberWithinIntervalBounds.js index <HASH>..<HASH> 100644 --- a/src/utils/convertToNumberWithinIntervalBounds.js +++ b/src/utils/convertToNumberWithinIntervalBounds.js @@ -1,4 +1,6 @@ function convertToNumberWithinIntervalBounds (number, min, max) { + min = typeof min === 'number' ? min : -Infinity; + max = typeof max === 'number' ? max : Infinity; return Math.max(min, Math.min(number, max)); }
convertToNumberWithinIntervalBounds uses -/+ Infinity for min/max if unspecified.
diff --git a/salt/output/nested.py b/salt/output/nested.py index <HASH>..<HASH> 100644 --- a/salt/output/nested.py +++ b/salt/output/nested.py @@ -1,5 +1,5 @@ ''' -Recursively display netsted data +Recursively display nested data, this is the default outputter. ''' # Import salt libs diff --git a/salt/output/pprint_out.py b/salt/output/pprint_out.py index <HASH>..<HASH> 100644 --- a/salt/output/pprint_out.py +++ b/salt/output/pprint_out.py @@ -1,5 +1,5 @@ ''' -The python pretty print system is the default outputter. This outputter +The python pretty print system was the default outputter. This outputter simply passed the data passed into it through the pprint module. '''
Fix docstrings for the outputters
diff --git a/xchange-core/src/main/java/org/knowm/xchange/Exchange.java b/xchange-core/src/main/java/org/knowm/xchange/Exchange.java index <HASH>..<HASH> 100644 --- a/xchange-core/src/main/java/org/knowm/xchange/Exchange.java +++ b/xchange-core/src/main/java/org/knowm/xchange/Exchange.java @@ -1,6 +1,7 @@ package org.knowm.xchange; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import org.knowm.xchange.client.ResilienceRegistries; import org.knowm.xchange.currency.CurrencyPair; @@ -50,7 +51,7 @@ public interface Exchange { * @return The exchange's instruments */ default List<Instrument> getExchangeInstruments() { - throw new NotYetImplementedForExchangeException(); + return new ArrayList<>(getExchangeSymbols()); } /**
[core] Improved Exchange.getExchangeInstruments()
diff --git a/src/mink.js b/src/mink.js index <HASH>..<HASH> 100644 --- a/src/mink.js +++ b/src/mink.js @@ -14,6 +14,8 @@ const DEFAULT_CONFIG = { width: 1366, height: 768, }, + headless: process.env.RUN_HEADLESS !== '0', + devtools: process.env.RUN_DEVTOOLS === '1', }; function gherkin(cucumber) { @@ -35,7 +37,10 @@ Mink.prototype.hook = function (cucumber) { }); }; Mink.prototype.setup = async function () { - this.browser = await puppeteer.launch(); + this.browser = await puppeteer.launch({ + headless: this.config.headless && !this.config.devtools, + devtools: this.config.devtools, + }); this.page = await this.browser.newPage(); return this.page.setViewport(this.config.viewport); };
Added headless and dev-tool options
diff --git a/papi-v1/rules.go b/papi-v1/rules.go index <HASH>..<HASH> 100644 --- a/papi-v1/rules.go +++ b/papi-v1/rules.go @@ -27,6 +27,7 @@ type Rules struct { func NewRules() *Rules { rules := &Rules{} rules.Rules = NewRule(rules) + rules.Rules.Name = "default" rules.Init() return rules @@ -759,7 +760,7 @@ func NewBehavior(parent *Rule) *Behavior { // OptionValue is a map with string keys, and any // type of value. You can nest OptionValues as necessary // to create more complex values. -type OptionValue client.JSONBody +type OptionValue map[string]interface{} // AvailableCriteria represents a collection of available rule criteria type AvailableCriteria struct {
The first rule should always be called default
diff --git a/src/Payum/YiiExtension/PayumComponent.php b/src/Payum/YiiExtension/PayumComponent.php index <HASH>..<HASH> 100644 --- a/src/Payum/YiiExtension/PayumComponent.php +++ b/src/Payum/YiiExtension/PayumComponent.php @@ -1,6 +1,8 @@ <?php namespace Payum\YiiExtension; +\Yii::import('Payum\YiiExtension\TokenFactory', true); + use Payum\Core\PaymentInterface; use Payum\Core\Registry\RegistryInterface; use Payum\Core\Registry\SimpleRegistry;
Add required code to include TokenFactory in Yii
diff --git a/hydpy/tests/test_everything.py b/hydpy/tests/test_everything.py index <HASH>..<HASH> 100644 --- a/hydpy/tests/test_everything.py +++ b/hydpy/tests/test_everything.py @@ -16,8 +16,7 @@ import warnings import matplotlib exitcode = int(os.system('python test_pyplot_backend.py')) -print('Exit code is %s.' % exitcode) -standard_backend_missing = exitcode == 1 +standard_backend_missing = exitcode in (1, 256) if standard_backend_missing: matplotlib.use('Agg') print('The standard backend of matplotlib does not seem to be available '
Try to fix commit #<I>b1e. On Linux, exit code 1 seems to become <I>...
diff --git a/src/lib/collections/list.js b/src/lib/collections/list.js index <HASH>..<HASH> 100644 --- a/src/lib/collections/list.js +++ b/src/lib/collections/list.js @@ -237,23 +237,6 @@ extend(List, Collection, { }, /** - * Performs the specified action on each element of the List. - * @param {Function} action The action function to perform on each element of the List. eg. function(item) - */ - forEach: function (action, thisArg) { - assertType(action, Function); - - for (var i = 0, len = this.length; i < len; i++) { - if (thisArg) { - action.call(thisArg, this[i]); - } - else { - action(this[i]); - } - } - }, - - /** * Gets the element at the specified index. * @param {Number} index The zero-based index of the element to get. * @returns {Object}
remove List's redundant 'forEach' method
diff --git a/test/test_helper.rb b/test/test_helper.rb index <HASH>..<HASH> 100644 --- a/test/test_helper.rb +++ b/test/test_helper.rb @@ -1,9 +1,7 @@ -$:.unshift(File.dirname(__FILE__) + '/../lib') - -require 'rubygems' require 'test/unit' -require 'mocha' require 'whois' +require 'rubygems' +require 'mocha' class Test::Unit::TestCase
No need to change $LOAD_PATH. Currently the library folder is already appended.
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -33,8 +33,6 @@ extensions = [ 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.mathjax', - 'IPython.sphinxext.ipython_console_highlighting', - 'IPython.sphinxext.ipython_directive' ] # Add any paths that contain templates here, relative to this directory.
removing ipython directives from docs; wasn't building
diff --git a/tofu/spectro/_fit12d_dextract.py b/tofu/spectro/_fit12d_dextract.py index <HASH>..<HASH> 100644 --- a/tofu/spectro/_fit12d_dextract.py +++ b/tofu/spectro/_fit12d_dextract.py @@ -853,7 +853,9 @@ def fit2d_extract( d3[k0][k1]['values'][~indphi, jj] = np.nan # update validity according to indphi - dfit2d['validity'][np.all(~indphi, axis=1)] = -3 + dfit2d['validity'][ + (dfit2d['validity'] == 0) & np.all(~indphi, axis=1) + ] = -3 # ---------- # func diff --git a/tofu/version.py b/tofu/version.py index <HASH>..<HASH> 100644 --- a/tofu/version.py +++ b/tofu/version.py @@ -1,2 +1,2 @@ # Do not edit, pipeline versioning governed by git tags! -__version__ = '1.5.0-231-ge4efc387' +__version__ = '1.5.0-232-g5578af59'
[#<I>] validity set to -3 only if not already negative
diff --git a/test/test_response.py b/test/test_response.py index <HASH>..<HASH> 100644 --- a/test/test_response.py +++ b/test/test_response.py @@ -26,7 +26,7 @@ def test_response_reference(): foo = bar.foo # This used to cause an exception about invalid pointers because the response got garbage collected - foo.foo().wait() + assert foo.foo().wait().val == 1 def test_response_reference2(): baz = test_response_capnp.Baz._new_client(BazServer()) @@ -37,4 +37,4 @@ def test_response_reference2(): response = baz.grault().wait() bar = response.bar foo = bar.foo - foo.foo().wait() + assert foo.foo().wait().val == 1
Add asserts to test_response.py
diff --git a/pkg/volume/rbd/rbd_util.go b/pkg/volume/rbd/rbd_util.go index <HASH>..<HASH> 100644 --- a/pkg/volume/rbd/rbd_util.go +++ b/pkg/volume/rbd/rbd_util.go @@ -712,7 +712,7 @@ func (util *rbdUtil) rbdInfo(b *rbdMounter) (int, error) { // klog.V(4).Infof("rbd: info %s using mon %s, pool %s id %s key %s", b.Image, mon, b.Pool, id, secret) output, err = b.exec.Command("rbd", - "info", b.Image, "--pool", b.Pool, "-m", mon, "--id", id, "--key="+secret, "-k=/dev/null", "--format=json").CombinedOutput() + "info", b.Image, "--pool", b.Pool, "-m", mon, "--id", id, "--key="+secret, "-k=/dev/null", "--format=json").Output() if err, ok := err.(*exec.Error); ok { if err.Err == exec.ErrNotFound {
fix expanding rbd volumes without ceph.conf Ignore stderr of rbd info --format=json as without a ceph.conf it will print messages about no configuration onto stderr which break the json parsing. The actual json information the function wants is always on stdout. Closes: gh-<I>
diff --git a/inliner.js b/inliner.js index <HASH>..<HASH> 100755 --- a/inliner.js +++ b/inliner.js @@ -215,7 +215,9 @@ function Inliner(url, options, callback) { // some protection against putting script tags in the body final_code = final_code.replace(/<\/script>/gi, '<\\/script>'); - this.innerHTML = final_code; + this.innerText = final_code; + // window.$(this).text(final_code); + if (src) { inliner.emit('progress', 'compress ' + URL.resolve(root, src)); } else { @@ -230,7 +232,7 @@ function Inliner(url, options, callback) { inliner.emit('jobs', (inliner.total - inliner.todo) + '/' + inliner.total); } else if (orig_code) { // window.$(this).text(orig_code.replace(/<\/script>/gi, '<\\/script>')); - this.innerHTML = orig_code.replace(/<\/script>/gi, '<\\/script>'); + this.innerText = orig_code.replace(/<\/script>/gi, '<\\/script>'); } }); finished();
fixed HTML encoding when it should not have been happening
diff --git a/test/dialects/from-clause-tests.js b/test/dialects/from-clause-tests.js index <HASH>..<HASH> 100644 --- a/test/dialects/from-clause-tests.js +++ b/test/dialects/from-clause-tests.js @@ -17,6 +17,10 @@ Harness.test({ mysql: { text : 'SELECT `user`.* FROM `user` , `post`', string: 'SELECT `user`.* FROM `user` , `post`' + }, + sqlserver: { + text : 'SELECT [user].* FROM [user] , [post]', + string: 'SELECT [user].* FROM [user] , [post]' } }); @@ -33,5 +37,9 @@ Harness.test({ mysql: { text : 'SELECT `user`.*, `post`.* FROM `user` , `post`', string: 'SELECT `user`.*, `post`.* FROM `user` , `post`' + }, + sqlserver: { + text : 'SELECT [user].*, [post].* FROM [user] , [post]', + string: 'SELECT [user].*, [post].* FROM [user] , [post]' } });
Added from-clause-tests for SqlServer.
diff --git a/src/array.js b/src/array.js index <HASH>..<HASH> 100644 --- a/src/array.js +++ b/src/array.js @@ -143,15 +143,6 @@ var foldRight = curry(function(a, v, f) { return v; }); -var foreach = curry(function(a, f) { - var total, - i; - - for (i = 0, total = a.length; i < total; i++) { - f(a[i]); - } -}); - var map = curry(function(a, f) { var accum = [], total, @@ -252,7 +243,6 @@ squishy = squishy .method('flatMap', isArray, flatMap) .method('fold', isArray, fold) .method('foldRight', isArray, foldRight) - .method('foreach', isArray, foreach) .method('map', isArray, map) .method('partition', isArray, partition) .method('reduce', isArray, reduce) diff --git a/src/promise.js b/src/promise.js index <HASH>..<HASH> 100644 --- a/src/promise.js +++ b/src/promise.js @@ -20,7 +20,7 @@ var Promise = function Promise(deferred) { listeners = [], invoke = function(a) { return function(value) { - foreach(a(), function(f) { + map(a(), function(f) { f(value); }); };
Removing foreach from array.
diff --git a/mautrix/client/state_store/asyncpg/upgrade.py b/mautrix/client/state_store/asyncpg/upgrade.py index <HASH>..<HASH> 100644 --- a/mautrix/client/state_store/asyncpg/upgrade.py +++ b/mautrix/client/state_store/asyncpg/upgrade.py @@ -25,15 +25,18 @@ async def upgrade_blank_to_v2(conn: Connection, scheme: Scheme) -> None: power_levels TEXT )""" ) + membership_check = "" if scheme != Scheme.SQLITE: await conn.execute( "CREATE TYPE membership AS ENUM ('join', 'leave', 'invite', 'ban', 'knock')" ) + else: + membership_check = "CHECK (membership IN ('join', 'leave', 'invite', 'ban', 'knock'))" await conn.execute( - """CREATE TABLE mx_user_profile ( + f"""CREATE TABLE mx_user_profile ( room_id TEXT, user_id TEXT, - membership membership NOT NULL, + membership membership NOT NULL {membership_check}, displayname TEXT, avatar_url TEXT, PRIMARY KEY (room_id, user_id)
Add CHECK constraint for membership column on SQLite
diff --git a/PelEntry.php b/PelEntry.php index <HASH>..<HASH> 100644 --- a/PelEntry.php +++ b/PelEntry.php @@ -172,8 +172,8 @@ abstract class PelEntry { $d = explode('-', strtr($data->getBytes(0, -1), '.: ', '---')); // TODO: handle timezones. require_once('PelEntryAscii.php'); - return new PelEntryTime($tag, mktime($d[3], $d[4], $d[5], - $d[1], $d[2], $d[0])); + return new PelEntryTime($tag, gmmktime($d[3], $d[4], $d[5], + $d[1], $d[2], $d[0])); case PelTag::COPYRIGHT: if ($format != PelFormat::ASCII)
The timestamps should be treated uniformly as UTC time, and not local time, otherwise we cannot guarantee a safe round trip where an entry is written to a file and then later retrieved.
diff --git a/lib/app/models/invitation.rb b/lib/app/models/invitation.rb index <HASH>..<HASH> 100644 --- a/lib/app/models/invitation.rb +++ b/lib/app/models/invitation.rb @@ -7,7 +7,19 @@ class Invitation < ActiveRecord::Base private + def call_back_method + Inviter::InvitationCallbacks.invitation_created_callback(inviter, invitee, invited_to) + end + def trigger_callbacks - puts 'CALL BACKS WILL BE TRIGGERED' + inviters = Inviter::ActsAsInviter.inviters + invitees = Inviter::ActsAsInvitee.invitees + invitations = Inviter::ActsAsInvitation.invitations + [inviters, invitees, invitations].each do |klass| + _call_back_method = call_back_method + klass.each do |_klass| + _klass.send(_call_back_method) if _klass.respond_to?(_call_back_method) + end + end end end
added method to trigger callbacks for members involved
diff --git a/admin/tool/customlang/db/upgrade.php b/admin/tool/customlang/db/upgrade.php index <HASH>..<HASH> 100644 --- a/admin/tool/customlang/db/upgrade.php +++ b/admin/tool/customlang/db/upgrade.php @@ -24,7 +24,7 @@ * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later */ -function xmldb_report_customlang_upgrade($oldversion) { +function xmldb_tool_customlang_upgrade($oldversion) { global $CFG, $DB, $OUTPUT; $dbman = $DB->get_manager();
MDL-<I> fix customlang upgrade script
diff --git a/pymc3/distributions/continuous.py b/pymc3/distributions/continuous.py index <HASH>..<HASH> 100644 --- a/pymc3/distributions/continuous.py +++ b/pymc3/distributions/continuous.py @@ -1131,12 +1131,12 @@ class Bounded(Continuous): self.testval = 0.5 * (upper + lower) if not np.isinf(lower) and np.isinf(upper): - self.transform = transforms.lowerbound_elemwise(lower) + self.transform = transforms.lowerbound(lower) if default <= lower: self.testval = lower + 1 if np.isinf(lower) and not np.isinf(upper): - self.transform = transforms.upperbound_elemwise(upper) + self.transform = transforms.upperbound(upper) if default >= upper: self.testval = upper - 1
BUG Wrong search/replace.
diff --git a/zarr/tests/test_core.py b/zarr/tests/test_core.py index <HASH>..<HASH> 100644 --- a/zarr/tests/test_core.py +++ b/zarr/tests/test_core.py @@ -855,11 +855,6 @@ class TestArray(unittest.TestCase): assert_array_equal(a['bar'], z['bar']) assert_array_equal(a['baz'], z['baz']) - # this does not raise with numpy 1.14 - # with pytest.raises(ValueError): - # # dodgy fill value - # self.create_array(shape=a.shape, chunks=2, dtype=a.dtype, fill_value=42) - def test_dtypes(self): # integers
delete commented code [ci skip]
diff --git a/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/UserMetadata.java b/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/UserMetadata.java index <HASH>..<HASH> 100644 --- a/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/UserMetadata.java +++ b/molgenis-data-security/src/main/java/org/molgenis/data/security/auth/UserMetadata.java @@ -75,7 +75,8 @@ public class UserMetadata extends SystemEntityType { .setLabel("Username") .setUnique(true) .setNillable(false) - .setReadOnly(true); + .setReadOnly(true) + .setValidationExpression("$('" + USERNAME + "').matches(/^[\\S].+[\\S]$/).value()"); addAttribute(PASSWORD) .setLabel("Password") .setDescription("This is the hashed password, enter a new plaintext password to update.")
Fix #<I>: Usernames are not checked for spaces
diff --git a/tests/Symfony/Tests/Component/Translation/Loader/ResourceBundleLoaderTest.php b/tests/Symfony/Tests/Component/Translation/Loader/ResourceBundleLoaderTest.php index <HASH>..<HASH> 100644 --- a/tests/Symfony/Tests/Component/Translation/Loader/ResourceBundleLoaderTest.php +++ b/tests/Symfony/Tests/Component/Translation/Loader/ResourceBundleLoaderTest.php @@ -51,7 +51,7 @@ class ResourceBundleFileLoaderTest extends LocalizedTestCase } /** - * @expectedException Exception + * @expectedException \RuntimeException */ public function testLoadInvalidResource() {
[Translation] changed some unit tests for PHPUnit <I> compatibility
diff --git a/lib/stealth/base.rb b/lib/stealth/base.rb index <HASH>..<HASH> 100644 --- a/lib/stealth/base.rb +++ b/lib/stealth/base.rb @@ -71,6 +71,8 @@ module Stealth def self.load_environment require File.join(Stealth.root, 'config', 'boot') require_directory("config/initializers") + # Require explicitly to ensure it loads first + require File.join(Stealth.root, 'bot', 'controllers', 'bot_controller') require_directory("bot") end
Ensure BotController loads before all controllers
diff --git a/lib/cb/clients/job_branding.rb b/lib/cb/clients/job_branding.rb index <HASH>..<HASH> 100755 --- a/lib/cb/clients/job_branding.rb +++ b/lib/cb/clients/job_branding.rb @@ -8,6 +8,7 @@ # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and limitations under the License. +require_relative 'base' module Cb module Clients class JobBranding < Base
linux vs mac file pathing crap again
diff --git a/gengen.go b/gengen.go index <HASH>..<HASH> 100644 --- a/gengen.go +++ b/gengen.go @@ -52,8 +52,11 @@ func generate(filename string, typenames ...string) ([]byte, error) { } var buf bytes.Buffer - err = format.Node(&buf, fset, f) - return buf.Bytes(), err + if err = format.Node(&buf, fset, f); err != nil { + return nil, err + } + + return format.Source(buf.Bytes()) } func main() {
Pass generated source through format.Source() (i.e. gofmt) Fixes #1
diff --git a/spec/suite/compat/unit/rom/configuration/relation_classes_spec.rb b/spec/suite/compat/unit/rom/configuration/relation_classes_spec.rb index <HASH>..<HASH> 100644 --- a/spec/suite/compat/unit/rom/configuration/relation_classes_spec.rb +++ b/spec/suite/compat/unit/rom/configuration/relation_classes_spec.rb @@ -11,7 +11,8 @@ RSpec.describe ROM::Configuration, "#relation_classes" do rel_default = Class.new(ROM::Relation[:memory]) { schema(:users) {} } rel_custom = Class.new(ROM::Relation[:memory]) { gateway :custom - schema(:others) {} } + schema(:others) {} +} conf.register_relation(rel_default) conf.register_relation(rel_custom)
[rubocop] address Layout/BlockEndNewline
diff --git a/chunkypipes/util/__init__.py b/chunkypipes/util/__init__.py index <HASH>..<HASH> 100644 --- a/chunkypipes/util/__init__.py +++ b/chunkypipes/util/__init__.py @@ -1,4 +1,5 @@ import sys +import os import traceback from importlib import import_module @@ -8,6 +9,12 @@ def fetch_command_class(subcommand): return module.Command() +def print_no_init(): + sys.stderr.write('ChunkyPipes cannot find an init directory at the user ' + + 'home or in the CHUNKY_HOME environment variable. Please ' + + 'run \'chunky init\' before using ChunkyPipes.\n') + + def print_help_text(): fetch_command_class('help').run_from_argv() @@ -38,6 +45,11 @@ def execute_from_command_line(argv=None): print_help_text() sys.exit(0) + chunky_home_root = os.environ.get('CHUNKY_HOME') or os.path.expanduser('~') + if not os.path.exists(os.path.join(chunky_home_root, '.chunky')): + print_no_init() + sys.exit(1) + send_argv = [] if len(argv) > 2: send_argv = argv[2:]
ChunkyPipes now halts execution if it can't find an initialized hidden directory and asks the user to run chunky init
diff --git a/upload/admin/model/openbay/ebay_profile.php b/upload/admin/model/openbay/ebay_profile.php index <HASH>..<HASH> 100644 --- a/upload/admin/model/openbay/ebay_profile.php +++ b/upload/admin/model/openbay/ebay_profile.php @@ -56,7 +56,7 @@ class ModelOpenbayEbayProfile extends Model{ foreach ($qry->rows as $row) { $row['link_edit'] = HTTPS_SERVER . 'index.php?route=openbay/ebay_profile/edit&token=' . $this->session->data['token'] . '&ebay_profile_id=' . $row['ebay_profile_id']; $row['link_delete'] = HTTPS_SERVER . 'index.php?route=openbay/ebay_profile/delete&token=' . $this->session->data['token'] . '&ebay_profile_id=' . $row['ebay_profile_id']; - $row['data'] = unserialize($row['data']); + $row['data'] = !empty($row['data']) ? unserialize($row['data']) : array(); $profiles[] = $row; }
Added check for data before trying to unserialize.
diff --git a/modules/clean-up.php b/modules/clean-up.php index <HASH>..<HASH> 100644 --- a/modules/clean-up.php +++ b/modules/clean-up.php @@ -119,7 +119,7 @@ add_filter('body_class', 'soil_body_class'); function soil_embed_wrap($cache) { return '<div class="entry-content-asset">' . $cache . '</div>'; } -add_filter('embed_oembed_html', 'soil_embed_wrap', 10, 4); +add_filter('embed_oembed_html', 'soil_embed_wrap'); /** * Remove unnecessary dashboard widgets
Fixing add_filter call to have right parameters
diff --git a/salesforce/tests/test_integration.py b/salesforce/tests/test_integration.py index <HASH>..<HASH> 100644 --- a/salesforce/tests/test_integration.py +++ b/salesforce/tests/test_integration.py @@ -95,7 +95,7 @@ class BasicSOQLTest(TestCase): def test_exclude_query_construction(self): """ - Test that excludde query construction returns valid SOQL + Test that exclude query construction returns valid SOQL. """ contacts = Contact.objects.filter(FirstName__isnull=False).exclude(Email="steve@apple.com", LastName="Wozniak").exclude(LastName="smith") number_of_contacts = contacts.count()
Fix typo in docstring.
diff --git a/nap/__init__.py b/nap/__init__.py index <HASH>..<HASH> 100644 --- a/nap/__init__.py +++ b/nap/__init__.py @@ -1,6 +1,6 @@ __author__ = 'Kimmo Brunfeldt' __email__ = 'kimmobrunfeldt@gmail.com' __url__ = 'https://github.com/kimmobrunfeldt/nap' -__version__ = '1.0.0' +__version__ = '1.0.0-dev' from . import url diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ readme = """Read docs from GitHub_ setup( name='nap', - version='1.0.0', + version='1.0.0-dev', description='Convenient way to request HTTP APIs', long_description=readme, author='Kimmo Brunfeldt',
Set development version: <I>-dev
diff --git a/lib/modules.js b/lib/modules.js index <HASH>..<HASH> 100644 --- a/lib/modules.js +++ b/lib/modules.js @@ -216,9 +216,13 @@ var //dependencies "client/html/templates/simple-thumbnails.html" ], fuImages: [ + "client/continue.gif", + "client/edit.gif", "client/loading.gif", + "client/pause.gif", "client/processing.gif", - "client/edit.gif" + "client/retry.gif", + "client/trash.gif" ], fuPlaceholders: [ "client/placeholders/not_available-generic.png",
chore(build): include new images in packaged builds #<I> #<I>
diff --git a/lib/Doctrine/ORM/Query/SqlWalker.php b/lib/Doctrine/ORM/Query/SqlWalker.php index <HASH>..<HASH> 100644 --- a/lib/Doctrine/ORM/Query/SqlWalker.php +++ b/lib/Doctrine/ORM/Query/SqlWalker.php @@ -1353,7 +1353,8 @@ class SqlWalker implements TreeWalker break; case ($expr instanceof AST\NewObjectExpression): - $sql .= $this->walkNewObject($expr); + $resultAlias = $selectExpression->fieldIdentificationVariable ?: null; + $sql .= $this->walkNewObject($expr,$resultAlias); break; default: @@ -1519,10 +1520,10 @@ class SqlWalker implements TreeWalker * * @return string The SQL. */ - public function walkNewObject($newObjectExpression) + public function walkNewObject($newObjectExpression, $newObjectResultAlias=null) { $sqlSelectExpressions = array(); - $objIndex = $this->newObjectCounter++; + $objIndex = $newObjectResultAlias?:$this->newObjectCounter++; foreach ($newObjectExpression->args as $argIndex => $e) { $resultAlias = $this->scalarResultCounter++;
Adding the ability to alias new object expressions
diff --git a/lib/frameworks/jasmine.js b/lib/frameworks/jasmine.js index <HASH>..<HASH> 100644 --- a/lib/frameworks/jasmine.js +++ b/lib/frameworks/jasmine.js @@ -85,7 +85,7 @@ exports.run = function(runner, specs) { spec.getFullName().match(new RegExp(jasmineNodeOpts.grep)) != null; var invertGrep = !!(jasmineNodeOpts && jasmineNodeOpts.invertGrep); if (grepMatch == invertGrep) { - spec.pend(); + spec.disable(); } return true; };
fix(grep): change excluded tests to disabled instead of pending (#<I>)
diff --git a/classes/PodsUI.php b/classes/PodsUI.php index <HASH>..<HASH> 100644 --- a/classes/PodsUI.php +++ b/classes/PodsUI.php @@ -389,7 +389,7 @@ class PodsUI { $options = array(); if ( isset( $object->ui ) ) { - $options = $object->ui; + $options = (array) $object->ui; unset( $object->ui ); } @@ -454,6 +454,7 @@ class PodsUI { */ public function setup_deprecated ( $deprecated_options ) { $options = array(); + if ( isset( $deprecated_options[ 'id' ] ) ) $options[ 'id' ] = $deprecated_options[ 'id' ]; if ( isset( $deprecated_options[ 'action' ] ) ) @@ -705,6 +706,13 @@ class PodsUI { if ( isset( $deprecated_options[ 'wpcss' ] ) ) $options[ 'wpcss' ] = $deprecated_options[ 'wpcss' ]; + $remaining_options = array_diff_assoc( $options, $deprecated_options ); + + foreach ( $remaining_options as $option => $value ) { + if ( isset( $this->$option ) ) + $options[ $option ] = $value; + } + return $options; }
Loop through extra options in deprecated options handling, fix for when you use new options in a deprecated pods_ui call
diff --git a/lib/dbus/proxy_object.rb b/lib/dbus/proxy_object.rb index <HASH>..<HASH> 100644 --- a/lib/dbus/proxy_object.rb +++ b/lib/dbus/proxy_object.rb @@ -144,10 +144,5 @@ module DBus raise NoMethodError, "undefined method `#{name}' for DBus interface `#{@default_iface}' on object `#{@path}'" end end - - # Returns the singleton class of the object. - def singleton_class - (class << self ; self ; end) - end end # class ProxyObject end diff --git a/lib/dbus/proxy_object_interface.rb b/lib/dbus/proxy_object_interface.rb index <HASH>..<HASH> 100644 --- a/lib/dbus/proxy_object_interface.rb +++ b/lib/dbus/proxy_object_interface.rb @@ -34,11 +34,6 @@ module DBus @name end - # Returns the singleton class of the interface. - def singleton_class - (class << self ; self ; end) - end - # Defines a method on the interface from the Method descriptor _m_. def define_method_from_descriptor(m) m.params.each do |fpar|
Removed superfluous definitions of Object#singleton_class MRI implemented it in <I> already.
diff --git a/src/Student.php b/src/Student.php index <HASH>..<HASH> 100644 --- a/src/Student.php +++ b/src/Student.php @@ -77,20 +77,23 @@ class Student extends Person */ protected static function fromSimpleIdentifier($identifier) { - $person = parent::fromSimpleIdentifier($identifier); - $uwregid = $person->getAttr("UWRegID"); + if ($person !== null) { + $uwregid = $person->getAttr("UWRegID"); - $resp = static::getStudentConnection()->execGET( - "person/$uwregid.json" - ); + $resp = static::getStudentConnection()->execGET( + "person/$uwregid.json" + ); - $resp = static::parse($resp); + $resp = static::parse($resp); - $person->attrs = array_merge($person->attrs, $resp); + $person->attrs = array_merge($person->attrs, $resp); - return $person; + return $person; + } else { + return null; + } } /**
Handle null person when creating student.
diff --git a/test/db/postgresql/jsonb_test.rb b/test/db/postgresql/jsonb_test.rb index <HASH>..<HASH> 100644 --- a/test/db/postgresql/jsonb_test.rb +++ b/test/db/postgresql/jsonb_test.rb @@ -49,14 +49,14 @@ class PostgreSQLJSONBTest < Test::Unit::TestCase assert @column = JsonbDataType.columns.find { |c| c.name == 'payload' } data = "{\"a_key\":\"a_value\"}" - hash = @column.class.string_to_json data + hash = @column.class.string_to_json(data) assert_equal({'a_key' => 'a_value'}, hash) assert_equal({'a_key' => 'a_value'}, @column.type_cast(data)) assert_equal({}, @column.type_cast("{}")) assert_equal({'key'=>nil}, @column.type_cast('{"key": null}')) assert_equal({'c'=>'}','"a"'=>'b "a b'}, @column.type_cast(%q({"c":"}", "\"a\"":"b \"a b"}))) - end + end unless ar_version('4.2') def test_rewrite @connection.execute "insert into jsonb_data_type (payload) VALUES ('{\"k\":\"v\"}')"
jsonb (type-cast) test no longer makes sense on AR <I>
diff --git a/lib/rb/lib/thrift/transport.rb b/lib/rb/lib/thrift/transport.rb index <HASH>..<HASH> 100644 --- a/lib/rb/lib/thrift/transport.rb +++ b/lib/rb/lib/thrift/transport.rb @@ -231,10 +231,6 @@ module Thrift return @rpos < @wpos end - def get_buffer - return @buf - end - def reset_buffer(new_buf = '') @buf = new_buf @sz = new_buf.length
Rip out MemoryBuffer#get_buffer Nobody should be using that method and it's preventing MemoryBuffer from being optimized wrt. memory usage git-svn-id: <URL>
diff --git a/redis/client.py b/redis/client.py index <HASH>..<HASH> 100644 --- a/redis/client.py +++ b/redis/client.py @@ -1111,9 +1111,7 @@ class Redis(StrictRedis): if len(args) % 2 != 0: raise RedisError("ZADD requires an equal number of " "values and scores") - temp_args = args - temp_args.reverse() - pieces.extend(temp_args) + pieces.extend(reversed(args)) for pair in kwargs.iteritems(): pieces.append(pair[1]) pieces.append(pair[0])
Fix zadd: 'tuple' object has no attribute 'reverse' (and args is a tuple)
diff --git a/src/Saft/Rdf/StatementIteratorFactoryImpl.php b/src/Saft/Rdf/StatementIteratorFactoryImpl.php index <HASH>..<HASH> 100644 --- a/src/Saft/Rdf/StatementIteratorFactoryImpl.php +++ b/src/Saft/Rdf/StatementIteratorFactoryImpl.php @@ -13,15 +13,6 @@ class StatementIteratorFactoryImpl implements StatementIteratorFactory */ public function createIteratorFromArray(array $statements) { - if (is_array($statements)) { - return new ArrayStatementIteratorImpl($statements); - - } elseif ($statements instanceof \Iterator) { - $arrayIterator = new \ArrayIterator($statements); - return new ArrayStatementIteratorImpl($arrayIterator->getArrayCopy()); - - } else { - throw new \Exception('Parameter $statements is neither an array nor instace of \Iterator.'); - } + return new ArrayStatementIteratorImpl($statements); } }
Simplify createIteratorFromArray It directly returns an instance of StatementIterator without further checks because $statements can only be an array.
diff --git a/react-native/react/engine/index.js b/react-native/react/engine/index.js index <HASH>..<HASH> 100644 --- a/react-native/react/engine/index.js +++ b/react-native/react/engine/index.js @@ -11,6 +11,8 @@ import NativeEventEmitter from '../common-adapters/native-event-emitter' import windowsHack from './windows-hack' import {log} from '../native/log/logui' +import {constants} from '../constants/types/keybase_v1' + class Engine { constructor () { windowsHack() @@ -220,7 +222,10 @@ class Engine { console.log(`Unknown incoming rpc: ${sessionID} ${method} ${param}${response ? ': Sending back error' : ''}`) } if (response && response.error) { - response.error('Unhandled rpc') + response.error({ + code: constants.StatusCode.scgeneric, + desc: 'Unhandled incoming RPC' + }) } } }
sending scgeneric error to daemon
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -197,7 +197,7 @@ exports.init = function (sbot, config) { get: function (opts, cb) { if(!cb) cb = opts, opts = {} - index.get(function (err, value) { + index.get({}, function (err, value) { if(err) return cb(err) //opts is used like this in ssb-ws if(opts && opts.source) { @@ -205,6 +205,13 @@ exports.init = function (sbot, config) { if(value && opts.dest) value = value[opts.dest] } + else if( opts && opts.dest) { + var _value = {} + for(var k in value) + if('undefined' !== typeof value[k][opts.dest]) + _value[k] = value[k][opts.dest] + return cb(null, _value) + } cb(null, value) }) }, @@ -226,3 +233,6 @@ exports.init = function (sbot, config) { } } + + +
support friends.get with dest only
diff --git a/src/semantic_tree/semantic_processor.js b/src/semantic_tree/semantic_processor.js index <HASH>..<HASH> 100644 --- a/src/semantic_tree/semantic_processor.js +++ b/src/semantic_tree/semantic_processor.js @@ -906,6 +906,9 @@ sre.SemanticProcessor.prototype.getPunctuationInRow_ = function(nodes) { // similar to an mrow. The only exception are ellipses, which we assume to be // in lieu of identifiers. // In addition we keep the single punctuation nodes as content. + if (nodes.length <= 1) { + return nodes; + } var partition = sre.SemanticProcessor.partitionNodes_( nodes, function(x) { return sre.SemanticPred.isPunctuation(x) &&
Adds a special case to avoid single punctuation elements being turned into trivial punctuated elements. Fixes issue #<I>.
diff --git a/tests/Localization/DvMvTest.php b/tests/Localization/DvMvTest.php index <HASH>..<HASH> 100644 --- a/tests/Localization/DvMvTest.php +++ b/tests/Localization/DvMvTest.php @@ -10,6 +10,8 @@ */ namespace Tests\Localization; +use Carbon\Translator; + class DvMvTest extends LocalizationTestCase { const LOCALE = 'dv_MV'; // Divehi @@ -218,4 +220,15 @@ class DvMvTest extends LocalizationTestCase // CarbonInterval::create('P1DT3H')->forHumans(true) '1 ދުވަސް 3 ގަޑި', ]; + + public function testPlural() + { + $translator = Translator::get('dv_MV'); + $translator->setTranslations([ + 'a' => 'a|b', + ]); + + $this->assertSame('a', $translator->transChoice('a', 1)); + $this->assertSame('b', $translator->transChoice('a', 2)); + } }
Test dv_MV plural rule
diff --git a/Lib/fontbakery/specifications/head.py b/Lib/fontbakery/specifications/head.py index <HASH>..<HASH> 100644 --- a/Lib/fontbakery/specifications/head.py +++ b/Lib/fontbakery/specifications/head.py @@ -81,7 +81,8 @@ def parse_version_string(name): ) def com_google_fonts_check_044(ttFont): """Checking font version fields (head and name table).""" - head_version = parse_version_string(str(ttFont["head"].fontRevision)) + from decimal import Decimal + head_version = parse_version_string(str(Decimal(ttFont["head"].fontRevision).quantize(Decimal('1.000')))) # Compare the head version against the name ID 5 strings in all name records. from fontbakery.constants import NAMEID_VERSION_STRING
fix #<I>: accept rounding fontRevision due to bad ... interpretations of float values causing false-FAILs (such as <I> being interpreted as <I>)
diff --git a/scheduler/util_test.go b/scheduler/util_test.go index <HASH>..<HASH> 100644 --- a/scheduler/util_test.go +++ b/scheduler/util_test.go @@ -580,6 +580,12 @@ func TestInplaceUpdate_Success(t *testing.T) { if len(ctx.plan.NodeAllocation) != 1 { t.Fatal("inplaceUpdate did not do an inplace update") } + + // Get the alloc we inserted. + a := ctx.plan.NodeAllocation[alloc.NodeID][0] + if len(a.Services) != 0 { + t.Fatalf("Expected number of services: %v, Actual: %v", 0, len(a.Services)) + } } func TestEvictAndPlace_LimitGreaterThanAllocs(t *testing.T) {
Added a test to prove services are removed from the map in Alloc if they are removed from the Tasks
diff --git a/src/MetaModels/Attribute/Base.php b/src/MetaModels/Attribute/Base.php index <HASH>..<HASH> 100644 --- a/src/MetaModels/Attribute/Base.php +++ b/src/MetaModels/Attribute/Base.php @@ -102,9 +102,9 @@ abstract class Base implements IAttribute public function getName() { if (is_array($this->arrData['name'])) { - return $this->getLangValue($this->get('name')); + return $this->getLangValue($this->get('name')) ?: $this->getColName(); } - return $this->arrData['name']; + return $this->arrData['name'] ?: $this->getColName(); } /**
Apply fallback to colName if attribute has no name See #<I>, #<I>
diff --git a/plugins/Actions/Actions/ActionSiteSearch.php b/plugins/Actions/Actions/ActionSiteSearch.php index <HASH>..<HASH> 100644 --- a/plugins/Actions/Actions/ActionSiteSearch.php +++ b/plugins/Actions/Actions/ActionSiteSearch.php @@ -64,6 +64,16 @@ class ActionSiteSearch extends Action return null; } + public function getIdActionUrlForEntryAndExitIds() + { + return $this->getIdActionUrl(); + } + + public function getIdActionNameForEntryAndExitIds() + { + return $this->getIdActionName(); + } + public function getCustomFloatValue() { return $this->request->getPageGenerationTime();
Site search requests are also pageviews
diff --git a/tests/test_incuna_mail.py b/tests/test_incuna_mail.py index <HASH>..<HASH> 100644 --- a/tests/test_incuna_mail.py +++ b/tests/test_incuna_mail.py @@ -8,7 +8,8 @@ import incuna_mail class TestIncunaMail(TestCase): def tearDown(self): - mail.outbox = [] + # Empty the outbox to avoid emails persisting between tests. + mail.outbox = None def send_and_assert_email(self, text_template_name=()): """Runs send() on proper input and checks the result."""
Use None instead of [] to empty the outbox. * Add an explanatory comment as well.
diff --git a/test/urlMatcherFactorySpec.js b/test/urlMatcherFactorySpec.js index <HASH>..<HASH> 100644 --- a/test/urlMatcherFactorySpec.js +++ b/test/urlMatcherFactorySpec.js @@ -463,7 +463,7 @@ describe("urlMatcherFactoryProvider", function () { }); }); -fdescribe("urlMatcherFactory", function () { +describe("urlMatcherFactory", function () { var $umf;
chore(urlMatcherFactory): remove f
diff --git a/src/Api.php b/src/Api.php index <HASH>..<HASH> 100644 --- a/src/Api.php +++ b/src/Api.php @@ -40,11 +40,11 @@ $this->loadWhitelist(); - if ($this->checkWhitelist() === false) + if (isset($this->request->headers->authorization) && !empty($this->request->headers->authorization())) { try { - $this->authenticate(); + $this->authenticate($this->checkWhitelist()); } catch (Exceptions\Authentication $exception) {
Efficiency improvements to whitelist/authentication system Always process auth header if it exists Pass whitelisted state to authenticate method
diff --git a/lib/logstasher/version.rb b/lib/logstasher/version.rb index <HASH>..<HASH> 100644 --- a/lib/logstasher/version.rb +++ b/lib/logstasher/version.rb @@ -1,3 +1,3 @@ module LogStasher - VERSION = "0.3.0" + VERSION = "0.3.1" end
Version update after merging pull request #<I>
diff --git a/src/main/org/openscience/cdk/aromaticity/DaylightModel.java b/src/main/org/openscience/cdk/aromaticity/DaylightModel.java index <HASH>..<HASH> 100644 --- a/src/main/org/openscience/cdk/aromaticity/DaylightModel.java +++ b/src/main/org/openscience/cdk/aromaticity/DaylightModel.java @@ -174,6 +174,8 @@ final class DaylightModel extends ElectronDonation { int v = valence(element, charge); if (v - bondOrderSum[i] >= 2) electrons[i] = 2; + else + electrons[i] = -1; } else {
Not enough electrons - then it cannot participate.
diff --git a/consumer_test.go b/consumer_test.go index <HASH>..<HASH> 100644 --- a/consumer_test.go +++ b/consumer_test.go @@ -211,7 +211,15 @@ func TestSequentialConsuming(t *testing.T) { closeWithin(t, 10*time.Second, consumer) } -func TestCompression(t *testing.T) { +func TestGzipCompression(t *testing.T) { + testCompression(t, sarama.CompressionGZIP) +} + +func TestSnappyCompression(t *testing.T) { + testCompression(t, sarama.CompressionSnappy) +} + +func testCompression(t *testing.T, codec sarama.CompressionCodec) { topic := fmt.Sprintf("test-compression-%d", time.Now().Unix()) messages := make([]string, 0) for i := 0; i < numMessages; i++ { @@ -220,7 +228,7 @@ func TestCompression(t *testing.T) { CreateMultiplePartitionsTopic(localZk, topic, 1) EnsureHasLeader(localZk, topic) - produce(t, messages, topic, localBroker, sarama.CompressionGZIP) + produce(t, messages, topic, localBroker, codec) config := testConsumerConfig() config.NumWorkers = 1
tests for gzip and snappy compression
diff --git a/dirutility/_version.py b/dirutility/_version.py index <HASH>..<HASH> 100644 --- a/dirutility/_version.py +++ b/dirutility/_version.py @@ -1 +1 @@ -__version__ = '0.3.4' +__version__ = '0.3.5' diff --git a/dirutility/gui.py b/dirutility/gui.py index <HASH>..<HASH> 100644 --- a/dirutility/gui.py +++ b/dirutility/gui.py @@ -129,8 +129,8 @@ class BackupZipGUI: [gui.Text('-' * 200)], # Source - [gui.Text('Select source folder', size=(15, 1), font=("Helvetica", 25), auto_size_text=False), - gui.InputText('Source', key='source', font=("Helvetica", 20)), + [gui.Text('Select source folder', size=(20, 1), font=("Helvetica", 25), auto_size_text=False), + gui.InputText('', key='source', font=("Helvetica", 20)), gui.FolderBrowse()], [gui.Submit(), gui.Cancel()]]
Updated to version <I>
diff --git a/vendor/refinerycms/images/lib/images.rb b/vendor/refinerycms/images/lib/images.rb index <HASH>..<HASH> 100644 --- a/vendor/refinerycms/images/lib/images.rb +++ b/vendor/refinerycms/images/lib/images.rb @@ -22,6 +22,12 @@ module Refinery app_images.analyser.register(Dragonfly::Analysis::RMagickAnalyser) app_images.analyser.register(Dragonfly::Analysis::FileCommandAnalyser) + # This little eval makes it so that dragonfly urls work in traditional + # situations where the filename and extension are required, e.g. lightbox. + # What this does is takes the url that is about to be produced e.g. + # /system/images/BAhbB1sHOgZmIiMyMDEwLzA5LzAxL1NTQ19DbGllbnRfQ29uZi5qcGdbCDoGcDoKdGh1bWIiDjk0MngzNjAjYw + # and adds the filename onto the end (say the image was 'refinery_is_awesome.jpg') + # /system/images/BAhbB1sHOgZmIiMyMDEwLzA5LzAxL1NTQ19DbGllbnRfQ29uZi5qcGdbCDoGcDoKdGh1bWIiDjk0MngzNjAjYw/refinery_is_awesome.jpg app_images.instance_eval %{ def url_for(job, *args) image_url = nil
Serious hacks require serious documentation.
diff --git a/getgauge/connection.py b/getgauge/connection.py index <HASH>..<HASH> 100755 --- a/getgauge/connection.py +++ b/getgauge/connection.py @@ -7,13 +7,13 @@ from google.protobuf.internal.encoder import _EncodeVarint def connect(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect(('localhost', int(os.environ['GAUGE_INTERNAL_PORT']))) + s.connect(('127.0.0.1', int(os.environ['GAUGE_INTERNAL_PORT']))) return s def to_api(): s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.connect(('localhost', int(os.environ['GAUGE_API_PORT']))) + s.connect(('127.0.0.1', int(os.environ['GAUGE_API_PORT']))) return s
Changing localhost to <I> #<I>
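The change trades a hostname lookup for the literal loopback address. A minimal sketch of why that can matter, assuming a host whose `localhost` entry resolves to IPv6 first (the port number is arbitrary):

```python
import socket

# Illustration only: depending on the hosts file, "localhost" may resolve to
# ::1 (IPv6) before 127.0.0.1, whereas the literal address always gives the
# plain IPv4 loopback entry an AF_INET socket expects.
print(socket.getaddrinfo("localhost", 8080, proto=socket.IPPROTO_TCP))
print(socket.getaddrinfo("127.0.0.1", 8080, proto=socket.IPPROTO_TCP))
```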
diff --git a/lib/event_source/stream_name.rb b/lib/event_source/stream_name.rb index <HASH>..<HASH> 100644 --- a/lib/event_source/stream_name.rb +++ b/lib/event_source/stream_name.rb @@ -2,9 +2,7 @@ module EventSource module StreamName extend self - def stream_name(category_name, id=nil) - id ||= Identifier::UUID.random - + def stream_name(category_name, id) "#{category_name}-#{id}" end
StreamName creation no longer optionally randomizes IDs
diff --git a/src/test/java/org/dstadler/commons/util/ExecutorUtilTest.java b/src/test/java/org/dstadler/commons/util/ExecutorUtilTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/dstadler/commons/util/ExecutorUtilTest.java +++ b/src/test/java/org/dstadler/commons/util/ExecutorUtilTest.java @@ -83,6 +83,10 @@ public class ExecutorUtilTest { ExecutorUtil.shutdownAndAwaitTermination(executor, 10); + // allow some time for thread to vanish from the list of threads, sometimes + // they can "linger" on for a short while... + ThreadTestHelper.waitForThreadToFinishSubstring("ExecutorTest"); + ThreadTestHelper.assertNoThreadLeft( "No thread expected after shutting down the executor, look at log for thread-dump", "ExecutorTest");
Wait a bit for the thread to fully stop so the test does not occasionally fail in CI
diff --git a/openquake/commands/restore.py b/openquake/commands/restore.py index <HASH>..<HASH> 100644 --- a/openquake/commands/restore.py +++ b/openquake/commands/restore.py @@ -33,7 +33,7 @@ def restore(archive, oqdata): Build a new oqdata directory from the data contained in the zip archive """ if not os.path.exists(archive): - raise OSError('%s does not exist' % archive) + sys.exit('%s does not exist' % archive) t0 = time.time() oqdata = os.path.abspath(oqdata) assert archive.endswith('.zip'), archive
Cleanup: exit with a plain error message instead of raising OSError [skip CI]
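For context, a standalone sketch (not the project's code) of the behaviour the change relies on: passing a string to `sys.exit()` prints it to stderr and exits with status 1, so the user sees a one-line message rather than an OSError traceback.

```python
import os
import sys

def restore(archive):
    # mirror of the new guard: bail out with a plain message, no traceback
    if not os.path.exists(archive):
        sys.exit('%s does not exist' % archive)
    print('restoring from', archive)

# restore('missing.zip')  # prints "missing.zip does not exist", exit status 1
```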
diff --git a/src/Codeception/Module/WebDriver.php b/src/Codeception/Module/WebDriver.php index <HASH>..<HASH> 100644 --- a/src/Codeception/Module/WebDriver.php +++ b/src/Codeception/Module/WebDriver.php @@ -369,6 +369,7 @@ class WebDriver extends \Codeception\Module implements WebInterface { $matched = true; } catch (\NoSuchElementWebDriverError $e) {} } + if ($matched) return; foreach ($option as $opt) { try { $select->selectByValue($opt);
Add an early return to separate the different select situations
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -81,7 +81,7 @@ function gulpRepl(_gulp_){ exports.instances.push({ gulp: gulp, tasks: util.getTasks(gulp), - runner: gulp.parallel || gulp.start + runner: gulp.start }); return repl;
refactor: only one runner remains now (gulp-runtime)
diff --git a/liquibase-core/src/main/java/liquibase/datatype/DataTypeFactory.java b/liquibase-core/src/main/java/liquibase/datatype/DataTypeFactory.java index <HASH>..<HASH> 100644 --- a/liquibase-core/src/main/java/liquibase/datatype/DataTypeFactory.java +++ b/liquibase-core/src/main/java/liquibase/datatype/DataTypeFactory.java @@ -96,6 +96,13 @@ public class DataTypeFactory { primaryKey = true; } + String[] splitTypeName = dataTypeName.split("\\s+", 2); + dataTypeName = splitTypeName[0]; + String additionalInfo = null; + if (splitTypeName.length > 1) { + additionalInfo = splitTypeName[1]; + } + SortedSet<Class<? extends LiquibaseDataType>> classes = registry.get(dataTypeName.toLowerCase()); LiquibaseDataType liquibaseDataType = null; @@ -113,6 +120,7 @@ public class DataTypeFactory { liquibaseDataType = new UnknownType(dataTypeName); } + liquibaseDataType.setAdditionalInformation(additionalInfo); if (dataTypeDefinition.matches(".+\\s*\\(.*")) { String paramStrings = dataTypeDefinition.replaceFirst(".*?\\(", "").replaceFirst("\\).*", "");
CORE-<I> AutoIncrement not working with some types
diff --git a/consumer.go b/consumer.go index <HASH>..<HASH> 100644 --- a/consumer.go +++ b/consumer.go @@ -167,7 +167,7 @@ func (c *Consumer) MarkOffsets(s *OffsetStash) { } } -// ResetOffsets marks the provided message as processed, alongside a metadata string +// ResetOffset marks the provided message as processed, alongside a metadata string // that represents the state of the partition consumer at that point in time. The // metadata string can be used by another consumer to restore that state, so it // can resume consumption.
Fix based on best practices from Effective Go (#<I>)
diff --git a/flake8_author.py b/flake8_author.py index <HASH>..<HASH> 100644 --- a/flake8_author.py +++ b/flake8_author.py @@ -30,19 +30,24 @@ class Checker(object): @classmethod def add_options(cls, parser): + extra_kwargs = {} + if hasattr(parser, 'config_options'): # flake8 < 3.0 + parser.config_options.append('author-attribute') + parser.config_options.append('author-pattern') + else: # flake8 >= 3.0 + extra_kwargs['parse_from_config'] = True + parser.add_option( '--author-attribute', default='optional', help="__author__ attribute: {0}".format( - ', '.join(cls.attribute_choices))) + ', '.join(cls.attribute_choices)), + **extra_kwargs) parser.add_option( '--author-pattern', default=r'.*', - help="__author__ attribute validation pattern (regex)") - - if hasattr(parser, 'config_options'): # flake8 < 3.0 - parser.config_options.append('author-attribute') - parser.config_options.append('author-pattern') + help="__author__ attribute validation pattern (regex)", + **extra_kwargs) @classmethod def parse_options(cls, options):
Pass parse_from_config=True for flake8 <I>+. Otherwise, the options won't be read from the configuration file. This case still needs test coverage.
diff --git a/librecaptcha/recaptcha.py b/librecaptcha/recaptcha.py index <HASH>..<HASH> 100644 --- a/librecaptcha/recaptcha.py +++ b/librecaptcha/recaptcha.py @@ -35,7 +35,7 @@ import time BASE_URL = "https://www.google.com/recaptcha/api2/" API_JS_URL = "https://www.google.com/recaptcha/api.js" -JS_URL_TEMPLATE = "https://www.gstatic.com/recaptcha/api2/{}/recaptcha__en.js" +JS_URL_TEMPLATE = "https://www.gstatic.com/recaptcha/releases/{}/recaptcha__en.js" STRINGS_VERSION = "0.1.0" STRINGS_PATH = os.path.join( @@ -127,7 +127,7 @@ def get_js_strings(user_agent, rc_version): def get_rc_version(user_agent): - match = re.search(r"/recaptcha/api2/(.+?)/", requests.get( + match = re.search(r"/recaptcha/releases/(.+?)/", requests.get( API_JS_URL, headers={ "User-Agent": user_agent, },
Script URL no longer works. reCAPTCHA changed its URLs for `recaptcha__en.js`: it is no longer under `/api2/` but under `/releases/`. This PR fixes the issue.
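A quick check of the updated pattern; the URL below is a made-up example in the new `releases` format, the regex is the one from the patch.

```python
import re

js_url = "https://www.gstatic.com/recaptcha/releases/abc123XYZ/recaptcha__en.js"
match = re.search(r"/recaptcha/releases/(.+?)/", js_url)
assert match.group(1) == "abc123XYZ"  # the version token between the slashes
```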
diff --git a/pressbooks.php b/pressbooks.php index <HASH>..<HASH> 100644 --- a/pressbooks.php +++ b/pressbooks.php @@ -24,6 +24,7 @@ function _pb_session_start() { if ( ! session_id() ) { if ( ! headers_sent() ) { ini_set( 'session.use_only_cookies', true ); + ini_set( 'session.cookie_domain', COOKIE_DOMAIN ); /** * Adjust session configuration as needed. * @@ -54,7 +55,7 @@ function _pb_session_kill() { } // @codingStandardsIgnoreEnd -add_action( 'init', '_pb_session_start', 1 ); +add_action( 'plugins_loaded', '_pb_session_start', 1 ); add_action( 'wp_logout', '_pb_session_kill' ); add_action( 'wp_login', '_pb_session_kill' );
Regain control of $_SESSION (#<I>)
diff --git a/dvc/repo/reproduce.py b/dvc/repo/reproduce.py index <HASH>..<HASH> 100644 --- a/dvc/repo/reproduce.py +++ b/dvc/repo/reproduce.py @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) def _reproduce_stage(stage, **kwargs): - if stage.locked: + if stage.locked and not stage.is_import: logger.warning( "{} is locked. Its dependencies are" " not going to be reproduced.".format(stage)
repro: do not log when stage is locked and is a repo import (#<I>)
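The effect of the extra condition, sketched as a truth table with a stand-in stage object (names here are illustrative, not dvc's):

```python
class Stage:  # stand-in, not dvc's Stage
    def __init__(self, locked, is_import):
        self.locked, self.is_import = locked, is_import

def warns(stage):
    return stage.locked and not stage.is_import

assert warns(Stage(locked=True,  is_import=False)) is True   # locked stage: warn
assert warns(Stage(locked=True,  is_import=True))  is False  # locked repo import: silent
assert warns(Stage(locked=False, is_import=False)) is False  # unlocked: no warning
```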
diff --git a/parallel.js b/parallel.js index <HASH>..<HASH> 100644 --- a/parallel.js +++ b/parallel.js @@ -32,6 +32,7 @@ function fastparallel (options) { function parallel (that, toCall, arg, done) { var i var holder = next() + done = done || nop if (toCall.length === 0) { done.call(that) released(head) diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -240,3 +240,22 @@ test('call the result callback when the each array is empty with no results', fu t.error('this should never be called') } }) + +test('does not require a done callback', function (t) { + t.plan(4) + + var instance = parallel() + var count = 0 + var obj = {} + + instance(obj, [something, something], 42) + + function something (arg, cb) { + t.equal(obj, this) + t.equal(arg, 42) + setImmediate(function () { + count++ + cb() + }) + } +})
Do not require a done callback.
diff --git a/alot/widgets.py b/alot/widgets.py index <HASH>..<HASH> 100644 --- a/alot/widgets.py +++ b/alot/widgets.py @@ -476,10 +476,12 @@ class MessageWidget(urwid.WidgetWrap): mail = self.message.get_email() # normalize values if only filtered list is shown norm = not (self._displayed_headers == self._all_headers) + lowercase_keys = [k.lower() for k in self._displayed_headers] + #build lines lines = [] for k, v in mail.items(): - if k in self._displayed_headers: + if k.lower() in lowercase_keys: lines.append((k, message.decode_header(v, normalize=norm))) cols = [HeadersList(lines)]
case-insensitive header key filter, closes #<I>
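The comparison now lower-cases both sides, so a configured `Subject` filter matches a mail that spells the key `SUBJECT`. A small illustration with made-up data:

```python
displayed_headers = ["From", "To", "Subject"]
lowercase_keys = [k.lower() for k in displayed_headers]

mail_items = [("FROM", "a@example.org"), ("SUBJECT", "hi"), ("X-Spam-Flag", "NO")]
lines = [(k, v) for k, v in mail_items if k.lower() in lowercase_keys]
assert lines == [("FROM", "a@example.org"), ("SUBJECT", "hi")]
```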
diff --git a/lib/spidr/page.rb b/lib/spidr/page.rb index <HASH>..<HASH> 100644 --- a/lib/spidr/page.rb +++ b/lib/spidr/page.rb @@ -280,7 +280,7 @@ module Spidr def cookie_params params = {} - cookie.split(/;\s+/).each do |key_value| + (@headers['set-cookie'] || []).each do |key_value| key, value = key_value.split('=',2) next if RESERVED_COOKIE_NAMES.include?(key) diff --git a/spec/page_spec.rb b/spec/page_spec.rb index <HASH>..<HASH> 100644 --- a/spec/page_spec.rb +++ b/spec/page_spec.rb @@ -81,7 +81,7 @@ describe Page do describe "cookies" do before(:all) do - @page = get_page('http://github.com/postmodern/spidr/') + @page = get_page('http://twitter.com/login') end it "should provide access to the raw Cookie" do @@ -97,7 +97,7 @@ describe Page do params.each do |key,value| key.should_not be_empty - value.should_not be_nil + value.should_not be_empty end end end
fix potential parse error with multiple Set-Cookie headers
diff --git a/lib/node-mplayer.js b/lib/node-mplayer.js index <HASH>..<HASH> 100644 --- a/lib/node-mplayer.js +++ b/lib/node-mplayer.js @@ -112,10 +112,17 @@ Mplayer.prototype.getTimeLength = function(callback) { Mplayer.prototype.getTimePosition = function(callback) { if(this.childProc !== null){ + var that = this; this.rl.question("get_time_pos\n", function(answer) { - callback(answer.split('=')[1]); + if (answer.split('=')[0]=='ANS_TIME_POSITION'){ + callback(answer.split('=')[1]); + } + else{ + // Try again :( + that.getTimePosition(callback); + } }); } }; -module.exports = Mplayer; \ No newline at end of file +module.exports = Mplayer;
Fix for getTimePosition on Ubuntu <I>. The answer from the subprocess is sometimes blank when polling for the time position, so this adds a check of the answer before using it.
diff --git a/tests/pytests/unit/states/test_cmd.py b/tests/pytests/unit/states/test_cmd.py index <HASH>..<HASH> 100644 --- a/tests/pytests/unit/states/test_cmd.py +++ b/tests/pytests/unit/states/test_cmd.py @@ -155,7 +155,7 @@ def test_script_runas_no_password(): "changes": {}, "result": False, "comment": "", - "commnd": "Must supply a password if runas argument is used on Windows.", + "command": "Must supply a password if runas argument is used on Windows.", } patch_opts = patch.dict(cmd.__opts__, {"test": False}) @@ -198,8 +198,6 @@ def test_call(): specified in the state declaration. """ name = "cmd.script" - # func = 'myfunc' - ret = {"name": name, "result": False, "changes": {}, "comment": ""} flag = None
Fix typo and remove superfluous comment
diff --git a/bids/variables/entities.py b/bids/variables/entities.py index <HASH>..<HASH> 100644 --- a/bids/variables/entities.py +++ b/bids/variables/entities.py @@ -46,9 +46,13 @@ class RunNode(Node): super(RunNode, self).__init__('run', entities) def get_info(self): - - return RunInfo(self.entities, self.duration, self.repetition_time, - self.image_file) + # Note: do not remove the dict() call! self.entities is a SQLAlchemy + # association_proxy mapping, and without the conversion, the connection + # to the DB persists, causing problems on Python 3.5 if we try to clone + # a RunInfo or any containing object. + entities = dict(self.entities) + return RunInfo(entities, self.duration, + self.repetition_time, self.image_file) # Stores key information for each Run.
fixes BIDSVariable deepcopy error by ensuring we have no connection to the DB
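The point of the `dict()` call is that the copy no longer references the live mapping. A hedged sketch with a hypothetical stand-in for the association proxy:

```python
import copy

class LiveProxy:
    """Hypothetical stand-in for an association_proxy: reads go through a
    backing store, so deep-copying an object that still holds the proxy would
    drag the store (and its DB connection) along."""
    def __init__(self, backing):
        self._backing = backing
    def items(self):
        return self._backing.items()

proxy = LiveProxy({"subject": "01", "run": 1})
snapshot = dict(proxy.items())   # plain dict, detached from the backing store
clone = copy.deepcopy(snapshot)  # safe: only the snapshot gets copied
```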
diff --git a/mutagen/flac.py b/mutagen/flac.py index <HASH>..<HASH> 100644 --- a/mutagen/flac.py +++ b/mutagen/flac.py @@ -45,10 +45,10 @@ class FLACVorbisError(ValueError, error): pass -def to_int_be(string): +def to_int_be(data): """Convert an arbitrarily-long string to a long using big-endian byte order.""" - return reduce(lambda a, b: (a << 8) + b, bytearray(string), 0) + return reduce(lambda a, b: (a << 8) + b, bytearray(data), 0) class StrictFileObject(object): @@ -619,8 +619,8 @@ class FLAC(mutagen.FileType): """Known metadata block types, indexed by ID.""" @staticmethod - def score(filename, fileobj, header): - return (header.startswith(b"fLaC") + + def score(filename, fileobj, header_data): + return (header_data.startswith(b"fLaC") + endswith(filename.lower(), ".flac") * 3) def __read_metadata_block(self, fileobj):
flac.py: rename variables for clarity.
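The helper folds each byte into the accumulator, shifting left by eight bits per step. A standalone sketch with two worked values:

```python
from functools import reduce

def to_int_be(data):
    """Big-endian byte string to integer, same fold as the renamed helper."""
    return reduce(lambda a, b: (a << 8) + b, bytearray(data), 0)

assert to_int_be(b"\x01\x02") == 0x0102 == 258
assert to_int_be(b"\x00\xff\x10") == 0x00ff10 == 65296
# On Python 3 the same value comes from int.from_bytes(data, "big").
```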
diff --git a/table/tables.py b/table/tables.py index <HASH>..<HASH> 100644 --- a/table/tables.py +++ b/table/tables.py @@ -14,13 +14,13 @@ from addon import (TableSearchBox, TableInfoLabel, TablePagination, class BaseTable(object): def __init__(self, data=None): - if isinstance(data, QuerySet) or isinstance(data, list): + model = getattr(self.opts, 'model', None) + if model: + self.data = model.objects.all() + elif hasattr(data, "__iter__"): self.data = data else: - model = getattr(self.opts, 'model', None) - if not model: - raise ValueError("Model class or QuerySet-like object is required.") - self.data = model.objects.all() + raise ValueError("Model class or QuerySet-like object is required.") # Make a copy so that modifying this will not touch the class definition. self.columns = copy.deepcopy(self.base_columns)
Fix bug in checking the type of the data source
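The reordered checks mean a declared model always wins and any iterable is otherwise accepted. A sketch with stand-in classes (not the library's), showing that a tuple now works just as well as a list or QuerySet:

```python
class Opts:  # stand-in for the table's Meta options
    model = None

class DemoTable:
    opts = Opts()
    def __init__(self, data=None):
        model = getattr(self.opts, "model", None)
        if model:
            self.data = model.objects.all()
        elif hasattr(data, "__iter__"):
            self.data = data
        else:
            raise ValueError("Model class or QuerySet-like object is required.")

DemoTable(data=[{"name": "a"}])    # a list works
DemoTable(data=({"name": "b"},))   # so does any other iterable, e.g. a tuple
# DemoTable(data=42)               # raises ValueError
```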
diff --git a/programs/thumbnails.py b/programs/thumbnails.py index <HASH>..<HASH> 100755 --- a/programs/thumbnails.py +++ b/programs/thumbnails.py @@ -105,6 +105,16 @@ def make_thumbnails(directory=".", fmt="png"): top = 0 + height * .155 right = width * .902 bottom = height * .86 + elif "ty:_day" in lower_infile: + left = 0 + width * .124 + top = 0 + height * .12 + right = width * .902 + bottom = height * .855 + elif "ty:_s-bc" in lower_infile or "ty:_s-bcr" in lower_infile: + left = 0 + width * .124 + top = 0 + height * .12 + right = width * .902 + bottom = height * .892 else: error_log("Could not crop {}".format(infile)) im.save(infile[:-4] + ".thumb.{}".format(fmt), fmt, dpi=(300, 300))
add dayplot and s-bc plot sizes to thumbnail cropping
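Worked numbers for the new `ty:_day` branch, assuming a hypothetical 3000 x 2400 pixel plot; the fractions apply to the full width and height:

```python
width, height = 3000, 2400
left   = 0 + width  * .124   # 372.0
top    = 0 + height * .12    # 288.0
right  = width  * .902       # 2706.0
bottom = height * .855       # 2052.0
# the crop box kept for the thumbnail would be (372, 288, 2706, 2052)
```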
diff --git a/activejdbc/src/main/java/activejdbc/Model.java b/activejdbc/src/main/java/activejdbc/Model.java index <HASH>..<HASH> 100644 --- a/activejdbc/src/main/java/activejdbc/Model.java +++ b/activejdbc/src/main/java/activejdbc/Model.java @@ -587,7 +587,7 @@ public abstract class Model extends CallbackSupport implements Externalizable { StringWriter sw = new StringWriter(); - sw.write(indent + "{" + (pretty?"\n " + indent: "") + "\"type\":\"" + getClass().getName() + "\","); + sw.write(indent + "{" + (pretty?"\n " + indent: "") + "\"model_class\":\"" + getClass().getName() + "\","); List<String> attributeStrings = new ArrayList<String>();
changed "type" to "model_class" in JSON, references: <URL>
diff --git a/djstripe/models/webhooks.py b/djstripe/models/webhooks.py index <HASH>..<HASH> 100644 --- a/djstripe/models/webhooks.py +++ b/djstripe/models/webhooks.py @@ -219,7 +219,7 @@ class WebhookEventTrigger(models.Model): if obj.valid: if djstripe_settings.WEBHOOK_EVENT_CALLBACK: # If WEBHOOK_EVENT_CALLBACK, pass it for processing - djstripe_settings.WEBHOOK_EVENT_CALLBACK(obj) + djstripe_settings.WEBHOOK_EVENT_CALLBACK(obj, api_key=api_key) else: # Process the item (do not save it, it'll get saved below) obj.process(save=False, api_key=api_key)
Pass api_key to WEBHOOK_EVENT_CALLBACK
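Because the trigger now forwards `api_key`, a project-level callback has to accept it (or absorb it via `**kwargs`). A hedged sketch of a compatible signature; the function name and logging are illustrative only:

```python
import logging

logger = logging.getLogger(__name__)

def my_webhook_event_callback(event_trigger, api_key=None, **kwargs):
    # event_trigger is the validated WebhookEventTrigger; api_key is the key
    # it was validated with, forwarded by the new call site above.
    logger.info("webhook event trigger %s received", event_trigger.pk)
```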
diff --git a/lib/Gitlab/HttpClient/Builder.php b/lib/Gitlab/HttpClient/Builder.php index <HASH>..<HASH> 100644 --- a/lib/Gitlab/HttpClient/Builder.php +++ b/lib/Gitlab/HttpClient/Builder.php @@ -5,6 +5,7 @@ namespace Gitlab\HttpClient; use Http\Client\Common\HttpMethodsClient; use Http\Client\Common\Plugin; use Http\Client\Common\PluginClient; +use Http\Client\Common\PluginClientFactory; use Http\Client\HttpClient; use Http\Discovery\HttpClientDiscovery; use Http\Discovery\MessageFactoryDiscovery; @@ -81,7 +82,7 @@ class Builder $this->httpClientModified = false; $this->pluginClient = new HttpMethodsClient( - new PluginClient($this->httpClient, $this->plugins), + (new PluginClientFactory())->createClient($this->httpClient, $this->plugins), $this->requestFactory ); }
Use the plugin client factory in the http client builder