diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/codec-http2/src/test/java/io/netty/handler/codec/http2/LastInboundHandler.java b/codec-http2/src/test/java/io/netty/handler/codec/http2/LastInboundHandler.java index <HASH>..<HASH> 100644 --- a/codec-http2/src/test/java/io/netty/handler/codec/http2/LastInboundHandler.java +++ b/codec-http2/src/test/java/io/netty/handler/codec/http2/LastInboundHandler.java @@ -99,7 +99,7 @@ public class LastInboundHandler extends ChannelDuplexHandler { @Override public void channelWritabilityChanged(ChannelHandlerContext ctx) throws Exception { - if (writabilityStates == "") { + if ("".equals(writabilityStates)) { writabilityStates = String.valueOf(ctx.channel().isWritable()); } else { writabilityStates += "," + ctx.channel().isWritable();
Change String comparison to equals() from == (#<I>) Motivation: Even if it was stored in the string constant pool, I thought it was safe to compare it through the Equals() method. Modification: So, I changed "==" comparison to equals() comparison Result: It has become safer to compare String values with different references and with the same values.
diff --git a/src/Leevel/Protocol/Server.php b/src/Leevel/Protocol/Server.php index <HASH>..<HASH> 100755 --- a/src/Leevel/Protocol/Server.php +++ b/src/Leevel/Protocol/Server.php @@ -580,8 +580,8 @@ abstract class Server throw new InvalidArgumentException($e); } - if (version_compare(phpversion('swoole'), '4.4.0', '<')) { - $e = 'Swoole 4.4.0 OR Higher'; + if (version_compare(phpversion('swoole'), '4.4.2', '<')) { + $e = 'Swoole 4.4.2 OR Higher'; throw new InvalidArgumentException($e); }
chore: update swoole version
diff --git a/lib/appsignal/railtie.rb b/lib/appsignal/railtie.rb index <HASH>..<HASH> 100644 --- a/lib/appsignal/railtie.rb +++ b/lib/appsignal/railtie.rb @@ -1,8 +1,5 @@ module Appsignal class Railtie < Rails::Railtie - rake_tasks do - load "tasks/auth_check.rake" - end initializer "appsignal.configure_rails_initialization" do |app| # Some apps when run from the console do not have Rails.root set, there's
Remove rake task in favor of CLI
diff --git a/core/src/test/java/com/google/errorprone/bugpatterns/testdata/BadComparablePositiveCases.java b/core/src/test/java/com/google/errorprone/bugpatterns/testdata/BadComparablePositiveCases.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/com/google/errorprone/bugpatterns/testdata/BadComparablePositiveCases.java +++ b/core/src/test/java/com/google/errorprone/bugpatterns/testdata/BadComparablePositiveCases.java @@ -16,6 +16,7 @@ package com.google.errorprone.bugpatterns.testdata; +import java.io.File; import java.util.Comparator; /** @author irogers@google.com (Ian Rogers) */ @@ -53,4 +54,12 @@ public class BadComparablePositiveCases { return (int) (n1 - n2); } }; + + static final Comparator<File> COMPARATOR_FILE_INT_CAST = + new Comparator<File>() { + public int compare(File lhs, File rhs) { + // BUG: Diagnostic contains: return Long.compare(rhs.lastModified(), lhs.lastModified()) + return (int) (rhs.lastModified() - lhs.lastModified()); + } + }; }
BadComparable test case with File. This was a motivating case for BadComparable and so it makes a nice test. RELNOTES: extra BadComparable test. ------------- Created by MOE: <URL>
diff --git a/js/angular/components/notification/notification.js b/js/angular/components/notification/notification.js index <HASH>..<HASH> 100644 --- a/js/angular/components/notification/notification.js +++ b/js/angular/components/notification/notification.js @@ -230,7 +230,9 @@ // close if autoclose if (scope.autoclose) { setTimeout(function() { - scope.hide(); + if (scope.active) { + scope.hide(); + } }, parseInt(scope.autoclose)); }; } else if (msg == 'close' || msg == 'hide') { @@ -240,7 +242,9 @@ // close if autoclose if (scope.autoclose) { setTimeout(function() { - scope.toggle(); + if (scope.active) { + scope.toggle(); + } }, parseInt(scope.autoclose)); }; }
Change to autoclose notification to prevent notification from reappearing after swipe.
diff --git a/src/main/java/io/github/lukehutch/fastclasspathscanner/json/JSONMapper.java b/src/main/java/io/github/lukehutch/fastclasspathscanner/json/JSONMapper.java index <HASH>..<HASH> 100644 --- a/src/main/java/io/github/lukehutch/fastclasspathscanner/json/JSONMapper.java +++ b/src/main/java/io/github/lukehutch/fastclasspathscanner/json/JSONMapper.java @@ -69,7 +69,7 @@ public class JSONMapper { * fields. */ public static String toJSON(final Object obj, final int indentWidth) { - return JSONSerializer.toJSON(obj, 0); + return JSONSerializer.toJSON(obj, indentWidth); } // -------------------------------------------------------------------------------------------------------------
Pass in indentWidth
diff --git a/generators/generator-constants.js b/generators/generator-constants.js index <HASH>..<HASH> 100644 --- a/generators/generator-constants.js +++ b/generators/generator-constants.js @@ -54,7 +54,7 @@ const DOCKER_JAVA_JRE = 'eclipse-temurin:11-jre-focal'; const DOCKER_MYSQL = 'mysql:8.0.27'; const DOCKER_MARIADB = 'mariadb:10.7.1'; const DOCKER_POSTGRESQL = 'postgres:14.1'; -const DOCKER_MONGODB = 'mongo:4.4.10'; +const DOCKER_MONGODB = 'mongo:4.4.11'; const DOCKER_COUCHBASE = 'couchbase/server:7.0.0'; const DOCKER_CASSANDRA = 'cassandra:3.11.11'; const DOCKER_MSSQL = 'mcr.microsoft.com/mssql/server:2019-CU13-ubuntu-20.04';
Update mongo docker image version to <I>
diff --git a/sunspot/lib/sunspot/query/restriction.rb b/sunspot/lib/sunspot/query/restriction.rb index <HASH>..<HASH> 100644 --- a/sunspot/lib/sunspot/query/restriction.rb +++ b/sunspot/lib/sunspot/query/restriction.rb @@ -353,6 +353,10 @@ module Sunspot @field.to_indexed(value) end + def to_negative_boolean_phrase + %Q(-"#{to_positive_boolean_phrase}") + end + def to_positive_boolean_phrase "{!field f=#{@field.indexed_name} op=#{operation}}#{solr_value}" end
negative boolean phrase for AbstractRange
diff --git a/lib/app.js b/lib/app.js index <HASH>..<HASH> 100644 --- a/lib/app.js +++ b/lib/app.js @@ -5,7 +5,7 @@ var Performer = require('./performer') function App(opts) { var options = opts || {}; - var ydmHome = process.env.YDM_HOME || path.join(process.env.HOME, '.ydm') + var ydmHome = process.env.YDM_HOME || (process.env.HOME ? path.join(process.env.HOME, '.ydm') : null) this.scopesPath = options.scopesPath || path.join(ydmHome, 'scopes') this.dropsPath = options.dropsPath if (this.dropsPath) mkdirp.sync(this.dropsPath)
don't break when no HOME set
diff --git a/Package/Loader/LazyAssetPackageLoader.php b/Package/Loader/LazyAssetPackageLoader.php index <HASH>..<HASH> 100644 --- a/Package/Loader/LazyAssetPackageLoader.php +++ b/Package/Loader/LazyAssetPackageLoader.php @@ -174,6 +174,10 @@ class LazyAssetPackageLoader implements LazyLoaderInterface if (false === $data) { $this->driver->cleanup(); + if (!$this->verbose) { + $this->io->overwrite('', false); + } + return false; }
Fix output without verbose option when package file is not found
diff --git a/ara/cli/playbook.py b/ara/cli/playbook.py index <HASH>..<HASH> 100644 --- a/ara/cli/playbook.py +++ b/ara/cli/playbook.py @@ -92,8 +92,29 @@ class PlaybookList(Lister): query["limit"] = args.limit playbooks = client.get("/api/v1/playbooks", **query) - columns = ("id", "status", "path", "started", "duration") + # Send items to columns + for playbook in playbooks["results"]: + playbook["plays"] = playbook["items"]["plays"] + playbook["tasks"] = playbook["items"]["tasks"] + playbook["results"] = playbook["items"]["results"] + playbook["hosts"] = playbook["items"]["hosts"] + playbook["files"] = playbook["items"]["files"] + playbook["records"] = playbook["items"]["records"] + # fmt: off + columns = ( + "id", + "status", + "path", + "plays", + "tasks", + "results", + "hosts", + "files", + "records", + "started", + "duration" + ) return ( columns, ( [playbook[column] for column in columns]
CLI: Add number of items in 'playbook list' This adds columns to display the number of plays, tasks, results, hosts, files and records in each playbook. Change-Id: If<I>e<I>e<I>b<I>d3c<I>e0ec<I>fee<I>f9c<I>
diff --git a/lib/query_report/report_pdf.rb b/lib/query_report/report_pdf.rb index <HASH>..<HASH> 100755 --- a/lib/query_report/report_pdf.rb +++ b/lib/query_report/report_pdf.rb @@ -50,7 +50,7 @@ module QueryReport end def humanized_table_header - report_columns.collect(&:humanize) + report_columns.collect { |h| fix_content h.humanize } end def table_content_for(report) @@ -59,7 +59,7 @@ module QueryReport item_values = [] report_columns.collect(&:humanize).each do |column| - item_values << item[column].to_s + item_values << fix_content(item[column].to_s) end item_values end @@ -89,6 +89,10 @@ module QueryReport end end + def fix_content(content) + content + end + private def report_columns report.columns.select { |c| !c.only_on_web? } diff --git a/spec/report/paginate_spec.rb b/spec/report/paginate_spec.rb index <HASH>..<HASH> 100644 --- a/spec/report/paginate_spec.rb +++ b/spec/report/paginate_spec.rb @@ -5,6 +5,10 @@ require 'query_report/paginate' describe QueryReport::PaginateModule do class DummyClass include QueryReport::PaginateModule + + def paginate? + true + end end let(:object) { DummyClass.new }
Added feature to sanitize the pdf content
diff --git a/nipap/nipap/nipapconfig.py b/nipap/nipap/nipapconfig.py index <HASH>..<HASH> 100644 --- a/nipap/nipap/nipapconfig.py +++ b/nipap/nipap/nipapconfig.py @@ -25,7 +25,7 @@ class NipapConfig(ConfigParser.SafeConfigParser): raise NipapConfigError("missing configuration file") self._cfg_path = cfg_path - ConfigParser.ConfigParser.__init__(self, default, allow_no_value = True) + ConfigParser.ConfigParser.__init__(self, default) self.read_file()
Python <I> compliance Removed allow_no_value from config object as it's newer than Python <I>.
diff --git a/patroni/postgresql.py b/patroni/postgresql.py index <HASH>..<HASH> 100644 --- a/patroni/postgresql.py +++ b/patroni/postgresql.py @@ -1216,7 +1216,8 @@ class Postgresql(object): if data: data = data.decode('utf-8').splitlines() # pg_controldata output depends on major verion. Some of parameters are prefixed by 'Current ' - result = {l.split(':')[0].replace('Current ', '', 1): l.split(':', 1)[1].strip() for l in data if l} + result = {l.split(':')[0].replace('Current ', '', 1): l.split(':', 1)[1].strip() for l in data + if l and ':' in l} except subprocess.CalledProcessError: logger.exception("Error when calling pg_controldata") return result
check if output lines of controldata are possible to split (#<I>) Otherwise it fails with scary stacktrace
diff --git a/graphsrv/static/js/graphsrv.mtr.js b/graphsrv/static/js/graphsrv.mtr.js index <HASH>..<HASH> 100644 --- a/graphsrv/static/js/graphsrv.mtr.js +++ b/graphsrv/static/js/graphsrv.mtr.js @@ -44,6 +44,9 @@ $gs.components.register( }, + "init_popover": function() { + }, + "default_options" : function() { options = this.Graph_default_options(); options.max_targets = 1; @@ -133,7 +136,6 @@ $gs.components.register( for(i = 0; i < hops.length; i++) { colors[hops[i]] = this.colors[i] } - console.log(colors, hops) var bars = this.d3.data.selectAll("g") .data(hops)
disable popover in mtr graph for now
diff --git a/app/assets/javascripts/lentil/buttonhandler.js b/app/assets/javascripts/lentil/buttonhandler.js index <HASH>..<HASH> 100644 --- a/app/assets/javascripts/lentil/buttonhandler.js +++ b/app/assets/javascripts/lentil/buttonhandler.js @@ -2,7 +2,7 @@ function buttonhandler() { $(".like-btn, .flag-confirm").click(function(e) { button = $(this); - imageId = $(".fancybox-wrap").attr("id"); + imageId = $(".fancybox-wrap, .image-show").attr("id"); if (!$(button).is(".already-clicked")) { url = $(button).attr("href"); $.post(url, function() {
check for image id on standalone image page
diff --git a/salt/utils/sanitisers.py b/salt/utils/sanitisers.py index <HASH>..<HASH> 100644 --- a/salt/utils/sanitisers.py +++ b/salt/utils/sanitisers.py @@ -55,5 +55,7 @@ class InputSanitizer(object): ''' return re.sub(r'[^a-zA-Z0-9.-]', '', InputSanitizer.trim(value)) + id = hostname + clean = InputSanitizer()
Add a stub for ID sanitiser (at the moment same as hostname)
diff --git a/lib/gearman/multiserver-worker.js b/lib/gearman/multiserver-worker.js index <HASH>..<HASH> 100644 --- a/lib/gearman/multiserver-worker.js +++ b/lib/gearman/multiserver-worker.js @@ -21,11 +21,10 @@ var log = require('../log'); * @return {Object} */ -function MultiserverWorker(servers, func_name, callback, Worker) { - Worker = Worker || gearman.Worker; +function MultiserverWorker(servers, func_name, callback) { var that = this; Multiserver.call(this, servers, function(server, index) { - return new Worker(func_name, function(payload, worker) { + return new gearman.Worker(func_name, function(payload, worker) { that._debug('received job from', that._serverString(index)); return callback(payload, worker); }, server);
Remove dependecy injection from MultiserverClient
diff --git a/dpark/schedule.py b/dpark/schedule.py index <HASH>..<HASH> 100644 --- a/dpark/schedule.py +++ b/dpark/schedule.py @@ -748,7 +748,7 @@ class DAGScheduler(Scheduler): if self.loghub_dir: self._dump_stats(stats) except Exception as e: - logger.exception("Fail to dump job stats: %s.", e) + logger.warning("Fail to dump job stats: %s.", e) def _dump_stats(self, stats): name = "_".join(map(str, ['sched', self.id, "job", self.runJobTimes])) + ".json"
Adjust log level of ""Fail to dump job stats" to warning.
diff --git a/src/main/java/com/parship/roperty/KeyValues.java b/src/main/java/com/parship/roperty/KeyValues.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/parship/roperty/KeyValues.java +++ b/src/main/java/com/parship/roperty/KeyValues.java @@ -58,7 +58,7 @@ public class KeyValues { return addOrChangeDomainSpecificValue(changeSet, value, domainKeyParts); } - private synchronized DomainSpecificValue addOrChangeDomainSpecificValue(final String changeSet, final Object value, final String[] domainKeyParts) { + private DomainSpecificValue addOrChangeDomainSpecificValue(final String changeSet, final Object value, final String[] domainKeyParts) { DomainSpecificValue domainSpecificValue = domainSpecificValueFactory.create(value, changeSet, domainKeyParts); if (domainSpecificValues.contains(domainSpecificValue)) {
Removed synchronized modifier, because ConcurrentSkipListSet has an own concurrency mechanism
diff --git a/terraform/resource_test.go b/terraform/resource_test.go index <HASH>..<HASH> 100644 --- a/terraform/resource_test.go +++ b/terraform/resource_test.go @@ -40,6 +40,12 @@ func TestResourceConfigGet(t *testing.T) { Value interface{} }{ { + Config: nil, + Key: "foo", + Value: nil, + }, + + { Config: map[string]interface{}{ "foo": "${var.foo}", }, @@ -74,12 +80,16 @@ func TestResourceConfigGet(t *testing.T) { } for i, tc := range cases { - rawC, err := config.NewRawConfig(tc.Config) - if err != nil { - t.Fatalf("err: %s", err) + var rawC *config.RawConfig + if tc.Config != nil { + var err error + rawC, err = config.NewRawConfig(tc.Config) + if err != nil { + t.Fatalf("err: %s", err) + } } - rc := NewResourceConfig(rawC) + rc := NewResourceConfig(rawC) if tc.Vars != nil { ctx := NewContext(&ContextOpts{Variables: tc.Vars}) if err := rc.interpolate(ctx); err != nil {
terraform: test to make sure nil in RawConfig is okay
diff --git a/hystrix-core/src/main/java/com/netflix/hystrix/AbstractCommand.java b/hystrix-core/src/main/java/com/netflix/hystrix/AbstractCommand.java index <HASH>..<HASH> 100644 --- a/hystrix-core/src/main/java/com/netflix/hystrix/AbstractCommand.java +++ b/hystrix-core/src/main/java/com/netflix/hystrix/AbstractCommand.java @@ -1442,9 +1442,7 @@ import com.netflix.hystrix.util.HystrixTimer.TimerListener; public ExecutionResult addEvents(HystrixEventType... events) { ArrayList<HystrixEventType> newEvents = new ArrayList<>(); newEvents.addAll(this.events); - for (HystrixEventType e : events) { - newEvents.add(e); - } + Collections.addAll(newEvents, events); return new ExecutionResult(Collections.unmodifiableList(newEvents), executionTime, exception); }
Replace explicit iteration with library support for fast list creation
diff --git a/tornadis/__init__.py b/tornadis/__init__.py index <HASH>..<HASH> 100644 --- a/tornadis/__init__.py +++ b/tornadis/__init__.py @@ -4,14 +4,12 @@ # This file is part of tornadis library released under the MIT license. # See the LICENSE file for more information. -version_info = (0, 0, '1') +version_info = (0, 1, 0) __version__ = ".".join([str(x) for x in version_info]) DEFAULT_HOST = '127.0.0.1' DEFAULT_PORT = 6379 DEFAULT_CONNECT_TIMEOUT = 20 -DEFAULT_WRITE_TIMEOUT = 20 -DEFAULT_READ_TIMEOUT = 20 DEFAULT_READ_PAGE_SIZE = 65536 DEFAULT_WRITE_PAGE_SIZE = 65536
remove two unused const and preparing <I> release
diff --git a/packages/node-krl-parser/tests/parser.test.js b/packages/node-krl-parser/tests/parser.test.js index <HASH>..<HASH> 100644 --- a/packages/node-krl-parser/tests/parser.test.js +++ b/packages/node-krl-parser/tests/parser.test.js @@ -2059,6 +2059,11 @@ test("Action setting", function(t){ var src = "ruleset rs{rule r1{select when a b; "+src_action+"}}"; var ast = parser(src).rules[0].action_block.actions[0]; t.deepEquals(normalizeAST(rmLoc(ast)), normalizeAST(expected)); + + //test it also in defaction + src = "ruleset rs{global{a=defaction(){"+src_action+"}}}"; + ast = parser(src).global[0].actions[0]; + t.deepEquals(normalizeAST(rmLoc(ast)), normalizeAST(expected)); }; testAction("http:post(\"url\") with qs = {\"foo\": \"bar\"}", {
testing action setting support in defaction
diff --git a/fuse.py b/fuse.py index <HASH>..<HASH> 100644 --- a/fuse.py +++ b/fuse.py @@ -382,15 +382,15 @@ class Stat(FuseStruct): def __init__(self, **kw): self.st_mode = None - self.st_ino = None - self.st_dev = None + self.st_ino = 0 + self.st_dev = 0 self.st_nlink = None - self.st_uid = None - self.st_gid = None - self.st_size = None - self.st_atime = None - self.st_mtime = None - self.st_ctime = None + self.st_uid = 0 + self.st_gid = 0 + self.st_size = 0 + self.st_atime = 0 + self.st_mtime = 0 + self.st_ctime = 0 FuseStruct.__init__(self, **kw)
the FuseStat initializatior sets a default zero value for some of the stat fields
diff --git a/openid/codecutil.py b/openid/codecutil.py index <HASH>..<HASH> 100644 --- a/openid/codecutil.py +++ b/openid/codecutil.py @@ -50,13 +50,7 @@ def _in_escape_range(octet): return False -def _pct_escape_handler(err): - ''' - Encoding error handler that does percent-escaping of Unicode, to be used - with codecs.register_error - TODO: replace use of this with urllib.parse.quote as appropriate - ''' - chunk = err.object[err.start:err.end] +def _pct_encoded_replacements(chunk): replacements = [] for character in chunk: codepoint = ord(character) @@ -65,6 +59,17 @@ def _pct_escape_handler(err): replacements.append("%%%X" % char) else: replacements.append(chr(codepoint)) + return replacements + + +def _pct_escape_handler(err): + ''' + Encoding error handler that does percent-escaping of Unicode, to be used + with codecs.register_error + TODO: replace use of this with urllib.parse.quote as appropriate + ''' + chunk = err.object[err.start:err.end] + replacements = _pct_encoded_replacements(chunk) return ("".join(replacements), err.end) codecs.register_error("oid_percent_escape", _pct_escape_handler)
Factor out percent-encoding into separate function in codecutil, for debugging / testing
diff --git a/lib/validation.js b/lib/validation.js index <HASH>..<HASH> 100644 --- a/lib/validation.js +++ b/lib/validation.js @@ -2,6 +2,9 @@ const Joi = require( 'joi' ); +// pre-load +require( 'joi/lib/any' ); + const utils = require( './utils' ); const ignoredProperties = require( './ignored-properties' ); @@ -115,6 +118,23 @@ function configure( userSchema ) { if( userSchema ) { schema = userSchema; + + // warm up during load phase + try { + + let testEvent = {}; + + Object.keys( schema ).forEach( function( key ) { + + testEvent[ key ] = 'test-data'; + }); + + verify( testEvent ); + } + catch( err ) { + + // ignore + } } }
Added preload and warm-up code for validation testing - reduces time for cold runs
diff --git a/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(1).php b/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(1).php index <HASH>..<HASH> 100644 --- a/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(1).php +++ b/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(1).php @@ -16,6 +16,8 @@ */ $aData = array( + 'skipped' => 1, + 'categories' => array ( 0 => array ( 'oxid' => 'testCategory1', diff --git a/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(2).php b/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(2).php index <HASH>..<HASH> 100644 --- a/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(2).php +++ b/tests/integration/price/testcases/basket/testFrontendOrderStep1DeliverySortedWithCategories(2).php @@ -16,6 +16,8 @@ */ $aData = array( + 'skipped' => 1, + 'categories' => array ( 0 => array ( 'oxid' => 'testCategory1',
marked as skipped (cherry picked from commit <I>b1c5) (cherry picked from commit 7c9cbdd)
diff --git a/lib/schema_monkey/railtie.rb b/lib/schema_monkey/railtie.rb index <HASH>..<HASH> 100644 --- a/lib/schema_monkey/railtie.rb +++ b/lib/schema_monkey/railtie.rb @@ -8,10 +8,14 @@ module SchemaMonkey end rake_tasks do - load 'rails/tasks/database.rake' + namespace :schema_monkey do + task :insert do + SchemaMonkey.insert + end + end ['db:schema:dump', 'db:schema:load'].each do |name| if task = Rake.application.tasks.find { |task| task.name == name } - task.enhance(["schema_monkey:load"]) + task.enhance(["schema_monkey:insert"]) end end end
bug fix: replace dangling rails/tasks/database.rake with the correct task inline
diff --git a/lib/numina/recipes/__init__.py b/lib/numina/recipes/__init__.py index <HASH>..<HASH> 100644 --- a/lib/numina/recipes/__init__.py +++ b/lib/numina/recipes/__init__.py @@ -62,8 +62,8 @@ class RecipeBase: self.repeat = run.get('repeat', 1) self._current = 0 - def setup(self, _param): - warnings.warn("the setup method is deprecated", DeprecationWarning, stacklevel=2) + def setup(self): + pass def cleanup(self): '''Cleanup structures after recipe execution.'''
setup method is reserrected
diff --git a/misc/log-analytics/import_logs.py b/misc/log-analytics/import_logs.py index <HASH>..<HASH> 100755 --- a/misc/log-analytics/import_logs.py +++ b/misc/log-analytics/import_logs.py @@ -1353,7 +1353,7 @@ class Parser(object): hit.host = config.options.log_hostname else: try: - hit.host = match.group('host') + hit.host = match.group('host').lower().strip('.') except IndexError: # Some formats have no host. pass
import_logs.py: lowercase the host and strip dots at both ends. git-svn-id: <URL>
diff --git a/lib/ronin/config.rb b/lib/ronin/config.rb index <HASH>..<HASH> 100644 --- a/lib/ronin/config.rb +++ b/lib/ronin/config.rb @@ -38,10 +38,14 @@ module Ronin # Temporary file directory TMP_DIR = File.join(PATH,'tmp') + + # Directory for storing recovered remote files + FILES_DIR = File.join(PATH,'files') FileUtils.mkdir(PATH) unless File.directory?(PATH) FileUtils.mkdir(CONFIG_DIR) unless File.directory?(PATH) FileUtils.mkdir(TMP_DIR) unless File.directory?(TMP_DIR) + FileUtils.mkdir(FILES_DIR) unless File.directory?(FILES_DIR) # # Loads the Ronin configuration file.
Added the ~/.ronin/files/ directory for storing RemoteFiles.
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -271,7 +271,7 @@ def get_version_info(): # If this is a release or another kind of source distribution of PyCBC except: - version = '1.7.5' + version = '1.7.6' release = 'True' date = hash = branch = tag = author = committer = status = builder = build_date = ''
set for <I> release (#<I>)
diff --git a/fermipy/jobs/batch.py b/fermipy/jobs/batch.py index <HASH>..<HASH> 100644 --- a/fermipy/jobs/batch.py +++ b/fermipy/jobs/batch.py @@ -48,7 +48,7 @@ def get_batch_job_interface(job_time=1500): if DEFAULT_JOB_TYPE == 'slac': from fermipy.jobs.slac_impl import SlacInterface - return Slac_Interface(**batch_job_args) + return SlacInterface(**batch_job_args) elif DEFAULT_JOB_TYPE == 'native': from fermipy.jobs.native_impl import NativeInterface - return Native_Interface(**batch_job_args) + return NativeInterface(**batch_job_args) diff --git a/fermipy/jobs/link.py b/fermipy/jobs/link.py index <HASH>..<HASH> 100644 --- a/fermipy/jobs/link.py +++ b/fermipy/jobs/link.py @@ -589,7 +589,7 @@ class Link(object): By default this checks the status of the top-level job """ - if key in self.jobs.has_key: + if key in self.jobs: status = self.jobs[key].status if status in [JobStatus.unknown, JobStatus.ready, JobStatus.pending, JobStatus.running] or force_check:
Fix a couple of minor issues left over from code cleanup
diff --git a/lib/conceptql/database.rb b/lib/conceptql/database.rb index <HASH>..<HASH> 100644 --- a/lib/conceptql/database.rb +++ b/lib/conceptql/database.rb @@ -1,4 +1,4 @@ -require 'facets/core/hash/revalue' +require 'facets/hash/revalue' module ConceptQL class Database
Fix incorrect require of hash/revalue
diff --git a/lib/shopify_theme/cli.rb b/lib/shopify_theme/cli.rb index <HASH>..<HASH> 100644 --- a/lib/shopify_theme/cli.rb +++ b/lib/shopify_theme/cli.rb @@ -19,7 +19,7 @@ module ShopifyTheme desc "configure API_KEY PASSWORD STORE", "generate a config file for the store to connect to" def configure(api_key=nil, password=nil, store=nil) - config = {:api_key => api_key, :password => password, :store => store, :ignore_files => []} + config = {:api_key => api_key, :password => password, :store => store, :ignore_files => ["README"]} create_file('config.yml', config.to_yaml) end @@ -84,7 +84,7 @@ module ShopifyTheme end if !options['keep_files'] m.delete do |base, relative| - delete_asset(relative, options['quiet']) + delete_asset(relative, options['quiet']) if local_assets_list.include?(relative) end end end
default ignore files config, delete should check the ignore files as well
diff --git a/Model/Variant.php b/Model/Variant.php index <HASH>..<HASH> 100644 --- a/Model/Variant.php +++ b/Model/Variant.php @@ -13,7 +13,6 @@ namespace Sylius\Component\Variation\Model; use Doctrine\Common\Collections\ArrayCollection; use Doctrine\Common\Collections\Collection; -use Sylius\Component\Resource\Model\SoftDeletableTrait; use Sylius\Component\Resource\Model\TimestampableTrait; /** diff --git a/Model/VariantInterface.php b/Model/VariantInterface.php index <HASH>..<HASH> 100644 --- a/Model/VariantInterface.php +++ b/Model/VariantInterface.php @@ -13,7 +13,6 @@ namespace Sylius\Component\Variation\Model; use Doctrine\Common\Collections\Collection; use Sylius\Component\Resource\Model\ResourceInterface; -use Sylius\Component\Resource\Model\SoftDeletableInterface; use Sylius\Component\Resource\Model\TimestampableInterface; /**
Get rid of the SoftDeletable Interface and Trait
diff --git a/lib/ovirt/vm.rb b/lib/ovirt/vm.rb index <HASH>..<HASH> 100644 --- a/lib/ovirt/vm.rb +++ b/lib/ovirt/vm.rb @@ -174,8 +174,9 @@ module OVIRT unless runcmd.nil? runcmd.each do |cmd| cmdlist = \ - "#{cmdlist}\n" \ - "- #{cmd}\n" + "#{cmdlist}\n" \ + "- |\n"\ + " #{cmd.lines.join(" ")}\n" end if extracmd.nil? extracmd = cmdlist
Support for multi-line runcmd params In Foreman, we use cloud-init templates like this: ``` runcmd: - | echo 1 echo 2 echo 3 - | echo a echo b echo c ``` Currently, the rbovirt is preparing the cloud-init template, it counts on the fact the runcmd are only single lines, which limits it's use significantly.
diff --git a/urwid_datatable/datatable.py b/urwid_datatable/datatable.py index <HASH>..<HASH> 100644 --- a/urwid_datatable/datatable.py +++ b/urwid_datatable/datatable.py @@ -1255,7 +1255,7 @@ class DataTable(urwid.WidgetWrap, MutableSequence): # raise Exception(columns) # for c in self.columns: # self.remove_column(c.name) - del self.columns[:] + self.columns = [] for c in columns: self._add_column(c) self.update_header()
Fix another bug with removing columns.
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -250,22 +250,6 @@ function session(options){ : rawCookie; } - // set-cookie - var writeHead = res.writeHead; - res.writeHead = function(){ - if (!req.session) { - debug('no session'); - writeHead.apply(res, arguments); - return; - } - - var val = 's:' + signature.sign(req.sessionID, secret); - val = cookie.serialize(key, val); - debug('set-cookie %s', val); - res.setHeader('Set-Cookie', val); - writeHead.apply(res, arguments); - }; - // proxy end() to commit the session var end = res.end; res.end = function(data, encoding){
forgot part of set-cookie to remove
diff --git a/cli/Valet/Valet.php b/cli/Valet/Valet.php index <HASH>..<HASH> 100644 --- a/cli/Valet/Valet.php +++ b/cli/Valet/Valet.php @@ -80,7 +80,7 @@ class Valet $url = 'https://api.github.com/repos/laravel/valet/releases/latest'; $response = Utils::jsonDecode((new Client())->get($url)->getBody()); - return version_compare($currentVersion, trim($response->body->tag_name, 'v'), '>='); + return version_compare($currentVersion, trim($response->tag_name, 'v'), '>='); } /**
Access `tag_name` directly Since `$response` is the response body now the body below is obsolete.
diff --git a/src/android/CameraPreview.java b/src/android/CameraPreview.java index <HASH>..<HASH> 100644 --- a/src/android/CameraPreview.java +++ b/src/android/CameraPreview.java @@ -466,9 +466,6 @@ public class CameraPreview extends CordovaPlugin implements CameraActivity.Camer private boolean setOnPictureTakenHandler(JSONArray args, CallbackContext callbackContext) { Log.d(TAG, "setOnPictureTakenHandler"); takePictureCallbackContext = callbackContext; - PluginResult pluginResult = new PluginResult(PluginResult.Status.OK); - pluginResult.setKeepCallback(true); - callbackContext.sendPluginResult(pluginResult); return true; } }
fix: setOnPictureTakenHandler should not be called back immediately after set (#<I>)
diff --git a/wandb/integration/keras/keras.py b/wandb/integration/keras/keras.py index <HASH>..<HASH> 100644 --- a/wandb/integration/keras/keras.py +++ b/wandb/integration/keras/keras.py @@ -1014,7 +1014,8 @@ class WandbCallback(tf.keras.callbacks.Callback): self.model.save(self.filepath[:-3], overwrite=True, save_format="tf") # Log the model as artifact. - model_artifact = wandb.Artifact(f"model-{wandb.run.name}", type="model") + name = wandb.util.make_artifact_name_safe(f"model-{wandb.run.name}") + model_artifact = wandb.Artifact(name, type="model") model_artifact.add_dir(self.filepath[:-3]) wandb.run.log_artifact(model_artifact, aliases=["latest", f"epoch_{epoch}"])
sanitize run name (#<I>)
diff --git a/lib/devise/models/timeoutable.rb b/lib/devise/models/timeoutable.rb index <HASH>..<HASH> 100644 --- a/lib/devise/models/timeoutable.rb +++ b/lib/devise/models/timeoutable.rb @@ -12,13 +12,12 @@ module Devise # Checks whether the user session has expired based on configured time. def timeout?(last_access) - last_access && last_access <= timeout.ago.utc + last_access && last_access <= self.class.timeout.ago.utc end module ClassMethods + Devise::Models.config(self, :timeout) end - - Devise::Models.config(self, :timeout) end end end diff --git a/test/models_test.rb b/test/models_test.rb index <HASH>..<HASH> 100644 --- a/test/models_test.rb +++ b/test/models_test.rb @@ -115,7 +115,7 @@ class ActiveRecordTest < ActiveSupport::TestCase end test 'set a default value for timeout' do - assert_equal 15.minutes, Configurable.new.timeout + assert_equal 15.minutes, Configurable.timeout end test 'set null fields on migrations' do
Updating timeoutable with last devise changes.
diff --git a/tests/integration/modules/event.py b/tests/integration/modules/event.py index <HASH>..<HASH> 100644 --- a/tests/integration/modules/event.py +++ b/tests/integration/modules/event.py @@ -81,7 +81,7 @@ class EventModuleTest(integration.ModuleCase): with self.assertRaises(Empty): eventfired = events.get(block=True, timeout=10) - def test_event_fire_ipc_mode_tcp(self): + def __test_event_fire_ipc_mode_tcp(self): events = Queue() def get_event(events):
stub out another event test that needs refinement
diff --git a/cumulusci/core/flows.py b/cumulusci/core/flows.py index <HASH>..<HASH> 100644 --- a/cumulusci/core/flows.py +++ b/cumulusci/core/flows.py @@ -175,7 +175,12 @@ class BaseFlow(object): self._run_task(flow_task_config) def _run_flow(self, flow_task_config): - flow = BaseFlow( + class_path = flow_task_config['task_config'].config.get( + 'class_path', + 'cumulusci.core.flows.BaseFlow', + ) + flow_class = import_class(class_path) + flow = flow_class( self.project_config, flow_task_config['task_config'], self.org_config,
support class override for nested flows
diff --git a/servers/src/main/java/tachyon/UserInfo.java b/servers/src/main/java/tachyon/UserInfo.java index <HASH>..<HASH> 100644 --- a/servers/src/main/java/tachyon/UserInfo.java +++ b/servers/src/main/java/tachyon/UserInfo.java @@ -57,14 +57,14 @@ public class UserInfo { @Override public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UserInfo userInfo = (UserInfo) o; - return mUserId == userInfo.mUserId; + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UserInfo userInfo = (UserInfo) o; + return mUserId == userInfo.mUserId; } @Override public int hashCode() { - return (int) (mUserId ^ (mUserId >>> 32)); + return (int) (mUserId ^ (mUserId >>> 32)); } }
Update UserInfo.java fixed indentation.
diff --git a/test/integration/fixtures/adapter.withHandlers.fixture.js b/test/integration/fixtures/adapter.withHandlers.fixture.js index <HASH>..<HASH> 100644 --- a/test/integration/fixtures/adapter.withHandlers.fixture.js +++ b/test/integration/fixtures/adapter.withHandlers.fixture.js @@ -10,7 +10,7 @@ module.exports = { // // The tests work by passing a `_simulate` option as a property to the first argument, // which might be `options` or `values`. If `options`, it's a criteria, so we have to - // check the `where` + // check the `where` since it's being automatically normalized in Waterline core. find: function (cid, options, cb) { return _interpretUsageTest(options.where && options.where._simulate, cb); },
Bit more explanation on why we have to look in 'where' in our test fixture for handlers (automatically normalized)
diff --git a/lib/rabbitmq/channel.rb b/lib/rabbitmq/channel.rb index <HASH>..<HASH> 100644 --- a/lib/rabbitmq/channel.rb +++ b/lib/rabbitmq/channel.rb @@ -35,13 +35,13 @@ module RabbitMQ end # @see {Connection#on_event} - def on(event_type, callable=nil, &block) - @connection.on_event(@id, event_type, callable, &block) + def on(*args, &block) + @connection.on_event(@id, *args, &block) end # @see {Connection#run_loop!} - def run_loop! - @connection.run_loop! + def run_loop!(*args) + @connection.run_loop!(*args) end # @see {Connection#break!}
Fix signature of forwarding methods in Channel to be more general.
diff --git a/webpack.config.js b/webpack.config.js index <HASH>..<HASH> 100644 --- a/webpack.config.js +++ b/webpack.config.js @@ -1,4 +1,4 @@ require('argv-set-env')(); var webpack = require('./webpack/config')[process.env.NODE_ENV === 'development' ? 'dist' : 'prod']; -module.exports = webpack; \ No newline at end of file +module.exports = webpack; diff --git a/webpack/build.js b/webpack/build.js index <HASH>..<HASH> 100644 --- a/webpack/build.js +++ b/webpack/build.js @@ -22,4 +22,4 @@ module.exports = function (type) { } ]; } -}; \ No newline at end of file +};
style: missing new lines in webpack
diff --git a/lib/hobo/virtual_box.rb b/lib/hobo/virtual_box.rb index <HASH>..<HASH> 100644 --- a/lib/hobo/virtual_box.rb +++ b/lib/hobo/virtual_box.rb @@ -27,6 +27,7 @@ class VirtualBox "cableconnected1" => "on", "nictrace1" => "off", "bridgeadapter1" => "en0: Ethernet", + "macaddress1" => "08002771F257", # Ports "audio" => "none", "clipboard" => "bidirectional",
Hard code MAC address for NIC. Must match the base VM.
diff --git a/definitions/npm/react-apollo_v2.x.x/flow_v0.58.x-/react-apollo_v2.x.x.js b/definitions/npm/react-apollo_v2.x.x/flow_v0.58.x-/react-apollo_v2.x.x.js index <HASH>..<HASH> 100644 --- a/definitions/npm/react-apollo_v2.x.x/flow_v0.58.x-/react-apollo_v2.x.x.js +++ b/definitions/npm/react-apollo_v2.x.x/flow_v0.58.x-/react-apollo_v2.x.x.js @@ -206,7 +206,7 @@ declare module 'react-apollo' { declare export function cleanupApolloState(apolloState: any): void; declare type QueryRenderPropFunction<TData, TVariables> = ({ - data: TData, + data?: TData, loading: boolean, error: ?ApolloError, variables: TVariables,
[react-apollo] Fix Query component typing (#<I>) If case of error, result object doesn't contain `data` property, please see the original TypeScript typings: <URL>
diff --git a/lib/cancan/ability.rb b/lib/cancan/ability.rb index <HASH>..<HASH> 100644 --- a/lib/cancan/ability.rb +++ b/lib/cancan/ability.rb @@ -283,9 +283,9 @@ module CanCan protected - def rules - @rules - end + # Must be protected as an ability can merge with other abilities. + # This means that an ability must expose their rules with another ability. + attr_reader :rules private @@ -356,8 +356,7 @@ module CanCan def alternative_subjects(subject) subject = subject.class unless subject.is_a?(Module) - descendants = [] - [:all, *subject.ancestors, *descendants, subject.class.to_s] + [:all, *subject.ancestors, subject.class.to_s] end # Returns an array of Rule instances which match the action and subject
Minor ability cleanup. descendants in the alternative_subjects method will always be an empty array. We can remove this statement. Make rules an attr_reader and add a bit of documentation as to why it needs to be protected, as this is one of the -few- cases in ruby where it makes sense for it to be so.
diff --git a/kbfsblock/bserver_constants.go b/kbfsblock/bserver_constants.go index <HASH>..<HASH> 100644 --- a/kbfsblock/bserver_constants.go +++ b/kbfsblock/bserver_constants.go @@ -8,5 +8,5 @@ const ( // ServerTokenServer is the expected server type for bserver authentication. ServerTokenServer = "kbfs_block" // ServerTokenExpireIn is the TTL to use when constructing an authentication token. - ServerTokenExpireIn = 2 * 60 * 60 // 2 hours + ServerTokenExpireIn = 24 * 60 * 60 // 24 hours ) diff --git a/kbfsmd/server_constants.go b/kbfsmd/server_constants.go index <HASH>..<HASH> 100644 --- a/kbfsmd/server_constants.go +++ b/kbfsmd/server_constants.go @@ -8,5 +8,5 @@ const ( // ServerTokenServer is the expected server type for mdserver authentication. ServerTokenServer = "kbfs_md" // ServerTokenExpireIn is the TTL to use when constructing an authentication token. - ServerTokenExpireIn = 2 * 60 * 60 // 2 hours + ServerTokenExpireIn = 24 * 60 * 60 // 24 hours )
kbfsmd/kbfsblock: Increase bserver and mdserver token expiry times. Needs to be deployed before any clients start sending such expire times.
diff --git a/define-test.js b/define-test.js index <HASH>..<HASH> 100644 --- a/define-test.js +++ b/define-test.js @@ -1489,3 +1489,18 @@ QUnit.test("async setter is provided", 5, function(){ QUnit.equal(instance.prop2, 9, "used async setter updates after"); }); + +QUnit.test('setter with default value causes an infinite loop (#142)', function(){ + var A = define.Constructor({ + val: { + value: 'hello', + set(val){ + console.log(this.val); + return val; + } + } + }); + + var a = new A(); + QUnit.equal(a.val, 'hello', 'creating an instance should not cause an inifinte loop'); +});
adding test for infinite loop issue
diff --git a/src/structures/Channel.js b/src/structures/Channel.js index <HASH>..<HASH> 100644 --- a/src/structures/Channel.js +++ b/src/structures/Channel.js @@ -100,17 +100,12 @@ class Channel extends Base { const Structures = require('../util/Structures'); let channel; if (!data.guild_id && !guild) { - switch (data.type) { - case ChannelTypes.DM: { - const DMChannel = Structures.get('DMChannel'); - channel = new DMChannel(client, data); - break; - } - case ChannelTypes.GROUP: { - const PartialGroupDMChannel = require('./PartialGroupDMChannel'); - channel = new PartialGroupDMChannel(client, data); - break; - } + if ((data.recipients && data.type !== ChannelTypes.GROUP) || data.type === ChannelTypes.DM) { + const DMChannel = Structures.get('DMChannel'); + channel = new DMChannel(client, data); + } else if (data.type === ChannelTypes.GROUP) { + const PartialGroupDMChannel = require('./PartialGroupDMChannel'); + channel = new PartialGroupDMChannel(client, data); } } else { guild = guild || client.guilds.cache.get(data.guild_id);
fix(Channel): ensure partial DMChannels get created (#<I>)
diff --git a/src/Illuminate/Database/Query/Builder.php b/src/Illuminate/Database/Query/Builder.php index <HASH>..<HASH> 100755 --- a/src/Illuminate/Database/Query/Builder.php +++ b/src/Illuminate/Database/Query/Builder.php @@ -715,7 +715,7 @@ class Builder * Add a raw or where clause to the query. * * @param string $sql - * @param array $bindings + * @param mixed $bindings * @return \Illuminate\Database\Query\Builder|static */ public function orWhereRaw($sql, $bindings = [])
Update orWhereRaw docblock following #<I> (#<I>) Thanks @a-komarev for pointing out.
diff --git a/mdc_api/mdc_api.py b/mdc_api/mdc_api.py index <HASH>..<HASH> 100755 --- a/mdc_api/mdc_api.py +++ b/mdc_api/mdc_api.py @@ -448,12 +448,21 @@ class connector: :return: successful ``.status_code`` / ``.is_done``. Check the ``.error`` :rtype: asyncResult ''' + if isinstance(JSONdata,str) and self._isJSON(JSONdata): + self.log.warn("pre-subscription data was a string, converting to a list : %s",JSONdata) + JSONdata = json.loads(JSONdata) # convert json string to list + if not (isinstance(JSONdata,list) and self._isJSON(JSONdata)): + self.log.error("pre-subscription data is not valid. Please make sure it is a valid JSON list") result = asyncResult() data = self._putURL("/subscriptions",JSONdata, versioned=False) - if data.status_code == 200: #immediate success + if data.status_code == 200: #immediate success with response result.error = False result.is_done = True result.result = data.json() + elif data.status_code == 204: # immediate success with no response + result.error = False + result.is_done = True + result.result = [] else: result.error = response_codes("presubscription",data.status_code) result.is_done = True
fixed putPreSubscription to handle both list and str json objects, enhanced return code handling
diff --git a/gandi/cli/modules/vhost.py b/gandi/cli/modules/vhost.py index <HASH>..<HASH> 100644 --- a/gandi/cli/modules/vhost.py +++ b/gandi/cli/modules/vhost.py @@ -45,6 +45,7 @@ class Vhost(GandiModule): cls.echo("We're creating a new vhost.") cls.display_progress(result) cls.echo('Your vhost %s have been created.' % vhost) + return result @classmethod def delete(cls, resources, background=False):
To know if something was done during the process.
diff --git a/tests/test_extra.py b/tests/test_extra.py index <HASH>..<HASH> 100644 --- a/tests/test_extra.py +++ b/tests/test_extra.py @@ -637,7 +637,8 @@ def test_promises_with_only_then(): promise3 = promise1.then(lambda x: None) context["promise1_reject"](error) - promise1._wait() + promise2._wait() + promise3._wait() assert promise2.reason == error assert promise3.reason == error diff --git a/tests/test_issues.py b/tests/test_issues.py index <HASH>..<HASH> 100644 --- a/tests/test_issues.py +++ b/tests/test_issues.py @@ -61,9 +61,11 @@ def test_issue_26(): context["promise1_reject"](RuntimeError("Ooops!")) promise2 = Promise(lambda resolve, reject: context.update({"promise2_resolve": resolve})) - promise2.then(lambda x: context.update({"success": True})) + promise3 = promise2.then(lambda x: context.update({"success": True})) context["promise2_resolve"](None) + # We wait so it works in asynchronous envs + promise3._wait(timeout=.1) assert context["success"]
Fixed tests in async environments
diff --git a/safe_qgis/widgets/dock.py b/safe_qgis/widgets/dock.py index <HASH>..<HASH> 100644 --- a/safe_qgis/widgets/dock.py +++ b/safe_qgis/widgets/dock.py @@ -1929,11 +1929,13 @@ class Dock(QtGui.QDockWidget, Ui_DockBase): mySettings = QSettings() lastSaveDir = mySettings.value('inasafe/lastSourceDir', '.') lastSaveDir = str(lastSaveDir.toString()) + default_name = myTitle.replace( + ' ', '_').replace('(', '').replace(')', '') if theScenarioFilePath is None: # noinspection PyCallByClass,PyTypeChecker myFileName = str(QFileDialog.getSaveFileName( self, myTitleDialog, - os.path.join(lastSaveDir, myTitle + '.txt'), + os.path.join(lastSaveDir, default_name + '.txt'), "Text files (*.txt)")) else: myFileName = theScenarioFilePath
Underscore separate batch file names by default.
diff --git a/src/main/java/com/ibm/stocator/fs/swift/SwiftAPIClient.java b/src/main/java/com/ibm/stocator/fs/swift/SwiftAPIClient.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/ibm/stocator/fs/swift/SwiftAPIClient.java +++ b/src/main/java/com/ibm/stocator/fs/swift/SwiftAPIClient.java @@ -497,7 +497,9 @@ public class SwiftAPIClient implements IStoreClient { LOG.trace("{} does not match {}. Skipped", unifiedObjectName, obj); continue; } - if (isSparkOrigin(unifiedObjectName) && !fullListing) { + LOG.trace("Unified name: {}, path {}", unifiedObjectName, tmp.getName()); + if (!unifiedObjectName.equals(tmp.getName()) && isSparkOrigin(unifiedObjectName) + && !fullListing) { LOG.trace("{} created by Spark", unifiedObjectName); if (!isJobSuccessful(unifiedObjectName)) { LOG.trace("{} created by failed Spark job. Skipped", unifiedObjectName);
no need to check isSparkOrigin on objects whose unified name equals the object name
diff --git a/lib/services/s3.js b/lib/services/s3.js index <HASH>..<HASH> 100644 --- a/lib/services/s3.js +++ b/lib/services/s3.js @@ -495,7 +495,7 @@ AWS.util.update(AWS.S3.prototype, { return done(); } - var regionReq = req.service.listObjects({Bucket: bucket}); + var regionReq = req.service.listObjects({Bucket: bucket, MaxKeys: 0}); regionReq._requestRegionForBucket = bucket; regionReq.send(function() { var region = req.service.bucketRegionCache[bucket] || null; @@ -535,7 +535,7 @@ AWS.util.update(AWS.S3.prototype, { } done(); } else if (request.httpRequest.virtualHostedBucket) { - var getRegionReq = service.listObjects({Bucket: bucket}); + var getRegionReq = service.listObjects({Bucket: bucket, MaxKeys: 0}); service.updateReqBucketRegion(getRegionReq, 'us-east-1'); getRegionReq._requestRegionForBucket = bucket;
For internal SDK requests for bucket region, adds a MaxKeys constraint of 0 so that response will not return any object keys. Although in most cases the request will return an error as well as the region and not be retried, there are some cases in the browser where the request may succeed and return a list of object keys, so this new constraint addresses those cases.
diff --git a/bulk_writer.js b/bulk_writer.js index <HASH>..<HASH> 100644 --- a/bulk_writer.js +++ b/bulk_writer.js @@ -1,3 +1,5 @@ +/* eslint no-underscore-dangle: ["error", { "allow": ["_index", "_type"] }] */ + const fs = require('fs'); const path = require('path'); const Promise = require('promise');
allow underscore dangling for _index and _type (expected by ES client)
diff --git a/lib/sendwithus_ruby_action_mailer/version.rb b/lib/sendwithus_ruby_action_mailer/version.rb index <HASH>..<HASH> 100644 --- a/lib/sendwithus_ruby_action_mailer/version.rb +++ b/lib/sendwithus_ruby_action_mailer/version.rb @@ -1,3 +1,3 @@ module SendWithUsMailer - VERSION = "0.4.0" + VERSION = "1.0.0" end
Update the version number to <I>
diff --git a/src/browser/extension/inject/pageScript.js b/src/browser/extension/inject/pageScript.js index <HASH>..<HASH> 100644 --- a/src/browser/extension/inject/pageScript.js +++ b/src/browser/extension/inject/pageScript.js @@ -185,9 +185,11 @@ const preEnhancer = instanceId => next => const store = next(reducer, preloadedState, enhancer); // Mutate the store in order to keep the reference - stores[instanceId].dispatch = store.dispatch; - stores[instanceId].liftedStore = store.liftedStore; - stores[instanceId].getState = store.getState; + if (stores[instanceId]) { + stores[instanceId].dispatch = store.dispatch; + stores[instanceId].liftedStore = store.liftedStore; + stores[instanceId].getState = store.getState; + } return { ...store,
Don't update the store when the page is blacklisted in settings
diff --git a/lib/fog/dreamhost/dns.rb b/lib/fog/dreamhost/dns.rb index <HASH>..<HASH> 100644 --- a/lib/fog/dreamhost/dns.rb +++ b/lib/fog/dreamhost/dns.rb @@ -1,6 +1,5 @@ require File.expand_path(File.join(File.dirname(__FILE__), '..', 'dreamhost')) require 'fog/dns' -require 'uuid' module Fog module DNS @@ -70,8 +69,7 @@ module Fog def request(params) params[:query].merge!( { :key => @dreamhost_api_key, - :format => 'json', - :unique_id => UUID.generate } ) + :format => 'json' } ) response = @connection.request(params) unless response.body.empty?
[dreamhost|dns] drop uuid gem requirements, not needed
Not sure I understand the unique_id documentation from Dreamhost, but experimentation shows that a new UUID for every request isn't required and Dreamhost says it's optional. <URL>
diff --git a/integration-cli/docker_cli_top_test.go b/integration-cli/docker_cli_top_test.go index <HASH>..<HASH> 100644 --- a/integration-cli/docker_cli_top_test.go +++ b/integration-cli/docker_cli_top_test.go @@ -52,7 +52,7 @@ func TestTopPrivileged(t *testing.T) { topCmd = exec.Command(dockerBinary, "top", cleanedContainerID) out2, _, err2 := runCommandWithOutput(topCmd) - errorOut(err, t, fmt.Sprintf("failed to run top: %v %v", out2, err2)) + errorOut(err2, t, fmt.Sprintf("failed to run top: %v %v", out2, err2)) killCmd := exec.Command(dockerBinary, "kill", cleanedContainerID) _, err = runCommand(killCmd)
Fixed error check using the wrong error value errorOut was using the err from the previous test. same as #<I> but on TestTopPrivileged, which I missed last time
diff --git a/lib/jazzy/sourcekitten.rb b/lib/jazzy/sourcekitten.rb index <HASH>..<HASH> 100644 --- a/lib/jazzy/sourcekitten.rb +++ b/lib/jazzy/sourcekitten.rb @@ -97,13 +97,11 @@ module Jazzy # Determine the subdirectory in which a doc should be placed def self.subdir_for_doc(doc, parents) - # To group docs by category file instead of declaration type, - # return parents.map(&:name) - - if doc.type.name - [doc.type.plural_name] - else - [] + parents.map(&:name).tap do |names| + # We always want to create top-level subdirs according to type (Struct, + # Class, etc), but parents[0] might be a custom category name. + top_level_decl = (parents + [doc])[1] + names[0] = top_level_decl.type.plural_name if top_level_decl end end
Fixed subdirs for nested types e.g. Classes/Request/TaskDelegate.html, not Classes/TaskDelegate.html
diff --git a/dropwizard-core/src/test/java/io/dropwizard/validation/InjectValidatorFeatureTest.java b/dropwizard-core/src/test/java/io/dropwizard/validation/InjectValidatorFeatureTest.java index <HASH>..<HASH> 100644 --- a/dropwizard-core/src/test/java/io/dropwizard/validation/InjectValidatorFeatureTest.java +++ b/dropwizard-core/src/test/java/io/dropwizard/validation/InjectValidatorFeatureTest.java @@ -36,7 +36,7 @@ class InjectValidatorFeatureTest { private ValidatorFactory validatorFactory; @BeforeEach - void setUp() throws Exception { + void setUp() { Bootstrap<Configuration> bootstrap = new Bootstrap<>(application); application.initialize(bootstrap); @@ -57,14 +57,10 @@ class InjectValidatorFeatureTest { // Run validation manually Set<ConstraintViolation<Bean>> constraintViolations = validator.validate(new Bean(1)); - - assertThat(constraintViolations.size()).isEqualTo(1); - - Optional<String> message = constraintViolations.stream() - .findFirst() - .map(ConstraintViolation::getMessage); - - assertThat(message).hasValue("must be greater than or equal to 10"); + assertThat(constraintViolations) + .singleElement() + .extracting(ConstraintViolation::getMessage) + .isEqualTo("must be greater than or equal to 10"); } @Test
Simplify InjectValidatorFeatureTest
diff --git a/spec/apruve/resources/order_spec.rb b/spec/apruve/resources/order_spec.rb index <HASH>..<HASH> 100644 --- a/spec/apruve/resources/order_spec.rb +++ b/spec/apruve/resources/order_spec.rb @@ -145,7 +145,7 @@ describe Apruve::Order do context 'successful response' do let! (:stubs) do faraday_stubs do |stub| - stub.get('/api/v4/orders') { [200, {}, '{}'] } + stub.get('/api/v4/orders') { [200, {}, '[]'] } end end it 'should get all orders' do
AP-<I>: changed spec response to contain json array not an object
diff --git a/dragonpy/core/configs.py b/dragonpy/core/configs.py index <HASH>..<HASH> 100644 --- a/dragonpy/core/configs.py +++ b/dragonpy/core/configs.py @@ -130,6 +130,7 @@ class BaseConfig(object): self.mem_info = DummyMemInfo() self.memory_byte_middlewares = {} + self.memory_word_middlewares = {} def _get_initial_Memory(self, size): return [0x00] * size
bugfix: 'Cfg' object has no attribute 'memory_word_middlewares'
diff --git a/Datagrid/PropelDatagrid.php b/Datagrid/PropelDatagrid.php index <HASH>..<HASH> 100644 --- a/Datagrid/PropelDatagrid.php +++ b/Datagrid/PropelDatagrid.php @@ -346,10 +346,8 @@ abstract class PropelDatagrid implements PropelDatagridInterface foreach ($sort as $column => $order) { $method = 'orderBy'.ucfirst($column); - try { + if (method_exists($this->getQuery(), $method)) { $this->getQuery()->{$method}($order); - } catch (\Exception $e) { - throw new \Exception('There is no method "'.$method.'" to sort the datagrid on column "'.$column.'". Just create it in the "'.get_class($this->query).'" object.'); } } }
Fail silently when sorting on unknown column.
diff --git a/tests/PackageManager/ComposerIOBridgeTest.php b/tests/PackageManager/ComposerIOBridgeTest.php index <HASH>..<HASH> 100644 --- a/tests/PackageManager/ComposerIOBridgeTest.php +++ b/tests/PackageManager/ComposerIOBridgeTest.php @@ -13,11 +13,6 @@ namespace Yosymfony\Spress\PackageManager; use Yosymfony\Spress\Core\IO\NullIO; -/** - * Bridge between Spress IO and Composer IO. - * - * @author Victor Puertas <vpgugr@gmail.com> - */ class ComposerIOBridgeTest extends \PHPUnit_Framework_TestCase { public function testVerbosity()
Deleted unnecessary docblock
diff --git a/lib/schema.js b/lib/schema.js index <HASH>..<HASH> 100644 --- a/lib/schema.js +++ b/lib/schema.js @@ -285,7 +285,7 @@ var RootTable = { return "CREATE TABLE " + this.name + "(" + atts.join(",") + ")"; }, toJSON: function () { - var o = { name : this.name, attributes: this.attributes }; + var o = { attributes: this.attributes }; if(this.constraints && this.constraints.length > 0) o.constraints = this.constraints; return o; },
table.name is not required in the json format (surrounding object holds name already)
diff --git a/lib/autokey/scripting/engine.py b/lib/autokey/scripting/engine.py index <HASH>..<HASH> 100644 --- a/lib/autokey/scripting/engine.py +++ b/lib/autokey/scripting/engine.py @@ -91,9 +91,8 @@ class Engine: cannot use absolute paths.") path = parent_folder.expanduser() / title path.mkdir(parents=True, exist_ok=True) - new_folder = model.Folder(title) + new_folder = model.Folder(title, str(path.resolve()) self.configManager.allFolders.append(new_folder) - new_folder.path = str(path.resolve()) return new_folder # TODO: Convert this to use get_folder, when we change to specifying # the exact folder by more than just title.
Create_folder: Set path as arg, not after creation
diff --git a/lib/adhearsion/console.rb b/lib/adhearsion/console.rb index <HASH>..<HASH> 100644 --- a/lib/adhearsion/console.rb +++ b/lib/adhearsion/console.rb @@ -33,10 +33,10 @@ module Adhearsion if libedit? logger.error "Cannot start. You are running Adhearsion on Ruby with libedit. You must use readline for the console to work." else - logger.info "Starting up..." + logger.info "Launching Adhearsion Console" @pry_thread = Thread.current pry - logger.info "Console exiting" + logger.info "Adhearsion Console exiting" end end diff --git a/lib/adhearsion/router.rb b/lib/adhearsion/router.rb index <HASH>..<HASH> 100644 --- a/lib/adhearsion/router.rb +++ b/lib/adhearsion/router.rb @@ -23,7 +23,7 @@ module Adhearsion def handle(call) return unless route = match(call) - logger.debug "Call #{call.id} passing through router matched route #{route}" + logger.debug "Call #{call.id} selected route \"#{route.name}\" (#{route.target})" route.dispatcher end end
Tweak log messages for readability
diff --git a/openquake/calculators/hazard/disagg/core.py b/openquake/calculators/hazard/disagg/core.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/hazard/disagg/core.py +++ b/openquake/calculators/hazard/disagg/core.py @@ -19,6 +19,7 @@ Disaggregation calculator core functionality """ import nhlib +import numpy from django.db import transaction @@ -116,8 +117,9 @@ def compute_disagg(job_id, points, lt_rlz_id): hazard_curve__sa_damping=sa_damping, ) - for poe in []: - pass + for poe in hc.poes_disagg: + iml = numpy.interp(poe, curve.poes[::-1], imls) + logs.LOG.warn('iml is %s' % iml) # TODO: for each disagg poe, interpolate IML for the curve # TODO: load the site model, if there is one # TODO: Prepare the args for the calculator.
calcs/hazard/disagg/core: Added sketch of curve interpolation.
diff --git a/rspec-crispy/spec/rspec/crispy/configure_without_conflict_spec.rb b/rspec-crispy/spec/rspec/crispy/configure_without_conflict_spec.rb index <HASH>..<HASH> 100644 --- a/rspec-crispy/spec/rspec/crispy/configure_without_conflict_spec.rb +++ b/rspec-crispy/spec/rspec/crispy/configure_without_conflict_spec.rb @@ -112,6 +112,13 @@ RSpec.describe ::RSpec::Crispy do context 'with arguments' do + context 'given a method and arguments ObjectClass actually called' do + let(:method_name){ :hoge } + let(:arguments){ [1, 1, 1] } + + it { is_expected.to be_matches(ObjectClass) } + end + end end
add spec when given argument to have_received
diff --git a/src/Symfony/Bundle/TwigBundle/CacheWarmer/TemplateCacheCacheWarmer.php b/src/Symfony/Bundle/TwigBundle/CacheWarmer/TemplateCacheCacheWarmer.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Bundle/TwigBundle/CacheWarmer/TemplateCacheCacheWarmer.php +++ b/src/Symfony/Bundle/TwigBundle/CacheWarmer/TemplateCacheCacheWarmer.php @@ -26,7 +26,7 @@ use Symfony\Bundle\FrameworkBundle\CacheWarmer\TemplateFinderInterface; class TemplateCacheCacheWarmer implements CacheWarmerInterface { protected $container; - protected $warmer; + protected $finder; /** * Constructor.
Rename misprint property (from warmer to finder)
diff --git a/src/photini/__init__.py b/src/photini/__init__.py index <HASH>..<HASH> 100644 --- a/src/photini/__init__.py +++ b/src/photini/__init__.py @@ -1,4 +1,4 @@ from __future__ import unicode_literals __version__ = '2016.08.0' -build = '683 (8b2ed70)' +build = '684 (d3a9ad5)' diff --git a/src/photini/photinimap.py b/src/photini/photinimap.py index <HASH>..<HASH> 100644 --- a/src/photini/photinimap.py +++ b/src/photini/photinimap.py @@ -141,6 +141,8 @@ class PhotiniMap(QtWidgets.QWidget): @QtCore.pyqtSlot(QtCore.QUrl) def link_clicked(self, url): + if url.isLocalFile(): + url.setScheme('http') webbrowser.open_new(url.toString()) @QtCore.pyqtSlot()
Ensure links from maps are not 'local'.
diff --git a/raven/utils/stacks.py b/raven/utils/stacks.py index <HASH>..<HASH> 100644 --- a/raven/utils/stacks.py +++ b/raven/utils/stacks.py @@ -233,7 +233,7 @@ def get_stack_info(frames, transformer=transform, capture_locals=True, # This changes /foo/site-packages/baz/bar.py into baz/bar.py try: base_filename = sys.modules[module_name.split('.', 1)[0]].__file__ - filename = abs_path.split(base_filename.rsplit('/', 2)[0], 1)[-1][1:] + filename = abs_path.split(base_filename.rsplit('/', 2)[0], 1)[-1].lstrip("/") except: filename = abs_path
Use lstrip to get rid of leading slashes, because in some cases the paths have already been replaced.
diff --git a/src/Guzzle/Stream/Stream.php b/src/Guzzle/Stream/Stream.php index <HASH>..<HASH> 100644 --- a/src/Guzzle/Stream/Stream.php +++ b/src/Guzzle/Stream/Stream.php @@ -70,9 +70,7 @@ class Stream implements StreamInterface */ public function __destruct() { - if (is_resource($this->stream)) { - fclose($this->stream); - } + $this->close(); } /** @@ -92,6 +90,18 @@ class Stream implements StreamInterface } /** + * {@inheritdoc} + */ + public function close() + { + if (is_resource($this->stream)) { + fclose($this->stream); + } + $this->cache[self::IS_READABLE] = false; + $this->cache[self::IS_WRITABLE] = false; + } + + /** * Calculate a hash of a Stream * * @param StreamInterface $stream Stream to calculate the hash for diff --git a/src/Guzzle/Stream/StreamInterface.php b/src/Guzzle/Stream/StreamInterface.php index <HASH>..<HASH> 100644 --- a/src/Guzzle/Stream/StreamInterface.php +++ b/src/Guzzle/Stream/StreamInterface.php @@ -15,6 +15,11 @@ interface StreamInterface public function __toString(); /** + * Close the underlying stream + */ + public function close(); + + /** * Get stream metadata * * @param string $key Specific metadata to retrieve
Adding close() method to stream objects
diff --git a/tests/test_protobuf.py b/tests/test_protobuf.py index <HASH>..<HASH> 100644 --- a/tests/test_protobuf.py +++ b/tests/test_protobuf.py @@ -33,7 +33,7 @@ def doc_pb(): def test_parse_protobuf(doc_pb): - assert doc_pb.ByteSize() == 4239 + assert doc_pb.ByteSize() == 4709 def test_write_protobuf(doc_pb):
Update the expected size of the protobuf
diff --git a/backend/handler.py b/backend/handler.py index <HASH>..<HASH> 100644 --- a/backend/handler.py +++ b/backend/handler.py @@ -171,7 +171,7 @@ class TensorboardHandler(BaseHTTPServer.BaseHTTPRequestHandler): code: The numeric HTTP status code to use. """ out = BytesIO() - f = gzip.GzipFile(fileobj=out, mode='wb') + f = gzip.GzipFile(fileobj=out, mode='wb', compresslevel=3) f.write(compat.as_bytes(content)) f.close() gzip_content = out.getvalue()
Make GZip faster for TensorBoard It turns out compresslevel=3 is 4x faster and nearly as good. Less time spent compressing is going to be important, because this HTTP server is multithreaded and CPU bound operations scale negatively, i.e. 1 + 1 < 1. Benchmark: <URL>
diff --git a/flattened_serializers.go b/flattened_serializers.go index <HASH>..<HASH> 100644 --- a/flattened_serializers.go +++ b/flattened_serializers.go @@ -1,6 +1,7 @@ package manta import ( + "encoding/json" "os" "github.com/dotabuff/manta/dota" @@ -40,6 +41,23 @@ type flattened_serializers struct { proto *dota.CSVCMsg_FlattenedSerializer } +// Dumps a flattened table as json +func (sers *flattened_serializers) dump_json(name string) string { + // Can't marshal map[int32]x + type jContainer struct { + Version int32 + Data *dt + } + + j := make([]jContainer, 0) + for i, o := range sers.Serializers[name] { + j = append(j, jContainer{i, o}) + } + + str, _ := json.MarshalIndent(j, "", " ") // two space ident + return string(str) +} + // Fills properties for a data table func (sers *flattened_serializers) recurse_table(cur *dota.ProtoFlattenedSerializerT) *dt { // Basic table structure
Added dump_json to flattened_serializers.go
diff --git a/uproot_methods/convert.py b/uproot_methods/convert.py index <HASH>..<HASH> 100644 --- a/uproot_methods/convert.py +++ b/uproot_methods/convert.py @@ -58,6 +58,9 @@ def towriteable(obj): elif any(x == ("uproot_methods.classes.TH1", "Methods") for x in types(obj.__class__, obj)): return (None, None, "uproot.write.objects.TH1", "TH1") + elif any(x == ("TH1", "Methods") for x in types(obj.__class__, obj)): + return (None, None, "uproot.write.objects.TH1", "TH1") + else: raise TypeError("type {0} from module {1} is not writeable by uproot".format(obj.__class__.__name__, obj.__class__.__module__))
Write histogram read by uproot
diff --git a/src/_0_scripts/ie8-main.js b/src/_0_scripts/ie8-main.js index <HASH>..<HASH> 100644 --- a/src/_0_scripts/ie8-main.js +++ b/src/_0_scripts/ie8-main.js @@ -1,3 +1,10 @@ +// indexOf polyfill +// Minified version of this: +// https://stackoverflow.com/a/35054662/1611058 +if (!Array.prototype.indexOf) { + Array.prototype.indexOf=function(r){var t=this.length>0,i=Number(arguments[1])||0;for((i=i<0?Math.ceil(i):Math.floor(i))<0&&(i+=t);i<t;i++)if(i in this&&this[i]===r)return i;return-1}; +} + $(document).ready(function(){ //IE8 tabs code
Fixed IE8 js error I thought indexOf would be safe to use but apparently it isn't. I'm adding a polyfill to make it safe
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -13,6 +13,7 @@ convert = function(source) { line, objectNames = [], output = {}, + parents, parentObj = {}, i, linesLength,
Added `parents` declaration
diff --git a/studio/lib/archivefile.py b/studio/lib/archivefile.py index <HASH>..<HASH> 100644 --- a/studio/lib/archivefile.py +++ b/studio/lib/archivefile.py @@ -52,5 +52,4 @@ def extractall(archive, filename, dstdir): tar.extractall(path=dstdir) else: # seems to be a single file, save it - shutil.copyfile(archive, - file(os.path.join(dstdir, filename), 'wb')) + shutil.copyfile(archive, os.path.join(dstdir, filename))
shutil.copyfile takes a filename as the destination file to be written, not a file object
diff --git a/src/Leevel/Event/Observer.php b/src/Leevel/Event/Observer.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Event/Observer.php +++ b/src/Leevel/Event/Observer.php @@ -68,7 +68,8 @@ class Observer implements SplObserver */ public function __invoke(...$args) { - call_user_func($this->handle, ...$args); + $handle = $this->handle; + $handle(...$args); } /**
perf(event): improves observer performance
diff --git a/rwa/pandas.py b/rwa/pandas.py index <HASH>..<HASH> 100644 --- a/rwa/pandas.py +++ b/rwa/pandas.py @@ -63,7 +63,11 @@ else: service.poke('name', ix.name, container, visited=visited, _stack=_stack) def peek_index(service, container, _stack=None, force_unicode=None): - data = service.peek('data', container, _stack=_stack) + try: + data = service.peek('data', container, _stack=_stack) + except KeyError: + # try loading it as a generic sequence (backward compatibility) + data = service.byPythonType(list, True).peek(service, container, _stack=_stack) try: name = service.peek('name', container, _stack=_stack) except (SystemExit, KeyboardInterrupt):
backward compatibility bugfix for Pandas' Index
diff --git a/tests/test_pandora/test_models.py b/tests/test_pandora/test_models.py index <HASH>..<HASH> 100644 --- a/tests/test_pandora/test_models.py +++ b/tests/test_pandora/test_models.py @@ -253,3 +253,23 @@ class TestAdItem(TestCase): self.result.prepare_playback() assert self.result.register_ad.called assert super_mock.called + + def test_prepare_playback_raises_paramater_missing(self): + with patch.object(PlaylistModel, 'prepare_playback') as super_mock: + + self.result.register_ad = Mock(side_effect=ParameterMissing('No ad tracking tokens provided for ' + 'registration.') + ) + self.assertRaises(ParameterMissing, self.result.prepare_playback) + assert self.result.register_ad.called + assert not super_mock.called + + def test_prepare_playback_handles_paramater_missing_if_no_tokens(self): + with patch.object(PlaylistModel, 'prepare_playback') as super_mock: + + self.result.tracking_tokens = [] + self.result.register_ad = Mock(side_effect=ParameterMissing('No ad tracking tokens provided for ' + 'registration.')) + self.result.prepare_playback() + assert self.result.register_ad.called + assert super_mock.called
Test cases for handling exceptions in prepare_playback.
diff --git a/python/thunder/utils/ec2.py b/python/thunder/utils/ec2.py index <HASH>..<HASH> 100644 --- a/python/thunder/utils/ec2.py +++ b/python/thunder/utils/ec2.py @@ -177,6 +177,9 @@ def install_anaconda(master, opts): ssh(master, opts, "/root/anaconda/bin/conda install --yes jsonschema pillow seaborn scikit-learn") print_success() + # add mistune (for notebook conversions) + ssh(master, opts, "pip install mistune") + # copy to slaves print_status("Copying Anaconda to workers") ssh(master, opts, "/root/spark-ec2/copy-dir /root/anaconda")
Add mistune (for notebook HTML conversions)
diff --git a/cronjobs/indexcontent.php b/cronjobs/indexcontent.php index <HASH>..<HASH> 100644 --- a/cronjobs/indexcontent.php +++ b/cronjobs/indexcontent.php @@ -62,6 +62,8 @@ while( true ) if ( !( $node instanceof eZContentObjectTreeNode ) ) { $cli->error( "An error occured while trying fetching node $nodeId" ); + $db->query( "DELETE FROM ezpending_actions WHERE action = '$action' AND param = '$objectID'" ); + $db->commit(); continue; }
Fix EZP-<I>: Delayed indexing should fail gracefully on trashed / broken node
diff --git a/karaage/projectreports/views/user.py b/karaage/projectreports/views/user.py index <HASH>..<HASH> 100644 --- a/karaage/projectreports/views/user.py +++ b/karaage/projectreports/views/user.py @@ -69,7 +69,7 @@ def thanks(request, project_id): survey, created = ProjectSurvey.objects.get_or_create(project=project, survey_group=survey_group) if created: - return HttpResponseRedirect(reverse('kg_survey')) + return HttpResponseRedirect(reverse('kg_survey', args=project.pid)) return render_to_response('surveys/thanks.html', locals(), context_instance=RequestContext(request))
Fixed bug in project reports url redirection
diff --git a/grammar.js b/grammar.js index <HASH>..<HASH> 100644 --- a/grammar.js +++ b/grammar.js @@ -375,7 +375,7 @@ module.exports = grammar({ ) ), - string_expansion: $ => seq('$', $.string), + string_expansion: $ => seq('$', choice($.string, $.raw_string)), expansion: $ => seq( '${',
Allow $'\n' (whatever that is)
diff --git a/controllers/DefaultController.php b/controllers/DefaultController.php index <HASH>..<HASH> 100644 --- a/controllers/DefaultController.php +++ b/controllers/DefaultController.php @@ -51,7 +51,7 @@ class DefaultController extends \cms\base\Controller throw new NotFoundHttpException("The requested url '$activeUrl' does not exist."); } - $link = Yii::$app->links->findOneByArguments(['lang_id' => $this->getLangId(), 'url' => $activeUrl]); + $link = Yii::$app->links->findOneByArguments(['lang_id' => $this->getLangId(), 'url' => $suffix]); if (!$link) { throw new NotFoundHttpException("The page '$activeUrl' does not exist in this language.");
fixed bug in Cms DefaultController in links lookup
diff --git a/gnupg/test/test_gnupg.py b/gnupg/test/test_gnupg.py index <HASH>..<HASH> 100755 --- a/gnupg/test/test_gnupg.py +++ b/gnupg/test/test_gnupg.py @@ -1152,9 +1152,9 @@ know, maybe you shouldn't be doing it in the first place. self.assertTrue(os.path.isfile(output)) # Check the contents: - with open(output) as fh: + with open(output, 'rb') as fh: encrypted_message = fh.read() - log.debug("Encrypted file contains:\n\n%s\n" % encrypted_message) + self.assertTrue(b"-----BEGIN PGP MESSAGE-----" in encrypted_message) def test_encryption_to_filehandle(self): """Test that ``encrypt(..., output=filelikething)`` is successful.""" @@ -1174,9 +1174,9 @@ know, maybe you shouldn't be doing it in the first place. self.assertTrue(os.path.isfile(output)) # Check the contents: - with open(output) as fh: + with open(output, 'rb') as fh: encrypted_message = fh.read() - log.debug("Encrypted file contains:\n\n%s\n" % encrypted_message) + self.assertTrue(b"-----BEGIN PGP MESSAGE-----" in encrypted_message) def test_encryption_from_filehandle(self): """Test that ``encrypt(open('foo'), ...)`` is successful."""
Actually check output file contents in the test_encrypt_*() tests. This provides more accurate testing for issues like #<I>.
diff --git a/test/jss_test.py b/test/jss_test.py index <HASH>..<HASH> 100644 --- a/test/jss_test.py +++ b/test/jss_test.py @@ -12,6 +12,7 @@ defaults write org.da.jss_helper jss_url <URL to JSS> import subprocess import base64 +import inspect from xml.etree import ElementTree from nose.tools import * @@ -110,6 +111,16 @@ def test_jss_delete(): assert_raises(JSSGetError, j_global.Policy, id_) +@with_setup(setup) +def test_jss_method_constructors(): + skip_these_methods = ['__init__', 'get', 'delete', 'put', 'post', '_error_handler'] + method_constructors = [ m[1] for m in inspect.getmembers(j_global) if inspect.ismethod(m[1]) and m[0] not in skip_these_methods] + for cls in method_constructors: + instance = cls() + print(type(instance)) + assert_true(isinstance(instance, JSSObject) or isinstance(instance, JSSObjectList)) + + #JSSObject Tests###############################################################
Add test for JSS constructor methods.