diff: stringlengths 65 to 26.7k
message: stringlengths 7 to 9.92k
diff --git a/src/frontend/org/voltdb/VoltZK.java b/src/frontend/org/voltdb/VoltZK.java index <HASH>..<HASH> 100644 --- a/src/frontend/org/voltdb/VoltZK.java +++ b/src/frontend/org/voltdb/VoltZK.java @@ -56,7 +56,7 @@ public class VoltZK { /* * Processes that want to block catalog updates create children here */ - public static final String catalogUpdateBlockers = "/db/export_generations"; + public static final String catalogUpdateBlockers = "/db/catalog_update_blockers"; // configuration (ports, interfaces, ...) public static final String cluster_metadata = "/db/cluster_metadata";
For ENG-<I>, fix a misnamed ZK path
diff --git a/test/e2e/apps/job.go b/test/e2e/apps/job.go index <HASH>..<HASH> 100644 --- a/test/e2e/apps/job.go +++ b/test/e2e/apps/job.go @@ -181,7 +181,7 @@ var _ = SIGDescribe("Job", func() { Expect(err).NotTo(HaveOccurred()) By("Ensuring job exceed backofflimit") - err = framework.WaitForJobFailure(f.ClientSet, f.Namespace.Name, job.Name, time.Duration(30)*time.Second, "BackoffLimitExceeded") + err = framework.WaitForJobFailure(f.ClientSet, f.Namespace.Name, job.Name, framework.JobTimeout, "BackoffLimitExceeded") Expect(err).NotTo(HaveOccurred()) By("Checking that only one pod created and status is failed")
Fix e2e Flaky Apps/Job BackoffLimit test This fix is linked to PR #<I>, which introduced JobSpec.BackoffLimit. Previously the timeout used in the test was too aggressive and generated flaky test executions. Now it uses the default framework.JobTimeout used in other tests.
diff --git a/qbit/core/src/main/java/io/advantageous/qbit/service/impl/CallbackManagerWithTimeout.java b/qbit/core/src/main/java/io/advantageous/qbit/service/impl/CallbackManagerWithTimeout.java index <HASH>..<HASH> 100644 --- a/qbit/core/src/main/java/io/advantageous/qbit/service/impl/CallbackManagerWithTimeout.java +++ b/qbit/core/src/main/java/io/advantageous/qbit/service/impl/CallbackManagerWithTimeout.java @@ -157,18 +157,18 @@ public class CallbackManagerWithTimeout implements CallbackManager { @Override public void process(long currentTime) { - if (handlers.size() > 1_000) { + if (handlers.size() > 8_000) { logger.error("Issue with handlers growing too large size {} " + "service name {}", handlers.size(), this.name); } - if (handlers.size() > 100_000) { + if (handlers.size() > 32_000) { logger.error("Issue with handlers growing very large size {} " + "service name {}", handlers.size(), this.name); - checkForTimeOuts(60_000 * 5); + checkForTimeOuts(60_000); } if (!handleTimeouts) { return;
fixing auto-checkin and adding debugging for callback handler
diff --git a/lib/steam/packets/A2A_INFO_ResponsePacket.php b/lib/steam/packets/A2A_INFO_ResponsePacket.php index <HASH>..<HASH> 100644 --- a/lib/steam/packets/A2A_INFO_ResponsePacket.php +++ b/lib/steam/packets/A2A_INFO_ResponsePacket.php @@ -47,8 +47,8 @@ class A2A_INFO_ResponsePacket extends SteamPacket $this->botNumber = $byteBuffer->getByte(); $this->dedicated = chr($byteBuffer->getByte()); $this->operatingSystem = chr($byteBuffer->getByte()); - $this->passwordProtexted = $byteBuffer->getByte(); - $this->secureServer = $byteBuffer->getByte(); + $this->passwordProtected = $byteBuffer->getByte() == 1; + $this->secureServer = $byteBuffer->getByte() == 1; $this->gameVersion = $byteBuffer->getString(); $extraDataFlag = $byteBuffer->getByte();
* (PHP) Changed A2A_INFO response to store boolean values of passwordProtected and secure * (PHP) Fixed typo in A2A_INFO_ResponsePacket * (Java) Removed ipAddress and portNumber from SourceServer * (Ruby) Fixed some file properties
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -8,6 +8,9 @@ var EventEmitter = require('eventemitter3'); // some validation routines var checkCandidate = require('rtc-validator/candidate'); +// the sdp cleaner +var sdpclean = require('rtc-sdpclean'); + var PRIORITY_LOW = 100; var PRIORITY_WAIT = 1000; @@ -119,6 +122,20 @@ module.exports = function(pc, opts) { }); } + function cleansdp(desc) { + // ensure we have clean sdp + var sdpErrors = []; + var sdp = desc && sdpclean(desc.sdp, { collector: sdpErrors }); + + // if we don't have a match, log some info + if (desc && sdp !== desc.sdp) { + console.info('invalid lines removed from sdp: ', sdpErrors); + desc.sdp = sdp; + } + + return desc; + } + function completeConnection() { if (pc.signalingState === 'have-remote-offer') { return tq.createAnswer(); @@ -248,6 +265,7 @@ module.exports = function(pc, opts) { }); tq.setLocalDescription = enqueue('setLocalDescription', execMethod, { + processArgs: cleansdp, pass: emitSdp });
Ensure generated sdp is valid before setting local description
diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index <HASH>..<HASH> 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -17,6 +17,8 @@ # satpy. If not, see <http://www.gnu.org/licenses/>. """Modifier classes for corrections based on sun and other angles.""" +from __future__ import annotations + import logging import time from datetime import datetime @@ -135,7 +137,6 @@ class SunZenithCorrector(SunZenithCorrectorBase): def _apply_correction(self, proj, coszen): logger.debug("Apply the standard sun-zenith correction [1/cos(sunz)]") - print("Applying sunzen: ", proj.chunks == coszen.chunks) res = proj.copy() res.data = sunzen_corr_cos(proj.data, coszen.data, limit=self.correction_limit, max_sza=self.max_sza) return res
Refactor SZA and cos(SZA) generation to reduce duplicate computations
diff --git a/lib/pdk/cli/exec/interactive_command.rb b/lib/pdk/cli/exec/interactive_command.rb index <HASH>..<HASH> 100644 --- a/lib/pdk/cli/exec/interactive_command.rb +++ b/lib/pdk/cli/exec/interactive_command.rb @@ -1,4 +1,4 @@ -require 'pdk/cli/exec/command' +require 'pdk' module PDK module CLI
(maint) Ensure pdk/cli/exec/interactive_command works standalone
diff --git a/GPy/core/parameterised.py b/GPy/core/parameterised.py index <HASH>..<HASH> 100644 --- a/GPy/core/parameterised.py +++ b/GPy/core/parameterised.py @@ -191,8 +191,8 @@ class parameterised(object): self.constrain(which, transformations.logistic(lower, upper)) def all_constrained_indices(self): - if len(self.constrained_indices): - return np.hstack(self.constrained_indices) + if len(self.constrained_indices) or len(self.fixed_indices): + return np.hstack(self.constrained_indices + self.fixed_indices) else: return np.empty(shape=(0,))
fixed a bug in all_constrained_indices
diff --git a/Slim/Http/Stream.php b/Slim/Http/Stream.php index <HASH>..<HASH> 100644 --- a/Slim/Http/Stream.php +++ b/Slim/Http/Stream.php @@ -317,7 +317,7 @@ class Stream implements StreamInterface $this->seekable = false; if ($this->isAttached()) { $meta = $this->getMetadata(); - $this->seekable = $meta['seekable'] && !$this->isPipe(); + $this->seekable = $meta['seekable']; } }
gh-<I> removed check for isPipe from Stream::isSeekable
diff --git a/imgaug/augmenters/blend.py b/imgaug/augmenters/blend.py index <HASH>..<HASH> 100644 --- a/imgaug/augmenters/blend.py +++ b/imgaug/augmenters/blend.py @@ -387,6 +387,18 @@ class Alpha(meta.Augmenter): # pylint: disable=locally-disabled, unused-variabl keypoints_on_images, random_state, parents, hooks, _augfunc ) + def _augment_polygons(self, polygons_on_images, random_state, parents, hooks): + def _augfunc(augs_, polygons_on_images_, parents_, hooks_): + return augs_.augment_polygons( + keypoints_on_images=[polysoi_i.deepcopy() for polysoi_i in polygons_on_images_], + parents=parents_, + hooks=hooks_ + ) + + return self._augment_coordinate_based( + polygons_on_images, random_state, parents, hooks, _augfunc + ) + def _augment_coordinate_based(self, inputs, random_state, parents, hooks, func): nb_images = len(inputs) if nb_images == 0:
Add polygon augmentation method to Alpha
diff --git a/scripts/npm-release.js b/scripts/npm-release.js index <HASH>..<HASH> 100644 --- a/scripts/npm-release.js +++ b/scripts/npm-release.js @@ -148,7 +148,7 @@ const waitOnTests = async (names, packageInfo) => { console.log(`\nWaiting on the following CI jobs: ${jobs.join(', ')}`) return Promise.all(jobs.map((job) => { - return Promise.resolve(waitForJobToPass(job)) + return waitForJobToPass(job) .timeout(minutes(60)) .then(() => { console.log(`${job} passed`)
chore: adding bluebird promise to release script (#<I>)
diff --git a/plugins/udp/src/main/java/kg/apc/jmeter/samplers/DNSJavaTCPClientImpl.java b/plugins/udp/src/main/java/kg/apc/jmeter/samplers/DNSJavaTCPClientImpl.java index <HASH>..<HASH> 100644 --- a/plugins/udp/src/main/java/kg/apc/jmeter/samplers/DNSJavaTCPClientImpl.java +++ b/plugins/udp/src/main/java/kg/apc/jmeter/samplers/DNSJavaTCPClientImpl.java @@ -3,6 +3,7 @@ package kg.apc.jmeter.samplers; import kg.apc.io.BinaryUtils; import org.apache.jmeter.protocol.tcp.sampler.TCPClient; +import org.apache.jmeter.samplers.SampleResult; import org.apache.jorphan.logging.LoggingManager; import java.io.ByteArrayOutputStream; @@ -61,6 +62,10 @@ public class DNSJavaTCPClientImpl extends DNSJavaDecoder implements TCPClient { return new String(super.decode(buf)); } + public String read(InputStream in, SampleResult sampleResult) { + return this.read(in); + } + public byte getEolByte() { throw new UnsupportedOperationException("Not supported yet."); }
Add read method for Jmeter-<I> TCPClient signature (#<I>) * Add read method for Jmeter-<I> TCPClient signature * Fix missing semi-colon.
diff --git a/amibaker/ami_baker.py b/amibaker/ami_baker.py index <HASH>..<HASH> 100644 --- a/amibaker/ami_baker.py +++ b/amibaker/ami_baker.py @@ -18,7 +18,7 @@ class AmiBaker(object): ec2 = AmiEc2(quiet=self.__quiet, recipe=self.__recipe) if self._instance_id: - ec2.grab_existing_instance(self._instance_id) + ec2.get_instance(self._instance_id) else: ec2.instantiate() diff --git a/amibaker/ami_ec2.py b/amibaker/ami_ec2.py index <HASH>..<HASH> 100644 --- a/amibaker/ami_ec2.py +++ b/amibaker/ami_ec2.py @@ -72,7 +72,7 @@ class AmiEc2(object): self.__describe_instance() - def grab_existing_instance(self, ec2_id): + def get_instance(self, ec2_id): self.__describe_instance(ec2_id) def terminate(self):
Renamed method name to match others
diff --git a/dev_tools/modules.py b/dev_tools/modules.py index <HASH>..<HASH> 100644 --- a/dev_tools/modules.py +++ b/dev_tools/modules.py @@ -303,6 +303,9 @@ def parse(args): def main(argv: List[str]): + if argv == []: + # If no arguments are given, print the help/usage info. + argv = ['--help'] args = parse(argv) # args.func is where we store the function to be called for a given subparser # e.g. it is list_modules for the `list` subcommand
Fix <I>: prevent confusing error if `dev_tools/modules.py` is invoked without arguments (#<I>) This PR changes `main` in `dev_tools/modules.py` in a very simple way: if the user invokes it without any arguments, it pretends that `--help` was given as the argument instead. This prevents the error described in issue #<I> and makes the whole thing a little more user-friendly.
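A hedged sketch of the same fallback pattern, independent of the original repository (the parser, subcommand, and function names below are invented for illustration): an argparse-based CLI can substitute `['--help']` for an empty argument list so the user sees usage text instead of a parser error.

```python
import argparse
import sys
from typing import List


def main(argv: List[str]) -> None:
    if not argv:
        # No arguments given: pretend --help was passed so argparse prints
        # the usage text instead of a confusing "missing subcommand" error.
        argv = ['--help']

    parser = argparse.ArgumentParser(description='example module tool')
    subparsers = parser.add_subparsers(dest='command', required=True)
    subparsers.add_parser('list', help='list modules')

    args = parser.parse_args(argv)
    print('running subcommand:', args.command)


if __name__ == '__main__':
    main(sys.argv[1:])
```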
diff --git a/lib/config.js b/lib/config.js index <HASH>..<HASH> 100644 --- a/lib/config.js +++ b/lib/config.js @@ -78,7 +78,7 @@ var init = function() { } fs.writeFileSync(g_configPath, JSON.stringify({ installDir: path.resolve(path.normalize(path.join(__dirname, ".."))), - gamesDir: path.resolve(path.normalize(path.join(__dirname, "..", "public", "games"))), + gamesDir: path.resolve(path.normalize(path.join(configDir, "games"))), options: [ { name: "dns", value: false, }, { name: "private", value: false, },
move default games dir to the config folder
diff --git a/router_test.go b/router_test.go index <HASH>..<HASH> 100644 --- a/router_test.go +++ b/router_test.go @@ -319,12 +319,7 @@ func Test_buildRouteName(t *testing.T) { func Test_CatchAll_Route(t *testing.T) { r := require.New(t) - rr := render.New(render.Options{ - // HTMLLayout: "application.html", - TemplateEngine: plush.BuffaloRenderer, - TemplatesBox: packr.NewBox("../templates"), - Helpers: map[string]interface{}{}, - }) + rr := render.New(render.Options{}) a := Automatic(Options{}) a.GET("/{name:.+}", func(c Context) error {
cleaning out test to use only what it needs
diff --git a/zone.go b/zone.go index <HASH>..<HASH> 100644 --- a/zone.go +++ b/zone.go @@ -577,8 +577,15 @@ func (api *API) PurgeEverything(zoneID string) (PurgeCacheResponse, error) { // // API reference: https://api.cloudflare.com/#zone-purge-individual-files-by-url-and-cache-tags func (api *API) PurgeCache(zoneID string, pcr PurgeCacheRequest) (PurgeCacheResponse, error) { + return api.PurgeCacheContext(context.TODO(), zoneID, pcr) +} + +// PurgeCacheContext purges the cache using the given PurgeCacheRequest (zone/url/tag). +// +// API reference: https://api.cloudflare.com/#zone-purge-individual-files-by-url-and-cache-tags +func (api *API) PurgeCacheContext(ctx context.Context, zoneID string, pcr PurgeCacheRequest) (PurgeCacheResponse, error) { uri := "/zones/" + zoneID + "/purge_cache" - res, err := api.makeRequest("POST", uri, pcr) + res, err := api.makeRequestContext(ctx, "POST", uri, pcr) if err != nil { return PurgeCacheResponse{}, errors.Wrap(err, errMakeRequestError) }
feat(zones): Add PurgeCacheContext method (#<I>) Support context for purging cache. Adds a *Context method as suggested in #<I>, and used in ListZonesContext.
diff --git a/sailthru/Sailthru_Client.php b/sailthru/Sailthru_Client.php index <HASH>..<HASH> 100644 --- a/sailthru/Sailthru_Client.php +++ b/sailthru/Sailthru_Client.php @@ -579,8 +579,9 @@ class Sailthru_Client { * @param Mixed $tags Null for empty values, or String or arrays * @link http://docs.sailthru.com/api/content */ - public function pushContent($title, $url, $date = null, $tags = null, $vars = array()) { + public function pushContent($title, $url, $date = null, $tags = null, $vars = array(),$spider = 1) { $data = array(); + $data['spider'] = $spider; $data['title'] = $title; $data['url'] = $url; if (!is_null($tags)) {
Update Sailthru_Client.php
diff --git a/lib/htmlRenderer.js b/lib/htmlRenderer.js index <HASH>..<HASH> 100644 --- a/lib/htmlRenderer.js +++ b/lib/htmlRenderer.js @@ -61,7 +61,9 @@ HTMLRenderer.prototype.renderPage = function (url, pageData, cb) { renderData.gpsiData = pageData.gpsi; renderData.browsertimeData = pageData.browsertime; renderData.wptData = pageData.webpagetest; - renderData.phantomjsData = pageData.phantomjs.getStats(); + if (pageData.phantomjs) { + renderData.phantomjsData = pageData.phantomjs.getStats(); + } renderData.config = config; renderData.pageMeta = {
don't get phantomjs metrics if we don't fetch phantomjs
diff --git a/test/src/test/java/hudson/search/SearchTest.java b/test/src/test/java/hudson/search/SearchTest.java index <HASH>..<HASH> 100644 --- a/test/src/test/java/hudson/search/SearchTest.java +++ b/test/src/test/java/hudson/search/SearchTest.java @@ -173,7 +173,7 @@ public class SearchTest extends HudsonTestCase { project1.setDisplayName(displayName); WebClient wc = new WebClient(); - Page result = wc.goTo("search/suggest?query=name", "application/javascript"); + Page result = wc.goTo("search/suggest?query=name", "application/json"); Assert.assertNotNull(result); assertGoodStatus(result);
fixed test error. we expected application/json.
diff --git a/lib/mtgox/client.rb b/lib/mtgox/client.rb index <HASH>..<HASH> 100644 --- a/lib/mtgox/client.rb +++ b/lib/mtgox/client.rb @@ -33,10 +33,10 @@ module MtGox # offers.bids[0, 3] def offers offers = get('/code/data/getDepth.php') - offers['asks'] = offers['asks'].sort_by{|ask| ask[0].to_f}.map do |ask| + offers['asks'].sort_by!{|ask| ask[0].to_f}.map! do |ask| Ask.new(*ask) end - offers['bids'] = offers['bids'].sort_by{|bid| bid[0].to_f}.reverse.map do |bid| + offers['bids'].sort_by!{|bid| bid[0].to_f}.reverse!.map! do |bid| Bid.new(*bid) end offers
Don't waste memory by duplicating objects
diff --git a/tests/test_creation.py b/tests/test_creation.py index <HASH>..<HASH> 100644 --- a/tests/test_creation.py +++ b/tests/test_creation.py @@ -142,8 +142,9 @@ class CreationTest(g.unittest.TestCase): path = g.np.c_[x, y, z] # Extrude - mesh = g.trimesh.creation.sweep_polygon(poly, path) - assert mesh.is_volume + for engine in self.engines: + mesh = g.trimesh.creation.sweep_polygon(poly, path, engine=engine) + assert mesh.is_volume def test_annulus(self): """ @@ -264,8 +265,9 @@ class CreationTest(g.unittest.TestCase): if len(self.engines) == 0: return p = g.get_mesh('2D/ChuteHolderPrint.DXF') - v, f = p.triangulate() - check_triangulation(v, f, p.area) + for engine in self.engines: + v, f = p.triangulate(engine=engine) + check_triangulation(v, f, p.area) def test_truncated(self, count=10): # create some random triangles
Use available triangulation engines in tests In test_path_sweep and test_triangulate_plumbing, use all available triangulation engines (and no unavailable ones). This specifically allows the tests to run when the 'earcut' engine is available but the non-free 'triangle' engine is not. It also provides coverage of the 'earcut' engine when 'triangle' (the implicit default) *is* available.
diff --git a/extensions/composer/Installer.php b/extensions/composer/Installer.php index <HASH>..<HASH> 100644 --- a/extensions/composer/Installer.php +++ b/extensions/composer/Installer.php @@ -176,6 +176,9 @@ class Installer extends LibraryInstaller protected function saveExtensions(array $extensions) { $file = $this->vendorDir . '/' . self::EXTENSION_FILE; + if (!file_exists(dirname($file))) { + mkdir(dirname($file), 0777, true); + } $array = str_replace("'<vendor-dir>", '$vendorDir . \'', var_export($extensions, true)); file_put_contents($file, "<?php\n\n\$vendorDir = dirname(__DIR__);\n\nreturn $array;\n"); // invalidate opcache of extensions.php if exists
composer: create directory if it does not exist This may happen if yii is installed globally. fixes #<I>
diff --git a/test/test_coursera.py b/test/test_coursera.py index <HASH>..<HASH> 100755 --- a/test/test_coursera.py +++ b/test/test_coursera.py @@ -3,9 +3,10 @@ Test functionality of coursera module. """ -import unittest import os import os.path +import unittest + from coursera import coursera_dl TEST_SYLLABUS_FILE = \
test: Sort stdlib imports and separate local import from the former.
diff --git a/holoviews/core/dimension.py b/holoviews/core/dimension.py index <HASH>..<HASH> 100644 --- a/holoviews/core/dimension.py +++ b/holoviews/core/dimension.py @@ -483,6 +483,9 @@ class Dimensioned(LabelledData): if selection == 'all': dims = [dim for group in self._dim_groups for dim in getattr(self, group)] + elif isinstance(selection, list): + dims = [dim for group in selection + for dim in getattr(self, '%s_dimensions' % group)] elif selection in ['key', 'value', 'constant']: lmbd, kwargs = lambdas[selection] key_traversal = self.traverse(lmbd, **kwargs)
Dimensioned.dimensions now accepts list of selections
diff --git a/lib/client.js b/lib/client.js index <HASH>..<HASH> 100644 --- a/lib/client.js +++ b/lib/client.js @@ -1206,6 +1206,7 @@ function doInitialSync(client, historyLen) { client.emit("syncComplete"); _pollForEvents(client); }, function(err) { + console.error("/initialSync error: %s", err); client.emit("syncError", err); // TODO: Retries. }); @@ -1254,6 +1255,7 @@ function _pollForEvents(client) { var discardResult = false; var timeoutObj = setTimeout(function() { discardResult = true; + console.error("/events request timed out."); _pollForEvents(client); }, 40000); @@ -1330,6 +1332,7 @@ function _pollForEvents(client) { } _pollForEvents(self); }, function(err) { + console.error("/events error: %s", JSON.stringify(err)); if (discardResult) { return; }
Add more logging when sync requests fail.
diff --git a/daemon/cmd/daemon_main.go b/daemon/cmd/daemon_main.go index <HASH>..<HASH> 100644 --- a/daemon/cmd/daemon_main.go +++ b/daemon/cmd/daemon_main.go @@ -936,7 +936,7 @@ func initEnv(cmd *cobra.Command) { // Prepopulate option.Config with options from CLI. option.Config.Populate() - // add hooks after setting up metrics in the option.Confog + // add hooks after setting up metrics in the option.Config logging.DefaultLogger.Hooks.Add(metrics.NewLoggingHook(components.CiliumAgentName)) // Logging should always be bootstrapped first. Do not add any code above this!
daemon_main: fix comment typo The option.Confog is replaced with option.Config.
diff --git a/models/event/event.go b/models/event/event.go index <HASH>..<HASH> 100644 --- a/models/event/event.go +++ b/models/event/event.go @@ -46,6 +46,8 @@ const ( DisconnectedFromServer string = "discFromServer" MatchEnded string = "matchEnded" Test string = "test" + + ReservationOver string = "reservationOver" ) var stop = make(chan struct{}) @@ -82,6 +84,8 @@ func StartListening() { disconnectedFromServer(event.LobbyID) case MatchEnded: matchEnded(event.LobbyID, event.LogsID, event.ClassTimes) + case ReservationOver: + reservationEnded(event.LobbyID) } case <-stop: return @@ -94,6 +98,12 @@ func StopListening() { stop <- struct{}{} } +func reservationEnded(lobbyID uint) { + lobby, _ := models.GetLobbyByID(lobbyID) + lobby.Close(false, false) + models.SendNotification("Lobby Closed (serveme.tf reservation ended)", int(lobby.ID)) +} + func playerDisc(steamID string, lobbyID uint) { player, _ := models.GetPlayerBySteamID(steamID) lobby, _ := models.GetLobbyByID(lobbyID)
stop lobby when the serveme reservation is over
diff --git a/src/OAuth/Services/OAuthService.php b/src/OAuth/Services/OAuthService.php index <HASH>..<HASH> 100644 --- a/src/OAuth/Services/OAuthService.php +++ b/src/OAuth/Services/OAuthService.php @@ -251,7 +251,7 @@ class OAuthService */ public function getAppToken(\DTS\eBaySDK\OAuth\Types\GetAppTokenRestRequest $request = null) { - return $this->getAppTokenAsync()->wait(); + return $this->getAppTokenAsync($request)->wait(); } /**
Pass through the request for grabbing the app token.
diff --git a/code/site/components/com_default/templates/helpers/date.php b/code/site/components/com_default/templates/helpers/date.php index <HASH>..<HASH> 100644 --- a/code/site/components/com_default/templates/helpers/date.php +++ b/code/site/components/com_default/templates/helpers/date.php @@ -49,7 +49,7 @@ class ComDefaultTemplateHelperDate extends KTemplateHelperDate { $config = new KConfig($config); $config->append(array( - 'gmt_offset' => KFactory::get('lib.joomla.config')->getValue('config.offset') * 3600 + 'gmt_offset' => 0 )); return parent::humanize($config);
Set gmt_offset config option to 0 by default
diff --git a/inotify/adapters.py b/inotify/adapters.py index <HASH>..<HASH> 100644 --- a/inotify/adapters.py +++ b/inotify/adapters.py @@ -261,7 +261,7 @@ class InotifyTrees(BaseTree): self.__load_trees(paths) def __load_trees(self, paths): - _LOGGER.debug("Adding initial watches on trees: [%s]", ",".join(paths)) + _LOGGER.debug("Adding initial watches on trees: [%s]", ",".join(map(str, paths))) q = paths while q:
resolves #<I>, list of binary paths can't be logged with existing call
diff --git a/src/metapensiero/signal/utils.py b/src/metapensiero/signal/utils.py index <HASH>..<HASH> 100644 --- a/src/metapensiero/signal/utils.py +++ b/src/metapensiero/signal/utils.py @@ -50,6 +50,7 @@ class MultipleResults(Awaitable): res = await coro self._results[ix] = res self.results = tuple(self._results) + del self._results self.done = True return self.results
Remove _results when they aren't needed anymore
diff --git a/openquake/engine/tools/make_html_report.py b/openquake/engine/tools/make_html_report.py index <HASH>..<HASH> 100644 --- a/openquake/engine/tools/make_html_report.py +++ b/openquake/engine/tools/make_html_report.py @@ -186,10 +186,9 @@ def make_report(isodate='today'): txt = view_fullreport('fullreport', ds) report = html_parts(txt) except Exception as exc: - raise report = dict( html_title='Could not generate report: %s' % cgi.escape( - unicode(exc), quote=True), + exc, quote=True), fragment='') page = report['html_title']
Removed a debugging raise Former-commit-id: ea0c5f<I>a3b<I>f7e<I>a4e<I>f<I>
diff --git a/lib/rails-env/version.rb b/lib/rails-env/version.rb index <HASH>..<HASH> 100644 --- a/lib/rails-env/version.rb +++ b/lib/rails-env/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module RailsEnv - VERSION = "2.0.1" + VERSION = "2.0.2" end
Bump up version (<I>).
diff --git a/lib/celluloid/zmq/sockets.rb b/lib/celluloid/zmq/sockets.rb index <HASH>..<HASH> 100644 --- a/lib/celluloid/zmq/sockets.rb +++ b/lib/celluloid/zmq/sockets.rb @@ -1,13 +1,12 @@ module Celluloid module ZMQ - attr_reader :linger - class Socket # Create a new socket def initialize(type) @socket = Celluloid::ZMQ.context.socket ::ZMQ.const_get(type.to_s.upcase) @linger = 0 end + attr_reader :linger # Connect to the given 0MQ address # Address should be in the form: tcp://1.2.3.4:5678/
Move the Socket#linger reader to the right place
diff --git a/cachalot/utils.py b/cachalot/utils.py index <HASH>..<HASH> 100644 --- a/cachalot/utils.py +++ b/cachalot/utils.py @@ -65,6 +65,7 @@ def check_parameter_types(params): elif cl is dict: check_parameter_types(p.items()) else: + print(params, [text_type(p) for p in params]) raise UncachableQuery
Prints again a debug line to understand why Travis CI is failing… -_-
diff --git a/EventListener/Cloner/CopyUploadablesSubscriber.php b/EventListener/Cloner/CopyUploadablesSubscriber.php index <HASH>..<HASH> 100644 --- a/EventListener/Cloner/CopyUploadablesSubscriber.php +++ b/EventListener/Cloner/CopyUploadablesSubscriber.php @@ -129,6 +129,10 @@ class CopyUploadablesSubscriber implements EventSubscriberInterface */ private function generateTmpPathname(): string { + if (!is_dir($this->tmpDir) && !mkdir($this->tmpDir, 0777, true)) { + throw new \RuntimeException(sprintf('Unable to create temporary files directory "%s".', $this->tmpDir)); + } + $pathname = @tempnam($this->tmpDir, ''); if (false === $pathname) {
Create temporary files directory in copy uploadables event subscriber.
diff --git a/hiyapyco/__init__.py b/hiyapyco/__init__.py index <HASH>..<HASH> 100644 --- a/hiyapyco/__init__.py +++ b/hiyapyco/__init__.py @@ -73,6 +73,7 @@ METHOD_SUBSTITUTE = METHODS['METHOD_SUBSTITUTE'] class HiYaPyCo: """Main class""" + # pylint: disable=too-many-instance-attributes def __init__(self, *args, **kwargs): """ args: YAMLfile(s)
make pylint silently ignore too-many-instance-attributes on main class
diff --git a/lib/index.js b/lib/index.js index <HASH>..<HASH> 100644 --- a/lib/index.js +++ b/lib/index.js @@ -1,3 +1,5 @@ +/* jshint unused: false */ + /** * RETSr.io rets.js RETS Client * @module RETS diff --git a/lib/resolve.js b/lib/resolve.js index <HASH>..<HASH> 100644 --- a/lib/resolve.js +++ b/lib/resolve.js @@ -1,13 +1,15 @@ +/* jshint unused: false */ + var Resolve = exports; var debug = require('debug')('rets.js:resolve'); -var URL = require('url'); +var url = require('url'); var util = require('util'); Resolve.endpoint = function(url){ }; Resolve.operation = function(operand){ -} +};
Adding jshint directives to prevent files that are known to be incomplete (stubs) from triggering errors. We’ll remove these as we progress. For now, it’s ok to have these unused variables.
diff --git a/spec/functional/resource/git_spec.rb b/spec/functional/resource/git_spec.rb index <HASH>..<HASH> 100644 --- a/spec/functional/resource/git_spec.rb +++ b/spec/functional/resource/git_spec.rb @@ -24,7 +24,11 @@ require 'tmpdir' describe Chef::Resource::Git do include Chef::Mixin::ShellOut let(:file_cache_path) { Dir.mktmpdir } - let(:deploy_directory) { Dir.mktmpdir } + # Some versions of git complains when the deploy directory is + # already created. Here we intentionally don't create the deploy + # directory beforehand. + let(:base_dir_path) { Dir.mktmpdir } + let(:deploy_directory) { File.join(base_dir_path, make_tmpname("git_base")) } let(:node) do Chef::Node.new.tap do |n|
Make git_spec work with older git versions.
diff --git a/molo/core/templatetags/core_tags.py b/molo/core/templatetags/core_tags.py index <HASH>..<HASH> 100644 --- a/molo/core/templatetags/core_tags.py +++ b/molo/core/templatetags/core_tags.py @@ -170,7 +170,7 @@ def load_descendant_articles_for_section( @register.assignment_tag(takes_context=True) -def load_child_articles_for_section(context, section, count=5): +def load_child_articles_for_section(context, section, count=None): ''' Returns all child articles If the `locale_code` in the context is not the main language, it will @@ -181,6 +181,9 @@ def load_child_articles_for_section(context, section, count=5): qs = section.articles() + print count + if not count: + count = 1 # Pagination paginator = Paginator(qs, count)
setting count default back to None for articles
diff --git a/hydpy/core/magictools.py b/hydpy/core/magictools.py index <HASH>..<HASH> 100644 --- a/hydpy/core/magictools.py +++ b/hydpy/core/magictools.py @@ -17,7 +17,6 @@ from hydpy.core import filetools from hydpy.core import parametertools from hydpy.core import devicetools -_warnsimulationstep = True class Tester(object): @@ -69,6 +68,8 @@ class Tester(object): modulename = '.'.join((self.package, name)) module = importlib.import_module(modulename) warnings.filterwarnings('error', module=modulename) + warnings.filterwarnings('ignore', + category=ImportWarning) doctest.testmod(module, extraglobs={'testing': True}) warnings.resetwarnings() finally:
prevent ImportWarnings from becoming exceptions (see the last commits)
diff --git a/modules/mapbox/src/deck-utils.js b/modules/mapbox/src/deck-utils.js index <HASH>..<HASH> 100644 --- a/modules/mapbox/src/deck-utils.js +++ b/modules/mapbox/src/deck-utils.js @@ -1,4 +1,5 @@ import {Deck} from '@deck.gl/core'; +import {withParameters} from 'luma.gl'; export function getDeckInstance({map, gl, deck}) { // Only create one deck instance per context @@ -165,7 +166,19 @@ function handleMouseEvent(deck, event) { srcEvent: event.originalEvent }; } - callback(event); + + // Work around for https://github.com/mapbox/mapbox-gl-js/issues/7801 + const {gl} = deck.layerManager.context; + withParameters( + gl, + { + depthMask: true, + depthTest: true, + depthRange: [0, 1], + colorMask: [true, true, true, true] + }, + () => callback(event) + ); } // Register deck callbacks for pointer events
Fix occasional picking failure in mapbox layer (#<I>)
diff --git a/resources/views/admin/includes/media-uploader.blade.php b/resources/views/admin/includes/media-uploader.blade.php index <HASH>..<HASH> 100644 --- a/resources/views/admin/includes/media-uploader.blade.php +++ b/resources/views/admin/includes/media-uploader.blade.php @@ -29,7 +29,7 @@ :max-number-of-files="{{ $mediaCollection->getMaxNumberOfFiles() }}" @endif @if($mediaCollection->getMaxFileSize()) - :max-file-size-in-mb="{{ round($mediaCollection->getMaxFileSize()/1024/1024) }}" + :max-file-size-in-mb="{{ round(($mediaCollection->getMaxFileSize()/1024/1024), 2) }}" @endif @if($mediaCollection->getAcceptedFileTypes()) :accepted-file-types="'{{ implode($mediaCollection->getAcceptedFileTypes(), '') }}'"
max file size round fix (#<I>)
diff --git a/doc/ex/preview.rb b/doc/ex/preview.rb index <HASH>..<HASH> 100755 --- a/doc/ex/preview.rb +++ b/doc/ex/preview.rb @@ -3,7 +3,14 @@ require 'RMagick' img = Magick::Image.read("images/Blonde_with_dog.jpg").first -preview = img.preview(Magick::SolarizePreview) + +begin + preview = img.preview(Magick::SolarizePreview) +rescue NotImplementedError + img = Image.read('images/notimplemented.gif').first + img.write('preview.jpg') + exit +end preview.minify.write('preview.jpg') exit
Handle "not supported" exception when built with IM < <I>
diff --git a/spyder/app/tests/test_mainwindow.py b/spyder/app/tests/test_mainwindow.py index <HASH>..<HASH> 100644 --- a/spyder/app/tests/test_mainwindow.py +++ b/spyder/app/tests/test_mainwindow.py @@ -1594,24 +1594,6 @@ def test_troubleshooting_menu_item_and_url(monkeypatch): @flaky(max_runs=3) @pytest.mark.slow -def test_tabfilter_typeerror_full(main_window): - """Test for #5813 ; event filter handles None indicies when moving tabs.""" - MockEvent = MagicMock() - MockEvent.return_value.type.return_value = QEvent.MouseMove - MockEvent.return_value.pos.return_value = 0 - mockEvent_instance = MockEvent() - - test_tabbar = main_window.findChildren(QTabBar)[0] - test_tabfilter = TabFilter(test_tabbar, main_window) - test_tabfilter.from_index = None - test_tabfilter.moving = True - - assert test_tabfilter.eventFilter(None, mockEvent_instance) - assert mockEvent_instance.pos.call_count == 1 - - -@flaky(max_runs=3) -@pytest.mark.slow @pytest.mark.xfail def test_help_opens_when_show_tutorial_full(main_window, qtbot): """Test fix for #6317 : 'Show tutorial' opens the help plugin if closed."""
Testing: Remove test that no longer applies
diff --git a/hot_redis.py b/hot_redis.py index <HASH>..<HASH> 100644 --- a/hot_redis.py +++ b/hot_redis.py @@ -109,7 +109,7 @@ class Bitwise(Base): __rrshift__ = op_right(operator.rshift) -class Iterable(Base): +class Sequential(Base): __add__ = op_left(operator.add) __mul__ = op_left(operator.mul) @@ -145,7 +145,7 @@ class Numeric(Base): __ipow__ = inplace("number_pow") -class List(Iterable): +class List(Sequential): @property def value(self): @@ -423,7 +423,7 @@ class Dict(Base): return cls({}.fromkeys(*args)) -class String(Iterable): +class String(Sequential): @property def value(self):
More accurate name for List/String base class: Iterable -> Sequential
diff --git a/tests/test-timber-term.php b/tests/test-timber-term.php index <HASH>..<HASH> 100644 --- a/tests/test-timber-term.php +++ b/tests/test-timber-term.php @@ -11,7 +11,7 @@ function testGetTermWithObject() { $term_id = $this->factory->term->create(array('name' => 'Famous Commissioners')); $term_data = get_term($term_id, 'post_tag'); - $this->assertEquals('WP_Term', get_class($term_data)); + $this->assertTrue( in_array( get_class($term_data), array('WP_Term', 'stdClass') ) ); $term = new TimberTerm($term_id); $this->assertEquals('Famous Commissioners', $term->name()); $this->assertEquals('TimberTerm', get_class($term));
Fixed test for how WP <I> returns Term objects
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -42,9 +42,7 @@ function rehype2react(options) { } } - var hast = tableCellStyle(node); - - return toH(h, hast, settings.prefix); + return toH(h, tableCellStyle(node), settings.prefix); } /* Wrap `createElement` to pass components in. */
style(table): Do it in one line
diff --git a/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java b/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java index <HASH>..<HASH> 100644 --- a/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java +++ b/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java @@ -265,8 +265,8 @@ public abstract class ByteToMessageDecoder extends ChannelInboundHandlerAdapter callDecode(ctx, cumulation, out); } catch (DecoderException e) { throw e; - } catch (Throwable t) { - throw new DecoderException(t); + } catch (Exception e) { + throw new DecoderException(e); } finally { if (cumulation != null && !cumulation.isReadable()) { numReads = 0; @@ -455,7 +455,7 @@ public abstract class ByteToMessageDecoder extends ChannelInboundHandlerAdapter } } catch (DecoderException e) { throw e; - } catch (Throwable cause) { + } catch (Exception cause) { throw new DecoderException(cause); } }
Do not treat errors as decoder exception Motivation: Today when Netty encounters a general error while decoding it treats this as a decoder exception. However, for fatal causes this should not be treated as such, instead the fatal error should be carried up the stack without the callee having to unwind causes. Modifications: Instead of translating any error to a decoder exception, we let those unwind out the stack (note that finally blocks still execute). Result: Fatal errors will not be treated as innocent decoder exceptions.
diff --git a/src/lang/number.js b/src/lang/number.js index <HASH>..<HASH> 100644 --- a/src/lang/number.js +++ b/src/lang/number.js @@ -74,12 +74,12 @@ Number.prototype.round = function () { * @alias toHex * @return {string} converted hexadecimal value */ -Number.prototype.toHex = function () { +Number.prototype.toHex = (function () { var hexString = "0123456789ABCDEF"; return function () { return hexString.charAt((this - (this % 16)) >> 4) + hexString.charAt(this % 16); }; -}(); +})(); /** * Returns a value indicating the sign of a number<br>
Wrap with () to show immediate invocation.
diff --git a/Cache/ConfigWarmer.php b/Cache/ConfigWarmer.php index <HASH>..<HASH> 100644 --- a/Cache/ConfigWarmer.php +++ b/Cache/ConfigWarmer.php @@ -29,8 +29,14 @@ class ConfigWarmer implements CacheWarmerInterface public function warmUp($cacheDir) { - // this forces the full processing of the backend configuration - $this->configManager->getBackendConfig(); + try { + // this forces the full processing of the backend configuration + $this->configManager->getBackendConfig(); + } catch (\PDOException $e) { + // this occurs for example when the database doesn't exist yet and the + // project is being installed ('composer install' clears the cache at the end) + // ignore this error at this point and display an error message later + } } public function isOptional()
Prevent error messages when installing an app and the DB doesn't exist
diff --git a/mock/mock.go b/mock/mock.go index <HASH>..<HASH> 100644 --- a/mock/mock.go +++ b/mock/mock.go @@ -146,7 +146,7 @@ func (c *Call) After(d time.Duration) *Call { // arg := args.Get(0).(*map[string]interface{}) // arg["foo"] = "bar" // }) -func (c *Call) Run(fn func(Arguments)) *Call { +func (c *Call) Run(fn func(args Arguments)) *Call { c.lock() defer c.unlock() c.RunFn = fn
Provide argument name `args` in function signature This mainly serves to make code-completion better in IDEs that automatically create the function signature.
diff --git a/jsonapi/api.py b/jsonapi/api.py index <HASH>..<HASH> 100644 --- a/jsonapi/api.py +++ b/jsonapi/api.py @@ -278,7 +278,9 @@ class API(object): duration=time.time() - time_start) return response - if resource.Meta.authenticators: + if resource.Meta.authenticators and not ( + request.method == "GET" and + resource.Meta.disable_get_authentication): user = resource.authenticate(request) if user is None or not user.is_authenticated(): response = HttpResponse("Not Authenticated", status=401) diff --git a/jsonapi/auth.py b/jsonapi/auth.py index <HASH>..<HASH> 100644 --- a/jsonapi/auth.py +++ b/jsonapi/auth.py @@ -54,6 +54,7 @@ class Authenticator(object): class Meta: authenticators = [] + disable_get_authentication = None @classmethod def authenticate(cls, request):
allow the user to disable authentication for GET requests and maintain access rights on the application level
diff --git a/src/jquery.continuous-calendar/jquery.continuous-calendar.js b/src/jquery.continuous-calendar/jquery.continuous-calendar.js index <HASH>..<HASH> 100644 --- a/src/jquery.continuous-calendar/jquery.continuous-calendar.js +++ b/src/jquery.continuous-calendar/jquery.continuous-calendar.js @@ -248,7 +248,7 @@ function todayStyle(date) {return date.isToday() ? 'today' : '';} function initSingleDateCalendarEvents() { - $('.date', container).live('click', function() { + $('.date', container).bind('click', function() { var dateCell = $(this); if (dateCell.hasClass('disabled')) return; $('td.selected', container).removeClass('selected');
change live event to bind in order to encapsulate events
diff --git a/hazelcast/src/test/java/com/hazelcast/nio/tcp/nonblocking/iobalancer/IOBalancerMemoryLeakTest.java b/hazelcast/src/test/java/com/hazelcast/nio/tcp/nonblocking/iobalancer/IOBalancerMemoryLeakTest.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/test/java/com/hazelcast/nio/tcp/nonblocking/iobalancer/IOBalancerMemoryLeakTest.java +++ b/hazelcast/src/test/java/com/hazelcast/nio/tcp/nonblocking/iobalancer/IOBalancerMemoryLeakTest.java @@ -48,6 +48,7 @@ public class IOBalancerMemoryLeakTest extends HazelcastTestSupport { @Test public void testMemoryLeak() throws IOException { Config config = new Config(); + config.getGroupConfig().setName(randomName()); config.setProperty(GroupProperty.REST_ENABLED, "true"); HazelcastInstance instance = Hazelcast.newHazelcastInstance(config); HTTPCommunicator communicator = new HTTPCommunicator(instance);
randomize group name to make this test immune to unintentional joins from multicast.
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ def read(fname): setup( name="okcupyd", - version="0.6.2", + version="0.6.3", packages=find_packages(exclude=('tests', '*.db')), install_requires=['lxml', 'requests >= 2.4.1', 'simplejson', 'sqlalchemy >= 0.9.0', 'ipython >= 2.2.0',
version bump to <I>
diff --git a/lib/keymail/version.rb b/lib/keymail/version.rb index <HASH>..<HASH> 100644 --- a/lib/keymail/version.rb +++ b/lib/keymail/version.rb @@ -1,3 +1,3 @@ module Keymail - VERSION = "0.0.1" + VERSION = "0.1.0" end
Set initial version to <I> in favor of SemVer
diff --git a/pymatbridge/matlab_magic.py b/pymatbridge/matlab_magic.py index <HASH>..<HASH> 100644 --- a/pymatbridge/matlab_magic.py +++ b/pymatbridge/matlab_magic.py @@ -235,7 +235,7 @@ class MatlabMagics(Magics): if len(imgf): # Store the path to the directory so that you can delete it # later on: - image = open(imgf, 'rb').read().decode('utf-8') + image = open(imgf, 'rb').read() if ipython_version < 3: display_data.append(('MatlabMagic.matlab', {'image/png':image})) diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ else: # Get version and release info, which is all stored in pymatbridge/version.py ver_file = os.path.join('pymatbridge', 'version.py') -exec(open(ver_file).read().decode('utf-8')) +exec(open(ver_file).read()) opts = dict(name=NAME, maintainer=MAINTAINER,
BF: No need for these calls to `decode`. In my hands, these caused crashes on installation and testing on both python 2 and python 3.
diff --git a/server.js b/server.js index <HASH>..<HASH> 100644 --- a/server.js +++ b/server.js @@ -200,7 +200,11 @@ function cache(f) { } var badgeData = getBadgeData('vendor', data); badgeData.text[1] = 'unresponsive'; - badge(badgeData, makeSend(match[0].split('.').pop(), ask.res, end)); + var extension; + try { + extension = match[0].split('.').pop(); + } catch(e) { extension = 'svg'; } + badge(badgeData, makeSend(extension, ask.res, end)); }, 25000); // Only call vendor servers when last request is older than…
Avoid risk of crash with URL extension Part of #<I>
diff --git a/models.py b/models.py index <HASH>..<HASH> 100644 --- a/models.py +++ b/models.py @@ -11,7 +11,7 @@ from warnings import warn from abstractions import ModelGibbsSampling, ModelMeanField, ModelEM from abstractions import Distribution, GibbsSampling, MeanField, Collapsed, MaxLikelihood from distributions import Categorical, CategoricalAndConcentration -from internals.labels import Labels, FrozenLabels, CRPLabels +from internals.labels import Labels, CRPLabels from pyhsmm.util.stats import getdatasize
removed old FrozenLabels ref
diff --git a/core/src/test/java/me/prettyprint/hector/api/DynamicCompositeTest.java b/core/src/test/java/me/prettyprint/hector/api/DynamicCompositeTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/me/prettyprint/hector/api/DynamicCompositeTest.java +++ b/core/src/test/java/me/prettyprint/hector/api/DynamicCompositeTest.java @@ -7,6 +7,7 @@ import java.math.BigInteger; import java.nio.ByteBuffer; import java.util.UUID; +import me.prettyprint.cassandra.utils.TimeUUIDUtils; import me.prettyprint.hector.api.beans.DynamicComposite; import org.apache.cassandra.utils.ByteBufferUtil; @@ -48,7 +49,8 @@ public class DynamicCompositeTest { o = c.get(0); assertTrue(o instanceof Long); - b = createDynamicCompositeKey("Hello", UUID.randomUUID(), 10, false); + b = createDynamicCompositeKey("Hello", + TimeUUIDUtils.getUniqueTimeUUIDinMillis(), 10, false); c = new DynamicComposite(); c.deserialize(b); o = c.get(0);
Unit test for DynamicComposite bean
diff --git a/brozzler/chrome.py b/brozzler/chrome.py index <HASH>..<HASH> 100644 --- a/brozzler/chrome.py +++ b/brozzler/chrome.py @@ -179,7 +179,7 @@ class Chrome: extra_chrome_args = os.environ.get('BROZZLER_EXTRA_CHROME_ARGS') if extra_chrome_args: - chrome_args.append(extra_chrome_args) + chrome_args.extend(extra_chrome_args.split()) if disk_cache_dir: chrome_args.append('--disk-cache-dir=%s' % disk_cache_dir) if disk_cache_size:
Split extra chrome args on whitespace This is in case multiple args are used.
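A small hedged illustration, not taken from the brozzler source (the flag values are invented for the example), of why extending with the split list matters when the environment variable carries more than one flag:

```python
# Hypothetical value of BROZZLER_EXTRA_CHROME_ARGS holding two flags.
extra_chrome_args = '--headless --disable-gpu'

chrome_args = ['chromium-browser']

# Previous behaviour: append() keeps the whole string as one bogus argument.
appended = chrome_args + [extra_chrome_args]
assert appended == ['chromium-browser', '--headless --disable-gpu']

# New behaviour: extend() with split() passes each flag separately.
extended = chrome_args + extra_chrome_args.split()
assert extended == ['chromium-browser', '--headless', '--disable-gpu']
```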
diff --git a/build/docs/classes/RedirectMapBuilder.php b/build/docs/classes/RedirectMapBuilder.php index <HASH>..<HASH> 100644 --- a/build/docs/classes/RedirectMapBuilder.php +++ b/build/docs/classes/RedirectMapBuilder.php @@ -65,6 +65,17 @@ class RedirectMapBuilder // Fall back to api main page if service not found $redirectEntry []= $reWriteRulePrefix . '(.*)' . $docPathPrefix . 'index.html' . $flags; + // Redirect old /AWSSDKforPHP/ paths + $reWriteRulePrefix = 'RewriteRule ^/AWSSDKforPHP/'; + array_unshift($redirectEntry, + "RewriteCond %{REQUEST_URI} !^\\/AWSSDKforPHP\\/.*$\n", + "RewriteRule \".*\" \"-\" [S={4}]\n", + $reWriteRulePrefix . 'latest(.*) /aws-sdk-php/latest/index.html' . $flags, + $reWriteRulePrefix . 'v3(.*) /aws-sdk-php/v3/api/index.html' . $flags, + $reWriteRulePrefix . 'v2(.*) /aws-sdk-php/v2/api/index.html' . $flags, + $reWriteRulePrefix . 'v1(.*) /aws-sdk-php/v1/index.html' . $flags + ); + file_put_contents($this->outputDir, $redirectEntry); }
Redirect old AWSSDKforPHP paths. (#<I>)
diff --git a/omego/upgrade.py b/omego/upgrade.py index <HASH>..<HASH> 100644 --- a/omego/upgrade.py +++ b/omego/upgrade.py @@ -93,6 +93,17 @@ class Install(object): - Modified args object, flag to indicate new/existing/auto install """ if cmd == 'install': + if args.upgrade: + # Current behaviour: install or upgrade + if args.initdb or args.upgradedb: + raise Stop(10, ( + 'Deprecated --initdb --upgradedb flags ' + 'are incompatible with --upgrade')) + newinstall = None + else: + # Current behaviour: Server must not exist + newinstall = True + if args.managedb: # Current behaviour if args.initdb or args.upgradedb: @@ -105,17 +116,6 @@ class Install(object): # Deprecated behaviour pass - if args.upgrade: - # Current behaviour: install or upgrade - if args.initdb or args.upgradedb: - raise Stop(10, ( - 'Deprecated --initdb --upgradedb flags ' - 'are incompatible with --upgrade')) - newinstall = None - else: - # Current behaviour: Server must not exist - newinstall = True - elif cmd == 'upgrade': # Deprecated behaviour log.warn(
Fix order of deprecated arg changes
diff --git a/pyzotero/zotero.py b/pyzotero/zotero.py index <HASH>..<HASH> 100644 --- a/pyzotero/zotero.py +++ b/pyzotero/zotero.py @@ -983,9 +983,9 @@ class Zotero(object): if 'name' not in item: raise ze.ParamNotPassed( "The dict you pass must include a 'name' key") - # add a blank 'parent' key if it hasn't been passed + # add a blank 'parentCollection' key if it hasn't been passed if not 'parentCollection' in item: - payload['parentCollection'] = '' + item['parentCollection'] = '' headers = { 'Zotero-Write-Token': token(), }
Fix where the parentCollection key gets added (to the dict, not the enclosing list)
diff --git a/spec/garage_client/request/propagate_request_id_spec.rb b/spec/garage_client/request/propagate_request_id_spec.rb index <HASH>..<HASH> 100644 --- a/spec/garage_client/request/propagate_request_id_spec.rb +++ b/spec/garage_client/request/propagate_request_id_spec.rb @@ -5,8 +5,11 @@ describe GarageClient::Request::PropagateRequestId do GarageClient::Client.new end - before do + around do |example| + original = Thread.current[:request_id] Thread.current[:request_id] = 'request_id' + example.run + Thread.current[:request_id] = original end it 'sends request_id via header' do
Restore Thread.current value to the original one
diff --git a/test/offline/db_upgrade_from_version_1_unit.js b/test/offline/db_upgrade_from_version_1_unit.js index <HASH>..<HASH> 100644 --- a/test/offline/db_upgrade_from_version_1_unit.js +++ b/test/offline/db_upgrade_from_version_1_unit.js @@ -452,6 +452,12 @@ describe('DBUpgradeFromVersion1', function() { segments, newSegment); }); + }) + .then(function() { + // Make sure to close the database when we are done or else any + // following test that tries to delete this database will hang + // when trying to delete it. + db.close(); }); }); }
Close Database After Upgrade Test On Safari it seems that deleting a database that was opened in a previous test causes the delete call to hang. This makes sure we close the database after each upgrade test. Change-Id: Ie<I>a<I>bd<I>cef<I>ee<I>f<I>d8a1a<I>
diff --git a/Controller/ProfileController.php b/Controller/ProfileController.php index <HASH>..<HASH> 100644 --- a/Controller/ProfileController.php +++ b/Controller/ProfileController.php @@ -19,12 +19,17 @@ class ProfileController extends Controller public function updatePhotoAction(Request $request) { $form = $this->createForm(new EditUserProfilePhotoType()); - $user = $this->container->get('security.context')->getToken()->getUser(); + $userOnline = $this->getUser(); + + /** @var \Ant\Bundle\ChateaClientBundle\Manager\UserManager $userManager */ + $userManager = $this->container->get('api_users'); + + $user = $userManager->findById($userOnline->getId()); return $this->render('ChateaClientBundle:User:edit_profile_photo.html.twig', array( 'form' => $form->createView(), 'user' => $user, - 'access_token' => $user->getAccessToken(), + 'access_token' => $userOnline->getAccessToken(), 'api_endpoint' => $this->container->getParameter('api_endpoint') )); }
When editing the profile photo we get the whole user
diff --git a/microcosm_postgres/types.py b/microcosm_postgres/types.py index <HASH>..<HASH> 100644 --- a/microcosm_postgres/types.py +++ b/microcosm_postgres/types.py @@ -25,7 +25,7 @@ class EnumType(TypeDecorator): def process_bind_param(self, value, dialect): if value is None: return None - return str(self.enum_class(value).name) + return unicode(self.enum_class(value).name) def process_result_value(self, value, dialect): if value is None:
Enum columns are unicode; should be inserted as such. The latest SQLAlchemy just started to produce warnings for any enum columns that used string values: SAWarning: Unicode type received non-unicode bind param value '<column>'.
diff --git a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/915_drop_table_graphs.rb b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/915_drop_table_graphs.rb index <HASH>..<HASH> 100644 --- a/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/915_drop_table_graphs.rb +++ b/server/sonar-web/src/main/webapp/WEB-INF/db/migrate/915_drop_table_graphs.rb @@ -19,7 +19,8 @@ # # -# SonarQube 5.1 +# SonarQube 5.2 +# SONAR-6418 # class DropTableGraphs < ActiveRecord::Migration
Fix comment of DB migration <I>
diff --git a/UnitPay.php b/UnitPay.php index <HASH>..<HASH> 100644 --- a/UnitPay.php +++ b/UnitPay.php @@ -67,6 +67,16 @@ class UnitPay } /** + * Return IP address + * + * @return string + */ + protected function getIp() + { + return $_SERVER['REMOTE_ADDR']; + } + + /** * Get URL for pay through the form * * @param $publicKey @@ -145,7 +155,7 @@ class UnitPay */ public function checkHandlerRequest() { - $ip = $_SERVER['REMOTE_ADDR']; + $ip = $this->getIp(); if (!isset($_GET['method'])) { throw new InvalidArgumentException('Method is null'); }
Added overrideable getIp method
diff --git a/web/static/js/controllers.js b/web/static/js/controllers.js index <HASH>..<HASH> 100644 --- a/web/static/js/controllers.js +++ b/web/static/js/controllers.js @@ -483,8 +483,7 @@ function SubServiceControl($scope, $routeParams, $location, resourcesService, au $scope.snapshotService = function(service) { resourcesService.snapshot_service(service.Id, function(label) { - console.log('Snapshotted service name:%s label:%s', service.Id, label.Detail); - alert('Snapshotted service:' + service.Name + ' LABEL:' + label.Detail); + console.log('Snapshotted service name:%s label:%s', service.Name, label.Detail); // TODO: add the snapshot label to some partial view in the UI }); };
console log the Name, not the Id; removed useless alert
diff --git a/example_project/settings.py b/example_project/settings.py index <HASH>..<HASH> 100644 --- a/example_project/settings.py +++ b/example_project/settings.py @@ -94,7 +94,10 @@ INSTALLED_APPS = ( ) TEMPLATE_CONTEXT_PROCESSORS = ( + # for django 1.2 or 1.3 'django.core.context_processors.auth', + # for django 1.4 comment above line and uncomment below + #'django.contrib.auth.context_processors.auth', 'django.core.context_processors.debug', 'django.core.context_processors.i18n', 'django.core.context_processors.media',
Add context processor toggle to support django <I> - <I>
diff --git a/hypervisor/hypervisor.go b/hypervisor/hypervisor.go index <HASH>..<HASH> 100644 --- a/hypervisor/hypervisor.go +++ b/hypervisor/hypervisor.go @@ -44,7 +44,7 @@ func (ctx *VmContext) handlePAEs() { } func (ctx *VmContext) watchHyperstart(sendReadyEvent bool) { - timeout := time.AfterFunc(30*time.Second, func() { + timeout := time.AfterFunc(60*time.Second, func() { if ctx.PauseState == PauseStateUnpaused { ctx.Log(ERROR, "watch hyperstart timeout") ctx.Hub <- &InitFailedEvent{Reason: "watch hyperstart timeout"} @@ -69,7 +69,7 @@ func (ctx *VmContext) watchHyperstart(sendReadyEvent bool) { sendReadyEvent = false } time.Sleep(10 * time.Second) - timeout.Reset(30 * time.Second) + timeout.Reset(60 * time.Second) } timeout.Stop() }
watch hyperstart with longer timeouts Jenkins saw VM launches take more than <I> seconds...
diff --git a/cli/packages/vim/lib/index.js b/cli/packages/vim/lib/index.js index <HASH>..<HASH> 100644 --- a/cli/packages/vim/lib/index.js +++ b/cli/packages/vim/lib/index.js @@ -118,6 +118,8 @@ const render = (colors) => { exec "hi Error ctermfg=".s:ctermaccent0." ctermbg=".s:ctermshade1 exec "hi Todo guifg=".s:guiaccent0." guibg=".s:guishade1 exec "hi Todo ctermfg=".s:ctermaccent0." ctermbg=".s:ctermshade1 + exec "hi Function guifg=".s:guiaccent1 + exec "hi Function ctermfg=".s:ctermaccent1 " GitGutter
Add Vim Function group assignment using accent1
diff --git a/lib/stagehand.rb b/lib/stagehand.rb index <HASH>..<HASH> 100644 --- a/lib/stagehand.rb +++ b/lib/stagehand.rb @@ -1,8 +1,8 @@ require "stagehand/schema" -require "stagehand/engine" require "stagehand/staging" require "stagehand/production" require "stagehand/helpers" +require "stagehand/engine" module Stagehand end diff --git a/lib/stagehand/engine.rb b/lib/stagehand/engine.rb index <HASH>..<HASH> 100644 --- a/lib/stagehand/engine.rb +++ b/lib/stagehand/engine.rb @@ -5,5 +5,10 @@ module Stagehand config.generators do |g| g.test_framework :rspec end + + initializer "stagehand.set_connection_names" do + Stagehand::Staging::connection_name = Rails.configuration.x.stagehand.staging_connection_name + Stagehand::Production::connection_name = Rails.configuration.x.stagehand.production_connection_name + end end end
Set the database connections from Rails env config
diff --git a/examples/mvvmfx-contacts/src/test/java/de/saxsys/mvvmfx/contacts/ui/contactform/ContactFormViewModelTest.java b/examples/mvvmfx-contacts/src/test/java/de/saxsys/mvvmfx/contacts/ui/contactform/ContactFormViewModelTest.java index <HASH>..<HASH> 100644 --- a/examples/mvvmfx-contacts/src/test/java/de/saxsys/mvvmfx/contacts/ui/contactform/ContactFormViewModelTest.java +++ b/examples/mvvmfx-contacts/src/test/java/de/saxsys/mvvmfx/contacts/ui/contactform/ContactFormViewModelTest.java @@ -13,6 +13,7 @@ import javafx.scene.control.DatePicker; import javafx.scene.control.TextField; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; @@ -20,6 +21,7 @@ import de.saxsys.javafx.test.JfxRunner; import de.saxsys.mvvmfx.contacts.util.CentralClock; +@Ignore("Fix threading problems") @RunWith(JfxRunner.class) public class ContactFormViewModelTest {
Temporary fix build breaker by ignoring test
diff --git a/lib/Constants.js b/lib/Constants.js index <HASH>..<HASH> 100644 --- a/lib/Constants.js +++ b/lib/Constants.js @@ -1,3 +1,19 @@ +/************************************************************************ + * Copyright 2010-2011 Worlize Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + ***********************************************************************/ + /** * @author justoneplanet */
Adding Apache License information to new file.
diff --git a/impl/src/main/java/com/groupon/lex/metrics/config/CollectorBuilderWrapper.java b/impl/src/main/java/com/groupon/lex/metrics/config/CollectorBuilderWrapper.java index <HASH>..<HASH> 100644 --- a/impl/src/main/java/com/groupon/lex/metrics/config/CollectorBuilderWrapper.java +++ b/impl/src/main/java/com/groupon/lex/metrics/config/CollectorBuilderWrapper.java @@ -34,16 +34,22 @@ package com.groupon.lex.metrics.config; import static com.groupon.lex.metrics.ConfigSupport.collectorConfigString; import com.groupon.lex.metrics.MetricRegistryInstance; import com.groupon.lex.metrics.builders.collector.CollectorBuilder; -import lombok.Value; +import lombok.Getter; +import lombok.RequiredArgsConstructor; /** * CollectorBuilderWrapper wraps a builder. */ -@Value +@RequiredArgsConstructor +@Getter public class CollectorBuilderWrapper implements MonitorStatement { - /** Collector name. */ + /** + * Collector name. + */ private final String name; - /** Builder implementation. */ + /** + * Builder implementation. + */ private final CollectorBuilder builder; @Override
Remove unused equals and hashCode from CollectorBuilderWrapper.
diff --git a/bin/templates/scripts/cordova/lib/prepare.js b/bin/templates/scripts/cordova/lib/prepare.js index <HASH>..<HASH> 100644 --- a/bin/templates/scripts/cordova/lib/prepare.js +++ b/bin/templates/scripts/cordova/lib/prepare.js @@ -223,7 +223,7 @@ function updateProject (platformConfig, locations) { /* eslint-disable no-tabs */ // Write out the plist file with the same formatting as Xcode does - var info_contents = plist.build(infoPlist, { indent: ' ', offset: -1 }); + var info_contents = plist.build(infoPlist, { indent: '\t', offset: -1 }); /* eslint-enable no-tabs */ info_contents = info_contents.replace(/<string>[\s\r\n]*<\/string>/g, '<string></string>');
Don't use whitespace as an indent indicator (#<I>) This is extremely confusing if your editor is not configured to clearly distinguish different sorts of whitespace
diff --git a/symphony/lib/toolkit/class.databasestatement.php b/symphony/lib/toolkit/class.databasestatement.php index <HASH>..<HASH> 100644 --- a/symphony/lib/toolkit/class.databasestatement.php +++ b/symphony/lib/toolkit/class.databasestatement.php @@ -282,11 +282,11 @@ class DatabaseStatement /** * Creates the ordered SQL parts array. - * The order in which the part sorted are given by getStatementStructure(). + * The order in which the parts are sorted is given by getStatementStructure(). * * @see getStatementStructure() - * @return string - * The resulting SQL string + * @return array + * The sorted SQL parts array */ final public function generateOrderedSQLParts() { @@ -630,6 +630,9 @@ class DatabaseStatement */ final public function splitFunctionArguments($arguments) { + General::ensureType([ + 'arguments' => ['var' => $arguments, 'type' => 'string'], + ]); $arguments = str_split($arguments); $current = []; $args = [];
Fix type confusion 1. The return type is an array (not a string) 2. Make sure the argument is a string
diff --git a/src/serve.js b/src/serve.js index <HASH>..<HASH> 100644 --- a/src/serve.js +++ b/src/serve.js @@ -21,6 +21,7 @@ var cordova_util = require('./util'), shell = require('shelljs'), platforms = require('../platforms'), config_parser = require('./config_parser'), + hooker = require('./hooker'), fs = require('fs'), util = require('util'), http = require("http"), @@ -122,7 +123,14 @@ module.exports = function server(port) { throw new Error('Current working directory is not a Cordova-based project.'); } - // Return for testing. - return launchServer(projectRoot, port); + var hooks = new hooker(projectRoot); + return hooks.fire('before_serve') + .then(function() { + // Run a prepare first! + return require('../cordova').raw.prepare([]); + }).then(function() { + launchServer(projectRoot, port); + return hooks.fire('after_serve'); + }); };
Update "cordova serve" to work with promises refactoring
diff --git a/tests/test_project.py b/tests/test_project.py index <HASH>..<HASH> 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -36,6 +36,8 @@ class TestProject(unittest.TestCase): proj2.log_table(tbl_exp) proj2.build_report('task.rst', 'rst') + proj2.build_report('task.md', 'md') + proj2.build_report('task.html', 'html') self.assertEqual(len(tbl_exp.arr), 3) self.assertEqual(tbl_exp.arr[1][2], 'petrol')
test for report generation in html and md; includes datatable lists
diff --git a/server.go b/server.go index <HASH>..<HASH> 100644 --- a/server.go +++ b/server.go @@ -170,11 +170,12 @@ func (ps *peerState) forAllPeers(closure func(sp *serverPeer)) { // bitcoin peers. type server struct { // The following variables must only be used atomically. + // Putting the uint64s first makes them 64-bit aligned for 32-bit systems. + bytesReceived uint64 // Total bytes received from all peers since start. + bytesSent uint64 // Total bytes sent by all peers since start. started int32 shutdown int32 shutdownSched int32 - bytesReceived uint64 // Total bytes received from all peers since start. - bytesSent uint64 // Total bytes sent by all peers since start. listeners []net.Listener chainParams *chaincfg.Params
fix memory alignment for <I>-bit architectures (#<I>) having 3 int<I>s above the uint<I>s in the struct will cause misalignment for some <I>-bit architectures. see <URL>
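The constraint behind this fix is documented in Go's sync/atomic package: on 32-bit platforms, 64-bit atomic operations require their operands to be 64-bit aligned, and only the first word of an allocated struct can be relied on to have that alignment. A minimal sketch of the rule (field names are illustrative, not the actual btcd server struct):

```go
package main

import (
	"fmt"
	"sync/atomic"
)

// Risky layout on 32-bit targets: three 4-byte int32 fields can leave the
// uint64 counters at a non-8-byte-aligned offset, and a 64-bit atomic op on
// a misaligned address can panic on 32-bit platforms such as 386 and ARM.
type badStats struct {
	started  int32
	shutdown int32
	sched    int32
	bytesIn  uint64
	bytesOut uint64
}

// Safe layout: the 64-bit fields come first, so they start at offset 0 of
// the struct and stay 8-byte aligned regardless of the platform word size.
type goodStats struct {
	bytesIn  uint64 // accessed atomically
	bytesOut uint64 // accessed atomically
	started  int32
	shutdown int32
	sched    int32
}

func main() {
	s := &goodStats{}
	atomic.AddUint64(&s.bytesIn, 1500)
	fmt.Println(atomic.LoadUint64(&s.bytesIn))
}
```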
diff --git a/src/calendar/index.js b/src/calendar/index.js index <HASH>..<HASH> 100644 --- a/src/calendar/index.js +++ b/src/calendar/index.js @@ -127,7 +127,7 @@ class CalendarClab { getValue() { const format = _getFormat(this.options, this.getRomeInstance()); - const formatted = moment(this.valueStr, _getFormat(format)).format(); + const formatted = moment(this.valueStr, format).format(); return this.valueStr ? formatted : undefined; }
re #<I> - missed a typo in fixing the calendar (#<I>)
diff --git a/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/SimpleProcessManager.java b/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/SimpleProcessManager.java index <HASH>..<HASH> 100644 --- a/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/SimpleProcessManager.java +++ b/SingularityRunnerBase/src/main/java/com/hubspot/singularity/runner/base/shared/SimpleProcessManager.java @@ -14,6 +14,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; +import jdk.internal.joptsimple.internal.Strings; import org.slf4j.Logger; public class SimpleProcessManager extends SafeProcessManager { @@ -136,9 +137,12 @@ public class SimpleProcessManager extends SafeProcessManager { if (exitCode.isPresent() && !acceptableExitCodes.contains(exitCode.get())) { throw new ProcessFailedException( String.format( - "Got unacceptable exit code %s while running %s", + "Got unacceptable exit code %s while running %s. %s", exitCode, - processToString + processToString, + reader.isPresent() + ? "Output was " + Strings.join(reader.get().output, "\n") + : "" ) ); }
Include output in the exception when the command fails.
diff --git a/gen/base/development.js b/gen/base/development.js index <HASH>..<HASH> 100644 --- a/gen/base/development.js +++ b/gen/base/development.js @@ -22,7 +22,7 @@ var config = { , hostname: null , port: 4000 , model: { - defaultAdapter: 'memory' + defaultAdapter: 'filesystem' } , sessions: { store: 'memory'
Change default adapter for new apps to filesystem
diff --git a/tangelo/tangelo/ws4py/websocket.py b/tangelo/tangelo/ws4py/websocket.py index <HASH>..<HASH> 100644 --- a/tangelo/tangelo/ws4py/websocket.py +++ b/tangelo/tangelo/ws4py/websocket.py @@ -240,6 +240,9 @@ class WebSocket(object): if self.terminated or self.sock is None: raise RuntimeError("Cannot send on a terminated websocket") + # This fixes an error in the ws4py package - not yet in the upstream + # package. + # # blocking mode, never throw WantWriteError self.sock.setblocking(1)
Added a comment marking the change to upstream ws4py
diff --git a/lib/redis/connection/memory.rb b/lib/redis/connection/memory.rb index <HASH>..<HASH> 100644 --- a/lib/redis/connection/memory.rb +++ b/lib/redis/connection/memory.rb @@ -346,7 +346,14 @@ class Redis def lrange(key, startidx, endidx) data_type_check(key, Array) - (data[key] && data[key][startidx..endidx]) || [] + if data[key] + # In Ruby when negative start index is out of range Array#slice returns + # nil which is not the case for lrange in Redis. + startidx = 0 if startidx < 0 && startidx.abs > data[key].size + data[key][startidx..endidx] || [] + else + [] + end end def ltrim(key, start, stop) diff --git a/spec/lists_spec.rb b/spec/lists_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lists_spec.rb +++ b/spec/lists_spec.rb @@ -89,6 +89,7 @@ module FakeRedis @client.rpush("key1", "v3") expect(@client.lrange("key1", 1, -1)).to eq(["v2", "v3"]) + expect(@client.lrange("key1", -999, -1)).to eq(["v1", "v2", "v3"]) end it "should remove elements from a list" do
Fix lrange implementation when negative start index is out of range
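The semantics being matched are Redis's: LRANGE counts negative offsets from the tail and clamps an out-of-range negative start to the head of the list, whereas Ruby's Array#slice returns nil for an out-of-range negative start. A standalone Go sketch of that index normalization (illustrative only; the fake_redis implementation itself is Ruby):

```go
package main

import "fmt"

// lrange applies Redis LRANGE index rules to a slice: negative offsets
// count from the tail, an out-of-range negative start clamps to 0, and
// an out-of-range stop clamps to the last element.
func lrange(list []string, start, stop int) []string {
	n := len(list)
	if start < 0 {
		start += n
		if start < 0 {
			start = 0
		}
	}
	if stop < 0 {
		stop += n
	}
	if stop >= n {
		stop = n - 1
	}
	if start >= n || start > stop {
		return []string{}
	}
	return list[start : stop+1]
}

func main() {
	list := []string{"v1", "v2", "v3"}
	fmt.Println(lrange(list, 1, -1))    // [v2 v3]
	fmt.Println(lrange(list, -999, -1)) // [v1 v2 v3], where Array#slice would yield nil
}
```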
diff --git a/benchexec/containerexecutor.py b/benchexec/containerexecutor.py index <HASH>..<HASH> 100644 --- a/benchexec/containerexecutor.py +++ b/benchexec/containerexecutor.py @@ -112,6 +112,11 @@ def handle_basic_container_args(options, parser=None): ) if path in dir_modes: error_fn(f"Cannot specify multiple directory modes for '{path}'.") + if path == "/proc": + error_fn( + "Cannot specify directory mode for /proc, " + "this directory is handled specially." + ) dir_modes[path] = mode for path in options.hidden_dir:
Avoid ugly ValueError if directory mode for /proc is given
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb +++ b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb @@ -761,7 +761,8 @@ module ActiveRecord begin execute "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quote_column_name(column_name)} TYPE #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}" - rescue ActiveRecord::StatementInvalid + rescue ActiveRecord::StatementInvalid => e + raise e if postgresql_version > 80000 # This is PostgreSQL 7.x, so we have to use a more arcane way of doing it. begin begin_db_transaction
PostgreSQL: fix transaction bug that can occur if you call change_column with invalid parameters [#<I> state:resolved]
diff --git a/src/common/processors.js b/src/common/processors.js index <HASH>..<HASH> 100644 --- a/src/common/processors.js +++ b/src/common/processors.js @@ -128,6 +128,9 @@ new sre.Processor( var descrs = sre.SpeechGeneratorUtil.computeSpeech(xml); return descrs; }, + print: function(descrs) { + return JSON.stringify(descrs); + }, pprint: function(descrs) { return JSON.stringify(descrs, null, 2); }
Adds a print method for the description processor.
diff --git a/src/ocLazyLoad.js b/src/ocLazyLoad.js index <HASH>..<HASH> 100644 --- a/src/ocLazyLoad.js +++ b/src/ocLazyLoad.js @@ -288,7 +288,9 @@ if(angular.isArray(module)) { // Resubmit each entry as a single module angular.forEach(module, function(m) { - deferredList.push(self.load(m, params)); + if (m) { + deferredList.push(self.load(m, params)); + } }); // Resolve the promise once everything has loaded
Small change for #<I> to fix trailing commas in $ocLazyLoad.load([]) arrays.
diff --git a/spark.py b/spark.py index <HASH>..<HASH> 100644 --- a/spark.py +++ b/spark.py @@ -25,7 +25,7 @@ def sparkify(series): if data_range == 0.0: raise Exception("Cannot normalize when range is zero.") - return ''.join( + return u''.join( map( lambda x: spark_chars[int(round((x - minimum) * 7.0 / data_range))], series @@ -50,4 +50,4 @@ def guess_series(input_string): ) if __name__ == "__main__": - print sparkify(guess_series2(sys.stdin.read())) + print sparkify(guess_series(sys.stdin.read()))
output should start as a unicode string
diff --git a/src/mailreader-parser.js b/src/mailreader-parser.js index <HASH>..<HASH> 100644 --- a/src/mailreader-parser.js +++ b/src/mailreader-parser.js @@ -99,7 +99,7 @@ bodyPart.content.push(part); } - part.signed = node._childNodes[0].raw; + part.signedMessage = node._childNodes[0].raw; part.signature = new TextDecoder('utf-8').decode(node._childNodes[1].content); // walk the mime tree to find the nested nodes diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -78,7 +78,7 @@ }] }, function(err, bodyParts) { expect(err).to.not.exist; - expect(bodyParts[0].signed).to.exist; + expect(bodyParts[0].signedMessage).to.exist; expect(bodyParts[0].signature).to.exist; expect(bodyParts[0].content).to.not.be.empty; expect(bodyParts[0].raw).to.not.exist;
rename signed property to signedMessage
diff --git a/core/model/Translatable.php b/core/model/Translatable.php index <HASH>..<HASH> 100755 --- a/core/model/Translatable.php +++ b/core/model/Translatable.php @@ -495,6 +495,7 @@ class Translatable extends DataObjectDecorator { return $record; } + /* function augmentWrite(&$manipulation) { if(!Translatable::is_enabled()) return; @@ -505,7 +506,7 @@ class Translatable extends DataObjectDecorator { $newManip = array(); foreach($manipulation as $table => $manip) { if(strpos($table, "_versions") !== false) continue; - /* + foreach($this->fieldBlackList as $blackField) { if(isset($manip["fields"][$blackField])) { if($this->isTranslation()) { @@ -520,10 +521,10 @@ class Translatable extends DataObjectDecorator { } } } - */ } DB::manipulate($newManip); } + */ //-----------------------------------------------------------------------------------------------//
ENHANCEMENT Disabled Translatable->augmentWrite() - was only needed for the blacklist fields implementation which is inactive for the moment git-svn-id: svn://svn.silverstripe.com/silverstripe/open/modules/sapphire/trunk@<I> <I>b<I>ca-7a2a-<I>-9d3b-<I>d<I>a<I>a9
diff --git a/lib/neography/tasks.rb b/lib/neography/tasks.rb index <HASH>..<HASH> 100644 --- a/lib/neography/tasks.rb +++ b/lib/neography/tasks.rb @@ -7,7 +7,7 @@ require 'net/http' namespace :neo4j do desc "Install Neo4j" task :install, :edition, :version do |t, args| - args.with_defaults(:edition => "community", :version => "1.9") + args.with_defaults(:edition => "community", :version => "1.9.3") puts "Installing Neo4j-#{args[:edition]}-#{args[:version]}" if OS::Underlying.windows?
upgrading installer to use <I>
diff --git a/vault/expiration.go b/vault/expiration.go index <HASH>..<HASH> 100644 --- a/vault/expiration.go +++ b/vault/expiration.go @@ -497,9 +497,12 @@ func (m *ExpirationManager) Restore(errorFunc func()) (retErr error) { wg.Wait() m.restoreModeLock.Lock() - m.restoreLoaded = sync.Map{} - m.restoreLocks = nil atomic.StoreInt32(m.restoreMode, 0) + m.restoreLoaded.Range(func(k, v interface{}) bool { + m.restoreLoaded.Delete(k) + return true + }) + m.restoreLocks = nil m.restoreModeLock.Unlock() m.logger.Info("lease restore complete")
safely clean up loaded map (#<I>)
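The likely motivation (the commit message does not spell it out): assigning a fresh `sync.Map{}` over a field that other goroutines may still be reading or writing replaces the map's internal state, including its mutex, without any synchronization, which is a data race; deleting entries one by one goes through the map's own synchronized operations. A small standalone illustration of the safe pattern:

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	var loaded sync.Map
	loaded.Store("lease-1", true)
	loaded.Store("lease-2", true)

	// Instead of `loaded = sync.Map{}`, which would swap out the map's
	// internals non-atomically, delete every key through the map's API.
	loaded.Range(func(k, v interface{}) bool {
		loaded.Delete(k)
		return true // keep iterating
	})

	remaining := 0
	loaded.Range(func(k, v interface{}) bool {
		remaining++
		return true
	})
	fmt.Println("remaining entries:", remaining) // 0
}
```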
diff --git a/lxc/storage.go b/lxc/storage.go index <HASH>..<HASH> 100644 --- a/lxc/storage.go +++ b/lxc/storage.go @@ -245,7 +245,7 @@ func (c *storageCmd) run(config *lxd.Config, args []string) error { if len(args) < 3 { return errArgs } - driver := strings.Join(args[2:3], "") + driver := args[2] return c.doStoragePoolCreate(client, pool, driver, args[3:]) case "delete": return c.doStoragePoolDelete(client, pool)
lxc/storage: simplify
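The two forms are equivalent whenever the slice has at least three elements, which the preceding length check guarantees; a tiny standalone demonstration (the argument layout is assumed, not taken from the lxc CLI):

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical `lxc storage create <pool> <driver>` arguments.
	args := []string{"create", "mypool", "dir"}

	viaJoin := strings.Join(args[2:3], "") // original: join a one-element sub-slice
	direct := args[2]                      // simplified: index the element directly

	fmt.Println(viaJoin == direct) // true
}
```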