| diff (string, lengths 65–26.7k) | message (string, lengths 7–9.92k) |
|---|---|
diff --git a/src/clients/ApplicationClient.js b/src/clients/ApplicationClient.js
index <HASH>..<HASH> 100644
--- a/src/clients/ApplicationClient.js
+++ b/src/clients/ApplicationClient.js
@@ -75,6 +75,11 @@ export default class ApplicationClient extends BaseClient {
} else {
this.httpServer = config.org + ".internetofthings.ibmcloud.com";
}
+
+ this.withProxy = false;
+ if(isDefined(config['with-proxy'])) {
+ this.withProxy = config['with-proxy'];
+ }
this.log.info("[ApplicationClient:constructor] ApplicationClient initialized for organization : " + config.org);
}
@@ -340,7 +345,9 @@ export default class ApplicationClient extends BaseClient {
callApi(method, expectedHttpCode, expectJsonContent, paths, body, params){
return new Promise((resolve, reject) => {
// const API_HOST = "https://%s.internetofthings.ibmcloud.com/api/v0002";
- let uri = format("https://%s/api/v0002", this.httpServer);
+ let uri = this.withProxy
+ ? "/api/v0002"
+ : format("https://%s/api/v0002", this.httpServer);
if(Array.isArray(paths)){
for(var i = 0, l = paths.length; i < l; i++){
|
Add "with-proxy" config to allow
While using a proxy with this client could be achieved by settings
http-server: ‘<my server>’, providing an option to call APIs with just
the path is a more natural option.
|
diff --git a/src/iterators/compile.js b/src/iterators/compile.js
index <HASH>..<HASH> 100644
--- a/src/iterators/compile.js
+++ b/src/iterators/compile.js
@@ -524,18 +524,10 @@ export function compileCycle(key, p) {
return promise;
};
- fCtx.race = function () {
- throw new Error(".race can't be used inside a filter");
- };
-
ctx.wait = function (max, promise) {
return waitFactory(waitStore, max, promise);
};
- fCtx.wait = function () {
- throw new Error(".wait can't be used inside a filter");
- };
-
ctx.sleep = function (time, opt_test, opt_interval) {
ctx.yield();
return new Promise(function (resolve, reject) {
|
Removed restrictions from .race and .wait
|
diff --git a/www/src/pages/index.js b/www/src/pages/index.js
index <HASH>..<HASH> 100644
--- a/www/src/pages/index.js
+++ b/www/src/pages/index.js
@@ -197,7 +197,7 @@ import NextAuth from 'next-auth'
import Providers from 'next-auth/providers'
const options = {
- site: 'https://example.com'
+ site: 'https://example.com',
providers: [
// OAuth authentication providers
Providers.Apple({
|
added missing comma to homepage example
|
diff --git a/datacleaner/datacleaner.py b/datacleaner/datacleaner.py
index <HASH>..<HASH> 100644
--- a/datacleaner/datacleaner.py
+++ b/datacleaner/datacleaner.py
@@ -76,7 +76,13 @@ def autoclean(input_dataframe, drop_nans=False, copy=False, encoder=None,
try:
input_dataframe[column].fillna(input_dataframe[column].median(), inplace=True)
except TypeError:
- input_dataframe[column].fillna(input_dataframe[column].mode()[0], inplace=True)
+ most_frequent = input_dataframe[column].mode()
+ if len(most_frequent)>0:
+ input_dataframe[column].fillna(input_dataframe[column].mode()[0], inplace=True)
+ else:
+ input_dataframe[column].fillna(method='bfill', inplace=True)
+ input_dataframe[column].fillna(method='ffill', inplace=True)
+
# Encode all strings with numerical equivalents
if str(input_dataframe[column].values.dtype) == 'object':
|
Fix index out of bounds issue when fillna uses an empty mode()
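A minimal standalone sketch (plain pandas/NumPy, not the datacleaner code itself) of why the guard is needed: `Series.mode()` drops NaN by default, so an all-NaN column yields an empty result and indexing it with `[0]` fails.

```python
import numpy as np
import pandas as pd

column = pd.Series([np.nan, np.nan, np.nan])
most_frequent = column.mode()   # empty Series: mode() ignores NaN by default
print(len(most_frequent))       # 0 -> this is what made mode()[0] blow up

if len(most_frequent) > 0:
    column = column.fillna(most_frequent[0])
else:
    # mirror the bfill/ffill fallback used in the patch
    column = column.fillna(method='bfill')
    column = column.fillna(method='ffill')
```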
|
diff --git a/agent.go b/agent.go
index <HASH>..<HASH> 100644
--- a/agent.go
+++ b/agent.go
@@ -95,11 +95,11 @@ func (p *process) closePostStartFDs() {
}
if p.process.ConsoleSocket != nil {
- p.process.Stderr.(*os.File).Close()
+ p.process.ConsoleSocket.Close()
}
if p.consoleSock != nil {
- p.process.Stderr.(*os.File).Close()
+ p.consoleSock.Close()
}
}
|
agent: Close appropriate file handles
This commit fixes a bug in agent.go: each branch now closes the file
handle it actually checked (ConsoleSocket / consoleSock) instead of
closing Stderr in both cases.
Fixes #<I>
|
diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go
index <HASH>..<HASH> 100644
--- a/pkg/services/alerting/notifiers/pagerduty.go
+++ b/pkg/services/alerting/notifiers/pagerduty.go
@@ -88,7 +88,13 @@ func (pn *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error {
pn.log.Info("Notifying Pagerduty", "event_type", eventType)
payloadJSON := simplejson.New()
- payloadJSON.Set("summary", evalContext.Rule.Name+" - "+evalContext.Rule.Message)
+
+ summary := evalContext.Rule.Name + " - " + evalContext.Rule.Message
+ if len(summary) > 1024 {
+ summary = summary[0:1024]
+ }
+ payloadJSON.Set("summary", summary)
+
if hostname, err := os.Hostname(); err == nil {
payloadJSON.Set("source", hostname)
}
|
Alerting: Truncate PagerDuty summary when greater than <I> characters (#<I>)
Requests to PagerDuty fail with an HTTP <I> if the `summary`
attribute contains more than <I> characters; this change truncates the
summary so it stays within that limit.
API spec:
<URL>
|
diff --git a/specs-go/version.go b/specs-go/version.go
index <HASH>..<HASH> 100644
--- a/specs-go/version.go
+++ b/specs-go/version.go
@@ -25,7 +25,7 @@ const (
VersionPatch = 0
// VersionDev indicates development branch. Releases will be empty string.
- VersionDev = "-rc6"
+ VersionDev = "-rc6-dev"
)
// Version is the specification version that the package types support.
|
version: master back to -dev
|
diff --git a/src/main/java/com/greatmancode/craftconomy3/account/AccountManager.java b/src/main/java/com/greatmancode/craftconomy3/account/AccountManager.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/greatmancode/craftconomy3/account/AccountManager.java
+++ b/src/main/java/com/greatmancode/craftconomy3/account/AccountManager.java
@@ -87,6 +87,14 @@ public class AccountManager {
return result;
}
+ @Deprecated
+ public boolean exist(String name) {
+ if (name.startsWith("bank:")) {
+ return exist(name.split("bank:")[1], true);
+ } else {
+ return exist(name, false);
+ }
+ }
/**
* Delete a account from the system
*
|
Add back exist for backward Vault compatibility.
|
diff --git a/lib/session.js b/lib/session.js
index <HASH>..<HASH> 100644
--- a/lib/session.js
+++ b/lib/session.js
@@ -10,7 +10,7 @@ const Store = require('./store');
const uid = require('uid-safe');
/* istanbul ignore next */
-function defaultErrorHanlder(err, type) {
+function defaultErrorHandler(err, type) {
/* eslint no-param-reassign:0 */
err.name = `${pkg.name} ${type} error`;
throw err;
@@ -33,7 +33,7 @@ module.exports = (opts) => {
debug('options:%j', options);
const key = options.key || 'koa.sid';
const client = options.store || new FileStore();
- const errorHandler = options.errorHandler || defaultErrorHanlder;
+ const errorHandler = options.errorHandler || defaultErrorHandler;
const reconnectTimeout = options.reconnectTimeout || 10 * 1000;
const store = new Store(client, {
ttl: options.ttl,
|
Fix typo in var name
Handler was misspelled
|
diff --git a/end-to-end-tests/src/test/java/fi/jumi/test/RunningTestsTest.java b/end-to-end-tests/src/test/java/fi/jumi/test/RunningTestsTest.java
index <HASH>..<HASH> 100755
--- a/end-to-end-tests/src/test/java/fi/jumi/test/RunningTestsTest.java
+++ b/end-to-end-tests/src/test/java/fi/jumi/test/RunningTestsTest.java
@@ -39,7 +39,6 @@ public class RunningTestsTest {
assertThat("failing tests", launcher.getFailingTests(), is(0));
}
- @Ignore("not implemented")
@Test(timeout = TIMEOUT)
public void suite_with_one_failing_test() throws Exception {
launcher.addToClassPath(TestEnvironment.getSampleClasses());
|
RunningTestsTest: "suite with one failing test" passes
|
diff --git a/arctic/arctic.py b/arctic/arctic.py
index <HASH>..<HASH> 100644
--- a/arctic/arctic.py
+++ b/arctic/arctic.py
@@ -203,7 +203,9 @@ class Arctic(object):
-------
list of Arctic library names
"""
- return self._list_libraries_cached(newer_than_secs) if self.is_caching_enabled() else self._list_libraries()
+ if self._cache and self.is_caching_enabled():
+ return self._list_libraries_cached(newer_than_secs)
+ return self._list_libraries()
@mongo_retry
def _list_libraries(self):
|
Handle uninitialized cache object
|
diff --git a/gwpy/io/cache.py b/gwpy/io/cache.py
index <HASH>..<HASH> 100644
--- a/gwpy/io/cache.py
+++ b/gwpy/io/cache.py
@@ -52,13 +52,13 @@ __author__ = 'Duncan Macleod <duncan.macleod@ligo.org>'
try: # python2.x
FILE_LIKE = (
file, GzipFile,
- tempfile._TemporaryFileWrapper, # pylint: disable=protected-access
+ tempfile._TemporaryFileWrapper, # pylint: disable=protected-access
)
except NameError: # python3.x
from io import IOBase
FILE_LIKE = (
IOBase, GzipFile,
- tempfile._TemporaryFileWrapper, # pylint: disable=protected-access
+ tempfile._TemporaryFileWrapper, # pylint: disable=protected-access
)
|
io.cache: fixed style issue
|
diff --git a/app/src/Bolt/Nut/ConfigSet.php b/app/src/Bolt/Nut/ConfigSet.php
index <HASH>..<HASH> 100644
--- a/app/src/Bolt/Nut/ConfigSet.php
+++ b/app/src/Bolt/Nut/ConfigSet.php
@@ -12,7 +12,7 @@ class ConfigSet extends BaseCommand
{
$this
->setName('config:set')
- ->setDescription('Set a value from config.yml.')
+ ->setDescription('Set a value in config.yml.')
->addArgument('key', InputArgument::REQUIRED, 'The key you wish to get.')
->addArgument('value', InputArgument::REQUIRED, 'The value you wish to set it to.');
}
diff --git a/app/src/Bolt/Nut/DatabaseCheck.php b/app/src/Bolt/Nut/DatabaseCheck.php
index <HASH>..<HASH> 100644
--- a/app/src/Bolt/Nut/DatabaseCheck.php
+++ b/app/src/Bolt/Nut/DatabaseCheck.php
@@ -11,7 +11,7 @@ class DatabaseCheck extends BaseCommand
{
$this
->setName('database:check')
- ->setDescription('Check the database for missing columns.');
+ ->setDescription('Check the database for missing tables and/or columns.');
}
protected function execute(InputInterface $input, OutputInterface $output)
|
Tweaking descriptions in 'nut' commands.
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ def main():
description='Lightweight WSGI application framework, schema-validated JSON APIs, and API documentation.',
long_description=long_description,
long_description_content_type='text/x-rst',
- version='1.1.6',
+ version='1.1.7',
author='Craig A. Hobbs',
author_email='craigahobbs@gmail.com',
keywords='api json framework schema wsgi',
|
chisel <I>
|
diff --git a/concrete/blocks/autonav/controller.php b/concrete/blocks/autonav/controller.php
index <HASH>..<HASH> 100644
--- a/concrete/blocks/autonav/controller.php
+++ b/concrete/blocks/autonav/controller.php
@@ -162,6 +162,11 @@ class Controller extends BlockController
}
} else {
$c = $this->collection;
+
+ // let's use the ID of the collection passed in $this->collection
+ if ($this->collection instanceof Page) {
+ $this->cID = $this->collection->getCollectionID();
+ }
}
//Create an array of parent cIDs so we can determine the "nav path" of the current page
$inspectC = $c;
|
Use the ID of the collection passed in $this->collection in the autonav block
|
diff --git a/classes/phing/system/io/PhingFile.php b/classes/phing/system/io/PhingFile.php
index <HASH>..<HASH> 100644
--- a/classes/phing/system/io/PhingFile.php
+++ b/classes/phing/system/io/PhingFile.php
@@ -420,7 +420,7 @@ class PhingFile {
if ($fs->checkAccess($this) !== true) {
throw new IOException("No read access to ".$this->path);
}
- return @is_dir($this->path);
+ return @is_dir($this->path) && !@is_link($this->path);
}
/**
|
Refs #<I> - symbolic links are not directories
|
diff --git a/course/rest.php b/course/rest.php
index <HASH>..<HASH> 100644
--- a/course/rest.php
+++ b/course/rest.php
@@ -41,7 +41,7 @@ $sequence = optional_param('sequence', '', PARAM_SEQUENCE);
$visible = optional_param('visible', 0, PARAM_INT);
$pageaction = optional_param('action', '', PARAM_ALPHA); // Used to simulate a DELETE command
-$PAGE->set_url('/course/rest.php', array('courseId'=>$courseId,'class'=>$class));
+$PAGE->set_url('/course/rest.php', array('courseId'=>$courseid,'class'=>$class));
// Authorise the user and verify some incoming data
if (!$course = $DB->get_record('course', array('id'=>$courseid))) {
|
course MDL-<I> Fixed up typo in ajax editing
|
diff --git a/lib/mobility/backend/active_record/serialized.rb b/lib/mobility/backend/active_record/serialized.rb
index <HASH>..<HASH> 100644
--- a/lib/mobility/backend/active_record/serialized.rb
+++ b/lib/mobility/backend/active_record/serialized.rb
@@ -53,7 +53,7 @@ module Mobility
end
end
EOM
- end if Loaded::ActiveRecord
+ end
end
end
end
|
Remove unneeded Loaded::ActiveRecord
|
diff --git a/abutils/utils/alignment.py b/abutils/utils/alignment.py
index <HASH>..<HASH> 100644
--- a/abutils/utils/alignment.py
+++ b/abutils/utils/alignment.py
@@ -827,8 +827,8 @@ class NWAlignment(BaseAlignment):
def _get_matrix_file(self, match=None, mismatch=None, matrix=None):
matrix_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'matrices')
builtins = ['blosum62', 'match3mismatch2', 'match1mismatch0']
- if self._matrix is not None:
- matrix_name = self._matrix
+ if matrix is not None:
+ matrix_name = matrix
else:
matrix_name = 'match{}mismatch{}'.format(abs(match), abs(mismatch))
if matrix_name.lower() in builtins:
|
fix global_alignment scoring when providing an alignment matrix
|
diff --git a/star.py b/star.py
index <HASH>..<HASH> 100644
--- a/star.py
+++ b/star.py
@@ -180,6 +180,7 @@ def plot_parameter(logP, parameter, parameter_name, output):
def trig_param_plot(stars, output):
logP = numpy.fromiter((math.log(star.period, 10) for star in stars),
numpy.float)
+ assert False, str(stars)
parameters = numpy.vstack(tuple(
interpolation.ak_bk2Ak_Phik(star.coefficients) for star in stars))
(A0, A1, Phi1, A2, Phi2, A3, Phi3) = numpy.hsplit(parameters[:,:7], 7)
|
Time for more assertion debugging...
|
diff --git a/src/Auth/Form/Register.php b/src/Auth/Form/Register.php
index <HASH>..<HASH> 100644
--- a/src/Auth/Form/Register.php
+++ b/src/Auth/Form/Register.php
@@ -46,6 +46,9 @@ class Register extends Form
'options' => array(
'label' => /*@translate*/ 'Email',
),
+ 'attributes' => [
+ 'required' => true
+ ]
)
);
diff --git a/src/Auth/Form/UserInfoFieldset.php b/src/Auth/Form/UserInfoFieldset.php
index <HASH>..<HASH> 100644
--- a/src/Auth/Form/UserInfoFieldset.php
+++ b/src/Auth/Form/UserInfoFieldset.php
@@ -120,8 +120,9 @@ class UserInfoFieldset extends Fieldset implements
],
'attributes' => [
'data-placeholder' => /*@translate*/ 'please select',
- 'data-allowclear' => 'true',
- 'required' => true,
+ 'data-allowclear' => 'false',
+ 'data-searchbox' => -1, // hide the search box
+ 'required' => true, // mark label as required
],
)
);
|
[General] adjust the use of the search box and the hide feature of select2 elements
|
diff --git a/inferno/callbacks.py b/inferno/callbacks.py
index <HASH>..<HASH> 100644
--- a/inferno/callbacks.py
+++ b/inferno/callbacks.py
@@ -202,7 +202,7 @@ class BestLoss(Callback):
yield key, sign, loss
def on_epoch_end(self, net, **kwargs):
- sl = slice(-1, None), list(self.key_signs)
+ sl = np.s_[-1, list(self.key_signs)]
check_history_slice(net.history, sl)
history = net.history
|
Use np.s_ for consistency.
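As a standalone illustration (plain NumPy, not the inferno history API), `np.s_[...]` simply captures the index expression you would write inside square brackets, which is what makes it a readable, consistent way to build slices:

```python
import numpy as np

assert np.s_[2:9:3] == slice(2, 9, 3)
assert np.s_[-1, [0, 2]] == (-1, [0, 2])                 # integer row + fancy column index
assert np.s_[-1:, [0, 2]] == (slice(-1, None), [0, 2])   # slice-based form of the same rows

data = np.arange(12).reshape(3, 4)
sl = np.s_[-1, [0, 2]]     # hypothetical stand-in for the history slice in the diff
print(data[sl])            # [ 8 10] -- last row, columns 0 and 2
```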
|
diff --git a/aeron-system-tests/src/test/java/io/aeron/ChannelEndpointStatusTest.java b/aeron-system-tests/src/test/java/io/aeron/ChannelEndpointStatusTest.java
index <HASH>..<HASH> 100644
--- a/aeron-system-tests/src/test/java/io/aeron/ChannelEndpointStatusTest.java
+++ b/aeron-system-tests/src/test/java/io/aeron/ChannelEndpointStatusTest.java
@@ -80,8 +80,7 @@ public class ChannelEndpointStatusTest
private final ErrorHandler driverErrorHandler =
(ex) ->
{
- if (ex instanceof AeronException &&
- ex.getMessage().startsWith("channel error - Address already in use:"))
+ if (ex instanceof AeronException && ex.getMessage().contains("channel error - Address already in use"))
{
return;
}
|
[Java] Do a contains rather than startsWith for the error message.
|
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/event_based.py
+++ b/openquake/calculators/event_based.py
@@ -427,8 +427,7 @@ class EventBasedRuptureCalculator(PSHACalculator):
events[name][i] = event[name]
self.eid += 1
i += 1
- if not self.oqparam.ground_motion_fields:
- self.datastore['sescollection/%s' % ebr.serial] = ebr
+ self.datastore['sescollection/%s' % ebr.serial] = ebr
self.datastore.extend('events', events)
def post_execute(self, result):
|
Restored the saving of the sescollection
|
diff --git a/neo/SmartContract/tests/StorageTest.py b/neo/SmartContract/tests/StorageTest.py
index <HASH>..<HASH> 100644
--- a/neo/SmartContract/tests/StorageTest.py
+++ b/neo/SmartContract/tests/StorageTest.py
@@ -1,5 +1,5 @@
-from boa.code.builtins import range, concat
-from boa.blockchain.vm.Neo.Storage import GetContext, Get, Put, Delete
+from boa.builtins import range, concat
+from boa.interop.Neo.Storage import GetContext, Get, Put
def Main(operation, key, value):
|
fix StorageTest smart contract for new compiler
|
diff --git a/gpiozero/boards.py b/gpiozero/boards.py
index <HASH>..<HASH> 100644
--- a/gpiozero/boards.py
+++ b/gpiozero/boards.py
@@ -24,10 +24,8 @@ class TrafficLights(object):
class PiTraffic(TrafficLights):
def __init__(self):
- self.red = LED(9)
- self.amber = LED(10)
- self.green = LED(11)
- self._lights = (self.red, self.amber, self.green)
+ red, amber, green = (9, 10, 11)
+ super(FishDish, self).__init__(red, amber, green)
class FishDish(TrafficLights):
|
Use super over re-implementation for PiTraffic
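A generic sketch of the pattern (hypothetical class names, not the gpiozero source): the base class owns the wiring, and a board-specific subclass only supplies its fixed pin numbers via super():

```python
class TrafficLightsBase:
    """Hypothetical base class holding the three lights."""
    def __init__(self, red_pin, amber_pin, green_pin):
        self.red, self.amber, self.green = red_pin, amber_pin, green_pin
        self._lights = (self.red, self.amber, self.green)

class PiTrafficLike(TrafficLightsBase):
    """Board with fixed pins; no need to re-implement the wiring."""
    def __init__(self):
        super(PiTrafficLike, self).__init__(9, 10, 11)

print(PiTrafficLike()._lights)  # (9, 10, 11)
```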
|
diff --git a/tests/sample.py b/tests/sample.py
index <HASH>..<HASH> 100755
--- a/tests/sample.py
+++ b/tests/sample.py
@@ -354,8 +354,9 @@ def main():
sample_crud_consumer_group(client, project, logstore, consumer_group)
time.sleep(10)
- sample_external_store(client, project)
- time.sleep(10)
+ # skip this part of UT cause of the known issue in
+ # sample_external_store(client, project)
+ # time.sleep(10)
# test copy project
try:
@@ -371,4 +372,4 @@ def main():
if __name__ == '__main__':
- main()
+ main()
\ No newline at end of file
|
Skip sample_external_store(client, project) in the sample test because of a known issue
|
diff --git a/docs/storage/driver/testsuites/testsuites.go b/docs/storage/driver/testsuites/testsuites.go
index <HASH>..<HASH> 100644
--- a/docs/storage/driver/testsuites/testsuites.go
+++ b/docs/storage/driver/testsuites/testsuites.go
@@ -258,6 +258,7 @@ func (suite *DriverSuite) TestWriteReadLargeStreams(c *check.C) {
reader, err := suite.StorageDriver.ReadStream(suite.ctx, filename, 0)
c.Assert(err, check.IsNil)
+ defer reader.Close()
writtenChecksum := sha1.New()
io.Copy(writtenChecksum, reader)
|
Close reader after the test is finished.
|
diff --git a/ella/ellaadmin/widgets.py b/ella/ellaadmin/widgets.py
index <HASH>..<HASH> 100644
--- a/ella/ellaadmin/widgets.py
+++ b/ella/ellaadmin/widgets.py
@@ -225,11 +225,6 @@ class ListingCustomWidget(forms.SelectMultiple):
cx['choices'] = choices or self.choices
cx['listings'] = list(value[0]) or []
- if len(value):
- # modifying existing object, so value is dict containing Listings and selected category IDs
- # cx['selected'] = Category.objects.filter(pk__in=value['selected_categories']).values('id') or []
- cx['listings'] = list(value[0]['listings']) or []
-
tpl = get_template('admin/widget/listing_custom.html')
return mark_safe(tpl.render(cx))
""" """
|
deleted forgotten line from ellaadmin (is ellaadmin still in use
somewhere?)
|
diff --git a/drf_hal_json/views.py b/drf_hal_json/views.py
index <HASH>..<HASH> 100644
--- a/drf_hal_json/views.py
+++ b/drf_hal_json/views.py
@@ -11,4 +11,4 @@ class HalCreateModelMixin(CreateModelMixin):
url_field_data = links_data.get(api_settings.URL_FIELD_NAME)
if not url_field_data:
return {}
- return {'Location': url_field_data}
+ return {'Location': str(url_field_data)}
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -11,8 +11,8 @@ setup(
author_email='bredehoeft.sebastian@gmail.com',
packages=find_packages(exclude=['tests*']),
install_requires=[
- 'django>=1.6',
- 'djangorestframework>=3.0.0',
+ 'django>=2.0',
+ 'djangorestframework>=3.10.0',
'drf-nested-fields>=0.9.0'
],
zip_safe=False,
|
made compatible with DRF <I>
|
diff --git a/DrdPlus/Lighting/UnsuitableLightingQualityMalus.php b/DrdPlus/Lighting/UnsuitableLightingQualityMalus.php
index <HASH>..<HASH> 100644
--- a/DrdPlus/Lighting/UnsuitableLightingQualityMalus.php
+++ b/DrdPlus/Lighting/UnsuitableLightingQualityMalus.php
@@ -48,7 +48,7 @@ class UnsuitableLightingQualityMalus extends StrictObject implements NegativeInt
if ($infravisionCanBeUsed && $currentLightingQuality->getValue() <= -90 // like star night
&& in_array($raceCode->getValue(), [RaceCode::DWARF, RaceCode::ORC], true)
) {
- /** lowering malus by infravision, see PPH page 129 right column, @link https://pph.drdplus.jaroslavtyc.com/#Infravidění */
+ /** lowering malus by infravision, see PPH page 129 right column, @link https://pph.drdplus.jaroslavtyc.com/#infravideni */
$possibleMalus += 3;
}
$possibleMalus += $duskSight->getInsufficientLightingBonus(); // lowering malus
|
Updated a link to PPH
|
diff --git a/anyconfig/__init__.py b/anyconfig/__init__.py
index <HASH>..<HASH> 100644
--- a/anyconfig/__init__.py
+++ b/anyconfig/__init__.py
@@ -19,7 +19,7 @@ validation/generation support.
from .globals import AUTHOR, VERSION
from .api import (
single_load, multi_load, load, loads, dump, dumps, validate, gen_schema,
- list_types, find_loader, to_container, get, set_, open,
+ list_types, find_loader, merge, get, set_, open,
MS_REPLACE, MS_NO_REPLACE, MS_DICTS, MS_DICTS_AND_LISTS,
UnknownParserTypeError, UnknownFileTypeError
)
@@ -29,7 +29,7 @@ __version__ = VERSION
__all__ = [
"single_load", "multi_load", "load", "loads", "dump", "dumps", "validate",
- "gen_schema", "list_types", "find_loader", "to_container",
+ "gen_schema", "list_types", "find_loader", "merge",
"get", "set_", "open",
"MS_REPLACE", "MS_NO_REPLACE", "MS_DICTS", "MS_DICTS_AND_LISTS",
"UnknownParserTypeError", "UnknownFileTypeError"
|
api: export merge (anyconfig.dicts.merge) instead of to_container which was deprecated and removed
|
diff --git a/stimela/cargo/cab/rfinder/src/run.py b/stimela/cargo/cab/rfinder/src/run.py
index <HASH>..<HASH> 100644
--- a/stimela/cargo/cab/rfinder/src/run.py
+++ b/stimela/cargo/cab/rfinder/src/run.py
@@ -32,11 +32,8 @@ for param in cab['parameters']:
if value is None:
continue
- msnames = []
if name == 'msname':
- for ms in value:
- msnames.append(ms.split('/')[-1])
- list_doc['general']['msname'] = msnames
+ list_doc['general']['msname'] = value.split('/')[-1]
continue
for key, val in list_doc.items():
|
get the msname with no path
|
diff --git a/src/main/java/water/util/MRUtils.java b/src/main/java/water/util/MRUtils.java
index <HASH>..<HASH> 100644
--- a/src/main/java/water/util/MRUtils.java
+++ b/src/main/java/water/util/MRUtils.java
@@ -75,11 +75,10 @@ public class MRUtils {
int cores = 0;
for( H2ONode node : H2O.CLOUD._memary )
cores += node._heartbeat._num_cpus;
- final int splits = 4*cores;
-
+ final int splits = cores;
// rebalance only if the number of chunks is less than the number of cores
- if( (fr.vecs()[0].nChunks() < splits/4 || shuffle) && fr.numRows() > splits) {
+ if( (fr.vecs()[0].nChunks() < splits || shuffle) && fr.numRows() > splits) {
Vec[] vecs = fr.vecs().clone();
Log.info("Load balancing dataset, splitting it into up to " + splits + " chunks.");
long[] idx = null;
|
Reduce the number of chunks for rebalancing to the number of cores, not 4x.
|
diff --git a/activestorage/app/jobs/active_storage/purge_job.rb b/activestorage/app/jobs/active_storage/purge_job.rb
index <HASH>..<HASH> 100644
--- a/activestorage/app/jobs/active_storage/purge_job.rb
+++ b/activestorage/app/jobs/active_storage/purge_job.rb
@@ -3,6 +3,7 @@
# Provides asynchronous purging of ActiveStorage::Blob records via ActiveStorage::Blob#purge_later.
class ActiveStorage::PurgeJob < ActiveStorage::BaseJob
discard_on ActiveRecord::RecordNotFound
+ retry_on ActiveRecord::Deadlocked, attempts: 10, wait: :exponentially_longer
def perform(blob)
blob.purge
|
Retry ActiveStorage::PurgeJobs on DB deadlock
|
diff --git a/source/CAS/PGTStorage/Db.php b/source/CAS/PGTStorage/Db.php
index <HASH>..<HASH> 100644
--- a/source/CAS/PGTStorage/Db.php
+++ b/source/CAS/PGTStorage/Db.php
@@ -296,7 +296,7 @@ class CAS_PGTStorage_Db extends CAS_PGTStorage_AbstractStorage
// initialize the PDO object for this method
$pdo = $this->_getPdo();
- $this->setErrorMode();
+ $this->_setErrorMode();
try {
$pdo->beginTransaction();
@@ -337,7 +337,7 @@ class CAS_PGTStorage_Db extends CAS_PGTStorage_AbstractStorage
// initialize the PDO object for this method
$pdo = $this->_getPdo();
- $this->setErrorMode();
+ $this->_setErrorMode();
try {
$pdo->beginTransaction();
|
#<I> fix function names screwed up by the style changes
|
diff --git a/wicket-orientdb/src/test/java/ru/ydn/wicket/wicketorientdb/orientdb/TestStandaloneOrientDBCompatibility.java b/wicket-orientdb/src/test/java/ru/ydn/wicket/wicketorientdb/orientdb/TestStandaloneOrientDBCompatibility.java
index <HASH>..<HASH> 100644
--- a/wicket-orientdb/src/test/java/ru/ydn/wicket/wicketorientdb/orientdb/TestStandaloneOrientDBCompatibility.java
+++ b/wicket-orientdb/src/test/java/ru/ydn/wicket/wicketorientdb/orientdb/TestStandaloneOrientDBCompatibility.java
@@ -14,6 +14,7 @@ import ru.ydn.wicket.wicketorientdb.OrientDbTestWebApplication;
import static org.junit.Assert.assertNotNull;
+@Ignore
public class TestStandaloneOrientDBCompatibility {
private static final String PLOCAL_DB_NAME = "testDBLifeCycleLocal";
|
Exclude standalone tests because they make the test run very slow
|
diff --git a/openfisca_core/holders.py b/openfisca_core/holders.py
index <HASH>..<HASH> 100644
--- a/openfisca_core/holders.py
+++ b/openfisca_core/holders.py
@@ -2,18 +2,19 @@
from __future__ import division
-import warnings
+import logging
+import shutil
import os
+import warnings
import numpy as np
+import psutil
from commons import empty_clone
import periods
from periods import MONTH, YEAR, ETERNITY
from columns import make_column_from_variable
from indexed_enums import Enum, EnumArray
-import logging
-import psutil
log = logging.getLogger(__name__)
@@ -528,6 +529,12 @@ class OnDiskStorage(object):
def get_known_periods(self):
return self._files.keys()
+ def __del__(self):
+ shutil.rmtree(self.storage_dir) # Remove the holder temporary files
+ # If the simulation temporary directory is empty, remove it
+ parent_dir = os.path.abspath(os.path.join(self.storage_dir, os.pardir))
+ if not os.listdir(parent_dir):
+ shutil.rmtree(parent_dir)
class InMemoryStorage(object):
|
Remove tmp files when simulation is done
|
diff --git a/resource_aws_vpc_test.go b/resource_aws_vpc_test.go
index <HASH>..<HASH> 100644
--- a/resource_aws_vpc_test.go
+++ b/resource_aws_vpc_test.go
@@ -10,9 +10,8 @@ import (
)
func TestAccVpc(t *testing.T) {
- testAccPreCheck(t)
-
resource.Test(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckVpcDestroy,
Steps: []resource.TestStep{
|
helper/resource: add PreCheck
|
diff --git a/spec/mongo/collection_spec.rb b/spec/mongo/collection_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mongo/collection_spec.rb
+++ b/spec/mongo/collection_spec.rb
@@ -1263,7 +1263,7 @@ describe Mongo::Collection do
end
end
- context 'when a max time ms value is provided' do
+ context 'when a max time ms value is provided', if: (!sharded? && write_command_enabled?) do
let(:result) do
authorized_collection.parallel_scan(2, options)
|
RUBY-<I> Don't test parallelScan on <I>
|
diff --git a/lib/rpub.rb b/lib/rpub.rb
index <HASH>..<HASH> 100644
--- a/lib/rpub.rb
+++ b/lib/rpub.rb
@@ -48,7 +48,6 @@ module Rpub
end
KRAMDOWN_OPTIONS = {
- :auto_ids => false,
:coderay_line_numbers => nil
}
end
|
Do use auto IDs for TOC
|
diff --git a/src/wyjc/runtime/Util.java b/src/wyjc/runtime/Util.java
index <HASH>..<HASH> 100755
--- a/src/wyjc/runtime/Util.java
+++ b/src/wyjc/runtime/Util.java
@@ -403,7 +403,7 @@ public class Util {
* The <code>coerce</code> method forces this object to conform to a given
* type.
*/
- public static Object coerce(Object obj, Type t) {
+ public static Object coerce(Object obj, Type t) {
if(obj instanceof BigInteger) {
return coerce((BigInteger)obj,t);
} else if(obj instanceof List) {
@@ -461,8 +461,10 @@ public class Util {
throw new RuntimeException("invalid list coercion (" + obj + " => " + t + ")");
}
- public static Object coerce(String obj, Type t) {
- if(t.kind == Type.K_LIST) {
+ public static Object coerce(String obj, Type t) {
+ if(t.kind == Type.K_STRING) {
+ return obj;
+ } else if(t.kind == Type.K_LIST) {
Type.List tl = (Type.List) t;
List r = new List(obj.length());
for(int i=0;i!=obj.length();++i) {
|
Bug fix for string => [int] coercions.
|
diff --git a/lib/songbirdsh/track.rb b/lib/songbirdsh/track.rb
index <HASH>..<HASH> 100644
--- a/lib/songbirdsh/track.rb
+++ b/lib/songbirdsh/track.rb
@@ -22,7 +22,7 @@ module Songbirdsh
end
def search_string
- "#{self.artist.downcase}#{self.album.downcase}#{self.title.downcase}"
+ "#{self.artist.to_s.downcase}#{self.album.to_s.downcase}#{self.title.to_s.downcase}"
end
def to_s
|
slight change to handle nil track fields
|
diff --git a/scope/scope.py b/scope/scope.py
index <HASH>..<HASH> 100644
--- a/scope/scope.py
+++ b/scope/scope.py
@@ -78,6 +78,11 @@ class TagBase:
"""Method called for defining arguments for the object. Should be implemented by subclasses."""
pass
+ def set_children(self, children):
+ """Set the children of the object."""
+ self.children = children
+ return self
+
def serialize(self, context):
"""Method called for serializing object. Should be implemented by subclasses."""
pass
|
Add helper function for setting children.
|
diff --git a/spec/apps/rails/dummy_app.rb b/spec/apps/rails/dummy_app.rb
index <HASH>..<HASH> 100644
--- a/spec/apps/rails/dummy_app.rb
+++ b/spec/apps/rails/dummy_app.rb
@@ -104,8 +104,7 @@ class DummyController < ActionController::Base
)
]
- def index
- end
+ def index; end
def crash
raise AirbrakeTestError
|
rubocop: fix offences of the Style/EmptyMethod cop
|
diff --git a/code/javascript/core/scripts/selenium-seleneserunner.js b/code/javascript/core/scripts/selenium-seleneserunner.js
index <HASH>..<HASH> 100644
--- a/code/javascript/core/scripts/selenium-seleneserunner.js
+++ b/code/javascript/core/scripts/selenium-seleneserunner.js
@@ -109,7 +109,7 @@ function nextCommand() {
if (postResult == "START") {
url = url + "driver/?seleniumStart=true" + buildDriverParams() + preventBrowserCaching();
} else {
- url = url + "driver/?commandResult=" + postResult + buildDriverParams() + preventBrowserCaching();
+ url = url + "driver/?commandResult=" + encodeURI(postResult) + buildDriverParams() + preventBrowserCaching();
}
LOG.debug("XMLHTTPRequesting " + url);
xmlHttp.open("GET", url, true);
|
Encode the URI of the command result, for SRC-2
r<I>
|
diff --git a/openfisca_web_api/controllers.py b/openfisca_web_api/controllers.py
index <HASH>..<HASH> 100644
--- a/openfisca_web_api/controllers.py
+++ b/openfisca_web_api/controllers.py
@@ -397,6 +397,7 @@ def api1_field(req):
headers = headers,
)
+ model.tax_benefit_system.set_variables_dependencies()
simulation = simulations.Simulation(
period = periods.period(datetime.date.today().year),
tax_benefit_system = model.tax_benefit_system,
|
Compute consumers of variable before returning a field JSON.
|
diff --git a/forwarded.js b/forwarded.js
index <HASH>..<HASH> 100644
--- a/forwarded.js
+++ b/forwarded.js
@@ -10,13 +10,19 @@
var proxies = [
{
ip: 'x-forwarded-for',
- port: 'x-forwarded-port'
+ port: 'x-forwarded-port',
+ proto: 'x-forwarded-proto'
}, {
ip: 'z-forwarded-for',
- port: 'z-forwarded-port'
+ port: 'z-forwarded-port', // Estimated guess, no standard header available.
+ proto: 'z-forwarded-proto' // Estimated guess, no standard header available.
}, {
ip: 'forwarded',
- port: 'forwarded-port'
+ port: 'forwarded-port',
+ proto: 'forwarded-proto' // Estimated guess, no standard header available.
+ }, {
+ ip: 'x-real-ip',
+ port: 'x-real-port' // Estimated guess, no standard header available.
}
];
|
[minor] Added x-real-ip as option.
|
diff --git a/troposphere/cognito.py b/troposphere/cognito.py
index <HASH>..<HASH> 100644
--- a/troposphere/cognito.py
+++ b/troposphere/cognito.py
@@ -130,7 +130,7 @@ class PasswordPolicy(AWSProperty):
'RequireNumbers': (boolean, False),
'RequireSymbols': (boolean, False),
'RequireUppercase': (boolean, False),
- 'TemporaryPasswordValidityDays': (float, False),
+ 'TemporaryPasswordValidityDays': (positive_integer, False),
}
|
Fix TemporaryPasswordValidityDays type (#<I>)
|
diff --git a/devices.js b/devices.js
index <HASH>..<HASH> 100755
--- a/devices.js
+++ b/devices.js
@@ -3396,17 +3396,14 @@ const devices = [
vendor: 'Iris',
description: 'Motion and temperature sensor',
supports: 'occupancy and temperature',
- fromZigbee: [
- fz.temperature,
- fz.iaszone_occupancy_2,
- ],
+ fromZigbee: [fz.iaszone_occupancy_2, fz.temperature, fz.battery_3V_2100],
toZigbee: [],
- meta: {configureKey: 1},
+ meta: {configureKey: 2},
configure: async (device, coordinatorEndpoint) => {
const endpoint = device.getEndpoint(1);
await bind(endpoint, coordinatorEndpoint, ['msTemperatureMeasurement', 'genPowerCfg']);
await configureReporting.temperature(endpoint);
- await configureReporting.batteryPercentageRemaining(endpoint);
+ await configureReporting.batteryVoltage(endpoint);
},
},
{
|
Fix battery reporting for Iris <I>-L motion sensor. (#<I>)
|
diff --git a/system/Database/BaseBuilder.php b/system/Database/BaseBuilder.php
index <HASH>..<HASH> 100644
--- a/system/Database/BaseBuilder.php
+++ b/system/Database/BaseBuilder.php
@@ -1602,14 +1602,12 @@ class BaseBuilder
return false; // @codeCoverageIgnore
}
- } else {
- if (empty($set)) {
- if (CI_DEBUG) {
- throw new DatabaseException('insertBatch() called with no data');
- }
-
- return false; // @codeCoverageIgnore
+ } elseif (empty($set)) {
+ if (CI_DEBUG) {
+ throw new DatabaseException('insertBatch() called with no data');
}
+
+ return false; // @codeCoverageIgnore
}
$hasQBSet = ($set === null);
@@ -1690,7 +1688,7 @@ class BaseBuilder
$clean = [];
- foreach ($row as $k => $rowValue) {
+ foreach ($row as $rowValue) {
$clean[] = $escape ? $this->db->escape($rowValue) : $rowValue;
}
|
refactor: vendor/bin/rector process
|
diff --git a/rw/event.py b/rw/event.py
index <HASH>..<HASH> 100644
--- a/rw/event.py
+++ b/rw/event.py
@@ -64,8 +64,10 @@ class Event(set):
# wait for results
for func, future in futures:
try:
- result = yield future
- re.append(result)
+ if not future.done():
+ yield future
+ re.append(future.result())
+
except Exception:
exceptions.append((func, traceback.format_exc()))
|
rw.event.Event: make sure we are not waiting for already-done Futures
|
diff --git a/code/libraries/koowa/libraries/database/row/abstract.php b/code/libraries/koowa/libraries/database/row/abstract.php
index <HASH>..<HASH> 100644
--- a/code/libraries/koowa/libraries/database/row/abstract.php
+++ b/code/libraries/koowa/libraries/database/row/abstract.php
@@ -95,6 +95,8 @@ abstract class KDatabaseRowAbstract extends KObjectArray implements KDatabaseRow
// Set the row data
if (isset($config->data)) {
$this->setProperties($config->data->toArray(), $this->isNew());
+
+ unset($config->data);
}
//Set the status message
diff --git a/code/libraries/koowa/libraries/database/rowset/abstract.php b/code/libraries/koowa/libraries/database/rowset/abstract.php
index <HASH>..<HASH> 100644
--- a/code/libraries/koowa/libraries/database/rowset/abstract.php
+++ b/code/libraries/koowa/libraries/database/rowset/abstract.php
@@ -72,6 +72,9 @@ abstract class KDatabaseRowsetAbstract extends KObjectSet implements KDatabaseRo
foreach($config->data->toArray() as $properties) {
$this->create($properties, $config->status);
}
+
+ // Unset data to save memory
+ unset($config->data);
}
//Set the status message
|
re #<I>: Optimize row and rowset creation for memory use
|
diff --git a/src/Projection/Catalog/Import/CatalogImport.php b/src/Projection/Catalog/Import/CatalogImport.php
index <HASH>..<HASH> 100644
--- a/src/Projection/Catalog/Import/CatalogImport.php
+++ b/src/Projection/Catalog/Import/CatalogImport.php
@@ -143,7 +143,7 @@ class CatalogImport
array_map(function (Context $context) use ($productBuilder, $productXml) {
if ($productBuilder->isAvailableForContext($context)) {
$product = $productBuilder->getProductForContext($context);
- $this->addCommandToQueue($product);
+ $this->addUpdateProductCommandToQueue($product);
$this->processImagesInProductXml($productXml);
}
}, $this->contextSource->getAllAvailableContextsWithVersion($this->dataVersion));
@@ -175,7 +175,7 @@ class CatalogImport
}
}
- private function addCommandToQueue(Product $product)
+ private function addUpdateProductCommandToQueue(Product $product)
{
$this->commandQueue->add(new UpdateProductCommand($product));
}
|
Issue #<I>: Rename method addCommandToQueue to more expressive addUpdateProductCommandToQueue
|
diff --git a/launch_control/commands/dashboard.py b/launch_control/commands/dashboard.py
index <HASH>..<HASH> 100644
--- a/launch_control/commands/dashboard.py
+++ b/launch_control/commands/dashboard.py
@@ -85,5 +85,7 @@ class server_version(XMLRPCCommand):
Display dashboard server version
"""
+ __abstract__ = False
+
def invoke_remote(self):
print "Dashboard server version: %s" % (self.server.version(),)
diff --git a/launch_control/commands/dispatcher.py b/launch_control/commands/dispatcher.py
index <HASH>..<HASH> 100644
--- a/launch_control/commands/dispatcher.py
+++ b/launch_control/commands/dispatcher.py
@@ -25,6 +25,8 @@ class LaunchControlDispatcher(object):
self.subparsers = self.parser.add_subparsers(
title="Sub-command to invoke")
for command_cls in Command.get_subclasses():
+ if getattr(command_cls, '__abstract__', False):
+ continue
sub_parser = self.subparsers.add_parser(
command_cls.get_name(),
help=command_cls.get_help())
|
Make XMLRPCCommand (base class) not show up in lc-tool
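A self-contained sketch of the technique (hypothetical names, modern Python): base classes mark themselves with an `__abstract__` flag and the dispatcher filters them out when registering sub-commands:

```python
class Command:
    __abstract__ = True

class XMLRPCCommand(Command):
    __abstract__ = True            # shared helper base, should stay hidden

class server_version(XMLRPCCommand):
    __abstract__ = False           # concrete command, should appear in the CLI

def iter_commands(base):
    """Walk the subclass tree the way a dispatcher would."""
    for sub in base.__subclasses__():
        yield sub
        yield from iter_commands(sub)

registered = [cls.__name__ for cls in iter_commands(Command)
              if not getattr(cls, '__abstract__', False)]
print(registered)  # ['server_version']
```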
|
diff --git a/lib/princely/pdf_helper.rb b/lib/princely/pdf_helper.rb
index <HASH>..<HASH> 100644
--- a/lib/princely/pdf_helper.rb
+++ b/lib/princely/pdf_helper.rb
@@ -25,10 +25,11 @@ module Princely
:stylesheets => [],
:layout => false,
:template => File.join(controller_path, action_name),
- :relative_paths => true
+ :relative_paths => true,
+ :server_flag => true
}.merge(options)
- prince = Princely::Pdf.new
+ prince = Princely::Pdf.new(options.slice(:server_flag))
# Sets style sheets on PDF renderer
prince.add_style_sheets(*options[:stylesheets].collect{|style| asset_file_path(style)})
|
Accept options[:server_flag] from make_pdf
|
diff --git a/client/gutenberg/editor/hooks/components/media-upload/index.js b/client/gutenberg/editor/hooks/components/media-upload/index.js
index <HASH>..<HASH> 100644
--- a/client/gutenberg/editor/hooks/components/media-upload/index.js
+++ b/client/gutenberg/editor/hooks/components/media-upload/index.js
@@ -77,19 +77,17 @@ export class MediaUpload extends Component {
};
getEnabledFilters = () => {
- // TODO: Replace with `allowedTypes` (array) after updating Gutenberg
- const { type } = this.props;
- switch ( type ) {
- case 'image':
- return [ 'images' ];
- case 'audio':
- return [ 'audio' ];
- case 'video':
- return [ 'videos' ];
- case 'document':
- return [ 'documents' ];
- }
- return undefined;
+ const { allowedTypes } = this.props;
+
+ const enabledFiltersMap = {
+ image: 'images',
+ audio: 'audio',
+ video: 'videos',
+ };
+
+ return isArray( allowedTypes )
+ ? allowedTypes.map( type => enabledFiltersMap[ type ] )
+ : undefined;
};
getSelectedItems = () => {
|
Gutenberg: wire up Media library enabled filters (#<I>)
The existing mapping of allowed types to enabled filters was no longer
accurate after the package update. This fixes the mapping and refactors
the code to handle the cases when multiple allowed types can be returned.
|
diff --git a/src/util/node.js b/src/util/node.js
index <HASH>..<HASH> 100644
--- a/src/util/node.js
+++ b/src/util/node.js
@@ -1,3 +1,4 @@
+
/*jslint indent:2,white:true,sloppy:true,node:true */
/*
* freedom.js Node runtime
@@ -5,7 +6,20 @@
'use strict';
-var fdom = module.exports = {};
+var sources = [
+ '/..',
+ '/../link',
+ '/../proxy',
+ '/../../interface'
+];
+
+sources.forEach(function(dir) {
+ require('fs').readdirSync(__dirname + dir).forEach(function(file) {
+ if (file.match(/.+\.js/) !== null) {
+ require(__dirname + dir + '/' + file);
+ }
+ });
+});
module.exports.freedom = fdom.setup(global, undefined, {
portType: 'Node',
|
repair node functionality from a bad merge
|
diff --git a/go/libkb/env.go b/go/libkb/env.go
index <HASH>..<HASH> 100644
--- a/go/libkb/env.go
+++ b/go/libkb/env.go
@@ -255,6 +255,9 @@ func (e *Env) GetMountDir() (string, error) {
"%s (%s)", runmodeName, user.Username))
case "linux":
return filepath.Join(e.GetRuntimeDir(), "kbfs")
+ // kbfsdokan depends on an empty default
+ case "windows":
+ return ""
default:
return filepath.Join(e.GetRuntimeDir(), "kbfs")
}
|
make Env.GetMountDir() return empty by default on Windows (#<I>)
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,6 @@ setup(
test_suite='tests',
classifiers=[
- 'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
|
Remove development status from setup.py
The project status can be tracked via its version, so the classifier is unnecessary.
|
diff --git a/lib/olive_branch/middleware.rb b/lib/olive_branch/middleware.rb
index <HASH>..<HASH> 100644
--- a/lib/olive_branch/middleware.rb
+++ b/lib/olive_branch/middleware.rb
@@ -11,12 +11,14 @@ module OliveBranch
env["action_dispatch.request.request_parameters"].deep_transform_keys!(&:underscore)
end
- status, headers, response = @app.call(env)
-
- if inflection && headers["Content-Type"] =~ /application\/json/
- response.each do |body|
- begin
- new_response = JSON.parse(body)
+ @app.call(env).tap do |_status, headers, response|
+ if inflection && headers["Content-Type"] =~ /application\/json/
+ response.each do |body|
+ begin
+ new_response = JSON.parse(body)
+ rescue JSON::ParserError
+ next
+ end
if inflection == "camel"
new_response.deep_transform_keys! { |k| k.camelize(:lower) }
@@ -25,12 +27,9 @@ module OliveBranch
end
body.replace(new_response.to_json)
- rescue JSON::ParserError
end
end
end
-
- [status, headers, response]
end
end
end
|
Refactor middleware w/ tap
|
diff --git a/test_project/settings.py b/test_project/settings.py
index <HASH>..<HASH> 100644
--- a/test_project/settings.py
+++ b/test_project/settings.py
@@ -24,6 +24,8 @@ INSTALLED_APPS = (
'tester',
)
+SECRET_KEY = 'unique snowflake'
+
TEST_RUNNER = "test_runner.DJCETestSuiteRunner"
CELERY_ALWAYS_EAGER = True
|
Add dummy SECRET_KEY to test_project settings
Required to be non-empty since Django <I>
|
diff --git a/lib/svtplay_dl/service/svtplay.py b/lib/svtplay_dl/service/svtplay.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/service/svtplay.py
+++ b/lib/svtplay_dl/service/svtplay.py
@@ -38,7 +38,7 @@ class Svtplay(Service, OpenGraphThumbMixin):
if re.match("^[0-9]+$", vid):
old = True
- url = "http://www.svt.se/videoplayer-api/video/%s" % vid
+ url = "http://api.svt.se/videoplayer-api/video/%s" % vid
data = self.http.request("get", url)
if data.status_code == 404:
yield ServiceError("Can't get the json file for %s" % url)
|
svtplay: they changed the hostname for api calls
fixes #<I>
|
diff --git a/frontend/src/store/uiPropTypes.js b/frontend/src/store/uiPropTypes.js
index <HASH>..<HASH> 100644
--- a/frontend/src/store/uiPropTypes.js
+++ b/frontend/src/store/uiPropTypes.js
@@ -70,6 +70,16 @@ export const result = PropTypes.shape({
export const results = PropTypes.objectOf(result);
+export const deletedResult = PropTypes.shape({
+ id: resultId,
+ pathName: PropTypes.string,
+ name: PropTypes.string,
+ group: PropTypes.string,
+ isUnregistered: PropTypes.bool,
+});
+
+export const deletedResults = PropTypes.objectOf(result);
+
export const fetchState = PropTypes.shape({
resultList: PropTypes.string,
});
|
Define a prop type for deleted results
|
diff --git a/mstate/relation.go b/mstate/relation.go
index <HASH>..<HASH> 100644
--- a/mstate/relation.go
+++ b/mstate/relation.go
@@ -117,7 +117,7 @@ func (r *Relation) Life() Life {
// state. If the lifecycle transitioned concurrently so that the cache is
// stale, the call is valid but will yield no effect in the database.
func (r *Relation) SetLife(life Life) error {
- if !r.doc.Life.isNextValid(life) {
+ if !transitions[r.doc.Life][life] {
panic(fmt.Errorf("illegal lifecycle state change from %q to %q", r.doc.Life, life))
}
sel := bson.D{
diff --git a/mstate/state.go b/mstate/state.go
index <HASH>..<HASH> 100644
--- a/mstate/state.go
+++ b/mstate/state.go
@@ -17,10 +17,10 @@ const (
Alive Life = iota
Dying
Dead
- Nlife
+ nLife
)
-var lifeStrings = [Nlife]string{
+var lifeStrings = [nLife]string{
Alive: "alive",
Dying: "dying",
Dead: "dead",
@@ -30,7 +30,7 @@ func (l Life) String() string {
return lifeStrings[l]
}
-var transitions = [Nlife][Nlife]bool{
+var transitions = [nLife][nLife]bool{
Alive: {Dying: true},
Dying: {Dying: true, Dead: true},
Dead: {Dead: true},
|
mstate: rename Nlife to nLife
|
diff --git a/src/lib/widget/default.js b/src/lib/widget/default.js
index <HASH>..<HASH> 100644
--- a/src/lib/widget/default.js
+++ b/src/lib/widget/default.js
@@ -162,11 +162,13 @@ define(['../util', '../assets', '../i18n'], function(util, assets, i18n) {
evt.stopPropagation();
setState('initial');
document.body.removeEventListener('click', hideBubble);
+ document.body.removeEventListener('touchstart', hideBubble);
return false;
}
setCubeAction(hideBubble);
document.body.addEventListener('click', hideBubble);
+ document.body.addEventListener('touchstart', hideBubble);
elements.connectForm.userAddress.focus();
},
@@ -240,9 +242,11 @@ define(['../util', '../assets', '../i18n'], function(util, assets, i18n) {
if(visible) {
addClass(elements.bubble, 'hidden');
document.body.removeEventListener('click', handleBodyClick);
+ document.body.removeEventListener('touchstart', handleBodyClick);
} else {
removeClass(elements.bubble, 'hidden');
document.body.addEventListener('click', handleBodyClick);
+ document.body.addEventListener('touchstart', handleBodyClick);
}
visible = !visible;
return false;
|
bind to body's 'touchstart' event as well, to make stuff work on iSomething
|
diff --git a/test/configCases/target/amd-unnamed/index.js b/test/configCases/target/amd-unnamed/index.js
index <HASH>..<HASH> 100644
--- a/test/configCases/target/amd-unnamed/index.js
+++ b/test/configCases/target/amd-unnamed/index.js
@@ -6,5 +6,5 @@ it("should name define", function() {
var fs = require("fs");
var source = fs.readFileSync(__filename, "utf-8");
- expect(source).toMatch("define(function(");
+ expect(source).toMatch(/define\(\[[^\]]*\], function\(/);
});
|
Fix the "should name define" test in configCases/target/amd-unnamed
|
diff --git a/godotenv.go b/godotenv.go
index <HASH>..<HASH> 100644
--- a/godotenv.go
+++ b/godotenv.go
@@ -147,15 +147,20 @@ func Exec(filenames []string, cmd string, cmdArgs []string) error {
// Write serializes the given environment and writes it to a file
func Write(envMap map[string]string, filename string) error {
- content, error := Marshal(envMap)
- if error != nil {
- return error
+ content, err := Marshal(envMap)
+ if err != nil {
+ return err
+ }
+ file, err := os.Create(filename)
+ if err != nil {
+ return err
}
- file, error := os.Create(filename)
- if error != nil {
- return error
+ defer file.Close()
+ _, err = file.WriteString(content)
+ if err != nil {
+ return err
}
- _, err := file.WriteString(content)
+ file.Sync()
return err
}
|
Fixed Write bugs
This should address and fix #<I> and #<I>.
This has not yet been covered by any tests.
|
diff --git a/gwpy/timeseries/statevector.py b/gwpy/timeseries/statevector.py
index <HASH>..<HASH> 100644
--- a/gwpy/timeseries/statevector.py
+++ b/gwpy/timeseries/statevector.py
@@ -366,11 +366,15 @@ class StateVector(TimeSeries):
"""
if bits is None:
bits = [b for b in self.bits if b is not None]
- for i, b in enumerate(bits):
- if not b in self.bits and isinstance(b):
- bits[i] = self.bits[b]
+ bindex = []
+ for b in bits:
+ try:
+ bindex.append((self.bits.index(b), b))
+ except IndexError as e:
+ e.args = ('Bit %r not found in StateVector' % b)
+ raise e
self._bitseries = TimeSeriesDict()
- for i, bit in enumerate(self.bits):
+ for i, bit in bindex:
self._bitseries[bit] = StateTimeSeries(
self.data >> i & 1, name=bit, epoch=self.x0.value,
channel=self.channel, sample_rate=self.sample_rate)
|
StateVector.get_bit_series: fixed bugs
- presumably this function didn't work properly before
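A standalone sketch of the underlying pattern (hypothetical bit names, plain Python): resolve the requested bit names to their positions once, then extract each bit from the packed state word:

```python
bits = ['DMT-UP', 'DMT-CALIBRATED', 'DMT-INJECTION']   # hypothetical bit definitions
requested = ['DMT-CALIBRATED', 'DMT-INJECTION']

bindex = []
for b in requested:
    try:
        bindex.append((bits.index(b), b))
    except ValueError:                       # list.index signals a missing name this way
        raise ValueError('Bit %r not found in StateVector' % b)

state_word = 0b110                           # bits 1 and 2 set
for i, bit in bindex:
    print(bit, (state_word >> i) & 1)        # DMT-CALIBRATED 1 / DMT-INJECTION 1
```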
|
diff --git a/app/Blueprint/Webserver/WebserverBlueprint.php b/app/Blueprint/Webserver/WebserverBlueprint.php
index <HASH>..<HASH> 100644
--- a/app/Blueprint/Webserver/WebserverBlueprint.php
+++ b/app/Blueprint/Webserver/WebserverBlueprint.php
@@ -231,9 +231,9 @@ class WebserverBlueprint implements Blueprint {
protected function makeServerService(Configuration $config, Configuration $default) : Service {
$serverService = new Service();
$serverService->setName($config->get('service-name'));
- $serverService->setImage($config->get('docker.image', 'ipunktbs/nginx:1.9.7-7-1.2.9'));
+ $serverService->setImage($config->get('docker.image', 'ipunktbs/nginx:1.9.7-7-1.2.10'));
if( $config->get('debug-image', false) )
- $serverService->setImage($config->get('docker.image', 'ipunktbs/nginx-debug:debug-1.2.9'));
+ $serverService->setImage($config->get('docker.image', 'ipunktbs/nginx-debug:debug-1.2.10'));
if( $config->get('sync-user-into-container', false) ) {
$serverService->setEnvironmentVariable('USER_ID', getmyuid());
|
nginx image update to <I>
|
diff --git a/lib/mutator_rails/single_mutate.rb b/lib/mutator_rails/single_mutate.rb
index <HASH>..<HASH> 100644
--- a/lib/mutator_rails/single_mutate.rb
+++ b/lib/mutator_rails/single_mutate.rb
@@ -9,6 +9,7 @@ module MutatorRails
parms << preface(path.basename) + base
parms << '> ' + log
+ log_dir
cmd = first_run(parms)
rerun(cmd)
|
Force log directory to be made
|
diff --git a/eqcorrscan/core/match_filter.py b/eqcorrscan/core/match_filter.py
index <HASH>..<HASH> 100644
--- a/eqcorrscan/core/match_filter.py
+++ b/eqcorrscan/core/match_filter.py
@@ -2982,10 +2982,7 @@ class Detection(object):
chans=None, event=None, id=None):
"""Main class of Detection."""
self.template_name = template_name
- if not isinstance(detect_time, UTCDateTime):
- self.detect_time = UTCDateTime(detect_time)
- else:
- self.detect_time = detect_time
+ self.detect_time = detect_time
self.no_chans = int(no_chans)
if not isinstance(chans, list):
self.chans = [chans]
|
Do not force detect_time to be UTCDateTime
|
diff --git a/dgitcore/datasets/auto.py b/dgitcore/datasets/auto.py
index <HASH>..<HASH> 100644
--- a/dgitcore/datasets/auto.py
+++ b/dgitcore/datasets/auto.py
@@ -14,7 +14,7 @@ from datetime import datetime
# Exports
#####################################################
-__all__ = ['auto_update', 'auto_init',]
+__all__ = ['auto_update', 'auto_init', 'auto_get_repo']
def find_executable_files():
"""
@@ -173,7 +173,13 @@ def auto_init(autofile, force_init=False):
def auto_get_repo(autooptions, debug=False):
"""
- Clone this repo if exists. Otherwise create one...
+ Automatically get repo
+
+ Parameters
+ ----------
+
+ autooptions: dgit.json content
+
"""
# plugin manager
@@ -297,6 +303,12 @@ def auto_update(autofile, force_init):
# find all the files that must be collected
files = get_files_to_commit(autooptions)
+ if len(files) > 10:
+ print("Large number ({}) files are being added.".format(len(files)))
+ proceed = input("Do you wish to proceed? [yN] ")
+ if proceed != 'y':
+ return
+
# Add the files to the repo
count = auto_add(repo, autooptions, files)
if count == 0:
|
1. Export auto_get_repo as well
|
diff --git a/resource_aws_ecs_task_definition_test.go b/resource_aws_ecs_task_definition_test.go
index <HASH>..<HASH> 100644
--- a/resource_aws_ecs_task_definition_test.go
+++ b/resource_aws_ecs_task_definition_test.go
@@ -82,17 +82,19 @@ func testAccCheckAWSEcsTaskDefinitionDestroy(s *terraform.State) error {
continue
}
- out, err := conn.DescribeTaskDefinition(&ecs.DescribeTaskDefinitionInput{
- TaskDefinition: aws.String(rs.Primary.ID),
- })
+ input := ecs.DescribeTaskDefinitionInput{
+ TaskDefinition: aws.String(rs.Primary.Attributes["arn"]),
+ }
- if err == nil {
- if out.TaskDefinition != nil {
- return fmt.Errorf("ECS task definition still exists:\n%#v", *out.TaskDefinition)
- }
+ out, err := conn.DescribeTaskDefinition(&input)
+
+ if err != nil {
+ return err
}
- return err
+ if out.TaskDefinition != nil && *out.TaskDefinition.Status != "INACTIVE" {
+ return fmt.Errorf("ECS task definition still exists:\n%#v", *out.TaskDefinition)
+ }
}
return nil
|
aws: Treat INACTIVE ECS TDs as deleted in acc tests
- related to <URL>
|
diff --git a/test/database_rewinder_test.rb b/test/database_rewinder_test.rb
index <HASH>..<HASH> 100644
--- a/test/database_rewinder_test.rb
+++ b/test/database_rewinder_test.rb
@@ -2,6 +2,12 @@
require 'test_helper'
class DatabaseRewinder::DatabaseRewinderTest < ActiveSupport::TestCase
+ if ActiveRecord::VERSION::STRING >= '5'
+ self.use_transactional_tests = false
+ else
+ self.use_transactional_fixtures = false
+ end
+
setup do
DatabaseRewinder.init
end
@@ -117,12 +123,6 @@ class DatabaseRewinder::DatabaseRewinderTest < ActiveSupport::TestCase
if ActiveRecord::VERSION::STRING >= '4'
sub_test_case 'migrations' do
- if ActiveRecord::VERSION::STRING >= '5'
- self.use_transactional_tests = false
- else
- self.use_transactional_fixtures = false
- end
-
test '.clean_all should not touch AR::SchemaMigration' do
begin
ActiveRecord::Base.connection.initialize_schema_migrations_table
|
Actually use_transactional_tests for every test
|
diff --git a/lib/accesslib.php b/lib/accesslib.php
index <HASH>..<HASH> 100755
--- a/lib/accesslib.php
+++ b/lib/accesslib.php
@@ -4927,10 +4927,13 @@ function count_role_users($roleid, $context, $parent=false) {
$parentcontexts = '';
}
- $SQL = "SELECT count(*)
- FROM {$CFG->prefix}role_assignments r
- WHERE (r.contextid = $context->id $parentcontexts)
- AND r.roleid = $roleid";
+ $SQL = "SELECT count(u.id)
+ FROM {$CFG->prefix}role_assignments r
+ JOIN {$CFG->prefix}user u
+ ON u.id = r.userid
+ WHERE (r.contextid = $context->id $parentcontexts)
+ AND r.roleid = $roleid
+ AND u.deleted = 0";
return count_records_sql($SQL);
}
|
MDL-<I> count_role_users() was showing a different count to the one returned from
get_role_users(); based on a patch from Patrick Pollett
merged from MOODLE_<I>_STABLE
|
diff --git a/lib/page.js b/lib/page.js
index <HASH>..<HASH> 100644
--- a/lib/page.js
+++ b/lib/page.js
@@ -189,13 +189,15 @@ var Page = function( page_path, options ){
if (status == 404) {
server.logger.log(page.route + ' [' + resource.name + '] ' + error, 3);
} else {
- server.logger.log(page.route + ' [' + resource.name + '] ' + error, resource.options.optional ? 1 : 0);
- page.logToSentry(error, {
- error: err,
- resource: {name: resource.name, url: resource.resource.url},
- response: resource_response ? {status: resource_response.status, body: resource_response.data} : null,
- context: _.omit(context, 'resources')
- });
+ server.logger.log(page.route + ' [' + resource.name + '] ' + error, is_optional ? 1 : 0);
+ if (!is_optional) {
+ page.logToSentry(error, {
+ error: err,
+ resource: {name: resource.name, url: resource.resource.url},
+ response: resource_response ? {status: resource_response.status, body: resource_response.data} : null,
+ context: _.omit(context, 'resources')
+ });
+ }
}
err = {status: status, error: error, message: err, resource: resource.name};
|
Do not report optional resource errors to Sentry
|
diff --git a/Swat/SwatCheckboxEntryList.php b/Swat/SwatCheckboxEntryList.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatCheckboxEntryList.php
+++ b/Swat/SwatCheckboxEntryList.php
@@ -134,7 +134,7 @@ class SwatCheckboxEntryList extends SwatCheckboxList
$input_tag->removeAttribute('checked');
$input_tag->name = $this->id.'['.$key.']';
- if (in_array($option->value, $this->values))
+ if (array_key_exists($key ,$this->values))
$input_tag->checked = 'checked';
$input_tag->id = $this->id.'_'.$checkbox_id;
@@ -187,7 +187,7 @@ class SwatCheckboxEntryList extends SwatCheckboxList
$checkbox_id = $key.'_'.$value;
$widget = $this->getEntryWidget($checkbox_id);
$widget->process();
- $this->entry_values[$value] = $widget->value;
+ $this->entry_values[] = $widget->value;
}
}
|
Ticket #<I>: problems occurred when two options with the same value were added to the checkbox entry list widget. Now, when the form is submitted, the array keys of the options are compared with those of the values instead of the value names. This allows the right entries to remain checked and prevents other entries with the same value from suddenly becoming checked. Also, the entry_values array is now indexed by number rather than by value, which prevents two widgets with the same value from sharing a single entry in the entry_values array.
svn commit r<I>
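A minimal Python sketch (not Swat code) of the underlying pitfall: indexing collected entries by the option value collapses duplicates, while indexing positionally keeps one entry per widget.

# Hypothetical option widgets: two options share the value 'dvd'
widgets = [("opt1", "dvd"), ("opt2", "dvd"), ("opt3", "cd")]

# Keyed by value: duplicate values overwrite each other -> only 2 entries survive
by_value = {value: key for key, value in widgets}

# Appended positionally: one entry per widget -> all 3 survive
as_list = [value for _, value in widgets]

assert len(by_value) == 2 and len(as_list) == 3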
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -20,16 +20,14 @@ HashBase.prototype.update = function (data, encoding) {
if (!Buffer.isBuffer(data)) data = new Buffer(data, encoding || 'binary')
// consume data
+ var block = this._block
var offset = 0
while (this._blockOffset + data.length - offset >= this._blockSize) {
- for (var i = this._blockOffset; i < this._blockSize;) this._block[i++] = data[offset++]
+ for (var i = this._blockOffset; i < this._blockSize;) block[i++] = data[offset++]
this._update()
this._blockOffset = 0
}
-
- while (offset < data.length) {
- this._block[this._blockOffset++] = data[offset++]
- }
+ while (offset < data.length) block[this._blockOffset++] = data[offset++]
// update length
for (var j = 0, carry = data.length * 8; carry > 0; ++j) {
|
Use this._block as local variable
|
diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -129,4 +129,21 @@ describe('render-to-string', () => {
);
});
});
+
+ describe('className / class massaging', () => {
+ it('should render class using className', () => {
+ let rendered = render(<div className="foo bar" />);
+ expect(rendered).to.equal('<div class="foo bar"></div>');
+ });
+
+ it('should render class using class', () => {
+ let rendered = render(<div class="foo bar" />);
+ expect(rendered).to.equal('<div class="foo bar"></div>');
+ });
+
+ it('should prefer className over class', () => {
+ let rendered = render(<div class="foo" className="foo bar" />);
+ expect(rendered).to.equal('<div class="foo bar"></div>');
+ });
+ });
});
|
Tests for class/className support
|
diff --git a/src/test/java/stormpot/PoolTest.java b/src/test/java/stormpot/PoolTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/stormpot/PoolTest.java
+++ b/src/test/java/stormpot/PoolTest.java
@@ -1302,6 +1302,33 @@ public class PoolTest {
throws Exception {
shutdown(fixture.initPool(config)).await(timeout, null);
}
+
+ @Test(timeout = 300)
+ @Theory public void
+ mustCompleteShutDownEvenIfAllSlotsHaveNullErrors(PoolFixture fixture)
+ throws InterruptedException {
+ Allocator allocator = new CountingAllocator() {
+ @Override
+ public Poolable allocate(Slot slot) throws Exception {
+ return null;
+ }
+ };
+ Pool pool = givenPoolWithFailedAllocation(fixture, allocator);
+ // the shut-down procedure must complete before the test times out.
+ shutdown(pool).await();
+ }
+
+ private Pool givenPoolWithFailedAllocation(
+ PoolFixture fixture, Allocator allocator) {
+ Pool pool = fixture.initPool(config.setAllocator(allocator));
+ try {
+ // ensure at least one allocation attempt has taken place
+ pool.claim();
+ } catch (Exception _) {
+ // we don't care about this one
+ }
+ return pool;
+ }
// TODO test for resilience against spurious wake-ups?
// NOTE: When adding, removing or modifying tests, also remember to update
|
Pools must be able to complete their shut-down procedures even if the allocator has only ever returned null.
|
diff --git a/tscreen.go b/tscreen.go
index <HASH>..<HASH> 100644
--- a/tscreen.go
+++ b/tscreen.go
@@ -398,9 +398,12 @@ func (t *tScreen) Fini() {
t.clear = false
t.fini = true
- if t.quit != nil {
+ select {
+ case <-t.quit:
+ // do nothing, already closed
+
+ default:
close(t.quit)
- t.quit = nil
}
t.termioFini()
|
Fix data race in tScreen shutdown
Setting t.quit to nil while the mainLoop is running causes a
race condition when the Fini() method is called. This change
instead uses a select expression to avoid the nil check and set.
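The fix itself is Go-specific (a select over the quit channel). As a loose analogy only, an idempotent shutdown flag in Python could use threading.Event; this is a substitute mechanism for illustration, not what tcell does.

import threading

class Screen:
    def __init__(self):
        # Shared with the main loop; never reassigned to None after start.
        self._quit = threading.Event()

    def fini(self):
        # set() is idempotent and thread-safe, so concurrent callers cannot
        # race the way a close()-then-set-to-nil pattern can.
        self._quit.set()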
|
diff --git a/integration/integration_test.go b/integration/integration_test.go
index <HASH>..<HASH> 100644
--- a/integration/integration_test.go
+++ b/integration/integration_test.go
@@ -37,6 +37,7 @@ import (
// Load all supported backends.
_ "github.com/cayleygraph/cayley/graph/bolt"
+ _ "github.com/cayleygraph/cayley/graph/bolt2"
_ "github.com/cayleygraph/cayley/graph/leveldb"
_ "github.com/cayleygraph/cayley/graph/memstore"
_ "github.com/cayleygraph/cayley/graph/mongo"
@@ -452,7 +453,7 @@ func prepare(t testing.TB) {
switch *backend {
case "memstore":
cfg.DatabasePath = "../data/30kmoviedata.nq.gz"
- case "leveldb", "bolt":
+ case "leveldb", "bolt", "bolt2":
cfg.DatabasePath = "/tmp/cayley_test_" + *backend
cfg.DatabaseOptions = map[string]interface{}{
"nosync": true, // It's a test. If we need to load, do it fast.
|
Add memory profile and bolt2 to main/integration
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -32,7 +32,8 @@ function takeScreenshot(title) {
}
function ProshotReporter(runner) {
- runner.on('fail', function(test) {
+ runner.on('fail', function(test, err) {
+ test.err = err;
return takeScreenshot(test.fullTitle());
});
}
|
Fix #1: Add a workaround for protractor depending on internal behavior of builtin mocha reporters
|
diff --git a/lib/xcore.js b/lib/xcore.js
index <HASH>..<HASH> 100644
--- a/lib/xcore.js
+++ b/lib/xcore.js
@@ -237,7 +237,8 @@ XClient.prototype.unpackEvent = function(type, seq, extra, code, raw)
var event = {}; // TODO: constructor & base functions
// Remove the most significant bit. See Chapter 1, Event Format section in X11 protocol
// specification
- event.type = type && 0x7F;
+ type = type & 0x7F;
+ event.type = type;
event.seq = seq;
var extUnpacker = this.eventParsers[type];
|
Really fix the parsing of the event type field
- Typo: use the bitwise AND operator (&) and not the logical AND operator (&&)
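The original slip is JavaScript's && versus &; the same mistake is easy to reproduce in Python with `and` versus `&` (a minimal sketch, not the xcore.js code).

event_type = 0x85             # most significant bit set

wrong = event_type and 0x7F   # logical 'and' returns 0x7F because both operands are truthy
right = event_type & 0x7F     # bitwise mask strips the top bit -> 0x05

assert wrong == 0x7F
assert right == 0x05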
|
diff --git a/okhttp/src/main/java/com/squareup/okhttp/OkHttpClient.java b/okhttp/src/main/java/com/squareup/okhttp/OkHttpClient.java
index <HASH>..<HASH> 100644
--- a/okhttp/src/main/java/com/squareup/okhttp/OkHttpClient.java
+++ b/okhttp/src/main/java/com/squareup/okhttp/OkHttpClient.java
@@ -31,7 +31,7 @@ import java.net.ProxySelector;
import java.net.URLConnection;
import java.security.GeneralSecurityException;
import java.security.SecureRandom;
-import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.net.SocketFactory;
@@ -508,7 +508,7 @@ public class OkHttpClient implements Cloneable {
// etc.) may incorrectly be reflected in the request when it is executed.
OkHttpClient client = clone();
// Force HTTP/1.1 until the WebSocket over SPDY/HTTP2 spec is finalized.
- client.setProtocols(Arrays.asList(Protocol.HTTP_1_1));
+ client.setProtocols(Collections.singletonList(Protocol.HTTP_1_1));
return new WebSocket(client, request, new SecureRandom());
}
|
Use a singleton list which is slightly more efficient.
|
diff --git a/lib/beaker-pe/install/pe_utils.rb b/lib/beaker-pe/install/pe_utils.rb
index <HASH>..<HASH> 100644
--- a/lib/beaker-pe/install/pe_utils.rb
+++ b/lib/beaker-pe/install/pe_utils.rb
@@ -425,8 +425,8 @@ module Beaker
end
install_hosts.each do |host|
- #windows agents from 4.0 -> 2016.1.2 were only installable via aio method
- is_windows_msi_and_aio = (host['platform'] =~ /windows/ && (version_is_less(host['pe_ver'], '2016.3.0') && !version_is_less(host['pe_ver'], '3.99') && !(host['roles'].include?('frictionless'))))
+ #windows agents from 4.0 -> 2016.1.2 were only installable via the aio method
+ is_windows_msi_and_aio = (host['platform'] =~ /windows/ && (version_is_less(host['pe_ver'], '2016.3.0') && !version_is_less(host['pe_ver'], '3.99')))
if agent_only_check_needed && hosts_agent_only.include?(host) || is_windows_msi_and_aio
host['type'] = 'aio'
|
PE-<I> Fix Windows frictionless upgrades
There was an error with my previous PR. Upgrades from PE <I> to PE <I>
were failing because the PE <I> install of the Windows agent
would attempt to install via frictionless rather than with the old MSI
method.
This PR fixes that.
|
diff --git a/app/src/Bolt/Application.php b/app/src/Bolt/Application.php
index <HASH>..<HASH> 100644
--- a/app/src/Bolt/Application.php
+++ b/app/src/Bolt/Application.php
@@ -9,7 +9,7 @@ class Application extends BaseApplication
public function __construct(array $values = array())
{
$values['bolt_version'] = '1.3';
- $values['bolt_name'] = 'dev';
+ $values['bolt_name'] = 'beta';
parent::__construct($values);
}
|
Bumping version to "<I> beta".
|
diff --git a/lib/specinfra/helper/configuration.rb b/lib/specinfra/helper/configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/specinfra/helper/configuration.rb
+++ b/lib/specinfra/helper/configuration.rb
@@ -33,6 +33,7 @@ module Specinfra
else
value = RSpec.configuration.send(c) if defined?(RSpec)
end
+ next if c == :lxc && defined?(Serverspec::Type::Lxc) && value.is_a?(Serverspec::Type::Lxc)
Specinfra::Configuration.instance_variable_set("@#{c}", value)
end
end
|
Work around to avoid unintentionally replacing the lxc configuration
|
diff --git a/pdfwatermarker/watermark/utils.py b/pdfwatermarker/watermark/utils.py
index <HASH>..<HASH> 100644
--- a/pdfwatermarker/watermark/utils.py
+++ b/pdfwatermarker/watermark/utils.py
@@ -19,13 +19,12 @@ def bundle_dir():
def register_font(font='Vera.ttf'):
"""Register fonts for report labs canvas."""
- folder = bundle_dir + os.sep + 'lib' + os.sep + 'font'
+ folder = bundle_dir() + os.sep + 'lib' + os.sep + 'font'
ttfFile = resource_path(os.path.join(folder, font))
pdfmetrics.registerFont(TTFont("Vera", ttfFile))
return ttfFile
-bundle_dir = bundle_dir()
FONT = register_font()
LETTER = letter[1], letter[0]
-image_directory = str(bundle_dir + os.sep + 'lib' + os.sep + 'img')
\ No newline at end of file
+image_directory = str(bundle_dir() + os.sep + 'lib' + os.sep + 'img')
\ No newline at end of file
|
Created utils.py within the watermark module to house helper functions called at import time
|
diff --git a/src/props.js b/src/props.js
index <HASH>..<HASH> 100644
--- a/src/props.js
+++ b/src/props.js
@@ -1,4 +1,15 @@
export default {
+ type: {
+ type: String,
+ required: true,
+ validator: function(value) {
+ return (
+ ["card", "iban", "postalCode", "cardNumber", "cardExpiry", "cardCvc"]
+ .map(s => s.toLowerCase())
+ .indexOf(value.toLowerCase()) > -1
+ )
+ }
+ },
stripe: {
type: [String, Object], // stripe key or instance
required: true
@@ -9,10 +20,12 @@ export default {
},
options: {
type: Object,
- required: false
+ required: false,
+ default: () => ({})
},
stripeOptions: {
type: Object,
- required: false
+ required: false,
+ default: () => ({})
}
}
|
update(props): extracted props from StripeElement component
|
diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py
index <HASH>..<HASH> 100644
--- a/airflow/executors/local_executor.py
+++ b/airflow/executors/local_executor.py
@@ -125,12 +125,12 @@ class LocalWorkerBase(Process, LoggingMixin):
ret = 0
return State.SUCCESS
except Exception as e:
- self.log.error("Failed to execute task %s.", str(e))
+ self.log.exception("Failed to execute task %s.", e)
+ return State.FAILED
finally:
Sentry.flush()
logging.shutdown()
os._exit(ret)
- raise RuntimeError('unreachable -- keep mypy happy')
@abstractmethod
def do_work(self):
|
Log exception in local executor (#<I>)
|
diff --git a/example/src/screens/FirstTabScreen.js b/example/src/screens/FirstTabScreen.js
index <HASH>..<HASH> 100644
--- a/example/src/screens/FirstTabScreen.js
+++ b/example/src/screens/FirstTabScreen.js
@@ -137,6 +137,11 @@ export default class FirstTabScreen extends Component {
Navigation.startSingleScreenApp({
screen: {
screen: 'example.FirstTabScreen'
+ },
+ drawer: {
+ left: {
+ screen: 'example.SideMenu'
+ }
}
});
}
|
Fix example "show single screen app" now will show the side menu (drawer) (#<I>)
|
diff --git a/binstar_client/utils/projects/tests/test_projects.py b/binstar_client/utils/projects/tests/test_projects.py
index <HASH>..<HASH> 100644
--- a/binstar_client/utils/projects/tests/test_projects.py
+++ b/binstar_client/utils/projects/tests/test_projects.py
@@ -4,7 +4,7 @@ from binstar_client.utils.projects import get_files
def test_get_files():
- pfiles = get_files(example_path('bokeh-apps/weather'))
+ pfiles = sorted(get_files(example_path('bokeh-apps/weather')), key=lambda x: x['basename'])
assert len(pfiles) == 6
assert pfiles[0]['basename'] == '.projectignore'
assert pfiles[0]['relativepath'] == '.projectignore'
|
Make test_projects.py::test_get_files deterministic
It relied on list order that apparently was not guaranteed.
Sort the list explicitly.
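The general pattern, as a Python sketch (the path and helper below are illustrative, not the binstar code): never assert on positions of an unordered directory listing; sort with an explicit key first.

import os

def get_files(path):
    # Directory listing order is not guaranteed across filesystems.
    return [{"basename": name} for name in os.listdir(path)]

files = sorted(get_files("/tmp/example-project"), key=lambda f: f["basename"])
# files[0] is now deterministic, e.g. a leading '.projectignore' entry.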
|
diff --git a/openquake/engine/tools/make_html_report.py b/openquake/engine/tools/make_html_report.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/tools/make_html_report.py
+++ b/openquake/engine/tools/make_html_report.py
@@ -164,7 +164,7 @@ SELECT description, oq_job_id,
FROM uiapi.job_stats AS s
INNER JOIN uiapi.oq_job AS o
ON s.oq_job_id=o.id
- INNER JOIN uiapi.job_param AS p
+ LEFT JOIN uiapi.job_param AS p
ON o.id=p.job_id AND name='description') AS x
WHERE oq_job_id=%s;
'''
@@ -274,6 +274,7 @@ def make_report(conn, isodate='today'):
tag_status.append(status)
stats = fetcher.query(JOB_STATS, job_id)[1:]
if not stats:
+ import pdb; pdb.set_trace()
continue
(description, job_id, stop_time, status, disk_space,
duration) = stats[0]
|
Failed computations without a JobParam record must appear in the HTML report
|
diff --git a/createdb.py b/createdb.py
index <HASH>..<HASH> 100755
--- a/createdb.py
+++ b/createdb.py
@@ -31,4 +31,9 @@ if '--with-dev-data' in sys.argv:
detail_name="registration id", icon="phone",
placeholder="laksdjfasdlfkj183097falkfj109f"
)
+ context4 = fmn.lib.models.Context.create(
+ session, name="desktop", description="fedmsg-notify",
+ detail_name="None", icon="console",
+ placeholder="There's no need to put a value here"
+ )
session.commit()
|
Add the desktop context to the setup script.
|
diff --git a/spec/functional/resource/registry_spec.rb b/spec/functional/resource/registry_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/functional/resource/registry_spec.rb
+++ b/spec/functional/resource/registry_spec.rb
@@ -55,19 +55,13 @@ describe Chef::Resource::RegistryKey, :windows_only do
def clean_registry
# clean 64-bit space on WOW64
- begin
- hive_class.open(key_parent, Win32::Registry::KEY_WRITE | 0x0100) do |reg|
- reg.delete_key(child, true)
- end
- rescue
- end
+ @registry.architecture = :x86_64
+ @registry.delete_key(reg_parent, true)
+ @registry.architecture = :machine
# clean 32-bit space on WOW64
- begin
- hive_class.open(key_parent, Win32::Registry::KEY_WRITE | 0x0200) do |reg|
- reg.delete_key(child, true)
- end
- rescue
- end
+ @registry.architecture = :i386
+ @registry.delete_key(reg_parent, true)
+ @registry.architecture = :machine
end
def reset_registry
|
Change tests to use the new delete_key function to delete registry keys
|
diff --git a/tests/block/test_get_span.py b/tests/block/test_get_span.py
index <HASH>..<HASH> 100644
--- a/tests/block/test_get_span.py
+++ b/tests/block/test_get_span.py
@@ -52,15 +52,13 @@ def test(first_node_with_tokens):
assert result == (1, 13)
-@pytest.mark.parametrize(
- 'code_str', [
- '''
+@pytest.mark.parametrize('code_str', [
+ '''
long_string = """
"""
''',
- ]
-)
+])
def test_context_arrange(first_node_with_tokens):
"""
Long string spans are counted.
|
Fix lint: reformat example in get_span tests
|
diff --git a/react/Text/Text.js b/react/Text/Text.js
index <HASH>..<HASH> 100644
--- a/react/Text/Text.js
+++ b/react/Text/Text.js
@@ -49,6 +49,8 @@ const Text = ({
</span>
);
+Text.displayName = 'Text';
+
Text.propTypes = {
children: PropTypes.node.isRequired,
className: PropTypes.string,
|
fix(Text): Add display name to Text component (#<I>)
Due to decorating, shallow-render snapshots were getting DecoratedComponent rendered instead of
Text.
|
diff --git a/src/helpers.php b/src/helpers.php
index <HASH>..<HASH> 100644
--- a/src/helpers.php
+++ b/src/helpers.php
@@ -111,8 +111,10 @@ if (! function_exists('config')) {
if (! function_exists('file_size_convert')) {
function file_size_convert($size)
{
+ if ($size === 0) {
+ return '0 b';
+ }
$unit = ['b', 'kb', 'mb', 'gb', 'tb', 'pb'];
-
return round($size / pow(1024, ($i = floor(log($size, 1024)))), 2).' '.$unit[intval($i)];
}
}
|
:bug: Fixed a divide-by-zero in file_size_convert
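The same guard expressed as a Python sketch (not the PHP helper): log(0) blows up the exponent calculation, so zero must be special-cased before taking the logarithm.

import math

def file_size_convert(size):
    if size == 0:
        return "0 b"
    units = ["b", "kb", "mb", "gb", "tb", "pb"]
    i = int(math.floor(math.log(size, 1024)))
    return f"{round(size / 1024 ** i, 2)} {units[i]}"

assert file_size_convert(0) == "0 b"
assert file_size_convert(1536) == "1.5 kb"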
|