diff
stringlengths
65
26.7k
message
stringlengths
7
9.92k
diff --git a/make/config.js b/make/config.js index <HASH>..<HASH> 100644 --- a/make/config.js +++ b/make/config.js @@ -126,9 +126,7 @@ Promise.resolve(new ConfigFile()) config.content.host.java = javaInstalled; return askForQualityMetrics(config); }) + .then(() => config.save()) // Save before checking Selenium (which updates the configuration file) .then(() => askForSelenium(config)) - .then(() => fs.mkdirAsync("tmp")) - .then(undefined, () => {}) // ignore mkdir error - .then(() => config.save()) )["catch"](reason => console.error(reason.message));
Tmp folder exists, save before detecting selenium
diff --git a/redis_shard/shard.py b/redis_shard/shard.py index <HASH>..<HASH> 100644 --- a/redis_shard/shard.py +++ b/redis_shard/shard.py @@ -54,7 +54,7 @@ class RedisShardAPI(object): "lindex", "pop", "lset", "lrem", "sadd", "srem", "sismember", "smembers", - "zadd", "zrem", "zincr", + "zadd", "zrem", "zincr","zrank", "zrange", "zrevrange", "zrangebyscore","zremrangebyrank", "zremrangebyscore", "zcard", "zscore", "hget", "hset", "hdel", "hincrby", "hlen",
support zrank in redis shard
diff --git a/eval.go b/eval.go index <HASH>..<HASH> 100644 --- a/eval.go +++ b/eval.go @@ -1084,9 +1084,7 @@ func (st *Runtime) evalBaseExpressionGroup(node Node) reflect.Value { } // limit the number of pointers to follow - dereferenceLimit := 2 - for resolved.Kind() == reflect.Ptr && dereferenceLimit >= 0 { - dereferenceLimit-- + for dereferenceLimit := 2; resolved.Kind() == reflect.Ptr && dereferenceLimit >= 0; dereferenceLimit-- { if resolved.IsNil() { return reflect.ValueOf("") } @@ -1452,6 +1450,14 @@ func getFieldOrMethodValue(key string, v reflect.Value) reflect.Value { if value.Kind() == reflect.Interface && !value.IsNil() { value = value.Elem() } + + for dereferenceLimit := 2; value.Kind() == reflect.Ptr && dereferenceLimit >= 0; dereferenceLimit-- { + if value.IsNil() { + return reflect.ValueOf("") + } + value = reflect.Indirect(value) + } + return value }
Add support for pointer fields on structs
diff --git a/packages/date/src/DateRange.js b/packages/date/src/DateRange.js index <HASH>..<HASH> 100644 --- a/packages/date/src/DateRange.js +++ b/packages/date/src/DateRange.js @@ -67,7 +67,6 @@ const DateRange = ({ const [focusedInput, setFocusedInput] = useState(null); const calendarIconRef = useRef(); - const startId = `${(id || name).replace(/[^a-zA-Z0-9]/gi, '')}-start`; const endId = `${(id || name).replace(/[^a-zA-Z0-9]/gi, '')}-end`; @@ -159,7 +158,7 @@ const DateRange = ({ const onFocusChange = async input => { if (!input) await setFieldTouched(name, true); - if (autoSync) await syncDates(); + if (autoSync && !endValue) await syncDates(); setFocusedInput(input); if (onPickerFocusChange) onPickerFocusChange({ focusedInput: input }); };
fix(date): autosync infinite loop
diff --git a/acstools/utils_calib.py b/acstools/utils_calib.py index <HASH>..<HASH> 100644 --- a/acstools/utils_calib.py +++ b/acstools/utils_calib.py @@ -22,7 +22,7 @@ __all__ = ['extract_dark', 'extract_flash', 'extract_flatfield', 'check_overscan', 'SM4_MJD'] # The MJD date of the EVA during SM4 to restore ACS/WFC and ACS/HRC. -# This value is also defined in header file, acs.h, for us by calacs.e in hstcal +# This value is also defined in header file, acs.h, for use by calacs.e in hstcal SM4_MJD = 54967.0
Update acstools/utils_calib.py Resolve minor typo in code comment
diff --git a/lib/marked.js b/lib/marked.js index <HASH>..<HASH> 100644 --- a/lib/marked.js +++ b/lib/marked.js @@ -17,7 +17,7 @@ var block = { hr: /^( *[-*_]){3,} *(?:\n+|$)/, heading: /^ *(#{1,6}) *([^\n]+?) *#* *(?:\n+|$)/, nptable: noop, - lheading: /^([^\n]+)\n *(=|-){2,} *\n*/, + lheading: /^([^\n]+)\n *(=|-){2,} *(?:\n+|$)/, blockquote: /^( *>[^\n]+(\n[^\n]+)*\n*)+/, list: /^( *)(bull) [\s\S]+?(?:hr|\n{2,}(?! )(?!\1bull )\n*|\s*$)/, html: /^ *(?:comment|closed|closing) *(?:\n{2,}|\s*$)/,
fix new lheading rule (2 chars) for markdown guide test.
diff --git a/Content/Filter/Connect.php b/Content/Filter/Connect.php index <HASH>..<HASH> 100644 --- a/Content/Filter/Connect.php +++ b/Content/Filter/Connect.php @@ -31,7 +31,7 @@ class Connect implements FilterInterface ->doRequest( $params->getMandatoryParam('method'), $params->getMandatoryParam('uri'), - $params->getOptionalParam('payload') + $params->getOptionalParam('payload', null) ); return $this->twig->render(
No POST payload in case the payload is not set
diff --git a/etcdserver/server_test.go b/etcdserver/server_test.go index <HASH>..<HASH> 100644 --- a/etcdserver/server_test.go +++ b/etcdserver/server_test.go @@ -53,11 +53,6 @@ func testServer(t *testing.T, ns int64) { ss[i] = srv } - // TODO: find fast way to trigger leader election - // TODO: introduce the way to know that the leader has been elected - // then remove this sleep. - time.Sleep(110 * time.Millisecond) - for i := 1; i <= 10; i++ { r := pb.Request{ Method: "PUT",
etcdserver: remove useless sleep etcdserver.Do will block until there exists leader
diff --git a/src/urh/ui/views/SimulatorGraphicsView.py b/src/urh/ui/views/SimulatorGraphicsView.py index <HASH>..<HASH> 100644 --- a/src/urh/ui/views/SimulatorGraphicsView.py +++ b/src/urh/ui/views/SimulatorGraphicsView.py @@ -374,5 +374,6 @@ class SimulatorGraphicsView(QGraphicsView): for item in self.copied_items: assert isinstance(item, GraphicsItem) parent = item.model_item.parent() - self.scene().simulator_config.add_items([copy.deepcopy(item.model_item)], parent.child_count(), parent) + pos = parent.child_count() if parent is not None else 0 + self.scene().simulator_config.add_items([copy.deepcopy(item.model_item)], pos, parent)
fix crash when pasting to empty simulator scene
diff --git a/src/Aws/S3/S3SignatureV4.php b/src/Aws/S3/S3SignatureV4.php index <HASH>..<HASH> 100644 --- a/src/Aws/S3/S3SignatureV4.php +++ b/src/Aws/S3/S3SignatureV4.php @@ -31,14 +31,8 @@ class S3SignatureV4 extends SignatureV4 implements S3SignatureInterface */ public function signRequest(RequestInterface $request, CredentialsInterface $credentials) { - if ($request instanceof EntityEnclosingRequestInterface && - $request->getBody() && - !$request->hasHeader('x-amz-content-sha256') - ) { - $request->setHeader( - 'X-Amz-Content-Sha256', - $this->getPresignedPayload($request) - ); + if (!$request->hasHeader('x-amz-content-sha256')) { + $request->setHeader('x-amz-content-sha256', $this->getPresignedPayload($request)); } parent::signRequest($request, $credentials); @@ -54,7 +48,7 @@ class S3SignatureV4 extends SignatureV4 implements S3SignatureInterface // If the body is empty, then sign with 'UNSIGNED-PAYLOAD' if ($result === self::DEFAULT_PAYLOAD) { - $result = 'UNSIGNED-PAYLOAD'; + $result = hash('sha256', 'UNSIGNED-PAYLOAD'); } return $result;
Fixed an issue with the SignatureV4 implementation when used with Amazon S3.
diff --git a/packages/core/is-v2-ready-yet/src/Footer.js b/packages/core/is-v2-ready-yet/src/Footer.js index <HASH>..<HASH> 100644 --- a/packages/core/is-v2-ready-yet/src/Footer.js +++ b/packages/core/is-v2-ready-yet/src/Footer.js @@ -31,7 +31,7 @@ class Footer extends React.Component { Project Board </FooterLink> &middot; - <FooterLink href="parcel-bundler/parcel/tree/v2/packages/core/is-v2-ready-yet"> + <FooterLink href="parcel-bundler/parcel/tree/v2-work-so-far/packages/core/is-v2-ready-yet"> Page Source </FooterLink> </div>
is-v2-ready-yet page source url correction (#<I>)
diff --git a/client/driver/rkt.go b/client/driver/rkt.go index <HASH>..<HASH> 100644 --- a/client/driver/rkt.go +++ b/client/driver/rkt.go @@ -599,6 +599,7 @@ networkLoop: if status, err := rktGetStatus(uuid); err == nil { for _, net := range status.Networks { if !net.IP.IsGlobalUnicast() { + d.logger.Printf("[DEBUG] driver.rkt: network %s for pod %q (UUID %s) for task %q ignored", net.IP.String(), img, uuid, d.taskName) continue } @@ -625,6 +626,12 @@ networkLoop: } break networkLoop } + + if len(status.Networks) == 0 { + lastErr = fmt.Errorf("no networks found") + } else { + lastErr = fmt.Errorf("no good driver networks out of %d returned", len(status.Networks)) + } } else { lastErr = err }
Improve rkt driver network status poll loop The network status poll loop will now report any networks it ignored, as well as a no-networks situations.
diff --git a/src/toil/worker.py b/src/toil/worker.py index <HASH>..<HASH> 100644 --- a/src/toil/worker.py +++ b/src/toil/worker.py @@ -252,14 +252,18 @@ def main(): # The job is a checkpoint, and is being restarted after previously completing if jobGraph.checkpoint != None: logger.debug("Job is a checkpoint") + # If the checkpoint still has extant jobs in its + # (flattened) stack and services, its subtree didn't + # complete properly. We handle the restart of the + # checkpoint here, removing its previous subtree. if len([i for l in jobGraph.stack for i in l]) > 0 or len(jobGraph.services) > 0: logger.debug("Checkpoint has failed.") # Reduce the retry count assert jobGraph.remainingRetryCount >= 0 jobGraph.remainingRetryCount = max(0, jobGraph.remainingRetryCount - 1) jobGraph.restartCheckpoint(jobStore) - # Otherwise, the job and successors are done, and we can cleanup stuff we couldn't clean - # because of the job being a checkpoint + # Otherwise, the job and successors are done, and we can cleanup stuff we couldn't clean + # because of the job being a checkpoint else: logger.debug("The checkpoint jobs seems to have completed okay, removing any checkpoint files to delete.") #Delete any remnant files
Add comment about checkpoint restart (and comment indentation fixes)
diff --git a/salt/modules/chocolatey.py b/salt/modules/chocolatey.py index <HASH>..<HASH> 100644 --- a/salt/modules/chocolatey.py +++ b/salt/modules/chocolatey.py @@ -298,7 +298,7 @@ def list_windowsfeatures(): return result['stdout'] -def install(name, version=None, source=None, force=False): +def install(name, version=None, source=None, force=False, install_args=None): ''' Instructs Chocolatey to install a package. @@ -315,6 +315,10 @@ def install(name, version=None, source=None, force=False): force Reinstall the current version of an existing package. + install_args + A list of install arguments you want to pass to the installation process + i.e product key or feature list + CLI Example: .. code-block:: bash @@ -330,7 +334,9 @@ def install(name, version=None, source=None, force=False): if source: cmd.extend(['-Source', source]) if salt.utils.is_true(force): - cmd.append('-Force') + cmd.extend(['-Force']) + if install_args: + cmd.extend(['-InstallArguments', install_args]) result = __salt__['cmd.run_all'](cmd, python_shell=False) if result['retcode'] != 0:
Added the ability to pass install arguments to chocolatey packages
diff --git a/src/emir/recipes/image/shared.py b/src/emir/recipes/image/shared.py index <HASH>..<HASH> 100644 --- a/src/emir/recipes/image/shared.py +++ b/src/emir/recipes/image/shared.py @@ -189,8 +189,8 @@ class DirectImageCommon(object): # Basic processing # FIXME: add this - bpm = pyfits.getdata(self.parameters['master_bpm']) - bpm_corrector = BadPixelCorrector(bpm) + #bpm = pyfits.getdata(self.parameters['master_bpm']) + #bpm_corrector = BadPixelCorrector(bpm) if self.parameters['master_bias']: mbias = pyfits.getdata(self.parameters['master_bias']) @@ -205,7 +205,7 @@ class DirectImageCommon(object): mflat = pyfits.getdata(self.parameters['master_intensity_ff']) ff_corrector = FlatFieldCorrector(mflat) - basicflow = SerialFlow([bpm_corrector, + basicflow = SerialFlow([#bpm_corrector, bias_corrector, dark_corrector, nl_corrector,
Removing BPM line, is not working
diff --git a/src/Omniphx/Forrest/Providers/Laravel/ForrestServiceProvider.php b/src/Omniphx/Forrest/Providers/Laravel/ForrestServiceProvider.php index <HASH>..<HASH> 100644 --- a/src/Omniphx/Forrest/Providers/Laravel/ForrestServiceProvider.php +++ b/src/Omniphx/Forrest/Providers/Laravel/ForrestServiceProvider.php @@ -23,7 +23,7 @@ class ForrestServiceProvider extends ServiceProvider { $authentication = Config::get('forrest::authentication'); - include __DIR__ . "/routes/$authentication.php"; + include __DIR__ . "/Routes/$authentication.php"; } /**
Fixed routes path for case-sensitive systems.
diff --git a/ceph_deploy/__init__.py b/ceph_deploy/__init__.py index <HASH>..<HASH> 100644 --- a/ceph_deploy/__init__.py +++ b/ceph_deploy/__init__.py @@ -1,3 +1,3 @@ -__version__ = '1.5.37' +__version__ = '1.5.38'
[RM-<I>] bump to <I>
diff --git a/lib/get-webpack-config.js b/lib/get-webpack-config.js index <HASH>..<HASH> 100644 --- a/lib/get-webpack-config.js +++ b/lib/get-webpack-config.js @@ -105,8 +105,13 @@ module.exports = function (options) { output: { ascii_only: true }, + beautify: false, // 最紧凑的输出 + comments: false, // 删除所有的注释 compress: { - warnings: false + warnings: false, // 在UglifyJs删除没有用到的代码时不输出警告 + drop_console: true, // 删除所有的 `console` 语句, 还可以兼容ie浏览器 + collapse_vars: true, // 内嵌定义了但是只用到一次的变量 + reduce_vars: true // 提取出出现多次但是没有定义成变量去引用的静态值 } })); }
refactor: improve webpack UglifyJsPlugin config
diff --git a/src/Testability.php b/src/Testability.php index <HASH>..<HASH> 100644 --- a/src/Testability.php +++ b/src/Testability.php @@ -30,10 +30,12 @@ class Testability $this->excludeDirs = $exclude; } + /* public function setCloverReport ($file) { $this->cloverXML = $file; } + */ public function setCSV ($value) {
Commented out unused method (for now)
diff --git a/juju/model.py b/juju/model.py index <HASH>..<HASH> 100644 --- a/juju/model.py +++ b/juju/model.py @@ -1521,7 +1521,7 @@ class Model: if raw: return result_status - + result_str = self._print_status_model(result_status) result_str += '\n' result_str += self._print_status_apps(result_status) @@ -1560,7 +1560,7 @@ class Model: apps = result_status.applications if apps is None or len(apps) == 0: return '' - + limits = '{:<25} {:<10} {:<10} {:<5} {:<20} {:<8}' # print header result_str = limits.format( @@ -1597,12 +1597,12 @@ class Model: addr = unit.public_address if addr is None: addr = '' - + if unit.opened_ports is None: opened_ports = '' else: opened_ports = ','.join(unit.opened_ports) - + info = unit.workload_status.info if info is None: info = ''
Changes for lint.
diff --git a/lib/yard/templates/helpers/base_helper.rb b/lib/yard/templates/helpers/base_helper.rb index <HASH>..<HASH> 100644 --- a/lib/yard/templates/helpers/base_helper.rb +++ b/lib/yard/templates/helpers/base_helper.rb @@ -39,12 +39,8 @@ module YARD::Templates::Helpers url end - def format_object_name_list(objects) - objects.sort_by {|o| o.name.to_s.downcase }.join(", ") - end - def format_types(list, brackets = true) - list.empty? ? "" : (brackets ? "(#{list.join(", ")})" : list.join(", ")) + list.nil? || list.empty? ? "" : (brackets ? "(#{list.join(", ")})" : list.join(", ")) end def format_object_type(object)
Remove unused methods from BaseHelper
diff --git a/flake8_docstrings.py b/flake8_docstrings.py index <HASH>..<HASH> 100644 --- a/flake8_docstrings.py +++ b/flake8_docstrings.py @@ -6,6 +6,7 @@ included as module into flake8 """ import sys +from flake8_polyfill import stdin import pycodestyle try: import pydocstyle as pep257 @@ -15,7 +16,9 @@ except ImportError: module_name = 'pep257' __version__ = '1.0.2' -__all__ = ['pep257Checker'] +__all__ = ('pep257Checker',) + +stdin.monkey_patch('pycodestyle') class EnvironError(pep257.Error): diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ setup( 'D = flake8_docstrings:pep257Checker', ], }, - install_requires=['flake8', 'pydocstyle'], + install_requires=['flake8', 'pydocstyle', 'flake8-polyfill'], provides=['flake8_docstrings'], py_modules=['flake8_docstrings'], )
Use flake8-polyfill to monkey-patch stdin In order to support flake8 2.x and 3.x, we need to rely on flake8-polyfill to provide a compatibility shim here. Once we release flake8-docstrings that only relies on Flake8 3.x we can drop this shim.
diff --git a/lib/port.rb b/lib/port.rb index <HASH>..<HASH> 100644 --- a/lib/port.rb +++ b/lib/port.rb @@ -16,7 +16,7 @@ module Dataflow include Enumerable def each s = self - while 1 + loop do yield s.head s = s.tail end
Changed "while 1" to "loop do"
diff --git a/spyderlib/widgets/externalshell/pythonshell.py b/spyderlib/widgets/externalshell/pythonshell.py index <HASH>..<HASH> 100644 --- a/spyderlib/widgets/externalshell/pythonshell.py +++ b/spyderlib/widgets/externalshell/pythonshell.py @@ -387,10 +387,13 @@ The process may not exit as a result of clicking this button #-------------------------Python specific------------------------------- # Python arguments - p_args = ['-u'] + get_python_args(self.fname, self.python_args, - self.interact_action.isChecked(), - self.debug_action.isChecked(), - self.arguments) + p_args = ['-u'] + if DEBUG: + p_args += ['-v'] + p_args += get_python_args(self.fname, self.python_args, + self.interact_action.isChecked(), + self.debug_action.isChecked(), + self.arguments) env = [unicode(_path) for _path in self.process.systemEnvironment()]
Spyder's remote console: in debug mode (SPYDER_DEBUG=True), Python interpreters are in verbose mode (-v command line option). Update Issue <I> Status: Fixed @Steve: this is the changeset I was referring to in my previous comment.
diff --git a/salt/modules/ps.py b/salt/modules/ps.py index <HASH>..<HASH> 100755 --- a/salt/modules/ps.py +++ b/salt/modules/ps.py @@ -76,6 +76,8 @@ def top(num_processes=5, interval=3): else: cmdline = process.cmdline info = {'cmd': cmdline, + 'user': process.username, + 'status': process.status, 'pid': process.pid, 'create_time': process.create_time} for key, value in process.get_cpu_times()._asdict().items():
Add process username and status fields to top output
diff --git a/app/controllers/clickfunnels_auth/user_sessions_controller.rb b/app/controllers/clickfunnels_auth/user_sessions_controller.rb index <HASH>..<HASH> 100644 --- a/app/controllers/clickfunnels_auth/user_sessions_controller.rb +++ b/app/controllers/clickfunnels_auth/user_sessions_controller.rb @@ -24,7 +24,7 @@ class ClickfunnelsAuth::UserSessionsController < ClickfunnelsAuth::ApplicationCo user.access_tokens.create!({ token: omniauth['credentials']['token'], refresh_token: omniauth['credentials']['refresh_token'], - expires_at: Time.at(omniauth['credentials']['expires_at']) + expires_at: omniauth['credentials']['expires_at'] ? Time.at(omniauth['credentials']['expires_at']) : omniauth['credentials']['expires_at'] }) session[:user_id] = user.id
Allow for non-expiring tokens. Ugh.
diff --git a/lib/addressable/idna/pure.rb b/lib/addressable/idna/pure.rb index <HASH>..<HASH> 100644 --- a/lib/addressable/idna/pure.rb +++ b/lib/addressable/idna/pure.rb @@ -301,7 +301,7 @@ module Addressable (class <<self; private :lookup_unicode_lowercase; end) def self.lookup_unicode_composition(unpacked) - return COMPOSITION_TABLE[unpacked.pack("C*")] + return COMPOSITION_TABLE[unpacked] end (class <<self; private :lookup_unicode_composition; end) @@ -4567,7 +4567,7 @@ module Addressable exclusion = data[UNICODE_DATA_EXCLUSION] if canonical && exclusion == 0 - COMPOSITION_TABLE[canonical] = codepoint + COMPOSITION_TABLE[canonical.unpack("C*")] = codepoint end end
Fixed a bug with Ruby <I>.x unicode composition lookups.
diff --git a/src/tilesource.js b/src/tilesource.js index <HASH>..<HASH> 100644 --- a/src/tilesource.js +++ b/src/tilesource.js @@ -65,7 +65,7 @@ $.TileSource = function( width, height, tileSize, tileOverlap, minLevel, maxLeve height: args[1], tileSize: args[2], tileOverlap: args[3], - minlevel: args[4], + minLevel: args[4], maxLevel: args[5] }; }
applying patch provided by eikeon for position parameter constructor of TileSource. At some point I hope to deprecate most of these constructors that have more than two positional parameters.
diff --git a/src/Drupal/Driver/Fields/Drupal8/EntityReferenceHandler.php b/src/Drupal/Driver/Fields/Drupal8/EntityReferenceHandler.php index <HASH>..<HASH> 100644 --- a/src/Drupal/Driver/Fields/Drupal8/EntityReferenceHandler.php +++ b/src/Drupal/Driver/Fields/Drupal8/EntityReferenceHandler.php @@ -26,7 +26,7 @@ class EntityReferenceHandler extends AbstractHandler { // Determine target bundle restrictions. $target_bundle_key = NULL; - if (!$target_bundles = $this->getTargetBundles()) { + if ($target_bundles = $this->getTargetBundles()) { $target_bundle_key = $entity_definition->getKey('bundle'); }
Fix condition to get the target bundle key when there are target bundles
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -22,6 +22,8 @@ module.exports = function(json, options) { maxFileSize = options.maxFileSize; } const ajv = new Ajv({ + schemaId: 'id', // for draft-04 + meta: false, // don't load draft-07 meta schema allErrors: true, unknownFormats: true, errorDataPath: 'property', @@ -39,6 +41,9 @@ module.exports = function(json, options) { ajv.removeKeyword('propertyNames'); ajv.removeKeyword('contains'); ajv.removeKeyword('const'); + ajv.removeKeyword('if'); + ajv.removeKeyword('then'); + ajv.removeKeyword('else'); ajv.addKeyword('maxFileSize', { validate: function validateMaxFileSize(schema, data) {
fix: for ajv@6 with JSON Schema draft-<I>
diff --git a/isort/isort.py b/isort/isort.py index <HASH>..<HASH> 100644 --- a/isort/isort.py +++ b/isort/isort.py @@ -220,7 +220,7 @@ class _SortImports: module_name = str(module_name) if sub_imports and config["order_by_type"]: - if module_name.isupper() and len(module_name) > 1: + if module_name.isupper() and len(module_name) > 1: # see issue #376 prefix = "A" elif module_name[0:1].isupper(): prefix = "B"
Add comment mentioning relevant ticket at line upercasing letter
diff --git a/minicluster/src/main/java/tachyon/master/LocalTachyonCluster.java b/minicluster/src/main/java/tachyon/master/LocalTachyonCluster.java index <HASH>..<HASH> 100644 --- a/minicluster/src/main/java/tachyon/master/LocalTachyonCluster.java +++ b/minicluster/src/main/java/tachyon/master/LocalTachyonCluster.java @@ -388,6 +388,7 @@ public final class LocalTachyonCluster { if (mUfsCluster != null) { mUfsCluster.cleanup(); } + // TODO(gpang): is this line necessary? System.clearProperty("tachyon.underfs.address"); }
Add todo for investigating system property clearing
diff --git a/src/Sylius/Bundle/CoreBundle/Application/Kernel.php b/src/Sylius/Bundle/CoreBundle/Application/Kernel.php index <HASH>..<HASH> 100644 --- a/src/Sylius/Bundle/CoreBundle/Application/Kernel.php +++ b/src/Sylius/Bundle/CoreBundle/Application/Kernel.php @@ -31,12 +31,12 @@ use Webmozart\Assert\Assert; class Kernel extends HttpKernel { - public const VERSION = '1.3.13'; - public const VERSION_ID = '10313'; + public const VERSION = '1.3.14-DEV'; + public const VERSION_ID = '10314'; public const MAJOR_VERSION = '1'; public const MINOR_VERSION = '3'; - public const RELEASE_VERSION = '13'; - public const EXTRA_VERSION = ''; + public const RELEASE_VERSION = '14'; + public const EXTRA_VERSION = 'DEV'; public function __construct(string $environment, bool $debug) {
Change application's version to <I>-DEV
diff --git a/examples/apps/JavaScript/Chart/appclass.js b/examples/apps/JavaScript/Chart/appclass.js index <HASH>..<HASH> 100644 --- a/examples/apps/JavaScript/Chart/appclass.js +++ b/examples/apps/JavaScript/Chart/appclass.js @@ -155,10 +155,10 @@ F2.Apps["com_openf2_examples_javascript_chart"] = (function(){ // Add the up month/down month data to the chart's series - //issue #1 + //GH issue #1 //hcChartObj.series[1].setData(upSeriesData, false); //hcChartObj.series[2].setData(downSeriesData, false); - + hcChartObj.yAxis[0].setExtremes(dataRanges.dataMin, dataRanges.dataMax, true, false); this.ui.updateHeight();
Fixes chart in IE #1
diff --git a/rfc1869.js b/rfc1869.js index <HASH>..<HASH> 100644 --- a/rfc1869.js +++ b/rfc1869.js @@ -33,7 +33,7 @@ exports.parse = function(type, line) { var matches; while (matches = chew_regexp.exec(line)) { params.push(matches[1]); - line = line.slice(matches[1].length); + line = line.slice(matches[0].length); } params = params.reverse();
Need to zeroth element to slice off not the bracketed match
diff --git a/lib/safe_yaml/psych_resolver.rb b/lib/safe_yaml/psych_resolver.rb index <HASH>..<HASH> 100644 --- a/lib/safe_yaml/psych_resolver.rb +++ b/lib/safe_yaml/psych_resolver.rb @@ -14,7 +14,12 @@ module SafeYAML end def resolve_tree(tree) - resolve_node(tree)[0] + case tree + when Psych::Nodes::Document + resolve_node(tree)[0] + else + resolve_node(tree) + end end def resolve_alias(node)
fixed PsychResolver for older versions of Psych (Ruby <I>)
diff --git a/tests/VerbalExpressions/PHPVerbalExpressions/VerbalExpressionsTest.php b/tests/VerbalExpressions/PHPVerbalExpressions/VerbalExpressionsTest.php index <HASH>..<HASH> 100644 --- a/tests/VerbalExpressions/PHPVerbalExpressions/VerbalExpressionsTest.php +++ b/tests/VerbalExpressions/PHPVerbalExpressions/VerbalExpressionsTest.php @@ -560,7 +560,8 @@ class VerbalExpressionsTest extends PHPUnit_Framework_TestCase /** * @depends testGetRegex */ - public function testReplace() { + public function testReplace() + { $regex = new VerbalExpressions(); $regex->add('foo');
rolling back change to verify that psr-2 violations trigger a failed travis build
diff --git a/contrib/seccomp/seccomp_default.go b/contrib/seccomp/seccomp_default.go index <HASH>..<HASH> 100644 --- a/contrib/seccomp/seccomp_default.go +++ b/contrib/seccomp/seccomp_default.go @@ -235,11 +235,13 @@ func DefaultProfile(sp *specs.Spec) *specs.LinuxSeccomp { "prctl", "pread64", "preadv", + "preadv2", "prlimit64", "pselect6", "pselect6_time64", "pwrite64", "pwritev", + "pwritev2", "read", "readahead", "readlink",
seccomp: allow add preadv2 and pwritev2 syscalls
diff --git a/lib/dcell/messages.rb b/lib/dcell/messages.rb index <HASH>..<HASH> 100644 --- a/lib/dcell/messages.rb +++ b/lib/dcell/messages.rb @@ -26,6 +26,8 @@ module DCell # A request to open relay pipe class RelayOpen < Message + attr_reader :sender + def initialize(sender) @id = DCell.id @sender = sender @@ -92,6 +94,8 @@ module DCell # Ping message checks if remote node is alive or not class Ping < Message + attr_reader :sender + def initialize(sender) @sender = sender end
messages: export sender attribute for Ping and RelayOpen messages
diff --git a/src/Utils/XmlLoaderUtils.php b/src/Utils/XmlLoaderUtils.php index <HASH>..<HASH> 100644 --- a/src/Utils/XmlLoaderUtils.php +++ b/src/Utils/XmlLoaderUtils.php @@ -23,7 +23,10 @@ class XmlLoaderUtils throw new OpdsParserNotFoundException(); } - $content = fread($handle, filesize($file)); + + while (!feof($handle)) { + $content .= fread($handle, 8192); // 8192 : nombre d'octets équivalent à la taille d'un bloc + } fclose($handle); return new \SimpleXMLElement($content);
MBW-<I> update read file
diff --git a/kcp.go b/kcp.go index <HASH>..<HASH> 100644 --- a/kcp.go +++ b/kcp.go @@ -785,7 +785,7 @@ func (kcp *KCP) flush(ackOnly bool) { needsend = true segment.xmit++ segment.fastack = 0 - segment.resendts = current + segment.rto + segment.resendts = current + kcp.rx_rto change++ fastRetransSegs++ } else if segment.fastack > 0 && newSegsCount == 0 && @@ -793,7 +793,7 @@ func (kcp *KCP) flush(ackOnly bool) { needsend = true segment.xmit++ segment.fastack = 0 - segment.resendts = current + segment.rto + segment.resendts = current + kcp.rx_rto change++ earlyRetransSegs++ }
reset RTO for fastacked packets
diff --git a/api/src/main/java/io/neba/api/tags/package-info.java b/api/src/main/java/io/neba/api/tags/package-info.java index <HASH>..<HASH> 100644 --- a/api/src/main/java/io/neba/api/tags/package-info.java +++ b/api/src/main/java/io/neba/api/tags/package-info.java @@ -20,10 +20,11 @@ * of project-specific implementations to the NEBA core, i.e. to prevent * project code to inherit from these tag implementations. */ -@TagLibrary(value = "http://www.neba.io/core/1.0", +@TagLibrary( + value = "http://neba.io/1.0", descriptorFile = "neba.tld", shortName = "neba", - description = "NEBA core tag library", + description = "NEBA tag library", libraryVersion = "1.0") package io.neba.api.tags;
neba-9 Change the taglib URI to match the fact that it is published via the API bundle
diff --git a/icalevents/icalparser.py b/icalevents/icalparser.py index <HASH>..<HASH> 100644 --- a/icalevents/icalparser.py +++ b/icalevents/icalparser.py @@ -408,7 +408,10 @@ def parse_events(content, start=None, end=None, default_span=timedelta(days=7)): if exdate not in exceptions: found.append(e) # Filter out all events that are moved as indicated by the recurrence-id prop - return [event for event in found if e.sequence is None or not (event.uid, event.start, e.sequence) in recurrence_ids] + return [ + event for event in found + if e.sequence is None or not (event.uid, event.start, e.sequence) in recurrence_ids + ] def parse_rrule(component, tz=UTC):
Split up list comprehension for readability.
diff --git a/src/main/java/com/couchbase/lite/replicator/Pusher.java b/src/main/java/com/couchbase/lite/replicator/Pusher.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/couchbase/lite/replicator/Pusher.java +++ b/src/main/java/com/couchbase/lite/replicator/Pusher.java @@ -92,6 +92,7 @@ public final class Pusher extends Replication implements Database.ChangeListener return; } Log.v(Database.TAG, "Remote db might not exist; creating it..."); + asyncTaskStarted(); sendAsyncRequest("PUT", "", null, new RemoteRequestCompletionBlock() { @Override @@ -104,6 +105,7 @@ public final class Pusher extends Replication implements Database.ChangeListener } shouldCreateTarget = false; beginReplicating(); + asyncTaskFinished(1); } });
Issue #<I> - was not calling asyncTaskStarted() and asyncTaskFinished() when making remote request to create a remote DB. Needed for testPusher() to pass w/o having any exceptions. <URL>
diff --git a/lib/oxidized/pfsense.rb b/lib/oxidized/pfsense.rb index <HASH>..<HASH> 100644 --- a/lib/oxidized/pfsense.rb +++ b/lib/oxidized/pfsense.rb @@ -1,6 +1,5 @@ class PfSense < Oxidized::Model - prompt /^\e\[0;1;33m\[\S*\e\[0;1;33m\]\e\[0;1;33m\e\[\S*\e\[0;1;31m@\S*\e\[0;1;33m\]\S*\e\[0;1;31m:\e\[0;0;0m\s$/ comment '# ' @@ -20,6 +19,10 @@ class PfSense < Oxidized::Model end + cfg :ssh do + exec true + end + cfg :telnet do username /^Username:/ password /^Password:/
Switched from prompt grap to ssh exec
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -10,20 +10,20 @@ const util = require('./src/util') const version = require('./package').version -const install = function () { - return host.install() +const install = function (...args) { + return host.install(...args) } const environ = function () { return console.log(JSON.stringify(host.environ(), null, ' ')) // eslint-disable-line no-console } -const start = function (address='127.0.0.1', port=2000) { - return host.start(address, port) +const start = function (...args) { + return host.start(...args) } -const stop = function () { - return host.stop() +const stop = function (...args) { + return host.stop(...args) } -const run = function (address='127.0.0.1', port=2000, timeout=Infinity, duration=Infinity) { - return host.run(address, port, timeout, duration) +const run = function (...args) { + return host.run(...args) } module.exports = {
Pass args on to host methods
diff --git a/src/instrumentation/transaction.js b/src/instrumentation/transaction.js index <HASH>..<HASH> 100644 --- a/src/instrumentation/transaction.js +++ b/src/instrumentation/transaction.js @@ -1,5 +1,6 @@ var logger = require('../lib/logger') var Trace = require('./trace') +var utils = require('../lib/utils') var Transaction = function (queue, name, type, options) { this.metadata = {} @@ -9,6 +10,7 @@ var Transaction = function (queue, name, type, options) { this._markDoneAfterLastTrace = false this._isDone = false this._options = options + this.uuid = utils.generateUuid() this.traces = [] this._activeTraces = {}
Generate UUID on Transcations for easier debugging
diff --git a/core/src/main/java/hudson/matrix/MatrixBuild.java b/core/src/main/java/hudson/matrix/MatrixBuild.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/matrix/MatrixBuild.java +++ b/core/src/main/java/hudson/matrix/MatrixBuild.java @@ -104,7 +104,7 @@ public class MatrixBuild extends AbstractBuild<MatrixProject,MatrixBuild> { return; } - List<MatrixRun> runs = getRuns(); + List<MatrixRun> runs = getExactRuns(); for(MatrixRun run : runs){ why = run.getWhyKeepLog(); if (why!=null) {
When deleting whole matrix build, including sub-builds, delete only builds which actually ran, not linked ones
diff --git a/src/Pdf/Engine/DomPdfEngine.php b/src/Pdf/Engine/DomPdfEngine.php index <HASH>..<HASH> 100644 --- a/src/Pdf/Engine/DomPdfEngine.php +++ b/src/Pdf/Engine/DomPdfEngine.php @@ -2,7 +2,6 @@ namespace CakePdf\Pdf\Engine; use CakePdf\Pdf\CakePdf; -use Dompdf\Dompdf; class DomPdfEngine extends AbstractPdfEngine { @@ -30,7 +29,7 @@ class DomPdfEngine extends AbstractPdfEngine */ public function output() { - $DomPDF = new Dompdf(); + $DomPDF = new \DOMPDF(); $DomPDF->set_paper($this->_Pdf->pageSize(), $this->_Pdf->orientation()); $DomPDF->load_html($this->_Pdf->html()); $DomPDF->render();
Revert back namespace change for dompdf. Changing the namespace meant one needed to use dompdf <I>beta. Such a change should not have been done in a bugfix release.
diff --git a/core/src/main/java/org/springframework/security/authentication/ProviderManager.java b/core/src/main/java/org/springframework/security/authentication/ProviderManager.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/springframework/security/authentication/ProviderManager.java +++ b/core/src/main/java/org/springframework/security/authentication/ProviderManager.java @@ -205,7 +205,7 @@ public class ProviderManager extends AbstractAuthenticationManager implements Me * * @return {@link ConcurrentSessionController} instance */ - public ConcurrentSessionController getSessionController() { + ConcurrentSessionController getSessionController() { return sessionController; }
Removed public modifier from getSessionController() method on ProviderManager.
diff --git a/queue.go b/queue.go index <HASH>..<HASH> 100644 --- a/queue.go +++ b/queue.go @@ -50,7 +50,7 @@ func (q *queue) peek() interface{} { func (q *queue) dequeue() { q.head = (q.head+1) % len(q.buf) q.count-- - if len(q.buf) > minQueueLen && q.count*3 < len(q.buf) { + if len(q.buf) > minQueueLen && q.count*4 < len(q.buf) { q.resize() } }
Be slightly less aggressive in shrinking the queue. The power of two is marginally more efficient to calculate, and the less aggressive bounds mean we have to reallocate+copy less. It does mean we might waste slightly more memory when a large buffer is emptied, but given that garbage-collection means old slices aren't necessarily freed immediately anyway, that isn't a big deal.
diff --git a/django_tenants/migration_executors/base.py b/django_tenants/migration_executors/base.py index <HASH>..<HASH> 100644 --- a/django_tenants/migration_executors/base.py +++ b/django_tenants/migration_executors/base.py @@ -3,6 +3,7 @@ import sys from django.db import transaction from django.core.management.commands.migrate import Command as MigrateCommand +from django.db.migrations.recorder import MigrationRecorder from django_tenants.signals import schema_migrated, schema_migrate_message from django_tenants.utils import get_public_schema_name, get_tenant_database_alias @@ -38,6 +39,11 @@ def run_migrations(args, options, executor_codename, schema_name, tenant_type='' connection = connections[options.get('database', get_tenant_database_alias())] connection.set_schema(schema_name, tenant_type=tenant_type) + # ensure that django_migrations table is created in the schema before migrations run, otherwise the migration + # table in the public schema gets picked and no migrations are applied + migration_recorder = MigrationRecorder(connection) + migration_recorder.ensure_schema() + stdout = OutputWrapper(sys.stdout) stdout.style_func = style_func stderr = OutputWrapper(sys.stderr)
Ensure django_migrations table is created in a schema before migrations begin
diff --git a/websockets/protocol.py b/websockets/protocol.py index <HASH>..<HASH> 100644 --- a/websockets/protocol.py +++ b/websockets/protocol.py @@ -379,7 +379,7 @@ class WebSocketCommonProtocol(asyncio.StreamReaderProtocol): # longer than the worst case (2 * self.timeout) but not unlimited. if self.state == CLOSING: yield from asyncio.wait_for( - self.connection_closed, 3 * self.timeout, loop=self.loop) + self.worker, 3 * self.timeout, loop=self.loop) raise ConnectionClosed(self.close_code, self.close_reason) # Control may only reach this point in buggy third-party subclasses.
Prevent unintended timeout. The library considers that its job is done when the worker task exits. Usually connection_closed will have been set by connection_lost, but this is somewhat outside of our control, making it preferable to test consistently for the worker's termination.
diff --git a/lib/transit/handlers.rb b/lib/transit/handlers.rb index <HASH>..<HASH> 100644 --- a/lib/transit/handlers.rb +++ b/lib/transit/handlers.rb @@ -231,13 +231,13 @@ module Transit class SetHandler def tag(_) "set" end - def rep(s) TaggedValue.new("array", s.to_a) end + def rep(s) s.to_a end def string_rep(_) nil end end class ListHandler def tag(_) "list" end - def rep(l) TaggedValue.new("array", l.to_a) end + def rep(l) l.to_a end def string_rep(_) nil end end @@ -246,7 +246,7 @@ module Transit @type = type end def tag(_) @type end - def rep(a) TaggedValue.new("array", a.to_a) end + def rep(a) a.to_a end def string_rep(_) nil end end
remove unneeded use of TaggedValues (arrays already marshall as arrays)
diff --git a/io/flushing_writer_test.go b/io/flushing_writer_test.go index <HASH>..<HASH> 100644 --- a/io/flushing_writer_test.go +++ b/io/flushing_writer_test.go @@ -13,6 +13,7 @@ import ( "net" "net/http" "net/http/httptest" + "sync" "time" check "gopkg.in/check.v1" @@ -166,3 +167,24 @@ func (h *hijacker) Hijack() (net.Conn, *bufio.ReadWriter, error) { } return h.conn, &rw, nil } + +func (s *S) TestFlushingWriterFlushAfterWrite(c *check.C) { + wg := sync.WaitGroup{} + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + flusher, ok := w.(WriterFlusher) + c.Assert(ok, check.Equals, true) + fw := FlushingWriter{WriterFlusher: flusher} + defer fw.Flush() + for i := 0; i < 100; i++ { + wg.Add(1) + go func() { + defer wg.Done() + fw.Write([]byte("a")) + }() + } + })) + defer srv.Close() + _, err := http.Get(srv.URL) + c.Assert(err, check.IsNil) + wg.Wait() +}
io: add test to expose panic in write after close with flushing writer
diff --git a/spyder/widgets/editor.py b/spyder/widgets/editor.py index <HASH>..<HASH> 100644 --- a/spyder/widgets/editor.py +++ b/spyder/widgets/editor.py @@ -1318,6 +1318,9 @@ class EditorStack(QWidget): # Save the currently edited file index = self.get_stack_index() finfo = self.data[index] + # The next line is necessary to avoid checking if the file exists + # While running __check_file_status + # See issues 3678 and 3026 finfo.newly_created = True filename = self.select_savename(finfo.filename) if filename:
Added comment about the necessity of the change
diff --git a/sunspot_rails/lib/sunspot/rails/stub_session_proxy.rb b/sunspot_rails/lib/sunspot/rails/stub_session_proxy.rb index <HASH>..<HASH> 100644 --- a/sunspot_rails/lib/sunspot/rails/stub_session_proxy.rb +++ b/sunspot_rails/lib/sunspot/rails/stub_session_proxy.rb @@ -148,6 +148,7 @@ module Sunspot def previous_page nil end + alias :prev_page :previous_page def next_page nil
Added prev_page alias to stubbed PaginatedCollection Also see <URL>
diff --git a/src/Netzmacht/Bootstrap/Installer.php b/src/Netzmacht/Bootstrap/Installer.php index <HASH>..<HASH> 100644 --- a/src/Netzmacht/Bootstrap/Installer.php +++ b/src/Netzmacht/Bootstrap/Installer.php @@ -86,10 +86,10 @@ class Installer } if(!$success) { - \Controller::log("Error during creating symlink '$target'", 'Netzmacht\Bootstrap\Installer createSymlink', TL_ERROR); + \Controller::log("Error during creating symlink '$target'", 'Netzmacht\Bootstrap\Installer createSymlink', 'TL_ERROR'); } else { - \Controller::log("Created symlink '$target'", 'Netzmacht\Bootstrap\Installer createSymlink', TL_INFO); + \Controller::log("Created symlink '$target'", 'Netzmacht\Bootstrap\Installer createSymlink', 'TL_INFO'); } }
fix usage of undefined TL_INFO class
diff --git a/classes/fields/pick.php b/classes/fields/pick.php index <HASH>..<HASH> 100644 --- a/classes/fields/pick.php +++ b/classes/fields/pick.php @@ -1299,11 +1299,21 @@ class PodsField_Pick extends PodsField { $selected = false; if ( is_array( $args->value ) ) { - // Cast values in array as string. - $args->value = array_map( 'strval', $args->value ); + if ( ! isset( $args->value[0] ) ) { + $keys = array_map( 'strval', array_keys( $args->value ) ); - if ( in_array( (string) $item_id, $args->value, true ) ) { - $selected = true; + if ( in_array( (string) $item_id, $keys, true ) ) { + $selected = true; + } + } + + if ( ! $selected ) { + // Cast values in array as string. + $args->value = array_map( 'strval', $args->value ); + + if ( in_array( (string) $item_id, $args->value, true ) ) { + $selected = true; + } } } elseif ( (string) $item_id === (string) $args->value ) { $selected = true;
Records added modally via DFV's 'Add New' were not being selected
diff --git a/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java b/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java index <HASH>..<HASH> 100644 --- a/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java +++ b/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java @@ -443,7 +443,7 @@ public class ConfigurationTest { long original = ConfigurationProperties.maxFutureTimeout(); try { // then - default value - assertThat(configuration.maxFutureTimeoutInMillis(), equalTo(60000L)); + assertThat(configuration.maxFutureTimeoutInMillis(), equalTo(90000L)); // when - system property setter ConfigurationProperties.maxFutureTimeout(10L);
increase max future timeout by <I> seconds to allow for state retrieval where the log is very large - fixed test
diff --git a/js/lib/mediawiki.TokenTransformManager.js b/js/lib/mediawiki.TokenTransformManager.js index <HASH>..<HASH> 100644 --- a/js/lib/mediawiki.TokenTransformManager.js +++ b/js/lib/mediawiki.TokenTransformManager.js @@ -101,7 +101,10 @@ TokenTransformManager.prototype._cmpTransformations = function ( a, b ) { * @param {String} tag name for tags, omitted for non-tags */ TokenTransformManager.prototype.addTransform = function ( transformation, debug_name, rank, type, name ) { - var t = { rank: rank }; + var t = { + rank: rank, + name: debug_name + }; if (!this.env.trace) { t.transform = transformation; } else { @@ -741,6 +744,12 @@ SyncTokenTransformManager.prototype.onChunk = function ( tokens ) { //this.env.dp( 'sync res:', res ); if ( res.tokens && res.tokens.length ) { + if ( token.constructor === EOFTk && + res.tokens.last().constructor !== EOFTk ) { + console.error( 'ERROR: EOFTk was dropped by ' + transformer.name ); + // fix it up for now by adding it back in + res.tokens.push(token); + } // Splice in the returned tokens (while replacing the original // token), and process them next. var revTokens = res.tokens.slice();
Make sure the EOFTk is preserved in the SyncTokenTransformer And blame transformers when they drop it. Change-Id: Ib<I>fd<I>d0a3e<I>b0dfaf2bd9b4cd<I>f
diff --git a/src/__tests__/reduxForm.spec.js b/src/__tests__/reduxForm.spec.js index <HASH>..<HASH> 100644 --- a/src/__tests__/reduxForm.spec.js +++ b/src/__tests__/reduxForm.spec.js @@ -1566,6 +1566,33 @@ const describeReduxForm = (name, structure, combineReducers, expect) => { expect(decorated.wrappedInstance.props).toEqual(wrapped.props) }) + + it('should return an empty list if there are no registered fields', () => { + const store = makeStore({}) + + class Form extends Component { + render() { + return ( + <form> + </form> + ) + } + } + + const Decorated = reduxForm({ + form: 'testForm' + })(Form) + + const dom = TestUtils.renderIntoDocument( + <Provider store={store}> + <Decorated/> + </Provider> + ) + + const decorated = TestUtils.findRenderedComponentWithType(dom, Decorated) + + expect(decorated.refs.wrapped.getWrappedInstance().getFieldList()).toEqual([]) + }) }) }
added "empty registered fields" test from #<I>
diff --git a/alot/init.py b/alot/init.py index <HASH>..<HASH> 100755 --- a/alot/init.py +++ b/alot/init.py @@ -44,7 +44,7 @@ def parse_args(): choices=['debug', 'info', 'warning', 'error'], help='debug level') parser.add_argument('-l', dest='logfile', - default='debug.log', + default='/dev/null', help='logfile') parser.add_argument('query', nargs='?', default='tag:inbox AND NOT tag:killed', @@ -62,7 +62,8 @@ def main(): # setup logging numeric_loglevel = getattr(logging, args.debug_level.upper(), None) - logging.basicConfig(level=numeric_loglevel, filename=args.logfile) + logfilename = os.path.expanduser(args.logfile) + logging.basicConfig(level=numeric_loglevel, filename=logfilename) logger = logging.getLogger() # get ourselves a database manager
log to /dev/null by default
diff --git a/src/require.js b/src/require.js index <HASH>..<HASH> 100644 --- a/src/require.js +++ b/src/require.js @@ -119,6 +119,13 @@ function _gpfRequireResolve (name) { return _gpfPathJoin(this.base, name); } +function _gpfRequireDocumentStack (reason, name) { + if (!Array.isArray(reason.requires)) { + reason.requires = []; + } + reason.requires.push(name); +} + /** * Get the cached resource or load it * @@ -134,7 +141,10 @@ function _gpfRequireGet (name) { } promise = _gpfRequireLoad.call(me, name); me.cache[name] = promise; - return promise; + return promise["catch"](function (reason) { + _gpfRequireDocumentStack(reason, name); + return Promise.reject(reason); + }); } /**
Document requires path (#<I>)
diff --git a/ghost/admin/app/helpers/parse-member-event.js b/ghost/admin/app/helpers/parse-member-event.js index <HASH>..<HASH> 100644 --- a/ghost/admin/app/helpers/parse-member-event.js +++ b/ghost/admin/app/helpers/parse-member-event.js @@ -11,7 +11,7 @@ export default function parseMemberEvent(event, hasMultipleNewsletters) { let timestamp = moment(event.data.created_at); return { - memberId: event.data.member_id, + memberId: event.data.member_id ?? event.data.member?.id, member: event.data.member, emailId: event.data.email_id, email: event.data.email,
Fixed clicking on member in dashboard activity feed comment events refs <URL>
diff --git a/lib/dm-core.rb b/lib/dm-core.rb index <HASH>..<HASH> 100644 --- a/lib/dm-core.rb +++ b/lib/dm-core.rb @@ -17,7 +17,7 @@ require 'yaml' require 'rubygems' -gem 'addressable', '>=1.0.4' +gem 'addressable', '=2.0.0' require 'addressable/uri' gem 'extlib', '>=0.9.5'
Updated gem() dependency to be =<I>
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,5 +1,2 @@ gem 'rspec', '~> 2.4' require 'rspec' -require 'hexdump/version' - -include Hexdump
Do not include Hexdump.
diff --git a/api/src/opentrons/api/session.py b/api/src/opentrons/api/session.py index <HASH>..<HASH> 100755 --- a/api/src/opentrons/api/session.py +++ b/api/src/opentrons/api/session.py @@ -548,15 +548,6 @@ class Session(object): self._broker.set_logger(self._default_logger) return self - def identify(self): - self._hw_iface().identify() - - def turn_on_rail_lights(self): - self._hw_iface().set_lights(rails=True) - - def turn_off_rail_lights(self): - self._hw_ifce().set_lights(rails=False) - def set_state(self, state): log.debug("State set to {}".format(state)) if state not in VALID_STATES:
fix(api): typo in Session.turn_off_rail_lights (#<I>) * fix typo in Session.turn_off_rail_lights * remove identify, turn_on_rail_lights, and turn_off_rail_lights from Session. these methods are not used.
diff --git a/bernhard/__init__.py b/bernhard/__init__.py index <HASH>..<HASH> 100644 --- a/bernhard/__init__.py +++ b/bernhard/__init__.py @@ -15,8 +15,22 @@ class TransportError(Exception): class TCPTransport(object): def __init__(self, host, port): - self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.sock.connect((host, port)) + for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + self.sock = socket.socket(af, socktype, proto) + except socket.error, e: + self.sock = None + continue + try: + self.sock.connect(sa) + except socket.error, e: + self.sock.close() + self.sock = None + continue + break + if self.sock is None: + raise TransportError("Could not open socket.") def close(self): self.sock.close() @@ -58,7 +72,7 @@ class UDPTransport(object): self.sock = socket.socket(af, socktype, proto) self.host = sa[0] self.port = sa[1] - except socket.error as msg: + except socket.error, e: self.sock = None continue break
Add dual-stack support for TCP transport.
diff --git a/src/component.js b/src/component.js index <HASH>..<HASH> 100644 --- a/src/component.js +++ b/src/component.js @@ -1,5 +1,8 @@ -import { select, local } from "d3-selection"; -var instanceLocal = local(), +import { select } from "d3-selection"; +var instanceLocal = { + set: function (node, value){ node.__instance__ = value }, + get: function (node){ return node.__instance__; } + }, noop = function (){}; // no operation export default function (tagName, className){ @@ -56,7 +59,7 @@ export default function (tagName, className){ function destroyDescendant(){ var instance = instanceLocal.get(this); - instanceLocal.remove(this) && instance.destroy(); + instance && instance.destroy(); } component.render = function(_) { return (render = _, component); };
Un-adopt d3.local to avoid DOM walking performance hit
diff --git a/OpenPNM/Geometry/models/pore_diameter.py b/OpenPNM/Geometry/models/pore_diameter.py index <HASH>..<HASH> 100644 --- a/OpenPNM/Geometry/models/pore_diameter.py +++ b/OpenPNM/Geometry/models/pore_diameter.py @@ -4,6 +4,8 @@ pore_diameter =============================================================================== """ +from OpenPNM.Base import logging +_logger = logging.getLogger() from . import misc as _misc import scipy as _sp @@ -105,6 +107,9 @@ def largest_sphere(geometry, network, iters=10, **kwargs): am = network.create_adjacency_matrix(data=Lt, sprsfmt='lil', dropzeros=False) D[Ps] = D[Ps] + _sp.array([_sp.amin(row) for row in am.data])[Ps]*0.95 + if _sp.any(D < 0): + _logger.warning('Negative pore diameters found! Neighboring pores' + + ' must be larger than the pore spacing.') return D[network.pores(geometry.name)]
Added logger warning if negative pore diameters found.
diff --git a/src/Schema/JsonApi/DynamicEntitySchema.php b/src/Schema/JsonApi/DynamicEntitySchema.php index <HASH>..<HASH> 100644 --- a/src/Schema/JsonApi/DynamicEntitySchema.php +++ b/src/Schema/JsonApi/DynamicEntitySchema.php @@ -127,7 +127,7 @@ class DynamicEntitySchema extends BaseSchema ? $entity->getVisible() : $entity->visibleProperties(); foreach ($properties as $property) { - if ($property === '_joinData' || $property === '_matchingData') { + if ($property[0] === '_') { continue; }
Exclude internal properties (starts with underscore) from payload
diff --git a/twtxt/cli.py b/twtxt/cli.py index <HASH>..<HASH> 100644 --- a/twtxt/cli.py +++ b/twtxt/cli.py @@ -159,7 +159,7 @@ def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, ca help="Cache remote twtxt files locally. (Default: True") @click.argument("source") @click.pass_context -def view(ctx, pager, limit, sorting, timeout, porcelain, cache, source): +def view(ctx, **kwargs): """Show feed of given source.""" ctx.forward(timeline)
Use **kwargs for view command
diff --git a/lib/kue.js b/lib/kue.js index <HASH>..<HASH> 100755 --- a/lib/kue.js +++ b/lib/kue.js @@ -404,6 +404,14 @@ Queue.prototype.active = function (fn) { }; /** + * Delayed jobs. + */ + +Queue.prototype.delayed = function (fn) { + return this.state('delayed', fn); +}; + +/** * Completed jobs count. */
Add Queue.prototype.delayed function Queue.prototype.delayed appears to be missing, added to complete the suite (and I need it).
diff --git a/taboo.js b/taboo.js index <HASH>..<HASH> 100644 --- a/taboo.js +++ b/taboo.js @@ -97,7 +97,7 @@ function Taboo(tableName){ } else { options = defaultOptions; } - this.addColumns([header]); + this.addColumns([header], options); if (!options.silent){ this.triggerCallbacks('update'); } diff --git a/tests/tabooSpec.js b/tests/tabooSpec.js index <HASH>..<HASH> 100644 --- a/tests/tabooSpec.js +++ b/tests/tabooSpec.js @@ -101,6 +101,13 @@ describe("Taboo", function() { table.addColumn('hello'); expect(table.getColumnHeaders().length).toBe(2); }); + + it("should ignore duplicate headers if option passed", function(){ + table.addColumn('hello', {ignoreDuplicates:true}); + table.addColumn('hello', {ignoreDuplicates:true}); + console.log(table.print()); + expect(table.getColumnHeaders().length).toBe(1); + }); it('should be able to clone new copies of the table', function(){ table.addRows(dogs);
pass options from addColumn to addColumns
diff --git a/server.js b/server.js index <HASH>..<HASH> 100644 --- a/server.js +++ b/server.js @@ -78,6 +78,11 @@ exports.start = function(config) { }); - return { listen: server.listen.bind(server) }; + return { + listen: function(port) { + console.log("Game engine listening on port", port); + server.listen(port); + } + }; }; \ No newline at end of file
Changed a couple of configuration options and added a debug message when the server is started.
diff --git a/blocks/action-list/action-list.js b/blocks/action-list/action-list.js index <HASH>..<HASH> 100644 --- a/blocks/action-list/action-list.js +++ b/blocks/action-list/action-list.js @@ -97,8 +97,14 @@ define([ var $active = $el.parent().find(ACTIVE_SELECTOR); if ($active.length) { - var eventData = items[$el.parent().find(ITEM_ACTION_SELECTOR).index($active)]; - actionList.trigger('action_' + uid, (eventData && eventData.event[0] && eventData.event[0].data) || false); + var eventEl = items[$el.parent().find(ITEM_ACTION_SELECTOR).index($active)], + eventData; + if ((eventEl && eventEl.event && eventEl.event[0] && eventEl.event[0])) { + eventData = eventEl.event[0].data; + } else { + eventData = eventEl; + } + actionList.trigger('action_' + uid, eventData || false); return false; } else { @@ -164,7 +170,9 @@ define([ override: true }, getUID: { - method:function() { return uid; }, + method: function () { + return uid; + }, override: true } });
RG-<I> fix action-list trigger action event Former-commit-id: e<I>f<I>f<I>c<I>ade<I>dca1df<I>d9abc<I>
diff --git a/jbehave-core/src/main/java/org/jbehave/core/embedder/Embedder.java b/jbehave-core/src/main/java/org/jbehave/core/embedder/Embedder.java index <HASH>..<HASH> 100755 --- a/jbehave-core/src/main/java/org/jbehave/core/embedder/Embedder.java +++ b/jbehave-core/src/main/java/org/jbehave/core/embedder/Embedder.java @@ -312,7 +312,7 @@ public class Embedder { MetaFilter filter = metaFilter(); return storyManager.runningStory(storyId, story, filter, null).getFuture(); } - + public void reportStepdocs() { reportStepdocs(configuration(), candidateSteps()); } @@ -411,7 +411,7 @@ public class Embedder { return executorService; } - private StoryManager storyManager() { + public StoryManager storyManager() { return new StoryManager(configuration(), embedderControls(), embedderMonitor(), executorService(), stepsFactory(), storyRunner()); }
JBEHAVE-<I>: StoryManager now accessible via the public method from Embedder.
diff --git a/cmd/auto-pause/auto-pause.go b/cmd/auto-pause/auto-pause.go index <HASH>..<HASH> 100644 --- a/cmd/auto-pause/auto-pause.go +++ b/cmd/auto-pause/auto-pause.go @@ -103,7 +103,7 @@ func runPause() { mu.Lock() defer mu.Unlock() if runtimePaused { - out.Step(style.AddonEnable, "Auto-pause is already enabled.") + out.Styled(style.AddonEnable, "Auto-pause is already enabled.") return } diff --git a/pkg/addons/addons_autopause.go b/pkg/addons/addons_autopause.go index <HASH>..<HASH> 100644 --- a/pkg/addons/addons_autopause.go +++ b/pkg/addons/addons_autopause.go @@ -1,5 +1,5 @@ /* -Copyright 2022 The Kubernetes Authors All rights reserved. +Copyright 2021 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
Prefer out.Styled to out.Step, revert copyright
diff --git a/tests/Jaguar/Tests/Action/CropActionTest.php b/tests/Jaguar/Tests/Action/CropActionTest.php index <HASH>..<HASH> 100644 --- a/tests/Jaguar/Tests/Action/CropActionTest.php +++ b/tests/Jaguar/Tests/Action/CropActionTest.php @@ -25,4 +25,11 @@ class CropActionTest extends AbstractActionTest ); } + public function testSetGetBox() + { + $action = new CropAction(); + $this->assertSame($action, $action->setBox(new Box())); + $this->assertInstanceOf('\Jaguar\Box', $action->getBox()); + } + }
Improved Crop Action Test
diff --git a/src/actions.js b/src/actions.js index <HASH>..<HASH> 100644 --- a/src/actions.js +++ b/src/actions.js @@ -35,7 +35,7 @@ var actions = { actionType: RowConstants.QUERY_START }); client.runQuery(query, (err, res) => { - if (!err) { + if (!err && res) { Dispatcher.handleViewAction({ actionType: RowConstants.QUERY_DONE, value: res
Test not just that there's no error, but also that there's a result
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -72,7 +72,7 @@ module.exports = function (grunt) { ci: { configFile: 'karma.conf.js', singleRun: true, - browsers: ['PhantomJS'] + browsers: ['Firefox'] }, watch: { configFile: 'karma.conf.js',
Change karma ci task to use Firefox
diff --git a/findspark.py b/findspark.py index <HASH>..<HASH> 100644 --- a/findspark.py +++ b/findspark.py @@ -23,7 +23,8 @@ def find(): for path in [ '/usr/local/opt/apache-spark/libexec', # OS X Homebrew '/usr/lib/spark/', # AWS Amazon EMR - '/usr/local/spark/' # common linux path for spark + '/usr/local/spark/', # common linux path for spark + '/opt/spark/' # other common linux path for spark # Any other common places to look? ]: if os.path.exists(path):
Added another common linux path for spark
diff --git a/middleware/compress.go b/middleware/compress.go index <HASH>..<HASH> 100644 --- a/middleware/compress.go +++ b/middleware/compress.go @@ -92,6 +92,7 @@ var defaultContentTypes = map[string]struct{}{ "application/json": {}, "application/atom+xml": {}, "application/rss+xml": {}, + "image/svg+xml": {}, } // DefaultCompress is a middleware that compresses response
middleware: add image/svg+xml content-type to compress
diff --git a/liquibase-core/src/main/java/liquibase/datatype/core/UUIDType.java b/liquibase-core/src/main/java/liquibase/datatype/core/UUIDType.java index <HASH>..<HASH> 100644 --- a/liquibase-core/src/main/java/liquibase/datatype/core/UUIDType.java +++ b/liquibase-core/src/main/java/liquibase/datatype/core/UUIDType.java @@ -6,6 +6,7 @@ import liquibase.datatype.DataTypeInfo; import liquibase.datatype.DatabaseDataType; import liquibase.datatype.LiquibaseDataType; import liquibase.exception.DatabaseException; +import liquibase.statement.DatabaseFunction; @DataTypeInfo(name="uuid", aliases = {"uniqueidentifier"}, minParameters = 0, maxParameters = 0, priority = LiquibaseDataType.PRIORITY_DEFAULT) public class UUIDType extends LiquibaseDataType { @@ -35,7 +36,7 @@ public class UUIDType extends LiquibaseDataType { @Override public String objectToSql(Object value, Database database) { if (database instanceof MSSQLDatabase) { - return "'"+value+"'"; + return (value instanceof DatabaseFunction) ? database.generateDatabaseFunctionValue((DatabaseFunction) value) : "'" + value + "'"; } return super.objectToSql(value, database); }
defaultValueComputed is no longer quoted for GUID columns in MSSQL
diff --git a/src/umm/index.js b/src/umm/index.js index <HASH>..<HASH> 100644 --- a/src/umm/index.js +++ b/src/umm/index.js @@ -153,7 +153,7 @@ module.exports = ({ logger, middlewares, botfile, projectLocation, db, contentMa let markdown = await getDocument() // TODO Add more context - const fullContext = Object.assign({ + const fullContext = Object.assign({}, initialData, { user: incomingEvent.user, originalEvent: incomingEvent }, additionalData)
Merging initialData in UMM rendering
diff --git a/theanets/trainer.py b/theanets/trainer.py index <HASH>..<HASH> 100644 --- a/theanets/trainer.py +++ b/theanets/trainer.py @@ -180,7 +180,6 @@ class SGD(Trainer): logging.info('compiling %s learning function', self.__class__.__name__) self.f_learn = theano.function( network.inputs, - self.cost_exprs, updates=list(network.updates) + list(self.learning_updates())) def learning_updates(self): @@ -232,9 +231,15 @@ class SGD(Trainer): break try: + [self.train_minibatch(*x) for x in train_set] + except KeyboardInterrupt: + logging.info('interrupted!') + break + + try: costs = list(zip( self.cost_names, - np.mean([self.train_minibatch(*x) for x in train_set], axis=0))) + np.mean([self.f_eval(*x) for i, x in zip(range(3), train_set)], axis=0))) except KeyboardInterrupt: logging.info('interrupted!') break
Make f_learn an update-only function. To get monitor values for training data, run f_eval on three of the minibatches from the training set.
diff --git a/ics/parser.py b/ics/parser.py index <HASH>..<HASH> 100644 --- a/ics/parser.py +++ b/ics/parser.py @@ -127,11 +127,11 @@ if __name__ == "__main__": def printTree(elem, lvl=0): if isinstance(elem, list) or isinstance(elem, Container): if isinstance(elem, Container): - print(' '*lvl, elem.name) + print("{}{}".format(' '*lvl, elem.name)) for sub_elem in elem: printTree(sub_elem, lvl+1) elif isinstance(elem, ContentLine): - print(' '*lvl, elem.name, elem.params, elem.value) + print("{}{}{}".format(' '*lvl, elem.name, elem.params, elem.value)) else: print("Wuuut ?")
[fix] wrong format in printTree on py2
diff --git a/pkg/client/s3/bucket.go b/pkg/client/s3/bucket.go index <HASH>..<HASH> 100644 --- a/pkg/client/s3/bucket.go +++ b/pkg/client/s3/bucket.go @@ -180,10 +180,8 @@ func (c *s3Client) ListObjects(bucket, objectPrefix string) (items []*client.Ite if err != nil { return nil, err } - if len(items) > 0 { - return items, nil - } - return nil, os.ErrNotExist + // even if items are equal to '0' is valid case + return items, nil default: // Error return nil, err }
A zero-length items list in ListObjects is a valid case
diff --git a/phonopy/version.py b/phonopy/version.py index <HASH>..<HASH> 100644 --- a/phonopy/version.py +++ b/phonopy/version.py @@ -32,4 +32,4 @@ # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. -__version__ = "1.11.12" +__version__ = "1.11.14"
Increment version number to <I>
diff --git a/plugin/src/main/java/io/fabric8/maven/plugin/mojo/build/AbstractResourceMojo.java b/plugin/src/main/java/io/fabric8/maven/plugin/mojo/build/AbstractResourceMojo.java index <HASH>..<HASH> 100644 --- a/plugin/src/main/java/io/fabric8/maven/plugin/mojo/build/AbstractResourceMojo.java +++ b/plugin/src/main/java/io/fabric8/maven/plugin/mojo/build/AbstractResourceMojo.java @@ -95,8 +95,7 @@ public abstract class AbstractResourceMojo extends AbstractFabric8Mojo { public static File writeResourcesIndividualAndComposite(KubernetesList resources, File resourceFileBase, ResourceFileType resourceFileType, Logger log, Boolean generateRoute) throws MojoExecutionException { - List<HasMetadata> oldItemList = new ArrayList<>(); - oldItemList = resources.getItems(); + List<HasMetadata> oldItemList = resources.getItems(); List<HasMetadata> newItemList = new ArrayList<>(); @@ -118,7 +117,6 @@ public abstract class AbstractResourceMojo extends AbstractFabric8Mojo { // if the list contains a single Template lets unwrap it Template template = getSingletonTemplate(resources); if (template != null) { - System.out.println("In"); entity = template; }
Updated for removing pepperrboat issue
diff --git a/src/sap.m/src/sap/m/Dialog.js b/src/sap.m/src/sap/m/Dialog.js index <HASH>..<HASH> 100644 --- a/src/sap.m/src/sap/m/Dialog.js +++ b/src/sap.m/src/sap/m/Dialog.js @@ -685,7 +685,7 @@ sap.ui.define(['jquery.sap.global', './Bar', './InstanceManager', './Associative $dialogContent.height(parseInt($dialog.height(), 10)); } - if (this.getStretch()) { + if (this.getStretch() || this._bDisableRepositioning) { return; }
[FIX] sap.m.Dialog: Dialog repositioning is fixed - when dialog is resizeble and click on the resize corner, it jumps in top and left direction due to resize transformation. BCP: <I> Change-Id: I<I>e4be<I>ba8bd3a<I>eda3df2befe<I>df2f1a
diff --git a/irc3/plugins/casefold.py b/irc3/plugins/casefold.py index <HASH>..<HASH> 100644 --- a/irc3/plugins/casefold.py +++ b/irc3/plugins/casefold.py @@ -48,11 +48,14 @@ class Casefold(object): # casemapping @irc3.event(r'^:\S+ 005 \S+ .+CASEMAPPING.*') def recalculate_casemaps(self): - casemapping = self.bot.config.get('server_config', {}).get('CASEMAPPING', 'rfc1459') + casemapping = self.bot.config['server_config'].get('CASEMAPPING', + 'rfc1459') if casemapping == 'rfc1459': - lower_chars = string.ascii_lowercase + ''.join(chr(i) for i in range(123, 127)) - upper_chars = string.ascii_uppercase + ''.join(chr(i) for i in range(91, 95)) + lower_chars = (string.ascii_lowercase + + ''.join(chr(i) for i in range(123, 127))) + upper_chars = (string.ascii_uppercase + + ''.join(chr(i) for i in range(91, 95))) elif casemapping == 'ascii': lower_chars = string.ascii_lowercase
Fix Casefolding PEP8 errors
diff --git a/gcsio/src/main/java/com/google/cloud/hadoop/gcsio/StorageStubProvider.java b/gcsio/src/main/java/com/google/cloud/hadoop/gcsio/StorageStubProvider.java index <HASH>..<HASH> 100644 --- a/gcsio/src/main/java/com/google/cloud/hadoop/gcsio/StorageStubProvider.java +++ b/gcsio/src/main/java/com/google/cloud/hadoop/gcsio/StorageStubProvider.java @@ -186,10 +186,11 @@ public class StorageStubProvider { .put("maxAttempts", GRPC_MAX_RETRY_ATTEMPTS) .put( "initialBackoff", - Durations.fromMillis(readOptions.getBackoffInitialIntervalMillis()).toString()) + Durations.toString( + Durations.fromMillis(readOptions.getBackoffInitialIntervalMillis()))) .put( "maxBackoff", - Durations.fromMillis(readOptions.getBackoffMaxIntervalMillis()).toString()) + Durations.toString(Durations.fromMillis(readOptions.getBackoffMaxIntervalMillis()))) .put("backoffMultiplier", readOptions.getBackoffMultiplier()) .put("retryableStatusCodes", ImmutableList.of("UNAVAILABLE", "RESOURCE_EXHAUSTED")) .build();
Fix duration format for grpc retry (#<I>)
diff --git a/test/browser/interactive.js b/test/browser/interactive.js index <HASH>..<HASH> 100644 --- a/test/browser/interactive.js +++ b/test/browser/interactive.js @@ -60,8 +60,8 @@ function execute() { eval(code); error.text(''); } catch (e) { + console.error(e); var text = 'Error: ' + e.message; - console.warn(text); error.text(text); setRight(empty); }
interactive page: print full error to the console
diff --git a/lib/adhearsion/foundation/object.rb b/lib/adhearsion/foundation/object.rb index <HASH>..<HASH> 100644 --- a/lib/adhearsion/foundation/object.rb +++ b/lib/adhearsion/foundation/object.rb @@ -1,16 +1,5 @@ require 'adhearsion/logging' -# Monkey patch Object to support the #tap method. -# This method is present in Ruby 1.8.7 and later. -unless Object.respond_to?(:tap) - class Object - def tap - yield self - self - end - end -end - class Object def pb_logger logger
[CS] Remove definition of Object#tap since all supported Ruby platforms support it natively
diff --git a/openquake/server/tests/tests.py b/openquake/server/tests/tests.py index <HASH>..<HASH> 100644 --- a/openquake/server/tests/tests.py +++ b/openquake/server/tests/tests.py @@ -110,12 +110,7 @@ class EngineServerTestCase(unittest.TestCase): def setUp(self): if sys.version_info[0] == 2: - # in Python 2 the tests fail when doing _lgeos = load_dll( - # 'geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so']) - raise unittest.SkipTest('Python 2') - - def setUp(self): - if sys.version_info[0] == 2: + # python 2 will die raise unittest.SkipTest('Python 2') # tests
Merged from master [skip CI]
diff --git a/src/main/java/org/robolectric/res/AndroidResourcePathFinder.java b/src/main/java/org/robolectric/res/AndroidResourcePathFinder.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/robolectric/res/AndroidResourcePathFinder.java +++ b/src/main/java/org/robolectric/res/AndroidResourcePathFinder.java @@ -3,12 +3,7 @@ package org.robolectric.res; import android.R; import org.robolectric.util.PropertiesHelper; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStreamReader; -import java.util.List; +import java.io.*; import java.util.Properties; public class AndroidResourcePathFinder { @@ -37,7 +32,7 @@ public class AndroidResourcePathFinder { return resourcePath; } - throw new RuntimeException("Unable to find path to Android SDK"); + throw new RuntimeException("Unable to find path to Android SDK, (you probably need a local.properties file, see: http://pivotal.github.com/robolectric/resources.html"); } private String getAndroidResourcePathFromLocalProperties() {
Point users to the website when their tests can't find Android.
diff --git a/trimesh/exchange/gltf.py b/trimesh/exchange/gltf.py index <HASH>..<HASH> 100644 --- a/trimesh/exchange/gltf.py +++ b/trimesh/exchange/gltf.py @@ -1122,6 +1122,10 @@ def _append_material(mat, tree, buffer_items): except BaseException: pass + # if alphaMode is defined, export + if isinstance(mat.alphaMode, str): + pbr['alphaMode'] = mat.alphaMode + # if scalars are defined correctly export if isinstance(mat.metallicFactor, float): pbr['metallicFactor'] = mat.metallicFactor diff --git a/trimesh/visual/material.py b/trimesh/visual/material.py index <HASH>..<HASH> 100644 --- a/trimesh/visual/material.py +++ b/trimesh/visual/material.py @@ -135,7 +135,7 @@ class PBRMaterial(Material): self.doubleSided = doubleSided # str - alphaMode = alphaMode + self.alphaMode = alphaMode def to_color(self, uv): """
Preserves the alphaMode when reading and writing gltf
diff --git a/lib/pseudohiki/blockparser.rb b/lib/pseudohiki/blockparser.rb index <HASH>..<HASH> 100644 --- a/lib/pseudohiki/blockparser.rb +++ b/lib/pseudohiki/blockparser.rb @@ -411,11 +411,5 @@ module PseudoHiki end end end - -# class << Formatter[HeadingLeaf] -# def make_html_element(tree) -# create_element(@element_name+tree.nominal_level.to_s) -# end -# end end end
removed XhtmlFormat::Formatter[HeadingLeaf].make_html_element that is unnecessary now