diff
stringlengths 65
26.7k
| message
stringlengths 7
9.92k
|
|---|---|
diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -5,6 +5,7 @@ var express = require('express')
exports.reset = function() {
server = express()
+ server.disable('x-powered-by')
server.use(express.urlencoded())
server.use(express.json())
server.use(express.methodOverride())
|
disable `X-Powered-By` header
|
diff --git a/cobra/core/Metabolite.py b/cobra/core/Metabolite.py
index <HASH>..<HASH> 100644
--- a/cobra/core/Metabolite.py
+++ b/cobra/core/Metabolite.py
@@ -132,7 +132,7 @@ class Metabolite(Species):
the_coefficient = the_reaction._metabolites[self]
the_reaction.subtract_metabolites({self: the_coefficient})
elif method.lower() == 'destructive':
- for x in self._reaction():
+ for x in self._reaction:
x.remove_from_model()
else:
raise Exception(method + " is not 'subtractive' or 'destructive'")
|
Correct bug in remove_from_model in Metabolite.py
Correct a small bug in remove_from_model in core/Metabolite.py when method == "destructive'.
|
diff --git a/size.go b/size.go
index <HASH>..<HASH> 100644
--- a/size.go
+++ b/size.go
@@ -7,26 +7,24 @@ import (
"strings"
)
-type unit int64
-
// See: http://en.wikipedia.org/wiki/Binary_prefix
const (
// Decimal
- KB unit = 1000
- MB = 1000 * KB
- GB = 1000 * MB
- TB = 1000 * GB
- PB = 1000 * TB
+ KB = 1000
+ MB = 1000 * KB
+ GB = 1000 * MB
+ TB = 1000 * GB
+ PB = 1000 * TB
// Binary
- KiB unit = 1024
- MiB = 1024 * KiB
- GiB = 1024 * MiB
- TiB = 1024 * GiB
- PiB = 1024 * TiB
+ KiB = 1024
+ MiB = 1024 * KiB
+ GiB = 1024 * MiB
+ TiB = 1024 * GiB
+ PiB = 1024 * TiB
)
-type unitMap map[string]unit
+type unitMap map[string]int64
var (
decimalMap = unitMap{"k": KB, "m": MB, "g": GB, "t": TB, "p": PB}
@@ -81,7 +79,7 @@ func parseSize(sizeStr string, uMap unitMap) (int64, error) {
unitPrefix := strings.ToLower(matches[2])
if mul, ok := uMap[unitPrefix]; ok {
- size *= int64(mul)
+ size *= mul
}
return size, nil
|
pkg/units: Unit constants directly int<I>
int<I> seems sufficient
Docker-DCO-<I>-
|
diff --git a/paperweight/document.py b/paperweight/document.py
index <HASH>..<HASH> 100755
--- a/paperweight/document.py
+++ b/paperweight/document.py
@@ -119,6 +119,7 @@ class FilesystemTexDocument(TexDocument):
def __init__(self, path, recursive=True):
# read the tex document
self._filepath = path
+ root = os.path.dirname(os.path.abspath(path))
with codecs.open(path, 'r', encoding='utf-8') as f:
text = f.read()
super(FilesystemTexDocument, self).__init__(text)
@@ -126,7 +127,7 @@ class FilesystemTexDocument(TexDocument):
child_paths = self.find_input_documents()
for path in child_paths:
# FIXME may need to deal with path normalization here.
- self._children[path] = FilesystemTexDocument(path,
+ self._children[path] = FilesystemTexDocument(root+os.sep+path,
recursive=True)
def _file_exists(self, path):
|
fix for issue with recursive processing of files where they are not
in the current working directory
|
diff --git a/stanza/tests/test_tokenize_data.py b/stanza/tests/test_tokenize_data.py
index <HASH>..<HASH> 100644
--- a/stanza/tests/test_tokenize_data.py
+++ b/stanza/tests/test_tokenize_data.py
@@ -23,6 +23,7 @@ FAKE_PROPERTIES = {
"lang":"de",
'feat_funcs': ("space_before","capitalized"),
'max_seqlen': 300,
+ 'use_dictionary': False,
}
def test_has_mwt():
|
Update test for new tokenizer args after the dictionary was added
|
diff --git a/test/acceptance/app_test.rb b/test/acceptance/app_test.rb
index <HASH>..<HASH> 100644
--- a/test/acceptance/app_test.rb
+++ b/test/acceptance/app_test.rb
@@ -220,6 +220,9 @@ class AppTest < ActiveSupport::TestCase
end
test "app gets reloaded when preloaded files change (listen watcher)" do
+ # listen with ruby 2.0.0-rc1 crashes on travis, revisit when they install 2.0.0-p0
+ skip if RUBY_VERSION == "2.0.0" && RUBY_PATCHLEVEL == -1
+
begin
gemfile = app_root.join("Gemfile")
gemfile_contents = gemfile.read
|
Disable test on <I>-rc1 for now
|
diff --git a/lib/obfuscate_id.rb b/lib/obfuscate_id.rb
index <HASH>..<HASH> 100644
--- a/lib/obfuscate_id.rb
+++ b/lib/obfuscate_id.rb
@@ -1,2 +1,37 @@
module ObfuscateId
+
+ def obfuscate_id
+ extend ClassMethods
+ include InstanceMethods
+ end
+
+ def self.hide(id)
+ id.to_i + 100
+ end
+
+ def self.show(id)
+ id.to_i - 100
+ end
+
+ module ClassMethods
+ def find(*args)
+ if has_obfuscated_id?
+ args[0] = ObfuscateId.show(args[0])
+ end
+ super(*args)
+ end
+
+ def has_obfuscated_id?
+ true
+ end
+ end
+
+ module InstanceMethods
+ def to_param
+ ObfuscateId.hide(self.id)
+ end
+
+ end
end
+
+ActiveRecord::Base.extend ObfuscateId
diff --git a/spec/dummy/app/models/post.rb b/spec/dummy/app/models/post.rb
index <HASH>..<HASH> 100644
--- a/spec/dummy/app/models/post.rb
+++ b/spec/dummy/app/models/post.rb
@@ -1,2 +1,3 @@
class Post < ActiveRecord::Base
+ obfuscate_id
end
|
added methods to update find and to_param and add them to active record - specs pass
|
diff --git a/src/sap.m/test/sap/m/visual/SinglePlanningCalendarCellAndAppNav.spec.js b/src/sap.m/test/sap/m/visual/SinglePlanningCalendarCellAndAppNav.spec.js
index <HASH>..<HASH> 100644
--- a/src/sap.m/test/sap/m/visual/SinglePlanningCalendarCellAndAppNav.spec.js
+++ b/src/sap.m/test/sap/m/visual/SinglePlanningCalendarCellAndAppNav.spec.js
@@ -5,6 +5,8 @@ describe("sap.m.SinglePlanningCalendarCellAndAppNav", function() {
var CTRL_KEY = process.platform === 'darwin' ? protractor.Key.META : protractor.Key.CONTROL;
+ browser.testrunner.currentSuite.meta.controlName = "sap.m.SinglePlanningCalendar";
+
it("should select 2 appointments with Ctrl/Cmd + Click", function () {
var oSPC = element(by.id("SinglePlanningCalendar"));
|
[INTERNAL] sap.m.SinglePlanningCalendar: visual test added to ownership
Change-Id: I7cbcb3f<I>c<I>bf<I>c<I>f<I>d8f<I>ad<I>b
|
diff --git a/lib/zyre.js b/lib/zyre.js
index <HASH>..<HASH> 100644
--- a/lib/zyre.js
+++ b/lib/zyre.js
@@ -141,10 +141,11 @@ class Zyre extends EventEmitter {
* @return {Promise}
*/
stop(callback) {
+ this._zyreNode.removeAllListeners();
+ this._zyrePeers.removeAllListeners();
+ this._zyrePeers.disconnectAll();
+
return new Promise((resolve) => {
- this._zyreNode.removeAllListeners();
- this._zyrePeers.removeAllListeners();
- this._zyrePeers.disconnectAll();
this._zBeacon.stop().then(() => {
this._zyreNode.stopListening().then(() => {
if (typeof callback === 'function') callback();
|
Move sync functions out of async promise body
|
diff --git a/lib/modules/apostrophe-ui/public/js/ui.js b/lib/modules/apostrophe-ui/public/js/ui.js
index <HASH>..<HASH> 100644
--- a/lib/modules/apostrophe-ui/public/js/ui.js
+++ b/lib/modules/apostrophe-ui/public/js/ui.js
@@ -534,7 +534,7 @@ apos.define('apostrophe-ui', {
restore($old, $new);
function autoPreserveText($context) {
- $context.find('input[name],textarea[name]').each(function() {
+ $context.find('input[name]:focus,textarea[name]:focus').each(function() {
var $el = $(this);
if (!is($el, 'preserve')) {
attr($el, 'preserve', $el.attr('name'));
|
text elements should be autopreserved on AJAX only if they currently have the focus. Makes it easier to update the value
|
diff --git a/cake/libs/model/datasources/dbo/dbo_mysql.php b/cake/libs/model/datasources/dbo/dbo_mysql.php
index <HASH>..<HASH> 100644
--- a/cake/libs/model/datasources/dbo/dbo_mysql.php
+++ b/cake/libs/model/datasources/dbo/dbo_mysql.php
@@ -313,12 +313,7 @@ class DboMysql extends DboSource {
* @return in
*/
function lastInsertId($source = null) {
- $id = $this->fetchRow('SELECT LAST_INSERT_ID() AS insertID', false);
- if ($id !== false && !empty($id) && !empty($id[0]) && isset($id[0]['insertID'])) {
- return $id[0]['insertID'];
- }
-
- return null;
+ return $this->_connection->lastInsertId();
}
/**
|
Using PDO method to get lastInsertId
|
diff --git a/lib/fb_graph2/comment.rb b/lib/fb_graph2/comment.rb
index <HASH>..<HASH> 100644
--- a/lib/fb_graph2/comment.rb
+++ b/lib/fb_graph2/comment.rb
@@ -6,8 +6,8 @@ module FbGraph2
register_attributes(
raw: [:can_comment, :can_remove, :comment_count, :like_count, :message, :user_likes, :is_hidden, :can_hide],
time: [:created_time],
- user: [:from],
comment: [:parent],
+ profile: [:from],
profiles: [:message_tags],
custom: [:attachment]
)
|
Comment#from can be a Page, use "profile" for auto User/Page detection.
close #<I>
|
diff --git a/lib/chef/knife/raw_essentials.rb b/lib/chef/knife/raw_essentials.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/knife/raw_essentials.rb
+++ b/lib/chef/knife/raw_essentials.rb
@@ -4,12 +4,12 @@ class Chef
class Knife
remove_const(:Raw) if const_defined?(:Raw) && Raw.name == 'Chef::Knife::Raw' # override Chef's version
class Raw < Chef::Knife
- ChefFS = ::ChefFS
banner "knife raw REQUEST_PATH"
deps do
require 'json'
- require 'chef_fs/data_handler/data_handler_base'
+ require 'chef/rest'
+ require 'chef/config'
require 'chef_fs/raw_request'
end
@@ -48,7 +48,7 @@ class Chef
end
chef_rest = Chef::REST.new(Chef::Config[:chef_server_url])
begin
- output ChefFS::RawRequest.api_request(chef_rest, config[:method].to_sym, chef_rest.create_url(name_args[0]), {}, data)
+ output ::ChefFS::RawRequest.api_request(chef_rest, config[:method].to_sym, chef_rest.create_url(name_args[0]), {}, data)
rescue Timeout::Error => e
ui.error "Server timeout"
exit 1
|
Fix knife raw (Chef = ::ChefFS exception)
|
diff --git a/src/ar/validation.php b/src/ar/validation.php
index <HASH>..<HASH> 100755
--- a/src/ar/validation.php
+++ b/src/ar/validation.php
@@ -89,7 +89,7 @@ return [
'string' => 'يجب أن يكون طول النص :attribute على الأقل :min حروفٍ/حرفًا.',
'array' => 'يجب أن يحتوي :attribute على الأقل على :min عُنصرًا/عناصر.',
],
- 'not_in' => ':attribute موجود.',
+ 'not_in' => 'العنصر :attribute غير صحيح.',
'not_regex' => 'صيغة :attribute غير صحيحة.',
'numeric' => 'يجب على :attribute أن يكون رقمًا.',
'present' => 'يجب تقديم :attribute.',
|
[ar] Update validation.not_in
The old translation was not clear this what is says
`:attribute exists`
I changed to the equivalent of the original laravel translation which says
`The selected :attribute is invalid.`
|
diff --git a/core/src/main/resources/lib/form/repeatable/repeatable.js b/core/src/main/resources/lib/form/repeatable/repeatable.js
index <HASH>..<HASH> 100644
--- a/core/src/main/resources/lib/form/repeatable/repeatable.js
+++ b/core/src/main/resources/lib/form/repeatable/repeatable.js
@@ -186,7 +186,8 @@ Behaviour.specify("INPUT.repeatable-delete", 'repeatable', 0, function(e) {
});
// radio buttons in repeatable content
-Behaviour.specify("DIV.repeated-chunk", 'repeatable', 0, function(d) {
+// Needs to run before the radioBlock behavior so that names are already unique.
+Behaviour.specify("DIV.repeated-chunk", 'repeatable', -200, function(d) {
var inputs = d.getElementsByTagName('INPUT');
for (var i = 0; i < inputs.length; i++) {
if (inputs[i].type == 'radio') {
|
[JENKINS-<I>] Uniquify names before setting initial visibility
|
diff --git a/src/Util.php b/src/Util.php
index <HASH>..<HASH> 100644
--- a/src/Util.php
+++ b/src/Util.php
@@ -160,14 +160,6 @@ final class Util
// ORM = first L octets of T
/** @var string $orm */
$orm = Binary::safeSubstr($t, 0, $length);
-
- // @codeCoverageIgnoreStart
- if (!\is_string($orm)) {
- throw new CannotPerformOperation(
- 'An unknown error has occurred'
- );
- }
- // @codeCoverageIgnoreEnd
return $orm;
}
|
This check is actually not needed with constant_time_encoding v2+
|
diff --git a/bettercache/utils.py b/bettercache/utils.py
index <HASH>..<HASH> 100644
--- a/bettercache/utils.py
+++ b/bettercache/utils.py
@@ -8,6 +8,9 @@ from django.utils.cache import cc_delim_re
from django.utils.encoding import smart_str
from django.utils.http import http_date, parse_http_date
+import logging
+logger = logging.getLogger()
+
CACHEABLE_STATUS = [200, 203, 300, 301, 404, 410]
class CachingMixin(object):
@@ -114,6 +117,7 @@ class CachingMixin(object):
"""
# try and get the cached GET response
cache_key = self.cache_key(request)
+ logger.error(cache_key)
cached_response = cache.get(cache_key, None)
# if it wasn't found and we are looking for a HEAD, try looking for a corresponding GET
if cached_response is None and request.method == 'HEAD':
@@ -129,7 +133,7 @@ class CachingMixin(object):
""" the cache key is the absolute uri and the request method """
if method is None:
method = request.method
- return "page_cache:%s:%s" %(request.build_absolute_uri, method)
+ return "page_cache:%s:%s" %(request.build_absolute_uri(), method)
def get_header_dict(response, header):
|
[CMSPERF-<I>] changes from today's OTIS session
|
diff --git a/test/karma.conf.js b/test/karma.conf.js
index <HASH>..<HASH> 100644
--- a/test/karma.conf.js
+++ b/test/karma.conf.js
@@ -22,7 +22,9 @@ module.exports = function (config) {
browsers: ['Chrome'],
captureTimeout: 60000,
singleRun: false,
- preprocessors: { 'src/js/**/!(app|intro|outro).js': 'coverage' },
+ preprocessors: {
+ 'src/js/**/!(app|intro|outro|ui|settings).js': 'coverage'
+ },
coverageReporter: {
type: 'lcov',
dir: 'test/coverage/',
|
remove ui settings from coverage.
|
diff --git a/Lib/ufo2fdk/fdkBridge.py b/Lib/ufo2fdk/fdkBridge.py
index <HASH>..<HASH> 100644
--- a/Lib/ufo2fdk/fdkBridge.py
+++ b/Lib/ufo2fdk/fdkBridge.py
@@ -143,11 +143,6 @@ def checkOutlines(fontPath, removeOverlap=True, correctContourDirection=True):
stderr, stdout = _execute(c)
allStderr.append(stderr)
allStdout.append(stdout)
- if not removeOverlap and not correctContourDirection:
- c = cmds + ["-O", fontPath]
- stderr, stdout = _execute(c)
- allStderr.append(stderr)
- allStdout.append(stdout)
return "\n".join(allStderr), "\n".join(allStdout)
outlineCheckFirstLineRE = re.compile(
|
ignore checkoutlines and correct contour direction output from fdk
|
diff --git a/django_databrowse/tests/sites.py b/django_databrowse/tests/sites.py
index <HASH>..<HASH> 100644
--- a/django_databrowse/tests/sites.py
+++ b/django_databrowse/tests/sites.py
@@ -51,7 +51,7 @@ class DatabrowseTestsClient(TestCase):
def tearDownClass(self):
django_databrowse.site.unregister(SomeModel)
- def test_root(self):
+ def test_urls(self):
django_databrowse.site.register(SomeModel)
response = Client().get('')
self.assertEqual(response.status_code, 200)
@@ -61,3 +61,11 @@ class DatabrowseTestsClient(TestCase):
response = Client().get('/django_databrowse/somemodel/')
self.assertEqual(response.status_code, 200)
+
+ response = Client().get('/django_databrowse/doesnotexistmodel/')
+ self.assertEqual(response.status_code, 404)
+ response = Client().get('/django_databrowse/something/somemodel/')
+ self.assertEqual(response.status_code, 404)
+ response = Client().get(
+ '/django_databrowse/somemodel/fields/some_field/')
+ self.assertEqual(response.status_code, 200)
|
added a test on a model field detail page
|
diff --git a/Response.php b/Response.php
index <HASH>..<HASH> 100644
--- a/Response.php
+++ b/Response.php
@@ -3,6 +3,7 @@
namespace Modulus\Framework;
use Modulus\Http\Rest;
+use Modulus\Utility\View;
use Modulus\Http\Redirect;
use Modulus\Framework\Upstart;
@@ -41,6 +42,11 @@ class Response
if ($response instanceof Redirect) return $response->send();
/**
+ * Create a view page
+ */
+ if (Response::isView($response)) return;
+
+ /**
* Avoid "Segmentation fault (core dumped)"
*/
echo ' ';
@@ -50,4 +56,21 @@ class Response
*/
return null;
}
+
+ /**
+ * Render a view
+ *
+ * @param mixed $response
+ * @return bool
+ */
+ public static function isView($response) : bool
+ {
+ if ($response instanceof View) {
+ $response->render();
+
+ return true;
+ }
+
+ return false;
+ }
}
|
chore: handle View instances
render view component if the returned object was a view instance
|
diff --git a/modules/social_features/social_search/modules/social_search_autocomplete/webpack.config.js b/modules/social_features/social_search/modules/social_search_autocomplete/webpack.config.js
index <HASH>..<HASH> 100644
--- a/modules/social_features/social_search/modules/social_search_autocomplete/webpack.config.js
+++ b/modules/social_features/social_search/modules/social_search_autocomplete/webpack.config.js
@@ -11,7 +11,7 @@ module.exports = {
rules: [
{
test: /\.m?jsx?$/,
- exclude: /node_modules\/(?!yoastseo\/)/,
+ exclude: /node_modules/,
use: {
loader: 'babel-loader',
}
|
Remove reference to yoastseo
This was accidentally left behind by re-using a webpack configuration
for the RTSEO.js project but is not needed here.
|
diff --git a/rpc2/transport.go b/rpc2/transport.go
index <HASH>..<HASH> 100644
--- a/rpc2/transport.go
+++ b/rpc2/transport.go
@@ -86,7 +86,7 @@ func (t *Transport) GetRemoteAddr() (ret net.Addr) {
}
func NewTransport(c net.Conn, l LogFactory, wef WrapErrorFunc) *Transport {
- var mh codec.MsgpackHandle
+ mh := codec.MsgpackHandle{WriteExt : true}
buf := new(bytes.Buffer)
ret := &Transport{
|
This should address keybase/go#<I>
|
diff --git a/asset/js/setup.js b/asset/js/setup.js
index <HASH>..<HASH> 100644
--- a/asset/js/setup.js
+++ b/asset/js/setup.js
@@ -82,7 +82,7 @@ function setupWithSearch(siteData) {
},
methods: {
searchCallback(match) {
- const page = `${baseUrl}/${match.src.replace('.md', '.html')}`;
+ const page = `${baseUrl}/${match.src.replace(/.(md|mbd)$/, '.html')}`;
const anchor = match.heading ? `#${match.heading.id}` : '';
window.location = `${page}${anchor}`;
},
|
Support .mbd file extension in search navigation
|
diff --git a/lib/remote_syslog/tcp_endpoint.rb b/lib/remote_syslog/tcp_endpoint.rb
index <HASH>..<HASH> 100644
--- a/lib/remote_syslog/tcp_endpoint.rb
+++ b/lib/remote_syslog/tcp_endpoint.rb
@@ -3,6 +3,20 @@ require 'eventmachine'
module RemoteSyslog
# Additional class that uses TCP but no TLS. Has the benefit of a greater max packet size
class TcpEndpoint
+ class Handler < EventMachine::Connection
+ def initialize(endpoint)
+ @endpoint = endpoint
+ super()
+ end
+
+ def connection_completed
+ @endpoint.connection = self
+ end
+
+ def unbind
+ @endpoint.unbind
+ end
+ end
attr_accessor :connection
@@ -44,7 +58,7 @@ module RemoteSyslog
def connect
logger.debug "Connecting to #{address}:#{@port}"
- self.connection = EventMachine.connect(address, @port)
+ EventMachine.connect(address, @port, TcpEndpoint::Handler, self)
end
def unbind
|
TCP endpoint works better with the handler in place
|
diff --git a/tests/phpunit/TagTest.php b/tests/phpunit/TagTest.php
index <HASH>..<HASH> 100644
--- a/tests/phpunit/TagTest.php
+++ b/tests/phpunit/TagTest.php
@@ -118,6 +118,26 @@ class TagTest extends \PHPUnit\Framework\TestCase {
}
/**
+ * @covers Tag::appendContent
+ */
+ public function testAppendContentWithArrayKeys() {
+ $tag = new Tag();
+ // FIXME: The behavior of appendContent() and prependContent() is not consistent
+ $this->expectError();
+ $tag->appendContent( [ 'foo' => 'bar' ] );
+ }
+
+ /**
+ * @covers Tag::prependContent
+ */
+ public function testPrependContentWithArrayKeys() {
+ $tag = new Tag();
+ // FIXME: The behavior of appendContent() and prependContent() is not consistent
+ $tag->prependContent( [ 'foo' => 'bar' ] );
+ $this->assertSame( '<div>bar</div>', $tag->toString() );
+ }
+
+ /**
* @covers Tag::setAttributes
* @covers Tag::getAttribute
* @covers Tag::removeAttributes
|
Document inconsistent Tag methods with PHPUnit tests
All this does is documenting the status quo. Possible fixes are
discussed in the next patch.
Change-Id: I<I>db<I>c7b<I>d5be6d1f<I>f3fac3bcae2ee
|
diff --git a/nestable/NodeMoveAction.php b/nestable/NodeMoveAction.php
index <HASH>..<HASH> 100644
--- a/nestable/NodeMoveAction.php
+++ b/nestable/NodeMoveAction.php
@@ -25,6 +25,9 @@ class NodeMoveAction extends Action
{
/** @var string class to use to locate the supplied data ids */
public $modelName;
+
+ /** @var bool variable to support editing without possibility of creating a root elements */
+ public $rootable = true;
/** @vars string the attribute names of the model that hold these attributes */
private $leftAttribute;
@@ -76,7 +79,7 @@ class NodeMoveAction extends Action
]);
/* Root/Append/Left/Right change */
- if($this->treeAttribute&&is_null($par)&&!$model->isRoot()){
+ if($this->rootable&&$this->treeAttribute&&is_null($par)&&!$model->isRoot()){
$model->makeRoot();
} else if(is_null($par)){
if(!is_null($rgt))
|
Update NodeMoveAction.php
Add support edit subtrees.
|
diff --git a/edison-togglz/src/main/java/de/otto/edison/togglz/s3/S3TogglzRepository.java b/edison-togglz/src/main/java/de/otto/edison/togglz/s3/S3TogglzRepository.java
index <HASH>..<HASH> 100644
--- a/edison-togglz/src/main/java/de/otto/edison/togglz/s3/S3TogglzRepository.java
+++ b/edison-togglz/src/main/java/de/otto/edison/togglz/s3/S3TogglzRepository.java
@@ -50,7 +50,7 @@ public class S3TogglzRepository implements StateRepository {
}
@Scheduled(initialDelay = 0, fixedRate = SCHEDULE_RATE_IN_MILLISECONDS)
- private void prefetchFeatureStates() {
+ protected void prefetchFeatureStates() {
if (cache.size() == 0) {
LOG.debug("Initialize state for features");
initializeFeatureStates();
|
Enable scheduling on TogglzRepo in newer Spring version
Newer Spring Version does not allow @Scheduled-Annotation for
private functions, so changing to protected for correct proxying.
|
diff --git a/src/main/java/fr/xebia/springframework/jdbc/ManagedBasicDataSourceMBean.java b/src/main/java/fr/xebia/springframework/jdbc/ManagedBasicDataSourceMBean.java
index <HASH>..<HASH> 100644
--- a/src/main/java/fr/xebia/springframework/jdbc/ManagedBasicDataSourceMBean.java
+++ b/src/main/java/fr/xebia/springframework/jdbc/ManagedBasicDataSourceMBean.java
@@ -46,5 +46,7 @@ public interface ManagedBasicDataSourceMBean {
void setMaxIdle(int maxIdle);
+ void setMinIdle(int maxIdle);
+
void setMaxWait(long maxWait);
}
|
expose setMinIdle in JMX
|
diff --git a/api/src/opentrons/hardware_control/controller.py b/api/src/opentrons/hardware_control/controller.py
index <HASH>..<HASH> 100644
--- a/api/src/opentrons/hardware_control/controller.py
+++ b/api/src/opentrons/hardware_control/controller.py
@@ -41,7 +41,9 @@ class Controller:
'This is intended to run on a robot, and while it can connect '
'to a smoothie via a usb/serial adapter unexpected things '
'using gpios (such as smoothie reset or light management) '
- 'will fail')
+ 'will fail. If you are seeing this message and you are '
+ 'running on a robot, you need to set the RUNNING_ON_PI '
+ 'environmental variable to 1.')
self.config = config or opentrons.config.robot_configs.load()
|
feat(api): add info to debug warning on how IS_ROBOT is determined (#<I>)
If you run a robot without RUNNING_ON_PI in the environment you get this warning message (and substantially reduced functionality), but it is not clear how to resolve it. This provides that information. In my case it was because I was invoking one Python script from another with subprocess.
|
diff --git a/salt/utils/verify.py b/salt/utils/verify.py
index <HASH>..<HASH> 100644
--- a/salt/utils/verify.py
+++ b/salt/utils/verify.py
@@ -69,8 +69,10 @@ def verify_socket(interface, pub_port, ret_port):
pubsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
retsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
+ pubsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
pubsock.bind((interface, int(pub_port)))
pubsock.close()
+ retsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
retsock.bind((interface, int(ret_port)))
retsock.close()
result = True
@@ -82,8 +84,8 @@ def verify_socket(interface, pub_port, ret_port):
pubsock.close()
retsock.close()
- return True # TODO: Make this test actually function as advertised
- # Disabled check as per github issue number 1594
+ return result
+
def verify_env(dirs, user):
'''
|
Set the SO_REUSEADDR option when performing the bind() test.
Make failure of this test fatal again as well.
|
diff --git a/lib/devise_token/rails/routes.rb b/lib/devise_token/rails/routes.rb
index <HASH>..<HASH> 100644
--- a/lib/devise_token/rails/routes.rb
+++ b/lib/devise_token/rails/routes.rb
@@ -23,7 +23,7 @@ module ActionDispatch::Routing
:module => :devise,
:path => "#{opts[:at]}",
:controllers => controllers,
- :skip => opts[:skip]
+ :skip => opts[:skip] + [:omniauth_callbacks]
unnest_namespace do
# get full url path as if it were namespaced
|
skips omniauth_callback
|
diff --git a/lib/sensu/server/process.rb b/lib/sensu/server/process.rb
index <HASH>..<HASH> 100644
--- a/lib/sensu/server/process.rb
+++ b/lib/sensu/server/process.rb
@@ -128,12 +128,10 @@ module Sensu
# Process an event: filter -> mutate -> handle.
#
- # This method runs event bridges, relaying the event data to
- # other services. This method also determines the appropriate
- # handlers for the event, filtering and mutating the event data
- # for each of them. The `@handling_event_count` is incremented
- # by `1`, for each event handler chain (filter -> mutate ->
- # handle).
+ # This method determines the appropriate handlers for an event,
+ # filtering and mutating the event data for each of them. The
+ # `@handling_event_count` is incremented by `1`, for each event
+ # handler chain (filter -> mutate -> handle).
#
# @param event [Hash]
def process_event(event)
@@ -299,8 +297,10 @@ module Sensu
# registry. If the previous conditions are not met, and check
# `:type` is `metric` and the `:status` is `0`, the event
# registry is not updated, but the provided callback is called
- # with the event data. JSON serialization is used when storing
- # data in the registry.
+ # with the event data. All event data is sent to event bridge
+ # extensions, including events that do not normally produce an
+ # action. JSON serialization is used when storing data in the
+ # registry.
#
# @param client [Hash]
# @param check [Hash]
|
[event-bridge] updated yardoc, event_bridges() moved
|
diff --git a/tff/io.py b/tff/io.py
index <HASH>..<HASH> 100644
--- a/tff/io.py
+++ b/tff/io.py
@@ -343,6 +343,9 @@ class DefaultPTY(PTY):
new[6][termios.VSUSP] = vdisable # Ctrl-Z
new[6][termios.VQUIT] = vdisable # Ctrl-\
+ VDSUSP = 11
+ new[6][VDSUSP] = vdisable # Ctrl-Y
+
termios.tcsetattr(self.__stdin_fileno, termios.TCSANOW, new)
pid, master = pty.fork()
if not pid:
|
Fix VDSUSP (Ctrl-y) issue
|
diff --git a/src/python/dxpy/utils/describe.py b/src/python/dxpy/utils/describe.py
index <HASH>..<HASH> 100644
--- a/src/python/dxpy/utils/describe.py
+++ b/src/python/dxpy/utils/describe.py
@@ -378,7 +378,7 @@ def print_data_obj_desc(desc, verbose=False):
if desc["class"] == "file" or desc["class"] == "gtable":
sponsored_str = ""
if 'sponsored' in desc and desc['sponsored']:
- sponsored_str = ", sponsored by DNAnexus"
+ sponsored_str = DELIMITER(", ") + "sponsored by DNAnexus"
print_field("Size", get_size_str(desc['size']) + sponsored_str)
else:
print_field("Size", str(desc['size']))
|
PTFM-<I>-sidenote: adding delimiter between size string and "sponsored by DNAnexus" string
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ long_description = open('README.rst').read()
setup(
name = 'templated-emails',
- version = "0.4",
+ version = "0.5",
url = 'https://github.com/philippWassibauer/templated-emails',
author = "Philipp Wassibauer",
author_email = "phil@gidsy.com",
|
browsing emails in the backend now possible. this renders the blocks, but not the templates itself; therefore showing the whole logic of the template and data is not required for the rendering process. this is especially useful for complex, timebased emails that are almost impossible to recreate manually.
|
diff --git a/lib/App.js b/lib/App.js
index <HASH>..<HASH> 100644
--- a/lib/App.js
+++ b/lib/App.js
@@ -92,7 +92,9 @@ inherit(App, EventEmitter, [
this.intialized = true;
this.emit('init', settings);
return this;
- }
+ },
+ _App.screenHeight,
+ _App.screenWidth,
]);
|
accidentally removed screenWidth and screenHeight
|
diff --git a/synapse/tests/test_lib_agenda.py b/synapse/tests/test_lib_agenda.py
index <HASH>..<HASH> 100644
--- a/synapse/tests/test_lib_agenda.py
+++ b/synapse/tests/test_lib_agenda.py
@@ -324,8 +324,8 @@ class AgendaTest(s_t_utils.SynTest):
await agenda.add('visi', '[teststr=baz]', {s_tu.HOUR: (7, 8), s_tu.MINUTE: 0, s_tu.DAYOFMONTH: 6},
incunit=s_agenda.TimeUnit.MONTH, incvals=1)
- xmas = {s_tu.DAYOFMONTH: 25, s_tu.MONTH: 12, s_tu.YEAR: 2018}
- lasthanu = {s_tu.DAYOFMONTH: 10, s_tu.MONTH: 12, s_tu.YEAR: 2018}
+ xmas = {s_tu.DAYOFMONTH: 25, s_tu.MONTH: 12, s_tu.YEAR: 2099}
+ lasthanu = {s_tu.DAYOFMONTH: 10, s_tu.MONTH: 12, s_tu.YEAR: 2099}
await agenda.delete(guid1)
|
Fix agenda persistence fail (#<I>)
|
diff --git a/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java b/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java
index <HASH>..<HASH> 100644
--- a/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java
+++ b/jdk8/src/main/java/com/google/errorprone/dataflow/DataFlow.java
@@ -134,7 +134,7 @@ public final class DataFlow {
final MethodTree method = (MethodTree) leaf;
Preconditions.checkNotNull(method.getBody(),
- "Method to analyze must have a body. Method passed in: %s in file %s",
+ "Method to analyze must have a body. Method passed in: %s() in file %s",
method.getName(),
methodPath.getCompilationUnit().getSourceFile().getName());
@@ -177,6 +177,15 @@ public final class DataFlow {
// Currently not supported because it only happens in ~2% of cases.
return null;
}
+
+ final MethodTree method = (MethodTree) enclosingMethodPath.getLeaf();
+ if (method.getBody() == null) {
+ // expressions can occur in abstract methods, for example {@code Map.Entry} in:
+ //
+ // abstract Set<Map.Entry<K, V>> entries();
+ return null;
+ }
+
return methodDataflow(enclosingMethodPath, context, transfer).getAnalysis().getValue(expr);
}
|
Fix bug where expressionDataflow would throw an exception for expressions in a method that doesn't have a body, e.g. Map.Entry in:
abstract Set<Map.Entry<K, V>> entries();
-------------
Created by MOE: <URL>
|
diff --git a/luigi/contrib/bigquery.py b/luigi/contrib/bigquery.py
index <HASH>..<HASH> 100644
--- a/luigi/contrib/bigquery.py
+++ b/luigi/contrib/bigquery.py
@@ -326,7 +326,7 @@ class BigQueryClient(object):
job_id = new_job['jobReference']['jobId']
logger.info('Started import job %s:%s', project_id, job_id)
while True:
- status = self.client.jobs().get(projectId=project_id, jobId=job_id).execute()
+ status = self.client.jobs().get(projectId=project_id, jobId=job_id).execute(num_retries=10)
if status['status']['state'] == 'DONE':
if status['status'].get('errorResult'):
raise Exception('BigQuery job failed: {}'.format(status['status']['errorResult']))
|
Add retries for <I> errors in BigQuery
According to <URL>
|
diff --git a/spec/integration/util/rdoc/parser_spec.rb b/spec/integration/util/rdoc/parser_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/integration/util/rdoc/parser_spec.rb
+++ b/spec/integration/util/rdoc/parser_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
require 'puppet/util/rdoc'
-describe "RDoc::Parser" do
+describe "RDoc::Parser", :unless => Puppet.features.microsoft_windows? do
require 'puppet_spec/files'
include PuppetSpec::Files
|
(PUP-<I>) Skip rdoc tests on windows
Appveyor has transient failures running rdoc parser tests on windows
only. Puppet uses rdoc as part of its `puppet doc` command. The
puppet-strings project has mostly, but not yet completely, replaced the
`puppet doc` command. At a future date `puppet doc` will be deprecated
and removed. This commit skips the tests on windows.
|
diff --git a/lib/ffi-glib/ptr_array.rb b/lib/ffi-glib/ptr_array.rb
index <HASH>..<HASH> 100644
--- a/lib/ffi-glib/ptr_array.rb
+++ b/lib/ffi-glib/ptr_array.rb
@@ -28,12 +28,17 @@ module GLib
Lib.g_ptr_array_add self, ptr
end
+ # Re-implementation of the g_ptr_array_index macro
+ def index idx
+ sz = FFI.type_size :pointer
+ ptr = @struct[:pdata].get_pointer(idx * sz)
+ GirFFI::ArgHelper.cast_from_pointer(element_type, ptr)
+ end
+
def each
- prc = Proc.new {|valptr, userdata|
- val = GirFFI::ArgHelper.cast_from_pointer element_type, valptr
- yield val
- }
- Lib.g_ptr_array_foreach self.to_ptr, prc, nil
+ @struct[:len].times.each do |idx|
+ yield index(idx)
+ end
end
end
end
|
Make PtrArray#each behave properly with early exit in JRuby.
|
diff --git a/pyqg/layered_model.py b/pyqg/layered_model.py
index <HASH>..<HASH> 100644
--- a/pyqg/layered_model.py
+++ b/pyqg/layered_model.py
@@ -99,15 +99,11 @@ class LayeredModel(model.Model):
Parameters
----------
- g : number
- Gravitational acceleration. Units: meters second :sup:`-2`
nz : integer number
Number of layers (> 1)
beta : number
Gradient of coriolis parameter. Units: meters :sup:`-1`
seconds :sup:`-1`
- rek : number
- Linear drag in lower layer. Units: seconds :sup:`-1`
rd : number
Deformation radius. Units: meters. Only necessary for
the two-layer (nz=2) case.
|
Removes unused parameters from doc strings
|
diff --git a/lib/haml_lint/tree/root_node.rb b/lib/haml_lint/tree/root_node.rb
index <HASH>..<HASH> 100644
--- a/lib/haml_lint/tree/root_node.rb
+++ b/lib/haml_lint/tree/root_node.rb
@@ -17,7 +17,9 @@ module HamlLint::Tree
# @param line [Integer] the line number of the node
# @return [HamlLint::Node]
def node_for_line(line)
- find(-> { HamlLint::Tree::NullNode.new }) { |node| node.line_numbers.cover?(line) }
+ find(-> { HamlLint::Tree::NullNode.new }) do |node|
+ node.line_numbers.cover?(line) && node != self
+ end
end
end
end
|
Fix RootNode#node_for_line method on Ruby <I>
This method broke on Ruby <I> since `Range#cover?` now returns true for
beginless ranges where it previously returned false. This meant the
`RootNode` would match (as its `line_numbers` have no start nor end).
Fix by excluding the root node if it matches.
|
diff --git a/untwisted/network.py b/untwisted/network.py
index <HASH>..<HASH> 100644
--- a/untwisted/network.py
+++ b/untwisted/network.py
@@ -23,9 +23,9 @@ class SuperSocket(Dispatcher):
core.gear.scale(self)
def destroy(self):
- self.base.clear()
- SSL.base.clear()
- del self.pool[:]
+ # self.base.clear()
+ # SSL.base.clear()
+ # del self.pool[:]
core.gear.unregister(self)
class SSL(SuperSocket):
@@ -69,3 +69,4 @@ class Device(SuperSocket):
+
|
Fixing misbehavior with Supersocket class.
|
diff --git a/core/Core.php b/core/Core.php
index <HASH>..<HASH> 100644
--- a/core/Core.php
+++ b/core/Core.php
@@ -304,19 +304,6 @@ Debug::loadErrorHandlers();
///////////////////////////////////////////////////////////////////////////////
// HELPER FUNCTIONS
-function getSysTempDir() {
- Deprecation::notice(3.0, 'Please use PHP function get_sys_temp_dir() instead.');
- return sys_get_temp_dir();
-}
-
-/**
- * @deprecated 3.0 Please use {@link SS_ClassManifest::getItemPath()}.
- */
-function getClassFile($className) {
- Deprecation::notice('3.0', 'Use SS_ClassManifest::getItemPath() instead.');
- return SS_ClassLoader::instance()->getManifest()->getItemPath($className);
-}
-
/**
* Creates a class instance by the "singleton" design pattern.
* It will always return the same instance for this class,
|
Removing deprecated Core.php functions
|
diff --git a/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php b/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php
+++ b/eZ/Publish/Core/Search/Legacy/Content/Gateway/DoctrineDatabase.php
@@ -85,7 +85,7 @@ class DoctrineDatabase extends Gateway
$doCount = true
)
{
- $count = $doCount ? $this->getResultCount( $criterion, $sort, $fieldFilters ) : null;
+ $count = $doCount ? $this->getResultCount( $criterion, null, $fieldFilters ) : null;
if ( !$doCount && $limit === 0 )
{
|
There is no need for sorting when just getting the result count
|
diff --git a/app/models/no_cms/blocks/block_slot.rb b/app/models/no_cms/blocks/block_slot.rb
index <HASH>..<HASH> 100644
--- a/app/models/no_cms/blocks/block_slot.rb
+++ b/app/models/no_cms/blocks/block_slot.rb
@@ -7,5 +7,11 @@ module NoCms::Blocks
belongs_to :block, class_name: "NoCms::Blocks::Block"
accepts_nested_attributes_for :block
+
+ scope :for_bone, -> (bone) { where(bone: bone) }
+
+ validates :bone, presence: true
+
+
end
end
|
Scopes and validations for bones in the blocks
|
diff --git a/geomet/tests/wkb_test.py b/geomet/tests/wkb_test.py
index <HASH>..<HASH> 100644
--- a/geomet/tests/wkb_test.py
+++ b/geomet/tests/wkb_test.py
@@ -45,6 +45,10 @@ class PointTestCase(unittest.TestCase):
)
self.assertEqual(expected, wkb.dumps(pt, big_endian=True))
+ # We skip this because, right now, we have no way of signalling with
+ # GeoJSON that an object with 3-dimensional coordinates is XYZ or XYM.
+ # XYZ is more common, so we assume this.
+ @unittest.skip
def test_dumps_point_m(self):
# Test for an XYM Point:
pt = dict(type='Point', coordinates=[0.0, 1.0, 2.0])
|
tests/wkb_test:
Skip `test_dumps_point_m` and explain why (with a comment).
|
diff --git a/ui/app/milestone/milestone-controllers.js b/ui/app/milestone/milestone-controllers.js
index <HASH>..<HASH> 100644
--- a/ui/app/milestone/milestone-controllers.js
+++ b/ui/app/milestone/milestone-controllers.js
@@ -188,6 +188,8 @@
that.data = milestoneDetail;
+ that.data.endDate = dateUtilConverter.convertFromTimestampNoonUTC(that.data.endDate);
+
that.submit = function() {
that.data.endDate = dateUtilConverter.convertToTimestampNoonUTC(that.data.endDate);
|
NCL-<I> Missing convertor added
|
diff --git a/update.go b/update.go
index <HASH>..<HASH> 100644
--- a/update.go
+++ b/update.go
@@ -48,6 +48,10 @@ func init() {
func Update(channel string) {
golock.Lock(updateLockPath)
defer golock.Unlock(updateLockPath)
+ if !IsUpdateNeeded("soft") {
+ // update no longer needed
+ return
+ }
done := make(chan bool)
go func() {
touchAutoupdateFile()
|
skip updating if it is no longer needed
|
diff --git a/adafruit_bme280.py b/adafruit_bme280.py
index <HASH>..<HASH> 100644
--- a/adafruit_bme280.py
+++ b/adafruit_bme280.py
@@ -213,7 +213,7 @@ class Adafruit_BME280_I2C(Adafruit_BME280):
with self._i2c as i2c:
i2c.write(bytes([register & 0xFF]))
result = bytearray(length)
- i2c.read_into(result)
+ i2c.readinto(result)
#print("$%02X => %s" % (register, [hex(i) for i in result]))
return result
|
changed read_into to readinto
|
diff --git a/src/shims/forms-picker.js b/src/shims/forms-picker.js
index <HASH>..<HASH> 100644
--- a/src/shims/forms-picker.js
+++ b/src/shims/forms-picker.js
@@ -8,7 +8,7 @@ webshims.register('forms-picker', function($, webshims, window, document, undefi
var ret = [date.getFullYear(), moduleOpts.addZero(date.getMonth() + 1), moduleOpts.addZero(date.getDate())];
ret.month = ret[0]+'-'+ret[1];
ret.date = ret[0]+'-'+ret[1]+'-'+ret[2];
- ret.time = date.getHours() +':'+ date.getMinutes();
+ ret.time = moduleOpts.addZero(date.getHours()) +':'+ moduleOpts.addZero(date.getMinutes());
ret['datetime-local'] = ret.date +'T'+ ret.time;
return ret;
|
getDateArray should add zeros to time as well
|
diff --git a/xtuml/load.py b/xtuml/load.py
index <HASH>..<HASH> 100644
--- a/xtuml/load.py
+++ b/xtuml/load.py
@@ -111,8 +111,9 @@ class ModelLoader(object):
'''
Parse input as raw data.
'''
- s = self.parser.parse(lexer=self.lexer, input=data)
- self.statements.extend(s)
+ if data:
+ s = self.parser.parse(lexer=self.lexer, input=data)
+ self.statements.extend(s)
def filename_input(self, filename):
|
load: don't report syntax errors on empty input
|
diff --git a/test/benchmarks/active_record_ips_test.rb b/test/benchmarks/active_record_ips_test.rb
index <HASH>..<HASH> 100644
--- a/test/benchmarks/active_record_ips_test.rb
+++ b/test/benchmarks/active_record_ips_test.rb
@@ -16,6 +16,6 @@ class Blueprinter::ActiveRecordIPSTest < Minitest::Test
def test_render
result = iterate {@blueprinter.render(@prepared_objects)}
puts "\nActiveRecord IPS: #{result}"
- assert_operator(result, :>=, 2500)
+ assert_operator(result, :>=, 2000)
end
end
diff --git a/test/benchmarks/ips_test.rb b/test/benchmarks/ips_test.rb
index <HASH>..<HASH> 100644
--- a/test/benchmarks/ips_test.rb
+++ b/test/benchmarks/ips_test.rb
@@ -16,6 +16,6 @@ class Blueprinter::IPSTest < Minitest::Test
def test_render
result = iterate {@blueprinter.render(@prepared_objects)}
puts "\nBasic IPS: #{result}"
- assert_operator(result, :>=, 3000)
+ assert_operator(result, :>=, 2500)
end
end
|
IPS are slower due to ruby downgrade.
Previously, we were building and running benchmarks on circle ci using
ruby <I>. We now build and run benchmarks on <I>. Ruby <I> is
slower than <I>, so we need to reduce the IPS benchmarks.
|
diff --git a/test_everything.py b/test_everything.py
index <HASH>..<HASH> 100644
--- a/test_everything.py
+++ b/test_everything.py
@@ -61,7 +61,6 @@ pub.options.reprcomments = False
import hydpy
doctests = {}
for dirinfo in os.walk(hydpy.__path__[0]):
- print(dirinfo[0])
if dirinfo[0].endswith('unittests') or not '__init__.py' in dirinfo[2]:
continue
packagename = dirinfo[0].replace(os.sep, '.')+'.'
@@ -69,7 +68,9 @@ for dirinfo in os.walk(hydpy.__path__[0]):
level = packagename.count('.')-1
modulenames = [packagename+fn.split('.')[0]
for fn in dirinfo[2] if fn.endswith('.py')]
+ print(dirinfo[0], packagename)
for modulename in modulenames:
+ print(' '+modulename)
module = importlib.import_module(modulename)
runner = unittest.TextTestRunner(stream=open(os.devnull, 'w'))
suite = unittest.TestSuite()
|
still debugging
going into the details of doctesting hland
|
diff --git a/src/transformers/trainer_seq2seq.py b/src/transformers/trainer_seq2seq.py
index <HASH>..<HASH> 100644
--- a/src/transformers/trainer_seq2seq.py
+++ b/src/transformers/trainer_seq2seq.py
@@ -161,6 +161,9 @@ class Seq2SeqTrainer(Trainer):
"synced_gpus": True if is_deepspeed_zero3_enabled() else False,
}
+ if "attention_mask" in inputs:
+ gen_kwargs["attention_mask"] = inputs.get("attention_mask", None)
+
# prepare generation inputs
# some encoder-decoder models can have varying encder's and thus
# varying model input names
@@ -171,7 +174,6 @@ class Seq2SeqTrainer(Trainer):
generated_tokens = self.model.generate(
generation_inputs,
- attention_mask=inputs.get("attention_mask", None),
**gen_kwargs,
)
# in case the batch is shorter than max length, the output should be padded
|
Fix Seq2SeqTrainer (#<I>)
|
diff --git a/tensorflow_datasets/core/utils/read_config.py b/tensorflow_datasets/core/utils/read_config.py
index <HASH>..<HASH> 100644
--- a/tensorflow_datasets/core/utils/read_config.py
+++ b/tensorflow_datasets/core/utils/read_config.py
@@ -77,10 +77,11 @@ class ReadConfig(_ReadConfig):
try_autocache: If True (default) and the dataset satisfy the right
conditions (dataset small enough, files not shuffled,...) the dataset
will be cached during the first iteration (through `ds = ds.cache()`).
- shuffle_seed: `tf.int64`, seeds forwarded to `tf.data.Dataset.shuffle` when
- `shuffle_files=True`.
+ shuffle_seed: `tf.int64`, seed forwarded to `tf.data.Dataset.shuffle` during
+ file shuffling (which happens when `tfds.load(..., shuffle_files=True)`).
shuffle_reshuffle_each_iteration: `bool`, forwarded to
- `tf.data.Dataset.shuffle` when `shuffle_files=True`.
+ `tf.data.Dataset.shuffle` during file shuffling (which happens when
+ `tfds.load(..., shuffle_files=True)`).
interleave_cycle_length: `int`, forwarded to `tf.data.Dataset.interleave`.
Default to 16.
interleave_block_length: `int`, forwarded to `tf.data.Dataset.interleave`.
|
Modifying docstring of ReadConfig to clarify that `shuffle_seed` and `shuffle_reshuffle_each_iteration` are only used for file shuffling.
PiperOrigin-RevId: <I>
|
diff --git a/simuvex/s_irexpr.py b/simuvex/s_irexpr.py
index <HASH>..<HASH> 100644
--- a/simuvex/s_irexpr.py
+++ b/simuvex/s_irexpr.py
@@ -20,7 +20,10 @@ class SimIRExpr(object):
self._post_processed = False
self.expr = None
- self.type = tyenv.typeOf(expr)
+ if expr.tag in ('Iex_BBPTR', 'Iex_VECRET'):
+ self.type = None
+ else:
+ self.type = tyenv.typeOf(expr)
self.state._inspect('expr', BP_BEFORE)
@@ -100,6 +103,16 @@ class SimIRExpr(object):
### expression handlers ###
###########################
+ def _handle_BBPTR(self, expr):
+ l.warning("BBPTR IRExpr encountered. This is (probably) not bad, but we have no real idea how to handle it.")
+ self.type = "Ity_I32"
+ self.expr = self.state.BVV("WTF!")
+
+ def _handle_VECRET(self, expr):
+ l.warning("VECRET IRExpr encountered. This is (probably) not bad, but we have no real idea how to handle it.")
+ self.type = "Ity_I32"
+ self.expr = self.state.BVV("OMG!")
+
def _handle_Get(self, expr):
size = size_bytes(expr.type)
self.type = expr.type
|
handle BBPTR and VECRET, at least as stubs
|
diff --git a/lib/hashie/extensions/coercion.rb b/lib/hashie/extensions/coercion.rb
index <HASH>..<HASH> 100644
--- a/lib/hashie/extensions/coercion.rb
+++ b/lib/hashie/extensions/coercion.rb
@@ -2,7 +2,7 @@ module Hashie
module Extensions
module Coercion
def self.included(base)
- base.send :extend, ClassMethods
+ base.extend ClassMethods
base.send :include, InstanceMethods
end
|
Object#extend is a public method
|
diff --git a/chickpea/base_models.py b/chickpea/base_models.py
index <HASH>..<HASH> 100644
--- a/chickpea/base_models.py
+++ b/chickpea/base_models.py
@@ -89,7 +89,7 @@ class Category(models.Model):
map = models.ForeignKey(Map)
name = models.CharField(max_length=50)
description = models.TextField(blank=True, null=True)
- color = models.CharField(max_length=32)
+ color = models.CharField(max_length=32, default="DarkBlue")
icon = models.ForeignKey(Icon, null=True, blank=True)
preset = models.BooleanField(default=False, help_text="Display this category on load.")
rank = models.IntegerField(null=True, blank=True)
diff --git a/chickpea/views.py b/chickpea/views.py
index <HASH>..<HASH> 100644
--- a/chickpea/views.py
+++ b/chickpea/views.py
@@ -76,6 +76,7 @@ class QuickMapCreate(CreateView):
self.object = form.save()
layer = TileLayer.get_default()
MapToTileLayer.objects.create(map=self.object, tilelayer=layer, rank=1)
+ Category.objects.create(map=self.object, name="POIs", preset=True)
response = {
"redirect": self.get_success_url()
}
|
Create a default category during map quick create process
|
diff --git a/lib/info.rb b/lib/info.rb
index <HASH>..<HASH> 100644
--- a/lib/info.rb
+++ b/lib/info.rb
@@ -1,5 +1,5 @@
module MultiRepo
NAME = "git-multirepo"
- VERSION = "1.0.0.beta2"
+ VERSION = "1.0.0.beta3"
DESCRIPTION = "Track multiple Git repositories side-by-side."
end
\ No newline at end of file
|
Updated gem version to <I>.beta3 (yanked gem version forces version bump).
|
diff --git a/django_ssh/models.py b/django_ssh/models.py
index <HASH>..<HASH> 100644
--- a/django_ssh/models.py
+++ b/django_ssh/models.py
@@ -23,3 +23,6 @@ class Key(models.Model):
data = models.TextField(db_index=True, unique=True)
comment = models.TextField()
fingerprint = models.CharField(max_length=47)
+
+ class Meta:
+ db_table = 'ssh_key'
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,11 +19,11 @@ from distutils.core import setup
setup(
name = 'django-ssh',
packages = ['django_ssh'],
- version = '0.0.2',
+ version = '0.0.3',
description = 'A basic Django app for storing SSH keys',
author = 'Jon Eyolfson',
author_email = 'jon@eyl.io',
url = 'https://github.com/eyolfson/django-ssh/',
download_url = ('https://github.com/eyolfson/django-ssh/archive/'
- 'v0.0.2.tar.gz'),
+ 'v0.0.3.tar.gz'),
)
|
Changed table names and bumped version
|
diff --git a/src/trumbowyg.js b/src/trumbowyg.js
index <HASH>..<HASH> 100644
--- a/src/trumbowyg.js
+++ b/src/trumbowyg.js
@@ -1225,8 +1225,9 @@ Object.defineProperty(jQuery.trumbowyg, 'defaultOptions', {
createLink: function () {
var t = this,
documentSelection = t.doc.getSelection(),
+ selectedRange = documentSelection.getRangeAt(0),
node = documentSelection.focusNode,
- text = new XMLSerializer().serializeToString(documentSelection.getRangeAt(0).cloneContents()),
+ text = new XMLSerializer().serializeToString(selectedRange.cloneContents()) || selectedRange + '',
url,
title,
target;
@@ -1263,7 +1264,7 @@ Object.defineProperty(jQuery.trumbowyg, 'defaultOptions', {
}
};
if (!t.o.minimalLinks) {
- Object.assign(options, {
+ $.extend(options, {
title: {
label: t.lang.title,
value: title
|
fix: makes links works on IE<I>
|
diff --git a/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java b/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java
index <HASH>..<HASH> 100644
--- a/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java
+++ b/java-allocation-instrumenter/src/main/java/com/google/monitoring/runtime/instrumentation/ConstructorInstrumenter.java
@@ -39,9 +39,7 @@ import java.util.logging.Logger;
* Instruments bytecode by inserting a specified call in the
* constructor of a given class. This class is intended to be loaded
* by a javaagent; end-users will want to add {@link ConstructorCallback}s by
- * invoking {@link
- * com.google.monitoring.runtime.allocation.AllocationInspector#
- * addConstructorCallback(Class, ConstructorCallback)}.
+ * invoking {@link #instrumentClass(Class, ConstructorCallback)}.
*
* @author Jeremy Manson
*/
|
Doc fix: javadoc points to incorrect method for instrumentation.
|
diff --git a/lib/builder.js b/lib/builder.js
index <HASH>..<HASH> 100644
--- a/lib/builder.js
+++ b/lib/builder.js
@@ -219,9 +219,16 @@ lunr.Builder.prototype.createDocumentVectors = function () {
tf = termFrequencies[term],
termIndex = this.invertedIndex[term]._index,
idf = lunr.idf(this.invertedIndex[term], this.documentCount),
- score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (documentLength / this.averageDocumentLength)) + tf)
-
- documentVector.insert(termIndex, score)
+ score = idf * ((this._k1 + 1) * tf) / (this._k1 * (1 - this._b + this._b * (documentLength / this.averageDocumentLength)) + tf),
+ scoreWithPrecision = Math.round(score * 1000) / 1000
+ // Converts 1.23456789 to 1.234.
+ // Reducing the precision so that the vectors take up less
+ // space when serialised. Doing it now so that they behave
+ // the same before and after serialisation. Also, this is
+ // the fastest approach to reducing a number's precision in
+ // JavaScript.
+
+ documentVector.insert(termIndex, scoreWithPrecision)
}
documentVectors[docRef] = documentVector
|
Reduce term score precision in vectors
This change reduces the number of decimal places used to represent the
score of a term within a document vector. Through testing this has been
seen to not have any impact to the relevance of the search results but
does lead to a ~<I>% size reduction when serialising indexes, before and
after compression with gzip.
|
diff --git a/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py b/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py
index <HASH>..<HASH> 100755
--- a/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py
+++ b/examples/pytorch/speech-recognition/run_speech_recognition_ctc.py
@@ -152,7 +152,7 @@ class DataTrainingArguments:
eval_split_name: str = field(
default="test",
metadata={
- "help": "The name of the training data set split to use (via the datasets library). Defaults to 'train'"
+ "help": "The name of the training data set split to use (via the datasets library). Defaults to 'test'"
},
)
audio_column_name: str = field(
|
Fix 'eval_split_name' described as defaulting to 'train' (#<I>)
The default is correct (`test`) but the description is not.
|
diff --git a/pycbc/results/legacy_grb.py b/pycbc/results/legacy_grb.py
index <HASH>..<HASH> 100755
--- a/pycbc/results/legacy_grb.py
+++ b/pycbc/results/legacy_grb.py
@@ -36,7 +36,6 @@ if 'matplotlib.backends' not in sys.modules:
matplotlib.use('agg')
import matplotlib.pyplot as plt
from pycbc_glue import markup, segments
-from lal.gpstime import gps_to_utc, LIGOTimeGPS
from matplotlib.patches import Rectangle
from matplotlib.lines import Line2D
from matplotlib.ticker import ScalarFormatter
@@ -132,6 +131,7 @@ def write_summary(page, args, ifos, skyError=None, ipn=False, ipnError=False):
Write summary of information to markup.page object page
"""
from pylal import antenna
+ from lal.gpstime import gps_to_utc, LIGOTimeGPS
gps = args.start_time
grbdate = gps_to_utc(LIGOTimeGPS(gps))\
|
don't explicitly require lal gpstime python module (#<I>)
|
diff --git a/security/src/test/java/com/networknt/security/JwtHelperTest.java b/security/src/test/java/com/networknt/security/JwtHelperTest.java
index <HASH>..<HASH> 100644
--- a/security/src/test/java/com/networknt/security/JwtHelperTest.java
+++ b/security/src/test/java/com/networknt/security/JwtHelperTest.java
@@ -51,6 +51,14 @@ public class JwtHelperTest {
}
@Test
+ public void longLivedAPIAJwt() throws Exception {
+ JwtClaims claims = getTestClaims("Steve", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("api_a.w", "api_b.w", "api_c.w", "api_d.w", "server.info.r"));
+ claims.setExpirationTimeMinutesInTheFuture(5256000);
+ String jwt = JwtHelper.getJwt(claims);
+ System.out.println("***LongLived APIA JWT***: " + jwt);
+ }
+
+ @Test
public void longLivedATMP1000Jwt() throws Exception {
JwtClaims claims = getTestClaims("eric", "EMPLOYEE", "f7d42348-c647-4efb-a52d-4c5787421e72", Arrays.asList("ATMP1000.w", "ATMP1000.r"));
claims.setExpirationTimeMinutesInTheFuture(5256000);
|
add a test case to generate long lived token for calling API A
|
diff --git a/email_extras/admin.py b/email_extras/admin.py
index <HASH>..<HASH> 100644
--- a/email_extras/admin.py
+++ b/email_extras/admin.py
@@ -10,5 +10,9 @@ if USE_GNUPG:
class KeyAdmin(admin.ModelAdmin):
form = KeyForm
+ class AddressAdmin(admin.ModelAdmin):
+ def has_add_permission(self, request):
+ return False
+
admin.site.register(Key, KeyAdmin)
- admin.site.register(Address)
+ admin.site.register(Address, AddressAdmin)
|
Addresses can't be added without keys
|
diff --git a/bottom.py b/bottom.py
index <HASH>..<HASH> 100644
--- a/bottom.py
+++ b/bottom.py
@@ -35,9 +35,11 @@ class Client(object):
bot.run()
'''
+ command = rfc.unique_command(command)
+
def wrap(func):
''' Add the function to this client's handlers and return it '''
- self.handler.add(command.upper(), func)
+ self.handler.add(command, func)
return func
return wrap
@@ -104,13 +106,13 @@ class Handler(object):
def add(self, command, func):
# Wrap the function in a coroutine so that we can
- # crete a task list and use asyncio.wait
- command = command.upper()
+ # create a task list and use asyncio.wait
+ command = rfc.unique_command(command)
coro = asyncio.coroutine(func)
self.coros[command].add(coro)
@asyncio.coroutine
def __call__(self, command, *args, **kwargs):
- coros = self.coros[command.upper()]
+ coros = self.coros[rfc.unique_command(command)]
tasks = [coro(*args, **kwargs) for coro in coros]
asyncio.wait(tasks)
|
Replace command.upper() with rfc.unique_command() for consistency
|
diff --git a/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java b/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java
index <HASH>..<HASH> 100644
--- a/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java
+++ b/structr/structr-core/src/main/java/org/structr/core/resource/constraint/RelationshipConstraint.java
@@ -85,7 +85,7 @@ public class RelationshipConstraint extends ResourceConstraint {
@Override
public boolean supportsNesting() {
- return false;
+ return true;
}
@Override
|
Allowed nesting of RelationshipConstraint to fix paging for relationships.
|
diff --git a/foolbox/adversarial.py b/foolbox/adversarial.py
index <HASH>..<HASH> 100644
--- a/foolbox/adversarial.py
+++ b/foolbox/adversarial.py
@@ -5,16 +5,16 @@ Provides a class that represents an adversarial example.
import numpy as np
-from .adversarial import Adversarial
-from .adversarial import StopAttack
+from v1.adversarial import Adversarial as BaseAdversarial
+from v1.adversarial import StopAttack
-class YieldingAdversarial(Adversarial):
+class Adversarial(BaseAdversarial):
def _check_unperturbed(self):
try:
# for now, we use the non-yielding implementation in the super-class
# TODO: add support for batching this first call as well
- super(YieldingAdversarial, self).forward_one(self._Adversarial__unperturbed)
+ super(Adversarial, self).forward_one(self._Adversarial__unperturbed)
except StopAttack:
# if a threshold is specified and the unperturbed input is
# misclassified, this can already cause a StopAttack
|
renamed YieldingAdversarial to Adversarial and based it on v1
|
diff --git a/code/controller/context/context.php b/code/controller/context/context.php
index <HASH>..<HASH> 100644
--- a/code/controller/context/context.php
+++ b/code/controller/context/context.php
@@ -16,6 +16,16 @@
class KControllerContext extends KCommand implements KControllerContextInterface
{
/**
+ * Constructor.
+ *
+ * @param array|\Traversable $attributes An associative array or a Traversable object instance
+ */
+ public function __construct($attributes = array())
+ {
+ KObjectConfig::__construct($attributes);
+ }
+
+ /**
* Get the request object
*
* @return KControllerRequestInterface
|
re #<I> - Override ControllerContext constructor to only accept an array of attributes
|
diff --git a/application/modules/g/controllers/AuthController.php b/application/modules/g/controllers/AuthController.php
index <HASH>..<HASH> 100755
--- a/application/modules/g/controllers/AuthController.php
+++ b/application/modules/g/controllers/AuthController.php
@@ -291,7 +291,7 @@ class G_AuthController extends Garp_Controller_Action {
return;
}
- if (!empty($this->getRequest()->getPost(self::HONEYPOT_COLUMN))) {
+ if ($this->getRequest()->getPost(self::HONEYPOT_COLUMN)) {
throw new Garp_Auth_Exception(__('honeypot error'));
}
|
added functionality to add/remove sprint alert to event
|
diff --git a/lib/matestack/ui/vue_js/components/async.rb b/lib/matestack/ui/vue_js/components/async.rb
index <HASH>..<HASH> 100644
--- a/lib/matestack/ui/vue_js/components/async.rb
+++ b/lib/matestack/ui/vue_js/components/async.rb
@@ -29,7 +29,13 @@ module Matestack
div class: 'matestack-async-component-container', 'v-bind:class': '{ "loading": loading === true }' do
div class: 'matestack-async-component-wrapper', 'v-if': 'asyncTemplate == null', 'v-bind:class': '{ "loading": loading === true }' do
div async_attributes do
- yield unless is_deferred?
+ if params[:component_key]
+ # we need to yield if a request is looking for a async component, indicated through present params[:component_key]
+ # the requested component could be hidden within this deferred async!
+ yield
+ else
+ yield unless is_deferred?
+ end
end
end
div class: 'matestack-async-component-wrapper', 'v-if': 'asyncTemplate != null', 'v-bind:class': '{ "loading": loading === true }' do
|
fixed async component resolve within deferred async components
|
diff --git a/src/AutoRotatingCarousel.js b/src/AutoRotatingCarousel.js
index <HASH>..<HASH> 100644
--- a/src/AutoRotatingCarousel.js
+++ b/src/AutoRotatingCarousel.js
@@ -10,6 +10,7 @@ import ArrowBackIcon from '@material-ui/icons/ArrowBack'
import ArrowForwardIcon from '@material-ui/icons/ArrowForward'
import Modal from '@material-ui/core/Modal'
import Fade from '@material-ui/core/Fade'
+import Backdrop from '@material-ui/core/Backdrop'
import Dots from 'material-ui-dots'
import classNames from 'classnames'
import Carousel from './SwipableCarouselView'
@@ -188,6 +189,7 @@ class AutoRotatingCarousel extends Component {
})}
open={open}
onClose={onClose}
+ BackdropComponent={Backdrop}
BackdropProps={ModalProps ? { transitionDuration, ...ModalProps.BackdropProps } : { transitionDuration }}
{...ModalProps}
>
|
Add BackdropComponent prop to resolve issue with transitionDuration prop on MUI v4 (#<I>)
|
diff --git a/tasks/cucumber.js b/tasks/cucumber.js
index <HASH>..<HASH> 100644
--- a/tasks/cucumber.js
+++ b/tasks/cucumber.js
@@ -41,6 +41,10 @@ module.exports = function(grunt) {
var commands = [];
+ if (grunt.option('rerun')) {
+ commands.push(grunt.option('rerun'));
+ }
+
if (options.executeParallel && options.workers) {
commands.push('-w', options.workers);
}
|
rerun the cucumber failed scenario. Pass the path of @rerun.txt file
|
diff --git a/src/exporter/file.js b/src/exporter/file.js
index <HASH>..<HASH> 100644
--- a/src/exporter/file.js
+++ b/src/exporter/file.js
@@ -40,6 +40,7 @@ module.exports = (node, name, pathRest, ipldResolver) => {
return pull.values([{
content: content,
path: name,
+ hash: node.multihash,
size: file.fileSize()
}])
}
diff --git a/test/test-exporter.js b/test/test-exporter.js
index <HASH>..<HASH> 100644
--- a/test/test-exporter.js
+++ b/test/test-exporter.js
@@ -45,8 +45,8 @@ module.exports = (repo) => {
function onFiles (err, files) {
expect(err).to.not.exist()
expect(files).to.have.length(1)
+ expect(files[0]).to.have.property('hash')
expect(files[0]).to.have.property('path', hash)
-
fileEql(files[0], unmarsh.data, done)
}
})
@@ -117,6 +117,7 @@ module.exports = (repo) => {
pull(
exporter(hash, ipldResolver),
pull.collect((err, files) => {
+ files.forEach(file => expect(file).to.have.property('hash'))
expect(err).to.not.exist()
expect(
|
feat: Include hash field for exported files (#<I>)
|
diff --git a/lib/dexter/indexer.rb b/lib/dexter/indexer.rb
index <HASH>..<HASH> 100644
--- a/lib/dexter/indexer.rb
+++ b/lib/dexter/indexer.rb
@@ -99,6 +99,8 @@ module Dexter
log "Index created: #{((Time.now - started_at) * 1000).to_i} ms"
end
end
+ else
+ log "No indexes found"
end
new_indexes
|
Added no indexes found message
|
diff --git a/src/harvesters/core.py b/src/harvesters/core.py
index <HASH>..<HASH> 100644
--- a/src/harvesters/core.py
+++ b/src/harvesters/core.py
@@ -1592,6 +1592,10 @@ class ImageAcquirer:
self._num_images_to_acquire = num_images_to_acquire
+ # We're ready to start image acquisition. Lock the device's transport
+ # layer related features:
+ self.device.node_map.TLParamsLocked.value = 1
+
# Start image acquisition.
self._is_acquiring_images = True
@@ -1911,6 +1915,10 @@ class ImageAcquirer:
#
self.device.node_map.AcquisitionStop.execute()
+ # Unlock TLParamsLocked in order to allow full device
+ # configuration:
+ self.device.node_map.TLParamsLocked.value = 0
+
for data_stream in self._data_streams:
# Stop image acquisition.
try:
|
Resolve issue #<I>
|
diff --git a/src/Helpers/view/TinyMCE.php b/src/Helpers/view/TinyMCE.php
index <HASH>..<HASH> 100644
--- a/src/Helpers/view/TinyMCE.php
+++ b/src/Helpers/view/TinyMCE.php
@@ -17,9 +17,9 @@ class TinyMCE extends AbstractHelper
{
if ($this->_enabled) {
$this->getView()->Scripts()->setPack(false)
- ->add($this->getBase() . '/jquery.tinymce.min', 'tinymce')
- ->add($this->getBase() . '/tinymce.min', 'tinymce')
- ->add($this->getBase() . '/init', 'tinymce');
+ ->add($this->getBase() . '/jquery.tinymce.min.js', 'tinymce')
+ ->add($this->getBase() . '/tinymce.min.js', 'tinymce')
+ ->add($this->getBase() . '/init.js', 'tinymce');
}
return $this->getView()->Scripts()->render('tinymce');
|
Add SetBase to TinyMCE
|
diff --git a/openquake/calculators/views.py b/openquake/calculators/views.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/views.py
+++ b/openquake/calculators/views.py
@@ -695,7 +695,7 @@ def view_dupl_sources_time(token, dstore):
calc_time = records['calc_time'].sum()
tot_time += calc_time
tbl.append((source_id, calc_time, len(records)))
- if tbl and info.attrs.get('has_dupl_sources'):
+ if tbl:
tot = info['calc_time'].sum() + info['split_time'].sum()
percent = tot_time / tot * 100
m = '\nTotal time in duplicated sources: %d/%d (%d%%)' % (
@@ -854,7 +854,7 @@ def view_dupl_sources(token, dstore):
if not dupl:
return ''
msg = ('Found %d source(s) with the same ID and %d true duplicate(s): %s'
- % (len(sameid), len(dupl), dupl))
+ % (len(sameid), len(dupl), numpy.array(dupl)))
fakedupl = set(sameid) - set(dupl)
if fakedupl:
msg += '\nHere is a fake duplicate: %s' % fakedupl.pop()
|
Reduced logging in view_dupl_sources [skip hazardlib]
Former-commit-id: <I>f0a<I>d0bd9f7f<I>e9e1f<I>c<I>ab1ca5a6
|
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -135,7 +135,7 @@ test('Works for iterable objects', function(is) {
"value" : 1,
"@@iterator" : function(){
var hasValue = true;
- value = this.value;
+ var value = this.value;
return {
next: function(){
if(hasValue) {
|
Added missing `var` statement.
|
diff --git a/cmd/jujud/run.go b/cmd/jujud/run.go
index <HASH>..<HASH> 100644
--- a/cmd/jujud/run.go
+++ b/cmd/jujud/run.go
@@ -141,7 +141,10 @@ func (c *RunCommand) executeNoContext() (*exec.ExecResponse, error) {
if err != nil {
return nil, err
}
- lock.Lock("juju-run")
+ err = lock.Lock("juju-run")
+ if err != nil {
+ return nil, err
+ }
defer lock.Unlock()
runCmd := `[ -f "/home/ubuntu/.juju-proxy" ] && . "/home/ubuntu/.juju-proxy"` + "\n" + c.commands
|
Check the err response from the fslock Lock method.
|
diff --git a/src/main/java/org/paumard/spliterators/GatingSpliterator.java b/src/main/java/org/paumard/spliterators/GatingSpliterator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/paumard/spliterators/GatingSpliterator.java
+++ b/src/main/java/org/paumard/spliterators/GatingSpliterator.java
@@ -88,11 +88,7 @@ public class GatingSpliterator<E> implements Spliterator<E> {
@Override
public long estimateSize() {
- if (gateIsOpenned) {
- return this.spliterator.estimateSize();
- } else {
- return 0;
- }
+ return 0L;
}
@Override
|
Fixed the estimated size of the gating spliterator
|
diff --git a/ripe/atlas/sagan/helpers/abuf.py b/ripe/atlas/sagan/helpers/abuf.py
index <HASH>..<HASH> 100644
--- a/ripe/atlas/sagan/helpers/abuf.py
+++ b/ripe/atlas/sagan/helpers/abuf.py
@@ -290,12 +290,14 @@ class AbufParser(object):
edns0 = {
'UDPsize': res[1],
'ExtendedReturnCode': res[2] >> 24,
- 'Version': (res[2] and 0x0f00) >> 16,
- 'Z': (res[2] and 0x00ff),
+ 'Version': (res[2] & 0x00ff0000) >> 16,
+ 'Z': (res[2] & 0x007fff),
'Type': 'OPT',
'Option': [],
'Name': name,
}
+ if res[2] & 0x8000:
+ edns0['DO']= True
o = 0
while o < len(rdata):
|
Fixed some bugs in EDNS0 parsing and extract DO flag.
|
diff --git a/actionpack/lib/action_controller/metal/rendering.rb b/actionpack/lib/action_controller/metal/rendering.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_controller/metal/rendering.rb
+++ b/actionpack/lib/action_controller/metal/rendering.rb
@@ -6,7 +6,7 @@ module ActionController
# Before processing, set the request formats in current controller formats.
def process_action(*) #:nodoc:
- self.formats = request.formats.map { |x| x.ref }
+ self.formats = request.formats.select { |x| !x.nil? }.map(&:ref)
super
end
diff --git a/actionpack/lib/action_dispatch/http/mime_type.rb b/actionpack/lib/action_dispatch/http/mime_type.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_dispatch/http/mime_type.rb
+++ b/actionpack/lib/action_dispatch/http/mime_type.rb
@@ -306,12 +306,16 @@ module Mime
method.to_s.ends_with? '?'
end
end
-
+
class NullType
def nil?
true
end
+ def respond_to_missing?(method, include_private = false)
+ method.to_s.ends_with? '?'
+ end
+
private
def method_missing(method, *args)
false if method.to_s.ends_with? '?'
|
Reverts rendering behavior when format is unknown
If a request has an unknown format (e.g. /foo.bar), the renderer
falls back to the default format.
This patch reverts Rails <I> behavior after c<I>db commit.
Fixes issue #<I>.
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,6 @@ setup(
description="Python API and CLI for KeePassX",
long_description=open(os.path.join(os.path.dirname(__file__),
'README.rst')).read(),
- license='BSD',
author='James Saryerwinnie',
author_email='js@jamesls.com',
packages=find_packages(),
|
Remove license field
This is specified in the trove classifier as GPLv2.
|
diff --git a/django_extensions/management/shells.py b/django_extensions/management/shells.py
index <HASH>..<HASH> 100644
--- a/django_extensions/management/shells.py
+++ b/django_extensions/management/shells.py
@@ -153,7 +153,11 @@ def import_objects(options, style):
if not quiet_load:
print(style.SQL_TABLE("# Shell Plus Model Imports"))
for app_mod, models in sorted(six.iteritems(load_models)):
- app_name = app_mod.split('.')[-2]
+ try:
+ app_name = app_mod.split('.')[-2]
+ except IndexError:
+ # Some weird model naming scheme like in Sentry.
+ app_name = app_mod
app_aliases = model_aliases.get(app_name, {})
model_labels = []
|
Fix model loading for sentry
Sentry has a weird model naming scheme where all model files are inside
a single directory called models and each 'app' models is in a file of
its own (I guess the authors thought this was a better idea when they
didn't have any views etc. but models only).
With the models directory begin in the import path, one can import by
from useroption import UserOption
for example.
If there are no app names, use the file name as app name.
|
diff --git a/public/javascripts/promotion.js b/public/javascripts/promotion.js
index <HASH>..<HASH> 100644
--- a/public/javascripts/promotion.js
+++ b/public/javascripts/promotion.js
@@ -184,7 +184,7 @@ var promotion_page = {
if (promotion_page.current_changeset) {
if (promotion_page.current_product) {
var product = promotion_page.current_changeset.products[promotion_page.current_product];
- if( product.all ){
+ if( product !== undefined && product.all !== undefined ){
promotion_page.disable_all();
} else {
jQuery.each(promotion_page.subtypes, function(index, type){
|
Adds extra check to ensure product in reset_page exists when doing an all check.
|
diff --git a/rollbar/contrib/fastapi/utils.py b/rollbar/contrib/fastapi/utils.py
index <HASH>..<HASH> 100644
--- a/rollbar/contrib/fastapi/utils.py
+++ b/rollbar/contrib/fastapi/utils.py
@@ -1,5 +1,8 @@
+import functools
import logging
+import fastapi
+
log = logging.getLogger(__name__)
@@ -11,3 +14,20 @@ class FastAPIVersionError(Exception):
log.error(err_msg)
return super().__init__(err_msg)
+
+
+class fastapi_min_version:
+ def __init__(self, min_version):
+ self.min_version = min_version
+
+ def __call__(self, func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ if fastapi.__version__ < self.min_version:
+ raise FastAPIVersionError(
+ '0.41.0', reason=f'to use {func.__name__}() function'
+ )
+
+ return func(*args, **kwargs)
+
+ return wrapper
|
Add decorator to check minimum required FastAPI version
|
diff --git a/src/main/java/io/airlift/slice/XxHash64.java b/src/main/java/io/airlift/slice/XxHash64.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/airlift/slice/XxHash64.java
+++ b/src/main/java/io/airlift/slice/XxHash64.java
@@ -85,16 +85,9 @@ public class XxHash64
hash = rotateLeft(v1, 1) + rotateLeft(v2, 7) + rotateLeft(v3, 12) + rotateLeft(v4, 18);
- v1 = mix(0, v1);
hash = update(hash, v1);
-
- v2 = mix(0, v2);
hash = update(hash, v2);
-
- v3 = mix(0, v3);
hash = update(hash, v3);
-
- v4 = mix(0, v4);
hash = update(hash, v4);
}
else {
@@ -130,7 +123,7 @@ public class XxHash64
private static long update(long hash, long value)
{
- long temp = hash ^ value;
+ long temp = hash ^ mix(0, value);
return temp * PRIME64_1 + PRIME64_4;
}
|
More simplification of xxh<I>
|
diff --git a/Tests/TestServiceSoundCloud.php b/Tests/TestServiceSoundCloud.php
index <HASH>..<HASH> 100755
--- a/Tests/TestServiceSoundCloud.php
+++ b/Tests/TestServiceSoundCloud.php
@@ -34,7 +34,6 @@ class TestServiceSoundCloud extends TestProviders
'https://soundcloud.com/explore',
'https://soundcloud.com/groups',
'https://soundcloud.com',
- '',
),
);
|
Remove empty url from SoundCloud Test
|
diff --git a/lib/ponder/irc.rb b/lib/ponder/irc.rb
index <HASH>..<HASH> 100644
--- a/lib/ponder/irc.rb
+++ b/lib/ponder/irc.rb
@@ -12,9 +12,10 @@ module Ponder
raw "PRIVMSG #{recipient} :#{message}"
end
+ # register when connected
def register
raw "NICK #{@config.nick}"
- raw "USER #{@config.nick} 0 * :#{@config.realname}"
+ raw "USER #{@config.username} * * :#{@config.real_name}"
raw "PASS #{@config.password}" if @config.password
end
diff --git a/lib/ponder/thaum.rb b/lib/ponder/thaum.rb
index <HASH>..<HASH> 100644
--- a/lib/ponder/thaum.rb
+++ b/lib/ponder/thaum.rb
@@ -18,7 +18,8 @@ module Ponder
@config = OpenStruct.new(:server => 'localhost',
:port => 6667,
:nick => 'Ponder',
- :realname => 'Ponder',
+ :username => 'Ponder',
+ :real_name => 'Ponder',
:verbose => true,
:logging => false,
:reconnect => true,
|
added the functionality of choosing a username
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -137,6 +137,7 @@ setup(
'boto',
'jellyfish',
'nilsimsa >= 0.3',
+ 'regex != 2014.08.28',
'chromium_compact_language_detector',
'sortedcollection',
'python-docx',
|
we should be using the new regex wherever we use `re`
|
diff --git a/djpaypal/models/billing.py b/djpaypal/models/billing.py
index <HASH>..<HASH> 100644
--- a/djpaypal/models/billing.py
+++ b/djpaypal/models/billing.py
@@ -143,7 +143,8 @@ class BillingAgreement(PaypalObject):
if ba.error:
raise PaypalApiError(str(ba.error)) # , ba.error)
- return cls.get_or_update_from_api_data(ba, always_sync=True)
+ obj, created = cls.get_or_update_from_api_data(ba, always_sync=True)
+ return obj
class PaymentDefinition(PaypalObject):
|
Return only the object from BillingAgreement.execute
|
diff --git a/tests/tests/lib/ezutils/ezmail_test.php b/tests/tests/lib/ezutils/ezmail_test.php
index <HASH>..<HASH> 100644
--- a/tests/tests/lib/ezutils/ezmail_test.php
+++ b/tests/tests/lib/ezutils/ezmail_test.php
@@ -800,7 +800,7 @@ class eZMailTest extends ezpTestCase
}
// Open mailbox and delete all existing emails in the account
- $mbox = imap_open( $mboxString, $recipient['username'], $recipient['password'] );
+ $mbox = @imap_open( $mboxString, $recipient['username'], $recipient['password'] );
if ( !$mbox )
{
$this->markTestSkipped( 'Cannot open mailbox for ' . $recipient['username'] . ': ' . imap_last_error() );
@@ -860,7 +860,7 @@ class eZMailTest extends ezpTestCase
// Read emails
foreach ( $recipients as $recipient )
{
- $mbox = imap_open( $mboxString, $recipient['username'], $recipient['password'] );
+ $mbox = @imap_open( $mboxString, $recipient['username'], $recipient['password'] );
if ( !$mbox )
{
$this->markTestSkipped( 'Cannot open mailbox for ' . $recipient['username'] . ': ' . imap_last_error() );
|
Fix failing imap tests, silence errors causing failures
|
diff --git a/website/resources/js/main.js b/website/resources/js/main.js
index <HASH>..<HASH> 100644
--- a/website/resources/js/main.js
+++ b/website/resources/js/main.js
@@ -37,7 +37,7 @@
if (self.data("clc")) return;
var href = self.attr("href");
self.data("clc", true);
- if (!href || href.substr(0, 4) === "http" || href === "/api/" || href === "/" || href === "/supporters" || href === "/order-license") return;
+ if (!href || href.substr(0, 4) === "http" || href.substr(0, 1) === "#" || href === "/api/" || href === "/" || href === "/supporters" || href === "/order-license") return;
var ext = href.substr(href.length - 4, 4);
if (ext === ".xml" || ext === ".jar" || ext === ".pdf") return;
self.on("click", function(evt) {
|
[website] fixed history/ajax-load system to deal with anchor links
|
diff --git a/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java b/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java
+++ b/src/main/java/com/hmsonline/cassandra/triggers/Trigger.java
@@ -1,8 +1,14 @@
package com.hmsonline.cassandra.triggers;
-
+/**
+ * A trigger that can be invoked upon a database mutation.
+ */
public interface Trigger {
- public void process(LogEntry loEntry);
-
+ /**
+ * Causes this trigger to process the given {@link LogEntry}.
+ *
+ * @param logEntry the log entry to process (never <code>null</code>)
+ */
+ void process(LogEntry logEntry);
}
|
Fixed "loEntry" typo, removed redundant public modifier, and added JavaDoc.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.