| hash (stringlengths 40–40) | diff (stringlengths 131–114k) | message (stringlengths 7–980) | project (stringlengths 5–67) | split (stringclasses, 1 value) |
|---|---|---|---|---|
99921fc13b22de3dfddb4277b0612353886cc916
|
diff --git a/main/core/Resources/modules/workspace/store/actions.js b/main/core/Resources/modules/workspace/store/actions.js
index <HASH>..<HASH> 100644
--- a/main/core/Resources/modules/workspace/store/actions.js
+++ b/main/core/Resources/modules/workspace/store/actions.js
@@ -86,6 +86,10 @@ actions.selfRegister = (workspace) => ({
url: ['apiv2_workspace_self_register', {workspace: workspace.uuid}],
request: {
method: 'PUT'
+ },
+ success: (response, dispatch) => {
+ dispatch(actions.setLoaded(false))
+ dispatch(actions.open(workspace.meta.slug))
}
}
})
|
Reloads workspace after self-registration from workspace restrictions page
|
claroline_Distribution
|
train
|
905a44908c4fd18805262b75db377dab685d71c4
|
diff --git a/src/module-elasticsuite-catalog-graph-ql/Search/Request/Product/Aggregation/Provider/FilterableAttributes/Modifier/ViewMore.php b/src/module-elasticsuite-catalog-graph-ql/Search/Request/Product/Aggregation/Provider/FilterableAttributes/Modifier/ViewMore.php
index <HASH>..<HASH> 100644
--- a/src/module-elasticsuite-catalog-graph-ql/Search/Request/Product/Aggregation/Provider/FilterableAttributes/Modifier/ViewMore.php
+++ b/src/module-elasticsuite-catalog-graph-ql/Search/Request/Product/Aggregation/Provider/FilterableAttributes/Modifier/ViewMore.php
@@ -45,6 +45,7 @@ class ViewMore implements ModifierInterface
*
* @param \Smile\ElasticsuiteCatalog\Helper\ProductAttribute $mappingHelper Mapping Helper
* @param \Magento\Catalog\Api\ProductAttributeRepositoryInterface $attributeRepository Attribute Repository
+ * @param ViewMoreContext $viewMoreContext View More Context
*/
public function __construct(
\Smile\ElasticsuiteCatalog\Helper\ProductAttribute $mappingHelper,
|
Implementation of "view more" in aggregations.
|
Smile-SA_elasticsuite
|
train
|
8db72c3f10a8e02dc371015db0753616d14c3227
|
diff --git a/nipap/setup.py b/nipap/setup.py
index <HASH>..<HASH> 100644
--- a/nipap/setup.py
+++ b/nipap/setup.py
@@ -20,7 +20,7 @@ setup(
keywords = ['nipap'],
requires = ['twisted', 'ldap', 'sqlite3', 'IPy', 'psycopg2'],
data_files = [
- ('/etc/nipap/', ['nipap.conf']),
+ ('/etc/nipap/', ['local_auth.db', 'nipap.conf']),
('/usr/bin/', ['nipap-passwd']),
('/usr/sbin/', ['nipapd']),
('/usr/share/nipap/sql/', [
|
Include local_auth.db in packaging
This includes an example local_auth.db via setup.py and, in turn, in the
Debian package.
|
SpriteLink_NIPAP
|
train
|
f244ee478f39805b43166a4438242351a04d110e
|
diff --git a/src/select.js b/src/select.js
index <HASH>..<HASH> 100644
--- a/src/select.js
+++ b/src/select.js
@@ -128,7 +128,7 @@
if (ctrl.resetSearchInput) {
ctrl.search = EMPTY_SEARCH;
//reset activeIndex
- if (ctrl.selected && ctrl.items.length) {
+ if (ctrl.selected && ctrl.items.length && !ctrl.multiple) {
ctrl.activeIndex = ctrl.items.indexOf(ctrl.selected);
}
}
|
Fix: multiple choices highlighted at same time
|
angular-ui_ui-select
|
train
|
d699dcae493d382c9b24678f0f9d1363b7736383
|
diff --git a/tests/automated/legacy/locale.js b/tests/automated/legacy/locale.js
index <HASH>..<HASH> 100644
--- a/tests/automated/legacy/locale.js
+++ b/tests/automated/legacy/locale.js
@@ -7,30 +7,25 @@ describe('locale', function() {
it('is not affected by global moment locale when unset', function() {
moment.locale('fr')
- affix('#cal')
- $('#cal').fullCalendar()
- var calendar = $('#cal').fullCalendar('getCalendar')
- var mom = calendar.moment('2014-05-01')
+ initCalendar()
+ var mom = window.currentCalendar.moment('2014-05-01')
var s = mom.format('dddd MMMM Do YYYY')
expect(s).toEqual('Thursday May 1st 2014')
})
it('is not affected by global moment locale when unset', function() {
moment.locale('fr')
- affix('#cal')
- $('#cal').fullCalendar({
+ initCalendar({
locale: 'es'
})
- var calendar = $('#cal').fullCalendar('getCalendar')
- var mom = calendar.moment('2014-05-01')
+ var mom = window.currentCalendar.moment('2014-05-01')
var s = mom.format('dddd MMMM Do YYYY')
expect(s).toEqual('jueves mayo 1º 2014')
})
it('doesn\'t side-effect the global moment locale when customized', function() {
moment.locale('fr')
- affix('#cal')
- $('#cal').fullCalendar({
+ initCalendar({
locale: 'es'
})
var mom = moment.utc('2014-05-01')
@@ -44,11 +39,10 @@ describe('locale', function() {
// needs to be fixed to the developer.
/*
xit('defaults to English when configured to locale that isn\'t loaded', function() {
- affix('#cal');
- $('#cal').fullCalendar({
+ pushOptions({
locale: 'zz'
});
- var calendar = $('#cal').fullCalendar('getCalendar');
+ var calendar = initCalendar();
var mom = calendar.moment('2014-05-01');
var s = mom.format('dddd MMMM Do YYYY');
expect(s).toEqual('Thursday May 1st 2014');
@@ -56,8 +50,7 @@ describe('locale', function() {
*/
it('works when certain locale has no FC settings defined', function() {
- affix('#cal')
- $('#cal').fullCalendar({
+ initCalendar({
locale: 'en-ca',
defaultView: 'agendaWeek',
defaultDate: '2014-12-25',
@@ -69,21 +62,26 @@ describe('locale', function() {
expect($('.fc-event .fc-time')).toHaveText('10:00')
})
- it('allows dynamic setting', function() {
- affix('#cal')
- $('#cal').fullCalendar({
+ // @todo dynamic setting of locale not working
+ xit('allows dynamic setting', function() {
+
+ initCalendar({
locale: 'es',
defaultDate: '2016-07-10',
defaultView: 'month'
})
- expect($('.fc h2')).toHaveText('julio 2016')
- expect($('.fc')).not.toHaveClass('fc-rtl')
- $('#cal').fullCalendar('option', 'locale', 'fr')
- expect($('.fc h2')).toHaveText('juillet 2016')
+ var calendar_el = window.currentCalendar.el
+
+ expect($('h2', calendar_el)).toHaveText('julio 2016')
+ expect($(calendar_el)).not.toHaveClass('fc-rtl')
+
+ calendar_el.fullCalendar('option', 'locale', 'fr')
+ expect($('h2', calendar_el)).toHaveText('juillet 2016')
+
+ calendar_el.fullCalendar('option', 'locale', 'ar')
+ expect($(calendar_el).toHaveClass('fc-rtl')
- $('#cal').fullCalendar('option', 'locale', 'ar') // NOTE: we had problems testing for RTL title text
- expect($('.fc')).toHaveClass('fc-rtl')
})
})
|
update tests, disable failing test of dynamic locale change
|
fullcalendar_fullcalendar
|
train
|
6b3ebfc1d9cf5787951d564d918342e09161281a
|
diff --git a/lib/transmission-rss/aggregator.rb b/lib/transmission-rss/aggregator.rb
index <HASH>..<HASH> 100644
--- a/lib/transmission-rss/aggregator.rb
+++ b/lib/transmission-rss/aggregator.rb
@@ -95,7 +95,7 @@ module TransmissionRSS
begin
on_new_item(link)
- rescue Errno::ECONNREFUSED, Client::Unauthorized
+ rescue Client::Unauthorized, Errno::ECONNREFUSED, Timeout::Error
# Do not add to seen file.
else
add_seen(link)
diff --git a/lib/transmission-rss/client.rb b/lib/transmission-rss/client.rb
index <HASH>..<HASH> 100644
--- a/lib/transmission-rss/client.rb
+++ b/lib/transmission-rss/client.rb
@@ -84,7 +84,10 @@ module TransmissionRSS
end
def request(data)
+ c ||= 0
+
Timeout.timeout(@timeout) do
+ @log.debug("request #@host:#@port")
Net::HTTP.new(@host, @port).start do |http|
http.request(data)
end
@@ -93,7 +96,13 @@ module TransmissionRSS
@log.debug('connection refused')
raise
rescue Timeout::Error
- @log.debug('connection timeout')
+ s = 'connection timeout'
+ s += " (retry #{c})" if c > 0
+ @log.debug(s)
+
+ c += 1
+ retry unless c > 2
+
raise
end
end
|
Retry client requests on timeout and do not add URL to seen file.
|
nning_transmission-rss
|
train
|
84081c9e57dd9b4098bf89c547adde39f7e8af48
|
diff --git a/lib/octokit/client/repositories.rb b/lib/octokit/client/repositories.rb
index <HASH>..<HASH> 100644
--- a/lib/octokit/client/repositories.rb
+++ b/lib/octokit/client/repositories.rb
@@ -232,6 +232,24 @@ module Octokit
post "repos/#{Repository.new repo}/keys", options.merge(:title => title, :key => key)
end
+ # Edit a deploy key
+ #
+ # @param repo [String, Hash, Repository] A GitHub repository.
+ # @param id [Integer] Deploy key ID.
+ # @param options [Hash] Attributes to edit.
+ # @option title [String] Key title.
+ # @option key [String] Public key.
+ # @return [Sawyer::Resource] Updated deploy key.
+ # @see http://developer.github.com/v3/repos/keys/#edit
+ # @example Update the key for a deploy key.
+ # @client.edit_deploy_key('octokit/octokit.rb', 8675309, :key => 'ssh-rsa BBB...')
+ # @example
+ # @client.update_deploy_key('octokit/octokit.rb', 8675309, :title => 'Uber', :key => 'ssh-rsa BBB...'))
+ def edit_deploy_key(repo, id, options)
+ patch "repos/#{Repository.new repo}/keys/#{id}", options
+ end
+ alias :update_deploy_key :edit_deploy_key
+
# Remove deploy key from a repo
#
# Requires authenticated client.
diff --git a/spec/octokit/client/repositories_spec.rb b/spec/octokit/client/repositories_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/octokit/client/repositories_spec.rb
+++ b/spec/octokit/client/repositories_spec.rb
@@ -68,16 +68,28 @@ describe Octokit::Client::Repositories do
describe ".deploy_key" do
it "returns a specific deploy key for a repo" do
- VCR.turn_off!
- repo = "api-playground/api-sandbox"
- key_id = 8675309
- request = stub_get github_url "/repos/#{repo}/keys/#{key_id}"
- deploy_key = @client.deploy_key repo, key_id
- assert_requested request
- VCR.turn_on!
+ VCR.turned_off do
+ repo = "api-playground/api-sandbox"
+ key_id = 8675309
+ request = stub_get github_url "/repos/#{repo}/keys/#{key_id}"
+ deploy_key = @client.deploy_key repo, key_id
+ assert_requested request
+ end
end
end # .deploy_key
+ describe ".edit_deploy_key" do
+ it "modifies a deploy key" do
+ VCR.turned_off do
+ repo = "api-playground/api-sandbox"
+ key_id = 8675309
+ request = stub_patch github_url "/repos/#{repo}/keys/#{key_id}"
+ updated_deploy_key = @client.edit_deploy_key(repo, key_id, :title => 'Staging')
+ assert_requested request
+ end
+ end
+ end # .edit_deploy_key
+
describe ".remove_deploy_key" do
it "removes a repository deploy keys" do
VCR.turned_off do
|
Add Repositories#edit_deploy_key. Use VCR turned_off blocks.
|
octokit_octokit.rb
|
train
|
18995cff7057b74decf520885e08cf07358f4935
|
diff --git a/src/toil_scripts/rnaseq_cgl/rnaseq_cgl_pipeline.py b/src/toil_scripts/rnaseq_cgl/rnaseq_cgl_pipeline.py
index <HASH>..<HASH> 100644
--- a/src/toil_scripts/rnaseq_cgl/rnaseq_cgl_pipeline.py
+++ b/src/toil_scripts/rnaseq_cgl/rnaseq_cgl_pipeline.py
@@ -673,7 +673,8 @@ def star(job, job_vars):
wiggles = [os.path.basename(x) for x in glob.glob(os.path.join(work_dir, '*.bg'))]
# Rename extension
for wiggle in wiggles:
- shutil.move(wiggle, os.path.splitext(wiggle)[0] + '.bedGraph')
+ shutil.move(os.path.join(work_dir, wiggle),
+ os.path.join(work_dir, os.path.splitext(wiggle)[0] + '.bedGraph'))
wiggles = [os.path.splitext(x)[0] + '.bedGraph' for x in wiggles]
tarball_files(work_dir, 'wiggle.tar.gz', uuid=uuid, files=wiggles)
ids['wiggle.tar.gz'] = job.fileStore.writeGlobalFile(os.path.join(work_dir, 'wiggle.tar.gz'))
|
only the basename for the wiggle files was stored, causing a path error during the move step.
|
BD2KGenomics_toil-scripts
|
train
|
169a1ccd39771b21fc81476f4033bd70a690762d
|
diff --git a/asn1crypto/ocsp.py b/asn1crypto/ocsp.py
index <HASH>..<HASH> 100644
--- a/asn1crypto/ocsp.py
+++ b/asn1crypto/ocsp.py
@@ -330,7 +330,7 @@ class StatusGood(Null):
None or 'good'
"""
- if value is not None and value != 'good':
+ if value is not None and value != 'good' and not isinstance(value, Null):
raise ValueError(unwrap(
'''
value must be one of None, "good", not %s
@@ -355,7 +355,7 @@ class StatusUnknown(Null):
None or 'unknown'
"""
- if value is not None and value != 'unknown':
+ if value is not None and value != 'unknown' and not isinstance(value, Null):
raise ValueError(unwrap(
'''
value must be one of None, "unknown", not %s
|
Preserve the ability to pass core.Null() into ocsp.CertStatus()
|
wbond_asn1crypto
|
train
|
bb2afcb38e6f15d6e335afda8a703a292fdc9639
|
diff --git a/builtin/providers/opc/import_security_protocol_test.go b/builtin/providers/opc/import_security_protocol_test.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/opc/import_security_protocol_test.go
+++ b/builtin/providers/opc/import_security_protocol_test.go
@@ -32,11 +32,11 @@ func TestAccOPCSecurityProtocol_importBasic(t *testing.T) {
},
})
}
-func TestAccOPCSecurityProtocol_importDisabled(t *testing.T) {
+func TestAccOPCSecurityProtocol_importComplete(t *testing.T) {
resourceName := "opc_compute_security_protocol.test"
ri := acctest.RandInt()
- config := fmt.Sprintf(testAccOPCSecurityProtocolFull, ri)
+ config := fmt.Sprintf(testAccOPCSecurityProtocolComplete, ri)
resource.Test(t, resource.TestCase{
PreCheck: func() {
diff --git a/builtin/providers/opc/resource_security_protocol_test.go b/builtin/providers/opc/resource_security_protocol_test.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/opc/resource_security_protocol_test.go
+++ b/builtin/providers/opc/resource_security_protocol_test.go
@@ -29,10 +29,34 @@ func TestAccOPCSecurityProtocol_Basic(t *testing.T) {
})
}
-func TestAccOPCSecurityProtocol_Full(t *testing.T) {
+func TestAccOPCSecurityProtocol_Complete(t *testing.T) {
protocolResourceName := "opc_compute_security_protocol.test"
ri := acctest.RandInt()
- config := fmt.Sprintf(testAccOPCSecurityProtocolFull, ri)
+ config := fmt.Sprintf(testAccOPCSecurityProtocolComplete, ri)
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t) },
+ Providers: testAccProviders,
+ CheckDestroy: testAccCheckSecurityProtocolDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: config,
+ Check: resource.ComposeTestCheckFunc(
+ testAccCheckSecurityProtocolExists,
+ resource.TestCheckResourceAttr(protocolResourceName, "description", "Terraform Acceptance Test"),
+ resource.TestCheckResourceAttr(protocolResourceName, "dst_ports.0", "2025-2030"),
+ resource.TestCheckResourceAttr(protocolResourceName, "src_ports.0", "3025-3030"),
+ resource.TestCheckResourceAttr(protocolResourceName, "ip_protocol", "tcp"),
+ ),
+ },
+ },
+ })
+}
+
+func TestAccOPCSecurityProtocol_Update(t *testing.T) {
+ protocolResourceName := "opc_compute_security_protocol.test"
+ ri := acctest.RandInt()
+ config := fmt.Sprintf(testAccOPCSecurityProtocolComplete, ri)
config2 := fmt.Sprintf(testAccOPCSecurityProtocolUpdated, ri)
resource.Test(t, resource.TestCase{
@@ -109,7 +133,7 @@ resource "opc_compute_security_protocol" "test" {
}
`
-const testAccOPCSecurityProtocolFull = `
+const testAccOPCSecurityProtocolComplete = `
resource "opc_compute_security_protocol" "test" {
name = "acc-security-protocol-%d"
description = "Terraform Acceptance Test"
|
Refactoring the Complete test / identifying Update as Update
|
hashicorp_terraform
|
train
|
792cdb419a299209eb0221bd8c80ab2f6c0e531e
|
diff --git a/quark/db/models.py b/quark/db/models.py
index <HASH>..<HASH> 100644
--- a/quark/db/models.py
+++ b/quark/db/models.py
@@ -316,7 +316,7 @@ class Subnet(BASEV2, models.HasId, IsHazTags):
name = sa.Column(sa.String(255))
network_id = sa.Column(sa.String(36), sa.ForeignKey('quark_networks.id'))
_cidr = sa.Column(sa.String(64), nullable=False)
- _allocation_pool_cache = sa.Column(sa.Text(), nullable=True)
+ _allocation_pool_cache = orm.deferred(sa.Column(sa.Text(), nullable=True))
tenant_id = sa.Column(sa.String(255), index=True)
segment_id = sa.Column(sa.String(255), index=True)
diff --git a/quark/tests/plugin_modules/test_subnets.py b/quark/tests/plugin_modules/test_subnets.py
index <HASH>..<HASH> 100644
--- a/quark/tests/plugin_modules/test_subnets.py
+++ b/quark/tests/plugin_modules/test_subnets.py
@@ -1232,6 +1232,7 @@ class TestSubnetsQuotas(test_quark_plugin.TestQuarkPlugin):
s["network"] = models.Network()
s["network"]["created_at"] = s["created_at"]
s["dns_nameservers"] = []
+ s["_allocation_pool_cache"] = None
subnet = models.Subnet(**s)
subnets.append(subnet)
with contextlib.nested(
|
Deferred allocation pool cache
Defer hitting the database for the allocation pool cache until it
is actually accessed in code. This should lead to a performance
boost in some situations.
JIRA:NCP-<I>
|
openstack_quark
|
train
|
8df0e1fe7b42c10d7547ba23b3a6760134fad497
|
diff --git a/core/src/test/java/org/jsmart/zerocode/core/utils/SmartUtilsTest.java b/core/src/test/java/org/jsmart/zerocode/core/utils/SmartUtilsTest.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/org/jsmart/zerocode/core/utils/SmartUtilsTest.java
+++ b/core/src/test/java/org/jsmart/zerocode/core/utils/SmartUtilsTest.java
@@ -173,7 +173,6 @@ public class SmartUtilsTest {
String parentFolderAbsPath = path1.getParent().getParent().toFile().getAbsolutePath();
- System.out.println("parent path: --> " + parentFolderAbsPath);
List<String> allScenarios = SmartUtils.retrieveScenariosByAbsPath(parentFolderAbsPath);
assertThat(allScenarios.size(), is(2));
|
ISS-0 # sysout - Removed
|
authorjapps_zerocode
|
train
|
64b7abe7298c7b2c8b00e9fcba67407ce1337bca
|
diff --git a/lib/cramp/model/finders.rb b/lib/cramp/model/finders.rb
index <HASH>..<HASH> 100644
--- a/lib/cramp/model/finders.rb
+++ b/lib/cramp/model/finders.rb
@@ -19,7 +19,7 @@ module Cramp
private
def table_name
- @table_name || self.to_s.pluralize
+ @table_name || self.to_s.demodulize.underscore.pluralize
end
end
|
Table names are now resolved the same way as Rails does.
Before: ClassName => ClassNames
Now: ClassName => class_names
|
lifo_cramp
|
train
|
7063ef48d1e01a6dee71f0931e77b7a4bb3bd8ca
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -119,15 +119,12 @@ for($i = 0; $i < 5; $i++)
$ui = LabelBox::create()
->setPosn(0, -35)
->setSizen(100, 10)
- ->appendTo($frame)
- ->bg
- ->setStyle(Bgs1::BgTitle3)
- ->getParent()
- ->label
- ->setText('Much foobar')
- ->setTextSize(4)
- ->getParent()
->appendTo($frame);
+$ui->bg()
+ ->setStyle(Bgs1::BgTitle3);
+$ui->label()
+ ->setText('Much foobar')
+ ->setTextSize(4);
$frame2 = Frame::create()
->setPosn(0, -50)
diff --git a/src/ManiaLib/Manialink/Cards/Box.php b/src/ManiaLib/Manialink/Cards/Box.php
index <HASH>..<HASH> 100644
--- a/src/ManiaLib/Manialink/Cards/Box.php
+++ b/src/ManiaLib/Manialink/Cards/Box.php
@@ -2,25 +2,35 @@
namespace ManiaLib\Manialink\Cards;
-class Box extends \ManiaLib\Manialink\Elements\Frame
+use ManiaLib\Manialink\Elements\Frame;
+use ManiaLib\Manialink\Elements\Quad;
+
+class Box extends Frame
{
/**
- * @var \ManiaLib\Manialink\Elements\Quad
+ * @var Quad
*/
- public $bg;
+ protected $bg;
function __construct()
{
parent::__construct();
- $this->bg = new \ManiaLib\Manialink\Elements\Quad();
- $this->appendChild($this->bg);
+ $this->bg = Quad::create()->appendTo($this);
}
- function preFilterSize()
+ protected function preFilterSize()
{
parent::__construct();
$this->bg->setSizen($this->getSizenX(), $this->getSizenY());
}
+ /**
+ * @return Quad
+ */
+ function bg()
+ {
+ return $this->bg;
+ }
+
}
diff --git a/src/ManiaLib/Manialink/Cards/LabelBox.php b/src/ManiaLib/Manialink/Cards/LabelBox.php
index <HASH>..<HASH> 100644
--- a/src/ManiaLib/Manialink/Cards/LabelBox.php
+++ b/src/ManiaLib/Manialink/Cards/LabelBox.php
@@ -10,16 +10,18 @@ class LabelBox extends Box
/**
* @var Label
*/
- public $label;
+ protected $label;
function __construct()
{
parent::__construct();
- $this->label = Label::create()->setBothAlign('center', 'center')->setPosn(0, 0, 0.1);
- $this->appendChild($this->label);
+ $this->label = Label::create()
+ ->setBothAlign('center', 'center')
+ ->setPosn(0, 0, 0.1)
+ ->appendTo($this);
}
- function preFilterSize()
+ protected function preFilterSize()
{
parent::preFilterSize();
@@ -29,4 +31,12 @@ class LabelBox extends Box
}
}
+ /**
+ * @return Label
+ */
+ function label()
+ {
+ return $this->label;
+ }
+
}
|
Replaced public properties in cards with getters (thanks m4rcel for the feedback).
|
maniaplanet_manialib-manialink
|
train
|
e888032bdf7ae6ef26a60607eb75de0f81cb76bd
|
diff --git a/PHPCI/Controller/WebhookController.php b/PHPCI/Controller/WebhookController.php
index <HASH>..<HASH> 100644
--- a/PHPCI/Controller/WebhookController.php
+++ b/PHPCI/Controller/WebhookController.php
@@ -39,15 +39,8 @@ class WebhookController extends \PHPCI\Controller
public function bitbucket($project)
{
$payload = json_decode($this->getParam('payload'), true);
- $commits = array();
foreach ($payload['commits'] as $commit) {
- if (!in_array($commit['branch'], array_keys($commits))) {
- $commits[$commit['branch']] = $commit;
- }
- }
-
- foreach ($commits as $commit) {
try {
$email = $commit['raw_author'];
$email = substr($email, 0, strpos($email, '>'));
|
Updating Bitbucket to match Github and Gitlab in regards to building all payload commits instead of one per branch
|
dancryer_PHPCI
|
train
|
887277329a5fbc29f34091dbccc031489a4e7bff
|
diff --git a/desktop/app/index.js b/desktop/app/index.js
index <HASH>..<HASH> 100644
--- a/desktop/app/index.js
+++ b/desktop/app/index.js
@@ -74,16 +74,6 @@ if (shouldQuit) {
}
})
- // Don't quit the app, instead try to close all windows
- app.on('close-windows', event => {
- const windows = BrowserWindow.getAllWindows()
- windows.forEach(w => {
- // We tell it to close, we can register handlers for the 'close' event if we want to
- // keep this window alive or hide it instead.
- w.close()
- })
- })
-
app.on('before-quit', event => {
const windows = BrowserWindow.getAllWindows()
windows.forEach(w => {
diff --git a/desktop/app/menu-helper.js b/desktop/app/menu-helper.js
index <HASH>..<HASH> 100644
--- a/desktop/app/menu-helper.js
+++ b/desktop/app/menu-helper.js
@@ -16,7 +16,7 @@ export default function makeMenu (window) {
{label: 'Hide Others', accelerator: 'CmdOrCtrl+Shift+H', role: 'hideothers'},
{label: 'Show All', role: 'unhide'},
{type: 'separator'},
- {label: 'Quit', accelerator: 'CmdOrCtrl+Q', click () { app.emit('close-windows') }},
+ {label: 'Quit', accelerator: 'CmdOrCtrl+Q', role: 'quit'},
],
}, {
label: 'Edit',
|
Allow the entire app to quit on OSX through Quit
Leaving the menubar active but closing all windows results in main
thread exceptions when RPCs to the other windows fail.
|
keybase_client
|
train
|
f7146be784346cbd24f68aabae057d21f7402b19
|
diff --git a/src/Assetic/Factory/AssetFactory.php b/src/Assetic/Factory/AssetFactory.php
index <HASH>..<HASH> 100644
--- a/src/Assetic/Factory/AssetFactory.php
+++ b/src/Assetic/Factory/AssetFactory.php
@@ -163,10 +163,6 @@ class AssetFactory
$options['output'] = $this->output;
}
- if (!isset($options['name'])) {
- $options['name'] = $this->generateAssetName($inputs, $filters, $options);
- }
-
if (!isset($options['debug'])) {
$options['debug'] = $this->debug;
}
@@ -181,6 +177,10 @@ class AssetFactory
$options['root'][] = $this->root;
}
+ if (!isset($options['name'])) {
+ $options['name'] = $this->generateAssetName($inputs, $filters, $options);
+ }
+
$asset = $this->createAssetCollection();
$extensions = array();
|
moved generation of name after all default options are set (closes #<I>)
|
kriswallsmith_assetic
|
train
|
f4cf6deb6a2c32d71a3a916e0fcd112ceaafb914
|
diff --git a/ipyxact/ipyxact.py b/ipyxact/ipyxact.py
index <HASH>..<HASH> 100644
--- a/ipyxact/ipyxact.py
+++ b/ipyxact/ipyxact.py
@@ -52,8 +52,11 @@ class IpxactBool(str):
raise Exception
class IpxactItem(object):
- nsmap = {'1.4' : ('spirit' , 'http://www.spiritconsortium.org/XMLSchema/SPIRIT/1.4'),
- '1.5' : ('spirit' , 'http://www.spiritconsortium.org/XMLSchema/SPIRIT/1.5')}
+ nsmap = {'1.4' : ('spirit' , 'http://www.spiritconsortium.org/XMLSchema/SPIRIT/1.4'),
+ '1.5' : ('spirit' , 'http://www.spiritconsortium.org/XMLSchema/SPIRIT/1.5'),
+ '2009' : ('spirit' , 'http://www.spiritconsortium.org/XMLSchema/SPIRIT/1685-2009'),
+ '2014' : ('ipxact' , 'http://www.accellera.org/XMLSchema/IPXACT/1685-2014'),
+ }
nsversion = '1.5'
ATTRIBS = {}
@@ -74,13 +77,10 @@ class IpxactItem(object):
tree = ET.parse(f)
root = tree.getroot()
- #Warning: Horrible hack to find out which IP-Xact version that is used
- for key, value in root.attrib.items():
- if key == '{http://www.w3.org/2001/XMLSchema-instance}schemaLocation':
- nstags = value.split()
- for version, _val in self.nsmap.items():
- if _val[1] in nstags:
- self.nsversion = version
+ #Warning: Semi-horrible hack to find out which IP-Xact version that is used
+ for key, value in self.nsmap.items():
+ if root.tag[1:].startswith(value[1]):
+ self.nsversion = key
S = '{%s}' % self.nsmap[self.nsversion][1]
if not (root.tag == S+self._tag):
|
Improve IP-XACT version detection and add <I>/<I> support
|
olofk_ipyxact
|
train
|
1833effeb71918b9071d804e1d5c2c6d43da5fc9
|
diff --git a/bin/next-dev b/bin/next-dev
index <HASH>..<HASH> 100755
--- a/bin/next-dev
+++ b/bin/next-dev
@@ -3,7 +3,7 @@
import { resolve } from 'path'
import parseArgs from 'minimist'
import Server from '../server'
-import build from '../server/build'
+import build from '../server/build/index'
const argv = parseArgs(process.argv.slice(2), {
alias: {
diff --git a/client/eval-script.js b/client/eval-script.js
index <HASH>..<HASH> 100644
--- a/client/eval-script.js
+++ b/client/eval-script.js
@@ -2,12 +2,14 @@ import React from 'react'
import ReactDOM from 'react-dom'
import App from '../lib/app'
import Link from '../lib/link'
+import Css from '../lib/css'
const modules = new Map([
['react', React],
['react-dom', ReactDOM],
['next/app', App],
- ['next/link', Link]
+ ['next/link', Link],
+ ['next/css', Css]
])
/**
diff --git a/server/build/bundle.js b/server/build/bundle.js
index <HASH>..<HASH> 100644
--- a/server/build/bundle.js
+++ b/server/build/bundle.js
@@ -14,7 +14,8 @@ export default function bundle (src, dst) {
'react-dom',
{
[require.resolve('react')]: 'react',
- [require.resolve('../lib/link')]: 'next/link'
+ [require.resolve('../../lib/link')]: 'next/link',
+ [require.resolve('../../lib/css')]: 'next/css'
}
],
resolveLoader: {
diff --git a/server/build/transpile.js b/server/build/transpile.js
index <HASH>..<HASH> 100644
--- a/server/build/transpile.js
+++ b/server/build/transpile.js
@@ -25,7 +25,8 @@ const babelOptions = {
[
{ src: `npm:${babelRuntimePath}`, expose: 'babel-runtime' },
{ src: `npm:${require.resolve('react')}`, expose: 'react' },
- { src: `npm:${require.resolve('../lib/link')}`, expose: 'next/link' }
+ { src: `npm:${require.resolve('../../lib/link')}`, expose: 'next/link' },
+ { src: `npm:${require.resolve('../../lib/css')}`, expose: 'next/css' }
]
]
],
|
Fixes next dev and adds the css requires
|
zeit_next.js
|
train
|
24fc0a8773f0ed0b4d3c228892986ad3a3a14312
|
diff --git a/tests/django_mysql_tests/test_cache.py b/tests/django_mysql_tests/test_cache.py
index <HASH>..<HASH> 100644
--- a/tests/django_mysql_tests/test_cache.py
+++ b/tests/django_mysql_tests/test_cache.py
@@ -754,11 +754,11 @@ class MySQLCacheTests(TransactionTestCase):
def test_expiration(self):
# Cache values can be set to expire
- cache.set('expire1', 'very quickly', 0.1)
- cache.set('expire2', 'very quickly', 0.1)
- cache.set('expire3', 'very quickly', 0.1)
+ cache.set('expire1', 'very quickly', 0.3)
+ cache.set('expire2', 'very quickly', 0.3)
+ cache.set('expire3', 'very quickly', 0.3)
- time.sleep(0.2)
+ time.sleep(0.4)
self.assertIsNone(cache.get("expire1"))
cache.add("expire2", "newvalue")
@@ -784,8 +784,8 @@ class MySQLCacheTests(TransactionTestCase):
def test_get_many_with_one_expired(self):
# Multiple cache keys can be returned using get_many
the_cache = caches['no_cull']
- the_cache.set('a', 'a', 0.1)
- time.sleep(0.2)
+ the_cache.set('a', 'a', 0.3)
+ time.sleep(0.4)
the_cache.set('b', 'b')
the_cache.set('c', 'c')
@@ -814,10 +814,10 @@ class MySQLCacheTests(TransactionTestCase):
def test_set_many_expiration(self):
# set_many takes a second ``timeout`` parameter
with self.assertNumQueries(1):
- caches['no_cull'].set_many({"key1": "spam", "key2": "eggs"}, 0.1)
+ caches['no_cull'].set_many({"key1": "spam", "key2": "eggs"}, 0.3)
cache.set("key3", "ham")
- time.sleep(0.2)
+ time.sleep(0.4)
self.assertIsNone(cache.get("key1"))
self.assertIsNone(cache.get("key2"))
self.assertEqual(cache.get("key3"), "ham")
@@ -873,14 +873,14 @@ class MySQLCacheTests(TransactionTestCase):
# Original tests
def test_add_with_expired(self):
- cache.add("mykey", "value", 0.1)
+ cache.add("mykey", "value", 0.3)
self.assertEqual(cache.get("mykey"), "value")
- result = cache.add("mykey", "newvalue", 0.1)
+ result = cache.add("mykey", "newvalue", 0.3)
self.assertFalse(result)
self.assertEqual(cache.get("mykey"), "value")
- time.sleep(0.2)
+ time.sleep(0.4)
result = cache.add("mykey", "newvalue", 1)
self.assertTrue(result)
@@ -943,8 +943,8 @@ class MySQLCacheTests(TransactionTestCase):
def test_cull_deletes_expired_first(self):
cull_cache = caches['cull']
- cull_cache.set("key", "value", 0.1)
- time.sleep(0.2)
+ cull_cache.set("key", "value", 0.3)
+ time.sleep(0.4)
# Add 30 more entries. The expired key should get deleted, leaving the
# 30 new keys
@@ -1054,15 +1054,15 @@ class MySQLCacheTests(TransactionTestCase):
# cull_mysql_caches tests
def test_cull_mysql_caches_basic(self):
- cache.set('key', 'value', 0.1)
- time.sleep(0.2)
+ cache.set('key', 'value', 0.3)
+ time.sleep(0.4)
self.assertEqual(self.table_count(), 1)
call_command('cull_mysql_caches', verbosity=0)
self.assertEqual(self.table_count(), 0)
def test_cull_mysql_caches_named_cache(self):
- cache.set('key', 'value', 0.1)
- time.sleep(0.2)
+ cache.set('key', 'value', 0.3)
+ time.sleep(0.4)
self.assertEqual(self.table_count(), 1)
out = StringIO()
|
Increase cache test expire/sleep times to stop intermittent failures on Travis
|
adamchainz_django-mysql
|
train
|
86ad372b8ea6fa8893124168ea965ac6bdec20da
|
diff --git a/packages/cli/src/middleware/index.js b/packages/cli/src/middleware/index.js
index <HASH>..<HASH> 100644
--- a/packages/cli/src/middleware/index.js
+++ b/packages/cli/src/middleware/index.js
@@ -61,13 +61,13 @@ export function configCliMiddleware(argv) {
if (wsProvider?.connection?.url?.includes('eth.aragon.network')) {
reporter.newLine()
reporter.warning(
- `You are connecting to the default node (${wsProvider.connection.url}) the request could take a while. Consider switching to Infura for better performance.`,
+ `The request may take a while because you are connecting to the default node (${wsProvider.connection.url}). For better performance, consider switching to your own Ethereum node or Infura.`,
'\n'
)
reporter.info(
`You have the following options:
- 1. Use the global option "--ws-rpc" with wss://mainnet.infura.io/ws/v3/<INFURA_KEY>
- 2. Set the "wsRPC" field on mainnet environment of the arapp.json with wss://mainnet.infura.io/ws/v3/<INFURA_KEY>`,
+ 1. Use the global option "--ws-rpc" (e.g. with wss://mainnet.infura.io/ws/v3/<INFURA_KEY> for Infura)
+ 2. Set the "wsRPC" field on mainnet environment of the arapp.json`,
'\n'
)
}
|
Don't insist on using Infura, recommend running one's own eth node (#<I>)
* Don't insist on using Infura, recommend running one's own eth node
|
aragon_aragon-cli
|
train
|
314abb86d9789db80f9f05bb143194ee5e105273
|
diff --git a/pyciss/opusapi.py b/pyciss/opusapi.py
index <HASH>..<HASH> 100644
--- a/pyciss/opusapi.py
+++ b/pyciss/opusapi.py
@@ -223,15 +223,18 @@ class OPUS(object):
# myquery.update(query)
# self.create_request_with_query('data', myquery, fmt=fmt)
+ @property
+ def response(self):
+ return self.r.json()['data']
+
def unpack_json_response(self):
if self.r.status_code == 500:
if not self.silent:
print("No data found.")
self.obsids = []
return
- response = self.r.json()['data']
obsids = []
- for obsid_data in response.items():
+ for obsid_data in self.response.items():
obsids.append(OPUSObsID(obsid_data))
self.obsids = obsids
if not self.silent:
|
store parsed response in `response` attribute
|
michaelaye_pyciss
|
train
|
97ebd3ac4c54c6740249ecc64bdf636e6e1a5f94
|
diff --git a/growler/http/methods.py b/growler/http/methods.py
index <HASH>..<HASH> 100644
--- a/growler/http/methods.py
+++ b/growler/http/methods.py
@@ -13,6 +13,7 @@ class HTTPMethod(enum.IntEnum):
POST = 0b000010
DELETE = 0b000100
PUT = 0b001000
+ HEAD = 0b010000
string_to_method = {
@@ -20,4 +21,8 @@ string_to_method = {
"POST": HTTPMethod.POST,
"DELETE": HTTPMethod.DELETE,
"PUT": HTTPMethod.PUT,
+ "HEAD": HTTPMethod.HEAD,
}
+
+
+method_to_string = dict((v,k) for k, v in string_to_method.items())
|
Added HEAD http method and a "method to string" function to the http.methods package
|
pyGrowler_Growler
|
train
|
3cd8ecb31ca45670c05af3b4580499da8f1ad270
|
diff --git a/pdftools/_cli.py b/pdftools/_cli.py
index <HASH>..<HASH> 100644
--- a/pdftools/_cli.py
+++ b/pdftools/_cli.py
@@ -6,6 +6,7 @@ from . import __version__
def main():
PARSER = argparse.ArgumentParser(
+ description="Python-based command line tool for manipulating PDFs. It is based on the PyPdf2 package.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
# global options
@@ -22,6 +23,7 @@ def main():
parser_add = SUBPARSERS.add_parser(
"add",
help="Add pages from a source file to an output PDF file",
+ description="Add pages from a source file to an output PDF file",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_add.add_argument("dest", type=str, help="Destination PDF file")
@@ -46,6 +48,7 @@ def main():
parser_copy = SUBPARSERS.add_parser(
"copy",
help="Copy specific pages of a PDF file in a new file",
+ description="Copy specific pages of a PDF file in a new file",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_copy.add_argument(
@@ -79,6 +82,7 @@ def main():
parser_insert = SUBPARSERS.add_parser(
"insert",
help="Insert pages of one file into another",
+ description="Insert pages of one file into another",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_insert.add_argument("dest", type=str, help="Destination PDF file")
@@ -109,6 +113,7 @@ def main():
parser_merge = SUBPARSERS.add_parser(
"merge",
help="Merge the pages of multiple input files into one output file",
+ description="Merge the pages of multiple input files into one output file",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_merge.add_argument(
@@ -126,6 +131,7 @@ def main():
parser_remove = SUBPARSERS.add_parser(
"remove",
help="Remove pages from a PDF file",
+ description="Remove pages from a PDF file",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_remove.add_argument("src", type=str, default=None, help="PDF source file")
@@ -148,6 +154,7 @@ def main():
parser_rotate = SUBPARSERS.add_parser(
"rotate",
help="Rotate the pages of a PDF files by 90 degrees",
+ description="Rotate the pages of a PDF files by 90 degrees",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_rotate.add_argument("src", type=str, default=None, help="Source file")
@@ -178,6 +185,7 @@ def main():
parser_split = SUBPARSERS.add_parser(
"split",
help="Split a PDF file into multiple documents",
+ description="Split a PDF file into multiple documents",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_split.add_argument(
@@ -211,6 +219,7 @@ def main():
parser_zip = SUBPARSERS.add_parser(
"zip",
help="Python-like zipping (interleaving) the pages of two documents in one output file",
+ description="Python-like zipping (interleaving) the pages of two documents in one output file",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser_zip.add_argument("src1", type=str, help="First source file")
|
Updating help and description messages for `--help` option
|
stlehmann_pdftools
|
train
|
a17e89722d99f7d474f6bffa350bd9a5ad344831
|
diff --git a/lib/fog/aws/iam.rb b/lib/fog/aws/iam.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/aws/iam.rb
+++ b/lib/fog/aws/iam.rb
@@ -2,6 +2,12 @@ module Fog
module AWS
class IAM < Fog::Service
+ class EntityAlreadyExists < Fog::AWS::IAM::Error; end
+ class KeyPairMismatch < Fog::AWS::IAM::Error; end
+ class LimitExceeded < Fog::AWS::IAM::Error; end
+ class MalformedCertificate < Fog::AWS::IAM::Error; end
+ class NotFound < Fog::AWS::IAM::Error; end
+
requires :aws_access_key_id, :aws_secret_access_key
recognizes :host, :path, :port, :scheme, :persistent
@@ -132,17 +138,41 @@ module Fog
}
)
- response = @connection.request({
- :body => body,
- :expects => 200,
- :idempotent => idempotent,
- :headers => { 'Content-Type' => 'application/x-www-form-urlencoded' },
- :host => @host,
- :method => 'POST',
- :parser => parser
- })
-
- response
+ begin
+ response = @connection.request({
+ :body => body,
+ :expects => 200,
+ :idempotent => idempotent,
+ :headers => { 'Content-Type' => 'application/x-www-form-urlencoded' },
+ :host => @host,
+ :method => 'POST',
+ :parser => parser
+ })
+
+ response
+ rescue Excon::Errors::HTTPStatusError => error
+ if match = error.message.match(/<Code>(.*)<\/Code>(?:.*<Message>(.*)<\/Message>)?/m)
+ case match[1]
+ when 'CertificateNotFound'
+ raise Fog::AWS::IAM::NotFound.slurp(error, match[2])
+ when 'EntityAlreadyExists'
+ raise Fog::AWS::IAM::EntityAlreadyExists.slurp(error, match[2])
+ when 'KeyPairMismatch'
+ raise Fog::AWS::IAM::KeyPairMismatch.slurp(error, match[2])
+ when 'LimitExceeded'
+ raise Fog::AWS::IAM::LimitExceeded.slurp(error, match[2])
+ when 'MalformedCertificate'
+ raise Fog::AWS::IAM::MalformedCertificate.slurp(error, match[2])
+ else
+ raise Fog::AWS::IAM::Error.slurp(error, "#{match[1]} => #{match[2]}") if match[1]
+ raise
+ end
+ else
+ raise
+ end
+ end
+
+
end
end
|
[aws|iam] add error handling for common failures resulting from upload_server_certificate
|
fog_fog
|
train
|
1e2e85b0105559a1d7c3b850488ad9585b36efeb
|
diff --git a/state/machine_upgradeseries.go b/state/machine_upgradeseries.go
index <HASH>..<HASH> 100644
--- a/state/machine_upgradeseries.go
+++ b/state/machine_upgradeseries.go
@@ -521,7 +521,7 @@ func (m *Machine) GetUpgradeSeriesMessages() ([]string, bool, error) {
for _, unseenMessage := range unseenMessages {
messages = append(messages, unseenMessage.Message)
}
- err = m.SetUpgradeSeriesMessagesAsSeen(lock.Messages)
+ err = m.SetUpgradeSeriesMessagesAsSeen(unseenMessages)
if err != nil {
return nil, false, errors.Trace(err)
}
@@ -537,11 +537,9 @@ func (m *Machine) GetUpgradeSeriesMessages() ([]string, bool, error) {
}
// SetUpgradeSeriesMessagesAsSeen marks a given upgrade series messages as
-// having been seen by a client of the API. This method is exported since only
-// the client of this method can determine when a message has been "seen" and
-// thus the decision must be made ad the APIServer level as whether to call this
-// method or not (as apposed to immediately calling this method when messages
-// are queried for sending).
+// having been seen by a client of the API. The method we use to determine have
+// that the message has actually been seen is that a client has made a call to
+// fetch the message.
func (m *Machine) SetUpgradeSeriesMessagesAsSeen(messages []UpgradeSeriesMessage) error {
buildTxn := func(attempt int) ([]txn.Op, error) {
if attempt > 0 {
@@ -549,6 +547,9 @@ func (m *Machine) SetUpgradeSeriesMessagesAsSeen(messages []UpgradeSeriesMessage
return nil, errors.Trace(err)
}
}
+ if len(messages) == 0 {
+ return nil, jujutxn.ErrNoOperations
+ }
if err := m.isStillAlive(); err != nil {
return nil, errors.Trace(err)
}
|
Only mark unseen messages as seen.
|
juju_juju
|
train
|
1f216a5ddf1d9648956841df65bfe86356d6159e
|
diff --git a/lib/Headers.js b/lib/Headers.js
index <HASH>..<HASH> 100644
--- a/lib/Headers.js
+++ b/lib/Headers.js
@@ -110,11 +110,26 @@ Headers.prototype.populateFromObject = function (valuesByName, doNotStringify) {
var value = valuesByName[headerName],
headerNameLowerCase = headerName.toLowerCase();
if (Array.isArray(value)) {
- this.valuesByName[headerNameLowerCase] = doNotStringify ? [].concat(value) : value.map(String);
+ if (!doNotStringify) {
+ value = value.map(String);
+ }
+ if (this.valuesByName[headerNameLowerCase]) {
+ Array.prototype.push.apply(this.valuesByName[headerNameLowerCase], value)
+ } else {
+ this.valuesByName[headerNameLowerCase] = [].concat(value);
+ }
} else if (typeof value === 'undefined') {
+ // Hmm, this might not behave as intended when the header occurs multiple times in the object with different casing
delete this.valuesByName[headerNameLowerCase];
} else {
- this.valuesByName[headerNameLowerCase] = doNotStringify ? [value] : [String(value)];
+ if (!doNotStringify) {
+ value = String(value);
+ }
+ if (this.valuesByName[headerNameLowerCase]) {
+ this.valuesByName[headerNameLowerCase].push(value);
+ } else {
+ this.valuesByName[headerNameLowerCase] = [value];
+ }
}
}, this);
return this;
diff --git a/test/Headers.js b/test/Headers.js
index <HASH>..<HASH> 100644
--- a/test/Headers.js
+++ b/test/Headers.js
@@ -33,6 +33,24 @@ describe('Headers', function () {
expect(headers.toString(), 'to equal', 'Received: foo\r\n');
});
+ it('should accept multiple occurrences of the same header with different casing', function () {
+ var headers = new Headers({cookie: 'foo=bar', Cookie: 'quux=baz'});
+
+ expect(headers.toString(), 'to equal', 'Cookie: foo=bar\r\nCookie: quux=baz\r\n');
+ });
+
+ it('should accept multiple occurrences of the same header with different casing when the first is given as an array', function () {
+ var headers = new Headers({cookie: ['foo=bar'], Cookie: 'quux=baz'});
+
+ expect(headers.toString(), 'to equal', 'Cookie: foo=bar\r\nCookie: quux=baz\r\n');
+ });
+
+ it('should accept multiple occurrences of the same header with different casing when the second is given as an array', function () {
+ var headers = new Headers({cookie: 'foo=bar', Cookie: ['quux=baz']});
+
+ expect(headers.toString(), 'to equal', 'Cookie: foo=bar\r\nCookie: quux=baz\r\n');
+ });
+
describe('#remove', function () {
it('should remove all header values for the given header when only passed one argument', function () {
var headers = new Headers({foo: ['bla', 'bar'], quux: 'baz'});
|
Headers.populateFromObject: Support multiple occurrences of the same header in different casing.
|
papandreou_messy
|
train
|
ffcee0960b8df4cf81aef41e546d3bcc61c3e14f
|
diff --git a/src/Cortex.php b/src/Cortex.php
index <HASH>..<HASH> 100644
--- a/src/Cortex.php
+++ b/src/Cortex.php
@@ -103,12 +103,12 @@ class Cortex
*/
private function doBoot(\WP $wp, $do, RequestInterface $request = null)
{
- $routes = $this->factoryRoutes();
+ $uri = $this->factoryUri($request);
+ $method = $this->getMethod($request);
+ $routes = $this->factoryRoutes($uri, $method);
$groups = $this->factoryGroups();
$router = $this->factoryRouter($routes, $groups);
$handler = $this->factoryHandler();
- $uri = $this->factoryUri($request);
- $method = $this->getMethod($request);
$do = $handler->handle($router->match($uri, $method), $wp, $do);
unset($method, $uri, $handler, $router, $groups, $routes, $instance);
remove_all_filters('cortex.routes');
@@ -142,6 +142,41 @@ class Cortex
}
/**
+ * @param \Psr\Http\Message\RequestInterface $request
+ * @return \Brain\Cortex\Uri\UriInterface
+ */
+ private function factoryUri(RequestInterface $request = null)
+ {
+ $psrUri = is_null($request) ? null : $request->getUri();
+
+ /** @var UriInterface $uri */
+ $uri = $this->factoryByHook(
+ 'uri',
+ UriInterface::class,
+ function () use ($psrUri) {
+ is_null($psrUri) and $psrUri = new PsrUri();
+
+ return new WordPressUri($psrUri);
+ }
+ );
+
+ return $uri;
+ }
+
+ /**
+ * @param \Psr\Http\Message\RequestInterface|null $request
+ * @return string
+ */
+ private function getMethod(RequestInterface $request = null)
+ {
+ if ($request) {
+ return $request->getMethod();
+ }
+
+ return empty($_SERVER['REQUEST_METHOD']) ? 'GET' : strtoupper($_SERVER['REQUEST_METHOD']);
+ }
+
+ /**
* @return \Brain\Cortex\Group\GroupCollectionInterface
*/
private function factoryGroups()
@@ -161,9 +196,11 @@ class Cortex
}
/**
+ * @param \Brain\Cortex\Uri\UriInterface $uri
+ * @param string method
* @return \Brain\Cortex\Route\RouteCollectionInterface
*/
- private function factoryRoutes()
+ private function factoryRoutes(UriInterface $uri, $method)
{
/** @var \Brain\Cortex\Route\RouteCollectionInterface $routes */
$routes = $this->factoryByHook(
@@ -174,7 +211,7 @@ class Cortex
}
);
- do_action('cortex.routes', $routes);
+ do_action('cortex.routes', $routes, $uri, $method);
return $routes;
}
@@ -216,40 +253,4 @@ class Cortex
return $handler;
}
-
- /**
- * @param \Psr\Http\Message\RequestInterface $request
- * @return \Brain\Cortex\Uri\UriInterface
- * @internal param null|\Psr\Http\Message\UriInterface $psrUri
- */
- private function factoryUri(RequestInterface $request = null)
- {
- $psrUri = is_null($request) ? null : $request->getUri();
-
- /** @var UriInterface $uri */
- $uri = $this->factoryByHook(
- 'uri',
- UriInterface::class,
- function () use ($psrUri) {
- is_null($psrUri) and $psrUri = new PsrUri();
-
- return new WordPressUri($psrUri);
- }
- );
-
- return $uri;
- }
-
- /**
- * @param \Psr\Http\Message\RequestInterface|null $request
- * @return string
- */
- private function getMethod(RequestInterface $request = null)
- {
- if ($request) {
- return $request->getMethod();
- }
-
- return empty($_SERVER['REQUEST_METHOD']) ? 'GET' : strtoupper($_SERVER['REQUEST_METHOD']);
- }
}
|
'cortex.routes' hook now passes uri and method
|
Brain-WP_Cortex
|
train
|
e774b4650bd20e28a3bbc0f06f83857a47701ba0
|
diff --git a/src/ocrmypdf/exec/__init__.py b/src/ocrmypdf/exec/__init__.py
index <HASH>..<HASH> 100644
--- a/src/ocrmypdf/exec/__init__.py
+++ b/src/ocrmypdf/exec/__init__.py
@@ -25,7 +25,7 @@ from ..exceptions import MissingDependencyError
-def get_version(program, *,
+def get_version(program, *,
version_arg='--version', regex=r'(\d+(\.\d+)*)'):
"Get the version of the specified program"
args_prog = [
@@ -37,6 +37,10 @@ def get_version(program, *,
args_prog, close_fds=True, universal_newlines=True,
stdout=PIPE, stderr=STDOUT, check=True)
output = proc.stdout
+ except FileNotFoundError as e:
+ raise MissingDependencyError(
+ "Could not find program '{}' on the PATH".format(
+ program)) from e
except CalledProcessError as e:
if e.returncode < 0:
raise MissingDependencyError(
|
ocrmypdf.exec: trap FileNotFoundError too
|
jbarlow83_OCRmyPDF
|
train
|
604976917461e1166b4e8000b0bc5dd983956c71
|
diff --git a/packages/babel-plugin-transform-react-trans/src/index.js b/packages/babel-plugin-transform-react-trans/src/index.js
index <HASH>..<HASH> 100644
--- a/packages/babel-plugin-transform-react-trans/src/index.js
+++ b/packages/babel-plugin-transform-react-trans/src/index.js
@@ -9,11 +9,20 @@ function cleanChildren(node) {
const mergeProps = (props, nextProps) => ({
text: props.text + nextProps.text,
params: Object.assign({}, props.params, nextProps.params),
- components: props.components.concat(nextProps.components)
+ components: props.components.concat(nextProps.components),
+ elementIndex: nextProps.elementIndex
})
+const elementGeneratorFactory = () => {
+ let index = 0
+ return () => index++
+}
+
+
// Plugin function
export default function({ types: t }) {
+ let elementGenerator
+
function isIdAttribute(node) {
return t.isJSXAttribute(node) && t.isJSXIdentifier(node.name, {name: 'id'})
}
@@ -42,7 +51,7 @@ export default function({ types: t }) {
// Trans
if (isTransElement(node)) {
for (const child of node.children) {
- props = processChildren.call(this, child, props)
+ props = processChildren(child, props)
}
// Plural, Select, SelectOrdinal
@@ -67,9 +76,7 @@ export default function({ types: t }) {
offset = ` offset:${attr.value.value}`
} else {
- props = processChildren.call(this, attr.value, {
- text: '', params: props.params, components: props.components
- })
+ props = processChildren(attr.value, Object.assign({}, props, { text: '' }))
choices[name.replace('_', '=')] = props.text
}
}
@@ -86,13 +93,13 @@ export default function({ types: t }) {
} else {
if (root) return
- const index = this.inlineElementCounter++
+ const index = elementGenerator()
const selfClosing = node.openingElement.selfClosing
props.text += !selfClosing ? `<${index}>` : `<${index}/>`
for (const child of node.children) {
- props = processChildren.call(this, child, props)
+ props = processChildren(child, props)
}
if (!selfClosing) props.text += `</${index}>`
@@ -137,14 +144,14 @@ export default function({ types: t }) {
})
} else if (t.isJSXElement(exp)) {
- nextProps = processElement.call(this, exp, nextProps)
+ nextProps = processElement(exp, nextProps)
} else {
nextProps.text += exp.value
}
} else if (t.isJSXElement(node)) {
- nextProps = processElement.call(this, node, nextProps)
+ nextProps = processElement(node, nextProps)
} else if (t.isJSXSpreadChild(node)) {
// TODO: I don't have a clue what's the usecase
@@ -162,11 +169,11 @@ export default function({ types: t }) {
return {
visitor: {
JSXElement({ node }) {
- this.inlineElementCounter = 0
+ elementGenerator = elementGeneratorFactory()
// 1. Collect all parameters and inline elements and generate message ID
- const props = processElement.call(this, node, props, /* root= */true)
+ const props = processElement(node, props, /* root= */true)
if (!props) return
// 2. Replace children and add collected data
|
refactor: Add elementGenerator to replace context bound variable
affects: babel-plugin-transform-react-trans
|
lingui_js-lingui
|
train
|
ab36122ad0a38036ea3f8d3043693fba42226e8e
|
diff --git a/src/Output.php b/src/Output.php
index <HASH>..<HASH> 100644
--- a/src/Output.php
+++ b/src/Output.php
@@ -2,22 +2,39 @@
namespace League\CLImate;
+use League\CLImate\Decorator\ParserImporter;
use League\CLImate\Decorator\Parser;
class Output
{
- protected $output;
+ use ParserImporter;
- protected $parser;
+ protected $content;
- public function __construct($output, Parser $parser)
+ protected $new_line = true;
+
+ public function __construct($content, Parser $parser)
{
- $this->output = $output;
- $this->parser = $parser;
+ $this->parser($parser);
+ $this->content($content);
+ }
+
+ protected function content($content)
+ {
+ $this->content = $content;
+ }
+
+ public function sameLine()
+ {
+ $this->new_line = false;
}
public function __toString()
{
- return $this->parser->apply($this->output) . "\n";
+ $result = $this->parser->apply($this->content);
+
+ if ($this->new_line) $result .= "\n";
+
+ return $result;
}
}
diff --git a/src/TerminalObject/Dynamic/Input.php b/src/TerminalObject/Dynamic/Input.php
index <HASH>..<HASH> 100644
--- a/src/TerminalObject/Dynamic/Input.php
+++ b/src/TerminalObject/Dynamic/Input.php
@@ -2,6 +2,7 @@
namespace League\CLImate\TerminalObject\Dynamic;
+use League\CLImate\Output;
use League\CLImate\Util\Reader;
class Input extends BaseDynamicTerminalObject
@@ -60,7 +61,10 @@ class Input extends BaseDynamicTerminalObject
public function prompt()
{
- echo $this->promptFormatted();
+ $output = new Output($this->promptFormatted(), $this->parser);
+ $output->sameLine();
+
+ echo $output;
$response = $this->reader->line();
diff --git a/src/TerminalObject/Router.php b/src/TerminalObject/Router.php
index <HASH>..<HASH> 100644
--- a/src/TerminalObject/Router.php
+++ b/src/TerminalObject/Router.php
@@ -51,9 +51,7 @@ class Router
foreach ($obj->settings() as $obj_setting) {
$setting = $this->settings->get($obj_setting);
- if ($setting) {
- $obj->importSetting($setting);
- }
+ if ($setting) $obj->importSetting($setting);
}
if ($this->isBasic($name)) {
diff --git a/tests/InputTest.php b/tests/InputTest.php
index <HASH>..<HASH> 100644
--- a/tests/InputTest.php
+++ b/tests/InputTest.php
@@ -22,9 +22,9 @@ class InputTest extends TestBase
ob_end_clean();
- $should_be = "\e[m\e[0m\n";
+ $should_be = "\e[mSo what is up? \e[0m";
- $this->assertSame($result, 'So what is up? ');
+ $this->assertSame($result, $should_be);
$this->assertSame('Not much.', $response);
}
@@ -48,7 +48,7 @@ class InputTest extends TestBase
ob_end_clean();
- $should_be = "So what is up? So what is up? ";
+ $should_be = "\e[mSo what is up? \e[0m\e[mSo what is up? \e[0m";
$this->assertSame($should_be, $result);
$this->assertSame('Everything.', $response);
@@ -74,7 +74,7 @@ class InputTest extends TestBase
ob_end_clean();
- $should_be = "So what is up? So what is up? ";
+ $should_be = "\e[mSo what is up? \e[0m\e[mSo what is up? \e[0m";
$this->assertSame($should_be, $result);
$this->assertSame('Everything.', $response);
@@ -100,7 +100,7 @@ class InputTest extends TestBase
ob_end_clean();
- $should_be = "So what is up? So what is up? ";
+ $should_be = "\e[mSo what is up? \e[0m\e[mSo what is up? \e[0m";
$this->assertSame($should_be, $result);
$this->assertSame('Stuff.', $response);
@@ -125,7 +125,7 @@ class InputTest extends TestBase
ob_end_clean();
- $should_be = "So what is up? [Everything./Stuff.] ";
+ $should_be = "\e[mSo what is up? [Everything./Stuff.] \e[0m";
$this->assertSame($should_be, $result);
$this->assertSame('Stuff.', $response);
|
allowed input styles, added sameLine method for output
|
thephpleague_climate
|
train
|
73c3fa48a46c2598a536d7f790ea850cc6e6e9bf
|
diff --git a/cypress/plugins/index.js b/cypress/plugins/index.js
index <HASH>..<HASH> 100644
--- a/cypress/plugins/index.js
+++ b/cypress/plugins/index.js
@@ -1,4 +1,8 @@
-const webpack = require('@cypress/webpack-preprocessor')
+// https://github.com/cypress-io/cypress-webpack-preprocessor
+const webpackPreprocessor = require('@cypress/webpack-preprocessor')
+
+// Cypress webpack options or just require from
+// an existing webpack.config.js
const webpackOptions = {
module: {
rules: [
@@ -11,12 +15,9 @@ const webpackOptions = {
}
const options = {
- // send in the options from your webpack.config.js, so it works the same
- // as your app's code
- webpackOptions,
- watchOptions: {}
+ webpackOptions
}
module.exports = on => {
- on('file:preprocessor', webpack(options))
+ on('file:preprocessor', webpackPreprocessor(options))
}
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -67,8 +67,6 @@ const mountVue = (component, options = {}) => () => {
const vueHtml = getPageHTML(options)
document.write(vueHtml)
document.close()
- console.log('wrote html')
- console.log(vueHtml)
})
cy
.window()
@@ -81,17 +79,3 @@ const mountVue = (component, options = {}) => () => {
}
module.exports = mountVue
-
-// export const loadAndMountMyComponent = VueComponent => () => {
-// cy.visit('index.html')
-// cy
-// .window()
-// .its('Vue')
-// .then(Vue => {
-// deleteCachedConstructors(VueComponent)
-// // TODO go through ITS components and delete their constructors
-// // wonder if there is unified list
-// Cypress.vue = new Vue(VueComponent).$mount('#app')
-// copyStyles(VueComponent)
-// })
-// }
|
fix: simplify webpack options and document
|
cypress-io_cypress
|
train
|
788dbeb6866413d891bc92da6957ddac2d4cf45b
|
diff --git a/packet_number_generator.go b/packet_number_generator.go
index <HASH>..<HASH> 100644
--- a/packet_number_generator.go
+++ b/packet_number_generator.go
@@ -42,28 +42,19 @@ func (p *packetNumberGenerator) Pop() protocol.PacketNumber {
return next
}
-func (p *packetNumberGenerator) generateNewSkip() error {
- num, err := p.getRandomNumber()
- if err != nil {
- return err
- }
-
+func (p *packetNumberGenerator) generateNewSkip() {
+ num := p.getRandomNumber()
skip := protocol.PacketNumber(num) * (p.averagePeriod - 1) / (math.MaxUint16 / 2)
// make sure that there are never two consecutive packet numbers that are skipped
p.nextToSkip = p.next + 2 + skip
-
- return nil
}
// getRandomNumber() generates a cryptographically secure random number between 0 and MaxUint16 (= 65535)
// The expectation value is 65535/2
-func (p *packetNumberGenerator) getRandomNumber() (uint16, error) {
+func (p *packetNumberGenerator) getRandomNumber() uint16 {
b := make([]byte, 2)
- _, err := rand.Read(b)
- if err != nil {
- return 0, err
- }
+ rand.Read(b) // ignore the error here
num := uint16(b[0])<<8 + uint16(b[1])
- return num, nil
+ return num
}
diff --git a/packet_number_generator_test.go b/packet_number_generator_test.go
index <HASH>..<HASH> 100644
--- a/packet_number_generator_test.go
+++ b/packet_number_generator_test.go
@@ -69,8 +69,7 @@ var _ = Describe("Packet Number Generator", func() {
rep := 10000
for i := 0; i < rep; i++ {
- num, err := png.getRandomNumber()
- Expect(err).ToNot(HaveOccurred())
+ num := png.getRandomNumber()
sum += uint64(num)
if num > largest {
largest = num
|
be more explicit about ignoring errors in the packet number generator
|
lucas-clemente_quic-go
|
train
|
129cff828800ec63c87325d2d58ad87d46cd5a4c
|
diff --git a/examples/lookup-view.php b/examples/lookup-view.php
index <HASH>..<HASH> 100644
--- a/examples/lookup-view.php
+++ b/examples/lookup-view.php
@@ -10,7 +10,6 @@ try {
$Lookup = $MessageBird->lookup->read("624971134", "NL");
var_dump($Lookup);
-
} catch (\MessageBird\Exceptions\AuthenticateException $e) {
// That means that your accessKey is unknown
echo 'wrong login';
diff --git a/src/MessageBird/Resources/Lookup.php b/src/MessageBird/Resources/Lookup.php
index <HASH>..<HASH> 100644
--- a/src/MessageBird/Resources/Lookup.php
+++ b/src/MessageBird/Resources/Lookup.php
@@ -4,6 +4,7 @@ namespace MessageBird\Resources;
use MessageBird\Objects;
use MessageBird\Common;
+use InvalidArgumentException;
/**
* Class Verify
@@ -33,8 +34,11 @@ class Lookup extends Base
* @throws \MessageBird\Exceptions\RequestException
* @throws \MessageBird\Exceptions\ServerException
*/
- public function read($phoneNumber = null, $countryCode = null)
+ public function read($phoneNumber, $countryCode = null)
{
+ if(empty($phoneNumber)) {
+ throw new InvalidArgumentException('The phone number cannot be empty.');
+ }
$query = null;
if ($countryCode != null) {
$query = array("countryCode" => $countryCode);
diff --git a/src/MessageBird/Resources/LookupHLR.php b/src/MessageBird/Resources/LookupHLR.php
index <HASH>..<HASH> 100644
--- a/src/MessageBird/Resources/LookupHLR.php
+++ b/src/MessageBird/Resources/LookupHLR.php
@@ -4,6 +4,7 @@ namespace MessageBird\Resources;
use MessageBird\Objects;
use MessageBird\Common;
+use InvalidArgumentException;
/**
* Class LookupHLR
@@ -35,6 +36,10 @@ class LookupHLR extends Base
*/
public function create($hlr, $countryCode = null)
{
+ if(empty($hlr->msisdn)) {
+ throw new InvalidArgumentException('The phone number ($hlr->msisdn) cannot be empty.');
+ }
+
$query = null;
if ($countryCode != null) {
$query = array("countryCode" => $countryCode);
@@ -53,8 +58,12 @@ class LookupHLR extends Base
* @throws \MessageBird\Exceptions\RequestException
* @throws \MessageBird\Exceptions\ServerException
*/
- public function read($phoneNumber = null, $countryCode = null)
+ public function read($phoneNumber, $countryCode = null)
{
+ if(empty($phoneNumber)) {
+ throw new InvalidArgumentException('The phone number cannot be empty.');
+ }
+
$query = null;
if ($countryCode != null) {
$query = array("countryCode" => $countryCode);
diff --git a/tests/integration/lookup/LookupTest.php b/tests/integration/lookup/LookupTest.php
index <HASH>..<HASH> 100644
--- a/tests/integration/lookup/LookupTest.php
+++ b/tests/integration/lookup/LookupTest.php
@@ -17,6 +17,14 @@ class LookupTest extends BaseTest
}
/**
+ * @expectedException InvalidArgumentException
+ */
+ public function testReadLookupWithEmptyNumber()
+ {
+ $this->client->lookup->read(null);
+ }
+
+ /**
* @expectedException MessageBird\Exceptions\ServerException
*/
public function testReadLookupWithCountryCode()
@@ -41,6 +49,16 @@ class LookupTest extends BaseTest
}
/**
+ * @expectedException InvalidArgumentException
+ */
+ public function testCreateLookupHLRWithEmptyNumber()
+ {
+ $Hlr = new \MessageBird\Objects\Hlr();
+ $Hlr->msisdn = null;
+ $this->client->lookupHLR->create($Hlr);
+ }
+
+ /**
* @expectedException MessageBird\Exceptions\ServerException
*/
public function testCreateLookupHLRWithCountryCode()
@@ -66,6 +84,14 @@ class LookupTest extends BaseTest
}
/**
+ * @expectedException InvalidArgumentException
+ */
+ public function testReadLookupHLRWithEmptyNumber()
+ {
+ $this->client->lookupHLR->read(null);
+ }
+
+ /**
* @expectedException MessageBird\Exceptions\ServerException
*/
public function testReadLookupHLRWithCountryCode()
|
new lookup endpoints: prevent users from calling methods without a phone number
|
messagebird_php-rest-api
|
train
|
9ae97d4a8aae7a26f4628941f3b973544522ae64
|
diff --git a/lib/plugins/webpagetest/aggregator.js b/lib/plugins/webpagetest/aggregator.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/webpagetest/aggregator.js
+++ b/lib/plugins/webpagetest/aggregator.js
@@ -28,55 +28,56 @@ module.exports = {
forEach(wptData.data.runs, run => {
// TODO remove this if check once issue with 0 stats, but 200 response is fixed upstream.
- // It seems to be cases when users tries to navigate away before fullyLoaded has happend
- if (
- (wptOptions && wptOptions.video && run.firstView.SpeedIndex > 0) ||
- (wptOptions && !wptOptions.video)
- ) {
- forEach(run, (viewData, viewName) => {
- forEach(metrics, metric =>
- statsHelpers.pushGroupStats(
- this.timingStats,
- this.timingGroups[group],
- [viewName, metric],
- viewData[metric]
- )
- );
+ // It seems to be cases when users tries to navigate away before fullyLoaded has happened
+ if (wptOptions && wptOptions.video && run.firstView.SpeedIndex <= 0) {
+ log.error(
+ `Incomplete first view data for WPT test ${wptData.data
+ .id}, run ${run}`
+ );
+ return false;
+ }
- forEach(viewData.userTimes, (timingData, timingName) =>
- statsHelpers.pushGroupStats(
- this.timingStats,
- this.timingGroups[group],
- [viewName, timingName],
- timingData
- )
- );
+ forEach(run, (viewData, viewName) => {
+ forEach(metrics, metric =>
+ statsHelpers.pushGroupStats(
+ this.timingStats,
+ this.timingGroups[group],
+ [viewName, metric],
+ viewData[metric]
+ )
+ );
- forEach(viewData.breakdown, (contentType, typeName) =>
- forEach(['requests', 'bytes'], property =>
- statsHelpers.pushGroupStats(
- this.assetStats,
- this.assetGroups[group],
- [viewName, typeName, property],
- contentType[property]
- )
+ forEach(viewData.userTimes, (timingData, timingName) =>
+ statsHelpers.pushGroupStats(
+ this.timingStats,
+ this.timingGroups[group],
+ [viewName, timingName],
+ timingData
+ )
+ );
+
+ forEach(viewData.breakdown, (contentType, typeName) =>
+ forEach(['requests', 'bytes'], property =>
+ statsHelpers.pushGroupStats(
+ this.assetStats,
+ this.assetGroups[group],
+ [viewName, typeName, property],
+ contentType[property]
)
- );
+ )
+ );
- forEach(viewData.custom, metricName => {
- if (!isNaN(viewData[metricName])) {
- statsHelpers.pushGroupStats(
- this.customStats,
- this.customGroups[group],
- [viewName, 'custom', metricName],
- viewData[metricName]
- );
- }
- });
+ forEach(viewData.custom, metricName => {
+ if (!isNaN(viewData[metricName])) {
+ statsHelpers.pushGroupStats(
+ this.customStats,
+ this.customGroups[group],
+ [viewName, 'custom', metricName],
+ viewData[metricName]
+ );
+ }
});
- } else {
- log.error('First View: Test Data Missing');
- }
+ });
});
},
summarize() {
|
Log id and run for WPT runs with incomplete data. (#<I>)
|
sitespeedio_sitespeed.io
|
train
|
c3cac72b92a5017423ce2358b8bfc3a56bb45fbb
|
diff --git a/cmd/ctr/commands/tasks/metrics.go b/cmd/ctr/commands/tasks/metrics.go
index <HASH>..<HASH> 100644
--- a/cmd/ctr/commands/tasks/metrics.go
+++ b/cmd/ctr/commands/tasks/metrics.go
@@ -89,13 +89,19 @@ var metricsCommand = cli.Command{
fmt.Fprintf(w, "%s\t%s\t\n\n", metric.ID, metric.Timestamp)
fmt.Fprintf(w, "METRIC\tVALUE\t\n")
- fmt.Fprintf(w, "memory.usage_in_bytes\t%d\t\n", data.Memory.Usage.Usage)
- fmt.Fprintf(w, "memory.limit_in_bytes\t%d\t\n", data.Memory.Usage.Limit)
- fmt.Fprintf(w, "memory.stat.cache\t%d\t\n", data.Memory.TotalCache)
- fmt.Fprintf(w, "cpuacct.usage\t%d\t\n", data.CPU.Usage.Total)
- fmt.Fprintf(w, "cpuacct.usage_percpu\t%v\t\n", data.CPU.Usage.PerCPU)
- fmt.Fprintf(w, "pids.current\t%v\t\n", data.Pids.Current)
- fmt.Fprintf(w, "pids.limit\t%v\t\n", data.Pids.Limit)
+ if data.Memory != nil {
+ fmt.Fprintf(w, "memory.usage_in_bytes\t%d\t\n", data.Memory.Usage.Usage)
+ fmt.Fprintf(w, "memory.limit_in_bytes\t%d\t\n", data.Memory.Usage.Limit)
+ fmt.Fprintf(w, "memory.stat.cache\t%d\t\n", data.Memory.TotalCache)
+ }
+ if data.CPU != nil {
+ fmt.Fprintf(w, "cpuacct.usage\t%d\t\n", data.CPU.Usage.Total)
+ fmt.Fprintf(w, "cpuacct.usage_percpu\t%v\t\n", data.CPU.Usage.PerCPU)
+ }
+ if data.Pids != nil {
+ fmt.Fprintf(w, "pids.current\t%v\t\n", data.Pids.Current)
+ fmt.Fprintf(w, "pids.limit\t%v\t\n", data.Pids.Limit)
+ }
return w.Flush()
case formatJSON:
marshaledJSON, err := json.MarshalIndent(data, "", " ")
|
ctr: fix potential panic in metric
|
containerd_containerd
|
train
|
e3eb520b31c7340d0236fccdc4ba2c6c2a1479da
|
diff --git a/orb/core/model.py b/orb/core/model.py
index <HASH>..<HASH> 100644
--- a/orb/core/model.py
+++ b/orb/core/model.py
@@ -618,6 +618,24 @@ class Model(object):
if self.processEvent(event):
self.onInit(event)
+ def markLoaded(self, *columns):
+ """
+ Tells the model to treat the given columns as though they had been loaded from the database.
+
+ :param columns: (<str>, ..)
+ """
+ schema = self.schema()
+
+ columns = {schema.column(col) for col in columns}
+ column_names = {col.name() for col in columns}
+
+ with WriteLocker(self.__dataLock):
+ for key, (old_value, new_value) in self.__values.items():
+ if key in column_names:
+ self.__values[key] = (new_value, new_value)
+
+ self.__loaded.update(columns)
+
def isModified(self):
"""
Returns whether or not any data has been modified for
@@ -634,12 +652,13 @@ class Model(object):
:return <bool>
"""
- if db in (None, self.context().db):
- col = self.schema().column(self.schema().idColumn())
+ if db is not None:
+ same_db = db == self.context().db
+
+ if db is None or same_db:
+ col = self.schema().idColumn()
with ReadLocker(self.__dataLock):
- if col not in self.__loaded or self.__values[col.name()][0] is None:
- return False
- return True
+ return (col in self.__loaded) and (self.__values[col.name()][0] is not None)
else:
return None
|
* added in the ability to mark loaded columns on a model
* fixed isRecord logic based on that
|
orb-framework_orb
|
train
|
a965bb6f34c7566d1b3a00427d6530119c3f50f5
|
diff --git a/src/com/sbpinvertor/modbus/exception/IllegalDataAddressException.java b/src/com/sbpinvertor/modbus/exception/IllegalDataAddressException.java
index <HASH>..<HASH> 100644
--- a/src/com/sbpinvertor/modbus/exception/IllegalDataAddressException.java
+++ b/src/com/sbpinvertor/modbus/exception/IllegalDataAddressException.java
@@ -46,7 +46,13 @@ import com.sbpinvertor.modbus.utils.ModbusExceptionCode;
* with address 100."
*/
public class IllegalDataAddressException extends ModbusProtocolException {
- public IllegalDataAddressException(int serverAddress) {
+ final private int dataAddress;
+ public IllegalDataAddressException(int serverAddress, int dataAddress) {
super(ModbusExceptionCode.ILLEGAL_DATA_ADDRESS, serverAddress);
+ this.dataAddress = dataAddress;
+ }
+
+ public int getDataAddress() {
+ return dataAddress;
}
}
diff --git a/src/com/sbpinvertor/modbus/exception/IllegalFunctionException.java b/src/com/sbpinvertor/modbus/exception/IllegalFunctionException.java
index <HASH>..<HASH> 100644
--- a/src/com/sbpinvertor/modbus/exception/IllegalFunctionException.java
+++ b/src/com/sbpinvertor/modbus/exception/IllegalFunctionException.java
@@ -1,6 +1,7 @@
package com.sbpinvertor.modbus.exception;
import com.sbpinvertor.modbus.utils.ModbusExceptionCode;
+import com.sbpinvertor.modbus.utils.ModbusFunctionCode;
/**
* Copyright (c) 2015-2016 JSC "Zavod "Invertor"
@@ -39,7 +40,19 @@ import com.sbpinvertor.modbus.utils.ModbusExceptionCode;
* asked to return register values."
*/
public class IllegalFunctionException extends ModbusProtocolException {
- public IllegalFunctionException(int serverAddress) {
+
+ final private ModbusFunctionCode functionCode;
+
+ public IllegalFunctionException(int serverAddress, ModbusFunctionCode functionCode) {
super(ModbusExceptionCode.ILLEGAL_FUNCTION, serverAddress);
+ this.functionCode = functionCode;
+ }
+
+ public IllegalFunctionException(int serverAddress, int code) {
+ this(serverAddress, ModbusFunctionCode.getFunctionCode(code));
+ }
+
+ public ModbusFunctionCode getFunctionCode() {
+ return functionCode;
}
}
|
additional fields in exception classes IllegalFunctionException and IllegalDataAddressException.
|
kochedykov_jlibmodbus
|
train
|
2f6e034d896fbd219f52fa47a8d5afd49cb0220c
|
diff --git a/android/guava/src/com/google/common/primitives/Shorts.java b/android/guava/src/com/google/common/primitives/Shorts.java
index <HASH>..<HASH> 100644
--- a/android/guava/src/com/google/common/primitives/Shorts.java
+++ b/android/guava/src/com/google/common/primitives/Shorts.java
@@ -31,7 +31,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.RandomAccess;
-import javax.annotation.Nullable;
+import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* Static utility methods pertaining to {@code short} primitives, that are not already found in
@@ -588,13 +588,13 @@ public final class Shorts {
}
@Override
- public boolean contains(@Nullable Object target) {
+ public boolean contains(@NullableDecl Object target) {
// Overridden to prevent a ton of boxing
return (target instanceof Short) && Shorts.indexOf(array, (Short) target, start, end) != -1;
}
@Override
- public int indexOf(@Nullable Object target) {
+ public int indexOf(@NullableDecl Object target) {
// Overridden to prevent a ton of boxing
if (target instanceof Short) {
int i = Shorts.indexOf(array, (Short) target, start, end);
@@ -606,7 +606,7 @@ public final class Shorts {
}
@Override
- public int lastIndexOf(@Nullable Object target) {
+ public int lastIndexOf(@NullableDecl Object target) {
// Overridden to prevent a ton of boxing
if (target instanceof Short) {
int i = Shorts.lastIndexOf(array, (Short) target, start, end);
@@ -637,7 +637,7 @@ public final class Shorts {
}
@Override
- public boolean equals(@Nullable Object object) {
+ public boolean equals(@NullableDecl Object object) {
if (object == this) {
return true;
}
diff --git a/guava/src/com/google/common/primitives/Shorts.java b/guava/src/com/google/common/primitives/Shorts.java
index <HASH>..<HASH> 100644
--- a/guava/src/com/google/common/primitives/Shorts.java
+++ b/guava/src/com/google/common/primitives/Shorts.java
@@ -31,7 +31,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.RandomAccess;
-import javax.annotation.Nullable;
+import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* Static utility methods pertaining to {@code short} primitives, that are not already found in
@@ -588,13 +588,13 @@ public final class Shorts {
}
@Override
- public boolean contains(@Nullable Object target) {
+ public boolean contains(@NullableDecl Object target) {
// Overridden to prevent a ton of boxing
return (target instanceof Short) && Shorts.indexOf(array, (Short) target, start, end) != -1;
}
@Override
- public int indexOf(@Nullable Object target) {
+ public int indexOf(@NullableDecl Object target) {
// Overridden to prevent a ton of boxing
if (target instanceof Short) {
int i = Shorts.indexOf(array, (Short) target, start, end);
@@ -606,7 +606,7 @@ public final class Shorts {
}
@Override
- public int lastIndexOf(@Nullable Object target) {
+ public int lastIndexOf(@NullableDecl Object target) {
// Overridden to prevent a ton of boxing
if (target instanceof Short) {
int i = Shorts.lastIndexOf(array, (Short) target, start, end);
@@ -637,7 +637,7 @@ public final class Shorts {
}
@Override
- public boolean equals(@Nullable Object object) {
+ public boolean equals(@NullableDecl Object object) {
if (object == this) {
return true;
}
|
Switch from @Nullable to @NullableDecl.
RELNOTES=n/a
-------------
Created by MOE: <URL>
|
google_guava
|
train
|
b69d4b08e921875cc3c9efee95c2eb4d58935d1c
|
diff --git a/reaction.py b/reaction.py
index <HASH>..<HASH> 100644
--- a/reaction.py
+++ b/reaction.py
@@ -397,6 +397,52 @@ class MetNet(object):
return Reaction(direction, left, right)
+class KEGG(object):
+ '''Parser for the reaction format in KEGG'''
+
+ @classmethod
+ def parse(cls, s):
+ '''Parse a KEGG reaction string
+
+ >>> KEGG.parse('C00013 + C00001 <=> 2 C00009')
+ Reaction('<=>', [('C00013', 1, None), ('C00001', 1, None)], [('C00009', 2, None)])
+ >>> KEGG.parse('C00404 + n C00001 <=> (n+1) C02174')
+ Reaction('<=>', [('C00404', 1, None), ('C00001', 'n', None)], [('C02174', 'n+1', None)])
+ '''
+ def parse_count(s):
+ m = re.match(r'\((.*)\)', s)
+ if m is not None:
+ s = m.group(1)
+
+ m = re.match(r'\d+', s)
+ if m is not None:
+ return int(m.group(0))
+
+ return s
+
+ def parse_compound_list(s):
+ for cpd in s.split(' + '):
+ if cpd == '':
+ continue
+
+ fields = cpd.strip().split(' ')
+ if len(fields) > 2:
+ raise ParseError('Malformed compound specification: {}'.format(cpd))
+ if len(fields) == 1:
+ count = 1
+ cpdid = fields[0]
+ else:
+ count = parse_count(fields[0])
+ cpdid = fields[1]
+
+ yield cpdid, count, None
+
+ cpd_left, cpd_right = s.split('<=>')
+ left = list(parse_compound_list(cpd_left.strip()))
+ right = list(parse_compound_list(cpd_right.strip()))
+
+ return Reaction('<=>', left, right)
+
if __name__ == '__main__':
import doctest
|
reaction: Add KEGG reaction parser
|
zhanglab_psamm
|
train
|
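A quick illustration of the format the commit above parses: each side of a KEGG reaction string lists compound IDs separated by ' + ', optionally prefixed by a count that may be symbolic and wrapped in parentheses. The following standalone Python sketch (not the psamm code itself; helper names are made up) mirrors the parse_count/parse_compound_list logic shown in the diff:

import re

def parse_kegg_side(side):
    """Parse one side of a KEGG reaction string into (compound, count) pairs."""
    compounds = []
    for term in side.split(' + '):
        term = term.strip()
        if not term:
            continue
        fields = term.split(' ')
        if len(fields) == 1:
            cpd, count = fields[0], 1
        else:
            raw, cpd = fields[0], fields[1]
            # Counts may be wrapped in parentheses, e.g. "(n+1)"
            m = re.match(r'\((.*)\)', raw)
            if m:
                raw = m.group(1)
            count = int(raw) if raw.isdigit() else raw
        compounds.append((cpd, count))
    return compounds

left, right = 'C00404 + n C00001 <=> (n+1) C02174'.split('<=>')
print(parse_kegg_side(left))   # [('C00404', 1), ('C00001', 'n')]
print(parse_kegg_side(right))  # [('C02174', 'n+1')]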
a0637a357ecd2be1d565c3da6d8cb7d3e29ff0d2
|
diff --git a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java
+++ b/src/main/java/com/conveyal/gtfs/loader/JdbcGtfsSnapshotter.java
@@ -1,6 +1,5 @@
package com.conveyal.gtfs.loader;
-import com.sun.xml.internal.ws.util.StringUtils;
import org.apache.commons.dbutils.DbUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -184,7 +183,8 @@ public class JdbcGtfsSnapshotter {
.map(d -> String.format(
"case %s when 1 then '%s' else '' end",
d,
- StringUtils.capitalize(d.substring(0, 2)))).toArray(String[]::new));
+ // Capitalize first letter. Converts days of week from "monday" -> "Mo".
+ d.substring(0, 1).toUpperCase() + d.substring(1, 2))).toArray(String[]::new));
String updateOtherSql = String.format(
"update %scalendar set description = concat(%s) where description is NULL",
tablePrefix,
|
replace StringUtils.capitalize with substring method
StringUtils import from com.sun.xml.internal was causing issues with the build
|
conveyal_gtfs-lib
|
train
|
74d9d9ecc5464c93a2a1dbfb1b0a2918923a75d2
|
diff --git a/datapoint/__init__.py b/datapoint/__init__.py
index <HASH>..<HASH> 100644
--- a/datapoint/__init__.py
+++ b/datapoint/__init__.py
@@ -12,7 +12,7 @@ import datapoint.profile
def connection(profile_name='default', api_key=None):
"""Connect to DataPoint with the given API key profile name."""
- if api_key == None:
+ if api_key is None:
profile_fname = datapoint.profile.API_profile_fname(profile_name)
if not os.path.exists(profile_fname):
raise ValueError('Profile not found in {}. Please install your API \n'
diff --git a/examples/washing/washing.py b/examples/washing/washing.py
index <HASH>..<HASH> 100644
--- a/examples/washing/washing.py
+++ b/examples/washing/washing.py
@@ -50,7 +50,7 @@ for day in forecast.days:
best_day = day.date
# If best_day is still None then there are no good days
-if best_day == None:
+if best_day is None:
print "Better use the tumble dryer"
# Otherwise print out the day
|
Changed '== None' to 'is None' for PEP8 compliance
|
jacobtomlinson_datapoint-python
|
train
|
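Worth noting why the change above is more than style: '== None' invokes the object's __eq__, which a class can override, while 'is None' is a pure identity check. A minimal, self-contained demonstration (unrelated to the datapoint code):

class AlwaysEqual:
    """Claims equality with everything via __eq__."""
    def __eq__(self, other):
        return True

obj = AlwaysEqual()
print(obj == None)   # True  -- misleading, obj is not actually None
print(obj is None)   # False -- identity check gives the right answer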
b0d75941456ceaf04070b9d8b984d0d93434661b
|
diff --git a/src/jquery.contextMenu.js b/src/jquery.contextMenu.js
index <HASH>..<HASH> 100755
--- a/src/jquery.contextMenu.js
+++ b/src/jquery.contextMenu.js
@@ -331,7 +331,7 @@
var visible;
if ($.isFunction(e.data.items[item].visible)) {
visible = e.data.items[item].visible.call($(e.currentTarget), item, e.data);
- } else if (typeof item.visible !== 'undefined') {
+ } else if (typeof e.data.items[item] !== 'undefined') {
visible = e.data.items[item].visible === true;
} else {
visible = true;
|
The contextmenu shows even if all items are set to visible:false #<I>
|
swisnl_jQuery-contextMenu
|
train
|
6651c0186450b8f1b6c4d126a0e11549e568c177
|
diff --git a/firetv/__main__.py b/firetv/__main__.py
index <HASH>..<HASH> 100644
--- a/firetv/__main__.py
+++ b/firetv/__main__.py
@@ -106,7 +106,7 @@ def add_device():
req = request.get_json()
success = False
if 'device_id' in req and 'host' in req:
- success = add(req['device_id'], req['host'], req['adbkey'])
+ success = add(req['device_id'], req['host'], req.get('adbkey', ''))
return jsonify(success=success)
@@ -149,7 +149,7 @@ def running_apps(device_id):
abort(403)
if device_id not in devices:
abort(404)
- return jsonify(running_apps=devices[device_id].running_apps())
+ return jsonify(running_apps=devices[device_id].running_apps)
@app.route('/devices/<device_id>/apps/state/<app_id>', methods=['GET'])
def get_app_state(device_id, app_id):
@@ -230,7 +230,7 @@ def _add_devices_from_config(args):
raise ValueError('devicename "default" in config is not allowed if default param is set')
if config['devices'][device]['host'] == args.default:
raise ValueError('host set in default param must not be defined in config')
- add(device, config['devices'][device]['host'], config['devices'][device]['adbkey'])
+ add(device, config['devices'][device]['host'], config['devices'][device].get('adbkey', ''))
def main():
""" Set up the server. """
|
Fixes in firetv server ('adbkey' and 'running_apps')
|
happyleavesaoc_python-firetv
|
train
|
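The key change above replaces req['adbkey'] with req.get('adbkey', ''), so requests and config entries without an adbkey fall back to an empty string instead of raising KeyError. A small sketch of the difference, using a hypothetical request payload:

req = {'device_id': 'living-room', 'host': '192.168.1.10:5555'}  # no 'adbkey'

# Direct indexing raises KeyError when the key is absent.
try:
    adbkey = req['adbkey']
except KeyError:
    adbkey = None
print(adbkey)                        # None

# dict.get returns the supplied default instead of raising.
print(repr(req.get('adbkey', '')))   # ''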
245b1a4c29cf05fb04f186ea04eb50016f7f1d31
|
diff --git a/wakeonlan.py b/wakeonlan.py
index <HASH>..<HASH> 100755
--- a/wakeonlan.py
+++ b/wakeonlan.py
@@ -29,12 +29,10 @@ def create_magic_packet(macaddress):
magic packet.
"""
- if len(macaddress) == 12:
- pass
- elif len(macaddress) == 17:
+ if len(macaddress) == 17:
sep = macaddress[2]
macaddress = macaddress.replace(sep, '')
- else:
+ elif len(macaddress) != 12:
raise ValueError('Incorrect MAC address format')
# Pad the synchronization stream
@@ -63,16 +61,13 @@ def send_magic_packet(*macs, **kwargs):
(default 9)
"""
- packets = []
ip = kwargs.pop('ip_address', BROADCAST_IP)
port = kwargs.pop('port', DEFAULT_PORT)
for k in kwargs:
raise TypeError('send_magic_packet() got an unexpected keyword '
'argument {!r}'.format(k))
- for mac in macs:
- packet = create_magic_packet(mac)
- packets.append(packet)
+ packets = [create_magic_packet(mac) for mac in macs]
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
|
Clean up MAC address formatting and packet creation logic
|
remcohaszing_pywakeonlan
|
train
|
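For context on what create_magic_packet produces: a Wake-on-LAN magic packet is six 0xFF bytes followed by the target MAC repeated sixteen times. A rough standalone sketch of that layout, assuming a 17-character separator-delimited MAC as in the simplified check above (illustrative, not the pywakeonlan implementation):

import binascii

def magic_packet(mac):
    """Build the Wake-on-LAN payload for a MAC like 'aa:bb:cc:dd:ee:ff'."""
    if len(mac) == 17:
        mac = mac.replace(mac[2], '')   # strip the separator character
    elif len(mac) != 12:
        raise ValueError('Incorrect MAC address format')
    payload = binascii.unhexlify(mac)
    # Synchronization stream (6 x 0xFF) + 16 repetitions of the MAC.
    return b'\xff' * 6 + payload * 16

print(len(magic_packet('aa:bb:cc:dd:ee:ff')))  # 102 bytes: 6 + 16 * 6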
adf653e2e828e209092be643ce93ffc216367e8f
|
diff --git a/tests/integration/cloud/providers/virtualbox.py b/tests/integration/cloud/providers/virtualbox.py
index <HASH>..<HASH> 100644
--- a/tests/integration/cloud/providers/virtualbox.py
+++ b/tests/integration/cloud/providers/virtualbox.py
@@ -158,7 +158,7 @@ class CreationDestructionVirtualboxTests(VirtualboxTestCase):
super(CreationDestructionVirtualboxTests, self).setUp()
def test_vm_creation_and_destruction(self):
- vm_name = "__temp_test_vm__"
+ vm_name = BASE_BOX_NAME
virtualbox.vb_create_machine(vm_name)
self.assertMachineExists(vm_name)
|
Use the constants for testing
Related to saltstack/salt#<I> Saltcloud virtualbox provider
|
saltstack_salt
|
train
|
3abb0c543b8204264bb8be073d43557f59c80dc5
|
diff --git a/ghost/members-api/lib/users.js b/ghost/members-api/lib/users.js
index <HASH>..<HASH> 100644
--- a/ghost/members-api/lib/users.js
+++ b/ghost/members-api/lib/users.js
@@ -145,6 +145,15 @@ module.exports = function ({
return stripe.linkStripeCustomer(id, member, options);
}
+ async function setComplimentarySubscription(member, options) {
+ if (!stripe) {
+ throw new common.errors.BadRequestError({
+ message: 'Cannot link create Complimentary Subscription without a Stripe connection'
+ });
+ }
+ return stripe.setComplimentarySubscription(member, options);
+ }
+
return {
create,
update,
@@ -152,7 +161,7 @@ module.exports = function ({
get,
destroy,
updateSubscription,
- setComplimentarySubscription: safeStripe('setComplimentarySubscription'),
+ setComplimentarySubscription,
setComplimentarySubscriptionById,
cancelComplimentarySubscription: safeStripe('cancelComplimentarySubscription'),
cancelStripeSubscriptions: safeStripe('cancelComplimentarySubscription'),
|
Updated setComplimentarySubscription to error without a Stripe connection
no-issue
This allows the consumer (e.g. the importer) to surface errors when importing comped members
|
TryGhost_Ghost
|
train
|
af79256a87b7520a66b906fe63993f1cc9f22805
|
diff --git a/Twig/JqGridExtension.php b/Twig/JqGridExtension.php
index <HASH>..<HASH> 100644
--- a/Twig/JqGridExtension.php
+++ b/Twig/JqGridExtension.php
@@ -10,10 +10,10 @@
*/
namespace EPS\JqGridBundle\Twig;
-
use EPS\JqGridBundle\Grid\Grid;
-class JqGridExtension extends \Twig_Extension {
+class JqGridExtension extends \Twig_Extension
+{
const DEFAULT_TEMPLATE = 'EPSJqGridBundle::blocks.html.twig';
@@ -50,14 +50,45 @@ class JqGridExtension extends \Twig_Extension {
public function getFunctions()
{
return array(
- 'jqgrid_js' => new \Twig_Function_Method($this, 'renderGrid', array('is_safe' => array('html'))),
+ 'jqgrid' => new \Twig_Function_Method($this, 'renderGrid',
+ array(
+ 'is_safe' => array(
+ 'html'
+ )
+ )),
+ 'jqgrid_js' => new \Twig_Function_Method($this, 'renderGridJs',
+ array(
+ 'is_safe' => array(
+ 'html'
+ )
+ )),
+ 'jqgrid_html' => new \Twig_Function_Method($this, 'renderGridHtml',
+ array(
+ 'is_safe' => array(
+ 'html'
+ )
+ )),
);
}
public function renderGrid(Grid $grid)
{
if (!$grid->isOnlyData()) {
- return $this->renderBlock('gridjs', array('grid' => $grid));
+ return $this->renderBlock('jqgrid', array('grid' => $grid));
+ }
+ }
+
+ public function renderGridJs(Grid $grid)
+ {
+ if (!$grid->isOnlyData()) {
+ return $this->renderBlock('jqgrid_j', array('grid' => $grid));
+ }
+ }
+
+ public function renderGridHtml(Grid $grid)
+ {
+ if (!$grid->isOnlyData()) {
+ return $this->renderBlock('jqgrid_h', array('grid' => $grid));
}
}
|
Twig functions to render all, or JS and HTML separately
|
michelpa_JqGridBundle
|
train
|
23f6a88341537695b38ed4ea07acd30fd57334f0
|
diff --git a/course/classes/search/section.php b/course/classes/search/section.php
index <HASH>..<HASH> 100644
--- a/course/classes/search/section.php
+++ b/course/classes/search/section.php
@@ -85,6 +85,9 @@ class section extends \core_search\base {
* @return \core_search\document
*/
public function get_document($record, $options = array()) {
+ global $CFG;
+ require_once($CFG->dirroot . '/course/lib.php');
+
// Get the context, modinfo, and section.
try {
$context = \context_course::instance($record->course);
|
MDL-<I> core_search: Cron error indexing sections in some cases
|
moodle_moodle
|
train
|
c9b4a046f54d0cdab9abaa95717672f1a5302b22
|
diff --git a/src/main/java/org/logicng/solvers/SATSolver.java b/src/main/java/org/logicng/solvers/SATSolver.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/logicng/solvers/SATSolver.java
+++ b/src/main/java/org/logicng/solvers/SATSolver.java
@@ -90,7 +90,7 @@ public abstract class SATSolver {
* Adds a given set of propositions to the solver.
* @param propositions the set of propositions
*/
- public void addPropositions(final Collection<Proposition> propositions) {
+ public void addPropositions(final Collection<? extends Proposition> propositions) {
for (final Proposition proposition : propositions)
add(proposition);
}
|
Generified addPropositions in SATSolver
|
logic-ng_LogicNG
|
train
|
7b86a778bc7b8d9a08305d3a43a6f038e36d2d24
|
diff --git a/tests/Router/ViewTest.php b/tests/Router/ViewTest.php
index <HASH>..<HASH> 100644
--- a/tests/Router/ViewTest.php
+++ b/tests/Router/ViewTest.php
@@ -60,7 +60,7 @@ use Tests\TestCase;
* 使用静态代理
*
* ``` php
- * \Leevel\Router\Proxy\View::->setVar($name, $value = null): void;
+ * \Leevel\Router\Proxy\View::setVar($name, $value = null): void;
* ```
*
* ## view 配置
|
docs(view): fix view doc
|
hunzhiwange_framework
|
train
|
434e229a2d71316ec390499cdbf0bd79eadb4256
|
diff --git a/lib/ripple-rest/helpers.rb b/lib/ripple-rest/helpers.rb
index <HASH>..<HASH> 100644
--- a/lib/ripple-rest/helpers.rb
+++ b/lib/ripple-rest/helpers.rb
@@ -126,7 +126,7 @@ module RippleRest
if source_currencies
cur = source_currencies.join(",")
- uri += "?#{cur}"
+ uri += "?source_currencies=#{cur}"
end
RippleRest.get(uri)["payments"].map(&Payment.method(:new)).map do |i|
@@ -210,4 +210,4 @@ module RippleRest
RippleRest.post "v1/accounts/#{account.address}/trustlines", hash
end
end
-end
\ No newline at end of file
+end
|
Fix the source currencies filter for Payments#find_path
|
orzFly_ruby-ripple-rest
|
train
|
21401859520682c97cc498eb4630dabc06faec64
|
diff --git a/lib/operations.js b/lib/operations.js
index <HASH>..<HASH> 100644
--- a/lib/operations.js
+++ b/lib/operations.js
@@ -35,13 +35,14 @@ function getJobs (printer, req, res) {
var attributes = getAttributesForGroup(req.body, C.OPERATION_ATTRIBUTES_TAG)
var which = getFirstValueForName(attributes, 'which-jobs')
- if (which && (which !== 'completed' || which !== 'not-completed')) {
+ if (which && which !== 'completed' && which !== 'not-completed') {
res.send(
C.CLIENT_ERROR_ATTRIBUTES_OR_VALUES_NOT_SUPPORTED,
{ tag: C.UNSUPPORTED_ATTRIBUTES_TAG, attributes: [
{ tag: C.UNSUPPORTED, name: 'which-jobs', value: which }
] }
)
+ return
}
var _groups = printer.jobs
|
Fix handling of unsupported which-jobs value
|
watson_ipp-printer
|
train
|
5f5573e36bbe6116e9e2adde9abaeb0d8c30c24f
|
diff --git a/cloudvolume/lib.py b/cloudvolume/lib.py
index <HASH>..<HASH> 100644
--- a/cloudvolume/lib.py
+++ b/cloudvolume/lib.py
@@ -311,6 +311,20 @@ class Bbox(object):
)
@classmethod
+ def create(cls, obj):
+ typ = type(obj)
+ if typ is Bbox:
+ return obj
+ elif typ is list:
+ return Bbox.from_slices(obj)
+ elif typ is Vec:
+ return Bbox.from_vec(obj)
+ elif typ is str:
+ return Bbox.from_filename(obj)
+ else:
+ raise NotImplementedError("{} is not a Bbox convertible type.".format(typ))
+
+ @classmethod
def from_vec(cls, vec):
return Bbox( (0,0,0), vec )
diff --git a/cloudvolume/txrx.py b/cloudvolume/txrx.py
index <HASH>..<HASH> 100644
--- a/cloudvolume/txrx.py
+++ b/cloudvolume/txrx.py
@@ -59,6 +59,7 @@ NON_ALIGNED_WRITE = yellow(
set non_aligned_writes=True.
Alignment Check:
+ Mip: {mip}
Volume Offset: {offset}
Received: {got}
Nearest Aligned: {check}
@@ -302,7 +303,7 @@ def upload_image(vol, img, offset, parallel=1,
manual_shared_memory_order=manual_shared_memory_order)
return
elif vol.non_aligned_writes == False:
- msg = NON_ALIGNED_WRITE.format(offset=vol.voxel_offset, got=bounds, check=expanded)
+ msg = NON_ALIGNED_WRITE.format(mip=vol.mip, offset=vol.voxel_offset, got=bounds, check=expanded)
raise AlignmentError(msg)
# Upload the aligned core
|
feat: add mip information to non-aligned writes
|
seung-lab_cloud-volume
|
train
|
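The new Bbox.create classmethod is a type-dispatching factory: it hands a Bbox straight back and routes lists, Vecs, and strings to the matching constructor. A generic sketch of that pattern with placeholder classes (isinstance is used here for brevity; the cloud-volume code dispatches on exact types):

class Box:
    def __init__(self, minpt, maxpt):
        self.minpt, self.maxpt = tuple(minpt), tuple(maxpt)

    @classmethod
    def from_slices(cls, slices):
        return cls([s.start for s in slices], [s.stop for s in slices])

    @classmethod
    def create(cls, obj):
        """Dispatch on the input type and build a Box from it."""
        if isinstance(obj, cls):
            return obj
        if isinstance(obj, (list, tuple)):
            return cls.from_slices(obj)
        raise NotImplementedError("{} is not a Box convertible type.".format(type(obj)))

print(Box.create([slice(0, 10), slice(0, 20)]).maxpt)  # (10, 20)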
a50dab94d9f6b89ade1470053f3633be67ef60df
|
diff --git a/README.rst b/README.rst
index <HASH>..<HASH> 100644
--- a/README.rst
+++ b/README.rst
@@ -1,6 +1,8 @@
Belogging
=========
+*Don't fight with logging ...*
+
|TravisCI Build Status| |Coverage Status|
----
@@ -29,11 +31,12 @@ Simple applications:
# my_script.py
- import logging
import belogging
- belogging.load()
+ belogging.load() # this call is optional, only useful for customization
- logger = logging.getLogger('foobar')
+ # belogging.getLogger is just a sugar to logging.getLogger, you can
+ # use logging.getLogger as usual (and recommended).
+ logger = belogging.getLogger('foobar')
logger.debug('test 1')
logger.info('test 2')
@@ -56,7 +59,7 @@ Executing:
# only level=INFO message=test 2
-Other applications should call ```belogging.load()``` upon initialization.
+Applications should call ```belogging.load()``` upon initialization.
The first ```__init__.py``` would be a good candidate, but anything before any call to
```logging``` module will be fine.
@@ -97,7 +100,7 @@ Logging follows a hierarchy, so you easily select or skip some logging messages:
Note:
-----
-If you are developing a library you should not configure the logging.
+If you are developing a library you should not configure logging.
Applications configure it, libraries only "log" messages.
diff --git a/belogging/__init__.py b/belogging/__init__.py
index <HASH>..<HASH> 100644
--- a/belogging/__init__.py
+++ b/belogging/__init__.py
@@ -1,15 +1,28 @@
-# -*- coding: utf-8 -*-
-# vi:si:et:sw=4:sts=4:ts=4
+import logging
from .loader import BeloggingLoader
# Sugar
+__loaded = False
+
+
def load(log_format=None, **options):
loader = BeloggingLoader(**options)
if log_format is not None:
loader.update_default_formatter(log_format)
- return loader.setup()
+ global __loaded
+ retval = loader.setup()
+ __loaded = True
+ return retval
+
+
+def getLogger(name):
+ if __loaded:
+ return logging.getLogger(name)
+
+ load()
+ return getLogger(name)
diff --git a/belogging/__version__.py b/belogging/__version__.py
index <HASH>..<HASH> 100644
--- a/belogging/__version__.py
+++ b/belogging/__version__.py
@@ -1,4 +1 @@
-# -*- coding: utf-8 -*-
-# vi:si:et:sw=4:sts=4:ts=4
-
__version__ = '0.0.1'
diff --git a/belogging/defaults.py b/belogging/defaults.py
index <HASH>..<HASH> 100644
--- a/belogging/defaults.py
+++ b/belogging/defaults.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-# vi:si:et:sw=4:sts=4:ts=4
-
DEFAULT_LOGGING_CONF = {
'version': 1,
@@ -10,11 +7,11 @@ DEFAULT_LOGGING_CONF = {
},
'filters': {
'logger_filter': {
- '(,)': 'belogging.filters.LoggerFilter',
+ '()': 'belogging.filters.LoggerFilter',
},
},
'handlers': {
- 'console': {
+ 'default': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'default',
@@ -25,7 +22,7 @@ DEFAULT_LOGGING_CONF = {
},
},
'root': {
- 'handlers': ['console'],
+ 'handlers': ['default'],
'level': 'DEBUG',
},
'loggers': {},
diff --git a/belogging/filters.py b/belogging/filters.py
index <HASH>..<HASH> 100644
--- a/belogging/filters.py
+++ b/belogging/filters.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-# vi:si:et:sw=4:sts=4:ts=4
-
import os
import logging
diff --git a/belogging/loader.py b/belogging/loader.py
index <HASH>..<HASH> 100644
--- a/belogging/loader.py
+++ b/belogging/loader.py
@@ -1,6 +1,3 @@
-# -*- coding: utf-8 -*-
-# vi:si:et:sw=4:sts=4:ts=4
-
from copy import deepcopy
import logging
import logging.config
|
Sugar and fixes (#3)
* Add getLogger sugar that maps to logging.getLogger + autoload configuration
* Remove file headers
* Fix: logger_filter key in logging configuration
* Rename main handler to "default"
* Update README
|
georgeyk_belogging
|
train
|
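The sugar added above boils down to a module-level flag plus a getLogger wrapper that applies the default configuration on first use. The idea in isolation (illustrative only; the real BeloggingLoader builds a full dictConfig with filters and handlers):

import logging

_loaded = False

def load():
    """Apply a default logging configuration exactly once."""
    global _loaded
    logging.basicConfig(level=logging.DEBUG,
                        format='%(name)s %(levelname)s %(message)s')
    _loaded = True

def get_logger(name):
    """Sugar for logging.getLogger that auto-loads configuration when needed."""
    if not _loaded:
        load()
    return logging.getLogger(name)

get_logger('foobar').info('configured lazily')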
1a6cb6c3111453931217b408e5c8526a79e87955
|
diff --git a/src/OAuth1/Client/Signature/HmacSha1.php b/src/OAuth1/Client/Signature/HmacSha1.php
index <HASH>..<HASH> 100644
--- a/src/OAuth1/Client/Signature/HmacSha1.php
+++ b/src/OAuth1/Client/Signature/HmacSha1.php
@@ -78,16 +78,12 @@ class HmacSha1 implements Signature
* @param string $httpMethod
*/
public function __construct(
- $resourceURL,
\Snaggle\OAuth1\Client\Credential $consumerCredential,
\Snaggle\OAuth1\Client\Credential $userCredential,
- $httpMethod = 'GET'
)
{
- $this->resourceURL = $resourceURL;
$this->consumerCredential = $consumerCredential;
$this->userCredential = $userCredential;
- $this->httpMethod = $httpMethod;
}
/**
|
Removed HTTP Method and Resource URL from the constructor
|
mfrost503_Snaggle
|
train
|
c0d1e7977921000307a83115d8f853955a9a9ce3
|
diff --git a/google-cloud-storage/lib/google/cloud/storage/bucket.rb b/google-cloud-storage/lib/google/cloud/storage/bucket.rb
index <HASH>..<HASH> 100644
--- a/google-cloud-storage/lib/google/cloud/storage/bucket.rb
+++ b/google-cloud-storage/lib/google/cloud/storage/bucket.rb
@@ -557,7 +557,8 @@ module Google
# Set the Cloud KMS encryption key that will be used to protect files.
# For example: `projects/a/locations/b/keyRings/c/cryptoKeys/d`
#
- # @param [String] new_default_kms_key New Cloud KMS key name.
+ # @param [String, nil] new_default_kms_key New Cloud KMS key name, or
+ # `nil` to delete the Cloud KMS encryption key.
#
# @example
# require "google/cloud/storage"
@@ -571,6 +572,15 @@ module Google
#
# bucket.default_kms_key = kms_key_name
#
+ # @example Delete the default Cloud KMS encryption key:
+ # require "google/cloud/storage"
+ #
+ # storage = Google::Cloud::Storage.new
+ #
+ # bucket = storage.bucket "my-bucket"
+ #
+ # bucket.default_kms_key = nil
+ #
def default_kms_key= new_default_kms_key
@gapi.encryption = API::Bucket::Encryption.new \
default_kms_key_name: new_default_kms_key
|
docs(storage): Update Bucket#default_kms_key= docs
* Demonstrate deleting the Cloud KMS encryption key
pr: #<I>
refs: #<I>
|
googleapis_google-cloud-ruby
|
train
|
a11bd56a5d312446b6e9f4571455d093543938b6
|
diff --git a/spec/awesome_spawn_spec.rb b/spec/awesome_spawn_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/awesome_spawn_spec.rb
+++ b/spec/awesome_spawn_spec.rb
@@ -80,6 +80,7 @@ describe AwesomeSpawn do
context "with real execution" do
before do
+ # Re-enable actual spawning just for these specs.
Kernel.stub(:spawn).and_call_original
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -14,6 +14,10 @@ RSpec.configure do |config|
# the seed, which is printed after each run.
# --seed 1234
config.order = 'random'
+
+ config.before do
+ Kernel.stub(:spawn).and_raise("Spawning is not permitted in specs. Please change your spec to use expectations/stubs.")
+ end
end
require 'awesome_spawn'
|
Disable actual spawning for specs, except where expected.
|
ManageIQ_awesome_spawn
|
train
|
f230f12621216a0546b2a7f2b7e3312e0871afb5
|
diff --git a/web/concrete/single_pages/dashboard/system/seo/excluded.php b/web/concrete/single_pages/dashboard/system/seo/excluded.php
index <HASH>..<HASH> 100644
--- a/web/concrete/single_pages/dashboard/system/seo/excluded.php
+++ b/web/concrete/single_pages/dashboard/system/seo/excluded.php
@@ -1,12 +1,12 @@
<?php defined('C5_EXECUTE') or die('Access Denied');
$form = Loader::helper('form');
-echo Loader::helper('concrete/dashboard')->getDashboardPaneHeaderWrapper(t('Reserved Words'), t("Reserved Words"), false, false); ?>
+echo Loader::helper('concrete/dashboard')->getDashboardPaneHeaderWrapper(t('Reserved Words'), t("Words listed here will be automatically removed from url slugs."), false, false); ?>
<form method="post" id="url-form" action="<?php echo $this->action('save')?>">
<div class="ccm-pane-body">
<div class="control-group">
<textarea style='width:100%;height:100px' name='SEO_EXCLUDE_WORDS'><?=$SEO_EXCLUDE_WORDS?></textarea>
- <span class='help-block'><?=t('Separate reserved words with a new line.')?></span>
+ <span class='help-block'><?=t('Separate reserved words with a comma.')?></span>
</div>
</div>
<div class="ccm-pane-footer">
|
added help text and corrected form help
Former-commit-id: <I>eed<I>cbc<I>d<I>b<I>ee<I>ba0eefb
|
concrete5_concrete5
|
train
|
a7bafcc16262bccdd881a01d04b38bfaf97aee35
|
diff --git a/src/tokenValidator.js b/src/tokenValidator.js
index <HASH>..<HASH> 100644
--- a/src/tokenValidator.js
+++ b/src/tokenValidator.js
@@ -10,7 +10,7 @@ export class TokenValidator {
message: 'empty token'
};
}
- if(!/^[$_a-zA-Z][0-9a-zA-Z$_]*$/.test(key)) {
+ if(!/^[$_a-zA-ZÀ-ÿ][0-9a-zA-ZÀ-ÿ$_]*$/.test(key)) {
return {
isValid: false,
message: key + ' is not valid TypeScript variable name.'
|
accept all accents
TypeScript, JavaScript and CSS support accents
|
Quramy_typed-css-modules
|
train
|
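The widened character class simply adds the Latin-1 accented range (À-ÿ) to the characters allowed in the variable-name check. The same pattern, exercised with Python's re module purely for illustration (the project itself is TypeScript/JavaScript):

import re

IDENT = re.compile(r'^[$_a-zA-ZÀ-ÿ][0-9a-zA-ZÀ-ÿ$_]*$')

for key in ('café', '_élément', '2cols', 'naïve-class'):
    print(key, bool(IDENT.match(key)))
# café True, _élément True, 2cols False (leading digit), naïve-class False (hyphen)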
ce9c3a0e780df75ee4f08642f0da13a869532d59
|
diff --git a/recipe/symfony.php b/recipe/symfony.php
index <HASH>..<HASH> 100644
--- a/recipe/symfony.php
+++ b/recipe/symfony.php
@@ -58,7 +58,7 @@ task('deploy:assets', function () {
return "{{release_path}}/$asset";
}, get('assets')));
- $time = date('Ymdhi.s');
+ $time = run('date +%Y%m%d%H%M.%S')
run("find $assets -exec touch -t $time {} ';' &> /dev/null || true");
})->desc('Normalize asset timestamps');
|
Also change time calculation for Symfony assets
|
deployphp_deployer
|
train
|
044024287084466b233e2550c63e20d6e7ba47e8
|
diff --git a/lib/ronin/ui/console.rb b/lib/ronin/ui/console.rb
index <HASH>..<HASH> 100644
--- a/lib/ronin/ui/console.rb
+++ b/lib/ronin/ui/console.rb
@@ -144,32 +144,6 @@ module Ronin
end
#
- # Creates a new Console instance.
- #
- # @param [Hash{Symbol => Object}] variables
- # Instance variable names and values to set within the console.
- #
- # @yield []
- # The block to be ran within the Console, after it has been setup.
- #
- # @since 1.0.0
- #
- def initialize(variables={},&block)
- # populate instance variables
- variables.each do |name,value|
- instance_variable_set("@#{name}".to_sym,value)
- end
-
- # run any setup-blocks
- @@setup_blocks.each do |setup_block|
- context.instance_eval(&setup_block)
- end
-
- # run the supplied configuration block is given
- instance_eval(&block) if block
- end
-
- #
# Starts a Console.
#
# @param [Hash{Symbol => Object}] variables
@@ -213,31 +187,44 @@ module Ronin
# append the current directory to $LOAD_PATH for Ruby 1.9.
$LOAD_PATH << '.' unless $LOAD_PATH.include?('.')
- context = self.new(variables,&block)
- context_binding = context.instance_eval { binding }
+ context = class << self.new; self; end
+
+ # populate instance variables
+ variables.each do |name,value|
+ context.instance_variable_set("@#{name}".to_sym,value)
+ end
+
+ # run any setup-blocks
+ @@setup_blocks.each do |setup_block|
+ context.instance_eval(&setup_block)
+ end
+
+ # run the supplied configuration block is given
+ context.instance_eval(&block) if block
+ # Start the Ripl console
Ripl.start(
:argv => [],
:name => 'ronin',
- :binding => context_binding,
+ :binding => context.instance_eval { binding },
:history => HISTORY_FILE
)
return context
end
- alias include extend
-
- #
- # Inspects the console.
- #
- # @return [String]
- # The inspected console.
- #
- # @since 1.0.0
- #
- def inspect
- 'Console'
+ class << self
+ #
+ # Inspects the console.
+ #
+ # @return [String]
+ # The inspected console.
+ #
+ # @since 1.0.0
+ #
+ def inspect
+ "#<Ronin::UI::Console>"
+ end
end
end
|
Use the metaclass of Console.new as the context for Ripl.
|
ronin-ruby_ronin
|
train
|
b9a324a4b447ff0a9d8f07d663ca771f3d0f4619
|
diff --git a/lib/checks/label/multiple-label.js b/lib/checks/label/multiple-label.js
index <HASH>..<HASH> 100644
--- a/lib/checks/label/multiple-label.js
+++ b/lib/checks/label/multiple-label.js
@@ -1,6 +1,8 @@
const id = axe.utils.escapeSelector(node.getAttribute('id'));
-let labels = Array.from(document.querySelectorAll(`label[for="${id}"]`));
let parent = node.parentNode;
+let root = axe.commons.dom.getRootNode(node);
+root = root.documentElement || root;
+let labels = Array.from(root.querySelectorAll(`label[for="${id}"]`));
if (labels.length) {
// filter out CSS hidden labels because they're fine
diff --git a/test/checks/label/multiple-label.js b/test/checks/label/multiple-label.js
index <HASH>..<HASH> 100644
--- a/test/checks/label/multiple-label.js
+++ b/test/checks/label/multiple-label.js
@@ -2,7 +2,7 @@ describe('multiple-label', function() {
'use strict';
var fixture = document.getElementById('fixture');
-
+ var shadowSupported = axe.testUtils.shadowSupport.v1;
var checkContext = axe.testUtils.MockCheckContext();
afterEach(function() {
@@ -176,4 +176,62 @@ describe('multiple-label', function() {
var target = fixture.querySelector('#Q');
assert.isTrue(checks['multiple-label'].evaluate.call(checkContext, target));
});
+
+ (shadowSupported ? it : xit)(
+ 'should consider labels in the same document/shadow tree',
+ function() {
+ fixture.innerHTML = '<div id="target"></div>';
+ var target = document.querySelector('#target');
+ var shadowRoot = target.attachShadow({ mode: 'open' });
+ shadowRoot.innerHTML =
+ '<input id="myinput" /><label for="myinput">normal</label>';
+ var shadowTarget = target.shadowRoot;
+ assert.isFalse(
+ checks['multiple-label'].evaluate.call(
+ checkContext,
+ shadowTarget.firstElementChild
+ )
+ );
+ }
+ );
+
+ (shadowSupported ? it : xit)(
+ 'should return false for valid multiple labels in the same document/shadow tree',
+ function() {
+ fixture.innerHTML = '<div id="target"></div>';
+ var target = document.querySelector('#target');
+ var shadowRoot = target.attachShadow({ mode: 'open' });
+ var innerHTML = '<input type="checkbox" id="D" aria-labelledby="E"/>';
+ innerHTML += '<label for="D" aria-hidden="true">Please</label>';
+ innerHTML += '<label for="D" id="E">Excuse</label>';
+ shadowRoot.innerHTML = innerHTML;
+ var shadowTarget = target.shadowRoot;
+ assert.isFalse(
+ checks['multiple-label'].evaluate.call(
+ checkContext,
+ shadowTarget.firstElementChild
+ )
+ );
+ }
+ );
+
+ (shadowSupported ? it : xit)(
+ 'should return true for invalid multiple labels in the same document/shadow tree',
+ function() {
+ fixture.innerHTML = '<div id="target"></div>';
+ var target = document.querySelector('#target');
+ var shadowRoot = target.attachShadow({ mode: 'open' });
+ var innerHTML = '<input type="checkbox" id="Q"/>';
+ innerHTML += '<label for="Q" aria-hidden="true"></label>';
+ innerHTML += '<label for="Q" >Excuse</label>';
+ shadowRoot.innerHTML = innerHTML;
+ var shadowTarget = target.shadowRoot;
+ assert.isTrue(
+ checks['multiple-label'].evaluate.call(
+ checkContext,
+ shadowTarget.firstElementChild
+ )
+ );
+ }
+ );
});
|
fix(multiple-label): considers explicit labels in the same shadow tree (#<I>)
|
dequelabs_axe-core
|
train
|
e29cc0fc3c8250c9a2acfb6f592d92fb8e75af97
|
diff --git a/ui/src/components/tabs/QTabs.js b/ui/src/components/tabs/QTabs.js
index <HASH>..<HASH> 100644
--- a/ui/src/components/tabs/QTabs.js
+++ b/ui/src/components/tabs/QTabs.js
@@ -515,7 +515,7 @@ export default createComponent({
* always check the existing list again and infer the changes.
*/
function unregisterTab (tabData) {
- tabList.splice(tabList.value.indexOf(tabData), 1)
+ tabList.splice(tabList.indexOf(tabData), 1)
if (unwatchRoute !== void 0) {
const routeList = getRouteList()
|
chore(QTabs): addition to previous commit
|
quasarframework_quasar
|
train
|
75f163266e5597ade9ea06d005b65c511a2afd4e
|
diff --git a/aws-sdk-core/lib/aws-sdk-core/checksums.rb b/aws-sdk-core/lib/aws-sdk-core/checksums.rb
index <HASH>..<HASH> 100644
--- a/aws-sdk-core/lib/aws-sdk-core/checksums.rb
+++ b/aws-sdk-core/lib/aws-sdk-core/checksums.rb
@@ -12,7 +12,7 @@ module Aws
# @param [File, Tempfile, IO#read, String] value
# @return [String<SHA256 Hexdigest>]
def sha256_hexdigest(value)
- if File === value || Tempfile === value
+ if (File === value || Tempfile === value) && !value.path.nil? && File.exist?(value.path)
OpenSSL::Digest::SHA256.file(value).hexdigest
elsif value.respond_to?(:read)
sha256 = OpenSSL::Digest::SHA256.new
@@ -26,7 +26,7 @@ module Aws
# @param [File, Tempfile, IO#read, String] value
# @return [String<MD5>]
def md5(value)
- if File === value || Tempfile === value
+ if (File === value || Tempfile === value) && !value.path.nil? && File.exist?(value.path)
Base64.encode64(OpenSSL::Digest::MD5.file(value).digest).strip
elsif value.respond_to?(:read)
md5 = OpenSSL::Digest::MD5.new
|
OpenSSL::Digest::MD5.file cannot be used on filehandles whose links have
been removed from the filesystem.
Instead, fall back to reading data from the handle.
Fixes #<I>.
|
aws_aws-sdk-ruby
|
train
|
efafff4078e49e9348e2d34a123983ad39866ab5
|
diff --git a/federation/cmd/federation-controller-manager/app/controllermanager.go b/federation/cmd/federation-controller-manager/app/controllermanager.go
index <HASH>..<HASH> 100644
--- a/federation/cmd/federation-controller-manager/app/controllermanager.go
+++ b/federation/cmd/federation-controller-manager/app/controllermanager.go
@@ -52,14 +52,13 @@ import (
)
const (
- // "federation-apiserver-secret" was the old name we used to store
- // Federation API server kubeconfig secret. Unfortunately, this name
- // is very close to "federation-apiserver-secrets" and causes a lot
- // of confusion, particularly while debugging. So deprecating it in
- // favor of the new name but giving people time to migrate.
- // TODO(madhusudancs): this name is deprecated in 1.4 and should be
- // removed in 1.5. Remove it in 1.5.
- DeprecatedKubeconfigSecretName = "federation-apiserver-secret"
+ // "federation-apiserver-kubeconfig" was the old name we used to
+ // store Federation API server kubeconfig secret. We are
+ // deprecating it in favor of `--kubeconfig` flag but giving people
+ // time to migrate.
+ // TODO(madhusudancs): this name is deprecated in 1.5 and should be
+ // removed in 1.6. Remove it in 1.6.
+ DeprecatedKubeconfigSecretName = "federation-apiserver-kubeconfig"
)
// NewControllerManagerCommand creates a *cobra.Command object with default parameters
@@ -91,8 +90,8 @@ func Run(s *options.CMServer) error {
glog.Errorf("unable to register configz: %s", err)
}
- // If s.Kubeconfig flag is empty, try with the deprecated name in 1.4.
- // TODO(madhusudancs): Remove this in 1.5.
+ // If s.Kubeconfig flag is empty, try with the deprecated name in 1.5.
+ // TODO(madhusudancs): Remove this in 1.6.
var restClientCfg *restclient.Config
var err error
if len(s.Kubeconfig) <= 0 {
@@ -100,18 +99,18 @@ func Run(s *options.CMServer) error {
if err != nil {
return err
}
- }
-
- // Create the config to talk to federation-apiserver.
- restClientCfg, err = clientcmd.BuildConfigFromFlags(s.Master, s.Kubeconfig)
- if err != nil || restClientCfg == nil {
- // Retry with the deprecated name in 1.4.
- // TODO(madhusudancs): Remove this in 1.5.
- glog.V(2).Infof("Couldn't build the rest client config from flags: %v", err)
- glog.V(2).Infof("Trying with deprecated secret: %s", DeprecatedKubeconfigSecretName)
- restClientCfg, err = restClientConfigFromSecret(s.Master)
- if err != nil {
- return err
+ } else {
+ // Create the config to talk to federation-apiserver.
+ restClientCfg, err = clientcmd.BuildConfigFromFlags(s.Master, s.Kubeconfig)
+ if err != nil || restClientCfg == nil {
+ // Retry with the deprecated name in 1.5.
+ // TODO(madhusudancs): Remove this in 1.6.
+ glog.V(2).Infof("Couldn't build the rest client config from flags: %v", err)
+ glog.V(2).Infof("Trying with deprecated secret: %s", DeprecatedKubeconfigSecretName)
+ restClientCfg, err = restClientConfigFromSecret(s.Master)
+ if err != nil {
+ return err
+ }
}
}
@@ -195,13 +194,13 @@ func StartControllers(s *options.CMServer, restClientCfg *restclient.Config) err
select {}
}
-// TODO(madhusudancs): Remove this in 1.5. This is only temporary to give an
-// upgrade path in 1.4.
+// TODO(madhusudancs): Remove this in 1.6. This is only temporary to give an
+// upgrade path in 1.4/1.5.
func restClientConfigFromSecret(master string) (*restclient.Config, error) {
kubeconfigGetter := util.KubeconfigGetterForSecret(DeprecatedKubeconfigSecretName)
restClientCfg, err := clientcmd.BuildConfigFromKubeconfigGetter(master, kubeconfigGetter)
if err != nil {
- return nil, fmt.Errorf("failed to find the Federation API server kubeconfig, tried the flags and the deprecated secret %s: %v", DeprecatedKubeconfigSecretName, err)
+ return nil, fmt.Errorf("failed to find the Federation API server kubeconfig, tried the --kubeconfig flag and the deprecated secret %s: %v", DeprecatedKubeconfigSecretName, err)
}
return restClientCfg, nil
}
|
[Federation] Update the deprecated name for <I> and try with flags only when `--kubeconfig` is non-empty.
|
kubernetes_kubernetes
|
train
|
f3e907c70c393801c511bba35e9d79cd35937323
|
diff --git a/lib/vagrant-lxss-plugin/command.rb b/lib/vagrant-lxss-plugin/command.rb
index <HASH>..<HASH> 100644
--- a/lib/vagrant-lxss-plugin/command.rb
+++ b/lib/vagrant-lxss-plugin/command.rb
@@ -15,7 +15,7 @@ module VagrantLxss
}
opts = OptionParser.new do |o|
- o.banner = "Usage: vagrant bash [name|id]"
+ o.banner = "Usage: vagrant bash [name|id ...]"
o.separator ""
o.on("-h", "--help", "Display command help") do |h|
@@ -59,7 +59,9 @@ module VagrantLxss
command = "C:\\Windows\\system32\\bash.exe -c 'ssh #{ssh_options.join(' ')}'"
@logger.debug("Full command: \"#{command}\"")
- system(command)
+ pid = spawn(command)
+ Process.waitpid pid
+ # system(command)
return 0
end
end
|
Temporary workaround for spawning Bash shells.
|
csh_vagrant-lxss-plugin
|
train
|
d74d4fc0ee98e4c30f45e9290f9b5270c111bbd8
|
diff --git a/slice/skip/skip_test.go b/slice/skip/skip_test.go
index <HASH>..<HASH> 100644
--- a/slice/skip/skip_test.go
+++ b/slice/skip/skip_test.go
@@ -126,7 +126,7 @@ func TestIter(t *testing.T) {
}
func BenchmarkInsert(b *testing.B) {
- numItems := 10000
+ numItems := b.N
sl := New(uint64(0))
entries := generateMockEntries(numItems)
@@ -139,7 +139,7 @@ func BenchmarkInsert(b *testing.B) {
}
func BenchmarkGet(b *testing.B) {
- numItems := 10000
+ numItems := b.N
sl := New(uint64(0))
entries := generateMockEntries(numItems)
|
Modified benchmarks to scale the item count with b.N.
|
Workiva_go-datastructures
|
train
|
f18b0df36981b5ceea3bd257af8ebaef1c0a35fc
|
diff --git a/src/js/me-namespace.js b/src/js/me-namespace.js
index <HASH>..<HASH> 100644
--- a/src/js/me-namespace.js
+++ b/src/js/me-namespace.js
@@ -2,7 +2,7 @@
var mejs = mejs || {};
// version number
-mejs.version = '2.1.0.dev';
+mejs.version = '2.0.8';
// player number (for missing, same id attr)
mejs.meIndex = 0;
|
- updated version number
|
mediaelement_mediaelement
|
train
|
055d4ec0ec01f59b6ba61815d7075023657b23aa
|
diff --git a/openxc/tests/com/openxc/sources/BytestreamDataSourceTest.java b/openxc/tests/com/openxc/sources/BytestreamDataSourceTest.java
index <HASH>..<HASH> 100644
--- a/openxc/tests/com/openxc/sources/BytestreamDataSourceTest.java
+++ b/openxc/tests/com/openxc/sources/BytestreamDataSourceTest.java
@@ -83,7 +83,7 @@ public class BytestreamDataSourceTest {
source.connect();
source.nextReadThrowsException = true;
source.inject(new byte[] {1,2,3,4});
- TestUtils.pause(20);
+ TestUtils.pause(50);
assertTrue(source.isRunning());
assertFalse(source.isConnected());
}
|
Increase delay in a time-dependent test to make it more reliable.
|
openxc_openxc-android
|
train
|
33c60f5f051256e407771b49dd978a0dea518f16
|
diff --git a/lib/dropbox_api/endpoints/sharing/add_file_member.rb b/lib/dropbox_api/endpoints/sharing/add_file_member.rb
index <HASH>..<HASH> 100644
--- a/lib/dropbox_api/endpoints/sharing/add_file_member.rb
+++ b/lib/dropbox_api/endpoints/sharing/add_file_member.rb
@@ -50,7 +50,7 @@ module DropboxApi::Endpoints::Sharing
Array(members).map do |member|
case member
when String
- DropboxApi::Metadata::Member.build_from_email_or_dropbox_id member
+ DropboxApi::Metadata::Member.new member
when DropboxApi::Metadata::Member
member
else
diff --git a/lib/dropbox_api/metadata/add_member.rb b/lib/dropbox_api/metadata/add_member.rb
index <HASH>..<HASH> 100644
--- a/lib/dropbox_api/metadata/add_member.rb
+++ b/lib/dropbox_api/metadata/add_member.rb
@@ -22,7 +22,7 @@ module DropboxApi::Metadata
class << self
def build_from_string(member, access_level = :editor)
new({
- "member" => Member.build_from_email_or_dropbox_id(member),
+ "member" => Member.new(member),
"access_level" => access_level
})
end
diff --git a/lib/dropbox_api/metadata/member.rb b/lib/dropbox_api/metadata/member.rb
index <HASH>..<HASH> 100644
--- a/lib/dropbox_api/metadata/member.rb
+++ b/lib/dropbox_api/metadata/member.rb
@@ -10,38 +10,47 @@ module DropboxApi::Metadata
# }
# ]
class Member < Base
- class << self
- def build_from_email_or_dropbox_id(email_or_id)
- if email_or_id.start_with? "dbid:"
- build_from_dropbox_id email_or_id
- elsif email_or_id =~ /\A[^@\s]+@[^@\s]+\z/
- build_from_email email_or_id
+ def initialize(member)
+ @member_hash = case member
+ when Hash
+ member
+ when String
+ hash_from_email_or_dropbox_id member
+ when DropboxApi::Metadata::Member
+ member.to_hash
else
- raise ArgumentError, "Invalid email or Dropbox ID: #{email_or_id}"
+ raise ArgumentError, "Invalid object for Member: #{member.inspect}"
end
- end
+ end
- def build_from_dropbox_id(dropbox_id)
- new({
- :".tag" => :dropbox_id,
- :dropbox_id => dropbox_id
- })
- end
+ def to_hash
+ @member_hash
+ end
+
+ private
- def build_from_email(email)
- new({
- :".tag" => :email,
- :email => email
- })
+ def hash_from_email_or_dropbox_id(email_or_id)
+ if email_or_id.start_with? "dbid:"
+ hash_from_dropbox_id email_or_id
+ elsif email_or_id =~ /\A[^@\s]+@[^@\s]+\z/
+ hash_from_email email_or_id
+ else
+ raise ArgumentError, "Invalid email or Dropbox ID: #{email_or_id}"
end
end
- def initialize(member)
- @member = member
+ def hash_from_dropbox_id(dropbox_id)
+ {
+ :".tag" => :dropbox_id,
+ :dropbox_id => dropbox_id
+ }
end
- def to_hash
- @member
+ def hash_from_email(email)
+ {
+ :".tag" => :email,
+ :email => email
+ }
end
end
end
|
Update initialization of Member object.
Use a single initializer for any kind of input.
|
Jesus_dropbox_api
|
train
|
82726813625fd6872402ca92d167e86a649cd34d
|
diff --git a/src/Blog/Entity/BlogPost.php b/src/Blog/Entity/BlogPost.php
index <HASH>..<HASH> 100644
--- a/src/Blog/Entity/BlogPost.php
+++ b/src/Blog/Entity/BlogPost.php
@@ -45,7 +45,7 @@ class BlogPost {
* @var string $content
*
* @ORM\Column(type="text")
- * @Layer\HtmlContent
+ * @Layer\HtmlProperty
*/
protected $content;
diff --git a/src/Pages/Page.php b/src/Pages/Page.php
index <HASH>..<HASH> 100644
--- a/src/Pages/Page.php
+++ b/src/Pages/Page.php
@@ -48,7 +48,7 @@ class Page {
* @var string $content
*
* @ORM\Column(type="text")
- * @Layer\HtmlContent
+ * @Layer\HtmlProperty
*/
protected $content;
|
Updated HtmlProperty annotation class
|
mikegibson_sentient
|
train
|
aa93a6f7743f847101bfde5c5906dbb6dbcab750
|
diff --git a/src/test/java/net/kuujo/vertigo/test/integration/CoordinatorTest.java b/src/test/java/net/kuujo/vertigo/test/integration/CoordinatorTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/net/kuujo/vertigo/test/integration/CoordinatorTest.java
+++ b/src/test/java/net/kuujo/vertigo/test/integration/CoordinatorTest.java
@@ -20,7 +20,6 @@ import static org.vertx.testtools.VertxAssert.fail;
import static org.vertx.testtools.VertxAssert.testComplete;
import net.kuujo.vertigo.cluster.LocalCluster;
import net.kuujo.vertigo.cluster.VertigoCluster;
-import net.kuujo.vertigo.cluster.data.WatchableAsyncMap;
import net.kuujo.vertigo.component.ComponentCoordinator;
import net.kuujo.vertigo.component.impl.DefaultComponentCoordinator;
import net.kuujo.vertigo.context.InstanceContext;
@@ -28,6 +27,7 @@ import net.kuujo.vertigo.context.NetworkContext;
import net.kuujo.vertigo.context.impl.DefaultInstanceContext;
import net.kuujo.vertigo.context.impl.DefaultNetworkContext;
import net.kuujo.vertigo.context.impl.DefaultVerticleContext;
+import net.kuujo.vertigo.data.WatchableAsyncMap;
import org.junit.Test;
import org.vertx.java.core.AsyncResult;
diff --git a/src/test/java/net/kuujo/vertigo/test/integration/LocalClusterDataTest.java b/src/test/java/net/kuujo/vertigo/test/integration/LocalClusterDataTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/net/kuujo/vertigo/test/integration/LocalClusterDataTest.java
+++ b/src/test/java/net/kuujo/vertigo/test/integration/LocalClusterDataTest.java
@@ -22,8 +22,8 @@ import static org.vertx.testtools.VertxAssert.fail;
import static org.vertx.testtools.VertxAssert.testComplete;
import net.kuujo.vertigo.cluster.LocalCluster;
import net.kuujo.vertigo.cluster.VertigoCluster;
-import net.kuujo.vertigo.cluster.data.MapEvent;
-import net.kuujo.vertigo.cluster.data.WatchableAsyncMap;
+import net.kuujo.vertigo.data.MapEvent;
+import net.kuujo.vertigo.data.WatchableAsyncMap;
import org.junit.Test;
import org.vertx.java.core.AsyncResult;
diff --git a/src/test/java/net/kuujo/vertigo/test/integration/RemoteClusterDataTest.java b/src/test/java/net/kuujo/vertigo/test/integration/RemoteClusterDataTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/net/kuujo/vertigo/test/integration/RemoteClusterDataTest.java
+++ b/src/test/java/net/kuujo/vertigo/test/integration/RemoteClusterDataTest.java
@@ -22,8 +22,8 @@ import static org.vertx.testtools.VertxAssert.fail;
import static org.vertx.testtools.VertxAssert.testComplete;
import net.kuujo.vertigo.cluster.RemoteCluster;
import net.kuujo.vertigo.cluster.VertigoCluster;
-import net.kuujo.vertigo.cluster.data.MapEvent;
-import net.kuujo.vertigo.cluster.data.WatchableAsyncMap;
+import net.kuujo.vertigo.data.MapEvent;
+import net.kuujo.vertigo.data.WatchableAsyncMap;
import net.kuujo.xync.test.integration.XyncTestVerticle;
import org.junit.Test;
|
Fix bad imports in integration tests.
|
kuujo_vertigo
|
train
|
d3f6bac32687241d4b401e6185782d4fb57a67f8
|
diff --git a/src/pikepdf/models/outlines.py b/src/pikepdf/models/outlines.py
index <HASH>..<HASH> 100644
--- a/src/pikepdf/models/outlines.py
+++ b/src/pikepdf/models/outlines.py
@@ -197,7 +197,7 @@ class OutlineItem:
"""
title = str(obj.Title)
destination = obj.get(Name.Dest)
- if destination is not None and not isinstance(destination, (Array, String)):
+ if destination is not None and not isinstance(destination, (Array, String, Name)):
raise OutlineStructureError(
f"Unexpected object type in Outline's /Dest: {destination!r}"
)
|
Fix reading of named destinations that use PdfName rather than PdfString
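A minimal sketch of the widened type check, assuming pikepdf's Array, String and Name classes as imported in outlines.py:

from pikepdf import Array, Name, String

def dest_is_supported(destination):
    # Named destinations (a pikepdf Name) are now accepted alongside explicit
    # array destinations and string destinations; None means there is no /Dest.
    return destination is None or isinstance(destination, (Array, String, Name))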
|
pikepdf_pikepdf
|
train
|
e385e59a098cd661ea1ea4257c9a5735c5c32bc6
|
diff --git a/dvc/version.py b/dvc/version.py
index <HASH>..<HASH> 100644
--- a/dvc/version.py
+++ b/dvc/version.py
@@ -6,7 +6,7 @@
import os
import subprocess
-_BASE_VERSION = "2.0.0a3"
+_BASE_VERSION = "2.0.0a4"
def _generate_version(base_version):
|
dvc: bump to <I>a4
|
iterative_dvc
|
train
|
2498e4461c62e13af38462f1b70d846ea3e86a22
|
diff --git a/tests/unit-tests/Carbon_Breadcrumb_Trail/CarbonBreadcrumbTrailRenderTest.php b/tests/unit-tests/Carbon_Breadcrumb_Trail/CarbonBreadcrumbTrailRenderTest.php
index <HASH>..<HASH> 100644
--- a/tests/unit-tests/Carbon_Breadcrumb_Trail/CarbonBreadcrumbTrailRenderTest.php
+++ b/tests/unit-tests/Carbon_Breadcrumb_Trail/CarbonBreadcrumbTrailRenderTest.php
@@ -29,6 +29,7 @@ class CarbonBreadcrumbTrailRenderTest extends WP_UnitTestCase {
public function tearDown() {
unset( $this->trail );
+ unset( $this->renderer );
unset( $this->item1 );
unset( $this->item2 );
}
|
polish CarbonBreadcrumbTrailRenderTest
|
tyxla_carbon-breadcrumbs
|
train
|
9cee09d09a093de5ebfece0a3257fed533f4d25e
|
diff --git a/api/models.py b/api/models.py
index <HASH>..<HASH> 100644
--- a/api/models.py
+++ b/api/models.py
@@ -402,7 +402,7 @@ class Release(UuidAuditedModel):
def __str__(self):
return "{0}-v{1}".format(self.app.id, self.version)
- def new(self, user, config=None, build=None):
+ def new(self, user, config=None, build=None, summary=None):
"""
Create a new application release using the provided Build and Config
on behalf of a user.
@@ -420,7 +420,7 @@ class Release(UuidAuditedModel):
# create new release and auto-increment version
release = Release.objects.create(
owner=user, app=self.app, config=config,
- build=build, version=new_version, image=image)
+ build=build, version=new_version, image=image, summary=summary)
# publish release to registry as new docker image
repository_path = "{}/{}".format(user.username, self.app.id)
publish_release(repository_path, config.values, tag)
diff --git a/api/views.py b/api/views.py
index <HASH>..<HASH> 100644
--- a/api/views.py
+++ b/api/views.py
@@ -8,7 +8,6 @@ import json
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
-from django.db import transaction
from django.utils import timezone
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_objects_for_user
@@ -23,7 +22,6 @@ from rest_framework.generics import get_object_or_404
from rest_framework.response import Response
from api import models, serializers
-from registry import publish_release
from .exceptions import UserRegistrationException
from django.conf import settings
@@ -421,20 +419,16 @@ class AppReleaseViewSet(BaseAppViewSet):
config vars of a previous release.
"""
app = get_object_or_404(models.App, id=self.kwargs['id'])
- last_version = app.release_set.latest().version
+ release = app.release_set.latest()
+ last_version = release.version
version = int(request.DATA.get('version', last_version - 1))
if version < 1:
return Response(status=status.HTTP_404_NOT_FOUND)
+ summary = "{} rolled back to v{}".format(request.user, version)
prev = app.release_set.get(version=version)
- with transaction.atomic():
- summary = "{} rolled back to v{}".format(request.user, version)
- app.release_set.create(owner=request.user, version=last_version + 1,
- build=prev.build, config=prev.config,
- summary=summary)
- # publish release to registry as new docker image
- repository_path = "{}/{}".format(app.owner.username, app.id)
- tag = 'v{}'.format(last_version + 1)
- publish_release(repository_path, prev.config.values, tag)
+ new_release = release.new(
+ request.user, build=prev.build, config=prev.config, summary=summary)
+ app.deploy(new_release)
msg = "Rolled back to v{}".format(version)
return Response(msg, status=status.HTTP_201_CREATED)
|
fix(app): restored apps:rollback functionality
Fixes #<I>.
|
deis_controller-sdk-go
|
train
|
23bff0f7352bd7c965f34550d68f78bea9a9f23f
|
diff --git a/lib/gn_crossmap/result_processor.rb b/lib/gn_crossmap/result_processor.rb
index <HASH>..<HASH> 100644
--- a/lib/gn_crossmap/result_processor.rb
+++ b/lib/gn_crossmap/result_processor.rb
@@ -51,9 +51,11 @@ module GnCrossmap
end
def new_data(datum, result)
+ synonym = result[:current_name_string] ? "synonym" : nil
[matched_type(result), datum[:supplied_name_string],
result[:name_string], result[:canonical_form],
@input[datum[:supplied_id]][:rank], matched_rank(result),
+ synonym, result[:current_name_string],
result[:edit_distance], result[:score], result[:taxon_id]]
end
diff --git a/lib/gn_crossmap/writer.rb b/lib/gn_crossmap/writer.rb
index <HASH>..<HASH> 100644
--- a/lib/gn_crossmap/writer.rb
+++ b/lib/gn_crossmap/writer.rb
@@ -23,7 +23,8 @@ module GnCrossmap
def output_fields(original_fields)
original_fields + [:matchedType, :inputName, :matchedName,
:matchedCanonicalForm, :inputRank, :matchedRank,
- :matchedEditDistance, :marchedScore, :matchTaxonID]
+ :synonymStatus, :acceptedName, :matchedEditDistance,
+ :marchedScore, :matchTaxonID]
end
end
end
|
+ fix #<I>: synonym status and current name
|
GlobalNamesArchitecture_gn_crossmap
|
train
|
63488045d370e12d63c09a35d26b8a4fb04b2945
|
diff --git a/h2o-py/h2o/job.py b/h2o-py/h2o/job.py
index <HASH>..<HASH> 100644
--- a/h2o-py/h2o/job.py
+++ b/h2o-py/h2o/job.py
@@ -74,6 +74,8 @@ class H2OJob(object):
estimated_finish_time = start_time + 120
else:
estimated_finish_time = start_time + (last_poll_time - start_time) / self.progress
+ if self.progress < 1:
+ estimated_finish_time = max(estimated_finish_time, next_poll_time)
# Figure out when we need to display the next '#' symbol, so that all the remaining symbols will be printed
# out in a uniform fashion assuming our estimate of finish time is correct.
symbols_remaining = width - last_display_amnt
@@ -84,7 +86,7 @@ class H2OJob(object):
display_speed = 0
next_display_time = next_poll_time + 1 # Force polling before displaying an update
# Polling should always occur if it is past due -- takes precedence over displaying
- if next_poll_time <= min(current_time, next_display_time):
+ if next_poll_time <= max(current_time, next_display_time):
if next_poll_time > current_time:
time.sleep(next_poll_time - current_time)
poll_interval = min(1, poll_interval + 0.2)
|
Fix a typo with progress bar calculations
|
h2oai_h2o-3
|
train
|
46f84dd4df002450feee5c72d2859990dc3ebdb0
|
diff --git a/atomic_reactor/metadata.py b/atomic_reactor/metadata.py
index <HASH>..<HASH> 100644
--- a/atomic_reactor/metadata.py
+++ b/atomic_reactor/metadata.py
@@ -105,7 +105,7 @@ def _decorate_metadata(metadata_type, keys, match_keys):
raise RuntimeError('[{}] Already set: {!r}'.format(metadata_type, key))
if match_keys:
- metadata[key] = str(result[key])
+ metadata[key] = result[key]
else:
metadata[key] = result
diff --git a/atomic_reactor/plugins/exit_store_metadata_in_osv3.py b/atomic_reactor/plugins/exit_store_metadata_in_osv3.py
index <HASH>..<HASH> 100644
--- a/atomic_reactor/plugins/exit_store_metadata_in_osv3.py
+++ b/atomic_reactor/plugins/exit_store_metadata_in_osv3.py
@@ -167,6 +167,9 @@ class StoreMetadataInOSv3Plugin(ExitPlugin):
self._update_labels(labels, self.workflow.labels)
self._update_labels(labels, self.workflow.build_result.labels)
+ if 'sources_for_koji_build_id' in labels:
+ labels['sources_for_koji_build_id'] = str(labels['sources_for_koji_build_id'])
+
return labels
def set_koji_task_annotations_whitelist(self, annotations):
diff --git a/tests/plugins/test_add_filesystem.py b/tests/plugins/test_add_filesystem.py
index <HASH>..<HASH> 100644
--- a/tests/plugins/test_add_filesystem.py
+++ b/tests/plugins/test_add_filesystem.py
@@ -261,7 +261,7 @@ def test_add_filesystem_plugin_generated(tmpdir, docker_tasker, scratch):
assert 'base-image-id' in plugin_result
assert 'filesystem-koji-task-id' in plugin_result
assert plugin_result == expected_results
- assert workflow.labels['filesystem-koji-task-id'] == str(FILESYSTEM_TASK_ID)
+ assert workflow.labels['filesystem-koji-task-id'] == FILESYSTEM_TASK_ID
@pytest.mark.parametrize('scratch', [True, False])
diff --git a/tests/plugins/test_fetch_sources.py b/tests/plugins/test_fetch_sources.py
index <HASH>..<HASH> 100644
--- a/tests/plugins/test_fetch_sources.py
+++ b/tests/plugins/test_fetch_sources.py
@@ -315,7 +315,7 @@ class TestFetchSources(object):
if custom_rcm:
assert get_srpm_url() in caplog.text
assert get_srpm_url('usedKey') not in caplog.text
- assert runner.workflow.labels['sources_for_koji_build_id'] == '1'
+ assert runner.workflow.labels['sources_for_koji_build_id'] == 1
@pytest.mark.parametrize('signing_intent', ('unsigned', 'empty', 'one', 'multiple', 'invalid'))
def test_koji_signing_intent(self, requests_mock, docker_tasker, koji_session, tmpdir,
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index <HASH>..<HASH> 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -82,7 +82,7 @@ def test_store_metadata_map(metadata_map_decorator, metadata_attr):
assert p2.run() is None
other_attr = 'labels' if metadata_attr == 'annotations' else 'annotations'
- assert getattr(workflow, metadata_attr) == {'foo': '1', 'bar': '2'}
+ assert getattr(workflow, metadata_attr) == {'foo': 1, 'bar': 2}
assert getattr(workflow, other_attr) == {}
@@ -185,9 +185,9 @@ def test_store_metadata_combined():
p.run()
assert workflow.annotations == {
'foo': {'bar': 1, 'eggs': 2},
- 'bar': '1'
+ 'bar': 1
}
assert workflow.labels == {
'spam': {'bar': 1, 'eggs': 2},
- 'eggs': '2'
+ 'eggs': 2
}
|
don't convert all metadata to str, but just sources_for_koji_build_id
* CLOUDBLD-<I>
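A small runnable sketch of the targeted conversion; `labels` stands in for the dict collected by the exit plugin:

labels = {"sources_for_koji_build_id": 1, "filesystem-koji-task-id": 7}

if "sources_for_koji_build_id" in labels:
    # Only this value is stringified for the annotation; every other label keeps its type.
    labels["sources_for_koji_build_id"] = str(labels["sources_for_koji_build_id"])

assert labels == {"sources_for_koji_build_id": "1", "filesystem-koji-task-id": 7}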
|
projectatomic_atomic-reactor
|
train
|
1c7db774dd25bcc7e57d4661335de96998117b71
|
diff --git a/bin/personal b/bin/personal
index <HASH>..<HASH> 100644
--- a/bin/personal
+++ b/bin/personal
@@ -30,6 +30,19 @@ class PersonalCommand < Thor
@db_persistence.close_db
end
+ desc "find", "find entry containing the given text"
+ method_option :text, :type => :string, :required => true, :aliases => "-t", :desc => "The text to find."
+ def find
+ if options[:text].include?("%") or options[:text].include?("_")
+ present_message "The text must not contain % or _", :light_red
+ return
+ end
+
+ init
+ present @db_persistence.find(options[:text])
+ @db_persistence.close_db
+ end
+
desc "add", "Add a new entry to the personal diary"
method_option :text, :type => :string, :required => true, :aliases => "-t", :desc => "The text to save."
def add
diff --git a/lib/personal.rb b/lib/personal.rb
index <HASH>..<HASH> 100644
--- a/lib/personal.rb
+++ b/lib/personal.rb
@@ -60,6 +60,10 @@ module Personal
@db.execute("select * from (select * from personal order by created_at desc limit ?) order by created_at ASC", limit)
end
+ def find(some_text)
+ @db.execute("select * from (select * from personal where data like ? order by created_at desc) order by created_at ASC", "%" + some_text + "%")
+ end
+
def entries_by_year_and_month(year, month)
@db.execute("select * from (select * from personal where cast(strftime('%Y', created_at) as integer) = ? and cast(strftime('%m', created_at) as integer) = ? order by created_at DESC) order by created_at ASC", year, month)
end
diff --git a/lib/personal/version.rb b/lib/personal/version.rb
index <HASH>..<HASH> 100644
--- a/lib/personal/version.rb
+++ b/lib/personal/version.rb
@@ -1,3 +1,3 @@
module Personal
- VERSION = "0.2.0"
+ VERSION = "0.3.0"
end
|
added the (personal find -t "foo") task
|
phenomen2277_personal
|
train
|
889e9dd71688904951b22883ac6540fd1d626c6c
|
diff --git a/metpy/units.py b/metpy/units.py
index <HASH>..<HASH> 100644
--- a/metpy/units.py
+++ b/metpy/units.py
@@ -16,10 +16,14 @@ units : `pint.UnitRegistry`
from __future__ import division
import pint
+import pint.unit
import numpy as np
units = pint.UnitRegistry(autoconvert_offset_to_baseunit=True)
+# For pint 0.6, this is the best way to define a dimensionless unit. See pint #185
+units.define(pint.unit.UnitDefinition('percent', '%', (), pint.unit.ScaleConverter(0.01)))
+
def concatenate(arrs, axis=0):
r'''Concatenate multiple values into a new unitized object.
|
Define a unit for percent.
This will properly convert to a dimensionless quantity with the right scaling
([0, 1]).
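A hedged usage sketch of the new unit (values are illustrative):

from metpy.units import units

humidity = 65 * units.percent
# With the 0.01 scale converter, this prints roughly "0.65 dimensionless".
print(humidity.to(units.dimensionless))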
|
Unidata_MetPy
|
train
|
ed9c782b57a7dc9ebb900089d7d7829d08b7991c
|
diff --git a/plugins/connect.rdns_access.js b/plugins/connect.rdns_access.js
index <HASH>..<HASH> 100644
--- a/plugins/connect.rdns_access.js
+++ b/plugins/connect.rdns_access.js
@@ -55,7 +55,7 @@ exports.rdns_access = function(next, connection) {
if (_in_blacklist(plugin, connection.remote_ip)) {
plugin.logdebug("Rejecting, matched: " + connection.remote_ip);
- return next(DENY, connection.remote_host.toLowerCase + '[' +
+ return next(DENY, connection.remote_host.toLowerCase + ' [' +
connection.remote_ip + '] ' + plugin.deny_msg);
}
}
@@ -67,7 +67,7 @@ exports.rdns_access = function(next, connection) {
if (_in_blacklist(plugin, connection.remote_host.toLowerCase())) {
plugin.logdebug("Rejecting, matched: " + connection.remote_host);
- return next(DENY, connection.remote_host.toLowerCase + '[' +
+ return next(DENY, connection.remote_host.toLowerCase + ' [' +
connection.remote_ip + '] ' + plugin.deny_msg);
}
}
diff --git a/plugins/lookup_rdns.strict.js b/plugins/lookup_rdns.strict.js
index <HASH>..<HASH> 100644
--- a/plugins/lookup_rdns.strict.js
+++ b/plugins/lookup_rdns.strict.js
@@ -147,7 +147,7 @@ exports.hook_lookup_rdns = function (next, connection) {
if (_in_whitelist(plugin, rdns)) {
next(OK, rdns);
} else {
- next(DENYDISCONNECT, rdns + '[' +
+ next(DENYDISCONNECT, rdns + ' [' +
connection.remote_ip + '] ' + nomatch);
}
}
|
slight formatting change to make this look a little more like headers.
|
haraka_Haraka
|
train
|
fd504292402eda536ffebbfd572ee87bc1a5e24c
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-from setuptools import setup
+from setuptools import setup, find_packages
setup(
name='keepkey',
@@ -8,27 +8,7 @@ setup(
author_email='support@keepkey.com',
description='Python library for communicating with KeepKey Hardware Wallet',
url='https://github.com/keepkey/python-keepkey',
- py_modules=[
- 'keepkeylib.ckd_public',
- 'keepkeylib.client',
- 'keepkeylib.debuglink',
- 'keepkeylib.mapping',
- 'keepkeylib.messages_pb2',
- 'keepkeylib.protobuf_json',
- 'keepkeylib.qt.pinmatrix',
- 'keepkeylib.tools',
- 'keepkeylib.transport',
- 'keepkeylib.transport_fake',
- 'keepkeylib.transport_hid',
- 'keepkeylib.transport_pipe',
- 'keepkeylib.transport_serial',
- 'keepkeylib.transport_socket',
- 'keepkeylib.transport_webusb',
- 'keepkeylib.transport_udp',
- 'keepkeylib.tx_api',
- 'keepkeylib.types_pb2',
- 'keepkeylib.exchange_pb2',
- ],
+ packages=find_packages(exclude=['tests']),
scripts = ['keepkeyctl'],
test_suite='tests/**/test_*.py',
install_requires=[
|
Use find_packages instead of listing each module independently
|
keepkey_python-keepkey
|
train
|
9d9c7d3edd02f24a7789c34d2748981e6d61fce0
|
diff --git a/src/Fetchers/GuzzleFetcher.php b/src/Fetchers/GuzzleFetcher.php
index <HASH>..<HASH> 100644
--- a/src/Fetchers/GuzzleFetcher.php
+++ b/src/Fetchers/GuzzleFetcher.php
@@ -217,10 +217,12 @@ class GuzzleFetcher implements FetcherInterface
} catch (ClientException $e) {
// Log the event as failed
- $this->logger->error('[http ' . $e->getResponse()->getStatusCode() . '] ' .
- '[' . $e->getResponse()->getReasonPhrase() . '] ' .
- $method . ' -> ' . $this->stripRefreshTokenValue($uri) . ' [' .
- number_format(microtime(true) - $start, 2) . 's]');
+ $this->logger->error('[http ' . $e->getResponse()->getStatusCode() . ', ' .
+ strtolower($e->getResponse()->getReasonPhrase()) . '] ' .
+ $method . ' -> ' . $this->stripRefreshTokenValue($uri) . ' [t/e: ' .
+ number_format(microtime(true) - $start, 2) . 's/' .
+ implode(' ', $e->getResponse()->getHeader('X-Esi-Error-Limit-Remain')) . ']'
+ );
// Raise the exception that should be handled by the caller
throw new RequestFailedException($e,
@@ -231,10 +233,12 @@ class GuzzleFetcher implements FetcherInterface
}
// Log the sucessful request.
- $this->logger->log('[http ' . $response->getStatusCode() . '] ' .
- '[' . $response->getReasonPhrase() . '] ' .
- $method . ' -> ' . $this->stripRefreshTokenValue($uri) . ' [' .
- number_format(microtime(true) - $start, 2) . 's]');
+ $this->logger->log('[http ' . $response->getStatusCode() . ', ' .
+ strtolower($response->getReasonPhrase()) . '] ' .
+ $method . ' -> ' . $this->stripRefreshTokenValue($uri) . ' [t/e: ' .
+ number_format(microtime(true) - $start, 2) . 's/' .
+ implode(' ', $response->getHeader('X-Esi-Error-Limit-Remain')) . ']'
+ );
// Return a container response that can be parsed.
return $this->makeEsiResponse(
|
Update request logger to include current error limit.
|
eveseat_eseye
|
train
|
15c8846153fb531ff40d139a5d20b8267b7153e7
|
diff --git a/mse/cmd/mse/main.go b/mse/cmd/mse/main.go
index <HASH>..<HASH> 100644
--- a/mse/cmd/mse/main.go
+++ b/mse/cmd/mse/main.go
@@ -37,7 +37,7 @@ func mainErr() error {
}{
CryptoMethod: mse.AllSupportedCrypto,
}
- arg.MustParse(&args)
+ p := arg.MustParse(&args)
if args.Dial != nil {
cn, err := net.Dial(args.Dial.Network, args.Dial.Address)
if err != nil {
@@ -72,6 +72,9 @@ func mainErr() error {
}
doStreaming(rw)
}
+ if p.Subcommand() == nil {
+ p.Fail("missing subcommand")
+ }
return nil
}
|
mse/cmd/mse: Fail on missing subcommand
|
anacrolix_torrent
|
train
|
c22c79a13a8017540d80d088d3b697159a1af729
|
diff --git a/ceph_deploy/osd.py b/ceph_deploy/osd.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/osd.py
+++ b/ceph_deploy/osd.py
@@ -2,6 +2,7 @@ import argparse
import logging
import os
import sys
+from textwrap import dedent
from cStringIO import StringIO
@@ -410,6 +411,21 @@ def make(parser):
"""
Prepare a data disk on remote host.
"""
+ sub_command_help = dedent("""
+ Manage OSDs by preparing a data disk on remote host.
+
+ For paths, first prepare and then activate:
+
+ ceph-deploy osd prepare {osd-node-name}:/path/to/osd
+ ceph-deploy osd activate {osd-node-name}:/path/to/osd
+
+ For disks or journals the `create` command will do prepare and activate
+ for you.
+ """
+ )
+ parser.formatter_class = argparse.RawDescriptionHelpFormatter
+ parser.description = sub_command_help
+
parser.add_argument(
'subcommand',
metavar='SUBCOMMAND',
@@ -471,7 +487,7 @@ def make_disk(parser):
nargs='+',
metavar='HOST:DISK',
type=colon_separated,
- help='host and disk to zap',
+ help='host and disk (or path)',
)
parser.add_argument(
'--zap-disk',
|
improve osd help by adding example of activation by paths
|
ceph_ceph-deploy
|
train
|
4375361939e942c4dd666d3ca4e1159858404bc4
|
diff --git a/src/saml2/server.py b/src/saml2/server.py
index <HASH>..<HASH> 100644
--- a/src/saml2/server.py
+++ b/src/saml2/server.py
@@ -476,7 +476,7 @@ class Server(Entity):
if not encrypt_assertion:
if sign_assertion:
assertion.signature = pre_signature_part(assertion.id,
- self.sec.my_cert, 1,
+ self.sec.my_cert, 2,
sign_alg=sign_alg,
digest_alg=digest_alg)
to_sign.append((class_name(assertion), assertion.id))
|
The ID of each Signature element must be unique
If the assertion and the response are both signed, both Signatures have an
ID of `Signature1`. This creates invalid XML, as xs:ID must be unique.
This fixes the issue when integrating with onelogin's python3-saml
client:
Element '{<URL>
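A tiny illustration of why distinct indices matter; `signature_id` is a hypothetical stand-in, the real ID is built inside pre_signature_part:

def signature_id(index):
    return "Signature%d" % index

# The response keeps index 1 and the assertion now uses index 2, so both IDs stay unique.
assert signature_id(1) != signature_id(2)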
|
IdentityPython_pysaml2
|
train
|
3cee5b9f67231755cf1c6240d9be3e5f2be01e15
|
diff --git a/neo4j-gremlin/src/main/java/com/tinkerpop/gremlin/neo4j/structure/Neo4jGraph.java b/neo4j-gremlin/src/main/java/com/tinkerpop/gremlin/neo4j/structure/Neo4jGraph.java
index <HASH>..<HASH> 100644
--- a/neo4j-gremlin/src/main/java/com/tinkerpop/gremlin/neo4j/structure/Neo4jGraph.java
+++ b/neo4j-gremlin/src/main/java/com/tinkerpop/gremlin/neo4j/structure/Neo4jGraph.java
@@ -19,9 +19,11 @@ import org.neo4j.graphdb.DynamicLabel;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.NotFoundException;
import org.neo4j.graphdb.NotInTransactionException;
+import org.neo4j.graphdb.PropertyContainer;
import org.neo4j.graphdb.factory.GraphDatabaseBuilder;
import org.neo4j.graphdb.factory.GraphDatabaseFactory;
import org.neo4j.kernel.GraphDatabaseAPI;
+import org.neo4j.kernel.impl.core.NodeManager;
import javax.transaction.Status;
import javax.transaction.SystemException;
@@ -197,6 +199,10 @@ public class Neo4jGraph implements Graph {
return cypher.execute(query,null == params ? Collections.<String,Object>emptyMap() : params).iterator();
}
+ private PropertyContainer getGraphProperties() {
+ return ((GraphDatabaseAPI) this.rawGraph).getDependencyResolver().resolveDependency(NodeManager.class).getGraphProperties();
+ }
+
private static Long evaluateToLong(final Object id) throws NumberFormatException {
Long longId;
if (id instanceof Long)
|
Add access to neo4j graph property container.
|
apache_tinkerpop
|
train
|
4b45a831c560bfc6a24e16bb86835dfb702a4c60
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -2,7 +2,7 @@ function gulp4(gulp, paths) {
Object.keys(gulp._registry._tasks).forEach(function(taskName){
var glob = paths[taskName];
if (!glob) return;
- gulp.watch(glob, gulp.series(taskName));
+ gulp.watch(glob, taskName);
});
}
|
fix extra logging in gulp 4
|
gulp-community_gulp-autowatch
|
train
|
4ec6d6129b836535f33b28ee212d74ad70d887f7
|
diff --git a/faker/providers/person/uk_UA/__init__.py b/faker/providers/person/uk_UA/__init__.py
index <HASH>..<HASH> 100644
--- a/faker/providers/person/uk_UA/__init__.py
+++ b/faker/providers/person/uk_UA/__init__.py
@@ -11,8 +11,7 @@ class Provider(PersonProvider):
'{{prefix_female}} {{first_name_female}} {{last_name}}',
)
- # got from
- # http://uk.wikipedia.org/wiki/%D0%A3%D0%BA%D1%80%D0%B0%D1%97%D0%BD%D1%81%D1%8C%D0%BA%D1%96_%D1%96%D0%BC%D0%B5%D0%BD%D0%B0
+ # Source: uk.wikipedia.org/wiki/Українські_імена
first_names_male = (
'Аарон', 'Августин', 'Аврелій', 'Адам', 'Азар', 'Алевтин', 'Альберт', 'Амвросій', 'Андрій', 'Антон', 'Аркадій',
'Арсен', 'Артем', 'Орхип', 'Богдан', 'Богодар', 'Богуслав', 'Болеслав', 'Борис', 'Борислав', 'Вадим',
@@ -40,8 +39,7 @@ class Provider(PersonProvider):
first_names = first_names_male + first_names_female
- # Ukrainian last names are taken from
- # http://uk.wikipedia.org/wiki/%D0%9A%D0%B0%D1%82%D0%B5%D0%B3%D0%BE%D1%80%D1%96%D1%8F:%D0%A3%D0%BA%D1%80%D0%B0%D1%97%D0%BD%D1%81%D1%8C%D0%BA%D1%96_%D0%BF%D1%80%D1%96%D0%B7%D0%B2%D0%B8%D1%89%D0%B0
+ # Source: uk.wikipedia.org/wiki/Категорія:Українські_прізвища
last_names = (
# А
'Абрагамовський', 'Абраменко', 'Абрамчук', 'Авдєєнко', 'Аверченко', 'Авраменко', 'Аврамчук', 'Адаменко',
|
Make source comments clearer & shorter in the Ukrainian person provider
|
joke2k_faker
|
train
|
93fa3e1936cbfa8c6836b34479313598f6d50c36
|
diff --git a/lib/nodemenu.js b/lib/nodemenu.js
index <HASH>..<HASH> 100644
--- a/lib/nodemenu.js
+++ b/lib/nodemenu.js
@@ -56,6 +56,12 @@ NodeMenu.prototype.customPrompt = function(customPromptFunc) {
return self;
};
+NodeMenu.prototype.resetMenu = function() {
+ var self = this;
+ self.menuItems = [];
+ return self;
+};
+
NodeMenu.prototype.addItem = function(title, handler, owner, args) {
var self = this;
self.menuItems.push(new MenuItem(MenuType.ACTION, ++self.itemNo, title, handler, owner, args));
|
Update nodemenu.js
Added ability to reset the menu to empty.
|
nbu_node-menu
|
train
|
9e14daae461dcd50b29fc91000256f9637172793
|
diff --git a/shared/api/resource.go b/shared/api/resource.go
index <HASH>..<HASH> 100644
--- a/shared/api/resource.go
+++ b/shared/api/resource.go
@@ -3,9 +3,8 @@ package api
// Resources represents the system resources avaible for LXD
// API extension: resources
type Resources struct {
- CPU ResourcesCPU `json:"cpu,omitempty" yaml:"cpu,omitempty"`
- Memory ResourcesMemory `json:"memory,omitempty" yaml:"memory,omitempty"`
- StoragePool ResourcesStoragePool `json:"pool,omitempty" yaml:"pool,omitempty"`
+ CPU ResourcesCPU `json:"cpu,omitempty" yaml:"cpu,omitempty"`
+ Memory ResourcesMemory `json:"memory,omitempty" yaml:"memory,omitempty"`
}
// ResourcesCPUSocket represents a cpu socket on the system
|
shared/api: Drop StoragePool from Resources struct
The ResourcesStoragePool struct is only meant to be used on its own; no
values are meant to be exposed for it through /<I>/resources.
|
lxc_lxd
|
train
|
032b50d86c01dbf2eecf687a5ba46eac2a96bd8d
|
diff --git a/controller/ProviderAdmin.php b/controller/ProviderAdmin.php
index <HASH>..<HASH> 100755
--- a/controller/ProviderAdmin.php
+++ b/controller/ProviderAdmin.php
@@ -52,12 +52,10 @@ class ProviderAdmin extends tao_actions_SaSModule
private function getLtiVersion(): string
{
$body = $this->getPsrRequest()->getParsedBody();
+ $rawLtiVersion = trim($body[tao_helpers_Uri::encode(RdfLtiProviderRepository::LTI_VERSION)] ?? '');
+ $ltiVersion = empty($rawLtiVersion) ? RdfLtiProviderRepository::DEFAULT_LTI_VERSION : tao_helpers_Uri::decode($rawLtiVersion);
- $rawLtiVersion = $body[tao_helpers_Uri::encode(
- RdfLtiProviderRepository::LTI_VERSION
- )] ?? RdfLtiProviderRepository::DEFAULT_LTI_VERSION;
-
- return $this->getConfigurationMapper()->map(tao_helpers_Uri::decode($rawLtiVersion)) ?? '1.1';
+ return $this->getConfigurationMapper()->map($ltiVersion);
}
private function getValidationFactory(): ValidatorsFactory
|
Fallback added when no LTI version is provided
|
oat-sa_extension-tao-lti
|
train
|
765c060928c3b45da0bfcf31cde86a84b2fba38c
|
diff --git a/apiserver/backups/create.go b/apiserver/backups/create.go
index <HASH>..<HASH> 100644
--- a/apiserver/backups/create.go
+++ b/apiserver/backups/create.go
@@ -20,7 +20,8 @@ func (a *API) Create(args params.BackupsCreateArgs) (p params.BackupsMetadataRes
mgoInfo := a.st.MongoConnectionInfo()
dbInfo := db.NewMongoConnInfo(mgoInfo)
- machine := "0" // We *could* pull this from state.
+ // TODO(ericsnow) The machine ID needs to be introspected from state.
+ machine := "0"
origin := state.NewBackupsOrigin(a.st, machine)
meta, err := backups.Create(a.paths, *dbInfo, *origin, args.Notes)
|
Add a TODO about getting the machine ID.
|
juju_juju
|
train
|
373a59f625674b8839041004c3dc98363e75aa60
|
diff --git a/hydpy/core/hydpytools.py b/hydpy/core/hydpytools.py
index <HASH>..<HASH> 100644
--- a/hydpy/core/hydpytools.py
+++ b/hydpy/core/hydpytools.py
@@ -529,7 +529,7 @@ is not requested to make any time-series data available.
be overwritten during the simulation and thus only support the `write_jit` argument.
The |HydPy.prepare_inputseries| method, on the other hand, supports both the
`read_jit` and the `write_jit` argument. However, in most cases, only reading
- makes sense. The argument `write_jit` is thought of when other methods (for
+ makes sense. The argument `write_jit` is thought for when other methods (for
example data assimilation approaches) modify the input data, and we need to keep
track of these modifications:
@@ -562,7 +562,7 @@ is not requested to make any time-series data available.
Reloading the initial conditions and starting a new simulation run leads to the
same results as the simulation run above:
- >>> with TestIO():
+ >>> with TestIO(), pub.options.checkseries(False):
... hp.load_conditions()
... hp.simulate()
@@ -644,7 +644,7 @@ is not requested to make any time-series data available.
After another simulation run, all input data (read during simulation) and output
data (calculated during simulation) are directly available:
- >>> with TestIO():
+ >>> with TestIO(), pub.options.checkseries(False):
... hp.load_conditions()
... hp.simulate()
diff --git a/hydpy/core/netcdftools.py b/hydpy/core/netcdftools.py
index <HASH>..<HASH> 100644
--- a/hydpy/core/netcdftools.py
+++ b/hydpy/core/netcdftools.py
@@ -1016,7 +1016,9 @@ No data for sequence `flux_pc` and (sub)device `land_lahn_2_0` in NetCDF file \
timeunit = tg_init.firstdate.to_cfunits("hours")
timepoints = tg_init.to_timepoints("hours")
for variable, readmode in variable2readmode.items():
- if not os.path.exists(variable.filepath):
+ if not os.path.exists(variable.filepath) and (
+ not readmode or not hydpy.pub.options.checkseries
+ ):
variable.write(timeunit, timepoints)
ncfile = netcdf4.Dataset(variable.filepath, "r+")
variable2ncfile[variable] = ncfile
|
Do not create NetCDF files automatically in the "just in time" mode when reading is required and option `checkseries` is enabled.
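A condensed sketch of the new guard, mirroring the condition in netcdftools.py with plain booleans standing in for the read mode and the `checkseries` option:

import os

def should_precreate(filepath, readmode, checkseries):
    # Only create a missing NetCDF file up front when it will not be read
    # just in time, or when series checking is disabled.
    return (not os.path.exists(filepath)) and (not readmode or not checkseries)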
|
hydpy-dev_hydpy
|
train
|
9adabfa4f75a3ac07d8788a87653ecf497f3f8f5
|
diff --git a/bokeh/models/renderers.py b/bokeh/models/renderers.py
index <HASH>..<HASH> 100644
--- a/bokeh/models/renderers.py
+++ b/bokeh/models/renderers.py
@@ -80,23 +80,23 @@ class Legend(Renderer):
""")
label_standoff = Int(15, help="""
- The distance in pixels to separate the label from its associated glyph.
+ The distance (in pixels) to separate the label from its associated glyph.
""")
label_height = Int(20, help="""
- The height in pixels that the area legend labels should occupy.
+ The height (in pixels) of the area that legend labels should occupy.
""")
label_width = Int(50, help="""
- The width in pixels that the area legend labels should occupy.
+ The width (in pixels) of the area that legend labels should occupy.
""")
glyph_height = Int(20, help="""
- The height in pixels that the rendered legend glyph should occupy.
+ The height (in pixels) that the rendered legend glyph should occupy.
""")
glyph_width = Int(20, help="""
- The width in pixels that the rendered legend glyph should occupy.
+ The width (in pixels) that the rendered legend glyph should occupy.
""")
legend_padding = Int(10, help="""
|
Improve docstring readability in renderers.py
|
bokeh_bokeh
|
train
|
673507ee66aebf0ce5ca366627b8b6e203e41240
|
diff --git a/src/test/java/at/favre/lib/crypto/bcrypt/BCryptParserTest.java b/src/test/java/at/favre/lib/crypto/bcrypt/BCryptParserTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/at/favre/lib/crypto/bcrypt/BCryptParserTest.java
+++ b/src/test/java/at/favre/lib/crypto/bcrypt/BCryptParserTest.java
@@ -115,6 +115,21 @@ public class BCryptParserTest {
parser.parse("$2a$06$If6bvum7DFjUnE9p2uDeDu0YHzrHM6tf.iqN8.yx.jNN1ILEf7h0i9".getBytes());
}
+ @Test(expected = IllegalArgumentException.class)
+ public void parseErrorNullHash() throws Exception {
+ parser.parse(null);
+ }
+
+ @Test(expected = IllegalArgumentException.class)
+ public void parseErrorZeroLengthHash() throws Exception {
+ parser.parse(new byte[0]);
+ }
+
+ @Test(expected = IllegalBCryptFormatException.class)
+ public void parseErrorWayTooShort() throws Exception {
+ parser.parse("$2a".getBytes());
+ }
+
@Test
public void parseErrorTooLongGetExceptionMessage() {
try {
@@ -126,4 +141,22 @@ public class BCryptParserTest {
System.out.println(e.getMessage());
}
}
+
+ @Test
+ public void testPartsPojoMethods() {
+ BCryptParser.Parts parts1 = new BCryptParser.Parts(BCrypt.Version.VERSION_2A, 6, new byte[16], new byte[23]);
+ BCryptParser.Parts parts2 = new BCryptParser.Parts(BCrypt.Version.VERSION_2A, 6, new byte[16], new byte[23]);
+ BCryptParser.Parts parts3 = new BCryptParser.Parts(BCrypt.Version.VERSION_2A, 7, new byte[16], new byte[23]);
+
+ assertEquals(parts1, parts2);
+ assertEquals(parts1.hashCode(), parts2.hashCode());
+ assertNotEquals(parts1, parts3);
+ assertNotEquals(parts1.hashCode(), parts3.hashCode());
+ assertNotEquals(parts2, parts3);
+ assertNotEquals(parts2.hashCode(), parts3.hashCode());
+
+ assertNotNull(parts1.toString());
+ assertNotNull(parts2.toString());
+ assertNotNull(parts3.toString());
+ }
}
diff --git a/src/test/java/at/favre/lib/crypto/bcrypt/BcryptTest.java b/src/test/java/at/favre/lib/crypto/bcrypt/BcryptTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/at/favre/lib/crypto/bcrypt/BcryptTest.java
+++ b/src/test/java/at/favre/lib/crypto/bcrypt/BcryptTest.java
@@ -109,7 +109,7 @@ public class BcryptTest {
@Test(expected = IllegalArgumentException.class)
public void createHashWithPwNull() {
- BCrypt.withDefaults().hash(6, new byte[17], null);
+ BCrypt.withDefaults().hash(6, new byte[16], null);
}
@Test(expected = IllegalArgumentException.class)
@@ -119,7 +119,7 @@ public class BcryptTest {
@Test(expected = IllegalArgumentException.class)
public void createHashWithPwTooLong() {
- BCrypt.withDefaults().hash(6, new byte[17], new byte[72]);
+ BCrypt.withDefaults().hash(6, new byte[16], new byte[72]);
}
@Test
|
Add more BCryptParserTests
|
patrickfav_bcrypt
|
train
|
302f46301f129402febbac5606048992fbd0e920
|
diff --git a/gosu-core-api/src/main/java/gw/lang/reflect/FunctionType.java b/gosu-core-api/src/main/java/gw/lang/reflect/FunctionType.java
index <HASH>..<HASH> 100644
--- a/gosu-core-api/src/main/java/gw/lang/reflect/FunctionType.java
+++ b/gosu-core-api/src/main/java/gw/lang/reflect/FunctionType.java
@@ -106,7 +106,7 @@ public class FunctionType extends AbstractType implements IFunctionType, IGeneri
_mi = mi;
if (!lazyTypes) {
- initLazyMethodInfoState();
+ initLazyMethodInfoState_NoLock();
}
setName( mi.getDisplayName() );
@@ -131,35 +131,40 @@ public class FunctionType extends AbstractType implements IFunctionType, IGeneri
TypeSystem.lock();
try
{
- if( _paramTypes == null )
+ initLazyMethodInfoState_NoLock();
+ }
+ finally
+ {
+ TypeSystem.unlock();
+ }
+ }
+
+ private void initLazyMethodInfoState_NoLock()
+ {
+ if( _paramTypes == null )
+ {
+ IParameterInfo[] pd = _mi.getParameters();
+ int iArgs = pd.length;
+ _paramTypes = new IType[iArgs];
+ for( int i = 0; i < iArgs; i++ )
{
- IParameterInfo[] pd = _mi.getParameters();
- int iArgs = pd.length;
- _paramTypes = new IType[iArgs];
- for( int i = 0; i < iArgs; i++ )
- {
- _paramTypes[i] = pd[i].getFeatureType();
- }
- if( _paramTypes.length == 0 )
- {
- _paramTypes = EMPTY_ARGS;
- }
- _typeVars = EMPTY_TYPE_VARS;
- if( _mi instanceof IGenericMethodInfo)
- {
- _typeVars = ((IGenericMethodInfo)_mi).getTypeVariables();
- }
- clearParamSignature();
+ _paramTypes[i] = pd[i].getFeatureType();
}
- _retType = _mi.getReturnType();
- if( _retType == null )
+ if( _paramTypes.length == 0 )
{
- _retType = JavaTypes.pVOID();
+ _paramTypes = EMPTY_ARGS;
+ }
+ _typeVars = EMPTY_TYPE_VARS;
+ if( _mi instanceof IGenericMethodInfo )
+ {
+ _typeVars = ((IGenericMethodInfo)_mi).getTypeVariables();
}
+ clearParamSignature();
}
- finally
+ _retType = _mi.getReturnType();
+ if( _retType == null )
{
- TypeSystem.unlock();
+ _retType = JavaTypes.pVOID();
}
}
diff --git a/gosu-core/src/main/java/gw/internal/gosu/compiler/GosuClassLoader.java b/gosu-core/src/main/java/gw/internal/gosu/compiler/GosuClassLoader.java
index <HASH>..<HASH> 100644
--- a/gosu-core/src/main/java/gw/internal/gosu/compiler/GosuClassLoader.java
+++ b/gosu-core/src/main/java/gw/internal/gosu/compiler/GosuClassLoader.java
@@ -132,23 +132,25 @@ public class GosuClassLoader implements IGosuClassLoader
@Override
public Class loadClass( String strName ) throws ClassNotFoundException
{
+ String strGsName = strName.replace( '$', '.' );
+ //## hack:
+ if (strGsName.startsWith("com.guidewire.commons.metadata.proxy._generated.iface.")) {
+ strGsName = "entity." + strGsName.substring(strName.lastIndexOf('.') + 1);
+ }
+
+ IType type = TypeSystem.getByFullNameIfValid( strGsName );
+ if( type instanceof IGosuClassInternal )
+ {
+ return ((IGosuClassInternal)type).getBackingClass();
+ }
+ else if( type instanceof IJavaBackedType )
+ {
+ return ((IJavaBackedType)type).getBackingClass();
+ }
+
TypeSystemLockHelper.getTypeSystemLockWithMonitor(_loader);
try
{
- String strGsName = strName.replace( '$', '.' );
- //## hack:
- if (strGsName.startsWith("com.guidewire.commons.metadata.proxy._generated.iface.")) {
- strGsName = "entity." + strGsName.substring(strName.lastIndexOf('.') + 1);
- }
- IType type = TypeSystem.getByFullNameIfValid( strGsName );
- if( type instanceof IGosuClassInternal )
- {
- return ((IGosuClassInternal)type).getBackingClass();
- }
- else if( type instanceof IJavaBackedType )
- {
- return ((IJavaBackedType)type).getBackingClass();
- }
return _loader.loadClass( strName );
}
finally
|
Refine type system lock usage in response to: <URL>
|
gosu-lang_gosu-lang
|
train
|
a8797189f8bfc467997888e3d749f852f122f3cb
|
diff --git a/tool/fragment/src/main/java/org/openscience/cdk/fragment/ExhaustiveFragmenter.java b/tool/fragment/src/main/java/org/openscience/cdk/fragment/ExhaustiveFragmenter.java
index <HASH>..<HASH> 100644
--- a/tool/fragment/src/main/java/org/openscience/cdk/fragment/ExhaustiveFragmenter.java
+++ b/tool/fragment/src/main/java/org/openscience/cdk/fragment/ExhaustiveFragmenter.java
@@ -122,6 +122,9 @@ public class ExhaustiveFragmenter implements IFragmenter {
List<IAtomContainer> parts = FragmentUtils.splitMolecule(atomContainer, bond);
// make sure we don't add the same fragment twice
for (IAtomContainer partContainer : parts) {
+ AtomContainerManipulator.clearAtomConfigurations(partContainer);
+ for (IAtom atom : partContainer.atoms())
+ atom.setImplicitHydrogenCount(null);
AtomContainerManipulator.percieveAtomTypesAndConfigureAtoms(partContainer);
CDKHydrogenAdder.getInstance(partContainer.getBuilder()).addImplicitHydrogens(partContainer);
Aromaticity.cdkLegacy().apply(partContainer);
@@ -145,6 +148,9 @@ public class ExhaustiveFragmenter implements IFragmenter {
for (IAtomContainer frag : frags) {
if (frag.getBondCount() < 3) continue;
+ AtomContainerManipulator.clearAtomConfigurations(frag);
+ for (IAtom atom : frag.atoms())
+ atom.setImplicitHydrogenCount(null);
AtomContainerManipulator.percieveAtomTypesAndConfigureAtoms(frag);
CDKHydrogenAdder.getInstance(frag.getBuilder()).addImplicitHydrogens(frag);
Aromaticity.cdkLegacy().apply(frag);
diff --git a/tool/fragment/src/main/java/org/openscience/cdk/fragment/MurckoFragmenter.java b/tool/fragment/src/main/java/org/openscience/cdk/fragment/MurckoFragmenter.java
index <HASH>..<HASH> 100644
--- a/tool/fragment/src/main/java/org/openscience/cdk/fragment/MurckoFragmenter.java
+++ b/tool/fragment/src/main/java/org/openscience/cdk/fragment/MurckoFragmenter.java
@@ -334,6 +334,9 @@ public class MurckoFragmenter implements IFragmenter {
List<String> smis = new ArrayList<String>();
for (IAtomContainer mol : mols) {
try {
+ AtomContainerManipulator.clearAtomConfigurations(mol);
+ for (IAtom atom : mol.atoms())
+ atom.setImplicitHydrogenCount(null);
AtomContainerManipulator.percieveAtomTypesAndConfigureAtoms(mol);
CDKHydrogenAdder.getInstance(mol.getBuilder()).addImplicitHydrogens(mol);
Aromaticity.cdkLegacy().apply(mol);
|
To obtain correct output in the fragmenters we need to clear all existing configurations and reperceive atom types. Could be improved.
|
cdk_cdk
|
train
|