hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
|---|---|---|---|---|
3501a3e76c75379e3ad19e1ea39bdf3e2e28a20b
|
diff --git a/classes/Ergo.php b/classes/Ergo.php
index <HASH>..<HASH> 100644
--- a/classes/Ergo.php
+++ b/classes/Ergo.php
@@ -42,7 +42,7 @@ class Ergo
/**
* Called as a shutdown function, calls stop() if required
*/
- public function shutdown()
+ public static function shutdown()
{
if(self::$_started)
self::stop();
|
Changed the shutdown function to be static
|
99designs_ergo
|
train
|
fbd3a3172de12c998fc394580209a6f47383660c
|
diff --git a/explauto/sensorimotor_model/nearest_neighbor.py b/explauto/sensorimotor_model/nearest_neighbor.py
index <HASH>..<HASH> 100644
--- a/explauto/sensorimotor_model/nearest_neighbor.py
+++ b/explauto/sensorimotor_model/nearest_neighbor.py
@@ -12,7 +12,7 @@ n_neighbors = 1
class NearestNeighbor(SensorimotorModel):
- def __init__(self, conf, n_explore, sigma_ratio):
+ def __init__(self, conf, sigma_ratio):
""" This class implements a simple sensorimotor model inspired from the original SAGG-RIAC algorithm. Used as a forward model, it simply returns the image in S of nearest neighbor in M. Used as an inverse model, it looks at the nearest neighbor in S, then explore during the n_explore following calls to self.infer around that neighborhood.
:param conf: a Configuration instance
@@ -26,7 +26,7 @@ class NearestNeighbor(SensorimotorModel):
self.s_dims = conf.s_dims
self.t = 0
self.sigma_expl = (conf.m_maxs - conf.m_mins) * float(sigma_ratio)
- self.n_explore = n_explore
+ self.n_explore = 1
self.to_explore = 0
self.best_dist_to_goal = float('inf')
self.current_goal = inf * ones(conf.s_ndims)
@@ -67,5 +67,5 @@ class NearestNeighbor(SensorimotorModel):
self.dataset.add_xy(tuple(m), tuple(s))
self.t += 1
-configurations = {'default': {'n_explore': 10, 'sigma_ratio': 1./7.}}
+configurations = {'default': {'sigma_ratio': 1./5.}}
sensorimotor_models = {'nearest_neighbor': (NearestNeighbor, configurations)}
|
Change the default nearest_neighbor sm_model configuration. Do not allow
a parametrized number of step anymore (only 1, otherwise it would
actually kills the framework consistency ...)
|
flowersteam_explauto
|
train
|
0d31808033272cee5126f4546f70626a3e28f59d
|
diff --git a/lib/salt/page.rb b/lib/salt/page.rb
index <HASH>..<HASH> 100644
--- a/lib/salt/page.rb
+++ b/lib/salt/page.rb
@@ -33,13 +33,18 @@ module Salt
File.join(parent_path, File.dirname(@path).gsub(site.source_paths[:pages], ''))
end
- def write(site, path, context = {})
+ def write(site, path, context = false)
directory_path = output_path(site, path)
full_path = File.join(directory_path, output_file)
@url = full_path.gsub(site.output_paths[:site], '').gsub(/index\.html$/, '')
- contents = render(site, @contents, {this: self}.merge(context))
+ contents = if context
+ render(site, @contents, {this: self}.merge(context))
+ else
+ @contents
+ end
+
FileUtils.mkdir_p(directory_path) unless Dir.exist?(directory_path)
File.open(full_path, 'w') do |file|
|
Make the context option, so we can write out raw files based on the contents.
|
waferbaby_dimples
|
train
|
670ec8e766a8d2d6598744df4401e6f5d73eb112
|
diff --git a/yfinance/base.py b/yfinance/base.py
index <HASH>..<HASH> 100644
--- a/yfinance/base.py
+++ b/yfinance/base.py
@@ -809,7 +809,6 @@ class TickerBase():
dates = None
while True:
url = "{}/calendar/earnings?symbol={}&offset={}&size={}".format(_ROOT_URL_, self.ticker, page_offset, page_size)
- page_offset += page_size
session = self.session or _requests
data = session.get(
@@ -828,15 +827,17 @@ class TickerBase():
except ValueError:
if page_offset == 0:
# Should not fail on first page
- print("Could not find earnings history data for {}.".format(self.ticker))
- return
- else:
- break
+ if "Showing Earnings for:" in data:
+ # Actually YF was successful, problem is company doesn't have earnings history
+ dates = utils.empty_earnings_dates_df()
+ break
if dates is None:
dates = data
else:
dates = _pd.concat([dates, data], axis=0)
+ page_offset += page_size
+
if dates is None:
raise Exception("No data found, symbol may be delisted")
dates = dates.reset_index(drop=True)
diff --git a/yfinance/utils.py b/yfinance/utils.py
index <HASH>..<HASH> 100644
--- a/yfinance/utils.py
+++ b/yfinance/utils.py
@@ -87,6 +87,11 @@ def empty_df(index=[]):
empty.index.name = 'Date'
return empty
+def empty_earnings_dates_df():
+ empty = _pd.DataFrame(columns=["Symbol", "Company", "Earnings Date", "EPS Estimate", "Reported EPS", "Surprise(%)"])
+ # empty.index.name = 'Earnings Date'
+ return empty
+
def get_html(url, proxy=None, session=None):
session = session or _requests
|
Return empty DF if YF missing earnings dates
|
ranaroussi_fix-yahoo-finance
|
train
|
a5da45abd6a41ebd4c9c08188cbd382a38c7be06
|
diff --git a/src/main/java/org/jfrog/hudson/maven2/ArtifactsDeployer.java b/src/main/java/org/jfrog/hudson/maven2/ArtifactsDeployer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jfrog/hudson/maven2/ArtifactsDeployer.java
+++ b/src/main/java/org/jfrog/hudson/maven2/ArtifactsDeployer.java
@@ -16,6 +16,7 @@
package org.jfrog.hudson.maven2;
+import hudson.EnvVars;
import hudson.Util;
import hudson.maven.MavenBuild;
import hudson.maven.MavenModule;
@@ -43,6 +44,7 @@ import org.jfrog.hudson.release.ReleaseAction;
import org.jfrog.hudson.util.BuildUniqueIdentifierHelper;
import org.jfrog.hudson.util.ExtractorUtils;
import org.jfrog.hudson.util.IncludesExcludes;
+import org.jfrog.hudson.util.MavenVersionHelper;
import java.io.File;
import java.io.FileNotFoundException;
@@ -70,7 +72,7 @@ public class ArtifactsDeployer {
private final IncludeExcludePatterns patterns;
private final boolean downstreamIdentifier;
private final boolean isArchiveJenkinsVersion;
- private final Map<String, String> env;
+ private final EnvVars env;
private final String[] matrixParams;
private final AbstractBuild<?, ?> rootBuild;
@@ -239,8 +241,25 @@ public class ArtifactsDeployer {
File file = new File(new File(new File(new File(build.getArtifactsDir(), mavenArtifact.groupId),
mavenArtifact.artifactId), mavenArtifact.version), fileName);
if (!file.exists()) {
- throw new FileNotFoundException("Archived artifact is missing: " + file);
+ throw new FileNotFoundException("Archived artifact is missing: " + file + " " + getAdditionalMessage());
}
return file;
}
+
+ /**
+ * @return An additional error message to be attached to the exception
+ */
+ private String getAdditionalMessage() throws IOException {
+ try {
+ String mavenVersion = MavenVersionHelper.getMavenVersion(mavenModuleSetBuild, env, listener);
+ if (StringUtils.isBlank(mavenVersion)) {
+ return "";
+ }
+ return mavenVersion.startsWith("2") ?
+ "\nDisabling the automatic arching and using the external Maven extractor is compatible with Maven 3.0.2 and up" :
+ "";
+ } catch (InterruptedException e) {
+ throw new RuntimeException("Unable to get maven version", e);
+ }
+ }
}
diff --git a/src/main/java/org/jfrog/hudson/util/MavenVersionHelper.java b/src/main/java/org/jfrog/hudson/util/MavenVersionHelper.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jfrog/hudson/util/MavenVersionHelper.java
+++ b/src/main/java/org/jfrog/hudson/util/MavenVersionHelper.java
@@ -18,13 +18,18 @@ package org.jfrog.hudson.util;
import hudson.AbortException;
import hudson.EnvVars;
+import hudson.FilePath;
+import hudson.maven.MavenEmbedderException;
+import hudson.maven.MavenEmbedderUtils;
import hudson.maven.MavenModuleSet;
import hudson.maven.MavenModuleSetBuild;
import hudson.model.BuildListener;
import hudson.model.Computer;
+import hudson.remoting.VirtualChannel;
import hudson.tasks.Maven;
import org.jfrog.hudson.maven3.MavenVersionCallable;
+import java.io.File;
import java.io.IOException;
/**
@@ -38,6 +43,21 @@ public class MavenVersionHelper {
*/
private static final String MINIMUM_MAVEN_VERSION = "3.0.2";
+ public static String getMavenVersion(MavenModuleSetBuild build, EnvVars vars,
+ BuildListener listener) throws IOException, InterruptedException {
+ final Maven.MavenInstallation installation = getMavenInstallation(build.getProject(), vars, listener);
+ build.getWorkspace().act(new FilePath.FileCallable<String>() {
+ public String invoke(File f, VirtualChannel channel) throws IOException, InterruptedException {
+ try {
+ return MavenEmbedderUtils.getMavenVersion(new File(installation.getHome())).getVersion();
+ } catch (MavenEmbedderException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ });
+ return "";
+ }
+
/**
* @return True if the Maven version of this build is at least {@link MavenVersionHelper#MINIMUM_MAVEN_VERSION}
*/
@@ -70,8 +90,7 @@ public class MavenVersionHelper {
* is {@code null} then this exception is thrown.
*/
private static Maven.MavenInstallation getMavenInstallation(MavenModuleSet project, EnvVars vars,
- BuildListener listener)
- throws IOException, InterruptedException {
+ BuildListener listener) throws IOException, InterruptedException {
Maven.MavenInstallation mavenInstallation = project.getMaven();
if (mavenInstallation == null) {
throw new AbortException("A Maven installation needs to be available for this project to be built.\n" +
|
HAP-<I>: Log informetive message when maven2 is used with Disable automatic artifact archiving
|
jenkinsci_artifactory-plugin
|
train
|
78b4a0202600d91251743936980469d85e644763
|
diff --git a/pychromecast/dial.py b/pychromecast/dial.py
index <HASH>..<HASH> 100644
--- a/pychromecast/dial.py
+++ b/pychromecast/dial.py
@@ -7,6 +7,7 @@ from uuid import UUID
import logging
import requests
+from .const import CAST_TYPE_CHROMECAST
from .discovery import get_info_from_service, get_host_from_service_info
XML_NS_UPNP_DEVICE = "{urn:schemas-upnp-org:device-1-0}"
@@ -81,6 +82,8 @@ def get_device_status(host, services=None, zconf=None):
udn = status.get("ssdp_udn", None)
+ cast_type = CAST_TYPE_CHROMECAST
+
uuid = None
if udn:
uuid = UUID(udn.replace("-", ""))
|
Correct mistake in PR#<I> (#<I>)
|
balloob_pychromecast
|
train
|
7d291b91f2cd34941ea4c21c2f571ca54ba3c564
|
diff --git a/addon/components/object-list-view.js b/addon/components/object-list-view.js
index <HASH>..<HASH> 100644
--- a/addon/components/object-list-view.js
+++ b/addon/components/object-list-view.js
@@ -339,14 +339,20 @@ export default FlexberryBaseComponent.extend(
'showCheckBoxInRow',
'showDeleteButtonInRow',
'showEditButtonInRow',
+ 'customButtonsInRow',
'modelProjection',
function() {
if (this.get('modelProjection')) {
- return this.get('showAsteriskInRow') || this.get('showCheckBoxInRow') || this.get('showDeleteButtonInRow') || this.get('showEditButtonInRow');
+ return this.get('showAsteriskInRow') ||
+ this.get('showCheckBoxInRow') ||
+ this.get('showDeleteButtonInRow') ||
+ this.get('showEditButtonInRow') ||
+ !!this.get('customButtonsInRow');
} else {
return false;
}
- }),
+ }
+ ).readOnly(),
/**
Flag indicates whether to show dropdown menu with edit menu item, in last column of every row.
|
Fix object-list-view component show column with custom buttons in row
|
Flexberry_ember-flexberry
|
train
|
e86b159f28b167f9783bae904e4546655fd9c4df
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ setup(
name = 'django-cprofile-middleware',
packages = ['django_cprofile_middleware'],
license = 'MIT',
- version = '0.1',
+ version = '0.2',
description = 'Easily add cProfile profiling to django views.',
author = 'Omar Bohsali',
author_email = 'omar.bohsali@gmail.com',
|
<I> with python 3 fix
|
omarish_django-cprofile-middleware
|
train
|
9a358cb0a0cdc2d73115c38df0c2fd5765668dd3
|
diff --git a/src/sap.ui.core/src/sap/ui/model/analytics/odata4analytics.js b/src/sap.ui.core/src/sap/ui/model/analytics/odata4analytics.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.core/src/sap/ui/model/analytics/odata4analytics.js
+++ b/src/sap.ui.core/src/sap/ui/model/analytics/odata4analytics.js
@@ -203,7 +203,7 @@ sap.ui.define([
/**
* Create a reference to an OData model having certain workarounds activated. A
* workaround is an implementation that changes the standard behavior of the API
- * to overcome some gap or limitation in the OData provider. The workaround
+ * to overcome some gap or restriction in the OData provider. The workaround
* implementation can be conditionally activated by passing the identifier in
* the constructor.
*
@@ -522,7 +522,7 @@ sap.ui.define([
if (oParameterization3 != null) {
// TODO: extend this implementation to support more
// than one related parameter entity type
- throw "LIMITATION: Unable to handle multiple parameter entity types of query entity "
+ throw "Unable to handle multiple parameter entity types of query entity "
+ oEntityType3.name;
} else {
oParameterization3 = oMatchingParameterization;
@@ -5194,7 +5194,7 @@ sap.ui.define([
this._oSortExpression = null;
if (this._oParameterizationRequest != null && this._bUseMasterData == true) {
- throw "LIMITATION: parameterized master data entity sets are not yet implemented";
+ throw "Parameterized master data entity sets are not yet implemented";
}
if (this._bUseMasterData) {
this._oEntitySet = this._oDimension.getMasterDataEntitySet();
diff --git a/src/sap.ui.core/test/sap/ui/core/qunit/odata/v4/ODataPropertyBinding.qunit.js b/src/sap.ui.core/test/sap/ui/core/qunit/odata/v4/ODataPropertyBinding.qunit.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.core/test/sap/ui/core/qunit/odata/v4/ODataPropertyBinding.qunit.js
+++ b/src/sap.ui.core/test/sap/ui/core/qunit/odata/v4/ODataPropertyBinding.qunit.js
@@ -1702,7 +1702,7 @@ sap.ui.define([
// annotation, but an "etag(?)" header which is not supported by _Cache so far
//TODO for PATCH we need the edit URL (for single property we can't determine the canonical URL
// because the path need not contain the key properties e.g.
- // /EMPLOYEES('2')/EMPLOYEE_2_MANAGER/TEAM_ID) --> accept limitation for now
+ // /EMPLOYEES('2')/EMPLOYEE_2_MANAGER/TEAM_ID) --> accept restriction for now
//TODO if the back end returns a different value we should take care
//TODO PUT of primitive property versus PATCH of entity (with select *), what is better?
// --> PATCH with header "Prefer: return=minimal" followed by
|
[INTERNAL] Avoid usage of the term limitation
PS1: Do it
PS2: Review comment
Change-Id: I8b<I>afbaee<I>fc<I>ba6af<I>a4bb<I>a2ac4a
JIRA: CPOUI5MODELS-<I>
|
SAP_openui5
|
train
|
1c03f47e0e4f5b704af67664149e6cc43d3d782b
|
diff --git a/structr-core/src/main/java/org/structr/core/entity/Principal.java b/structr-core/src/main/java/org/structr/core/entity/Principal.java
index <HASH>..<HASH> 100644
--- a/structr-core/src/main/java/org/structr/core/entity/Principal.java
+++ b/structr-core/src/main/java/org/structr/core/entity/Principal.java
@@ -23,6 +23,7 @@ import java.util.Set;
import org.structr.common.AccessControllable;
import org.structr.common.ValidationHelper;
import org.structr.common.error.ErrorBuffer;
+import org.structr.common.error.SemanticErrorToken;
import org.structr.core.entity.relationship.PrincipalOwnsNode;
import org.structr.core.graph.NodeInterface;
import org.structr.core.property.ArrayProperty;
@@ -76,6 +77,21 @@ public interface Principal extends NodeInterface, AccessControllable {
valid &= ValidationHelper.isValidStringNotBlank(this, name, errorBuffer);
valid &= ValidationHelper.isValidUniqueProperty(this, eMail, errorBuffer);
+ final String _eMail = getProperty(eMail);
+ if (_eMail != null) {
+
+ // verify that the address contains at least the @ character,
+ // which is a requirement for it to be distinguishable from
+ // a user name, so email addresses can less easily interfere
+ // with user names.
+ if (!_eMail.contains("@")) {
+
+ valid = false;
+
+ errorBuffer.add(new SemanticErrorToken(getClass().getSimpleName(), eMail, "must_contain_at_character", _eMail));
+ }
+ }
+
return valid;
}
}
diff --git a/structr-core/src/test/java/org/structr/common/AccessControlTest.java b/structr-core/src/test/java/org/structr/common/AccessControlTest.java
index <HASH>..<HASH> 100644
--- a/structr-core/src/test/java/org/structr/common/AccessControlTest.java
+++ b/structr-core/src/test/java/org/structr/common/AccessControlTest.java
@@ -31,6 +31,7 @@ import org.slf4j.LoggerFactory;
import org.structr.api.graph.Direction;
import org.structr.api.graph.Relationship;
import org.structr.api.util.Iterables;
+import org.structr.common.error.ErrorToken;
import org.structr.common.error.FrameworkException;
import org.structr.core.GraphObject;
import org.structr.core.Result;
@@ -38,6 +39,7 @@ import org.structr.core.app.App;
import org.structr.core.app.StructrApp;
import org.structr.core.entity.AbstractNode;
import org.structr.core.entity.Group;
+import org.structr.core.entity.Principal;
import org.structr.core.entity.ResourceAccess;
import org.structr.core.entity.TestOne;
import org.structr.core.entity.TestUser;
@@ -852,6 +854,28 @@ public class AccessControlTest extends StructrTest {
logger.error(ex.toString());
}
+ try (final Tx tx = app.tx()) {
+
+ List<TestUser> users = createTestNodes(TestUser.class, 1);
+ user1 = (TestUser) users.get(0);
+ user1.setProperty(Principal.eMail, "invalid");
+
+ tx.success();
+
+ fail("Invalid e-mail address should have thrown an exception.");
+
+ } catch (FrameworkException ex) {
+
+ final ErrorToken token = ex.getErrorBuffer().getErrorTokens().get(0);
+
+ assertEquals("Invalid error code", 422, ex.getStatus());
+ assertEquals("Invalid error code", "TestUser", token.getType());
+ assertEquals("Invalid error code", "eMail", token.getProperty());
+ assertEquals("Invalid error code", "must_contain_at_character", token.getToken());
+ assertEquals("Invalid error code", "invalid", token.getDetail());
+
+ }
+
// Switch user context to user1
final App user1App = StructrApp.getInstance(SecurityContext.getInstance(user1, AccessMode.Frontend));
try (final Tx tx = user1App.tx()) {
|
Adds validation for eMail property of Principals, the property value must
contain an "@" character if set, adds test.
|
structr_structr
|
train
|
dcd7fdb8af97d93bd4fdc806f8575ccbd14783d7
|
diff --git a/lib/test-generator.js b/lib/test-generator.js
index <HASH>..<HASH> 100644
--- a/lib/test-generator.js
+++ b/lib/test-generator.js
@@ -11,7 +11,7 @@ module.exports = function(relativePath, results, testingFramework){
assertions = results.warnings.map(warning => `${warning.line}:${warning.column} ${escapeString(warning.text)}`).join('\n');
}
return (
- generator.suiteHeader('Stylelint') +
+ generator.suiteHeader(`Stylelint: ${relativePath}`) +
generator.test(relativePath + ' should pass stylelint', passed, assertions) +
generator.suiteFooter()
);
|
[ENHANCEMENT] Add name to test
|
billybonks_broccoli-stylelint
|
train
|
6a24f4d73c1700c1971552d25a1ad0096b8eb7b7
|
diff --git a/diffHistory.js b/diffHistory.js
index <HASH>..<HASH> 100644
--- a/diffHistory.js
+++ b/diffHistory.js
@@ -78,26 +78,37 @@ function saveDiffObject(currentObject, original, updated, opts, queryObject) {
/* eslint-disable complexity */
const saveDiffHistory = (queryObject, currentObject, opts) => {
- const update = JSON.parse(JSON.stringify(queryObject._update));
-
- /* eslint-disable security/detect-object-injection */
- const updateParams = Object.assign(
- ...Object.keys(update).map(function (key) {
- if (typeof update[key] === 'object') {
- return update[key];
+ const queryUpdate = queryObject.getUpdate();
+
+ let keysToBeModified = [];
+ let mongoUpdateOperations = [];
+ let plainKeys = [];
+
+ for (const key in queryUpdate) {
+ const value = queryUpdate[key];
+ if (key.startsWith("$") && typeof value === "object") {
+ const innerKeys = Object.keys(value);
+ keysToBeModified = keysToBeModified.concat(innerKeys);
+ if (key !== "$setOnInsert") {
+ mongoUpdateOperations = mongoUpdateOperations.concat(key);
}
- return update;
- })
+ } else {
+ keysToBeModified = keysToBeModified.concat(key);
+ plainKeys = plainKeys.concat(key);
+ }
+ }
+
+ const dbObject = pick(currentObject, keysToBeModified);
+ const updatedObject = assign(
+ dbObject,
+ pick(queryUpdate, mongoUpdateOperations),
+ pick(queryUpdate, plainKeys)
);
-
- /* eslint-enable security/detect-object-injection */
- delete queryObject['_update']['$setOnInsert'];
- const dbObject = pick(currentObject, Object.keys(updateParams));
-
+
return saveDiffObject(
currentObject,
dbObject,
- assign(dbObject, queryObject._update),
+ updatedObject,
opts,
queryObject
);
@@ -106,7 +117,6 @@ const saveDiffHistory = (queryObject, currentObject, opts) => {
const saveDiffs = (queryObject, opts) =>
queryObject
.find(queryObject._conditions)
- .lean(false)
.cursor()
.eachAsync(result => saveDiffHistory(queryObject, result, opts));
diff --git a/tests/diffHistory.js b/tests/diffHistory.js
index <HASH>..<HASH> 100644
--- a/tests/diffHistory.js
+++ b/tests/diffHistory.js
@@ -116,6 +116,16 @@ mandatorySchema.plugin(diffHistory.plugin, { required: ['user', 'reason'] });
const MandatorySchema = mongoose.model('mandatories', mandatorySchema);
+
+const schemaWithTimestamps = new mongoose.Schema(
+ {
+ def: String
+ },
+ { timestamps: true }
+ );
+schemaWithTimestamps.plugin(diffHistory.plugin);
+const TimestampsSchema = mongoose.model('timestamps', schemaWithTimestamps);
+
describe('diffHistory', function () {
afterEach(function (done) {
Promise.all([
@@ -123,7 +133,8 @@ describe('diffHistory', function () {
mongoose.connection.collections['picks'].remove({}),
mongoose.connection.collections['samplesarrays'].remove({}),
mongoose.connection.collections['histories'].remove({}),
- mongoose.connection.collections['mandatories'].remove({})
+ mongoose.connection.collections['mandatories'].remove({}),
+ mongoose.connection.collections['timestamps'].remove({})
])
.then(() => done())
.catch(done);
@@ -602,6 +613,31 @@ describe('diffHistory', function () {
})
.catch(done);
});
+
+ it('should not override lean option in original query', function (done) {
+ Sample1.findOneAndUpdate(
+ { def: 'hey hye' },
+ { ghi: 1234 },
+ { lean: true }
+ ).then(updatedObj => {
+ expect(updatedObj).not.to.instanceOf(Sample1);
+ done();
+ }).catch(done);
+ });
+
+ it("should not fail if timestamps enabled", function (done) {
+ const timestampModel = new TimestampsSchema({ def: "hello" });
+ timestampModel.save().then(() =>
+ TimestampsSchema.findOneAndUpdate(
+ { def: "hello" },
+ { def: "update hello" }
+ )
+ .then(() => done())
+ .catch((e) => {
+ done(e);
+ })
+ );
+ });
});
describe('plugin: pre updateOne', function () {
|
fix: cannot use $timestamps, lean not respected
BREAKING CHANGE: lean no longer forced false for saveDiffs, may cause
issues for some
* Fix lean option override in findOneAndUpdate pre hook
* Fix findOneAndUpdate pre hook
* findOneAndUpdate pre hook is not working as expected when timestamps autogeneration is enabled
|
mimani_mongoose-diff-history
|
train
|
b623cc1d7f410d3fde0ab1d8cab8227f842d6297
|
diff --git a/src/Native/NativeFileInfo.php b/src/Native/NativeFileInfo.php
index <HASH>..<HASH> 100644
--- a/src/Native/NativeFileInfo.php
+++ b/src/Native/NativeFileInfo.php
@@ -185,6 +185,7 @@ class NativeFileInfo implements IFileInfo {
foreach (explode(',', $attribute) as $acl) {
list($user, $permissions) = explode(':', $acl, 2);
+ $user = trim($user, '\\');
list($type, $flags, $mask) = explode('/', $permissions);
$mask = hexdec($mask);
diff --git a/tests/NativeShareTest.php b/tests/NativeShareTest.php
index <HASH>..<HASH> 100644
--- a/tests/NativeShareTest.php
+++ b/tests/NativeShareTest.php
@@ -7,6 +7,7 @@
namespace Icewind\SMB\Test;
+use Icewind\SMB\ACL;
use Icewind\SMB\BasicAuth;
use Icewind\SMB\Exception\InvalidArgumentException;
use Icewind\SMB\IOptions;
@@ -81,4 +82,15 @@ class NativeShareTest extends AbstractShareTest {
);
$server->listShares();
}
+
+ public function testACL() {
+ $this->share->mkdir($this->root . "/test");
+ $listing = $this->share->dir($this->root);
+
+ $this->assertCount(1, $listing);
+ $acls = $listing[0]->getAcls();
+ $acl = $acls['Everyone'];
+ $this->assertEquals($acl->getType(), ACL::TYPE_ALLOW);
+ $this->assertEquals(ACL::MASK_READ, $acl->getMask() & ACL::MASK_READ);
+ }
}
diff --git a/tests/ParserTest.php b/tests/ParserTest.php
index <HASH>..<HASH> 100644
--- a/tests/ParserTest.php
+++ b/tests/ParserTest.php
@@ -10,6 +10,7 @@ namespace Icewind\SMB\Test;
use Icewind\SMB\ACL;
use Icewind\SMB\IFileInfo;
use Icewind\SMB\Wrapped\FileInfo;
+use Icewind\SMB\Wrapped\Parser;
class ParserTest extends \PHPUnit\Framework\TestCase {
public function modeProvider() {
@@ -29,7 +30,7 @@ class ParserTest extends \PHPUnit\Framework\TestCase {
* @dataProvider modeProvider
*/
public function testParseMode($string, $mode) {
- $parser = new \Icewind\SMB\Wrapped\Parser('UTC');
+ $parser = new Parser('UTC');
$this->assertEquals($mode, $parser->parseMode($string), 'Failed parsing ' . $string);
}
@@ -90,7 +91,7 @@ class ParserTest extends \PHPUnit\Framework\TestCase {
* @dataProvider statProvider
*/
public function testStat($output, $stat) {
- $parser = new \Icewind\SMB\Wrapped\Parser('UTC');
+ $parser = new Parser('UTC');
$this->assertEquals($stat, $parser->parseStat($output));
}
@@ -124,14 +125,14 @@ class ParserTest extends \PHPUnit\Framework\TestCase {
* @dataProvider dirProvider
*/
public function testDir($output, $dir) {
- $parser = new \Icewind\SMB\Wrapped\Parser('CEST');
+ $parser = new Parser('CEST');
$this->assertEquals($dir, $parser->parseDir($output, '', function () {
return [];
}));
}
- public function testParseACL() {
- $parser = new \Icewind\SMB\Wrapped\Parser('CEST');
+ public function testParseACLRealWorld() {
+ $parser = new Parser('CEST');
$raw = [
"lp_load_ex: refreshing parameters",
"Initialising global parameters",
@@ -176,4 +177,28 @@ class ParserTest extends \PHPUnit\Framework\TestCase {
$result = $parser->parseACLs($raw);
$this->assertEquals($expected, $result);
}
+
+ public function testParseACLConstructed() {
+ $parser = new Parser('CEST');
+ $raw = [
+ "REVISION:1",
+ "CONTROL:SR|PD|DI|DP",
+ "OWNER:DESKTOP-MLM38Q5\robin",
+ "GROUP:DESKTOP-MLM38Q5\None",
+ "ACL:Everyone:ALLOWED/0x0/READ",
+ "ACL:Test:DENIED/0x0/R",
+ "ACL:Multiple:ALLOWED/0x0/R|X|D",
+ "ACL:Numeric:ALLOWED/0x0/0x10",
+ "Maximum access: 0x120089"
+ ];
+
+ $expected = [
+ "Everyone" => new ACL(ACL::TYPE_ALLOW, 0, ACL::MASK_READ + ACL::MASK_EXECUTE),
+ "Test" => new ACL(ACL::TYPE_DENY, 0, ACL::MASK_READ),
+ "Multiple" => new ACL(ACL::TYPE_ALLOW, 0, ACL::MASK_READ + ACL::MASK_EXECUTE + ACL::MASK_DELETE),
+ "Numeric" => new ACL(ACL::TYPE_ALLOW, 0, 0x10),
+ ];
+ $result = $parser->parseACLs($raw);
+ $this->assertEquals($expected, $result);
+ }
}
|
fix handling of 'Everyone' acl when using native backend
|
icewind1991_SMB
|
train
|
6d9f6408c71b943b4c2d2fe721299e14b23a161e
|
diff --git a/emirdrp/recipes/spec/subs.py b/emirdrp/recipes/spec/subs.py
index <HASH>..<HASH> 100644
--- a/emirdrp/recipes/spec/subs.py
+++ b/emirdrp/recipes/spec/subs.py
@@ -276,6 +276,7 @@ class BaseABBARecipe(EmirRecipe):
hdu = hdulist[0]
hdu.header['history'] = "Processed ABBA"
hdu.header['NUM-NCOM'] = (2, 'Number of combined frames')
+ hdu.header['IMGOBBL'] = 0
dm = emirdrp.datamodel.EmirDataModel()
for img, key in zip(images, ['A', 'B', 'B', 'A']):
imgid = dm.get_imgid(img)
|
Keyword IMGOBBL = 0 in reduced_mos_abba (fixes #<I>)
|
guaix-ucm_pyemir
|
train
|
b6f486b9b58ec51d1b05173fc2985dd9aad5e883
|
diff --git a/lib/ruuid/uuid.rb b/lib/ruuid/uuid.rb
index <HASH>..<HASH> 100644
--- a/lib/ruuid/uuid.rb
+++ b/lib/ruuid/uuid.rb
@@ -34,10 +34,12 @@ module RUUID
"<#{self.class}:0x#{object_id} data=#{to_s}>"
end
+ # @private
def marshal_dump
data
end
+ # @private
def marshal_load(data)
@data = data.dup.freeze
end
|
doc: Mark RUUID::UUID marshal methods private
|
hashrabbit_ruuid
|
train
|
adb5768f766619237afeba9694cfa09a420902f1
|
diff --git a/app/helpers/dashboard_helper.rb b/app/helpers/dashboard_helper.rb
index <HASH>..<HASH> 100644
--- a/app/helpers/dashboard_helper.rb
+++ b/app/helpers/dashboard_helper.rb
@@ -70,10 +70,23 @@ module DashboardHelper
end
end
- def products_synced num= quantity
- Product.readable(current_organization).reject{|prod|
- prod.sync_status.uuid.nil?
- }.sort{|a,b| a.sync_status.start_time <=> b.sync_status.start_time}[0..num]
+ def products_synced num=quantity
+ syncing_products = []
+ synced_products = []
+
+ Product.readable(current_organization).each{ |prod|
+ if !prod.sync_status.start_time.nil?
+ syncing_products << prod
+ else
+ synced_products << prod
+ end
+ }
+
+ syncing_products.sort{|a,b|
+ a.sync_status.start_time <=> b.sync_status.start_time
+ }
+
+ return (syncing_products + synced_products)[0..num]
end
|
<I> - Fixes issue where sync status dashboard widget caused an error when more than one product had a sync plan attached to it.
Note, this change slightly re-works the sync dashboard widget to show
currently syncing products with priority and synced products afterward
depending on filter size.
|
Katello_katello
|
train
|
4cd84ce81884ce1bdca18ade48672a38ebee65ed
|
diff --git a/bibo/bibo.py b/bibo/bibo.py
index <HASH>..<HASH> 100644
--- a/bibo/bibo.py
+++ b/bibo/bibo.py
@@ -125,20 +125,21 @@ def add(ctx, destination, **kwargs):
@cli.command(short_help='Remove an entry or a field.')
-@SEARCH_TERMS_OPTION
-@click.option('--field', help='Field to remove.')
+@click.argument('key')
+@click.argument('field', nargs=-1)
@click.pass_context
-def remove(ctx, search_terms, field):
+def remove(ctx, key, field):
data = ctx.obj['data']
- entry = query.get(data, search_terms)
+ entry = query.get_by_key(data, key)
- if field is None:
+ if not field:
data.remove(entry)
else:
- if field in entry['fields']:
- del entry['fields'][field]
- else:
- click.echo('No such field')
+ for f in field:
+ if f in entry['fields']:
+ del entry['fields'][f]
+ else:
+ click.echo('"{}" has no field "{}"'.format(key, f))
pybibs.write_file(data, ctx.obj['database'])
diff --git a/tests/bibo/test_bibo.py b/tests/bibo/test_bibo.py
index <HASH>..<HASH> 100644
--- a/tests/bibo/test_bibo.py
+++ b/tests/bibo/test_bibo.py
@@ -140,7 +140,7 @@ def test_add_without_saving(runner, database):
def test_remove(runner, database):
- args = ['--database', database, 'remove', 'asimov']
+ args = ['--database', database, 'remove', 'asimov1951foundation']
result = runner.invoke(bibo.cli, args)
assert result.exit_code == 0
assert result.output == ''
@@ -150,7 +150,7 @@ def test_remove(runner, database):
def test_remove_field(runner, database):
- args = ['--database', database, 'remove', '--field', 'file', 'tolkien1937']
+ args = ['--database', database, 'remove', 'tolkien1937hobit', 'file']
result = runner.invoke(bibo.cli, args)
assert result.exit_code == 0
assert result.output == ''
@@ -297,8 +297,8 @@ def test_bibtex_error_with_relative_path(runner):
def test_remove_one_entry_at_a_time(runner, database):
result = runner.invoke(bibo.cli, ['--database', database, 'remove'])
- assert result.exit_code == 1
+ assert result.exit_code != 0
result = runner.invoke(bibo.cli, ['--database', database, 'remove', 'tolkien'])
- assert result.exit_code == 1
- result = runner.invoke(bibo.cli, ['--database', database, 'remove', 'lord of the rings'])
+ assert result.exit_code != 0
+ result = runner.invoke(bibo.cli, ['--database', database, 'remove', 'tolkien1937hobit'])
assert result.exit_code == 0
|
Remove by key. #<I>
|
Nagasaki45_bibo
|
train
|
cc8a0bc140daace2c8db8f7fb097cd763175d5ed
|
diff --git a/netpyne/sim/utils.py b/netpyne/sim/utils.py
index <HASH>..<HASH> 100644
--- a/netpyne/sim/utils.py
+++ b/netpyne/sim/utils.py
@@ -834,8 +834,7 @@ def _dict2utf8(obj):
#------------------------------------------------------------------------------
def clearAll():
"""
- Function for/to <short description of `netpyne.sim.utils.clearAll`>
-
+ Function to clear all sim objects in memory
"""
@@ -847,36 +846,46 @@ def clearAll():
sim.pc.gid_clear() # clear previous gid settings
# clean cells and simData in all nodes
- sim.clearObj([cell.__dict__ if hasattr(cell, '__dict__') else cell for cell in sim.net.cells])
- if 'stims' in list(sim.simData.keys()):
- sim.clearObj([stim for stim in sim.simData['stims']])
+ if hasattr(sim, 'net'):
+ sim.clearObj([cell.__dict__ if hasattr(cell, '__dict__') else cell for cell in sim.net.cells])
+ if hasattr(sim, 'simData'):
+ if 'stims' in list(sim.simData.keys()):
+ sim.clearObj([stim for stim in sim.simData['stims']])
- for key in list(sim.simData.keys()): del sim.simData[key]
- for c in sim.net.cells: del c
- for p in sim.net.pops: del p
- del sim.net.params
+ for key in list(sim.simData.keys()): del sim.simData[key]
+
+ if hasattr(sim, 'net'):
+ for c in sim.net.cells: del c
+ for p in sim.net.pops: del p
+ del sim.net.params
# clean cells and simData gathered in master node
- if sim.rank == 0:
- if hasattr(sim.net, 'allCells'):
- sim.clearObj([cell.__dict__ if hasattr(cell, '__dict__') else cell for cell in sim.net.allCells])
- if hasattr(sim, 'allSimData'):
- for key in list(sim.allSimData.keys()): del sim.allSimData[key]
+ if hasattr(sim, 'rank'):
+ if sim.rank == 0:
+ if hasattr(sim, 'net'):
+ if hasattr(sim.net, 'allCells'):
+ sim.clearObj([cell.__dict__ if hasattr(cell, '__dict__') else cell for cell in sim.net.allCells])
+ if hasattr(sim, 'allSimData'):
+ for key in list(sim.allSimData.keys()): del sim.allSimData[key]
+
+ if 'stims' in list(sim.allSimData.keys()):
+ sim.clearObj([stim for stim in sim.allSimData['stims']])
+
+ if hasattr(sim, 'net'):
+ for c in sim.net.allCells: del c
+ for p in sim.net.allPops: del p
+ del sim.net.allCells
- if 'stims' in list(sim.allSimData.keys()):
- sim.clearObj([stim for stim in sim.allSimData['stims']])
-
- for c in sim.net.allCells: del c
- for p in sim.net.allPops: del p
- del sim.net.allCells
- del sim.allSimData
-
- import matplotlib
- matplotlib.pyplot.clf()
- matplotlib.pyplot.close('all')
-
- del sim.net
+ if hasattr(sim, 'allSimData'):
+ del sim.allSimData
+
+ import matplotlib
+ matplotlib.pyplot.clf()
+ matplotlib.pyplot.close('all')
+
+ if hasattr(sim, 'net'):
+ del sim.net
import gc; gc.collect()
|
Improved sim.clearAll such that it works before simulations have been run
|
Neurosim-lab_netpyne
|
train
|
09c9cfa3431e3c89254277a3c4a82c204caa3e04
|
diff --git a/lib/smart_rspec/matchers.rb b/lib/smart_rspec/matchers.rb
index <HASH>..<HASH> 100644
--- a/lib/smart_rspec/matchers.rb
+++ b/lib/smart_rspec/matchers.rb
@@ -43,7 +43,7 @@ module SmartRspec
matcher :be_a_list_of do |klass|
match do |collection|
- collection.each.all? { |e| e.is_a?(klass) }
+ collection.all? { |e| e.is_a?(klass) }
end
end
end
|
Enumerable#all? does not need “.each”
|
tiagopog_smart_rspec
|
train
|
e0d4ca4626bb11f2daf4ceedbe35169eb9cb753d
|
diff --git a/pact/pact-compiler/pom.xml b/pact/pact-compiler/pom.xml
index <HASH>..<HASH> 100644
--- a/pact/pact-compiler/pom.xml
+++ b/pact/pact-compiler/pom.xml
@@ -37,6 +37,12 @@
<version>${project.version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-core-asl</artifactId>
+ <version>1.9.12</version>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<reporting>
diff --git a/pact/pact-compiler/src/main/java/eu/stratosphere/pact/compiler/plandump/PlanJSONDumpGenerator.java b/pact/pact-compiler/src/main/java/eu/stratosphere/pact/compiler/plandump/PlanJSONDumpGenerator.java
index <HASH>..<HASH> 100644
--- a/pact/pact-compiler/src/main/java/eu/stratosphere/pact/compiler/plandump/PlanJSONDumpGenerator.java
+++ b/pact/pact-compiler/src/main/java/eu/stratosphere/pact/compiler/plandump/PlanJSONDumpGenerator.java
@@ -69,6 +69,13 @@ public class PlanJSONDumpGenerator {
compilePlanToJSON(n, writer);
}
+ public String getPactPlanAsJSON(List<DataSinkNode> nodes) {
+ StringWriter sw = new StringWriter();
+ PrintWriter pw = new PrintWriter(sw);
+ dumpPactPlanAsJSON(nodes, pw);
+ return sw.toString();
+ }
+
public void dumpOptimizerPlanAsJSON(OptimizedPlan plan, File toFile) throws IOException {
PrintWriter pw = null;
try {
diff --git a/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/CompiledPlanDumpTest.java b/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/CompiledPlanDumpTest.java
index <HASH>..<HASH> 100644
--- a/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/CompiledPlanDumpTest.java
+++ b/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/CompiledPlanDumpTest.java
@@ -14,8 +14,9 @@
**********************************************************************************************************************/
package eu.stratosphere.pact.compiler.plandump;
-import java.io.PrintWriter;
-
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.JsonParser;
import org.junit.Assert;
import org.junit.Test;
@@ -69,11 +70,12 @@ public class CompiledPlanDumpTest extends CompilerTestBase {
try {
OptimizedPlan op = compileNoStats(p);
PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
- PrintWriter writer = new PrintWriter(new BlackHoleWriter());
-
-// StringWriter sw = new StringWriter(512);
-// PrintWriter writer = new PrintWriter(sw, true);
- dumper.dumpOptimizerPlanAsJSON(op, writer);
+ String json = dumper.getOptimizerPlanAsJSON(op);
+ JsonParser parser = new JsonFactory().createJsonParser(json);
+ while (parser.nextToken() != null);
+ } catch (JsonParseException e) {
+ e.printStackTrace();
+ Assert.fail("JSON Generator produced malformatted output: " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
Assert.fail("An error occurred in the test: " + e.getMessage());
diff --git a/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/PreviewPlanDumpTest.java b/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/PreviewPlanDumpTest.java
index <HASH>..<HASH> 100644
--- a/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/PreviewPlanDumpTest.java
+++ b/pact/pact-compiler/src/test/java/eu/stratosphere/pact/compiler/plandump/PreviewPlanDumpTest.java
@@ -14,9 +14,11 @@
**********************************************************************************************************************/
package eu.stratosphere.pact.compiler.plandump;
-import java.io.PrintWriter;
import java.util.List;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.JsonParser;
import org.junit.Assert;
import org.junit.Test;
@@ -73,11 +75,12 @@ public class PreviewPlanDumpTest {
try {
List<DataSinkNode> sinks = PactCompiler.createPreOptimizedPlan(p);
PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
- PrintWriter writer = new PrintWriter(new BlackHoleWriter());
-
-// StringWriter sw = new StringWriter(512);
-// PrintWriter writer = new PrintWriter(sw, true);
- dumper.dumpPactPlanAsJSON(sinks, writer);
+ String json = dumper.getPactPlanAsJSON(sinks);
+ JsonParser parser = new JsonFactory().createJsonParser(json);
+ while (parser.nextToken() != null);
+ } catch (JsonParseException e) {
+ e.printStackTrace();
+ Assert.fail("JSON Generator produced malformatted output: " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
Assert.fail("An error occurred in the test: " + e.getMessage());
|
Added json verification to json generator tests.
|
stratosphere_stratosphere
|
train
|
22c3d47127d3d3953f5d50f44a7f1ed50c7c4bcd
|
diff --git a/examples/groups.rb b/examples/groups.rb
index <HASH>..<HASH> 100644
--- a/examples/groups.rb
+++ b/examples/groups.rb
@@ -7,8 +7,11 @@ client = DiscourseApi::Client.new
response = client.create_group(name: "engineering_team")
group_id = response["basic_group"]["id"]
-client.group_add(group_id, "sam")
-client.group_add(group_id, "jeff")
-client.group_add(group_id, "neil")
+client.group_add(group_id, username: "sam")
+client.group_add(group_id, username: "jeff")
+client.group_add(group_id, usernames: ["neil", "dan"])
+client.group_add(group_id, user_id: 123)
+client.group_add(group_id, user_ids: [123, 456])
client.group_remove(group_id, "neil")
+client.group_remove(group_id, 123)
diff --git a/lib/discourse_api/api/groups.rb b/lib/discourse_api/api/groups.rb
index <HASH>..<HASH> 100644
--- a/lib/discourse_api/api/groups.rb
+++ b/lib/discourse_api/api/groups.rb
@@ -14,12 +14,25 @@ module DiscourseApi
response.body
end
- def group_add(group_id, *usernames)
- patch("/admin/groups/#{group_id}", changes: {add: usernames})
+ def group_add(group_id, users)
+ users.keys.each do |key|
+ # Accept arrays and convert to comma-delimited string.
+ if users[key].respond_to? :join
+ users[key] = users[key].join(",")
+ end
+
+ # Accept non-plural user_id or username, but send pluralized version in the request.
+ if key.to_s[-1] != 's'
+ users["#{key}s"] = users[key]
+ users.delete(key)
+ end
+ end
+
+ put("/admin/groups/#{group_id}/members.json", users)
end
- def group_remove(group_id, *usernames)
- patch("/admin/groups/#{group_id}", changes: {delete: usernames})
+ def group_remove(group_id, user)
+ delete("/admin/groups/#{group_id}/members.json", user)
end
end
end
diff --git a/spec/discourse_api/api/groups_spec.rb b/spec/discourse_api/api/groups_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/discourse_api/api/groups_spec.rb
+++ b/spec/discourse_api/api/groups_spec.rb
@@ -27,20 +27,44 @@ describe DiscourseApi::API::Groups do
).to have_been_made
end
- it "adds members" do
- stub_request(:patch, "http://localhost:3000/admin/groups/123?api_key=test_d7fd0429940&api_username=test_user")
- subject.group_add(123, "sam")
- expect(a_request(:patch, "http://localhost:3000/admin/groups/123?api_key=test_d7fd0429940&api_username=test_user").
- with(body: {changes: {add: [ "sam" ]}})
- ).to have_been_made
+ describe "add members" do
+ before do
+ stub_request(:put, "http://localhost:3000/admin/groups/123/members.json?api_key=test_d7fd0429940&api_username=test_user")
+ end
+
+ it "adds a single member by username" do
+ subject.group_add(123, username: "sam")
+ expect(a_request(:put, "http://localhost:3000/admin/groups/123/members.json?api_key=test_d7fd0429940&api_username=test_user").
+ with(body: {usernames: "sam"})
+ ).to have_been_made
+ end
+
+ it "adds an array of members by username" do
+ subject.group_add(123, usernames: ["sam", "jeff"])
+ expect(a_request(:put, "http://localhost:3000/admin/groups/123/members.json?api_key=test_d7fd0429940&api_username=test_user").
+ with(body: {usernames: "sam,jeff"})
+ ).to have_been_made
+ end
+
+ it "adds a single member by user_id" do
+ subject.group_add(123, user_id: 456)
+ expect(a_request(:put, "http://localhost:3000/admin/groups/123/members.json?api_key=test_d7fd0429940&api_username=test_user").
+ with(body: {user_ids: "456"})
+ ).to have_been_made
+ end
+
+ it "adds an array of members by user_id" do
+ subject.group_add(123, user_id: [123, 456])
+ expect(a_request(:put, "http://localhost:3000/admin/groups/123/members.json?api_key=test_d7fd0429940&api_username=test_user").
+ with(body: {user_ids: "123,456"})
+ ).to have_been_made
+ end
end
it "removes members" do
- stub_request(:patch, "http://localhost:3000/admin/groups/123?api_key=test_d7fd0429940&api_username=test_user")
- subject.group_remove(123, "sam")
- expect(a_request(:patch, "http://localhost:3000/admin/groups/123?api_key=test_d7fd0429940&api_username=test_user").
- with(body: {changes: {delete: [ "sam" ]}})
- ).to have_been_made
+ stub_request(:delete, "http://localhost:3000/admin/groups/123/members.json?api_key=test_d7fd0429940&api_username=test_user")
+ subject.group_remove(123, username: "sam")
+ expect(a_request(:delete, "http://localhost:3000/admin/groups/123/members.json?api_key=test_d7fd0429940&api_username=test_user&username=sam")).to have_been_made
end
end
end
|
Update add and remove group calls for changed discourse API
As discussed in <URL>
|
discourse_discourse_api
|
train
|
958389ca483701da46a8f7d52414dadeb2b091ff
|
diff --git a/entu-cms.js b/entu-cms.js
index <HASH>..<HASH> 100755
--- a/entu-cms.js
+++ b/entu-cms.js
@@ -39,7 +39,6 @@ var getFilePath = function(dirName, fileName, locale) {
// Scans source folder and generates HTMLs
var worker = function() {
- console.log('Started to scan folder ' + appConf.source)
htmlFiles = []
css = {}
fse.walk(appConf.source)
@@ -101,6 +100,7 @@ var worker = function() {
setTimeout(worker, (appConf.timeout * 1000))
})
+ console.log('Build finished')
}
@@ -108,7 +108,7 @@ var worker = function() {
// Open config.yaml
var appConf = {}
-var appConfFile = process.argv[2] || path.join(__dirname, 'config.yaml')
+var appConfFile = path.resolve(process.argv[2]) || path.join(__dirname, 'config.yaml')
try {
appConf = yaml.safeLoad(fs.readFileSync(appConfFile, 'utf8'))
@@ -124,6 +124,7 @@ appConf.source = appConf.source || path.join(__dirname, 'source')
appConf.build = appConf.build || path.join(__dirname, 'build')
appConf.assets = appConf.assets || path.join(__dirname, 'assets')
appConf.assets_path = appConf.assets_path || '/assets'
+appConf.build_path = appConf.build_path || '/build'
appConf.timeout = appConf.timeout || 60
if (appConf.source.substr(0, 1) === '.') {
@@ -148,13 +149,26 @@ for (var l in appConf.locales) {
}
}
-// Start scanning source folder and building
-worker()
+
+//Printout configuration
+var c = {}
+c[appConfFile] = appConf
+console.log()
+console.log()
+console.log(yaml.safeDump(c))
+
// Start server to listen port 4000
express()
.use(appConf.build_path, express.static(appConf.build))
.use(appConf.assets_path, express.static(appConf.assets))
.listen(4000, function() {
+ console.log()
+ console.log()
console.log('Server started at http://localhost:4000')
+ console.log()
+ console.log()
+
+ // Start scanning source folder and building
+ worker()
})
|
Printout configuration; Config file relative path fix;
|
entu_ssg
|
train
|
08db68e1244a6b998cd81c7fbb7edce06c81ecc2
|
diff --git a/lib/lita/handler/common.rb b/lib/lita/handler/common.rb
index <HASH>..<HASH> 100644
--- a/lib/lita/handler/common.rb
+++ b/lib/lita/handler/common.rb
@@ -11,9 +11,13 @@ module Lita
# define +self.name+.
# @return [String] The handler's namespace.
# @raise [RuntimeError] If +self.name+ is not defined.
- def namespace
- if name
- Util.underscore(name.split("::").last)
+ def namespace(value = nil)
+ @namespace = value if value
+
+ string_name = defined?(@namespace) ? @namespace : name
+
+ if string_name
+ Util.underscore(string_name.split("::").last)
else
raise I18n.t("lita.handler.name_required")
end
diff --git a/spec/lita/handler/common_spec.rb b/spec/lita/handler/common_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lita/handler/common_spec.rb
+++ b/spec/lita/handler/common_spec.rb
@@ -8,7 +8,7 @@ describe Lita::Handler::Common, lita: true do
include Lita::Handler::Common
def self.name
- "Test"
+ "Lita::Handlers::FooBarBaz"
end
def self.default_config(config)
@@ -19,6 +19,27 @@ describe Lita::Handler::Common, lita: true do
subject { handler.new(robot) }
+ describe ".namespace" do
+ it "returns a snake cased namesapce for the handler based on class name" do
+ expect(handler.namespace).to eq("foo_bar_baz")
+ end
+
+ it "allows the namespace to be set with an object" do
+ handler = Class.new do
+ include Lita::Handler::Common
+
+ namespace "Lita::Handler::Common"
+ end
+
+ expect(handler.namespace).to eq("common")
+ end
+
+ it "raises an exception if the handler doesn't have a name to derive the namespace from" do
+ handler = Class.new { include Lita::Handler::Common }
+ expect { handler.namespace }.to raise_error
+ end
+ end
+
describe "#config" do
before do
Lita.register_handler(handler)
diff --git a/spec/lita/handler_spec.rb b/spec/lita/handler_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lita/handler_spec.rb
+++ b/spec/lita/handler_spec.rb
@@ -39,22 +39,6 @@ describe Lita::Handler, lita: true do
subject { described_class.new(robot) }
- describe ".namespace" do
- it "provides a snake cased namespace for the handler" do
- handler_class = Class.new(described_class) do
- def self.name
- "Lita::Handlers::FooBarBaz"
- end
- end
- expect(handler_class.namespace).to eq("foo_bar_baz")
- end
-
- it "raises an exception if the handler doesn't define self.name" do
- handler_class = Class.new(described_class)
- expect { handler_class.namespace }.to raise_error
- end
- end
-
describe "#http" do
it "returns a Faraday connection" do
expect(subject.http).to be_a(Faraday::Connection)
|
Move Handler.namespace spec to common mixin and allow it to be set manually.
|
litaio_lita
|
train
|
2648eacc057bacc1571f76a8cbd6954fe3c0a110
|
diff --git a/views/js/layout/search.js b/views/js/layout/search.js
index <HASH>..<HASH> 100644
--- a/views/js/layout/search.js
+++ b/views/js/layout/search.js
@@ -96,7 +96,14 @@ define(['jquery', 'layout/actions', 'ui/searchModal', 'core/store', 'context', '
criterias = criterias || { search: $('input', searchComponent.container).val() };
const url = searchComponent.container.data('url');
const rootClassUri = decodeURIComponent(urlHelper.parse(url).query.rootNode);
- const searchModalInstance = searchModal({ criterias, url, searchOnInit, rootClassUri });
+ const isResultPage = rootClassUri === 'http://www.tao.lu/Ontologies/TAODelivery.rdf#AssembledDelivery';
+ const searchModalInstance = searchModal({
+ criterias,
+ url,
+ searchOnInit,
+ rootClassUri,
+ hideResourceSelector: isResultPage
+ });
searchModalInstance.on('store-updated', manageSearchStoreUpdate);
searchModalInstance.on('refresh', uri => {
|
hide resource selector in search modal for results page
|
oat-sa_tao-core
|
train
|
425950c5c2eff35e166a06f4d8924445c3c83089
|
diff --git a/gridtk/script/jman.py b/gridtk/script/jman.py
index <HASH>..<HASH> 100644
--- a/gridtk/script/jman.py
+++ b/gridtk/script/jman.py
@@ -112,7 +112,7 @@ def submit(args):
if args.array is not None: kwargs['array'] = get_array(args.array)
if args.log_dir is not None: kwargs['log_dir'] = args.log_dir
if args.dependencies is not None: kwargs['dependencies'] = args.dependencies
- if args.queue != 'all.q': kwargs['hvmem'] = args.memory
+ if args.qname != 'all.q': kwargs['hvmem'] = args.memory
if args.parallel is not None:
kwargs['pe_opt'] = "pe_mth %d" % args.parallel
kwargs['memfree'] = "%d%s" % (int(args.memory.rstrip(string.ascii_letters)) * args.parallel, args.memory.lstrip(string.digits))
|
Corrected variable name in jman resubmit.
|
bioidiap_gridtk
|
train
|
0c07971e5c9335ab5087b6ab9ceaa8a225d02d6e
|
diff --git a/test/invalid-requires-test.js b/test/invalid-requires-test.js
index <HASH>..<HASH> 100644
--- a/test/invalid-requires-test.js
+++ b/test/invalid-requires-test.js
@@ -2,3 +2,5 @@
var foo = require('foo'),
// A comment
bar = require('bar');
+const baz = require('baz'),
+ fiz = require('fiz');
diff --git a/test/invalid-requires-test.output.js b/test/invalid-requires-test.output.js
index <HASH>..<HASH> 100644
--- a/test/invalid-requires-test.output.js
+++ b/test/invalid-requires-test.output.js
@@ -3,3 +3,6 @@ var foo = require('foo');
// A comment
var bar = require('bar');
+
+const baz = require('baz');
+const fiz = require('fiz');
diff --git a/transforms/invalid-requires.js b/transforms/invalid-requires.js
index <HASH>..<HASH> 100644
--- a/transforms/invalid-requires.js
+++ b/transforms/invalid-requires.js
@@ -15,8 +15,9 @@ module.exports = function(file, api) {
requireStatements.forEach(requireStatement => {
jscodeshift(requireStatement)
.replaceWith(requireStatement.value.declarations.map((declaration, i) => {
- var variableDeclaration =
- jscodeshift.variableDeclaration('var', [declaration]);
+ const kind = requireStatement.value.kind; // e.g. var or const
+ const variableDeclaration =
+ jscodeshift.variableDeclaration(kind, [declaration]);
if (i == 0) {
variableDeclaration.comments = requireStatement.value.comments;
|
Preserve kind of variable declaration in invalid-requires
I noticed that this was always using `var` even if `const` was used
before the transform. This commit aims to preserve the kind that was
used.
|
cpojer_js-codemod
|
train
|
9832a46b37aa5ce51ee3b9da7c7491f606338341
|
diff --git a/timepiece/views.py b/timepiece/views.py
index <HASH>..<HASH> 100644
--- a/timepiece/views.py
+++ b/timepiece/views.py
@@ -498,13 +498,13 @@ def view_person_time_sheet(request, user_id):
return HttpResponseForbidden('Forbidden')
from_date = utils.get_month_start(datetime.datetime.today()).date()
to_date = from_date + relativedelta(months=1)
- if request.user and request.user.has_perm('timepiece.view_entry_summary'):
- form = timepiece_forms.UserYearMonthForm
- else:
- form = timepiece_forms.YearMonthForm
+ can_view_summary = request.user and \
+ request.user.has_perm('timepiece.view_entry_summary')
+ form = timepiece_forms.UserYearMonthForm if can_view_summary else \
+ timepiece_forms.YearMonthForm
year_month_form = form(request.GET or None)
if year_month_form.is_valid():
- if isinstance(year_month_form, timepiece_forms.UserYearMonthForm):
+ if can_view_summary:
from_date, to_date, form_user = year_month_form.save()
is_update = request.GET.get('yearmonth', None)
if form_user and is_update:
|
refs #<I> - Adjusted form assignment
|
caktus_django-timepiece
|
train
|
c42ca8fb52e72a215f25d469567a9876206db098
|
diff --git a/tests/test_procar.py b/tests/test_procar.py
index <HASH>..<HASH> 100644
--- a/tests/test_procar.py
+++ b/tests/test_procar.py
@@ -130,7 +130,6 @@ class ProcarTestCase( unittest.TestCase ):
np.testing.assert_equal( combined_pcar.occupancy, np.vstack( ( pcar1.occupancy, pcar2.occupancy ) ) )
np.testing.assert_equal( combined_pcar.bands, np.vstack( ( pcar1.bands, pcar2.bands ) ) )
np.testing.assert_equal( combined_pcar.k_points, np.vstack( ( pcar1.k_points, pcar2.k_points ) ) )
- print( combined_pcar.k_points )
def test___add___spin_polarised_procars( self ):
pcar1 = procar.Procar()
diff --git a/vasppy/procar.py b/vasppy/procar.py
index <HASH>..<HASH> 100644
--- a/vasppy/procar.py
+++ b/vasppy/procar.py
@@ -158,6 +158,7 @@ class Procar:
new_procar.bands = np.concatenate( ( self.bands, other.bands ) )
new_procar.occupancy = np.concatenate( ( self.occupancy, other.occupancy ) )
new_procar.k_points = np.concatenate( ( self.k_points, other.k_points ) )
+ new_procar.sanity_check()
return new_procar
def parse_projections( self ):
|
Added sanity_check() to addition
|
bjmorgan_vasppy
|
train
|
946217a10119aa2abc666a18979940f642569f11
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ def read(fname):
setup(
name='zplgrf',
- version='1.5',
+ version='1.5.1',
description='Tools to work with ZPL GRF images and CUPS',
long_description=read('README.rst'),
author='Kyle MacFarlane',
diff --git a/src/zplgrf/__init__.py b/src/zplgrf/__init__.py
index <HASH>..<HASH> 100644
--- a/src/zplgrf/__init__.py
+++ b/src/zplgrf/__init__.py
@@ -432,7 +432,7 @@ class GRF(object):
]
if center_of_pixel:
- cmd += ['0 .setfilladjust']
+ cmd += ['0 0 .setfilladjust2']
if font_path and os.path.exists(font_path):
cmd += ['-I' + font_path]
|
Fix center of pixel with Ghostscript <I>+
|
kylemacfarlane_zplgrf
|
train
|
b9b2078e1807f0a1c7e28fcf0162b3ee2a721700
|
diff --git a/GEOparse/GEOparse.py b/GEOparse/GEOparse.py
index <HASH>..<HASH> 100755
--- a/GEOparse/GEOparse.py
+++ b/GEOparse/GEOparse.py
@@ -42,8 +42,9 @@ def get_GEO(geo=None, filepath=None, destdir="./", how='full',
destdir (:obj:`str`, optional): Directory to download data. Defaults to
None.
how (:obj:`str`, optional): GSM download mode. Defaults to "full".
- annotate_gpl (:obj:`bool`, optional): Annotate GPL object. Defaults to
- False.
+ annotate_gpl (:obj:`bool`, optional): Download the GPL annotation
+ instead of regular GPL. If not available, fallback to regular GPL
+ file. Defaults to False.
geotype (:obj:`str`, optional): Type of GEO entry. By default it is
inferred from the ID or the file name.
include_data (:obj:`bool`, optional): Full download of GPLs including
@@ -98,8 +99,9 @@ def get_GEO_file(geo, destdir=None, annotate_gpl=False, how="full",
geo (:obj:`str`): GEO database identifier.
destdir (:obj:`str`, optional): Directory to download data. Defaults to
None.
- annotate_gpl (:obj:`bool`, optional): Annotate GPL object. Defaults to
- False.
+ annotate_gpl (:obj:`bool`, optional): Download the GPL annotation
+ instead of regular GPL. If not available, fallback to regular GPL
+ file. Defaults to False.
how (:obj:`str`, optional): GSM download mode. Defaults to "full".
include_data (:obj:`bool`, optional): Full download of GPLs including
series and samples. Defaults to False.
|
docs: Explain better what is annotate_gpl
|
guma44_GEOparse
|
train
|
356560621f4865d2fba3c43e47fcb22cc4028e20
|
diff --git a/test/test-02-rule.js b/test/test-02-rule.js
index <HASH>..<HASH> 100644
--- a/test/test-02-rule.js
+++ b/test/test-02-rule.js
@@ -227,6 +227,7 @@ describe('rule', function()
{
sawRunning.must.be.true();
rule.logger.error.callCount.must.equal(3);
+ console.log(rule.logger.debug.args);
rule.logger.debug.callCount.must.equal(1);
done();
|
Log what's going on on travis.
|
ceejbot_jthoober
|
train
|
30fd03c60a5441f721a25b28f194c890ec75b477
|
diff --git a/packages/simpletest.org/index.php b/packages/simpletest.org/index.php
index <HASH>..<HASH> 100644
--- a/packages/simpletest.org/index.php
+++ b/packages/simpletest.org/index.php
@@ -2,21 +2,21 @@
require_once(dirname(__FILE__).'/package.php');
-$transform = "simpletest.org.xslt";
-$source_path = "../../docs/source/";
-$destination_path = "../../docs/simpletest.org/";
+$source_path = dirname(__FILE__).'/../../docs/source/';
+$destination_path = dirname(__FILE__).'/../../docs/simpletest.org/';
$languages = array("en/", "fr/", "../../");
foreach ($languages as $language) {
- $dir = opendir($source_path.$language);
- while (($file = readdir($dir)) !== false) {
- if (is_file($source_path.$language.$file) and preg_match("/\.xml$/", $file)) {
- $source = simplexml_load_file($source_path.$language.$file, "SimpleTestXMLElement");
- $destination = $source->destination("map.xml");
-
+ $dir = opendir($source_path.$language);
+
+ while (($file = readdir($dir)) !== false) {
+ if (is_file($source_path.$language.$file) and preg_match("/\.xml$/", $file)) {
+ $source = simplexml_load_file($source_path.$language.$file, "SimpleTestXMLElement");
+ $destination = $source->destination(dirname(__FILE__).'/map.xml');
+
if (!empty($destination)) {
- $page = file_get_contents('template.html');
+ $page = file_get_contents(dirname(__FILE__).'/template.html');
$page = str_replace('KEYWORDS', $source->keywords(), $page);
$page = str_replace('TITLE', $source->title(), $page);
@@ -24,7 +24,7 @@ foreach ($languages as $language) {
$page = str_replace('INTERNAL', $source->internal(), $page);
$page = str_replace('EXTERNAL', $source->external(), $page);
- $links = $source->links("map.xml");
+ $links = $source->links(dirname(__FILE__).'/map.xml');
foreach ($links as $category => $link) {
$page = str_replace("LINKS_".strtoupper($category), $link, $page);
}
@@ -45,12 +45,10 @@ foreach ($languages as $language) {
$synchronisation = new PackagingSynchronisation($source_path.$language.$file);
$result .= " ".$synchronisation->result();
-
-
- echo $destination_path.$destination." : ".$result."<br />";
+
+ echo $destination_path.$destination." : ".$result."\n";
}
- }
+ }
}
closedir($dir);
}
-?>
\ No newline at end of file
|
Cleaning up the file creating the web site contents
|
simpletest_simpletest
|
train
|
4ba70687e6a1d5dd21f65579bcd061d2968d292a
|
diff --git a/lib/ahoy_email/engine.rb b/lib/ahoy_email/engine.rb
index <HASH>..<HASH> 100644
--- a/lib/ahoy_email/engine.rb
+++ b/lib/ahoy_email/engine.rb
@@ -5,7 +5,7 @@ module AhoyEmail
initializer "ahoy_email" do |app|
AhoyEmail.secret_token ||= begin
creds =
- if app.respond_to?(:credentials)
+ if app.respond_to?(:credentials) && app.credentials.secret_key_base
app.credentials
elsif app.respond_to?(:secrets)
app.secrets
|
Fixed secret token for Rails <I>, take 2
|
ankane_ahoy_email
|
train
|
10c64a472f2f19a5e485bdac7d5106a76aeb29a5
|
diff --git a/actionpack/lib/action_dispatch/middleware/executor.rb b/actionpack/lib/action_dispatch/middleware/executor.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_dispatch/middleware/executor.rb
+++ b/actionpack/lib/action_dispatch/middleware/executor.rb
@@ -9,7 +9,7 @@ module ActionDispatch
end
def call(env)
- state = @executor.run!
+ state = @executor.run!(reset: true)
begin
response = @app.call(env)
returned = response << ::Rack::BodyProxy.new(response.pop) { state.complete! }
diff --git a/actionpack/test/dispatch/executor_test.rb b/actionpack/test/dispatch/executor_test.rb
index <HASH>..<HASH> 100644
--- a/actionpack/test/dispatch/executor_test.rb
+++ b/actionpack/test/dispatch/executor_test.rb
@@ -119,6 +119,27 @@ class ExecutorTest < ActiveSupport::TestCase
assert_not defined?(@in_shared_context) # it's not in the test itself
end
+ def test_body_abandonned
+ total = 0
+ ran = 0
+ completed = 0
+
+ executor.to_run { total += 1; ran += 1 }
+ executor.to_complete { total += 1; completed += 1}
+
+ stack = middleware(proc { [200, {}, "response"] })
+
+ requests_count = 5
+
+ requests_count.times do
+ stack.call({})
+ end
+
+ assert_equal (requests_count * 2) - 1, total
+ assert_equal requests_count, ran
+ assert_equal requests_count - 1, completed
+ end
+
private
def call_and_return_body(&block)
app = middleware(block || proc { [200, {}, "response"] })
diff --git a/activesupport/lib/active_support/execution_wrapper.rb b/activesupport/lib/active_support/execution_wrapper.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/execution_wrapper.rb
+++ b/activesupport/lib/active_support/execution_wrapper.rb
@@ -64,18 +64,21 @@ module ActiveSupport
# after the work has been performed.
#
# Where possible, prefer +wrap+.
- def self.run!
- if active?
- Null
+ def self.run!(reset: false)
+ if reset
+ lost_instance = IsolatedExecutionState.delete(active_key)
+ lost_instance&.complete!
else
- new.tap do |instance|
- success = nil
- begin
- instance.run!
- success = true
- ensure
- instance.complete! unless success
- end
+ return Null if active?
+ end
+
+ new.tap do |instance|
+ success = nil
+ begin
+ instance.run!
+ success = true
+ ensure
+ instance.complete! unless success
end
end
end
@@ -105,27 +108,20 @@ module ActiveSupport
end
end
- class << self # :nodoc:
- attr_accessor :active
- end
-
def self.error_reporter
@error_reporter ||= ActiveSupport::ErrorReporter.new
end
- def self.inherited(other) # :nodoc:
- super
- other.active = Concurrent::Hash.new
+ def self.active_key # :nodoc:
+ @active_key ||= :"active_execution_wrapper_#{object_id}"
end
- self.active = Concurrent::Hash.new
-
def self.active? # :nodoc:
- @active[IsolatedExecutionState.unique_id]
+ IsolatedExecutionState.key?(active_key)
end
def run! # :nodoc:
- self.class.active[IsolatedExecutionState.unique_id] = true
+ IsolatedExecutionState[self.class.active_key] = self
run
end
@@ -140,7 +136,7 @@ module ActiveSupport
def complete!
complete
ensure
- self.class.active.delete(IsolatedExecutionState.unique_id)
+ IsolatedExecutionState.delete(self.class.active_key)
end
def complete # :nodoc:
diff --git a/activesupport/lib/active_support/isolated_execution_state.rb b/activesupport/lib/active_support/isolated_execution_state.rb
index <HASH>..<HASH> 100644
--- a/activesupport/lib/active_support/isolated_execution_state.rb
+++ b/activesupport/lib/active_support/isolated_execution_state.rb
@@ -42,6 +42,14 @@ module ActiveSupport
state[key] = value
end
+ def key?(key)
+ state.key?(key)
+ end
+
+ def delete(key)
+ state.delete(key)
+ end
+
def clear
state.clear
end
|
ActionDispatch::Executor don't fully trust `body#close`
Under certain circumstances, the middleware isn't informed that the
response body has been fully closed which result in request state not
being fully reset before the next request.
[CVE-<I>-<I>]
|
rails_rails
|
train
|
17a9ce5dd6335ebadac0b1957ec05610520c2a9f
|
diff --git a/admin-ui/app/scripts/controllers/ExampleController.js b/admin-ui/app/scripts/controllers/ExampleController.js
index <HASH>..<HASH> 100644
--- a/admin-ui/app/scripts/controllers/ExampleController.js
+++ b/admin-ui/app/scripts/controllers/ExampleController.js
@@ -29,11 +29,11 @@ angular.module('upsConsole').controller('ExampleController',
};
$scope.variantType = $routeParams.variantType;
$scope.applicationId = $routeParams.applicationId;
+ $scope.currentLocation = ContextProvider.contextPath();
if (typeof $routeParams.variantId !== 'undefined') {
variants.get(params, function (variant) {
$scope.variant = variant;
- $scope.currentLocation = ContextProvider.contextPath();
});
} else {
pushApplication.get(params, function (application) {
|
AGPUSH-<I>: moved currentLocation initialization above conditional block
|
aerogear_aerogear-unifiedpush-server
|
train
|
b6acd9da1e1a4b00ac1793ce4a6ddec251bd151f
|
diff --git a/spec/link_shrink/shrinker_base_spec.rb b/spec/link_shrink/shrinker_base_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/link_shrink/shrinker_base_spec.rb
+++ b/spec/link_shrink/shrinker_base_spec.rb
@@ -36,7 +36,8 @@ describe LinkShrink::Shrinkers::Base do
context 'when api_key is not found' do
it 'returns base_url' do
- link_shrink.stub(:sub_klass).and_return('Blahblah')
+ link_shrink.stub(:sub_klass).and_return('Blahblahblah')
+ link_shrink.stub(:api_key?).and_return(false)
link_shrink.stub(:base_url).and_return('http://blah.io/api')
expect(link_shrink.api_url).to eq('http://blah.io/api')
end
@@ -51,7 +52,7 @@ describe LinkShrink::Shrinkers::Base do
describe '#api_key?' do
it 'returns false when API is not found' do
- link_shrink.stub(:sub_klass).and_return('blahblah')
+ link_shrink.stub(:sub_klass).and_return('blahbla')
expect(link_shrink.api_key?).to be_false
end
end
@@ -59,7 +60,7 @@ describe LinkShrink::Shrinkers::Base do
describe '#api_key' do
context 'when not found' do
it 'returns nil' do
- link_shrink.stub(:sub_klass).and_return('BlahBlah')
+ link_shrink.stub(:sub_klass).and_return('Blahh')
expect(link_shrink.api_key).to eq(nil)
end
end
|
Fix base spec issue with sub_klass names
|
jonahoffline_link_shrink
|
train
|
9ebc555ea619ddfb29556eb73c6b9a6be4116950
|
diff --git a/dev/com.ibm.ws.session.cache/src/com/ibm/ws/session/store/cache/serializable/SessionKey.java b/dev/com.ibm.ws.session.cache/src/com/ibm/ws/session/store/cache/serializable/SessionKey.java
index <HASH>..<HASH> 100644
--- a/dev/com.ibm.ws.session.cache/src/com/ibm/ws/session/store/cache/serializable/SessionKey.java
+++ b/dev/com.ibm.ws.session.cache/src/com/ibm/ws/session/store/cache/serializable/SessionKey.java
@@ -52,7 +52,7 @@ public class SessionKey implements Serializable {
@Override
public int hashCode() {
- return id.hashCode();
+ return id.hashCode() + app.hashCode();
}
@Override
|
Issue #<I> code review fix - app name missing from hash code
|
OpenLiberty_open-liberty
|
train
|
433f6830f805c2cad59b07addb48fcb1db07e407
|
diff --git a/pkg/proxy/iptables/proxier.go b/pkg/proxy/iptables/proxier.go
index <HASH>..<HASH> 100644
--- a/pkg/proxy/iptables/proxier.go
+++ b/pkg/proxy/iptables/proxier.go
@@ -471,6 +471,11 @@ func buildServiceMap(allServices []api.Service, oldServiceMap proxyServiceMap) (
glog.V(3).Infof("Skipping service %s due to clusterIP = %q", svcName, service.Spec.ClusterIP)
continue
}
+ // Even if ClusterIP is set, ServiceTypeExternalName services don't get proxied
+ if service.Spec.Type == api.ServiceTypeExternalName {
+ glog.V(3).Infof("Skipping service %s due to Type=ExternalName", svcName)
+ continue
+ }
for i := range service.Spec.Ports {
servicePort := &service.Spec.Ports[i]
diff --git a/pkg/proxy/iptables/proxier_test.go b/pkg/proxy/iptables/proxier_test.go
index <HASH>..<HASH> 100644
--- a/pkg/proxy/iptables/proxier_test.go
+++ b/pkg/proxy/iptables/proxier_test.go
@@ -1053,4 +1053,30 @@ func TestBuildServiceMapServiceHeadless(t *testing.T) {
}
}
+func TestBuildServiceMapServiceTypeExternalName(t *testing.T) {
+ services := []api.Service{
+ makeTestService("somewhere-else", "external-name", func(svc *api.Service) {
+ svc.Spec.Type = api.ServiceTypeExternalName
+ svc.Spec.ClusterIP = "172.16.55.4" // Should be ignored
+ svc.Spec.ExternalName = "foo2.bar.com"
+ svc.Spec.Ports = addTestPort(svc.Spec.Ports, "blah", "UDP", 1235, 5321, 0)
+ }),
+ }
+
+ serviceMap, hcAdd, hcDel, staleUDPServices := buildServiceMap(services, make(proxyServiceMap))
+ if len(serviceMap) != 0 {
+ t.Errorf("expected service map length 0, got %v", serviceMap)
+ }
+ // No proxied services, so no healthchecks
+ if len(hcAdd) != 0 {
+ t.Errorf("expected healthcheck add length 0, got %v", hcAdd)
+ }
+ if len(hcDel) != 0 {
+ t.Errorf("expected healthcheck del length 0, got %v", hcDel)
+ }
+ if len(staleUDPServices) != 0 {
+ t.Errorf("expected stale UDP services length 0, got %v", staleUDPServices)
+ }
+}
+
// TODO(thockin): add *more* tests for syncProxyRules() or break it down further and test the pieces.
|
proxy/iptables: don't proxy ExternalName services
The API docs say:
// ServiceTypeExternalName means a service consists of only a reference to
// an external name that kubedns or equivalent will return as a CNAME
// record, with no exposing or proxying of any pods involved.
which implies that ExternalName services should be ignored for proxy
purposes.
|
kubernetes_kubernetes
|
train
|
25d6f3afd1b73ebf7a942d2ca84605be81e76f7b
|
diff --git a/zipline/protocol.py b/zipline/protocol.py
index <HASH>..<HASH> 100644
--- a/zipline/protocol.py
+++ b/zipline/protocol.py
@@ -142,7 +142,7 @@ class BarData(object):
if self._contains_override:
return self._contains_override(name)
else:
- return name in self.__dict__
+ return name in self._data
def has_key(self, name):
"""
|
BUG: Fix contains logic when override is not used.
The internal _data should be checked, not the __dict__
|
quantopian_zipline
|
train
|
0a223c62b02754e442e31b001c7c2a3653f2f6cd
|
diff --git a/lib/raml/method.rb b/lib/raml/method.rb
index <HASH>..<HASH> 100644
--- a/lib/raml/method.rb
+++ b/lib/raml/method.rb
@@ -25,6 +25,7 @@ module Raml
@children << Parameter::QueryParameter.new(name, query_parameter_data)
end
when 'body'
+ validate_body value
value.each do |name, body_data|
@children << Body.new(name, body_data)
end
@@ -144,5 +145,17 @@ module Raml
raise InvalidProperty, 'queryParameters property must be a map with map values' unless
query_parameters.values.all? {|v| v.is_a? Hash }
end
+
+ def validate_body(body)
+ raise InvalidProperty, 'body property must be a map' unless
+ body.is_a? Hash
+
+ raise InvalidProperty, 'body property must be a map with string keys' unless
+ body.keys.all? {|k| k.is_a? String }
+
+ raise InvalidProperty, 'body property must be a map with map values' unless
+ body.values.all? {|v| v.is_a? Hash }
+ end
+
end
end
diff --git a/test/raml/method_spec.rb b/test/raml/method_spec.rb
index <HASH>..<HASH> 100644
--- a/test/raml/method_spec.rb
+++ b/test/raml/method_spec.rb
@@ -182,7 +182,41 @@ describe Raml::Method do
it { expect { subject }.to raise_error Raml::InvalidProperty, /queryParameters/ }
end
end
-
+
+ context 'when a body property is given' do
+ context 'when the body property is well formed' do
+ let(:data) {
+ YAML.load(
+ %q(
+ description: Create a Job
+ body:
+ text/xml:
+ schema: job_xml_schema
+ application/json:
+ schema: json_xml_schema
+ )
+ )
+ }
+
+ it { expect { subject }.to_not raise_error }
+ it 'stores all as Raml::Body instances' do
+ expect( subject.bodies ).to all( be_a Raml::Body )
+ subject.bodies.map(&:media_type).should contain_exactly('text/xml', 'application/json')
+ end
+ end
+ context 'when the body property is not a map' do
+ before { data['body'] = 1 }
+ it { expect { subject }.to raise_error Raml::InvalidProperty, /body/ }
+ end
+ context 'when the body property is a map with non-string keys' do
+ before { data['body'] = { 1 => {}} }
+ it { expect { subject }.to raise_error Raml::InvalidProperty, /body/ }
+ end
+ context 'when the body property is a map with non-map values' do
+ before { data['body'] = { 'text/xml' => 1 } }
+ it { expect { subject }.to raise_error Raml::InvalidProperty, /body/ }
+ end
+ end
end
describe "#document" do
|
Validate Method's body property.
|
coub_raml_ruby
|
train
|
b15ce7c4f90ff6919c679ca7f05cee9eed2bac87
|
diff --git a/src/com/google/javascript/jscomp/PeepholeRemoveDeadCode.java b/src/com/google/javascript/jscomp/PeepholeRemoveDeadCode.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/PeepholeRemoveDeadCode.java
+++ b/src/com/google/javascript/jscomp/PeepholeRemoveDeadCode.java
@@ -123,12 +123,24 @@ class PeepholeRemoveDeadCode extends AbstractPeepholeOptimization {
private Node tryFoldLabel(Node n) {
String labelName = n.getFirstChild().getString();
Node stmt = n.getLastChild();
- if (stmt.isEmpty() || (stmt.isBlock() && !stmt.hasChildren())) {
+ if (stmt.isEmpty()) {
reportChangeToEnclosingScope(n);
n.detach();
return null;
}
+ if (stmt.isBlock() && !stmt.hasChildren()) {
+ reportChangeToEnclosingScope(n);
+ if (n.getParent().isLabel()) {
+ // If the parent is itself a label, replace this label
+ // with its contained block to keep the AST in a valid state.
+ n.replaceWith(stmt.detach());
+ } else {
+ n.detach();
+ }
+ return null;
+ }
+
Node child = getOnlyInterestingChild(stmt);
if (child != null) {
stmt = child;
@@ -183,23 +195,27 @@ class PeepholeRemoveDeadCode extends AbstractPeepholeOptimization {
// Removes TRYs that had its CATCH removed and/or empty FINALLY.
if (!catchBlock.hasChildren() && (finallyBlock == null || !finallyBlock.hasChildren())) {
+ checkState(!n.getParent().isLabel());
body.detach();
n.replaceWith(body);
reportChangeToEnclosingScope(body);
return body;
}
- // Only leave FINALLYs if TRYs are empty
+ // Only leave FINALLYs if TRYs are not empty
if (!body.hasChildren()) {
NodeUtil.redeclareVarsInsideBranch(catchBlock);
reportChangeToEnclosingScope(n);
if (finallyBlock != null) {
finallyBlock.detach();
+ checkState(!n.getParent().isLabel());
n.replaceWith(finallyBlock);
+ return finallyBlock;
} else {
+ checkState(!n.getParent().isLabel());
n.detach();
+ return null;
}
- return finallyBlock;
}
return n;
diff --git a/test/com/google/javascript/jscomp/PeepholeRemoveDeadCodeTest.java b/test/com/google/javascript/jscomp/PeepholeRemoveDeadCodeTest.java
index <HASH>..<HASH> 100644
--- a/test/com/google/javascript/jscomp/PeepholeRemoveDeadCodeTest.java
+++ b/test/com/google/javascript/jscomp/PeepholeRemoveDeadCodeTest.java
@@ -1402,6 +1402,8 @@ public final class PeepholeRemoveDeadCodeTest extends CompilerTestCase {
fold("try {} catch (e) {}", "");
fold("try {} finally {}", "");
fold("try {} catch (e) {} finally {}", "");
+ fold("L1:try {} catch (e) {} finally {}", "");
+ fold("L2:L1:try {} catch (e) {} finally {}", "");
}
@Test
|
Correct useless label optimization when a label is nested within a label.
Fixes github issue #<I>
PiperOrigin-RevId: <I>
|
google_closure-compiler
|
train
|
962f3ee2b44d8b30cb6c1467798af5cad8afeddf
|
diff --git a/src/Resources/Register.php b/src/Resources/Register.php
index <HASH>..<HASH> 100644
--- a/src/Resources/Register.php
+++ b/src/Resources/Register.php
@@ -56,6 +56,14 @@ class Register extends BaseRestResource
'password_confirmation' => ArrayUtils::get($payload, 'password_confirmation', $password)
];
+ if(empty($data['first_name'])){
+ list($username, $domain) = explode('@', $data['email']);
+ $data['first_name'] = $username;
+ }
+ if(empty($data['name'])){
+ $data['name'] = $data['first_name'] . ' ' . $data['last_name'];
+ }
+
ArrayUtils::removeNull($data);
/** @var \Illuminate\Validation\Validator $validator */
@@ -69,9 +77,13 @@ class Register extends BaseRestResource
$user = $registrar->create($data);
if($login) {
- Session::setUserInfoWithJWT($user);
+ if($user->confirm_code !== 'y'){
+ return ['success' => true, 'confirmation_required' => true];
+ } else {
+ Session::setUserInfoWithJWT($user);
- return ['success' => true, 'session_token' => Session::getSessionToken()];
+ return ['success' => true, 'session_token' => Session::getSessionToken()];
+ }
} else {
return ['success' => true];
}
|
User registration bugfix: login is not allowed if email confirmation is required. The first name and name fields are now derived from the email address if not provided.
|
dreamfactorysoftware_df-user
|
train
|
ed951bf30bb1405a312cc2f87aadb30cf013c078
|
diff --git a/lib/fastlane/lane_manager.rb b/lib/fastlane/lane_manager.rb
index <HASH>..<HASH> 100644
--- a/lib/fastlane/lane_manager.rb
+++ b/lib/fastlane/lane_manager.rb
@@ -127,6 +127,7 @@ module Fastlane
end
def self.load_dot_env(env)
+ return if Dir["*.env*"].count == 0
require 'dotenv'
Actions.lane_context[Actions::SharedValues::ENVIRONMENT] = env if env
|
Don't require dotenv if there are no files
|
fastlane_fastlane
|
train
|
e79f825b84739fc27501b3e201f2eed89b0f3157
|
diff --git a/src/base/FieldProcessor.php b/src/base/FieldProcessor.php
index <HASH>..<HASH> 100644
--- a/src/base/FieldProcessor.php
+++ b/src/base/FieldProcessor.php
@@ -541,6 +541,35 @@ class FieldProcessor extends Processor
$fieldObj['blockTypes'] = $blockTypesObj;
}
unset($fieldObj['contentTable']);
+ } else if ($item instanceof Neo) {
+ $blockTypesObj = [];
+ $blockTypeGroupsObj = [];
+ /**
+ * @var Neo $item
+ */
+ foreach ($item->getGroups() as $group) {
+ /* @var NeoBlockTypeGroup $blockType */
+ $blockTypeGroupsObj[] = [
+ 'name' => $group->name,
+ 'sortOrder' => (int) $group->sortOrder
+ ];
+ }
+ $fieldObj['groups'] = $blockTypeGroupsObj;
+ foreach ($item->getBlockTypes() as $blockType) {
+ /* @var NeoBlockType $blockType */
+ $blockTypesObj[] = [
+ 'name' => $blockType->name,
+ 'handle' => $blockType->handle,
+ 'sortOrder' => (int) $blockType->sortOrder,
+ 'maxBlocks' => (int) $blockType->maxBlocks,
+ 'childBlocks' => Json::decodeIfJson((string) $blockType->childBlocks),
+ 'maxChildBlocks' => (int) $blockType->maxBlocks,
+ 'topLevel' => (bool) $blockType->topLevel,
+ 'fieldLayout' => $this->exportFieldLayout($blockType->getFieldLayout()),
+ 'requiredFields' => $this->exportRequiredFields($blockType->getFieldLayout()),
+ ];
+ }
+ $fieldObj['blockTypes'] = $blockTypesObj;
} else if ($item instanceof Date) {
/**
* @var Date $item
@@ -560,34 +589,6 @@ class FieldProcessor extends Processor
);
unset($fieldObj['showDate'], $fieldObj['showTime']);
}
- } else if ($item instanceof Neo) {
- $blockTypesObj = [];
- /**
- * @var Neo $item
- */
- foreach ($item->getBlockTypes() as $blockType) {
- /* @var NeoBlockType $blockType */
- $blockTypeObj = [
- 'name' => $blockType->name,
- 'handle' => $blockType->handle,
- 'maxBlocks' => $blockType->maxBlocks,
- 'childBlocks' => Json::decodeIfJson($blockType->childBlocks),
- 'maxChildBlocks' => $blockType->maxBlocks,
- 'topLevel' => (bool) $blockType->topLevel,
- 'fieldLayout' => [],
- ];
- /* @var FieldLayout $fieldLayout */
- $fieldLayout = $blockType->getFieldLayout();
- foreach ($fieldLayout->getTabs() as $tab) {
- $blockTypeObj['fieldLayout'][$tab->name] = [];
- foreach ($tab->getFields() as $field) {
- $blockTypeObj['fieldLayout'][$tab->name][] = $field->handle;
- }
- }
- $blockTypesObj[] = $blockTypeObj;
- }
- $fieldObj['blockTypes'] = $blockTypesObj;
-// Craft::dd($item);
} else if ($item instanceof SuperTableField) {
/**
* @var SuperTableField $item
|
Export requiredFields and switch field layout exporting to use the Processor helper function.
|
Pennebaker_craft-architect
|
train
|
95f24de27c9eb0b12115ca4ce89b190b1a9566e0
|
diff --git a/findbugs/src/java/edu/umd/cs/findbugs/detect/BadUseOfReturnValue.java b/findbugs/src/java/edu/umd/cs/findbugs/detect/BadUseOfReturnValue.java
index <HASH>..<HASH> 100644
--- a/findbugs/src/java/edu/umd/cs/findbugs/detect/BadUseOfReturnValue.java
+++ b/findbugs/src/java/edu/umd/cs/findbugs/detect/BadUseOfReturnValue.java
@@ -62,8 +62,10 @@ public class BadUseOfReturnValue extends BytecodeScanningDetector {
}
if (seen == INVOKEVIRTUAL &&
- getNameConstantOperand().equals("readLine")
- && getSigConstantOperand().equals("()Ljava/lang/String;"))
+ getNameConstantOperand().equals("readLine")
+ && getSigConstantOperand().equals("()Ljava/lang/String;")
+ && !getClassConstantOperand().equals("java/io/LineNumberReader")
+ )
readLineOnTOS = true;
else if (readLineOnTOS) {
if (seen == IFNULL || seen == IFNONNULL)
|
don't report RV_DONT_JUST_NULL_CHECK_READLINE for LineNumberReader
git-svn-id: <URL>
|
spotbugs_spotbugs
|
train
|
add15235d73cc0ea86c2deaf53e976be3d808849
|
diff --git a/src/org/kapott/hbci/passport/AbstractPinTanPassport.java b/src/org/kapott/hbci/passport/AbstractPinTanPassport.java
index <HASH>..<HASH> 100644
--- a/src/org/kapott/hbci/passport/AbstractPinTanPassport.java
+++ b/src/org/kapott/hbci/passport/AbstractPinTanPassport.java
@@ -260,7 +260,7 @@ public abstract class AbstractPinTanPassport
             // einen dialog-restart fordern, weil während eines dialoges
// das secmech nicht gewechselt werden darf
restart_needed=true;
- HBCIUtils.log("autosecfunc: after this dialog-init we had to change selected pintan method, so a restart of this dialog is needed", HBCIUtils.LOG_INFO);
+ HBCIUtils.log("autosecfunc: after this dialog-init we had to change selected pintan method from "+oldTANMethod+" to "+updatedTANMethod+", so a restart of this dialog is needed", HBCIUtils.LOG_INFO);
}
}
|
Improve logging for secmech change
|
adorsys_hbci4java-adorsys
|
train
|
b994e52cdc51c18ca3a3913e5930869ef1a3300f
|
diff --git a/openquake/commonlib/tests/oqvalidation_test.py b/openquake/commonlib/tests/oqvalidation_test.py
index <HASH>..<HASH> 100644
--- a/openquake/commonlib/tests/oqvalidation_test.py
+++ b/openquake/commonlib/tests/oqvalidation_test.py
@@ -331,6 +331,15 @@ class OqParamTestCase(unittest.TestCase):
oq.set_risk_imtls(rm)
self.assertIn("Unknown IMT: ' SA(0.1)'", str(ctx.exception))
+ def test_invalid_loss_ratios(self):
+ with self.assertRaises(ValueError) as ctx:
+ OqParam(calculation_mode='event_based',
+ sites='0.1 0.2',
+ inputs=dict(structural_vulnerability=None,
+ nonstructural_vulnerability=None),
+ loss_ratios="{'structural': [.1, .2]}").validate()
+ self.assertIn('loss types in the loss_ratios', str(ctx.exception))
+
def test_disaggregation(self):
with self.assertRaises(ValueError) as ctx:
OqParam(
|
Added test for invalid loss ratios
|
gem_oq-engine
|
train
|
582a11cad3fd0292f17c4b9aecd335eb72434ef9
|
diff --git a/example/client-side/webpack.config.js b/example/client-side/webpack.config.js
index <HASH>..<HASH> 100644
--- a/example/client-side/webpack.config.js
+++ b/example/client-side/webpack.config.js
@@ -24,7 +24,7 @@ module.exports = {
loaders: [{
test: /\.js$/,
loaders: [
- 'react-hot', 'babel-loader?stage=0&cacheDirectory=true'
+ 'react-hot', 'babel-loader?cacheDirectory=true'
],
exclude: /node_modules/
}]
|
internal: remove stage=0 from babel-loader options in client-side example
|
kodyl_react-document-meta
|
train
|
d18ecaf1e3780b7dec0045f5d289f58a70e4f0f3
|
diff --git a/xwiki-commons-core/xwiki-commons-logging/xwiki-commons-logging-common/src/main/java/org/xwiki/logging/internal/tail/AbstractLoggerTail.java b/xwiki-commons-core/xwiki-commons-logging/xwiki-commons-logging-common/src/main/java/org/xwiki/logging/internal/tail/AbstractLoggerTail.java
index <HASH>..<HASH> 100644
--- a/xwiki-commons-core/xwiki-commons-logging/xwiki-commons-logging-common/src/main/java/org/xwiki/logging/internal/tail/AbstractLoggerTail.java
+++ b/xwiki-commons-core/xwiki-commons-logging/xwiki-commons-logging-common/src/main/java/org/xwiki/logging/internal/tail/AbstractLoggerTail.java
@@ -38,7 +38,9 @@ public abstract class AbstractLoggerTail extends AbstractLogger implements Logge
public void log(Logger logger)
{
for (LogEvent logEvent : this) {
- logEvent.log(logger);
+ if (logEvent != null) {
+ logEvent.log(logger);
+ }
}
}
}
|
XCOMMONS-<I>: Live storage of the job log
|
xwiki_xwiki-commons
|
train
|
14f0b02600484265f462832b0d17c5e1ca38d428
|
diff --git a/src/components/Editor/Breakpoints.js b/src/components/Editor/Breakpoints.js
index <HASH>..<HASH> 100644
--- a/src/components/Editor/Breakpoints.js
+++ b/src/components/Editor/Breakpoints.js
@@ -57,9 +57,7 @@ class Breakpoints extends Component<Props> {
}
}
-export default connect(
- state => ({
- breakpoints: getVisibleBreakpoints(state),
- selectedSource: getSelectedSource(state)
- }),
-)(Breakpoints);
+export default connect(state => ({
+ breakpoints: getVisibleBreakpoints(state),
+ selectedSource: getSelectedSource(state)
+}))(Breakpoints);
diff --git a/src/components/Editor/Tabs.js b/src/components/Editor/Tabs.js
index <HASH>..<HASH> 100644
--- a/src/components/Editor/Tabs.js
+++ b/src/components/Editor/Tabs.js
@@ -427,7 +427,7 @@ class SourceTabs extends PureComponent<Props, State> {
const Panel = <ul>{hiddenSourceTabs.map(this.renderDropdownSource)}</ul>;
- return <Dropdown panel={Panel} icon={"»"}/>;
+ return <Dropdown panel={Panel} icon={"»"} />;
}
renderStartPanelToggleButton() {
|
Reformat some files (#<I>)
|
firefox-devtools_debugger
|
train
|
7c381feb2a67efec92dfefa3f27748686d1ebfef
|
diff --git a/src/PatternLab/Console/Commands/ServerCommand.php b/src/PatternLab/Console/Commands/ServerCommand.php
index <HASH>..<HASH> 100644
--- a/src/PatternLab/Console/Commands/ServerCommand.php
+++ b/src/PatternLab/Console/Commands/ServerCommand.php
@@ -24,8 +24,9 @@ class ServerCommand extends Command {
$this->command = "server";
Console::setCommand($this->command,"Start the PHP-based server","The server command will start PHP's web server for you.","s");
- Console::setCommandOption($this->command,"host:","Provide a custom hostname. Default value is <path>localhost</path>.","To use a custom hostname:","","<host>");
- Console::setCommandOption($this->command,"port:","Provide a custom port. Default value is <path>8080</path>.","To use a custom port:","","<port>");
+ Console::setCommandOption($this->command,"host:","Provide a custom hostname. Default value is <path>localhost</path>.","To use a custom hostname and the default port:","","<host>");
+ Console::setCommandOption($this->command,"port:","Provide a custom port. Default value is <path>8080</path>.","To use a custom port and the default hostname:","","<port>");
+ Console::setCommandSample($this->command,"To provide both a custom hostname and port:","--host <host> --port <port>");
}
|
adding host and port options to --server command
|
pattern-lab_patternlab-php-core
|
train
|
02904e20775cdcd175bfcefc32a80d1dc420b337
|
diff --git a/phoebe/backend/universe.py b/phoebe/backend/universe.py
index <HASH>..<HASH> 100644
--- a/phoebe/backend/universe.py
+++ b/phoebe/backend/universe.py
@@ -4034,7 +4034,7 @@ class PhysicalBody(Body):
Reset the mesh to its original position.
"""
columns = self.mesh.dtype.names
-
+
# All column names starting with _o_ are mapped to the ones without
# _o_.
self.mesh['center'] = self.mesh['_o_center']
@@ -4171,7 +4171,7 @@ class PhysicalBody(Body):
#-- replace old triangles with newly subdivided ones, but remember the
# old ones if this is the first time we subdivide the mesh
if self.subdivision['orig'] is None:
- self.subdivision['orig'] = self.mesh.copy()
+ self.subdivision['orig'] = self.mesh#.copy()
self.mesh = np.hstack([self.mesh[-partial],subdivided])
if subtype==1:
#self.update_mesh(self.mesh['partial'])
diff --git a/phoebe/dynamics/keplerorbit.py b/phoebe/dynamics/keplerorbit.py
index <HASH>..<HASH> 100644
--- a/phoebe/dynamics/keplerorbit.py
+++ b/phoebe/dynamics/keplerorbit.py
@@ -1982,7 +1982,7 @@ def place_in_binary_orbit_old2(self,time):
component=component, t0type=t0type)
#-- we need a new copy of the mesh
- mesh = self.mesh.copy()
+ mesh = self.mesh#.copy()
#-- modify velocity vectors due to binarity and rotation within the orbit
# rotational velocity
@@ -2060,7 +2060,7 @@ def place_in_binary_orbit(self,time):
component=component, t0type=t0type)
#-- we need a new copy of the mesh
- mesh = self.mesh.copy()
+ mesh = self.mesh#.copy()
#-- modify velocity vectors due to binarity and rotation within the orbit
# rotational velocity
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -56,14 +56,16 @@ ext_modules = [
sources = ['phoebe/algorithms/freflection.f']),
Extension('phoebe.algorithms.fraytracing',
sources = ['phoebe/algorithms/fraytracing_double.f']),
- Extension('phoebe.algorithms.fconvex',
- sources = ['phoebe/algorithms/fconvex.f']),
+ #Extension('phoebe.algorithms.fconvex',
+ # sources = ['phoebe/algorithms/fconvex.f']),
Extension('phoebe.algorithms.ceclipse',
sources = ['phoebe/algorithms/ceclipse.cpp']),
- Extension('phoebe.algorithms.cecl',
- sources = ['phoebe/algorithms/ecl.c']),
+ #Extension('phoebe.algorithms.cecl',
+ # sources = ['phoebe/algorithms/ecl.c']),
Extension('phoebe.dynamics.ftrans',
sources = ['phoebe/dynamics/ftrans.f']),
+ Extension('phoebe.dynamics.ctrans',
+ sources = ['phoebe/dynamics/ctrans.cpp']),
Extension('phoebe.wd.fwd',
include_dirs=['phoebe/wd'],
sources = glob.glob('phoebe/wd/*.f')),
|
removed some unnecessary copying of arrays (small optimization)
|
phoebe-project_phoebe2
|
train
|
9b2222bdc4a227f7a0a63e2e54e752a0d30b5b16
|
diff --git a/widgetsnbextension/src/manager.js b/widgetsnbextension/src/manager.js
index <HASH>..<HASH> 100644
--- a/widgetsnbextension/src/manager.js
+++ b/widgetsnbextension/src/manager.js
@@ -403,9 +403,8 @@ WidgetManager.prototype.updateSnapshots = function() {
} else {
if (widgetSubarea && widgetSubarea.widgetSnapshot) {
delete widgetSubarea.widgetSnapshot;
-
- return that.progressModal.setValue(++progress/cells.length);
}
+ return that.progressModal.setValue(++progress/cells.length);
}
});
});
|
Fix the progress bar on the widget static image renderer,
because it wasn't incrementing on widgetless cells.
|
jupyter-widgets_ipywidgets
|
train
|
a2e17bdd89efa5eb6d89b14fafc454988a65524a
|
diff --git a/src/Deployment/RelocationResourceDeploymentProvider.php b/src/Deployment/RelocationResourceDeploymentProvider.php
index <HASH>..<HASH> 100644
--- a/src/Deployment/RelocationResourceDeploymentProvider.php
+++ b/src/Deployment/RelocationResourceDeploymentProvider.php
@@ -40,7 +40,7 @@ class RelocationResourceDeploymentProvider extends ResourceDeploymentProvider
$url = "/deployed/" . str_replace("\\", "/", str_replace($cwd, "", realpath($resourceFilePath)));
- $this->alreadyDeployed[$resourceFilePath] = $url;
+ $this->alreadyDeployed[$resourceFilePath] = $url . '?' . filemtime($resourceFilePath);
return $url;
}
|
Adding filemtime to resource urls
|
RhubarbPHP_Rhubarb
|
train
|
14ef04c46b310f746871a8abb34c1b44b557595f
|
diff --git a/txaws/s3/client.py b/txaws/s3/client.py
index <HASH>..<HASH> 100644
--- a/txaws/s3/client.py
+++ b/txaws/s3/client.py
@@ -50,13 +50,13 @@ class URLContext(object):
self.endpoint.scheme, self.get_host(), self.get_path())
-class CreateBucketURLContext(URLContext):
+class BucketURLContext(URLContext):
"""
This URL context class provides a means of overriding the standard
- behaviour of the URLContext object so that when creating a bucket, the
- appropriate URL is obtained.
+ behaviour of the URLContext object so that when creating or deleting a
+ bucket, the appropriate URL is obtained.
- When creating buckets on AWS, if the host is set as documented
+ When creating and deleting buckets on AWS, if the host is set as documented
(bucketname.s3.amazonaws.com), a 403 error is returned. When, however, one
sets the host without the bucket name prefix, the operation is completed
successfully.
@@ -111,7 +111,7 @@ class S3Client(object):
query = self.query_factory(
action="PUT", creds=self.creds, endpoint=self.endpoint,
bucket=bucket)
- url_context = CreateBucketURLContext(self.endpoint, bucket)
+ url_context = BucketURLContext(self.endpoint, bucket)
return query.submit(url_context)
def delete_bucket(self, bucket):
@@ -123,7 +123,8 @@ class S3Client(object):
query = self.query_factory(
action="DELETE", creds=self.creds, endpoint=self.endpoint,
bucket=bucket)
- return query.submit()
+ url_context = BucketURLContext(self.endpoint, bucket)
+ return query.submit(url_context)
def put_object(self, bucket, object_name, data, content_type=None,
metadata={}):
diff --git a/txaws/s3/tests/test_client.py b/txaws/s3/tests/test_client.py
index <HASH>..<HASH> 100644
--- a/txaws/s3/tests/test_client.py
+++ b/txaws/s3/tests/test_client.py
@@ -59,12 +59,12 @@ class URLContextTestCase(TXAWSTestCase):
"http://mydocs.localhost/notes.txt")
-class CreateBucketURLContextTestCase(TXAWSTestCase):
+class BucketURLContextTestCase(TXAWSTestCase):
endpoint = AWSServiceEndpoint("https://s3.amazonaws.com/")
def test_get_host_with_bucket(self):
- url_context = client.CreateBucketURLContext(self.endpoint, "mystuff")
+ url_context = client.BucketURLContext(self.endpoint, "mystuff")
self.assertEquals(url_context.get_host(), "s3.amazonaws.com")
self.assertEquals(url_context.get_path(), "/mystuff")
@@ -148,7 +148,7 @@ class S3ClientTestCase(TXAWSTestCase):
self.assertEqual(query.data, "")
self.assertEqual(query.metadata, {})
- def submit(query):
+ def submit(query, url_context=None):
return succeed(None)
creds = AWSCredentials("foo", "bar")
|
Renamed CreateBucketURLContext to BucketURLContext, since it's used by both
create and delete operations.
|
twisted_txaws
|
train
|
9bd57502277cf35bfa95f49a4ac7a844a08ae569
|
diff --git a/src/Notifynder/Models/Notification.php b/src/Notifynder/Models/Notification.php
index <HASH>..<HASH> 100755
--- a/src/Notifynder/Models/Notification.php
+++ b/src/Notifynder/Models/Notification.php
@@ -5,6 +5,7 @@ namespace Fenos\Notifynder\Models;
use Fenos\Notifynder\Notifications\ExtraParams;
use Fenos\Notifynder\Parsers\NotifynderParser;
use Illuminate\Contracts\Container\Container;
+use Illuminate\Database\Eloquent\Collection;
use Illuminate\Database\Eloquent\Model;
use Carbon\Carbon;
use Illuminate\Support\Arr;
@@ -215,4 +216,39 @@ class Notification extends Model
return $fillables;
}
+
+ /**
+ * Filter Scope by stack.
+ *
+ * @param $query
+ * @param $stackId
+ * @return mixed
+ */
+ public function scopeByStack($query, $stackId)
+ {
+ return $query->where('stack_id', $stackId);
+ }
+
+ /**
+ * Check if this notification is part of a stack.
+ *
+ * @return bool
+ */
+ public function hasStack()
+ {
+ return !is_null($this->stack_id);
+ }
+
+ /**
+ * Get the full stack of notifications if this has one.
+ *
+ * @return null|Collection
+ */
+ public function getStack()
+ {
+ if($this->hasStack()) {
+ return static::byStack($this->stack_id)->get();
+ }
+ return null;
+ }
}
diff --git a/src/Notifynder/Notifications/NotificationRepository.php b/src/Notifynder/Notifications/NotificationRepository.php
index <HASH>..<HASH> 100755
--- a/src/Notifynder/Notifications/NotificationRepository.php
+++ b/src/Notifynder/Notifications/NotificationRepository.php
@@ -376,4 +376,41 @@ class NotificationRepository implements NotificationDB
return $query;
}
+
+ /**
+ * Retrive all notifications, in a stack.
+ * You can also limit the number of
+ * Notifications if you don't, it will get all.
+ *
+ * @param $stackId
+ * @param null $limit
+ * @param int|null $paginate
+ * @param string $orderDate
+ * @param Closure $filterScope
+ * @return mixed
+ */
+ public function getStack(
+ $stackId,
+ $limit = null,
+ $paginate = null,
+ $orderDate = 'desc',
+ Closure $filterScope = null
+ ) {
+ $query = $this->notification->with('body', 'from', 'to')
+ ->byStack($stackId)
+ ->orderBy('read', 'ASC')
+ ->orderBy('created_at', $orderDate);
+
+ if ($limit && ! $paginate) {
+ $query->limit($limit);
+ }
+
+ $query = $this->applyFilter($filterScope, $query);
+
+ if (is_int(intval($paginate)) && $paginate) {
+ return $query->paginate($limit);
+ }
+
+ return $query->get();
+ }
}
|
Issue #<I>: make stack_id nullable and add hasStack() & getStack() methods to the model
|
fenos_Notifynder
|
train
|
f4f58d71aa458fc13c7ce08ead84ab66df9cc4fc
|
diff --git a/cli/sawtooth_cli/block.py b/cli/sawtooth_cli/block.py
index <HASH>..<HASH> 100644
--- a/cli/sawtooth_cli/block.py
+++ b/cli/sawtooth_cli/block.py
@@ -54,7 +54,9 @@ def add_block_parser(subparsers, parent_parser):
formatter_class=argparse.RawDescriptionHelpFormatter)
list_parser.add_argument(
- '--limit',
+ '-n',
+ '--count',
+ default=100,
type=int,
help='the number of blocks to list',
)
@@ -84,7 +86,15 @@ def do_block(args):
rest_client = RestClient(args.url, args.user)
if args.subcommand == 'list':
- blocks = rest_client.list_blocks(limit=args.limit)
+ block_generator = rest_client.list_blocks()
+ blocks = []
+ left = args.count
+ for block in block_generator:
+ blocks.append(block)
+ left -= 1
+ if left <= 0:
+ break
+
keys = ('num', 'block_id', 'batches', 'txns', 'signer')
headers = tuple(k.upper() if k != 'batches' else 'BATS' for k in keys)
diff --git a/cli/sawtooth_cli/rest_client.py b/cli/sawtooth_cli/rest_client.py
index <HASH>..<HASH> 100644
--- a/cli/sawtooth_cli/rest_client.py
+++ b/cli/sawtooth_cli/rest_client.py
@@ -35,6 +35,11 @@ class RestClient(object):
self._auth_header = None
def list_blocks(self, limit=None):
+ """Return a block generator.
+
+ Args:
+ limit (int): The page size of requests
+ """
return self._get_data('/blocks', limit=limit)
def get_block(self, block_id):
@@ -116,10 +121,6 @@ class RestClient(object):
url = self._base_url + path
params = self._format_queries(queries)
- limit = None
- if "limit" in params:
- limit = params["limit"]
-
while url:
code, json_result = self._submit_request(
url,
@@ -137,11 +138,6 @@ class RestClient(object):
for item in json_result.get('data', []):
yield item
- if limit:
- limit = limit - len(json_result.get('data', []))
- if limit <= 0:
- break
-
url = json_result['paging'].get('next', None)
def _post(self, path, data, **queries):
diff --git a/docs/source/cli/sawtooth.rst b/docs/source/cli/sawtooth.rst
index <HASH>..<HASH> 100644
--- a/docs/source/cli/sawtooth.rst
+++ b/docs/source/cli/sawtooth.rst
@@ -147,7 +147,7 @@ sawtooth block list
The ``sawtooth block list`` subcommand queries the Sawtooth REST API
(default: ``http://localhost:8008``) for a list of blocks in the
-current chain. Using the ``--limit`` option, the number of blocks returned can
+current chain. Using the ``--count`` option, the number of blocks returned can
be configured. It returns the id and number of each block, the public key of
each signer, and the number of transactions and batches in each.
|
Separate paging from quantity in block list
The previous implementation of this convoluted the paging size with the
number of blocks requested, causing other commands such as sawnet
compare-chains to stop working correctly since they depended on the
paging.
|
hyperledger_sawtooth-core
|
train
|
6f81d7aedb003b3c09a0258b92856fd8746025c4
|
diff --git a/src/frontend/org/voltdb/ExecutionSite.java b/src/frontend/org/voltdb/ExecutionSite.java
index <HASH>..<HASH> 100644
--- a/src/frontend/org/voltdb/ExecutionSite.java
+++ b/src/frontend/org/voltdb/ExecutionSite.java
@@ -2289,7 +2289,9 @@ implements Runnable, SiteTransactionConnection, SiteProcedureConnection
}
// if enabled, send a record of this invocation to the secondary/dr cluster
// if not enabled, should be a noop
- m_partitionDRGateway.onSuccessfulProcedureCall(itask.getTxnId(), itask.getStoredProcedureInvocation(), cr);
+ // also only send write txns
+ if (itask.isReadOnly() == false)
+ m_partitionDRGateway.onSuccessfulProcedureCall(itask.getTxnId(), itask.getStoredProcedureInvocation(), cr);
}
}
catch (final ExpectedProcedureException e) {
|
Only send write txns over the wire for DR.
|
VoltDB_voltdb
|
train
|
d98c55a20a2a48341a8e260c2a7d9e9d97b7e491
|
diff --git a/bin/snakeskin.js b/bin/snakeskin.js
index <HASH>..<HASH> 100644
--- a/bin/snakeskin.js
+++ b/bin/snakeskin.js
@@ -205,7 +205,7 @@ function action(data, file) {
if (res !== false) {
if (execTpl) {
- var tpl = Snakeskin.returnMainTpl(tpls, fileName, mainTpl);
+ var tpl = Snakeskin.getMainTpl(tpls, fileName, mainTpl);
if (!tpl) {
console.log(new Date().toString());
diff --git a/snakeskin.js b/snakeskin.js
index <HASH>..<HASH> 100644
--- a/snakeskin.js
+++ b/snakeskin.js
@@ -199,7 +199,7 @@ exports.compileFile = function (src, opt_params) {
* @param {?string=} [opt_tplName] - name of the main template
* @return {Function}
*/
-exports.returnMainTpl = function (tpls, opt_src, opt_tplName) {
+exports.getMainTpl = function (tpls, opt_src, opt_tplName) {
var
tpl;
@@ -247,7 +247,7 @@ exports.execFile = function (src, opt_params, opt_tplName) {
return null;
}
- return exports.returnMainTpl(tpls, src, opt_tplName);
+ return exports.getMainTpl(tpls, src, opt_tplName);
};
/**
@@ -268,5 +268,5 @@ exports.exec = function (txt, opt_params, opt_tplName) {
opt_params.context = tpls;
exports.compile(txt, opt_params);
- return exports.returnMainTpl(tpls, null, opt_tplName);
+ return exports.getMainTpl(tpls, null, opt_tplName);
};
|
Renamed returnMainTpl to getMainTpl
|
SnakeskinTpl_Snakeskin
|
train
|
1198d6c915f63cea5247f39500b7cb8845fc4dcf
|
diff --git a/lib/searchlogic/named_scopes/ordering.rb b/lib/searchlogic/named_scopes/ordering.rb
index <HASH>..<HASH> 100644
--- a/lib/searchlogic/named_scopes/ordering.rb
+++ b/lib/searchlogic/named_scopes/ordering.rb
@@ -18,9 +18,9 @@ module Searchlogic
def create_condition(name)
if name == :order
- named_scope name, lambda { |scope_name|
- return {} if !condition?(scope_name)
- send(scope_name).proxy_options
+ alias_scope name, lambda { |scope_name|
+ return scoped({}) if !condition?(scope_name)
+ send(scope_name)
}
elsif details = ordering_condition_details(name)
create_ordering_conditions(details[:column])
|
order condition should act like any other scope
|
binarylogic_searchlogic
|
train
|
dfd06b1f58706e5130d5f9d4237b8b222ae6b0ea
|
diff --git a/sequel/index.js b/sequel/index.js
index <HASH>..<HASH> 100644
--- a/sequel/index.js
+++ b/sequel/index.js
@@ -188,7 +188,7 @@ Sequel.prototype.update = function update(currentTable, queryObject, data) {
};
// Get the attribute identity (as opposed to the table name)
- var identity = _.find(_.values(this.schema), {tableName: currentTable}).identity;
+ var identity = currentTable;
// Create the query with the tablename aliased as the identity (in case they are different)
var query = 'UPDATE ' + utils.escapeName(currentTable, this.escapeCharacter) + ' AS ' + utils.escapeName(identity, this.escapeCharacter) + ' ';
@@ -236,7 +236,7 @@ Sequel.prototype.update = function update(currentTable, queryObject, data) {
Sequel.prototype.destroy = function destroy(currentTable, queryObject) {
// Get the attribute identity (as opposed to the table name)
- var identity = _.find(_.values(this.schema), {tableName: currentTable}).identity;
+ var identity = currentTable;
var query = 'DELETE ' + (this.declareDeleteAlias ? utils.escapeName(identity, this.escapeCharacter) : '') + ' FROM ' + utils.escapeName(currentTable, this.escapeCharacter) + ' AS ' + utils.escapeName(identity, this.escapeCharacter) + ' ';
diff --git a/sequel/select.js b/sequel/select.js
index <HASH>..<HASH> 100644
--- a/sequel/select.js
+++ b/sequel/select.js
@@ -15,7 +15,7 @@ var hop = utils.object.hasOwnProperty;
var SelectBuilder = module.exports = function(schema, currentTable, queryObject, options) {
this.schema = schema;
- this.currentTable = _.find(_.values(schema), {tableName: currentTable}).identity;
+ this.currentTable = currentTable;
this.escapeCharacter = '"';
this.cast = false;
this.wlNext = {};
@@ -82,7 +82,7 @@ SelectBuilder.prototype.buildSimpleSelect = function buildSimpleSelect(queryObje
var population = queryObject.instructions[attr].instructions[0];
// Handle hasFK
- var childAlias = _.find(_.values(self.schema), {tableName: population.child}).identity;
+ var childAlias = _.find(_.values(self.schema), {tableName: population.child}).tableName;
_.keys(self.schema[childAlias].attributes).forEach(function(key) {
var schema = self.schema[childAlias].attributes[key];
diff --git a/sequel/where.js b/sequel/where.js
index <HASH>..<HASH> 100644
--- a/sequel/where.js
+++ b/sequel/where.js
@@ -48,7 +48,7 @@ var hop = utils.object.hasOwnProperty;
var WhereBuilder = module.exports = function WhereBuilder(schema, currentTable, options) {
this.schema = schema;
- this.currentTable = _.find(_.values(schema), {tableName: currentTable}).identity;
+ this.currentTable = currentTable;
this.wlNext = {};
@@ -95,7 +95,7 @@ WhereBuilder.prototype.single = function single(queryObject, options) {
var population = queryObject.instructions[attr].instructions[0];
var alias = utils.escapeName(utils.populationAlias(population.alias), self.escapeCharacter);
- var parentAlias = _.find(_.values(self.schema), {tableName: population.parent}).identity;
+ var parentAlias = _.find(_.values(self.schema), {tableName: population.parent}).tableName;
// Handle hasFK
if(strategy === 1) {
@@ -201,7 +201,7 @@ WhereBuilder.prototype.complex = function complex(queryObject, options) {
if(strategy === 2) {
var population = queryObject.instructions[attr].instructions[0];
- var populationAlias = _.find(_.values(self.schema), {tableName: population.child}).identity;
+ var populationAlias = _.find(_.values(self.schema), {tableName: population.child}).tableName;
// Mixin the parameterized flag into options
_options = _.assign({
@@ -257,8 +257,8 @@ WhereBuilder.prototype.complex = function complex(queryObject, options) {
var stage1 = queryObject.instructions[attr].instructions[0];
var stage2 = queryObject.instructions[attr].instructions[1];
- stage1ChildAlias = _.find(_.values(self.schema), {tableName: stage1.child}).identity;
- stage2ChildAlias = _.find(_.values(self.schema), {tableName: stage2.child}).identity;
+ stage1ChildAlias = _.find(_.values(self.schema), {tableName: stage1.child}).tableName;
+ stage2ChildAlias = _.find(_.values(self.schema), {tableName: stage2.child}).tableName;
// Mixin the parameterized flag into options
_options = _.assign({
@@ -298,7 +298,7 @@ WhereBuilder.prototype.complex = function complex(queryObject, options) {
queryString += '(SELECT ';
selectKeys.forEach(function(projection) {
- var projectionAlias = _.find(_.values(self.schema), {tableName: projection.table}).identity;
+ var projectionAlias = _.find(_.values(self.schema), {tableName: projection.table}).tableName;
queryString += utils.escapeName(projectionAlias, self.escapeCharacter) + '.' + utils.escapeName(projection.key, self.escapeCharacter) + ',';
});
|
use the current table instead of the identity property
|
balderdashy_waterline-sequel
|
train
|
165e528d6d90332c2e09bbb09c3ed58d56b03c3e
|
diff --git a/morse_talk/encoding.py b/morse_talk/encoding.py
index <HASH>..<HASH> 100644
--- a/morse_talk/encoding.py
+++ b/morse_talk/encoding.py
@@ -67,6 +67,11 @@ def _encode_morse(message):
>>> message = "SOS"
>>> _encode_morse(message)
['...', '---', '...']
+
+ >>> message = " SOS"
+ >>> _encode_morse(message)
+ [' ', ' ', ' ', '...', '---', '...']
+
"""
return [morsetab.get(c.upper(), '?') for c in message]
diff --git a/morse_talk/utils.py b/morse_talk/utils.py
index <HASH>..<HASH> 100644
--- a/morse_talk/utils.py
+++ b/morse_talk/utils.py
@@ -186,16 +186,43 @@ def _get_speed(element_duration, wpm, word_ref=WORD):
else:
raise NotImplementedError("Can't set both element_duration and wpm")
-def display(message, wpm, element_duration, word_ref):
+def _numbers_units(N):
+ """
+ >>> _numbers_units(45)
+ '123456789012345678901234567890123456789012345'
+ """
+ lst = range(1, N + 1)
+ return "".join(list(map(lambda i: str(i % 10), lst)))
+
+def _numbers_decades(N):
+ """
+ >>> _numbers_decades(45)
+ """
+ N = N // 10
+ lst = range(1, N + 1)
+ return "".join(map(lambda i: "%10s" % i, lst))
+
+def display(message, wpm, element_duration, word_ref, strip=False):
"""
Display
text message
morse code
binary morse code
"""
- print("text : %r" % message)
- print("morse: %s" % mtalk.encode(message))
- print("bin : %s" % mtalk.encode(message, encoding_type='binary'))
+ if strip:
+ print("text : %r" % message.strip())
+ else:
+ print("text : %r" % message)
+ print("morse: %s" % mtalk.encode(message, strip=strip))
+ print("bin : %s" % mtalk.encode(message, encoding_type='binary', strip=strip))
+ print("")
+ s = "".join(mtalk.encoding._encode_binary(message, on="=", off="."))
+ N = len(s)
+ print(_numbers_decades(N))
+ print(_numbers_units(N))
+ print("")
+ print(s)
+
print("")
print("code speed : %s wpm" % wpm)
print("element_duration : %s" % element_duration)
|
Improve console display using = and .
|
morse-talk_morse-talk
|
train
|
4f486a4aec746e9d66441600ee3b0743228b061c
|
diff --git a/tornado/http1connection.py b/tornado/http1connection.py
index <HASH>..<HASH> 100644
--- a/tornado/http1connection.py
+++ b/tornado/http1connection.py
@@ -390,7 +390,10 @@ class HTTP1Connection(httputil.HTTPConnection):
self._chunking_output = (
start_line.method in ("POST", "PUT", "PATCH")
and "Content-Length" not in headers
- and "Transfer-Encoding" not in headers
+ and (
+ "Transfer-Encoding" not in headers
+ or headers["Transfer-Encoding"] == "chunked"
+ )
)
else:
assert isinstance(start_line, httputil.ResponseStartLine)
diff --git a/tornado/test/httpclient_test.py b/tornado/test/httpclient_test.py
index <HASH>..<HASH> 100644
--- a/tornado/test/httpclient_test.py
+++ b/tornado/test/httpclient_test.py
@@ -307,6 +307,21 @@ Transfer-Encoding: chunked
# don't either).
self.assertEqual(301, response.code)
+ def test_redirect_put_with_body(self):
+ response = self.fetch(
+ "/redirect?url=/put&status=307", method="PUT", body="hello"
+ )
+ self.assertEqual(response.body, b"Put body: hello")
+
+ def test_redirect_put_without_body(self):
+ # This "without body" edge case is similar to what happens with body_producer.
+ response = self.fetch(
+ "/redirect?url=/put&status=307",
+ method="PUT",
+ allow_nonstandard_methods=True,
+ )
+ self.assertEqual(response.body, b"Put body: ")
+
def test_credentials_in_url(self):
url = self.get_url("/auth").replace("http://", "http://me:secret@")
response = self.fetch(url)
|
http1connection: Fix a bug with redirects and chunked requests
After a redirect, the chunked-encoding header is already set and would
not be detected correctly. This affects empty bodies with
allow_nonstandard_methods and any use of body_producer.
|
tornadoweb_tornado
|
train
|
3a4ece395f2a1600cda73b37d7b714e3f6d70bb3
|
diff --git a/src/ReferenceTrait.php b/src/ReferenceTrait.php
index <HASH>..<HASH> 100644
--- a/src/ReferenceTrait.php
+++ b/src/ReferenceTrait.php
@@ -19,6 +19,7 @@
namespace Smalldb\StateMachine;
use Smalldb\StateMachine\Definition\StateMachineDefinition;
+use Smalldb\StateMachine\Definition\UndefinedTransitionException;
use Smalldb\StateMachine\Provider\SmalldbProviderInterface;
use Smalldb\StateMachine\ReferenceDataSource\ReferenceDataSourceInterface;
use Smalldb\StateMachine\Transition\TransitionEvent;
@@ -107,9 +108,14 @@ trait ReferenceTrait // implements ReferenceInterface
final public function isTransitionAllowed(string $transitionName): bool
{
- $provider = $this->getMachineProvider();
- $transition = $provider->getDefinition()->getTransition($transitionName, $this->getState());
- return $provider->getTransitionsDecorator()->isTransitionAllowed($this, $transition);
+ try {
+ $provider = $this->getMachineProvider();
+ $transition = $provider->getDefinition()->getTransition($transitionName, $this->getState());
+ return $provider->getTransitionsDecorator()->isTransitionAllowed($this, $transition);
+ }
+ catch (UndefinedTransitionException $ex) {
+ return false;
+ }
}
|
ReferenceTrait: Fix isTransitionAllowed() -- it should not throw an exception
|
smalldb_libSmalldb
|
train
|
7bfd820b47921ad7899081a7ccd233eafa8a12ca
|
diff --git a/django_afip/models.py b/django_afip/models.py
index <HASH>..<HASH> 100644
--- a/django_afip/models.py
+++ b/django_afip/models.py
@@ -435,7 +435,7 @@ class TaxPayer(models.Model):
)
def __str__(self):
- return str(self.cuit)
+ return str(self.name)
class Meta:
verbose_name = _('taxpayer')
|
Use `name` to present TaxPayers
That's the whole point of the field!
|
WhyNotHugo_django-afip
|
train
|
b8833e95e4d6e36bdaf802249ddebf04c81ef960
|
diff --git a/lib/jellyfish.rb b/lib/jellyfish.rb
index <HASH>..<HASH> 100644
--- a/lib/jellyfish.rb
+++ b/lib/jellyfish.rb
@@ -154,16 +154,6 @@ module Jellyfish
private
def handle controller, e, stderr=nil
- case e
- when Respond
- respond(controller, e)
- when Exception
- log_error(e, stderr)
- respond(controller, e)
- end
- end
-
- def respond controller, e
raise e unless self.class.handle_exceptions
handler = self.class.handlers.find{ |klass, block|
break block if e.kind_of?(klass)
@@ -173,7 +163,8 @@ module Jellyfish
elsif e.kind_of?(Respond) # InternalError ends up here if no handlers
[e.status, e.headers, e.body]
else # fallback and see if there's any InternalError handler
- respond(controller, InternalError.new)
+ log_error(e, stderr)
+ handle(controller, InternalError.new)
end
end
|
only print exceptions for which have no handlers
|
godfat_jellyfish
|
train
|
aaef85b9292fba11a44b9aeed865aca072282dea
|
diff --git a/list/docs/define-list.md b/list/docs/define-list.md
index <HASH>..<HASH> 100644
--- a/list/docs/define-list.md
+++ b/list/docs/define-list.md
@@ -92,3 +92,7 @@ people.pop(); // remove ["eve"] 2
people.unshift("Xerxes"); // add ["Xerxes"] 1
// length 3 2
```
+
+__NOTE:__ Only changes made to indexed values using the list's `set` method will dispatch change events.
+👍 `defineList.set(0, 'newValue'); // will dispatch event`
+👎 `defineList[0] = 'newValue'; // will NOT dispatch event`
diff --git a/list/list-test.js b/list/list-test.js
index <HASH>..<HASH> 100644
--- a/list/list-test.js
+++ b/list/list-test.js
@@ -1041,6 +1041,16 @@ QUnit.test("can-reflect onValue", function(assert) {
list.set(0, 5);
});
+QUnit.test("can-reflect onKeyValue", function(assert) {
+ assert.expect(1);
+ var list = new DefineList([1,2,3]);
+ var key = 1;
+ canReflect.onKeyValue(list, key, function(newVal) {
+ assert.equal(newVal, 5);
+ });
+ list.set(key, 5);
+});
+
test("works with can-reflect", function(){
var a = new DefineMap({ foo: 4 });
var b = new DefineList([ "foo", "bar" ]);
diff --git a/list/list.js b/list/list.js
index <HASH>..<HASH> 100644
--- a/list/list.js
+++ b/list/list.js
@@ -18,6 +18,7 @@ var canReflect = require("can-reflect");
var canSymbol = require("can-symbol");
var CIDSet = require("can-util/js/cid-set/cid-set");
var CIDMap = require("can-util/js/cid-map/cid-map");
+var singleReference = require("can-util/js/single-reference/single-reference");
var splice = [].splice;
var runningNative = false;
@@ -1389,6 +1390,33 @@ canReflect.assignSymbols(DefineList.prototype,{
// get/set
"can.getKeyValue": DefineList.prototype.get,
"can.setKeyValue": DefineList.prototype.set,
+
+ // Called for every reference to a property in a template
+ // if a key is a numerical index then translate to length event
+ "can.onKeyValue": function(key, handler) {
+ if (isNaN(key)) {
+ this.addEventListener(key, handler);
+ }
+ else {
+ var translationHandler = function() {
+ handler(this[key]);
+ };
+
+ singleReference.set(handler, this, translationHandler, key);
+ this.addEventListener('length', translationHandler);
+ }
+ },
+ // Called when a property reference is removed
+ "can.offKeyValue": function(key, handler) {
+ if (isNaN(key)) {
+ this.removeEventListener(key, handler);
+ }
+ else {
+ var translationHandler = singleReference.getAndDelete(handler, this, key);
+ this.removeEventListener('length', translationHandler);
+ }
+ },
+
"can.deleteKeyValue": function(prop) {
if(typeof prop === "number") {
this.splice(prop, 1);
|
Dispatch event when an indexed list value changes. Resolves #<I>
|
canjs_can-define
|
train
|
00ca5581674fcfb948d1770e77ad4da1e58e7848
|
diff --git a/pkg/proxy/server_test.go b/pkg/proxy/server_test.go
index <HASH>..<HASH> 100644
--- a/pkg/proxy/server_test.go
+++ b/pkg/proxy/server_test.go
@@ -73,7 +73,9 @@ func testServer(t *testing.T, scheme string, secure bool, delayTx bool) {
cfg.TLSInfo = tlsInfo
}
p := NewServer(cfg)
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
data1 := []byte("Hello World!")
@@ -196,7 +198,9 @@ func testServerDelayAccept(t *testing.T, secure bool) {
cfg.TLSInfo = tlsInfo
}
p := NewServer(cfg)
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
data := []byte("Hello World!")
@@ -246,7 +250,9 @@ func TestServer_PauseTx(t *testing.T) {
From: url.URL{Scheme: scheme, Host: srcAddr},
To: url.URL{Scheme: scheme, Host: dstAddr},
})
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
p.PauseTx()
@@ -293,7 +299,9 @@ func TestServer_ModifyTx_corrupt(t *testing.T) {
From: url.URL{Scheme: scheme, Host: srcAddr},
To: url.URL{Scheme: scheme, Host: dstAddr},
})
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
p.ModifyTx(func(d []byte) []byte {
@@ -329,7 +337,9 @@ func TestServer_ModifyTx_packet_loss(t *testing.T) {
From: url.URL{Scheme: scheme, Host: srcAddr},
To: url.URL{Scheme: scheme, Host: dstAddr},
})
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
// 50% packet loss
@@ -366,7 +376,9 @@ func TestServer_BlackholeTx(t *testing.T) {
From: url.URL{Scheme: scheme, Host: srcAddr},
To: url.URL{Scheme: scheme, Host: dstAddr},
})
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
p.BlackholeTx()
@@ -417,7 +429,9 @@ func TestServer_Shutdown(t *testing.T) {
From: url.URL{Scheme: scheme, Host: srcAddr},
To: url.URL{Scheme: scheme, Host: dstAddr},
})
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
s, _ := p.(*server)
@@ -448,7 +462,9 @@ func TestServer_ShutdownListener(t *testing.T) {
From: url.URL{Scheme: scheme, Host: srcAddr},
To: url.URL{Scheme: scheme, Host: dstAddr},
})
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer p.Close()
// shut down destination
@@ -527,7 +543,9 @@ func testServerHTTP(t *testing.T, secure, delayTx bool) {
cfg.TLSInfo = tlsInfo
}
p := NewServer(cfg)
- <-p.Ready()
+
+ waitForServer(t, p)
+
defer func() {
lg.Info("closing Proxy server...")
p.Close()
@@ -670,3 +688,13 @@ func receive(t *testing.T, ln net.Listener) (data []byte) {
}
return buf.Bytes()
}
+
+// Waits until a proxy is ready to serve.
+// Aborts test on proxy start-up error.
+func waitForServer(t *testing.T, s Server) {
+ select {
+ case <-s.Ready():
+ case err := <-s.Error():
+ t.Fatal(err)
+ }
+}
diff --git a/tests/functional/cmd/etcd-proxy/main.go b/tests/functional/cmd/etcd-proxy/main.go
index <HASH>..<HASH> 100644
--- a/tests/functional/cmd/etcd-proxy/main.go
+++ b/tests/functional/cmd/etcd-proxy/main.go
@@ -87,7 +87,13 @@ $ ./bin/etcdctl --endpoints localhost:23790 put foo bar`)
zap.Int("port", httpPort))
}
p := proxy.NewServer(cfg)
- <-p.Ready()
+
+ select {
+ case <-p.Ready():
+ case err := <-p.Error():
+ panic(err)
+ }
+
defer p.Close()
mux := http.NewServeMux()
|
Fixed goroutine leak on NewServer
|
etcd-io_etcd
|
train
|
d827cbb83955027690365641d643a30b3e7f379d
|
diff --git a/src/equations.js b/src/equations.js
index <HASH>..<HASH> 100644
--- a/src/equations.js
+++ b/src/equations.js
@@ -114,7 +114,55 @@ Equation.prototype.solveFor = function(variable) {
return [];
}
} else if (this._isCubic(variable)) {
- // TODO: solve
+ // Move everything to the lhs so we have the form ax^3 + bx^2 + cx + d = 0.
+ var newLhs = this.lhs.copy();
+
+ for (var i = 0; i < this.rhs.terms.length; i++) {
+ newLhs = newLhs.subtract(this.rhs.terms[i]);
+ }
+
+ newLhs = newLhs.subtract(this.rhs.constant);
+
+ // Extract the coefficients a, b, c, and d into a dict.
+ var coefs = newLhs._cubicCoefficients();
+
+ var a = coefs.a;
+ var b = coefs.b;
+ var c = coefs.c;
+ var d = coefs.d;
+
+ // Calculate D and D0.
+ var D = a.multiply(b).multiply(c).multiply(d).multiply(18);
+ D = D.subtract(b.pow(3).multiply(d).multiply(4));
+ D = D.add(b.pow(2).multiply(c.pow(2)));
+ D = D.subtract(a.multiply(c.pow(3)).multiply(4));
+ D = D.subtract(a.pow(2).multiply(d.pow(2)).multiply(27));
+
+ var D0 = b.pow(2).subtract(a.multiply(c).multiply(3));
+
+ // Check for special cases when D = 0.
+ if (D.valueOf() === 0) {
+ // If D = D0 = 0, there is one distinct real root, -b / 3a.
+ if (D0.valueOf() === 0) {
+ var root1 = b.multiply(-1).divide(a.multiply(3));
+
+ return [root1.reduce()];
+ // Otherwise, if D0 != 0, there are two distinct real roots.
+ // 9ad - bc / 2D0
+ // 4abc - 9a^2d - b^3 / aD0
+ } else {
+ var root1 = a.multiply(b).multiply(c).multiply(4);
+ root1 = root1.subtract(a.pow(2).multiply(d).multiply(9));
+ root1 = root1.subtract(b.pow(3));
+ root1 = root1.divide(a.multiply(D0));
+
+ var root2 = a.multiply(d).multiply(9).subtract(b.multiply(c)).divide(D0.multiply(2));
+
+ return [root1.reduce(), root2.reduce()];
+ }
+ }
+
+ // TODO: Reduce to a depressed cubic.
return;
}
};
diff --git a/test/equation-spec.js b/test/equation-spec.js
index <HASH>..<HASH> 100644
--- a/test/equation-spec.js
+++ b/test/equation-spec.js
@@ -217,4 +217,30 @@ describe("Solving for variables that can't be isolated", function() {
var answer = eq.solveFor("x");
expect(answer).toBeUndefined();
});
+});
+
+describe("Solving a cubic equation", function() {
+ it("works when there's one distinct real root", function() {
+ var a = new Expression("x").pow(3);
+ var b = new Expression("x").pow(2).multiply(-3);
+ var c = new Expression("x").multiply(3);
+ var d = -1;
+
+ var expr = a.add(b).add(c).add(d);
+
+ var eq = new Equation(expr, 0); // x^3 - 3x^2 + 3x - 1 = 0
+ var answers = eq.solveFor("x");
+ expect(answers.toString()).toEqual("1");
+ });
+
+ it("works when there's two distinct real roots", function() {
+ var expr = new Expression("x").pow(3);
+ expr = expr.subtract(new Expression("x").multiply(3));
+ expr = expr.add(2); // x^3 - 3x + 2
+
+ var eq = new Equation(expr, 0); // x^3 - 3x + 2 = 0
+ var answers = eq.solveFor("x");
+
+ expect(answers.toString()).toEqual("-2,1");
+ });
});
\ No newline at end of file
|
Add ability to solve easy cubic equations.
For cubic equations with a discriminant of 0, the roots are easy to find
and don't require cube or square roots.
|
nicolewhite_algebra.js
|
train
|
bbafe6cb6b833dfa9f706166e2159f99d91c4df0
|
diff --git a/lib/AMNL/Mollie/Client.php b/lib/AMNL/Mollie/Client.php
index <HASH>..<HASH> 100644
--- a/lib/AMNL/Mollie/Client.php
+++ b/lib/AMNL/Mollie/Client.php
@@ -32,12 +32,12 @@ abstract class Client
/**
* @var Buzz\Browser
*/
- private $browser;
+ protected $browser;
/**
* @var string
*/
- private $baseUrl;
+ protected $baseUrl;
/**
* @var string User-Agent
@@ -91,11 +91,13 @@ abstract class Client
}
// Convert XML
- $xml = @simplexml_load_string($response->getContent());
-
- // Succesful?
- if ($xml === false) {
- throw new MollieException('Server did not respond with valid XML.');
+ $xml = null;
+ try {
+ $xml = new \SimpleXMLElement($response->getContent());
+ }
+ catch (\Exception $e) {
+ // Failed
+ throw new MollieException('Server did not respond with valid XML.', 0, $e);
}
// Error?
|
Changed access level for properties from private to protected to allow for easier testing and fixed issue #6.
|
itavero_AMNL-Mollie
|
train
|
e371dfc9f69bf8124333e32b0413120c4e9a813a
|
diff --git a/virtualbox/events.py b/virtualbox/events.py
index <HASH>..<HASH> 100644
--- a/virtualbox/events.py
+++ b/virtualbox/events.py
@@ -5,16 +5,17 @@ By Michael Dorman
"""
from __future__ import print_function
import sys
-import atexit
-import inspect
+import atexit
+import inspect
import traceback
import threading
-import virtualbox
from virtualbox import library
-_lookup = {}
+_lookup = {}
+
+
def type_to_interface(event_type):
"""Return the event interface object that corresponds to the event type
enumeration"""
@@ -38,6 +39,8 @@ def type_to_interface(event_type):
_callbacks = {}
+
+
def _event_monitor(callback, event_source, listener, event_interface, quit):
global _callbacks
try:
@@ -46,27 +49,27 @@ def _event_monitor(callback, event_source, listener, event_interface, quit):
event = event_source.get_event(listener, 1000)
except library.VBoxError:
print("Unregistering %s due to VBoxError on get_event" %
- listener, file=sys.stderr)
+ listener, file=sys.stderr)
break
if event:
try:
callback(event_interface(event))
except:
- print("Unhanded exception in callback: \n%s" % \
- traceback.format_exc(), file=sys.stderr)
+ print("Unhanded exception in callback: \n%s" %
+ traceback.format_exc(), file=sys.stderr)
event_source.event_processed(listener, event)
finally:
_callbacks.pop(threading.current_thread().ident, None)
try:
event_source.unregister_listener(listener)
- except Exception as exc:
+ except Exception:
print("Failed to unregister listener %s" % listener,
- file=sys.stderr)
+ file=sys.stderr)
def register_callback(callback, event_source, event_type):
"""register a callback function against an event_source for a given
- event_type.
+ event_type.
Arguments:
callback - function to call when the event occurs
@@ -77,11 +80,11 @@ def register_callback(callback, event_source, event_type):
"""
global _callbacks
event_interface = type_to_interface(event_type)
- listener = event_source.create_listener()
+ listener = event_source.create_listener()
event_source.register_listener(listener, [event_type], False)
quit = threading.Event()
- t = threading.Thread(target=_event_monitor, args=(callback,
- event_source,
+ t = threading.Thread(target=_event_monitor, args=(callback,
+ event_source,
listener,
event_interface,
quit))
@@ -113,7 +116,3 @@ def _remove_all_callbacks():
atexit.register(_remove_all_callbacks)
-
-
-
-
|
Fix linting issues in virtualbox/events.py
|
sethmlarson_virtualbox-python
|
train
|
6a2b2e305a2fc00d581230608e60be889738c17b
|
diff --git a/generator/__init__.py b/generator/__init__.py
index <HASH>..<HASH> 100644
--- a/generator/__init__.py
+++ b/generator/__init__.py
@@ -50,12 +50,12 @@ def generator(klass):
return klass
-class GeneratorTestCaseMeta(type):
+class GeneratorMeta(type):
def __new__(metaclass, name, bases, attributes):
- cls = super(GeneratorTestCaseMeta, metaclass).__new__(metaclass, name, bases, attributes)
+ cls = super(GeneratorMeta, metaclass).__new__(metaclass, name, bases, attributes)
return generator(cls)
-@add_metaclass(GeneratorTestCaseMeta)
-class GeneratorTestCase(object):
+@add_metaclass(GeneratorMeta)
+class GeneratorMixin(object):
generate = staticmethod(generate)
diff --git a/tests/test_generator.py b/tests/test_generator.py
index <HASH>..<HASH> 100755
--- a/tests/test_generator.py
+++ b/tests/test_generator.py
@@ -17,7 +17,7 @@ except ImportError:
import mock
-from generator import generate, generator, GeneratorTestCase, GeneratorTest
+from generator import generate, generator, GeneratorMixin, GeneratorTest
class AssertCalledWithInputs(object):
@@ -95,13 +95,13 @@ class TestGenerator(AssertCalledWithInputs, unittest.TestCase):
)
-class TestGeneratorTestCase(AssertCalledWithInputs, unittest.TestCase):
+class TestGeneratorMixin(AssertCalledWithInputs, unittest.TestCase):
def setUp(self):
self.inputs = 1, 2, 3
self.spy = mock.Mock()
self.other_spy = mock.Mock()
- class Example(GeneratorTestCase, unittest.TestCase):
+ class Example(GeneratorMixin, unittest.TestCase):
@generate(*self.inputs)
def test_method(me, arg):
self.spy(arg)
|
Renamed base class to GeneratorMixin
|
kevinastone_generator
|
train
|
1236a43891e934be3e08cf2d376e7325c8268b0f
|
diff --git a/richtextfx/src/main/java/org/fxmisc/richtext/EditableStyledDocument.java b/richtextfx/src/main/java/org/fxmisc/richtext/EditableStyledDocument.java
index <HASH>..<HASH> 100644
--- a/richtextfx/src/main/java/org/fxmisc/richtext/EditableStyledDocument.java
+++ b/richtextfx/src/main/java/org/fxmisc/richtext/EditableStyledDocument.java
@@ -21,6 +21,7 @@ import org.reactfx.EventSource;
import org.reactfx.EventStream;
import org.reactfx.EventStreams;
import org.reactfx.Guard;
+import org.reactfx.Suspendable;
import org.reactfx.util.Lists;
import org.reactfx.value.SuspendableVar;
import org.reactfx.value.Val;
@@ -381,6 +382,20 @@ final class EditableStyledDocument<S, PS> extends StyledDocumentBase<S, PS, Obse
* *
* ********************************************************************** */
+ private final List<Suspendable> suspendables = new ArrayList<>(1);
+
+ void addSuspendable(Suspendable omniSuspendable) {suspendables.add(omniSuspendable);}
+
+ void removeSuspendable(Suspendable omnisuspendable) {suspendables.remove(omnisuspendable);}
+
+ Guard suspendAll() {
+ Suspendable[] suspendablesArray = new Suspendable[suspendables.size()];
+ for (int i = 0; i < suspendables.size(); i++) {
+ suspendablesArray[i] = suspendables.get(i);
+ }
+ return Suspendable.combine(suspendablesArray).suspend();
+ }
+
private void ensureValidParagraphIndex(int parIdx) {
Lists.checkIndex(parIdx, paragraphs.size());
}
diff --git a/richtextfx/src/main/java/org/fxmisc/richtext/StyledTextArea.java b/richtextfx/src/main/java/org/fxmisc/richtext/StyledTextArea.java
index <HASH>..<HASH> 100644
--- a/richtextfx/src/main/java/org/fxmisc/richtext/StyledTextArea.java
+++ b/richtextfx/src/main/java/org/fxmisc/richtext/StyledTextArea.java
@@ -495,8 +495,6 @@ public class StyledTextArea<S, PS> extends Region
return preserveStyle;
}
- private final Suspendable omniSuspendable;
-
/* ********************************************************************** *
* *
@@ -637,7 +635,7 @@ public class StyledTextArea<S, PS> extends Region
() -> content.getText(internalSelection.getValue()),
internalSelection, content.getParagraphs()).suspendable();
- omniSuspendable = Suspendable.combine(
+ Suspendable omniSuspendable = Suspendable.combine(
beingUpdated, // must be first, to be the last one to release
text,
length,
@@ -654,6 +652,8 @@ public class StyledTextArea<S, PS> extends Region
// paragraphs to be released first
paragraphs);
+ content.addSuspendable(omniSuspendable);
+ manageSubscription(() -> content.removeSuspendable(omniSuspendable));
this.setBackground(new Background(new BackgroundFill(Color.WHITE, CornerRadii.EMPTY, Insets.EMPTY)));
getStyleClass().add("styled-text-area");
@@ -1040,7 +1040,7 @@ public class StyledTextArea<S, PS> extends Region
* Sets style for the given character range.
*/
public void setStyle(int from, int to, S style) {
- try(Guard g = omniSuspendable.suspend()) {
+ try (Guard g = content.suspendAll()) {
content.setStyle(from, to, style);
}
}
@@ -1049,7 +1049,7 @@ public class StyledTextArea<S, PS> extends Region
* Sets style for the whole paragraph.
*/
public void setStyle(int paragraph, S style) {
- try(Guard g = omniSuspendable.suspend()) {
+ try (Guard g = content.suspendAll()) {
content.setStyle(paragraph, style);
}
}
@@ -1058,7 +1058,7 @@ public class StyledTextArea<S, PS> extends Region
* Sets style for the given range relative in the given paragraph.
*/
public void setStyle(int paragraph, int from, int to, S style) {
- try(Guard g = omniSuspendable.suspend()) {
+ try (Guard g = content.suspendAll()) {
content.setStyle(paragraph, from, to, style);
}
}
@@ -1074,7 +1074,7 @@ public class StyledTextArea<S, PS> extends Region
* but the actual implementation is more efficient.
*/
public void setStyleSpans(int from, StyleSpans<? extends S> styleSpans) {
- try(Guard g = omniSuspendable.suspend()) {
+ try (Guard g = content.suspendAll()) {
content.setStyleSpans(from, styleSpans);
}
}
@@ -1090,7 +1090,7 @@ public class StyledTextArea<S, PS> extends Region
* but the actual implementation is more efficient.
*/
public void setStyleSpans(int paragraph, int from, StyleSpans<? extends S> styleSpans) {
- try(Guard g = omniSuspendable.suspend()) {
+ try (Guard g = content.suspendAll()) {
content.setStyleSpans(paragraph, from, styleSpans);
}
}
@@ -1099,7 +1099,7 @@ public class StyledTextArea<S, PS> extends Region
* Sets style for the whole paragraph.
*/
public void setParagraphStyle(int paragraph, PS paragraphStyle) {
- try(Guard g = omniSuspendable.suspend()) {
+ try (Guard g = content.suspendAll()) {
content.setParagraphStyle(paragraph, paragraphStyle);
}
}
@@ -1142,7 +1142,7 @@ public class StyledTextArea<S, PS> extends Region
@Override
public void replace(int start, int end, StyledDocument<S, PS> replacement) {
- try(Guard g = omniSuspendable.suspend()) {
+ try (Guard g = content.suspendAll()) {
start = clamp(0, start, getLength());
end = clamp(0, end, getLength());
|
Stored all attached StyledTextArea's omniSuspendables in their shared document so that, when one modifies the underlying document, all of their suspendables are suspended. This insures that their values are always valid and correct.
|
FXMisc_RichTextFX
|
train
|
d895cec8bfeae87b5bc1e3fa6b528758f10e45de
|
diff --git a/src/elements/Icon/Icon.d.ts b/src/elements/Icon/Icon.d.ts
index <HASH>..<HASH> 100644
--- a/src/elements/Icon/Icon.d.ts
+++ b/src/elements/Icon/Icon.d.ts
@@ -52,6 +52,9 @@ export interface IconProps {
/** Size of the icon. */
size?: IconSizeProp;
+
+ /** Icon can have an aria label. */
+ 'aria-label'?: string;
}
declare class Icon extends React.Component<IconProps, {}> {
diff --git a/src/elements/Icon/Icon.js b/src/elements/Icon/Icon.js
index <HASH>..<HASH> 100644
--- a/src/elements/Icon/Icon.js
+++ b/src/elements/Icon/Icon.js
@@ -66,6 +66,9 @@ class Icon extends Component {
/** Size of the icon. */
size: PropTypes.oneOf(_.without(SUI.SIZES, 'medium')),
+
+ /** Icon can have an aria label. */
+ 'aria-label': PropTypes.string,
}
static defaultProps = {
@@ -83,6 +86,17 @@ class Icon extends Component {
return !shallowEqual(this.props, nextProps)
}
+ getIconAriaOptions() {
+ const ariaOptions = {}
+ const { 'aria-label': ariaLabel } = this.props
+
+ if (!ariaLabel) {
+ ariaOptions['aria-hidden'] = 'true'
+ }
+
+ return ariaOptions
+ }
+
render() {
const {
bordered,
@@ -120,8 +134,9 @@ class Icon extends Component {
)
const rest = getUnhandledProps(Icon, this.props)
const ElementType = getElementType(Icon, this.props)
+ const ariaOptions = this.getIconAriaOptions()
- return <ElementType {...rest} aria-hidden='true' className={classes} />
+ return <ElementType {...rest} {...ariaOptions} className={classes} />
}
}
diff --git a/test/specs/elements/Icon/Icon-test.js b/test/specs/elements/Icon/Icon-test.js
index <HASH>..<HASH> 100644
--- a/test/specs/elements/Icon/Icon-test.js
+++ b/test/specs/elements/Icon/Icon-test.js
@@ -39,5 +39,11 @@ describe('Icon', () => {
wrapper.should.have.prop('aria-hidden', 'true')
})
+
+ it('should omit aria-hidden when aria-label is set', () => {
+ const wrapper = shallow(<Icon aria-label='icon' />)
+
+ wrapper.should.not.have.prop('aria-hidden')
+ })
})
})
|
fix(icon): omit aria-hidden attr when aria-label prop is truthy (#<I>)
|
Semantic-Org_Semantic-UI-React
|
train
|
ae285f32866b299ade704d7c0af8cd8dd40af799
|
diff --git a/admin/client/src/containers/CampaignAdmin/CampaignAdmin.js b/admin/client/src/containers/CampaignAdmin/CampaignAdmin.js
index <HASH>..<HASH> 100644
--- a/admin/client/src/containers/CampaignAdmin/CampaignAdmin.js
+++ b/admin/client/src/containers/CampaignAdmin/CampaignAdmin.js
@@ -26,6 +26,7 @@ class CampaignAdmin extends SilverStripeComponent {
});
this.campaignListCreateFn = this.campaignListCreateFn.bind(this);
this.campaignEditCreateFn = this.campaignEditCreateFn.bind(this);
+ this.campaignCreationCreateFn = this.campaignCreationCreateFn.bind(this);
}
componentDidMount() {
@@ -68,6 +69,9 @@ class CampaignAdmin extends SilverStripeComponent {
case 'edit':
view = this.renderDetailEditView();
break;
+ case 'create':
+ view = this.renderCreateView();
+ break;
default:
view = this.renderIndexView();
}
@@ -158,6 +162,53 @@ class CampaignAdmin extends SilverStripeComponent {
}
/**
+ * Render the view for creating a new Campaign.
+ */
+ renderCreateView() {
+ const baseSchemaUrl = this.props.sectionConfig.forms.CreateEditForm.schemaUrl;
+ const formBuilderProps = {
+ createFn: this.campaignCreationCreateFn,
+ formId: 'CreateEditForm',
+ schemaUrl: `${baseSchemaUrl}/ChangeSet`,
+ };
+
+ return (
+ <div className="cms-middle no-preview">
+ <div className="cms-campaigns collapse in" aria-expanded="true">
+ <NorthHeader>
+ <h2 className="text-truncate north-header__heading">Campaigns</h2>
+ </NorthHeader>
+ <div className="cms-middle__scrollable">
+ <FormBuilder {...formBuilderProps} />
+ </div>
+ </div>
+ </div>
+ );
+ }
+
+ /**
+ * Hook to allow customisation of components being constructed
+ * by the Campaign creation FormBuilder.
+ *
+ * @param {Object} Component - Component constructor.
+ * @param {Object} props - Props passed from FormBuilder.
+ * @return {Object} - Instanciated React component
+ */
+ campaignCreationCreateFn(Component, props) {
+ if (props.name === 'action_save') {
+ const extendedProps = Object.assign({}, props, {
+ type: 'submit',
+ label: props.title,
+ icon: 'save',
+ });
+
+ return <Component key={props.name} {...extendedProps} />;
+ }
+
+ return <Component key={props.name} {...props} />;
+ }
+
+ /**
* Hook to allow customisation of components being constructed
* by the Campaign list FormBuilder.
*
@@ -254,7 +305,12 @@ class CampaignAdmin extends SilverStripeComponent {
}
addCampaign() {
- // Add campaign
+ const path = this.props.sectionConfig.campaignViewRoute
+ .replace(/:type\?/, 'set')
+ .replace(/:id\?/, 0)
+ .replace(/:view\?/, 'create');
+
+ window.ss.router.show(path);
}
}
diff --git a/admin/code/CampaignAdmin.php b/admin/code/CampaignAdmin.php
index <HASH>..<HASH> 100644
--- a/admin/code/CampaignAdmin.php
+++ b/admin/code/CampaignAdmin.php
@@ -12,6 +12,7 @@ class CampaignAdmin extends LeftAndMain implements PermissionProvider {
'set',
'sets',
'schema',
+ 'CreateEditForm',
'DetailEditForm',
'readCampaigns',
'createCampaign',
@@ -30,7 +31,6 @@ class CampaignAdmin extends LeftAndMain implements PermissionProvider {
private static $url_handlers = [
'GET sets' => 'readCampaigns',
'POST set/$ID/publish' => 'publishCampaign',
- 'POST set/$ID' => 'createCampaign',
'GET set/$ID/$Name' => 'readCampaign',
'DELETE set/$ID' => 'deleteCampaign',
];
@@ -62,7 +62,10 @@ class CampaignAdmin extends LeftAndMain implements PermissionProvider {
],
'DetailEditForm' => [
'schemaUrl' => $this->Link('schema/DetailEditForm')
- ]
+ ],
+ 'CreateEditForm' => [
+ 'schemaUrl' => $this->Link('schema/CreateEditForm')
+ ],
],
'campaignViewRoute' => $this->Link() . ':type?/:id?/:view?',
'itemListViewEndpoint' => $this->Link() . 'set/:id/show',
@@ -189,23 +192,6 @@ JSON;
}
/**
- * REST endpoint to create a campaign.
- *
- * @param SS_HTTPRequest $request
- *
- * @return SS_HTTPResponse
- */
- public function createCampaign(SS_HTTPRequest $request) {
- $response = new SS_HTTPResponse();
- $response->addHeader('Content-Type', 'application/json');
- $response->setBody(Convert::raw2json(['campaign' => 'create']));
-
- // TODO Implement permission check and data creation
-
- return $response;
- }
-
- /**
* REST endpoint to get a list of campaigns.
*
* @param SS_HTTPRequest $request
@@ -490,6 +476,22 @@ JSON;
}
/**
+ * @todo Use GridFieldDetailForm once it can handle structured data and form schemas
+ *
+ * @return Form
+ */
+ public function getCreateEditForm() {
+ return Form::create(
+ $this,
+ 'CreateEditForm',
+ ChangeSet::singleton()->getCMSFields(),
+ FieldList::create(
+ FormAction::create('save', 'Save')
+ )
+ );
+ }
+
+ /**
* Gets user-visible url to edit a specific {@see ChangeSet}
*
* @param $itemID
|
Adds route for creating new Campaigns
|
silverstripe_silverstripe-framework
|
train
|
5eae568fcfc8f805ad0e270a9ff24ee6af305f70
|
diff --git a/core/src/main/java/com/google/errorprone/bugpatterns/MissingFail.java b/core/src/main/java/com/google/errorprone/bugpatterns/MissingFail.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/google/errorprone/bugpatterns/MissingFail.java
+++ b/core/src/main/java/com/google/errorprone/bugpatterns/MissingFail.java
@@ -297,6 +297,10 @@ public class MissingFail extends BugChecker implements TryTreeMatcher {
return false;
}
+ if (tree.getBlock().getStatements().isEmpty()) {
+ return false;
+ }
+
return true;
}
@@ -456,7 +460,10 @@ public class MissingFail extends BugChecker implements TryTreeMatcher {
public boolean matches(TryTree tryTree, VisitorState state) {
MethodTree enclosingMethodTree =
ASTHelpers.findEnclosingNode(state.getPath(), MethodTree.class);
-
+ if (enclosingMethodTree == null) {
+ // e.g. a class initializer
+ return true;
+ }
Name name = enclosingMethodTree.getName();
return JUnitMatchers.looksLikeJUnit3SetUp.matches(enclosingMethodTree, state)
|| JUnitMatchers.looksLikeJUnit3TearDown.matches(enclosingMethodTree, state)
diff --git a/core/src/test/java/com/google/errorprone/bugpatterns/MissingFailTest.java b/core/src/test/java/com/google/errorprone/bugpatterns/MissingFailTest.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/com/google/errorprone/bugpatterns/MissingFailTest.java
+++ b/core/src/test/java/com/google/errorprone/bugpatterns/MissingFailTest.java
@@ -265,6 +265,49 @@ public class MissingFailTest {
.doTest();
}
+ @Test
+ public void emptyTry() {
+ refactoringHelper
+ .addInputLines(
+ "in/ExceptionTest.java",
+ "import java.io.IOException;",
+ "import org.junit.Test;",
+ "abstract class ExceptionTest {",
+ " abstract AutoCloseable c();",
+ " @Test",
+ " public void test() {",
+ " try (AutoCloseable c = c()) {",
+ " } catch (Exception expected) {",
+ " }",
+ " }",
+ "}")
+ .expectUnchanged()
+ .doTest();
+ }
+
+ @Test
+ public void noEnclosingMethod() {
+ refactoringHelper
+ .addInputLines(
+ "in/ExceptionTest.java",
+ "import java.io.IOException;",
+ "import org.junit.Test;",
+ "import org.junit.runner.RunWith;",
+ "import org.junit.runners.JUnit4;",
+ "@RunWith(JUnit4.class)",
+ "abstract class ExceptionTest {",
+ " abstract void c();",
+ " {",
+ " try {",
+ " c();",
+ " } catch (Exception expected) {",
+ " }",
+ " }",
+ "}")
+ .expectUnchanged()
+ .doTest();
+ }
+
private static class TestScanner extends Scanner {
final List<Description> suggestedChanges = new ArrayList<>();
|
Fix some brittle assumptions in MissingFail
RELNOTES: N/A
-------------
Created by MOE: <URL>
|
google_error-prone
|
train
|
4ec8d0bb3180e5efbdbc48078051eae1d7050be8
|
diff --git a/examples/counter/test/components/Counter.spec.js b/examples/counter/test/components/Counter.spec.js
index <HASH>..<HASH> 100644
--- a/examples/counter/test/components/Counter.spec.js
+++ b/examples/counter/test/components/Counter.spec.js
@@ -2,7 +2,6 @@ import test from 'ava';
import React from 'react';
import { shallow } from 'enzyme';
import sinon from 'sinon';
-import { rootComponent, Madera } from 'madera';
import { Counter } from '../../components/Counter.jsx';
|
Remove extraneous dependency in counter test
|
jairtrejo_madera
|
train
|
aa291a2766b26527b2bee832906b5a8dcbaf786f
|
diff --git a/src/index.js b/src/index.js
index <HASH>..<HASH> 100644
--- a/src/index.js
+++ b/src/index.js
@@ -27,10 +27,6 @@ const PEER_CONNECTION_CONFIG = {
]
};
-// In the event the server restarts and all clients lose connection, reconnect with
-// some random jitter added to prevent simultaneous reconnection requests.
-const INITIAL_RECONNECTION_DELAY = 1000 * Math.random();
-
class JanusAdapter {
constructor() {
this.room = null;
@@ -41,8 +37,13 @@ class JanusAdapter {
this.ws = null;
this.session = null;
- this.reconnectionDelay = INITIAL_RECONNECTION_DELAY;
+ // In the event the server restarts and all clients lose connection, reconnect with
+ // some random jitter added to prevent simultaneous reconnection requests.
+ this.initialReconnectionDelay = 1000 * Math.random();
+ this.reconnectionDelay = this.initialReconnectionDelay;
this.reconnectionTimeout = null;
+ this.maxReconnectionAttempts = 10;
+ this.reconnectionAttempts = 0;
this.publisher = null;
this.occupants = {};
@@ -93,6 +94,15 @@ class JanusAdapter {
this.onOccupantMessage = messageListener;
}
+ setReconnectionListeners(reconnectingListener, reconnectedListener, reconnectionErrorListener) {
+ // onReconnecting is called with the number of milliseconds until the next reconnection attempt
+ this.onReconnecting = reconnectingListener;
+ // onReconnected is called when the connection has been reestablished
+ this.onReconnected = reconnectedListener;
+ // onReconnectionError is called with an error when maxReconnectionAttempts has been reached
+ this.onReconnectionError = reconnectionErrorListener;
+ }
+
connect() {
debug(`connecting to ${this.serverUrl}`);
@@ -177,6 +187,10 @@ class JanusAdapter {
return;
}
+ if (this.onReconnecting) {
+ this.onReconnecting(this.reconnectionDelay);
+ }
+
this.reconnectionTimeout = setTimeout(() => this.reconnect(), this.reconnectionDelay);
}
@@ -186,10 +200,25 @@ class JanusAdapter {
this.connect()
.then(() => {
- this.reconnectionDelay = INITIAL_RECONNECTION_DELAY;
+ this.reconnectionDelay = this.initialReconnectionDelay;
+ this.reconnectionAttempts = 0;
+
+ if (this.onReconnected) {
+ this.onReconnected();
+ }
})
.catch((error) => {
this.reconnectionDelay += 1000;
+ this.reconnectionAttempts++;
+
+ if (this.reconnectionAttempts > this.maxReconnectionAttempts && this.onReconnectionError) {
+ return this.onReconnectionError(new Error("Connection could not be reestablished, exceeded maximum number of reconnection attempts."));
+ }
+
+ if (this.onReconnecting) {
+ this.onReconnecting(this.reconnectionDelay);
+ }
+
this.reconnectionTimeout = setTimeout(() => this.reconnect(), this.reconnectionDelay);
});
}
|
Make reconnection variables configurable, add event listeners, limit reconnection attempts.
|
mozilla_naf-janus-adapter
|
train
|
29595faf52b1c6005aeed51dd00043513c8d1e1c
|
diff --git a/lib/dynamic_scaffold/controller.rb b/lib/dynamic_scaffold/controller.rb
index <HASH>..<HASH> 100644
--- a/lib/dynamic_scaffold/controller.rb
+++ b/lib/dynamic_scaffold/controller.rb
@@ -37,7 +37,7 @@ module DynamicScaffold
@records = @records.order dynamic_scaffold.list.sorter if dynamic_scaffold.list.sorter
@records = @records.order(*dynamic_scaffold.list.order) unless dynamic_scaffold.list.order.empty?
- append_optional_queries(@records)
+ @records = append_optional_queries(@records)
end
def new
@@ -115,6 +115,8 @@ module DynamicScaffold
def before_create_save(record, prev_attribute); end
- def append_optional_queries(records); end
+ def append_optional_queries(records)
+ records
+ end
end
end
|
Fix append_optional_queries bug.
|
gomo_dynamic_scaffold
|
train
|
804104347b2f716580e828533b0c45f64ade8258
|
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -1,8 +1,21 @@
-module.exports = obj
+module.exports = reduxer
module.exports.ctor = ctor
+module.exports.obj = obj
var through2 = require("through2")
+function reduxer (options, fn) {
+ return ctor(options, fn)()
+}
+
+function obj (options, fn) {
+ if (typeof options == "function") {
+ fn = options
+ options = {}
+ }
+ return ctor({ objectMode: true }, fn)(options)
+}
+
function ctor (options, fn) {
if (typeof options == "function") {
fn = options
@@ -17,14 +30,4 @@ function ctor (options, fn) {
this.push(options.state)
return callback(err)
})
-}
-
-function obj (options, fn) {
- if (typeof options == "function") {
- fn = options
- options = {}
- }
- options = options || {}
- options.objectMode = true
- return ctor(options, fn)()
}
\ No newline at end of file
diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "reduxer",
- "version": "0.3.0",
+ "version": "0.4.0",
"description": "Redux for streams",
"main": "index.js",
"repository": {
diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -8,7 +8,7 @@ var isnumber = require('isnumber')
test('object chunks', function (t) {
t.plan(1)
- var meaner = redux(function (prev={time: 0, mean: 0}, curr) {
+ var meaner = redux.obj(function (prev={time: 0, mean: 0}, curr) {
var mean = prev.mean - (prev.mean - curr.mean) / (prev.time + 1)
prev.mean = mean
prev.time = curr.time
|
objectMode no longer set by default
|
lukeburns_reduxer
|
train
|
f19f86891571a0d7e81e0d98de12ca223395993d
|
diff --git a/tornado/test/httpclient_test.py b/tornado/test/httpclient_test.py
index <HASH>..<HASH> 100644
--- a/tornado/test/httpclient_test.py
+++ b/tornado/test/httpclient_test.py
@@ -109,7 +109,7 @@ class ContentLength304Handler(RequestHandler):
self.set_status(304)
self.set_header("Content-Length", 42)
- def _clear_headers_for_304(self):
+ def _clear_representation_headers(self):
# Tornado strips content-length from 304 responses, but here we
# want to simulate servers that include the headers anyway.
pass
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index <HASH>..<HASH> 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -1175,7 +1175,6 @@ class StaticFileTest(WebTestCase):
)
self.assertEqual(response2.code, 304)
self.assertTrue("Content-Length" not in response2.headers)
- self.assertTrue("Last-Modified" not in response2.headers)
def test_static_304_if_none_match(self):
response1 = self.get_and_head("/static/robots.txt")
diff --git a/tornado/web.py b/tornado/web.py
index <HASH>..<HASH> 100644
--- a/tornado/web.py
+++ b/tornado/web.py
@@ -1138,7 +1138,7 @@ class RequestHandler(object):
assert not self._write_buffer, (
"Cannot send body with %s" % self._status_code
)
- self._clear_headers_for_304()
+ self._clear_representation_headers()
elif "Content-Length" not in self._headers:
content_length = sum(len(part) for part in self._write_buffer)
self.set_header("Content-Length", content_length)
@@ -1803,21 +1803,13 @@ class RequestHandler(object):
def _ui_method(self, method: Callable[..., str]) -> Callable[..., str]:
return lambda *args, **kwargs: method(self, *args, **kwargs)
- def _clear_headers_for_304(self) -> None:
- # 304 responses should not contain entity headers (defined in
- # http://www.w3.org/Protocols/rfc2616/rfc2616-sec7.html#sec7.1)
+ def _clear_representation_headers(self) -> None:
+ # 304 responses should not contain representation metadata
+ # headers (defined in
+ # https://tools.ietf.org/html/rfc7231#section-3.1)
# not explicitly allowed by
- # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
- headers = [
- "Allow",
- "Content-Encoding",
- "Content-Language",
- "Content-Length",
- "Content-MD5",
- "Content-Range",
- "Content-Type",
- "Last-Modified",
- ]
+ # https://tools.ietf.org/html/rfc7232#section-4.1
+ headers = ["Content-Encoding", "Content-Language", "Content-Type"]
for h in headers:
self.clear_header(h)
|
Clear fewer headers on 1xx/<I>/<I> responses
This function is called on more than just <I> responses; it’s
important to permit the Allow header on <I> responses. Also, the
relevant RFCs have changed significantly.
Fixes #<I>.
|
tornadoweb_tornado
|
train
|
d651f64e7e963ac91fb876b655ad78eb5573d824
|
diff --git a/Core/Classes/CoreSite.php b/Core/Classes/CoreSite.php
index <HASH>..<HASH> 100644
--- a/Core/Classes/CoreSite.php
+++ b/Core/Classes/CoreSite.php
@@ -58,7 +58,7 @@ class CoreSite extends AbstractSite {
//if the user is an admin, load the admin controller, otherwise, return false;
if ($this->getGlobalUser()->isAdmin()) {
if ($controllerName) {
- $this->getViewRenderer()->registerAppContextProperty("app_http", "{$this->getSiteConfig()['PATH_HTTP']}admin/{$this->getPages()[$controllerName]->getPath()}/");
+ $this->getViewRenderer()->registerAppContextProperty("app_http", "{$this->getSiteConfig()['PATH_HTTP']}admin/{$currentPage->getPath()}/");
$controllerClass = "{$this->getSiteConfig()['NAMESPACE_APP']}Classes\\Controllers\\".ucfirst($controllerName)."AdminController";
} else {
$this->getViewRenderer()->registerAppContextProperty("app_http", "{$this->getSiteConfig()['PATH_HTTP']}admin/");
@@ -67,7 +67,7 @@ class CoreSite extends AbstractSite {
}
} elseif ($this->getGlobalUser()->isLoggedIn() || $currentPage->getAccessLevel() == SECURITY_PUBLIC) {
//load standard controller
- $this->getViewRenderer()->registerAppContextProperty("app_http", "{$this->getSiteConfig()['PATH_HTTP']}{$this->getPages()[$controllerName]->getPath()}/");
+ $this->getViewRenderer()->registerAppContextProperty("app_http", "{$this->getSiteConfig()['PATH_HTTP']}{$currentPage->getPath()}/");
$controllerClass = "{$this->getSiteConfig()['NAMESPACE_APP']}Classes\\Controllers\\".ucfirst($controllerName)."Controller";
}
} else {
|
fix non-object bug when user controller is active
|
TAMULib_Pipit
|
train
|
e80162a938cb1cd691a0456da20bc737687a4566
|
diff --git a/plenum/server/monitor.py b/plenum/server/monitor.py
index <HASH>..<HASH> 100644
--- a/plenum/server/monitor.py
+++ b/plenum/server/monitor.py
@@ -278,7 +278,7 @@ class Monitor(HasActionQueue, PluginLoaderHelper):
logger.debug("{} found difference between master's and "
"backups's avg latency to be higher than the "
"threshold".format(self))
- logger.debug(
+ logger.trace(
"{}'s master's avg request latency is {} and backup's "
"avg request latency is {} ".
format(self, avgLatM, avgLatB))
|
use trace log level for printing avg latencies
|
hyperledger_indy-plenum
|
train
|
6367cacbe4288c876d4b2b3477eb3b616ae2c5af
|
diff --git a/Godeps/_workspace/src/k8s.io/kubernetes/pkg/controller/node/nodecontroller.go b/Godeps/_workspace/src/k8s.io/kubernetes/pkg/controller/node/nodecontroller.go
index <HASH>..<HASH> 100644
--- a/Godeps/_workspace/src/k8s.io/kubernetes/pkg/controller/node/nodecontroller.go
+++ b/Godeps/_workspace/src/k8s.io/kubernetes/pkg/controller/node/nodecontroller.go
@@ -639,7 +639,11 @@ func (nc *NodeController) tryUpdateNodeStatus(node *api.Node) (time.Duration, ap
} else {
transitionTime = savedNodeStatus.readyTransitionTimestamp
}
- glog.V(3).Infof("Nodes ReadyCondition updated. Updating timestamp: %+v\n vs %+v.", savedNodeStatus.status, node.Status)
+ if glog.V(5) {
+ glog.Infof("Node %s ReadyCondition updated. Updating timestamp: %+v vs %+v.", node.Name, savedNodeStatus.status, node.Status)
+ } else {
+ glog.V(3).Infof("Node %s ReadyCondition updated. Updating timestamp.", node.Name)
+ }
savedNodeStatus = nodeStatusData{
status: node.Status,
probeTimestamp: nc.now(),
diff --git a/Godeps/_workspace/src/k8s.io/kubernetes/pkg/kubelet/dockertools/manager.go b/Godeps/_workspace/src/k8s.io/kubernetes/pkg/kubelet/dockertools/manager.go
index <HASH>..<HASH> 100644
--- a/Godeps/_workspace/src/k8s.io/kubernetes/pkg/kubelet/dockertools/manager.go
+++ b/Godeps/_workspace/src/k8s.io/kubernetes/pkg/kubelet/dockertools/manager.go
@@ -1599,7 +1599,7 @@ func (dm *DockerManager) computePodContainerChanges(pod *api.Pod, podStatus *kub
defer func() {
metrics.ContainerManagerLatency.WithLabelValues("computePodContainerChanges").Observe(metrics.SinceInMicroseconds(start))
}()
- glog.V(4).Infof("Syncing Pod %q: %+v", format.Pod(pod), pod)
+ glog.V(5).Infof("Syncing Pod %q: %+v", format.Pod(pod), pod)
containersToStart := make(map[int]string)
containersToKeep := make(map[kubecontainer.DockerID]int)
|
UPSTREAM: <I>: Reduce node controller debug logging
|
openshift_origin
|
train
|
ab6abfad31f4a87e22137475c4182be105cef11e
|
diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/pillar/__init__.py
+++ b/salt/pillar/__init__.py
@@ -1037,6 +1037,11 @@ class Pillar(object):
decrypt_errors = self.decrypt_pillar(pillar)
if decrypt_errors:
pillar.setdefault('_errors', []).extend(decrypt_errors)
+
+ # Reset the file_roots for the renderers
+ for mod_name in sys.modules:
+ if mod_name.startswith('salt.loaded.int.render.'):
+ sys.modules[mod_name].__opts__['file_roots'] = self.actual_file_roots
return pillar
def decrypt_pillar(self, pillar):
diff --git a/tests/integration/runners/test_state.py b/tests/integration/runners/test_state.py
index <HASH>..<HASH> 100644
--- a/tests/integration/runners/test_state.py
+++ b/tests/integration/runners/test_state.py
@@ -542,3 +542,88 @@ class OrchEventTest(ShellCase):
self.assertTrue(received)
del listener
signal.alarm(0)
+
+ def test_orchestration_with_pillar_dot_items(self):
+ '''
+ Test to confirm when using a state file that includes other state file, if
+ one of those state files includes pillar related functions that will not
+ be pulling from the pillar cache that all the state files are available and
+ the file_roots has been preserved. See issues #48277 and #46986.
+ '''
+ self.write_conf({
+ 'fileserver_backend': ['roots'],
+ 'file_roots': {
+ 'base': [self.base_env],
+ },
+ })
+
+ orch_sls = os.path.join(self.base_env, 'main.sls')
+ with salt.utils.fopen(orch_sls, 'w') as fp_:
+ fp_.write(textwrap.dedent('''
+ include:
+ - one
+ - two
+ - three
+ '''))
+
+ orch_sls = os.path.join(self.base_env, 'one.sls')
+ with salt.utils.fopen(orch_sls, 'w') as fp_:
+ fp_.write(textwrap.dedent('''
+ {%- set foo = salt['saltutil.runner']('pillar.show_pillar') %}
+ placeholder_one:
+ test.succeed_without_changes
+ '''))
+
+ orch_sls = os.path.join(self.base_env, 'two.sls')
+ with salt.utils.fopen(orch_sls, 'w') as fp_:
+ fp_.write(textwrap.dedent('''
+ placeholder_two:
+ test.succeed_without_changes
+ '''))
+
+ orch_sls = os.path.join(self.base_env, 'three.sls')
+ with salt.utils.fopen(orch_sls, 'w') as fp_:
+ fp_.write(textwrap.dedent('''
+ placeholder_three:
+ test.succeed_without_changes
+ '''))
+
+ orch_sls = os.path.join(self.base_env, 'main.sls')
+
+ listener = salt.utils.event.get_event(
+ 'master',
+ sock_dir=self.master_opts['sock_dir'],
+ transport=self.master_opts['transport'],
+ opts=self.master_opts)
+
+ start_time = time.time()
+ jid = self.run_run_plus(
+ 'state.orchestrate',
+ 'main',
+ __reload_config=True).get('jid')
+
+ if jid is None:
+ raise Exception('jid missing from run_run_plus output')
+
+ signal.signal(signal.SIGALRM, self.alarm_handler)
+ signal.alarm(self.timeout)
+ received = False
+ try:
+ while True:
+ event = listener.get_event(full=True)
+ if event is None:
+ continue
+ if event['tag'] == 'salt/run/{0}/ret'.format(jid):
+ received = True
+ # Don't wrap this in a try/except. We want to know if the
+ # data structure is different from what we expect!
+ ret = event['data']['return']['data']['master']
+ for state in ret:
+ data = ret[state]
+ # Each state should be successful
+ self.assertEqual(data['comment'], 'Success!')
+ break
+ finally:
+ self.assertTrue(received)
+ del listener
+ signal.alarm(0)
|
When pillar items are compiled a new render is instantiated but the file_roots is the pillar_roots. This change forces the __opts__['file_roots'] to be set to what is set in actual_file_roots for all renderers once compile_pillar has finished. Adding a test when this situation is run via a orchestration runner.
|
saltstack_salt
|
train
|
5f509c79d596460a4d8ac77d13f8d64afdb809b2
|
diff --git a/backtrader/feeds/btcsv.py b/backtrader/feeds/btcsv.py
index <HASH>..<HASH> 100644
--- a/backtrader/feeds/btcsv.py
+++ b/backtrader/feeds/btcsv.py
@@ -22,7 +22,6 @@ from __future__ import (absolute_import, division, print_function,
unicode_literals)
import datetime
-import itertools
from .. import feed
from ..utils import date2num
@@ -38,16 +37,16 @@ class BacktraderCSVData(feed.CSVDataBase):
'''
def _loadline(self, linetokens):
- i = itertools.count(0)
+ itoken = iter(linetokens)
- dttxt = linetokens[next(i)]
+ dttxt = next(itoken)
# Format is YYYY-MM-DD
y = int(dttxt[0:4])
m = int(dttxt[5:7])
d = int(dttxt[8:10])
if len(linetokens) == 8:
- tmtxt = linetokens[next(i)]
+ tmtxt = next(itoken)
# Format if present HH:MM:SS
hh = int(tmtxt[0:2])
@@ -63,12 +62,12 @@ class BacktraderCSVData(feed.CSVDataBase):
dtnum = date2num(dt)
self.lines.datetime[0] = dtnum
- self.lines.open[0] = float(linetokens[next(i)])
- self.lines.high[0] = float(linetokens[next(i)])
- self.lines.low[0] = float(linetokens[next(i)])
- self.lines.close[0] = float(linetokens[next(i)])
- self.lines.volume[0] = float(linetokens[next(i)])
- self.lines.openinterest[0] = float(linetokens[next(i)])
+ self.lines.open[0] = float(next(itoken))
+ self.lines.high[0] = float(next(itoken))
+ self.lines.low[0] = float(next(itoken))
+ self.lines.close[0] = float(next(itoken))
+ self.lines.volume[0] = float(next(itoken))
+ self.lines.openinterest[0] = float(next(itoken))
return True
|
Refactor Bactradercsv to use iterator instead of itertools.count
|
backtrader_backtrader
|
train
|
ddf564e135db85ac953f3b1d8ad773f585c46cb4
|
diff --git a/src/WPLoader.php b/src/WPLoader.php
index <HASH>..<HASH> 100644
--- a/src/WPLoader.php
+++ b/src/WPLoader.php
@@ -108,6 +108,9 @@ class WPLoader extends Module {
// allow me not to bother with traling slashes
$wpRootFolder = rtrim( $this->config['wpRootFolder'], '/' ) . '/';
+ // let's make sure this is pointing to a WordPress installation
+ $this->ensureWPRoot( $wpRootFolder );
+
// load an extra config file if any
$this->loadConfigFile( $wpRootFolder );
@@ -245,4 +248,13 @@ class WPLoader extends Module {
}
}
+ /**
+ * @param string $wpRootFolder
+ */
+ private function ensureWPRoot( $wpRootFolder ) {
+ if ( ! file_exists( $wpRootFolder . 'wp-settings.php' ) ) {
+ throw new ModuleConfigException( __CLASS__, "\nThe path `{$wpRootFolder}` is not pointing to a valid WordPress installation folder." );
+ }
+ }
+
}
|
added check and message for valid WP installation folder in WP Loader
|
lucatume_wp-browser
|
train
|
4bbda36f546914be7c5fbc2127085b719336fc40
|
diff --git a/area-light.js b/area-light.js
index <HASH>..<HASH> 100644
--- a/area-light.js
+++ b/area-light.js
@@ -19,14 +19,18 @@ function AreaLight (opts) {
width: 64,
height: 64,
pixelFormat: ctx.PixelFormat.RGBA32F,
- encoding: ctx.Encoding.Linear
+ encoding: ctx.Encoding.Linear,
+ min: ctx.Filter.Linear,
+ mag: ctx.Filter.Linear
})
AreaLight.ltc_mag_texture = ctx.texture2D({
data: AreaLightsData.mag,
width: 64,
height: 64,
pixelFormat: ctx.PixelFormat.R32F,
- encoding: ctx.Encoding.Linear
+ encoding: ctx.Encoding.Linear,
+ min: ctx.Filter.Linear,
+ mag: ctx.Filter.Linear
})
AreaLight.areaLightTextures = true
}
|
Add linear filtering to Area Light data texture
Fixes #<I>
|
pex-gl_pex-renderer
|
train
|
4218bfd4cfe90c06b53b37f5670e61229a241454
|
diff --git a/spec/lib/danger/ci_sources/buddybuild_spec.rb b/spec/lib/danger/ci_sources/buddybuild_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/danger/ci_sources/buddybuild_spec.rb
+++ b/spec/lib/danger/ci_sources/buddybuild_spec.rb
@@ -1,7 +1,7 @@
require "danger/ci_source/buddybuild"
RSpec.describe Danger::Buddybuild do
- let(:valid_env) do
+ let(:valid_env) do
{
"BUDDYBUILD_BUILD_ID" => "595be087b095370001d8e0b3",
"BUDDYBUILD_PULL_REQUEST" => "4",
@@ -11,30 +11,30 @@ RSpec.describe Danger::Buddybuild do
let(:source) { described_class.new(valid_env) }
- describe '.validates_as_ci?' do
- it 'validates when the required env vars are set' do
+ describe ".validates_as_ci?" do
+ it "validates when the required env vars are set" do
expect(described_class.validates_as_ci?(valid_env)).to be true
end
- it 'does not validate when the required env vars are not set' do
+ it "does not validate when the required env vars are not set" do
valid_env["BUDDYBUILD_BUILD_ID"] = nil
expect(described_class.validates_as_ci?(valid_env)).to be false
end
end
- describe '.validates_as_pr?' do
- it 'validates when the required env vars are set' do
+ describe ".validates_as_pr?" do
+ it "validates when the required env vars are set" do
expect(described_class.validates_as_pr?(valid_env)).to be true
end
- it 'does not validate when the required env vars are not set' do
+ it "does not validate when the required env vars are not set" do
valid_env["BUDDYBUILD_PULL_REQUEST"] = nil
expect(described_class.validates_as_pr?(valid_env)).to be false
end
end
- describe '.new' do
- it 'sets the repository slug' do
+ describe ".new" do
+ it "sets the repository slug" do
expect(source.repo_slug).to eq("palleas/Batman")
expect(source.pull_request_id).to eq("4")
end
|
Fix buddybuild_spec rubocop violations
|
danger_danger
|
train
|
3299b13c5c472cc7d9ffabc0160ac74699ca17be
|
diff --git a/src/cdp/options.js b/src/cdp/options.js
index <HASH>..<HASH> 100644
--- a/src/cdp/options.js
+++ b/src/cdp/options.js
@@ -99,6 +99,11 @@ module.exports = {
default: 'min',
type: 'string',
},
+ 'mocha.reporterOptions': {
+ description: 'Reporter options',
+ default: undefined,
+ type: 'object',
+ },
'client.port': {
description: 'Chrome port',
default: 9222,
diff --git a/src/cdp/runner.js b/src/cdp/runner.js
index <HASH>..<HASH> 100644
--- a/src/cdp/runner.js
+++ b/src/cdp/runner.js
@@ -31,8 +31,12 @@ class Runner {
this.onlyTestFilesBrowser = [];
this.all = true;
this.bind();
+ this.debugging = false;
}
log(mode, testFiles) {
+ if (this.debugging) {
+ return this;
+ }
console.log(`${mode}`);
console.log(' test');
testFiles.forEach((f) => {
@@ -221,6 +225,9 @@ class Runner {
if (!this.argv.watch) {
return this;
}
+ if (typeof process.stdin.setRawMode !== 'function') {
+ return this;
+ }
readline.emitKeypressEvents(process.stdin);
process.stdin.setRawMode(true);
process.stdin.setEncoding('utf8');
@@ -247,6 +254,7 @@ class Runner {
const debug = exv.includes('inspect') || exv.includes('debug');
if (debug || this.argv.chrome.devtools) {
this.argv.mocha.timeout = 0;
+ this.debugging = true;
}
}
relativeRootFile(file) {
diff --git a/src/node/index.js b/src/node/index.js
index <HASH>..<HASH> 100644
--- a/src/node/index.js
+++ b/src/node/index.js
@@ -24,8 +24,12 @@ class Runner {
this.isRunning = false;
this.all = true;
this.libs = libs;
+ this.debugging = false;
}
log(mode, testFiles, srcFiles) {
+ if (this.debugging) {
+ return this;
+ }
console.log(`${mode}`);
console.log(' test');
testFiles.forEach((f) => {
@@ -132,6 +136,9 @@ class Runner {
if (!this.argv.watch) {
return this;
}
+ if (typeof process.stdin.setRawMode !== 'function') {
+ return this;
+ }
readline.emitKeypressEvents(process.stdin);
process.stdin.setRawMode(true);
process.stdin.setEncoding('utf8');
@@ -222,6 +229,7 @@ class Runner {
const debug = exv.includes('inspect') || exv.includes('debug');
if (debug) {
this.argv.mocha.enableTimeouts = false;
+ this.debugging = true;
}
return this;
}
|
Exclude setupKeyPress when debugging
|
qlik-oss_after-work.js
|
train
|
2f1da686be4e1d045773da6c62aaaf963ac0d98e
|
diff --git a/ceph_deploy/exc.py b/ceph_deploy/exc.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/exc.py
+++ b/ceph_deploy/exc.py
@@ -35,6 +35,10 @@ class NeedMonError(DeployError):
Cannot find nodes with ceph-mon.
"""
+class NeedDiskError(DeployError):
+ """
+ Must supply disk/path argument
+ """
class UnsupportedPlatform(DeployError):
"""
diff --git a/ceph_deploy/osd.py b/ceph_deploy/osd.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/osd.py
+++ b/ceph_deploy/osd.py
@@ -136,6 +136,8 @@ def prepare(args, cfg, activate_prepared_disk):
errors = 0
for hostname, disk, journal in args.disk:
try:
+ if disk is None:
+ raise exc.NeedDiskError(hostname)
# TODO username
sudo = args.pushy(get_transport(hostname))
|
osd: throw new exception NeedDiskError if create is called with no disk
Fixes: #<I>
|
ceph_ceph-deploy
|
train
|
b556849cfcdc5e1a6a59c232dd7cf7cc89d8edfd
|
diff --git a/base/src/test/java/net/time4j/calendar/hindu/HinduFormatTest.java b/base/src/test/java/net/time4j/calendar/hindu/HinduFormatTest.java
index <HASH>..<HASH> 100644
--- a/base/src/test/java/net/time4j/calendar/hindu/HinduFormatTest.java
+++ b/base/src/test/java/net/time4j/calendar/hindu/HinduFormatTest.java
@@ -40,6 +40,16 @@ public class HinduFormatTest {
}
@Test
+ public void printDMY() {
+ ChronoFormatter<HinduCalendar> f =
+ ChronoFormatter.ofPattern("G, d. MMMM yyyy", PatternType.CLDR, Locale.ENGLISH, HinduCalendar.family());
+ HinduCalendar cal = HinduCalendar.ofOldSolar(3101, HinduMonth.of(IndianMonth.MAGHA).getRasi(), 19);
+ assertThat(
+ f.print(cal),
+ is("K.Y, 19. Magha 3101"));
+ }
+
+ @Test
public void parseDMY() throws ParseException {
ChronoFormatter<HinduCalendar> f =
ChronoFormatter.ofPattern("G, d. MMMM yyyy", PatternType.CLDR, Locale.ENGLISH, HinduCalendar.family())
|
added format test
see issue #<I>
|
MenoData_Time4J
|
train
|
12afa222b4128cd1cc1265b4bbfed83a8f90c306
|
diff --git a/ores/__init__.py b/ores/__init__.py
index <HASH>..<HASH> 100644
--- a/ores/__init__.py
+++ b/ores/__init__.py
@@ -1 +1 @@
-__version__ = "0.5.1"
+__version__ = "0.5.2"
diff --git a/requirements.txt b/requirements.txt
index <HASH>..<HASH> 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,7 +3,7 @@ docopt >= 0.6.2, < 0.6.999
flask >= 0.10.1, < 0.10.999
flask-jsonpify >= 1.5.0, < 1.5.999
mediawiki-utilities >= 0.4.14, < 0.4.999
-revscoring >= 0.6.2
+revscoring >= 0.6.3
stopit >= 1.1.1, < 1.1.999
yamlconf >= 0.0.3, < 0.0.999
socketIO-client == 0.5.6
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@ def requirements(fname):
setup(
name="ores",
- version="0.5.1", # Update in ores/__init__.py too.
+ version="0.5.2", # Update in ores/__init__.py too.
author="Aaron Halfaker",
author_email="ahalfaker@wikimedia.org",
description=("A webserver for hosting scoring services."),
|
Bumps revscoring requirement to <I> and increments version to <I>
|
wikimedia_ores
|
train
|
4bef8e2f9b7ab71700e6f60abde8623dd52ed8c4
|
diff --git a/src/Intervention/Image/ImageServiceProvider.php b/src/Intervention/Image/ImageServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/Intervention/Image/ImageServiceProvider.php
+++ b/src/Intervention/Image/ImageServiceProvider.php
@@ -2,7 +2,6 @@
namespace Intervention\Image;
-use Illuminate\Foundation\Application;
use Illuminate\Support\ServiceProvider;
class ImageServiceProvider extends ServiceProvider
@@ -61,9 +60,9 @@ class ImageServiceProvider extends ServiceProvider
*/
private function getProvider()
{
- if (get_class($this->app) == 'Laravel\Lumen\Application') {
+ if ($this->app instanceof \Laravel\Lumen\Application) {
$provider = '\Intervention\Image\ImageServiceProviderLumen';
- } elseif (version_compare(Application::VERSION, '5.0', '<')) {
+ } elseif (version_compare(\Illuminate\Foundation\Application::VERSION, '5.0', '<')) {
$provider = '\Intervention\Image\ImageServiceProviderLaravel4';
} else {
$provider = '\Intervention\Image\ImageServiceProviderLaravel5';
|
replace get_class with instanceof for $this->app
When using get_class, in Lumen the $this->app could only be an instance of the \Laravel\Lumen\Application and not an instance of a class extending from \Laravel\Lumen\Application. When using the instanceof check people can use a custom application class extending from the main Lumen Application class. To make this work i had to remove the use statements at the top and replace the references in getProvider with the full paths.
|
Intervention_image
|
train
|
e64e920826dafbffcd5e0ffcc7fcc239dd050713
|
diff --git a/salt/returners/couchdb_return.py b/salt/returners/couchdb_return.py
index <HASH>..<HASH> 100644
--- a/salt/returners/couchdb_return.py
+++ b/salt/returners/couchdb_return.py
@@ -2,9 +2,15 @@
Simple returner for CouchDB. Optional configuration
settings are listed below, along with sane defaults.
+couchdb.hooks
+ * is a list of dict objects.
+ * in each dict there is a key and value.
+ * the value is eval()'d
+ * optional "eval", which is executed beforehand.
+
couchdb.db: 'salt'
couchdb.url: 'http://salt:5984/'
-couchdb.hooks:
+couchdb.hooks: [ { "key": "timestamp", "value": "time.time()", "eval": "import time" } ]
'''
import logging
@@ -56,13 +62,10 @@ def _generate_doc( ret, options ):
r = ret
r["_id"] = ret["jid"]
- log.debug( "Starting hook iteration" )
for hook in options["hooks"]:
- log.debug( "Hook: %s" % hook )
# Eval if specified.
if hasattr( hook, "eval" ):
- log.debug( "Evaling '%s'" % hook["eval"] )
eval( hook["eval"] )
r[hook["key"]] = eval( hook["value"] )
@@ -82,7 +85,9 @@ def returner( ret ):
# Create the database if the configuration calls for it.
if not options["db"] in server:
+ log.debug( "Creating database %s" % options["db"] )
server.create( options["db"] )
# Save the document that comes out of _generate_doc.
server[options["db"]].save( _generate_doc( ret, options ) )
+
|
Some comments. Also removed some debug lines.
|
saltstack_salt
|
train
|
fcffe2b1d1b2a6bf3231f6a42c55b57d43e1c1ed
|
diff --git a/tools/doc/voikko_dev_prefs.py b/tools/doc/voikko_dev_prefs.py
index <HASH>..<HASH> 100644
--- a/tools/doc/voikko_dev_prefs.py
+++ b/tools/doc/voikko_dev_prefs.py
@@ -27,5 +27,5 @@
#libvoikko_bin='/usr/bin'
# Command for viewing differences between 'base' and 'current'.
-#diffviewcmd='diff -u0 "%s" "%s" | grep ^.C: 2>/dev/null | less'
+#diffviewcmd='diff -U 0 "%s" "%s" | grep ^.C: 2>/dev/null | less'
#diffviewcmd='vimdiff "%s" "%s"'
diff --git a/tools/pylib/voikkoutils.py b/tools/pylib/voikkoutils.py
index <HASH>..<HASH> 100644
--- a/tools/pylib/voikkoutils.py
+++ b/tools/pylib/voikkoutils.py
@@ -152,7 +152,7 @@ def get_preference(prefname):
if prefname == 'voikko_data_dir': return os.environ['HOME'] + '/svn/voikko/trunk/data'
if prefname == 'encoding': return locale.getpreferredencoding()
if prefname == 'libvoikko_bin': return '/usr/bin'
- if prefname == 'diffviewcmd': return 'diff -u0 "%s" "%s" | grep ^.C: 2>/dev/null | less'
+ if prefname == 'diffviewcmd': return 'diff -U 0 "%s" "%s" | grep ^.C: 2>/dev/null | less'
return None
## Returns True, if given character is a consonant, otherwise retuns False.
|
* Fix the use of diff to conform POSIX <I>-<I>
git-svn-id: <URL>
|
voikko_corevoikko
|
train
|
15a5b7b5792926168498b41f56bf5588831fda53
|
diff --git a/lib/shortener/engine.rb b/lib/shortener/engine.rb
index <HASH>..<HASH> 100644
--- a/lib/shortener/engine.rb
+++ b/lib/shortener/engine.rb
@@ -5,12 +5,6 @@ module Shortener
class ShortenerEngine < Rails::Engine
- # include the shortener helper methods in the base helper so that
- # they can be accessed everywhere
- # initializer 'shortener.helper' do |app|
- # ActionView::Base.send :include, ShortenerHelper
- # end
-
end
end
\ No newline at end of file
|
removed some unncessary code as Helpers were in fact available without it.
|
jpmcgrath_shortener
|
train
|
f8ceab135bb1d2d618e81f4cffb8ef400abb6c9c
|
diff --git a/scripts/pricefeeds/config-example.py b/scripts/pricefeeds/config-example.py
index <HASH>..<HASH> 100644
--- a/scripts/pricefeeds/config-example.py
+++ b/scripts/pricefeeds/config-example.py
@@ -7,6 +7,8 @@ user = ""
passwd = ""
unlock = ""
+ask_confirmation = True
+
################################################################################
## Delegate Feed Publish Parameters
################################################################################
diff --git a/scripts/pricefeeds/pricefeeds.py b/scripts/pricefeeds/pricefeeds.py
index <HASH>..<HASH> 100755
--- a/scripts/pricefeeds/pricefeeds.py
+++ b/scripts/pricefeeds/pricefeeds.py
@@ -566,8 +566,17 @@ if __name__ == "__main__":
print_stats()
## Check publish rules and publich feeds #####################################
- if publish_rule() and rpc._confirm("Are you SURE you would like to publish this feed?") :
- print("Update required! Forcing now!")
- update_feed(rpc,price_feeds)
+ publish = False
+ if publish_rule() :
+
+ if config.ask_confirmation :
+ if rpc._confirm("Are you SURE you would like to publish this feed?") :
+ publish = True
+ else :
+ publish = True
+
+ if publish :
+ print("Update required! Forcing now!")
+ update_feed(rpc,price_feeds)
else :
print("no update required")
|
[Price Feed] Option to remove Confirmation
|
xeroc_python-graphenelib
|
train
|
e5d7b7089c47b7524d7d3558090204338aadc3bf
|
diff --git a/lib/Utils.php b/lib/Utils.php
index <HASH>..<HASH> 100644
--- a/lib/Utils.php
+++ b/lib/Utils.php
@@ -32,4 +32,43 @@ class Utils {
return $output;
}
+
+ /**
+ * Public: Decode JSON string and throw error if fails
+ *
+ * $string - JSON string to decode
+ *
+ * Returns associative array
+ *
+ * Throws Exception if json decode fails with message about why
+ */
+ public static function jsonDecode($string) {
+ $json = json_decode($string, true);
+
+ // if json_decode failed
+ if($json == NULL) {
+ switch (json_last_error()) {
+ case JSON_ERROR_DEPTH:
+ throw new Exception('Maximum stack depth exceeded');
+ break;
+ case JSON_ERROR_STATE_MISMATCH:
+ throw new Exception('Underflow or the modes mismatch');
+ break;
+ case JSON_ERROR_CTRL_CHAR:
+ throw new Exception('Unexpected control character found');
+ break;
+ case JSON_ERROR_SYNTAX:
+ throw new Exception('Syntax error, malformed JSON');
+ break;
+ case JSON_ERROR_UTF8:
+ throw new Exception('Malformed UTF-8 characters, possibly incorrectly encoded');
+ break;
+ default:
+ throw new Exception('Unknown error');
+ break;
+ }
+ }
+
+ return $json;
+ }
}
diff --git a/tests/UtilsTest.php b/tests/UtilsTest.php
index <HASH>..<HASH> 100644
--- a/tests/UtilsTest.php
+++ b/tests/UtilsTest.php
@@ -21,4 +21,25 @@ class UtilsTest extends PHPUnit_Framework_TestCase {
$output = Utils::exec('echo hello!', true);
$this->assertEquals("hello!\n", $output);
}
+
+ /**
+ * Test JSON decode
+ */
+ function testJSONDecode() {
+ $json = '{"hello": "world"}';
+ $output = Utils::jsonDecode($json);
+
+ $this->assertArrayHasKey('hello', $output);
+ $this->assertEquals('world', $output['hello']);
+ }
+
+ /**
+ * Test JSON decode on malformed JSON
+ * @expectedException Exception
+ * @expectedExceptionMessage Syntax error, malformed JSON
+ */
+ function testJSONDecodeMalformed() {
+ $json = '{hello: "world}';
+ $output = Utils::jsonDecode($json);
+ }
}
|
Added jsondecode and tests
|
FusePump_cli.php
|
train
|
d3f9e075235f2ddb28e62f88b74d974a7095875f
|
diff --git a/UserAdminBundle/DataFixtures/MongoDB/LoadUserData.php b/UserAdminBundle/DataFixtures/MongoDB/LoadUserData.php
index <HASH>..<HASH> 100644
--- a/UserAdminBundle/DataFixtures/MongoDB/LoadUserData.php
+++ b/UserAdminBundle/DataFixtures/MongoDB/LoadUserData.php
@@ -4,6 +4,7 @@ namespace OpenOrchestra\UserAdminBundle\DataFixtures\MongoDB;
use Doctrine\Common\Persistence\ObjectManager;
use OpenOrchestra\ModelInterface\DataFixtures\OrchestraFunctionalFixturesInterface;
+use OpenOrchestra\Backoffice\Security\ContributionRoleInterface;
/**
* Class LoadUserData
|
Fix broken fixture (#<I>)
|
open-orchestra_open-orchestra-cms-bundle
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.