| hash (40 chars) | diff (131–114k chars) | message (7–980 chars) | project (5–67 chars) | split (1 class: train) |
|---|---|---|---|---|
a463c9e1c4c8fae1c4583107eed5da0cbbe1e047
|
diff --git a/lib/util/conflicter.js b/lib/util/conflicter.js
index <HASH>..<HASH> 100644
--- a/lib/util/conflicter.js
+++ b/lib/util/conflicter.js
@@ -143,22 +143,24 @@ conflicter.collision = function collision(filepath, content, cb) {
return cb('create');
}
- var encoding = null;
- if (!isBinaryFile(path.resolve(filepath))) {
- encoding = 'utf8';
- }
+ if (!fs.statSync(path.resolve(filepath)).isDirectory()) {
+ var encoding = null;
+ if (!isBinaryFile(path.resolve(filepath))) {
+ encoding = 'utf8';
+ }
- var actual = fs.readFileSync(path.resolve(filepath), encoding);
-
- // In case of binary content, `actual` and `content` are `Buffer` objects,
- // we just can't compare those 2 objects with standard `===`,
- // so we convert each binary content to an hexadecimal string first, and then compare them with standard `===`
- //
- // For not binary content, we can directly compare the 2 strings this way
- if ((!encoding && (actual.toString('hex') === content.toString('hex'))) ||
- (actual === content)) {
- log.identical(filepath);
- return cb('identical');
+ var actual = fs.readFileSync(path.resolve(filepath), encoding);
+
+ // In case of binary content, `actual` and `content` are `Buffer` objects,
+ // we just can't compare those 2 objects with standard `===`,
+ // so we convert each binary content to an hexadecimal string first, and then compare them with standard `===`
+ //
+ // For not binary content, we can directly compare the 2 strings this way
+ if ((!encoding && (actual.toString('hex') === content.toString('hex'))) ||
+ (actual === content)) {
+ log.identical(filepath);
+ return cb('identical');
+ }
}
if (self.force) {
|
Bulk copy feature #<I>
Added ability to skip processing on large directory copies. This
circumvents the issue with the maximum call stack.
Updated new copy and directory methods to be separate
Fixed tabs back to spaces
Removed additional spaces
Recommended changes: moved directory method out and fixed formatting
|
yeoman_environment
|
train
|
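
The hunk above guards the collision check so directories are never read as files, and its comment explains why binary Buffers must be compared as hex strings. A rough Python sketch of the same decision (all names are illustrative; Python `bytes` compare by value, so Node's Buffer caveat disappears):

```python
import os

def is_identical(filepath, content):
    """Sketch of the guarded collision check: skip directories, then
    compare the on-disk bytes with the incoming content."""
    path = os.path.abspath(filepath)
    if os.path.isdir(path):       # a directory has no content to diff
        return False
    with open(path, "rb") as fh:  # raw bytes work for text and binary alike
        return fh.read() == content
```
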
dac87b717d832dbdecabe43e81b96a4a7d6a3efd
|
diff --git a/value/src/test/java/com/google/auto/value/processor/CompilationTest.java b/value/src/test/java/com/google/auto/value/processor/CompilationTest.java
index <HASH>..<HASH> 100644
--- a/value/src/test/java/com/google/auto/value/processor/CompilationTest.java
+++ b/value/src/test/java/com/google/auto/value/processor/CompilationTest.java
@@ -38,6 +38,7 @@ import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.ElementFilter;
import javax.tools.JavaFileObject;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@@ -2333,7 +2334,11 @@ public class CompilationTest {
.compilesWithoutError();
}
+ // Test currently ignored because sometimes we get two UNDEFINED errors and sometimes we
+ // get no warnings if there is an error, depending on the exact JDK version. We may be
+ // able to get something a bit more solid once we require JDK 8.
@Test
+ @Ignore
public void annotationReferencesUndefined() {
// Test that we don't throw an exception if asked to compile @SuppressWarnings(UNDEFINED)
// where UNDEFINED is an undefined symbol.
|
@Ignore annotationReferencesUndefined test, which causes problems with the Java 7 build on Travis.
-------------
Created by MOE: <URL>
|
google_auto
|
train
|
f27761c7d13672d087b895d6c8c115c0d68cd6f8
|
diff --git a/lib/kafkaesque.js b/lib/kafkaesque.js
index <HASH>..<HASH> 100644
--- a/lib/kafkaesque.js
+++ b/lib/kafkaesque.js
@@ -18,7 +18,7 @@ var assert = require('assert');
var api = require('./api');
var events = require('events');
var _ = require('underscore');
-var reInterval = require('reInterval');
+var reinterval = require('reinterval');
/**
* main entry point for kafka client
@@ -738,7 +738,7 @@ module.exports = function(options) {
console.log('error code', errorCode);
});
- _heartbeatInterval = reInterval(function heartbeat() {
+ _heartbeatInterval = reinterval(function heartbeat() {
if (!_resyncing) {
_groupCoordinator.heartbeat({group: _options.group, generation: _groupGeneration, memberId: _groupMemberId}, _noop);
}
diff --git a/samples/produce.js b/samples/produce.js
index <HASH>..<HASH> 100644
--- a/samples/produce.js
+++ b/samples/produce.js
@@ -9,14 +9,14 @@ var kafkaesque = require('../lib/kafkaesque')({brokers: [{host: 'localhost', por
// specify the topic and partition to produce to,
// produce the array of strings
// callback optional
-consumer.produce({topic: 'testing', partition: 0}, ['message form 1'], function(err, res) {})
+kafkaesque.produce({topic: 'testing', partition: 0}, ['message form 1'], function(err, res) {})
// specify the topic to produce to,
// kafkaesque will choose the partition to produce to (round-robin style)
// produce the string
-consumer.produce({topic: 'testing'}, 'message form 2')
+kafkaesque.produce({topic: 'testing'}, 'message form 2')
// specify the topic to produce to,
// kafkaesque will choose the partition to produce to (round-robin style)
// produce the string
-consumer.produce('testing', 'message form 3')
+kafkaesque.produce('testing', 'message form 3')
|
changed consumer to kafkaesque in produce.js and fixed the required package name
|
apparatus_Kafkaesque
|
train
|
ef5a7b7940aa770084f5f367940b131c988b5336
|
diff --git a/public/bloom.js b/public/bloom.js
index <HASH>..<HASH> 100644
--- a/public/bloom.js
+++ b/public/bloom.js
@@ -62,11 +62,11 @@ window.addEventListener("load", function init() {
var i, mesh;
var geometry = new THREE.SphereBufferGeometry(1, 4, 4);
- var material = new THREE.MeshPhongMaterial({color: 0x00f000, shading: THREE.FlatShading});
+ var material = new THREE.MeshPhongMaterial({color: 0xffffff, shading: THREE.FlatShading});
for(i = 0; i < 100; ++i) {
- material = new THREE.MeshPhongMaterial({color: 0xffffff * Math.random(), shading: THREE.FlatShading});
+ material = new THREE.MeshPhongMaterial({color: 0xffffff, shading: THREE.FlatShading});
mesh = new THREE.Mesh(geometry, material);
mesh.position.set(Math.random() - 0.5, Math.random() - 0.5, Math.random() - 0.5).normalize();
@@ -82,7 +82,7 @@ window.addEventListener("load", function init() {
// Cage.
var geometry = new THREE.BoxGeometry(25, 825, 25);
- var material = new THREE.MeshLambertMaterial({color: 0x080808});
+ var material = new THREE.MeshLambertMaterial({color: 0x0b0b0b});
var mesh = new THREE.Mesh(geometry, material);
var o = new THREE.Object3D();
@@ -121,6 +121,7 @@ window.addEventListener("load", function init() {
var pass = new POSTPROCESSING.BloomPass({
resolution: 512,
+ blurriness: 1.0,
strength: 1.0
});
@@ -129,17 +130,31 @@ window.addEventListener("load", function init() {
// Shader settings.
+ //"resolution": Math.round(Math.log(pass.resolution) / Math.log(2)),
+ //gui.add(params, "resolution").min(6).max(11).step(1).onChange(function() { pass.resolution = Math.pow(2, params["resolution"]); });
+
var params = {
- //"resolution": Math.round(Math.log(pass.resolution) / Math.log(2)),
"resolution": pass.resolutionScale,
- "strength": pass.copyMaterial.uniforms.opacity.value,
- "blurriness": pass.blurriness
+ "blurriness": pass.blurriness,
+ "strength": pass.combineMaterial.uniforms.opacity2.value,
+ "average lum": pass.toneMappingMaterial.uniforms.averageLuminance.value,
+ "max luminance": pass.toneMappingMaterial.uniforms.maxLuminance.value,
+ "middle grey": pass.toneMappingMaterial.uniforms.middleGrey.value,
+ "blend": true
};
- //gui.add(params, "resolution").min(6).max(11).step(1).onChange(function() { pass.resolution = Math.pow(2, params["resolution"]); });
gui.add(params, "resolution").min(0.0).max(1.0).step(0.01).onChange(function() { pass.resolutionScale = params["resolution"]; composer.reset(); });
gui.add(params, "blurriness").min(0.0).max(3.0).step(0.1).onChange(function() { pass.blurriness = params["blurriness"]; });
- gui.add(params, "strength").min(0.0).max(3.0).step(0.01).onChange(function() { pass.copyMaterial.uniforms.opacity.value = pass.combineMaterial.uniforms.opacity2.value = params["strength"]; });
+ gui.add(params, "strength").min(0.0).max(3.0).step(0.01).onChange(function() { pass.combineMaterial.uniforms.opacity2.value = params["strength"]; });
+ gui.add(params, "average lum").min(0.01).max(1.0).step(0.01).onChange(function() { pass.toneMappingMaterial.uniforms.averageLuminance.value = params["average lum"]; });
+ gui.add(params, "max luminance").min(0.0).max(16.0).step(0.1).onChange(function() { pass.toneMappingMaterial.uniforms.maxLuminance.value = params["max luminance"]; });
+ gui.add(params, "middle grey").min(0.0).max(1.0).step(0.01).onChange(function() { pass.toneMappingMaterial.uniforms.middleGrey.value = params["middle grey"]; });
+
+ gui.add(params, "blend").onChange(function() {
+
+ pass.combineMaterial.uniforms.opacity1.value = params["blend"] ? 1.0 : 0.0;
+
+ });
/**
* Handles resizing.
|
Updated bloom demo.
Added tone-mapping options.
|
vanruesc_postprocessing
|
train
|
6bea1b169ec0ffe5e9f6e22a128c523d87f34dea
|
diff --git a/packages/cra-universal/src/config/webpack.config.js b/packages/cra-universal/src/config/webpack.config.js
index <HASH>..<HASH> 100644
--- a/packages/cra-universal/src/config/webpack.config.js
+++ b/packages/cra-universal/src/config/webpack.config.js
@@ -72,7 +72,11 @@ const config = {
chunkFilename: isProd('[id].[hash].chunk.js', '[id].chunk.js')
},
target: 'node',
- externals: [nodeExternals()],
+ externals: [
+ nodeExternals({
+ whitelist: [/\.(?!(?:jsx?|json)$).{1,5}$/i]
+ })
+ ],
watchOptions: {
ignored: /node_modules/,
aggregateTimeout: 300
diff --git a/packages/docs/package.json b/packages/docs/package.json
index <HASH>..<HASH> 100644
--- a/packages/docs/package.json
+++ b/packages/docs/package.json
@@ -9,6 +9,7 @@
"react-scripts": "1.1.4"
},
"dependencies": {
+ "basscss": "8.0.4",
"@cra-express/core": "^4.0.0-4",
"@cra-express/redux-prefetcher": "^4.0.0-4",
"loadable-components": "2.1.0",
diff --git a/packages/docs/src/App.js b/packages/docs/src/App.js
index <HASH>..<HASH> 100644
--- a/packages/docs/src/App.js
+++ b/packages/docs/src/App.js
@@ -3,6 +3,7 @@ import { Route, Switch } from 'react-router';
import routes from './routes';
import './App.css';
+import 'basscss/css/basscss.css';
const RouteWithSubRoutes = route => (
<Route
|
Add node-externals whitelist for non-JS files (#<I>)
|
antonybudianto_cra-universal
|
train
|
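
The whitelist regex is the heart of this change: it re-bundles any import whose extension is not js/jsx/json (CSS, images, fonts) instead of leaving it external. The pattern behaves the same under Python's `re`, which makes it easy to sanity-check (file names below are made up):

```python
import re

# Same pattern as the nodeExternals whitelist above: match a 1-5 character
# extension that is anything other than js/jsx/json.
pattern = re.compile(r"\.(?!(?:jsx?|json)$).{1,5}$", re.IGNORECASE)

for name in ["logo.svg", "styles.css", "index.js", "App.jsx", "data.json"]:
    print(name, bool(pattern.search(name)))
# logo.svg True, styles.css True, index.js False, App.jsx False, data.json False
```
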
948db1aca03d8bd0394861cfc5b8f1642e455372
|
diff --git a/db/orchestrate/orchestrate.go b/db/orchestrate/orchestrate.go
index <HASH>..<HASH> 100644
--- a/db/orchestrate/orchestrate.go
+++ b/db/orchestrate/orchestrate.go
@@ -111,12 +111,12 @@ func (ar *ArOrchestrate) Find(id interface{}, out interface{}) error {
func (ar *ArOrchestrate) DbSave() error {
var err error
- if ar.UpdatedAt != nil {
+ if ar.UpdatedAt != nil { // existing instance (PUT/PATCH)
+ _, err = ar.Client().Put(ar.ModelName(), ar.ID, ar.Self())
+ } else { // new instance (POST)
if ar.ID == "" {
ar.ID = uuid.NewV4().String()
}
- _, err = ar.Client().Put(ar.ModelName(), ar.ID, ar.Self())
- } else {
_, err = ar.Client().PutIfAbsent(ar.ModelName(), ar.ID, ar.Self())
}
|
fixed bug with id generation logic
|
obieq_goar
|
train
|
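
The bug was that the UUID generation lived in the existing-instance branch, so new records (POST) never received an id. The fix keys the whole decision on `UpdatedAt`. A compact Python restatement, with a dict-backed stand-in for the Orchestrate client:

```python
import uuid

class MemoryClient:
    """Stand-in for the Orchestrate client in the diff above."""
    def __init__(self):
        self.store = {}
    def put(self, model, key, value):
        self.store[(model, key)] = value
    def put_if_absent(self, model, key, value):
        self.store.setdefault((model, key), value)

def db_save(record, client, model_name):
    # Corrected branch: only *new* records (no updated_at) get a generated id.
    if record.get("updated_at") is not None:    # existing instance (PUT/PATCH)
        client.put(model_name, record["id"], record)
    else:                                        # new instance (POST)
        if not record.get("id"):
            record["id"] = str(uuid.uuid4())
        client.put_if_absent(model_name, record["id"], record)

client = MemoryClient()
new_record = {"updated_at": None}
db_save(new_record, client, "users")
print(new_record["id"])   # freshly generated UUID
```
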
db10be0d3aa4152fdc7d489fa92eb1a1b78cfdcb
|
diff --git a/lib/ranger/room.js b/lib/ranger/room.js
index <HASH>..<HASH> 100644
--- a/lib/ranger/room.js
+++ b/lib/ranger/room.js
@@ -17,6 +17,8 @@ exports.Room = function (connection, attrs) {
this.topic = attrs.topic;
this.membershipLimit = attrs.membership_limit;
this.locked = attrs.locked;
+ this.openToGuests = attrs.open_to_guests;
+ this.guestToken = attrs.active_token_value;
this.createdAt = new Date(attrs.created_at);
this.updatedAt = new Date(attrs.updated_at);
|
Adding openToGuests and guestToken to Room
|
mrduncan_ranger
|
train
|
480c477389a3e2ec1b365b67d9be631c17c284cf
|
diff --git a/Util/Factory/Query/DoctrineBuilder.php b/Util/Factory/Query/DoctrineBuilder.php
index <HASH>..<HASH> 100644
--- a/Util/Factory/Query/DoctrineBuilder.php
+++ b/Util/Factory/Query/DoctrineBuilder.php
@@ -168,10 +168,11 @@ class DoctrineBuilder implements QueryInterface
}
if ($hydration_mode == Query::HYDRATE_ARRAY)
{
- foreach($this->fields as &$field){
- $field = $field . ' as ' . str_replace('.', '_', $field);
+ $selectFields = $this->fields;
+ foreach($selectFields as &$field){
+ $field = $field. ' as ' . str_replace('.', '_', $field);
}
- $qb->select(implode(" , ", $this->fields));
+ $qb->select(implode(" , ", $selectFields));
}
else
{
|
The bug occurs when the 'SELECT' fields are not unique;
modify ONLY the 'SELECT' statement fields.
|
AliHichem_AliDatatableBundle
|
train
|
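
The root cause is PHP's foreach-by-reference: decorating `$this->fields` in place meant every later use (and every repeated call) saw fields already suffixed with ` as ...`. Working on a copy leaves the stored list pristine, which this Python sketch mirrors:

```python
def build_select(fields):
    # Decorate a *copy* of the field list; the buggy version mutated the
    # shared list, so a second call re-decorated already-aliased fields.
    select_fields = [f + " as " + f.replace(".", "_") for f in fields]
    return " , ".join(select_fields)

fields = ["u.name", "u.email"]
print(build_select(fields))  # u.name as u_name , u.email as u_email
print(fields)                # unchanged: ['u.name', 'u.email']
```
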
3fca630543325397cc182548d913df5f66912d5e
|
diff --git a/src/Native5/Services/Users/DefaultUserManager.php b/src/Native5/Services/Users/DefaultUserManager.php
index <HASH>..<HASH> 100644
--- a/src/Native5/Services/Users/DefaultUserManager.php
+++ b/src/Native5/Services/Users/DefaultUserManager.php
@@ -235,7 +235,7 @@ class DefaultUserManager extends ApiClient implements UserManager
global $logger;
$path = 'users/deactivate';
- $request = $this->_remoteServer->get($path)
+ $request = $this->_remoteServer->post($path)
->setPostField('username', $username);
try {
$response = $request->send();
|
Deactivate user should be a POST; GET was being used
|
native5_native5-sdk-services-php
|
train
|
691e87aab9e1aaa5a5284fbccc11c14408505150
|
diff --git a/pmxbotweb/pmxbotweb.py b/pmxbotweb/pmxbotweb.py
index <HASH>..<HASH> 100644
--- a/pmxbotweb/pmxbotweb.py
+++ b/pmxbotweb/pmxbotweb.py
@@ -54,6 +54,8 @@ def run(configFile=None, configDict=None, start=True):
'global': {
'server.socket_port': config.web_port,
'server.socket_host': config.web_host,
+ #'tools.encode.on': True,
+ 'tools.encode.encoding': 'utf-8',
},
'/pmxbot.png' : {
'tools.staticfile.on' : True,
|
Set default encoding to UTF-8; doing so seems to force the client to encode the query strings as utf-8, fixing issues with performing searches with international characters.
|
yougov_pmxbot
|
train
|
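
The commit's reasoning is that advertising a UTF-8 response encoding nudges browsers into percent-encoding query strings as UTF-8 as well. What that looks like on the wire, as a quick check unrelated to CherryPy itself:

```python
from urllib.parse import urlencode

# A UTF-8-encoding client percent-encodes non-ASCII query values like this,
# which is what the server now expects for searches:
print(urlencode({"q": "café"}))   # q=caf%C3%A9
```
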
43d4fc18aff4b25a15309b055eeb6d1db8f72f7d
|
diff --git a/prow/cmd/deck/main.go b/prow/cmd/deck/main.go
index <HASH>..<HASH> 100644
--- a/prow/cmd/deck/main.go
+++ b/prow/cmd/deck/main.go
@@ -95,8 +95,11 @@ func gatherOptions() options {
return o
}
-// Matches letters, numbers, hyphens, and underscores.
-var objReg = regexp.MustCompile(`^[\w-]+$`)
+var (
+ // Matches letters, numbers, hyphens, and underscores.
+ objReg = regexp.MustCompile(`^[\w-]+$`)
+ staticFilesLocation = "static"
+)
func main() {
o := gatherOptions()
@@ -115,11 +118,11 @@ func main() {
gziphandler.GzipHandler(handleCached(http.FileServer(http.Dir(dir)))))
}
- // locally just serve from ./static, otherwise do the full main
+ // locally just serve from ./staticFilesLocation, otherwise do the full main
if o.runLocal {
- mux.Handle("/", staticHandlerFromDir("./static"))
+ mux.Handle("/", staticHandlerFromDir("./"+staticFilesLocation))
} else {
- mux.Handle("/", staticHandlerFromDir("/static"))
+ mux.Handle("/", staticHandlerFromDir("/"+staticFilesLocation))
mux = prodOnlyMain(o, mux)
}
@@ -166,6 +169,7 @@ func prodOnlyMain(o options, mux *http.ServeMux) *http.ServeMux {
mux.Handle("/rerun", gziphandler.GzipHandler(handleRerun(kc)))
mux.Handle("/config", gziphandler.GzipHandler(handleConfig(configAgent)))
mux.Handle("/branding.js", gziphandler.GzipHandler(handleBranding(configAgent)))
+ mux.Handle("/favicon.ico", gziphandler.GzipHandler(handleFavicon(configAgent)))
if o.hookURL != "" {
mux.Handle("/plugin-help.js",
@@ -564,7 +568,7 @@ func handleBranding(ca jobs.ConfigAgent) http.HandlerFunc {
b, err := json.Marshal(config.Deck.Branding)
if err != nil {
logrus.WithError(err).Error("Error marshaling branding config.")
- http.Error(w, "Failed to marhshal branding config.", http.StatusInternalServerError)
+ http.Error(w, "Failed to marshal branding config.", http.StatusInternalServerError)
return
}
// If we have a "var" query, then write out "var value = [...];".
@@ -577,6 +581,17 @@ func handleBranding(ca jobs.ConfigAgent) http.HandlerFunc {
}
}
+func handleFavicon(ca jobs.ConfigAgent) http.HandlerFunc {
+ return func(w http.ResponseWriter, r *http.Request) {
+ config := ca.Config()
+ if config.Deck.Branding != nil {
+ http.ServeFile(w, r, staticFilesLocation+"/"+config.Deck.Branding.Favicon)
+ } else {
+ http.ServeFile(w, r, staticFilesLocation+"/favicon.ico")
+ }
+ }
+}
+
func isValidatedGitOAuthConfig(githubOAuthConfig *config.GithubOAuthConfig) bool {
return githubOAuthConfig.ClientID != "" && githubOAuthConfig.ClientSecret != "" &&
githubOAuthConfig.RedirectURL != "" &&
|
Honor branding while serving favicon.ico
|
kubernetes_test-infra
|
train
|
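
The new `handleFavicon` is a simple fallback: serve the branded icon when Deck's branding config names one, otherwise the stock `favicon.ico` from the static directory. A small Python rendering of that resolution logic (the dict shape is an assumption for illustration):

```python
STATIC = "static"   # mirrors staticFilesLocation in the hunk above

def favicon_path(branding):
    """Resolve the icon to serve: branded if configured, stock otherwise."""
    if branding and branding.get("favicon"):
        return f"{STATIC}/{branding['favicon']}"
    return f"{STATIC}/favicon.ico"

print(favicon_path({"favicon": "prow.ico"}))  # static/prow.ico
print(favicon_path(None))                     # static/favicon.ico
```
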
1d922850e65a4d51f54ca3d6132fdd05441c97dc
|
diff --git a/structr-ui/src/main/java/org/structr/web/maintenance/DeployCommand.java b/structr-ui/src/main/java/org/structr/web/maintenance/DeployCommand.java
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/java/org/structr/web/maintenance/DeployCommand.java
+++ b/structr-ui/src/main/java/org/structr/web/maintenance/DeployCommand.java
@@ -33,11 +33,11 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.HashMap;
-import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
+import java.util.TreeMap;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
@@ -345,7 +345,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
private void exportFiles(final Path target, final Path configTarget) throws FrameworkException {
- final Map<String, Object> config = new LinkedHashMap<>();
+ final Map<String, Object> config = new TreeMap<>();
final App app = StructrApp.getInstance();
try (final Tx tx = app.tx()) {
@@ -377,7 +377,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
private void exportFilesAndFolders(final Path target, final Folder folder, final Map<String, Object> config) throws IOException {
- final Map<String, Object> properties = new LinkedHashMap<>();
+ final Map<String, Object> properties = new TreeMap<>();
final String name = folder.getName();
final Path path = target.resolve(name);
@@ -406,7 +406,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
private void exportFile(final Path target, final FileBase file, final Map<String, Object> config) throws IOException {
- final Map<String, Object> properties = new LinkedHashMap<>();
+ final Map<String, Object> properties = new TreeMap<>();
final String name = file.getName();
final Path src = file.getFileOnDisk().toPath();
Path targetPath = target.resolve(name);
@@ -453,7 +453,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
private void exportPages(final Path target, final Path configTarget) throws FrameworkException {
- final Map<String, Object> pagesConfig = new LinkedHashMap<>();
+ final Map<String, Object> pagesConfig = new TreeMap<>();
final App app = StructrApp.getInstance();
try (final Tx tx = app.tx()) {
@@ -465,7 +465,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
final String content = page.getContent(RenderContext.EditMode.DEPLOYMENT);
if (content != null) {
- final Map<String, Object> properties = new LinkedHashMap<>();
+ final Map<String, Object> properties = new TreeMap<>();
final String name = page.getName();
final Path pageFile = target.resolve(name + ".html");
boolean doExport = true;
@@ -517,7 +517,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
private void exportComponents(final Path target, final Path configTarget) throws FrameworkException {
- final Map<String, Object> configuration = new LinkedHashMap<>();
+ final Map<String, Object> configuration = new TreeMap<>();
final App app = StructrApp.getInstance();
try (final Tx tx = app.tx()) {
@@ -535,7 +535,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
if (inTrash || hasParent) {
continue;
}
-
+
final String content = node.getContent(RenderContext.EditMode.DEPLOYMENT);
if (content != null) {
@@ -545,7 +545,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
name = node.getUuid();
}
- final Map<String, Object> properties = new LinkedHashMap<>();
+ final Map<String, Object> properties = new TreeMap<>();
final Path targetFile = target.resolve(name + ".html");
if (Files.exists(targetFile)) {
@@ -594,7 +594,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
private void exportTemplates(final Path target, final Path configTarget) throws FrameworkException {
- final Map<String, Object> configuration = new LinkedHashMap<>();
+ final Map<String, Object> configuration = new TreeMap<>();
final App app = StructrApp.getInstance();
try (final Tx tx = app.tx()) {
@@ -626,7 +626,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
private void exportTemplateSource(final Path target, final DOMNode template, final Map<String, Object> configuration) throws FrameworkException {
- final Map<String, Object> properties = new LinkedHashMap<>();
+ final Map<String, Object> properties = new TreeMap<>();
boolean doExport = true;
final String content = template.getProperty(Template.content);
@@ -681,7 +681,7 @@ public class DeployCommand extends NodeServiceCommand implements MaintenanceComm
for (final ResourceAccess res : app.nodeQuery(ResourceAccess.class).sort(ResourceAccess.signature).getAsList()) {
- final Map<String, Object> grant = new LinkedHashMap<>();
+ final Map<String, Object> grant = new TreeMap<>();
grants.add(grant);
grant.put("signature", res.getProperty(ResourceAccess.signature));
|
Fixes order of exported properties and configuration data by using sorted maps.
|
structr_structr
|
train
|
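
Swapping `LinkedHashMap` (insertion order) for `TreeMap` (key order) makes every export deterministic, so re-exports stop producing spurious diffs. The same idea in Python is sorting keys at serialization time:

```python
import json

properties = {"visibleToPublicUsers": True, "name": "index.html", "position": 3}

# Insertion order depends on code paths; sorted keys are stable across runs.
print(json.dumps(properties, sort_keys=True))
# {"name": "index.html", "position": 3, "visibleToPublicUsers": true}
```
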
34167cfc4c8256afb5cfe8f45f8c4a35a4015395
|
diff --git a/Paulus/Paulus.php b/Paulus/Paulus.php
index <HASH>..<HASH> 100644
--- a/Paulus/Paulus.php
+++ b/Paulus/Paulus.php
@@ -46,14 +46,11 @@ class Paulus {
*
* Paulus constructor
*
- * @param mixed $config Configuration array (or ArrayAccess class) defining Paulus' many options
- * @param mixed $request The router's request object
- * @param mixed $response The router's response object
- * @param mixed $service The router's service object
+ * @param array $config Configuration array defining Paulus' many options
* @access public
* @return Paulus
*/
- public function __construct( $config = null ) {
+ public function __construct( array $config = null ) {
// First things first... get our init time
if ( !defined( 'PAULUS_START_TIME' ) ) {
define( 'PAULUS_START_TIME', microtime( true ) );
@@ -62,8 +59,13 @@ class Paulus {
// Define our application's constants
$this->define_constants();
- // Either grab the passed config or use our Singleton Config
- $this->config = $config ?: Config::instance();
+ // Set our application's configuration from our Config instance
+ $this->config = Config::instance();
+
+ // If we passed a config, let's merge it in
+ if ( !is_null( $config ) ) {
+ Config::instance()->merge_custom_config( $config );
+ }
// Create our auto loader
$autoloader = new AutoLoader( $this->config );
|
Now allowing the merging of a custom configuration on app construction. :)
|
Rican7_Paulus
|
train
|
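
The constructor no longer accepts an arbitrary config object; it always starts from the `Config` singleton and merges the caller's array into it. A rough Python sketch of that construction flow (the class shape is assumed from the diff):

```python
class Config:
    _instance = None

    def __init__(self):
        self.values = {}                 # defaults would live here

    @classmethod
    def instance(cls):
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    def merge_custom_config(self, custom):
        self.values.update(custom)

def construct(config=None):
    cfg = Config.instance()              # always start from the singleton...
    if config is not None:               # ...then fold user overrides into it
        cfg.merge_custom_config(config)
    return cfg

print(construct({"debug": True}).values)   # {'debug': True}
```
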
cc01e34fb753993a72e15e894905e3e1a3bcdec6
|
diff --git a/main_test.go b/main_test.go
index <HASH>..<HASH> 100644
--- a/main_test.go
+++ b/main_test.go
@@ -479,7 +479,7 @@ var _ = Describe("Router Integration", func() {
session, err := Start(gorouterCmd, GinkgoWriter, GinkgoWriter)
Expect(err).ToNot(HaveOccurred())
Eventually(session, 30*time.Second).Should(Say("Unable to fetch token"))
- Expect(session).To(Exit(1))
+ Eventually(session, 5 * time.Second).Should(Exit(1))
})
})
})
|
Fix expectation on session exit for main_test of gorouter.
[#<I>]
|
cloudfoundry_gorouter
|
train
|
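
`Expect(session).To(Exit(1))` asserts immediately, but the process exits asynchronously after emitting the log line, so the original test raced. `Eventually` polls until a deadline instead. The pattern, reduced to plain Python:

```python
import time

def eventually(predicate, timeout=5.0, interval=0.1):
    """Poll until `predicate()` is true or `timeout` elapses, rather than
    asserting once against a process that has not finished exiting yet."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if predicate():
            return
        time.sleep(interval)
    raise AssertionError(f"condition not met within {timeout}s")
```
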
414430c78f8aab601feb4067d3731e70abfce8ec
|
diff --git a/pandas/tests/reshape/merge/test_join.py b/pandas/tests/reshape/merge/test_join.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/reshape/merge/test_join.py
+++ b/pandas/tests/reshape/merge/test_join.py
@@ -2,7 +2,7 @@ import numpy as np
from numpy.random import randn
import pytest
-from pandas._libs import join as libjoin
+from pandas._libs.join import inner_join, left_outer_join
import pandas as pd
from pandas import DataFrame, Index, MultiIndex, Series, concat, merge
@@ -48,7 +48,7 @@ class TestJoin:
right = a_([1, 1, 0, 4, 2, 2, 1], dtype=np.int64)
max_group = 5
- ls, rs = libjoin.left_outer_join(left, right, max_group)
+ ls, rs = left_outer_join(left, right, max_group)
exp_ls = left.argsort(kind="mergesort")
exp_rs = right.argsort(kind="mergesort")
@@ -70,7 +70,7 @@ class TestJoin:
right = a_([1, 1, 0, 4, 2, 2, 1], dtype=np.int64)
max_group = 5
- rs, ls = libjoin.left_outer_join(right, left, max_group)
+ rs, ls = left_outer_join(right, left, max_group)
exp_ls = left.argsort(kind="mergesort")
exp_rs = right.argsort(kind="mergesort")
@@ -116,7 +116,7 @@ class TestJoin:
right = a_([1, 1, 0, 4, 2, 2, 1, 4], dtype=np.int64)
max_group = 5
- ls, rs = libjoin.inner_join(left, right, max_group)
+ ls, rs = inner_join(left, right, max_group)
exp_ls = left.argsort(kind="mergesort")
exp_rs = right.argsort(kind="mergesort")
|
isort/mypy import conflict (#<I>)
|
pandas-dev_pandas
|
train
|
ce1226974010c3d76b2b8473bcc3e202a677fec2
|
diff --git a/etrago/appl.py b/etrago/appl.py
index <HASH>..<HASH> 100644
--- a/etrago/appl.py
+++ b/etrago/appl.py
@@ -103,6 +103,10 @@ def etrago(args):
# for SH scenario run do data preperation:
if args['scn_name'] == 'SH Status Quo':
data_manipulation_sh(network)
+
+ # grouping of parallel lines
+ if args['line_grouping']:
+ group_parallel_lines(network)
#load shedding in order to hunt infeasibilities
if args['load_shedding']:
@@ -124,10 +128,6 @@ def etrago(args):
else:
extra_functionality=None
- # grouping of parallel lines
- if args['line_grouping']:
- group_parallel_lines(network)
-
# parallisation
if args['parallelisation']:
parallelisation(network, start_snapshot=args['start_snapshot'], end_snapshot=args['end_snapshot'],group_size=1, solver_name=args['solver'], extra_functionality=extra_functionality)
|
changed position of grouping function in script;
otherwise there might be conflicts with network clustering.
|
openego_eTraGo
|
train
|
bd7e615e1aaff8404faa07fd85b018c289552378
|
diff --git a/src/Command/Local/LocalDrushAliasesCommand.php b/src/Command/Local/LocalDrushAliasesCommand.php
index <HASH>..<HASH> 100644
--- a/src/Command/Local/LocalDrushAliasesCommand.php
+++ b/src/Command/Local/LocalDrushAliasesCommand.php
@@ -2,6 +2,7 @@
namespace Platformsh\Cli\Command\Local;
use Cocur\Slugify\Slugify;
+use GuzzleHttp\Exception\BadResponseException;
use Platformsh\Cli\Command\CommandBase;
use Platformsh\Cli\Exception\RootNotFoundException;
use Platformsh\Cli\Local\BuildFlavor\Drupal;
@@ -118,7 +119,16 @@ class LocalDrushAliasesCommand extends CommandBase
// This will at least be used for \Platformsh\Cli\Service\Drush::getSiteUrl().
if (!$this->api()->hasCachedCurrentDeployment($environment) && $environment->isActive()) {
$this->debug('Fetching deployment information for environment: ' . $environment->id);
- $this->api()->getCurrentDeployment($environment);
+ try {
+ $this->api()->getCurrentDeployment($environment);
+ } catch (BadResponseException $e) {
+ if ($e->getResponse() && $e->getResponse()->getStatusCode() === 400) {
+ // Ignore 400 errors relating to an invalid deployment.
+ $this->debug('The deployment is invalid: ' . $e->getMessage());
+ } else {
+ throw $e;
+ }
+ }
}
if ($environment->deployment_target === 'local') {
|
Ignore <I> errors from getCurrentDeployment() in drush-aliases command
|
platformsh_platformsh-cli
|
train
|
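
The policy in the new try/catch is deliberately narrow: a 400 from the deployments endpoint just means the environment's deployment is invalid, so it is logged and ignored, while any other HTTP error still propagates. An equivalent shape with Python's `requests` (the URL and API are placeholders):

```python
import requests

def current_deployment(url):
    """Fetch deployment info; treat HTTP 400 (invalid deployment) as 'none',
    re-raise everything else."""
    try:
        resp = requests.get(url)
        resp.raise_for_status()
        return resp.json()
    except requests.HTTPError as exc:
        if exc.response is not None and exc.response.status_code == 400:
            return None          # invalid deployment: safe to ignore here
        raise
```
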
c6b469768d33e771c6448238e0af88932a13b3fb
|
diff --git a/src/tuwien/auto/calimero/knxnetip/Connection.java b/src/tuwien/auto/calimero/knxnetip/Connection.java
index <HASH>..<HASH> 100644
--- a/src/tuwien/auto/calimero/knxnetip/Connection.java
+++ b/src/tuwien/auto/calimero/knxnetip/Connection.java
@@ -102,6 +102,7 @@ public final class Connection implements Closeable {
private static final Duration connectionTimeout = Duration.ofMillis(5000);
+ private volatile InetSocketAddress localEndpoint;
// ??? we currently cannot reuse a connection once it got closed
private final InetSocketAddress server;
private final Socket socket;
@@ -632,6 +633,8 @@ public final class Connection implements Closeable {
try {
socket.bind(bind);
+ // socket returns any-local after socket is closed, so keep actual address after bind
+ localEndpoint = (InetSocketAddress) socket.getLocalSocketAddress();
}
catch (final IOException e) {
throw new KnxRuntimeException("binding to local address " + bind, e);
@@ -642,7 +645,7 @@ public final class Connection implements Closeable {
return new SecureSession(user, userKey, deviceAuthCode);
}
- public InetSocketAddress localEndpoint() { return (InetSocketAddress) socket.getLocalSocketAddress(); }
+ public InetSocketAddress localEndpoint() { return localEndpoint; }
public InetSocketAddress server() { return server; }
|
Remember local endpoint even after connection was closed
|
calimero-project_calimero-core
|
train
|
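
The subtlety the in-diff comment calls out: once a socket is closed, asking it for its local address no longer yields the real endpoint (Java returns the wildcard address; Python raises instead), so the address has to be captured right after `bind`. The remedy is the same in Python:

```python
import socket

class Connection:
    """Cache the bound endpoint at bind time; a closed socket can no
    longer report it."""

    def __init__(self):
        self._sock = socket.socket()
        self._sock.bind(("127.0.0.1", 0))
        self.local_endpoint = self._sock.getsockname()   # keep after bind

    def close(self):
        self._sock.close()

conn = Connection()
conn.close()
print(conn.local_endpoint)    # still the real ('127.0.0.1', <port>)
```
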
f482ce3fc47d03c86fd8224ecfbfb3a92b0a5fb9
|
diff --git a/tests/Doctrine/Tests/DBAL/Platforms/MsSqlPlatformTest.php b/tests/Doctrine/Tests/DBAL/Platforms/MsSqlPlatformTest.php
index <HASH>..<HASH> 100644
--- a/tests/Doctrine/Tests/DBAL/Platforms/MsSqlPlatformTest.php
+++ b/tests/Doctrine/Tests/DBAL/Platforms/MsSqlPlatformTest.php
@@ -23,14 +23,14 @@ class MsSqlPlatformTest extends AbstractPlatformTestCase
{
return array(
'CREATE TABLE test (foo VARCHAR(255) DEFAULT NULL, bar VARCHAR(255) DEFAULT NULL)',
- 'CREATE UNIQUE INDEX test_foo_bar_uniq ON test (foo, bar)'
+ 'CREATE UNIQUE INDEX test_foo_bar_uniq ON test (foo, bar)'
);
}
public function getGenerateAlterTableSql()
{
return array(
- 'ALTER TABLE mytable RENAME TO userlist',
+ 'ALTER TABLE mytable RENAME TO userlist',
'ALTER TABLE mytable ADD quota INT DEFAULT NULL',
'ALTER TABLE mytable DROP COLUMN foo',
'ALTER TABLE mytable CHANGE bar baz VARCHAR(255) DEFAULT \'def\' NOT NULL',
@@ -127,7 +127,7 @@ DDB;
public function testDoesNotSupportSavePoints()
{
- $this->assertFalse($this->_platform->supportsSavepoints());
+ $this->assertTrue($this->_platform->supportsSavepoints());
}
public function getGenerateIndexSql()
|
fixed mssql test to expect that savepoints are supported
|
doctrine_dbal
|
train
|
b75f11213cd19598dd40b5f16a000c2a0a0be4b5
|
diff --git a/categories/__init__.py b/categories/__init__.py
index <HASH>..<HASH> 100644
--- a/categories/__init__.py
+++ b/categories/__init__.py
@@ -1,7 +1,7 @@
__version_info__ = {
'major': 1,
'minor': 2,
- 'micro': 2,
+ 'micro': 3,
'releaselevel': 'final',
'serial': 1
}
@@ -19,11 +19,14 @@ def get_version(short=False):
__version__ = get_version()
-try:
+def register():
from categories import settings
from categories.registration import (_process_registry, register_fk,
register_m2m)
_process_registry(settings.FK_REGISTRY, register_fk)
_process_registry(settings.M2M_REGISTRY, register_m2m)
-except:
- pass
+
+try:
+ register()
+except Exception as e:
+ print e
diff --git a/categories/registration.py b/categories/registration.py
index <HASH>..<HASH> 100644
--- a/categories/registration.py
+++ b/categories/registration.py
@@ -4,6 +4,7 @@ These functions handle the adding of fields to other models
from django.db.models import FieldDoesNotExist
import fields
from settings import FIELD_REGISTRY, MODEL_REGISTRY
+from django.utils.translation import ugettext_lazy as _
def register_m2m(model, field_name='categories', extra_params={}):
@@ -42,7 +43,7 @@ def _process_registry(registry, call_func):
for key, value in registry.items():
model = get_model(*key.split('.'))
if model is None:
- raise ImproperlyConfigured(_('%(key) is not a model') % {'key' : key})
+ raise ImproperlyConfigured(_('%(key)s is not a model') % {'key': key})
if isinstance(value, (tuple, list)):
for item in value:
if isinstance(item, basestring):
@@ -51,13 +52,13 @@ def _process_registry(registry, call_func):
field_name = item.pop('name')
call_func(model, field_name, extra_params=item)
else:
- raise ImproperlyConfigured(_("%(settings) doesn't recognize the value of %(key)") %
- {'settings' : 'CATEGORY_SETTINGS', 'key' : key})
+ raise ImproperlyConfigured(_("%(settings)s doesn't recognize the value of %(key)s") %
+ {'settings': 'CATEGORY_SETTINGS', 'key': key})
elif isinstance(value, basestring):
call_func(model, value)
elif isinstance(value, dict):
field_name = value.pop('name')
call_func(model, field_name, extra_params=value)
else:
- raise ImproperlyConfigured(_("%(settings) doesn't recognize the value of %(key)") %
- {'settings' : 'CATEGORY_SETTINGS', 'key' : key})
+ raise ImproperlyConfigured(_("%(settings)s doesn't recognize the value of %(key)s") %
+ {'settings': 'CATEGORY_SETTINGS', 'key': key})
|
Version <I>: Added a new way to register models manually
|
callowayproject_django-categories
|
train
|
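
The format-string fix is easy to miss: `%(key)` without a conversion character is not a complete placeholder, so Python reads the following text as format flags plus a conversion type and the message blows up instead of interpolating. A minimal reproduction:

```python
tmpl_bad = '%(key) is not a model'    # missing the trailing 's'
tmpl_good = '%(key)s is not a model'

try:
    print(tmpl_bad % {'key': 'app.Model'})
except (TypeError, ValueError) as exc:
    # ' i' is parsed as a flag plus an integer conversion, not literal text
    print('bad template:', exc)

print(tmpl_good % {'key': 'app.Model'})   # app.Model is not a model
```
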
b25337b598b142672983c02e29b9c1dc23c74ecb
|
diff --git a/retrofit/src/main/java/retrofit/RestAdapter.java b/retrofit/src/main/java/retrofit/RestAdapter.java
index <HASH>..<HASH> 100644
--- a/retrofit/src/main/java/retrofit/RestAdapter.java
+++ b/retrofit/src/main/java/retrofit/RestAdapter.java
@@ -56,6 +56,9 @@ import retrofit.mime.TypedOutput;
* by curly braces (e.g., "{foo}"). To add items to the query string of a URL use
* {@link retrofit.http.Query @Query}. If the path or query element has already been URI encoded
* use {@link retrofit.http.EncodedPath @EncodedPath} or {@link retrofit.http.EncodedQuery
+ *
+ * @author Bob Lee (bob@squareup.com)
+ * @author Jake Wharton (jw@squareup.com)
* @EncodedQuery} to prevent repeated encoding.
* <p>
* HTTP requests happen in one of two ways:
@@ -101,9 +104,6 @@ import retrofit.mime.TypedOutput;
* <p>
* Calling {@link #create(Class)} with {@code MyApi.class} will validate and create a new
* implementation of the API.
- *
- * @author Bob Lee (bob@squareup.com)
- * @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
@@ -177,6 +177,12 @@ public class RestAdapter {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
+ // Prevent API interfaces from extending other interfaces. This not only avoids a bug in
+ // Android (http://b.android.com/58753) but it forces composition of API declarations which is
+ // the recommended pattern.
+ if (service.getSuperclass() != null) {
+ throw new IllegalArgumentException("Interface definitions must not extend other interfaces.");
+ }
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
diff --git a/retrofit/src/test/java/retrofit/RestAdapterTest.java b/retrofit/src/test/java/retrofit/RestAdapterTest.java
index <HASH>..<HASH> 100644
--- a/retrofit/src/test/java/retrofit/RestAdapterTest.java
+++ b/retrofit/src/test/java/retrofit/RestAdapterTest.java
@@ -72,6 +72,8 @@ public class RestAdapterTest {
@GET("/") Response direct();
@GET("/") void direct(Callback<Response> callback);
}
+ private interface InvalidExample extends Example {
+ }
private Client mockClient;
private Executor mockRequestExecutor;
@@ -101,6 +103,14 @@ public class RestAdapterTest {
assertThat(example.toString()).isNotEmpty();
}
+ @Test public void interfaceWithExtendIsNotSupported() {
+ try {
+ new RestAdapter.Builder().setServer("http://foo/").build().create(InvalidExample.class);
+ } catch (IllegalArgumentException e) {
+ assertThat(e).hasMessage("Interface definitions must not extend other interfaces.");
+ }
+ }
+
@Test public void profilerObjectPassThrough() throws Exception {
Object data = new Object();
when(mockProfiler.beforeCall()).thenReturn(data);
|
Prevent API interfaces from extending other interfaces.
This not only avoids a bug in Android (<URL>) but it forces composition of API declarations which is the recommended pattern.
|
square_retrofit
|
train
|
5bddcf10fcae390161d435953ab2610380445e14
|
diff --git a/Hprose/Service.php b/Hprose/Service.php
index <HASH>..<HASH> 100644
--- a/Hprose/Service.php
+++ b/Hprose/Service.php
@@ -14,7 +14,7 @@
* *
* hprose service class for php 5.3+ *
* *
- * LastModified: Apr 10, 2015 *
+ * LastModified: Apr 20, 2015 *
* Author: Ma Bingyao <andot@hprose.com> *
* *
\**********************************************************/
@@ -83,6 +83,9 @@ namespace Hprose {
E_STRICT => 'Runtime Notice',
E_RECOVERABLE_ERROR => 'Catchable Fatal Error'
);
+ public static function getErrorTypeString($errno) {
+ return self::$errorTable[$errno];
+ }
private $calls = array();
private $names = array();
private $filters = array();
@@ -107,9 +110,6 @@ namespace Hprose {
}
return $data;
}
- protected function getErrorTypeString($errno) {
- return self::$errorTable[$errno];
- }
protected function sendError($error, $context) {
if ($this->onSendError !== null) {
$sendError = $this->onSendError;
|
Changed getErrorTypeString to public static method.
|
hprose_hprose-php
|
train
|
f1599d6f0d2a63610d380aaa9badee617f06b929
|
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/AgentConnector.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/AgentConnector.java
index <HASH>..<HASH> 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/AgentConnector.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/AgentConnector.java
@@ -90,6 +90,7 @@ public class AgentConnector extends AbstractServerConnector implements ClientPip
* @param workerJvmManager manager for WorkerJVM instances
* @param port the port for incoming connections
* @param threadPoolSize size of the Netty thread pool to connect to Worker instances
+ * @return the {@link AgentConnector} instance
*/
public static AgentConnector createInstance(Agent agent, WorkerJvmManager workerJvmManager, int port, int threadPoolSize) {
ConcurrentMap<String, ResponseFuture> futureMap = new ConcurrentHashMap<String, ResponseFuture>();
diff --git a/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/WorkerConnector.java b/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/WorkerConnector.java
index <HASH>..<HASH> 100644
--- a/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/WorkerConnector.java
+++ b/simulator/src/main/java/com/hazelcast/simulator/protocol/connector/WorkerConnector.java
@@ -111,6 +111,7 @@ public class WorkerConnector extends AbstractServerConnector {
* @param type the {@link WorkerType} of this Simulator Worker
* @param hazelcastInstance the {@link HazelcastInstance} for this Simulator Worker
* @param worker the {@link Worker} instance of this Simulator Worker
+ * @return the {@link WorkerConnector} instance
*/
public static WorkerConnector createInstance(int parentAddressIndex, int addressIndex, int port, WorkerType type,
HazelcastInstance hazelcastInstance, Worker worker) {
@@ -127,6 +128,7 @@ public class WorkerConnector extends AbstractServerConnector {
* @param hazelcastInstance the {@link HazelcastInstance} for this Simulator Worker
* @param worker the {@link Worker} instance of this Simulator Worker
* @param useRemoteLogger determines if the {@link RemoteExceptionLogger} or {@link FileExceptionLogger} should be used
+ * @return the {@link WorkerConnector} instance
*/
public static WorkerConnector createInstance(int parentAddressIndex, int addressIndex, int port, WorkerType type,
HazelcastInstance hazelcastInstance, Worker worker, boolean useRemoteLogger) {
|
Fixed JavaDoc errors in connector package.
|
hazelcast_hazelcast-simulator
|
train
|
0be50e66205eb4f669ee65090f44f0631ea840bf
|
diff --git a/swagger/swagger.go b/swagger/swagger.go
index <HASH>..<HASH> 100644
--- a/swagger/swagger.go
+++ b/swagger/swagger.go
@@ -9,7 +9,7 @@ const Name = "swagger"
var (
swaggerMu sync.RWMutex
- swaggers = make(map[string]Swagger)
+ swag Swagger
)
type Swagger interface {
@@ -23,17 +23,15 @@ func Register(name string, swagger Swagger) {
panic("swagger is nil")
}
- if _, dup := swaggers[name]; dup {
- panic("Register called twice for swag doc: " + name)
+ if swag != nil {
+ panic("Register called twice for swag: " + name)
}
- swaggers[name] = swagger
+ swag = swagger
}
func ReadDoc() (string, error) {
- if val, ok := swaggers[Name]; ok {
- return val.ReadDoc(), nil
+ if swag != nil {
+ return swag.ReadDoc(), nil
}
-
- return "", errors.New("Can't found swag doc")
-
+ return "", errors.New("Not yet registered swag.")
}
diff --git a/swagger/swagger_test.go b/swagger/swagger_test.go
index <HASH>..<HASH> 100644
--- a/swagger/swagger_test.go
+++ b/swagger/swagger_test.go
@@ -155,22 +155,34 @@ func (s *s) ReadDoc() string {
}
func TestRegister(t *testing.T) {
+ setup()
Register(Name, &s{})
d, _ := ReadDoc()
assert.Equal(t, doc, d)
}
+func TestReadDocBeforeRegistered(t *testing.T) {
+ setup()
+ _, err := ReadDoc()
+ assert.Error(t, err)
+}
+
func TestNilRegister(t *testing.T) {
+ setup()
var swagger Swagger
-
assert.Panics(t, func() {
Register(Name, swagger)
})
}
func TestCalledTwicelRegister(t *testing.T) {
+ setup()
assert.Panics(t, func() {
Register(Name, &s{})
Register(Name, &s{})
})
}
+
+func setup() {
+ swag = nil
+}
|
Improve swagger code coverage.
|
swaggo_swag
|
train
|
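
The refactor collapses the `map[string]Swagger` registry into a single package-level instance, and the new `setup()` resets it so each test starts clean; that reset is what makes `TestReadDocBeforeRegistered` possible. The registry contract, restated as a Python sketch:

```python
_swag = None   # single registered doc, like the package-level `swag` above

def register(doc):
    global _swag
    if doc is None:
        raise ValueError("swagger is nil")
    if _swag is not None:
        raise RuntimeError("Register called twice for swag")
    _swag = doc

def read_doc():
    if _swag is None:
        raise LookupError("not yet registered swag")
    return _swag.read_doc()
```
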
66db09139155d755f5d879d41bb61d8ce48ecefe
|
diff --git a/juju/placement.py b/juju/placement.py
index <HASH>..<HASH> 100644
--- a/juju/placement.py
+++ b/juju/placement.py
@@ -22,7 +22,13 @@ def parse(directive):
# Handle null case
return None
- if type(directive) in [dict, client.Placement]:
+ if isinstance(directive, (list, tuple)):
+ results = []
+ for d in directive:
+ results.extend(parse(d))
+ return results
+
+ if isinstance(directive, (dict, client.Placement)):
# We've been handed something that we can simply hand back to
# the api. (Forwards compatibility)
return [directive]
|
Support placement lists (#<I>)
|
juju_python-libjuju
|
train
|
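
`parse` now accepts a list of directives and flattens it by recursing on each element, so callers can mix strings, dicts, and `Placement` objects freely. Stripped of the client types, the recursion looks like:

```python
def parse(directive):
    """Flatten lists/tuples of placement directives by recursing on each
    element; every other directive becomes a one-item list."""
    if directive is None:
        return None
    if isinstance(directive, (list, tuple)):
        results = []
        for d in directive:
            results.extend(parse(d))
        return results
    return [directive]   # dict/Placement/string handling collapsed here

print(parse(["zone=us-east-1a", ("lxd:0", "0")]))
# ['zone=us-east-1a', 'lxd:0', '0']
```
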
a03b01c130cd86f1eacd7d1e55152f4baa41fcd7
|
diff --git a/neural/afni.py b/neural/afni.py
index <HASH>..<HASH> 100644
--- a/neural/afni.py
+++ b/neural/afni.py
@@ -816,7 +816,7 @@ def align_epi_anat(anatomy,epi_dsets,skull_strip_anat=True):
if is_nifti(dset):
if dset!=anatomy:
dset_nifti = nifti_copy(prefix(dset)+'_al+orig')
- if dset_nifti:
+ if dset_nifti and os.path.exists(dset_nifti):
nl.run(['gzip',dset_nifti])
if dset==anatomy or os.path.exists(suffix(dset,'_al')):
for s in ['_al+orig.HEAD','_al+orig.BRIK*','+orig.HEAD','+orig.BRIK*']:
|
trying to get rid of weird gzip errors
|
azraq27_neural
|
train
|
09d57819b990978e72f70a519ec5674ac33be698
|
diff --git a/gremlin-server/src/test/java/com/tinkerpop/gremlin/server/GremlinServerIntegrateTest.java b/gremlin-server/src/test/java/com/tinkerpop/gremlin/server/GremlinServerIntegrateTest.java
index <HASH>..<HASH> 100644
--- a/gremlin-server/src/test/java/com/tinkerpop/gremlin/server/GremlinServerIntegrateTest.java
+++ b/gremlin-server/src/test/java/com/tinkerpop/gremlin/server/GremlinServerIntegrateTest.java
@@ -144,6 +144,7 @@ public class GremlinServerIntegrateTest extends AbstractGremlinServerIntegration
@Test
public void shouldEventuallySucceedWithRoundRobin() throws Exception {
+ // todo: when we have a config on borrowed connection timeout, set it low here to make the test go faster.
final String noGremlinServer = "74.125.225.19";
final Cluster cluster = Cluster.create(noGremlinServer).addContactPoint("localhost").build();
final Client client = cluster.connect();
|
Add todo for later thought.
|
apache_tinkerpop
|
train
|
a108875fa60798e6f5dd1a52c4f7260b4e838895
|
diff --git a/biodata-tools/src/main/java/org/opencb/biodata/tools/alignment/BamManager.java b/biodata-tools/src/main/java/org/opencb/biodata/tools/alignment/BamManager.java
index <HASH>..<HASH> 100644
--- a/biodata-tools/src/main/java/org/opencb/biodata/tools/alignment/BamManager.java
+++ b/biodata-tools/src/main/java/org/opencb/biodata/tools/alignment/BamManager.java
@@ -21,10 +21,13 @@ package org.opencb.biodata.tools.alignment;
import ga4gh.Reads;
import htsjdk.samtools.*;
-import htsjdk.samtools.cram.ref.CRAMReferenceSource;
+import htsjdk.samtools.cram.structure.BlockCompressionMethod;
import htsjdk.samtools.reference.FastaSequenceIndexCreator;
import htsjdk.samtools.seekablestream.SeekableStream;
import htsjdk.samtools.seekablestream.SeekableStreamFactory;
+import htsjdk.samtools.util.BlockCompressedFilePointerUtil;
+import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.BlockCompressedOutputStream;
import htsjdk.samtools.util.Log;
import org.ga4gh.models.ReadAlignment;
import org.opencb.biodata.models.alignment.RegionCoverage;
@@ -43,12 +46,13 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
-import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Base64;
import java.util.List;
+import java.util.stream.Collectors;
/**
* Created by imedina on 14/09/15.
@@ -190,6 +194,12 @@ public class BamManager {
return samReader.getFileHeader().getTextHeader();
}
+ public byte[] compressedHeader() {
+ OutputStream outputStream = new ByteArrayOutputStream();
+ BAMFileWriter.writeHeader(outputStream, samReader.getFileHeader());
+ return ((ByteArrayOutputStream) outputStream).toByteArray();
+ }
+
/*
* These methods aim to provide a very simple, safe and quick way of accessing to a small fragment of the BAM/CRAM file.
* This must not be used in production for reading big data files. It returns a maximum of 50,000 SAM records,
@@ -317,6 +327,43 @@ public class BamManager {
}
}
+ public List<String> getBreakpoints(Region region) {
+ if (samReader.hasIndex()) {
+ int sequenceIndex = samReader.getFileHeader().getSequenceIndex(region.getChromosome());
+ int start = region.getStart();
+ int end = region.getEnd();
+
+ BAMIndex index = samReader.indexing().getIndex();
+ List<Chunk> originalChunks = index.getSpanOverlapping(sequenceIndex, start, end).getChunks();
+
+// // We will add 20kbps to the end to get more chunks so we can easily get the last end chunk
+// List<Chunk> finalChunks;
+// do {
+// end += 20000;
+// finalChunks = index.getSpanOverlapping(sequenceIndex, start, end).getChunks();
+// } while (finalChunks.size() == originalChunks.size());
+
+ List<String> byteRanges = new ArrayList<>(originalChunks.size());
+// for (int i = 0; i < originalChunks.size(); i++) {
+// if (i == originalChunks.size() - 1) {
+// break;
+// }
+// byteRanges.add(BlockCompressedFilePointerUtil.getBlockAddress(originalChunks.get(i).getChunkStart()) + "-"
+// + (BlockCompressedFilePointerUtil.getBlockAddress(originalChunks.get(i+1).getChunkStart()) - 1));
+// }
+ for (Chunk originalChunk : originalChunks) {
+ long byte_start = BlockCompressedFilePointerUtil.getBlockAddress(originalChunk.getChunkStart());
+ long byte_end = BlockCompressedFilePointerUtil.getBlockAddress(originalChunk.getChunkEnd());
+ if (byte_start != byte_end) {
+ byteRanges.add(byte_start + "-" + (byte_end - 1));
+ }
+ }
+
+ return byteRanges;
+ }
+ return null;
+ }
+
/**
* Return the coverage average given a window size from a BigWig file. This is expected to have the same name
* that the BAM file with .bw suffix.
|
tools: add new methods to BamManager to support HTSget protocol
|
opencb_biodata
|
train
|
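
For HTSget, the interesting step is converting BAM index chunks into plain HTTP byte ranges: take each chunk's block addresses and emit `start-(end-1)`, dropping chunks that collapse to zero bytes. The arithmetic on its own (the offsets below are illustrative):

```python
def byte_ranges(chunks):
    """chunks: iterable of (block_start, block_end) file addresses, as
    recovered from BAM index virtual offsets. Returns HTTP range strings."""
    ranges = []
    for start, end in chunks:
        if start != end:                  # skip empty chunks
            ranges.append(f"{start}-{end - 1}")
    return ranges

print(byte_ranges([(0, 65536), (65536, 65536), (65536, 131072)]))
# ['0-65535', '65535-131071']
```
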
da15cd98d1645accef882fb059b96dfdef44e107
|
diff --git a/tests/inspectdb/slow_test.py b/tests/inspectdb/slow_test.py
index <HASH>..<HASH> 100644
--- a/tests/inspectdb/slow_test.py
+++ b/tests/inspectdb/slow_test.py
@@ -26,6 +26,7 @@ from salesforce.backend.base import SalesforceError
from tests.inspectdb import models as mdl
sf = connections['salesforce']
+# The same "django.setup()" is used by manage.py subcommands in Django 1.7+.
if DJANGO_17_PLUS:
django.setup()
@@ -35,6 +36,7 @@ def run():
for tab in sf.introspection.table_list_cache['sobjects']:
if tab['retrieveable'] and not tab['name'] in (
# These require specific filters (descried in their error messages)
+ 'CollaborationGroupRecord',
'ContentDocumentLink', 'Idea', 'IdeaComment', 'UserProfileFeed',
'Vote', #'OpportunityPartner', 'Product2Feed',
# UNKNOWN_EXCEPTION:
|
Fixed slow_test for the new Salesforce
|
django-salesforce_django-salesforce
|
train
|
317f41a3a7d9dd553617d998dbdee944f3a3493a
|
diff --git a/examples/echobot.py b/examples/echobot.py
index <HASH>..<HASH> 100755
--- a/examples/echobot.py
+++ b/examples/echobot.py
@@ -53,11 +53,12 @@ class Client(JabberClient):
self.stream.set_iq_get_handler("query","jabber:iq:version",self.get_version)
# set up handlers for <presence/> stanzas
- self.stream.set_presence_handler("available",self.presence)
- self.stream.set_presence_handler("subscribe",self.presence_control)
- self.stream.set_presence_handler("subscribed",self.presence_control)
- self.stream.set_presence_handler("unsubscribe",self.presence_control)
- self.stream.set_presence_handler("unsubscribed",self.presence_control)
+ self.stream.set_presence_handler(None, self.presence)
+ self.stream.set_presence_handler("noavailable", self.presence)
+ self.stream.set_presence_handler("subscribe", self.presence_control)
+ self.stream.set_presence_handler("subscribed", self.presence_control)
+ self.stream.set_presence_handler("unsubscribe", self.presence_control)
+ self.stream.set_presence_handler("unsubscribed", self.presence_control)
# set up handler for <message stanza>
self.stream.set_message_handler("normal",self.message)
|
- handle 'unavailable' presence and don't use 'available' presence type (None type should be used instead)
|
Jajcus_pyxmpp2
|
train
|
b320b3ed8bf4f49b3586adce923a6b291a63a674
|
diff --git a/library/src/main/java/com/mikepenz/materialize/holder/StringHolder.java b/library/src/main/java/com/mikepenz/materialize/holder/StringHolder.java
index <HASH>..<HASH> 100644
--- a/library/src/main/java/com/mikepenz/materialize/holder/StringHolder.java
+++ b/library/src/main/java/com/mikepenz/materialize/holder/StringHolder.java
@@ -16,6 +16,10 @@ public class StringHolder {
this.mText = text;
}
+ public StringHolder(String text) {
+ this.mText = text;
+ }
+
public StringHolder(@StringRes int textRes) {
this.mTextRes = textRes;
}
|
* add back `StringHolder(String..)` as it seems dex will fail in some cases otherwise
|
mikepenz_Materialize
|
train
|
44e8cadf123d6f21fb270d30fd7f16f8ae9672d1
|
diff --git a/Services/ArticleTypeConfigurationService.php b/Services/ArticleTypeConfigurationService.php
index <HASH>..<HASH> 100644
--- a/Services/ArticleTypeConfigurationService.php
+++ b/Services/ArticleTypeConfigurationService.php
@@ -54,6 +54,10 @@ class ArticleTypeConfigurationService
'entryMethod' => 'getTitle',
'type' => 'text', 'max_size' => 255
),
+ 'dataLink' => array(
+ 'entryMethod' => 'getLink',
+ 'type' => 'text', 'max_size' => 255
+ ),
'dataLead' => array(
'entryMethod' => 'getSummary',
'type' => 'body', 'field_type_param' => 'editor_size=250'
@@ -143,7 +147,7 @@ class ArticleTypeConfigurationService
*/
public function update()
{
-
+ //
}
/**
diff --git a/Services/IngestService.php b/Services/IngestService.php
index <HASH>..<HASH> 100644
--- a/Services/IngestService.php
+++ b/Services/IngestService.php
@@ -177,9 +177,6 @@ class IngestService
$entry->setCreated($unparsedEntry->getCreated());
$entry->setUpdated($unparsedEntry->getUpdated());
- // Todo: Set this when published automatically
- //$entry->setPublished($unparsedEntry->getPublished());
-
$entry->setProduct($unparsedEntry->getProduct());
$entry->setStatus($unparsedEntry->getStatus());
$entry->setPriority($unparsedEntry->getPriority());
diff --git a/Services/PublisherService.php b/Services/PublisherService.php
index <HASH>..<HASH> 100644
--- a/Services/PublisherService.php
+++ b/Services/PublisherService.php
@@ -357,7 +357,7 @@ class PublisherService
$article->setAuthor($author, $order++);
}
} else {
- $name = $entry->getProduct() ?: $entry->getFeed()->getTitle();
+ $name = $entry->getProduct() ?: $entry->getFeed()->getName();
$author = new \Author($name);
if (!$author->exists()) {
$author->create();
|
WOBS-<I>: Fixes a few bugs regarding Product and link
- Adds link field to ArticleType table (plugin should be reinstalled
after this update)
- Removed comment from IngestService regarding publishing article
- Uses feed name if no product is available
|
newscoop_plugin-IngestPluginBundle
|
train
|
028310ccaff8b0eb4cde4fbe338efc7d990f5cc0
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -55,7 +55,7 @@ copyright = '2012-2014, Thomas Roten'
# The short X.Y version.
version = '0.4'
# The full version, including alpha/beta/rc tags.
-release = '0.4.1'
+release = '0.4.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ with open('README.rst') as f:
setup(
name='fcache',
- version='0.4.1',
+ version='0.4.2',
author='Thomas Roten',
author_email='thomas@roten.us',
url='https://github.com/tsroten/fcache',
|
Bumps version to <I>.
|
tsroten_fcache
|
train
|
113d8c7dcc4f33f2c6d6be71f4f920db09640630
|
diff --git a/core/src/main/java/com/orientechnologies/orient/core/sql/parser/OFieldMatchPathItem.java b/core/src/main/java/com/orientechnologies/orient/core/sql/parser/OFieldMatchPathItem.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/orientechnologies/orient/core/sql/parser/OFieldMatchPathItem.java
+++ b/core/src/main/java/com/orientechnologies/orient/core/sql/parser/OFieldMatchPathItem.java
@@ -34,6 +34,7 @@ class OFieldMatchPathItem extends OMatchPathItem {
}
public void toString(Map<Object, Object> params, StringBuilder builder) {
+ builder.append(".");
field.toString(params, builder);
if (filter != null) {
filter.toString(params, builder);
diff --git a/core/src/test/java/com/orientechnologies/orient/core/sql/parser/OMatchStatementTest.java b/core/src/test/java/com/orientechnologies/orient/core/sql/parser/OMatchStatementTest.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/com/orientechnologies/orient/core/sql/parser/OMatchStatementTest.java
+++ b/core/src/test/java/com/orientechnologies/orient/core/sql/parser/OMatchStatementTest.java
@@ -216,6 +216,13 @@ public class OMatchStatementTest {
checkRightSyntax("MATCH {class: 'V', as: foo}-->{as:bar} RETURN foo.name, bar.name skip 10 limit 10");
}
+ @Test
+ public void testFieldTraversal() {
+ checkRightSyntax("MATCH {class: 'V', as: foo}.toBar{as:bar} RETURN foo.name, bar.name skip 10 limit 10");
+ checkRightSyntax("MATCH {class: 'V', as: foo}.toBar{as:bar}.out(){as:c} RETURN foo.name, bar.name skip 10 limit 10");
+ checkRightSyntax("MATCH {class: 'V', as: foo}.toBar.baz{as:bar} RETURN foo.name, bar.name skip 10 limit 10");
+ checkRightSyntax("MATCH {class: 'V', as: foo}.toBar.out(){as:bar} RETURN foo.name, bar.name skip 10 limit 10");
+ }
protected OrientSql getParserFor(String string) {
InputStream is = new ByteArrayInputStream(string.getBytes());
|
Fix parsing of MATCH statement with field traversal
|
orientechnologies_orientdb
|
train
|
ba458166c9bd35c9ed7e4104243237096d40d3f9
|
diff --git a/ips_vagrant/common/__init__.py b/ips_vagrant/common/__init__.py
index <HASH>..<HASH> 100644
--- a/ips_vagrant/common/__init__.py
+++ b/ips_vagrant/common/__init__.py
@@ -1,3 +1,4 @@
+from distutils.version import StrictVersion, LooseVersion
import os
import click
import logging
@@ -89,3 +90,20 @@ def cookiejar(name='session'):
log.warn('Session / cookie file exists, but could not be loaded', exc_info=e)
return cj
+
+
+def parse_version(vstring):
+ """
+ StrictVersion / LooseVersion decorator method
+ @type vstring: str
+ @return: StrictVersion if possible, otherwise LooseVersion
+ @rtype: StrictVersion or LooseVersion
+ """
+ try:
+ version = StrictVersion(vstring)
+ except ValueError:
+ logging.getLogger('ipvs.common.debug').info('Strict parsing failed, falling back to LooseVersion instead')
+ version = LooseVersion(vstring)
+ version.version = tuple(version.version)
+
+ return version
diff --git a/ips_vagrant/downloaders/ips.py b/ips_vagrant/downloaders/ips.py
index <HASH>..<HASH> 100644
--- a/ips_vagrant/downloaders/ips.py
+++ b/ips_vagrant/downloaders/ips.py
@@ -1,5 +1,4 @@
from collections import OrderedDict
-from distutils.version import LooseVersion
from glob import glob
import json
import os
@@ -8,7 +7,7 @@ from zipfile import ZipFile, BadZipfile
import re
from bs4 import BeautifulSoup
from mechanize import Browser
-from ips_vagrant.common import http_session
+from ips_vagrant.common import http_session, parse_version
from ips_vagrant.scrapers.errors import HtmlParserError
@@ -23,6 +22,7 @@ class IpsManager(object):
@type license: ips_vagrant.scraper.licenses.LicenseMeta
"""
self.ctx = ctx
+ self.log = logging.getLogger('ipsv.scraper.version')
self.session = http_session(ctx.cookiejar)
self._license = license
@@ -32,7 +32,6 @@ class IpsManager(object):
self._populate_local()
self._populate_latest()
self._sort()
- self.log = logging.getLogger('ipsv.scraper.version')
def _sort(self):
"""
@@ -48,7 +47,7 @@ class IpsManager(object):
for archive in archives:
try:
version = self._read_zip(archive)
- self.versions[version] = IpsMeta(self, version, filepath=archive)
+ self.versions[version.version] = IpsMeta(self, version, filepath=archive)
except BadZipfile as e:
self.log.warn('Unreadable zip archive in IPS versions directory (%s): %s', e.message, archive)
@@ -67,22 +66,22 @@ class IpsManager(object):
form = BeautifulSoup(script_tpl.text, "html.parser").find('form')
# Parse the response for a download link to the latest IPS release
- version = LooseVersion(form.find('label', {'for': 'version_latest'}).text).version
+ version = parse_version(form.find('label', {'for': 'version_latest'}).text)
self.log.info('Latest IPS version: %s', version)
url = form.get('action')
# If we have a cache for this version, just add our url to it
- if version in self.versions:
- self.versions[version].url = url
+ if version.version in self.versions:
+ self.versions[version.version].url = url
return
- self.versions[version] = IpsMeta(self, version, request=('post', url, {'version': 'latest'}))
+ self.versions[version.version] = IpsMeta(self, version, request=('post', url, {'version': 'latest'}))
def _read_zip(self, filepath):
"""
Read an IPS installation zipfile and return the core version number
@type filepath: str
- @rtype: tuple
+ @rtype: LooseVersion
"""
with ZipFile(filepath) as zip:
namelist = zip.namelist()
@@ -99,7 +98,7 @@ class IpsManager(object):
version = versions[-1]
self.log.debug('Version matched: ', version)
- return LooseVersion(version).version
+ return parse_version(version)
def get(self, version, use_cache=True):
"""
@@ -123,7 +122,7 @@ class IpsManager(object):
@property
def latest(self):
- return next(reversed(self.versions))
+ return self.versions[next(reversed(self.versions))]
class IpsMeta(object):
@@ -133,7 +132,7 @@ class IpsMeta(object):
def __init__(self, ips_manager, version, filepath=None, request=None):
"""
@type ips_manager: IpsManager
- @type version: tuple
+ @type version: LooseVersion
@type filepath: str or None
@type request: tuple or None (method, url, params)
"""
|
Version bugfixes and common method
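A quick usage sketch of the helper introduced above; the example version strings are illustrative, not taken from the change.

from distutils.version import StrictVersion, LooseVersion

def parse_version(vstring):
    # Same strategy as the diff: strict first, loose as a fallback.
    try:
        version = StrictVersion(vstring)
    except ValueError:
        version = LooseVersion(vstring)
    version.version = tuple(version.version)
    return version

print(type(parse_version('4.0.3')).__name__)       # StrictVersion
print(type(parse_version('4.0.3-beta')).__name__)  # LooseVersion
print(parse_version('4.0.3').version)              # (4, 0, 3)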
|
FujiMakoto_IPS-Vagrant
|
train
|
680ecf1d66587c95b513faf812a76885d8702c4a
|
diff --git a/shinken/satellite.py b/shinken/satellite.py
index <HASH>..<HASH> 100644
--- a/shinken/satellite.py
+++ b/shinken/satellite.py
@@ -139,9 +139,9 @@ class IForArbiter(Pyro.core.ObjBase):
# Now manage modules
for module in conf['global']['modules']:
- print "Got module", module.get_name()
- # If we alrady got it, bypass
+ # If we already got it, bypass
if not module.get_name() in self.app.worker_modules:
+ logger.log("[%s] Got module : %s " % (self.name, module.get_name()))
self.app.worker_modules[module.get_name()] = {'to_q' : Queue()}
@@ -519,9 +519,11 @@ class Satellite(Daemon):
def _got_queue_from_action(self, a):
if hasattr(a, 'module_type'):
print "search for a module", a.module_type
+ if a.module_type in self.worker_modules:
+ return self.worker_modules[a.module_type]['to_q']
+ # Nothing found, it's not good at all!
return None
# If none, call the standard 'fork'
- print "As ka Queue", self.worker_modules['fork']['to_q']
return self.worker_modules['fork']['to_q']
|
*Clean: satellite code
*Add: now really calls the queue of the right worker for special module_type actions
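A hedged Python sketch of the lookup this commit adds; worker_modules mirrors the {'module_type': {'to_q': Queue()}} structure used above.

def queue_for_action(worker_modules, action):
    # Prefer the queue of the worker registered for this action's module_type.
    module_type = getattr(action, 'module_type', None)
    if module_type is not None:
        if module_type in worker_modules:
            return worker_modules[module_type]['to_q']
        # Nothing found, which is not good at all.
        return None
    # If no module_type, fall back to the standard 'fork' worker.
    return worker_modules['fork']['to_q']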
|
Alignak-monitoring_alignak
|
train
|
810db8b2bacb0f71c735b520380f51cf4a732235
|
diff --git a/features/support/worlds/medications.rb b/features/support/worlds/medications.rb
index <HASH>..<HASH> 100644
--- a/features/support/worlds/medications.rb
+++ b/features/support/worlds/medications.rb
@@ -57,8 +57,8 @@ module World
record_prescription_for(args)
end
- def revise_prescription_for(patient:, _user:,
- _drug_selector: default_medication_drug_selector, prescription_params: {})
+ def revise_prescription_for(patient:, user:, drug_selector: default_medication_drug_selector,
+ prescription_params: {})
prescription = patient.prescriptions.last!
update_params = { by: Renalware::SystemUser.find }
|
revert a rubocop "fix" which broke the acceptance tests
|
airslie_renalware-core
|
train
|
2ce688dfe7f9b267a5d5d5eecffea3e1aff752ae
|
diff --git a/lib/models.js b/lib/models.js
index <HASH>..<HASH> 100644
--- a/lib/models.js
+++ b/lib/models.js
@@ -103,19 +103,25 @@ class DataElementSpecifications {
constructor() {
this._nsMap = new Map();
+ this._fileMap = new Map();
this._grammarVersions = new Map();
}
get grammarVersions() { return Array.from(this._grammarVersions.values()); }
get namespaces() { return Array.from(this._nsMap.keys()); }
+ get files() { return Array.from(this._fileMap.keys()); }
- add(dataElement) {
+ add(dataElement, file) {
//console.log("Adding data element = "+JSON.stringify(dataElement));
const id = dataElement.identifier;
if (!this._nsMap.has(id.namespace)) {
this._nsMap.set(id.namespace, new Map());
}
this._nsMap.get(id.namespace).set(id.name, dataElement);
+ if (!this._fileMap.has(file)) {
+ this._fileMap.set(file, []);
+ }
+ this._fileMap.get(file).push(dataElement);
if (typeof dataElement.grammarVersion !== 'undefined') {
this._grammarVersions.set(dataElement.grammarVersion.toString(), dataElement.grammarVersion);
}
@@ -147,6 +153,13 @@ class DataElementSpecifications {
return [];
}
+ byFile(file) {
+ if (this._fileMap.has(file)) {
+ return Array.from(this._fileMap.get(file).values());
+ }
+ return [];
+ }
+
find(namespace, name) {
// Special case logic for the _Entry and _Concept keywords
if (namespace === '') {
@@ -419,8 +432,6 @@ class DataElement {
this._concepts = []; // Concept[]
this._fields = []; // Value[] (and its subclasses) -- excluding primitive values
this._hierarchy = []; // String[], list of base class FQNs
- this._filePath = ''; // String, holding relative file path containing DataElement
- this._orderIndex = 0; // number, holding index of data element in order in file
// also contains _value, _description, and _grammarVersion
}
// identifier is the unique Identifier (namespace+name) for the DataElement
@@ -516,28 +527,6 @@ class DataElement {
this._hierarchy = hierarchy;
}
- // the file path containing the definition of this element
- get filePath() { return this._filePath; }
- set filePath(filePath) {
- this._filePath = filePath;
- }
- // withFilePath is a convenience function for chaining
- withFilePath(filePath) {
- this.filePath = filePath;
- return this;
- }
-
- // the number holding the index of the data element in order in file
- get orderIndex() { return this._orderIndex; }
- set orderIndex(orderIndex) {
- this._orderIndex = orderIndex;
- }
- // withFilePath is a convenience function for chaining
- withOrderIndex(orderIndex) {
- this.orderIndex = orderIndex;
- return this;
- }
-
clone() {
const clone = new DataElement(this._identifier.clone(), this._isEntry, this._isAbstract);
if (this._description) {
@@ -558,12 +547,6 @@ class DataElement {
if (this._grammarVersion) {
clone._grammarVersion = this._grammarVersion.clone();
}
- if (this._filePath) {
- clone._filePath = this._filePath;
- }
- if (this._orderIndex) {
- clone._orderIndex = this._orderIndex;
- }
return clone;
}
@@ -579,9 +562,7 @@ class DataElement {
'hierarchy': this._hierarchy, //full hierarchy
'basedOn': this.basedOn.map(b => b.fqn || b.toString()),
'value': this.value != null ? this.value.toJSON() : undefined,
- 'fields': this._fields.map(f => f.toJSON()),
- 'filePath': this._filePath,
- 'orderIndex': this._orderIndex
+ 'fields': this._fields.map(f => f.toJSON())
};
clearEmptyFields(output, true);
|
Change to use file map in deSpecs rather than store in de
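The JavaScript above keeps a second index keyed by source file; a minimal Python sketch of the same dict-of-lists idea (names and values are illustrative):

file_map = {}

def add(data_element, file):
    # Index the element under the file it was defined in.
    file_map.setdefault(file, []).append(data_element)

def by_file(file):
    # Return a copy so callers cannot mutate the index.
    return list(file_map.get(file, []))

add('shr.core.Quantity', 'core.txt')
add('shr.core.Units', 'core.txt')
print(by_file('core.txt'))  # ['shr.core.Quantity', 'shr.core.Units']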
|
standardhealth_shr-models
|
train
|
d6d2a0dd69b77103194c71525933b82f8c326c83
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
#Stik.js
-[](https://travis-ci.org/lukelex/stik.js)[](https://codeclimate.com/github/lukelex/stik.js)
+[](https://travis-ci.org/lukelex/stik.js) [](https://codeclimate.com/github/lukelex/stik.js)
An opinionated JS library that wires your JavaScript execution to your HTML templates by creating pseudo closures of execution and scoping. Allowing you to manage, in a more consistant way, your events `binding/unbind` and your DOM scope manipulation.
diff --git a/src/context.js b/src/context.js
index <HASH>..<HASH> 100644
--- a/src/context.js
+++ b/src/context.js
@@ -2,31 +2,24 @@ window.stik || (window.stik = {});
(function(){
function Context(controller, action, template, executionUnit){
- if (!controller)
- throw "controller is missing";
- if (!action)
- throw "action is missing";
- if (!template)
- throw "template is missing";
- if (!executionUnit)
- throw "execution unit is missing";
+ if (!controller) { throw "controller is missing"; }
+ if (!action) { throw "action is missing"; }
+ if (!template) { throw "template is missing"; }
+ if (!executionUnit) { throw "execution unit is missing"; }
this.$$controller = controller;
this.$$action = action;
this.$$template = template;
this.$$executionUnit = executionUnit;
this.$$disposable = false;
- };
+ }
Context.prototype.$load = function(modules){
var dependencies = this.$resolveDependencies(
this.$mergeModules(modules)
);
- this.$$executionUnit.apply(
- new function(){},
- dependencies
- );
+ this.$$executionUnit.apply({}, dependencies);
};
Context.prototype.$mergeModules = function(modules){
diff --git a/src/courier.js b/src/courier.js
index <HASH>..<HASH> 100644
--- a/src/courier.js
+++ b/src/courier.js
@@ -3,7 +3,7 @@ window.stik || (window.stik = {});
(function(){
function Courier(){
this.$$receivers = {};
- };
+ }
Courier.prototype.$receive = function(box, opener){
this.$$receivers[box] || (this.$$receivers[box] = []);
@@ -15,7 +15,7 @@ window.stik || (window.stik = {});
for (var i = 0; i < openers.length; i++) {
openers[i](message);
- };
+ }
};
stik.Courier = Courier;
diff --git a/src/injector.js b/src/injector.js
index <HASH>..<HASH> 100644
--- a/src/injector.js
+++ b/src/injector.js
@@ -4,7 +4,7 @@ window.stik || (window.stik = {});
function Injector(executionUnit, modules){
this.$$executionUnit = executionUnit;
this.$$modules = modules;
- };
+ }
Injector.prototype.$resolveDependencies = function(){
var args = this.$extractArguments();
@@ -27,12 +27,11 @@ window.stik || (window.stik = {});
Injector.prototype.$grabModules = function(args){
var dependencies = [];
- if (args.length === 1 && args[0] === '')
- return [];
+ if (args.length === 1 && args[0] === '') { return []; }
for (var i = 0; i < args.length; i++) {
dependencies.push(this.$$modules[args[i]]);
- };
+ }
return dependencies;
};
|
Refactoring following some of CodeClimate's weird rules
|
stikjs_stik-core
|
train
|
95d6864e0a57c88ef483810dbeb8839f275637e0
|
diff --git a/js/qryptos.js b/js/qryptos.js
index <HASH>..<HASH> 100644
--- a/js/qryptos.js
+++ b/js/qryptos.js
@@ -108,20 +108,30 @@ module.exports = class qryptos extends Exchange {
let taker = this.safeFloat (market, 'taker_fee');
let active = !market['disabled'];
let minAmount = undefined;
+ let minPrice = undefined;
if (base === 'BTC') {
minAmount = 0.001;
} else if (base === 'ETH') {
minAmount = 0.01;
- } else if (base === 'QASH' || base === 'UBTC' || base === 'DASH' || base === 'NEO' || base === 'BCH') {
- minAmount = 0.0001;
+ } else {
+ minAmount = undefined;
+ }
+ if (quote === 'BTC') {
+ minPrice = 0.00000001;
+ } else if (quote === 'ETH' || quote === 'USD' || quote === 'JPY') {
+ minPrice = 0.00001;
+ } else {
+ minPrice = undefined;
}
- let minPrice = 0.00001;
let limits = {
amount: { min: minAmount },
price: { min: minPrice },
cost: { min: minPrice * minAmount },
};
- let precision = { amount: 8, price: 5 };
+ let precision = {
+ amount: (typeof minAmount !== 'undefined') ? -Math.log10 (minAmount) : undefined,
+ price: (typeof minPrice !== 'undefined') ? -Math.log10 (minPrice) : undefined,
+ };
result.push ({
'id': id,
'symbol': symbol,
|
qryptos: fetchMarkets: limits, precisions
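A small sketch of the precision derivation used above, assuming the minimum increments are exact powers of ten:

import math

def precision_from_min(minimum):
    # Decimal places implied by a minimum increment, e.g. 1e-8 -> 8.
    return -math.log10(minimum) if minimum is not None else None

print(precision_from_min(0.00000001))  # 8.0 for BTC-quoted pairs
print(precision_from_min(0.00001))     # 5.0 for ETH/USD/JPY-quoted pairs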
|
ccxt_ccxt
|
train
|
f436f420d24607cf57c3e1ceb43c901bd128ce31
|
diff --git a/src/main/java/com/ait/lienzo/client/core/util/Geometry.java b/src/main/java/com/ait/lienzo/client/core/util/Geometry.java
index <HASH>..<HASH> 100755
--- a/src/main/java/com/ait/lienzo/client/core/util/Geometry.java
+++ b/src/main/java/com/ait/lienzo/client/core/util/Geometry.java
@@ -1123,24 +1123,20 @@ public final class Geometry
final double discrimant = (r * r * dSq) - (det * det);
- if (discrimant < 0)
+ if (lesserOrCloseEnough(discrimant, 0))
{
// line does not intersect
return new Point2DArray();
}
- if (discrimant == 0)
+ if (closeEnough(discrimant, 0))
{
// line only intersects once, so the start or end is inside of the circle
return new Point2DArray(((det * d.getY()) / dSq) + pc.getX(), ((-det * d.getX()) / dSq) + pc.getY());
}
final double discSqrt = Math.sqrt(discrimant);
- double sgn = 1;
-
- if (d.getY() < 0)
- {
- sgn = -1;
- }
+ double sgn = (d.getY() < 0) ? -1 : 1;
+
final Point2DArray intr = new Point2DArray((((det * d.getY()) + (sgn * d.getX() * discSqrt)) / dSq) + pc.getX(), (((-det * d.getX()) + (Math.abs(d.getY()) * discSqrt)) / dSq) + pc.getY());
return intr.push((((det * d.getY()) - (sgn * d.getX() * discSqrt)) / dSq) + pc.getX(), (((-det * d.getX()) - (Math.abs(d.getY()) * discSqrt)) / dSq) + pc.getY());
|
Using NRRF_PRECISION to prevent floating point comparison errors.
|
ahome-it_lienzo-core
|
train
|
e648f0d40b6bc077cd12679ffa59a9bc22404866
|
diff --git a/plexapi/video.py b/plexapi/video.py
index <HASH>..<HASH> 100644
--- a/plexapi/video.py
+++ b/plexapi/video.py
@@ -507,6 +507,10 @@ class Episode(Video, Playable):
if self._seasonNumber is None:
self._seasonNumber = self.parentIndex if self.parentIndex else self.season().seasonNumber
return utils.cast(int, self._seasonNumber)
+
+ def seasonEpisode(self):
+ """ Returns the s00e00 string containing the season and episode. """
+ return 'S%sE%s' % (str(self.seasonNumber).zfill(2), str(self.index).zfill(2))
@property
def seasonEpisode(self):
|
Update video.py
Adding the seasonEpisode method that is listed in the documentation but not actually implemented
|
pkkid_python-plexapi
|
train
|
6d1e8b641aba0842a1885d331d6815d3105928c8
|
diff --git a/source/Core/Database/Adapter/DoctrineResultSet.php b/source/Core/Database/Adapter/DoctrineResultSet.php
index <HASH>..<HASH> 100644
--- a/source/Core/Database/Adapter/DoctrineResultSet.php
+++ b/source/Core/Database/Adapter/DoctrineResultSet.php
@@ -62,8 +62,7 @@ class DoctrineResultSet
$this->executeAdapted();
} else {
// @todo: double check, if this path or the DoctrineEmptyResultSet could be removed
- $this->EOF = true;
- $this->fields = false;
+ $this->setToEmptyState();
}
}
@@ -177,10 +176,43 @@ class DoctrineResultSet
}
/**
- * @todo: implement and test
+ * Load the n-th row.
+ *
+ * @param int $rowIndex The number of the row to load.
+ *
+ * @return bool Is there another row?
*/
- public function Move($row)
+ public function Move($rowIndex)
{
+ if ($this->isEmpty()) {
+ $this->setToEmptyState();
+
+ return false;
+ }
+
+ if ($this->isFetchedFirst()) {
+ $this->executeAdapted();
+ }
+
+ if (0 == $rowIndex) {
+ return true;
+ }
+
+ $lastFields = $this->fields;
+
+ while (0 < $rowIndex) {
+ $lastFields = $this->fields;
+
+ if (!$this->MoveNext()) {
+ $rowIndex = 0;
+ $this->fields = $lastFields;
+ $this->EOF = false;
+ }
+
+ $rowIndex--;
+ }
+
+ return true;
}
/**
@@ -254,4 +286,23 @@ class DoctrineResultSet
$this->setFetchedFirst(true);
}
+
+ /**
+ * Set the state of this wrapper to 'empty'.
+ */
+ private function setToEmptyState()
+ {
+ $this->EOF = true;
+ $this->fields = false;
+ }
+
+ /**
+ * Determine, if the wrapped result set is empty.
+ *
+ * @return bool Is the wrapped result set empty?
+ */
+ private function isEmpty()
+ {
+ return 0 === $this->recordCount();
+ }
}
\ No newline at end of file
diff --git a/tests/Integration/core/Database/Adapter/DoctrineResultSetTest.php b/tests/Integration/core/Database/Adapter/DoctrineResultSetTest.php
index <HASH>..<HASH> 100644
--- a/tests/Integration/core/Database/Adapter/DoctrineResultSetTest.php
+++ b/tests/Integration/core/Database/Adapter/DoctrineResultSetTest.php
@@ -41,7 +41,7 @@ class Integration_Core_Database_Adapter_DoctrineResultSetTest extends UnitTestCa
/**
* @var bool Should this test use the legacy database for the tests?
*/
- protected $useLegacyDatabase = false;
+ protected $useLegacyDatabase = true;
/**
* @var string The name of the class, including the complete namespace.
@@ -145,6 +145,51 @@ class Integration_Core_Database_Adapter_DoctrineResultSetTest extends UnitTestCa
}
/**
+ * Test, that the method 'Move' works with an empty result set.
+ */
+ public function testMoveWithEmptyResultSet()
+ {
+ $resultSet = $this->database->select('SELECT OXID FROM oxvouchers;');
+
+ $methodResult = $resultSet->Move(7);
+
+ $this->assertFalse($methodResult);
+ $this->assertTrue($resultSet->EOF);
+ $this->assertFalse($resultSet->fields);
+ }
+
+ /**
+ * @return array The parameters we want to use for the testMove method.
+ */
+ public function dataProvider_testMove()
+ {
+ return array(
+ array(2, array('0962081a5693597654fd2887af7a6095')),
+ array(0, array('09602cddb5af0aba745293d08ae6bcf6')),
+ array(1, array('09620040146118fbc4b7eef6a0faf072')),
+ array(300, array('a7c44be4a5ddee114.67356237')) // the last row (no. 239) stays
+ );
+ }
+ /**
+ * Test the method 'Move' with the parameters given by the corresponding data provider.
+ *
+ * @dataProvider dataProvider_testMove
+ *
+ * @param int $moveTo The index of the line we want to check.
+ * @param array $expectedFields The expected values in the given line.
+ */
+ public function testMove($moveTo, $expectedFields)
+ {
+ $resultSet = $this->database->select('SELECT OXID FROM oxarticles ORDER BY OXID;');
+
+ $methodResult = $resultSet->Move($moveTo);
+
+ $this->assertTrue($methodResult);
+ $this->assertEquals($expectedFields, $resultSet->fields);
+ $this->assertFalse($resultSet->EOF);
+ }
+
+ /**
* Test, that the result set of an empty select works as expected.
*
* @return DoctrineResultSet The empty result set.
|
ESDEV-<I> Reimplement method 'Move' and tests in class DoctrineResultSet.
The result set of adodblite has a method 'move', so it must be implemented in the Doctrine result set too.
(cherry picked from commit 0fe<I>a8)
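A toy Python model of the Move(n) semantics implemented above: an empty set reports EOF, index 0 is valid, and moving past the end clamps to the last row (matching the data provider's 300 case). The class name and structure are illustrative, not the PHP implementation.

class ToyResultSet:
    def __init__(self, rows):
        self.rows = rows
        self.EOF = not rows
        self.fields = rows[0] if rows else False

    def move(self, row_index):
        if not self.rows:
            # Empty result set: mirror setToEmptyState().
            self.EOF, self.fields = True, False
            return False
        # Clamp at the last row instead of signalling EOF.
        target = min(row_index, len(self.rows) - 1)
        self.fields, self.EOF = self.rows[target], False
        return True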
|
OXID-eSales_oxideshop_ce
|
train
|
8e2af4e6d465b45ff229171a99e87111fda943e0
|
diff --git a/dusty/daemon.py b/dusty/daemon.py
index <HASH>..<HASH> 100644
--- a/dusty/daemon.py
+++ b/dusty/daemon.py
@@ -12,9 +12,10 @@ import os
import logging
import socket
import threading
+# requests refused to play nicely with pyinstaller
+import httplib
from docopt import docopt
-import requests
from .preflight import preflight_check, refresh_preflight_warnings
from .log import configure_logging, make_socket_logger, close_socket_logger
@@ -66,10 +67,12 @@ def close_client_connection(terminator=SOCKET_TERMINATOR):
def shut_down_http_server():
logging.info('Daemon is shutting down HTTP server')
try:
- r = requests.post('http://{}:{}/shutdown'.format(constants.DAEMON_HTTP_BIND_IP,
- constants.DAEMON_HTTP_BIND_PORT),
- timeout=2)
- r.raise_for_status()
+ h = httplib.HTTPConnection('{}:{}'.format(constants.DAEMON_HTTP_BIND_IP,
+ constants.DAEMON_HTTP_BIND_PORT))
+ h.request('POST', '/shutdown')
+ r = h.getresponse()
+ if r.status != 200:
+ raise ValueError('Got status code {} from response'.format(r.status))
except Exception as e:
logging.exception('Exception trying to shut down HTTP server')
diff --git a/requirements.py b/requirements.py
index <HASH>..<HASH> 100644
--- a/requirements.py
+++ b/requirements.py
@@ -7,7 +7,6 @@ install_requires = [
'Schemer==0.2.9',
'psutil==2.2.1',
'Flask==0.10.1',
- 'requests==2.5.1',
]
test_requires = [
|
Okay, let's try this with stdlib
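For reference, the same stdlib-only POST on Python 3, where httplib became http.client; the host and port below are placeholders, not the daemon's real constants.

import http.client

def shut_down_http_server(host='127.0.0.1', port=60912):
    conn = http.client.HTTPConnection(host, port, timeout=2)
    conn.request('POST', '/shutdown')
    response = conn.getresponse()
    if response.status != 200:
        raise ValueError('Got status code {} from response'.format(response.status))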
|
gamechanger_dusty
|
train
|
a60b1b05e5d6f0b3e9bfac364a08c48757badd2c
|
diff --git a/salt/utils/validate/__init__.py b/salt/utils/validate/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/utils/validate/__init__.py
+++ b/salt/utils/validate/__init__.py
@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+'''
+The salt.utils.validate package contains routines for validating
+components and values.
+'''
|
encoding and docstring for package
|
saltstack_salt
|
train
|
2e0b07a0c39cdd2fb40aa954180b9ca328207ef3
|
diff --git a/src/components/chips/js/chipsDirective.js b/src/components/chips/js/chipsDirective.js
index <HASH>..<HASH> 100644
--- a/src/components/chips/js/chipsDirective.js
+++ b/src/components/chips/js/chipsDirective.js
@@ -38,7 +38,7 @@
*
* <ul>Validation
* <li>allow a validation callback</li>
- * <li>hilighting style for invalid chips</li>
+ * <li>highlighting style for invalid chips</li>
* </ul>
*
* <ul>Item mutation
|
docs(chips): corrected hilighting to highlighting (#<I>)
|
angular_material
|
train
|
636742c4baffb10e7d593160c2be9144f16d9f79
|
diff --git a/src/core/Tags.java b/src/core/Tags.java
index <HASH>..<HASH> 100644
--- a/src/core/Tags.java
+++ b/src/core/Tags.java
@@ -257,23 +257,64 @@ public final class Tags {
*/
static Map<String, String> getTags(final TSDB tsdb,
final byte[] row) throws NoSuchUniqueId {
+ try {
+ return getTagsAsync(tsdb, row).joinUninterruptibly();
+ } catch (RuntimeException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new RuntimeException("Should never be here", e);
+ }
+ }
+
+ /**
+ * Returns the tags stored in the given row key.
+ * @param tsdb The TSDB instance to use for Unique ID lookups.
+ * @param row The row key from which to extract the tags.
+ * @return A map of tag names (keys), tag values (values).
+ * @throws NoSuchUniqueId if the row key contained an invalid ID (unlikely).
+ * @since 1.2
+ */
+ static Deferred<Map<String, String>> getTagsAsync(final TSDB tsdb,
+ final byte[] row) throws NoSuchUniqueId {
final short name_width = tsdb.tag_names.width();
final short value_width = tsdb.tag_values.width();
final short tag_bytes = (short) (name_width + value_width);
- final byte[] tmp_name = new byte[name_width];
- final byte[] tmp_value = new byte[value_width];
final short metric_ts_bytes = (short) (tsdb.metrics.width()
+ Const.TIMESTAMP_BYTES);
- final HashMap<String, String> result
- = new HashMap<String, String>((row.length - metric_ts_bytes) / tag_bytes);
+
+ final ArrayList<Deferred<String>> deferreds =
+ new ArrayList<Deferred<String>>((row.length - metric_ts_bytes) / tag_bytes);
+
for (short pos = metric_ts_bytes; pos < row.length; pos += tag_bytes) {
+ final byte[] tmp_name = new byte[name_width];
+ final byte[] tmp_value = new byte[value_width];
+
System.arraycopy(row, pos, tmp_name, 0, name_width);
- final String name = tsdb.tag_names.getName(tmp_name);
+ deferreds.add(tsdb.tag_names.getNameAsync(tmp_name));
+
System.arraycopy(row, pos + name_width, tmp_value, 0, value_width);
- final String value = tsdb.tag_values.getName(tmp_value);
- result.put(name, value);
+ deferreds.add(tsdb.tag_values.getNameAsync(tmp_value));
}
- return result;
+
+ class NameCB implements Callback<Map<String, String>, ArrayList<String>> {
+ public Map<String, String> call(final ArrayList<String> names)
+ throws Exception {
+ final HashMap<String, String> result = new HashMap<String, String>(
+ (row.length - metric_ts_bytes) / tag_bytes);
+ String tagk = "";
+ for (String name : names) {
+ if (tagk.isEmpty()) {
+ tagk = name;
+ } else {
+ result.put(tagk, name);
+ tagk = "";
+ }
+ }
+ return result;
+ }
+ }
+
+ return Deferred.groupInOrder(deferreds).addCallback(new NameCB());
}
/**
@@ -366,7 +407,9 @@ public final class Tags {
}
// Put all the deferred tag resolutions in this list.
- tag_ids.add(name_id.addCallbackDeferring(new TagNameResolvedCB()));
+ final Deferred<byte[]> resolve =
+ name_id.addCallbackDeferring(new TagNameResolvedCB());
+ tag_ids.add(resolve);
}
// And then once we have all the tags resolved, sort them.
|
Add Tags.getTagsAsync() to resolve the tags asynchronously from a row key
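The same fan-out-then-zip pattern in a hedged asyncio sketch; resolve_name is a stand-in for the real UID-to-name lookup, not OpenTSDB's API.

import asyncio

async def resolve_name(uid):
    await asyncio.sleep(0)  # placeholder for the real asynchronous lookup
    return uid.decode()

async def get_tags_async(uid_pairs):
    # uid_pairs: [(tagk_uid, tagv_uid), ...] extracted from the row key.
    flat = [uid for pair in uid_pairs for uid in pair]
    names = await asyncio.gather(*(resolve_name(u) for u in flat))
    # Names alternate tagk, tagv, ... exactly like the NameCB callback.
    return dict(zip(names[0::2], names[1::2]))

print(asyncio.run(get_tags_async([(b'host', b'web01'), (b'dc', b'eu-1')])))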
|
OpenTSDB_opentsdb
|
train
|
59fc8aba265c36a71172711b0d3d37ec97a52b7c
|
diff --git a/src/fileseq/filesequence.py b/src/fileseq/filesequence.py
index <HASH>..<HASH> 100644
--- a/src/fileseq/filesequence.py
+++ b/src/fileseq/filesequence.py
@@ -756,7 +756,7 @@ class FileSequence(object):
else:
seq._pad = seq._frame_pad
- seq.__init__(utils.asString(seq))
+ seq.__init__(utils.asString(seq), pad_style=pad_style, allow_subframes=allow_subframes)
yield seq
@classmethod
diff --git a/test/test_unit.py b/test/test_unit.py
index <HASH>..<HASH> 100755
--- a/test/test_unit.py
+++ b/test/test_unit.py
@@ -982,6 +982,25 @@ class TestFileSequence(TestBase):
for expect in expects:
self.assertIn(expect, actual)
+ def test_yield_sequences_in_list_pad_style(self):
+ paths = [
+ 'seq/file.0001.exr',
+ 'seq/file.0002.exr',
+ 'seq/file.0003.exr',
+ ]
+
+ expect = 'seq/file.1-3#.exr'
+ actual = list(FileSequence.yield_sequences_in_list(paths, pad_style=fileseq.PAD_STYLE_HASH4))[0]
+ self.assertEqual(expect, str(actual))
+ self.assertEqual(fileseq.PAD_STYLE_HASH4, actual.padStyle())
+ self.assertEqual(4, actual.zfill())
+
+ expect = 'seq/file.1-3####.exr'
+ actual = list(FileSequence.yield_sequences_in_list(paths, pad_style=fileseq.PAD_STYLE_HASH1))[0]
+ self.assertEqual(expect, str(actual))
+ self.assertEqual(fileseq.PAD_STYLE_HASH1, actual.padStyle())
+ self.assertEqual(4, actual.zfill())
+
def testIgnoreFrameSetStrings(self):
for char in "xy:,".split():
fs = FileSequence("/path/to/file{0}1-1x1#.exr".format(char))
|
fix yield_sequences_in_list not retaining user pad_style and subframe options in sequences (refs #<I>)
|
sqlboy_fileseq
|
train
|
b1a076eb5adb00573a24025ee8100487bb7ce224
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -38,9 +38,9 @@ setup(
],
install_requires=[
'Django>=1.6.7',
- 'sqlparse==0.1.11',
- 'unicodecsv==0.13.0',
- 'six==1.10.0',
+ 'sqlparse>=0.1.11',
+ 'unicodecsv>=0.13.0',
+ 'six>=1.10.0',
],
include_package_data=True,
zip_safe = False,
|
Update setup.py
To fix installation problems
|
groveco_django-sql-explorer
|
train
|
2fb4de2f271adafa9ac1c2fc4e50cc0aeb1a3b69
|
diff --git a/gocron.go b/gocron.go
index <HASH>..<HASH> 100644
--- a/gocron.go
+++ b/gocron.go
@@ -1,5 +1,13 @@
// Package gocron : A Golang Job Scheduling Package.
//
+// Note from current maintainers:
+//
+// A currently maintained fork of this project has been migrated to https://github.com/go-co-op/gocron
+//
+// Disclaimer: we (the maintainers) tried, with no luck, to get in contact with Jason (the repository owner) in order to add new maintainers or leave the project within an organization. Unfortunately, he hasn't replied for months now (March, 2020).
+//
+// So, we decided to move the project to a new repository (as stated above), in order to keep the evolution of the project coming from as many people as possible. Feel free to reach over!
+//
// An in-process scheduler for periodic jobs that uses the builder pattern
// for configuration. Schedule lets you run Golang functions periodically
// at pre-determined intervals using a simple, human-friendly syntax.
|
add repo move note so it shows in godoc
|
jasonlvhit_gocron
|
train
|
7ab3213907ee15630a5e2d45a62237e527d9bb92
|
diff --git a/build/agents.rb b/build/agents.rb
index <HASH>..<HASH> 100644
--- a/build/agents.rb
+++ b/build/agents.rb
@@ -47,6 +47,7 @@ dependencies = [
'ext/common/LoggingAgent/RemoteSender.h',
'ext/common/LoggingAgent/ChangeNotifier.h',
'ext/common/LoggingAgent/DataStoreId.h',
+ 'ext/common/LoggingAgent/FilterSupport.h',
'ext/common/ServerInstanceDir.h',
'ext/common/Logging.h',
'ext/common/EventedServer.h',
|
LoggingAgent depends on FilterSupport.
|
phusion_passenger
|
train
|
79f55657e56c3864659193b00fff6bd8618c4137
|
diff --git a/packages/morph/lib/dom-helper.js b/packages/morph/lib/dom-helper.js
index <HASH>..<HASH> 100644
--- a/packages/morph/lib/dom-helper.js
+++ b/packages/morph/lib/dom-helper.js
@@ -127,6 +127,10 @@ prototype.setAttribute = function(element, name, value) {
element.setAttribute(name, value);
};
+prototype.removeAttribute = function(element, name) {
+ element.removeAttribute(name);
+};
+
if (doc && doc.createElementNS) {
// Only opt into namespace detection if a contextualElement
// is passed.
diff --git a/packages/morph/tests/dom-helper-test.js b/packages/morph/tests/dom-helper-test.js
index <HASH>..<HASH> 100644
--- a/packages/morph/tests/dom-helper-test.js
+++ b/packages/morph/tests/dom-helper-test.js
@@ -39,6 +39,16 @@ test('#setAttribute', function(){
equalHTML(node, '<div id="super-tag"></div>');
});
+test('#removeAttribute', function(){
+ var node = dom.createElement('div');
+ dom.setAttribute(node, 'id', 'super-tag');
+ equalHTML(node, '<div id="super-tag"></div>', 'precond - attribute exists');
+
+
+ dom.removeAttribute(node, 'id');
+ equalHTML(node, '<div></div>', 'attribute was removed');
+});
+
test('#createElement of tr with contextual table element', function(){
var tableElement = document.createElement('table'),
node = dom.createElement('tr', tableElement);
|
Add removeAttribute to DomHelper.
Needed by Ember to allow abstraction of `element.removeAttribute` (in
case we end up with special logic).
Also, dovetails with `dom.setAttribute`.
|
glimmerjs_glimmer-vm
|
train
|
30757221b433f1c13cd79e7a7242f3dff4fd02f4
|
diff --git a/cmd/client/sandbox.go b/cmd/client/sandbox.go
index <HASH>..<HASH> 100644
--- a/cmd/client/sandbox.go
+++ b/cmd/client/sandbox.go
@@ -137,6 +137,12 @@ var podSandboxStatusCommand = cli.Command{
var listPodSandboxCommand = cli.Command{
Name: "list",
Usage: "list pod sandboxes",
+ Flags: []cli.Flag{
+ cli.BoolFlag{
+ Name: "quiet",
+ Usage: "list only pod IDs",
+ },
+ },
Action: func(context *cli.Context) error {
// Set up a connection to the server.
conn, err := getClientConnection(context)
@@ -146,7 +152,7 @@ var listPodSandboxCommand = cli.Command{
defer conn.Close()
client := pb.NewRuntimeServiceClient(conn)
- err = ListPodSandboxes(client)
+ err = ListPodSandboxes(client, context.Bool("quiet"))
if err != nil {
return fmt.Errorf("listing pod sandboxes failed: %v", err)
}
@@ -258,12 +264,16 @@ func PodSandboxStatus(client pb.RuntimeServiceClient, ID string) error {
// ListPodSandboxes sends a ListPodSandboxRequest to the server, and parses
// the returned ListPodSandboxResponse.
-func ListPodSandboxes(client pb.RuntimeServiceClient) error {
+func ListPodSandboxes(client pb.RuntimeServiceClient, quiet bool) error {
r, err := client.ListPodSandbox(context.Background(), &pb.ListPodSandboxRequest{})
if err != nil {
return err
}
for _, pod := range r.Items {
+ if quiet {
+ fmt.Println(*pod.Id)
+ continue
+ }
fmt.Printf("ID: %s\n", *pod.Id)
if pod.Metadata != nil {
if pod.Metadata.Name != nil {
|
cmd/client/sandbox: add --quiet to pod list
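For comparison, the same flag as a hedged Python argparse sketch; the pod data is invented.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--quiet', action='store_true', help='list only pod IDs')
args = parser.parse_args(['--quiet'])

pods = [{'id': 'abc123'}, {'id': 'def456'}]
for pod in pods:
    if args.quiet:
        print(pod['id'])
        continue
    print('ID: {}'.format(pod['id']))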
|
cri-o_cri-o
|
train
|
ac354bb39a0cf74caadd8b6e3ef21651e09516a4
|
diff --git a/modules/social_features/social_tagging/src/SocialTaggingService.php b/modules/social_features/social_tagging/src/SocialTaggingService.php
index <HASH>..<HASH> 100644
--- a/modules/social_features/social_tagging/src/SocialTaggingService.php
+++ b/modules/social_features/social_tagging/src/SocialTaggingService.php
@@ -191,7 +191,7 @@ class SocialTaggingService {
// Get current terms parents.
$parents = $this->termStorage->loadParents($current_term->id());
$parent = reset($parents);
- $category_label = $parent->getTranslation($langcode)->getName();
+ $category_label = $parent->hasTranslation($langcode) ? $parent->getTranslation($langcode)->getName() : $parent->getName();
// Prepare the parameter;.
$parameter = $allowSplit ? social_tagging_to_machine_name($category_label) : 'tag';
@@ -205,7 +205,7 @@ class SocialTaggingService {
$tree[$parent->id()]['title'] = $category_label;
$tree[$parent->id()]['tags'][$current_term->id()] = [
'url' => $url,
- 'name' => $current_term->getTranslation($langcode)->getName(),
+ 'name' => $current_term->hasTranslation($langcode) ? $current_term->getTranslation($langcode)->getName() : $current_term->getName(),
];
}
}
|
Issue #<I> by navneet<I>, robertragas: Added defensive checks for existing translations.
|
goalgorilla_open_social
|
train
|
649872d3f2f879dbec25b3bee861b458b7d0df64
|
diff --git a/redis_metrics/models.py b/redis_metrics/models.py
index <HASH>..<HASH> 100644
--- a/redis_metrics/models.py
+++ b/redis_metrics/models.py
@@ -3,7 +3,7 @@ This app doesn't have any models, per se, but the following ``R`` class is a
lightweight wrapper around Redis.
"""
-from collections import OrderedDict
+from collections import OrderedDict, defaultdict
from datetime import datetime, timedelta
import redis
@@ -391,6 +391,62 @@ class R(object):
results = zip(keys, results)
return sorted(results)
+ def get_metric_history_as_rows(self, slugs, since=None, granularity='daily'):
+ """Provides the same data as ``get_metric_history``, but with metrics
+ organized by row. If you had the following yearly history, for example::
+
+ [
+ ('m:bar:y:2012', '1'),
+ ('m:bar:y:2013', '2'),
+ ('m:bar:y:2014', '3'),
+ ('m:foo:y:2012', '4'),
+ ('m:foo:y:2013', '5')
+ ('m:foo:y:2014', '6')
+ ]
+
+ this method would provide you with the following data structure::
+
+ 'periods': ['y:2012', 'y:2013', 'y:2014']
+ 'data': [
+ {
+ 'slug': 'bar',
+ 'values': [1, 2, 3]
+ },
+ {
+ 'slug': 'foo',
+ 'values': [4, 5, 6]
+ },
+ ]
+
+ """
+ slugs = sorted(slugs)
+ history = self.get_metric_history(slugs, since, granularity)
+
+ # Convert the history into an intermediate data structure organized
+ # by periods. Since the history is sorted by key (which includes both
+ # the slug and the date, the values should be ordered correctly.
+ periods = []
+ data = OrderedDict()
+ for k, v in history:
+ period = template_tags.strip_metric_prefix(k)
+ if period not in periods:
+ periods.append(period)
+
+ slug = template_tags.metric_slug(k)
+ if slug not in data:
+ data[slug] = []
+ data[slug].append(v)
+
+ # Now, reorganize data for our end result.
+ metrics = {'periods': periods, 'data': []}
+ for slug, values in data.items():
+ metrics['data'].append({
+ 'slug': slug,
+ 'values': values
+ })
+
+ return metrics # templates still don't like defaultdict's
+
def get_metric_history_as_columns(self, slugs, since=None,
granularity='daily'):
"""Provides the same data as ``get_metric_history``, but in a columnar
|
format data so it's easier to plot in Chart.js
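The pivot above in a compact sketch, assuming history is the sorted [(key, value), ...] pairs from get_metric_history and that period_of/slug_of behave like the template tags:

from collections import OrderedDict

def history_as_rows(history, period_of, slug_of):
    periods, data = [], OrderedDict()
    for key, value in history:
        period = period_of(key)
        if period not in periods:
            periods.append(period)
        # Values stay ordered because the history is sorted by key.
        data.setdefault(slug_of(key), []).append(value)
    return {'periods': periods,
            'data': [{'slug': s, 'values': v} for s, v in data.items()]}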
|
bradmontgomery_django-redis-metrics
|
train
|
433cf3f90fc0f176f5cc16f627a3cdc873712a87
|
diff --git a/tests/tests.py b/tests/tests.py
index <HASH>..<HASH> 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -14,9 +14,11 @@ from django.test import Client, TestCase
from django.utils._os import upath
from PIL import Image
+from versatileimagefield.files import VersatileImageFileDescriptor
from versatileimagefield.datastructures.filteredimage import InvalidFilter
from versatileimagefield.datastructures.sizedimage import \
MalformedSizedImageKey, SizedImage
+from versatileimagefield.datastructures.filteredimage import FilteredImage
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
from versatileimagefield.registry import (
versatileimagefield_registry,
@@ -374,9 +376,17 @@ class VersatileImageFieldTestCase(TestCase):
)
)
- def test_VersatileImageFieldDescriptor__set__(self):
+ def VersatileImageFileDescriptor__get__None(self):
"""
- Ensures VersatileImageFieldDescriptor.__set__ works as intended
+ Calls VersatileImageFileDescriptor.__get__ without an instance
+ should raise AttributeError
+ """
+ x = VersatileImageFileDescriptor(self.jpg.image.name)
+ VersatileImageFileDescriptor.__get__(x)
+
+ def test_VersatileImageFileDescriptor(self):
+ """
+ Ensures VersatileImageFileDescriptor works as intended
"""
self.jpg.image = 'python-logo-2.jpg'
self.jpg.save()
@@ -398,6 +408,10 @@ class VersatileImageFieldTestCase(TestCase):
self.jpg.image = img_file
django_file = File(img_file)
self.jpg.image = django_file
+ self.assertRaises(
+ AttributeError,
+ self.VersatileImageFileDescriptor__get__None
+ )
def test_VersatileImageField_picklability(self):
"""
@@ -415,6 +429,17 @@ class VersatileImageFieldTestCase(TestCase):
jpg_instance.image.thumbnail['100x100'].url,
'/media/__sized__/python-logo-thumbnail-100x100.jpg'
)
+ pickled_state = self.jpg.image.__getstate__()
+ self.assertEqual(
+ pickled_state,
+ {
+ '_create_on_demand': False,
+ '_committed': True,
+ '_file': None,
+ 'name': 'python-logo.jpg',
+ 'closed': False
+ }
+ )
@staticmethod
def non_existent_rendition_key_set():
@@ -810,9 +835,21 @@ class VersatileImageFieldTestCase(TestCase):
x.process_image(image=None, image_format='JPEG', save_kwargs={},
width=100, height=100)
- def test_SizedImage_subclass_exceptions(self):
+ def FilteredImage_no_process_image(self):
+ class FilteredImageSubclass(FilteredImage):
+ filename_key = 'test'
+
+ x = FilteredImageSubclass(
+ self.jpg.image.name,
+ self.jpg.image.field.storage,
+ False,
+ filename_key='foo'
+ )
+ x.process_image(image=None, image_format='JPEG', save_kwargs={})
+
+ def test_ProcessedImage_subclass_exceptions(self):
"""
- Ensures improperly constructed SizedImage subclasses throw
+ Ensures improperly constructed ProcessedImage subclasses throw
NotImplementedError when appropriate.
"""
self.assertRaises(
@@ -823,3 +860,7 @@ class VersatileImageFieldTestCase(TestCase):
NotImplementedError,
self.SizedImage_no_process_image
)
+ self.assertRaises(
+ NotImplementedError,
+ self.FilteredImage_no_process_image
+ )
|
Improving test coverage for VersatileImageFileDescriptor & ProcessedImage
|
respondcreate_django-versatileimagefield
|
train
|
9164fb3add19988cbd0821c0591cda8f876ef636
|
diff --git a/storage/src/main/java/net/kuujo/copycat/io/storage/Log.java b/storage/src/main/java/net/kuujo/copycat/io/storage/Log.java
index <HASH>..<HASH> 100644
--- a/storage/src/main/java/net/kuujo/copycat/io/storage/Log.java
+++ b/storage/src/main/java/net/kuujo/copycat/io/storage/Log.java
@@ -294,12 +294,14 @@ public class Log implements AutoCloseable {
*/
public Log truncate(long index) {
checkOpen();
- checkIndex(index);
+ if (index > 0 && !containsIndex(index))
+ throw new IndexOutOfBoundsException(index + " is not a valid log index");
+
if (lastIndex() == index)
return this;
for (Segment segment : segments.segments()) {
- if (segment.containsIndex(index)) {
+ if (index == 0 || segment.containsIndex(index)) {
segment.truncate(index);
} else if (segment.descriptor().index() > index) {
segments.remove(segment);
diff --git a/storage/src/test/java/net/kuujo/copycat/io/storage/LogTest.java b/storage/src/test/java/net/kuujo/copycat/io/storage/LogTest.java
index <HASH>..<HASH> 100644
--- a/storage/src/test/java/net/kuujo/copycat/io/storage/LogTest.java
+++ b/storage/src/test/java/net/kuujo/copycat/io/storage/LogTest.java
@@ -142,6 +142,20 @@ public class LogTest {
}
/**
+ * Tests emptying the log.
+ */
+ public void testTruncateZero() throws Throwable {
+ try (Log log = createLog()) {
+ appendEntries(log, 100);
+ Assert.assertEquals(log.lastIndex(), 100);
+ log.truncate(0);
+ Assert.assertEquals(log.lastIndex(), 0);
+ appendEntries(log, 10);
+ Assert.assertEquals(log.lastIndex(), 10);
+ }
+ }
+
+ /**
* Tests skipping entries in the log.
*/
public void testSkip() throws Throwable {
|
Ensure exception is not thrown when log is truncated to index 0.
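A toy model of the fixed semantics: truncating to index 0 empties the log instead of raising, while any other index must exist. This is an illustration, not the Java implementation.

class ToyLog:
    def __init__(self):
        self.entries = []  # entry i lives at log index i + 1

    def last_index(self):
        return len(self.entries)

    def contains_index(self, index):
        return 1 <= index <= self.last_index()

    def truncate(self, index):
        if index > 0 and not self.contains_index(index):
            raise IndexError('%d is not a valid log index' % index)
        del self.entries[index:]

log = ToyLog()
log.entries = list(range(100))
log.truncate(0)
print(log.last_index())  # 0, and appending afterwards still works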
|
atomix_atomix
|
train
|
d79fcbc5460887b894dc24347e693615197ddaac
|
diff --git a/engines/bastion_katello/app/assets/javascripts/bastion_katello/sync-plans/sync-plan-helper.service.js b/engines/bastion_katello/app/assets/javascripts/bastion_katello/sync-plans/sync-plan-helper.service.js
index <HASH>..<HASH> 100644
--- a/engines/bastion_katello/app/assets/javascripts/bastion_katello/sync-plans/sync-plan-helper.service.js
+++ b/engines/bastion_katello/app/assets/javascripts/bastion_katello/sync-plans/sync-plan-helper.service.js
@@ -48,8 +48,7 @@
* @returns $resource sync plan
*/
this.createSyncPlan = function (syncPlan, success, error) {
- var GMT_OFFSET_MILLISECONDS = syncPlan.startDate.getTimezoneOffset() * 60000,
- syncDate = new Date(syncPlan.startDate.getTime() + GMT_OFFSET_MILLISECONDS),
+ var syncDate = new Date(syncPlan.startDate.getTime()),
syncTime = new Date(syncPlan.startTime || new Date());
syncDate.setHours(syncTime.getHours());
syncDate.setMinutes(syncTime.getMinutes());
|
Fixes #<I> - Sync Plan saved with incorrect date/time
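A minimal sketch of the corrected combination: copy the chosen time-of-day onto the start date without re-applying the timezone offset that produced the double shift. Dates below are invented.

from datetime import datetime

def combine(start_date, start_time):
    return start_date.replace(hour=start_time.hour,
                              minute=start_time.minute)

print(combine(datetime(2016, 4, 1), datetime(2016, 4, 1, 14, 30)))
# 2016-04-01 14:30:00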
|
Katello_katello
|
train
|
d4cb7dc4618d9816f90cd8a271ffd31ec063d458
|
diff --git a/test/core/queue.js b/test/core/queue.js
index <HASH>..<HASH> 100644
--- a/test/core/queue.js
+++ b/test/core/queue.js
@@ -66,7 +66,7 @@ describe('queue', function(){
should.exist(obj);
obj = JSON.parse(obj);
obj['class'].should.equal('someJob');
- // obj['args'].should.equal([1,2,3]);
+ obj['args'].should.eql([1,2,3]);
done();
});
});
@@ -80,7 +80,7 @@ describe('queue', function(){
should.exist(obj);
obj = JSON.parse(obj);
obj['class'].should.equal('someJob');
- // obj['args'].should.equal([1,2,3]);
+ obj['args'].should.eql([1,2,3]);
done();
});
});
@@ -96,7 +96,7 @@ describe('queue', function(){
should.exist(obj);
obj = JSON.parse(obj);
obj['class'].should.equal('someJob');
- // obj['args'].should.equal([1,2,3]);
+ obj['args'].should.eql([1,2,3]);
done();
});
});
diff --git a/test/core/scheduler.js b/test/core/scheduler.js
index <HASH>..<HASH> 100644
--- a/test/core/scheduler.js
+++ b/test/core/scheduler.js
@@ -31,7 +31,7 @@ describe('scheduler', function(){
var resolved = false;
scheduler = new specHelper.NR.scheduler({connection: connectionDetails, timeout: specHelper.timeout}, function(err){
- if(resolved === false){ // new versions of redis will keep retrying in node v0.11x...
+ if(resolved === false){ // new versions of redis will keep retrying in node v0.11x...
should.exist(err);
resolved = true;
done();
@@ -81,7 +81,7 @@ describe('scheduler', function(){
should.exist(obj);
obj = JSON.parse(obj);
obj['class'].should.equal('someJob');
- // obj['args'].should.equal([1,2,3]);
+ obj['args'].should.eql([1,2,3]);
done();
});
});
@@ -100,4 +100,4 @@ describe('scheduler', function(){
});
});
-});
\ No newline at end of file
+});
|
enable commented test lines
array comparison can be done with eql
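The distinction the tests rely on, restated in Python terms: should's eql is structural equality (like == on lists) while equal is identity (like is), which is why equal fails on two separate [1,2,3] arrays.

a = [1, 2, 3]
b = [1, 2, 3]
print(a == b)   # True  -> structural comparison, what .eql performs
print(a is b)   # False -> identity comparison, why .equal([1,2,3]) failed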
|
taskrabbit_node-resque
|
train
|
4262d5284bd86443269e9481ff05e15562af9b1a
|
diff --git a/openquake/engine/export/hazard.py b/openquake/engine/export/hazard.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/export/hazard.py
+++ b/openquake/engine/export/hazard.py
@@ -104,6 +104,10 @@ def _get_result_export_path(calc_id, target_dir, result):
core.makedirs(directory)
if output_type in ('hazard_curve', 'hazard_map', 'uh_spectra'):
+ # include the poe in hazard map and uhs file names
+ if output_type in ('hazard_map', 'uh_spectra'):
+ output_type = '%s-poe_%s' % (output_type, result.poe)
+
if result.statistics is not None:
# we could have stats
if result.statistics == 'quantile':
|
export/hazard:
Include "-poe_N.N-" in filenames for UHS and hazard maps.
Former-commit-id: <I>d0e5bfb<I>fe6ba6dbed<I>eb9b<I>b2d1
|
gem_oq-engine
|
train
|
a718ffbd3b9473164ce967ad68aea2078bd97c2f
|
diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/application_controller.rb
+++ b/app/controllers/application_controller.rb
@@ -50,12 +50,12 @@ class ApplicationController < ActionController::Base
@rubygem = Rubygem.find_by_name(params[:rubygem_id] || params[:id])
if @rubygem.blank?
respond_to do |format|
- format.html do
- render :file => "public/404", :status => :not_found, :layout => false, :formats => [:html]
- end
format.any do
render :text => "This rubygem could not be found.", :status => :not_found
end
+ format.html do
+ render :file => "public/404", :status => :not_found, :layout => false, :formats => [:html]
+ end
end
end
end
diff --git a/test/functional/api/v1/owners_controller_test.rb b/test/functional/api/v1/owners_controller_test.rb
index <HASH>..<HASH> 100644
--- a/test/functional/api/v1/owners_controller_test.rb
+++ b/test/functional/api/v1/owners_controller_test.rb
@@ -66,4 +66,13 @@ class Api::V1::OwnersControllerTest < ActionController::TestCase
:format => 'json'}
assert_recognizes(route, :path => '/api/v1/owners/example/gems.json', :method => :get)
end
+
+ should "return plain text 404 error" do
+ @user = create(:user)
+ @request.env["HTTP_AUTHORIZATION"] = @user.api_key
+ @request.accept = '*/*'
+ post :create, rubygem_id: 'bananas'
+ assert_equal 'This rubygem could not be found.', @response.body
+ end
+
end
|
Return plain text response for gems that can't be found via the API. Closes #<I>.
|
rubygems_rubygems.org
|
train
|
26374857e79980684862ba8b7967334ae438dc0a
|
diff --git a/lib/deep_cover/covered_code.rb b/lib/deep_cover/covered_code.rb
index <HASH>..<HASH> 100644
--- a/lib/deep_cover/covered_code.rb
+++ b/lib/deep_cover/covered_code.rb
@@ -87,9 +87,14 @@ module DeepCover
root.main
end
+ def comments
+ root
+ @comments
+ end
+
def root
@root ||= begin
- ast = parser.parse(@buffer)
+ ast, @comments = parser.parse_with_comments(@buffer)
Node::Root.new(ast, self)
end
end
|
+ CoveredCode#comments
|
deep-cover_deep-cover
|
train
|
8752d3ebc39c2abd97f8d081f405bedc9a9461f5
|
diff --git a/dev/com.ibm.ws.install/test/com/ibm/ws/install/JarAssetTest.java b/dev/com.ibm.ws.install/test/com/ibm/ws/install/JarAssetTest.java
index <HASH>..<HASH> 100755
--- a/dev/com.ibm.ws.install/test/com/ibm/ws/install/JarAssetTest.java
+++ b/dev/com.ibm.ws.install/test/com/ibm/ws/install/JarAssetTest.java
@@ -20,13 +20,13 @@ import java.net.MalformedURLException;
import org.junit.Rule;
import org.junit.Test;
-import test.common.SharedOutputManager;
-
import com.ibm.ws.install.internal.InstallUtils.InputStreamFileWriter;
import com.ibm.ws.install.internal.asset.JarAsset;
import com.ibm.ws.install.internal.asset.OpenSourceAsset;
import com.ibm.ws.install.internal.asset.SampleAsset;
+import test.common.SharedOutputManager;
+
public class JarAssetTest {
@Rule
public SharedOutputManager outputMgr = SharedOutputManager.getInstance();
@@ -35,7 +35,9 @@ public class JarAssetTest {
public void testSampleAsset() throws MalformedURLException, IOException {
final String m = "testSampleAsset";
File srcFile = new File("publish/massiveRepo/samples/SampleX.jar");
+ srcFile.getParentFile().mkdirs();
File jarFile = new File("build/unittest/tmp/SampleX.jar");
+ jarFile.getParentFile().mkdirs();
new InputStreamFileWriter(srcFile.getCanonicalFile().toURI().toURL().openConnection().getInputStream()).writeToFile(jarFile);
try {
SampleAsset sampleAsset = new SampleAsset("SampleX", "SampleX", jarFile, true);
@@ -70,7 +72,9 @@ public class JarAssetTest {
public void testOpenSourceAsset() throws MalformedURLException, IOException {
final String m = "testOpenSourceAsset";
File srcFile = new File("publish/massiveRepo/samples/SampleX.jar");
+ srcFile.getParentFile().mkdirs();
File jarFile = new File("build/unittest/tmp/SampleX.jar");
+ jarFile.getParentFile().mkdirs();
new InputStreamFileWriter(srcFile.getCanonicalFile().toURI().toURL().openConnection().getInputStream()).writeToFile(jarFile);
try {
OpenSourceAsset openSourceAsset = new OpenSourceAsset("SampleX", "SampleX", jarFile, true);
|
Ensure parent dirs are created in JarAssetTest
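The Python 3 analogue of the File.getParentFile().mkdirs() calls added above: create parent directories up front, tolerating ones that already exist. The path and payload are placeholders.

import os

path = 'build/unittest/tmp/SampleX.jar'
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, 'wb') as fh:
    fh.write(b'...')  # placeholder payload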
|
OpenLiberty_open-liberty
|
train
|
c10b9d9075258c4b46fad1dece926e7bf790a9bc
|
diff --git a/src/Leviu/Html/View.php b/src/Leviu/Html/View.php
index <HASH>..<HASH> 100644
--- a/src/Leviu/Html/View.php
+++ b/src/Leviu/Html/View.php
@@ -55,14 +55,14 @@ class View
//standard js file
//$this->js[] = URL . 'js/jquery-2.1.4.min.js';
- $this->js[] = URL . 'js/main.js';
- $this->js[] = URL . 'js/ajax.js';
+ //$this->js[] = URL . 'js/main.js';
+ //$this->js[] = URL . 'js/ajax.js';
//$this->js[] = URL . 'js/application.js';
//standard css file
- $this->css[] = URL . 'css/style.css';
+ //$this->css[] = URL . 'css/style.css';
- $this->title = 'App_Mk0';
+ $this->title = 'App';
}
/**
|
Change the standard loaded CSS and JS files
|
linna_framework
|
train
|
b481a6c1ad1c6f9053fc9ec45a32b02849f009fd
|
diff --git a/taglibs/src/main/java/org/springframework/security/taglibs/authz/AccessControlListTag.java b/taglibs/src/main/java/org/springframework/security/taglibs/authz/AccessControlListTag.java
index <HASH>..<HASH> 100644
--- a/taglibs/src/main/java/org/springframework/security/taglibs/authz/AccessControlListTag.java
+++ b/taglibs/src/main/java/org/springframework/security/taglibs/authz/AccessControlListTag.java
@@ -18,6 +18,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.security.access.PermissionEvaluator;
+import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.taglibs.TagLibConfig;
import org.springframework.web.context.support.WebApplicationContextUtils;
@@ -43,6 +44,7 @@ import java.util.*;
*
* @author Ben Alex
* @author Luke Taylor
+ * @author Rob Winch
*/
public class AccessControlListTag extends TagSupport {
//~ Static fields/initializers =====================================================================================
@@ -75,7 +77,8 @@ public class AccessControlListTag extends TagSupport {
return evalBody();
}
- if (SecurityContextHolder.getContext().getAuthentication() == null) {
+ Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
+ if (authentication == null) {
if (logger.isDebugEnabled()) {
logger.debug(
"SecurityContextHolder did not return a non-null Authentication object, so skipping tag body");
@@ -84,12 +87,14 @@ public class AccessControlListTag extends TagSupport {
return skipBody();
}
- if (permissionEvaluator.hasPermission(SecurityContextHolder.getContext().getAuthentication(),
- domainObject, hasPermission)) {
- return evalBody();
+ String[] requiredPermissions = hasPermission.split(",");
+ for(String requiredPermission : requiredPermissions) {
+ if (!permissionEvaluator.hasPermission(authentication, domainObject, requiredPermission)) {
+ return skipBody();
+ }
}
- return skipBody();
+ return evalBody();
}
private int skipBody() {
diff --git a/taglibs/src/test/java/org/springframework/security/taglibs/authz/AccessControlListTagTests.java b/taglibs/src/test/java/org/springframework/security/taglibs/authz/AccessControlListTagTests.java
index <HASH>..<HASH> 100644
--- a/taglibs/src/test/java/org/springframework/security/taglibs/authz/AccessControlListTagTests.java
+++ b/taglibs/src/test/java/org/springframework/security/taglibs/authz/AccessControlListTagTests.java
@@ -1,3 +1,15 @@
+/*
+ * Copyright 2002-2012 the original author or authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
package org.springframework.security.taglibs.authz;
import static org.junit.Assert.*;
@@ -20,6 +32,7 @@ import java.util.*;
/**
*
* @author Luke Taylor
+ * @author Rob Winch
* @since 3.0
*/
@SuppressWarnings("unchecked")
@@ -67,6 +80,26 @@ public class AccessControlListTagTests {
assertTrue((Boolean)pageContext.getAttribute("allowed"));
}
+ // SEC-2022
+ @Test
+ public void multiHasPermissionsAreSplit() throws Exception {
+ Object domainObject = new Object();
+ when(pe.hasPermission(bob, domainObject, "READ")).thenReturn(true);
+ when(pe.hasPermission(bob, domainObject, "WRITE")).thenReturn(true);
+
+ tag.setDomainObject(domainObject);
+ tag.setHasPermission("READ,WRITE");
+ tag.setVar("allowed");
+ assertSame(domainObject, tag.getDomainObject());
+ assertEquals("READ,WRITE", tag.getHasPermission());
+
+ assertEquals(Tag.EVAL_BODY_INCLUDE, tag.doStartTag());
+ assertTrue((Boolean)pageContext.getAttribute("allowed"));
+ verify(pe).hasPermission(bob, domainObject, "READ");
+ verify(pe).hasPermission(bob, domainObject, "WRITE");
+ verifyNoMoreInteractions(pe);
+ }
+
@Test
public void bodyIsSkippedIfAclDeniesAccess() throws Exception {
Object domainObject = new Object();
|
SEC-<I>: AccessControlListTag again supports , separated list of permissions
Spring Security <I>.x allowed developers to pass in a comma-separated list of permissions.
However, this functionality was accidentally removed in SEC-<I>.
The AccessControlListTag now splits the permissions using ',' as a delimiter,
which fixes this passivity issue.
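A hedged sketch of the restored behaviour: every entry in the comma-separated list must pass, otherwise the tag body is skipped. The evaluator callable stands in for the PermissionEvaluator.

def has_all_permissions(evaluator, authentication, domain_object, has_permission):
    required = has_permission.split(',')
    return all(evaluator(authentication, domain_object, p) for p in required)

allow = lambda auth, obj, perm: perm in ('READ', 'WRITE')
print(has_all_permissions(allow, 'bob', object(), 'READ,WRITE'))   # True
print(has_all_permissions(allow, 'bob', object(), 'READ,DELETE'))  # False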
|
spring-projects_spring-security
|
train
|
83d9721c7c5f226030124af2cb2c7352ce315021
|
diff --git a/react/MuiCozyTheme/theme.js b/react/MuiCozyTheme/theme.js
index <HASH>..<HASH> 100644
--- a/react/MuiCozyTheme/theme.js
+++ b/react/MuiCozyTheme/theme.js
@@ -96,7 +96,7 @@ export const normalTheme = createMuiTheme({
primary: {
light: getCssVariableValue('primaryColorLight'),
main: getCssVariableValue('primaryColor'),
- dark: getCssVariableValue('scienceBlue'),
+ dark: getCssVariableValue('primaryColorDark'),
contrastText: getCssVariableValue('primaryContrastTextColor')
},
error: {
|
feat: Use primaryColorDark CSS variable in MUI theme
|
cozy_cozy-ui
|
train
|
57783c296eba684a6ecc9e7f397c361f5bd7d319
|
diff --git a/bin.js b/bin.js
index <HASH>..<HASH> 100755
--- a/bin.js
+++ b/bin.js
@@ -72,7 +72,7 @@ function uploadFiles (files) {
return files.indexOf(file) === index
})
- buildLog('Uploading ' + uniqueFiles.length + ' prebuilds(s) to Github releases')
+ buildLog('Uploading ' + uniqueFiles.length + ' prebuilds(s) to GitHub releases')
upload(Object.assign({}, opts, { files: uniqueFiles }), function (err, result) {
if (err) return onbuilderror(err)
buildLog('Found ' + result.old.length + ' prebuild(s) on Github')
@@ -81,7 +81,7 @@ function uploadFiles (files) {
buildLog('-> ' + build)
})
}
- buildLog('Uploaded ' + result.new.length + ' new prebuild(s) to Github')
+ buildLog('Uploaded ' + result.new.length + ' new prebuild(s) to GitHub')
if (result.new.length) {
result.new.forEach(function (build) {
buildLog('-> ' + build)
|
renamed mentions of "Github" to "GitHub" (#<I>)
|
prebuild_prebuild
|
train
|
ea186a13495a728049b2695fe431a60e1d3ec97f
|
diff --git a/cafe/database/sqlalchemy/session.py b/cafe/database/sqlalchemy/session.py
index <HASH>..<HASH> 100644
--- a/cafe/database/sqlalchemy/session.py
+++ b/cafe/database/sqlalchemy/session.py
@@ -14,18 +14,18 @@ class SQLAlchemySessionManager(SessionManager):
return cls.instance()
@classmethod
- def instance(cls, engine=None):
+ def instance(cls, engine=None, **kwargs):
"""
:type engine: sqlalchemy.engine.Engine or None
:rtype: cafe.database.sqlalchemy.session.SQLAlchemySessionManager
"""
- return cls(cls.factory(engine))
+ return cls(cls.factory(engine=engine, **kwargs))
@classmethod
- def factory(cls, engine=None):
+ def factory(cls, engine=None, **kwargs):
if engine is None:
engine = cls.engine()
- return sessionmaker(bind=engine)
+ return sessionmaker(bind=engine, **kwargs)
@classmethod
def engine(cls):
|
Allow kwargs to be passed to sessionmaker
SQLAlchemySessionManager.instance() > factory() passes kwargs through to be given to the sessionmaker, allowing sessionmaker parameters such as expire_on_commit to be set.
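With the pass-through in place, a caller can, for example, reach sessionmaker's expire_on_commit; the in-memory engine below is just for illustration.

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('sqlite://')
Session = sessionmaker(bind=engine, expire_on_commit=False)
session = Session()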
|
abn_cafeteria
|
train
|
fee8e89fb5eca78532dfdfe6e4ddfaed1c289520
|
diff --git a/lib/onebox/engine/whitelisted_generic_onebox.rb b/lib/onebox/engine/whitelisted_generic_onebox.rb
index <HASH>..<HASH> 100644
--- a/lib/onebox/engine/whitelisted_generic_onebox.rb
+++ b/lib/onebox/engine/whitelisted_generic_onebox.rb
@@ -82,6 +82,7 @@ module Onebox
meetup.com
mixcloud.com
mlb.com
+ myshopify.com
myspace.com
nba.com
nytimes.com
|
Add myshopify.com to whitelist
|
discourse_onebox
|
train
|
4be02137b6f25ad5859e77a3e07ebf09afe91bd5
|
diff --git a/test/signable_request_test.rb b/test/signable_request_test.rb
index <HASH>..<HASH> 100644
--- a/test/signable_request_test.rb
+++ b/test/signable_request_test.rb
@@ -25,6 +25,13 @@ describe OAuthenticator::SignableRequest do
end.merge(attributes))
end
+ def example_signed_request(authorization, attributes={})
+ attributes = attributes.merge(:authorization => authorization)
+ OAuthenticator::SignableRequest.new(base_example_initialize_attrs.reject do |k,_|
+ attributes.keys.any? { |ak| ak.to_s == k.to_s }
+ end.merge(attributes))
+ end
+
let :rsa_private_key do
"-----BEGIN PRIVATE KEY-----
MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBALRiMLAh9iimur8V
@@ -106,7 +113,7 @@ Lw03eHTNQghS0A==
it 'does not allow protocol parameters to be specified when authorization is specified' do
OAuthenticator::SignableRequest::PROTOCOL_PARAM_KEYS.map do |key|
assert_raises(ArgumentError) do
- OAuthenticator::SignableRequest.new(base_example_initialize_attrs.merge(:authorization => {}, key => 'val'))
+ example_signed_request({}, key => 'val')
end
end
end
@@ -262,14 +269,10 @@ Lw03eHTNQghS0A==
assert !example_request.protocol_params.key?('oauth_signature')
end
it 'does include the signature of a given authorization' do
- assert_equal('a signature', OAuthenticator::SignableRequest.new(base_example_initialize_attrs.merge(
- :authorization => {'oauth_signature' => 'a signature'}
- )).protocol_params['oauth_signature'])
+ assert_equal('a signature', example_signed_request('oauth_signature' => 'a signature').protocol_params['oauth_signature'])
end
it 'does include unknown parameters of a given authorization' do
- assert_equal('bar', OAuthenticator::SignableRequest.new(base_example_initialize_attrs.merge(
- :authorization => {'foo' => 'bar'}
- )).protocol_params['foo'])
+ assert_equal('bar', example_signed_request('foo' => 'bar').protocol_params['foo'])
end
end
@@ -279,14 +282,13 @@ Lw03eHTNQghS0A==
end
it 'has a different signature than the given authorization if the given authorization is wrong' do
- request = OAuthenticator::SignableRequest.new(base_example_initialize_attrs.merge(
- :authorization => {
+ request = example_signed_request({
'oauth_consumer_key' => 'a consumer key',
'oauth_signature' => 'wrong%20secret&',
'oauth_signature_method' => 'PLAINTEXT',
},
- :consumer_secret => 'a consumer secret'
- ))
+ {:consumer_secret => 'a consumer secret'}
+ )
refute_equal(
request.protocol_params['oauth_signature'],
request.signed_protocol_params['oauth_signature']
@@ -352,9 +354,7 @@ Lw03eHTNQghS0A==
oauth_timestamp="137131201",
oauth_nonce="7d8f3e4a"
)
- assert OAuthenticator::SignableRequest.new(base_example_initialize_attrs.merge(
- :authorization => OAuthenticator.parse_authorization(authorization)
- )).send(:signature_base).include?("oauth_foo%3Dbar")
+ assert(example_signed_request(OAuthenticator.parse_authorization(authorization)).send(:signature_base).include?("oauth_foo%3Dbar"))
end
it 'reproduces a successful OAuth example GET (lifted from simple oauth)' do
|
signable request test - #example_signed_request helper for requests with an existing authorization
|
notEthan_oauthenticator
|
train
|
7856ffdddb46d24931137b97eca70d6486b1b19c
|
diff --git a/lib/lolcommits/plugins/lol_yammer.rb b/lib/lolcommits/plugins/lol_yammer.rb
index <HASH>..<HASH> 100644
--- a/lib/lolcommits/plugins/lol_yammer.rb
+++ b/lib/lolcommits/plugins/lol_yammer.rb
@@ -67,9 +67,7 @@ module Lolcommits
retries = 2
begin
lolimage = File.new(self.runner.main_image)
- attacment = client.create_pending_attachment(lolimage)
- debug attacment.body.inspect
- response = client.create_message(post)
+ response = client.create_message(post, :attachment1 => lolimage)
debug response.body.inspect
if response
puts "\t--> Status posted!" unless self.runner.capture_stealth
|
Use attachment1 as opts for image stream
|
lolcommits_lolcommits
|
train
|
f6fcbaa5700ec80d025292ee3d2f9bde57733beb
|
diff --git a/aws/resource_aws_macie_s3_bucket_association_test.go b/aws/resource_aws_macie_s3_bucket_association_test.go
index <HASH>..<HASH> 100644
--- a/aws/resource_aws_macie_s3_bucket_association_test.go
+++ b/aws/resource_aws_macie_s3_bucket_association_test.go
@@ -160,7 +160,7 @@ func testAccPreCheckAWSMacie(t *testing.T) {
func testAccAWSMacieS3BucketAssociationConfig_basic(randInt int) string {
return fmt.Sprintf(`
resource "aws_s3_bucket" "test" {
- bucket = "tf-macie-test-bucket-%d"
+ bucket = "tf-test-macie-bucket-%d"
}
resource "aws_macie_s3_bucket_association" "test" {
@@ -172,7 +172,7 @@ resource "aws_macie_s3_bucket_association" "test" {
func testAccAWSMacieS3BucketAssociationConfig_basicOneTime(randInt int) string {
return fmt.Sprintf(`
resource "aws_s3_bucket" "test" {
- bucket = "tf-macie-test-bucket-%d"
+ bucket = "tf-test-macie-bucket-%d"
}
resource "aws_macie_s3_bucket_association" "test" {
@@ -188,7 +188,7 @@ resource "aws_macie_s3_bucket_association" "test" {
func testAccAWSMacieS3BucketAssociationConfig_accountIdAndPrefix(randInt int) string {
return fmt.Sprintf(`
resource "aws_s3_bucket" "test" {
- bucket = "tf-macie-test-bucket-%d"
+ bucket = "tf-test-macie-bucket-%d"
}
data "aws_caller_identity" "current" {}
@@ -204,7 +204,7 @@ resource "aws_macie_s3_bucket_association" "test" {
func testAccAWSMacieS3BucketAssociationConfig_accountIdAndPrefixOneTime(randInt int) string {
return fmt.Sprintf(`
resource "aws_s3_bucket" "test" {
- bucket = "tf-macie-test-bucket-%d"
+ bucket = "tf-test-macie-bucket-%d"
}
data "aws_caller_identity" "current" {}
|
Updates Macie S3 association bucket names to match S3 sweeper patterns
|
terraform-providers_terraform-provider-aws
|
train
|
d955db8c1928e9ec63bc0cb67e424bf182c380d7
|
diff --git a/lib/aruba/cucumber/command.rb b/lib/aruba/cucumber/command.rb
index <HASH>..<HASH> 100644
--- a/lib/aruba/cucumber/command.rb
+++ b/lib/aruba/cucumber/command.rb
@@ -13,14 +13,13 @@ When(/^I successfully run `(.*?)`(?: for up to (\d+) seconds)?$/)do |cmd, secs|
end
When(/^I run the following (?:commands|script)(?: (?:with|in) `([^`]+)`)?:$/) do |shell, commands|
- prepend_environment_variable('PATH', expand_path('bin') + File::PATH_SEPARATOR)
+ full_path = expand_path('bin/myscript')
Aruba.platform.mkdir(expand_path('bin'))
shell ||= Aruba.platform.default_shell
- Aruba::ScriptFile.new(:interpreter => shell, :content => commands,
- :path => expand_path('bin/myscript')).call
- step 'I run `myscript`'
+ Aruba::ScriptFile.new(:interpreter => shell, :content => commands, :path => full_path).call
+ step "I run `#{full_path}`"
end
When(/^I run `([^`]*)` interactively$/)do |cmd|
|
Run ad-hoc scripts without modifying PATH
|
cucumber_aruba
|
train
|
c8e82645284a17f333b050ce60eb5b2294fdedf1
|
diff --git a/lxd/storage_cgo.go b/lxd/storage_cgo.go
index <HASH>..<HASH> 100644
--- a/lxd/storage_cgo.go
+++ b/lxd/storage_cgo.go
@@ -23,6 +23,100 @@ package main
#define LO_FLAGS_AUTOCLEAR 4
#endif
+#define LXD_MAXPATH 4096
+#define LXD_NUMSTRLEN64 21
+#define LXD_MAX_LOOP_PATHLEN (2 * sizeof("loop/")) + LXD_NUMSTRLEN64 + sizeof("backing_file") + 1
+
+// If a loop file is already associated with a loop device, find it.
+static int find_associated_loop_device(const char *loop_file,
+ char *loop_dev_name)
+{
+ char looppath[LXD_MAX_LOOP_PATHLEN];
+ char buf[LXD_MAXPATH];
+ struct dirent *dp;
+ DIR *dir;
+ int dfd = -1, fd = -1;
+
+ dir = opendir("/sys/block");
+ if (!dir)
+ return -1;
+
+ while ((dp = readdir(dir))) {
+ int ret = -1;
+ size_t totlen;
+ struct stat fstatbuf;
+ char *delsuffix = " (deleted)";
+ size_t dellen = sizeof(delsuffix);
+
+ if (!dp)
+ break;
+
+ if (strncmp(dp->d_name, "loop", 4) != 0)
+ continue;
+
+ dfd = dirfd(dir);
+ if (dfd < 0)
+ continue;
+
+ ret = snprintf(looppath, sizeof(looppath),
+ "%s/loop/backing_file", dp->d_name);
+ if (ret < 0 || (size_t)ret >= sizeof(looppath))
+ continue;
+
+ ret = fstatat(dfd, looppath, &fstatbuf, 0);
+ if (ret < 0)
+ continue;
+
+ fd = openat(dfd, looppath, O_RDONLY | O_CLOEXEC, 0);
+ if (ret < 0)
+ continue;
+
+ // Clear buffer.
+ memset(buf, 0, sizeof(buf));
+ ret = read(fd, buf, sizeof(buf));
+ if (ret < 0)
+ continue;
+
+ totlen = strlen(buf);
+ // Trim newline.
+ if (buf[totlen - 1] == '\n') {
+ buf[totlen - 1] = '\0';
+ totlen--;
+ }
+
+ if (totlen > dellen) {
+ char *deleted = &buf[totlen - dellen];
+
+ // Skip deleted loop files.
+ if (!strcmp(deleted, delsuffix))
+ continue;
+ }
+
+ if (strcmp(buf, loop_file)) {
+ close(fd);
+ fd = -1;
+ continue;
+ }
+
+ ret = snprintf(loop_dev_name, LO_NAME_SIZE, "/dev/%s",
+ dp->d_name);
+ if (ret < 0 || ret >= LO_NAME_SIZE) {
+ close(fd);
+ fd = -1;
+ continue;
+ }
+
+ break;
+ }
+
+ closedir(dir);
+
+ if (fd < 0)
+ return -1;
+
+ return fd;
+}
+
static int get_unused_loop_dev_legacy(char *loop_name)
{
struct dirent *dp;
@@ -50,14 +144,14 @@ static int get_unused_loop_dev_legacy(char *loop_name)
continue;
ret = ioctl(fd, LOOP_GET_STATUS64, &lo64);
- if (ret < 0)
-
+ if (ret < 0) {
if (ioctl(fd, LOOP_GET_STATUS64, &lo64) == 0 ||
errno != ENXIO) {
close(fd);
fd = -1;
continue;
}
+ }
ret = snprintf(loop_name, LO_NAME_SIZE, "/dev/%s", dp->d_name);
if (ret < 0 || ret >= LO_NAME_SIZE) {
@@ -166,8 +260,16 @@ func prepareLoopDev(source string) (*os.File, error) {
cSource := C.CString(source)
defer C.free(unsafe.Pointer(cSource))
- loopFd := int(C.prepare_loop_dev(cSource, (*C.char)(cLoopDev)))
+ loopFd, _ := C.find_associated_loop_device(cSource, (*C.char)(cLoopDev))
+ if loopFd >= 0 {
+ return os.NewFile(uintptr(loopFd), C.GoString((*C.char)(cLoopDev))), nil
+ }
+
+ loopFd, err := C.prepare_loop_dev(cSource, (*C.char)(cLoopDev))
if loopFd < 0 {
+ if err != nil {
+ return nil, fmt.Errorf("Failed to prepare loop device: %s.", err)
+ }
return nil, fmt.Errorf("Failed to prepare loop device.")
}
|
storage_cgo: detect if loop file is already in use
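A rough Python analog of the C helper above (not LXD code; the function name is made up), showing the /sys/block scan for a matching backing_file:

    import os

    def find_loop_device(loop_file):
        # Return "/dev/loopN" whose backing file is loop_file, or None.
        for name in os.listdir("/sys/block"):
            if not name.startswith("loop"):
                continue
            backing = os.path.join("/sys/block", name, "loop", "backing_file")
            try:
                with open(backing) as f:
                    path = f.read().strip()
            except OSError:
                continue  # device has no backing file configured
            if path.endswith(" (deleted)"):
                continue  # skip deleted backing files, as the C code does
            if path == loop_file:
                return "/dev/" + name
        return None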
|
lxc_lxd
|
train
|
24b3a05c76b047895ab1529986a7a36814dcf6bc
|
diff --git a/src/picker/PickerColumn.js b/src/picker/PickerColumn.js
index <HASH>..<HASH> 100644
--- a/src/picker/PickerColumn.js
+++ b/src/picker/PickerColumn.js
@@ -98,9 +98,13 @@ export default createComponent({
},
onTouchMove(event) {
- preventDefault(event);
this.moving = true;
this.touchMove(event);
+
+ if (this.direction === 'vertical') {
+ preventDefault(event, true);
+ }
+
this.offset = range(
this.startOffset + this.deltaY,
-(this.count * this.itemHeight),
|
[improvement] Picker: stop propagation when touchmove occurs in the vertical direction (#<I>)
|
youzan_vant
|
train
|
8d74b3cd5e7c2eaa5e2e6328edbb6403168bf4aa
|
diff --git a/core/src/main/java/hudson/model/AsyncPeriodicWork.java b/core/src/main/java/hudson/model/AsyncPeriodicWork.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/AsyncPeriodicWork.java
+++ b/core/src/main/java/hudson/model/AsyncPeriodicWork.java
@@ -38,12 +38,12 @@ public abstract class AsyncPeriodicWork extends PeriodicWork {
public final void doRun() {
try {
if(thread!=null && thread.isAlive()) {
- logger.log(Level.INFO, name+" thread is still running. Execution aborted.");
+ logger.log(this.getNormalLoggingLevel(), name+" thread is still running. Execution aborted.");
return;
}
thread = new Thread(new Runnable() {
public void run() {
- logger.log(Level.INFO, "Started "+name);
+ logger.log(getNormalLoggingLevel(), "Started "+name);
long startTime = System.currentTimeMillis();
StreamTaskListener l = createListener();
@@ -59,13 +59,13 @@ public abstract class AsyncPeriodicWork extends PeriodicWork {
l.closeQuietly();
}
- logger.log(Level.INFO, "Finished "+name+". "+
+ logger.log(getNormalLoggingLevel(), "Finished "+name+". "+
(System.currentTimeMillis()-startTime)+" ms");
}
},name+" thread");
thread.start();
} catch (Throwable t) {
- logger.log(Level.SEVERE, name+" thread failed with error", t);
+ logger.log(this.getErrorLoggingLevel(), name+" thread failed with error", t);
}
}
@@ -83,7 +83,27 @@ public abstract class AsyncPeriodicWork extends PeriodicWork {
protected File getLogFile() {
return new File(Jenkins.getInstance().getRootDir(),name+".log");
}
-
+
+ /**
+ * Returns the logging level at which normal messages are displayed.
+ *
+ * @return
+ * The logging level as @Level.
+ */
+ protected Level getNormalLoggingLevel() {
+ return Level.INFO;
+ }
+
+ /**
+ * Returns the logging level at which error messages are displayed.
+ *
+ * @return
+ * The logging level as @Level.
+ */
+ protected Level getErrorLoggingLevel() {
+ return Level.SEVERE;
+ }
+
/**
* Executes the task.
*
|
Added logging level methods to AsyncPeriodicWork.
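The design is two protected hook methods that subclasses can override to quiet down chatty periodic tasks. A minimal Python sketch of the same pattern (names assumed, not Jenkins code):

    import logging

    class AsyncPeriodicWork:
        def normal_logging_level(self):
            return logging.INFO   # override to e.g. logging.DEBUG in subclasses

        def error_logging_level(self):
            return logging.ERROR

        def do_run(self, logger):
            logger.log(self.normal_logging_level(), "Started %s", type(self).__name__)
            try:
                self.execute()
            except Exception:
                logger.log(self.error_logging_level(), "%s failed",
                           type(self).__name__, exc_info=True)

        def execute(self):
            raise NotImplementedError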
|
jenkinsci_jenkins
|
train
|
12e9f64ae3a214ff69d99871ef3cc7735fc82c31
|
diff --git a/main.js b/main.js
index <HASH>..<HASH> 100644
--- a/main.js
+++ b/main.js
@@ -2,7 +2,8 @@ var katex = require('katex')
var fs = require('fs')
var _ = require('lodash')
-var parseExpression = function(raw, delimit, delimitEscaped, mathMode) {
+var parseExpression = function(raw, delimit, delimitEscaped, mathMode, finalPass) {
+ finalPass = finalPass || false
var lines = raw.split('\n')
var output = ''
for (var j = 0; j < lines.length; j++) {
@@ -23,36 +24,41 @@ var parseExpression = function(raw, delimit, delimitEscaped, mathMode) {
if (splitLine.length > 1 && splitLine.length % 2 === 1) {
// If there were matches and the code is well-formed, parse each math section (odd indices)
- for (var i = 0; i < splitLine.length; ++i) {
- if (i % 2 === 0) {
- parsedLine += splitLine[i]
- } else {
- try {
- parsedLine += katex.renderToString(splitLine[i],{displayMode: mathMode})
- }
- // Render unformatted text if there is an error
- catch (err) {
- parsedLine += '<p style=\"text-align:center;\">' + delimitEscaped + splitLine[i] + delimitEscaped + '</p>'
- }
- }
- }
+ parsedLine = processLine(splitLine)
} else {
// If the LaTeX isn't wellformed (need matched $$s and at least 2), don't parse the line.
parsedLine = lines[j]
}
// Sum up the resulting lines and add newlines back in
- output += j < lines.length - 1 ? parsedLine + '\n' : parsedLine
+ output += j < lines.length - 1 && !finalPass ? parsedLine + '\n' : parsedLine
}
return output
+
+ function processLine(rawLine){
+ var parsedLine = ''
+ for (var i = 0; i < rawLine.length; ++i) {
+ if (i % 2 === 0) {
+ parsedLine += rawLine[i]
+ } else {
+ try {
+ parsedLine += katex.renderToString(rawLine[i],{displayMode: mathMode})
+ }
+ // Render unformatted text if there is an error
+ catch (err) {
+ var original = delimitEscaped + rawLine[i] + delimitEscaped
+ parsedLine = mathMode ? '<p style=\"text-align:center;\">' + original + "<p>" : original
+ }
+ }
+ }
+
+ return parsedLine
+ }
}
var renderLaTeX = function(unparsed) {
// Need to parse for $$ first so it doesn't cause problems when check for $
var parsed = parseExpression(unparsed, '$$', '\$\$', true)
- parsed = parseExpression(parsed, '$', '\$', false)
-
- // Remove final newlines, to minimize the code
- parsed = parsed.replace(/\n/g, '')
+ parsed = parseExpression(parsed, '$', '\$', false, true)
// Render escaped dollar signs back to $
parsed = parsed.replace(/\\\$/g, '$')
|
Split off deeply nested code into helper function. Added argument to function to remove newlines on final pass to minimize the code.
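The splitting idea the helper isolates, namely that segments at odd indices between balanced delimiters are math, can be sketched in Python (illustrative, not the JS code):

    def render_line(line, delimiter="$", render=lambda s: "<math>" + s + "</math>"):
        parts = line.split(delimiter)
        if len(parts) % 2 == 0:  # unbalanced delimiters: leave the line untouched
            return line
        # even indices are plain text, odd indices are math segments
        return "".join(p if i % 2 == 0 else render(p) for i, p in enumerate(parts))

    assert render_line("a $x$ b") == "a <math>x</math> b"
    assert render_line("broken $x") == "broken $x"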
|
joshuacaron_parse-katex
|
train
|
99af15c710f23a07ded0c514a263be5ce3196fba
|
diff --git a/core/src/main/java/com/digitalpebble/stormcrawler/bolt/FetcherBolt.java b/core/src/main/java/com/digitalpebble/stormcrawler/bolt/FetcherBolt.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/digitalpebble/stormcrawler/bolt/FetcherBolt.java
+++ b/core/src/main/java/com/digitalpebble/stormcrawler/bolt/FetcherBolt.java
@@ -679,12 +679,13 @@ public class FetcherBolt extends StatusEmitterBolt {
mergedMD.setValue("_redirTo", redirection);
}
- // mark this URL as redirected
- collector.emit(Constants.StatusStreamName, fit.t, tupleToSend);
-
+ // https://github.com/DigitalPebble/storm-crawler/issues/954
if (allowRedirs() && StringUtils.isNotBlank(redirection)) {
emitOutlink(fit.t, url, redirection, mergedMD);
}
+
+ // mark this URL as redirected
+ collector.emit(Constants.StatusStreamName, fit.t, tupleToSend);
}
// error
else {
diff --git a/core/src/main/java/com/digitalpebble/stormcrawler/bolt/JSoupParserBolt.java b/core/src/main/java/com/digitalpebble/stormcrawler/bolt/JSoupParserBolt.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/digitalpebble/stormcrawler/bolt/JSoupParserBolt.java
+++ b/core/src/main/java/com/digitalpebble/stormcrawler/bolt/JSoupParserBolt.java
@@ -336,6 +336,7 @@ public class JSoupParserBolt extends StatusEmitterBolt {
LOG.info("Found redir in {} to {}", url, redirection);
metadata.setValue("_redirTo", redirection);
+ // https://github.com/DigitalPebble/storm-crawler/issues/954
if (allowRedirs() && StringUtils.isNotBlank(redirection)) {
emitOutlink(tuple, new URL(url), redirection, metadata);
}
|
changed order of emitting outlinks and emitting the parent URL when a redirection happens in the fetcher bolt (#<I>)
|
DigitalPebble_storm-crawler
|
train
|
2d85217432011fd75cfa1af46f613b38c85d2a54
|
diff --git a/ui/app/product/directives/pncProductVersionBCSets/pncProductVersionBCSets.js b/ui/app/product/directives/pncProductVersionBCSets/pncProductVersionBCSets.js
index <HASH>..<HASH> 100644
--- a/ui/app/product/directives/pncProductVersionBCSets/pncProductVersionBCSets.js
+++ b/ui/app/product/directives/pncProductVersionBCSets/pncProductVersionBCSets.js
@@ -62,16 +62,22 @@
// If the latestBuildConfigSetRecord is already shown
if (_.has(scope.latestBuildRecordSets, payload.buildSetConfigurationId) && scope.latestBuildRecordSets[payload.buildSetConfigurationId][0].id === payload.id) {
// I update the status with no reloads to optimize refresh
+ console.log('Updating BuildRecordSet #' + scope.latestBuildRecordSets[payload.buildSetConfigurationId][0].id
+ + ' with status ' + payload.buildStatus + ' and ' + payload.buildSetEndTime);
+
scope.latestBuildRecordSets[payload.buildSetConfigurationId][0].status = payload.buildStatus;
scope.latestBuildRecordSets[payload.buildSetConfigurationId][0].endTime = payload.buildSetEndTime;
}
else {
+ console.log('Reloading page');
+
delete scope.latestBuildRecordSets[payload.buildSetConfigurationId];
scope.page.reload();
}
}
};
+ scope.$on(eventTypes.BUILD_SET_STARTED, processEvent);
scope.$on(eventTypes.BUILD_SET_FINISHED, processEvent);
// Executing a build of a configurationSet forcing all the rebuilds
diff --git a/ui/app/product/directives/pncProductVersionBCs/pncProductVersionBCs.js b/ui/app/product/directives/pncProductVersionBCs/pncProductVersionBCs.js
index <HASH>..<HASH> 100644
--- a/ui/app/product/directives/pncProductVersionBCs/pncProductVersionBCs.js
+++ b/ui/app/product/directives/pncProductVersionBCs/pncProductVersionBCs.js
@@ -62,16 +62,22 @@
// If the latestBuildConfigRecord is already shown
if (_.has(scope.latestBuildRecords, payload.buildConfigurationId) && scope.latestBuildRecords[payload.buildConfigurationId][0].id === payload.id) {
// I update the status with no reloads to optimize refresh
+ console.log('Updating BuildRecord #' + scope.latestBuildRecords[payload.buildConfigurationId][0].id
+ + ' with status ' + payload.buildCoordinationStatus + ' and ' + payload.buildEndTime);
+
scope.latestBuildRecords[payload.buildConfigurationId][0].status = payload.buildCoordinationStatus;
scope.latestBuildRecordSets[payload.buildSetConfigurationId][0].endTime = payload.buildEndTime;
}
else {
+ console.log('Reloading page');
+
delete scope.latestBuildRecords[payload.buildConfigurationId];
scope.page.reload();
}
}
};
+ scope.$on(eventTypes.BUILD_STARTED, processEvent);
scope.$on(eventTypes.BUILD_FINISHED, processEvent);
// Executing a build of a configuration forcing all the rebuilds
|
Added debugging logs and START EVENTS to watch for status updates
|
project-ncl_pnc
|
train
|
4f2571a4086f85405aa896c4adaa060d9330e0ec
|
diff --git a/lib/devise/rails/routes.rb b/lib/devise/rails/routes.rb
index <HASH>..<HASH> 100644
--- a/lib/devise/rails/routes.rb
+++ b/lib/devise/rails/routes.rb
@@ -94,10 +94,24 @@ module ActionDispatch::Routing
#
# devise_for :users, path: 'accounts'
#
- # * singular: setup the singular name for the given resource. This is used as the instance variable
- # name in controller, as the name in routes and the scope given to warden.
+ # * singular: setup the singular name for the given resource. This is used as the helper methods
+ # names in controller ("authenticate_#{singular}!", "#{singular}_signed_in?", "current_#{singular}"
+ # and "#{singular}_session"), as the scope name in routes and as the scope given to warden.
#
- # devise_for :users, singular: :user
+ # devise_for :admins, singular: :manager
+ #
+ # devise_scope :manager do
+ # ...
+ # end
+ #
+ # class ManagerController < ApplicationController
+ # before_filter authenticate_manager!
+ #
+ # def show
+ # @manager = current_manager
+ # ...
+ # end
+ # end
#
# * path_names: configure different path names to overwrite defaults :sign_in, :sign_out, :sign_up,
# :password, :confirmation, :unlock.
|
[ci skip] Document how to use the `singular` option of `ActionDispatch::Routing::Mapper#devise_for`
* Replace "the instance variable name in controller" with "the helper methods
names in controller".
Devise does not define instance variables for controllers but defines helper
methods for controllers.
* Replace "the name in routes" with "the scope name in routes".
`singular` is used as an argument of `devise_scope`.
* Add sample code for routing and controllers.
|
plataformatec_devise
|
train
|
5ff37972fb5f7bd6f1dc9040758f75978d03f4a4
|
diff --git a/modules/custom/social_language/src/SocialLanguageMetadataBubblingUrlGenerator.php b/modules/custom/social_language/src/SocialLanguageMetadataBubblingUrlGenerator.php
index <HASH>..<HASH> 100644
--- a/modules/custom/social_language/src/SocialLanguageMetadataBubblingUrlGenerator.php
+++ b/modules/custom/social_language/src/SocialLanguageMetadataBubblingUrlGenerator.php
@@ -46,7 +46,23 @@ class SocialLanguageMetadataBubblingUrlGenerator extends MetadataBubblingUrlGene
$language = $this->languageManager->getCurrentLanguage();
if ($options['language']->getId() != $language->getId()) {
- $options['language'] = $language;
+ $reset_language = TRUE;
+ $unmodified_pages = [
+ 'content_translation_overview',
+ ];
+ $current_route = \Drupal::routeMatch()->getRouteName();
+ \Drupal::moduleHandler()
+ ->alter('social_language_unmodified_pages', $unmodified_pages);
+ $route_parts = explode('.', $current_route);
+ foreach ($unmodified_pages as $page) {
+ if (in_array($page, $route_parts)) {
+ $reset_language = FALSE;
+ break;
+ }
+ }
+ if ($reset_language) {
+ $options['language'] = $language;
+ }
}
}
|
OSSUPPORT-<I> by nielsvandermolen, kingdutch: The language URL generator sometimes breaks some URLs that are
meant to switch between two different languages (for example
linking to a Dutch entity from a translation overview that's
viewed in English). We add exceptions for targets and origins where
we disable the overwriting of links to combat this issue, and provide
an alter hook so modules can add to this list of exceptions.
|
goalgorilla_open_social
|
train
|
0fa16d2d82504bc7508b755a07fe785177b6cb22
|
diff --git a/bin/bel_parse.rb b/bin/bel_parse.rb
index <HASH>..<HASH> 100755
--- a/bin/bel_parse.rb
+++ b/bin/bel_parse.rb
@@ -35,9 +35,9 @@ end
# read bel content
content =
if options[:bel]
- File.open(options[:bel]).read
+ File.open(options[:bel])
else
- $stdin.read
+ $stdin
end
class Main
|
read IO streams directly (e.g. no buffering)
closes #<I>
|
OpenBEL_bel.rb
|
train
|
1fb02ef7b4925c05a96ceb91e007bf308e057269
|
diff --git a/Swat/SwatDate.php b/Swat/SwatDate.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatDate.php
+++ b/Swat/SwatDate.php
@@ -1729,22 +1729,28 @@ class SwatDate extends HotDateTime
*/
public function addWholeMonths($months)
{
- $success = true;
+ $months = (integer)$months;
+
+ $years = (integer)($months / 12);
+ $months = $months % 12;
- $month = $this->getMonth() + (integer)$months;
+ $year = $this->getYear() + $years;
+ $month = $this->getMonth() + $months;
if ($month < 1) {
+ $year -= 1;
$month += 12;
- $this->setYear($this->getYear() - 1);
- $success = $this->setMonth($month);
} elseif ($month > 12) {
+ $year += 1;
$month -= 12;
- $this->setYear($this->getYear() + 1);
- $success = $this->setMonth($month);
- } else {
- $success = $this->setMonth($month);
}
+ $success = $this->setDate(
+ $year,
+ $month,
+ $this->getDay()
+ );
+
if (!$success) {
throw new Exception(
sprintf(
|
Fix serious logic errors noticed immediately by Isaac.
svn commit r<I>
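The corrected logic normalizes the month offset into whole years plus a remainder before carrying into the year. A Python sketch of the same arithmetic (illustrative, using divmod to handle the carry and borrow in one step):

    def add_whole_months(year, month, months):
        # Work in zero-based months so divmod handles carry and borrow.
        total = year * 12 + (month - 1) + months
        year, month0 = divmod(total, 12)
        return year, month0 + 1

    assert add_whole_months(2024, 1, -1) == (2023, 12)
    assert add_whole_months(2024, 11, 14) == (2026, 1)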
|
silverorange_swat
|
train
|
ea6f3675a239fd93352eac8a0d1bedc433958844
|
diff --git a/api/app/handler.go b/api/app/handler.go
index <HASH>..<HASH> 100644
--- a/api/app/handler.go
+++ b/api/app/handler.go
@@ -13,6 +13,7 @@ import (
"github.com/globocom/tsuru/api/service"
"github.com/globocom/tsuru/db"
"github.com/globocom/tsuru/errors"
+ "github.com/globocom/tsuru/log"
"github.com/globocom/tsuru/repository"
"io"
"io/ioutil"
@@ -175,6 +176,7 @@ func createAppHelper(app *App, u *auth.User) ([]byte, error) {
app.setTeams(teams)
err = createApp(app)
if err != nil {
+ log.Printf("Got error while creating app: %s", err.Error())
if e, ok := err.(*ValidationError); ok {
return nil, &errors.Http{Code: http.StatusPreconditionFailed, Message: e.Message}
}
@@ -191,10 +193,12 @@ func createAppHelper(app *App, u *auth.User) ([]byte, error) {
}
c := gandalf.Client{Endpoint: gUrl}
if _, err := c.NewRepository(app.Name, users, false); err != nil {
- return nil, err
+ log.Printf("Got error while creating repository: %s", err.Error())
+ return nil, &errors.Http{Code: http.StatusInternalServerError, Message: err.Error()}
}
if err := c.GrantAccess([]string{app.Name}, users); err != nil {
- return nil, err
+ log.Printf("Got error while granting access to repository: %s", err.Error())
+ return nil, &errors.Http{Code: http.StatusInternalServerError, Message: err.Error()}
}
msg := map[string]string{
"status": "success",
diff --git a/repository/repository.go b/repository/repository.go
index <HASH>..<HASH> 100644
--- a/repository/repository.go
+++ b/repository/repository.go
@@ -69,6 +69,7 @@ func CloneOrPull(u Unit) ([]byte, error) {
func getGitServer() string {
gitServer, err := config.GetString("git:host")
if err != nil {
+ log.Print("git:host config not found")
panic(err)
}
return gitServer
@@ -85,10 +86,12 @@ func getGitServer() string {
func GitServerUri() string {
server, err := config.GetString("git:host")
if err != nil {
+ log.Print("git:host config not found")
panic(err)
}
protocol, err := config.GetString("git:protocol")
if err != nil {
+ log.Print("git:protocol config not found")
panic(err)
}
uri := fmt.Sprintf("%s://%s", protocol, server)
|
logging some actions in api/app and repository pkgs
|
tsuru_tsuru
|
train
|
3d477d8bec66cbeb336c2a04971311b9990c307a
|
diff --git a/spec/rubycards/deck_spec.rb b/spec/rubycards/deck_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/rubycards/deck_spec.rb
+++ b/spec/rubycards/deck_spec.rb
@@ -40,13 +40,13 @@ describe Deck do
context 'empty deck' do
it 'returns true' do
deck.cards.count.times { deck.draw }
- deck.empty?.should be_true
+ expect(deck).to be_empty
end
end
context 'full deck' do
it 'returns false' do
- deck.empty?.should be_false
+ expect(deck).not_to be_empty
end
end
end
|
Updated deck spec for empty? to newer RSpec syntax. This also stops Travis CI from complaining.
|
jdan_rubycards
|
train
|
192694fcaed8b672e0f32d63e0620bdbcdcec361
|
diff --git a/src/Sulu/Bundle/SecurityBundle/Entity/Role.php b/src/Sulu/Bundle/SecurityBundle/Entity/Role.php
index <HASH>..<HASH> 100644
--- a/src/Sulu/Bundle/SecurityBundle/Entity/Role.php
+++ b/src/Sulu/Bundle/SecurityBundle/Entity/Role.php
@@ -10,6 +10,8 @@
namespace Sulu\Bundle\SecurityBundle\Entity;
+use JMS\Serializer\Annotation\Exclude;
+
/**
* Role.
*/
@@ -22,6 +24,7 @@ class Role extends BaseRole
/**
* @var \Doctrine\Common\Collections\Collection
+ * @Exclude
*/
private $userRoles;
|
excluded user-roles from role-api
|
sulu_sulu
|
train
|
6d2f327111d1cfc5c89657e808517407e0574de1
|
diff --git a/AegeanTools/fitting.py b/AegeanTools/fitting.py
index <HASH>..<HASH> 100644
--- a/AegeanTools/fitting.py
+++ b/AegeanTools/fitting.py
@@ -748,14 +748,15 @@ def errors(source, model, wcshelper):
return source
# position errors
- if model[prefix + 'xo'].vary and model[prefix + 'yo'].vary:
+ if model[prefix + 'xo'].vary and model[prefix + 'yo'].vary \
+ and all(np.isfinite([err_xo, err_yo])):
offset = wcshelper.pix2sky([xo + err_xo, yo + err_yo])
source.err_ra = gcd(ref[0], ref[1], offset[0], ref[1])
source.err_dec = gcd(ref[0], ref[1], ref[0], offset[1])
else:
source.err_ra = source.err_dec = -1
- if model[prefix + 'theta'].vary:
+ if model[prefix + 'theta'].vary and np.isfinite(err_theta):
# pa error
off1 = wcshelper.pix2sky([xo + sx * np.cos(np.radians(theta)), yo + sy * np.sin(np.radians(theta))])
off2 = wcshelper.pix2sky(
@@ -764,7 +765,8 @@ def errors(source, model, wcshelper):
else:
source.err_pa = -1
- if model[prefix + 'sx'].vary and model[prefix + 'sy'].vary:
+ if model[prefix + 'sx'].vary and model[prefix + 'sy'].vary \
+ and all(np.isfinite([err_sx, err_sy])):
# major axis error
ref = wcshelper.pix2sky([xo + sx * np.cos(np.radians(theta)), yo + sy * np.sin(np.radians(theta))])
offset = wcshelper.pix2sky(
|
resolve #<I>
there are now 3 groups of uncertainty which are independent:
ra/dec
a/b
pa
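In essence, each error group is now computed only when its pixel-space uncertainties are finite, with -1 as the sentinel otherwise. A stripped-down Python illustration of the guard:

    import numpy as np

    def guarded_errors(err_xo, err_yo, convert):
        if all(np.isfinite([err_xo, err_yo])):
            return convert(err_xo, err_yo)
        return -1, -1  # Aegean's sentinel for "no valid uncertainty"

    assert guarded_errors(np.nan, 0.2, lambda a, b: (a, b)) == (-1, -1)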
|
PaulHancock_Aegean
|
train
|
94f7079e4c1ece2560003df8fe488c575c3dc85f
|
diff --git a/lib/hatchet.rb b/lib/hatchet.rb
index <HASH>..<HASH> 100644
--- a/lib/hatchet.rb
+++ b/lib/hatchet.rb
@@ -6,6 +6,7 @@ require 'json'
require 'stringio'
require 'fileutils'
require 'stringio'
+require 'date'
module Hatchet
end
|
We are using DateTime but never required `date`.
|
heroku_hatchet
|
train
|
aab93753b520e5eb86face57e5b48acb84c8e37c
|
diff --git a/classes/PodsAdmin.php b/classes/PodsAdmin.php
index <HASH>..<HASH> 100644
--- a/classes/PodsAdmin.php
+++ b/classes/PodsAdmin.php
@@ -58,7 +58,7 @@ class PodsAdmin {
add_action( 'admin_head-media-upload-popup', array( $this, 'register_media_assets' ) );
// Add our debug to Site Info.
- add_filter( 'debug_information', array( $this, 'add_debug_information' );
+ add_filter( 'debug_information', array( $this, 'add_debug_information' ) );
$this->rest_admin();
|
Update classes/PodsAdmin.php
|
pods-framework_pods
|
train
|
2128cde948a8d0970021494cf5967b30171d0d4d
|
diff --git a/src/geshi/apt_sources.php b/src/geshi/apt_sources.php
index <HASH>..<HASH> 100644
--- a/src/geshi/apt_sources.php
+++ b/src/geshi/apt_sources.php
@@ -55,7 +55,7 @@ $language_data = array (
'stable/updates',
//Debian
'buzz', 'rex', 'bo', 'hamm', 'slink', 'potato', 'woody', 'sarge',
- 'etch', 'lenny', 'sid',
+ 'etch', 'lenny', 'wheezy', 'sid',
//Ubuntu
'warty', 'warty-updates', 'warty-security', 'warty-proposed', 'warty-backports',
'hoary', 'hoary-updates', 'hoary-security', 'hoary-proposed', 'hoary-backports',
@@ -65,7 +65,11 @@ $language_data = array (
'feisty', 'feisty-updates', 'feisty-security', 'feisty-proposed', 'feisty-backports',
'gutsy', 'gutsy-updates', 'gutsy-security', 'gutsy-proposed', 'gutsy-backports',
'hardy', 'hardy-updates', 'hardy-security', 'hardy-proposed', 'hardy-backports',
- 'intrepid', 'intrepid-updates', 'intrepid-security', 'intrepid-proposed', 'intrepid-backports'
+ 'intrepid', 'intrepid-updates', 'intrepid-security', 'intrepid-proposed', 'intrepid-backports',
+ 'jaunty', 'jaunty-updates', 'jaunty-security', 'jaunty-proposed', 'jaunty-backports',
+ 'karmic', 'karmic-updates', 'karmic-security', 'karmic-proposed', 'karmic-backports',
+ 'lucid', 'lucid-updates', 'lucid-security', 'lucid-proposed', 'lucid-backports',
+ 'maverick', 'maverick-updates', 'maverick-security', 'maverick-proposed', 'maverick-backports'
),
3 => array(
'main', 'restricted', 'preview', 'contrib', 'non-free',
@@ -141,4 +145,4 @@ $language_data = array (
'TAB_WIDTH' => 4
);
-?>
+?>
\ No newline at end of file
|
upd: Updated distribution list for APT Sources List language file
|
GeSHi_geshi-1.0
|
train
|
09750737a7c77bb8e3f2bf554fb4826788c0bb77
|
diff --git a/symfit/core/minimizers.py b/symfit/core/minimizers.py
index <HASH>..<HASH> 100644
--- a/symfit/core/minimizers.py
+++ b/symfit/core/minimizers.py
@@ -318,6 +318,9 @@ class ChainedMinimizer(BaseMinimizer):
del state['__signature__']
return state
+ def __str__(self):
+ return self.__class__.__name__ + '(minimizers={})'.format(self.minimizers)
+
class ScipyMinimize(object):
"""
Mix-in class that handles the execute calls to :func:`scipy.optimize.minimize`.
|
Add str representation to ChainedMinimizer
|
tBuLi_symfit
|
train
|
b355df0e2ecd646891b9cfc3c3226c76d1dcd429
|
diff --git a/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java b/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java
index <HASH>..<HASH> 100644
--- a/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java
+++ b/mockserver-core/src/test/java/org/mockserver/configuration/ConfigurationTest.java
@@ -3,6 +3,7 @@ package org.mockserver.configuration;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
import org.mockserver.server.initialize.ExpectationInitializerExample;
import org.mockserver.socket.tls.ForwardProxyTLSX509CertificatesTrustManager;
@@ -198,6 +199,7 @@ public class ConfigurationTest {
}
@Test
+ @Ignore
public void shouldSetAndGetMaxExpectations() {
int original = ConfigurationProperties.maxExpectations();
try {
@@ -223,6 +225,7 @@ public class ConfigurationTest {
}
@Test
+ @Ignore
public void shouldSetAndGetMaxLogEntries() {
int original = ConfigurationProperties.maxLogEntries();
try {
|
ignoring test that seems to fail in the build for no known reason; will investigate and fix
|
jamesdbloom_mockserver
|
train
|
eab304c85b66b64b916ae4fec94ffcb4acc21255
|
diff --git a/support/cas-server-support-ldap/src/main/java/org/apereo/cas/authentication/LdapAuthenticationHandler.java b/support/cas-server-support-ldap/src/main/java/org/apereo/cas/authentication/LdapAuthenticationHandler.java
index <HASH>..<HASH> 100644
--- a/support/cas-server-support-ldap/src/main/java/org/apereo/cas/authentication/LdapAuthenticationHandler.java
+++ b/support/cas-server-support-ldap/src/main/java/org/apereo/cas/authentication/LdapAuthenticationHandler.java
@@ -11,6 +11,7 @@ import org.apereo.cas.util.CollectionUtils;
import org.ldaptive.LdapAttribute;
import org.ldaptive.LdapEntry;
import org.ldaptive.LdapException;
+import org.ldaptive.ResultCode;
import org.ldaptive.ReturnAttributes;
import org.ldaptive.auth.AuthenticationRequest;
import org.ldaptive.auth.AuthenticationResponse;
@@ -163,6 +164,10 @@ public class LdapAuthenticationHandler extends AbstractUsernamePasswordAuthentic
}
LOGGER.debug("LDAP response: [{}]", response);
+ if (!response.getResult() && response.getResultCode() == ResultCode.INVALID_CREDENTIALS) {
+ throw new FailedLoginException("Invalid credentials");
+ }
+
final List<MessageDescriptor> messageList;
final LdapPasswordPolicyConfiguration ldapPasswordPolicyConfiguration = (LdapPasswordPolicyConfiguration) super.getPasswordPolicyConfiguration();
if (ldapPasswordPolicyConfiguration != null) {
|
Fix for invalid credentials being allowed through password policy check (#<I>)
|
apereo_cas
|
train
|
047d1ad9f694fe588872d7a85d71a3ff9a6fab18
|
diff --git a/plugins/Admin/tests/TestCase/src/Controller/OrderDetails/OrderDetailsControllerEditPickupDayTest.php b/plugins/Admin/tests/TestCase/src/Controller/OrderDetails/OrderDetailsControllerEditPickupDayTest.php
index <HASH>..<HASH> 100644
--- a/plugins/Admin/tests/TestCase/src/Controller/OrderDetails/OrderDetailsControllerEditPickupDayTest.php
+++ b/plugins/Admin/tests/TestCase/src/Controller/OrderDetails/OrderDetailsControllerEditPickupDayTest.php
@@ -43,14 +43,6 @@ class OrderDetailsControllerEditPickupDayTest extends OrderDetailsControllerTest
$this->assertJsonError();
}
- public function testEditPickupDayAsSuperadminWrongWeekday()
- {
- $this->loginAsSuperadmin();
- $response = $this->editPickupDayOfOrderDetails([$this->orderDetailIdA, $this->orderDetailIdB], '2018-01-01', 'bla');
- $this->assertRegExpWithUnquotedString('Der Abholtag muss ein Freitag sein.', $response->msg);
- $this->assertJsonError();
- }
-
public function testEditPickupDayAsSuperadminOk()
{
$this->loginAsSuperadmin();
diff --git a/src/Model/Table/OrderDetailsTable.php b/src/Model/Table/OrderDetailsTable.php
index <HASH>..<HASH> 100644
--- a/src/Model/Table/OrderDetailsTable.php
+++ b/src/Model/Table/OrderDetailsTable.php
@@ -65,7 +65,6 @@ class OrderDetailsTable extends AppTable
public function validationPickupDay(Validator $validator)
{
$validator->notEquals('pickup_day', '1970-01-01', __('The_pickup_day_is_not_valid.'));
- $validator = $this->getAllowOnlyOneWeekdayValidator($validator, 'pickup_day', __('The_pickup_day'));
return $validator;
}
|
pickup day of order detail can now be changed to any day
|
foodcoopshop_foodcoopshop
|
train
|
9d6235c54e0e8e1acd0a712e63207e899e82dc4d
|
diff --git a/pkg/scheduler/framework/plugins/helper/normalize_score_test.go b/pkg/scheduler/framework/plugins/helper/normalize_score_test.go
index <HASH>..<HASH> 100644
--- a/pkg/scheduler/framework/plugins/helper/normalize_score_test.go
+++ b/pkg/scheduler/framework/plugins/helper/normalize_score_test.go
@@ -55,6 +55,11 @@ func TestDefaultNormalizeScore(t *testing.T) {
scores: []int64{1000, 1, 1, 1},
expectedScores: []int64{100, 0, 0, 0},
},
+ {
+ reverse: true,
+ scores: []int64{0, 1, 1, 1},
+ expectedScores: []int64{100, 0, 0, 0},
+ },
}
for i, test := range tests {
|
Add test case to improve the test coverage.
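The new case checks that a zero raw score maps to the maximum under reversal. A small Python analog of the normalizer (the real helper is Go; the max score of 100 is assumed here):

    def normalize(scores, reverse=False, max_priority=100):
        highest = max(scores)
        if highest == 0:
            return [max_priority if reverse else 0] * len(scores)
        out = [max_priority * s // highest for s in scores]
        return [max_priority - s for s in out] if reverse else out

    assert normalize([0, 1, 1, 1], reverse=True) == [100, 0, 0, 0]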
|
kubernetes_kubernetes
|
train
|
6fb930c73b532c1e11b06953791e26f7ba2017d7
|
diff --git a/cake/libs/model/model.php b/cake/libs/model/model.php
index <HASH>..<HASH> 100644
--- a/cake/libs/model/model.php
+++ b/cake/libs/model/model.php
@@ -1864,7 +1864,7 @@ class Model extends Object {
foreach (array_merge($this->hasMany, $this->hasOne) as $assoc => $data) {
if ($data['dependent'] === true && $cascade === true) {
- $model =& $this->{$assoc};
+ $model = $this->{$assoc};
$conditions = array($model->escapeField($data['foreignKey']) => $id);
if ($data['conditions']) {
$conditions = array_merge((array)$data['conditions'], $conditions);
@@ -2795,7 +2795,7 @@ class Model extends Object {
}
$db = ConnectionManager::getDataSource($this->useDbConfig);
if (!empty($oldConfig) && isset($db->config['prefix'])) {
- $oldDb =& ConnectionManager::getDataSource($oldConfig);
+ $oldDb = ConnectionManager::getDataSource($oldConfig);
if (!isset($this->tablePrefix) || (!isset($oldDb->config['prefix']) || $this->tablePrefix == $oldDb->config['prefix'])) {
$this->tablePrefix = $db->config['prefix'];
|
Removing assignments by reference in model class
|
cakephp_cakephp
|
train
|
b9fa7c30706f2c1d4f71cdb5848f289b475a5ba8
|
diff --git a/CHANGELOG b/CHANGELOG
index <HASH>..<HASH> 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,5 @@
+0.2.0
+ - feat: `QPSeries.get_qpimage` supports QPImage identifier as index
0.1.8
- code cleanup
0.1.7
diff --git a/qpimage/core.py b/qpimage/core.py
index <HASH>..<HASH> 100644
--- a/qpimage/core.py
+++ b/qpimage/core.py
@@ -20,6 +20,7 @@ VALID_INPUT_DATA = ["field",
class QPImage(object):
+ # required to create in-memory hdf5 files with unique fd
_instances = 0
def __init__(self, data=None, bg_data=None, which_data="phase",
diff --git a/qpimage/series.py b/qpimage/series.py
index <HASH>..<HASH> 100644
--- a/qpimage/series.py
+++ b/qpimage/series.py
@@ -154,25 +154,37 @@ class QPSeries(object):
Parameters
----------
- index: int
- Index of the qpimage
+ index: int or str
+ Index or identifier of the QPImage
Notes
-----
Instead of ``qps.get_qpimage(index)``, it is possible
to use the short-hand ``qps[index]``.
"""
- if index < -len(self):
- msg = "Index {} is out of bounds for QPSeries of size {}!".format(
- index, len(self))
- raise ValueError(msg)
- elif index < 0:
- index += len(self)
- name = "qpi_{}".format(index)
- if name in self.h5:
- group = self.h5[name]
+ if isinstance(index, str):
+ # search for the identifier
+ for ii in range(len(self)):
+ qpi = self[ii]
+ if "identifier" in qpi and qpi["identifier"] == index:
+ group = self.h5["qpi_{}".format(ii)]
+ break
+ else:
+ msg = "QPImage identifier '{}' not found!".format(index)
+ raise KeyError(msg)
else:
- msg = "Index {} not found for QPSeries of length {}".format(
- index, len(self))
- raise KeyError(msg)
+ # integer index
+ if index < -len(self):
+ msg = "Index {} out of bounds for QPSeries of size {}!".format(
+ index, len(self))
+ raise ValueError(msg)
+ elif index < 0:
+ index += len(self)
+ name = "qpi_{}".format(index)
+ if name in self.h5:
+ group = self.h5[name]
+ else:
+ msg = "Index {} not found for QPSeries of length {}".format(
+ index, len(self))
+ raise KeyError(msg)
return QPImage(h5file=group)
diff --git a/tests/test_series_convenience.py b/tests/test_series_convenience.py
index <HASH>..<HASH> 100644
--- a/tests/test_series_convenience.py
+++ b/tests/test_series_convenience.py
@@ -49,6 +49,34 @@ def test_getitem():
assert False, "Negative index exceeds size."
+def test_getitem_identifier():
+ size = 20
+ pha = np.repeat(np.linspace(0, 10, size), size)
+ pha = pha.reshape(size, size)
+
+ qpi1 = qpimage.QPImage(data=1.1 * pha,
+ which_data="phase",
+ meta_data={"identifier": "peter"})
+ qpi2 = qpimage.QPImage(data=1.2 * pha,
+ which_data="phase",
+ meta_data={"identifier": "hans"})
+ qpi3 = qpimage.QPImage(data=1.3 * pha,
+ which_data="phase",
+ meta_data={"identifier": "doe"})
+
+ series = qpimage.QPSeries(qpimage_list=[qpi1, qpi2, qpi3])
+ assert series["peter"] == qpi1
+ assert series["peter"] != qpi2
+ assert series["hans"] == qpi2
+ assert series["doe"] == qpi3
+ try:
+ series["john"]
+ except KeyError:
+ pass
+ else:
+ assert False, "'john' is not in series"
+
+
def test_identifier():
h5file = pathlib.Path(__file__).parent / "data" / "bg_tilt.h5"
qpi = qpimage.QPImage(h5file=h5file, h5mode="r")
|
feat: supports QPImage identifier as index
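Usage as exercised by the tests in the diff (the identifier values come from those tests):

    import numpy as np
    import qpimage

    qpi = qpimage.QPImage(data=np.zeros((20, 20)), which_data="phase",
                          meta_data={"identifier": "peter"})
    series = qpimage.QPSeries(qpimage_list=[qpi])

    series[0]        # integer index, as before
    series["peter"]  # new: lookup by QPImage identifier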
|
RI-imaging_qpimage
|
train
|
9b04298e29554f4f409eb2d1a0f8286839234a94
|
diff --git a/integtests/src/test/java/org/isisaddons/module/excel/integtests/ExcelModuleModuleSystemInitializer.java b/integtests/src/test/java/org/isisaddons/module/excel/integtests/ExcelModuleModuleSystemInitializer.java
index <HASH>..<HASH> 100644
--- a/integtests/src/test/java/org/isisaddons/module/excel/integtests/ExcelModuleModuleSystemInitializer.java
+++ b/integtests/src/test/java/org/isisaddons/module/excel/integtests/ExcelModuleModuleSystemInitializer.java
@@ -37,7 +37,7 @@ public class ExcelModuleModuleSystemInitializer {
.with(new ExcelAppManifest())
.with(new IsisConfigurationForJdoIntegTests())
.build()
- .initIfRequiredThenOpenSession();
+ .setUpSystem();
IsisSystemForTest.set(isft);
}
return isft;
|
required updates to integ tests as per ISIS-<I> refactorings
|
isisaddons-legacy_isis-module-excel
|
train
|
722536a0d7c8fff28a79ba46e64bba024948d8f2
|
diff --git a/server/spec/services/grid_service_deployer_spec.rb b/server/spec/services/grid_service_deployer_spec.rb
index <HASH>..<HASH> 100644
--- a/server/spec/services/grid_service_deployer_spec.rb
+++ b/server/spec/services/grid_service_deployer_spec.rb
@@ -82,7 +82,9 @@ describe GridServiceDeployer do
allow(subject).to receive(:deploy_service_instance)
deploy = Thread.new { subject.deploy }
MongoPubsub.publish(channel, {'event' => 'ping'})
- sleep 0.05
+ Timeout::timeout(1) do
+ sleep 0.05 until events.size > 0
+ end
expect(events.size).to eq(1)
deploy.kill
end
|
Fix flakey server test (GridServiceDeployer) (#<I>)
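The fix replaces a fixed sleep with polling under a deadline, which is the standard cure for this kind of flakiness. The same pattern as a Python helper (illustrative, not project code):

    import time

    def wait_until(predicate, timeout=1.0, interval=0.05):
        # Poll predicate until it returns True or the deadline passes.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if predicate():
                return
            time.sleep(interval)
        raise TimeoutError("condition not met within %.2fs" % timeout)

    # e.g. wait_until(lambda: len(events) > 0) before asserting on events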
|
kontena_kontena
|
train
|
a02241e5df48e44e23dc0e66dbef3fdc3c91eb3e
|
diff --git a/src/com/google/javascript/jscomp/CodeConsumer.java b/src/com/google/javascript/jscomp/CodeConsumer.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/CodeConsumer.java
+++ b/src/com/google/javascript/jscomp/CodeConsumer.java
@@ -238,7 +238,7 @@ abstract class CodeConsumer {
add(" ");
}
- if ((long) x == x) {
+ if ((long) x == x && !isNegativeZero(x)) {
long value = (long) x;
long mantissa = value;
int exp = 0;
@@ -258,6 +258,10 @@ abstract class CodeConsumer {
}
}
+ static boolean isNegativeZero(double x) {
+ return x == 0.0 && Math.copySign(1, x) == -1.0;
+ }
+
static boolean isWordChar(char ch) {
return (ch == '_' ||
ch == '$' ||
diff --git a/test/com/google/javascript/jscomp/CodePrinterTest.java b/test/com/google/javascript/jscomp/CodePrinterTest.java
index <HASH>..<HASH> 100644
--- a/test/com/google/javascript/jscomp/CodePrinterTest.java
+++ b/test/com/google/javascript/jscomp/CodePrinterTest.java
@@ -1268,4 +1268,8 @@ public class CodePrinterTest extends TestCase {
assertPrint("var x = {0.2: 1};", "var x={\"0.2\":1}");
assertPrint("var x = {'0.2': 1};", "var x={\"0.2\":1}");
}
+
+ public void testIssue582() {
+ assertPrint("var x = -0.0;", "var x=-0.0");
+ }
}
|
Correct output of -<I>.
Fixes issue <I>.
R=acleung
DELTA=9 (8 added, 0 deleted, 1 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL>
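The fix hinges on the IEEE-754 sign bit surviving even when the value compares equal to zero, so a plain x == 0 test cannot distinguish -0.0. The same check in Python:

    import math

    def is_negative_zero(x):
        return x == 0.0 and math.copysign(1.0, x) == -1.0

    assert is_negative_zero(-0.0)
    assert not is_negative_zero(0.0)
    assert not is_negative_zero(-1e-300)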
|
google_closure-compiler
|
train
|