| hash (stringlengths 40-40) | diff (stringlengths 131-114k) | message (stringlengths 7-980) | project (stringlengths 5-67) | split (stringclasses: 1 value) |
|---|---|---|---|---|
fe7cb6ded40a0ada5e1343b02dc8c0192748fbcf
|
diff --git a/dciclient/v1/helper.py b/dciclient/v1/helper.py
index <HASH>..<HASH> 100644
--- a/dciclient/v1/helper.py
+++ b/dciclient/v1/helper.py
@@ -112,9 +112,11 @@ def run_command(context, cmd, cwd=None, jobstate_id=None, team_id=None,
output.seek(0)
output.truncate()
- while pipe_process.poll() is None:
+ p_status = None
+ while p_status is None:
time.sleep(0.5)
try:
+ p_status = pipe_process.poll()
pstdout = pipe_process.stdout.read().decode('UTF-8', 'ignore')
except IOError:
pass
|
helper.run_command: avoid race condition
avoid a race condition where pipe_process.poll() is not None but the
pipe_process.stdout is unread.
Change-Id: I2e9f<I>c2dd3e5a8c<I>f<I>e<I>ac<I>e<I>
|
redhat-cip_python-dciclient
|
train
|
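For illustration, a minimal Python sketch of the polling pattern this first diff introduces: record the exit status *before* reading, so one final read always happens after the process exits (the command and buffer size are hypothetical, not from dciclient):

```python
import subprocess
import time

proc = subprocess.Popen(
    ["sh", "-c", "echo start; sleep 1; echo done"],  # stand-in command
    stdout=subprocess.PIPE,
)

status = None
while status is None:
    time.sleep(0.5)
    # Poll *before* reading: if the process exited since the last pass,
    # we still fall through to one last read, so output flushed just
    # before exit is not lost -- the race the commit message describes.
    status = proc.poll()
    chunk = proc.stdout.read1(4096)  # read whatever is available
    if chunk:
        print(chunk.decode("utf-8", "ignore"), end="")
```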
bfb1178a3eaa61b203adf82cc4c08538802a2703
|
diff --git a/lib/bittrex/client.rb b/lib/bittrex/client.rb
index <HASH>..<HASH> 100644
--- a/lib/bittrex/client.rb
+++ b/lib/bittrex/client.rb
@@ -1,5 +1,6 @@
require 'faraday'
require 'base64'
+require 'json'
module Bittrex
class Client
|
Add missing require 'json' to Client
|
mwerner_bittrex
|
train
|
0b0b99667f1084f4d3a09fbf66d9f4bb265929c7
|
diff --git a/config/flay.yml b/config/flay.yml
index <HASH>..<HASH> 100644
--- a/config/flay.yml
+++ b/config/flay.yml
@@ -1,3 +1,3 @@
---
threshold: 7
-total_score: 63
+total_score: 62
diff --git a/lib/substation/environment.rb b/lib/substation/environment.rb
index <HASH>..<HASH> 100644
--- a/lib/substation/environment.rb
+++ b/lib/substation/environment.rb
@@ -6,6 +6,8 @@ module Substation
# Encapsulates access to one registered {Substation::Action} instance
class Action
+ MissingClassError = Class.new(StandardError)
+
# Coerce the given name and config to an {Action} instance
#
# @param [#to_sym] name
@@ -19,7 +21,7 @@ module Substation
#
# @api private
def self.coerce(name, config)
- klass_name = config.fetch('action')
+ klass_name = config.fetch('action') { raise(MissingClassError) }
observers = config.fetch('observers', EMPTY_HASH)
new(name.to_sym, Utils.const_get(klass_name), Observers.coerce(observers))
diff --git a/spec/unit/substation/environment/action/class_methods/coerce_spec.rb b/spec/unit/substation/environment/action/class_methods/coerce_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/substation/environment/action/class_methods/coerce_spec.rb
+++ b/spec/unit/substation/environment/action/class_methods/coerce_spec.rb
@@ -41,7 +41,7 @@ describe Environment::Action, '.coerce' do
let(:config) { {} }
specify {
- expect { subject }.to raise_error(KeyError)
+ expect { subject }.to raise_error(described_class::MissingClassError)
}
end
end
|
Raise a dedicated error when no action class is configured
|
snusnu_substation
|
train
|
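The substation change swaps a generic `KeyError` for a domain-specific one. A rough Python analogue of `config.fetch('action') { raise(MissingClassError) }` follows; the names mirror the Ruby, the rest is assumed:

```python
class MissingClassError(Exception):
    """No action class was configured for this route."""

def coerce(name, config):
    try:
        klass_name = config["action"]
    except KeyError:
        # Translate the generic lookup failure into a dedicated error,
        # so callers can rescue something meaningful.
        raise MissingClassError(f"no action class configured for {name!r}")
    observers = config.get("observers", {})
    return (str(name), klass_name, observers)
```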
263dfa9ab07b271d01e004c5be68dff9513d173f
|
diff --git a/bosh-stemcell/spec/stemcells/centos_7_spec.rb b/bosh-stemcell/spec/stemcells/centos_7_spec.rb
index <HASH>..<HASH> 100644
--- a/bosh-stemcell/spec/stemcells/centos_7_spec.rb
+++ b/bosh-stemcell/spec/stemcells/centos_7_spec.rb
@@ -65,6 +65,13 @@ HERE
it { should contain('/usr/bin/gcc') }
end
end
+
+ describe 'mounted file systems: /etc/fstab should mount nfs with nodev (stig: V-38654)(stig: V-38652)' do
+ describe file('/etc/fstab') do
+ it { should be_file }
+ its (:content) { should_not match /nfs/ }
+ end
+ end
end
describe 'CentOS 7 stemcell tarball', stemcell_tarball: true do
diff --git a/bosh-stemcell/spec/stemcells/ubuntu_trusty_spec.rb b/bosh-stemcell/spec/stemcells/ubuntu_trusty_spec.rb
index <HASH>..<HASH> 100644
--- a/bosh-stemcell/spec/stemcells/ubuntu_trusty_spec.rb
+++ b/bosh-stemcell/spec/stemcells/ubuntu_trusty_spec.rb
@@ -243,6 +243,13 @@ HERE
it { should_not be_installed }
end
end
+
+ describe 'mounted file systems: /etc/fstab should mount nfs with nodev (stig: V-38654) (stig: V-38652)' do
+ describe file('/etc/fstab') do
+ it { should be_file }
+ its (:content) { should eq("# UNCONFIGURED FSTAB FOR BASE SYSTEM\n") }
+ end
+ end
end
describe 'Ubuntu 14.04 stemcell tarball', stemcell_tarball: true do
diff --git a/bosh-stemcell/spec/support/os_image_shared_examples.rb b/bosh-stemcell/spec/support/os_image_shared_examples.rb
index <HASH>..<HASH> 100644
--- a/bosh-stemcell/spec/support/os_image_shared_examples.rb
+++ b/bosh-stemcell/spec/support/os_image_shared_examples.rb
@@ -341,40 +341,28 @@ shared_examples_for 'every OS image' do
end
end
- describe 'mounted file systems: /etc/fstab' do
- it('should exist (stig: V-38654)(stig: V-38652)') do
- expect(File).to exist('/etc/fstab')
- end
-
- it('should be almost empty (stig: V-38654)(stig: V-38652)') do
- expect(File.read('/etc/fstab').chomp).to eq('# UNCONFIGURED FSTAB FOR BASE SYSTEM')
- end
- end
-
- describe 'IP forwarding for IPv4 must not be enabled' do
- it 'disables ip forwarding (stig: V-38511)' do
- expect(`grep net.ipv4.ip_forward #{backend.chroot_dir}/etc/sysctl.d/60-bosh-sysctl.conf`.strip).to eq('net.ipv4.ip_forward=0')
+ describe 'IP forwarding for IPv4 must not be enabled (stig: V-38511)' do
+ context file('/etc/sysctl.d/60-bosh-sysctl.conf') do
+ its (:content) { should match /^net\.ipv4\.ip_forward=0$/ }
end
end
- describe 'address space layout randomization (ASLR) should be enabled' do
- it 'enables address space layout randomization (ASLR) (stig: V-38596)' do
- expect(`grep kernel.randomize_va_space #{backend.chroot_dir}/etc/sysctl.d/60-bosh-sysctl.conf`.strip).to eq('kernel.randomize_va_space=2')
+ describe 'address space layout randomization (ASLR) should be enabled (stig: V-38596)' do
+ context file('/etc/sysctl.d/60-bosh-sysctl.conf') do
+ its (:content) { should match /^kernel\.randomize_va_space=2$/ }
end
end
- describe 'syncookies should be enabled' do
- it 'enables syncookies (stig: V-38539)' do
- expect(`grep net.ipv4.tcp_syncookies #{backend.chroot_dir}/etc/sysctl.d/60-bosh-sysctl.conf`.strip).to eq('net.ipv4.tcp_syncookies=1')
+ describe 'syncookies should be enabled (stig: V-38539)' do
+ context file('/etc/sysctl.d/60-bosh-sysctl.conf') do
+ its (:content) { should match /^net\.ipv4\.tcp_syncookies=1$/ }
end
end
- describe 'audit disk errors' do
- it 'audit logs disk errors to syslog (stig: V-38464)' do
- expect(`grep disk_error_action #{backend.chroot_dir}/etc/audit/auditd.conf`.strip).to eq('disk_error_action = SYSLOG')
- end
- it 'audits logs when disk is full (stig: V-38468)' do
- expect(`grep disk_full_action #{backend.chroot_dir}/etc/audit/auditd.conf`.strip).to eq('disk_full_action = SYSLOG')
+ describe 'audit disk errors logs disk errors to syslog (stig: V-38464) and logs when disk is full (stig: V-38468)' do
+ context file('/etc/audit/auditd.conf') do
+ its (:content) { should match /^disk_full_action = SYSLOG$/ }
+ its (:content) { should match /^disk_error_action = SYSLOG$/ }
end
end
end
diff --git a/bosh-stemcell/spec/support/stig.rb b/bosh-stemcell/spec/support/stig.rb
index <HASH>..<HASH> 100644
--- a/bosh-stemcell/spec/support/stig.rb
+++ b/bosh-stemcell/spec/support/stig.rb
@@ -21,4 +21,4 @@ RSpec.configure do |config|
list
end
-end
\ No newline at end of file
+end
|
Fixed bug in unit test. Instead of shelling out, we are using `describe file` in unit tests.
[#<I>](<URL>)
|
cloudfoundry_bosh
|
train
|
6a570765d4a987d489484e487e3f9a26c273b6fb
|
diff --git a/src/main/java/org/jboss/netty/channel/socket/http/HttpTunnelingChannelHandler.java b/src/main/java/org/jboss/netty/channel/socket/http/HttpTunnelingChannelHandler.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jboss/netty/channel/socket/http/HttpTunnelingChannelHandler.java
+++ b/src/main/java/org/jboss/netty/channel/socket/http/HttpTunnelingChannelHandler.java
@@ -112,9 +112,7 @@ class HttpTunnelingChannelHandler extends SimpleChannelUpstreamHandler {
cause = ex2;
}
} else {
- if (invalidated.compareAndSet(false, true)) {
- session.invalidate();
- }
+ invalidateHttpSession();
e.getChannel().close();
}
} finally {
@@ -132,16 +130,22 @@ class HttpTunnelingChannelHandler extends SimpleChannelUpstreamHandler {
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
logger.warn("Unexpected exception", e.getCause());
- if (invalidated.compareAndSet(false, true)) {
- session.invalidate();
- }
+ invalidateHttpSession();
e.getChannel().close();
}
@Override
public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
+ invalidateHttpSession();
+ }
+
+ private void invalidateHttpSession() {
if (invalidated.compareAndSet(false, true)) {
- session.invalidate();
+ try {
+ session.invalidate();
+ } catch (Exception e) {
+ // Gulp - https://jira.jboss.org/jira/browse/JBWEB-139
+ }
}
}
|
Workaround for NPE in Tomcat on undeploy
|
netty_netty
|
train
|
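The Netty workaround centralizes an idempotent, exception-tolerant `invalidateHttpSession()`. A Python sketch of the same compare-and-set-then-swallow shape (the session object is a placeholder):

```python
import threading

class TunnelHandler:
    def __init__(self, session):
        self._session = session
        self._invalidated = False
        self._lock = threading.Lock()

    def invalidate_http_session(self):
        # Equivalent of invalidated.compareAndSet(false, true): only the
        # first caller wins; later or concurrent calls are no-ops.
        with self._lock:
            if self._invalidated:
                return
            self._invalidated = True
        try:
            self._session.invalidate()
        except Exception:
            # Swallow the container NPE on undeploy (JBWEB-139); the
            # session is gone either way.
            pass
```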
d47025985fb927e9f5a5126bf5b76fda80956ce4
|
diff --git a/core/src/main/java/hudson/model/RunMap.java b/core/src/main/java/hudson/model/RunMap.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/RunMap.java
+++ b/core/src/main/java/hudson/model/RunMap.java
@@ -162,7 +162,11 @@ public final class RunMap<R extends Run<?,R>> extends AbstractMap<Integer,R> imp
buildDir.mkdirs();
String[] buildDirs = buildDir.list(new FilenameFilter() {
public boolean accept(File dir, String name) {
- return new File(dir,name).isDirectory();
+ // HUDSON-1461 sometimes create bogus data directories with impossible dates, such as year 0.
+ // Date object doesn't roundtrip year 0 (year is -2,-1,+1,+2,... and there's no zero),
+ // so we eventually fail to load this data.
+ // so don't even bother trying.
+ return !name.startsWith("0000") && new File(dir,name).isDirectory();
}
});
|
[HUDSON-<I>] reducing the amount of bogus error messages that Hudson prints during start up.
git-svn-id: <URL>
|
jenkinsci_jenkins
|
train
|
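The RunMap filter simply refuses directory names that cannot round-trip through the date parser. A Python sketch of the same listing filter (layout assumed):

```python
import os

def list_build_dirs(build_dir):
    os.makedirs(build_dir, exist_ok=True)
    return [
        name
        for name in os.listdir(build_dir)
        # Bogus "year 0000" directories can't round-trip through the
        # date parser, so don't even bother trying to load them.
        if not name.startswith("0000")
        and os.path.isdir(os.path.join(build_dir, name))
    ]
```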
83a6503a7c3dcd504c80e211c3e6a2dbb9da691c
|
diff --git a/test/document_test.js b/test/document_test.js
index <HASH>..<HASH> 100644
--- a/test/document_test.js
+++ b/test/document_test.js
@@ -14,10 +14,7 @@ describe("Document", function() {
before(function() {
brains.get('/document/encoding', function(req, res) {
res.header('Content-Type', 'text/html; charset=greek');
- res.send(`
- <html>
- <body>\xc3\xe5\xe9\xdc!</body>
- </html>`);
+ res.send(new Buffer("<html><body>\xc3\xe5\xe9\xdc!</body></html>"));
});
return browser.visit('/document/encoding');
});
|
Fix character encoding test case.
|
assaf_zombie
|
train
|
5fb92c2cf19fc7990db9945c89db31ca32930696
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,11 +3,11 @@ import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as readme:
- README = readme.read().split('h1>', 2)[1]
+ README = readme.read().split('h1>\n\n', 2)[1]
setup(
name='django-postgres-extra',
- version='1.21a3',
+ version='1.21a4',
packages=find_packages(),
include_package_data=True,
license='MIT License',
|
Cut out blank lines at the start of PyPi README
|
SectorLabs_django-postgres-extra
|
train
|
f04f66db5975f72ce2882a26c9004708dc2f07ff
|
diff --git a/aruba.gemspec b/aruba.gemspec
index <HASH>..<HASH> 100644
--- a/aruba.gemspec
+++ b/aruba.gemspec
@@ -25,7 +25,7 @@ Gem::Specification.new do |spec|
spec.add_runtime_dependency 'childprocess', '~> 3.0'
spec.add_runtime_dependency 'contracts', '~> 0.16.0'
- spec.add_runtime_dependency 'cucumber', ['>= 2.4', '< 4.0']
+ spec.add_runtime_dependency 'cucumber', ['>= 2.4', '< 5.0']
spec.add_runtime_dependency 'ffi', '~> 1.9'
spec.add_runtime_dependency 'rspec-expectations', '~> 3.4'
spec.add_runtime_dependency 'thor', '~> 1.0'
diff --git a/features/support/env.rb b/features/support/env.rb
index <HASH>..<HASH> 100644
--- a/features/support/env.rb
+++ b/features/support/env.rb
@@ -13,12 +13,12 @@ require 'aruba/config/jruby'
require 'rspec/expectations'
Before do |scenario|
- unless scenario.respond_to?(:feature) && scenario.respond_to?(:name)
- raise TypeError, "Don't know how to extract command name from #{scenario.class}"
+ if scenario.respond_to?(:feature) # Cucumber < 4
+ command_name = "#{scenario.feature.file} #{scenario.name}"
+ else
+ command_name = "#{scenario.location.file} #{scenario.name}"
end
- command_name = "#{scenario.feature.name} #{scenario.name}"
-
# Used in simplecov_setup so that each scenario has a different name and
# their coverage results are merged instead of overwriting each other as
# 'Cucumber Features'
diff --git a/features/support/timing.rb b/features/support/timing.rb
index <HASH>..<HASH> 100644
--- a/features/support/timing.rb
+++ b/features/support/timing.rb
@@ -3,7 +3,12 @@
scenario_times = {}
Around do |scenario, block|
- name = "#{scenario.feature.file}::#{scenario.name}"
+ if scenario.respond_to?(:feature) # Cucumber < 4
+ name = "#{scenario.feature.file}::#{scenario.name}"
+ else
+ name = "#{scenario.location.file}::#{scenario.name}"
+ end
+
start = Time.now
block.call
end_time = Time.now
|
Support Cucumber <I> through 4.x
|
cucumber_aruba
|
train
|
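The aruba hooks switch from rejecting unknown scenario objects to duck-typed feature detection. The same probe-the-API idea in Python (attribute names copied from the Ruby, everything else assumed):

```python
def command_name_for(scenario):
    # Cucumber < 4 exposes scenario.feature; newer versions expose
    # scenario.location -- probe instead of hard-coding one API shape.
    if hasattr(scenario, "feature"):
        return f"{scenario.feature.file} {scenario.name}"
    return f"{scenario.location.file} {scenario.name}"
```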
bb9d7435df36a0e5189e9abfdf3bf8f9a1065d6f
|
diff --git a/presto-main/src/main/java/com/facebook/presto/cli/HttpQueryClient.java b/presto-main/src/main/java/com/facebook/presto/cli/HttpQueryClient.java
index <HASH>..<HASH> 100644
--- a/presto-main/src/main/java/com/facebook/presto/cli/HttpQueryClient.java
+++ b/presto-main/src/main/java/com/facebook/presto/cli/HttpQueryClient.java
@@ -101,6 +101,11 @@ public class HttpQueryClient
return debug;
}
+ public URI getQueryLocation()
+ {
+ return queryLocation;
+ }
+
public QueryInfo getQueryInfo(boolean forceRefresh)
{
QueryInfo queryInfo = finalQueryInfo.get();
diff --git a/presto-main/src/main/java/com/facebook/presto/cli/StatusPrinter.java b/presto-main/src/main/java/com/facebook/presto/cli/StatusPrinter.java
index <HASH>..<HASH> 100644
--- a/presto-main/src/main/java/com/facebook/presto/cli/StatusPrinter.java
+++ b/presto-main/src/main/java/com/facebook/presto/cli/StatusPrinter.java
@@ -55,6 +55,7 @@ public class StatusPrinter
/*
Query 16, RUNNING, 1 node, 855 splits
+http://my.server:8080/v1/query/16?pretty
Splits: 646 queued, 34 running, 175 done
CPU Time: 33.7s total, 191K rows/s, 16.6MB/s, 22% active
Per Node: 2.5 parallelism, 473K rows/s, 41.1MB/s
@@ -131,6 +132,10 @@ Parallelism: 2.5
pluralize("node", nodes));
out.println(querySummary);
+ if (queryClient.isDebug()) {
+ out.println(queryClient.getQueryLocation() + "?pretty");
+ }
+
// Splits: 1000 total, 842 done (84.20%)
String splitsSummary = String.format("Splits: %,d total, %,d done (%.2f%%)",
globalExecutionStats.getSplits(),
@@ -217,6 +222,10 @@ Parallelism: 2.5
globalExecutionStats.getSplits());
reprintLine(querySummary);
+ if (queryClient.isDebug()) {
+ reprintLine(queryClient.getQueryLocation() + "?pretty");
+ }
+
if (queryInfo.getState() == QueryState.PLANNING) {
return;
}
|
Print query url in client when in debug mode
|
prestodb_presto
|
train
|
fb4028735164d62a5266f7d7a4aa670b3234ff9b
|
diff --git a/DependencyInjection/AbstractConfiguration.php b/DependencyInjection/AbstractConfiguration.php
index <HASH>..<HASH> 100755
--- a/DependencyInjection/AbstractConfiguration.php
+++ b/DependencyInjection/AbstractConfiguration.php
@@ -46,7 +46,8 @@ abstract class AbstractConfiguration implements ConfigurationInterface
->arrayNode('dynamic_routing')
->children()
->scalarNode('name')->defaultNull()->end()
- ->scalarNode('class')->defaultNull()->end()
+ ->scalarNode('entity')->defaultNull()->end()
+ ->scalarNode('generator')->defaultNull()->end()
->arrayNode('defaults')
->useAttributeAsKey('name')
->prototype('scalar')->end()
diff --git a/DependencyInjection/Compiler/AbstractConfigureDynamicRoutingPass.php b/DependencyInjection/Compiler/AbstractConfigureDynamicRoutingPass.php
index <HASH>..<HASH> 100755
--- a/DependencyInjection/Compiler/AbstractConfigureDynamicRoutingPass.php
+++ b/DependencyInjection/Compiler/AbstractConfigureDynamicRoutingPass.php
@@ -30,9 +30,10 @@ abstract class AbstractConfigureDynamicRoutingPass implements CompilerPassInterf
public function process(ContainerBuilder $container)
{
- $parameters = $this->getExtensionConfiguration($container);
+ $routingDiscriminators = $container->getParameter('routing_discriminator_map');
- $definition = new Definition($parameters['dynamic_routing']['class'], [
+ $parameters = $this->getExtensionConfiguration($container);
+ $definition = new Definition($parameters['dynamic_routing']['generator'], [
'defaults' => $parameters['dynamic_routing']['defaults'],
'requirements' => $parameters['dynamic_routing']['requirements'],
'pattern' => $parameters['dynamic_routing']['pattern'],
@@ -40,5 +41,10 @@ abstract class AbstractConfigureDynamicRoutingPass implements CompilerPassInterf
$definition->addTag('route.generator');
$container->setDefinition($parameters['dynamic_routing']['name'] . '.route.generator', $definition);
+
+ if (null !== $parameters['dynamic_routing']['entity']) {
+ $routingDiscriminators[$parameters['dynamic_routing']['name']] = $parameters['dynamic_routing']['entity'];
+ $container->setParameter('routing_discriminator_map', $routingDiscriminators);
+ }
}
}
|
Dynamic routing discriminatorMap
(cherry picked from commit 8e<I>c<I>e9cea<I>c3e<I>c<I>a7)
|
WellCommerce_CouponBundle
|
train
|
853e4ec1f38c48b4708437b3440cc9e191d48d02
|
diff --git a/pylint/lint/pylinter.py b/pylint/lint/pylinter.py
index <HASH>..<HASH> 100644
--- a/pylint/lint/pylinter.py
+++ b/pylint/lint/pylinter.py
@@ -457,8 +457,8 @@ class PyLinter(
self.option_groups = option_groups + PyLinter.option_groups
self._options_methods = {"enable": self.enable, "disable": self.disable}
self._bw_options_methods = {
- "disable-msg": self.disable,
- "enable-msg": self.enable,
+ "disable-msg": self._options_methods["disable"],
+ "enable-msg": self._options_methods["enable"],
}
MessagesHandlerMixIn.__init__(self)
reporters.ReportsHandlerMixIn.__init__(self)
|
More modular code for enable/disable in pylinter
|
PyCQA_pylint
|
train
|
502ba4096e9c3019a060ab534b120f8a23c50f42
|
diff --git a/test/test_properties_defaults.py b/test/test_properties_defaults.py
index <HASH>..<HASH> 100644
--- a/test/test_properties_defaults.py
+++ b/test/test_properties_defaults.py
@@ -27,7 +27,6 @@ import unittest
import __import_shinken
from shinken.property import UnusedProp, none_object
-from shinken.objects.config import Config
import shinken.daemon
@@ -221,6 +220,7 @@ class TestConfig(unittest.TestCase, PropertiesTester):
])
def setUp(self):
+ from shinken.objects.config import Config
self.item = Config()
|
Move global imports into the test-classes.
|
Alignak-monitoring_alignak
|
train
|
437cca6991b4ef2ee9ca4131e829215111be73f1
|
diff --git a/java/client/test/org/openqa/selenium/devtools/ChromeDevToolsTestBase.java b/java/client/test/org/openqa/selenium/devtools/ChromeDevToolsTestBase.java
index <HASH>..<HASH> 100644
--- a/java/client/test/org/openqa/selenium/devtools/ChromeDevToolsTestBase.java
+++ b/java/client/test/org/openqa/selenium/devtools/ChromeDevToolsTestBase.java
@@ -28,7 +28,7 @@ public abstract class ChromeDevToolsTestBase extends DevToolsTestBase {
protected ChromeDriver chromeDriver;
@Before
- public void setUp() {
+ public void setUpChrome() {
assumeThat(driver).isInstanceOf(ChromeDriver.class);
chromeDriver = (ChromeDriver) driver;
|
[java] Renaming a test setup method to prevent overriding a method with the same name in the base class
|
SeleniumHQ_selenium
|
train
|
49f813cb5c84b063a9aa52910dbf70cde5edc8d2
|
diff --git a/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionConfiguration.java b/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionConfiguration.java
index <HASH>..<HASH> 100644
--- a/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionConfiguration.java
+++ b/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionConfiguration.java
@@ -43,10 +43,10 @@ public class ProtocolConnectionConfiguration {
private URI uri;
private Endpoint endpoint;
+ private OptionMap optionMap = OptionMap.EMPTY;
private long connectionTimeout = DEFAULT_CONNECT_TIMEOUT;
private CallbackHandler callbackHandler;
private Map<String, String> saslOptions = Collections.emptyMap();
- private boolean overrideSslContext;
private SSLContext sslContext;
private String clientBindAddress;
private ProtocolTimeoutHandler timeoutHandler;
@@ -69,6 +69,7 @@ public class ProtocolConnectionConfiguration {
*/
protected void validate() {
Assert.checkNotNullParam("endpoint", endpoint);
+ Assert.checkNotNullParam("optionMap", optionMap);
Assert.checkNotNullParam("uri", uri);
}
@@ -102,13 +103,12 @@ public class ProtocolConnectionConfiguration {
this.endpoint = endpoint;
}
- @Deprecated
public OptionMap getOptionMap() {
- return OptionMap.EMPTY;
+ return optionMap;
}
- @Deprecated
public void setOptionMap(OptionMap optionMap) {
+ this.optionMap = optionMap;
}
public long getConnectionTimeout() {
@@ -135,14 +135,6 @@ public class ProtocolConnectionConfiguration {
this.saslOptions = saslOptions;
}
- public boolean isOverrideSslContext() {
- return overrideSslContext;
- }
-
- public void setOverrideSslContext(boolean overrideSslContext) {
- this.overrideSslContext = overrideSslContext;
- }
-
public SSLContext getSslContext() {
return sslContext;
}
@@ -205,10 +197,10 @@ public class ProtocolConnectionConfiguration {
final ProtocolConnectionConfiguration target) {
target.uri = old.uri;
target.endpoint = old.endpoint;
+ target.optionMap = old.optionMap;
target.connectionTimeout = old.connectionTimeout;
target.callbackHandler = old.callbackHandler;
target.saslOptions = old.saslOptions;
- target.overrideSslContext = old.overrideSslContext;
target.sslContext = old.sslContext;
target.clientBindAddress = old.clientBindAddress;
target.timeoutHandler = old.timeoutHandler;
diff --git a/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionUtils.java b/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionUtils.java
index <HASH>..<HASH> 100644
--- a/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionUtils.java
+++ b/protocol/src/main/java/org/jboss/as/protocol/ProtocolConnectionUtils.java
@@ -46,7 +46,6 @@ import org.wildfly.security.auth.client.AuthenticationContext;
import org.wildfly.security.auth.client.AuthenticationContextConfigurationClient;
import org.wildfly.security.auth.client.MatchRule;
import org.xnio.IoFuture;
-import org.xnio.OptionMap;
import org.xnio.Options;
/**
@@ -162,11 +161,11 @@ public class ProtocolConnectionUtils {
authenticationContext = authenticationContext.withSsl(MatchRule.ALL, () -> finalSslContext);
if (clientBindAddress == null) {
- return endpoint.connect(uri, OptionMap.EMPTY, authenticationContext);
+ return endpoint.connect(uri, configuration.getOptionMap(), authenticationContext);
} else {
InetSocketAddress bindAddr = new InetSocketAddress(clientBindAddress, 0);
// TODO: bind address via connection builder
- return endpoint.connect(uri, OptionMap.EMPTY, authenticationContext);
+ return endpoint.connect(uri, configuration.getOptionMap(), authenticationContext);
}
}
|
[WFCORE-<I>] Restore using OptionMap for remaining non-security configuration.
|
wildfly_wildfly-core
|
train
|
fef91ec2c44c7c016d5ac6a22d2fc4ddae5ada99
|
diff --git a/evm/utils/address.py b/evm/utils/address.py
index <HASH>..<HASH> 100644
--- a/evm/utils/address.py
+++ b/evm/utils/address.py
@@ -22,7 +22,7 @@ def generate_contract_address(address, nonce):
def generate_CREATE2_contract_address(salt, code):
"""
- If contract is created by transaction, `salt` should be empty.
+ If contract is created by transaction, `salt` is specified by `transaction.salt`.
If contract is created by contract, `salt` is set by the creator contract.
"""
validate_length_lte(salt, 32, title="Salt")
|
Update docstring of generate_CREATE2_contract_address
|
ethereum_py-evm
|
train
|
b655f2745e850fc43d0b5330ec33932937aa8d49
|
diff --git a/src/main/webapp/js/hintjbpm.js b/src/main/webapp/js/hintjbpm.js
index <HASH>..<HASH> 100644
--- a/src/main/webapp/js/hintjbpm.js
+++ b/src/main/webapp/js/hintjbpm.js
@@ -147,8 +147,8 @@
var datainputs = csobj.properties.datainputset;
var dataoutputs = csobj.properties.dataoutputset;
var datainParts = datainputs.split(",");
- for(var i=0; i < datainParts.length; i++) {
- var nextPart = datainParts[i];
+ for(var j=0; j < datainParts.length; j++) {
+ var nextPart = datainParts[j];
if(nextPart.indexOf(":") > 0) {
var innerParts = nextPart.split(":");
maybeAdd('${'+innerParts[0]+'}');
@@ -157,8 +157,8 @@
}
}
var dataoutParts = dataoutputs.split(",");
- for(var j=0; j < dataoutParts.length; j++) {
- var nextPart = dataoutParts[j];
+ for(var k=0; k < dataoutParts.length; k++) {
+ var nextPart = dataoutParts[k];
if(nextPart.indexOf("=") > 0) {
var innerParts = nextPart.split("=");
maybeAdd(innerParts[0]);
|
fix for user task forms autocomplete on data inputs
|
kiegroup_jbpm-designer
|
train
|
ebc3d232f4b6c197d5dc5da2e1868d56b2096f1a
|
diff --git a/eth/downloader/queue.go b/eth/downloader/queue.go
index <HASH>..<HASH> 100644
--- a/eth/downloader/queue.go
+++ b/eth/downloader/queue.go
@@ -1129,12 +1129,13 @@ func (q *queue) deliverNodeData(results []trie.SyncResult, callback func(int, bo
if err != nil {
q.stateSchedLock.Unlock()
callback(i, progressed, err)
+ return
}
if err = batch.Write(); err != nil {
q.stateSchedLock.Unlock()
callback(i, progressed, err)
+ return // TODO(karalabe): If a DB write fails (disk full), we ought to cancel the sync
}
-
// Item processing succeeded, release the lock (temporarily)
progressed = progressed || prog
q.stateSchedLock.Unlock()
|
eth/downloader: fix mutex regression causing panics on fail (#<I>)
|
ethereum_go-ethereum
|
train
|
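The go-ethereum fix adds `return` after each error callback so the lock is not released twice. A compact Python rendering of that control flow (the callables are stand-ins):

```python
import threading

lock = threading.Lock()

def deliver(process, write, callback):
    lock.acquire()
    err = process()
    if err is not None:
        lock.release()
        callback(err)
        return  # without this, execution falls through and unlocks again
    err = write()
    if err is not None:
        lock.release()
        callback(err)
        return  # same: report the error once, then stop
    lock.release()
    callback(None)
```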
1f718c941e3472407facadd5d9b5fa4552bf780f
|
diff --git a/src/Button/index.js b/src/Button/index.js
index <HASH>..<HASH> 100644
--- a/src/Button/index.js
+++ b/src/Button/index.js
@@ -13,6 +13,8 @@ import { button } from '@bootstrap-styled/css-mixins/lib/buttons';
export const defaultProps = {
tag: 'button',
color: 'primary',
+ hover: false,
+ focus: false,
theme: {
'$enable-rounded': true,
'$enable-shadows': false,
@@ -60,6 +62,10 @@ export const propTypes = {
active: PropTypes.bool,
/** Toggles block CSS display. */
block: PropTypes.bool,
+ /** Toggle hover CSS className. */
+ hover: PropTypes.bool,
+ /** Toggle focus CSS className. */
+ focus: PropTypes.bool,
/** Toggles disable mouse event and CSS color. */
disabled: PropTypes.bool,
/** Toggles outline CSS border and background color. */
@@ -88,11 +94,7 @@ export const propTypes = {
/** Toggles drop up CSS style. */
dropup: PropTypes.bool,
/** Replace the default component tag by the one specified. Can be: */
- tag: PropTypes.oneOfType([
- PropTypes.string,
- PropTypes.element,
- PropTypes.func,
- ]),
+ tag: PropTypes.any,
/** Theme variables. Can be: */
theme: PropTypes.shape({
'$btn-padding-x': PropTypes.string,
@@ -172,11 +174,15 @@ class ButtonUnstyled extends React.Component { // eslint-disable-line react/pref
size,
ref,
tag: Tag,
+ hover,
+ focus,
...attributes
} = omit(this.props, ['theme']);
/* eslint-enable prefer-const */
const classes = mapToCssModules(cn(className, 'btn', {
+ hover,
+ focus,
dropup,
active,
disabled,
|
fix(button): Added props `hover` and `focus`; prop `tag` accepts any prop type
|
bootstrap-styled_v4
|
train
|
dbca4e44ca36f528a71fb8a33e5db9aa29c8faf1
|
diff --git a/auth/mnet/auth.php b/auth/mnet/auth.php
index <HASH>..<HASH> 100644
--- a/auth/mnet/auth.php
+++ b/auth/mnet/auth.php
@@ -241,7 +241,7 @@ class auth_plugin_mnet extends auth_plugin_base {
require_once $CFG->dirroot . '/mnet/xmlrpc/client.php';
// verify the remote host is configured locally before attempting RPC call
- if (! $remotehost = $DB->get_record('mnet_host', array('wwwroot'=>$remotewwwroot))) {
+ if (! $remotehost = $DB->get_record('mnet_host', array('wwwroot' => $remotewwwroot, 'deleted' => 0))) {
print_error('notpermittedtoland', 'mnet');
}
@@ -1302,9 +1302,12 @@ class auth_plugin_mnet extends auth_plugin_base {
svc.apiversion,
h2s.id as h2s_id
FROM
+ {mnet_host} h,
{mnet_service} svc,
{mnet_host2service} h2s
WHERE
+ h.deleted = '0' AND
+ h.id = h2s.hostid AND
h2s.hostid = ? AND
h2s.serviceid = svc.id AND
svc.name = ? AND
|
MDL-<I> Require jump destination MNET peer to be non-deleted.
Also require landing source MNET peer to be non-deleted.
|
moodle_moodle
|
train
|
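The Moodle fix is a classic soft-delete filter: every lookup of a peer must also check `deleted = 0`. A self-contained sqlite3 illustration of why the extra predicate matters:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE mnet_host (id INTEGER, wwwroot TEXT, deleted INTEGER)")
conn.execute("INSERT INTO mnet_host VALUES (1, 'https://peer.example', 1)")  # deleted
conn.execute("INSERT INTO mnet_host VALUES (2, 'https://peer.example', 0)")  # live

# Matching on wwwroot alone could land on the soft-deleted row;
# adding deleted = 0 guarantees only live peers are used.
row = conn.execute(
    "SELECT id FROM mnet_host WHERE wwwroot = ? AND deleted = 0",
    ("https://peer.example",),
).fetchone()
print(row)  # (2,)
```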
08528efc946be260aaccb5c8070723d23d09b295
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -36,6 +36,7 @@ MOD_NAMES = [
'thinc.linear.features',
'thinc.linear.serialize',
'thinc.linear.sparse',
+ 'thinc.linear.linear',
'thinc.neural.optimizers',
'thinc.neural.ops',
'thinc.extra.eg',
|
Add linear.pyx to setup.py
|
explosion_thinc
|
train
|
d5d2d02459d19727f17705c271eb7e0b4e3ee936
|
diff --git a/gulpfile.js b/gulpfile.js
index <HASH>..<HASH> 100644
--- a/gulpfile.js
+++ b/gulpfile.js
@@ -118,13 +118,11 @@ var jsTasks = function(filename) {
var writeToManifest = function(directory) {
return lazypipe()
.pipe(gulp.dest, path.dist + directory)
- .pipe(require('gulp-debug'))
.pipe($.livereload)
.pipe($.rev.manifest, revManifest, {
base: path.dist,
merge: true
})
- .pipe(require('gulp-debug'))
.pipe(gulp.dest, path.dist)();
};
diff --git a/lib/assets.php b/lib/assets.php
index <HASH>..<HASH> 100644
--- a/lib/assets.php
+++ b/lib/assets.php
@@ -19,10 +19,8 @@ namespace Roots\Sage\Assets;
*/
function asset_path($filename) {
$dist_path = get_template_directory_uri() . '/dist/';
-
- if (WP_ENV === 'development') {
- return $dist_path . $filename;
- }
+ $directory = dirname($filename) . '/';
+ $file = basename($filename);
$manifest_path = get_template_directory() . '/dist/assets.json';
@@ -32,10 +30,7 @@ function asset_path($filename) {
$manifest = [];
}
- $directory = dirname($filename) . '/';
- $file = basename($filename);
-
- if (array_key_exists($file, $manifest)) {
+ if (WP_ENV !== 'development' && array_key_exists($file, $manifest)) {
return $dist_path . $directory . $manifest[$file];
} else {
return $dist_path . $directory . $file;
@@ -59,6 +54,15 @@ function assets() {
add_filter('script_loader_src', __NAMESPACE__ . '\\jquery_local_fallback', 10, 2);
}
+ /**
+ * Livereload client
+ * https://github.com/livereload/livereload-js
+ */
+ if (WP_ENV === 'development') {
+ wp_register_script('livereload', 'http://localhost:35729/livereload.js?snipver=1', null, false, true);
+ wp_enqueue_script('livereload');
+ }
+
if (is_single() && comments_open() && get_option('thread_comments')) {
wp_enqueue_script('comment-reply');
}
@@ -115,3 +119,4 @@ function google_analytics() {
if (GOOGLE_ANALYTICS_ID) {
add_action('wp_footer', __NAMESPACE__ . '\\google_analytics', 20);
}
+
|
Fixes livereload
If the environment is development -> try loading the livereload client
being served by `gulp watch`
|
roots_sage
|
train
|
7a3de081ad7a7a45871f0556dc16c66ece7c973d
|
diff --git a/lib/stream/leo-stream.js b/lib/stream/leo-stream.js
index <HASH>..<HASH> 100644
--- a/lib/stream/leo-stream.js
+++ b/lib/stream/leo-stream.js
@@ -267,7 +267,7 @@ module.exports = function(configure) {
done(err);
} else {
obj.payload = r;
- done(null, obj)
+ done(null, obj);
}
});
}),
@@ -324,7 +324,7 @@ module.exports = function(configure) {
if (err) {
done(err);
} else {
- context.push(r, rOpts)
+ context.push(r, rOpts);
done();
}
});
@@ -688,7 +688,7 @@ module.exports = function(configure) {
}, opts || {});
let newStats = () => {
- return {}
+ return {};
};
let getStats = (botId, queue) => {
@@ -704,7 +704,7 @@ module.exports = function(configure) {
source_timestamp: null,
started_timestamp: null,
ended_timestamp: null
- }
+ };
}
return stats[botId][queue];
@@ -712,7 +712,9 @@ module.exports = function(configure) {
let stats = newStats();
if (stream == null) {
- stream = ls.through(function (event, done) { done(null, event); });
+ stream = ls.through(function(event, done) {
+ done(null, event);
+ });
}
stream.visit = (event, done) => {
@@ -726,14 +728,14 @@ module.exports = function(configure) {
stat.ended_timestamp = stat.ended_timestamp ? Math.max(timestamp, stat.ended_timestamp) : timestamp;
stat.eid = event.eid;
stat.units += event.units || 1;
- stat.start_eid = stat.start_eid || event.eid
+ stat.start_eid = stat.start_eid || event.eid;
if (done) {
done(null, event);
}
- }
+ };
- stream.checkpoint = function (params, done) {
+ stream.checkpoint = function(params, done) {
if (typeof params === "function") {
done = params;
params = {};
@@ -756,38 +758,39 @@ module.exports = function(configure) {
}
}
async.parallelLimit(tasks, 10, (err, results) => {
- err && console.log("Stats error:", err)
+ err && console.log("Stats error:", err);
stats = newStats();
done(err);
});
- }
+ };
stream.checkpoint.stream = ls.through((o, d) => d(null, o), (done) => {
stream.checkpoint(opts || {}, done);
});
stream.on("checkpoint", (opts) => {
stream.checkpoint(opts || {}, done);
});
- stream.getCheckpoint = function (botId, queue) { return getStats(botId, queue); }
+ stream.getCheckpoint = function(botId, queue) {
+ return getStats(botId, queue);
+ };
let write = stream._write;
- stream._write = function (event, enc, done) {
+ stream._write = function(event, enc, done) {
let r = write(event, enc, (err, data) => {
- console.log(event.eid, event.correlation_id)
if (!err) {
stream.visit(event);
}
return done(err, data);
});
return r;
- }
- stream.on("finish", function () {
+ };
+ stream.on("finish", function() {
console.log("finish");
- stream.checkpoint(opts || {}, () => { })
+ stream.checkpoint(opts || {}, () => {});
});
- stream.on("end", function () {
+ stream.on("end", function() {
console.log("end", arguments);
- stream.checkpoint(opts || {}, () => { })
+ stream.checkpoint(opts || {}, () => {});
process.exit();
});
diff --git a/lib/streams.js b/lib/streams.js
index <HASH>..<HASH> 100644
--- a/lib/streams.js
+++ b/lib/streams.js
@@ -102,7 +102,12 @@ let ls = module.exports = {
flush.call(this, done);
} : null);
},
- cmd: (watchCommands) => {
+ cmd: (watchCommands, singleFunc) => {
+ if (typeof singleFunc == "function") {
+ watchCommands = {
+ [watchCommands]: singleFunc
+ };
+ }
for (var key in watchCommands) {
if (!key.match(/^cmd/) && typeof watchCommands[key] == "function") {
watchCommands["cmd" + key] = watchCommands[key];
@@ -478,7 +483,7 @@ let ls = module.exports = {
if (shouldLog) {
return ls.pipeline(ls.log(shouldLog === true ? "devnull" : shouldLog), s);
} else {
- return stream;
+ return s;
}
},
process: function(id, func, outQueue) {
diff --git a/test/aaaaTest.js b/test/aaaaTest.js
index <HASH>..<HASH> 100644
--- a/test/aaaaTest.js
+++ b/test/aaaaTest.js
@@ -62,7 +62,6 @@ describe("Streams", function() {
done(null);
}
}), ls.devnull("afterCmd"), (err) => {
- console.log(err);
done(err);
});
initialStream.write({
|
bug fix for devnull, add single command function
|
LeoPlatform_Nodejs
|
train
|
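The leo-stream change lets `cmd` take either a map of handlers or a single `(name, handler)` pair. The same argument-normalization idiom in Python (names mirror the JS, behavior is a sketch):

```python
def cmd(watch_commands, single_func=None):
    if callable(single_func):
        # cmd("restart", handler) is sugar for cmd({"restart": handler})
        watch_commands = {watch_commands: single_func}
    # Mirror the "cmd" prefixing applied to each handler key.
    return {
        (key if key.startswith("cmd") else "cmd" + key): handler
        for key, handler in watch_commands.items()
        if callable(handler)
    }

print(cmd("restart", lambda: None))  # {'cmdrestart': <function <lambda> ...>}
```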
69187c47c25ad510d627a72456037311c6a31a96
|
diff --git a/allocate.go b/allocate.go
index <HASH>..<HASH> 100644
--- a/allocate.go
+++ b/allocate.go
@@ -50,6 +50,30 @@ var DefaultExecAllocatorOptions = []ExecAllocatorOption{
NoFirstRun,
NoDefaultBrowserCheck,
Headless,
+
+ // After Puppeteer's default behavior.
+ Flag("disable-background-networking", true),
+ Flag("enable-features", "NetworkService,NetworkServiceInProcess"),
+ Flag("disable-background-timer-throttling", true),
+ Flag("disable-backgrounding-occluded-windows", true),
+ Flag("disable-breakpad", true),
+ Flag("disable-client-side-phishing-detection", true),
+ Flag("disable-default-apps", true),
+ Flag("disable-dev-shm-usage", true),
+ Flag("disable-extensions", true),
+ Flag("disable-features", "site-per-process,TranslateUI,BlinkGenPropertyTrees"),
+ Flag("disable-hang-monitor", true),
+ Flag("disable-ipc-flooding-protection", true),
+ Flag("disable-popup-blocking", true),
+ Flag("disable-prompt-on-repost", true),
+ Flag("disable-renderer-backgrounding", true),
+ Flag("disable-sync", true),
+ Flag("force-color-profile", "srgb"),
+ Flag("metrics-recording-only", true),
+ Flag("safebrowsing-disable-auto-update", true),
+ Flag("enable-automation", true),
+ Flag("password-store", "basic"),
+ Flag("use-mock-keychain", true),
}
// NewExecAllocator creates a new context set up with an ExecAllocator, suitable
@@ -325,9 +349,13 @@ func NoDefaultBrowserCheck(a *ExecAllocator) {
Flag("no-default-browser-check", true)(a)
}
-// Headless is the command line option to run in headless mode.
+// Headless is the command line option to run in headless mode. On top of
+// setting the headless flag, it also hides scrollbars and mutes audio.
func Headless(a *ExecAllocator) {
Flag("headless", true)(a)
+ // Like in Puppeteer.
+ Flag("hide-scrollbars", true)(a)
+ Flag("mute-audio", true)(a)
}
// DisableGPU is the command line option to disable the GPU process.
diff --git a/chromedp_test.go b/chromedp_test.go
index <HASH>..<HASH> 100644
--- a/chromedp_test.go
+++ b/chromedp_test.go
@@ -44,23 +44,21 @@ func init() {
panic(fmt.Sprintf("could not create temp directory: %v", err))
}
- // build on top of the default options
+ // Build on top of the default options.
allocOpts = append(allocOpts, DefaultExecAllocatorOptions...)
- // disabling the GPU helps portability with some systems like Travis,
- // and can slightly speed up the tests on other systems
+ // Disabling the GPU helps portability with some systems like Travis,
+ // and can slightly speed up the tests on other systems.
allocOpts = append(allocOpts, DisableGPU)
- // find the exec path once at startup
- // it's worth noting that newer versions of chrome (64+) run much faster
- // than older ones -- same for headless_shell ...
+ // Find the exec path once at startup.
execPath = os.Getenv("CHROMEDP_TEST_RUNNER")
if execPath == "" {
execPath = findExecPath()
}
allocOpts = append(allocOpts, ExecPath(execPath))
- // not explicitly needed to be set, as this vastly speeds up unit tests
+ // Not explicitly needed to be set, as this speeds up the tests
if noSandbox := os.Getenv("CHROMEDP_NO_SANDBOX"); noSandbox != "false" {
allocOpts = append(allocOpts, NoSandbox)
}
|
Add more default Chrome flags, after Puppeteer
These seem to speed up Chrome slightly; 'go test' goes from ~<I>s to
~<I>s on my laptop.
Moreover, it's likely that what Puppeteer does is what users will
expect, or at least not be surprised by.
|
chromedp_chromedp
|
train
|
a680d8bf67afc348dbfa9904003a0fd295646eae
|
diff --git a/bin/openquake b/bin/openquake
index <HASH>..<HASH> 100755
--- a/bin/openquake
+++ b/bin/openquake
@@ -135,11 +135,12 @@ def list_calculations():
"""Simple UI wrapper around
:function:`openquake.export.core.get_jobs`. It prints the results in
a nice way."""
- calcs = export.get_jobs(getpass.getuser())
- if len(calcs) > 0:
+ jobs = export.get_jobs(getpass.getuser())
+ if len(jobs) > 0:
print 'ID\tStatus\tDescription'
- for c in calcs:
- print '%s\t%s\t%s' % (c.id, c.status, c.description)
+ for c in jobs:
+ print '%s\t%s\t%s' % (c.id, c.status,
+ c.hazard_calculation.description)
def list_outputs(job_id):
diff --git a/openquake/calculators/hazard/classical/core_next.py b/openquake/calculators/hazard/classical/core_next.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/hazard/classical/core_next.py
+++ b/openquake/calculators/hazard/classical/core_next.py
@@ -357,19 +357,23 @@ class ClassicalHazardCalculator(base.CalculatorNext):
'virtual_host': amqp_cfg['vhost'],
}
- with kombu.BrokerConnection(**conn_args) as conn:
- task_signal_queue(conn.channel()).declare()
- with conn.Consumer(task_signal_queue,
- callbacks=[task_complete_callback]):
- conn.drain_events() # This blocks until a message is received.
-
- while (sources_computed['value']
- < total_sources_to_compute['value']):
+ while (sources_computed['value']
+ < total_sources_to_compute['value']):
+
+ with kombu.BrokerConnection(**conn_args) as conn:
+ task_signal_queue(conn.channel()).declare()
+ with conn.Consumer(task_signal_queue,
+ callbacks=[task_complete_callback]):
+ # This blocks until a message is received.
+ conn.drain_events()
+
+ # Once we receive a completion signal, enqueue the next
+ # piece of work (if there's anything left to be done).
try:
hazard_curves.apply_async(task_gen.next())
except StopIteration:
- # TODO: If the `task_gen` runs out before we compute
- # all sources, something is wrong. Raise an error.
+ # There are no more tasks to dispatch; now we just need
+ # to wait until all tasks signal completion.
pass
diff --git a/tests/calculators/hazard/classical/core_next_test.py b/tests/calculators/hazard/classical/core_next_test.py
index <HASH>..<HASH> 100644
--- a/tests/calculators/hazard/classical/core_next_test.py
+++ b/tests/calculators/hazard/classical/core_next_test.py
@@ -14,6 +14,7 @@
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
+import getpass
import unittest
import nhlib.imt
@@ -31,7 +32,7 @@ class ClassicalHazardCalculatorTestCase(unittest.TestCase):
def setUp(self):
cfg = helpers.demo_file('simple_fault_demo_hazard/job.ini')
- self.job = helpers.get_hazard_job(cfg)
+ self.job = helpers.get_hazard_job(cfg, username=getpass.getuser())
self.calc = core_next.ClassicalHazardCalculator(self.job)
def test_pre_execute(self):
@@ -179,9 +180,10 @@ class ClassicalHazardCalculatorTestCase(unittest.TestCase):
# Update job status to move on to the execution phase.
self.job.status = 'executing'
self.job.save()
- import nose; nose.tools.set_trace()
self.calc.execute()
- import nose; nose.tools.set_trace()
+ self.job.status = 'complete'
+ self.job.is_running = False
+ self.job.save()
class ImtsToNhlibTestCase(unittest.TestCase):
diff --git a/tests/utils/helpers.py b/tests/utils/helpers.py
index <HASH>..<HASH> 100644
--- a/tests/utils/helpers.py
+++ b/tests/utils/helpers.py
@@ -922,12 +922,14 @@ def _deep_eq(a, b, decimal):
assert a == b, "%s != %s" % (a, b)
-def get_hazard_job(cfg):
+def get_hazard_job(cfg, username=None):
"""
Given a path to a config file, create a
:class:`openquake.db.models.OqJob` object for a hazard calculation.
"""
- job = engine2.prepare_job(default_user().user_name)
+ username = username if username is not None else default_user().user_name
+
+ job = engine2.prepare_job(username)
params, files = engine2.parse_config(open(cfg, 'r'), force_inputs=True)
haz_calc = engine2.create_hazard_calculation(
job.owner, params, files.values())
|
Fixed broken calculator workflow
`execute` loop works now, signalling works, and adjusted bin/openquake
to properly list completed calculations.
|
gem_oq-engine
|
train
|
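The openquake rework moves the broker consumer *inside* the progress loop: wait for one completion signal per iteration, then top up the queue. Stripped of kombu, the shape is just this (a plain queue stands in for the AMQP connection):

```python
import queue

def execute(total_sources, completions: "queue.Queue", task_gen, enqueue):
    computed = 0
    while computed < total_sources:
        completions.get()            # blocks until one task signals completion
        computed += 1
        try:
            enqueue(next(task_gen))  # keep the next piece of work flowing
        except StopIteration:
            # Nothing left to dispatch; keep draining completion signals
            # until every in-flight task has reported back.
            pass
```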
0de6d434e361e86e6dbcd7063bb72373e4f3e197
|
diff --git a/spec/punchblock/translator/asterisk/call_spec.rb b/spec/punchblock/translator/asterisk/call_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/punchblock/translator/asterisk/call_spec.rb
+++ b/spec/punchblock/translator/asterisk/call_spec.rb
@@ -691,7 +691,7 @@ module Punchblock
it "adds the join to the @pending_joins hash" do
translator.expects(:call_with_id).with(other_call_id).returns(other_call)
subject.execute_command command
- subject.pending_joins[other_channel].should be_a Command::Join
+ subject.pending_joins[other_channel].should be command
end
end
end#execute_command
|
[BUGFIX] Test should assert object identity
|
adhearsion_punchblock
|
train
|
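The punchblock tweak strengthens a type assertion (`be_a Command::Join`) into an identity assertion (`be command`). In Python's unittest the same distinction is `assertIsInstance` versus `assertIs`:

```python
import unittest

class Join:  # stand-in for Command::Join
    pass

class PendingJoinsTest(unittest.TestCase):
    def test_stores_the_very_command_executed(self):
        command = Join()
        pending_joins = {"other-channel": command}
        self.assertIsInstance(pending_joins["other-channel"], Join)  # weak
        self.assertIs(pending_joins["other-channel"], command)       # strong

if __name__ == "__main__":
    unittest.main()
```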
1b90fe56efb5a9fe7de30187f58a0f0c7ad58e4c
|
diff --git a/media.go b/media.go
index <HASH>..<HASH> 100644
--- a/media.go
+++ b/media.go
@@ -368,8 +368,8 @@ func (item *Item) Save() error {
//
// See example: examples/media/itemDownload.go
func (item *Item) Download(folder, name string) (imgs, vds []string, err error) {
- imgFolder := fmt.Sprintf("%s%cimages%c", folder, os.PathSeparator, os.PathSeparator)
- vidFolder := fmt.Sprintf("%s%cvideos%c", folder, os.PathSeparator, os.PathSeparator)
+ imgFolder := path.Join(folder, "images")
+ vidFolder := path.Join(folder, "videos")
inst := item.media.instagram()
os.MkdirAll(folder, 0777)
@@ -384,9 +384,9 @@ func (item *Item) Download(folder, name string) (imgs, vds []string, err error)
return nil, nil, err
}
- nname = fmt.Sprintf("%s%c%s", imgFolder, os.PathSeparator, path.Base(u.Path))
+ nname = path.Join(imgFolder, path.Base(u.Path))
} else {
- nname = fmt.Sprintf("%s%c%s", imgFolder, os.PathSeparator, nname)
+ nname = path.Join(imgFolder, nname)
}
nname = getname(nname)
@@ -405,9 +405,9 @@ func (item *Item) Download(folder, name string) (imgs, vds []string, err error)
return nil, nil, err
}
- nname = fmt.Sprintf("%s%c%s", vidFolder, os.PathSeparator, path.Base(u.Path))
+ nname = path.Join(vidFolder, path.Base(u.Path))
} else {
- nname = fmt.Sprintf("%s%c%s", vidFolder, os.PathSeparator, nname)
+ nname = path.Join(vidFolder, nname)
}
nname = getname(nname)
|
Deleted fmt formatting and changed to path.Join for paths
|
ahmdrz_goinsta
|
train
|
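The goinsta change replaces hand-rolled separator formatting with `path.Join`. The same cleanup in Python, where `os.path.join` plays the role of Go's `path.Join`:

```python
import os

folder = "downloads"  # illustrative

# Before: manual separator arithmetic, easy to double up or get wrong
img_folder_old = "%s%cimages%c" % (folder, os.sep, os.sep)

# After: join normalizes separators and never doubles them
img_folder = os.path.join(folder, "images")
nname = os.path.join(img_folder, os.path.basename("/media/photo.jpg"))
print(img_folder_old, img_folder, nname)
```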
635980b0672c158eeda566e082e00b0b6a455f85
|
diff --git a/src/test/org/openscience/cdk/geometry/cip/CIPSMILESTest.java b/src/test/org/openscience/cdk/geometry/cip/CIPSMILESTest.java
index <HASH>..<HASH> 100644
--- a/src/test/org/openscience/cdk/geometry/cip/CIPSMILESTest.java
+++ b/src/test/org/openscience/cdk/geometry/cip/CIPSMILESTest.java
@@ -186,9 +186,6 @@ public class CIPSMILESTest extends CDKTestCase {
IStereoElement stereo = stereoElements.next();
Assert.assertNotNull(stereo);
Assert.assertTrue(stereo instanceof ITetrahedralChirality);
- System.out.println(((ITetrahedralChirality) stereo).getStereo());
- for (IAtom a : ((ITetrahedralChirality) stereo).getLigands())
- System.out.println(mol.getAtomNumber(a));
Assert.assertEquals(
CIP_CHIRALITY.R,
CIPTool.getCIPChirality(mol, (ITetrahedralChirality)stereo));
|
Removing print to stdout.
|
cdk_cdk
|
train
|
35b78690f382d03b5c7e0f79e9f2f4d7284a4b75
|
diff --git a/lxc/image.go b/lxc/image.go
index <HASH>..<HASH> 100644
--- a/lxc/image.go
+++ b/lxc/image.go
@@ -566,6 +566,11 @@ func (c *imageCmd) run(conf *config.Config, args []string) error {
public = i18n.G("yes")
}
+ cached := i18n.G("no")
+ if info.Cached {
+ cached = i18n.G("yes")
+ }
+
autoUpdate := i18n.G("disabled")
if info.AutoUpdate {
autoUpdate = i18n.G("enabled")
@@ -603,6 +608,7 @@ func (c *imageCmd) run(conf *config.Config, args []string) error {
fmt.Printf(" - %s\n", alias.Name)
}
}
+ fmt.Printf(i18n.G("Cached: %s")+"\n", cached)
fmt.Printf(i18n.G("Auto update: %s")+"\n", autoUpdate)
if info.UpdateSource != nil {
fmt.Println(i18n.G("Source:"))
|
lxc/image: Expose the "cached" flag
|
lxc_lxd
|
train
|
34306301167c4cf1657a767e4228a2d5aedc22f5
|
diff --git a/lib/google_static_maps_helper.rb b/lib/google_static_maps_helper.rb
index <HASH>..<HASH> 100644
--- a/lib/google_static_maps_helper.rb
+++ b/lib/google_static_maps_helper.rb
@@ -4,6 +4,7 @@ require File.dirname(__FILE__) + '/google_static_maps_helper/map'
require File.dirname(__FILE__) + '/google_static_maps_helper/location'
require File.dirname(__FILE__) + '/google_static_maps_helper/marker'
require File.dirname(__FILE__) + '/google_static_maps_helper/path'
+require File.dirname(__FILE__) + '/google_static_maps_helper/gmap_polyline_encoder'
#
# The Google Static Map Helper provides a simple interface to the
diff --git a/lib/google_static_maps_helper/path.rb b/lib/google_static_maps_helper/path.rb
index <HASH>..<HASH> 100644
--- a/lib/google_static_maps_helper/path.rb
+++ b/lib/google_static_maps_helper/path.rb
@@ -129,7 +129,11 @@ module GoogleStaticMapsHelper
end
def encoded_url_points
- ''
+ encoder = GMapPolylineEncoder.new
+ points_as_array = points.map { |location| [location.lat, location.lng]}
+ result = encoder.encode(points_as_array)
+
+ "enc:#{result[:points]}"
end
def unencoded_url_points
diff --git a/spec/path_spec.rb b/spec/path_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/path_spec.rb
+++ b/spec/path_spec.rb
@@ -167,6 +167,17 @@ describe GoogleStaticMapsHelper::Path do
end
describe "encoded poly lines" do
+ before do
+ @path.encode_points = true
+ end
+
+ it "should include 'enc:'" do
+ @path.url_params.should include('enc:')
+ end
+
+ it "should include encoded version of lng and lat" do
+ @path.url_params.should include('_ibE_seK_seK_seK')
+ end
end
end
end
|
It seems like the encoder returns the expected encoded points.
|
thhermansen_google_static_maps_helper
|
train
|
8c2b6e69c9571870b38fd4d17599da61dc913fad
|
diff --git a/gitenberg/actions.py b/gitenberg/actions.py
index <HASH>..<HASH> 100644
--- a/gitenberg/actions.py
+++ b/gitenberg/actions.py
@@ -9,7 +9,7 @@ def get_id(repo):
book = Book(None, repo_name=repo)
repo = book.github_repo.github.repository(orgname, repo)
- print repo.id
+ print (repo.id)
return repo.id
def get_book(repo_name):
@@ -27,11 +27,11 @@ def delete(repo_name):
repo = book.github_repo.github.repository(orgname, repo_name)
if repo:
if repo.delete():
- print "{} deleted".format(repo_name)
+ print ("{} deleted".format(repo_name))
else:
- print "couldn't delete {}".format(repo_name)
+ print ("couldn't delete {}".format(repo_name))
else:
- print "{} didn't exist".format(repo_name)
+ print ("{} didn't exist".format(repo_name))
def add_generated_cover(repo_name, tag=False):
book = get_cloned_book(repo_name)
diff --git a/gitenberg/book.py b/gitenberg/book.py
index <HASH>..<HASH> 100644
--- a/gitenberg/book.py
+++ b/gitenberg/book.py
@@ -182,7 +182,7 @@ class Book():
self.fetch()
self.make()
self.push()
- print u"{0} {1} added".format(self.book_id, self.meta._repo)
+ print (u"{0} {1} added".format(self.book_id, self.meta._repo))
except sh.ErrorReturnCode_12:
logging.error(u"{0} {1} timeout".format(self.book_id, self.meta._repo))
except sh.ErrorReturnCode_23:
@@ -247,7 +247,7 @@ class Book():
)
return cover_image
except OSError:
- print "OSError, probably Cairo not installed."
+ print ("OSError, probably Cairo not installed.")
return None
def add_covers(self):
diff --git a/gitenberg/clone.py b/gitenberg/clone.py
index <HASH>..<HASH> 100644
--- a/gitenberg/clone.py
+++ b/gitenberg/clone.py
@@ -56,6 +56,6 @@ class CloneVat(object):
self.local_repo = git.Repo.clone_from(self.get_clone_url_ssh(), self.library_book_dir())
return True, "Success! Cloned {0}".format(self.book_repo_name)
except git.exc.GitCommandError as e:
- print e
+ print (e)
logging.debug("clone ran into an issue, likely remote doesn't exist")
return False, "Error git returned a fail code"
diff --git a/gitenberg/tests/test_cover.py b/gitenberg/tests/test_cover.py
index <HASH>..<HASH> 100755
--- a/gitenberg/tests/test_cover.py
+++ b/gitenberg/tests/test_cover.py
@@ -23,7 +23,7 @@ class TestMakeCovers(unittest.TestCase):
self.assertTrue(os.path.exists(self.test_path))
except OSError:
# eat this exception so the test will pass in server environments
- print "OSError, probably Cairo not installed."
+ print ("OSError, probably Cairo not installed.")
return None
def tearDown(self):
diff --git a/gitenberg/tests/test_metadata.py b/gitenberg/tests/test_metadata.py
index <HASH>..<HASH> 100644
--- a/gitenberg/tests/test_metadata.py
+++ b/gitenberg/tests/test_metadata.py
@@ -37,7 +37,7 @@ class Yaml2MarcTest(unittest.TestCase):
self.pandata = Pandata(TESTDATA_FILENAME)
def test_pandata(self):
- print self.pandata
+ print (self.pandata)
self.assertEqual( self.pandata.gutenberg_issued , "2007-03-03")
self.assertTrue( isinstance( self.pandata.creator , dict))
self.assertTrue( isinstance( self.pandata.subjects[0] , tuple ))
diff --git a/gitenberg/workflow.py b/gitenberg/workflow.py
index <HASH>..<HASH> 100644
--- a/gitenberg/workflow.py
+++ b/gitenberg/workflow.py
@@ -63,10 +63,10 @@ def apply_list(arg_action, id_list):
for book_id in id_list:
try:
book = action(book_id)
- print u'{}\t{}\t{}'.format(arg_action, book_id, book.meta.title)
+ print (u'{}\t{}\t{}'.format(arg_action, book_id, book.meta.title))
book.remove()
except Exception as e:
- print u'error\t{}'.format(book_id)
+ print (u'error\t{}'.format(book_id))
logging.error(u"Error processing: {}\r{}".format(book_id, e))
|
Changed all print statements to make them compatible with Python 3
|
gitenberg-dev_gitberg
|
train
|
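The gitenberg diff only parenthesizes the arguments, which works for single-argument prints on both interpreters; the sturdier route on Python 2 is the `__future__` import, sketched here with an illustrative value:

```python
# On Python 2 this turns print into a real function, so the calls below
# behave identically on Python 2 and 3 (it is a no-op on Python 3):
from __future__ import print_function

repo_name = "example-repo"  # illustrative value
print("{} deleted".format(repo_name))
print(u"error\t{}".format(repo_name))
```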
c93074d0289cf873047bf987a57cf49ed4baf479
|
diff --git a/Lib/fontbakery/specifications/googlefonts.py b/Lib/fontbakery/specifications/googlefonts.py
index <HASH>..<HASH> 100644
--- a/Lib/fontbakery/specifications/googlefonts.py
+++ b/Lib/fontbakery/specifications/googlefonts.py
@@ -880,30 +880,16 @@ def ttfautohint_stats(font):
from io import BytesIO
from fontTools.ttLib import TTFont
- try:
- hinted_size = os.stat(font).st_size
-
- buf = BytesIO()
- TTFont(font).save(buf)
- data = ttfautohint(in_buffer=buf.getvalue(), dehint=True)
-# dehinted = TTFont(BytesIO(data))
- dehinted_size = len(data)
- except OSError:
- return {"missing": True}
-
+ original_buffer = BytesIO()
+ TTFont(font).save(original_buffer)
+ dehinted_buffer = ttfautohint(in_buffer=original_buffer.getvalue(),
+ dehint=True)
return {
- "dehinted_size": dehinted_size,
- "hinted_size": hinted_size,
+ "dehinted_size": len(dehinted_buffer),
+ "hinted_size": os.stat(font).st_size,
"version": libttfautohint.version_string
}
-TTFAUTOHINT_MISSING_MSG = (
- "ttfautohint is not available!"
- " You really MUST check the fonts with this tool."
- " To install it, see https://github.com"
- "/googlefonts/gf-docs/blob/master"
- "/ProjectChecklist.md#ttfautohint")
-
@check(
id = 'com.google.fonts/check/054',
conditions = ['ttfautohint_stats']
@@ -914,22 +900,6 @@ def com_google_fonts_check_054(font, ttfautohint_stats):
Current implementation simply logs useful info
but there's no fail scenario for this checker."""
- if "missing" in ttfautohint_stats:
- yield WARN, Message("ttfa-missing",
- TTFAUTOHINT_MISSING_MSG)
- return
-
- if ttfautohint_stats["dehinted_size"] == 0:
- yield WARN, Message("ttfa-bug",
- ("ttfautohint --dehint reports that"
- " \"This font has already been processed"
- " with ttfautohint\"."
- " This is a bug in an old version of ttfautohint."
- " You'll need to upgrade it."
- " See https://github.com/googlefonts/fontbakery/"
- "issues/1043#issuecomment-249035069"))
- return
-
hinted = ttfautohint_stats["hinted_size"]
dehinted = ttfautohint_stats["dehinted_size"]
increase = hinted - dehinted
@@ -1032,11 +1002,6 @@ def com_google_fonts_check_056(ttFont, ttfautohint_stats):
" in the font version entries of the 'name' table."
" Such font version strings are currently:"
" {}").format(version_strings)
- elif "missing" in ttfautohint_stats:
- # Even though we skip here, we still have a chance of performing
- # early portions of the check in the 2 error/info scenarios above
- # regardless of the avaiability of ttfautohint.
- yield SKIP, TTFAUTOHINT_MISSING_MSG
else:
installed_ttfa = ttfautohint_stats["version"]
try:
diff --git a/tests/specifications/googlefonts_test.py b/tests/specifications/googlefonts_test.py
index <HASH>..<HASH> 100644
--- a/tests/specifications/googlefonts_test.py
+++ b/tests/specifications/googlefonts_test.py
@@ -528,15 +528,15 @@ def test_check_032():
assert status == WARN
-def NOT_IMPLEMENTED_test_check_054():
+def test_check_054():
""" Show hinting filesize impact. """
- # from fontbakery.specifications.googlefonts import com_google_fonts_check_054 as check
- # TODO: Implement-me!
- #
- # code-paths:
- # - WARN, code="ttfa-missing"
- # - WARN, code="ttfa-bug"
- # - INFO
+ from fontbakery.specifications.googlefonts import (com_google_fonts_check_054 as check,
+ ttfautohint_stats)
+ font = "data/test/mada/Mada-Regular.ttf"
+
+ print('Test this check always emits an INFO result...')
+ status, message = list(check(TTFont(font), ttfautohint_stats(font)))[-1]
+ assert status == INFO
def test_check_055():
@@ -578,7 +578,6 @@ def NOT_IMPLEMENTED_test_check_056():
# code-paths:
# - FAIL, code="lacks-version-strings"
# - INFO, "Could not detect which version of ttfautohint was used in this font."
- # - SKIP, TTFAUTOHINT_MISSING_MSG
# - WARN, "detected an old ttfa version"
# - PASS
# - FAIL, code="parse-error"
|
simplify ttfautohint-related checks and ...
... implement code-test for com.google.fonts/check/<I>
(issue #<I>)
|
googlefonts_fontbakery
|
train
|
408bb29acf66f55f50dd2aeb91a066278823168c
|
diff --git a/src/Model.php b/src/Model.php
index <HASH>..<HASH> 100644
--- a/src/Model.php
+++ b/src/Model.php
@@ -161,6 +161,64 @@ trait Model
*/
public function __set(string $prop, $value)
{
+ $annotations = $this->__ornamentalize();
+ foreach ($annotations['methods'] as $name => $anns) {
+ if (isset($anns['set']) && $anns['set'] == $prop) {
+ return call_user_func([$this, $name], $value);
+ }
+ }
+ if (isset($annotations['properties'][$prop])) {
+ if ($annotations['properties'][$prop]['readOnly']) {
+ // Modifying a readOnly (protected) property is only valid from
+ // within the same class context. This is a hacky way to check
+ // that, since `__set` obviously by default means "inside class
+ // context".
+ $debugs = debug_backtrace();
+ do {
+ $debug = array_shift($debugs);
+ } while ($debug['function'] == '__set' && $debugs);
+
+ $error = function () use ($debugs, $prop) {
+ $debug = $debugs[1] ?? $debugs[0];
+ throw new Error(
+ sprintf(
+ "Cannot access protected property %s::%s in %s:%d",
+ get_class($this),
+ $prop,
+ $debug['file'],
+ $debug['line']
+ ),
+ 0
+ );
+ };
+
+ // No class context? That's definitely illegal.
+ if (!isset($debug['class'])) {
+ $error();
+ }
+
+ // Is the calling class context not either ourselves, or a subclass?
+ // That's also illegal.
+ $reflection = new ReflectionClass($debug['class']);
+ $myclass = get_class($this);
+ if (!($reflection->getName() == $myclass || $reflection->isSubclassOf($myclass))) {
+ $error();
+ }
+ }
+ if (isset($annotations['properties'][$prop]['var'])
+ && class_exists($annotations['properties'][$prop]['var'])
+ && array_key_exists(
+ 'Ornament\Core\DecoratorInterface',
+ class_implements($annotations['properties'][$prop]['var'])
+ )
+ && $value instanceof $annotations['properties'][$prop]['var']
+ ) {
+ $value = $value->getSource();
+ }
+ if ($this->checkBaseType($annotations['properties'][$prop]) && !is_null($value)) {
+ settype($value, $annotations['properties'][$prop]['var']);
+ }
+ }
if (!property_exists($this->__state ?? new StdClass, $prop)) {
$debug = debug_backtrace()[0];
throw new Error(
@@ -174,63 +232,6 @@ trait Model
0
);
}
- $annotations = $this->__ornamentalize();
- if ($annotations['properties'][$prop]['readOnly']) {
- // Modifying a readOnly (protected) property is only valid from
- // within the same class context. This is a hacky way to check
- // that, since `__set` obviously by default means "inside class
- // context".
- $debugs = debug_backtrace();
- do {
- $debug = array_shift($debugs);
- } while ($debug['function'] == '__set' && $debugs);
-
- $error = function () use ($debugs, $prop) {
- $debug = $debugs[1] ?? $debugs[0];
- throw new Error(
- sprintf(
- "Cannot access protected property %s::%s in %s:%d",
- get_class($this),
- $prop,
- $debug['file'],
- $debug['line']
- ),
- 0
- );
- };
-
- // No class context? That's definitely illegal.
- if (!isset($debug['class'])) {
- $error();
- }
-
- // Is the calling class context not either ourselves, or a subclass?
- // That's also illegal.
- $reflection = new ReflectionClass($debug['class']);
- $myclass = get_class($this);
- if (!($reflection->getName() == $myclass || $reflection->isSubclassOf($myclass))) {
- $error();
- }
- }
- if (isset($annotations['properties'][$prop]['var'])
- && class_exists($annotations['properties'][$prop]['var'])
- && array_key_exists(
- 'Ornament\Core\DecoratorInterface',
- class_implements($annotations['properties'][$prop]['var'])
- )
- && $value instanceof $annotations['properties'][$prop]['var']
- ) {
- $value = $value->getSource();
- }
- foreach ($annotations['methods'] as $name => $anns) {
- if (isset($anns['set']) && $anns['set'] == $prop) {
- $value = call_user_func([$this, $name], $value);
- break;
- }
- }
- if ($this->checkBaseType($annotations['properties'][$prop]) && !is_null($value)) {
- settype($value, $annotations['properties'][$prop]['var']);
- }
$this->__state->$prop = $value;
return $value;
}
|
wrap in isset check, fix order, and short-circuit if we have a manual setter
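
A sketch of the resulting dispatch order, written in Python for illustration (the commit itself is PHP): a registered manual setter is looked up first and returns immediately, so the read-only and base-type checks never run for properties that have one.

```python
class Model(object):
    # property name -> manual setter method (stand-in for the @set annotations)
    _setters = {"name": "_set_name"}

    def _set_name(self, value):
        object.__setattr__(self, "name", value.strip().title())

    def __setattr__(self, prop, value):
        method = self._setters.get(prop)
        if method is not None:
            # short-circuit: the manual setter wins before any other checks
            return getattr(self, method)(value)
        # ...read-only and base-type coercion checks would follow here...
        object.__setattr__(self, prop, value)

m = Model()
m.name = "  ada lovelace "
print(m.name)  # 'Ada Lovelace'
```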
|
ornament-orm_core
|
train
|
c04b146c83c0ed6cf523809d9e4450460035ef0e
|
diff --git a/src/main/java/com/lazerycode/jmeter/mojo/RunJMeterMojo.java b/src/main/java/com/lazerycode/jmeter/mojo/RunJMeterMojo.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/lazerycode/jmeter/mojo/RunJMeterMojo.java
+++ b/src/main/java/com/lazerycode/jmeter/mojo/RunJMeterMojo.java
@@ -1,16 +1,15 @@
package com.lazerycode.jmeter.mojo;
-import com.lazerycode.jmeter.exceptions.IOException;
-import com.lazerycode.jmeter.json.TestConfig;
-import com.lazerycode.jmeter.testrunner.TestManager;
-import org.apache.commons.io.FileUtils;
+import java.io.File;
+
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Execute;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
-import java.io.File;
+import com.lazerycode.jmeter.json.TestConfig;
+import com.lazerycode.jmeter.testrunner.TestManager;
/**
* JMeter Maven plugin.
|
Upgrade to JMeter <I>
Fix imports
|
jmeter-maven-plugin_jmeter-maven-plugin
|
train
|
d65f1fb08a4b4bb41aff64a699af06435750d235
|
diff --git a/lib/config/folderconfiguration.go b/lib/config/folderconfiguration.go
index <HASH>..<HASH> 100644
--- a/lib/config/folderconfiguration.go
+++ b/lib/config/folderconfiguration.go
@@ -94,12 +94,10 @@ func (f *FolderConfiguration) CreateMarker() error {
if err != nil {
return err
}
- if dir, err := fs.Open("."); err == nil {
- if serr := dir.Sync(); err != nil {
- l.Debugln("fsync %q failed: %v", ".", serr)
- }
- } else {
- l.Infof("fsync %q failed: %v", ".", err)
+ if dir, err := fs.Open("."); err != nil {
+ l.Debugln("folder marker: open . failed:", err)
+ } else if err := dir.Sync(); err != nil {
+ l.Debugln("folder marker: fsync . failed:", err)
}
fs.Hide(".stfolder")
}
|
lib/config: Improve debug logging around folder marker
|
syncthing_syncthing
|
train
|
be1732779dc768a586d2236c912818a04bc7d049
|
diff --git a/clustering/infinispan/src/main/java/org/jboss/as/clustering/infinispan/invoker/RetryingCacheInvoker.java b/clustering/infinispan/src/main/java/org/jboss/as/clustering/infinispan/invoker/RetryingCacheInvoker.java
index <HASH>..<HASH> 100644
--- a/clustering/infinispan/src/main/java/org/jboss/as/clustering/infinispan/invoker/RetryingCacheInvoker.java
+++ b/clustering/infinispan/src/main/java/org/jboss/as/clustering/infinispan/invoker/RetryingCacheInvoker.java
@@ -28,10 +28,7 @@ import java.util.Arrays;
import java.util.EnumSet;
import java.util.Set;
-import javax.transaction.xa.XAException;
-
import org.infinispan.Cache;
-import org.infinispan.CacheException;
import org.infinispan.context.Flag;
import org.infinispan.remoting.transport.jgroups.SuspectException;
import org.infinispan.util.concurrent.TimeoutException;
@@ -87,21 +84,6 @@ public class RetryingCacheInvoker implements CacheInvoker {
exception = e;
} catch (SuspectException e) {
exception = e;
- } catch (CacheException e) {
- // If the tx commit (e.g. Cache.endBatch(true)) was rolled back due to failure on prepare, then retry
- Throwable cause = e.getCause();
- if (cause instanceof XAException) {
- XAException xaCause = (XAException) cause;
- // TimeoutException/SuspectException on prepare throws XAException.XA_RBROLLBACK
- // See org.infinispan.transaction.TransactionCoordinator.prepare(...)
- if (xaCause.errorCode == XAException.XA_RBROLLBACK) {
- exception = e;
- } else {
- throw e;
- }
- } else {
- throw e;
- }
}
if (retry) {
|
Remove logic to retry on tx commit on SuspectException.
TX commit no longer throws SuspectException as of Infinispan <I>.CR2.
|
wildfly_wildfly
|
train
|
e543c08f134159e2b57425beeebd42464be0ced8
|
diff --git a/browsermob-proxy/src/main/java/com/groupon/odo/bmp/BrowserMobProxyHandler.java b/browsermob-proxy/src/main/java/com/groupon/odo/bmp/BrowserMobProxyHandler.java
index <HASH>..<HASH> 100644
--- a/browsermob-proxy/src/main/java/com/groupon/odo/bmp/BrowserMobProxyHandler.java
+++ b/browsermob-proxy/src/main/java/com/groupon/odo/bmp/BrowserMobProxyHandler.java
@@ -289,6 +289,7 @@ public class BrowserMobProxyHandler extends SeleniumProxyHandler {
private static final int HEADER_BUFFER_DEFAULT = 2;
private static final long serialVersionUID = 1L;
+ private final String localIP = "127.0.0.1";
private Server jettyServer;
private int headerBufferMultiplier = HEADER_BUFFER_DEFAULT;
@@ -604,7 +605,7 @@ public class BrowserMobProxyHandler extends SeleniumProxyHandler {
if (urlStr.startsWith("http://")) {
int httpPort = com.groupon.odo.proxylib.Utils.getSystemPort(Constants.SYS_HTTP_PORT);
- urlStr = urlStr.replace(getHostNameFromURL(urlStr), "127.0.0.1:" + httpPort);
+ urlStr = urlStr.replace(getHostNameFromURL(urlStr), localIP + ":" + httpPort);
}
okRequestBuilder = okRequestBuilder.url(urlStr);
|
Switching to use a string constant for the IP
|
groupon_odo
|
train
|
7a44e6c36b47bb63bc56fc6fba2b8501edcae69e
|
diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -868,6 +868,8 @@ Faker::Space.distance_measurement #=> "15 parsecs"
```ruby
Faker::Music.key #=> "C"
+Faker::Music.chord #=> "Amaj7"
+
Faker::Music.instrument #=> "Ukelele"
```
diff --git a/lib/faker/music.rb b/lib/faker/music.rb
index <HASH>..<HASH> 100644
--- a/lib/faker/music.rb
+++ b/lib/faker/music.rb
@@ -5,6 +5,10 @@ module Faker
keys.sample + key_variants.sample
end
+ def chord
+ key + chord_types.sample
+ end
+
def instrument
fetch('music.instruments')
end
@@ -16,6 +20,10 @@ module Faker
def key_variants
['b', '#', '']
end
+
+ def chord_types
+ ['', 'maj', '6', 'maj7', 'm', 'm7', '-7', '7', 'dom7', 'dim', 'dim7', 'm7b5']
+ end
end
end
end
diff --git a/test/test_faker_music.rb b/test/test_faker_music.rb
index <HASH>..<HASH> 100644
--- a/test/test_faker_music.rb
+++ b/test/test_faker_music.rb
@@ -20,6 +20,13 @@ class TestFakerMusic < Test::Unit::TestCase
end
end
+ def test_chord_types
+ assert @tester.chord_types.size == 12
+ @tester.chord_types.each do |chord_type|
+ assert !chord_type.nil?
+ end
+ end
+
def test_key
assert @tester.name.match(/([A-Z])+(b|#){0,1}/)
end
@@ -27,4 +34,8 @@ class TestFakerMusic < Test::Unit::TestCase
def test_instrument
assert @tester.instrument.match(/\w+/)
end
+
+ def test_chord
+ assert @tester.chord.match(/([A-Z])+(b|#){0,1}+([a-zA-Z0-9]{0,4})/)
+ end
end
|
Add chords to music (#<I>)
|
stympy_faker
|
train
|
1f5f2f1085e227656186c215d2421bb4c00122fa
|
diff --git a/mutant/__init__.py b/mutant/__init__.py
index <HASH>..<HASH> 100644
--- a/mutant/__init__.py
+++ b/mutant/__init__.py
@@ -3,6 +3,6 @@ from __future__ import unicode_literals
import logging
-__version__ = VERSION = (0, 1, 0)
+__version__ = VERSION = (0, 1, 1)
logger = logging.getLogger('mutant')
|
Bumped version number to <I>
|
charettes_django-mutant
|
train
|
698509e3d3a63e89b0ae8fec7f025fc7ae78d642
|
diff --git a/src/Extension.php b/src/Extension.php
index <HASH>..<HASH> 100644
--- a/src/Extension.php
+++ b/src/Extension.php
@@ -112,7 +112,7 @@ abstract class Extension
/**
* @return NodeOptimizer
*/
- private function getNodeOptimizers()
+ public function getNodeOptimizers()
{
return array();
}
diff --git a/src/Extensions/Core.php b/src/Extensions/Core.php
index <HASH>..<HASH> 100644
--- a/src/Extensions/Core.php
+++ b/src/Extensions/Core.php
@@ -14,6 +14,7 @@ use Countable;
use InvalidArgumentException;
use Modules\Templating\Compiler\Functions\MethodFunction;
use Modules\Templating\Compiler\Functions\SimpleFunction;
+use Modules\Templating\Compiler\NodeOptimizer;
use Modules\Templating\Compiler\Operator;
use Modules\Templating\Compiler\Operators\ArithmeticOperators\AdditionOperator;
use Modules\Templating\Compiler\Operators\ArithmeticOperators\DivisionOperator;
@@ -67,6 +68,7 @@ use Modules\Templating\Compiler\Operators\UnaryOperators\PostDecrementOperator;
use Modules\Templating\Compiler\Operators\UnaryOperators\PostIncrementOperator;
use Modules\Templating\Compiler\Operators\UnaryOperators\PreDecrementOperator;
use Modules\Templating\Compiler\Operators\UnaryOperators\PreIncrementOperator;
+use Modules\Templating\Compiler\Optimizers\ForLoopOptimizer;
use Modules\Templating\Compiler\Tags\AssignTag;
use Modules\Templating\Compiler\Tags\CaseTag;
use Modules\Templating\Compiler\Tags\ElseIfTag;
@@ -253,6 +255,17 @@ class Core extends Extension
);
}
+ public function getNodeOptimizers()
+ {
+ $optimizers = array(
+ new ForLoopOptimizer()
+ );
+
+ return $optimizers;
+ }
+
+
+
/* Helper functions */
/**
|
Add ForLoopOptimizer to Core extension.
|
bugadani_Minty
|
train
|
d07f6d4c24348e9ff2eaa96709707f60ba04f863
|
diff --git a/prospector/encoding.py b/prospector/encoding.py
index <HASH>..<HASH> 100644
--- a/prospector/encoding.py
+++ b/prospector/encoding.py
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import codecs
import re
+import sys
class CouldNotHandleEncoding(Exception):
@@ -57,6 +58,9 @@ def determine_pyfile_encoding(path, default='utf8'):
def read_py_file(path):
+ if sys.version_info < (3, ):
+ return open(path, 'r').read()
+
encoding = determine_pyfile_encoding(path, default='utf8')
with codecs.open(path, encoding=encoding) as fip:
try:
diff --git a/prospector/tools/mccabe/__init__.py b/prospector/tools/mccabe/__init__.py
index <HASH>..<HASH> 100644
--- a/prospector/tools/mccabe/__init__.py
+++ b/prospector/tools/mccabe/__init__.py
@@ -32,8 +32,9 @@ class McCabeTool(ToolBase):
for code_file in found_files.iter_module_paths():
try:
+ contents = read_py_file(code_file)
tree = ast.parse(
- read_py_file(code_file),
+ contents,
filename=code_file,
)
except CouldNotHandleEncoding as err:
|
[refs #<I>] Fixing encoding handling for python 3 and 2 - the ast module cannot handle encoding declarations when passed in as unicode objects (<URL>)
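
A short reproduction of the underlying limitation, for illustration (the failing behavior is specific to Python 2):

```python
import ast

src = u"# -*- coding: utf-8 -*-\nx = 1\n"
try:
    ast.parse(src)
except SyntaxError as err:
    print(err)  # Python 2: "encoding declaration in Unicode string"

# Raw bytes -- what read_py_file now returns on Python 2 -- parse fine,
# because ast can honor the encoding declaration itself.
ast.parse(b"# -*- coding: utf-8 -*-\nx = 1\n")
```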
|
PyCQA_prospector
|
train
|
e0dbc2e637cf5989e5ecbf21125a87e42faf4767
|
diff --git a/bcbio/variation/freebayes.py b/bcbio/variation/freebayes.py
index <HASH>..<HASH> 100644
--- a/bcbio/variation/freebayes.py
+++ b/bcbio/variation/freebayes.py
@@ -12,6 +12,7 @@ from bcbio.pipeline import config_utils
from bcbio.pipeline.shared import subset_variant_regions
from bcbio.provenance import do
from bcbio.variation import annotation, ploidy
+from bcbio.variation.vcfutils import get_paired_bams, is_sample_pair
def region_to_freebayes(region):
if isinstance(region, (list, tuple)):
@@ -57,6 +58,48 @@ def run_freebayes(align_bams, items, ref_file, assoc_files, region=None,
ref_file, config)
return ann_file
+
+def _run_freebayes_paired(align_bams, items, ref_file, assoc_files,
+ region=None, out_file=None):
+
+ config = items[0]["config"]
+ if out_file is None:
+ out_file = "%s-variants.vcf" % os.path.splitext(align_bams[0])[0]
+
+ tumor_bam, tumor_name, normal_bam, normal_name = get_paired_bams(
+ align_bams, items)
+
+ vcfsamplediff = config_utils.get_program("vcfsamplediff", config)
+
+ if out_file is None:
+ out_file = "%s-paired-variants.vcf" % os.path.splitext(
+ align_bams[0])[0]
+
+ if not file_exists(out_file):
+ with file_transaction(out_file) as tx_out_file:
+ cl = [config_utils.get_program("freebayes", config),
+ "--pooled-discrete", "--pvar", "0.7", "--genotype-qualities"]
+
+ bam.index(tumor_bam)
+ bam.index(normal_bam)
+
+ cl += [normal_bam, tumor_bam]
+ cl += _freebayes_options_from_config(items, config["algorithm"],
+ out_file, region)
+ cl = " ".join(cl)
+ cl += (" | {vcfsamplediff} -s VT {normal_name} {tumor_name} - >"
+ " {tx_out_file}")
+ cl = cl.format(**locals())
+
+ do.run(cl, "Genotyping paired variants with FreeBayes", {})
+ clean_vcf_output(out_file, _clean_freebayes_output, "nodups")
+
+ ann_file = annotation.annotate_nongatk_vcf(out_file, align_bams,
+ assoc_files["dbsnp"], ref_file,
+ config)
+ return ann_file
+
+
def _move_vcf(orig_file, new_file):
"""Move a VCF file with associated index.
"""
|
Add support for Freebayes paired tumor/normal variant calling
Currently untested and not plugged into the main pipeline yet
|
bcbio_bcbio-nextgen
|
train
|
fde7676aff9243fb04046640c7155661a0bc5308
|
diff --git a/eliot/tests/test_output.py b/eliot/tests/test_output.py
index <HASH>..<HASH> 100644
--- a/eliot/tests/test_output.py
+++ b/eliot/tests/test_output.py
@@ -249,7 +249,7 @@ class MemoryLoggerTests(TestCase):
for i in range(write_count):
logger.write(msg, serializer)
- msgs = list({} for i in range(thread_count))
+ msgs = list({u"i": i} for i in range(thread_count))
serializers = list(object() for i in range(thread_count))
write_args = zip(msgs, serializers)
threads = list(Thread(target=write, args=args) for args in write_args)
|
Make the msg dicts distinct values
Otherwise list.index always just picks the first one.
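
A minimal illustration (plain Python, outside the test) of why identical dicts defeat the lookup:

```python
# list.index compares with ==, and {} == {}, so it always resolves to 0.
msgs = [{} for i in range(3)]
print(msgs.index(msgs[2]))  # 0

# Distinct payloads make each message attributable to the thread that wrote it.
msgs = [{u"i": i} for i in range(3)]
print(msgs.index(msgs[2]))  # 2
```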
|
itamarst_eliot
|
train
|
59e898e467327e6b2ae57ce48f9ed351a6d958f5
|
diff --git a/src/Bindings/Browser/JSONConstants.php b/src/Bindings/Browser/JSONConstants.php
index <HASH>..<HASH> 100644
--- a/src/Bindings/Browser/JSONConstants.php
+++ b/src/Bindings/Browser/JSONConstants.php
@@ -240,6 +240,20 @@ class JSONConstants
const JSON_FEATURE_DESCRIPTION = "description";
const JSON_FEATURE_DATA = "featureData";
+ protected static $FEATURE_KEYS = array(
+ self::JSON_FEATURE_ID,
+ self::JSON_FEATURE_URL,
+ self::JSON_FEATURE_COMMON_NAME,
+ self::JSON_FEATURE_VERSION_LABEL,
+ self::JSON_FEATURE_DESCRIPTION,
+ self::JSON_FEATURE_DATA
+ );
+
+ public static function getFeatureKeys()
+ {
+ return self::$FEATURE_KEYS;
+ }
+
const JSON_OBJECT_PROPERTIES = "properties";
const JSON_OBJECT_SUCCINCT_PROPERTIES = "succinctProperties";
const JSON_OBJECT_PROPERTIES_EXTENSION = "propertiesExtension";
@@ -262,7 +276,6 @@ class JSONConstants
self::JSON_OBJECT_ACL,
self::JSON_OBJECT_EXACT_ACL,
self::JSON_OBJECT_POLICY_IDS,
- self::JSON_OBJECT_POLICY_IDS_IDS,
self::JSON_OBJECT_RENDITIONS
);
@@ -276,6 +289,15 @@ class JSONConstants
return self::$OBJECT_KEYS;
}
+ protected static $POLICY_IDS_KEYS = array(
+ self::JSON_OBJECT_POLICY_IDS_IDS
+ );
+
+ public static function getPolicyIdsKeys()
+ {
+ return self::$POLICY_IDS_KEYS;
+ }
+
const JSON_OBJECTINFOLDER_OBJECT = "object";
const JSON_OBJECTINFOLDER_PATH_SEGMENT = "pathSegment";
@@ -331,11 +353,57 @@ class JSONConstants
const JSON_ACL_ACES = "aces";
const JSON_ACL_IS_EXACT = "isExact";
+ protected static $ACL_KEYS = array(
+ self::JSON_ACL_ACES,
+ self::JSON_ACL_IS_EXACT
+ );
+
+ /**
+ * Returns an array of all acl keys
+ *
+ * @return array
+ */
+ public static function getAclKeys()
+ {
+ return self::$ACL_KEYS;
+ }
+
const JSON_ACE_PRINCIPAL = "principal";
const JSON_ACE_PRINCIPAL_ID = "principalId";
+
+ protected static $ACE_PRINCIPAL_KEYS = array(
+ self::JSON_ACE_PRINCIPAL_ID
+ );
+
+ /**
+ * Returns an array of all ace principal keys
+ *
+ * @return array
+ */
+ public static function getAcePrincipalKeys()
+ {
+ return self::$ACE_PRINCIPAL_KEYS;
+ }
+
const JSON_ACE_PERMISSIONS = "permissions";
const JSON_ACE_IS_DIRECT = "isDirect";
+ protected static $ACE_KEYS = array(
+ self::JSON_ACE_PRINCIPAL,
+ self::JSON_ACE_PERMISSIONS,
+ self::JSON_ACE_IS_DIRECT
+ );
+
+ /**
+ * Returns an array of all ace keys
+ *
+ * @return array
+ */
+ public static function getAceKeys()
+ {
+ return self::$ACE_KEYS;
+ }
+
const JSON_RENDITION_STREAM_ID = "streamId";
const JSON_RENDITION_MIMETYPE = "mimeType";
const JSON_RENDITION_LENGTH = "length";
diff --git a/tests/Unit/Bindings/Browser/JSONConstantsTest.php b/tests/Unit/Bindings/Browser/JSONConstantsTest.php
index <HASH>..<HASH> 100644
--- a/tests/Unit/Bindings/Browser/JSONConstantsTest.php
+++ b/tests/Unit/Bindings/Browser/JSONConstantsTest.php
@@ -114,4 +114,44 @@ class JSONConstantsTest extends \PHPUnit_Framework_TestCase
JSONConstants::getRenditionKeys()
);
}
+
+ public function testGetFeatureKeysReturnsContentOfStaticArray()
+ {
+ $this->assertSame(
+ $this->getStaticAttribute('\\Dkd\\PhpCmis\\Bindings\\Browser\\JSONConstants', 'FEATURE_KEYS'),
+ JSONConstants::getFeatureKeys()
+ );
+ }
+
+ public function testGetPolicyIdsKeysReturnsContentOfStaticArray()
+ {
+ $this->assertSame(
+ $this->getStaticAttribute('\\Dkd\\PhpCmis\\Bindings\\Browser\\JSONConstants', 'POLICY_IDS_KEYS'),
+ JSONConstants::getPolicyIdsKeys()
+ );
+ }
+
+ public function testGetAclKeysReturnsContentOfStaticArray()
+ {
+ $this->assertSame(
+ $this->getStaticAttribute('\\Dkd\\PhpCmis\\Bindings\\Browser\\JSONConstants', 'ACL_KEYS'),
+ JSONConstants::getAclKeys()
+ );
+ }
+
+ public function testGetPrincipalKeysReturnsContentOfStaticArray()
+ {
+ $this->assertSame(
+ $this->getStaticAttribute('\\Dkd\\PhpCmis\\Bindings\\Browser\\JSONConstants', 'ACE_PRINCIPAL_KEYS'),
+ JSONConstants::getAcePrincipalKeys()
+ );
+ }
+
+ public function testGetAceKeysReturnsContentOfStaticArray()
+ {
+ $this->assertSame(
+ $this->getStaticAttribute('\\Dkd\\PhpCmis\\Bindings\\Browser\\JSONConstants', 'ACE_KEYS'),
+ JSONConstants::getAceKeys()
+ );
+ }
}
|
Add some missing getters to get a list of JSON Constants
Summary:
Add some missing public getters to get a list of
JSON Constants for a specific object type.
Reviewers: #forgetit-dev, claus.due
Reviewed By: #forgetit-dev, claus.due
Differential Revision: <URL>
|
dkd_php-cmis-client
|
train
|
247bb23233f8af8dc8dd70944d9382aeb4b6e80d
|
diff --git a/openquake/calculators/tests/event_based_risk_test.py b/openquake/calculators/tests/event_based_risk_test.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/tests/event_based_risk_test.py
+++ b/openquake/calculators/tests/event_based_risk_test.py
@@ -21,6 +21,7 @@ import unittest
import numpy
from openquake.baselib.general import gettemp
+from openquake.baselib.hdf5 import read_csv
from openquake.calculators.views import view, rst_table
from openquake.calculators.tests import CalculatorTestCase, strip_calc_id
from openquake.calculators.export import export
@@ -352,10 +353,17 @@ class EventBasedRiskTestCase(CalculatorTestCase):
hazard_calculation_id=str(self.calc.datastore.calc_id))
[fname] = export(('losses_by_event', 'csv'), self.calc.datastore)
self.assertEqualFiles('expected/agg_losses.csv', fname)
+ rup_ids = set(read_csv(fname, {None: '<S50'})['rup_id'])
[fname] = export(('agg_curves-rlzs', 'csv'), self.calc.datastore)
self.assertEqualFiles('expected/agg_curves.csv', fname)
+ # check that the IDs in losses_by_event.csv exist in ruptures.csv
+ [fname] = export(('ruptures', 'csv'), self.calc.datastore)
+ rupids = set(read_csv(fname, {None: '<S50'})['rupid'])
+ self.assertTrue(rup_ids <= rupids, 'There are non-existing rupture IDs'
+ ' in losses_by_event!')
+
def test_case_4_hazard(self):
# Turkey with SHARE logic tree; TODO: add site model
# it has 8 realizations but 4 of them have 0 ruptures
|
Added test [skip hazardlib]
|
gem_oq-engine
|
train
|
95c438dd0522c744dff67858d3d79924a515dfc4
|
diff --git a/test/providers/CouchDB.Provider.test.js b/test/providers/CouchDB.Provider.test.js
index <HASH>..<HASH> 100644
--- a/test/providers/CouchDB.Provider.test.js
+++ b/test/providers/CouchDB.Provider.test.js
@@ -107,6 +107,48 @@ jstest.run({
assert.isInstanceOf(Provider, couchDB[PROTO_PROPERY], "Expected the Generic Provider to be the prototype");
},
+ "Test CouchDB provider using a non existing server": function (test) {
+ test.async(1000);
+
+ var config = createConfig();
+ config.url = "http://nonexistingserver";
+ var couchDB = new CouchDB(config, nano);
+
+ couchDB.findAll(function (err) {
+ assert.hasValue(err, "Expected an error");
+ assert.areEqual("Server 'http://nonexistingserver/' not found.", err.message, "Expected another error message");
+ couchDB.recreate(function (err) {
+ test.complete();
+ });
+ });
+ },
+ "Test CouchDB provider using a non existing database": function (test) {
+ test.async(1000);
+
+ var config = createConfig();
+ config.database = "whatdatabase";
+ var couchDB = new CouchDB(config, nano);
+
+ couchDB.findAll(function (err) {
+ assert.hasValue(err, "Expected an error");
+ assert.areEqual("Database 'whatdatabase' not found.", err.message, "Expected another error message");
+ test.complete();
+ });
+ },
+ "Test CouchDB provider using a non existing user and password": function (test) {
+ test.async(1000);
+
+ var config = createConfig();
+ config.url = "http://who:what@localhost:5984";
+ var couchDB = new CouchDB(config, nano);
+
+ couchDB.findAll(function (err) {
+ assert.hasValue(err, "Expected an error");
+ assert.areEqual("Name or password is incorrect.", err.message, "Expected another error message");
+ test.complete();
+ });
+ },
+
"Test the create function to see if it creates the database": function (test) {
test.async(1000);
|
Added additional tests that test for failures (server, database, credential issues)
|
Crafity_crafity-storage
|
train
|
e019e0cb1b7c61b9af04b422a09e2203078f8341
|
diff --git a/app/assets/javascripts/media_magick/plupload_it.js b/app/assets/javascripts/media_magick/plupload_it.js
index <HASH>..<HASH> 100644
--- a/app/assets/javascripts/media_magick/plupload_it.js
+++ b/app/assets/javascripts/media_magick/plupload_it.js
@@ -89,23 +89,22 @@
});
});
- $(".attachmentVideoUploader").live('click', function() {
+ $(".attachmentVideoUploader-" + $container.data('relation')).live('click', function() {
var $attachment = $(this).parents('.attachment');
var $attachmentUploader = $(this).parents('.attachmentUploader');
- console.log($container.data('relation'));
- // $.get('/upload', {
- // model: $container.data('model'),
- // id: $container.data('id'),
- // relation: $container.data('relation'),
- // relation_id: $attachment.data('id'),
- // embedded_in_model: $attachmentUploader.data('embedded-in-model'),
- // embedded_in_id: $attachmentUploader.data('embedded-in-id'),
- // partial: $container.data('partial') === undefined ? '' : $container.data('partial'),
- // video: $(".attachmentVideoUploaderField").val()
- // }, function(data) {
- // $(".loadedAttachments").append(data);
- // });
+ $.get('/upload', {
+ model: $container.data('model'),
+ id: $container.data('id'),
+ relation: $container.data('relation'),
+ relation_id: $attachment.data('id'),
+ embedded_in_model: $attachmentUploader.data('embedded-in-model'),
+ embedded_in_id: $attachmentUploader.data('embedded-in-id'),
+ partial: $container.data('partial') === undefined ? '' : $container.data('partial'),
+ video: $(".attachmentVideoUploaderField").val()
+ }, function(data) {
+ $(".loadedAttachments").append(data);
+ });
});
});
diff --git a/app/helpers/media_magick/application_helper.rb b/app/helpers/media_magick/application_helper.rb
index <HASH>..<HASH> 100644
--- a/app/helpers/media_magick/application_helper.rb
+++ b/app/helpers/media_magick/application_helper.rb
@@ -28,7 +28,7 @@ module MediaMagick
end
def attachment_container_for_video(model, relation)
- raw "<input type=\"text\" name=\"#{model.id.to_s}[#{relation}]\" id=\"#{model.id.to_s}_#{relation}\" class=\"attachmentVideoUploaderField\"><a class=\"attachmentVideoUploader\" href=\"javascript://\">upload</a>"
+ raw "<input type=\"text\" name=\"#{model.id.to_s}[#{relation}]\" id=\"#{model.id.to_s}_#{relation}\" class=\"attachmentVideoUploaderField\"><a class=\"attachmentVideoUploader-#{relation}\" href=\"javascript://\">upload</a>"
end
private
|
bug fix - video upload with more than one uploader
|
nudesign_media_magick
|
train
|
2347c0454959f8767c2e1e051060e3890daeb364
|
diff --git a/lib/active_node/validations/uniqueness_validator.rb b/lib/active_node/validations/uniqueness_validator.rb
index <HASH>..<HASH> 100644
--- a/lib/active_node/validations/uniqueness_validator.rb
+++ b/lib/active_node/validations/uniqueness_validator.rb
@@ -10,10 +10,11 @@ module ActiveNode
private
def other_matching_records(record, attribute, value)
+ record_class = record.class
if record.persisted?
- record.class.find_by_cypher("Match (n:#{record.class.label}) where n.#{attribute} = {value} and n.id <> {id} return n", value: value, id: record.id)
+ record_class.find_by_cypher "Match (n:#{record_class.label}) where n.#{attribute} = {value} and id(n) <> {id} return n", value: value, id: record.id
else
- record.class.find_by_cypher("Match (n:#{record.class.label}) where n.#{attribute} = {value} return n", value: value)
+ record_class.find_by_cypher "Match (n:#{record_class.label}) where n.#{attribute} = {value} return n", value: value
end
end
end
|
Update uniqueness lookup by id to use neo4j's id(n)
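
For illustration, the same distinction expressed through the Neo4j Python driver (the driver and connection details here are assumptions; the gem issues the Cypher shown in the diff): `n.id` reads a user-stored property, whereas `id(n)` is the database's internal node id -- the value `record.id` actually carries.

```python
from neo4j import GraphDatabase  # assumed dependency for this sketch

driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "secret"))
with driver.session() as session:
    # Exclude the record under validation by internal id, not by a property.
    session.run(
        "MATCH (n:User) WHERE n.email = $value AND id(n) <> $id RETURN n",
        value="a@example.com", id=42,
    )
```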
|
klobuczek_active_node
|
train
|
c5a8d5ad014357d9819bf2fa022773585e32e8a4
|
diff --git a/ggplot/geoms/annotate.py b/ggplot/geoms/annotate.py
index <HASH>..<HASH> 100644
--- a/ggplot/geoms/annotate.py
+++ b/ggplot/geoms/annotate.py
@@ -10,14 +10,17 @@ from ..utils.exceptions import GgplotError
class annotate(object):
- def __init__(self, geom, x=None, y=None, xmin=None,
- xmax=None, ymin=None, ymax=None,
+ def __init__(self, geom, x=None, y=None,
+ xmin=None, xmax=None, xend=None,
+ ymin=None, ymax=None, yend=None,
**kwargs):
variables = locals()
# position only, and combined aesthetics
position = {loc: variables[loc]
- for loc in ('x', 'y', 'xmin', 'xmax', 'ymin', 'ymax')
+ for loc in ('x', 'y',
+ 'xmin', 'xmax', 'xend',
+ 'ymin', 'ymax', 'yend')
if variables[loc] is not None}
aesthetics = position.copy()
aesthetics.update(kwargs)
diff --git a/ggplot/geoms/geom_hline.py b/ggplot/geoms/geom_hline.py
index <HASH>..<HASH> 100644
--- a/ggplot/geoms/geom_hline.py
+++ b/ggplot/geoms/geom_hline.py
@@ -11,8 +11,7 @@ from .geom_segment import geom_segment
class geom_hline(geom):
DEFAULT_AES = {'color': 'black', 'linetype': 'solid',
- 'size': 1.5, 'alpha': 1, 'y': None,
- 'xmin': None, 'xmax': None}
+ 'size': 1.5, 'alpha': 1}
REQUIRED_AES = {'yintercept'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity',
'show_guide': False, 'inherit_aes': False}
diff --git a/ggplot/geoms/geom_vline.py b/ggplot/geoms/geom_vline.py
index <HASH>..<HASH> 100644
--- a/ggplot/geoms/geom_vline.py
+++ b/ggplot/geoms/geom_vline.py
@@ -11,8 +11,7 @@ from .geom_segment import geom_segment
class geom_vline(geom):
DEFAULT_AES = {'color': 'black', 'linetype': 'solid',
- 'size': 1.5, 'alpha': 1, 'x': None,
- 'ymin': None, 'ymax': None}
+ 'size': 1.5, 'alpha': 1}
REQUIRED_AES = {'xintercept'}
DEFAULT_PARAMS = {'stat': 'identity', 'position': 'identity',
'show_guide': False, 'inherit_aes': False}
|
Remove [x|y]min, [x|y]max from geom_[h|v]line
Add xend and yend to `annotate`
|
has2k1_plotnine
|
train
|
979a8d73e1ccf7e627fc87b3b46d0c32ea227940
|
diff --git a/test/e2e/framework/metrics_util.go b/test/e2e/framework/metrics_util.go
index <HASH>..<HASH> 100644
--- a/test/e2e/framework/metrics_util.go
+++ b/test/e2e/framework/metrics_util.go
@@ -210,7 +210,10 @@ type SchedulingMetrics struct {
SchedulingLatency LatencyMetric `json:"schedulingLatency"`
BindingLatency LatencyMetric `json:"bindingLatency"`
E2ELatency LatencyMetric `json:"e2eLatency"`
- ThroughputSamples []float64 `json:"throughputSamples"`
+ ThroughputAverage float64 `json:"throughputAverage"`
+ ThroughputPerc50 float64 `json:"throughputPerc50"`
+ ThroughputPerc90 float64 `json:"throughputPerc90"`
+ ThroughputPerc99 float64 `json:"throughputPerc99"`
}
func (l *SchedulingMetrics) SummaryKind() string {
diff --git a/test/e2e/scalability/density.go b/test/e2e/scalability/density.go
index <HASH>..<HASH> 100644
--- a/test/e2e/scalability/density.go
+++ b/test/e2e/scalability/density.go
@@ -222,6 +222,24 @@ func density30AddonResourceVerifier(numNodes int) map[string]framework.ResourceC
return constraints
}
+func computeAverage(sample []float64) float64 {
+ sum := 0.0
+ for _, value := range sample {
+ sum += value
+ }
+ return sum / float64(len(sample))
+}
+
+func computeQuantile(sample []float64, quantile float64) float64 {
+ Expect(sort.Float64sAreSorted(sample)).To(Equal(true))
+ Expect(quantile >= 0.0 && quantile <= 1.0).To(Equal(true))
+ index := int(quantile*float64(len(sample))) - 1
+ if index < 0 {
+ return math.NaN()
+ }
+ return sample[index]
+}
+
func logPodStartupStatus(
c clientset.Interface,
expectedPods int,
@@ -400,7 +418,16 @@ var _ = SIGDescribe("Density", func() {
latency, err := framework.VerifySchedulerLatency(c)
framework.ExpectNoError(err)
if err == nil {
- latency.ThroughputSamples = scheduleThroughputs
+ // Compute avg and quantiles of throughput (excluding last element, that's usually an outlier).
+ sampleSize := len(scheduleThroughputs)
+ if sampleSize > 1 {
+ scheduleThroughputs = scheduleThroughputs[:sampleSize-1]
+ sort.Float64s(scheduleThroughputs)
+ latency.ThroughputAverage = computeAverage(scheduleThroughputs)
+ latency.ThroughputPerc50 = computeQuantile(scheduleThroughputs, 0.5)
+ latency.ThroughputPerc90 = computeQuantile(scheduleThroughputs, 0.9)
+ latency.ThroughputPerc99 = computeQuantile(scheduleThroughputs, 0.99)
+ }
summaries = append(summaries, latency)
}
summaries = append(summaries, testPhaseDurations)
|
Compute avg and quantiles of scheduler throughput in density test
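
The two Go helpers transliterated to Python, to sanity-check the index arithmetic:

```python
def compute_average(sample):
    return sum(sample) / float(len(sample))

def compute_quantile(sample, quantile):
    assert sample == sorted(sample) and 0.0 <= quantile <= 1.0
    index = int(quantile * len(sample)) - 1
    return float("nan") if index < 0 else sample[index]

sample = sorted(float(i) for i in range(1, 11))  # 1.0 .. 10.0
print(compute_average(sample))         # 5.5
print(compute_quantile(sample, 0.5))   # 5.0 -- index 4
print(compute_quantile(sample, 0.99))  # 9.0 -- index 8, not the maximum
```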
|
kubernetes_kubernetes
|
train
|
d3c0dc2195449a2288c46657d1103ee91c0b9cc4
|
diff --git a/lib/fog/bluebox/models/blb/lb_application.rb b/lib/fog/bluebox/models/blb/lb_application.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/bluebox/models/blb/lb_application.rb
+++ b/lib/fog/bluebox/models/blb/lb_application.rb
@@ -10,13 +10,13 @@ module Fog
attribute :name
attribute :ip_v4
attribute :ip_v6
- attribute :services
attribute :description
attribute :created, :aliases => 'created_at'
- def services
- requires :id
- service.get_lb_services(id).body
+ def lb_services
+ Fog::Bluebox::BLB::LbServices.new({
+ :lb_application => self
+ })
end
end
diff --git a/lib/fog/bluebox/models/blb/lb_service.rb b/lib/fog/bluebox/models/blb/lb_service.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/bluebox/models/blb/lb_service.rb
+++ b/lib/fog/bluebox/models/blb/lb_service.rb
@@ -19,6 +19,10 @@ module Fog
end
+ def lb_application
+ collection.lb_application
+ end
+
end
end
end
diff --git a/lib/fog/bluebox/models/blb/lb_services.rb b/lib/fog/bluebox/models/blb/lb_services.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/bluebox/models/blb/lb_services.rb
+++ b/lib/fog/bluebox/models/blb/lb_services.rb
@@ -7,15 +7,16 @@ module Fog
class LbServices < Fog::Collection
model Fog::Bluebox::BLB::LbService
+ attr_accessor :data, :lb_application
+
def all
- data = service.get_lb_services.body
+ data = service.get_lb_services(lb_application.id).body
load(data)
end
- def get(application_id, service_id)
- if service_id && service = service.get_lb_services(service_id).body
- new(server)
- end
+ def get(lb_service_id)
+ lb_service = service.get_lb_service(lb_application.id, lb_service_id).body
+ new(lb_service)
rescue Fog::Bluebox::BLB::NotFound
nil
end
|
[bluebox|blb] lb_service collection implementation
|
fog_fog
|
train
|
5c49f87d4e5d5c9916a678e6eff1d6c0937c8ffc
|
diff --git a/raven/utils/__init__.py b/raven/utils/__init__.py
index <HASH>..<HASH> 100644
--- a/raven/utils/__init__.py
+++ b/raven/utils/__init__.py
@@ -8,12 +8,15 @@ raven.utils
import hashlib
import hmac
+import logging
try:
import pkg_resources
except ImportError:
pkg_resources = None
import sys
+logger = logging.getLogger('raven.errors')
+
def varmap(func, var, context=None, name=None):
"""
@@ -41,6 +44,32 @@ def varmap(func, var, context=None, name=None):
_VERSION_CACHE = {}
+def get_version_from_app(module_name, app):
+ if hasattr(app, 'get_version'):
+ get_version = app.get_version
+ if callable(get_version):
+ version = get_version()
+ else:
+ version = get_version
+ elif hasattr(app, 'VERSION'):
+ version = app.VERSION
+ elif hasattr(app, '__version__'):
+ version = app.__version__
+ elif pkg_resources:
+ # pull version from pkg_resources if distro exists
+ try:
+ version = pkg_resources.get_distribution(module_name).version
+ except pkg_resources.DistributionNotFound:
+ return None
+ else:
+ return None
+
+ if isinstance(version, (list, tuple)):
+ version = '.'.join(str(o) for o in version)
+
+ return version
+
+
def get_versions(module_list=None):
if not module_list:
return {}
@@ -57,28 +86,18 @@ def get_versions(module_list=None):
__import__(module_name)
except ImportError:
continue
- app = sys.modules[module_name]
- if hasattr(app, 'get_version'):
- get_version = app.get_version
- if callable(get_version):
- version = get_version()
- else:
- version = get_version
- elif hasattr(app, 'VERSION'):
- version = app.VERSION
- elif hasattr(app, '__version__'):
- version = app.__version__
- elif pkg_resources:
- # pull version from pkg_resources if distro exists
- try:
- version = pkg_resources.get_distribution(module_name).version
- except pkg_resources.DistributionNotFound:
- version = None
- else:
+
+ try:
+ app = sys.modules[module_name]
+ except KeyError:
+ continue
+
+ try:
+ version = get_version_from_app(module_name, app)
+ except Exception, e:
+ logger.exception(e)
version = None
- if isinstance(version, (list, tuple)):
- version = '.'.join(str(o) for o in version)
_VERSION_CACHE[module_name] = version
else:
version = _VERSION_CACHE[module_name]
|
Gracefully handle errors getting package versions (fixes GH-<I>)
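
A usage sketch of the extracted helper (module choice is illustrative): an exception inside any single detection strategy is now logged and yields None instead of aborting get_versions() for every module.

```python
import sys
from raven.utils import get_version_from_app

__import__("logging")
app = sys.modules["logging"]
# A stdlib module has no get_version/VERSION/__version__ and no
# pkg_resources distribution, so every strategy falls through to None.
print(get_version_from_app("logging", app))  # None
```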
|
getsentry_raven-python
|
train
|
fe92d96c3d56409627203e0cdd87fefae28fdc5e
|
diff --git a/lib/redfish/glassfish/interpreter/interpreter.rb b/lib/redfish/glassfish/interpreter/interpreter.rb
index <HASH>..<HASH> 100644
--- a/lib/redfish/glassfish/interpreter/interpreter.rb
+++ b/lib/redfish/glassfish/interpreter/interpreter.rb
@@ -173,10 +173,12 @@ module Redfish #nodoc
domain_options = domain_options(data['domain'] || {})
- run_context.task('domain', domain_options).action(:create)
- run_context.task('domain', domain_options).action(:start)
- run_context.task('domain', domain_options).action(:enable_secure_admin) if run_context.app_context.domain_secure
- run_context.task('domain', domain_options).action(:ensure_active)
+ if managed?(data['domain'])
+ run_context.task('domain', domain_options).action(:create)
+ run_context.task('domain', domain_options).action(:start)
+ run_context.task('domain', domain_options).action(:enable_secure_admin) if run_context.app_context.domain_secure
+ run_context.task('domain', domain_options).action(:ensure_active)
+ end
run_context.task('property_cache').action(:create)
interpret_system_facilities(run_context, data, domain_options)
@@ -281,7 +283,9 @@ module Redfish #nodoc
end
run_context.task('property_cache').action(:destroy)
- run_context.task('domain', domain_options).action(:complete)
+ if managed?(data['domain'])
+ run_context.task('domain', domain_options).action(:complete)
+ end
end
private
|
if the managed flag is set to false then skip managing the domain
|
realityforge_redfish
|
train
|
e79d821515754d6e6778525d202ba25992f68ff1
|
diff --git a/doc.go b/doc.go
index <HASH>..<HASH> 100644
--- a/doc.go
+++ b/doc.go
@@ -9,7 +9,7 @@ A native implementation of the RPM file specification in Go.
)
func main() {
- p, err := rpm.OpenPackage("my-package.rpm")
+ p, err := rpm.OpenPackageFile("my-package.rpm")
if err != nil {
panic(err)
}
diff --git a/yum/packageentry.go b/yum/packageentry.go
index <HASH>..<HASH> 100644
--- a/yum/packageentry.go
+++ b/yum/packageentry.go
@@ -2,6 +2,7 @@ package yum
import (
"fmt"
+ "time"
)
// PackageEntry is a RPM package as defined in a package repository database.
@@ -17,6 +18,7 @@ type PackageEntry struct {
package_size int64
release string
version string
+ time_build int64
}
type PackageEntries []PackageEntry
@@ -72,3 +74,7 @@ func (c *PackageEntry) Architecture() string {
func (c *PackageEntry) Epoch() int64 {
return c.epoch
}
+
+func (c *PackageEntry) BuildTime() time.Time {
+ return time.Unix(c.time_build, 0)
+}
diff --git a/yum/primarydb.go b/yum/primarydb.go
index <HASH>..<HASH> 100644
--- a/yum/primarydb.go
+++ b/yum/primarydb.go
@@ -42,6 +42,7 @@ const sqlSelectPackages = `SELECT
, location_href
, pkgId
, checksum_type
+ , time_build
FROM packages;`
type PrimaryDatabase struct {
@@ -116,7 +117,7 @@ func (c *PrimaryDatabase) Packages() (PackageEntries, error) {
p := PackageEntry{}
// scan the values into the slice
- if err = rows.Scan(&p.name, &p.architecture, &p.epoch, &p.version, &p.release, &p.package_size, &p.install_size, &p.archive_size, &p.locationhref, &p.checksum, &p.checksum_type); err != nil {
+ if err = rows.Scan(&p.name, &p.architecture, &p.epoch, &p.version, &p.release, &p.package_size, &p.install_size, &p.archive_size, &p.locationhref, &p.checksum, &p.checksum_type, &p.time_build); err != nil {
return nil, fmt.Errorf("Error scanning packages: %v", err)
}
|
Added BuildTime to PackageEntry struct
|
cavaliercoder_go-rpm
|
train
|
d8d2c7f502baebab1f1994dfaec843139eded747
|
diff --git a/sources/ch/epfl/lamp/util/SourceFile.java b/sources/ch/epfl/lamp/util/SourceFile.java
index <HASH>..<HASH> 100644
--- a/sources/ch/epfl/lamp/util/SourceFile.java
+++ b/sources/ch/epfl/lamp/util/SourceFile.java
@@ -89,6 +89,20 @@ public class SourceFile {
this.encoding = encoding;
}
+ /** Returns the short name (name without path) of this source file. */
+ public String getShortName() {
+ int start = name.lastIndexOf(File.separatorChar);
+ return start < 0 ? name : name.substring(start + 1);
+ }
+
+ /**
+ * Returns an instance of Position representing the given line and
+ * column of this source file.
+ */
+ public Position getPosition(int line, int column) {
+ return new Position(this, line, column);
+ }
+
/** Returns the content of the given line. */
public String getLine(int line) {
int index = lineNumber <= line ? nextIndex : (lineNumber = 0);
@@ -114,14 +128,6 @@ public class SourceFile {
}
}
- /**
- * Returns an instance of Position representing the given line and
- * column of this source file.
- */
- public Position getPosition(int line, int column) {
- return new Position(this, line, column);
- }
-
/** Returns the name of this source file. */
public String toString() {
return name;
|
- Added method getShortName
|
scala_scala
|
train
|
fadb8797f948c8f15aaa3191374da8f80af0e4b1
|
diff --git a/www/src/py2js.js b/www/src/py2js.js
index <HASH>..<HASH> 100644
--- a/www/src/py2js.js
+++ b/www/src/py2js.js
@@ -275,7 +275,8 @@ var $add_yield_from_code = $B.parser.$add_yield_from_code = function(yield_ctx)
pnode.bindings = pnode.bindings || {}
for(attr in repl){
- replace_with = replace_with.replace(new RegExp(attr, 'g'), repl[attr])
+ replace_with = replace_with.replace(new RegExp("\\b" + attr + "\\b", 'g'),
+ repl[attr])
// Add internal names to node bindings
pnode.bindings[repl[attr]] = true
}
@@ -296,6 +297,7 @@ var $add_yield_from_code = $B.parser.$add_yield_from_code = function(yield_ctx)
}
var new_node = new $YieldFromMarkerNode(params)
+
replace_node(pnode, new_node)
}
@@ -335,7 +337,7 @@ var $_SyntaxError = $B.parser.$_SyntaxError = function (context,msg,indent){
src, $pos, line_num, root)
}
$B.$SyntaxError(module, 'invalid syntax', src, $pos, line_num, root)
- }else{throw $B.$IndentationError(module, msg, src, $pos)}
+ }else{throw $B.$IndentationError(module, msg, src, $pos, line_num, root)}
}
/*
@@ -9299,7 +9301,10 @@ var inImported = $B.inImported = function(module){
var loop = $B.loop = function(){
if($B.tasks.length == 0){
// No more task to process.
- if(idb_cx){idb_cx.result.close()}
+ if(idb_cx){
+ idb_cx.result.close()
+ idb_cx.$closed = true
+ }
return
}
var task = $B.tasks.shift(),
@@ -9396,6 +9401,11 @@ function required_stdlib_imports(imports, start){
$B.run_script = function(src, name, run_loop){
// run_loop is set to true if run_script is added to tasks in
// ajax_load_script
+ if(run_loop){
+ if(idb_cx.$closed){
+ $B.tasks.push([$B.idb_open])
+ }
+ }
$B.$py_module_path[name] = $B.script_path
try{
var root = $B.py2js(src, name, name),
@@ -9408,7 +9418,7 @@ $B.run_script = function(src, name, run_loop){
__file__: $B.script_path + "/" + name
}
$B.file_cache[script.__file__] = src
- if($B.debug > 1){$log(js)}
+ if($B.debug > 1){console.log(js)}
}catch(err){
handle_error(err)
}
|
Fix bug in "yield from"; set attribute $closed on indexedDB instance to reopen the db in run_script if it was closed
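
The word-boundary part of the fix, transliterated to Python for illustration (the commit itself is JavaScript; `\b` behaves the same in both regex dialects):

```python
import re

code = "result = res + _res0"
# A bare pattern also rewrites identifiers that merely contain the name.
print(re.sub("res", "R", code))       # Rult = R + _R0
# Anchored with \b, only the standalone identifier is replaced.
print(re.sub(r"\bres\b", "R", code))  # result = R + _res0
```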
|
brython-dev_brython
|
train
|
aecd401cce4f83fed11a0cc013418bd4bdc76662
|
diff --git a/eZ/Publish/Core/FieldType/Tests/CountryTest.php b/eZ/Publish/Core/FieldType/Tests/CountryTest.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/Core/FieldType/Tests/CountryTest.php
+++ b/eZ/Publish/Core/FieldType/Tests/CountryTest.php
@@ -11,6 +11,7 @@ namespace eZ\Publish\Core\FieldType\Tests;
use eZ\Publish\Core\FieldType\Country\Type as Country;
use eZ\Publish\Core\FieldType\Country\Value as CountryValue;
+use eZ\Publish\SPI\FieldType\Value as SPIValue;
/**
* @group fieldType
@@ -399,4 +400,22 @@ class CountryTest extends FieldTypeTest
),
);
}
+
+ public function provideDataForGetName()
+ {
+ return array(
+ array(
+ new CountryValue(),
+ ''
+ ),
+ array(
+ new CountryValue( array( 'FR' => array( 'Name' => 'France' ) ) ),
+ 'France'
+ ),
+ array(
+ new CountryValue( array( 'FR' => array( 'Name' => 'France' ), 'DE' => array( 'Name' => 'Deutschland' ) ) ),
+ 'France, Deutschland'
+ ),
+ );
+ }
}
diff --git a/eZ/Publish/Core/FieldType/Tests/FieldTypeTest.php b/eZ/Publish/Core/FieldType/Tests/FieldTypeTest.php
index <HASH>..<HASH> 100644
--- a/eZ/Publish/Core/FieldType/Tests/FieldTypeTest.php
+++ b/eZ/Publish/Core/FieldType/Tests/FieldTypeTest.php
@@ -11,6 +11,7 @@ namespace eZ\Publish\Core\FieldType\Tests;
use PHPUnit_Framework_TestCase;
use Exception;
+use eZ\Publish\SPI\FieldType\Value as SPIValue;
abstract class FieldTypeTest extends PHPUnit_Framework_TestCase
{
@@ -209,6 +210,13 @@ abstract class FieldTypeTest extends PHPUnit_Framework_TestCase
abstract public function provideInputForFromHash();
/**
+ * Provides data for the getName() test.
+ *
+ * @return array
+ */
+ abstract public function provideDataForGetName();
+
+ /**
* Provide data sets with field settings which are considered valid by the
* {@link validateFieldSettings()} method.
*
@@ -398,6 +406,20 @@ abstract class FieldTypeTest extends PHPUnit_Framework_TestCase
);
}
+ /**
+ * @dataProvider provideDataForGetName
+ *
+ * @param SPIValue $spiValue
+ * @param string $expected
+ */
+ public function testGetName( SPIValue $value, $expected )
+ {
+ self::assertSame(
+ $expected,
+ $this->getFieldTypeUnderTest()->getName( $value )
+ );
+ }
+
public function testValidatorConfigurationSchema()
{
$fieldType = $this->getFieldTypeUnderTest();
|
EZP-<I>: Added FieldTypeTest::testGetName()
|
ezsystems_ezpublish-kernel
|
train
|
c0ebffa1dbe7a38e71833969788f1f6acaacca12
|
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -9,11 +9,13 @@ from uuid import UUID
from flaskext.zodb import (Model, List, Mapping, BTree, Timestamp, UUID4,
PersistentList, PersistentMapping, OOBTree)
-from ZODB.DemoStorage import DemoStorage
+import tempfile
+from os import path
+import shutil
+from ZODB.FileStorage import FileStorage
TESTING = True
-ZODB_STORAGE = DemoStorage
class TestModel(Model):
@@ -69,11 +71,17 @@ def retrieve():
@request_context
def testapp():
+ tmpdir = tempfile.mkdtemp()
+ dbfile = path.join(tmpdir, 'test.db')
app = Flask(__name__)
app.config.from_object(__name__)
+ app.config['ZODB_STORAGE'] = lambda: FileStorage(dbfile)
app.register_module(mod)
db.init_app(app)
- return app
+ try:
+ yield app
+ finally:
+ shutil.rmtree(tmpdir)
zodb = Tests(contexts=[testapp])
|
Test with temporary FileStorages
|
dag_flask-zodb
|
train
|
2d854c3505ccad66e9a7d94267e51bed800433c2
|
diff --git a/airflow/providers/google/cloud/operators/mlengine.py b/airflow/providers/google/cloud/operators/mlengine.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/google/cloud/operators/mlengine.py
+++ b/airflow/providers/google/cloud/operators/mlengine.py
@@ -1115,6 +1115,13 @@ class MLEngineStartTrainingJobOperator(BaseOperator):
:param job_dir: A Google Cloud Storage path in which to store training
outputs and other data needed for training. (templated)
:type job_dir: str
+ :param service_account: Optional service account to use when running the training application.
+ (templated)
+ The specified service account must have the `iam.serviceAccounts.actAs` role. The
+ Google-managed Cloud ML Engine service account must have the `iam.serviceAccountAdmin` role
+ for the specified service account.
+ If set to None or missing, the Google-managed Cloud ML Engine service account will be used.
+ :type service_account: str
:param project_id: The Google Cloud project name within which MLEngine training job should run.
If set to None or missing, the default project_id from the Google Cloud connection is used.
(templated)
@@ -1156,6 +1163,7 @@ class MLEngineStartTrainingJobOperator(BaseOperator):
'_runtime_version',
'_python_version',
'_job_dir',
+ '_service_account',
'_impersonation_chain',
]
@@ -1176,6 +1184,7 @@ class MLEngineStartTrainingJobOperator(BaseOperator):
runtime_version: Optional[str] = None,
python_version: Optional[str] = None,
job_dir: Optional[str] = None,
+ service_account: Optional[str] = None,
project_id: Optional[str] = None,
gcp_conn_id: str = 'google_cloud_default',
delegate_to: Optional[str] = None,
@@ -1197,6 +1206,7 @@ class MLEngineStartTrainingJobOperator(BaseOperator):
self._runtime_version = runtime_version
self._python_version = python_version
self._job_dir = job_dir
+ self._service_account = service_account
self._gcp_conn_id = gcp_conn_id
self._delegate_to = delegate_to
self._mode = mode
@@ -1244,6 +1254,9 @@ class MLEngineStartTrainingJobOperator(BaseOperator):
if self._job_dir:
training_request['trainingInput']['jobDir'] = self._job_dir
+ if self._service_account:
+ training_request['trainingInput']['serviceAccount'] = self._service_account
+
if self._scale_tier is not None and self._scale_tier.upper() == "CUSTOM":
training_request['trainingInput']['masterType'] = self._master_type
diff --git a/tests/providers/google/cloud/operators/test_mlengine.py b/tests/providers/google/cloud/operators/test_mlengine.py
index <HASH>..<HASH> 100644
--- a/tests/providers/google/cloud/operators/test_mlengine.py
+++ b/tests/providers/google/cloud/operators/test_mlengine.py
@@ -413,6 +413,7 @@ class TestMLEngineTrainingOperator(unittest.TestCase):
training_input['trainingInput']['runtimeVersion'] = '1.6'
training_input['trainingInput']['pythonVersion'] = '3.5'
training_input['trainingInput']['jobDir'] = 'gs://some-bucket/jobs/test_training'
+ training_input['trainingInput']['serviceAccount'] = 'test@serviceaccount.com'
success_response = self.TRAINING_INPUT.copy()
success_response['state'] = 'SUCCEEDED'
@@ -423,6 +424,7 @@ class TestMLEngineTrainingOperator(unittest.TestCase):
runtime_version='1.6',
python_version='3.5',
job_dir='gs://some-bucket/jobs/test_training',
+ service_account='test@serviceaccount.com',
**self.TRAINING_DEFAULT_ARGS,
)
training_op.execute(MagicMock())
|
Add service_account to Google ML Engine operator (#<I>)
|
apache_airflow
|
train
|
7eb3747d95e3443089ae8e96c77aa801b753babe
|
diff --git a/lib/parsers/ParseResult.js b/lib/parsers/ParseResult.js
index <HASH>..<HASH> 100644
--- a/lib/parsers/ParseResult.js
+++ b/lib/parsers/ParseResult.js
@@ -4,7 +4,7 @@ function ParseResult() {
this._req = {}
}
-requestRegex = /^([A-Z]+)\s+(\/.+)/
+requestRegex = /^([A-Z]+)\s+(\/.*)/
responseRegex = /^(\d\d\d)\s/
ParseResult.prototype = {}
|
Allow the root path / in requests
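
The regex change checked in Python for illustration (the source is JavaScript; `+` vs `*` semantics are identical):

```python
import re

old = re.compile(r"^([A-Z]+)\s+(/.+)")
new = re.compile(r"^([A-Z]+)\s+(/.*)")

print(old.match("GET /"))                # None -- /.+ needs a char after the slash
print(new.match("GET /").group(2))       # /
print(new.match("GET /users").group(2))  # /users
```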
|
agnoster_literapi
|
train
|
426393b2b3d8120235923d55b223107a10ad7822
|
diff --git a/lib/plugins/aws/package/lib/mergeIamTemplates.test.js b/lib/plugins/aws/package/lib/mergeIamTemplates.test.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/aws/package/lib/mergeIamTemplates.test.js
+++ b/lib/plugins/aws/package/lib/mergeIamTemplates.test.js
@@ -139,6 +139,114 @@ describe('#mergeIamTemplates()', () => {
})
);
+
+ it('should ensure IAM policies for custom named functions', () => {
+ const customFunctionName = 'foo-bar';
+ awsPackage.serverless.service.functions = {
+ [functionName]: {
+ name: customFunctionName,
+ artifact: 'test.zip',
+ handler: 'handler.hello',
+ },
+ };
+ serverless.service.setFunctionNames(); // Ensure to resolve function names
+
+ return awsPackage.mergeIamTemplates()
+ .then(() => {
+ const canonicalFunctionsPrefix =
+ `${awsPackage.serverless.service.service}-${awsPackage.provider.getStage()}`;
+
+ expect(awsPackage.serverless.service.provider.compiledCloudFormationTemplate
+ .Resources[awsPackage.provider.naming.getRoleLogicalId()]
+ ).to.deep.equal({
+ Type: 'AWS::IAM::Role',
+ Properties: {
+ AssumeRolePolicyDocument: {
+ Version: '2012-10-17',
+ Statement: [
+ {
+ Effect: 'Allow',
+ Principal: {
+ Service: [
+ 'lambda.amazonaws.com',
+ ],
+ },
+ Action: [
+ 'sts:AssumeRole',
+ ],
+ },
+ ],
+ },
+ Path: '/',
+ Policies: [
+ {
+ PolicyName: {
+ 'Fn::Join': [
+ '-',
+ [
+ awsPackage.provider.getStage(),
+ awsPackage.serverless.service.service,
+ 'lambda',
+ ],
+ ],
+ },
+ PolicyDocument: {
+ Version: '2012-10-17',
+ Statement: [
+ {
+ Effect: 'Allow',
+ Action: [
+ 'logs:CreateLogStream',
+ ],
+ Resource: [
+ {
+ 'Fn::Sub': 'arn:${AWS::Partition}:logs:${AWS::Region}:${AWS::AccountId}:'
+ + `log-group:/aws/lambda/${canonicalFunctionsPrefix}*:*`,
+ },
+ {
+ 'Fn::Sub': 'arn:${AWS::Partition}:logs:${AWS::Region}:${AWS::AccountId}:'
+ + `log-group:/aws/lambda/${customFunctionName}:*`,
+ },
+ ],
+ },
+ {
+ Effect: 'Allow',
+ Action: [
+ 'logs:PutLogEvents',
+ ],
+ Resource: [
+ {
+ 'Fn::Sub': 'arn:${AWS::Partition}:logs:${AWS::Region}:${AWS::AccountId}:'
+ + `log-group:/aws/lambda/${canonicalFunctionsPrefix}*:*:*`,
+ },
+ {
+ 'Fn::Sub': 'arn:${AWS::Partition}:logs:${AWS::Region}:${AWS::AccountId}:'
+ + `log-group:/aws/lambda/${customFunctionName}:*:*`,
+ },
+ ],
+ },
+ ],
+ },
+ },
+ ],
+ RoleName: {
+ 'Fn::Join': [
+ '-',
+ [
+ awsPackage.serverless.service.service,
+ awsPackage.provider.getStage(),
+ {
+ Ref: 'AWS::Region',
+ },
+ 'lambdaRole',
+ ],
+ ],
+ },
+ },
+ });
+ });
+ });
+
it('should add custom IAM policy statements', () => {
awsPackage.serverless.service.provider.iamRoleStatements = [
{
|
Configure test exposing issue with custom function name
Related to #<I>
|
serverless_serverless
|
train
|
1b3dae461bcc56c61b04c8be779599de1f808faa
|
diff --git a/src/main/java/org/gitlab/api/http/GitlabHTTPRequestor.java b/src/main/java/org/gitlab/api/http/GitlabHTTPRequestor.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/gitlab/api/http/GitlabHTTPRequestor.java
+++ b/src/main/java/org/gitlab/api/http/GitlabHTTPRequestor.java
@@ -24,7 +24,6 @@ import org.gitlab.api.AuthMethod;
import org.gitlab.api.GitlabAPI;
import org.gitlab.api.GitlabAPIException;
import org.gitlab.api.TokenType;
-import org.gitlab.api.models.GitlabCommit;
/**
* Gitlab HTTP Requestor
@@ -121,7 +120,7 @@ public class GitlabHTTPRequestor {
* Has a fluent api for method chaining
*
* @param key Form parameter Key
- * @param value Form parameter Value
+ * @param file File data
* @return this
*/
public GitlabHTTPRequestor withAttachment(String key, File file) {
@@ -208,7 +207,7 @@ public class GitlabHTTPRequestor {
try {
url = root.getAPIUrl(tailApiUrl);
} catch (IOException e) {
- throw new Error(e);
+ throw new RuntimeException(e);
}
}
@@ -260,7 +259,7 @@ public class GitlabHTTPRequestor {
handleAPIError(e, connection);
}
} catch (IOException e) {
- throw new Error(e);
+ throw new RuntimeException(e);
}
}
|
gh-<I>: Propagate exceptions as a RuntimeException instead of java.lang.Error (#<I>)
|
timols_java-gitlab-api
|
train
|
e38a56a69b60460a592fdf594d23fe164d6d0493
|
diff --git a/eureka-core/src/main/java/com/netflix/eureka/PeerAwareInstanceRegistry.java b/eureka-core/src/main/java/com/netflix/eureka/PeerAwareInstanceRegistry.java
index <HASH>..<HASH> 100644
--- a/eureka-core/src/main/java/com/netflix/eureka/PeerAwareInstanceRegistry.java
+++ b/eureka-core/src/main/java/com/netflix/eureka/PeerAwareInstanceRegistry.java
@@ -707,7 +707,7 @@ public class PeerAwareInstanceRegistry extends InstanceRegistry {
return true;
}
}
- return false;
+ return true; // Everything non-amazon is registrable.
}
/**
|
Enabling registration for non-amazon datacenters on server startup.
`PeerAwareInstanceRegistry` disregards any non-amazon instances received from a peer on initial startup. This change enables the registration.
Changes were discussed as part of pull request: <URL>
|
Netflix_eureka
|
train
|
d288cb7028bcd686d04d87c0199ad9cdc0a54a54
|
diff --git a/resource-server-utils/src/main/java/org/jasig/resourceserver/utils/cache/AggregationAwarePageCachingFilter.java b/resource-server-utils/src/main/java/org/jasig/resourceserver/utils/cache/AggregationAwarePageCachingFilter.java
index <HASH>..<HASH> 100644
--- a/resource-server-utils/src/main/java/org/jasig/resourceserver/utils/cache/AggregationAwarePageCachingFilter.java
+++ b/resource-server-utils/src/main/java/org/jasig/resourceserver/utils/cache/AggregationAwarePageCachingFilter.java
@@ -24,7 +24,7 @@ import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import net.sf.ehcache.CacheException;
-import net.sf.ehcache.constructs.web.filter.SimplePageCachingFilter;
+import net.sf.ehcache.constructs.web.filter.SimpleCachingHeadersPageCachingFilter;
import org.jasig.resourceserver.aggr.om.Included;
import org.jasig.resourceserver.utils.aggr.ResourcesElementsProvider;
@@ -37,7 +37,7 @@ import org.jasig.resourceserver.utils.aggr.ResourcesElementsProviderUtils;
* @author Eric Dalquist
* @version $Revision$
*/
-public class AggregationAwarePageCachingFilter extends SimplePageCachingFilter {
+public class AggregationAwarePageCachingFilter extends SimpleCachingHeadersPageCachingFilter {
private ResourcesElementsProvider resourcesElementsProvider;
/**
|
ETag, Last-Modified and Expires support
|
Jasig_resource-server
|
train
|
7d03d837e89fc7385cb7df034356aa94a635de02
|
diff --git a/com.adobe.epubcheck/src/com/adobe/epubcheck/opf/XRefChecker.java b/com.adobe.epubcheck/src/com/adobe/epubcheck/opf/XRefChecker.java
index <HASH>..<HASH> 100755
--- a/com.adobe.epubcheck/src/com/adobe/epubcheck/opf/XRefChecker.java
+++ b/com.adobe.epubcheck/src/com/adobe/epubcheck/opf/XRefChecker.java
@@ -136,12 +136,16 @@ public class XRefChecker {
public void registerReference(String srcResource, int srcLineNumber,
String refResource, String refFragment, int type) {
+ if( refResource.startsWith("data:") )
+ return;
references.add(new Reference(srcResource, srcLineNumber, refResource,
refFragment, type));
}
public void registerReference(String srcResource, int srcLineNumber,
String ref, int type) {
+ if( ref.startsWith("data:") )
+ return;
int hash = ref.indexOf("#");
String refResource;
String refFragment;
diff --git a/com.adobe.epubcheck/src/com/adobe/epubcheck/util/PathUtil.java b/com.adobe.epubcheck/src/com/adobe/epubcheck/util/PathUtil.java
index <HASH>..<HASH> 100755
--- a/com.adobe.epubcheck/src/com/adobe/epubcheck/util/PathUtil.java
+++ b/com.adobe.epubcheck/src/com/adobe/epubcheck/util/PathUtil.java
@@ -31,6 +31,8 @@ public class PathUtil {
public static String resolveRelativeReference(String base, String ref)
throws IllegalArgumentException {
+ if( ref.startsWith("data:") )
+ return ref;
try {
ref = URLDecoder.decode(ref, "UTF-8");
} catch (UnsupportedEncodingException e) {
|
Fix for issue <I>: Validation of SVG (data: URLs)
|
w3c_epubcheck
|
train
|
d2da470e4fdcd8245ba10929479c5b0afab2e249
|
diff --git a/scripts/eject.js b/scripts/eject.js
index <HASH>..<HASH> 100644
--- a/scripts/eject.js
+++ b/scripts/eject.js
@@ -39,6 +39,7 @@ prompt(
path.join('config', 'jest', 'FileStub.js'),
path.join('scripts', 'build.js'),
path.join('scripts', 'start.js'),
+ path.join('scripts', 'test.js'),
path.join('scripts', 'utils', 'checkRequiredFiles.js'),
path.join('scripts', 'utils', 'chrome.applescript'),
path.join('scripts', 'utils', 'getClientEnvironment.js'),
@@ -98,7 +99,6 @@ prompt(
delete appPackage.scripts['eject'];
Object.keys(appPackage.scripts).forEach(function (key) {
appPackage.scripts[key] = appPackage.scripts[key]
- .replace(/react-scripts test/g, 'jest --watch')
.replace(/react-scripts (\w+)/g, 'node scripts/$1.js');
});
diff --git a/scripts/test.js b/scripts/test.js
index <HASH>..<HASH> 100644
--- a/scripts/test.js
+++ b/scripts/test.js
@@ -1,3 +1,4 @@
+// @remove-on-eject-begin
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
@@ -6,6 +7,7 @@
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
+// @remove-on-eject-end
process.env.NODE_ENV = 'test';
process.env.PUBLIC_URL = '';
@@ -16,7 +18,6 @@ process.env.PUBLIC_URL = '';
// https://github.com/motdotla/dotenv
require('dotenv').config({silent: true});
-const createJestConfig = require('./utils/createJestConfig');
const jest = require('jest');
const path = require('path');
const paths = require('../config/paths');
@@ -28,10 +29,14 @@ if (!process.env.CI) {
argv.push('--watch');
}
+// @remove-on-eject-begin
+// This is not necessary after eject because we embed config into package.json.
+const createJestConfig = require('./utils/createJestConfig');
argv.push('--config', JSON.stringify(createJestConfig(
relativePath => path.resolve(__dirname, '..', relativePath),
path.resolve(paths.appSrc, '..'),
false
)));
+// @remove-on-eject-end
jest.run(argv);
|
Preserve scripts/test.js after ejecting (#<I>)
Fixes #<I>
|
Pajn_tscomp
|
train
|
4aecf094e0a184f37479d2cc82b7819892d95776
|
diff --git a/lib/rdl/wrap.rb b/lib/rdl/wrap.rb
index <HASH>..<HASH> 100644
--- a/lib/rdl/wrap.rb
+++ b/lib/rdl/wrap.rb
@@ -199,7 +199,7 @@ RUBY
else
tmp_klass = klass
end
- raise RuntimeError, "Deferred contract from class #{prev_klass} being applied in class #{tmp_klass}" if prev_klass != tmp_klass
+ raise RuntimeError, "Deferred contract from class #{prev_klass} being applied in class #{tmp_klass} to #{meth}" if prev_klass != tmp_klass
$__rdl_info.add(klass, meth, kind, contract)
RDL::Wrap.wrap(klass, meth) if h[:wrap]
unless $__rdl_info.set(klass, meth, :typecheck, h[:typecheck])
@@ -322,16 +322,18 @@ class Object
unless $__rdl_info.set(klass, meth, :typecheck, typecheck)
raise RuntimeError, "Inconsistent typecheck flag on #{RDL::Util.pp_klass_method(klass, meth)}"
end
- if wrap
+ if wrap || typecheck == :now
if RDL::Util.method_defined?(klass, meth) || meth == :initialize
$__rdl_info.set(klass, meth, :source_location, RDL::Util.to_class(klass).instance_method(meth).source_location)
RDL::Typecheck.typecheck(klass, meth) if typecheck == :now
- RDL::Wrap.wrap(klass, meth)
+ RDL::Wrap.wrap(klass, meth) if wrap
else
- $__rdl_to_wrap << [klass, meth]
- if (typecheck && typecheck != :call)
- $__rdl_to_typecheck[typecheck] = Set.new unless $__rdl_to_typecheck[typecheck]
- $__rdl_to_typecheck[typecheck].add([klass, meth])
+ if wrap
+ $__rdl_to_wrap << [klass, meth]
+ if (typecheck && typecheck != :call)
+ $__rdl_to_typecheck[typecheck] = Set.new unless $__rdl_to_typecheck[typecheck]
+ $__rdl_to_typecheck[typecheck].add([klass, meth])
+ end
end
end
end
|
slightly adjust when typechecking is called in `type`
|
plum-umd_rdl
|
train
|
67badc336c24d1bfe724e8bc4ba6ede401cc1562
|
diff --git a/wakatime/queue.py b/wakatime/queue.py
index <HASH>..<HASH> 100644
--- a/wakatime/queue.py
+++ b/wakatime/queue.py
@@ -16,8 +16,6 @@ import os
import traceback
from time import sleep
-from .compat import u
-
try:
import sqlite3
HAS_SQL = True
@@ -93,7 +91,7 @@ class Queue(object):
for row_name in ['file', 'time', 'project', 'language', 'lines', 'branch', 'is_write']:
if row[index] is not None:
clauses.append('{0}=?'.format(row_name))
- values.append(u(row[index]))
+ values.append(row[index])
else:
clauses.append('{0} IS NULL'.format(row_name))
index += 1
|
fix bug preventing offline heartbeats from being purged
|
wakatime_wakatime
|
train
|
a239d93288553aabdc63054ef61b429336e32f3c
|
diff --git a/lib/draper/decorator.rb b/lib/draper/decorator.rb
index <HASH>..<HASH> 100755
--- a/lib/draper/decorator.rb
+++ b/lib/draper/decorator.rb
@@ -16,15 +16,6 @@ module Draper
# @return [Hash] extra data to be used in user-defined methods.
attr_accessor :context
- def self.inherited(klass)
- begin
- alias_name = klass.name.downcase.gsub(/decorator/, "").to_sym
- klass.send(:define_method, alias_name) do
- object
- end
- rescue; end
- end
-
# Wraps an object in a new instance of the decorator.
#
# Decorators may be applied to other decorators. However, applying a
@@ -66,6 +57,7 @@ module Draper
# @return [void]
def self.decorates(object_class)
@object_class = object_class.to_s.camelize.constantize
+ alias_object_to_object_class_name
end
# Returns the source class corresponding to the decorator class, as set by
@@ -228,6 +220,15 @@ module Draper
private
+ def self.inherited(subclass)
+ subclass.alias_object_to_object_class_name
+ super
+ end
+
+ def self.alias_object_to_object_class_name
+ alias_method object_class.name.underscore, :object if object_class?
+ end
+
def self.object_class_name
raise NameError if name.nil? || name.demodulize !~ /.+Decorator$/
name.chomp("Decorator")
diff --git a/spec/draper/decorator_spec.rb b/spec/draper/decorator_spec.rb
index <HASH>..<HASH> 100755
--- a/spec/draper/decorator_spec.rb
+++ b/spec/draper/decorator_spec.rb
@@ -361,20 +361,43 @@ module Draper
end
end
- describe "aliasing object to wrapped model name" do
- class ::ProductDecorator < Decorator; end
- class ::Product
- attr_reader :name
- def initialize
- @name = "bob"
+ describe "aliasing object to object class name" do
+ context "when object_class is inferrable from the decorator name" do
+ it "aliases object to the object class name" do
+ object = stub
+ decorator = ProductDecorator.new(object)
+
+ expect(decorator.product).to be object
+ end
+ end
+
+ context "when object_class is set by decorates" do
+ it "aliases object to the object class name" do
+ decorator_class = Class.new(Decorator) { decorates Product }
+ object = stub
+ decorator = decorator_class.new(object)
+
+ expect(decorator.product).to be object
end
end
- it "aliases object to wrapped model name" do
- decorator = ProductDecorator.new(Product.new)
+ context "when object_class's name is several words long" do
+ it "underscores the method name" do
+ stub_const "LongWindedModel", Class.new
+ decorator_class = Class.new(Decorator) { decorates LongWindedModel }
+ object = stub
+ decorator = decorator_class.new(object)
- expect(decorator.product).not_to be nil
- expect(decorator.product.name).to eq "bob"
+ expect(decorator.long_winded_model).to be object
+ end
+ end
+
+ context "when object_class is not set" do
+ it "does not alias object" do
+ decorator_class = Class.new(Decorator)
+
+ expect(decorator_class.instance_methods).to eq Decorator.instance_methods
+ end
end
end
|
Improve aliasing to object class name
|
drapergem_draper
|
train
|
be042d699f7335ff55e48799480831f22b61c112
|
diff --git a/features/051_check_for_template_partial_loops.feature b/features/051_check_for_template_partial_loops.feature
index <HASH>..<HASH> 100644
--- a/features/051_check_for_template_partial_loops.feature
+++ b/features/051_check_for_template_partial_loops.feature
@@ -26,6 +26,12 @@ Feature: Check for template partial includes cycle
Then the template partials loop indefinitely warning 051 should not be displayed against the templates
And no error should have occurred
+ Scenario: Missing partial
+ Given a template that includes a missing partial with a relative subdirectory path
+ When I check the cookbook
+ Then the template partials loop indefinitely warning 051 should not be displayed against the templates
+ And no error should have occurred
+
Scenario Outline: Template directory contains binary files
Given a template directory that contains a binary file <file> that is not valid UTF-8
When I check the cookbook
diff --git a/features/step_definitions/cookbook_steps.rb b/features/step_definitions/cookbook_steps.rb
index <HASH>..<HASH> 100644
--- a/features/step_definitions/cookbook_steps.rb
+++ b/features/step_definitions/cookbook_steps.rb
@@ -1467,7 +1467,7 @@ Given /^a template that includes a partial( that includes the original template
write_file 'cookbooks/example/templates/default/b.erb', content
end
-Given 'a template that includes a partial with a relative subdirectory path' do
+Given /^a template that includes a (missing )?partial with a relative subdirectory path$/ do |missing|
write_recipe %q{
template "/tmp/a" do
source "a.erb"
@@ -1477,7 +1477,9 @@ Given 'a template that includes a partial with a relative subdirectory path' do
end
}
write_file 'cookbooks/example/templates/default/a.erb', '<%= render "partials/b.erb" %>'
- write_file 'cookbooks/example/templates/default/partials/b.erb', 'Partial content'
+ unless missing
+ write_file 'cookbooks/example/templates/default/partials/b.erb', 'Partial content'
+ end
end
Given 'access to the man page documentation' do
diff --git a/lib/foodcritic/api.rb b/lib/foodcritic/api.rb
index <HASH>..<HASH> 100644
--- a/lib/foodcritic/api.rb
+++ b/lib/foodcritic/api.rb
@@ -342,8 +342,11 @@ module FoodCritic
partial_path = Array(all_templates).find do |path|
(Pathname.new(template_path).dirname + included_partial).to_s == path
end
- Array(partial_path) + templates_included(all_templates, partial_path, depth + 1)
- end.flatten.uniq
+ if partial_path
+ Array(partial_path) +
+ templates_included(all_templates, partial_path, depth + 1)
+ end
+ end.flatten.uniq.compact
end
# Templates in the current cookbook
|
Don't raise if partial does not exist, refs #<I>.
|
Foodcritic_foodcritic
|
train
|
3618a44b82313597b49878c048c97e4713b00093
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,9 +19,6 @@ entry_points = {
runtime = {
'pyyaml>=3.10,<=3.12',
- 'tornado>=4.2.1',
- 'futures==3.0.5',
- 'requests==2.13.0',
}
develop = {
@@ -32,6 +29,8 @@ develop = {
'Sphinx',
'sphinx_rtd_theme',
'Jinja2==2.9.6',
+ 'futures==3.0.5',
+ 'requests==2.13.0',
'wheel'
}
@@ -43,6 +42,16 @@ if __name__ == "__main__":
name=name,
version=package_info["VERSION"],
description="Insights Application Programming Interface",
+ author_email="insights@redhat.com",
+ license="Apache 2",
+ url="https://access.redhat.com/insights",
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python :: 2.7",
+ "Topic :: System :: Monitoring",
+ "Intended Audience :: Information Technology"
+ ],
packages=find_packages(),
install_requires=list(runtime),
extras_require={
|
Prepare for release on pypi (#<I>)
* Adding package metadata for pypi
* Removing unnecessary runtime dependencies
requests and futures are only used by the archive tool script, so they
are moved to the develop dependency section.
|
RedHatInsights_insights-core
|
train
|
b07f722666c5218ea47c757f30acb0b6271cde89
|
diff --git a/hazelcast/src/main/java/com/hazelcast/transaction/impl/TransactionManagerServiceImpl.java b/hazelcast/src/main/java/com/hazelcast/transaction/impl/TransactionManagerServiceImpl.java
index <HASH>..<HASH> 100644
--- a/hazelcast/src/main/java/com/hazelcast/transaction/impl/TransactionManagerServiceImpl.java
+++ b/hazelcast/src/main/java/com/hazelcast/transaction/impl/TransactionManagerServiceImpl.java
@@ -174,7 +174,9 @@ public class TransactionManagerServiceImpl implements TransactionManagerService,
@Override
public void memberRemoved(MembershipServiceEvent event) {
- final String uuid = event.getMember().getUuid();
+ MemberImpl member = event.getMember();
+ final String uuid = member.getUuid();
+ logger.info("Committing/rolling-back alive transactions of " + member + ", UUID: " + uuid);
nodeEngine.getExecutionService().execute(ExecutionService.SYSTEM_EXECUTOR, new Runnable() {
@Override
public void run() {
@@ -188,9 +190,6 @@ public class TransactionManagerServiceImpl implements TransactionManagerService,
}
private void finalizeTransactionsOf(String uuid) {
- MemberImpl member = nodeEngine.getClusterService().getMember(uuid);
- logger.info("Committing/rolling-back alive transactions of "
- + (member != null ? member : "client") + ", UUID: " + uuid);
for (Map.Entry<String, TxBackupLog> entry : txBackupLogs.entrySet()) {
finalize(uuid, entry.getKey(), entry.getValue());
}
@@ -252,6 +251,7 @@ public class TransactionManagerServiceImpl implements TransactionManagerService,
@Override
public void clientDisconnected(String clientUuid) {
+ logger.info("Committing/rolling-back alive transactions of client, UUID: " + clientUuid);
finalizeTransactionsOf(clientUuid);
}
|
Fix transaction manager log added by commit d9b<I>
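By the time the SYSTEM_EXECUTOR runs the finalizer, the removed member can no longer be resolved via getClusterService().getMember(uuid), so the details have to be captured from the event before going asynchronous. A hedged sketch of that pattern, with hypothetical names rather than Hazelcast's API:

import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.logging.Logger;

class MemberCleanup {
    static final Logger LOG = Logger.getLogger("cleanup");
    static final Executor POOL = Executors.newSingleThreadExecutor();

    // Capture everything worth logging before the async hop, while the
    // departing member's details are still available from the event.
    static void onMemberRemoved(String memberDescription, String uuid) {
        LOG.info("Committing/rolling-back alive transactions of "
                + memberDescription + ", UUID: " + uuid);
        POOL.execute(() -> finalizeTransactionsOf(uuid)); // member lookup may fail here
    }

    static void finalizeTransactionsOf(String uuid) { /* commit or roll back */ }
}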
|
hazelcast_hazelcast
|
train
|
97cac4b985cb9688bdf5454a38f9c5a89f02b1fa
|
diff --git a/src/geom/Vec3.js b/src/geom/Vec3.js
index <HASH>..<HASH> 100644
--- a/src/geom/Vec3.js
+++ b/src/geom/Vec3.js
@@ -125,8 +125,8 @@ define([
Logger.logMessage(Logger.LEVEL_SEVERE, "Vec3", "areColinear", "missingVector"));
}
- var ab = b.subtract(a).normalize(),
- bc = c.subtract(b).normalize();
+ var ab = new Vec3(a[0]-b[0],a[1]-b[1],a[2]-b[2]).normalize(),
+ bc = new Vec3(c[0]-b[0],c[1]-b[1],c[2]-b[2]).normalize();
// ab and bc are considered colinear if their dot product is near +/-1.
return Math.abs(ab.dot(bc)) > 0.999;
|
Inline subtraction to avoid overwriting subtrahend
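subtract in this Vec3 implementation appears to work in place, so b.subtract(a) modified b, the subtrahend of the following c.subtract(b), before it was read. The hazard and the allocating fix, sketched in Java with a hypothetical Vec3:

class Vec3 {
    double x, y, z;
    Vec3(double x, double y, double z) { this.x = x; this.y = y; this.z = z; }

    // In-place subtract: mutates and returns this (the hazard above).
    Vec3 subtractInPlace(Vec3 o) { x -= o.x; y -= o.y; z -= o.z; return this; }

    // Allocating subtract: both operands stay intact (the fix above).
    static Vec3 subtract(Vec3 a, Vec3 b) {
        return new Vec3(a.x - b.x, a.y - b.y, a.z - b.z);
    }
}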
|
NASAWorldWind_WebWorldWind
|
train
|
f3111a575aec81ce483bbcd53808df3d952fe05a
|
diff --git a/interop/grpclb_fallback/client.go b/interop/grpclb_fallback/client.go
index <HASH>..<HASH> 100644
--- a/interop/grpclb_fallback/client.go
+++ b/interop/grpclb_fallback/client.go
@@ -1,3 +1,4 @@
+// +build linux
// +build !appengine
// +build go1.11
|
interop: Build grpclb_fallback/client.go only for linux. (#<I>)
|
grpc_grpc-go
|
train
|
7c95b213db465db5a96998daa08890be9ddd4f51
|
diff --git a/src/pyop/request_validator.py b/src/pyop/request_validator.py
index <HASH>..<HASH> 100644
--- a/src/pyop/request_validator.py
+++ b/src/pyop/request_validator.py
@@ -29,11 +29,11 @@ def client_id_is_known(provider, authentication_request):
:raise InvalidAuthenticationRequest: if the client_id is unknown
"""
if authentication_request['client_id'] not in provider.clients:
- raise InvalidAuthenticationRequest('Unknown client_id \'{}\''.format(authentication_request['client_id']),
+ logger.error('Unknown client_id \'{}\''.format(authentication_request['client_id']))
+ raise InvalidAuthenticationRequest('Unknown client_id',
authentication_request,
oauth_error='unauthorized_client')
-
def redirect_uri_is_in_registered_redirect_uris(provider, authentication_request):
"""
Verifies the redirect uri is registered for the client making the request.
@@ -41,8 +41,9 @@ def redirect_uri_is_in_registered_redirect_uris(provider, authentication_request
:param authentication_request: authentication request to verify
:raise InvalidAuthenticationRequest: if the redirect uri is not registered
"""
- error = InvalidAuthenticationRequest('Redirect uri \'{}\' is not registered'.format(
- authentication_request['redirect_uri']), authentication_request)
+ error = InvalidAuthenticationRequest('Redirect uri is not registered',
+ authentication_request,
+ oauth_error="invalid_request")
try:
allowed_redirect_uris = provider.clients[authentication_request['client_id']]['redirect_uris']
except KeyError as e:
@@ -50,6 +51,7 @@ def redirect_uri_is_in_registered_redirect_uris(provider, authentication_request
raise error
if authentication_request['redirect_uri'] not in allowed_redirect_uris:
+ logger.error("Redirect uri \'{0}\' is not registered for this client".format(authentication_request['redirect_uri']))
raise error
@@ -60,9 +62,9 @@ def response_type_is_in_registered_response_types(provider, authentication_reque
:param authentication_request: authentication request to verify
:raise InvalidAuthenticationRequest: if the response type is not allowed
"""
- error = InvalidAuthenticationRequest('Response type \'{}\' is not registered'.format(
- ' '.join(authentication_request['response_type'])),
- authentication_request, oauth_error='invalid_request')
+ error = InvalidAuthenticationRequest('Response type is not registered',
+ authentication_request,
+ oauth_error='invalid_request')
try:
allowed_response_types = provider.clients[authentication_request['client_id']]['response_types']
except KeyError as e:
@@ -70,6 +72,7 @@ def response_type_is_in_registered_response_types(provider, authentication_reque
raise error
if not is_allowed_response_type(authentication_request['response_type'], allowed_response_types):
+ logger.error('Response type \'{}\' is not registered'.format(' '.join(authentication_request['response_type'])))
raise error
|
Adapted error handling
1. Make sure we always return an OIDC error to the requester
2. Reduce the attack surface for injections in a web context
by not reflecting user input.
3. Log all errors with details on the provider side for ease
of troubleshooting
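The same pattern applies in any web stack: log the attacker-controllable detail for operators, and hand the client a fixed message so user input is never reflected into a response. A hedged Java sketch with hypothetical names:

import java.util.Set;
import java.util.logging.Logger;

class AuthValidation {
    static final Logger LOG = Logger.getLogger("auth");

    // Detailed log server-side, generic error client-side.
    static void requireKnownClient(Set<String> knownClients, String clientId) {
        if (!knownClients.contains(clientId)) {
            LOG.severe("Unknown client_id '" + clientId + "'");      // full detail, operators only
            throw new IllegalArgumentException("Unknown client_id"); // fixed, non-reflective message
        }
    }
}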
|
IdentityPython_pyop
|
train
|
4ea7acc4b31fa0575bc4ea37f0b6c9bddfdb95a7
|
diff --git a/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/model/CategoriesOptions.java b/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/model/CategoriesOptions.java
index <HASH>..<HASH> 100644
--- a/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/model/CategoriesOptions.java
+++ b/natural-language-understanding/src/main/java/com/ibm/watson/natural_language_understanding/v1/model/CategoriesOptions.java
@@ -30,7 +30,7 @@ public class CategoriesOptions extends GenericModel {
public static class Builder {
private Boolean explanation;
private Long limit;
- private String model;
+ @Deprecated private String model;
private Builder(CategoriesOptions categoriesOptions) {
this.explanation = categoriesOptions.explanation;
@@ -77,6 +77,8 @@ public class CategoriesOptions extends GenericModel {
*
* @param model the model
* @return the CategoriesOptions builder
+ * @deprecated the model parameter is no longer supported by the Natural Language Understanding
+ * service and will be removed in the next major release
*/
public Builder model(String model) {
this.model = model;
@@ -135,6 +137,8 @@ public class CategoriesOptions extends GenericModel {
* categories models will no longer be accessible in Knowledge Studio on 17 December 2019.
*
* @return the model
+ * @deprecated the model parameter is no longer supported by the Natural Language Understanding
+ * service and will be removed in the next major release
*/
public String model() {
return model;
|
chore(natural-language-understanding-v1): post-gen manual change
|
watson-developer-cloud_java-sdk
|
train
|
756c888b71a6ff5ddac0bbc0894b5562f0f43635
|
diff --git a/neomodel/relationship.py b/neomodel/relationship.py
index <HASH>..<HASH> 100644
--- a/neomodel/relationship.py
+++ b/neomodel/relationship.py
@@ -104,8 +104,9 @@ class RelationshipManager(object):
if self.direction == EITHER:
raise Exception("Cannot connect with direction EITHER")
+ node_class = None
for cls in self.node_classes:
- if cls.__subclasscheck__(obj.__class__):
+ if obj.__class__ is cls:
node_class = cls
if not node_class:
allowed_cls = ", ".join([c.__name__ for c in self.node_classes])
diff --git a/test/test_relationships.py b/test/test_relationships.py
index <HASH>..<HASH> 100644
--- a/test/test_relationships.py
+++ b/test/test_relationships.py
@@ -85,23 +85,6 @@ def test_custom_methods():
assert u.special_name == 'Joe91'
-def test_abstract_class_relationships():
- j = Person(name='Joe', age=13).save()
- assert j
-
- u = SuperHero(name='UltraJoe', age=13, power='invisibility').save()
- assert u
-
- gr = Country(code='GR').save()
- assert gr
-
- gr.inhabitant.connect(j)
- assert gr.inhabitant.is_connected(j)
-
- gr.inhabitant.connect(u)
- assert gr.inhabitant.is_connected(u)
-
-
def test_props_relationship():
u = Person(name='Mar', age=20).save()
assert u
|
Connecting to subclasses no longer supported
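The new check obj.__class__ is cls accepts only exact class matches, where the old __subclasscheck__ call also accepted subclass instances. The two checks side by side, illustrated in Java for comparison:

class TypeChecks {
    // Accepts obj only when its class is exactly cls; subclasses are rejected.
    static boolean isExactly(Object obj, Class<?> cls) {
        return obj.getClass() == cls;
    }

    // Accepts obj when it is cls or any subclass (the behaviour removed here).
    static boolean isInstanceOf(Object obj, Class<?> cls) {
        return cls.isInstance(obj);
    }
}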
|
neo4j-contrib_neomodel
|
train
|
f55fa8211b6faf08091dd99ed8e5e3f08ab9cf1f
|
diff --git a/api/client/commands.go b/api/client/commands.go
index <HASH>..<HASH> 100644
--- a/api/client/commands.go
+++ b/api/client/commands.go
@@ -1476,6 +1476,7 @@ func (cli *DockerCli) CmdPs(args ...string) error {
outCommand = out.Get("Command")
ports = engine.NewTable("", 0)
)
+ outCommand = strconv.Quote(outCommand)
if !*noTrunc {
outCommand = utils.Trunc(outCommand, 20)
}
|
Escape control and nonprintable characters in docker ps
The docker ps command displays the user-entered command running in a container.
If that command contained \n, \t, or other control characters, they were
interpreted literally, and newlines and tabs would be printed in the output.
Escape the command string to make things more readable.
Docker-DCO-<I>-
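strconv.Quote renders the string with Go escape syntax, so a raw newline becomes a visible \n. The same idea sketched in Java (a hypothetical helper, not Docker's code):

class DisplayEscape {
    // Make control characters visible instead of letting them reflow output.
    static String escape(String s) {
        StringBuilder out = new StringBuilder();
        for (char c : s.toCharArray()) {
            switch (c) {
                case '\n': out.append("\\n"); break;
                case '\t': out.append("\\t"); break;
                case '\r': out.append("\\r"); break;
                default:
                    if (c < 0x20) out.append(String.format("\\u%04x", (int) c));
                    else out.append(c);
            }
        }
        return out.toString();
    }
}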
|
containers_storage
|
train
|
cc93a1fb54bea392cf0c2da5c82ab190a2f921bd
|
diff --git a/src/test/java/com/github/dockerjava/core/command/CopyArchiveToContainerCmdImplTest.java b/src/test/java/com/github/dockerjava/core/command/CopyArchiveToContainerCmdImplTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/github/dockerjava/core/command/CopyArchiveToContainerCmdImplTest.java
+++ b/src/test/java/com/github/dockerjava/core/command/CopyArchiveToContainerCmdImplTest.java
@@ -3,6 +3,7 @@ package com.github.dockerjava.core.command;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.equalTo;
import java.io.IOException;
import java.io.InputStream;
@@ -117,5 +118,39 @@ public class CopyArchiveToContainerCmdImplTest extends AbstractDockerClientTest
.withHostResource(localDir.toString())
.exec();
}
+
+ @Test
+ public void copyFileWithExecutePermission() throws Exception {
+ // create script file, add permission to execute
+ scriptPath = Files.createTempFile("run", ".sh");
+ boolean executable = scriptPath.toFile().setExecutable(true, false);
+ if (!executable){
+ throw new Exception("Execute permission on file not set!");
+ }
+ String snippet = "Running script with execute permission.";
+ String scriptTextStr = "#!/bin/sh\necho \"" + snippet + "\"";
+ // write content for created script
+ Files.write(scriptPath, scriptTextStr.getBytes());
+ // create a test container which starts and waits 3 seconds for the
+ // script to be copied to the container's home dir and then executes it
+ String containerCmd = "sleep 3; /home/" + scriptPath.getFileName().toString();
+ CreateContainerResponse container = docker.createContainerCmd("busybox")
+ .withName("test")
+ .withCmd("/bin/sh", "-c", containerCmd)
+ .exec();
+ // start the container
+ docker.startContainerCmd(container.getId()).exec();
+ // copy script to container home dir
+ docker.copyArchiveToContainerCmd(container.getId())
+ .withRemotePath("/home")
+ .withHostResource(scriptPath.toString())
+ .exec();
+ // await exit code
+ int exitCode = docker.waitContainerCmd(container.getId())
+ .exec(new WaitContainerResultCallback())
+ .awaitStatusCode();
+ // check result
+ assertThat(exitCode, equalTo(0));
+ }
}
|
Added test case for CopyArchiveToContainerCmdImpl when copying files with execute permission.
The test simulates a scenario where a script file with execute permission is copied to the container with CopyArchiveToContainerCmdImpl and then executed.
|
docker-java_docker-java
|
train
|
7ef2f6f7d213cd0d88c44848ef83ef4efb273980
|
diff --git a/examples/shared_samples/ethernet_network.rb b/examples/shared_samples/ethernet_network.rb
index <HASH>..<HASH> 100644
--- a/examples/shared_samples/ethernet_network.rb
+++ b/examples/shared_samples/ethernet_network.rb
@@ -22,6 +22,13 @@ require_relative '../_client' # Gives access to @client
# - API500 for C7000
# - API500 for Synergy
+# Resources that can be created according to parameters:
+# api_version = 200 & variant = any to OneviewSDK::API200::EthernetNetwork
+# api_version = 300 & variant = C7000 to OneviewSDK::API300::C7000::EthernetNetwork
+# api_version = 300 & variant = Synergy to OneviewSDK::API300::Synergy::EthernetNetwork
+# api_version = 500 & variant = C7000 to OneviewSDK::API500::C7000::EthernetNetwork
+# api_version = 500 & variant = Synergy to OneviewSDK::API500::Synergy::EthernetNetwork
+
# variant represents the model[C7000, Synergy]
variant = ARGV[0]
diff --git a/examples/shared_samples/fcoe_network.rb b/examples/shared_samples/fcoe_network.rb
index <HASH>..<HASH> 100644
--- a/examples/shared_samples/fcoe_network.rb
+++ b/examples/shared_samples/fcoe_network.rb
@@ -21,6 +21,13 @@ require_relative '../_client' # Gives access to @client
# - API500 for C7000
# - API500 for Synergy
+# Resources that can be created according to parameters:
+# api_version = 200 & variant = any to OneviewSDK::API200::FCoENetwork
+# api_version = 300 & variant = C7000 to OneviewSDK::API300::C7000::FCoENetwork
+# api_version = 300 & variant = Synergy to OneviewSDK::API300::Synergy::FCoENetwork
+# api_version = 500 & variant = C7000 to OneviewSDK::API500::C7000::FCoENetwork
+# api_version = 500 & variant = Synergy to OneviewSDK::API500::Synergy::FCoENetwork
+
# variant represents the model[C7000, Synergy]
variant = ARGV[0]
|
Updating the doc in shared examples
|
HewlettPackard_oneview-sdk-ruby
|
train
|
77f72cde0262cd4d9e15842d133b3167b6f1608f
|
diff --git a/test/ImmutableArray/test-asMutable.js b/test/ImmutableArray/test-asMutable.js
index <HASH>..<HASH> 100644
--- a/test/ImmutableArray/test-asMutable.js
+++ b/test/ImmutableArray/test-asMutable.js
@@ -14,6 +14,7 @@ module.exports = function(config) {
var immutable = Immutable(array);
var mutable = immutable.asMutable();
+ assertIsArray(mutable);
assertCanBeMutated(mutable);
assert.isFalse( Immutable.isImmutable(mutable));
TestUtils.assertIsDeeplyImmutable(mutable[0]);
@@ -28,6 +29,7 @@ module.exports = function(config) {
var immutable = Immutable(array);
var mutable = immutable.asMutable({ deep: true });
+ assertIsArray(mutable);
assertCanBeMutated(mutable);
assert.isFalse( Immutable.isImmutable(mutable));
assert.isFalse( Immutable.isImmutable(mutable[0]));
@@ -52,4 +54,8 @@ module.exports = function(config) {
assert.fail("Exception when trying to verify that this array was mutable: " + JSON.stringify(array));
}
}
+
+ function assertIsArray(array) {
+ assert(array instanceof Array, "Expected an Array, but did not get one. Got: " + JSON.stringify(array))
+ }
};
|
Test that Array#asMutable actually returns arrays.
|
rtfeldman_seamless-immutable
|
train
|
fd2d183f37b81023dd906896e4acc9d39ab8c04e
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -15,13 +15,11 @@ SETUP = {
'author_email': "ubuntu-devel-discuss@lists.ubuntu.com",
'url': "https://code.launchpad.net/charm-helpers",
'install_requires': [
- # Specify precise versions of runtime dependencies where possible.
- 'netaddr==0.7.10', # trusty. precise is 0.7.5, but not in pypi.
- 'PyYAML==3.10', # precise
- 'Tempita==0.5.1', # precise
- 'netifaces==0.10', # trusty is 0.8, but using py3 compatible version for tests.
- 'Jinja2==2.6', # precise
- 'six==1.1', # precise
+ 'netaddr',
+ 'PyYAML',
+ 'Tempita',
+ 'Jinja2',
+ 'six',
],
'packages': [
"charmhelpers",
|
Removed version pins and netifaces to ensure that it installs properly on new trusty and precise images
|
juju_charm-helpers
|
train
|
d852269615b720fefce3a56d127feaf4d3c0a35f
|
diff --git a/core/src/main/java/org/kohsuke/stapler/TokenList.java b/core/src/main/java/org/kohsuke/stapler/TokenList.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/kohsuke/stapler/TokenList.java
+++ b/core/src/main/java/org/kohsuke/stapler/TokenList.java
@@ -86,14 +86,14 @@ public final class TokenList {
return tokens[--idx];
}
public int nextAsInt() throws NumberFormatException {
- long l = nextAsLong();
- if (l < Integer.MIN_VALUE) {
- throw new NumberFormatException(String.format("Token '%d' cannot be interpreted as an integer as its value is less than %d.", l, Integer.MIN_VALUE));
- } else if (l > Integer.MAX_VALUE) {
- throw new NumberFormatException(String.format("Token '%d' cannot be interpreted as an integer as its value is greater than %d.", l, Integer.MAX_VALUE));
+ long asLongValue = nextAsLong();
+ if (asLongValue < Integer.MIN_VALUE) {
+ throw new NumberFormatException(String.format("Token '%d' cannot be interpreted as an integer as its value is less than %d.", asLongValue, Integer.MIN_VALUE));
+ } else if (asLongValue > Integer.MAX_VALUE) {
+ throw new NumberFormatException(String.format("Token '%d' cannot be interpreted as an integer as its value is greater than %d.", asLongValue, Integer.MAX_VALUE));
}
- return (int) l;
+ return (int) asLongValue;
}
public long nextAsLong() throws NumberFormatException {
String p = peek();
|
renaming "l" variable to "asLongValue"
|
stapler_stapler
|
train
|
f44b68e0989a81cd77641a77b58accfa916bbc55
|
diff --git a/src/main.js b/src/main.js
index <HASH>..<HASH> 100644
--- a/src/main.js
+++ b/src/main.js
@@ -291,7 +291,7 @@ function TimekitBooking() {
// Setup and render FullCalendar
var initializeCalendar = function() {
- var sizing = decideCalendarSize();
+ var sizing = decideCalendarSize(config.fullCalendar.defaultView);
var args = {
defaultView: sizing.view,
@@ -328,18 +328,20 @@ function TimekitBooking() {
}
// Fires when window is resized and calendar must adhere
- var decideCalendarSize = function() {
+ var decideCalendarSize = function(currentView) {
- var view = 'agendaWeek';
- var height = 420;
+ var view, height;
var rootWidth = rootTarget.width();
+ currentView = currentView || calendarTarget.fullCalendar('getView').name
if (rootWidth < 480) {
- view = 'basicDay';
+ if (currentView === 'agendaWeek') view = 'basicDay';
height = 380;
rootTarget.addClass('is-small');
if (config.avatar) { height -= 15; }
} else {
+ view = config.fullCalendar.defaultView
+ height = 420;
rootTarget.removeClass('is-small');
}
@@ -643,6 +645,12 @@ function TimekitBooking() {
return $.extend(true, {}, defaultConfig.primary, suppliedConfig);
}
+ var applyConfigPreset = function (config, propertyName, propertyObject) {
+ var presetCheck = defaultConfig.presets[propertyName][propertyObject];
+ if (presetCheck) return $.extend(true, {}, config, presetCheck);
+ return config
+ }
+
// Setup config
var setConfig = function(suppliedConfig) {
@@ -654,31 +662,24 @@ function TimekitBooking() {
// Extend the default config with supplied settings
var newConfig = setConfigDefaults(suppliedConfig);
- // Apply timeDateFormat presets
- var presetsConfig = {};
- var timeDateFormatPreset = defaultConfig.presets.timeDateFormat[newConfig.localization.timeDateFormat];
- if(timeDateFormatPreset) presetsConfig = timeDateFormatPreset;
- var finalConfig = $.extend(true, {}, presetsConfig, newConfig);
-
- // Apply bookingGraph presets
- presetsConfig = {};
- var bookingGraphPreset = defaultConfig.presets.bookingGraph[newConfig.bookingGraph];
- if(bookingGraphPreset) presetsConfig = bookingGraphPreset;
- finalConfig = $.extend(true, {}, presetsConfig, finalConfig);
+ // Apply presets
+ newConfig = applyConfigPreset(newConfig, 'timeDateFormat', newConfig.localization.timeDateFormat)
+ newConfig = applyConfigPreset(newConfig, 'bookingGraph', newConfig.bookingGraph)
+ newConfig = applyConfigPreset(newConfig, 'availabilityView', newConfig.availabilityView)
// Check for required settings
- if (!finalConfig.email) {
+ if (!newConfig.email) {
utils.logError('A required config setting ("email") was missing');
}
- if (!finalConfig.apiToken) {
+ if (!newConfig.apiToken) {
utils.logError('A required config setting ("apiToken") was missing');
}
- if (!finalConfig.calendar && finalConfig.bookingGraph !== 'group_customer' && finalConfig.bookingGraph !== 'group_customer_payment' && !finalConfig.timekitFindTimeTeam) {
+ if (!newConfig.calendar && newConfig.bookingGraph !== 'group_customer' && newConfig.bookingGraph !== 'group_customer_payment' && !newConfig.timekitFindTimeTeam) {
utils.logError('A required config setting ("calendar") was missing');
}
// Set new config to instance config
- config = finalConfig;
+ config = newConfig;
return config;
|
restructured presets and auto-switch to mobile view
|
timekit-io_booking-js
|
train
|
911133a2917788922ddb921516c1ade97310ccbf
|
diff --git a/agouti_dsl.go b/agouti_dsl.go
index <HASH>..<HASH> 100644
--- a/agouti_dsl.go
+++ b/agouti_dsl.go
@@ -5,12 +5,10 @@ import (
"github.com/sclevine/agouti/page"
"github.com/sclevine/agouti/phantom"
"github.com/sclevine/agouti/webdriver"
+ "net"
"time"
)
-const PHANTOM_HOST = "127.0.0.1"
-const PHANTOM_PORT = 8910
-
var phantomService *phantom.Service
type Page page.Page
@@ -26,13 +24,22 @@ func (f Do) Call(selection page.Selection) {
type Cookie webdriver.Cookie
func SetupAgouti() bool {
- phantomService = &phantom.Service{Host: PHANTOM_HOST, Port: PHANTOM_PORT, Timeout: 3 * time.Second}
+ phantomService = &phantom.Service{Address: freeAddress(), Timeout: 3 * time.Second}
if err := phantomService.Start(); err != nil {
panic("Agouti failed to start phantomjs: " + err.Error())
}
return true
}
+func freeAddress() string {
+ listener, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ panic("Agouti failed to locate a free port: " + err.Error())
+ }
+ defer listener.Close()
+ return listener.Addr().String()
+}
+
func CleanupAgouti(ignored bool) bool {
phantomService.Stop()
return true
diff --git a/phantom/service.go b/phantom/service.go
index <HASH>..<HASH> 100644
--- a/phantom/service.go
+++ b/phantom/service.go
@@ -14,8 +14,7 @@ import (
)
type Service struct {
- Host string
- Port int
+ Address string
Timeout time.Duration
process *os.Process
}
@@ -25,7 +24,7 @@ func (s *Service) Start() error {
return errors.New("phantomjs not found")
}
- command := exec.Command("phantomjs", fmt.Sprintf("--webdriver=%s:%d", s.Host, s.Port))
+ command := exec.Command("phantomjs", fmt.Sprintf("--webdriver=%s", s.Address))
command.Start()
s.process = command.Process
@@ -34,7 +33,7 @@ func (s *Service) Start() error {
func (s *Service) waitForServer() error {
client := &http.Client{}
- request, _ := http.NewRequest("GET", fmt.Sprintf("http://%s:%d/status", s.Host, s.Port), nil)
+ request, _ := http.NewRequest("GET", fmt.Sprintf("http://%s/status", s.Address), nil)
timeoutChan := time.After(s.Timeout)
failedChan := make(chan struct{}, 1)
@@ -77,7 +76,7 @@ func (s *Service) CreateSession() (*Session, error) {
client := &http.Client{}
postBody := strings.NewReader(`{"desiredCapabilities": {} }`)
- request, _ := http.NewRequest("POST", fmt.Sprintf("http://%s:%d/session", s.Host, s.Port), postBody)
+ request, _ := http.NewRequest("POST", fmt.Sprintf("http://%s/session", s.Address), postBody)
response, err := client.Do(request)
@@ -94,6 +93,6 @@ func (s *Service) CreateSession() (*Session, error) {
return nil, errors.New("phantomjs webdriver failed to return a session ID")
}
- sessionURL := fmt.Sprintf("http://%s:%d/session/%s", s.Host, s.Port, sessionResponse.SessionID)
+ sessionURL := fmt.Sprintf("http://%s/session/%s", s.Address, sessionResponse.SessionID)
return &Session{sessionURL}, nil
}
diff --git a/phantom/service_test.go b/phantom/service_test.go
index <HASH>..<HASH> 100644
--- a/phantom/service_test.go
+++ b/phantom/service_test.go
@@ -9,7 +9,6 @@ import (
"net/http"
"net/http/httptest"
"os"
- "strconv"
"strings"
"time"
)
@@ -18,7 +17,7 @@ var _ = Describe("Phantom service", func() {
var service *Service
BeforeEach(func() {
- service = &Service{Host: "127.0.0.1", Port: 42344, Timeout: 3 * time.Second}
+ service = &Service{Address: "127.0.0.1:42344", Timeout: 3 * time.Second}
})
Describe("#Start", func() {
@@ -91,9 +90,9 @@ var _ = Describe("Phantom service", func() {
Context("if the request fails", func() {
It("returns the request error", func() {
- service.Port = 0
+ service.Address = "potato"
_, err := service.CreateSession()
- Expect(err.Error()).To(ContainSubstring("Post http://127.0.0.1:0/session: dial tcp 127.0.0.1:0:"))
+ Expect(err.Error()).To(ContainSubstring("Post http://potato/session: dial tcp"))
})
})
@@ -102,7 +101,7 @@ var _ = Describe("Phantom service", func() {
fakeServer := httptest.NewServer(http.HandlerFunc(func(response http.ResponseWriter, request *http.Request) {
response.Write([]byte("{}"))
}))
- service.Port, _ = strconv.Atoi(strings.Split(fakeServer.URL, ":")[2])
+ service.Address = strings.Split(fakeServer.URL, "/")[2]
_, err := service.CreateSession()
Expect(err).To(MatchError("phantomjs webdriver failed to return a session ID"))
fakeServer.Close()
|
Moved phantomjs to run on an arbitrary unused port
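Binding to 127.0.0.1:0 asks the OS for any unused port, which is what net.Listen does above. The same trick in Java, as a sketch; note the small race window where another process could grab the port between the probe closing and phantomjs binding it:

import java.io.IOException;
import java.net.ServerSocket;

class FreePort {
    // Port 0 means "OS, pick one"; read the assignment back, then release it.
    static int find() throws IOException {
        try (ServerSocket socket = new ServerSocket(0)) {
            return socket.getLocalPort();
        }
    }
}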
|
sclevine_agouti
|
train
|
91e8cef51e0cf4e2001e08ffd9d399bc9df2c4c8
|
diff --git a/pythran/analyses/constant_expressions.py b/pythran/analyses/constant_expressions.py
index <HASH>..<HASH> 100644
--- a/pythran/analyses/constant_expressions.py
+++ b/pythran/analyses/constant_expressions.py
@@ -25,32 +25,32 @@ class ConstantExpressions(NodeAnalysis):
return True
def visit_BoolOp(self, node):
- return all(map(self.visit, node.values)) and self.add(node)
+ return all([self.visit(x) for x in node.values]) and self.add(node)
def visit_BinOp(self, node):
- rec = all(map(self.visit, (node.left, node.right)))
+ rec = all([self.visit(x) for x in (node.left, node.right)])
return rec and self.add(node)
def visit_UnaryOp(self, node):
return self.visit(node.operand) and self.add(node)
def visit_IfExp(self, node):
- rec = all(map(self.visit, (node.test, node.body, node.orelse)))
+ rec = all([self.visit(x) for x in (node.test, node.body, node.orelse)])
return rec and self.add(node)
def visit_Compare(self, node):
- rec = all(map(self.visit, [node.left] + node.comparators))
+ rec = all([self.visit(x) for x in ([node.left] + node.comparators)])
return rec and self.add(node)
def visit_Call(self, node):
- rec = all(map(self.visit, node.args + [node.func]))
+ rec = all([self.visit(x) for x in (node.args + [node.func])])
return rec and self.add(node)
visit_Num = add
visit_Str = add
def visit_Subscript(self, node):
- rec = all(map(self.visit, (node.value, node.slice)))
+ rec = all([self.visit(x) for x in (node.value, node.slice)])
return rec and self.add(node)
def visit_Name(self, node):
@@ -90,11 +90,11 @@ class ConstantExpressions(NodeAnalysis):
return rec(MODULES, node).isconst() and self.add(node)
def visit_Dict(self, node):
- rec = all(map(self.visit, node.keys + node.values))
+ rec = all([self.visit(x) for x in (node.keys + node.values)])
return rec and self.add(node)
def visit_List(self, node):
- return all(map(self.visit, node.elts)) and self.add(node)
+ return all([self.visit(x) for x in node.elts]) and self.add(node)
visit_Tuple = visit_List
visit_Set = visit_List
diff --git a/pythran/analyses/parallel_maps.py b/pythran/analyses/parallel_maps.py
index <HASH>..<HASH> 100644
--- a/pythran/analyses/parallel_maps.py
+++ b/pythran/analyses/parallel_maps.py
@@ -18,8 +18,8 @@ class ParallelMaps(ModuleAnalysis):
def visit_Call(self, node):
if all(alias == MODULES['__builtin__']['map']
for alias in self.aliases[node.func]):
- if all(self.pure_expressions.__contains__(f)
- for f in self.aliases[node.args[0]]):
+ if all(f in self.pure_expressions
+ for f in self.aliases[node.args[0]]):
self.result.add(node)
def display(self, data):
diff --git a/pythran/analyses/pure_expressions.py b/pythran/analyses/pure_expressions.py
index <HASH>..<HASH> 100644
--- a/pythran/analyses/pure_expressions.py
+++ b/pythran/analyses/pure_expressions.py
@@ -27,14 +27,14 @@ class PureExpressions(ModuleAnalysis):
return False
def generic_visit(self, node):
- is_pure = all(map(self.visit, ast.iter_child_nodes(node)))
+ is_pure = all([self.visit(x) for x in ast.iter_child_nodes(node)])
if is_pure:
self.result.add(node)
return is_pure
def visit_Call(self, node):
# check if all arguments are Pures
- is_pure = all(self.visit(arg) for arg in node.args)
+ is_pure = all([self.visit(arg) for arg in node.args])
# check all possible function called
func_aliases = self.aliases[node.func]
@@ -54,7 +54,7 @@ class PureExpressions(ModuleAnalysis):
if ae:
try:
ast.literal_eval(arg)
- except ValueError as ve:
+ except ValueError:
is_pure = False
else:
is_pure = False
diff --git a/pythran/tests/euler/euler38.py b/pythran/tests/euler/euler38.py
index <HASH>..<HASH> 100644
--- a/pythran/tests/euler/euler38.py
+++ b/pythran/tests/euler/euler38.py
@@ -3,14 +3,14 @@
def solve():
'''
Take the number 192 and multiply it by each of 1, 2, and 3:
-
+
192 x 1 = 192
192 x 2 = 384
192 x 3 = 576
By concatenating each product we get the 1 to 9 pandigital, 192384576. We will call 192384576 the concatenated product of 192 and (1,2,3)
-
+
The same can be achieved by starting with 9 and multiplying by 1, 2, 3, 4, and 5, giving the pandigital, 918273645, which is the concatenated product of 9 and (1,2,3,4,5).
-
+
What is the largest 1 to 9 pandigital 9-digit number that can be formed as the concatenated product of an integer with (1,2, ... , n) where n > 1?
'''
|
Ensure all(map(...)) evaluates everything for pure analysis.
* Many constant foldings were not performed with Python 3.
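In Python 3, map is lazy and all() short-circuits at the first falsy value, so side-effecting visitors stop running partway through the sequence; materializing a list first forces every call. Java streams short-circuit the same way, shown here as a hedged analogy rather than the Pythran code itself:

import java.util.List;

class ShortCircuit {
    static int visited = 0;

    static boolean visit(int x) { visited++; return x % 2 == 0; }

    public static void main(String[] args) {
        List<Integer> xs = List.of(2, 3, 4, 5);

        // Lazy and short-circuiting: stops at 3, so 4 and 5 are never visited.
        xs.stream().allMatch(ShortCircuit::visit);
        System.out.println(visited); // 2

        // Force every visit first (the fix in this commit), then combine.
        visited = 0;
        List<Boolean> results = xs.stream().map(ShortCircuit::visit).toList();
        System.out.println(visited + " " + results.stream().allMatch(b -> b)); // 4 false
    }
}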
|
serge-sans-paille_pythran
|
train
|
6deb757a509153e862b6ec0ec267421e01aa2ecb
|
diff --git a/tests/__init__.py b/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -11,13 +11,13 @@ from smartfile.exceptions import SmartFileResponseException
class BaseAPITestCase(unittest.TestCase):
_test_user = {
'name': 'Test User',
- 'username': 'test_úsáideoir',
+ 'username': 'test_user1',
'password': 'testpass',
'email': 'testuser@example.com'
}
_test_user2 = {
'name': 'Test User2',
- 'username': 'test_úsáideoir2',
+ 'username': 'test_user2',
'password': 'testpass2',
'email': 'testuser2@example.com'
}
|
Usernames need to be ASCII
The server still requires ASCII characters in some of the requests it
supports.
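A client-side guard for that constraint, sketched in Java as a hypothetical helper:

import java.nio.charset.StandardCharsets;

class AsciiCheck {
    // True when every character fits in US-ASCII, e.g. "test_user1";
    // "test_úsáideoir" fails on the accented characters.
    static boolean isAscii(String s) {
        return StandardCharsets.US_ASCII.newEncoder().canEncode(s);
    }
}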
|
smartfile_client-python
|
train
|
7b368174e395b556e2a28a968b51aff4f818e1ab
|
diff --git a/src/qtism/data/AssessmentItemRefCollection.php b/src/qtism/data/AssessmentItemRefCollection.php
index <HASH>..<HASH> 100644
--- a/src/qtism/data/AssessmentItemRefCollection.php
+++ b/src/qtism/data/AssessmentItemRefCollection.php
@@ -14,9 +14,9 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
- * Copyright (c) 2013 (original work) Open Assessment Technologies SA (under the project TAO-PRODUCT);
+ * Copyright (c) 2013-2014 (original work) Open Assessment Technologies SA (under the project TAO-PRODUCT);
*
- * @author Jérôme Bogaerts, <jerome@taotesting.com>
+ * @author Jérôme Bogaerts <jerome@taotesting.com>
* @license GPLv2
* @package
*/
@@ -24,7 +24,7 @@
namespace qtism\data;
-use InvalidArgumentException as InvalidArgumentException;
+use \InvalidArgumentException;
/**
* A collection that aims at storing AssessmentItemRef objects.
@@ -37,7 +37,7 @@ class AssessmentItemRefCollection extends SectionPartCollection {
/**
* Check if $value is an AssessmentItemRef object.
*
- * @throws InvalidArgumentException If $value is not a AssessmentItemRef object.
+ * @throws \InvalidArgumentException If $value is not a AssessmentItemRef object.
*/
protected function checkType($value) {
if (!$value instanceof AssessmentItemRef) {
|
Documentation of AssessmentItemRefCollection.
|
oat-sa_qti-sdk
|
train
|
db86fa03fc679c904632d3c09d44f6301193398c
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -38,7 +38,7 @@ import re
# container_child - pexpect-spawned child created to create the container
# host_child - pexpect spawned child living on the host container
-class setup(ShutItModule):
+class conn_docker(ShutItModule):
def is_installed(self,shutit):
return False
@@ -141,7 +141,7 @@ class setup(ShutItModule):
shutit.setup_prompt('SHUTIT_ROOT','root_prompt')
shutit.set_default_expect(config_dict['expect_prompts']['root_prompt'])
shutit.send_and_expect('export DEBIAN_FRONTEND=noninteractive',check_exit=False)
- shutit.pause_point('Anything you want to do to the container before the build starts?')
+ shutit.pause_point('Anything you want to do now the container is connected to?')
return True
def remove(self,shutit):
@@ -169,6 +169,20 @@ BUILDREPEND"""
container_child.sendline('exit') # Exit container
return True
+if not util.module_exists('shutit.tk.conn_docker'):
+ obj = conn_docker('shutit.tk.conn_docker',-0.1,'Connect ShutIt to docker')
+ util.get_shutit_modules().add(obj)
+ ShutItModule.register(conn_docker)
+
+class setup(ShutItModule):
+
+ def is_installed(self,shutit):
+ return False
+
+ def build(self,shutit):
+ shutit.pause_point('Anything you want to do to the container before the build starts?')
+ return True
+
if not util.module_exists('shutit.tk.setup'):
obj = setup('shutit.tk.setup',0.0,'Core ShutIt setup')
util.get_shutit_modules().add(obj)
diff --git a/shutit_main.py b/shutit_main.py
index <HASH>..<HASH> 100755
--- a/shutit_main.py
+++ b/shutit_main.py
@@ -28,10 +28,14 @@ import setup
import time
import sys
-# Gets a list of module ids by run_order
+# Gets a list of module ids by run_order, ignoring conn modules (run order < 0)
def module_ids(shutit, rev=False):
shutit_map = shutit.shutit_map
- ids = sorted(shutit_map.keys(), key=lambda mid: shutit_map[mid].run_order)
+ ids = [
+ key for key, value in shutit.shutit_map.iteritems()
+ if value.run_order >= 0.0
+ ]
+ ids = sorted(ids, key=lambda mid: shutit_map[mid].run_order)
if rev:
ids = list(reversed(ids))
return ids
@@ -92,8 +96,8 @@ def init_shutit_map(shutit):
modules = shutit.shutit_modules
- # Have we got anything to process?
- if len(modules) < 2 :
+ # Have we got anything to process outside of special modules?
+ if len([mod for mod in modules if mod.run_order > 0]) < 1:
shutit.log(modules)
util.fail('No ShutIt modules in path:\n\n' +
':'.join(cfg['host']['shutit_module_paths']) +
@@ -110,6 +114,7 @@ def init_shutit_map(shutit):
shutit.pause_point('',print_input=False)
run_orders = {}
+ has_conn_module = False
has_core_module = False
for m in modules:
assert isinstance(m, ShutItModule)
@@ -119,14 +124,15 @@ def init_shutit_map(shutit):
util.fail('Duplicate run order: ' + str(m.run_order) + ' for ' +
m.module_id + ' and ' + run_orders[m.run_order].module_id)
if m.run_order < 0:
- util.fail('Invalid run order ' + str(m.run_order) + ' for ' +
- m.module_id)
+ has_conn_module = True
if m.run_order == 0:
has_core_module = True
shutit_map[m.module_id] = run_orders[m.run_order] = m
if not has_core_module:
util.fail('No module with run_order=0 specified! This is required.')
+ if not has_conn_module:
+ util.fail('No module with run_order<0 specified! This is required.')
def config_collection(shutit):
cfg = shutit.cfg
@@ -149,6 +155,16 @@ def config_collection(shutit):
if not shutit_map[mid].get_config(shutit):
util.fail(mid + ' failed on get_config')
+def build_conn_module(shutit):
+ cfg = shutit.cfg
+ shutit_map = shutit.shutit_map
+ # Let's go. Run 0 every time, this should set up the container in pexpect.
+ conn_mid = 'shutit.tk.conn_docker'
+ if cfg['build']['tutorial']:
+ shutit.pause_point('\nRunning the conn module (' +
+ shutit.shutit_main_dir + '/setup.py)', print_input=False)
+ shutit_map[conn_mid].build(shutit)
+
def build_core_module(shutit):
cfg = shutit.cfg
shutit_map = shutit.shutit_map
@@ -441,7 +457,7 @@ def shutit_main():
util.load_shutit_modules(shutit)
init_shutit_map(shutit)
config_collection(shutit)
- build_core_module(shutit)
+ build_conn_module(shutit)
if cfg['action']['serve']:
import shutit_srv
@@ -473,6 +489,7 @@ def shutit_main():
# Dependency validation done.
+ build_core_module(shutit)
do_remove(shutit)
do_build(shutit)
do_test(shutit)
|
Add a concept of a conn module
|
ianmiell_shutit
|
train
|
db8bceef5ec4a1ce5cb5072af5836c5fc7f95e55
|
diff --git a/server/src/main/java/org/realityforge/gwt/appcache/server/AbstractManifestServlet.java b/server/src/main/java/org/realityforge/gwt/appcache/server/AbstractManifestServlet.java
index <HASH>..<HASH> 100644
--- a/server/src/main/java/org/realityforge/gwt/appcache/server/AbstractManifestServlet.java
+++ b/server/src/main/java/org/realityforge/gwt/appcache/server/AbstractManifestServlet.java
@@ -114,7 +114,7 @@ public abstract class AbstractManifestServlet
}
}
- private Set<BindingProperty> calculateBindingPropertiesForClient( final HttpServletRequest request )
+ final Set<BindingProperty> calculateBindingPropertiesForClient( final HttpServletRequest request )
throws ServletException
{
try
diff --git a/server/src/test/java/org/realityforge/gwt/appcache/server/ManifestServletTest.java b/server/src/test/java/org/realityforge/gwt/appcache/server/ManifestServletTest.java
index <HASH>..<HASH> 100644
--- a/server/src/test/java/org/realityforge/gwt/appcache/server/ManifestServletTest.java
+++ b/server/src/test/java/org/realityforge/gwt/appcache/server/ManifestServletTest.java
@@ -4,11 +4,14 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
+import org.realityforge.gwt.appcache.server.propertyprovider.PropertyProvider;
import org.testng.annotations.Test;
import static org.mockito.Mockito.*;
import static org.testng.Assert.*;
@@ -31,6 +34,51 @@ public class ManifestServletTest
}
}
+ static class TestPropertyProvider
+ implements PropertyProvider
+ {
+ private final String _key;
+ private final String _value;
+
+ TestPropertyProvider( final String key, final String value )
+ {
+ _key = key;
+ _value = value;
+ }
+
+ @Override
+ public String getPropertyName()
+ {
+ return _key;
+ }
+
+ @Override
+ public String getPropertyValue( final HttpServletRequest request )
+ throws Exception
+ {
+ return _value;
+ }
+ }
+
+ @Test
+ public void calculateBindingPropertiesForClient()
+ throws Exception
+ {
+ final TestManifestServlet servlet = new TestManifestServlet();
+ servlet.addPropertyProvider( new TestPropertyProvider( "X", "1" ) );
+ servlet.addPropertyProvider( new TestPropertyProvider( "Y", "2" ) );
+ final HttpServletRequest request = mock( HttpServletRequest.class );
+ final Set<BindingProperty> properties = servlet.calculateBindingPropertiesForClient( request );
+ assertEquals( properties.size(), 2 );
+ final Iterator<BindingProperty> iterator = properties.iterator();
+ final BindingProperty property1 = iterator.next();
+ final BindingProperty property2 = iterator.next();
+ assertEquals( property1.getName(), "X" );
+ assertEquals( property1.getValue(), "1" );
+ assertEquals( property2.getName(), "Y" );
+ assertEquals( property2.getValue(), "2" );
+ }
+
@Test
public void getModuleName()
throws Exception
@@ -77,7 +125,7 @@ public class ManifestServletTest
when( servletContext.getRealPath( "/foo/myapp/permutations.xml" ) ).thenReturn( permutations.getAbsolutePath() );
- final Map<String,List<BindingProperty>> bindings = servlet.getBindingMap( "/foo/", "myapp" );
+ final Map<String, List<BindingProperty>> bindings = servlet.getBindingMap( "/foo/", "myapp" );
assertNotNull( bindings );
assertTrue( bindings == servlet.getBindingMap( "/foo/", "myapp" ) );
|
Test calculateBindingPropertiesForClient
|
realityforge_gwt-appcache
|
train
|
e76f93887f24e510f5ac6db797e12323ac01c06b
|
diff --git a/utils/webpack/webpack.config.js b/utils/webpack/webpack.config.js
index <HASH>..<HASH> 100644
--- a/utils/webpack/webpack.config.js
+++ b/utils/webpack/webpack.config.js
@@ -21,7 +21,6 @@ const getExtensionsNodeModulesPaths = require('./lib/getExtensionsNodeModulesPat
const themePath = process.cwd();
const appConfig = getAppSettings(themePath);
-const componentsConfig = getComponentsSettings(themePath);
const themeConfig = getThemeConfig(themePath, appConfig);
const isoLang = convertLanguageToISO(appConfig.language);
const { sourceMap, ip, apiPort } = getDevConfig();
@@ -73,7 +72,7 @@ const config = {
'process.env': {
NODE_ENV: JSON.stringify(ENV),
APP_CONFIG: JSON.stringify(appConfig),
- COMPONENTS_CONFIG: JSON.stringify(componentsConfig),
+ COMPONENTS_CONFIG: JSON.stringify(getComponentsSettings(themePath)),
THEME_CONFIG: JSON.stringify(themeConfig),
THEME: JSON.stringify(process.env.theme),
// @deprecated Replaced by LOCALE and LOCALE_FILE - kept for now for theme compatibility.
|
PWA-<I> Get fresh component config on every run.
|
shopgate_pwa
|
train
|
2dde5bbaeb20dd8d1d7c8ca05d8b9ae5913967cc
|
diff --git a/src/components/link.js b/src/components/link.js
index <HASH>..<HASH> 100644
--- a/src/components/link.js
+++ b/src/components/link.js
@@ -40,6 +40,15 @@ export default {
const on = {
click: (e) => {
+ // don't redirect with control keys
+ /* istanbul ignore if */
+ if (e.metaKey || e.ctrlKey || e.shiftKey) return
+ // don't redirect when preventDefault called
+ /* istanbul ignore if */
+ if (e.defaultPrevented) return
+ // don't redirect on right click
+ /* istanbul ignore if */
+ if (e.button !== 0) return
e.preventDefault()
if (this.replace) {
router.replace(to)
|
don't trigger transition when control keys or other mouse buttons are used. (#<I>)
|
vuejs_vue-router
|
train
|
91ba99310547bbe40f33f4ecbae3ffbd870f3539
|
diff --git a/oct2py/session.py b/oct2py/session.py
index <HASH>..<HASH> 100644
--- a/oct2py/session.py
+++ b/oct2py/session.py
@@ -394,9 +394,11 @@ class Oct2Py(object):
"""
try:
doc = self._eval('help {0}'.format(name), log=False, verbose=False)
- except Oct2PyError:
- self._eval('type {0}'.format(name), log=False, verbose=False)
- doc = 'No documentation for {0}'.format(name)
+ except Oct2PyError as e:
+ if 'syntax error' in str(e):
+ raise(e)
+ doc = self._eval('type {0}'.format(name), log=False, verbose=False)
+ doc = doc.splitlines()[0]
return doc
def __getattr__(self, attr):
@@ -574,8 +576,7 @@ class _Session(object):
if not banner.endswith('\n'):
banner += '\n'
self.stdout.write(banner)
- pwd = self.evaluate(['pwd'], False, False)
- pwd = pwd.splitlines()[-1][6:]
+ pwd = self.get_pwd()
path = '%s/__oct2py_interact.m' % pwd
with open(path, 'wb') as fid:
msg = 'keyboard("%s")\n' % prompt
@@ -587,6 +588,11 @@ class _Session(object):
os.remove(path)
self._interact(prompt)
+ def get_pwd(self):
+ """Get the present working directory of the session"""
+ pwd = self.evaluate(['pwd'], False, False)
+ return pwd.splitlines()[-1][6:]
+
def _find_prompt(self, prompt='debug> ', disp=True):
"""Look for the prompt in the Octave output, print chars if disp"""
output = []
|
Improve docstring handling and create pwd method in Session
|
blink1073_oct2py
|
train
|
306fda14f0ea9a5349a2f5249f1abd9a831690af
|
diff --git a/python_modules/dagster/dagster/core/definitions/environment_configs.py b/python_modules/dagster/dagster/core/definitions/environment_configs.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster/core/definitions/environment_configs.py
+++ b/python_modules/dagster/dagster/core/definitions/environment_configs.py
@@ -32,14 +32,14 @@ def SystemDict(fields, description=None):
return build_config_dict(fields, description, is_system_config=True)
-class _SolidContainerConfigDict(_ConfigHasFields):
+class SolidContainerConfigDict(_ConfigHasFields):
def __init__(self, name, fields, description=None, handle=None, child_solids_config_field=None):
self._handle = check.opt_inst_param(handle, 'handle', SolidHandle)
self._child_solids_config_field = check.opt_inst_param(
child_solids_config_field, 'child_solids_config_field', Field
)
- super(_SolidContainerConfigDict, self).__init__(
+ super(SolidContainerConfigDict, self).__init__(
key=name,
name=name,
kind=ConfigTypeKind.DICT,
@@ -48,6 +48,9 @@ class _SolidContainerConfigDict(_ConfigHasFields):
type_attributes=ConfigTypeAttributes(is_system_config=True),
)
+ def inst(self):
+ return self
+
@property
def handle(self):
'''A solid handle ref to the composite solid that is associated with this config schema
@@ -63,30 +66,17 @@ class _SolidContainerConfigDict(_ConfigHasFields):
return self._child_solids_config_field
-def SolidContainerConfigDict(
- name, fields, description=None, handle=None, child_solids_config_field=None
-):
- class _SolidContainerConfigDictInternal(_SolidContainerConfigDict):
- def __init__(self):
- super(_SolidContainerConfigDictInternal, self).__init__(
- name=name,
- fields=fields,
- description=description,
- handle=handle,
- child_solids_config_field=child_solids_config_field,
- )
-
- return _SolidContainerConfigDictInternal
-
-
def SystemSelector(fields, description=None):
return Selector(fields, description, is_system_config=True)
-class _SolidConfigDict(_ConfigHasFields):
- def __init__(self, name, fields, description):
+class SolidConfigDict(_ConfigHasFields):
+ def __init__(self, name, fields, description=None):
+ from dagster.core.types.field_utils import check_user_facing_fields_dict
+
+ check_user_facing_fields_dict(fields, 'NamedDict named "{}"'.format(name))
- super(_SolidConfigDict, self).__init__(
+ super(SolidConfigDict, self).__init__(
key=name,
name=name,
kind=ConfigTypeKind.DICT,
@@ -95,27 +85,16 @@ class _SolidConfigDict(_ConfigHasFields):
type_attributes=ConfigTypeAttributes(is_system_config=True),
)
-
-def SolidConfigDict(name, fields, description=None):
- from dagster.core.types.field_utils import check_user_facing_fields_dict
-
- check_user_facing_fields_dict(fields, 'NamedDict named "{}"'.format(name))
-
- class _SolidConfigDictInternal(_SolidConfigDict):
- def __init__(self):
- super(_SolidConfigDictInternal, self).__init__(
- name=name, fields=fields, description=description
- )
-
- return _SolidConfigDictInternal
+ def inst(self):
+ return self
def is_solid_dict(obj):
- return isinstance(obj, _SolidConfigDict)
+ return isinstance(obj, SolidConfigDict)
def is_solid_container_config(obj):
- return isinstance(obj, _SolidContainerConfigDict)
+ return isinstance(obj, SolidContainerConfigDict)
def _is_selector_field_optional(config_type):
|
(python-config-type-instance-9) Convert SolidContainerConfigDict and SolidConfigDict to inst()
Summary:
These are special config types in the environment configs system.
Depends on D<I>
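As a rough Python illustration of the conversion (not dagster's actual API): the class-factory indirection is replaced by a plain class whose `inst()` returns itself, so call sites that previously instantiated a factory-generated subclass can now just ask the instance for itself:

```python
class ConfigDict:
    def __init__(self, name, fields):
        self.name = name
        self.fields = fields

    def inst(self):
        # Already an instance; no throwaway subclass to construct.
        return self

# Before: a factory returned a one-off subclass to be instantiated later.
# After: construct directly and pass the instance around.
config_type = ConfigDict("solids", {"a": int})
assert config_type.inst() is config_type
```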
Test Plan: BK
Reviewers: max, alangenfeld
Reviewed By: alangenfeld
Differential Revision: <URL>
|
dagster-io_dagster
|
train
|
05f5fd92a0192900c32db553e87d84ce49a36383
|
diff --git a/lib/cxxproject/ext/rake.rb b/lib/cxxproject/ext/rake.rb
index <HASH>..<HASH> 100644
--- a/lib/cxxproject/ext/rake.rb
+++ b/lib/cxxproject/ext/rake.rb
@@ -18,10 +18,15 @@ module Rake
class Application
attr_writer :max_parallel_tasks
+ attr_writer :check_unnecessary_includes
attr_writer :deriveIncludes
def max_parallel_tasks
@max_parallel_tasks ||= 8
end
+
+ def check_unnecessary_includes
+ @check_unnecessary_includes ||= false
+ end
def idei
@idei ||= Cxxproject::IDEInterface.new
@@ -117,17 +122,19 @@ module Rake
handle_jobs(jobs, args, invocation_chain)
end.join
- if not @failure # otherwise the dependency files might be incorrect or not complete
- @bb.incArray.each do |i|
- if not @bb.deps_in_depFiles.any? { |d| d.index(i) == 0 }
- msg = "INFO: Include to #{i} seems to be unnecessary"
- Cxxproject::Printer.printInfo msg
- res = Cxxproject::ErrorDesc.new
- res.file_name = @project_dir
- res.line_number = 0
- res.severity = Cxxproject::ErrorParser::SEVERITY_INFO
- res.message = msg
- Rake.application.idei.set_errors([res])
+ if @check_unnecessary_includes
+ if not @failure # otherwise the dependency files might be incorrect or not complete
+ @bb.incArray.each do |i|
+ if not @bb.deps_in_depFiles.any? { |d| d.index(i) == 0 }
+ msg = "INFO: Include to #{i} seems to be unnecessary"
+ Cxxproject::Printer.printInfo msg
+ res = Cxxproject::ErrorDesc.new
+ res.file_name = @project_dir
+ res.line_number = 0
+ res.severity = Cxxproject::ErrorParser::SEVERITY_INFO
+ res.message = msg
+ Rake.application.idei.set_errors([res])
+ end
end
end
end
|
check for unnecessary includes now off by default, can be switched on
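A hedged Python sketch of the same opt-in default (the original is Ruby's `||=` on `Rake::Application`); the names here are illustrative only:

```python
class Application:
    def __init__(self):
        self._check_unnecessary_includes = None

    @property
    def check_unnecessary_includes(self):
        # Lazily default to False, mirroring Ruby's `@flag ||= false`.
        if self._check_unnecessary_includes is None:
            self._check_unnecessary_includes = False
        return self._check_unnecessary_includes

    @check_unnecessary_includes.setter
    def check_unnecessary_includes(self, value):
        self._check_unnecessary_includes = value

app = Application()
assert app.check_unnecessary_includes is False  # default: off
app.check_unnecessary_includes = True           # explicit opt-in
```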
|
marcmo_cxxproject
|
train
|
e8f75d022200e5b77d78303b3eb092914290167b
|
diff --git a/src/Command.php b/src/Command.php
index <HASH>..<HASH> 100644
--- a/src/Command.php
+++ b/src/Command.php
@@ -33,6 +33,13 @@ class Command
public $useExec = false;
/**
+ * @var bool whether to capture stderr (2>&1) when `useExec` is true. This will try to redirect the
+ * stderr to stdout and provide the complete output of both in `getStdErr()` and `getError()`.
+ * Default is `true`.
+ */
+ public $captureStdErr = true;
+
+ /**
* @var string|null the initial working dir for proc_open(). Default is null for current PHP working dir.
*/
public $procCwd;
@@ -265,10 +272,12 @@ class Command
}
if ($this->useExec) {
- exec($command, $output, $this->_exitCode);
+ $execCommand = $this->captureStdErr ? "$command 2>&1" : $command;
+ exec($execCommand, $output, $this->_exitCode);
$this->_stdOut = trim(implode("\n", $output));
if ($this->_exitCode!==0) {
- $this->_error = 'Command failed';
+ $this->_stdErr = $this->_stdOut;
+ $this->_error = empty($this->_stdErr) ? 'Command failed' : $this->_stdErr;
return false;
}
} else {
diff --git a/tests/CommandTest.php b/tests/CommandTest.php
index <HASH>..<HASH> 100644
--- a/tests/CommandTest.php
+++ b/tests/CommandTest.php
@@ -178,8 +178,8 @@ class CommandTest extends \PHPUnit_Framework_TestCase
$this->assertFalse($command->execute());
$this->assertFalse($command->getExecuted());
$this->assertNotEmpty($command->getError());
- $this->assertEmpty($command->getStdErr());
- $this->assertEmpty($command->getOutput());
+ $this->assertNotEmpty($command->getStdErr());
+ $this->assertNotEmpty($command->getOutput());
$this->assertEquals(127, $command->getExitCode());
}
public function testCanNotRunInvalidCommandWithExec()
@@ -190,8 +190,8 @@ class CommandTest extends \PHPUnit_Framework_TestCase
$this->assertFalse($command->execute());
$this->assertFalse($command->getExecuted());
$this->assertNotEmpty($command->getError());
- $this->assertEmpty($command->getStdErr());
- $this->assertEmpty($command->getOutput());
+ $this->assertNotEmpty($command->getStdErr());
+ $this->assertNotEmpty($command->getOutput());
$this->assertEquals(2, $command->getExitCode());
}
|
Improve error capturing with useExec
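The same idea in a hedged Python sketch using the standard library rather than the library's PHP API: merge stderr into stdout so a failing command's diagnostics are not lost, and prefer that captured text over a generic error message:

```python
import subprocess

def run(command):
    # stderr=STDOUT mirrors the shell's `2>&1` redirection from the diff.
    proc = subprocess.run(
        command, shell=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True,
    )
    output = proc.stdout.strip()
    if proc.returncode != 0:
        # Prefer the captured output as the error; fall back to a generic one.
        return False, output, output or "Command failed"
    return True, output, None

ok, out, err = run("nonexistent-command-xyz")
assert not ok and err  # the stderr text now surfaces in the error
```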
|
mikehaertl_php-shellcommand
|
train
|
fa522c41fca4f5c08707c7f04f1228b929e61b68
|
diff --git a/projects/samskivert/src/java/com/samskivert/servlet/user/UserUtil.java b/projects/samskivert/src/java/com/samskivert/servlet/user/UserUtil.java
index <HASH>..<HASH> 100644
--- a/projects/samskivert/src/java/com/samskivert/servlet/user/UserUtil.java
+++ b/projects/samskivert/src/java/com/samskivert/servlet/user/UserUtil.java
@@ -1,5 +1,5 @@
//
-// $Id: UserUtil.java,v 1.5 2002/10/16 00:42:26 mdb Exp $
+// $Id: UserUtil.java,v 1.6 2002/10/16 00:44:48 mdb Exp $
//
// samskivert library - useful routines for java programs
// Copyright (C) 2001 Michael Bayne
@@ -40,14 +40,7 @@ public class UserUtil
buf.append(Math.random());
// and MD5 hash it
- String auth = StringUtil.md5hex(buf.toString());
- if (auth == null) {
- throw new RuntimeException("JVM missing MD5 message digest " +
- "algorithm implementation. User " +
- "management facilities require MD5 " +
- "encoding capabilities.");
- }
- return auth;
+ return StringUtil.md5hex(buf.toString());
}
/**
diff --git a/projects/samskivert/src/java/com/samskivert/util/StringUtil.java b/projects/samskivert/src/java/com/samskivert/util/StringUtil.java
index <HASH>..<HASH> 100644
--- a/projects/samskivert/src/java/com/samskivert/util/StringUtil.java
+++ b/projects/samskivert/src/java/com/samskivert/util/StringUtil.java
@@ -1,5 +1,5 @@
//
-// $Id: StringUtil.java,v 1.38 2002/10/16 00:42:26 mdb Exp $
+// $Id: StringUtil.java,v 1.39 2002/10/16 00:44:48 mdb Exp $
//
// samskivert library - useful routines for java programs
// Copyright (C) 2001 Michael Bayne
@@ -579,8 +579,10 @@ public class StringUtil
}
/**
- * Returns a hex string representing the MD5 encoded source or null if
- * the MD5 codec was not available in this JVM.
+ * Returns a hex string representing the MD5 encoded source.
+ *
+ * @exception RuntimeException thrown if the MD5 codec was not
+ * available in this JVM.
*/
public static String md5hex (String source)
{
@@ -588,7 +590,7 @@ public class StringUtil
MessageDigest digest = MessageDigest.getInstance("MD5");
return hexlate(digest.digest(source.getBytes()));
} catch (NoSuchAlgorithmException nsae) {
- return null;
+ throw new RuntimeException("MD5 codec not available");
}
}
|
Go ahead and throw the exception in StringUtil since everyone else will
end up doing something like that anyway.
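A hedged Python analogue of the new contract — fail loudly instead of returning None when the digest is unavailable. `hashlib` makes MD5 effectively always present, so the except branch is illustrative (it can fire on FIPS-restricted builds):

```python
import hashlib

def md5hex(source: str) -> str:
    """Return the hex MD5 of `source`; raise instead of returning None."""
    try:
        return hashlib.md5(source.encode("utf-8")).hexdigest()
    except ValueError:  # e.g. MD5 disabled in FIPS-mode builds
        raise RuntimeError("MD5 codec not available")

print(md5hex("hello"))  # 5d41402abc4b2a76b9719d911017c592
```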
git-svn-id: <URL>
|
samskivert_samskivert
|
train
|
095ef4d89a2bf273f1675756607ecfd43bb3f619
|
diff --git a/cqlengine/query.py b/cqlengine/query.py
index <HASH>..<HASH> 100644
--- a/cqlengine/query.py
+++ b/cqlengine/query.py
@@ -16,7 +16,7 @@ from cqlengine.functions import QueryValue, Token, BaseQueryFunction
#http://www.datastax.com/docs/1.1/references/cql/index
from cqlengine.operators import InOperator, EqualsOperator, GreaterThanOperator, GreaterThanOrEqualOperator
from cqlengine.operators import LessThanOperator, LessThanOrEqualOperator, BaseWhereOperator
-from cqlengine.statements import WhereClause, SelectStatement, DeleteStatement, UpdateStatement, AssignmentClause, InsertStatement, BaseCQLStatement
+from cqlengine.statements import WhereClause, SelectStatement, DeleteStatement, UpdateStatement, AssignmentClause, InsertStatement, BaseCQLStatement, MapUpdateClause, MapDeleteClause
class QueryException(CQLEngineException): pass
@@ -310,9 +310,9 @@ class AbstractQuerySet(object):
def _execute(self, q, params=None):
if self._batch:
- return self._batch.add_query(q, params=params)
+ return self._batch.add_query(q)
else:
- return execute(q, params=params, consistency_level=self._consistency)
+ return execute(q, consistency_level=self._consistency)
def __unicode__(self):
return self._select_query()
@@ -793,21 +793,11 @@ class ModelQuerySet(AbstractQuerySet):
us.add_assignment_clause(AssignmentClause(name, col.to_database(val)))
if us.assignments:
- qs = str(us)
- ctx = us.get_context()
- if self._batch:
- self._batch.add_query(qs, ctx)
- else:
- execute(qs, ctx, self._consistency)
+ self._execute(us)
if nulled_columns:
ds = DeleteStatement(self.column_family_name, fields=nulled_columns, where=self._where)
- qs = str(ds)
- ctx = ds.get_context()
- if self._batch:
- self._batch.add_query(qs, ctx)
- else:
- execute(qs, ctx, self._consistency)
+ self._execute(ds)
class DMLQuery(object):
@@ -845,36 +835,27 @@ class DMLQuery(object):
"""
executes a delete query to remove columns that have changed to null
"""
- values, field_names, field_ids, field_values, query_values = self._get_query_values()
-
- # delete nulled columns and removed map keys
- qs = ['DELETE']
- query_values = {}
-
- del_statements = []
- for k,v in self.instance._values.items():
+ ds = DeleteStatement(self.column_family_name)
+ deleted_fields = False
+ for _, v in self.instance._values.items():
col = v.column
if v.deleted:
- del_statements += ['"{}"'.format(col.db_field_name)]
+ ds.add_field(col.db_field_name)
+ deleted_fields = True
elif isinstance(col, Map):
- del_statements += col.get_delete_statement(v.value, v.previous_value, query_values)
+ uc = MapDeleteClause(col.db_field_name, v.value, v.previous_value)
+ if uc.get_context_size() > 0:
+ ds.add_field(uc)
+ deleted_fields = True
- if del_statements:
- qs += [', '.join(del_statements)]
-
- qs += ['FROM {}'.format(self.column_family_name)]
-
- qs += ['WHERE']
- where_statements = []
+ if deleted_fields:
for name, col in self.model._primary_keys.items():
- field_id = uuid4().hex
- query_values[field_id] = field_values[name]
- where_statements += ['"{}" = :{}'.format(col.db_field_name, field_id)]
- qs += [' AND '.join(where_statements)]
-
- qs = ' '.join(qs)
-
- self._execute(qs, query_values)
+ ds.add_where_clause(WhereClause(
+ col.db_field_name,
+ EqualsOperator(),
+ col.to_database(getattr(self.instance, name))
+ ))
+ self._execute(ds)
def update(self):
"""
diff --git a/cqlengine/statements.py b/cqlengine/statements.py
index <HASH>..<HASH> 100644
--- a/cqlengine/statements.py
+++ b/cqlengine/statements.py
@@ -569,11 +569,19 @@ class DeleteStatement(BaseCQLStatement):
for field in fields or []:
self.add_field(field)
+ def get_context(self):
+ ctx = super(DeleteStatement, self).get_context()
+ for field in self.fields:
+ field.update_context(ctx)
+ return ctx
+
def add_field(self, field):
if isinstance(field, basestring):
field = FieldDeleteClause(field)
if not isinstance(field, BaseClause):
raise StatementException("only instances of AssignmentClause can be added to statements")
+ field.set_context_id(self.context_counter)
+ self.context_counter += field.get_context_size()
self.fields.append(field)
def __unicode__(self):
|
move deletion of nulled columns into DeleteStatement
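A hedged, heavily simplified sketch (not cqlengine's full API) of what the refactor buys: null-column deletes are expressed through a statement object that renders and parameterizes itself, instead of hand-assembled query strings and uuid-keyed context dicts:

```python
class DeleteStatement:
    def __init__(self, table):
        self.table = table
        self.fields = []
        self.wheres = []  # (column, value) pairs, equality only

    def add_field(self, name):
        self.fields.append(name)

    def add_where(self, column, value):
        self.wheres.append((column, value))

    def __str__(self):
        cols = ", ".join('"%s"' % f for f in self.fields)
        conds = " AND ".join('"%s" = %%(%s)s' % (c, c) for c, _ in self.wheres)
        return "DELETE %s FROM %s WHERE %s" % (cols, self.table, conds)

    def get_context(self):
        return dict(self.wheres)

ds = DeleteStatement("users")
ds.add_field("nickname")
ds.add_where("id", 42)
print(str(ds))          # DELETE "nickname" FROM users WHERE "id" = %(id)s
print(ds.get_context()) # {'id': 42}
```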
|
cqlengine_cqlengine
|
train
|
6208b7ac72b0be25924cc03a0e360a088ae98cd9
|
diff --git a/cachestore/remote/src/main/java/org/infinispan/loaders/remote/configuration/RemoteCacheStoreConfiguration.java b/cachestore/remote/src/main/java/org/infinispan/loaders/remote/configuration/RemoteCacheStoreConfiguration.java
index <HASH>..<HASH> 100644
--- a/cachestore/remote/src/main/java/org/infinispan/loaders/remote/configuration/RemoteCacheStoreConfiguration.java
+++ b/cachestore/remote/src/main/java/org/infinispan/loaders/remote/configuration/RemoteCacheStoreConfiguration.java
@@ -26,6 +26,7 @@ import org.infinispan.client.hotrod.impl.ConfigurationProperties;
import org.infinispan.configuration.BuiltBy;
import org.infinispan.configuration.cache.AbstractStoreConfiguration;
import org.infinispan.configuration.cache.AsyncStoreConfiguration;
+import org.infinispan.configuration.cache.LegacyConfigurationAdaptor;
import org.infinispan.configuration.cache.LegacyLoaderAdapter;
import org.infinispan.configuration.cache.SingletonStoreConfiguration;
import org.infinispan.loaders.remote.RemoteCacheStoreConfig;
@@ -141,11 +142,7 @@ public class RemoteCacheStoreConfiguration extends AbstractStoreConfiguration im
public RemoteCacheStoreConfig adapt() {
RemoteCacheStoreConfig config = new RemoteCacheStoreConfig();
// StoreConfiguration
- config.fetchPersistentState(fetchPersistentState());
- config.ignoreModifications(ignoreModifications());
- config.purgeOnStartup(purgeOnStartup());
- config.purgeSynchronously(purgeSynchronously());
- config.purgerThreads(purgerThreads());
+ LegacyConfigurationAdaptor.adapt(this, config);
// RemoteCacheStoreConfiguration
config.setRemoteCacheName(remoteCacheName);
|
Use common StoreConfiguration > CacheStoreConfig legacy adaptor for the RemoteCacheStore
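The diff above is Java; as a hedged Python sketch of the underlying move — hoisting the repeated field copying into one shared adaptor so each store's `adapt()` stops duplicating it — with all names here being illustrative stand-ins:

```python
def adapt_common_store_config(src, dst):
    """Hypothetical shared adaptor: copy the fields every store config
    shares, so individual adapt() methods no longer repeat them."""
    for attr in ("fetch_persistent_state", "ignore_modifications",
                 "purge_on_startup", "purge_synchronously", "purger_threads"):
        setattr(dst, attr, getattr(src, attr))

class Src:  # stand-in for the new-style configuration
    fetch_persistent_state = True
    ignore_modifications = False
    purge_on_startup = False
    purge_synchronously = True
    purger_threads = 1

class Dst:  # stand-in for the legacy config
    pass

dst = Dst()
adapt_common_store_config(Src(), dst)
assert dst.purger_threads == 1
```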
|
infinispan_infinispan
|
train
|
51a627ac1da751d389ab3e265c32e4ff84fb334d
|
diff --git a/core/src/main/java/hudson/model/AbstractBuild.java b/core/src/main/java/hudson/model/AbstractBuild.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/AbstractBuild.java
+++ b/core/src/main/java/hudson/model/AbstractBuild.java
@@ -488,7 +488,7 @@ public abstract class AbstractBuild<P extends AbstractProject<P,R>,R extends Abs
return;
}
} catch (AbortException e) {
- // checkout error already reported
+ listener.error(e.getMessage());
} catch (IOException e) {
// checkout error not yet reported
e.printStackTrace(listener.getLogger());
diff --git a/core/src/main/java/hudson/model/AbstractProject.java b/core/src/main/java/hudson/model/AbstractProject.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/AbstractProject.java
+++ b/core/src/main/java/hudson/model/AbstractProject.java
@@ -1232,6 +1232,7 @@ public abstract class AbstractProject<P extends AbstractProject<P,R>,R extends A
return r;
}
} catch (AbortException e) {
+ listener.getLogger().println(e.getMessage());
listener.fatalError(Messages.AbstractProject_Aborted());
LOGGER.log(Level.FINE, "Polling "+this+" aborted",e);
return NO_CHANGES;
diff --git a/core/src/main/java/hudson/model/Run.java b/core/src/main/java/hudson/model/Run.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/hudson/model/Run.java
+++ b/core/src/main/java/hudson/model/Run.java
@@ -1285,6 +1285,7 @@ public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,Run
throw t;
} catch( AbortException e ) {// orderly abortion.
result = Result.FAILURE;
+ listener.error(e.getMessage());
LOGGER.log(FINE, "Build "+this+" aborted",e);
} catch( RunnerAbortedException e ) {// orderly abortion.
result = Result.FAILURE;
|
Hudson was failing to report error messages in several situations during a build.
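A hedged Python sketch of the fix's shape — when an orderly abort carries a message, surface it to the user-visible listener before recording the failure, instead of only logging it at a fine-grained level:

```python
import logging

class AbortException(Exception):
    pass

def run_build(step, listener_error, log=logging.getLogger("build")):
    try:
        step()
        return "SUCCESS"
    except AbortException as e:
        # Previously only logged at FINE; now the listener also gets it.
        listener_error(str(e))
        log.debug("Build aborted", exc_info=True)
        return "FAILURE"

def failing_step():
    raise AbortException("checkout failed")

result = run_build(failing_step, print)  # prints: checkout failed
assert result == "FAILURE"
```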
git-svn-id: <URL>
|
jenkinsci_jenkins
|
train
|