Dataset columns:

Column    Type            Length / values
hash      stringlengths   40 to 40
diff      stringlengths   131 to 114k
message   stringlengths   7 to 980
project   stringlengths   5 to 67
split     stringclasses   1 value
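For illustration only, here is a minimal sketch of reading records with this schema, assuming the rows are exported as JSON Lines with one object per record; the file name commits.jsonl and the variable names are assumptions made for this example, while the field names and ranges come from the schema above.

```python
import json

# Hypothetical export: one JSON object per line, with the five fields listed
# in the schema above. The file name "commits.jsonl" is an assumption.
with open("commits.jsonl", encoding="utf-8") as handle:
    for line in handle:
        record = json.loads(line)
        commit_hash = record["hash"]   # 40-character commit hash
        diff = record["diff"]          # unified diff text (131 chars to ~114k chars)
        message = record["message"]    # commit message (7 to 980 chars)
        project = record["project"]    # project identifier (5 to 67 chars)
        split = record["split"]        # dataset split; only one class ("train") appears
        # e.g. pair (diff, message) for commit-message generation experiments
```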
34378b5c6ac10116d5d592eb983d653d12c436ee
diff --git a/scdl/scdl.py b/scdl/scdl.py index <HASH>..<HASH> 100755 --- a/scdl/scdl.py +++ b/scdl/scdl.py @@ -53,7 +53,7 @@ from scdl import __version__ from scdl import soundcloud, utils logging.basicConfig(level=logging.INFO, format='%(message)s') -logging.getLogger("requests").setLevel(logging.WARNING) +logging.getLogger('requests').setLevel(logging.WARNING) logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) logger.addFilter(utils.ColorizeFilter()) @@ -147,7 +147,6 @@ def get_item(track_url): """ Fetches metadata for an track or playlist """ - try: item = client.get('/resolve', url=track_url) except Exception: @@ -290,19 +289,15 @@ def download_playlist(playlist): os.makedirs(playlist_name) os.chdir(playlist_name) - playlist_file = open(playlist_name + ".m3u", "w+") - playlist_file.write("#EXTM3U\n") - - for counter, track_raw in enumerate(playlist.tracks, 1): - if offset > 0: - offset -= 1 - continue - mp3_url = get_item(track_raw['permalink_url']) - logger.info('Track n°{0}'.format(counter)) - download_track(mp3_url, playlist.title, playlist_file) - - playlist_file.close() - + with open(playlist_name + '.m3u', 'w+') as playlist_file: + playlist_file.write('#EXTM3U' + os.linesep) + for counter, track_raw in enumerate(playlist.tracks, 1): + if offset > 0: + offset -= 1 + continue + mp3_url = get_item(track_raw['permalink_url']) + logger.info('Track n°{0}'.format(counter)) + download_track(mp3_url, playlist.title, playlist_file) os.chdir('..') @@ -374,8 +369,7 @@ def download_track(track, playlist_name=None, playlist_file=None): # Add the track to the generated m3u playlist file if playlist_file: duration = math.floor(track.duration / 1000) - playlist_file.write("#EXTINF:" + str(duration) + "," + title + "\n") - playlist_file.write(filename + "\n") + playlist_file.write('#EXTINF:{0},{1}{3}{2}{3}'.format(duration, title, filename, os.linesep)) # Download if not os.path.isfile(filename): @@ -441,10 +435,9 @@ def signal_handler(signal, frame): handle keyboardinterrupt """ time.sleep(1) - files = os.listdir() - for f in files: - if not os.path.isdir(f) and '.tmp' in f: - os.remove(f) + for path in os.listdir(): + if not os.path.isdir(path) and '.tmp' in path: + os.remove(path) logger.newline() logger.info('Good bye!')
Basic code cleanup (avoid hardcoding \n, use single quote string by convention, use with statement to handle files, ...)
flyingrub_scdl
train
8ae258d3784dad67662f205a416ef1c083b82fc2
diff --git a/sbol-data-io-RDF/src/main/java/org/sbolstandard/core/io/rdf/RdfIo.java b/sbol-data-io-RDF/src/main/java/org/sbolstandard/core/io/rdf/RdfIo.java index <HASH>..<HASH> 100644 --- a/sbol-data-io-RDF/src/main/java/org/sbolstandard/core/io/rdf/RdfIo.java +++ b/sbol-data-io-RDF/src/main/java/org/sbolstandard/core/io/rdf/RdfIo.java @@ -440,7 +440,7 @@ public class RdfIo{ NamedProperty<QName>[] propertyArray = properties.toArray(new NamedProperty[properties.size()]); NamedProperties<QName> namedProperties = Datatree.NamedProperties(propertyArray); NamespaceBindings bindings = Datatree.NamespaceBindings( - (NamespaceBinding[]) document.getNamespaceBindings().toArray()); + (NamespaceBinding[]) document.getNamespaceBindings().toArray(new NamespaceBinding[document.getNamespaceBindings().size()])); if (document instanceof TopLevelDocument) {
Fixed issue with casting from toArray which causes a Java 9 problem
SynBioDex_sbol-data
train
c366c8923d61bfb75dde1576f0bb95513ae2886e
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ with open('README.rst') as file: long_description = file.read() setup(name = 'pytimeparse', - version = '1.0', + version = '1.0.0', description = 'Time expression parser', author = 'Will Roberts', author_email = 'wildwilhelm@gmail.com',
setup.py: change version number to <I>
wroberts_pytimeparse
train
05c7d2328000d0c6c49798540e4b1a370028be52
diff --git a/src/Recurrence/Type/Daily.php b/src/Recurrence/Type/Daily.php index <HASH>..<HASH> 100644 --- a/src/Recurrence/Type/Daily.php +++ b/src/Recurrence/Type/Daily.php @@ -30,6 +30,8 @@ class Daily implements RecurrenceInterface foreach ($dailyEvents as $dailyEvent) { + list(, $dailyEventTime) = explode(' ', $dailyEvent->getStartDate()); + $startMarker = $fromDate > new \DateTime($dailyEvent->getStartDate()) ? clone($fromDate) : new \DateTime($dailyEvent->getStartDate()); @@ -58,7 +60,7 @@ class Daily implements RecurrenceInterface } $newDailyEvent = clone($dailyEvent); - $newStartDate = $date; + $newStartDate = new \DateTime($date->format('Y-m-d').' '.$dailyEventTime); $duration = $newDailyEvent->getDuration(); $newDailyEvent->setStartDate($newStartDate); diff --git a/src/Recurrence/Type/MonthlyDate.php b/src/Recurrence/Type/MonthlyDate.php index <HASH>..<HASH> 100644 --- a/src/Recurrence/Type/MonthlyDate.php +++ b/src/Recurrence/Type/MonthlyDate.php @@ -30,6 +30,8 @@ class MonthlyDate implements RecurrenceInterface foreach($monthlyEvents as $monthlyEvent) { + list(, $monthlyEventTime) = explode(' ', $monthlyEvent->getStartDate()); + $monthlyDate = date('d', strtotime($monthlyEvent->getStartDate())); $start = $fromDate > new \DateTime($monthlyEvent->getStartDate()) @@ -73,7 +75,7 @@ class MonthlyDate implements RecurrenceInterface } $newMonthlyEvent = clone($monthlyEvent); - $newStartDate = $date; + $newStartDate = new \DateTime($date->format('Y-m-d').' '.$monthlyEventTime); $duration = $newMonthlyEvent->getDuration(); $newMonthlyEvent->setStartDate($newStartDate); diff --git a/src/Recurrence/Type/Weekly.php b/src/Recurrence/Type/Weekly.php index <HASH>..<HASH> 100644 --- a/src/Recurrence/Type/Weekly.php +++ b/src/Recurrence/Type/Weekly.php @@ -30,6 +30,8 @@ class Weekly implements RecurrenceInterface foreach ($weeklyEvents as $weeklyEvent) { + list(, $weeklyEventTime) = explode(' ', $weeklyEvent->getStartDate()); + // Retrieve the day of the week that the event takes place on $day = date('w', strtotime($weeklyEvent->getStartDate())); @@ -65,7 +67,7 @@ class Weekly implements RecurrenceInterface } $newWeeklyEvent = clone($weeklyEvent); - $newStartDate = $date; + $newStartDate = new \DateTime($date->format('Y-m-d').' '.$weeklyEventTime); $duration = $newWeeklyEvent->getDuration(); $newWeeklyEvent->setStartDate($newStartDate);
Ensure the occurrence times are those of their parent and not of the from date.
benplummer_calendarful
train
c30c14d933f04f232d9a5d619b300ccae8278c33
diff --git a/src/Riak/Command/Object/Store.php b/src/Riak/Command/Object/Store.php index <HASH>..<HASH> 100644 --- a/src/Riak/Command/Object/Store.php +++ b/src/Riak/Command/Object/Store.php @@ -40,6 +40,10 @@ class Store extends Command\Object implements CommandInterface $this->bucket = $builder->getBucket(); $this->location = $builder->getLocation(); + if ($this->location) { + $this->method = 'PUT'; + } + $this->headers = array_merge($this->headers, $this->object->getHeaders()); }
Added check that if the location is available, then do a PUT instead of a POST.
basho_riak-php-client
train
148c33a53d61b80c8c8b7efd45ef5bec75319eeb
diff --git a/src/peltak/core/fs.py b/src/peltak/core/fs.py index <HASH>..<HASH> 100644 --- a/src/peltak/core/fs.py +++ b/src/peltak/core/fs.py @@ -75,7 +75,6 @@ def match_globs(path, patterns): for pattern in (p for p in patterns if p): if pattern.startswith('/'): regex = fnmatch.translate(pattern[1:]) - regex = regex.replace('\\Z', '') temp_path = path[1:] if path.startswith('/') else path @@ -83,8 +82,9 @@ def match_globs(path, patterns): if m and m.start() == 0: return True - else: - return fnmatch.fnmatch(path, pattern) + + elif fnmatch.fnmatch(path, pattern): + return True return False diff --git a/src/peltak/testing/fixtures.py b/src/peltak/testing/fixtures.py index <HASH>..<HASH> 100644 --- a/src/peltak/testing/fixtures.py +++ b/src/peltak/testing/fixtures.py @@ -15,6 +15,7 @@ from peltak.core import docker @pytest.fixture() def registry_client(): """ Return fake RegistryClient + :return docker.RegistryClient: Fake registry client """ diff --git a/test/unit/core/fs/test_match_globs.py b/test/unit/core/fs/test_match_globs.py index <HASH>..<HASH> 100644 --- a/test/unit/core/fs/test_match_globs.py +++ b/test/unit/core/fs/test_match_globs.py @@ -10,10 +10,16 @@ from peltak.core import fs @pytest.mark.parametrize('path,patterns,expected', [ - ('/fake/path', ['fake', '/fake', 'path'], False), + ('/fake/path', ['fake'], False), + ('/fake/path', ['path'], False), + ('/fake/path', ['/fake'], False), ('/fake/path', ['*fake*'], True), ('/fake/path', ['/fake*'], True), ('/fake/path', ['*ke/pa*'], True), ]) def test_properly_matches_globs(path, patterns, expected): assert fs.match_globs(path, patterns) == expected + + +def test_all_patterns_are_tested(): + assert fs.match_globs('/fake/path', ['invalid', '/fake*']) is True
Fix match_globs() matching only first pattern
novopl_peltak
train
5996e31f681ad53e427576355745fab55b4de392
diff --git a/bitcoin/serialize.py b/bitcoin/serialize.py index <HASH>..<HASH> 100644 --- a/bitcoin/serialize.py +++ b/bitcoin/serialize.py @@ -25,6 +25,8 @@ __all__ = [ 'deserialize_hash', 'serialize_beint', 'deserialize_beint', + 'serialize_leint', + 'deserialize_leint', 'serialize_list', 'deserialize_list', ] @@ -104,6 +106,10 @@ def deserialize_hash(file_, len_): result += limb << ((len_ & ~1) * 8) return result +def serialize_leint(long_, len_=None): + return serialize_beint(long_, len_)[::-1] +deserialize_leint = deserialize_hash + def serialize_beint(long_, len_=None): if long_ < 0: raise ValueError(u"received integer value is negative") diff --git a/xunit/serialize.py b/xunit/serialize.py index <HASH>..<HASH> 100644 --- a/xunit/serialize.py +++ b/xunit/serialize.py @@ -7,6 +7,9 @@ # file LICENSE or http://www.opensource.org/licenses/mit-license.php. # +# Python 2 and 3 compatibility utilities +import six + # Python standard library, unit-testing import unittest2 @@ -236,6 +239,23 @@ class TestSerializeBeint(unittest2.TestCase): file_ = StringIO(result) self.assertEqual(deserialize_beint(file_, len_), hash_) +class TestSerializeLeint(unittest2.TestCase): + """Test serialization and deserialization of little integer values under a + variety of standard scenarios.""" + __metaclass__ = ScenarioMeta + class test_serialize(ScenarioTest): + scenarios = BEINT + def __test__(self, hash_, len_, result): + result = result[::-1] + self.assertEqual(serialize_leint(hash_, len_), result) + self.assertEqual(serialize_leint(hash_), result.rstrip(six.int2byte(0))) + class test_deserialize(ScenarioTest): + scenarios = BEINT + def __test__(self, hash_, len_, result): + result = result[::-1] + file_ = StringIO(result) + self.assertEqual(deserialize_leint(file_, len_), hash_) + # ===----------------------------------------------------------------------=== VARINT_LIST = [
Add serialize_leint and deserialize_leint based on the hash serialization routines.
maaku_python-bitcoin
train
36031ce1edc362ea669e6841cdfaab08f0113d52
diff --git a/hazelcast/src/main/java/com/hazelcast/executor/BaseCallableTaskOperation.java b/hazelcast/src/main/java/com/hazelcast/executor/BaseCallableTaskOperation.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/main/java/com/hazelcast/executor/BaseCallableTaskOperation.java +++ b/hazelcast/src/main/java/com/hazelcast/executor/BaseCallableTaskOperation.java @@ -17,8 +17,11 @@ package com.hazelcast.executor; import com.hazelcast.core.HazelcastInstanceAware; +import com.hazelcast.core.ManagedContext; +import com.hazelcast.instance.HazelcastInstanceImpl; import com.hazelcast.nio.ObjectDataInput; import com.hazelcast.nio.ObjectDataOutput; +import com.hazelcast.nio.serialization.SerializationServiceImpl; import com.hazelcast.spi.Operation; import java.io.IOException; @@ -44,8 +47,19 @@ abstract class BaseCallableTaskOperation extends Operation { @Override public final void beforeRun() throws Exception { + HazelcastInstanceImpl hazelcastInstance = (HazelcastInstanceImpl)getNodeEngine().getHazelcastInstance(); + SerializationServiceImpl serializationService = (SerializationServiceImpl) hazelcastInstance.getSerializationService(); + ManagedContext managedContext = serializationService.getManagedContext(); + + if(callable instanceof RunnableAdapter){ + RunnableAdapter adapter = (RunnableAdapter)callable; + adapter.setRunnable((Runnable)managedContext.initialize(adapter.getRunnable())); + } else{ + callable = (Callable)managedContext.initialize(callable); + } + if (callable instanceof HazelcastInstanceAware) { - ((HazelcastInstanceAware) callable).setHazelcastInstance(getNodeEngine().getHazelcastInstance()); + ((HazelcastInstanceAware) callable).setHazelcastInstance(hazelcastInstance); } } diff --git a/hazelcast/src/main/java/com/hazelcast/nio/serialization/SerializationServiceImpl.java b/hazelcast/src/main/java/com/hazelcast/nio/serialization/SerializationServiceImpl.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/main/java/com/hazelcast/nio/serialization/SerializationServiceImpl.java +++ b/hazelcast/src/main/java/com/hazelcast/nio/serialization/SerializationServiceImpl.java @@ -108,6 +108,10 @@ public final class SerializationServiceImpl implements SerializationService { registerClassDefinitions(classDefinitions, checkClassDefErrors); } + public ManagedContext getManagedContext(){ + return managedContext; + } + private void registerClassDefinitions(final Collection<ClassDefinition> classDefinitions, boolean checkClassDefErrors) { final Map<Integer, ClassDefinition> classDefMap = new HashMap<Integer, ClassDefinition>(classDefinitions.size()); for (ClassDefinition cd : classDefinitions) { diff --git a/hazelcast/src/test/java/com/hazelcast/executor/ExecutorServiceTest.java b/hazelcast/src/test/java/com/hazelcast/executor/ExecutorServiceTest.java index <HASH>..<HASH> 100644 --- a/hazelcast/src/test/java/com/hazelcast/executor/ExecutorServiceTest.java +++ b/hazelcast/src/test/java/com/hazelcast/executor/ExecutorServiceTest.java @@ -57,12 +57,16 @@ public class ExecutorServiceTest extends HazelcastTestSupport { } @Test - public void testManagedContextAppliedToLocal()throws Exception{ + public void testManagedContextAndLocal()throws Exception{ final Config config = new Config(); config.addExecutorConfig(new ExecutorConfig("test", 1)); config.setManagedContext(new ManagedContext(){ @Override public Object initialize(Object obj) { + if(obj instanceof Runnable){ + System.out.println(obj); + } + if(obj instanceof RunnableWithManagedContext){ RunnableWithManagedContext task = 
(RunnableWithManagedContext)obj; task.initializeCalled=true; @@ -76,7 +80,7 @@ public class ExecutorServiceTest extends HazelcastTestSupport { RunnableWithManagedContext task = new RunnableWithManagedContext(); executor.submit(task).get(); - assertTrue(task.initializeCalled); + assertTrue("The task should have been initialized by the ManagedContext",task.initializeCalled); } static class RunnableWithManagedContext implements Runnable{ @@ -87,6 +91,32 @@ public class ExecutorServiceTest extends HazelcastTestSupport { } } + @Test + public void hazelcastInstanceAwareAndLocal()throws Exception{ + final Config config = new Config(); + config.addExecutorConfig(new ExecutorConfig("test", 1)); + final HazelcastInstance instance = createHazelcastInstanceFactory(1).newHazelcastInstance(config); + IExecutorService executor = instance.getExecutorService("test"); + + HazelcastInstanceAwareRunnable task = new HazelcastInstanceAwareRunnable(); + executor.submit(task).get(); + assertTrue("The setHazelcastInstance should have been called",task.initializeCalled); + } + + static class HazelcastInstanceAwareRunnable implements Runnable,HazelcastInstanceAware{ + private volatile boolean initializeCalled = false; + + @Override + public void run() { + } + + @Override + public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { + initializeCalled = true; + } + } + + /** * Submit a null task must raise a NullPointerException */
Fixes #<I>: ManagedContext skipped for local tasks. This PR also includes an additional test for HazelcastInstanceAware and local tasks.
hazelcast_hazelcast
train
1097e6fcf9d68bd2527dd22654dd62419b00049a
diff --git a/scripts/determine-pkg-versions.js b/scripts/determine-pkg-versions.js index <HASH>..<HASH> 100644 --- a/scripts/determine-pkg-versions.js +++ b/scripts/determine-pkg-versions.js @@ -122,7 +122,7 @@ function determineVersion(pkg, commitInfos) { } return commitInfos.reduce(pickBestVersionInfo(pkg), { - version: currentVersion, + version: semver.inc(currentVersion, 'patch'), changeType: 'patch', causedByCommit: '(dependency update - part of packages to be updated but no explicit commits referencing it)', }); diff --git a/scripts/lib/get-updated-pkgs.js b/scripts/lib/get-updated-pkgs.js index <HASH>..<HASH> 100644 --- a/scripts/lib/get-updated-pkgs.js +++ b/scripts/lib/get-updated-pkgs.js @@ -25,6 +25,7 @@ const Repository = require('lerna/lib/Repository'); const UpdatedPackagesCollector = require('lerna/lib/UpdatedPackagesCollector'); const lernaLogger = require('lerna/lib/logger'); const progressBar = require('lerna/lib/progressBar'); +const lernaJson = require('../../lerna.json'); module.exports = function() { const repository = new Repository(); @@ -32,8 +33,7 @@ module.exports = function() { const origBarDescriptor = Object.getOwnPropertyDescriptor(progressBar, 'bar'); const lernaCommand = { repository, - getOptions: () => ({}), - publishConfig: {}, + getOptions: () => lernaJson.publishConfig, }; const collector = new UpdatedPackagesCollector(lernaCommand);
fix(scripts): Ensure determine-pkg-versions outputs correct info (#<I>) - Ensure new version output by determine-pkg-versions is correct - Ensure proper publishConfig flags are sent to UpdatedPackagesCollector
material-components_material-components-web
train
38b2e9f9f67ff6e5fcfbf50beb9dd969cb594eb3
diff --git a/lib/chef/json_compat.rb b/lib/chef/json_compat.rb index <HASH>..<HASH> 100644 --- a/lib/chef/json_compat.rb +++ b/lib/chef/json_compat.rb @@ -53,10 +53,8 @@ class Chef end def to_json_pretty(obj, opts = nil) - opts ||= {} - options_map = {} - options_map[:pretty] = true - options_map[:indent] = opts[:indent] if opts.key?(:indent) + options_map = { pretty: true } + options_map[:indent] = opts[:indent] if opts.respond_to?(:key?) && opts.key?(:indent) to_json(obj, options_map).chomp end
Refactor to_json_pretty to avoid creating a hash.
chef_chef
train
5b77280dad0ebedb71b89d904a8dbba52baee120
diff --git a/packages/react-scripts/scripts/build.js b/packages/react-scripts/scripts/build.js index <HASH>..<HASH> 100644 --- a/packages/react-scripts/scripts/build.js +++ b/packages/react-scripts/scripts/build.js @@ -143,7 +143,7 @@ checkBrowsers(paths.appPath, isInteractive) function build(previousFileSizes) { console.log('Creating an optimized production build...'); - let compiler = webpack(config); + const compiler = webpack(config); return new Promise((resolve, reject) => { compiler.run((err, stats) => { let messages;
Make compiler variable const instead of let (#<I>). compiler is not being reassigned, so we can use a const here
facebook_create-react-app
train
f39f3b1668d3ad64fa4269807e0f609fbbde4c96
diff --git a/src/ServerlessOffline.js b/src/ServerlessOffline.js index <HASH>..<HASH> 100644 --- a/src/ServerlessOffline.js +++ b/src/ServerlessOffline.js @@ -20,10 +20,16 @@ export default class ServerlessOffline { #lambda = null #serverless = null - constructor(serverless, cliOptions) { + constructor(serverless, cliOptions, v3Utils) { this.#cliOptions = cliOptions this.#serverless = serverless + if (v3Utils) { + this.log = v3Utils.log + this.progress = v3Utils.progress + this.writeText = v3Utils.writeText + } + setLog((...args) => serverless.cli.log(...args)) this.commands = {
refactor: Adapt v3 log writing interfaces
dherault_serverless-offline
train
1709bbb218d06cccad5bad983c9392c78cd6c6da
diff --git a/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/AbstractCssFileBlock.java b/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/AbstractCssFileBlock.java index <HASH>..<HASH> 100644 --- a/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/AbstractCssFileBlock.java +++ b/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/AbstractCssFileBlock.java @@ -46,6 +46,8 @@ public abstract class AbstractCssFileBlock implements CssFileBlock { private boolean loadedOnce; + private boolean excludeCssBlock; + @SuppressWarnings("unused") private AbstractCssFileBlock() { } @@ -237,4 +239,21 @@ public abstract class AbstractCssFileBlock implements CssFileBlock { } return cssPropertiesAsMap; } + + /** + * @return the excludeCssBlock true if the css block has been excluded, i.e. + * it will not be contained in the generated css. + */ + public boolean isExcludeCssBlock() { + return excludeCssBlock; + } + + /** + * @param excludeCssBlock + * the excludeCssBlock to set. If it is set to true, then this + * css block will not be contained in the generated css. + */ + protected void setExcludeCssBlock(final boolean excludeCssBlock) { + this.excludeCssBlock = excludeCssBlock; + } } diff --git a/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/CssFile.java b/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/CssFile.java index <HASH>..<HASH> 100644 --- a/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/CssFile.java +++ b/wffweb/src/main/java/com/webfirmframework/wffweb/css/file/CssFile.java @@ -40,6 +40,10 @@ public abstract class CssFile implements Serializable, Cloneable { private boolean optimizeCssString = true; + private boolean modified; + + private boolean initialized; + private final Set<AbstractCssFileBlock> cssBlocks = new LinkedHashSet<AbstractCssFileBlock>() { private static final long serialVersionUID = 1_0_0L; @@ -110,14 +114,30 @@ public abstract class CssFile implements Serializable, Cloneable { final Set<AbstractCssFileBlock> cssFileBlocks = entry .getValue(); if (cssFileBlocks.size() > 0) { - toStringBuilder.append(entry.getKey()); - toStringBuilder.append('{'); + + boolean exclude = true; + final Map<String, CssProperty> cssProperties = new LinkedHashMap<String, CssProperty>(); for (final AbstractCssFileBlock cssFileBlock : cssFileBlocks) { - cssProperties.putAll( - cssFileBlock.getCssPropertiesAsMap()); + + // should be called before + // cssFileBlock.isExcludeCssBlock() + final Map<String, CssProperty> cssPropertiesAsMap = cssFileBlock + .getCssPropertiesAsMap(); + + if (!cssFileBlock.isExcludeCssBlock()) { + cssProperties.putAll(cssPropertiesAsMap); + exclude = false; + } } + if (exclude) { + continue; + } + + toStringBuilder.append(entry.getKey()); + toStringBuilder.append('{'); + for (final CssProperty cssProperty : cssProperties .values()) { toStringBuilder @@ -141,10 +161,6 @@ public abstract class CssFile implements Serializable, Cloneable { } }; - private boolean modified; - - private boolean initialized; - protected final void initCssFile() { if (!initialized) { updateCssBlocks(); @@ -200,7 +216,7 @@ public abstract class CssFile implements Serializable, Cloneable { /* * (non-Javadoc) - * + * * @see java.lang.Object#toString() */ // it's not a best practice to print css by toString method of this class.
Implemented excludeCssBlock in AbstractCssFileBlock class
webfirmframework_wff
train
1126b705c6fd3c5fa9cc6527067664b4b960db15
diff --git a/build.gradle.kts b/build.gradle.kts index <HASH>..<HASH> 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -21,7 +21,7 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar import com.jfrog.bintray.gradle.BintrayExtension import com.jfrog.bintray.gradle.tasks.BintrayUploadTask import org.apache.tools.ant.filters.ReplaceTokens -import java.util.* +import java.util.Date plugins { signing @@ -67,7 +67,7 @@ dependencies { api("org.slf4j:slf4j-api:1.7.25") //Web Connection Support - api("com.neovisionaries:nv-websocket-client:2.9") + api("com.neovisionaries:nv-websocket-client:2.10") api("com.squareup.okhttp3:okhttp:3.13.0") //Opus library support diff --git a/src/main/java/net/dv8tion/jda/internal/audio/AudioWebSocket.java b/src/main/java/net/dv8tion/jda/internal/audio/AudioWebSocket.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/dv8tion/jda/internal/audio/AudioWebSocket.java +++ b/src/main/java/net/dv8tion/jda/internal/audio/AudioWebSocket.java @@ -120,18 +120,9 @@ class AudioWebSocket extends WebSocketAdapter try { - WebSocketFactory socketFactory = getJDA().getWebSocketFactory(); - //noinspection SynchronizationOnLocalVariableOrMethodParameter - synchronized (socketFactory) - { - String host = IOUtil.getHost(wssEndpoint); - // null if the host is undefined, unlikely but we should handle it - if (host != null) - socketFactory.setServerName(host); - else // practically should never happen - socketFactory.setServerNames(null); - socket = socketFactory.createSocket(wssEndpoint); - } + WebSocketFactory socketFactory = new WebSocketFactory(getJDA().getWebSocketFactory()); + IOUtil.setServerName(socketFactory, wssEndpoint); + socket = socketFactory.createSocket(wssEndpoint); socket.setDirectTextMessage(true); socket.addListener(this); changeStatus(ConnectionStatus.CONNECTING_AWAITING_WEBSOCKET_CONNECT); diff --git a/src/main/java/net/dv8tion/jda/internal/requests/WebSocketClient.java b/src/main/java/net/dv8tion/jda/internal/requests/WebSocketClient.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/dv8tion/jda/internal/requests/WebSocketClient.java +++ b/src/main/java/net/dv8tion/jda/internal/requests/WebSocketClient.java @@ -358,18 +358,9 @@ public class WebSocketClient extends WebSocketAdapter implements WebSocketListen try { - WebSocketFactory socketFactory = api.getWebSocketFactory(); - //noinspection SynchronizationOnLocalVariableOrMethodParameter - synchronized (socketFactory) - { - String host = IOUtil.getHost(url); - // null if the host is undefined, unlikely but we should handle it - if (host != null) - socketFactory.setServerName(host); - else // practically should never happen - socketFactory.setServerNames(null); - socket = socketFactory.createSocket(url); - } + WebSocketFactory socketFactory = new WebSocketFactory(api.getWebSocketFactory()); + IOUtil.setServerName(socketFactory, url); + socket = socketFactory.createSocket(url); socket.setDirectTextMessage(true); socket.addHeader("Accept-Encoding", "gzip") .addListener(this) diff --git a/src/main/java/net/dv8tion/jda/internal/utils/IOUtil.java b/src/main/java/net/dv8tion/jda/internal/utils/IOUtil.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/dv8tion/jda/internal/utils/IOUtil.java +++ b/src/main/java/net/dv8tion/jda/internal/utils/IOUtil.java @@ -16,6 +16,7 @@ package net.dv8tion.jda.internal.utils; +import com.neovisionaries.ws.client.WebSocketFactory; import okhttp3.*; import okio.Okio; import org.slf4j.Logger; @@ -56,6 +57,14 @@ public class IOUtil return 
URI.create(uri).getHost(); } + public static void setServerName(WebSocketFactory factory, String url) + { + String host = getHost(url); + // null if the host is undefined, unlikely but we should handle it + if (host != null) + factory.setServerName(host); + } + public static OkHttpClient.Builder newHttpClientBuilder() { Dispatcher dispatcher = new Dispatcher();
Update WebSocket library to <I>. This added a new copy constructor so we can clean up some code
DV8FromTheWorld_JDA
train
ac22fb60bfb4a64609e55811c99586ea03b75f28
diff --git a/Chat/TalkBox.php b/Chat/TalkBox.php index <HASH>..<HASH> 100644 --- a/Chat/TalkBox.php +++ b/Chat/TalkBox.php @@ -239,10 +239,10 @@ class sb_Chat_TalkBox{ $stmt = $this->db->prepare($sql); if($stmt->execute($values)){ + $line->id = $this->db->lastInsertId(); $this->update_last_visit(); } - - return json_encode($line); + return $line; } @@ -266,10 +266,10 @@ class sb_Chat_TalkBox{ if(empty($chatter) && !$this->loaded_from_backup){ $sql = "INSERT INTO sb_TalkBox_".$this->room."_mem SELECT * FROM sb_TalkBox_".$this->room; - echo $sql; + $this->db->query($sql); $this->loaded_from_backup = 1; - $this->display(); + $this->display($id, $dir); } diff --git a/Text/BlingMedia.php b/Text/BlingMedia.php index <HASH>..<HASH> 100644 --- a/Text/BlingMedia.php +++ b/Text/BlingMedia.php @@ -458,9 +458,7 @@ class sb_Text_BlingMedia extends sb_Text_Bling{ * @return string The cleaned text */ public static function parse($str){ - $str = parent::clean($str); - $str = self::pdf_to_link($str); $str = self::images_to_html($str); $str = self::nonflash_media_to_html($str);
TalkBox and Text_BlingMedia now work with new estrip
surebert_surebert-framework
train
d039e3791aca6100a0e0944cc7f46d1f4d0f047b
diff --git a/lfs/gitscanner_catfilebatch.go b/lfs/gitscanner_catfilebatch.go index <HASH>..<HASH> 100644 --- a/lfs/gitscanner_catfilebatch.go +++ b/lfs/gitscanner_catfilebatch.go @@ -19,8 +19,6 @@ import ( func runCatFileBatch(pointerCh chan *WrappedPointer, lockableCh chan string, lockableSet *lockableNameSet, revs *StringChannelWrapper, errCh chan error) error { scanner, err := NewPointerScanner() if err != nil { - scanner.Close() - return err }
lfs: PointerScanner is nil after error, so don't close
git-lfs_git-lfs
train
a68f1dbbafa578266bd5199456054e621239df56
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -49,9 +49,9 @@ copyright = u'2015, Marc Brinkmann' # built documents. # # The short X.Y version. -version = '0.6.2' +version = '0.6.3' # The full version, including alpha/beta/rc tags. -release = '0.6.2.dev1' +release = '0.6.3.dev1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ def read(fname): setup( name='latex', - version='0.6.2.dev1', + version='0.6.3.dev1', description='Wrappers for calling LaTeX/building LaTeX documents.', long_description=read('README.rst'), author='Marc Brinkmann',
Start developing version <I>.dev1 (after release of <I>)
mbr_latex
train
da07356624706caddd0031fbcc50c9f858460949
diff --git a/vc_zoom/indico_vc_zoom/plugin.py b/vc_zoom/indico_vc_zoom/plugin.py index <HASH>..<HASH> 100644 --- a/vc_zoom/indico_vc_zoom/plugin.py +++ b/vc_zoom/indico_vc_zoom/plugin.py @@ -13,7 +13,7 @@ from wtforms.fields import TextAreaField from wtforms.fields.core import BooleanField from wtforms.fields.html5 import URLField from wtforms.fields.simple import StringField -from wtforms.validators import DataRequired, Optional, URL, ValidationError +from wtforms.validators import URL, DataRequired, Optional, ValidationError from indico.core import signals from indico.core.auth import multipass
Make isort happy. ALL_CAPS names are apparently treated differently...
indico_indico-plugins
train
d8cb12072a9168361f28026979c5f392cdf182db
diff --git a/app/src/main/java/io/appium/uiautomator2/handler/FirstVisibleView.java b/app/src/main/java/io/appium/uiautomator2/handler/FirstVisibleView.java index <HASH>..<HASH> 100644 --- a/app/src/main/java/io/appium/uiautomator2/handler/FirstVisibleView.java +++ b/app/src/main/java/io/appium/uiautomator2/handler/FirstVisibleView.java @@ -58,7 +58,7 @@ public class FirstVisibleView extends SafeRequestHandler { } else { Logger.debug("Container for first visible is a uiobject2"); List<UiObject2> childObjects = ((UiObject2) element.getUiObject()).getChildren(); - if (childObjects.size() == 0) { + if (childObjects.isEmpty()) { throw new UiObjectNotFoundException("Could not get children for container object"); } UiObject2 childObject = childObjects.get(0); diff --git a/app/src/main/java/io/appium/uiautomator2/handler/GetDevicePixelRatio.java b/app/src/main/java/io/appium/uiautomator2/handler/GetDevicePixelRatio.java index <HASH>..<HASH> 100644 --- a/app/src/main/java/io/appium/uiautomator2/handler/GetDevicePixelRatio.java +++ b/app/src/main/java/io/appium/uiautomator2/handler/GetDevicePixelRatio.java @@ -24,7 +24,7 @@ public class GetDevicePixelRatio extends SafeRequestHandler { Instrumentation instrumentation = InstrumentationRegistry.getInstrumentation(); - Float ratio = GetDevicePixelRatio.getDeviceScaleRatio(instrumentation); + Float ratio = getDeviceScaleRatio(instrumentation); return new AppiumResponse(getSessionId(request), WDStatus.SUCCESS, ratio); } diff --git a/app/src/main/java/io/appium/uiautomator2/handler/ScrollToElement.java b/app/src/main/java/io/appium/uiautomator2/handler/ScrollToElement.java index <HASH>..<HASH> 100644 --- a/app/src/main/java/io/appium/uiautomator2/handler/ScrollToElement.java +++ b/app/src/main/java/io/appium/uiautomator2/handler/ScrollToElement.java @@ -53,7 +53,7 @@ public class ScrollToElement extends SafeRequestHandler { errorMsg.append("Element"); } - if (errorMsg.toString() != "") { + if (!errorMsg.toString().isEmpty()) { errorMsg.append(" was not an instance of UiObject; only UiSelector is supported. " + "Ensure you use the '-android uiautomator' locator strategy when " + "finding elements for use with ScrollToElement");
fix up some java oopses
appium_appium-uiautomator2-server
train
b2d71cb8131ea0f435d85a97f4ff43a235178cab
diff --git a/host-controller/src/main/java/org/jboss/as/domain/controller/resources/HostExcludeResourceDefinition.java b/host-controller/src/main/java/org/jboss/as/domain/controller/resources/HostExcludeResourceDefinition.java index <HASH>..<HASH> 100644 --- a/host-controller/src/main/java/org/jboss/as/domain/controller/resources/HostExcludeResourceDefinition.java +++ b/host-controller/src/main/java/org/jboss/as/domain/controller/resources/HostExcludeResourceDefinition.java @@ -60,6 +60,7 @@ public class HostExcludeResourceDefinition extends SimpleResourceDefinition { EAP63("EAP6.3", 1, 6), EAP64("EAP6.4", 1, 7), EAP70("EAP7.0", 4, 1), + EAP71("EAP7.1", 5, 0), WILDFLY10("WildFly10.0", 4, 0), WILDFLY10_1("WildFly10.1", 4, 2); diff --git a/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/DomainTransformers.java b/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/DomainTransformers.java index <HASH>..<HASH> 100644 --- a/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/DomainTransformers.java +++ b/host-controller/src/main/java/org/jboss/as/domain/controller/transformers/DomainTransformers.java @@ -73,7 +73,7 @@ public class DomainTransformers { public static void initializeDomainRegistry(final TransformerRegistry registry) { //The chains for transforming will be as follows - //For JBoss EAP: 4.0.0 -> 1.8.0 -> 1.7.0 -> 1.6.0 -> 1.5.0 + //For JBoss EAP: 5.0.0 -> 4.0.0 -> 1.8.0 -> 1.7.0 -> 1.6.0 -> 1.5.0 registerRootTransformers(registry); registerChainedManagementTransformers(registry);
[WFCORE-<I>] Add EAP <I> as a KnownRelease
wildfly_wildfly-core
train
ad147e433d5fedb31556bea5130c95b373dbf39c
diff --git a/tea/src/main/java/org/teatrove/tea/compiler/TypeChecker.java b/tea/src/main/java/org/teatrove/tea/compiler/TypeChecker.java index <HASH>..<HASH> 100644 --- a/tea/src/main/java/org/teatrove/tea/compiler/TypeChecker.java +++ b/tea/src/main/java/org/teatrove/tea/compiler/TypeChecker.java @@ -127,7 +127,7 @@ public class TypeChecker { synchronized (mListeners) { for (int i = 0; i < mListeners.size(); i++) { - ((ErrorListener)mListeners.elementAt(i)).compileError(e); + mListeners.elementAt(i).compileError(e); } } } @@ -2205,14 +2205,9 @@ public class TypeChecker { check(left); check(right); - Type type = Type.BOOLEAN_TYPE; - - if (binaryTypeCheck(node, Boolean.class)) { - left.convertTo(type); - right.convertTo(type); - } - - node.setType(type); + // NOTE: no need to check left and right types as the Truthful + // detector in the generator will ensure they match + node.setType(Type.BOOLEAN_TYPE); return null; } @@ -2224,14 +2219,9 @@ public class TypeChecker { check(left); check(right); - Type type = Type.BOOLEAN_TYPE; - - if (binaryTypeCheck(node, Boolean.class)) { - left.convertTo(type); - right.convertTo(type); - } - - node.setType(type); + // NOTE: no need to check left and right types as the Truthful + // detector in the generator will ensure they match + node.setType(Type.BOOLEAN_TYPE); return null; } diff --git a/tea/src/test/java/org/teatrove/tea/templates/TernaryTest.java b/tea/src/test/java/org/teatrove/tea/templates/TernaryTest.java index <HASH>..<HASH> 100755 --- a/tea/src/test/java/org/teatrove/tea/templates/TernaryTest.java +++ b/tea/src/test/java/org/teatrove/tea/templates/TernaryTest.java @@ -264,6 +264,14 @@ public class TernaryTest extends AbstractTemplateTest { { "m = 5; m = 'test'; (m isa String ? m.length : 0)", "4" }, { "f = null; f?.toString()?.length() == 0", "true" }, { "f = ''; if (not f) { f = null; }; f?.toString()?.length() ?: 5", "5" }, + + // multiple expressions + { "a = 5; a = 10; b = null; b = 'test'; if (a and b) { 'valid' }", "valid" }, + { "a = 5; a = 0; b = null; b = ''; ; if (a and b) { 'valid' }", "" }, + { "a = 5; a = 10; b = null; b = 'test'; (a and b ? 'valid' : 'invalid')", "valid" }, + { "a = 5; a = 10; b = null; b = 'test'; if (a or b) { 'valid' }", "valid" }, + { "a = getObject('test'); b = getTruthful(false); (a and not b ? 'valid' : 'invalid')", "valid" }, + { "a = getObject('test'); b = getTruthful(true); if (a and not b) { 'valid' } else { 'invalid' }", "invalid" } }; public static class TernaryContext {
Fix #<I> resolving truthful expressions in conjunctions (and, or, etc)
teatrove_teatrove
train
1b394bd0337ad3ccf75fd590fcc91a7577104e81
diff --git a/QuickBooks/IPP/IntuitAnywhere.php b/QuickBooks/IPP/IntuitAnywhere.php index <HASH>..<HASH> 100644 --- a/QuickBooks/IPP/IntuitAnywhere.php +++ b/QuickBooks/IPP/IntuitAnywhere.php @@ -351,7 +351,8 @@ class QuickBooks_IPP_IntuitAnywhere */ public function handle($app_tenant, $state = '') { - if ($this->check($app_tenant) and // We have tokens ... + if ($app_tenant and + $this->check($app_tenant) and // We have tokens ... $this->test($app_tenant)) // ... and they are valid { // They are already logged in, send them on to exchange data
Tweaks to ensure that ->test() does not succeed without a valid tenant value.
consolibyte_quickbooks-php
train
0080f94b382d96f1fc63629a1e53c62fef05d1b4
diff --git a/django_zappa/handler.py b/django_zappa/handler.py index <HASH>..<HASH> 100644 --- a/django_zappa/handler.py +++ b/django_zappa/handler.py @@ -84,30 +84,26 @@ def lambda_handler(event, context): returnme[item[0]] = item[1] returnme['Status'] = response.status_code - if response.status_code != 200: + if response.status_code in [400, 401, 403, 500]: - # So that we can always match on the first few characters - # ex '{"AAA": "404' + ## + # A failed attempt at doing content types and status codes + # without b64 hacking. Will work when we get JSON parsing in the mapping demplate: # returnme['AAA'] = str(response.status_code) - # returnme['errorMessage'] = str(response.status_code) - # returnme['errorType'] = str(response.status_code) - # returnme['stackTrace'] = str(response.status_code) - # error_json = json.dumps(returnme, sort_keys=True) - # print "Error JSON:" - # print error_json + # raise(error_json) + ## content = response.content content = "<!DOCTYPE html>" + str(response.status_code) + response.content - b64_content = base64.b64encode(content) - print b64_content - raise Exception(b64_content) + elif response.status_code in [301, 302]: + location = returnme['Location'].replace("http://zappa", "") + raise Exception(location) + else: + return returnme - print returnme - - return returnme # This is a management command invocation. elif event.get('command', None): from django.core import management diff --git a/requirements.txt b/requirements.txt index <HASH>..<HASH> 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ Django==1.8 mysql-connector-python-rf==2.1.3 wheel==0.24.0 -zappa==0.7.0 +zappa==0.8.0 diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ with open(os.path.join(os.path.dirname(__file__), 'requirements.txt')) as f: setup( name='django-zappa', - version='0.5.0', + version='0.6.0', packages=['django_zappa'], install_requires=required, include_package_data=True,
<I> - redirects working mostly
Miserlou_django-zappa
train
8cb9998c835221d527572d76753a587886df0fa4
diff --git a/server/sonar-server/src/main/java/org/sonar/server/ui/ws/ComponentNavigationAction.java b/server/sonar-server/src/main/java/org/sonar/server/ui/ws/ComponentNavigationAction.java index <HASH>..<HASH> 100644 --- a/server/sonar-server/src/main/java/org/sonar/server/ui/ws/ComponentNavigationAction.java +++ b/server/sonar-server/src/main/java/org/sonar/server/ui/ws/ComponentNavigationAction.java @@ -140,7 +140,6 @@ public class ComponentNavigationAction implements NavigationWsAction { .prop("name", component.name()) .prop("isComparable", componentTypeHasProperty(component, PROPERTY_COMPARABLE)) .prop("canBeFavorite", userSession.isLoggedIn()) - .prop("showBackgroundTasks", ActivityWsAction.isAllowedOnComponentUuid(userSession, component.uuid())) .prop("isFavorite", isFavourite(session, component, userSession)); List<DashboardDto> dashboards = activeDashboardDao.selectProjectDashboardsForUserLogin(session, userSession.getLogin()); @@ -200,7 +199,7 @@ public class ComponentNavigationAction implements NavigationWsAction { return componentKey; } - private void writeDashboards(JsonWriter json, List<DashboardDto> dashboards) { + private static void writeDashboards(JsonWriter json, List<DashboardDto> dashboards) { json.name("dashboards").beginArray(); for (DashboardDto dashboard : dashboards) { json.beginObject() @@ -242,6 +241,7 @@ public class ComponentNavigationAction implements NavigationWsAction { json.prop("showHistory", isAdmin && componentTypeHasProperty(component, PROPERTY_MODIFIABLE_HISTORY)); json.prop("showUpdateKey", isAdmin && componentTypeHasProperty(component, PROPERTY_UPDATABLE_KEY)); json.prop("showDeletion", isAdmin && componentTypeHasProperty(component, PROPERTY_DELETABLE)); + json.prop("showBackgroundTasks", ActivityWsAction.isAllowedOnComponentUuid(userSession, component.uuid())); } private boolean componentTypeHasProperty(ComponentDto component, String resourceTypeProperty) {
SONAR-<I> Fix location of showBackgroundTasks in response of api/navigation/component
SonarSource_sonarqube
train
dddc0d15f108637e8fd733359713e13bc1656fb4
diff --git a/src/test/java/redis/clients/jedis/tests/JedisSentinelTest.java b/src/test/java/redis/clients/jedis/tests/JedisSentinelTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/redis/clients/jedis/tests/JedisSentinelTest.java +++ b/src/test/java/redis/clients/jedis/tests/JedisSentinelTest.java @@ -85,16 +85,16 @@ public class JedisSentinelTest extends JedisTestBase { public void sentinelFailover() throws InterruptedException { Jedis j = new Jedis(sentinelForFailover.getHost(), sentinelForFailover.getPort()); + Jedis j2 = new Jedis(sentinelForFailover.getHost(), + sentinelForFailover.getPort()); try { List<String> masterHostAndPort = j .sentinelGetMasterAddrByName(FAILOVER_MASTER_NAME); HostAndPort currentMaster = new HostAndPort(masterHostAndPort.get(0), Integer.parseInt(masterHostAndPort.get(1))); - String result = j.sentinelFailover(FAILOVER_MASTER_NAME); - assertEquals("OK", result); - JedisSentinelTestUtil.waitForNewPromotedMaster(j); + JedisSentinelTestUtil.waitForNewPromotedMaster(FAILOVER_MASTER_NAME, j, j2); masterHostAndPort = j .sentinelGetMasterAddrByName(FAILOVER_MASTER_NAME);
Add second jedis connection to waitForNewPromotedMaster call
xetorthio_jedis
train
0413fa74f8ff2ae7f072a0f1d5338a845837c585
diff --git a/scripts/garp.php b/scripts/garp.php index <HASH>..<HASH> 100755 --- a/scripts/garp.php +++ b/scripts/garp.php @@ -51,10 +51,10 @@ if (file_exists(BASE_PATH . '/application/configs/environment.php')) { include_once BASE_PATH . '/application/configs/environment.php'; } -require_once dirname(__FILE__)."/../application/init.php"; +require_once dirname(__FILE__) . "/../application/init.php"; // Create application, bootstrap, and run -$applicationIni = APPLICATION_PATH.'/configs/application.ini'; +$applicationIni = APPLICATION_PATH . '/configs/application.ini'; try { $application = new Garp_Application(APPLICATION_ENV, $applicationIni); $application->bootstrap(); @@ -90,7 +90,7 @@ ini_set('display_errors', 'stderr'); $args = Garp_Cli::parseArgs($_SERVER['argv']); if (empty($args[0])) { Garp_Cli::errorOut('No command given.'); - Garp_Cli::errorOut('Usage: php garp.php <command> [args,..]'); + Garp_Cli::errorOut('Usage: g <command> [args,..]'); // @codingStandardsIgnoreStart exit(1); // @codingStandardsIgnoreEnd @@ -98,31 +98,30 @@ if (empty($args[0])) { /* Construct command classname */ $classArgument = ucfirst($args[0]); -$namespaces = array('Garp', 'App'); +$namespaces = array('App', 'Garp'); $config = Zend_Registry::get('config'); if (!empty($config->cli->namespaces)) { $namespaces = $config->cli->namespaces->toArray(); } -// Try to load CLI command in LIFO order -$i = count($namespaces)-1; -while ($i >= 0) { - $ns = $namespaces[$i]; - $commandName = $ns.'_Cli_Command_'.$classArgument; - if (class_exists($commandName)) { - break; - } - --$i; -} + +$commandNames = array_map( + function ($ns) use ($classArgument) { + return $ns . '_Cli_Command_' . $classArgument; + }, + $namespaces +); +$commandNames = array_filter($commandNames, 'class_exists'); // Remove the command name from the argument list $args = array_splice($args, 1); -if (isset($classLoader) && !$classLoader->isLoadable($commandName)) { +if (!count($commandNames)) { Garp_Cli::errorOut('Silly developer. This is not the command you\'re looking for.'); // @codingStandardsIgnoreStart exit(1); // @codingStandardsIgnoreEnd } +$commandName = current($commandNames); $command = new $commandName(); if (!$command instanceof Garp_Cli_Command) { Garp_Cli::errorOut( @@ -150,16 +149,9 @@ if (!in_array($classArgument, $commandsWithoutTranslation)) { } } -/** - * Helper functionality for the bash-completion script: look for the --complete flag. - * If it's present, dump a space-separated list of public methods. - */ -if (array_key_exists('complete', $args)) { - $publicMethods = $command->getPublicMethods(); - Garp_Cli::lineOut(implode(' ', $publicMethods)); -} else { - $command->main($args); -} +$command->main($args); + // @codingStandardsIgnoreStart exit(0); // @codingStandardsIgnoreEnd +
Refactored command resolving slightly
grrr-amsterdam_garp3
train
da19258899e8f5b0dbba7abe1f451b8b7c909463
diff --git a/dev/com.ibm.ws.concurrent.persistent/src/com/ibm/ws/concurrent/persistent/internal/InvokerTask.java b/dev/com.ibm.ws.concurrent.persistent/src/com/ibm/ws/concurrent/persistent/internal/InvokerTask.java index <HASH>..<HASH> 100644 --- a/dev/com.ibm.ws.concurrent.persistent/src/com/ibm/ws/concurrent/persistent/internal/InvokerTask.java +++ b/dev/com.ibm.ws.concurrent.persistent/src/com/ibm/ws/concurrent/persistent/internal/InvokerTask.java @@ -607,7 +607,12 @@ public class InvokerTask implements Runnable, Synchronization { processRetryableTaskFailure(failure, loader, nextFailureCount, config, taskName); } else { if (taskIdForPropTable != null) - taskStore.removeProperty(taskIdForPropTable); + try { + taskStore.removeProperty(taskIdForPropTable); + } catch (Throwable x) { + tranMgr.rollback(); + throw x; + } tranMgr.commit();
Issue #<I> nested transaction FFDC after rollback missed on error path
OpenLiberty_open-liberty
train
f7e2ed978f83127f79160c968967f8c7737cc20d
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -46,7 +46,7 @@ recursively_include(package_data, 'pythonforandroid/recipes', recursively_include(package_data, 'pythonforandroid/bootstraps', ['*.properties', '*.xml', '*.java', '*.tmpl', '*.txt', '*.png', '*.mk', '*.c', '*.h', '*.py', '*.sh', '*.jpg', '*.aidl', - '*.gradle', ]) + '*.gradle', '.gitkeep', 'gradlew*', '*.jar', ]) recursively_include(package_data, 'pythonforandroid/bootstraps', ['sdl-config', ]) recursively_include(package_data, 'pythonforandroid/bootstraps/webview',
setup.py: fix installation. Fix all remaining issues of #<I>
kivy_python-for-android
train
b00104cc84a5454cffcfbcf17fb5c499d848661b
diff --git a/safe/gui/tools/print_report_dialog.py b/safe/gui/tools/print_report_dialog.py index <HASH>..<HASH> 100644 --- a/safe/gui/tools/print_report_dialog.py +++ b/safe/gui/tools/print_report_dialog.py @@ -138,7 +138,7 @@ class PrintReportDialog(QtGui.QDialog, FORM_CLASS): override_template_found = filename # check for population exposure - self.is_population = isinstance(exposure_type, exposure_population) + self.is_population = exposure_type == exposure_population self.infographic_checkbox.setEnabled(self.is_population)
fix bug on print dialog (#<I>)
inasafe_inasafe
train
fd3d388ece0ed5d1e8ea417d0b29a5c665424c4e
diff --git a/oscrypto/_openssl/tls.py b/oscrypto/_openssl/tls.py index <HASH>..<HASH> 100644 --- a/oscrypto/_openssl/tls.py +++ b/oscrypto/_openssl/tls.py @@ -585,7 +585,10 @@ class TLSSocket(object): raise def _raw_read(self, rbio): - to_write = self._socket.recv(8192) + try: + to_write = self._socket.recv(8192) + except (socket_.error): + to_write = b'' output = to_write while to_write != b'': written = libssl.BIO_write(rbio, to_write, len(to_write))
Have OpenSSL socket read errors bubble up to the OpenSSL layer for better error messaging
wbond_oscrypto
train
899cbffe332b12ca5062986d0c51972acb2782cd
diff --git a/system/src/Grav/Common/Grav.php b/system/src/Grav/Common/Grav.php index <HASH>..<HASH> 100644 --- a/system/src/Grav/Common/Grav.php +++ b/system/src/Grav/Common/Grav.php @@ -235,7 +235,7 @@ class Grav extends Container // Calculate a Hash based on the raw file if ($page->eTag()) { - header('ETag: ' . md5($page->raw() . $page->modified())); + header('ETag: "' . md5($page->raw() . $page->modified()).'"'); } // Set debugger data in headers
ETag header should be inside double quotes (#<I>). It's a small change, but still... <URL>
getgrav_grav
train
05225ef07f94bb9f4a67ad89360d1d10cc31ad32
diff --git a/includes/properties/class-papi-property-relationship.php b/includes/properties/class-papi-property-relationship.php index <HASH>..<HASH> 100644 --- a/includes/properties/class-papi-property-relationship.php +++ b/includes/properties/class-papi-property-relationship.php @@ -120,6 +120,11 @@ class Papi_Property_Relationship extends Papi_Property { $sort_option = $this->get_sort_option( $options->slug ); $value = $this->get_value(); + // By default we add posts per page key with the value -1 (all). + if ( ! isset( $settings->query['posts_per_page'] ) ) { + $settings->query['posts_per_page'] = -1; + } + // Fetch posts with the post types and the query. $posts = query_posts( array_merge( $settings->query, array( 'post_type' => _papi_to_array( $settings->post_type )
Added posts per page default value for property relationship
wp-papi_papi
train
d557d44360cc1d3f20c1a334fe5c325042753d61
diff --git a/src/lib/isMobilePhone.js b/src/lib/isMobilePhone.js index <HASH>..<HASH> 100644 --- a/src/lib/isMobilePhone.js +++ b/src/lib/isMobilePhone.js @@ -43,7 +43,7 @@ const phones = { 'fr-FR': /^(\+?33|0)[67]\d{8}$/, 'he-IL': /^(\+972|0)([23489]|5[012345689]|77)[1-9]\d{6}$/, 'hu-HU': /^(\+?36)(20|30|70)\d{7}$/, - 'id-ID': /^(\+?62|0)(0?8?\d\d\s?\d?)([\s?|\d]{7,12})$/, + 'id-ID': /^(\+?62|0)8(1[123456789]|2[1238]|3[1238]|5[12356789]|7[78]|9[56789]|8[123456789])([\s?|\d]{5,11})$/, 'it-IT': /^(\+?39)?\s?3\d{2} ?\d{6,7}$/, 'ja-JP': /^(\+?81|0)[789]0[ \-]?[1-9]\d{2}[ \-]?\d{5}$/, 'kk-KZ': /^(\+?7|8)?7\d{9}$/, diff --git a/test/validators.js b/test/validators.js index <HASH>..<HASH> 100644 --- a/test/validators.js +++ b/test/validators.js @@ -4335,20 +4335,53 @@ describe('Validators', () => { { locale: 'id-ID', valid: [ - '0217123456', '0811 778 998', + '0811 7785 9983', + '0812 7784 9984', + '0813 7782 9982', + '0821 1234 1234', + '0822 1234 1234', + '0823 1234 1234', + '0852 1234 6764', + '0853 1234 6764', + '0851 1234 6764', + '0814 7782 9982', + '0815 7782 9982', + '0816 7782 9982', + '0855 7782 9982', + '0856 7782 9982', + '0857 7782 9982', + '0858 7782 9982', + '0817 7785 9983', + '0818 7784 9984', + '0819 7782 9982', + '0859 1234 1234', + '0877 1234 1234', + '0878 1234 1234', + '0895 7785 9983', + '0896 7784 9984', + '0897 7782 9982', + '0898 1234 1234', + '0899 1234 1234', + '0881 7785 9983', + '0882 7784 9984', + '0883 7782 9982', + '0884 1234 1234', + '0886 1234 1234', + '0887 1234 1234', + '0888 7785 9983', + '0889 7784 9984', + '0828 7784 9984', + '0838 7784 9984', + '0831 7784 9984', + '0832 7784 9984', + '0833 7784 9984', '089931236181900', - '622178878890', '62811 778 998', '62811778998', - '6289931236181900', - '6221 740123456', - '62899 740123456', + '628993123618190', + '62898 740123456', '62899 7401 2346', - '0341 8123456', - '0778 89800910', - '0741 123456', - '+6221740123456', '+62811 778 998', '+62811778998', '+62812 9650 3508', @@ -4357,6 +4390,14 @@ describe('Validators', () => { '+62811787391', ], invalid: [ + '0899312361819001', + '0217123456', + '622178878890', + '6221 740123456', + '0341 8123456', + '0778 89800910', + '0741 123456', + '+6221740123456', '+65740 123 456', '', 'ASDFGJKLmZXJtZtesting123',
fix(isMobilePhone): Indonesian locale update (#<I>)
chriso_validator.js
train
02f8697733457cbe815b52aa78fa979f0602d2e2
diff --git a/clause-and-effect/09-searching-a-cyclic-graph.php b/clause-and-effect/09-searching-a-cyclic-graph.php index <HASH>..<HASH> 100644 --- a/clause-and-effect/09-searching-a-cyclic-graph.php +++ b/clause-and-effect/09-searching-a-cyclic-graph.php @@ -7,6 +7,9 @@ require 'vendor/autoload.php'; // clause and effect // worksheet 9: searching a cyclic graph +// this is more tricky than the acyclic one, because +// we need to avoid divergence + function a($a, $b) { return conde([ [eq($a, 'g'), eq($b, 'h')], diff --git a/clause-and-effect/15-multiple-disjoint-partial-maps.php b/clause-and-effect/15-multiple-disjoint-partial-maps.php index <HASH>..<HASH> 100644 --- a/clause-and-effect/15-multiple-disjoint-partial-maps.php +++ b/clause-and-effect/15-multiple-disjoint-partial-maps.php @@ -7,6 +7,15 @@ require 'vendor/autoload.php'; // clause and effect // worksheet 15: multiple disjoint partial maps +// goal: separate a herd (list of sheep and goats) +// into two separate lists, one for sheep and one +// for goats. +// +// variation: skip over invalid elements +// variation: put invalid elements in a separate list +// variation: split a list into two lists in an alternating +// (interleaving) way + function herd($l, $sheep, $goats) { return conde([ [eq($l, []), eq($sheep, []), eq($goats, [])], diff --git a/clause-and-effect/18-sequential-maps-with-state.php b/clause-and-effect/18-sequential-maps-with-state.php index <HASH>..<HASH> 100644 --- a/clause-and-effect/18-sequential-maps-with-state.php +++ b/clause-and-effect/18-sequential-maps-with-state.php @@ -7,6 +7,8 @@ require 'vendor/autoload.php'; // clause and effect // worksheet 18: sequential maps with state +// runlength coding, a very basic form of compression + function runcode($l, $c, $n, $x) { return conde([ [eq($l, []), eq($x, [['*', $n, $c]])],
add descriptions of what the various examples depict
igorw_reasoned-php
train
c2d0679c4b0602150f0e0ea9c66b54c8678dba7a
diff --git a/salt/states/test.py b/salt/states/test.py index <HASH>..<HASH> 100644 --- a/salt/states/test.py +++ b/salt/states/test.py @@ -201,33 +201,29 @@ def configurable_test_state(name, changes=True, result=True, comment=''): 'result': False, 'comment': comment } + change_data = { + 'testing': { + 'old': 'Unchanged', + 'new': 'Something pretended to change' + } + } if changes == 'Random': if random.choice([True, False]): # Following the docs as written here # http://docs.saltstack.com/ref/states/writing.html#return-data - ret['changes'] = { - 'testing': { - 'old': 'Unchanged', - 'new': 'Something pretended to change' - } - } + ret['changes'] = change_data elif changes is True: # If changes is True we place our dummy change dictionary into it. # Following the docs as written here # http://docs.saltstack.com/ref/states/writing.html#return-data - ret['changes'] = { - 'testing': { - 'old': 'Unchanged', - 'new': 'Something pretended to change' - } - } + ret['changes'] = change_data elif changes is False: ret['changes'] = {} else: err = ('You have specified the state option \'Changes\' with' - ' invalid arguments. It must be either ' - ' \'True\', \'False\', or \'Random\'') + ' invalid arguments. It must be either ' + ' \'True\', \'False\', or \'Random\'') raise SaltInvocationError(err) if result == 'Random':
states.test.configurable_test_state: refactor change_data. Also adjust continued string indentation.
saltstack_salt
train
e698097195ef2c9676f76513cf0c1f28c2872b8a
diff --git a/lib/random_unique_id.rb b/lib/random_unique_id.rb index <HASH>..<HASH> 100644 --- a/lib/random_unique_id.rb +++ b/lib/random_unique_id.rb @@ -136,17 +136,7 @@ module RandomUniqueId # @see RandomUniqueId::ClassMethods#has_random_unique_id # @see RandomUniqueId.generate_random_id def generate_random_unique_id(n=self.random_unique_id_options[:min_rid_length], field="rid") - # Find the topmost class before ActiveRecord::Base so that when we do queries, we don't end up with type=Whatever in the where clause. - klass = self.class - self.class.ancestors.each do |k| - if k == ActiveRecord::Base - break # we reached the bottom of this barrel - end - if k.is_a? Class - klass = k - end - end - + klass = find_topmost_model_class case self.random_unique_id_options[:random_generation_method] when :rid begin @@ -181,6 +171,19 @@ module RandomUniqueId def self.generate_uuid SecureRandom.uuid end + + # Find the topmost class before ActiveRecord::Base so that when we do queries, we don't end up with type=Whatever in + # the where clause. + # @return [Class] the class object + def find_topmost_model_class + klass = self.class + self.class.ancestors.select { |k| k.is_a? Class }.each do |k| + if k == ActiveRecord::Base + return klass + end + klass = k + end + end end ActiveRecord::Base.send(:include, RandomUniqueId)
Refactoring: move finding the topmost class to its own method. Made it more functional.
pupeno_random_unique_id
train
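To illustrate the idea in the record above — walking a class's ancestors and stopping just before the shared base class — here is a minimal Python sketch. It is an analogue only (the gem itself is Ruby), and the class names below are invented for the example:

```python
class Base:                 # stand-in for a shared ORM base such as ActiveRecord::Base
    pass

class Animal(Base):         # the "topmost" model class we want to find
    pass

class Dog(Animal):          # a subclass (e.g. an STI child)
    pass

def topmost_model_class(obj):
    """Walk the method resolution order upward and return the ancestor
    that sits directly below Base, mirroring the refactored helper above."""
    klass = type(obj)
    for candidate in type(obj).__mro__:
        if candidate is Base:
            return klass
        klass = candidate
    return klass

print(topmost_model_class(Dog()).__name__)  # -> "Animal"
```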
752b4f49e9283078976973ebbe33576eaff80a3e
diff --git a/lib/backup/database/riak.rb b/lib/backup/database/riak.rb index <HASH>..<HASH> 100644 --- a/lib/backup/database/riak.rb +++ b/lib/backup/database/riak.rb @@ -45,8 +45,11 @@ module Backup end ## - # Performs the riak-admin command and outputs the - # data to the specified path based on the 'trigger' + # Performs the `riak-admin` command which creates a single dump file in + # @dump_path based on the `name` and `node`. + # + # `riak-admin` will append the `node` to the filename. + # i.e. <tmp_path>/<trigger>/databases/Riak/<name>-<node> def perform! super # ensure riak-admin user has permissions to write backup file @@ -57,8 +60,9 @@ module Backup if @model.compressor @model.compressor.compress_with do |command, ext| - run("#{ command } -c #{ backup_file }-#{ node } > #{ backup_file + ext }") - FileUtils.rm_f("#{ backup_file }-#{ node }") + backup_file << "-#{ node }" + run("#{ command } -c #{ backup_file } > #{ backup_file + ext }") + FileUtils.rm_f(backup_file) end end end diff --git a/spec/database/riak_spec.rb b/spec/database/riak_spec.rb index <HASH>..<HASH> 100644 --- a/spec/database/riak_spec.rb +++ b/spec/database/riak_spec.rb @@ -135,8 +135,8 @@ describe Backup::Database::Riak do db.expects(:run).in_sequence(s). with('riakadmin_command /dump/path/mydatabase node') db.expects(:run).in_sequence(s).with( - "compressor_command -c " + - "/dump/path/mydatabase-riak@localhost > /dump/path/mydatabase.gz" + "compressor_command -c /dump/path/mydatabase-riak@localhost > " + + "/dump/path/mydatabase-riak@localhost.gz" ) FileUtils.expects(:rm_f).in_sequence(s). with('/dump/path/mydatabase-riak@localhost')
keep appended node name when compressing riak dump file
backup_backup
train
3865454cbd30734bad172149abc02c0a7eeb44d1
diff --git a/lib/connection.js b/lib/connection.js index <HASH>..<HASH> 100644 --- a/lib/connection.js +++ b/lib/connection.js @@ -145,7 +145,7 @@ var Connection = module.exports = function(options) { // Allow to delegate connection refresh to outer function var self = this; var refreshFn = options.refreshFn; - if (!refreshFn && this.oauth2.clientId && this.oauth2.clientSecret) { + if (!refreshFn && this.oauth2.clientId) { refreshFn = oauthRefreshFn; } if (refreshFn) {
remove client secret check to accept refresh token
jsforce_jsforce
train
6c63d931f21102a7bc83af76dfe5017e43f5c5a2
diff --git a/examples/.eslintrc b/examples/.eslintrc index <HASH>..<HASH> 100644 --- a/examples/.eslintrc +++ b/examples/.eslintrc @@ -6,7 +6,6 @@ "import/prefer-default-export": "off", "max-len": "off", "no-param-reassign": "off", - "object-curly-spacing": "off", "object-shorthand": "off", "prefer-template": "off", "quote-props": "off", diff --git a/examples/02 Drag Around/Custom Drag Layer/Box.js b/examples/02 Drag Around/Custom Drag Layer/Box.js index <HASH>..<HASH> 100644 --- a/examples/02 Drag Around/Custom Drag Layer/Box.js +++ b/examples/02 Drag Around/Custom Drag Layer/Box.js @@ -20,7 +20,7 @@ export default class Box extends Component { const backgroundColor = yellow ? 'yellow' : 'white'; return ( - <div style={{...styles, backgroundColor }}> + <div style={{ ...styles, backgroundColor }}> {title} </div> ); diff --git a/examples/02 Drag Around/Naive/Container.js b/examples/02 Drag Around/Naive/Container.js index <HASH>..<HASH> 100644 --- a/examples/02 Drag Around/Naive/Container.js +++ b/examples/02 Drag Around/Naive/Container.js @@ -58,7 +58,7 @@ export default class Container extends Component { render() { const { hideSourceOnDrag, connectDropTarget } = this.props; - const { boxes} = this.state; + const { boxes } = this.state; return connectDropTarget( <div style={styles}>
Turn on 'object-curly-spacing' rule
react-dnd_react-dnd
train
66c527b3d14d198c1a541816194c728eb934bfad
diff --git a/codespell_lib/_codespell.py b/codespell_lib/_codespell.py index <HASH>..<HASH> 100755 --- a/codespell_lib/_codespell.py +++ b/codespell_lib/_codespell.py @@ -38,7 +38,7 @@ options = None file_opener = None quiet_level = 0 encodings = ['utf-8', 'iso-8859-1'] -regex = re.compile(r"[\w\-']+") +word_regex = re.compile(r"[\w\-']+") # Users might want to link this file into /usr/local/bin, so we resolve the # symbolic link path to the real path if necessary. default_dictionary = os.path.join(os.path.dirname(os.path.realpath(__file__)), @@ -222,7 +222,14 @@ def parse_options(args): 'equals "-" then default dictionary "%s" is used. ' 'This option can be specified multiple times.' % default_dictionary) - + parser.add_option('-r', '--regex', + action='store', type='string', + help='Regular expression which is used to find words. ' + 'By default any alphanumeric character, the ' + 'underscore, the hyphen, and the apostrophe is ' + 'used to build words (i.e. %s). This option cannot ' + 'be specified together with the write-changes ' + 'functionality. ' % word_regex.pattern) parser.add_option('-s', '--summary', action='store_true', default=False, help='print summary of fixes') @@ -413,7 +420,7 @@ def parse_file(filename, colors, summary): fixed_words = set() asked_for = set() - for word in regex.findall(line): + for word in word_regex.findall(line): lword = word.lower() if lword in misspellings: fix = misspellings[lword].fix @@ -500,6 +507,19 @@ def main(*args): options, args, parser = parse_options(args) + if options.regex: + if options.write_changes: + parser.error('--write-changes cannot be used together with ' + '--regex') + global word_regex + try: + word_regex = re.compile(options.regex) + except re.error as err: + print('ERROR: invalid regular expression "%s" (%s)' % + (options.regex, err), file=sys.stderr) + parser.print_help() + return 1 + dictionaries = options.dictionary or [default_dictionary] for dictionary in dictionaries: if dictionary is "-":
Add new --regex option to set regular expression used to find words A new command line option -r / --regex is implemented to allow the user to set the regular expression which is used to find the words in the input lines. This is useful in projects where you want to override the default expression, for example to split words after an underscore like in ID_ESTABLISED. The new regex option cannot be used together with the --write-changes functionality since it is not guaranteed that this will not break code.
codespell-project_codespell
train
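As a rough, standalone illustration of the word-splitting behaviour described in the record above (this snippet is not part of codespell), the default pattern keeps ID_ESTABLISED as a single token, while a user-supplied --regex would split on the underscore so the misspelling becomes visible as its own word; the custom pattern here is a hypothetical example value:

```python
import re

line = "set the ID_ESTABLISED flag"

# Default word pattern shown in the diff above: \w includes the underscore,
# so the identifier stays in one piece.
default_word_regex = re.compile(r"[\w\-']+")
print(default_word_regex.findall(line))   # ['set', 'the', 'ID_ESTABLISED', 'flag']

# Hypothetical custom pattern a user might pass via -r/--regex to split
# identifiers on underscores.
custom_word_regex = re.compile(r"[a-zA-Z]+")
print(custom_word_regex.findall(line))    # ['set', 'the', 'ID', 'ESTABLISED', 'flag']
```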
fb2f0d6d0775e341f39ce161efbb920c0684514a
diff --git a/gdax/public_client.py b/gdax/public_client.py index <HASH>..<HASH> 100644 --- a/gdax/public_client.py +++ b/gdax/public_client.py @@ -157,7 +157,7 @@ class PublicClient(object): params['limit'] = limit r = requests.get(url, params=params) - r.raise_for_status() + # r.raise_for_status() result.extend(r.json()) @@ -167,8 +167,8 @@ class PublicClient(object): if limit <= 0: return result - # ensure that we don't get rate-limited/blocked - time.sleep(0.4) + # TODO: need a way to ensure that we don't get rate-limited/blocked + # time.sleep(0.4) return self.get_product_trades(product_id=product_id, after=r.headers['cb-after'], limit=limit, result=result) return result
remove raise and comment out time.sleep
danpaquin_coinbasepro-python
train
d069ae11f433077b6ac6b16a7322e13907a0feb8
diff --git a/exchange/bitswap/strategy/ledgermanager.go b/exchange/bitswap/strategy/ledgermanager.go index <HASH>..<HASH> 100644 --- a/exchange/bitswap/strategy/ledgermanager.go +++ b/exchange/bitswap/strategy/ledgermanager.go @@ -22,15 +22,19 @@ type Envelope struct { } type LedgerManager struct { - lock sync.RWMutex - // ledgerMap lists Ledgers by their Partner key. - ledgerMap map[u.Key]*ledger - bs bstore.Blockstore // FIXME taskqueue isn't threadsafe nor is it protected by a mutex. consider // a way to avoid sharing the taskqueue between the worker and the receiver - taskqueue *taskQueue - outbox chan Envelope + taskqueue *taskQueue + workSignal chan struct{} + + outbox chan Envelope + + bs bstore.Blockstore + + lock sync.RWMutex + // ledgerMap lists Ledgers by their Partner key. + ledgerMap map[u.Key]*ledger } func NewLedgerManager(ctx context.Context, bs bstore.Blockstore) *LedgerManager {
refactor: put mutex next to the things it protects If we put the lock next to the fields it protects, it can sometimes make it easier to reason about threadsafety. In this case, it reveals that the task queue (not threadsafe) isn't protected by the mutex, yet shared between the worker and callers. @whyrusleeping License: MIT
ipfs_go-ipfs
train
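The reasoning in the record above — keep a lock physically next to the state it protects so unguarded sharing stands out — is language-agnostic; below is a small Python sketch of the same layout idea (an analogue only, with invented names, not code from go-ipfs):

```python
import threading

class LedgerManager:
    def __init__(self):
        # Shared with a worker but NOT guarded by the lock below;
        # keeping it apart makes that fact easy to notice in review.
        self.task_queue = []
        self.outbox = []

        # The lock and the map it protects sit right next to each other.
        self._ledger_lock = threading.Lock()
        self._ledger_map = {}

    def ledger(self, partner):
        # Every access to _ledger_map goes through the adjacent lock.
        with self._ledger_lock:
            return self._ledger_map.setdefault(partner, {"sent": 0, "received": 0})
```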
0d186e3c5b352caac1737393fd6296a85bc43e7f
diff --git a/metal/utils.py b/metal/utils.py index <HASH>..<HASH> 100644 --- a/metal/utils.py +++ b/metal/utils.py @@ -97,9 +97,9 @@ def recursive_merge_dicts(x, y, misses='report', verbose=None): recursive_merge_dicts(x[k], v, misses, verbose) else: if x[k] == v: - msg = f"Reaffirming {x}={x[k]}" + msg = f"Reaffirming {k}={x[k]}" else: - msg = f"Overwriting {x}={x[k]} to {k}={v}" + msg = f"Overwriting {k}={x[k]} to {k}={v}" x[k] = v if verbose: print(msg)
Fix printing bug in recursive_merge_dicts
HazyResearch_metal
train
1810a049d0b6bc3fd3f2d695ac2fd74beb0b336b
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,8 +12,8 @@ from setuptools import find_packages, setup setup( - name='experirun', - version='0.0.1', + name='experi', + version='0.0.8', python_requires='>=3.6', setup_requires=[], install_requires=[],
Change name and version number Getting ready to put on github, I prefer the shorter experi name, although I am still not set on that as the name.
malramsay64_experi
train
468fe155a63696f621190fc3fc8e3f94800410d5
diff --git a/launch_control/__init__.py b/launch_control/__init__.py index <HASH>..<HASH> 100644 --- a/launch_control/__init__.py +++ b/launch_control/__init__.py @@ -3,6 +3,11 @@ # # For more details see: # https://blueprints.launchpad.net/ubuntu/+spec/arm-m-validation-dashboard +""" +Public API for Launch Control. + +Please see one of the available packages for more information. +""" __version__ = "0.0.1"
Add docstring to launch_control package
zyga_json-schema-validator
train
c63ecbfd08b0e013278a6339ea5b40680c944b03
diff --git a/PySimpleGUI27.py b/PySimpleGUI27.py index <HASH>..<HASH> 100644 --- a/PySimpleGUI27.py +++ b/PySimpleGUI27.py @@ -12,7 +12,7 @@ from builtins import str from builtins import object from future import standard_library standard_library.install_aliases() -version = __version__ = "2.3.0.0 Unreleased" +version = __version__ = "2.2.1 Released - Patched" # 888888ba .d88888b oo dP .88888. dP dP dP
Corrected version number for the patch.
PySimpleGUI_PySimpleGUI
train
f7f7080a82d035452a943482817f537e1e648497
diff --git a/qutepart/completer.py b/qutepart/completer.py index <HASH>..<HASH> 100644 --- a/qutepart/completer.py +++ b/qutepart/completer.py @@ -2,7 +2,7 @@ """ from PyQt4.QtCore import QAbstractItemModel, QModelIndex, QObject, QSize, Qt -from PyQt4.QtGui import QListView +from PyQt4.QtGui import QListView, QStyle, QStyledItemDelegate class _CompletionModel(QAbstractItemModel): """QAbstractItemModel implementation for a list of completion variants @@ -49,17 +49,31 @@ class _CompletionModel(QAbstractItemModel): return QModelIndex() +class _StyledItemDelegate(QStyledItemDelegate): + """Draw QListView items without dotted focus frame + http://qt-project.org/faq/answer/how_can_i_remove_the_dotted_rectangle_from_the_cell_that_has_focus_in_my_qt + """ + def __init__(self, parent): + QStyledItemDelegate.__init__(self, parent) + + def paint(self, painter, option, index): + opt = option + opt.state &= ~QStyle.State_HasFocus + QStyledItemDelegate.paint(self, painter, opt, index) + + class _ListView(QListView): """Completion list widget """ def __init__(self, qpart, model): QListView.__init__(self, qpart.viewport()) + self.setItemDelegate(_StyledItemDelegate(self)) + self._qpart = qpart self.setModel(model) - qpart.cursorPositionChanged.connect(self._onCursorPositionChanged) - self.setStyleSheet("QTreeView:focus {border: 7px solid;}") # remove focus rect from the items + qpart.cursorPositionChanged.connect(self._onCursorPositionChanged) def _onCursorPositionChanged(self): """Cursor position changed. Update completion widget position
Remove QListView dotted frame with item delegate
andreikop_qutepart
train
f23fc65362df39dee871cde342eb068986987631
diff --git a/lib/clamp/subcommand/execution.rb b/lib/clamp/subcommand/execution.rb index <HASH>..<HASH> 100644 --- a/lib/clamp/subcommand/execution.rb +++ b/lib/clamp/subcommand/execution.rb @@ -10,7 +10,7 @@ module Clamp subcommand_class = find_subcommand_class(subcommand_name) subcommand = subcommand_class.new("#{invocation_path} #{subcommand_name}", context) self.class.declared_options.each do |option| - option_set = defined?(option.ivar_name) + option_set = instance_variable_defined?(option.ivar_name) if option_set && subcommand.respond_to?(option.write_method) subcommand.send(option.write_method, self.send(option.read_method)) end
Fix check for presence of explicitly set option.
mdub_clamp
train
cbbe09a93efb30fe9fcc52c43f1d3bc1944eb61d
diff --git a/build/Anglebars.js b/build/Anglebars.js index <HASH>..<HASH> 100644 --- a/build/Anglebars.js +++ b/build/Anglebars.js @@ -971,7 +971,7 @@ Anglebars.substrings.Fragment.prototype = { }, toString: function () { - return this.value; + return this.value || ''; } }; @@ -999,7 +999,7 @@ Anglebars.substrings.Interpolator = Anglebars.substring({ }, toString: function () { - return this.value; + return this.value || ''; } }); @@ -1027,6 +1027,7 @@ Anglebars.substrings.Section = Anglebars.substring({ while ( this.substrings.length ) { this.substrings.shift().teardown(); } + this.length = 0; }, bubble: function () { @@ -1048,7 +1049,6 @@ Anglebars.substrings.Section = Anglebars.substring({ if ( this.length ) { this.unrender(); this.length = 0; - return; } } @@ -1056,10 +1056,12 @@ Anglebars.substrings.Section = Anglebars.substring({ if ( !this.length ) { this.substrings[0] = new substrings.Fragment( this.model.children, this.anglebars, this, this.contextStack ); this.length = 1; - return; } } + this.value = this.substrings.join( '' ); + this.parent.bubble(); + return; } @@ -1111,23 +1113,6 @@ Anglebars.substrings.Section = Anglebars.substring({ this.length = 1; } } - - this.rendered = true; - - - /*// if value is an array of hashes, iterate through - if ( Anglebars.utils.isArray( value ) ) { - for ( i=0; i<value.length; i+=1 ) { - this.substrings[i] = new Anglebars.substrings.Fragment( this.model.children, this.anglebars, this, this.contextStack.concat( this.keypath + '.' + i ) ); - } - } - - // if value is a hash, add it to the context stack and update children - else { - this.substrings[0] = new Anglebars.substrings.Fragment( this.model.children, this.anglebars, this, this.contextStack.concat( this.keypath ) ); - } - - this.rendered = true;*/ } // otherwise render if value is truthy, unrender if falsy @@ -1153,7 +1138,8 @@ Anglebars.substrings.Section = Anglebars.substring({ }, toString: function () { - return this.value; + console.log( 'stringifying', this, ': ', this.value ); + return this.value || ''; } }); diff --git a/src/substrings/Fragment.js b/src/substrings/Fragment.js index <HASH>..<HASH> 100644 --- a/src/substrings/Fragment.js +++ b/src/substrings/Fragment.js @@ -28,7 +28,7 @@ Anglebars.substrings.Fragment.prototype = { }, toString: function () { - return this.value; + return this.value || ''; } }; diff --git a/src/substrings/Interpolator.js b/src/substrings/Interpolator.js index <HASH>..<HASH> 100644 --- a/src/substrings/Interpolator.js +++ b/src/substrings/Interpolator.js @@ -21,7 +21,7 @@ Anglebars.substrings.Interpolator = Anglebars.substring({ }, toString: function () { - return this.value; + return this.value || ''; } }); diff --git a/src/substrings/Section.js b/src/substrings/Section.js index <HASH>..<HASH> 100644 --- a/src/substrings/Section.js +++ b/src/substrings/Section.js @@ -18,6 +18,7 @@ Anglebars.substrings.Section = Anglebars.substring({ while ( this.substrings.length ) { this.substrings.shift().teardown(); } + this.length = 0; }, bubble: function () { @@ -39,7 +40,6 @@ Anglebars.substrings.Section = Anglebars.substring({ if ( this.length ) { this.unrender(); this.length = 0; - return; } } @@ -47,10 +47,12 @@ Anglebars.substrings.Section = Anglebars.substring({ if ( !this.length ) { this.substrings[0] = new substrings.Fragment( this.model.children, this.anglebars, this, this.contextStack ); this.length = 1; - return; } } + this.value = this.substrings.join( '' ); + this.parent.bubble(); + return; } @@ -102,23 +104,6 @@ Anglebars.substrings.Section = 
Anglebars.substring({ this.length = 1; } } - - this.rendered = true; - - - /*// if value is an array of hashes, iterate through - if ( Anglebars.utils.isArray( value ) ) { - for ( i=0; i<value.length; i+=1 ) { - this.substrings[i] = new Anglebars.substrings.Fragment( this.model.children, this.anglebars, this, this.contextStack.concat( this.keypath + '.' + i ) ); - } - } - - // if value is a hash, add it to the context stack and update children - else { - this.substrings[0] = new Anglebars.substrings.Fragment( this.model.children, this.anglebars, this, this.contextStack.concat( this.keypath ) ); - } - - this.rendered = true;*/ } // otherwise render if value is truthy, unrender if falsy @@ -144,6 +129,7 @@ Anglebars.substrings.Section = Anglebars.substring({ }, toString: function () { - return this.value; + console.log( 'stringifying', this, ': ', this.value ); + return this.value || ''; } });
fixed bug whereby inverted sections failed to update parent substrings
ractivejs_ractive
train
e5c292e55028b4ea72581cf12021594bdfa0494e
diff --git a/datatableview/datatables.py b/datatableview/datatables.py index <HASH>..<HASH> 100644 --- a/datatableview/datatables.py +++ b/datatableview/datatables.py @@ -243,6 +243,9 @@ class Datatable(six.with_metaclass(DatatableMetaclass)): valid AJAX GET parameters from client modifications to the data they see. """ + if hasattr(self, '_configured'): + return + self.resolve_virtual_columns(*tuple(self.missing_columns)) self.config = self.normalize_config(self._meta.__dict__, self.query_config) @@ -267,6 +270,8 @@ class Datatable(six.with_metaclass(DatatableMetaclass)): self.columns[column_name].sort_direction = 'desc' if name[0] == '-' else 'asc' self.columns[column_name].index = index + self._configured = True + # Client request configuration mergers def normalize_config(self, config, query_config): """ diff --git a/datatableview/views/base.py b/datatableview/views/base.py index <HASH>..<HASH> 100644 --- a/datatableview/views/base.py +++ b/datatableview/views/base.py @@ -89,6 +89,9 @@ class DatatableMixin(DatatableJSONResponseMixin, MultipleObjectMixin): # Configuration getters def get_datatable(self, **kwargs): """ Gathers and returns the final :py:class:`Datatable` instance for processing. """ + if hasattr(self, '_datatable'): + return self._datatable + datatable_class = self.get_datatable_class() if datatable_class is None: class AutoMeta: @@ -106,7 +109,8 @@ class DatatableMixin(DatatableJSONResponseMixin, MultipleObjectMixin): datatable_class = type('%s_Synthesized' % (datatable_class.__name__,), (datatable_class,), { 'Meta': opts, }) - return datatable_class(**kwargs) + self._datatable = datatable_class(**kwargs) + return self._datatable def get_datatable_class(self): return self.datatable_class
Add memoization to view's datatable, and datatable's config
pivotal-energy-solutions_django-datatable-view
train
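The record above guards both the datatable construction and its configuration pass behind instance attributes so repeated calls reuse the first result; a bare-bones sketch of that memoize-on-the-instance pattern (with a made-up builder method, not the real django-datatable-view API) could look like this:

```python
class DatatableViewSketch:
    def get_datatable(self, **kwargs):
        # Reuse the object built on a previous call, mirroring the
        # hasattr(self, '_datatable') guard added in the diff above.
        if hasattr(self, "_datatable"):
            return self._datatable
        self._datatable = self._build_datatable(**kwargs)
        return self._datatable

    def _build_datatable(self, **kwargs):
        # Hypothetical stand-in for the expensive construction step.
        return {"columns": [], "config": dict(kwargs)}

view = DatatableViewSketch()
assert view.get_datatable(page=1) is view.get_datatable(page=2)  # same cached object
```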
53b2c2eb31258f2ef16a6424161414c3c3ec8e02
diff --git a/vagrant_box_defaults.rb b/vagrant_box_defaults.rb index <HASH>..<HASH> 100644 --- a/vagrant_box_defaults.rb +++ b/vagrant_box_defaults.rb @@ -3,12 +3,12 @@ Vagrant.require_version ">= 2.2.0" $SERVER_BOX = "cilium/ubuntu-dev" -$SERVER_VERSION= "202" +$SERVER_VERSION= "208" $NETNEXT_SERVER_BOX= "cilium/ubuntu-next" -$NETNEXT_SERVER_VERSION= "98" +$NETNEXT_SERVER_VERSION= "105" @v54_SERVER_BOX= "cilium/ubuntu-5-4" -@v54_SERVER_VERSION= "5" +@v54_SERVER_VERSION= "12" @v419_SERVER_BOX= "cilium/ubuntu-4-19" -@v419_SERVER_VERSION= "41" +@v419_SERVER_VERSION= "48" @v49_SERVER_BOX= "cilium/ubuntu" -@v49_SERVER_VERSION= "202" +@v49_SERVER_VERSION= "208"
vagrant: bump all Vagrant box versions Pick up Go <I> and ipset.
cilium_cilium
train
59197f206d541efdd453c87daded9d1f90239d8f
diff --git a/spec/usage/integrations/turbolinks_integration_spec.rb b/spec/usage/integrations/turbolinks_integration_spec.rb index <HASH>..<HASH> 100644 --- a/spec/usage/integrations/turbolinks_integration_spec.rb +++ b/spec/usage/integrations/turbolinks_integration_spec.rb @@ -1,42 +1,145 @@ describe "Turbolinks integration", type: :feature, js: true do - before(:all) do - FileUtils.cp('spec/usage/application_with_turbolinks.js', 'spec/dummy/app/assets/javascripts/application.js') - end - before do - Rails.application.routes.draw do - get '/turbolinks', to: 'turbolinks#my_action', as: 'turbolinks_test_action' + class TestController < ActionController::Base + + before_action :check_params + + def check_params + expect_params(params.permit!.to_h) end - class TurbolinksController < ActionController::Base - layout "application" + def expect_params(params) + end - include Matestack::Ui::Core::ApplicationHelper + end + + it "Matestack can be used with turbolinks" do - def my_action + module Apps + end + + class Apps::TurbolinksTest < Matestack::Ui::App + def response + components { + nav do + link path: :turbolinks1_path do + button text: "Link to Page 1" + end + transition path: :turbolinks2_path do + button text: "Transition to Page 2" + end + link path: :turbolinks3_path do + button text: "Link to Page 3" + end + end + main do + page_content + end + } end end - module Pages::Turbolinks + module Pages::TurbolinksTest end - class Pages::Turbolinks::MyAction < Matestack::Ui::Page + class Pages::TurbolinksTest::Page1 < Matestack::Ui::Page def response components { - plain "Hello from matestack with turbolinks" + plain "Hello from matestack with turbolinks - Page 1" } end end - end + class Pages::TurbolinksTest::Page2 < Matestack::Ui::Page + def response + components { + plain "Hello from matestack with turbolinks - Page 2" + } + end + end + class Pages::TurbolinksTest::Page3 < Matestack::Ui::Page + def response + components { + plain "Hello from matestack with turbolinks - Page 3" + action action_config do + button text: "click me" + end + } + end - after do - FileUtils.cp('spec/usage/application.js', 'spec/dummy/app/assets/javascripts/application.js') + def action_config + return { + method: :post, + path: :action_test_path, + data: { + foo: "bar" + } + } + end + end + + Rails.application.routes.append do + get '/turbolinks1', to: 'turbolinks_test#page1', as: :turbolinks1 + get '/turbolinks2', to: 'turbolinks_test#page2', as: :turbolinks2 + get '/turbolinks3', to: 'turbolinks_test#page3', as: :turbolinks3 + post '/action_test', to: 'action_test#test' + end Rails.application.reload_routes! 
- end - specify "Matestack can be used with turbolinks" do - visit "/turbolinks" + class TurbolinksTestController < ActionController::Base + layout "application_with_turbolinks" + + include Matestack::Ui::Core::ApplicationHelper + + def page1 + responder_for(Pages::TurbolinksTest::Page1) + end + def page2 + responder_for(Pages::TurbolinksTest::Page2) + end + def page3 + responder_for(Pages::TurbolinksTest::Page3) + end + end + + class ActionTestController < TestController + + def test + render json: {}, status: 200 + end + + end + allow_any_instance_of(ActionTestController).to receive(:expect_params) + + + visit "/turbolinks1" + + expect(page).to have_text "Hello from matestack with turbolinks - Page 1" + + click_button "Transition to Page 2" + + expect(page).to have_text "Hello from matestack with turbolinks - Page 2" + + click_button "Link to Page 1" + + expect(page).to have_text "Hello from matestack with turbolinks - Page 1" + + click_button "Transition to Page 2" + + expect(page).to have_text "Hello from matestack with turbolinks - Page 2" + + click_button "Link to Page 1" + + expect(page).to have_text "Hello from matestack with turbolinks - Page 1" + + visit "/turbolinks3" + + expect(page).to have_text "Hello from matestack with turbolinks - Page 3" + + expect_any_instance_of(ActionTestController).to receive(:expect_params) + .with(hash_including(:foo => "bar")) + + click_button "click me" - expect(page).to have_text "Hello from matestack without turbolinks" + expect(page).to have_text "Hello from matestack with turbolinks - Page 3" end end
refactored turbolinks spec in order to show turbolinks issues
basemate_matestack-ui-core
train
3b85975bc54bea25cfa4f4463f76aa70008de6ac
diff --git a/pug/nlp/djdb.py b/pug/nlp/djdb.py index <HASH>..<HASH> 100644 --- a/pug/nlp/djdb.py +++ b/pug/nlp/djdb.py @@ -1642,7 +1642,8 @@ def import_items(item_seq, dest_model, batch_len=500, clear=False, dry_run=True except: obj, row_errors = django_object_from_row(d, dest_model) try: - obj._update(save=False, overwrite=False) + if hasattr(obj, '_update'): + obj._update(save=False, overwrite=False) except: if verbosity: print_exc()
check for _update before running
hobson_pug
train
ca9533de0782c751199d18ad48007fe62f89c56f
diff --git a/holoviews/plotting/bokeh/annotation.py b/holoviews/plotting/bokeh/annotation.py index <HASH>..<HASH> 100644 --- a/holoviews/plotting/bokeh/annotation.py +++ b/holoviews/plotting/bokeh/annotation.py @@ -44,6 +44,8 @@ class LineAnnotationPlot(ElementPlot): _update_handles = ['glyph'] + _plot_methods = dict(single='Span') + def get_data(self, element, ranges=None, empty=False): data, mapping = {}, {} mapping['dimension'] = 'width' if isinstance(element, HLine) else 'height' diff --git a/holoviews/plotting/bokeh/element.py b/holoviews/plotting/bokeh/element.py index <HASH>..<HASH> 100644 --- a/holoviews/plotting/bokeh/element.py +++ b/holoviews/plotting/bokeh/element.py @@ -539,7 +539,10 @@ class ElementPlot(BokehPlot, GenericElementPlot): Initializes a new plot object with the last available frame. """ # Get element key and ranges for frame - element = self.hmap.last + if self.batched: + element = [el for el in self.hmap.data.values() if len(el)][-1] + else: + element = self.hmap.last key = self.keys[-1] ranges = self.compute_ranges(self.hmap, key, ranges) self.current_ranges = ranges @@ -1010,7 +1013,7 @@ class OverlayPlot(GenericOverlayPlot, LegendPlot): def initialize_plot(self, ranges=None, plot=None, plots=None): key = self.keys[-1] - element = self._get_frame(key) + element = [el for el in self.hmap.data.values() if len(el)][-1] ranges = self.compute_ranges(self.hmap, key, ranges) if plot is None and not self.tabs and not self.batched: plot = self._init_plot(key, element, ranges=ranges, plots=plots) diff --git a/holoviews/plotting/plot.py b/holoviews/plotting/plot.py index <HASH>..<HASH> 100644 --- a/holoviews/plotting/plot.py +++ b/holoviews/plotting/plot.py @@ -559,8 +559,8 @@ class GenericElementPlot(DimensionedPlot): self.hmap = element plot_element = self.hmap.last - if self.batched: - plot_element = plot_element.last + if self.batched and not isinstance(self, GenericOverlayPlot): + plot_element = [el for el in plot_element if len(el) > 0][-1] top_level = keys is None if top_level: @@ -851,7 +851,7 @@ class GenericOverlayPlot(GenericElementPlot): if issubclass(plottype, GenericOverlayPlot): opts['show_legend'] = self.show_legend - elif self.batched: + elif self.batched and 'batched' in plottype._plot_methods: opts['batched'] = self.batched if len(ordering) > self.legend_limit: opts['show_legend'] = False
Fixed empty NdOverlay on initialization in batched mode
pyviz_holoviews
train
bc9863d636e2be3025cdade374947145f759db89
diff --git a/JS Library/index.html b/JS Library/index.html index <HASH>..<HASH> 100644 --- a/JS Library/index.html +++ b/JS Library/index.html @@ -76,13 +76,13 @@ buttonsArray = El(".btns"), divs = El("div"); - /*el.evt("click",function(){ - + el.evt("click",function(){ + buttonsArray.hide(); }); - el2.evt("click",function(){ - - });*/ + test.evt("click",function(){ + buttonsArray.show(); + }); </script> </body> diff --git a/JS Library/modules/Append.js b/JS Library/modules/Append.js index <HASH>..<HASH> 100644 --- a/JS Library/modules/Append.js +++ b/JS Library/modules/Append.js @@ -37,4 +37,4 @@ } return this; } - } \ No newline at end of file + }; \ No newline at end of file diff --git a/JS Library/modules/CSS.js b/JS Library/modules/CSS.js index <HASH>..<HASH> 100644 --- a/JS Library/modules/CSS.js +++ b/JS Library/modules/CSS.js @@ -8,17 +8,6 @@ * Requires: Core.js */ - /*Grindstone.prototype.css = function(newStyles){ - var element = this.init; - if (testParam(newStyles)){ - for (var i in newStyles){ - element.style[i] = newStyles[i]; - } - } else { - throw new Error("CSS properties to edit are undefined."); - } - }*/ - Grindstone.prototype.css = function(newStyles){ var results = this.init; if (results.length > 1){ @@ -44,4 +33,4 @@ } return this; } - } \ No newline at end of file + }; \ No newline at end of file diff --git a/JS Library/modules/Display.js b/JS Library/modules/Display.js index <HASH>..<HASH> 100644 --- a/JS Library/modules/Display.js +++ b/JS Library/modules/Display.js @@ -60,6 +60,6 @@ } else { element.style.display = "none"; } + return this; } - return this; }; \ No newline at end of file diff --git a/JS Library/modules/Fade.js b/JS Library/modules/Fade.js index <HASH>..<HASH> 100644 --- a/JS Library/modules/Fade.js +++ b/JS Library/modules/Fade.js @@ -10,12 +10,12 @@ Grindstone.prototype.fadeIn = function(duration){ var results = this.init; + if (!testParam(duration)){ + duration = 400; + } if (results.length > 1){ for (var i = 0; i < results.length; i++){ var element = results[i]; - if (!testParam(duration)){ - duration = 400; - } if (element.style.display != "block"){ element.style.display = "block"; var op = 0.01, @@ -34,9 +34,6 @@ return this; } else { var element = results; - if (!testParam(duration)){ - duration = 400; - } if (element.style.display != "block"){ element.style.display = "block"; var op = 0.01, @@ -57,12 +54,12 @@ Grindstone.prototype.fadeOut = function(duration){ var results = this.init; + if (!testParam(duration)){ + duration = 400; + } if (results.length > 1){ for (var i = 0; i < results.length; i++){ var element = results[i]; - if (!testParam(duration)){ - duration = 400; - } if (element.style.display != "none"){ var op = 1, gap = 25 / duration, @@ -81,9 +78,6 @@ return this; } else { var element = results; - if (!testParam(duration)){ - duration = 400; - } if (element.style.display != "none"){ var op = 1, gap = 25 / duration, diff --git a/JS Library/modules/Prepend.js b/JS Library/modules/Prepend.js index <HASH>..<HASH> 100644 --- a/JS Library/modules/Prepend.js +++ b/JS Library/modules/Prepend.js @@ -37,4 +37,4 @@ } return this; } - } \ No newline at end of file + }; \ No newline at end of file diff --git a/JS Library/modules/Remove.js b/JS Library/modules/Remove.js index <HASH>..<HASH> 100644 --- a/JS Library/modules/Remove.js +++ b/JS Library/modules/Remove.js @@ -29,4 +29,4 @@ } return this; } - } \ No newline at end of file + }; \ No newline at end of file diff --git a/JS Library/modules/Value.js b/JS Library/modules/Value.js index 
<HASH>..<HASH> 100644 --- a/JS Library/modules/Value.js +++ b/JS Library/modules/Value.js @@ -73,5 +73,6 @@ } else { throw new Error("Please specify the value to call."); } + return this; } }; \ No newline at end of file
Inconsequential edits. Tried to support arrays in Fade() and Classes(), unsuccessfully.
dzervoudakes_GrindstoneJS
train
9a753a887125895bdd0e2386e60c778d4ea24043
diff --git a/Controller/ResourceController.php b/Controller/ResourceController.php index <HASH>..<HASH> 100644 --- a/Controller/ResourceController.php +++ b/Controller/ResourceController.php @@ -365,7 +365,14 @@ class ResourceController * @EXT\Route( * "/download", * name="claro_resource_download", - * options={"expose"=true} + * options={"expose"=true}, + * defaults ={"forceArchive"=false} + * ) + * @EXT\Route( + * "/download/{forceArchive}", + * name="claro_resource_download", + * options={"expose"=true}, + * requirements={"forceArchive" = "^(true|false|0|1)$"}, * ) * @EXT\ParamConverter( * "nodes", @@ -379,13 +386,14 @@ class ResourceController * * @param array $nodes * + * @param bool $forceArchive * @return \Symfony\Component\HttpFoundation\Response */ - public function downloadAction(array $nodes) + public function downloadAction(array $nodes, $forceArchive = false) { $collection = new ResourceCollection($nodes); $this->checkAccess('EXPORT', $collection); - $data = $this->resourceManager->download($nodes); + $data = $this->resourceManager->download($nodes, $forceArchive); $file = $data['file']; $fileName = $data['name']; $mimeType = $data['mimeType']; diff --git a/Manager/ResourceManager.php b/Manager/ResourceManager.php index <HASH>..<HASH> 100644 --- a/Manager/ResourceManager.php +++ b/Manager/ResourceManager.php @@ -931,7 +931,7 @@ class ResourceManager * * @return array */ - public function download(array $elements) + public function download(array $elements, $forceArchive = false) { $data = array(); @@ -944,7 +944,7 @@ class ResourceManager $archive->open($pathArch, \ZipArchive::CREATE); $nodes = $this->expandResources($elements); - if (count($nodes) === 1) { + if (!$forceArchive && count($nodes) === 1) { $event = $this->dispatcher->dispatch( "download_{$nodes[0]->getResourceType()->getName()}", 'DownloadResource',
Adding forceArchive option. Useful if you want to keep the folders and architecture while you have only 1 real file.
claroline_CoreBundle
train
636c09d0c08c378c0f166305a0257a88355757cf
diff --git a/src/Assignment.php b/src/Assignment.php index <HASH>..<HASH> 100644 --- a/src/Assignment.php +++ b/src/Assignment.php @@ -340,13 +340,13 @@ class Assignment extends ReporticoObject if (preg_match('/embed_image\(.*\)/', $out_string)) { $this->non_assignment_operation = true; $out_string = preg_replace('/embed_image\(/', - '$this->embed_image("' . $this->query_name . "\",", $out_string); + '$this->embedImage("' . $this->query_name . "\",", $out_string); } if (preg_match('/embed_hyperlink\(.*\)/', $out_string)) { $this->non_assignment_operation = true; $out_string = preg_replace('/embed_hyperlink\(/', - '$this->embed_hyperlink("' . $this->query_name . "\",", $out_string); + '$this->embedHyperlink("' . $this->query_name . "\",", $out_string); } $out_string = preg_replace('/lineno\(\)/',
Correct embed_hyperlink drilldown assignment to call newly named EmbedHyperlink method
reportico-web_reportico
train
6d8c4cbfd1fa038d4bbbaba937f1813a88535c1e
diff --git a/CHANGELOG.md b/CHANGELOG.md index <HASH>..<HASH> 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/). +### v0.5.0 + +##### Added + +- Demodulize job_class attribute to get the right path. + ### v0.4.1 ##### Fixed diff --git a/lib/activeadmin_jobs/job_extensions.rb b/lib/activeadmin_jobs/job_extensions.rb index <HASH>..<HASH> 100644 --- a/lib/activeadmin_jobs/job_extensions.rb +++ b/lib/activeadmin_jobs/job_extensions.rb @@ -1,7 +1,7 @@ module ActiveadminJobs module JobExtensions def description - I18n.t!("activeadmin_jobs.#{job_class.tableize.singularize}.description") + I18n.t!("activeadmin_jobs.#{job_class.demodulize.tableize.singularize}.description") rescue I18n::MissingTranslationData "" end diff --git a/lib/activeadmin_jobs/job_result_renderer.rb b/lib/activeadmin_jobs/job_result_renderer.rb index <HASH>..<HASH> 100644 --- a/lib/activeadmin_jobs/job_result_renderer.rb +++ b/lib/activeadmin_jobs/job_result_renderer.rb @@ -15,7 +15,7 @@ module ActiveadminJobs private def partial_path - "#{job.job_class.tableize.singularize}.#{job.status}.html.erb" + "#{job.job_class.demodulize.tableize.singularize}.#{job.status}.html.erb" end def formatted_result
feat(): demodulize to get right path
platanus_activeadmin_jobs
train
c2a3d25d05b96828f39cba236db846d51fd50782
diff --git a/rah_flat.php b/rah_flat.php index <HASH>..<HASH> 100644 --- a/rah_flat.php +++ b/rah_flat.php @@ -177,9 +177,9 @@ class rah_flat { if($format == 'flat_meta'){ if( - !file_exists($file.'.meta') || - !is_readable($file.'.meta') || - !is_file($file.'.meta') + !file_exists($file.'.meta.xml') || + !is_readable($file.'.meta.xml') || + !is_file($file.'.meta.xml') ) continue;
Renamed meta files to "*.meta.xml"; added .xml extension.
gocom_rah_flat
train
b896b3df56293547016c0b4a16707a63142e92e0
diff --git a/lxd/certificates.go b/lxd/certificates.go index <HASH>..<HASH> 100644 --- a/lxd/certificates.go +++ b/lxd/certificates.go @@ -498,8 +498,19 @@ func certificatesPost(d *Daemon, r *http.Request) response.Response { return response.BadRequest(fmt.Errorf("Can't use certificate if token is requested")) } - if req.Token && req.Type != "client" { - return response.BadRequest(fmt.Errorf("Tokens can only be issued for client certificates")) + if req.Token { + if req.Type != "client" { + return response.BadRequest(fmt.Errorf("Tokens can only be issued for client certificates")) + } + + address, err := node.HTTPSAddress(d.db.Node) + if err != nil { + return response.InternalError(fmt.Errorf("Failed to fetch node address: %w", err)) + } + + if address == "" { + return response.BadRequest(fmt.Errorf("Can't issue token when server isn't listening on network")) + } } // Access check.
lxd/certificates: Require an address for token issuance Closes #<I>
lxc_lxd
train
c9e7346bfd9352397c21e5f0884a200ff8f37724
diff --git a/terraform/eval_diff.go b/terraform/eval_diff.go index <HASH>..<HASH> 100644 --- a/terraform/eval_diff.go +++ b/terraform/eval_diff.go @@ -310,11 +310,15 @@ func (n *EvalDiff) Eval(ctx EvalContext) (interface{}, error) { // from known prior values to unknown values, unless the provider is // able to predict new values for any of these computed attributes. nullPriorVal := cty.NullVal(schema.ImpliedType()) + + // create a new proposed value from the null state and the config + proposedNewVal = objchange.ProposedNewObject(schema, nullPriorVal, configVal) + resp = provider.PlanResourceChange(providers.PlanResourceChangeRequest{ TypeName: n.Addr.Resource.Type, Config: configVal, PriorState: nullPriorVal, - ProposedNewState: configVal, + ProposedNewState: proposedNewVal, PriorPrivate: plannedPrivate, }) // We need to tread carefully here, since if there are any warnings
create a new proposed value when replacing When replacing an instance, calculate a new proposed value from the null state and the config. This ensures that all unknown values are properly set.
hashicorp_terraform
train
237c28e0f571cbea76f679128be9ffbb7c59a7a4
diff --git a/utils/swarm.js b/utils/swarm.js index <HASH>..<HASH> 100644 --- a/utils/swarm.js +++ b/utils/swarm.js @@ -292,7 +292,7 @@ const lib = { published: onePortConfig.PublishedPort }; - if(onePortConfig.PublishMode && onePortConfig.PublishMode === 'local') { + if(onePortConfig.PublishMode && onePortConfig.PublishMode === 'host') { port.preserveClientIP = true; }
Bug fix in buildServiceRecord()
soajs_soajs.core.drivers
train
ec43aa121f1642c709310411a410460ba0f6fc78
diff --git a/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker00.java b/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker00.java index <HASH>..<HASH> 100644 --- a/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker00.java +++ b/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker00.java @@ -200,13 +200,13 @@ public class WebSocketClientHandshaker00 extends WebSocketClientHandshaker { } String upgrade = response.getHeader(Names.UPGRADE); - if (upgrade == null || !upgrade.equals(Values.WEBSOCKET)) { + if (upgrade == null || !upgrade.equalsIgnoreCase(Values.WEBSOCKET)) { throw new WebSocketHandshakeException("Invalid handshake response upgrade: " + response.getHeader(Names.UPGRADE)); } String connection = response.getHeader(Names.CONNECTION); - if (connection == null || !connection.equals(Values.UPGRADE)) { + if (connection == null || !connection.equalsIgnoreCase(Values.UPGRADE)) { throw new WebSocketHandshakeException("Invalid handshake response connection: " + response.getHeader(Names.CONNECTION)); } diff --git a/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker08.java b/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker08.java index <HASH>..<HASH> 100644 --- a/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker08.java +++ b/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker08.java @@ -178,13 +178,13 @@ public class WebSocketClientHandshaker08 extends WebSocketClientHandshaker { } String upgrade = response.getHeader(Names.UPGRADE); - if (upgrade == null || !upgrade.equals(Values.WEBSOCKET.toLowerCase())) { + if (upgrade == null || !upgrade.equalsIgnoreCase(Values.WEBSOCKET)) { throw new WebSocketHandshakeException("Invalid handshake response upgrade: " + response.getHeader(Names.UPGRADE)); } String connection = response.getHeader(Names.CONNECTION); - if (connection == null || !connection.equals(Values.UPGRADE)) { + if (connection == null || !connection.equalsIgnoreCase(Values.UPGRADE)) { throw new WebSocketHandshakeException("Invalid handshake response connection: " + response.getHeader(Names.CONNECTION)); } diff --git a/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker13.java b/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker13.java index <HASH>..<HASH> 100644 --- a/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker13.java +++ b/codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketClientHandshaker13.java @@ -178,13 +178,13 @@ public class WebSocketClientHandshaker13 extends WebSocketClientHandshaker { } String upgrade = response.getHeader(Names.UPGRADE); - if (upgrade == null || !upgrade.equals(Values.WEBSOCKET.toLowerCase())) { + if (upgrade == null || !upgrade.equalsIgnoreCase(Values.WEBSOCKET)) { throw new WebSocketHandshakeException("Invalid handshake response upgrade: " + response.getHeader(Names.UPGRADE)); } String connection = response.getHeader(Names.CONNECTION); - if (connection == null || !connection.equals(Values.UPGRADE)) { + if (connection == null || !connection.equalsIgnoreCase(Values.UPGRADE)) { throw new WebSocketHandshakeException("Invalid handshake response connection: " + response.getHeader(Names.CONNECTION)); }
Case-insensitive matching for Upgrade and Connection header (#<I>)
netty_netty
train
f56f7c626f0aebc4c40e7c513374020c2a92f200
diff --git a/lib/index.js b/lib/index.js index <HASH>..<HASH> 100644 --- a/lib/index.js +++ b/lib/index.js @@ -213,7 +213,11 @@ Tokenizer.prototype.splitAndMerge = function tokenizeSplitAndMerge(fn, opts) { Tokenizer.prototype.filter = function tokenizeFilter(fn) { return this.split(function(text, tok) { if (fn.apply(null, arguments)) { - return tok.value; + return { + value: tok.value, + index: 0, + offset: tok.offset + }; } return undefined; }); diff --git a/test/filter.js b/test/filter.js index <HASH>..<HASH> 100644 --- a/test/filter.js +++ b/test/filter.js @@ -14,5 +14,23 @@ describe('tokenize.filter()', function() { tokens[0].index.should.equal(11); tokens[0].offset.should.equal(4); }); + + it('should not change valeu of index and offset', function() { + var tokens = [ + { value: 'test', index: 10, offset: 10 }, + { value: 'test 2', index: 30, offset: 20} + ]; + + var filter = tokenize.filter(function() { + return true; + }); + + tokens = filter(tokens); + tokens.should.have.lengthOf(2); + tokens[0].index.should.equal(10); + tokens[0].offset.should.equal(10); + tokens[1].index.should.equal(30); + tokens[1].offset.should.equal(20); + }) });
Filter should not change value of offset
GitbookIO_tokenize-text
train
8ed670c8f9ed13ae035e3a3b8c4cd51a9f1bd931
diff --git a/test/test_grid.rb b/test/test_grid.rb index <HASH>..<HASH> 100644 --- a/test/test_grid.rb +++ b/test/test_grid.rb @@ -28,7 +28,7 @@ $: << File.expand_path("../ext", File.dirname(__FILE__)) $: << File.dirname(__FILE__) require 'test/unit' -#require 'rebuild' +require 'rebuild' require 'stub/cgi' require 'htmlgrid/label' require 'htmlgrid/grid'
... it wasn't grid.c after all ... still looking for the culprit.
zdavatz_htmlgrid
train
65e4d2c7af2cbd35147ae111ea5a8007e6c028fe
diff --git a/redisson/src/main/java/org/redisson/client/RedisClient.java b/redisson/src/main/java/org/redisson/client/RedisClient.java index <HASH>..<HASH> 100644 --- a/redisson/src/main/java/org/redisson/client/RedisClient.java +++ b/redisson/src/main/java/org/redisson/client/RedisClient.java @@ -177,7 +177,7 @@ public final class RedisClient { byte[] addr = NetUtil.createByteArrayFromIpAddressString(uri.getHost()); if (addr != null) { try { - resolvedAddr = new InetSocketAddress(InetAddress.getByAddress(uri.getHost(), addr), uri.getPort()); + resolvedAddr = new InetSocketAddress(InetAddress.getByAddress(addr), uri.getPort()); } catch (UnknownHostException e) { // skip } @@ -196,7 +196,8 @@ public final class RedisClient { } InetSocketAddress resolved = future.getNow(); - resolvedAddr = createInetSocketAddress(resolved, uri.getHost()); + byte[] addr = resolved.getAddress().getAddress(); + resolvedAddr = new InetSocketAddress(InetAddress.getByAddress(uri.getHost(), addr), resolved.getPort()); promise.trySuccess(resolvedAddr); } @@ -204,15 +205,6 @@ public final class RedisClient { return promise; } - private InetSocketAddress createInetSocketAddress(InetSocketAddress resolved, String host) { - byte[] addr = NetUtil.createByteArrayFromIpAddressString(resolved.getAddress().getHostAddress()); - try { - return new InetSocketAddress(InetAddress.getByAddress(host, addr), resolved.getPort()); - } catch (UnknownHostException e) { - throw new RuntimeException(e); - } - } - public RFuture<RedisConnection> connectAsync() { final RPromise<RedisConnection> f = new RedissonPromise<RedisConnection>();
Fixed - result of InetAddress.getHostAddress() and InetAddress.getHostName() doesn't match for IPV6 addresses. #<I>
redisson_redisson
train
b71f9be638893916f17037ae41ad0ac2838f5146
diff --git a/scripts/tools/MonitoringExtraFieldMigration.php b/scripts/tools/MonitoringExtraFieldMigration.php index <HASH>..<HASH> 100644 --- a/scripts/tools/MonitoringExtraFieldMigration.php +++ b/scripts/tools/MonitoringExtraFieldMigration.php @@ -62,6 +62,7 @@ class MonitoringExtraFieldMigration extends ScriptAction ], 'deleteKv' => [ + 'prefix' => 'd', 'flag' => true, 'longPrefix' => 'deleteKv', 'defaultValue' => 0, @@ -166,7 +167,7 @@ class MonitoringExtraFieldMigration extends ScriptAction $deliveryExecutions = $this->monitoringService->find([], $options); - if (!$this->getOption('deleteKv')) { + if (!$this->getOption('deleteKv') || !$this->getOption('wet-run')) { $offset += $chunkSize; } @@ -231,10 +232,15 @@ class MonitoringExtraFieldMigration extends ScriptAction $report = Report::createInfo(); $report->add(Report::createSuccess(sprintf('Saved delivery execution: %s', $this->updated))); $report->add(Report::createSuccess(sprintf('ExtraData removed from KV table: %s', $this->deleted))); - $report->add(new Report( - $wetrun ? Report::TYPE_SUCCESS : Report::TYPE_ERROR, - sprintf('Script runtime executed in `%s` mode', $wetrun ? 'WET_RUN' : 'DRY_RUN') - )); + if ($wetrun) { + $report->add(new Report(Report::TYPE_SUCCESS, 'Script runtime executed in `WET_RUN` mode')); + $report->add(new Report(Report::TYPE_INFO, + 'You can now check that `kv_delivery_monitoring` table is empty and delete it' + )); + + } else { + $report->add(new Report(Report::TYPE_ERROR, 'Script runtime executed in `DRY_RUN` mode')); + } return $report; } }
fix: add prefix to deleteKv option (cherry picked from commit ffc<I>d3a<I>efedf<I>ce<I>d6a5afb0b<I>bb2ab3)
oat-sa_extension-tao-proctoring
train
306e54f06f11a20063b8e5e473cc688e89721fea
diff --git a/lib/ProMotion/screen/screen_navigation.rb b/lib/ProMotion/screen/screen_navigation.rb index <HASH>..<HASH> 100644 --- a/lib/ProMotion/screen/screen_navigation.rb +++ b/lib/ProMotion/screen/screen_navigation.rb @@ -18,7 +18,7 @@ module ProMotion open_root_screen screen elsif args[:modal] - present_modal_view_controller screen, args[:animated] + present_modal_view_controller screen, args[:animated], args[:completion] elsif args[:in_tab] && self.tab_bar present_view_controller_in_tab_bar_controller screen, args[:in_tab] @@ -117,8 +117,8 @@ module ProMotion end end - def present_modal_view_controller(screen, animated) - self.presentModalViewController((screen.navigationController || screen), animated:animated) + def present_modal_view_controller(screen, animated, completion) + self.presentViewController((screen.navigationController || screen), animated:animated, completion:completion) end def present_view_controller_in_tab_bar_controller(screen, tab_name) diff --git a/spec/unit/screen_helpers_spec.rb b/spec/unit/screen_helpers_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/screen_helpers_spec.rb +++ b/spec/unit/screen_helpers_spec.rb @@ -135,11 +135,12 @@ describe "screen helpers" do it "should present the navigationController when showing a modal screen" do new_screen = @screen.send(:set_up_screen_for_open, BasicScreen, modal: true) - @screen.mock!('presentModalViewController:animated:') do |vc, animated| + @screen.mock!('presentViewController:animated:completion:') do |vc, animated, completion| vc.should == (new_screen.navigationController || new_screen) animated.should == true + completion.should == nil end - @screen.send(:present_modal_view_controller, new_screen, true) + @screen.send(:present_modal_view_controller, new_screen, true, nil) end # it "should push screen onto nav controller stack inside a tab bar" do @@ -157,28 +158,31 @@ describe "screen helpers" do end it "should present a modal screen if :modal is provided" do - @screen.mock!(:present_modal_view_controller) do |screen, animated| + @screen.mock!(:present_modal_view_controller) do |screen, animated, completion| screen.should.be.instance_of BasicScreen animated.should == true + completion.should.be.kind_of Proc end - screen = @screen.open BasicScreen, modal: true + screen = @screen.open BasicScreen, modal: true, completion: lambda{} screen.should.be.kind_of BasicScreen end it "should present a modal screen if open_modal is used" do - @screen.mock!(:present_modal_view_controller) do |screen, animated| + @screen.mock!(:present_modal_view_controller) do |screen, animated, completion| screen.should.be.instance_of BasicScreen animated.should == true + completion.should == nil end screen = @screen.open_modal BasicScreen screen.should.be.kind_of BasicScreen end - it "should respect animated property of opening modal screens" do + it "should respect animated and competion property of opening modal screens" do new_screen = @screen.send(:set_up_screen_for_open, BasicScreen) - @screen.mock!('presentModalViewController:animated:') do |vc, animated| + @screen.mock!('presentViewController:animated:completion:') do |vc, animated, completion| animated.should == false + completion.should == nil end screen = @screen.send(:open, new_screen, animated: false, modal: true)
update the deprecated presentModalViewController:animated: in favor of the new presentViewController:animated:completion: and updated tests
infinitered_ProMotion
train
38a5eae37e6695985f1eb3dc4678e30c3ecdd9a5
diff --git a/addon/properties/relations/belongs-to-loaded.js b/addon/properties/relations/belongs-to-loaded.js index <HASH>..<HASH> 100644 --- a/addon/properties/relations/belongs-to-loaded.js +++ b/addon/properties/relations/belongs-to-loaded.js @@ -107,6 +107,11 @@ export default class BelongsToLoadedRelation extends BelongsToProxiedRelation { return this.load.state; } + getModel() { + this.getLoadPromise(); + return super.getModel(...arguments); + } + serialize() { }
also getModel calls getLoadPromise --HG-- branch : feature/belongs-to-loaded
ampatspell_ember-cli-sofa
train
02189d883d14c5755740bc5c0bc46a6b5efb7d82
diff --git a/devices.js b/devices.js index <HASH>..<HASH> 100644 --- a/devices.js +++ b/devices.js @@ -3091,6 +3091,33 @@ const devices = [ execute(device, actions, callback); }, }, + + // Securifi + { + zigbeeModel: ['PP-WHT-US'], + model: 'PP-WHT-US', + vendor: 'Securifi', + description: 'Peanut Smart Plug', + supports: 'on/off, power measurement', + fromZigbee: [fz.ignore_electrical_change, fz.state, fz.ignore_onoff_change], + toZigbee: [tz.on_off], + configure: (ieeeAddr, shepherd, coordinator, callback) => { + const device = shepherd.find(ieeeAddr, 1); + const onOff = {direction: 0, attrId: 0, dataType: 16, minRepIntval: 0, maxRepIntval: 1000, repChange: 0}; + const rmsCurrent = { + direction: 0, attrId: 1288, dataType: 33, minRepIntval: 0, maxRepIntval: 3, repChange: 0, + }; + const electricalCfg = [rmsCurrent]; + const actions = [ + (cb) => device.foundation('genOnOff', 'configReport', [onOff], foundationCfg, cb), + (cb) => device.bind('genOnOff', coordinator, cb), + (cb) => device.foundation('haElectricalMeasurement', 'configReport', electricalCfg, foundationCfg, cb), + (cb) => device.bind('haElectricalMeasurement', coordinator, cb), + ]; + + execute(device, actions, callback); + }, + }, ]; module.exports = devices.map((device) =>
Add Securifi Peanut Smart Plug (#<I>) * Add Securifi Peanut Smart Plug Measurements not supported/tested as they require a firmware upgrade (?). Switch works though. See <URL>
Koenkk_zigbee-shepherd-converters
train
58de72e66615d7e543bff147950bff1ba4046077
diff --git a/composer.json b/composer.json index <HASH>..<HASH> 100644 --- a/composer.json +++ b/composer.json @@ -32,7 +32,7 @@ "symfony/yaml": "~2.6", "halleck45/php-metrics": "~1.1", "peridot-php/peridot": "~1.15", - "expectation/peridot-expectation": "~1.2", + "expect/peridot-expect-plugin": "~2.0", "holyshared/peridot-temporary-plugin": "~0.3", "codegyre/robo": "~0.5", "cloak/robo-coveralls-kit": "~1.1", diff --git a/peridot.coverage.php b/peridot.coverage.php index <HASH>..<HASH> 100644 --- a/peridot.coverage.php +++ b/peridot.coverage.php @@ -6,7 +6,7 @@ use Evenement\EventEmitterInterface; use Peridot\Configuration; use Peridot\Console\Command; use Peridot\Runner\SuiteLoaderInterface; -use expectation\peridot\ExpectationPlugin; +use expect\peridot\ExpectPlugin; use Symfony\Component\Yaml\Yaml; use Peridot\Reporter\Dot\DotReporterPlugin; use holyshared\peridot\temporary\TemporaryPlugin; @@ -87,7 +87,7 @@ class SuiteLoader implements SuiteLoaderInterface return function(EventEmitterInterface $emitter) { - ExpectationPlugin::create()->registerTo($emitter); + ExpectPlugin::create()->registerTo($emitter); TemporaryPlugin::create()->registerTo($emitter); (new DotReporterPlugin($emitter)); diff --git a/peridot.php b/peridot.php index <HASH>..<HASH> 100644 --- a/peridot.php +++ b/peridot.php @@ -1,13 +1,13 @@ <?php use Evenement\EventEmitterInterface; -use Peridot\Reporter\Dot\DotReporterPlugin; -use expectation\peridot\ExpectationPlugin; +use expect\peridot\ExpectPlugin; use holyshared\peridot\temporary\TemporaryPlugin; +use Peridot\Reporter\Dot\DotReporterPlugin; return function(EventEmitterInterface $emitter) { - ExpectationPlugin::create()->registerTo($emitter); + ExpectPlugin::create()->registerTo($emitter); TemporaryPlugin::create()->registerTo($emitter); (new DotReporterPlugin($emitter)); };
Upgrade peridot-expect-plugin
cloak-php_cloak
train
71ba370fdc0b030e96f2fde613334af77292b5c4
diff --git a/core/codegen/src/main/resources/JavaUtils/SeqUtil.java b/core/codegen/src/main/resources/JavaUtils/SeqUtil.java index <HASH>..<HASH> 100644 --- a/core/codegen/src/main/resources/JavaUtils/SeqUtil.java +++ b/core/codegen/src/main/resources/JavaUtils/SeqUtil.java @@ -103,14 +103,7 @@ public class SeqUtil if(left == null || right == null) throw new IllegalArgumentException("A sequences cannot be compared to null"); - if(left.size() != right.size()) - return false; - - for(int i = 0; i < left.size(); i++) - if(!left.get(i).equals(right.get(i))) - return false; - - return true; + return left.equals(right); } public static VDMSeq conc(VDMSeq left, VDMSeq right)
Improved equality check for VDMSeq class
overturetool_overture
train
7d31d9424300243758383d332bbd9e0dbd9cc37d
diff --git a/extensions/theme-layouts.php b/extensions/theme-layouts.php index <HASH>..<HASH> 100644 --- a/extensions/theme-layouts.php +++ b/extensions/theme-layouts.php @@ -19,7 +19,7 @@ * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * @package ThemeLayouts - * @version 0.6.0-alpha + * @version 1.0.0-alpha * @author Justin Tadlock <justin@justintadlock.com> * @copyright Copyright (c) 2010 - 2013, Justin Tadlock * @link http://justintadlock.com @@ -480,8 +480,6 @@ function theme_layouts_post_meta_box( $post, $box ) { <?php wp_nonce_field( basename( __FILE__ ), 'theme-layouts-nonce' ); ?> - <p><?php _e( 'Layout is a theme-specific structure for the single view of the post.', 'theme-layouts' ); ?></p> - <div class="post-layout-wrap"> <ul> <li><input type="radio" name="post-layout" id="post-layout-default" value="default" <?php checked( $post_layout, 'default' );?> /> <label for="post-layout-default"><?php echo esc_html( theme_layouts_get_string( 'default' ) ); ?></label></li>
Remove meta box description. We shouldn't need to explain the functionality.
justintadlock_hybrid-core
train
70fa431f0f73dd85437a974b2eab9009e2cc703b
diff --git a/lib/handlers/bin.js b/lib/handlers/bin.js index <HASH>..<HASH> 100644 --- a/lib/handlers/bin.js +++ b/lib/handlers/bin.js @@ -457,7 +457,8 @@ module.exports = Observable.extend({ formatHistory: function (bins, helpers, fn) { // reorder the bins based latest edited, and group by bin.url var order = {}, - urls = {}; + urls = {}, + orderedBins, loopOrder, i, length; bins.forEach(function (bin) { var time = new Date(bin.created).getTime(); @@ -478,12 +479,12 @@ module.exports = Observable.extend({ } }); - var orderedBins = [], - loopOrder = Object.keys(order).sort(function (a, b) { - return order[a] < order[b] ? -1 : 1; - }); + orderedBins = []; + loopOrder = Object.keys(order).sort(function (a, b) { + return order[a] < order[b] ? -1 : 1; + }); - for (var i = 0; i < loopOrder.length; i++) { + for (i = 0, length = loopOrder.length; i < length; i += 1) { orderedBins.push.apply(orderedBins, urls[loopOrder[i]]); }
Fix a couple of lint issues
jsbin_jsbin
train
588664b4916321a2820701af239b8a382df5f3fa
diff --git a/inc/cloner/class-cloner.php b/inc/cloner/class-cloner.php index <HASH>..<HASH> 100644 --- a/inc/cloner/class-cloner.php +++ b/inc/cloner/class-cloner.php @@ -970,7 +970,7 @@ class Cloner { } } } - } elseif ( $post_type === 'glossary ' ) { + } elseif ( $post_type === 'glossary' ) { foreach ( $this->sourceBookGlossary as $k => $v ) { if ( $v['id'] === absint( $section_id ) ) { return $v['metadata'];
Remove extra space in 'glossary' condition check (#<I>)
pressbooks_pressbooks
train
4e8984c10fba86ea3d0db7bf0c05c42ce47ebb50
diff --git a/dictdumper/json.py b/dictdumper/json.py index <HASH>..<HASH> 100644 --- a/dictdumper/json.py +++ b/dictdumper/json.py @@ -46,7 +46,7 @@ ESCAPE_DCT = { '\n': '\\n', '\r': '\\r', '\t': '\\t', - '\x0b': '\\U000b', + '\x0b': '\\u000b', } diff --git a/doc/source/conf.py b/doc/source/conf.py index <HASH>..<HASH> 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -22,7 +22,7 @@ copyright = '2020, Jarry Shaw' author = 'Jarry Shaw' # The full version, including alpha/beta/rc tags -release = '0.8.4.post1' +release = '0.8.4.post2' # -- General configuration --------------------------------------------------- diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ with open('README.md', 'rb') as file: long_desc = file.read().decode('utf-8') # version string -__version__ = '0.8.4.post1' +__version__ = '0.8.4.post2' # set-up script for pip distribution setup(
New distribution [<I>.post2] * missing out "\\x0b" control char
JarryShaw_DictDumper
train
77a7fe07403ea4e4b62623e7edcd0bc300940c2e
diff --git a/src/drakonli/PhpUtils/Builder/BuilderInterface.php b/src/drakonli/PhpUtils/Builder/BuilderInterface.php index <HASH>..<HASH> 100644 --- a/src/drakonli/PhpUtils/Builder/BuilderInterface.php +++ b/src/drakonli/PhpUtils/Builder/BuilderInterface.php @@ -3,15 +3,11 @@ namespace drakonli\PhpUtils\Builder; /** + * Tagging service + * * @author drakonli - Arthur Vinogradov - <artur.drakonli@gmail.com> * @link www.linkedin.com/in/drakonli */ interface BuilderInterface { - /** - * Resets built object to start building again from scratch - * - * @return static - */ - public function reset(); }
+ removed reset method from builder + builder interface now is just a tagging service
drakonli_php-utils
train
11c6cee5cdb4a2d6613634ee9bc904e1d3ba752e
diff --git a/spec/dbi.rb b/spec/dbi.rb index <HASH>..<HASH> 100644 --- a/spec/dbi.rb +++ b/spec/dbi.rb @@ -4,31 +4,31 @@ $dbh = connect_to_spec_database reset_data describe 'DBI::DatabaseHandle#select_column' do - + it 'selects one column' do name = $dbh.select_column( "SELECT name FROM authors LIMIT 1" ) name.class.should.not.equal Array name.should.equal 'author1' - + null = $dbh.select_column( "SELECT c4 FROM many_col_table WHERE c3 = 40" ) null.should.be.nil - + should.raise( DBI::DataError ) do $dbh.select_column( "SELECT name FROM authors WHERE 1+1 = 3" ) end end - + it 'selects one column of first row' do name = $dbh.select_column( "SELECT name FROM authors ORDER BY name DESC" ) name.should.equal 'author3' end - + it 'selects first column of first row' do name = $dbh.select_column( "SELECT name, id FROM authors ORDER BY name DESC" @@ -38,42 +38,44 @@ describe 'DBI::DatabaseHandle#select_column' do end describe 'DBI::DatabaseHandle#one_transaction' do - + it 'turns off autocommit for the duration of a single transaction' do $dbh.d( "DELETE FROM many_col_table;" ) $dbh.i( "INSERT INTO many_col_table ( id, c1 ) VALUES ( 1, 10 );" ) - + # Here we will attempt to increment a value two times in parallel. # If each multi-operation transaction is truly atomic, we expect that # the final value will reflect two increments. # If atomicity is not respected, the value should only reflect one # increment. - + # First, we test the non-transactional case, to show failure. - + thread1 = Thread.new do value = $dbh.sc "SELECT c1 FROM many_col_table WHERE id = 1;" value.should.equal 10 sleep 2 # seconds $dbh.u "UPDATE many_col_table SET c1 = ?", ( value + 1 ) end - + + sleep 1 + thread2 = Thread.new do value = $dbh.sc "SELECT c1 FROM many_col_table WHERE id = 1;" value.should.equal 10 # Update right away $dbh.u "UPDATE many_col_table SET c1 = ?", ( value + 1 ) end - + thread2.join thread1.join - + value = $dbh.sc "SELECT c1 FROM many_col_table WHERE id = 1;" # Failure; two increments should give a final value of 12. value.should.equal( 10 + 1 ) - + # Now, we show that transactions keep things sane. - + thread1 = Thread.new do $dbh.one_transaction do |dbh| value = dbh.sc "SELECT c1 FROM many_col_table WHERE id = 1;" @@ -81,7 +83,7 @@ describe 'DBI::DatabaseHandle#one_transaction' do dbh.u "UPDATE many_col_table SET c1 = ?", ( value + 1 ) end end - + thread2 = Thread.new do $dbh.one_transaction do |dbh| value = dbh.sc "SELECT c1 FROM many_col_table WHERE id = 1;" @@ -89,50 +91,50 @@ describe 'DBI::DatabaseHandle#one_transaction' do dbh.u "UPDATE many_col_table SET c1 = ?", ( value + 1 ) end end - + thread2.join thread1.join - + value = $dbh.sc "SELECT c1 FROM many_col_table WHERE id = 1;" value.should.equal( 11 + 1 + 1 ) - + reset_data end - + end describe 'DBI::Row accessors' do - + it 'provide read access via #fieldname' do row = $dbh.select_one( "SELECT * FROM posts ORDER BY author_id DESC LIMIT 1" ) row.should.not.equal nil - + row._id.should.be.same_as row[ 'id' ] row.id_.should.be.same_as row[ 'id' ] row.author_id.should.be.same_as row[ 'author_id' ] row.text.should.be.same_as row[ 'text' ] - + row.text.should.equal 'Second post.' end - + it 'provide in-memory (non-syncing) write access via #fieldname=' do row = $dbh.select_one( "SELECT * FROM posts ORDER BY author_id DESC LIMIT 1" ) row.should.not.equal nil - + old_id = row._id row.id = old_id + 1 row._id.should.not.equal old_id row._id.should.equal( old_id + 1 ) - + old_text = row.text new_text = 'This is the new post text.' 
row.text = new_text row.text.should.not.equal old_text row.text.should.equal new_text end - + end \ No newline at end of file
Fixed one_transaction spec.
Pistos_m4dbi
train
2150fdd1d10fe9c6eb30ea21304a940a9bd23e40
diff --git a/findbugs/src/java/edu/umd/cs/findbugs/detect/FindUnrelatedTypesInGenericContainer.java b/findbugs/src/java/edu/umd/cs/findbugs/detect/FindUnrelatedTypesInGenericContainer.java index <HASH>..<HASH> 100644 --- a/findbugs/src/java/edu/umd/cs/findbugs/detect/FindUnrelatedTypesInGenericContainer.java +++ b/findbugs/src/java/edu/umd/cs/findbugs/detect/FindUnrelatedTypesInGenericContainer.java @@ -740,6 +740,9 @@ public class FindUnrelatedTypesInGenericContainer implements Detector { if (actualString.equals(objString) && expectedCat == TypeCategory.TYPE_VARIABLE) { return IncompatibleTypes.SEEMS_OK; } + if (expectedCat == TypeCategory.WILDCARD) { + return IncompatibleTypes.SEEMS_OK; + } if (ignoreBaseType) { if (expectedCat == TypeCategory.PARAMETERIZED && argCat == TypeCategory.PARAMETERIZED) { GenericObjectType parmGeneric = (GenericObjectType) expectedType;
Fix for false positives that were showing up in javafx code
spotbugs_spotbugs
train
4744f79124698a297d9c676575734d5ebf9f1a30
diff --git a/bem-views/404.blade.php b/bem-views/404.blade.php index <HASH>..<HASH> 100644 --- a/bem-views/404.blade.php +++ b/bem-views/404.blade.php @@ -3,4 +3,3 @@ @section('content') @includeFirst(['partials.404.' . $post_type, 'partials.404.default']) @stop -{{ /* THIS IS A SAMPLE VIEW */ }}
Remove sample view comment, everything here is a sample.
helsingborg-stad_Municipio
train
188cc3b666ba704534fa4f96e9e61f21f1e1ba7c
diff --git a/mmap.go b/mmap.go
index <HASH>..<HASH> 100644
--- a/mmap.go
+++ b/mmap.go
@@ -81,25 +81,27 @@ func (m *MMap) header() *reflect.SliceHeader {
 	return (*reflect.SliceHeader)(unsafe.Pointer(m))
 }
 
+func (m *MMap) addrLen() (uintptr, uintptr) {
+	header := m.header()
+	return header.Data, uintptr(header.Len)
+}
+
 // Lock keeps the mapped region in physical memory, ensuring that it will not be
 // swapped out.
 func (m MMap) Lock() error {
-	dh := m.header()
-	return lock(dh.Data, uintptr(dh.Len))
+	return m.lock()
 }
 
 // Unlock reverses the effect of Lock, allowing the mapped region to potentially
 // be swapped out.
 // If m is already unlocked, aan error will result.
 func (m MMap) Unlock() error {
-	dh := m.header()
-	return unlock(dh.Data, uintptr(dh.Len))
+	return m.unlock()
 }
 
 // Flush synchronizes the mapping's contents to the file's contents on disk.
 func (m MMap) Flush() error {
-	dh := m.header()
-	return flush(dh.Data, uintptr(dh.Len))
+	return m.flush()
 }
 
 // Unmap deletes the memory mapped region, flushes any remaining changes, and sets
@@ -109,8 +111,7 @@ func (m MMap) Flush() error {
 // Unmap should only be called on the slice value that was originally returned from
 // a call to Map. Calling Unmap on a derived slice may cause errors.
 func (m *MMap) Unmap() error {
-	dh := m.header()
-	err := unmap(dh.Data, uintptr(dh.Len))
+	err := m.unmap()
 	*m = nil
 	return err
 }
diff --git a/mmap_unix.go b/mmap_unix.go
index <HASH>..<HASH> 100644
--- a/mmap_unix.go
+++ b/mmap_unix.go
@@ -34,34 +34,18 @@ func mmap(len int, inprot, inflags, fd uintptr, off int64) ([]byte, error) {
 	return b, nil
 }
 
-func flush(addr, len uintptr) error {
-	_, _, errno := unix.Syscall(unix.SYS_MSYNC, addr, len, unix.MS_SYNC)
-	if errno != 0 {
-		return unix.Errno(errno)
-	}
-	return nil
+func (m MMap) flush() error {
+	return unix.Msync([]byte(m), unix.MS_SYNC)
 }
 
-func lock(addr, len uintptr) error {
-	_, _, errno := unix.Syscall(unix.SYS_MLOCK, addr, len, 0)
-	if errno != 0 {
-		return unix.Errno(errno)
-	}
-	return nil
+func (m MMap) lock() error {
+	return unix.Mlock([]byte(m))
 }
 
-func unlock(addr, len uintptr) error {
-	_, _, errno := unix.Syscall(unix.SYS_MUNLOCK, addr, len, 0)
-	if errno != 0 {
-		return unix.Errno(errno)
-	}
-	return nil
+func (m MMap) unlock() error {
+	return unix.Munlock([]byte(m))
 }
 
-func unmap(addr, len uintptr) error {
-	_, _, errno := unix.Syscall(unix.SYS_MUNMAP, addr, len, 0)
-	if errno != 0 {
-		return unix.Errno(errno)
-	}
-	return nil
+func (m MMap) unmap() error {
+	return unix.Munmap([]byte(m))
 }
diff --git a/mmap_windows.go b/mmap_windows.go
index <HASH>..<HASH> 100644
--- a/mmap_windows.go
+++ b/mmap_windows.go
@@ -81,7 +81,8 @@ func mmap(len int, prot, flags, hfile uintptr, off int64) ([]byte, error) {
 	return m, nil
 }
 
-func flush(addr, len uintptr) error {
+func (m MMap) flush() error {
+	addr, len := m.addrLen()
 	errno := windows.FlushViewOfFile(addr, len)
 	if errno != nil {
 		return os.NewSyscallError("FlushViewOfFile", errno)
@@ -99,21 +100,25 @@ func flush(addr, len uintptr) error {
 	return os.NewSyscallError("FlushFileBuffers", errno)
 }
 
-func lock(addr, len uintptr) error {
+func (m MMap) lock() error {
+	addr, len := m.addrLen()
 	errno := windows.VirtualLock(addr, len)
 	return os.NewSyscallError("VirtualLock", errno)
 }
 
-func unlock(addr, len uintptr) error {
+func (m MMap) unlock() error {
+	addr, len := m.addrLen()
 	errno := windows.VirtualUnlock(addr, len)
 	return os.NewSyscallError("VirtualUnlock", errno)
 }
 
-func unmap(addr, len uintptr) (err error) {
-	err = flush(addr, len)
+func (m MMap) unmap() error {
+	err := m.flush()
 	if err != nil {
 		return err
 	}
+
+	addr := m.header().Data
 	// Lock the UnmapViewOfFile along with the handleMap deletion.
 	// As soon as we unmap the view, the OS is free to give the
 	// same addr to another new map. We don't want another goroutine
Refactor to use provided unix functions golang.org/x/sys/unix provides functions for syscalls like msync that syscall didn't previously provide. This lets us simplify a little bit and simultaneously support more platforms that have different constant names.
edsrzf_mmap-go
train
f89247d4368b5947ba915a3e32867bcf2cd74369
diff --git a/openstack_dashboard/api/glance.py b/openstack_dashboard/api/glance.py index <HASH>..<HASH> 100644 --- a/openstack_dashboard/api/glance.py +++ b/openstack_dashboard/api/glance.py @@ -153,7 +153,7 @@ def image_create(request, **kwargs): asynchronously. In the case of 'data' the process of uploading the data may take - some time and is handed off to a seperate thread. + some time and is handed off to a separate thread. """ data = kwargs.pop('data', None)
Misspelling in message Misspelling in the message: "some time and is handed off to a seperate thread." Should be "some time and is handed off to a separate thread." Totally 1 occasion in horizon base code.
openstack_horizon
train
8c14bb44a98e6264cf2a829f50113068dd48765b
diff --git a/integration/global/create_buildpack_command_test.go b/integration/global/create_buildpack_command_test.go index <HASH>..<HASH> 100644 --- a/integration/global/create_buildpack_command_test.go +++ b/integration/global/create_buildpack_command_test.go @@ -85,6 +85,9 @@ var _ = PDescribe("create buildpack command", func() { var err error buildpackDir, err = ioutil.TempDir("", "buildpackdir-") Expect(err).ToNot(HaveOccurred()) + file, err := ioutil.TempFile(buildpackDir, "myfile-") + defer file.Close() + Expect(err).ToNot(HaveOccurred()) }) AfterEach(func() {
write a tempfile into the buildpack directory when creating from dir a buildpack created from an empty directory causes staging to fail. since we don't clean up during the integration tests, any attempts to cf push after the create-buildpack test that uses the temp dir had run would fail. [#<I>]
cloudfoundry_cli
train
7532a7b9c73f9934de10240b8461e15f34d0781c
diff --git a/src/Autolinker.js b/src/Autolinker.js index <HASH>..<HASH> 100644 --- a/src/Autolinker.js +++ b/src/Autolinker.js @@ -243,7 +243,7 @@ '(?:\\s*=\\s*' + attrValueRegex.source + ')?', // optional '=[value]' ')*', - '\\s*', // any trailing spaces before the closing '>' + '\\s*/?', // any trailing spaces and optional '/' before the closing '>' '>' ].join( "" ), 'g' ); } )(), diff --git a/tests/AutolinkerSpec.js b/tests/AutolinkerSpec.js index <HASH>..<HASH> 100644 --- a/tests/AutolinkerSpec.js +++ b/tests/AutolinkerSpec.js @@ -518,11 +518,26 @@ describe( "Autolinker", function() { } ); - it( "should NOT automatically link URLs within existing HTML tags", function() { + it( "should NOT automatically link URLs within the attributes of existing HTML tags", function() { var result = autolinker.link( '<p>Joe went to <a href="http://www.yahoo.com">yahoo</a></p>' ); expect( result ).toBe( '<p>Joe went to <a href="http://www.yahoo.com">yahoo</a></p>' ); } ); + + it( "should NOT automatically link URLs within the attributes of existing HTML tags when there are prefixed or suffixed spaces in the attribute values", function() { + var result = autolinker.link( '<p>Joe went to <a href=" http://www.yahoo.com">yahoo</a></p>' ); + expect( result ).toBe( '<p>Joe went to <a href=" http://www.yahoo.com">yahoo</a></p>' ); + + var result2 = autolinker.link( '<p>Joe went to <a href="http://www.yahoo.com ">yahoo</a></p>' ); + expect( result2 ).toBe( '<p>Joe went to <a href="http://www.yahoo.com ">yahoo</a></p>' ); + } ); + + + it( "should NOT automatically link URLs within self-closing tags", function() { + var result = autolinker.link( 'Just a flower image <img src="https://farm9.staticflickr.com/8378/8578790632_83c6471f3f_b.jpg" />' ); + expect( result ).toBe( 'Just a flower image <img src="https://farm9.staticflickr.com/8378/8578790632_83c6471f3f_b.jpg" />' ); + } ); + it( "should NOT automatically link a URL found within the inner text of a pre-existing anchor tag", function() { var result = autolinker.link( '<p>Joe went to <a href="http://www.yahoo.com">yahoo.com</a></p> yesterday.' ); @@ -548,6 +563,12 @@ describe( "Autolinker", function() { } ); + it( "should NOT automatically link an image tag with a URL inside of it, when it has another attribute which has extraneous spaces surround its value (Issue #45)", function() { + var result = autolinker.link( "Testing <img src='http://terryshoemaker.files.wordpress.com/2013/03/placeholder1.jpg' style=' height: 22px; background-color: rgb(0, 188, 204); border-radius: 7px; padding: 2px; margin: 0px 2px;'>" ); + expect( result ).toBe( "Testing <img src='http://terryshoemaker.files.wordpress.com/2013/03/placeholder1.jpg' style=' height: 22px; background-color: rgb(0, 188, 204); border-radius: 7px; padding: 2px; margin: 0px 2px;'>" ); + } ); + + it( "should allow the full range of HTML attribute name characters as specified in the W3C HTML syntax document (http://www.w3.org/TR/html-markup/syntax.html)", function() { // Note: We aren't actually expecting the HTML to be modified by this test var inAndOutHtml = '<ns:p>Foo <a data-qux-="" href="http://www.example.com">Bar<\/a> Baz<\/ns:p>';
Fix for handling self-closing tags.
gregjacobs_Autolinker.js
train
013b4cd154347b5940870c80d796d6acc6a076a5
diff --git a/optaplanner-benchmark/src/main/java/org/optaplanner/benchmark/impl/ranking/SubSingleBenchmarkRankBasedComparator.java b/optaplanner-benchmark/src/main/java/org/optaplanner/benchmark/impl/ranking/SubSingleBenchmarkRankBasedComparator.java index <HASH>..<HASH> 100644 --- a/optaplanner-benchmark/src/main/java/org/optaplanner/benchmark/impl/ranking/SubSingleBenchmarkRankBasedComparator.java +++ b/optaplanner-benchmark/src/main/java/org/optaplanner/benchmark/impl/ranking/SubSingleBenchmarkRankBasedComparator.java @@ -1,11 +1,11 @@ /* - * Copyright 2015 Red Hat, Inc. and/or its affiliates. + * Copyright 2020 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,13 +23,15 @@ import org.optaplanner.benchmark.impl.result.SubSingleBenchmarkResult; public class SubSingleBenchmarkRankBasedComparator implements Comparator<SubSingleBenchmarkResult>, Serializable { + private static final Comparator<SubSingleBenchmarkResult> COMPARATOR = + Comparator.nullsLast(Comparator + // Reverse, less is better (redundant: failed benchmarks don't get ranked at all) + .comparing(SubSingleBenchmarkResult::hasAnyFailure, Comparator.reverseOrder()) + .thenComparing(SubSingleBenchmarkResult::getRanking, Comparator.naturalOrder())); + @Override public int compare(SubSingleBenchmarkResult a, SubSingleBenchmarkResult b) { - return Comparator - // Reverse, less is better (redundant: failed benchmarks don't get ranked at all) - .comparing(SubSingleBenchmarkResult::hasAnyFailure, Comparator.reverseOrder()) - .thenComparing(SubSingleBenchmarkResult::getRanking, Comparator.naturalOrder()) - .compare(a, b); + return COMPARATOR.compare(a, b); } }
Fix NPE when benchmark result is null
kiegroup_optaplanner
train
4fb5c017fe5ca41ed95547a857c9c39efc4f1476
diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/airflow/providers/slack/hooks/slack_webhook.py index <HASH>..<HASH> 100644 --- a/airflow/providers/slack/hooks/slack_webhook.py +++ b/airflow/providers/slack/hooks/slack_webhook.py @@ -149,5 +149,5 @@ class SlackWebhookHook(HttpHook): endpoint=self.webhook_token, data=slack_message, headers={'Content-type': 'application/json'}, - extra_options={'proxies': proxies}, + extra_options={'proxies': proxies, 'check_response': True}, )
Check response status in slack webhook hook. (#<I>)
apache_airflow
train
548a54aa131ef6b8f8720468334741af92effec2
diff --git a/versions.js b/versions.js index <HASH>..<HASH> 100755 --- a/versions.js +++ b/versions.js @@ -3,7 +3,7 @@ const {readFile, writeFile, truncate, stat} = require("fs").promises; const {basename, dirname, join, relative} = require("path"); -const {cwd} = require("process"); +const {cwd: cwdFn} = require("process"); const {platform} = require("os"); const execa = require("execa"); const fastGlob = require("fast-glob"); @@ -12,7 +12,7 @@ const minimist = require("minimist"); const semver = require("semver"); const esc = str => str.replace(/[|\\{}()[\]^$+*?.-]/g, "\\$&"); -const pwd = cwd(); +const cwd = cwdFn(); const minOpts = { boolean: [ @@ -126,18 +126,18 @@ if (date) { async function find(name, base) { if (!base) { - const found = await findUp(name); - return found ? relative(pwd, found) : null; + const found = await findUp(name, {cwd}); + return found ? relative(cwd, found) : null; } else { return findUp(async directory => { const path = join(directory, name); if (directory.length < base.length) { return findUp.stop; } else { - const found = await findUp.exists(path); - return found ? relative(pwd, found) : null; + const found = await findUp.exists(path, {cwd}); + return found ? relative(cwd, found) : null; } - }); + }, {cwd}); } } @@ -321,7 +321,7 @@ async function main() { } // convert paths to relative - files = await Promise.all(files.map(file => relative(pwd, file))); + files = await Promise.all(files.map(file => relative(cwd, file))); if (!files.length) { throw new Error(`Found no files to do replacements in`);
refactor to 'cwd'
silverwind_ver
train
4301a7738db242df56da3e9574198fa82920a5c0
diff --git a/tests/src/Hodor/MessageQueue/Adapter/Amqp/ConfigProvider.php b/tests/src/Hodor/MessageQueue/Adapter/Amqp/ConfigProvider.php index <HASH>..<HASH> 100644 --- a/tests/src/Hodor/MessageQueue/Adapter/Amqp/ConfigProvider.php +++ b/tests/src/Hodor/MessageQueue/Adapter/Amqp/ConfigProvider.php @@ -8,10 +8,9 @@ class ConfigProvider { /** * @param array $queues - * @param array $config_overrides * @return Config */ - public static function getConfigAdapter(array $queues, array $config_overrides = []) + public static function getConfigAdapter(array $queues) { $config = new Config([]); foreach ($queues as $queue_key => $queue_config) { @@ -20,7 +19,6 @@ class ConfigProvider $queue_config = self::getQueueConfig(); } - $queue_config = array_merge($queue_config, $config_overrides); $config->addQueueConfig($queue_key, $queue_config); } diff --git a/tests/src/Hodor/MessageQueue/Adapter/Amqp/FactoryTest.php b/tests/src/Hodor/MessageQueue/Adapter/Amqp/FactoryTest.php index <HASH>..<HASH> 100644 --- a/tests/src/Hodor/MessageQueue/Adapter/Amqp/FactoryTest.php +++ b/tests/src/Hodor/MessageQueue/Adapter/Amqp/FactoryTest.php @@ -48,12 +48,11 @@ class FactoryTest extends BaseFactoryTest } /** - * @param array $config_overrides * @return Factory */ - protected function getTestFactory(array $config_overrides = []) + protected function getTestFactory() { - $config = ConfigProvider::getConfigAdapter(['only_q'], $config_overrides); + $config = ConfigProvider::getConfigAdapter(['only_q']); $test_factory = new Factory($config); $this->factories[] = $test_factory; diff --git a/tests/src/Hodor/MessageQueue/Adapter/FactoryTest.php b/tests/src/Hodor/MessageQueue/Adapter/FactoryTest.php index <HASH>..<HASH> 100644 --- a/tests/src/Hodor/MessageQueue/Adapter/FactoryTest.php +++ b/tests/src/Hodor/MessageQueue/Adapter/FactoryTest.php @@ -59,8 +59,7 @@ abstract class FactoryTest extends PHPUnit_Framework_TestCase } /** - * @param array $config_overrides * @return FactoryInterface */ - abstract protected function getTestFactory(array $config_overrides = []); + abstract protected function getTestFactory(); } diff --git a/tests/src/Hodor/MessageQueue/Adapter/Testing/FactoryTest.php b/tests/src/Hodor/MessageQueue/Adapter/Testing/FactoryTest.php index <HASH>..<HASH> 100644 --- a/tests/src/Hodor/MessageQueue/Adapter/Testing/FactoryTest.php +++ b/tests/src/Hodor/MessageQueue/Adapter/Testing/FactoryTest.php @@ -11,11 +11,10 @@ use Hodor\MessageQueue\Adapter\FactoryTest as BaseFactoryTest; class FactoryTest extends BaseFactoryTest { /** - * @param array $config_overrides * @return Factory */ - protected function getTestFactory(array $config_overrides = []) + protected function getTestFactory() { - return new Factory(ConfigProvider::getConfigAdapter(['only_q'], $config_overrides)); + return new Factory(ConfigProvider::getConfigAdapter(['only_q'])); } }
Remove remaining `config_overrides` references The `config_overrides` are not actually used anywhere, so there is no need to be passing around the variable
hold-the-door_ravens
train
4f8020ad1dac9f774ff264197f172b58f457e521
diff --git a/src/Admin/Grid/Builder.php b/src/Admin/Grid/Builder.php index <HASH>..<HASH> 100644 --- a/src/Admin/Grid/Builder.php +++ b/src/Admin/Grid/Builder.php @@ -220,7 +220,7 @@ class Builder $tools->getBlock( 'secondary' )->push( $this->exportOptions() ); $tools->getBlock( 'primary' )->push( $this->createButton() ); - if ( $this->grid->isPaginated() ) + if ( $this->grid->isPaginated() && $this->items->hasPages() ) { $pagination = ( new Pagination( $this->items ) )->render(); $tools->getBlock( $pagination->attributes()->get( 'class' ) )->push( $pagination->content() );
Do not show paginator if there is only one page
arbory_arbory
train
9475a843cbc9b172f58d9bb5a166b6569e2a4170
diff --git a/bloomrun.js b/bloomrun.js index <HASH>..<HASH> 100644 --- a/bloomrun.js +++ b/bloomrun.js @@ -60,6 +60,11 @@ BloomRun.prototype.default = function (payload) { BloomRun.prototype.add = function (pattern, payload) { if (onlyRegex(pattern)) { this._regexBucket.data.push(new PatternSet(pattern, payload, this._isDeep)) + + if (this._isDeep) { + this._regexBucket.data.sort(deepSort) + } + return this } diff --git a/lib/onlyRegex.js b/lib/onlyRegex.js index <HASH>..<HASH> 100644 --- a/lib/onlyRegex.js +++ b/lib/onlyRegex.js @@ -1,12 +1,12 @@ 'use strict' function onlyRegex (pattern) { - var match = false + var match = true for (var key in pattern) { if (pattern[key] instanceof RegExp) { match = true - } else if (match) { + } else if (typeof pattern[key] !== 'object') { match = false break } diff --git a/test.js b/test.js index <HASH>..<HASH> 100644 --- a/test.js +++ b/test.js @@ -533,3 +533,21 @@ test('List matches partially, in key order (2)', function (t) { 3 ]) }) + +test('recursive depth support, no other keys', function (t) { + t.plan(1) + + var instance = bloomrun({ indexing: 'depth' }) + var pattern1 = { some: { key: 'value' } } + var pattern2 = { some: { key: 'value', a: 'b' } } + + function payloadOne () { } + function payloadTwo () { } + + instance.add(pattern1, payloadOne) + instance.add(pattern2, payloadTwo) + + t.equal(instance.lookup({ + some: { key: 'value', a: 'b', c: 'd' } + }), payloadTwo) +})
Fixed an edge case where deep patterns without a key are not checked. Fixes #<I>.
mcollina_bloomrun
train
2786c3bd893102973b4b3893b0f4f9b7b1546027
diff --git a/Annotation/Admin.php b/Annotation/Admin.php index <HASH>..<HASH> 100644 --- a/Annotation/Admin.php +++ b/Annotation/Admin.php @@ -96,6 +96,11 @@ class Admin implements MetadataProcessorInterface public $keepOpen = false; /** + * @var bool + */ + public $onTop = false; + + /** * @param ClassMetadata $metadata */ public function processMetadata(ClassMetadata $metadata) @@ -112,6 +117,7 @@ class Admin implements MetadataProcessorInterface 'pager_type' => $this->pagerType, 'persist_filters' => $this->persistFilters, 'keep_open' => $this->keepOpen, + 'on_top' => $this->onTop, ); $tag = array_filter($tag, function ($v) { diff --git a/Resources/doc/reference/annotations.rst b/Resources/doc/reference/annotations.rst index <HASH>..<HASH> 100644 --- a/Resources/doc/reference/annotations.rst +++ b/Resources/doc/reference/annotations.rst @@ -51,7 +51,19 @@ All you have to do is include Sonata\AdminBundleAnnotations and define the value /** * @Sonata\Admin( - * class="AcmeBundle\Entity\MyEntity" + * class="AcmeBundle\Entity\MyEntity", + * id="service id (generated per default)", + * managerType="doctrine_mongodb (orm per default)", + * baseControllerName="SonataAdminBundle:CRUD", + * group="myGroup", + * label="myLabel", + * showInDashboard=true, + * translationDomain="OMG", + * pagerType="", + * persistFilters="", + * icon="<i class='fa fa-folder'></i>", + * keepOpen=false, + * onTop=false * ) */ class MyAdmin extends AbstractAdmin diff --git a/Tests/DependencyInjection/Compiler/AnnotationCompilerPassTest.php b/Tests/DependencyInjection/Compiler/AnnotationCompilerPassTest.php index <HASH>..<HASH> 100644 --- a/Tests/DependencyInjection/Compiler/AnnotationCompilerPassTest.php +++ b/Tests/DependencyInjection/Compiler/AnnotationCompilerPassTest.php @@ -60,6 +60,7 @@ class AnnotationCompilerPassTest extends PHPUnit_Framework_TestCase 'label' => 'Tests\Fixtures\Foo', 'show_in_dashboard' => false, 'keep_open' => false, + 'on_top' => false, ) ); } @@ -84,6 +85,7 @@ class AnnotationCompilerPassTest extends PHPUnit_Framework_TestCase 'label' => 'Foo', 'show_in_dashboard' => true, 'keep_open' => false, + 'on_top' => false, ) ); } @@ -97,7 +99,8 @@ class AnnotationCompilerPassTest extends PHPUnit_Framework_TestCase * group="myGroup", * label="myLabel", * translationDomain="OMG", - * keepOpen=true + * keepOpen=true, + * onTop=true * ) */ $annotation = new Admin(); @@ -108,6 +111,7 @@ class AnnotationCompilerPassTest extends PHPUnit_Framework_TestCase $annotation->showInDashboard = false; $annotation->translationDomain = 'OMG'; $annotation->keepOpen = true; + $annotation->onTop = true; $meta = new ClassMetadata('Sonata\AdminBundle\Tests\Fixtures\Entity\Foo'); @@ -121,6 +125,7 @@ class AnnotationCompilerPassTest extends PHPUnit_Framework_TestCase 'label' => 'myLabel', 'show_in_dashboard' => false, 'keep_open' => true, + 'on_top' => true, ) );
add missing onTop annotation to @admin (#<I>)
sonata-project_SonataAdminBundle
train
fc3a4c5d22209f13e27d201e1edba9118d2e0ed7
diff --git a/lib/raven/base.rb b/lib/raven/base.rb index <HASH>..<HASH> 100644 --- a/lib/raven/base.rb +++ b/lib/raven/base.rb @@ -78,7 +78,7 @@ module Raven # # @example # evt = Raven::Event.new(:message => "An error") - # Raven.send(evt) + # Raven.send_event(evt) def send_event(event) client.send_event(event) end @@ -119,7 +119,7 @@ module Raven if configuration.async? configuration.async.call(evt) else - send(evt) + send_event(evt) end end end diff --git a/spec/raven/integration_spec.rb b/spec/raven/integration_spec.rb index <HASH>..<HASH> 100644 --- a/spec/raven/integration_spec.rb +++ b/spec/raven/integration_spec.rb @@ -55,7 +55,7 @@ describe "Integration tests" do config.http_adapter = [:test, stubs] end - expect(Raven.logger).to receive(:warn).exactly(2).times + expect(Raven.logger).to receive(:warn).once expect { Raven.capture_exception(build_exception) }.not_to raise_error stubs.verify_stubbed_calls diff --git a/spec/raven/raven_spec.rb b/spec/raven/raven_spec.rb index <HASH>..<HASH> 100644 --- a/spec/raven/raven_spec.rb +++ b/spec/raven/raven_spec.rb @@ -5,7 +5,7 @@ describe Raven do let(:options) { double("options") } before do - allow(Raven).to receive(:send) + allow(Raven).to receive(:send_event) allow(Raven::Event).to receive(:from_message) { event } allow(Raven::Event).to receive(:from_exception) { event } end @@ -15,7 +15,7 @@ describe Raven do it 'sends the result of Event.capture_message' do expect(Raven::Event).to receive(:from_message).with(message, options) - expect(Raven).to receive(:send).with(event) + expect(Raven).to receive(:send_event).with(event) Raven.capture_message(message, options) end @@ -30,7 +30,7 @@ describe Raven do it 'sends the result of Event.capture_message' do expect(Raven::Event).to receive(:from_message).with(message, options) - expect(Raven).not_to receive(:send).with(event) + expect(Raven).not_to receive(:send_event).with(event) prior_async = Raven.configuration.async Raven.configuration.async = lambda { :ok } @@ -45,7 +45,7 @@ describe Raven do it 'sends the result of Event.capture_exception' do expect(Raven::Event).to receive(:from_exception).with(exception, options) - expect(Raven).to receive(:send).with(event) + expect(Raven).to receive(:send_event).with(event) Raven.capture_exception(exception, options) end @@ -60,7 +60,7 @@ describe Raven do it 'sends the result of Event.capture_exception' do expect(Raven::Event).to receive(:from_exception).with(exception, options) - expect(Raven).not_to receive(:send).with(event) + expect(Raven).not_to receive(:send_event).with(event) prior_async = Raven.configuration.async Raven.configuration.async = lambda { :ok } @@ -74,7 +74,7 @@ describe Raven do let(:exception) { build_exception } it 'sends the result of Event.capture_exception according to the result of should_capture' do - expect(Raven).not_to receive(:send).with(event) + expect(Raven).not_to receive(:send_event).with(event) prior_should_capture = Raven.configuration.should_capture Raven.configuration.should_capture = Proc.new { false }
Use #send_event when capturing an exception The #send method is deprecated, and causes noisy warnings.
getsentry_raven-ruby
train
fa77c2ff1dadba6939e91ef98bea26677cbf5c6d
diff --git a/lib/fog/compute/requests/aws/describe_volumes.rb b/lib/fog/compute/requests/aws/describe_volumes.rb index <HASH>..<HASH> 100644 --- a/lib/fog/compute/requests/aws/describe_volumes.rb +++ b/lib/fog/compute/requests/aws/describe_volumes.rb @@ -72,6 +72,7 @@ module Fog 'instance-id' => 'instanceId', 'status' => 'status' } + for filter_key, filter_value in filters if attachment_key = filter_key.split('attachment.')[1] aliased_key = attachment_aliases[filter_key] diff --git a/tests/helpers/collection_helper.rb b/tests/helpers/collection_helper.rb index <HASH>..<HASH> 100644 --- a/tests/helpers/collection_helper.rb +++ b/tests/helpers/collection_helper.rb @@ -39,8 +39,8 @@ def collection_tests(collection, params = {}, mocks_implemented = true) if !Fog.mocking? || mocks_implemented @identity = @identity.to_s - @identity.gsub!(/[a-zA-Z]/) { Fog::Mock.random_letters(1) } - @identity.gsub!(/\d/) { Fog::Mock.random_numbers(1) } + @identity = @identity.gsub(/[a-zA-Z]/) { Fog::Mock.random_letters(1) } + @identity = @identity.gsub(/\d/) { Fog::Mock.random_numbers(1) } @identity end
[tests] non-destructively generate id for get('fake') == nil tests
fog_fog
train
60b360d6adb05110ebf970703f6013aa599716bd
diff --git a/src/template/AttributeOps.js b/src/template/AttributeOps.js index <HASH>..<HASH> 100644 --- a/src/template/AttributeOps.js +++ b/src/template/AttributeOps.js @@ -496,10 +496,10 @@ let DOMConfig = { accept: IS_ATTRIBUTE, allowFullScreen: IS_BOOLEAN_ATTRIBUTE, allowTransparency: IS_ATTRIBUTE, + /** * Audio / video attributes ( DOM Living specs) */ - 'AudioTrack.enabled':IS_BOOLEAN_PROPERTY, 'AudioTrack.label': IS_ATTRIBUTE, 'AudioTrack.language': IS_ATTRIBUTE, @@ -516,7 +516,6 @@ let DOMConfig = { clipPath: IS_ATTRIBUTE, cols: IS_NUMERIC, crossOrigin: IS_ATTRIBUTE, - // Returns "true", "false", or "inherit", based on the state of the contenteditable attribute. contentEditable: IS_PROPERTY, contextMenu: IS_ATTRIBUTE, controls: IS_BOOLEAN_PROPERTY, diff --git a/tests/tests.js b/tests/tests.js index <HASH>..<HASH> 100644 --- a/tests/tests.js +++ b/tests/tests.js @@ -2068,6 +2068,30 @@ describe('Inferno acceptance tests', () => { describe('HTML attributes / properties', () => { + describe('Audio / video attributes', () => { + + it('should render the AudioTrack.label attribute', () => { + expect(attrOps.toHtml('AudioTrack.label', 'foo')).to.equal('AudioTrack.label="foo"'); + }); + + it('should render the VideoTrack.label attribute', () => { + expect(attrOps.toHtml('VideoTrack.label', 'foo')).to.equal('VideoTrack.label="foo"'); + }); + + it('should render the AudioTrack.language attribute', () => { + expect(attrOps.toHtml('AudioTrack.language', 'foo')).to.equal('AudioTrack.language="foo"'); + }); + + it('should render the VideoTrack.language attribute', () => { + expect(attrOps.toHtml('VideoTrack.language', 'foo')).to.equal('VideoTrack.language="foo"'); + }); + + // Boolean property + it('should render the AudioTrack.enabled attribute', () => { + expect(attrOps.toHtml('AudioTrack.enabled', true)).to.equal('AudioTrack.enabled="true"'); + }); + + }); describe('Booleans', () => { it('should not render unsafe custom attribute names', () => { @@ -2624,7 +2648,7 @@ describe('Inferno acceptance tests', () => { // Boolean property it('should render the AudioTrack.enabled attribute', () => { attrOps.set(container, 'AudioTrack.enabled', true); -// expect(container.getAttribute('AudioTrack.enabled')).to.eql('foo'); + expect(container.getAttribute('AudioTrack.enabled')).to.be.null; expect(container['AudioTrack.enabled']).to.be.true; });
video / audio special attr not working in t7
infernojs_inferno
train
c621e7d3815a56a8fa849e9a286307c75a555651
diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py index <HASH>..<HASH> 100644 --- a/core/dbt/adapters/factory.py +++ b/core/dbt/adapters/factory.py @@ -84,3 +84,12 @@ def reset_adapters(): for adapter in _ADAPTERS.values(): adapter.cleanup_connections() _ADAPTERS.clear() + + +def cleanup_connections(): + """Only clean up the adapter connections list without resetting the actual + adapters. + """ + with _ADAPTER_LOCK: + for adapter in _ADAPTERS.values(): + adapter.cleanup_connections() diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py index <HASH>..<HASH> 100644 --- a/core/dbt/adapters/sql/connections.py +++ b/core/dbt/adapters/sql/connections.py @@ -35,7 +35,10 @@ class SQLConnectionManager(BaseConnectionManager): if connection is this_connection: continue - self.cancel(connection) + # if the connection failed, the handle will be None so we have + # nothing to cancel. + if connection.handle is not None: + self.cancel(connection) names.append(connection.name) return names diff --git a/core/dbt/rpc.py b/core/dbt/rpc.py index <HASH>..<HASH> 100644 --- a/core/dbt/rpc.py +++ b/core/dbt/rpc.py @@ -23,7 +23,7 @@ import uuid from collections import namedtuple from queue import Empty as QueueEmpty -from dbt.adapters.factory import load_plugin +from dbt.adapters.factory import load_plugin, cleanup_connections from dbt import flags from dbt.logger import RPC_LOGGER as logger from dbt.logger import add_queue_handler @@ -300,6 +300,17 @@ class RequestTaskHandler: return result else: + # this is pretty unfortunate, but we have to reset the adapter + # cache _before_ we fork on posix. libpq, but also any other + # adapters that rely on file descriptors, get really messed up if + # you fork(), because the fds get inherited but the state isn't + # shared. The child process and the parent might end up trying to + # do things on the same fd at the same time. + # Also for some reason, if you do this after forking, even without + # calling close(), the connection in the parent ends up throwing + # 'connection already closed' exceptions + if os.name != 'nt': + cleanup_connections() self.process = multiprocessing.Process( target=_task_bootstrap, args=(self.task, self.queue, kwargs)
Fix some connection-related bugs - fix a bug where failed connections caused an AttributeError - fix an issue where the rpc server and its child processes secretly shared mutable state
fishtown-analytics_dbt
train
fef7d45208e9faac22a4f4a9bbd35bfc28f49419
diff --git a/patroni/etcd.py b/patroni/etcd.py index <HASH>..<HASH> 100644 --- a/patroni/etcd.py +++ b/patroni/etcd.py @@ -143,6 +143,10 @@ def catch_etcd_errors(func): return not func(*args, **kwargs) is None except (RetryFailedError, etcd.EtcdException): return False + except: + logger.exception("") + raise EtcdError("unexpected error") + return wrapper diff --git a/tests/test_etcd.py b/tests/test_etcd.py index <HASH>..<HASH> 100644 --- a/tests/test_etcd.py +++ b/tests/test_etcd.py @@ -8,7 +8,7 @@ import unittest from dns.exception import DNSException from mock import Mock, patch from patroni.dcs import Cluster, DCSError, Leader -from patroni.etcd import Client, Etcd +from patroni.etcd import Client, Etcd, EtcdError class MockResponse: @@ -266,3 +266,7 @@ class TestEtcd(unittest.TestCase): self.etcd.watch(4.5) self.etcd.watch(9.5) self.etcd.watch(100) + + @patch('patroni.etcd.Etcd.retry', Mock(side_effect=AttributeError("foo"))) + def test_other_exceptions(self): + self.assertRaises(EtcdError, self.etcd.cancel_initialization)
Handle unexpected exceptions in etcd. Previously, patroni would die after receiving an exception other than RetryFailedError, etcd.EtcdException from etcd. We have observed an AttributeError raised by etcd on some occasions. With this change, we demote ourselves, but not terminate on such exceptions.
zalando_patroni
train
8e44ea707f9cebf84a084e47681358f586b84109
diff --git a/billy/models/metadata.py b/billy/models/metadata.py index <HASH>..<HASH> 100644 --- a/billy/models/metadata.py +++ b/billy/models/metadata.py @@ -168,7 +168,18 @@ class Metadata(Document): committees = list(self.committees(*args, **kwargs)) legislators = self.legislators({'active': True}, fields=['full_name', 'state']) - legislators = dict((obj['_id'], obj) for obj in legislators) + _legislators = {} + + # This will be a cache of legislator objects used in + # the committees.html template. Includes ids in each + # legislator's _all_ids field (if it exists.) + for obj in legislators: + if 'all_ids' in obj: + for _id in obj['_all_ids']: + _legislators[_id] = obj + else: + _legislators[obj['_id']] = obj + del legislators for com in committees: - com._legislators = legislators + com._legislators = _legislators return committees
also check _all_ids when accessing legr's from a committee
openstates_billy
train
0e8fa90510edd388664f53da8608e9b3b72b4f1f
diff --git a/fastavro/_write.pyx b/fastavro/_write.pyx index <HASH>..<HASH> 100644 --- a/fastavro/_write.pyx +++ b/fastavro/_write.pyx @@ -242,15 +242,17 @@ cdef write_union(bytearray fo, datum, schema, dict named_schemas, fname): cdef int32 most_fields cdef int32 index cdef int32 fields + cdef str extracted_type cdef str schema_name best_match_index = -1 if isinstance(datum, tuple): (name, datum) = datum for index, candidate in enumerate(schema): - if extract_record_type(candidate) == "record": + extracted_type = extract_record_type(candidate) + if extracted_type == "record": schema_name = candidate["name"] else: - schema_name = candidate + schema_name = extracted_type if name == schema_name: best_match_index = index break diff --git a/fastavro/_write_py.py b/fastavro/_write_py.py index <HASH>..<HASH> 100644 --- a/fastavro/_write_py.py +++ b/fastavro/_write_py.py @@ -135,10 +135,11 @@ def write_union(encoder, datum, schema, named_schemas, fname): if isinstance(datum, tuple): (name, datum) = datum for index, candidate in enumerate(schema): - if extract_record_type(candidate) == "record": + extracted_type = extract_record_type(candidate) + if extracted_type == "record": schema_name = candidate["name"] else: - schema_name = candidate + schema_name = extracted_type if name == schema_name: best_match_index = index break diff --git a/tests/test_fastavro.py b/tests/test_fastavro.py index <HASH>..<HASH> 100644 --- a/tests/test_fastavro.py +++ b/tests/test_fastavro.py @@ -2505,3 +2505,35 @@ def test_reading_with_skip_using_pure_python(): ) assert roundtrip_records == [skip_record] + + +def test_tuple_writer_picks_correct_union_path(): + """https://github.com/fastavro/fastavro/issues/509""" + schema = { + "type": "record", + "name": "test_tuple_writer_picks_correct_union_path", + "fields": [ + { + "name": "Field", + "type": [ + {"type": "map", "values": "string"}, + { + "type": "record", + "name": "Record2", + "fields": [{"name": "Field", "type": "string"}], + }, + ], + }, + ], + } + + records = [{"Field": ("Record2", {"Field": "value"})}] + parsed_schema = fastavro.parse_schema(schema) + assert records == roundtrip(parsed_schema, records, return_record_name=True) + + records = [{"Field": ("map", {"Field": "value"})}] + expected_roundtrip_value = [{"Field": {"Field": "value"}}] + parsed_schema = fastavro.parse_schema(schema) + assert expected_roundtrip_value == roundtrip( + parsed_schema, records, return_record_name=True + )
fix bug in union writing with tuple notation
fastavro_fastavro
train
9e1d1b0ab6aece09f23140701067dfc501b34a20
diff --git a/core/src/main/java/hudson/model/BuildTimelineWidget.java b/core/src/main/java/hudson/model/BuildTimelineWidget.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/model/BuildTimelineWidget.java +++ b/core/src/main/java/hudson/model/BuildTimelineWidget.java @@ -62,8 +62,8 @@ public class BuildTimelineWidget { TimelineEventList result = new TimelineEventList(); for (Run r : builds.byTimestamp(min,max)) { Event e = new Event(); - e.start = r.getTime(); - e.end = new Date(r.timestamp+r.getDuration()); + e.start = new Date(r.getStartTimeInMillis()); + e.end = new Date(r.getStartTimeInMillis()+r.getDuration()); e.title = r.getFullDisplayName(); // what to put in the description? // e.description = "Longish description of event "+r.getFullDisplayName(); diff --git a/core/src/main/java/jenkins/widgets/BuildListTable.java b/core/src/main/java/jenkins/widgets/BuildListTable.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/jenkins/widgets/BuildListTable.java +++ b/core/src/main/java/jenkins/widgets/BuildListTable.java @@ -25,9 +25,13 @@ package jenkins.widgets; import hudson.Functions; +import hudson.Util; import hudson.model.BallColor; import hudson.model.Run; import net.sf.json.JSONObject; + +import java.util.Date; + import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.DoNotUse; @@ -45,6 +49,7 @@ public class BuildListTable extends RunListProgressiveRendering { element.put("displayName", build.getDisplayName()); element.put("timestampString", build.getTimestampString()); element.put("timestampString2", build.getTimestampString2()); + element.put("timestampString3", Util.XS_DATETIME_FORMATTER.format(new Date(build.getStartTimeInMillis()))); Run.Summary buildStatusSummary = build.getBuildStatusSummary(); element.put("buildStatusSummaryWorse", buildStatusSummary.isWorse); element.put("buildStatusSummaryMessage", buildStatusSummary.message); diff --git a/core/src/main/resources/lib/hudson/buildListTable.jelly b/core/src/main/resources/lib/hudson/buildListTable.jelly index <HASH>..<HASH> 100644 --- a/core/src/main/resources/lib/hudson/buildListTable.jelly +++ b/core/src/main/resources/lib/hudson/buildListTable.jelly @@ -47,7 +47,7 @@ THE SOFTWARE. insert('\u00A0'). insert(new Element('a', {href: '${rootURL}/' + e.url, 'class': 'model-link inside'}). update(e.displayName.escapeHTML()))); - tr.insert(new Element('td', {data: e.timestampString2, tooltip: '${%Click to center timeline on event}', onclick: 'javascript:tl.getBand(0).scrollToCenter(Timeline.DateTime.parseGregorianDateTime("' + e.timestampString2 + '"))'}). + tr.insert(new Element('td', {data: e.timestampString2, tooltip: '${%Click to center timeline on event}', onclick: 'javascript:tl.getBand(0).scrollToCenter(Timeline.DateTime.parseGregorianDateTime("' + e.timestampString3 + '"))'}). update(e.timestampString.escapeHTML())); tr.insert(new Element('td', {style: e.buildStatusSummaryWorse ? 'color: red' : ''}). update(e.buildStatusSummaryMessage.escapeHTML()));
[FIXED JENKINS-<I>] Use build start times instead of build scheduled times in `BuildTimelineWidget`. (#<I>)
jenkinsci_jenkins
train
31f3999420ec5d51936e6a540ee1cf9fcc21ef67
diff --git a/src/scheduler.js b/src/scheduler.js index <HASH>..<HASH> 100644 --- a/src/scheduler.js +++ b/src/scheduler.js @@ -15,7 +15,7 @@ export function batch (fn, ctx, args) { // this creates and returns a batched version of the passed function // the cache is necessary to always map the same thing to the same function // which makes sure that addEventListener/removeEventListener pairs don't break -const cache = new Map() +const cache = new WeakMap() function batchFn (fn) { if (typeof fn !== 'function') { return fn
fix a memory leak in the scheduler
solkimicreb_react-easy-state
train