Columns:
hash: string (lengths 40 to 40)
diff: string (lengths 131 to 114k)
message: string (lengths 7 to 980)
project: string (lengths 5 to 67)
split: string class (1 value)
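As a quick orientation before the rows below, here is a minimal sketch of iterating data with this schema. The loading call and the dataset path are assumptions for illustration only; this dump does not name its source identifier:

```python
# Hypothetical loading sketch; the dataset path below is a placeholder.
from datasets import load_dataset

ds = load_dataset("user/commit-diffs", split="train")  # assumed identifier

for row in ds.select(range(3)):   # inspect the first few rows
    print(row["hash"])            # 40-character commit hash
    print(row["project"])         # repository identifier, e.g. "owner_repo"
    print(row["message"][:80])    # commit message (7 to 980 characters)
    # row["diff"] holds the unified diff, from 131 characters up to ~114k
```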
b77e7b38fecf530651c02dbccd5e5516c3331e41
diff --git a/src/Bridge/Elasticsearch/DataProvider/Filter/AbstractSearchFilter.php b/src/Bridge/Elasticsearch/DataProvider/Filter/AbstractSearchFilter.php index <HASH>..<HASH> 100644 --- a/src/Bridge/Elasticsearch/DataProvider/Filter/AbstractSearchFilter.php +++ b/src/Bridge/Elasticsearch/DataProvider/Filter/AbstractSearchFilter.php @@ -83,7 +83,7 @@ abstract class AbstractSearchFilter extends AbstractFilter implements ConstantSc return $clauseBody; } - return array_merge($clauseBody, [ + return array_merge_recursive($clauseBody, [ 'bool' => [ 'must' => $searches, ],
Allow multiple elasticsearch queries to be set Fixes #<I>
api-platform_core
train
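The fix in the row above swaps PHP's array_merge for array_merge_recursive, so a second bool/must clause is appended rather than overwriting the first. Below is a rough Python stand-in for the two behaviours; the helper and query shapes are invented for illustration and are not part of the api-platform codebase:

```python
# Python stand-in for PHP's array_merge vs array_merge_recursive.
def merge_recursive(a, b):
    out = dict(a)
    for key, value in b.items():
        if key in out and isinstance(out[key], dict) and isinstance(value, dict):
            out[key] = merge_recursive(out[key], value)   # merge nested dicts
        elif key in out and isinstance(out[key], list) and isinstance(value, list):
            out[key] = out[key] + value                   # append, don't clobber
        else:
            out[key] = value
    return out

clause = {"bool": {"must": [{"term": {"a": 1}}]}}
extra = {"bool": {"must": [{"term": {"b": 2}}]}}

shallow = {**clause, **extra}              # like array_merge: second query wins
assert shallow["bool"]["must"] == [{"term": {"b": 2}}]

merged = merge_recursive(clause, extra)    # like array_merge_recursive: both kept
assert merged["bool"]["must"] == [{"term": {"a": 1}}, {"term": {"b": 2}}]
```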
71401254ae4a729a60614786316d5036702198b8
diff --git a/src/Timetable/Strategies/BaseTimetableStrategy.php b/src/Timetable/Strategies/BaseTimetableStrategy.php index <HASH>..<HASH> 100644 --- a/src/Timetable/Strategies/BaseTimetableStrategy.php +++ b/src/Timetable/Strategies/BaseTimetableStrategy.php @@ -4,16 +4,9 @@ namespace Timegridio\Concierge\Timetable\Strategies; use Timegridio\Concierge\Timetable\Timetable; -class BaseTimetableStrategy +abstract class BaseTimetableStrategy { protected $timetable; - protected function initTimetable($starting, $days) - { - $this->timetable - ->format('date.service.time') - ->from($starting) - ->future($days) - ->init(); - } + abstract protected function initTimetable($starting, $days); }
Turn BaseTimetableStrategy into an abstract class
timegridio_concierge
train
72349ef22f94b940a406c0e506fda3beb6f0289b
diff --git a/src/AuthorizationServer.php b/src/AuthorizationServer.php index <HASH>..<HASH> 100644 --- a/src/AuthorizationServer.php +++ b/src/AuthorizationServer.php @@ -26,8 +26,6 @@ class AuthorizationServer implements EmitterAwareInterface { use EmitterAwareTrait; - const ENCRYPTION_KEY_ERROR = 'You must set the encryption key going forward to improve the security of this library - see this page for more information https://oauth2.thephpleague.com/v5-security-improvements/'; - /** * @var GrantTypeInterface[] */ @@ -109,16 +107,6 @@ class AuthorizationServer implements EmitterAwareInterface } /** - * Set the encryption key - * - * @param string $key - */ - public function setEncryptionKey($key) - { - $this->encryptionKey = $key; - } - - /** * Enable a grant type on the server. * * @param GrantTypeInterface $grantType @@ -136,12 +124,6 @@ class AuthorizationServer implements EmitterAwareInterface $grantType->setPrivateKey($this->privateKey); $grantType->setPublicKey($this->publicKey); $grantType->setEmitter($this->getEmitter()); - - if ($this->encryptionKey === null) { - // @codeCoverageIgnoreStart - error_log(self::ENCRYPTION_KEY_ERROR); - // @codeCoverageIgnoreEnd - } $grantType->setEncryptionKey($this->encryptionKey); $this->enabledGrantTypes[$grantType->getIdentifier()] = $grantType; @@ -159,12 +141,6 @@ class AuthorizationServer implements EmitterAwareInterface */ public function validateAuthorizationRequest(ServerRequestInterface $request) { - if ($this->encryptionKey === null) { - // @codeCoverageIgnoreStart - error_log(self::ENCRYPTION_KEY_ERROR); - // @codeCoverageIgnoreEnd - } - foreach ($this->enabledGrantTypes as $grantType) { if ($grantType->canRespondToAuthorizationRequest($request)) { return $grantType->validateAuthorizationRequest($request);
Encryption key is now always required so remove redundant code
thephpleague_oauth2-server
train
92e6d01dd4b91ecc6b6dd1909a1e9cd1aab9d8b9
diff --git a/workshift/forms.py b/workshift/forms.py index <HASH>..<HASH> 100644 --- a/workshift/forms.py +++ b/workshift/forms.py @@ -251,9 +251,11 @@ class VerifyShiftForm(InteractShiftForm): def clean_pk(self): instance = super(VerifyShiftForm, self).clean_pk() - if not instance.workshifter: + workshifter = instance.workshifter or instance.liable + + if not workshifter: raise forms.ValidationError("Workshift is not filled.") - if not instance.pool.self_verify and instance.workshifter == self.profile: + if not instance.pool.self_verify and workshifter == self.profile: raise forms.ValidationError("Workshifter cannot verify self.") if instance.auto_verify: raise forms.ValidationError("Workshift is automatically verified.") @@ -275,8 +277,9 @@ class VerifyShiftForm(InteractShiftForm): instance.logs.add(entry) instance.save() - pool_hours = instance.workshifter.pool_hours \ - .get(pool=instance.get_info().pool) + workshifter = instance.workshifter or instance.liable + + pool_hours = workshifter.pool_hours.get(pool=instance.get_info().pool) pool_hours.standing += instance.hours pool_hours.save()
Fixed verify shift form to allow verifying liable people
knagra_farnsworth
train
5f6083fd0cb48c5f8bceb83243d6967b3fe332c8
diff --git a/scripts/murnfit.py b/scripts/murnfit.py index <HASH>..<HASH> 100755 --- a/scripts/murnfit.py +++ b/scripts/murnfit.py @@ -9,6 +9,17 @@ from scipy.optimize import leastsq from pymatgen.io.vasp import Vasprun from vasppy import Poscar from vasppy.summary import find_vasp_calculations +import argparse + +import matplotlib +matplotlib.use('agg') +import matplotlib.pyplot as plt + +def parse_args(): + parser = argparse.ArgumentParser(description='Perform a Murnaghan equation of state fit across VASP subdirectories') + parser.add_argument( '-p', '--plot', action='store_true', help='generate murn.pdf plot of fit' ) + args = parser.parse_args() + return args def read_data( verbose=True ): dir_list = find_vasp_calculations() @@ -61,9 +72,25 @@ def fit( volumes, energies ): plsq = leastsq( objective, x0, args=( volumes, energies ) ) return plsq +def make_plot( volumes, energies, fit_params ): + v_min = volumes.min()*0.99 + v_max = volumes.max()*1.01 + v_fitting = np.linspace( v_min, v_max, num=50 ) + e_fitting = murnaghan( v_fitting, *fit_params ) + plt.figure( figsize=(8.0,6.0) ) + plt.plot( volumes, energies, 'o' ) + plt.plot( v_fitting, e_fitting, '--' ) + plt.xlabel( 'volume [A^3]' ) + plt.ylabel( 'energy [eV]' ) + plt.tight_layout() + plt.savefig( 'murn.pdf' ) + if __name__ == '__main__': + args = parse_args() df = read_data() e0, b0, bp, v0 = fit( np.array( df.volume ), np.array( df.energy ) )[0] + if args.plot: + make_plot( df.volume, df.energy, ( e0, b0, bp, v0 ) ) print( "E0: {:.4f}".format( e0 ) ) print( "V0: {:.4f}".format( v0 ) ) print( "opt. scaling: {:.5f}".format( ( v0 / df.scaling_factor.mean() )**(1/3) ) )
Added option to murnfit script to generate plot
bjmorgan_vasppy
train
098e80ed5a57964b5aac4ac306e4e2023cc722df
diff --git a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/securitygroups/SecurityGroupEntity.java b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/securitygroups/SecurityGroupEntity.java index <HASH>..<HASH> 100644 --- a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/securitygroups/SecurityGroupEntity.java +++ b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/securitygroups/SecurityGroupEntity.java @@ -51,7 +51,7 @@ public final class SecurityGroupEntity { * @param runningDefault the running default * @return the running default */ - private final boolean runningDefault; + private final Boolean runningDefault; /** * The spaces url @@ -67,14 +67,14 @@ public final class SecurityGroupEntity { * @param stagingDefault the staging default * @return the staging default */ - private final boolean stagingDefault; + private final Boolean stagingDefault; @Builder SecurityGroupEntity(@JsonProperty("name") String name, @JsonProperty("rules") @Singular List<RuleEntity> rules, - @JsonProperty("running_default") boolean runningDefault, + @JsonProperty("running_default") Boolean runningDefault, @JsonProperty("spaces_url") String spacesUrl, - @JsonProperty("staging_default") boolean stagingDefault) { + @JsonProperty("staging_default") Boolean stagingDefault) { this.name = name; this.rules = rules; diff --git a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spacequotadefinitions/SpaceQuotaDefinitionEntity.java b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spacequotadefinitions/SpaceQuotaDefinitionEntity.java index <HASH>..<HASH> 100644 --- a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spacequotadefinitions/SpaceQuotaDefinitionEntity.java +++ b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spacequotadefinitions/SpaceQuotaDefinitionEntity.java @@ -56,7 +56,7 @@ public final class SpaceQuotaDefinitionEntity { * @param nonBasicServicesAllowed the non basic services allowed boolean * @return the nonBasicServicesAllowed */ - private final boolean nonBasicServicesAllowed; + private final Boolean nonBasicServicesAllowed; /** * The organization id @@ -102,7 +102,7 @@ SpaceQuotaDefinitionEntity(@JsonProperty("instance_memory_limit") Integer instanceMemoryLimit, @JsonProperty("memory_limit") Integer memoryLimit, @JsonProperty("name") String name, - @JsonProperty("non_basic_services_allowed") boolean nonBasicServicesAllowed, + @JsonProperty("non_basic_services_allowed") Boolean nonBasicServicesAllowed, @JsonProperty("organization_guid") String organizationId, @JsonProperty("organization_url") String organizationUrl, @JsonProperty("spaces_url") String spacesUrl, diff --git a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spaces/ListSpaceServiceInstancesRequest.java b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spaces/ListSpaceServiceInstancesRequest.java index <HASH>..<HASH> 100644 --- a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spaces/ListSpaceServiceInstancesRequest.java +++ b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/spaces/ListSpaceServiceInstancesRequest.java @@ -82,7 +82,7 @@ public final class ListSpaceServiceInstancesRequest extends PaginatedRequest imp * @return the return user provided service instances */ @Getter(onMethod = @__(@JsonProperty("return_user_provided_service_instances"))) - private final boolean returnUserProvidedServiceInstances; + private final Boolean returnUserProvidedServiceInstances; /** * The service binding ids @@ -118,7 +118,7 @@ public final class ListSpaceServiceInstancesRequest extends PaginatedRequest imp String id, @Singular List<String> names, @Singular List<String> organizationIds, - boolean returnUserProvidedServiceInstances, + Boolean returnUserProvidedServiceInstances, @Singular List<String> serviceBindingIds, @Singular List<String> serviceKeyIds, @Singular List<String> servicePlanIds) { diff --git a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/users/UserEntity.java b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/users/UserEntity.java index <HASH>..<HASH> 100644 --- a/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/users/UserEntity.java +++ b/cloudfoundry-client/src/main/lombok/org/cloudfoundry/client/v2/users/UserEntity.java @@ -32,7 +32,7 @@ public final class UserEntity { * @param active the active boolean * @return active */ - private final boolean active; + private final Boolean active; /** * The admin property @@ -40,7 +40,7 @@ public final class UserEntity { * @param admin the admin boolean * @return admin */ - private final boolean admin; + private final Boolean admin; /** * The audited organizations url @@ -115,8 +115,8 @@ public final class UserEntity { private final String username; @Builder - UserEntity(@JsonProperty("active") boolean active, - @JsonProperty("admin") boolean admin, + UserEntity(@JsonProperty("active") Boolean active, + @JsonProperty("admin") Boolean admin, @JsonProperty("audited_organizations_url") String auditedOrganizationsUrl, @JsonProperty("audited_spaces_url") String auditedSpacesUrl, @JsonProperty("billing_managed_organizations_url") String
Replace boolean with Boolean This commit replaces all booleans with Booleans in request and response representations to allow for null values. [#<I>]
cloudfoundry_cf-java-client
train
c0f3fb2042d3205d11ce91e058573fafbe672003
diff --git a/f5/bigip/resource.py b/f5/bigip/resource.py index <HASH>..<HASH> 100644 --- a/f5/bigip/resource.py +++ b/f5/bigip/resource.py @@ -40,8 +40,8 @@ they are represented by the classes in this module. We refer to a server-provided resource as a "service". Thus far all URI referenced resources are "services" in this sense. -We use methods named Create, Refresh, Update, Load, Modify, and Delete to manipulate -BIG-IP® device services. +We use methods named Create, Refresh, Update, Load, Modify, and Delete to +manipulate BIG-IP® device services. Methods: @@ -69,7 +69,7 @@ Available Classes: construction. * ResourceBase -- only `refresh` is generally supported in all resource types, this class provides `refresh`. ResourceBase objects are usually - instantiated via setting lazy attributes. + instantiated via setting lazy attributes. All ResourceBase objects (except BIG-IPs) have a container (BIG-IPs contain themselves). The container is the object the ResourceBase is an attribute of. @@ -83,6 +83,10 @@ Available Classes: post (via _create) they uniquely depend on 2 uri's, a uri that supports the creating post, and the returned uri of the newly created resource. Example URI_path: /mgmt/tm/ltm/nat/~Common~testnat1 + * UnnamedResource -- Some resources correspond to URIs that do not have + unique names, therefore the class does _not_ support create-or-delete, + and supports a customized 'load' that doesn't require name/partition + parameters. """ import keyword import re diff --git a/f5/bigip/test/test_resource.py b/f5/bigip/test/test_resource.py index <HASH>..<HASH> 100644 --- a/f5/bigip/test/test_resource.py +++ b/f5/bigip/test/test_resource.py @@ -670,7 +670,6 @@ def test_collection_s(): class TestPathElement(object): def test_missing_req_param_true(self): - p = PathElement(mock.MagicMock()) rqset = set(['FOOPAR1', 'FOOPAR2']) fakearg = {'FOOPAR1': 'FOOVAL'} mrq = _missing_required_parameters(rqset, **fakearg) @@ -678,7 +677,6 @@ class TestPathElement(object): assert mrq == ['FOOPAR2'] def test_missing_req_param_false(self): - p = PathElement(mock.MagicMock()) rqset = set(['FOOPAR1']) fakearg = {'FOOPAR1': 'FOOVAL'} mrq = _missing_required_parameters(rqset, **fakearg)
more refactoring of docs
F5Networks_f5-common-python
train
03746477655cdf4d5e7c92205974de4274584494
diff --git a/src/test/java/com/taskadapter/redmineapi/RedmineManagerTest.java b/src/test/java/com/taskadapter/redmineapi/RedmineManagerTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/taskadapter/redmineapi/RedmineManagerTest.java +++ b/src/test/java/com/taskadapter/redmineapi/RedmineManagerTest.java @@ -1498,10 +1498,12 @@ public class RedmineManagerTest { */ @Test public void testGetVersions() throws RedmineException { - Project project = mgr.getProjectByKey(projectKey); - Version testVersion1 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID())); - Version testVersion2 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID())); + Project project = createProject(); + Version testVersion1 = null; + Version testVersion2 = null; try { + testVersion1 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID())); + testVersion2 = mgr.createVersion(new Version(project, "Version" + UUID.randomUUID())); List<Version> versions = mgr.getVersions(project.getId()); assertEquals("Wrong number of versions for project " + project.getName() + " delivered by Redmine Java API", 2, @@ -1519,6 +1521,7 @@ public class RedmineManagerTest { if (testVersion2 != null) { mgr.deleteVersion(testVersion2); } + mgr.deleteProject(project.getIdentifier()); } }
fixed testGetVersions() test, which was non-deterministic.
taskadapter_redmine-java-api
train
9b27b066131c98ea75ca9a29bde223d4fc5a4c05
diff --git a/spec/models/tag_spec.rb b/spec/models/tag_spec.rb index <HASH>..<HASH> 100644 --- a/spec/models/tag_spec.rb +++ b/spec/models/tag_spec.rb @@ -87,22 +87,22 @@ describe Tag do end context "with a simple keyword" do - let(:article) { create(:article, keywords: "akeyword") } + let(:article) { create(:article, keywords: "foo") } it { expect(article.tags.size).to eq(1) } it { expect(article.tags.first).to be_kind_of(Tag) } - it { expect(article.tags.first.name).to eq('akeyword') } + it { expect(article.tags.first.name).to eq('foo') } end context "with two keyword separate by a coma" do - let(:article) { create(:article, keywords: "first, second") } + let(:article) { create(:article, keywords: "foo, bar") } it { expect(article.tags.size).to eq(2) } - it { expect(article.tags.map(&:name)).to eq(['first', 'second']) } + it { expect(article.tags.map(&:name)).to eq(['foo', 'bar']) } end context "with two keyword with apostrophe" do - let(:article) { create(:article, keywords: "first, l'éléphant") } + let(:article) { create(:article, keywords: "foo, l'bar") } it { expect(article.tags.size).to eq(3) } - it { expect(article.tags.map(&:name)).to eq(['first', 'l', 'éléphant']) } + it { expect(article.tags.map(&:name)).to eq(['foo', 'l', 'bar']) } end context "with two identical keywords" do
Fix specs (missing utf8 or sample name without accents)
publify_publify
train
16301a5bb5487f359e129a340390969bfef7de26
diff --git a/src/LambdaFunction.js b/src/LambdaFunction.js index <HASH>..<HASH> 100644 --- a/src/LambdaFunction.js +++ b/src/LambdaFunction.js @@ -13,21 +13,32 @@ const { now } = Date module.exports = class LambdaFunction { constructor(config, options) { + const { + functionName, + lambdaName, + memorySize, + runtime, + timeout = DEFAULT_LAMBDA_TIMEOUT, + } = config + this._awsRequestId = null this._config = config this._executionTimeEnded = null this._executionTimeStarted = null this._executionTimeout = null + this._functionName = functionName + this._lambdaName = lambdaName + this._memorySize = memorySize this._options = options + this._runtime = runtime + this._timeout = timeout this._verifySupportedRuntime() } _startExecutionTimer() { - const { timeout = DEFAULT_LAMBDA_TIMEOUT } = this._config - this._executionTimeStarted = now() - this._executionTimeout = this._executionTimeStarted + timeout * 1000 + this._executionTimeout = this._executionTimeStarted + this._timeout * 1000 } _stopExecutionTimer() { @@ -35,15 +46,13 @@ module.exports = class LambdaFunction { } _verifySupportedRuntime() { - let { runtime } = this._config - // TODO what if runtime == null // -> fallback to node? or error out? - if (runtime === 'provided') { - runtime = this._options.providedRuntime + if (this._runtime === 'provided') { + this._runtime = this._options.providedRuntime - if (!runtime) { + if (!this._runtime) { throw new Error( `Runtime "provided" is not supported by "Serverless-Offline". Please specify the additional "providedRuntime" option. @@ -53,11 +62,11 @@ module.exports = class LambdaFunction { } // print message but keep working (don't error out or exit process) - if (!supportedRuntimes.has(runtime)) { + if (!supportedRuntimes.has(this._runtime)) { // this.printBlankLine(); // TODO console.log('') serverlessLog( - `Warning: found unsupported runtime '${runtime}' for function '${this._config.functionName}'`, + `Warning: found unsupported runtime '${this._runtime}' for function '${this._functionName}'`, ) } } @@ -75,8 +84,6 @@ module.exports = class LambdaFunction { } async runHandler() { - const { functionName, lambdaName, memorySize } = this._config - this._awsRequestId = createUniqueId() const lambdaContext = new LambdaContext({ @@ -87,8 +94,8 @@ module.exports = class LambdaFunction { // just return 0 for now if we are beyond alotted time (timeout) return time > 0 ? time : 0 }, - lambdaName, - memorySize, + lambdaName: this._lambdaName, + memorySize: this._memorySize, }) let callback @@ -118,7 +125,7 @@ module.exports = class LambdaFunction { // this only executes when we have an exception caused by synchronous code // TODO logging console.log(err) - throw new Error(`Uncaught error in '${functionName}' handler.`) + throw new Error(`Uncaught error in '${this._functionName}' handler.`) } // // not a Promise, which is not supported by aws
Assign private fields in LambdaFunction constructor
dherault_serverless-offline
train
cd4a32442891e1c6d255da06d0d4078308e75e28
diff --git a/phy/utils/tests/test_array.py b/phy/utils/tests/test_array.py index <HASH>..<HASH> 100644 --- a/phy/utils/tests/test_array.py +++ b/phy/utils/tests/test_array.py @@ -64,6 +64,11 @@ def test_chunk(): data = np.random.randn(200, 4) chunks = chunk_bounds(data.shape[0], 100, overlap=20) + with raises(ValueError): + data_chunk(data, (0, 0, 0)) + + assert data_chunk(data, (0, 0)).shape == (0, 4) + # Chunk 1. ch = next(chunks) d = data_chunk(data, ch)
Added a few tests for array utils.
kwikteam_phy
train
5510419989e39785cf52c0453c737a1cbe43bfa3
diff --git a/lib/activity-streams.js b/lib/activity-streams.js index <HASH>..<HASH> 100644 --- a/lib/activity-streams.js +++ b/lib/activity-streams.js @@ -22,6 +22,7 @@ var EventEmitter = require('event-emitter'), var objs = new ArrayKeys({ identifier: '@id' }), ee = EventEmitter(), + specialObjs = [], // the objects don't get rejected for bad props baseProps = { stream: [ '@type', 'actor', 'target', 'object', '@context' @@ -75,7 +76,10 @@ function validateObject(type, obj) { continue; } } - return 'invalid property ' + keys[i]; + + if (specialObjs.indexOf(obj['@type']) < 0) { + return 'invalid property ' + keys[i]; + } } } } @@ -181,19 +185,20 @@ var _Object = { module.exports = function (opts) { - if ((typeof opts === 'object') && - (typeof opts.customProps === 'object')) { - - var keys = Object.keys(opts.customProps); - for (var i = 0, len = keys.length; i < len; i += 1) { - if (typeof opts.customProps[keys[i]] === 'object') { - customProps[keys[i]] = []; - for (var j = 0, jlen = opts.customProps[keys[i]].length; j < jlen; j += 1) { - customProps[keys[i]].push(opts.customProps[keys[i]][j]); - } + if (typeof opts === 'object') { + specialObjs = opts.specialObjs || []; + + if (typeof opts.customProps === 'object') { + var keys = Object.keys(opts.customProps); + for (var i = 0, len = keys.length; i < len; i += 1) { + if (typeof opts.customProps[keys[i]] === 'object') { + customProps[keys[i]] = []; + for (var j = 0, jlen = opts.customProps[keys[i]].length; j < jlen; j += 1) { + customProps[keys[i]].push(opts.customProps[keys[i]][j]); + } + } } } - } return { diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "activity-streams", - "version": "2.0.0", + "version": "2.0.1", "description": "A simple tool to facilitate handling and referencing activity streams without unecessary verbosity.", "main": "./lib/activity-streams.js", "dependencies": { diff --git a/test/basics-suite.js b/test/basics-suite.js index <HASH>..<HASH> 100644 --- a/test/basics-suite.js +++ b/test/basics-suite.js @@ -112,6 +112,28 @@ function getTests() { }, { + desc: '# stream, specialObj', + run: function (env, test) { + var stream = env.mod.Stream({ + '@type': 'lol', + platform: 'irc', + actor: 'thingy', + object: { '@type': 'dude', foo: 'bar', content: 'har', secure: true }, + target: [ 'thingy1', 'thingy2' ] + }); + var expected = { + '@type': 'lol', + '@context': 'irc', + actor: { '@id': 'thingy' }, + target: [ { '@id': 'thingy1' }, { '@id': 'thingy2' }], + object: { '@type': 'dude', foo: 'bar', content: 'har', secure: true } + + }; + test.assert(stream, expected); + } + }, + + { desc: '# stream, string object (+ verb renaming)', run: function (env, test) { var stream = env.mod.Stream({ @@ -190,7 +212,8 @@ define(['require', 'array-keys'], function (require, ArrayKeys) { env.mod = require('./../lib/activity-streams')({ customProps: { credentials: [ 'secure' ] - } + }, + specialObjs: [ 'dude'] }); test.assertTypeAnd(env.mod, 'object'); test.assertTypeAnd(env.mod.Object, 'object'); @@ -205,7 +228,8 @@ define(['require', 'array-keys'], function (require, ArrayKeys) { env.mod = require('./../browser/activity-streams.js')({ customProps: { credentials: [ 'secure' ] - } + }, + specialObjs: [ 'dude'] }); test.assertTypeAnd(env.mod, 'object'); test.assertTypeAnd(env.mod.Object, 'object'); @@ -220,7 +244,8 @@ define(['require', 'array-keys'], function (require, ArrayKeys) { env.mod = require('./../browser/activity-streams.min.js')({ customProps: { credentials: [ 'secure' ] - } + }, + specialObjs: [ 'dude'] }); test.assertTypeAnd(env.mod, 'object'); test.assertTypeAnd(env.mod.Object, 'object');
added specialObjs config property to allow property checks to be skipped for some @type values
silverbucket_activity-streams.js
train
1454d0335f60fbd8d99ce6990d6e469cb96da9bc
diff --git a/lib/xapian_db/adapters/active_record_adapter.rb b/lib/xapian_db/adapters/active_record_adapter.rb index <HASH>..<HASH> 100644 --- a/lib/xapian_db/adapters/active_record_adapter.rb +++ b/lib/xapian_db/adapters/active_record_adapter.rb @@ -1,7 +1,10 @@ # encoding: utf-8 -# Adapter for ActiveRecord. To use it, simply set it as the -# default for any DocumentBlueprint or a specific DocumentBlueprint +# Adapter for ActiveRecord. To use it, configure it like this: +# XapianDb::Config.setup do |config| +# config.adapter :active_record +# end +# @author Gernot Kogler module XapianDb module Adapters @@ -35,18 +38,6 @@ module XapianDb # Add a method to reindex all models of this class define_singleton_method(:rebuild_xapian_index) do - # db = XapianDb::Adapters::ActiveRecordAdapter.database - # # First, delete all docs of this class - # db.delete_docs_of_class(klass) - # obj_count = klass.count - # puts "Reindexing #{obj_count} objects..." - # pbar = ProgressBar.new("Status", obj_count) - # klass.all.each do |obj| - # doc = @@blueprint.indexer.build_document_for(obj) - # db.store_doc(doc) - # pbar.inc - # end - # db.commit XapianDb::Config.writer.reindex_class(klass) end end @@ -59,7 +50,7 @@ module XapianDb # Implement access to the indexed object define_method :indexed_object do return @indexed_object unless @indexed_object.nil? - # retrieve the object id from data + # retrieve the class and id from data klass_name, id = data.split("-") klass = Kernel.const_get(klass_name) @indexed_object = klass.find(id.to_i) diff --git a/lib/xapian_db/adapters/datamapper_adapter.rb b/lib/xapian_db/adapters/datamapper_adapter.rb index <HASH>..<HASH> 100644 --- a/lib/xapian_db/adapters/datamapper_adapter.rb +++ b/lib/xapian_db/adapters/datamapper_adapter.rb @@ -1,7 +1,10 @@ # encoding: utf-8 -# Adapter for datamapper. To use it, simply set it as the -# default for any DocumentBlueprint or a specific DocumentBlueprint +# Adapter for ActiveRecord. To use it, configure it like this: +# XapianDb::Config.setup do |config| +# config.adapter :datamapper +# end +# @author Gernot Kogler module XapianDb module Adapters @@ -47,7 +50,7 @@ module XapianDb # Implement access to the indexed object define_method :indexed_object do return @indexed_object unless @indexed_object.nil? - # retrieve the object id from data + # retrieve the class and id from data klass_name, id = data.split("-") klass = Kernel.const_get(klass_name) @indexed_object = klass.get(id.to_i) diff --git a/lib/xapian_db/database.rb b/lib/xapian_db/database.rb index <HASH>..<HASH> 100644 --- a/lib/xapian_db/database.rb +++ b/lib/xapian_db/database.rb @@ -1,6 +1,5 @@ # encoding: utf-8 -# Singleton class representing a Xapian database. # @author Gernot Kogler module XapianDb diff --git a/lib/xapian_db/document_blueprint.rb b/lib/xapian_db/document_blueprint.rb index <HASH>..<HASH> 100644 --- a/lib/xapian_db/document_blueprint.rb +++ b/lib/xapian_db/document_blueprint.rb @@ -35,7 +35,7 @@ module XapianDb return [] unless @blueprints return @searchable_prefixes unless @searchable_prefixes.nil? prefixes = [] - @blueprints.each do |klass, blueprint| + @blueprints.values.each do |blueprint| prefixes << blueprint.searchable_prefixes end @searchable_prefixes = prefixes.flatten.compact.uniq @@ -50,7 +50,7 @@ module XapianDb # Return an array of all configured text methods in this blueprint def searchable_prefixes - @prefixes ||= indexed_methods.map{|method_name, options| method_name} + @prefixes ||= indexed_methods.keys end # Lazily build and return a module that implements accessors for each field @@ -58,6 +58,7 @@ module XapianDb return @accessors_module unless @accessors_module.nil? @accessors_module = Module.new + # Add the accessor for the indexed class @accessors_module.instance_eval do define_method :domain_class do self.values[0].value diff --git a/lib/xapian_db/resultset.rb b/lib/xapian_db/resultset.rb index <HASH>..<HASH> 100644 --- a/lib/xapian_db/resultset.rb +++ b/lib/xapian_db/resultset.rb @@ -1,6 +1,6 @@ # encoding: utf-8 -# The resultset holds a Xapian::Query object and allows paged access +# The resultset encapsulates a Xapian::Query object and allows paged access # to the found documents. # author Gernot Kogler
[ADD] Corrected some inaccurate comments
gernotkogler_xapian_db
train
2cafbca8812b3ff9ce8f578ffd2899920da5fa77
diff --git a/metrics-graphite/src/main/java/com/codahale/metrics/graphite/PickledGraphite.java b/metrics-graphite/src/main/java/com/codahale/metrics/graphite/PickledGraphite.java index <HASH>..<HASH> 100644 --- a/metrics-graphite/src/main/java/com/codahale/metrics/graphite/PickledGraphite.java +++ b/metrics-graphite/src/main/java/com/codahale/metrics/graphite/PickledGraphite.java @@ -17,7 +17,6 @@ import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.util.ArrayList; -import java.util.LinkedList; import java.util.List; import static java.nio.charset.StandardCharsets.UTF_8;
Removing unnecessary imports (#<I>)
dropwizard_metrics
train
d8d2e7164ac699db59ec67e66ed3b819284ac367
diff --git a/test/model_test.js b/test/model_test.js index <HASH>..<HASH> 100644 --- a/test/model_test.js +++ b/test/model_test.js @@ -481,7 +481,7 @@ describe('Seraph Model', function() { }); }); }); - it('should not run before/after comp events on transient compositions', function(done) { + it('should not run before-save comp events on transient compositions', function(done) { var beer = model(db, 'Beer'); var food = model(db, 'Food'); food.compose(beer, 'drink', 'goes_with', { @@ -494,6 +494,19 @@ describe('Seraph Model', function() { done() }); }); + it('should not run after-save comp events on transient compositions', function(done) { + var beer = model(db, 'Beer'); + var food = model(db, 'Food'); + food.compose(beer, 'drink', 'goes_with', { + transient: true + }); + beer.on('afterSave', function(obj, cb) { assert(false) }); + + food.save({name:'Pinnekjøtt', drink: {name: 'Humlekanon'}}, function(err, pinnekjøtt) { + assert(!err); + done() + }); + }); it('it should allow exclusion of composed models on save', function(done) { var beer = model(db, 'Beer'); var food = model(db, 'Food');
add check that afterSave event is not fired on transient compositions
brikteknologier_seraph-model
train
614d132922cad458de7c18420bd994e8429fefaa
diff --git a/debugger/static/js/runtime/1.js b/debugger/static/js/runtime/1.js index <HASH>..<HASH> 100644 --- a/debugger/static/js/runtime/1.js +++ b/debugger/static/js/runtime/1.js @@ -1,37 +1,27 @@ // XBlock runtime implementation. -// Constructor for a runtime object provided to an XBlock init function. -function runtime_1(element, children) { - var child_map = {} - $.each(children, function(idx, child) { - child_map[child.name] = child - }); - return { - handler_url: function(handler_name) { - var usage = $(element).data('usage'); - return "/handler/" + usage + "/" + handler_name + "?student=" + student_id; - }, - children: children, - child_map: child_map - } -} +var XBlock = (function () { -$(function() { - // Find all the children of an element that match the selector, but only - // the first instance found down any path. For example, we'll find all - // the ".xblock" elements below us, but not the ones that are themselves - // contained somewhere inside ".xblock" elements. - $.fn.immediateDescendents = function(selector) { - return this.children().map(function(idx, element) { - if ($(element).is(selector)) { - return element; - } else { - return $(element).immediateDescendents(selector).toArray(); - } - }); + // Constructors for a runtime object provided to an XBlock init function. + // Indexed by version number. Only 1 right now. + var runtime_constructors = { + 1: function (element, children) { + var child_map = {} + $.each(children, function(idx, child) { + child_map[child.name] = child + }); + return { + handler_url: function(handler_name) { + var usage = $(element).data('usage'); + return "/handler/" + usage + "/" + handler_name + "?student=" + student_id; + }, + children: children, + child_map: child_map + }; + } }; - function initializeBlock(element) { + var initializeBlock = function (element) { var children = initializeBlocks($(element)); var version = $(element).data('runtime-version'); @@ -39,24 +29,45 @@ $(function() { return null; } - var runtime = window['runtime_' + version](element, children); + var runtime = runtime_constructors[version](element, children); var init_fn = window[$(element).data('init')]; var js_block = init_fn(runtime, element) || {}; js_block.element = element; js_block.name = $(element).data('name'); return js_block; - } + }; - function initializeBlocks(element) { + var initializeBlocks = function (element) { return $(element).immediateDescendents('.xblock').map(function(idx, elem) { return initializeBlock(elem); }).toArray(); - } + }; + + return { + initializeBlocks: initializeBlocks + }; +}()); + + +$(function() { + // Find all the children of an element that match the selector, but only + // the first instance found down any path. For example, we'll find all + // the ".xblock" elements below us, but not the ones that are themselves + // contained somewhere inside ".xblock" elements. + $.fn.immediateDescendents = function(selector) { + return this.children().map(function(idx, element) { + if ($(element).is(selector)) { + return element; + } else { + return $(element).immediateDescendents(selector).toArray(); + } + }); + }; $('body').on('ajaxSend', function(elm, xhr, s) { // Pass along the Django-specific CSRF token. xhr.setRequestHeader('X-CSRFToken', $.cookie('csrftoken')); }); - initializeBlocks($('body')); + XBlock.initializeBlocks($('body')); });
Refactor this into a self-contained js object.
edx_XBlock
train
5ab2aab8ebd972ac7b95d575eb39319af78808af
diff --git a/src/Shell.php b/src/Shell.php index <HASH>..<HASH> 100644 --- a/src/Shell.php +++ b/src/Shell.php @@ -1368,9 +1368,9 @@ class Shell extends Application * * @param bool $interactive * - * @return string One line of user input + * @return string|false One line of user input */ - protected function readline(bool $interactive = true): string + protected function readline(bool $interactive = true) { if (!empty($this->inputBuffer)) { $line = \array_shift($this->inputBuffer);
Fix non-interactive mode. (string|false return type thunked into "". yay types!)
bobthecow_psysh
train
024ff36a0b67900e51f229689ce9512b70e4d14e
diff --git a/montblanc/impl/rime/v5/CompositeRimeSolver.py b/montblanc/impl/rime/v5/CompositeRimeSolver.py index <HASH>..<HASH> 100644 --- a/montblanc/impl/rime/v5/CompositeRimeSolver.py +++ b/montblanc/impl/rime/v5/CompositeRimeSolver.py @@ -725,7 +725,7 @@ class CompositeRimeSolver(MontblancNumpySolver): # Dirty index, indicating the CPU index of the # data currently on the GPU, used for avoiding # array transfer - self.thread_local.dirty = {} + self.thread_local.dirty = [{} for n in range(nsolvers)] # Configure thread local storage # Number of solvers in this thread @@ -865,7 +865,7 @@ class CompositeRimeSolver(MontblancNumpySolver): # Clear the dirty dictionary to force each array to be # transferred at least once. e.g. the beam cube - self.thread_local.dirty.clear() + [d.clear for d in self.thread_local.dirty] def _thread_enqueue_solve_batch(self, cpu_slice_map, gpu_slice_map, **kwargs): """ @@ -892,7 +892,7 @@ class CompositeRimeSolver(MontblancNumpySolver): # Cache keyed by array names and contained indices # This is used to avoid unnecessary CPU to GPU copies # by caching the last index of the CPU array - dirty = tl.dirty + dirty = tl.dirty[i] # Guard pool allocations with a coarse-grained mutex with subslvr.pool_lock: @@ -970,15 +970,17 @@ class CompositeRimeSolver(MontblancNumpySolver): X2_gpu_ary.get_async(ary=sub_X2, stream=subslvr.stream) # Enqueue transfer of simulator output (model visibilities) to the CPU - new_refs = self._enqueue_array(subslvr, cpu_slice_map, gpu_slice_map, + sim_output_refs = self._enqueue_array(subslvr, + cpu_slice_map, gpu_slice_map, direction=ASYNC_DTOH, dirty={}, classifiers=[Classifier.SIMULATOR_OUTPUT]) # Should only be model visibilities - assert len(new_refs) == 1, ('Expected one array (model visibilities), ' + assert len(sim_output_refs) == 1, ( + 'Expected one array (model visibilities), ' 'received {l} instead.'.format(l=len(new_refs))) - model_vis = new_refs['model_vis'][0] + model_vis = sim_output_refs['model_vis'][0] # Create and record an event directly after the chi-squared copy # We'll synchronise on this thread in our synchronisation executor @@ -1022,17 +1024,26 @@ class CompositeRimeSolver(MontblancNumpySolver): import pycuda.driver as cuda import pycuda.gpuarray as gpuarray + cuda_types = (cuda.PooledDeviceAllocation, cuda.PooledHostAllocation) + + debug_str_list = ['Pool de-allocations per array name',] + debug_str_list.extend('({k}, {l})'.format(k=k,l=len(v)) + for k, v in pool_refs.iteritems()) + + montblanc.log.debug(' '.join(debug_str_list)) + with pool_lock: - for ref in (r for k, rl in pool_refs.iteritems() for r in rl): + for k, ref in ((k, r) for k, rl in pool_refs.iteritems() + for r in rl): if isinstance(ref, np.ndarray): ref.base.free() - elif isinstance(ref, (cuda.PooledDeviceAllocation, cuda.PooledHostAllocation)): + elif isinstance(ref, cuda_types): ref.free() elif isinstance(ref, gpuarray.GPUArray): ref.gpudata.free() else: - raise TypeError("Unable to release pool allocated " - "object of type {t}.".format(t=type(ref))) + raise TypeError("Don't know how to release pool allocated " + "object '{n}'' of type {t}.".format(n=k, t=type(ref))) def _sync_wait(future): """ @@ -1046,15 +1057,14 @@ cuda_event.synchronize() except cuda.LogicError as e: # Format the slices nicely - for k, s in cpu.iteritems(): - cpu[k] = '[{b}, {e}]'.format(b=s.start, e=s.stop) - - for k, s in gpu.iteritems(): - gpu[k] = '[{b}, {e}]'.format(b=s.start, e=s.stop) + pretty_cpu = { k: '[{b}, {e}]'.format(b=s.start, e=s.stop) + for k, s in cpu.iteritems() } + pretty_gpu = { k: '[{b}, {e}]'.format(b=s.start, e=s.stop) + for k, s in gpu.iteritems() } import json - print 'GPU', json.dumps(gpu, indent=2) - print 'CPU', json.dumps(cpu, indent=2) + print 'GPU', json.dumps(pretty_gpu, indent=2) + print 'CPU', json.dumps(pretty_cpu, indent=2) raise e, None, sys.exc_info()[2] # Work out the CPU view in the model visibilities
Use per sub-solver dirty cache dictionary. 7c1d9f<I>c<I>a<I>d<I>deb8c<I>c<I>ded introduced a regression by using a single dirty dictionary for multiple solvers. This meant that a section of data required by two solvers would be transferred to only the first, but not the second. This change uses a dirty cache for each sub-solver, fixing the above issue.
ska-sa_montblanc
train
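The regression described in the commit message above is easy to model in miniature. The sketch below is illustrative only; the class and names are invented, not montblanc's API. With one cache shared across solvers, the second solver silently skips its transfer; per-solver caches, mirroring the commit's `[{} for n in range(nsolvers)]`, restore the expected behaviour:

```python
# Illustrative sketch of the shared dirty-cache bug; names are hypothetical.
class Solver:
    def __init__(self, dirty):
        self.dirty = dirty        # records sections already on this solver's GPU
        self.received = []

    def transfer(self, section):
        if section in self.dirty: # skip the copy if the cache says it is present
            return
        self.dirty.add(section)
        self.received.append(section)

shared = set()                    # buggy: one cache for every solver
a, b = Solver(shared), Solver(shared)
a.transfer("chunk0")
b.transfer("chunk0")
assert b.received == []           # solver b never receives the data

caches = [set() for _ in range(2)]  # fixed: one cache per solver
a, b = Solver(caches[0]), Solver(caches[1])
a.transfer("chunk0")
b.transfer("chunk0")
assert b.received == ["chunk0"]   # both solvers now get their copy
```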
315d5536ece2e075e88d65e176a79992a0df3b4b
diff --git a/luigi/interface.py b/luigi/interface.py index <HASH>..<HASH> 100644 --- a/luigi/interface.py +++ b/luigi/interface.py @@ -70,8 +70,10 @@ class EnvironmentParamsContainer(task.Task): description='Hostname of machine running remote scheduler') scheduler_port = parameter.IntParameter(is_global=True, default=8082, description='Port of remote scheduler api process') - lock = parameter.BooleanParameter(is_global=True, default=False, - description='Do not run if the task is already running') + lock = parameter.BooleanParameter(is_global=True, default=True, + description='(Deprecated, replaced by nolock) Do not run if similar process is already running') + nolock = parameter.BooleanParameter(is_global=True, default=False, + description='Ignore if similar process is already running') lock_pid_dir = parameter.Parameter(is_global=True, default='/var/tmp/luigi', description='Directory to store the pid file') workers = parameter.IntParameter(is_global=True, default=1, @@ -135,7 +137,7 @@ class Interface(object): if not configuration.get_config().getboolean('core', 'no_configure_logging', False): setup_interface_logging(logging_conf) - if env_params.lock and not(lock.acquire_for(env_params.lock_pid_dir)): + if not env_params.nolock and not(lock.acquire_for(env_params.lock_pid_dir)): sys.exit(1) if env_params.local_scheduler:
Make --lock default to True. Add --nolock to provide lockless functionality when needed
spotify_luigi
train
4c6d75ec2a241cf5e887ca9abb3b562d7c93327b
diff --git a/packages/archetype/src/__demo__/index.js b/packages/archetype/src/__demo__/index.js index <HASH>..<HASH> 100644 --- a/packages/archetype/src/__demo__/index.js +++ b/packages/archetype/src/__demo__/index.js @@ -24,6 +24,5 @@ export default { design: 'design theory about the archetype component', doc, examples, - howToUse: 'how to install the archetype component', title: 'Archetype' }; diff --git a/packages/hello-world/src/__demo__/index.js b/packages/hello-world/src/__demo__/index.js index <HASH>..<HASH> 100644 --- a/packages/hello-world/src/__demo__/index.js +++ b/packages/hello-world/src/__demo__/index.js @@ -24,6 +24,5 @@ export default { design: 'Lofty design theory about the helloworld', doc, examples, - howToUse: 'how to install the HelloWorld component', title: 'HelloWorld' }; diff --git a/packages/hello/src/__demo__/index.js b/packages/hello/src/__demo__/index.js index <HASH>..<HASH> 100644 --- a/packages/hello/src/__demo__/index.js +++ b/packages/hello/src/__demo__/index.js @@ -24,6 +24,5 @@ export default { design: 'Lofty design theory about hello', doc, examples, - howToUse: 'how to install the hello component', title: 'Hello' }; diff --git a/packages/site/src/components/ComponentDoc.js b/packages/site/src/components/ComponentDoc.js index <HASH>..<HASH> 100644 --- a/packages/site/src/components/ComponentDoc.js +++ b/packages/site/src/components/ComponentDoc.js @@ -35,7 +35,6 @@ type Props = { design: MnrlReactNode, doc: Object, examples?: Array<Example>, - howToUse: string, slug: string, title: string }; @@ -118,7 +117,6 @@ export default function ComponentDoc({ design, doc, examples, - howToUse, slug, title }: Props) { @@ -141,7 +139,10 @@ export default function ComponentDoc({ <SubNav> <NavElement href="#development">Development</NavElement> <NavElement href="#design">Design</NavElement> - <NavElement href="#how-to-use">How to Use</NavElement> + <NavElement + href={`https://github.com/mineral-ui/mineral-ui/blob/master/packages/${slug}/README.md#installation`}> + How to Use + </NavElement> <NavElement href={`https://github.com/mineral-ui/mineral-ui/blob/master/packages/${slug}/CHANGELOG.md`}> Changelog @@ -158,8 +159,6 @@ export default function ComponentDoc({ </div> <H2 id="design">Design</H2> <p>{design}</p> - <H2 id="how-to-use">How to Use</H2> - <p>{howToUse}</p> </div> </Root> ); diff --git a/packages/site/src/components/__tests__/__snapshots__/App.spec.js.snap b/packages/site/src/components/__tests__/__snapshots__/App.spec.js.snap index <HASH>..<HASH> 100644 --- a/packages/site/src/components/__tests__/__snapshots__/App.spec.js.snap +++ b/packages/site/src/components/__tests__/__snapshots__/App.spec.js.snap @@ -25,7 +25,6 @@ exports[`App renders correctly 1`] = ` "title": "Default", }, ], - "howToUse": "how to install the hello component", "title": "Hello", }, "hello-world": Object { @@ -41,7 +40,6 @@ exports[`App renders correctly 1`] = ` "title": "Default", }, ], - "howToUse": "how to install the HelloWorld component", "title": "HelloWorld", }, "style-utils": Object { @@ -114,7 +112,6 @@ exports[`App renders correctly 1`] = ` "title": "Style override via createStyledComponent", }, ], - "howToUse": "how to install the style-utils package", "title": "StyleUtils", }, "world": Object { @@ -136,7 +133,6 @@ exports[`App renders correctly 1`] = ` "title": "Custom Text", }, ], - "howToUse": "how to install the world", "title": "World", }, } diff --git a/packages/style-utils/src/__demo__/index.js b/packages/style-utils/src/__demo__/index.js index <HASH>..<HASH> 100644 --- a/packages/style-utils/src/__demo__/index.js +++ b/packages/style-utils/src/__demo__/index.js @@ -25,6 +25,5 @@ export default { 'Sample implentations of various methods of styling and style overrides.' }, examples, - howToUse: 'how to install the style-utils package', title: 'StyleUtils' }; diff --git a/packages/world/src/__demo__/index.js b/packages/world/src/__demo__/index.js index <HASH>..<HASH> 100644 --- a/packages/world/src/__demo__/index.js +++ b/packages/world/src/__demo__/index.js @@ -25,6 +25,5 @@ export default { design: 'Lofty design theory about the world', doc, examples, - howToUse: 'how to install the world', title: 'World' };
feat(site): link to readme for how to install/use we already have installation and usage instructions in the readme for each component. let's not repeat ourselves unnecessarily.
mineral-ui_mineral-ui
train
a16b31cae5934506d6596be41ca7d49a18e06a0c
diff --git a/system/core/functions/file.php b/system/core/functions/file.php index <HASH>..<HASH> 100644 --- a/system/core/functions/file.php +++ b/system/core/functions/file.php @@ -277,7 +277,7 @@ function gplcart_file_tempname($prefix = 'GC') * @param array $files * @return boolean */ -function gplcart_file_convert_upload(&$files) +function gplcart_file_multi_upload(&$files) { if (empty($files['name']) || (count($files['name']) == 1 && empty($files['name'][0]))) { return false;
Rename gplcart_file_convert_upload()
gplcart_gplcart
train
db1f99dc38727710d7482d2796026fd740ccfe3e
diff --git a/lib/extensions/crypt/crypt/rijndael.rb b/lib/extensions/crypt/crypt/rijndael.rb index <HASH>..<HASH> 100644 --- a/lib/extensions/crypt/crypt/rijndael.rb +++ b/lib/extensions/crypt/crypt/rijndael.rb @@ -87,6 +87,10 @@ class Rijndael def add_round_key(blockArray, roundKey) 0.upto(3) { |i| 0.upto(@blockWords) { |j| + + if blockArray[i][j].is_a?(String) then + blockArray[i][j] = blockArray[i][j].unpack('C*').first + end blockArray[i][j] ^= roundKey[i][j] } }
Fix Crypt problem with type of String[] in Ruby
rhomobile_rhodes
train
3275db347e2559f5dd02c6c9ef6ec14e335d49d6
diff --git a/edison-jobs/src/main/java/de/otto/edison/jobs/service/JobRunnable.java b/edison-jobs/src/main/java/de/otto/edison/jobs/service/JobRunnable.java index <HASH>..<HASH> 100644 --- a/edison-jobs/src/main/java/de/otto/edison/jobs/service/JobRunnable.java +++ b/edison-jobs/src/main/java/de/otto/edison/jobs/service/JobRunnable.java @@ -33,7 +33,7 @@ public interface JobRunnable { * * @param jobEventPublisher publishes events to the event bus. * @return false, if the job was skipped without doing anything, true otherwise - * @Deprecated use execute, don't remove this method + * @deprecated use execute, don't remove this method */ default boolean execute(JobEventPublisher jobEventPublisher) { return execute();
Reimplement the JobEventPublisher from Edison 1.x for backwards compatibility
otto-de_edison-microservice
train
f5187d7ec33e7e50b072001a58a4e6da538f515e
diff --git a/grib/src/main/java/ucar/nc2/grib/grib1/Grib1Index.java b/grib/src/main/java/ucar/nc2/grib/grib1/Grib1Index.java index <HASH>..<HASH> 100644 --- a/grib/src/main/java/ucar/nc2/grib/grib1/Grib1Index.java +++ b/grib/src/main/java/ucar/nc2/grib/grib1/Grib1Index.java @@ -115,9 +115,11 @@ public class Grib1Index extends GribIndex { FileInputStream fin = new FileInputStream(idxFile); // LOOK need DiskCache for non-writeable directories try { - //// header message - if (!NcStream.readAndTest(fin, MAGIC_START.getBytes())) - throw new IOException("Bad magic number of grib index, should be= " + MAGIC_START); + //// check header is ok + if (!NcStream.readAndTest(fin, MAGIC_START.getBytes())) { + log.debug("Bad magic number of grib index, should be= {}" + MAGIC_START); + return false; + } int v = NcStream.readVInt(fin); if (v != version) { diff --git a/grib/src/main/java/ucar/nc2/grib/grib1/tables/Grib1TimeTypeTable.java b/grib/src/main/java/ucar/nc2/grib/grib1/tables/Grib1TimeTypeTable.java index <HASH>..<HASH> 100644 --- a/grib/src/main/java/ucar/nc2/grib/grib1/tables/Grib1TimeTypeTable.java +++ b/grib/src/main/java/ucar/nc2/grib/grib1/tables/Grib1TimeTypeTable.java @@ -117,7 +117,7 @@ public class Grib1TimeTypeTable { } } - // code table 5 - 2010 edition of WMO manual on codes (I.2 � Bi � 21) + // code table 5 - 2010 edition of WMO manual on codes static public String getTimeTypeName(int timeRangeIndicator, int p1, int p2) { String timeRange; @@ -182,7 +182,7 @@ public class Grib1TimeTypeTable { If P1 = 0 then the data averaged in the basic interval P2 are assumed to be continuous, i.e. all available data are simply averaged together. - If P1 = 1 (the unit of time � octet 18, Code table 4 � is not + If P1 = 1 (the unit of time octet 18, Code table 4 is not relevant here) then the data averaged together in the basic interval P2 are valid only at the time (hour, minute) given in the reference time, for all the days included in the P2 period. The units of P2 are given by the contents of octet 18 and Code table 4 */ diff --git a/grib/src/main/java/ucar/nc2/grib/grib2/Grib2Index.java b/grib/src/main/java/ucar/nc2/grib/grib2/Grib2Index.java index <HASH>..<HASH> 100644 --- a/grib/src/main/java/ucar/nc2/grib/grib2/Grib2Index.java +++ b/grib/src/main/java/ucar/nc2/grib/grib2/Grib2Index.java @@ -114,9 +114,11 @@ public class Grib2Index extends GribIndex { FileInputStream fin = new FileInputStream(idxFile); // LOOK need DiskCache for non-writeable directories try { - //// header message - if (!NcStream.readAndTest(fin, MAGIC_START.getBytes())) - throw new IOException("Bad magic number of grib index, should be= " + MAGIC_START); + //// check header is ok + if (!NcStream.readAndTest(fin, MAGIC_START.getBytes())) { + log.debug("Bad magic number of grib index, should be= {}" + MAGIC_START); + return false; + } int v = NcStream.readVInt(fin); if (v != version) { diff --git a/intelliJ/threddsFull/thredds.ipr b/intelliJ/threddsFull/thredds.ipr index <HASH>..<HASH> 100644 --- a/intelliJ/threddsFull/thredds.ipr +++ b/intelliJ/threddsFull/thredds.ipr @@ -230,6 +230,7 @@ <excludeFromCompile> <file url="file://$PROJECT_DIR$/../../tds/src/main/java/thredds/servlet/tomcat/UsageValve.java" /> <file url="file://$PROJECT_DIR$/../../ldm/src/main/thredds/db/TestBabu.java" /> + <file url="file://$PROJECT_DIR$/../../cdm/src/test/java/ucar/nc2/jni/netcdf/TestJni.java" /> <file url="file://$PROJECT_DIR$/../../tds/src/main/java/thredds/servlet/MyWebDavServlet.java" /> <file url="file://$PROJECT_DIR$/../../ui/src/main/java/thredds/ui/catalog/search/Indexer.java" /> <file url="file://$PROJECT_DIR$/../../ui/src/main/java/thredds/ui/catalog/search/CatalogSearcher.java" /> diff --git a/ui/src/main/java/ucar/nc2/ui/Grib1CollectionPanel.java b/ui/src/main/java/ucar/nc2/ui/Grib1CollectionPanel.java index <HASH>..<HASH> 100644 --- a/ui/src/main/java/ucar/nc2/ui/Grib1CollectionPanel.java +++ b/ui/src/main/java/ucar/nc2/ui/Grib1CollectionPanel.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 1998 - 2011. University Corporation for Atmospheric Research/Unidata + * Copyright (c) 1998 - 2012. University Corporation for Atmospheric Research/Unidata * Portions of this software were developed by the Unidata Program at the * University Corporation for Atmospheric Research. *
grib: when the ncx index is wrong, delete it
Unidata_thredds
train
fa4507c4dc45a06c94b2905f5139303106f05c83
diff --git a/src/discoursegraphs/merging.py b/src/discoursegraphs/merging.py index <HASH>..<HASH> 100755 --- a/src/discoursegraphs/merging.py +++ b/src/discoursegraphs/merging.py @@ -5,7 +5,7 @@ import os import sys import re -from networkx import write_gpickle +from networkx import write_dot from discoursegraphs import DiscourseDocumentGraph from discoursegraphs.relabel import relabel_nodes @@ -14,6 +14,7 @@ from discoursegraphs.readwrite.anaphoricity import AnaphoraDocumentGraph from discoursegraphs.readwrite.rst import RSTGraph, rst_tokenlist from discoursegraphs.readwrite.tiger import TigerDocumentGraph, tiger_tokenlist + def add_rst_to_tiger(tiger_docgraph, rst_graph): """ adds an RSTGraph to a TigerDocumentGraph, thereby adding edges from @@ -64,14 +65,14 @@ def map_anaphoricity_tokens_to_tiger(tiger_docgraph, anaphora_graph): """ # list of (token unicode, tiger_sent_id str, tiger_token_id str) tiger_tokens = tiger_tokenlist(tiger_docgraph) - + anaphora2tiger = {} for i, anaphora_node_id in enumerate(anaphora_graph.tokens): anaphora_token = anaphora_graph.node[anaphora_node_id]['anaphoricity:token'] tiger_token, tiger_sent_id, tiger_token_id = tiger_tokens[i] - + if anaphora_token == tiger_token: - anaphora2tiger[anaphora_node_id] = tiger_token_id + anaphora2tiger[anaphora_node_id] = tiger_token_id else: raise ValueError(u"tokens don't match: {0} (anaphoricity) vs. {1} (tiger)".format(anaphora_token, tiger_token)) return anaphora2tiger @@ -89,28 +90,27 @@ def add_anaphoricity_to_tiger(tiger_docgraph, anaphora_graph): anaphora_graph : AnaphoraDocumentGraph multidigraph representing a anaphorcity annotated document (ad-hoc format used in Christian Dittrich's diploma thesis) - """ + """ anaphora2tiger = map_anaphoricity_tokens_to_tiger(tiger_docgraph, anaphora_graph) relabel_nodes(anaphora_graph, anaphora2tiger, copy=False) tiger_docgraph.add_nodes_from(anaphora_graph.nodes(data=True)) - # we don't need these edges. they just go from the anaphoricity:root - # to the tokens - #~ tiger_docgraph.add_edges_from(anaphora_graph.edges(data=True)) - - - - + # the anaphora doc graph only contains trivial edges from its root + # node. we won't add them and will remove the root. + try: + tiger_docgraph.remove_node('anaphoricity:root_node') + except: + pass if __name__ == '__main__': if len(sys.argv) != 5: - sys.stderr.write('Usage: {0} tiger_file rst_file anaphoricity_file pickle_output_file\n'.format(sys.argv[0])) + sys.stderr.write('Usage: {0} tiger_file rst_file anaphoricity_file dot_output_file\n'.format(sys.argv[0])) sys.exit(1) else: tiger_filepath = sys.argv[1] rst_filepath = sys.argv[2] anaphora_filepath = sys.argv[3] - pickle_filepath = sys.argv[4] + dot_filepath = sys.argv[4] for filepath in (tiger_filepath, rst_filepath, anaphora_filepath): assert os.path.isfile(filepath), "{} doesn't exist".format(filepath) @@ -120,11 +120,5 @@ if __name__ == '__main__': add_rst_to_tiger(tiger_docgraph, rst_graph) add_anaphoricity_to_tiger(tiger_docgraph, anaphora_graph) + write_dot(tiger_docgraph, dot_filepath) - for i, node in tiger_docgraph.nodes(data=True): - print i, node - - for from_node, to_node, edge in tiger_docgraph.edges(data=True): - print from_node, to_node, edge - - write_gpickle(tiger_docgraph, pickle_filepath)
merging now produces dot output, ignores anaphoricity edges/root
arne-cl_discoursegraphs
train
bd1ff2cded568550e94659d286f3a3e21882111f
diff --git a/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser.php b/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser.php index <HASH>..<HASH> 100644 --- a/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser.php +++ b/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser.php @@ -50,9 +50,12 @@ class Parser extends ParserBase // [media=youtube]xxxxxxx[/media] $tagName = $tag->getAttribute('media'); - // If this tag doesn't have an id attribute, copy the value of the url attribute, so - // that the tag acts like [media=youtube id=xxxx]xxxx[/media] - if (!$tag->hasAttribute('id') && $tag->hasAttribute('url')) + // If this tag doesn't have an id attribute and the url attribute doesn't really look + // like an URL, copy the value of the url attribute, so that the tag acts like + // [media=youtube id=xxxx]xxxx[/media] + if (!$tag->hasAttribute('id') + && $tag->hasAttribute('url') + && strpos($tag->getAttribute('url'), '://') === false) { $tag->setAttribute('id', $tag->getAttribute('url')); } diff --git a/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser/TagFilter.js b/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser/TagFilter.js index <HASH>..<HASH> 100644 --- a/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser/TagFilter.js +++ b/src/s9e/TextFormatter/Plugins/MediaEmbed/Parser/TagFilter.js @@ -18,9 +18,12 @@ function (tag, tagStack, sites) // [media=youtube]xxxxxxx[/media] tagName = tag.getAttribute('media'); - // If this tag doesn't have an id attribute, copy the value of the url attribute, so - // that the tag acts like [media=youtube id=xxxx]xxxx[/media] - if (!tag.hasAttribute('id') && tag.hasAttribute('url')) + // If this tag doesn't have an id attribute and the url attribute doesn't really look + // like an URL, copy the value of the url attribute, so that the tag acts like + // [media=youtube id=xxxx]xxxx[/media] + if (!tag.hasAttribute('id') + && tag.hasAttribute('url') + && tag.getAttribute('url').indexOf('://') === -1) { tag.setAttribute('id', tag.getAttribute('url')); } diff --git a/tests/Plugins/MediaEmbed/ParserTest.php b/tests/Plugins/MediaEmbed/ParserTest.php index <HASH>..<HASH> 100644 --- a/tests/Plugins/MediaEmbed/ParserTest.php +++ b/tests/Plugins/MediaEmbed/ParserTest.php @@ -256,6 +256,23 @@ class ParserTest extends Test ); } ], + [ + // Test that we don't replace the "id" attribute with an URL + '[media=foo]http://example.org/123[/media]', + '<rt><FOO id="123" url="http://example.org/123">[media=foo]http://example.org/123[/media]</FOO></rt>', + [], + function ($configurator) + { + $configurator->MediaEmbed->add( + 'foo', + [ + 'host' => 'foo.example.org', + 'extract' => "/(?'id'\\d+)/", + 'template' => '' + ] + ); + } + ], ]; }
MediaEmbed: improved [media] tag by not replacing @id with @url if @url looks like a URL
s9e_TextFormatter
train
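The commit above hinges on one small heuristic, which is worth seeing on its own. A minimal Python sketch, for illustration only (the function name and sample values are invented, not from the project):

def looks_like_url(value):
    # A '://' scheme separator marks a full URL rather than a bare media
    # id, so such a value must not be copied into the id attribute.
    return '://' in value

assert looks_like_url('http://example.org/123')
assert not looks_like_url('xxxxxxx')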
9f82d96073998f5d4b2e96e1121c66a9545a8276
diff --git a/openquake/calculators/reportwriter.py b/openquake/calculators/reportwriter.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/reportwriter.py +++ b/openquake/calculators/reportwriter.py @@ -135,7 +135,7 @@ def build_report(job_ini, output_dir=None): """ calc_id = logs.init() oq = readinput.get_oqparam(job_ini) - if oq.calculation_mode == 'classical': + if oq.calculation_mode in 'classical disaggregation': oq.calculation_mode = 'preclassical' oq.ground_motion_fields = False output_dir = output_dir or os.path.dirname(job_ini) diff --git a/openquake/hazardlib/calc/disagg.py b/openquake/hazardlib/calc/disagg.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/calc/disagg.py +++ b/openquake/hazardlib/calc/disagg.py @@ -27,7 +27,7 @@ import numpy import scipy.stats from openquake.hazardlib import pmf, contexts, const -from openquake.baselib.hdf5 import ArrayWrapper +from openquake.baselib import hdf5, performance from openquake.baselib.general import pack, groupby, AccumDict from openquake.hazardlib.calc import filters from openquake.hazardlib.geo.geodetic import npoints_between @@ -55,7 +55,8 @@ def _site_indices(sids_by_rup, N): return mat -def _disaggregate(cmaker, sitecol, rupdata, indices, iml2, eps3): +def _disaggregate(cmaker, sitecol, rupdata, indices, iml2, eps3, + pne_mon=performance.Monitor()): # disaggregate (separate) PoE in different contributions # returns AccumDict with keys (poe, imt) and mags, dists, lons, lats [sid] = sitecol.sids @@ -86,12 +87,13 @@ def _disaggregate(cmaker, sitecol, rupdata, indices, iml2, eps3): acc['lons'].append(rctx.lon_[sidx]) acc['lats'].append(rctx.lat_[sidx]) acc['dists'].append(dist) - for m, imt in enumerate(iml2.imts): - for p, poe in enumerate(iml2.poes_disagg): - iml = iml2[m, p] - pne = disaggregate_pne( - gsim, rctx, sitecol, dctx, imt, iml, *eps3) - acc[p, m].append(pne) + with pne_mon: + for m, imt in enumerate(iml2.imts): + for p, poe in enumerate(iml2.poes_disagg): + iml = iml2[m, p] + pne = disaggregate_pne( + gsim, rctx, sitecol, dctx, imt, iml, *eps3) + acc[p, m].append(pne) return pack(acc, 'mags dists lons lats P M'.split()) @@ -225,9 +227,8 @@ def build_matrices(rupdata, sitecol, cmaker, iml2s, trunclevel, for sid, iml2 in zip(sitecol.sids, iml2s): singlesitecol = sitecol.filtered([sid]) bins = get_bins(bin_edges, sid) - with pne_mon: - bdata = _disaggregate(cmaker, singlesitecol, rupdata, - indices[sid], iml2, eps3) + bdata = _disaggregate(cmaker, singlesitecol, rupdata, + indices[sid], iml2, eps3, pne_mon) with mat_mon: mat = _build_disagg_matrix(bdata, bins) if mat.any(): # nonzero @@ -335,8 +336,8 @@ def disaggregation( by_trt = groupby(sources, operator.attrgetter('tectonic_region_type')) bdata = {} sitecol = SiteCollection([site]) - iml2 = ArrayWrapper(numpy.array([[iml]]), - dict(imts=[imt], poes_disagg=[None], rlzi=0)) + iml2 = hdf5.ArrayWrapper(numpy.array([[iml]]), + dict(imts=[imt], poes_disagg=[None], rlzi=0)) eps3 = _eps3(truncation_level, n_epsilons) for trt, srcs in by_trt.items(): cmaker = ContextMaker(
Small cleanup on the disaggregation
gem_oq-engine
train
4e436f3912cd74353da42e0410f76d675137d62e
diff --git a/runner/lua/lua.go b/runner/lua/lua.go index <HASH>..<HASH> 100644 --- a/runner/lua/lua.go +++ b/runner/lua/lua.go @@ -5,6 +5,7 @@ import ( "github.com/valyala/fasthttp" "github.com/yuin/gopher-lua" "golang.org/x/net/context" + "math" "time" ) @@ -26,6 +27,7 @@ func New(filename, src string) *LuaRunner { Source: src, Client: &fasthttp.Client{ MaxIdleConnDuration: time.Duration(0), + MaxConnsPerHost: math.MaxInt32, }, } }
Don't limit per-host connections
loadimpact_k6
train
909695169c1ed7772ed436dcc762ba179feb4055
diff --git a/system/src/Grav/Common/GPM/GPM.php b/system/src/Grav/Common/GPM/GPM.php index <HASH>..<HASH> 100644 --- a/system/src/Grav/Common/GPM/GPM.php +++ b/system/src/Grav/Common/GPM/GPM.php @@ -206,15 +206,14 @@ class GPM extends Iterator public function getLatestVersionOfPackage($package_name) { $repository = $this->repository['plugins']; - if (isset($repository[$package_name])) { - return $repository[$package_name]->version; + return $repository[$package_name]->available; } //Not a plugin, it's a theme? $repository = $this->repository['themes']; if (isset($repository[$package_name])) { - return $repository[$package_name]->version; + return $repository[$package_name]->available; } return null;
Correct getting the latest available version of a package
getgrav_grav
train
612fd0b1d2c936877509a1979f2eb5e2e96989ba
diff --git a/vyked/host.py b/vyked/host.py index <HASH>..<HASH> 100644 --- a/vyked/host.py +++ b/vyked/host.py @@ -49,7 +49,6 @@ class Host: _logger.error('Invalid argument attached as service') cls._set_bus(service) - @classmethod def run(cls): if cls._tcp_service or cls._http_service: @@ -72,7 +71,7 @@ class Host: ssl_context = cls._tcp_service.ssl_context host_ip, host_port = cls._tcp_service.socket_address task = asyncio.get_event_loop().create_server(partial(get_vyked_protocol, cls._tcp_service.tcp_bus), - host_ip, host_port, ssl= ssl_context) + host_ip, host_port, ssl=ssl_context) result = asyncio.get_event_loop().run_until_complete(task) return result @@ -134,7 +133,8 @@ class Host: if not cls.ronin: if cls._tcp_service: asyncio.get_event_loop().run_until_complete( - cls._tcp_service.pubsub_bus.create_pubsub_handler(cls.pubsub_host, cls.pubsub_port)) + cls._tcp_service.pubsub_bus + .create_pubsub_handler(cls.pubsub_host, cls.pubsub_port)) if cls._http_service: asyncio.get_event_loop().run_until_complete( cls._http_service.pubsub_bus.create_pubsub_handler(cls.pubsub_host, cls.pubsub_port)) @@ -156,11 +156,12 @@ class Host: @classmethod def _set_bus(cls, service): - registry_client = RegistryClient(asyncio.get_event_loop(), cls.registry_host, cls.registry_port, cls.registry_client_ssl) + registry_client = RegistryClient( + asyncio.get_event_loop(), cls.registry_host, cls.registry_port, cls.registry_client_ssl) tcp_bus = TCPBus(registry_client) registry_client.conn_handler = tcp_bus - pubsub_bus = PubSubBus(registry_client, ssl_context=cls._tcp_service._ssl_context) - #pubsub_bus = PubSubBus(registry_client)#, cls._tcp_service._ssl_context) + # pubsub_bus = PubSubBus(registry_client, ssl_context=cls._tcp_service._ssl_context) + pubsub_bus = PubSubBus(registry_client) # , cls._tcp_service._ssl_context) registry_client.bus = tcp_bus if isinstance(service, TCPService):
Commented out pubsub _ssl_context initialization & some pep8 changes
kashifrazzaqui_vyked
train
49c3d9100c86870af1291158a93eee1b237acf53
diff --git a/ui/src/alerts/components/AlertsTable.js b/ui/src/alerts/components/AlertsTable.js index <HASH>..<HASH> 100644 --- a/ui/src/alerts/components/AlertsTable.js +++ b/ui/src/alerts/components/AlertsTable.js @@ -28,15 +28,11 @@ class AlertsTable extends Component { filterAlerts = (searchTerm, newAlerts) => { const alerts = newAlerts || this.props.alerts const filterText = searchTerm.toLowerCase() - const filteredAlerts = alerts.filter(h => { - if (h.host === null || h.name === null || h.level === null) { - return false - } - + const filteredAlerts = alerts.filter(({name, host, level}) => { return ( - h.name.toLowerCase().includes(filterText) || - h.host.toLowerCase().includes(filterText) || - h.level.toLowerCase().includes(filterText) + (name && name.toLowerCase().includes(filterText)) || + (host && host.toLowerCase().includes(filterText)) || + (level && level.toLowerCase().includes(filterText)) ) }) this.setState({searchTerm, filteredAlerts})
Fix alerts table to display if alerts are missing a host, name, or level
influxdata_influxdb
train
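The fix above swaps a hard reject for null-tolerant matching. A hedged Python restatement (the field names follow the commit; everything else is assumed):

def filter_alerts(alerts, search_term):
    term = search_term.lower()
    # A missing name/host/level no longer hides the whole alert; an
    # absent field simply cannot contribute a match.
    return [
        alert for alert in alerts
        if any(value and term in value.lower()
               for value in (alert.get('name'), alert.get('host'), alert.get('level')))
    ]

rows = [{'name': 'cpu_high', 'host': None, 'level': 'crit'}]
assert filter_alerts(rows, 'crit') == rows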
560fd7e29a87ff6f23fb4bac0ae6a0093fce70cb
diff --git a/cherrypy/test/test_states.py b/cherrypy/test/test_states.py index <HASH>..<HASH> 100644 --- a/cherrypy/test/test_states.py +++ b/cherrypy/test/test_states.py @@ -411,6 +411,13 @@ class SignalHandlingTests(helper.CPWebCase): def test_signal_handler_unsubscribe(self): self._require_signal_and_kill('SIGTERM') + # Although Windows has `os.kill` and SIGTERM is defined, the + # platform does not implement signals and sending SIGTERM + # will result in a forced termination of the process. + # Therefore, this test is not suitable for Windows. + if os.name == 'nt': + self.skip("SIGTERM not available") + # Spawn a normal, undaemonized process. p = helper.CPProcess(ssl=(self.scheme.lower()=='https')) p.write_conf( @@ -418,7 +425,7 @@ class SignalHandlingTests(helper.CPWebCase): test_case_name: "test_signal_handler_unsubscribe" """) p.start(imports='cherrypy.test._test_states_demo') - # Send a SIGTERM + # Ask the process to quit os.kill(p.get_pid(), signal.SIGTERM) # This might hang if things aren't working right, but meh. p.join()
Skip a test that consistently fails on Windows due to the implementation of os.kill (Python <I> and later). --HG-- branch : cherrypy-<I>.x
cherrypy_cheroot
train
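The reasoning in the message above boils down to a platform guard. A self-contained Python sketch (the test body is an assumption; only the guard mirrors the commit):

import os
import signal
import unittest

class SignalHandlingSketch(unittest.TestCase):
    def test_sigterm_is_catchable(self):
        # Windows defines SIGTERM, but os.kill() there force-terminates
        # the target instead of invoking a handler, so skip the test.
        if os.name == 'nt':
            self.skipTest('SIGTERM not available')
        caught = []
        signal.signal(signal.SIGTERM, lambda signum, frame: caught.append(signum))
        os.kill(os.getpid(), signal.SIGTERM)
        self.assertEqual(caught, [signal.SIGTERM])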
53106010d98612ee0efd794d9850d05bcd4dfb9b
diff --git a/README.md b/README.md index <HASH>..<HASH> 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ $request->setTransactionReference($transactionReference); // Get the response message ready for returning. $response = $request->send(); -if (! $request->checkSignature()) { +if (! $request->isValid()) { // Respond to Sage Pay indicating we are not accepting anything about this message. // You might want to log `$request->getData()` first, for later analysis. diff --git a/src/Message/ServerNotifyRequest.php b/src/Message/ServerNotifyRequest.php index <HASH>..<HASH> 100644 --- a/src/Message/ServerNotifyRequest.php +++ b/src/Message/ServerNotifyRequest.php @@ -138,7 +138,7 @@ class ServerNotifyRequest extends AbstractRequest implements NotificationInterfa /** * Check whether the ignature is valid. */ - public function checkSignature() + public function isValid() { return $this->getSignature() == $this->buildSignature(); } @@ -259,7 +259,7 @@ class ServerNotifyRequest extends AbstractRequest implements NotificationInterfa public function getTransactionStatus() { // If the signature check fails, then all bets are off - the POST cannot be trusted. - if (!$this->checkSignature()) { + if (!$this->isValid()) { return static::STATUS_FAILED; } diff --git a/src/Message/ServerNotifyResponse.php b/src/Message/ServerNotifyResponse.php index <HASH>..<HASH> 100644 --- a/src/Message/ServerNotifyResponse.php +++ b/src/Message/ServerNotifyResponse.php @@ -40,7 +40,7 @@ class ServerNotifyResponse extends Response public function confirm($nextUrl, $detail = null) { // If the signature is invalid, then do not allow the confirm. - if (!$this->request->checkSignature()) { + if (!$this->request->isValid()) { throw new InvalidResponseException('Attempted to confirm an invalid notification'); } @@ -48,6 +48,14 @@ class ServerNotifyResponse extends Response } /** + * Alias for confirm(), trying to define some more general conventions. + */ + public function accept($nextUrl, $detail = null) + { + return $this->confirm($nextUrl, $detail); + } + + /** * Error * * Notify Sage Pay you received the payment details but there was an error and the payment @@ -59,7 +67,7 @@ class ServerNotifyResponse extends Response public function error($nextUrl, $detail = null) { // If the signature is invalid, then do not allow the confirm. - if (!$this->request->checkSignature()) { + if (!$this->request->isValid()) { throw new InvalidResponseException('Attempted to reject an invalid notification'); } @@ -67,6 +75,14 @@ class ServerNotifyResponse extends Response } /** + * Alias for error(), trying to define some more general conventions. + */ + public function reject($nextUrl, $detail = null) + { + return $this->error($nextUrl, $detail); + } + + /** * Convenience method. */ public function getData()
Changed checkSignature() to isValid() as a more general name (can check IP addresses, for example).
thephpleague_omnipay-sagepay
train
4e3464d182914da025123d79c794002054f1ee8c
diff --git a/middleware/validation.go b/middleware/validation.go index <HASH>..<HASH> 100644 --- a/middleware/validation.go +++ b/middleware/validation.go @@ -24,30 +24,6 @@ import ( "github.com/go-openapi/swag" ) -// NewValidation starts a new validation middleware -func newValidation(ctx *Context, next http.Handler) http.Handler { - - return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - matched, rCtx, _ := ctx.RouteInfo(r) - if rCtx != nil { - r = rCtx - } - if matched == nil { - ctx.NotFound(rw, r) - return - } - _, r, result := ctx.BindAndValidate(r, matched) - - if result != nil { - ctx.Respond(rw, r, matched.Produces, matched, result) - return - } - - debugLog("no result for %s %s", r.Method, r.URL.EscapedPath()) - next.ServeHTTP(rw, r) - }) -} - type validation struct { context *Context result []error @@ -56,15 +32,6 @@ type validation struct { bound map[string]interface{} } -type untypedBinder map[string]interface{} - -func (ub untypedBinder) BindRequest(r *http.Request, route *MatchedRoute, consumer runtime.Consumer) error { - if err := route.Binder.Bind(r, route.Params, consumer, ub); err != nil { - return err - } - return nil -} - // ContentType validates the content type of a request func validateContentType(allowed []string, actual string) error { debugLog("validating content type for %q against [%s]", actual, strings.Join(allowed, ", ")) diff --git a/middleware/validation_test.go b/middleware/validation_test.go index <HASH>..<HASH> 100644 --- a/middleware/validation_test.go +++ b/middleware/validation_test.go @@ -26,11 +26,33 @@ import ( "github.com/stretchr/testify/assert" ) +func newTestValidation(ctx *Context, next http.Handler) http.Handler { + + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + matched, rCtx, _ := ctx.RouteInfo(r) + if rCtx != nil { + r = rCtx + } + if matched == nil { + ctx.NotFound(rw, r) + return + } + _, r, result := ctx.BindAndValidate(r, matched) + + if result != nil { + ctx.Respond(rw, r, matched.Produces, matched, result) + return + } + + next.ServeHTTP(rw, r) + }) +} + func TestContentTypeValidation(t *testing.T) { spec, api := petstore.NewAPI(t) context := NewContext(spec, api, nil) context.router = DefaultRouter(spec, context.api) - mw := newValidation(context, http.HandlerFunc(terminator)) + mw := newTestValidation(context, http.HandlerFunc(terminator)) recorder := httptest.NewRecorder() request, _ := http.NewRequest("GET", "/api/pets", nil) @@ -82,7 +104,7 @@ func TestResponseFormatValidation(t *testing.T) { spec, api := petstore.NewAPI(t) context := NewContext(spec, api, nil) context.router = DefaultRouter(spec, context.api) - mw := newValidation(context, http.HandlerFunc(terminator)) + mw := newTestValidation(context, http.HandlerFunc(terminator)) recorder := httptest.NewRecorder() request, _ := http.NewRequest("POST", "/api/pets", bytes.NewBuffer([]byte(`name: Dog`)))
Bugfix: middleware/validation removes unused code. Removes unused `untypedBinder` and moves `newValidation` to the test module, renaming it `newTestValidation` as it is only used for test purposes.
go-openapi_runtime
train
561c4b208d55f57752073ecca63ab7443326b69d
diff --git a/core/server/web/api/v2/content/routes.js b/core/server/web/api/v2/content/routes.js index <HASH>..<HASH> 100644 --- a/core/server/web/api/v2/content/routes.js +++ b/core/server/web/api/v2/content/routes.js @@ -1,18 +1,11 @@ const express = require('express'); const api = require('../../../../api'); const apiv2 = require('../../../../api/v2'); -const shared = require('../../../shared'); const mw = require('./middleware'); module.exports = function apiRoutes() { const router = express.Router(); - // alias delete with del - router.del = router.delete; - - // ## CORS pre-flight check - router.options('*', shared.middlewares.api.cors); - // ## Configuration router.get('/configuration', api.http(api.configuration.read));
Removed OPTIONS cors middleware from content api (#<I>). no-issue. The content API only supports GET requests so has no need for cors middleware on OPTIONS. This also removes the router.del helper as it's not used
TryGhost_Ghost
train
56b5cd99f42da0b41fe0d005642fdcba0437e975
diff --git a/dipper/sources/IMPC.py b/dipper/sources/IMPC.py index <HASH>..<HASH> 100644 --- a/dipper/sources/IMPC.py +++ b/dipper/sources/IMPC.py @@ -710,7 +710,8 @@ class IMPC(Source): provenance_model.add_study_to_measurements( study_bnode, measurements.keys()) self.graph.addTriple( - evidence_line_bnode, self.resolve('has_supporting_reference'), study_bnode) + evidence_line_bnode, self.resolve('has_evidence_item_output_from'), + study_bnode) return evidence_line_bnode
revert to a term I had lost track of
monarch-initiative_dipper
train
ed2e70d2444d481a4a82de308e1b9f9aea4d5455
diff --git a/lib/epub/cfi.rb b/lib/epub/cfi.rb index <HASH>..<HASH> 100644 --- a/lib/epub/cfi.rb +++ b/lib/epub/cfi.rb @@ -1,5 +1,7 @@ module EPUB class CFI < Struct.new(:path, :range) + include Comparable + SPECIAL_CHARS = '^[](),;=' # "5E", "5B", "5D", "28", "29", "2C", "3B", "3D" RE_ESCAPED_SPECIAL_CHARS = Regexp.escape(SPECIAL_CHARS) @@ -13,19 +15,54 @@ module EPUB end end + # @todo consider range + def <=>(other) + path <=> other.path + end + class Path < Struct.new(:step, :local_path) + include Comparable + + def <=>(other) + cmp = step <=> other.step + return cmp unless cmp == 0 + local_path <=> other.local_path + end end class Range < Struct.new(:start, :end) end class LocalPath < Struct.new(:steps, :redirected_path, :offset) + include Comparable + + def <=>(other) + cmp = steps <=> other.steps + return cmp unless cmp == 0 + cmp = redirected_path <=> other.redirected_path + return cmp unless cmp == 0 + return -1 if offset.nil? and !other.offset.nil? + return 1 if !offset.nil? and other.offset.nil? + offset <=> other.offset + end end class RedirectedPath < Struct.new(:path, :offset) + include Comparable + + def <=>(other) + cmp = path <=> other.path + return cmp unless cmp == 0 + offset <=> other.offset + end end class Step < Struct.new(:step, :assertion) + include Comparable + + def <=>(other) + step <=> other.step + end end class IDAssertion < Struct.new(:id, :parameters) @@ -35,9 +72,16 @@ module EPUB end class CharacterOffset < Struct.new(:offset, :assertion) + include Comparable + + def <=>(other) + offset <=> other.offset + end end class SpatialOffset < Struct.new(:x, :y, :temporal, :assertion) + include Comparable + def initialize(x, y, temporal, assertion) [x, y].each do |dimension| next unless dimension @@ -66,6 +110,21 @@ module EPUB super end + + # @note should split the class to spatial offset and temporal-spatial offset? + def <=>(other) + return -1 if temporal.nil? and !other.temporal.nil? + return 1 if !temporal.nil? and other.temporal.nil? + cmp = temporal <=> other.temporal + return cmp unless cmp == 0 + return -1 if y.nil? and !other.y.nil? + return 1 if !y.nil? and other.y.nil? + cmp = y <=> other.y + return cmp unless cmp == 0 + return -1 if x.nil? and !other.x.nil? + return 1 if !x.nil? and other.x.nil? + cmp = x <=> other.x + end end end end diff --git a/lib/epub/parser/cfi.rb b/lib/epub/parser/cfi.rb index <HASH>..<HASH> 100644 --- a/lib/epub/parser/cfi.rb +++ b/lib/epub/parser/cfi.rb @@ -5,6 +5,8 @@ require 'epub/cfi' EPUB::Parser::CFI = EPUB::CFIParser class EPUB::Parser::CFI + include Comparable + UNICODE_CHARACTER_EXCLUDING_SPECIAL_CHARS_AND_SPACE_AND_DOT_AND_COLON_AND_TILDE_AND_ATMARK_AND_SOLIDUS_AND_EXCLAMATION_MARK_PATTERN = /\u0009|\u000A|\u000D|[\u0022-\u0027]|[\u002A-\u002B]|\u002D|[\u0030-\u0039]|\u003C|[\u003E-\u0040]|[\u0041-\u005A]|\u005C|[\u005F-\u007D]|[\u007F-\uD7FF]|[\uE000-\uFFFD]|[\u10000-\u10FFFF]/ # excluding special chars and space(\u0020) and dot(\u002E) and colon(\u003A) and tilde(\u007E) and atmark(\u0040) and solidus(\u002F) and exclamation mark(\u0021) UNICODE_CHARACTER_PATTERN = Regexp.union(UNICODE_CHARACTER_EXCLUDING_SPECIAL_CHARS_AND_SPACE_AND_DOT_AND_COLON_AND_TILDE_AND_ATMARK_AND_SOLIDUS_AND_EXCLAMATION_MARK_PATTERN, Regexp.new(Regexp.escape(EPUB::CFI::SPECIAL_CHARS), / \.:~@!/))
Implement CFI sorting rules (partially)
KitaitiMakoto_epub-parser
train
491a130feb1fc43bbbb8aaea72fb6cefae1d656c
diff --git a/src/MultiRequest.php b/src/MultiRequest.php index <HASH>..<HASH> 100644 --- a/src/MultiRequest.php +++ b/src/MultiRequest.php @@ -10,7 +10,9 @@ */ namespace chillerlan\TinyCurl; + use chillerlan\TinyCurl\Response\MultiResponse; +use chillerlan\TinyCurl\Response\MultiResponseHandlerInterface; /** * Class MultiRequest @@ -27,6 +29,8 @@ class MultiRequest{ protected $curl_multi; /** + * cURL options for each handle + * * @var array */ protected $curl_options = []; @@ -46,11 +50,6 @@ class MultiRequest{ protected $responses = []; /** - * @var array - */ - protected $failed_requests = []; - - /** * concurrent request counter * * @var int @@ -73,10 +72,8 @@ class MultiRequest{ * @param \chillerlan\TinyCurl\MultiRequestOptions $options */ public function __construct(MultiRequestOptions $options){ - $this->options = $options; - $this->multiResponseHandler = new $options->handler($this); - + $this->setHandler(); $ca_info = is_file($this->options->ca_info) ? $this->options->ca_info : null; $this->curl_options = $this->options->curl_options + [ CURLOPT_RETURNTRANSFER => true, @@ -99,21 +96,62 @@ class MultiRequest{ } /** + * @param \chillerlan\TinyCurl\Response\MultiResponseHandlerInterface|null $handler + * + * @return $this + * @throws \chillerlan\TinyCurl\RequestException + */ + public function setHandler(MultiResponseHandlerInterface $handler = null){ + + if(!$handler){ + + if(!class_exists($this->options->handler)){ + throw new RequestException('!$this->options->handler'); + } + + $handler = new $this->options->handler($this); + + if(!is_a($handler, MultiResponseHandlerInterface::class)){ + throw new RequestException('!is_a($handler)'); + } + + } + + $this->multiResponseHandler = $handler; + + return $this; + } + + /** * @param array $urls + * + * @return $this + * @throws \chillerlan\TinyCurl\RequestException */ public function fetch(array $urls){ + + if(!$this->multiResponseHandler){ + throw new RequestException(); + } + $this->urls = $urls; $this->request_count = count($this->urls); $this->curl_multi = curl_multi_init(); $this->getResponse(); + + return $this; } /** * @param mixed $response + * * @see \chillerlan\TinyCurl\Response\MultiResponseHandlerInterface + * @return $this */ public function addResponse($response){ $this->responses[] = $response; + + return $this; } /**
added MultiRequest::setHandler();
chillerlan_php-curl
train
580f884674d068e05f11671f4dd8072f07160f5b
diff --git a/ember_debug/general-debug.js b/ember_debug/general-debug.js index <HASH>..<HASH> 100644 --- a/ember_debug/general-debug.js +++ b/ember_debug/general-debug.js @@ -22,8 +22,16 @@ var GeneralDebug = Ember.Object.extend(PortMixin, { this.sendBooted(); }, getLibraries: function() { - var libraries = arrayize(Ember.libraries); - this.sendMessage('libraries', { libraries: libraries }); + var libraries = Ember.libraries; + + // Ember has changed where the array of libraries is located. + // In older versions, `Ember.libraries` was the array itself, + // but now it's found under _registry. + if (libraries._registry) { + libraries = libraries._registry; + } + + this.sendMessage('libraries', { libraries: arrayize(libraries) }); }, refresh: function() { window.location.reload(); diff --git a/shared/in-page-script.js b/shared/in-page-script.js index <HASH>..<HASH> 100644 --- a/shared/in-page-script.js +++ b/shared/in-page-script.js @@ -4,6 +4,13 @@ $(function() { var libraries = window.Ember && window.Ember.libraries; if (libraries) { + // Ember has changed where the array of libraries is located. + // In older versions, `Ember.libraries` was the array itself, + // but now it's found under _registry. + if (libraries._registry) { + libraries = libraries._registry; + } + var versions = Array.prototype.slice.call(libraries, 0); window.postMessage({ type: 'emberVersion',
Changes where we find the Ember.libraries array of registered libs, which changed with emberjs/ember.js#<I>
emberjs_ember-inspector
train
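The pattern here is feature detection over version detection. A loose Python analogue (the class layout is invented for illustration; only the `_registry` probe comes from the commit):

def resolve_libraries(libraries):
    # Newer Ember nests the array under `_registry`; older versions
    # expose the array directly. Probe the shape instead of the version.
    registry = getattr(libraries, '_registry', None)
    return registry if registry is not None else libraries

class OldStyle(list):
    pass

class NewStyle:
    _registry = ['ember 2.x']

assert resolve_libraries(OldStyle(['ember 1.x'])) == ['ember 1.x']
assert resolve_libraries(NewStyle()) == ['ember 2.x']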
d0967bf594eac6a878a064b88e7c1a3e1ac4b9fb
diff --git a/packages/card/src/Card.story.js b/packages/card/src/Card.story.js index <HASH>..<HASH> 100644 --- a/packages/card/src/Card.story.js +++ b/packages/card/src/Card.story.js @@ -2,6 +2,7 @@ import React from "react"; import { storiesOf } from "@storybook/react"; import { withInfo } from "@storybook/addon-info"; import EmptyState from "@crave/farmblocks-empty-state"; +import Footer from "@crave/farmblocks-footer"; import { cardTypes } from "./constants/cardTypes"; import Card from "."; @@ -30,4 +31,18 @@ storiesOf("Card", module) <EmptyState title="My Card Title" description="My card description" /> </Card> )) + ) + .add( + "With content (footer and empty state) and 1136px width", + withInfo()(() => ( + <Card padding="0" width="1136px"> + <EmptyState title="My Card Title" description="My card description" /> + <Footer + helpText="Have questions about account setup?" + helpLinkText="Get support" + helpLinkHref="#" + helpImageSrc="https://crave-whatsgood-sandbox.imgix.net/businesses/32/inventory/8fae5d32-f6d4-47bb-8062-e4e85c47788b.png" + /> + </Card> + )) );
test(Card storybook): add a story for card with footer and empty-state affects: @crave/farmblocks-card
CraveFood_farmblocks
train
58bd18cb1ceb31981b7bddc3821d8ca4117791fc
diff --git a/test/org/mockitousage/basicapi/MocksSerializationTest.java b/test/org/mockitousage/basicapi/MocksSerializationTest.java index <HASH>..<HASH> 100644 --- a/test/org/mockitousage/basicapi/MocksSerializationTest.java +++ b/test/org/mockitousage/basicapi/MocksSerializationTest.java @@ -9,15 +9,18 @@ import org.fest.assertions.Assertions; import org.junit.Ignore; import org.junit.Test; import org.mockito.InOrder; +import org.mockito.Mockito; import org.mockito.exceptions.base.MockitoException; import org.mockito.internal.matchers.Any; import org.mockito.internal.stubbing.answers.ThrowsException; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.mockitousage.IMethods; +import org.mockitoutil.SimpleSerializationUtil; import org.mockitoutil.TestBase; import java.io.ByteArrayOutputStream; +import java.io.ObjectStreamException; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; @@ -25,19 +28,8 @@ import java.util.List; import java.util.Observable; import static org.mockito.Matchers.anyString; -import static org.mockito.Mockito.CALLS_REAL_METHODS; -import static org.mockito.Mockito.RETURNS_DEEP_STUBS; -import static org.mockito.Mockito.anyObject; -import static org.mockito.Mockito.inOrder; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.withSettings; -import static org.mockitoutil.SimpleSerializationUtil.deserializeMock; -import static org.mockitoutil.SimpleSerializationUtil.serializeAndBack; -import static org.mockitoutil.SimpleSerializationUtil.serializeMock; +import static org.mockito.Mockito.*; +import static org.mockitoutil.SimpleSerializationUtil.*; @SuppressWarnings({"unchecked", "serial"}) public class MocksSerializationTest extends TestBase implements Serializable { @@ -387,6 +379,30 @@ public class MocksSerializationTest extends TestBase implements Serializable { } + + public static class AClassWithPrivateNoArgConstructor { + private AClassWithPrivateNoArgConstructor() {} + List returningSomething() { return Collections.emptyList(); } + } + + @Test + public void private_constructor_currently_not_supported_at_the_moment_at_deserialization_time() throws Exception { + // given + AClassWithPrivateNoArgConstructor mockWithPrivateConstructor = Mockito.mock( + AClassWithPrivateNoArgConstructor.class, + Mockito.withSettings().serializable() + ); + + try { + // when + SimpleSerializationUtil.serializeAndBack(mockWithPrivateConstructor); + } catch (ObjectStreamException e) { + // then + Assertions.assertThat(e.getMessage()).contains("no valid constructor"); + } + } + + @Test @Ignore("Bug to fix !!! see issue 399") public void BUG_ISSUE_399_try_some_mocks_with_current_answers() throws Exception {
added test that documents the fact that mocks cannot be deserialized if the mocked type has a private constructor. It might be possible in the future though it might require tricks with objenesis and the custom object streams.
mockito_mockito
train
f92f89388a81b52a1e16baed64c1d1382fff88ca
diff --git a/lib/saddle/endpoint.rb b/lib/saddle/endpoint.rb index <HASH>..<HASH> 100644 --- a/lib/saddle/endpoint.rb +++ b/lib/saddle/endpoint.rb @@ -20,24 +20,36 @@ module Saddle @relative_path = relative_path_override || _relative_path() end + + # Generic request wrapper + def request(method, action, params={}, options={}) + # Augment in interesting options + options[:saddle] ||= {} + options[:saddle] = { + :call_chain => _path_array(), + :action => action, + } + @requester.send(method, _path(action), params, options) + end + # Provide GET functionality for the implementer class def get(action, params={}, options={}) - _request(:get, action, params, options) + request(:get, action, params, options) end # Provide POST functionality for the implementer class def post(action, params={}, options={}) - _request(:post, action, params, options) + request(:post, action, params, options) end # Provide PUT functionality for the implementer class def put(action, params={}, options={}) - _request(:put, action, params, options) + request(:put, action, params, options) end # Provide DELETE functionality for the implementer class def delete(action, params={}, options={}) - _request(:delete, action, params, options) + request(:delete, action, params, options) end @@ -67,17 +79,6 @@ module Saddle protected - def _request(method, action, params={}, options={}) - # Augment in interesting options - options[:saddle] ||= {} - options[:saddle] = { - :call_chain => _path_array(), - :action => action, - } - @requester.send(method, _path(action), params, options) - end - - # Get the url path for this endpoint/action combo def _path(action=nil) paths = _path_array() diff --git a/lib/saddle/version.rb b/lib/saddle/version.rb index <HASH>..<HASH> 100644 --- a/lib/saddle/version.rb +++ b/lib/saddle/version.rb @@ -1,3 +1,3 @@ module Saddle - VERSION = '0.0.25' + VERSION = '0.0.26' end
Endpoint.request is no longer protected
mLewisLogic_saddle
train
4d25a59e3fc972db05174b7fa609ec701dd0e446
diff --git a/course/user.php b/course/user.php index <HASH>..<HASH> 100644 --- a/course/user.php +++ b/course/user.php @@ -70,8 +70,25 @@ case "grade": $course = get_record('course', 'id', required_param('id', PARAM_INT)); if (!empty($course->showgrades)) { - require_once($CFG->dirroot.'/grade/lib.php'); - print_student_grade($user, $course); + require_once $CFG->libdir.'/gradelib.php'; + require_once $CFG->dirroot.'/grade/lib.php'; + require_once $CFG->dirroot.'/grade/report/user/lib.php'; + $context = get_context_instance(CONTEXT_COURSE, $id); + /// return tracking object + $gpr = new grade_plugin_return(array('type'=>'report', 'plugin'=>'user', 'courseid'=>$id, 'userid'=>$user->id)); + // Create a report instance + $report = new grade_report_user($id, $gpr, $context, $user->id); + + $gradetotal = 0; + $gradesum = 0; + + // print the page + print_heading(get_string('modulename', 'gradereport_user'). ' - '.fullname($report->user)); + + if ($report->fill_table()) { + echo $report->print_table(true); + } + // print_student_grade($user, $course); } break;
use the grader report for user activity reports
moodle_moodle
train
aa5f7df81abb6a9be0915a99283eda4b14329456
diff --git a/spec/tcpn_spec.rb b/spec/tcpn_spec.rb index <HASH>..<HASH> 100644 --- a/spec/tcpn_spec.rb +++ b/spec/tcpn_spec.rb @@ -111,25 +111,32 @@ describe FastTCPN::TCPN do let(:process) { net.place :process, name: :name } let(:cpu) { net.place :cpu, name: :name, process: :process } let(:out) { net.place :out } + let(:finished) { net.place :finished } before do - t = net.transition :work - t.sentry do |marking_for, result| + t1 = net.transition :work + t1.sentry do |marking_for, result| marking_for[:process].each do |p| marking_for[:cpu].each(:process, p.value.name) do |c| result << { process: p, cpu: c } end end end - t.input process - t.input cpu - t.output out do |binding| + t1.input process + t1.input cpu + t1.output out do |binding| binding[:process].value.name + "_done" end - t.output cpu do |binding| + t1.output cpu do |binding| binding[:cpu] end + t2 = net.transition :finish + t2.input out + t2.output finished do |binding| + binding[:out] + end + process_count.times do |p| process.add AppProcess.new(p.to_s) cpu_count.times.map { |c| cpu.add CPU.new("CPU#{c}_#{p}", p.to_s) } @@ -147,8 +154,12 @@ describe FastTCPN::TCPN do expect(cpu.marking.size).to eq cpu_count * process_count end - it "puts all tokens in out" do - expect(out.marking.size).to eq process_count + it "leaves no tokens in out" do + expect(out.marking.size).to eq 0 + end + + it "puts all tokens in finished" do + expect(finished.marking.size).to eq process_count end
Extended integration spec for Transition#sim
wrzasa_fast-tcpn
train
586a119624adc09f22e985a64d491f6342e099c6
diff --git a/eureka-core/src/main/java/com/netflix/eureka/util/EIPManager.java b/eureka-core/src/main/java/com/netflix/eureka/util/EIPManager.java index <HASH>..<HASH> 100644 --- a/eureka-core/src/main/java/com/netflix/eureka/util/EIPManager.java +++ b/eureka-core/src/main/java/com/netflix/eureka/util/EIPManager.java @@ -193,7 +193,12 @@ public class EIPManager { List<String> availableEIPList = new ArrayList<String>(); for (String eip : eipCandidates) { String eipTrimmed = eip.trim(); - if (myPublicIP.equals(eipTrimmed)) { + /* It's possible for myPublicIP to be null when eureka is restarted + * because unbinding an EIP on Amazon results in a null IP for the + * instance for a few minutes. In that case, it's ok to rebind the + * new EIP. + */ + if (myPublicIP != null && myPublicIP.equals(eipTrimmed)) { // Already associated to an EIP? logger.debug("Already bound to an EIP : " + eip); return null;
Allow EIP binding if the current IP of the instance is null. It's possible for myPublicIP to be null when eureka is restarted because unbinding an EIP on Amazon results in a null IP for the instance for a few minutes. In that case, it's ok to rebind the new EIP.
Netflix_eureka
train
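The message above explains why a null current IP is a legitimate state. Reduced to a Python sketch (identifiers invented for illustration): the comparison must treat None as "not bound" rather than raising or accidentally matching.

def already_bound(my_public_ip, candidate_eip):
    # None means the instance has no public IP right now (e.g. just
    # after an EIP unbind), so rebinding must be allowed to proceed.
    return my_public_ip is not None and my_public_ip == candidate_eip.strip()

assert not already_bound(None, ' 203.0.113.10 ')
assert already_bound('203.0.113.10', ' 203.0.113.10 ')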
4373657ffcbe25d3344c3dd81602c7c1b244abb6
diff --git a/src/com/google/javascript/jscomp/SymbolTable.java b/src/com/google/javascript/jscomp/SymbolTable.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/SymbolTable.java +++ b/src/com/google/javascript/jscomp/SymbolTable.java @@ -1317,6 +1317,19 @@ public final class SymbolTable return false; } + // Try to remove a reference by its fully qualified name. + // If the symbol has no references left, remove it completely. + private void tryRemoveLexicalQualifiedNameRef(String name, Node n) { + if (name != null) { + Symbol lexicalSym = getEnclosingScope(n).getQualifiedSlot(name); + if (lexicalSym != null && + lexicalSym.isLexicalVariable() && + lexicalSym.getDeclaration().getNode() == n) { + removeSymbol(lexicalSym); + } + } + } + private boolean maybeDefineTypedReference( Node n, String propName, JSType owner) { if (owner.isGlobalThisType()) { @@ -1360,6 +1373,7 @@ public final class SymbolTable n, n.getLastChild().getString(), owner); if (defined) { + tryRemoveLexicalQualifiedNameRef(n.getQualifiedName(), n); return; } } @@ -1372,6 +1386,8 @@ public final class SymbolTable maybeDefineTypedReference(n, n.getString(), owner); if (defined) { + tryRemoveLexicalQualifiedNameRef( + NodeUtil.getBestLValueName(n), n); return; } } diff --git a/test/com/google/javascript/jscomp/SymbolTableTest.java b/test/com/google/javascript/jscomp/SymbolTableTest.java index <HASH>..<HASH> 100644 --- a/test/com/google/javascript/jscomp/SymbolTableTest.java +++ b/test/com/google/javascript/jscomp/SymbolTableTest.java @@ -228,8 +228,11 @@ public class SymbolTableTest extends TestCase { "})();\n"); Symbol ab = getGlobalVar(table, "a.b"); - assertNotNull(ab); - assertEquals(1, table.getReferenceList(ab).size()); + assertNull(ab); + + Symbol propB = getGlobalVar(table, "A.prototype.b"); + assertNotNull(propB); + assertEquals(5, table.getReferenceList(propB).size()); } public void testRemovalOfNamespacedReferencesOfProperties() @@ -419,10 +422,7 @@ public class SymbolTableTest extends TestCase { getGlobalVar(table, "DomHelper.prototype.prop"); assertEquals(3, table.getReferenceList(prop).size()); - Symbol thisDotProp = - getLocalVar(table, "this.prop"); - assertEquals( - 1, table.getReferenceList(thisDotProp).size()); + assertNull(getLocalVar(table, "this.prop")); } public void testFieldReferences() throws Exception {
If we have both a lexical symbol ("property b of variable a") and a property-slot symbol ("property b of the type of a") then we should remove the lexical one. R=acleung DELTA=<I> (<I> added, 3 deleted, 3 changed) Revision created by MOE tool push_codebase. MOE_MIGRATION=<I> git-svn-id: <URL>
google_closure-compiler
train
df4ff01dfec77fe44fc4aff57228c5aef36e17db
diff --git a/spec/client_spec.rb b/spec/client_spec.rb index <HASH>..<HASH> 100644 --- a/spec/client_spec.rb +++ b/spec/client_spec.rb @@ -4,7 +4,6 @@ module RedisFailover Client::Redis = RedisStub Client::Redis::Client = Redis::Client class ClientStub < Client - attr_reader def current_master @master end @@ -130,8 +129,7 @@ module RedisFailover end -######### - describe 'redis connectivity failure handling', :focus => true do + describe 'redis connectivity failure handling' do before(:each) do class << client attr_reader :tries @@ -176,8 +174,6 @@ module RedisFailover end -######### - context 'with :verify_role true' do it 'properly detects when a node has changed roles' do
Cleanup some spec cruft.
ryanlecompte_redis_failover
train
bda21964f81d49765b4f524b14c33a5ed8920774
diff --git a/lib/cld3.rb b/lib/cld3.rb index <HASH>..<HASH> 100644 --- a/lib/cld3.rb +++ b/lib/cld3.rb @@ -44,7 +44,7 @@ module CLD3 RELIABILITY_HR_BS_THRESHOLD = 0.5 # Information about a predicted language. - Result = Struct.new("Result", :language, :probability, :is_reliable, :proportion) + Result = Struct.new("Result", :language, :probability, :reliable?, :proportion) def initialize(minNumBytes = MIN_NUM_BYTES_TO_CONSIDER, maxNumBytes = MAX_NUM_BYTES_TO_CONSIDER) @cc = Pointer.new(CLD3::Unstable.new_NNetLanguageIdentifier(minNumBytes, maxNumBytes)) @@ -66,7 +66,7 @@ module CLD3 Result.new( language == "und" ? nil : language, cc_result[:probability], - cc_result[:is_reliable], + cc_result[:reliable?], cc_result[:proportion]) end @@ -86,7 +86,7 @@ module CLD3 ffi_lib File.join(File.expand_path(File.dirname(__FILE__)), "..", "ext", "cld3", FFI.map_library_name("cld3")) class NNetLanguageIdentifierResult < FFI::Struct - layout :language_data, :pointer, :language_size, :size_t, :probability, :float, :proportion, :float, :is_reliable, :bool + layout :language_data, :pointer, :language_size, :size_t, :probability, :float, :proportion, :float, :reliable?, :bool end attach_function :delete_NNetLanguageIdentifier, [ :pointer ], :void
Rename is_reliable to reliable?
akihikodaki_cld3-ruby
train
12ad9ec8168b2fe619faacc9cb05c0737d16ff33
diff --git a/ApplicationMetricsStandalone/ApplicationMetricsApi/src/main/java/org/jam/metrics/applicationmetricsapi/MetricsCacheApi.java b/ApplicationMetricsStandalone/ApplicationMetricsApi/src/main/java/org/jam/metrics/applicationmetricsapi/MetricsCacheApi.java
index <HASH>..<HASH> 100644
--- a/ApplicationMetricsStandalone/ApplicationMetricsApi/src/main/java/org/jam/metrics/applicationmetricsapi/MetricsCacheApi.java
+++ b/ApplicationMetricsStandalone/ApplicationMetricsApi/src/main/java/org/jam/metrics/applicationmetricsapi/MetricsCacheApi.java
@@ -79,6 +79,21 @@ public class MetricsCacheApi {
         return output;
     }

+    public static synchronized String printMetricCacheKeys(String deployment) {
+        String output = "";
+        Map<String, ArrayList<Object>> cache;
+        Set<String> metricNames;
+        cache = getMetricsCache(deployment);
+        metricNames = cache.keySet();
+
+        Iterator<String> iob = metricNames.iterator();
+        while (iob.hasNext()) {
+            output += "<br>Metric Cache Key : " + iob.next() + "</br>\n";
+        }
+
+        return output;
+    }
+
     public static synchronized ArrayList<Object> getMetricsCacheValuesByKey(String deployment, String key) {
         ArrayList<Object> cacheValues;
         cacheValues = getMetricsCache(deployment).get(key);
@@ -86,6 +101,36 @@ public class MetricsCacheApi {
         return cacheValues;
     }

+    public static synchronized int countMetricsCacheValuesByKey(String deployment, String key) {
+        ArrayList<Object> cacheValues;
+        cacheValues = getMetricsCache(deployment).get(key);
+
+        return (cacheValues!=null?cacheValues.size():0);
+    }
+
+    public static synchronized int countGroupObjectsInMetricCache(String deployment) {
+        int count = 0;
+        Map<String, ArrayList<Object>> cache;
+        Set<String> metricNames;
+        Collection<ArrayList<Object>> metricValues;
+        cache = getMetricsCache(deployment);
+        metricNames = cache.keySet();
+        metricValues = cache.values();
+
+        Iterator<String> iob = metricNames.iterator();
+        Iterator<ArrayList<Object>> iobv = metricValues.iterator();
+        while (iob.hasNext()) {
+            iob.next();
+            if (iobv.hasNext()) {
+                ArrayList<Object> cacheValues = iobv.next();
+                int num = (cacheValues!=null?cacheValues.size():0);
+                count+=num;
+            }
+        }
+
+        return count;
+    }
+
+    // Dummy comparison for test cases.
     public static synchronized boolean compareMetricsCacheValuesByKey(String deployment, String key, ArrayList<Object> valuesToCompare) {
         boolean isEqual = true;
@@ -105,6 +150,11 @@ public class MetricsCacheApi {
         return isEqual;
     }

+    public static synchronized void deleteGroupInMetricsCache(String group)
+    {
+        MetricsCacheCollection.getMetricsCacheCollection().removeMetricsCacheInstance(group);
+    }
+
     public static synchronized void cleanMetricsCache(String group)
     {
         MetricsCacheCollection.getMetricsCacheCollection().getMetricsCacheInstance(group).getMetricCache().clear();
diff --git a/JavaSeApplicationMetrics/ApplicationMetricsJavaSeApiTest3/src/main/java/org/jam/metrics/applicationmetricsjavaseapitest/ApplicationMetricsJavaSeApiTest.java b/JavaSeApplicationMetrics/ApplicationMetricsJavaSeApiTest3/src/main/java/org/jam/metrics/applicationmetricsjavaseapitest/ApplicationMetricsJavaSeApiTest.java
index <HASH>..<HASH> 100644
--- a/JavaSeApplicationMetrics/ApplicationMetricsJavaSeApiTest3/src/main/java/org/jam/metrics/applicationmetricsjavaseapitest/ApplicationMetricsJavaSeApiTest.java
+++ b/JavaSeApplicationMetrics/ApplicationMetricsJavaSeApiTest3/src/main/java/org/jam/metrics/applicationmetricsjavaseapitest/ApplicationMetricsJavaSeApiTest.java
@@ -52,7 +52,19 @@ public class ApplicationMetricsJavaSeApiTest {

         if (MetricsCacheCollection.getMetricsCacheCollection().getMetricsCacheInstance(groupName)!=null)
             System.out.println(MetricsCacheApi.printMetricsCache(groupName));
-
+
+        if (MetricsCacheCollection.getMetricsCacheCollection().getMetricsCacheInstance(groupName)!=null)
+            System.out.println("Count GroupObjects In MetricCache : " + MetricsCacheApi.countGroupObjectsInMetricCache(groupName));
+
+        if (MetricsCacheCollection.getMetricsCacheCollection().getMetricsCacheInstance(groupName)!=null)
+            MetricsCacheApi.deleteGroupInMetricsCache(groupName);
+
+        if (MetricsCacheCollection.getMetricsCacheCollection().getMetricsCacheInstance(groupName)!=null)
+            System.out.println("Count GroupObjects In MetricCache : " + MetricsCacheApi.countGroupObjectsInMetricCache(groupName));
+        else
+            System.out.println("Group " + groupName + " does not exist.");
+
+
     } catch (Exception e) {
             e.printStackTrace();
         }
Adding Jam Api methods for group cache management.
panossot_jam-metrics
train
e5d1907f981fc6b59f3714bc65c6dd997755063c
diff --git a/src/common/eventemitter.js b/src/common/eventemitter.js
index <HASH>..<HASH> 100644
--- a/src/common/eventemitter.js
+++ b/src/common/eventemitter.js
@@ -983,7 +983,7 @@

       while (!currentEvent.done) {
         if (currentEvent.value !== arguments[0]) {
-          let pattern = new RegExp(currentEvent.value.replace(/\./g, '\\.').replace(/\*/gi, '.*'), 'g')
+          let pattern = new RegExp(currentEvent.value.replace(/\./g, '\\.').replace(/\*/g, '.*'), 'g')

           if (pattern.test(arguments[0])) {
             super.emit(currentEvent.value, ...args, Symbol(arguments[0]))
diff --git a/src/common/eventemitter/EventEmitterBase.js b/src/common/eventemitter/EventEmitterBase.js
index <HASH>..<HASH> 100644
--- a/src/common/eventemitter/EventEmitterBase.js
+++ b/src/common/eventemitter/EventEmitterBase.js
@@ -346,7 +346,7 @@ class EventEmitterBase { // eslint-disable-line no-unused-vars
       if (NGN.typeof(event) === 'regexp' || event.indexOf('*') >= 0) {
         // Convert wildcard events to a regular expression.
         if (NGN.typeof(event) !== 'regexp') {
-          event = new RegExp(event.replace('*', '.*', 'gi'))
+          event = new RegExp(event.replace(/\./g, '\\.').replace(/\*/g, '.*'), 'g')
         }

         // If the event name matches the event, keep it.
         return event.test(eventName)
diff --git a/test/common/03-eventemitter.js b/test/common/03-eventemitter.js
index <HASH>..<HASH> 100644
--- a/test/common/03-eventemitter.js
+++ b/test/common/03-eventemitter.js
@@ -537,8 +537,14 @@ test('Wilcard Support', function (t) {

   var ct = 0

-  NGN.BUS.on('test.*', function () { ct += 1 })
-  NGN.BUS.once('testing.*', function () { ct += 1 })
+  NGN.BUS.on('test.*', function () {
+    console.log('test.* triggered by ', this.event)
+    ct += 1
+  })
+  NGN.BUS.once('testing.*', function () {
+    console.log('testing.* triggered by ', this.event)
+    ct += 1
+  })

   t.ok(NGN.BUS.eventNames().length === 2, 'Correctly registered wildcard events.')

@@ -547,7 +553,7 @@ test('Wilcard Support', function (t) {
   NGN.BUS.emit('testing.something')

   setTimeout(function () {
-console.log(ct)
+console.log(ct)

    t.ok(ct === 3, 'Fired the correct number of events.')
    t.ok(NGN.BUS.eventNames().length === 1, 'Standard and adhoc events triggered and removed appropriately.')
diff --git a/test/lib/eventemitter.js b/test/lib/eventemitter.js
index <HASH>..<HASH> 100644
--- a/test/lib/eventemitter.js
+++ b/test/lib/eventemitter.js
@@ -354,7 +354,7 @@ class EventEmitterBase { // eslint-disable-line no-unused-vars
       if (NGN.typeof(event) === 'regexp' || event.indexOf('*') >= 0) {
         // Convert wildcard events to a regular expression.
         if (NGN.typeof(event) !== 'regexp') {
-          event = new RegExp(event.replace('*', '.*', 'gi'))
+          event = new RegExp(event.replace(/\./g, '\\.').replace(/\*/g, '.*'), 'g')
         }

         // If the event name matches the event, keep it.
         return event.test(eventName)
@@ -1344,7 +1344,7 @@ class EventEmitterBase { // eslint-disable-line no-unused-vars
       while (!currentEvent.done) {
         if (currentEvent.value !== arguments[0]) {
-          let pattern = new RegExp(currentEvent.value.replace(/\./g, '\\.').replace(/\*/gi, '.*'), 'g')
+          let pattern = new RegExp(currentEvent.value.replace(/\./g, '\\.').replace(/\*/g, '.*'), 'g')

           if (pattern.test(arguments[0])) {
             super.emit(currentEvent.value, ...args, Symbol(arguments[0]))
diff --git a/test/lib/eventemitter/EventEmitterBase.js b/test/lib/eventemitter/EventEmitterBase.js
index <HASH>..<HASH> 100644
--- a/test/lib/eventemitter/EventEmitterBase.js
+++ b/test/lib/eventemitter/EventEmitterBase.js
@@ -346,7 +346,7 @@ class EventEmitterBase { // eslint-disable-line no-unused-vars
       if (NGN.typeof(event) === 'regexp' || event.indexOf('*') >= 0) {
         // Convert wildcard events to a regular expression.
         if (NGN.typeof(event) !== 'regexp') {
-          event = new RegExp(event.replace('*', '.*', 'gi'))
+          event = new RegExp(event.replace(/\./g, '\\.').replace(/\*/g, '.*'), 'g')
         }

         // If the event name matches the event, keep it.
         return event.test(eventName)
Fixed regex in event emitter for browser.
ngnjs_NGN
train
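The escaping-order bug fixed above is easy to restate. A runnable Python sketch, with re standing in for JavaScript's RegExp (the assertions are illustrative):

import re

def wildcard_to_regex(event):
    # Escape literal dots before expanding '*'; otherwise 'test.*'
    # would also match names where the dot position holds any character.
    return re.compile(event.replace('.', '\\.').replace('*', '.*'))

pattern = wildcard_to_regex('test.*')
assert pattern.match('test.something')
assert not pattern.match('testXsomething')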
4d6f68773ca78b2d0bbaa575ff6e7a17b90ad2c1
diff --git a/datacats/cli/create.py b/datacats/cli/create.py index <HASH>..<HASH> 100644 --- a/datacats/cli/create.py +++ b/datacats/cli/create.py @@ -8,7 +8,7 @@ import sys from os.path import abspath from datacats.environment import Environment -from datacats.cli.install import install_all +from datacats.cli.install import install_all, clean_pyc from datacats.error import DatacatsError from datacats.cli.util import y_or_n_prompt, confirm_password @@ -124,6 +124,7 @@ Options: print 'Resetting...' environment.stop_supporting_containers() environment.stop_ckan() + clean_pyc(environment) environment.purge_data([opts['--site']], never_delete=True) init({ 'ENVIRONMENT_DIR': opts['ENVIRONMENT'], diff --git a/datacats/cli/install.py b/datacats/cli/install.py index <HASH>..<HASH> 100644 --- a/datacats/cli/install.py +++ b/datacats/cli/install.py @@ -5,8 +5,8 @@ # See LICENSE.txt or http://www.fsf.org/licensing/licenses/agpl-3.0.html import sys -from os import listdir -from os.path import isdir, exists +from os import listdir, walk, remove +from os.path import isdir, exists, join from datacats.docker import container_logs from clint.textui import colored @@ -48,12 +48,23 @@ Default: '.' '--site-url': None }) +def clean_pyc(environment, quiet=False): + if not quiet: + print 'Cleaning environment {} of pyc files...'.format(environment.name) + + for root, dirs, files in walk(environment.target): + for f in files: + if f.endswith('.pyc'): + remove(join(root, f)) def install_all(environment, clean, verbose=False, quiet=False): logs = check_connectivity() if logs.strip(): raise DatacatsError(logs) + if clean: + clean_pyc(environment, quiet) + srcdirs = set() reqdirs = set() for d in listdir(environment.target):
Clean pyc files on clean install, and on reset.
datacats_datacats
train
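The new clean_pyc walk is simple enough to restate on its own. A close Python sketch (the directory argument is generalized; otherwise it mirrors the commit):

import os

def clean_pyc(target_dir):
    # Delete stale bytecode so a clean install cannot import compiled
    # remnants of source files that have since changed or vanished.
    for root, dirs, files in os.walk(target_dir):
        for name in files:
            if name.endswith('.pyc'):
                os.remove(os.path.join(root, name))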
9378483378a7ea0663382c6793c1c0cd54c7fec5
diff --git a/code/extensions/SiteTreeContentReview.php b/code/extensions/SiteTreeContentReview.php index <HASH>..<HASH> 100644 --- a/code/extensions/SiteTreeContentReview.php +++ b/code/extensions/SiteTreeContentReview.php @@ -453,6 +453,10 @@ class SiteTreeContentReview extends DataExtension implements PermissionProvider } $options = $this->getOptions(); + + if (!$options) { + return false; + } if ($options->OwnerGroups()->count() == 0 && $options->OwnerUsers()->count() == 0) { return false;
Fixing method call bug in canBeReviewedBy() (#1) If getOptions returns false, then canBeReviewedBy() throws "Call to a member function OwnerGroups() on boolean". This will return false in the case that options are false.
silverstripe_silverstripe-contentreview
train
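The guard pattern above translates directly. A loose Python analogue (the method names are free translations of the PHP, shown only to make the failure mode concrete):

def can_be_reviewed(page):
    options = page.get_options()
    if not options:
        # get_options() may return False; calling a method on that value
        # is the "member function on boolean" error being fixed.
        return False
    return bool(options.owner_groups or options.owner_users)

class NoOptionsPage:
    def get_options(self):
        return False

assert can_be_reviewed(NoOptionsPage()) is False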
525335cd8520c19caaa73894af31c684508b14df
diff --git a/src/Nut/UserAdd.php b/src/Nut/UserAdd.php index <HASH>..<HASH> 100644 --- a/src/Nut/UserAdd.php +++ b/src/Nut/UserAdd.php @@ -2,6 +2,7 @@ namespace Bolt\Nut; +use Bolt\Storage\Entity; use Symfony\Component\Console\Input\InputArgument; use Symfony\Component\Console\Input\InputInterface; use Symfony\Component\Console\Output\OutputInterface; @@ -37,26 +38,28 @@ class UserAdd extends BaseCommand $displayname = $input->getArgument('displayname'); $role = $input->getArgument('role'); - $this->app['users']->getUsers(); - $user = $this->app['users']->getEmptyUser(); - $user['roles'] = [$role]; - $user['username'] = $username; - $user['password'] = $password; - $user['displayname'] = $displayname; - $user['email'] = $email; + $data = [ + 'username' => $username, + 'password' => $password, + 'email' => $email, + 'displayname' => $displayname, + 'roles' => [$role], + ]; + + $user = new Entity\Users($data); $valid = true; - if (! $this->app['users']->checkAvailability('username', $user['username'])) { + if (! $this->app['users']->checkAvailability('username', $user->getUsername())) { $valid = false; - $output->writeln("<error>Error creating user: username {$user['username']} already exists</error>"); + $output->writeln("<error>Error creating user: username {$user->getUsername()} already exists</error>"); } - if (! $this->app['users']->checkAvailability('email', $user['email'])) { + if (! $this->app['users']->checkAvailability('email', $user->getEmail())) { $valid = false; - $output->writeln("<error>Error creating user: email {$user['email']} exists</error>"); + $output->writeln("<error>Error creating user: email {$user->getEmail()} exists</error>"); } - if (! $this->app['users']->checkAvailability('displayname', $user['displayname'])) { + if (! $this->app['users']->checkAvailability('displayname', $user->getDisplayname())) { $valid = false; - $output->writeln("<error>Error creating user: display name {$user['displayname']} already exists</error>"); + $output->writeln("<error>Error creating user: display name {$user->getDisplayname()} already exists</error>"); } if ($valid) {
Encode user password when using console command to create a new user.
bolt_bolt
train
f4bdddbf96a7eb616cbd1cf0312e5d65c984c681
diff --git a/Kwf/Rest/Controller/Model.php b/Kwf/Rest/Controller/Model.php index <HASH>..<HASH> 100644 --- a/Kwf/Rest/Controller/Model.php +++ b/Kwf/Rest/Controller/Model.php @@ -156,6 +156,14 @@ class Kwf_Rest_Controller_Model extends Zend_Rest_Controller return $data; } + /** + * The head action handles HEAD requests and receives an 'id' parameter; it + * should respond with the server resource state of the resource identified + * by the 'id' value. + */ + public function headAction() + { + } // Handle GET and return a list of resources public function indexAction() {
zf <I> compatibility: add head action. TODO: do something useful in there
koala-framework_koala-framework
train
ac648dada900ab6794fe30d53f2e995b263c22b7
diff --git a/lib/blazer/engine.rb b/lib/blazer/engine.rb index <HASH>..<HASH> 100644 --- a/lib/blazer/engine.rb +++ b/lib/blazer/engine.rb @@ -11,7 +11,7 @@ module Blazer Blazer.user_name = Blazer.settings["user_name"] if Blazer.settings["user_name"] Blazer.from_email = Blazer.settings["from_email"] if Blazer.settings["from_email"] - Blazer.user_class ||= Blazer.settings["user_class"] || User rescue nil + Blazer.user_class ||= Blazer.settings.key?("user_class") ? Blazer.settings["user_class"] : (User rescue nil) Blazer.user_method = Blazer.settings["user_method"] if Blazer.user_class Blazer.user_method ||= "current_#{Blazer.user_class.to_s.downcase.singularize}"
Allow for nil user class if user model exists
ankane_blazer
train
7af5e8b4c294e151b5df0c8043388d300193a9fc
diff --git a/project/library/CM/Mail.php b/project/library/CM/Mail.php index <HASH>..<HASH> 100644 --- a/project/library/CM/Mail.php +++ b/project/library/CM/Mail.php @@ -322,6 +322,7 @@ class CM_Mail extends CM_View_Abstract { require_once DIR_PHPMAILER . 'class.phpmailer.php'; try { $mail = new PHPMailer(true); + $mail->CharSet = 'utf-8'; foreach ($this->_replyTo as $replyTo) { $mail->AddReplyTo($replyTo['address'], $replyTo['name']);
Fix: Set phpmailer charset to utf-8
cargomedia_cm
train
0daab5ff7293f396c01ef3dae2fe8e0db2e60eae
diff --git a/test/unit/model/ActivityMonitoringServiceTest.php b/test/unit/model/ActivityMonitoringServiceTest.php index <HASH>..<HASH> 100644 --- a/test/unit/model/ActivityMonitoringServiceTest.php +++ b/test/unit/model/ActivityMonitoringServiceTest.php @@ -38,12 +38,12 @@ class ActivityMonitoringServiceTest extends TestCase $timeKeys = $service->getTimeKeys(new \DateInterval('PT1M'), clone($date)); $this->assertEquals(60, count($timeKeys)); - $this->assertEquals(((int)$date->format('i') + 1) % 60, $timeKeys[0]->format('i')); + $this->assertEquals(((int)$date->format('i') + 1) % 60, (int)$timeKeys[0]->format('i')); $this->assertEquals(0, $timeKeys[0]->format('s')); $timeKeys = $service->getTimeKeys(new \DateInterval('PT1H'), clone($date)); $this->assertEquals(24, count($timeKeys)); - $this->assertEquals(((int)$date->format('H') + 1) % 24, $timeKeys[0]->format('H')); + $this->assertEquals(((int)$date->format('H') + 1) % 24, (int)$timeKeys[0]->format('H')); $this->assertEquals(0, $timeKeys[0]->format('i')); $timeKeys = $service->getTimeKeys(new \DateInterval('P1D'), clone($date)); @@ -54,7 +54,7 @@ class ActivityMonitoringServiceTest extends TestCase $timeKeys = $service->getTimeKeys(new \DateInterval('P1M'), clone($date)); $this->assertEquals(12, count($timeKeys)); - $this->assertEquals((int)$date->format('m') % 12 + 1, $timeKeys[0]->format('m')); + $this->assertEquals((int)$date->format('m') % 12 + 1, (int)$timeKeys[0]->format('m')); $this->assertEquals(0, $timeKeys[0]->format('H')); $this->assertEquals('01', $timeKeys[0]->format('d')); }
Added cast to int to have a proper comparison.
oat-sa_extension-tao-proctoring
train
488ac02ced305f8887573319d755417a500c68d0
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -3,7 +3,7 @@ var assign = require('object-assign'); hexo.config.tag_generator = assign({ - per_page: hexo.config.per_page || 10 + per_page: typeof hexo.config.per_page === "undefined" ? 10 : hexo.config.per_page }, hexo.config.tag_generator); hexo.extend.generator.register('tag', require('./lib/generator')); \ No newline at end of file
Allow users to set the per_page option to 0
hexojs_hexo-generator-tag
train
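The bug fixed here is the classic falsy-zero default: config.per_page || 10 turns an intentional 0 into 10. A hedged Python analogue using a plain dict (hypothetical config, not hexo's API):

config = {"per_page": 0}  # the user wants pagination disabled

assert (config.get("per_page") or 10) == 10   # buggy: 0 is falsy, default wins
assert config.get("per_page", 10) == 0        # fixed: default only when absent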
d631d2e0eed795bd0b5e0edb323977a6c66ac416
diff --git a/ignite/utils.py b/ignite/utils.py index <HASH>..<HASH> 100644 --- a/ignite/utils.py +++ b/ignite/utils.py @@ -150,6 +150,10 @@ def manual_seed(seed: int) -> None: """ random.seed(seed) torch.manual_seed(seed) + + if torch.cuda.is_available(): + torch.cuda.manual_seed_all(seed) + try: import numpy as np
Added torch.cuda.manual_seed_all(seed) (#<I>)
pytorch_ignite
train
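For context, a common shape of such a seeding helper — shown as a sketch that assumes torch is installed; the optional numpy seeding mirrors the surrounding ignite code:

import random

import torch

def manual_seed(seed: int) -> None:
    # Seed every RNG source in play; CUDA keeps its own per-device
    # generators, so torch.manual_seed alone does not cover GPU runs.
    random.seed(seed)
    torch.manual_seed(seed)
    if torch.cuda.is_available():
        torch.cuda.manual_seed_all(seed)
    try:
        import numpy as np
        np.random.seed(seed)
    except ImportError:
        pass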
889dfcb0866968b0130955501f920f9ceabb3e86
diff --git a/js/test/Exchange/test.fetchOHLCV.js b/js/test/Exchange/test.fetchOHLCV.js index <HASH>..<HASH> 100644 --- a/js/test/Exchange/test.fetchOHLCV.js +++ b/js/test/Exchange/test.fetchOHLCV.js @@ -14,6 +14,7 @@ const log = require ('ololog') module.exports = async (exchange, symbol) => { const skippedExchanges = [ + 'bitmex', // an issue with null values,to be resolved later 'cex', 'okex', 'okcoinusd',
added bitmex to fetchOHLCV exceptions
ccxt_ccxt
train
ee0416de9a77064153b4283d6d4a709c4510125a
diff --git a/tests/test_octodns_provider_route53.py b/tests/test_octodns_provider_route53.py index <HASH>..<HASH> 100644 --- a/tests/test_octodns_provider_route53.py +++ b/tests/test_octodns_provider_route53.py @@ -1683,6 +1683,9 @@ class TestRoute53Provider(TestCase): 'Value': '1.2.3.4', }], 'TTL': 61, + # All the non-matches have a different Id so we'll fail if they + # match + 'HealthCheckId': '33', }, { # Not dynamic value, matching name, other type 'Name': 'a.unit.tests.', @@ -1691,6 +1694,7 @@ class TestRoute53Provider(TestCase): 'Value': '2001:0db8:3c4d:0015:0000:0000:1a2f:1a4b' }], 'TTL': 61, + 'HealthCheckId': '33', }, { # default value pool 'Name': '_octodns-default-value.a.unit.tests.', @@ -1702,6 +1706,7 @@ class TestRoute53Provider(TestCase): 'Value': '1.2.3.4', }], 'TTL': 61, + 'HealthCheckId': '33', }, { # different record 'Name': '_octodns-two-value.other.unit.tests.', @@ -1713,6 +1718,7 @@ class TestRoute53Provider(TestCase): 'Value': '1.2.3.4', }], 'TTL': 61, + 'HealthCheckId': '33', }, { # same everything, but different type 'Name': '_octodns-one-value.a.unit.tests.', @@ -1721,7 +1727,16 @@ class TestRoute53Provider(TestCase): 'Value': '2001:0db8:3c4d:0015:0000:0000:1a2f:1a4b' }], 'TTL': 61, - 'HealthCheckId': '42', + 'HealthCheckId': '33', + }, { + # same everything, sub + 'Name': '_octodns-one-value.sub.a.unit.tests.', + 'Type': 'A', + 'ResourceRecords': [{ + 'Value': '1.2.3.4', + }], + 'TTL': 61, + 'HealthCheckId': '33', }, { # match 'Name': '_octodns-one-value.a.unit.tests.',
Cover more Route<I> extra check edge cases and ensure it tests what we're after
github_octodns
train
fb8a45e65108f0b0d5e44c8fb0fe72f2bf574f54
diff --git a/lib/solargraph/api_map/probe.rb b/lib/solargraph/api_map/probe.rb index <HASH>..<HASH> 100644 --- a/lib/solargraph/api_map/probe.rb +++ b/lib/solargraph/api_map/probe.rb @@ -177,6 +177,7 @@ module Solargraph def resolve_pin_type pin, locals return pin.return_type unless pin.return_type.nil? return resolve_block_parameter(pin, locals) if pin.kind == Pin::BLOCK_PARAMETER + return resolve_method_parameter(pin) if pin.is_a?(Pin::MethodParameter) return resolve_variable(pin, locals) if pin.variable? nil end @@ -209,6 +210,18 @@ module Solargraph nil end + def resolve_method_parameter pin + matches = api_map.get_methods(pin.namespace, scope: pin.scope, visibility: [:public, :private, :protected]).select{|p| p.name == pin.context.name} + matches.each do |m| + next unless pin.context.parameters == m.parameters + next if m.docstring.nil? + tag = m.docstring.tags(:param).select{|t| t.name == pin.name}.first + next if tag.nil? or tag.types.nil? + return tag.types[0] + end + nil + end + def resolve_variable(pin, locals) return nil if pin.nil_assignment? # @todo Do we need the locals here? diff --git a/lib/solargraph/diagnostics/type_not_defined.rb b/lib/solargraph/diagnostics/type_not_defined.rb index <HASH>..<HASH> 100644 --- a/lib/solargraph/diagnostics/type_not_defined.rb +++ b/lib/solargraph/diagnostics/type_not_defined.rb @@ -7,16 +7,16 @@ module Solargraph def diagnose source, api_map result = [] source.pins.select{|p| p.kind == Pin::METHOD or p.kind == Pin::ATTRIBUTE}.each do |pin| - result.concat check_return_type(pin, api_map) - result.concat check_param_types(pin, api_map) - result.concat check_param_tags(pin, api_map) + result.concat check_return_type(pin, api_map, source) + result.concat check_param_types(pin, api_map, source) + result.concat check_param_tags(pin, api_map, source) end result end private - def check_return_type pin, api_map + def check_return_type pin, api_map, source result = [] unless defined_return_type?(pin, api_map) result.push( @@ -29,7 +29,7 @@ module Solargraph result end - def check_param_types pin, api_map + def check_param_types pin, api_map, source result = [] pin.parameters.each do |par| next if defined_param_type?(pin, par, api_map) @@ -43,7 +43,7 @@ module Solargraph result end - def check_param_tags pin, api_map + def check_param_tags pin, api_map, source result = [] unless pin.docstring.nil? pin.docstring.tags(:param).each do |par| @@ -84,7 +84,7 @@ module Solargraph matches = api_map.get_methods(pin.namespace, scope: pin.scope, visibility: [:public, :private, :protected]).select{|p| p.name == pin.name} matches.shift matches.each do |m| - next unless pin.params == m.params + next unless pin.parameters == m.parameters return true if param_in_docstring?(param, m.docstring) end false
Probe and TypeNotDefined param type detection.
castwide_solargraph
train
a5389464d7776450c7f904d013b71e659c7a1c82
diff --git a/mod/quiz/edit.php b/mod/quiz/edit.php index <HASH>..<HASH> 100644 --- a/mod/quiz/edit.php +++ b/mod/quiz/edit.php @@ -40,7 +40,7 @@ $courseid = optional_param('courseid'); $quizid = optional_param('quizid'); - $page = optional_param('page', 0); + $page = optional_param('page', -1); $perpage = optional_param('perpage', 20); $strquizzes = get_string('modulenameplural', 'quiz'); @@ -103,6 +103,12 @@ $modform->grades = quiz_get_all_question_grades($modform); } + if ($page > -1) { + $modform->page = $page; + } else { + $page = isset($modform->page) ? $modform->page : 0; + } + /// Now, check for commands on this page and modify variables as necessary if (isset($_REQUEST['up']) and confirm_sesskey()) { /// Move the given question up a slot
edit page now remembers the paging for the question list
moodle_moodle
train
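The -1 default acts as a sentinel: it lets the code tell "page parameter missing from the request" apart from a legitimate page 0. The same idea in Python, with hypothetical names:

_UNSET = object()  # sentinel no real request value can collide with

def resolve_page(request_params, remembered_page=0):
    page = request_params.get("page", _UNSET)
    if page is _UNSET:
        return remembered_page   # nothing supplied: reuse the stored page
    return page                  # an explicit value wins, even page 0

assert resolve_page({}) == 0
assert resolve_page({"page": 0}, remembered_page=3) == 0
assert resolve_page({}, remembered_page=3) == 3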
3d218b1694d59715ad165e8e0c26e090af5de8d2
diff --git a/core/src/main/java/hudson/scm/SCM.java b/core/src/main/java/hudson/scm/SCM.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/scm/SCM.java +++ b/core/src/main/java/hudson/scm/SCM.java @@ -116,14 +116,6 @@ public abstract class SCM implements Describable<SCM>, ExtensionPoint { public abstract boolean checkout(AbstractBuild build, Launcher launcher, FilePath workspace, BuildListener listener, File changelogFile) throws IOException, InterruptedException; /** - * Checks out (or updates) the code into the workspace, but without computing changelog. - * - * TODO: This is an ugly abstraction. - * come back and check if this abstraction is really making much sense. - */ - public abstract boolean checkout(Launcher launcher, FilePath workspace, TaskListener listener) throws IOException, InterruptedException; - - /** * Adds environmental variables for the builds to the given map. */ public abstract void buildEnvVars(Map<String,String> env);
removed the method that Hudson isn't using. git-svn-id: <URL>
jenkinsci_jenkins
train
f56ca73af3a898eb297a5bd34ea8f15e73c32a7c
diff --git a/lib/authorizer/auth-interface.js b/lib/authorizer/auth-interface.js index <HASH>..<HASH> 100644 --- a/lib/authorizer/auth-interface.js +++ b/lib/authorizer/auth-interface.js @@ -1,40 +1,16 @@ var _ = require('lodash'), EMPTY = '', - upsertAuthParams, createAuthInterface; /** - * Helper function to conditionally update a VariableList from an object - * - * @param {VariableList} parameters - * @param {Object} modifiedParams - */ -upsertAuthParams = function (parameters, modifiedParams) { - var param; - _.forEach(modifiedParams, function (value, key) { - param = parameters.one(key); - - if (!param) { - return parameters.add({key: key, value: value, system: true}); - } - - // Update if the param is a system property or an empty user property (null, undefined or empty string) - if (param.system || param.value === EMPTY || _.isNil(param.value) || _.isNaN(param.value)) { - return param.update({key: key, value: value, system: true}); - } - }); -}; - -/** * Creates a wrapper around RequestAuth and provides getters and setters helper functions * * @constructs AuthInterface * @param {RequestAuth} auth - * @param {RequestAuth} originalAuth * @return {AuthInterface} * @throws {Error} */ -createAuthInterface = function (auth, originalAuth) { +createAuthInterface = function (auth) { if (!(auth && auth.parameters && auth.parameters())) { throw new Error('runtime~createAuthInterface: invalid auth'); } @@ -73,7 +49,8 @@ createAuthInterface = function (auth, originalAuth) { * @throws {Error} */ set: function (key, value) { - var modifiedParams = {}; + var modifiedParams = {}, + parameters; if (_.isObject(key)) { modifiedParams = key; @@ -85,8 +62,20 @@ createAuthInterface = function (auth, originalAuth) { throw new Error('runtime~AuthInterface: set should be called with `key` as a string or object'); } - upsertAuthParams(auth.parameters(), modifiedParams); - upsertAuthParams(originalAuth.parameters(), modifiedParams); + parameters = auth.parameters(); + _.forEach(modifiedParams, function (value, key) { + var param = parameters.one(key); + + if (!param) { + return parameters.add({key: key, value: value, system: true}); + } + + // Update if the param is a system property or an empty user property (null, undefined or empty string) + if (param.system || param.value === EMPTY || _.isNil(param.value) || _.isNaN(param.value)) { + return param.update({key: key, value: value, system: true}); + } + }); + return this; } }; diff --git a/lib/runner/request-helpers-postsend.js b/lib/runner/request-helpers-postsend.js index <HASH>..<HASH> 100644 --- a/lib/runner/request-helpers-postsend.js +++ b/lib/runner/request-helpers-postsend.js @@ -16,8 +16,9 @@ module.exports = [ if (!(context.auth && context.auth.type)) { return done(); } var auth = context.auth, + originalAuthParams = context.originalItem.getAuth().parameters(), authHandler = AuthLoader.getHandler(auth.type), - authInterface = createAuthInterface(auth, context.originalItem.getAuth()); + authInterface = createAuthInterface(auth); // bail out if there is no matching auth handler for the type if (!authHandler) { @@ -27,6 +28,11 @@ module.exports = [ // invoke `post` on the Auth authHandler.post(authInterface, context.response, function (err, success) { + // sync all auth system parameters to the original auth + auth.parameters().each(function (param) { + param && param.system && originalAuthParams.upsert({key: param.key, value: param.value, system: true}); + }); + // sync auth state back to item request _.set(context, 'item.request.auth', auth); diff 
--git a/lib/runner/request-helpers-presend.js b/lib/runner/request-helpers-presend.js index <HASH>..<HASH> 100644 --- a/lib/runner/request-helpers-presend.js +++ b/lib/runner/request-helpers-presend.js @@ -45,6 +45,7 @@ module.exports = [ // get auth handler var auth = context.auth, authType = auth.type, + originalAuthParams = context.originalItem.getAuth().parameters(), authHandler = AuthLoader.getHandler(authType), authPreHook, authInterface, @@ -76,7 +77,7 @@ module.exports = [ return done(); } - authInterface = createAuthInterface(auth, context.originalItem.getAuth()); + authInterface = createAuthInterface(auth); /** * We go through the `pre` request send validation for the auth. In this step one of the three things can happen @@ -104,6 +105,12 @@ module.exports = [ return done(); } + // sync all auth system parameters to the original auth + auth.parameters().each(function (param) { + param && param.system && + originalAuthParams.upsert({key: param.key, value: param.value, system: true}); + }); + // authHandler gave a go, sign the request if (success) { return authSignHook(); }
Move updating original auth logic to callbacks of presend and postsend helpers. This is done so that this logic runs only once, when all the set operations are done.
postmanlabs_postman-runtime
train
f7f7ecaa757a0bc810a7ca43d27d13b95259743b
diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index <HASH>..<HASH> 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -606,10 +606,11 @@ def test_write_and_read_file_units( 0.465379, rtol=1e-5) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -79.86838) + fn2 = os.fspath(tmp_path / "test2.tif") with caplog.at_level(logging.WARNING): ngtw.save_dataset( test_image_small_mid_atlantic_K_L.data, - filename=fn, + filename=fn2, fill_value=0, blockxsize=128, blockysize=128, diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index <HASH>..<HASH> 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py @@ -204,7 +204,7 @@ class NinJoGeoTIFFWriter(GeoTIFFWriter): data_units = image.data.attrs.get("units") if (quantity.lower() == "temperature" and unit == "C" and - image.data.attrs.get("units") == "K"): + data_units == "K"): logger.debug("Adding offset for K → °C conversion") new_attrs = copy.deepcopy(image.data.attrs) im2 = type(image)(image.data.copy())
Use different temporary files. Use two different temporary files when reading/writing in the unit test. Otherwise Windows becomes sad.
pytroll_satpy
train
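The general rule this fix applies: when a test writes a file twice, give each write its own path, because Windows refuses to reopen or replace a file that still has an open handle. A minimal pytest-style sketch (tmp_path is pytest's built-in fixture):

def test_write_twice(tmp_path):
    fn1 = tmp_path / "test1.tif"
    fn2 = tmp_path / "test2.tif"   # the second output gets its own file
    fn1.write_bytes(b"first result")
    # Reusing fn1 here could fail on Windows if a handle is still open;
    # a distinct path sidesteps the lock entirely.
    fn2.write_bytes(b"second result")
    assert fn1.read_bytes() != fn2.read_bytes()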
b7adb8baccce0d3c50af3084b8fc552391fa23fb
diff --git a/fcm_django/fcm.py b/fcm_django/fcm.py index <HASH>..<HASH> 100644 --- a/fcm_django/fcm.py +++ b/fcm_django/fcm.py @@ -1,6 +1,18 @@ from pyfcm import FCMNotification from .settings import FCM_DJANGO_SETTINGS as SETTINGS + +# Copied from https://github.com/olucurious/PyFCM/blob/master/pyfcm/baseapi.py +response_dict = { + 'multicast_ids': [], + 'success': 0, + 'failure': 0, + 'canonical_ids': 0, + 'results': [], + 'topic_message_id': None +} + + def fcm_send_topic_message( api_key=None, json_encoder=None, diff --git a/fcm_django/models.py b/fcm_django/models.py index <HASH>..<HASH> 100644 --- a/fcm_django/models.py +++ b/fcm_django/models.py @@ -1,8 +1,11 @@ from __future__ import unicode_literals +from copy import deepcopy + from django.db import models from django.utils.translation import ugettext_lazy as _ +from .fcm import response_dict from .settings import FCM_DJANGO_SETTINGS as SETTINGS @@ -60,8 +63,9 @@ class FCMDeviceQuerySet(models.query.QuerySet): 'registration_id', flat=True )) + if len(registration_ids) == 0: - return [{'failure': len(self), 'success': 0}] + return dict(deepcopy(response_dict), failure=len(self), success=0) result = fcm_send_bulk_message( registration_ids=registration_ids, @@ -106,8 +110,9 @@ class FCMDeviceQuerySet(models.query.QuerySet): 'registration_id', flat=True )) + if len(registration_ids) == 0: - return [{'failure': len(self), 'success': 0}] + return dict(deepcopy(response_dict), failure=len(self), success=0) result = fcm_send_bulk_data_messages( api_key=api_key, @@ -239,8 +244,8 @@ class AbstractFCMDevice(Device): if 'error' in result['results'][0]: error_list = ['MissingRegistration', 'MismatchSenderId', 'InvalidRegistration', 'NotRegistered'] if result['results'][0]['error'] in error_list: - device.update(active=False) - self._delete_inactive_device_if_requested(device) + device.update(active=False) + self._delete_inactive_device_if_requested(device) @staticmethod def _delete_inactive_device_if_requested(device):
backward incompatible change: return dict instead of list object from queryset's send_message() and send_data_message() methods when there are no active devices
xtrinch_fcm-django
train
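The pattern introduced above — deep-copying a module-level response template and overriding a few fields — looks roughly like this in isolation (template keys trimmed for brevity):

from copy import deepcopy

response_dict = {
    "multicast_ids": [],
    "success": 0,
    "failure": 0,
    "results": [],
}

def empty_send_result(device_count):
    # deepcopy so callers never mutate the shared template's inner lists;
    # dict(mapping, key=value) overrides selected fields on the copy.
    return dict(deepcopy(response_dict), failure=device_count, success=0)

result = empty_send_result(3)
assert result["failure"] == 3
assert result["results"] is not response_dict["results"]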
5314eb75766b8312952e8ffc9b3def7bdc0af9d0
diff --git a/bmemcached/__init__.py b/bmemcached/__init__.py index <HASH>..<HASH> 100644 --- a/bmemcached/__init__.py +++ b/bmemcached/__init__.py @@ -93,13 +93,20 @@ class Server(object): if username and password: self.authenticate(username, password) + def _read_socket(self, size): + value = '' + while True: + value += self.connection.recv(size - len(value)) + if len(value) == size: + return value + def authenticate(self, username, password): logger.info('Authenticating as %s' % username) self.connection.send(struct.pack(self.HEADER_STRUCT, self.MAGIC['request'], self.COMMANDS['auth_negotiation']['command'], 0, 0, 0, 0, 0, 0, 0)) - header = self.connection.recv(self.HEADER_SIZE) + header = self._read_socket(self.HEADER_SIZE) (magic, opcode, keylen, extlen, datatype, status, bodylen, opaque, cas) = struct.unpack(self.HEADER_STRUCT, header) @@ -107,7 +114,7 @@ class Server(object): logger.debug('Server does not requires authentication.') return True - methods = self.connection.recv(bodylen).split(' ') + methods = self._read_socket(bodylen).split(' ') if not 'PLAIN' in methods: raise AuthenticationNotSupported('This module only supports ' + \ @@ -119,16 +126,16 @@ class Server(object): self.COMMANDS['auth_request']['struct'] % (len(method), len(auth)), self.MAGIC['request'], self.COMMANDS['auth_request']['command'], len(method), 0, 0, 0, len(method) + len(auth), 0, 0, method, auth)) - header = self.connection.recv(self.HEADER_SIZE) + header = self._read_socket(self.HEADER_SIZE) (magic, opcode, keylen, extlen, datatype, status, bodylen, opaque, cas) = struct.unpack(self.HEADER_STRUCT, header) if status != self.STATUS['success']: raise MemcachedException('Code: %d Message: %s' % (status, - self.connection.recv(bodylen))) + self._read_socket(bodylen))) logger.debug('Auth OK. Code: %d Message: %s' % (status, - self.connection.recv(bodylen))) + self._read_socket(bodylen))) return True @@ -167,7 +174,7 @@ class Server(object): self.COMMANDS['get']['command'], len(key), 0, 0, 0, len(key), 0, 0, key)) - header = self.connection.recv(self.HEADER_SIZE) + header = self._read_socket(self.HEADER_SIZE) (magic, opcode, keylen, extlen, datatype, status, bodylen, opaque, cas) = struct.unpack(self.HEADER_STRUCT, header) @@ -177,13 +184,13 @@ class Server(object): if status != self.STATUS['success']: if status == self.STATUS['key_not_found']: logger.debug('Key not found. 
Message: %s' \ - % self.connection.recv(bodylen)) + % self._read_socket(bodylen)) return None raise MemcachedException('Code: %d Message: %s' % (status, - self.connection.recv(bodylen))) + self._read_socket(bodylen))) - body = self.connection.recv(bodylen) + body = self._read_socket(bodylen) flags, value = struct.unpack('!L%ds' % (bodylen - 4, ), body) @@ -203,7 +210,7 @@ class Server(object): len(key), 8, 0, 0, len(key) + len(value) + 8, 0, 0, flags, time, key, value)) - header = self.connection.recv(self.HEADER_SIZE) + header = self._read_socket(self.HEADER_SIZE) (magic, opcode, keylen, extlen, datatype, status, bodylen, opaque, cas) = struct.unpack(self.HEADER_STRUCT, header) @@ -212,7 +219,7 @@ class Server(object): if status != self.STATUS['success']: raise MemcachedException('Code: %d Message: %s' % (status, - self.connection.recv(bodylen))) + self._read_socket(bodylen))) return True @@ -224,7 +231,7 @@ class Server(object): self.COMMANDS['delete']['command'], len(key), 0, 0, 0, len(key), 0, 0, key)) - header = self.connection.recv(self.HEADER_SIZE) + header = self._read_socket(self.HEADER_SIZE) # TODO: Why is this returning a string instead a real header? if header == 'Not found': @@ -235,8 +242,7 @@ class Server(object): if status != self.STATUS['success'] \ and status != self.STATUS['key_not_found']: - raise MemcachedException('Code: %d Message: %s' % (status, - self.connection.recv(bodylen))) + raise MemcachedException('Code: %d' % (status)) logger.debug('Key deleted %s' % key) return True
added a new approach to read from socket
jaysonsantos_python-binary-memcached
train
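The _read_socket helper above implements the standard "read exactly N bytes" loop, since recv() may return short reads. A sketch of the same loop with one extra guard the diff omits — bailing out when the peer closes the connection, which would otherwise spin forever:

import socket

def read_exactly(conn: socket.socket, size: int) -> bytes:
    buf = b""
    while len(buf) < size:
        chunk = conn.recv(size - len(buf))
        if not chunk:  # peer closed: recv returned b""
            raise ConnectionError(
                "socket closed after %d of %d bytes" % (len(buf), size))
        buf += chunk
    return buf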
cec4cb4cb977fd15fb7ea8fd2585b7503e2ee7bd
diff --git a/tool/smarts/src/test/java/org/openscience/cdk/smiles/smarts/parser/SMARTSSearchTest.java b/tool/smarts/src/test/java/org/openscience/cdk/smiles/smarts/parser/SMARTSSearchTest.java index <HASH>..<HASH> 100644 --- a/tool/smarts/src/test/java/org/openscience/cdk/smiles/smarts/parser/SMARTSSearchTest.java +++ b/tool/smarts/src/test/java/org/openscience/cdk/smiles/smarts/parser/SMARTSSearchTest.java @@ -1680,46 +1680,29 @@ public class SMARTSSearchTest extends CDKTestCase { Assert.assertEquals(3, results[1]); } - @Test + @Test(expected=IllegalArgumentException.class) public void testInvalidPeriodicGroupNumber() throws Exception { - try { - int[] results = match("[G19]", "CCN"); - Assert.fail(); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } - - try { - int[] results = match("[G0]", "CCN"); - Assert.fail(); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } + match("[G19]", "CCN"); + } - try { - int[] results = match("[G345]", "CCN"); - Assert.fail(); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } + @Test(expected=IllegalArgumentException.class) + public void testInvalidPeriodicGroupNumber_2() throws Exception { + match("[G0]", "CCN"); + } + @Test(expected=IllegalArgumentException.class) + public void testInvalidPeriodicGroupNumber_3() throws Exception { + match("[G345]", "CCN"); } - @Test + @Test(expected=IllegalArgumentException.class) public void testNonPeriodicGroupNumber() throws Exception { - try { - int[] results = match("[G]", "CCN"); - Assert.fail("Should throw an exception if G is not followed by a number"); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } + match("[G]", "CCN"); // Should throw an exception if G is not followed by a number + } - try { - int[] results = match("[GA]", "CCN"); - Assert.fail("Should throw an exception if G is not followed by a number"); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } + @Test(expected=IllegalArgumentException.class) + public void testNonPeriodicGroupNumber_2() throws Exception { + match("[GA]", "CCN"); // Should throw an exception if G is not followed by a number } @Test @@ -1766,36 +1749,24 @@ public class SMARTSSearchTest extends CDKTestCase { } - @Test + @Test(expected=IllegalArgumentException.class) public void testBadHybridizationNumber() throws Exception { + match("[^]", "CCN"); // Should throw an exception if ^ is not followed by a number + } - try { - int[] results = match("[^]", "CCN"); - Assert.fail("Should throw an exception if ^ is not followed by a number"); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } - - try { - int[] results = match("[^X]", "CCN"); - Assert.fail("Should throw an exception if ^ is not followed by a number"); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } + @Test(expected=IllegalArgumentException.class) + public void testBadHybridizationNumber_2() throws Exception { + match("[^X]", "CCN"); // Should throw an exception if ^ is not followed by a number + } - try { - int[] results = match("[^0]", "CCN"); - Assert.fail("Should throw an exception if ^ is not between 1 & 8"); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } + @Test(expected=IllegalArgumentException.class) + public void testBadHybridizationNumber_3() throws Exception { + match("[^0]", "CCN"); // Should throw an exception if ^ is not followed by a number + } - try { - int[] results = match("[^9]", "CCN"); - Assert.fail("Should throw an 
exception if ^ is not between 1 & 8"); - } catch (IllegalArgumentException pe) { - Assert.assertTrue(true); - } + @Test(expected=IllegalArgumentException.class) + public void testBadHybridizationNumber_4() throws Exception { + match("[^9]", "CCN"); // Should throw an exception if ^ is not followed by a number } /**
Fixed the unit tests: fail() causes the method to exit, so not all tests were being run
cdk_cdk
train
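The refactor splits multi-case try/fail tests into one expected-exception test per input, so an early failure can no longer mask later cases. The same idea in Python with pytest (the matcher here is a hypothetical stand-in, not CDK's API):

import pytest

def match(pattern, smiles):
    raise ValueError("invalid pattern: %r" % pattern)  # stand-in matcher

@pytest.mark.parametrize("pattern", ["[G19]", "[G0]", "[G345]"])
def test_invalid_periodic_group_number(pattern):
    # One input per test case: each invalid pattern is checked independently.
    with pytest.raises(ValueError):
        match(pattern, "CCN")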
7f030ca7b07b1d5a5f55629460ca845b6bb1f6a8
diff --git a/cocaine/tools/actions/docker.py b/cocaine/tools/actions/docker.py index <HASH>..<HASH> 100644 --- a/cocaine/tools/actions/docker.py +++ b/cocaine/tools/actions/docker.py @@ -179,9 +179,9 @@ class StreamingAction(Action): def _handle_message(self, message): if "stream" in message: - log.info(message["stream"]) + log.info(message["stream"].rstrip('\n')) elif "error" in message: - error_msg = message["error"] + error_msg = message["error"].rstrip('\n') self._lasterr = DockerException(error_msg) log.error(error_msg) @@ -190,7 +190,8 @@ class StreamingAction(Action): def _on_body(self, data): self._jsonunpacker.feed(data) - map(self._handle_message, self._jsonunpacker) + for i in self._jsonunpacker: + self._handle_message(i) class Build(StreamingAction): diff --git a/cocaine/tools/helpers/__init__.py b/cocaine/tools/helpers/__init__.py index <HASH>..<HASH> 100644 --- a/cocaine/tools/helpers/__init__.py +++ b/cocaine/tools/helpers/__init__.py @@ -43,7 +43,7 @@ class JSONUnpacker(Iterable): js = json.JSONDecoder() try: res, index = js.raw_decode(self.buff) - self.buff = self.buff[index:] + self.buff = self.buff[index:].lstrip('\r\n') return res except JSONDecodeError: raise StopIteration
[Docker] Fix parsing of Docker API
cocaine_cocaine-tools
train
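The JSONUnpacker being fixed here is an incremental decoder: feed it chunks off the wire and iterate over whatever complete objects have arrived, stripping the inter-object newlines the Docker API emits. A self-contained sketch of that idea (simplified from the helper in the diff):

import json

class JSONUnpacker:
    def __init__(self):
        self.buff = ""
        self.decoder = json.JSONDecoder()

    def feed(self, data):
        self.buff += data

    def __iter__(self):
        while True:
            try:
                obj, index = self.decoder.raw_decode(self.buff)
            except ValueError:   # incomplete object: wait for more data
                return
            # drop the framing newlines between objects
            self.buff = self.buff[index:].lstrip("\r\n")
            yield obj

u = JSONUnpacker()
u.feed('{"stream": "Step 1"}\r\n{"err')
assert [m["stream"] for m in u] == ["Step 1"]
u.feed('or": "boom"}\r\n')
assert [m["error"] for m in u] == ["boom"]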
5f6212a5c9c63f9c7b93bad7c43f607ee7fa5805
diff --git a/packages/cerebral-provider-forms/src/index.js b/packages/cerebral-provider-forms/src/index.js index <HASH>..<HASH> 100644 --- a/packages/cerebral-provider-forms/src/index.js +++ b/packages/cerebral-provider-forms/src/index.js @@ -2,6 +2,7 @@ import {state} from 'cerebral/tags' import form from './form' import rules from './rules' import resetForm from './helpers/resetForm' +import formToJSON from './helpers/formToJSON' export {default as form} from './form' export {default as rules} from './rules' @@ -22,6 +23,9 @@ function FormsProvider (options = {}) { reset (path) { context.state.set(path, resetForm(context.state.get(path))) }, + toJSON (path) { + return formToJSON(context.state.get(path)) + }, updateRules (newRules) { Object.assign(rules, newRules) }, diff --git a/packages/cerebral-provider-forms/src/index.test.js b/packages/cerebral-provider-forms/src/index.test.js index <HASH>..<HASH> 100644 --- a/packages/cerebral-provider-forms/src/index.test.js +++ b/packages/cerebral-provider-forms/src/index.test.js @@ -163,4 +163,26 @@ describe('provider', () => { controller.getSignal('test')() rules._errorMessages = {} }) + it('should be able to convert to json', () => { + const controller = Controller({ + providers: [FormsProvider()], + state: { + form: { + name: { + value: 'Be' + } + } + }, + signals: { + test: [ + ({forms}) => { + assert.deepEqual(forms.toJSON('form'), { + name: 'Be' + }) + } + ] + } + }) + controller.getSignal('test')() + }) })
fix(forms): toJSON must return json (#<I>)
cerebral_cerebral
train
2aa176b4f9b2f6b50e65a75d9d07b326f4300d0b
diff --git a/src/geshi/csharp.php b/src/geshi/csharp.php index <HASH>..<HASH> 100644 --- a/src/geshi/csharp.php +++ b/src/geshi/csharp.php @@ -12,6 +12,8 @@ * * CHANGES * ------- + * 2015/04/14 + * - Added C# 5.0 and 6.0 missing keywords and #pragma directive * 2012/06/18 (1.0.8.11) * - Added missing keywords (Christian Stelzmann) * 2009/04/03 (1.0.8.6) @@ -62,7 +64,8 @@ $language_data = array ( 'ESCAPE_CHAR' => '\\', 'KEYWORDS' => array( 1 => array( - 'abstract', 'add', 'as', 'base', 'break', 'by', 'case', 'catch', 'const', 'continue', + 'abstract', 'add', 'as', 'async', 'await', 'base', + 'break', 'by', 'case', 'catch', 'const', 'continue', 'default', 'do', 'else', 'event', 'explicit', 'extern', 'false', 'finally', 'fixed', 'for', 'foreach', 'from', 'get', 'goto', 'group', 'if', 'implicit', 'in', 'into', 'internal', 'join', 'lock', 'namespace', 'null', @@ -74,10 +77,10 @@ $language_data = array ( ), 2 => array( '#elif', '#endif', '#endregion', '#else', '#error', '#define', '#if', - '#line', '#region', '#undef', '#warning' + '#line', '#pragma', '#region', '#undef', '#warning' ), 3 => array( - 'checked', 'is', 'new', 'sizeof', 'typeof', 'unchecked' + 'checked', 'is', 'new', 'nameof', 'sizeof', 'typeof', 'unchecked' ), 4 => array( 'bool', 'byte', 'char', 'class', 'decimal', 'delegate', 'double',
Some new keywords from C# 5 & 6. Add 'async' and 'await' keywords from C# <I>, the 'nameof' operator from C# <I>, and the missing '#pragma' directive. Resolves: <URL>
GeSHi_geshi-1.0
train
f698ea5dee96af46c97caf9d8ac2d17b94f689da
diff --git a/src/main/java/net/snowflake/client/core/SessionUtil.java b/src/main/java/net/snowflake/client/core/SessionUtil.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/snowflake/client/core/SessionUtil.java +++ b/src/main/java/net/snowflake/client/core/SessionUtil.java @@ -1250,10 +1250,6 @@ public class SessionUtil data.put(ClientAuthnParameter.EXT_AUTHN_DUO_METHOD.name(), "push"); } - logger.debug( - "implementation version = {}", - SnowflakeDriver.implementVersion); - data.put(ClientAuthnParameter.CLIENT_APP_VERSION.name(), loginInput.getAppVersion()); diff --git a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java +++ b/src/main/java/net/snowflake/client/jdbc/SnowflakeDriver.java @@ -175,14 +175,14 @@ public class SnowflakeDriver implements Driver { implementVersion = versionResourceBundleManager.getLocalizedMessage("version"); - logger.debug("implement version: {}", implementVersion); + logger.debug("Snowflake JDBC Version: {}", implementVersion); // parse implementation version major.minor.change if (implementVersion != null) { String[] versionBreakdown = implementVersion.split("\\."); - if (versionBreakdown != null && versionBreakdown.length == 3) + if (versionBreakdown.length == 3) { majorVersion = Integer.parseInt(versionBreakdown[0]); minorVersion = Integer.parseInt(versionBreakdown[1]); @@ -191,17 +191,15 @@ public class SnowflakeDriver implements Driver else throw new SnowflakeSQLException(SqlState.INTERNAL_ERROR, ErrorCode.INTERNAL_ERROR.getMessageCode(), - "Invalid implementation version: " + implementVersion); + "Invalid Snowflake JDBC Version: " + implementVersion); } else + { throw new SnowflakeSQLException(SqlState.INTERNAL_ERROR, ErrorCode.INTERNAL_ERROR.getMessageCode(), - "Null implementation version"); - - logger.debug("implementation_version = {}", implementVersion); - logger.debug("major version = {}", majorVersion); - logger.debug("minor version = {}", minorVersion); - logger.debug("change version = {}", changeVersion); + "Snowflake JDBC Version is not set. " + + "Ensure version.properties is included."); + } } catch (Exception ex) {
SNOW-<I>: Removed redundant version logging from JDBC
snowflakedb_snowflake-jdbc
train
da7b123d1ef694cbf2018239cf9a92aaf14ba4b1
diff --git a/CGRtools/algorithms/standardize.py b/CGRtools/algorithms/standardize.py index <HASH>..<HASH> 100644 --- a/CGRtools/algorithms/standardize.py +++ b/CGRtools/algorithms/standardize.py @@ -1329,8 +1329,7 @@ class StandardizeReaction: for mapping in bad_query.get_mapping(cgr, automorphism_filter=False): if not seen.isdisjoint(mapping.values()): # prevent matching same RC continue - mapping = {key : mapping.get(value, value) for key, value in fix.items()} - mapping.update(fix) + mapping = {mapping[n]: mapping[m] for n, m in fix.items()} reverse = {m: n for n, m in mapping.items()} for m in self.products: @@ -1355,16 +1354,17 @@ class StandardizeReaction: def load_remapping_rules(cls, reactions): """ Load AAM fixing rules. Required pairs of bad mapped and good mapped reactions. - Reactants in pairs should be fully equal (equal molecules and equal atom numbers). + Reactants in pairs should be fully equal (equal molecules and equal atom orders). Products should be equal but with different atom numbers. """ for bad, good in reactions: if str(bad) != str(good): raise ValueError('bad and good reaction should be equal') - - gc = (~good).augmented_substructure((~good).center_atoms, deep=1) - bc = (~bad).augmented_substructure((~bad).center_atoms, deep=1) - + + cgr_good, cgr_bad = ~good, ~bad + gc = cgr_good.augmented_substructure([x for l in good.centers_list for x in l], deep=1) + bc = cgr_bad.augmented_substructure([x for l in bad.centers_list for x in l], deep=1) + atoms = set(bc.atoms_numbers + gc.atoms_numbers) pr_g, pr_b = set(), set() @@ -1376,17 +1376,14 @@ class StandardizeReaction: strange_atoms = pr_b.difference(pr_g) atoms.update(strange_atoms) - bad_query = (~bad).substructure(atoms, as_query=True) - good_query = (~good).substructure(atoms.intersection(pr_g), as_query=True) + bad_query = cgr_bad.substructure(atoms.intersection(cgr_bad), as_query=True) + good_query = cgr_good.substructure(atoms.intersection(cgr_good), as_query=True) fix = {} rules = [] for mb, mg in zip(bad.products, good.products): - fx = min((m for m in - ({k: v for k, v in m.items() if k != v} - for m in mb.get_mapping(mg, automorphism_filter=False, optimize=False)) - if atoms.issuperset(m)), key=len) - fix.update(fx) + fix.update({k: v for k, v in zip(mb, mg) if k != v and k in atoms}) + valid = set(fix).difference(strange_atoms) rules.append((bad_query, good_query, fix, valid))
Fix mapping (#<I>) * updating query * updating query * mapping * update fix in rule * fix * cleanup
cimm-kzn_CGRtools
train
d1bb36256f2f86022884e6ee9e05b0536cb6384d
diff --git a/packages/discord.js/src/client/actions/MessageReactionAdd.js b/packages/discord.js/src/client/actions/MessageReactionAdd.js index <HASH>..<HASH> 100644 --- a/packages/discord.js/src/client/actions/MessageReactionAdd.js +++ b/packages/discord.js/src/client/actions/MessageReactionAdd.js @@ -23,7 +23,7 @@ class MessageReactionAdd extends Action { // Verify channel const channel = this.getChannel(data); - if (!channel || !channel.isTextBased()) return false; + if (!channel?.isTextBased()) return false; // Verify message const message = this.getMessage(data, channel); diff --git a/packages/discord.js/src/client/actions/MessageReactionRemove.js b/packages/discord.js/src/client/actions/MessageReactionRemove.js index <HASH>..<HASH> 100644 --- a/packages/discord.js/src/client/actions/MessageReactionRemove.js +++ b/packages/discord.js/src/client/actions/MessageReactionRemove.js @@ -20,7 +20,7 @@ class MessageReactionRemove extends Action { // Verify channel const channel = this.getChannel(data); - if (!channel || !channel.isTextBased()) return false; + if (!channel?.isTextBased()) return false; // Verify message const message = this.getMessage(data, channel); diff --git a/packages/discord.js/src/client/actions/MessageReactionRemoveAll.js b/packages/discord.js/src/client/actions/MessageReactionRemoveAll.js index <HASH>..<HASH> 100644 --- a/packages/discord.js/src/client/actions/MessageReactionRemoveAll.js +++ b/packages/discord.js/src/client/actions/MessageReactionRemoveAll.js @@ -7,7 +7,7 @@ class MessageReactionRemoveAll extends Action { handle(data) { // Verify channel const channel = this.getChannel(data); - if (!channel || !channel.isTextBased()) return false; + if (!channel?.isTextBased()) return false; // Verify message const message = this.getMessage(data, channel); diff --git a/packages/discord.js/src/client/actions/MessageReactionRemoveEmoji.js b/packages/discord.js/src/client/actions/MessageReactionRemoveEmoji.js index <HASH>..<HASH> 100644 --- a/packages/discord.js/src/client/actions/MessageReactionRemoveEmoji.js +++ b/packages/discord.js/src/client/actions/MessageReactionRemoveEmoji.js @@ -6,7 +6,7 @@ const Events = require('../../util/Events'); class MessageReactionRemoveEmoji extends Action { handle(data) { const channel = this.getChannel(data); - if (!channel || !channel.isTextBased()) return false; + if (!channel?.isTextBased()) return false; const message = this.getMessage(data, channel); if (!message) return false;
refactor(actions): use optional chaining (#<I>)
discordjs_discord.js
train
71c1a5a4b10d32132f4607588197c816764f8ad3
diff --git a/zappa/zappa.py b/zappa/zappa.py index <HASH>..<HASH> 100755 --- a/zappa/zappa.py +++ b/zappa/zappa.py @@ -337,6 +337,7 @@ class Zappa(object): # Trash the temp directory shutil.rmtree(temp_project_path) + shutil.rmtree(temp_package_path) # Warn if this is too large for Lambda. file_stats = os.stat(zip_path)
remove temp_package_path after uploading
Miserlou_Zappa
train
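The fix is simply symmetric cleanup: every scratch directory the packager creates must be removed, not just the first. A generic sketch of the pattern (paths hypothetical):

import shutil
import tempfile

temp_project_path = tempfile.mkdtemp()
temp_package_path = tempfile.mkdtemp()
try:
    pass  # build the zip from temp_project_path, stage it in temp_package_path
finally:
    # remove *both* scratch directories so repeated runs don't leak disk
    shutil.rmtree(temp_project_path, ignore_errors=True)
    shutil.rmtree(temp_package_path, ignore_errors=True)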
4f7dc3fbd6d52c9e26bdada6db98f8b75ec508f2
diff --git a/confidence/io.py b/confidence/io.py index <HASH>..<HASH> 100644 --- a/confidence/io.py +++ b/confidence/io.py @@ -79,7 +79,6 @@ def read_envvars(name: str, extension: typing.Optional[str] = None) -> Configura for var, value in environ.items() # TODO: document ignoring envvar_file if var.lower().startswith(prefix) and var.lower() != envvar_file} - # TODO: envvar values can only be str, how do we configure non-str values? if not values: return NotConfigured @@ -92,7 +91,8 @@ def read_envvars(name: str, extension: typing.Optional[str] = None) -> Configura # include the number of variables matched for debugging purposes logging.info(f'reading configuration from {len(values)} {prefix}* environment variables') - return Configuration({dotted(name): value for name, value in values.items()}) + # pass value to yaml.safe_load to align data type transformation with reading values from files + return Configuration({dotted(name): yaml.safe_load(value) for name, value in values.items()}) def read_envvar_file(name: str, extension: typing.Optional[str] = None) -> Configuration:
Pass values of environment variables to yaml.safe_load. Should parse NAME_NS_KEY=5 as an int, rather than leaving it a str.
HolmesNL_confidence
train
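Routing environment-variable strings through yaml.safe_load gives them the same scalar typing as values read from config files. A quick demonstration (assumes PyYAML is installed; variable names hypothetical):

import os

import yaml  # PyYAML

os.environ["MYAPP_PORT"] = "5"
os.environ["MYAPP_DEBUG"] = "true"
os.environ["MYAPP_NAME"] = "demo"

def read_envvar(name):
    # env values are always str; YAML scalar rules recover ints/bools/etc.
    return yaml.safe_load(os.environ[name])

assert read_envvar("MYAPP_PORT") == 5        # int, not "5"
assert read_envvar("MYAPP_DEBUG") is True    # bool
assert read_envvar("MYAPP_NAME") == "demo"   # plain strings pass through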
587a811d0941142329b4e6beb1a3cb1181a4d23c
diff --git a/cmd/containerd-shim/main_unix.go b/cmd/containerd-shim/main_unix.go index <HASH>..<HASH> 100644 --- a/cmd/containerd-shim/main_unix.go +++ b/cmd/containerd-shim/main_unix.go @@ -80,7 +80,7 @@ func main() { if err != nil { return err } - server := grpc.NewServer() + server := newServer() e, err := connectEvents(context.GlobalString("address")) if err != nil { return err @@ -182,9 +182,10 @@ func connect(address string, d func(string, time.Duration) (net.Conn, error)) (* gopts := []grpc.DialOption{ grpc.WithBlock(), grpc.WithInsecure(), - grpc.WithTimeout(100 * time.Second), + grpc.WithTimeout(60 * time.Second), grpc.WithDialer(d), grpc.FailOnNonTempDialError(true), + grpc.WithBackoffMaxDelay(3 * time.Second), } conn, err := grpc.Dial(dialAddress(address), gopts...) if err != nil { diff --git a/cmd/containerd-shim/shim_linux.go b/cmd/containerd-shim/shim_linux.go index <HASH>..<HASH> 100644 --- a/cmd/containerd-shim/shim_linux.go +++ b/cmd/containerd-shim/shim_linux.go @@ -33,7 +33,7 @@ func setupSignals() (chan os.Signal, error) { } func newServer() *grpc.Server { - return grpc.NewServer(grpc.Creds(NewUnixSocketCredentils(0, 0))) + return grpc.NewServer(grpc.Creds(NewUnixSocketCredentials(0, 0))) } type unixSocketCredentials struct { @@ -42,7 +42,7 @@ type unixSocketCredentials struct { serverName string } -func NewUnixSocketCredentils(uid, gid int) credentials.TransportCredentials { +func NewUnixSocketCredentials(uid, gid int) credentials.TransportCredentials { return &unixSocketCredentials{uid, gid, "locahost"} }
Check credentials when connecting to shim. NewUnixSocketCredentials was actually never invoked before.
containerd_containerd
train
962eeeeb13ba81546cf6d3eec86baed3e4084cbb
diff --git a/src/test/java/javax/util/streamex/StreamExTest.java b/src/test/java/javax/util/streamex/StreamExTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/javax/util/streamex/StreamExTest.java +++ b/src/test/java/javax/util/streamex/StreamExTest.java @@ -39,6 +39,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.Stream; import org.junit.Test;
StreamExTest: compilation error fixed
amaembo_streamex
train
7d7f04005c25df791307f217e1f0610457d14ee8
diff --git a/src/python/test/test_dx_app_wizard_and_run_app_locally.py b/src/python/test/test_dx_app_wizard_and_run_app_locally.py index <HASH>..<HASH> 100755 --- a/src/python/test/test_dx_app_wizard_and_run_app_locally.py +++ b/src/python/test/test_dx_app_wizard_and_run_app_locally.py @@ -151,6 +151,8 @@ def create_app_dir_with_dxapp_json(dxapp_json, language): wizard.sendline() wizard.expect("Will this app need access to the parent project?") wizard.sendline() + wizard.expect("Choose an instance type for your app") + wizard.sendline() wizard.expect("App directory created") wizard.close()
Update test for a8d<I>
dnanexus_dx-toolkit
train
9751aa31453f6636f9ebfb2faf08bdf860601f6a
diff --git a/aws/resource_aws_ssm_maintenance_window_target_test.go b/aws/resource_aws_ssm_maintenance_window_target_test.go index <HASH>..<HASH> 100644 --- a/aws/resource_aws_ssm_maintenance_window_target_test.go +++ b/aws/resource_aws_ssm_maintenance_window_target_test.go @@ -18,6 +18,7 @@ func TestAccAWSSSMMaintenanceWindowTarget_basic(t *testing.T) { resourceName := "aws_ssm_maintenance_window_target.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, ssm.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSSMMaintenanceWindowTargetDestroy, Steps: []resource.TestStep{ @@ -53,6 +54,7 @@ func TestAccAWSSSMMaintenanceWindowTarget_noNameOrDescription(t *testing.T) { resourceName := "aws_ssm_maintenance_window_target.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, ssm.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSSMMaintenanceWindowTargetDestroy, Steps: []resource.TestStep{ @@ -83,6 +85,7 @@ func TestAccAWSSSMMaintenanceWindowTarget_validation(t *testing.T) { name := acctest.RandString(10) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, ssm.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSSMMaintenanceWindowTargetDestroy, Steps: []resource.TestStep{ @@ -108,6 +111,7 @@ func TestAccAWSSSMMaintenanceWindowTarget_update(t *testing.T) { resourceName := "aws_ssm_maintenance_window_target.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, ssm.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSSMMaintenanceWindowTargetDestroy, Steps: []resource.TestStep{ @@ -163,6 +167,7 @@ func TestAccAWSSSMMaintenanceWindowTarget_resourceGroup(t *testing.T) { resourceName := "aws_ssm_maintenance_window_target.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, ssm.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSSMMaintenanceWindowTargetDestroy, Steps: []resource.TestStep{ @@ -197,6 +202,7 @@ func TestAccAWSSSMMaintenanceWindowTarget_disappears(t *testing.T) { resourceName := "aws_ssm_maintenance_window_target.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, ssm.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSSMMaintenanceWindowTargetDestroy, Steps: []resource.TestStep{ @@ -218,6 +224,7 @@ func TestAccAWSSSMMaintenanceWindowTarget_disappears_window(t *testing.T) { resourceName := "aws_ssm_maintenance_window_target.test" resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, + ErrorCheck: testAccErrorCheck(t, ssm.EndpointsID), Providers: testAccProviders, CheckDestroy: testAccCheckAWSSSMMaintenanceWindowTargetDestroy, Steps: []resource.TestStep{
tests/r/ssm_maintenance_window_target: Add ErrorCheck
terraform-providers_terraform-provider-aws
train
4416804be665efae6d06712310aae1d701ce9155
diff --git a/Entity/ExtendedFieldRepositoryTrait.php b/Entity/ExtendedFieldRepositoryTrait.php index <HASH>..<HASH> 100644 --- a/Entity/ExtendedFieldRepositoryTrait.php +++ b/Entity/ExtendedFieldRepositoryTrait.php @@ -268,6 +268,9 @@ EOSQL; // Now to update extended fields if there were any to be updated. if (!empty($extendedFields)) { $insertUpdates = $deletions = []; + $leadId = $entity->getId(); + /** @var \Doctrine\DBAL\Connection $connection */ + $connection = $this->getEntityManager()->getConnection(); foreach ($extendedFields as $extendedField) { if ( !isset($changes['fields']) @@ -289,13 +292,14 @@ EOSQL; null !== $changes['fields'][$extendedField['alias']][0] && null === $changes['fields'][$extendedField['alias']][1] ) { - // Removed an existing value, mark for deletion. - if (!isset($deletions[$tableName])) { - $deletions[$tableName] = []; - } - if (!isset($deletions[$tableName][$extendedField['id']])) { - $deletions[$tableName][$extendedField['id']] = null; - } + // Removed an existing value. + $connection->delete( + $tableName, + [ + 'lead_id' => $leadId, + 'lead_field_id' => $extendedField['id'], + ] + ); } else { // Mark for insert/update. if (!isset($insertUpdates[$tableName])) { @@ -306,12 +310,9 @@ EOSQL; } } } - $leadId = $entity->getId(); - /** @var \Doctrine\DBAL\Connection $connection */ - $connection = $this->getEntityManager()->getConnection(); - // Handle inserts/updates. + // Handle inserts/updates in bulk by table. if ($insertUpdates) { - $values = $bindings = []; + $values = $bindings = []; foreach ($insertUpdates as $tableName => $leadField) { foreach ($leadField as $leadFieldId => $value) { $values[] = $leadId.', '.$leadFieldId.', :'.$tableName.$leadFieldId; @@ -327,20 +328,6 @@ EOSQL; $stmt->execute(); } } - // Handle deletions (includes nullification). - if ($deletions) { - foreach ($deletions as $tableName => $leadField) { - foreach ($leadField as $leadFieldId => $value) { - $connection->delete( - $tableName, - [ - 'lead_id' => $leadId, - 'lead_field_id' => $leadFieldId, - ] - ); - } - } - } } $this->postSaveEntity($entity);
[ENG-<I>] Cleanup.
TheDMSGroup_mautic-extended-field
train
c255df347769e2e95261037c1c22b89659d4741b
diff --git a/lib/Executor.php b/lib/Executor.php index <HASH>..<HASH> 100644 --- a/lib/Executor.php +++ b/lib/Executor.php @@ -8,7 +8,7 @@ interface Executor { /** * @param string $sql * - * @return \Amp\Promise<\Amp\Postgres\Result> + * @return \Amp\Promise<\Amp\Postgres\CommandResult|\Amp\Postgres\TupleResult> * * @throws \Amp\Postgres\FailureException */ @@ -18,7 +18,7 @@ interface Executor { * @param string $sql * @param mixed ...$params * - * @return \Amp\Promise<\Amp\Postgres\Result> + * @return \Amp\Promise<\Amp\Postgres\CommandResult|\Amp\Postgres\TupleResult> * * @throws \Amp\Postgres\FailureException */ diff --git a/lib/Statement.php b/lib/Statement.php index <HASH>..<HASH> 100644 --- a/lib/Statement.php +++ b/lib/Statement.php @@ -8,7 +8,7 @@ interface Statement { /** * @param mixed ...$params * - * @return \Amp\Promise<\Amp\Postgres\Result> + * @return \Amp\Promise<\Amp\Postgres\CommandResult|\Amp\Postgres\TupleResult> */ public function execute(...$params): Promise; }
Update promise resolution types in doc blocks
amphp_postgres
train
b9c550a776000c2117acd7bc8b15ff0712a5771b
diff --git a/core.js b/core.js index <HASH>..<HASH> 100644 --- a/core.js +++ b/core.js @@ -4,6 +4,7 @@ var defaults = require('lodash.defaults'); var includes = require('lodash.includes'); var assign = require('lodash.assign'); var qs = require('qs'); +var mediaType = require('media-type'); module.exports = function (xhr) { @@ -127,7 +128,9 @@ module.exports = function (xhr) { } } else { // Parse body as JSON - if (typeof body === 'string' && (!params.headers.accept || params.headers.accept.indexOf('application/json')===0)) { + var accept = mediaType.fromString(params.headers.accept); + var parseJson = accept.isValid() && accept.type === 'application' && (accept.subtype === 'json' || accept.suffix === 'json'); + if (typeof body === 'string' && (!params.headers.accept || parseJson)) { try { body = JSON.parse(body); } catch (err) { diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -21,6 +21,7 @@ "lodash.defaults": "^3.1.0", "lodash.includes": "^3.1.0", "lodash.result": "^3.0.0", + "media-type": "^0.3.0", "qs": "^4.0.0", "request": "^2.55.0", "xhr": "^2.0.3" diff --git a/test/unit.js b/test/unit.js index <HASH>..<HASH> 100644 --- a/test/unit.js +++ b/test/unit.js @@ -326,3 +326,29 @@ test('Call "always" after error callback', function (t) { }); }); +test('should parse json for different media types', function (t) { + t.plan(4); + + var jsonMediaTypes = [ + '', // Test with no accept header + 'application/hal+json', + 'application/json+hal', + 'application/json' + ]; + + jsonMediaTypes.forEach(function (mediaType) { + sync('read', modelStub(), { + headers: { + accept: 'application/hal+json' + }, + success: function (resp, type, error) { + t.deepEqual(resp.good, 'json', (mediaType || 'no type') + ' is parsed as json'); + }, + xhrImplementation: function (ajaxSettings, callback) { + callback(null, {}, '{"good": "json"}'); + return {}; + } + }); + }); +}); +
parse response as json if accept subtype or suffix is json
AmpersandJS_ampersand-sync
train
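Instead of a brittle prefix check on the Accept header, the fix parses the media type so suffixed JSON types such as application/hal+json are recognised too. A rough Python equivalent of that predicate (hand-rolled parser, not the media-type package used in the diff):

def is_json_media_type(value):
    mediatype = value.split(";")[0].strip().lower()
    if "/" not in mediatype:
        return False  # not a valid type/subtype pair
    maintype, subtype = mediatype.split("/", 1)
    return maintype == "application" and (
        subtype == "json" or subtype.endswith("+json"))

assert is_json_media_type("application/json")
assert is_json_media_type("application/hal+json")
assert is_json_media_type("application/json; charset=utf-8")
assert not is_json_media_type("text/html")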
51f891df4e5a6fb34a5328457513c1420e282a36
diff --git a/generators/app/index.js b/generators/app/index.js index <HASH>..<HASH> 100644 --- a/generators/app/index.js +++ b/generators/app/index.js @@ -12,7 +12,7 @@ module.exports = fountain.Base.extend({ this.option('sample', {type: Boolean, required: false}); - const prompts = [{ + const prompts = { when: !this.options.sample, type: 'list', name: 'sample', @@ -21,7 +21,11 @@ module.exports = fountain.Base.extend({ {name: 'A working landing page', value: 'techs'}, {name: 'Just a Hello World', value: 'hello'} ] - }]; + }; + + if (this.props.js === 'babel' && this.props.modules === 'webpack') { + prompts.choices.push({name: 'Redux TodoApp', value: 'todoApp'}); + } this.prompt(prompts, props => { Object.assign(this.props, this.options, props);
Add "Redux TodoApp in prompt for babel + webpack
FountainJS_generator-fountain-react
train
3f20ed476db346abc74b44cd5fb6ced5ed342808
diff --git a/core/src/main/java/me/prettyprint/cassandra/model/HSuperColumnImpl.java b/core/src/main/java/me/prettyprint/cassandra/model/HSuperColumnImpl.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/me/prettyprint/cassandra/model/HSuperColumnImpl.java +++ b/core/src/main/java/me/prettyprint/cassandra/model/HSuperColumnImpl.java @@ -55,14 +55,14 @@ public final class HSuperColumnImpl<SN,N,V> implements HSuperColumn<SN, N, V> { public HSuperColumnImpl(SuperColumn thriftSuperColumn, Serializer<SN> sNameSerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) { this(sNameSerializer, nameSerializer, valueSerializer); - noneNull(thriftSuperColumn, sNameSerializer, nameSerializer, valueSerializer); + noneNull(thriftSuperColumn, sNameSerializer, nameSerializer); superName = sNameSerializer.fromByteBuffer(ByteBuffer.wrap(thriftSuperColumn.getName())); columns = fromThriftColumns(thriftSuperColumn.getColumns()); } /*package*/ HSuperColumnImpl(Serializer<SN> sNameSerializer, Serializer<N> nameSerializer, Serializer<V> valueSerializer) { - noneNull(sNameSerializer, nameSerializer, valueSerializer); + noneNull(sNameSerializer, nameSerializer); this.superNameSerializer = sNameSerializer; this.nameSerializer = nameSerializer; this.valueSerializer = valueSerializer; diff --git a/core/src/main/java/me/prettyprint/cassandra/service/template/SuperCfUpdater.java b/core/src/main/java/me/prettyprint/cassandra/service/template/SuperCfUpdater.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/me/prettyprint/cassandra/service/template/SuperCfUpdater.java +++ b/core/src/main/java/me/prettyprint/cassandra/service/template/SuperCfUpdater.java @@ -21,6 +21,7 @@ import me.prettyprint.cassandra.serializers.UUIDSerializer; import me.prettyprint.hector.api.ColumnFactory; import me.prettyprint.hector.api.Serializer; import me.prettyprint.hector.api.beans.HColumn; +import me.prettyprint.hector.api.factory.HFactory; /** * This provides an interface of updating a specified row, most likely with the @@ -86,8 +87,9 @@ public class SuperCfUpdater<K,SN,N> extends AbstractTemplateUpdater<K, N> { void updateInternal() { // HSuperColumnImpl needs a refactor, this construction is lame. // the value serializer is not used in HSuperColumnImpl, so this is safe for name + // TODO need to mod to work with 0 timestamp HSuperColumnImpl<SN, N, ?> column = new HSuperColumnImpl(getCurrentSuperColumn(), subColumns, - template.getEffectiveClock(), template.getTopSerializer(), template.getSubSerializer(), TypeInferringSerializer.get()); + 0, template.getTopSerializer(), template.getSubSerializer(), TypeInferringSerializer.get()); template.getMutator().addInsertion(getCurrentKey(), template.getColumnFamily(), column); }
changes for smoother supercol integration w. other protocols
hector-client_hector
train
826ce13ec7360c087a58103293679831940895a0
diff --git a/littlechef/chef.py b/littlechef/chef.py index <HASH>..<HASH> 100644 --- a/littlechef/chef.py +++ b/littlechef/chef.py @@ -38,12 +38,12 @@ def _save_config(node): it also saves to tmp_node.json """ filepath = os.path.join("nodes/", env.host_string + ".json") - files = ['tmp_node.json'] + files_to_create = ['tmp_node.json'] if not os.path.exists(filepath): # Only save to nodes/ if there is not already a file print "Saving configuration to {0}".format(filepath) - files.append(filepath) - for node_file in files: + files_to_create.append(filepath) + for node_file in files_to_create: with open(node_file, 'w') as f: f.write(json.dumps(node, indent=4)) f.write('\n') @@ -124,12 +124,11 @@ def _synchronize_node(cookbooks): path = os.path.join(cookbook_path, cookbook) if os.path.exists(path): cookbooks_by_path[path] = cookbook - - print "Uploading cookbooks... ({0})".format(", ".join(c for c in cookbooks)) - _upload_and_unpack([p for p in cookbooks_by_path.keys()]) - - print "Uploading roles..." - _upload_and_unpack(['roles']) + print "Uploading roles and cookbooks:" + print " ({0})".format(", ".join(c for c in cookbooks)) + to_upload = [p for p in cookbooks_by_path.keys()] + to_upload.append('roles') + _upload_and_unpack(to_upload) def _configure_node(configfile): diff --git a/littlechef/solo.py b/littlechef/solo.py index <HASH>..<HASH> 100644 --- a/littlechef/solo.py +++ b/littlechef/solo.py @@ -17,6 +17,7 @@ from fabric.api import * from fabric.contrib.files import append, exists from fabric.utils import abort +import littlechef from lib import credentials @@ -38,22 +39,22 @@ def install(distro_type, distro, gems): abort('wrong distro type: {0}'.format(distro_type)) -def configure(node_work_path, cookbook_paths): +def configure(): """Deploy chef-solo specific files.""" with credentials(): - sudo('mkdir -p {0}'.format(node_work_path)) - sudo('mkdir -p {0}/cache'.format(node_work_path)) + sudo('mkdir -p {0}'.format(littlechef.node_work_path)) + sudo('mkdir -p {0}/cache'.format(littlechef.node_work_path)) sudo('umask 0377; touch solo.rb') append('solo.rb', 'file_cache_path "{0}/cache"'.format( - node_work_path), use_sudo=True) - reversed_cookbook_paths = cookbook_paths[:] + littlechef.node_work_path), use_sudo=True) + reversed_cookbook_paths = littlechef.cookbook_paths[:] reversed_cookbook_paths.reverse() - cookbook_paths_line = 'cookbook_path [{0}]'.format( - ', '.join(['''"{0}/{1}"'''.format(node_work_path, x) \ + cookbook_paths_line = 'cookbook_path [{0}]'.format(', '.join( + ['''"{0}/{1}"'''.format(littlechef.node_work_path, x) \ for x in reversed_cookbook_paths])) append('solo.rb', cookbook_paths_line, use_sudo=True) - append('solo.rb', 'role_path "{0}/roles"'.format(node_work_path), - use_sudo=True) + append('solo.rb', 'role_path "{0}/roles"'.format( + littlechef.node_work_path), use_sudo=True) sudo('mkdir -p /etc/chef') sudo('mv solo.rb /etc/chef/')
Significantly speed up node synching by uploading roles and cookbooks in a single zip
tobami_littlechef
train
88a150cee1c3971f7aee01d7fc6650ae6a7f4588
diff --git a/agent/consul/leader.go b/agent/consul/leader.go index <HASH>..<HASH> 100644 --- a/agent/consul/leader.go +++ b/agent/consul/leader.go @@ -328,25 +328,6 @@ func (s *Server) getOrCreateAutopilotConfig() (*structs.AutopilotConfig, bool) { return config, true } -// reconcile is used to reconcile the differences between Serf -// membership and what is reflected in our strongly consistent store. -// Mainly we need to ensure all live nodes are registered, all failed -// nodes are marked as such, and all left nodes are de-registered. -func (s *Server) reconcile() (err error) { - defer metrics.MeasureSince([]string{"consul", "leader", "reconcile"}, time.Now()) - members := s.serfLAN.Members() - knownMembers := make(map[string]struct{}) - for _, member := range members { - if err := s.reconcileMember(member); err != nil { - return err - } - knownMembers[member.Name] = struct{}{} - } - - // Reconcile any members that have been reaped while we were not the leader - return s.reconcileReaped(knownMembers) -} - // reconcileReaped is used to reconcile nodes that have failed and been reaped // from Serf but remain in the catalog. This is done by looking for SerfCheckID // in a critical state that does not correspond to a known Serf member. We generate diff --git a/agent/consul/segment_stub.go b/agent/consul/segment_stub.go index <HASH>..<HASH> 100644 --- a/agent/consul/segment_stub.go +++ b/agent/consul/segment_stub.go @@ -4,7 +4,9 @@ package consul import ( "net" + "time" + "github.com/armon/go-metrics" "github.com/hashicorp/consul/agent/structs" "github.com/hashicorp/serf/serf" ) @@ -51,3 +53,23 @@ func (s *Server) setupSegments(config *Config, port int, rpcListeners map[string // floodSegments is a NOP in the OSS version of Consul. func (s *Server) floodSegments(config *Config) { } + +// reconcile is used to reconcile the differences between Serf membership and +// what is reflected in our strongly consistent store. Mainly we need to ensure +// all live nodes are registered, all failed nodes are marked as such, and all +// left nodes are de-registered. +func (s *Server) reconcile() (err error) { + defer metrics.MeasureSince([]string{"consul", "leader", "reconcile"}, time.Now()) + members := s.serfLAN.Members() + knownMembers := make(map[string]struct{}) + for _, member := range members { + if err := s.reconcileMember(member); err != nil { + return err + } + knownMembers[member.Name] = struct{}{} + } + + // Reconcile any members that have been reaped while we were not the + // leader. + return s.reconcileReaped(knownMembers) +}
Moves reconcile loop into segment stub.
hashicorp_consul
train
b34bb794da1a7c7a47042b3089c12437647ac434
diff --git a/huey/tests/base.py b/huey/tests/base.py index <HASH>..<HASH> 100644 --- a/huey/tests/base.py +++ b/huey/tests/base.py @@ -9,6 +9,7 @@ from huey.api import Huey from huey.consumer import Consumer from huey.registry import registry from huey.storage import BaseStorage +from huey.storage import RedisStorage def b(s): @@ -21,6 +22,17 @@ class DummyHuey(Huey): def get_storage(self, **kwargs): return BaseStorage() +class BrokenRedisStorage(RedisStorage): + def dequeue(self): + raise ValueError('broken redis dequeue') + +broken_redis_storage = BrokenRedisStorage() + +class BrokenHuey(Huey): + def get_storage(self): + return broken_redis_storage + + dummy_huey = DummyHuey() test_huey = RedisHuey('testing', blocking=False, read_timeout=0.1) diff --git a/huey/tests/test_consumer.py b/huey/tests/test_consumer.py index <HASH>..<HASH> 100644 --- a/huey/tests/test_consumer.py +++ b/huey/tests/test_consumer.py @@ -7,6 +7,7 @@ from huey.consumer import Consumer from huey.consumer import Scheduler from huey.consumer import Worker from huey.tests.base import b +from huey.tests.base import BrokenHuey from huey.tests.base import CaptureLogs from huey.tests.base import HueyTestCase from huey.tests.base import test_huey @@ -86,6 +87,25 @@ class TestConsumerAPIs(HueyTestCase): def get_periodic_tasks(self): return [hourly_task.task_class()] + def test_dequeue_errors(self): + huey = BrokenHuey() + consumer = Consumer(huey, max_delay=0.1, workers=2, + worker_type='thread') + + worker = consumer._create_worker() + state = {} + + @huey.task() + def modify_broken(k, v): + state[k] = v + + with CaptureLogs() as capture: + res = modify_broken('k', 'v') + worker.loop() + + self.assertEqual(capture.messages, ['Error reading from queue']) + self.assertEqual(state, {}) + def test_scheduler_interval(self): consumer = self.get_consumer(scheduler_interval=0.1) self.assertEqual(consumer.scheduler_interval, 1)
Add test for broken dequeue behavior. Refs #<I>.
coleifer_huey
train
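The huey test above gets its failure by subclassing the real storage and overriding only dequeue to raise. The same stub-a-failure pattern in dependency-free Python — the log message mirrors the diff, while the class and function names are made up for the sketch:

class QueueStorage:
    # Minimal in-memory stand-in for a real backend.
    def __init__(self):
        self._items = []

    def enqueue(self, item):
        self._items.append(item)

    def dequeue(self):
        return self._items.pop(0) if self._items else None

class BrokenStorage(QueueStorage):
    # Override just the method under test so every read fails.
    def dequeue(self):
        raise ValueError("broken dequeue")

def worker_loop(storage, handler, log):
    # A resilient worker logs read errors instead of crashing the loop.
    try:
        item = storage.dequeue()
    except Exception:
        log.append("Error reading from queue")
        return
    if item is not None:
        handler(item)

log, state = [], {}
broken = BrokenStorage()
broken.enqueue(("k", "v"))
worker_loop(broken, lambda kv: state.update([kv]), log)
assert log == ["Error reading from queue"] and state == {}

The closing asserts mirror the test's expectations: the error is logged and the task body never runs.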
a4cd737ebb14b8f53d96119adf28d3055d4b2a78
diff --git a/lib/capybara/driver/node.rb b/lib/capybara/driver/node.rb index <HASH>..<HASH> 100644 --- a/lib/capybara/driver/node.rb +++ b/lib/capybara/driver/node.rb @@ -87,7 +87,7 @@ module Capybara def inspect %(#<#{self.class} tag="#{tag_name}" path="#{path}">) - rescue NotSupportedByDriverError, 'Capybara::Driver::Node#inspect' + rescue NotSupportedByDriverError %(#<#{self.class} tag="#{tag_name}">) end diff --git a/lib/capybara/node/element.rb b/lib/capybara/node/element.rb index <HASH>..<HASH> 100644 --- a/lib/capybara/node/element.rb +++ b/lib/capybara/node/element.rb @@ -240,7 +240,7 @@ module Capybara def inspect %(#<Capybara::Element tag="#{tag_name}" path="#{path}">) - rescue NotSupportedByDriverError, 'Capybara::Node::Element#inspect' + rescue NotSupportedByDriverError %(#<Capybara::Element tag="#{tag_name}">) end end
Use Ruby 2.x compatible rescue clauses. Ruby 2.x doesn't support non-class/module arguments to rescue clauses. Ruby <I> only supported it due to what is now considered a regression: <URL>
teamcapybara_capybara
train
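The capybara fix above trims the rescue clause down to the exception class, because Ruby 2.x rejects non-class/module arguments there. Python enforces the same rule in except clauses, so the fallback-inspect idiom ports directly; the Node class below is a made-up stub, not a capybara API:

class NotSupportedByDriverError(Exception):
    # Raised when a driver cannot compute a node's path.
    pass

class Node:
    def __init__(self, tag, path=None):
        self._tag, self._path = tag, path

    def tag_name(self):
        return self._tag

    def path(self):
        if self._path is None:
            raise NotSupportedByDriverError("path not supported")
        return self._path

def describe(node):
    # Only exception classes belong in an except clause; a string there
    # fails with TypeError the moment an exception is actually caught.
    try:
        return '#<Element tag="%s" path="%s">' % (node.tag_name(), node.path())
    except NotSupportedByDriverError:
        return '#<Element tag="%s">' % node.tag_name()

assert describe(Node("a", "/html/body/a")) == '#<Element tag="a" path="/html/body/a">'
assert describe(Node("a")) == '#<Element tag="a">'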
a9458add5080028aa8792cedae6774a978758e8e
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -3,7 +3,6 @@ require 'parsi-date' RSpec.configure do |config| config.treat_symbols_as_metadata_keys_with_true_values = true config.run_all_when_everything_filtered = true - config.filter_run :focus # Run specs in random order to surface order dependencies. If you find an # order dependency and want to debug it, you can fix the order by providing
removed rspec's unnecessary filter_run config; it will be ignored anyway: All examples were filtered out; ignoring {:focus=>true}
hzamani_parsi-date
train
fbbea51d2018a5a069fa227752fbc97fafed16b6
diff --git a/tests/Doctrine/SkeletonMapper/Tests/Mapping/ClassMetadataTest.php b/tests/Doctrine/SkeletonMapper/Tests/Mapping/ClassMetadataTest.php index <HASH>..<HASH> 100644 --- a/tests/Doctrine/SkeletonMapper/Tests/Mapping/ClassMetadataTest.php +++ b/tests/Doctrine/SkeletonMapper/Tests/Mapping/ClassMetadataTest.php @@ -13,6 +13,7 @@ use PHPUnit_Framework_TestCase; */ class ClassMetadataTest extends PHPUnit_Framework_TestCase { + private $class; protected function setUp() @@ -130,6 +131,21 @@ class ClassMetadataTest extends PHPUnit_Framework_TestCase $this->assertTrue($this->class->hasAssociation('groups')); } + public function testAddingAssociationMappingDoesNotAddFieldMapping() + { + $this->assertFalse($this->class->hasAssociation('groups')); + + $this->class->mapField( + array( + 'fieldName' => 'groups', + 'targetObject' => 'Test', + 'type' => 'many', + ) + ); + + $this->assertFalse($this->class->hasField('groups')); + } + public function testIsSingleValuedAssociation() { $this->assertFalse($this->class->isSingleValuedAssociation('groups'));
Add test to ensure adding association mapping to ClassMetadata does not also add field mapping
doctrine_skeleton-mapper
train
3b3cfc66afe05d9c187b717541c41618df9da2e3
diff --git a/auto_lens/profile.py b/auto_lens/profile.py index <HASH>..<HASH> 100644 --- a/auto_lens/profile.py +++ b/auto_lens/profile.py @@ -433,6 +433,7 @@ class LightProfile(object): """ raise AssertionError("Flux at coordinates should be overridden") + # TODO: find a good test for subgridding of a light profile @iterative_subgrid def flux_at_coordinates_iteratively_subgridded(self, coordinates): return self.flux_at_coordinates(coordinates)
TODO: find a good test for subgridding of a light profile
Jammy2211_PyAutoLens
train
0fcd4429eeb5e114af28f1e330cb8448be021c84
diff --git a/core/src/main/java/org/primefaces/extensions/converter/SanitizingConverter.java b/core/src/main/java/org/primefaces/extensions/converter/SanitizingConverter.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/primefaces/extensions/converter/SanitizingConverter.java +++ b/core/src/main/java/org/primefaces/extensions/converter/SanitizingConverter.java @@ -69,7 +69,7 @@ public class SanitizingConverter implements Converter<Object>, Serializable { } String result = getPolicy().sanitize(value); if (isDecodeHtml()) { - result = org.owasp.html.Encoding.decodeHtml(result); + result = org.owasp.html.Encoding.decodeHtml(result, false); } return result.trim(); }
Call non-deprecated method
primefaces-extensions_core
train
f427e750490b486944cc9be3c99834ad5cf78b57
diff --git a/src/transformers/models/xlm/modeling_xlm.py b/src/transformers/models/xlm/modeling_xlm.py index <HASH>..<HASH> 100755 --- a/src/transformers/models/xlm/modeling_xlm.py +++ b/src/transformers/models/xlm/modeling_xlm.py @@ -659,9 +659,7 @@ class XLMPredLayer(nn.Module): scores = self.proj(x) outputs = (scores,) + outputs if y is not None: - loss = nn.functional.cross_entropy( - scores.view(-1, self.n_words), y.view(-1), reduction="elementwise_mean" - ) + loss = nn.functional.cross_entropy(scores.view(-1, self.n_words), y.view(-1), reduction="mean") outputs = (loss,) + outputs else: scores = self.proj.log_prob(x)
use mean instead of elementwise_mean in XLMPredLayer (#<I>): * use mean instead of elementwise_mean; * make style
huggingface_pytorch-pretrained-BERT
train
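The XLM patch above replaces the deprecated elementwise_mean reduction with mean, which averages the per-example losses. A standalone PyTorch check of that equivalence — shapes are arbitrary, and this assumes torch >= 1.0, where the "mean" and "none" reduction strings exist:

import torch
import torch.nn.functional as F

n_words, batch = 11, 4
scores = torch.randn(batch, n_words)     # unnormalized logits
y = torch.randint(0, n_words, (batch,))  # gold token ids

# reduction="mean" averages the per-example losses -- the behavior the
# old "elementwise_mean" string used to select.
loss = F.cross_entropy(scores.view(-1, n_words), y.view(-1), reduction="mean")
per_example = F.cross_entropy(scores, y, reduction="none")
assert torch.allclose(loss, per_example.mean())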
2f97b71c10b0c83046389f4b31208a25bb956f72
diff --git a/libre/settings.py b/libre/settings.py index <HASH>..<HASH> 100644 --- a/libre/settings.py +++ b/libre/settings.py @@ -210,6 +210,9 @@ JOB_PROCESSING_MODE_IMMEDIATE = False # LQL LQL_DELIMITER = '_' +# Crispy forms +CRISPY_TEMPLATE_PACK = 'bootstrap' + # Overwrite defaults with local settings try: from settings_local import *
Add required crispy form settings for new version
commonwealth-of-puerto-rico_libre
train
e0f864b332636dd5c45517a7a441eb94aafd6237
diff --git a/warehouse/tasks.py b/warehouse/tasks.py index <HASH>..<HASH> 100644 --- a/warehouse/tasks.py +++ b/warehouse/tasks.py @@ -16,7 +16,7 @@ import celery.app.backends # We need to trick Celery into supporting rediss:// URLs which is how redis-py # signals that you should use Redis with TLS. -celery.app.backends.BACKEND_ALIASES["rediss"] = "warehouse.celery:TLSRedisBackend" # noqa +celery.app.backends.BACKEND_ALIASES["rediss"] = "warehouse.tasks:TLSRedisBackend" # noqa import celery import celery.backends.redis
Use the correct import (#<I>)
pypa_warehouse
train
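The warehouse fix above repoints a backend alias at the module that actually defines the class; the alias value is a "module.path:Attribute" string, so it silently breaks whenever the class moves. A hedged sketch of that resolution convention using only the standard library — this shows the shape of the mechanism, not Celery's actual loader:

import json
from importlib import import_module

# scheme -> "module.path:AttributeName". The module on the left of the
# colon must really define the attribute, or lookup fails at resolve
# time -- exactly the bug class the commit fixes.
BACKEND_ALIASES = {
    "jsonenc": "json:JSONEncoder",  # stdlib stand-in for a real backend
}

def resolve_backend(scheme):
    module_name, _, attr = BACKEND_ALIASES[scheme].partition(":")
    return getattr(import_module(module_name), attr)

assert resolve_backend("jsonenc") is json.JSONEncoder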
d4040ae13e90e0a4e43c0669d6ce33ecd23cc0eb
diff --git a/tensor2tensor/data_generators/multi_problem.py b/tensor2tensor/data_generators/multi_problem.py index <HASH>..<HASH> 100644 --- a/tensor2tensor/data_generators/multi_problem.py +++ b/tensor2tensor/data_generators/multi_problem.py @@ -27,6 +27,12 @@ from tensor2tensor.utils import registry import tensorflow as tf +class MixingSchedule(object): + """Available schedules for mixing datasets.""" + EXPONENTIAL = "exponential" + CONSTANT = "constant" + + class MultiProblem(problem.Problem): """MultiProblem base class.""" @@ -82,6 +88,10 @@ class MultiProblem(problem.Problem): return self._hparams + @property + def mixing_schedule(self): + return MixingSchedule.EXPONENTIAL + def flatten_zip(self, *args): """A list of examples to a dataset containing mixed examples. @@ -158,15 +168,37 @@ class MultiProblem(problem.Problem): self.get_hparams() if is_training: + problem_step = tf.get_variable("problem_step", + shape=[], + dtype=tf.float32, + initializer=tf.zeros_initializer(), + trainable=False, + use_resource=True) dataset_iterators = [d.make_one_shot_iterator() for d in datasets] def get_next_from_dataset(dataset_iter): return dataset_iter.get_next() + def get_exp_sched_prob(): + with tf.control_dependencies([problem_step.assign_add(1)]): + # TODO(urvashik): Make 5e-8 a parameter. + # In the current setup, with about 100 examples per batch on average, + # the model converges to 50-50 mixing by ~140k problem steps. + return tf.minimum(1. - tf.exp(-5e-8 * problem_step), 0.5) + + def get_const_sched_prob(): + return 0.5 + def mix_data(example): del example + if self.mixing_schedule == MixingSchedule.EXPONENTIAL: + prob = get_exp_sched_prob() + elif self.mixing_schedule == MixingSchedule.CONSTANT: + prob = get_const_sched_prob() + else: + raise ValueError("Unknown schedule %s" % str(self.mixing_schedule)) return tf.data.Dataset.from_tensors(tf.cond( - tf.less(tf.random_uniform([]), 0.5), + tf.greater(tf.random_uniform([]), prob), lambda d=dataset_iterators[0]: get_next_from_dataset(d), lambda d=dataset_iterators[1]: get_next_from_dataset(d) ))
Exponential schedule for mixing datasets. PiperOrigin-RevId: <I>
tensorflow_tensor2tensor
train
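The mixing schedule above ramps the probability of drawing from the second dataset as min(1 - exp(-k * step), 0.5), with k = 5e-8 and the step incremented once per example. The curve is easy to sanity-check without TensorFlow; the diff's ~140k-steps remark comes with a ~100-examples-per-batch caveat, which lines up with the ln(2)/k ≈ 13.9M per-example steps where the cap kicks in:

import math

K = 5e-8  # decay constant from the diff

def mixing_prob(problem_step):
    # Probability of sampling the second dataset: exponential ramp
    # toward, and capped at, an even 50/50 split.
    return min(1.0 - math.exp(-K * problem_step), 0.5)

assert mixing_prob(0) == 0.0
# The ramp reaches its 0.5 cap at ln(2)/K per-example steps (~13.9M).
assert abs(mixing_prob(math.log(2) / K) - 0.5) < 1e-9

The TF version draws a uniform sample per example and compares it against this probability to decide which dataset iterator to advance.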
bebe474afe6aa007d67081f1cc71e30ae5119ebb
diff --git a/raft.go b/raft.go index <HASH>..<HASH> 100644 --- a/raft.go +++ b/raft.go @@ -1452,7 +1452,7 @@ func (r *Raft) requestVote(rpc RPC, req *RequestVoteRequest) { if lastVoteTerm == req.Term && lastVoteCandBytes != nil { r.logger.Info("duplicate requestVote for same term", "term", req.Term) if bytes.Compare(lastVoteCandBytes, req.Candidate) == 0 { - r.logger.Warn("duplicate requestVote from", "candidate", req.Candidate) + r.logger.Warn("duplicate requestVote from", "candidate", r.trans.DecodePeer(req.Candidate)) resp.Granted = true } return
Return decoded candidate address on duplicate requestVote log
hashicorp_raft
train
675ed09ad8d216439087ce4dd59eb80f1a495ae4
diff --git a/dht.go b/dht.go index <HASH>..<HASH> 100644 --- a/dht.go +++ b/dht.go @@ -18,7 +18,7 @@ import ( pb "github.com/ipfs/go-ipfs/routing/dht/pb" kb "github.com/ipfs/go-ipfs/routing/kbucket" record "github.com/ipfs/go-ipfs/routing/record" - logging "github.com/ipfs/go-ipfs/vendor/QmXJkcEXB6C9h6Ytb6rrUTFU56Ro62zxgrbxTT3dgjQGA8/go-log" + logging "github.com/ipfs/go-ipfs/vendor/QmTBXYb6y2ZcJmoXVKk3pf9rzSEjbCg7tQaJW7RSuH14nv/go-log" proto "github.com/ipfs/go-ipfs/Godeps/_workspace/src/github.com/gogo/protobuf/proto" ds "github.com/ipfs/go-ipfs/Godeps/_workspace/src/github.com/jbenet/go-datastore" diff --git a/pb/message.go b/pb/message.go index <HASH>..<HASH> 100644 --- a/pb/message.go +++ b/pb/message.go @@ -6,7 +6,7 @@ import ( key "github.com/ipfs/go-ipfs/blocks/key" inet "github.com/ipfs/go-ipfs/p2p/net" peer "github.com/ipfs/go-ipfs/p2p/peer" - logging "github.com/ipfs/go-ipfs/vendor/QmXJkcEXB6C9h6Ytb6rrUTFU56Ro62zxgrbxTT3dgjQGA8/go-log" + logging "github.com/ipfs/go-ipfs/vendor/QmTBXYb6y2ZcJmoXVKk3pf9rzSEjbCg7tQaJW7RSuH14nv/go-log" ) var log = logging.Logger("dht.pb") diff --git a/query.go b/query.go index <HASH>..<HASH> 100644 --- a/query.go +++ b/query.go @@ -11,7 +11,7 @@ import ( u "github.com/ipfs/go-ipfs/util" pset "github.com/ipfs/go-ipfs/util/peerset" todoctr "github.com/ipfs/go-ipfs/util/todocounter" - logging "github.com/ipfs/go-ipfs/vendor/QmXJkcEXB6C9h6Ytb6rrUTFU56Ro62zxgrbxTT3dgjQGA8/go-log" + logging "github.com/ipfs/go-ipfs/vendor/QmTBXYb6y2ZcJmoXVKk3pf9rzSEjbCg7tQaJW7RSuH14nv/go-log" process "github.com/ipfs/go-ipfs/Godeps/_workspace/src/github.com/jbenet/goprocess" ctxproc "github.com/ipfs/go-ipfs/Godeps/_workspace/src/github.com/jbenet/goprocess/context"
update code to use new logging changes. License: MIT
libp2p_go-libp2p-kad-dht
train