content_type stringclasses 8 values | main_lang stringclasses 7 values | message stringlengths 1 50 | sha stringlengths 40 40 | patch stringlengths 52 962k | file_count int64 1 300 |
|---|---|---|---|---|---|
Javascript | Javascript | fix mixin blueprint tests under windows | 3a2ef238f179a65f39bbd4adfac0cb2aa4b03cb2 | <ide><path>node-tests/blueprints/mixin-test.js
<ide> const emberNew = blueprintHelpers.emberNew;
<ide> const emberGenerateDestroy = blueprintHelpers.emberGenerateDestroy;
<ide> const setupPodConfig = blueprintHelpers.setupPodConfig;
<ide> const expectError = require('../helpers/expect-error');
<add>const EOL = require('os').EOL;
<ide>
<ide> const chai = require('ember-cli-blueprint-test-helpers/chai');
<ide> const expect = chai.expect;
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo'], _file => {
<ide> expect(_file('app/mixins/foo.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo-test.js')).to.contain(
<ide> "import FooMixin from 'my-app/mixins/foo';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<ide> expect(_file('app/mixins/foo/bar.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar-test.js')).to.contain(
<ide> "import FooBarMixin from 'my-app/mixins/foo/bar';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo', '--pod'], _file => {
<ide> expect(_file('app/mixins/foo.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo-test.js')).to.contain(
<ide> "import FooMixin from 'my-app/mixins/foo';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar', '--pod'], _file => {
<ide> expect(_file('app/mixins/foo/bar.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar-test.js')).to.contain(
<ide> "import FooBarMixin from 'my-app/mixins/foo/bar';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo', '--pod'], _file => {
<ide> expect(_file('app/mixins/foo.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo-test.js')).to.contain(
<ide> "import FooMixin from 'my-app/mixins/foo';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar', '--pod'], _file => {
<ide> expect(_file('app/mixins/foo/bar.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar-test.js')).to.contain(
<ide> "import FooBarMixin from 'my-app/mixins/foo/bar';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo'], _file => {
<ide> expect(_file('src/mixins/foo.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('src/mixins/foo-test.js')).to.contain(
<ide> "import FooMixin from 'my-app/mixins/foo';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<ide> expect(_file('src/mixins/foo/bar.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('src/mixins/foo/bar-test.js')).to.contain(
<ide> "import FooBarMixin from 'my-app/mixins/foo/bar';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar/baz'], _file => {
<ide> expect(_file('src/mixins/foo/bar/baz.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('src/mixins/foo/bar/baz-test.js')).to.contain(
<ide> "import FooBarBazMixin from 'my-app/mixins/foo/bar/baz';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo'], _file => {
<ide> expect(_file('addon/mixins/foo.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo-test.js')).to.contain(
<ide> "import FooMixin from 'my-addon/mixins/foo';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<ide> expect(_file('addon/mixins/foo/bar.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar-test.js')).to.contain(
<ide> "import FooBarMixin from 'my-addon/mixins/foo/bar';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar/baz'], _file => {
<ide> expect(_file('addon/mixins/foo/bar/baz.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar/baz-test.js')).to.contain(
<ide> "import FooBarBazMixin from 'my-addon/mixins/foo/bar/baz';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar/baz', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/app/mixins/foo/bar/baz.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('addon/mixins/foo/bar/baz.js')).to.not.exist;
<ide> });
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo'], _file => {
<ide> expect(_file('src/mixins/foo.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('src/mixins/foo-test.js')).to.contain(
<ide> "import FooMixin from 'my-addon/mixins/foo';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar'], _file => {
<ide> expect(_file('src/mixins/foo/bar.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('src/mixins/foo/bar-test.js')).to.contain(
<ide> "import FooBarMixin from 'my-addon/mixins/foo/bar';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar/baz'], _file => {
<ide> expect(_file('src/mixins/foo/bar/baz.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('src/mixins/foo/bar/baz-test.js')).to.contain(
<ide> "import FooBarBazMixin from 'my-addon/mixins/foo/bar/baz';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar/baz', '--dummy'], _file => {
<ide> expect(_file('tests/dummy/src/mixins/foo/bar/baz.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('src/mixins/foo/bar/baz.js')).to.not.exist;
<ide> });
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/mixins/foo.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo-test.js')).to.contain(
<ide> "import FooMixin from 'my-addon/mixins/foo';"
<ide> describe('Blueprint: mixin', function() {
<ide> return emberGenerateDestroy(['mixin', 'foo/bar', '--in-repo-addon=my-addon'], _file => {
<ide> expect(_file('lib/my-addon/addon/mixins/foo/bar.js'))
<ide> .to.contain("import Mixin from '@ember/object/mixin';")
<del> .to.contain('export default Mixin.create({\n});');
<add> .to.contain(`export default Mixin.create({${EOL}});`);
<ide>
<ide> expect(_file('tests/unit/mixins/foo/bar-test.js')).to.contain(
<ide> "import FooBarMixin from 'my-addon/mixins/foo/bar';" | 1 |
Javascript | Javascript | use block-scoping in test/pummel/test-timers.js | 49a1f7f15d1e8d996459ce4b503251a0be762763 | <ide><path>test/pummel/test-timers.js
<ide> const assert = require('assert');
<ide>
<ide> const WINDOW = 200; // Why does this need to be so big?
<ide>
<del>let interval_count = 0;
<ide>
<ide> const starttime = new Date();
<del>setTimeout(common.mustCall(function() {
<del> const endtime = new Date();
<add>{
<add> setTimeout(common.mustCall(function() {
<add> const endtime = new Date();
<ide>
<del> const diff = endtime - starttime;
<del> assert.ok(diff > 0);
<del> console.error(`diff: ${diff}`);
<add> const diff = endtime - starttime;
<add> assert.ok(diff > 0);
<add> console.error(`diff: ${diff}`);
<ide>
<del> assert.strictEqual(1000 - WINDOW < diff && diff < 1000 + WINDOW, true);
<del>}), 1000);
<add> assert.strictEqual(1000 - WINDOW < diff && diff < 1000 + WINDOW, true);
<add> }), 1000);
<add>}
<ide>
<ide> // This timer shouldn't execute
<del>const id = setTimeout(function() { assert.strictEqual(true, false); }, 500);
<del>clearTimeout(id);
<add>{
<add> const id = setTimeout(common.mustNotCall(), 500);
<add> clearTimeout(id);
<add>}
<ide>
<del>setInterval(function() {
<del> interval_count += 1;
<del> const endtime = new Date();
<add>{
<add> let interval_count = 0;
<ide>
<del> const diff = endtime - starttime;
<del> assert.ok(diff > 0);
<del> console.error(`diff: ${diff}`);
<add> setInterval(common.mustCall(function() {
<add> interval_count += 1;
<add> const endtime = new Date();
<ide>
<del> const t = interval_count * 1000;
<add> const diff = endtime - starttime;
<add> assert.ok(diff > 0);
<add> console.error(`diff: ${diff}`);
<ide>
<del> assert.strictEqual(t - WINDOW < diff && diff < t + WINDOW, true);
<add> const t = interval_count * 1000;
<ide>
<del> assert.strictEqual(interval_count <= 3, true);
<del> if (interval_count === 3)
<del> clearInterval(this);
<del>}, 1000);
<add> assert.strictEqual(t - WINDOW < diff && diff < t + WINDOW, true);
<add>
<add> assert.strictEqual(interval_count <= 3, true);
<add> if (interval_count === 3)
<add> clearInterval(this);
<add> }, 3), 1000);
<add>}
<ide>
<ide>
<ide> // Single param:
<del>setTimeout(function(param) {
<del> assert.strictEqual(param, 'test param');
<del>}, 1000, 'test param');
<add>{
<add> setTimeout(function(param) {
<add> assert.strictEqual(param, 'test param');
<add> }, 1000, 'test param');
<add>}
<ide>
<del>let interval_count2 = 0;
<del>setInterval(function(param) {
<del> ++interval_count2;
<del> assert.strictEqual(param, 'test param');
<add>{
<add> let interval_count = 0;
<add> setInterval(function(param) {
<add> ++interval_count;
<add> assert.strictEqual(param, 'test param');
<ide>
<del> if (interval_count2 === 3)
<del> clearInterval(this);
<del>}, 1000, 'test param');
<add> if (interval_count === 3)
<add> clearInterval(this);
<add> }, 1000, 'test param');
<add>}
<ide>
<ide>
<ide> // Multiple param
<del>setTimeout(function(param1, param2) {
<del> assert.strictEqual(param1, 'param1');
<del> assert.strictEqual(param2, 'param2');
<del>}, 1000, 'param1', 'param2');
<add>{
<add> setTimeout(function(param1, param2) {
<add> assert.strictEqual(param1, 'param1');
<add> assert.strictEqual(param2, 'param2');
<add> }, 1000, 'param1', 'param2');
<add>}
<ide>
<del>let interval_count3 = 0;
<del>setInterval(function(param1, param2) {
<del> ++interval_count3;
<del> assert.strictEqual(param1, 'param1');
<del> assert.strictEqual(param2, 'param2');
<add>{
<add> let interval_count = 0;
<add> setInterval(function(param1, param2) {
<add> ++interval_count;
<add> assert.strictEqual(param1, 'param1');
<add> assert.strictEqual(param2, 'param2');
<ide>
<del> if (interval_count3 === 3)
<del> clearInterval(this);
<del>}, 1000, 'param1', 'param2');
<add> if (interval_count === 3)
<add> clearInterval(this);
<add> }, 1000, 'param1', 'param2');
<add>}
<ide>
<ide> // setInterval(cb, 0) should be called multiple times.
<del>let count4 = 0;
<del>const interval4 = setInterval(function() {
<del> if (++count4 > 10) clearInterval(interval4);
<del>}, 0);
<del>
<del>
<del>// We should be able to clearTimeout multiple times without breakage.
<del>let expectedTimeouts = 3;
<del>
<del>function t() {
<del> expectedTimeouts--;
<add>{
<add> let count = 0;
<add> const interval = setInterval(common.mustCall(function() {
<add> if (++count > 10) clearInterval(interval);
<add> }, 11), 0);
<ide> }
<ide>
<del>setTimeout(t, 200);
<del>setTimeout(t, 200);
<del>const y = setTimeout(t, 200);
<del>
<del>clearTimeout(y);
<del>setTimeout(t, 200);
<del>clearTimeout(y);
<del>
<add>// We should be able to clearTimeout multiple times without breakage.
<add>{
<add> const t = common.mustCall(3);
<ide>
<del>process.on('exit', function() {
<del> assert.strictEqual(interval_count, 3);
<del> assert.strictEqual(count4, 11);
<add> setTimeout(t, 200);
<add> setTimeout(t, 200);
<add> const y = setTimeout(t, 200);
<ide>
<del> // Check that the correct number of timers ran.
<del> assert.strictEqual(expectedTimeouts, 0);
<del>});
<add> clearTimeout(y);
<add> setTimeout(t, 200);
<add> clearTimeout(y);
<add>} | 1 |
PHP | PHP | improve shouldbroadcastnow performance | c5c5b75a07af4442c437d8730ad0ecec8a5b97cc | <ide><path>src/Illuminate/Broadcasting/BroadcastManager.php
<ide> use Illuminate\Broadcasting\Broadcasters\RedisBroadcaster;
<ide> use Illuminate\Contracts\Broadcasting\Factory as FactoryContract;
<ide> use Illuminate\Contracts\Broadcasting\ShouldBroadcastNow;
<add>use Illuminate\Contracts\Bus\Dispatcher as BusDispatcherContract;
<ide> use InvalidArgumentException;
<ide> use Psr\Log\LoggerInterface;
<ide> use Pusher\Pusher;
<ide> public function event($event = null)
<ide> */
<ide> public function queue($event)
<ide> {
<del> $connection = $event instanceof ShouldBroadcastNow ? 'sync' : null;
<del>
<del> if (is_null($connection) && isset($event->connection)) {
<del> $connection = $event->connection;
<add> if ($event instanceof ShouldBroadcastNow) {
<add> return $this->app->make(BusDispatcherContract::class)->dispatchNow(new BroadcastEvent(clone $event));
<ide> }
<ide>
<ide> $queue = null;
<ide> public function queue($event)
<ide> $queue = $event->queue;
<ide> }
<ide>
<del> $this->app->make('queue')->connection($connection)->pushOn(
<add> $this->app->make('queue')->connection($event->connection ?? null)->pushOn(
<ide> $queue, new BroadcastEvent(clone $event)
<ide> );
<ide> } | 1 |
PHP | PHP | add only method to session | c681ef442a2c57a74671ec9d11d157349be66646 | <ide><path>src/Illuminate/Session/Store.php
<ide> public function all()
<ide> return $this->attributes;
<ide> }
<ide>
<add> /**
<add> * Get a subset of the session data.
<add> *
<add> * @param array $keys
<add> * @return array
<add> */
<add> public function only($keys)
<add> {
<add> return Arr::only($this->attributes, $keys);
<add> }
<add>
<ide> /**
<ide> * Checks if a key exists.
<ide> *
<ide><path>tests/Session/SessionStoreTest.php
<ide> public function testReflashWithNow()
<ide> $this->assertFalse(array_search('foo', $session->get('_flash.old')));
<ide> }
<ide>
<add> public function testOnly()
<add> {
<add> $session = $this->getSession();
<add> $session->put('foo', 'bar');
<add> $session->put('qu', 'ux');
<add> $this->assertEquals(['foo' => 'bar', 'qu' => 'ux'], $session->all());
<add> $this->assertEquals(['qu' => 'ux'], $session->only(['qu']));
<add> }
<add>
<ide> public function testReplace()
<ide> {
<ide> $session = $this->getSession(); | 2 |
PHP | PHP | fix undefined variable error | 545694d84b2fd9c63d4961386e9691df6251834f | <ide><path>lib/Cake/Controller/Component/SecurityComponent.php
<ide> protected function _validateCsrf($controller) {
<ide> * @return array An array of nonce => expires.
<ide> */
<ide> protected function _expireTokens($tokens) {
<add> $now = time();
<ide> foreach ($tokens as $nonce => $expires) {
<ide> if ($expires < $now) {
<ide> unset($tokens[$nonce]);
<ide> }
<ide> }
<del> $now = time();
<ide> $overflow = count($tokens) - $this->csrfLimit;
<ide> if ($overflow > 0) {
<ide> $tokens = array_slice($tokens, $overflow + 1, null, true); | 1 |
Javascript | Javascript | add test for autodestroy in stream | d6f52f5a38b2e93f62da2c18a58bc85991f11234 | <ide><path>test/parallel/test-stream-auto-destroy.js
<ide> const assert = require('assert');
<ide> assert(finished);
<ide> }));
<ide> }
<add>
<add>{
<add> const r = new stream.Readable({
<add> read() {
<add> r2.emit('error', new Error('fail'));
<add> }
<add> });
<add> const r2 = new stream.Readable({
<add> autoDestroy: true,
<add> destroy: common.mustCall((err, cb) => cb())
<add> });
<add>
<add> r.pipe(r2);
<add>}
<add>
<add>{
<add> const r = new stream.Readable({
<add> read() {
<add> w.emit('error', new Error('fail'));
<add> }
<add> });
<add> const w = new stream.Writable({
<add> autoDestroy: true,
<add> destroy: common.mustCall((err, cb) => cb())
<add> });
<add>
<add> r.pipe(w);
<add>} | 1 |
PHP | PHP | replace double quotes with single quotes | 3f695312a174f5b5c39171ec740d136442a9d186 | <ide><path>tests/Database/DatabaseEloquentIntegrationTest.php
<ide> public function setUp()
<ide>
<ide> $this->schema()->create('photos', function ($table) {
<ide> $table->increments('id');
<del> $table->unsignedInteger("imageable_id")->nullable();
<del> $table->string("imageable_type")->nullable();
<del> $table->index(["imageable_id", "imageable_type"]);
<add> $table->unsignedInteger('imageable_id')->nullable();
<add> $table->string('imageable_type')->nullable();
<add> $table->index(['imageable_id', 'imageable_type']);
<ide> $table->string('name');
<ide> $table->timestamps();
<ide> }); | 1 |
Python | Python | fix gpt-j _checkpoint_for_doc typo | 9396b40433408a5a3c1316a9ff40b026e1c1d708 | <ide><path>src/transformers/models/gptj/modeling_gptj.py
<ide>
<ide> logger = logging.get_logger(__name__)
<ide>
<del>_CHECKPOINT_FOR_DOC = "EleutherAI/gpt-j-6b"
<add>_CHECKPOINT_FOR_DOC = "EleutherAI/gpt-j-6B"
<ide> _CONFIG_FOR_DOC = "GPTJConfig"
<ide> _TOKENIZER_FOR_DOC = "GPT2Tokenizer"
<ide> | 1 |
Javascript | Javascript | fix a small mistake for cmap format 0 | ac163da4c877be93b876563f7898cfa7f474e13e | <ide><path>fonts.js
<ide> var Font = (function Font() {
<ide> deltas.push(index);
<ide>
<ide> var unicode = j + kCmapGlyphOffset;
<del> encoding[j].unicode = unicode;
<add> var mapping = encoding[j] || {};
<add> mapping.unicode = unicode;
<add> encoding[j] = mapping;
<ide> glyphs.push({ unicode: unicode });
<ide> }
<ide> } | 1 |
Python | Python | remove testfixtures module that is only used once | 3a046faaeb457572b1484faf158cc96eb81df44a | <ide><path>setup.py
<ide> def get_sphinx_theme_version() -> str:
<ide> 'pywinrm',
<ide> 'qds-sdk>=1.9.6',
<ide> 'requests_mock',
<del> 'testfixtures',
<ide> 'wheel',
<ide> 'yamllint',
<ide> ]
<ide><path>tests/providers/amazon/aws/hooks/test_glacier.py
<ide> import unittest
<ide> from unittest import mock
<ide>
<del>from testfixtures import LogCapture
<del>
<ide> from airflow.providers.amazon.aws.hooks.glacier import GlacierHook
<ide>
<ide> CREDENTIALS = "aws_conn"
<ide> def test_retrieve_inventory_should_log_mgs(self, mock_conn):
<ide> # given
<ide> job_id = {"jobId": "1234abcd"}
<ide> # when
<del> with LogCapture() as log:
<add> with self.assertLogs() as log:
<ide> mock_conn.return_value.initiate_job.return_value = job_id
<ide> self.hook.retrieve_inventory(VAULT_NAME)
<ide> # then
<del> log.check(
<del> (
<del> 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
<del> 'INFO',
<del> f"Retrieving inventory for vault: {VAULT_NAME}",
<del> ),
<del> (
<del> 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
<del> 'INFO',
<del> f"Initiated inventory-retrieval job for: {VAULT_NAME}",
<del> ),
<del> (
<del> 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
<del> 'INFO',
<del> f"Retrieval Job ID: {job_id.get('jobId')}",
<del> ),
<add> self.assertEqual(
<add> log.output,
<add> [
<add> 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
<add> + f"Retrieving inventory for vault: {VAULT_NAME}",
<add> 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
<add> + f"Initiated inventory-retrieval job for: {VAULT_NAME}",
<add> 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
<add> + f"Retrieval Job ID: {job_id.get('jobId')}",
<add> ],
<ide> )
<ide>
<ide> @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
<ide> def test_retrieve_inventory_results_should_return_response(self, mock_conn):
<ide> @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
<ide> def test_retrieve_inventory_results_should_log_mgs(self, mock_conn):
<ide> # when
<del> with LogCapture() as log:
<add> with self.assertLogs() as log:
<ide> mock_conn.return_value.get_job_output.return_value = REQUEST_RESULT
<ide> self.hook.retrieve_inventory_results(VAULT_NAME, JOB_ID)
<ide> # then
<del> log.check(
<del> (
<del> 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
<del> 'INFO',
<del> f"Retrieving the job results for vault: {VAULT_NAME}...",
<del> ),
<add> self.assertEqual(
<add> log.output,
<add> [
<add> 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
<add> + f"Retrieving the job results for vault: {VAULT_NAME}...",
<add> ],
<ide> )
<ide>
<ide> @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
<ide> def test_describe_job_should_return_status_succeeded(self, mock_conn):
<ide> @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn")
<ide> def test_describe_job_should_log_mgs(self, mock_conn):
<ide> # when
<del> with LogCapture() as log:
<add> with self.assertLogs() as log:
<ide> mock_conn.return_value.describe_job.return_value = JOB_STATUS
<ide> self.hook.describe_job(VAULT_NAME, JOB_ID)
<ide> # then
<del> log.check(
<del> (
<del> 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
<del> 'INFO',
<del> f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
<del> ),
<del> (
<del> 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook',
<del> 'INFO',
<del> f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
<del> ),
<add> self.assertEqual(
<add> log.output,
<add> [
<add> 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
<add> + f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}",
<add> 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:'
<add> + f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}",
<add> ],
<ide> ) | 2 |
Ruby | Ruby | convert cask test to spec | a616fab5bf84d9e2a07610666cf148444c04b3c4 | <ide><path>Library/Homebrew/cask/spec/cask/cask_spec.rb
<ide> end
<ide> end
<ide> end
<add>
<add> describe "load" do
<add> let(:hbc_relative_tap_path) { "../../Taps/caskroom/homebrew-cask" }
<add>
<add> it "returns an instance of the Cask for the given token" do
<add> c = Hbc.load("adium")
<add> expect(c).to be_kind_of(Hbc::Cask)
<add> expect(c.token).to eq("adium")
<add> end
<add>
<add> it "returns an instance of the Cask from a specific file location" do
<add> location = File.expand_path(hbc_relative_tap_path + "/Casks/dia.rb")
<add> c = Hbc.load(location)
<add> expect(c).to be_kind_of(Hbc::Cask)
<add> expect(c.token).to eq("dia")
<add> end
<add>
<add> it "returns an instance of the Cask from a url" do
<add> url = "file://" + File.expand_path(hbc_relative_tap_path + "/Casks/dia.rb")
<add> c = shutup do
<add> Hbc.load(url)
<add> end
<add> expect(c).to be_kind_of(Hbc::Cask)
<add> expect(c.token).to eq("dia")
<add> end
<add>
<add> it "raises an error when failing to download a Cask from a url" do
<add> expect {
<add> url = "file://" + File.expand_path(hbc_relative_tap_path + "/Casks/notacask.rb")
<add> shutup do
<add> Hbc.load(url)
<add> end
<add> }.to raise_error(Hbc::CaskUnavailableError)
<add> end
<add>
<add> it "returns an instance of the Cask from a relative file location" do
<add> c = Hbc.load(hbc_relative_tap_path + "/Casks/bbedit.rb")
<add> expect(c).to be_kind_of(Hbc::Cask)
<add> expect(c.token).to eq("bbedit")
<add> end
<add>
<add> it "uses exact match when loading by token" do
<add> expect(Hbc.load("test-opera").token).to eq("test-opera")
<add> expect(Hbc.load("test-opera-mail").token).to eq("test-opera-mail")
<add> end
<add>
<add> it "raises an error when attempting to load a Cask that doesn't exist" do
<add> expect {
<add> Hbc.load("notacask")
<add> }.to raise_error(Hbc::CaskUnavailableError)
<add> end
<add> end
<add>
<add> describe "all_tokens" do
<add> it "returns a token for every Cask" do
<add> all_cask_tokens = Hbc.all_tokens
<add> expect(all_cask_tokens.count).to be > 20
<add> all_cask_tokens.each { |token| expect(token).to be_kind_of(String) }
<add> end
<add> end
<add>
<add> describe "metadata" do
<add> it "proposes a versioned metadata directory name for each instance" do
<add> cask_token = "adium"
<add> c = Hbc.load(cask_token)
<add> metadata_path = Hbc.caskroom.join(cask_token, ".metadata", c.version)
<add> expect(c.metadata_versioned_container_path.to_s).to eq(metadata_path.to_s)
<add> end
<add> end
<ide> end
<ide><path>Library/Homebrew/cask/test/cask_test.rb
<del>require "test_helper"
<del>
<del>describe "Cask" do
<del> hbc_relative_tap_path = "../../Taps/caskroom/homebrew-cask"
<del> describe "load" do
<del> it "returns an instance of the Cask for the given token" do
<del> c = Hbc.load("adium")
<del> c.must_be_kind_of(Hbc::Cask)
<del> c.token.must_equal("adium")
<del> end
<del>
<del> it "returns an instance of the Cask from a specific file location" do
<del> location = File.expand_path(hbc_relative_tap_path + "/Casks/dia.rb")
<del> c = Hbc.load(location)
<del> c.must_be_kind_of(Hbc::Cask)
<del> c.token.must_equal("dia")
<del> end
<del>
<del> it "returns an instance of the Cask from a url" do
<del> url = "file://" + File.expand_path(hbc_relative_tap_path + "/Casks/dia.rb")
<del> c = shutup do
<del> Hbc.load(url)
<del> end
<del> c.must_be_kind_of(Hbc::Cask)
<del> c.token.must_equal("dia")
<del> end
<del>
<del> it "raises an error when failing to download a Cask from a url" do
<del> lambda {
<del> url = "file://" + File.expand_path(hbc_relative_tap_path + "/Casks/notacask.rb")
<del> shutup do
<del> Hbc.load(url)
<del> end
<del> }.must_raise(Hbc::CaskUnavailableError)
<del> end
<del>
<del> it "returns an instance of the Cask from a relative file location" do
<del> c = Hbc.load(hbc_relative_tap_path + "/Casks/bbedit.rb")
<del> c.must_be_kind_of(Hbc::Cask)
<del> c.token.must_equal("bbedit")
<del> end
<del>
<del> it "uses exact match when loading by token" do
<del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/test-opera.rb").token.must_equal("test-opera")
<del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/test-opera-mail.rb").token.must_equal("test-opera-mail")
<del> end
<del>
<del> it "raises an error when attempting to load a Cask that doesn't exist" do
<del> lambda {
<del> Hbc.load("notacask")
<del> }.must_raise(Hbc::CaskUnavailableError)
<del> end
<del> end
<del>
<del> describe "all_tokens" do
<del> it "returns a token for every Cask" do
<del> all_cask_tokens = Hbc.all_tokens
<del> all_cask_tokens.count.must_be :>, 20
<del> all_cask_tokens.each { |token| token.must_be_kind_of String }
<del> end
<del> end
<del>
<del> describe "metadata" do
<del> it "proposes a versioned metadata directory name for each instance" do
<del> cask_token = "adium"
<del> c = Hbc.load(cask_token)
<del> metadata_path = Hbc.caskroom.join(cask_token, ".metadata", c.version)
<del> c.metadata_versioned_container_path.to_s.must_equal(metadata_path.to_s)
<del> end
<del> end
<del>end | 2 |
Python | Python | fix typo at datasets/mnist.py | 97e391dd868abcf833c8273866b34aa69a0d4d17 | <ide><path>slim/datasets/mnist.py
<ide> """Provides data for the MNIST dataset.
<ide>
<ide> The dataset scripts used to create the dataset can be found at:
<del>tensorflow/models/slim/data/create_mnist_dataset.py
<add>tensorflow/models/slim/datasets/download_and_convert_mnist.py
<ide> """
<ide>
<ide> from __future__ import absolute_import | 1 |
Ruby | Ruby | fix destructive stringify_keys for label_tag | 895d64531d25a76c1de2d4fec9aba68c0ee8c104 | <ide><path>actionpack/lib/action_view/helpers/form_tag_helper.rb
<ide> def text_field_tag(name, value = nil, options = {})
<ide> def label_tag(name = nil, content_or_options = nil, options = nil, &block)
<ide> options = content_or_options if block_given? && content_or_options.is_a?(Hash)
<ide> options ||= {}
<del> options.stringify_keys!
<add> options = options.stringify_keys
<ide> options["for"] = sanitize_to_id(name) unless name.blank? || options.has_key?("for")
<del> content_tag :label, content_or_options || name.to_s.humanize, options, &block
<add> content_tag :label, content_or_options.is_a?(Hash) ? options : (content_or_options || name.to_s.humanize), options, &block
<ide> end
<ide>
<ide> # Creates a hidden form input field used to transmit data that would be lost due to HTTP's statelessness or
<ide><path>actionpack/test/template/form_tag_helper_test.rb
<ide> def test_image_submit_tag_options_symbolize_keys_side_effects
<ide> assert_equal options, { :option => "random_option" }
<ide> end
<ide>
<add> def test_image_label_tag_options_symbolize_keys_side_effects
<add> options = { :option => "random_option" }
<add> actual = label_tag "submit source", "title", options
<add> assert_equal options, { :option => "random_option" }
<add> end
<add>
<ide> def protect_against_forgery?
<ide> false
<ide> end | 2 |
Python | Python | fix typo in documentation | 6925cb7c249420a278e34c98d385264b1ac30293 | <ide><path>numpy/linalg/linalg.py
<ide> def eig(a):
<ide> --------
<ide> eigvals : eigenvalues of a non-symmetric array.
<ide>
<del> eigh : eigenvalues and eigenvectors of a symmetric or Hermitian
<del> (conjugate symmetric) array.
<add> eigh : eigenvalues and eigenvectors of a symmetric or Hermitian
<add> (conjugate symmetric) array.
<ide>
<ide> eigvalsh : eigenvalues of a symmetric or Hermitian (conjugate symmetric)
<del> array.
<add> array.
<ide>
<ide> Notes
<ide> -----
<ide> def det(a):
<ide>
<ide> >>> a = np.array([ [[1, 2], [3, 4]], [[1, 2], [2, 1]], [[1, 3], [3, 1]] ])
<ide> >>> a.shape
<del> (2, 2, 2
<add> (3, 2, 2)
<ide> >>> np.linalg.det(a)
<ide> array([-2., -3., -8.])
<ide> | 1 |
Python | Python | remove need of datetime.timezone in test_views.py | 9997aff10bf4c1bfbde38fa65d7961ec431f75dc | <ide><path>tests/www/test_views.py
<ide> import unittest
<ide> import urllib
<ide> from contextlib import contextmanager
<del>from datetime import datetime as dt, timedelta, timezone as tz
<add>from datetime import datetime as dt, timedelta
<ide> from typing import Any, Dict, Generator, List, NamedTuple
<ide> from unittest import mock
<ide> from unittest.mock import PropertyMock
<ide> def test_create_dagrun_execution_date_with_timezone_utc(self):
<ide>
<ide> dr = self.session.query(models.DagRun).one()
<ide>
<del> self.assertEqual(dr.execution_date, dt(2018, 7, 6, 5, 4, 3, tzinfo=tz.utc))
<add> self.assertEqual(dr.execution_date, timezone.datetime(2018, 7, 6, 5, 4, 3))
<ide>
<ide> def test_create_dagrun_execution_date_with_timezone_edt(self):
<ide> data = {
<ide> def test_create_dagrun_execution_date_with_timezone_edt(self):
<ide>
<ide> dr = self.session.query(models.DagRun).one()
<ide>
<del> self.assertEqual(dr.execution_date, dt(2018, 7, 6, 5, 4, 3, tzinfo=tz(timedelta(hours=-4))))
<add> self.assertEqual(dr.execution_date, timezone.datetime(2018, 7, 6, 9, 4, 3))
<ide>
<ide> def test_create_dagrun_execution_date_with_timezone_pst(self):
<ide> data = {
<ide> def test_create_dagrun_execution_date_with_timezone_pst(self):
<ide>
<ide> dr = self.session.query(models.DagRun).one()
<ide>
<del> self.assertEqual(dr.execution_date, dt(2018, 7, 6, 5, 4, 3, tzinfo=tz(timedelta(hours=-8))))
<add> self.assertEqual(dr.execution_date, timezone.datetime(2018, 7, 6, 13, 4, 3))
<ide>
<ide> @conf_vars({("core", "default_timezone"): "America/Toronto"})
<ide> def test_create_dagrun_execution_date_without_timezone_default_edt(self):
<ide> def test_create_dagrun_execution_date_without_timezone_default_edt(self):
<ide>
<ide> dr = self.session.query(models.DagRun).one()
<ide>
<del> self.assertEqual(dr.execution_date, dt(2018, 7, 6, 5, 4, 3, tzinfo=tz(timedelta(hours=-4))))
<add> self.assertEqual(dr.execution_date, timezone.datetime(2018, 7, 6, 9, 4, 3))
<ide>
<ide> def test_create_dagrun_execution_date_without_timezone_default_utc(self):
<ide> data = {
<ide> def test_create_dagrun_execution_date_without_timezone_default_utc(self):
<ide>
<ide> dr = self.session.query(models.DagRun).one()
<ide>
<del> self.assertEqual(dr.execution_date, dt(2018, 7, 6, 5, 4, 3, tzinfo=tz.utc))
<add> self.assertEqual(dr.execution_date, dt(2018, 7, 6, 5, 4, 3, tzinfo=timezone.TIMEZONE))
<ide>
<ide> def test_create_dagrun_valid_conf(self):
<ide> conf_value = dict(Valid=True) | 1 |
Ruby | Ruby | check position of 'revision' and 'keg_only' | cd267e0bce9ca9cc1af37deab96c47e1cfb7645c | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_file
<ide> [/^ mirror ["'][\S\ ]+["']/, "mirror"],
<ide> [/^ version ["'][\S\ ]+["']/, "version"],
<ide> [/^ (sha1|sha256) ["'][\S\ ]+["']/, "checksum"],
<add> [/^ revision/, "revision"],
<ide> [/^ head ["'][\S\ ]+["']/, "head"],
<ide> [/^ stable do/, "stable block"],
<ide> [/^ bottle do/, "bottle block"],
<ide> [/^ devel do/, "devel block"],
<ide> [/^ head do/, "head block"],
<ide> [/^ bottle (:unneeded|:disable)/, "bottle modifier"],
<add> [/^ keg_only/, "keg_only"],
<ide> [/^ option/, "option"],
<ide> [/^ depends_on/, "depends_on"],
<ide> [/^ def install/, "install method"], | 1 |
Text | Text | restore note about nested script_name handling | 99775fd1612217aaba2f3580b4204c6b75c04249 | <ide><path>guides/source/upgrading_ruby_on_rails.md
<ide> Also check your environment settings for `config.action_dispatch.best_standards_
<ide>
<ide> * In Rails 4.0, a generic `ActionDispatch::ParamsParser::ParseError` exception is raised when `ParamsParser` fails to parse request params. You will want to rescue this exception instead of the low-level `MultiJson::DecodeError`, for example.
<ide>
<add>* In Rails 4.0, `SCRIPT_NAME` is properly nested when engines are mounted on an app that's served from a URL prefix. You no longer have to set `default_url_options[:script_name]` to work around overwritten URL prefixes.
<add>
<ide> * Rails 4.0 deprecated `ActionController::Integration` in favor of `ActionDispatch::Integration`.
<ide> * Rails 4.0 deprecated `ActionController::IntegrationTest` in favor of `ActionDispatch::IntegrationTest`.
<ide> * Rails 4.0 deprecated `ActionController::PerformanceTest` in favor of `ActionDispatch::PerformanceTest`. | 1 |
Java | Java | fix measureinwindow when using nodes | 9a28701bd87893a968d687461efaa5ce36c212fd | <ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/FlatUIImplementation.java
<ide> public void setChildren(
<ide>
<ide> @Override
<ide> public void measure(int reactTag, Callback callback) {
<add> measureHelper(reactTag, false, callback);
<add> }
<add>
<add> private void measureHelper(int reactTag, boolean relativeToWindow, Callback callback) {
<ide> FlatShadowNode node = (FlatShadowNode) resolveShadowNode(reactTag);
<ide> if (node.mountsToView()) {
<ide> mStateBuilder.ensureBackingViewIsCreated(node);
<ide> public void measure(int reactTag, Callback callback) {
<ide> yInParent / parentHeight,
<ide> width / parentWidth,
<ide> height / parentHeight,
<add> relativeToWindow,
<ide> callback);
<ide> }
<ide>
<ide> public void findSubviewIn(int reactTag, float targetX, float targetY, Callback c
<ide>
<ide> @Override
<ide> public void measureInWindow(int reactTag, Callback callback) {
<del> ensureMountsToViewAndBackingViewIsCreated(reactTag);
<del> super.measureInWindow(reactTag, callback);
<add> measureHelper(reactTag, true, callback);
<ide> }
<ide>
<ide> @Override
<ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/FlatUIViewOperationQueue.java
<ide> private final class MeasureVirtualView implements UIOperation {
<ide> private final float mScaledWidth;
<ide> private final float mScaledHeight;
<ide> private final Callback mCallback;
<add> private final boolean mRelativeToWindow;
<ide>
<ide> private MeasureVirtualView(
<ide> int reactTag,
<ide> float scaledX,
<ide> float scaledY,
<ide> float scaledWidth,
<ide> float scaledHeight,
<add> boolean relativeToWindow,
<ide> Callback callback) {
<ide> mReactTag = reactTag;
<ide> mScaledX = scaledX;
<ide> mScaledY = scaledY;
<ide> mScaledWidth = scaledWidth;
<ide> mScaledHeight = scaledHeight;
<ide> mCallback = callback;
<add> mRelativeToWindow = relativeToWindow;
<ide> }
<ide>
<ide> @Override
<ide> public void execute() {
<ide> try {
<ide> // Measure native View
<del> mNativeViewHierarchyManager.measure(mReactTag, MEASURE_BUFFER);
<add> if (mRelativeToWindow) {
<add> // relative to the window
<add> mNativeViewHierarchyManager.measureInWindow(mReactTag, MEASURE_BUFFER);
<add> } else {
<add> // relative to the root view
<add> mNativeViewHierarchyManager.measure(mReactTag, MEASURE_BUFFER);
<add> }
<ide> } catch (NoSuchNativeViewException noSuchNativeViewException) {
<ide> // Invoke with no args to signal failure and to allow JS to clean up the callback
<ide> // handle.
<ide> public void execute() {
<ide> float width = PixelUtil.toDIPFromPixel(mScaledWidth * nativeViewWidth);
<ide> float height = PixelUtil.toDIPFromPixel(mScaledHeight * nativeViewHeight);
<ide>
<del> mCallback.invoke(0, 0, width, height, x, y);
<add> if (mRelativeToWindow) {
<add> mCallback.invoke(x, y, width, height);
<add> } else {
<add> mCallback.invoke(0, 0, width, height, x, y);
<add> }
<ide> }
<ide> }
<ide>
<ide> public void enqueueMeasureVirtualView(
<ide> float scaledY,
<ide> float scaledWidth,
<ide> float scaledHeight,
<add> boolean relativeToWindow,
<ide> Callback callback) {
<ide> enqueueUIOperation(new MeasureVirtualView(
<ide> reactTag,
<ide> scaledX,
<ide> scaledY,
<ide> scaledWidth,
<ide> scaledHeight,
<add> relativeToWindow,
<ide> callback));
<ide> }
<ide> | 2 |
Text | Text | release notes for 1.3.0-beta.1 retractable-eyebrow | ca4ddfadbae0a8a432b87254b5bd6c25b616ba42 | <ide><path>CHANGELOG.md
<add><a name="1.3.0-beta.1"></a>
<add># 1.3.0-beta.1 retractable-eyebrow (2014-03-07)
<add>
<add>
<add>## Bug Fixes
<add>
<add>- **$compile:** support templates with thead and tfoot root elements
<add> ([53ec5e13](https://github.com/angular/angular.js/commit/53ec5e13e5955830b6751019eef232bd2125c0b6),
<add> [#6289](https://github.com/angular/angular.js/issues/6289))
<add>- **style:** expressions in style tags
<add> ([0609453e](https://github.com/angular/angular.js/commit/0609453e1f9ae074f8d786df903096a6eadb6aa0),
<add> [#2387](https://github.com/angular/angular.js/issues/2387), [#6492](https://github.com/angular/angular.js/issues/6492))
<add>
<add>
<add>## Features
<add>
<add>- **input:** support types date, time, datetime-local, month, week
<add> ([46bd6dc8](https://github.com/angular/angular.js/commit/46bd6dc88de252886d75426efc2ce8107a5134e9),
<add> [#5864](https://github.com/angular/angular.js/issues/5864))
<add>
<add>
<add>## Breaking Changes
<add>
<add>- **build:** due to [eaa1d00b](https://github.com/angular/angular.js/commit/eaa1d00b24008f590b95ad099241b4003688cdda),
<add> As communicated before, IE8 is no longer supported.
<add>
<add>For more info: http://blog.angularjs.org/2013/12/angularjs-13-new-release-approaches.html
<add>
<add>
<add>
<ide> <a name="1.2.14"></a>
<ide> # 1.2.14 feisty-cryokinesis (2014-03-01)
<ide> | 1 |
Python | Python | make xfailed test for modules in public api pass | 69bd8010edb30674f39d2fc3b7ae0a1d03b55c23 | <ide><path>numpy/lib/format.py
<ide> )
<ide>
<ide>
<add>__all__ = []
<add>
<add>
<ide> MAGIC_PREFIX = b'\x93NUMPY'
<ide> MAGIC_LEN = len(MAGIC_PREFIX) + 2
<ide> ARRAY_ALIGN = 64 # plausible values are powers of 2 between 16 and 4096
<ide><path>numpy/matlib.py
<ide>
<ide> import numpy as np
<ide> from numpy.matrixlib.defmatrix import matrix, asmatrix
<del># need * as we're copying the numpy namespace
<add># need * as we're copying the numpy namespace (FIXME: this makes little sense)
<ide> from numpy import *
<ide>
<ide> __version__ = np.__version__
<ide><path>numpy/tests/test_public_api.py
<ide> def test_all_modules_are_expected():
<ide> raise AssertionError("Found unexpected modules: {}".format(modnames))
<ide>
<ide>
<del>@pytest.mark.xfail(reason="missing __all__ dicts are messing this up, "
<del> "needs work")
<add># Stuff that clearly shouldn't be in the API and is detected by the next test
<add># below
<add>SKIP_LIST = [
<add> 'numpy.math',
<add> 'numpy.distutils.log.sys',
<add> 'numpy.distutils.system_info.copy',
<add> 'numpy.distutils.system_info.distutils',
<add> 'numpy.distutils.system_info.log',
<add> 'numpy.distutils.system_info.os',
<add> 'numpy.distutils.system_info.platform',
<add> 'numpy.distutils.system_info.re',
<add> 'numpy.distutils.system_info.shutil',
<add> 'numpy.distutils.system_info.subprocess',
<add> 'numpy.distutils.system_info.sys',
<add> 'numpy.distutils.system_info.tempfile',
<add> 'numpy.distutils.system_info.textwrap',
<add> 'numpy.distutils.system_info.warnings',
<add> 'numpy.doc.constants.re',
<add> 'numpy.doc.constants.textwrap',
<add> 'numpy.lib.emath',
<add> 'numpy.lib.math',
<add> 'numpy.matlib.char',
<add> 'numpy.matlib.rec',
<add> 'numpy.matlib.emath',
<add> 'numpy.matlib.math',
<add> 'numpy.matlib.linalg',
<add> 'numpy.matlib.fft',
<add> 'numpy.matlib.random',
<add> 'numpy.matlib.ctypeslib',
<add> 'numpy.matlib.ma'
<add>]
<add>
<add>
<ide> def test_all_modules_are_expected_2():
<ide> """
<ide> Method checking all objects. The pkgutil-based method in
<ide> def find_unexpected_members(mod_name):
<ide> fullobjname = mod_name + '.' + objname
<ide> if isinstance(getattr(module, objname), types.ModuleType):
<ide> if is_unexpected(fullobjname):
<del> members.append(fullobjname)
<add> if fullobjname not in SKIP_LIST:
<add> members.append(fullobjname)
<ide>
<ide> return members
<ide> | 3 |
Python | Python | remove mention of quickselect in argpartition docs | da9f29a12e3224da87c84cc36ad16dc932cb517e | <ide><path>numpy/add_newdocs.py
<ide> def luf(lamdaexpr, *args, **kwargs):
<ide>
<ide> add_newdoc('numpy.core.multiarray', 'ndarray', ('argpartition',
<ide> """
<del> a.argpartition(kth, axis=-1, kind='quickselect', order=None)
<add> a.argpartition(kth, axis=-1, kind='introselect', order=None)
<ide>
<ide> Returns the indices that would partition this array.
<ide> | 1 |
Python | Python | remove usage of six | fd8b07c6bb0c197e7e1ed4dfee7e6e85cc69a7d1 | <ide><path>tests/jobs/test_scheduler_job.py
<ide> import mock
<ide> import psutil
<ide> import pytest
<del>import six
<ide> from mock import MagicMock, patch
<ide> from parameterized import parameterized
<ide> from sqlalchemy import func
<ide> def test_critical_section_execute_task_instances(self):
<ide> )
<ide> self.assertEqual(State.RUNNING, ti1.state)
<ide> self.assertEqual(State.RUNNING, ti2.state)
<del> six.assertCountEqual(self, [State.QUEUED, State.SCHEDULED], [ti3.state, ti4.state])
<add> self.assertCountEqual([State.QUEUED, State.SCHEDULED], [ti3.state, ti4.state])
<ide> self.assertEqual(1, res)
<ide>
<ide> def test_execute_task_instances_limit(self):
<ide><path>tests/providers/singularity/operators/test_singularity.py
<ide> import unittest
<ide>
<ide> import mock
<del>import six
<ide> from parameterized import parameterized
<ide> from spython.instance import Instance
<ide>
<ide> def test_execute(self, client_mock):
<ide> )
<ide> def test_command_is_required(self, command):
<ide> task = SingularityOperator(task_id='task-id', image="docker://busybox", command=command)
<del> with six.assertRaisesRegex(self, AirflowException, "You must define a command."):
<add> with self.assertRaisesRegex(AirflowException, "You must define a command."):
<ide> task.execute({})
<ide>
<ide> @mock.patch('airflow.providers.singularity.operators.singularity.Client') | 2 |
Javascript | Javascript | fix linting issues | 8567b8feea95ddff9bc7049eb50c8ec21620cdca | <ide><path>src/project.js
<ide> class Project extends Model {
<ide> // * `exact` If `true`, only add `projectPath` if it names an existing directory. If `false`, if `projectPath` is a
<ide> // a file or does not exist, its parent directory will be added instead.
<ide> addPath (projectPath, options = {}) {
<del>
<ide> const directory = this.getDirectoryForProjectPath(projectPath)
<del> if (projectPath === "/Users/foo/baz") {
<del> console.log("ree", directory)
<del> }
<ide> let ok = true
<ide> if (options.exact === true) {
<ide> ok = (directory.getPath() === projectPath)
<ide> }
<ide> ok = ok && directory.existsSync()
<ide>
<del>
<ide> if (!ok) {
<ide> if (options.mustExist === true) {
<ide> const err = new Error(`Project directory ${directory} does not exist`)
<ide> class Project extends Model {
<ide> if (existingDirectory.getPath() === directory.getPath()) { return }
<ide> }
<ide>
<del>
<ide> this.rootDirectories.push(directory)
<ide>
<ide> const didChangeCallback = events => {
<ide> class Project extends Model {
<ide> this.emitter.emit('did-change-files', events)
<ide> }
<ide> }
<add>
<ide> // We'll use the directory's custom onDidChangeFiles callback, if available.
<ide> // CustomDirectory::onDidChangeFiles should match the signature of
<ide> // Project::onDidChangeFiles below (although it may resolve asynchronously) | 1 |
Go | Go | change shm mode to 1777 | 986cf931c38b8cdc51da44af0313502ca1156cfc | <ide><path>pkg/libcontainer/nsinit/mount.go
<ide> func mountSystem(rootfs string, mountLabel string) error {
<ide> }{
<ide> {source: "proc", path: filepath.Join(rootfs, "proc"), device: "proc", flags: defaultMountFlags},
<ide> {source: "sysfs", path: filepath.Join(rootfs, "sys"), device: "sysfs", flags: defaultMountFlags},
<del> {source: "shm", path: filepath.Join(rootfs, "dev", "shm"), device: "tmpfs", flags: defaultMountFlags, data: label.FormatMountLabel("mode=1755,size=65536k", mountLabel)},
<add> {source: "shm", path: filepath.Join(rootfs, "dev", "shm"), device: "tmpfs", flags: defaultMountFlags, data: label.FormatMountLabel("mode=1777,size=65536k", mountLabel)},
<ide> {source: "devpts", path: filepath.Join(rootfs, "dev", "pts"), device: "devpts", flags: syscall.MS_NOSUID | syscall.MS_NOEXEC, data: label.FormatMountLabel("newinstance,ptmxmode=0666,mode=620,gid=5", mountLabel)},
<ide> } {
<ide> if err := os.MkdirAll(m.path, 0755); err != nil && !os.IsExist(err) { | 1 |
Text | Text | fix broken link | 170f4526f567b40245e75386df5f303085b32ac5 | <ide><path>docs/NativeComponentsAndroid.md
<ide> Setter declaration requirements for methods annotated with `@ReactPropGroup` are
<ide>
<ide> ## 4. Register the `ViewManager`
<ide>
<del>The final Java step is to register the ViewManager to the application, this happens in a similar way to [Native Modules](native-modules-android.html), via the applications package member function `createViewManagers.`
<add>The final Java step is to register the ViewManager to the application, this happens in a similar way to [Native Modules](docs/native-modules-android.html), via the applications package member function `createViewManagers.`
<ide>
<ide> ```java
<ide> @Override | 1 |
Java | Java | finalize many of flatshadownode methods | d1ccb6d23d1c35567c13f51cc75fdbffd0a77925 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/flat/FlatShadowNode.java
<ide> public void setBackgroundColor(int backgroundColor) {
<ide> }
<ide>
<ide> @ReactProp(name = "overflow")
<del> public void setOverflow(String overflow) {
<add> public final void setOverflow(String overflow) {
<ide> mClipToBounds = "hidden".equals(overflow);
<ide> invalidate();
<ide> }
<ide> protected final void invalidate() {
<ide> /**
<ide> * Returns an array of DrawCommands to perform during the View's draw pass.
<ide> */
<del> /* package */ DrawCommand[] getDrawCommands() {
<add> /* package */ final DrawCommand[] getDrawCommands() {
<ide> return mDrawCommands;
<ide> }
<ide>
<ide> /**
<ide> * Sets an array of DrawCommands to perform during the View's draw pass. StateBuilder uses old
<ide> * draw commands to compare to new draw commands and see if the View neds to be redrawn.
<ide> */
<del> /* package */ void setDrawCommands(DrawCommand[] drawCommands) {
<add> /* package */ final void setDrawCommands(DrawCommand[] drawCommands) {
<ide> mDrawCommands = drawCommands;
<ide> }
<ide>
<ide> /**
<ide> * Sets an array of AttachDetachListeners to call onAttach/onDetach when they are attached to or
<ide> * detached from a View that this shadow node maps to.
<ide> */
<del> /* package */ void setAttachDetachListeners(AttachDetachListener[] listeners) {
<add> /* package */ final void setAttachDetachListeners(AttachDetachListener[] listeners) {
<ide> mAttachDetachListeners = listeners;
<ide> }
<ide>
<ide> /**
<ide> * Returns an array of AttachDetachListeners associated with this shadow node.
<ide> */
<del> /* package */ AttachDetachListener[] getAttachDetachListeners() {
<add> /* package */ final AttachDetachListener[] getAttachDetachListeners() {
<ide> return mAttachDetachListeners;
<ide> }
<ide>
<ide> protected final void setNodeRegion(NodeRegion nodeRegion) {
<ide> /**
<ide> * Sets boundaries of the View that this node maps to relative to the parent left/top coordinate.
<ide> */
<del> /* package */ void setViewBounds(int left, int top, int right, int bottom) {
<add> /* package */ final void setViewBounds(int left, int top, int right, int bottom) {
<ide> mViewLeft = left;
<ide> mViewTop = top;
<ide> mViewRight = right;
<ide> protected final void setNodeRegion(NodeRegion nodeRegion) {
<ide> /**
<ide> * Left position of the View this node maps to relative to the parent View.
<ide> */
<del> /* package */ int getViewLeft() {
<add> /* package */ final int getViewLeft() {
<ide> return mViewLeft;
<ide> }
<ide>
<ide> /**
<ide> * Top position of the View this node maps to relative to the parent View.
<ide> */
<del> /* package */ int getViewTop() {
<add> /* package */ final int getViewTop() {
<ide> return mViewTop;
<ide> }
<ide>
<ide> /**
<ide> * Right position of the View this node maps to relative to the parent View.
<ide> */
<del> /* package */ int getViewRight() {
<add> /* package */ final int getViewRight() {
<ide> return mViewRight;
<ide> }
<ide>
<ide> /**
<ide> * Bottom position of the View this node maps to relative to the parent View.
<ide> */
<del> /* package */ int getViewBottom() {
<add> /* package */ final int getViewBottom() {
<ide> return mViewBottom;
<ide> }
<ide> | 1 |
PHP | PHP | fix typo in newexp(); resolve cakephp/docs | 51deb992482d4e1b97e58b97373f2844504d9312 | <ide><path>src/Database/Query.php
<ide> public function into($table)
<ide> * ### Example
<ide> *
<ide> * ```
<del> * $query->newExp()->lte('count', $query->identifier('total'));
<add> * $query->newExpr()->lte('count', $query->identifier('total'));
<ide> * ```
<ide> *
<ide> * @param string $identifier The identifier for an expression | 1 |
Python | Python | add numeric doctests (patch by gael) | 59cf27d758b0b2d929e281c3f6ac6316b6c53479 | <ide><path>numpy/core/numeric.py
<ide> 'array_repr', 'array_str', 'set_string_function',
<ide> 'little_endian', 'require',
<ide> 'fromiter', 'array_equal', 'array_equiv',
<del> 'indices', 'fromfunction',
<add> 'indices', 'fromfunction',
<ide> 'load', 'loads', 'isscalar', 'binary_repr', 'base_repr',
<ide> 'ones', 'identity', 'allclose', 'compare_chararrays', 'putmask',
<ide> 'seterr', 'geterr', 'setbufsize', 'getbufsize',
<ide> def argwhere(a):
<ide> """Return a 2-d array of shape N x a.ndim where each row
<ide> is a sequence of indices into a. This sequence must be
<ide> converted to a tuple in order to be used to index into a.
<add>
<add> >>> from numpy import ones, argwhere
<add> >>> argwhere(ones((2, 2)))
<add> array([[0, 0],
<add> [0, 1],
<add> [1, 0],
<add> [1, 1]])
<ide> """
<ide> return asarray(a.nonzero()).T
<ide>
<ide> def flatnonzero(a):
<ide> """Return indicies that are not-zero in flattened version of a
<ide>
<ide> Equivalent to a.ravel().nonzero()[0]
<add>
<add> >>> from numpy import arange, flatnonzero
<add> >>> arange(-2, 3)
<add> array([-2, -1, 0, 1, 2])
<add> >>> flatnonzero(arange(-2, 3))
<add> array([0, 1, 3, 4])
<ide> """
<ide> return a.ravel().nonzero()[0]
<ide>
<ide> def tensordot(a, b, axes=2):
<ide> def roll(a, shift, axis=None):
<ide> """Roll the elements in the array by 'shift' positions along
<ide> the given axis.
<add>
<add> >>> from numpy import roll
<add> >>> arange(10)
<add> array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
<add> >>> roll(arange(10), 2)
<add> array([8, 9, 0, 1, 2, 3, 4, 5, 6, 7])
<ide> """
<ide> a = asanyarray(a)
<ide> if axis is None:
<ide> def roll(a, shift, axis=None):
<ide> def rollaxis(a, axis, start=0):
<ide> """Return transposed array so that axis is rolled before start.
<ide>
<del> if a.shape is (3,4,5,6)
<del> rollaxis(a, 3, 1).shape is (3,6,4,5)
<del> rollaxis(a, 2, 0).shape is (5,3,4,6)
<del> rollaxis(a, 1, 3).shape is (3,5,4,6)
<del> rollaxis(a, 1, 4).shape is (3,5,6,4)
<add> >>> from numpy import ones, rollaxis
<add> >>> a = ones((3,4,5,6))
<add> >>> rollaxis(a, 3, 1).shape
<add> (3, 6, 4, 5)
<add> >>> rollaxis(a, 2, 0).shape
<add> (5, 3, 4, 6)
<add> >>> rollaxis(a, 1, 4).shape
<add> (3, 5, 6, 4)
<ide> """
<ide> n = a.ndim
<ide> if axis < 0: | 1 |
Python | Python | add the forgotten glances_statsd.py script | c088642ac2abb4fb55e14bccc06646fb5ab5fc9a | <ide><path>glances/exports/glances_statsd.py
<add># -*- coding: utf-8 -*-
<add>#
<add># This file is part of Glances.
<add>#
<add># Copyright (C) 2015 Nicolargo <nicolas@nicolargo.com>
<add>#
<add># Glances is free software; you can redistribute it and/or modify
<add># it under the terms of the GNU Lesser General Public License as published by
<add># the Free Software Foundation, either version 3 of the License, or
<add># (at your option) any later version.
<add>#
<add># Glances is distributed in the hope that it will be useful,
<add># but WITHOUT ANY WARRANTY; without even the implied warranty of
<add># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
<add># GNU Lesser General Public License for more details.
<add>#
<add># You should have received a copy of the GNU Lesser General Public License
<add># along with this program. If not, see <http://www.gnu.org/licenses/>.
<add>
<add>"""Statsd interface class."""
<add>
<add># Import sys libs
<add>from statsd import StatsClient
<add>from numbers import Number
<add>import sys
<add>
<add># Import Glances lib
<add>from glances.core.glances_logging import logger
<add>from ConfigParser import NoSectionError, NoOptionError
<add>from glances.exports.glances_export import GlancesExport
<add>
<add>
<add>class Export(GlancesExport):
<add>
<add> """This class manages the Statsd export module."""
<add>
<add> def __init__(self, config=None, args=None):
<add> """Init the Statsd export IF."""
<add> GlancesExport.__init__(self, config=config, args=args)
<add>
<add> # Load the InfluxDB configuration file
<add> self.host = None
<add> self.port = None
<add> self.prefix = None
<add> self.export_enable = self.load_conf()
<add> if not self.export_enable:
<add> sys.exit(2)
<add>
<add> # Default prefix for stats is 'glances'
<add> if self.prefix is None:
<add> self.prefix = 'glances'
<add>
<add> # Init the Statsd client
<add> self.client = StatsClient(self.host,
<add> int(self.port),
<add> prefix=self.prefix)
<add>
<add> def load_conf(self, section="statsd"):
<add> """Load the Statsd configuration in the Glances configuration file"""
<add> if self.config is None:
<add> return False
<add> try:
<add> self.host = self.config.get_raw_option(section, "host")
<add> self.port = self.config.get_raw_option(section, "port")
<add> except NoSectionError:
<add> logger.critical("No Statsd configuration found")
<add> return False
<add> except NoOptionError as e:
<add> logger.critical("Error in the Statsd configuration (%s)" % e)
<add> return False
<add> else:
<add> logger.debug("Load Statsd from the Glances configuration file")
<add> # Prefix is optional
<add> try:
<add> self.prefix = self.config.get_raw_option(section, "prefix")
<add> except NoOptionError as e:
<add> pass
<add> return True
<add>
<add> def init(self, prefix='glances'):
<add> """Init the connection to the Statsd server"""
<add> if not self.export_enable:
<add> return None
<add> return StatsClient(self.host,
<add> self.port,
<add> prefix=prefix)
<add>
<add> def update(self, stats):
<add> """Update stats to the InfluxDB server."""
<add> if not self.export_enable:
<add> return False
<add>
<add> # Get the stats
<add> all_stats = stats.getAll()
<add> plugins = stats.getAllPlugins()
<add>
<add> # Loop over available plugin
<add> i = 0
<add> for plugin in plugins:
<add> if plugin in self.plugins_to_export():
<add> if type(all_stats[i]) is list:
<add> for item in all_stats[i]:
<add> export_names = map(
<add> lambda x: item[item['key']] + '.' + x, item.keys())
<add> export_values = item.values()
<add> self.__export(plugin, export_names, export_values)
<add> elif type(all_stats[i]) is dict:
<add> export_names = all_stats[i].keys()
<add> export_values = all_stats[i].values()
<add> self.__export(plugin, export_names, export_values)
<add> i += 1
<add>
<add> return True
<add>
<add> def __export(self, name, columns, points):
<add> """Export the stats to the Statsd server"""
<add> for i in range(0, len(columns)):
<add> if not isinstance(points[i], Number):
<add> continue
<add> stat_name = '{0}.{1}'.format(name, columns[i])
<add> stat_value = points[i]
<add> try:
<add> self.client.gauge(stat_name, stat_value)
<add> except Exception as e:
<add> logger.critical("Can not export stats to Statsd (%s)" % e) | 1 |
PHP | PHP | fix incorrect test | e59849b9c5913ad68e163c17fef59eac01734c99 | <ide><path>tests/TestCase/Http/Cookie/CookieTest.php
<ide> public function testToArrayCompat()
<ide> 'value' => 'cakephp-rocks',
<ide> 'path' => '/api',
<ide> 'domain' => 'cakephp.org',
<del> 'expires' => $date->format(DATE_COOKIE),
<add> 'expires' => 'Fri, 31-Mar-2017 12:34:56 GMT',
<ide> 'secure' => true,
<ide> 'httponly' => true
<ide> ]; | 1 |
Java | Java | fix broken links in javadoc | 1dcb6236a639ff74f9060487ed968a289c13f97f | <ide><path>spring-web/src/main/java/org/springframework/http/codec/xml/XmlEventDecoder.java
<ide> import org.springframework.util.xml.StaxUtils;
<ide>
<ide> /**
<del> * Decodes a {@link DataBuffer} stream into a stream of {@link XMLEvent DataBuffer} stream into a stream of {@link XMLEvents}.
<del> * That is, given the following XML:
<add> * Decodes a {@link DataBuffer} stream into a stream of {@link XMLEvent XMLEvents}.
<ide> *
<del> * <pre>
<add> * <p>Given the following XML:
<add> *
<add> * <pre class="code">
<ide> * <root>
<ide> * <child>foo</child>
<ide> * <child>bar</child>
<ide> * </root>
<ide> * </pre>
<ide> *
<del> * this method with result in a flux with the following events:
<add> * this decoder will produce a {@link Flux} with the following events:
<add> *
<ide> * <ol>
<ide> * <li>{@link javax.xml.stream.events.StartDocument}</li>
<ide> * <li>{@link javax.xml.stream.events.StartElement} {@code root}</li>
<ide> * <li>{@link javax.xml.stream.events.EndElement} {@code root}</li>
<ide> * </ol>
<ide> *
<del> * Note that this decoder is not registered by default but used internally
<del> * by other decoders who are there by default.
<add> * <p>Note that this decoder is not registered by default but is used internally
<add> * by other decoders which are registered by default.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/config/annotation/WebMvcConfigurer.java
<ide> default void configureDefaultServletHandling(DefaultServletHandlerConfigurer con
<ide> }
<ide>
<ide> /**
<del> * Add {@link Converter}s and {@link Formatter Converter}s and {@link Formatters} in addition to the ones
<add> * Add {@link Converter Converters} and {@link Formatter Formatters} in addition to the ones
<ide> * registered by default.
<ide> */
<ide> default void addFormatters(FormatterRegistry registry) { | 2 |
Mixed | Text | replace interface with simple type | 3accde6dee079fbde42f1928002bce43cb15833d | <ide><path>container/container.go
<ide> var (
<ide> errInvalidNetwork = fmt.Errorf("invalid network settings while building port map info")
<ide> )
<ide>
<del>// AttachError represents errors of attach
<del>type AttachError interface {
<del> IsDetached() bool
<del>}
<del>
<del>type detachError struct{}
<del>
<del>func (e detachError) IsDetached() bool {
<del> return true
<del>}
<add>// DetachError is special error which returned in case of container detach.
<add>type DetachError struct{}
<ide>
<del>func (e detachError) Error() string {
<add>func (DetachError) Error() string {
<ide> return "detached from container"
<ide> }
<ide>
<ide> func copyEscapable(dst io.Writer, src io.ReadCloser, keys []byte) (written int64
<ide> }
<ide> if i == len(keys)-1 {
<ide> src.Close()
<del> return 0, detachError{}
<add> return 0, DetachError{}
<ide> }
<ide> nr, er = src.Read(buf)
<ide> }
<ide><path>daemon/attach.go
<ide> func (daemon *Daemon) containerAttach(c *container.Container, stdin io.ReadClose
<ide> }
<ide> err := <-c.Attach(stdinPipe, stdout, stderr, keys)
<ide> if err != nil {
<del> e, ok := err.(container.AttachError)
<del> if ok && e.IsDetached() {
<add> if _, ok := err.(container.DetachError); ok {
<ide> daemon.LogContainerEvent(c, "detach")
<ide> } else {
<ide> logrus.Errorf("attach failed with error: %v", err)
<ide><path>daemon/exec.go
<ide> func (d *Daemon) ContainerExecStart(ctx context.Context, name string, stdin io.R
<ide> return fmt.Errorf("context cancelled")
<ide> case err := <-attachErr:
<ide> if err != nil {
<del> e, ok := err.(container.AttachError)
<del> if !ok || !e.IsDetached() {
<del> return fmt.Errorf("attach failed with error: %v", err)
<add> if _, ok := err.(container.DetachError); !ok {
<add> return fmt.Errorf("exec attach failed with error: %v", err)
<ide> }
<del> d.LogContainerEvent(c, "detach")
<add> d.LogContainerEvent(c, "exec_detach")
<ide> }
<ide> }
<ide> return nil
<ide><path>docs/reference/api/docker_remote_api.md
<ide> Some container-related events are not affected by container state, so they are n
<ide> * **export** emitted by `docker export`
<ide> * **exec_create** emitted by `docker exec`
<ide> * **exec_start** emitted by `docker exec` after **exec_create**
<add>* **detach** emitted when client is detached from container process
<add>* **exec_detach** emitted when client is detached from exec process
<ide>
<ide> Running `docker rmi` emits an **untag** event when removing an image name. The `rmi` command may also emit **delete** events when images are deleted by ID directly or by deleting the last tag referring to the image.
<ide>
<ide> This section lists each version from latest to oldest. Each listing includes a
<ide> * `GET /images/search` now takes a `filters` query parameter.
<ide> * `GET /events` now supports a `reload` event that is emitted when the daemon configuration is reloaded.
<ide> * `GET /events` now supports filtering by daemon name or ID.
<add>* `GET /events` now supports a `detach` event that is emitted on detaching from container process.
<add>* `GET /events` now supports an `exec_detach ` event that is emitted on detaching from exec process.
<ide> * `GET /images/json` now supports filters `since` and `before`.
<ide> * `POST /containers/(id or name)/start` no longer accepts a `HostConfig`.
<ide> * `POST /images/(name)/tag` no longer has a `force` query parameter.
<ide><path>docs/reference/api/docker_remote_api_v1.24.md
<ide> Get container events from docker, either in real time via streaming, or via poll
<ide>
<ide> Docker containers report the following events:
<ide>
<del> attach, commit, copy, create, destroy, die, exec_create, exec_start, export, kill, oom, pause, rename, resize, restart, start, stop, top, unpause, update
<add> attach, commit, copy, create, destroy, detach, die, exec_create, exec_detach, exec_start, export, kill, oom, pause, rename, resize, restart, start, stop, top, unpause, update
<ide>
<ide> Docker images report the following events:
<ide>
<ide><path>docs/reference/commandline/events.md
<ide> parent = "smn_cli"
<ide>
<ide> Docker containers report the following events:
<ide>
<del> attach, commit, copy, create, destroy, die, exec_create, exec_start, export, kill, oom, pause, rename, resize, restart, start, stop, top, unpause, update
<add> attach, commit, copy, create, destroy, detach, die, exec_create, exec_detach, exec_start, export, kill, oom, pause, rename, resize, restart, start, stop, top, unpause, update
<ide>
<ide> Docker images report the following events:
<ide>
<ide><path>man/docker-events.1.md
<ide> information and real-time information.
<ide>
<ide> Docker containers will report the following events:
<ide>
<del> attach, commit, copy, create, destroy, die, exec_create, exec_start, export, kill, oom, pause, rename, resize, restart, start, stop, top, unpause, update
<add> attach, commit, copy, create, destroy, detach, die, exec_create, exec_detach, exec_start, export, kill, oom, pause, rename, resize, restart, start, stop, top, unpause, update
<ide>
<ide> Docker images report the following events:
<ide> | 7 |
Python | Python | add property test for clipping | 4b20d78b74556f0a498ffb311084aad57bb358af | <ide><path>numpy/core/tests/test_numeric.py
<ide> assert_warns, HAS_REFCOUNT
<ide> )
<ide>
<add>from hypothesis import assume, given, strategies as st
<add>from hypothesis.extra import numpy as hynp
<add>
<ide>
<ide> class TestResize:
<ide> def test_copies(self):
<ide> def test_NaT_propagation(self, arr, amin, amax):
<ide> actual = np.clip(arr, amin, amax)
<ide> assert_equal(actual, expected)
<ide>
<add> @given(data=st.data(), shape=hynp.array_shapes())
<add> def test_clip_property(self, data, shape):
<add> """A property-based test using Hypothesis.
<add>
<add> This aims for maximum generality: it could in principle generate *any*
<add> valid inputs to np.clip, and in practice generates much more varied
<add> inputs than human testers come up with.
<add>
<add> Because many of the inputs have tricky dependencies - compatible dtypes
<add> and mutually-broadcastable shapes - we use `st.data()` strategy draw
<add> values *inside* the test function, from strategies we construct based
<add> on previous values. An alternative would be to define a custom strategy
<add> with `@st.composite`, but until we have duplicated code inline is fine.
<add>
<add> That accounts for most of the function; the actual test is just three
<add> lines to calculate and compare actual vs expected results!
<add> """
<add> # Our base array and bounds should not need to be of the same type as
<add> # long as they are all compatible - so we allow any int or float type.
<add> dtype_strategy = hynp.integer_dtypes() | hynp.floating_dtypes()
<add>
<add> # The following line is a total hack to disable the varied-dtypes
<add> # component of this test, because result != expected if dtypes can vary.
<add> dtype_strategy = st.just(data.draw(dtype_strategy))
<add>
<add> # Generate an arbitrary array of the chosen shape and dtype
<add> # This is the value that we clip.
<add> arr = data.draw(hynp.arrays(dtype=dtype_strategy, shape=shape))
<add>
<add> # Generate shapes for the bounds which can be broadcast with each other
<add> # and with the base shape. Below, we might decide to use scalar bounds,
<add> # but it's clearer to generate these shapes unconditionally in advance.
<add> in_shapes, result_shape = data.draw(
<add> hynp.mutually_broadcastable_shapes(
<add> num_shapes=2,
<add> base_shape=shape,
<add> # Commenting out the min_dims line allows zero-dimensional arrays,
<add> # and zero-dimensional arrays containing NaN make the test fail.
<add> min_dims=1
<add>
<add> )
<add> )
<add> amin = data.draw(
<add> dtype_strategy.flatmap(hynp.from_dtype)
<add> | hynp.arrays(dtype=dtype_strategy, shape=in_shapes[0])
<add> )
<add> amax = data.draw(
<add> dtype_strategy.flatmap(hynp.from_dtype)
<add> | hynp.arrays(dtype=dtype_strategy, shape=in_shapes[1])
<add> )
<add> # If we allow either bound to be a scalar `nan`, the test will fail -
<add> # so we just "assume" that away (if it is, this raises a special
<add> # exception and Hypothesis will try again with different inputs)
<add> assume(not np.isscalar(amin) or not np.isnan(amin))
<add> assume(not np.isscalar(amax) or not np.isnan(amax))
<add>
<add> # Then calculate our result and expected result and check that they're
<add> # equal! See gh-12519 for discussion deciding on this property.
<add> result = np.clip(arr, amin, amax)
<add> expected = np.minimum(amax, np.maximum(arr, amin))
<add> assert_array_equal(result, expected)
<add>
<ide>
<ide> class TestAllclose:
<ide> rtol = 1e-5 | 1 |
Javascript | Javascript | use ref counts to count timers | 64cc066f59f50e29fc3a587a3cfc8b1270eb45cd | <ide><path>lib/internal/bootstrap/node.js
<ide> setupPrepareStackTrace();
<ide>
<ide> const {
<add> Array,
<ide> ArrayPrototypeConcat,
<del> ArrayPrototypeFilter,
<del> ArrayPrototypeMap,
<add> ArrayPrototypeFill,
<ide> FunctionPrototypeCall,
<ide> JSONParse,
<ide> ObjectDefineProperty,
<ide> ObjectDefineProperties,
<ide> ObjectGetPrototypeOf,
<ide> ObjectPreventExtensions,
<ide> ObjectSetPrototypeOf,
<del> ObjectValues,
<ide> ReflectGet,
<ide> ReflectSet,
<ide> SymbolToStringTag,
<ide> const rawMethods = internalBinding('process_methods');
<ide> process._getActiveHandles = rawMethods._getActiveHandles;
<ide>
<ide> process.getActiveResourcesInfo = function() {
<add> const timerCounts = internalTimers.getTimerCounts();
<ide> return ArrayPrototypeConcat(
<ide> rawMethods._getActiveRequestsInfo(),
<ide> rawMethods._getActiveHandlesInfo(),
<del> ArrayPrototypeMap(
<del> ArrayPrototypeFilter(ObjectValues(internalTimers.activeTimersMap),
<del> ({ resource }) => resource.hasRef()),
<del> ({ type }) => type));
<add> ArrayPrototypeFill(new Array(timerCounts.timeoutCount), 'Timeout'),
<add> ArrayPrototypeFill(new Array(timerCounts.immediateCount), 'Immediate'));
<ide> };
<ide>
<ide> // TODO(joyeecheung): remove these
<ide><path>lib/internal/timers.js
<ide> const kRefed = Symbol('refed');
<ide> // Create a single linked list instance only once at startup
<ide> const immediateQueue = new ImmediateList();
<ide>
<del>// Object map containing timers
<del>//
<del>// - key = asyncId
<del>// - value = { type, resource }
<del>const activeTimersMap = ObjectCreate(null);
<del>
<ide> let nextExpiry = Infinity;
<ide> let refCount = 0;
<ide>
<ide> function initAsyncResource(resource, type) {
<ide> resource[trigger_async_id_symbol] = getDefaultTriggerAsyncId();
<ide> if (initHooksExist())
<ide> emitInit(asyncId, type, triggerAsyncId, resource);
<del> activeTimersMap[asyncId] = { type, resource };
<ide> }
<ide>
<ide> // Timer constructor function.
<ide> function getTimerCallbacks(runNextTicks) {
<ide>
<ide> if (destroyHooksExist())
<ide> emitDestroy(asyncId);
<del> delete activeTimersMap[asyncId];
<ide>
<ide> outstandingQueue.head = immediate = immediate._idleNext;
<ide> }
<ide> function getTimerCallbacks(runNextTicks) {
<ide>
<ide> if (destroyHooksExist())
<ide> emitDestroy(asyncId);
<del> delete activeTimersMap[asyncId];
<ide> }
<ide> continue;
<ide> }
<ide> function getTimerCallbacks(runNextTicks) {
<ide>
<ide> if (destroyHooksExist())
<ide> emitDestroy(asyncId);
<del> delete activeTimersMap[asyncId];
<ide> }
<ide> }
<ide>
<ide> class Immediate {
<ide> }
<ide> }
<ide>
<add>function getTimerCounts() {
<add> return {
<add> timeoutCount: refCount,
<add> immediateCount: immediateInfo[kRefCount],
<add> };
<add>}
<add>
<ide> module.exports = {
<ide> TIMEOUT_MAX,
<ide> kTimeout: Symbol('timeout'), // For hiding Timeouts on other internals.
<ide> module.exports = {
<ide> active,
<ide> unrefActive,
<ide> insert,
<del> activeTimersMap,
<ide> timerListMap,
<ide> timerListQueue,
<ide> decRefCount,
<del> incRefCount
<add> incRefCount,
<add> getTimerCounts,
<ide> };
<ide><path>lib/timers.js
<ide> const {
<ide> kRefed,
<ide> kHasPrimitive,
<ide> getTimerDuration,
<del> activeTimersMap,
<ide> timerListMap,
<ide> timerListQueue,
<ide> immediateQueue,
<ide> function unenroll(item) {
<ide> // Fewer checks may be possible, but these cover everything.
<ide> if (destroyHooksExist() && item[async_id_symbol] !== undefined)
<ide> emitDestroy(item[async_id_symbol]);
<del> delete activeTimersMap[item[async_id_symbol]];
<ide>
<ide> L.remove(item);
<ide>
<ide> function clearImmediate(immediate) {
<ide> if (destroyHooksExist() && immediate[async_id_symbol] !== undefined) {
<ide> emitDestroy(immediate[async_id_symbol]);
<ide> }
<del> delete activeTimersMap[immediate[async_id_symbol]];
<ide>
<ide> immediate._onImmediate = null;
<ide> | 3 |
Javascript | Javascript | update logic for non-signedin users | d8667662230e93d7654d3910f5db6f69733bf454 | <ide><path>client/src/components/Donation/PaypalButton.js
<ide> export class PaypalButton extends Component {
<ide> return { ...configurationObj };
<ide> }
<ide>
<del> handleApproval = data => {
<add> handleApproval = (data, isSubscription) => {
<ide> const { amount, duration } = this.state;
<ide> const { skipAddDonation = false } = this.props;
<del> if (!skipAddDonation || duration === 'oneTime') {
<add> // Skip the api if user is not signed in or if its a one-time donation
<add> if (skipAddDonation || !isSubscription) {
<add> this.props.onDonationStateChange(
<add> true,
<add> false,
<add> data.error ? data.error : ''
<add> );
<add> } else {
<ide> this.props.handleProcessing(
<ide> duration,
<ide> amount,
<ide> export class PaypalButton extends Component {
<ide> };
<ide> this.props.onDonationStateChange(false, false, data.error);
<ide> });
<del> } else {
<del> this.props.onDonationStateChange(
<del> true,
<del> false,
<del> data.error ? data.error : ''
<del> );
<ide> }
<ide> };
<ide>
<ide> export class PaypalButton extends Component {
<ide> });
<ide> }}
<ide> isSubscription={isSubscription}
<del> on={true}
<ide> onApprove={data => {
<del> this.handleApproval(data);
<add> this.handleApproval(data, isSubscription);
<ide> }}
<ide> onCancel={() => {
<ide> this.props.onDonationStateChange( | 1 |
Ruby | Ruby | handle non-standard tap remotes | 4e61f61a208c0074bdf60fc68b7bb0e88e67c3d7 | <ide><path>Library/Homebrew/dev-cmd/bump-cask-pr.rb
<ide> def fetch_cask(contents, version, config: nil)
<ide> end
<ide>
<ide> def check_open_pull_requests(cask, args:)
<del> GitHub.check_for_duplicate_pull_requests(cask.token, cask.tap.full_name,
<add> tap_remote_repo = cask.tap.remote_repo || cask.tap.full_name
<add> GitHub.check_for_duplicate_pull_requests(cask.token, tap_remote_repo,
<ide> state: "open",
<ide> file: cask.sourcefile_path.relative_path_from(cask.tap.path).to_s,
<ide> args: args)
<ide><path>Library/Homebrew/dev-cmd/bump-formula-pr.rb
<ide> def bump_formula_pr_args
<ide> def use_correct_linux_tap(formula, args:)
<ide> default_origin_branch = formula.tap.path.git_origin_branch
<ide>
<del> return formula.tap.full_name, "origin", default_origin_branch, "-" if !OS.linux? || !formula.tap.core_tap?
<add> return formula.tap.remote_repo, "origin", default_origin_branch, "-" if !OS.linux? || !formula.tap.core_tap?
<ide>
<del> tap_full_name = formula.tap.full_name.gsub("linuxbrew", "homebrew")
<del> homebrew_core_url = "https://github.com/#{tap_full_name}"
<add> tap_remote_repo = formula.tap.full_name.gsub("linuxbrew", "homebrew")
<add> homebrew_core_url = "https://github.com/#{tap_remote_repo}"
<ide> homebrew_core_remote = "homebrew"
<ide> previous_branch = formula.tap.path.git_branch || "master"
<ide> formula_path = formula.path.relative_path_from(formula.tap.path)
<ide> def use_correct_linux_tap(formula, args:)
<ide> ohai "git fetch #{homebrew_core_remote} HEAD #{default_origin_branch}"
<ide> ohai "git cat-file -e #{full_origin_branch}:#{formula_path}"
<ide> ohai "git checkout #{full_origin_branch}"
<del> return tap_full_name, homebrew_core_remote, default_origin_branch, previous_branch
<add> return tap_remote_repo, homebrew_core_remote, default_origin_branch, previous_branch
<ide> end
<ide>
<ide> formula.tap.path.cd do
<ide> def use_correct_linux_tap(formula, args:)
<ide> if quiet_system "git", "cat-file", "-e", "#{full_origin_branch}:#{formula_path}"
<ide> ohai "#{formula.full_name} exists in #{full_origin_branch}."
<ide> safe_system "git", "checkout", full_origin_branch
<del> return tap_full_name, homebrew_core_remote, default_origin_branch, previous_branch
<add> return tap_remote_repo, homebrew_core_remote, default_origin_branch, previous_branch
<ide> end
<ide> end
<ide> end
<ide> def bump_formula_pr
<ide> formula_spec = formula.stable
<ide> odie "#{formula}: no stable specification found!" if formula_spec.blank?
<ide>
<del> tap_full_name, remote, remote_branch, previous_branch = use_correct_linux_tap(formula, args: args)
<del> check_open_pull_requests(formula, tap_full_name, args: args)
<add> tap_remote_repo, remote, remote_branch, previous_branch = use_correct_linux_tap(formula, args: args)
<add> check_open_pull_requests(formula, tap_remote_repo, args: args)
<ide>
<ide> new_version = args.version
<del> check_new_version(formula, tap_full_name, version: new_version, args: args) if new_version.present?
<add> check_new_version(formula, tap_remote_repo, version: new_version, args: args) if new_version.present?
<ide>
<ide> opoo "This formula has patches that may be resolved upstream." if formula.patchlist.present?
<ide> if formula.resources.any? { |resource| !resource.name.start_with?("homebrew-") }
<ide> def bump_formula_pr
<ide> old_version = old_formula_version.to_s
<ide> forced_version = new_version.present?
<ide> new_url_hash = if new_url.present? && new_hash.present?
<del> check_new_version(formula, tap_full_name, url: new_url, args: args) if new_version.blank?
<add> check_new_version(formula, tap_remote_repo, url: new_url, args: args) if new_version.blank?
<ide> true
<ide> elsif new_tag.present? && new_revision.present?
<del> check_new_version(formula, tap_full_name, url: old_url, tag: new_tag, args: args) if new_version.blank?
<add> check_new_version(formula, tap_remote_repo, url: old_url, tag: new_tag, args: args) if new_version.blank?
<ide> false
<ide> elsif old_hash.blank?
<ide> if new_tag.blank? && new_version.blank? && new_revision.blank?
<ide> def bump_formula_pr
<ide> and old tag are both #{new_tag}.
<ide> EOS
<ide> end
<del> check_new_version(formula, tap_full_name, url: old_url, tag: new_tag, args: args) if new_version.blank?
<add> check_new_version(formula, tap_remote_repo, url: old_url, tag: new_tag, args: args) if new_version.blank?
<ide> resource_path, forced_version = fetch_resource(formula, new_version, old_url, tag: new_tag)
<ide> new_revision = Utils.popen_read("git", "-C", resource_path.to_s, "rev-parse", "-q", "--verify", "HEAD")
<ide> new_revision = new_revision.strip
<ide> def bump_formula_pr
<ide> #{new_url}
<ide> EOS
<ide> end
<del> check_new_version(formula, tap_full_name, url: new_url, args: args) if new_version.blank?
<add> check_new_version(formula, tap_remote_repo, url: new_url, args: args) if new_version.blank?
<ide> resource_path, forced_version = fetch_resource(formula, new_version, new_url)
<ide> Utils::Tar.validate_file(resource_path)
<ide> new_hash = resource_path.sha256
<ide> def bump_formula_pr
<ide> commit_message: "#{formula.name} #{new_formula_version}",
<ide> previous_branch: previous_branch,
<ide> tap: formula.tap,
<del> tap_full_name: tap_full_name,
<add> tap_remote_repo: tap_remote_repo,
<ide> pr_message: pr_message,
<ide> }
<ide> GitHub.create_bump_pr(pr_info, args: args)
<ide> def formula_version(formula, contents = nil)
<ide> end
<ide> end
<ide>
<del> def check_open_pull_requests(formula, tap_full_name, args:)
<del> GitHub.check_for_duplicate_pull_requests(formula.name, tap_full_name,
<add> def check_open_pull_requests(formula, tap_remote_repo, args:)
<add> GitHub.check_for_duplicate_pull_requests(formula.name, tap_remote_repo,
<ide> state: "open",
<ide> file: formula.path.relative_path_from(formula.tap.path).to_s,
<ide> args: args)
<ide> end
<ide>
<del> def check_new_version(formula, tap_full_name, args:, version: nil, url: nil, tag: nil)
<add> def check_new_version(formula, tap_remote_repo, args:, version: nil, url: nil, tag: nil)
<ide> if version.nil?
<ide> specs = {}
<ide> specs[:tag] = tag if tag.present?
<ide> version = Version.detect(url, **specs)
<ide> end
<ide> check_throttle(formula, version)
<del> check_closed_pull_requests(formula, tap_full_name, args: args, version: version)
<add> check_closed_pull_requests(formula, tap_remote_repo, args: args, version: version)
<ide> end
<ide>
<ide> def check_throttle(formula, new_version)
<ide> def check_throttle(formula, new_version)
<ide> odie "#{formula} should only be updated every #{throttled_rate} releases on multiples of #{throttled_rate}"
<ide> end
<ide>
<del> def check_closed_pull_requests(formula, tap_full_name, args:, version:)
<add> def check_closed_pull_requests(formula, tap_remote_repo, args:, version:)
<ide> # if we haven't already found open requests, try for an exact match across closed requests
<del> GitHub.check_for_duplicate_pull_requests(formula.name, tap_full_name,
<add> GitHub.check_for_duplicate_pull_requests(formula.name, tap_remote_repo,
<ide> version: version,
<ide> state: "closed",
<ide> file: formula.path.relative_path_from(formula.tap.path).to_s,
<ide><path>Library/Homebrew/dev-cmd/bump.rb
<ide> def livecheck_result(formula_or_cask)
<ide> end
<ide>
<ide> def retrieve_pull_requests(formula_or_cask, name)
<del> pull_requests = GitHub.fetch_pull_requests(name, formula_or_cask.tap&.full_name, state: "open")
<add> tap_remote_repo = formula_or_cask.tap&.remote_repo || formula_or_cask.tap&.full_name
<add> pull_requests = GitHub.fetch_pull_requests(name, tap_remote_repo, state: "open")
<ide> if pull_requests.try(:any?)
<ide> pull_requests = pull_requests.map { |pr| "#{pr["title"]} (#{Formatter.url(pr["html_url"])})" }.join(", ")
<ide> end
<ide><path>Library/Homebrew/tap.rb
<ide> def remote
<ide> @remote ||= path.git_origin
<ide> end
<ide>
<add> # The remote repository name of this {Tap}.
<add> # e.g. `user/homebrew-repo`
<add> def remote_repo
<add> raise TapUnavailableError, name unless installed?
<add>
<add> @remote_repo ||= remote&.sub(%r{^https://github\.com/}, "")&.sub(/\.git$/, "")
<add> end
<add>
<ide> # The default remote path to this {Tap}.
<ide> sig { returns(String) }
<ide> def default_remote
<ide><path>Library/Homebrew/test/tap_spec.rb
<ide> def setup_completion(link:)
<ide> end
<ide> end
<ide>
<add> describe "#remote_repo" do
<add> it "returns the remote repository" do
<add> setup_git_repo
<add>
<add> expect(homebrew_foo_tap.remote_repo).to eq("Homebrew/homebrew-foo")
<add> expect { described_class.new("Homebrew", "bar").remote_repo }.to raise_error(TapUnavailableError)
<add>
<add> services_tap = described_class.new("Homebrew", "services")
<add> services_tap.path.mkpath
<add> services_tap.path.cd do
<add> system "git", "init"
<add> system "git", "remote", "add", "origin", "https://github.com/Homebrew/homebrew-bar"
<add> end
<add> expect(services_tap.remote_repo).to eq("Homebrew/homebrew-bar")
<add> end
<add>
<add> it "returns nil if the Tap is not a Git repository" do
<add> expect(homebrew_foo_tap.remote_repo).to be nil
<add> end
<add>
<add> it "returns nil if Git is not available" do
<add> setup_git_repo
<add> allow(Utils::Git).to receive(:available?).and_return(false)
<add> expect(homebrew_foo_tap.remote_repo).to be nil
<add> end
<add> end
<add>
<ide> specify "Git variant" do
<ide> touch path/"README"
<ide> setup_git_repo
<ide><path>Library/Homebrew/utils/github.rb
<ide> def search_code(repo: nil, user: "Homebrew", path: ["Formula", "Casks", "."], fi
<ide> end
<ide> end
<ide>
<del> def issues_for_formula(name, tap: CoreTap.instance, tap_full_name: tap.full_name, state: nil)
<del> search_issues(name, repo: tap_full_name, state: state, in: "title")
<add> def issues_for_formula(name, tap: CoreTap.instance, tap_remote_repo: tap.full_name, state: nil)
<add> search_issues(name, repo: tap_remote_repo, state: state, in: "title")
<ide> end
<ide>
<ide> def user
<ide> def get_repo_license(user, repo)
<ide> nil
<ide> end
<ide>
<del> def fetch_pull_requests(name, tap_full_name, state: nil, version: nil)
<add> def fetch_pull_requests(name, tap_remote_repo, state: nil, version: nil)
<ide> if version.present?
<ide> query = "#{name} #{version}"
<ide> regex = /(^|\s)#{Regexp.quote(name)}(:|,|\s)(.*\s)?#{Regexp.quote(version)}(:|,|\s|$)/i
<ide> else
<ide> query = name
<ide> regex = /(^|\s)#{Regexp.quote(name)}(:|,|\s|$)/i
<ide> end
<del> issues_for_formula(query, tap_full_name: tap_full_name, state: state).select do |pr|
<add> issues_for_formula(query, tap_remote_repo: tap_remote_repo, state: state).select do |pr|
<ide> pr["html_url"].include?("/pull/") && regex.match?(pr["title"])
<ide> end
<ide> rescue API::RateLimitExceededError => e
<ide> opoo e.message
<ide> []
<ide> end
<ide>
<del> def check_for_duplicate_pull_requests(name, tap_full_name, state:, file:, args:, version: nil)
<del> pull_requests = fetch_pull_requests(name, tap_full_name, state: state, version: version).select do |pr|
<del> pr_files = API.open_rest(url_to("repos", tap_full_name, "pulls", pr["number"], "files"))
<add> def check_for_duplicate_pull_requests(name, tap_remote_repo, state:, file:, args:, version: nil)
<add> pull_requests = fetch_pull_requests(name, tap_remote_repo, state: state, version: version).select do |pr|
<add> pr_files = API.open_rest(url_to("repos", tap_remote_repo, "pulls", pr["number"], "files"))
<ide> pr_files.any? { |f| f["filename"] == file }
<ide> end
<ide> return if pull_requests.blank?
<ide> def check_for_duplicate_pull_requests(name, tap_full_name, state:, file:, args:,
<ide> end
<ide> end
<ide>
<del> def forked_repo_info!(tap_full_name)
<del> response = create_fork(tap_full_name)
<add> def forked_repo_info!(tap_remote_repo)
<add> response = create_fork(tap_remote_repo)
<ide> # GitHub API responds immediately but fork takes a few seconds to be ready.
<del> sleep 1 until check_fork_exists(tap_full_name)
<add> sleep 1 until check_fork_exists(tap_remote_repo)
<ide> remote_url = if system("git", "config", "--local", "--get-regexp", "remote\..*\.url", "git@github.com:.*")
<ide> response.fetch("ssh_url")
<ide> else
<ide> def create_bump_pr(info, args:)
<ide> branch = info[:branch_name]
<ide> commit_message = info[:commit_message]
<ide> previous_branch = info[:previous_branch] || "-"
<del> tap_full_name = info[:tap_full_name] || tap.full_name
<add> tap_remote_repo = info[:tap_remote_repo] || tap.full_name
<ide> pr_message = info[:pr_message]
<ide>
<ide> sourcefile_path.parent.cd do
<ide> def create_bump_pr(info, args:)
<ide> username = tap.user
<ide> else
<ide> begin
<del> remote_url, username = forked_repo_info!(tap_full_name)
<add> remote_url, username = forked_repo_info!(tap_remote_repo)
<ide> rescue *API::ERRORS => e
<ide> sourcefile_path.atomic_write(old_contents)
<ide> odie "Unable to fork: #{e.message}!"
<ide> def create_bump_pr(info, args:)
<ide> end
<ide>
<ide> begin
<del> url = create_pull_request(tap_full_name, commit_message,
<add> url = create_pull_request(tap_remote_repo, commit_message,
<ide> "#{username}:#{branch}", remote_branch, pr_message)["html_url"]
<ide> if args.no_browse?
<ide> puts url | 6 |
Javascript | Javascript | add support for inspecting symbols | 834382ae778be7c1bb24136bc699d2dc90ca2744 | <ide><path>packages/ember-metal/lib/utils.js
<ide> export function inspect(obj) {
<ide> return '[' + obj + ']';
<ide> }
<ide> // for non objects
<del> if (typeof obj !== 'object') {
<add> var type = typeof obj;
<add> if (type !== 'object' && type !== 'symbol') {
<ide> return ''+obj;
<ide> }
<ide> // overridden toString
<ide><path>packages/ember-metal/tests/utils_test.js
<add>import { inspect } from 'ember-metal/utils';
<add>
<add>QUnit.module("Ember Metal Utils");
<add>
<add>QUnit.test("inspect outputs the toString() representation of Symbols", function() {
<add> // Symbol is not defined on pre-ES2015 runtimes, so this let's us safely test
<add> // for it's existence (where a simple `if (Symbol)` would ReferenceError)
<add> let Symbol = Symbol || null;
<add>
<add> if (Symbol) {
<add> let symbol = Symbol('test');
<add> equal(inspect(symbol), 'Symbol(test)');
<add> } else {
<add> expect(0);
<add> }
<add>}); | 2 |
Java | Java | remove trailing whitespace from java source code | 7a690df92522a2e8bf0aad1b27c6238fbe489339 | <ide><path>spring-context/src/test/java/org/springframework/context/annotation/AutoProxyLazyInitTests.java
<ide> public void withStaticBeanMethod() {
<ide>
<ide> ApplicationContext ctx = new AnnotationConfigApplicationContext(ConfigWithStatic.class);
<ide> MyBean bean = ctx.getBean("myBean", MyBean.class);
<del>
<add>
<ide> assertFalse(MyBeanImpl.initialized);
<ide> bean.doIt();
<ide> assertTrue(MyBeanImpl.initialized);
<ide><path>spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java
<ide> * <li>Searching on methods in interfaces, if the annotated element is a method
<ide> * <li>Searching on methods in superclasses, if the annotated element is a method
<ide> * </ul>
<del> *
<add> *
<ide> * <h3>Support for {@code @Inherited}</h3>
<ide> * <p>Methods following <em>get semantics</em> will honor the contract of
<ide> * Java's {@link java.lang.annotation.Inherited @Inherited} annotation.
<ide><path>spring-core/src/test/java/org/springframework/core/convert/support/GenericConversionServiceTests.java
<ide> /**
<ide> * Unit tests for the {@link GenericConversionService}.
<ide> *
<del> * <p>For tests involving the {@link DefaultConversionService}, see
<add> * <p>For tests involving the {@link DefaultConversionService}, see
<ide> * {@link DefaultConversionServiceTests}.
<ide> *
<ide> * @author Keith Donald
<ide><path>spring-core/src/test/java/org/springframework/util/concurrent/SettableListenableFutureTests.java
<ide> public void cancelDoesNotNotifyCallbacksOnSetException() {
<ide> private static class InterruptableSettableListenableFuture extends SettableListenableFuture<String> {
<ide>
<ide> private boolean interrupted = false;
<del>
<add>
<ide> @Override
<ide> protected void interruptTask() {
<ide> interrupted = true;
<ide> }
<del>
<add>
<ide> boolean calledInterruptTask() {
<ide> return interrupted;
<ide> }
<ide><path>spring-expression/src/main/java/org/springframework/expression/spel/CodeFlow.java
<ide> public static void insertUnboxNumberInsns(MethodVisitor mv, char targetDescripto
<ide> mv.visitTypeInsn(CHECKCAST, "java/lang/Number");
<ide> }
<ide> mv.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Number", "doubleValue", "()D", false);
<del> break;
<add> break;
<ide> case 'F':
<ide> if (stackDescriptor.equals("Ljava/lang/Object")) {
<ide> mv.visitTypeInsn(CHECKCAST, "java/lang/Number");
<ide><path>spring-expression/src/main/java/org/springframework/expression/spel/standard/InternalSpelExpressionParser.java
<ide> else if (peekToken(TokenKind.COLON,true)) {
<ide> // '}' - end of list
<ide> // ',' - more expressions in this list
<ide> // ':' - this is a map!
<del>
<add>
<ide> if (peekToken(TokenKind.RCURLY)) { // list with one item in it
<ide> List<SpelNodeImpl> listElements = new ArrayList<SpelNodeImpl>();
<ide> listElements.add(firstExpression);
<ide> else if (peekToken(TokenKind.COMMA, true)) { // multi item list
<ide> while (peekToken(TokenKind.COMMA,true));
<ide> closingCurly = eatToken(TokenKind.RCURLY);
<ide> expr = new InlineList(toPos(t.startPos,closingCurly.endPos),listElements.toArray(new SpelNodeImpl[listElements.size()]));
<del>
<add>
<ide> }
<ide> else if (peekToken(TokenKind.COLON, true)) { // map!
<ide> List<SpelNodeImpl> mapElements = new ArrayList<SpelNodeImpl>();
<ide><path>spring-expression/src/main/java/org/springframework/expression/spel/support/ReflectiveMethodExecutor.java
<ide> private Class<?> discoverPublicClass(Method method, Class<?> clazz) {
<ide> clazz.getDeclaredMethod(method.getName(), method.getParameterTypes());
<ide> return clazz;
<ide> } catch (NoSuchMethodException nsme) {
<del>
<add>
<ide> }
<ide> }
<ide> Class<?>[] intfaces = clazz.getInterfaces();
<ide><path>spring-expression/src/test/java/org/springframework/expression/spel/MapTests.java
<ide> public void testInlineMapWriting() {
<ide> // list should be unmodifiable
<ide> evaluate("{a:1, b:2, c:3, d:4, e:5}[a]=6", "[a:1,b: 2,c: 3,d: 4,e: 5]", unmodifiableClass);
<ide> }
<del>
<add>
<ide> @Test
<ide> public void testMapKeysThatAreAlsoSpELKeywords() {
<ide> SpelExpressionParser parser = new SpelExpressionParser();
<ide> public void testMapKeysThatAreAlsoSpELKeywords() {
<ide> expression = (SpelExpression) parser.parseExpression("foo['abc.def']");
<ide> o = expression.getValue(new MapHolder());
<ide> assertEquals("value", o);
<del>
<add>
<ide> expression = (SpelExpression)parser.parseExpression("foo[foo[NEW]]");
<ide> o = expression.getValue(new MapHolder());
<ide> assertEquals("37",o);
<ide>
<ide> expression = (SpelExpression)parser.parseExpression("foo[foo[new]]");
<ide> o = expression.getValue(new MapHolder());
<ide> assertEquals("38",o);
<del>
<add>
<ide> expression = (SpelExpression)parser.parseExpression("foo[foo[foo[T]]]");
<ide> o = expression.getValue(new MapHolder());
<ide> assertEquals("value",o);
<ide><path>spring-expression/src/test/java/org/springframework/expression/spel/SpelCompilationCoverageTests.java
<ide> public void typeReference() throws Exception {
<ide> assertEquals(String.class,expression.getValue());
<ide> assertCanCompile(expression);
<ide> assertEquals(String.class,expression.getValue());
<del>
<add>
<ide> expression = parse("T(java.io.IOException)");
<ide> assertEquals(IOException.class,expression.getValue());
<ide> assertCanCompile(expression);
<ide> public void failsWhenSettingContextForExpression_SPR12326() {
<ide> context.setVariable("it", person);
<ide> expression.setEvaluationContext(context);
<ide> assertTrue(expression.getValue(Boolean.class));
<del> assertTrue(expression.getValue(Boolean.class));
<add> assertTrue(expression.getValue(Boolean.class));
<ide> assertCanCompile(expression);
<ide> assertTrue(expression.getValue(Boolean.class));
<ide> }
<ide> public void methodReferenceMissingCastAndRootObjectAccessing_SPR12326() {
<ide> assertTrue((Boolean)ex.getValue(context));
<ide> assertTrue((Boolean)ex.getValue(context));
<ide> }
<del>
<add>
<ide> public class Person {
<ide>
<ide> private int age;
<ide> public void setAge(int age) {
<ide> }
<ide>
<ide> public class Person3 {
<del>
<add>
<ide> private int age;
<ide>
<ide> public Person3(String name, int age) {
<ide> public void indexer() throws Exception {
<ide> float[] fs = new float[]{6.0f,7.0f,8.0f};
<ide> byte[] bs = new byte[]{(byte)2,(byte)3,(byte)4};
<ide> char[] cs = new char[]{'a','b','c'};
<del>
<add>
<ide> // Access String (reference type) array
<ide> expression = parser.parseExpression("[0]");
<ide> assertEquals("a",expression.getValue(sss));
<ide> public void fourteen(String a, String[]... vargs) {
<ide> for (String[] varg: vargs) {
<ide> s+="{";
<ide> for (String v: varg) {
<del> s+=v;
<add> s+=v;
<ide> }
<ide> s+="}";
<ide> }
<ide> public void fifteen(String a, int[]... vargs) {
<ide> for (int[] varg: vargs) {
<ide> s+="{";
<ide> for (int v: varg) {
<del> s+=Integer.toString(v);
<add> s+=Integer.toString(v);
<ide> }
<ide> s+="}";
<ide> }
<ide> public TestClass8(int i, String s, double d, boolean z) {
<ide> }
<ide>
<ide> public TestClass8() {
<del>
<add>
<ide> }
<ide>
<ide> public TestClass8(Integer i) {
<ide> public Obj3(int... params) {
<ide> }
<ide> output = b.toString();
<ide> }
<del>
<add>
<ide> public Obj3(String s, Float f, int... ints) {
<ide> StringBuilder b = new StringBuilder();
<ide> b.append(s);
<ide> public Obj3(String s, Float f, int... ints) {
<ide> output = b.toString();
<ide> }
<ide> }
<del>
<add>
<ide> public static class Obj4 {
<del>
<add>
<ide> public final String output;
<ide>
<ide> public Obj4(int[] params) {
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/StompHeaders.java
<ide> public class StompHeaders implements MultiValueMap<String, String>, Serializable
<ide> * Create a new instance to be populated with new header values.
<ide> */
<ide> public StompHeaders() {
<del> this(new LinkedMultiValueMap<String, String>(4), false);
<add> this(new LinkedMultiValueMap<String, String>(4), false);
<ide> }
<del>
<add>
<ide> private StompHeaders(Map<String, List<String>> headers, boolean readOnly) {
<ide> Assert.notNull(headers, "'headers' must not be null");
<ide> if (readOnly) {
<ide><path>spring-test/src/main/java/org/springframework/test/annotation/DirtiesContext.java
<ide> *
<ide> * <p>{@code @DirtiesContext} may be used as a class-level and method-level
<ide> * annotation within the same class or class hierarchy. In such scenarios, the
<del> * {@code ApplicationContext} will be marked as <em>dirty</em> before or
<add> * {@code ApplicationContext} will be marked as <em>dirty</em> before or
<ide> * after any such annotated method as well as before or after the current test
<ide> * class, depending on the configured {@link #methodMode} and {@link #classMode}.
<ide> *
<ide><path>spring-test/src/test/java/org/springframework/test/context/cache/ClassLevelDirtiesContextTestNGTests.java
<ide> * JUnit 4 based integration test which verifies correct {@linkplain ContextCache
<ide> * application context caching} in conjunction with Spring's TestNG support
<ide> * and {@link DirtiesContext @DirtiesContext} at the class level.
<del> *
<add> *
<ide> * <p>This class is a direct copy of {@link ClassLevelDirtiesContextTests},
<ide> * modified to verify behavior in conjunction with TestNG.
<ide> * | 12 |
Javascript | Javascript | fix resize bug of line / radar charts | 0c48c1148a7f791fb31a01f8b1ef51386559bab3 | <ide><path>src/controllers/controller.radar.js
<ide> export default class RadarController extends DatasetController {
<ide> const labels = meta.iScale.getLabels();
<ide>
<ide> // Update Line
<add> line.points = points;
<ide> // In resize mode only point locations change, so no need to set the points or options.
<ide> if (mode !== 'resize') {
<ide> const properties = {
<del> points,
<ide> _loop: true,
<ide> _fullLoop: labels.length === points.length,
<ide> options: me.resolveDatasetElementOptions()
<ide><path>src/elements/element.line.js
<ide> export default class LineElement extends Element {
<ide> }
<ide>
<ide> set points(points) {
<del> this._points = points;
<del> delete this._segments;
<del> delete this._path;
<add> const me = this;
<add> me._points = points;
<add> delete me._segments;
<add> delete me._path;
<add> me._pointsUpdated = false;
<ide> }
<ide>
<ide> get points() { | 2 |
Java | Java | add partial websocketmessage support | fb4e34fce486315935a0292ac0dbe7e0b1497f7f | <ide><path>spring-websocket/src/main/java/org/springframework/web/socket/BinaryMessage.java
<ide> */
<ide> package org.springframework.web.socket;
<ide>
<del>import java.io.ByteArrayInputStream;
<del>import java.io.InputStream;
<ide> import java.nio.ByteBuffer;
<ide>
<ide>
<ide> public final class BinaryMessage extends WebSocketMessage<ByteBuffer> {
<ide> /**
<ide> * Create a new {@link BinaryMessage} instance.
<ide> * @param payload a non-null payload
<del> * @param isLast if the message is the last of a series of partial messages
<ide> */
<ide> public BinaryMessage(ByteBuffer payload) {
<del> super(payload);
<del> this.bytes = null;
<add> this(payload, true);
<ide> }
<ide>
<ide> /**
<ide> * Create a new {@link BinaryMessage} instance.
<ide> * @param payload a non-null payload
<ide> * @param isLast if the message is the last of a series of partial messages
<ide> */
<add> public BinaryMessage(ByteBuffer payload, boolean isLast) {
<add> super(payload, isLast);
<add> this.bytes = null;
<add> }
<add>
<add> /**
<add> * Create a new {@link BinaryMessage} instance.
<add> * @param payload a non-null payload
<add> */
<ide> public BinaryMessage(byte[] payload) {
<del> this(payload, 0, (payload == null ? 0 : payload.length));
<add> this(payload, 0, (payload == null ? 0 : payload.length), true);
<add> }
<add>
<add> /**
<add> * Create a new {@link BinaryMessage} instance.
<add> * @param payload a non-null payload
<add> * @param isLast if the message is the last of a series of partial messages
<add> */
<add> public BinaryMessage(byte[] payload, boolean isLast) {
<add> this(payload, 0, (payload == null ? 0 : payload.length), isLast);
<ide> }
<ide>
<ide> /**
<ide> public BinaryMessage(byte[] payload) {
<ide> * @param len the length of the array considered for the payload
<ide> * @param isLast if the message is the last of a series of partial messages
<ide> */
<del> public BinaryMessage(byte[] payload, int offset, int len) {
<del> super(payload != null ? ByteBuffer.wrap(payload, offset, len) : null);
<add> public BinaryMessage(byte[] payload, int offset, int len, boolean isLast) {
<add> super(payload != null ? ByteBuffer.wrap(payload, offset, len) : null, isLast);
<ide> if(offset == 0 && len == payload.length) {
<ide> this.bytes = payload;
<ide> }
<ide> private byte[] getRemainingBytes(ByteBuffer payload) {
<ide> return result;
<ide> }
<ide>
<del> /**
<del> * Returns access to the message payload as an {@link InputStream}.
<del> */
<del> public InputStream getInputStream() {
<del> byte[] array = getByteArray();
<del> return (array != null) ? new ByteArrayInputStream(array) : null;
<del> }
<del>
<ide> @Override
<del> public String toString() {
<del> int size = (getPayload() != null) ? getPayload().remaining() : 0;
<del> return "WebSocket binary message size=" + size;
<add> protected int getPayloadSize() {
<add> return (getPayload() != null) ? getPayload().remaining() : 0;
<ide> }
<ide>
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/TextMessage.java
<ide> */
<ide> public final class TextMessage extends WebSocketMessage<String> {
<ide>
<add>
<ide> /**
<ide> * Create a new {@link TextMessage} instance.
<ide> * @param payload the payload
<add> * @param isLast whether this the last part of a message received or transmitted in parts
<ide> */
<ide> public TextMessage(CharSequence payload) {
<del> super(payload.toString());
<add> super(payload.toString(), true);
<add> }
<add>
<add> /**
<add> * Create a new {@link TextMessage} instance.
<add> * @param payload the payload
<add> * @param isLast whether this the last part of a message received or transmitted in parts
<add> */
<add> public TextMessage(CharSequence payload, boolean isLast) {
<add> super(payload.toString(), isLast);
<ide> }
<ide>
<ide> /**
<ide> public Reader getReader() {
<ide> return new StringReader(getPayload());
<ide> }
<ide>
<add> @Override
<add> protected int getPayloadSize() {
<add> return getPayload().length();
<add> }
<add>
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/WebSocketHandler.java
<ide> public interface WebSocketHandler {
<ide> */
<ide> void afterConnectionClosed(WebSocketSession session, CloseStatus closeStatus) throws Exception;
<ide>
<add> /**
<add> * Whether the WebSocketHandler handles messages in parts.
<add> */
<add> boolean supportsPartialMessages();
<add>
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/WebSocketMessage.java
<ide>
<ide> private final T payload;
<ide>
<add> private final boolean last;
<add>
<ide>
<ide> /**
<ide> * Create a new {@link WebSocketMessage} instance with the given payload.
<ide> * @param payload a non-null payload
<ide> */
<del> WebSocketMessage(T payload) {
<add> WebSocketMessage(T payload, boolean isLast) {
<ide> Assert.notNull(payload, "Payload must not be null");
<ide> this.payload = payload;
<add> this.last = isLast;
<ide> }
<ide>
<ide> /**
<ide> public T getPayload() {
<ide> return this.payload;
<ide> }
<ide>
<del> @Override
<del> public String toString() {
<del> return getClass().getSimpleName() + " [payload=" + this.payload + "]";
<add> /**
<add> * Whether this is the last part of a message, when partial message support on a
<add> * {@link WebSocketHandler} is enabled. If partial message support is not enabled the
<add> * returned value is always {@code true}.
<add> */
<add> public boolean isLast() {
<add> return this.last;
<ide> }
<ide>
<ide> @Override
<ide> public boolean equals(Object other) {
<ide> return ObjectUtils.nullSafeEquals(this.payload, otherMessage.payload);
<ide> }
<ide>
<add> @Override
<add> public String toString() {
<add> return getClass().getSimpleName() + " [payload length=" + getPayloadSize() + ", last=" + isLast() + "]";
<add> }
<add>
<add> protected abstract int getPayloadSize();
<add>
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/adapter/JettyWebSocketListenerAdapter.java
<ide> public void onWebSocketText(String payload) {
<ide>
<ide> @Override
<ide> public void onWebSocketBinary(byte[] payload, int offset, int len) {
<del> BinaryMessage message = new BinaryMessage(payload, offset, len);
<add> BinaryMessage message = new BinaryMessage(payload, offset, len, true);
<ide> try {
<ide> this.webSocketHandler.handleMessage(this.wsSession, message);
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/adapter/StandardEndpointAdapter.java
<ide> public void onOpen(final javax.websocket.Session session, EndpointConfig config)
<ide>
<ide> this.wsSession.initSession(session);
<ide>
<add> if (this.handler.supportsPartialMessages()) {
<add> session.addMessageHandler(new MessageHandler.Partial<String>() {
<add> @Override
<add> public void onMessage(String message, boolean isLast) {
<add> handleTextMessage(session, message, isLast);
<add> }
<add> });
<add> session.addMessageHandler(new MessageHandler.Partial<ByteBuffer>() {
<add> @Override
<add> public void onMessage(ByteBuffer message, boolean isLast) {
<add> handleBinaryMessage(session, message, isLast);
<add> }
<add> });
<add> }
<add> else {
<add> session.addMessageHandler(new MessageHandler.Whole<String>() {
<add> @Override
<add> public void onMessage(String message) {
<add> handleTextMessage(session, message, true);
<add> }
<add> });
<add> session.addMessageHandler(new MessageHandler.Whole<ByteBuffer>() {
<add> @Override
<add> public void onMessage(ByteBuffer message) {
<add> handleBinaryMessage(session, message, true);
<add> }
<add> });
<add> }
<add>
<ide> try {
<ide> this.handler.afterConnectionEstablished(this.wsSession);
<ide> }
<ide> catch (Throwable t) {
<ide> ExceptionWebSocketHandlerDecorator.tryCloseWithError(this.wsSession, t, logger);
<ide> return;
<ide> }
<del>
<del> session.addMessageHandler(new MessageHandler.Whole<String>() {
<del> @Override
<del> public void onMessage(String message) {
<del> handleTextMessage(session, message);
<del> }
<del> });
<del> session.addMessageHandler(new MessageHandler.Whole<ByteBuffer>() {
<del> @Override
<del> public void onMessage(ByteBuffer message) {
<del> handleBinaryMessage(session, message);
<del> }
<del> });
<ide> }
<ide>
<del> private void handleTextMessage(javax.websocket.Session session, String payload) {
<del> TextMessage textMessage = new TextMessage(payload);
<add> private void handleTextMessage(javax.websocket.Session session, String payload, boolean isLast) {
<add> TextMessage textMessage = new TextMessage(payload, isLast);
<ide> try {
<ide> this.handler.handleMessage(this.wsSession, textMessage);
<ide> }
<ide> private void handleTextMessage(javax.websocket.Session session, String payload)
<ide> }
<ide> }
<ide>
<del> private void handleBinaryMessage(javax.websocket.Session session, ByteBuffer payload) {
<del> BinaryMessage binaryMessage = new BinaryMessage(payload);
<add> private void handleBinaryMessage(javax.websocket.Session session, ByteBuffer payload, boolean isLast) {
<add> BinaryMessage binaryMessage = new BinaryMessage(payload, isLast);
<ide> try {
<ide> this.handler.handleMessage(this.wsSession, binaryMessage);
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/adapter/StandardWebSocketSessionAdapter.java
<ide> public boolean isOpen() {
<ide>
<ide> @Override
<ide> protected void sendTextMessage(TextMessage message) throws IOException {
<del> this.session.getBasicRemote().sendText(message.getPayload());
<add> this.session.getBasicRemote().sendText(message.getPayload(), message.isLast());
<ide> }
<ide>
<ide> @Override
<ide> protected void sendBinaryMessage(BinaryMessage message) throws IOException {
<del> this.session.getBasicRemote().sendBinary(message.getPayload());
<add> this.session.getBasicRemote().sendBinary(message.getPayload(), message.isLast());
<ide> }
<ide>
<ide> @Override
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/adapter/WebSocketHandlerAdapter.java
<ide> public void afterConnectionEstablished(WebSocketSession session) throws Exceptio
<ide> }
<ide>
<ide> @Override
<del> public final void handleMessage(WebSocketSession session, WebSocketMessage<?> message) throws Exception {
<add> public void handleMessage(WebSocketSession session, WebSocketMessage<?> message) throws Exception {
<ide> if (message instanceof TextMessage) {
<ide> handleTextMessage(session, (TextMessage) message);
<ide> }
<ide> else if (message instanceof BinaryMessage) {
<ide> handleBinaryMessage(session, (BinaryMessage) message);
<ide> }
<ide> else {
<del> // should not happen
<ide> throw new IllegalStateException("Unexpected WebSocket message type: " + message);
<ide> }
<ide> }
<ide> public void handleTransportError(WebSocketSession session, Throwable exception)
<ide> public void afterConnectionClosed(WebSocketSession session, CloseStatus status) throws Exception {
<ide> }
<ide>
<add> @Override
<add> public boolean supportsPartialMessages() {
<add> return false;
<add> }
<add>
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/support/LoggingWebSocketHandlerDecorator.java
<ide> public void afterConnectionEstablished(WebSocketSession session) throws Exceptio
<ide>
<ide> @Override
<ide> public void handleMessage(WebSocketSession session, WebSocketMessage<?> message) throws Exception {
<del> if (logger.isTraceEnabled()) {
<del> logger.trace("Received " + message + ", " + session);
<add> if (logger.isDebugEnabled()) {
<add> logger.debug("Received " + message + ", " + session);
<ide> }
<ide> super.handleMessage(session, message);
<ide> }
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/support/PerConnectionWebSocketHandler.java
<ide> public class PerConnectionWebSocketHandler implements WebSocketHandler, BeanFact
<ide> private final Map<WebSocketSession, WebSocketHandler> handlers =
<ide> new ConcurrentHashMap<WebSocketSession, WebSocketHandler>();
<ide>
<add> private final boolean supportsPartialMessages;
<add>
<ide>
<ide> public PerConnectionWebSocketHandler(Class<? extends WebSocketHandler> handlerType) {
<add> this(handlerType, false);
<add> }
<add>
<add> public PerConnectionWebSocketHandler(Class<? extends WebSocketHandler> handlerType, boolean supportsPartialMessages) {
<ide> this.provider = new BeanCreatingHandlerProvider<WebSocketHandler>(handlerType);
<add> this.supportsPartialMessages = supportsPartialMessages;
<ide> }
<ide>
<ide> @Override
<ide> private void destroy(WebSocketSession session) {
<ide> }
<ide> }
<ide>
<add> @Override
<add> public boolean supportsPartialMessages() {
<add> return this.supportsPartialMessages;
<add> }
<add>
<ide> @Override
<ide> public String toString() {
<ide> return "PerConnectionWebSocketHandlerProxy [handlerType=" + this.provider.getHandlerType() + "]";
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/support/WebSocketHandlerDecorator.java
<ide> public void afterConnectionClosed(WebSocketSession session, CloseStatus closeSta
<ide> this.delegate.afterConnectionClosed(session, closeStatus);
<ide> }
<ide>
<add> @Override
<add> public boolean supportsPartialMessages() {
<add> return this.delegate.supportsPartialMessages();
<add> }
<add>
<add>
<ide> @Override
<ide> public String toString() {
<ide> return getClass().getSimpleName() + " [delegate=" + this.delegate + "]"; | 11 |
Python | Python | use plac annotations for arguments | 4d896171ae43a4faba1b3c5cf480e641beb84cf3 | <ide><path>examples/training/train_parser.py
<ide> """
<ide> from __future__ import unicode_literals, print_function
<ide>
<add>import plac
<ide> import random
<ide> from pathlib import Path
<ide>
<ide> ]
<ide>
<ide>
<add>@plac.annotations(
<add> model=("Model name. Defaults to blank 'en' model.", "option", "m", str),
<add> output_dir=("Optional output directory", "option", "o", Path),
<add> n_iter=("Number of training iterations", "option", "n", int))
<ide> def main(model=None, output_dir=None, n_iter=1000):
<del> """Load the model, set up the pipeline and train the parser.
<del>
<del> model (unicode): Model name to start off with. If None, a blank English
<del> Language class is created.
<del> output_dir (unicode / Path): Optional output directory. If None, no model
<del> will be saved.
<del> n_iter (int): Number of iterations during training.
<del> """
<add> """Load the model, set up the pipeline and train the parser."""
<ide> if model is not None:
<ide> nlp = spacy.load(model) # load existing spaCy model
<ide> print("Loaded model '%s'" % model)
<ide> def main(model=None, output_dir=None, n_iter=1000):
<ide>
<ide>
<ide> if __name__ == '__main__':
<del> import plac
<ide> plac.call(main)
<ide>
<ide> # expected result: | 1 |
Text | Text | fix missing links | eb74ff4bf9e3b83edee346fc0dfa99e7e7cc086c | <ide><path>README.md
<ide> While these are tractable for CSS itself, we don’t need to duplicate the effor
<ide>
<ide> We track V8. Since V8 has wide support for ES6 and `async` and `await`, we transpile those. Since V8 doesn’t support class decorators, we don’t transpile those.
<ide>
<del>See [this](link to default babel config we use) and [this](link to issue that tracks the ability to change babel options)
<add>See [this](https://github.com/zeit/next.js/blob/master/server/build/webpack.js#L79) and [this](https://github.com/zeit/next.js/issues/26)
<ide>
<ide> </details>
<ide> | 1 |
Text | Text | fix comma splice [ci skip] | 092d1af137d6d52349880417923874e17508e608 | <ide><path>guides/source/engines.md
<ide> To hook into the initialization process of one of the following classes use the
<ide>
<ide> ## Configuration hooks
<ide>
<del>These are the available configuration hooks. They do not hook into any particular framework, instead they run in context of the entire application.
<add>These are the available configuration hooks. They do not hook into any particular framework, but instead they run in context of the entire application.
<ide>
<ide> | Hook | Use Case |
<ide> | ---------------------- | ------------------------------------------------------------------------------------- | | 1 |
Ruby | Ruby | push parameter instantiation to one method | 9669f6f7883787aa209207cab68b1069636aed9e | <ide><path>actionpack/lib/action_controller/test_case.rb
<ide> def paramify_values(hash_or_array_or_value)
<ide> hash_or_array_or_value.map {|i| paramify_values(i)}
<ide> when Rack::Test::UploadedFile, ActionDispatch::Http::UploadedFile
<ide> hash_or_array_or_value
<add> when nil then {}
<ide> else
<ide> hash_or_array_or_value.to_param
<ide> end
<ide> def process(action, http_method = 'GET', *args)
<ide>
<ide> @request.env['REQUEST_METHOD'] = http_method
<ide>
<del> parameters ||= {}
<ide> controller_class_name = @controller.class.anonymous? ?
<ide> "anonymous" :
<ide> @controller.class.name.underscore.sub(/_controller$/, '') | 1 |
Text | Text | add section for git remote mirroring | 3f64bcb2fbffc663be53b1739c1e6009be9db055 | <ide><path>docs/Installation.md
<ide> it does it too. You have to confirm everything it will do before it starts.
<ide> [Xcode](https://itunes.apple.com/us/app/xcode/id497799835) <sup>[3](#3)</sup>
<ide> * A Bourne-compatible shell for installation (e.g. `bash` or `zsh`) <sup>[4](#4)</sup>
<ide>
<add>## Git Remote Mirroring
<add>
<add>You can set `HOMEBREW_BREW_GIT_REMOTE` and/or `HOMEBREW_CORE_GIT_REMOTE` in your shell environment to use geolocalized Git mirrors to speed up Homebrew's installation with this script and, after installation, `brew update`.
<add>
<add>```bash
<add>export HOMEBREW_BREW_GIT_REMOTE="..." # put your Git mirror of Homebrew/brew here
<add>export HOMEBREW_CORE_GIT_REMOTE="..." # put your Git mirror of Homebrew/homebrew-core here
<add>/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
<add>```
<add>
<add>The default Git remote will be used if the corresponding environment variable is unset.
<add>
<ide> ## Alternative Installs
<ide>
<ide> ### Linux or Windows 10 Subsystem for Linux | 1 |
Python | Python | fix indentation in code examples in docs | 9ad5ed81037480eece92a3d6f4f5825e084f3b16 | <ide><path>docs/autogen.py
<ide> def process_class_docstring(docstring):
<ide> docstring = re.sub(r' ([^\s\\]+):(.*)\n',
<ide> r' - __\1__:\2\n',
<ide> docstring)
<add>
<add> docstring = docstring.replace(' ' * 5, '\t\t')
<ide> docstring = docstring.replace(' ' * 3, '\t')
<ide> docstring = docstring.replace(' ', '')
<ide> return docstring
<ide> def process_method_docstring(docstring):
<ide> docstring = re.sub(r' ([^\s\\]+):(.*)\n',
<ide> r' - __\1__:\2\n',
<ide> docstring)
<add>
<add> docstring = docstring.replace(' ' * 6, '\t\t')
<ide> docstring = docstring.replace(' ' * 4, '\t')
<ide> docstring = docstring.replace(' ', '')
<ide> return docstring | 1 |
Javascript | Javascript | simplify dispatchevent discrete argument | 121acae09060803d2fdc23fdfd2f671a47908d1f | <ide><path>packages/react-dom/src/events/DOMEventResponderSystem.js
<ide> import type {
<ide> ReactEventComponentInstance,
<ide> ReactResponderContext,
<ide> ReactResponderEvent,
<del> ReactResponderDispatchEventOptions,
<ide> } from 'shared/ReactTypes';
<ide> import type {DOMTopLevelEventType} from 'events/TopLevelEventTypes';
<ide> import {batchedUpdates, interactiveUpdates} from 'events/ReactGenericBatching';
<ide> const eventResponderContext: ReactResponderContext = {
<ide> dispatchEvent(
<ide> possibleEventObject: Object,
<ide> listener: ($Shape<PartialEventObject>) => void,
<del> {discrete}: ReactResponderDispatchEventOptions,
<add> discrete: boolean,
<ide> ): void {
<ide> validateResponderContext();
<ide> const {target, type, timeStamp} = possibleEventObject;
<ide><path>packages/react-dom/src/events/__tests__/DOMEventResponderSystem-test.internal.js
<ide> describe('DOMEventResponderSystem', () => {
<ide> phase: 'bubble',
<ide> timeStamp: context.getTimeStamp(),
<ide> };
<del> context.dispatchEvent(syntheticEvent, props.onMagicClick, {
<del> discrete: true,
<del> });
<add> context.dispatchEvent(syntheticEvent, props.onMagicClick, true);
<ide> }
<ide> },
<ide> onEventCapture: (event, context, props) => {
<ide> describe('DOMEventResponderSystem', () => {
<ide> phase: 'capture',
<ide> timeStamp: context.getTimeStamp(),
<ide> };
<del> context.dispatchEvent(syntheticEvent, props.onMagicClick, {
<del> discrete: true,
<del> });
<add> context.dispatchEvent(syntheticEvent, props.onMagicClick, true);
<ide> }
<ide> },
<ide> });
<ide> describe('DOMEventResponderSystem', () => {
<ide> phase,
<ide> timeStamp: context.getTimeStamp(),
<ide> };
<del> context.dispatchEvent(pressEvent, props.onPress, {discrete: true});
<add> context.dispatchEvent(pressEvent, props.onPress, true);
<ide>
<ide> context.setTimeout(() => {
<ide> if (props.onLongPress) {
<ide> describe('DOMEventResponderSystem', () => {
<ide> phase,
<ide> timeStamp: context.getTimeStamp(),
<ide> };
<del> context.dispatchEvent(longPressEvent, props.onLongPress, {
<del> discrete: true,
<del> });
<add> context.dispatchEvent(longPressEvent, props.onLongPress, true);
<ide> }
<ide>
<ide> if (props.onLongPressChange) {
<ide> describe('DOMEventResponderSystem', () => {
<ide> phase,
<ide> timeStamp: context.getTimeStamp(),
<ide> };
<del> context.dispatchEvent(longPressChangeEvent, props.onLongPressChange, {
<del> discrete: true,
<del> });
<add> context.dispatchEvent(
<add> longPressChangeEvent,
<add> props.onLongPressChange,
<add> true,
<add> );
<ide> }
<ide> }, 500);
<ide> }
<ide> describe('DOMEventResponderSystem', () => {
<ide> type: 'click',
<ide> timeStamp: context.getTimeStamp(),
<ide> };
<del> context.dispatchEvent(syntheticEvent, props.onClick, {
<del> discrete: true,
<del> });
<add> context.dispatchEvent(syntheticEvent, props.onClick, true);
<ide> },
<ide> });
<ide>
<ide><path>packages/react-events/src/Drag.js
<ide> function dispatchDragEvent(
<ide> ): void {
<ide> const target = ((state.dragTarget: any): Element | Document);
<ide> const syntheticEvent = createDragEvent(context, name, target, eventData);
<del> context.dispatchEvent(syntheticEvent, listener, {discrete});
<add> context.dispatchEvent(syntheticEvent, listener, discrete);
<ide> }
<ide>
<ide> const DragResponder = {
<ide><path>packages/react-events/src/Focus.js
<ide> function dispatchFocusInEvents(
<ide> target,
<ide> pointerType,
<ide> );
<del> context.dispatchEvent(syntheticEvent, props.onFocus, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, props.onFocus, true);
<ide> }
<ide> if (props.onFocusChange) {
<ide> const listener = () => {
<ide> function dispatchFocusInEvents(
<ide> target,
<ide> pointerType,
<ide> );
<del> context.dispatchEvent(syntheticEvent, listener, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, listener, true);
<ide> }
<ide> if (props.onFocusVisibleChange && state.isLocalFocusVisible) {
<ide> const listener = () => {
<ide> function dispatchFocusInEvents(
<ide> target,
<ide> pointerType,
<ide> );
<del> context.dispatchEvent(syntheticEvent, listener, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, listener, true);
<ide> }
<ide> }
<ide>
<ide> function dispatchFocusOutEvents(
<ide> target,
<ide> pointerType,
<ide> );
<del> context.dispatchEvent(syntheticEvent, props.onBlur, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, props.onBlur, true);
<ide> }
<ide> if (props.onFocusChange) {
<ide> const listener = () => {
<ide> function dispatchFocusOutEvents(
<ide> target,
<ide> pointerType,
<ide> );
<del> context.dispatchEvent(syntheticEvent, listener, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, listener, true);
<ide> }
<ide> dispatchFocusVisibleOutEvent(context, props, state);
<ide> }
<ide> function dispatchFocusVisibleOutEvent(
<ide> target,
<ide> pointerType,
<ide> );
<del> context.dispatchEvent(syntheticEvent, listener, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, listener, true);
<ide> state.isLocalFocusVisible = false;
<ide> }
<ide> }
<ide><path>packages/react-events/src/Hover.js
<ide> function dispatchHoverChangeEvent(
<ide> 'hoverchange',
<ide> ((state.hoverTarget: any): Element | Document),
<ide> );
<del> context.dispatchEvent(syntheticEvent, listener, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, listener, true);
<ide> }
<ide>
<ide> function dispatchHoverStartEvents(
<ide> function dispatchHoverStartEvents(
<ide> 'hoverstart',
<ide> ((target: any): Element | Document),
<ide> );
<del> context.dispatchEvent(syntheticEvent, props.onHoverStart, {
<del> discrete: true,
<del> });
<add> context.dispatchEvent(syntheticEvent, props.onHoverStart, true);
<ide> }
<ide> if (props.onHoverChange) {
<ide> dispatchHoverChangeEvent(context, props, state);
<ide> function dispatchHoverEndEvents(
<ide> 'hoverend',
<ide> ((target: any): Element | Document),
<ide> );
<del> context.dispatchEvent(syntheticEvent, props.onHoverEnd, {discrete: true});
<add> context.dispatchEvent(syntheticEvent, props.onHoverEnd, true);
<ide> }
<ide> if (props.onHoverChange) {
<ide> dispatchHoverChangeEvent(context, props, state);
<ide> const HoverResponder = {
<ide> 'hovermove',
<ide> state.hoverTarget,
<ide> );
<del> context.dispatchEvent(syntheticEvent, props.onHoverMove, {
<del> discrete: false,
<del> });
<add> context.dispatchEvent(
<add> syntheticEvent,
<add> props.onHoverMove,
<add> true,
<add> );
<ide> }
<ide> }
<ide> }
<ide><path>packages/react-events/src/Press.js
<ide> function dispatchEvent(
<ide> pointerType,
<ide> event,
<ide> );
<del> context.dispatchEvent(syntheticEvent, listener, {
<del> discrete,
<del> });
<add> context.dispatchEvent(syntheticEvent, listener, discrete);
<ide> }
<ide>
<ide> function dispatchPressChangeEvent(
<ide><path>packages/react-events/src/Swipe.js
<ide> function dispatchSwipeEvent(
<ide> ) {
<ide> const target = ((state.swipeTarget: any): Element | Document);
<ide> const syntheticEvent = createSwipeEvent(context, name, target, eventData);
<del> context.dispatchEvent(syntheticEvent, listener, {discrete});
<add> context.dispatchEvent(syntheticEvent, listener, discrete);
<ide> }
<ide>
<ide> type SwipeState = {
<ide><path>packages/shared/ReactTypes.js
<ide> export type ReactResponderEvent = {
<ide> passiveSupported: boolean,
<ide> };
<ide>
<del>export type ReactResponderDispatchEventOptions = {
<del> discrete?: boolean,
<del>};
<del>
<ide> export type ReactResponderContext = {
<ide> dispatchEvent: (
<ide> eventObject: Object,
<ide> listener: (Object) => void,
<del> options: ReactResponderDispatchEventOptions,
<add> discrete: boolean,
<ide> ) => void,
<ide> isTargetWithinElement: (
<ide> childTarget: Element | Document, | 8 |
Ruby | Ruby | add reason helpers, rename valid | 373650d00d51c466fe5c9eaf5d09afdcb018baf7 | <ide><path>Library/Homebrew/dev-cmd/audit.rb
<ide> def audit_deps
<ide> end
<ide>
<ide> if @new_formula &&
<del> dep_f.keg_only_reason&.reason == :provided_by_macos &&
<del> dep_f.keg_only_reason.valid? &&
<add> dep_f.keg_only_reason.provided_by_macos? &&
<add> dep_f.keg_only_reason.applicable? &&
<ide> !%w[apr apr-util openblas openssl openssl@1.1].include?(dep.name)
<ide> new_formula_problem(
<ide> "Dependency '#{dep.name}' is provided by macOS; " \
<ide> def audit_versioned_keg_only
<ide> return unless @core_tap
<ide>
<ide> if formula.keg_only?
<del> return if formula.keg_only_reason.reason == :versioned_formula
<add> return if formula.keg_only_reason.versioned_formula?
<ide> if formula.name.start_with?("openssl", "libressl") &&
<del> formula.keg_only_reason.reason == :provided_by_macos
<add> formula.keg_only_reason.provided_by_macos?
<ide> return
<ide> end
<ide> end
<ide>
<add> # TODO: verify formulae still exist
<ide> keg_only_whitelist = %w[
<ide> autoconf@2.13
<ide> bash-completion@2
<ide><path>Library/Homebrew/extend/os/mac/formula_support.rb
<ide> # frozen_string_literal: true
<ide>
<ide> class KegOnlyReason
<del> def valid?
<add> def applicable?
<ide> true
<ide> end
<ide> end
<ide><path>Library/Homebrew/formula.rb
<ide> def caveats
<ide> def keg_only?
<ide> return false unless keg_only_reason
<ide>
<del> keg_only_reason.valid?
<add> keg_only_reason.applicable?
<ide> end
<ide>
<ide> # @private
<ide><path>Library/Homebrew/formula_support.rb
<ide> def initialize(reason, explanation)
<ide> @explanation = explanation
<ide> end
<ide>
<del> def valid?
<del> ![:provided_by_macos, :provided_by_osx, :shadowed_by_macos].include?(@reason)
<add> def versioned_formula?
<add> @reason == :versioned_formula
<add> end
<add>
<add> def provided_by_macos?
<add> @reason == :provided_by_macos
<add> end
<add>
<add> def shadowed_by_macos?
<add> @reason == :shadowed_by_macos
<add> end
<add>
<add> def by_macos?
<add> provided_by_macos? || shadowed_by_macos?
<add> end
<add>
<add> def applicable?
<add> # macOS reasons aren't applicable on other OSs
<add> # (see extend/os/mac/formula_support for override on macOS)
<add> !by_macos?
<ide> end
<ide>
<ide> def to_s
<ide> return @explanation unless @explanation.empty?
<ide>
<del> case @reason
<del> when :versioned_formula
<add> if versioned_formula?
<ide> <<~EOS
<ide> this is an alternate version of another formula
<ide> EOS
<del> when :provided_by_macos
<add> elsif provided_by_macos?
<ide> <<~EOS
<ide> macOS already provides this software and installing another version in
<ide> parallel can cause all kinds of trouble
<ide> EOS
<del> when :shadowed_by_macos
<add> elsif shadowed_by_macos?
<ide> <<~EOS
<ide> macOS provides similar software and installing this software in
<ide> parallel can cause all kinds of trouble | 4 |
Javascript | Javascript | resolve multiple deps to module ids | 421f203b57b1c4f0705b1f0d49ddceb8f920be10 | <ide><path>lib/HotModuleReplacementPlugin.js
<ide> HotModuleReplacementPlugin.prototype.apply = function(compiler) {
<ide> compiler.parser.plugin("call module.hot.accept", function(expr) {
<ide> if(!this.state.compilation.hotUpdateChunkTemplate) return false;
<ide> if(expr.arguments.length > 1) {
<del> var param = this.evaluateExpression(expr.arguments[0]);
<del> if(param.isString()) {
<del> var dep = new ModuleHotAcceptDependency(param.string, param.range);
<del> dep.optional = true;
<del> this.state.module.addDependency(dep);
<del> }
<add> var arg = this.evaluateExpression(expr.arguments[0]);
<add> var params = [];
<add> if(arg.isString()) {
<add> params = [arg];
<add> }
<add> if(arg.isArray()){
<add> params = arg.items;
<add> }
<add> params.forEach(function(param){
<add> var dep = new ModuleHotAcceptDependency(param.string, param.range);
<add> dep.optional = true;
<add> this.state.module.addDependency(dep);
<add> }.bind(this));
<ide> }
<ide> });
<ide> compiler.parser.plugin("call module.hot.decline", function(expr) { | 1 |
Javascript | Javascript | improve catalan localization | 560695112f1a3cd3937eddd00665d2f3ceeb8afd | <ide><path>src/locale/ca.js
<ide> export default moment.defineLocale('ca', {
<ide> sameElse : 'L'
<ide> },
<ide> relativeTime : {
<del> future : 'en %s',
<add> future : 'd\'aquí %s',
<ide> past : 'fa %s',
<ide> s : 'uns segons',
<ide> m : 'un minut',
<ide><path>src/test/locale/ca.js
<ide> test('from', function (assert) {
<ide> });
<ide>
<ide> test('suffix', function (assert) {
<del> assert.equal(moment(30000).from(0), 'en uns segons', 'prefix');
<add> assert.equal(moment(30000).from(0), 'd\'aquí uns segons', 'prefix');
<ide> assert.equal(moment(0).from(30000), 'fa uns segons', 'suffix');
<ide> });
<ide>
<ide> test('now from now', function (assert) {
<ide> });
<ide>
<ide> test('fromNow', function (assert) {
<del> assert.equal(moment().add({s: 30}).fromNow(), 'en uns segons', 'en uns segons');
<del> assert.equal(moment().add({d: 5}).fromNow(), 'en 5 dies', 'en 5 dies');
<add> assert.equal(moment().add({s: 30}).fromNow(), 'd\'aquí uns segons', 'd\'aquí uns segons');
<add> assert.equal(moment().add({d: 5}).fromNow(), 'd\'aquí 5 dies', 'd\'aquí 5 dies');
<ide> });
<ide>
<ide> test('calendar day', function (assert) { | 2 |
Javascript | Javascript | fix custom inspection with extra properties | d834275a48bc1f85e2289bf7e52a5035a4d97f7e | <ide><path>lib/buffer.js
<ide> const {
<ide> isUint8Array
<ide> } = require('internal/util/types');
<ide> const {
<del> formatProperty,
<del> kObjectType
<add> inspect: utilInspect
<ide> } = require('internal/util/inspect');
<ide>
<ide> const {
<ide> Buffer.prototype[customInspectSymbol] = function inspect(recurseTimes, ctx) {
<ide> str += ` ... ${remaining} more byte${remaining > 1 ? 's' : ''}`;
<ide> // Inspect special properties as well, if possible.
<ide> if (ctx) {
<add> let extras = false;
<ide> const filter = ctx.showHidden ? ALL_PROPERTIES : ONLY_ENUMERABLE;
<del> str += getOwnNonIndexProperties(this, filter).reduce((str, key) => {
<del> // Using `formatProperty()` expects an indentationLvl to be set.
<del> ctx.indentationLvl = 0;
<del> str += `, ${formatProperty(ctx, this, recurseTimes, key, kObjectType)}`;
<del> return str;
<del> }, '');
<add> const obj = getOwnNonIndexProperties(this, filter).reduce((obj, key) => {
<add> extras = true;
<add> obj[key] = this[key];
<add> return obj;
<add> }, Object.create(null));
<add> if (extras) {
<add> if (this.length !== 0)
<add> str += ', ';
<add> // '[Object: null prototype] {'.length === 26
<add> // This is guarded with a test.
<add> str += utilInspect(obj, {
<add> ...ctx,
<add> breakLength: Infinity,
<add> compact: true
<add> }).slice(27, -2);
<add> }
<ide> }
<ide> return `<${this.constructor.name} ${str}>`;
<ide> };
<ide><path>lib/internal/util/inspect.js
<ide> function formatWithOptions(inspectOptions, ...args) {
<ide>
<ide> module.exports = {
<ide> inspect,
<del> formatProperty,
<del> kObjectType,
<ide> format,
<ide> formatWithOptions
<ide> };
<ide><path>test/parallel/test-buffer-inspect.js
<ide> assert.strictEqual(util.inspect(b), expected);
<ide> assert.strictEqual(util.inspect(s), expected);
<ide>
<ide> b.inspect = undefined;
<del>assert.strictEqual(util.inspect(b), '<Buffer 31 32, inspect: undefined>');
<add>b.prop = new Uint8Array(0);
<add>assert.strictEqual(
<add> util.inspect(b),
<add> '<Buffer 31 32, inspect: undefined, prop: Uint8Array []>'
<add>);
<add>
<add>b = Buffer.alloc(0);
<add>b.prop = 123;
<add>
<add>assert.strictEqual(
<add> util.inspect(b),
<add> '<Buffer prop: 123>'
<add>); | 3 |
Go | Go | move init layer to top rather than bottom | c199ed228baf0e5d33b7739cc2442a32dece7020 | <ide><path>devmapper/deviceset_devmapper.go
<ide> func (devices *DeviceSetDM) loadMetaData() error {
<ide> return nil
<ide> }
<ide>
<del>func (devices *DeviceSetDM) createBaseLayer(dir string) error {
<del> for pth, typ := range map[string]string{
<del> "/dev/pts": "dir",
<del> "/dev/shm": "dir",
<del> "/proc": "dir",
<del> "/sys": "dir",
<del> "/.dockerinit": "file",
<del> "/etc/resolv.conf": "file",
<del> "/etc/hosts": "file",
<del> "/etc/hostname": "file",
<del> // "var/run": "dir",
<del> // "var/lock": "dir",
<del> } {
<del> if _, err := os.Stat(path.Join(dir, pth)); err != nil {
<del> if os.IsNotExist(err) {
<del> switch typ {
<del> case "dir":
<del> if err := os.MkdirAll(path.Join(dir, pth), 0755); err != nil {
<del> return err
<del> }
<del> case "file":
<del> if err := os.MkdirAll(path.Join(dir, path.Dir(pth)), 0755); err != nil {
<del> return err
<del> }
<del>
<del> if f, err := os.OpenFile(path.Join(dir, pth), os.O_CREATE, 0755); err != nil {
<del> return err
<del> } else {
<del> f.Close()
<del> }
<del> }
<del> } else {
<del> return err
<del> }
<del> }
<del> }
<del> return nil
<del>}
<del>
<ide> func (devices *DeviceSetDM) setupBaseImage() error {
<ide> oldInfo := devices.Devices[""]
<ide> if oldInfo != nil && oldInfo.Initialized {
<ide> func (devices *DeviceSetDM) setupBaseImage() error {
<ide> return err
<ide> }
<ide>
<del> tmpDir := path.Join(devices.loopbackDir(), "basefs")
<del> if err = os.MkdirAll(tmpDir, 0700); err != nil && !os.IsExist(err) {
<del> return err
<del> }
<del>
<del> err = devices.MountDevice("", tmpDir)
<del> if err != nil {
<del> return err
<del> }
<del>
<del> err = devices.createBaseLayer(tmpDir)
<del> if err != nil {
<del> _ = syscall.Unmount(tmpDir, 0)
<del> return err
<del> }
<del>
<del> err = devices.UnmountDevice("", tmpDir)
<del> if err != nil {
<del> return err
<del> }
<del>
<del> _ = os.Remove(tmpDir)
<del>
<ide> info.Initialized = true
<ide>
<ide> err = devices.saveMetadata()
<ide><path>image.go
<ide> func (image *Image) ensureImageDevice(devices DeviceSet) error {
<ide> return err
<ide> }
<ide>
<add> // The docker init layer is conceptually above all other layers, so we apply
<add> // it for every image. This is safe because the layer directory is the
<add> // definition of the image, and the device-mapper device is just a cache
<add> // of it instantiated. Diffs/commit compare the container device with the
<add> // image device, which will then *not* pick up the init layer changes as
<add> // part of the container changes
<add> dockerinitLayer, err := image.getDockerInitLayer()
<add> if err != nil {
<add> _ = devices.RemoveDevice(image.ID)
<add> return err
<add> }
<add> err = image.applyLayer(dockerinitLayer, mountDir)
<add> if err != nil {
<add> _ = devices.RemoveDevice(image.ID)
<add> return err
<add> }
<add>
<ide> err = devices.UnmountDevice(image.ID, mountDir)
<ide> if err != nil {
<ide> _ = devices.RemoveDevice(image.ID) | 2 |
PHP | PHP | add test for pipe execution order | 00bdd2f5d7e5fb153d6c0276a7042c4c4edc754e | <ide><path>tests/Pipeline/PipelineTest.php
<ide> public function testPipelineBasicUsage()
<ide> unset($_SERVER['__test.pipe.two']);
<ide> }
<ide>
<add> public function testMultiplePipelinesBackAndForthExecutionOrder()
<add> {
<add> $pipeTwo = function ($piped, $next) {
<add> $_SERVER['__test.pipeline'] = $_SERVER['__test.pipeline'].'_forward2';
<add>
<add> $value = $next($piped);
<add>
<add> $_SERVER['__test.pipeline'] = $_SERVER['__test.pipeline'].'_backward2';
<add>
<add> return $value;
<add> };
<add>
<add> $result = (new Pipeline(new Container))
<add> ->send('foo')
<add> ->through([PipelineTestPipeBack::class, $pipeTwo])
<add> ->then(function ($piped) {
<add> $_SERVER['__test.pipeline'] = $_SERVER['__test.pipeline'].'_core';
<add>
<add> return $piped;
<add> });
<add>
<add> $this->assertEquals('foo', $result);
<add> $this->assertEquals('forward1_forward2_core_backward2_backward1', $_SERVER['__test.pipeline']);
<add>
<add> unset($_SERVER['__test.pipeline']);
<add> }
<add>
<ide> public function testPipelineUsageWithObjects()
<ide> {
<ide> $result = (new Pipeline(new Container))
<ide> public function testPipelineThenReturnMethodRunsPipelineThenReturnsPassable()
<ide> }
<ide> }
<ide>
<add>class PipelineTestPipeBack
<add>{
<add> public function handle($piped, $next)
<add> {
<add> $_SERVER['__test.pipeline'] = 'forward1';
<add>
<add> $value = $next($piped);
<add>
<add> $_SERVER['__test.pipeline'] = $_SERVER['__test.pipeline'].'_backward1';
<add>
<add> return $value;
<add> }
<add>}
<add>
<ide> class PipelineTestPipeOne
<ide> {
<ide> public function handle($piped, $next) | 1 |
Ruby | Ruby | fix actioncontroller autoloads | e4c0163f3288cf992b8a60666d79f20f74daeab8 | <ide><path>actionpack/lib/action_controller.rb
<ide> module ActionController
<ide> # TODO: Review explicit to see if they will automatically be handled by
<ide> # the initilizer if they are really needed.
<ide> def self.load_all!
<del> [Base, CgiRequest, CgiResponse, RackRequest, RackRequest, Http::Headers, UrlRewriter, UrlWriter]
<add> [Base, CGIHandler, CgiRequest, RackRequest, RackRequest, Http::Headers, UrlRewriter, UrlWriter]
<ide> end
<ide>
<ide> autoload :AbstractRequest, 'action_controller/request'
<ide> module Http
<ide>
<ide> # DEPRECATE: Remove CGI support
<ide> autoload :CgiRequest, 'action_controller/cgi_process'
<del> autoload :CgiResponse, 'action_controller/cgi_process'
<ide> autoload :CGIHandler, 'action_controller/cgi_process'
<ide> end
<ide> | 1 |
Ruby | Ruby | use the correct model in the test | 2939c2c0e011347d06ba3cc9dfe4ebe3b08f6799 | <ide><path>activerecord/test/models/post.rb
<ide> def greeting
<ide> has_many :comments_with_extend_2, extend: [NamedExtension, NamedExtension2], class_name: "Comment", foreign_key: "post_id"
<ide>
<ide> has_many :author_favorites, through: :author
<del> has_many :author_favorites_with_scope, through: :author, class_name: "AuthorFavorite", source: "author_favorites"
<add> has_many :author_favorites_with_scope, through: :author, class_name: "AuthorFavoriteWithScope", source: "author_favorites"
<ide> has_many :author_categorizations, through: :author, source: :categorizations
<ide> has_many :author_addresses, through: :author
<ide> has_many :author_address_extra_with_address, | 1 |
Javascript | Javascript | fix resource name when filename is data uri | c872a68204fbe174689341d95716eb7747e64fc4 | <ide><path>lib/TemplatedPathPlugin.js
<ide>
<ide> "use strict";
<ide>
<add>const mime = require("mime-types");
<ide> const { basename, extname } = require("path");
<ide> const util = require("util");
<ide> const Chunk = require("./Chunk");
<ide> const replacePathVariables = (path, data, assetInfo) => {
<ide> // [name] - file
<ide> // [ext] - .js
<ide> if (typeof data.filename === "string") {
<del> const { path: file, query, fragment } = parseResource(data.filename);
<del>
<del> const ext = extname(file);
<del> const base = basename(file);
<del> const name = base.slice(0, base.length - ext.length);
<del> const path = file.slice(0, file.length - base.length);
<del>
<del> replacements.set("file", replacer(file));
<del> replacements.set("query", replacer(query, true));
<del> replacements.set("fragment", replacer(fragment, true));
<del> replacements.set("path", replacer(path, true));
<del> replacements.set("base", replacer(base));
<del> replacements.set("name", replacer(name));
<del> replacements.set("ext", replacer(ext, true));
<del> // Legacy
<del> replacements.set(
<del> "filebase",
<del> deprecated(
<del> replacer(base),
<del> "[filebase] is now [base]",
<del> "DEP_WEBPACK_TEMPLATE_PATH_PLUGIN_REPLACE_PATH_VARIABLES_FILENAME"
<del> )
<del> );
<add> // check that filename is data uri
<add> let match = data.filename.match(/^data:([^;,]+)/);
<add> if (match) {
<add> const ext = mime.extension(match[1]);
<add> const emptyReplacer = replacer("", true);
<add>
<add> replacements.set("file", emptyReplacer);
<add> replacements.set("query", emptyReplacer);
<add> replacements.set("fragment", emptyReplacer);
<add> replacements.set("path", emptyReplacer);
<add> replacements.set("base", emptyReplacer);
<add> replacements.set("name", emptyReplacer);
<add> replacements.set("ext", replacer(ext ? `.${ext}` : "", true));
<add> // Legacy
<add> replacements.set(
<add> "filebase",
<add> deprecated(
<add> emptyReplacer,
<add> "[filebase] is now [base]",
<add> "DEP_WEBPACK_TEMPLATE_PATH_PLUGIN_REPLACE_PATH_VARIABLES_FILENAME"
<add> )
<add> );
<add> } else {
<add> const { path: file, query, fragment } = parseResource(data.filename);
<add>
<add> const ext = extname(file);
<add> const base = basename(file);
<add> const name = base.slice(0, base.length - ext.length);
<add> const path = file.slice(0, file.length - base.length);
<add>
<add> replacements.set("file", replacer(file));
<add> replacements.set("query", replacer(query, true));
<add> replacements.set("fragment", replacer(fragment, true));
<add> replacements.set("path", replacer(path, true));
<add> replacements.set("base", replacer(base));
<add> replacements.set("name", replacer(name));
<add> replacements.set("ext", replacer(ext, true));
<add> // Legacy
<add> replacements.set(
<add> "filebase",
<add> deprecated(
<add> replacer(base),
<add> "[filebase] is now [base]",
<add> "DEP_WEBPACK_TEMPLATE_PATH_PLUGIN_REPLACE_PATH_VARIABLES_FILENAME"
<add> )
<add> );
<add> }
<ide> }
<ide>
<ide> // Compilation context
<ide><path>test/configCases/asset-modules/resource-from-data-uri/index.js
<add>import asset from "data:image/svg+xml;utf8,<svg><title>icon-square-small</title></svg>"
<add>
<add>it("should compile with correct filename", () => {
<add> expect(asset).toMatch(/public\/media\/\.[0-9a-zA-Z]{8}\.svg/);
<add>});
<ide><path>test/configCases/asset-modules/resource-from-data-uri/webpack.config.js
<add>/** @type {import("../../../../").Configuration} */
<add>module.exports = {
<add> output: {
<add> assetModuleFilename: "media/[name].[contenthash:8][ext]",
<add> publicPath: "public/"
<add> },
<add> module: {
<add> rules: [
<add> {
<add> mimetype: "image/svg+xml",
<add> type: "asset/resource"
<add> }
<add> ]
<add> },
<add> target: "web"
<add>}; | 3 |
Python | Python | remove duplicate state stopped | 2f514559ba13ca0268e9d5421a477514cc19dec0 | <ide><path>libcloud/compute/types.py
<ide> class NodeState(object):
<ide> :cvar TERMINATED: Node is terminated. This node can't be started later on.
<ide> :cvar STOPPED: Node is stopped. This node can be started later on.
<ide> :cvar PENDING: Node is pending.
<del> :cvar STOPPED: Node is stopped.
<ide> :cvar SUSPENDED: Node is suspended.
<ide> :cvar ERROR: Node is an error state. Usually no operations can be performed
<ide> on the node once it ends up in the error state. | 1 |
Javascript | Javascript | enforce correct `setstate()` usage | 051a67784f245b2a18ee4da2c75b53016aba4364 | <ide><path>Libraries/Lists/StateSafePureComponent.js
<add>/**
<add> * Copyright (c) Meta Platforms, Inc. and affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @flow strict
<add> * @format
<add> */
<add>
<add>import * as React from 'react';
<add>import invariant from 'invariant';
<add>
<add>/**
<add> * `setState` is called asynchronously, and should not rely on the value of
<add> * `this.props` or `this.state`:
<add> * https://reactjs.org/docs/state-and-lifecycle.html#state-updates-may-be-asynchronous
<add> *
<add> * SafePureComponent adds runtime enforcement, to catch cases where these
<add> * variables are read in a state updater function, instead of the ones passed
<add> * in.
<add> */
<add>export default class StateSafePureComponent<
<add> Props,
<add> State: interface {},
<add>> extends React.PureComponent<Props, State> {
<add> _inAsyncStateUpdate = false;
<add>
<add> constructor(props: Props) {
<add> super(props);
<add> this._installSetStateHooks();
<add> }
<add>
<add> setState(
<add> partialState: ?($Shape<State> | ((State, Props) => ?$Shape<State>)),
<add> callback?: () => mixed,
<add> ): void {
<add> if (typeof partialState === 'function') {
<add> super.setState((state, props) => {
<add> this._inAsyncStateUpdate = true;
<add> let ret;
<add> try {
<add> ret = partialState(state, props);
<add> } catch (err) {
<add> throw err;
<add> } finally {
<add> this._inAsyncStateUpdate = false;
<add> }
<add> return ret;
<add> }, callback);
<add> } else {
<add> super.setState(partialState, callback);
<add> }
<add> }
<add>
<add> _installSetStateHooks() {
<add> const that = this;
<add> let {props, state} = this;
<add>
<add> Object.defineProperty(this, 'props', {
<add> get() {
<add> invariant(
<add> !that._inAsyncStateUpdate,
<add> '"this.props" should not be accessed during state updates',
<add> );
<add> return props;
<add> },
<add> set(newProps: Props) {
<add> props = newProps;
<add> },
<add> });
<add> Object.defineProperty(this, 'state', {
<add> get() {
<add> invariant(
<add> !that._inAsyncStateUpdate,
<add> '"this.state" should not be acceessed during state updates',
<add> );
<add> return state;
<add> },
<add> set(newState: State) {
<add> state = newState;
<add> },
<add> });
<add> }
<add>}
<ide><path>Libraries/Lists/VirtualizedList_EXPERIMENTAL.js
<ide> import * as React from 'react';
<ide>
<ide> import {CellRenderMask} from './CellRenderMask';
<ide> import clamp from '../Utilities/clamp';
<add>import StateSafePureComponent from './StateSafePureComponent';
<ide>
<ide> const RefreshControl = require('../Components/RefreshControl/RefreshControl');
<ide> const ScrollView = require('../Components/ScrollView/ScrollView');
<ide> function findLastWhere<T>(
<ide> * - As an effort to remove defaultProps, use helper functions when referencing certain props
<ide> *
<ide> */
<del>class VirtualizedList extends React.PureComponent<Props, State> {
<add>class VirtualizedList extends StateSafePureComponent<Props, State> {
<ide> static contextType: typeof VirtualizedListContext = VirtualizedListContext;
<ide>
<ide> // scrollToEnd may be janky without getItemLayout prop
<ide> const styles = StyleSheet.create({
<ide> },
<ide> });
<ide>
<add>VirtualizedList.displayName = 'VirtualizedList_EXPERIMENTAL';
<ide> module.exports = VirtualizedList; | 2 |
Ruby | Ruby | use some puts for clang | fc7fb60b72d47aaf527bf53a7313db8fb84e7bdd | <ide><path>Library/Homebrew/system_config.rb
<ide> def dump_verbose_config(f = $stdout)
<ide> f.puts "GCC-4.2: build #{gcc_4_2}" unless gcc_4_2.null?
<ide> f.print "Clang: "
<ide> if clang.null?
<del> f.print "N/A"
<add> f.puts "N/A"
<ide> else
<ide> f.print "#{clang} build "
<ide> if clang_build.null?
<del> f.print "(parse error)"
<add> f.puts "(parse error)"
<ide> else
<del> f.print clang_build
<add> f.puts clang_build
<ide> end
<ide> end
<del> f.print "\n"
<ide> f.puts "Git: #{describe_git}"
<ide> f.puts "Curl: #{describe_curl}"
<ide> f.puts "Perl: #{describe_perl}" | 1 |
Python | Python | fix syntax error in celery.views | 96974bb9b8aaf7e958c4ba1282088519cd77c7c9 | <ide><path>celery/views.py
<ide> def apply(request, task_name, *args):
<ide>
<ide> task = tasks[task_name]
<ide> result = apply_async(task, args=args, kwargs=kwargs)
<del> return JSON_dump({"ok": "true", "task_id": result.task_id
<add> return JSON_dump({"ok": "true", "task_id": result.task_id})
<ide>
<ide>
<ide> def is_task_done(request, task_id): | 1 |
Python | Python | add default conns back only when needed | ac12401efeda16805d9b0ad67148bc0d12752973 | <ide><path>tests/api_connexion/endpoints/test_connection_endpoint.py
<ide> def setUpClass(cls) -> None:
<ide> def setUp(self) -> None:
<ide> self.client = self.app.test_client() # type:ignore
<ide> # we want only the connection created here for this test
<del> clear_db_connections()
<add> clear_db_connections(False)
<ide>
<ide> def tearDown(self) -> None:
<ide> clear_db_connections()
<ide><path>tests/test_utils/db.py
<ide> def clear_db_pools():
<ide> add_default_pool_if_not_exists(session)
<ide>
<ide>
<del>def clear_db_connections():
<add>def clear_db_connections(add_default_connections_back=True):
<ide> with create_session() as session:
<ide> session.query(Connection).delete()
<del> create_default_connections(session)
<add> if add_default_connections_back:
<add> create_default_connections(session)
<ide>
<ide>
<ide> def clear_db_variables(): | 2 |
Javascript | Javascript | fix error handling test-http-full-response | 23662f36e34bf07bea013861d082c4cf5affe0b1 | <ide><path>test/parallel/test-http-full-response.js
<ide> function runAb(opts, callback) {
<ide> common.printSkipMessage(`problem spawning \`ab\`.\n${stderr}`);
<ide> process.reallyExit(0);
<ide> }
<del> process.exit();
<del> return;
<add> throw err;
<ide> }
<ide>
<ide> let m = /Document Length:\s*(\d+) bytes/i.exec(stdout); | 1 |
Text | Text | add model cards for deeppavlov models | 8a2d9bc9ef38452e80ce872505a5ad5623c12657 | <ide><path>model_cards/DeepPavlov/bert-base-bg-cs-pl-ru-cased/README.md
<add>---
<add>language:
<add>- bulgarian
<add>- czech
<add>- polish
<add>- russian
<add>---
<add>
<add># bert-base-bg-cs-pl-ru-cased
<add>
<add>SlavicBERT\[1\] \(Slavic \(bg, cs, pl, ru\), cased, 12-layer, 768-hidden, 12-heads, 180M parameters\) was trained
<add>on Russian News and four Wikipedias: Bulgarian, Czech, Polish, and Russian.
<add>Subtoken vocabulary was built using this data. Multilingual BERT was used as an initialization for SlavicBERT.
<add>
<add>
<add>\[1\]: Arkhipov M., Trofimova M., Kuratov Y., Sorokin A. \(2019\).
<add>[Tuning Multilingual Transformers for Language-Specific Named Entity Recognition](https://www.aclweb.org/anthology/W19-3712/).
<add>ACL anthology W19-3712.
<ide><path>model_cards/DeepPavlov/bert-base-cased-conversational/README.md
<add>---
<add>language:
<add>- english
<add>---
<add>
<add># bert-base-cased-conversational
<add>
<add>Conversational BERT \(English, cased, 12-layer, 768-hidden, 12-heads, 110M parameters\) was trained
<add>on the English part of Twitter, Reddit, DailyDialogues\[1\], OpenSubtitles\[2\], Debates\[3\], Blogs\[4\],
<add>Facebook News Comments. We used this training data to build the vocabulary of English subtokens and took
<add>English cased version of BERT-base as an initialization for English Conversational BERT.
<add>
<add>
<add>\[1\]: Yanran Li, Hui Su, Xiaoyu Shen, Wenjie Li, Ziqiang Cao, and Shuzi Niu. DailyDialog: A Manually Labelled
<add>Multi-turn Dialogue Dataset. IJCNLP 2017.
<add>
<add>\[2\]: P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles.
<add>In Proceedings of the 10th International Conference on Language Resources and Evaluation \(LREC 2016\)
<add>
<add>\[3\]: Justine Zhang, Ravi Kumar, Sujith Ravi, Cristian Danescu-Niculescu-Mizil. Proceedings of NAACL, 2016.
<add>
<add>\[4\]: J. Schler, M. Koppel, S. Argamon and J. Pennebaker \(2006\). Effects of Age and Gender on Blogging
<add>in Proceedings of 2006 AAAI Spring Symposium on Computational Approaches for Analyzing Weblogs.
<ide><path>model_cards/DeepPavlov/bert-base-multilingual-cased-sentence/README.md
<add>---
<add>language:
<add>- multilingual
<add>---
<add>
<add># bert-base-multilingual-cased-sentence
<add>
<add>Sentence Multilingual BERT \(101 languages, cased, 12-layer, 768-hidden, 12-heads, 180M parameters\)
<add>is a representation-based sentence encoder for 101 languages of Multilingual BERT.
<add>It is initialized with Multilingual BERT and then fine-tuned on english MultiNLI\[1\] and on dev set
<add>of multilingual XNLI\[2\].
<add>Sentence representations are mean pooled token embeddings in the same manner as in Sentence-BERT\[3\].
<add>
<add>
<add>\[1\]: Williams A., Nangia N. & Bowman S. \(2017\) A Broad-Coverage Challenge Corpus for Sentence Understanding
<add>through Inference. arXiv preprint [arXiv:1704.05426](https://arxiv.org/abs/1704.05426)
<add>
<add>\[2\]: Williams A., Bowman S. \(2018\) XNLI: Evaluating Cross-lingual Sentence Representations.
<add>arXiv preprint [arXiv:1809.05053](https://arxiv.org/abs/1809.05053)
<add>
<add>\[3\]: N. Reimers, I. Gurevych \(2019\) Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks.
<add>arXiv preprint [arXiv:1908.10084](https://arxiv.org/abs/1908.10084)
<ide><path>model_cards/DeepPavlov/rubert-base-cased-conversational/README.md
<add>---
<add>language:
<add>- russian
<add>---
<add>
<add># rubert-base-cased-conversational
<add>
<add>Conversational RuBERT \(Russian, cased, 12-layer, 768-hidden, 12-heads, 180M parameters\) was trained
<add>on OpenSubtitles\[1\], [Dirty](https://d3.ru/), [Pikabu](https://pikabu.ru/),
<add>and a Social Media segment of Taiga corpus\[2\]. We assembled a new vocabulary for Conversational RuBERT model
<add>on this data and initialized the model with [RuBERT](../rubert-base-cased).
<add>
<add>
<add>\[1\]: P. Lison and J. Tiedemann, 2016, OpenSubtitles2016: Extracting Large Parallel Corpora from Movie and TV Subtitles.
<add>In Proceedings of the 10th International Conference on Language Resources and Evaluation \(LREC 2016\)
<add>
<add>\[2\]: Shavrina T., Shapovalova O. \(2017\) TO THE METHODOLOGY OF CORPUS CONSTRUCTION FOR MACHINE LEARNING:
<add>«TAIGA» SYNTAX TREE CORPUS AND PARSER. in proc. of “CORPORA2017”, international conference , Saint-Petersbourg, 2017.
<ide><path>model_cards/DeepPavlov/rubert-base-cased-sentence/README.md
<add>---
<add>language:
<add>- russian
<add>---
<add>
<add># rubert-base-cased-sentence
<add>
<add>Sentence RuBERT \(Russian, cased, 12-layer, 768-hidden, 12-heads, 180M parameters\)
<add>is a representation-based sentence encoder for Russian. It is initialized with RuBERT and fine-tuned on SNLI\[1\]
<add>google-translated to russian and on russian part of XNLI dev set\[2\]. Sentence representations are mean pooled
<add>token embeddings in the same manner as in Sentence-BERT\[3\].
<add>
<add>
<add>\[1\]: S. R. Bowman, G. Angeli, C. Potts, and C. D. Manning. \(2015\) A large annotated corpus for learning
<add>natural language inference. arXiv preprint [arXiv:1508.05326](https://arxiv.org/abs/1508.05326)
<add>
<add>\[2\]: Williams A., Bowman S. \(2018\) XNLI: Evaluating Cross-lingual Sentence Representations.
<add>arXiv preprint [arXiv:1809.05053](https://arxiv.org/abs/1809.05053)
<add>
<add>\[3\]: N. Reimers, I. Gurevych \(2019\) Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks.
<add>arXiv preprint [arXiv:1908.10084](https://arxiv.org/abs/1908.10084)
<ide><path>model_cards/DeepPavlov/rubert-base-cased/README.md
<add>---
<add>language:
<add>- russian
<add>---
<add>
<add># rubert-base-cased
<add>
<add>RuBERT \(Russian, cased, 12-layer, 768-hidden, 12-heads, 180M parameters\) was trained on the Russian part of Wikipedia
<add>and news data. We used this training data to build a vocabulary of Russian subtokens and took a multilingual version
<add>of BERT-base as an initialization for RuBERT\[1\].
<add>
<add>
<add>\[1\]: Kuratov, Y., Arkhipov, M. \(2019\). Adaptation of Deep Bidirectional Multilingual Transformers for Russian Language.
<add>arXiv preprint [arXiv:1905.07213](https://arxiv.org/abs/1905.07213). | 6 |
Python | Python | fix ticket . do some cleanups | 4d40dff448b1b368af6752fa8b9a93716db76b3f | <ide><path>numpy/lib/twodim_base.py
<ide> def fliplr(m):
<ide> """
<ide> m = asanyarray(m)
<ide> if m.ndim < 2:
<del> raise ValueError, "Input must be >= 2-d."
<add> raise ValueError("Input must be >= 2-d.")
<ide> return m[:, ::-1]
<ide>
<ide> def flipud(m):
<ide> def flipud(m):
<ide> """
<ide> m = asanyarray(m)
<ide> if m.ndim < 1:
<del> raise ValueError, "Input must be >= 1-d."
<add> raise ValueError("Input must be >= 1-d.")
<ide> return m[::-1,...]
<ide>
<ide> def rot90(m, k=1):
<ide> def rot90(m, k=1):
<ide> """
<ide> m = asanyarray(m)
<ide> if m.ndim < 2:
<del> raise ValueError, "Input must >= 2-d."
<add> raise ValueError("Input must >= 2-d.")
<ide> k = k % 4
<del> if k == 0: return m
<del> elif k == 1: return fliplr(m).swapaxes(0,1)
<del> elif k == 2: return fliplr(flipud(m))
<del> else: return fliplr(m.swapaxes(0,1)) # k==3
<add> if k == 0:
<add> return m
<add> elif k == 1:
<add> return fliplr(m).swapaxes(0,1)
<add> elif k == 2:
<add> return fliplr(flipud(m))
<add> else:
<add> # k == 3
<add> return fliplr(m.swapaxes(0,1))
<ide>
<ide> def eye(N, M=None, k=0, dtype=float):
<ide> """
<ide> def diag(v, k=0):
<ide> i = (-k) * s[1]
<ide> return v[:s[1]-k].flat[i::s[1]+1]
<ide> else:
<del> raise ValueError, "Input must be 1- or 2-d."
<add> raise ValueError("Input must be 1- or 2-d.")
<ide>
<del>def diagflat(v,k=0):
<add>def diagflat(v, k=0):
<ide> """
<ide> Create a two-dimensional array with the flattened input as a diagonal.
<ide>
<ide> def diagflat(v,k=0):
<ide> s = len(v)
<ide> n = s + abs(k)
<ide> res = zeros((n,n), v.dtype)
<del> if (k>=0):
<add> if (k >= 0):
<ide> i = arange(0,n-k)
<ide> fi = i+k+i*n
<ide> else:
<ide> def tri(N, M=None, k=0, dtype=float):
<ide> [ 1., 1., 0., 0., 0.]])
<ide>
<ide> """
<del> if M is None: M = N
<add> if M is None:
<add> M = N
<ide> m = greater_equal(subtract.outer(arange(N), arange(M)),-k)
<ide> return m.astype(dtype)
<ide>
<ide> def triu(m, k=0):
<ide>
<ide> """
<ide> m = asanyarray(m)
<del> out = multiply((1-tri(m.shape[0], m.shape[1], k-1, int)),m)
<add> out = multiply((1 - tri(m.shape[0], m.shape[1], k - 1, int)), m)
<ide> return out
<ide>
<ide> # borrowed from John Hunter and matplotlib
<ide> def vander(x, N=None):
<ide>
<ide> """
<ide> x = asarray(x)
<del> if N is None: N=len(x)
<add> if N is None:
<add> N=len(x)
<ide> X = ones( (len(x),N), x.dtype)
<del> for i in range(N-1):
<del> X[:,i] = x**(N-i-1)
<add> for i in range(N - 1):
<add> X[:,i] = x**(N - i - 1)
<ide> return X
<ide>
<ide>
<del>def histogram2d(x,y, bins=10, range=None, normed=False, weights=None):
<add>def histogram2d(x, y, bins=10, range=None, normed=False, weights=None):
<ide> """
<ide> Compute the bi-dimensional histogram of two data samples.
<ide>
<ide> def histogram2d(x,y, bins=10, range=None, normed=False, weights=None):
<ide> hist, edges = histogramdd([x,y], bins, range, normed, weights)
<ide> return hist, edges[0], edges[1]
<ide>
<del>
<del>def mask_indices(n,mask_func,k=0):
<add>
<add>def mask_indices(n, mask_func, k=0):
<ide> """
<ide> Return the indices to access (n, n) arrays, given a masking function.
<ide>
<ide> def mask_indices(n,mask_func,k=0):
<ide> array([1, 2, 5])
<ide>
<ide> """
<del> m = ones((n,n),int)
<del> a = mask_func(m,k)
<add> m = ones((n,n), int)
<add> a = mask_func(m, k)
<ide> return where(a != 0)
<ide>
<ide>
<del>def tril_indices(n,k=0):
<add>def tril_indices(n, k=0):
<ide> """
<ide> Return the indices for the lower-triangle of an (n, n) array.
<ide>
<ide> Parameters
<ide> ----------
<ide> n : int
<del> Sets the size of the arrays for which the returned indices will be valid.
<add> The row dimension of the square arrays for which the returned
<add> indices will be valid.
<ide> k : int, optional
<del> Diagonal offset (see `tril` for details).
<add> Diagonal offset (see `tril` for details).
<ide>
<ide> Returns
<ide> -------
<ide> def tril_indices(n,k=0):
<ide> [-10, -10, -10, -10]])
<ide>
<ide> """
<del> return mask_indices(n,tril,k)
<add> return mask_indices(n, tril, k)
<ide>
<ide>
<del>def tril_indices_from(arr,k=0):
<add>def tril_indices_from(arr, k=0):
<ide> """
<del> Return the indices for the lower-triangle of an (n, n) array.
<add> Return the indices for the lower-triangle of arr.
<ide>
<ide> See `tril_indices` for full details.
<ide>
<ide> Parameters
<ide> ----------
<del> n : int
<del> Sets the size of the arrays for which the returned indices will be valid.
<add> arr : array_like
<add> The indices will be valid for square arrays whose dimensions are
<add> the same as arr.
<ide> k : int, optional
<del> Diagonal offset (see `tril` for details).
<add> Diagonal offset (see `tril` for details).
<ide>
<ide> See Also
<ide> --------
<ide> def tril_indices_from(arr,k=0):
<ide> .. versionadded:: 1.4.0
<ide>
<ide> """
<del> if not arr.ndim==2 and arr.shape[0] == arr.shape[1]:
<add> if not (arr.ndim == 2 and arr.shape[0] == arr.shape[1]):
<ide> raise ValueError("input array must be 2-d and square")
<del> return tril_indices(arr.shape[0],k)
<add> return tril_indices(arr.shape[0], k)
<add>
<ide>
<del>
<del>def triu_indices(n,k=0):
<add>def triu_indices(n, k=0):
<ide> """
<ide> Return the indices for the upper-triangle of an (n, n) array.
<ide>
<ide> Parameters
<ide> ----------
<ide> n : int
<del> Sets the size of the arrays for which the returned indices will be valid.
<add> The size of the arrays for which the returned indices will
<add> be valid.
<ide> k : int, optional
<del> Diagonal offset (see `triu` for details).
<add> Diagonal offset (see `triu` for details).
<ide>
<ide> Returns
<ide> -------
<ide> def triu_indices(n,k=0):
<ide> [ 12, 13, 14, -1]])
<ide>
<ide> """
<del> return mask_indices(n,triu,k)
<add> return mask_indices(n, triu, k)
<ide>
<ide>
<del>def triu_indices_from(arr,k=0):
<add>def triu_indices_from(arr, k=0):
<ide> """
<ide> Return the indices for the upper-triangle of an (n, n) array.
<ide>
<ide> See `triu_indices` for full details.
<ide>
<ide> Parameters
<ide> ----------
<del> n : int
<del> Sets the size of the arrays for which the returned indices will be valid.
<add> arr : array_like
<add> The indices will be valid for square arrays whose dimensions are
<add> the same as arr.
<ide> k : int, optional
<ide> Diagonal offset (see `triu` for details).
<ide>
<ide> def triu_indices_from(arr,k=0):
<ide> .. versionadded:: 1.4.0
<ide>
<ide> """
<del> if not arr.ndim==2 and arr.shape[0] == arr.shape[1]:
<add> if not (arr.ndim == 2 and arr.shape[0] == arr.shape[1]):
<ide> raise ValueError("input array must be 2-d and square")
<ide> return triu_indices(arr.shape[0],k)
<ide> | 1 |
PHP | PHP | apply fixes from styleci | ea6cedb379a07a9dee713b269d05fc7fa6fda0d7 | <ide><path>src/Illuminate/Database/Schema/ForeignIdColumnDefinition.php
<ide> namespace Illuminate\Database\Schema;
<ide>
<ide> use Illuminate\Support\Str;
<del>use Illuminate\Database\Schema\Blueprint;
<ide>
<ide> class ForeignIdColumnDefinition extends ColumnDefinition
<ide> { | 1 |
Ruby | Ruby | fix svn remote test on mojave | 8bfde013f631ec5026313c9194ec2fff8cfc5f01 | <ide><path>Library/Homebrew/utils.rb
<ide> def inject_dump_stats!(the_module, pattern)
<ide>
<ide> $times = {}
<ide> at_exit do
<del> col_width = [$times.keys.map(&:size).max + 2, 15].max
<add> col_width = [$times.keys.map(&:size).max.to_i + 2, 15].max
<ide> $times.sort_by { |_k, v| v }.each do |method, time|
<ide> puts format("%-*s %0.4f sec", col_width, "#{method}:", time)
<ide> end
<ide><path>Library/Homebrew/utils/svn.rb
<ide> def self.svn_available?
<ide> def self.svn_remote_exists?(url)
<ide> return true unless svn_available?
<ide>
<del> ssl_args = ["--non-interactive", "--trust-server-cert"] if ENV["HOMEBREW_TEST_ONLINE"]
<del> quiet_system "svn", "ls", url, "--depth", "empty", *ssl_args
<add> # OK to unconditionally trust here because we're just checking if
<add> # a URL exists.
<add> quiet_system "svn", "ls", url, "--depth", "empty",
<add> "--non-interactive", "--trust-server-cert"
<ide> end
<ide> end | 2 |
Javascript | Javascript | remove eslint-disable from fixtures | c001ba65753f3eb41ecd8df3b013126593d95247 | <ide><path>test/fixtures/guess-hash-seed.js
<del>/* eslint-disable required-modules */
<ide> 'use strict';
<ide> function min(arr) {
<ide> let res = arr[0];
<ide><path>test/fixtures/inspector-global-function.js
<del>'use strict'; // eslint-disable-line required-modules
<add>'use strict';
<ide> let invocations = 0;
<ide> const interval = setInterval(() => {}, 1000);
<ide> | 2 |
Ruby | Ruby | add json output | 2f2645e962974002bdd6e78111678bb2d5f9b389 | <ide><path>Library/Homebrew/cmd/info.rb
<ide>
<ide> module Homebrew extend self
<ide> def info
<add> # eventually we'll solidify an API, but we'll keep old versions
<add> # awhile around for compatibility
<add> if ARGV.json == "v1"
<add> print_json
<add> else
<add> print_info
<add> end
<add> end
<add>
<add> def print_info
<ide> if ARGV.named.empty?
<ide> if ARGV.include? "--all"
<ide> Formula.each do |f|
<ide> def info
<ide> end
<ide> end
<ide>
<add> def print_json
<add> require 'vendor/multi_json'
<add>
<add> formulae = ARGV.include?("--all") ? Formula : ARGV.formulae
<add> json = formulae.map {|f| f.to_hash}
<add> if json.size == 1
<add> puts MultiJson.encode json.pop
<add> else
<add> puts MultiJson.encode json
<add> end
<add> end
<add>
<ide> def github_fork
<ide> if which 'git' and (HOMEBREW_REPOSITORY/".git").directory?
<ide> if `git remote -v` =~ %r{origin\s+(https?://|git(?:@|://))github.com[:/](.+)/homebrew}
<ide><path>Library/Homebrew/formula.rb
<ide> def recursive_requirements
<ide> reqs.flatten
<ide> end
<ide>
<add> def to_hash
<add> hsh = {
<add> "name" => name,
<add> "homepage" => homepage,
<add> "versions" => {
<add> "stable" => (stable.version.to_s if stable),
<add> "bottle" => bottle && MacOS.bottles_supported? || false,
<add> "devel" => (devel.version.to_s if devel),
<add> "head" => (head.version.to_s if head)
<add> },
<add> "installed" => [],
<add> "linked_keg" => (linked_keg.realpath.basename.to_s if linked_keg.exist?),
<add> "keg_only" => keg_only?,
<add> "dependencies" => deps.map {|dep| dep.to_s},
<add> "conflicts_with" => conflicts.map {|c| c.formula},
<add> "options" => [],
<add> "caveats" => caveats
<add> }
<add>
<add> build.each do |opt|
<add> hsh["options"] << {
<add> "option" => "--"+opt.name,
<add> "description" => opt.description
<add> }
<add> end
<add>
<add> if rack.directory?
<add> rack.children.each do |keg|
<add> next if keg.basename.to_s == '.DS_Store'
<add> tab = Tab.for_keg keg
<add>
<add> hsh["installed"] << {
<add> "version" => keg.basename.to_s,
<add> "used_options" => tab.used_options,
<add> "built_as_bottle" => tab.built_bottle
<add> }
<add> end
<add> end
<add>
<add> hsh
<add>
<add> end
<add>
<ide> protected
<ide>
<ide> # Pretty titles the command and buffers stdout/stderr | 2 |
Ruby | Ruby | use thread local queues | b9f9951d5f7e6f2c947c292a929a48d41c529f26 | <ide><path>activesupport/lib/active_support/log_subscriber.rb
<ide> def flush_all!
<ide> end
<ide>
<ide> def initialize
<del> @event_stack = []
<add> @queue_key = [self.class.name, object_id].join "-"
<ide> super
<ide> end
<ide>
<ide> def start(name, id, payload)
<ide> return unless logger
<ide>
<ide> e = ActiveSupport::Notifications::Event.new(name, Time.now, nil, id, payload)
<del> parent = @event_stack.last
<add> parent = event_stack.last
<ide> parent << e if parent
<ide>
<del> @event_stack.push e
<add> event_stack.push e
<ide> end
<ide>
<ide> def finish(name, id, payload)
<ide> return unless logger
<ide>
<ide> finished = Time.now
<del> event = @event_stack.pop
<add> event = event_stack.pop
<ide> event.end = finished
<ide> event.payload.merge!(payload)
<ide>
<ide> def color(text, color, bold=false)
<ide> bold = bold ? BOLD : ""
<ide> "#{bold}#{color}#{text}#{CLEAR}"
<ide> end
<add>
<add> private
<add>
<add> def event_stack
<add> Thread.current[@queue_key] ||= []
<add> end
<ide> end
<ide> end | 1 |
Javascript | Javascript | add support for ram bundle groups | aec7b34e50e4962236a110963a35305c74e5c285 | <ide><path>local-cli/bundle/output/unbundle/as-assets.js
<ide> function saveAsAssets(
<ide> const sourceMap =
<ide> relativizeSourceMap(
<ide> buildSourceMapWithMetaData({
<del> startupModules: startupModules.concat(),
<del> lazyModules: lazyModules.concat(),
<ide> fixWrapperOffset: true,
<add> lazyModules: lazyModules.concat(),
<add> moduleGroups: null,
<add> startupModules: startupModules.concat(),
<ide> }),
<ide> sourcemapSourcesRoot
<ide> );
<ide><path>local-cli/bundle/output/unbundle/build-unbundle-sourcemap-with-metadata.js
<ide> const {combineSourceMaps, combineSourceMapsAddingOffsets, joinModules} = require
<ide>
<ide> import type {ModuleGroups, ModuleTransportLike} from '../../types.flow';
<ide>
<del>type Params = {
<add>type Params = {|
<ide> fixWrapperOffset: boolean,
<ide> lazyModules: $ReadOnlyArray<ModuleTransportLike>,
<del> moduleGroups?: ModuleGroups,
<add> moduleGroups: ?ModuleGroups,
<ide> startupModules: $ReadOnlyArray<ModuleTransportLike>,
<del>};
<add>|};
<ide>
<ide> module.exports = ({fixWrapperOffset, lazyModules, moduleGroups, startupModules}: Params) => {
<ide> const options = fixWrapperOffset ? {fixWrapperOffset: true} : undefined;
<ide><path>local-cli/bundle/output/unbundle/util.js
<ide> function combineSourceMaps(
<ide>
<ide> function combineSourceMapsAddingOffsets(
<ide> modules: $ReadOnlyArray<ModuleTransportLike>,
<del> moduleGroups?: ModuleGroups,
<add> moduleGroups?: ?ModuleGroups,
<ide> options?: ?CombineOptions,
<ide> ): FBIndexMap {
<ide> const x_facebook_offsets = [];
<ide><path>packager/src/Bundler/Bundle.js
<ide> const crypto = require('crypto');
<ide> const debug = require('debug')('RNP:Bundle');
<ide> const invariant = require('fbjs/lib/invariant');
<ide>
<add>const {createRamBundleGroups} = require('./util');
<ide> const {fromRawMappings} = require('./source-map');
<ide> const {isMappingsMap} = require('../lib/SourceMap');
<ide>
<ide> class Bundle extends BundleBase {
<ide> lazyModules,
<ide> get groups() {
<ide> if (!groups) {
<del> groups = createGroups(ramGroups || [], lazyModules);
<add> groups = createRamBundleGroups(ramGroups || [], lazyModules, subtree);
<ide> }
<ide> return groups;
<ide> },
<ide> function partition(array, predicate) {
<ide> return [included, excluded];
<ide> }
<ide>
<del>function * filter(iterator, predicate) {
<del> for (const value of iterator) {
<del> if (predicate(value)) {
<del> yield value;
<del> }
<del> }
<del>}
<del>
<del>function * subtree(moduleTransport: ModuleTransport, moduleTransportsByPath, seen = new Set()) {
<add>function * subtree(
<add> moduleTransport: ModuleTransport,
<add> moduleTransportsByPath: Map<string, ModuleTransport>,
<add> seen = new Set(),
<add>) {
<ide> seen.add(moduleTransport.id);
<del> /* $FlowFixMe: there may not be a `meta` object */
<del> for (const [, {path}] of moduleTransport.meta.dependencyPairs || []) {
<add> const {meta} = moduleTransport;
<add> invariant(
<add> meta != null,
<add> 'Unexpected module transport without meta information: ' + moduleTransport.sourcePath,
<add> );
<add> for (const [, {path}] of meta.dependencyPairs || []) {
<ide> const dependency = moduleTransportsByPath.get(path);
<ide> if (dependency && !seen.has(dependency.id)) {
<ide> yield dependency.id;
<ide> function * subtree(moduleTransport: ModuleTransport, moduleTransportsByPath, see
<ide> }
<ide> }
<ide>
<del>class ArrayMap extends Map {
<del> get(key) {
<del> let array = super.get(key);
<del> if (!array) {
<del> array = [];
<del> this.set(key, array);
<del> }
<del> return array;
<del> }
<del>}
<del>
<del>function createGroups(ramGroups: Array<string>, lazyModules) {
<del> // build two maps that allow to lookup module data
<del> // by path or (numeric) module id;
<del> const byPath = new Map();
<del> const byId = new Map();
<del> lazyModules.forEach(m => {
<del> byPath.set(m.sourcePath, m);
<del> byId.set(m.id, m.sourcePath);
<del> });
<del>
<del> // build a map of group root IDs to an array of module IDs in the group
<del> const result: Map<number, Set<number>> = new Map(
<del> ramGroups
<del> .map(modulePath => {
<del> const root = byPath.get(modulePath);
<del> if (!root) {
<del> throw Error(`Group root ${modulePath} is not part of the bundle`);
<del> }
<del> return [
<del> root.id,
<del> // `subtree` yields the IDs of all transitive dependencies of a module
<del> /* $FlowFixMe: assumes the module is always in the Map */
<del> new Set(subtree(byPath.get(root.sourcePath), byPath)),
<del> ];
<del> })
<del> );
<del>
<del> if (ramGroups.length > 1) {
<del> // build a map of all grouped module IDs to an array of group root IDs
<del> const all = new ArrayMap();
<del> for (const [parent, children] of result) {
<del> for (const module of children) {
<del> all.get(module).push(parent);
<del> }
<del> }
<del>
<del> // find all module IDs that are part of more than one group
<del> const doubles = filter(all, ([, parents]) => parents.length > 1);
<del> for (const [moduleId, parents] of doubles) {
<del> // remove them from their groups
<del> /* $FlowFixMe: this assumes the element exists. */
<del> parents.forEach(p => result.get(p).delete(moduleId));
<del>
<del> // print a warning for each removed module
<del> const parentNames = parents.map(byId.get, byId);
<del> const lastName = parentNames.pop();
<del> throw new Error(
<del> /* $FlowFixMe: this assumes the element exists. */
<del> `Module ${byId.get(moduleId)} belongs to groups ${
<del> parentNames.join(', ')}, and ${lastName
<del> }. Removing it from all groups.`
<del> );
<del> }
<del> }
<del>
<del> return result;
<del>}
<del>
<ide> const isRawMappings = Array.isArray;
<ide>
<ide> module.exports = Bundle;
<ide><path>packager/src/Bundler/__tests__/Bundle-test.js
<ide> describe('Bundle', () => {
<ide> }).toThrow(
<ide> new Error(
<ide> `Module ${fsLocation('invariant')} belongs to groups ${fsLocation('React')}` +
<del> `, and ${fsLocation('OtherFramework')}. Removing it from all groups.`,
<add> `, and ${fsLocation('OtherFramework')}. Ensure that each module is only part of one group.`,
<ide> ),
<ide> );
<ide> });
<ide><path>packager/src/Bundler/util.js
<ide> const babelGenerate = require('babel-generator').default;
<ide> const babylon = require('babylon');
<ide>
<ide> import type {AssetDescriptor} from '.';
<add>import type {ModuleTransportLike} from '../../../local-cli/bundle/types.flow';
<add>
<add>type SubTree<T: ModuleTransportLike> = (
<add> moduleTransport: T,
<add> moduleTransportsByPath: Map<string, T>,
<add>) => Generator<number, void, void>;
<ide>
<ide> const assetPropertyBlacklist = new Set([
<ide> 'files',
<ide> function filterObject(object, blacklist) {
<ide> return copied;
<ide> }
<ide>
<add>function createRamBundleGroups<T: ModuleTransportLike>(
<add> ramGroups: $ReadOnlyArray<string>,
<add> groupableModules: $ReadOnlyArray<T>,
<add> subtree: SubTree<T>,
<add>): Map<number, Set<number>> {
<add> // build two maps that allow to lookup module data
<add> // by path or (numeric) module id;
<add> const byPath = new Map();
<add> const byId = new Map();
<add> groupableModules.forEach(m => {
<add> byPath.set(m.sourcePath, m);
<add> byId.set(m.id, m.sourcePath);
<add> });
<add>
<add> // build a map of group root IDs to an array of module IDs in the group
<add> const result: Map<number, Set<number>> = new Map(
<add> ramGroups
<add> .map(modulePath => {
<add> const root = byPath.get(modulePath);
<add> if (root == null) {
<add> throw Error(`Group root ${modulePath} is not part of the bundle`);
<add> }
<add> return [
<add> root.id,
<add> // `subtree` yields the IDs of all transitive dependencies of a module
<add> new Set(subtree(root, byPath)),
<add> ];
<add> })
<add> );
<add>
<add> if (ramGroups.length > 1) {
<add> // build a map of all grouped module IDs to an array of group root IDs
<add> const all = new ArrayMap();
<add> for (const [parent, children] of result) {
<add> for (const module of children) {
<add> all.get(module).push(parent);
<add> }
<add> }
<add>
<add> // find all module IDs that are part of more than one group
<add> const doubles = filter(all, ([, parents]) => parents.length > 1);
<add> for (const [moduleId, parents] of doubles) {
<add> const parentNames = parents.map(byId.get, byId);
<add> const lastName = parentNames.pop();
<add> throw new Error(
<add> `Module ${byId.get(moduleId) || moduleId} belongs to groups ${
<add> parentNames.join(', ')}, and ${String(lastName)
<add> }. Ensure that each module is only part of one group.`
<add> );
<add> }
<add> }
<add>
<add> return result;
<add>}
<add>
<add>function * filter(iterator, predicate) {
<add> for (const value of iterator) {
<add> if (predicate(value)) {
<add> yield value;
<add> }
<add> }
<add>}
<add>
<add>class ArrayMap extends Map {
<add> get(key) {
<add> let array = super.get(key);
<add> if (!array) {
<add> array = [];
<add> this.set(key, array);
<add> }
<add> return array;
<add> }
<add>}
<add>
<ide> module.exports = {
<add> createRamBundleGroups,
<ide> generateAssetCodeFileAst,
<ide> generateAssetTransformResult,
<ide> isAssetTypeAnImage,
<add><path>packager/src/ModuleGraph/output/__tests__/indexed-ram-bundle-test.js
<del><path>packager/src/ModuleGraph/output/__tests__/as-indexed-ram-bundle-test.js
<ide> let ids, modules, requireCall;
<ide> const idForPath = ({path}) => getId(path);
<ide> beforeAll(() => {
<ide> modules = [
<del> makeModule('a', 'script'),
<del> makeModule('b'),
<del> makeModule('c'),
<del> makeModule('d'),
<add> makeModule('a', [], 'script'),
<add> makeModule('b', ['c']),
<add> makeModule('c', ['f']),
<add> makeModule('d', ['e']),
<ide> makeModule('e'),
<ide> makeModule('f'),
<ide> ];
<del> requireCall = makeModule('r', 'script', 'require(1);');
<add> requireCall = makeModule('r', [], 'script', 'require(1);');
<ide>
<ide> ids = new Map(modules.map(({file}, i) => [file.path, i]));
<ide> ({code, map} = createRamBundle());
<ide> it('creates a source map', () => {
<ide> expect(map.x_facebook_offsets).toEqual([1, 2, 3, 4, 5, 6]);
<ide> });
<ide>
<del>describe('Optimization:', () => {
<add>describe('Startup section optimization', () => {
<ide> let last, preloaded;
<ide> beforeAll(() => {
<ide> last = modules[modules.length - 1];
<ide> describe('Optimization:', () => {
<ide> return section;
<ide> }
<ide> ));
<add> });
<add>});
<add>
<add>describe('RAM groups / common sections', () => {
<add> let groups, groupHeads;
<add> beforeAll(() => {
<add> groups = [
<add> [modules[1], modules[2], modules[5]],
<add> [modules[3], modules[4]],
<add> ];
<add> groupHeads = groups.map(g => g[0]);
<add> ({code, map} = createRamBundle(undefined, groupHeads.map(getPath)));
<add> });
<ide>
<add> it('supports grouping the transitive dependencies of files into common sections', () => {
<add> const {codeOffset, table} = parseOffsetTable(code);
<add>
<add> groups.forEach(group => {
<add> const [head, ...deps] = group.map(x => idForPath(x.file));
<add> const groupEntry = table[head];
<add> deps.forEach(id => expect(table[id]).toEqual(groupEntry));
<add>
<add> const [offset, length] = groupEntry;
<add> const groupCode = code.slice(codeOffset + offset, codeOffset + offset + length - 1);
<add> expect(groupCode.toString())
<add> .toEqual(group.map(m => m.file.code).join('\n'));
<add> });
<ide> });
<add>
<add> it('reflects section groups in the source map', () => {
<add> expect(map.x_facebook_offsets).toEqual([1, 2, 2, 5, 5, 2]);
<add> const maps = map.sections.slice(-2);
<add> const toplevelOffsets = [2, 5];
<add>
<add> maps.map((groupMap, i) => [groups[i], groupMap]).forEach(([group, groupMap], i) => {
<add> const offsets = group.reduce(moduleLineOffsets, [])[0];
<add> expect(groupMap).toEqual({
<add> map: {
<add> version: 3,
<add> sections: group.map((module, j) => ({
<add> map: module.file.map,
<add> offset: {line: offsets[j], column: 0},
<add> })),
<add> },
<add> offset: {line: toplevelOffsets[i], column: 0},
<add> });
<add> });
<add> });
<add>
<add> function moduleLineOffsets([offsets = [], line = 0], module) {
<add> return [[...offsets, line], line + countLines(module)];
<add> }
<ide> });
<ide>
<del>function createRamBundle(preloadedModules = new Set()) {
<del> const build = indexedRamBundle.createBuilder(preloadedModules);
<add>function createRamBundle(preloadedModules = new Set(), ramGroups) {
<add> const build = indexedRamBundle.createBuilder(preloadedModules, ramGroups);
<ide> const result = build({
<ide> filename: 'arbitrary/filename.js',
<ide> idForPath,
<ide> function createRamBundle(preloadedModules = new Set()) {
<ide> return {code: result.code, map: result.map};
<ide> }
<ide>
<del>function makeModule(name, type = 'module', moduleCode = `var ${name};`) {
<del> const path = `/${name}.js`;
<add>function makeModule(name, deps = [], type = 'module', moduleCode = `var ${name};`) {
<add> const path = makeModulePath(name);
<ide> return {
<del> dependencies: [],
<add> dependencies: deps.map(makeDependency),
<ide> file: {
<ide> code: type === 'module' ? makeModuleCode(moduleCode) : moduleCode,
<ide> map: type !== 'module'
<ide> function makeModuleCode(moduleCode) {
<ide> return `__d(() => {${moduleCode}})`;
<ide> }
<ide>
<add>function makeModulePath(name) {
<add> return `/${name}.js`;
<add>}
<add>
<add>function makeDependency(name) {
<add> const path = makeModulePath(name);
<add> return {
<add> id: name,
<add> path,
<add> };
<add>}
<add>
<ide> function getId(path) {
<ide> if (path === requireCall.file.path) {
<ide> return -1;
<ide><path>packager/src/ModuleGraph/output/indexed-ram-bundle.js
<ide> 'use strict';
<ide>
<ide> const buildSourceMapWithMetaData = require('../../../../local-cli/bundle/output/unbundle/build-unbundle-sourcemap-with-metadata.js');
<add>const nullthrows = require('fbjs/lib/nullthrows');
<ide>
<ide> const {buildTableAndContents, createModuleGroups} = require('../../../../local-cli/bundle/output/unbundle/as-indexed-file');
<add>const {createRamBundleGroups} = require('../../Bundler/util');
<ide> const {concat} = require('./util');
<ide>
<ide> import type {FBIndexMap} from '../../lib/SourceMap.js';
<ide> function asIndexedRamBundle({
<ide> idForPath,
<ide> modules,
<ide> preloadedModules,
<add> ramGroupHeads,
<ide> requireCalls,
<ide> }) {
<ide> const [startup, deferred] = partition(modules, preloadedModules);
<ide> const startupModules = Array.from(concat(startup, requireCalls));
<del> const deferredModules = deferred.map(m => toModuleTransport(m.file, idForPath));
<del> const moduleGroups = createModuleGroups(new Map(), deferredModules);
<add> const deferredModules = deferred.map(m => toModuleTransport(m, idForPath));
<add> const ramGroups = createRamBundleGroups(ramGroupHeads || [], deferredModules, subtree);
<add> const moduleGroups = createModuleGroups(ramGroups, deferredModules);
<ide>
<ide> const tableAndContents = buildTableAndContents(
<ide> startupModules.map(getModuleCode).join('\n'),
<ide> function asIndexedRamBundle({
<ide> code: Buffer.concat(tableAndContents),
<ide> map: buildSourceMapWithMetaData({
<ide> fixWrapperOffset: false,
<del> startupModules: startupModules.map(m => toModuleTransport(m.file, idForPath)),
<ide> lazyModules: deferredModules,
<add> moduleGroups,
<add> startupModules: startupModules.map(m => toModuleTransport(m, idForPath)),
<ide> }),
<ide> };
<ide> }
<ide>
<del>function toModuleTransport(file, idForPath) {
<add>function toModuleTransport({dependencies, file}, idForPath) {
<ide> return {
<ide> code: file.code,
<add> dependencies,
<ide> id: idForPath(file),
<ide> map: file.map,
<ide> name: file.path,
<ide> function partition(modules, preloadedModules) {
<ide> return [startup, deferred];
<ide> }
<ide>
<del>function createBuilder(preloadedModules: Set<string>): OutputFn<FBIndexMap> {
<del> return x => asIndexedRamBundle({preloadedModules, ...x});
<add>function *subtree(
<add> moduleTransport,
<add> moduleTransportsByPath,
<add> seen = new Set(),
<add>) {
<add> seen.add(moduleTransport.id);
<add> for (const {path} of moduleTransport.dependencies) {
<add> const dependency = nullthrows(moduleTransportsByPath.get(path));
<add> if (!seen.has(dependency.id)) {
<add> yield dependency.id;
<add> yield *subtree(dependency, moduleTransportsByPath, seen);
<add> }
<add> }
<add>}
<add>
<add>function createBuilder(
<add> preloadedModules: Set<string>,
<add> ramGroupHeads: ?$ReadOnlyArray<string>,
<add>): OutputFn<FBIndexMap> {
<add> return x => asIndexedRamBundle({...x, preloadedModules, ramGroupHeads});
<ide> }
<add>
<ide> exports.createBuilder = createBuilder; | 8 |
Go | Go | change content-type and small fix in run | 4a1e0d321ec8ef622673971deff0d191d198cc31 | <ide><path>api.go
<ide> func getContainersExport(srv *Server, w http.ResponseWriter, r *http.Request) ([
<ide> return nil, err
<ide> }
<ide> defer in.Close()
<del> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: raw-stream-hijack\r\n\r\n")
<add> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
<ide> if err := srv.ContainerExport(name, out); err != nil {
<ide> fmt.Fprintf(out, "Error: %s\n", err)
<ide> return nil, err
<ide> func getImages(srv *Server, w http.ResponseWriter, r *http.Request) ([]byte, err
<ide> return nil, err
<ide> }
<ide> defer in.Close()
<del> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: raw-stream-hijack\r\n\r\n")
<add> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
<ide> if err := srv.ImagesViz(out); err != nil {
<ide> fmt.Fprintf(out, "Error: %s\n", err)
<ide> }
<ide> func postImages(srv *Server, w http.ResponseWriter, r *http.Request) ([]byte, er
<ide> return nil, err
<ide> }
<ide> defer in.Close()
<del> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: raw-stream-hijack\r\n\r\n")
<add> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
<ide> if image != "" { //pull
<ide> registry := r.Form.Get("registry")
<ide> if err := srv.ImagePull(image, tag, registry, out); err != nil {
<ide> func postImagesInsert(srv *Server, w http.ResponseWriter, r *http.Request) ([]by
<ide> return nil, err
<ide> }
<ide> defer in.Close()
<del> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: raw-stream-hijack\r\n\r\n")
<add> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
<ide> if err := srv.ImageInsert(name, url, path, out); err != nil {
<ide> fmt.Fprintf(out, "Error: %s\n", err)
<ide> return nil, err
<ide> func postImagesPush(srv *Server, w http.ResponseWriter, r *http.Request) ([]byte
<ide> return nil, err
<ide> }
<ide> defer in.Close()
<del> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: raw-stream-hijack\r\n\r\n")
<add> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
<ide> if err := srv.ImagePush(name, registry, out); err != nil {
<ide> fmt.Fprintln(out, "Error: %s\n", err)
<ide> return nil, err
<ide> func postBuild(srv *Server, w http.ResponseWriter, r *http.Request) ([]byte, err
<ide> return nil, err
<ide> }
<ide> defer in.Close()
<del> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: raw-stream-hijack\r\n\r\n")
<add> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
<ide> if err := srv.ImageCreateFromFile(in, out); err != nil {
<ide> fmt.Fprintln(out, "Error: %s\n", err)
<ide> return nil, err
<ide> func postContainersAttach(srv *Server, w http.ResponseWriter, r *http.Request) (
<ide> }
<ide> defer in.Close()
<ide>
<del> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: raw-stream-hijack\r\n\r\n")
<add> fmt.Fprintf(out, "HTTP/1.1 200 OK\r\nContent-Type: application/vnd.docker.raw-stream\r\n\r\n")
<ide> if err := srv.ContainerAttach(name, logs, stream, stdin, stdout, stderr, in, out); err != nil {
<ide> fmt.Fprintf(out, "Error: %s\n", err)
<ide> return nil, err
<ide><path>commands.go
<ide> func CmdRun(args ...string) error {
<ide> if err != nil {
<ide> return err
<ide> }
<del> return nil
<ide> }
<ide> if err != nil {
<ide> return err | 2 |
Ruby | Ruby | use formula patches accessor | 797820946922d8c043f320adeb4cbaa88abbfcd4 | <ide><path>Library/Homebrew/formula.rb
<ide> def require_universal_deps?
<ide>
<ide> def patch
<ide> ohai "Patching"
<del> active_spec.patches.each(&:apply)
<add> patchlist.each(&:apply)
<ide> end
<ide>
<ide> # yields self with current working directory set to the uncompressed tarball
<ide> def prepare_patches
<ide> active_spec.add_legacy_patches(patches)
<ide> return if patchlist.empty?
<ide>
<del> active_spec.patches.grep(DATAPatch) { |p| p.path = path }
<add> patchlist.grep(DATAPatch) { |p| p.path = path }
<ide>
<del> active_spec.patches.select(&:external?).each do |patch|
<add> patchlist.select(&:external?).each do |patch|
<ide> patch.verify_download_integrity(patch.fetch)
<ide> end
<ide> end | 1 |
Go | Go | show devicemapper status in "docker info" | d733cdcebbcb6bc8573e1869b11f0d9116a92892 | <ide><path>api_params.go
<ide> type APIImages struct {
<ide> }
<ide>
<ide> type APIInfo struct {
<del> Debug bool
<del> Containers int
<del> Images int
<del> NFd int `json:",omitempty"`
<del> NGoroutines int `json:",omitempty"`
<del> MemoryLimit bool `json:",omitempty"`
<del> SwapLimit bool `json:",omitempty"`
<del> IPv4Forwarding bool `json:",omitempty"`
<del> LXCVersion string `json:",omitempty"`
<del> NEventsListener int `json:",omitempty"`
<del> KernelVersion string `json:",omitempty"`
<del> IndexServerAddress string `json:",omitempty"`
<add> Debug bool
<add> Containers int
<add> Images int
<add> NFd int `json:",omitempty"`
<add> NGoroutines int `json:",omitempty"`
<add> MemoryLimit bool `json:",omitempty"`
<add> SwapLimit bool `json:",omitempty"`
<add> IPv4Forwarding bool `json:",omitempty"`
<add> LXCVersion string `json:",omitempty"`
<add> NEventsListener int `json:",omitempty"`
<add> KernelVersion string `json:",omitempty"`
<add> IndexServerAddress string `json:",omitempty"`
<add> DevmapperPool string `json:",omitempty"`
<add> DevmapperDataUsed uint64 `json:",omitempty"`
<add> DevmapperDataTotal uint64 `json:",omitempty"`
<add> DevmapperMetadataUsed uint64 `json:",omitempty"`
<add> DevmapperMetadataTotal uint64 `json:",omitempty"`
<ide> }
<ide>
<ide> type APITop struct {
<ide><path>commands.go
<ide> func (cli *DockerCli) CmdInfo(args ...string) error {
<ide>
<ide> fmt.Fprintf(cli.out, "Containers: %d\n", out.Containers)
<ide> fmt.Fprintf(cli.out, "Images: %d\n", out.Images)
<add> if out.DevmapperDataTotal != 0 {
<add> fmt.Fprintf(cli.out, "Devmapper disk use: Data: %.1f/%.1f Metadata: %.1f/%.1f\n",
<add> float64(out.DevmapperDataUsed)/(1024*1024), float64(out.DevmapperDataTotal)/(1024*1024),
<add> float64(out.DevmapperMetadataUsed)/(1024*1024), float64(out.DevmapperMetadataTotal)/(1024*1024))
<add> }
<ide> if out.Debug || os.Getenv("DEBUG") != "" {
<ide> fmt.Fprintf(cli.out, "Debug mode (server): %v\n", out.Debug)
<ide> fmt.Fprintf(cli.out, "Debug mode (client): %v\n", os.Getenv("DEBUG") != "")
<ide> func (cli *DockerCli) CmdInfo(args ...string) error {
<ide> fmt.Fprintf(cli.out, "LXC Version: %s\n", out.LXCVersion)
<ide> fmt.Fprintf(cli.out, "EventsListeners: %d\n", out.NEventsListener)
<ide> fmt.Fprintf(cli.out, "Kernel Version: %s\n", out.KernelVersion)
<add> fmt.Fprintf(cli.out, "Devmapper pool: %s\n", out.DevmapperPool)
<ide> }
<ide>
<ide> if len(out.IndexServerAddress) != 0 {
<ide><path>devmapper/deviceset_devmapper.go
<ide> type DeviceSetDM struct {
<ide> activeMounts map[string]int
<ide> }
<ide>
<add>type DiskUsage struct {
<add> Used uint64
<add> Total uint64
<add>}
<add>
<add>type Status struct {
<add> PoolName string
<add> DataLoopback string
<add> MetadataLoopback string
<add> Data DiskUsage
<add> Metadata DiskUsage
<add>}
<add>
<ide> func getDevName(name string) string {
<ide> return "/dev/mapper/" + name
<ide> }
<ide> func (devices *DeviceSetDM) SetInitialized(hash string) error {
<ide> return nil
<ide> }
<ide>
<add>func (devices *DeviceSetDM) Status() *Status {
<add> devices.Lock()
<add> defer devices.Unlock()
<add>
<add> status := &Status {}
<add>
<add> if err := devices.ensureInit(); err != nil {
<add> return status
<add> }
<add>
<add> status.PoolName = devices.getPoolName()
<add> status.DataLoopback = path.Join( devices.loopbackDir(), "data")
<add> status.MetadataLoopback = path.Join( devices.loopbackDir(), "metadata")
<add>
<add> _, totalSizeInSectors, _, params, err := getStatus(devices.getPoolName())
<add> if err == nil {
<add> var transactionId, dataUsed, dataTotal, metadataUsed, metadataTotal uint64
<add> if _, err := fmt.Sscanf(params, "%d %d/%d %d/%d", &transactionId, &metadataUsed, &metadataTotal, &dataUsed, &dataTotal); err == nil {
<add> // Convert from blocks to bytes
<add> blockSizeInSectors := totalSizeInSectors / dataTotal;
<add>
<add> status.Data.Used = dataUsed * blockSizeInSectors * 512
<add> status.Data.Total = dataTotal * blockSizeInSectors * 512
<add>
<add> // metadata blocks are always 4k
<add> status.Metadata.Used = metadataUsed * 4096
<add> status.Metadata.Total = metadataTotal * 4096
<add> }
<add> }
<add>
<add> return status
<add>}
<add>
<ide> func (devices *DeviceSetDM) ensureInit() error {
<ide> if !devices.initialized {
<ide> devices.initialized = true
<ide><path>server.go
<ide> func (srv *Server) DockerInfo() *APIInfo {
<ide> kernelVersion = kv.String()
<ide> }
<ide>
<add> devSetInfo := srv.runtime.deviceSet.Status()
<add>
<ide> return &APIInfo{
<del> Containers: len(srv.runtime.List()),
<del> Images: imgcount,
<del> MemoryLimit: srv.runtime.capabilities.MemoryLimit,
<del> SwapLimit: srv.runtime.capabilities.SwapLimit,
<del> IPv4Forwarding: !srv.runtime.capabilities.IPv4ForwardingDisabled,
<del> Debug: os.Getenv("DEBUG") != "",
<del> NFd: utils.GetTotalUsedFds(),
<del> NGoroutines: runtime.NumGoroutine(),
<del> LXCVersion: lxcVersion,
<del> NEventsListener: len(srv.events),
<del> KernelVersion: kernelVersion,
<del> IndexServerAddress: auth.IndexServerAddress(),
<add> Containers: len(srv.runtime.List()),
<add> Images: imgcount,
<add> MemoryLimit: srv.runtime.capabilities.MemoryLimit,
<add> SwapLimit: srv.runtime.capabilities.SwapLimit,
<add> IPv4Forwarding: !srv.runtime.capabilities.IPv4ForwardingDisabled,
<add> Debug: os.Getenv("DEBUG") != "",
<add> NFd: utils.GetTotalUsedFds(),
<add> NGoroutines: runtime.NumGoroutine(),
<add> LXCVersion: lxcVersion,
<add> NEventsListener: len(srv.events),
<add> KernelVersion: kernelVersion,
<add> IndexServerAddress: auth.IndexServerAddress(),
<add> DevmapperPool: devSetInfo.PoolName,
<add> DevmapperDataUsed: devSetInfo.Data.Used,
<add> DevmapperDataTotal: devSetInfo.Data.Total,
<add> DevmapperMetadataUsed: devSetInfo.Metadata.Used,
<add> DevmapperMetadataTotal: devSetInfo.Metadata.Total,
<ide> }
<ide> }
<ide> | 4 |
Java | Java | show soft errors as redboxes in dev | d78602b8cdb54a2097034ad06e81ae88a56033e0 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/core/ExceptionsManagerModule.java
<ide> public void reportFatalException(String title, ReadableArray details, int except
<ide>
<ide> @ReactMethod
<ide> public void reportSoftException(String title, ReadableArray details, int exceptionId) {
<del> FLog.e(ReactConstants.TAG, stackTraceToString(title, details));
<add> if (mDevSupportManager.getDevSupportEnabled()) {
<add> mDevSupportManager.showNewJSError(title, details, exceptionId);
<add> } else {
<add> FLog.e(ReactConstants.TAG, stackTraceToString(title, details));
<add> }
<ide> }
<ide>
<ide> private void showOrThrowError(String title, ReadableArray details, int exceptionId) { | 1 |
Java | Java | fix checkstyle violation | 0f1d16bb05d049281478cd355e6718c026f25e9c | <ide><path>spring-web/src/test/java/org/springframework/web/method/ControllerAdviceBeanTests.java
<ide> import java.lang.annotation.Retention;
<ide> import java.lang.annotation.RetentionPolicy;
<ide> import java.util.List;
<del>
<ide> import javax.annotation.Priority;
<ide>
<ide> import org.junit.Test; | 1 |
Javascript | Javascript | add check for root user | 398968297ae387714431d3e1c6ff0f0cb24807d8 | <ide><path>test/parallel/test-child-process-spawnsync-validation-errors.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const spawnSync = require('child_process').spawnSync;
<ide> const signals = process.binding('constants').os.signals;
<add>const rootUser = common.isWindows ? false : process.getuid() === 0;
<ide>
<del>let invalidArgTypeError;
<del>let invalidArgTypeErrorCount = 62;
<del>
<del>if (common.isWindows) {
<del> invalidArgTypeError =
<del> common.expectsError({ code: 'ERR_INVALID_ARG_TYPE', type: TypeError }, 42);
<del>} else {
<del> invalidArgTypeError =
<del> common.expectsError({ code: 'ERR_INVALID_ARG_TYPE', type: TypeError },
<del> invalidArgTypeErrorCount);
<del>}
<add>const invalidArgTypeError = common.expectsError(
<add> { code: 'ERR_INVALID_ARG_TYPE', type: TypeError },
<add> common.isWindows || rootUser ? 42 : 62);
<ide>
<ide> const invalidRangeError =
<ide> common.expectsError({ code: 'ERR_OUT_OF_RANGE', type: RangeError }, 20);
<ide> function fail(option, value, message) {
<ide> if (!common.isWindows) {
<ide> {
<ide> // Validate the uid option
<del> if (process.getuid() !== 0) {
<add> if (!rootUser) {
<ide> pass('uid', undefined);
<ide> pass('uid', null);
<ide> pass('uid', process.getuid());
<ide> if (!common.isWindows) {
<ide> fail('uid', Infinity, invalidArgTypeError);
<ide> fail('uid', 3.1, invalidArgTypeError);
<ide> fail('uid', -3.1, invalidArgTypeError);
<del> } else {
<del> // Decrement invalidArgTypeErrorCount if validation isn't possible
<del> invalidArgTypeErrorCount -= 10;
<ide> }
<ide> }
<ide>
<ide> if (!common.isWindows) {
<ide> fail('gid', Infinity, invalidArgTypeError);
<ide> fail('gid', 3.1, invalidArgTypeError);
<ide> fail('gid', -3.1, invalidArgTypeError);
<del> } else {
<del> // Decrement invalidArgTypeErrorCount if validation isn't possible
<del> invalidArgTypeErrorCount -= 10;
<ide> }
<ide> }
<ide> } | 1 |
Python | Python | add init_envs in domain xml | cee583b4eadd488159d418d7ab8fa93d94c6b245 | <ide><path>libcloud/compute/drivers/libvirt_driver.py
<ide> def ex_get_hypervisor_sysinfo(self):
<ide>
<ide> def create_node(self, name, disk_size=4, ram=512,
<ide> cpu=1, image=None, disk_path=None, create_from_existing=None,
<del> os_type='linux', networks=[], cloud_init=None, public_key=None):
<add> os_type='linux', networks=[], cloud_init=None, public_key=None,
<add> env_vars={}):
<ide> """
<ide> Creates a VM
<ide>
<ide> def create_node(self, name, disk_size=4, ram=512,
<ide> net_type = 'bridge'
<ide> net_name = network
<ide>
<del> conf = XML_CONF_TEMPLATE % (emu, name, ram, cpu, disk_path, image_conf, net_type, net_type, net_name)
<add> init_env = ""
<add> for env_var in env_vars:
<add> init_env += "<initenv name='%s'>%s</initenv>\n" % (env_var, env_vars[env_var])
<add> conf = XML_CONF_TEMPLATE % (emu, name, ram, cpu, init_env, disk_path, image_conf, net_type, net_type, net_name)
<ide>
<ide> self.connection.defineXML(conf)
<ide>
<ide> def __repr__(self):
<ide> <type arch='x86_64'>hvm</type>
<ide> <boot dev='hd'/>
<ide> <boot dev='cdrom'/>
<add> %s
<ide> </os>
<ide> <features>
<ide> <acpi/> | 1 |
PHP | PHP | add support for no_color environment variable | 6885023407396dff4c35938bc510db2ebc143122 | <ide><path>src/Console/ConsoleOutput.php
<ide> public function __construct(string $stream = 'php://stdout')
<ide> (
<ide> function_exists('posix_isatty') &&
<ide> !posix_isatty($this->_output)
<add> ) ||
<add> (
<add> env('NO_COLOR') !== null
<ide> )
<ide> ) {
<ide> $this->_outputAs = self::PLAIN;
<ide><path>tests/TestCase/Console/ConsoleOutputTest.php
<ide> public function tearDown(): void
<ide> unset($this->output);
<ide> }
<ide>
<add> public function testNoColorEnvironmentVariable()
<add> {
<add> $_SERVER['NO_COLOR'] = '1';
<add> $output = new ConsoleOutput();
<add> $this->assertSame(ConsoleOutput::PLAIN, $output->getOutputAs());
<add>
<add> unset($_SERVER['NO_COLOR']);
<add> }
<add>
<ide> /**
<ide> * test writing with no new line
<ide> * | 2 |
Python | Python | add minimum version of pylint | 87d61dcbc9b17dd0130ab0bb4124f9eff2187f25 | <ide><path>setup.py
<ide> def get_sphinx_theme_version() -> str:
<ide> 'paramiko',
<ide> 'pipdeptree',
<ide> 'pre-commit',
<del> 'pylint',
<add> 'pylint>=2.7.0',
<ide> 'pysftp',
<ide> 'pytest~=6.0',
<ide> 'pytest-cov', | 1 |
Python | Python | add line breaks | a662a95294515f156b57325cdb0971c2d2416d9e | <ide><path>spacy/en/language_data.py
<ide> # improved list from Stone, Denis, Kwantes (2010)
<ide> STOP_WORDS = set("""
<ide>
<del>a about above across after afterwards again against all almost alone along already also although always am among amongst amount an and another any anyhow anyone anything anyway anywhere are around as at
<add>a about above across after afterwards again against all almost alone along
<add>already also although always am among amongst amount an and another any anyhow
<add>anyone anything anyway anywhere are around as at
<ide>
<del>back be became because become becomes becoming been before beforehand behind being below beside besides between beyond both bottom but by
<add>back be became because become becomes becoming been before beforehand behind
<add>being below beside besides between beyond both bottom but by
<ide>
<ide> call can cannot ca could
<ide>
<ide> did do does doing done down due during
<ide>
<del>each eight either eleven else elsewhere empty enough etc even ever every everyone everything everywhere except
<add>each eight either eleven else elsewhere empty enough etc even ever every
<add>everyone everything everywhere except
<ide>
<del>few fifteen fifty first five for former formerly forty four from front full further
<add>few fifteen fifty first five for former formerly forty four from front full
<add>further
<ide>
<ide> get give go
<ide>
<del>had has have he hence her here hereafter hereby herein hereupon hers herself him himself his how however hundred
<add>had has have he hence her here hereafter hereby herein hereupon hers herself
<add>him himself his how however hundred
<ide>
<ide> i if in inc indeed into is it its itself
<ide>
<ide>
<ide> just
<ide>
<del>made make many may me meanwhile might mine more moreover most mostly move much must my myself
<add>made make many may me meanwhile might mine more moreover most mostly move much
<add>must my myself
<ide>
<del>name namely neither never nevertheless next nine no nobody none noone nor not nothing now nowhere
<add>name namely neither never nevertheless next nine no nobody none noone nor not
<add>nothing now nowhere
<ide>
<del>of off often on once one only onto or other others otherwise our ours ourselves out over own
<add>of off often on once one only onto or other others otherwise our ours ourselves
<add>out over own
<ide>
<ide> part per perhaps please put
<ide>
<ide> quite
<ide>
<ide> rather re really regarding
<ide>
<del>same say see seem seemed seeming seems serious several she should show side since six sixty so some somehow someone something sometime sometimes somewhere still such
<add>same say see seem seemed seeming seems serious several she should show side
<add>since six sixty so some somehow someone something sometime sometimes somewhere
<add>still such
<ide>
<del>take ten than that the their them themselves then thence there thereafter thereby therefore therein thereupon these they third this those though three through throughout thru thus to together too top toward towards twelve twenty two
<add>take ten than that the their them themselves then thence there thereafter
<add>thereby therefore therein thereupon these they third this those though three
<add>through throughout thru thus to together too top toward towards twelve twenty
<add>two
<ide>
<ide> under until up unless upon us used using
<ide>
<del>various very very via was we well were what whatever when whence whenever where whereafter whereas whereby wherein whereupon wherever whether which while whither who whoever whole whom whose why will with within without would
<add>various very very via was we well were what whatever when whence whenever where
<add>whereafter whereas whereby wherein whereupon wherever whether which while
<add>whither who whoever whole whom whose why will with within without would
<ide>
<ide> yet you your yours yourself yourselves
<ide> | 1 |
Python | Python | set version to v2.1.0a13 | 6aab2d8533f75ed1b86508464bfe6c5e931d176e | <ide><path>spacy/about.py
<ide> # fmt: off
<ide>
<ide> __title__ = "spacy-nightly"
<del>__version__ = "2.1.0a12"
<add>__version__ = "2.1.0a13"
<ide> __summary__ = "Industrial-strength Natural Language Processing (NLP) with Python and Cython"
<ide> __uri__ = "https://spacy.io"
<ide> __author__ = "Explosion AI" | 1 |
Python | Python | fix some migrations | 6e5c9c845f7f0975178dbeb76d4ccfe95d0ed803 | <ide><path>airflow/migrations/versions/30867afad44a_rename_concurrency_column_in_dag_table_.py
<ide>
<ide> def upgrade():
<ide> """Apply Rename concurrency column in dag table to max_active_tasks"""
<add> conn = op.get_bind()
<add> is_sqlite = bool(conn.dialect.name == "sqlite")
<add>
<add> if is_sqlite:
<add> op.execute("PRAGMA foreign_keys=off")
<ide> with op.batch_alter_table('dag') as batch_op:
<ide> batch_op.alter_column(
<ide> 'concurrency',
<ide> new_column_name='max_active_tasks',
<ide> type_=sa.Integer(),
<ide> nullable=False,
<ide> )
<add> if is_sqlite:
<add> op.execute("PRAGMA foreign_keys=on")
<ide>
<ide>
<ide> def downgrade():
<ide><path>airflow/migrations/versions/c306b5b5ae4a_switch_xcom_table_to_use_run_id.py
<ide> from typing import Sequence
<ide>
<ide> from alembic import op
<del>from sqlalchemy import Column, Integer, LargeBinary, MetaData, Table, select
<add>from sqlalchemy import Column, Integer, LargeBinary, MetaData, Table, and_, select
<ide>
<ide> from airflow.migrations.db_types import TIMESTAMP, StringID
<ide>
<ide> def upgrade():
<ide> data pre-populated, adding back constraints we need, and renaming it to
<ide> replace the existing XCom table.
<ide> """
<add> conn = op.get_bind()
<add> is_sqlite = conn.dialect.name == "sqlite"
<add>
<ide> op.create_table("__airflow_tmp_xcom", *_get_new_xcom_columns())
<ide>
<ide> xcom = Table("xcom", metadata, *_get_old_xcom_columns())
<ide> def upgrade():
<ide> ],
<ide> ).select_from(
<ide> xcom.join(
<del> dagrun,
<del> xcom.c.dag_id == dagrun.c.dag_id,
<del> xcom.c.execution_date == dagrun.c.execution_date,
<add> right=dagrun,
<add> onclause=and_(
<add> xcom.c.dag_id == dagrun.c.dag_id,
<add> xcom.c.execution_date == dagrun.c.execution_date,
<add> ),
<ide> ),
<ide> )
<ide> op.execute(f"INSERT INTO __airflow_tmp_xcom {query.selectable.compile(op.get_bind())}")
<ide>
<add> if is_sqlite:
<add> op.execute("PRAGMA foreign_keys=off")
<ide> op.drop_table("xcom")
<add> if is_sqlite:
<add> op.execute("PRAGMA foreign_keys=on")
<ide> op.rename_table("__airflow_tmp_xcom", "xcom")
<ide>
<ide> with op.batch_alter_table("xcom") as batch_op:
<ide> def downgrade():
<ide> ],
<ide> ).select_from(
<ide> xcom.join(
<del> dagrun,
<del> xcom.c.dag_id == dagrun.c.dag_id,
<del> xcom.c.run_id == dagrun.c.run_id,
<add> right=dagrun,
<add> onclause=and_(
<add> xcom.c.dag_id == dagrun.c.dag_id,
<add> xcom.c.run_id == dagrun.c.run_id,
<add> ),
<ide> ),
<ide> )
<ide> op.execute(f"INSERT INTO __airflow_tmp_xcom {query.selectable.compile(op.get_bind())}") | 2 |
Python | Python | add merge insertion sort | 9ec71cbdda4a52f024c9d24f0ece14600ca05301 | <ide><path>sorts/merge_insertion_sort.py
<add>"""
<add>This is a pure Python implementation of the merge-insertion sort algorithm
<add>Source: https://en.wikipedia.org/wiki/Merge-insertion_sort
<add>
<add>For doctests run following command:
<add>python3 -m doctest -v merge_insertion_sort.py
<add>or
<add>python -m doctest -v merge_insertion_sort.py
<add>
<add>For manual testing run:
<add>python3 merge_insertion_sort.py
<add>"""
<add>
<add>from typing import List
<add>
<add>
<add>def merge_insertion_sort(collection: List[int]) -> List[int]:
<add> """Pure implementation of merge-insertion sort algorithm in Python
<add>
<add> :param collection: some mutable ordered collection with heterogeneous
<add> comparable items inside
<add> :return: the same collection ordered by ascending
<add>
<add> Examples:
<add> >>> merge_insertion_sort([0, 5, 3, 2, 2])
<add> [0, 2, 2, 3, 5]
<add>
<add> >>> merge_insertion_sort([99])
<add> [99]
<add>
<add> >>> merge_insertion_sort([-2, -5, -45])
<add> [-45, -5, -2]
<add> """
<add>
<add> def binary_search_insertion(sorted_list, item):
<add> left = 0
<add> right = len(sorted_list) - 1
<add> while left <= right:
<add> middle = (left + right) // 2
<add> if left == right:
<add> if sorted_list[middle] < item:
<add> left = middle + 1
<add> break
<add> elif sorted_list[middle] < item:
<add> left = middle + 1
<add> else:
<add> right = middle - 1
<add> sorted_list.insert(left, item)
<add> return sorted_list
<add>
<add> def sortlist_2d(list_2d):
<add> def merge(left, right):
<add> result = []
<add> while left and right:
<add> if left[0][0] < right[0][0]:
<add> result.append(left.pop(0))
<add> else:
<add> result.append(right.pop(0))
<add> return result + left + right
<add>
<add> length = len(list_2d)
<add> if length <= 1:
<add> return list_2d
<add> middle = length // 2
<add> return merge(sortlist_2d(list_2d[:middle]), sortlist_2d(list_2d[middle:]))
<add>
<add> if len(collection) <= 1:
<add> return collection
<add>
<add> """
<add> Group the items into two pairs, and leave one element if there is a last odd item.
<add>
<add> Example: [999, 100, 75, 40, 10000]
<add> -> [999, 100], [75, 40]. Leave 10000.
<add> """
<add> two_paired_list = []
<add> has_last_odd_item = False
<add> for i in range(0, len(collection), 2):
<add> if i == len(collection) - 1:
<add> has_last_odd_item = True
<add> else:
<add> """
<add> Sort two-pairs in each groups.
<add>
<add> Example: [999, 100], [75, 40]
<add> -> [100, 999], [40, 75]
<add> """
<add> if collection[i] < collection[i + 1]:
<add> two_paired_list.append([collection[i], collection[i + 1]])
<add> else:
<add> two_paired_list.append([collection[i + 1], collection[i]])
<add>
<add> """
<add> Sort two_paired_list.
<add>
<add> Example: [100, 999], [40, 75]
<add> -> [40, 75], [100, 999]
<add> """
<add> sorted_list_2d = sortlist_2d(two_paired_list)
<add>
<add> """
<add> 40 < 100 is sure because it has already been sorted.
<add> Generate the sorted_list of them so that you can avoid unnecessary comparison.
<add>
<add> Example:
<add> group0 group1
<add> 40 100
<add> 75 999
<add> ->
<add> group0 group1
<add> [40, 100]
<add> 75 999
<add> """
<add> result = [i[0] for i in sorted_list_2d]
<add>
<add> """
<add> 100 < 999 is sure because it has already been sorted.
<add> Put 999 in last of the sorted_list so that you can avoid unnecessary comparison.
<add>
<add> Example:
<add> group0 group1
<add> [40, 100]
<add> 75 999
<add> ->
<add> group0 group1
<add> [40, 100, 999]
<add> 75
<add> """
<add> result.append(sorted_list_2d[-1][1])
<add>
<add> """
<add> Insert the last odd item left if there is.
<add>
<add> Example:
<add> group0 group1
<add> [40, 100, 999]
<add> 75
<add> ->
<add> group0 group1
<add> [40, 100, 999, 10000]
<add> 75
<add> """
<add> if has_last_odd_item:
<add> pivot = collection[-1]
<add> result = binary_search_insertion(result, pivot)
<add>
<add> """
<add> Insert the remaining items.
<add> In this case, 40 < 75 is sure because it has already been sorted.
<add> Therefore, you only need to insert 75 into [100, 999, 10000],
<add> so that you can avoid unnecessary comparison.
<add>
<add> Example:
<add> group0 group1
<add> [40, 100, 999, 10000]
<add> ^ You don't need to compare with this as 40 < 75 is already sure.
<add> 75
<add> ->
<add> [40, 75, 100, 999, 10000]
<add> """
<add> is_last_odd_item_inserted_before_this_index = False
<add> for i in range(len(sorted_list_2d) - 1):
<add> if result[i] == collection[-i]:
<add> is_last_odd_item_inserted_before_this_index = True
<add> pivot = sorted_list_2d[i][1]
<add> # If last_odd_item is inserted before the item's index,
<add> # you should forward index one more.
<add> if is_last_odd_item_inserted_before_this_index:
<add> result = result[: i + 2] + binary_search_insertion(result[i + 2 :], pivot)
<add> else:
<add> result = result[: i + 1] + binary_search_insertion(result[i + 1 :], pivot)
<add>
<add> return result
<add>
<add>
<add>if __name__ == "__main__":
<add> user_input = input("Enter numbers separated by a comma:\n").strip()
<add> unsorted = [int(item) for item in user_input.split(",")]
<add> print(merge_insertion_sort(unsorted)) | 1 |
Python | Python | implement fake with statement for tests | 8c38eba6999a26d2a344950247c274984162697b | <ide><path>celery/tests/test_log.py
<del>from __future__ import with_statement, generators
<add>from __future__ import generators
<ide>
<ide> import os
<ide> import sys
<ide>
<ide> from celery.log import (setup_logger, emergency_error,
<ide> redirect_stdouts_to_logger, LoggingProxy)
<del>from celery.tests.utils import override_stdouts
<add>from celery.tests.utils import override_stdouts, execute_context
<ide>
<ide>
<ide> @contextmanager
<ide> class TestLog(unittest.TestCase):
<ide>
<ide> def _assertLog(self, logger, logmsg, loglevel=logging.ERROR):
<ide>
<del> sio = wrap_logger(logger, loglevel=loglevel)
<del> logger.log(loglevel, logmsg)
<del>
<del> return sio.getvalue().strip()
<add> def with_wrap_logger(sio):
<add> logger.log(loglevel, logmsg)
<add> return sio.getvalue().strip()
<add>
<add> context = wrap_logger(logger, loglevel=loglevel)
<add> execute_context(context, with_wrap_logger)
<ide>
<ide> def assertDidLogTrue(self, logger, logmsg, reason, loglevel=None):
<ide> val = self._assertLog(logger, logmsg, loglevel=loglevel)
<ide> def test_setup_logger_no_handlers_stream(self):
<ide> from multiprocessing import get_logger
<ide> l = get_logger()
<ide> l.handlers = []
<del> outs = override_stdouts()
<del> stdout, stderr = outs
<del> l = setup_logger(logfile=stderr, loglevel=logging.INFO)
<del> l.info("The quick brown fox...")
<del> self.assertTrue("The quick brown fox..." in stderr.getvalue())
<add>
<add> def with_override_stdouts(outs):
<add> stdout, stderr = outs
<add> l = setup_logger(logfile=stderr, loglevel=logging.INFO)
<add> l.info("The quick brown fox...")
<add> self.assertTrue("The quick brown fox..." in stderr.getvalue())
<add>
<add> context = override_stdouts()
<add> execute_context(context, with_override_stdouts)
<add>
<ide>
<ide> def test_setup_logger_no_handlers_file(self):
<ide> from multiprocessing import get_logger
<ide> def test_setup_logger_no_handlers_file(self):
<ide>
<ide> def test_emergency_error_stderr(self):
<ide> outs = override_stdouts()
<del> stdout, stderr = outs
<del> emergency_error(None, "The lazy dog crawls under the fast fox")
<del> self.assertTrue("The lazy dog crawls under the fast fox" in \
<del> stderr.getvalue())
<add>
<add> def with_override_stdouts(outs):
<add> stdout, stderr = outs
<add> emergency_error(None, "The lazy dog crawls under the fast fox")
<add> self.assertTrue("The lazy dog crawls under the fast fox" in
<add> stderr.getvalue())
<add>
<add> context = override_stdouts()
<add> execute_context(context, with_override_stdouts)
<ide>
<ide> def test_emergency_error_file(self):
<ide> tempfile = mktemp(suffix="unittest", prefix="celery")
<ide> emergency_error(tempfile, "Vandelay Industries")
<ide> tempfilefh = open(tempfile, "r")
<del> self.assertTrue("Vandelay Industries" in "".join(tempfilefh))
<del> tempfilefh.close()
<del> os.unlink(tempfile)
<add> try:
<add> self.assertTrue("Vandelay Industries" in "".join(tempfilefh))
<add> finally:
<add> tempfilefh.close()
<add> os.unlink(tempfile)
<ide>
<ide> def test_redirect_stdouts(self):
<ide> logger = setup_logger(loglevel=logging.ERROR, logfile=None)
<del> did_exc = None
<ide> try:
<del> sio = wrap_logger(logger)
<del> redirect_stdouts_to_logger(logger, loglevel=logging.ERROR)
<del> logger.error("foo")
<del> self.assertTrue("foo" in sio.getvalue())
<del> except Exception, e:
<del> did_exc = e
<add> def with_wrap_logger(sio):
<add> redirect_stdouts_to_logger(logger, loglevel=logging.ERROR)
<add> logger.error("foo")
<add> self.assertTrue("foo" in sio.getvalue())
<add>
<add> context = wrap_logger(logger)
<add> execute_context(context, with_wrap_logger)
<add> finally:
<add> sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
<ide>
<del> sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
<del>
<del> if did_exc:
<del> raise did_exc
<ide>
<ide> def test_logging_proxy(self):
<ide> logger = setup_logger(loglevel=logging.ERROR, logfile=None)
<del> sio = wrap_logger(logger)
<del> p = LoggingProxy(logger)
<del> p.close()
<del> p.write("foo")
<del> self.assertTrue("foo" not in sio.getvalue())
<del> p.closed = False
<del> p.write("foo")
<del> self.assertTrue("foo" in sio.getvalue())
<del> lines = ["baz", "xuzzy"]
<del> p.writelines(lines)
<del> for line in lines:
<del> self.assertTrue(line in sio.getvalue())
<del> p.flush()
<del> p.close()
<del> self.assertFalse(p.isatty())
<del> self.assertTrue(p.fileno() is None)
<add>
<add> def with_wrap_logger(sio):
<add> p = LoggingProxy(logger)
<add> p.close()
<add> p.write("foo")
<add> self.assertTrue("foo" not in sio.getvalue())
<add> p.closed = False
<add> p.write("foo")
<add> self.assertTrue("foo" in sio.getvalue())
<add> lines = ["baz", "xuzzy"]
<add> p.writelines(lines)
<add> for line in lines:
<add> self.assertTrue(line in sio.getvalue())
<add> p.flush()
<add> p.close()
<add> self.assertFalse(p.isatty())
<add> self.assertTrue(p.fileno() is None)
<add>
<add> context = wrap_logger(logger)
<add> execute_context(context, with_wrap_logger)
<ide><path>celery/tests/test_worker.py
<del>from __future__ import with_statement
<del>
<ide> import unittest
<ide> from Queue import Queue, Empty
<ide> from datetime import datetime, timedelta
<ide>
<ide> from celery import conf
<ide> from celery.utils import gen_unique_id, noop
<add>from celery.tests.utils import execute_context
<ide> from celery.tests.compat import catch_warnings
<ide> from celery.worker import WorkController
<ide> from celery.worker.listener import CarrotListener, RUN, CLOSE
<ide> def test_receive_message_unknown(self):
<ide> m = create_message(backend, unknown={"baz": "!!!"})
<ide> l.event_dispatcher = MockEventDispatcher()
<ide> l.control_dispatch = MockControlDispatch()
<del> log = catch_warnings(record=True)
<del> l.receive_message(m.decode(), m)
<del> self.assertTrue(log)
<del> self.assertTrue("unknown message" in log[0].message.args[0])
<add>
<add> def with_catch_warnings(log):
<add> l.receive_message(m.decode(), m)
<add> self.assertTrue(log)
<add> self.assertTrue("unknown message" in log[0].message.args[0])
<add>
<add> context = catch_warnings(record=True)
<add> execute_context(context, with_catch_warnings)
<ide>
<ide> def test_receieve_message(self):
<ide> l = CarrotListener(self.ready_queue, self.eta_schedule, self.logger,
<ide><path>celery/tests/test_worker_job.py
<ide> # -*- coding: utf-8 -*-
<del>from __future__ import with_statement
<del>
<ide> import sys
<ide> import logging
<ide> import unittest
<ide> from celery.log import setup_logger
<ide> from celery.task.base import Task
<ide> from celery.utils import gen_unique_id
<add>from celery.tests.utils import execute_context
<ide> from celery.tests.compat import catch_warnings
<ide> from celery.models import TaskMeta
<ide> from celery.result import AsyncResult
<ide> def _error_exec(self, *args, **kwargs):
<ide>
<ide> WorkerTaskTrace.execute = _error_exec
<ide> try:
<del> log = catch_warnings(record=True)
<del> res = execute_and_trace(mytask.name, gen_unique_id(),
<del> [4], {})
<del> self.assertTrue(isinstance(res, ExceptionInfo))
<del> self.assertTrue(log)
<del> self.assertTrue("Exception outside" in log[0].message.args[0])
<del> self.assertTrue("KeyError" in log[0].message.args[0])
<add> def with_catch_warnings(log):
<add> res = execute_and_trace(mytask.name, gen_unique_id(),
<add> [4], {})
<add> self.assertTrue(isinstance(res, ExceptionInfo))
<add> self.assertTrue(log)
<add> self.assertTrue("Exception outside" in log[0].message.args[0])
<add> self.assertTrue("KeyError" in log[0].message.args[0])
<add>
<add> context = catch_warnings(record=True)
<add> execute_context(context, with_catch_warnings)
<ide> finally:
<ide> WorkerTaskTrace.execute = old_exec
<ide>
<ide><path>celery/tests/utils.py
<ide> def __exit__(self, type, value, traceback):
<ide> if sys.exc_info()[1] is not value:
<ide> raise
<ide>
<del>def fallback_contextmanager(func):
<add>def fallback_contextmanager(fun):
<ide> def helper(*args, **kwds):
<del> return GeneratorContextManager(func(*args, **kwds))
<add> return GeneratorContextManager(fun(*args, **kwds))
<ide> return helper
<ide>
<add>
<add>def execute_context(context, fun):
<add> val = context.__enter__()
<add> exc_info = (None, None, None)
<add> retval = None
<add> try:
<add> retval = fun(val)
<add> except:
<add> exc_info = sys.exc_info()
<add> context.__exit__(*exc_info)
<add> return retval
<add>
<add>
<ide> try:
<ide> from contextlib import contextmanager
<ide> except ImportError: | 4 |
Ruby | Ruby | remove trailing space | cd40043357491f386a4e3dd095ed4ad6abb54f81 | <ide><path>activesupport/lib/active_support/core_ext/string/output_safety.rb
<ide> def #{unsafe_method}!(*args) # def capitalize!(*args)
<ide> class_eval <<-EOT, __FILE__, __LINE__ + 1
<ide> def #{unsafe_method}(*args, &block) # def gsub(*args, &block)
<ide> if block # if block
<del> to_str.#{unsafe_method}(*args) { |*params| # to_str.gsub(*args) { |*params|
<add> to_str.#{unsafe_method}(*args) { |*params| # to_str.gsub(*args) { |*params|
<ide> set_block_back_references(block, $~) # set_block_back_references(block, $~)
<ide> block.call(*params) # block.call(*params)
<ide> } # } | 1 |
Java | Java | fix typo in error handler | 2231263b8b1a298c9fe90006cdf502ea92911b5f | <ide><path>ReactAndroid/src/main/java/com/facebook/react/NativeModuleRegistryBuilder.java
<ide> public void processPackage(ReactPackage reactPackage) {
<ide> + name
<ide> + " tried to override "
<ide> + existingNativeModule.getClassName()
<del> + " for module name .Check the getPackages() method in MainApplication.java, it might be that module is being created twice. If this was your intention, set canOverrideExistingModule=true");
<add> + ". Check the getPackages() method in MainApplication.java, it might be that module is being created twice. If this was your intention, set canOverrideExistingModule=true");
<ide> }
<ide> mModules.remove(existingNativeModule);
<ide> } | 1 |
Python | Python | reset cached chan on producer when connection lost | ab301c20040366db05090d26f94211f6e11fbbf7 | <ide><path>celery/backends/amqp.py
<ide> # -*- coding: utf-8 -*-
<ide> import os
<ide> import socket
<add>import threading
<ide> import time
<ide>
<ide> from datetime import timedelta
<ide> def __init__(self, connection=None, exchange=None, exchange_type=None,
<ide> self.queue_arguments["x-expires"] = int(self.expires * 1000.0)
<ide> self.connection_max = (connection_max or
<ide> conf.CELERY_AMQP_TASK_RESULT_CONNECTION_MAX)
<add> self.mutex = threading.Lock()
<ide>
<ide> def _create_binding(self, task_id):
<ide> name = task_id.replace("-", "")
<ide> def _store_result(self, task_id, result, status, traceback=None,
<ide> max_retries=20, interval_start=0, interval_step=1,
<ide> interval_max=1):
<ide> """Send task return value and status."""
<del> conn = self.pool.acquire(block=True)
<add> self.mutex.acquire()
<ide> try:
<del> send = conn.ensure(self, self._publish_result,
<del> max_retries=max_retries,
<del> interval_start=interval_start,
<del> interval_step=interval_step,
<del> interval_max=interval_max)
<del> send(conn, task_id, {"task_id": task_id, "status": status,
<del> "result": self.encode_result(result, status),
<del> "traceback": traceback})
<add> conn = self.pool.acquire(block=True)
<add> try:
<add>
<add> def errback(error, delay):
<add> conn._result_producer_chan = None
<add> print("Couldn't send result for %r: %r. Retry in %rs." % (
<add> task_id, error, delay))
<add>
<add> send = conn.ensure(self, self._publish_result,
<add> max_retries=max_retries,
<add> errback=errback,
<add> interval_start=interval_start,
<add> interval_step=interval_step,
<add> interval_max=interval_max)
<add> send(conn, task_id, {"task_id": task_id, "status": status,
<add> "result": self.encode_result(result, status),
<add> "traceback": traceback})
<add> finally:
<add> conn.release()
<ide> finally:
<del> conn.release()
<add> self.mutex.release()
<ide>
<ide> return result
<ide> | 1 |
Java | Java | add support for quartz features in cronexpression | 93b53dae2966ee5ab8ca7c0a00f56673f939a18a | <ide><path>spring-context/src/main/java/org/springframework/scheduling/support/BitsCronField.java
<add>/*
<add> * Copyright 2002-2020 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * https://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.scheduling.support;
<add>
<add>import java.time.DateTimeException;
<add>import java.time.temporal.Temporal;
<add>import java.time.temporal.ValueRange;
<add>import java.util.BitSet;
<add>
<add>import org.springframework.lang.Nullable;
<add>import org.springframework.util.Assert;
<add>import org.springframework.util.StringUtils;
<add>
<add>/**
<add> * Efficient {@link BitSet}-based extension of {@link CronField}.
<add> * Created using the {@code parse*} methods.
<add> *
<add> * @author Arjen Poutsma
<add> * @since 5.3
<add> */
<add>final class BitsCronField extends CronField {
<add>
<add> private static final BitsCronField ZERO_NANOS;
<add>
<add>
<add> static {
<add> ZERO_NANOS = new BitsCronField(Type.NANO);
<add> ZERO_NANOS.bits.set(0);
<add> }
<add>
<add> private final BitSet bits;
<add>
<add>
<add>
<add> private BitsCronField(Type type) {
<add> super(type);
<add> this.bits = new BitSet((int) type.range().getMaximum());
<add> }
<add>
<add> /**
<add> * Return a {@code BitsCronField} enabled for 0 nano seconds.
<add> */
<add> public static BitsCronField zeroNanos() {
<add> return BitsCronField.ZERO_NANOS;
<add> }
<add>
<add> /**
<add> * Parse the given value into a seconds {@code BitsCronField}, the first entry of a cron expression.
<add> */
<add> public static BitsCronField parseSeconds(String value) {
<add> return parseField(value, Type.SECOND);
<add> }
<add>
<add> /**
<add> * Parse the given value into a minutes {@code BitsCronField}, the second entry of a cron expression.
<add> */
<add> public static BitsCronField parseMinutes(String value) {
<add> return BitsCronField.parseField(value, Type.MINUTE);
<add> }
<add>
<add> /**
<add>	 * Parse the given value into an hours {@code BitsCronField}, the third entry of a cron expression.
<add> */
<add> public static BitsCronField parseHours(String value) {
<add> return BitsCronField.parseField(value, Type.HOUR);
<add> }
<add>
<add> /**
<add> * Parse the given value into a days of months {@code BitsCronField}, the fourth entry of a cron expression.
<add> */
<add> public static BitsCronField parseDaysOfMonth(String value) {
<add> return parseDate(value, Type.DAY_OF_MONTH);
<add> }
<add>
<add> /**
<add> * Parse the given value into a month {@code BitsCronField}, the fifth entry of a cron expression.
<add> */
<add> public static BitsCronField parseMonth(String value) {
<add> return BitsCronField.parseField(value, Type.MONTH);
<add> }
<add>
<add> /**
<add> * Parse the given value into a days of week {@code BitsCronField}, the sixth entry of a cron expression.
<add> */
<add> public static BitsCronField parseDaysOfWeek(String value) {
<add> BitsCronField result = parseDate(value, Type.DAY_OF_WEEK);
<add> BitSet bits = result.bits;
<add> if (bits.get(0)) {
<add> // cron supports 0 for Sunday; we use 7 like java.time
<add> bits.set(7);
<add> bits.clear(0);
<add> }
<add> return result;
<add> }
<add>
<add>
<add> private static BitsCronField parseDate(String value, BitsCronField.Type type) {
<add> if (value.indexOf('?') != -1) {
<add> value = "*";
<add> }
<add> return BitsCronField.parseField(value, type);
<add> }
<add>
<add> private static BitsCronField parseField(String value, Type type) {
<add> Assert.hasLength(value, "Value must not be empty");
<add> Assert.notNull(type, "Type must not be null");
<add> try {
<add> BitsCronField result = new BitsCronField(type);
<add> String[] fields = StringUtils.delimitedListToStringArray(value, ",");
<add> for (String field : fields) {
<add> int slashPos = field.indexOf('/');
<add> if (slashPos == -1) {
<add> ValueRange range = parseRange(field, type);
<add> result.setBits(range);
<add> }
<add> else {
<add> String rangeStr = value.substring(0, slashPos);
<add> String deltaStr = value.substring(slashPos + 1);
<add> ValueRange range = parseRange(rangeStr, type);
<add> if (rangeStr.indexOf('-') == -1) {
<add> range = ValueRange.of(range.getMinimum(), type.range().getMaximum());
<add> }
<add> int delta = Integer.parseInt(deltaStr);
<add> if (delta <= 0) {
<add> throw new IllegalArgumentException("Incrementer delta must be 1 or higher");
<add> }
<add> result.setBits(range, delta);
<add> }
<add> }
<add> return result;
<add> }
<add> catch (DateTimeException | IllegalArgumentException ex) {
<add> String msg = ex.getMessage() + " '" + value + "'";
<add> throw new IllegalArgumentException(msg, ex);
<add> }
<add> }
<add>
<add> private static ValueRange parseRange(String value, Type type) {
<add> if (value.indexOf('*') != -1) {
<add> return type.range();
<add> }
<add> else {
<add> int hyphenPos = value.indexOf('-');
<add> if (hyphenPos == -1) {
<add> int result = type.checkValidValue(Integer.parseInt(value));
<add> return ValueRange.of(result, result);
<add> }
<add> else {
<add> int min = Integer.parseInt(value.substring(0, hyphenPos));
<add> int max = Integer.parseInt(value.substring(hyphenPos + 1));
<add> min = type.checkValidValue(min);
<add> max = type.checkValidValue(max);
<add> return ValueRange.of(min, max);
<add> }
<add> }
<add> }
<add>
<add> @Nullable
<add> @Override
<add> public <T extends Temporal & Comparable<? super T>> T nextOrSame(T temporal) {
<add> int current = type().get(temporal);
<add> int next = this.bits.nextSetBit(current);
<add> if (next == -1) {
<add> temporal = type().rollForward(temporal);
<add> next = this.bits.nextSetBit(0);
<add> }
<add> if (next == current) {
<add> return temporal;
<add> }
<add> else {
<add> int count = 0;
<add> current = type().get(temporal);
<add> while (current != next && count++ < CronExpression.MAX_ATTEMPTS) {
<add> temporal = type().elapseUntil(temporal, next);
<add> current = type().get(temporal);
<add> }
<add> if (count >= CronExpression.MAX_ATTEMPTS) {
<add> return null;
<add> }
<add> return type().reset(temporal);
<add> }
<add> }
<add>
<add> BitSet bits() {
<add> return this.bits;
<add> }
<add>
<add> private void setBits(ValueRange range) {
<add> this.bits.set((int) range.getMinimum(), (int) range.getMaximum() + 1);
<add> }
<add>
<add> private void setBits(ValueRange range, int delta) {
<add> for (int i = (int) range.getMinimum(); i <= range.getMaximum(); i += delta) {
<add> this.bits.set(i);
<add> }
<add> }
<add>
<add> @Override
<add> public int hashCode() {
<add> return this.bits.hashCode();
<add> }
<add>
<add> @Override
<add> public boolean equals(Object o) {
<add> if (this == o) {
<add> return true;
<add> }
<add> if (!(o instanceof BitsCronField)) {
<add> return false;
<add> }
<add> BitsCronField other = (BitsCronField) o;
<add> return type() == other.type() &&
<add> this.bits.equals(other.bits);
<add> }
<add>
<add> @Override
<add> public String toString() {
<add> return type() + " " + this.bits;
<add> }
<add>
<add>}
<ide><path>spring-context/src/main/java/org/springframework/scheduling/support/CronExpression.java
<ide> private CronExpression(
<ide> String expression) {
<ide>
<ide> // to make sure we end up at 0 nanos, we add an extra field
<del> this.fields = new CronField[]{daysOfWeek, months, daysOfMonth, hours, minutes, seconds, CronField.zeroNanos()};
<add> this.fields = new CronField[]{CronField.zeroNanos(), seconds, minutes, hours, daysOfMonth, months, daysOfWeek};
<ide> this.expression = expression;
<ide> }
<ide>
<ide> private CronExpression(
<ide> * Ranges of numbers are expressed by two numbers separated with a hyphen
<ide> * ({@code -}). The specified range is inclusive.
<ide> * </li>
<del> * <li>Following a range (or {@code *}) with {@code "/n"} specifies
<del> * skips of the number's value through the range.
<add> * <li>Following a range (or {@code *}) with {@code /n} specifies
<add> * the interval of the number's value through the range.
<ide> * </li>
<ide> * <li>
<ide> * English names can also be used for the "month" and "day of week" fields.
<ide> * Use the first three letters of the particular day or month (case does not
<ide> * matter).
<ide> * </li>
<add> * <li>
<add> * The "day of month" and "day of week" fields can contain a
<add> * {@code L}-character, which stands for "last", and has a different meaning
<add> * in each field:
<add> * <ul>
<add> * <li>
<add> * In the "day of month" field, {@code L} stands for "the last day of the
<add>	 * month". If followed by a negative offset (i.e. {@code L-n}), it means
<add> * "{@code n}th-to-last day of the month". If followed by {@code W} (i.e.
<add> * {@code LW}), it means "the last weekday of the month".
<add> * </li>
<add> * <li>
<add> * In the "day of week" field, {@code L} stands for "the last day of the
<add> * week", and uses the
<add> * {@linkplain java.util.Locale#getDefault() system default locale}
<add> * to determine which day that is (i.e. Sunday or Saturday).
<add> * If prefixed by a number or three-letter name (i.e. {@code dL} or
<add> * {@code DDDL}), it means "the last day of week {@code d} (or {@code DDD})
<add> * in the month".
<add> * </li>
<add> * </ul>
<add> * </li>
<add> * <li>
<add> * The "day of month" field can be {@code nW}, which stands for "the nearest
<add> * weekday to day of the month {@code n}".
<add> * If {@code n} falls on Saturday, this yields the Friday before it.
<add> * If {@code n} falls on Sunday, this yields the Monday after,
<add> * which also happens if {@code n} is {@code 1} and falls on a Saturday
<add> * (i.e. {@code 1W} stands for "the first weekday of the month").
<add> * </li>
<add> * <li>
<add> * The "day of week" field can be {@code d#n} (or {@code DDD#n}), which
<add> * stands for "the {@code n}-th day of week {@code d} (or {@code DDD}) in
<add> * the month".
<add> * </li>
<ide> * </ul>
<ide> *
<ide> * <p>Example expressions:
<ide> private CronExpression(
<ide> * <li>{@code "0 0/30 8-10 * * *"} = 8:00, 8:30, 9:00, 9:30, 10:00 and 10:30 every day.</li>
<ide> * <li>{@code "0 0 9-17 * * MON-FRI"} = on the hour nine-to-five weekdays</li>
<ide> * <li>{@code "0 0 0 25 12 ?"} = every Christmas Day at midnight</li>
<add> * <li>{@code "0 0 0 L * *"} = last day of the month at midnight</li>
<add> * <li>{@code "0 0 0 L-3 * *"} = third-to-last day of the month at midnight</li>
<add> * <li>{@code "0 0 0 1W * *"} = first weekday of the month at midnight</li>
<add> * <li>{@code "0 0 0 LW * *"} = last weekday of the month at midnight</li>
<add> * <li>{@code "0 0 0 * * L"} = last day of the week at midnight</li>
<add> * <li>{@code "0 0 0 * * 5L"} = last Friday of the month at midnight</li>
<add> * <li>{@code "0 0 0 * * THUL"} = last Thursday of the month at midnight</li>
<add> * <li>{@code "0 0 0 ? * 5#2"} = the second Friday in the month at midnight</li>
<add> * <li>{@code "0 0 0 ? * MON#1"} = the first Monday in the month at midnight</li>
<ide> * </ul>
<ide> *
<ide> * <p>The following macros are also supported:
<ide> private static String resolveMacros(String expression) {
<ide> * if no such temporal can be found
<ide> */
<ide> @Nullable
<del> public <T extends Temporal> T next(T temporal) {
<add> public <T extends Temporal & Comparable<? super T>> T next(T temporal) {
<ide> return nextOrSame(ChronoUnit.NANOS.addTo(temporal, 1));
<ide> }
<ide>
<ide>
<ide> @Nullable
<del> private <T extends Temporal> T nextOrSame(T temporal) {
<add> private <T extends Temporal & Comparable<? super T>> T nextOrSame(T temporal) {
<ide> for (int i = 0; i < MAX_ATTEMPTS; i++) {
<ide> T result = nextOrSameInternal(temporal);
<ide> if (result == null || result.equals(temporal)) {
<ide> private <T extends Temporal> T nextOrSame(T temporal) {
<ide> }
<ide>
<ide> @Nullable
<del> private <T extends Temporal> T nextOrSameInternal(T temporal) {
<add> private <T extends Temporal & Comparable<? super T>> T nextOrSameInternal(T temporal) {
<ide> for (CronField field : this.fields) {
<ide> temporal = field.nextOrSame(temporal);
<ide> if (temporal == null) {
<ide><path>spring-context/src/main/java/org/springframework/scheduling/support/CronField.java
<ide> import java.time.temporal.ChronoField;
<ide> import java.time.temporal.Temporal;
<ide> import java.time.temporal.ValueRange;
<del>import java.util.BitSet;
<ide>
<ide> import org.springframework.lang.Nullable;
<del>import org.springframework.util.Assert;
<ide> import org.springframework.util.StringUtils;
<ide>
<ide> /**
<del> * A single field in a cron pattern. Created using the {@code parse*} methods,
<add> * Single field in a cron pattern. Created using the {@code parse*} methods,
<ide> * main and only entry point is {@link #nextOrSame(Temporal)}.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.3
<ide> */
<del>final class CronField {
<add>abstract class CronField {
<ide>
<ide> private static final String[] MONTHS = new String[]{"JAN", "FEB", "MAR", "APR", "MAY", "JUN", "JUL", "AUG", "SEP",
<ide> "OCT", "NOV", "DEC"};
<ide>
<ide> private static final String[] DAYS = new String[]{"MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"};
<ide>
<del> private static final CronField ZERO_NANOS;
<del>
<del>
<del> static {
<del> ZERO_NANOS = new CronField(Type.NANO);
<del> ZERO_NANOS.bits.set(0);
<del> }
<del>
<del>
<ide> private final Type type;
<ide>
<del> private final BitSet bits;
<del>
<ide>
<del> private CronField(Type type) {
<add> protected CronField(Type type) {
<ide> this.type = type;
<del> this.bits = new BitSet((int) type.range().getMaximum());
<ide> }
<ide>
<del>
<ide> /**
<ide> * Return a {@code CronField} enabled for 0 nano seconds.
<ide> */
<ide> public static CronField zeroNanos() {
<del> return ZERO_NANOS;
<add> return BitsCronField.zeroNanos();
<ide> }
<ide>
<ide> /**
<ide> * Parse the given value into a seconds {@code CronField}, the first entry of a cron expression.
<ide> */
<ide> public static CronField parseSeconds(String value) {
<del> return parseField(value, Type.SECOND);
<add> return BitsCronField.parseSeconds(value);
<ide> }
<ide>
<ide> /**
<ide> * Parse the given value into a minutes {@code CronField}, the second entry of a cron expression.
<ide> */
<ide> public static CronField parseMinutes(String value) {
<del> return parseField(value, Type.MINUTE);
<add> return BitsCronField.parseMinutes(value);
<ide> }
<ide>
<ide> /**
<ide> * Parse the given value into a hours {@code CronField}, the third entry of a cron expression.
<ide> */
<ide> public static CronField parseHours(String value) {
<del> return parseField(value, Type.HOUR);
<add> return BitsCronField.parseHours(value);
<ide> }
<ide>
<ide> /**
<ide> * Parse the given value into a days of months {@code CronField}, the fourth entry of a cron expression.
<ide> */
<ide> public static CronField parseDaysOfMonth(String value) {
<del> return parseDate(value, Type.DAY_OF_MONTH);
<add> if (value.contains("L") || value.contains("W")) {
<add> return QuartzCronField.parseDaysOfMonth(value);
<add> }
<add> else {
<add> return BitsCronField.parseDaysOfMonth(value);
<add> }
<ide> }
<ide>
<ide> /**
<ide> * Parse the given value into a month {@code CronField}, the fifth entry of a cron expression.
<ide> */
<ide> public static CronField parseMonth(String value) {
<ide> value = replaceOrdinals(value, MONTHS);
<del> return parseField(value, Type.MONTH);
<add> return BitsCronField.parseMonth(value);
<ide> }
<ide>
<ide> /**
<ide> * Parse the given value into a days of week {@code CronField}, the sixth entry of a cron expression.
<ide> */
<ide> public static CronField parseDaysOfWeek(String value) {
<ide> value = replaceOrdinals(value, DAYS);
<del> CronField result = parseDate(value, Type.DAY_OF_WEEK);
<del> if (result.bits.get(0)) {
<del> // cron supports 0 for Sunday; we use 7 like java.time
<del> result.bits.set(7);
<del> result.bits.clear(0);
<del> }
<del> return result;
<del> }
<del>
<del>
<del> private static CronField parseDate(String value, Type type) {
<del> if (value.indexOf('?') != -1) {
<del> value = "*";
<del> }
<del> return parseField(value, type);
<del> }
<del>
<del> private static CronField parseField(String value, Type type) {
<del> Assert.hasLength(value, "Value must not be empty");
<del> Assert.notNull(type, "Type must not be null");
<del> try {
<del> CronField result = new CronField(type);
<del> String[] fields = StringUtils.delimitedListToStringArray(value, ",");
<del> for (String field : fields) {
<del> int slashPos = field.indexOf('/');
<del> if (slashPos == -1) {
<del> ValueRange range = parseRange(field, type);
<del> result.setBits(range);
<del> }
<del> else {
<del> String rangeStr = value.substring(0, slashPos);
<del> String deltaStr = value.substring(slashPos + 1);
<del> ValueRange range = parseRange(rangeStr, type);
<del> if (rangeStr.indexOf('-') == -1) {
<del> range = ValueRange.of(range.getMinimum(), type.range().getMaximum());
<del> }
<del> int delta = Integer.parseInt(deltaStr);
<del> if (delta <= 0) {
<del> throw new IllegalArgumentException("Incrementer delta must be 1 or higher");
<del> }
<del> result.setBits(range, delta);
<del> }
<del> }
<del> return result;
<del> }
<del> catch (DateTimeException | IllegalArgumentException ex) {
<del> String msg = ex.getMessage() + " '" + value + "'";
<del> throw new IllegalArgumentException(msg, ex);
<del> }
<del> }
<del>
<del> private static ValueRange parseRange(String value, Type type) {
<del> if (value.indexOf('*') != -1) {
<del> return type.range();
<add> if (value.contains("L") || value.contains("#")) {
<add> return QuartzCronField.parseDaysOfWeek(value);
<ide> }
<ide> else {
<del> int hyphenPos = value.indexOf('-');
<del> if (hyphenPos == -1) {
<del> int result = type.checkValidValue(Integer.parseInt(value));
<del> return ValueRange.of(result, result);
<del> }
<del> else {
<del> int min = Integer.parseInt(value.substring(0, hyphenPos));
<del> int max = Integer.parseInt(value.substring(hyphenPos + 1));
<del> min = type.checkValidValue(min);
<del> max = type.checkValidValue(max);
<del> return ValueRange.of(min, max);
<del> }
<add> return BitsCronField.parseDaysOfWeek(value);
<ide> }
<ide> }
<ide>
<add>
<ide> private static String replaceOrdinals(String value, String[] list) {
<ide> value = value.toUpperCase();
<ide> for (int i = 0; i < list.length; i++) {
<ide> private static String replaceOrdinals(String value, String[] list) {
<ide> * @return the next or same temporal matching the pattern
<ide> */
<ide> @Nullable
<del> public <T extends Temporal> T nextOrSame(T temporal) {
<del> int current = this.type.get(temporal);
<del> int next = this.bits.nextSetBit(current);
<del> if (next == -1) {
<del> temporal = this.type.rollForward(temporal);
<del> next = this.bits.nextSetBit(0);
<del> }
<del> if (next == current) {
<del> return temporal;
<del> }
<del> else {
<del> int count = 0;
<del> current = this.type.get(temporal);
<del> while (current != next && count++ < CronExpression.MAX_ATTEMPTS) {
<del> temporal = this.type.elapseUntil(temporal, next);
<del> current = this.type.get(temporal);
<del> }
<del> if (count >= CronExpression.MAX_ATTEMPTS) {
<del> return null;
<del> }
<del> return this.type.reset(temporal);
<del> }
<del> }
<del>
<add> public abstract <T extends Temporal & Comparable<? super T>> T nextOrSame(T temporal);
<ide>
<del> BitSet bits() {
<del> return this.bits;
<del> }
<del>
<del> private void setBits(ValueRange range) {
<del> this.bits.set((int) range.getMinimum(), (int) range.getMaximum() + 1);
<del> }
<del>
<del> private void setBits(ValueRange range, int delta) {
<del> for (int i = (int) range.getMinimum(); i <= range.getMaximum(); i += delta) {
<del> this.bits.set(i);
<del> }
<del> }
<del>
<del>
<del> @Override
<del> public int hashCode() {
<del> return this.bits.hashCode();
<del> }
<del>
<del> @Override
<del> public boolean equals(Object o) {
<del> if (this == o) {
<del> return true;
<del> }
<del> if (!(o instanceof CronField)) {
<del> return false;
<del> }
<del> CronField other = (CronField) o;
<del> return this.type == other.type &&
<del> this.bits.equals(other.bits);
<del> }
<ide>
<del> @Override
<del> public String toString() {
<del> return this.type + " " + this.bits;
<add> protected Type type() {
<add> return this.type;
<ide> }
<ide>
<ide>
<ide> /**
<ide> * Represents the type of cron field, i.e. seconds, minutes, hours,
<ide> * day-of-month, month, day-of-week.
<ide> */
<del> private enum Type {
<add> protected enum Type {
<ide> NANO(ChronoField.NANO_OF_SECOND),
<ide> SECOND(ChronoField.SECOND_OF_MINUTE, ChronoField.NANO_OF_SECOND),
<ide> MINUTE(ChronoField.MINUTE_OF_HOUR, ChronoField.SECOND_OF_MINUTE, ChronoField.NANO_OF_SECOND),
<ide> public int checkValidValue(int value) {
<ide> * @return the elapsed temporal, typically with {@code goal} as value
<ide> * for this type.
<ide> */
<del> public <T extends Temporal> T elapseUntil(T temporal, int goal) {
<add> public <T extends Temporal & Comparable<? super T>> T elapseUntil(T temporal, int goal) {
<ide> int current = get(temporal);
<ide> if (current < goal) {
<ide> return this.field.getBaseUnit().addTo(temporal, goal - current);
<ide> public <T extends Temporal> T elapseUntil(T temporal, int goal) {
<ide> * @param <T> the type of temporal
<ide> * @return the rolled forward temporal
<ide> */
<del> public <T extends Temporal> T rollForward(T temporal) {
<add> public <T extends Temporal & Comparable<? super T>> T rollForward(T temporal) {
<ide> int current = get(temporal);
<ide> ValueRange range = temporal.range(this.field);
<ide> long amount = range.getMaximum() - current + 1;
<ide><path>spring-context/src/main/java/org/springframework/scheduling/support/QuartzCronField.java
<add>/*
<add> * Copyright 2002-2020 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * https://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.scheduling.support;
<add>
<add>import java.time.DateTimeException;
<add>import java.time.DayOfWeek;
<add>import java.time.temporal.ChronoField;
<add>import java.time.temporal.ChronoUnit;
<add>import java.time.temporal.Temporal;
<add>import java.time.temporal.TemporalAdjuster;
<add>import java.time.temporal.TemporalAdjusters;
<add>import java.time.temporal.TemporalField;
<add>import java.time.temporal.WeekFields;
<add>import java.util.Locale;
<add>
<add>import org.springframework.lang.Nullable;
<add>import org.springframework.util.Assert;
<add>
<add>/**
<add> * Extension of {@link CronField} for
<add> * <a href="https://www.quartz-scheduler.org">Quartz</a>-specific fields.
<add> * Created using the {@code parse*} methods, uses a {@link TemporalAdjuster}
<add> * internally.
<add> *
<add> * @author Arjen Poutsma
<add> * @since 5.3
<add> */
<add>final class QuartzCronField extends CronField {
<add>
<add> /**
<add> * Temporal adjuster that returns the last weekday of the month.
<add> */
<add> private static final TemporalAdjuster lastWeekdayOfMonth = temporal -> {
<add> Temporal lastDayOfMonth = TemporalAdjusters.lastDayOfMonth().adjustInto(temporal);
<add> int dayOfWeek = lastDayOfMonth.get(ChronoField.DAY_OF_WEEK);
<add> if (dayOfWeek == 6) { // Saturday
<add> return lastDayOfMonth.minus(1, ChronoUnit.DAYS);
<add> }
<add> else if (dayOfWeek == 7) { // Sunday
<add> return lastDayOfMonth.minus(2, ChronoUnit.DAYS);
<add> }
<add> else {
<add> return lastDayOfMonth;
<add> }
<add> };
<add>
<add>
<add> private final Type rollForwardType;
<add>
<add> private final TemporalAdjuster adjuster;
<add>
<add> private final String value;
<add>
<add>
<add> private QuartzCronField(Type type, TemporalAdjuster adjuster, String value) {
<add> this(type, type, adjuster, value);
<add> }
<add>
<add> /**
<add> * Constructor for fields that need to roll forward over a different type
<add> * than the type this field represents. See {@link #parseDaysOfWeek(String)}.
<add> */
<add> private QuartzCronField(Type type, Type rollForwardType, TemporalAdjuster adjuster, String value) {
<add> super(type);
<add> this.adjuster = adjuster;
<add> this.value = value;
<add> this.rollForwardType = rollForwardType;
<add> }
<add>
<add>
<add> /**
<add> * Parse the given value into a days of months {@code QuartzCronField}, the fourth entry of a cron expression.
<add> * Expects a "L" or "W" in the given value.
<add> */
<add> public static QuartzCronField parseDaysOfMonth(String value) {
<add> int idx = value.lastIndexOf('L');
<add> if (idx != -1) {
<add> TemporalAdjuster adjuster;
<add> if (idx != 0) {
<add> throw new IllegalArgumentException("Unrecognized characters before 'L' in '" + value + "'");
<add> }
<add> else if (value.length() == 2 && value.charAt(1) == 'W') { // "LW"
<add> adjuster = lastWeekdayOfMonth;
<add> }
<add> else {
<add> if (value.length() == 1) { // "L"
<add> adjuster = TemporalAdjusters.lastDayOfMonth();
<add> }
<add> else { // "L-[0-9]+"
<add> int offset = Integer.parseInt(value.substring(idx + 1));
<add> if (offset >= 0) {
<add> throw new IllegalArgumentException("Offset '" + offset + " should be < 0 '" + value + "'");
<add> }
<add> adjuster = lastDayWithOffset(offset);
<add> }
<add> }
<add> return new QuartzCronField(Type.DAY_OF_MONTH, adjuster, value);
<add> }
<add> idx = value.lastIndexOf('W');
<add> if (idx != -1) {
<add> if (idx == 0) {
<add> throw new IllegalArgumentException("No day-of-month before 'W' in '" + value + "'");
<add> }
<add> else if (idx != value.length() - 1) {
<add> throw new IllegalArgumentException("Unrecognized characters after 'W' in '" + value + "'");
<add> }
<add> else { // "[0-9]+W"
<add> int dayOfMonth = Integer.parseInt(value.substring(0, idx));
<add> dayOfMonth = Type.DAY_OF_MONTH.checkValidValue(dayOfMonth);
<add> TemporalAdjuster adjuster = weekdayNearestTo(dayOfMonth);
<add> return new QuartzCronField(Type.DAY_OF_MONTH, adjuster, value);
<add> }
<add> }
<add> throw new IllegalArgumentException("No 'L' or 'W' found in '" + value + "'");
<add> }
<add>
<add> /**
<add> * Parse the given value into a days of week {@code QuartzCronField}, the sixth entry of a cron expression.
<add> * Expects a "L" or "#" in the given value.
<add> */
<add> public static QuartzCronField parseDaysOfWeek(String value) {
<add> int idx = value.lastIndexOf('L');
<add> if (idx != -1) {
<add> if (idx != value.length() - 1) {
<add> throw new IllegalArgumentException("Unrecognized characters after 'L' in '" + value + "'");
<add> }
<add> else {
<add> TemporalAdjuster adjuster;
<add> if (idx == 0) { // "L"
<add> adjuster = lastDayOfWeek(Locale.getDefault());
<add> }
<add> else { // "[0-7]L"
<add> DayOfWeek dayOfWeek = parseDayOfWeek(value.substring(0, idx));
<add> adjuster = TemporalAdjusters.lastInMonth(dayOfWeek);
<add> }
<add> return new QuartzCronField(Type.DAY_OF_WEEK, Type.DAY_OF_MONTH, adjuster, value);
<add> }
<add> }
<add> idx = value.lastIndexOf('#');
<add> if (idx != -1) {
<add> if (idx == 0) {
<add> throw new IllegalArgumentException("No day-of-week before '#' in '" + value + "'");
<add> }
<add> else if (idx == value.length() - 1) {
<add> throw new IllegalArgumentException("No ordinal after '#' in '" + value + "'");
<add> }
<add> // "[0-7]#[0-9]+"
<add> DayOfWeek dayOfWeek = parseDayOfWeek(value.substring(0, idx));
<add> int ordinal = Integer.parseInt(value.substring(idx + 1));
<add>
<add> TemporalAdjuster adjuster = TemporalAdjusters.dayOfWeekInMonth(ordinal, dayOfWeek);
<add> return new QuartzCronField(Type.DAY_OF_WEEK, Type.DAY_OF_MONTH, adjuster, value);
<add> }
<add> throw new IllegalArgumentException("No 'L' or '#' found in '" + value + "'");
<add> }
<add>
<add>
<add> private static DayOfWeek parseDayOfWeek(String value) {
<add> int dayOfWeek = Integer.parseInt(value);
<add> if (dayOfWeek == 0) {
<add> dayOfWeek = 7; // cron is 0 based; java.time 1 based
<add> }
<add> try {
<add> return DayOfWeek.of(dayOfWeek);
<add> }
<add> catch (DateTimeException ex) {
<add> String msg = ex.getMessage() + " '" + value + "'";
<add> throw new IllegalArgumentException(msg, ex);
<add> }
<add> }
<add>
<add> /**
<add> * Return a temporal adjuster that finds the nth-to-last day of the month.
<add> * @param offset the negative offset, i.e. -3 means third-to-last
<add> * @return a nth-to-last day-of-month adjuster
<add> */
<add> private static TemporalAdjuster lastDayWithOffset(int offset) {
<add> Assert.isTrue(offset < 0, "Offset should be < 0");
<add> return temporal -> {
<add> Temporal lastDayOfMonth = TemporalAdjusters.lastDayOfMonth().adjustInto(temporal);
<add> return lastDayOfMonth.plus(offset, ChronoUnit.DAYS);
<add> };
<add> }
<add>
<add> /**
<add> * Return a temporal adjuster that finds the last day-of-week, depending
<add> * on the given locale.
<add> * @param locale the locale to base the last day calculation on
<add> * @return the last day-of-week adjuster
<add> */
<add> private static TemporalAdjuster lastDayOfWeek(Locale locale) {
<add> Assert.notNull(locale, "Locale must not be null");
<add> TemporalField dayOfWeek = WeekFields.of(locale).dayOfWeek();
<add> return temporal -> temporal.with(dayOfWeek, 7);
<add> }
<add>
<add> /**
<add> * Return a temporal adjuster that finds the weekday nearest to the given
<add> * day-of-month. If {@code dayOfMonth} falls on a Saturday, the date is
<add> * moved back to Friday; if it falls on a Sunday (or if {@code dayOfMonth}
<add> * is 1 and it falls on a Saturday), it is moved forward to Monday.
<add> * @param dayOfMonth the goal day-of-month
<add> * @return the weekday-nearest-to adjuster
<add> */
<add> private static TemporalAdjuster weekdayNearestTo(int dayOfMonth) {
<add> return temporal -> {
<add> int current = Type.DAY_OF_MONTH.get(temporal);
<add> int dayOfWeek = temporal.get(ChronoField.DAY_OF_WEEK);
<add>
<add> if ((current == dayOfMonth && dayOfWeek < 6) || // dayOfMonth is a weekday
<add> (dayOfWeek == 5 && current == dayOfMonth - 1) || // dayOfMonth is a Saturday, so Friday before
<add> (dayOfWeek == 1 && current == dayOfMonth + 1) || // dayOfMonth is a Sunday, so Monday after
<add> (dayOfWeek == 1 && dayOfMonth == 1 && current == 3)) { // dayOfMonth is the 1st, so Monday 3rd
<add> return temporal;
<add> }
<add> int count = 0;
<add> while (count++ < CronExpression.MAX_ATTEMPTS) {
<add> temporal = Type.DAY_OF_MONTH.elapseUntil(cast(temporal), dayOfMonth);
<add> current = Type.DAY_OF_MONTH.get(temporal);
<add> if (current == dayOfMonth) {
<add> dayOfWeek = temporal.get(ChronoField.DAY_OF_WEEK);
<add>
<add> if (dayOfWeek == 6) { // Saturday
<add> if (dayOfMonth != 1) {
<add> return temporal.minus(1, ChronoUnit.DAYS);
<add> }
<add> else {
<add> // exception for "1W" fields: execute on nearest Monday
<add> return temporal.plus(2, ChronoUnit.DAYS);
<add> }
<add> }
<add> else if (dayOfWeek == 7) { // Sunday
<add> return temporal.plus(1, ChronoUnit.DAYS);
<add> }
<add> else {
<add> return temporal;
<add> }
<add> }
<add> }
<add> return null;
<add> };
<add> }
<add>
<add> @SuppressWarnings("unchecked")
<add> private static <T extends Temporal & Comparable<? super T>> T cast(Temporal temporal) {
<add> return (T) temporal;
<add> }
<add>
<add>
<add> @Override
<add> public <T extends Temporal & Comparable<? super T>> T nextOrSame(T temporal) {
<add> T result = adjust(temporal);
<add> if (result != null) {
<add> if (result.compareTo(temporal) < 0) {
<add> // We ended up before the start, roll forward and try again
<add> temporal = this.rollForwardType.rollForward(temporal);
<add> result = adjust(temporal);
<add> }
<add> }
<add> return result;
<add> }
<add>
<add>
<add> @Nullable
<add> @SuppressWarnings("unchecked")
<add> private <T extends Temporal & Comparable<? super T>> T adjust(T temporal) {
<add> return (T) this.adjuster.adjustInto(temporal);
<add> }
<add>
<add>
<add> @Override
<add> public int hashCode() {
<add> return this.value.hashCode();
<add> }
<add>
<add> @Override
<add> public boolean equals(Object o) {
<add> if (this == o) {
<add> return true;
<add> }
<add> if (!(o instanceof QuartzCronField)) {
<add> return false;
<add> }
<add> QuartzCronField other = (QuartzCronField) o;
<add> return type() == other.type() &&
<add> this.value.equals(other.value);
<add> }
<add>
<add> @Override
<add> public String toString() {
<add> return type() + " '" + this.value + "'";
<add>
<add> }
<add>
<add>}
<ide><path>spring-context/src/test/java/org/springframework/scheduling/support/BitsCronFieldTests.java
<add>/*
<add> * Copyright 2002-2020 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * https://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.scheduling.support;
<add>
<add>import org.junit.jupiter.api.Test;
<add>
<add>import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
<add>import static org.springframework.scheduling.support.BitSetAssert.assertThat;
<add>
<add>/**
<add> * @author Arjen Poutsma
<add> */
<add>public class BitsCronFieldTests {
<add>
<add> @Test
<add> void parse() {
<add> assertThat(BitsCronField.parseSeconds("42").bits()).hasUnsetRange(0, 41).hasSet(42).hasUnsetRange(43, 59);
<add> assertThat(BitsCronField.parseMinutes("1,2,5,9").bits()).hasUnset(0).hasSet(1, 2).hasUnset(3,4).hasSet(5).hasUnsetRange(6,8).hasSet(9).hasUnsetRange(10,59);
<add> assertThat(BitsCronField.parseSeconds("0-4,8-12").bits()).hasSetRange(0, 4).hasUnsetRange(5,7).hasSetRange(8, 12).hasUnsetRange(13,59);
<add> assertThat(BitsCronField.parseHours("0-23/2").bits()).hasSet(0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22).hasUnset(1,3,5,7,9,11,13,15,17,19,21,23);
<add> assertThat(BitsCronField.parseDaysOfWeek("0").bits()).hasUnsetRange(0, 6).hasSet(7, 7);
<add> assertThat(BitsCronField.parseSeconds("57/2").bits()).hasUnsetRange(0, 56).hasSet(57).hasUnset(58).hasSet(59);
<add> }
<add>
<add> @Test
<add> void invalidRange() {
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseSeconds(""));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseSeconds("0-12/0"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseSeconds("60"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseMinutes("60"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseDaysOfMonth("0"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseDaysOfMonth("32"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseMonth("0"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseMonth("13"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseDaysOfWeek("8"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> BitsCronField.parseSeconds("20-10"));
<add> }
<add>
<add> @Test
<add> void parseWildcards() {
<add> assertThat(BitsCronField.parseSeconds("*").bits()).hasSetRange(0, 60);
<add> assertThat(BitsCronField.parseMinutes("*").bits()).hasSetRange(0, 60);
<add> assertThat(BitsCronField.parseHours("*").bits()).hasSetRange(0, 23);
<add> assertThat(BitsCronField.parseDaysOfMonth("*").bits()).hasUnset(0).hasSetRange(1, 31);
<add> assertThat(BitsCronField.parseDaysOfMonth("?").bits()).hasUnset(0).hasSetRange(1, 31);
<add> assertThat(BitsCronField.parseMonth("*").bits()).hasUnset(0).hasSetRange(1, 12);
<add> assertThat(BitsCronField.parseDaysOfWeek("*").bits()).hasUnset(0).hasSetRange(1, 7);
<add> assertThat(BitsCronField.parseDaysOfWeek("?").bits()).hasUnset(0).hasSetRange(1, 7);
<add> }
<add>
<add> @Test
<add> void names() {
<add> assertThat(((BitsCronField)CronField.parseMonth("JAN,FEB,MAR,APR,MAY,JUN,JUL,AUG,SEP,OCT,NOV,DEC")).bits())
<add> .hasUnset(0).hasSetRange(1, 12);
<add> assertThat(((BitsCronField)CronField.parseDaysOfWeek("SUN,MON,TUE,WED,THU,FRI,SAT")).bits())
<add> .hasUnset(0).hasSetRange(1, 7);
<add> }
<add>
<add>}
<ide><path>spring-context/src/test/java/org/springframework/scheduling/support/CronExpressionTests.java
<ide> import java.time.Year;
<ide> import java.time.ZoneId;
<ide> import java.time.ZonedDateTime;
<add>import java.time.temporal.ChronoField;
<add>import java.time.temporal.Temporal;
<add>import java.util.Locale;
<ide>
<add>import org.assertj.core.api.Condition;
<ide> import org.junit.jupiter.api.Test;
<ide>
<ide> import static java.time.DayOfWeek.FRIDAY;
<ide> import static java.time.DayOfWeek.MONDAY;
<add>import static java.time.DayOfWeek.SATURDAY;
<ide> import static java.time.DayOfWeek.SUNDAY;
<ide> import static java.time.DayOfWeek.TUESDAY;
<ide> import static java.time.DayOfWeek.WEDNESDAY;
<ide> */
<ide> class CronExpressionTests {
<ide>
<add> private static final Condition<Temporal> weekday = new Condition<Temporal>("weekday") {
<add>
<add> @Override
<add> public boolean matches(Temporal value) {
<add> int dayOfWeek = value.get(ChronoField.DAY_OF_WEEK);
<add> return dayOfWeek != 6 && dayOfWeek != 7;
<add> }
<add> };
<add>
<add>
<ide> @Test
<ide> void matchAll() {
<ide> CronExpression expression = CronExpression.parse("* * * * * *");
<ide> void hourly() {
<ide> }
<ide>
<ide>
<add> @Test
<add> void quartzLastDayOfMonth() {
<add> CronExpression expression = CronExpression.parse("0 0 0 L * *");
<add>
<add> LocalDateTime last = LocalDateTime.of(LocalDate.of(2008, 1, 4), LocalTime.now());
<add> LocalDateTime expected = LocalDateTime.of(2008, 1, 31, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 2, 29, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 3, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 4, 30, 0, 0);
<add> assertThat(expression.next(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void quartzLastDayOfMonthOffset() {
<add> // L-3 = third-to-last day of the month
<add> CronExpression expression = CronExpression.parse("0 0 0 L-3 * *");
<add>
<add> LocalDateTime last = LocalDateTime.of(LocalDate.of(2008, 1, 4), LocalTime.now());
<add> LocalDateTime expected = LocalDateTime.of(2008, 1, 28, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 2, 26, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 3, 28, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 4, 27, 0, 0);
<add> assertThat(expression.next(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void quartzLastWeekdayOfMonth() {
<add> CronExpression expression = CronExpression.parse("0 0 0 LW * *");
<add>
<add> LocalDateTime last = LocalDateTime.of(LocalDate.of(2008, 1, 4), LocalTime.now());
<add> LocalDateTime expected = LocalDateTime.of(2008, 1, 31, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 2, 29, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 3, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 4, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 5, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 6, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 7, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 8, 29, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 9, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 10, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 11, 28, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 12, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add> }
<add>
<add> @Test
<add> public void quartzLastDayOfWeekFirstDayMonday() {
<add> Locale defaultLocale = Locale.getDefault();
<add> try {
<add> Locale.setDefault(Locale.UK);
<add>
<add> CronExpression expression = CronExpression.parse("0 0 0 * * L");
<add>
<add> LocalDateTime last = LocalDateTime.of(LocalDate.of(2008, 1, 4), LocalTime.now());
<add> LocalDateTime expected = LocalDateTime.of(2008, 1, 6, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(SUNDAY);
<add>
<add> last = actual;
<add> expected = expected.plusWeeks(1);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(SUNDAY);
<add> }
<add> finally {
<add> Locale.setDefault(defaultLocale);
<add> }
<add> }
<add>
<add> @Test
<add> public void quartzLastDayOfWeekFirstDaySunday() {
<add> Locale defaultLocale = Locale.getDefault();
<add> try {
<add> Locale.setDefault(Locale.US);
<add>
<add> CronExpression expression = CronExpression.parse("0 0 0 * * L");
<add>
<add> LocalDateTime last = LocalDateTime.of(LocalDate.of(2008, 1, 4), LocalTime.now());
<add> LocalDateTime expected = LocalDateTime.of(2008, 1, 5, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(SATURDAY);
<add>
<add> last = actual;
<add> expected = expected.plusWeeks(1);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(SATURDAY);
<add> }
<add> finally {
<add> Locale.setDefault(defaultLocale);
<add> }
<add> }
<add>
<add> @Test
<add> public void quartzLastDayOfWeekOffset() {
<add> // last Friday (5) of the month
<add> CronExpression expression = CronExpression.parse("0 0 0 * * 5L");
<add>
<add> LocalDateTime last = LocalDateTime.of(LocalDate.of(2008, 1, 4), LocalTime.now());
<add> LocalDateTime expected = LocalDateTime.of(2008, 1, 25, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 2, 29, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 3, 28, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 4, 25, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 5, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 6, 27, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 7, 25, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 8, 29, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 9, 26, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 10, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 11, 28, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2008, 12, 26, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add> }
<add>
<add> @Test
<add> void quartzWeekdayNearestTo15() {
<add> CronExpression expression = CronExpression.parse("0 0 0 15W * ?");
<add>
<add> LocalDateTime last = LocalDateTime.of(2020, 1, 1, 0, 0);
<add> LocalDateTime expected = LocalDateTime.of(2020, 1, 15, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 2, 14, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 3, 16, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 4, 15, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add> }
<add>
<add> @Test
<add> void quartzWeekdayNearestTo1() {
<add> CronExpression expression = CronExpression.parse("0 0 0 1W * ?");
<add>
<add> LocalDateTime last = LocalDateTime.of(2019, 12, 31, 0, 0);
<add> LocalDateTime expected = LocalDateTime.of(2020, 1, 1, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 2, 3, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 3, 2, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 4, 1, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add> }
<add>
<add> @Test
<add> void quartzWeekdayNearestTo31() {
<add> CronExpression expression = CronExpression.parse("0 0 0 31W * ?");
<add>
<add> LocalDateTime last = LocalDateTime.of(2020, 1, 1, 0, 0);
<add> LocalDateTime expected = LocalDateTime.of(2020, 1, 31, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 3, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 7, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 8, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 10, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 12, 31, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual).is(weekday);
<add> }
<add>
<add> @Test
<add> void quartz2ndFridayOfTheMonth() {
<add> CronExpression expression = CronExpression.parse("0 0 0 ? * 5#2");
<add>
<add> LocalDateTime last = LocalDateTime.of(2020, 1, 1, 0, 0);
<add> LocalDateTime expected = LocalDateTime.of(2020, 1, 10, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 2, 14, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 3, 13, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 4, 10, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add> }
<add>
<add> @Test
<add> void quartz2ndFridayOfTheMonthDayName() {
<add> CronExpression expression = CronExpression.parse("0 0 0 ? * FRI#2");
<add>
<add> LocalDateTime last = LocalDateTime.of(2020, 1, 1, 0, 0);
<add> LocalDateTime expected = LocalDateTime.of(2020, 1, 10, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 2, 14, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 3, 13, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 4, 10, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(FRIDAY);
<add> }
<add>
<add> @Test
<add> void quartzFifthWednesdayOfTheMonth() {
<add> CronExpression expression = CronExpression.parse("0 0 0 ? * 3#5");
<add>
<add> LocalDateTime last = LocalDateTime.of(2020, 1, 1, 0, 0);
<add> LocalDateTime expected = LocalDateTime.of(2020, 1, 29, 0, 0);
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(WEDNESDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 4, 29, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(WEDNESDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 7, 29, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(WEDNESDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 9, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(WEDNESDAY);
<add>
<add> last = actual;
<add> expected = LocalDateTime.of(2020, 12, 30, 0, 0);
<add> actual = expression.next(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(WEDNESDAY);
<add> }
<ide> }
<ide><path>spring-context/src/test/java/org/springframework/scheduling/support/CronFieldTests.java
<del>/*
<del> * Copyright 2002-2020 the original author or authors.
<del> *
<del> * Licensed under the Apache License, Version 2.0 (the "License");
<del> * you may not use this file except in compliance with the License.
<del> * You may obtain a copy of the License at
<del> *
<del> * https://www.apache.org/licenses/LICENSE-2.0
<del> *
<del> * Unless required by applicable law or agreed to in writing, software
<del> * distributed under the License is distributed on an "AS IS" BASIS,
<del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<del> * See the License for the specific language governing permissions and
<del> * limitations under the License.
<del> */
<del>
<del>package org.springframework.scheduling.support;
<del>
<del>import org.junit.jupiter.api.Test;
<del>
<del>import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
<del>import static org.springframework.scheduling.support.BitSetAssert.assertThat;
<del>
<del>/**
<del> * @author Arjen Poutsma
<del> */
<del>public class CronFieldTests {
<del>
<del> @Test
<del> void parse() {
<del> assertThat(CronField.parseSeconds("42").bits()).hasUnsetRange(0, 41).hasSet(42).hasUnsetRange(43, 59);
<del> assertThat(CronField.parseMinutes("1,2,5,9").bits()).hasUnset(0).hasSet(1, 2).hasUnset(3,4).hasSet(5).hasUnsetRange(6,8).hasSet(9).hasUnsetRange(10,59);
<del> assertThat(CronField.parseSeconds("0-4,8-12").bits()).hasSetRange(0, 4).hasUnsetRange(5,7).hasSetRange(8, 12).hasUnsetRange(13,59);
<del> assertThat(CronField.parseHours("0-23/2").bits()).hasSet(0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22).hasUnset(1,3,5,7,9,11,13,15,17,19,21,23);
<del> assertThat(CronField.parseDaysOfWeek("0").bits()).hasUnsetRange(0, 6).hasSet(7, 7);
<del> assertThat(CronField.parseSeconds("57/2").bits()).hasUnsetRange(0, 56).hasSet(57).hasUnset(58).hasSet(59);
<del> }
<del>
<del> @Test
<del> void invalidRange() {
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseSeconds(""));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseSeconds("0-12/0"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseSeconds("60"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseMinutes("60"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseDaysOfMonth("0"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseDaysOfMonth("32"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseMonth("0"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseMonth("13"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseDaysOfWeek("8"));
<del> assertThatIllegalArgumentException().isThrownBy(() -> CronField.parseSeconds("20-10"));
<del> }
<del>
<del> @Test
<del> void parseWildcards() {
<del> assertThat(CronField.parseSeconds("*").bits()).hasSetRange(0, 60);
<del> assertThat(CronField.parseMinutes("*").bits()).hasSetRange(0, 60);
<del> assertThat(CronField.parseHours("*").bits()).hasSetRange(0, 23);
<del> assertThat(CronField.parseDaysOfMonth("*").bits()).hasUnset(0).hasSetRange(1, 31);
<del> assertThat(CronField.parseDaysOfMonth("?").bits()).hasUnset(0).hasSetRange(1, 31);
<del> assertThat(CronField.parseMonth("*").bits()).hasUnset(0).hasSetRange(1, 12);
<del> assertThat(CronField.parseDaysOfWeek("*").bits()).hasUnset(0).hasSetRange(1, 7);
<del> assertThat(CronField.parseDaysOfWeek("?").bits()).hasUnset(0).hasSetRange(1, 7);
<del> }
<del>
<del> @Test
<del> void names() {
<del> assertThat(CronField.parseMonth("JAN,FEB,MAR,APR,MAY,JUN,JUL,AUG,SEP,OCT,NOV,DEC").bits())
<del> .hasUnset(0).hasSetRange(1, 12);
<del> assertThat(CronField.parseDaysOfWeek("SUN,MON,TUE,WED,THU,FRI,SAT").bits())
<del> .hasUnset(0).hasSetRange(1, 7);
<del> }
<del>
<del>}
<ide><path>spring-context/src/test/java/org/springframework/scheduling/support/QuartzCronFieldTests.java
<add>/*
<add> * Copyright 2002-2020 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * https://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.scheduling.support;
<add>
<add>import java.time.DayOfWeek;
<add>import java.time.LocalDate;
<add>import java.util.Locale;
<add>
<add>import org.junit.jupiter.api.Test;
<add>
<add>import static java.time.DayOfWeek.SATURDAY;
<add>import static java.time.DayOfWeek.SUNDAY;
<add>import static org.assertj.core.api.Assertions.assertThat;
<add>import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
<add>
<add>/**
<add> * @author Arjen Poutsma
<add> */
<add>class QuartzCronFieldTests {
<add>
<add> @Test
<add> void lastDayOfMonth() {
<add> QuartzCronField field = QuartzCronField.parseDaysOfMonth("L");
<add>
<add> LocalDate last = LocalDate.of(2020, 6, 16);
<add> LocalDate expected = LocalDate.of(2020, 6, 30);
<add> assertThat(field.nextOrSame(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void lastDayOfMonthOffset() {
<add> QuartzCronField field = QuartzCronField.parseDaysOfMonth("L-3");
<add>
<add> LocalDate last = LocalDate.of(2020, 6, 16);
<add> LocalDate expected = LocalDate.of(2020, 6, 27);
<add> assertThat(field.nextOrSame(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void lastWeekdayOfMonth() {
<add> QuartzCronField field = QuartzCronField.parseDaysOfMonth("LW");
<add>
<add> LocalDate last = LocalDate.of(2020, 6, 16);
<add> LocalDate expected = LocalDate.of(2020, 6, 30);
<add> LocalDate actual = field.nextOrSame(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual.getDayOfWeek()).isEqualTo(DayOfWeek.TUESDAY);
<add> assertThat(actual).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> public void lastDayOfWeekFirstDayMonday() {
<add> Locale defaultLocale = Locale.getDefault();
<add> try {
<add> Locale.setDefault(Locale.UK);
<add> QuartzCronField field = QuartzCronField.parseDaysOfWeek("L");
<add>
<add> LocalDate last = LocalDate.of(2020, 6, 16);
<add> LocalDate expected = LocalDate.of(2020, 6, 21);
<add> assertThat(field.nextOrSame(last)).isEqualTo(expected);
<add>
<add> LocalDate actual = field.nextOrSame(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(SUNDAY);
<add> }
<add> finally {
<add> Locale.setDefault(defaultLocale);
<add> }
<add> }
<add>
<add> @Test
<add> public void lastDayOfWeekFirstDaySunday() {
<add> Locale defaultLocale = Locale.getDefault();
<add> try {
<add> Locale.setDefault(Locale.US);
<add> QuartzCronField field = QuartzCronField.parseDaysOfWeek("L");
<add>
<add> LocalDate last = LocalDate.of(2020, 6, 16);
<add> LocalDate expected = LocalDate.of(2020, 6, 20);
<add> assertThat(field.nextOrSame(last)).isEqualTo(expected);
<add>
<add> LocalDate actual = field.nextOrSame(last);
<add> assertThat(actual).isNotNull();
<add> assertThat(actual).isEqualTo(expected);
<add> assertThat(actual.getDayOfWeek()).isEqualTo(SATURDAY);
<add> }
<add> finally {
<add> Locale.setDefault(defaultLocale);
<add> }
<add> }
<add>
<add> @Test
<add> void lastDayOfWeekOffset() {
<add> // last Thursday (4) of the month
<add> QuartzCronField field = QuartzCronField.parseDaysOfWeek("4L");
<add>
<add> LocalDate last = LocalDate.of(2020, 6, 16);
<add> LocalDate expected = LocalDate.of(2020, 6, 25);
<add> assertThat(field.nextOrSame(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void invalidValues() {
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth(""));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("1"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("1L"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("LL"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("4L"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("0L"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("W"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("W1"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("WW"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfMonth("32W"));
<add>
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek(""));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("1"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("L1"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("LL"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("-4L"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("8L"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("#"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("1#"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("#1"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("1#L"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("L#1"));
<add> assertThatIllegalArgumentException().isThrownBy(() -> QuartzCronField.parseDaysOfWeek("8#1"));
<add> }
<add>
<add>} | 8 |
Ruby | Ruby | unify delete_multi_entries method signatute | c8e7f3ca7b5fb47af1fd130b743d7e87d16acd38 | <ide><path>activesupport/lib/active_support/cache/redis_cache_store.rb
<ide> def delete_entry(key, options)
<ide> end
<ide>
<ide> # Deletes multiple entries in the cache. Returns the number of entries deleted.
<del> def delete_multi_entries(entries, options)
<add> def delete_multi_entries(entries, **_options)
<ide> redis.with { |c| c.del(entries) }
<ide> end
<ide> | 1 |
Javascript | Javascript | update invalid date check in tests | 4ba3f90c184ed7cb43469b7ac3bf4fe7d6163f8d | <ide><path>src/test/moment/create.js
<ide> test('parsing only meridiem results in invalid date', function (assert) {
<ide>
<ide> test('invalid dates return invalid for methods that access the _d prop', function (assert) {
<ide> var momentAsDate = moment(['2015', '12', '1']).toDate();
<del> assert.equal(momentAsDate, 'Invalid Date', 'toDate returns invalid');
<add> assert.ok(momentAsDate instanceof Date, 'toDate returns a Date object');
<add> assert.ok(isNaN(momentAsDate.getTime()), 'toDate returns an invalid Date invalid');
<ide> }); | 1 |
Javascript | Javascript | fix bad comment | 0888cdd4123e80fcb859d90808bd6cc708eec3d6 | <ide><path>test/simple/test-cli-eval.js
<ide> child.exec(nodejs + ' --eval "console.error(42)"',
<ide> assert.equal(stderr, '42\n');
<ide> });
<ide>
<del>// assert that nothing is written to stdout
<add>// assert that the expected output is written to stdout
<ide> ['--print --eval', '-p -e', '-pe'].forEach(function(s) {
<ide> var cmd = nodejs + ' ' + s + ' ';
<ide> | 1 |
Python | Python | fix data processing in script | 8cd56e30363fc00d947992ae412551f1775a5cfa | <ide><path>examples/run_seq2seq_finetuning.py
<ide> class TextDataset(Dataset):
<ide> [2] https://github.com/abisee/cnn-dailymail/
<ide> """
<ide>
<del> def __init_(self, tokenizer_src, tokenizer_tgt, data_dir="", block_size=512):
<add> def __init__(self, tokenizer, prefix='train', data_dir="", block_size=512):
<ide> assert os.path.isdir(data_dir)
<ide>
<ide> # Load features that have already been computed if present
<ide> cached_features_file = os.path.join(
<del> data_dir, "cached_lm_{}_{}".format(block_size, data_dir)
<add> data_dir, "cached_lm_{}_{}".format(block_size, prefix)
<ide> )
<ide> if os.path.exists(cached_features_file):
<ide> logger.info("Loading features from cached file %s", cached_features_file)
<ide> def __init_(self, tokenizer_src, tokenizer_tgt, data_dir="", block_size=512):
<ide> return
<ide>
<ide> logger.info("Creating features from dataset at %s", data_dir)
<del>
<add> self.examples = []
<ide> datasets = ["cnn", "dailymail"]
<ide> for dataset in datasets:
<ide> path_to_stories = os.path.join(data_dir, dataset, "stories")
<ide> def __init_(self, tokenizer_src, tokenizer_tgt, data_dir="", block_size=512):
<ide> except IndexError: # skip ill-formed stories
<ide> continue
<ide>
<del> story = tokenizer_src.convert_tokens_to_ids(
<del> tokenizer_src.tokenize(story)
<del> )
<add> story = tokenizer.encode(story)
<ide> story_seq = _fit_to_block_size(story, block_size)
<ide>
<del> summary = tokenizer_tgt.convert_tokens_to_ids(
<del> tokenizer_tgt.tokenize(summary)
<del> )
<add> summary = tokenizer.encode(summary)
<ide> summary_seq = _fit_to_block_size(summary, block_size)
<ide>
<ide> self.examples.append((story_seq, summary_seq))
<ide>
<ide> logger.info("Saving features into cache file %s", cached_features_file)
<ide> with open(cached_features_file, "wb") as sink:
<del> pickle.dump(self.examples, sink, protocole=pickle.HIGHEST_PROTOCOL)
<add> pickle.dump(self.examples, sink, protocol=pickle.HIGHEST_PROTOCOL)
<ide>
<ide> def __len__(self):
<ide> return len(self.examples)
<ide> def _fit_to_block_size(sequence, block_size):
<ide> if len(sequence) > block_size:
<ide> return sequence[:block_size]
<ide> else:
<del> return sequence.extend([-1] * [block_size - len(sequence)])
<add> return sequence.extend([-1] * (block_size - len(sequence)))
<ide>
<ide>
<del>def load_and_cache_examples(args, tokenizer_src, tokenizer_tgt):
<del> dataset = TextDataset(tokenizer_src, tokenizer_tgt, file_path=args.data_dir)
<add>def load_and_cache_examples(args, tokenizer):
<add> dataset = TextDataset(tokenizer, data_dir=args.data_dir)
<ide> return dataset
<ide>
<ide>
<ide> def main():
<ide> "--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer."
<ide> )
<ide> parser.add_argument(
<del> "--decoder_name_or_path",
<add> "--model_name_or_path",
<ide> default="bert-base-cased",
<ide> type=str,
<del> help="The model checkpoint to initialize the decoder's weights with.",
<add> help="The model checkpoint to initialize the encoder and decoder's weights with.",
<ide> )
<ide> parser.add_argument(
<del> "--decoder_type",
<add> "--model_type",
<ide> default="bert",
<ide> type=str,
<ide> help="The decoder architecture to be fine-tuned.",
<ide> )
<del> parser.add_argument(
<del> "--encoder_name_or_path",
<del> default="bert-base-cased",
<del> type=str,
<del> help="The model checkpoint to initialize the encoder's weights with.",
<del> )
<del> parser.add_argument(
<del> "--encoder_type",
<del> default="bert",
<del> type=str,
<del> help="The encoder architecture to be fine-tuned.",
<del> )
<ide> parser.add_argument(
<ide> "--learning_rate",
<ide> default=5e-5,
<ide> def main():
<ide> )
<ide> args = parser.parse_args()
<ide>
<del> if args.encoder_type != "bert" or args.decoder_type != "bert":
<add> if args.model_type != "bert":
<ide> raise ValueError(
<ide> "Only the BERT architecture is currently supported for seq2seq."
<ide> )
<ide> def main():
<ide> set_seed(args)
<ide>
<ide> # Load pretrained model and tokenizer
<del> encoder_tokenizer_class = AutoTokenizer.from_pretrained(args.encoder_name_or_path)
<del> decoder_tokenizer_class = AutoTokenizer.from_pretrained(args.decoder_name_or_path)
<del> model = Model2Model.from_pretrained(
<del> args.encoder_name_or_path, args.decoder_name_or_path
<del> )
<add> tokenizer = AutoTokenizer.from_pretrained(args.model_name_or_path)
<add> model = Model2Model.from_pretrained(args.model_name_or_path)
<ide> # model.to(device)
<ide>
<ide> logger.info("Training/evaluation parameters %s", args) | 1 |
Ruby | Ruby | ensure git --version actually matches | a252c90a0d49cae0889a9a540a3c220c53340425 | <ide><path>Library/Homebrew/cmd/doctor.rb
<ide> def __check_git_version
<ide> # https://help.github.com/articles/https-cloning-errors
<ide> `git --version`.chomp =~ /git version ((?:\d+\.?)+)/
<ide>
<del> if Version.new($1) < Version.new("1.7.10") then <<-EOS.undent
<add> if $1 and Version.new($1) < Version.new("1.7.10") then <<-EOS.undent
<ide> An outdated version of Git was detected in your PATH.
<ide> Git 1.7.10 or newer is required to perform checkouts over HTTPS from GitHub.
<ide> Please upgrade: brew upgrade git | 1 |
Javascript | Javascript | allow examples with hidden source code | 260725efcd15e960941af6bc1f468e3fb1ac45c5 | <ide><path>docs/src/templates/doc_widgets.js
<ide> //jqlite instead. jqlite's find() method currently supports onlt getElementsByTagName!
<ide> var example = element.find('pre').eq(0), //doc-source
<ide> exampleSrc = example.text(),
<add> showSource = example.attr('source') !== 'false',
<ide> jsfiddle = example.attr('jsfiddle') || true,
<ide> scenario = element.find('pre').eq(1); //doc-scenario
<ide>
<ide> var code = indent(exampleSrc);
<ide> var tabHtml =
<del> '<ul class="doc-example">' +
<add> '<ul class="doc-example">';
<add>
<add> // show source tab, if not disabled
<add> if (showSource) {
<add> tabHtml +=
<ide> '<li class="doc-example-heading"><h3>Source</h3></li>' +
<ide> '<li class="doc-example-source" ng:non-bindable>' +
<ide> jsFiddleButton(jsfiddle) + // may or may not have value
<ide> '<pre class="brush: js; html-script: true; highlight: [' +
<del> code.hilite + ']; toolbar: false;"></pre></li>' +
<add> code.hilite + ']; toolbar: false;"></pre></li>';
<add> }
<add> // show live preview tab
<add> tabHtml +=
<ide> '<li class="doc-example-heading"><h3>Live Preview</h3></li>' +
<ide> '<li class="doc-example-live">' + exampleSrc +'</li>';
<add> // show scenario tab, if present
<ide> if (scenario.text()) {
<ide> tabHtml +=
<ide> '<li class="doc-example-heading"><h3>Scenario Test</h3></li>' + | 1 |
Javascript | Javascript | treat bare \r as a line ending | 60f18ede39e428867fbfe6df8c536fc2fcf79947 | <ide><path>lib/readline.js
<ide> function Interface(input, output, completer, terminal) {
<ide> return new Interface(input, output, completer, terminal);
<ide> }
<ide>
<add> this._sawReturn = false;
<add>
<ide> EventEmitter.call(this);
<ide>
<ide> if (arguments.length === 1) {
<ide> Interface.prototype.write = function(d, key) {
<ide> this.terminal ? this._ttyWrite(d, key) : this._normalWrite(d);
<ide> };
<ide>
<add>// \r\n, \n, or \r followed by something other than \n
<add>var lineEnding = /\r?\n|\r(?!\n)/;
<ide> Interface.prototype._normalWrite = function(b) {
<ide> if (b === undefined) {
<ide> return;
<ide> }
<ide> var string = this._decoder.write(b);
<add> if (this._sawReturn) {
<add> string = string.replace(/^\n/, '');
<add> this._sawReturn = false;
<add> }
<add>
<ide> if (this._line_buffer) {
<ide> string = this._line_buffer + string;
<ide> this._line_buffer = null;
<ide> }
<del> if (string.indexOf('\n') !== -1) {
<add> if (lineEnding.test(string)) {
<add> this._sawReturn = /\r$/.test(string);
<add>
<ide> // got one or more newlines; process into "line" events
<del> var lines = string.split(/\r?\n/);
<add> var lines = string.split(lineEnding);
<ide> // either '' or (concievably) the unfinished portion of the next line
<ide> string = lines.pop();
<ide> this._line_buffer = string;
<ide> Interface.prototype._ttyWrite = function(s, key) {
<ide> } else {
<ide> /* No modifier keys used */
<ide>
<add> // \r bookkeeping is only relevant if a \n comes right after.
<add> if (this._sawReturn && key.name !== 'enter')
<add> this._sawReturn = false;
<add>
<ide> switch (key.name) {
<del> case 'enter':
<add> case 'return': // carriage return, i.e. \r
<add> this._sawReturn = true;
<ide> this._line();
<ide> break;
<ide>
<add> case 'enter':
<add> if (this._sawReturn)
<add> this._sawReturn = false
<add> else
<add> this._line();
<add> break;
<add>
<ide> case 'backspace':
<ide> this._deleteLeft();
<ide> break;
<ide> Interface.prototype._ttyWrite = function(s, key) {
<ide> this._moveCursor(+1);
<ide> break;
<ide>
<del> case 'return': // carriage return, i.e. \r
<ide> case 'home':
<ide> this._moveCursor(-Infinity);
<ide> break;
<ide><path>test/simple/test-readline-interface.js
<ide> FakeInput.prototype.end = function() {};
<ide> assert.equal(callCount, expectedLines.length - 1);
<ide> rli.close();
<ide>
<add> // \r\n should emit one line event when split across multiple writes.
<add> fi = new FakeInput();
<add> rli = new readline.Interface({ input: fi, output: fi, terminal: terminal });
<add> expectedLines = ['foo', 'bar', 'baz', 'bat'];
<add> callCount = 0;
<add> rli.on('line', function(line) {
<add> assert.equal(line, expectedLines[callCount]);
<add> callCount++;
<add> });
<add> expectedLines.forEach(function(line) {
<add> fi.emit('data', line + '\r');
<add> fi.emit('data', '\n');
<add> });
<add> assert.equal(callCount, expectedLines.length);
<add> rli.close();
<add>
<add> // \r should behave like \n when alone
<add> fi = new FakeInput();
<add> rli = new readline.Interface({ input: fi, output: fi, terminal: true });
<add> expectedLines = ['foo', 'bar', 'baz', 'bat'];
<add> callCount = 0;
<add> rli.on('line', function(line) {
<add> assert.equal(line, expectedLines[callCount]);
<add> callCount++;
<add> });
<add> fi.emit('data', expectedLines.join('\r'));
<add> assert.equal(callCount, expectedLines.length - 1);
<add> rli.close();
<add>
<add>
<ide> // sending a multi-byte utf8 char over multiple writes
<ide> var buf = Buffer('☮', 'utf8');
<ide> fi = new FakeInput(); | 2 |
Python | Python | fix init for mt5 | 48395d6b8e36e2571db2c3ad54eea6d8f0add184 | <ide><path>src/transformers/__init__.py
<ide> from .models.mbart import MBartConfig
<ide> from .models.mmbt import MMBTConfig
<ide> from .models.mobilebert import MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, MobileBertConfig, MobileBertTokenizer
<add>from .models.mt5 import MT5Config
<ide> from .models.openai import OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP, OpenAIGPTConfig, OpenAIGPTTokenizer
<ide> from .models.pegasus import PegasusConfig
<ide> from .models.phobert import PhobertTokenizer
<ide> MobileBertPreTrainedModel,
<ide> load_tf_weights_in_mobilebert,
<ide> )
<del> from .models.mt5 import MT5Config, MT5ForConditionalGeneration, MT5Model
<add> from .models.mt5 import MT5ForConditionalGeneration, MT5Model
<ide> from .models.openai import (
<ide> OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_LIST,
<ide> OpenAIGPTDoubleHeadsModel,
<ide><path>src/transformers/utils/dummy_pt_objects.py
<ide> def load_tf_weights_in_mobilebert(*args, **kwargs):
<ide> requires_pytorch(load_tf_weights_in_mobilebert)
<ide>
<ide>
<del>class MT5Config:
<del> def __init__(self, *args, **kwargs):
<del> requires_pytorch(self)
<del>
<del>
<ide> class MT5ForConditionalGeneration:
<ide> def __init__(self, *args, **kwargs):
<ide> requires_pytorch(self) | 2 |
Text | Text | update devops with password guidelines | cb5369a5829ed02236d4f764caa6ef6d7a854b92 | <ide><path>docs/devops.md
<ide> The domain name will be different than **`freeCodeCamp.org`**. Currently this pu
<ide>
<ide> <h3 align="center"><a href='https://www.freecodecamp.dev' _target='blank'><code>www.freecodecamp.dev</code></a></h4>
<ide>
<add>To prevent accidental indexing on search engines and users accidenatly using this site (without knowledge of it being a developement site) is closed off with a simple password:
<add>
<add><h3 align="center"><code>freecodecamp-is-awesome</code></h4>
<add>
<ide> ## Identifying the current version of platform
<ide>
<ide> **The current version of the platform is always available at [`freeCodeCamp.org`](https://www.freecodecamp.org).** | 1 |
Text | Text | remove duplicate -v flag | b6ea0f43aeb7ff1dcb03658e38bacae1130abd91 | <ide><path>README.md
<ide> Depending on which framework is installed (TensorFlow 2.0 and/or PyTorch), the i
<ide> You can run the tests from the root of the cloned repository with the commands:
<ide>
<ide> ```bash
<del>python -m unittest -v discover -s tests -t . -v
<del>python -m unittest -v discover -s examples -t examples -v
<add>python -m unittest discover -s tests -t . -v
<add>python -m unittest discover -s examples -t examples -v
<ide> ```
<ide>
<ide> or | 1 |
Ruby | Ruby | fix typo in assert_changes error message | d76ed9e47e8209811914eb6e2e55c232e2795471 | <ide><path>activesupport/lib/active_support/testing/assertions.rb
<ide> def assert_changes(expression, message = nil, from: UNTRACKED, to: UNTRACKED, &b
<ide> after = exp.call
<ide>
<ide> if to == UNTRACKED
<del> error = "#{expression.inspect} didn't changed"
<add> error = "#{expression.inspect} didn't change"
<ide> error = "#{message}.\n#{error}" if message
<ide> assert_not_equal before, after, error
<ide> else | 1 |
Mixed | Python | add run_type to dagrun | 533b14341c774329d8184e6e559528ae8ed34b3a | <ide><path>UPDATING.md
<ide> https://developers.google.com/style/inclusive-documentation
<ide>
<ide> -->
<ide>
<add>### DAG.create_dagrun accepts run_type and does not require run_id
<add>This change is caused by adding `run_type` column to `DagRun`.
<add>
<add>Previous signature:
<add>```python
<add>def create_dagrun(self,
<add> run_id,
<add> state,
<add> execution_date=None,
<add> start_date=None,
<add> external_trigger=False,
<add> conf=None,
<add> session=None):
<add>```
<add>current:
<add>```python
<add>def create_dagrun(self,
<add> state,
<add> execution_date=None,
<add> run_id=None,
<add> start_date=None,
<add> external_trigger=False,
<add> conf=None,
<add> run_type=None,
<add> session=None):
<add>```
<add>If user provides `run_id` then the `run_type` will be derived from it by checking prefix, allowed types
<add>: `manual`, `scheduled`, `backfill` (defined by `airflow.utils.types.DagRunType`).
<add>
<add>If user provides `run_type` and `execution_date` then `run_id` is constructed as
<add>`{run_type}__{execution_data.isoformat()}`.
<add>
<add>Airflow should construct dagruns using `run_type` and `execution_date`, creation using
<add>`run_id` is preserved for user actions.
<add>
<add>
<ide> ### Standardised "extra" requirements
<ide>
<ide> We standardised the Extras names and synchronized providers package names with the main airflow extras.
<ide> For example instead of `pip install apache-airflow[atlas]` you should use
<ide>
<ide> The deprecated extras will be removed in 2.1:
<ide>
<del>
<ide> ### Skipped tasks can satisfy wait_for_downstream
<ide>
<ide> Previously, a task instance with `wait_for_downstream=True` will only run if the downstream task of
<ide><path>airflow/api/common/experimental/mark_tasks.py
<ide> def _create_dagruns(dag, execution_dates, state, run_type):
<ide>
<ide> for date in dates_to_create:
<ide> dag_run = dag.create_dagrun(
<del> run_id=f"{run_type}__{date.isoformat()}",
<ide> execution_date=date,
<ide> start_date=timezone.utcnow(),
<ide> external_trigger=False,
<ide> state=state,
<add> run_type=run_type,
<ide> )
<ide> dag_runs.append(dag_run)
<ide>
<ide> def get_subdag_runs(dag, session, state, task_ids, commit, confirmed_dates):
<ide> dag_runs = _create_dagruns(current_task.subdag,
<ide> execution_dates=confirmed_dates,
<ide> state=State.RUNNING,
<del> run_type=DagRunType.BACKFILL_JOB.value)
<add> run_type=DagRunType.BACKFILL_JOB)
<ide>
<ide> verify_dagruns(dag_runs, commit, state, session, current_task)
<ide>
<ide><path>airflow/api/common/experimental/trigger_dag.py
<ide> def _trigger_dag(
<ide> execution_date.isoformat(),
<ide> min_dag_start_date.isoformat()))
<ide>
<del> if not run_id:
<del> run_id = f"{DagRunType.MANUAL.value}__{execution_date.isoformat()}"
<add> run_id = run_id or DagRun.generate_run_id(DagRunType.MANUAL, execution_date)
<add> dag_run = dag_run.find(dag_id=dag_id, run_id=run_id)
<ide>
<del> dag_run_id = dag_run.find(dag_id=dag_id, run_id=run_id)
<del> if dag_run_id:
<del> raise DagRunAlreadyExists("Run id {} already exists for dag id {}".format(
<del> run_id,
<del> dag_id
<del> ))
<add> if dag_run:
<add> raise DagRunAlreadyExists(
<add> f"Run id {dag_run.run_id} already exists for dag id {dag_id}"
<add> )
<ide>
<ide> run_conf = None
<ide> if conf:
<del> if isinstance(conf, dict):
<del> run_conf = conf
<del> else:
<del> run_conf = json.loads(conf)
<add> run_conf = conf if isinstance(conf, dict) else json.loads(conf)
<ide>
<ide> triggers = []
<ide> dags_to_trigger = [dag]
<ide> def _trigger_dag(
<ide> conf=run_conf,
<ide> external_trigger=True,
<ide> )
<add>
<ide> triggers.append(trigger)
<ide> if dag.subdags:
<ide> dags_to_trigger.extend(dag.subdags)
<ide><path>airflow/jobs/backfill_job.py
<ide> def _get_dag_run(self, run_date: datetime, dag: DAG, session: Session = None):
<ide> :param session: the database session object
<ide> :return: a DagRun in state RUNNING or None
<ide> """
<del> run_id = f"{DagRunType.BACKFILL_JOB.value}__{run_date.isoformat()}"
<del>
<ide> # consider max_active_runs but ignore when running subdags
<ide> respect_dag_max_active_limit = bool(dag.schedule_interval and not dag.is_subdag)
<ide>
<ide> def _get_dag_run(self, run_date: datetime, dag: DAG, session: Session = None):
<ide> return None
<ide>
<ide> run = run or dag.create_dagrun(
<del> run_id=run_id,
<ide> execution_date=run_date,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING,
<ide> external_trigger=False,
<ide> session=session,
<ide> conf=self.conf,
<add> run_type=DagRunType.BACKFILL_JOB,
<ide> )
<ide>
<ide> # set required transient field
<ide> run.dag = dag
<ide>
<ide> # explicitly mark as backfill and running
<ide> run.state = State.RUNNING
<del> run.run_id = run_id
<add> run.run_id = run.generate_run_id(DagRunType.BACKFILL_JOB, run_date)
<add> run.run_type = DagRunType.BACKFILL_JOB.value
<ide> run.verify_integrity(session=session)
<ide> return run
<ide>
<ide><path>airflow/jobs/base_job.py
<ide> def reset_state_for_orphaned_tasks(self, filter_by_dag_run=None, session=None):
<ide> .filter(
<ide> # pylint: disable=comparison-with-callable
<ide> DR.state == State.RUNNING,
<del> DR.run_id.notlike(f"{DagRunType.BACKFILL_JOB.value}__%"),
<add> DR.run_type != DagRunType.BACKFILL_JOB.value,
<ide> TI.state.in_(resettable_states))).all()
<ide> else:
<ide> resettable_tis = filter_by_dag_run.get_task_instances(state=resettable_states,
<ide><path>airflow/jobs/scheduler_job.py
<ide> def create_dag_run(
<ide> .filter_by(dag_id=dag.dag_id)
<ide> .filter(or_(
<ide> DagRun.external_trigger == False, # noqa: E712 pylint: disable=singleton-comparison
<del> # add % as a wildcard for the like query
<del> DagRun.run_id.like(f"{DagRunType.SCHEDULED.value}__%"))
<del> )
<add> DagRun.run_type == DagRunType.SCHEDULED.value
<add> ))
<ide> )
<ide> last_scheduled_run = qry.scalar()
<ide>
<ide> def create_dag_run(
<ide>
<ide> if next_run_date and period_end and period_end <= timezone.utcnow():
<ide> next_run = dag.create_dagrun(
<del> run_id=f"{DagRunType.SCHEDULED.value}__{next_run_date.isoformat()}",
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=next_run_date,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING,
<ide> def _find_executable_task_instances(self, simple_dag_bag: SimpleDagBag, session=
<ide> .outerjoin(
<ide> DR, and_(DR.dag_id == TI.dag_id, DR.execution_date == TI.execution_date)
<ide> )
<del> .filter(or_(DR.run_id.is_(None), not_(DR.run_id.like(f"{DagRunType.BACKFILL_JOB.value}__%"))))
<add> .filter(or_(DR.run_id.is_(None), DR.run_type != DagRunType.BACKFILL_JOB.value))
<ide> .outerjoin(DM, DM.dag_id == TI.dag_id)
<ide> .filter(or_(DM.dag_id.is_(None), not_(DM.is_paused)))
<ide> .filter(TI.state == State.SCHEDULED)
<ide><path>airflow/migrations/versions/3c20cacc0044_add_dagrun_run_type.py
<add>#
<add># Licensed to the Apache Software Foundation (ASF) under one
<add># or more contributor license agreements. See the NOTICE file
<add># distributed with this work for additional information
<add># regarding copyright ownership. The ASF licenses this file
<add># to you under the Apache License, Version 2.0 (the
<add># "License"); you may not use this file except in compliance
<add># with the License. You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing,
<add># software distributed under the License is distributed on an
<add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<add># KIND, either express or implied. See the License for the
<add># specific language governing permissions and limitations
<add># under the License.
<add>
<add>"""
<add>Add DagRun run_type
<add>
<add>Revision ID: 3c20cacc0044
<add>Revises: 952da73b5eff
<add>Create Date: 2020-04-08 13:35:25.671327
<add>
<add>"""
<add>
<add>import sqlalchemy as sa
<add>from alembic import op
<add>from sqlalchemy import Boolean, Column, Integer, PickleType, String
<add>from sqlalchemy.ext.declarative import declarative_base
<add>
<add>from airflow.models.base import ID_LEN
<add>from airflow.utils import timezone
<add>from airflow.utils.sqlalchemy import UtcDateTime
<add>from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<add>
<add># revision identifiers, used by Alembic.
<add>revision = "3c20cacc0044"
<add>down_revision = "952da73b5eff"
<add>branch_labels = None
<add>depends_on = None
<add>
<add>Base = declarative_base()
<add>
<add>
<add>class DagRun(Base):
<add> """
<add> DagRun describes an instance of a Dag. It can be created
<add> by the scheduler (for regular runs) or by an external trigger
<add> """
<add> __tablename__ = "dag_run"
<add>
<add> id = Column(Integer, primary_key=True)
<add> dag_id = Column(String(ID_LEN))
<add> execution_date = Column(UtcDateTime, default=timezone.utcnow)
<add> start_date = Column(UtcDateTime, default=timezone.utcnow)
<add> end_date = Column(UtcDateTime)
<add> _state = Column('state', String(50), default=State.RUNNING)
<add> run_id = Column(String(ID_LEN))
<add> external_trigger = Column(Boolean, default=True)
<add> run_type = Column(String(50), nullable=False)
<add> conf = Column(PickleType)
<add>
<add>
<add>def upgrade():
<add> """Apply Add DagRun run_type"""
<add> run_type_col_type = sa.String(length=50)
<add>
<add> # Add nullable column
<add> with op.batch_alter_table("dag_run") as batch_op:
<add> batch_op.add_column(sa.Column("run_type", run_type_col_type, nullable=True))
<add>
<add> # Generate run type for existing records
<add> connection = op.get_bind()
<add> sessionmaker = sa.orm.sessionmaker()
<add> session = sessionmaker(bind=connection)
<add>
<add> for run_type in DagRunType:
<add> session.query(DagRun).filter(DagRun.run_id.like(f"{run_type.value}__%")).update(
<add> {DagRun.run_type: run_type.value}, synchronize_session=False
<add> )
<add>
<add> session.query(DagRun).filter(DagRun.run_type.is_(None)).update(
<add> {DagRun.run_type: DagRunType.MANUAL.value}, synchronize_session=False
<add> )
<add> session.commit()
<add>
<add> # Make run_type not nullable
<add> with op.batch_alter_table("dag_run") as batch_op:
<add> batch_op.alter_column("run_type", type_=run_type_col_type, nullable=False)
<add>
<add>
<add>def downgrade():
<add> """Unapply Add DagRun run_type"""
<add> op.drop_column("run_type")
<ide><path>airflow/models/dag.py
<ide> from airflow.utils.session import provide_session
<ide> from airflow.utils.sqlalchemy import Interval, UtcDateTime
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide>
<ide> log = logging.getLogger(__name__)
<ide>
<ide> def cli(self):
<ide>
<ide> @provide_session
<ide> def create_dagrun(self,
<del> run_id,
<ide> state,
<ide> execution_date=None,
<add> run_id=None,
<ide> start_date=None,
<ide> external_trigger=False,
<ide> conf=None,
<add> run_type=None,
<ide> session=None):
<ide> """
<ide> Creates a dag run from this dag including the tasks associated with this dag.
<ide> Returns the dag run.
<ide>
<ide> :param run_id: defines the run id for this dag run
<ide> :type run_id: str
<add> :param run_type: type of DagRun
<add> :type run_type: airflow.utils.types.DagRunType
<ide> :param execution_date: the execution date of this dag run
<ide> :type execution_date: datetime.datetime
<ide> :param state: the state of the dag run
<ide> def create_dagrun(self,
<ide> :param session: database session
<ide> :type session: sqlalchemy.orm.session.Session
<ide> """
<add> if run_id and not run_type:
<add> if not isinstance(run_id, str):
<add> raise ValueError(f"`run_id` expected to be a str is {type(run_id)}")
<add> run_type: DagRunType = DagRunType.from_run_id(run_id)
<add> elif run_type and execution_date:
<add> if not isinstance(run_type, DagRunType):
<add> raise ValueError(f"`run_type` expected to be a DagRunType is {type(run_type)}")
<add> run_id = DagRun.generate_run_id(run_type, execution_date)
<add> elif not run_id:
<add> raise AirflowException(
<add> "Creating DagRun needs either `run_id` or both `run_type` and `execution_date`"
<add> )
<add>
<ide> run = DagRun(
<ide> dag_id=self.dag_id,
<ide> run_id=run_id,
<ide> execution_date=execution_date,
<ide> start_date=start_date,
<ide> external_trigger=external_trigger,
<ide> conf=conf,
<del> state=state
<add> state=state,
<add> run_type=run_type.value,
<ide> )
<ide> session.add(run)
<ide>
<ide><path>airflow/models/dagrun.py
<ide> class DagRun(Base, LoggingMixin):
<ide> _state = Column('state', String(50), default=State.RUNNING)
<ide> run_id = Column(String(ID_LEN))
<ide> external_trigger = Column(Boolean, default=True)
<add> run_type = Column(String(50), nullable=False)
<ide> conf = Column(PickleType)
<ide>
<ide> dag = None
<ide> class DagRun(Base, LoggingMixin):
<ide> )
<ide>
<ide> def __init__(self, dag_id=None, run_id=None, execution_date=None, start_date=None, external_trigger=None,
<del> conf=None, state=None):
<add> conf=None, state=None, run_type=None):
<ide> self.dag_id = dag_id
<ide> self.run_id = run_id
<ide> self.execution_date = execution_date
<ide> self.start_date = start_date
<ide> self.external_trigger = external_trigger
<ide> self.conf = conf
<ide> self.state = state
<add> self.run_type = run_type
<ide> super().__init__()
<ide>
<ide> def __repr__(self):
<ide> def find(
<ide> state: Optional[str] = None,
<ide> external_trigger: Optional[bool] = None,
<ide> no_backfills: Optional[bool] = False,
<add> run_type: Optional[DagRunType] = None,
<ide> session: Session = None,
<ide> execution_start_date=None, execution_end_date=None
<ide> ):
<ide> def find(
<ide> :type dag_id: str or list[str]
<ide> :param run_id: defines the run id for this dag run
<ide> :type run_id: str
<add> :param run_type: type of DagRun
<add> :type run_type: airflow.utils.types.DagRunType
<ide> :param execution_date: the execution date
<ide> :type execution_date: datetime.datetime or list[datetime.datetime]
<ide> :param state: the state of the dag run
<ide> def find(
<ide> qry = qry.filter(DR.state == state)
<ide> if external_trigger is not None:
<ide> qry = qry.filter(DR.external_trigger == external_trigger)
<add> if run_type:
<add> qry = qry.filter(DR.run_type == run_type.value)
<ide> if no_backfills:
<del> # in order to prevent a circular dependency
<del> qry = qry.filter(DR.run_id.notlike(f"{DagRunType.BACKFILL_JOB.value}__%"))
<add> qry = qry.filter(DR.run_type != DagRunType.BACKFILL_JOB.value)
<ide>
<ide> dr = qry.order_by(DR.execution_date).all()
<ide>
<ide> return dr
<ide>
<add> @staticmethod
<add> def generate_run_id(run_type: DagRunType, execution_date: datetime) -> str:
<add> return f"{run_type.value}__{execution_date.isoformat()}"
<add>
<ide> @provide_session
<ide> def get_task_instances(self, state=None, session=None):
<ide> """
<ide> def get_run(session, dag_id, execution_date):
<ide>
<ide> @property
<ide> def is_backfill(self):
<del> return (
<del> self.run_id is not None and
<del> self.run_id.startswith(f"{DagRunType.BACKFILL_JOB.value}")
<del> )
<add> return self.run_type == DagRunType.BACKFILL_JOB.value
<ide>
<ide> @classmethod
<ide> @provide_session
<ide><path>airflow/operators/dagrun_operator.py
<ide> from typing import Dict, Optional, Union
<ide>
<ide> from airflow.api.common.experimental.trigger_dag import trigger_dag
<del>from airflow.models import BaseOperator
<add>from airflow.models import BaseOperator, DagRun
<ide> from airflow.utils import timezone
<ide> from airflow.utils.decorators import apply_defaults
<add>from airflow.utils.types import DagRunType
<ide>
<ide>
<ide> class TriggerDagRunOperator(BaseOperator):
<ide> def __init__(
<ide>
<ide> def execute(self, context: Dict):
<ide> if isinstance(self.execution_date, datetime.datetime):
<del> run_id = "trig__{}".format(self.execution_date.isoformat())
<add> execution_date = self.execution_date
<ide> elif isinstance(self.execution_date, str):
<del> run_id = "trig__{}".format(self.execution_date)
<del> self.execution_date = timezone.parse(self.execution_date) # trigger_dag() expects datetime
<add> execution_date = timezone.parse(self.execution_date)
<add> self.execution_date = execution_date
<ide> else:
<del> run_id = "trig__{}".format(timezone.utcnow().isoformat())
<add> execution_date = timezone.utcnow()
<ide>
<add> run_id = DagRun.generate_run_id(DagRunType.MANUAL, execution_date)
<ide> # Ignore MyPy type for self.execution_date because it doesn't pick up the timezone.parse() for strings
<ide> trigger_dag(
<ide> dag_id=self.trigger_dag_id,
<ide><path>airflow/operators/subdag_operator.py
<ide> from airflow.utils.decorators import apply_defaults
<ide> from airflow.utils.session import create_session, provide_session
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide>
<ide>
<ide> class SkippedStatePropagationOptions(Enum):
<ide> def pre_execute(self, context):
<ide>
<ide> if dag_run is None:
<ide> dag_run = self.subdag.create_dagrun(
<del> run_id="scheduled__{}".format(execution_date.isoformat()),
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=execution_date,
<ide> state=State.RUNNING,
<ide> external_trigger=True,
<ide><path>airflow/ti_deps/deps/dagrun_id_dep.py
<ide>
<ide> """This module defines dep for DagRun ID validation"""
<ide>
<del>from re import match
<del>
<ide> from airflow.ti_deps.deps.base_ti_dep import BaseTIDep
<ide> from airflow.utils.session import provide_session
<ide> from airflow.utils.types import DagRunType
<ide> def _get_dep_statuses(self, ti, session, dep_context=None):
<ide> """
<ide> dagrun = ti.get_dagrun(session)
<ide>
<del> if not dagrun or not dagrun.run_id or not match(f"{DagRunType.BACKFILL_JOB.value}.*", dagrun.run_id):
<add> if not dagrun or not dagrun.run_id or dagrun.run_type != DagRunType.BACKFILL_JOB.value:
<ide> yield self._passing_status(
<del> reason=f"Task's DagRun doesn't exist or the run_id is either NULL "
<del> f"or doesn't start with {DagRunType.BACKFILL_JOB.value}")
<add> reason=f"Task's DagRun doesn't exist or run_id is either NULL "
<add> f"or run_type is not {DagRunType.BACKFILL_JOB.value}")
<ide> else:
<ide> yield self._failing_status(
<ide> reason=f"Task's DagRun run_id is not NULL "
<del> f"and starts with {DagRunType.BACKFILL_JOB.value}")
<add> f"and run type is {DagRunType.BACKFILL_JOB.value}")
<ide><path>airflow/utils/types.py
<ide> class DagRunType(enum.Enum):
<ide> BACKFILL_JOB = "backfill"
<ide> SCHEDULED = "scheduled"
<ide> MANUAL = "manual"
<add>
<add> @staticmethod
<add> def from_run_id(run_id: str) -> "DagRunType":
<add> """
<add>        Resolves DagRun type from run_id.
<add> """
<add> for run_type in DagRunType:
<add> if run_id and run_id.startswith(f"{run_type.value}__"):
<add> return run_type
<add> return DagRunType.MANUAL
<ide><path>airflow/www/forms.py
<ide> from airflow.configuration import conf
<ide> from airflow.models import Connection
<ide> from airflow.utils import timezone
<add>from airflow.utils.types import DagRunType
<ide> from airflow.www.validators import ValidJson
<ide> from airflow.www.widgets import AirflowDateTimePickerWidget
<ide>
<ide> class DagRunForm(DynamicForm):
<ide>
<ide> def populate_obj(self, item):
<ide> super().populate_obj(item)
<add> item.run_type = DagRunType.from_run_id(item.run_id).value
<ide> if item.conf:
<ide> item.conf = json.loads(item.conf)
<ide>
<ide><path>airflow/www/views.py
<ide> def trigger(self, session=None):
<ide> return redirect(origin)
<ide>
<ide> execution_date = timezone.utcnow()
<del> run_id = f"{DagRunType.MANUAL.value}__{execution_date.isoformat()}"
<ide>
<del> dr = DagRun.find(dag_id=dag_id, run_id=run_id)
<add> dr = DagRun.find(dag_id=dag_id, execution_date=execution_date, run_type=DagRunType.MANUAL)
<ide> if dr:
<del> flash("This run_id {} already exists".format(run_id))
<add> flash(f"This run_id {dr.run_id} already exists")
<ide> return redirect(origin)
<ide>
<ide> run_conf = {}
<ide> def trigger(self, session=None):
<ide>
<ide> dag = dagbag.get_dag(dag_id)
<ide> dag.create_dagrun(
<del> run_id=run_id,
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=execution_date,
<ide> state=State.RUNNING,
<ide> conf=run_conf,
<del> external_trigger=True
<add> external_trigger=True,
<ide> )
<ide>
<ide> flash(
<ide> class DagRunModelView(AirflowModelView):
<ide> base_permissions = ['can_list', 'can_add']
<ide>
<ide> add_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'external_trigger', 'conf']
<del> list_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'external_trigger', 'conf']
<del> search_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'external_trigger', 'conf']
<add> list_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'run_type', 'external_trigger', 'conf']
<add> search_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'run_type', 'external_trigger', 'conf']
<ide>
<ide> base_order = ('execution_date', 'desc')
<ide>
<ide><path>tests/api/client/test_local_client.py
<ide> from airflow.api.client.local_client import Client
<ide> from airflow.example_dags import example_bash_operator
<ide> from airflow.exceptions import AirflowException
<del>from airflow.models import DAG, DagBag, DagModel, Pool
<add>from airflow.models import DAG, DagBag, DagModel, DagRun, Pool
<ide> from airflow.utils import timezone
<ide> from airflow.utils.session import create_session
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide> from tests.test_utils.db import clear_db_pools
<ide>
<ide> EXECDATE = timezone.utcnow()
<ide> def tearDown(self):
<ide> @patch.object(DAG, 'create_dagrun')
<ide> def test_trigger_dag(self, mock):
<ide> test_dag_id = "example_bash_operator"
<add> run_id = DagRun.generate_run_id(DagRunType.MANUAL, EXECDATE_NOFRACTIONS)
<add>
<ide> DagBag(include_examples=True)
<ide>
<ide> # non existent
<ide> def test_trigger_dag(self, mock):
<ide> with freeze_time(EXECDATE):
<ide> # no execution date, execution date should be set automatically
<ide> self.client.trigger_dag(dag_id=test_dag_id)
<del> mock.assert_called_once_with(run_id="manual__{0}".format(EXECDATE_ISO),
<add> mock.assert_called_once_with(run_id=run_id,
<ide> execution_date=EXECDATE_NOFRACTIONS,
<ide> state=State.RUNNING,
<ide> conf=None,
<ide> def test_trigger_dag(self, mock):
<ide>
<ide> # execution date with microseconds cutoff
<ide> self.client.trigger_dag(dag_id=test_dag_id, execution_date=EXECDATE)
<del> mock.assert_called_once_with(run_id="manual__{0}".format(EXECDATE_ISO),
<add> mock.assert_called_once_with(run_id=run_id,
<ide> execution_date=EXECDATE_NOFRACTIONS,
<ide> state=State.RUNNING,
<ide> conf=None,
<ide> external_trigger=True)
<ide> mock.reset_mock()
<ide>
<ide> # run id
<del> run_id = "my_run_id"
<del> self.client.trigger_dag(dag_id=test_dag_id, run_id=run_id)
<del> mock.assert_called_once_with(run_id=run_id,
<add> custom_run_id = "my_run_id"
<add> self.client.trigger_dag(dag_id=test_dag_id, run_id=custom_run_id)
<add> mock.assert_called_once_with(run_id=custom_run_id,
<ide> execution_date=EXECDATE_NOFRACTIONS,
<ide> state=State.RUNNING,
<ide> conf=None,
<ide> def test_trigger_dag(self, mock):
<ide> # test conf
<ide> conf = '{"name": "John"}'
<ide> self.client.trigger_dag(dag_id=test_dag_id, conf=conf)
<del> mock.assert_called_once_with(run_id="manual__{0}".format(EXECDATE_ISO),
<add> mock.assert_called_once_with(run_id=run_id,
<ide> execution_date=EXECDATE_NOFRACTIONS,
<ide> state=State.RUNNING,
<ide> conf=json.loads(conf),
<ide><path>tests/api/common/experimental/test_delete_dag.py
<ide> from airflow.utils.dates import days_ago
<ide> from airflow.utils.session import create_session
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide>
<ide> DM = models.DagModel
<ide> DR = models.DagRun
<ide> def setup_dag_models(self, for_sub_dag=False):
<ide> test_date = days_ago(1)
<ide> with create_session() as session:
<ide> session.add(DM(dag_id=self.key, fileloc=self.dag_file_path, is_subdag=for_sub_dag))
<del> session.add(DR(dag_id=self.key))
<add> session.add(DR(dag_id=self.key, run_type=DagRunType.MANUAL.value))
<ide> session.add(TI(task=task,
<ide> execution_date=test_date,
<ide> state=State.SUCCESS))
<ide><path>tests/api/common/experimental/test_mark_tasks.py
<ide>
<ide> import time
<ide> import unittest
<del>from datetime import datetime, timedelta
<add>from datetime import timedelta
<ide>
<ide> import pytest
<ide>
<ide> def setUp(self):
<ide> clear_db_runs()
<ide> drs = _create_dagruns(self.dag1, self.execution_dates,
<ide> state=State.RUNNING,
<del> run_type=DagRunType.SCHEDULED.value)
<add> run_type=DagRunType.SCHEDULED)
<ide> for dr in drs:
<ide> dr.dag = self.dag1
<ide> dr.verify_integrity()
<ide>
<ide> drs = _create_dagruns(self.dag2,
<ide> [self.dag2.default_args['start_date']],
<ide> state=State.RUNNING,
<del> run_type=DagRunType.SCHEDULED.value)
<add> run_type=DagRunType.SCHEDULED)
<ide>
<ide> for dr in drs:
<ide> dr.dag = self.dag2
<ide> def setUp(self):
<ide> drs = _create_dagruns(self.dag3,
<ide> self.dag3_execution_dates,
<ide> state=State.SUCCESS,
<del> run_type=DagRunType.MANUAL.value)
<add> run_type=DagRunType.MANUAL)
<ide> for dr in drs:
<ide> dr.dag = self.dag3
<ide> dr.verify_integrity()
<ide> def _verify_task_instance_states(self, dag, date, state, session=None):
<ide>
<ide> def _create_test_dag_run(self, state, date):
<ide> return self.dag1.create_dagrun(
<del> run_id='manual__' + datetime.now().isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> state=state,
<ide> execution_date=date
<ide> )
<ide> def test_set_state_without_commit(self):
<ide> @provide_session
<ide> def test_set_state_with_multiple_dagruns(self, session=None):
<ide> self.dag2.create_dagrun(
<del> run_id='manual__' + datetime.now().isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> state=State.FAILED,
<ide> execution_date=self.execution_dates[0],
<ide> session=session
<ide> )
<ide> self.dag2.create_dagrun(
<del> run_id='manual__' + datetime.now().isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> state=State.FAILED,
<ide> execution_date=self.execution_dates[1],
<ide> session=session
<ide> )
<ide> self.dag2.create_dagrun(
<del> run_id='manual__' + datetime.now().isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> state=State.RUNNING,
<ide> execution_date=self.execution_dates[2],
<ide> session=session
<ide><path>tests/cli/commands/test_dag_command.py
<ide> def test_next_execution(self):
<ide> dag = self.dagbag.dags[dag_id]
<ide> # Create a DagRun for each DAG, to prepare for next step
<ide> dag.create_dagrun(
<del> run_id=DagRunType.MANUAL.value,
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=now,
<ide> start_date=now,
<ide> state=State.FAILED
<ide><path>tests/jobs/test_backfill_job.py
<ide> def test_sub_set_subdag(self):
<ide> drs = DagRun.find(dag_id=dag.dag_id, execution_date=DEFAULT_DATE)
<ide> dr = drs[0]
<ide>
<del> self.assertEqual(f"{DagRunType.BACKFILL_JOB.value}__{DEFAULT_DATE.isoformat()}", dr.run_id)
<add> self.assertEqual(DagRun.generate_run_id(DagRunType.BACKFILL_JOB, DEFAULT_DATE), dr.run_id)
<ide> for ti in dr.get_task_instances():
<ide> if ti.task_id == 'leave1' or ti.task_id == 'leave2':
<ide> self.assertEqual(State.SUCCESS, ti.state)
<ide> def test_update_counters(self):
<ide> job = BackfillJob(dag=dag)
<ide>
<ide> session = settings.Session()
<del> dr = dag.create_dagrun(run_id=DagRunType.SCHEDULED.value,
<add> dr = dag.create_dagrun(run_type=DagRunType.SCHEDULED,
<ide> state=State.RUNNING,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide><path>tests/jobs/test_scheduler_job.py
<ide> def test_execute_task_instances_backfill_tasks_wont_execute(self):
<ide> session = settings.Session()
<ide>
<ide> dr1 = dag_file_processor.create_dag_run(dag)
<del> dr1.run_id = f"{DagRunType.BACKFILL_JOB.value}__blah"
<add> dr1.run_type = DagRunType.BACKFILL_JOB.value
<ide> ti1 = TaskInstance(task1, dr1.execution_date)
<ide> ti1.refresh_from_db()
<ide> ti1.state = State.SCHEDULED
<ide> def test_find_executable_task_instances_backfill_nodagrun(self):
<ide>
<ide> dr1 = dag_file_processor.create_dag_run(dag)
<ide> dr2 = dag_file_processor.create_dag_run(dag)
<del> dr2.run_id = f"{DagRunType.BACKFILL_JOB.value}__asdf"
<add> dr2.run_type = DagRunType.BACKFILL_JOB.value
<ide>
<ide> ti_no_dagrun = TaskInstance(task1, DEFAULT_DATE - datetime.timedelta(days=1))
<ide> ti_backfill = TaskInstance(task1, dr2.execution_date)
<ide> def test_change_state_for_tis_without_dagrun(self):
<ide> dag3 = SerializedDAG.from_dict(SerializedDAG.to_dict(dag3))
<ide>
<ide> session = settings.Session()
<del> dr1 = dag1.create_dagrun(run_id=DagRunType.SCHEDULED.value,
<add> dr1 = dag1.create_dagrun(run_type=DagRunType.SCHEDULED,
<ide> state=State.RUNNING,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> session=session)
<ide>
<del> dr2 = dag2.create_dagrun(run_id=DagRunType.SCHEDULED.value,
<add> dr2 = dag2.create_dagrun(run_type=DagRunType.SCHEDULED,
<ide> state=State.RUNNING,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> def test_reset_state_for_orphaned_tasks(self):
<ide> dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag))
<ide>
<ide> dag.clear()
<del> dr = dag.create_dagrun(run_id=f"{DagRunType.SCHEDULED.value}__",
<add> dr = dag.create_dagrun(run_type=DagRunType.SCHEDULED,
<ide> state=State.RUNNING,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> session=session)
<del> dr2 = dag.create_dagrun(run_id=f"{DagRunType.BACKFILL_JOB.value}__",
<add> dr2 = dag.create_dagrun(run_type=DagRunType.BACKFILL_JOB,
<ide> state=State.RUNNING,
<ide> execution_date=DEFAULT_DATE + datetime.timedelta(1),
<ide> start_date=DEFAULT_DATE,
<ide> def test_scheduler_loop_should_change_state_for_tis_without_dagrun(self,
<ide>
<ide> # Create DAG run with FAILED state
<ide> dag.clear()
<del> dr = dag.create_dagrun(run_id=DagRunType.SCHEDULED.value,
<add> dr = dag.create_dagrun(run_type=DagRunType.SCHEDULED,
<ide> state=State.FAILED,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> def test_reset_orphaned_tasks_backfill_dag(self):
<ide> ti = dr1.get_task_instances(session=session)[0]
<ide> ti.state = State.SCHEDULED
<ide> dr1.state = State.RUNNING
<del> dr1.run_id = f"{DagRunType.BACKFILL_JOB.value}__sdfsfdfsd"
<add> dr1.run_type = DagRunType.BACKFILL_JOB.value
<ide> session.merge(ti)
<ide> session.merge(dr1)
<ide> session.commit()
<ide> def test_task_with_upstream_skip_process_task_instances():
<ide>
<ide> dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
<ide> dag.clear()
<del> dr = dag.create_dagrun(run_id=f"manual__{DEFAULT_DATE.isoformat()}",
<add> dr = dag.create_dagrun(run_type=DagRunType.MANUAL,
<ide> state=State.RUNNING,
<ide> execution_date=DEFAULT_DATE)
<ide> assert dr is not None
<ide><path>tests/models/test_dag.py
<ide> def test_schedule_dag_fake_scheduled_previous(self):
<ide> start_date=DEFAULT_DATE))
<ide>
<ide> dag_file_processor = DagFileProcessor(dag_ids=[], log=mock.MagicMock())
<del> run_id = f"{DagRunType.SCHEDULED.value}__{DEFAULT_DATE.isoformat()}"
<del> dag.create_dagrun(run_id=run_id,
<add> dag.create_dagrun(run_type=DagRunType.SCHEDULED,
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.SUCCESS,
<ide> external_trigger=True)
<ide> def test_set_dag_runs_state(self):
<ide> assert len(drs) == 3
<ide> assert all(dr.state == State.NONE for dr in drs)
<ide>
<add> def test_create_dagrun_run_id_is_generated(self):
<add> dag = DAG(dag_id="run_id_is_generated")
<add> dr = dag.create_dagrun(run_type=DagRunType.MANUAL, execution_date=DEFAULT_DATE, state=State.NONE)
<add> assert dr.run_id == f"{DagRunType.MANUAL.value}__{DEFAULT_DATE.isoformat()}"
<add>
<add> def test_create_dagrun_run_type_is_obtained_from_run_id(self):
<add> dag = DAG(dag_id="run_type_is_obtained_from_run_id")
<add> dr = dag.create_dagrun(run_id=f"{DagRunType.SCHEDULED.value}__", state=State.NONE)
<add> assert dr.run_type == DagRunType.SCHEDULED.value
<add>
<add> dr = dag.create_dagrun(run_id="custom_is_set_to_manual", state=State.NONE)
<add> assert dr.run_type == DagRunType.MANUAL.value
<add>
<ide>
<ide> class TestQueries(unittest.TestCase):
<ide>
<ide><path>tests/models/test_dagrun.py
<ide> def create_dag_run(self, dag,
<ide> if execution_date is None:
<ide> execution_date = now
<ide> if is_backfill:
<del> run_id = f"{DagRunType.BACKFILL_JOB.value}__{now.isoformat()}"
<add> run_type = DagRunType.BACKFILL_JOB
<ide> else:
<del> run_id = 'manual__' + now.isoformat()
<add> run_type = DagRunType.MANUAL
<ide> dag_run = dag.create_dagrun(
<del> run_id=run_id,
<add> run_type=run_type,
<ide> execution_date=execution_date,
<ide> start_date=now,
<ide> state=state,
<ide> def test_dagrun_find(self):
<ide> dag_id1 = "test_dagrun_find_externally_triggered"
<ide> dag_run = models.DagRun(
<ide> dag_id=dag_id1,
<del> run_id='manual__' + now.isoformat(),
<add> run_type=DagRunType.MANUAL.value,
<ide> execution_date=now,
<ide> start_date=now,
<ide> state=State.RUNNING,
<ide> def test_dagrun_find(self):
<ide> dag_id2 = "test_dagrun_find_not_externally_triggered"
<ide> dag_run = models.DagRun(
<ide> dag_id=dag_id2,
<del> run_id='manual__' + now.isoformat(),
<add> run_type=DagRunType.MANUAL.value,
<ide> execution_date=now,
<ide> start_date=now,
<ide> state=State.RUNNING,
<ide> def test_get_task_instance_on_empty_dagrun(self):
<ide> # don't want
<ide> dag_run = models.DagRun(
<ide> dag_id=dag.dag_id,
<del> run_id='manual__' + now.isoformat(),
<add> run_type=DagRunType.MANUAL.value,
<ide> execution_date=now,
<ide> start_date=now,
<ide> state=State.RUNNING,
<ide> def test_is_backfill(self):
<ide> dag = DAG(dag_id='test_is_backfill', start_date=DEFAULT_DATE)
<ide>
<ide> dagrun = self.create_dag_run(dag, execution_date=DEFAULT_DATE)
<del> dagrun.run_id = f"{DagRunType.BACKFILL_JOB.value}__sfddsffds"
<add> dagrun.run_type = DagRunType.BACKFILL_JOB.value
<ide>
<ide> dagrun2 = self.create_dag_run(
<ide> dag, execution_date=DEFAULT_DATE + datetime.timedelta(days=1))
<ide><path>tests/models/test_skipmixin.py
<ide> from airflow.operators.dummy_operator import DummyOperator
<ide> from airflow.utils import timezone
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide>
<ide> DEFAULT_DATE = timezone.datetime(2016, 1, 1)
<ide>
<ide> def test_skip(self, mock_now):
<ide> with dag:
<ide> tasks = [DummyOperator(task_id='task')]
<ide> dag_run = dag.create_dagrun(
<del> run_id='manual__' + now.isoformat(),
<add> run_type=DagRunType.MANUAL,
<add> execution_date=now,
<ide> state=State.FAILED,
<ide> )
<ide> SkipMixin().skip(
<ide><path>tests/models/test_taskinstance.py
<ide> from airflow.utils import timezone
<ide> from airflow.utils.session import create_session, provide_session
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide> from tests.models import DEFAULT_DATE
<ide> from tests.test_utils import db
<ide> from tests.test_utils.config import conf_vars
<ide> def _test_previous_dates_setup(schedule_interval: Union[str, datetime.timedelta,
<ide>
<ide> def get_test_ti(session, execution_date: pendulum.datetime, state: str) -> TI:
<ide> dag.create_dagrun(
<del> run_id='scheduled__{}'.format(execution_date.to_iso8601_string()),
<add> run_type=DagRunType.SCHEDULED,
<ide> state=state,
<ide> execution_date=execution_date,
<ide> start_date=pendulum.utcnow(),
<ide> def _env_var_check_callback(self):
<ide> self.assertEqual('hive_in_python_op', os.environ['AIRFLOW_CTX_TASK_ID'])
<ide> self.assertEqual(DEFAULT_DATE.isoformat(),
<ide> os.environ['AIRFLOW_CTX_EXECUTION_DATE'])
<del> self.assertEqual('manual__' + DEFAULT_DATE.isoformat(),
<add> self.assertEqual(DagRun.generate_run_id(DagRunType.MANUAL, DEFAULT_DATE),
<ide> os.environ['AIRFLOW_CTX_DAG_RUN_ID'])
<ide>
<ide> def test_echo_env_variables(self):
<ide> def test_echo_env_variables(self):
<ide> dag=dag,
<ide> python_callable=self._env_var_check_callback)
<ide> dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING,
<ide><path>tests/operators/test_bash.py
<ide> import mock
<ide>
<ide> from airflow.exceptions import AirflowException
<add>from airflow.models import DagRun
<ide> from airflow.models.dag import DAG
<ide> from airflow.operators.bash import BashOperator
<ide> from airflow.utils import timezone
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide>
<ide> DEFAULT_DATE = datetime(2016, 1, 1, tzinfo=timezone.utc)
<ide> END_DATE = datetime(2016, 1, 2, tzinfo=timezone.utc)
<ide> def test_echo_env_variables(self):
<ide> dagrun_timeout=timedelta(minutes=60))
<ide>
<ide> dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=now,
<ide> state=State.RUNNING,
<ide> def test_echo_env_variables(self):
<ide> self.assertIn('bash_op_test', output)
<ide> self.assertIn('echo_env_vars', output)
<ide> self.assertIn(DEFAULT_DATE.isoformat(), output)
<del> self.assertIn('manual__' + DEFAULT_DATE.isoformat(), output)
<add> self.assertIn(DagRun.generate_run_id(DagRunType.MANUAL, DEFAULT_DATE), output)
<ide>
<ide> def test_return_value(self):
<ide> bash_operator = BashOperator(
<ide><path>tests/operators/test_branch_operator.py
<ide> from airflow.utils import timezone
<ide> from airflow.utils.session import create_session
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide>
<ide> DEFAULT_DATE = timezone.datetime(2016, 1, 1)
<ide> INTERVAL = datetime.timedelta(hours=12)
<ide> def test_with_dag_run(self):
<ide> self.dag.clear()
<ide>
<ide> dagrun = self.dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_with_skip_in_branch_downstream_dependencies(self):
<ide> self.dag.clear()
<ide>
<ide> dagrun = self.dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide><path>tests/operators/test_latest_only_operator.py
<ide> from airflow.utils.session import create_session
<ide> from airflow.utils.state import State
<ide> from airflow.utils.trigger_rule import TriggerRule
<add>from airflow.utils.types import DagRunType
<ide>
<ide> DEFAULT_DATE = timezone.datetime(2016, 1, 1)
<ide> END_DATE = timezone.datetime(2016, 1, 2)
<ide> def test_skipping_non_latest(self):
<ide> downstream_task3.set_upstream(downstream_task)
<ide>
<ide> self.dag.create_dagrun(
<del> run_id="scheduled__1",
<add> run_type=DagRunType.SCHEDULED,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING,
<ide> )
<ide>
<ide> self.dag.create_dagrun(
<del> run_id="scheduled__2",
<add> run_type=DagRunType.SCHEDULED,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=timezone.datetime(2016, 1, 1, 12),
<ide> state=State.RUNNING,
<ide> )
<ide>
<ide> self.dag.create_dagrun(
<del> run_id="scheduled__3",
<add> run_type=DagRunType.SCHEDULED,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=END_DATE,
<ide> state=State.RUNNING,
<ide> def test_not_skipping_external(self):
<ide> downstream_task2.set_upstream(downstream_task)
<ide>
<ide> self.dag.create_dagrun(
<del> run_id="manual__1",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING,
<ide> external_trigger=True,
<ide> )
<ide>
<ide> self.dag.create_dagrun(
<del> run_id="manual__2",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=timezone.datetime(2016, 1, 1, 12),
<ide> state=State.RUNNING,
<ide> external_trigger=True,
<ide> )
<ide>
<ide> self.dag.create_dagrun(
<del> run_id="manual__3",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=END_DATE,
<ide> state=State.RUNNING,
<ide><path>tests/operators/test_python.py
<ide> from airflow.utils import timezone
<ide> from airflow.utils.session import create_session
<ide> from airflow.utils.state import State
<add>from airflow.utils.types import DagRunType
<ide>
<ide> DEFAULT_DATE = timezone.datetime(2016, 1, 1)
<ide> END_DATE = timezone.datetime(2016, 1, 2)
<ide> def test_python_callable_arguments_are_templatized(self):
<ide> dag=self.dag)
<ide>
<ide> self.dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_python_callable_keyword_arguments_are_templatized(self):
<ide> dag=self.dag)
<ide>
<ide> self.dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_python_operator_shallow_copy_attr(self):
<ide>
<ide> def test_conflicting_kwargs(self):
<ide> self.dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING,
<ide> def func(dag):
<ide>
<ide> def test_context_with_conflicting_op_args(self):
<ide> self.dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING,
<ide> def func(custom, dag):
<ide>
<ide> def test_context_with_kwargs(self):
<ide> self.dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING,
<ide> def test_with_dag_run(self):
<ide> self.dag.clear()
<ide>
<ide> dr = self.dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_with_skip_in_branch_downstream_dependencies(self):
<ide> self.dag.clear()
<ide>
<ide> dr = self.dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_with_skip_in_branch_downstream_dependencies2(self):
<ide> self.dag.clear()
<ide>
<ide> dr = self.dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_xcom_push(self):
<ide> self.dag.clear()
<ide>
<ide> dr = self.dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_clear_skipped_downstream_task(self):
<ide> self.dag.clear()
<ide>
<ide> dr = self.dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_with_dag_run(self):
<ide>
<ide> logging.error("Tasks %s", dag.tasks)
<ide> dr = dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_clear_skipped_downstream_task(self):
<ide> dag.clear()
<ide>
<ide> dr = dag.create_dagrun(
<del> run_id="manual__",
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide><path>tests/operators/test_subdag_operator.py
<ide> from airflow.utils.session import create_session
<ide> from airflow.utils.state import State
<ide> from airflow.utils.timezone import datetime
<add>from airflow.utils.types import DagRunType
<ide> from tests.test_utils.db import clear_db_runs
<ide>
<ide> DEFAULT_DATE = datetime(2016, 1, 1)
<ide> def test_execute_create_dagrun_wait_until_success(self):
<ide> subdag_task.post_execute(context={'execution_date': DEFAULT_DATE})
<ide>
<ide> subdag.create_dagrun.assert_called_once_with(
<del> run_id="scheduled__{}".format(DEFAULT_DATE.isoformat()),
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING,
<ide> external_trigger=True,
<ide> def test_rerun_failed_subdag(self):
<ide> session.commit()
<ide>
<ide> sub_dagrun = subdag.create_dagrun(
<del> run_id="scheduled__{}".format(DEFAULT_DATE.isoformat()),
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.FAILED,
<ide> external_trigger=True,
<ide><path>tests/sensors/test_base_sensor.py
<ide> from airflow.utils import timezone
<ide> from airflow.utils.state import State
<ide> from airflow.utils.timezone import datetime
<add>from airflow.utils.types import DagRunType
<ide>
<ide> DEFAULT_DATE = datetime(2015, 1, 1)
<ide> TEST_DAG_ID = 'unit_test_dag'
<ide> def setUp(self):
<ide>
<ide> def _make_dag_run(self):
<ide> return self.dag.create_dagrun(
<del> run_id='manual__',
<add> run_type=DagRunType.MANUAL,
<ide> start_date=timezone.utcnow(),
<ide> execution_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide><path>tests/sensors/test_python.py
<ide> from airflow.sensors.python import PythonSensor
<ide> from airflow.utils.state import State
<ide> from airflow.utils.timezone import datetime
<add>from airflow.utils.types import DagRunType
<ide> from tests.operators.test_python import Call, TestPythonBase, build_recording_function
<ide>
<ide> DEFAULT_DATE = datetime(2015, 1, 1)
<ide> def test_python_callable_arguments_are_templatized(self):
<ide> dag=self.dag)
<ide>
<ide> self.dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide> def test_python_callable_keyword_arguments_are_templatized(self):
<ide> dag=self.dag)
<ide>
<ide> self.dag.create_dagrun(
<del> run_id='manual__' + DEFAULT_DATE.isoformat(),
<add> run_type=DagRunType.MANUAL,
<ide> execution_date=DEFAULT_DATE,
<ide> start_date=DEFAULT_DATE,
<ide> state=State.RUNNING
<ide><path>tests/test_core.py
<ide> def test_externally_triggered_dagrun(self):
<ide> start_date=DEFAULT_DATE)
<ide> task = DummyOperator(task_id='test_externally_triggered_dag_context',
<ide> dag=dag)
<del> run_id = f"{DagRunType.SCHEDULED.value}__{execution_date.isoformat()}"
<del> dag.create_dagrun(run_id=run_id,
<add> dag.create_dagrun(run_type=DagRunType.SCHEDULED,
<ide> execution_date=execution_date,
<ide> state=State.RUNNING,
<ide> external_trigger=True)
<ide><path>tests/ti_deps/deps/test_dagrun_id_dep.py
<ide> def test_dagrun_id_is_backfill(self):
<ide> Task instances whose dagrun ID is a backfill dagrun ID should fail this dep.
<ide> """
<ide> dagrun = DagRun()
<del> dagrun.run_id = f"{DagRunType.BACKFILL_JOB.value}__something"
<add> dagrun.run_id = "anything"
<add> dagrun.run_type = DagRunType.BACKFILL_JOB.value
<ide> ti = Mock(get_dagrun=Mock(return_value=dagrun))
<ide> self.assertFalse(DagrunIdDep().is_met(ti=ti))
<ide>
<ide> def test_dagrun_id_is_not_backfill(self):
<ide> Task instances whose dagrun ID is not a backfill dagrun ID should pass this dep.
<ide> """
<ide> dagrun = DagRun()
<del> dagrun.run_id = 'notbackfill_something'
<add> dagrun.run_type = 'custom_type'
<ide> ti = Mock(get_dagrun=Mock(return_value=dagrun))
<ide> self.assertTrue(DagrunIdDep().is_met(ti=ti))
<ide>
<ide><path>tests/www/test_views.py
<ide> def test_index(self):
<ide>
<ide> class TestAirflowBaseViews(TestBase):
<ide> EXAMPLE_DAG_DEFAULT_DATE = dates.days_ago(2)
<del> run_id = f"test_{DagRunType.SCHEDULED.value}__{EXAMPLE_DAG_DEFAULT_DATE.isoformat()}"
<ide>
<ide> @classmethod
<ide> def setUpClass(cls):
<ide> def prepare_dagruns(self):
<ide> self.xcom_dag = self.dagbag.dags['example_xcom']
<ide>
<ide> self.bash_dagrun = self.bash_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide>
<ide> self.sub_dagrun = self.sub_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide>
<ide> self.xcom_dagrun = self.xcom_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide> def test_view_uses_existing_dagbag(self, endpoint, mock_get_dag):
<ide> def test_escape_in_tree_view(self, test_str, seralized_test_str):
<ide> dag = self.dagbag.dags['test_tree_view']
<ide> dag.create_dagrun(
<del> run_id=self.run_id,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<add> run_type=DagRunType.MANUAL,
<ide> state=State.RUNNING,
<ide> conf={"abc": test_str},
<ide> )
<ide> def test_escape_in_tree_view(self, test_str, seralized_test_str):
<ide> def test_dag_details_trigger_origin_tree_view(self):
<ide> dag = self.dagbag.dags['test_tree_view']
<ide> dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide> def test_dag_details_trigger_origin_tree_view(self):
<ide> def test_dag_details_trigger_origin_graph_view(self):
<ide> dag = self.dagbag.dags['test_graph_view']
<ide> dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide> class TestDagACLView(TestBase):
<ide> """
<ide> next_year = dt.now().year + 1
<ide> default_date = timezone.datetime(next_year, 6, 1)
<del> run_id = f"test_{DagRunType.SCHEDULED.value}__{default_date.isoformat()}"
<ide>
<ide> @classmethod
<ide> def setUpClass(cls):
<ide> def prepare_dagruns(self):
<ide> self.sub_dag = dagbag.dags['example_subdag_operator']
<ide>
<ide> self.bash_dagrun = self.bash_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.default_date,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide>
<ide> self.sub_dagrun = self.sub_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.default_date,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide> def test_trigger_dag_button(self):
<ide>
<ide> run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first()
<ide> self.assertIsNotNone(run)
<del> self.assertIn("manual__", run.run_id)
<add> self.assertIn(DagRunType.MANUAL.value, run.run_id)
<add> self.assertEqual(run.run_type, DagRunType.MANUAL.value)
<ide>
<ide> @pytest.mark.xfail(condition=using_mysql, reason="This test might be flaky on mysql")
<ide> def test_trigger_dag_conf(self):
<ide> def test_trigger_dag_conf(self):
<ide>
<ide> run = self.session.query(DR).filter(DR.dag_id == test_dag_id).first()
<ide> self.assertIsNotNone(run)
<del> self.assertIn("manual__", run.run_id)
<add> self.assertIn(DagRunType.MANUAL.value, run.run_id)
<add> self.assertEqual(run.run_type, DagRunType.MANUAL.value)
<ide> self.assertEqual(run.conf, conf_dict)
<ide>
<ide> def test_trigger_dag_conf_malformed(self):
<ide> def test_list_dagrun_includes_conf(self):
<ide>
<ide> class TestDecorators(TestBase):
<ide> EXAMPLE_DAG_DEFAULT_DATE = dates.days_ago(2)
<del> run_id = f"test_{DagRunType.SCHEDULED.value}__{EXAMPLE_DAG_DEFAULT_DATE.isoformat()}"
<ide>
<ide> @classmethod
<ide> def setUpClass(cls):
<ide> def prepare_dagruns(self):
<ide> self.xcom_dag = dagbag.dags['example_xcom']
<ide>
<ide> self.bash_dagrun = self.bash_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide>
<ide> self.sub_dagrun = self.sub_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING)
<ide>
<ide> self.xcom_dagrun = self.xcom_dag.create_dagrun(
<del> run_id=self.run_id,
<add> run_type=DagRunType.SCHEDULED,
<ide> execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
<ide> start_date=timezone.utcnow(),
<ide> state=State.RUNNING) | 35 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.