| content_type | main_lang | message | sha | patch | file_count |
|---|---|---|---|---|---|
Text
|
Text
|
remove broken link
|
72a08c11c67a3f664873480f6b1e8526cc46af3b
|
<ide><path>docs/NativeComponentsIOS.md
<ide> RCT_EXPORT_MODULE()
<ide> }
<ide> ```
<ide>
<del>You can see we're setting the manager as the delegate for every view that it vends, then in the delegate method `-mapView:regionDidChangeAnimated:` the region is combined with the `reactTag` target to make an event that is dispatched to the corresponding React component instance in your application via `sendInputEventWithName:body:`. The event name `@"topChange"` maps to the `onChange` callback prop in JavaScript (mappings are [here](https://github.com/facebook/react-native/blob/master/React/Modules/RCTUIManager.m#L1165)). This callback is invoked with the raw event, which we typically process in the wrapper component to make a simpler API:
<add>You can see we're setting the manager as the delegate for every view that it vends, then in the delegate method `-mapView:regionDidChangeAnimated:` the region is combined with the `reactTag` target to make an event that is dispatched to the corresponding React component instance in your application via `sendInputEventWithName:body:`. The event name `@"topChange"` maps to the `onChange` callback prop in JavaScript. This callback is invoked with the raw event, which we typically process in the wrapper component to make a simpler API:
<ide>
<ide> ```javascript
<ide> // MapView.js
| 1
|
Go
|
Go
|
enable a unit test on windows
|
9484c3bd81fa8aab46b7268c5c3bfeb8e14aa369
|
<ide><path>pkg/fileutils/fileutils_test.go
<ide> import (
<ide> "runtime"
<ide> "strings"
<ide> "testing"
<add>
<add> "fmt"
<add> "github.com/stretchr/testify/assert"
<add> "github.com/stretchr/testify/require"
<ide> )
<ide>
<ide> // CopyFile with invalid src
<ide> func TestMatchesWithMalformedPatterns(t *testing.T) {
<ide> }
<ide> }
<ide>
<del>// Test lots of variants of patterns & strings
<add>type matchesTestCase struct {
<add> pattern string
<add> text string
<add> pass bool
<add>}
<add>
<ide> func TestMatches(t *testing.T) {
<del> // TODO Windows: Port this test
<del> if runtime.GOOS == "windows" {
<del> t.Skip("Needs porting to Windows")
<del> }
<del> tests := []struct {
<del> pattern string
<del> text string
<del> pass bool
<del> }{
<add> tests := []matchesTestCase{
<ide> {"**", "file", true},
<ide> {"**", "file/", true},
<ide> {"**/", "file", true}, // weird one
<ide> func TestMatches(t *testing.T) {
<ide> {"abc.def", "abcZdef", false},
<ide> {"abc?def", "abcZdef", true},
<ide> {"abc?def", "abcdef", false},
<del> {"a\\*b", "a*b", true},
<del> {"a\\", "a", false},
<del> {"a\\", "a\\", false},
<ide> {"a\\\\", "a\\", true},
<ide> {"**/foo/bar", "foo/bar", true},
<ide> {"**/foo/bar", "dir/foo/bar", true},
<ide> func TestMatches(t *testing.T) {
<ide> {"**/.foo", "bar.foo", false},
<ide> }
<ide>
<add> if runtime.GOOS != "windows" {
<add> tests = append(tests, []matchesTestCase{
<add> {"a\\*b", "a*b", true},
<add> {"a\\", "a", false},
<add> {"a\\", "a\\", false},
<add> }...)
<add> }
<add>
<ide> for _, test := range tests {
<add> desc := fmt.Sprintf("pattern=%q text=%q", test.pattern, test.text)
<ide> pm, err := NewPatternMatcher([]string{test.pattern})
<del> if err != nil {
<del> t.Fatalf("invalid pattern %s", test.pattern)
<del> }
<add> require.NoError(t, err, desc)
<ide> res, _ := pm.Matches(test.text)
<del> if res != test.pass {
<del> t.Fatalf("Failed: %v - res:%v", test, res)
<del> }
<add> assert.Equal(t, test.pass, res, desc)
<ide> }
<ide> }
<ide>
| 1
|
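The fileutils patch above shows a common pattern for enabling a table-driven test on Windows: rather than skipping the whole test, the platform-sensitive rows (backslash-escape patterns, which only act as escapes on POSIX) are split out and appended conditionally, and the bare `t.Fatalf` checks become `require`/`assert` calls that carry a per-case description. Below is a minimal sketch of the same shape in Python with pytest, using the standard library's `fnmatchcase` as a stand-in matcher; the case values are illustrative, not the original Go table.

```python
import sys
from fnmatch import fnmatchcase

import pytest

# Rows that behave the same on every platform.
CASES = [
    ("*", "file", True),
    ("abc?def", "abcZdef", True),
    ("abc?def", "abcdef", False),
]

# Mirror of the Go refactor: append platform-sensitive rows instead of
# skipping the whole test on Windows. (Under fnmatchcase this row is in
# fact portable; it only marks where the POSIX-only escape cases of the
# real matcher would go.)
if sys.platform != "win32":
    CASES += [
        ("a[\\]b", "a\\b", True),
    ]


@pytest.mark.parametrize("pattern,text,expected", CASES)
def test_matches(pattern, text, expected):
    # pytest reports the failing parameters itself, which is what the
    # require/assert calls with a desc string buy in the Go version.
    assert fnmatchcase(text, pattern) is expected
```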
Go
|
Go
|
remove doc that doesn't apply to journald
|
f1412f29423def166b0d77e162eec867afc56a8c
|
<ide><path>daemon/logger/journald/journald.go
<ide> func init() {
<ide> }
<ide>
<ide> // New creates a journald logger using the configuration passed in on
<del>// the context. Supported context configuration variables are
<del>// syslog-address, syslog-facility, & syslog-tag.
<add>// the context.
<ide> func New(ctx logger.Context) (logger.Logger, error) {
<ide> if !journal.Enabled() {
<ide> return nil, fmt.Errorf("journald is not enabled on this host")
| 1
|
Javascript
|
Javascript
|
fix typo in test description
|
cb9447b2c3579e1f00b7d9318ca8ba7f42ff7beb
|
<ide><path>test/unit/api.js
<ide> test('should be able to initialize player twice on the same tag using string ref
<ide> player.dispose();
<ide> });
<ide>
<del>test('videojs.players should be availble after minification', function() {
<add>test('videojs.players should be available after minification', function() {
<ide> var videoTag = PlayerTest.makeTag();
<ide> var id = videoTag.id;
<ide>
| 1
|
Javascript
|
Javascript
|
use transclusion function passed in to link
|
08793a690abe3eda40deae10f8a0a117779bdbd9
|
<ide><path>src/ng/directive/ngTransclude.js
<ide> *
<ide> */
<ide> var ngTranscludeDirective = ngDirective({
<del> controller: ['$element', '$transclude', function($element, $transclude) {
<add> link: function($scope, $element, $attrs, controller, $transclude) {
<ide> if (!$transclude) {
<ide> throw minErr('ngTransclude')('orphan',
<del> 'Illegal use of ngTransclude directive in the template! ' +
<del> 'No parent directive that requires a transclusion found. ' +
<del> 'Element: {0}',
<del> startingTag($element));
<add> 'Illegal use of ngTransclude directive in the template! ' +
<add> 'No parent directive that requires a transclusion found. ' +
<add> 'Element: {0}',
<add> startingTag($element));
<ide> }
<del>
<del> // remember the transclusion fn but call it during linking so that we don't process transclusion before directives on
<del> // the parent element even when the transclusion replaces the current element. (we can't use priority here because
<del> // that applies only to compile fns and not controllers
<del> this.$transclude = $transclude;
<del> }],
<del>
<del> link: function($scope, $element, $attrs, controller) {
<del> controller.$transclude(function(clone) {
<add>
<add> $transclude(function(clone) {
<ide> $element.empty();
<ide> $element.append(clone);
<ide> });
| 1
|
Python
|
Python
|
make __all__ immutable.
|
d59518f5fb68957b2d179aa572af6f58cd02de40
|
<ide><path>celery/__init__.py
<ide>
<ide> # -eof meta-
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Celery', 'bugreport', 'shared_task', 'task',
<ide> 'current_app', 'current_task', 'maybe_signature',
<ide> 'chain', 'chord', 'chunks', 'group', 'signature',
<ide> 'xmap', 'xstarmap', 'uuid',
<del>]
<add>)
<ide>
<ide> VERSION_BANNER = '{0} ({1})'.format(__version__, SERIES)
<ide>
<ide><path>celery/__main__.py
<ide> import sys
<ide> from . import maybe_patch_concurrency
<ide>
<del>__all__ = ['main']
<add>__all__ = ('main',)
<ide>
<ide>
<ide> def main():
<ide><path>celery/_state.py
<ide> from celery.local import Proxy
<ide> from celery.utils.threads import LocalStack
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'set_default_app', 'get_current_app', 'get_current_task',
<ide> 'get_current_worker_task', 'current_app', 'current_task',
<ide> 'connect_on_app_finalize',
<del>]
<add>)
<ide>
<ide> #: Global default app used when no current app.
<ide> default_app = None
<ide><path>celery/app/__init__.py
<ide> from .base import Celery
<ide> from .utils import AppPickler
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Celery', 'AppPickler', 'app_or_default', 'default_app',
<ide> 'bugreport', 'enable_trace', 'disable_trace', 'shared_task',
<ide> 'push_current_task', 'pop_current_task',
<del>]
<add>)
<ide>
<ide> #: Proxy always returning the app set as default.
<ide> default_app = Proxy(lambda: _state.default_app)
<ide><path>celery/app/amqp.py
<ide>
<ide> from . import routes as _routes
<ide>
<del>__all__ = ['AMQP', 'Queues', 'task_message']
<add>__all__ = ('AMQP', 'Queues', 'task_message')
<ide>
<ide> PY3 = sys.version_info[0] == 3
<ide>
<ide><path>celery/app/annotations.py
<ide> _first_match = firstmethod('annotate')
<ide> _first_match_any = firstmethod('annotate_any')
<ide>
<del>__all__ = ['MapAnnotation', 'prepare', 'resolve_all']
<add>__all__ = ('MapAnnotation', 'prepare', 'resolve_all')
<ide>
<ide>
<ide> class MapAnnotation(dict):
<ide><path>celery/app/backends.py
<ide> from celery.five import reraise
<ide> from celery.utils.imports import load_extension_class_names, symbol_by_name
<ide>
<del>__all__ = ['by_name', 'by_url']
<add>__all__ = ('by_name', 'by_url')
<ide>
<ide> UNKNOWN_BACKEND = """
<ide> Unknown result backend: {0!r}. Did you spell that correctly? ({1!r})
<ide><path>celery/app/base.py
<ide> # Load all builtin tasks
<ide> from . import builtins # noqa
<ide>
<del>__all__ = ['Celery']
<add>__all__ = ('Celery',)
<ide>
<ide> logger = get_logger(__name__)
<ide>
<ide><path>celery/app/builtins.py
<ide> from celery._state import connect_on_app_finalize
<ide> from celery.utils.log import get_logger
<ide>
<del>__all__ = []
<add>__all__ = ()
<ide> logger = get_logger(__name__)
<ide>
<ide>
<ide><path>celery/app/control.py
<ide> from celery.exceptions import DuplicateNodenameWarning
<ide> from celery.utils.text import pluralize
<ide>
<del>__all__ = ['Inspect', 'Control', 'flatten_reply']
<add>__all__ = ('Inspect', 'Control', 'flatten_reply')
<ide>
<ide> W_DUPNODE = """\
<ide> Received multiple replies from node {0}: {1}.
<ide><path>celery/app/defaults.py
<ide> from celery.utils.functional import memoize
<ide> from celery.utils.serialization import strtobool
<ide>
<del>__all__ = ['Option', 'NAMESPACES', 'flatten', 'find']
<add>__all__ = ('Option', 'NAMESPACES', 'flatten', 'find')
<ide>
<ide> is_jython = sys.platform.startswith('java')
<ide> is_pypy = hasattr(sys, 'pypy_version_info')
<ide><path>celery/app/log.py
<ide> from celery.utils.nodenames import node_format
<ide> from celery.utils.term import colored
<ide>
<del>__all__ = ['TaskFormatter', 'Logging']
<add>__all__ = ('TaskFormatter', 'Logging')
<ide>
<ide> MP_LOG = os.environ.get('MP_LOG', False)
<ide>
<ide><path>celery/app/registry.py
<ide> from celery.exceptions import NotRegistered, InvalidTaskError
<ide> from celery.five import items
<ide>
<del>__all__ = ['TaskRegistry']
<add>__all__ = ('TaskRegistry',)
<ide>
<ide>
<ide> class TaskRegistry(dict):
<ide><path>celery/app/routes.py
<ide> from celery.utils.functional import maybe_evaluate, mlazy
<ide> from celery.utils.imports import symbol_by_name
<ide>
<del>__all__ = ['MapRoute', 'Router', 'prepare']
<add>__all__ = ('MapRoute', 'Router', 'prepare')
<ide>
<ide>
<ide> def glob_to_re(glob, quote=string.punctuation.replace('*', '')):
<ide><path>celery/app/task.py
<ide> from .registry import _unpickle_task_v2
<ide> from .utils import appstr
<ide>
<del>__all__ = ['Context', 'Task']
<add>__all__ = ('Context', 'Task')
<ide>
<ide> #: extracts attributes related to publishing a message from an object.
<ide> extract_exec_options = mattrgetter(
<ide><path>celery/app/trace.py
<ide> get_pickleable_exception, get_pickled_exception, get_pickleable_etype,
<ide> )
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'TraceInfo', 'build_tracer', 'trace_task',
<ide> 'setup_worker_optimizations', 'reset_worker_optimizations',
<del>]
<add>)
<ide>
<ide> logger = get_logger(__name__)
<ide>
<ide><path>celery/app/utils.py
<ide> DEFAULTS, SETTING_KEYS, find,
<ide> )
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Settings', 'appstr', 'bugreport',
<ide> 'filter_hidden_settings', 'find_app',
<del>]
<add>)
<ide>
<ide> #: Format used to generate bug-report information.
<ide> BUGREPORT_INFO = """
<ide><path>celery/apps/beat.py
<ide> from celery.utils.log import LOG_LEVELS, get_logger
<ide> from celery.utils.time import humanize_seconds
<ide>
<del>__all__ = ['Beat']
<add>__all__ = ('Beat',)
<ide>
<ide> STARTUP_INFO_FMT = """
<ide> LocalTime -> {timestamp}
<ide><path>celery/apps/multi.py
<ide> )
<ide> from celery.utils.saferepr import saferepr
<ide>
<del>__all__ = ['Cluster', 'Node']
<add>__all__ = ('Cluster', 'Node')
<ide>
<ide> CELERY_EXE = 'celery'
<ide>
<ide><path>celery/apps/worker.py
<ide> from celery.utils.text import pluralize
<ide> from celery.worker import WorkController
<ide>
<del>__all__ = ['Worker']
<add>__all__ = ('Worker',)
<ide>
<ide> logger = get_logger(__name__)
<ide> is_jython = sys.platform.startswith('java')
<ide><path>celery/backends/amqp.py
<ide>
<ide> from .base import BaseBackend
<ide>
<del>__all__ = ['BacklogLimitExceeded', 'AMQPBackend']
<add>__all__ = ('BacklogLimitExceeded', 'AMQPBackend')
<ide>
<ide> logger = get_logger(__name__)
<ide>
<ide><path>celery/backends/async.py
<ide> from celery.five import Empty, monotonic
<ide> from celery.utils.threads import THREAD_TIMEOUT_MAX
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'AsyncBackendMixin', 'BaseResultConsumer', 'Drainer',
<ide> 'register_drainer',
<del>]
<add>)
<ide>
<ide> drainers = {}
<ide>
<ide><path>celery/backends/base.py
<ide> create_exception_cls,
<ide> )
<ide>
<del>__all__ = ['BaseBackend', 'KeyValueStoreBackend', 'DisabledBackend']
<add>__all__ = ('BaseBackend', 'KeyValueStoreBackend', 'DisabledBackend')
<ide>
<ide> EXCEPTION_ABLE_CODECS = frozenset({'pickle'})
<ide> PY3 = sys.version_info >= (3, 0)
<ide><path>celery/backends/cache.py
<ide> from celery.utils.functional import LRUCache
<ide> from .base import KeyValueStoreBackend
<ide>
<del>__all__ = ['CacheBackend']
<add>__all__ = ('CacheBackend',)
<ide>
<ide> _imp = [None]
<ide>
<ide><path>celery/backends/cassandra.py
<ide> cassandra = None # noqa
<ide>
<ide>
<del>__all__ = ['CassandraBackend']
<add>__all__ = ('CassandraBackend',)
<ide>
<ide> logger = get_logger(__name__)
<ide>
<ide><path>celery/backends/consul.py
<ide>
<ide> logger = get_logger(__name__)
<ide>
<del>__all__ = ['ConsulBackend']
<add>__all__ = ('ConsulBackend',)
<ide>
<ide> CONSUL_MISSING = """\
<ide> You need to install the python-consul library in order to use \
<ide><path>celery/backends/couchbase.py
<ide> except ImportError:
<ide> Couchbase = Connection = NotFoundError = None # noqa
<ide>
<del>__all__ = ['CouchbaseBackend']
<add>__all__ = ('CouchbaseBackend',)
<ide>
<ide>
<ide> class CouchbaseBackend(KeyValueStoreBackend):
<ide><path>celery/backends/couchdb.py
<ide> except ImportError:
<ide> pycouchdb = None # noqa
<ide>
<del>__all__ = ['CouchBackend']
<add>__all__ = ('CouchBackend',)
<ide>
<ide> ERR_LIB_MISSING = """\
<ide> You need to install the pycouchdb library to use the CouchDB result backend\
<ide><path>celery/backends/database/__init__.py
<ide>
<ide> logger = logging.getLogger(__name__)
<ide>
<del>__all__ = ['DatabaseBackend']
<add>__all__ = ('DatabaseBackend',)
<ide>
<ide>
<ide> @contextmanager
<ide><path>celery/backends/database/models.py
<ide> from celery.five import python_2_unicode_compatible
<ide> from .session import ResultModelBase
<ide>
<del>__all__ = ['Task', 'TaskSet']
<add>__all__ = ('Task', 'TaskSet')
<ide>
<ide>
<ide> @python_2_unicode_compatible
<ide><path>celery/backends/database/session.py
<ide>
<ide> ResultModelBase = declarative_base()
<ide>
<del>__all__ = ['SessionManager']
<add>__all__ = ('SessionManager',)
<ide>
<ide>
<ide> def _after_fork_cleanup_session(session):
<ide><path>celery/backends/dynamodb.py
<ide> except ImportError: # pragma: no cover
<ide> boto3 = ClientError = None # noqa
<ide>
<del>__all__ = ['DynamoDBBackend']
<add>__all__ = ('DynamoDBBackend',)
<ide>
<ide>
<ide> # Helper class that describes a DynamoDB attribute
<ide><path>celery/backends/elasticsearch.py
<ide> except ImportError:
<ide> elasticsearch = None # noqa
<ide>
<del>__all__ = ['ElasticsearchBackend']
<add>__all__ = ('ElasticsearchBackend',)
<ide>
<ide> E_LIB_MISSING = """\
<ide> You need to install the elasticsearch library to use the Elasticsearch \
<ide><path>celery/backends/mongodb.py
<ide> class InvalidDocument(Exception): # noqa
<ide> pass
<ide>
<del>__all__ = ['MongoBackend']
<add>__all__ = ('MongoBackend',)
<ide>
<ide> BINARY_CODECS = frozenset(['pickle', 'msgpack'])
<ide>
<ide><path>celery/backends/redis.py
<ide> redis = None # noqa
<ide> get_redis_error_classes = None # noqa
<ide>
<del>__all__ = ['RedisBackend']
<add>__all__ = ('RedisBackend',)
<ide>
<ide> E_REDIS_MISSING = """
<ide> You need to install the redis library in order to use \
<ide><path>celery/backends/riak.py
<ide> except ImportError: # pragma: no cover
<ide> riak = RiakClient = last_written_resolver = None # noqa
<ide>
<del>__all__ = ['RiakBackend']
<add>__all__ = ('RiakBackend',)
<ide>
<ide> E_BUCKET_NAME = """\
<ide> Riak bucket names must be composed of ASCII characters only, not: {0!r}\
<ide><path>celery/backends/rpc.py
<ide> from . import base
<ide> from .async import AsyncBackendMixin, BaseResultConsumer
<ide>
<del>__all__ = ['BacklogLimitExceeded', 'RPCBackend']
<add>__all__ = ('BacklogLimitExceeded', 'RPCBackend')
<ide>
<ide> E_NO_CHORD_SUPPORT = """
<ide> The "rpc" result backend does not support chords!
<ide><path>celery/beat.py
<ide> from .utils.time import humanize_seconds
<ide> from .utils.log import get_logger, iter_open_logger_fds
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'SchedulingError', 'ScheduleEntry', 'Scheduler',
<ide> 'PersistentScheduler', 'Service', 'EmbeddedService',
<del>]
<add>)
<ide>
<ide> event_t = namedtuple('event_t', ('time', 'priority', 'entry'))
<ide>
<ide><path>celery/bin/__init__.py
<ide> from __future__ import absolute_import, unicode_literals
<ide> from .base import Option
<ide>
<del>__all__ = ['Option']
<add>__all__ = ('Option',)
<ide><path>celery/bin/amqp.py
<ide> from celery.five import string_t
<ide> from celery.utils.serialization import strtobool
<ide>
<del>__all__ = ['AMQPAdmin', 'AMQShell', 'Spec', 'amqp']
<add>__all__ = ('AMQPAdmin', 'AMQShell', 'Spec', 'amqp')
<ide>
<ide> # Map to coerce strings to other types.
<ide> COERCE = {bool: strtobool}
<ide><path>celery/bin/base.py
<ide> except NameError: # pragma: no cover
<ide> pass
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Error', 'UsageError', 'Extensions', 'Command', 'Option', 'daemon_options',
<del>]
<add>)
<ide>
<ide> # always enable DeprecationWarnings, so our users can see them.
<ide> for warning in (CDeprecationWarning, CPendingDeprecationWarning):
<ide><path>celery/bin/beat.py
<ide> from celery.platforms import detached, maybe_drop_privileges
<ide> from celery.bin.base import Command, daemon_options
<ide>
<del>__all__ = ['beat']
<add>__all__ = ('beat',)
<ide>
<ide> HELP = __doc__
<ide>
<ide><path>celery/bin/celery.py
<ide> from celery.bin.worker import worker
<ide> from celery.bin.upgrade import upgrade
<ide>
<del>__all__ = ['CeleryCommand', 'main']
<add>__all__ = ('CeleryCommand', 'main')
<ide>
<ide> HELP = """
<ide> ---- -- - - ---- Commands- -------------- --- ------------
<ide><path>celery/bin/celeryd_detach.py
<ide> from celery.utils.nodenames import default_nodename, node_format
<ide> from celery.bin.base import daemon_options
<ide>
<del>__all__ = ['detached_celeryd', 'detach']
<add>__all__ = ('detached_celeryd', 'detach')
<add>
<ide> logger = get_logger(__name__)
<ide> C_FAKEFORK = os.environ.get('C_FAKEFORK')
<ide>
<ide><path>celery/bin/events.py
<ide> from celery.platforms import detached, set_process_title, strargv
<ide> from celery.bin.base import Command, daemon_options
<ide>
<del>__all__ = ['events']
<add>__all__ = ('events',)
<ide>
<ide> HELP = __doc__
<ide>
<ide><path>celery/bin/graph.py
<ide> from celery.utils.graph import DependencyGraph, GraphFormatter
<ide> from .base import Command
<ide>
<del>__all__ = ['graph']
<add>__all__ = ('graph',)
<ide>
<ide>
<ide> class graph(Command):
<ide><path>celery/bin/logtool.py
<ide> from fileinput import FileInput
<ide> from .base import Command
<ide>
<del>__all__ = ['logtool']
<add>__all__ = ('logtool',)
<ide>
<ide> RE_LOG_START = re.compile(r'^\[\d\d\d\d\-\d\d-\d\d ')
<ide> RE_TASK_RECEIVED = re.compile(r'.+?\] Received')
<ide><path>celery/bin/multi.py
<ide> from celery.utils import term
<ide> from celery.utils.text import pluralize
<ide>
<del>__all__ = ['MultiTool']
<add>__all__ = ('MultiTool',)
<ide>
<ide> USAGE = """\
<ide> usage: {prog_name} start <node1 node2 nodeN|range> [worker options]
<ide><path>celery/bin/worker.py
<ide> from celery.utils.log import LOG_LEVELS, mlevel
<ide> from celery.utils.nodenames import default_nodename
<ide>
<del>__all__ = ['worker', 'main']
<add>__all__ = ('worker', 'main')
<ide>
<ide> HELP = __doc__
<ide>
<ide><path>celery/bootsteps.py
<ide> else:
<ide> IGNORE_ERRORS = (GreenletExit,)
<ide>
<del>__all__ = ['Blueprint', 'Step', 'StartStopStep', 'ConsumerStep']
<add>__all__ = ('Blueprint', 'Step', 'StartStopStep', 'ConsumerStep')
<ide>
<ide> #: States
<ide> RUN = 0x1
<ide><path>celery/canvas.py
<ide> from celery.utils.objects import getitem_property
<ide> from celery.utils.text import truncate, remove_repeating_from_task
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Signature', 'chain', 'xmap', 'xstarmap', 'chunks',
<ide> 'group', 'chord', 'signature', 'maybe_signature',
<del>]
<add>)
<ide>
<ide> PY3 = sys.version_info[0] == 3
<ide>
<ide><path>celery/concurrency/__init__.py
<ide> # too much (e.g., for eventlet patching)
<ide> from kombu.utils.imports import symbol_by_name
<ide>
<del>__all__ = ['get_implementation']
<add>__all__ = ('get_implementation',)
<ide>
<ide> ALIASES = {
<ide> 'prefork': 'celery.concurrency.prefork:TaskPool',
<ide><path>celery/concurrency/asynpool.py
<ide> def __read__(fd, buf, size, read=os.read): # noqa
<ide> def unpack_from(fmt, iobuf, unpack=struct.unpack): # noqa
<ide> return unpack(fmt, iobuf.getvalue()) # <-- BytesIO
<ide>
<del>__all__ = ['AsynPool']
<add>__all__ = ('AsynPool',)
<ide>
<ide> logger = get_logger(__name__)
<ide> error, debug = logger.error, logger.debug
<ide><path>celery/concurrency/base.py
<ide> from celery.utils.text import truncate
<ide> from celery.utils.log import get_logger
<ide>
<del>__all__ = ['BasePool', 'apply_target']
<add>__all__ = ('BasePool', 'apply_target')
<ide>
<ide> logger = get_logger('celery.pool')
<ide>
<ide><path>celery/concurrency/eventlet.py
<ide> import sys
<ide> from kombu.five import monotonic
<ide>
<del>__all__ = ['TaskPool']
<add>__all__ = ('TaskPool',)
<ide>
<ide> W_RACE = """\
<ide> Celery module with %s imported before eventlet patched\
<ide><path>celery/concurrency/gevent.py
<ide> except ImportError: # pragma: no cover
<ide> Timeout = None # noqa
<ide>
<del>__all__ = ['TaskPool']
<add>__all__ = ('TaskPool',)
<ide>
<ide> # pylint: disable=redefined-outer-name
<ide> # We cache globals and attribute lookups, so disable this warning.
<ide><path>celery/concurrency/prefork.py
<ide>
<ide> from .asynpool import AsynPool
<ide>
<del>__all__ = ['TaskPool', 'process_initializer', 'process_destructor']
<add>__all__ = ('TaskPool', 'process_initializer', 'process_destructor')
<ide>
<ide> #: List of signals to reset when a child process starts.
<ide> WORKER_SIGRESET = {
<ide><path>celery/concurrency/solo.py
<ide> import os
<ide> from .base import BasePool, apply_target
<ide>
<del>__all__ = ['TaskPool']
<add>__all__ = ('TaskPool',)
<ide>
<ide>
<ide> class TaskPool(BasePool):
<ide><path>celery/contrib/abortable.py
<ide> def myview(request):
<ide> from celery import Task
<ide> from celery.result import AsyncResult
<ide>
<del>__all__ = ['AbortableAsyncResult', 'AbortableTask']
<add>__all__ = ('AbortableAsyncResult', 'AbortableTask')
<ide>
<ide>
<ide> """
<ide><path>celery/contrib/migrate.py
<ide> from celery.utils.nodenames import worker_direct
<ide> from celery.utils.text import str_to_list
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'StopFiltering', 'State', 'republish', 'migrate_task',
<ide> 'migrate_tasks', 'move', 'task_id_eq', 'task_id_in',
<ide> 'start_filter', 'move_task_by_id', 'move_by_idmap',
<ide> 'move_by_taskmap', 'move_direct', 'move_direct_by_id',
<del>]
<add>)
<ide>
<ide> MOVING_PROGRESS_FMT = """\
<ide> Moving task {state.filtered}/{state.strtotal}: \
<ide><path>celery/contrib/rdb.py
<ide> def add(x, y):
<ide> from billiard.process import current_process
<ide> from celery.five import range
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'CELERY_RDB_HOST', 'CELERY_RDB_PORT', 'DEFAULT_PORT',
<ide> 'Rdb', 'debugger', 'set_trace',
<del>]
<add>)
<ide>
<ide> DEFAULT_PORT = 6899
<ide>
<ide><path>celery/events/__init__.py
<ide> from .event import Event, event_exchange, get_exchange, group_from
<ide> from .receiver import EventReceiver
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Event', 'EventDispatcher', 'EventReceiver',
<ide> 'event_exchange', 'get_exchange', 'group_from',
<del>]
<add>)
<ide><path>celery/events/cursesmon.py
<ide> from celery.five import items, values
<ide> from celery.utils.text import abbr, abbrtask
<ide>
<del>__all__ = ['CursesMonitor', 'evtop']
<add>__all__ = ('CursesMonitor', 'evtop')
<ide>
<ide> BORDER_SPACING = 4
<ide> LEFT_BORDER_OFFSET = 3
<ide><path>celery/events/dispatcher.py
<ide>
<ide> from .event import Event, get_exchange, group_from
<ide>
<del>__all__ = ['EventDispatcher']
<add>__all__ = ('EventDispatcher',)
<ide>
<ide>
<ide> class EventDispatcher(object):
<ide><path>celery/events/dumper.py
<ide> from celery.utils.functional import LRUCache
<ide> from celery.utils.time import humanize_seconds
<ide>
<del>__all__ = ['Dumper', 'evdump']
<add>__all__ = ('Dumper', 'evdump')
<ide>
<ide> TASK_NAMES = LRUCache(limit=0xFFF)
<ide>
<ide><path>celery/events/event.py
<ide> from copy import copy
<ide> from kombu import Exchange
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Event', 'event_exchange', 'get_exchange', 'group_from',
<del>]
<add>)
<ide>
<ide> #: Exchange used to send events on.
<ide> #: Note: Use :func:`get_exchange` instead, as the type of
<ide><path>celery/events/receiver.py
<ide>
<ide> from .event import get_exchange
<ide>
<del>__all__ = ['EventReceiver']
<add>__all__ = ('EventReceiver',)
<ide>
<ide> CLIENT_CLOCK_SKEW = -1
<ide>
<ide><path>celery/events/snapshot.py
<ide> from celery.utils.log import get_logger
<ide> from celery.utils.time import rate
<ide>
<del>__all__ = ['Polaroid', 'evcam']
<add>__all__ = ('Polaroid', 'evcam')
<ide>
<ide> logger = get_logger('celery.evcam')
<ide>
<ide><path>celery/events/state.py
<ide> from celery.utils.functional import LRUCache, memoize, pass1
<ide> from celery.utils.log import get_logger
<ide>
<del>__all__ = ['Worker', 'Task', 'State', 'heartbeat_expires']
<add>__all__ = ('Worker', 'Task', 'State', 'heartbeat_expires')
<ide>
<ide> # pylint: disable=redefined-outer-name
<ide> # We cache globals and attribute lookups, so disable this warning.
<ide><path>celery/exceptions.py
<ide> )
<ide> from kombu.exceptions import OperationalError
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> # Warnings
<ide> 'CeleryWarning',
<ide> 'AlwaysEagerIgnored', 'DuplicateNodenameWarning',
<ide>
<ide> # Worker shutdown semi-predicates (inherits from SystemExit).
<ide> 'WorkerShutdown', 'WorkerTerminate',
<del>]
<add>)
<ide>
<ide> UNREGISTERED_FMT = """\
<ide> Task of kind {0} never registered, please make sure it's imported.\
<ide><path>celery/fixups/django.py
<ide> from celery import signals
<ide> from celery.exceptions import FixupWarning, ImproperlyConfigured
<ide>
<del>__all__ = ['DjangoFixup', 'fixup']
<add>__all__ = ('DjangoFixup', 'fixup')
<ide>
<ide> ERR_NOT_INSTALLED = """\
<ide> Environment variable DJANGO_SETTINGS_MODULE is defined
<ide><path>celery/loaders/__init__.py
<ide> from __future__ import absolute_import, unicode_literals
<ide> from celery.utils.imports import symbol_by_name, import_from_cwd
<ide>
<del>__all__ = ['get_loader_cls']
<add>__all__ = ('get_loader_cls',)
<ide>
<ide> LOADER_ALIASES = {
<ide> 'app': 'celery.loaders.app:AppLoader',
<ide><path>celery/loaders/app.py
<ide> from __future__ import absolute_import, unicode_literals
<ide> from .base import BaseLoader
<ide>
<del>__all__ = ['AppLoader']
<add>__all__ = ('AppLoader',)
<ide>
<ide>
<ide> class AppLoader(BaseLoader):
<ide><path>celery/loaders/base.py
<ide> import_from_cwd, symbol_by_name, NotAPackage, find_module,
<ide> )
<ide>
<del>__all__ = ['BaseLoader']
<add>__all__ = ('BaseLoader',)
<ide>
<ide> _RACE_PROTECTION = False
<ide>
<ide><path>celery/loaders/default.py
<ide> from celery.utils.serialization import strtobool
<ide> from .base import BaseLoader
<ide>
<del>__all__ = ['Loader', 'DEFAULT_CONFIG_MODULE']
<add>__all__ = ('Loader', 'DEFAULT_CONFIG_MODULE')
<ide>
<ide> DEFAULT_CONFIG_MODULE = 'celeryconfig'
<ide>
<ide><path>celery/local.py
<ide> from types import ModuleType
<ide> from .five import bytes_if_py2, items, string, string_t
<ide>
<del>__all__ = ['Proxy', 'PromiseProxy', 'try_import', 'maybe_evaluate']
<add>__all__ = ('Proxy', 'PromiseProxy', 'try_import', 'maybe_evaluate')
<ide>
<ide> __module__ = __name__ # used by Proxy class body
<ide>
<ide><path>celery/platforms.py
<ide> grp = try_import('grp')
<ide> mputil = try_import('multiprocessing.util')
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'EX_OK', 'EX_FAILURE', 'EX_UNAVAILABLE', 'EX_USAGE', 'SYSTEM',
<ide> 'IS_macOS', 'IS_WINDOWS', 'SIGMAP', 'pyimplementation', 'LockFailed',
<ide> 'get_fdmax', 'Pidfile', 'create_pidlock', 'close_open_fds',
<ide> 'DaemonContext', 'detached', 'parse_uid', 'parse_gid', 'setgroups',
<ide> 'initgroups', 'setgid', 'setuid', 'maybe_drop_privileges', 'signals',
<ide> 'signal_name', 'set_process_title', 'set_mp_process_title',
<ide> 'get_errno_name', 'ignore_errno', 'fd_by_path', 'isatty',
<del>]
<add>)
<ide>
<ide> # exitcodes
<ide> EX_OK = getattr(os, 'EX_OK', 0)
<ide><path>celery/result.py
<ide> except ImportError:
<ide> tblib = None
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'ResultBase', 'AsyncResult', 'ResultSet',
<ide> 'GroupResult', 'EagerResult', 'result_from_tuple',
<del>]
<add>)
<ide>
<ide> E_WOULDBLOCK = """\
<ide> Never call result.get() within a task!
<ide><path>celery/schedules.py
<ide> timezone, maybe_make_aware, ffwd, localize
<ide> )
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'ParseException', 'schedule', 'crontab', 'crontab_parser',
<ide> 'maybe_schedule', 'solar',
<del>]
<add>)
<ide>
<ide> schedstate = namedtuple('schedstate', ('is_due', 'next'))
<ide>
<ide><path>celery/security/__init__.py
<ide> Please see the configuration reference for more information.
<ide> """
<ide>
<del>__all__ = ['setup_security']
<add>__all__ = ('setup_security',)
<ide>
<ide>
<ide> def setup_security(allowed_serializers=None, key=None, cert=None, store=None,
<ide><path>celery/security/certificate.py
<ide> from celery.five import values
<ide> from .utils import crypto, reraise_errors
<ide>
<del>__all__ = ['Certificate', 'CertStore', 'FSCertStore']
<add>__all__ = ('Certificate', 'CertStore', 'FSCertStore')
<ide>
<ide>
<ide> class Certificate(object):
<ide><path>celery/security/key.py
<ide> from kombu.utils.encoding import ensure_bytes
<ide> from .utils import crypto, reraise_errors
<ide>
<del>__all__ = ['PrivateKey']
<add>__all__ = ('PrivateKey',)
<ide>
<ide>
<ide> class PrivateKey(object):
<ide><path>celery/security/serialization.py
<ide> from .key import PrivateKey
<ide> from .utils import reraise_errors
<ide>
<del>__all__ = ['SecureSerializer', 'register_auth']
<add>__all__ = ('SecureSerializer', 'register_auth')
<ide>
<ide>
<ide> class SecureSerializer(object):
<ide><path>celery/security/utils.py
<ide> except ImportError: # pragma: no cover
<ide> crypto = None # noqa
<ide>
<del>__all__ = ['reraise_errors']
<add>__all__ = ('reraise_errors',)
<ide>
<ide>
<ide> @contextmanager
<ide><path>celery/signals.py
<ide> from __future__ import absolute_import, unicode_literals
<ide> from .utils.dispatch import Signal
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'before_task_publish', 'after_task_publish',
<ide> 'task_prerun', 'task_postrun', 'task_success',
<ide> 'task_retry', 'task_failure', 'task_revoked', 'celeryd_init',
<ide> 'beat_init', 'beat_embedded_init', 'heartbeat_sent',
<ide> 'eventlet_pool_started', 'eventlet_pool_preshutdown',
<ide> 'eventlet_pool_postshutdown', 'eventlet_pool_apply',
<del>]
<add>)
<ide>
<ide> # - Task
<ide> before_task_publish = Signal(
<ide><path>celery/states.py
<ide> """
<ide> from __future__ import absolute_import, unicode_literals
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'PENDING', 'RECEIVED', 'STARTED', 'SUCCESS', 'FAILURE',
<ide> 'REVOKED', 'RETRY', 'IGNORED', 'READY_STATES', 'UNREADY_STATES',
<ide> 'EXCEPTION_STATES', 'PROPAGATE_STATES', 'precedence', 'state',
<del>]
<add>)
<ide>
<ide> #: State precedence.
<ide> #: None represents the precedence of an unknown state.
<ide><path>celery/task/__init__.py
<ide> from celery._state import current_app, current_task as current
<ide> from celery.local import LazyModule, Proxy, recreate_module
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'BaseTask', 'Task', 'PeriodicTask', 'task', 'periodic_task',
<ide> 'group', 'chord', 'subtask',
<del>]
<add>)
<ide>
<ide>
<ide> STATICA_HACK = True
<ide><path>celery/task/base.py
<ide> from celery.schedules import maybe_schedule
<ide> from celery.utils.log import get_task_logger
<ide>
<del>__all__ = ['Context', 'Task', 'TaskType', 'PeriodicTask', 'task']
<add>__all__ = ('Context', 'Task', 'TaskType', 'PeriodicTask', 'task')
<ide>
<ide> #: list of methods that must be classmethods in the old API.
<ide> _COMPAT_CLASSMETHODS = (
<ide><path>celery/utils/__init__.py
<ide> from .functional import memoize # noqa
<ide> from .nodenames import worker_direct, nodename, nodesplit
<ide>
<del>__all__ = ['worker_direct', 'gen_task_name', 'nodename', 'nodesplit',
<del> 'cached_property', 'uuid']
<add>__all__ = ('worker_direct', 'gen_task_name', 'nodename', 'nodesplit',
<add> 'cached_property', 'uuid')
<ide>
<ide> PY3 = sys.version_info[0] == 3
<ide>
<ide><path>celery/utils/abstract.py
<ide> from collections import Callable
<ide> from celery.five import with_metaclass
<ide>
<del>__all__ = ['CallableTask', 'CallableSignature']
<add>__all__ = ('CallableTask', 'CallableSignature')
<ide>
<ide>
<ide> def _hasattr(C, attr):
<ide><path>celery/utils/collections.py
<ide> class LazyObject(object): # noqa
<ide> pass
<ide> LazySettings = LazyObject # noqa
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'AttributeDictMixin', 'AttributeDict', 'BufferMap', 'ChainMap',
<ide> 'ConfigurationView', 'DictAttribute', 'Evictable',
<ide> 'LimitedSet', 'Messagebuffer', 'OrderedDict',
<ide> 'force_mapping', 'lpmerge',
<del>]
<add>)
<ide>
<ide> PY3 = sys.version_info[0] >= 3
<ide>
<ide><path>celery/utils/debug.py
<ide> except ImportError:
<ide> Process = None # noqa
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'blockdetection', 'sample_mem', 'memdump', 'sample',
<ide> 'humanbytes', 'mem_rss', 'ps', 'cry',
<del>]
<add>)
<ide>
<ide> UNITS = (
<ide> (2 ** 40.0, 'TB'),
<ide><path>celery/utils/deprecated.py
<ide> from vine.utils import wraps
<ide> from celery.exceptions import CPendingDeprecationWarning, CDeprecationWarning
<ide>
<del>__all__ = ['Callable', 'Property', 'warn']
<add>__all__ = ('Callable', 'Property', 'warn')
<ide>
<ide>
<ide> PENDING_DEPRECATION_FMT = """
<ide><path>celery/utils/dispatch/__init__.py
<ide> from __future__ import absolute_import, unicode_literals
<ide> from .signal import Signal
<ide>
<del>__all__ = ['Signal']
<add>__all__ = ('Signal',)
<ide><path>celery/utils/dispatch/signal.py
<ide> except ImportError:
<ide> from .weakref_backports import WeakMethod # noqa
<ide>
<del>__all__ = ['Signal']
<add>__all__ = ('Signal',)
<ide>
<ide> PY3 = sys.version_info[0] >= 3
<ide> logger = get_logger(__name__)
<ide><path>celery/utils/functional.py
<ide>
<ide> from celery.five import UserList, getfullargspec, range
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'LRUCache', 'is_list', 'maybe_list', 'memoize', 'mlazy', 'noop',
<ide> 'first', 'firstmethod', 'chunks', 'padlist', 'mattrgetter', 'uniq',
<ide> 'regen', 'dictfilter', 'lazy', 'maybe_evaluate', 'head_from_fun',
<ide> 'maybe', 'fun_accepts_kwargs',
<del>]
<add>)
<ide>
<ide> IS_PY3 = sys.version_info[0] == 3
<ide>
<ide><path>celery/utils/graph.py
<ide> from kombu.utils.encoding import safe_str, bytes_to_str
<ide> from celery.five import items, python_2_unicode_compatible
<ide>
<del>__all__ = ['DOT', 'CycleError', 'DependencyGraph', 'GraphFormatter']
<add>__all__ = ('DOT', 'CycleError', 'DependencyGraph', 'GraphFormatter')
<ide>
<ide>
<ide> class DOT:
<ide><path>celery/utils/imports.py
<ide> #: task to be that of ``App.main``.
<ide> MP_MAIN_FILE = os.environ.get('MP_MAIN_FILE')
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'NotAPackage', 'qualname', 'instantiate', 'symbol_by_name',
<ide> 'cwd_in_path', 'find_module', 'import_from_cwd',
<ide> 'reload_from_cwd', 'module_file', 'gen_task_name',
<del>]
<add>)
<ide>
<ide>
<ide> class NotAPackage(Exception):
<ide><path>celery/utils/iso8601.py
<ide> from datetime import datetime
<ide> from pytz import FixedOffset
<ide>
<del>__all__ = ['parse_iso8601']
<add>__all__ = ('parse_iso8601',)
<ide>
<ide> # Adapted from http://delete.me.uk/2005/03/iso8601.html
<ide> ISO8601_REGEX = re.compile(
<ide><path>celery/utils/log.py
<ide>
<ide> from .term import colored
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'ColorFormatter', 'LoggingProxy', 'base_logger',
<ide> 'set_in_sighandler', 'in_sighandler', 'get_logger',
<ide> 'get_task_logger', 'mlevel',
<ide> 'get_multiprocessing_logger', 'reset_multiprocessing_logger',
<del>]
<add>)
<ide>
<ide> _process_aware = False
<ide> _in_sighandler = False
<ide><path>celery/utils/nodenames.py
<ide>
<ide> gethostname = memoize(1, Cache=dict)(socket.gethostname)
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'worker_direct', 'gethostname', 'nodename',
<ide> 'anon_nodename', 'nodesplit', 'default_nodename',
<ide> 'node_format', 'host_format',
<del>]
<add>)
<ide>
<ide>
<ide> def worker_direct(hostname):
<ide><path>celery/utils/objects.py
<ide> from __future__ import absolute_import, unicode_literals
<ide> from functools import reduce
<ide>
<del>__all__ = ['Bunch', 'FallbackContext', 'getitem_property', 'mro_lookup']
<add>__all__ = ('Bunch', 'FallbackContext', 'getitem_property', 'mro_lookup')
<ide>
<ide>
<ide> class Bunch(object):
<ide><path>celery/utils/saferepr.py
<ide>
<ide> from .text import truncate
<ide>
<del>__all__ = ['saferepr', 'reprstream']
<add>__all__ = ('saferepr', 'reprstream')
<ide>
<ide> # pylint: disable=redefined-outer-name
<ide> # We cache globals and attribute lookups, so disable this warning.
<ide><path>celery/utils/serialization.py
<ide>
<ide> PY33 = sys.version_info >= (3, 3)
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'UnpickleableExceptionWrapper', 'subclass_exception',
<ide> 'find_pickleable_exception', 'create_exception_cls',
<ide> 'get_pickleable_exception', 'get_pickleable_etype',
<ide> 'get_pickled_exception', 'strtobool',
<del>]
<add>)
<ide>
<ide> #: List of base classes we probably don't want to reduce to.
<ide> try:
<ide><path>celery/utils/sysinfo.py
<ide> from math import ceil
<ide> from kombu.utils.objects import cached_property
<ide>
<del>__all__ = ['load_average', 'df']
<add>__all__ = ('load_average', 'df')
<ide>
<ide>
<ide> if hasattr(os, 'getloadavg'):
<ide><path>celery/utils/term.py
<ide> from celery.five import python_2_unicode_compatible, string
<ide> from celery.platforms import isatty
<ide>
<del>__all__ = ['colored']
<add>__all__ = ('colored',)
<ide>
<ide> BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
<ide> OP_SEQ = '\033[%dm'
<ide><path>celery/utils/text.py
<ide> from pprint import pformat
<ide> from celery.five import string_t
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'abbr', 'abbrtask', 'dedent', 'dedent_initial',
<ide> 'ensure_newlines', 'ensure_sep',
<ide> 'fill_paragraphs', 'indent', 'join',
<ide> 'pluralize', 'pretty', 'str_to_list', 'simple_format', 'truncate',
<del>]
<add>)
<ide>
<ide> UNKNOWN_SIMPLE_FORMAT_KEY = """
<ide> Unknown format %{0} in string {1!r}.
<ide><path>celery/utils/threads.py
<ide> from dummy_thread import get_ident # noqa
<ide>
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'bgThread', 'Local', 'LocalStack', 'LocalManager',
<ide> 'get_ident', 'default_socket_timeout',
<del>]
<add>)
<ide>
<ide> USE_FAST_LOCALS = os.environ.get('USE_FAST_LOCALS')
<ide> PY3 = sys.version_info[0] == 3
<ide><path>celery/utils/time.py
<ide> from .iso8601 import parse_iso8601
<ide> from .text import pluralize
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'LocalTimezone', 'timezone', 'maybe_timedelta',
<ide> 'delta_resolution', 'remaining', 'rate', 'weekday',
<ide> 'humanize_seconds', 'maybe_iso8601', 'is_naive',
<ide> 'make_aware', 'localize', 'to_utc', 'maybe_make_aware',
<ide> 'ffwd', 'utcoffset', 'adjust_timestamp',
<ide> 'get_exponential_backoff_interval',
<del>]
<add>)
<ide>
<ide> PY3 = sys.version_info[0] == 3
<ide> PY33 = sys.version_info >= (3, 3)
<ide><path>celery/utils/timer2.py
<ide>
<ide> TIMER_DEBUG = os.environ.get('TIMER_DEBUG')
<ide>
<del>__all__ = ['Entry', 'Schedule', 'Timer', 'to_timestamp']
<add>__all__ = ('Entry', 'Schedule', 'Timer', 'to_timestamp')
<ide>
<ide>
<ide> class Timer(threading.Thread):
<ide><path>celery/worker/__init__.py
<ide> from __future__ import absolute_import, unicode_literals
<ide> from .worker import WorkController
<ide>
<del>__all__ = ['WorkController']
<add>__all__ = ('WorkController',)
<ide><path>celery/worker/autoscale.py
<ide> from . import state
<ide> from .components import Pool
<ide>
<del>__all__ = ['Autoscaler', 'WorkerComponent']
<add>__all__ = ('Autoscaler', 'WorkerComponent')
<ide>
<ide> logger = get_logger(__name__)
<ide> debug, info, error = logger.debug, logger.info, logger.error
<ide><path>celery/worker/components.py
<ide> from celery.platforms import IS_WINDOWS
<ide> from celery.utils.log import worker_logger as logger
<ide>
<del>__all__ = ['Timer', 'Hub', 'Pool', 'Beat', 'StateDB', 'Consumer']
<add>__all__ = ('Timer', 'Hub', 'Pool', 'Beat', 'StateDB', 'Consumer')
<ide>
<ide> GREEN_POOLS = {'eventlet', 'gevent'}
<ide>
<ide><path>celery/worker/consumer/__init__.py
<ide> from .mingle import Mingle
<ide> from .tasks import Tasks
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'Consumer', 'Agent', 'Connection', 'Control',
<ide> 'Events', 'Gossip', 'Heart', 'Mingle', 'Tasks',
<del>]
<add>)
<ide><path>celery/worker/consumer/agent.py
<ide> from celery import bootsteps
<ide> from .connection import Connection
<ide>
<del>__all__ = ['Agent']
<add>__all__ = ('Agent',)
<ide>
<ide>
<ide> class Agent(bootsteps.StartStopStep):
<ide><path>celery/worker/consumer/connection.py
<ide> from celery import bootsteps
<ide> from celery.utils.log import get_logger
<ide>
<del>__all__ = ['Connection']
<add>__all__ = ('Connection',)
<ide>
<ide> logger = get_logger(__name__)
<ide> info = logger.info
<ide><path>celery/worker/consumer/consumer.py
<ide> task_reserved, maybe_shutdown, reserved_requests,
<ide> )
<ide>
<del>__all__ = ['Consumer', 'Evloop', 'dump_body']
<add>__all__ = ('Consumer', 'Evloop', 'dump_body')
<ide>
<ide> CLOSE = bootsteps.CLOSE
<ide> TERMINATE = bootsteps.TERMINATE
<ide><path>celery/worker/consumer/control.py
<ide> from celery.worker import pidbox
<ide> from .tasks import Tasks
<ide>
<del>__all__ = ['Control']
<add>__all__ = ('Control',)
<ide>
<ide> logger = get_logger(__name__)
<ide>
<ide><path>celery/worker/consumer/events.py
<ide> from celery import bootsteps
<ide> from .connection import Connection
<ide>
<del>__all__ = ['Events']
<add>__all__ = ('Events',)
<ide>
<ide>
<ide> class Events(bootsteps.StartStopStep):
<ide><path>celery/worker/consumer/gossip.py
<ide>
<ide> from .mingle import Mingle
<ide>
<del>__all__ = ['Gossip']
<add>__all__ = ('Gossip',)
<add>
<ide> logger = get_logger(__name__)
<ide> debug, info = logger.debug, logger.info
<ide>
<ide><path>celery/worker/consumer/heart.py
<ide> from celery.worker import heartbeat
<ide> from .events import Events
<ide>
<del>__all__ = ['Heart']
<add>__all__ = ('Heart',)
<ide>
<ide>
<ide> class Heart(bootsteps.StartStopStep):
<ide><path>celery/worker/consumer/mingle.py
<ide> from celery.utils.log import get_logger
<ide> from .events import Events
<ide>
<del>__all__ = ['Mingle']
<add>__all__ = ('Mingle',)
<ide>
<ide> logger = get_logger(__name__)
<ide> debug, info, exception = logger.debug, logger.info, logger.exception
<ide><path>celery/worker/consumer/tasks.py
<ide> from celery.utils.log import get_logger
<ide> from .mingle import Mingle
<ide>
<del>__all__ = ['Tasks']
<add>__all__ = ('Tasks',)
<add>
<ide> logger = get_logger(__name__)
<ide> debug = logger.debug
<ide>
<ide><path>celery/worker/control.py
<ide> from . import state as worker_state
<ide> from .request import Request
<ide>
<del>__all__ = ['Panel']
<add>__all__ = ('Panel',)
<ide>
<ide> DEFAULT_TASK_INFO_ITEMS = ('exchange', 'routing_key', 'rate_limit')
<ide> logger = get_logger(__name__)
<ide><path>celery/worker/heartbeat.py
<ide> from celery.utils.sysinfo import load_average
<ide> from .state import SOFTWARE_INFO, active_requests, all_total_count
<ide>
<del>__all__ = ['Heart']
<add>__all__ = ('Heart',)
<ide>
<ide>
<ide> class Heart(object):
<ide><path>celery/worker/loops.py
<ide> from celery.utils.log import get_logger
<ide> from . import state
<ide>
<del>__all__ = ['asynloop', 'synloop']
<add>__all__ = ('asynloop', 'synloop')
<ide>
<ide> # pylint: disable=redefined-outer-name
<ide> # We cache globals and attribute lookups, so disable this warning.
<ide><path>celery/worker/pidbox.py
<ide> from celery.utils.log import get_logger
<ide> from . import control
<ide>
<del>__all__ = ['Pidbox', 'gPidbox']
<add>__all__ = ('Pidbox', 'gPidbox')
<ide>
<ide> logger = get_logger(__name__)
<ide> debug, error, info = logger.debug, logger.error, logger.info
<ide><path>celery/worker/request.py
<ide>
<ide> from . import state
<ide>
<del>__all__ = ['Request']
<add>__all__ = ('Request',)
<ide>
<ide> # pylint: disable=redefined-outer-name
<ide> # We cache globals and attribute lookups, so disable this warning.
<ide><path>celery/worker/state.py
<ide> from celery.five import Counter
<ide> from celery.utils.collections import LimitedSet
<ide>
<del>__all__ = [
<add>__all__ = (
<ide> 'SOFTWARE_INFO', 'reserved_requests', 'active_requests',
<ide> 'total_count', 'revoked', 'task_reserved', 'maybe_shutdown',
<ide> 'task_accepted', 'task_ready', 'Persistent',
<del>]
<add>)
<ide>
<ide> #: Worker software/platform information.
<ide> SOFTWARE_INFO = {
<ide><path>celery/worker/strategy.py
<ide> from .request import create_request_cls
<ide> from .state import task_reserved
<ide>
<del>__all__ = ['default']
<add>__all__ = ('default',)
<ide>
<ide> logger = get_logger(__name__)
<ide>
<ide><path>celery/worker/worker.py
<ide>
<ide> from . import state
<ide>
<del>__all__ = ['WorkController']
<add>__all__ = ('WorkController',)
<ide>
<ide> #: Default socket timeout at shutdown.
<ide> SHUTDOWN_SOCKET_TIMEOUT = 5.0
<ide><path>examples/django/proj/__init__.py
<ide> # Django starts so that shared_task will use this app.
<ide> from .celery import app as celery_app
<ide>
<del>__all__ = ['celery_app']
<add>__all__ = ('celery_app',)
<ide><path>t/unit/concurrency/test_eventlet.py
<ide>
<ide>
<ide> @skip.if_pypy()
<add>@skip.unless_module('eventlet')
<ide> class EventletCase:
<ide>
<ide> def setup(self):
<ide><path>t/unit/concurrency/test_gevent.py
<ide> from __future__ import absolute_import, unicode_literals
<del>from case import Mock, skip
<add>from case import Mock
<ide> from celery.concurrency.gevent import (
<ide> Timer,
<ide> TaskPool,
<ide> )
<ide>
<ide>
<del>@skip.if_pypy()
<ide> class test_gevent_patch:
<ide>
<ide> def test_is_patched(self):
<ide> def test_is_patched(self):
<ide> patch_all.assert_called()
<ide>
<ide>
<del>@skip.if_pypy()
<ide> class test_Timer:
<ide>
<ide> def setup(self):
<ide> def test_sched(self):
<ide> g.cancel()
<ide>
<ide>
<del>@skip.if_pypy()
<ide> class test_TaskPool:
<ide>
<ide> def setup(self):
<ide> def test_pool(self):
<ide> assert x.num_processes == 3
<ide>
<ide>
<del>@skip.if_pypy()
<ide> class test_apply_timeout:
<ide>
<ide> def test_apply_timeout(self):
<ide><path>t/unit/conftest.py
<ide> # that installs the pytest plugin into the setuptools registry.
<ide> from celery.contrib.pytest import (
<ide> celery_app, celery_enable_logging, depends_on_current_app,
<add> celery_parameters
<ide> )
<ide> from celery.contrib.testing.app import Trap, TestApp
<ide> from celery.contrib.testing.mocks import (
<ide> TaskMessage, TaskMessage1, task_message_from_sig,
<ide> )
<ide>
<ide> # Tricks flake8 into silencing redefining fixtures warnings.
<del>__all__ = [
<add>__all__ = (
<ide> 'celery_app', 'celery_enable_logging', 'depends_on_current_app',
<del>]
<add> 'celery_parameters'
<add>)
<ide>
<ide> try:
<ide> WindowsError = WindowsError # noqa
| 135
|
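The celery patch is mechanical but has a real payoff: a tuple `__all__` is immutable, so no later import-time code can accidentally grow or rewrite a module's public API, while `from module import *` behaves exactly as before. A quick standalone illustration (the `debug_helper` name is hypothetical):

```python
__all__ = ('Celery', 'chain', 'group')  # a tuple, as in the patch

# With a list, stray code could silently mutate the export list:
#     __all__.append('debug_helper')   # succeeds on a list
# On a tuple the same mistake fails fast:
try:
    __all__.append('debug_helper')  # hypothetical plugin registration gone wrong
except AttributeError as exc:
    print(exc)  # 'tuple' object has no attribute 'append'

# Star-imports only iterate over __all__, so a tuple needs no other change.
```

Note that one-element exports keep the trailing comma, as in `__all__ = ('main',)` above; without it the parentheses would yield a plain string rather than a tuple.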
Java
|
Java
|
update todos in annotatedelementutils[tests]
|
91e46cf2ad55fc8ec3bda087d5fe9c02aa1ec45b
|
<ide><path>spring-core/src/main/java/org/springframework/core/annotation/AnnotatedElementUtils.java
<ide> private static <T> T processWithFindSemantics(AnnotatedElement element, String a
<ide> }
<ide>
<ide> try {
<del> // TODO [SPR-12738] Resolve equivalent parameterized
<del> // method (i.e., bridged method) in superclass.
<ide> Method equivalentMethod = clazz.getDeclaredMethod(method.getName(),
<ide> method.getParameterTypes());
<ide> Method resolvedEquivalentMethod = BridgeMethodResolver.findBridgedMethod(equivalentMethod);
<ide><path>spring-core/src/test/java/org/springframework/core/annotation/AnnotatedElementUtilsTests.java
<ide> public void findAnnotationAttributesInheritedFromAbstractMethod() throws NoSuchM
<ide> }
<ide>
<ide> /**
<del> * TODO [SPR-12738] Enable test.
<del> *
<ide> * <p>{@code AbstractClassWithInheritedAnnotation} declares {@code handleParameterized(T)}; whereas,
<ide> * {@code ConcreteClassWithInheritedAnnotation} declares {@code handleParameterized(String)}.
<ide> *
<del> * <p>Thus, this test fails because {@code AnnotatedElementUtils.processWithFindSemantics()}
<del> * does not resolve an equivalent method for {@code handleParameterized(String)}
<del> * in {@code AbstractClassWithInheritedAnnotation}.
<add> * <p>As of Spring 4.2 RC1, {@code AnnotatedElementUtils.processWithFindSemantics()} does not resolve an
<add> * <em>equivalent</em> method in {@code AbstractClassWithInheritedAnnotation} for the <em>bridged</em>
<add> * {@code handleParameterized(String)} method.
<ide> *
<ide> * @since 4.2
<ide> */
<del> @Ignore("Disabled until SPR-12738 is resolved")
<ide> @Test
<ide> public void findAnnotationAttributesInheritedFromBridgedMethod() throws NoSuchMethodException {
<ide> Method method = ConcreteClassWithInheritedAnnotation.class.getMethod("handleParameterized", String.class);
<ide> AnnotationAttributes attributes = findAnnotationAttributes(method, Transactional.class);
<del> assertNotNull("Should find @Transactional on ConcreteClassWithInheritedAnnotation.handleParameterized() method", attributes);
<add> assertNull("Should not find @Transactional on bridged ConcreteClassWithInheritedAnnotation.handleParameterized() method", attributes);
<ide> }
<ide>
<ide> /**
<ide> public void findAnnotationAttributesFromBridgeMethod() throws NoSuchMethodExcept
<ide> assertTrue(bridgedMethod != null && !bridgedMethod.isBridge());
<ide>
<ide> AnnotationAttributes attributes = findAnnotationAttributes(bridgeMethod, Order.class);
<del> assertNotNull("Should find @Order on StringGenericParameter.getFor() method", attributes);
<add> assertNotNull("Should find @Order on StringGenericParameter.getFor() bridge method", attributes);
<ide> }
<ide>
<ide>
| 2
|
Python
|
Python
|
remove trailing whitespace from all python files
|
28e608a2c2340ec33b8f5f49d36626eb12811866
|
<ide><path>examples/benchmarks.py
<ide> from transformers import AutoModel
<ide>
<ide>
<del>input_text = """Bent over their instruments, three hundred Fertilizers were plunged, as
<del>the Director of Hatcheries and Conditioning entered the room, in the
<del>
<del>
<del>
<del>scarcely breathing silence, the absent-minded, soliloquizing hum or
<del>whistle, of absorbed concentration. A troop of newly arrived students,
<del>very young, pink and callow, followed nervously, rather abjectly, at the
<del>Director's heels. Each of them carried a notebook, in which, whenever
<del>the great man spoke, he desperately scribbled. Straight from the
<del>horse's mouth. It was a rare privilege. The D. H. C. for Central London
<del>always made a point of personally conducting his new students round
<del>the various departments.
<del>
<del>"Just to give you a general idea," he would explain to them. For of
<del>course some sort of general idea they must have, if they were to do
<del>their work intelligently-though as little of one, if they were to be good
<del>and happy members of society, as possible. For particulars, as every
<del>one knows, make for virtue and happiness; generalities are intellectu-
<del>ally necessary evils. Not philosophers but fret-sawyers and stamp col-
<del>lectors compose the backbone of society.
<del>
<del>"To-morrow," he would add, smiling at them with a slightly menacing
<del>geniality, "you'll be settling down to serious work. You won't have time
<del>for generalities. Meanwhile ..."
<del>
<del>Meanwhile, it was a privilege. Straight from the horse's mouth into the
<del>notebook. The boys scribbled like mad.
<del>
<del>Tall and rather thin but upright, the Director advanced into the room.
<del>He had a long chin and big rather prominent teeth, just covered, when
<del>he was not talking, by his full, floridly curved lips. Old, young? Thirty?
<del>Fifty? Fifty-five? It was hard to say. And anyhow the question didn't
<del>arise; in this year of stability, A. F. 632, it didn't occur to you to ask it.
<del>
<del>"I shall begin at the beginning," said the D.H.C. and the more zealous
<del>students recorded his intention in their notebooks: Begin at the begin-
<del>ning. "These," he waved his hand, "are the incubators." And opening
<del>an insulated door he showed them racks upon racks of numbered test-
<del>tubes. "The week's supply of ova. Kept," he explained, "at blood heat;
<del>whereas the male gametes," and here he opened another door, "they
<del>have to be kept at thirty-five instead of thirty-seven. Full blood heat
<del>sterilizes." Rams wrapped in theremogene beget no lambs.
<del>
<del>Still leaning against the incubators he gave them, while the pencils
<del>scurried illegibly across the pages, a brief description of the modern
<del>
<del>
<del>
<del>fertilizing process; spoke first, of course, of its surgical introduc-
<del>tion-"the operation undergone voluntarily for the good of Society, not
<del>to mention the fact that it carries a bonus amounting to six months'
<del>salary"; continued with some account of the technique for preserving
<del>the excised ovary alive and actively developing; passed on to a consid-
<del>eration of optimum temperature, salinity, viscosity; referred to the liq-
<del>uor in which the detached and ripened eggs were kept; and, leading
<del>his charges to the work tables, actually showed them how this liquor
<del>was drawn off from the test-tubes; how it was let out drop by drop
<del>onto the specially warmed slides of the microscopes; how the eggs
<del>which it contained were inspected for abnormalities, counted and
<del>transferred to a porous receptacle; how (and he now took them to
<del>watch the operation) this receptacle was immersed in a warm bouillon
<del>containing free-swimming spermatozoa-at a minimum concentration
<del>of one hundred thousand per cubic centimetre, he insisted; and how,
<del>after ten minutes, the container was lifted out of the liquor and its
<del>contents re-examined; how, if any of the eggs remained unfertilized, it
<del>was again immersed, and, if necessary, yet again; how the fertilized
<del>ova went back to the incubators; where the Alphas and Betas re-
<del>mained until definitely bottled; while the Gammas, Deltas and Epsilons
<del>were brought out again, after only thirty-six hours, to undergo Bo-
<del>kanovsky's Process.
<del>
<del>"Bokanovsky's Process," repeated the Director, and the students un-
<del>derlined the words in their little notebooks.
<del>
<del>One egg, one embryo, one adult-normality. But a bokanovskified egg
<del>will bud, will proliferate, will divide. From eight to ninety-six buds, and
<del>every bud will grow into a perfectly formed embryo, and every embryo
<del>into a full-sized adult. Making ninety-six human beings grow where
<del>only one grew before. Progress.
<del>
<del>"Essentially," the D.H.C. concluded, "bokanovskification consists of a
<del>series of arrests of development. We check the normal growth and,
<del>paradoxically enough, the egg responds by budding."
<del>
<del>Responds by budding. The pencils were busy.
<del>
<del>He pointed. On a very slowly moving band a rack-full of test-tubes was
<del>entering a large metal box, another, rack-full was emerging. Machinery
<del>faintly purred. It took eight minutes for the tubes to go through, he
<del>
<del>
<del>
<del>told them. Eight minutes of hard X-rays being about as much as an
<del>egg can stand. A few died; of the rest, the least susceptible divided
<del>into two; most put out four buds; some eight; all were returned to the
<del>incubators, where the buds began to develop; then, after two days,
<del>were suddenly chilled, chilled and checked. Two, four, eight, the buds
<del>in their turn budded; and having budded were dosed almost to death
<del>with alcohol; consequently burgeoned again and having budded-bud
<del>out of bud out of bud-were thereafter-further arrest being generally
<del>fatal-left to develop in peace. By which time the original egg was in a
<del>fair way to becoming anything from eight to ninety-six embryos- a
<del>prodigious improvement, you will agree, on nature. Identical twins-but
<del>not in piddling twos and threes as in the old viviparous days, when an
<del>egg would sometimes accidentally divide; actually by dozens, by
<del>scores at a time.
<del>
<del>"Scores," the Director repeated and flung out his arms, as though he
<del>were distributing largesse. "Scores."
<del>
<del>But one of the students was fool enough to ask where the advantage
<del>lay.
<del>
<del>"My good boy!" The Director wheeled sharply round on him. "Can't you
<del>see? Can't you see?" He raised a hand; his expression was solemn.
<del>"Bokanovsky's Process is one of the major instruments of social stabil-
<del>ity!"
<del>
<del>Major instruments of social stability.
<del>
<del>Standard men and women; in uniform batches. The whole of a small
<del>factory staffed with the products of a single bokanovskified egg.
<del>
<del>"Ninety-six identical twins working ninety-six identical machines!" The
<del>voice was almost tremulous with enthusiasm. "You really know where
<del>you are. For the first time in history." He quoted the planetary motto.
<del>"Community, Identity, Stability." Grand words. "If we could bo-
<del>kanovskify indefinitely the whole problem would be solved."
<del>
<del>Solved by standard Gammas, unvarying Deltas, uniform Epsilons. Mil-
<del>lions of identical twins. The principle of mass production at last applied
<del>to biology.
<del>
<del>
<del>
<del>"But, alas," the Director shook his head, "we can't bokanovskify indefi-
<del>nitely."
<del>
<del>Ninety-six seemed to be the limit; seventy-two a good average. From
<del>the same ovary and with gametes of the same male to manufacture as
<del>many batches of identical twins as possible-that was the best (sadly a
<del>second best) that they could do. And even that was difficult.
<del>
<del>"For in nature it takes thirty years for two hundred eggs to reach ma-
<del>turity. But our business is to stabilize the population at this moment,
<del>here and now. Dribbling out twins over a quarter of a century-what
<del>would be the use of that?"
<del>
<del>Obviously, no use at all. But Podsnap's Technique had immensely ac-
<del>celerated the process of ripening. They could make sure of at least a
<del>hundred and fifty mature eggs within two years. Fertilize and bo-
<del>kanovskify-in other words, multiply by seventy-two-and you get an
<del>average of nearly eleven thousand brothers and sisters in a hundred
<del>and fifty batches of identical twins, all within two years of the same
<del>age.
<del>
<del>"And in exceptional cases we can make one ovary yield us over fifteen
<del>thousand adult individuals."
<del>
<del>Beckoning to a fair-haired, ruddy young man who happened to be
<del>passing at the moment. "Mr. Foster," he called. The ruddy young man
<del>approached. "Can you tell us the record for a single ovary, Mr. Foster?"
<del>
<del>"Sixteen thousand and twelve in this Centre," Mr. Foster replied with-
<del>out hesitation. He spoke very quickly, had a vivacious blue eye, and
<del>took an evident pleasure in quoting figures. "Sixteen thousand and
<del>twelve; in one hundred and eighty-nine batches of identicals. But of
<del>course they've done much better," he rattled on, "in some of the tropi-
<del>cal Centres. Singapore has often produced over sixteen thousand five
<del>hundred; and Mombasa has actually touched the seventeen thousand
<del>mark. But then they have unfair advantages. You should see the way a
<del>negro ovary responds to pituitary! It's quite astonishing, when you're
<del>used to working with European material. Still," he added, with a laugh
<del>(but the light of combat was in his eyes and the lift of his chin was
<del>challenging), "still, we mean to beat them if we can. I'm working on a
<del>wonderful Delta-Minus ovary at this moment. Only just eighteen
<del>
<del>
<del>
<del>months old. Over twelve thousand seven hundred children already, ei-
<del>ther decanted or in embryo. And still going strong. We'll beat them
<del>yet."
<del>
<del>"That's the spirit I like!" cried the Director, and clapped Mr. Foster on
<del>the shoulder. "Come along with us, and give these boys the benefit of
<del>your expert knowledge."
<del>
<del>Mr. Foster smiled modestly. "With pleasure." They went.
<del>In the Bottling Room all was harmonious bustle and ordered activity.
<del>Flaps of fresh sow's peritoneum ready cut to the proper size came
<del>shooting up in little lifts from the Organ Store in the sub-basement.
<del>Whizz and then, click! the lift-hatches flew open; the bottle-liner had
<del>only to reach out a hand, take the flap, insert, smooth-down, and be-
<del>fore the lined bottle had had time to travel out of reach along the end-
<del>less band, whizz, click! another flap of peritoneum had shot up from
<del>the depths, ready to be slipped into yet another bottle, the next of that
<del>slow interminable procession on the band.
<del>
<del>Next to the Liners stood the Matriculators. The procession advanced;
<del>one by one the eggs were transferred from their test-tubes to the
<del>larger containers; deftly the peritoneal lining was slit, the morula
<del>dropped into place, the saline solution poured in ... and already the
<del>bottle had passed, and it was the turn of the labellers. Heredity, date
<del>of fertilization, membership of Bokanovsky Group-details were trans-
<del>ferred from test-tube to bottle. No longer anonymous, but named,
<del>identified, the procession marched slowly on; on through an opening in
<del>the wall, slowly on into the Social Predestination Room.
<del>"Eighty-eight cubic metres of card-index," said Mr. Foster with relish,
<add>input_text = """Bent over their instruments, three hundred Fertilizers were plunged, as
<add>the Director of Hatcheries and Conditioning entered the room, in the
<add>
<add>
<add>
<add>scarcely breathing silence, the absent-minded, soliloquizing hum or
<add>whistle, of absorbed concentration. A troop of newly arrived students,
<add>very young, pink and callow, followed nervously, rather abjectly, at the
<add>Director's heels. Each of them carried a notebook, in which, whenever
<add>the great man spoke, he desperately scribbled. Straight from the
<add>horse's mouth. It was a rare privilege. The D. H. C. for Central London
<add>always made a point of personally conducting his new students round
<add>the various departments.
<add>
<add>"Just to give you a general idea," he would explain to them. For of
<add>course some sort of general idea they must have, if they were to do
<add>their work intelligently-though as little of one, if they were to be good
<add>and happy members of society, as possible. For particulars, as every
<add>one knows, make for virtue and happiness; generalities are intellectu-
<add>ally necessary evils. Not philosophers but fret-sawyers and stamp col-
<add>lectors compose the backbone of society.
<add>
<add>"To-morrow," he would add, smiling at them with a slightly menacing
<add>geniality, "you'll be settling down to serious work. You won't have time
<add>for generalities. Meanwhile ..."
<add>
<add>Meanwhile, it was a privilege. Straight from the horse's mouth into the
<add>notebook. The boys scribbled like mad.
<add>
<add>Tall and rather thin but upright, the Director advanced into the room.
<add>He had a long chin and big rather prominent teeth, just covered, when
<add>he was not talking, by his full, floridly curved lips. Old, young? Thirty?
<add>Fifty? Fifty-five? It was hard to say. And anyhow the question didn't
<add>arise; in this year of stability, A. F. 632, it didn't occur to you to ask it.
<add>
<add>"I shall begin at the beginning," said the D.H.C. and the more zealous
<add>students recorded his intention in their notebooks: Begin at the begin-
<add>ning. "These," he waved his hand, "are the incubators." And opening
<add>an insulated door he showed them racks upon racks of numbered test-
<add>tubes. "The week's supply of ova. Kept," he explained, "at blood heat;
<add>whereas the male gametes," and here he opened another door, "they
<add>have to be kept at thirty-five instead of thirty-seven. Full blood heat
<add>sterilizes." Rams wrapped in theremogene beget no lambs.
<add>
<add>Still leaning against the incubators he gave them, while the pencils
<add>scurried illegibly across the pages, a brief description of the modern
<add>
<add>
<add>
<add>fertilizing process; spoke first, of course, of its surgical introduc-
<add>tion-"the operation undergone voluntarily for the good of Society, not
<add>to mention the fact that it carries a bonus amounting to six months'
<add>salary"; continued with some account of the technique for preserving
<add>the excised ovary alive and actively developing; passed on to a consid-
<add>eration of optimum temperature, salinity, viscosity; referred to the liq-
<add>uor in which the detached and ripened eggs were kept; and, leading
<add>his charges to the work tables, actually showed them how this liquor
<add>was drawn off from the test-tubes; how it was let out drop by drop
<add>onto the specially warmed slides of the microscopes; how the eggs
<add>which it contained were inspected for abnormalities, counted and
<add>transferred to a porous receptacle; how (and he now took them to
<add>watch the operation) this receptacle was immersed in a warm bouillon
<add>containing free-swimming spermatozoa-at a minimum concentration
<add>of one hundred thousand per cubic centimetre, he insisted; and how,
<add>after ten minutes, the container was lifted out of the liquor and its
<add>contents re-examined; how, if any of the eggs remained unfertilized, it
<add>was again immersed, and, if necessary, yet again; how the fertilized
<add>ova went back to the incubators; where the Alphas and Betas re-
<add>mained until definitely bottled; while the Gammas, Deltas and Epsilons
<add>were brought out again, after only thirty-six hours, to undergo Bo-
<add>kanovsky's Process.
<add>
<add>"Bokanovsky's Process," repeated the Director, and the students un-
<add>derlined the words in their little notebooks.
<add>
<add>One egg, one embryo, one adult-normality. But a bokanovskified egg
<add>will bud, will proliferate, will divide. From eight to ninety-six buds, and
<add>every bud will grow into a perfectly formed embryo, and every embryo
<add>into a full-sized adult. Making ninety-six human beings grow where
<add>only one grew before. Progress.
<add>
<add>"Essentially," the D.H.C. concluded, "bokanovskification consists of a
<add>series of arrests of development. We check the normal growth and,
<add>paradoxically enough, the egg responds by budding."
<add>
<add>Responds by budding. The pencils were busy.
<add>
<add>He pointed. On a very slowly moving band a rack-full of test-tubes was
<add>entering a large metal box, another, rack-full was emerging. Machinery
<add>faintly purred. It took eight minutes for the tubes to go through, he
<add>
<add>
<add>
<add>told them. Eight minutes of hard X-rays being about as much as an
<add>egg can stand. A few died; of the rest, the least susceptible divided
<add>into two; most put out four buds; some eight; all were returned to the
<add>incubators, where the buds began to develop; then, after two days,
<add>were suddenly chilled, chilled and checked. Two, four, eight, the buds
<add>in their turn budded; and having budded were dosed almost to death
<add>with alcohol; consequently burgeoned again and having budded-bud
<add>out of bud out of bud-were thereafter-further arrest being generally
<add>fatal-left to develop in peace. By which time the original egg was in a
<add>fair way to becoming anything from eight to ninety-six embryos- a
<add>prodigious improvement, you will agree, on nature. Identical twins-but
<add>not in piddling twos and threes as in the old viviparous days, when an
<add>egg would sometimes accidentally divide; actually by dozens, by
<add>scores at a time.
<add>
<add>"Scores," the Director repeated and flung out his arms, as though he
<add>were distributing largesse. "Scores."
<add>
<add>But one of the students was fool enough to ask where the advantage
<add>lay.
<add>
<add>"My good boy!" The Director wheeled sharply round on him. "Can't you
<add>see? Can't you see?" He raised a hand; his expression was solemn.
<add>"Bokanovsky's Process is one of the major instruments of social stabil-
<add>ity!"
<add>
<add>Major instruments of social stability.
<add>
<add>Standard men and women; in uniform batches. The whole of a small
<add>factory staffed with the products of a single bokanovskified egg.
<add>
<add>"Ninety-six identical twins working ninety-six identical machines!" The
<add>voice was almost tremulous with enthusiasm. "You really know where
<add>you are. For the first time in history." He quoted the planetary motto.
<add>"Community, Identity, Stability." Grand words. "If we could bo-
<add>kanovskify indefinitely the whole problem would be solved."
<add>
<add>Solved by standard Gammas, unvarying Deltas, uniform Epsilons. Mil-
<add>lions of identical twins. The principle of mass production at last applied
<add>to biology.
<add>
<add>
<add>
<add>"But, alas," the Director shook his head, "we can't bokanovskify indefi-
<add>nitely."
<add>
<add>Ninety-six seemed to be the limit; seventy-two a good average. From
<add>the same ovary and with gametes of the same male to manufacture as
<add>many batches of identical twins as possible-that was the best (sadly a
<add>second best) that they could do. And even that was difficult.
<add>
<add>"For in nature it takes thirty years for two hundred eggs to reach ma-
<add>turity. But our business is to stabilize the population at this moment,
<add>here and now. Dribbling out twins over a quarter of a century-what
<add>would be the use of that?"
<add>
<add>Obviously, no use at all. But Podsnap's Technique had immensely ac-
<add>celerated the process of ripening. They could make sure of at least a
<add>hundred and fifty mature eggs within two years. Fertilize and bo-
<add>kanovskify-in other words, multiply by seventy-two-and you get an
<add>average of nearly eleven thousand brothers and sisters in a hundred
<add>and fifty batches of identical twins, all within two years of the same
<add>age.
<add>
<add>"And in exceptional cases we can make one ovary yield us over fifteen
<add>thousand adult individuals."
<add>
<add>Beckoning to a fair-haired, ruddy young man who happened to be
<add>passing at the moment. "Mr. Foster," he called. The ruddy young man
<add>approached. "Can you tell us the record for a single ovary, Mr. Foster?"
<add>
<add>"Sixteen thousand and twelve in this Centre," Mr. Foster replied with-
<add>out hesitation. He spoke very quickly, had a vivacious blue eye, and
<add>took an evident pleasure in quoting figures. "Sixteen thousand and
<add>twelve; in one hundred and eighty-nine batches of identicals. But of
<add>course they've done much better," he rattled on, "in some of the tropi-
<add>cal Centres. Singapore has often produced over sixteen thousand five
<add>hundred; and Mombasa has actually touched the seventeen thousand
<add>mark. But then they have unfair advantages. You should see the way a
<add>negro ovary responds to pituitary! It's quite astonishing, when you're
<add>used to working with European material. Still," he added, with a laugh
<add>(but the light of combat was in his eyes and the lift of his chin was
<add>challenging), "still, we mean to beat them if we can. I'm working on a
<add>wonderful Delta-Minus ovary at this moment. Only just eighteen
<add>
<add>
<add>
<add>months old. Over twelve thousand seven hundred children already, ei-
<add>ther decanted or in embryo. And still going strong. We'll beat them
<add>yet."
<add>
<add>"That's the spirit I like!" cried the Director, and clapped Mr. Foster on
<add>the shoulder. "Come along with us, and give these boys the benefit of
<add>your expert knowledge."
<add>
<add>Mr. Foster smiled modestly. "With pleasure." They went.
<add>In the Bottling Room all was harmonious bustle and ordered activity.
<add>Flaps of fresh sow's peritoneum ready cut to the proper size came
<add>shooting up in little lifts from the Organ Store in the sub-basement.
<add>Whizz and then, click! the lift-hatches flew open; the bottle-liner had
<add>only to reach out a hand, take the flap, insert, smooth-down, and be-
<add>fore the lined bottle had had time to travel out of reach along the end-
<add>less band, whizz, click! another flap of peritoneum had shot up from
<add>the depths, ready to be slipped into yet another bottle, the next of that
<add>slow interminable procession on the band.
<add>
<add>Next to the Liners stood the Matriculators. The procession advanced;
<add>one by one the eggs were transferred from their test-tubes to the
<add>larger containers; deftly the peritoneal lining was slit, the morula
<add>dropped into place, the saline solution poured in ... and already the
<add>bottle had passed, and it was the turn of the labellers. Heredity, date
<add>of fertilization, membership of Bokanovsky Group-details were trans-
<add>ferred from test-tube to bottle. No longer anonymous, but named,
<add>identified, the procession marched slowly on; on through an opening in
<add>the wall, slowly on into the Social Predestination Room.
<add>"Eighty-eight cubic metres of card-index," said Mr. Foster with relish,
<ide> as they entered."""
<ide>
<ide>
<ide><path>hubconf.py
<ide>
<ide> @add_start_docstrings(AutoConfig.__doc__)
<ide> def config(*args, **kwargs):
<del> r"""
<add> r"""
<ide> # Using torch.hub !
<ide> import torch
<ide>
<ide> def config(*args, **kwargs):
<ide>
<ide> @add_start_docstrings(AutoTokenizer.__doc__)
<ide> def tokenizer(*args, **kwargs):
<del> r"""
<add> r"""
<ide> # Using torch.hub !
<ide> import torch
<ide>
<ide><path>templates/adding_a_new_model/modeling_tf_xxx.py
<ide> class TFXxxPreTrainedModel(TFPreTrainedModel):
<ide> `model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.XxxConfig`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.XxxConfig`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> class TFXxxPreTrainedModel(TFPreTrainedModel):
<ide> (a) For sequence pairs:
<ide>
<ide> ``tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1``
<ide>
<ide> (b) For single sequences:
<ide>
<ide> ``tokens: [CLS] the dog is hairy . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0``
<ide>
<ide> Xxx is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide><path>templates/adding_a_new_model/modeling_xxx.py
<ide> def _init_weights(self, module):
<ide> https://pytorch.org/docs/stable/nn.html#module
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.XxxConfig`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.XxxConfig`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> def _init_weights(self, module):
<ide> (a) For sequence pairs:
<ide>
<ide> ``tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1``
<ide>
<ide> (b) For single sequences:
<ide>
<ide> ``tokens: [CLS] the dog is hairy . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0``
<ide>
<ide> Xxx is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide> class XxxForQuestionAnswering(XxxPreTrainedModel):
<ide> question, text = "Who was Jim Henson?", "Jim Henson was a nice puppet"
<ide> input_text = "[CLS] " + question + " [SEP] " + text + " [SEP]"
<ide> input_ids = tokenizer.encode(input_text)
<del> token_type_ids = [0 if i <= input_ids.index(102) else 1 for i in range(len(input_ids))]
<add> token_type_ids = [0 if i <= input_ids.index(102) else 1 for i in range(len(input_ids))]
<ide> start_scores, end_scores = model(torch.tensor([input_ids]), token_type_ids=torch.tensor([token_type_ids]))
<del> all_tokens = tokenizer.convert_ids_to_tokens(input_ids)
<add> all_tokens = tokenizer.convert_ids_to_tokens(input_ids)
<ide> print(' '.join(all_tokens[torch.argmax(start_scores) : torch.argmax(end_scores)+1]))
<ide> # a nice puppet
<ide>
<ide><path>transformers/commands/user.py
<ide> class LoginCommand(BaseUserCommand):
<ide> def run(self):
<ide> print(
<ide> """
<del> _| _| _| _| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _|_|_|_| _|_| _|_|_| _|_|_|_|
<del> _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _|
<del> _|_|_|_| _| _| _| _|_| _| _|_| _| _| _| _| _| _|_| _|_|_| _|_|_|_| _| _|_|_|
<del> _| _| _| _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _|
<del> _| _| _|_| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _| _| _| _|_|_| _|_|_|_|
<add> _| _| _| _| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _|_|_|_| _|_| _|_|_| _|_|_|_|
<add> _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _|
<add> _|_|_|_| _| _| _| _|_| _| _|_| _| _| _| _| _| _|_| _|_|_| _|_|_|_| _| _|_|_|
<add> _| _| _| _| _| _| _| _| _| _| _|_| _| _| _| _| _| _| _|
<add> _| _| _|_| _|_|_| _|_|_| _|_|_| _| _| _|_|_| _| _| _| _|_|_| _|_|_|_|
<ide>
<ide> """
<ide> )
<ide><path>transformers/data/processors/squad.py
<ide> def squad_convert_examples_to_features(
<ide> processor = SquadV2Processor()
<ide> examples = processor.get_dev_examples(data_dir)
<ide>
<del> features = squad_convert_examples_to_features(
<add> features = squad_convert_examples_to_features(
<ide> examples=examples,
<ide> tokenizer=tokenizer,
<ide> max_seq_length=args.max_seq_length,
<ide> class SquadFeatures(object):
<ide> has more information related to that token and should be prioritized over this feature for that token.
<ide> tokens: list of tokens corresponding to the input ids
<ide> token_to_orig_map: mapping between the tokens and the original text, needed in order to identify the answer.
<del> start_position: start of the answer token index
<del> end_position: end of the answer token index
<add> start_position: start of the answer token index
<add> end_position: end of the answer token index
<ide> """
<ide>
<ide> def __init__(
<ide><path>transformers/modeling_albert.py
<ide> def _init_weights(self, module):
<ide> https://pytorch.org/docs/stable/nn.html#module
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.AlbertConfig`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.AlbertConfig`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> def _init_weights(self, module):
<ide> (a) For sequence pairs:
<ide>
<ide> ``tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1``
<ide>
<ide> (b) For single sequences:
<ide>
<ide> ``tokens: [CLS] the dog is hairy . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0``
<ide>
<ide> Albert is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide> class AlbertForQuestionAnswering(AlbertPreTrainedModel):
<ide> question, text = "Who was Jim Henson?", "Jim Henson was a nice puppet"
<ide> input_text = "[CLS] " + question + " [SEP] " + text + " [SEP]"
<ide> input_ids = tokenizer.encode(input_text)
<del> token_type_ids = [0 if i <= input_ids.index(102) else 1 for i in range(len(input_ids))]
<add> token_type_ids = [0 if i <= input_ids.index(102) else 1 for i in range(len(input_ids))]
<ide> start_scores, end_scores = model(torch.tensor([input_ids]), token_type_ids=torch.tensor([token_type_ids]))
<del> all_tokens = tokenizer.convert_ids_to_tokens(input_ids)
<add> all_tokens = tokenizer.convert_ids_to_tokens(input_ids)
<ide> print(' '.join(all_tokens[torch.argmax(start_scores) : torch.argmax(end_scores)+1]))
<ide> # a nice puppet
<ide>
<ide><path>transformers/modeling_auto.py
<ide> def __init__(self):
<ide> def from_config(cls, config):
<ide> r""" Instantiates one of the base model classes of the library
<ide> from a configuration.
<del>
<add>
<ide> config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`:
<ide> The model class to instantiate is selected based on the configuration class:
<ide> - isInstance of `distilbert` configuration class: DistilBertModel (DistilBERT model)
<ide> def from_config(cls, config):
<ide> - isInstance of `roberta` configuration class: RobertaModel (Roberta model)
<ide>
<ide> Examples::
<del>
<add>
<ide> config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from S3 and cache.
<ide> model = AutoModelForTokenClassification.from_config(config) # E.g. model was saved using `save_pretrained('./test/saved_model/')`
<ide> """
<ide><path>transformers/modeling_camembert.py
<ide> CAMEMBERT_START_DOCSTRING = r""" The CamemBERT model was proposed in
<ide> `CamemBERT: a Tasty French Language Model`_
<ide> by Louis Martin, Benjamin Muller, Pedro Javier Ortiz Suárez, Yoann Dupont, Laurent Romary, Éric Villemonte de la Clergerie, Djamé Seddah, and Benoît Sagot. It is based on Facebook's RoBERTa model released in 2019.
<del>
<add>
<ide> It is a model trained on 138GB of French text.
<del>
<add>
<ide> This implementation is the same as RoBERTa.
<ide>
<ide> This model is a PyTorch `torch.nn.Module`_ sub-class. Use it as a regular PyTorch Module and
<ide> https://pytorch.org/docs/stable/nn.html#module
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.CamembertConfig`): Model configuration class with all the parameters of the
<add> config (:class:`~transformers.CamembertConfig`): Model configuration class with all the parameters of the
<ide> model. Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide>
<ide> ``tokens: <s> the dog is hairy . </s>``
<ide>
<del> Fully encoded sequences or sequence pairs can be obtained using the CamembertTokenizer.encode function with
<add> Fully encoded sequences or sequence pairs can be obtained using the CamembertTokenizer.encode function with
<ide> the ``add_special_tokens`` parameter set to ``True``.
<ide>
<ide> CamemBERT is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide> class CamembertForMaskedLM(RobertaForMaskedLM):
<ide>
<ide>
<ide> @add_start_docstrings(
<del> """CamemBERT Model transformer with a sequence classification/regression head on top (a linear layer
<add> """CamemBERT Model transformer with a sequence classification/regression head on top (a linear layer
<ide> on top of the pooled output) e.g. for GLUE tasks. """,
<ide> CAMEMBERT_START_DOCSTRING,
<ide> CAMEMBERT_INPUTS_DOCSTRING,
<ide><path>transformers/modeling_ctrl.py
<ide> def _init_weights(self, module):
<ide> module.weight.data.fill_(1.0)
<ide>
<ide>
<del>CTRL_START_DOCSTRING = r""" CTRL model was proposed in
<add>CTRL_START_DOCSTRING = r""" CTRL model was proposed in
<ide> `CTRL: A Conditional Transformer Language Model for Controllable Generation`_
<ide> by Nitish Shirish Keskar*, Bryan McCann*, Lav R. Varshney, Caiming Xiong and Richard Socher.
<ide> It's a causal (unidirectional) transformer pre-trained using language modeling on a very large
<ide> def _init_weights(self, module):
<ide> **past**:
<ide> list of ``torch.FloatTensor`` (one for each layer):
<ide> that contains pre-computed hidden-states (key and values in the attention blocks) as computed by the model
<del> (see `past` output below). Can be used to speed up sequential decoding. The token ids which have their past given to this model
<add> (see `past` output below). Can be used to speed up sequential decoding. The token ids which have their past given to this model
<ide> should not be passed as input ids as they have already been computed.
<ide> **attention_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, sequence_length)``:
<ide> Mask to avoid performing attention on padding token indices.
<ide> class CTRLModel(CTRLPreTrainedModel):
<ide> **past**:
<ide> list of ``torch.FloatTensor`` (one for each layer) of shape ``(2, batch_size, num_heads, sequence_length, embed_size_per_head)``:
<ide> that contains pre-computed hidden-states (key and values in the attention blocks).
<del> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<add> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<ide> should not be passed as input ids as they have already been computed.
<ide> **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
<ide> list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
<ide> class CTRLLMHeadModel(CTRLPreTrainedModel):
<ide> **past**:
<ide> list of ``torch.FloatTensor`` (one for each layer) of shape ``(2, batch_size, num_heads, sequence_length, embed_size_per_head)``:
<ide> that contains pre-computed hidden-states (key and values in the attention blocks).
<del> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<add> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<ide> should not be passed as input ids as they have already been computed.
<ide> **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
<ide> list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
<ide><path>transformers/modeling_distilbert.py
<ide> def _init_weights(self, module):
<ide>
<ide> For more information on DistilBERT, please refer to our
<ide> `detailed blog post`_
<del>
<add>
<ide> .. _`detailed blog post`:
<ide> https://medium.com/huggingface/distilbert-8cf3380435b5
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.DistilBertConfig`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.DistilBertConfig`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> def _init_weights(self, module):
<ide> **input_ids** ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
<ide> Indices of input sequence tokens in the vocabulary.
<ide> The input sequences should start with `[CLS]` and end with `[SEP]` tokens.
<del>
<add>
<ide> For now, ONLY BertTokenizer(`bert-base-uncased`) is supported and you should use this tokenizer when using DistilBERT.
<ide> **attention_mask**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
<ide> Mask to avoid performing attention on padding token indices.
<ide><path>transformers/modeling_gpt2.py
<ide> def _init_weights(self, module):
<ide> **past**:
<ide> list of ``torch.FloatTensor`` (one for each layer):
<ide> that contains pre-computed hidden-states (key and values in the attention blocks) as computed by the model
<del> (see `past` output below). Can be used to speed up sequential decoding. The token ids which have their past given to this model
<add> (see `past` output below). Can be used to speed up sequential decoding. The token ids which have their past given to this model
<ide> should not be passed as input ids as they have already been computed.
<ide> **attention_mask**: (`optional`) ``torch.FloatTensor`` of shape ``(batch_size, sequence_length)``:
<ide> Mask to avoid performing attention on padding token indices.
<ide> class GPT2Model(GPT2PreTrainedModel):
<ide> **past**:
<ide> list of ``torch.FloatTensor`` (one for each layer) of shape ``(2, batch_size, num_heads, sequence_length, embed_size_per_head)``:
<ide> that contains pre-computed hidden-states (key and values in the attention blocks).
<del> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<add> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<ide> should not be passed as input ids as they have already been computed.
<ide> **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
<ide> list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
<ide> class GPT2LMHeadModel(GPT2PreTrainedModel):
<ide> **past**:
<ide> list of ``torch.FloatTensor`` (one for each layer) of shape ``(2, batch_size, num_heads, sequence_length, embed_size_per_head)``:
<ide> that contains pre-computed hidden-states (key and values in the attention blocks).
<del> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<add> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<ide> should not be passed as input ids as they have already been computed.
<ide> **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
<ide> list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
<ide> class GPT2DoubleHeadsModel(GPT2PreTrainedModel):
<ide> **past**:
<ide> list of ``torch.FloatTensor`` (one for each layer) of shape ``(2, batch_size, num_heads, sequence_length, embed_size_per_head)``:
<ide> that contains pre-computed hidden-states (key and values in the attention blocks).
<del> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<add> Can be used (see `past` input) to speed up sequential decoding. The token ids which have their past given to this model
<ide> should not be passed as input ids as they have already been computed.
<ide> **hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
<ide> list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
<ide> class GPT2DoubleHeadsModel(GPT2PreTrainedModel):
<ide>
<ide> import torch
<ide> from transformers import GPT2Tokenizer, GPT2DoubleHeadsModel
<del>
<add>
<ide> tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
<ide> model = GPT2DoubleHeadsModel.from_pretrained('gpt2')
<del>
<add>
<ide> # Add a [CLS] to the vocabulary (we should train it also!)
<ide> tokenizer.add_special_tokens({'cls_token': '[CLS]'})
<ide> model.resize_token_embeddings(len(tokenizer)) # Update the model embeddings with the new vocabulary size
<ide> print(tokenizer.cls_token_id, len(tokenizer)) # The newly token the last token of the vocabulary
<del>
<add>
<ide> choices = ["Hello, my dog is cute [CLS]", "Hello, my cat is cute [CLS]"]
<ide> encoded_choices = [tokenizer.encode(s) for s in choices]
<ide> cls_token_location = [tokens.index(tokenizer.cls_token_id) for tokens in encoded_choices]
<ide><path>transformers/modeling_mmbt.py
<ide> def forward(self, input_modal, start_token=None, end_token=None, position_ids=No
<ide> return embeddings
<ide>
<ide>
<del>MMBT_START_DOCSTRING = r""" MMBT model was proposed in
<add>MMBT_START_DOCSTRING = r""" MMBT model was proposed in
<ide> `Supervised Multimodal Bitransformers for Classifying Images and Text`_
<ide> by Douwe Kiela, Suvrat Bhooshan, Hamed Firooz, Davide Testuggine.
<del> It's a supervised multimodal bitransformer model that fuses information from text and other image encoders,
<add> It's a supervised multimodal bitransformer model that fuses information from text and other image encoders,
<ide> and obtain state-of-the-art performance on various multimodal classification benchmark tasks.
<ide>
<ide> This model is a PyTorch `torch.nn.Module`_ sub-class. Use it as a regular PyTorch Module and
<ide> def forward(self, input_modal, start_token=None, end_token=None, position_ids=No
<ide> Parameters:
<ide> config (:class:`~transformers.MMBTConfig`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<del> transformer (:class: `~nn.Module`): A text transformer that is used by MMBT.
<add> transformer (:class: `~nn.Module`): A text transformer that is used by MMBT.
<ide> It should have embeddings, encoder, and pooler attributes.
<del> encoder (:class: `~nn.Module`): Encoder for the second modality.
<add> encoder (:class: `~nn.Module`): Encoder for the second modality.
<ide> It should take in a batch of modal inputs and return k, n dimension embeddings.
<ide> """
<ide>
<ide> MMBT_INPUTS_DOCSTRING = r""" Inputs:
<ide> **input_modal**: ``torch.FloatTensor`` of shape ``(batch_size, ***)``:
<del> The other modality data. It will be the shape that the encoder for that type expects.
<add> The other modality data. It will be the shape that the encoder for that type expects.
<ide> e.g. With an Image Encoder, the shape would be (batch_size, channels, height, width)
<ide> **input_ids**: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
<ide> Indices of input sequence tokens in the vocabulary.
<ide> def forward(self, input_modal, start_token=None, end_token=None, position_ids=No
<ide> **token_type_ids**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
<ide> Segment token indices to indicate different portions of the inputs.
<ide> **modal_token_type_ids**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, modal_sequence_length)``:
<del> Segment token indices to indicate different portions of the non-text modality.
<add> Segment token indices to indicate different portions of the non-text modality.
<ide> The embeddings from these tokens will be summed with the respective token embeddings for the non-text modality.
<ide> **position_ids**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
<ide> Indices of positions of each input sequence tokens in the position embeddings.
<ide><path>transformers/modeling_roberta.py
<ide> def create_position_ids_from_inputs_embeds(self, inputs_embeds):
<ide> `RoBERTa: A Robustly Optimized BERT Pretraining Approach`_
<ide> by Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer,
<ide> Veselin Stoyanov. It is based on Google's BERT model released in 2018.
<del>
<add>
<ide> It builds on BERT and modifies key hyperparameters, removing the next-sentence pretraining
<ide> objective and training with much larger mini-batches and learning rates.
<del>
<del> This implementation is the same as BertModel with a tiny embeddings tweak as well as a setup for Roberta pretrained
<add>
<add> This implementation is the same as BertModel with a tiny embeddings tweak as well as a setup for Roberta pretrained
<ide> models.
<ide>
<ide> This model is a PyTorch `torch.nn.Module`_ sub-class. Use it as a regular PyTorch Module and
<ide> def create_position_ids_from_inputs_embeds(self, inputs_embeds):
<ide> https://pytorch.org/docs/stable/nn.html#module
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.RobertaConfig`): Model configuration class with all the parameters of the
<add> config (:class:`~transformers.RobertaConfig`): Model configuration class with all the parameters of the
<ide> model. Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> def create_position_ids_from_inputs_embeds(self, inputs_embeds):
<ide>
<ide> ``tokens: <s> the dog is hairy . </s>``
<ide>
<del> Fully encoded sequences or sequence pairs can be obtained using the RobertaTokenizer.encode function with
<add> Fully encoded sequences or sequence pairs can be obtained using the RobertaTokenizer.encode function with
<ide> the ``add_special_tokens`` parameter set to ``True``.
<ide>
<ide> RoBERTa is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide> def forward(self, features, **kwargs):
<ide>
<ide>
<ide> @add_start_docstrings(
<del> """RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
<add> """RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
<ide> on top of the pooled output) e.g. for GLUE tasks. """,
<ide> ROBERTA_START_DOCSTRING,
<ide> ROBERTA_INPUTS_DOCSTRING,
<ide><path>transformers/modeling_t5.py
<ide> def forward(
<ide> https://pytorch.org/docs/stable/nn.html#module
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.T5Config`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.T5Config`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide><path>transformers/modeling_tf_albert.py
<ide> def call(self, hidden_states):
<ide> `model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.AlbertConfig`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.AlbertConfig`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> def call(self, hidden_states):
<ide> (a) For sequence pairs:
<ide>
<ide> ``tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1``
<ide>
<ide> (b) For single sequences:
<ide>
<ide> ``tokens: [CLS] the dog is hairy . [SEP]``
<del>
<add>
<ide> ``token_type_ids: 0 0 0 0 0 0 0``
<ide>
<ide> Albert is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide><path>transformers/modeling_tf_ctrl.py
<ide> class TFCTRLPreTrainedModel(TFPreTrainedModel):
<ide> base_model_prefix = "transformer"
<ide>
<ide>
<del>CTRL_START_DOCSTRING = r""" CTRL model was proposed in
<add>CTRL_START_DOCSTRING = r""" CTRL model was proposed in
<ide> `CTRL: A Conditional Transformer Language Model for Controllable Generation`_
<ide> by Nitish Shirish Keskar*, Bryan McCann*, Lav R. Varshney, Caiming Xiong and Richard Socher.
<ide> It's a causal (unidirectional) transformer pre-trained using language modeling on a very large
<ide><path>transformers/modeling_tf_distilbert.py
<ide> def call(self, inputs, inputs_embeds=None, mode="embedding", training=False):
<ide> linear tensor, float32 with shape [batch_size, length, vocab_size].
<ide> Raises:
<ide> ValueError: if mode is not valid.
<del>
<add>
<ide> Shared weights logic adapted from
<ide> https://github.com/tensorflow/models/blob/a009f4fb9d2fc4949e32192a944688925ef78659/official/transformer/v2/embedding_layer.py#L24
<ide> """
<ide> class TFDistilBertPreTrainedModel(TFPreTrainedModel):
<ide>
<ide> For more information on DistilBERT, please refer to our
<ide> `detailed blog post`_
<del>
<add>
<ide> This model is a tf.keras.Model `tf.keras.Model`_ sub-class. Use it as a regular TF 2.0 Keras Model and
<ide> refer to the TF 2.0 documentation for all matter related to general usage and behavior.
<ide>
<ide> class TFDistilBertPreTrainedModel(TFPreTrainedModel):
<ide> `model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.DistilBertConfig`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.DistilBertConfig`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> class TFDistilBertPreTrainedModel(TFPreTrainedModel):
<ide> **input_ids** ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
<ide> Indices of input sequence tokens in the vocabulary.
<ide> The input sequences should start with `[CLS]` and end with `[SEP]` tokens.
<del>
<add>
<ide> For now, ONLY BertTokenizer(`bert-base-uncased`) is supported and you should use this tokenizer when using DistilBERT.
<ide> **attention_mask**: (`optional`) ``Numpy array`` or ``tf.Tensor`` of shape ``(batch_size, sequence_length)``:
<ide> Mask to avoid performing attention on padding token indices.
<ide><path>transformers/modeling_tf_gpt2.py
<ide> class TFGPT2DoubleHeadsModel(TFGPT2PreTrainedModel):
<ide>
<ide> tokenizer = GPT2Tokenizer.from_pretrained('gpt2')
<ide> model = TFGPT2DoubleHeadsModel.from_pretrained('gpt2')
<del>
<add>
<ide> # Add a [CLS] to the vocabulary (we should train it also!)
<ide> # This option is currently not implemented in TF 2.0
<ide> raise NotImplementedError
<ide> tokenizer.add_special_tokens({'cls_token': '[CLS]'})
<ide> model.resize_token_embeddings(len(tokenizer)) # Update the model embeddings with the new vocabulary size
<ide> print(tokenizer.cls_token_id, len(tokenizer)) # The newly token the last token of the vocabulary
<del>
<add>
<ide> choices = ["Hello, my dog is cute [CLS]", "Hello, my cat is cute [CLS]"]
<ide> encoded_choices = [tokenizer.encode(s) for s in choices]
<ide> cls_token_location = [tokens.index(tokenizer.cls_token_id) for tokens in encoded_choices]
<ide><path>transformers/modeling_tf_openai.py
<ide> class TFOpenAIGPTDoubleHeadsModel(TFOpenAIGPTPreTrainedModel):
<ide>
<ide> tokenizer = OpenAIGPTTokenizer.from_pretrained('openai-gpt')
<ide> model = TFOpenAIGPTDoubleHeadsModel.from_pretrained('openai-gpt')
<del>
<add>
<ide> # Add a [CLS] to the vocabulary (we should train it also!)
<ide> # This option is currently not implemented in TF 2.0
<ide> raise NotImplementedError
<ide><path>transformers/modeling_tf_roberta.py
<ide> class TFRobertaPreTrainedModel(TFPreTrainedModel):
<ide> `RoBERTa: A Robustly Optimized BERT Pretraining Approach`_
<ide> by Yinhan Liu, Myle Ott, Naman Goyal, Jingfei Du, Mandar Joshi, Danqi Chen, Omer Levy, Mike Lewis, Luke Zettlemoyer,
<ide> Veselin Stoyanov. It is based on Google's BERT model released in 2018.
<del>
<add>
<ide> It builds on BERT and modifies key hyperparameters, removing the next-sentence pretraining
<ide> objective and training with much larger mini-batches and learning rates.
<del>
<del> This implementation is the same as BertModel with a tiny embeddings tweak as well as a setup for Roberta pretrained
<add>
<add> This implementation is the same as BertModel with a tiny embeddings tweak as well as a setup for Roberta pretrained
<ide> models.
<ide>
<ide> This model is a tf.keras.Model `tf.keras.Model`_ sub-class. Use it as a regular TF 2.0 Keras Model and
<ide> class TFRobertaPreTrainedModel(TFPreTrainedModel):
<ide> `model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.RobertaConfig`): Model configuration class with all the parameters of the
<add> config (:class:`~transformers.RobertaConfig`): Model configuration class with all the parameters of the
<ide> model. Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide> class TFRobertaPreTrainedModel(TFPreTrainedModel):
<ide>
<ide> ``tokens: <s> the dog is hairy . </s>``
<ide>
<del> Fully encoded sequences or sequence pairs can be obtained using the RobertaTokenizer.encode function with
<add> Fully encoded sequences or sequence pairs can be obtained using the RobertaTokenizer.encode function with
<ide> the ``add_special_tokens`` parameter set to ``True``.
<ide>
<ide> RoBERTa is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide> def call(self, features, training=False):
<ide>
<ide>
<ide> @add_start_docstrings(
<del> """RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
<add> """RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
<ide> on top of the pooled output) e.g. for GLUE tasks. """,
<ide> ROBERTA_START_DOCSTRING,
<ide> ROBERTA_INPUTS_DOCSTRING,
<ide><path>transformers/modeling_tf_t5.py
<ide> def dummy_inputs(self):
<ide> `model({'input_ids': input_ids, 'token_type_ids': token_type_ids})`
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.T5Config`): Model configuration class with all the parameters of the model.
<add> config (:class:`~transformers.T5Config`): Model configuration class with all the parameters of the model.
<ide> Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide><path>transformers/modeling_tf_utils.py
<ide> def resize_token_embeddings(self, new_num_tokens=None):
<ide> Arguments:
<ide>
<ide> new_num_tokens: (`optional`) int:
<del> New number of tokens in the embedding matrix. Increasing the size will add newly initialized vectors at the end. Reducing the size will remove vectors from the end.
<add> New number of tokens in the embedding matrix. Increasing the size will add newly initialized vectors at the end. Reducing the size will remove vectors from the end.
<ide> If not provided or None: does nothing and just returns a pointer to the input tokens ``tf.Variable`` Module of the model.
<ide>
<ide> Return: ``tf.Variable``
<ide> def call(self, inputs, mode="embedding"):
<ide> linear tensor, float32 with shape [batch_size, length, vocab_size].
<ide> Raises:
<ide> ValueError: if mode is not valid.
<del>
<add>
<ide> Shared weights logic adapted from
<ide> https://github.com/tensorflow/models/blob/a009f4fb9d2fc4949e32192a944688925ef78659/official/transformer/v2/embedding_layer.py#L24
<ide> """
<ide><path>transformers/modeling_xlm.py
<ide> class XLMForQuestionAnsweringSimple(XLMPreTrainedModel):
<ide> **cls_index**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
<ide> Labels for position (index) of the classification token to use as input for computing plausibility of the answer.
<ide> **p_mask**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
<del> Optional mask of tokens which can't be in answers (e.g. [CLS], [PAD], ...)
<add> Optional mask of tokens which can't be in answers (e.g. [CLS], [PAD], ...)
<ide>
<ide> Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
<ide> **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
<ide> class XLMForQuestionAnswering(XLMPreTrainedModel):
<ide> **cls_index**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
<ide> Labels for position (index) of the classification token to use as input for computing plausibility of the answer.
<ide> **p_mask**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``:
<del> Optional mask of tokens which can't be in answers (e.g. [CLS], [PAD], ...)
<add> Optional mask of tokens which can't be in answers (e.g. [CLS], [PAD], ...)
<ide>
<ide> Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
<ide> **loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
<ide><path>transformers/modeling_xlm_roberta.py
<ide> XLM_ROBERTA_START_DOCSTRING = r""" The XLM-RoBERTa model was proposed in
<ide> `Unsupervised Cross-lingual Representation Learning at Scale`_
<ide> by Alexis Conneau, Kartikay Khandelwal, Naman Goyal, Vishrav Chaudhary, Guillaume Wenzek, Francisco Guzmán, Edouard Grave, Myle Ott, Luke Zettlemoyer and Veselin Stoyanov. It is based on Facebook's RoBERTa model released in 2019.
<del>
<add>
<ide> It is a large multi-lingual language model, trained on 2.5TB of filtered CommonCrawl data.
<ide>
<ide> This implementation is the same as RoBERTa.
<ide> https://pytorch.org/docs/stable/nn.html#module
<ide>
<ide> Parameters:
<del> config (:class:`~transformers.XLMRobertaConfig`): Model configuration class with all the parameters of the
<add> config (:class:`~transformers.XLMRobertaConfig`): Model configuration class with all the parameters of the
<ide> model. Initializing with a config file does not load the weights associated with the model, only the configuration.
<ide> Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
<ide> """
<ide>
<ide> ``tokens: <s> the dog is hairy . </s>``
<ide>
<del> Fully encoded sequences or sequence pairs can be obtained using the XLMRobertaTokenizer.encode function with
<add> Fully encoded sequences or sequence pairs can be obtained using the XLMRobertaTokenizer.encode function with
<ide> the ``add_special_tokens`` parameter set to ``True``.
<ide>
<ide> XLM-RoBERTa is a model with absolute position embeddings so it's usually advised to pad the inputs on
<ide> class XLMRobertaForMaskedLM(RobertaForMaskedLM):
<ide>
<ide>
<ide> @add_start_docstrings(
<del> """XLM-RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
<add> """XLM-RoBERTa Model transformer with a sequence classification/regression head on top (a linear layer
<ide> on top of the pooled output) e.g. for GLUE tasks. """,
<ide> XLM_ROBERTA_START_DOCSTRING,
<ide> XLM_ROBERTA_INPUTS_DOCSTRING,
<ide><path>transformers/tokenization_utils.py
<ide> def encode_plus(
<ide> padding index, up to their max length. If no max length is specified, the padding is done up to the model's max length.
<ide> The tokenizer padding sides are handled by the following strings:
<ide> - 'left': pads on the left of the sequences
<del> - 'right': pads on the right of the sequences
<add> - 'right': pads on the right of the sequences
<ide> Defaults to False: no padding.
<ide> return_tensors: (optional) can be set to 'tf' or 'pt' to return respectively TensorFlow tf.constant
<ide> or PyTorch torch.Tensor instead of a list of python integers.
<ide> def prepare_for_model(
<ide> padding index, up to their max length. If no max length is specified, the padding is done up to the model's max length.
<ide> The tokenizer padding sides are handled by the following strings:
<ide> - 'left': pads on the left of the sequences
<del> - 'right': pads on the right of the sequences
<add> - 'right': pads on the right of the sequences
<ide> Defaults to False: no padding.
<ide> return_tensors: (optional) can be set to 'tf' or 'pt' to return respectively TensorFlow tf.constant
<ide> or PyTorch torch.Tensor instead of a list of python integers.
<ide><path>utils/download_glue_data.py
<ide> Original source: https://gist.github.com/W4ngatang/60c2bdb54d156a41194446737ce03e2e
<ide>
<ide> Note: for legal reasons, we are unable to host MRPC.
<del>You can either use the version hosted by the SentEval team, which is already tokenized,
<add>You can either use the version hosted by the SentEval team, which is already tokenized,
<ide> or you can download the original data from (https://download.microsoft.com/download/D/4/6/D46FF87A-F6B9-4252-AA8B-3604ED519838/MSRParaphraseCorpus.msi) and extract the data from it manually.
<ide> For Windows users, you can run the .msi file. For Mac and Linux users, consider an external library such as 'cabextract' (see below for an example).
<ide> You should then rename and place specific files in a folder (see below for an example).
| 27
|
Javascript
|
Javascript
|
expose tapable plugins on export
|
6b679341b12f996e0db0b4edef58741099074559
|
<ide><path>lib/index.js
<ide> exportPlugins(module.exports, {
<ide> ExternalsPlugin: () => require("./ExternalsPlugin"),
<ide> HotModuleReplacementPlugin: () => require("./HotModuleReplacementPlugin"),
<ide> IgnorePlugin: () => require("./IgnorePlugin"),
<add> JavascriptModulesPlugin: () => require("./JavascriptModulesPlugin"),
<ide> LibraryTemplatePlugin: () => require("./LibraryTemplatePlugin"),
<ide> LoaderOptionsPlugin: () => require("./LoaderOptionsPlugin"),
<ide> LoaderTargetPlugin: () => require("./LoaderTargetPlugin"),
<ide> Module: () => require("./Module"),
<ide> ModuleFilenameHelpers: () => require("./ModuleFilenameHelpers"),
<ide> NoEmitOnErrorsPlugin: () => require("./NoEmitOnErrorsPlugin"),
<add> NormalModule: () => require("./NormalModule"),
<ide> NormalModuleReplacementPlugin: () =>
<ide> require("./NormalModuleReplacementPlugin"),
<ide> PrefetchPlugin: () => require("./PrefetchPlugin"),
<ide> exportPlugins((module.exports.node = {}), {
<ide> ReadFileCompileWasmPlugin: () => require("./node/ReadFileCompileWasmPlugin")
<ide> });
<ide>
<add>exportPlugins((module.exports.wasm = {}), {
<add> AsyncWebAssemblyModulesPlugin: () =>
<add> require("./wasm-async/AsyncWebAssemblyModulesPlugin")
<add>});
<add>
<ide> exportPlugins((module.exports.debug = {}), {
<ide> ProfilingPlugin: () => require("./debug/ProfilingPlugin")
<ide> });
| 1
|
Text
|
Text
|
add v4.5.0 to changelog
|
fa7db213b2f870bce4b153c1167746bff1f6a209
|
<ide><path>CHANGELOG.md
<ide> # Ember Changelog
<ide>
<del>### v4.4.2 (June 13, 2022)
<add>### v4.5.0 (June 13, 2022)
<ide>
<del>- [#20114](https://github.com/emberjs/ember.js/pull/20114) [BUGFIX] Fix generated import paths for test setup functions in addons
<add>- [#20052](https://github.com/emberjs/ember.js/pull/20052) / [#20055](https://github.com/emberjs/ember.js/pull/20055) [FEATURE] Add the default helper manager to implement [RFC #0756](https://github.com/emberjs/rfcs/blob/master/text/0756-helper-default-manager.md).
<add>- [#20053](https://github.com/emberjs/ember.js/pull/20053) [FEATURE] Expose `renderSettled` from `@ember/renderer` to enable implementation of [RFC #0785](https://github.com/emberjs/rfcs/blob/master/text/0785-remove-set-get-in-tests.md).
<ide>
<del>### v4.5.0-beta.2 (June 6, 2022)
<add>### v4.4.2 (June 13, 2022)
<ide>
<del>- [#20082](https://github.com/emberjs/ember.js/pull/20082) [BUGFIX] Fix blueprint generation
<add>- [#20114](https://github.com/emberjs/ember.js/pull/20114) [BUGFIX] Fix generated import paths for test setup functions in addons
<ide>
<ide> ### v4.4.1 (May 31, 2022)
<ide>
<ide> - [#20082](https://github.com/emberjs/ember.js/pull/20082) [BUGFIX] Fix blueprints publication
<ide>
<del>### v4.5.0-beta.1 (May 2, 2022)
<del>
<del>- [#20052](https://github.com/emberjs/ember.js/pull/20052) / [#20055](https://github.com/emberjs/ember.js/pull/20055) [FEATURE] Add the default helper manager to implement [RFC #0756](https://github.com/emberjs/rfcs/blob/master/text/0756-helper-default-manager.md).
<del>- [#20053](https://github.com/emberjs/ember.js/pull/20053) [FEATURE] Expose `renderSettled` from `@ember/renderer` to enable implementation of [RFC #0785](https://github.com/emberjs/rfcs/blob/master/text/0785-remove-set-get-in-tests.md).
<del>
<ide> ### v4.4.0 (May 2, 2022)
<ide>
<ide> - [#19882](https://github.com/emberjs/ember.js/pull/19882) / [#20005](https://github.com/emberjs/ember.js/pull/20005) [FEATURE] Implement the `unique-id` helper per [RFC #0659](https://github.com/emberjs/rfcs/blob/master/text/0659-unique-id-helper.md).
| 1
|
Ruby
|
Ruby
|
simplify conversion to binary
|
a4583da838a7ebf069bc4d07b27007c0a7b258e9
|
<ide><path>actionview/lib/action_view/template/handlers/erb.rb
<ide> def call(template, source)
<ide> # wrong, we can still find an encoding tag
<ide> # (<%# encoding %>) inside the String using a regular
<ide> # expression
<del> template_source = source.dup.force_encoding(Encoding::ASCII_8BIT)
<add> template_source = source.b
<ide>
<ide> erb = template_source.gsub(ENCODING_TAG, "")
<ide> encoding = $2
| 1
|
Javascript
|
Javascript
|
use html entity to avoid doc breakage in parsing
|
6e3489e3d29b0bcc5841d6613c2c58f5fb2c53a2
|
<ide><path>packages/ember-handlebars/lib/helpers/template.js
<ide> require('ember-handlebars/ext');
<ide> </script>
<ide>
<ide> This helper looks for templates in the global Ember.TEMPLATES hash. If you
<del> add <script> tags to your page with the `data-template-name` attribute set,
<add> add &lt;script&gt; tags to your page with the `data-template-name` attribute set,
<ide> they will be compiled and placed in this hash automatically.
<ide>
<ide> You can also manually register templates by adding them to the hash:
| 1
|
Javascript
|
Javascript
|
fix typo in storagefolder.clear
|
fd5f8af292abac8352360f08c6b0ff4140b72445
|
<ide><path>src/storage-folder.js
<ide> class StorageFolder {
<ide> if (!this.path) return
<ide> fs.remove(this.path, error => {
<ide> if (error) console.warn(`Error deleting ${this.path}`, error.stack, error)
<del> reolve()
<add> resolve()
<ide> })
<ide> })
<ide> }
| 1
|
Ruby
|
Ruby
|
match example with the sentence. [ci-skip]
|
f8ac66ae6d2f03f297177bcabfd1bd314db49103
|
<ide><path>actionpack/lib/action_dispatch/http/parameters.rb
<ide> def path_parameters=(parameters) # :nodoc:
<ide> # Returns a hash with the \parameters used to form the \path of the request.
<ide> # Returned hash keys are symbols:
<ide> #
<del> # {'action' => 'my_action', 'controller' => 'my_controller'}
<add> # { action: "my_action", controller: "my_controller" }
<ide> def path_parameters
<ide> get_header(PARAMETERS_KEY) || set_header(PARAMETERS_KEY, {})
<ide> end
| 1
|
Javascript
|
Javascript
|
fix typo in ember.string#classify doc
|
f3d0fd3e0d6a3dc75d3e5e14119f295d7ced488a
|
<ide><path>packages/ember-runtime/lib/system/string.js
<ide> Ember.String = {
<ide> },
<ide>
<ide> /**
<del> Returns the lowerCaseCamel form of a string.
<add> Returns the lowerCamelCase form of a string.
<ide>
<ide> ```javascript
<ide> 'innerHTML'.camelize(); // 'innerHTML'
| 1
|
Text
|
Text
|
add history section to `fetch`-related globals
|
396e8e3001f14f1ad62b2ce0aea6ead7adc55719
|
<ide><path>doc/api/globals.md
<ide> This variable may appear to be global but is not. See [`exports`][].
<ide> added:
<ide> - v17.5.0
<ide> - v16.15.0
<add>changes:
<add> - version: v18.0.0
<add> pr-url: https://github.com/nodejs/node/pull/41811
<add> description: No longer behind `--experimental-global-fetch` CLI flag.
<ide> -->
<ide>
<ide> > Stability: 1 - Experimental. Disable this API with the [`--no-experimental-fetch`][]
<ide> A browser-compatible implementation of the [`fetch()`][] function.
<ide> added:
<ide> - v17.6.0
<ide> - v16.15.0
<add>changes:
<add> - version: v18.0.0
<add> pr-url: https://github.com/nodejs/node/pull/41811
<add> description: No longer behind `--experimental-global-fetch` CLI flag.
<ide> -->
<ide>
<ide> > Stability: 1 - Experimental. Disable this API with the [`--no-experimental-fetch`][]
<ide> Node.js this is different. The top-level scope is not the global scope;
<ide> added:
<ide> - v17.5.0
<ide> - v16.15.0
<add>changes:
<add> - version: v18.0.0
<add> pr-url: https://github.com/nodejs/node/pull/41811
<add> description: No longer behind `--experimental-global-fetch` CLI flag.
<ide> -->
<ide>
<ide> > Stability: 1 - Experimental. Disable this API with the [`--no-experimental-fetch`][]
<ide> This variable may appear to be global but is not. See [`require()`][].
<ide> added:
<ide> - v17.5.0
<ide> - v16.15.0
<add>changes:
<add> - version: v18.0.0
<add> pr-url: https://github.com/nodejs/node/pull/41811
<add> description: No longer behind `--experimental-global-fetch` CLI flag.
<ide> -->
<ide>
<ide> > Stability: 1 - Experimental. Disable this API with the [`--no-experimental-fetch`][]
<ide> A browser-compatible implementation of {Response}.
<ide> added:
<ide> - v17.5.0
<ide> - v16.15.0
<add>changes:
<add> - version: v18.0.0
<add> pr-url: https://github.com/nodejs/node/pull/41811
<add> description: No longer behind `--experimental-global-fetch` CLI flag.
<ide> -->
<ide>
<ide> > Stability: 1 - Experimental. Disable this API with the [`--no-experimental-fetch`][]
| 1
|
PHP
|
PHP
|
apply inflections to core commands
|
a53cfefdead181f6b760ef862193c0625175adb9
|
<add><path>src/Command/CacheClearallCommand.php
<del><path>src/Command/CacheClearAllCommand.php
<ide> use Cake\Console\ConsoleOptionParser;
<ide>
<ide> /**
<del> * CacheClearAll command.
<add> * CacheClearall command.
<ide> */
<del>class CacheClearAllCommand extends Command
<add>class CacheClearallCommand extends Command
<ide> {
<ide> /**
<ide> * Hook method for defining this command's option parser.
<ide><path>src/Console/CommandScanner.php
<ide> */
<ide> namespace Cake\Console;
<ide>
<del>use Cake\Command\CacheClearAllCommand;
<del>use Cake\Command\CacheClearCommand;
<del>use Cake\Command\CacheListCommand;
<del>use Cake\Command\HelpCommand;
<del>use Cake\Command\UpgradeCommand;
<del>use Cake\Command\VersionCommand;
<ide> use Cake\Core\App;
<ide> use Cake\Core\Configure;
<ide> use Cake\Core\Plugin;
<ide> public function scanCore(): array
<ide> '',
<ide> ['command_list']
<ide> );
<del>
<del> $coreCommands = [
<del> [
<del> 'name' => 'cache clear',
<del> 'fullName' => 'cache clear',
<del> 'class' => CacheClearCommand::class,
<del> ],
<del> [
<del> 'name' => 'cache clear_all',
<del> 'fullName' => 'cache clear_all',
<del> 'class' => CacheClearAllCommand::class,
<del> ],
<del> [
<del> 'name' => 'cache list',
<del> 'fullName' => 'cache list',
<del> 'class' => CacheListCommand::class,
<del> ],
<del> [
<del> 'name' => 'help',
<del> 'fullName' => 'help',
<del> 'class' => HelpCommand::class,
<del> ],
<del> [
<del> 'name' => 'upgrade',
<del> 'fullName' => 'upgrade',
<del> 'class' => UpgradeCommand::class,
<del> ],
<del> [
<del> 'name' => 'version',
<del> 'fullName' => 'version',
<del> 'class' => VersionCommand::class,
<del> ],
<del> ];
<add> $coreCommands = $this->scanDir(
<add> dirname(__DIR__) . DIRECTORY_SEPARATOR . 'Command' . DIRECTORY_SEPARATOR,
<add> 'Cake\Command\\',
<add> '',
<add> ['command_list']
<add> );
<add> $coreCommands = $this->inflectCommandNames($coreCommands);
<ide>
<ide> return array_merge($coreShells, $coreCommands);
<ide> }
<ide>
<add> /**
<add> * Inflect multi-word command names based on conventions
<add> *
<add> * @param array $commands The array of command metadata to mutate
<add> * @return array The updated command metadata
<add> * @see \Cake\Console\CommandScanner::scanDir()
<add> */
<add> protected function inflectCommandNames(array $commands): array
<add> {
<add> foreach ($commands as $i => $command) {
<add> $command['name'] = str_replace('_', ' ', $command['name']);
<add> $commands[$i] = $command;
<add> }
<add>
<add> return $commands;
<add> }
<add>
<ide> /**
<ide> * Scan the application for shells & commands.
<ide> *
| 2
|
Javascript
|
Javascript
|
fix linter errors
|
d124248db0a4d500a4ce6e52f86550a18af45580
|
<ide><path>src/config-schema.js
<ide> const configSchema = {
<ide> customFileTypes: {
<ide> type: 'object',
<ide> default: {},
<del> description: "Associates scope names (e.g. `\"source.js\"`) with arrays of file extensions and file names (e.g. `[\"Somefile\", \".js2\"]`)",
<add> description: 'Associates scope names (e.g. `"source.js"`) with arrays of file extensions and file names (e.g. `["Somefile", ".js2"]`)',
<ide> additionalProperties: {
<ide> type: 'array',
<ide> items: {
<ide> const configSchema = {
<ide> ]
<ide> },
<ide> warnOnLargeFileLimit: {
<del> description: "Warn before opening files larger than this number of megabytes.",
<del> type: "number",
<add> description: 'Warn before opening files larger than this number of megabytes.',
<add> type: 'number',
<ide> default: 20
<ide> }
<ide> }
<ide> const configSchema = {
<ide> }
<ide> }
<ide> }
<del>};
<add>}
<ide>
<ide> if (['win32', 'linux'].includes(process.platform)) {
<ide> configSchema.core.properties.autoHideMenuBar = {
<ide> type: 'boolean',
<ide> default: false,
<ide> description: 'Automatically hide the menu bar and toggle it by pressing Alt. This is only supported on Windows & Linux.'
<del> };
<add> }
<ide> }
<ide>
<ide> if (process.platform === 'darwin') {
<ide> configSchema.core.properties.useCustomTitleBar = {
<ide> type: 'boolean',
<ide> default: false,
<ide> description: 'Use custom, theme-aware title bar.<br>Note: This currently does not include a proxy icon.<br>This setting will require a relaunch of Atom to take effect.'
<del> };
<add> }
<ide> }
<ide>
<ide> export default configSchema
| 1
|
Text
|
Text
|
add some missing events to the api docs
|
4bb3c253e83454e00381723f17ca69dd4e9b3d7a
|
<ide><path>docs/reference/api/docker_remote_api_v1.20.md
<ide> polling (using since).
<ide>
<ide> Docker containers report the following events:
<ide>
<del> create, destroy, die, exec_create, exec_start, export, kill, oom, pause, restart, start, stop, unpause
<add> attach, commit, copy, create, destroy, die, exec_create, exec_start, export, kill, oom, pause, rename, resize, restart, start, stop, top, unpause
<ide>
<ide> and Docker images report:
<ide>
<del> untag, delete
<add> delete, import, pull, push, tag, untag
<ide>
<ide> **Example request**:
<ide>
| 1
|
Javascript
|
Javascript
|
remove unused variables from raycaster
|
367fb776a190e9957e82068fb65f5cd335abc1bf
|
<ide><path>src/core/Raycaster.js
<ide>
<ide> };
<ide>
<del> var v0 = new THREE.Vector3(), v1 = new THREE.Vector3(), v2 = new THREE.Vector3();
<del>
<ide> // http://www.blackpawn.com/texts/pointinpoly/default.html
<ide>
<ide> var intersectObject = function ( object, raycaster, intersects ) {
| 1
|
Java
|
Java
|
fix race condition with oncompletion/onerror
|
dd22b8fd3971c7b0cdc1976d45c81ba12d5da604
|
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/AbstractListenerReadPublisher.java
<ide> /*
<del> * Copyright 2002-2018 the original author or authors.
<add> * Copyright 2002-2019 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> <T> void subscribe(AbstractListenerReadPublisher<T> publisher, Subscriber<? supe
<ide> publisher.subscriber = subscriber;
<ide> subscriber.onSubscribe(subscription);
<ide> publisher.changeState(SUBSCRIBING, NO_DEMAND);
<del> // Now safe to check "beforeDemand" flags, they won't change once in NO_DEMAND
<del> String logPrefix = publisher.getLogPrefix();
<del> if (publisher.completionBeforeDemand) {
<del> rsReadLogger.trace(logPrefix + "Completed before demand");
<del> publisher.state.get().onAllDataRead(publisher);
<del> }
<del> Throwable ex = publisher.errorBeforeDemand;
<del> if (ex != null) {
<del> if (rsReadLogger.isTraceEnabled()) {
<del> rsReadLogger.trace(logPrefix + "Completed with error before demand: " + ex);
<del> }
<del> publisher.state.get().onError(publisher, ex);
<del> }
<add> handleCompletionOrErrorBeforeDemand(publisher);
<ide> }
<ide> else {
<ide> throw new IllegalStateException("Failed to transition to SUBSCRIBING, " +
<ide> <T> void subscribe(AbstractListenerReadPublisher<T> publisher, Subscriber<? supe
<ide> @Override
<ide> <T> void onAllDataRead(AbstractListenerReadPublisher<T> publisher) {
<ide> publisher.completionBeforeDemand = true;
<add> handleCompletionOrErrorBeforeDemand(publisher);
<ide> }
<ide>
<ide> @Override
<ide> <T> void onError(AbstractListenerReadPublisher<T> publisher, Throwable ex) {
<ide> publisher.errorBeforeDemand = ex;
<add> handleCompletionOrErrorBeforeDemand(publisher);
<add> }
<add>
<add> private <T> void handleCompletionOrErrorBeforeDemand(AbstractListenerReadPublisher<T> publisher) {
<add> if (publisher.state.get().equals(NO_DEMAND)) {
<add> if (publisher.completionBeforeDemand) {
<add> rsReadLogger.trace(publisher.getLogPrefix() + "Completed before demand");
<add> publisher.state.get().onAllDataRead(publisher);
<add> }
<add> Throwable ex = publisher.errorBeforeDemand;
<add> if (ex != null) {
<add> if (rsReadLogger.isTraceEnabled()) {
<add> String prefix = publisher.getLogPrefix();
<add> rsReadLogger.trace(prefix + "Completed with error before demand: " + ex);
<add> }
<add> publisher.state.get().onError(publisher, ex);
<add> }
<add> }
<ide> }
<ide> },
<ide>
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/AbstractListenerWriteFlushProcessor.java
<ide> /*
<del> * Copyright 2002-2018 the original author or authors.
<add> * Copyright 2002-2019 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public <T> void writeComplete(AbstractListenerWriteFlushProcessor<T> processor)
<ide> }
<ide> if (processor.changeState(this, REQUESTED)) {
<ide> if (processor.subscriberCompleted) {
<del> if (processor.isFlushPending()) {
<del> // Ensure the final flush
<del> processor.changeState(REQUESTED, FLUSHING);
<del> processor.flushIfPossible();
<del> }
<del> else if (processor.changeState(REQUESTED, COMPLETED)) {
<del> processor.resultPublisher.publishComplete();
<del> }
<del> else {
<del> processor.state.get().onComplete(processor);
<del> }
<add> handleSubscriberCompleted(processor);
<ide> }
<ide> else {
<ide> Assert.state(processor.subscription != null, "No subscription");
<ide> else if (processor.changeState(REQUESTED, COMPLETED)) {
<ide> @Override
<ide> public <T> void onComplete(AbstractListenerWriteFlushProcessor<T> processor) {
<ide> processor.subscriberCompleted = true;
<add> // A competing write might have completed very quickly
<add> if (processor.state.get().equals(State.REQUESTED)) {
<add> handleSubscriberCompleted(processor);
<add> }
<add> }
<add>
<add> private <T> void handleSubscriberCompleted(AbstractListenerWriteFlushProcessor<T> processor) {
<add> if (processor.isFlushPending()) {
<add> // Ensure the final flush
<add> processor.changeState(State.REQUESTED, State.FLUSHING);
<add> processor.flushIfPossible();
<add> }
<add> else if (processor.changeState(State.REQUESTED, State.COMPLETED)) {
<add> processor.resultPublisher.publishComplete();
<add> }
<add> else {
<add> processor.state.get().onComplete(processor);
<add> }
<ide> }
<ide> },
<ide>
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/AbstractListenerWriteProcessor.java
<ide> /*
<del> * Copyright 2002-2018 the original author or authors.
<add> * Copyright 2002-2019 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public <T> void onWritePossible(AbstractListenerWriteProcessor<T> processor) {
<ide> @Override
<ide> public <T> void onComplete(AbstractListenerWriteProcessor<T> processor) {
<ide> processor.subscriberCompleted = true;
<add> // A competing write might have completed very quickly
<add> if (processor.state.get().equals(State.REQUESTED)) {
<add> processor.changeStateToComplete(State.REQUESTED);
<add> }
<ide> }
<ide> },
<ide>
<ide> WRITING {
<ide> @Override
<ide> public <T> void onComplete(AbstractListenerWriteProcessor<T> processor) {
<ide> processor.subscriberCompleted = true;
<add> // A competing write might have completed very quickly
<add> if (processor.state.get().equals(State.REQUESTED)) {
<add> processor.changeStateToComplete(State.REQUESTED);
<add> }
<ide> }
<ide> },
<ide>
| 3
|
Python
|
Python
|
add xfailing test [ci skip]
|
74a19aeb1cc384e8ccdff8b664c9df0550be2f92
|
<ide><path>spacy/tests/vocab_vectors/test_vectors.py
<ide> def test_vectors_most_similar(most_similar_vectors_data):
<ide> assert all(row[0] == i for i, row in enumerate(best_rows))
<ide>
<ide>
<add>@pytest.mark.xfail
<add>def test_vectors_most_similar_identical():
<add> """Test that most similar identical vectors are assigned a score of 1.0."""
<add> data = numpy.asarray([[4, 2, 2, 2], [4, 2, 2, 2], [1, 1, 1, 1]], dtype="f")
<add> v = Vectors(data=data, keys=["A", "B", "C"])
<add> keys, _, scores = v.most_similar(numpy.asarray([[4, 2, 2, 2]], dtype="f"))
<add> assert scores[0][0] == 1.0 # not 1.0000002
<add> data = numpy.asarray([[1, 2, 3], [1, 2, 3], [1, 1, 1]], dtype="f")
<add> v = Vectors(data=data, keys=["A", "B", "C"])
<add> keys, _, scores = v.most_similar(numpy.asarray([[1, 2, 3]], dtype="f"))
<add> assert scores[0][0] == 1.0 # not 0.9999999
<add>
<add>
<ide> @pytest.mark.parametrize("text", ["apple and orange"])
<ide> def test_vectors_token_vector(tokenizer_v, vectors, text):
<ide> doc = tokenizer_v(text)
| 1
|
Text
|
Text
|
fix benchmark link typo in turbopack example
|
06ded795bea91346a6c88dbe04b3eb954a5ff2f4
|
<ide><path>examples/with-turbopack/README.md
<ide> # Next.js + Turbopack App Directory Playground
<ide>
<del>[Turbopack](https://turbo.build/pack) is a new incremental bundler optimized for JavaScript and TypeScript, written in Rust by the creators of Webpack and Next.js at [Vercel](https://vercel.com). On large applications Turbopack updates 10x faster than Vite and 700x faster than Webpack ([benchmark](https://turbo.build/pack/pack/docs/benchmarks)). For the biggest applications the difference grows even more stark with updates up to 20x faster than Vite.
<add>[Turbopack](https://turbo.build/pack) is a new incremental bundler optimized for JavaScript and TypeScript, written in Rust by the creators of Webpack and Next.js at [Vercel](https://vercel.com). On large applications Turbopack updates 10x faster than Vite and 700x faster than Webpack ([benchmark](https://turbo.build/pack/docs/benchmarks)). For the biggest applications the difference grows even more stark with updates up to 20x faster than Vite.
<ide>
<ide> This playground is a mirror of the [Next.js v13 App Directory Playground](https://github.com/vercel/app-playground), but uses Turbopack as the Next.js development server (`next dev --turbo`).
<ide>
| 1
|
Go
|
Go
|
remove duplicate 'warning'
|
7f118519eb007b2423fff428aceddabd6c1c301d
|
<ide><path>api.go
<ide> func postContainersCreate(srv *Server, version float64, w http.ResponseWriter, r
<ide> }
<ide>
<ide> if len(config.Dns) == 0 && len(srv.runtime.Dns) == 0 && utils.CheckLocalDns() {
<del> out.Warnings = append(out.Warnings, fmt.Sprintf("WARNING: Docker detected local DNS server on resolv.conf. Using default external servers: %v", defaultDns))
<add> out.Warnings = append(out.Warnings, fmt.Sprintf("Docker detected local DNS server on resolv.conf. Using default external servers: %v", defaultDns))
<ide> config.Dns = defaultDns
<ide> }
<ide>
| 1
|
Javascript
|
Javascript
|
combine sequence of .push() into one statement
|
7496e8e5b7113f8362f88d52d7731ed84f71e881
|
<ide><path>src/ng/compile.js
<ide> function $CompileProvider($provide, $$sanitizeUriProvider) {
<ide> var childBoundTranscludeFn = boundTranscludeFn;
<ide> if (scope.$$destroyed) return;
<ide> if (linkQueue) {
<del> linkQueue.push(scope);
<del> linkQueue.push(node);
<del> linkQueue.push(rootElement);
<del> linkQueue.push(childBoundTranscludeFn);
<add> linkQueue.push(scope,
<add> node,
<add> rootElement,
<add> childBoundTranscludeFn);
<ide> } else {
<ide> if (afterTemplateNodeLinkFn.transcludeOnThisElement) {
<ide> childBoundTranscludeFn = createBoundTranscludeFn(scope, afterTemplateNodeLinkFn.transclude, boundTranscludeFn);
<ide><path>src/ng/filter/filters.js
<ide> function formatNumber(number, pattern, groupSep, decimalSep, fractionSize) {
<ide> }
<ide> }
<ide>
<del> parts.push(isNegative ? pattern.negPre : pattern.posPre);
<del> parts.push(formatedText);
<del> parts.push(isNegative ? pattern.negSuf : pattern.posSuf);
<add> parts.push(isNegative ? pattern.negPre : pattern.posPre,
<add> formatedText,
<add> isNegative ? pattern.negSuf : pattern.posSuf);
<ide> return parts.join('');
<ide> }
<ide>
<ide><path>src/ngSanitize/filter/linky.js
<ide> angular.module('ngSanitize').filter('linky', ['$sanitize', function($sanitize) {
<ide> function addLink(url, text) {
<ide> html.push('<a ');
<ide> if (angular.isDefined(target)) {
<del> html.push('target="');
<del> html.push(target);
<del> html.push('" ');
<add> html.push('target="',
<add> target,
<add> '" ');
<ide> }
<del> html.push('href="');
<del> html.push(url);
<del> html.push('">');
<add> html.push('href="',
<add> url,
<add> '">');
<ide> addText(text);
<ide> html.push('</a>');
<ide> }
| 3
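
`Array.prototype.push` is variadic, so a run of single-element pushes can be collapsed into one call, as this patch does. A minimal sketch of the equivalence (the variable names here are illustrative, not from the patch):

```javascript
// push accepts any number of arguments and appends them in order.
var parts = [];
parts.push('a');
parts.push('b');
parts.push('c');

var combined = [];
combined.push('a', 'b', 'c'); // one call, same resulting array

console.log(parts.join('') === combined.join('')); // true
```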
|
Text
|
Text
|
fix transformer.initialize example
|
ffaa0d6b9ba7656de0dcbeaaa6fdc05e0b07b1b0
|
<ide><path>website/docs/api/transformer.md
<ide> by [`Language.initialize`](/api/language#initialize).
<ide> >
<ide> > ```python
<ide> > trf = nlp.add_pipe("transformer")
<del>> trf.initialize(lambda: [], nlp=nlp)
<add>> trf.initialize(lambda: iter([]), nlp=nlp)
<ide> > ```
<ide>
<ide> | Name | Description |
| 1
|
Javascript
|
Javascript
|
use feather icons for navigation
|
5e6e5e92cabdecd0dc2d90c16d7ec4cb28e054a4
|
<ide><path>Libraries/YellowBox/UI/YellowBoxImageSource.js
<ide> const YellowBoxImageSource = {
<ide> : scale > 1
<ide> ? 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAAf0lEQVRYhe2UvQ2AIBQGL3EBR3AESkv3bxxFN8DmWUgwvkI+En1X0cBd+IMg+DuDyDMCs413kfMiX4EMbD3l8oCaPIU85B4mYLEF5XJscrYFPRGvb/sZ4IlocubJGdH0wj1FSG77XYT0qdUi5O+8jOjyyZQRUnkZ0UUeBMF3OQC/0VsyGlxligAAAABJRU5ErkJggg=='
<ide> : 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAASElEQVQ4jWNgGJHAgIGBIYESze8ZGBjWU6L5PAMDgwBNNCdAFZJt83qoQmRDSHK2AFQhzBCy/IxsCNkBJsDAwLAfiknWPBIBAETPFeuA4fr6AAAAAElFTkSuQmCC',
<add> chevronLeft:
<add> scale > 2
<add> ? 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAgElEQVRoge3YMQ6DQBAEwRYvnf8nPAECbAnkyATsrt0lXUyPdAE6kCRJ/yXA+jopLbkhwHY6a2nNl8I1ftSA8Bm/MeQKBeNrBONrBONrBONrhMHxcPwOlMUvT32oszD8CoEj+giO6CE4oofgiB7Cj44Y86zyFoYPgOFPi5Ik6WwHji+QVIOyhqgAAAAASUVORK5CYII='
<add> : scale > 1
<add> ? 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAAAaUlEQVRYhe3WsQ2AMAwAwRcs5LEYg3HpYANoQKKgcEEUI/6adM5LbgySfmZsNDeACdiApdEfaQGswH6+Xd1jugc9xYQxxhjz9RhaxwxvDuul3MrAqDyjsozKKnWgXUqdsJcAZgqsTFJ5B7gjUNw0n0HHAAAAAElFTkSuQmCC'
<add> : 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAARklEQVQ4jWNgGPKAmUh1AQwMDBIMDAwPyLEkgYGB4T/UELI1J9BdcwCxmpnIMZ1YkECsK+hmCNZoZCHCgAUMDAwfoHg4AgDJuQ/bcLyV+QAAAABJRU5ErkJggg==',
<add> chevronRight:
<add> scale > 2
<add> ? 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAAeElEQVRoge3YMQ6AIBQE0Ykn3fs3HEEbC6MdFp+v8xJaspNQAZIkqbcA4zwpXTJpAPvlpHTNhHtAu4jwDDCiQjBiDcGINQQj1hCMWEN4Boy3l25vL/iL0PgJBcfXCI6vERxfIzi+Rmg8Hj7wrdL+Yys0/1qUJEmzDvSAkFQ8EOdJAAAAAElFTkSuQmCC'
<add> : scale > 1
<add> ? 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACQAAAAkCAYAAADhAJiYAAAAZElEQVRYhe3WsQmAQAxA0Y8ulLEcw3HtdANtBNvzCJjD/5pUgQ9pApJ+Zu7YCWABDmDLzemzA+c94+MW4AkqExUY1caoVka1GibqlSm7qJJSJzPGGGMylYqBgi9sACtFYiQN7wKC6VDcJ7tlpQAAAABJRU5ErkJggg=='
<add> : 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAANUlEQVQ4jWNgGLbAgYGBIYASAwIYGBj+MzAwJFBiSMLQMISJEpMptp2mmimORgcGChPSEAIAHGENPH8gqdYAAAAASUVORK5CYII=',
<ide> loader:
<ide> scale > 2
<ide> ? 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAABXElEQVRoge2a3W3DMAyEr+0CHkGjaISOcKN4k6zQETpCR+gGzgbpQ10kcamIpKQ6avQBBPxg3pHwL2UDg/8LASxrcNdKnCwATmssrUyeWgnju/DmXs8tRP+Sh2kgAJga1rFlWj2rcMD5YqQh77QJLbzIORjyRIJQCJW5ngYo5AVlrsgkCGqbsDbAhFfxqZsSZibP0oDXQ43HQPsg82i7sBoR+VcJq2YxKcPo0IoJLRZXmYGC6ezQmQUdVqhPBVH/CNBTSMkLVlzjA8Bbocb7GoPBoADi+umZilYzbrG/JrnljOvy734iu4To/BQaDB6Rl4LciPPF9Lmjhgvi+s7w6tCIGw3WKS0P8fvWNjt0ZkGHFeq7CQXTbkZKGg2JOxrqPUZ3s6ziNdju38IjS/dLi0EQpDLX2gDQYHEX6Hx5/YcA+6H0NgAYPnCMj3x7Mxq4wTGx3Q1E578aDDR8AX0mOGD6BEN/AAAAAElFTkSuQmCC'
<ide><path>Libraries/YellowBox/UI/YellowBoxInspectorHeader.js
<ide>
<ide> 'use strict';
<ide>
<add>const Image = require('Image');
<ide> const Platform = require('Platform');
<ide> const React = require('React');
<ide> const SafeAreaView = require('SafeAreaView');
<ide> const StyleSheet = require('StyleSheet');
<ide> const Text = require('Text');
<del>const UTFSequence = require('UTFSequence');
<ide> const View = require('View');
<add>const YellowBoxImageSource = require('YellowBoxImageSource');
<ide> const YellowBoxPressable = require('YellowBoxPressable');
<ide> const YellowBoxStyle = require('YellowBoxStyle');
<ide>
<ide> const YellowBoxInspectorHeader = (props: Props): React.Node => {
<ide> <View style={styles.header}>
<ide> <YellowBoxInspectorHeaderButton
<ide> disabled={props.warnings[prevIndex] == null}
<del> label={UTFSequence.TRIANGLE_LEFT}
<add> image={YellowBoxImageSource.chevronLeft}
<ide> onPress={() => props.onSelectIndex(prevIndex)}
<ide> />
<ide> <View style={styles.headerTitle}>
<ide> <Text style={styles.headerTitleText}>{titleText}</Text>
<ide> </View>
<ide> <YellowBoxInspectorHeaderButton
<ide> disabled={props.warnings[nextIndex] == null}
<del> label={UTFSequence.TRIANGLE_RIGHT}
<add> image={YellowBoxImageSource.chevronRight}
<ide> onPress={() => props.onSelectIndex(nextIndex)}
<ide> />
<ide> </View>
<ide> const YellowBoxInspectorHeader = (props: Props): React.Node => {
<ide> const YellowBoxInspectorHeaderButton = (
<ide> props: $ReadOnly<{|
<ide> disabled: boolean,
<del> label: React.Node,
<add> image: string,
<ide> onPress?: ?() => void,
<ide> |}>,
<ide> ): React.Node => (
<ide> const YellowBoxInspectorHeaderButton = (
<ide> onPress={props.disabled ? null : props.onPress}
<ide> style={styles.headerButton}>
<ide> {props.disabled ? null : (
<del> <Text style={styles.headerButtonText}>{props.label}</Text>
<add> <Image
<add> source={{height: 16, uri: props.image, width: 16}}
<add> style={styles.headerButtonImage}
<add> />
<ide> )}
<ide> </YellowBoxPressable>
<ide> );
<ide> const styles = StyleSheet.create({
<ide> aspectRatio: 1,
<ide> justifyContent: 'center',
<ide> },
<del> headerButtonText: {
<del> color: YellowBoxStyle.getTextColor(1),
<del> fontSize: 16,
<del> includeFontPadding: false,
<del> lineHeight: 20,
<add> headerButtonImage: {
<add> tintColor: YellowBoxStyle.getTextColor(1),
<ide> },
<ide> headerTitle: {
<ide> alignItems: 'center',
| 2
|
PHP
|
PHP
|
fix warning when _ids is an empty value
|
4bc05ff96fab36bb5c8f337f82143d02311f0c48
|
<ide><path>src/ORM/Marshaller.php
<ide> public function many(array $data, array $include = []) {
<ide> * @return array An array of built entities.
<ide> */
<ide> protected function _belongsToMany(Association $assoc, array $data, $include = []) {
<del> if (isset($data['_ids']) && is_array($data['_ids'])) {
<add> $hasIds = isset($data['_ids']);
<add> if ($hasIds && is_array($data['_ids'])) {
<ide> return $this->_loadBelongsToMany($assoc, $data['_ids']);
<ide> }
<add> if ($hasIds) {
<add> return [];
<add> }
<ide>
<ide> $records = $this->many($data, $include);
<ide> if (!in_array('_joinData', $include) && !isset($include['_joinData'])) {
<ide><path>tests/TestCase/ORM/MarshallerTest.php
<ide> public function testManyAssociations() {
<ide> * @return void
<ide> */
<ide> public function testOneGenerateBelongsToManyEntitiesFromIds() {
<add> $data = [
<add> 'title' => 'Haz tags',
<add> 'body' => 'Some content here',
<add> 'tags' => ['_ids' => '']
<add> ];
<add> $marshall = new Marshaller($this->articles);
<add> $result = $marshall->one($data, ['Tags']);
<add>
<add> $this->assertCount(0, $result->tags);
<add>
<ide> $data = [
<ide> 'title' => 'Haz tags',
<ide> 'body' => 'Some content here',
| 2
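
The guard distinguishes "`_ids` present but empty or scalar" from "`_ids` absent": the former should yield no associated records rather than fall through to full nested marshalling. The same branching in a JavaScript sketch (hypothetical names; the stubs stand in for the real query and marshalling steps):

```javascript
const loadByIds = ids => ids.map(id => ({id})); // stand-in for an id lookup
const marshalMany = rows => rows;               // stand-in for full marshalling

function marshalBelongsToMany(data) {
  const hasIds = data._ids !== undefined;
  if (hasIds && Array.isArray(data._ids)) {
    return loadByIds(data._ids);
  }
  if (hasIds) {
    return []; // _ids was '' or another scalar: treat as "no associations"
  }
  return marshalMany(data);
}

console.log(marshalBelongsToMany({_ids: [1, 2]})); // [{id: 1}, {id: 2}]
console.log(marshalBelongsToMany({_ids: ''}));     // [] (previously warned)
```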
|
Go
|
Go
|
enable more integration tests
|
66a37b460c839ee7a9aca118840b441764292c97
|
<ide><path>integration/build/build_test.go
<ide> import (
<ide> )
<ide>
<ide> func TestBuildWithRemoveAndForceRemove(t *testing.T) {
<del> skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME")
<ide> defer setupTest(t)()
<ide>
<ide> cases := []struct {
<ide> func TestBuildMultiStageCopy(t *testing.T) {
<ide>
<ide> func TestBuildMultiStageParentConfig(t *testing.T) {
<ide> skip.If(t, versions.LessThan(testEnv.DaemonAPIVersion(), "1.35"), "broken in earlier versions")
<del> skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME")
<ide> dockerfile := `
<ide> FROM busybox AS stage0
<ide> ENV WHO=parent
<ide> func TestBuildWithEmptyLayers(t *testing.T) {
<ide> // #35652
<ide> func TestBuildMultiStageOnBuild(t *testing.T) {
<ide> skip.If(t, versions.LessThan(testEnv.DaemonAPIVersion(), "1.33"), "broken in earlier versions")
<del> skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME")
<ide> defer setupTest(t)()
<ide> // test both metadata and layer based commands as they may be implemented differently
<ide> dockerfile := `FROM busybox AS stage1
<ide> COPY bar /`
<ide> // docker/for-linux#135
<ide> // #35641
<ide> func TestBuildMultiStageLayerLeak(t *testing.T) {
<del> skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME")
<ide> skip.If(t, versions.LessThan(testEnv.DaemonAPIVersion(), "1.37"), "broken in earlier versions")
<ide> ctx := context.TODO()
<ide> defer setupTest(t)()
<ide><path>integration/container/copy_test.go
<ide> import (
<ide>
<ide> func TestCopyFromContainerPathDoesNotExist(t *testing.T) {
<ide> defer setupTest(t)()
<del> skip.If(t, testEnv.OSType == "windows")
<ide>
<ide> ctx := context.Background()
<ide> apiclient := testEnv.APIClient()
<ide> func TestCopyFromContainerPathIsNotDir(t *testing.T) {
<ide>
<ide> func TestCopyToContainerPathDoesNotExist(t *testing.T) {
<ide> defer setupTest(t)()
<del> skip.If(t, testEnv.OSType == "windows")
<ide>
<ide> ctx := context.Background()
<ide> apiclient := testEnv.APIClient()
<ide><path>integration/container/exec_test.go
<ide> func TestExecWithCloseStdin(t *testing.T) {
<ide>
<ide> func TestExec(t *testing.T) {
<ide> skip.If(t, versions.LessThan(testEnv.DaemonAPIVersion(), "1.35"), "broken in earlier versions")
<del> skip.If(t, testEnv.OSType == "windows", "FIXME. Probably needs to wait for container to be in running state.")
<ide> defer setupTest(t)()
<ide> ctx := context.Background()
<ide> client := testEnv.APIClient()
<ide> func TestExec(t *testing.T) {
<ide> assert.NilError(t, err)
<ide> out := string(r)
<ide> assert.NilError(t, err)
<del> assert.Assert(t, is.Contains(out, "PWD=/tmp"), "exec command not running in expected /tmp working directory")
<add> expected := "PWD=/tmp"
<add> if testEnv.OSType == "windows" {
<add> expected = "PWD=C:/tmp"
<add> }
<add> assert.Assert(t, is.Contains(out, expected), "exec command not running in expected /tmp working directory")
<ide> assert.Assert(t, is.Contains(out, "FOO=BAR"), "exec command not running with expected environment variable FOO")
<ide> }
<ide>
<ide><path>integration/container/kill_test.go
<ide> func TestKillContainerInvalidSignal(t *testing.T) {
<ide> }
<ide>
<ide> func TestKillContainer(t *testing.T) {
<del> skip.If(t, testEnv.OSType == "windows", "TODO Windows: FIXME. No SIGWINCH")
<ide> defer setupTest(t)()
<ide> client := testEnv.APIClient()
<ide>
<ide> testCases := []struct {
<ide> doc string
<ide> signal string
<ide> status string
<add> skipOs string
<ide> }{
<ide> {
<ide> doc: "no signal",
<ide> signal: "",
<ide> status: "exited",
<add> skipOs: "",
<ide> },
<ide> {
<ide> doc: "non killing signal",
<ide> signal: "SIGWINCH",
<ide> status: "running",
<add> skipOs: "windows",
<ide> },
<ide> {
<ide> doc: "killing signal",
<ide> signal: "SIGTERM",
<ide> status: "exited",
<add> skipOs: "",
<ide> },
<ide> }
<ide>
<ide> for _, tc := range testCases {
<ide> tc := tc
<ide> t.Run(tc.doc, func(t *testing.T) {
<add> skip.If(t, testEnv.OSType == tc.skipOs, "Windows does not support SIGWINCH")
<ide> ctx := context.Background()
<ide> id := container.Run(ctx, t, client)
<ide> err := client.ContainerKill(ctx, id, tc.signal)
<ide><path>integration/container/nat_test.go
<ide> func TestNetworkNat(t *testing.T) {
<ide> }
<ide>
<ide> func TestNetworkLocalhostTCPNat(t *testing.T) {
<del> skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME")
<ide> skip.If(t, testEnv.IsRemoteDaemon)
<ide>
<ide> defer setupTest(t)()
<ide><path>integration/container/resize_test.go
<ide> import (
<ide> )
<ide>
<ide> func TestResize(t *testing.T) {
<del> skip.If(t, testEnv.OSType == "windows", "FIXME")
<ide> defer setupTest(t)()
<ide> client := testEnv.APIClient()
<ide> ctx := context.Background()
<ide><path>integration/image/remove_test.go
<ide> import (
<ide> "github.com/docker/docker/integration/internal/container"
<ide> "gotest.tools/v3/assert"
<ide> is "gotest.tools/v3/assert/cmp"
<del> "gotest.tools/v3/skip"
<ide> )
<ide>
<ide> func TestRemoveImageOrphaning(t *testing.T) {
<del> skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME")
<ide> defer setupTest(t)()
<ide> ctx := context.Background()
<ide> client := testEnv.APIClient()
<ide><path>integration/image/tag_test.go
<ide> import (
<ide> "github.com/docker/docker/testutil"
<ide> "gotest.tools/v3/assert"
<ide> is "gotest.tools/v3/assert/cmp"
<del> "gotest.tools/v3/skip"
<ide> )
<ide>
<ide> // tagging a named image in a new unprefixed repo should work
<ide> func TestTagExistedNameWithoutForce(t *testing.T) {
<ide> // ensure tagging using official names works
<ide> // ensure all tags result in the same name
<ide> func TestTagOfficialNames(t *testing.T) {
<del> skip.If(t, testEnv.OSType == "windows")
<ide> defer setupTest(t)()
<ide> client := testEnv.APIClient()
<ide> ctx := context.Background()
<ide><path>integration/volume/volume_test.go
<ide> import (
<ide> "github.com/google/go-cmp/cmp/cmpopts"
<ide> "gotest.tools/v3/assert"
<ide> is "gotest.tools/v3/assert/cmp"
<del> "gotest.tools/v3/skip"
<ide> )
<ide>
<ide> func TestVolumesCreateAndList(t *testing.T) {
<ide> func TestVolumesCreateAndList(t *testing.T) {
<ide> }
<ide>
<ide> func TestVolumesRemove(t *testing.T) {
<del> skip.If(t, testEnv.OSType == "windows", "FIXME")
<ide> defer setupTest(t)()
<ide> client := testEnv.APIClient()
<ide> ctx := context.Background()
| 9
|
Javascript
|
Javascript
|
add warning that cameraroll has been moved to rnc
|
a2f11cb01fc01031e48bc4d8a1ff29edf32cdd9f
|
<ide><path>Libraries/react-native/react-native-implementation.js
<ide> module.exports = {
<ide> return require('BackHandler');
<ide> },
<ide> get CameraRoll() {
<add> warnOnce(
<add> 'cameraroll-moved',
<add> 'CameraRoll has been extracted from react-native core and will be removed in a future release. ' +
<add> "It can now be installed and imported from '@react-native-community/cameraroll' instead of 'react-native'. " +
<add> 'See https://github.com/react-native-community/react-native-cameraroll',
<add> );
<ide> return require('CameraRoll');
<ide> },
<ide> get Clipboard() {
| 1
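
`warnOnce` here presumably deduplicates by key so the deprecation message fires only on first access of the getter. A minimal sketch of such a helper — this is an assumed implementation for illustration, not React Native's actual one:

```javascript
// Hypothetical warn-once helper: each key warns at most one time.
const warnedKeys = new Set();

function warnOnce(key, message) {
  if (warnedKeys.has(key)) {
    return; // already warned for this key
  }
  warnedKeys.add(key);
  console.warn(message);
}

warnOnce('cameraroll-moved', 'CameraRoll has been extracted...');
warnOnce('cameraroll-moved', 'CameraRoll has been extracted...'); // silent
```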
|
Javascript
|
Javascript
|
use correct fontweight value in thememanager spec
|
ff429230f1a4b6869522e9ec993e1a846d74cd13
|
<ide><path>spec/theme-manager-spec.js
<ide> h2 {
<ide> it('returns a disposable allowing styles applied by the given path to be removed', function () {
<ide> const cssPath = require.resolve('./fixtures/css.css')
<ide>
<del> expect(getComputedStyle(document.body).fontWeight).not.toBe('bold')
<add> expect(getComputedStyle(document.body).fontWeight).not.toBe('700')
<ide> const disposable = atom.themes.requireStylesheet(cssPath)
<del> expect(getComputedStyle(document.body).fontWeight).toBe('bold')
<add> expect(getComputedStyle(document.body).fontWeight).toBe('700')
<ide>
<ide> let styleElementRemovedHandler
<ide> atom.styles.onDidRemoveStyleElement(styleElementRemovedHandler = jasmine.createSpy('styleElementRemovedHandler'))
| 1
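
The spec change reflects that `getComputedStyle` returns the resolved numeric weight: browsers normalize the keyword `bold` to `700`. A quick illustration, assuming a DOM environment:

```javascript
// In a browser, keyword font weights resolve to their numeric values.
document.body.style.fontWeight = 'bold';
const resolved = getComputedStyle(document.body).fontWeight;
console.log(resolved); // '700', not 'bold'
```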
|
Javascript
|
Javascript
|
optimize tessellatemodifier.js slightly
|
c222310930338f978d958857db0a99bd437d18c3
|
<ide><path>examples/js/modifiers/TessellateModifier.js
<ide> THREE.TessellateModifier.prototype.modify = function ( geometry ) {
<ide>
<ide> var faces = [];
<ide> var faceVertexUvs = [];
<del> var maxEdgeLength = this.maxEdgeLength;
<add> var maxEdgeLengthSquared = this.maxEdgeLength * this.maxEdgeLength;
<ide>
<ide> for ( var i = 0, il = geometry.faceVertexUvs.length; i < il; i ++ ) {
<ide>
<ide> THREE.TessellateModifier.prototype.modify = function ( geometry ) {
<ide> var vb = geometry.vertices[ b ];
<ide> var vc = geometry.vertices[ c ];
<ide>
<del> var dab = va.distanceTo( vb );
<del> var dbc = vb.distanceTo( vc );
<del> var dac = va.distanceTo( vc );
<add> var dab = va.distanceToSquared( vb );
<add> var dbc = vb.distanceToSquared( vc );
<add> var dac = va.distanceToSquared( vc );
<ide>
<del> if ( dab > maxEdgeLength || dbc > maxEdgeLength || dac > maxEdgeLength ) {
<add> if ( dab > maxEdgeLengthSquared || dbc > maxEdgeLengthSquared || dac > maxEdgeLengthSquared ) {
<ide>
<ide> var m = geometry.vertices.length;
<ide>
| 1
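
Comparing squared distances against a squared threshold avoids a square root per edge while preserving the comparison, since both sides are non-negative. A standalone sketch of the trick:

```javascript
// For non-negative values, a > b iff a*a > b*b, so the sqrt can be skipped.
function distanceSquared(ax, ay, az, bx, by, bz) {
  const dx = ax - bx, dy = ay - by, dz = az - bz;
  return dx * dx + dy * dy + dz * dz;
}

const maxEdgeLength = 5;
const maxEdgeLengthSquared = maxEdgeLength * maxEdgeLength;

// Equivalent to Math.sqrt(distanceSquared(...)) > maxEdgeLength, minus the sqrt.
const tooLong = distanceSquared(0, 0, 0, 3, 4, 1) > maxEdgeLengthSquared;
console.log(tooLong); // true (26 > 25, i.e. sqrt(26) ≈ 5.1 > 5)
```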
|
Python
|
Python
|
improve code quality of externaltasksensor
|
b57b9321133a28126e17d17885c80dc04a2e121e
|
<ide><path>airflow/sensors/external_task_sensor.py
<ide>
<ide> import datetime
<ide> import os
<del>from typing import FrozenSet, Optional, Union
<add>from typing import Any, Callable, FrozenSet, List, Optional, Union
<ide>
<ide> from sqlalchemy import func
<ide>
<ide> def operator_extra_links(self):
<ide> def __init__(
<ide> self,
<ide> *,
<del> external_dag_id,
<del> external_task_id=None,
<del> allowed_states=None,
<del> failed_states=None,
<del> execution_delta=None,
<del> execution_date_fn=None,
<del> check_existence=False,
<add> external_dag_id: str,
<add> external_task_id: Optional[str] = None,
<add> allowed_states: Optional[List[str]] = None,
<add> failed_states: Optional[List[str]] = None,
<add> execution_delta: Optional[datetime.timedelta] = None,
<add> execution_date_fn: Optional[Callable] = None,
<add> check_existence: bool = False,
<ide> **kwargs,
<ide> ):
<ide> super().__init__(**kwargs)
<ide> def __init__(
<ide> self.external_dag_id = external_dag_id
<ide> self.external_task_id = external_task_id
<ide> self.check_existence = check_existence
<del> # we only check the existence for the first time.
<del> self.has_checked_existence = False
<add> self._has_checked_existence = False
<ide>
<ide> @provide_session
<ide> def poke(self, context, session=None):
<ide> def poke(self, context, session=None):
<ide> dttm = context['execution_date']
<ide>
<ide> dttm_filter = dttm if isinstance(dttm, list) else [dttm]
<del> serialized_dttm_filter = ','.join([datetime.isoformat() for datetime in dttm_filter])
<add> serialized_dttm_filter = ','.join(dt.isoformat() for dt in dttm_filter)
<ide>
<ide> self.log.info(
<ide> 'Poking for %s.%s on %s ... ', self.external_dag_id, self.external_task_id, serialized_dttm_filter
<ide> )
<ide>
<del> DM = DagModel
<del> # we only do the check for 1st time, no need for subsequent poke
<del> if self.check_existence and not self.has_checked_existence:
<del> dag_to_wait = session.query(DM).filter(DM.dag_id == self.external_dag_id).first()
<del>
<del> if not dag_to_wait:
<del> raise AirflowException(f'The external DAG {self.external_dag_id} does not exist.')
<del> elif not os.path.exists(dag_to_wait.fileloc):
<del> raise AirflowException(f'The external DAG {self.external_dag_id} was deleted.')
<del>
<del> if self.external_task_id:
<del> refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(self.external_dag_id)
<del> if not refreshed_dag_info.has_task(self.external_task_id):
<del> raise AirflowException(
<del> f'The external task {self.external_task_id} in '
<del> f'DAG {self.external_dag_id} does not exist.'
<del> )
<del> self.has_checked_existence = True
<add> # In poke mode this will check dag existence only once
<add> if self.check_existence and not self._has_checked_existence:
<add> self._check_for_existence(session=session)
<ide>
<ide> count_allowed = self.get_count(dttm_filter, session, self.allowed_states)
<ide>
<ide> count_failed = -1
<del> if len(self.failed_states) > 0:
<add> if self.failed_states:
<ide> count_failed = self.get_count(dttm_filter, session, self.failed_states)
<ide>
<del> session.commit()
<ide> if count_failed == len(dttm_filter):
<ide> if self.external_task_id:
<ide> raise AirflowException(
<ide> def poke(self, context, session=None):
<ide>
<ide> return count_allowed == len(dttm_filter)
<ide>
<del> def get_count(self, dttm_filter, session, states):
<add> def _check_for_existence(self, session) -> None:
<add> dag_to_wait = session.query(DagModel).filter(DagModel.dag_id == self.external_dag_id).first()
<add>
<add> if not dag_to_wait:
<add> raise AirflowException(f'The external DAG {self.external_dag_id} does not exist.')
<add>
<add> if not os.path.exists(dag_to_wait.fileloc):
<add> raise AirflowException(f'The external DAG {self.external_dag_id} was deleted.')
<add>
<add> if self.external_task_id:
<add> refreshed_dag_info = DagBag(dag_to_wait.fileloc).get_dag(self.external_dag_id)
<add> if not refreshed_dag_info.has_task(self.external_task_id):
<add> raise AirflowException(
<add> f'The external task {self.external_task_id} in '
<add> f'DAG {self.external_dag_id} does not exist.'
<add> )
<add> self._has_checked_existence = True
<add>
<add> def get_count(self, dttm_filter, session, states) -> int:
<ide> """
<ide> Get the count of records against dttm filter and states
<ide>
<ide> def get_count(self, dttm_filter, session, states):
<ide> """
<ide> TI = TaskInstance
<ide> DR = DagRun
<del>
<ide> if self.external_task_id:
<del> # .count() is inefficient
<ide> count = (
<del> session.query(func.count())
<add> session.query(func.count()) # .count() is inefficient
<ide> .filter(
<ide> TI.dag_id == self.external_dag_id,
<ide> TI.task_id == self.external_task_id,
<ide> def get_count(self, dttm_filter, session, states):
<ide> .scalar()
<ide> )
<ide> else:
<del> # .count() is inefficient
<ide> count = (
<ide> session.query(func.count())
<ide> .filter(
<ide> def get_count(self, dttm_filter, session, states):
<ide> )
<ide> return count
<ide>
<del> def _handle_execution_date_fn(self, context):
<add> def _handle_execution_date_fn(self, context) -> Any:
<ide> """
<ide> This function is to handle backwards compatibility with how this operator was
<ide> previously where it only passes the execution date, but also allow for the newer
<ide> class ExternalTaskMarker(DummyOperator):
<ide> def __init__(
<ide> self,
<ide> *,
<del> external_dag_id,
<del> external_task_id,
<add> external_dag_id: str,
<add> external_task_id: str,
<ide> execution_date: Optional[Union[str, datetime.datetime]] = "{{ execution_date.isoformat() }}",
<ide> recursion_depth: int = 10,
<ide> **kwargs,
<ide> def __init__(
<ide> self.execution_date = execution_date
<ide> else:
<ide> raise TypeError(
<del> 'Expected str or datetime.datetime type for execution_date. Got {}'.format(
<del> type(execution_date)
<del> )
<add> f'Expected str or datetime.datetime type for execution_date. Got {type(execution_date)}'
<ide> )
<add>
<ide> if recursion_depth <= 0:
<ide> raise ValueError("recursion_depth should be a positive integer")
<ide> self.recursion_depth = recursion_depth
| 1
|
Javascript
|
Javascript
|
buffer partial reads before doing expect match
|
afe3c1cdea8cd88e04a09961567216606f5f0117
|
<ide><path>test/simple/test-repl.js
<ide> function tcp_test() {
<ide> });
<ide>
<ide> server_tcp.addListener('listening', function () {
<add> var read_buffer = "";
<add>
<ide> client_tcp = net.createConnection(PORT);
<ide>
<ide> client_tcp.addListener('connect', function () {
<ide> function tcp_test() {
<ide> });
<ide>
<ide> client_tcp.addListener('data', function (data) {
<del> var data_str = data.asciiSlice(0, data.length);
<del> sys.puts("TCP data: " + data_str + ", compare to " + client_tcp.expect);
<del> assert.strictEqual(client_tcp.expect, data_str);
<del> if (client_tcp.list && client_tcp.list.length > 0) {
<del> send_expect(client_tcp.list);
<add> read_buffer += data.asciiSlice(0, data.length);
<add> sys.puts("TCP data: " + read_buffer + ", expecting " + client_tcp.expect);
<add> if (read_buffer.indexOf(prompt_tcp) !== -1) {
<add> assert.strictEqual(client_tcp.expect, read_buffer);
<add> read_buffer = "";
<add> if (client_tcp.list && client_tcp.list.length > 0) {
<add> send_expect(client_tcp.list);
<add> }
<add> else {
<add> sys.puts("End of TCP test.");
<add> client_tcp.end();
<add> client_unix.end();
<add> clearTimeout(timer);
<add> }
<ide> }
<ide> else {
<del> sys.puts("End of TCP test.");
<del> client_tcp.end();
<del> client_unix.end();
<del> clearTimeout(timer);
<add> sys.puts("didn't see prompt yet, buffering");
<ide> }
<ide> });
<ide>
<ide> function unix_test() {
<ide> });
<ide>
<ide> server_unix.addListener('listening', function () {
<add> var read_buffer = "";
<add>
<ide> client_unix = net.createConnection(unix_socket_path);
<ide>
<ide> client_unix.addListener('connect', function () {
<ide> function unix_test() {
<ide> });
<ide>
<ide> client_unix.addListener('data', function (data) {
<del> var data_str = data.asciiSlice(0, data.length);
<del> sys.puts("Unix data: " + data_str + ", compare to " + client_unix.expect);
<del> assert.strictEqual(client_unix.expect, data_str);
<del> if (client_unix.list && client_unix.list.length > 0) {
<del> send_expect(client_unix.list);
<add> read_buffer += data.asciiSlice(0, data.length);
<add> sys.puts("Unix data: " + read_buffer + ", expecting " + client_unix.expect);
<add> if (read_buffer.indexOf(prompt_unix) !== -1) {
<add> assert.strictEqual(client_unix.expect, read_buffer);
<add> read_buffer = "";
<add> if (client_unix.list && client_unix.list.length > 0) {
<add> send_expect(client_unix.list);
<add> }
<add> else {
<add> sys.puts("End of Unix test, running TCP test.");
<add> tcp_test();
<add> }
<ide> }
<ide> else {
<del> sys.puts("End of Unix test, running TCP test.");
<del> tcp_test();
<add> sys.puts("didn't see prompt yet, bufering.");
<ide> }
<ide> });
<ide>
| 1
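
The fix accumulates chunks until the prompt appears because TCP delivers a byte stream, not message boundaries; asserting on each partial chunk races against framing. A minimal sketch of the buffering pattern (names are illustrative, not from the test):

```javascript
// Accumulate stream chunks and only act once a known delimiter arrives.
const prompt = '> ';
let readBuffer = '';

function handleResponse(text) {
  console.log('got full response:', text);
}

function onData(chunk) {
  readBuffer += chunk;
  if (readBuffer.indexOf(prompt) === -1) {
    return; // partial read; keep buffering
  }
  handleResponse(readBuffer);
  readBuffer = '';
}

onData('hel');    // buffered, no assertion yet
onData('lo\n> '); // delimiter seen: flushes 'hello\n> '
```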
|
Text
|
Text
|
fix line length [ci skip]
|
98a5169c2ee6355a6e13df33b6219b04201d1c96
|
<ide><path>guides/source/migrations.md
<ide> this, then you should set the schema format to `:sql`.
<ide> Instead of using Active Record's schema dumper, the database's structure will
<ide> be dumped using a tool specific to the database (via the `db:structure:dump`
<ide> Rake task) into `db/structure.sql`. For example, for PostgreSQL, the `pg_dump`
<del>utility is used. For MySQL, this file will contain the output of `SHOW CREATE
<del>TABLE` for the various tables.
<add>utility is used. For MySQL, this file will contain the output of
<add>`SHOW CREATE TABLE` for the various tables.
<ide>
<ide> Loading these schemas is simply a question of executing the SQL statements they
<ide> contain. By definition, this will create a perfect copy of the database's
| 1
|
Python
|
Python
|
remove obsolete function
|
c6b4f63c7c96a8c1dd52bb3afc1aade8fbfdfc3a
|
<ide><path>spacy/ml/spacy_vectors.py
<del>import numpy
<del>from thinc.api import Model, Unserializable
<del>
<del>
<del>def SpacyVectors(vectors) -> Model:
<del> attrs = {"vectors": Unserializable(vectors)}
<del> model = Model("spacy_vectors", forward, attrs=attrs)
<del> return model
<del>
<del>
<del>def forward(model, docs, is_train: bool):
<del> batch = []
<del> vectors = model.attrs["vectors"].obj
<del> for doc in docs:
<del> indices = numpy.zeros((len(doc),), dtype="i")
<del> for i, word in enumerate(doc):
<del> if word.orth in vectors.key2row:
<del> indices[i] = vectors.key2row[word.orth]
<del> else:
<del> indices[i] = 0
<del> batch_vectors = vectors.data[indices]
<del> batch.append(batch_vectors)
<del>
<del> def backprop(dY):
<del> return None
<del>
<del> return batch, backprop
| 1
|
Ruby
|
Ruby
|
make test for “cask already installed” less strict
|
e4da2dfb9f49349251b51e80be57d2a70c979321
|
<ide><path>Library/Homebrew/cask/test/cask/cli/install_test.rb
<ide>
<ide> TestHelper.must_output(self, lambda {
<ide> Hbc::CLI::Install.run("local-transmission", "")
<del> }, %r{Warning: A Cask for local-transmission is already installed. Add the "--force" option to force re-install.})
<add> }, %r{Warning: A Cask for local-transmission is already installed.})
<ide> end
<ide>
<ide> it "allows double install with --force" do
| 1
|
Javascript
|
Javascript
|
fix linting errors
|
e23af02606ef6ed6e575c0344ea2027e97a11ce2
|
<ide><path>src/block-decorations-presenter.js
<ide> class BlockDecorationsPresenter {
<ide> this.emitter = new Emitter()
<ide> this.firstUpdate = true
<ide> this.lineTopIndex = lineTopIndex
<del> this.blocksByDecoration = new Map
<del> this.decorationsByBlock = new Map
<del> this.observedDecorations = new Set
<del> this.measuredDecorations = new Set
<add> this.blocksByDecoration = new Map()
<add> this.decorationsByBlock = new Map()
<add> this.observedDecorations = new Set()
<add> this.measuredDecorations = new Set()
<ide>
<ide> this.observeModel()
<ide> }
<ide> class BlockDecorationsPresenter {
<ide> }
<ide>
<ide> onDidUpdateState (callback) {
<del> return this.emitter.on("did-update-state", callback)
<add> return this.emitter.on('did-update-state', callback)
<ide> }
<ide>
<ide> setLineHeight (lineHeight) {
<ide> class BlockDecorationsPresenter {
<ide>
<ide> update () {
<ide> if (this.firstUpdate) {
<del> for (let decoration of this.model.getDecorations({type: "block"})) {
<add> for (let decoration of this.model.getDecorations({type: 'block'})) {
<ide> this.observeDecoration(decoration)
<ide> }
<ide> this.firstUpdate = false
<ide> class BlockDecorationsPresenter {
<ide> }
<ide>
<ide> this.measuredDecorations.add(decoration)
<del> this.emitter.emit("did-update-state")
<add> this.emitter.emit('did-update-state')
<ide> }
<ide>
<ide> invalidateDimensionsForDecoration (decoration) {
<ide> this.measuredDecorations.delete(decoration)
<del> this.emitter.emit("did-update-state")
<add> this.emitter.emit('did-update-state')
<ide> }
<ide>
<ide> decorationsForScreenRow (screenRow) {
<del> let blocks = this.lineTopIndex.allBlocks().filter((block) => block.row == screenRow)
<add> let blocks = this.lineTopIndex.allBlocks().filter((block) => block.row === screenRow)
<ide> return blocks.map((block) => this.decorationsByBlock.get(block.id)).filter((decoration) => decoration)
<ide> }
<ide>
<ide> decorationsForScreenRowRange (startRow, endRow) {
<ide> let blocks = this.lineTopIndex.allBlocks()
<del> let decorationsByScreenRow = new Map
<add> let decorationsByScreenRow = new Map()
<ide> for (let block of blocks) {
<ide> let decoration = this.decorationsByBlock.get(block.id)
<ide> let hasntMeasuredDecoration = !this.measuredDecorations.has(decoration)
<ide> class BlockDecorationsPresenter {
<ide> }
<ide>
<ide> observeDecoration (decoration) {
<del> if (!decoration.isType("block") || this.observedDecorations.has(decoration)) {
<add> if (!decoration.isType('block') || this.observedDecorations.has(decoration)) {
<ide> return
<ide> }
<ide>
<ide> class BlockDecorationsPresenter {
<ide> let block = this.lineTopIndex.insertBlock(screenRow, 0)
<ide> this.decorationsByBlock.set(block, decoration)
<ide> this.blocksByDecoration.set(decoration, block)
<del> this.emitter.emit("did-update-state")
<add> this.emitter.emit('did-update-state')
<ide> }
<ide>
<ide> didMoveDecoration (decoration, {textChanged}) {
<ide> class BlockDecorationsPresenter {
<ide> let block = this.blocksByDecoration.get(decoration)
<ide> let newScreenRow = decoration.getMarker().getHeadScreenPosition().row
<ide> this.lineTopIndex.moveBlock(block, newScreenRow)
<del> this.emitter.emit("did-update-state")
<add> this.emitter.emit('did-update-state')
<ide> }
<ide>
<ide> didDestroyDecoration (decoration) {
<ide> class BlockDecorationsPresenter {
<ide> this.blocksByDecoration.delete(decoration)
<ide> this.decorationsByBlock.delete(block)
<ide> }
<del> this.emitter.emit("did-update-state")
<add> this.emitter.emit('did-update-state')
<ide> }
<ide> }
<ide><path>src/linear-line-top-index.js
<ide> class LineTopIndex {
<ide> }
<ide>
<ide> resizeBlock (id, height) {
<del> let block = this.blocks.find((block) => block.id == id)
<add> let block = this.blocks.find((block) => block.id === id)
<ide> if (block) {
<ide> block.height = height
<ide> }
<ide> }
<ide>
<ide> moveBlock (id, newRow) {
<del> let block = this.blocks.find((block) => block.id == id)
<add> let block = this.blocks.find((block) => block.id === id)
<ide> if (block) {
<ide> block.row = newRow
<ide> this.blocks.sort((a, b) => a.row - b.row)
<ide> }
<ide> }
<ide>
<ide> removeBlock (id) {
<del> let index = this.blocks.findIndex((block) => block.id == id)
<del> if (index != -1) {
<add> let index = this.blocks.findIndex((block) => block.id === id)
<add> if (index !== -1) {
<ide> this.blocks.splice(index, 1)
<ide> }
<ide> }
<ide> class LineTopIndex {
<ide> }
<ide>
<ide> blocksHeightForRow (row) {
<del> let blocksForRow = this.blocks.filter((block) => block.row == row)
<add> let blocksForRow = this.blocks.filter((block) => block.row === row)
<ide> return blocksForRow.reduce((a, b) => a + b.height, 0)
<ide> }
<ide>
<ide> class LineTopIndex {
<ide> let remainingHeight = Math.max(0, top - lastTop)
<ide> let remainingRows = Math.min(this.maxRow, lastRow + remainingHeight / this.defaultLineHeight)
<ide> switch (roundingStrategy) {
<del> case "floor":
<add> case 'floor':
<ide> return Math.floor(remainingRows)
<del> case "ceil":
<add> case 'ceil':
<ide> return Math.ceil(remainingRows)
<ide> default:
<ide> throw new Error(`Cannot use '${roundingStrategy}' as a rounding strategy!`)
| 2
|
Javascript
|
Javascript
|
remove outdated comment about gcc
|
dc232e6774e60029be7995d1b4e3e37f8ff843ef
|
<ide><path>scripts/rollup/build.js
<ide> function getPlugins(
<ide> // Don't let it create global variables in the browser.
<ide> // https://github.com/facebook/react/issues/10909
<ide> assume_function_wrapper: !isUMDBundle,
<del> // Works because `google-closure-compiler-js` is forked in Yarn lockfile.
<del> // We can remove this if GCC merges my PR:
<del> // https://github.com/google/closure-compiler/pull/2707
<del> // and then the compiled version is released via `google-closure-compiler-js`.
<ide> renaming: !shouldStayReadable,
<ide> })
<ide> ),
| 1
|
Ruby
|
Ruby
|
simplify the delete all w/ dependency method
|
e0e586094f968b1f8fa410aa84d105bc8e44e537
|
<ide><path>activerecord/lib/active_record/associations/collection_association.rb
<ide> def delete(*records)
<ide> end
<ide>
<ide> def delete_all_with_dependency(dependent)
<del> if (loaded? || dependent == :destroy) && dependent != :delete_all
<del> delete_or_destroy(load_target, dependent)
<del> else
<add> if dependent == :delete_all
<ide> delete_records(:all, dependent)
<add> else
<add> delete_or_destroy(load_target, dependent)
<ide> end
<ide> end
<ide>
| 1
|
PHP
|
PHP
|
add initial implementation of brace placeholders
|
d0e8f2d40ee4d934b5e784eaa52334cea0ed83d9
|
<ide><path>src/Routing/Route/Route.php
<ide> class Route
<ide> */
<ide> protected $middleware = [];
<ide>
<add> /**
<add> * Track whether or not brace keys `{var}` were used.
<add> *
<add> * @var bool
<add> */
<add> protected $braceKeys = false;
<add>
<ide> /**
<ide> * Valid HTTP methods.
<ide> *
<ide> protected function _writeRoute()
<ide> $names = $routeParams = [];
<ide> $parsed = preg_quote($this->template, '#');
<ide>
<del> preg_match_all('/:([a-z0-9-_]+(?<![-_]))/i', $route, $namedElements);
<add> if (strpos($route, '{') !== false) {
<add> preg_match_all('/\{([a-z0-9-_]+)\}/i', $route, $namedElements);
<add> $this->braceKeys = true;
<add> } else {
<add> preg_match_all('/:([a-z0-9-_]+(?<![-_]))/i', $route, $namedElements);
<add> $this->braceKeys = false;
<add> }
<ide> foreach ($namedElements[1] as $i => $name) {
<del> $search = '\\' . $namedElements[0][$i];
<add> $search = preg_quote($namedElements[0][$i]);
<ide> if (isset($this->options[$name])) {
<ide> $option = null;
<ide> if ($name !== 'plugin' && array_key_exists($name, $this->defaults)) {
<ide> $option = '?';
<ide> }
<del> $slashParam = '/\\' . $namedElements[0][$i];
<add> $slashParam = '/' . $search;
<ide> if (strpos($parsed, $slashParam) !== false) {
<ide> $routeParams[$slashParam] = '(?:/(?P<' . $name . '>' . $this->options[$name] . ')' . $option . ')' . $option;
<ide> } else {
<ide> protected function _writeUrl($params, $pass = [], $query = [])
<ide> } elseif (strpos($out, $key) != strlen($out) - strlen($key)) {
<ide> $key .= '/';
<ide> }
<del> $search[] = ':' . $key;
<add> if ($this->braceKeys) {
<add> $search[] = "{{$key}}";
<add> } else {
<add> $search[] = ':' . $key;
<add> }
<ide> $replace[] = $string;
<ide> }
<ide>
<ide><path>tests/TestCase/Routing/Route/RouteTest.php
<ide> public function testBasicRouteCompiling()
<ide> *
<ide> * @return void
<ide> */
<del> public function testRouteBuildingSmallPlaceholders()
<add> public function testRouteCompileSmallPlaceholders()
<ide> {
<ide> $route = new Route(
<ide> '/fighters/:id/move/:x/:y',
<ide> public function testRouteBuildingSmallPlaceholders()
<ide> $this->assertEquals('/fighters/123/move/8/42', $result);
<ide> }
<ide>
<add> /**
<add> * Test route compile with brace format.
<add> *
<add> * @return void
<add> */
<add> public function testRouteCompileBraces()
<add> {
<add> $route = new Route(
<add> '/fighters/{id}/move/{x}/{y}',
<add> ['controller' => 'Fighters', 'action' => 'move'],
<add> ['id' => '\d+', 'x' => '\d+', 'y' => '\d+', 'pass' => ['id', 'x', 'y']]
<add> );
<add> $pattern = $route->compile();
<add> $this->assertRegExp($pattern, '/fighters/123/move/8/42');
<add>
<add> $result = $route->match([
<add> 'controller' => 'Fighters',
<add> 'action' => 'move',
<add> 'id' => 123,
<add> 'x' => 8,
<add> 'y' => 42
<add> ]);
<add> $this->assertEquals('/fighters/123/move/8/42', $result);
<add>
<add> $route = new Route(
<add> '/images/{id}/{x}x{y}',
<add> ['controller' => 'Images', 'action' => 'view']
<add> );
<add> $pattern = $route->compile();
<add> $this->assertRegExp($pattern, '/images/123/640x480');
<add>
<add> $result = $route->match([
<add> 'controller' => 'Images',
<add> 'action' => 'view',
<add> 'id' => 123,
<add> 'x' => 8,
<add> 'y' => 42
<add> ]);
<add> $this->assertEquals('/images/123/8x42', $result);
<add> }
<add>
<add> /**
<add> * Test route compile with mixed placeholder types brace format.
<add> *
<add> * @return void
<add> */
<add> public function testRouteCompileMixedPlaceholders()
<add> {
<add> $route = new Route(
<add> '/fighters/{id}/move/{x}/:y',
<add> ['controller' => 'Fighters', 'action' => 'move'],
<add> ['id' => '\d+', 'x' => '\d+', 'pass' => ['id', 'x']]
<add> );
<add> $pattern = $route->compile();
<add> $this->assertRegExp($pattern, '/fighters/123/move/8/:y');
<add>
<add> $result = $route->match([
<add> 'controller' => 'Fighters',
<add> 'action' => 'move',
<add> 'id' => 123,
<add> 'x' => 8,
<add> 'y' => 9
<add> ]);
<add> $this->assertEquals('/fighters/123/move/8/:y?y=9', $result);
<add> }
<add>
<ide> /**
<ide> * Test parsing routes with extensions.
<ide> *
| 2
|
Python
|
Python
|
simplify tests for cli connections commands
|
e81527c5d39840b37180060a0865467c37d698ee
|
<ide><path>airflow/cli/commands/connection_command.py
<ide> def connections_export(args):
<ide> connections = session.query(Connection).order_by(Connection.conn_id).all()
<ide> msg = _format_connections(connections, filetype)
<ide> args.file.write(msg)
<add> args.file.close()
<ide>
<ide> if _is_stdout(args.file):
<ide> print("Connections successfully exported.", file=sys.stderr)
<ide><path>tests/cli/commands/test_connection_command.py
<ide> import io
<ide> import json
<ide> import re
<del>import unittest
<ide> from contextlib import redirect_stdout
<ide> from unittest import mock
<ide>
<ide> import pytest
<del>from parameterized import parameterized
<ide>
<ide> from airflow.cli import cli_parser
<ide> from airflow.cli.commands import connection_command
<ide> from tests.test_utils.db import clear_db_connections
<ide>
<ide>
<del>class TestCliGetConnection(unittest.TestCase):
<del> def setUp(self):
<del> self.parser = cli_parser.get_parser()
<del> clear_db_connections()
<add>@pytest.fixture(scope='module', autouse=True)
<add>def clear_connections():
<add> yield
<add> clear_db_connections(add_default_connections_back=False)
<ide>
<del> def tearDown(self):
<del> clear_db_connections()
<add>
<add>class TestCliGetConnection:
<add> parser = cli_parser.get_parser()
<add>
<add> def setup_method(self):
<add> clear_db_connections(add_default_connections_back=True)
<ide>
<ide> def test_cli_connection_get(self):
<ide> with redirect_stdout(io.StringIO()) as stdout:
<ide> def test_cli_connection_get_invalid(self):
<ide> connection_command.connections_get(self.parser.parse_args(["connections", "get", "INVALID"]))
<ide>
<ide>
<del>class TestCliListConnections(unittest.TestCase):
<add>class TestCliListConnections:
<add> parser = cli_parser.get_parser()
<ide> EXPECTED_CONS = [
<del> (
<del> 'airflow_db',
<del> 'mysql',
<del> ),
<del> (
<del> 'google_cloud_default',
<del> 'google_cloud_platform',
<del> ),
<del> (
<del> 'http_default',
<del> 'http',
<del> ),
<del> (
<del> 'local_mysql',
<del> 'mysql',
<del> ),
<del> (
<del> 'mongo_default',
<del> 'mongo',
<del> ),
<del> (
<del> 'mssql_default',
<del> 'mssql',
<del> ),
<del> (
<del> 'mysql_default',
<del> 'mysql',
<del> ),
<del> (
<del> 'pinot_broker_default',
<del> 'pinot',
<del> ),
<del> (
<del> 'postgres_default',
<del> 'postgres',
<del> ),
<del> (
<del> 'presto_default',
<del> 'presto',
<del> ),
<del> (
<del> 'sqlite_default',
<del> 'sqlite',
<del> ),
<del> (
<del> 'trino_default',
<del> 'trino',
<del> ),
<del> (
<del> 'vertica_default',
<del> 'vertica',
<del> ),
<add> ('airflow_db', 'mysql'),
<add> ('google_cloud_default', 'google_cloud_platform'),
<add> ('http_default', 'http'),
<add> ('local_mysql', 'mysql'),
<add> ('mongo_default', 'mongo'),
<add> ('mssql_default', 'mssql'),
<add> ('mysql_default', 'mysql'),
<add> ('pinot_broker_default', 'pinot'),
<add> ('postgres_default', 'postgres'),
<add> ('presto_default', 'presto'),
<add> ('sqlite_default', 'sqlite'),
<add> ('trino_default', 'trino'),
<add> ('vertica_default', 'vertica'),
<ide> ]
<ide>
<del> def setUp(self):
<del> self.parser = cli_parser.get_parser()
<del> clear_db_connections()
<del>
<del> def tearDown(self):
<del> clear_db_connections()
<add> def setup_method(self):
<add> clear_db_connections(add_default_connections_back=True)
<ide>
<ide> def test_cli_connections_list_as_json(self):
<ide> args = self.parser.parse_args(["connections", "list", "--output", "json"])
<ide> def test_cli_connections_filter_conn_id(self):
<ide> args = self.parser.parse_args(
<ide> ["connections", "list", "--output", "json", '--conn-id', 'http_default']
<ide> )
<del>
<ide> with redirect_stdout(io.StringIO()) as stdout:
<ide> connection_command.connections_list(args)
<ide> stdout = stdout.getvalue()
<del>
<ide> assert "http_default" in stdout
<ide>
<ide>
<del>class TestCliExportConnections(unittest.TestCase):
<del> @provide_session
<del> def setUp(self, session=None):
<add>class TestCliExportConnections:
<add> parser = cli_parser.get_parser()
<add>
<add> def setup_method(self):
<ide> clear_db_connections(add_default_connections_back=False)
<ide> merge_conn(
<ide> Connection(
<ide> def setUp(self, session=None):
<ide> password="plainpassword",
<ide> schema="airflow",
<ide> ),
<del> session,
<ide> )
<ide> merge_conn(
<ide> Connection(
<ide> def setUp(self, session=None):
<ide> port=8082,
<ide> extra='{"endpoint": "druid/v2/sql"}',
<ide> ),
<del> session,
<ide> )
<ide>
<del> self.parser = cli_parser.get_parser()
<del>
<del> def tearDown(self):
<del> clear_db_connections()
<del>
<ide> def test_cli_connections_export_should_return_error_for_invalid_command(self):
<ide> with pytest.raises(SystemExit):
<del> self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> ]
<del> )
<add> self.parser.parse_args(["connections", "export"])
<ide>
<ide> def test_cli_connections_export_should_return_error_for_invalid_format(self):
<ide> with pytest.raises(SystemExit):
<ide> self.parser.parse_args(["connections", "export", "--format", "invalid", "/path/to/file"])
<ide>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<del> def test_cli_connections_export_should_return_error_for_invalid_export_format(
<del> self, mock_file_open, mock_splittext
<del> ):
<del> output_filepath = '/tmp/connections.invalid'
<del> mock_splittext.return_value = (None, '.invalid')
<del>
<del> args = self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> output_filepath,
<del> ]
<del> )
<del> with pytest.raises(
<del> SystemExit, match=r"Unsupported file format. The file must have the extension .yaml, .json, .env"
<del> ):
<add> def test_cli_connections_export_should_return_error_for_invalid_export_format(self, tmp_path):
<add> output_filepath = tmp_path / 'connections.invalid'
<add> args = self.parser.parse_args(["connections", "export", output_filepath.as_posix()])
<add> with pytest.raises(SystemExit, match=r"Unsupported file format"):
<ide> connection_command.connections_export(args)
<ide>
<del> mock_splittext.assert_called_once()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_not_called()
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<ide> @mock.patch.object(connection_command, 'create_session')
<del> def test_cli_connections_export_should_return_error_if_create_session_fails(
<del> self, mock_session, mock_file_open, mock_splittext
<add> def test_cli_connections_export_should_raise_error_if_create_session_fails(
<add> self, mock_create_session, tmp_path
<ide> ):
<del> output_filepath = '/tmp/connections.json'
<add> output_filepath = tmp_path / 'connections.json'
<ide>
<ide> def my_side_effect():
<ide> raise Exception("dummy exception")
<ide>
<del> mock_session.side_effect = my_side_effect
<del> mock_splittext.return_value = (None, '.json')
<del>
<del> args = self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> output_filepath,
<del> ]
<del> )
<add> mock_create_session.side_effect = my_side_effect
<add> args = self.parser.parse_args(["connections", "export", output_filepath.as_posix()])
<ide> with pytest.raises(Exception, match=r"dummy exception"):
<ide> connection_command.connections_export(args)
<ide>
<del> mock_splittext.assert_not_called()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_not_called()
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<ide> @mock.patch.object(connection_command, 'create_session')
<del> def test_cli_connections_export_should_return_error_if_fetching_connections_fails(
<del> self, mock_session, mock_file_open, mock_splittext
<add> def test_cli_connections_export_should_raise_error_if_fetching_connections_fails(
<add> self, mock_session, tmp_path
<ide> ):
<del> output_filepath = '/tmp/connections.json'
<add> output_filepath = tmp_path / 'connections.json'
<ide>
<ide> def my_side_effect(_):
<ide> raise Exception("dummy exception")
<ide>
<ide> mock_session.return_value.__enter__.return_value.query.return_value.order_by.side_effect = (
<ide> my_side_effect
<ide> )
<del> mock_splittext.return_value = (None, '.json')
<del>
<del> args = self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> output_filepath,
<del> ]
<del> )
<add> args = self.parser.parse_args(["connections", "export", output_filepath.as_posix()])
<ide> with pytest.raises(Exception, match=r"dummy exception"):
<ide> connection_command.connections_export(args)
<ide>
<del> mock_splittext.assert_called_once()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_not_called()
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<ide> @mock.patch.object(connection_command, 'create_session')
<del> def test_cli_connections_export_should_not_return_error_if_connections_is_empty(
<del> self, mock_session, mock_file_open, mock_splittext
<add> def test_cli_connections_export_should_not_raise_error_if_connections_is_empty(
<add> self, mock_session, tmp_path
<ide> ):
<del> output_filepath = '/tmp/connections.json'
<del>
<add> output_filepath = tmp_path / 'connections.json'
<ide> mock_session.return_value.__enter__.return_value.query.return_value.all.return_value = []
<del> mock_splittext.return_value = (None, '.json')
<del>
<del> args = self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> output_filepath,
<del> ]
<del> )
<add> args = self.parser.parse_args(["connections", "export", output_filepath.as_posix()])
<ide> connection_command.connections_export(args)
<add> assert output_filepath.read_text() == '{}'
<ide>
<del> mock_splittext.assert_called_once()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_called_once_with('{}')
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<del> def test_cli_connections_export_should_export_as_json(self, mock_file_open, mock_splittext):
<del> output_filepath = '/tmp/connections.json'
<del> mock_splittext.return_value = (None, '.json')
<del>
<del> args = self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> output_filepath,
<del> ]
<del> )
<add> def test_cli_connections_export_should_export_as_json(self, tmp_path):
<add> output_filepath = tmp_path / 'connections.json'
<add> args = self.parser.parse_args(["connections", "export", output_filepath.as_posix()])
<ide> connection_command.connections_export(args)
<del>
<ide> expected_connections = json.dumps(
<ide> {
<ide> "airflow_db": {
<ide> def test_cli_connections_export_should_export_as_json(self, mock_file_open, mock
<ide> },
<ide> indent=2,
<ide> )
<add> assert output_filepath.read_text() == expected_connections
<ide>
<del> mock_splittext.assert_called_once()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_called_once_with(expected_connections)
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<del> def test_cli_connections_export_should_export_as_yaml(self, mock_file_open, mock_splittext):
<del> output_filepath = '/tmp/connections.yaml'
<del> mock_splittext.return_value = (None, '.yaml')
<del>
<del> args = self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> output_filepath,
<del> ]
<del> )
<add> def test_cli_connections_export_should_export_as_yaml(self, tmp_path):
<add> output_filepath = tmp_path / 'connections.yaml'
<add> args = self.parser.parse_args(["connections", "export", output_filepath.as_posix()])
<ide> connection_command.connections_export(args)
<del>
<ide> expected_connections = (
<ide> "airflow_db:\n"
<ide> " conn_type: mysql\n"
<ide> def test_cli_connections_export_should_export_as_yaml(self, mock_file_open, mock
<ide> " port: 8082\n"
<ide> " schema: null\n"
<ide> )
<del> mock_splittext.assert_called_once()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_called_once_with(expected_connections)
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<del> def test_cli_connections_export_should_export_as_env(self, mock_file_open, mock_splittext):
<del> output_filepath = '/tmp/connections.env'
<del> mock_splittext.return_value = (None, '.env')
<add> assert output_filepath.read_text() == expected_connections
<ide>
<add> def test_cli_connections_export_should_export_as_env(self, tmp_path):
<add> output_filepath = tmp_path / 'connections.env'
<ide> args = self.parser.parse_args(
<ide> [
<ide> "connections",
<ide> "export",
<del> output_filepath,
<add> output_filepath.as_posix(),
<ide> ]
<ide> )
<ide> connection_command.connections_export(args)
<del>
<ide> expected_connections = [
<del> "airflow_db=mysql://root:plainpassword@mysql/airflow\n"
<del> "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql\n",
<del> "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql\n"
<del> "airflow_db=mysql://root:plainpassword@mysql/airflow\n",
<add> "airflow_db=mysql://root:plainpassword@mysql/airflow",
<add> "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql",
<ide> ]
<add> assert output_filepath.read_text().splitlines() == expected_connections
<ide>
<del> mock_splittext.assert_called_once()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_called_once_with(mock.ANY)
<del> assert mock_file_open.return_value.write.call_args_list[0][0][0] in expected_connections
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<del> def test_cli_connections_export_should_export_as_env_for_uppercase_file_extension(
<del> self, mock_file_open, mock_splittext
<del> ):
<del> output_filepath = '/tmp/connections.ENV'
<del> mock_splittext.return_value = (None, '.ENV')
<del>
<del> args = self.parser.parse_args(
<del> [
<del> "connections",
<del> "export",
<del> output_filepath,
<del> ]
<del> )
<add> def test_cli_connections_export_should_export_as_env_for_uppercase_file_extension(self, tmp_path):
<add> output_filepath = tmp_path / 'connections.ENV'
<add> args = self.parser.parse_args(["connections", "export", output_filepath.as_posix()])
<ide> connection_command.connections_export(args)
<del>
<ide> expected_connections = [
<del> "airflow_db=mysql://root:plainpassword@mysql/airflow\n"
<del> "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql\n",
<del> "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql\n"
<del> "airflow_db=mysql://root:plainpassword@mysql/airflow\n",
<add> "airflow_db=mysql://root:plainpassword@mysql/airflow",
<add> "druid_broker_default=druid://druid-broker:8082?endpoint=druid%2Fv2%2Fsql",
<ide> ]
<ide>
<del> mock_splittext.assert_called_once()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_called_once_with(mock.ANY)
<del> assert mock_file_open.return_value.write.call_args_list[0][0][0] in expected_connections
<del>
<del> @mock.patch('os.path.splitext')
<del> @mock.patch('builtins.open', new_callable=mock.mock_open())
<del> def test_cli_connections_export_should_force_export_as_specified_format(
<del> self, mock_file_open, mock_splittext
<del> ):
<del> output_filepath = '/tmp/connections.yaml'
<add> assert output_filepath.read_text().splitlines() == expected_connections
<ide>
<add> def test_cli_connections_export_should_force_export_as_specified_format(self, tmp_path):
<add> output_filepath = tmp_path / 'connections.yaml'
<ide> args = self.parser.parse_args(
<ide> [
<ide> "connections",
<ide> "export",
<del> output_filepath,
<add> output_filepath.as_posix(),
<ide> "--format",
<ide> "json",
<ide> ]
<ide> )
<ide> connection_command.connections_export(args)
<del>
<ide> expected_connections = json.dumps(
<ide> {
<ide> "airflow_db": {
<ide> def test_cli_connections_export_should_force_export_as_specified_format(
<ide> },
<ide> indent=2,
<ide> )
<del> mock_splittext.assert_not_called()
<del> mock_file_open.assert_called_once_with(output_filepath, 'w', -1, 'UTF-8', None)
<del> mock_file_open.return_value.write.assert_called_once_with(expected_connections)
<add> assert output_filepath.read_text() == expected_connections
<ide>
<ide>
<ide> TEST_URL = "postgresql://airflow:airflow@host:5432/airflow"
<ide>
<ide>
<del>class TestCliAddConnections(unittest.TestCase):
<del> @classmethod
<del> def setUpClass(cls):
<del> cls.parser = cli_parser.get_parser()
<del> clear_db_connections()
<add>class TestCliAddConnections:
<add> parser = cli_parser.get_parser()
<ide>
<del> @classmethod
<del> def tearDownClass(cls):
<del> clear_db_connections()
<add> def setup_method(self):
<add> clear_db_connections(add_default_connections_back=False)
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> 'cmd, expected_output, expected_conn',
<ide> [
<ide> (
<ide> [
<ide> def tearDownClass(cls):
<ide> "schema": None,
<ide> },
<ide> ),
<del> ]
<add> ],
<ide> )
<ide> def test_cli_connection_add(self, cmd, expected_output, expected_conn):
<ide> with redirect_stdout(io.StringIO()) as stdout:
<ide> def test_cli_connections_add_invalid_uri(self):
<ide> )
<ide>
<ide>
<del>class TestCliDeleteConnections(unittest.TestCase):
<del> @classmethod
<del> def setUpClass(cls):
<del> cls.parser = cli_parser.get_parser()
<del> clear_db_connections()
<add>class TestCliDeleteConnections:
<add> parser = cli_parser.get_parser()
<ide>
<del> @classmethod
<del> def tearDownClass(cls):
<del> clear_db_connections()
<add> def setup_method(self):
<add> clear_db_connections(add_default_connections_back=False)
<ide>
<ide> @provide_session
<ide> def test_cli_delete_connections(self, session=None):
<ide> def test_cli_delete_invalid_connection(self):
<ide> connection_command.connections_delete(self.parser.parse_args(["connections", "delete", "fake"]))
<ide>
<ide>
<del>class TestCliImportConnections(unittest.TestCase):
<del> @classmethod
<del> def setUpClass(cls):
<del> cls.parser = cli_parser.get_parser()
<del> clear_db_connections(add_default_connections_back=False)
<add>class TestCliImportConnections:
<add> parser = cli_parser.get_parser()
<ide>
<del> @classmethod
<del> def tearDownClass(cls):
<del> clear_db_connections()
<add> def setup_method(self):
<add> clear_db_connections(add_default_connections_back=False)
<ide>
<ide> @mock.patch('os.path.exists')
<ide> def test_cli_connections_import_should_return_error_if_file_does_not_exist(self, mock_exists):
<ide> def test_cli_connections_import_should_return_error_if_file_does_not_exist(self,
<ide> with pytest.raises(SystemExit, match=r"Missing connections file."):
<ide> connection_command.connections_import(self.parser.parse_args(["connections", "import", filepath]))
<ide>
<del> @parameterized.expand(
<del> [
<del> ("sample.jso",),
<del> ("sample.yml",),
<del> ("sample.environ",),
<del> ]
<del> )
<add> @pytest.mark.parametrize('filepath', ["sample.jso", "sample.yml", "sample.environ"])
<ide> @mock.patch('os.path.exists')
<ide> def test_cli_connections_import_should_return_error_if_file_format_is_invalid(
<del> self, filepath, mock_exists
<add> self, mock_exists, filepath
<ide> ):
<ide> mock_exists.return_value = True
<ide> with pytest.raises(
| 2
|
Text
|
Text
|
update video challenge
|
756f33775e63117dd911877645d0fb5e83100eeb
|
<ide><path>curriculum/challenges/english/02-javascript-algorithms-and-data-structures/basic-javascript/testing-objects-for-properties.english.md
<ide> id: 567af2437cbaa8c51670a16c
<ide> title: Testing Objects for Properties
<ide> challengeType: 1
<del>videoUrl: 'https://scrimba.com/c/cm8Q7Ua'
<add>videoUrl: 'https://scrimba.com/c/c6Wz4ySr'
<ide> forumTopicId: 18324
<ide> ---
<ide>
<ide><path>curriculum/challenges/russian/02-javascript-algorithms-and-data-structures/basic-javascript/testing-objects-for-properties.russian.md
<ide> id: 567af2437cbaa8c51670a16c
<ide> title: Testing Objects for Properties
<ide> challengeType: 1
<del>videoUrl: https://scrimba.com/c/cm8Q7Ua
<add>videoUrl: https://scrimba.com/c/c6Wz4ySr
<ide> forumTopicId: 18324
<ide> localeTitle: Тестирование объектов для свойств
<ide> ---
| 2
|
Javascript
|
Javascript
|
add json as default extension
|
53c16c81471ede22eeeaac5b6357132eb2117846
|
<ide><path>lib/ModuleParseError.js
<ide> function ModuleParseError(module, source, err) {
<ide> Error.captureStackTrace(this, ModuleParseError);
<ide> this.name = "ModuleParseError";
<ide> this.message = "Module parse failed: " + module.request + " " + err.message;
<add> this.message += "\nYou may need an appropriate loader to handle this file type.";
<ide> if(typeof err.lineNumber === "number") {
<ide> source = source.split("\n");
<ide> this.message += "\n| " + source.slice(Math.max(0, err.lineNumber - 3), err.lineNumber + 2).join("\n| ");
<ide><path>lib/WebpackOptionsDefaulter.js
<ide> WebpackOptionsDefaulter.prototype.process = function(options) {
<ide> ["loader", "main"]);
<ide>
<ide> options.resolve.extensions = defaultByTarget(options.resolve.extensions,
<del> ["", ".webpack.js", ".web.js", ".js"],
<del> ["", ".webpack-worker.js", ".webworker.js", ".web.js", ".js"],
<del> ["", ".webpack-node.js", ".js", ".node"],
<del> ["", ".js"]);
<add> ["", ".webpack.js", ".web.js", ".js", ".json"],
<add> ["", ".webpack-worker.js", ".webworker.js", ".web.js", ".js", ".json"],
<add> ["", ".webpack-node.js", ".js", ".json", ".node"],
<add> ["", ".js", ".json"]);
<ide>
<ide> options.resolveLoader.extensions = defaultByTarget(options.resolveLoader.extensions,
<ide> ["", ".webpack-loader.js", ".web-loader.js", ".loader.js", ".js"],
| 2
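As a small sketch of the effect of the `resolve.extensions` change above (the file layout here is assumed): with `.json` appended, an extension-less request now falls through to a JSON file.

```js
// Assuming ./config.json exists next to this module, webpack now
// resolves the extension-less request to it; previously the ".json"
// suffix had to be written out explicitly.
var config = require('./config'); // -> ./config.json
console.log(config);
```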
|
Python
|
Python
|
use _umath_linalg for slogdet()
|
87dc3f6b66465d80d8fe36aeb40a33546f979e67
|
<ide><path>numpy/linalg/linalg.py
<ide> def slogdet(a):
<ide>
<ide> Parameters
<ide> ----------
<del> a : array_like
<add> a : (..., M, M) array_like
<ide> Input array, has to be a square 2-D array.
<ide>
<ide> Returns
<ide> -------
<del> sign : float or complex
<add> sign : (...) array_like
<ide> A number representing the sign of the determinant. For a real matrix,
<ide> this is 1, 0, or -1. For a complex matrix, this is a complex number
<ide> with absolute value 1 (i.e., it is on the unit circle), or else 0.
<del> logdet : float
<add> logdet : (...) array_like
<ide> The natural log of the absolute value of the determinant.
<ide>
<ide> If the determinant is zero, then `sign` will be 0 and `logdet` will be
<ide> def slogdet(a):
<ide> >>> sign * np.exp(logdet)
<ide> -2.0
<ide>
<add> Computing log-determinants for a stack of matrices:
<add>
<add> >>> a = np.array([ [[1, 2], [3, 4]], [[1, 2], [2, 1]], [[1, 3], [3, 1]] ])
<add> >>> a.shape
<add> (3, 2, 2)
<add> >>> sign, logdet = np.linalg.slogdet(a)
<add> >>> (sign, logdet)
<add> (array([-1., -1., -1.]), array([ 0.69314718, 1.09861229, 2.07944154]))
<add> >>> sign * np.exp(logdet)
<add> array([-2., -3., -8.])
<add>
<ide> This routine succeeds where ordinary `det` does not:
<ide>
<ide> >>> np.linalg.det(np.eye(500) * 0.1)
<ide> def slogdet(a):
<ide>
<ide> """
<ide> a = asarray(a)
<del> _assertRank2(a)
<del> _assertSquareness(a)
<add> _assertNonEmpty(a)
<add> _assertRankAtLeast2(a)
<add> _assertNdSquareness(a)
<ide> t, result_t = _commonType(a)
<del> a = _fastCopyAndTranspose(t, a)
<del> a = _to_native_byte_order(a)
<del> n = a.shape[0]
<del> if isComplexType(t):
<del> lapack_routine = lapack_lite.zgetrf
<del> else:
<del> lapack_routine = lapack_lite.dgetrf
<del> pivots = zeros((n,), fortran_int)
<del> results = lapack_routine(n, n, a, n, pivots, 0)
<del> info = results['info']
<del> if (info < 0):
<del> raise TypeError("Illegal input to Fortran routine")
<del> elif (info > 0):
<del> return (t(0.0), _realType(t)(-Inf))
<del> sign = 1. - 2. * (add.reduce(pivots != arange(1, n + 1)) % 2)
<del> d = diagonal(a)
<del> absd = absolute(d)
<del> sign *= multiply.reduce(d / absd)
<del> log(absd, absd)
<del> logdet = add.reduce(absd, axis=-1)
<del> return sign, logdet
<add> real_t = _realType(result_t)
<add> sign, logdet = _umath_linalg.slogdet(a.astype(t))
<add> return sign.astype(result_t), logdet.astype(real_t)
<ide>
<ide> def det(a):
<ide> """
<ide> def det(a):
<ide>
<ide> Notes
<ide> -----
<add> Broadcasting rules apply, see the `numpy.linalg` documentation for
<add> details.
<add>
<ide> The determinant is computed via LU factorization using the LAPACK
<ide> routine z/dgetrf.
<ide>
| 1
|
PHP
|
PHP
|
fix incorrect regex
|
8f387afacf2fbf2d20e6a7a7ef1cb1298d06992a
|
<ide><path>src/Validation/Validation.php
<ide> public static function cc($check, $type = 'fast', $deep = false, $regex = null)
<ide> }
<ide> $cards = [
<ide> 'all' => [
<del> 'amex' => '/^3[4|7]\\d{13}$/',
<add> 'amex' => '/^3[47]\\d{13}$/',
<ide> 'bankcard' => '/^56(10\\d\\d|022[1-5])\\d{10}$/',
<ide> 'diners' => '/^(?:3(0[0-5]|[68]\\d)\\d{11})|(?:5[1-5]\\d{14})$/',
<ide> 'disc' => '/^(?:6011|650\\d)\\d{12}$/',
| 1
|
Ruby
|
Ruby
|
add version detection support for php url
|
9d36096d68d5b362f223b993ac7662588450d942
|
<ide><path>Library/Homebrew/version.rb
<ide> def self._parse(spec)
<ide> # e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz
<ide> m = /\.v(\d+[a-z]?)/.match(stem)
<ide> return m.captures.first unless m.nil?
<add>
<add> # e.g. https://secure.php.net/get/php-7.1.10.tar.bz2/from/this/mirror
<add> m = /[-.vV]?((?:\d+\.)+\d+(?:[-_.]?(?i:alpha|beta|pre|rc)\.?\d{,2})?)/.match(spec_s)
<add> return m.captures.first unless m.nil?
<ide> end
<ide> end
<ide>
| 1
|
Python
|
Python
|
fix spacy vocab command
|
98c35d2585c548e6ff2c25a537cfd81c25482283
|
<ide><path>spacy/__main__.py
<ide> 'convert': convert,
<ide> 'package': package,
<ide> 'model': model,
<del> 'model': vocab,
<add> 'vocab': vocab,
<ide> 'profile': profile,
<ide> 'validate': validate
<ide> }
<ide><path>spacy/cli/vocab.py
<del>'''Compile a vocabulary from a lexicon jsonl file and word vectors.'''
<ide> # coding: utf8
<ide> from __future__ import unicode_literals
<ide>
<del>from pathlib import Path
<ide> import plac
<ide> import json
<ide> import spacy
<ide> import numpy
<del>from spacy.util import ensure_path
<add>from pathlib import Path
<add>
<add>from ..util import prints, ensure_path
<ide>
<ide>
<ide> @plac.annotations(
<ide> lang=("model language", "positional", None, str),
<del> output_dir=("output directory to store model in", "positional", None, str),
<add> output_dir=("model output directory", "positional", None, Path),
<ide> lexemes_loc=("location of JSONL-formatted lexical data", "positional",
<del> None, str),
<del> vectors_loc=("location of vectors data, as numpy .npz (optional)",
<del> "positional", None, str),
<del> version=("Model version", "option", "V", str),
<del>)
<del>def make_vocab(lang, output_dir, lexemes_loc, vectors_loc=None, version=None):
<del> out_dir = ensure_path(output_dir)
<del> jsonl_loc = ensure_path(lexemes_loc)
<add> None, Path),
<add> vectors_loc=("optional: location of vectors data, as numpy .npz",
<add> "positional", None, str))
<add>def make_vocab(cmd, lang, output_dir, lexemes_loc, vectors_loc=None):
<add> """Compile a vocabulary from a lexicon jsonl file and word vectors."""
<add> if not lexemes_loc.exists():
<add> prints(lexemes_loc, title="Can't find lexical data", exits=1)
<add> vectors_loc = ensure_path(vectors_loc)
<ide> nlp = spacy.blank(lang)
<ide> for word in nlp.vocab:
<ide> word.rank = 0
<del> with jsonl_loc.open() as file_:
<add> lex_added = 0
<add> vec_added = 0
<add> with lexemes_loc.open() as file_:
<ide> for line in file_:
<ide> if line.strip():
<ide> attrs = json.loads(line)
<ide> def make_vocab(lang, output_dir, lexemes_loc, vectors_loc=None, version=None):
<ide> lex = nlp.vocab[attrs['orth']]
<ide> lex.set_attrs(**attrs)
<ide> assert lex.rank == attrs['id']
<add> lex_added += 1
<ide> if vectors_loc is not None:
<ide> vector_data = numpy.load(open(vectors_loc, 'rb'))
<ide> nlp.vocab.clear_vectors(width=vector_data.shape[1])
<del> added = 0
<ide> for word in nlp.vocab:
<ide> if word.rank:
<ide> nlp.vocab.vectors.add(word.orth_, row=word.rank,
<ide> vector=vector_data[word.rank])
<del> added += 1
<del> nlp.to_disk(out_dir)
<add> vec_added += 1
<add> if not output_dir.exists():
<add> output_dir.mkdir()
<add> nlp.to_disk(output_dir)
<add> prints("{} entries, {} vectors".format(lex_added, vec_added), output_dir,
<add> title="Sucessfully compiled vocab and vectors, and saved model")
<ide> return nlp
| 2
|
Text
|
Text
|
add guide about abi stability
|
65366addb29f195f53b700ba0de43381342086d7
|
<ide><path>doc/guides/abi-stability.md
<add># ABI Stability
<add>
<add>## Introduction
<add>An Application Binary Interface (ABI) is a way for programs to call functions
<add>and use data structures from other compiled programs. It is the compiled version
<add>of an Application Programming Interface (API). In other words, the header files
<add>describing the classes, functions, data structures, enumerations, and constants
<add>which enable an application to perform a desired task correspond by way of
<add>compilation to a set of addresses and expected parameter values and memory
<add>structure sizes and layouts with which the provider of the ABI was compiled.
<add>
<add>The application using the ABI must be compiled such that the available
<add>addresses, expected parameter values, and memory structure sizes and layouts
<add>agree with those with which the ABI provider was compiled. This is usually
<add>accomplished by compiling against the headers provided by the ABI provider.
<add>
<add>Since the provider of the ABI and the user of the ABI may be compiled at
<add>different times with different versions of the compiler, a portion of the
<add>responsibility for ensuring ABI compatibility lies with the compiler. Different
<add>versions of the compiler, perhaps provided by different vendors, must all
<add>produce the same ABI from a header file with a certain content, and must produce
<add>code for the application using the ABI that accesses the API described in a
<add>given header according to the conventions of the ABI resulting from the
<add>description in the header. Modern compilers have a fairly good track record of
<add>not breaking the ABI compatibility of the applications they compile.
<add>
<add>The remaining responsibility for ensuring ABI compatibility lies with the team
<add>maintaining the header files which provide the API that results, upon
<add>compilation, in the ABI that is to remain stable. Changes to the header files
<add>can be made, but the nature of the changes has to be closely tracked to ensure
<add>that, upon compilation, the ABI does not change in a way that will render
<add>existing users of the ABI incompatible with the new version.
<add>
<add>## ABI Stability in Node.js
<add>Node.js provides header files maintained by several independent teams. For
<add>example, header files such as `node.h` and `node_buffer.h` are maintained by
<add>the Node.js team. `v8.h` is maintained by the V8 team, which, although in close
<add>co-operation with the Node.js team, is independent, and with its own schedule
<add>and priorities. Thus, the Node.js team has only partial control over the
<add>changes that are introduced in the headers the project provides. As a result,
<add>the Node.js project has adopted [semantic versioning](https://semver.org/).
<add>This ensures that the APIs provided by the project will result in a stable ABI
<add>for all minor and patch versions of Node.js released within one major version.
<add>In practice, this means that the Node.js project has committed itself to
<add>ensuring that a Node.js native addon compiled against a given major version of
<add>Node.js will load successfully when loaded by any Node.js minor or patch version
<add>within the major version against which it was compiled.
<add>
<add>## N-API
<add>Demand has arisen for equipping Node.js with an API that results in an ABI that
<add>remains stable across multiple Node.js major versions. The motivation for
<add>creating such an API is as follows:
<add>* The JavaScript language has remained compatible with itself since its very
<add>early days, whereas the ABI of the engine executing the JavaScript code changes
<add>with every major version of Node.js. This means that applications consisting of
<add>Node.js packages written entirely in JavaScript need not be recompiled,
<add>reinstalled, or redeployed as a new major version of Node.js is dropped into
<add>the production environment in which such applications run. In contrast, if an
<add>application depends on a package that contains a native addon, the application
<add>has to be recompiled, reinstalled, and redeployed whenever a new major version
<add>of Node.js is introduced into the production environment. This disparity
<add>between Node.js packages containing native addons and those that are written
<add>entirely in JavaScript has added to the maintenance burden of production
<add>systems which rely on native addons.
<add>
<add>* Other projects have started to produce JavaScript interfaces that are
<add>essentially alternative implementations of Node.js. Since these projects are
<add>usually built on a different JavaScript engine than V8, their native addons
<add>necessarily take on a different structure and use a different API. Nevertheless,
<add>using a single API for a native addon across different implementations of the
<add>Node.js JavaScript API would allow these projects to take advantage of the
<add>ecosystem of JavaScript packages that has accrued around Node.js.
<add>
<add>* Node.js may contain a different JavaScript engine in the future. This means
<add>that, externally, all Node.js interfaces would remain the same, but the V8
<add>header file would be absent. Such a step would cause the disruption of the
<add>Node.js ecosystem in general, and that of the native addons in particular, if
<add>an API that is JavaScript engine agnostic is not first provided by Node.js and
<add>adopted by native addons.
<add>
<add>To these ends Node.js has introduced N-API in version 8.6.0 and marked it as a
<add>stable component of the project as of Node.js 8.12.0. The API is defined in the
<add>headers [`node_api.h`][] and [`node_api_types.h`][], and provides a
<add>forward-compatibility guarantee that crosses the Node.js major version boundary. The
<add>guarantee can be stated as follows:
<add>
<add>**A given version *n* of N-API will be available in the major version of
<add>Node.js in which it was published, and in all subsequent versions of Node.js,
<add>including subsequent major versions.**
<add>
<add>A native addon author can take advantage of the N-API forward compatibility
<add>guarantee by ensuring that the addon makes use only of APIs defined in
<add>`node_api.h` and data structures and constants defined in `node_api_types.h`.
<add>By doing so, the author facilitates adoption of their addon by indicating to
<add>production users that the maintenance burden for their application will increase
<add>no more by the addition of the native addon to their project than it would by
<add>the addition of a package written purely in JavaScript.
<add>
<add>N-API is versioned because new APIs are added from time to time. Unlike
<add>semantic versioning, N-API versioning is cumulative. That is, each version of
<add>N-API conveys the same meaning as a minor version in the semver system, meaning
<add>that all changes made to N-API will be backwards compatible. Additionally, new
<add>N-APIs are added under an experimental flag to give the community an opportunity
<add>to vet them in a production environment. Experimental status means that,
<add>although care has been taken to ensure that the new API will not have to be
<add>modified in an ABI-incompatible way in the future, it has not yet been
<add>sufficiently proven in production to be correct and useful as designed and, as
<add>such, may undergo ABI-incompatible changes before it is finally incorporated
<add>into a forthcoming version of N-API. That is, an experimental N-API is not yet
<add>covered by the forward compatibility guarantee.
<add>
<add>[`node_api.h`]: ../../src/node_api.h
<add>[`node_api_types.h`]: ../../src/node_api_types.h
| 1
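As a usage sketch of the guarantee described in the guide above (the addon path and its `hello` export are hypothetical; `process.versions.napi` is a real property), a binary built only against `node_api.h` keeps loading across Node.js major versions:

```js
'use strict';
// Hypothetical prebuilt addon compiled against node_api.h only.
// Because it uses N-API, the same .node binary continues to load
// across Node.js major versions, with no rebuild needed on upgrade.
const addon = require('./build/Release/hello_napi.node');

// The N-API version exposed by this Node.js runtime.
console.log('N-API version:', process.versions.napi);

// Hypothetical export; its behavior is identical on any version
// that supports the N-API version the addon was built against.
console.log(addon.hello());
```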
|
PHP
|
PHP
|
apply fixes from styleci
|
473ffd34889673a3730456672fdba281febf0737
|
<ide><path>src/Illuminate/Http/Resources/Json/JsonResource.php
<ide> public function toArray($request)
<ide> if (is_null($this->resource)) {
<ide> return [];
<ide> }
<del>
<add>
<ide> return is_array($this->resource)
<ide> ? $this->resource
<ide> : $this->resource->toArray();
| 1
|
Ruby
|
Ruby
|
run the logger tests in isolation
|
39dec69712aaf954c0f00f2a32c32356d789e3ed
|
<ide><path>activesupport/lib/active_support/testing/isolation.rb
<ide> class ParallelEach
<ide> include Enumerable
<ide>
<ide> # default to 2 cores
<del> CORES = ENV['TEST_CORES'].to_i || 2
<add> CORES = (ENV['TEST_CORES'] || 2).to_i
<ide>
<ide> def initialize list
<ide> @list = list
<ide><path>railties/test/application/rack/logger_test.rb
<ide> module ApplicationTests
<ide> module RackTests
<ide> class LoggerTest < ActiveSupport::TestCase
<add> include ActiveSupport::Testing::Isolation
<ide> include ActiveSupport::LogSubscriber::TestHelper
<ide> include Rack::Test::Methods
<ide>
<ide> def setup
<ide> end
<ide>
<ide> def teardown
<add> super
<ide> teardown_app
<ide> end
<ide>
| 2
|
PHP
|
PHP
|
refactor the memcached class
|
cb7a59711aebc7b9cf30695ccdd71c70468aa504
|
<ide><path>system/memcached.php
<ide> class Memcached {
<ide> */
<ide> public static function instance()
<ide> {
<del> if (is_null(static::$instance))
<del> {
<del> if ( ! class_exists('Memcache'))
<del> {
<del> throw new \Exception('Attempting to use Memcached, but the Memcached PHP extension is not installed on this server.');
<del> }
<add> return ( ! is_null(static::$instance)) ? static::$instance : static::$instance = static::connect();
<add> }
<ide>
<del> $memcache = new \Memcache;
<add> /**
<add> * Connect to the configured Memcached servers.
<add> *
<add> * @return Memcache
<add> */
<add> private static function connect()
<add> {
<add> if ( ! class_exists('Memcache'))
<add> {
<add> throw new \Exception('Attempting to use Memcached, but the Memcached PHP extension is not installed on this server.');
<add> }
<ide>
<del> foreach (Config::get('cache.servers') as $server)
<del> {
<del> $memcache->addServer($server['host'], $server['port'], true, $server['weight']);
<del> }
<add> $memcache = new \Memcache;
<ide>
<del> if ($memcache->getVersion() === false)
<del> {
<del> throw new \Exception('Memcached is configured. However, no connections could be made. Please verify your memcached configuration.');
<del> }
<add> foreach (Config::get('cache.servers') as $server)
<add> {
<add> $memcache->addServer($server['host'], $server['port'], true, $server['weight']);
<add> }
<ide>
<del> static::$instance = $memcache;
<add> if ($memcache->getVersion() === false)
<add> {
<add> throw new \Exception('Memcached is configured. However, no connections could be made. Please verify your memcached configuration.');
<ide> }
<ide>
<del> return static::$instance;
<add> return $memcache;
<ide> }
<ide>
<ide> }
<ide>\ No newline at end of file
| 1
|
Java
|
Java
|
remove assertion making locations mandatory
|
2ef20f63bc97cb4612befdc9d63cab1554694992
|
<ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/resource/ResourceHttpRequestHandler.java
<ide> public ResourceHttpRequestHandler() {
<ide> * for serving static resources.
<ide> */
<ide> public void setLocations(List<Resource> locations) {
<del> Assert.notEmpty(locations, "Locations list must not be empty");
<add> Assert.notNull(locations, "Locations list must not be null");
<ide> this.locations.clear();
<ide> this.locations.addAll(locations);
<ide> }
| 1
|
Text
|
Text
|
remove note about `eslint-config-next`
|
4cb96fb0470e83e62f045be68069eb3b66d8567a
|
<ide><path>contributing/examples/adding-examples.md
<ide> When you add an example to the [examples](examples) directory, please follow the
<ide> - If API routes aren't used in an example, they should be omitted
<ide> - If an example exists for a certain library and you would like to showcase a specific feature of that library, the existing example should be updated (instead of adding a new example)
<ide> - Package manager specific config should not be added (e.g. `resolutions` in `package.json`)
<del>- In `package.json` the version of `next` (and `eslint-config-next`) should be `latest`
<add>- In `package.json` the version of `next` should be `latest`
<ide> - In `package.json` the dependency versions should be up-to-date
<ide> - Use `export default function` for page components and API Routes instead of `const`/`let` (The exception is if the page has `getInitialProps`, in which case [`NextPage`](https://nextjs.org/docs/api-reference/data-fetching/get-initial-props#typescript) could be useful)
<ide> - CMS example directories should be prefixed with `cms-`
| 1
|
Javascript
|
Javascript
|
cleanup the generation of the error message
|
b146cae02c6345d324b1c53845fb01b783226669
|
<ide><path>src/minErr.js
<ide> function minErr(module, ErrorConstructor) {
<ide> ErrorConstructor = ErrorConstructor || Error;
<ide> return function() {
<del> var code = arguments[0],
<del> prefix = '[' + (module ? module + ':' : '') + code + '] ',
<del> template = arguments[1],
<del> templateArgs = arguments,
<add> var SKIP_INDEXES = 2;
<ide>
<del> message, i;
<add> var templateArgs = arguments,
<add> code = templateArgs[0],
<add> message = '[' + (module ? module + ':' : '') + code + '] ',
<add> template = templateArgs[1],
<add> paramPrefix, i;
<ide>
<del> message = prefix + template.replace(/\{\d+\}/g, function(match) {
<del> var index = +match.slice(1, -1), arg;
<add> message += template.replace(/\{\d+\}/g, function(match) {
<add> var index = +match.slice(1, -1),
<add> shiftedIndex = index + SKIP_INDEXES;
<ide>
<del> if (index + 2 < templateArgs.length) {
<del> return toDebugString(templateArgs[index + 2]);
<add> if (shiftedIndex < templateArgs.length) {
<add> return toDebugString(templateArgs[shiftedIndex]);
<ide> }
<add>
<ide> return match;
<ide> });
<ide>
<del> message = message + '\nhttp://errors.angularjs.org/"NG_VERSION_FULL"/' +
<add> message += '\nhttp://errors.angularjs.org/"NG_VERSION_FULL"/' +
<ide> (module ? module + '/' : '') + code;
<del> for (i = 2; i < arguments.length; i++) {
<del> message = message + (i == 2 ? '?' : '&') + 'p' + (i - 2) + '=' +
<del> encodeURIComponent(toDebugString(arguments[i]));
<add>
<add> for (i = SKIP_INDEXES, paramPrefix = '?'; i < templateArgs.length; i++, paramPrefix = '&') {
<add> message += paramPrefix + 'p' + (i - SKIP_INDEXES) + '=' +
<add> encodeURIComponent(toDebugString(templateArgs[i]));
<ide> }
<add>
<ide> return new ErrorConstructor(message);
<ide> };
<ide> }
<ide><path>test/minErrSpec.js
<ide> describe('minErr', function() {
<ide> var typeMinErr = minErr('type', TypeError);
<ide> expect(typeMinErr('acode', 'aproblem') instanceof TypeError).toBe(true);
<ide> });
<add>
<add>
<add> it('should include a properly formatted error reference URL in the message', function() {
<add> // to avoid maintaining the root URL in two locations, we only validate the parameters
<add> expect(testError('acode', 'aproblem', 'a', 'b', 'value with space').message)
<add> .toMatch(/^[\s\S]*\?p0=a&p1=b&p2=value%20with%20space$/);
<add> });
<ide> });
| 2
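A quick usage sketch for the refactored factory above; it assumes the `minErr` from this patch and its `toDebugString` helper are in scope, and the `$compile`/`badname` identifiers are invented for illustration.

```js
// Illustrative only: '$compile' and 'badname' are made-up names.
var compileMinErr = minErr('$compile', TypeError);

try {
  // Arguments after the template fill the {0}, {1}... slots and are
  // also appended to the reference URL as p0, p1... query parameters.
  throw compileMinErr('badname', 'Invalid directive name {0}.', 'my dir');
} catch (e) {
  console.log(e instanceof TypeError); // true
  // The message ends with .../$compile/badname?p0=my%20dir, the shape
  // asserted by the regex in the new test.
  console.log(e.message);
}
```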
|
PHP
|
PHP
|
newline all the things
|
6982865171c67d73ec0bdf60182c4f75c74180fd
|
<ide><path>src/Illuminate/Bus/Dispatcher.php
<ide> protected function commandShouldBeQueued($command)
<ide> public function dispatchToQueue($command)
<ide> {
<ide> $connection = isset($command->connection) ? $command->connection : null;
<add>
<ide> $queue = call_user_func($this->queueResolver, $connection);
<ide>
<ide> if (! $queue instanceof Queue) {
| 1
|
Python
|
Python
|
add test for ndarray.__array_function__
|
4ece3b1ce10e5aca966d8adfb85e6761747567b9
|
<ide><path>numpy/core/tests/test_overrides.py
<ide> def _get_overloaded_args(relevant_args):
<ide> return args
<ide>
<ide>
<del>_IMPLEMENTED = None
<add>def _return_self(self, *args, **kwargs):
<add> return self
<ide>
<ide>
<ide> class TestGetOverloadedTypesAndArgs(object):
<ide> def test_ndarray(self):
<ide> def test_ndarray_subclasses(self):
<ide>
<ide> class OverrideSub(np.ndarray):
<del> __array_function__ = _IMPLEMENTED
<add> __array_function__ = _return_self
<ide>
<ide> class NoOverrideSub(np.ndarray):
<ide> pass
<ide> class NoOverrideSub(np.ndarray):
<ide> def test_ndarray_and_duck_array(self):
<ide>
<ide> class Other(object):
<del> __array_function__ = _IMPLEMENTED
<add> __array_function__ = _return_self
<ide>
<ide> array = np.array(1)
<ide> other = Other()
<ide> class Other(object):
<ide> def test_many_duck_arrays(self):
<ide>
<ide> class A(object):
<del> __array_function__ = _IMPLEMENTED
<add> __array_function__ = _return_self
<ide>
<ide> class B(A):
<del> __array_function__ = _IMPLEMENTED
<add> __array_function__ = _return_self
<ide>
<ide> class C(A):
<del> __array_function__ = _IMPLEMENTED
<add> __array_function__ = _return_self
<ide>
<ide> class D(object):
<del> __array_function__ = _IMPLEMENTED
<add> __array_function__ = _return_self
<ide>
<ide> a = A()
<ide> b = B()
<ide> class D(object):
<ide> assert_equal(_get_overloaded_args([a, c, b]), [c, b, a])
<ide>
<ide>
<add>class TestNDArrayArrayFunction(object):
<add>
<add> def test_method(self):
<add>
<add> class SubOverride(np.ndarray):
<add> __array_function__ = _return_self
<add>
<add> class NoOverrideSub(np.ndarray):
<add> pass
<add>
<add> array = np.array(1)
<add>
<add> def func():
<add> return 'original'
<add>
<add> result = array.__array_function__(
<add> func=func, types=(np.ndarray,), args=(), kwargs={})
<add> assert_equal(result, 'original')
<add>
<add> result = array.__array_function__(
<add> func=func, types=(np.ndarray, SubOverride), args=(), kwargs={})
<add> assert_(result is NotImplemented)
<add>
<add> result = array.__array_function__(
<add> func=func, types=(np.ndarray, NoOverrideSub), args=(), kwargs={})
<add> assert_equal(result, 'original')
<add>
<add>
<ide> # need to define this at the top level to test pickling
<ide> @array_function_dispatch(lambda array: (array,))
<ide> def dispatched_one_arg(array):
<ide> def test_docstring(self):
<ide>
<ide>
<ide> def _new_duck_type_and_implements():
<add> """Create a duck array type and implements functions."""
<ide> HANDLED_FUNCTIONS = {}
<ide>
<ide> class MyArray(object):
| 1
|
Javascript
|
Javascript
|
ignore non-canberequiredbyusers built-in
|
c7222b35899ad352d78f288ddc16c6d19ca79a06
|
<ide><path>lib/internal/main/eval_string.js
<ide> const { addBuiltinLibsToObject } = require('internal/modules/cjs/helpers');
<ide> const { getOptionValue } = require('internal/options');
<ide>
<ide> prepareMainThreadExecution();
<del>addBuiltinLibsToObject(globalThis);
<add>addBuiltinLibsToObject(globalThis, '<eval>');
<ide> markBootstrapComplete();
<ide>
<ide> const source = getOptionValue('--eval');
<ide><path>lib/internal/modules/cjs/helpers.js
<ide> function stripBOM(content) {
<ide> return content;
<ide> }
<ide>
<del>function addBuiltinLibsToObject(object) {
<add>function addBuiltinLibsToObject(object, dummyModuleName) {
<ide> // Make built-in modules available directly (loaded lazily).
<del> const { builtinModules } = require('internal/modules/cjs/loader').Module;
<add> const Module = require('internal/modules/cjs/loader').Module;
<add> const { builtinModules } = Module;
<add>
<add> // To require built-in modules in user-land while ignoring modules whose
<add> // `canBeRequiredByUsers` is false, we create a dummy module object instead
<add> // of calling `require()` directly.
<add> const dummyModule = new Module(dummyModuleName);
<add>
<ide> ArrayPrototypeForEach(builtinModules, (name) => {
<ide> // Neither add underscored modules, nor ones that contain slashes (e.g.,
<ide> // 'fs/promises') or ones that are already defined.
<ide> function addBuiltinLibsToObject(object) {
<ide>
<ide> ObjectDefineProperty(object, name, {
<ide> get: () => {
<del> const lib = require(name);
<add> const lib = dummyModule.require(name);
<ide>
<ide> // Disable the current getter/setter and set up a new
<ide> // non-enumerable property.
<ide><path>lib/repl.js
<ide> REPLServer.prototype.createContext = function() {
<ide> value: makeRequireFunction(replModule)
<ide> });
<ide>
<del> addBuiltinLibsToObject(context);
<add> addBuiltinLibsToObject(context, '<REPL>');
<ide>
<ide> return context;
<ide> };
<ide><path>test/parallel/test-repl-built-in-modules.js
<add>'use strict';
<add>
<add>require('../common');
<add>const assert = require('assert');
<add>const cp = require('child_process');
<add>
<add>function runREPLWithAdditionalFlags(flags) {
<add> // Use -i to force node into interactive mode, despite stdout not being a TTY
<add> const args = ['-i'].concat(flags);
<add> const ret = cp.execFileSync(process.execPath, args, {
<add> input: 'require(\'events\');\nrequire(\'wasi\');',
<add> encoding: 'utf8',
<add> });
<add> return ret;
<add>}
<add>
<add>// Run REPL in normal mode.
<add>let stdout = runREPLWithAdditionalFlags([]);
<add>assert.match(stdout, /\[Function: EventEmitter\] {/);
<add>assert.match(
<add> stdout,
<add> /Uncaught Error: Cannot find module 'wasi'[\w\W]+- <repl>\n/);
<add>
<add>// Run REPL with '--experimental-wasi-unstable-preview1'
<add>stdout = runREPLWithAdditionalFlags([
<add> '--experimental-wasi-unstable-preview1',
<add>]);
<add>assert.match(stdout, /\[Function: EventEmitter\] {/);
<add>assert.doesNotMatch(
<add> stdout,
<add> /Uncaught Error: Cannot find module 'wasi'[\w\W]+- <repl>\n/);
<add>assert.match(stdout, /{ WASI: \[class WASI\] }/);
| 4
|
Python
|
Python
|
remove bilstm import
|
bae59bf92f7d4a22b5b1391699d29f2892de9792
|
<ide><path>spacy/_ml.py
<ide> from thinc.neural._classes.attention import ParametricAttention
<ide> from thinc.linear.linear import LinearModel
<ide> from thinc.api import uniqued, wrap, flatten_add_lengths
<del>from thinc.neural._classes.rnn import BiLSTM
<ide>
<ide>
<ide> from .attrs import ID, ORTH, LOWER, NORM, PREFIX, SUFFIX, SHAPE, TAG, DEP
| 1
|
Javascript
|
Javascript
|
move url data to fixtures
|
e571fd4f6ccadd9e77095ca0377a9b07f49bc56b
|
<ide><path>benchmark/fixtures/url-inputs.js
<add>'use strict';
<add>
<add>exports.urls = {
<add> long: 'http://nodejs.org:89/docs/latest/api/foo/bar/qua/13949281/0f28b/' +
<add> '/5d49/b3020/url.html#test?payload1=true&payload2=false&test=1' +
<add> '&benchmark=3&foo=38.38.011.293&bar=1234834910480&test=19299&3992&' +
<add> 'key=f5c65e1e98fe07e648249ad41e1cfdb0',
<add> short: 'https://nodejs.org/en/blog/',
<add> idn: 'http://你好你好.在线',
<add> auth: 'https://user:pass@example.com/path?search=1',
<add> file: 'file:///foo/bar/test/node.js',
<add> ws: 'ws://localhost:9229/f46db715-70df-43ad-a359-7f9949f39868',
<add> javascript: 'javascript:alert("node is awesome");',
<add> percent: 'https://%E4%BD%A0/foo',
<add> dot: 'https://example.org/./a/../b/./c'
<add>};
<add>
<add>exports.searchParams = {
<add> noencode: 'foo=bar&baz=quux&xyzzy=thud',
<add> multicharsep: 'foo=bar&&&&&&&&&&baz=quux&&&&&&&&&&xyzzy=thud',
<add> encodefake: 'foo=%©ar&baz=%A©uux&xyzzy=%©ud',
<add> encodemany: '%66%6F%6F=bar&%62%61%7A=quux&xyzzy=%74h%75d',
<add> encodelast: 'foo=bar&baz=quux&xyzzy=thu%64',
<add> multivalue: 'foo=bar&foo=baz&foo=quux&quuy=quuz',
<add> multivaluemany: 'foo=bar&foo=baz&foo=quux&quuy=quuz&foo=abc&foo=def&' +
<add> 'foo=ghi&foo=jkl&foo=mno&foo=pqr&foo=stu&foo=vwxyz',
<add> manypairs: 'a&b&c&d&e&f&g&h&i&j&k&l&m&n&o&p&q&r&s&t&u&v&w&x&y&z',
<add> manyblankpairs: '&&&&&&&&&&&&&&&&&&&&&&&&',
<add> altspaces: 'foo+bar=baz+quux&xyzzy+thud=quuy+quuz&abc=def+ghi'
<add>};
<ide><path>benchmark/url/legacy-vs-whatwg-url-get-prop.js
<ide> const common = require('../common.js');
<ide> const url = require('url');
<ide> const URL = url.URL;
<ide> const assert = require('assert');
<del>
<del>const inputs = {
<del> long: 'http://nodejs.org:89/docs/latest/api/url.html#test?' +
<del> 'payload1=true&payload2=false&test=1&benchmark=3&' +
<del> 'foo=38.38.011.293&bar=1234834910480&test=19299&3992&' +
<del> 'key=f5c65e1e98fe07e648249ad41e1cfdb0',
<del> short: 'https://nodejs.org/en/blog/',
<del> idn: 'http://你好你好',
<del> auth: 'https://user:pass@example.com/path?search=1',
<del> special: 'file:///foo/bar/test/node.js',
<del> percent: 'https://%E4%BD%A0/foo',
<del> dot: 'https://example.org/./a/../b/./c'
<del>};
<add>const inputs = require('../fixtures/url-inputs.js').urls;
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> type: Object.keys(inputs),
<ide><path>benchmark/url/legacy-vs-whatwg-url-parse.js
<ide> const common = require('../common.js');
<ide> const url = require('url');
<ide> const URL = url.URL;
<ide> const assert = require('assert');
<del>
<del>const inputs = {
<del> long: 'http://nodejs.org:89/docs/latest/api/url.html#test?' +
<del> 'payload1=true&payload2=false&test=1&benchmark=3&' +
<del> 'foo=38.38.011.293&bar=1234834910480&test=19299&3992&' +
<del> 'key=f5c65e1e98fe07e648249ad41e1cfdb0',
<del> short: 'https://nodejs.org/en/blog/',
<del> idn: 'http://你好你好',
<del> auth: 'https://user:pass@example.com/path?search=1',
<del> special: 'file:///foo/bar/test/node.js',
<del> percent: 'https://%E4%BD%A0/foo',
<del> dot: 'https://example.org/./a/../b/./c'
<del>};
<add>const inputs = require('../fixtures/url-inputs.js').urls;
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> type: Object.keys(inputs),
<ide><path>benchmark/url/legacy-vs-whatwg-url-searchparams-parse.js
<ide> const common = require('../common.js');
<ide> const { URLSearchParams } = require('url');
<ide> const querystring = require('querystring');
<del>
<del>const inputs = {
<del> noencode: 'foo=bar&baz=quux&xyzzy=thud',
<del> encodemany: '%66%6F%6F=bar&%62%61%7A=quux&xyzzy=%74h%75d',
<del> encodefake: 'foo=%©ar&baz=%A©uux&xyzzy=%©ud',
<del> encodelast: 'foo=bar&baz=quux&xyzzy=thu%64',
<del> multicharsep: 'foo=bar&&&&&&&&&&baz=quux&&&&&&&&&&xyzzy=thud',
<del> multivalue: 'foo=bar&foo=baz&foo=quux&quuy=quuz',
<del> multivaluemany: 'foo=bar&foo=baz&foo=quux&quuy=quuz&foo=abc&foo=def&' +
<del> 'foo=ghi&foo=jkl&foo=mno&foo=pqr&foo=stu&foo=vwxyz',
<del> manypairs: 'a&b&c&d&e&f&g&h&i&j&k&l&m&n&o&p&q&r&s&t&u&v&w&x&y&z'
<del>};
<add>const inputs = require('../fixtures/url-inputs.js').searchParams;
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> type: Object.keys(inputs),
<ide><path>benchmark/url/legacy-vs-whatwg-url-searchparams-serialize.js
<ide> const common = require('../common.js');
<ide> const { URLSearchParams } = require('url');
<ide> const querystring = require('querystring');
<del>
<del>const inputs = {
<del> noencode: 'foo=bar&baz=quux&xyzzy=thud',
<del> encodemany: '%66%6F%6F=bar&%62%61%7A=quux&xyzzy=%74h%75d',
<del> encodefake: 'foo=%©ar&baz=%A©uux&xyzzy=%©ud',
<del> encodelast: 'foo=bar&baz=quux&xyzzy=thu%64',
<del> multicharsep: 'foo=bar&&&&&&&&&&baz=quux&&&&&&&&&&xyzzy=thud',
<del> multivalue: 'foo=bar&foo=baz&foo=quux&quuy=quuz',
<del> multivaluemany: 'foo=bar&foo=baz&foo=quux&quuy=quuz&foo=abc&foo=def&' +
<del> 'foo=ghi&foo=jkl&foo=mno&foo=pqr&foo=stu&foo=vwxyz',
<del> manypairs: 'a&b&c&d&e&f&g&h&i&j&k&l&m&n&o&p&q&r&s&t&u&v&w&x&y&z'
<del>};
<add>const inputs = require('../fixtures/url-inputs.js').searchParams;
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> type: Object.keys(inputs),
<ide><path>benchmark/url/legacy-vs-whatwg-url-serialize.js
<ide> const common = require('../common.js');
<ide> const url = require('url');
<ide> const URL = url.URL;
<ide> const assert = require('assert');
<del>
<del>const inputs = {
<del> long: 'http://nodejs.org:89/docs/latest/api/url.html#test?' +
<del> 'payload1=true&payload2=false&test=1&benchmark=3&' +
<del> 'foo=38.38.011.293&bar=1234834910480&test=19299&3992&' +
<del> 'key=f5c65e1e98fe07e648249ad41e1cfdb0',
<del> short: 'https://nodejs.org/en/blog/',
<del> idn: 'http://你好你好',
<del> auth: 'https://user:pass@example.com/path?search=1',
<del> special: 'file:///foo/bar/test/node.js',
<del> percent: 'https://%E4%BD%A0/foo',
<del> dot: 'https://example.org/./a/../b/./c'
<del>};
<add>const inputs = require('../fixtures/url-inputs.js').urls;
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> type: Object.keys(inputs),
| 6
|
Text
|
Text
|
update webpack and browserify example readme files
|
b293d1caf5f331f01283b7df4579cc7ea641379d
|
<ide><path>examples/browserify/README.md
<ide> ## Overview
<ide>
<del>Example to demonstrate PDF.js library usage with browserify.
<add>Example to demonstrate PDF.js library usage with Browserify.
<ide>
<ide> ## Getting started
<ide>
<ide> Build project and install the example dependencies:
<ide>
<del> $ gulp dist
<del> $ cd examples/browserify
<del> $ npm install
<add> $ gulp dist
<add> $ cd examples/browserify
<add> $ npm install
<ide>
<del>To build browserify bundles, run `gulp build`. If you are running
<add>To build Browserify bundles, run `gulp build`. If you are running
<ide> a web server, you can observe the build results at
<ide> http://localhost:8888/examples/browserify/index.html
<ide>
<ide> See main.js, worker.js and gulpfile.js files. Please notice that PDF.js
<ide> packaging requires packaging of the main application and PDF.js worker code,
<del>and the workerSrc path shall be set to the latter file.
<add>and the `workerSrc` path shall be set to the latter file.
<ide>
<ide> Alternatives to the gulp commands (without compression) are:
<ide>
<del> $ mkdir -p ../../build/browserify
<del> $ node_modules/.bin/browserify main.js -o ../../build/browserify/bundle.js
<del> $ node_modules/.bin/browserify worker.js -o ../../build/browserify/pdf.worker.bundle.js
<add> $ mkdir -p ../../build/browserify
<add> $ node_modules/.bin/browserify main.js -o ../../build/browserify/bundle.js
<add> $ node_modules/.bin/browserify worker.js -o ../../build/browserify/pdf.worker.bundle.js
<ide><path>examples/webpack/README.md
<ide> ## Overview
<ide>
<del>Example to demonstrate PDF.js library usage with webpack.
<add>Example to demonstrate PDF.js library usage with Webpack.
<ide>
<ide> ## Getting started
<ide>
<ide> Build project and install the example dependencies:
<ide>
<del> $ gulp dist
<del> $ cd examples/webpack
<del> $ npm install
<add> $ gulp dist
<add> $ cd examples/webpack
<add> $ npm install
<ide>
<del>To build webpack bundles, run `node_modules/.bin/webpack`. If you are running
<add>To build Webpack bundles, run `node_modules/.bin/webpack`. If you are running
<ide> a web server, you can observe the build results at
<ide> http://localhost:8888/examples/webpack/index.html
<ide>
<ide> See main.js and webpack.config.js files. Please notice that PDF.js packaging
<del>requires 'entry' loader.
<add>requires the `entry` loader.
| 2
|
Text
|
Text
|
add 0.69.5 changelog
|
2452c5f16e8bbbf6f17d09e77d3b8f073837bf18
|
<ide><path>CHANGELOG.md
<ide> # Changelog
<ide>
<add>## v0.69.5
<add>
<add>### Changed
<add>
<add>- Bump react-native-codegen to 0.69.2 ([df3d52bfbf](https://github.com/facebook/react-native/commit/df3d52bfbf4254cd16e1dc0ca0af2743cd7e11c1) by [@dmitryrykun](https://github.com/dmitryrykun))
<add>
<add>#### Android specific
<add>
<add>- Replaced reactnativeutilsjni with reactnativejni in the build process to reduce size ([54a4fcbfdc](https://github.com/facebook/react-native/commit/54a4fcbfdcc8111b3010b2c31ed3c1d48632ce4c) by [@SparshaSaha](https://github.com/SparshaSaha))
<add>
<add>### Fixed
<add>
<add>- Codegen should ignore `.d.ts` files ([0f0d52067c](https://github.com/facebook/react-native/commit/0f0d52067cb89fdb39a99021f0745282ce087fc2) by [@tido64](https://github.com/tido64))
<add>
<ide> ## v0.69.4
<ide>
<ide> ### Changed
| 1
|
PHP
|
PHP
|
accept only array
|
f43091f78e1021f0156f834ee685ec46cda83a54
|
<ide><path>src/Illuminate/Database/Eloquent/Model.php
<ide> public function qualifyColumn($column)
<ide> /**
<ide> * Qualify the column's lists name by the model's table.
<ide> *
<del> * @param array|mixed $columns
<add> * @param array $columns
<ide> * @return array
<ide> */
<ide> public function qualifyColumns($columns)
<ide> {
<del> $columns = is_array($columns) ? $columns : func_get_args();
<ide> $qualifiedArray = [];
<ide>
<ide> foreach ($columns as $column) {
<ide><path>tests/Database/DatabaseEloquentBuilderTest.php
<ide> public function testQualifyColumns()
<ide>
<ide> $builder->setModel(new EloquentModelStub);
<ide>
<del> $this->assertEquals(['stub.column', 'stub.name'], $builder->qualifyColumns('column', 'name'));
<ide> $this->assertEquals(['stub.column', 'stub.name'], $builder->qualifyColumns(['column', 'name']));
<ide> }
<ide>
| 2
|
Python
|
Python
|
fix data augmentation
|
f52249fe2eb5afca9e68060a99d8cb31a6175c72
|
<ide><path>spacy/training/augment.py
<ide> import copy
<ide> from functools import partial
<ide> from ..util import registry
<add>from ..tokens import Doc
<ide>
<ide>
<ide> @registry.augmenters("spacy.dont_augment.v1")
<ide> def orth_variants_augmenter(nlp, example, *, level: float = 0.0, lower: float =
<ide> orig_dict["token_annotation"],
<ide> lower=raw_text is not None and random.random() < lower,
<ide> )
<del> if variant_text is None:
<del> doc = Doc(nlp.vocab, words=variant_token_annot["words"])
<del> else:
<add> if variant_text:
<ide> doc = nlp.make_doc(variant_text)
<add> else:
<add> doc = Doc(nlp.vocab, words=variant_token_annot["ORTH"])
<add> variant_token_annot["ORTH"] = [w.text for w in doc]
<add> variant_token_annot["SPACY"] = [w.whitespace_ for w in doc]
<ide> orig_dict["token_annotation"] = variant_token_annot
<ide> yield example.from_dict(doc, orig_dict)
<ide>
| 1
|
Javascript
|
Javascript
|
return old status from _setrawmode
|
dc57b9e78fa0c64ce83c1e54a01e95fccb88e301
|
<ide><path>lib/readline.js
<ide> Interface.prototype.setPrompt = function(prompt) {
<ide>
<ide>
<ide> Interface.prototype._setRawMode = function(mode) {
<add> const wasInRawMode = this.input.isRaw;
<add>
<ide> if (typeof this.input.setRawMode === 'function') {
<del> return this.input.setRawMode(mode);
<add> this.input.setRawMode(mode);
<ide> }
<add>
<add> return wasInRawMode;
<ide> };
<ide>
<ide>
| 1
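A minimal sketch of why `_setRawMode` now returns the previous state: the caller can restore the terminal to whatever mode it was in beforehand, instead of assuming it started in cooked mode (uses the standard Node TTY API):

    'use strict';
    // Capture the prior raw-mode flag before toggling, and hand it back
    // so the caller can undo the change exactly.
    function setRawMode(stream, mode) {
      const wasInRawMode = stream.isRaw;
      if (typeof stream.setRawMode === 'function') {
        stream.setRawMode(mode);
      }
      return wasInRawMode;
    }
    // Usage, only meaningful when stdin is a TTY:
    // const previous = setRawMode(process.stdin, true);
    // ...read a single keypress...
    // setRawMode(process.stdin, previous);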
|
Python
|
Python
|
add cnn seq2seq example
|
627bf95c11199419eb6d2017c322a0d78ef3dc3c
|
<ide><path>examples/cnn_seq2seq.py
<add>'''# Sequence-to-sequence example in Keras (character-level).
<add>
<add>This script demonstrates how to implement a basic character-level CNN
<add>sequence-to-sequence model. We apply it to translating
<add>short English sentences into short French sentences,
<add>character-by-character. Note that it is fairly unusual to
<add>do character-level machine translation, as word-level
<add>models are much more common in this domain. This example
<add>is for demonstration purposes only.
<add>
<add>**Summary of the algorithm**
<add>
<add>- We start with input sequences from a domain (e.g. English sentences)
<add> and corresponding target sequences from another domain
<add> (e.g. French sentences).
<add>- An encoder CNN encodes the input character sequence.
<add>- A decoder CNN is trained to turn the target sequences into
<add> the same sequence but offset by one timestep in the future,
<add> a training process called "teacher forcing" in this context.
<add> It uses the output from the encoder.
<add> Effectively, the decoder learns to generate `targets[t+1...]`
<add> given `targets[...t]`, conditioned on the input sequence.
<add>- In inference mode, when we want to decode unknown input sequences, we:
<add> - Encode the input sequence.
<add> - Start with a target sequence of size 1
<add> (just the start-of-sequence character)
<add> - Feed the input sequence and 1-char target sequence
<add> to the decoder to produce predictions for the next character
<add> - Sample the next character using these predictions
<add> (we simply use argmax).
<add> - Append the sampled character to the target sequence
<add> - Repeat until we hit the character limit.
<add>
<add>**Data download**
<add>
<add>[English to French sentence pairs.
<add>](http://www.manythings.org/anki/fra-eng.zip)
<add>
<add>[Lots of neat sentence pairs datasets.
<add>](http://www.manythings.org/anki/)
<add>
<add>**References**
<add>
<add>- lstm_seq2seq.py
<add>- https://wanasit.github.io/attention-based-sequence-to-sequence-in-keras.html
<add>'''
<add>from __future__ import print_function
<add>
<add>import numpy as np
<add>
<add>from keras.layers import Input, Convolution1D, Dot, Dense, Activation, Concatenate
<add>from keras.models import Model
<add>
<add>batch_size = 64 # Batch size for training.
<add>epochs = 100 # Number of epochs to train for.
<add>num_samples = 10000 # Number of samples to train on.
<add># Path to the data txt file on disk.
<add>data_path = 'fra-eng/fra.txt'
<add>
<add># Vectorize the data.
<add>input_texts = []
<add>target_texts = []
<add>input_characters = set()
<add>target_characters = set()
<add>with open(data_path, 'r', encoding='utf-8') as f:
<add> lines = f.read().split('\n')
<add>for line in lines[: min(num_samples, len(lines) - 1)]:
<add> input_text, target_text = line.split('\t')
<add> # We use "tab" as the "start sequence" character
<add> # for the targets, and "\n" as "end sequence" character.
<add> target_text = '\t' + target_text + '\n'
<add> input_texts.append(input_text)
<add> target_texts.append(target_text)
<add> for char in input_text:
<add> if char not in input_characters:
<add> input_characters.add(char)
<add> for char in target_text:
<add> if char not in target_characters:
<add> target_characters.add(char)
<add>
<add>input_characters = sorted(list(input_characters))
<add>target_characters = sorted(list(target_characters))
<add>num_encoder_tokens = len(input_characters)
<add>num_decoder_tokens = len(target_characters)
<add>max_encoder_seq_length = max([len(txt) for txt in input_texts])
<add>max_decoder_seq_length = max([len(txt) for txt in target_texts])
<add>
<add>print('Number of samples:', len(input_texts))
<add>print('Number of unique input tokens:', num_encoder_tokens)
<add>print('Number of unique output tokens:', num_decoder_tokens)
<add>print('Max sequence length for inputs:', max_encoder_seq_length)
<add>print('Max sequence length for outputs:', max_decoder_seq_length)
<add>
<add>input_token_index = dict(
<add> [(char, i) for i, char in enumerate(input_characters)])
<add>target_token_index = dict(
<add> [(char, i) for i, char in enumerate(target_characters)])
<add>
<add>encoder_input_data = np.zeros(
<add> (len(input_texts), max_encoder_seq_length, num_encoder_tokens),
<add> dtype='float32')
<add>decoder_input_data = np.zeros(
<add> (len(input_texts), max_decoder_seq_length, num_decoder_tokens),
<add> dtype='float32')
<add>decoder_target_data = np.zeros(
<add> (len(input_texts), max_decoder_seq_length, num_decoder_tokens),
<add> dtype='float32')
<add>
<add>for i, (input_text, target_text) in enumerate(zip(input_texts, target_texts)):
<add> for t, char in enumerate(input_text):
<add> encoder_input_data[i, t, input_token_index[char]] = 1.
<add> for t, char in enumerate(target_text):
<add> # decoder_target_data is ahead of decoder_input_data by one timestep
<add> decoder_input_data[i, t, target_token_index[char]] = 1.
<add> if t > 0:
<add> # decoder_target_data will be ahead by one timestep
<add> # and will not include the start character.
<add> decoder_target_data[i, t - 1, target_token_index[char]] = 1.
<add>
<add># Define an input sequence and process it.
<add>encoder_inputs = Input(shape=(None, num_encoder_tokens))
<add># Encoder
<add>x_encoder = Convolution1D(256, kernel_size=3, activation='relu',
<add> padding='causal')(encoder_inputs)
<add>x_encoder = Convolution1D(256, kernel_size=3, activation='relu',
<add> padding='causal', dilation_rate=2)(x_encoder)
<add>x_encoder = Convolution1D(256, kernel_size=3, activation='relu',
<add> padding='causal', dilation_rate=4)(x_encoder)
<add>
<add>decoder_inputs = Input(shape=(None, num_decoder_tokens))
<add># Decoder
<add>x_decoder = Convolution1D(256, kernel_size=3, activation='relu',
<add> padding='causal')(decoder_inputs)
<add>x_decoder = Convolution1D(256, kernel_size=3, activation='relu',
<add> padding='causal', dilation_rate=2)(x_decoder)
<add>x_decoder = Convolution1D(256, kernel_size=3, activation='relu',
<add> padding='causal', dilation_rate=4)(x_decoder)
<add># Attention
<add>attention = Dot(axes=[2, 2])([x_decoder, x_encoder])
<add>attention = Activation('softmax')(attention)
<add>
<add>context = Dot(axes=[2, 1])([attention, x_encoder])
<add>decoder_combined_context = Concatenate(axis=-1)([context, x_decoder])
<add>
<add>decoder_outputs = Convolution1D(64, kernel_size=3, activation='relu',
<add> padding='causal')(decoder_combined_context)
<add>decoder_outputs = Convolution1D(64, kernel_size=3, activation='relu',
<add> padding='causal')(decoder_outputs)
<add># Output
<add>decoder_dense = Dense(num_decoder_tokens, activation='softmax')
<add>decoder_outputs = decoder_dense(decoder_outputs)
<add>
<add># Define the model that will turn
<add># `encoder_input_data` & `decoder_input_data` into `decoder_target_data`
<add>model = Model([encoder_inputs, decoder_inputs], decoder_outputs)
<add>model.summary()
<add>
<add># Run training
<add>model.compile(optimizer='adam', loss='categorical_crossentropy')
<add>model.fit([encoder_input_data, decoder_input_data], decoder_target_data,
<add> batch_size=batch_size,
<add> epochs=epochs,
<add> validation_split=0.2)
<add># Save model
<add>model.save('cnn_s2s.h5')
<add>
<add># Next: inference mode (sampling).
<add>
<add># Define sampling models
<add>reverse_input_char_index = dict(
<add> (i, char) for char, i in input_token_index.items())
<add>reverse_target_char_index = dict(
<add> (i, char) for char, i in target_token_index.items())
<add>
<add>nb_examples = 100
<add>in_encoder = encoder_input_data[:nb_examples]
<add>in_decoder = np.zeros(
<add>    (nb_examples, max_decoder_seq_length, num_decoder_tokens),
<add>    dtype='float32')
<add>
<add>in_decoder[:, 0, target_token_index["\t"]] = 1
<add>
<add>predict = np.zeros(
<add>    (nb_examples, max_decoder_seq_length),
<add>    dtype='float32')
<add>
<add>for i in range(max_decoder_seq_length - 1):
<add> predict = model.predict([in_encoder, in_decoder])
<add> predict = predict.argmax(axis=-1)
<add> predict_ = predict[:, i].ravel().tolist()
<add> for j, x in enumerate(predict_):
<add> in_decoder[j, i + 1, x] = 1
<add>
<add>for seq_index in range(nb_examples):
<add> # Take one sequence (part of the training set)
<add> # for trying out decoding.
<add> output_seq = predict[seq_index, :].ravel().tolist()
<add> decoded = []
<add> for x in output_seq:
<add> if reverse_target_char_index[x] == "\n":
<add> break
<add> else:
<add> decoded.append(reverse_target_char_index[x])
<add> decoded_sentence = "".join(decoded)
<add> print('-')
<add> print('Input sentence:', input_texts[seq_index])
<add> print('Decoded sentence:', decoded_sentence)
| 1
|
Javascript
|
Javascript
|
improve floret vectors display in pipeline docs
|
04c6e5cb9526c3ac3ce395be7de5fa607ddefe4b
|
<ide><path>website/src/templates/models.js
<ide> function formatVectors(data) {
<ide> if (!data) return 'n/a'
<ide> if (Object.values(data).every(n => n === 0)) return 'context vectors only'
<ide> const { keys, vectors, width } = data
<del> return `${abbrNum(keys)} keys, ${abbrNum(vectors)} unique vectors (${width} dimensions)`
<add> if (keys >= 0) {
<add> return `${abbrNum(keys)} keys, ${abbrNum(vectors)} unique vectors (${width} dimensions)`
<add> } else {
<add> return `${abbrNum(vectors)} floret vectors (${width} dimensions)`
<add> }
<ide> }
<ide>
<ide> function formatAccuracy(data, lang) {
| 1
|
Javascript
|
Javascript
|
remove last use of mapobject
|
8d1e416d9a56bb126d2f50038acb3663f7655ed0
|
<ide><path>src/renderers/shared/stack/reconciler/__tests__/ReactMultiChildReconcile-test.js
<ide> var ReactDOM = require('ReactDOM');
<ide> var ReactDOMComponentTree = require('ReactDOMComponentTree');
<ide> var ReactInstanceMap = require('ReactInstanceMap');
<ide>
<del>var mapObject = require('mapObject');
<del>
<ide> var stripEmptyValues = function(obj) {
<ide> var ret = {};
<ide> var name;
<ide> class FriendsStatusDisplay extends React.Component {
<ide>
<ide>
<ide> function getInternalStateByUserName(statusDisplays) {
<del> return mapObject(statusDisplays, function(statusDisplay, key) {
<del> return statusDisplay.getInternalState();
<del> });
<add> return Object.keys(statusDisplays).reduce((acc, key) => {
<add> acc[key] = statusDisplays[key].getInternalState();
<add> return acc;
<add> }, {});
<ide> }
<ide>
<ide> /**
| 1
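The replacement idiom generalizes to a tiny helper; a sketch (the helper name `mapValues` is illustrative, not part of the codebase):

    'use strict';
    // Map every value of an object while keeping its keys, using only
    // built-ins -- the same Object.keys().reduce() shape as the patch.
    function mapValues(obj, fn) {
      return Object.keys(obj).reduce((acc, key) => {
        acc[key] = fn(obj[key], key);
        return acc;
      }, {});
    }
    console.log(mapValues({ a: 1, b: 2 }, (v) => v * 10)); // { a: 10, b: 20 }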
|
Ruby
|
Ruby
|
set a default value for .report handled kwarg
|
1e8936605380bfc1e97bc99333879e133a56b2b9
|
<ide><path>activesupport/lib/active_support/error_reporter.rb
<ide> def set_context(...)
<ide>
<ide> # When the block based +handle+ and +record+ methods are not suitable, you can directly use +report+
<ide> #
<del> # Rails.error.report(error, handled: true)
<del> def report(error, handled:, severity: handled ? :warning : :error, context: {})
<add> # Rails.error.report(error)
<add> def report(error, handled: true, severity: handled ? :warning : :error, context: {})
<ide> unless SEVERITIES.include?(severity)
<ide> raise ArgumentError, "severity must be one of #{SEVERITIES.map(&:inspect).join(", ")}, got: #{severity.inspect}"
<ide> end
| 1
|
Ruby
|
Ruby
|
reduce allocation in expand cache key
|
1eb1966f29430f23ae05f6556668bbb5447cae43
|
<ide><path>activesupport/lib/active_support/cache.rb
<ide> def lookup_store(store = nil, *parameters)
<ide> #
<ide> # The +key+ argument can also respond to +cache_key+ or +to_param+.
<ide> def expand_cache_key(key, namespace = nil)
<del> expanded_cache_key = (namespace ? "#{namespace}/" : "").dup
<add> expanded_cache_key = namespace ? +"#{namespace}/" : +""
<ide>
<ide> if prefix = ENV["RAILS_CACHE_ID"] || ENV["RAILS_APP_VERSION"]
<ide> expanded_cache_key << "#{prefix}/"
| 1
|
Text
|
Text
|
fix typo in caching docs
|
9f66fc9a7ca51ad5535cb48e199cf2fbe9eecec6
|
<ide><path>docs/api-guide/caching.md
<ide> provided in Django.
<ide>
<ide> Django provides a [`method_decorator`][decorator] to use
<ide> decorators with class based views. This can be used with
<del>with other cache decorators such as [`cache_page`][page] and
<add>other cache decorators such as [`cache_page`][page] and
<ide> [`vary_on_cookie`][cookie].
<ide>
<ide> ```python
| 1
|
Java
|
Java
|
pass the correct url to android webview events
|
b436943a279f116fc8f376a377afc04aa7c5a774
|
<ide><path>ReactAndroid/src/main/java/com/facebook/react/views/webview/ReactWebViewManager.java
<ide> public void onPageFinished(WebView webView, String url) {
<ide> if (!mLastLoadFailed) {
<ide> ReactWebView reactWebView = (ReactWebView) webView;
<ide> reactWebView.callInjectedJavaScript();
<del> emitFinishEvent(webView);
<add> emitFinishEvent(webView, url);
<ide> }
<ide> }
<ide>
<ide> public void onPageStarted(WebView webView, String url, Bitmap favicon) {
<ide> new TopLoadingStartEvent(
<ide> webView.getId(),
<ide> SystemClock.uptimeMillis(),
<del> createWebViewEvent(webView)));
<add> createWebViewEvent(webView, url)));
<ide> }
<ide>
<ide> @Override
<ide> public void onReceivedError(
<ide> mLastLoadFailed = true;
<ide>
<ide> // In case of an error JS side expect to get a finish event first, and then get an error event
<del> // Android WebView does it in the oposite way, so we need to simulate that behavior
<del> emitFinishEvent(webView);
<add> // Android WebView does it in the opposite way, so we need to simulate that behavior
<add> emitFinishEvent(webView, failingUrl);
<ide>
<ide> ReactContext reactContext = (ReactContext) ((ReactWebView) webView).getContext();
<del> WritableMap eventData = createWebViewEvent(webView);
<add> WritableMap eventData = createWebViewEvent(webView, failingUrl);
<ide> eventData.putDouble("code", errorCode);
<ide> eventData.putString("description", description);
<ide>
<ide> public void doUpdateVisitedHistory(WebView webView, String url, boolean isReload
<ide> new TopLoadingStartEvent(
<ide> webView.getId(),
<ide> SystemClock.uptimeMillis(),
<del> createWebViewEvent(webView)));
<add> createWebViewEvent(webView, url)));
<ide> }
<ide>
<del> private void emitFinishEvent(WebView webView) {
<add> private void emitFinishEvent(WebView webView, String url) {
<ide> ReactContext reactContext = (ReactContext) webView.getContext();
<ide>
<ide> EventDispatcher eventDispatcher =
<ide> private void emitFinishEvent(WebView webView) {
<ide> new TopLoadingFinishEvent(
<ide> webView.getId(),
<ide> SystemClock.uptimeMillis(),
<del> createWebViewEvent(webView)));
<add> createWebViewEvent(webView, url)));
<ide> }
<ide>
<del> private WritableMap createWebViewEvent(WebView webView) {
<add> private WritableMap createWebViewEvent(WebView webView, String url) {
<ide> WritableMap event = Arguments.createMap();
<ide> event.putDouble("target", webView.getId());
<del> event.putString("url", webView.getUrl());
<add> // Don't use webView.getUrl() here, the URL isn't updated to the new value yet in callbacks
<add> // like onPageFinished
<add> event.putString("url", url);
<ide> event.putBoolean("loading", !mLastLoadFailed && webView.getProgress() != 100);
<ide> event.putString("title", webView.getTitle());
<ide> event.putBoolean("canGoBack", webView.canGoBack());
<ide> public void setHtml(WebView view, @Nullable String html) {
<ide> public void setUrl(WebView view, @Nullable String url) {
<ide> // TODO(8495359): url and html are coupled as they both call loadUrl, therefore in case when
<ide> // property url is removed in favor of property html being added in single transaction we may
<del> // end up in a state when blank url is loaded as it depends onthe oreder of update operations!
<add> // end up in a state when blank url is loaded as it depends on the order of update operations!
<ide> if (url != null) {
<ide> view.loadUrl(url);
<ide> } else {
| 1
|
Python
|
Python
|
fix mixup with bytes.decode() and str.encode()
|
6afd1a3dc183eb0edfde878df7a655cc58caece4
|
<ide><path>tools/v8_gypfiles/GN-scraper.py
<ide> def DoMain(args):
<ide> gn_filename, pattern = args
<ide> src_root = os.path.dirname(gn_filename)
<del> with open(gn_filename, 'r') as gn_file:
<del> gn_content = gn_file.read().encode('utf-8')
<add> with open(gn_filename, 'rb') as gn_file:
<add> gn_content = gn_file.read().decode('utf-8')
<ide>
<ide> scraper_re = re.compile(pattern + r'\[([^\]]+)', re.DOTALL)
<ide> matches = scraper_re.search(gn_content)
| 1
|
Python
|
Python
|
update minimum install requirements
|
07000942dacaeffa84d9db57ea42570fcfb0395e
|
<ide><path>setup.py
<ide> setup(
<ide> name="Flask",
<ide> install_requires=[
<del> "Werkzeug>=0.15",
<del> "Jinja2>=2.10.1",
<del> "itsdangerous>=0.24",
<del> "click>=5.1",
<add> "Werkzeug>=2.0.0rc4",
<add> "Jinja2>=3.0.0rc1",
<add> "itsdangerous>=2.0.0rc2",
<add> "click>=8.0.0rc1",
<ide> ],
<ide> extras_require={
<ide> "async": ["asgiref>=3.2"],
| 1
|
Javascript
|
Javascript
|
remove sizebot race condition
|
5fe97dbe19917b4c49618073ccc5632b593ec9fa
|
<ide><path>dangerfile.js
<ide> function git(args) {
<ide> try {
<ide> let baseCIBuildId = null;
<ide> const statusesResponse = await fetch(
<del> `https://api.github.com/repos/facebook/react/commits/${baseCommit}/statuses`
<add> `https://api.github.com/repos/facebook/react/commits/${baseCommit}/status`
<ide> );
<del> const statuses = await statusesResponse.json();
<add> const {statuses, state} = await statusesResponse.json();
<add> if (state === 'failure') {
<add> warn(`Base commit is broken: ${baseCommit}`);
<add> return;
<add> }
<ide> for (let i = 0; i < statuses.length; i++) {
<ide> const status = statuses[i];
<ide> // This must match the name of the CI job that creates the build artifacts
<ide> function git(args) {
<ide> )[1];
<ide> break;
<ide> }
<del> if (status.state === 'failure') {
<del> warn(`Base commit is broken: ${baseCommit}`);
<add> if (status.state === 'pending') {
<add> warn(`Build job for base commit is still pending: ${baseCommit}`);
<ide> return;
<ide> }
<ide> }
| 1
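The race disappears because the two GitHub endpoints differ: `/commits/:ref/statuses` lists raw status events, while `/commits/:ref/status` also reports a combined `state`. A sketch of reading the combined form (assumes a global `fetch`; the repo/ref values are placeholders):

    'use strict';
    // Combined state is 'success', 'failure', or 'pending' across all contexts.
    async function getCombinedStatus(repo, ref) {
      const res = await fetch(
        `https://api.github.com/repos/${repo}/commits/${ref}/status`
      );
      const { state, statuses } = await res.json();
      return { state, statuses };
    }
    // getCombinedStatus('facebook/react', 'main').then(({ state }) => console.log(state));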
|
Javascript
|
Javascript
|
fix flaky test-preload
|
b5a75baa5bc879cbd4a2c5885fb75be8605abae3
|
<ide><path>test/parallel/test-preload.js
<ide> var stdinStdout = '';
<ide> stdinProc.stdout.on('data', function(d) {
<ide> stdinStdout += d;
<ide> });
<del>stdinProc.on('exit', function(code) {
<add>stdinProc.on('close', function(code) {
<ide> assert.equal(code, 0);
<ide> assert.equal(stdinStdout, 'A\nhello\n');
<ide> });
<ide> var replStdout = '';
<ide> replProc.stdout.on('data', function(d) {
<ide> replStdout += d;
<ide> });
<del>replProc.on('exit', function(code) {
<add>replProc.on('close', function(code) {
<ide> assert.equal(code, 0);
<ide> const output = [
<ide> 'A',
| 1
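The flakiness came from event ordering: `'exit'` can fire while stdout data is still buffered, whereas `'close'` waits for the child's stdio streams to finish. A small sketch of the distinction:

    'use strict';
    const { spawn } = require('child_process');
    const proc = spawn(process.execPath, ['-e', 'console.log("hello")']);
    let out = '';
    proc.stdout.on('data', (d) => { out += d; });
    // 'exit' may run before all stdout data has been delivered...
    proc.on('exit', (code) => console.log('exit, code:', code));
    // ...but by 'close' the streams are done, so `out` is complete.
    proc.on('close', () => console.log('close, output:', JSON.stringify(out)));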
|
Javascript
|
Javascript
|
add $event to ng-swipe
|
507d8021b1c91cc0cefc0418e61b04597ad1030b
|
<ide><path>src/ngTouch/directive/ngSwipe.js
<ide> function makeSwipeDirective(directiveName, direction, eventName) {
<ide> }
<ide>
<ide> $swipe.bind(element, {
<del> 'start': function(coords) {
<add> 'start': function(coords, event) {
<ide> startCoords = coords;
<ide> valid = true;
<ide> },
<del> 'cancel': function() {
<add> 'cancel': function(event) {
<ide> valid = false;
<ide> },
<del> 'end': function(coords) {
<add> 'end': function(coords, event) {
<ide> if (validSwipe(coords)) {
<ide> scope.$apply(function() {
<ide> element.triggerHandler(eventName);
<del> swipeHandler(scope);
<add> swipeHandler(scope, {$event: event});
<ide> });
<ide> }
<ide> }
<ide><path>src/ngTouch/swipe.js
<ide> ngTouch.factory('$swipe', [function() {
<ide> totalX = 0;
<ide> totalY = 0;
<ide> lastPos = startCoords;
<del> eventHandlers['start'] && eventHandlers['start'](startCoords);
<add> eventHandlers['start'] && eventHandlers['start'](startCoords, event);
<ide> });
<ide>
<ide> element.on('touchcancel', function(event) {
<ide> active = false;
<del> eventHandlers['cancel'] && eventHandlers['cancel']();
<add> eventHandlers['cancel'] && eventHandlers['cancel'](event);
<ide> });
<ide>
<ide> element.on('touchmove mousemove', function(event) {
<ide> ngTouch.factory('$swipe', [function() {
<ide> if (totalY > totalX) {
<ide> // Allow native scrolling to take over.
<ide> active = false;
<del> eventHandlers['cancel'] && eventHandlers['cancel']();
<add> eventHandlers['cancel'] && eventHandlers['cancel'](event);
<ide> return;
<ide> } else {
<ide> // Prevent the browser from scrolling.
<ide> event.preventDefault();
<del>
<del> eventHandlers['move'] && eventHandlers['move'](coords);
<add> eventHandlers['move'] && eventHandlers['move'](coords, event);
<ide> }
<ide> });
<ide>
<ide> element.on('touchend mouseup', function(event) {
<ide> if (!active) return;
<ide> active = false;
<del> eventHandlers['end'] && eventHandlers['end'](getCoordinates(event));
<add> eventHandlers['end'] && eventHandlers['end'](getCoordinates(event), event);
<ide> });
<ide> }
<ide> };
<ide><path>test/ngTouch/directive/ngSwipeSpec.js
<ide> var swipeTests = function(description, restrictBrowsers, startEvent, moveEvent,
<ide> expect($rootScope.swiped).toBe(true);
<ide> }));
<ide>
<add> it('should pass event object', inject(function($rootScope, $compile) {
<add> element = $compile('<div ng-swipe-left="event = $event"></div>')($rootScope);
<add> $rootScope.$digest();
<add>
<add> browserTrigger(element, startEvent, {
<add> keys: [],
<add> x: 100,
<add> y: 20
<add> });
<add> browserTrigger(element, endEvent, {
<add> keys: [],
<add> x: 20,
<add> y: 20
<add> });
<add> expect($rootScope.event).toBeDefined();
<add> }));
<add>
<ide> it('should not swipe if you move too far vertically', inject(function($rootScope, $compile, $rootElement) {
<ide> element = $compile('<div ng-swipe-left="swiped = true"></div>')($rootScope);
<ide> $rootElement.append(element);
| 3
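With `$event` exposed to the swipe expression, handlers can inspect the underlying DOM event. A hypothetical usage (module and handler names are illustrative):

    'use strict';
    // Template: <div ng-swipe-left="onSwipeLeft($event)">Swipe me</div>
    angular.module('demo', ['ngTouch'])
      .controller('SwipeCtrl', ['$scope', function($scope) {
        $scope.onSwipeLeft = function(event) {
          // `event` is the native touchend/mouseup event forwarded by $swipe
          console.log('swipe ended via', event.type);
        };
      }]);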
|
Javascript
|
Javascript
|
use notimestamp instead of -1
|
ffe516f3bf57a13e123411a9ca76b48915a00acb
|
<ide><path>packages/react-reconciler/src/ReactFiberLane.js
<ide> export function markRootFinished(root: FiberRoot, remainingLanes: Lanes) {
<ide> const lane = 1 << index;
<ide>
<ide> // Clear the expiration time
<del> expirationTimes[index] = -1;
<add> expirationTimes[index] = NoTimestamp;
<ide>
<ide> lanes &= ~lane;
<ide> }
| 1
|
Python
|
Python
|
create newton_forward_interpolation.py (#333)
|
54830644a280ee2b8e59497f303220a211422118
|
<ide><path>arithmetic_analysis/newton_forward_interpolation.py
<add># https://www.geeksforgeeks.org/newton-forward-backward-interpolation/
<add>
<add>import math
<add>
<add># for calculating u value
<add>def ucal(u, p):
<add> """
<add> >>> ucal(1, 2)
<add> 0
<add> >>> ucal(1.1, 2)
<add> 0.11000000000000011
<add> >>> ucal(1.2, 2)
<add> 0.23999999999999994
<add> """
<add> temp = u
<add> for i in range(1, p):
<add> temp = temp * (u - i)
<add> return temp
<add>
<add>
<add>def main():
<add> n = int(input("enter the number of values"))
<add> y = []
<add> for i in range(n):
<add> y.append([])
<add> for i in range(n):
<add> for j in range(n):
<add> y[i].append(j)
<add> y[i][j] = 0
<add>
<add> print("enter the values of parameters in a list")
<add> x = list(map(int, input().split()))
<add>
<add> print("enter the values of corresponding parameters")
<add> for i in range(n):
<add> y[i][0] = float(input())
<add>
<add> value = int(input("enter the value to interpolate"))
<add> u = (value - x[0]) / (x[1] - x[0])
<add>
<add> # for calculating forward difference table
<add>
<add> for i in range(1, n):
<add> for j in range(n - i):
<add> y[j][i] = y[j + 1][i - 1] - y[j][i - 1]
<add>
<add> summ = y[0][0]
<add> for i in range(1, n):
<add> summ += (ucal(u, i) * y[0][i]) / math.factorial(i)
<add>
<add> print("the value at {} is {}".format(value, summ))
<add>
<add>
<add>if __name__ == "__main__":
<add> main()
| 1
|
Text
|
Text
|
fix some nits in hardcoded manpage links
|
a14a0fa8dc401188f67d26a82daae9423c54b41f
|
<ide><path>doc/api/errors.md
<ide> Creation of a [`zlib`][] object failed due to incorrect configuration.
<ide> [`--force-fips`]: cli.html#cli_force_fips
<ide> [`child_process`]: child_process.html
<ide> [`cipher.getAuthTag()`]: crypto.html#crypto_cipher_getauthtag
<add>[`Class: assert.AssertionError`]: assert.html#assert_class_assert_assertionerror
<ide> [`crypto.timingSafeEqual()`]: crypto.html#crypto_crypto_timingsafeequal_a_b
<ide> [`dgram.createSocket()`]: dgram.html#dgram_dgram_createsocket_options_callback
<ide> [`ERR_INVALID_ARG_TYPE`]: #ERR_INVALID_ARG_TYPE
<ide> Creation of a [`zlib`][] object failed due to incorrect configuration.
<ide> [`process.setUncaughtExceptionCaptureCallback()`]: process.html#process_process_setuncaughtexceptioncapturecallback_fn
<ide> [`require('crypto').setEngine()`]: crypto.html#crypto_crypto_setengine_engine_flags
<ide> [`server.listen()`]: net.html#net_server_listen
<del>[`Class: assert.AssertionError`]: assert.html#assert_class_assert_assertionerror
<add>[`zlib`]: zlib.html
<ide> [ES6 module]: esm.html
<ide> [Node.js Error Codes]: #nodejs-error-codes
<ide> [V8's stack trace API]: https://github.com/v8/v8/wiki/Stack-Trace-API
<ide> Creation of a [`zlib`][] object failed due to incorrect configuration.
<ide> [ICU]: intl.html#intl_internationalization_support
<ide> [online]: http://man7.org/linux/man-pages/man3/errno.3.html
<ide> [stream-based]: stream.html
<del>[syscall]: http://man7.org/linux/man-pages/man2/syscall.2.html
<add>[syscall]: http://man7.org/linux/man-pages/man2/syscalls.2.html
<ide> [try-catch]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch
<ide> [vm]: vm.html
<ide> [WHATWG Supported Encodings]: util.html#util_whatwg_supported_encodings
<del>[`zlib`]: zlib.html
<ide><path>doc/api/fs.md
<ide> The recursive option is only supported on macOS and Windows.
<ide> This feature depends on the underlying operating system providing a way
<ide> to be notified of filesystem changes.
<ide>
<del>* On Linux systems, this uses [`inotify`]
<del>* On BSD systems, this uses [`kqueue`]
<del>* On macOS, this uses [`kqueue`] for files and [`FSEvents`] for directories.
<add>* On Linux systems, this uses [`inotify(7)`].
<add>* On BSD systems, this uses [`kqueue(2)`].
<add>* On macOS, this uses [`kqueue(2)`] for files and [`FSEvents`] for directories.
<ide> * On SunOS systems (including Solaris and SmartOS), this uses [`event ports`].
<ide> * On Windows systems, this feature depends on [`ReadDirectoryChangesW`].
<ide> * On Aix systems, this feature depends on [`AHAFS`], which must be enabled.
<ide> the file contents.
<ide> [`fs.watch()`]: #fs_fs_watch_filename_options_listener
<ide> [`fs.write()`]: #fs_fs_write_fd_buffer_offset_length_position_callback
<ide> [`fs.writeFile()`]: #fs_fs_writefile_file_data_options_callback
<del>[`inotify`]: http://man7.org/linux/man-pages/man7/inotify.7.html
<del>[`kqueue`]: https://www.freebsd.org/cgi/man.cgi?kqueue
<add>[`inotify(7)`]: http://man7.org/linux/man-pages/man7/inotify.7.html
<add>[`kqueue(2)`]: https://www.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2
<ide> [`net.Socket`]: net.html#net_class_net_socket
<ide> [`stat()`]: fs.html#fs_fs_stat_path_callback
<ide> [`util.promisify()`]: util.html#util_util_promisify_original
<ide><path>doc/api/net.md
<ide> length of the queue of pending connections. The actual length will be determined
<ide> by the OS through sysctl settings such as `tcp_max_syn_backlog` and `somaxconn`
<ide> on Linux. The default value of this parameter is 511 (not 512).
<ide>
<del>All [`net.Socket`][] are set to `SO_REUSEADDR` (See [socket(7)][] for details).
<add>All [`net.Socket`][] are set to `SO_REUSEADDR` (see [`socket(7)`][] for
<add>details).
<ide>
<ide> The `server.listen()` method can be called again if and only if there was an
<ide> error during the first `server.listen()` call or `server.close()` has been
<ide> Returns `true` if input is a version 6 IP address, otherwise returns `false`.
<ide> [`server.listen(handle)`]: #net_server_listen_handle_backlog_callback
<ide> [`server.listen(options)`]: #net_server_listen_options_callback
<ide> [`server.listen(path)`]: #net_server_listen_path_backlog_callback
<add>[`socket(7)`]: http://man7.org/linux/man-pages/man7/socket.7.html
<ide> [`socket.connect()`]: #net_socket_connect
<ide> [`socket.connect(options)`]: #net_socket_connect_options_connectlistener
<ide> [`socket.connect(path)`]: #net_socket_connect_path_connectlistener
<ide> Returns `true` if input is a version 6 IP address, otherwise returns `false`.
<ide> [Readable Stream]: stream.html#stream_class_stream_readable
<ide> [duplex stream]: stream.html#stream_class_stream_duplex
<ide> [half-closed]: https://tools.ietf.org/html/rfc1122
<del>[socket(7)]: http://man7.org/linux/man-pages/man7/socket.7.html
<ide> [stream_writable_write]: stream.html#stream_writable_write_chunk_encoding_callback
<ide> [unspecified IPv4 address]: https://en.wikipedia.org/wiki/0.0.0.0
<ide> [unspecified IPv6 address]: https://en.wikipedia.org/wiki/IPv6_address#Unspecified_address
| 3
|
Javascript
|
Javascript
|
ensure socket cleanup on response end
|
234fb122bbdf87bb33d8fba41165ecfa92031b71
|
<ide><path>lib/http.js
<ide> function parserOnIncomingClient(res, shouldKeepAlive) {
<ide> COUNTER_HTTP_CLIENT_RESPONSE();
<ide> req.res = res;
<ide> res.req = req;
<del> var handled = req.emit('response', res);
<add>
<add> // add our listener first, so that we guarantee socket cleanup
<ide> res.on('end', responseOnEnd);
<add> var handled = req.emit('response', res);
<ide>
<ide> // If the user did not listen for the 'response' event, then they
<ide> // can't possibly read the data, so we ._dump() it into the void
<ide> function responseOnEnd() {
<ide> }
<ide> socket.removeListener('close', socketCloseListener);
<ide> socket.removeListener('error', socketErrorListener);
<del> socket.emit('free');
<add> // Mark this socket as available, AFTER user-added end
<add> // handlers have a chance to run.
<add> process.nextTick(function() {
<add> socket.emit('free');
<add> });
<ide> }
<ide> }
<ide>
<ide><path>test/simple/test-http-end-throw-socket-handling.js
<add>// Copyright Joyent, Inc. and other Node contributors.
<add>//
<add>// Permission is hereby granted, free of charge, to any person obtaining a
<add>// copy of this software and associated documentation files (the
<add>// "Software"), to deal in the Software without restriction, including
<add>// without limitation the rights to use, copy, modify, merge, publish,
<add>// distribute, sublicense, and/or sell copies of the Software, and to permit
<add>// persons to whom the Software is furnished to do so, subject to the
<add>// following conditions:
<add>//
<add>// The above copyright notice and this permission notice shall be included
<add>// in all copies or substantial portions of the Software.
<add>//
<add>// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<add>// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
<add>// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
<add>// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
<add>// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
<add>// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
<add>// USE OR OTHER DEALINGS IN THE SOFTWARE.
<add>
<add>var common = require('../common');
<add>var assert = require('assert');
<add>
<add>// Make sure that throwing in 'end' handler doesn't lock
<add>// up the socket forever.
<add>//
<add>// This is NOT a good way to handle errors in general, but all
<add>// the same, we should not be so brittle and easily broken.
<add>
<add>var http = require('http');
<add>
<add>var n = 0;
<add>var server = http.createServer(function(req, res) {
<add> if (++n === 10) server.close();
<add> res.end('ok');
<add>});
<add>
<add>server.listen(common.PORT, function() {
<add> for (var i = 0; i < 10; i++) {
<add> var options = { port: common.PORT };
<add>
<add> var req = http.request(options, function (res) {
<add> res.resume()
<add> res.on('end', function() {
<add> throw new Error('gleep glorp');
<add> });
<add> });
<add> req.end();
<add> }
<add>});
<add>
<add>setTimeout(function() {
<add> process.removeListener('uncaughtException', catcher);
<add> throw new Error('Taking too long!');
<add>}, 1000).unref();
<add>
<add>process.on('uncaughtException', catcher);
<add>var errors = 0;
<add>function catcher() {
<add> errors++;
<add>}
<add>
<add>process.on('exit', function() {
<add> assert.equal(errors, 10);
<add> console.log('ok');
<add>});
| 2
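The `process.nextTick()` deferral matters because `'end'` listeners the user adds after the internal one still run (and may even throw) before the socket is marked free. A minimal sketch of that ordering:

    'use strict';
    const EventEmitter = require('events');
    const socket = new EventEmitter();
    const response = new EventEmitter();
    socket.on('free', () => console.log('2: socket handed back to the agent'));
    // Internal listener, added first: defer the 'free' emit by one tick.
    response.on('end', () => process.nextTick(() => socket.emit('free')));
    // A user listener added later still observes 'end' before the socket frees.
    response.on('end', () => console.log('1: user end handler runs first'));
    response.emit('end');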
|
Text
|
Text
|
add v3.22.0-beta.3 to changelog
|
d654a6e588451503f8b2518d6d31ed264f95f634
|
<ide><path>CHANGELOG.md
<ide> # Ember Changelog
<ide>
<add>### v3.22.0-beta.3 (September 09, 2020)
<add>
<add>- [#19124](https://github.com/emberjs/ember.js/pull/19124) Fix rendering engine usage within a `fastboot` sandbox
<add>
<ide> ### v3.22.0-beta.2 (August 31, 2020)
<ide>
<ide> - [#19106](https://github.com/emberjs/ember.js/pull/19106) [BUGFIX] Ensure `destroy` methods on `CoreObject` are invoked.
| 1
|
Python
|
Python
|
add link to scikeras migration guide
|
f34e136e917eb0e8ec9403f719bbaee5ba2ab1fc
|
<ide><path>keras/wrappers/scikit_learn.py
<ide> class KerasClassifier(BaseWrapper):
<ide> """Implementation of the scikit-learn classifier API for Keras.
<ide>
<ide> DEPRECATED. Use [Sci-Keras](https://github.com/adriangb/scikeras) instead.
<add> See https://www.adriangb.com/scikeras/stable/migration.html for help migrating.
<ide> """
<ide>
<ide> def __init__(self, build_fn=None, **sk_params):
<ide> warnings.warn(
<ide> 'KerasClassifier is deprecated, '
<del> 'use Sci-Keras (https://github.com/adriangb/scikeras) instead.',
<add> 'use Sci-Keras (https://github.com/adriangb/scikeras) instead. '
<add> 'See https://www.adriangb.com/scikeras/stable/migration.html for help migrating.',
<ide> DeprecationWarning,
<ide> stacklevel=2)
<ide> super().__init__(build_fn, **sk_params)
<ide> class KerasRegressor(BaseWrapper):
<ide> """Implementation of the scikit-learn regressor API for Keras.
<ide>
<ide> DEPRECATED. Use [Sci-Keras](https://github.com/adriangb/scikeras) instead.
<add> See https://www.adriangb.com/scikeras/stable/migration.html for help migrating.
<ide> """
<ide>
<ide> @doc_controls.do_not_doc_inheritable
<ide> def __init__(self, build_fn=None, **sk_params):
<ide> warnings.warn(
<ide> 'KerasRegressor is deprecated, '
<del> 'use Sci-Keras (https://github.com/adriangb/scikeras) instead.',
<add> 'use Sci-Keras (https://github.com/adriangb/scikeras) instead. '
<add> 'See https://www.adriangb.com/scikeras/stable/migration.html for help migrating.',
<ide> DeprecationWarning,
<ide> stacklevel=2)
<ide> super().__init__(build_fn, **sk_params)
| 1
|
Python
|
Python
|
fix trainable arg
|
0a9c0ca461f280c988e252baaf5b13a12f6204f6
|
<ide><path>keras/models.py
<ide> def __init__(self, layers=[], name=None):
<ide> self.model = None # internal Model instance
<ide> self.inputs = [] # tensors
<ide> self.outputs = [] # tensors (length 1)
<del> self.trainable = True
<add> self._trainable = True
<ide>
<ide> # model attributes
<ide> self.inbound_nodes = []
<ide> def build(self, input_shape=None):
<ide> ' Add some layers first.')
<ide> # actually create the model
<ide> self.model = Model(self.inputs, self.outputs[0], name=self.name + '_model')
<add> self.model.trainable = self.trainable
<ide>
<ide> # mirror model attributes
<ide> self.supports_masking = self.model.supports_masking
<ide> def _gather_dict_attr(self, attr):
<ide> list(layer_dict.items()))
<ide> return all_attrs
<ide>
<add> @property
<add> def trainable(self):
<add> return self._trainable
<add>
<add> @trainable.setter
<add> def trainable(self, value):
<add> if self.model:
<add> self.model.trainable = value
<add> self._trainable = value
<add>
<ide> @property
<ide> def trainable_weights(self):
<ide> if not self.trainable:
| 1
|
Javascript
|
Javascript
|
fix couple of animation bugs
|
a73a8c4a5ec83b1fd55fdfbbffcaf0e3b4787025
|
<ide><path>src/core/core.animations.js
<ide> import {isObject} from '../helpers/helpers.core';
<ide>
<ide> const numbers = ['x', 'y', 'borderWidth', 'radius', 'tension'];
<ide> const colors = ['borderColor', 'backgroundColor'];
<del>const animationOptions = ['duration', 'easing', 'from', 'to', 'type', 'easing', 'loop', 'fn'];
<add>const animationOptions = ['delay', 'duration', 'easing', 'fn', 'from', 'loop', 'to', 'type'];
<ide>
<ide> defaults.set('animation', {
<ide> // Plain properties can be overridden in each object
<ide><path>src/core/core.config.js
<ide> import defaults from './core.defaults';
<del>import {mergeIf, resolveObjectKey, isArray, isFunction, valueOrDefault} from '../helpers/helpers.core';
<add>import {mergeIf, resolveObjectKey, isArray, isFunction, valueOrDefault, isObject} from '../helpers/helpers.core';
<ide> import {_attachContext, _createResolver, _descriptors} from '../helpers/helpers.config';
<ide>
<ide> export function getIndexAxis(type, options) {
<ide> export default class Config {
<ide>
<ide> /**
<ide> * @param {object[]} scopes
<del> * @param {function|object} context
<add> * @param {object} [context]
<add> * @param {string[]} [prefixes]
<ide> */
<ide> createResolver(scopes, context, prefixes = ['']) {
<del> const cached = getResolver(this._resolverCache, scopes, prefixes);
<del> return context && cached.needContext
<del> ? _attachContext(cached.resolver, isFunction(context) ? context() : context)
<del> : cached.resolver;
<add> const {resolver} = getResolver(this._resolverCache, scopes, prefixes);
<add> return isObject(context)
<add> ? _attachContext(resolver, isFunction(context) ? context() : context)
<add> : resolver;
<ide> }
<ide> }
<ide>
<ide> function getResolver(resolverCache, scopes, prefixes) {
<ide> const resolver = _createResolver(scopes, prefixes);
<ide> cached = {
<ide> resolver,
<del> subPrefixes: prefixes.filter(p => !p.toLowerCase().includes('hover')),
<del> needContext: needContext(resolver, Object.getOwnPropertyNames(resolver))
<add> subPrefixes: prefixes.filter(p => !p.toLowerCase().includes('hover'))
<ide> };
<ide> cache.set(cacheKey, cached);
<ide> }
<ide><path>src/core/core.datasetController.js
<ide> export default class DatasetController {
<ide> const config = me.chart.config;
<ide> const scopeKeys = config.datasetAnimationScopeKeys(me._type);
<ide> const scopes = config.getOptionScopes(me.getDataset().animation, scopeKeys);
<del> const context = () => me.getContext(index, active, mode);
<del> options = config.createResolver(scopes, context);
<add> options = config.createResolver(scopes, me.getContext(index, active, mode));
<ide> }
<ide> const animations = new Animations(chart, options && options[mode] || options);
<ide> if (options && options._cacheable) {
| 3
|
PHP
|
PHP
|
add the ability to set the default model type
|
2fd7ac50139d8ab969ca8a9bc15ad2717f1b1dd6
|
<ide><path>src/Datasource/ModelAwareTrait.php
<ide>
<ide> use Cake\Datasource\Exception\MissingModelException;
<ide> use InvalidArgumentException;
<add>use UnexpectedValueException;
<ide>
<ide> /**
<ide> * Provides functionality for loading table classes
<ide> trait ModelAwareTrait
<ide> */
<ide> protected $_modelFactories = [];
<ide>
<add> /**
<add> * The model type to use.
<add> *
<add> * @var string
<add> */
<add> protected $_modelType = 'Table';
<add>
<ide> /**
<ide> * Set the modelClass and modelKey properties based on conventions.
<ide> *
<ide> protected function _setModelClass($name)
<ide> * be thrown.
<ide> *
<ide> * @param string|null $modelClass Name of model class to load. Defaults to $this->modelClass
<del> * @param string $type The type of repository to load. Defaults to 'Table' which
<del> * delegates to Cake\ORM\TableRegistry.
<add> * @param string|null $modelType The type of repository to load. Defaults to the modelType() value.
<ide> * @return object The model instance created.
<ide> * @throws \Cake\Datasource\Exception\MissingModelException If the model class cannot be found.
<ide> * @throws \InvalidArgumentException When using a type that has not been registered.
<add> * @throws \UnexpectedValueException If no model type has been defined
<ide> */
<del> public function loadModel($modelClass = null, $type = 'Table')
<add> public function loadModel($modelClass = null, $modelType = null)
<ide> {
<ide> if ($modelClass === null) {
<ide> $modelClass = $this->modelClass;
<ide> }
<add> if ($modelType === null) {
<add> $modelType = $this->modelType();
<add>
<add> if ($modelType === null) {
<add> throw new UnexpectedValueException('No model type has been defined');
<add> }
<add> }
<ide>
<ide> list(, $alias) = pluginSplit($modelClass, true);
<ide>
<ide> if (isset($this->{$alias})) {
<ide> return $this->{$alias};
<ide> }
<ide>
<del> if (!isset($this->_modelFactories[$type])) {
<add> if (!isset($this->_modelFactories[$modelType])) {
<ide> throw new InvalidArgumentException(sprintf(
<ide> 'Unknown repository type "%s". Make sure you register a type before trying to use it.',
<del> $type
<add> $modelType
<ide> ));
<ide> }
<del> $factory = $this->_modelFactories[$type];
<add> $factory = $this->_modelFactories[$modelType];
<ide> $this->{$alias} = $factory($modelClass);
<ide> if (!$this->{$alias}) {
<del> throw new MissingModelException([$modelClass, $type]);
<add> throw new MissingModelException([$modelClass, $modelType]);
<ide> }
<ide> return $this->{$alias};
<ide> }
<ide> public function modelFactory($type, callable $factory)
<ide> {
<ide> $this->_modelFactories[$type] = $factory;
<ide> }
<add>
<add> /**
<add> * Set or get the model type to be used by this class.
<add> *
<add> * @param string|null $modelType The model type or null to retrieve the current one
<add> *
<add> * @return string|$this
<add> */
<add> public function modelType($modelType = null)
<add> {
<add> if ($modelType === null) {
<add> return $this->_modelType;
<add> }
<add>
<add> $this->_modelType = $modelType;
<add>
<add> return $this;
<add> }
<ide> }
<ide><path>tests/TestCase/Datasource/ModelAwareTraitTest.php
<ide> public function testLoadModel()
<ide> $stub = new Stub();
<ide> $stub->setProps('Articles');
<ide> $stub->modelFactory('Table', ['\Cake\ORM\TableRegistry', 'get']);
<add> $stub->modelType('Table');
<ide>
<ide> $result = $stub->loadModel();
<ide> $this->assertInstanceOf('Cake\ORM\Table', $result);
<ide> public function testLoadModelPlugin()
<ide> $stub = new Stub();
<ide> $stub->setProps('Articles');
<ide> $stub->modelFactory('Table', ['\Cake\ORM\TableRegistry', 'get']);
<add> $stub->modelType('Table');
<ide>
<ide> $result = $stub->loadModel('TestPlugin.Comments');
<ide> $this->assertInstanceOf('TestPlugin\Model\Table\CommentsTable', $result);
<ide> public function testModelFactory()
<ide> $this->assertEquals('Magic', $stub->Magic->name);
<ide> }
<ide>
<add> /**
<add> * test alternate default model type.
<add> *
<add> * @return void
<add> */
<add> public function testModelType()
<add> {
<add> $stub = new Stub();
<add> $stub->setProps('Articles');
<add>
<add> $stub->modelFactory('Test', function ($name) {
<add> $mock = new \StdClass();
<add> $mock->name = $name;
<add> return $mock;
<add> });
<add> $stub->modelType('Test');
<add>
<add> $result = $stub->loadModel('Magic');
<add> $this->assertInstanceOf('\StdClass', $result);
<add> $this->assertInstanceOf('\StdClass', $stub->Magic);
<add> $this->assertEquals('Magic', $stub->Magic->name);
<add> }
<add>
<ide> /**
<ide> * test MissingModelException being thrown
<ide> *
| 2
|
Javascript
|
Javascript
|
upgrade jest apis to match www
|
efaa26eb50169aba7f9194119faf64b67cbff460
|
<ide><path>src/addons/transitions/__tests__/ReactCSSTransitionGroup-test.js
<ide> describe('ReactCSSTransitionGroup', () => {
<ide> var container;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactCSSTransitionGroup = require('ReactCSSTransitionGroup');
<ide><path>src/isomorphic/children/__tests__/ReactChildren-test.js
<ide> describe('ReactChildren', () => {
<ide> var ReactFragment;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactFragment = require('ReactFragment');
<ide> });
<ide><path>src/isomorphic/classic/__tests__/ReactContextValidator-test.js
<ide> describe('ReactContextValidator', () => {
<ide> }
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/isomorphic/classic/element/__tests__/ReactElement-test.js
<ide> describe('ReactElement', () => {
<ide> var originalSymbol;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> // Delete the native Symbol if we have one to ensure we test the
<ide> // unpolyfilled environment.
<ide> describe('ReactElement', () => {
<ide> return OTHER_SYMBOL;
<ide> };
<ide>
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide>
<ide> describe('comparing jsx vs .createFactory() vs .createElement()', () => {
<ide> var Child;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide><path>src/isomorphic/classic/element/__tests__/ReactElementValidator-test.js
<ide> describe('ReactElementValidator', () => {
<ide> var ComponentClass;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/isomorphic/classic/types/__tests__/ReactPropTypes-test.js
<ide> describe('ReactPropTypes', () => {
<ide>
<ide> describe('Custom validator', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> });
<ide>
<ide> it('should have been called with the right params', () => {
<ide><path>src/isomorphic/classic/types/__tests__/ReactPropTypesProduction-test.js
<ide> describe('ReactPropTypesProduction', function() {
<ide> env: Object.assign({}, process.env, {NODE_ENV: 'production'}),
<ide> };
<ide>
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> PropTypes = require('ReactPropTypes');
<ide> React = require('React');
<ide> ReactPropTypeLocations = require('ReactPropTypeLocations');
<ide> describe('ReactPropTypesProduction', function() {
<ide>
<ide> describe('Custom validator', function() {
<ide> beforeEach(function() {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> });
<ide>
<ide> it('should not have been called', function() {
<ide><path>src/isomorphic/modern/element/__tests__/ReactJSXElement-test.js
<ide> describe('ReactJSXElement', () => {
<ide> var Component;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/isomorphic/modern/element/__tests__/ReactJSXElementValidator-test.js
<ide> describe('ReactJSXElementValidator', () => {
<ide> var RequiredPropComponent;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/renderers/dom/__tests__/ReactDOMProduction-test.js
<ide> describe('ReactDOMProduction', () => {
<ide> env: Object.assign({}, process.env, {NODE_ENV: 'production'}),
<ide> };
<ide>
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> });
<ide><path>src/renderers/dom/shared/__tests__/CSSProperty-test.js
<ide> describe('CSSProperty', () => {
<ide> var CSSProperty;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> CSSProperty = require('CSSProperty');
<ide> });
<ide>
<ide><path>src/renderers/dom/shared/__tests__/CSSPropertyOperations-test.js
<ide> describe('CSSPropertyOperations', () => {
<ide> var CSSPropertyOperations;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> CSSPropertyOperations = require('CSSPropertyOperations');
<ide> });
<ide>
<ide><path>src/renderers/dom/shared/__tests__/DOMPropertyOperations-test.js
<ide> describe('DOMPropertyOperations', () => {
<ide> var ReactDOMComponentTree;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> var ReactDOMInjection = require('ReactDOMInjection');
<ide> ReactDOMInjection.inject();
<ide>
<ide><path>src/renderers/dom/shared/__tests__/ReactBrowserEventEmitter-test.js
<ide> function registerSimpleTestHandler() {
<ide>
<ide> describe('ReactBrowserEventEmitter', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> LISTENER.mockClear();
<ide> EventListener = require('EventListener');
<ide> EventPluginHub = require('EventPluginHub');
<ide><path>src/renderers/dom/shared/__tests__/ReactDOMComponent-test.js
<ide> describe('ReactDOMComponent', () => {
<ide> }
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactDOMFeatureFlags = require('ReactDOMFeatureFlags');
<ide> describe('ReactDOMComponent', () => {
<ide> it('should warn about the `onScroll` issue when unsupported (IE8)', () => {
<ide> // Mock this here so we can mimic IE8 support. We require isEventSupported
<ide> // before React so it's pre-mocked before React would require it.
<del> jest.resetModuleRegistry()
<add> jest.resetModules()
<ide> .mock('isEventSupported');
<ide> var isEventSupported = require('isEventSupported');
<ide> isEventSupported.mockReturnValueOnce(false);
<ide><path>src/renderers/dom/shared/__tests__/ReactDOMInvalidARIAHook-test.js
<ide> describe('ReactDOMInvalidARIAHook', () => {
<ide> var mountComponent;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide>
<ide><path>src/renderers/dom/shared/__tests__/ReactEventIndependence-test.js
<ide> var ReactTestUtils;
<ide>
<ide> describe('ReactEventIndependence', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/renderers/dom/shared/__tests__/ReactEventListener-test.js
<ide> describe('ReactEventListener', () => {
<ide> var handleTopLevel;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactDOMComponentTree = require('ReactDOMComponentTree');
<ide><path>src/renderers/dom/shared/__tests__/ReactMount-test.js
<ide> var WebComponents;
<ide>
<ide> describe('ReactMount', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> describe('ReactMount', () => {
<ide> });
<ide>
<ide> it('should warn if the unmounted node was rendered by another copy of React', () => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> var ReactDOMOther = require('ReactDOM');
<ide> var container = document.createElement('div');
<ide>
<ide><path>src/renderers/dom/shared/__tests__/ReactRenderDocument-test.js
<ide> var UNMOUNT_INVARIANT_MESSAGE =
<ide>
<ide> describe('rendering React components at document', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/renderers/dom/shared/__tests__/ReactServerRendering-test.js
<ide> var ROOT_ATTRIBUTE_NAME;
<ide>
<ide> describe('ReactDOMServer', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactDOMFeatureFlags = require('ReactDOMFeatureFlags');
<ide><path>src/renderers/dom/shared/__tests__/validateDOMNesting-test.js
<ide> function isTagStackValid(stack) {
<ide>
<ide> describe('ReactContextValidator', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> validateDOMNesting = require('validateDOMNesting');
<ide> });
<ide><path>src/renderers/dom/shared/eventPlugins/__tests__/BeforeInputEventPlugin-test.js
<ide> describe('BeforeInputEventPlugin', function() {
<ide>
<ide> function initialize(simulator) {
<ide> // Need to delete cached modules before executing simulator
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> // Initialize variables in the scope of BeforeInputEventPlugin
<ide> simulator();
<ide><path>src/renderers/dom/shared/eventPlugins/__tests__/EnterLeaveEventPlugin-test.js
<ide> var ReactDOMComponentTree;
<ide>
<ide> describe('EnterLeaveEventPlugin', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> EnterLeaveEventPlugin = require('EnterLeaveEventPlugin');
<ide> React = require('React');
<ide><path>src/renderers/dom/shared/wrappers/__tests__/ReactDOMInput-test.js
<ide> describe('ReactDOMInput', () => {
<ide> }
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactDOMServer = require('ReactDOMServer');
<ide><path>src/renderers/native/__tests__/ReactNativeEvents-test.js
<ide> var UIManager;
<ide> var createReactNativeComponentClass;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> RCTEventEmitter = require('RCTEventEmitter');
<ide> React = require('React');
<ide><path>src/renderers/shared/__tests__/ReactDebugTool-test.js
<ide> describe('ReactDebugTool', () => {
<ide> var ReactDebugTool;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> ReactDebugTool = require('ReactDebugTool');
<ide> });
<ide>
<ide><path>src/renderers/shared/fiber/__tests__/ReactCoroutine-test.js
<ide> var ReactCoroutine;
<ide>
<ide> describe('ReactCoroutine', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> ReactCoroutine = require('ReactCoroutine');
<ide><path>src/renderers/shared/fiber/__tests__/ReactIncremental-test.js
<ide> var ReactNoop;
<ide>
<ide> describe('ReactIncremental', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> });
<ide><path>src/renderers/shared/fiber/__tests__/ReactIncrementalErrorHandling-test.js
<ide> var ReactNoop;
<ide>
<ide> describe('ReactIncrementalErrorHandling', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> });
<ide><path>src/renderers/shared/fiber/__tests__/ReactIncrementalReflection-test.js
<ide> var ReactNoop;
<ide>
<ide> describe('ReactIncrementalReflection', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> });
<ide><path>src/renderers/shared/fiber/__tests__/ReactIncrementalScheduling-test.js
<ide> var ReactNoop;
<ide>
<ide> describe('ReactIncrementalScheduling', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> });
<ide><path>src/renderers/shared/fiber/__tests__/ReactIncrementalSideEffects-test.js
<ide> var ReactNoop;
<ide>
<ide> describe('ReactIncrementalSideEffects', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> });
<ide><path>src/renderers/shared/fiber/__tests__/ReactTopLevelFragment-test.js
<ide> var ReactNoop;
<ide> // probably move to one of the other test files once it is official.
<ide> describe('ReactTopLevelFragment', function() {
<ide> beforeEach(function() {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> });
<ide><path>src/renderers/shared/fiber/__tests__/ReactTopLevelText-test.js
<ide> var ReactNoop;
<ide> // probably move to one of the other test files once it is official.
<ide> describe('ReactTopLevelText', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactNoop = require('ReactNoop');
<ide> });
<ide><path>src/renderers/shared/hooks/__tests__/ReactComponentTreeHook-test.js
<ide> describe('ReactComponentTreeHook', () => {
<ide> var ReactComponentTreeTestUtils;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> describe('ReactComponentTreeHook', () => {
<ide> global.Set = undefined;
<ide> Array.from = undefined;
<ide>
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/renderers/shared/hooks/__tests__/ReactComponentTreeHook-test.native.js
<ide> describe('ReactComponentTreeHook', () => {
<ide> var Text;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactNative = require('ReactNative');
<ide><path>src/renderers/shared/hooks/__tests__/ReactHostOperationHistoryHook-test.js
<ide> describe('ReactHostOperationHistoryHook', () => {
<ide> var ReactHostOperationHistoryHook;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactPerf = require('ReactPerf');
<ide><path>src/renderers/shared/shared/__tests__/ReactChildReconciler-test.js
<ide> describe('ReactChildReconciler', () => {
<ide> }
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide><path>src/renderers/shared/shared/__tests__/ReactComponentLifeCycle-test.js
<ide> function getLifeCycleState(instance): ComponentLifeCycle {
<ide> */
<ide> describe('ReactComponentLifeCycle', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide><path>src/renderers/shared/shared/__tests__/ReactCompositeComponent-test.js
<ide> var ReactTestUtils;
<ide> describe('ReactCompositeComponent', () => {
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactDOMFeatureFlags = require('ReactDOMFeatureFlags');
<ide><path>src/renderers/shared/shared/__tests__/ReactEmptyComponent-test.js
<ide> var log;
<ide>
<ide> describe('ReactEmptyComponent', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/renderers/shared/shared/__tests__/ReactIdentity-test.js
<ide> var ReactTestUtils;
<ide> describe('ReactIdentity', () => {
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactFragment = require('ReactFragment');
<ide><path>src/renderers/shared/shared/__tests__/ReactMultiChild-test.js
<ide> describe('ReactMultiChild', () => {
<ide> var ReactDOMFeatureFlags;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide> ReactDOMFeatureFlags = require('ReactDOMFeatureFlags');
<ide><path>src/renderers/shared/shared/__tests__/ReactMultiChildReconcile-test.js
<ide> function testPropsSequence(sequence) {
<ide>
<ide> describe('ReactMultiChildReconcile', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> });
<ide>
<ide> it('should reset internal state if removed then readded in an array', () => {
<ide><path>src/renderers/shared/shared/__tests__/ReactStateSetters-test.js
<ide> var TestComponentWithMixin;
<ide>
<ide> describe('ReactStateSetters', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> TestComponent = class extends React.Component {
<ide> state = {foo: 'foo'};
<ide><path>src/renderers/shared/shared/__tests__/refs-destruction-test.js
<ide> var TestComponent;
<ide>
<ide> describe('refs-destruction', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> React = require('React');
<ide> ReactDOM = require('ReactDOM');
<ide><path>src/renderers/shared/shared/__tests__/refs-test.js
<ide> var expectClickLogsLengthToBe = function(instance, length) {
<ide>
<ide> describe('reactiverefs', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide> });
<ide> describe('reactiverefs', () => {
<ide> describe('ref swapping', () => {
<ide> let RefHopsAround;
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide>
<ide> describe('ref swapping', () => {
<ide>
<ide> describe('string refs between fiber and stack', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide> });
<ide><path>src/renderers/shared/shared/event/__tests__/EventPluginHub-test.js
<ide> describe('EventPluginHub', () => {
<ide> var ReactTestUtils;
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> React = require('React');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide> });
<ide><path>src/renderers/shared/shared/event/eventPlugins/__tests__/ResponderEventPlugin-test.js
<ide> function deleteAllListeners(node) {
<ide> describe('ResponderEventPlugin', () => {
<ide>
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> EventPluginHub = require('EventPluginHub');
<ide> EventPluginUtils = require('EventPluginUtils');
<ide><path>src/renderers/shared/stack/reconciler/__tests__/Transaction-test.js
<ide> var Transaction;
<ide> var INIT_ERRORED = 'initErrored'; // Just a dummy value to check for.
<ide> describe('Transaction', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> Transaction = require('Transaction');
<ide> });
<ide>
<ide><path>src/renderers/shared/utils/__tests__/ReactErrorUtils-test.js
<ide> describe('ReactErrorUtils', () => {
<ide> global.process = {
<ide> env: Object.assign({}, process.env, {NODE_ENV: 'production'}),
<ide> };
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> ReactErrorUtils = require('ReactErrorUtils');
<ide> expect(ReactErrorUtils.invokeGuardedCallback).toEqual(
<ide> ReactErrorUtils.invokeGuardedCallbackWithCatch
<ide><path>src/shared/utils/__tests__/KeyEscapeUtils-test.js
<ide> var KeyEscapeUtils;
<ide>
<ide> describe('KeyEscapeUtils', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide>
<ide> KeyEscapeUtils = require('KeyEscapeUtils');
<ide> });
<ide><path>src/shared/utils/__tests__/reactProdInvariant-test.js
<ide> var reactProdInvariant;
<ide>
<ide> describe('reactProdInvariant', () => {
<ide> beforeEach(() => {
<del> jest.resetModuleRegistry();
<add> jest.resetModules();
<ide> reactProdInvariant = require('reactProdInvariant');
<ide> });
<ide>
| 54
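A note on the pattern above: `jest.resetModules()` is the current name for what `jest.resetModuleRegistry()` did, clearing Jest's module registry so each test re-requires fresh module instances. A minimal sketch of the shape these suites follow (module names are placeholders):

```js
// Resetting the registry makes every require() return a fresh copy,
// so module-level state (caches, injected plugins) cannot leak between tests.
let React;
let ReactDOM;

beforeEach(() => {
  jest.resetModules();        // renamed from the older resetModuleRegistry()
  React = require('React');   // re-required after the reset
  ReactDOM = require('ReactDOM');
});
```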
|
Ruby
|
Ruby
|
move add_counter_cache_methods to class level
|
cae842e662bbf6d6e6c87857d6ab68a982b1663f
|
<ide><path>activerecord/lib/active_record/associations/builder/belongs_to.rb
<ide> def self.define_callbacks(model, reflection)
<ide>
<ide> def define_accessors(mixin, reflection)
<ide> super
<del> add_counter_cache_methods mixin
<add> self.class.add_counter_cache_methods mixin
<ide> end
<ide>
<ide> private
<ide>
<del> def add_counter_cache_methods(mixin)
<add> def self.add_counter_cache_methods(mixin)
<ide> return if mixin.method_defined? :belongs_to_counter_cache_after_create
<ide>
<ide> mixin.class_eval do
| 1
|
Ruby
|
Ruby
|
fix invalid rack response in test
|
c5537c1158c91e3244d24751b1e86290b136dc09
|
<ide><path>actionpack/test/dispatch/request/xml_params_parsing_test.rb
<ide> class Linted
<ide> def call(env)
<ide> bar = env['action_dispatch.request.request_parameters']['foo']
<ide> result = "<ok>#{bar}</ok>"
<del> [200, {"Content-Type" => "application/xml", "Content-Length" => result.length.to_s}, result]
<add> [200, {"Content-Type" => "application/xml", "Content-Length" => result.length.to_s}, [result]]
<ide> end
<ide> end
<ide> req = Rack::MockRequest.new(ActionDispatch::ParamsParser.new(Linted.new))
| 1
|
PHP
|
PHP
|
fix feedback from review
|
04416be02203e74783c356c79fb9ccfaca673579
|
<ide><path>src/Database/Connection.php
<ide> public function __construct(array $config)
<ide> $this->setDriver($driver, $config);
<ide>
<ide> if (!empty($config['log'])) {
<del> $this->enableQueryLogging($config['log']);
<add> $this->enableQueryLogging((bool)$config['log']);
<ide> }
<ide> }
<ide>
<ide> public function cacheMetadata($cache): void
<ide> */
<ide> public function enableQueryLogging(bool $value): ConnectionInterface
<ide> {
<del> $this->_logQueries = (bool)$value;
<add> $this->_logQueries = $value;
<ide>
<ide> return $this;
<ide> }
<ide><path>src/Database/Exception/NestedTransactionRollbackException.php
<ide> namespace Cake\Database\Exception;
<ide>
<ide> use Cake\Core\Exception\Exception;
<add>use Throwable;
<ide>
<ide> /**
<ide> * Class NestedTransactionRollbackException
<ide> class NestedTransactionRollbackException extends Exception
<ide> *
<ide> * @param string|null $message If no message is given a default message will be used.
<ide> * @param int $code Status code, defaults to 500.
<del> * @param \Exception|null $previous the previous exception.
<add> * @param \Throwable|null $previous the previous exception.
<ide> */
<del> public function __construct(?string $message = null, int $code = 500, ?\Exception $previous = null)
<add> public function __construct(?string $message = null, int $code = 500, ?Throwable $previous = null)
<ide> {
<ide> if ($message === null) {
<ide> $message = 'Cannot commit transaction - rollback() has been already called in the nested transaction';
<ide><path>src/Database/Log/QueryLogger.php
<ide> public function log(LoggedQuery $query): void
<ide> * @param \Cake\Database\Log\LoggedQuery $query to be written in log
<ide> * @return void
<ide> */
<del> protected function _log(\Cake\Database\Log\LoggedQuery $query): void
<add> protected function _log(LoggedQuery $query): void
<ide> {
<ide> Log::write('debug', $query, ['queriesLog']);
<ide> }
<ide> protected function _log(\Cake\Database\Log\LoggedQuery $query): void
<ide> * @param \Cake\Database\Log\LoggedQuery $query The query to log
<ide> * @return string
<ide> */
<del> protected function _interpolate(\Cake\Database\Log\LoggedQuery $query): string
<add> protected function _interpolate(LoggedQuery $query): string
<ide> {
<ide> $params = array_map(function ($p) {
<ide> if ($p === null) {
<ide><path>src/Database/SqlserverCompiler.php
<ide> class SqlserverCompiler extends QueryCompiler
<ide> * @param \Cake\Database\ValueBinder $generator the placeholder generator to be used in expressions
<ide> * @return string
<ide> */
<del> protected function _buildInsertPart(array $parts, \Cake\Database\Query $query, \Cake\Database\ValueBinder $generator): string
<add> protected function _buildInsertPart(array $parts, Query $query, ValueBinder $generator): string
<ide> {
<ide> $table = $parts[0];
<ide> $columns = $this->_stringifyExpressions($parts[1], $generator);
| 4
|
Text
|
Text
|
add html elements (from line 67 to 114)
|
0a8b93691597e9269ddce12702d7701f4744e108
|
<ide><path>guide/english/html/index.md
<ide> Since the early days of the web, there have been many versions of HTML
<ide> - [HTML Elements](https://guide.freecodecamp.org/html/elements)
<ide> - [Semantic HTML](https://guide.freecodecamp.org/html/html5-semantic-elements)
<ide> - [HTML Attributes](https://guide.freecodecamp.org/html/attributes)
<add>
<add># HTML Elements
<add>
<add>The extent of an element is indicated by a pair of tags: a "start tag" `<p>` and an "end tag" `</p>`. The text content of the element, if any, is placed between these tags.
<add>
<add>Tags may also enclose further tag markup between the start and end, including a mixture of tags and text. This indicates further (nested) elements, as children of the parent element.
<add>
<add>The start tag may also include attributes within the tag. These carry extra information, such as identifiers for sections within the document, identifiers used to bind style information to the presentation of the document, and, for tags such as `<img>` that embed images, the reference to the image resource.
<add>
<add>Some elements, such as the line break `<br>` or `<br/>`, do not permit any embedded content, either text or further tags. These require only a single empty tag (start tag) and do not use an end tag.
<add>
<add>### Element examples
<add>
<add>Header of the HTML document:
<add>
<add>```html
<add><head>...</head>
<add>```
<add>The `<title>...</title>` element is normally included in the head. For example:
<add>
<add>```html
<add><head>
<add> <title>The Title</title>
<add></head>
<add>```
<add>
<add>**Headings**
<add>HTML headings are defined with the `<h1>` to `<h6>` tags:
<add>
<add>```html
<add><h1>Heading 1</h1>
<add><h2>Heading 2</h2>
<add><h3>Heading 3</h3>
<add><h4>Heading 4</h4>
<add><h5>Heading 5</h5>
<add><h6>Heading 6</h6>
<add>```
<add>
<add>**Paragraphs**
<add>
<add>```html
<add><p>Paragraph 1</p>
<add><p>Paragraph 2</p>
<add>```
<add>
<add>**Line Breaks**
<add>
<add>```html
<add><br/>
<add>```
<add>
<add>The difference between `<br/>` and `<p>` is that `br` breaks a line without altering the semantic structure of the page, whereas `p` sections the page into paragraphs. Note also that `br` is an empty element in that, although it may have attributes, it can take no content and it may not have an end tag.
<add>
<add>```html
<add><p>This is a paragraph <br> with <br> line breaks</p>
<add>```
<add>
<add>**Anchor/Links**
<add>
<add>To create a link the `<a>` tag is used. The href attribute holds the URL address of the link.
<add>
<add>```html
<add><a href="https://www.youtube.com">A link to Youtube!</a>
<add>```
<add>
<add>**Inputs**
<add>
<add>There are many ways a user can provide input, for example:
<add>
<add>```html
<add><input type="text" /> <!-- This is for text input -->
<add><input type="file" /> <!-- This is for uploading files -->
<add><input type="checkbox" /> <!-- This is for checkboxes -->
<add>```
<add>
<add>**Comments**
<add>
<add>```html
<add><!-- This is a comment -->
<add>```
<add>
<add>Comments help in understanding the markup and are not displayed in the webpage.
| 1
|
Javascript
|
Javascript
|
add method for changing url params
|
77a1acc7fcad7a8a7d0376b33d38a8977372cfe2
|
<ide><path>src/ngRoute/route.js
<ide> */
<ide> /* global -ngRouteModule */
<ide> var ngRouteModule = angular.module('ngRoute', ['ng']).
<del> provider('$route', $RouteProvider);
<add> provider('$route', $RouteProvider),
<add> $routeMinErr = angular.$$minErr('ngRoute');
<ide>
<ide> /**
<ide> * @ngdoc provider
<ide> function $RouteProvider(){
<ide> reload: function() {
<ide> forceReload = true;
<ide> $rootScope.$evalAsync(updateRoute);
<add> },
<add>
<add> /**
<add> * @ngdoc method
<add> * @name $route#updateParams
<add> *
<add> * @description
<add> * Causes the `$route` service to update the current URL, replacing
<add> * current route parameters with those specified in `newParams`.
<add> * Provided property names that match the route's path segment
<add> * definitions will be interpolated into the location's path, while
<add> * remaining properties will be treated as query params.
<add> *
<add> * @param {Object} newParams mapping of URL parameter names to values
<add> */
<add> updateParams: function(newParams) {
<add> if (this.current && this.current.$$route) {
<add> var searchParams = {}, self = this;
<add>
<add> angular.forEach(Object.keys(newParams), function(key) {
<add> if (!self.current.pathParams[key]) searchParams[key] = newParams[key];
<add> });
<add>
<add> newParams = angular.extend({}, this.current.params, newParams);
<add> $location.path(interpolate(this.current.$$route.originalPath, newParams));
<add> $location.search(angular.extend({}, $location.search(), searchParams));
<add> }
<add> else {
<add> throw $routeMinErr('norout', 'Tried updating route with no current route');
<add> }
<ide> }
<ide> };
<ide>
<ide><path>test/ngRoute/routeSpec.js
<ide> describe('$route', function() {
<ide> });
<ide> });
<ide>
<del>
<ide> describe('reload', function() {
<ide>
<ide> it('should reload even if reloadOnSearch is false', function() {
<ide> describe('$route', function() {
<ide> });
<ide> });
<ide> });
<add>
<add> describe('update', function() {
<add> it('should support single-parameter route updating', function() {
<add> var routeChangeSpy = jasmine.createSpy('route change');
<add>
<add> module(function($routeProvider) {
<add> $routeProvider.when('/bar/:barId', {controller: angular.noop});
<add> });
<add>
<add> inject(function($route, $routeParams, $location, $rootScope) {
<add> $rootScope.$on('$routeChangeSuccess', routeChangeSpy);
<add>
<add> $location.path('/bar/1');
<add> $rootScope.$digest();
<add> routeChangeSpy.reset();
<add>
<add> $route.updateParams({barId: '2'});
<add> $rootScope.$digest();
<add>
<add> expect($routeParams).toEqual({barId: '2'});
<add> expect(routeChangeSpy).toHaveBeenCalledOnce();
<add> expect($location.path()).toEqual('/bar/2');
<add> });
<add> });
<add>
<add> it('should support total multi-parameter route updating', function() {
<add> var routeChangeSpy = jasmine.createSpy('route change');
<add>
<add> module(function($routeProvider) {
<add> $routeProvider.when('/bar/:barId/:fooId/:spamId/:eggId', {controller: angular.noop});
<add> });
<add>
<add> inject(function($route, $routeParams, $location, $rootScope) {
<add> $rootScope.$on('$routeChangeSuccess', routeChangeSpy);
<add>
<add> $location.path('/bar/1/2/3/4');
<add> $rootScope.$digest();
<add> routeChangeSpy.reset();
<add>
<add> $route.updateParams({barId: '5', fooId: '6', spamId: '7', eggId: '8'});
<add> $rootScope.$digest();
<add>
<add> expect($routeParams).toEqual({barId: '5', fooId: '6', spamId: '7', eggId: '8'});
<add> expect(routeChangeSpy).toHaveBeenCalledOnce();
<add> expect($location.path()).toEqual('/bar/5/6/7/8');
<add> });
<add> });
<add>
<add> it('should support partial multi-parameter route updating', function() {
<add> var routeChangeSpy = jasmine.createSpy('route change');
<add>
<add> module(function($routeProvider) {
<add> $routeProvider.when('/bar/:barId/:fooId/:spamId/:eggId', {controller: angular.noop});
<add> });
<add>
<add> inject(function($route, $routeParams, $location, $rootScope) {
<add> $rootScope.$on('$routeChangeSuccess', routeChangeSpy);
<add>
<add> $location.path('/bar/1/2/3/4');
<add> $rootScope.$digest();
<add> routeChangeSpy.reset();
<add>
<add> $route.updateParams({barId: '5', fooId: '6'});
<add> $rootScope.$digest();
<add>
<add> expect($routeParams).toEqual({barId: '5', fooId: '6', spamId: '3', eggId: '4'});
<add> expect(routeChangeSpy).toHaveBeenCalledOnce();
<add> expect($location.path()).toEqual('/bar/5/6/3/4');
<add> });
<add> });
<add>
<add>
<add> it('should update query params when new properties are not in path', function() {
<add> var routeChangeSpy = jasmine.createSpy('route change');
<add>
<add> module(function($routeProvider) {
<add> $routeProvider.when('/bar/:barId/:fooId/:spamId/', {controller: angular.noop});
<add> });
<add>
<add> inject(function($route, $routeParams, $location, $rootScope) {
<add> $rootScope.$on('$routeChangeSuccess', routeChangeSpy);
<add>
<add> $location.path('/bar/1/2/3');
<add> $location.search({initial: 'true'});
<add> $rootScope.$digest();
<add> routeChangeSpy.reset();
<add>
<add> $route.updateParams({barId: '5', fooId: '6', eggId: '4'});
<add> $rootScope.$digest();
<add>
<add> expect($routeParams).toEqual({barId: '5', fooId: '6', spamId: '3', eggId: '4', initial: 'true'});
<add> expect(routeChangeSpy).toHaveBeenCalledOnce();
<add> expect($location.path()).toEqual('/bar/5/6/3/');
<add> expect($location.search()).toEqual({eggId: '4', initial: 'true'});
<add> });
<add> });
<add>
<add>
<add> it('should complain if called without an existing route', inject(function($route) {
<add> expect($route.updateParams).toThrowMinErr('ngRoute', 'norout');
<add> }));
<add> });
<ide> });
| 2
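A hedged usage sketch for the `updateParams` method added above, assuming a route registered as `/bar/:barId` (the module, controller, and values are illustrative): keys that match path segments are interpolated into the path, and the rest become query parameters.

```js
// Assuming $routeProvider.when('/bar/:barId', {...}) and a current URL of /bar/1:
angular.module('app', ['ngRoute'])
  .controller('BarCtrl', ['$route', function($route) {
    this.goToNext = function() {
      // barId matches a path segment, so the path becomes /bar/2;
      // highlight does not, so it is appended as ?highlight=true
      $route.updateParams({barId: '2', highlight: 'true'});
    };
  }]);
```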
|
Javascript
|
Javascript
|
use correct npm bin name depending on the os
|
309ecff84d2635bb4d615e4867ed967c9a8b8a60
|
<ide><path>script/lib/install-script-dependencies.js
<ide> process.env.ELECTRON_CUSTOM_VERSION = CONFIG.appMetadata.electronVersion;
<ide>
<ide> module.exports = function(ci) {
<ide> console.log('Installing script dependencies');
<add> const npmBinName = process.platform === 'win32' ? 'npm.cmd' : 'npm';
<ide> childProcess.execFileSync(
<del> 'npm',
<add> npmBinName,
<ide> ['--loglevel=error', ci ? 'ci' : 'install'],
<ide> { env: process.env, cwd: CONFIG.scriptRootPath }
<ide> );
| 1
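The guard above generalizes to any npm-style tool spawned with `execFile` on Windows, where the launcher is a `.cmd` shim rather than a bare executable. A small illustrative helper (the `run` function is a sketch, not part of the script):

```js
const { execFileSync } = require('child_process');

// On Windows, npm and similar tools install as .cmd shims, which
// execFile cannot launch by their bare name.
function run(binName, args, options) {
  const bin = process.platform === 'win32' ? `${binName}.cmd` : binName;
  return execFileSync(bin, args, options);
}

// e.g. run('npm', ['--version'], { env: process.env });
```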
|
Ruby
|
Ruby
|
remove heroku formula
|
5e68a0872e0540adacd4d5ff2a988113eba045d2
|
<ide><path>Library/Homebrew/dev-cmd/audit.rb
<ide> def audit_specs
<ide> throttled = %w[
<ide> aws-sdk-cpp 10
<ide> awscli 10
<del> heroku 10
<ide> quicktype 10
<ide> vim 50
<ide> ]
| 1
|
Python
|
Python
|
allow the same config in the auto mapping
|
b1a7dfe099b852340868f9aa7c75bb805ce57596
|
<ide><path>src/transformers/models/auto/configuration_auto.py
<ide> def __getitem__(self, key):
<ide> module_name = model_type_to_module_name(key)
<ide> if module_name not in self._modules:
<ide> self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models")
<del> return getattr(self._modules[module_name], value)
<add> if hasattr(self._modules[module_name], value):
<add> return getattr(self._modules[module_name], value)
<add>
<add> # Some of the mappings have entries model_type -> config of another model type. In that case we try to grab the
<add> # object at the top level.
<add> transformers_module = importlib.import_module("transformers")
<add> return getattr(transformers_module, value)
<ide>
<ide> def keys(self):
<ide> return list(self._mapping.keys()) + list(self._extra_content.keys())
| 1
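The lookup strategy in this patch (resolve the name on a lazily imported submodule, then fall back to the package root) is language-agnostic. A hypothetical JavaScript rendering of the same idea, with `./models/...` and `./index` standing in for the transformers modules:

```js
const moduleCache = new Map();

function lazyLookup(moduleName, exportName) {
  if (!moduleCache.has(moduleName)) {
    // require() plays the role of importlib.import_module here
    moduleCache.set(moduleName, require(`./models/${moduleName}`));
  }
  const mod = moduleCache.get(moduleName);
  if (exportName in mod) return mod[exportName];
  // Some mappings point at an object defined for another model type,
  // so fall back to the top-level package exports.
  return require('./index')[exportName];
}
```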
|
Javascript
|
Javascript
|
remove unnecessary checks
|
33585cf027b516eb549a3902d474a2064078b123
|
<ide><path>lib/buildChunkGraph.js
<ide> const visitModules = (
<ide> }
<ide> // fallthrough
<ide> case ENTER_MODULE: {
<del> if (chunkGroup !== undefined) {
<del> const index = chunkGroup.getModulePreOrderIndex(module);
<del> if (index === undefined) {
<del> chunkGroup.setModulePreOrderIndex(
<del> module,
<del> chunkGroupCounters.get(chunkGroup).preOrderIndex++
<del> );
<del> }
<add> const index = chunkGroup.getModulePreOrderIndex(module);
<add> if (index === undefined) {
<add> chunkGroup.setModulePreOrderIndex(
<add> module,
<add> chunkGroupCounters.get(chunkGroup).preOrderIndex++
<add> );
<ide> }
<ide>
<ide> if (
<ide> const visitModules = (
<ide> break;
<ide> }
<ide> case LEAVE_MODULE: {
<del> if (chunkGroup !== undefined) {
<del> const index = chunkGroup.getModulePostOrderIndex(module);
<del> if (index === undefined) {
<del> chunkGroup.setModulePostOrderIndex(
<del> module,
<del> chunkGroupCounters.get(chunkGroup).postOrderIndex++
<del> );
<del> }
<add> const index = chunkGroup.getModulePostOrderIndex(module);
<add> if (index === undefined) {
<add> chunkGroup.setModulePostOrderIndex(
<add> module,
<add> chunkGroupCounters.get(chunkGroup).postOrderIndex++
<add> );
<ide> }
<ide>
<ide> if (
| 1
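For context on what those branches compute: the traversal stamps each module with a pre-order index when it is first entered and a post-order index when it is left, and only the first visit assigns an index. A self-contained sketch of that bookkeeping (plain recursion here, not webpack's queue-based traversal):

```js
function indexModules(root, getChildren) {
  const preOrder = new Map();   // set when a module is first entered
  const postOrder = new Map();  // set after all of its children are left
  let pre = 0;
  let post = 0;

  (function visit(module) {
    if (preOrder.has(module)) return; // revisits keep the original index
    preOrder.set(module, pre++);      // ENTER_MODULE
    for (const child of getChildren(module)) visit(child);
    postOrder.set(module, post++);    // LEAVE_MODULE
  })(root);

  return { preOrder, postOrder };
}
```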
|
Ruby
|
Ruby
|
avoid actioncontroller when rendering mail at load
|
c2a3ff0027e60cce0f2ea7f8ccc1326b5ab1b782
|
<ide><path>actiontext/lib/action_text/engine.rb
<ide> def to_trix_content_attachment_partial_path
<ide> end
<ide>
<ide> initializer "action_text.helper" do
<del> %i[action_controller_base action_mailer].each do |abstract_controller|
<del> ActiveSupport.on_load(abstract_controller) do
<add> %i[action_controller_base action_mailer].each do |base|
<add> ActiveSupport.on_load(base) do
<ide> helper ActionText::Engine.helpers
<ide> end
<ide> end
<ide> end
<ide>
<ide> initializer "action_text.renderer" do
<del> ActiveSupport.on_load(:action_text_content) do
<del> self.default_renderer = Class.new(ActionController::Base).renderer
<add> ActiveSupport.on_load(:action_controller_base) do
<add> ActiveSupport.on_load(:action_text_content) do
<add> self.default_renderer = Class.new(ActionController::Base).renderer
<add> end
<ide> end
<ide>
<del> %i[action_controller_base action_mailer].each do |abstract_controller|
<del> ActiveSupport.on_load(abstract_controller) do
<add> %i[action_controller_base action_mailer].each do |base|
<add> ActiveSupport.on_load(base) do
<ide> around_action do |controller, action|
<ide> ActionText::Content.with_renderer(controller, &action)
<ide> end
| 1
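The nested `on_load` blocks above implement deferred hooks: a callback registered for a component runs only once that component loads, and nesting defers the inner registration until the outer hook fires. A JavaScript sketch of that mechanism (all names invented for illustration):

```js
const hooks = new Map();   // component name -> queued callbacks
const loaded = new Set();  // components that have already loaded

function onLoad(name, fn) {
  if (loaded.has(name)) return fn();     // run immediately if already loaded
  if (!hooks.has(name)) hooks.set(name, []);
  hooks.get(name).push(fn);              // otherwise queue for later
}

function runLoadHooks(name) {
  loaded.add(name);
  (hooks.get(name) || []).forEach(fn => fn());
}

// Mirrors the engine change: configure the renderer only after the
// controller layer exists, and only once content support loads too.
onLoad('action_controller_base', () => {
  onLoad('action_text_content', () => {
    // set the default renderer here
  });
});
```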
|
Javascript
|
Javascript
|
get challenges directly from /curriculum
|
2da8eb23e9d66d5ec86a7b4f700ee4b9f213d56d
|
<ide><path>api-server/server/boot/certificate.js
<ide> import { oldDataVizId } from '../../../config/misc';
<ide> import certTypes from '../utils/certTypes.json';
<ide> import superBlockCertTypeMap from '../utils/superBlockCertTypeMap';
<ide> import { completeCommitment$ } from '../utils/commit';
<add>import { getChallenges } from '../utils/get-curriculum';
<ide>
<ide> const log = debug('fcc:certification');
<ide>
<del>export default function bootCertificate(app) {
<add>export default function bootCertificate(app, done) {
<ide> const api = app.loopback.Router();
<del>
<del> const certTypeIds = createCertTypeIds(app);
<del> const showCert = createShowCert(app);
<del> const verifyCert = createVerifyCert(certTypeIds, app);
<del>
<del> api.put('/certificate/verify', ifNoUser401, ifNoSuperBlock404, verifyCert);
<del> api.get('/certificate/showCert/:username/:cert', showCert);
<del>
<del> app.use(api);
<add> // TODO: rather than getting all the challenges, then grabbing the certs,
<add> // consider just getting the certs.
<add> getChallenges().then(allChallenges => {
<add> const certTypeIds = createCertTypeIds(allChallenges);
<add> const showCert = createShowCert(app);
<add> const verifyCert = createVerifyCert(certTypeIds, app);
<add>
<add> api.put('/certificate/verify', ifNoUser401, ifNoSuperBlock404, verifyCert);
<add> api.get('/certificate/showCert/:username/:cert', showCert);
<add>
<add> app.use(api);
<add> done();
<add> });
<ide> }
<ide>
<ide> export function getFallbackFrontEndDate(completedChallenges, completedDate) {
<ide> const renderCertifiedEmail = loopback.template(
<ide> path.join(__dirname, '..', 'views', 'emails', 'certified.ejs')
<ide> );
<ide>
<del>function createCertTypeIds(app) {
<del> const { Challenge } = app.models;
<del>
<add>function createCertTypeIds(allChallenges) {
<ide> return {
<ide> // legacy
<del> [certTypes.frontEnd]: getIdsForCert$(legacyFrontEndChallengeId, Challenge),
<del> [certTypes.backEnd]: getIdsForCert$(legacyBackEndChallengeId, Challenge),
<del> [certTypes.dataVis]: getIdsForCert$(legacyDataVisId, Challenge),
<del> [certTypes.infosecQa]: getIdsForCert$(legacyInfosecQaId, Challenge),
<del> [certTypes.fullStack]: getIdsForCert$(legacyFullStackId, Challenge),
<add> [certTypes.frontEnd]: getCertById(legacyFrontEndChallengeId, allChallenges),
<add> [certTypes.backEnd]: getCertById(legacyBackEndChallengeId, allChallenges),
<add> [certTypes.dataVis]: getCertById(legacyDataVisId, allChallenges),
<add> [certTypes.infosecQa]: getCertById(legacyInfosecQaId, allChallenges),
<add> [certTypes.fullStack]: getCertById(legacyFullStackId, allChallenges),
<ide>
<ide> // modern
<del> [certTypes.respWebDesign]: getIdsForCert$(respWebDesignId, Challenge),
<del> [certTypes.frontEndLibs]: getIdsForCert$(frontEndLibsId, Challenge),
<del> [certTypes.dataVis2018]: getIdsForCert$(dataVis2018Id, Challenge),
<del> [certTypes.jsAlgoDataStruct]: getIdsForCert$(jsAlgoDataStructId, Challenge),
<del> [certTypes.apisMicroservices]: getIdsForCert$(
<add> [certTypes.respWebDesign]: getCertById(respWebDesignId, allChallenges),
<add> [certTypes.frontEndLibs]: getCertById(frontEndLibsId, allChallenges),
<add> [certTypes.dataVis2018]: getCertById(dataVis2018Id, allChallenges),
<add> [certTypes.jsAlgoDataStruct]: getCertById(
<add> jsAlgoDataStructId,
<add> allChallenges
<add> ),
<add> [certTypes.apisMicroservices]: getCertById(
<ide> apisMicroservicesId,
<del> Challenge
<add> allChallenges
<add> ),
<add> [certTypes.qaV7]: getCertById(qaV7Id, allChallenges),
<add> [certTypes.infosecV7]: getCertById(infosecV7Id, allChallenges),
<add> [certTypes.sciCompPyV7]: getCertById(sciCompPyV7Id, allChallenges),
<add> [certTypes.dataAnalysisPyV7]: getCertById(
<add> dataAnalysisPyV7Id,
<add> allChallenges
<ide> ),
<del> [certTypes.qaV7]: getIdsForCert$(qaV7Id, Challenge),
<del> [certTypes.infosecV7]: getIdsForCert$(infosecV7Id, Challenge),
<del> [certTypes.sciCompPyV7]: getIdsForCert$(sciCompPyV7Id, Challenge),
<del> [certTypes.dataAnalysisPyV7]: getIdsForCert$(dataAnalysisPyV7Id, Challenge),
<del> [certTypes.machineLearningPyV7]: getIdsForCert$(
<add> [certTypes.machineLearningPyV7]: getCertById(
<ide> machineLearningPyV7Id,
<del> Challenge
<add> allChallenges
<ide> )
<ide> };
<ide> }
<ide> const completionHours = {
<ide> [certTypes.machineLearningPyV7]: 400
<ide> };
<ide>
<del>function getIdsForCert$(id, Challenge) {
<del> return observeQuery(Challenge, 'findById', id, {
<del> id: true,
<del> tests: true,
<del> name: true,
<del> challengeType: true
<del> }).shareReplay();
<add>// returns an array with a single element, to be flatMap'd by createVerifyCert
<add>function getCertById(anId, allChallenges) {
<add> return allChallenges
<add> .filter(({ id }) => id === anId)
<add> .map(({ id, tests, name, challengeType }) => ({
<add> id,
<add> tests,
<add> name,
<add> challengeType
<add> }));
<ide> }
<ide>
<ide> const superBlocks = Object.keys(superBlockCertTypeMap);
<ide><path>api-server/server/boot/challenge.js
<ide> import { ifNoUserSend } from '../utils/middleware';
<ide> import { dasherize } from '../../../utils/slugs';
<ide> import _pathMigrations from '../resources/pathMigration.json';
<ide> import { fixCompletedChallengeItem } from '../../common/utils';
<add>import { getChallenges } from '../utils/get-curriculum';
<ide>
<ide> const log = debug('fcc:boot:challenges');
<ide>
<ide> export default async function bootChallenge(app, done) {
<ide> const api = app.loopback.Router();
<ide> const router = app.loopback.Router();
<ide> const redirectToLearn = createRedirectToLearn(_pathMigrations);
<del> const challengeUrlResolver = await createChallengeUrlResolver(app);
<add> const challengeUrlResolver = await createChallengeUrlResolver(
<add> await getChallenges()
<add> );
<ide> const redirectToCurrentChallenge = createRedirectToCurrentChallenge(
<ide> challengeUrlResolver
<ide> );
<ide> export function buildChallengeUrl(challenge) {
<ide> return `/learn/${dasherize(superBlock)}/${dasherize(block)}/${dashedName}`;
<ide> }
<ide>
<del>export function getFirstChallenge(Challenge) {
<del> return new Promise(resolve => {
<del> Challenge.findOne(
<del> { where: { challengeOrder: 0, superOrder: 1, order: 0 } },
<del> (err, challenge) => {
<del> if (err || isEmpty(challenge)) {
<del> return resolve('/learn');
<del> }
<del> return resolve(buildChallengeUrl(challenge));
<del> }
<del> );
<del> });
<add>// this is only called once during boot, so it can be slow.
<add>export function getFirstChallenge(allChallenges) {
<add> const first = allChallenges.find(
<add> ({ challengeOrder, superOrder, order }) =>
<add> challengeOrder === 0 && superOrder === 1 && order === 0
<add> );
<add>
<add> return first ? buildChallengeUrl(first) : '/learn';
<add>}
<add>
<add>function getChallengeById(allChallenges, targetId) {
<add> return allChallenges.find(({ id }) => id === targetId);
<ide> }
<ide>
<ide> export async function createChallengeUrlResolver(
<del> app,
<add> allChallenges,
<ide> { _getFirstChallenge = getFirstChallenge } = {}
<ide> ) {
<del> const { Challenge } = app.models;
<ide> const cache = new Map();
<del> const firstChallenge = await _getFirstChallenge(Challenge);
<add> const firstChallenge = _getFirstChallenge(allChallenges);
<add>
<ide> return function resolveChallengeUrl(id) {
<ide> if (isEmpty(id)) {
<ide> return Promise.resolve(firstChallenge);
<del> }
<del> return new Promise(resolve => {
<del> if (cache.has(id)) {
<del> return resolve(cache.get(id));
<del> }
<del> return Challenge.findById(id, (err, challenge) => {
<del> if (err || isEmpty(challenge)) {
<del> return resolve(firstChallenge);
<add> } else {
<add> return new Promise(resolve => {
<add> if (cache.has(id)) {
<add> resolve(cache.get(id));
<add> }
<add>
<add> const challenge = getChallengeById(allChallenges, id);
<add> if (isEmpty(challenge)) {
<add> resolve(firstChallenge);
<add> } else {
<add> const challengeUrl = buildChallengeUrl(challenge);
<add> cache.set(id, challengeUrl);
<add> resolve(challengeUrl);
<ide> }
<del> const challengeUrl = buildChallengeUrl(challenge);
<del> cache.set(id, challengeUrl);
<del> return resolve(challengeUrl);
<ide> });
<del> });
<add> }
<ide> };
<ide> }
<ide>
<ide><path>api-server/server/boot_tests/challenge.test.js
<ide> /* global describe xdescribe it expect */
<del>import { isEqual, first, find } from 'lodash';
<add>import { first, find } from 'lodash';
<ide> import sinon from 'sinon';
<ide> import { mockReq, mockRes } from 'sinon-express-mock';
<ide>
<ide> import {
<ide> import {
<ide> firstChallengeUrl,
<ide> requestedChallengeUrl,
<add> mockAllChallenges,
<ide> mockChallenge,
<del> mockFirstChallenge,
<ide> mockUser,
<del> mockApp,
<ide> mockGetFirstChallenge,
<del> firstChallengeQuery,
<ide> mockCompletedChallenge,
<ide> mockCompletedChallenges,
<ide> mockPathMigrationMap
<ide> describe('boot/challenge', () => {
<ide>
<ide> describe('challengeUrlResolver', () => {
<ide> it('resolves to the first challenge url by default', async () => {
<del> const challengeUrlResolver = await createChallengeUrlResolver(mockApp, {
<del> _getFirstChallenge: mockGetFirstChallenge
<del> });
<add> const challengeUrlResolver = await createChallengeUrlResolver(
<add> mockAllChallenges,
<add> {
<add> _getFirstChallenge: mockGetFirstChallenge
<add> }
<add> );
<ide>
<ide> return challengeUrlResolver().then(url => {
<ide> expect(url).toEqual(firstChallengeUrl);
<ide> });
<del> });
<add> }, 10000);
<ide>
<ide> // eslint-disable-next-line max-len
<ide> it('returns the first challenge url if the provided id does not relate to a challenge', async () => {
<del> const challengeUrlResolver = await createChallengeUrlResolver(mockApp, {
<del> _getFirstChallenge: mockGetFirstChallenge
<del> });
<add> const challengeUrlResolver = await createChallengeUrlResolver(
<add> mockAllChallenges,
<add> {
<add> _getFirstChallenge: mockGetFirstChallenge
<add> }
<add> );
<ide>
<ide> return challengeUrlResolver('not-a-real-challenge').then(url => {
<ide> expect(url).toEqual(firstChallengeUrl);
<ide> });
<ide> });
<ide>
<ide> it('resolves the correct url for the requested challenge', async () => {
<del> const challengeUrlResolver = await createChallengeUrlResolver(mockApp, {
<del> _getFirstChallenge: mockGetFirstChallenge
<del> });
<add> const challengeUrlResolver = await createChallengeUrlResolver(
<add> mockAllChallenges,
<add> {
<add> _getFirstChallenge: mockGetFirstChallenge
<add> }
<add> );
<ide>
<ide> return challengeUrlResolver('123abc').then(url => {
<ide> expect(url).toEqual(requestedChallengeUrl);
<ide> describe('boot/challenge', () => {
<ide> });
<ide>
<ide> describe('getFirstChallenge', () => {
<del> const createMockChallengeModel = success =>
<del> success
<del> ? {
<del> findOne(query, cb) {
<del> return isEqual(query, firstChallengeQuery)
<del> ? cb(null, mockFirstChallenge)
<del> : cb(new Error('no challenge found'));
<del> }
<del> }
<del> : {
<del> findOne(_, cb) {
<del> return cb(new Error('no challenge found'));
<del> }
<del> };
<ide> it('returns the correct challenge url from the model', async () => {
<del> const result = await getFirstChallenge(createMockChallengeModel(true));
<add> const result = await getFirstChallenge(mockAllChallenges);
<ide>
<ide> expect(result).toEqual(firstChallengeUrl);
<ide> });
<ide>
<ide> it('returns the learn base if no challenges found', async () => {
<del> const result = await getFirstChallenge(createMockChallengeModel(false));
<add> const result = await getFirstChallenge([]);
<ide>
<ide> expect(result).toEqual('/learn');
<ide> });
<ide> describe('boot/challenge', () => {
<ide>
<ide> // eslint-disable-next-line max-len
<ide> it('redirects to the url provided by the challengeUrlResolver', async done => {
<del> const challengeUrlResolver = await createChallengeUrlResolver(mockApp, {
<del> _getFirstChallenge: mockGetFirstChallenge
<del> });
<add> const challengeUrlResolver = await createChallengeUrlResolver(
<add> mockAllChallenges,
<add> {
<add> _getFirstChallenge: mockGetFirstChallenge
<add> }
<add> );
<ide> const expectedUrl = `${mockHomeLocation}${requestedChallengeUrl}`;
<ide> const redirectToCurrentChallenge = createRedirectToCurrentChallenge(
<ide> challengeUrlResolver,
<ide> describe('boot/challenge', () => {
<ide>
<ide> // eslint-disable-next-line max-len
<ide> it('redirects to the first challenge for users without a currentChallengeId', async done => {
<del> const challengeUrlResolver = await createChallengeUrlResolver(mockApp, {
<del> _getFirstChallenge: mockGetFirstChallenge
<del> });
<add> const challengeUrlResolver = await createChallengeUrlResolver(
<add> mockAllChallenges,
<add> {
<add> _getFirstChallenge: mockGetFirstChallenge
<add> }
<add> );
<ide> const redirectToCurrentChallenge = createRedirectToCurrentChallenge(
<ide> challengeUrlResolver,
<ide> { _homeLocation: mockHomeLocation, _learnUrl: mockLearnUrl }
<ide><path>api-server/server/boot_tests/fixtures.js
<ide> export const mockFirstChallenge = {
<ide> id: '456def',
<ide> block: 'first',
<ide> superBlock: 'the',
<del> dashedName: 'challenge'
<add> dashedName: 'challenge',
<add> challengeOrder: 0,
<add> superOrder: 1,
<add> order: 0
<ide> };
<ide>
<ide> export const mockCompletedChallenge = {
<ide> export function createNewUserFromEmail(email) {
<ide>
<ide> export const mockApp = {
<ide> models: {
<del> Challenge: {
<del> find() {
<del> return firstChallengeUrl;
<del> },
<del> findById(id, cb) {
<del> return id === mockChallenge.id
<del> ? cb(null, mockChallenge)
<del> : cb(new Error('challenge not found'));
<del> }
<del> },
<ide> Donation: {
<ide> findOne(query, cb) {
<ide> return isEqual(query, matchSubscriptionIdQuery)
<ide> export const mockApp = {
<ide> }
<ide> };
<ide>
<add>export const mockAllChallenges = [mockFirstChallenge, mockChallenge];
<add>
<ide> export const mockGetFirstChallenge = () => firstChallengeUrl;
<ide>
<ide> export const matchEmailQuery = {
<ide><path>api-server/server/utils/get-curriculum.js
<add>import { flatten } from 'lodash';
<add>
<add>import { getChallengesForLang } from '../../../curriculum/getChallenges';
<add>
<add>// TODO: this caching is handy if we want to field requests that need to 'query'
<add>// the curriculum, but if we force the client to handle
<add>// redirectToCurrentChallenge and, instead, only report the current challenge
<add>// id via the user object, then we should *not* store this so it can be garbage
<add>// collected.
<add>
<add>let curriculum;
<add>export async function getCurriculum() {
<add> curriculum = curriculum
<add> ? curriculum
<add> : getChallengesForLang(process.env.LOCALE);
<add> return curriculum;
<add>}
<add>
<add>export async function getChallenges() {
<add> return getCurriculum().then(curriculum => {
<add> return Object.keys(curriculum)
<add> .map(key => curriculum[key].blocks)
<add> .reduce((challengeArray, superBlock) => {
<add> const challengesForBlock = Object.keys(superBlock).map(
<add> key => superBlock[key].challenges
<add> );
<add> return [...challengeArray, ...flatten(challengesForBlock)];
<add> }, []);
<add> });
<add>}
| 5
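The `getCurriculum` helper above is a memoized async loader: the first caller starts the work and every later caller shares the same result. Reduced to its core (the `load` argument stands in for `getChallengesForLang`):

```js
let cached = null;

function once(load) {
  if (!cached) cached = load(); // only the first call triggers the load
  return cached;                // everyone else reuses the same promise
}

// usage sketch:
once(() => Promise.resolve(['challenge-1', 'challenge-2']))
  .then(challenges => console.log(challenges.length)); // 2
```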
|
Javascript
|
Javascript
|
handle async updates to location
|
8d39bd8abf423517b5bff70137c2a29e32bff76d
|
<ide><path>src/ng/browser.js
<ide> function Browser(window, document, $log, $sniffer) {
<ide> var cachedState, lastHistoryState,
<ide> lastBrowserUrl = location.href,
<ide> baseElement = document.find('base'),
<del> reloadLocation = null;
<add> pendingLocation = null;
<ide>
<ide> cacheState();
<ide> lastHistoryState = cachedState;
<ide> function Browser(window, document, $log, $sniffer) {
<ide> // Do the assignment again so that those two variables are referentially identical.
<ide> lastHistoryState = cachedState;
<ide> } else {
<del> if (!sameBase || reloadLocation) {
<del> reloadLocation = url;
<add> if (!sameBase || pendingLocation) {
<add> pendingLocation = url;
<ide> }
<ide> if (replace) {
<ide> location.replace(url);
<ide> function Browser(window, document, $log, $sniffer) {
<ide> } else {
<ide> location.hash = getHash(url);
<ide> }
<add> if (location.href !== url) {
<add> pendingLocation = url;
<add> }
<ide> }
<ide> return self;
<ide> // getter
<ide> } else {
<del> // - reloadLocation is needed as browsers don't allow to read out
<del> // the new location.href if a reload happened.
<add> // - pendingLocation is needed as browsers don't allow reading out
<add> // the new location.href if a reload happened or if there is a bug like in iOS 9 (see
<add> // https://openradar.appspot.com/22186109).
<ide> // - the replacement is a workaround for https://bugzilla.mozilla.org/show_bug.cgi?id=407172
<del> return reloadLocation || location.href.replace(/%27/g,"'");
<add> return pendingLocation || location.href.replace(/%27/g,"'");
<ide> }
<ide> };
<ide>
<ide> function Browser(window, document, $log, $sniffer) {
<ide> urlChangeInit = false;
<ide>
<ide> function cacheStateAndFireUrlChange() {
<add> pendingLocation = null;
<ide> cacheState();
<ide> fireUrlChange();
<ide> }
<ide><path>test/ng/browserSpecs.js
<ide> function MockWindow(options) {
<ide> var events = {};
<ide> var timeouts = this.timeouts = [];
<ide> var locationHref = 'http://server/';
<add> var committedHref = 'http://server/';
<ide> var mockWindow = this;
<ide> var msie = options.msie;
<ide> var ieState;
<ide>
<ide> historyEntriesLength = 1;
<ide>
<add> function replaceHash(href, hash) {
<add> // replace the hash with the new one (stripping off a leading hash if there is one)
<add> // See hash setter spec: https://url.spec.whatwg.org/#urlutils-and-urlutilsreadonly-members
<add> return stripHash(href) + '#' + hash.replace(/^#/,'');
<add> }
<add>
<add>
<ide> this.setTimeout = function(fn) {
<ide> return timeouts.push(fn) - 1;
<ide> };
<ide> function MockWindow(options) {
<ide>
<ide> this.location = {
<ide> get href() {
<del> return locationHref;
<add> return committedHref;
<ide> },
<ide> set href(value) {
<ide> locationHref = value;
<ide> mockWindow.history.state = null;
<ide> historyEntriesLength++;
<add> if (!options.updateAsync) this.flushHref();
<ide> },
<ide> get hash() {
<del> return getHash(locationHref);
<add> return getHash(committedHref);
<ide> },
<ide> set hash(value) {
<del> // replace the hash with the new one (stripping off a leading hash if there is one)
<del> // See hash setter spec: https://url.spec.whatwg.org/#urlutils-and-urlutilsreadonly-members
<del> locationHref = stripHash(locationHref) + '#' + value.replace(/^#/,'');
<add> locationHref = replaceHash(locationHref, value);
<add> if (!options.updateAsync) this.flushHref();
<ide> },
<ide> replace: function(url) {
<ide> locationHref = url;
<ide> mockWindow.history.state = null;
<add> if (!options.updateAsync) this.flushHref();
<add> },
<add> flushHref: function() {
<add> committedHref = locationHref;
<ide> }
<ide> };
<ide>
<ide> describe('browser', function() {
<ide>
<ide> logs = {log:[], warn:[], info:[], error:[]};
<ide>
<del> var fakeLog = {log: function() { logs.log.push(slice.call(arguments)); },
<add> fakeLog = {log: function() { logs.log.push(slice.call(arguments)); },
<ide> warn: function() { logs.warn.push(slice.call(arguments)); },
<ide> info: function() { logs.info.push(slice.call(arguments)); },
<ide> error: function() { logs.error.push(slice.call(arguments)); }};
<ide> describe('browser', function() {
<ide> describe('integration tests with $location', function() {
<ide>
<ide> function setup(options) {
<add> fakeWindow = new MockWindow(options);
<add> browser = new Browser(fakeWindow, fakeDocument, fakeLog, sniffer);
<add>
<ide> module(function($provide, $locationProvider) {
<add>
<ide> spyOn(fakeWindow.history, 'pushState').andCallFake(function(stateObj, title, newUrl) {
<ide> fakeWindow.location.href = newUrl;
<ide> });
<ide> describe('browser', function() {
<ide> });
<ide>
<ide> });
<add>
<add> // issue #12241
<add> it('should not infinite digest if the browser does not synchronously update the location properties', function() {
<add> setup({
<add> history: true,
<add> html5Mode: true,
<add> updateAsync: true // Simulate a browser that doesn't update the href synchronously
<add> });
<add>
<add> inject(function($location, $rootScope) {
<add>
<add> // Change the hash within Angular and check that we don't infinitely digest
<add> $location.hash('newHash');
<add> expect(function() { $rootScope.$digest(); }).not.toThrow();
<add> expect($location.absUrl()).toEqual('http://server/#newHash');
<add>
<add> // Now change the hash from outside Angular and check that $location updates correctly
<add> fakeWindow.location.hash = '#otherHash';
<add>
<add> // simulate next tick - since this browser doesn't update synchronously
<add> fakeWindow.location.flushHref();
<add> fakeWindow.fire('hashchange');
<add>
<add> expect($location.absUrl()).toEqual('http://server/#otherHash');
<add> });
<add> });
<ide> });
<ide>
<ide> describe('integration test with $rootScope', function() {
| 2
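The `pendingLocation` fix amounts to remembering the URL the code asked for until the browser confirms it, so reads never observe a stale `location.href`. A stripped-down sketch of that state machine (the `location` argument is any object with an `href` property, not the real window.location):

```js
function createUrlTracker(location) {
  let pendingUrl = null;

  return {
    setUrl(url) {
      location.href = url;
      // Some browsers update href asynchronously (see the iOS 9 issue
      // referenced above), so remember the intent until they catch up.
      if (location.href !== url) pendingUrl = url;
    },
    getUrl() {
      return pendingUrl || location.href;
    },
    handleUrlChange() {
      pendingUrl = null; // the browser committed; href is trustworthy again
    }
  };
}
```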
|
Javascript
|
Javascript
|
fix global leak of compilationeventbinding
|
2e6292ac36065a12de8139a9022663edd78047c1
|
<ide><path>test/RequireJsStuffPlugin.test.js
<ide> describe("RequireJsStuffPlugin", function() {
<ide> });
<ide>
<ide> describe("parser handler", function() {
<del> var parser, parserEventBindings;
<add> let parser;
<add> let parserEventBindings;
<add> let compilationEventBinding;
<ide>
<ide> beforeEach(function() {
<ide> compilationEventBinding = compilationEventBindings[0];
| 1
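The leak being fixed is the classic sloppy-mode pitfall: assigning to an undeclared name creates a property on the global object, so state bleeds across test files. A standalone demonstration (not taken from the webpack tests):

```js
function leaky() {
  leaked = 42; // no let/const/var: in sloppy mode this becomes a global
}
leaky();
console.log(typeof leaked); // "number", the value escaped the function

let contained;    // declaring it in the enclosing scope, as the test
function safe() { // now does with `let`, keeps the state contained
  contained = 42;
}
safe();
```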