Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
list
start_point
list
end_point
list
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
make_test_environ_builder
(app, path='/', base_url=None, *args, **kwargs)
Creates a new test builder with some application defaults thrown in.
Creates a new test builder with some application defaults thrown in.
def make_test_environ_builder(app, path='/', base_url=None, *args, **kwargs):
    """Creates a new test builder with some application defaults thrown in."""
    if base_url is None:
        # Derive a base URL from the application config and the given path.
        parsed = url_parse(path)
        host = parsed.netloc or app.config.get('SERVER_NAME') or 'localhost'
        base_url = 'http://%s/' % host
        root = app.config.get('APPLICATION_ROOT')
        if root:
            base_url += root.lstrip('/')
        if parsed.netloc:
            # An absolute URL was passed as `path`; strip it back down to the
            # request path (plus query string, if any).
            path = parsed.path
            if parsed.query:
                path = path + '?' + parsed.query
    return EnvironBuilder(path, base_url, *args, **kwargs)
[ "def", "make_test_environ_builder", "(", "app", ",", "path", "=", "'/'", ",", "base_url", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "http_host", "=", "app", ".", "config", ".", "get", "(", "'SERVER_NAME'", ")", "app_root", "=", ...
[ 23, 0 ]
[ 36, 58 ]
python
en
['en', 'en', 'en']
True
FlaskClient.session_transaction
(self, *args, **kwargs)
When used in combination with a ``with`` statement this opens a session transaction. This can be used to modify the session that the test client uses. Once the ``with`` block is left the session is stored back. :: with client.session_transaction() as session: session['value'] = 42 Internally this is implemented by going through a temporary test request context and since session handling could depend on request variables this function accepts the same arguments as :meth:`~flask.Flask.test_request_context` which are directly passed through.
When used in combination with a ``with`` statement this opens a session transaction. This can be used to modify the session that the test client uses. Once the ``with`` block is left the session is stored back.
def session_transaction(self, *args, **kwargs):
    """When used in combination with a ``with`` statement this opens a
    session transaction.  This can be used to modify the session that
    the test client uses.  Once the ``with`` block is left the session is
    stored back.

    ::

        with client.session_transaction() as session:
            session['value'] = 42

    Internally this is implemented by going through a temporary test
    request context and since session handling could depend on
    request variables this function accepts the same arguments as
    :meth:`~flask.Flask.test_request_context` which are directly
    passed through.
    """
    # Session data travels in cookies, so a cookie jar is a hard requirement.
    if self.cookie_jar is None:
        raise RuntimeError('Session transactions only make sense '
                           'with cookies enabled.')
    app = self.application
    # Feed the client's current cookies into the WSGI environ so the opened
    # session reflects what the server would actually see.
    environ_overrides = kwargs.setdefault('environ_overrides', {})
    self.cookie_jar.inject_wsgi(environ_overrides)
    outer_reqctx = _request_ctx_stack.top
    with app.test_request_context(*args, **kwargs) as c:
        sess = app.open_session(c.request)
        if sess is None:
            raise RuntimeError('Session backend did not open a session. '
                               'Check the configuration')

        # Since we have to open a new request context for the session
        # handling we want to make sure that we hide our own context
        # from the caller.  By pushing the original request context
        # (or None) on top of this and popping it we get exactly that
        # behavior.  It's important to not use the push and pop
        # methods of the actual request context object since that would
        # mean that cleanup handlers are called
        _request_ctx_stack.push(outer_reqctx)
        try:
            yield sess
        finally:
            _request_ctx_stack.pop()

        # After the caller is done, persist the (possibly modified) session
        # into a throwaway response and copy the resulting cookies back into
        # the client's jar.
        resp = app.response_class()
        if not app.session_interface.is_null_session(sess):
            app.save_session(sess, resp)
        headers = resp.get_wsgi_headers(c.request.environ)
        self.cookie_jar.extract_wsgi(c.request.environ, headers)
[ "def", "session_transaction", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "cookie_jar", "is", "None", ":", "raise", "RuntimeError", "(", "'Session transactions only make sense '", "'with cookies enabled.'", ")", "app", "=...
[ 64, 4 ]
[ 111, 68 ]
python
en
['en', 'en', 'en']
True
install._called_from_setup
(run_frame)
Attempt to detect whether run() was called from setup() or by another command. If called by setup(), the parent caller will be the 'run_command' method in 'distutils.dist', and *its* caller will be the 'run_commands' method. If called any other way, the immediate caller *might* be 'run_command', but it won't have been called by 'run_commands'. Return True in that case or if a call stack is unavailable. Return False otherwise.
Attempt to detect whether run() was called from setup() or by another command. If called by setup(), the parent caller will be the 'run_command' method in 'distutils.dist', and *its* caller will be the 'run_commands' method. If called any other way, the immediate caller *might* be 'run_command', but it won't have been called by 'run_commands'. Return True in that case or if a call stack is unavailable. Return False otherwise.
def _called_from_setup(run_frame): """ Attempt to detect whether run() was called from setup() or by another command. If called by setup(), the parent caller will be the 'run_command' method in 'distutils.dist', and *its* caller will be the 'run_commands' method. If called any other way, the immediate caller *might* be 'run_command', but it won't have been called by 'run_commands'. Return True in that case or if a call stack is unavailable. Return False otherwise. """ if run_frame is None: msg = "Call stack not available. bdist_* commands may fail." warnings.warn(msg) if platform.python_implementation() == 'IronPython': msg = "For best results, pass -X:Frames to enable call stack." warnings.warn(msg) return True res = inspect.getouterframes(run_frame)[2] caller, = res[:1] info = inspect.getframeinfo(caller) caller_module = caller.f_globals.get('__name__', '') return ( caller_module == 'distutils.dist' and info.function == 'run_commands' )
[ "def", "_called_from_setup", "(", "run_frame", ")", ":", "if", "run_frame", "is", "None", ":", "msg", "=", "\"Call stack not available. bdist_* commands may fail.\"", "warnings", ".", "warn", "(", "msg", ")", "if", "platform", ".", "python_implementation", "(", ")",...
[ 69, 4 ]
[ 93, 9 ]
python
en
['en', 'error', 'th']
False
GenerateConfig
(context)
Generate configuration.
Generate configuration.
def GenerateConfig(context):
    """Generate configuration."""
    name_prefix = context.env['deployment'] + '-' + context.env['name']

    # Boot disk created from the Debian 9 family image.
    boot_disk = {
        'deviceName': 'boot',
        'type': 'PERSISTENT',
        'autoDelete': True,
        'boot': True,
        'initializeParams': {
            'diskName': name_prefix + '-disk',
            'sourceImage': GlobalComputeUrl(
                'debian-cloud', 'images', 'family/debian-9'),
        },
    }

    # Single NIC on the default network with an ephemeral external IP.
    network_interface = {
        'accessConfigs': [{
            'name': 'external-nat',
            'type': 'ONE_TO_ONE_NAT',
        }],
        'network': GlobalComputeUrl(
            context.env['project'], 'networks', 'default'),
    }

    instance = {
        'zone': context.properties['zone'],
        'machineType': ZonalComputeUrl(
            context.env['project'], context.properties['zone'],
            'machineTypes', 'f1-micro'),
        'metadata': {
            'items': [{
                'key': 'startup-script',
                'value': context.properties['startup-script'],
            }],
        },
        'disks': [boot_disk],
        'networkInterfaces': [network_interface],
    }

    # Resources to return.
    return {
        'resources': [{
            'name': name_prefix + '-vm',
            'type': 'compute.v1.instance',
            'properties': instance,
        }],
    }
[ "def", "GenerateConfig", "(", "context", ")", ":", "name_prefix", "=", "context", ".", "env", "[", "'deployment'", "]", "+", "'-'", "+", "context", ".", "env", "[", "'name'", "]", "instance", "=", "{", "'zone'", ":", "context", ".", "properties", "[", ...
[ 30, 0 ]
[ 77, 18 ]
python
en
['en', 'la', 'en']
False
shortcut
(request, content_type_id, object_id)
Redirect to an object's page based on a content-type ID and an object ID.
Redirect to an object's page based on a content-type ID and an object ID.
def shortcut(request, content_type_id, object_id):
    """
    Redirect to an object's page based on a content-type ID and an object ID.
    """
    # Look up the object, making sure it's got a get_absolute_url() function.
    try:
        content_type = ContentType.objects.get(pk=content_type_id)
        if not content_type.model_class():
            raise Http404(
                _("Content type %(ct_id)s object has no associated model") %
                {'ct_id': content_type_id}
            )
        obj = content_type.get_object_for_this_type(pk=object_id)
    except (ObjectDoesNotExist, ValueError):
        # ValueError covers malformed PKs (e.g. non-integer object_id).
        raise Http404(
            _('Content type %(ct_id)s object %(obj_id)s doesn’t exist') %
            {'ct_id': content_type_id, 'obj_id': object_id}
        )

    try:
        get_absolute_url = obj.get_absolute_url
    except AttributeError:
        raise Http404(
            _('%(ct_name)s objects don’t have a get_absolute_url() method') %
            {'ct_name': content_type.name}
        )
    absurl = get_absolute_url()

    # Try to figure out the object's domain, so we can do a cross-site redirect
    # if necessary.

    # If the object actually defines a domain, we're done.
    if absurl.startswith(('http://', 'https://', '//')):
        return HttpResponseRedirect(absurl)

    # Otherwise, we need to introspect the object's relationships for a
    # relation to the Site object
    try:
        object_domain = get_current_site(request).domain
    except ObjectDoesNotExist:
        object_domain = None

    if apps.is_installed('django.contrib.sites'):
        Site = apps.get_model('sites.Site')
        opts = obj._meta

        for field in opts.many_to_many:
            # Look for a many-to-many relationship to Site.
            if field.remote_field.model is Site:
                site_qs = getattr(obj, field.name).all()
                if object_domain and site_qs.filter(domain=object_domain).exists():
                    # The current site's domain matches a site attached to the
                    # object.
                    break
                # Caveat: In the case of multiple related Sites, this just
                # selects the *first* one, which is arbitrary.
                site = site_qs.first()
                if site:
                    object_domain = site.domain
                    break
        else:
            # No many-to-many relationship to Site found. Look for a
            # many-to-one relationship to Site.
            for field in obj._meta.fields:
                if field.remote_field and field.remote_field.model is Site:
                    try:
                        site = getattr(obj, field.name)
                    except Site.DoesNotExist:
                        continue
                    if site is not None:
                        object_domain = site.domain
                        break

    # If all that malarkey found an object domain, use it. Otherwise, fall back
    # to whatever get_absolute_url() returned.
    if object_domain is not None:
        protocol = request.scheme
        return HttpResponseRedirect('%s://%s%s' % (protocol, object_domain, absurl))
    else:
        return HttpResponseRedirect(absurl)
[ "def", "shortcut", "(", "request", ",", "content_type_id", ",", "object_id", ")", ":", "# Look up the object, making sure it's got a get_absolute_url() function.", "try", ":", "content_type", "=", "ContentType", ".", "objects", ".", "get", "(", "pk", "=", "content_type_...
[ 8, 0 ]
[ 87, 43 ]
python
en
['en', 'error', 'th']
False
ogrinfo
(data_source, num_features=10)
Walk the available layers in the supplied `data_source`, displaying the fields for the first `num_features` features.
Walk the available layers in the supplied `data_source`, displaying the fields for the first `num_features` features.
def ogrinfo(data_source, num_features=10):
    """
    Walk the available layers in the supplied `data_source`, displaying the
    fields for the first `num_features` features.

    Args:
        data_source: a `DataSource` instance, or a string path/name that a
            `DataSource` can be constructed from.
        num_features: how many features of each layer to display.

    Raises:
        TypeError: if `data_source` is neither a string nor a `DataSource`.
    """
    # Checking the parameters.
    if isinstance(data_source, str):
        data_source = DataSource(data_source)
    elif isinstance(data_source, DataSource):
        pass
    else:
        # TypeError (a subclass of Exception, so existing callers still catch
        # it) is the idiomatic error for a bad argument type.
        raise TypeError('Data source parameter must be a string or a DataSource object.')

    for i, layer in enumerate(data_source):
        print("data source : %s" % data_source.name)
        print("==== layer %s" % i)
        print(" shape type: %s" % GEO_CLASSES[layer.geom_type.num].__name__)
        print(" # features: %s" % len(layer))
        print(" srs: %s" % layer.srs)
        extent_tup = layer.extent.tuple
        print(" extent: %s - %s" % (extent_tup[0:2], extent_tup[2:4]))
        print("Displaying the first %s features ====" % num_features)

        # BUG FIX: the original `max(*map(len, layer.fields))` unpacked the
        # field-name lengths as separate positional arguments, so a layer with
        # exactly one field raised TypeError (max() over a single int).
        # `default=0` additionally tolerates a layer with no fields at all.
        width = max(map(len, layer.fields), default=0)
        fmt = " %%%ss: %%s" % width
        for j, feature in enumerate(layer[:num_features]):
            print("=== Feature %s" % j)
            for fld_name in layer.fields:
                type_name = feature[fld_name].type_name
                output = fmt % (fld_name, type_name)
                val = feature.get(fld_name)
                if val:
                    # Quote string values for readability.
                    if isinstance(val, str):
                        val_fmt = ' ("%s")'
                    else:
                        val_fmt = ' (%s)'
                    output += val_fmt % val
                else:
                    output += ' (None)'
                print(output)
[ "def", "ogrinfo", "(", "data_source", ",", "num_features", "=", "10", ")", ":", "# Checking the parameters.", "if", "isinstance", "(", "data_source", ",", "str", ")", ":", "data_source", "=", "DataSource", "(", "data_source", ")", "elif", "isinstance", "(", "d...
[ 10, 0 ]
[ 50, 29 ]
python
en
['en', 'error', 'th']
False
rgb
(r, g, b, a=255)
(Internal) Turns an RGB color into a Qt compatible color integer.
(Internal) Turns an RGB color into a Qt compatible color integer.
def rgb(r, g, b, a=255):
    """(Internal) Turns an RGB color into a Qt compatible color integer."""
    # qRgba packs the four channels; the mask keeps the result inside an
    # unsigned 32-bit range regardless of the sign qRgba returns.
    packed = qRgba(r, g, b, a)
    return packed & 0xFFFFFFFF
[ "def", "rgb", "(", "r", ",", "g", ",", "b", ",", "a", "=", "255", ")", ":", "# use qRgb to pack the colors, and then turn the resulting long", "# into a negative integer with the same bitpattern.", "return", "qRgba", "(", "r", ",", "g", ",", "b", ",", "a", ")", ...
[ 56, 0 ]
[ 60, 41 ]
python
en
['en', 'ca', 'en']
True
fromqimage
(im)
:param im: QImage or PIL ImageQt object
:param im: QImage or PIL ImageQt object
def fromqimage(im):
    """
    :param im: QImage or PIL ImageQt object
    """
    qt_buffer = QBuffer()
    # Qt 6 moved the open-mode flags onto QIODevice.OpenMode.
    open_mode_owner = QIODevice.OpenMode if qt_version == "6" else QIODevice
    qt_buffer.open(open_mode_owner.ReadWrite)

    # preserve alpha channel with png
    # otherwise ppm is more friendly with Image.open
    image_format = "png" if im.hasAlphaChannel() else "ppm"
    im.save(qt_buffer, image_format)

    # Copy the serialized image into a seekable stdlib stream for PIL.
    stream = BytesIO(qt_buffer.data())
    qt_buffer.close()
    return Image.open(stream)
[ "def", "fromqimage", "(", "im", ")", ":", "buffer", "=", "QBuffer", "(", ")", "qt_openmode", "=", "QIODevice", ".", "OpenMode", "if", "qt_version", "==", "\"6\"", "else", "QIODevice", "buffer", ".", "open", "(", "qt_openmode", ".", "ReadWrite", ")", "# pre...
[ 63, 0 ]
[ 82, 24 ]
python
en
['en', 'error', 'th']
False
align8to32
(bytes, width, mode)
converts each scanline of data from 8 bit to 32 bit aligned
converts each scanline of data from 8 bit to 32 bit aligned
def align8to32(bytes, width, mode):
    """
    converts each scanline of data from 8 bit to 32 bit aligned
    """
    # Bits needed per pixel for the supported modes.
    bits_per_pixel = {"1": 1, "L": 8, "P": 8}[mode]

    # Whole bytes needed to hold one scanline of pixels, then the padding
    # required to reach the next multiple of four bytes.
    scanline_bits = bits_per_pixel * width
    bytes_per_line = (scanline_bits + 7) // 8
    extra_padding = -bytes_per_line % 4

    if not extra_padding:
        # Scanlines are already 32-bit aligned; return the data untouched.
        return bytes

    padding = b"\x00" * extra_padding
    total = (len(bytes) // bytes_per_line) * bytes_per_line
    padded_lines = (
        bytes[offset:offset + bytes_per_line] + padding
        for offset in range(0, total, bytes_per_line)
    )
    return b"".join(padded_lines)
[ "def", "align8to32", "(", "bytes", ",", "width", ",", "mode", ")", ":", "bits_per_pixel", "=", "{", "\"1\"", ":", "1", ",", "\"L\"", ":", "8", ",", "\"P\"", ":", "8", "}", "[", "mode", "]", "# calculate bytes per line and the extra padding if needed", "bits_...
[ 99, 0 ]
[ 124, 29 ]
python
en
['en', 'error', 'th']
False
incoming_sms
()
Send a dynamic reply to an incoming text message
Send a dynamic reply to an incoming text message
def incoming_sms():
    """Send a dynamic reply to an incoming text message"""
    # Get the message the user sent our Twilio number
    body = request.values.get('Body', None)

    # Start our TwiML response
    resp = MessagingResponse()

    # Map known keywords to canned replies; unknown messages get no reply.
    replies = {
        'hello': "Hi!",
        'bye': "Goodbye",
    }
    reply = replies.get(body)
    if reply is not None:
        resp.message(reply)

    return str(resp)
[ "def", "incoming_sms", "(", ")", ":", "# Get the message the user sent our Twilio number", "body", "=", "request", ".", "values", ".", "get", "(", "'Body'", ",", "None", ")", "# Start our TwiML response", "resp", "=", "MessagingResponse", "(", ")", "# Determine the ri...
[ 6, 0 ]
[ 20, 20 ]
python
en
['en', 'en', 'en']
True
_ConfigName
(context)
Return the short config name.
Return the short config name.
def _ConfigName(context): """Return the short config name.""" return '{}-config'.format(context.env['deployment'])
[ "def", "_ConfigName", "(", "context", ")", ":", "return", "'{}-config'", ".", "format", "(", "context", ".", "env", "[", "'deployment'", "]", ")" ]
[ 63, 0 ]
[ 65, 54 ]
python
en
['en', 'en', 'en']
True
_ConfigUrl
(context)
Returns the full URL to the config, including hostname.
Returns the full URL to the config, including hostname.
def _ConfigUrl(context):
    """Returns the full URL to the config, including hostname."""
    # endpoint/projects/<project>/configs/<config>
    parts = [
        RTC_ENDPOINT,
        'projects',
        context.env['project'],
        'configs',
        _ConfigName(context),
    ]
    return '/'.join(parts)
[ "def", "_ConfigUrl", "(", "context", ")", ":", "return", "'{endpoint}/projects/{project}/configs/{config}'", ".", "format", "(", "endpoint", "=", "RTC_ENDPOINT", ",", "project", "=", "context", ".", "env", "[", "'project'", "]", ",", "config", "=", "_ConfigName", ...
[ 68, 0 ]
[ 73, 34 ]
python
en
['en', 'en', 'en']
True
_WaiterName
(context)
Returns the short waiter name.
Returns the short waiter name.
def _WaiterName(context): """Returns the short waiter name.""" # This name is only used for the DM manifest entry. The actual waiter name # within RuntimeConfig is static, as it is scoped to the config resource. return '{}-software'.format(context.env['deployment'])
[ "def", "_WaiterName", "(", "context", ")", ":", "# This name is only used for the DM manifest entry. The actual waiter name", "# within RuntimeConfig is static, as it is scoped to the config resource.", "return", "'{}-software'", ".", "format", "(", "context", ".", "env", "[", "'de...
[ 76, 0 ]
[ 80, 56 ]
python
en
['en', 'no', 'en']
True
_Timeout
(context)
Returns the timeout property or a default value if unspecified.
Returns the timeout property or a default value if unspecified.
def _Timeout(context):
    """Returns the timeout property or a default value if unspecified."""
    timeout = context.properties.get('timeout', DEFAULT_TIMEOUT)
    try:
        # Normalize to an integral number of seconds, rendered as a string.
        seconds = int(timeout)
    except ValueError:
        raise PropertyError('Invalid timeout value: {}'.format(timeout))
    return str(seconds)
[ "def", "_Timeout", "(", "context", ")", ":", "timeout", "=", "context", ".", "properties", ".", "get", "(", "'timeout'", ",", "DEFAULT_TIMEOUT", ")", "try", ":", "return", "str", "(", "int", "(", "timeout", ")", ")", "except", "ValueError", ":", "raise",...
[ 83, 0 ]
[ 89, 68 ]
python
en
['en', 'en', 'en']
True
_SuccessNumber
(context)
Returns the successNumber property or a default value if unspecified.
Returns the successNumber property or a default value if unspecified.
def _SuccessNumber(context):
    """Returns the successNumber property or a default value if unspecified."""
    raw = context.properties.get('successNumber', DEFAULT_SUCCESS_NUMBER)
    try:
        number = int(raw)
    except ValueError:
        raise PropertyError('Invalid successNumber value: {}'.format(raw))
    # The waiter requires a strictly positive cardinality.
    if number < 1:
        raise PropertyError('successNumber value must be greater than 0.')
    return number
[ "def", "_SuccessNumber", "(", "context", ")", ":", "number", "=", "context", ".", "properties", ".", "get", "(", "'successNumber'", ",", "DEFAULT_SUCCESS_NUMBER", ")", "try", ":", "number", "=", "int", "(", "number", ")", "if", "number", "<", "1", ":", "...
[ 92, 0 ]
[ 101, 73 ]
python
en
['en', 'en', 'en']
True
_FailureNumber
(context)
Returns the failureNumber property or a default value if unspecified.
Returns the failureNumber property or a default value if unspecified.
def _FailureNumber(context):
    """Returns the failureNumber property or a default value if unspecified."""
    raw = context.properties.get('failureNumber', DEFAULT_FAILURE_NUMBER)
    try:
        number = int(raw)
    except ValueError:
        raise PropertyError('Invalid failureNumber value: {}'.format(raw))
    # The waiter requires a strictly positive cardinality.
    if number < 1:
        raise PropertyError('failureNumber value must be greater than 0.')
    return number
[ "def", "_FailureNumber", "(", "context", ")", ":", "number", "=", "context", ".", "properties", ".", "get", "(", "'failureNumber'", ",", "DEFAULT_FAILURE_NUMBER", ")", "try", ":", "number", "=", "int", "(", "number", ")", "if", "number", "<", "1", ":", "...
[ 104, 0 ]
[ 113, 73 ]
python
en
['en', 'en', 'en']
True
_WaiterDependsOn
(context)
Returns the waiterDependsOn property or an empty list if unspecified.
Returns the waiterDependsOn property or an empty list if unspecified.
def _WaiterDependsOn(context): """Returns the waiterDependsOn property or an empty list if unspecified.""" depends_on = context.properties.get('waiterDependsOn', []) if not isinstance(depends_on, list): raise PropertyError('waiterDependsOn must be a list: {}'.format(depends_on)) for item in depends_on: if not isinstance(item, types.StringTypes): raise PropertyError( 'waiterDependsOn must be a list of strings: {}'.format(depends_on)) return depends_on
[ "def", "_WaiterDependsOn", "(", "context", ")", ":", "depends_on", "=", "context", ".", "properties", ".", "get", "(", "'waiterDependsOn'", ",", "[", "]", ")", "if", "not", "isinstance", "(", "depends_on", ",", "list", ")", ":", "raise", "PropertyError", "...
[ 116, 0 ]
[ 127, 19 ]
python
en
['en', 'en', 'en']
True
_RuntimeConfig
(context)
Constructs a RuntimeConfig resource.
Constructs a RuntimeConfig resource.
def _RuntimeConfig(context):
    """Constructs a RuntimeConfig resource."""
    config_name = _ConfigName(context)
    description = ('Holds software readiness status '
                   'for deployment {}').format(context.env['deployment'])
    return {
        'name': config_name,
        'type': 'runtimeconfig.v1beta1.config',
        'properties': {
            'config': config_name,
            'description': description,
        },
    }
[ "def", "_RuntimeConfig", "(", "context", ")", ":", "deployment_name", "=", "context", ".", "env", "[", "'deployment'", "]", "return", "{", "'name'", ":", "_ConfigName", "(", "context", ")", ",", "'type'", ":", "'runtimeconfig.v1beta1.config'", ",", "'properties'...
[ 130, 0 ]
[ 142, 3 ]
python
en
['en', 'en', 'en']
True
_Waiter
(context)
Constructs a waiter resource.
Constructs a waiter resource.
def _Waiter(context):
    """Constructs a waiter resource."""
    # Validate the timeout up front, matching the original evaluation order.
    waiter_timeout = _Timeout(context)

    def _cardinality(number, leaf):
        # The success and failure conditions share this shape; only the
        # count and the status sub-path differ.
        return {
            'cardinality': {
                'number': number,
                'path': '{}/{}'.format(STATUS_PATH, leaf),
            },
        }

    return {
        'name': _WaiterName(context),
        'type': 'runtimeconfig.v1beta1.waiter',
        'metadata': {
            'dependsOn': _WaiterDependsOn(context),
        },
        'properties': {
            'parent': '$(ref.{}.name)'.format(_ConfigName(context)),
            'waiter': 'software',
            'timeout': '{}s'.format(waiter_timeout),
            'success': _cardinality(_SuccessNumber(context), 'success'),
            'failure': _cardinality(_FailureNumber(context), 'failure'),
        },
    }
[ "def", "_Waiter", "(", "context", ")", ":", "waiter_timeout", "=", "_Timeout", "(", "context", ")", "return", "{", "'name'", ":", "_WaiterName", "(", "context", ")", ",", "'type'", ":", "'runtimeconfig.v1beta1.waiter'", ",", "'metadata'", ":", "{", "'dependsOn...
[ 145, 0 ]
[ 172, 3 ]
python
en
['en', 'en', 'en']
True
GenerateConfig
(context)
Entry function to generate the DM config.
Entry function to generate the DM config.
def GenerateConfig(context):
    """Entry function to generate the DM config."""
    # The config holds the readiness status; the waiter blocks on it.
    resources = [
        _RuntimeConfig(context),
        _Waiter(context),
    ]
    outputs = [
        {'name': 'config-url', 'value': _ConfigUrl(context)},
        {'name': 'variable-path', 'value': STATUS_PATH},
    ]
    return yaml.safe_dump({'resources': resources, 'outputs': outputs})
[ "def", "GenerateConfig", "(", "context", ")", ":", "content", "=", "{", "'resources'", ":", "[", "_RuntimeConfig", "(", "context", ")", ",", "_Waiter", "(", "context", ")", ",", "]", ",", "'outputs'", ":", "[", "{", "'name'", ":", "'config-url'", ",", ...
[ 175, 0 ]
[ 193, 32 ]
python
en
['en', 'en', 'en']
True
user_data_dir
(appname=None, appauthor=None, version=None, roaming=False)
r"""Return full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user data directories are: Mac OS X: ~/Library/Application Support/<AppName> Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/<AppName>".
r"""Return full path to the user-specific data dir for this application.
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
    r"""Return full path to the user-specific data dir for this application.

        "appname" is the name of application.
            If None, just the system directory is returned.
        "appauthor" (only used on Windows) is the name of the
            appauthor or distributing body for this application. Typically
            it is the owning company name. This falls back to appname. You may
            pass False to disable it.
        "version" is an optional version path element to append to the
            path. You might want to use this if you want multiple versions
            of your app to be able to run independently. If used, this
            would typically be "<major>.<minor>".
            Only applied when appname is present.
        "roaming" (boolean, default False) can be set True to use the Windows
            roaming appdata directory. That means that for users on a Windows
            network setup for roaming profiles, this user data will be
            sync'd on login. See
            <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
            for a discussion of issues.

    Typical user data directories are:
        Mac OS X:               ~/Library/Application Support/<AppName>
        Unix:                   ~/.local/share/<AppName>    # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming):   C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming):       C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win 7  (not roaming):   C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
        Win 7  (roaming):       C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>

    For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
    That means, by default "~/.local/share/<AppName>".
    """
    # NOTE(review): `system` and `_get_win_folder` are module-level names
    # defined elsewhere in this file; `system` presumably holds sys.platform
    # (or an equivalent normalized value) — confirm against the module header.
    if system == "win32":
        if appauthor is None:
            # Windows convention nests the app under its author; fall back to
            # the app name when no author was given.
            appauthor = appname
        const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
        path = os.path.normpath(_get_win_folder(const))
        if appname:
            if appauthor is not False:
                path = os.path.join(path, appauthor, appname)
            else:
                # appauthor=False explicitly disables the author segment.
                path = os.path.join(path, appname)
    elif system == 'darwin':
        path = os.path.expanduser('~/Library/Application Support/')
        if appname:
            path = os.path.join(path, appname)
    else:
        # XDG spec: honor $XDG_DATA_HOME, defaulting to ~/.local/share.
        path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
        if appname:
            path = os.path.join(path, appname)
    # The version segment only applies when an app name was supplied.
    if appname and version:
        path = os.path.join(path, version)
    return path
[ "def", "user_data_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "roaming", "=", "False", ")", ":", "if", "system", "==", "\"win32\"", ":", "if", "appauthor", "is", "None", ":", "appauthor", "=", "ap...
[ 44, 0 ]
[ 96, 15 ]
python
en
['en', 'en', 'en']
True
site_data_dir
(appname=None, appauthor=None, version=None, multipath=False)
r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/<AppName>', if XDG_DATA_DIRS is not set Typical user data directories are: Mac OS X: /Library/Application Support/<AppName> Unix: /usr/local/share/<AppName> or /usr/share/<AppName> Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
r"""Return full path to the user-shared data dir for this application.
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/<AppName>', if XDG_DATA_DIRS is not set Typical user data directories are: Mac OS X: /Library/Application Support/<AppName> Unix: /usr/local/share/<AppName> or /usr/share/<AppName> Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
""" if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) elif system == 'darwin': path = os.path.expanduser('/Library/Application Support') if appname: path = os.path.join(path, appname) else: # XDG default for $XDG_DATA_DIRS # only first, if multipath is False path = os.getenv('XDG_DATA_DIRS', os.pathsep.join(['/usr/local/share', '/usr/share'])) pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.sep.join([x, appname]) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path if appname and version: path = os.path.join(path, version) return path
[ "def", "site_data_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "multipath", "=", "False", ")", ":", "if", "system", "==", "\"win32\"", ":", "if", "appauthor", "is", "None", ":", "appauthor", "=", "...
[ 99, 0 ]
[ 162, 15 ]
python
en
['en', 'en', 'en']
True
user_config_dir
(appname=None, appauthor=None, version=None, roaming=False)
r"""Return full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user data directories are: Mac OS X: same as user_data_dir Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by deafult "~/.config/<AppName>".
r"""Return full path to the user-specific config dir for this application.
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user data directories are: Mac OS X: same as user_data_dir Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by deafult "~/.config/<AppName>". """ if system in ["win32", "darwin"]: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path
[ "def", "user_config_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "roaming", "=", "False", ")", ":", "if", "system", "in", "[", "\"win32\"", ",", "\"darwin\"", "]", ":", "path", "=", "user_data_dir", ...
[ 165, 0 ]
[ 202, 15 ]
python
en
['en', 'en', 'en']
True
site_config_dir
(appname=None, appauthor=None, version=None, multipath=False)
r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set Typical user data directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
r"""Return full path to the user-shared data dir for this application.
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set Typical user data directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why. """ if system in ["win32", "darwin"]: path = site_data_dir(appname, appauthor) if appname and version: path = os.path.join(path, version) else: # XDG default for $XDG_CONFIG_DIRS # only first, if multipath is False path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.sep.join([x, appname]) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path
[ "def", "site_config_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "multipath", "=", "False", ")", ":", "if", "system", "in", "[", "\"win32\"", ",", "\"darwin\"", "]", ":", "path", "=", "site_data_dir"...
[ 205, 0 ]
[ 253, 15 ]
python
en
['en', 'en', 'en']
True
user_cache_dir
(appname=None, appauthor=None, version=None, opinion=True)
r"""Return full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Caches/<AppName> Unix: ~/.cache/<AppName> (XDG default) Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option.
r"""Return full path to the user-specific cache dir for this application.
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Caches/<AppName> Unix: ~/.cache/<AppName> (XDG default) Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. 
""" if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) if opinion: path = os.path.join(path, "Cache") elif system == 'darwin': path = os.path.expanduser('~/Library/Caches') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path
[ "def", "user_cache_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "opinion", "=", "True", ")", ":", "if", "system", "==", "\"win32\"", ":", "if", "appauthor", "is", "None", ":", "appauthor", "=", "ap...
[ 256, 0 ]
[ 310, 15 ]
python
en
['en', 'en', 'en']
True
user_log_dir
(appname=None, appauthor=None, version=None, opinion=True)
r"""Return full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Logs/<AppName> Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option.
r"""Return full path to the user-specific log dir for this application.
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Logs/<AppName> Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. """ if system == "darwin": path = os.path.join( os.path.expanduser('~/Library/Logs'), appname) elif system == "win32": path = user_data_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "Logs") else: path = user_cache_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "log") if appname and version: path = os.path.join(path, version) return path
[ "def", "user_log_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "opinion", "=", "True", ")", ":", "if", "system", "==", "\"darwin\"", ":", "path", "=", "os", ".", "path", ".", "join", "(", "os", ...
[ 313, 0 ]
[ 361, 15 ]
python
en
['en', 'en', 'en']
True
_get_win_folder_from_registry
(csidl_name)
This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names.
This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names.
def _get_win_folder_from_registry(csidl_name): """This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. """ import _winreg shell_folder_name = { "CSIDL_APPDATA": "AppData", "CSIDL_COMMON_APPDATA": "Common AppData", "CSIDL_LOCAL_APPDATA": "Local AppData", }[csidl_name] key = _winreg.OpenKey( _winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" ) dir, type = _winreg.QueryValueEx(key, shell_folder_name) return dir
[ "def", "_get_win_folder_from_registry", "(", "csidl_name", ")", ":", "import", "_winreg", "shell_folder_name", "=", "{", "\"CSIDL_APPDATA\"", ":", "\"AppData\"", ",", "\"CSIDL_COMMON_APPDATA\"", ":", "\"Common AppData\"", ",", "\"CSIDL_LOCAL_APPDATA\"", ":", "\"Local AppDat...
[ 407, 0 ]
[ 425, 14 ]
python
en
['en', 'en', 'en']
True
update_guessed_word
(guess, word, guessed_word)
Update guessed_word to reflect correctly guessed characters
Update guessed_word to reflect correctly guessed characters
def update_guessed_word(guess, word, guessed_word): "Update guessed_word to reflect correctly guessed characters" indices = [i for i, c in enumerate(word) if c == guess] guessed_word = list(guessed_word) for i in indices: guessed_word[i] = guess return "".join(guessed_word)
[ "def", "update_guessed_word", "(", "guess", ",", "word", ",", "guessed_word", ")", ":", "indices", "=", "[", "i", "for", "i", ",", "c", "in", "enumerate", "(", "word", ")", "if", "c", "==", "guess", "]", "guessed_word", "=", "list", "(", "guessed_word"...
[ 101, 0 ]
[ 107, 32 ]
python
en
['en', 'en', 'en']
True
create_pca_vars
(var_name, size)
Creates PCA variables. Given variable name and size, create and return PCA variables for count, mean, covariance, eigenvalues, eignvectors, and k principal components. Args: var_name: String denoting which set of variables to create. Values are "time" and "feat". size: The size of the variable, either sequence length or number of features. Returns: PCA variables for count, mean, covariance, eigenvalues, eigenvectors, and k principal components.
Creates PCA variables.
def create_pca_vars(var_name, size): """Creates PCA variables. Given variable name and size, create and return PCA variables for count, mean, covariance, eigenvalues, eignvectors, and k principal components. Args: var_name: String denoting which set of variables to create. Values are "time" and "feat". size: The size of the variable, either sequence length or number of features. Returns: PCA variables for count, mean, covariance, eigenvalues, eigenvectors, and k principal components. """ with tf.variable_scope( name_or_scope="pca_vars", reuse=tf.AUTO_REUSE): count_var = tf.get_variable( name="pca_{}_count_var".format(var_name), dtype=tf.int64, initializer=tf.zeros(shape=[], dtype=tf.int64), trainable=False) mean_var = tf.get_variable( name="pca_{}_mean_var".format(var_name), dtype=tf.float64, initializer=tf.zeros(shape=[size], dtype=tf.float64), trainable=False) cov_var = tf.get_variable( name="pca_{}_cov_var".format(var_name), dtype=tf.float64, initializer=tf.zeros(shape=[size, size], dtype=tf.float64), trainable=False) eigval_var = tf.get_variable( name="pca_{}_eigval_var".format(var_name), dtype=tf.float64, initializer=tf.zeros(shape=[size], dtype=tf.float64), trainable=False) eigvec_var = tf.get_variable( name="pca_{}_eigvec_var".format(var_name), dtype=tf.float64, initializer=tf.zeros(shape=[size, size], dtype=tf.float64), trainable=False) k_pc_var = tf.get_variable( name="pca_{}_k_principal_components_var".format(var_name), dtype=tf.int64, initializer=tf.ones(shape=[], dtype=tf.int64), trainable=False) return count_var, mean_var, cov_var, eigval_var, eigvec_var, k_pc_var
[ "def", "create_pca_vars", "(", "var_name", ",", "size", ")", ":", "with", "tf", ".", "variable_scope", "(", "name_or_scope", "=", "\"pca_vars\"", ",", "reuse", "=", "tf", ".", "AUTO_REUSE", ")", ":", "count_var", "=", "tf", ".", "get_variable", "(", "name"...
[ 7, 0 ]
[ 61, 71 ]
python
en
['en', 'en', 'en']
True
create_both_pca_vars
(seq_len, num_feat)
Creates both time & feature major PCA variables. Given dimensions of inputs, create and return PCA variables for count, mean, covariance, eigenvalues, eigenvectors, and k principal components for both time and feature major representations. Args: seq_len: Number of timesteps in sequence. num_feat: Number of features. Returns: PCA variables for count, mean, covariance, eigenvalues, eigenvectors, and k principal components for both time and feature major representations.
Creates both time & feature major PCA variables.
def create_both_pca_vars(seq_len, num_feat): """Creates both time & feature major PCA variables. Given dimensions of inputs, create and return PCA variables for count, mean, covariance, eigenvalues, eigenvectors, and k principal components for both time and feature major representations. Args: seq_len: Number of timesteps in sequence. num_feat: Number of features. Returns: PCA variables for count, mean, covariance, eigenvalues, eigenvectors, and k principal components for both time and feature major representations. """ # Time based (pca_time_count_var, pca_time_mean_var, pca_time_cov_var, pca_time_eigval_var, pca_time_eigvec_var, pca_time_k_pc_var) = create_pca_vars( var_name="time", size=num_feat) # Features based (pca_feat_count_var, pca_feat_mean_var, pca_feat_cov_var, pca_feat_eigval_var, pca_feat_eigvec_var, pca_feat_k_pc_var) = create_pca_vars( var_name="feat", size=seq_len) return (pca_time_count_var, pca_time_mean_var, pca_time_cov_var, pca_time_eigval_var, pca_time_eigvec_var, pca_time_k_pc_var, pca_feat_count_var, pca_feat_mean_var, pca_feat_cov_var, pca_feat_eigval_var, pca_feat_eigvec_var, pca_feat_k_pc_var)
[ "def", "create_both_pca_vars", "(", "seq_len", ",", "num_feat", ")", ":", "# Time based", "(", "pca_time_count_var", ",", "pca_time_mean_var", ",", "pca_time_cov_var", ",", "pca_time_eigval_var", ",", "pca_time_eigvec_var", ",", "pca_time_k_pc_var", ")", "=", "create_pc...
[ 64, 0 ]
[ 109, 28 ]
python
en
['en', 'en', 'en']
True
pca_reconstruction_k_pc
(X_cen, pca_eigvec_var, k_pc)
PCA reconstruction with k principal components. Given centered data matrix tensor X, variables for the column means and eigenvectors, and the number of principal components, returns the reconstruction of X centered. Args: X_cen: tf.float64 matrix tensor of centered input data. pca_eigvec_var: tf.float64 matrix variable storing eigenvectors. k_pc: Number of principal components to keep. Returns: X_cen_recon: 2D input data tensor reconstructed.
PCA reconstruction with k principal components.
def pca_reconstruction_k_pc(X_cen, pca_eigvec_var, k_pc): """PCA reconstruction with k principal components. Given centered data matrix tensor X, variables for the column means and eigenvectors, and the number of principal components, returns the reconstruction of X centered. Args: X_cen: tf.float64 matrix tensor of centered input data. pca_eigvec_var: tf.float64 matrix variable storing eigenvectors. k_pc: Number of principal components to keep. Returns: X_cen_recon: 2D input data tensor reconstructed. """ # time_shape = (num_feat, num_feat) # feat_shape = (seq_len, seq_len) projection_matrix = tf.matmul( a=pca_eigvec_var[:, -k_pc:], b=pca_eigvec_var[:, -k_pc:], transpose_b=True) # time_shape = (cur_batch_size * seq_len, num_feat) # feat_shape = (cur_batch_size * num_feat, seq_len) X_cen_recon = tf.matmul( a=X_cen, b=projection_matrix) return X_cen_recon
[ "def", "pca_reconstruction_k_pc", "(", "X_cen", ",", "pca_eigvec_var", ",", "k_pc", ")", ":", "# time_shape = (num_feat, num_feat)", "# feat_shape = (seq_len, seq_len)", "projection_matrix", "=", "tf", ".", "matmul", "(", "a", "=", "pca_eigvec_var", "[", ":", ",", "-"...
[ 112, 0 ]
[ 140, 20 ]
python
en
['en', 'en', 'en']
True
pca_reconstruction_k_pc_mse
(X_cen, pca_eigvec_var, k_pc)
PCA reconstruction with k principal components. Given centered data matrix tensor X, variables for the column means and eigenvectors, and the number of principal components, returns reconstruction MSE. Args: X_cen: tf.float64 matrix tensor of centered input data. pca_eigvec_var: tf.float64 matrix variable storing eigenvectors. k_pc: Number of principal components to keep. Returns: mse: Reconstruction mean squared error.
PCA reconstruction with k principal components.
def pca_reconstruction_k_pc_mse(X_cen, pca_eigvec_var, k_pc): """PCA reconstruction with k principal components. Given centered data matrix tensor X, variables for the column means and eigenvectors, and the number of principal components, returns reconstruction MSE. Args: X_cen: tf.float64 matrix tensor of centered input data. pca_eigvec_var: tf.float64 matrix variable storing eigenvectors. k_pc: Number of principal components to keep. Returns: mse: Reconstruction mean squared error. """ # time_shape = (cur_batch_size * seq_len, num_feat) # feat_shape = (cur_batch_size * num_feat, seq_len) X_cen_recon = pca_reconstruction_k_pc( X_cen, pca_eigvec_var, k_pc) # time_shape = (cur_batch_size * seq_len, num_feat) # feat_shape = (cur_batch_size * num_feat, seq_len) error = X_cen - X_cen_recon # shape = () mse = tf.reduce_mean( input_tensor=tf.reduce_sum( input_tensor=tf.square(x=error), axis=-1)) return mse
[ "def", "pca_reconstruction_k_pc_mse", "(", "X_cen", ",", "pca_eigvec_var", ",", "k_pc", ")", ":", "# time_shape = (cur_batch_size * seq_len, num_feat)", "# feat_shape = (cur_batch_size * num_feat, seq_len)", "X_cen_recon", "=", "pca_reconstruction_k_pc", "(", "X_cen", ",", "pca_e...
[ 143, 0 ]
[ 172, 12 ]
python
en
['en', 'en', 'en']
True
find_best_k_principal_components
(X_recon_mse, pca_k_pc_var)
Find best k principal components from reconstruction MSE. Given reconstruction MSE, return number of principal components with lowest MSE in varible. Args: X_recon_mse: tf.float64 vector tensor of reconstruction mean squared error. pca_k_pc_var: tf.int64 scalar variable to hold best number of principal components. Returns: pca_k_pc_var: Updated scalar variable now with best number of principal components.
Find best k principal components from reconstruction MSE.
def find_best_k_principal_components(X_recon_mse, pca_k_pc_var): """Find best k principal components from reconstruction MSE. Given reconstruction MSE, return number of principal components with lowest MSE in varible. Args: X_recon_mse: tf.float64 vector tensor of reconstruction mean squared error. pca_k_pc_var: tf.int64 scalar variable to hold best number of principal components. Returns: pca_k_pc_var: Updated scalar variable now with best number of principal components. """ best_pca_k_pc = tf.argmin(input=X_recon_mse) + 1 with tf.control_dependencies( control_inputs=[tf.assign(ref=pca_k_pc_var, value=best_pca_k_pc)]): return tf.identity(input=pca_k_pc_var)
[ "def", "find_best_k_principal_components", "(", "X_recon_mse", ",", "pca_k_pc_var", ")", ":", "best_pca_k_pc", "=", "tf", ".", "argmin", "(", "input", "=", "X_recon_mse", ")", "+", "1", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", ...
[ 175, 0 ]
[ 197, 42 ]
python
en
['en', 'en', 'en']
True
set_k_principal_components
(user_k_pc, pca_k_pc_var)
Set k principal components from user-defined value. Given user-defined number of principal components, return variable set to this value. Args: user_k_pc: User-defined python integer for number of principal components. pca_k_pc_var: tf.int64 scalar variable to hold chosen number of principal components. Returns: pca_k_pc_var: Updated scalar variable now with chosen number of principal components.
Set k principal components from user-defined value.
def set_k_principal_components(user_k_pc, pca_k_pc_var): """Set k principal components from user-defined value. Given user-defined number of principal components, return variable set to this value. Args: user_k_pc: User-defined python integer for number of principal components. pca_k_pc_var: tf.int64 scalar variable to hold chosen number of principal components. Returns: pca_k_pc_var: Updated scalar variable now with chosen number of principal components. """ with tf.control_dependencies( control_inputs=[tf.assign(ref=pca_k_pc_var, value=user_k_pc)]): return tf.identity(input=pca_k_pc_var)
[ "def", "set_k_principal_components", "(", "user_k_pc", ",", "pca_k_pc_var", ")", ":", "with", "tf", ".", "control_dependencies", "(", "control_inputs", "=", "[", "tf", ".", "assign", "(", "ref", "=", "pca_k_pc_var", ",", "value", "=", "user_k_pc", ")", "]", ...
[ 200, 0 ]
[ 220, 42 ]
python
en
['en', 'en', 'en']
True
pca_model
(X, mode, params, cur_batch_size, dummy_var)
PCA to reconstruct inputs and minimize reconstruction error. Given data matrix tensor X, the current Estimator mode, the dictionary of parameters, current batch size, and the number of features, process through PCA model subgraph and return reconstructed inputs as output. Args: X: tf.float64 matrix tensor of input data. mode: Estimator ModeKeys. Can take values of TRAIN, EVAL, and PREDICT. params: Dictionary of parameters. cur_batch_size: Current batch size, could be partially filled. dummy_var: Dummy variable used to allow training mode to happen since it requires a gradient to tie back to the graph dependency. Returns: loss: Reconstruction loss. train_op: Train operation so that Estimator can correctly add to dependency graph. X_time: 2D tensor representation of time major input data. X_time_recon: 2D tensor representation of time major input data. X_feat: 2D tensor representation of feature major input data. X_feat_recon: 2D tensor representation of feature major input data.
PCA to reconstruct inputs and minimize reconstruction error.
def pca_model(X, mode, params, cur_batch_size, dummy_var): """PCA to reconstruct inputs and minimize reconstruction error. Given data matrix tensor X, the current Estimator mode, the dictionary of parameters, current batch size, and the number of features, process through PCA model subgraph and return reconstructed inputs as output. Args: X: tf.float64 matrix tensor of input data. mode: Estimator ModeKeys. Can take values of TRAIN, EVAL, and PREDICT. params: Dictionary of parameters. cur_batch_size: Current batch size, could be partially filled. dummy_var: Dummy variable used to allow training mode to happen since it requires a gradient to tie back to the graph dependency. Returns: loss: Reconstruction loss. train_op: Train operation so that Estimator can correctly add to dependency graph. X_time: 2D tensor representation of time major input data. X_time_recon: 2D tensor representation of time major input data. X_feat: 2D tensor representation of feature major input data. X_feat_recon: 2D tensor representation of feature major input data. """ # Reshape into 2-D tensors # Time based # shape = (cur_batch_size * seq_len, num_feat) X_time = tf.reshape( tensor=X, shape=[cur_batch_size * params["seq_len"], params["num_feat"]]) # Features based # shape = (cur_batch_size, num_feat, seq_len) X_transposed = tf.transpose(a=X, perm=[0, 2, 1]) # shape = (cur_batch_size * num_feat, seq_len) X_feat = tf.reshape( tensor=X_transposed, shape=[cur_batch_size * params["num_feat"], params["seq_len"]]) ############################################################################## # Variables for calculating error distribution statistics (pca_time_count_var, pca_time_mean_var, pca_time_cov_var, pca_time_eigval_var, pca_time_eigvec_var, pca_time_k_pc_var, pca_feat_count_var, pca_feat_mean_var, pca_feat_cov_var, pca_feat_eigval_var, pca_feat_eigvec_var, pca_feat_k_pc_var) = create_both_pca_vars( params["seq_len"], params["num_feat"]) # 3. 
Loss function, training/eval ops if (mode == tf.estimator.ModeKeys.TRAIN and params["training_mode"] == "reconstruction"): if not params["autotune_principal_components"]: with tf.variable_scope(name_or_scope="pca_vars", reuse=tf.AUTO_REUSE): # Check if batch is a singleton, very important for covariance math # Time based # shape = () singleton_condition = tf.equal( x=cur_batch_size * params["seq_len"], y=1) pca_time_cov_var, pca_time_mean_var, pca_time_count_var = tf.cond( pred=singleton_condition, true_fn=lambda: singleton_batch_cov_variable_updating( params["seq_len"], X_time, pca_time_count_var, pca_time_mean_var, pca_time_cov_var), false_fn=lambda: non_singleton_batch_cov_variable_updating( cur_batch_size, params["seq_len"], X_time, pca_time_count_var, pca_time_mean_var, pca_time_cov_var)) # shape = (num_feat,) & (num_feat, num_feat) pca_time_eigval_tensor, pca_time_eigvec_tensor = tf.linalg.eigh( tensor=pca_time_cov_var) if params["k_principal_components_time"] is not None: pca_time_k_pc = set_k_principal_components( params["k_principal_components_time"], pca_time_k_pc_var) else: pca_time_k_pc = tf.zeros(shape=(), dtype=tf.float64) # Features based # shape = () singleton_features_condition = tf.equal( x=cur_batch_size * params["num_feat"], y=1) pca_feat_cov_var, pca_feat_mean_var, pca_feat_count_var = tf.cond( pred=singleton_features_condition, true_fn=lambda: singleton_batch_cov_variable_updating( params["num_feat"], X_feat, pca_feat_count_var, pca_feat_mean_var, pca_feat_cov_var), false_fn=lambda: non_singleton_batch_cov_variable_updating( cur_batch_size, params["num_feat"], X_feat, pca_feat_count_var, pca_feat_mean_var, pca_feat_cov_var)) # shape = (seq_len,) & (seq_len, seq_len) pca_feat_eigval_tensor, pca_feat_eigvec_tensor = tf.linalg.eigh( tensor=pca_feat_cov_var) if params["k_principal_components_feat"] is not None: pca_feat_k_pc = set_k_principal_components( params["k_principal_components_feat"], pca_feat_k_pc_var) else: pca_feat_k_pc = 
tf.zeros(shape=(), dtype=tf.float64) # Lastly use control dependencies around loss to enforce the mahalanobis # variables to be assigned, the control order matters, hence the separate # contexts with tf.control_dependencies( control_inputs=[pca_time_cov_var, pca_feat_cov_var]): with tf.control_dependencies( control_inputs=[pca_time_mean_var, pca_feat_mean_var]): with tf.control_dependencies( control_inputs=[pca_time_count_var, pca_feat_count_var]): with tf.control_dependencies( control_inputs=[tf.assign(ref=pca_time_eigval_var, value=pca_time_eigval_tensor), tf.assign(ref=pca_time_eigvec_var, value=pca_time_eigvec_tensor), tf.assign(ref=pca_feat_eigval_var, value=pca_feat_eigval_tensor), tf.assign(ref=pca_feat_eigvec_var, value=pca_feat_eigvec_tensor), pca_time_k_pc, pca_feat_k_pc]): loss = tf.reduce_sum( input_tensor=tf.zeros( shape=(), dtype=tf.float64) * dummy_var) train_op = tf.contrib.layers.optimize_loss( loss=loss, global_step=tf.train.get_global_step(), learning_rate=params["learning_rate"], optimizer="SGD") return loss, train_op, None, None, None, None else: # Time based if params["k_principal_components_time"] is None: # shape = (cur_batch_size * seq_len, num_feat) X_time_cen = X_time - pca_time_mean_var # shape = (num_feat - 1,) X_time_recon_mse = tf.map_fn( fn=lambda x: pca_reconstruction_k_pc_mse( X_time_cen, pca_time_eigvec_var, x), elems=tf.range(start=1, limit=params["num_feat"], dtype=tf.int64), dtype=tf.float64) pca_time_k_pc = find_best_k_principal_components( X_time_recon_mse, pca_time_k_pc_var) else: pca_time_k_pc = set_k_principal_components( params["k_principal_components_time"], pca_time_k_pc_var) if params["k_principal_components_feat"] is None: # Features based # shape = (cur_batch_size * num_feat, seq_len) X_feat_cen = X_feat - pca_feat_mean_var # shape = (seq_len - 1,) X_feat_recon_mse = tf.map_fn( fn=lambda x: pca_reconstruction_k_pc_mse( X_feat_cen, pca_feat_eigvec_var, x), elems=tf.range(start=1, limit=params["seq_len"], 
dtype=tf.int64), dtype=tf.float64) pca_feat_k_pc = find_best_k_principal_components( X_feat_recon_mse, pca_feat_k_pc_var) else: pca_feat_k_pc = set_k_principal_components( params["k_principal_components_feat"], pca_feat_k_pc_var) with tf.control_dependencies( control_inputs=[pca_time_k_pc, pca_feat_k_pc]): loss = tf.reduce_sum( input_tensor=tf.zeros( shape=(), dtype=tf.float64) * dummy_var) train_op = tf.contrib.layers.optimize_loss( loss=loss, global_step=tf.train.get_global_step(), learning_rate=params["learning_rate"], optimizer="SGD") return loss, train_op, None, None, None, None else: # Time based # shape = (cur_batch_size * seq_len, num_feat) X_time_cen = X_time - pca_time_mean_var # shape = (cur_batch_size * seq_len, num_feat) if params["k_principal_components_time"] is None: X_time_recon = pca_reconstruction_k_pc( X_time_cen, pca_time_eigvec_var, pca_time_k_pc_var) else: X_time_recon = pca_reconstruction_k_pc( X_time_cen, pca_time_eigvec_var, params["k_principal_components_time"]) # Features based # shape = (cur_batch_size * num_feat, seq_len) X_feat_cen = X_feat - pca_feat_mean_var # shape = (cur_batch_size * num_feat, seq_len) if params["k_principal_components_feat"] is None: X_feat_recon = pca_reconstruction_k_pc( X_feat_cen, pca_feat_eigvec_var, pca_feat_k_pc_var) else: X_feat_recon = pca_reconstruction_k_pc( X_feat_cen, pca_feat_eigvec_var, params["k_principal_components_feat"]) return None, None, X_time_cen, X_time_recon, X_feat_cen, X_feat_recon
[ "def", "pca_model", "(", "X", ",", "mode", ",", "params", ",", "cur_batch_size", ",", "dummy_var", ")", ":", "# Reshape into 2-D tensors", "# Time based", "# shape = (cur_batch_size * seq_len, num_feat)", "X_time", "=", "tf", ".", "reshape", "(", "tensor", "=", "X",...
[ 223, 0 ]
[ 469, 73 ]
python
en
['en', 'en', 'en']
True
user_data_dir
(appname=None, appauthor=None, version=None, roaming=False)
r"""Return full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user data directories are: Mac OS X: ~/Library/Application Support/<AppName> # or ~/.config/<AppName>, if the other does not exist Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/<AppName>".
r"""Return full path to the user-specific data dir for this application.
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user data directories are: Mac OS X: ~/Library/Application Support/<AppName> # or ~/.config/<AppName>, if the other does not exist Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined Win XP (not roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName> Win XP (roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName> Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName> Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName> For Unix, we follow the XDG spec and support $XDG_DATA_HOME. That means, by default "~/.local/share/<AppName>". 
""" if system == "win32": if appauthor is None: appauthor = appname const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" path = os.path.normpath(_get_win_folder(const)) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) elif system == 'darwin': path = os.path.expanduser('~/Library/Application Support/') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path
[ "def", "user_data_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "roaming", "=", "False", ")", ":", "if", "system", "==", "\"win32\"", ":", "if", "appauthor", "is", "None", ":", "appauthor", "=", "ap...
[ 48, 0 ]
[ 100, 15 ]
python
en
['en', 'en', 'en']
True
site_data_dir
(appname=None, appauthor=None, version=None, multipath=False)
r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/<AppName>', if XDG_DATA_DIRS is not set Typical site data directories are: Mac OS X: /Library/Application Support/<AppName> Unix: /usr/local/share/<AppName> or /usr/share/<AppName> Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
r"""Return full path to the user-shared data dir for this application.
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of data dirs should be returned. By default, the first item from XDG_DATA_DIRS is returned, or '/usr/local/share/<AppName>', if XDG_DATA_DIRS is not set Typical site data directories are: Mac OS X: /Library/Application Support/<AppName> Unix: /usr/local/share/<AppName> or /usr/share/<AppName> Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName> Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7. For Unix, this is using the $XDG_DATA_DIRS[0] default. WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
""" if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) elif system == 'darwin': path = os.path.expanduser('/Library/Application Support') if appname: path = os.path.join(path, appname) else: # XDG default for $XDG_DATA_DIRS # only first, if multipath is False path = os.getenv('XDG_DATA_DIRS', os.pathsep.join(['/usr/local/share', '/usr/share'])) pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.path.join(x, appname) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path if appname and version: path = os.path.join(path, version) return path
[ "def", "site_data_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "multipath", "=", "False", ")", ":", "if", "system", "==", "\"win32\"", ":", "if", "appauthor", "is", "None", ":", "appauthor", "=", "...
[ 103, 0 ]
[ 166, 15 ]
python
en
['en', 'en', 'en']
True
user_config_dir
(appname=None, appauthor=None, version=None, roaming=False)
r"""Return full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user config directories are: Mac OS X: same as user_data_dir Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by default "~/.config/<AppName>".
r"""Return full path to the user-specific config dir for this application.
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific config dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user config directories are: Mac OS X: same as user_data_dir Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined Win *: same as user_data_dir For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. That means, by default "~/.config/<AppName>". """ if system in ["win32", "darwin"]: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path
[ "def", "user_config_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "roaming", "=", "False", ")", ":", "if", "system", "in", "[", "\"win32\"", ",", "\"darwin\"", "]", ":", "path", "=", "user_data_dir", ...
[ 169, 0 ]
[ 206, 15 ]
python
en
['en', 'en', 'en']
True
site_config_dir
(appname=None, appauthor=None, version=None, multipath=False)
r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set Typical site config directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
r"""Return full path to the user-shared data dir for this application.
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): r"""Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set Typical site config directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why. 
""" if system in ["win32", "darwin"]: path = site_data_dir(appname, appauthor) if appname and version: path = os.path.join(path, version) else: # XDG default for $XDG_CONFIG_DIRS (missing or empty) # see <https://github.com/pypa/pip/pull/7501#discussion_r360624829> # only first, if multipath is False path = os.getenv('XDG_CONFIG_DIRS') or '/etc/xdg' pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep) if x] if appname: if version: appname = os.path.join(appname, version) pathlist = [os.path.join(x, appname) for x in pathlist] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path
[ "def", "site_config_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "multipath", "=", "False", ")", ":", "if", "system", "in", "[", "\"win32\"", ",", "\"darwin\"", "]", ":", "path", "=", "site_data_dir"...
[ 211, 0 ]
[ 260, 15 ]
python
en
['en', 'en', 'en']
True
user_cache_dir
(appname=None, appauthor=None, version=None, opinion=True)
r"""Return full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Caches/<AppName> Unix: ~/.cache/<AppName> (XDG default) Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option.
r"""Return full path to the user-specific cache dir for this application.
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific cache dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Cache" to the base app data dir for Windows. See discussion below. Typical user cache directories are: Mac OS X: ~/Library/Caches/<AppName> Unix: ~/.cache/<AppName> (XDG default) Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming app data dir (the default returned by `user_data_dir` above). Apps typically put cache data somewhere *under* the given dir here. Some examples: ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache ...\Acme\SuperApp\Cache\1.0 OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. This can be disabled with the `opinion=False` option. """ if system == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) # When using Python 2, return paths as bytes on Windows like we do on # other operating systems. See helper function docs for more details. 
if not PY3 and isinstance(path, unicode): path = _win_path_to_bytes(path) if appname: if appauthor is not False: path = os.path.join(path, appauthor, appname) else: path = os.path.join(path, appname) if opinion: path = os.path.join(path, "Cache") elif system == 'darwin': path = os.path.expanduser('~/Library/Caches') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache')) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path
[ "def", "user_cache_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "opinion", "=", "True", ")", ":", "if", "system", "==", "\"win32\"", ":", "if", "appauthor", "is", "None", ":", "appauthor", "=", "ap...
[ 263, 0 ]
[ 321, 15 ]
python
en
['en', 'en', 'en']
True
user_state_dir
(appname=None, appauthor=None, version=None, roaming=False)
r"""Return full path to the user-specific state dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user state directories are: Mac OS X: same as user_data_dir Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined Win *: same as user_data_dir For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state> to extend the XDG spec and support $XDG_STATE_HOME. That means, by default "~/.local/state/<AppName>".
r"""Return full path to the user-specific state dir for this application.
def user_state_dir(appname=None, appauthor=None, version=None, roaming=False): r"""Return full path to the user-specific state dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "roaming" (boolean, default False) can be set True to use the Windows roaming appdata directory. That means that for users on a Windows network setup for roaming profiles, this user data will be sync'd on login. See <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> for a discussion of issues. Typical user state directories are: Mac OS X: same as user_data_dir Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined Win *: same as user_data_dir For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state> to extend the XDG spec and support $XDG_STATE_HOME. That means, by default "~/.local/state/<AppName>". """ if system in ["win32", "darwin"]: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path
[ "def", "user_state_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "roaming", "=", "False", ")", ":", "if", "system", "in", "[", "\"win32\"", ",", "\"darwin\"", "]", ":", "path", "=", "user_data_dir", ...
[ 324, 0 ]
[ 363, 15 ]
python
en
['en', 'en', 'en']
True
user_log_dir
(appname=None, appauthor=None, version=None, opinion=True)
r"""Return full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user log directories are: Mac OS X: ~/Library/Logs/<AppName> Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option.
r"""Return full path to the user-specific log dir for this application.
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True): r"""Return full path to the user-specific log dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. You may pass False to disable it. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "opinion" (boolean) can be False to disable the appending of "Logs" to the base app data dir for Windows, and "log" to the base cache dir for Unix. See discussion below. Typical user log directories are: Mac OS X: ~/Library/Logs/<AppName> Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs On Windows the only suggestion in the MSDN docs is that local settings go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in examples of what some windows apps use for a logs dir.) OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA` value for Windows and appends "log" to the user cache dir for Unix. This can be disabled with the `opinion=False` option. """ if system == "darwin": path = os.path.join( os.path.expanduser('~/Library/Logs'), appname) elif system == "win32": path = user_data_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "Logs") else: path = user_cache_dir(appname, appauthor, version) version = False if opinion: path = os.path.join(path, "log") if appname and version: path = os.path.join(path, version) return path
[ "def", "user_log_dir", "(", "appname", "=", "None", ",", "appauthor", "=", "None", ",", "version", "=", "None", ",", "opinion", "=", "True", ")", ":", "if", "system", "==", "\"darwin\"", ":", "path", "=", "os", ".", "path", ".", "join", "(", "os", ...
[ 366, 0 ]
[ 414, 15 ]
python
en
['en', 'en', 'en']
True
_get_win_folder_from_registry
(csidl_name)
This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names.
This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names.
def _get_win_folder_from_registry(csidl_name): """This is a fallback technique at best. I'm not sure if using the registry for this guarantees us the correct answer for all CSIDL_* names. """ if PY3: import winreg as _winreg else: import _winreg shell_folder_name = { "CSIDL_APPDATA": "AppData", "CSIDL_COMMON_APPDATA": "Common AppData", "CSIDL_LOCAL_APPDATA": "Local AppData", }[csidl_name] key = _winreg.OpenKey( _winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" ) dir, type = _winreg.QueryValueEx(key, shell_folder_name) return dir
[ "def", "_get_win_folder_from_registry", "(", "csidl_name", ")", ":", "if", "PY3", ":", "import", "winreg", "as", "_winreg", "else", ":", "import", "_winreg", "shell_folder_name", "=", "{", "\"CSIDL_APPDATA\"", ":", "\"AppData\"", ",", "\"CSIDL_COMMON_APPDATA\"", ":"...
[ 465, 0 ]
[ 486, 14 ]
python
en
['en', 'en', 'en']
True
_win_path_to_bytes
(path)
Encode Windows paths to bytes. Only used on Python 2. Motivation is to be consistent with other operating systems where paths are also returned as bytes. This avoids problems mixing bytes and Unicode elsewhere in the codebase. For more details and discussion see <https://github.com/pypa/pip/issues/3463>. If encoding using ASCII and MBCS fails, return the original Unicode path.
Encode Windows paths to bytes. Only used on Python 2.
def _win_path_to_bytes(path): """Encode Windows paths to bytes. Only used on Python 2. Motivation is to be consistent with other operating systems where paths are also returned as bytes. This avoids problems mixing bytes and Unicode elsewhere in the codebase. For more details and discussion see <https://github.com/pypa/pip/issues/3463>. If encoding using ASCII and MBCS fails, return the original Unicode path. """ for encoding in ('ASCII', 'MBCS'): try: return path.encode(encoding) except (UnicodeEncodeError, LookupError): pass return path
[ "def", "_win_path_to_bytes", "(", "path", ")", ":", "for", "encoding", "in", "(", "'ASCII'", ",", "'MBCS'", ")", ":", "try", ":", "return", "path", ".", "encode", "(", "encoding", ")", "except", "(", "UnicodeEncodeError", ",", "LookupError", ")", ":", "p...
[ 580, 0 ]
[ 595, 15 ]
python
en
['en', 'en', 'en']
True
DataHandling.get_builtup_data
(self,targets=[], not_targets=[], color_mode='grayscale')
画像のフルパスを受け取る :return: 正規化されたデータ、ラベル
画像のフルパスを受け取る :return: 正規化されたデータ、ラベル
def get_builtup_data(self,targets=[], not_targets=[], color_mode='grayscale'): """ 画像のフルパスを受け取る :return: 正規化されたデータ、ラベル """ train, valid = self.data_handling.split_train_test(targets=targets, not_targets=not_targets) return self.data_handling.preprocess(train=train, valid=valid, color_mode=color_mode)
[ "def", "get_builtup_data", "(", "self", ",", "targets", "=", "[", "]", ",", "not_targets", "=", "[", "]", ",", "color_mode", "=", "'grayscale'", ")", ":", "train", ",", "valid", "=", "self", ".", "data_handling", ".", "split_train_test", "(", "targets", ...
[ 11, 4 ]
[ 17, 93 ]
python
en
['en', 'error', 'th']
False
DataHandling.read_dirs
(self, target_label)
:return: target type=list ターゲット画像のリストをくるんだリスト target[0]で取り出す 0dimensionには基本的に1要素しか入らないがnot_targetの形状を考慮しそちらの形状で揃えた not_target type=list ターゲット以外の画像のリストをくるんだリスト not_target[index]で各ノイズにアクセスできる ノイズ一つ一つもリストになっていて、そのまま一つのラベルとして使用する
:return: target type=list ターゲット画像のリストをくるんだリスト target[0]で取り出す 0dimensionには基本的に1要素しか入らないがnot_targetの形状を考慮しそちらの形状で揃えた
def read_dirs(self, target_label): """ :return: target type=list ターゲット画像のリストをくるんだリスト target[0]で取り出す 0dimensionには基本的に1要素しか入らないがnot_targetの形状を考慮しそちらの形状で揃えた not_target type=list ターゲット以外の画像のリストをくるんだリスト not_target[index]で各ノイズにアクセスできる ノイズ一つ一つもリストになっていて、そのまま一つのラベルとして使用する """ target_dir, not_target_dir = self.get_img_dir(target_label=target_label, split_tag=True) return self.read_datas_dir(target=target_dir, not_target=not_target_dir, target_label=target_label)
[ "def", "read_dirs", "(", "self", ",", "target_label", ")", ":", "target_dir", ",", "not_target_dir", "=", "self", ".", "get_img_dir", "(", "target_label", "=", "target_label", ",", "split_tag", "=", "True", ")", "return", "self", ".", "read_datas_dir", "(", ...
[ 22, 4 ]
[ 36, 107 ]
python
en
['en', 'error', 'th']
False
DataHandling.get_builtup_data_include_noise
(self)
:return: tuple (self.x_train, self.x_test, self.y_train, self.y_test)
:return: tuple (self.x_train, self.x_test, self.y_train, self.y_test)
def get_builtup_data_include_noise(self): """ :return: tuple (self.x_train, self.x_test, self.y_train, self.y_test) """ return self.oppo.return_datafiles()
[ "def", "get_builtup_data_include_noise", "(", "self", ")", ":", "return", "self", ".", "oppo", ".", "return_datafiles", "(", ")" ]
[ 48, 4 ]
[ 52, 43 ]
python
en
['en', 'error', 'th']
False
define_filenames
(folder: Path)
Locates all relevant breseq files.
Locates all relevant breseq files.
def define_filenames(folder: Path) -> Dict[str, Path]: """ Locates all relevant breseq files.""" files = { 'index': folder / "output" / "index.html", 'gd': folder / "output" / "evidence" / "annotated.gd", 'vcf': folder / "data" / "output.vcf", 'summary': folder / "data" / "summary.json", 'summaryHtml': folder / "output" / "summary.html", 'marginal': folder / "output" / "marginal.html" } return files
[ "def", "define_filenames", "(", "folder", ":", "Path", ")", "->", "Dict", "[", "str", ",", "Path", "]", ":", "files", "=", "{", "'index'", ":", "folder", "/", "\"output\"", "/", "\"index.html\"", ",", "'gd'", ":", "folder", "/", "\"output\"", "/", "\"e...
[ 11, 0 ]
[ 23, 13 ]
python
en
['en', 'it', 'en']
True
move_breseq_folder
(source_folder: Path, destination_folder: Path)
Moves the relevant breseq files into a new folder. Does not move any of the unneccessary files. Parameters ---------- source_folder: Source breseq folder. destination_folder: The parent folder where the files should be saved. The contents of the source folder will be added to a subfolder named after the linked sample.
Moves the relevant breseq files into a new folder. Does not move any of the unneccessary files. Parameters ---------- source_folder: Source breseq folder. destination_folder: The parent folder where the files should be saved. The contents of the source folder will be added to a subfolder named after the linked sample.
def move_breseq_folder(source_folder: Path, destination_folder: Path): """ Moves the relevant breseq files into a new folder. Does not move any of the unneccessary files. Parameters ---------- source_folder: Source breseq folder. destination_folder: The parent folder where the files should be saved. The contents of the source folder will be added to a subfolder named after the linked sample. """ make_breseq_folders(destination_folder) files_source = define_filenames(source_folder) files_destination = define_filenames(destination_folder) for key in files_source.keys(): filename_source = files_source[key] if filename_source.exists(): filename_destination = files_destination[key] filename_destination.write_bytes(filename_source.read_bytes())
[ "def", "move_breseq_folder", "(", "source_folder", ":", "Path", ",", "destination_folder", ":", "Path", ")", ":", "make_breseq_folders", "(", "destination_folder", ")", "files_source", "=", "define_filenames", "(", "source_folder", ")", "files_destination", "=", "defi...
[ 35, 0 ]
[ 52, 65 ]
python
en
['en', 'error', 'th']
False
ShapeBuilder.default_sizes
(cls, scale)
Convert single scale parameter to a dict of arguments for build.
Convert single scale parameter to a dict of arguments for build.
def default_sizes(cls, scale): """Convert single scale parameter to a dict of arguments for build.""" raise RuntimeError('Using "scale" is not supported for %s' % cls.__name__)
[ "def", "default_sizes", "(", "cls", ",", "scale", ")", ":", "raise", "RuntimeError", "(", "'Using \"scale\" is not supported for %s'", "%", "cls", ".", "__name__", ")" ]
[ 46, 4 ]
[ 49, 40 ]
python
en
['en', 'en', 'en']
True
ShapeBuilder.diameter_to_default_scale
(cls, diameter)
Convert diameter parameter to a default size scale.
Convert diameter parameter to a default size scale.
def diameter_to_default_scale(cls, diameter): """Convert diameter parameter to a default size scale.""" raise RuntimeError('Using "diameter" is not supported for %s' % cls.__name__)
[ "def", "diameter_to_default_scale", "(", "cls", ",", "diameter", ")", ":", "raise", "RuntimeError", "(", "'Using \"diameter\" is not supported for %s'", "%", "cls", ".", "__name__", ")" ]
[ 52, 4 ]
[ 55, 40 ]
python
en
['en', 'fr', 'en']
True
ShapeBuilder._build
(cls, **kwargs)
Build the shape with the parameters. Returns either shape_or_list_shapes or tuple (shape_or_list_shapes, phantom_vertices).
Build the shape with the parameters.
def _build(cls, **kwargs): """Build the shape with the parameters. Returns either shape_or_list_shapes or tuple (shape_or_list_shapes, phantom_vertices). """ pass
[ "def", "_build", "(", "cls", ",", "*", "*", "kwargs", ")", ":", "pass" ]
[ 59, 4 ]
[ 67, 12 ]
python
en
['en', 'en', 'en']
True
ShapeBuilder.build
(cls, scale=None, diameter=None, **kwargs)
Build the shape either from scale, diameter, or kwargs. At least one of diameter or scale must be None. Returns tuple (list_of_shapes, phantom_vertices).
Build the shape either from scale, diameter, or kwargs.
def build(cls, scale=None, diameter=None, **kwargs): """Build the shape either from scale, diameter, or kwargs. At least one of diameter or scale must be None. Returns tuple (list_of_shapes, phantom_vertices). """ assert not (scale is not None and diameter is not None ), 'Cannot build shape from both scale and diameter' if diameter is not None: scale = cls.diameter_to_default_scale(diameter) if scale is not None: kwargs.update(cls.default_sizes(scale)) ret = cls._build(**kwargs) # Add phantom_vertices if not provided. if not isinstance(ret, (tuple, list)): ret = ret, None elif len(ret) < 2 or isinstance(ret[1], scene_if.Shape): ret = ret, None # Make sure a list of shapes is returned. shapes, phantom_vertices = ret if isinstance(shapes, scene_if.Shape): shapes = [shapes] return shapes, phantom_vertices
[ "def", "build", "(", "cls", ",", "scale", "=", "None", ",", "diameter", "=", "None", ",", "*", "*", "kwargs", ")", ":", "assert", "not", "(", "scale", "is", "not", "None", "and", "diameter", "is", "not", "None", ")", ",", "'Cannot build shape from both...
[ 70, 4 ]
[ 93, 39 ]
python
en
['en', 'en', 'en']
True
strongly_connected_components
(G)
Adapted from networkx: http://networkx.github.io/ Parameters ---------- G : DiGraph Returns ------- comp : generator of sets A generator of sets of nodes, one for each strongly connected component of G.
Adapted from networkx: http://networkx.github.io/ Parameters ---------- G : DiGraph Returns ------- comp : generator of sets A generator of sets of nodes, one for each strongly connected component of G.
def strongly_connected_components(G): # noqa: C901 ''' Adapted from networkx: http://networkx.github.io/ Parameters ---------- G : DiGraph Returns ------- comp : generator of sets A generator of sets of nodes, one for each strongly connected component of G. ''' preorder = {} lowlink = {} scc_found = {} scc_queue = [] i = 0 # Preorder counter for source in G.nodes(): if source not in scc_found: queue = [source] while queue: v = queue[-1] if v not in preorder: i = i + 1 preorder[v] = i done = 1 v_nbrs = G.neighbors(v) for w in v_nbrs: if w not in preorder: queue.append(w) done = 0 break if done == 1: lowlink[v] = preorder[v] for w in v_nbrs: if w not in scc_found: if preorder[w] > preorder[v]: lowlink[v] = min([lowlink[v], lowlink[w]]) else: lowlink[v] = min([lowlink[v], preorder[w]]) queue.pop() if lowlink[v] == preorder[v]: scc_found[v] = True scc = {v} while ( scc_queue and preorder[scc_queue[-1]] > preorder[v] ): k = scc_queue.pop() scc_found[k] = True scc.add(k) yield scc else: scc_queue.append(v)
[ "def", "strongly_connected_components", "(", "G", ")", ":", "# noqa: C901", "preorder", "=", "{", "}", "lowlink", "=", "{", "}", "scc_found", "=", "{", "}", "scc_queue", "=", "[", "]", "i", "=", "0", "# Preorder counter", "for", "source", "in", "G", ".",...
[ 87, 0 ]
[ 139, 43 ]
python
en
['en', 'error', 'th']
False
simple_cycles
(G)
Adapted from networkx: http://networkx.github.io/ Parameters ---------- G : DiGraph Returns ------- cycle_generator: generator A generator that produces elementary cycles of the graph. Each cycle is represented by a list of nodes along the cycle.
Adapted from networkx: http://networkx.github.io/ Parameters ---------- G : DiGraph Returns ------- cycle_generator: generator A generator that produces elementary cycles of the graph. Each cycle is represented by a list of nodes along the cycle.
def simple_cycles(G): # noqa: C901 ''' Adapted from networkx: http://networkx.github.io/ Parameters ---------- G : DiGraph Returns ------- cycle_generator: generator A generator that produces elementary cycles of the graph. Each cycle is represented by a list of nodes along the cycle. ''' def _unblock(thisnode, blocked, B): stack = set([thisnode]) while stack: node = stack.pop() if node in blocked: blocked.remove(node) stack.update(B[node]) B[node].clear() # Johnson's algorithm requires some ordering of the nodes. # We assign the arbitrary ordering given by the strongly connected comps # There is no need to track the ordering as each node removed as processed. # save the actual graph so we can mutate it here # We only take the edges because we do not want to # copy edge and node attributes here. subG = G.subgraph(G.nodes()) sccs = list(strongly_connected_components(subG)) while sccs: scc = sccs.pop() # order of scc determines ordering of nodes startnode = scc.pop() # Processing node runs 'circuit' routine from recursive version path = [startnode] blocked = set() # vertex: blocked from search? closed = set() # nodes involved in a cycle blocked.add(startnode) B = defaultdict(set) # graph portions that yield no elementary circuit stack = [(startnode, list(subG.neighbors(startnode)))] while stack: thisnode, nbrs = stack[-1] if nbrs: nextnode = nbrs.pop() if nextnode == startnode: yield path[:] closed.update(path) elif nextnode not in blocked: path.append(nextnode) stack.append((nextnode, list(subG.neighbors(nextnode)))) closed.discard(nextnode) blocked.add(nextnode) continue # done with nextnode... 
look for more neighbors if not nbrs: # no more nbrs if thisnode in closed: _unblock(thisnode, blocked, B) else: for nbr in subG.neighbors(thisnode): if thisnode not in B[nbr]: B[nbr].add(thisnode) stack.pop() path.pop() # done processing this node subG.remove_node(startnode) H = subG.subgraph(scc) # make smaller to avoid work in SCC routine sccs.extend(list(strongly_connected_components(H)))
[ "def", "simple_cycles", "(", "G", ")", ":", "# noqa: C901", "def", "_unblock", "(", "thisnode", ",", "blocked", ",", "B", ")", ":", "stack", "=", "set", "(", "[", "thisnode", "]", ")", "while", "stack", ":", "node", "=", "stack", ".", "pop", "(", "...
[ 142, 0 ]
[ 209, 59 ]
python
en
['en', 'error', 'th']
False
find_cycle
(graph)
Looks for a cycle in the graph. If found, returns the first cycle. If nodes a1, a2, ..., an are in a cycle, then this returns: [(a1,a2), (a2,a3), ... (an-1,an), (an, a1)] Otherwise returns an empty list.
Looks for a cycle in the graph. If found, returns the first cycle. If nodes a1, a2, ..., an are in a cycle, then this returns: [(a1,a2), (a2,a3), ... (an-1,an), (an, a1)] Otherwise returns an empty list.
def find_cycle(graph): ''' Looks for a cycle in the graph. If found, returns the first cycle. If nodes a1, a2, ..., an are in a cycle, then this returns: [(a1,a2), (a2,a3), ... (an-1,an), (an, a1)] Otherwise returns an empty list. ''' cycles = list(simple_cycles(graph)) if cycles: nodes = cycles[0] nodes.append(nodes[0]) edges = [] prev = nodes[0] for node in nodes[1:]: edges.append((prev, node)) prev = node return edges else: return []
[ "def", "find_cycle", "(", "graph", ")", ":", "cycles", "=", "list", "(", "simple_cycles", "(", "graph", ")", ")", "if", "cycles", ":", "nodes", "=", "cycles", "[", "0", "]", "nodes", ".", "append", "(", "nodes", "[", "0", "]", ")", "edges", "=", ...
[ 212, 0 ]
[ 230, 17 ]
python
en
['en', 'error', 'th']
False
get_stacktrace
(thread_id)
Returns the stack trace for the thread id as a list of strings.
Returns the stack trace for the thread id as a list of strings.
def get_stacktrace(thread_id): ''' Returns the stack trace for the thread id as a list of strings. ''' gdb.execute('thread %d' % thread_id, from_tty=False, to_string=True) output = gdb.execute('bt', from_tty=False, to_string=True) stacktrace_lines = output.strip().split('\n') return stacktrace_lines
[ "def", "get_stacktrace", "(", "thread_id", ")", ":", "gdb", ".", "execute", "(", "'thread %d'", "%", "thread_id", ",", "from_tty", "=", "False", ",", "to_string", "=", "True", ")", "output", "=", "gdb", ".", "execute", "(", "'bt'", ",", "from_tty", "=", ...
[ 233, 0 ]
[ 240, 27 ]
python
en
['en', 'error', 'th']
False
is_thread_blocked_with_frame
( thread_id, top_line, expected_top_line, expected_frame )
Returns True if we found expected_top_line in top_line, and we found the expected_frame in the thread's stack trace.
Returns True if we found expected_top_line in top_line, and we found the expected_frame in the thread's stack trace.
def is_thread_blocked_with_frame( thread_id, top_line, expected_top_line, expected_frame ): ''' Returns True if we found expected_top_line in top_line, and we found the expected_frame in the thread's stack trace. ''' if expected_top_line not in top_line: return False stacktrace_lines = get_stacktrace(thread_id) return any(expected_frame in line for line in stacktrace_lines)
[ "def", "is_thread_blocked_with_frame", "(", "thread_id", ",", "top_line", ",", "expected_top_line", ",", "expected_frame", ")", ":", "if", "expected_top_line", "not", "in", "top_line", ":", "return", "False", "stacktrace_lines", "=", "get_stacktrace", "(", "thread_id"...
[ 243, 0 ]
[ 253, 67 ]
python
en
['en', 'error', 'th']
False
print_cycle
(graph, lwp_to_thread_id, cycle)
Prints the threads and mutexes involved in the deadlock.
Prints the threads and mutexes involved in the deadlock.
def print_cycle(graph, lwp_to_thread_id, cycle): '''Prints the threads and mutexes involved in the deadlock.''' for (m, n) in cycle: print( 'Thread %d (LWP %d) is waiting on %s (0x%016x) held by ' 'Thread %d (LWP %d)' % ( lwp_to_thread_id[m], m, graph.attributes(m, n)['mutex_type'].value, graph.attributes(m, n)['mutex'], lwp_to_thread_id[n], n ) )
[ "def", "print_cycle", "(", "graph", ",", "lwp_to_thread_id", ",", "cycle", ")", ":", "for", "(", "m", ",", "n", ")", "in", "cycle", ":", "print", "(", "'Thread %d (LWP %d) is waiting on %s (0x%016x) held by '", "'Thread %d (LWP %d)'", "%", "(", "lwp_to_thread_id", ...
[ 299, 0 ]
[ 309, 9 ]
python
en
['en', 'en', 'en']
True
get_thread_info
()
Returns a pair of: - map of LWP -> thread ID - map of blocked threads LWP -> potential mutex type
Returns a pair of: - map of LWP -> thread ID - map of blocked threads LWP -> potential mutex type
def get_thread_info(): ''' Returns a pair of: - map of LWP -> thread ID - map of blocked threads LWP -> potential mutex type ''' # LWP -> thread ID lwp_to_thread_id = {} # LWP -> potential mutex type it is blocked on blocked_threads = {} output = gdb.execute('info threads', from_tty=False, to_string=True) lines = output.strip().split('\n')[1:] regex = re.compile(r'[\s\*]*(\d+).*Thread.*\(LWP (\d+)\).*') for line in lines: try: thread_id = int(regex.match(line).group(1)) thread_lwp = int(regex.match(line).group(2)) lwp_to_thread_id[thread_lwp] = thread_id mutex_type = MutexType.get_mutex_type(thread_id, line) if mutex_type: blocked_threads[thread_lwp] = mutex_type except Exception: continue return (lwp_to_thread_id, blocked_threads)
[ "def", "get_thread_info", "(", ")", ":", "# LWP -> thread ID", "lwp_to_thread_id", "=", "{", "}", "# LWP -> potential mutex type it is blocked on", "blocked_threads", "=", "{", "}", "output", "=", "gdb", ".", "execute", "(", "'info threads'", ",", "from_tty", "=", "...
[ 312, 0 ]
[ 338, 46 ]
python
en
['en', 'error', 'th']
False
get_pthread_mutex_t_owner_and_address
(lwp_to_thread_id, thread_lwp)
Finds the thread holding the mutex that this thread is blocked on. Returns a pair of (lwp of thread owning mutex, mutex address), or (None, None) if not found.
Finds the thread holding the mutex that this thread is blocked on. Returns a pair of (lwp of thread owning mutex, mutex address), or (None, None) if not found.
def get_pthread_mutex_t_owner_and_address(lwp_to_thread_id, thread_lwp): ''' Finds the thread holding the mutex that this thread is blocked on. Returns a pair of (lwp of thread owning mutex, mutex address), or (None, None) if not found. ''' # Go up the stack to the pthread_mutex_lock frame gdb.execute( 'thread %d' % lwp_to_thread_id[thread_lwp], from_tty=False, to_string=True ) gdb.execute('frame 1', from_tty=False, to_string=True) # Get the owner of the mutex by inspecting the internal # fields of the mutex. try: mutex_info = gdb.parse_and_eval('mutex').dereference() mutex_owner_lwp = int(mutex_info['__data']['__owner']) return (mutex_owner_lwp, int(mutex_info.address)) except gdb.error: return (None, None)
[ "def", "get_pthread_mutex_t_owner_and_address", "(", "lwp_to_thread_id", ",", "thread_lwp", ")", ":", "# Go up the stack to the pthread_mutex_lock frame", "gdb", ".", "execute", "(", "'thread %d'", "%", "lwp_to_thread_id", "[", "thread_lwp", "]", ",", "from_tty", "=", "Fa...
[ 341, 0 ]
[ 362, 27 ]
python
en
['en', 'error', 'th']
False
get_pthread_rwlock_t_owner_and_address
(lwp_to_thread_id, thread_lwp)
If the thread is waiting on a write-locked pthread_rwlock_t, this will return the pair of: (lwp of thread that is write-owning the mutex, mutex address) or (None, None) if not found, or if the mutex is read-locked.
If the thread is waiting on a write-locked pthread_rwlock_t, this will return the pair of: (lwp of thread that is write-owning the mutex, mutex address) or (None, None) if not found, or if the mutex is read-locked.
def get_pthread_rwlock_t_owner_and_address(lwp_to_thread_id, thread_lwp): ''' If the thread is waiting on a write-locked pthread_rwlock_t, this will return the pair of: (lwp of thread that is write-owning the mutex, mutex address) or (None, None) if not found, or if the mutex is read-locked. ''' # Go up the stack to the pthread_rwlock_{rd|wr}lock frame gdb.execute( 'thread %d' % lwp_to_thread_id[thread_lwp], from_tty=False, to_string=True ) gdb.execute('frame 2', from_tty=False, to_string=True) # Get the owner of the mutex by inspecting the internal # fields of the mutex. try: rwlock_info = gdb.parse_and_eval('rwlock').dereference() rwlock_owner_lwp = int(rwlock_info['__data']['__writer']) # We can only track the owner if it is currently write-locked. # If it is not write-locked or if it is currently read-locked, # possibly by multiple threads, we cannot find the owner. if rwlock_owner_lwp != 0: return (rwlock_owner_lwp, int(rwlock_info.address)) else: return (None, None) except gdb.error: return (None, None)
[ "def", "get_pthread_rwlock_t_owner_and_address", "(", "lwp_to_thread_id", ",", "thread_lwp", ")", ":", "# Go up the stack to the pthread_rwlock_{rd|wr}lock frame", "gdb", ".", "execute", "(", "'thread %d'", "%", "lwp_to_thread_id", "[", "thread_lwp", "]", ",", "from_tty", "...
[ 365, 0 ]
[ 393, 27 ]
python
en
['en', 'error', 'th']
False
DiGraph.node_link_data
(self)
Returns the graph as a dictionary in a format that can be serialized.
Returns the graph as a dictionary in a format that can be serialized.
def node_link_data(self): ''' Returns the graph as a dictionary in a format that can be serialized. ''' data = { 'directed': True, 'multigraph': False, 'graph': {}, 'links': [], 'nodes': [], } # Do one pass to build a map of node -> position in nodes node_to_number = {} for node in self.adjacency_map.keys(): node_to_number[node] = len(data['nodes']) data['nodes'].append({'id': node}) # Do another pass to build the link information for node, neighbors in self.adjacency_map.items(): for neighbor in neighbors: link = self.attributes_map[(node, neighbor)].copy() link['source'] = node_to_number[node] link['target'] = node_to_number[neighbor] data['links'].append(link) return data
[ "def", "node_link_data", "(", "self", ")", ":", "data", "=", "{", "'directed'", ":", "True", ",", "'multigraph'", ":", "False", ",", "'graph'", ":", "{", "}", ",", "'links'", ":", "[", "]", ",", "'nodes'", ":", "[", "]", ",", "}", "# Do one pass to b...
[ 58, 4 ]
[ 84, 19 ]
python
en
['en', 'error', 'th']
False
MutexType.get_mutex_type
(thread_id, top_line)
Returns the probable mutex type, based on the first line of the thread's stack. Returns None if not found.
Returns the probable mutex type, based on the first line of the thread's stack. Returns None if not found.
def get_mutex_type(thread_id, top_line): ''' Returns the probable mutex type, based on the first line of the thread's stack. Returns None if not found. ''' if is_thread_blocked_with_frame( thread_id, top_line, '__lll_lock_wait', 'pthread_mutex' ): return MutexType.PTHREAD_MUTEX_T if is_thread_blocked_with_frame( thread_id, top_line, 'futex_wait', 'pthread_rwlock' ): return MutexType.PTHREAD_RWLOCK_T return None
[ "def", "get_mutex_type", "(", "thread_id", ",", "top_line", ")", ":", "if", "is_thread_blocked_with_frame", "(", "thread_id", ",", "top_line", ",", "'__lll_lock_wait'", ",", "'pthread_mutex'", ")", ":", "return", "MutexType", ".", "PTHREAD_MUTEX_T", "if", "is_thread...
[ 263, 4 ]
[ 277, 19 ]
python
en
['en', 'error', 'th']
False
MutexType.get_mutex_owner_and_address_func_for_type
(mutex_type)
Returns a function to resolve the mutex owner and address for the given type. The returned function f has the following signature: f: args: (map of thread lwp -> thread id), blocked thread lwp returns: (lwp of thread owning mutex, mutex address) or (None, None) if not found. Returns None if there is no function for this mutex_type.
Returns a function to resolve the mutex owner and address for the given type. The returned function f has the following signature:
def get_mutex_owner_and_address_func_for_type(mutex_type): ''' Returns a function to resolve the mutex owner and address for the given type. The returned function f has the following signature: f: args: (map of thread lwp -> thread id), blocked thread lwp returns: (lwp of thread owning mutex, mutex address) or (None, None) if not found. Returns None if there is no function for this mutex_type. ''' if mutex_type == MutexType.PTHREAD_MUTEX_T: return get_pthread_mutex_t_owner_and_address if mutex_type == MutexType.PTHREAD_RWLOCK_T: return get_pthread_rwlock_t_owner_and_address return None
[ "def", "get_mutex_owner_and_address_func_for_type", "(", "mutex_type", ")", ":", "if", "mutex_type", "==", "MutexType", ".", "PTHREAD_MUTEX_T", ":", "return", "get_pthread_mutex_t_owner_and_address", "if", "mutex_type", "==", "MutexType", ".", "PTHREAD_RWLOCK_T", ":", "re...
[ 280, 4 ]
[ 296, 19 ]
python
en
['en', 'error', 'th']
False
Deadlock.invoke
(self, arg, from_tty)
Prints the threads and mutexes in a deadlock, if it exists.
Prints the threads and mutexes in a deadlock, if it exists.
def invoke(self, arg, from_tty): '''Prints the threads and mutexes in a deadlock, if it exists.''' lwp_to_thread_id, blocked_threads = get_thread_info() # Nodes represent threads. Edge (A,B) exists if thread A # is waiting on a mutex held by thread B. graph = DiGraph() # Go through all the blocked threads and see which threads # they are blocked on, and build the thread wait graph. for thread_lwp, mutex_type in blocked_threads.items(): get_owner_and_address_func = \ MutexType.get_mutex_owner_and_address_func_for_type(mutex_type) if not get_owner_and_address_func: continue mutex_owner_lwp, mutex_address = get_owner_and_address_func( lwp_to_thread_id, thread_lwp ) if mutex_owner_lwp and mutex_address: graph.add_edge( thread_lwp, mutex_owner_lwp, mutex=mutex_address, mutex_type=mutex_type ) # A deadlock exists if there is a cycle in the graph. cycle = find_cycle(graph) if cycle: print('Found deadlock!') print_cycle(graph, lwp_to_thread_id, cycle) else: print( 'No deadlock detected. ' 'Do you have debug symbols installed?' )
[ "def", "invoke", "(", "self", ",", "arg", ",", "from_tty", ")", ":", "lwp_to_thread_id", ",", "blocked_threads", "=", "get_thread_info", "(", ")", "# Nodes represent threads. Edge (A,B) exists if thread A", "# is waiting on a mutex held by thread B.", "graph", "=", "DiGraph...
[ 402, 4 ]
[ 437, 13 ]
python
en
['en', 'en', 'en']
True
no_append_slash
(view_func)
Mark a view function as excluded from CommonMiddleware's APPEND_SLASH redirection.
Mark a view function as excluded from CommonMiddleware's APPEND_SLASH redirection.
def no_append_slash(view_func): """ Mark a view function as excluded from CommonMiddleware's APPEND_SLASH redirection. """ # view_func.should_append_slash = False would also work, but decorators are # nicer if they don't have side effects, so return a new function. def wrapped_view(*args, **kwargs): return view_func(*args, **kwargs) wrapped_view.should_append_slash = False return wraps(view_func)(wrapped_view)
[ "def", "no_append_slash", "(", "view_func", ")", ":", "# view_func.should_append_slash = False would also work, but decorators are", "# nicer if they don't have side effects, so return a new function.", "def", "wrapped_view", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", ...
[ 3, 0 ]
[ 13, 41 ]
python
en
['en', 'error', 'th']
False
connection_requires_http_tunnel
( proxy_url=None, proxy_config=None, destination_scheme=None )
Returns True if the connection requires an HTTP CONNECT through the proxy. :param URL proxy_url: URL of the proxy. :param ProxyConfig proxy_config: Proxy configuration from poolmanager.py :param str destination_scheme: The scheme of the destination. (i.e https, http, etc)
Returns True if the connection requires an HTTP CONNECT through the proxy.
def connection_requires_http_tunnel( proxy_url=None, proxy_config=None, destination_scheme=None ): """ Returns True if the connection requires an HTTP CONNECT through the proxy. :param URL proxy_url: URL of the proxy. :param ProxyConfig proxy_config: Proxy configuration from poolmanager.py :param str destination_scheme: The scheme of the destination. (i.e https, http, etc) """ # If we're not using a proxy, no way to use a tunnel. if proxy_url is None: return False # HTTP destinations never require tunneling, we always forward. if destination_scheme == "http": return False # Support for forwarding with HTTPS proxies and HTTPS destinations. if ( proxy_url.scheme == "https" and proxy_config and proxy_config.use_forwarding_for_https ): return False # Otherwise always use a tunnel. return True
[ "def", "connection_requires_http_tunnel", "(", "proxy_url", "=", "None", ",", "proxy_config", "=", "None", ",", "destination_scheme", "=", "None", ")", ":", "# If we're not using a proxy, no way to use a tunnel.", "if", "proxy_url", "is", "None", ":", "return", "False",...
[ 3, 0 ]
[ 33, 15 ]
python
en
['en', 'error', 'th']
False
create_proxy_ssl_context
( ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None )
Generates a default proxy ssl context if one hasn't been provided by the user.
Generates a default proxy ssl context if one hasn't been provided by the user.
def create_proxy_ssl_context( ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None ): """ Generates a default proxy ssl context if one hasn't been provided by the user. """ ssl_context = create_urllib3_context( ssl_version=resolve_ssl_version(ssl_version), cert_reqs=resolve_cert_reqs(cert_reqs), ) if ( not ca_certs and not ca_cert_dir and not ca_cert_data and hasattr(ssl_context, "load_default_certs") ): ssl_context.load_default_certs() return ssl_context
[ "def", "create_proxy_ssl_context", "(", "ssl_version", ",", "cert_reqs", ",", "ca_certs", "=", "None", ",", "ca_cert_dir", "=", "None", ",", "ca_cert_data", "=", "None", ")", ":", "ssl_context", "=", "create_urllib3_context", "(", "ssl_version", "=", "resolve_ssl_...
[ 36, 0 ]
[ 55, 22 ]
python
en
['en', 'error', 'th']
False
run
(argv=None)
The main function which creates the pipeline and runs it
The main function which creates the pipeline and runs it
def run(argv=None): """The main function which creates the pipeline and runs it""" parser = argparse.ArgumentParser() parser.add_argument( '--input-bucket', dest='input_bucket', required=True, default='data-daimlr', help='GS bucket_name where the input files are present') parser.add_argument( '--input-path', dest='input_path', required=False, help='GS folder name, if the input files are inside a bucket folder') parser.add_argument( '--input-files', dest='input_files', required=True, help='Comma delimited names of all input files to be imported') parser.add_argument( '--bq-dataset', dest='bq_dataset', required=True, default='rawdata', help='Output BQ dataset to write the results to') # Parse arguments from the command line known_args, pipeline_args = parser.parse_known_args(argv) # Initiate the pipeline using the pipeline arguments logging.info('START - Pipeline') p = beam.Pipeline(argv=pipeline_args) for input_file in known_args.input_files.split(','): logging.info('START - Preparing file %s' % (input_file)) table_name = os.path.splitext(input_file)[0].split('_')[0] logging.info('Retrieving information for table %s' % (table_name)) try: table = _fetch_table(table_name) except InvalidArgument, e: raise SystemExit('Error getting information for table [%s]: %s' % (table_name, e)) if not table: raise SystemExit('No table found') fields = json.loads(table['columns'].decode('utf-8'), object_pairs_hook=OrderedDict) gs_path = os.path.join( known_args.input_bucket, *[ known_args.input_path if known_args.input_path else "", input_file ]) logging.info('GS path being read from: %s' % (gs_path)) (p | 'Read From Text - ' + input_file >> beam.io.ReadFromText( gs_path, coder=FileCoder(fields.keys()), skip_header_lines=1) | 'Prepare Field Types - ' + input_file >> beam.ParDo( PrepareFieldTypes(), fields) | 'Inject Timestamp - ' + input_file >> beam.ParDo(InjectTimestamp()) | 'Write to BigQuery - ' + input_file >> beam.io.Write( beam.io.BigQuerySink( # The table name passed in from 
the command line known_args.bq_dataset + '.' + table_name, # Schema of the table schema=_get_bq_schema(fields), # Creates the table in BigQuery if it does not exist create_disposition=beam.io.BigQueryDisposition. CREATE_IF_NEEDED, # Data will be appended to the table write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))) logging.info('END - Preparing file %s' % (input_file)) p.run().wait_until_finish() logging.info('END - Pipeline')
[ "def", "run", "(", "argv", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--input-bucket'", ",", "dest", "=", "'input_bucket'", ",", "required", "=", "True", ",", "default", "=", "...
[ 174, 0 ]
[ 251, 34 ]
python
en
['en', 'en', 'en']
True
get_args
()
Argument parser. Returns: Dictionary of arguments.
Argument parser. Returns: Dictionary of arguments.
def get_args(): """Argument parser. Returns: Dictionary of arguments. """ parser = argparse.ArgumentParser() parser.add_argument( '--job-dir', type=str, required=True, help='local or GCS location for writing checkpoints and exporting ' 'models') parser.add_argument( '--bucket', type=str, required=True, help='GCS bucket where you stored the training data') parser.add_argument( '--num-train-examples', type=int, default=100000, help='number of examples to train the model, default=100000') parser.add_argument( '--num-eval-examples', type=int, default=10000, help='number of examples to evaluate the model, default=10000') parser.add_argument( '--num-evals', type=int, default=20, help='number of evaluations during the training, default=20') parser.add_argument( '--batch-size', default=128, type=int, help='number of records to read during each training step, default=128') parser.add_argument( '--learning-rate', default=.01, type=float, help='learning rate for gradient descent, default=.01') parser.add_argument( '--verbosity', choices=['DEBUG', 'ERROR', 'FATAL', 'INFO', 'WARN'], default='INFO') args, _ = parser.parse_known_args() return args
[ "def", "get_args", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--job-dir'", ",", "type", "=", "str", ",", "required", "=", "True", ",", "help", "=", "'local or GCS location for writing checkp...
[ 28, 0 ]
[ 75, 15 ]
python
da
['fr', 'da', 'pt']
False
train_and_evaluate
(args)
Trains and evaluates the Keras model. Uses the Keras model defined in model.py and trains on data loaded in model.py. Saves the trained model in TensorFlow SavedModel format to the path defined in part by the --job-dir argument. Args: args: dictionary of arguments - see get_args() for details
Trains and evaluates the Keras model. Uses the Keras model defined in model.py and trains on data loaded in model.py. Saves the trained model in TensorFlow SavedModel format to the path defined in part by the --job-dir argument. Args: args: dictionary of arguments - see get_args() for details
def train_and_evaluate(args): """Trains and evaluates the Keras model. Uses the Keras model defined in model.py and trains on data loaded in model.py. Saves the trained model in TensorFlow SavedModel format to the path defined in part by the --job-dir argument. Args: args: dictionary of arguments - see get_args() for details """ ts = datetime.datetime.now().strftime('%Y%m%d%H%M%S') # Create the Keras Model keras_model = model.create_keras_model(learning_rate=args.learning_rate) # Create the dataset generator training_dataset = model.read_dataset(args.bucket, 'train', '', args.batch_size) validation_dataset = model.read_dataset(args.bucket, 'eval', '', args.batch_size, eval=True).take(args.num_eval_examples//args.batch_size) # Setup TensorBoard callback. tensorboard_cb = tf.keras.callbacks.TensorBoard( os.path.join(args.job_dir, 'tensorboard', ts), histogram_freq=1) # Train model keras_model.fit( training_dataset, steps_per_epoch=args.num_train_examples//(args.batch_size*args.num_evals), epochs=args.num_evals, validation_data=validation_dataset, verbose=1, callbacks=[tensorboard_cb]) export_path = os.path.join(args.job_dir, 'export', ts) keras_model.save(export_path, save_format="tf") print('Model exported to: {}'.format(export_path))
[ "def", "train_and_evaluate", "(", "args", ")", ":", "ts", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "'%Y%m%d%H%M%S'", ")", "# Create the Keras Model", "keras_model", "=", "model", ".", "create_keras_model", "(", "learning_rate...
[ 78, 0 ]
[ 114, 54 ]
python
en
['en', 'en', 'en']
True
Storage.locked_get
(self)
Retrieve Credential from file. Returns: oauth2client.client.Credentials Raises: IOError if the file is a symbolic link.
Retrieve Credential from file.
def locked_get(self): """Retrieve Credential from file. Returns: oauth2client.client.Credentials Raises: IOError if the file is a symbolic link. """ credentials = None _helpers.validate_file(self._filename) try: f = open(self._filename, 'rb') content = f.read() f.close() except IOError: return credentials try: credentials = client.Credentials.new_from_json(content) credentials.set_store(self) except ValueError: pass return credentials
[ "def", "locked_get", "(", "self", ")", ":", "credentials", "=", "None", "_helpers", ".", "validate_file", "(", "self", ".", "_filename", ")", "try", ":", "f", "=", "open", "(", "self", ".", "_filename", ",", "'rb'", ")", "content", "=", "f", ".", "re...
[ 34, 4 ]
[ 58, 26 ]
python
en
['en', 'en', 'en']
True
Storage._create_file_if_needed
(self)
Create an empty file if necessary. This method will not initialize the file. Instead it implements a simple version of "touch" to ensure the file has been created.
Create an empty file if necessary.
def _create_file_if_needed(self): """Create an empty file if necessary. This method will not initialize the file. Instead it implements a simple version of "touch" to ensure the file has been created. """ if not os.path.exists(self._filename): old_umask = os.umask(0o177) try: open(self._filename, 'a+b').close() finally: os.umask(old_umask)
[ "def", "_create_file_if_needed", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_filename", ")", ":", "old_umask", "=", "os", ".", "umask", "(", "0o177", ")", "try", ":", "open", "(", "self", ".", "_filename"...
[ 60, 4 ]
[ 71, 35 ]
python
en
['en', 'en', 'en']
True
Storage.locked_put
(self, credentials)
Write Credentials to file. Args: credentials: Credentials, the credentials to store. Raises: IOError if the file is a symbolic link.
Write Credentials to file.
def locked_put(self, credentials): """Write Credentials to file. Args: credentials: Credentials, the credentials to store. Raises: IOError if the file is a symbolic link. """ self._create_file_if_needed() _helpers.validate_file(self._filename) f = open(self._filename, 'w') f.write(credentials.to_json()) f.close()
[ "def", "locked_put", "(", "self", ",", "credentials", ")", ":", "self", ".", "_create_file_if_needed", "(", ")", "_helpers", ".", "validate_file", "(", "self", ".", "_filename", ")", "f", "=", "open", "(", "self", ".", "_filename", ",", "'w'", ")", "f", ...
[ 73, 4 ]
[ 86, 17 ]
python
en
['en', 'en', 'en']
True
Storage.locked_delete
(self)
Delete Credentials file. Args: credentials: Credentials, the credentials to store.
Delete Credentials file.
def locked_delete(self): """Delete Credentials file. Args: credentials: Credentials, the credentials to store. """ os.unlink(self._filename)
[ "def", "locked_delete", "(", "self", ")", ":", "os", ".", "unlink", "(", "self", ".", "_filename", ")" ]
[ 88, 4 ]
[ 94, 33 ]
python
de
['de', 'it', 'en']
False
extract_resources
(taxonomy_filepath)
Reads a .json representing a taxonomy and returns a data structure representing their hierarchical relationship :param taxonomy_file: a string representing a path to a .json file :return: Node representing root of taxonomic tree
Reads a .json representing a taxonomy and returns a data structure representing their hierarchical relationship :param taxonomy_file: a string representing a path to a .json file :return: Node representing root of taxonomic tree
def extract_resources(taxonomy_filepath): """ Reads a .json representing a taxonomy and returns a data structure representing their hierarchical relationship :param taxonomy_file: a string representing a path to a .json file :return: Node representing root of taxonomic tree """ try: with open(taxonomy_filepath, 'r') as fp: json_str = fp.read() json_data = json.loads(json_str) root = DictImporter().import_(json_data) finally: fp.close() return root
[ "def", "extract_resources", "(", "taxonomy_filepath", ")", ":", "try", ":", "with", "open", "(", "taxonomy_filepath", ",", "'r'", ")", "as", "fp", ":", "json_str", "=", "fp", ".", "read", "(", ")", "json_data", "=", "json", ".", "loads", "(", "json_str",...
[ 48, 0 ]
[ 64, 15 ]
python
en
['en', 'error', 'th']
False
read_users
(users_fp)
Reads a .csv from @user_fp representing users into a list of dictionaries, each elt of which represents a user :param user_fp: a .csv file where each line represents a user :return: a list of dictionaries
Reads a .csv from
def read_users(users_fp): """ Reads a .csv from @user_fp representing users into a list of dictionaries, each elt of which represents a user :param user_fp: a .csv file where each line represents a user :return: a list of dictionaries """ users = [] with open(users_fp, 'r') as fp: fields = fp.readline().rstrip().split(",") for line in fp: user = dict(zip(fields, line.rstrip().split(","))) users.append(user) return users
[ "def", "read_users", "(", "users_fp", ")", ":", "users", "=", "[", "]", "with", "open", "(", "users_fp", ",", "'r'", ")", "as", "fp", ":", "fields", "=", "fp", ".", "readline", "(", ")", ".", "rstrip", "(", ")", ".", "split", "(", "\",\"", ")", ...
[ 67, 0 ]
[ 80, 16 ]
python
en
['en', 'error', 'th']
False
publish_burst
(burst, num_events_counter, fp)
Publishes and prints each event :param burst: a list of dictionaries, each representing an event :param num_events_counter: an instance of Value shared by all processes to track the number of published events :param publisher: a PubSub publisher :param topic_path: a topic path for PubSub :return:
Publishes and prints each event :param burst: a list of dictionaries, each representing an event :param num_events_counter: an instance of Value shared by all processes to track the number of published events :param publisher: a PubSub publisher :param topic_path: a topic path for PubSub :return:
def publish_burst(burst, num_events_counter, fp): """ Publishes and prints each event :param burst: a list of dictionaries, each representing an event :param num_events_counter: an instance of Value shared by all processes to track the number of published events :param publisher: a PubSub publisher :param topic_path: a topic path for PubSub :return: """ for event_dict in burst: json_str = json.dumps(event_dict) num_events_counter.value += 1 fp.write(json_str + '\n')
[ "def", "publish_burst", "(", "burst", ",", "num_events_counter", ",", "fp", ")", ":", "for", "event_dict", "in", "burst", ":", "json_str", "=", "json", ".", "dumps", "(", "event_dict", ")", "num_events_counter", ".", "value", "+=", "1", "fp", ".", "write",...
[ 82, 0 ]
[ 95, 33 ]
python
en
['en', 'error', 'th']
False
create_user_process
(user, root, num_events_counter)
Code for continuously-running process representing a user publishing events to pubsub :param user: a dictionary representing characteristics of the user :param root: an instance of AnyNode representing the home page of a website :param num_events_counter: a variable shared among all processes used to track the number of events published :return:
Code for continuously-running process representing a user publishing events to pubsub :param user: a dictionary representing characteristics of the user :param root: an instance of AnyNode representing the home page of a website :param num_events_counter: a variable shared among all processes used to track the number of events published :return:
def create_user_process(user, root, num_events_counter): """ Code for continuously-running process representing a user publishing events to pubsub :param user: a dictionary representing characteristics of the user :param root: an instance of AnyNode representing the home page of a website :param num_events_counter: a variable shared among all processes used to track the number of events published :return: """ user['page'] = root user['is_online'] = True user['offline_events'] = [] user['time'] = datetime.now() while True: fp = open(str(os.getpid()) + ".out", "a") read_time_secs = random.uniform(0, page_read_secs * 2) user['time'] += timedelta(seconds=read_time_secs) prob = random.random() event = generate_event(user) if user['is_online']: if prob < online_to_offline_probability: user['is_online'] = False user['offline_events'] = [event] else: publish_burst([event], num_events_counter, fp) else: user['offline_events'].append(event) if prob < offline_to_online_probability: user['is_online'] = True publish_burst(user['offline_events'], num_events_counter, fp) user['offline_events'] = [] fp.close()
[ "def", "create_user_process", "(", "user", ",", "root", ",", "num_events_counter", ")", ":", "user", "[", "'page'", "]", "=", "root", "user", "[", "'is_online'", "]", "=", "True", "user", "[", "'offline_events'", "]", "=", "[", "]", "user", "[", "'time'"...
[ 97, 0 ]
[ 129, 18 ]
python
en
['en', 'error', 'th']
False
generate_event
(user)
Returns a dictionary representing an event :param user: :return:
Returns a dictionary representing an event :param user: :return:
def generate_event(user): """ Returns a dictionary representing an event :param user: :return: """ user['page'] = get_next_page(user) uri = str(user['page'].name) event_time = user['time'] current_time_str = event_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') file_size_bytes = random.choice(range(min_file_size_bytes, max_file_size_bytes)) http_request = "\"{} {} HTTP/1.0\"".format(random.choice(verbs), uri) http_response = random.choice(responses) event_values = [user['ip'], user['id'], float(user['lat']), float(user['lng']), current_time_str, http_request, http_response, file_size_bytes, user['user_agent']] return dict(zip(log_fields, event_values))
[ "def", "generate_event", "(", "user", ")", ":", "user", "[", "'page'", "]", "=", "get_next_page", "(", "user", ")", "uri", "=", "str", "(", "user", "[", "'page'", "]", ".", "name", ")", "event_time", "=", "user", "[", "'time'", "]", "current_time_str",...
[ 131, 0 ]
[ 147, 46 ]
python
en
['en', 'error', 'th']
False
get_next_page
(user)
Consults the user's representation of the web site taxonomy to determine the next page that they visit :param user: :return:
Consults the user's representation of the web site taxonomy to determine the next page that they visit :param user: :return:
def get_next_page(user): """ Consults the user's representation of the web site taxonomy to determine the next page that they visit :param user: :return: """ possible_next_pages = [user['page']] if not user['page'].is_leaf: possible_next_pages += list(user['page'].children) if (user['page'].parent != None): possible_next_pages += [user['page'].parent] next_page = random.choice(possible_next_pages) return next_page
[ "def", "get_next_page", "(", "user", ")", ":", "possible_next_pages", "=", "[", "user", "[", "'page'", "]", "]", "if", "not", "user", "[", "'page'", "]", ".", "is_leaf", ":", "possible_next_pages", "+=", "list", "(", "user", "[", "'page'", "]", ".", "c...
[ 149, 0 ]
[ 161, 20 ]
python
en
['en', 'error', 'th']
False
get_wsgi_application
()
The public interface to Django's WSGI support. Return a WSGI callable. Avoids making django.core.handlers.WSGIHandler a public API, in case the internal WSGI implementation changes or moves in the future.
The public interface to Django's WSGI support. Return a WSGI callable.
def get_wsgi_application(): """ The public interface to Django's WSGI support. Return a WSGI callable. Avoids making django.core.handlers.WSGIHandler a public API, in case the internal WSGI implementation changes or moves in the future. """ django.setup(set_prefix=False) return WSGIHandler()
[ "def", "get_wsgi_application", "(", ")", ":", "django", ".", "setup", "(", "set_prefix", "=", "False", ")", "return", "WSGIHandler", "(", ")" ]
[ 4, 0 ]
[ 12, 24 ]
python
en
['en', 'error', 'th']
False
Command.run_from_argv
(self, argv)
Pre-parse the command line to extract the value of the --testrunner option. This allows a test runner to define additional command line arguments.
Pre-parse the command line to extract the value of the --testrunner option. This allows a test runner to define additional command line arguments.
def run_from_argv(self, argv): """ Pre-parse the command line to extract the value of the --testrunner option. This allows a test runner to define additional command line arguments. """ self.test_runner = get_command_line_option(argv, '--testrunner') super().run_from_argv(argv)
[ "def", "run_from_argv", "(", "self", ",", "argv", ")", ":", "self", ".", "test_runner", "=", "get_command_line_option", "(", "argv", ",", "'--testrunner'", ")", "super", "(", ")", ".", "run_from_argv", "(", "argv", ")" ]
[ 15, 4 ]
[ 22, 35 ]
python
en
['en', 'error', 'th']
False
UserProfileManager.create_user
(self, email, name, password=None)
Create a new user profile
Create a new user profile
def create_user(self, email, name, password=None): """Create a new user profile""" if not email: raise ValueError('User must have an email address') email = self.normalize_email(email) user = self.model(email=email, name=name) user.set_password(password) user.save(using=self._db) return user
[ "def", "create_user", "(", "self", ",", "email", ",", "name", ",", "password", "=", "None", ")", ":", "if", "not", "email", ":", "raise", "ValueError", "(", "'User must have an email address'", ")", "email", "=", "self", ".", "normalize_email", "(", "email",...
[ 10, 4 ]
[ 21, 19 ]
python
en
['en', 'it', 'en']
True
UserProfileManager.create_superuser
(self, email, name, password)
Create and save a new superuser with given details
Create and save a new superuser with given details
def create_superuser(self, email, name, password): """Create and save a new superuser with given details""" user = self.create_user(email, name, password) user.is_superuser = True user.is_staff = True user.save(using=self._db) return user
[ "def", "create_superuser", "(", "self", ",", "email", ",", "name", ",", "password", ")", ":", "user", "=", "self", ".", "create_user", "(", "email", ",", "name", ",", "password", ")", "user", ".", "is_superuser", "=", "True", "user", ".", "is_staff", "...
[ 23, 4 ]
[ 31, 19 ]
python
en
['en', 'en', 'en']
True
UserProfile.get_full_name
(self)
Retrieve full name for user
Retrieve full name for user
def get_full_name(self): """Retrieve full name for user""" return self.name
[ "def", "get_full_name", "(", "self", ")", ":", "return", "self", ".", "name" ]
[ 45, 4 ]
[ 47, 24 ]
python
en
['en', 'no', 'en']
True
UserProfile.get_short_name
(self)
Retrieve short name of user
Retrieve short name of user
def get_short_name(self): """Retrieve short name of user""" return self.name
[ "def", "get_short_name", "(", "self", ")", ":", "return", "self", ".", "name" ]
[ 49, 4 ]
[ 51, 24 ]
python
en
['en', 'pt', 'en']
True
UserProfile.__str__
(self)
Return string representation of user
Return string representation of user
def __str__(self): """Return string representation of user""" return self.email
[ "def", "__str__", "(", "self", ")", ":", "return", "self", ".", "email" ]
[ 53, 4 ]
[ 55, 25 ]
python
en
['en', 'no', 'en']
True
ProfileFeedItem.__str__
(self)
Return the model as a string
Return the model as a string
def __str__(self): """Return the model as a string""" return self.status_text
[ "def", "__str__", "(", "self", ")", ":", "return", "self", ".", "status_text" ]
[ 66, 4 ]
[ 68, 31 ]
python
en
['en', 'en', 'en']
True
register_handler
(handler)
Install application-specific HDF5 image handler. :param handler: Handler object.
Install application-specific HDF5 image handler.
def register_handler(handler): """ Install application-specific HDF5 image handler. :param handler: Handler object. """ global _handler _handler = handler
[ "def", "register_handler", "(", "handler", ")", ":", "global", "_handler", "_handler", "=", "handler" ]
[ 16, 0 ]
[ 23, 22 ]
python
en
['en', 'error', 'th']
False
split_first
(s, delims)
.. deprecated:: 1.25 Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims.
.. deprecated:: 1.25
def split_first(s, delims): """ .. deprecated:: 1.25 Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. Example:: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """ min_idx = None min_delim = None for d in delims: idx = s.find(d) if idx < 0: continue if min_idx is None or idx < min_idx: min_idx = idx min_delim = d if min_idx is None or min_idx < 0: return s, "", None return s[:min_idx], s[min_idx + 1 :], min_delim
[ "def", "split_first", "(", "s", ",", "delims", ")", ":", "min_idx", "=", "None", "min_delim", "=", "None", "for", "d", "in", "delims", ":", "idx", "=", "s", ".", "find", "(", "d", ")", "if", "idx", "<", "0", ":", "continue", "if", "min_idx", "is"...
[ 174, 0 ]
[ 206, 51 ]
python
en
['en', 'error', 'th']
False
_encode_invalid_chars
(component, allowed_chars, encoding="utf-8")
Percent-encodes a URI component without reapplying onto an already percent-encoded component.
Percent-encodes a URI component without reapplying onto an already percent-encoded component.
def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"): """Percent-encodes a URI component without reapplying onto an already percent-encoded component. """ if component is None: return component component = six.ensure_text(component) # Normalize existing percent-encoded bytes. # Try to see if the component we're encoding is already percent-encoded # so we can skip all '%' characters but still encode all others. component, percent_encodings = PERCENT_RE.subn( lambda match: match.group(0).upper(), component ) uri_bytes = component.encode("utf-8", "surrogatepass") is_percent_encoded = percent_encodings == uri_bytes.count(b"%") encoded_component = bytearray() for i in range(0, len(uri_bytes)): # Will return a single character bytestring on both Python 2 & 3 byte = uri_bytes[i : i + 1] byte_ord = ord(byte) if (is_percent_encoded and byte == b"%") or ( byte_ord < 128 and byte.decode() in allowed_chars ): encoded_component += byte continue encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) return encoded_component.decode(encoding)
[ "def", "_encode_invalid_chars", "(", "component", ",", "allowed_chars", ",", "encoding", "=", "\"utf-8\"", ")", ":", "if", "component", "is", "None", ":", "return", "component", "component", "=", "six", ".", "ensure_text", "(", "component", ")", "# Normalize exi...
[ 209, 0 ]
[ 240, 45 ]
python
en
['en', 'en', 'en']
True
_encode_target
(target)
Percent-encodes a request target so that there are no invalid characters
Percent-encodes a request target so that there are no invalid characters
def _encode_target(target): """Percent-encodes a request target so that there are no invalid characters""" path, query = TARGET_RE.match(target).groups() target = _encode_invalid_chars(path, PATH_CHARS) query = _encode_invalid_chars(query, QUERY_CHARS) if query is not None: target += "?" + query return target
[ "def", "_encode_target", "(", "target", ")", ":", "path", ",", "query", "=", "TARGET_RE", ".", "match", "(", "target", ")", ".", "groups", "(", ")", "target", "=", "_encode_invalid_chars", "(", "path", ",", "PATH_CHARS", ")", "query", "=", "_encode_invalid...
[ 319, 0 ]
[ 326, 17 ]
python
en
['en', 'en', 'en']
True
parse_url
(url)
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. This parser is RFC 3986 compliant. The parser logic and helper functions are based heavily on work done in the ``rfc3986`` module. :param str url: URL to parse into a :class:`.Url` namedtuple. Partly backwards-compatible with :mod:`urlparse`. Example:: >>> parse_url('http://google.com/mail/') Url(scheme='http', host='google.com', port=None, path='/mail/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. This parser is RFC 3986 compliant.
def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. This parser is RFC 3986 compliant. The parser logic and helper functions are based heavily on work done in the ``rfc3986`` module. :param str url: URL to parse into a :class:`.Url` namedtuple. Partly backwards-compatible with :mod:`urlparse`. Example:: >>> parse_url('http://google.com/mail/') Url(scheme='http', host='google.com', port=None, path='/mail/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ if not url: # Empty return Url() source_url = url if not SCHEME_RE.search(url): url = "//" + url try: scheme, authority, path, query, fragment = URI_RE.match(url).groups() normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES if scheme: scheme = scheme.lower() if authority: auth, _, host_port = authority.rpartition("@") auth = auth or None host, port = _HOST_PORT_RE.match(host_port).groups() if auth and normalize_uri: auth = _encode_invalid_chars(auth, USERINFO_CHARS) if port == "": port = None else: auth, host, port = None, None, None if port is not None: port = int(port) if not (0 <= port <= 65535): raise LocationParseError(url) host = _normalize_host(host, scheme) if normalize_uri and path: path = _remove_path_dot_segments(path) path = _encode_invalid_chars(path, PATH_CHARS) if normalize_uri and query: query = _encode_invalid_chars(query, QUERY_CHARS) if normalize_uri and fragment: fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS) except (ValueError, AttributeError): return six.raise_from(LocationParseError(source_url), None) # For the sake of backwards compatibility we put empty # string values for path if there are any defined values # beyond the path in the URL. # TODO: Remove this when we break backwards compatibility. 
if not path: if query is not None or fragment is not None: path = "" else: path = None # Ensure that each part of the URL is a `str` for # backwards compatibility. if isinstance(url, six.text_type): ensure_func = six.ensure_text else: ensure_func = six.ensure_str def ensure_type(x): return x if x is None else ensure_func(x) return Url( scheme=ensure_type(scheme), auth=ensure_type(auth), host=ensure_type(host), port=port, path=ensure_type(path), query=ensure_type(query), fragment=ensure_type(fragment), )
[ "def", "parse_url", "(", "url", ")", ":", "if", "not", "url", ":", "# Empty", "return", "Url", "(", ")", "source_url", "=", "url", "if", "not", "SCHEME_RE", ".", "search", "(", "url", ")", ":", "url", "=", "\"//\"", "+", "url", "try", ":", "scheme"...
[ 329, 0 ]
[ 423, 5 ]
python
en
['en', 'error', 'th']
False
get_host
(url)
Deprecated. Use :func:`parse_url` instead.
Deprecated. Use :func:`parse_url` instead.
def get_host(url): """ Deprecated. Use :func:`parse_url` instead. """ p = parse_url(url) return p.scheme or "http", p.hostname, p.port
[ "def", "get_host", "(", "url", ")", ":", "p", "=", "parse_url", "(", "url", ")", "return", "p", ".", "scheme", "or", "\"http\"", ",", "p", ".", "hostname", ",", "p", ".", "port" ]
[ 426, 0 ]
[ 431, 49 ]
python
en
['en', 'error', 'th']
False
Url.hostname
(self)
For backwards-compatibility with urlparse. We're nice like that.
For backwards-compatibility with urlparse. We're nice like that.
def hostname(self): """For backwards-compatibility with urlparse. We're nice like that.""" return self.host
[ "def", "hostname", "(", "self", ")", ":", "return", "self", ".", "host" ]
[ 109, 4 ]
[ 111, 24 ]
python
en
['en', 'en', 'en']
True
Url.request_uri
(self)
Absolute path including the query string.
Absolute path including the query string.
def request_uri(self): """Absolute path including the query string.""" uri = self.path or "/" if self.query is not None: uri += "?" + self.query return uri
[ "def", "request_uri", "(", "self", ")", ":", "uri", "=", "self", ".", "path", "or", "\"/\"", "if", "self", ".", "query", "is", "not", "None", ":", "uri", "+=", "\"?\"", "+", "self", ".", "query", "return", "uri" ]
[ 114, 4 ]
[ 121, 18 ]
python
en
['en', 'en', 'en']
True
Url.netloc
(self)
Network location including host and port
Network location including host and port
def netloc(self): """Network location including host and port""" if self.port: return "%s:%d" % (self.host, self.port) return self.host
[ "def", "netloc", "(", "self", ")", ":", "if", "self", ".", "port", ":", "return", "\"%s:%d\"", "%", "(", "self", ".", "host", ",", "self", ".", "port", ")", "return", "self", ".", "host" ]
[ 124, 4 ]
[ 128, 24 ]
python
en
['en', 'en', 'en']
True
Url.url
(self)
Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url('http://google.com/mail/') >>> U.url 'http://google.com/mail/' >>> Url('http', 'username:password', 'host.com', 80, ... '/path', 'query', 'fragment').url 'http://username:password@host.com:80/path?query#fragment'
Convert self into a url
def url(self): """ Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url('http://google.com/mail/') >>> U.url 'http://google.com/mail/' >>> Url('http', 'username:password', 'host.com', 80, ... '/path', 'query', 'fragment').url 'http://username:password@host.com:80/path?query#fragment' """ scheme, auth, host, port, path, query, fragment = self url = u"" # We use "is not None" we want things to happen with empty strings (or 0 port) if scheme is not None: url += scheme + u"://" if auth is not None: url += auth + u"@" if host is not None: url += host if port is not None: url += u":" + str(port) if path is not None: url += path if query is not None: url += u"?" + query if fragment is not None: url += u"#" + fragment return url
[ "def", "url", "(", "self", ")", ":", "scheme", ",", "auth", ",", "host", ",", "port", ",", "path", ",", "query", ",", "fragment", "=", "self", "url", "=", "u\"\"", "# We use \"is not None\" we want things to happen with empty strings (or 0 port)", "if", "scheme", ...
[ 131, 4 ]
[ 168, 18 ]
python
en
['en', 'error', 'th']
False