hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bbb49b4b8be7c24fc7faee54154a345b7779f0c2 | 8,193 | py | Python | config/settings/production.py | jamesaud/se1-group4 | 5280b13dff33e72ce717318a8dd78a06cd6effb3 | [
"MIT"
] | 1 | 2021-09-09T15:43:09.000Z | 2021-09-09T15:43:09.000Z | config/settings/production.py | jamesaud/se1-group4 | 5280b13dff33e72ce717318a8dd78a06cd6effb3 | [
"MIT"
] | null | null | null | config/settings/production.py | jamesaud/se1-group4 | 5280b13dff33e72ce717318a8dd78a06cd6effb3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Production Configurations
- Use Amazon's S3 for storing static files and uploaded media
- Use mailgun to send emails
- Use Redis for cache
"""
from __future__ import absolute_import, unicode_literals
from boto.s3.connection import OrdinaryCallingFormat
from django.utils import six
import logging
import os
import raven
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# env() raises ImproperlyConfigured if DJANGO_SECRET_KEY is not in os.environ,
# so a misconfigured deployment fails fast instead of running with no key.
SECRET_KEY = env('DJANGO_SECRET_KEY')
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.  Currently commented out; re-enable when running behind
# a TLS-terminating proxy that sets the X-Forwarded-Proto header.
#SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Use Whitenoise to serve static files
# See: https://whitenoise.readthedocs.io/
# WhiteNoise is prepended so it can answer static-file requests before the
# rest of the middleware stack runs.
WHITENOISE_MIDDLEWARE = ('whitenoise.middleware.WhiteNoiseMiddleware', )
MIDDLEWARE = WHITENOISE_MIDDLEWARE + MIDDLEWARE
# SECURITY CONFIGURATION
# ------------------------------------------------------------------------------
# See https://docs.djangoproject.com/en/1.9/ref/middleware/#module-django.middleware.security
# and https://docs.djangoproject.com/ja/1.9/howto/deployment/checklist/#run-manage-py-check-deploy
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
    'DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=False)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
    'DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=False)
# NOTE(review): the hard-coded False values below disable the browser XSS
# filter and the secure/httponly flags on session and CSRF cookies.  For a
# production site served over HTTPS these are normally True -- confirm this
# is intentional before going live.
SECURE_BROWSER_XSS_FILTER = False
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_HTTPONLY = False
SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=False)
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
X_FRAME_OPTIONS = 'DENY'
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['jmatcher.com'])
# END SITE CONFIGURATION
# gunicorn serves the app; raven_compat reports unhandled errors to Sentry.
INSTALLED_APPS += ('gunicorn', )
INSTALLED_APPS += ('raven.contrib.django.raven_compat', )
# Prepend Sentry's middleware so every error response can carry a Sentry id.
RAVEN_MIDDLEWARE = ('raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware',)
MIDDLEWARE = RAVEN_MIDDLEWARE + MIDDLEWARE
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
# See: http://django-storages.readthedocs.io/en/latest/index.html
INSTALLED_APPS += (
    'storages',
)
AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
# Serve object URLs without signed query strings (objects must be public).
AWS_QUERYSTRING_AUTH = False
AWS_S3_CALLING_FORMAT = OrdinaryCallingFormat()
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
    'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
        AWS_EXPIRY, AWS_EXPIRY))
}
# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.
MEDIA_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage' # Configure with AWS
# Static Assets
# ------------------------
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# COMPRESSOR
# ------------------------------------------------------------------------------
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = STATIC_URL
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
                         default='jmatcher <noreply@jmatcher.com>')
EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[jmatcher] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# Anymail with Mailgun
INSTALLED_APPS += ("anymail", )
ANYMAIL = {
    "MAILGUN_API_KEY": env('DJANGO_MAILGUN_API_KEY'),
    "MAILGUN_SENDER_DOMAIN": env('MAILGUN_SENDER_DOMAIN')
}
EMAIL_BACKEND = "anymail.backends.mailgun.MailgunBackend"
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
# Cache compiled templates in production -- they do not change at runtime.
TEMPLATES[0]['OPTIONS']['loaders'] = [
    ('django.template.loaders.cached.Loader', [
        'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Use the Heroku-style specification
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db('DATABASE_URL')
# CACHING
# ------------------------------------------------------------------------------
# Heroku URL does not pass the DB number, so we append database "0" here.
REDIS_LOCATION = '{0}/{1}'.format(env('REDIS_URL', default='redis://127.0.0.1:6379'), 0)
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': REDIS_LOCATION,
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
            'IGNORE_EXCEPTIONS': True,  # mimics memcache behavior.
            # http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
        }
    }
}
# LOGGING CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# The root logger sends WARNING and above to Sentry; dedicated console
# handlers are wired up for the noisier django/raven/sentry loggers so their
# output stays out of Sentry.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
# Sentry Configuration
SENTRY_DSN = env('DJANGO_SENTRY_DSN')
SENTRY_CLIENT = env('DJANGO_SENTRY_CLIENT', default='raven.contrib.django.raven_compat.DjangoClient')
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'root': {
        'level': 'WARNING',
        'handlers': ['sentry', ],
    },
    'formatters': {
        'verbose': {
            'format': '%(levelname)s %(asctime)s %(module)s '
                      '%(process)d %(thread)d %(message)s'
        },
    },
    'handlers': {
        'sentry': {
            'level': 'ERROR',
            'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'verbose'
        }
    },
    'loggers': {
        'django.db.backends': {
            'level': 'ERROR',
            'handlers': ['console', ],
            'propagate': False,
        },
        'raven': {
            'level': 'DEBUG',
            'handlers': ['console', ],
            'propagate': False,
        },
        'sentry.errors': {
            'level': 'DEBUG',
            'handlers': ['console', ],
            'propagate': False,
        },
        # DisallowedHost errors go to both console and Sentry so host-header
        # probing is visible in production.
        'django.security.DisallowedHost': {
            'level': 'ERROR',
            'handlers': ['console', 'sentry', ],
            'propagate': False,
        },
    },
}
SENTRY_CELERY_LOGLEVEL = env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO)
RAVEN_CONFIG = {
    'CELERY_LOGLEVEL': env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO),
    'DSN': SENTRY_DSN,
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env('DJANGO_ADMIN_URL')
# Your production stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
| 35.314655 | 117 | 0.621018 |
f1f9cca0eb36c3504aee579f64b50b274c05a7c3 | 1,901 | py | Python | tests/provider/noaa/ghcn/test_api_stations.py | meteoDaniel/wetterdienst | 106a2fa9f887983281a6886c15bb3a845850dfb7 | [
"MIT"
] | 3 | 2020-06-19T09:21:07.000Z | 2020-06-30T22:12:42.000Z | tests/provider/noaa/ghcn/test_api_stations.py | meteoDaniel/wetterdienst | 106a2fa9f887983281a6886c15bb3a845850dfb7 | [
"MIT"
] | 27 | 2020-06-17T23:10:37.000Z | 2020-07-01T22:05:17.000Z | tests/provider/noaa/ghcn/test_api_stations.py | meteoDaniel/wetterdienst | 106a2fa9f887983281a6886c15bb3a845850dfb7 | [
"MIT"
] | 1 | 2020-06-22T22:37:45.000Z | 2020-06-22T22:37:45.000Z | import pandas as pd
from pandas._testing import assert_frame_equal
from wetterdienst.provider.noaa.ghcn import NoaaGhcnRequest
def test_noaa_ghcn_stations():
    """Snapshot test: the first five NOAA GHCN daily stations match known metadata."""
    actual = NoaaGhcnRequest("daily").all().df.iloc[:5, :]
    station_ids = [
        "ACW00011604",
        "ACW00011647",
        "AE000041196",
        "AEM00041194",
        "AEM00041217",
    ]
    from_dates = [
        "1949-01-01 00:00:00+00:00",
        "1957-01-01 00:00:00+00:00",
        "1944-01-01 00:00:00+00:00",
        "1983-01-01 00:00:00+00:00",
        "1983-01-01 00:00:00+00:00",
    ]
    to_dates = [
        "1949-12-31 00:00:00+00:00",
        "1970-12-31 00:00:00+00:00",
        "2022-12-31 00:00:00+00:00",
        "2022-12-31 00:00:00+00:00",
        "2022-12-31 00:00:00+00:00",
    ]
    names = [
        "ST JOHNS COOLIDGE FLD",
        "ST JOHNS",
        "SHARJAH INTER. AIRP",
        "DUBAI INTL",
        "ABU DHABI INTL",
    ]
    expected = pd.DataFrame(
        {
            "station_id": pd.Series(station_ids, dtype=str),
            "from_date": pd.to_datetime(from_dates),
            "to_date": pd.to_datetime(to_dates),
            "height": pd.Series([10.1, 19.2, 34.0, 10.4, 26.8], dtype=float),
            "latitude": pd.Series([17.1167, 17.1333, 25.333, 25.255, 24.433], dtype=float),
            "longitude": pd.Series([-61.7833, -61.7833, 55.517, 55.364, 54.651], dtype=float),
            "name": pd.Series(names, dtype=str),
            "state": pd.Series([pd.NA] * 5, dtype=str),
        }
    )
    assert_frame_equal(actual, expected)
| 33.350877 | 94 | 0.409784 |
f673400ad3e5dedb315f834e41a9c4c8b07654c3 | 3,151 | py | Python | export-neptune-to-elasticsearch/lambda/export_neptune_to_kinesis.py | kaddybrar/amazon-neptune-tools | 86ce4f1451e3a7446f48db64a5b3bdfbf9a99208 | [
"Apache-2.0"
] | 1 | 2020-11-11T15:04:56.000Z | 2020-11-11T15:04:56.000Z | export-neptune-to-elasticsearch/lambda/export_neptune_to_kinesis.py | yugijimoh/amazon-neptune-tools | 18cd7c80736a39fa1b74be16264fb0b02ff82784 | [
"Apache-2.0"
] | null | null | null | export-neptune-to-elasticsearch/lambda/export_neptune_to_kinesis.py | yugijimoh/amazon-neptune-tools | 18cd7c80736a39fa1b74be16264fb0b02ff82784 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import json
import logging
import os
from datetime import datetime
from datetime import timezone

import boto3
client = boto3.client('batch')
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def trigger_neptune_export():
    """Submit an AWS Batch job that runs neptune-export and streams to Kinesis.

    All configuration is read from environment variables (Neptune endpoint,
    Kinesis stream name, engine type, etc.). Missing variables raise KeyError,
    which fails the Lambda invocation fast.

    Returns:
        The dict response from AWS Batch ``submit_job`` (contains jobName/jobId).
    """
    neptune_export_jar_uri = os.environ['NEPTUNE_EXPORT_JAR_URI']
    neptune_endpoint = os.environ['NEPTUNE_ENDPOINT']
    neptune_port = os.environ['NEPTUNE_PORT']
    neptune_engine = os.environ['NEPTUNE_ENGINE']
    stream_name = os.environ['STREAM_NAME']
    job_suffix = os.environ['JOB_SUFFIX']
    region = os.environ['AWS_REGION']
    concurrency = os.environ['CONCURRENCY']
    scope = os.environ['EXPORT_SCOPE']
    additional_params = os.environ['ADDITIONAL_PARAMS']
    # Normalize the extra parameters to carry exactly one leading space so
    # they splice cleanly onto the end of the command line.
    if additional_params:
        additional_params = additional_params if additional_params.startswith(' ') else ' {}'.format(additional_params)
    else:
        additional_params = ''
    # SPARQL endpoints are exported with export-rdf and without IAM auth;
    # property-graph-only options (concurrency, scope) apply to gremlin only.
    use_iam_auth = '' if neptune_engine == 'sparql' else ' --use-iam-auth'
    export_command = 'export-pg' if neptune_engine == 'gremlin' else 'export-rdf'
    concurrency_param = ' --concurrency {}'.format(concurrency) if neptune_engine == 'gremlin' else ''
    scope_param = ' --scope {}'.format(scope) if neptune_engine == 'gremlin' else ''
    command = 'df -h && wget {} && export SERVICE_REGION="{}" && java -Xms8g -Xmx8g -jar neptune-export.jar {} -e {} -p {} -d /neptune/results --output stream --stream-name {} --region {} --format neptuneStreamsJson --log-level info --use-ssl{}{}{}{}'.format(
        neptune_export_jar_uri,
        region,
        export_command,
        neptune_endpoint,
        neptune_port,
        stream_name,
        region,
        use_iam_auth,
        concurrency_param,
        scope_param,
        additional_params)
    # Lazy %-style args avoid building the message when INFO is disabled.
    logger.info('Command: %s', command)
    # Millisecond timestamp keeps job names unique. Use an aware UTC datetime:
    # naive datetime.utcnow().timestamp() is interpreted as *local* time by
    # .timestamp(), yielding a wrong epoch on any non-UTC host.
    timestamp_ms = round(datetime.now(timezone.utc).timestamp() * 1000)
    submit_job_response = client.submit_job(
        jobName='export-neptune-to-kinesis-{}-{}'.format(job_suffix, timestamp_ms),
        jobQueue='export-neptune-to-kinesis-queue-{}'.format(job_suffix),
        jobDefinition='export-neptune-to-kinesis-job-{}'.format(job_suffix),
        containerOverrides={
            'command': [
                'sh',
                '-c',
                command
            ]
        }
    )
    return submit_job_response
def lambda_handler(event, context):
    """AWS Lambda entry point: kick off the Neptune-to-Kinesis export Batch job.

    The incoming event and context are ignored; the response echoes the
    submitted Batch job's name and id.
    """
    submit_response = trigger_neptune_export()
    return {
        'jobName': submit_response['jobName'],
        'jobId': submit_response['jobId'],
    }
| 35.404494 | 259 | 0.649 |
4f316446431aa1ba7fd888ed58cf396fb4d95574 | 23,595 | py | Python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/operations/_gallery_image_versions_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/operations/_gallery_image_versions_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_03_01/operations/_gallery_image_versions_operations.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class GalleryImageVersionsOperations(object):
"""GalleryImageVersionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2019_03_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        gallery_name,  # type: str
        gallery_image_name,  # type: str
        gallery_image_version_name,  # type: str
        gallery_image_version,  # type: "models.GalleryImageVersion"
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.GalleryImageVersion"
        """Issue the initial PUT request of the create-or-update long-running
        operation and deserialize the first response.

        Called by :meth:`begin_create_or_update`; the poller drives the rest
        of the operation from the returned pipeline response.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.GalleryImageVersion"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-03-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL from the metadata template and the path arguments.
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
            'galleryImageName': self._serialize.url("gallery_image_name", gallery_image_name, 'str'),
            'galleryImageVersionName': self._serialize.url("gallery_image_version_name", gallery_image_version_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the request body and send the PUT through the pipeline.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(gallery_image_version, 'GalleryImageVersion')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 (updated), 201 (created) and 202 (accepted) all carry the
        # resource payload; deserialize whichever one was returned.
        if response.status_code == 200:
            deserialized = self._deserialize('GalleryImageVersion', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('GalleryImageVersion', pipeline_response)

        if response.status_code == 202:
            deserialized = self._deserialize('GalleryImageVersion', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/images/{galleryImageName}/versions/{galleryImageVersionName}'}  # type: ignore
    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        gallery_name,  # type: str
        gallery_image_name,  # type: str
        gallery_image_version_name,  # type: str
        gallery_image_version,  # type: "models.GalleryImageVersion"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["models.GalleryImageVersion"]
        """Create or update a gallery Image Version.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery in which the Image Definition
         resides.
        :type gallery_name: str
        :param gallery_image_name: The name of the gallery Image Definition in which the Image Version
         is to be created.
        :type gallery_image_name: str
        :param gallery_image_version_name: The name of the gallery Image Version to be created. Needs
         to follow semantic version name pattern: The allowed characters are digit and period. Digits
         must be within the range of a 32-bit integer. Format:
         :code:`<MajorVersion>`.:code:`<MinorVersion>`.:code:`<Patch>`.
        :type gallery_image_version_name: str
        :param gallery_image_version: Parameters supplied to the create or update gallery Image Version
         operation.
        :type gallery_image_version: ~azure.mgmt.compute.v2019_03_01.models.GalleryImageVersion
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either GalleryImageVersion or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_03_01.models.GalleryImageVersion]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["models.GalleryImageVersion"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial PUT when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                gallery_name=gallery_name,
                gallery_image_name=gallery_image_name,
                gallery_image_version_name=gallery_image_version_name,
                gallery_image_version=gallery_image_version,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final-state callback: deserialize the terminal response.
            deserialized = self._deserialize('GalleryImageVersion', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # Select the polling strategy: default ARM polling, no polling, or a
        # caller-supplied polling object.
        if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/images/{galleryImageName}/versions/{galleryImageVersionName}'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        gallery_name,  # type: str
        gallery_image_name,  # type: str
        gallery_image_version_name,  # type: str
        expand=None,  # type: Optional[Union[str, "models.ReplicationStatusTypes"]]
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.GalleryImageVersion"
        """Retrieves information about a gallery Image Version.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery in which the Image Definition
         resides.
        :type gallery_name: str
        :param gallery_image_name: The name of the gallery Image Definition in which the Image Version
         resides.
        :type gallery_image_name: str
        :param gallery_image_version_name: The name of the gallery Image Version to be retrieved.
        :type gallery_image_version_name: str
        :param expand: The expand expression to apply on the operation.
        :type expand: str or ~azure.mgmt.compute.v2019_03_01.models.ReplicationStatusTypes
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: GalleryImageVersion, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2019_03_01.models.GalleryImageVersion
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.GalleryImageVersion"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-03-01"
        accept = "application/json"

        # Construct URL from the metadata template and the path arguments.
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
            'galleryImageName': self._serialize.url("gallery_image_name", gallery_image_name, 'str'),
            'galleryImageVersionName': self._serialize.url("gallery_image_version_name", gallery_image_version_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters ($expand is optional and only sent when given).
        query_parameters = {}  # type: Dict[str, Any]
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('GalleryImageVersion', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/images/{galleryImageName}/versions/{galleryImageVersionName}'}  # type: ignore
    def _delete_initial(
        self,
        resource_group_name,  # type: str
        gallery_name,  # type: str
        gallery_image_name,  # type: str
        gallery_image_version_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Issue the initial DELETE request of the delete long-running
        operation.

        Called by :meth:`begin_delete`; returns None (there is no body to
        deserialize for a delete).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-03-01"
        accept = "application/json"

        # Construct URL from the metadata template and the path arguments.
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
            'galleryImageName': self._serialize.url("gallery_image_name", gallery_image_name, 'str'),
            'galleryImageVersionName': self._serialize.url("gallery_image_version_name", gallery_image_version_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 (done), 202 (accepted, in progress) and 204 (no content) are
        # all valid outcomes for a delete.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/images/{galleryImageName}/versions/{galleryImageVersionName}'}  # type: ignore
    def begin_delete(
        self,
        resource_group_name,  # type: str
        gallery_name,  # type: str
        gallery_image_name,  # type: str
        gallery_image_version_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Delete a gallery Image Version.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery in which the Image Definition
         resides.
        :type gallery_name: str
        :param gallery_image_name: The name of the gallery Image Definition in which the Image Version
         resides.
        :type gallery_image_name: str
        :param gallery_image_version_name: The name of the gallery Image Version to be deleted.
        :type gallery_image_version_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial DELETE when not resuming from a saved state.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                gallery_name=gallery_name,
                gallery_image_name=gallery_image_name,
                gallery_image_version_name=gallery_image_version_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final-state callback: a delete has no body to deserialize.
            if cls:
                return cls(pipeline_response, None, {})

        # Select the polling strategy: default ARM polling, no polling, or a
        # caller-supplied polling object.
        if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/images/{galleryImageName}/versions/{galleryImageVersionName}'}  # type: ignore
def list_by_gallery_image(
    self,
    resource_group_name,  # type: str
    gallery_name,  # type: str
    gallery_image_name,  # type: str
    **kwargs  # type: Any
):
    # type: (...) -> Iterable["models.GalleryImageVersionList"]
    """List gallery Image Versions in a gallery Image Definition.

    Returns a lazy pager: each page is fetched on demand, following the
    ``next_link`` URL returned by the service.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param gallery_name: The name of the Shared Image Gallery in which the Image Definition
     resides.
    :type gallery_name: str
    :param gallery_image_name: The name of the Shared Image Gallery Image Definition from which the
     Image Versions are to be listed.
    :type gallery_image_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either GalleryImageVersionList or the result of cls(response)
    :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_03_01.models.GalleryImageVersionList]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["models.GalleryImageVersionList"]
    # Map well-known HTTP status codes to typed exceptions; callers may
    # extend/override the mapping via the 'error_map' keyword argument.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2019-03-01"
    accept = "application/json"

    def prepare_request(next_link=None):
        # Build the GET request: the first page uses the operation's URL
        # template; subsequent pages reuse the service-provided next_link
        # verbatim (it already carries the query parameters).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        if not next_link:
            # Construct URL
            url = self.list_by_gallery_image.metadata['url']  # type: ignore
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'galleryName': self._serialize.url("gallery_name", gallery_name, 'str'),
                'galleryImageName': self._serialize.url("gallery_image_name", gallery_image_name, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    def extract_data(pipeline_response):
        # Deserialize one page and hand back (continuation token, items).
        deserialized = self._deserialize('GalleryImageVersionList', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, iter(list_of_elem)

    def get_next(next_link=None):
        # Fetch one page, raising a typed error on any non-200 response.
        request = prepare_request(next_link)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        return pipeline_response

    return ItemPaged(
        get_next, extract_data
    )
list_by_gallery_image.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}/images/{galleryImageName}/versions'}  # type: ignore
| 50.524625 | 247 | 0.670184 |
1d6a05237bd8dd1d73108b584fc29c99b3734df6 | 1,351 | py | Python | utils/Config.py | salem-devloper/test2 | fae117616924e7044114d6b3cd27ce02de954891 | [
"MIT"
] | null | null | null | utils/Config.py | salem-devloper/test2 | fae117616924e7044114d6b3cd27ce02de954891 | [
"MIT"
] | null | null | null | utils/Config.py | salem-devloper/test2 | fae117616924e7044114d6b3cd27ce02de954891 | [
"MIT"
] | null | null | null | """
Common utility functions and classes
"""
import torch
class Option():
    """Training configuration for the lung-segmentation experiment.

    All settings are plain class attributes, read through the shared ``opt``
    instance created below; there is no command-line parsing.
    """
    name = "Lung-Segmentation"
    # root dir of training and validation set
    root_dir = '../input/lung-segmentation-drive/lung-segmentation'
    # raw test images and destination for predicted masks
    test_root = '/media/storage/wu1114/RSNA/stage_1_test'
    result_root = '/media/storage/wu1114/RSNA/rsna-unet/test_result/'
    img_size = 512  # images are resized to img_size x img_size
    num_workers = 1  # number of threads for data loading
    shuffle = False  # shuffle the data set
    batch_size = 8  # GTX1060 3G Memory
    epochs = 150  # number of epochs to train
    plot_every = 5  # vis every N batches
    is_train = True  # True for training, False for making prediction
    save_model = True  # True for saving the model, False for not saving the model
    caffe_pretrain = False
    env = 'RSNA-UNet'  # experiment/environment label (presumably a visdom env — confirm)
    n_gpu = 2  # number of GPUs
    learning_rate = 1e-3  # learning rate
    weight_decay = 1e-4  # weight decay
    pin_memory = True  # use pinned (page-locked) memory. when using CUDA, set to True
    is_cuda = torch.cuda.is_available()  # True --> GPU
    num_gpus = torch.cuda.device_count()  # number of GPUs
    checkpoint_dir = "./checkpoints"  # dir to save checkpoints
    dtype = torch.cuda.FloatTensor if is_cuda else torch.Tensor  # data type
opt = Option() | 34.641026 | 88 | 0.664693 |
e842a7f565e212faf391daf8cff8e38aae1dc565 | 7,303 | py | Python | ABBA-BABA/freq.py | evodify/population-genetic-analyses | 5295f9d68736ac02fc5f3ece43dadd5bf4e98e6f | [
"MIT"
] | 3 | 2018-01-31T09:57:10.000Z | 2021-02-03T18:34:01.000Z | ABBA-BABA/freq.py | evodify/population-genetic-analyses | 5295f9d68736ac02fc5f3ece43dadd5bf4e98e6f | [
"MIT"
] | null | null | null | ABBA-BABA/freq.py | evodify/population-genetic-analyses | 5295f9d68736ac02fc5f3ece43dadd5bf4e98e6f | [
"MIT"
] | 1 | 2019-09-02T06:13:29.000Z | 2019-09-02T06:13:29.000Z | #!/usr/bin/env python2
"""
Original script was written by Simon Martin (shm45@cam.ac.uk)
Publication: MARTIN ET AL. 2013 GENOME-WIDE EVIDENCE FOR SPECIATION WITH GENE FLOW IN HELICONIUS BUTTERFLIES
Email: shm45@cam.ac.uk
07-04-2013
This script calculates the frequency of either the minor allele or the derived allele in each defined population.
To calculate the derived allele frequency, an out-group sequence/population must be defined as a reference for the ancestral state.
If multiple out-groups exist and they are not fixed for a single allele, a consensus can be taken.
Only biallelic sites will be considered.
Input "calls file" format:
scaffold position ind1 ind2 ind3 etc...
scf1 1 T T C Y
NOTE a header row with unique names for all individuals is essential.
Output format is .csv with scaffold, position and frequencies. "NA" will be specified for invalid or monomorphic sites.
Command:
python freq.py -i <input file> -o <output file> -p <population_string> -a derived -O <out-group population name> --consensus -M 3
the populations are specified in a single string as follows:
-p "pop1Name[ind1,ind2,ind3,ind4];pop2Name[ind1,ind2,ind3,ind4]" (quotation marks must be present to avoid conflict with unix)
the out-group must match one of the population names. e.g. -O pop2Name
***********************************************************************************************
Modified to use single nucleotide gaps "-" by Dmytro Kryvokhyzha (dmytro.kryvokhyzha@evobio.eu)
"""
import sys
### Functions
def get_intv(string, borders="()", inc=False):
    """Return the substrings of *string* enclosed by a pair of border characters.

    The n-th opening border is paired with the n-th closing border; any surplus
    unmatched borders are ignored.  With inc=True, the border characters
    themselves are included in each returned substring.
    """
    if len(borders) != 2:
        print("WARNING: borders must contain two characters")
    opens = [i for i, ch in enumerate(string) if ch == borders[0]]
    closes = [i + 1 for i, ch in enumerate(string) if ch == borders[1]]
    # zip() pairs the i-th open with the i-th close and silently drops the
    # surplus, matching the original min(len(opens), len(closes)) behaviour.
    if inc:
        return [string[a:b] for a, b in zip(opens, closes)]
    return [string[a + 1:b - 1] for a, b in zip(opens, closes)]
def haplo(calls):
    """Expand diploid genotype calls into a flat list of haploid bases.

    Each unambiguous call (A, C, G, T, N, or the gap character '-') is emitted
    twice; IUPAC ambiguity codes are expanded into their two constituent
    bases.  Any unrecognised character triggers a warning and is emitted as a
    pair of 'N's.

    :param calls: iterable of single-character genotype calls
    :return: list twice as long as *calls*, two haploid bases per call
    """
    # IUPAC ambiguity codes mapped to their two constituent bases; this
    # replaces the original long if/elif chain with a data-driven lookup.
    ambiguous = {
        "K": ("G", "T"),
        "M": ("A", "C"),
        "R": ("A", "G"),
        "S": ("C", "G"),
        "W": ("A", "T"),
        "Y": ("C", "T"),
    }
    output = []
    for call in calls:
        if call in "ACGTN-":
            output.append(call)
            output.append(call)
        elif call in ambiguous:
            output.extend(ambiguous[call])
        else:
            print("WARNING " + call + " is not recognised as a valid base or ambiguous base")
            output.append("N")
            output.append("N")
    return output
def getOptionValue(option):
    """Return the command-line argument that directly follows *option*.

    Raises IndexError when the flag is absent, like the original.
    """
    flag_index = [idx for idx, arg in enumerate(sys.argv) if arg == option][0]
    return sys.argv[flag_index + 1]
def unique(things):
    """Return the distinct elements of *things* in sorted order."""
    # A set drops duplicates; sorted() reproduces the original's final sort.
    return sorted(set(things))
def exclude(things, x):
    """Return a copy of *things* with every element equal to *x* removed."""
    return [item for item in things if item != x]
def uniqueAlleles(bases):
    """Return the sorted distinct alleles (A, C, G, T or '-') present in *bases*."""
    # Expand genotype calls into haploid bases, then keep real alleles only.
    return unique([b for b in haplo(bases) if b in "ACGT-"])
def baseFreq(bases, base):
    """Return the frequency of *base* among the valid haploid bases of *bases*.

    Raises ZeroDivisionError when no valid bases are present, exactly like
    the original implementation.
    """
    valid = [b for b in haplo(bases) if b in "ACGT-"]
    return float(valid.count(base)) / len(valid)
def mostCommon(things):
    """Return every element of *things* tied for the maximal count, in sorted order."""
    distinct = unique(things)
    counts = [things.count(item) for item in distinct]
    top = max(counts)
    return [item for item, c in zip(distinct, counts) if c == top]
def majorAllele(bases):
    """Return the most common valid haploid base in *bases*.

    Ties are broken alphabetically because mostCommon returns sorted ties and
    the first one is taken, as in the original.
    """
    valid = [b for b in haplo(bases) if b in "ACGT-"]
    return mostCommon(valid)[0][0]
### get files
if "-i" in sys.argv:
fileName = getOptionValue("-i")
else:
print "\nplease specify input file name using -i <file_name> \n"
sys.exit()
file = open(fileName, "rU")
line = file.readline()
names = line.split()
line= file.readline()
if "-o" in sys.argv:
outName = getOptionValue("-o")
out = open(outName, "w")
else:
print "\nplease specify output file name using -o <file_name> \n"
sys.exit()
if "-p" in sys.argv:
popString = getOptionValue("-p")
else:
print "\nplease specify populations using -p\n"
sys.exit()
if "-a" in sys.argv:
if getOptionValue("-a") == "derived":
derived = True
elif getOptionValue("-a") == "minor":
derived = False
else:
print "\nAllele (-a) can only be 'minor' or 'derived'."
sys.exit()
else:
print "\nPlease specify whether the derived or minor allele frequency must be calculated.\n"
sys.exit()
if "-O" in sys.argv:
outGroup = getOptionValue("-O")
else:
if derived:
print "\nPlease specify outgroup population using -O\n"
sys.exit()
if "-M" in sys.argv:
popMin = int(getOptionValue("-M"))
else:
popMin = 1
if "--consensus" in sys.argv:
outgroupConsensus = True
else:
outgroupConsensus = False
pops = []
#for each population, store the name and individual names
for popData in popString.strip("\"").split(";"):
currentPop = popData.split("[")[0]
pops.append(currentPop)
vars()[currentPop + "Inds"] = get_intv(popData,"[]")[0].split(",")
for ind in vars()[currentPop + "Inds"]:
if ind not in names:
print ind, "not found in header line."
sys.exit()
if derived and outGroup not in pops:
print "\nThe specified outgroup, ", outGroup, ", was not a specified population."
sys.exit()
# write output header
out.write(names[0] + "," + names[1])
for pop in pops:
out.write("," + pop)
out.write("\n")
linesDone = 0
### for each line, check if its a biallelic SNP, if so, continue to other populations
while len(line) > 1:
objects = line.split()
output = [objects[0],objects[1]]
# check not triallelic
alleles = uniqueAlleles(objects[2:])
if len(alleles) == 0 or len(alleles) > 2:
for pop in pops:
output.append("NA")
elif len(alleles) == 1:
for pop in pops:
output.append("0.0")
else:
# get major allele or ancestral state
if derived:
ogBases = []
for ind in vars()[outGroup + "Inds"]:
ogBases.append(objects[names.index(ind)])
ogAlleles = uniqueAlleles(ogBases)
if len(ogAlleles) == 1:
refState = ogAlleles[0]
elif len(ogAlleles) == 2 and outgroupConsensus:
refState = majorAllele(ogBases)
else:
refState = None
else:
refState = majorAllele(objects[2:])
if refState:
for pop in pops:
popCalls = []
for ind in vars()[pop + "Inds"]:
popCalls.append(objects[names.index(ind)])
if len(exclude(popCalls,"N")) >= popMin:
freq = 1 - baseFreq(popCalls,refState)
else: freq = "NA"
output.append(str(freq))
else:
for pop in pops:
output.append("NA")
out.write(",".join(output))
out.write("\n")
line = file.readline()
linesDone += 1
if linesDone % 1000000 == 0:
print linesDone, "lines done..."
out.close
file.close
| 27.25 | 131 | 0.639737 |
cae1a43be9c3aca4627671fec58a01136616ebae | 389 | py | Python | lib/python2.7/site-packages/networkx/algorithms/centrality/__init__.py | nishaero/wifi-userseg-ryu | 1132f2c813b79eff755bdd1a9e73e7ad3980af7c | [
"Apache-2.0"
] | 15 | 2018-04-26T08:17:18.000Z | 2021-03-05T08:44:13.000Z | lib/python2.7/site-packages/networkx/algorithms/centrality/__init__.py | nishaero/wifi-userseg-ryu | 1132f2c813b79eff755bdd1a9e73e7ad3980af7c | [
"Apache-2.0"
] | null | null | null | lib/python2.7/site-packages/networkx/algorithms/centrality/__init__.py | nishaero/wifi-userseg-ryu | 1132f2c813b79eff755bdd1a9e73e7ad3980af7c | [
"Apache-2.0"
] | 6 | 2018-04-12T15:49:27.000Z | 2022-01-27T12:34:50.000Z | from .betweenness import *
from .betweenness_subset import *
from .closeness import *
from .communicability_alg import *
from .current_flow_closeness import *
from .current_flow_betweenness import *
from .current_flow_betweenness_subset import *
from .degree_alg import *
from .dispersion import *
from .eigenvector import *
from .harmonic import *
from .katz import *
from .load import *
| 27.785714 | 46 | 0.799486 |
3d251e8cfa869c458ef3292bb118592cac295650 | 125 | py | Python | fau_colors/__init__.py | mgmax/fau_colors | e755459c12cc75d03be9d7e16a56b1c8027ae37b | [
"MIT"
] | null | null | null | fau_colors/__init__.py | mgmax/fau_colors | e755459c12cc75d03be9d7e16a56b1c8027ae37b | [
"MIT"
] | null | null | null | fau_colors/__init__.py | mgmax/fau_colors | e755459c12cc75d03be9d7e16a56b1c8027ae37b | [
"MIT"
] | null | null | null | __version__ = "1.0.1"
from fau_colors.v2021 import cmaps, colors, colors_all, colors_dark, register_cmaps, unregister_cmaps
| 31.25 | 101 | 0.808 |
0c9984321f8412a6e44340f8fbb940d9c66a0ab2 | 316 | py | Python | ouijabot_proxy/setup.py | StanfordMSL/ouijabot | 7248d95212123c32c94ef1faaedd2e4907a4d8e1 | [
"MIT"
] | 3 | 2017-11-06T08:38:16.000Z | 2019-02-05T22:03:00.000Z | ouijabot_proxy/setup.py | StanfordMSL/ouijabot | 7248d95212123c32c94ef1faaedd2e4907a4d8e1 | [
"MIT"
] | 1 | 2019-02-05T22:34:55.000Z | 2019-02-05T22:34:55.000Z | ouijabot_proxy/setup.py | StanfordMSL/ouijabot | 7248d95212123c32c94ef1faaedd2e4907a4d8e1 | [
"MIT"
] | 1 | 2018-08-10T18:09:42.000Z | 2018-08-10T18:09:42.000Z | ## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
setup_args = generate_distutils_setup(
    # the single Python package installed by this ROS node
    packages=['ouijabot_proxy'],
    # Python sources live under src/ (catkin convention)
    package_dir={'': 'src'}
)
setup(**setup_args) | 26.333333 | 65 | 0.759494 |
81a93559c98f79087f8b432a7718956d15e975fa | 40,964 | py | Python | pkgs/conda-4.1.11-py27_0/lib/python2.7/site-packages/conda/install.py | wangyum/anaconda | 6e5a0dbead3327661d73a61e85414cf92aa52be6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | pkgs/conda-4.1.11-py27_0/lib/python2.7/site-packages/conda/install.py | wangyum/anaconda | 6e5a0dbead3327661d73a61e85414cf92aa52be6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | pkgs/conda-4.1.11-py27_0/lib/python2.7/site-packages/conda/install.py | wangyum/anaconda | 6e5a0dbead3327661d73a61e85414cf92aa52be6 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # (c) 2012-2014 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
#
# conda is distributed under the terms of the BSD 3-clause license.
# Consult LICENSE.txt or http://opensource.org/licenses/BSD-3-Clause.
""" This module contains:
* all low-level code for extracting, linking and unlinking packages
* a very simple CLI
These API functions have argument names referring to:
dist: canonical package name (e.g. 'numpy-1.6.2-py26_0')
pkgs_dir: the "packages directory" (e.g. '/opt/anaconda/pkgs' or
'/home/joe/envs/.pkgs')
prefix: the prefix of a particular environment, which may also
be the "default" environment (i.e. sys.prefix),
but is otherwise something like '/opt/anaconda/envs/foo',
or even any prefix, e.g. '/home/joe/myenv'
Also, this module is directly invoked by the (self extracting (sfx)) tarball
installer to create the initial environment, therefore it needs to be
standalone, i.e. not import any other parts of `conda` (only depend on
the standard library).
"""
from __future__ import print_function, division, absolute_import
import errno
import functools
import json
import logging
import os
import re
import shlex
import shutil
import stat
import subprocess
import sys
import tarfile
import tempfile
import time
import traceback
from os.path import (abspath, basename, dirname, isdir, isfile, islink,
join, normpath, normcase)
on_win = bool(sys.platform == "win32")
try:
    from conda.lock import Locked
    from conda.utils import win_path_to_unix, url_path
    from conda.config import remove_binstar_tokens, pkgs_dirs, url_channel
    import conda.config as config
except ImportError:
    # Make sure this still works as a standalone script for the Anaconda
    # installer.
    pkgs_dirs = [sys.prefix]

    class Locked(object):
        # No-op stand-in for conda.lock.Locked: a context manager that
        # performs no actual locking.
        def __init__(self, *args, **kwargs):
            pass

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            pass

    def win_path_to_unix(path, root_prefix=""):
        """Convert a path or ;-separated string of paths into a unix representation

        Does not add cygdrive.  If you need that, set root_prefix to "/cygdrive"
        """
        path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))'  # noqa

        def translation(found_path):
            found = found_path.group(1).replace("\\", "/").replace(":", "")
            return root_prefix + "/" + found
        return re.sub(path_re, translation, path).replace(";/", ":/")

    def url_path(path):
        # file:// URL for a local path (Windows drive colons become '|').
        path = abspath(path)
        if on_win:
            path = '/' + path.replace(':', '|').replace('\\', '/')
        return 'file://%s' % path

    # There won't be any binstar tokens in the installer anyway
    def remove_binstar_tokens(url):
        return url

    # A simpler version of url_channel will do
    def url_channel(url):
        return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults'

    # NOTE(review): pkgs_dirs was also assigned above; this final value wins.
    pkgs_dirs = [join(sys.prefix, 'pkgs')]
if on_win:
    import ctypes
    from ctypes import wintypes

    # Win32 API bindings used to create hard and symbolic links on Windows.
    CreateHardLink = ctypes.windll.kernel32.CreateHardLinkW
    CreateHardLink.restype = wintypes.BOOL
    CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                               wintypes.LPVOID]
    try:
        CreateSymbolicLink = ctypes.windll.kernel32.CreateSymbolicLinkW
        CreateSymbolicLink.restype = wintypes.BOOL
        CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR,
                                       wintypes.DWORD]
    except AttributeError:
        # Symbolic links unavailable on this Windows version (e.g. XP).
        CreateSymbolicLink = None

    def win_hard_link(src, dst):
        "Equivalent to os.link, using the win32 CreateHardLink call."
        if not CreateHardLink(dst, src, None):
            raise OSError('win32 hard link failed')

    def win_soft_link(src, dst):
        "Equivalent to os.symlink, using the win32 CreateSymbolicLink call."
        if CreateSymbolicLink is None:
            raise OSError('win32 soft link not supported')
        if not CreateSymbolicLink(dst, src, isdir(src)):
            raise OSError('win32 soft link failed')

    def win_conda_bat_redirect(src, dst, shell):
        """Special function for Windows XP where the `CreateSymbolicLink`
        function is not available.

        Simply creates a `.bat` file at `dst` which calls `src` together with
        all command line arguments.

        Works of course only with callable files, e.g. `.bat` or `.exe` files.
        """
        from conda.utils import shells
        try:
            os.makedirs(os.path.dirname(dst))
        except OSError as exc:  # Python >2.5
            # Directory already existing is fine; anything else is re-raised.
            if exc.errno == errno.EEXIST and os.path.isdir(os.path.dirname(dst)):
                pass
            else:
                raise

        # bat file redirect
        if not os.path.isfile(dst + '.bat'):
            with open(dst + '.bat', 'w') as f:
                f.write('@echo off\ncall "%s" %%*\n' % src)

        # TODO: probably need one here for powershell at some point

        # This one is for bash/cygwin/msys
        # set default shell to bash.exe when not provided, as that's most common
        if not shell:
            shell = "bash.exe"

        # technically these are "links" - but islink doesn't work on win
        if not os.path.isfile(dst):
            with open(dst, "w") as f:
                f.write("#!/usr/bin/env bash \n")
                if src.endswith("conda"):
                    f.write('%s "$@"' % shells[shell]['path_to'](src+".exe"))
                else:
                    f.write('source %s "$@"' % shells[shell]['path_to'](src))
        # Make the new file executable
        # http://stackoverflow.com/a/30463972/1170370
        mode = os.stat(dst).st_mode
        mode |= (mode & 292) >> 2  # copy R bits to X
        os.chmod(dst, mode)
log = logging.getLogger(__name__)
stdoutlog = logging.getLogger('stdoutlog')
class NullHandler(logging.Handler):
    """Logging handler that silently discards every record.

    Copied from Python 2.7 to avoid getting
    `No handlers could be found for logger "patch"`
    http://bugs.python.org/issue16539
    """

    def createLock(self):
        # No lock is needed since records are never emitted.
        self.lock = None

    def handle(self, record):
        pass

    def emit(self, record):
        pass
log.addHandler(NullHandler())
# Link type codes used throughout this module (see _link below).
LINK_HARD = 1
LINK_SOFT = 2
LINK_COPY = 3

# Human-readable names for the link types, for log/error messages.
link_name_map = {
    LINK_HARD: 'hard-link',
    LINK_SOFT: 'soft-link',
    LINK_COPY: 'copy',
}
def _link(src, dst, linktype=LINK_HARD):
    """Materialize *src* at *dst* as a hard link, soft link, or copy."""
    if linktype == LINK_HARD:
        # Pick the platform-appropriate hard-link primitive.
        (win_hard_link if on_win else os.link)(src, dst)
    elif linktype == LINK_SOFT:
        (win_soft_link if on_win else os.symlink)(src, dst)
    elif linktype == LINK_COPY:
        if not on_win and islink(src) and not os.readlink(src).startswith('/'):
            # Recreate relative symlinks instead of copying their targets.
            os.symlink(os.readlink(src), dst)
        else:
            shutil.copy2(src, dst)
    else:
        raise Exception("Did not expect linktype=%r" % linktype)
def _remove_readonly(func, path, excinfo):
os.chmod(path, stat.S_IWRITE)
func(path)
def warn_failed_remove(function, path, exc_info):
    """shutil.rmtree onerror callback that logs failures instead of raising."""
    err = exc_info[1].errno
    if err == errno.EACCES:
        log.warn("Cannot remove, permission denied: {0}".format(path))
    elif err == errno.ENOTEMPTY:
        log.warn("Cannot remove, not empty: {0}".format(path))
    else:
        log.warn("Cannot remove, unknown reason: {0}".format(path))
def exp_backoff_fn(fn, *args):
    """Mostly for retrying file operations that fail on Windows due to virus scanners"""
    if not on_win:
        return fn(*args)

    import random
    # with max_tries = 5, max total time ~= 3.2 sec
    # with max_tries = 6, max total time ~= 6.5 sec
    max_tries = 6
    for attempt in range(max_tries):
        try:
            return fn(*args)
        except (OSError, IOError) as e:
            log.debug(repr(e))
            # Only permission-style errors are worth retrying; anything
            # else — and the final failed attempt — propagates.
            if e.errno not in (errno.EPERM, errno.EACCES):
                raise
            if attempt == max_tries - 1:
                raise
            time.sleep(((2 ** attempt) + random.random()) * 0.1)
def rm_rf(path, max_retries=5, trash=True):
    """
    Completely delete path

    max_retries is the number of times to retry on failure. The default is
    5. This only applies to deleting a directory.

    If removing path fails and trash is True, files will be moved to the trash
    directory.
    """
    if islink(path) or isfile(path):
        # Note that we have to check if the destination is a link because
        # exists('/path/to/dead-link') will return False, although
        # islink('/path/to/dead-link') is True.
        try:
            os.unlink(path)
            return
        except (OSError, IOError):
            log.warn("Cannot remove, permission denied: {0}".format(path))
            # Fall back to moving the file into the trash directory.
            if trash and move_path_to_trash(path):
                return
    elif isdir(path):
        # On Windows, always move to trash first.
        if trash and on_win and move_path_to_trash(path, preclean=False):
            return
        try:
            for i in range(max_retries):
                try:
                    shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
                    return
                except OSError as e:
                    if trash and move_path_to_trash(path):
                        return
                    msg = "Unable to delete %s\n%s\n" % (path, e)
                    if on_win:
                        try:
                            # Second chance: clear read-only bits and retry.
                            shutil.rmtree(path, onerror=_remove_readonly)
                            return
                        except OSError as e1:
                            msg += "Retry with onerror failed (%s)\n" % e1
                        # Last resort on Windows: shell out to `rd /s /q`.
                        p = subprocess.Popen(['cmd', '/c', 'rd', '/s', '/q', path],
                                             stdout=subprocess.PIPE,
                                             stderr=subprocess.PIPE)
                        (stdout, stderr) = p.communicate()
                        if p.returncode != 0:
                            msg += '%s\n%s\n' % (stdout, stderr)
                        else:
                            if not isdir(path):
                                return
                    # Back off before the next attempt (i seconds).
                    log.debug(msg + "Retrying after %s seconds..." % i)
                    time.sleep(i)
            # Final time. pass exceptions to caller.
            shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)
        finally:
            # If path was removed, ensure it's not in linked_data_
            if not isdir(path):
                delete_linked_data_any(path)
def rm_empty_dir(path):
    """Delete *path* if it is an existing, empty directory.

    A missing directory or a non-empty one is silently ignored, exactly as
    in the original implementation.
    """
    try:
        os.rmdir(path)
    except OSError:
        # Directory might not exist or not be empty — both are fine.
        pass
def yield_lines(path):
    """Yield the stripped, meaningful lines of the text file at *path*.

    Blank lines and '#'-comment lines are skipped.
    """
    for raw in open(path):
        stripped = raw.strip()
        if stripped and not stripped.startswith('#'):
            yield stripped
prefix_placeholder = ('/opt/anaconda1anaconda2'
                      # this is intentionally split into parts,
                      # such that running this program on itself
                      # will leave it unchanged
                      'anaconda3')


def read_has_prefix(path):
    """
    reads `has_prefix` file and return dict mapping filenames to
    tuples(placeholder, mode)
    """
    mapping = {}
    try:
        for line in yield_lines(path):
            try:
                # New-style lines: "<placeholder> <mode> <filename>".
                placeholder, mode, fname = [field.strip('"\'') for field in
                                            shlex.split(line, posix=False)]
                mapping[fname] = (placeholder, mode)
            except ValueError:
                # Old-style lines hold only a filename; assume the default
                # placeholder and text mode.
                mapping[line] = (prefix_placeholder, 'text')
    except IOError:
        # A missing has_prefix file simply means no replacements.
        pass
    return mapping
class PaddingError(Exception):
    """Raised by binary_replace when the replacement prefix is longer than
    the placeholder, leaving no room for null padding."""
    pass
def binary_replace(data, a, b):
    """
    Perform a binary replacement of `data`, where the placeholder `a` is
    replaced with `b` and the remaining string is padded with null characters.
    All input arguments are expected to be bytes objects.
    """
    def pad_replace(match):
        # Each occurrence of `a` inside the null-terminated chunk shrinks the
        # text by len(a) - len(b) bytes; trailing nulls compensate so the
        # overall length of `data` never changes.
        count = match.group().count(a)
        pad = (len(a) - len(b)) * count
        if pad < 0:
            raise PaddingError(a, b, pad)
        return match.group().replace(a, b) + b'\0' * pad

    pattern = re.compile(re.escape(a) + b'([^\0]*?)\0')
    result = pattern.sub(pad_replace, data)
    assert len(result) == len(data)
    return result
def replace_long_shebang(mode, data):
    """Rewrite an over-long text shebang to use `/usr/bin/env <name>`.

    Only 'text' mode data is touched; binary shebangs are left alone for now.
    """
    if mode != 'text':
        # TODO: binary shebangs exist; figure this out in the future if text works well
        return data
    match = re.match(br'^(#!((?:\\ |[^ \n\r])+)(.*))', data)
    if match is None:
        return data
    shebang, interpreter, options = match.groups()
    if len(shebang) <= 127:
        return data
    # The interpreter path is too long (>127 bytes); refer to it by name
    # through env instead.
    name = interpreter.decode('utf-8').split('/')[-1]
    replacement = '#!/usr/bin/env {0}{1}'.format(name, options.decode('utf-8'))
    return data.replace(shebang, replacement.encode('utf-8'))
def replace_prefix(mode, data, placeholder, new_prefix):
    """Substitute *placeholder* with *new_prefix* inside the bytes *data*.

    'text' mode does a straight byte replacement; 'binary' mode uses the
    null-padded binary_replace (skipped on Windows, where embedded prefixes
    are not used for things like RPATH).  Any other mode aborts the process.
    """
    if mode == 'text':
        data = data.replace(placeholder.encode('utf-8'), new_prefix.encode('utf-8'))
    elif mode == 'binary':
        if on_win:
            logging.debug("Skipping prefix replacement in binary on Windows")
        else:
            data = binary_replace(data, placeholder.encode('utf-8'),
                                  new_prefix.encode('utf-8'))
    else:
        sys.exit("Invalid mode: %s" % mode)
    return data
def update_prefix(path, new_prefix, placeholder=prefix_placeholder, mode='text'):
    """Rewrite the embedded installation prefix inside the file at *path*.

    Reads the file, replaces *placeholder* with *new_prefix* (see
    replace_prefix), fixes over-long shebangs on non-Windows systems, and —
    only if anything changed — rewrites the file in place while preserving
    its permission bits.
    """
    if on_win:
        # force all prefix replacements to forward slashes to simplify need to
        # escape backslashes - replace with unix-style path separators
        new_prefix = new_prefix.replace('\\', '/')

    path = os.path.realpath(path)
    with open(path, 'rb') as fi:
        original_data = data = fi.read()

    data = replace_prefix(mode, data, placeholder, new_prefix)
    if not on_win:
        data = replace_long_shebang(mode, data)

    if data == original_data:
        return
    # Preserve the original permission bits across the rewrite.
    st = os.lstat(path)
    # Remove file before rewriting to avoid destroying hard-linked cache
    os.remove(path)
    with exp_backoff_fn(open, path, 'wb') as fo:
        fo.write(data)
    os.chmod(path, stat.S_IMODE(st.st_mode))
def dist2pair(dist):
    """Split a dist string into (channel, package-spec).

    A missing channel defaults to 'defaults'; a trailing '[features]'
    qualifier and a '.tar.bz2' suffix are stripped first.
    """
    dist = str(dist)
    if dist.endswith(']'):
        dist = dist.split('[', 1)[0]
    if dist.endswith('.tar.bz2'):
        dist = dist[:-8]
    parts = dist.split('::', 1)
    if len(parts) == 2:
        return parts[0], parts[1]
    return 'defaults', parts[-1]


def dist2quad(dist):
    """Return (name, version, build, channel) for a dist string."""
    channel, spec = dist2pair(dist)
    # Pad with empty strings so specs with fewer than two dashes still
    # unpack into three fields.
    name, version, build = (spec.rsplit('-', 2) + ['', ''])[:3]
    return (name, version, build, channel)


def dist2name(dist):
    """Package name component of a dist string."""
    return dist2quad(dist)[0]


def name_dist(dist):
    """Alias of dist2name, kept for callers using the older name."""
    return dist2name(dist)


def dist2filename(dist, suffix='.tar.bz2'):
    """Package filename (spec + suffix) for a dist string, ignoring the channel."""
    return dist2pair(dist)[1] + suffix


def dist2dirname(dist):
    """Extracted-directory name (spec, no suffix) for a dist string."""
    return dist2filename(dist, '')
def create_meta(prefix, dist, info_dir, extra_info):
    """
    Create the conda metadata, in a given prefix, for a given package.
    """
    # read info/index.json first
    with open(join(info_dir, 'index.json')) as fi:
        meta = json.load(fi)
    # add extra info, add to our internal cache
    meta.update(extra_info)
    if not meta.get('url'):
        meta['url'] = read_url(dist)
    # write into <env>/conda-meta/<dist>.json
    meta_dir = join(prefix, 'conda-meta')
    if not isdir(meta_dir):
        os.makedirs(meta_dir)
    with open(join(meta_dir, dist2filename(dist, '.json')), 'w') as fo:
        json.dump(meta, fo, indent=2, sort_keys=True)
    if prefix in linked_data_:
        # Keep the in-memory linked-data cache in sync with the file we
        # just wrote.
        load_linked_data(prefix, dist, meta)
def mk_menus(prefix, files, remove=False):
    """
    Create cross-platform menu items (e.g. Windows Start Menu)

    Passes all menu config files %PREFIX%/Menu/*.json to ``menuinst.install``.
    ``remove=True`` will remove the menu items.
    """
    menu_files = [f for f in files
                  if f.lower().startswith('menu/') and f.lower().endswith('.json')]
    if not menu_files:
        return
    if basename(abspath(prefix)).startswith('_'):
        logging.warn("Environment name starts with underscore '_'. "
                     "Skipping menu installation.")
        return
    try:
        import menuinst
    except:
        # menuinst is optional: log the import failure and carry on.
        logging.warn("Menuinst could not be imported:")
        logging.warn(traceback.format_exc())
        return
    for f in menu_files:
        try:
            menuinst.install(join(prefix, f), remove, prefix)
        except:
            # One bad menu file must not abort the rest of the install.
            stdoutlog.error("menuinst Exception:")
            stdoutlog.error(traceback.format_exc())
def run_script(prefix, dist, action='post-link', env_prefix=None):
    """
    call the post-link (or pre-unlink) script, and return True on success,
    False on failure
    """
    path = join(prefix, 'Scripts' if on_win else 'bin', '.%s-%s.%s' % (
            name_dist(dist),
            action,
            'bat' if on_win else 'sh'))
    # A missing script is not an error: there is simply nothing to run.
    if not isfile(path):
        return True
    if on_win:
        try:
            args = [os.environ['COMSPEC'], '/c', path]
        except KeyError:
            return False
    else:
        shell_path = '/bin/sh' if 'bsd' in sys.platform else '/bin/bash'
        args = [shell_path, path]
    # BUG FIX: use a *copy* of the environment for the child process.  The
    # original did `env = os.environ`, which aliased — and therefore
    # permanently mutated — the parent process environment with PREFIX and
    # PKG_* variables on every call.
    env = os.environ.copy()
    env['ROOT_PREFIX'] = sys.prefix
    env['PREFIX'] = str(env_prefix or prefix)
    env['PKG_NAME'], env['PKG_VERSION'], env['PKG_BUILDNUM'], _ = dist2quad(dist)
    if action == 'pre-link':
        env['SOURCE_DIR'] = str(prefix)
    try:
        subprocess.check_call(args, env=env)
    except subprocess.CalledProcessError:
        return False
    return True
def read_url(dist):
    """Return the first cached source URL recorded for *dist*, or None."""
    urls = package_cache().get(dist, {}).get('urls', (None,))
    return urls[0] if urls else None
def read_icondata(source_dir):
    """Return the base64-encoded contents of info/icon.png, or None if unreadable."""
    import base64
    icon_path = join(source_dir, 'info', 'icon.png')
    try:
        with open(icon_path, 'rb') as fi:
            raw = fi.read()
    except IOError:
        return None
    return base64.b64encode(raw).decode('utf-8')
def read_no_link(info_dir):
    """Return the set of filenames listed in info/no_link and info/no_softlink."""
    entries = set()
    for fname in ('no_link', 'no_softlink'):
        try:
            entries.update(yield_lines(join(info_dir, fname)))
        except IOError:
            # Either file may legitimately be absent.
            pass
    return entries
# Should this be an API function?
def symlink_conda(prefix, root_dir, shell=None):
    """Expose the root environment's conda entry points inside *prefix*."""
    # do not symlink root env - this clobbers activate incorrectly.
    # prefix should always be longer than, or outside the root dir.
    if normcase(normpath(prefix)) in normcase(normpath(root_dir)):
        return
    if on_win:
        where = 'Scripts'
        # On Windows, real symlinks may be unavailable; generate .bat/bash
        # redirect scripts instead (shell is forwarded to the generator).
        symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell)
    else:
        where = 'bin'
        symlink_fn = os.symlink
    if not isdir(join(prefix, where)):
        os.makedirs(join(prefix, where))
    symlink_conda_hlp(prefix, root_dir, where, symlink_fn)
def symlink_conda_hlp(prefix, root_dir, where, symlink_fn):
    """Link the conda/activate/deactivate scripts from *root_dir* into *prefix*
    using *symlink_fn* (os.symlink or the Windows .bat redirect)."""
    scripts = ["conda", "activate", "deactivate"]
    prefix_where = join(prefix, where)

    if not isdir(prefix_where):
        os.makedirs(prefix_where)
    for f in scripts:
        root_file = join(root_dir, where, f)
        prefix_file = join(prefix_where, f)
        try:
            # try to kill stale links if they exist
            if os.path.lexists(prefix_file):
                os.remove(prefix_file)
            # if they're in use, they won't be killed. Skip making new symlink.
            if not os.path.lexists(prefix_file):
                symlink_fn(root_file, prefix_file)
        except (IOError, OSError) as e:
            # A permission-style failure on a link that still exists is
            # benign: the link is already in place (and likely in use).
            if (os.path.lexists(prefix_file) and
                    (e.errno in (errno.EPERM, errno.EACCES, errno.EROFS))):
                log.debug("Cannot symlink {0} to {1}. Ignoring since link already exists."
                          .format(root_file, prefix_file))
            else:
                raise
# ========================== begin API functions =========================

def try_hard_link(pkgs_dir, prefix, dist):
    """Probe whether hard links work between *pkgs_dir* and *prefix*.

    Creates a throw-away hard link of one small package file and returns
    True only when an actual hard link (not a symlink fallback) resulted.
    The temporary link and any empty prefix dir are cleaned up on exit.
    """
    dist = dist2filename(dist, '')
    src = join(pkgs_dir, dist, 'info', 'index.json')
    dst = join(prefix, '.tmp-%s' % dist)
    assert isfile(src), src
    assert not isfile(dst), dst
    try:
        if not isdir(prefix):
            os.makedirs(prefix)
        _link(src, dst, LINK_HARD)
        # Some file systems (at least BeeGFS) do not support hard-links
        # between files in different directories. Depending on the
        # file system configuration, a symbolic link may be created
        # instead. If a symbolic link is created instead of a hard link,
        # return False.
        return not os.path.islink(dst)
    except OSError:
        return False
    finally:
        rm_rf(dst)
        rm_empty_dir(prefix)
# ------- package cache ----- construction
# The current package cache does not support the ability to store multiple packages
# with the same filename from different channels. Furthermore, the filename itself
# cannot be used to disambiguate; we must read the URL from urls.txt to determine
# the source channel. For this reason, we now fully parse the directory and its
# accompanying urls.txt file so we can make arbitrary queries without having to
# read this data multiple times.
package_cache_ = {}  # '<schannel>::<dist>' (no prefix for defaults) -> dict(files=[], dirs=[], urls=[])
fname_table_ = {}    # tarball path and its file-URL form -> channel prefix ('' or 'schannel::')
def add_cached_package(pdir, url, overwrite=False, urlstxt=False):
    """
    Adds a new package to the cache. The URL is used to determine the
    package filename and channel, and the directory pdir is scanned for
    both a compressed and an extracted version of that package. If
    urlstxt=True, this URL will be appended to the urls.txt file in the
    cache, so that subsequent runs will correctly identify the package.

    Mutates the module-level ``package_cache_`` and ``fname_table_``
    dictionaries in place.
    """
    package_cache()  # ensure the cache tables are initialized
    # A bare filename (no '/') means no channel URL is known.
    if '/' in url:
        dist = url.rsplit('/', 1)[-1]
    else:
        dist = url
        url = None
    # Split '<dist>.tar.bz2' into canonical dist name and tarball name.
    if dist.endswith('.tar.bz2'):
        fname = dist
        dist = dist[:-8]
    else:
        fname = dist + '.tar.bz2'
    xpkg = join(pdir, fname)
    if not overwrite and xpkg in fname_table_:
        return
    # xpkg: path to the fetched tarball, if present on disk.
    if not isfile(xpkg):
        xpkg = None
    # xdir: path to a fully-extracted copy (must have files + index.json).
    xdir = join(pdir, dist)
    if not (isdir(xdir) and
            isfile(join(xdir, 'info', 'files')) and
            isfile(join(xdir, 'info', 'index.json'))):
        xdir = None
    if not (xpkg or xdir):
        return
    if url:
        url = remove_binstar_tokens(url)
    _, schannel = url_channel(url)
    prefix = '' if schannel == 'defaults' else schannel + '::'
    # Register both the filesystem path and its file-URL form so either
    # spelling resolves to the channel prefix.
    xkey = xpkg or (xdir + '.tar.bz2')
    fname_table_[xkey] = fname_table_[url_path(xkey)] = prefix
    fkey = prefix + dist
    rec = package_cache_.get(fkey)
    if rec is None:
        rec = package_cache_[fkey] = dict(files=[], dirs=[], urls=[])
    if url and url not in rec['urls']:
        rec['urls'].append(url)
    if xpkg and xpkg not in rec['files']:
        rec['files'].append(xpkg)
    if xdir and xdir not in rec['dirs']:
        rec['dirs'].append(xdir)
    if urlstxt:
        # Best effort: a read-only cache directory is not an error.
        try:
            with open(join(pdir, 'urls.txt'), 'a') as fa:
                fa.write('%s\n' % url)
        except IOError:
            pass
def package_cache():
    """
    Initializes the package cache. Each entry in the package cache
    dictionary contains three lists:
    - urls: the URLs used to refer to that package
    - files: the full pathnames to fetched copies of that package
    - dirs: the full pathnames to extracted copies of that package
    Nominally there should be no more than one entry in each list, but
    in theory this can handle the presence of multiple copies.
    """
    if package_cache_:
        return package_cache_
    # Sentinel entry: add_cached_package() calls back into package_cache(),
    # and a non-empty dict stops that recursion.
    package_cache_['@'] = None
    for pdir in pkgs_dirs:
        try:
            # Close the handle deterministically; the original
            # open(...).read() leaked it until garbage collection.
            with open(join(pdir, 'urls.txt')) as fi:
                data = fi.read()
            # Walk in reverse so the most recently written URL for a
            # package is registered first.
            for url in data.split()[::-1]:
                if '/' in url:
                    add_cached_package(pdir, url)
        except IOError:
            pass
        # Also pick up packages present on disk without a urls.txt entry.
        if isdir(pdir):
            for fn in os.listdir(pdir):
                add_cached_package(pdir, fn)
    del package_cache_['@']
    return package_cache_
def cached_url(url):
    """Return the channel prefix recorded for *url*, or None if unknown."""
    package_cache()  # make sure the cache tables are populated
    try:
        return fname_table_[url]
    except KeyError:
        return None
def find_new_location(dist):
    """
    Determines the download location for the given package, and the name
    of a package, if any, that must be removed to make room. If the
    given package is already in the cache, it returns its current location,
    under the assumption that it will be overwritten. If the conflict
    value is None, that means there is no other package with that same
    name present in the cache (e.g., no collision).

    :return: tuple (pkgs_dir, conflict_dist_or_None)
    """
    rec = package_cache().get(dist)
    if rec:
        # Already cached: reuse whichever directory holds it now.
        return dirname((rec['files'] or rec['dirs'])[0]), None
    fname = dist2filename(dist)
    dname = fname[:-8]
    # Look for a location with no conflicts
    # On the second pass, just pick the first location
    # (reaching the second pass means every location conflicted, so
    # `prefix` is guaranteed non-None there and names the conflict).
    for p in range(2):
        for pkg_dir in pkgs_dirs:
            pkg_path = join(pkg_dir, fname)
            prefix = fname_table_.get(pkg_path)
            if p or prefix is None:
                return pkg_dir, prefix + dname if p else None
# ------- package cache ----- fetched
def fetched():
    """Return the set of canonical names of all fetched packages."""
    return {dist for dist, rec in package_cache().items() if rec['files']}
def is_fetched(dist):
    """Return the full path of the fetched tarball for *dist*.

    None is returned when the package is not in the cache.
    """
    files = package_cache().get(dist, {}).get('files', ())
    return files[0] if files else None
def rm_fetched(dist):
    """
    Checks to see if the requested package is in the cache; and if so, it removes both
    the package itself and its extracted contents.

    Also drops the package's entries from the in-memory lookup tables.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['files']:
        # Remove both spellings registered by add_cached_package().
        del fname_table_[fname]
        del fname_table_[url_path(fname)]
        with Locked(dirname(fname)):
            rm_rf(fname)
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    del package_cache_[dist]
# ------- package cache ----- extracted
def extracted():
    """Return the set of canonical names of all extracted packages."""
    return {dist for dist, rec in package_cache().items() if rec['dirs']}
def is_extracted(dist):
    """Return the full path of the extracted data for *dist*.

    None is returned when the package is not extracted.
    """
    dirs = package_cache().get(dist, {}).get('dirs', ())
    return dirs[0] if dirs else None
def rm_extracted(dist):
    """
    Removes any extracted versions of the given package found in the cache.

    The cache record is kept (with an empty 'dirs' list) while a fetched
    tarball remains; otherwise the record is dropped entirely.
    """
    rec = package_cache().get(dist)
    if rec is None:
        return
    for fname in rec['dirs']:
        with Locked(dirname(fname)):
            rm_rf(fname)
    if rec['files']:
        rec['dirs'] = []
    else:
        del package_cache_[dist]
def extract(dist):
    """
    Extract a package, i.e. make a package available for linkage. We assume
    that the compressed package is located in the packages directory.

    Extraction happens into a '.tmp' path which is atomically renamed over
    the final directory, so a partially extracted package never appears
    under the real name.
    """
    rec = package_cache()[dist]
    url = rec['urls'][0]
    fname = rec['files'][0]
    assert url and fname
    pkgs_dir = dirname(fname)
    with Locked(pkgs_dir):
        path = fname[:-8]  # strip '.tar.bz2'
        temp_path = path + '.tmp'
        rm_rf(temp_path)
        with tarfile.open(fname) as t:
            t.extractall(path=temp_path)
        rm_rf(path)
        exp_backoff_fn(os.rename, temp_path, path)
        if sys.platform.startswith('linux') and os.getuid() == 0:
            # When extracting as root, tarfile will by default restore
            # ownership of extracted files. However, we want root to be
            # the owner (our implementation of --no-same-owner).
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = join(root, fn)
                    os.lchown(p, 0, 0)
        # Re-register so the cache picks up the newly extracted dir.
        add_cached_package(pkgs_dir, url, overwrite=True)
# Because the conda-meta .json files do not include channel names in
# their filenames, we have to pull that information from the .json
# files themselves. This has made it necessary in virtually all
# circumstances to load the full set of files from this directory.
# Therefore, we have implemented a full internal cache of this
# data to eliminate redundant file reads.
linked_data_ = {}  # prefix -> {dist: record}, populated by load_linked_data()
def load_linked_data(prefix, dist, rec=None):
    """Load (or register) the conda-meta record for *dist* in *prefix*.

    When *rec* is None the record is read from
    ``<prefix>/conda-meta/<dist>.json``; otherwise the supplied record is
    registered directly. The record is normalized in place (fn, url,
    channel, schannel and link fields) and stored in ``linked_data_``.

    :return: the normalized record, or None when the metadata file is
        missing or its 'fn' does not match the dist name.
    """
    schannel, dname = dist2pair(dist)
    meta_file = join(prefix, 'conda-meta', dname + '.json')
    if rec is None:
        try:
            with open(meta_file) as fi:
                rec = json.load(fi)
        except IOError:
            return None
    else:
        # Ensure the per-prefix cache dict exists before insertion below.
        linked_data(prefix)
    url = rec.get('url')
    fn = rec.get('fn')
    if not fn:
        fn = rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2'
    if fn[:-8] != dname:
        log.debug('Ignoring invalid package metadata file: %s' % meta_file)
        return None
    channel = rec.get('channel')
    if channel:
        channel = channel.rstrip('/')
        # Rebuild the URL from the channel when it is missing, or when a
        # local file: URL can be replaced by a real channel location.
        # BUG FIX: the original tested `channel[0] != '<unknown>'`, which
        # compares a single character to a 9-character string and is
        # always true, so the '<unknown>' placeholder channel was never
        # excluded as intended.
        if not url or (url.startswith('file:') and channel != '<unknown>'):
            url = rec['url'] = channel + '/' + fn
    channel, schannel = url_channel(url)
    rec['url'] = url
    rec['channel'] = channel
    rec['schannel'] = schannel
    rec['link'] = rec.get('link') or True
    cprefix = '' if schannel == 'defaults' else schannel + '::'
    linked_data_[prefix][str(cprefix + dname)] = rec
    return rec
def delete_linked_data(prefix, dist, delete=True):
    """Drop *dist* from the in-memory linked-data cache for *prefix*.

    When *delete* is true the on-disk conda-meta JSON file is removed too.
    """
    prefix_recs = linked_data_.get(prefix)
    if prefix_recs and dist in prefix_recs:
        del prefix_recs[dist]
    if delete:
        meta_path = join(prefix, 'conda-meta', dist2filename(dist, '.json'))
        if isfile(meta_path):
            os.unlink(meta_path)
def delete_linked_data_any(path):
    """Purge cached linked data for *path*.

    *path* may be a complete prefix (its whole cache entry is dropped) or
    a path to a dist inside a prefix (only that dist's record is
    dropped). Returns True if anything was removed.
    """
    dist = ''
    while True:
        if path in linked_data_:
            if dist:
                # *path* is a known prefix and *dist* the component we
                # peeled off: drop just that record.
                delete_linked_data(path, dist)
            else:
                # *path* itself is a cached prefix: drop it wholesale.
                del linked_data_[path]
            return True
        path, dist = os.path.split(path)
        if not dist:
            return False
def load_meta(prefix, dist):
    """Return the install metadata record for *dist* in *prefix*.

    None is returned when the package is not linked into the prefix.
    """
    recs = linked_data(prefix)
    return recs.get(dist)
def linked_data(prefix):
    """Return {dist: record} for every package linked into *prefix*.

    Memoized by hand in ``linked_data_`` so callers can update the
    mapping in place as packages are linked and unlinked.
    """
    recs = linked_data_.get(prefix)
    if recs is not None:
        return recs
    recs = linked_data_[prefix] = {}
    meta_dir = join(prefix, 'conda-meta')
    if isdir(meta_dir):
        for fn in os.listdir(meta_dir):
            if fn.endswith('.json'):
                # load_linked_data() inserts into linked_data_[prefix].
                load_linked_data(prefix, fn[:-5])
    return recs
def linked(prefix):
    """Return the set of canonical names of linked packages in *prefix*."""
    return set(linked_data(prefix))
def is_linked(prefix, dist):
    """Return the install metadata for *dist* if linked in *prefix*, else None.

    FIXME: functions that begin with ``is_`` should return True/False;
    kept returning the record for backward compatibility.
    """
    return load_meta(prefix, dist)
def delete_trash(prefix=None):
    """Best-effort removal of the '.trash' directory in every package dir.

    *prefix* is accepted for interface compatibility but unused; failures
    are logged and ignored.
    """
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        if isdir(trash_dir):
            try:
                log.debug("Trying to delete the trash dir %s" % trash_dir)
                rm_rf(trash_dir, max_retries=1, trash=False)
            except OSError as e:
                log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e))
def move_to_trash(prefix, f, tempdir=None):
    """
    Move a file or folder f from prefix to the trash
    tempdir is a deprecated parameter, and will be ignored.
    This function is deprecated in favor of `move_path_to_trash`.
    """
    target = join(prefix, f) if f else prefix
    return move_path_to_trash(target)
def move_path_to_trash(path, preclean=True):
    """
    Move a path to the trash

    Tries each configured package directory's '.trash' subfolder in turn;
    returns True on the first successful rename, False if every attempt
    failed.
    """
    # Try deleting the trash every time we use it.
    if preclean:
        delete_trash()
    for pkg_dir in pkgs_dirs:
        trash_dir = join(pkg_dir, '.trash')
        try:
            os.makedirs(trash_dir)
        except OSError as e1:
            # Only 'already exists' is acceptable; otherwise try the
            # next package directory.
            if e1.errno != errno.EEXIST:
                continue
        trash_file = tempfile.mktemp(dir=trash_dir)
        try:
            os.rename(path, trash_file)
        except OSError as e:
            log.debug("Could not move %s to %s (%s)" % (path, trash_file, e))
        else:
            log.debug("Moved to trash: %s" % (path,))
            # Keep the in-memory linked-data cache in sync.
            delete_linked_data_any(path)
            if not preclean:
                rm_rf(trash_file, max_retries=1, trash=False)
            return True
    return False
def link(prefix, dist, linktype=LINK_HARD, index=None):
    """
    Set up a package in a specified (environment) prefix. We assume that
    the package has been extracted (using extract() above).

    :param prefix: target environment prefix.
    :param dist: canonical package name to link.
    :param linktype: preferred link type (hard link by default); files
        listed in has_prefix/no_link, and symlinked sources, are copied.
    :param index: optional repodata index used to seed the meta record.
    """
    index = index or {}
    source_dir = is_extracted(dist)
    assert source_dir is not None
    pkgs_dir = dirname(source_dir)
    log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' %
              (pkgs_dir, prefix, dist, linktype))
    if not run_script(source_dir, dist, 'pre-link', prefix):
        sys.exit('Error: pre-link failed: %s' % dist)
    info_dir = join(source_dir, 'info')
    files = list(yield_lines(join(info_dir, 'files')))
    has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix'))
    no_link = read_no_link(info_dir)
    with Locked(prefix), Locked(pkgs_dir):
        for f in files:
            src = join(source_dir, f)
            dst = join(prefix, f)
            dst_dir = dirname(dst)
            if not isdir(dst_dir):
                os.makedirs(dst_dir)
            if os.path.exists(dst):
                log.warn("file already exists: %r" % dst)
                rm_rf(dst)
            lt = linktype
            # Files needing prefix rewriting, no-link entries and
            # symlinked sources must be real copies.
            if f in has_prefix_files or f in no_link or islink(src):
                lt = LINK_COPY
            try:
                _link(src, dst, lt)
            except OSError as e:
                log.error('failed to link (src=%r, dst=%r, type=%r, error=%r)' %
                          (src, dst, lt, e))
        # Rewrite the build-time placeholder prefix inside these files.
        for f in sorted(has_prefix_files):
            placeholder, mode = has_prefix_files[f]
            try:
                update_prefix(join(prefix, f), prefix, placeholder, mode)
            except PaddingError:
                sys.exit("ERROR: placeholder '%s' too short in: %s\n" %
                         (placeholder, dist))
        # make sure that the child environment behaves like the parent,
        # wrt user/system install on win
        # This is critical for doing shortcuts correctly
        if on_win:
            nonadmin = join(sys.prefix, ".nonadmin")
            if isfile(nonadmin):
                open(join(prefix, ".nonadmin"), 'w').close()
        if config.shortcuts:
            mk_menus(prefix, files, remove=False)
        if not run_script(prefix, dist, 'post-link'):
            sys.exit("Error: post-link failed for: %s" % dist)
        # Assemble and persist the conda-meta record for this package.
        meta_dict = index.get(dist + '.tar.bz2', {})
        meta_dict['url'] = read_url(dist)
        try:
            # A post-link script may leave an alternate file list behind.
            alt_files_path = join(prefix, 'conda-meta', dist2filename(dist, '.files'))
            meta_dict['files'] = list(yield_lines(alt_files_path))
            os.unlink(alt_files_path)
        except IOError:
            meta_dict['files'] = files
        meta_dict['link'] = {'source': source_dir,
                             'type': link_name_map.get(linktype)}
        if 'icon' in meta_dict:
            meta_dict['icondata'] = read_icondata(source_dir)
        create_meta(prefix, dist, info_dir, meta_dict)
def unlink(prefix, dist):
    """
    Remove a package from the specified environment, it is an error if the
    package does not exist in the prefix.

    Runs the package's pre-unlink script, removes its files and menus,
    deletes the conda-meta record, then prunes any directories left
    empty.
    """
    with Locked(prefix):
        run_script(prefix, dist, 'pre-unlink')
        meta = load_meta(prefix, dist)
        # Always try to run this - it should not throw errors where menus do not exist
        mk_menus(prefix, meta['files'], remove=True)
        dst_dirs1 = set()
        for f in meta['files']:
            dst = join(prefix, f)
            dst_dirs1.add(dirname(dst))
            rm_rf(dst)
        # remove the meta-file last
        delete_linked_data(prefix, dist, delete=True)
        # Collect every ancestor directory (up to the prefix) that might
        # now be empty.
        dst_dirs2 = set()
        for path in dst_dirs1:
            while len(path) > len(prefix):
                dst_dirs2.add(path)
                path = dirname(path)
        # in case there is nothing left
        dst_dirs2.add(join(prefix, 'conda-meta'))
        dst_dirs2.add(prefix)
        # Deepest paths first so children empty out before their parents.
        for path in sorted(dst_dirs2, key=len, reverse=True):
            rm_empty_dir(path)
def messages(prefix):
    """Print the contents of ``.messages.txt`` in *prefix*, then delete it.

    A missing file is silently ignored; the file is removed in all cases.
    """
    path = join(prefix, '.messages.txt')
    try:
        with open(path) as fi:
            text = fi.read()
        sys.stdout.write(text)
    except IOError:
        pass
    finally:
        rm_rf(path)
def duplicates_to_remove(dist_metas, keep_dists):
    """
    Returns the (sorted) list of distributions to be removed, such that
    only one distribution (for each name) remains. `keep_dists` is an
    iterable of distributions (which are not allowed to be removed).
    """
    from collections import defaultdict
    keep_dists = set(keep_dists)
    by_name = defaultdict(set)  # package name -> dists sharing that name
    for dist in dist_metas:
        by_name[name_dist(dist)].add(dist)
    removals = set()
    for dists in by_name.values():
        if len(dists) == 1:
            # a unique name needs no pruning
            continue
        if dists & keep_dists:
            # some members are protected: drop everything unprotected
            removals |= dists - keep_dists
        else:
            # nothing protected: keep only the highest-sorting dist
            removals |= set(sorted(dists)[:-1])
    return sorted(removals)
# =========================== end API functions ==========================
def main():
    """CLI entry point: link every extracted (or listed) package into a
    prefix. Only invoked from the self-extracting shell installers."""
    # This CLI is only invoked from the self-extracting shell installers
    global pkgs_dirs
    from optparse import OptionParser
    p = OptionParser(description="conda link tool used by installer")
    p.add_option('--file',
                 action="store",
                 help="path of a file containing distributions to link, "
                      "by default all packages extracted in the cache are "
                      "linked")
    p.add_option('--prefix',
                 action="store",
                 default=sys.prefix,
                 help="prefix (defaults to %default)")
    p.add_option('-v', '--verbose',
                 action="store_true")
    opts, args = p.parse_args()
    if args:
        p.error('no arguments expected')
    logging.basicConfig()
    prefix = opts.prefix
    pkgs_dir = join(prefix, 'pkgs')
    # Point the module-level package-dir list at the installer's cache.
    pkgs_dirs = [pkgs_dir]
    if opts.verbose:
        print("prefix: %r" % prefix)
    if opts.file:
        idists = list(yield_lines(join(prefix, opts.file)))
    else:
        idists = sorted(extracted())
    assert idists
    # Probe once with the first package to choose hard-link vs copy.
    linktype = (LINK_HARD
                if try_hard_link(pkgs_dir, prefix, idists[0]) else
                LINK_COPY)
    if opts.verbose:
        print("linktype: %s" % link_name_map[linktype])
    for dist in idists:
        if opts.verbose:
            print("linking: %s" % dist)
        link(prefix, dist, linktype)
    messages(prefix)
    # Keep only one linked dist per package name; back up (or remove)
    # the conda-meta record of each duplicate.
    for dist in duplicates_to_remove(linked(prefix), idists):
        meta_path = join(prefix, 'conda-meta', dist + '.json')
        print("WARNING: unlinking: %s" % meta_path)
        try:
            os.rename(meta_path, meta_path + '.bak')
        except OSError:
            rm_rf(meta_path)
if __name__ == '__main__':
    main()
| 32.692737 | 121 | 0.591056 |
f44b8a208d3003af4ba1a3a4b2cb77782780e7d1 | 760 | py | Python | app/urls.py | EgiAprilianto/django-crud | b678eda0113fc40e1d28ba32cd5307d70eeef67d | [
"OML"
] | null | null | null | app/urls.py | EgiAprilianto/django-crud | b678eda0113fc40e1d28ba32cd5307d70eeef67d | [
"OML"
] | null | null | null | app/urls.py | EgiAprilianto/django-crud | b678eda0113fc40e1d28ba32cd5307d70eeef67d | [
"OML"
] | 1 | 2022-03-09T14:19:04.000Z | 2022-03-09T14:19:04.000Z | """app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# from django.contrib import admin
from django.urls import include, path
urlpatterns = [
    path('', include('crud.urls')),  # delegate all routes to the crud app
]
| 33.043478 | 77 | 0.698684 |
f8e805f9ad1d1dd5b5548ab0b65924b5f7488a8b | 188 | py | Python | fastapi_sample/__init__.py | anyakichi/fastapi-sample | 745403c585b86c76a7926c41fcc5f15d1dd25e29 | [
"MIT"
] | null | null | null | fastapi_sample/__init__.py | anyakichi/fastapi-sample | 745403c585b86c76a7926c41fcc5f15d1dd25e29 | [
"MIT"
] | null | null | null | fastapi_sample/__init__.py | anyakichi/fastapi-sample | 745403c585b86c76a7926c41fcc5f15d1dd25e29 | [
"MIT"
] | null | null | null | __version__ = "0.1.0"
from typing import Any, Dict
from fastapi import FastAPI
app = FastAPI()  # module-level ASGI application object
@app.get("/")
async def root() -> Dict[str, Any]:
    """Handle GET / with a static hello-world JSON payload."""
    return {"message": "Hello World"}
| 14.461538 | 37 | 0.648936 |
9279ac39e83ae587a8fd6f65dfdd7e4680b40976 | 5,360 | py | Python | ultracart/models/single_sign_on_token_response.py | UltraCart/rest_api_v2_sdk_python | d734ea13fabc7a57872ff68bac06861edb8fd882 | [
"Apache-2.0"
] | 1 | 2018-03-15T16:56:23.000Z | 2018-03-15T16:56:23.000Z | ultracart/models/single_sign_on_token_response.py | UltraCart/rest_api_v2_sdk_python | d734ea13fabc7a57872ff68bac06861edb8fd882 | [
"Apache-2.0"
] | null | null | null | ultracart/models/single_sign_on_token_response.py | UltraCart/rest_api_v2_sdk_python | d734ea13fabc7a57872ff68bac06861edb8fd882 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
UltraCart Rest API V2
UltraCart REST API Version 2 # noqa: E501
OpenAPI spec version: 2.0.0
Contact: support@ultracart.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class SingleSignOnTokenResponse(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'expiration_dts': 'str',
        'ip_address': 'str',
        'simple_key': 'str'
    }

    attribute_map = {
        'expiration_dts': 'expiration_dts',
        'ip_address': 'ip_address',
        'simple_key': 'simple_key'
    }

    def __init__(self, expiration_dts=None, ip_address=None, simple_key=None):  # noqa: E501
        """SingleSignOnTokenResponse - a model defined in Swagger"""  # noqa: E501

        self._expiration_dts = None
        self._ip_address = None
        self._simple_key = None
        self.discriminator = None

        # Setters are only invoked for supplied values so unset fields
        # stay None rather than being serialized.
        if expiration_dts is not None:
            self.expiration_dts = expiration_dts
        if ip_address is not None:
            self.ip_address = ip_address
        if simple_key is not None:
            self.simple_key = simple_key

    @property
    def expiration_dts(self):
        """Gets the expiration_dts of this SingleSignOnTokenResponse.  # noqa: E501

        Expiration date/time after which time the key is no longer valid  # noqa: E501

        :return: The expiration_dts of this SingleSignOnTokenResponse.  # noqa: E501
        :rtype: str
        """
        return self._expiration_dts

    @expiration_dts.setter
    def expiration_dts(self, expiration_dts):
        """Sets the expiration_dts of this SingleSignOnTokenResponse.

        Expiration date/time after which time the key is no longer valid  # noqa: E501

        :param expiration_dts: The expiration_dts of this SingleSignOnTokenResponse.  # noqa: E501
        :type: str
        """

        self._expiration_dts = expiration_dts

    @property
    def ip_address(self):
        """Gets the ip_address of this SingleSignOnTokenResponse.  # noqa: E501

        IP address of the user which we recommend you lock the simple key's usage to.  # noqa: E501

        :return: The ip_address of this SingleSignOnTokenResponse.  # noqa: E501
        :rtype: str
        """
        return self._ip_address

    @ip_address.setter
    def ip_address(self, ip_address):
        """Sets the ip_address of this SingleSignOnTokenResponse.

        IP address of the user which we recommend you lock the simple key's usage to.  # noqa: E501

        :param ip_address: The ip_address of this SingleSignOnTokenResponse.  # noqa: E501
        :type: str
        """

        self._ip_address = ip_address

    @property
    def simple_key(self):
        """Gets the simple_key of this SingleSignOnTokenResponse.  # noqa: E501

        The simple key that can then be used to make SDK calls on the users behalf.  # noqa: E501

        :return: The simple_key of this SingleSignOnTokenResponse.  # noqa: E501
        :rtype: str
        """
        return self._simple_key

    @simple_key.setter
    def simple_key(self, simple_key):
        """Sets the simple_key of this SingleSignOnTokenResponse.

        The simple key that can then be used to make SDK calls on the users behalf.  # noqa: E501

        :param simple_key: The simple_key of this SingleSignOnTokenResponse.  # noqa: E501
        :type: str
        """

        self._simple_key = simple_key

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models, lists and dicts so the
        # result contains only plain Python values.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(SingleSignOnTokenResponse, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, SingleSignOnTokenResponse):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 30.804598 | 99 | 0.607463 |
1c4c80dd1d7a0504e1fe8dd219c522ab897f9309 | 1,212 | py | Python | events/migrations/0013_auto_20190807_0658.py | gda2048/thefirst | f0a74c0a53d507297c58eb267152f6b17339ac02 | [
"Apache-2.0"
] | 5 | 2019-08-19T14:49:29.000Z | 2019-12-19T19:03:54.000Z | events/migrations/0013_auto_20190807_0658.py | Sirkirill/PhychoBlog | f0a74c0a53d507297c58eb267152f6b17339ac02 | [
"Apache-2.0"
] | 10 | 2020-02-12T00:46:12.000Z | 2022-02-10T09:16:47.000Z | events/migrations/0013_auto_20190807_0658.py | Sirkirill/PhychoBlog | f0a74c0a53d507297c58eb267152f6b17339ac02 | [
"Apache-2.0"
] | 1 | 2019-10-10T13:04:11.000Z | 2019-10-10T13:04:11.000Z | # Generated by Django 2.2.4 on 2019-08-07 06:58
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adjust field options (max_length,
    blank/null, verbose_name) on the announcement and event models."""

    dependencies = [
        ('events', '0012_auto_20190804_1909'),
    ]

    operations = [
        migrations.AlterField(
            model_name='announcement',
            name='content',
            field=models.TextField(blank=True, max_length=300, verbose_name='Описание анонса'),
        ),
        migrations.AlterField(
            model_name='announcement',
            name='name',
            field=models.CharField(max_length=100, verbose_name='Название'),
        ),
        migrations.AlterField(
            model_name='event',
            name='alt',
            field=models.TextField(blank=True, max_length=300, null=True, verbose_name='Описание фото'),
        ),
        migrations.AlterField(
            model_name='event',
            name='content',
            field=models.TextField(blank=True, max_length=400, null=True, verbose_name='Описание мероприятия'),
        ),
        migrations.AlterField(
            model_name='event',
            name='name',
            field=models.CharField(max_length=100, verbose_name='Название'),
        ),
    ]
| 31.894737 | 111 | 0.589934 |
3428219a5de9f2c5c4b6cc74c1c96e220cb8247e | 4,964 | py | Python | testscripts/RDKB/component/TDKB_TR181/TDKB_TR181_WEBPA_TDM_SetAllParameterValues_WithReboot.py | rdkcmf/rdkb-tools-tdkb | 9f9c3600cd701d5fc90ac86a6394ebd28d49267e | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/TDKB_TR181/TDKB_TR181_WEBPA_TDM_SetAllParameterValues_WithReboot.py | rdkcmf/rdkb-tools-tdkb | 9f9c3600cd701d5fc90ac86a6394ebd28d49267e | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/TDKB_TR181/TDKB_TR181_WEBPA_TDM_SetAllParameterValues_WithReboot.py | rdkcmf/rdkb-tools-tdkb | 9f9c3600cd701d5fc90ac86a6394ebd28d49267e | [
"Apache-2.0"
] | null | null | null | ##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2020 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>1</version>
<name>TDKB_TR181_WEBPA_TDM_SetAllParameterValues_WithReboot</name>
<primitive_test_id/>
<primitive_test_name>TDKB_TR181Stub_Get</primitive_test_name>
<primitive_test_version>1</primitive_test_version>
<status>FREE</status>
<synopsis>To set all tr181 parameters in TDM module using WEBPA and check if the values that are set persist after a reboot.</synopsis>
<groups_id/>
<execution_time>60</execution_time>
<long_duration>false</long_duration>
<advanced_script>false</advanced_script>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>Broadband</box_type>
<box_type>Emulator</box_type>
<box_type>RPI</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_TDKB_TR181_95</test_case_id>
<test_objective>To set all tr181 parameters in TDM module using WEBPA and check if the values that are set persist after a reboot.</test_objective>
<test_type>Positive</test_type>
<test_setup>Broadband,RPI,Emulator</test_setup>
<pre_requisite>TDK test agent should be running
TDM module's parameter xml should be available</pre_requisite>
<api_or_interface_used>None</api_or_interface_used>
<input_parameters>None</input_parameters>
<automation_approch>1. Load tdkbtr181 module and sysutil module
2. Set the validation type as WEBPA
3. Invoke setAllParams() by passing TDM as module name and rebootTest flag as true
4. Do the reboot
5. Check if all parameters are having set value and revert parameters to original values
6. Display the final status of set and the list of failed parameters, if any
7. Unload modules</automation_approch>
<expected_output>Parameters should persist on reboot</expected_output>
<priority>High</priority>
<test_stub_interface>tdktr181</test_stub_interface>
<test_script>TDKB_TR181_WEBPA_TDM_SetAllParameterValues_WithReboot</test_script>
<skipped>No</skipped>
<release_version>M76</release_version>
<remarks>None</remarks>
</test_cases>
</xml>
'''
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
import tdkbSetAllParams
from tdkbVariables import *;
import webpaUtility;
from webpaUtility import *
#Test component to be tested
# NOTE: this is a Python 2 TDK test-script template; <ipaddress>/<port>
# are literal placeholders substituted by the test framework at run time.
obj = tdklib.TDKScriptingLibrary("tdkbtr181","1");
obj1 = tdklib.TDKScriptingLibrary("sysutil","1");
#IP and Port of box, No need to change,
#This will be replaced with correspoing Box Ip and port while executing script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TDKB_TR181_WEBPA_TDM_SetAllParameterValues_WithReboot');
obj1.configureTestCase(ip,port,'TDKB_TR181_WEBPA_TDM_SetAllParameterValues_WithReboot');
#Get the result of connection with test component and DUT
loadmodulestatus=obj.getLoadModuleResult();
loadmodulestatus1=obj1.getLoadModuleResult();
if "SUCCESS" in loadmodulestatus.upper() :
    #Set the result status of execution
    obj.setLoadModuleStatus("SUCCESS")
    obj1.setLoadModuleStatus("SUCCESS")
    # Verify parodus/webpa are up before running the parameter sweep.
    tdkTestObj,preRequisiteStatus = webpaPreRequisite(obj1);
    if "SUCCESS" in preRequisiteStatus:
        print "The module to test is: TDM";
        setup_type = "WEBPA"
        #Invoke the utility function to set and validate the values for all configured tr181 params
        moduleStatus,failedParams= tdkbSetAllParams.setAllParams("TDM", setup_type, obj, obj1,"true");
        print "Status of TDM module is : ",moduleStatus, "\n";
        if moduleStatus == "FAILURE":
            print "The failed params are ", failedParams, "\n";
            tdkTestObj.setResultStatus("FAILURE");
    else:
        tdkTestObj.setResultStatus("FAILURE");
        print "Webpa Pre-requisite failed. Please check parodus and webpa processes are running in device"
    obj.unloadModule("tdkbtr181");
    obj1.unloadModule("sysutil");
else:
    print "Failed to load module";
    obj.setLoadModuleStatus("FAILURE");
    print "Module loading failed";
| 41.714286 | 152 | 0.719782 |
f5d7c55a50cb872100fc43dbe4cd1db32f67029e | 8,612 | py | Python | ppb/sprites.py | milasp/pursuedpybear | 166cbc3e7b9cd10b50c512f37ab1d943dfbdfb7b | [
"Artistic-2.0"
] | null | null | null | ppb/sprites.py | milasp/pursuedpybear | 166cbc3e7b9cd10b50c512f37ab1d943dfbdfb7b | [
"Artistic-2.0"
] | null | null | null | ppb/sprites.py | milasp/pursuedpybear | 166cbc3e7b9cd10b50c512f37ab1d943dfbdfb7b | [
"Artistic-2.0"
] | null | null | null | from inspect import getfile
from pathlib import Path
from typing import Union
import ppb
from ppb import Vector
from ppb.events import EventMixin
from ppb.utils import FauxFloat
import ppb_vector
# Canonical side names used as keys into Side.sides and the accessors.
TOP = "top"
BOTTOM = "bottom"
LEFT = "left"
RIGHT = "right"
# Template for AttributeError-style messages; the pre-bound .format is
# used to build side-specific error text.
error_message = "'{klass}' object does not have attribute '{attribute}'"
side_attribute_error_message = error_message.format
class Side(FauxFloat):
    """
    Acts like a float, but also has a variety of accessors.
    """
    # side name -> (axis attribute, sign applied to parent._offset_value)
    sides = {
        LEFT: ('x', -1),
        RIGHT: ('x', 1),
        TOP: ('y', 1),
        BOTTOM: ('y', -1)
    }
    def __init__(self, parent: 'BaseSprite', side: str):
        """Bind this accessor to *parent* for the named *side*."""
        self.side = side
        self.parent = parent
    def __repr__(self):
        return f"Side({self.parent!r}, {self.side!r})"
    def __str__(self):
        # Render as the side's float coordinate.
        return str(float(self))
    def _lookup_side(self, side):
        """Return (axis name, signed offset) for *side*, using the
        ``sides`` table and the parent's ``_offset_value``."""
        dimension, sign = self.sides[side]
        return dimension, sign * self.parent._offset_value
    def __float__(self):
        # Position on this side's axis plus the signed offset.
        dimension, offset = self._lookup_side(self.side)
        return self.parent.position[dimension] + offset
    @property
    def top(self) -> Vector:
        """
        Get the corner vector (this side's x/y paired with the parent's top).
        """
        # _attribute_gate presumably raises for invalid side/attribute
        # combinations (e.g. top.top) -- defined later in the class.
        self._attribute_gate(TOP, [TOP, BOTTOM])
        return Vector(float(self), float(self.parent.top))
    @top.setter
    def top(self, value):
        self._attribute_gate(TOP, [TOP, BOTTOM])
        self.parent.position = self._mk_update_vector_side(TOP, value)
    @property
    def bottom(self) -> Vector:
        """
        Get the corner vector (this side's x/y paired with the parent's bottom).
        """
        self._attribute_gate(BOTTOM, [TOP, BOTTOM])
        return Vector(float(self), float(self.parent.bottom))
    @bottom.setter
    def bottom(self, value):
        self._attribute_gate(BOTTOM, [TOP, BOTTOM])
        self.parent.position = self._mk_update_vector_side(BOTTOM, value)
    @property
    def left(self) -> Vector:
        """
        Get the corner vector (the parent's left paired with this side's y).
        """
        self._attribute_gate(LEFT, [LEFT, RIGHT])
        return Vector(float(self.parent.left), float(self))
    @left.setter
    def left(self, value):
        self._attribute_gate(LEFT, [LEFT, RIGHT])
        self.parent.position = self._mk_update_vector_side(LEFT, value)
    @property
    def right(self) -> Vector:
        """
        Get the corner vector (the parent's right paired with this side's y).
        """
        self._attribute_gate(RIGHT, [LEFT, RIGHT])
        return Vector(float(self.parent.right), float(self))
    @right.setter
    def right(self, value):
        self._attribute_gate(RIGHT, [LEFT, RIGHT])
        self.parent.position = self._mk_update_vector_side(RIGHT, value)
    @property
    def center(self) -> Vector:
        """
        Get the midpoint vector of this side.
        """
        # Horizontal sides vary along x, vertical sides along y.
        if self.side in (TOP, BOTTOM):
            return Vector(self.parent.center.x, float(self))
        else:
            return Vector(float(self), self.parent.center.y)
    @center.setter
    def center(self, value):
        self.parent.position = self._mk_update_vector_center(value)
def _mk_update_vector_side(self, attribute, value: Vector):
"""
Calculate the updated vector, based on the given corner.
That is, handles the calculation for forms like sprite.top.left = vector
"""
value = Vector(value)
assert attribute != 'center'
# Does a bunch of dynamc resolution:
# Sprite.top.left
# ^ ^ attribute
# self.side
self_dimension, self_offset = self._lookup_side(self.side)
attr_dimension, attr_offset = self._lookup_side(attribute)
assert self_dimension != attr_dimension
fields = {
self_dimension: value[self_dimension] - self_offset,
attr_dimension: value[attr_dimension] - attr_offset,
}
return Vector(fields)
def _mk_update_vector_center(self, value):
"""
Calculate the update vector, based on the given side.
That is, handles the calculation for forms like sprite.right = number
"""
value = Vector(value)
# Pretty similar to ._mk_update_vector_side()
self_dimension, self_offset = self._lookup_side(self.side)
attr_dimension = 'y' if self_dimension == 'x' else 'x'
fields = {
self_dimension: value[self_dimension] - self_offset,
attr_dimension: value[attr_dimension]
}
return Vector(fields)
def _attribute_gate(self, attribute, bad_sides):
if self.side in bad_sides:
name = type(self).__name__
message = side_attribute_error_message(klass=name,
attribute=attribute)
raise AttributeError(message)
class Rotatable:
    """
    Mixin that gives a sprite an angular orientation.

    The orientation is stored in ``_rotation`` (degrees, normalized into
    [0, 360)); ``basis`` defines which way the sprite's artwork points
    before any rotation is applied.
    """
    _rotation = 0
    #: The baseline vector, representing the "front" of the sprite.
    # Kept public deliberately: `facing` derives from it, and users may set
    # it for art drawn pointing in a different direction.
    basis = Vector(0, -1)

    @property
    def rotation(self):
        """Current rotation in degrees, always within [0, 360)."""
        return self._rotation

    @rotation.setter
    def rotation(self, value):
        # Normalize so repeated rotations never accumulate without bound.
        self._rotation = value % 360

    @property
    def facing(self):
        """Unit vector giving the direction the "front" currently faces."""
        front = Vector(*self.basis)
        return front.rotate(self.rotation).normalize()

    @facing.setter
    def facing(self, value):
        # Rotation is the angle between the baseline and the desired facing.
        self.rotation = self.basis.angle(value)

    def rotate(self, degrees):
        """Turn the sprite by ``degrees`` (positive or negative)."""
        self.rotation = self.rotation + degrees
class BaseSprite(EventMixin, Rotatable):
    """
    The base Sprite class. All sprites should inherit from this (directly or
    indirectly).

    Sprites are axis-aligned squares located by their center `position`;
    the `top`/`bottom`/`left`/`right` properties expose movable edges via
    `Side` objects.
    """
    #: (:py:class:`ppb.Image`): The image asset
    image = None
    #: (:py:class:`ppb.Vector`): Location of the sprite
    position: Vector = Vector(0, 0)
    #: The width/height of the sprite (sprites are square)
    size: Union[int, float] = 1
    def __init__(self, **kwargs):
        # Any keyword becomes an attribute; 'pos' and 'position' receive
        # special handling below.
        super().__init__()
        self.position = Vector(self.position)
        # Initialize things
        for k, v in kwargs.items():
            # Abbreviations
            if k == 'pos':
                k = 'position'
            # Castings
            if k == 'position':
                v = Vector(v)
            setattr(self, k, v)
        # Trigger some calculations
        self.size = self.size
    @property
    def center(self) -> Vector:
        """
        The position of the center of the sprite
        """
        return self.position
    @center.setter
    def center(self, value: ppb_vector.VectorLike):
        self.position = Vector(value)
    @property
    def left(self) -> Side:
        """
        The left side
        """
        return Side(self, LEFT)
    @left.setter
    def left(self, value: float):
        # Setting an edge moves the whole sprite; the center shifts by the
        # half-size offset.
        self.position = Vector(value + self._offset_value, self.position.y)
    @property
    def right(self) -> Side:
        """
        The right side
        """
        return Side(self, RIGHT)
    @right.setter
    def right(self, value):
        self.position = Vector(value - self._offset_value, self.position.y)
    @property
    def top(self) -> Side:
        """
        The top side
        """
        return Side(self, TOP)
    @top.setter
    def top(self, value):
        self.position = Vector(self.position.x, value - self._offset_value)
    @property
    def bottom(self) -> Side:
        """
        The bottom side
        """
        return Side(self, BOTTOM)
    @bottom.setter
    def bottom(self, value):
        self.position = Vector(self.position.x, value + self._offset_value)
    @property
    def _offset_value(self):
        # Distance from the center to any edge (sprites are square).
        return self.size / 2
    def __image__(self):
        # Lazily resolve a default image named after the class, located next
        # to the module that defines the class.
        if self.image is None:
            klass = type(self)
            prefix = Path(klass.__module__.replace('.', '/'))
            try:
                klassfile = getfile(klass)
            except TypeError:
                # Class has no source file (e.g. defined interactively);
                # fall back to the current directory.
                prefix = Path('.')
            else:
                if Path(klassfile).name != '__init__.py':
                    # Module is a plain file, not a package; drop the module
                    # name component so the image sits beside the file.
                    prefix = prefix.parent
            if prefix == Path('.'):
                self.image = ppb.Image(f"{klass.__name__.lower()}.png")
            else:
                self.image = ppb.Image(f"{prefix!s}/{klass.__name__.lower()}.png")
        return self.image
| 27.514377 | 82 | 0.58523 |
c0019d2121822ace056ffb8a014ed8ef8d44877c | 3,800 | py | Python | ComputeScore/main_180405.py | samik1986/ML_Semantic_Segmenation_NMI | eaf963e13e18ce091edb45dcb0cc67727ef32f1a | [
"MIT"
] | 13 | 2020-07-13T17:44:39.000Z | 2021-12-15T01:44:24.000Z | ComputeScore/main_180405.py | samik1986/ML_Semantic_Segmenation_NMI | eaf963e13e18ce091edb45dcb0cc67727ef32f1a | [
"MIT"
] | 22 | 2020-03-31T11:48:27.000Z | 2022-02-10T01:50:35.000Z | ComputeScore/main_180405.py | samik1986/ML_Semantic_Segmenation_NMI | eaf963e13e18ce091edb45dcb0cc67727ef32f1a | [
"MIT"
] | 5 | 2020-11-07T11:06:53.000Z | 2021-11-13T19:19:07.000Z | from dk_metric import image_metrics
import os
from multiprocessing import Process, Lock, Manager
import numpy as np
import time
import sys
'''python3 main.py gt_folder pre_folder output_folder [optional startt endt stepsize]'''
# CLI: positional ground-truth dir, proposal dir, output dir; optionally the
# threshold sweep bounds (start, end, step) as floats.
gt_folder = sys.argv[1]
prop_folder = sys.argv[2]
output_csv = os.path.join(sys.argv[3], 'scores.csv')
# Default threshold sweep: 0.05 .. 0.95 in steps of 0.01 (fractions, scaled
# to 0-255 before use).
startt, endt, stepsize = 0.05, 0.95, 0.01
if len(sys.argv) > 4:
    startt, endt, stepsize = list(map(float, sys.argv[4:]))
# radius is forwarded to get_mod_TP_FP_FN — presumably a pixel tolerance for
# the "modified" metric; confirm against dk_metric.image_metrics.
radius = 3
# Number of worker *processes* the file list is split across.
Thread_Cnt = 16
files = os.listdir(prop_folder)
lock = Lock()
# Accumulators, one entry per threshold in the sweep.
ALL_thresholds = []
ALL_precision, ALL_recall, ALL_F1, ALL_Jaccard, ALL_mod_prec, ALL_mod_recall, ALL_mod_F1 = [],[],[],[],[],[],[]
manager = Manager()
def cal_fp_tp(files, l, threshold):
    """Worker: accumulate TP/FP/FN (plain and modified) over `files`.

    Totals are added under `lock` into the shared manager list `l`, laid out
    as [sTP, sFP, sFN, msTP, msFP, msFN].
    """
    # sTP, sFP, sFN, msTP, msFP, msFN
    start_time = time.time()
    sTP, sFP, sFN, msTP, msFP, msFN = 0, 0, 0, 0, 0, 0
    for i, f in enumerate(files):
        # Ground-truth files use '_label' where proposals use '_row'.
        gt_path = os.path.join(gt_folder, f.replace('_row', '_label'))
        prop_path = os.path.join(prop_folder, f)
        # gt_path = os.path.join(gt_folder, f)
        # prop_path = os.path.join(prop_folder, f)
        # Progress heartbeat every 200 files.
        if i != 0 and i % 200 == 0:
            print(os.getpid(), i, 'th file... use', time.time() - start_time, 'seconds.')
        TP, FP, FN = image_metrics.get_TP_FP_FN(gt_path, prop_path, threshold=threshold)
        mTP, mFP, mFN = image_metrics.get_mod_TP_FP_FN(gt_path, prop_path, radius=radius, threshold=threshold)
        sTP += TP
        sFP += FP
        sFN += FN
        msTP += mTP
        msFP += mFP
        msFN += mFN
    # Single locked update at the end keeps contention low.
    with lock:
        l[0] += sTP
        l[1] += sFP
        l[2] += sFN
        l[3] += msTP
        l[4] += msFP
        l[5] += msFN
# Sweep the thresholds; for each one, fan the file list out across
# Thread_Cnt worker processes and aggregate their counts via the manager
# list, then derive precision/recall/F1/Jaccard (plain and modified).
thresholds = np.arange(startt, endt, stepsize).tolist()
for threshold in thresholds:
    ALL_thresholds.append(threshold)
    print('-------------', threshold, '-------------')
    # Thresholds are fractions; images are 8-bit, so scale to 0-255.
    threshold *= 255
    # Shared accumulator: [sTP, sFP, sFN, msTP, msFP, msFN].
    l = manager.list([0, 0, 0, 0, 0, 0])
    pool = []
    files_threads = np.array_split(files, Thread_Cnt)
    for i in range(Thread_Cnt):
        pool.append(Process(target=cal_fp_tp, args=(files_threads[i].tolist(), l, threshold,)))
    for t in pool:
        t.start()
    for t in pool:
        t.join()
    sTP, sFP, sFN, msTP, msFP, msFN = list(l)
    # Degenerate cases (no positives) are scored as perfect by convention.
    Precision = sTP / (sTP + sFP) if (sTP + sFP != 0) else 1
    Recall = sTP / (sTP + sFN) if(sTP + sFN != 0) else 1
    Jaccard = 1 / (1/Precision + 1/Recall - 1) if (Precision > 0 and Recall > 0) else 0
    F1 = 2 * Precision * Recall / (Precision + Recall) if (Precision > 0 and Recall > 0) else 0
    ALL_precision.append(Precision)
    ALL_recall.append(Recall)
    ALL_Jaccard.append(Jaccard)
    ALL_F1.append(F1)
    # Same derivation for the radius-tolerant ("modified") counts.
    mPrecision = msTP / (msTP + msFP) if (msTP + msFP != 0) else 1
    mRecall = msTP / (msTP + msFN) if(msTP + msFN != 0) else 1
    mF1 = 2 * mPrecision * mRecall / (mPrecision + mRecall) if (mPrecision > 0 and mRecall > 0) else 0
    ALL_mod_prec.append(mPrecision)
    ALL_mod_recall.append(mRecall)
    ALL_mod_F1.append(mF1)
# Emit one CSV row per metric, columns are the thresholds in sweep order.
with open(output_csv, 'w') as output:
    data_thre = 'Threshold,' + ','.join(['{:.6f}'.format(v) for v in ALL_thresholds])
    data_pre = 'Precision,' + ','.join(['{:.6f}'.format(v) for v in ALL_precision])
    data_rec = 'Recall,' + ','.join(['{:.6f}'.format(v) for v in ALL_recall])
    data_jac = 'Jaccard,' + ','.join(['{:.6f}'.format(v) for v in ALL_Jaccard])
    data_f1 = 'F1,' + ','.join(['{:.6f}'.format(v) for v in ALL_F1])
    data_mpre = 'Mod_Prec,' + ','.join(['{:.6f}'.format(v) for v in ALL_mod_prec])
    data_mrec = 'Mod_Rec,' + ','.join(['{:.6f}'.format(v) for v in ALL_mod_recall])
    data_mf1 = 'Mod_F1,' + ','.join(['{:.6f}'.format(v) for v in ALL_mod_F1])
    output.write('\n'.join([data_thre, data_pre, data_rec, data_jac, data_f1, data_mpre, data_mrec, data_mf1]))
e41529d5b611417ab7c17dad0e03946181557d69 | 724 | py | Python | python_3/synthetic_data_generator/experiments/expr_generate_random_shuffled_products.py | duttashi/valet | 25b57db860d5c1abce9f1d8b45b73bc8e8743025 | [
"MIT"
] | null | null | null | python_3/synthetic_data_generator/experiments/expr_generate_random_shuffled_products.py | duttashi/valet | 25b57db860d5c1abce9f1d8b45b73bc8e8743025 | [
"MIT"
] | 39 | 2020-10-11T06:57:35.000Z | 2021-11-02T08:57:01.000Z | python_3/synthetic_data_generator/experiments/expr_generate_random_shuffled_products.py | duttashi/valet | 25b57db860d5c1abce9f1d8b45b73bc8e8743025 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 13 07:50:11 2020
Objective: Given a product list as an input to a function,
randomly shuffle and repeat the list N number of times
Function parameters: product list, number of repetitions
Function return value: random shuffled list of products
@author: Ashish
"""
import random
def random_shuffled_products(loc_list, num):
    """
    Return a randomly ordered list containing every product in
    ``loc_list`` repeated ``num`` times.

    The input list is not modified; a new shuffled list is returned.
    """
    repeated = list(loc_list) * num
    random.shuffle(repeated)
    return repeated
# implementation
# Demo: 5 product names repeated n=10 times each -> 50 shuffled entries.
lst = ['soft toys','kitchenware','electronics','mobile phones','laptops']
n=10
rand_loc_lst = random_shuffled_products(lst, n)
print(rand_loc_lst)
print("list size: ", len(rand_loc_lst))
7ddd16dc05d37ca55426b2cedbcadc7d04f73bc5 | 100 | py | Python | fprofiles_api/apps.py | Mfaizs7/fprofiles-rest-api | d3a77b6de6a24d5fc2a23eb619c7d72fe3a8b48c | [
"MIT"
] | null | null | null | fprofiles_api/apps.py | Mfaizs7/fprofiles-rest-api | d3a77b6de6a24d5fc2a23eb619c7d72fe3a8b48c | [
"MIT"
] | null | null | null | fprofiles_api/apps.py | Mfaizs7/fprofiles-rest-api | d3a77b6de6a24d5fc2a23eb619c7d72fe3a8b48c | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class FprofilesApiConfig(AppConfig):
    """Django AppConfig for the ``fprofiles_api`` application."""
    # Dotted module path Django uses to locate and register the app.
    name = 'fprofiles_api'
| 16.666667 | 36 | 0.78 |
e3a5b686b230bf16ef4167c7db80ac7822c0f816 | 1,669 | py | Python | rdflib/__init__.py | Letractively/rdflib | b92e162efd61506d19ed25a12960cd32f0c7432e | [
"BSD-3-Clause"
] | null | null | null | rdflib/__init__.py | Letractively/rdflib | b92e162efd61506d19ed25a12960cd32f0c7432e | [
"BSD-3-Clause"
] | null | null | null | rdflib/__init__.py | Letractively/rdflib | b92e162efd61506d19ed25a12960cd32f0c7432e | [
"BSD-3-Clause"
] | null | null | null | """\
A pure Python package providing the core RDF constructs.
The packages is intended to provide the core RDF types and interfaces
for working with RDF. The package defines a plugin interface for
parsers, stores, and serializers that other packages can use to
implement parsers, stores, and serializers that will plug into the
rdflib package.
The primary interface `rdflib` exposes to work with RDF is
`rdflib.graph.Graph`.
A tiny example:
>>> import rdflib
>>> g = rdflib.Graph()
>>> result = g.parse("http://www.w3.org/People/Berners-Lee/card")
>>> print("graph has %s statements." % len(g))
graph has 77 statements.
>>>
>>> for s, p, o in g:
... if (s, p, o) not in g:
... raise Exception("It better be!")
>>> s = g.serialize(format='n3')
"""
__docformat__ = "restructuredtext en"
# The format of the __version__ line is matched by a regex in setup.py
__version__ = "3.3.0-dev"
__date__ = "2012/01/19"
__all__ = [
'URIRef',
'BNode',
'Literal',
'Variable',
'Namespace',
'Graph',
'ConjunctiveGraph',
'RDF',
'RDFS',
'OWL',
'XSD',
'util',
]
import sys
# Generator expressions require Python 2.4. Use an explicit check rather than
# `assert`: assertions are silently stripped when Python runs with -O, which
# would disable the guard exactly when it is needed. ImportError is also a
# more natural failure mode for an unimportable package than AssertionError.
if sys.version_info < (2, 4, 0):
    raise ImportError("rdflib requires Python 2.4 or higher")
del sys
import logging
_LOGGER = logging.getLogger("rdflib")
_LOGGER.info("version: %s" % __version__)
from rdflib.term import URIRef, BNode, Literal, Variable
from rdflib.namespace import Namespace
from rdflib.graph import Graph, ConjunctiveGraph
from rdflib.namespace import RDF, RDFS, OWL, XSD
from rdflib import plugin
from rdflib import query
from rdflib import util
| 21.397436 | 76 | 0.673457 |
38ea3630f549c36f26f2f680a90640035ca2e5ac | 1,807 | py | Python | tests/test_streaming.py | parking52/kopf | 96c27872ac794f5ba8155c175f072be30ebc000c | [
"MIT"
] | null | null | null | tests/test_streaming.py | parking52/kopf | 96c27872ac794f5ba8155c175f072be30ebc000c | [
"MIT"
] | null | null | null | tests/test_streaming.py | parking52/kopf | 96c27872ac794f5ba8155c175f072be30ebc000c | [
"MIT"
] | null | null | null | import collections.abc
import pytest
from kopf.reactor.watching import StopStreaming, streaming_next, streaming_aiter
async def test_streaming_next_never_ends_with_stopiteration():
    """StopStreaming raised on exhaustion must not be a StopIteration.

    Bug fix: ``pytest.raises`` yields an ``ExceptionInfo`` object, so the
    original ``isinstance(e, ...)`` checks inspected the ExceptionInfo (which
    is never an exception instance) and passed vacuously. The actual raised
    exception is ``e.value``.
    """
    lst = []
    src = iter(lst)
    with pytest.raises(StopStreaming) as e:
        streaming_next(src)
    assert not isinstance(e.value, StopIteration)
    assert not isinstance(e.value, StopAsyncIteration)
async def test_streaming_next_yields_and_ends():
    # streaming_next behaves like next() while items remain, then raises
    # StopStreaming (not StopIteration) on exhaustion.
    lst = [1, 2, 3]
    src = iter(lst)
    val1 = streaming_next(src)
    val2 = streaming_next(src)
    val3 = streaming_next(src)
    assert val1 == 1
    assert val2 == 2
    assert val3 == 3
    with pytest.raises(StopStreaming):
        streaming_next(src)
async def test_streaming_iterator_with_regular_next_yields_and_ends():
    # Wrapping with streaming_aiter leaves the underlying sync iterator
    # usable with plain next(), which still ends with StopIteration.
    lst = [1, 2, 3]
    src = iter(lst)
    itr = streaming_aiter(src)
    assert isinstance(itr, collections.abc.AsyncIterator)
    assert isinstance(itr, collections.abc.AsyncGenerator)
    val1 = next(src)
    val2 = next(src)
    val3 = next(src)
    assert val1 == 1
    assert val2 == 2
    assert val3 == 3
    with pytest.raises(StopIteration):
        next(src)
async def test_streaming_iterator_with_asyncfor_works():
    # The wrapper supports `async for` and yields every item in order.
    lst = [1, 2, 3]
    src = iter(lst)
    itr = streaming_aiter(src)
    assert isinstance(itr, collections.abc.AsyncIterator)
    assert isinstance(itr, collections.abc.AsyncGenerator)
    vals = []
    async for val in itr:
        vals.append(val)
    assert vals == lst
async def test_streaming_iterator_with_syncfor_fails():
    # Async generators are not synchronously iterable; a plain `for` loop
    # must raise TypeError.
    lst = [1, 2, 3]
    src = iter(lst)
    itr = streaming_aiter(src)
    assert isinstance(itr, collections.abc.AsyncIterator)
    assert isinstance(itr, collections.abc.AsyncGenerator)
    with pytest.raises(TypeError):
        for _ in itr:
            pass
| 23.166667 | 80 | 0.684007 |
55591e79c51d9f8834c55f4f87cd2b8901696bc8 | 9,020 | py | Python | core/src/main/bin/datax.py | xrfinbupt/DataX | 643e6f862357e34d14a7eb86c1a55ac68c31a502 | [
"Apache-2.0"
] | 6 | 2020-05-09T07:40:48.000Z | 2020-12-17T10:28:36.000Z | core/src/main/bin/datax.py | phillip2019/DataX | ad40291e23bbee4034da7200b57b014dce9ba5d4 | [
"Apache-2.0"
] | 18 | 2020-01-13T17:02:16.000Z | 2020-12-23T05:45:55.000Z | core/src/main/bin/datax.py | zhongjiajie/DataX | 643e6f862357e34d14a7eb86c1a55ac68c31a502 | [
"Apache-2.0"
] | 5 | 2019-10-25T01:44:33.000Z | 2021-10-20T05:53:57.000Z | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from __future__ import print_function
import sys
import os
import signal
import subprocess
import time
import re
import socket
import json
from optparse import OptionParser
from optparse import OptionGroup
from string import Template
import codecs
import platform
def isWindows():
    # True when running on Windows; the classpath separator and console
    # codec registration below branch on this.
    return platform.system() == 'Windows'
# DATAX_HOME is the parent of this script's bin/ directory.
DATAX_HOME = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DATAX_VERSION = 'DATAX-OPENSOURCE-3.0'
if isWindows():
    # Map the cp65001 code page to UTF-8 so console output works on Windows.
    codecs.register(lambda name: name == 'cp65001' and codecs.lookup('utf-8') or None)
    CLASS_PATH = ("%s/lib/*") % (DATAX_HOME)
else:
    CLASS_PATH = ("%s/lib/*:.") % (DATAX_HOME)
LOGBACK_FILE = ("%s/conf/logback.xml") % (DATAX_HOME)
# Default JVM flags; heap dumps land in DATAX_HOME/log on OOM.
DEFAULT_JVM = "-Xms1g -Xmx1g -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s/log" % (DATAX_HOME)
DEFAULT_PROPERTY_CONF = "-Dfile.encoding=UTF-8 -Dlogback.statusListenerClass=ch.qos.logback.core.status.NopStatusListener -Djava.security.egd=file:///dev/urandom -Ddatax.home=%s -Dlogback.configurationFile=%s" % (
    DATAX_HOME, LOGBACK_FILE)
# string.Template pattern; ${jvm}/${params}/${mode}/${jobid}/${job} are
# substituted in buildStartCommand().
ENGINE_COMMAND = "java -server ${jvm} %s -classpath %s ${params} com.alibaba.datax.core.Engine -mode ${mode} -jobid ${jobid} -job ${job}" % (
    DEFAULT_PROPERTY_CONF, CLASS_PATH)
REMOTE_DEBUG_CONFIG = "-Xdebug -Xrunjdwp:transport=dt_socket,server=y,address=9999"
# Process exit codes used by this launcher.
RET_STATE = {
    "KILL": 143,
    "FAIL": -1,
    "OK": 0,
    "RUN": 1,
    "RETRY": 2
}
def getLocalIp():
    """Return the host's IP address, or "Unknown" if resolution fails.

    Only used for the remote-debug hint printed by buildStartCommand().
    """
    try:
        return socket.gethostbyname(socket.getfqdn(socket.gethostname()))
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only resolution errors should fall back here.
        return "Unknown"
def suicide(signum, e):
    """Signal handler: pass SIGQUIT to the child JVM, kill it, and exit.

    Installed for SIGINT/SIGQUIT/SIGTERM by register_signal(). Exits with
    RET_STATE["KILL"] (143).
    """
    global child_process
    # Bug fix: a botched 2to3 conversion turned `print >> sys.stderr, msg`
    # into `print(">>", sys.stderr, msg)`, which printed the file object to
    # stdout. Use the print-function `file=` keyword instead (the module
    # imports print_function, so this also works under Python 2).
    print("[Error] DataX receive unexpected signal %d, starts to suicide." % signum,
          file=sys.stderr)
    if child_process:
        # Ask the JVM to dump/quit gracefully, give it a second, then kill.
        child_process.send_signal(signal.SIGQUIT)
        time.sleep(1)
        child_process.kill()
    print("DataX Process was killed ! you did ?", file=sys.stderr)
    sys.exit(RET_STATE["KILL"])
def register_signal():
    # Install the suicide handler for termination signals. Skipped on
    # Windows, which lacks these POSIX signal numbers.
    if not isWindows():
        global child_process
        signal.signal(2, suicide)   # SIGINT (Ctrl-C)
        signal.signal(3, suicide)   # SIGQUIT
        signal.signal(15, suicide)  # SIGTERM
def getOptionParser():
    """Build the optparse parser for the DataX launcher CLI."""
    usage = "usage: %prog [options] job-url-or-path"
    parser = OptionParser(usage=usage)
    # Options intended for normal/production use.
    prodEnvOptionGroup = OptionGroup(parser, "Product Env Options",
                                     "Normal user use these options to set jvm parameters, job runtime mode etc. "
                                     "Make sure these options can be used in Product Env.")
    prodEnvOptionGroup.add_option("-j", "--jvm", metavar="<jvm parameters>", dest="jvmParameters", action="store",
                                  default=DEFAULT_JVM, help="Set jvm parameters if necessary.")
    prodEnvOptionGroup.add_option("--jobid", metavar="<job unique id>", dest="jobid", action="store", default="-1",
                                  help="Set job unique id when running by Distribute/Local Mode.")
    prodEnvOptionGroup.add_option("-m", "--mode", metavar="<job runtime mode>",
                                  action="store", default="standalone",
                                  help="Set job runtime mode such as: standalone, local, distribute. "
                                       "Default mode is standalone.")
    prodEnvOptionGroup.add_option("-p", "--params", metavar="<parameter used in job config>",
                                  action="store", dest="params",
                                  help='Set job parameter, eg: the source tableName you want to set it by command, '
                                       'then you can use like this: -p"-DtableName=your-table-name", '
                                       'if you have mutiple parameters: -p"-DtableName=your-table-name -DcolumnName=your-column-name".'
                                       'Note: you should config in you job tableName with ${tableName}.')
    prodEnvOptionGroup.add_option("-r", "--reader", metavar="<parameter used in view job config[reader] template>",
                                  action="store", dest="reader",type="string",
                                  help='View job config[reader] template, eg: mysqlreader,streamreader')
    prodEnvOptionGroup.add_option("-w", "--writer", metavar="<parameter used in view job config[writer] template>",
                                  action="store", dest="writer",type="string",
                                  help='View job config[writer] template, eg: mysqlwriter,streamwriter')
    parser.add_option_group(prodEnvOptionGroup)
    # Options intended for development/debugging only.
    devEnvOptionGroup = OptionGroup(parser, "Develop/Debug Options",
                                    "Developer use these options to trace more details of DataX.")
    devEnvOptionGroup.add_option("-d", "--debug", dest="remoteDebug", action="store_true",
                                 help="Set to remote debug mode.")
    devEnvOptionGroup.add_option("--loglevel", metavar="<log level>", dest="loglevel", action="store",
                                 default="info", help="Set log level such as: debug, info, all etc.")
    parser.add_option_group(devEnvOptionGroup)
    return parser
def generateJobConfigTemplate(reader, writer):
    """Print a skeleton job JSON combining the given reader/writer plugins.

    Loads each plugin's plugin_job_template.json from DATAX_HOME and splices
    them into a minimal job document, printed to stdout.
    """
    readerRef = "Please refer to the %s document:\n https://github.com/alibaba/DataX/blob/master/%s/doc/%s.md \n" % (reader,reader,reader)
    writerRef = "Please refer to the %s document:\n https://github.com/alibaba/DataX/blob/master/%s/doc/%s.md \n " % (writer,writer,writer)
    print(readerRef)
    print(writerRef)
    jobGuid = 'Please save the following configuration as a json file and use\n python {DATAX_HOME}/bin/datax.py {JSON_FILE_NAME}.json \nto run the job.\n'
    print(jobGuid)
    # Minimal job skeleton; reader/writer sections filled in below.
    jobTemplate={
        "job": {
            "setting": {
                "speed": {
                    "channel": ""
                }
            },
            "content": [
                {
                    "reader": {},
                    "writer": {}
                }
            ]
        }
    }
    readerTemplatePath = "%s/plugin/reader/%s/plugin_job_template.json" % (DATAX_HOME,reader)
    writerTemplatePath = "%s/plugin/writer/%s/plugin_job_template.json" % (DATAX_HOME,writer)
    # NOTE(review): if a template file is missing, only a message is printed
    # and readerPar/writerPar stay unbound, so the assignments below raise
    # NameError — confirm whether failing hard here is the intended behavior.
    try:
        readerPar = readPluginTemplate(readerTemplatePath)
    except Exception:
        print("Read reader[%s] template error: can\'t find file %s" % (reader,readerTemplatePath))
    try:
        writerPar = readPluginTemplate(writerTemplatePath)
    except Exception:
        print("Read writer[%s] template error: : can\'t find file %s" % (writer,writerTemplatePath))
    jobTemplate['job']['content'][0]['reader'] = readerPar
    jobTemplate['job']['content'][0]['writer'] = writerPar
    print(json.dumps(jobTemplate, indent=4, sort_keys=True))
def readPluginTemplate(plugin):
    """Parse and return the JSON job template stored at path ``plugin``."""
    with open(plugin, 'r') as template_file:
        content = template_file.read()
    return json.loads(content)
def isUrl(path):
    """Return True when ``path`` looks like an http(s) URL.

    Falsy values return False; non-string values trip the assertion, as in
    the original contract.
    """
    if not path:
        return False
    assert (isinstance(path, str))
    # Case-insensitive match for an http/https scheme followed by non-space.
    return re.match(r"^http[s]?://\S+\w*", path.lower()) is not None
def buildStartCommand(options, args):
    """Assemble the `java ... com.alibaba.datax.core.Engine` command line.

    `options` is the result of getOptionParser().parse_args(); `args[0]` is
    the job configuration, either a URL or a local file path.
    """
    commandMap = {}
    tempJVMCommand = DEFAULT_JVM
    if options.jvmParameters:
        tempJVMCommand = tempJVMCommand + " " + options.jvmParameters
    if options.remoteDebug:
        tempJVMCommand = tempJVMCommand + " " + REMOTE_DEBUG_CONFIG
        print('local ip: ', getLocalIp())
    if options.loglevel:
        tempJVMCommand = tempJVMCommand + " " + ("-Dloglevel=%s" % (options.loglevel))
    if options.mode:
        commandMap["mode"] = options.mode
    # jobResource may be a URL, or a local file path (relative or absolute).
    jobResource = args[0]
    if not isUrl(jobResource):
        jobResource = os.path.abspath(jobResource)
        if jobResource.lower().startswith("file://"):
            jobResource = jobResource[len("file://"):]
    job_file_name = os.path.basename(jobResource)
    # Bug fix: the original used .rstrip('.json'), which strips any trailing
    # run of the characters '.', 'j', 's', 'o', 'n' — e.g. "mission.json"
    # became "missi". Remove the extension as a suffix instead.
    if job_file_name.endswith('.json'):
        job_file_name = job_file_name[:-len('.json')]
    jobParams = ("-Dlog.file.name=%s") % job_file_name
    if options.params:
        jobParams = jobParams + " " + options.params
    if options.jobid:
        commandMap["jobid"] = options.jobid
    commandMap["jvm"] = tempJVMCommand
    commandMap["params"] = jobParams
    commandMap["job"] = jobResource
    return Template(ENGINE_COMMAND).substitute(**commandMap)
def printCopyright():
    """Print the DataX banner and flush so it precedes child output."""
    print('''
DataX (%s), From Alibaba !
Copyright (C) 2010-2017, Alibaba Group. All Rights Reserved.
''' % DATAX_VERSION)
    sys.stdout.flush()
if __name__ == "__main__":
    printCopyright()
    parser = getOptionParser()
    options, args = parser.parse_args(sys.argv[1:])
    # -r/-w together mean "print a job template" instead of running a job.
    if options.reader is not None and options.writer is not None:
        generateJobConfigTemplate(options.reader,options.writer)
        sys.exit(RET_STATE['OK'])
    # Exactly one positional argument (the job config) is required.
    if len(args) != 1:
        parser.print_help()
        sys.exit(RET_STATE['FAIL'])
    startCommand = buildStartCommand(options, args)
    # print startCommand
    # Launch the JVM engine; signal handlers forward termination to it,
    # and our exit status mirrors the child's.
    child_process = subprocess.Popen(startCommand, shell=True)
    register_signal()
    (stdout, stderr) = child_process.communicate()
    sys.exit(child_process.returncode)
| 39.047619 | 213 | 0.627938 |
2f179bb5b9d53414b6e2c0a59dfbb0f34c45eae1 | 21,569 | py | Python | modules/sniffer.py | vaginessa/BoopSuite | 0c7afe1fc6772339617d707ac18e4dedac1d955c | [
"MIT"
] | null | null | null | modules/sniffer.py | vaginessa/BoopSuite | 0c7afe1fc6772339617d707ac18e4dedac1d955c | [
"MIT"
] | null | null | null | modules/sniffer.py | vaginessa/BoopSuite | 0c7afe1fc6772339617d707ac18e4dedac1d955c | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from logging import getLogger, ERROR
from random import choice
from threading import Thread
from scapy.all import *
from scapy.contrib.wpa_eapol import WPA_key
import devices
import globalsx
import packets
import clients
import networks
getLogger("scapy.runtime").setLevel(ERROR)
__VERSION__ = "2.0.0"
class Sniffer:
    def __init__(
        self,
        interface,
        channels,
        target,
        mac,
        unassociated,
        diagnose,
        open_network,
        Clients_,
        kill_time,
        deauth=None,
        packets=None,
        skip=None
    ):
        """Configure the sniffer and start its background threads.

        Starts a channel-hopper thread when more than one channel is given,
        a deauth thread when `deauth` is set, and the curses-less console
        printer unless `diagnose` (verbose event logging) is enabled.
        """
        self.mChannel = devices.get_channel(interface)
        self.mUnassociated = unassociated
        self.mInterface = interface
        self.mDiagnose = diagnose
        self.mChannels = channels
        self.mClients = Clients_
        self.mTarget = target
        self.mOpen = open_network
        self.mMAC = mac
        # Make scapy default to this interface.
        conf.iface = interface
        if kill_time:
            globalsx.gKILLTIME = kill_time
        self.mDeauthPackets = packets
        self.mSkip = skip
        globalsx.gDEAUTH = deauth
        self.mTime = kill_time
        self.mPackets = 0
        # Discovered state: hidden-SSID APs, APs, clients, unassoc. clients.
        self.mHidden = []
        self.mAPs = {}
        self.mCls = {}
        self.mUCls = {}
        if len(self.mChannels) == 1:
            # Single channel: pin the radio, no hopping needed.
            self.mHop = False
            self.mChannel = channels[0]
            devices.set_channel(devices.get_device(interface), channels[0])
        else:
            self.mHop = True
            hop_thread = Thread(target=self.hopper)
            hop_thread.daemon = True
            hop_thread.start()
        if globalsx.gDEAUTH:
            deauth_thread = Thread(target=self.deauther)
            deauth_thread.daemon = True
            deauth_thread.start()
        if not self.mDiagnose:
            printer_thread = Thread(target=self.printer)
            printer_thread.daemon = True
            printer_thread.start()
        return
    def deauther(self):
        """Background thread: send deauth frames to pairs queued for the
        current channel until the program exits."""
        while globalsx.gALIVE:
            deauth_packets = []
            for i in globalsx.gDEAUTHS[self.mChannel]:
                # Never deauth the protected (skip) MAC.
                if i[0] == self.mSkip or i[1] == self.mSkip: return
                # One frame in each direction (AP->client and client->AP).
                dpkt1 = Dot11(
                    addr1=i[0],
                    addr2=i[1],
                    addr3=i[1]) / Dot11Deauth()
                dpkt2 = Dot11(
                    addr1=i[1],
                    addr2=i[0],
                    addr3=i[0]) / Dot11Deauth()
                deauth_packets.append(dpkt1)
                deauth_packets.append(dpkt2)
            for deauths in deauth_packets:
                send(deauths, inter=float(0.001), count=int(self.mDeauthPackets), verbose=False)
            time.sleep(2)
            if not globalsx.gALIVE:
                return
        return 0
    def hopper(self):
        """Background thread: hop between channels every ~2.75s; once a
        target's channel is known, stay on (or near) it."""
        interface = devices.get_device(self.mInterface)
        # Repeat until program exits.
        while globalsx.gALIVE:
            try:
                if len(globalsx.gFILTERCHANNEL) != len(self.mTarget):
                    channel = choice(self.mChannels)
                    devices.set_channel(interface, channel)
                    self.mChannel = channel
                # If target found.
                else:
                    if globalsx.gFILTERCHANNEL:
                        channel = choice(globalsx.gFILTERCHANNEL)
                    else:
                        channel = choice(self.mChannels)
                    devices.set_channel(interface, channel)
                    self.mChannel = channel
                    # Exactly one target channel: lock onto it and stop.
                    if len(globalsx.gFILTERCHANNEL) == 1:
                        break
                if self.mDiagnose:
                    print("[CH]: Channel Set to: {0}".format(self.mChannel))
            except AttributeError:
                print("Error on interpreter shutdown. Disregard.")
                sys.exit(0)
            time.sleep(2.75)
        # Exit hopper.
        return
    def printer(self):
        """Background thread: redraw the console table of discovered access
        points (and, unless client display is suppressed, clients) every
        ~1.75 seconds until the program exits."""
        # Loop until program exit.
        while globalsx.gALIVE:
            wifis = list(map(self.get_access_points, self.mAPs))
            # Sort APs by SSID (index 7 of the row list).
            wifis.sort(key=lambda x: (x[7]))
            # Get Clients
            if not self.mClients:
                clients = list(map(self.get_Clients, self.mCls))
                if self.mUnassociated:
                    clients += list(map(self.get_un_Clients, self.mUCls))
                clients.sort(key=lambda x: (x[4], x[1]))
            ptime = globalsx.get_elapsed_time()
            # Clear the console.
            os.system("clear")
            # Write top line to terminal.
            sys.stdout.write(
                "[{2}] T: [{0}] C: [{1}]\n\n".format(
                    ptime,
                    self.mChannel,
                    self.mPackets,
                )
            )
            # Print first header line in red.
            sys.stdout.write(
                "{0}{1}{2}{3}{4}{5}{6}{7}\n".format(
                    "Mac Addr".ljust(19, " "),
                    "Enc".ljust(10, " "),
                    "Cipher".ljust(12, " "),
                    "Ch".ljust(5, " "),
                    "Vendor".ljust(10, " "),
                    "Sig".ljust(5, " "),
                    "Bcns".ljust(8, " "),
                    "SSID"
                )
            )
            for item in wifis:
                # Print access points
                if self.mOpen:
                    # NOTE(review): with mOpen set this hides networks whose
                    # encryption string contains "OPEN" — confirm the flag's
                    # intended polarity against the CLI help.
                    if"OPEN" not in item[1]:
                        sys.stdout.write(
                            " {0}{1}{2}{3:<5}{4}{5:<5}{6:<8}{7}\n".format(
                                item[0].ljust(19, " "),
                                item[1].ljust(10, " "),
                                item[2].ljust(11, " "),
                                item[3],
                                item[4].ljust(10, " "),
                                item[5],
                                item[6],
                                item[7].encode('utf-8')
                            )
                        )
                else:
                    sys.stdout.write(
                        " {0}{1}{2}{3:<5}{4}{5:<5}{6:<8}{7}\n".format(
                            item[0].ljust(19, " "),
                            item[1].ljust(10, " "),
                            item[2].ljust(11, " "),
                            item[3],
                            item[4].ljust(10, " "),
                            item[5],
                            item[6],
                            item[7].encode('utf-8')
                        )
                    )
            if not self.mClients:
                # Print second header in red.
                sys.stdout.write(
                    "\n{0}{1}{2}{3}{4}\n".format(
                        "Mac".ljust(19, " "),
                        "AP Mac".ljust(19, " "),
                        "Noise".ljust(7, " "),
                        "Sig".ljust(5, " "),
                        "AP SSID"
                    )
                )
                for item in clients:
                    # Print Clients.
                    sys.stdout.write(
                        " {0}{1}{2:<7}{3:<5}{4}\n".format(
                            item[0].ljust(19, " "),
                            item[1].ljust(19, " "),
                            item[2],
                            item[3],
                            item[4].encode('utf-8')
                        )
                    )
            time.sleep(1.75)
        # If exits.
        return
    # C-extension map method for retrieving access points
    def get_access_points(self, ap):
        # Flatten one AccessPoint into the display-row list used by printer().
        return [
            self.mAPs[ap].mMAC,
            self.mAPs[ap].mEnc,
            self.mAPs[ap].mCipher,
            self.mAPs[ap].mCh,
            self.mAPs[ap].mVen,
            self.mAPs[ap].mSig,
            self.mAPs[ap].mBeacons,
            self.mAPs[ap].mSSID
        ]
    # C-extension map method for retrieving Clients
    def get_Clients(self, cl):
        # Flatten one associated Client into a display row.
        return [
            self.mCls[cl].mMAC,
            self.mCls[cl].mBSSID,
            self.mCls[cl].mNoise,
            self.mCls[cl].mSig,
            self.mCls[cl].mESSID
        ]
    # C-extension map method for retrieving unassociated Clients
    def get_un_Clients(self, cl):
        # Unassociated clients have no AP, so BSSID/ESSID render as "-".
        return [
            self.mUCls[cl].mMAC, # Mac
            "-", # NULL
            self.mUCls[cl].mNoise, # Noise
            self.mUCls[cl].mSig, # Signal
            "-" # NULL
        ]
    def run(self):
        """Blocking entry point: start scapy's sniff loop, applying a BPF
        ether-host filter when target MACs were given."""
        # Check if a target is set.
        if self.mTarget:
            if len(self.mTarget) > 1:
                # OR together one "ether host" clause per target MAC.
                filter_string = "ether host "+self.mTarget[0]
                for client in self.mTarget[1:]:
                    filter_string += " or ether host " + client
            else:
                filter_string = "ether host " + self.mTarget[0].lower()
            sniff(
                iface=self.mInterface,
                filter=filter_string,
                prn=self.sniff_packets,
                store=0
            )
        # If no target is set.
        else:
            sniff(
                iface=self.mInterface,
                prn=self.sniff_packets,
                store=0
            )
        return
    # Method for parsing packets.
    def sniff_packets(self, packet_object):
        """Per-packet scapy callback: dispatch by 802.11 frame type/subtype.

        type 0 = management (probe req/resp, beacon, deauth),
        type 1 = control, type 2 = data.
        """
        self.mPackets += 1
        try:
            if packet_object.type == 0:
                if packet_object.subtype == 4:
                    self.handler_probe_request(packet_object)
                elif packet_object.subtype == 5 and packet_object.addr3 in self.mHidden:
                    self.handler_probe_response(packet_object)
                elif packet_object.subtype == 8 and devices.check_valid_mac(packet_object.addr3):
                    self.handler_beacon(packet_object)
                elif packet_object.subtype == 12:
                    self.handler_deauth(packet_object)
            elif packet_object.type == 1:
                self.handler_ctrl(packet_object)
            elif packet_object.type == 2:
                self.handler_data(packet_object)
        except AttributeError as e:
            # Frames without the expected fields (or teardown races) land here.
            print("Error raised most likely during shutdown." + str(e))
        return
    # Handler for probe requests
    def handler_probe_request(self, packet):
        """Track the probing station as an unassociated client."""
        if self.mUCls.get(packet.addr2):
            self.mUCls[packet.addr2].mSig = (packets.get_rssi(packet.notdecoded))
            # `+ 1` presumably bumps a packet counter via Client.__add__ —
            # confirm in clients.Client.
            self.mUCls[packet.addr2] + 1
        # If Client not seen.
        elif devices.check_valid_mac(packet.addr2):
            # Probing implies it left its AP; demote from associated list.
            if self.mCls.get(packet.addr2):
                del self.mCls[packet.addr2]
            self.mUCls[packet.addr2] = clients.Client(packet.addr2, "", packets.get_rssi(packet.notdecoded), "")
            if self.mDiagnose:
                print("[PR-1]: Unassociated clients.Client: {0}".format(packet.addr2))
        return
    # Handler probe responses
    def handler_probe_response(self, packet):
        """A hidden AP answered a probe: learn its real SSID."""
        # update ssid info
        if self.mAPs.get(packet.addr3):
            self.mAPs[packet.addr3].mSSID = packets.get_ssid(packet.info)
            self.mHidden.remove(packet.addr3)
            # Append this packet as beacon packet for later cracking.
            self.mAPs[packet.addr3].packets.append(packet)
            if self.mDiagnose:
                print("[P-1]: Hidden Network Uncovered: " + packets.get_ssid(packet.info))
        return
    # Handler for beacons
    def handler_beacon(self, packet):
        """Record a new access point, or refresh a known one."""
        # If AP already seen.
        if self.mAPs.get(packet.addr2):
            self.mAPs[packet.addr2].mSig = (packets.get_rssi(packet.notdecoded))
            # `+ 1` presumably bumps the beacon count via __add__ — confirm
            # in networks.AccessPoint.
            self.mAPs[packet.addr2] + 1
        # If beacon is a new AP.
        else:
            # Get name of Access Point.
            name = packets.get_ssid(packet.info)
            # Placeholder name means the SSID is hidden; remember the BSSID
            # so a later probe response can reveal it.
            if "< len: " in name:
                self.mHidden.append(packet.addr3)
            channel = packets.get_channel(packet)
            # While hopping, ignore beacons bleeding in from other channels.
            if self.mHop and int(channel) != int(self.mChannel):
                return
            # sec is a set() cipher is a string
            sec, cipher = packets.get_security(packet)
            # Test if oui in mac address
            oui = packets.get_vendor(packet.addr3)
            # Create AP object. (`unicode` is the Python 2 builtin — this
            # module targets Python 2.)
            self.mAPs[packet.addr2] = networks.AccessPoint(
                name,
                ":".join(sec),
                cipher,
                channel,
                packet.addr3,
                unicode(oui),
                packets.get_rssi(packet.notdecoded),
                packet
            )
            # If target found set filter and cancel hopper thread.
            if packet.addr3 in self.mTarget:
                globalsx.gFILTERCHANNEL.append(int(channel))
            if self.mDiagnose:
                print("[B-1]: New Network: {0}".format(name.encode('utf-8')))
        return
    def handler_deauth(self, packet):
        """Classify an observed deauth and demote affected clients."""
        # check addresses
        if self.mAPs.get(packet.addr1) and not devices.check_valid_mac(packet.addr2):
            # Deauth is targeting broadcast > Do nothing but flag this.
            if self.mDiagnose:
                print("[D-1]: Deauth to broadcast at: {0}".format(packet.addr1))
        elif self.mAPs.get(packet.addr2) and not devices.check_valid_mac(packet.addr1):
            # Deauth is targeting broadcast > Do nothing but flag this.
            if self.mDiagnose:
                print("[D-2]: Deauth to broadcast at: {0}".format(packet.addr2))
        elif self.mCls.get(packet.addr1):
            # A known client was deauthed: move it to the unassociated list.
            del self.mCls[packet.addr1]
            self.mUCls[packet.addr1] = clients.Client(packet.addr1, "", packets.get_rssi(packet.notdecoded), "")
            if self.mDiagnose:
                print("[D-3]: Deauth to target at: {0}".format(packet.addr1))
        elif self.mCls.get(packet.addr2):
            del self.mCls[packet.addr2]
            self.mUCls[packet.addr2] = clients.Client(packet.addr2, "", packets.get_rssi(packet.notdecoded), "")
            if self.mDiagnose:
                print("[D-4]: Deauth to target at: {0}".format(packet.addr2))
        else:
            if self.mDiagnose:
                print("[D-99]: Deauth detected.")
        return
    # Handler for ctrl packets
    def handler_ctrl(self, packet):
        """Control frames only refresh the signal of a known AP."""
        # If AP has been seen
        if packet.addr1 in self.mAPs:
            self.mAPs[packet.addr1].mSig = (packets.get_rssi(packet.notdecoded))
        return
    # Handler for data packets.
    def handler_data(self, packet):
        """Track client<->AP associations from data frames and collect the WPA
        4-way-handshake frames (2, 3, 4) per AP; once all three are captured,
        dump the AP's packets to pcaps/ and mark the AP as capped."""
        if packet.addr1 == packet.addr2:
            return # self-addressed frame: nothing to learn from it
        # if ap has been seen
        if self.mAPs.get(packet.addr1):
            # if clients.Client has been seen
            if self.mCls.get(packet.addr2):
                # if clients.Client changed access points
                if self.mCls[packet.addr2].mBSSID != packet.addr1:
                    # Update access point
                    # NOTE(review): this stores a BSSID (packet.addr1) into the
                    # client's mSSID field while mBSSID is left unchanged --
                    # looks like it may have been meant for mBSSID; confirm.
                    self.mCls[packet.addr2].mSSID = (packet.addr1)
                    if self.mDiagnose:
                        print("[Da-1]: clients.Client: {0} probing for: {1}".format(packet.addr2, packet.addr1))
                # Update signal and noise
                self.mCls[packet.addr2] + 1
                self.mCls[packet.addr2].mSig = (packets.get_rssi(packet.notdecoded))
            # If clients.Client was previously unassociated
            elif self.mUCls.get(packet.addr2):
                # Create a new clients.Client object
                self.mCls[packet.addr2] = clients.Client(packet.addr2, packet.addr1, packets.get_rssi(packet.notdecoded), self.mAPs[packet.addr1].mSSID)
                # Queue a deauth of this fresh association if deauthing is on.
                if globalsx.gDEAUTH:
                    globalsx.gDEAUTHS[self.mChannel].append([packet.addr2, packet.addr1])
                # Destroy previous clients.Client object
                del self.mUCls[packet.addr2]
                if self.mDiagnose:
                    print("[Da-2]: clients.Client has associated: {0}".format(packet.addr2))
            # if clients.Client previously unseen
            elif devices.check_valid_mac(packet.addr2):
                # Create new clients.Client object
                self.mCls[packet.addr2] = clients.Client(packet.addr2, packet.addr1, packets.get_rssi(packet.notdecoded), self.mAPs[packet.addr1].mSSID);
                if globalsx.gDEAUTH:
                    globalsx.gDEAUTHS[self.mChannel].append([packet.addr2, packet.addr1])
                if self.mDiagnose:
                    print("[Da-3]: New clients.Client: {0}, {1}".format(packet.addr2, packet.addr1))
        # If access point seen
        # (mirror of the branch above with addr1/addr2 swapped: AP is addr2,
        # client is addr1)
        elif self.mAPs.get(packet.addr2):
            # If clients.Client seen.
            if self.mCls.get(packet.addr1):
                # if clients.Client changed access points
                if self.mCls[packet.addr1].mBSSID != packet.addr2:
                    self.mCls[packet.addr1].mSSID = (packet.addr2)
                    if self.mDiagnose:
                        print("[Da-4]: clients.Client: {0} probing for: {1}".format(packet.addr2, packet.addr1))
                # Update noise and signal
                self.mCls[packet.addr1] + 1;
                self.mCls[packet.addr1].mSig = (packets.get_rssi(packet.notdecoded))
            # if clients.Client was previously unassociated
            elif self.mUCls.get(packet.addr1):
                # Create new clients.Client and delete old object
                self.mCls[packet.addr1] = clients.Client(packet.addr1, packet.addr2, packets.get_rssi(packet.notdecoded), self.mAPs[packet.addr2].mSSID)
                if globalsx.gDEAUTH:
                    globalsx.gDEAUTHS[self.mChannel].append([packet.addr1, packet.addr2])
                del self.mUCls[packet.addr1]
                if self.mDiagnose:
                    print("[Da-5]: clients.Client has associated: {0}".format(packet.addr1))
            # Check if mac is valid before creating new object.
            elif devices.check_valid_mac(packet.addr1):
                # Create new clients.Client object
                self.mCls[packet.addr1] = clients.Client(packet.addr1, packet.addr2, packets.get_rssi(packet.notdecoded), self.mAPs[packet.addr2].mSSID)
                if globalsx.gDEAUTH:
                    globalsx.gDEAUTHS[self.mChannel].append([packet.addr1, packet.addr2])
                if self.mDiagnose:
                    print("[Da-6]: New clients.Client: {0}".format(packet.addr1))
        # Check if packet is part of a wpa handshake
        if packet.haslayer(WPA_key):
            # If mac has not been seen.
            if packet.addr3 not in self.mAPs:
                return
            # If mac has been seen
            else:
                # Get wpa layer
                layer = packet.getlayer(WPA_key)
                if (packet.FCfield & 1):
                    # From DS = 0, To DS = 1
                    STA = packet.addr2
                elif (packet.FCfield & 2):
                    # From DS = 1, To DS = 0
                    STA = packet.addr1
                # This info may be unnecessary.
                # NOTE(review): STA is assigned above but never used below.
                key_info = layer.key_info
                wpa_key_length = layer.wpa_key_length
                replay_counter = layer.replay_counter
                # EAPOL key-info bit masks.
                WPA_KEY_INFO_INSTALL = 64
                WPA_KEY_INFO_ACK = 128
                WPA_KEY_INFO_MIC = 256
                # check for frame 2
                # (MIC set, ACK/INSTALL clear, non-empty key data)
                if (key_info & WPA_KEY_INFO_MIC) and ((key_info & WPA_KEY_INFO_ACK == 0) and (key_info & WPA_KEY_INFO_INSTALL == 0) and (wpa_key_length > 0)):
                    if self.mDiagnose:
                        print("[K-1]: {0}".format(packet.addr3))
                    self.mAPs[packet.addr3].frame2 = 1
                    self.mAPs[packet.addr3].packets.append(packet[0])
                # check for frame 3
                # (MIC, ACK and INSTALL all set)
                elif (key_info & WPA_KEY_INFO_MIC) and ((key_info & WPA_KEY_INFO_ACK) and (key_info & WPA_KEY_INFO_INSTALL)):
                    if self.mDiagnose:
                        print("[K-2]: {0}".format(packet.addr3))
                    self.mAPs[packet.addr3].frame3 = 1
                    self.mAPs[packet.addr3].replay_counter = replay_counter
                    self.mAPs[packet.addr3].packets.append(packet[0])
                # check for frame 4
                # (MIC set, ACK/INSTALL clear, replay counter echoes frame 3)
                elif (key_info & WPA_KEY_INFO_MIC) and ((key_info & WPA_KEY_INFO_ACK == 0) and (key_info & WPA_KEY_INFO_INSTALL == 0) and self.mAPs[packet.addr3].replay_counter == replay_counter):
                    if self.mDiagnose:
                        print("[K-3]: {0}".format(packet.addr3))
                    self.mAPs[packet.addr3].frame4 = 1
                    self.mAPs[packet.addr3].packets.append(packet[0])
                # Full handshake captured: write beacon + handshake to disk.
                if (self.mAPs[packet.addr3].frame2 and self.mAPs[packet.addr3].frame3 and self.mAPs[packet.addr3].frame4):
                    if self.mDiagnose:
                        print("[Key]: {0}".format(packet.addr3))
                    folder_path = ("pcaps/")
                    filename = ("{0}_{1}.pcap").format(self.mAPs[packet.addr3].mSSID.encode('utf-8'), packet.addr3[-5:].replace(":", ""))
                    wrpcap(folder_path+filename, self.mAPs[packet.addr3].packets)
                    self.mAPs[packet.addr3].mCapped = True
        # except:
        #     print("Write failed.")
        return
| 32.434586 | 196 | 0.499189 |
bb702c39d20eb57762d5bc883810c7969f9e9950 | 624 | pyde | Python | light/listing77ex2/listing77ex2.pyde | Drozdnik/2019-fall-polytech-cs | 02154dd152c454c25bdce93a0643267e8f65eee4 | [
"MIT"
] | null | null | null | light/listing77ex2/listing77ex2.pyde | Drozdnik/2019-fall-polytech-cs | 02154dd152c454c25bdce93a0643267e8f65eee4 | [
"MIT"
] | null | null | null | light/listing77ex2/listing77ex2.pyde | Drozdnik/2019-fall-polytech-cs | 02154dd152c454c25bdce93a0643267e8f65eee4 | [
"MIT"
] | null | null | null | coint = 0
def setup():
    """Load the two cross-fade images and open the sketch window."""
    # Fix: the global declaration must come before img1/img2 are assigned
    # (declaring it after the assignment is a SyntaxError in CPython 3 and
    # only a warning in the Python 2 runtime Processing uses).
    global img1, img2
    background(100)
    smooth()
    img1 = loadImage("data/0000.jpg")
    img2 = loadImage("data/0001.jpg")
    size(800, 800)
def mouseClicked():
    """Adjust the cross-fade amount: left click +5, right click -5,
    clamped to the range [0, 100] used by draw()."""
    global coint
    if mouseButton == LEFT:
        coint += 5
        # Fix: the clamps were swapped -- after incrementing we must cap at
        # the TOP of the range (the original checked `< 0` here, so repeated
        # left clicks pushed coint past 100 and the tint out of range).
        if coint > 100:
            coint = 100
    if mouseButton == RIGHT:
        coint -= 5
        # Likewise, after decrementing clamp at the bottom.
        if coint < 0:
            coint = 0
def draw():
    """Cross-fade img1 and img2 according to coint (0..100)."""
    alpha_in = map(coint, 0, 100, 0, 255)    # opacity of the incoming image
    alpha_out = map(coint, 0, 100, 255, 0)   # opacity of the outgoing image
    tint(255, alpha_out)
    image(img1, 0, 0)
    tint(255, alpha_in)
    image(img2, 0, 0)
| 22.285714 | 43 | 0.514423 |
fca28d37c36beed997074514bbd78281ce58f96e | 4,526 | py | Python | Lib/test/script_helper.py | deadsnakes/python3.1 | 88d77610a7873c5161bfc15cd69557fc7697b1a3 | [
"PSF-2.0"
] | null | null | null | Lib/test/script_helper.py | deadsnakes/python3.1 | 88d77610a7873c5161bfc15cd69557fc7697b1a3 | [
"PSF-2.0"
] | null | null | null | Lib/test/script_helper.py | deadsnakes/python3.1 | 88d77610a7873c5161bfc15cd69557fc7697b1a3 | [
"PSF-2.0"
] | null | null | null | # Common utility functions used by various script execution tests
# e.g. test_cmd_line, test_cmd_line_script and test_runpy
import sys
import os
import os.path
import tempfile
import subprocess
import py_compile
import contextlib
import shutil
import zipfile
from test.support import strip_python_stderr
# Executing the interpreter in a subprocess
def _assert_python(expected_success, *args, **env_vars):
    """Run sys.executable with *args* and raise AssertionError when the exit
    status disagrees with *expected_success*.  Returns (rc, stdout, stderr).
    """
    argv = [sys.executable]
    if not env_vars:
        argv.append('-E')
    # Preserve the original environment for in-place testing of shared
    # library builds -- unless the caller passed __cleanenv, in which case
    # it is responsible for supplying the full environment itself.
    environment = {} if env_vars.pop('__cleanenv', None) else os.environ.copy()
    environment.update(env_vars)
    argv.extend(args)
    proc = subprocess.Popen(argv, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            env=environment)
    try:
        out, err = proc.communicate()
    finally:
        subprocess._cleanup()
        proc.stdout.close()
        proc.stderr.close()
    rc = proc.returncode
    err = strip_python_stderr(err)
    # Fail when success/failure is the opposite of what was expected:
    # nonzero rc while expecting success, or zero rc while expecting failure.
    if bool(rc) == bool(expected_success):
        raise AssertionError(
            "Process return code is %d, "
            "stderr follows:\n%s" % (rc, err.decode('ascii', 'ignore')))
    return rc, out, err
def assert_python_ok(*args, **env_vars):
    """
    Assert that running the interpreter with `args` and optional environment
    variables `env_vars` succeeds; return (return code, stdout, stderr).
    """
    return _assert_python(True, *args, **env_vars)
def assert_python_failure(*args, **env_vars):
    """
    Assert that running the interpreter with `args` and optional environment
    variables `env_vars` fails; return (return code, stdout, stderr).
    """
    return _assert_python(False, *args, **env_vars)
def spawn_python(*args):
    """Start a fresh interpreter (-E) running *args*; stderr is merged into
    the returned Popen's stdout pipe."""
    argv = [sys.executable, '-E'] + list(args)
    return subprocess.Popen(argv, stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
def kill_python(p):
    """Drain and terminate a child interpreter; return its captured stdout."""
    p.stdin.close()
    output = p.stdout.read()
    p.stdout.close()
    # Reap the child so it doesn't show up as a leak when running with
    # regrtest -R.
    p.wait()
    subprocess._cleanup()
    return output
# Script creation utilities
@contextlib.contextmanager
def temp_dir():
    """Yield the real path of a fresh temporary directory; remove it on exit."""
    path = os.path.realpath(tempfile.mkdtemp())
    try:
        yield path
    finally:
        shutil.rmtree(path)
def make_script(script_dir, script_basename, source):
    """Write *source* to <script_dir>/<script_basename>.py and return the path.

    The script is written in UTF-8, the default source encoding.  Uses a
    context manager so the file handle is closed even if the write fails
    (the original left it open on error).
    """
    script_filename = script_basename + os.extsep + 'py'
    script_name = os.path.join(script_dir, script_filename)
    with open(script_name, 'w', encoding='utf-8') as script_file:
        script_file.write(source)
    return script_name
def make_pkg(pkg_dir, init_source=''):
    """Create *pkg_dir* and drop an __init__.py with *init_source* into it."""
    os.mkdir(pkg_dir)
    make_script(pkg_dir, '__init__', init_source)
def make_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename,
                 source, depth=1, compiled=False):
    """Build <zip_basename>.zip in *zip_dir* containing a package nested
    *depth* levels deep with *script_basename* at the bottom.

    Returns (zip path, path of the script inside the zip).  When *compiled*
    is true the byte-compiled files are stored instead of the sources.
    """
    unlink = []
    init_name = make_script(zip_dir, '__init__', '')
    unlink.append(init_name)
    init_basename = os.path.basename(init_name)
    script_name = make_script(zip_dir, script_basename, source)
    unlink.append(script_name)
    if compiled:
        # Fix: py_compile is a module, not a callable -- the byte-compile
        # entry point is py_compile.compile(), which returns the output path.
        init_name = py_compile.compile(init_name, doraise=True)
        script_name = py_compile.compile(script_name, doraise=True)
        unlink.extend((init_name, script_name))
    pkg_names = [os.sep.join([pkg_name]*i) for i in range(1, depth+1)]
    script_name_in_zip = os.path.join(pkg_names[-1], os.path.basename(script_name))
    zip_filename = zip_basename+os.extsep+'zip'
    zip_name = os.path.join(zip_dir, zip_filename)
    # Use a context manager so the archive is closed even on error.
    with zipfile.ZipFile(zip_name, 'w') as zip_file:
        # One __init__ per nesting level, then the script at the bottom.
        for name in pkg_names:
            init_name_in_zip = os.path.join(name, init_basename)
            zip_file.write(init_name, init_name_in_zip)
        zip_file.write(script_name, script_name_in_zip)
    # Remove the loose source/bytecode files; only the zip should remain.
    for name in unlink:
        os.unlink(name)
    return zip_name, os.path.join(zip_name, script_name_in_zip)
| 34.815385 | 83 | 0.681617 |
bcdf33198097b64581b4456d32432bc3fed4d2b2 | 4,275 | py | Python | sort.py | evandrocoan/SublimePackageDefault | ff47d47b59fc3ff2c1146168605ed85ace1f3b0e | [
"Unlicense",
"MIT"
] | 3 | 2018-12-08T21:44:45.000Z | 2019-03-01T03:22:32.000Z | sort.py | evandrocoan/SublimePackageDefault | ff47d47b59fc3ff2c1146168605ed85ace1f3b0e | [
"Unlicense",
"MIT"
] | null | null | null | sort.py | evandrocoan/SublimePackageDefault | ff47d47b59fc3ff2c1146168605ed85ace1f3b0e | [
"Unlicense",
"MIT"
] | null | null | null | import random
import sublime
import sublime_plugin
def permute_selection(f, v, e):
    """Apply list transform *f* to the texts of all non-empty selections and
    write the results back into the view."""
    regions = sorted(s for s in v.sel() if not s.empty())
    texts = f([v.substr(s) for s in regions])
    # no sane way to handle a transform that changes the number of items
    if len(texts) != len(regions):
        return
    # Replace from the end of the buffer backwards so earlier character
    # offsets are not invalidated by the edits.
    for region, text in sorted(zip(regions, texts),
                               key=lambda pair: pair[0], reverse=True):
        v.replace(e, region, text)
def case_insensitive_sort(txt):
    """Sort the string list in place ignoring case; return the same list."""
    txt.sort(key=str.lower)
    return txt
def case_sensitive_sort(txt):
    """Sort the list in place using the default ordering; return the same list."""
    txt[:] = sorted(txt)
    return txt
def reverse_list(l):
    """Reverse the list in place and return it."""
    l[:] = l[::-1]
    return l
def shuffle_list(l):
    """Shuffle the list in place via random.shuffle and return it."""
    random.shuffle(l)
    return l
def uniquealise_list(l):
    """Return a new list with duplicates removed, keeping first occurrences."""
    seen = set()
    unique = []
    for item in l:
        if item not in seen:
            seen.add(item)
            unique.append(item)
    return unique
# Dispatch table mapping PermuteLinesCommand operation names to list
# transforms (each takes a list and returns the transformed list).
permute_funcs = {
    "reverse": reverse_list,
    "shuffle": shuffle_list,
    "unique": uniquealise_list,
}
def unique_selection(v, e):
    """Erase every selection whose text duplicates an earlier selection."""
    regions = sorted(s for s in v.sel() if not s.empty())
    first_seen = {}
    duplicates = []
    for region in regions:
        text = v.substr(region)
        if text in first_seen:
            duplicates.append(region)
        else:
            first_seen[text] = region
    # Erase from the end of the buffer backwards so earlier regions keep
    # their character offsets.
    for region in reversed(duplicates):
        v.erase(e, region)
def shrink_wrap_region(view, region):
    """Return a new Region trimmed of leading and trailing whitespace.

    NOTE(review): if the region is entirely whitespace both loops run to
    completion without breaking, so the result may still span whitespace --
    confirm callers tolerate that.
    """
    a, b = region.begin(), region.end()
    # Advance `a` to the first non-space character; the loop variable reuse
    # is deliberate (`a` keeps its last value after break).
    for a in range(a, b):
        if not view.substr(a).isspace():
            break
    # Walk `b` back to the last non-space character, then bump it one past
    # so the returned region is end-exclusive.
    for b in range(b - 1, a, -1):
        if not view.substr(b).isspace():
            b += 1
            break
    return sublime.Region(a, b)
def shrinkwrap_and_expand_non_empty_selections_to_entire_line(v):
    """Replace each non-empty selection with the full lines covering its
    whitespace-trimmed span."""
    expanded = []
    for sel in v.sel():
        if not sel.empty():
            expanded.append(v.line(shrink_wrap_region(v, v.line(sel))))
            v.sel().subtract(sel)
    for region in expanded:
        v.sel().add(region)
def permute_lines(f, v, e):
    """Apply list transform *f* to the lines of each selection, or to the
    whole buffer when there is no non-empty selection."""
    shrinkwrap_and_expand_non_empty_selections_to_entire_line(v)
    regions = [s for s in v.sel() if not s.empty()]
    if not regions:
        regions = [sublime.Region(0, v.size())]
    # Work back-to-front so replacements don't shift pending offsets.
    for region in sorted(regions, reverse=True):
        permuted = f(v.substr(region).split('\n'))
        v.replace(e, region, u"\n".join(permuted))
def has_multiple_non_empty_selection_region(v):
    """Return True when the view has at least two non-empty selections."""
    return sum(1 for s in v.sel() if not s.empty()) > 1
class SortLinesCommand(sublime_plugin.TextCommand):
    """Sort the lines of each selection (or of the whole buffer)."""

    def run(self, edit, case_sensitive=False, reverse=False, remove_duplicates=False):
        sorter = case_sensitive_sort if case_sensitive else case_insensitive_sort
        permute_lines(sorter, self.view, edit)
        if reverse:
            permute_lines(reverse_list, self.view, edit)
        if remove_duplicates:
            permute_lines(uniquealise_list, self.view, edit)
class SortSelectionCommand(sublime_plugin.TextCommand):
    """Sort the texts of the individual selections in place."""

    def run(self, edit, case_sensitive=False, reverse=False, remove_duplicates=False):
        sorter = case_sensitive_sort if case_sensitive else case_insensitive_sort
        permute_selection(sorter, self.view, edit)
        if reverse:
            permute_selection(reverse_list, self.view, edit)
        if remove_duplicates:
            unique_selection(self.view, edit)

    def is_enabled(self, **kw):
        # Sorting selections only makes sense with two or more of them.
        return has_multiple_non_empty_selection_region(self.view)
class PermuteLinesCommand(sublime_plugin.TextCommand):
    """Permute the lines of each selection; *operation* is a permute_funcs
    key ("reverse", "shuffle" or "unique")."""
    def run(self, edit, operation='shuffle'):
        permute_lines(permute_funcs[operation], self.view, edit)
class PermuteSelectionCommand(sublime_plugin.TextCommand):
    """Permute the texts of the individual selections."""

    def run(self, edit, operation='shuffle'):
        if operation == "unique":
            unique_selection(self.view, edit)
        elif operation == "reverse":
            permute_selection(reverse_list, self.view, edit)
        elif operation == "shuffle":
            permute_selection(shuffle_list, self.view, edit)

    def is_enabled(self, **kw):
        return has_multiple_non_empty_selection_region(self.view)
| 22.983871 | 86 | 0.618246 |
e204d77a87f2bfc2d3f26169452ee021c901068e | 2,028 | py | Python | p14_grade_point_avg.py | Muneeb97/Python | 3d8e78268e6e09f25f43b1d119aa54129e979da4 | [
"MIT"
] | null | null | null | p14_grade_point_avg.py | Muneeb97/Python | 3d8e78268e6e09f25f43b1d119aa54129e979da4 | [
"MIT"
] | null | null | null | p14_grade_point_avg.py | Muneeb97/Python | 3d8e78268e6e09f25f43b1d119aa54129e979da4 | [
"MIT"
] | null | null | null | #5 grades point average and more
# Module-level interactive state: the shared grade list and how many grades
# to read.  NOTE: input() runs at import time, so importing blocks on stdin.
grades= []
print("Grades 0-100")
grade_number = int(input("How many grades do you want to enter: "))
def grade_collect(n):
    """Prompt for *n* grades and append them to the global `grades` list.

    Fixes two validation bugs: the original rejected a legitimate grade of 0
    (`== 0` instead of `< 0`) and accepted any value on the second prompt
    because it only re-asked once instead of looping until valid.
    """
    for i in range(n):
        grade_entered = int(input("Enter Grade: "))
        while grade_entered < 0 or grade_entered > 100:
            print("Error! Incorrect Value. Try again")
            grade_entered = int(input("Enter Grade: "))
        grades.append(grade_entered)
def grade_avg(grades_lst):
    """Return the arithmetic mean of *grades_lst*.

    Raises ZeroDivisionError on an empty list, matching the original.
    The original shadowed the builtin `sum` with a manual accumulator loop;
    the builtin does the same thing directly.
    """
    return sum(grades_lst) / len(grades_lst)
def print_sorted_grade(grades_lst):
    """Print the grades in descending order, one per line."""
    print("Sorted Grades are: ")
    for grade in sorted(grades_lst, reverse=True):
        print(" ", grade)
def summary_grd(grades_lst):
    """Print count, max, min and mean of the collected grades."""
    print("General Summary: ")
    stats = (
        ("Total number of grades: ", len(grades_lst)),
        ("The highest grade is: ", max(grades_lst)),
        ("The lowest grade is: ", min(grades_lst)),
        ("The Average is: ", grade_avg(grades_lst)),
    )
    for label, value in stats:
        print(label, value)
def next_grade_toget_avg(grades):
    """Ask for a target average and report the next grade needed to reach it.

    Keeps re-asking while the required grade would exceed 100.
    """
    current_avg = sum(grades) / len(grades)
    print("Your current avg is: ", current_avg)
    to_avg = int(input("What is your desired avg? "))
    nxt_grade_req = (to_avg * 2) - current_avg
    while (nxt_grade_req > 100):
        print("Unacheivable Average. Choose again.")
        to_avg = int(input("What is your desired avg? "))
        nxt_grade_req = (to_avg * 2) - current_avg
    print('\n')
    print("You have to get atleast {} for your next grade to get the desired average".format(nxt_grade_req))
def change_grade(grades):
    """Replace one existing grade with a new value.

    Re-prompts until the grade to change is actually in the list; the
    original crashed with ValueError from list.index() on an unknown grade.
    """
    print("You can change one grade.")
    chnge_grd = int(input("Which grade to change? "))
    while chnge_grd not in grades:
        print("Error! Incorrect Value. Try again")
        chnge_grd = int(input("Which grade to change? "))
    chnge_grd_with = int(input("Change grade with what? "))
    grades[grades.index(chnge_grd)] = chnge_grd_with
# Interactive driver: collect the grades, report, adjust one grade, re-report.
grade_collect(grade_number)
print_sorted_grade(grades)
summary_grd(grades)
print('\n')
next_grade_toget_avg(grades)
change_grade(grades)
summary_grd(grades)
| 33.245902 | 109 | 0.64497 |
59cbf70163f23b530a29da29a4532f89bbb4373e | 1,265 | py | Python | saleor/search/backends/elasticsearch_dashboard.py | jdruiter/saleor | 9393ac20bd3e82c8ec1f17f6e47e3d7379f20419 | [
"BSD-3-Clause"
] | 1 | 2021-01-29T13:28:29.000Z | 2021-01-29T13:28:29.000Z | saleor/search/backends/elasticsearch_dashboard.py | jdruiter/saleor | 9393ac20bd3e82c8ec1f17f6e47e3d7379f20419 | [
"BSD-3-Clause"
] | 1 | 2022-02-10T14:46:00.000Z | 2022-02-10T14:46:00.000Z | saleor/search/backends/elasticsearch_dashboard.py | jdruiter/saleor | 9393ac20bd3e82c8ec1f17f6e47e3d7379f20419 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import unicode_literals
from ..documents import ProductDocument, OrderDocument, UserDocument
from elasticsearch_dsl.query import MultiMatch
def _search_products(phrase):
    """Search products by name/title/description, ordered by relevance score."""
    query = MultiMatch(
        query=phrase,
        fields=['name', 'title', 'description'],
        type='cross_fields')
    search = ProductDocument.search().query(query)
    # source(False): only document ids are needed, not the stored fields.
    return search.sort('_score').source(False)
def _search_users(phrase):
    """Search users across login, email and name fields; all terms must match."""
    query = MultiMatch(
        query=phrase,
        fields=['user', 'email', 'first_name', 'last_name'],
        type='cross_fields',
        operator='and')
    return UserDocument.search().query(query).source(False)
def _search_orders(phrase):
    """Search orders by user, status or discount name."""
    query = MultiMatch(query=phrase,
                       fields=['user', 'status', 'discount_name'])
    return OrderDocument.search().query(query).source(False)
def get_search_queries(phrase):
    """Execute external search for all objects matching phrase."""
    searchers = {
        'products': _search_products,
        'users': _search_users,
        'orders': _search_orders,
    }
    return {key: searcher(phrase) for key, searcher in searchers.items()}
def search(phrase):
    """Provide a queryset for every search result category."""
    queries = get_search_queries(phrase)
    return {name: query.to_queryset() for name, query in queries.items()}
| 29.418605 | 78 | 0.683004 |
b3f618fb5cfbc24d9d551ab3defb1a5aa08b963f | 2,163 | py | Python | Detection_Cheating/controllers/analysis_con.py | kimheejoo/Detection_Cheating_Server | 488a9c61ab5c3030c7949695549f2edc34f20be1 | [
"MIT"
] | 2 | 2022-01-04T05:50:50.000Z | 2022-03-10T05:57:56.000Z | Detection_Cheating/controllers/analysis_con.py | kimheejoo/Detection_Cheating_Server | 488a9c61ab5c3030c7949695549f2edc34f20be1 | [
"MIT"
] | null | null | null | Detection_Cheating/controllers/analysis_con.py | kimheejoo/Detection_Cheating_Server | 488a9c61ab5c3030c7949695549f2edc34f20be1 | [
"MIT"
] | 2 | 2021-01-05T15:50:32.000Z | 2021-09-28T06:23:35.000Z | '''
analysis controller
'''
from flask import current_app
from models import db, Students
from datetime import datetime
import scapy.all as sc
def getPacketTime(packet_path):
    """Return [first, last] packet timestamps of a capture as formatted strings.

    Returns None when the capture cannot be loaded, or when it contains no
    packets (the original raised IndexError on an empty capture).
    """
    try:
        pkts = sc.rdpcap(packet_path)
    except MemoryError:
        print("Memory Error로 인해 packet을 열지 못하였습니다.")
        return None
    if not pkts:
        return None
    fmt = "%Y-%m-%d %H:%M:%S"
    first = datetime.fromtimestamp(pkts[0].time).strftime(fmt)
    last = datetime.fromtimestamp(pkts[-1].time).strftime(fmt)
    return [first, last]
def network_analysis_con(student):
    """Network analysis: scan a student's packet capture for known services.

    Args:
        student: Students ORM object.
    Side effects:
        Stores the detected service list ("A/B/C") and the capture's time
        range on the student's DB row, then commits.
    """
    # pcapng = current_app.config["UPLOAD_PACKET_FOLDER"] + f"{student.student_number}.pcapng"
    result = []
    with open(student.packet_path, "rb") as f:
        string = f.read().hex()
        if "676f6f676c6561647365727669636573" in string:  # google search(googleadservices)
            result.append("Google")
        # BUG FIX: the original wrote `"..." or "..." in string`, which is
        # always true (a non-empty literal is truthy), so every capture was
        # flagged as Naver.  Each substring must be tested against `string`.
        if "7365617263682e6e617665722e" in string or "626c6f672e6e61766572" in string:  # search.naver, blog.naver
            result.append("Naver")
        if "7365617263682e6461756d2e" in string:  # search.daum
            result.append("Daum")
        if "6b616b616f" in string:  # kakao
            result.append("KaKaoTalk")
        if "646973636f7264" in string:  # discord
            result.append("Discord")
        if "796f7574756265" in string:  # youtube
            result.append("Youtube")
        if "676974687562" in string:  # github
            result.append("Github")
    # Join with "/" (the original built this by hand and shadowed `list`).
    packet_list = "/".join(result)
    student = Students.query.filter(Students.student_number == student.student_number)
    student.update({'network_result': packet_list})
    # student = student.first()
    temp_range = getPacketTime(student.first().packet_path)
    time_range = temp_range[0] + "/" + temp_range[1]
    student.update({'time_range': time_range})
    db.session.commit()
| 30.041667 | 102 | 0.632455 |
1dbbc12b54380bc49a8a4128792adc3bac6c6df5 | 182 | py | Python | test/src_examples/python/py_error_and_fail_manyErrors/Root/test/test3.py | codeboardio/kali | 454c5c392aa0fb222b67b54549b32edbf15071c0 | [
"MIT"
] | null | null | null | test/src_examples/python/py_error_and_fail_manyErrors/Root/test/test3.py | codeboardio/kali | 454c5c392aa0fb222b67b54549b32edbf15071c0 | [
"MIT"
] | null | null | null | test/src_examples/python/py_error_and_fail_manyErrors/Root/test/test3.py | codeboardio/kali | 454c5c392aa0fb222b67b54549b32edbf15071c0 | [
"MIT"
] | 2 | 2017-10-13T11:30:22.000Z | 2020-01-12T14:22:28.000Z | from Root import application
import unittest
class test3(unittest.TestCase):
    """Unit test: application.foo() is expected to return 1."""
    def test_one(self):
        self.assertEqual(application.foo(),1)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 22.75 | 39 | 0.769231 |
509b07e84321b4393b79fa3391ca37e73e56fbe1 | 5,664 | py | Python | LinearResponseVariationalBayes/NormalParams.py | rgiordan/LinearResponseVariationalBayes.py | 5305d7e204481e518c5ef82bbed065ec627a4a9e | [
"Apache-2.0"
] | 6 | 2018-08-19T08:00:00.000Z | 2019-10-27T10:31:36.000Z | LinearResponseVariationalBayes/NormalParams.py | rgiordan/LinearResponseVariationalBayes.py | 5305d7e204481e518c5ef82bbed065ec627a4a9e | [
"Apache-2.0"
] | 9 | 2017-06-09T17:58:28.000Z | 2018-10-04T22:52:33.000Z | LinearResponseVariationalBayes/NormalParams.py | rgiordan/LinearResponseVariationalBayes.py | 5305d7e204481e518c5ef82bbed065ec627a4a9e | [
"Apache-2.0"
] | 1 | 2018-01-15T06:26:35.000Z | 2018-01-15T06:26:35.000Z | import LinearResponseVariationalBayes as vb
import LinearResponseVariationalBayes.ExponentialFamilies as ef
from autograd import numpy as np
class MVNParam(vb.ModelParamsDict):
    """Multivariate normal variational parameter: mean vector plus a
    positive-definite information (inverse covariance) matrix."""
    def __init__(self, name='', dim=2, min_info=0.0):
        super().__init__(name=name)
        self.__dim = dim
        self.push_param(vb.VectorParam('mean', dim))
        self.push_param(vb.PosDefMatrixParam('info', dim, diag_lb=min_info))
    def e(self):
        """Mean E[x]."""
        return self['mean'].get()
    def cov(self):
        """Covariance: inverse of the information matrix."""
        return np.linalg.inv(self['info'].get())
    def e_outer(self):
        """Second moment E[x x^T], symmetrized for numerical safety."""
        mu = self['mean'].get()
        second_moment = np.outer(mu, mu) + self.cov()
        return 0.5 * (second_moment + second_moment.transpose())
    def entropy(self):
        return ef.multivariate_normal_entropy(self['info'].get())
class UVNParam(vb.ModelParamsDict):
    """Univariate normal variational parameter: scalar mean and information."""
    def __init__(self, name='', min_info=0.0):
        super().__init__(name=name)
        self.push_param(vb.ScalarParam('mean'))
        self.push_param(vb.ScalarParam('info', lb=min_info))
    def e(self):
        return self['mean'].get()
    def e_outer(self):
        """Second moment E[x^2] = mean^2 + variance."""
        return self['mean'].get() ** 2 + 1 / self['info'].get()
    def var(self):
        return 1. / self['info'].get()
    def e_exp(self):
        """E[exp(x)] under the implied log-normal."""
        return ef.get_e_lognormal(self['mean'].get(), self.var())
    def var_exp(self):
        """Var[exp(x)] under the implied log-normal."""
        return ef.get_var_lognormal(self['mean'].get(), self.var())
    def e2_exp(self):
        """E[exp(x)^2] = E[exp(x)]^2 + Var[exp(x)]."""
        return self.e_exp() ** 2 + self.var_exp()
    def entropy(self):
        return ef.univariate_normal_entropy(self['info'].get())
# TODO: better to derive this from UVNParamArray?
class UVNParamVector(vb.ModelParamsDict):
    """Vector of independent univariate normals parameterized by (mean, info)."""
    def __init__(self, name='', length=2, min_info=0.0):
        super().__init__(name=name)
        self.__size = length
        self.push_param(vb.VectorParam('mean', length))
        self.push_param(vb.VectorParam('info', length, lb=min_info))
    def e(self):
        return self['mean'].get()
    def e_outer(self):
        """Elementwise second moment E[x^2]."""
        return self['mean'].get() ** 2 + 1 / self['info'].get()
    def var(self):
        return 1. / self['info'].get()
    def e_exp(self):
        """E[exp(x)] under the implied log-normal."""
        return ef.get_e_lognormal(self['mean'].get(), self.var())
    def var_exp(self):
        """Var[exp(x)] under the implied log-normal."""
        return ef.get_var_lognormal(self['mean'].get(), self.var())
    def e2_exp(self):
        return self.e_exp() ** 2 + self.var_exp()
    def entropy(self):
        return np.sum(ef.univariate_normal_entropy(self['info'].get()))
    def size(self):
        return self.__size
class UVNParamArray(vb.ModelParamsDict):
    """Array of independent univariate normals parameterized by (mean, info)."""
    def __init__(self, name='', shape=(1, 1), min_info=0.0):
        super().__init__(name=name)
        self.__shape = shape
        self.push_param(vb.ArrayParam('mean', shape))
        self.push_param(vb.ArrayParam('info', shape, lb=min_info))
    def e(self):
        return self['mean'].get()
    def e_outer(self):
        """Elementwise second moment E[x^2]."""
        return self['mean'].get() ** 2 + 1 / self['info'].get()
    def var(self):
        return 1. / self['info'].get()
    def e_exp(self):
        """E[exp(x)] under the implied log-normal."""
        return ef.get_e_lognormal(self['mean'].get(), self.var())
    def var_exp(self):
        """Var[exp(x)] under the implied log-normal."""
        return ef.get_var_lognormal(self['mean'].get(), self.var())
    def e2_exp(self):
        return self.e_exp() ** 2 + self.var_exp()
    def entropy(self):
        return np.sum(ef.univariate_normal_entropy(self['info'].get()))
    def shape(self):
        return self.__shape
# A moment parameterization of the UVN array.
class UVNMomentParamArray(vb.ModelParamsDict):
    """UVN array parameterized by its moments (e, e2) rather than (mean, info)."""
    def __init__(self, name='', shape=(2, 3), min_info=0.0):
        super().__init__(name=name)
        self.__shape = shape
        self.push_param(vb.ArrayParam('e', shape))
        self.push_param(vb.ArrayParam('e2', shape, lb=min_info))
    def e(self):
        return self['e'].get()
    def e_outer(self):
        return self['e2'].get()
    def var(self):
        # Var[x] = E[x^2] - E[x]^2.
        return self['e2'].get() - self['e'].get() ** 2
    def e_exp(self):
        """E[exp(x)] under the implied log-normal."""
        return ef.get_e_lognormal(self['e'].get(), self.var())
    def var_exp(self):
        """Var[exp(x)] under the implied log-normal.

        BUG FIX: this previously called ef.get_e_lognormal, so var_exp()
        returned the log-normal *mean* instead of its variance (and e2_exp()
        was wrong as a result).  Now consistent with UVNParamArray.var_exp.
        """
        return ef.get_var_lognormal(self['e'].get(), self.var())
    def e2_exp(self):
        return self.e_exp() ** 2 + self.var_exp()
    def entropy(self):
        info = 1. / self.var()
        return np.sum(ef.univariate_normal_entropy(info))
    def shape(self):
        return self.__shape
    def set_from_uvn_param_array(self, uvn_par):
        """Copy moments from a (mean, info)-parameterized UVNParamArray."""
        assert(uvn_par.shape() == self.shape())
        self['e'].set(uvn_par.e())
        self['e2'].set(uvn_par.e_outer())
    # Set from a scalar array assuming zero variance.
    def set_from_constant(self, scalar_array_par):
        assert(scalar_array_par.shape() == self.shape())
        self['e'].set(scalar_array_par.get())
        self['e2'].set(scalar_array_par.get() ** 2)
# Array of multivariate normals.
# For now each row is a draw from an MVN with diagonal constant variance;
# a collection of full covariance matrices is not handled yet, but for the
# current IBP model this is all we need.
class MVNArray(vb.ModelParamsDict):
    """Array of MVN draws with one shared info scalar per row."""
    def __init__(self, name='', shape=(2,2), min_info=0.0):
        super().__init__(name=name)
        self.__shape = shape
        self.push_param(vb.ArrayParam('mean', shape=shape))
        self.push_param(vb.VectorParam('info', size=shape[0], lb=min_info))
    def e(self):
        """First moment E[x]."""
        return self['mean'].get()
    def e2(self):
        """Elementwise second moment: mean^2 plus each row's variance."""
        row_var = 1 / self['info'].get()
        return self['mean'].get() ** 2 + row_var[:, None]
| 34.748466 | 76 | 0.595516 |
6baa31b5382cdb4743e60acb2e12e1249c797744 | 88 | py | Python | ontology/logistic_regression/sherlock/listify_circuits_k18_forward.py | ehbeam/neuro-knowledge-engine | 9dc56ade0bbbd8d14f0660774f787c3f46d7e632 | [
"MIT"
] | 15 | 2020-07-17T07:10:26.000Z | 2022-02-18T05:51:45.000Z | ontology/neural_network/sherlock/listify_circuits_k18_forward.py | YifeiCAO/neuro-knowledge-engine | 9dc56ade0bbbd8d14f0660774f787c3f46d7e632 | [
"MIT"
] | 2 | 2022-01-14T09:10:12.000Z | 2022-01-28T17:32:42.000Z | ontology/neural_network/sherlock/listify_circuits_k18_forward.py | YifeiCAO/neuro-knowledge-engine | 9dc56ade0bbbd8d14f0660774f787c3f46d7e632 | [
"MIT"
] | 4 | 2021-12-22T13:27:32.000Z | 2022-02-18T05:51:47.000Z | #!/bin/python
import listify_circuits
listify_circuits.optimize_circuits(18, 'forward') | 22 | 49 | 0.829545 |
f919b450f32ed59470ec9ed0132ffbe5b0da3585 | 8,625 | py | Python | tests/ut/python/dataset/test_random_posterize.py | huxian123/mindspore | ec5ba10c82bbd6eccafe32d3a1149add90105bc8 | [
"Apache-2.0"
] | 2 | 2021-04-22T07:00:59.000Z | 2021-11-08T02:49:09.000Z | tests/ut/python/dataset/test_random_posterize.py | huxian123/mindspore | ec5ba10c82bbd6eccafe32d3a1149add90105bc8 | [
"Apache-2.0"
] | 1 | 2020-12-29T06:46:38.000Z | 2020-12-29T06:46:38.000Z | tests/ut/python/dataset/test_random_posterize.py | huxian123/mindspore | ec5ba10c82bbd6eccafe32d3a1149add90105bc8 | [
"Apache-2.0"
] | 1 | 2021-05-10T03:30:36.000Z | 2021-05-10T03:30:36.000Z | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Testing RandomPosterize op in DE
"""
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.vision.c_transforms as c_vision
from mindspore import log as logger
from util import visualize_list, save_and_check_md5, \
config_get_set_seed, config_get_set_num_parallel_workers, diff_mse
GENERATE_GOLDEN = False
DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json"
def test_random_posterize_op_c(plot=False, run_golden=False):
    """
    Test RandomPosterize in C transformations (uses assertion on mse as using md5 could have jpeg decoding
    inconsistencies)
    """
    logger.info("test_random_posterize_op_c")
    # Pin seed and worker count so the random posterize bit depth is
    # reproducible across runs.
    original_seed = config_get_set_seed(55)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)
    # define map operations
    transforms1 = [
        c_vision.Decode(),
        c_vision.RandomPosterize((1, 8))
    ]
    # First dataset
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data1 = data1.map(operations=transforms1, input_columns=["image"])
    # Second dataset
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(operations=[c_vision.Decode()], input_columns=["image"])
    image_posterize = []
    image_original = []
    for item1, item2 in zip(data1.create_dict_iterator(num_epochs=1, output_numpy=True),
                            data2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        image1 = item1["image"]
        image2 = item2["image"]
        image_posterize.append(image1)
        image_original.append(image2)
    # check mse as md5 can be inconsistent.
    # mse = 2.9668956 is calculated from
    # a thousand runs of diff_mse(np.array(image_original), np.array(image_posterize)) that all produced the same mse.
    # allow for an error of 0.0000005
    # NOTE(review): if this assert fires, the seed/worker config below is
    # never restored -- confirm later tests tolerate that.
    assert abs(2.9668956 - diff_mse(np.array(image_original), np.array(image_posterize))) <= 0.0000005
    if run_golden:
        # check results with md5 comparison
        filename = "random_posterize_01_result_c.npz"
        save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)
    if plot:
        visualize_list(image_original, image_posterize)
    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
def test_random_posterize_op_fixed_point_c(plot=False, run_golden=True):
    """
    Test RandomPosterize in C transformations with a fixed point (single bit value,
    so the result is deterministic and safe for md5 comparison).

    Args:
        plot (bool): if True, visualize the original images next to the posterized ones.
        run_golden (bool): if True, compare the pipeline output against the golden md5 file.
    """
    # Bug fix: previously logged "test_random_posterize_op_c" (copy-paste from the ranged test).
    logger.info("test_random_posterize_op_fixed_point_c")
    # define map operations: decode, then posterize every image down to 1 bit
    transforms1 = [
        c_vision.Decode(),
        c_vision.RandomPosterize(1)
    ]
    # First dataset: decode + posterize
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data1 = data1.map(operations=transforms1, input_columns=["image"])
    # Second dataset: decode only (baseline used for visualization)
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(operations=[c_vision.Decode()], input_columns=["image"])
    image_posterize = []
    image_original = []
    for item1, item2 in zip(data1.create_dict_iterator(num_epochs=1, output_numpy=True),
                            data2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        image_posterize.append(item1["image"])
        image_original.append(item2["image"])
    if run_golden:
        # check results with md5 comparison
        filename = "random_posterize_fixed_point_01_result_c.npz"
        save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)
    if plot:
        visualize_list(image_original, image_posterize)
def test_random_posterize_default_c_md5(plot=False, run_golden=True):
    """
    Test RandomPosterize C Op (default params) with md5 comparison.

    Args:
        plot (bool): if True, visualize the original images next to the posterized ones.
        run_golden (bool): if True, compare the pipeline output against the golden md5 file.
    """
    logger.info("test_random_posterize_default_c_md5")
    # Fix the seed so the random bit choice is reproducible for the md5 check.
    original_seed = config_get_set_seed(5)
    original_num_parallel_workers = config_get_set_num_parallel_workers(1)
    # define map operations
    transforms1 = [
        c_vision.Decode(),
        c_vision.RandomPosterize()
    ]
    # First dataset: decode + posterize with default parameters
    data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data1 = data1.map(operations=transforms1, input_columns=["image"])
    # Second dataset: decode only (baseline used for visualization)
    data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
    data2 = data2.map(operations=[c_vision.Decode()], input_columns=["image"])
    image_posterize = []
    image_original = []
    # Consistency fix: pass num_epochs=1 like every sibling test so the
    # iterators are finite single-epoch iterators.
    for item1, item2 in zip(data1.create_dict_iterator(num_epochs=1, output_numpy=True),
                            data2.create_dict_iterator(num_epochs=1, output_numpy=True)):
        image_posterize.append(item1["image"])
        image_original.append(item2["image"])
    if run_golden:
        # check results with md5 comparison
        filename = "random_posterize_01_default_result_c.npz"
        save_and_check_md5(data1, filename, generate_golden=GENERATE_GOLDEN)
    if plot:
        visualize_list(image_original, image_posterize)
    # Restore configuration
    ds.config.set_seed(original_seed)
    ds.config.set_num_parallel_workers(original_num_parallel_workers)
def test_random_posterize_exception_bit():
    """
    Test RandomPosterize: out of range input bits and invalid type.

    Each invalid construction must raise. Previously, if RandomPosterize failed
    to raise, the try/except block passed silently; every case now fails the
    test explicitly when no exception is thrown.
    """
    logger.info("test_random_posterize_exception_bit")

    def _expect_failure(build, error_type, expected_message):
        # Helper: calling `build` must raise `error_type` with exactly `expected_message`.
        try:
            build()
        except error_type as e:
            logger.info("Got an exception in DE: {}".format(str(e)))
            assert str(e) == expected_message
        else:
            raise AssertionError(
                "RandomPosterize did not raise {}".format(error_type.__name__))

    # Test max > 8
    _expect_failure(lambda: c_vision.RandomPosterize((1, 9)), ValueError,
                    "Input is not within the required interval of (1 to 8).")
    # Test min < 1
    _expect_failure(lambda: c_vision.RandomPosterize((0, 7)), ValueError,
                    "Input is not within the required interval of (1 to 8).")
    # Test max < min
    _expect_failure(lambda: c_vision.RandomPosterize((8, 1)), ValueError,
                    "Input is not within the required interval of (1 to 8).")
    # Test wrong type (not uint8)
    _expect_failure(lambda: c_vision.RandomPosterize(1.1), TypeError,
                    "Argument bits with value 1.1 is not of type (<class 'list'>, <class 'tuple'>, <class 'int'>).")
    # Test wrong number of bits
    _expect_failure(lambda: c_vision.RandomPosterize((1, 1, 1)), TypeError,
                    "Size of bits should be a single integer or a list/tuple (min, max) of length 2.")
def test_rescale_with_random_posterize():
    """
    Test RandomPosterize: only support CV_8S/CV_8U.

    Rescale converts the uint8 image to float, which RandomPosterize must
    reject at execution time with a RuntimeError. Previously, if no exception
    was raised the test passed silently; it now fails explicitly in that case.
    """
    logger.info("test_rescale_with_random_posterize")
    DATA_DIR_10 = "../data/dataset/testCifar10Data"
    dataset = ds.Cifar10Dataset(DATA_DIR_10)
    # Rescale produces float output, the unsupported input type for posterize.
    rescale_op = c_vision.Rescale((1.0 / 255.0), 0.0)
    dataset = dataset.map(operations=rescale_op, input_columns=["image"])
    random_posterize_op = c_vision.RandomPosterize((4, 8))
    dataset = dataset.map(operations=random_posterize_op, input_columns=["image"], num_parallel_workers=1)
    try:
        # output_shapes() forces pipeline execution, triggering the type check.
        _ = dataset.output_shapes()
    except RuntimeError as e:
        logger.info("Got an exception in DE: {}".format(str(e)))
        assert "Input image data type can not be float" in str(e)
    else:
        raise AssertionError("RandomPosterize on float input did not raise RuntimeError")
if __name__ == "__main__":
    # Run every RandomPosterize test without plotting; the mse-based test also
    # skips the golden md5 comparison (run_golden=False).
    test_random_posterize_op_c(plot=False, run_golden=False)
    test_random_posterize_op_fixed_point_c(plot=False)
    test_random_posterize_default_c_md5(plot=False)
    test_random_posterize_exception_bit()
    test_rescale_with_random_posterize()
| 38.163717 | 120 | 0.694493 |
5d2fb52f90c271fbb96cb73db468ab7243f29422 | 2,484 | py | Python | classification/Decision Tree/tree.py | 4yub1k/Machine-learning | 8f15be5409e6b47b8d7da1ab0fd04f717f2668a8 | [
"MIT"
] | null | null | null | classification/Decision Tree/tree.py | 4yub1k/Machine-learning | 8f15be5409e6b47b8d7da1ab0fd04f717f2668a8 | [
"MIT"
] | null | null | null | classification/Decision Tree/tree.py | 4yub1k/Machine-learning | 8f15be5409e6b47b8d7da1ab0fd04f717f2668a8 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Decision-tree classification demo: predict the best drug for a patient from
# age, sex, blood pressure, cholesterol and sodium-to-potassium ratio.
#Here we have pick the best drug (A,B) for a patient based on their age, sex etc
"""Load CSV"""
df = pd.read_csv("drug.csv", sep=',')
print(df[0:5])
print("Total row/entries : ",df.shape[0],df.shape)
"""Convert to array of vales (no column row)"""
# x: feature matrix (object dtype, still holds category strings);
# y: target drug labels, kept as a pandas Series.
x= df[['Age', 'Sex', 'BP', 'Cholesterol', 'Na_to_K']].values
y= df['Drug'] #.values will put it in array
"""Lets convert the categorical to numerical values pandas.get_dummies()"""
from sklearn import preprocessing
# Encode each categorical column of x in place with an integer label encoder.
le_sex = preprocessing.LabelEncoder()
le_sex.fit(['F','M']) #name inside column, as 0,1
x[:,1] = le_sex.transform(x[:,1]) # age,sex,BP as sex is at 1 postion ['Age', 'Sex', 'BP', 'Cholesterol', 'Na_to_K']
le_BP = preprocessing.LabelEncoder()
le_BP.fit([ 'LOW', 'NORMAL', 'HIGH'])#0,1,2
x[:,2] = le_BP.transform(x[:,2]) #at second position
le_Chol = preprocessing.LabelEncoder()
le_Chol.fit([ 'NORMAL', 'HIGH'])#0,1
x[:,3] = le_Chol.transform(x[:,3]) #at third positions
print(x[0:5])
print(y[0:5])
"""Train/test split using built in"""
from sklearn.model_selection import train_test_split
# 70/30 split; random_state fixed for reproducibility.
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=3)
"""Model"""
from sklearn.tree import DecisionTreeClassifier
#max depth number of columns 0,1,2,3,4 | ['Age', 'Sex', 'BP', 'Cholesterol', 'Na_to_K']
#specify criterion="entropy"
drugTree = DecisionTreeClassifier(criterion="entropy", max_depth = 4)
#drugTree # it shows the default parameters
drugTree.fit(x_train,y_train)
"""predict"""
#testing
y_pred = drugTree.predict(x_test)
print(y_pred[0:5]) #predicted y for test set
print(y_test[0:5]) #origional y test set, see the difference of prediction
"""Evaluation"""
from sklearn import metrics
print("DecisionTrees's Accuracy: ", metrics.accuracy_score(y_test, y_pred))
#as other tests take numerical values will use accuracy
"""To draw a tree save this snippet for future"""
# Render the fitted tree to drugtree.png via graphviz, then display it.
from io import StringIO
import pydotplus
import matplotlib.image as mpimg
from sklearn import tree
dot_data = StringIO()
filename = "drugtree.png"
featureNames = df.columns[0:5]
out=tree.export_graphviz(drugTree,feature_names=featureNames, out_file=dot_data, class_names= np.unique(y_train), filled=True, special_characters=True,rotate=False)
graph = pydotplus.graph_from_dot_data(dot_data.getvalue())
graph.write_png(filename)
img = mpimg.imread(filename)
plt.figure(figsize=(100, 200))
plt.imshow(img,interpolation='nearest') | 34.5 | 167 | 0.732689 |
af3b6d7c9460ec364b6440c207b9a392e010f48f | 114 | py | Python | config.py | pulbhaba/tagged-pets-bot | 42d7238825e37c8988657b797e99578b703baadb | [
"Apache-2.0"
] | null | null | null | config.py | pulbhaba/tagged-pets-bot | 42d7238825e37c8988657b797e99578b703baadb | [
"Apache-2.0"
] | null | null | null | config.py | pulbhaba/tagged-pets-bot | 42d7238825e37c8988657b797e99578b703baadb | [
"Apache-2.0"
] | null | null | null | COOKIE_BHA = 'This should be the cookies for one user'
# Placeholder value — replace with the real session cookies for the second user.
COOKIE_NIKKI = 'This should be the cookies for other user'
| 38 | 58 | 0.77193 |
18e921daa9c34a39d0134e9a009152de91a11453 | 237,609 | py | Python | python_modules/dagster/dagster_tests/core_tests/snap_tests/snapshots/snap_test_pipeline_snap.py | dbatten5/dagster | d76e50295054ffe5a72f9b292ef57febae499528 | [
"Apache-2.0"
] | 4,606 | 2018-06-21T17:45:20.000Z | 2022-03-31T23:39:42.000Z | python_modules/dagster/dagster_tests/core_tests/snap_tests/snapshots/snap_test_pipeline_snap.py | dbatten5/dagster | d76e50295054ffe5a72f9b292ef57febae499528 | [
"Apache-2.0"
] | 6,221 | 2018-06-12T04:36:01.000Z | 2022-03-31T21:43:05.000Z | python_modules/dagster/dagster_tests/core_tests/snap_tests/snapshots/snap_test_pipeline_snap.py | dbatten5/dagster | d76e50295054ffe5a72f9b292ef57febae499528 | [
"Apache-2.0"
] | 619 | 2018-08-22T22:43:09.000Z | 2022-03-31T22:48:06.000Z | # -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots['test_basic_dep_fan_out 1'] = '''{
"__class__": "PipelineSnapshot",
"config_schema_snapshot": {
"__class__": "ConfigSchemaSnapshot",
"all_config_snaps_by_key": {
"Any": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": "Any",
"key": "Any",
"kind": {
"__enum__": "ConfigTypeKind.ANY"
},
"scalar_kind": null,
"type_param_keys": null
},
"Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": "List of Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
]
},
"Bool": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Bool",
"key": "Bool",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.BOOL"
},
"type_param_keys": null
},
"Float": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Float",
"key": "Float",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.FLOAT"
},
"type_param_keys": null
},
"Int": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Int",
"key": "Int",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.INT"
},
"type_param_keys": null
},
"ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Bool",
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59"
]
},
"ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Float",
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3"
]
},
"ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Int",
"Selector.a9799b971d12ace70a2d8803c883c863417d0725"
]
},
"ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"String",
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269"
]
},
"Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "disabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "enabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
],
"given_name": null,
"key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "in_process",
"type_key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "multiprocess",
"type_key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e"
}
],
"given_name": null,
"key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {}}",
"description": null,
"is_required": false,
"name": "filesystem",
"type_key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "in_memory",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Int"
}
],
"given_name": null,
"key": "Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Bool"
}
],
"given_name": null,
"key": "Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Float"
}
],
"given_name": null,
"key": "Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "String"
}
],
"given_name": null,
"key": "Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e52fa3afbe531d9522fae1206f3ae9d248775742": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
}
],
"given_name": null,
"key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Any"
}
],
"given_name": null,
"key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.0bb49540f1708dcf5378009c9571eba999502e19": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "io_manager",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.241ac489ffa5f718db6444bae7849fb86a62e441": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"INFO\\"",
"description": null,
"is_required": false,
"name": "log_level",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"dagster\\"",
"description": null,
"is_required": false,
"name": "name",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.3baab16166bacfaf4705811e64d356112fd733cb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
}
],
"given_name": null,
"key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "result",
"type_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742"
}
],
"given_name": null,
"key": "Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "path",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.69ff9be621991cc7961ea5e667d43edaac9d2339": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "outputs",
"type_key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
}
],
"given_name": null,
"key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.743e47901855cb245064dd633e217bfcb49a11a7": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
}
],
"given_name": null,
"key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.889b7348071b49700db678dab98bb0a15fd57ecd": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed"
}
],
"given_name": null,
"key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "marker_to_close",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "0",
"description": null,
"is_required": false,
"name": "max_concurrent",
"type_key": "Int"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c"
}
],
"given_name": null,
"key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [],
"given_name": null,
"key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "base_dir",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "console",
"type_key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb"
}
],
"given_name": null,
"key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.efd6e48220d7eb65a0b9e8814dd15fa00be63496": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "passone",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "passtwo",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "return_one",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
}
],
"given_name": null,
"key": "Shape.efd6e48220d7eb65a0b9e8814dd15fa00be63496",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.f45e9ef85f03667b2e47ff8e3f8ae61fa4779fd2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"in_process\\": {}}",
"description": null,
"is_required": false,
"name": "execution",
"type_key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "intermediate_storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "loggers",
"type_key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"io_manager\\": {}}",
"description": null,
"is_required": false,
"name": "resources",
"type_key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"passone\\": {}, \\"passtwo\\": {}, \\"return_one\\": {}}",
"description": null,
"is_required": false,
"name": "solids",
"type_key": "Shape.efd6e48220d7eb65a0b9e8814dd15fa00be63496"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
}
],
"given_name": null,
"key": "Shape.f45e9ef85f03667b2e47ff8e3f8ae61fa4779fd2",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023"
}
],
"given_name": null,
"key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"String": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "String",
"key": "String",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.STRING"
},
"type_param_keys": null
}
}
},
"dagster_type_namespace_snapshot": {
"__class__": "DagsterTypeNamespaceSnapshot",
"all_dagster_type_snaps_by_key": {
"Any": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Any",
"is_builtin": true,
"key": "Any",
"kind": {
"__enum__": "DagsterTypeKind.ANY"
},
"loader_schema_key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Any",
"type_param_keys": []
},
"Bool": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Bool",
"is_builtin": true,
"key": "Bool",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Bool",
"type_param_keys": []
},
"Float": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Float",
"is_builtin": true,
"key": "Float",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Float",
"type_param_keys": []
},
"Int": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Int",
"is_builtin": true,
"key": "Int",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Int",
"type_param_keys": []
},
"Nothing": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Nothing",
"is_builtin": true,
"key": "Nothing",
"kind": {
"__enum__": "DagsterTypeKind.NOTHING"
},
"loader_schema_key": null,
"materializer_schema_key": null,
"name": "Nothing",
"type_param_keys": []
},
"String": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "String",
"is_builtin": true,
"key": "String",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "String",
"type_param_keys": []
}
}
},
"dep_structure_snapshot": {
"__class__": "DependencyStructureSnapshot",
"solid_invocation_snaps": [
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [
{
"__class__": "InputDependencySnap",
"input_name": "value",
"is_dynamic_collect": false,
"upstream_output_snaps": [
{
"__class__": "OutputHandleSnap",
"output_name": "result",
"solid_name": "return_one"
}
]
}
],
"is_dynamic_mapped": false,
"solid_def_name": "passthrough",
"solid_name": "passone",
"tags": {}
},
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [
{
"__class__": "InputDependencySnap",
"input_name": "value",
"is_dynamic_collect": false,
"upstream_output_snaps": [
{
"__class__": "OutputHandleSnap",
"output_name": "result",
"solid_name": "return_one"
}
]
}
],
"is_dynamic_mapped": false,
"solid_def_name": "passthrough",
"solid_name": "passtwo",
"tags": {}
},
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "return_one",
"solid_name": "return_one",
"tags": {}
}
]
},
"description": null,
"graph_def_name": "single_dep_pipeline",
"lineage_snapshot": null,
"mode_def_snaps": [
{
"__class__": "ModeDefSnap",
"description": null,
"logger_def_snaps": [
{
"__class__": "LoggerDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
},
"description": "The default colored console logger.",
"name": "console"
}
],
"name": "default",
"resource_def_snaps": [
{
"__class__": "ResourceDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"name": "io_manager"
}
],
"root_config_key": "Shape.f45e9ef85f03667b2e47ff8e3f8ae61fa4779fd2"
}
],
"name": "single_dep_pipeline",
"solid_definitions_snapshot": {
"__class__": "SolidDefinitionsSnapshot",
"composite_solid_def_snaps": [],
"solid_def_snaps": [
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [
{
"__class__": "InputDefSnap",
"dagster_type_key": "Int",
"description": null,
"name": "value"
}
],
"name": "passthrough",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Any",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
},
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [],
"name": "return_one",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Any",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
}
]
},
"tags": {}
}'''
snapshots['test_basic_dep_fan_out 2'] = 'd050dcaed582d0e5c560a9d91633dfb08a96480c'
snapshots['test_basic_fan_in 1'] = '''{
"__class__": "PipelineSnapshot",
"config_schema_snapshot": {
"__class__": "ConfigSchemaSnapshot",
"all_config_snaps_by_key": {
"Any": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": "Any",
"key": "Any",
"kind": {
"__enum__": "ConfigTypeKind.ANY"
},
"scalar_kind": null,
"type_param_keys": null
},
"Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": "List of Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
]
},
"Bool": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Bool",
"key": "Bool",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.BOOL"
},
"type_param_keys": null
},
"Float": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Float",
"key": "Float",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.FLOAT"
},
"type_param_keys": null
},
"Int": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Int",
"key": "Int",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.INT"
},
"type_param_keys": null
},
"ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Bool",
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59"
]
},
"ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Float",
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3"
]
},
"ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Int",
"Selector.a9799b971d12ace70a2d8803c883c863417d0725"
]
},
"ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"String",
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269"
]
},
"Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "disabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "enabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
],
"given_name": null,
"key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "in_process",
"type_key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "multiprocess",
"type_key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e"
}
],
"given_name": null,
"key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {}}",
"description": null,
"is_required": false,
"name": "filesystem",
"type_key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "in_memory",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Int"
}
],
"given_name": null,
"key": "Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Bool"
}
],
"given_name": null,
"key": "Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Float"
}
],
"given_name": null,
"key": "Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "String"
}
],
"given_name": null,
"key": "Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e52fa3afbe531d9522fae1206f3ae9d248775742": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
}
],
"given_name": null,
"key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Any"
}
],
"given_name": null,
"key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.02869f6637aa18bd6de60870b747730771d3ef1d": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"in_process\\": {}}",
"description": null,
"is_required": false,
"name": "execution",
"type_key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "intermediate_storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "loggers",
"type_key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"io_manager\\": {}}",
"description": null,
"is_required": false,
"name": "resources",
"type_key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"nothing_one\\": {}, \\"nothing_two\\": {}, \\"take_nothings\\": {}}",
"description": null,
"is_required": false,
"name": "solids",
"type_key": "Shape.7666198738d531f40c136b24e46d12ee0ca3dc25"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
}
],
"given_name": null,
"key": "Shape.02869f6637aa18bd6de60870b747730771d3ef1d",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.0bb49540f1708dcf5378009c9571eba999502e19": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "io_manager",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.17b6a168d89648299f5fa63c548ecef2405875ca": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
}
],
"given_name": null,
"key": "Shape.17b6a168d89648299f5fa63c548ecef2405875ca",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.241ac489ffa5f718db6444bae7849fb86a62e441": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"INFO\\"",
"description": null,
"is_required": false,
"name": "log_level",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"dagster\\"",
"description": null,
"is_required": false,
"name": "name",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.3baab16166bacfaf4705811e64d356112fd733cb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
}
],
"given_name": null,
"key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "result",
"type_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742"
}
],
"given_name": null,
"key": "Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "path",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.69ff9be621991cc7961ea5e667d43edaac9d2339": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "outputs",
"type_key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
}
],
"given_name": null,
"key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.743e47901855cb245064dd633e217bfcb49a11a7": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
}
],
"given_name": null,
"key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.7666198738d531f40c136b24e46d12ee0ca3dc25": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "nothing_one",
"type_key": "Shape.17b6a168d89648299f5fa63c548ecef2405875ca"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "nothing_two",
"type_key": "Shape.17b6a168d89648299f5fa63c548ecef2405875ca"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "take_nothings",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
}
],
"given_name": null,
"key": "Shape.7666198738d531f40c136b24e46d12ee0ca3dc25",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.889b7348071b49700db678dab98bb0a15fd57ecd": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed"
}
],
"given_name": null,
"key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "marker_to_close",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "0",
"description": null,
"is_required": false,
"name": "max_concurrent",
"type_key": "Int"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c"
}
],
"given_name": null,
"key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [],
"given_name": null,
"key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "base_dir",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "console",
"type_key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb"
}
],
"given_name": null,
"key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023"
}
],
"given_name": null,
"key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"String": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "String",
"key": "String",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.STRING"
},
"type_param_keys": null
}
}
},
"dagster_type_namespace_snapshot": {
"__class__": "DagsterTypeNamespaceSnapshot",
"all_dagster_type_snaps_by_key": {
"Any": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Any",
"is_builtin": true,
"key": "Any",
"kind": {
"__enum__": "DagsterTypeKind.ANY"
},
"loader_schema_key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Any",
"type_param_keys": []
},
"Bool": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Bool",
"is_builtin": true,
"key": "Bool",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Bool",
"type_param_keys": []
},
"Float": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Float",
"is_builtin": true,
"key": "Float",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Float",
"type_param_keys": []
},
"Int": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Int",
"is_builtin": true,
"key": "Int",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Int",
"type_param_keys": []
},
"Nothing": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Nothing",
"is_builtin": true,
"key": "Nothing",
"kind": {
"__enum__": "DagsterTypeKind.NOTHING"
},
"loader_schema_key": null,
"materializer_schema_key": null,
"name": "Nothing",
"type_param_keys": []
},
"String": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "String",
"is_builtin": true,
"key": "String",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "String",
"type_param_keys": []
}
}
},
"dep_structure_snapshot": {
"__class__": "DependencyStructureSnapshot",
"solid_invocation_snaps": [
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "return_nothing",
"solid_name": "nothing_one",
"tags": {}
},
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "return_nothing",
"solid_name": "nothing_two",
"tags": {}
},
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [
{
"__class__": "InputDependencySnap",
"input_name": "nothing",
"is_dynamic_collect": false,
"upstream_output_snaps": [
{
"__class__": "OutputHandleSnap",
"output_name": "result",
"solid_name": "nothing_one"
},
{
"__class__": "OutputHandleSnap",
"output_name": "result",
"solid_name": "nothing_two"
}
]
}
],
"is_dynamic_mapped": false,
"solid_def_name": "take_nothings",
"solid_name": "take_nothings",
"tags": {}
}
]
},
"description": null,
"graph_def_name": "fan_in_test",
"lineage_snapshot": null,
"mode_def_snaps": [
{
"__class__": "ModeDefSnap",
"description": null,
"logger_def_snaps": [
{
"__class__": "LoggerDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
},
"description": "The default colored console logger.",
"name": "console"
}
],
"name": "default",
"resource_def_snaps": [
{
"__class__": "ResourceDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"name": "io_manager"
}
],
"root_config_key": "Shape.02869f6637aa18bd6de60870b747730771d3ef1d"
}
],
"name": "fan_in_test",
"solid_definitions_snapshot": {
"__class__": "SolidDefinitionsSnapshot",
"composite_solid_def_snaps": [],
"solid_def_snaps": [
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [],
"name": "return_nothing",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Nothing",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
},
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [
{
"__class__": "InputDefSnap",
"dagster_type_key": "Nothing",
"description": null,
"name": "nothing"
}
],
"name": "take_nothings",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Any",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
}
]
},
"tags": {}
}'''
snapshots['test_basic_fan_in 2'] = 'eaa4fb019beea45963314e2e28f2544caa28e466'
snapshots['test_deserialize_solid_def_snaps_multi_type_config 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "bar",
"type_key": "Selector.c12ab659793f168246640a294e913ac9d90a242a"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "foo",
"type_key": "Array.Float"
}
],
"given_name": null,
"key": "Permissive.bda2965be6725b48329d76783336ed442951fd54",
"kind": {
"__enum__": "ConfigTypeKind.PERMISSIVE_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
}'''
snapshots['test_empty_pipeline_snap_props 1'] = '''{
"__class__": "PipelineSnapshot",
"config_schema_snapshot": {
"__class__": "ConfigSchemaSnapshot",
"all_config_snaps_by_key": {
"Any": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": "Any",
"key": "Any",
"kind": {
"__enum__": "ConfigTypeKind.ANY"
},
"scalar_kind": null,
"type_param_keys": null
},
"Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": "List of Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
]
},
"Bool": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Bool",
"key": "Bool",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.BOOL"
},
"type_param_keys": null
},
"Float": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Float",
"key": "Float",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.FLOAT"
},
"type_param_keys": null
},
"Int": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Int",
"key": "Int",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.INT"
},
"type_param_keys": null
},
"ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Bool",
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59"
]
},
"ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Float",
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3"
]
},
"ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Int",
"Selector.a9799b971d12ace70a2d8803c883c863417d0725"
]
},
"ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"String",
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269"
]
},
"Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "disabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "enabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
],
"given_name": null,
"key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "in_process",
"type_key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "multiprocess",
"type_key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e"
}
],
"given_name": null,
"key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {}}",
"description": null,
"is_required": false,
"name": "filesystem",
"type_key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "in_memory",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Int"
}
],
"given_name": null,
"key": "Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Bool"
}
],
"given_name": null,
"key": "Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Float"
}
],
"given_name": null,
"key": "Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "String"
}
],
"given_name": null,
"key": "Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e52fa3afbe531d9522fae1206f3ae9d248775742": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
}
],
"given_name": null,
"key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Any"
}
],
"given_name": null,
"key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.0bb49540f1708dcf5378009c9571eba999502e19": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "io_manager",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.241ac489ffa5f718db6444bae7849fb86a62e441": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"INFO\\"",
"description": null,
"is_required": false,
"name": "log_level",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"dagster\\"",
"description": null,
"is_required": false,
"name": "name",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.3baab16166bacfaf4705811e64d356112fd733cb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
}
],
"given_name": null,
"key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "result",
"type_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742"
}
],
"given_name": null,
"key": "Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "path",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.69ff9be621991cc7961ea5e667d43edaac9d2339": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "outputs",
"type_key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
}
],
"given_name": null,
"key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.743e47901855cb245064dd633e217bfcb49a11a7": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
}
],
"given_name": null,
"key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.889b7348071b49700db678dab98bb0a15fd57ecd": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed"
}
],
"given_name": null,
"key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "marker_to_close",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "0",
"description": null,
"is_required": false,
"name": "max_concurrent",
"type_key": "Int"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"in_process\\": {}}",
"description": null,
"is_required": false,
"name": "execution",
"type_key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "intermediate_storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "loggers",
"type_key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"io_manager\\": {}}",
"description": null,
"is_required": false,
"name": "resources",
"type_key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"noop_solid\\": {}}",
"description": null,
"is_required": false,
"name": "solids",
"type_key": "Shape.ba913521099bed4314e25592059869c8f3a3c96e"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
}
],
"given_name": null,
"key": "Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ba913521099bed4314e25592059869c8f3a3c96e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "noop_solid",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
}
],
"given_name": null,
"key": "Shape.ba913521099bed4314e25592059869c8f3a3c96e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c"
}
],
"given_name": null,
"key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [],
"given_name": null,
"key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "base_dir",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "console",
"type_key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb"
}
],
"given_name": null,
"key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023"
}
],
"given_name": null,
"key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"String": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "String",
"key": "String",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.STRING"
},
"type_param_keys": null
}
}
},
"dagster_type_namespace_snapshot": {
"__class__": "DagsterTypeNamespaceSnapshot",
"all_dagster_type_snaps_by_key": {
"Any": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Any",
"is_builtin": true,
"key": "Any",
"kind": {
"__enum__": "DagsterTypeKind.ANY"
},
"loader_schema_key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Any",
"type_param_keys": []
},
"Bool": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Bool",
"is_builtin": true,
"key": "Bool",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Bool",
"type_param_keys": []
},
"Float": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Float",
"is_builtin": true,
"key": "Float",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Float",
"type_param_keys": []
},
"Int": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Int",
"is_builtin": true,
"key": "Int",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Int",
"type_param_keys": []
},
"Nothing": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Nothing",
"is_builtin": true,
"key": "Nothing",
"kind": {
"__enum__": "DagsterTypeKind.NOTHING"
},
"loader_schema_key": null,
"materializer_schema_key": null,
"name": "Nothing",
"type_param_keys": []
},
"String": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "String",
"is_builtin": true,
"key": "String",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "String",
"type_param_keys": []
}
}
},
"dep_structure_snapshot": {
"__class__": "DependencyStructureSnapshot",
"solid_invocation_snaps": [
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "noop_solid",
"solid_name": "noop_solid",
"tags": {}
}
]
},
"description": null,
"graph_def_name": "noop_pipeline",
"lineage_snapshot": null,
"mode_def_snaps": [
{
"__class__": "ModeDefSnap",
"description": null,
"logger_def_snaps": [
{
"__class__": "LoggerDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
},
"description": "The default colored console logger.",
"name": "console"
}
],
"name": "default",
"resource_def_snaps": [
{
"__class__": "ResourceDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"name": "io_manager"
}
],
"root_config_key": "Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52"
}
],
"name": "noop_pipeline",
"solid_definitions_snapshot": {
"__class__": "SolidDefinitionsSnapshot",
"composite_solid_def_snaps": [],
"solid_def_snaps": [
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [],
"name": "noop_solid",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Any",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
}
]
},
"tags": {}
}'''
snapshots['test_empty_pipeline_snap_props 2'] = '0965b76124e758660317760c7e9bbc66282f33b0'
snapshots['test_empty_pipeline_snap_snapshot 1'] = '''{
"__class__": "PipelineSnapshot",
"config_schema_snapshot": {
"__class__": "ConfigSchemaSnapshot",
"all_config_snaps_by_key": {
"Any": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": "Any",
"key": "Any",
"kind": {
"__enum__": "ConfigTypeKind.ANY"
},
"scalar_kind": null,
"type_param_keys": null
},
"Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": "List of Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
]
},
"Bool": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Bool",
"key": "Bool",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.BOOL"
},
"type_param_keys": null
},
"Float": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Float",
"key": "Float",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.FLOAT"
},
"type_param_keys": null
},
"Int": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Int",
"key": "Int",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.INT"
},
"type_param_keys": null
},
"ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Bool",
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59"
]
},
"ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Float",
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3"
]
},
"ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Int",
"Selector.a9799b971d12ace70a2d8803c883c863417d0725"
]
},
"ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"String",
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269"
]
},
"Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "disabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "enabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
],
"given_name": null,
"key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "in_process",
"type_key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "multiprocess",
"type_key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e"
}
],
"given_name": null,
"key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {}}",
"description": null,
"is_required": false,
"name": "filesystem",
"type_key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "in_memory",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Int"
}
],
"given_name": null,
"key": "Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Bool"
}
],
"given_name": null,
"key": "Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Float"
}
],
"given_name": null,
"key": "Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "String"
}
],
"given_name": null,
"key": "Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e52fa3afbe531d9522fae1206f3ae9d248775742": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
}
],
"given_name": null,
"key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Any"
}
],
"given_name": null,
"key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.0bb49540f1708dcf5378009c9571eba999502e19": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "io_manager",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.241ac489ffa5f718db6444bae7849fb86a62e441": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"INFO\\"",
"description": null,
"is_required": false,
"name": "log_level",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"dagster\\"",
"description": null,
"is_required": false,
"name": "name",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.3baab16166bacfaf4705811e64d356112fd733cb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
}
],
"given_name": null,
"key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "result",
"type_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742"
}
],
"given_name": null,
"key": "Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "path",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.69ff9be621991cc7961ea5e667d43edaac9d2339": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "outputs",
"type_key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
}
],
"given_name": null,
"key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.743e47901855cb245064dd633e217bfcb49a11a7": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
}
],
"given_name": null,
"key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.889b7348071b49700db678dab98bb0a15fd57ecd": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed"
}
],
"given_name": null,
"key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "marker_to_close",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "0",
"description": null,
"is_required": false,
"name": "max_concurrent",
"type_key": "Int"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"in_process\\": {}}",
"description": null,
"is_required": false,
"name": "execution",
"type_key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "intermediate_storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "loggers",
"type_key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"io_manager\\": {}}",
"description": null,
"is_required": false,
"name": "resources",
"type_key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"noop_solid\\": {}}",
"description": null,
"is_required": false,
"name": "solids",
"type_key": "Shape.ba913521099bed4314e25592059869c8f3a3c96e"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
}
],
"given_name": null,
"key": "Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ba913521099bed4314e25592059869c8f3a3c96e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "noop_solid",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
}
],
"given_name": null,
"key": "Shape.ba913521099bed4314e25592059869c8f3a3c96e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c"
}
],
"given_name": null,
"key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [],
"given_name": null,
"key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "base_dir",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "console",
"type_key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb"
}
],
"given_name": null,
"key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023"
}
],
"given_name": null,
"key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"String": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "String",
"key": "String",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.STRING"
},
"type_param_keys": null
}
}
},
"dagster_type_namespace_snapshot": {
"__class__": "DagsterTypeNamespaceSnapshot",
"all_dagster_type_snaps_by_key": {
"Any": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Any",
"is_builtin": true,
"key": "Any",
"kind": {
"__enum__": "DagsterTypeKind.ANY"
},
"loader_schema_key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Any",
"type_param_keys": []
},
"Bool": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Bool",
"is_builtin": true,
"key": "Bool",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Bool",
"type_param_keys": []
},
"Float": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Float",
"is_builtin": true,
"key": "Float",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Float",
"type_param_keys": []
},
"Int": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Int",
"is_builtin": true,
"key": "Int",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Int",
"type_param_keys": []
},
"Nothing": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Nothing",
"is_builtin": true,
"key": "Nothing",
"kind": {
"__enum__": "DagsterTypeKind.NOTHING"
},
"loader_schema_key": null,
"materializer_schema_key": null,
"name": "Nothing",
"type_param_keys": []
},
"String": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "String",
"is_builtin": true,
"key": "String",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "String",
"type_param_keys": []
}
}
},
"dep_structure_snapshot": {
"__class__": "DependencyStructureSnapshot",
"solid_invocation_snaps": [
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "noop_solid",
"solid_name": "noop_solid",
"tags": {}
}
]
},
"description": null,
"graph_def_name": "noop_pipeline",
"lineage_snapshot": null,
"mode_def_snaps": [
{
"__class__": "ModeDefSnap",
"description": null,
"logger_def_snaps": [
{
"__class__": "LoggerDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
},
"description": "The default colored console logger.",
"name": "console"
}
],
"name": "default",
"resource_def_snaps": [
{
"__class__": "ResourceDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"name": "io_manager"
}
],
"root_config_key": "Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52"
}
],
"name": "noop_pipeline",
"solid_definitions_snapshot": {
"__class__": "SolidDefinitionsSnapshot",
"composite_solid_def_snaps": [],
"solid_def_snaps": [
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [],
"name": "noop_solid",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Any",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
}
]
},
"tags": {}
}'''
snapshots['test_multi_type_config_array_dict_fields[Permissive] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": "List of Array.Permissive.1f37a068c7c51aba23e9c41475c78eebc4e58471",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Permissive.1f37a068c7c51aba23e9c41475c78eebc4e58471",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Permissive.1f37a068c7c51aba23e9c41475c78eebc4e58471"
]
}'''
snapshots['test_multi_type_config_array_dict_fields[Selector] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": "List of Array.Selector.1f37a068c7c51aba23e9c41475c78eebc4e58471",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Selector.1f37a068c7c51aba23e9c41475c78eebc4e58471",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Selector.1f37a068c7c51aba23e9c41475c78eebc4e58471"
]
}'''
snapshots['test_multi_type_config_array_dict_fields[Shape] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": "List of Array.Shape.1f37a068c7c51aba23e9c41475c78eebc4e58471",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Shape.1f37a068c7c51aba23e9c41475c78eebc4e58471",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Shape.1f37a068c7c51aba23e9c41475c78eebc4e58471"
]
}'''
snapshots['test_multi_type_config_nested_dicts[nested_dict_types0] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "foo",
"type_key": "Permissive.c1ae6abf6c3c9e951eeefe4fde820cafc053ee40"
}
],
"given_name": null,
"key": "Selector.cb18f2a8fc9fa17668d8f4fd6b44c86c30c56774",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
}'''
snapshots['test_multi_type_config_nested_dicts[nested_dict_types1] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "foo",
"type_key": "Shape.9bbda63934c371bf9be9a1cbb6fff9f5ee0be828"
}
],
"given_name": null,
"key": "Selector.b188a7737a2fecf0fca8cf94d331be517176dddf",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
}'''
snapshots['test_multi_type_config_nested_dicts[nested_dict_types2] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "foo",
"type_key": "Selector.c1ae6abf6c3c9e951eeefe4fde820cafc053ee40"
}
],
"given_name": null,
"key": "Permissive.84180c8bd71a154af9d2965c8955925c228dc2bf",
"kind": {
"__enum__": "ConfigTypeKind.PERMISSIVE_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
}'''
snapshots['test_multi_type_config_nested_dicts[nested_dict_types3] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "foo",
"type_key": "Shape.3d03240a3cdb5557305a2118fb3a059896368dd1"
}
],
"given_name": null,
"key": "Permissive.31f842392439e3c949b44f9e0e36bd1ed050a6b5",
"kind": {
"__enum__": "ConfigTypeKind.PERMISSIVE_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
}'''
snapshots['test_multi_type_config_nested_dicts[nested_dict_types4] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "foo",
"type_key": "Selector.9bbda63934c371bf9be9a1cbb6fff9f5ee0be828"
}
],
"given_name": null,
"key": "Shape.88efc4d6ed14b1d35062d1e50a0227f606049e87",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
}'''
snapshots['test_multi_type_config_nested_dicts[nested_dict_types5] 1'] = '''{
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "foo",
"type_key": "Permissive.3d03240a3cdb5557305a2118fb3a059896368dd1"
}
],
"given_name": null,
"key": "Shape.0117583609bbf6ddcd1b1c9586aca163c454ed9d",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
}'''
snapshots['test_pipeline_snap_all_props 1'] = '''{
"__class__": "PipelineSnapshot",
"config_schema_snapshot": {
"__class__": "ConfigSchemaSnapshot",
"all_config_snaps_by_key": {
"Any": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": "Any",
"key": "Any",
"kind": {
"__enum__": "ConfigTypeKind.ANY"
},
"scalar_kind": null,
"type_param_keys": null
},
"Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": "List of Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
]
},
"Bool": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Bool",
"key": "Bool",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.BOOL"
},
"type_param_keys": null
},
"Float": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Float",
"key": "Float",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.FLOAT"
},
"type_param_keys": null
},
"Int": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Int",
"key": "Int",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.INT"
},
"type_param_keys": null
},
"ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Bool",
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59"
]
},
"ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Float",
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3"
]
},
"ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Int",
"Selector.a9799b971d12ace70a2d8803c883c863417d0725"
]
},
"ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"String",
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269"
]
},
"Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "disabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "enabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
],
"given_name": null,
"key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "in_process",
"type_key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "multiprocess",
"type_key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e"
}
],
"given_name": null,
"key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {}}",
"description": null,
"is_required": false,
"name": "filesystem",
"type_key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "in_memory",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Int"
}
],
"given_name": null,
"key": "Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Bool"
}
],
"given_name": null,
"key": "Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Float"
}
],
"given_name": null,
"key": "Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "String"
}
],
"given_name": null,
"key": "Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e52fa3afbe531d9522fae1206f3ae9d248775742": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
}
],
"given_name": null,
"key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Any"
}
],
"given_name": null,
"key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.0bb49540f1708dcf5378009c9571eba999502e19": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "io_manager",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.241ac489ffa5f718db6444bae7849fb86a62e441": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"INFO\\"",
"description": null,
"is_required": false,
"name": "log_level",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"dagster\\"",
"description": null,
"is_required": false,
"name": "name",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.3baab16166bacfaf4705811e64d356112fd733cb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
}
],
"given_name": null,
"key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "result",
"type_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742"
}
],
"given_name": null,
"key": "Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "path",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.69ff9be621991cc7961ea5e667d43edaac9d2339": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "outputs",
"type_key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
}
],
"given_name": null,
"key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.743e47901855cb245064dd633e217bfcb49a11a7": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
}
],
"given_name": null,
"key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.889b7348071b49700db678dab98bb0a15fd57ecd": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed"
}
],
"given_name": null,
"key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "marker_to_close",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "0",
"description": null,
"is_required": false,
"name": "max_concurrent",
"type_key": "Int"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"in_process\\": {}}",
"description": null,
"is_required": false,
"name": "execution",
"type_key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "intermediate_storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "loggers",
"type_key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"io_manager\\": {}}",
"description": null,
"is_required": false,
"name": "resources",
"type_key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"noop_solid\\": {}}",
"description": null,
"is_required": false,
"name": "solids",
"type_key": "Shape.ba913521099bed4314e25592059869c8f3a3c96e"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
}
],
"given_name": null,
"key": "Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ba913521099bed4314e25592059869c8f3a3c96e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "noop_solid",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
}
],
"given_name": null,
"key": "Shape.ba913521099bed4314e25592059869c8f3a3c96e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c"
}
],
"given_name": null,
"key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [],
"given_name": null,
"key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "base_dir",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "console",
"type_key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb"
}
],
"given_name": null,
"key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023"
}
],
"given_name": null,
"key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"String": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "String",
"key": "String",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.STRING"
},
"type_param_keys": null
}
}
},
"dagster_type_namespace_snapshot": {
"__class__": "DagsterTypeNamespaceSnapshot",
"all_dagster_type_snaps_by_key": {
"Any": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Any",
"is_builtin": true,
"key": "Any",
"kind": {
"__enum__": "DagsterTypeKind.ANY"
},
"loader_schema_key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Any",
"type_param_keys": []
},
"Bool": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Bool",
"is_builtin": true,
"key": "Bool",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Bool",
"type_param_keys": []
},
"Float": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Float",
"is_builtin": true,
"key": "Float",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Float",
"type_param_keys": []
},
"Int": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Int",
"is_builtin": true,
"key": "Int",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Int",
"type_param_keys": []
},
"Nothing": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Nothing",
"is_builtin": true,
"key": "Nothing",
"kind": {
"__enum__": "DagsterTypeKind.NOTHING"
},
"loader_schema_key": null,
"materializer_schema_key": null,
"name": "Nothing",
"type_param_keys": []
},
"String": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "String",
"is_builtin": true,
"key": "String",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "String",
"type_param_keys": []
}
}
},
"dep_structure_snapshot": {
"__class__": "DependencyStructureSnapshot",
"solid_invocation_snaps": [
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "noop_solid",
"solid_name": "noop_solid",
"tags": {}
}
]
},
"description": "desc",
"graph_def_name": "noop_pipeline",
"lineage_snapshot": null,
"mode_def_snaps": [
{
"__class__": "ModeDefSnap",
"description": null,
"logger_def_snaps": [
{
"__class__": "LoggerDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
},
"description": "The default colored console logger.",
"name": "console"
}
],
"name": "default",
"resource_def_snaps": [
{
"__class__": "ResourceDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"name": "io_manager"
}
],
"root_config_key": "Shape.b351ef464e8a8a094f615bd472a4e0b1b35cdb52"
}
],
"name": "noop_pipeline",
"solid_definitions_snapshot": {
"__class__": "SolidDefinitionsSnapshot",
"composite_solid_def_snaps": [],
"solid_def_snaps": [
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [],
"name": "noop_solid",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Any",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
}
]
},
"tags": {
"key": "value"
}
}'''
snapshots['test_pipeline_snap_all_props 2'] = '5ebae3c42c9b43fcdda72d972e140601bbd8821b'
snapshots['test_two_invocations_deps_snap 1'] = '''{
"__class__": "PipelineSnapshot",
"config_schema_snapshot": {
"__class__": "ConfigSchemaSnapshot",
"all_config_snaps_by_key": {
"Any": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": "Any",
"key": "Any",
"kind": {
"__enum__": "ConfigTypeKind.ANY"
},
"scalar_kind": null,
"type_param_keys": null
},
"Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": "List of Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"enum_values": null,
"fields": null,
"given_name": null,
"key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.ARRAY"
},
"scalar_kind": null,
"type_param_keys": [
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
]
},
"Bool": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Bool",
"key": "Bool",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.BOOL"
},
"type_param_keys": null
},
"Float": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Float",
"key": "Float",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.FLOAT"
},
"type_param_keys": null
},
"Int": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "Int",
"key": "Int",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.INT"
},
"type_param_keys": null
},
"ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Bool",
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59"
]
},
"ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Float",
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3"
]
},
"ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"Int",
"Selector.a9799b971d12ace70a2d8803c883c863417d0725"
]
},
"ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": null,
"given_name": null,
"key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR_UNION"
},
"scalar_kind": null,
"type_param_keys": [
"String",
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269"
]
},
"Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "disabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "enabled",
"type_key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709"
}
],
"given_name": null,
"key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "in_process",
"type_key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}}",
"description": null,
"is_required": false,
"name": "multiprocess",
"type_key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e"
}
],
"given_name": null,
"key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"config\\": {}}",
"description": null,
"is_required": false,
"name": "filesystem",
"type_key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "in_memory",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.a9799b971d12ace70a2d8803c883c863417d0725": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Int"
}
],
"given_name": null,
"key": "Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Bool"
}
],
"given_name": null,
"key": "Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Float"
}
],
"given_name": null,
"key": "Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e04723c9d9937e3ab21206435b22247cfbe58269": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "String"
}
],
"given_name": null,
"key": "Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.e52fa3afbe531d9522fae1206f3ae9d248775742": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
}
],
"given_name": null,
"key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "json",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "pickle",
"type_key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "value",
"type_key": "Any"
}
],
"given_name": null,
"key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"kind": {
"__enum__": "ConfigTypeKind.SELECTOR"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.0bb49540f1708dcf5378009c9571eba999502e19": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "io_manager",
"type_key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7"
}
],
"given_name": null,
"key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.241ac489ffa5f718db6444bae7849fb86a62e441": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"INFO\\"",
"description": null,
"is_required": false,
"name": "log_level",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "\\"dagster\\"",
"description": null,
"is_required": false,
"name": "name",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.3baab16166bacfaf4705811e64d356112fd733cb": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
}
],
"given_name": null,
"key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.41de0e2d7b75524510155d0bdab8723c6feced3b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "result",
"type_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742"
}
],
"given_name": null,
"key": "Shape.41de0e2d7b75524510155d0bdab8723c6feced3b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.45768c89a28c7a70f247bca830b9a24598287716": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"in_process\\": {}}",
"description": null,
"is_required": false,
"name": "execution",
"type_key": "Selector.4d63da53a40bb42f96aad27d25ec8a9656d40975"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "intermediate_storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "loggers",
"type_key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"io_manager\\": {}}",
"description": null,
"is_required": false,
"name": "resources",
"type_key": "Shape.0bb49540f1708dcf5378009c9571eba999502e19"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"one\\": {}, \\"two\\": {}}",
"description": null,
"is_required": false,
"name": "solids",
"type_key": "Shape.ba7fa03e7f2b7ee324ff5f3ed290c26cb2585795"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "storage",
"type_key": "Selector.a2588a6acfaabe9de47899395c58b06786b9e2eb"
}
],
"given_name": null,
"key": "Shape.45768c89a28c7a70f247bca830b9a24598287716",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": true,
"name": "path",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.4b53b73df342381d0d05c5f36183dc99cb9676e2",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.69ff9be621991cc7961ea5e667d43edaac9d2339": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "outputs",
"type_key": "Array.Shape.41de0e2d7b75524510155d0bdab8723c6feced3b"
}
],
"given_name": null,
"key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.743e47901855cb245064dd633e217bfcb49a11a7": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
}
],
"given_name": null,
"key": "Shape.743e47901855cb245064dd633e217bfcb49a11a7",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.889b7348071b49700db678dab98bb0a15fd57ecd": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed"
}
],
"given_name": null,
"key": "Shape.889b7348071b49700db678dab98bb0a15fd57ecd",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "marker_to_close",
"type_key": "String"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "0",
"description": null,
"is_required": false,
"name": "max_concurrent",
"type_key": "Int"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"enabled\\": {}}",
"description": null,
"is_required": false,
"name": "retries",
"type_key": "Selector.1bfb167aea90780aa679597800c71bd8c65ed0b2"
}
],
"given_name": null,
"key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ba7fa03e7f2b7ee324ff5f3ed290c26cb2585795": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"field_aliases": {
"solids": "ops"
},
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "one",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
},
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{}",
"description": null,
"is_required": false,
"name": "two",
"type_key": "Shape.69ff9be621991cc7961ea5e667d43edaac9d2339"
}
],
"given_name": null,
"key": "Shape.ba7fa03e7f2b7ee324ff5f3ed290c26cb2585795",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.979b3d2fece4f3eb92e90f2ec9fb4c85efe9ea5c"
}
],
"given_name": null,
"key": "Shape.ca5906d9a0377218b4ee7d940ad55957afa73d1b",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [],
"given_name": null,
"key": "Shape.da39a3ee5e6b4b0d3255bfef95601890afd80709",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "base_dir",
"type_key": "String"
}
],
"given_name": null,
"key": "Shape.e26e0c525e2d2c66b5a06f4cfdd053de6d44e3ed",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "console",
"type_key": "Shape.3baab16166bacfaf4705811e64d356112fd733cb"
}
],
"given_name": null,
"key": "Shape.ebeaf4550c200fb540f2e1f3f2110debd8c4157c",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e": {
"__class__": "ConfigTypeSnap",
"description": null,
"enum_values": null,
"fields": [
{
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"max_concurrent\\": 0, \\"retries\\": {\\"enabled\\": {}}}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.a476f98f7c4e324d4b665af722d1f2cd7f99b023"
}
],
"given_name": null,
"key": "Shape.fff3afcfe0467fefa4b97fb8f72911aeb0e8fe4e",
"kind": {
"__enum__": "ConfigTypeKind.STRICT_SHAPE"
},
"scalar_kind": null,
"type_param_keys": null
},
"String": {
"__class__": "ConfigTypeSnap",
"description": "",
"enum_values": null,
"fields": null,
"given_name": "String",
"key": "String",
"kind": {
"__enum__": "ConfigTypeKind.SCALAR"
},
"scalar_kind": {
"__enum__": "ConfigScalarKind.STRING"
},
"type_param_keys": null
}
}
},
"dagster_type_namespace_snapshot": {
"__class__": "DagsterTypeNamespaceSnapshot",
"all_dagster_type_snaps_by_key": {
"Any": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Any",
"is_builtin": true,
"key": "Any",
"kind": {
"__enum__": "DagsterTypeKind.ANY"
},
"loader_schema_key": "Selector.f2fe6dfdc60a1947a8f8e7cd377a012b47065bc4",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Any",
"type_param_keys": []
},
"Bool": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Bool",
"is_builtin": true,
"key": "Bool",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Bool-Selector.be5d518b39e86a43c5f2eecaf538c1f6c7711b59",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Bool",
"type_param_keys": []
},
"Float": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Float",
"is_builtin": true,
"key": "Float",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Float-Selector.d00a37e3807d37c9f69cc62997c4a5f4a176e5c3",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Float",
"type_param_keys": []
},
"Int": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Int",
"is_builtin": true,
"key": "Int",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.Int-Selector.a9799b971d12ace70a2d8803c883c863417d0725",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "Int",
"type_param_keys": []
},
"Nothing": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "Nothing",
"is_builtin": true,
"key": "Nothing",
"kind": {
"__enum__": "DagsterTypeKind.NOTHING"
},
"loader_schema_key": null,
"materializer_schema_key": null,
"name": "Nothing",
"type_param_keys": []
},
"String": {
"__class__": "DagsterTypeSnap",
"description": null,
"display_name": "String",
"is_builtin": true,
"key": "String",
"kind": {
"__enum__": "DagsterTypeKind.SCALAR"
},
"loader_schema_key": "ScalarUnion.String-Selector.e04723c9d9937e3ab21206435b22247cfbe58269",
"materializer_schema_key": "Selector.e52fa3afbe531d9522fae1206f3ae9d248775742",
"name": "String",
"type_param_keys": []
}
}
},
"dep_structure_snapshot": {
"__class__": "DependencyStructureSnapshot",
"solid_invocation_snaps": [
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "noop_solid",
"solid_name": "one",
"tags": {}
},
{
"__class__": "SolidInvocationSnap",
"input_dep_snaps": [],
"is_dynamic_mapped": false,
"solid_def_name": "noop_solid",
"solid_name": "two",
"tags": {}
}
]
},
"description": null,
"graph_def_name": "two_solid_pipeline",
"lineage_snapshot": null,
"mode_def_snaps": [
{
"__class__": "ModeDefSnap",
"description": null,
"logger_def_snaps": [
{
"__class__": "LoggerDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": true,
"default_value_as_json_str": "{\\"log_level\\": \\"INFO\\", \\"name\\": \\"dagster\\"}",
"description": null,
"is_required": false,
"name": "config",
"type_key": "Shape.241ac489ffa5f718db6444bae7849fb86a62e441"
},
"description": "The default colored console logger.",
"name": "console"
}
],
"name": "default",
"resource_def_snaps": [
{
"__class__": "ResourceDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"name": "io_manager"
}
],
"root_config_key": "Shape.45768c89a28c7a70f247bca830b9a24598287716"
}
],
"name": "two_solid_pipeline",
"solid_definitions_snapshot": {
"__class__": "SolidDefinitionsSnapshot",
"composite_solid_def_snaps": [],
"solid_def_snaps": [
{
"__class__": "SolidDefSnap",
"config_field_snap": {
"__class__": "ConfigFieldSnap",
"default_provided": false,
"default_value_as_json_str": null,
"description": null,
"is_required": false,
"name": "config",
"type_key": "Any"
},
"description": null,
"input_def_snaps": [],
"name": "noop_solid",
"output_def_snaps": [
{
"__class__": "OutputDefSnap",
"dagster_type_key": "Any",
"description": null,
"is_dynamic": false,
"is_required": true,
"name": "result"
}
],
"required_resource_keys": [],
"tags": {}
}
]
},
"tags": {}
}'''
# Recorded snapshot value for this test case — a 40-char hex digest; regenerated by the snapshot tool, not hand-edited.
snapshots['test_two_invocations_deps_snap 2'] = 'e2bf3c5257f8c979501f9cec33d0c96219496fad'
| 32.380621 | 121 | 0.529281 |
1275199288757ff2ad3ed5877d999d611f88cb24 | 22,802 | py | Python | plugins/modules/oci_container_engine_cluster_actions.py | sohwaje/oci-ansible-collection | 9e6b8cf55e596a96560710a457a7df05886fc59c | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_container_engine_cluster_actions.py | sohwaje/oci-ansible-collection | 9e6b8cf55e596a96560710a457a7df05886fc59c | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_container_engine_cluster_actions.py | sohwaje/oci-ansible-collection | 9e6b8cf55e596a96560710a457a7df05886fc59c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: schema version, maturity status, and support channel.
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_container_engine_cluster_actions
short_description: Perform actions on a Cluster resource in Oracle Cloud Infrastructure
description:
- Perform actions on a Cluster resource in Oracle Cloud Infrastructure
- For I(action=cluster_migrate_to_native_vcn), initiates cluster migration to use native VCN.
- For I(action=update_cluster_endpoint_config), update the details of the cluster endpoint configuration.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
cluster_id:
description:
- The OCID of the cluster.
type: str
aliases: ["id"]
required: true
endpoint_config:
description:
- The network configuration for access to the Cluster control plane.
- Required for I(action=cluster_migrate_to_native_vcn).
type: dict
suboptions:
subnet_id:
description:
- The OCID of the regional subnet in which to place the Cluster endpoint.
type: str
nsg_ids:
description:
- A list of the OCIDs of the network security groups (NSGs) to apply to the cluster endpoint. For more information about NSGs, see
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/20160918/NetworkSecurityGroup/).
type: list
elements: str
is_public_ip_enabled:
description:
- Whether the cluster should be assigned a public IP address. Defaults to false. If set to true on a private subnet, the cluster
provisioning will fail.
type: bool
decommission_delay_duration:
description:
- The optional override of the non-native endpoint decommission time after migration is complete. Defaults to 30 days.
- Applicable only for I(action=cluster_migrate_to_native_vcn).
type: str
nsg_ids:
description:
- A list of the OCIDs of the network security groups (NSGs) to apply to the cluster endpoint. For more information about NSGs, see
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/20160918/NetworkSecurityGroup/).
- Applicable only for I(action=update_cluster_endpoint_config).
type: list
elements: str
is_public_ip_enabled:
description:
- Whether the cluster should be assigned a public IP address. Defaults to false. If set to true on a private subnet, the cluster update will fail.
- Applicable only for I(action=update_cluster_endpoint_config).
type: bool
action:
description:
- The action to perform on the Cluster.
type: str
required: true
choices:
- "cluster_migrate_to_native_vcn"
- "update_cluster_endpoint_config"
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Perform action cluster_migrate_to_native_vcn on cluster
oci_container_engine_cluster_actions:
cluster_id: "ocid1.cluster.oc1..xxxxxxEXAMPLExxxxxx"
action: cluster_migrate_to_native_vcn
- name: Perform action update_cluster_endpoint_config on cluster
oci_container_engine_cluster_actions:
cluster_id: "ocid1.cluster.oc1..xxxxxxEXAMPLExxxxxx"
action: update_cluster_endpoint_config
"""
RETURN = """
cluster:
description:
- Details of the Cluster resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- The OCID of the cluster.
returned: on success
type: str
sample: ocid1.cluster.oc1.iad.aaaaaaaaga3tombrmq3wgyrvmi3gcn3bmfsdizjwgy4wgyldmy3dcmtcmmyw
name:
description:
- The name of the cluster.
returned: on success
type: str
sample: My Cluster
compartment_id:
description:
- The OCID of the compartment in which the cluster exists.
returned: on success
type: str
sample: "ocid1.compartment.oc1..aaaaaaaafqm2df7ckwmmbtdsl2bgxsw4fcpvkoojytxrqst24yww2tdmtqcq"
endpoint_config:
description:
- The network configuration for access to the Cluster control plane.
returned: on success
type: complex
contains:
subnet_id:
description:
- The OCID of the regional subnet in which to place the Cluster endpoint.
returned: on success
type: str
sample: "ocid1.subnet.oc1..xxxxxxEXAMPLExxxxxx"
nsg_ids:
description:
- A list of the OCIDs of the network security groups (NSGs) to apply to the cluster endpoint. For more information about NSGs, see
L(NetworkSecurityGroup,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/20160918/NetworkSecurityGroup/).
returned: on success
type: list
sample: []
is_public_ip_enabled:
description:
- Whether the cluster should be assigned a public IP address. Defaults to false. If set to true on a private subnet, the cluster
provisioning will fail.
returned: on success
type: bool
sample: true
vcn_id:
description:
- The OCID of the virtual cloud network (VCN) in which the cluster exists.
returned: on success
type: str
sample: ocid1.vcn.oc1.iad.aaaaaaaa5e3hn7hk6y63awlhbvlhsumkn5p3ficbjcevbnoylvptcpkxtsaa
kubernetes_version:
description:
- The version of Kubernetes running on the cluster masters.
returned: on success
type: str
sample: v1.9.4
kms_key_id:
description:
- The OCID of the KMS key to be used as the master encryption key for Kubernetes secret encryption.
returned: on success
type: str
sample: "ocid1.kmskey.oc1..xxxxxxEXAMPLExxxxxx"
options:
description:
- Optional attributes for the cluster.
returned: on success
type: complex
contains:
service_lb_subnet_ids:
description:
- The OCIDs of the subnets used for Kubernetes services load balancers.
returned: on success
type: list
sample: []
kubernetes_network_config:
description:
- Network configuration for Kubernetes.
returned: on success
type: complex
contains:
pods_cidr:
description:
- The CIDR block for Kubernetes pods.
returned: on success
type: str
sample: 10.244.0.0/16
services_cidr:
description:
- The CIDR block for Kubernetes services.
returned: on success
type: str
sample: 10.96.0.0/16
add_ons:
description:
- Configurable cluster add-ons
returned: on success
type: complex
contains:
is_kubernetes_dashboard_enabled:
description:
- Whether or not to enable the Kubernetes Dashboard add-on.
returned: on success
type: bool
sample: true
is_tiller_enabled:
description:
- Whether or not to enable the Tiller add-on.
returned: on success
type: bool
sample: true
admission_controller_options:
description:
- Configurable cluster admission controllers
returned: on success
type: complex
contains:
is_pod_security_policy_enabled:
description:
- Whether or not to enable the Pod Security Policy admission controller.
returned: on success
type: bool
sample: false
metadata:
description:
- Metadata about the cluster.
returned: on success
type: complex
contains:
time_created:
description:
- The time the cluster was created.
returned: on success
type: str
sample: "2017-07-21T16:11:29Z"
created_by_user_id:
description:
- The user who created the cluster.
returned: on success
type: str
sample: "ocid1.user.oc1..aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq"
created_by_work_request_id:
description:
- The OCID of the work request which created the cluster.
returned: on success
type: str
sample: ocid1.clustersworkrequest.oc1.iad.aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq
time_deleted:
description:
- The time the cluster was deleted.
returned: on success
type: str
sample: "2017-07-21T16:11:29Z"
deleted_by_user_id:
description:
- The user who deleted the cluster.
returned: on success
type: str
sample: "ocid1.user.oc1..aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq"
deleted_by_work_request_id:
description:
- The OCID of the work request which deleted the cluster.
returned: on success
type: str
sample: ocid1.clustersworkrequest.oc1.iad.aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq
time_updated:
description:
- The time the cluster was updated.
returned: on success
type: str
sample: "2017-07-21T16:11:29Z"
updated_by_user_id:
description:
- The user who updated the cluster.
returned: on success
type: str
sample: "ocid1.user.oc1..aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq"
updated_by_work_request_id:
description:
- The OCID of the work request which updated the cluster.
returned: on success
type: str
sample: ocid1.clustersworkrequest.oc1.iad.aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq
lifecycle_state:
description:
- The state of the cluster masters.
returned: on success
type: str
sample: UPDATING
lifecycle_details:
description:
- Details about the state of the cluster masters.
returned: on success
type: str
sample: waiting for node pools
endpoints:
description:
- Endpoints served up by the cluster masters.
returned: on success
type: complex
contains:
kubernetes:
description:
- The non-native networking Kubernetes API server endpoint.
returned: on success
type: str
sample: https://yourkubernetes
public_endpoint:
description:
- The public native networking Kubernetes API server endpoint, if one was requested.
returned: on success
type: str
sample: https://yourPublicEndpoint
private_endpoint:
description:
- The private native networking Kubernetes API server endpoint.
returned: on success
type: str
sample: https://yourPrivateEndpoint
available_kubernetes_upgrades:
description:
- Available Kubernetes versions to which the clusters masters may be upgraded.
returned: on success
type: list
sample: []
image_policy_config:
description:
- The image verification policy for signature validation.
returned: on success
type: complex
contains:
is_policy_enabled:
description:
- Whether the image verification policy is enabled. Defaults to false. If set to true, the images will be verified against the policy at
runtime.
returned: on success
type: bool
sample: true
key_details:
description:
- A list of KMS key details.
returned: on success
type: complex
contains:
kms_key_id:
description:
- The OCIDs of the KMS key that will be used to verify whether the images are signed by an approved source.
returned: on success
type: str
sample: "ocid1.kmskey.oc1..xxxxxxEXAMPLExxxxxx"
sample: {
"id": "ocid1.cluster.oc1.iad.aaaaaaaaga3tombrmq3wgyrvmi3gcn3bmfsdizjwgy4wgyldmy3dcmtcmmyw",
"name": "My Cluster",
"compartment_id": "ocid1.compartment.oc1..aaaaaaaafqm2df7ckwmmbtdsl2bgxsw4fcpvkoojytxrqst24yww2tdmtqcq",
"endpoint_config": {
"subnet_id": "ocid1.subnet.oc1..xxxxxxEXAMPLExxxxxx",
"nsg_ids": [],
"is_public_ip_enabled": true
},
"vcn_id": "ocid1.vcn.oc1.iad.aaaaaaaa5e3hn7hk6y63awlhbvlhsumkn5p3ficbjcevbnoylvptcpkxtsaa",
"kubernetes_version": "v1.9.4",
"kms_key_id": "ocid1.kmskey.oc1..xxxxxxEXAMPLExxxxxx",
"options": {
"service_lb_subnet_ids": [],
"kubernetes_network_config": {
"pods_cidr": "10.244.0.0/16",
"services_cidr": "10.96.0.0/16"
},
"add_ons": {
"is_kubernetes_dashboard_enabled": true,
"is_tiller_enabled": true
},
"admission_controller_options": {
"is_pod_security_policy_enabled": false
}
},
"metadata": {
"time_created": "2017-07-21T16:11:29Z",
"created_by_user_id": "ocid1.user.oc1..aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq",
"created_by_work_request_id": "ocid1.clustersworkrequest.oc1.iad.aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq",
"time_deleted": "2017-07-21T16:11:29Z",
"deleted_by_user_id": "ocid1.user.oc1..aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq",
"deleted_by_work_request_id": "ocid1.clustersworkrequest.oc1.iad.aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq",
"time_updated": "2017-07-21T16:11:29Z",
"updated_by_user_id": "ocid1.user.oc1..aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq",
"updated_by_work_request_id": "ocid1.clustersworkrequest.oc1.iad.aaaaaaaanifpelnyzmkvnepohbz4ntswkpl35syzzsugdxceth3oihe8hcfq"
},
"lifecycle_state": "UPDATING",
"lifecycle_details": "waiting for node pools",
"endpoints": {
"kubernetes": "https://yourkubernetes",
"public_endpoint": "https://yourPublicEndpoint",
"private_endpoint": "https://yourPrivateEndpoint"
},
"available_kubernetes_upgrades": [],
"image_policy_config": {
"is_policy_enabled": true,
"key_details": [{
"kms_key_id": "ocid1.kmskey.oc1..xxxxxxEXAMPLExxxxxx"
}]
}
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIActionsHelperBase,
get_custom_class,
)
try:
from oci.container_engine import ContainerEngineClient
from oci.container_engine.models import ClusterMigrateToNativeVcnDetails
from oci.container_engine.models import UpdateClusterEndpointConfigDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class ClusterActionsHelperGen(OCIActionsHelperBase):
    """Helper implementing the cluster action operations.

    Supported actions:
        cluster_migrate_to_native_vcn
        update_cluster_endpoint_config
    """

    @staticmethod
    def get_module_resource_id_param():
        # Name of the module parameter that identifies the target cluster.
        return "cluster_id"

    def get_module_resource_id(self):
        return self.module.params.get("cluster_id")

    def get_get_fn(self):
        return self.client.get_cluster

    def get_resource(self):
        # Fetch the current cluster state, retrying transient failures.
        return oci_common_utils.call_with_backoff(
            self.client.get_cluster, cluster_id=self.module.params.get("cluster_id"),
        )

    def _run_action(self, call_fn, details_kwarg, action_details):
        """Invoke a cluster action API and wait on its work request.

        :param call_fn: bound client method implementing the action
        :param details_kwarg: keyword-argument name the client expects for
            the details model
        :param action_details: populated SDK details model instance
        :return: result of oci_wait_utils.call_and_wait
        """
        # Both supported actions differ only in the client method and the
        # name of the details keyword, so the wait plumbing is shared here.
        call_fn_kwargs = {
            "cluster_id": self.module.params.get("cluster_id"),
            details_kwarg: action_details,
        }
        return oci_wait_utils.call_and_wait(
            call_fn=call_fn,
            call_fn_args=(),
            call_fn_kwargs=call_fn_kwargs,
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation="{0}_{1}".format(
                self.module.params.get("action").upper(),
                oci_common_utils.ACTION_OPERATION_KEY,
            ),
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )

    def cluster_migrate_to_native_vcn(self):
        action_details = oci_common_utils.convert_input_data_to_model_class(
            self.module.params, ClusterMigrateToNativeVcnDetails
        )
        return self._run_action(
            self.client.cluster_migrate_to_native_vcn,
            "cluster_migrate_to_native_vcn_details",
            action_details,
        )

    def update_cluster_endpoint_config(self):
        action_details = oci_common_utils.convert_input_data_to_model_class(
            self.module.params, UpdateClusterEndpointConfigDetails
        )
        return self._run_action(
            self.client.update_cluster_endpoint_config,
            "update_cluster_endpoint_config_details",
            action_details,
        )
# Allow deployments to supply a customization class; falls back to a no-op
# base when no custom class is registered under this name.
ClusterActionsHelperCustom = get_custom_class("ClusterActionsHelperCustom")
class ResourceHelper(ClusterActionsHelperCustom, ClusterActionsHelperGen):
    # MRO places the custom class first so it can override generated methods.
    pass
def main():
    """Ansible entry point: build the argument spec and run the requested action."""
    # Common OCI module arguments; this action module never creates resources.
    module_args = oci_common_utils.get_common_arg_spec(
        supports_create=False, supports_wait=True
    )
    module_args.update(
        dict(
            cluster_id=dict(aliases=["id"], type="str", required=True),
            # Sub-options mirror UpdateClusterEndpointConfigDetails.
            endpoint_config=dict(
                type="dict",
                options=dict(
                    subnet_id=dict(type="str"),
                    nsg_ids=dict(type="list", elements="str"),
                    is_public_ip_enabled=dict(type="bool"),
                ),
            ),
            decommission_delay_duration=dict(type="str"),
            nsg_ids=dict(type="list", elements="str"),
            is_public_ip_enabled=dict(type="bool"),
            action=dict(
                type="str",
                required=True,
                choices=[
                    "cluster_migrate_to_native_vcn",
                    "update_cluster_endpoint_config",
                ],
            ),
        )
    )
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    # The OCI SDK import is optional at module load; fail cleanly if absent.
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    resource_helper = ResourceHelper(
        module=module,
        resource_type="cluster",
        service_client_class=ContainerEngineClient,
        namespace="container_engine",
    )
    # Dispatch to the helper method matching the "action" parameter.
    result = resource_helper.perform_action(module.params.get("action"))
    module.exit_json(**result)
if __name__ == "__main__":
    main()
| 41.533698 | 160 | 0.570082 |
5f1c04b3d6e134f5c6d71dae40b32c5e2b8f035b | 153 | py | Python | examples/apps/django_multi_apps/stockscreener/urls.py | datalayer-contrib/holoviz-panel | c97b57e8eaff4b5f542add41f496395da2483b23 | [
"BSD-3-Clause"
] | 1,130 | 2019-11-23T09:53:37.000Z | 2022-03-31T11:30:07.000Z | examples/apps/django_multi_apps/stockscreener/urls.py | datalayer-contrib/holoviz-panel | c97b57e8eaff4b5f542add41f496395da2483b23 | [
"BSD-3-Clause"
] | 2,265 | 2019-11-20T17:09:09.000Z | 2022-03-31T22:09:38.000Z | examples/apps/django_multi_apps/stockscreener/urls.py | datalayer-contrib/holoviz-panel | c97b57e8eaff4b5f542add41f496395da2483b23 | [
"BSD-3-Clause"
] | 215 | 2019-11-26T11:49:04.000Z | 2022-03-30T10:23:11.000Z | from django.urls import path
from . import views
# URL namespace, enabling reverse lookups like "stockscreener:stockscreener".
app_name = 'stockscreener'
urlpatterns = [
    # App root serves the stock screener view.
    path('', views.stockscreener, name='stockscreener'),
]
| 17 | 56 | 0.718954 |
7d5905d6e6fe6d40599a43192a7a8520630fef2e | 1,416 | py | Python | memory/impala_client.py | gridsum/IML-predictor- | 2f4b4b7799752b3783bdcf01d1ca9e2a61942a74 | [
"Apache-2.0"
] | 3 | 2018-04-20T03:26:18.000Z | 2018-04-20T06:09:50.000Z | memory/impala_client.py | gridsum/IML-predictor- | 2f4b4b7799752b3783bdcf01d1ca9e2a61942a74 | [
"Apache-2.0"
] | null | null | null | memory/impala_client.py | gridsum/IML-predictor- | 2f4b4b7799752b3783bdcf01d1ca9e2a61942a74 | [
"Apache-2.0"
] | null | null | null | import logging
from impala.dbapi import connect
from .settings import ImpalaConstants, NEED_CERTIFICATE
from .error import ImpalaConnectError, ImpalaQueryError
class ImpalaWrapper:
    """Small convenience wrapper for running EXPLAIN statements on Impala."""

    def __init__(self, host=ImpalaConstants.HOST, port=ImpalaConstants.PORT,
                 user=ImpalaConstants.USER, database=None, sql=None,
                 auth_required=NEED_CERTIFICATE):
        self.host = host
        self.port = int(port)
        self.user = user
        self.database = database
        # Only the query plan is wanted, so the statement is always EXPLAINed.
        self.sql = "explain %s" % sql
        self.auth_required = auth_required

    def cursor(self):
        """Connect and return a cursor, translating failures to ImpalaConnectError."""
        auth_mechanism = 'GSSAPI' if self.auth_required else 'NOSASL'
        try:
            return connect(self.host, self.port,
                           auth_mechanism=auth_mechanism).cursor()
        except Exception as err:
            logging.error(err)
            raise ImpalaConnectError(message=str(err))

    def explain(self):
        """Yield the EXPLAIN plan line by line; the cursor is always closed."""
        cur = self.cursor()
        try:
            # Select the database, raise the plan verbosity, then explain.
            for statement in ("use %s" % self.database,
                              "set explain_level=2",
                              self.sql):
                cur.execute(statement)
        except Exception as err:
            logging.warning(err)
            raise ImpalaQueryError(message=str(err))
        else:
            for row in cur:
                yield row[0]
        finally:
            cur.close()
7d4f4e387054adbe5a5435c6fda5f5b3cc6bffe5 | 466 | py | Python | data/scripts/templates/object/tangible/wearables/ithorian/shared_ith_jacket_s14.py | obi-two/GameServer | 7d37024e2291a97d49522610cd8f1dbe5666afc2 | [
"MIT"
] | 20 | 2015-02-23T15:11:56.000Z | 2022-03-18T20:56:48.000Z | data/scripts/templates/object/tangible/wearables/ithorian/shared_ith_jacket_s14.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | null | null | null | data/scripts/templates/object/tangible/wearables/ithorian/shared_ith_jacket_s14.py | apathyboy/swganh | 665128efe9154611dec4cb5efc61d246dd095984 | [
"MIT"
] | 20 | 2015-04-04T16:35:59.000Z | 2022-03-24T14:54:37.000Z | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/wearables/ithorian/shared_ith_jacket_s14.iff"
result.attribute_template_id = 11
result.stfName("wearables_name","ith_jacket_s14")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | 27.411765 | 81 | 0.738197 |
39ade7d2fa5d033aa78fd8d28a0e11a8b087f706 | 5,460 | py | Python | tools/pdtools/pdtools/util.py | lhartung/paradrop-test | 22a491bf3198bf61bcabaedfaecde5b9be97e76f | [
"Apache-2.0"
] | 76 | 2015-08-24T18:15:20.000Z | 2021-12-06T20:03:19.000Z | tools/pdtools/pdtools/util.py | leilei1881/Paradrop | 546464e075f8a226b9eb4fe444390dc5c8527ad9 | [
"Apache-2.0"
] | 35 | 2015-07-07T22:27:49.000Z | 2022-03-01T17:13:40.000Z | tools/pdtools/pdtools/util.py | lhartung/paradrop-test | 22a491bf3198bf61bcabaedfaecde5b9be97e76f | [
"Apache-2.0"
] | 27 | 2016-02-03T22:00:09.000Z | 2021-09-26T16:59:38.000Z | import getpass
import os
import tempfile
import builtins
import yaml
# Default credentials tried first by LoginGatherer before prompting the user.
LOCAL_DEFAULT_USERNAME = "paradrop"
LOCAL_DEFAULT_PASSWORD = ""
def format_result(data):
    """Render an API call result as YAML text for printing.

    None and an empty list render as the empty string.
    """
    if data is not None and data != []:
        return yaml.safe_dump(data, default_flow_style=False)
    return ""
def open_text_editor(data):
    """Let the user edit *data* in $EDITOR (default vim).

    Returns the edited text, or None when the user exited without saving
    (detected via the temp file's unchanged modification time).
    """
    text = data if data is not None else ""
    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, 'w') as output:
        output.write(text)
    # Record mtime before launching the editor so "saved" can be detected.
    before = os.path.getmtime(path)
    editor = os.environ.get("EDITOR", "vim")
    os.spawnvpe(os.P_WAIT, editor, [editor, path], os.environ)
    with open(path, 'r') as source:
        text = source.read()
    if os.path.getmtime(path) == before:
        # File untouched: signal "no change" to the caller.
        text = None
    os.remove(path)
    return text
def open_yaml_editor(data, description):
    """Let the user edit *data* as YAML in $EDITOR.

    Returns (parsed_data, changed): the parsed YAML (an empty dict when the
    file parses to null) and a flag telling whether the file was saved,
    detected via its modification time.
    """
    contents = data if data is not None else {}
    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, 'w') as output:
        if len(contents) > 0:
            output.write(yaml.safe_dump(contents, default_flow_style=False))
        output.write("\n")
        output.write("# You are editing the configuration for the {}.\n".format(description))
        output.write("# Blank lines and lines starting with '#' will be ignored.\n")
        output.write("# Save and exit to apply changes; exit without saving to discard.\n")
    # mtime before editing lets us detect whether the user saved the file.
    before = os.path.getmtime(path)
    editor = os.environ.get("EDITOR", "vim")
    os.spawnvpe(os.P_WAIT, editor, [editor, path], os.environ)
    with open(path, 'r') as source:
        parsed = yaml.safe_load(source.read())
    if parsed is None:
        # Normalize an emptied file to an empty dict before sending onward.
        parsed = {}
    changed = os.path.getmtime(path) != before
    os.remove(path)
    return parsed, changed
def update_object(obj, path, callback=None):
    """Walk *path* (dot-separated keys) through dict *obj*, creating nodes.

    Every component along the path is guaranteed to exist afterwards; any
    missing node is created as an empty dictionary.  When the parent of the
    leaf is reached, *callback* (if given) is invoked as
    ``callback(parent, key, created)``, where ``created`` reports whether any
    part of the path (including the leaf) had to be created, and its return
    value becomes the return value of this function.  Without a callback the
    leaf node itself is returned.

    Example:
    update_object({}, 'foo.bar') -> {'foo': {'bar': {}}}

    Raises Exception for empty path components or when a non-dict node is
    encountered along the way.
    """
    components = path.split(".")
    created = False
    parent = obj
    node = obj
    for name in components:
        if not name:
            raise Exception("Path ({}) is invalid".format(path))
        if not isinstance(node, dict):
            raise Exception("Cannot set {}, not a dictionary".format(path))
        if name not in node:
            # Materialize missing intermediate (and leaf) nodes as dicts.
            node[name] = {}
            created = True
        parent, node = node, node[name]
    if callback is None:
        return node
    return callback(parent, components[-1], created)
class LoginGatherer(object):
    """Iterator producing (username, password) tuples for a given netloc.

    The first iteration yields the local default credentials for
    convenience; every subsequent iteration prompts the user.  The class
    method prompt() can be used directly in a loop when the default pair
    is not desired.

    Usage examples:
    for username, password in LoginGatherer(netloc):
        ...
    while True:
        username, password = LoginGatherer.prompt(netloc)
    """

    def __init__(self, netloc):
        self.first = True
        self.netloc = netloc

    def __iter__(self):
        # Restart the sequence so the default pair is offered again.
        self.first = True
        return self

    def __next__(self):
        if not self.first:
            return LoginGatherer.prompt(self.netloc)
        self.first = False
        return (LOCAL_DEFAULT_USERNAME, LOCAL_DEFAULT_PASSWORD)

    def next(self):
        """Python 2 style alias for __next__()."""
        return self.__next__()

    @classmethod
    def prompt(cls, netloc):
        """Interactively ask for a username and password.

        The netloc argument is shown in the prompt so the user knows the
        relevant authentication domain.
        """
        print("Please enter your username and password for {}."
              .format(netloc))
        username = builtins.input("Username: ")
        password = getpass.getpass("Password: ")
        return (username, password)
| 28.4375 | 93 | 0.641758 |
c45da4d0eb046bbdae3aaa125b45a4bae27c9487 | 8,058 | py | Python | python/createVcf.py | anands-repo/hello | 743ff49c0de97a985643280a3b5cb562c58e7fb7 | [
"MIT"
] | 7 | 2020-03-27T06:33:24.000Z | 2022-03-31T16:16:47.000Z | python/createVcf.py | anands-repo/hello | 743ff49c0de97a985643280a3b5cb562c58e7fb7 | [
"MIT"
] | 3 | 2021-08-24T08:33:03.000Z | 2022-01-10T09:30:21.000Z | python/createVcf.py | anands-repo/hello | 743ff49c0de97a985643280a3b5cb562c58e7fb7 | [
"MIT"
] | null | null | null | # © 2019 University of Illinois Board of Trustees. All rights reserved
import argparse
import subprocess
import re
import ast
from ReferenceCache import ReferenceCache
from functools import reduce
import os
import subprocess
import pysam
# Default allele-fraction cutoffs; overwritten from --threshold in __main__.
SNV_THRESHOLD=0.1
INDEL_THRESHOLD=0.1
def correctRepresentation(variant, ref):
    """
    Correct the variant representation (variant normalization)
    Follows https://genome.sph.umich.edu/wiki/Variant_Normalization

    NOTE: *variant* is modified in place (POS, REF, ALT, GT) and also
    returned for convenience.

    :param variant: dict
        Dictionary representation of the given variant
    :param ref: str
        Reference cache location
    :return: dict
        Corrected dictionary representation
    """
    referenceCache = ReferenceCache(database=ref);
    referenceCache.chrom = variant['CHROM'];
    # Alleles actually called by the genotype, in sorted genotype-index order.
    allAlleles = [variant['REF']] + variant['ALT'];
    ALLELES = [allAlleles[gt] for gt in sorted(list(set(map(int, variant['GT'].split('/')))))];
    if reduce(lambda x, y : x and y, [a == variant['REF'] for a in ALLELES], True):
        # Homozygous-reference call: nothing to normalize.
        return variant;
    else:
        runLoop = True;
        # Left align and right parsimony
        while runLoop:
            runLoop = False;
            # If alleles end with the same base, truncate rightmost base
            rightBases = [variant['REF'][-1]] + [a[-1] for a in ALLELES];
            if reduce(lambda x, y : x and y, [r == rightBases[0] for r in rightBases]):
                variant['REF'] = variant['REF'][:-1];
                ALLELES = [a[:-1] for a in ALLELES];
                runLoop = True;
            # If any allele is empty, incorporate a base from the reference to its left
            if reduce(lambda x, y : x or y, [len(a) == 0 for a in ALLELES]) or (len(variant['REF']) == 0):
                variant['POS'] -= 1;
                leftBase = referenceCache[variant['POS']];
                variant['REF'] = leftBase + variant['REF'];
                ALLELES = [leftBase + a for a in ALLELES];
                runLoop = True;
        # Left parsimony: strip shared leading bases while every allele
        # keeps at least one base.
        runLoop = True;
        while runLoop:
            # Every allele is of length at least 2
            if reduce(lambda x, y : x and y, [len(a) >= 2 for a in ALLELES]) and (len(variant['REF']) >= 2):
                leftBases = [variant['REF'][0]] + [a[0] for a in ALLELES];
                if reduce(lambda x, y : x and y, [l == leftBases[0] for l in leftBases]):
                    variant['POS'] += 1;
                    variant['REF'] = variant['REF'][1:];
                    ALLELES = [a[1:] for a in ALLELES];
                else:
                    runLoop = False;
            else:
                runLoop = False;
        # Form new alt alleles
        ALT = [a for a in ALLELES if a != variant['REF']];
        variant['ALT'] = ALT;
        # Rebuild GT indices against the new ALT ordering (0 = REF).
        newGT = [];
        for a in ALLELES:
            if a != variant['REF']:
                index = ALT.index(a) + 1;
            else:
                index = 0;
            newGT.append(index);
        if len(newGT) == 1:
            # Single called allele: report as homozygous.
            newGT = newGT * 2;
        newGT = '/'.join(map(str, newGT));
        variant['GT'] = newGT;
    return variant;
def generate_vcf_header(contig_lengths, chromosomes):
    """Build a minimal VCF 4.1 header string.

    :param contig_lengths: dict mapping contig name -> length in bases
    :param chromosomes: ordered contig names to emit ##contig lines for;
        names missing from contig_lengths are silently skipped
    :return: str, header text ending with the tab-delimited column line
    """
    string = "##fileformat=VCFv4.1\n"
    for chr_ in chromosomes:
        if chr_ in contig_lengths:
            string += "##contig=<ID=%s,length=%d>\n" % (chr_, contig_lengths[chr_])
    string += "##INFO=<ID=AssemblyResults,Description=Obtained probabilistic graph assembly>\n"
    string += "##FORMAT=<ID=GT,Number=1,Type=String,Description=Genotype>\n"
    string += "##FILTER=<ID=FAIL,Description=Failed allele fraction test>\n"
    # The column header line must be tab-delimited per the VCF specification.
    string += "\t".join(["#CHROM", "POS", "ID", "REF", "ALT", "QUAL",
                         "FILTER", "INFO", "FORMAT", "SAMPLE1"]) + "\n"
    return string
def get_contig_lengths(bam):
    """Return {chromosome: length} for chromosomes 1-22, X, Y from a BAM header.

    Fixes a resource leak in the original (the pysam file handle was never
    closed) and drops the dead, commented-out samtools-subprocess variant.

    :param bam: str, path to a BAM file
    :return: dict mapping chromosome name to reference length
    """
    # NOTE(review): assumes the BAM header uses bare names ('1'..'22', 'X',
    # 'Y'), not 'chr'-prefixed names -- confirm against the input data.
    chromosomes = [str(c) for c in range(1, 23)] + ['X', 'Y']
    fhandle = pysam.AlignmentFile(bam, 'rb')
    try:
        return {c: fhandle.get_reference_length(c) for c in chromosomes}
    finally:
        # Always release the file handle, even if a contig lookup fails.
        fhandle.close()
def createVcfEntry(line, ref, normalize = False):
    """Convert one assembly-result line into a VCF record string.

    :param line: str, Python-literal dict on one line (parsed via
        ast.literal_eval); must carry CHROM/POS/REF/ALT/GT/FRAC keys to
        produce output
    :param ref: str, reference cache location (used only when normalizing)
    :param normalize: bool, run correctRepresentation() on the variant
    :return: str VCF line, or None when the input has no 'CHROM' key
    """
    entries = ast.literal_eval(line);
    # Allele-fraction test: indels and SNVs use the module-level thresholds.
    passes = lambda a, flag : a > INDEL_THRESHOLD if flag else a > SNV_THRESHOLD;
    if 'CHROM' in entries:
        # Per-allele signal fractions, keyed by genotype index.
        fracs = list(map(float, entries['FRAC'].split(':')));
        gts = list(map(int, entries['GT'].split('/')));
        fDict = dict(zip(gts, fracs));
        indel = False;
        for a in entries['ALT']:
            if len(a) != len(entries['REF']):
                indel = True;
        # for a, f in zip(entries['GT'].split('/'), fracs):
        #     if f > (SNV_THRESHOLD if not indel else INDEL_THRESHOLD):
        #         passingAlleles.append(int(a));
        # Choose alleles with top two signal levels, and threshold them
        # if len(passingAlleles) > 2:
        topTwo = [];
        fZip = zip(fracs, gts);
        sZip = sorted(fZip);
        topTwo.append(sZip[-1][1]);
        if len(sZip) > 1:
            topTwo.append(sZip[-2][1]);
        passingAlleles = [p for p in topTwo if passes(fDict[p], indel)];
        # PASS only when at least one passing allele is non-reference
        # (sum > 0); otherwise report the top-two genotype flagged FAIL.
        if sum(passingAlleles) > 0:
            flag = 'PASS';
            if len(passingAlleles) == 1: passingAlleles = passingAlleles * 2;
            newGt = '/'.join([str(x) for x in passingAlleles]);
        else:
            flag = 'FAIL';
            newGt = '/'.join([str(x) for x in topTwo]);
        variantDict = {
            'CHROM':str(entries['CHROM']),
            'POS':entries['POS'],
            'REF':entries['REF'],
            'ALT':entries['ALT'],
            'GT':newGt
        };
        if normalize:
            variantDict = correctRepresentation(variantDict, ref);
        # VCF is 1-based, hence POS+1; QUAL is fixed at 30.
        entry = "%s\t%d\t.\t%s\t%s\t30\t%s\tAssemblyResults\tGT\t%s\n"%(str(variantDict['CHROM']), variantDict['POS']+1, variantDict['REF'], ','.join(variantDict['ALT']), flag, variantDict['GT']);
    else:
        entry = None;
    return entry;
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Create VCF file from assembly results");
    parser.add_argument(
        "--assembly",
        help = "Assembly results file",
        required = True,
    );
    parser.add_argument(
        "--bam",
        help = "Bam file used for assembly",
        required = True,
    );
    parser.add_argument(
        "--threshold",
        default = 0.1,
        type = float,
        help = "Threshold for signal cutoff"
    );
    parser.add_argument(
        "--ref",
        help = "Location of reference cache database",
        required = True,
    );
    parser.add_argument(
        "--normalize",
        action = 'store_true',
        help = "Normalize variant representation",
        default = False,
    );
    parser.add_argument(
        "--vcf",
        required = True,
        help = "Output VCF filename",
    );
    args = parser.parse_args();
    # Rebind the module-level thresholds read by createVcfEntry's lambda.
    SNV_THRESHOLD = args.threshold;
    INDEL_THRESHOLD = args.threshold;
    contigLengths = get_contig_lengths(args.bam);
    # NOTE(review): only chromosome '18' is emitted in the header here --
    # presumably a test-run artifact; confirm before wider use.
    header = generate_vcf_header(contigLengths, ['18']);
    tmpvcf = os.path.join('/tmp', 'tmp.vcf');
    # First pass: write unsorted records to a temp file.
    with open(args.assembly, 'r') as fhandle, open(tmpvcf, 'w') as whandle:
        # whandle.write(header);
        for line in fhandle:
            results = createVcfEntry(line, args.ref, args.normalize);
            if results is not None:
                whandle.write(str(results));
    with open(args.vcf, 'w') as fhandle:
        fhandle.write(header);
    # Append records sorted by chromosome (lexical) then position (numeric).
    with open(args.vcf, 'a') as fhandle:
        # sort -k1,1d -k2,2n
        subprocess.call(['sort', '-k1,1d', '-k2,2n', tmpvcf], stdout=fhandle);
bc51013c19099b175b2e622ef3d3c5959b8172d6 | 16,876 | py | Python | heat_dashboard/test/test_data/heat_data.py | efenfauzi/heat-dashboard | e1e9fc853b72c2770ba041c0ac1151b4c9484f36 | [
"Apache-2.0"
] | 13 | 2017-10-31T10:35:14.000Z | 2020-10-28T03:23:04.000Z | heat_dashboard/test/test_data/heat_data.py | efenfauzi/heat-dashboard | e1e9fc853b72c2770ba041c0ac1151b4c9484f36 | [
"Apache-2.0"
] | 3 | 2019-11-01T13:03:10.000Z | 2019-12-06T17:33:03.000Z | heat_dashboard/test/test_data/heat_data.py | openstack/heat-dashboard | 8704da6f4a6d8f6f64be2d72af49f0ea44d7b900 | [
"Apache-2.0"
] | 13 | 2017-12-21T08:33:23.000Z | 2021-01-18T14:26:22.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heatclient.v1 import resource_types
from heatclient.v1 import resources
from heatclient.v1 import services
from heatclient.v1 import stacks
from heatclient.v1 import template_versions
from heat_dashboard.test.test_data import utils
# A slightly hacked up copy of a sample cloudformation template for testing.
TEMPLATE = """
{
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "AWS CloudFormation Sample Template.",
"Parameters": {
"KeyName": {
"Description": "Name of an EC2 Key Pair to enable SSH access to the instances",
"Type": "String"
},
"InstanceType": {
"Description": "WebServer EC2 instance type",
"Type": "String",
"Default": "m1.small",
"AllowedValues": [
"m1.tiny",
"m1.small",
"m1.medium",
"m1.large",
"m1.xlarge"
],
"ConstraintDescription": "must be a valid EC2 instance type."
},
"DBName": {
"Default": "wordpress",
"Description": "The WordPress database name",
"Type": "String",
"MinLength": "1",
"MaxLength": "64",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"ConstraintDescription": "must begin with a letter and..."
},
"DBUsername": {
"Default": "admin",
"NoEcho": "true",
"Description": "The WordPress database admin account username",
"Type": "String",
"MinLength": "1",
"MaxLength": "16",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"ConstraintDescription": "must begin with a letter and..."
},
"DBPassword": {
"Default": "admin",
"NoEcho": "true",
"Description": "The WordPress database admin account password",
"Type": "String",
"MinLength": "1",
"MaxLength": "41",
"AllowedPattern": "[a-zA-Z0-9]*",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"DBRootPassword": {
"Default": "admin",
"NoEcho": "true",
"Description": "Root password for MySQL",
"Type": "String",
"MinLength": "1",
"MaxLength": "41",
"AllowedPattern": "[a-zA-Z0-9]*",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"LinuxDistribution": {
"Default": "F17",
"Description": "Distribution of choice",
"Type": "String",
"AllowedValues": [
"F18",
"F17",
"U10",
"RHEL-6.1",
"RHEL-6.2",
"RHEL-6.3"
]
},
"Network": {
"Type": "String",
"CustomConstraint": "neutron.network"
}
},
"Mappings": {
"AWSInstanceType2Arch": {
"m1.tiny": {
"Arch": "32"
},
"m1.small": {
"Arch": "64"
},
"m1.medium": {
"Arch": "64"
},
"m1.large": {
"Arch": "64"
},
"m1.xlarge": {
"Arch": "64"
}
},
"DistroArch2AMI": {
"F18": {
"32": "F18-i386-cfntools",
"64": "F18-x86_64-cfntools"
},
"F17": {
"32": "F17-i386-cfntools",
"64": "F17-x86_64-cfntools"
},
"U10": {
"32": "U10-i386-cfntools",
"64": "U10-x86_64-cfntools"
},
"RHEL-6.1": {
"32": "rhel61-i386-cfntools",
"64": "rhel61-x86_64-cfntools"
},
"RHEL-6.2": {
"32": "rhel62-i386-cfntools",
"64": "rhel62-x86_64-cfntools"
},
"RHEL-6.3": {
"32": "rhel63-i386-cfntools",
"64": "rhel63-x86_64-cfntools"
}
}
},
"Resources": {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Metadata": {
"AWS::CloudFormation::Init": {
"config": {
"packages": {
"yum": {
"mysql": [],
"mysql-server": [],
"httpd": [],
"wordpress": []
}
},
"services": {
"systemd": {
"mysqld": {
"enabled": "true",
"ensureRunning": "true"
},
"httpd": {
"enabled": "true",
"ensureRunning": "true"
}
}
}
}
}
},
"Properties": {
"ImageId": {
"Fn::FindInMap": [
"DistroArch2AMI",
{
"Ref": "LinuxDistribution"
},
{
"Fn::FindInMap": [
"AWSInstanceType2Arch",
{
"Ref": "InstanceType"
},
"Arch"
]
}
]
},
"InstanceType": {
"Ref": "InstanceType"
},
"KeyName": {
"Ref": "KeyName"
},
"UserData": {
"Fn::Base64": {
"Fn::Join": [
"",
[
"#!/bin/bash -v\\n",
"/opt/aws/bin/cfn-init\\n"
]
]
}
}
}
}
},
"Outputs": {
"WebsiteURL": {
"Value": {
"Fn::Join": [
"",
[
"http://",
{
"Fn::GetAtt": [
"WikiDatabase",
"PublicIp"
]
},
"/wordpress"
]
]
},
"Description": "URL for Wordpress wiki"
}
}
}
"""
VALIDATE = """
{
"Description": "AWS CloudFormation Sample Template.",
"Parameters": {
"DBUsername": {
"Type": "String",
"Description": "The WordPress database admin account username",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "16",
"ConstraintDescription": "must begin with a letter and..."
},
"LinuxDistribution": {
"Default": "F17",
"Type": "String",
"Description": "Distribution of choice",
"AllowedValues": [
"F18",
"F17",
"U10",
"RHEL-6.1",
"RHEL-6.2",
"RHEL-6.3"
]
},
"DBRootPassword": {
"Type": "String",
"Description": "Root password for MySQL",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "41",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"KeyName": {
"Type": "String",
"Description": "Name of an EC2 Key Pair to enable SSH access to the instances"
},
"DBName": {
"Type": "String",
"Description": "The WordPress database name",
"Default": "wordpress",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"MaxLength": "64",
"ConstraintDescription": "must begin with a letter and..."
},
"DBPassword": {
"Type": "String",
"Description": "The WordPress database admin account password",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "41",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"InstanceType": {
"Default": "m1.small",
"Type": "String",
"ConstraintDescription": "must be a valid EC2 instance type.",
"Description": "WebServer EC2 instance type",
"AllowedValues": [
"m1.tiny",
"m1.small",
"m1.medium",
"m1.large",
"m1.xlarge"
]
},
"Network": {
"Type": "String",
"CustomConstraint": "neutron.network"
}
}
}
"""
ENVIRONMENT = """
parameters:
InstanceType: m1.xsmall
db_password: verybadpass
KeyName: heat_key
"""
SNAPSHOT_CREATE = """
{
"status": "IN_PROGRESS",
"name": "None",
"data": "None",
"creation_time": "2016-02-19T07:25:23.494152",
"status_reason": "None",
"id": "8af90c07-b788-44ee-a8ab-5990197f5e32"
}
"""
class Environment(object):
    """Test double holding raw environment-file text."""
    def __init__(self, data):
        self.data = data
class Template(object):
    """Test double pairing template text with its expected validation output."""
    def __init__(self, data, validate):
        self.data = data
        self.validate = validate
class Snapshot(object):
    """Test double holding raw snapshot payload text."""
    def __init__(self, data):
        self.data = data
def data(TEST):
    """Populate *TEST* with canned Heat API objects for unit tests.

    Fills the TestData container with fake stacks, snapshots, templates,
    environments, resource types, resources, services, and template
    versions/functions mimicking heatclient return values.
    """
    TEST.stacks = utils.TestDataContainer()
    TEST.stack_templates = utils.TestDataContainer()
    TEST.stack_environments = utils.TestDataContainer()
    TEST.stack_snapshot_create = utils.TestDataContainer()
    TEST.stack_snapshot = utils.TestDataContainer()
    TEST.resource_types = utils.TestDataContainer()
    TEST.heat_resources = utils.TestDataContainer()
    TEST.heat_services = utils.TestDataContainer()
    TEST.template_versions = utils.TestDataContainer()
    TEST.template_functions = utils.TestDataContainer()
    # Services
    service_1 = services.Service(services.ServiceManager(None), {
        "status": "up",
        "binary": "heat-engine",
        "report_interval": 60,
        "engine_id": "2f7b5a9b-c50b-4b01-8248-f89f5fb338d1",
        "created_at": "2015-02-06T03:23:32.000000",
        "hostname": "mrkanag",
        "updated_at": "2015-02-20T09:49:52.000000",
        "topic": "engine",
        "host": "engine-1",
        "deleted_at": None,
        "id": "1efd7015-5016-4caa-b5c8-12438af7b100"
    })
    service_2 = services.Service(services.ServiceManager(None), {
        "status": "up",
        "binary": "heat-engine",
        "report_interval": 60,
        "engine_id": "2f7b5a9b-c50b-4b01-8248-f89f5fb338d2",
        "created_at": "2015-02-06T03:23:32.000000",
        "hostname": "mrkanag",
        "updated_at": "2015-02-20T09:49:52.000000",
        "topic": "engine",
        "host": "engine-2",
        "deleted_at": None,
        "id": "1efd7015-5016-4caa-b5c8-12438af7b100"
    })
    TEST.heat_services.add(service_1)
    TEST.heat_services.add(service_2)
    # Data return by heatclient.
    TEST.api_resource_types = utils.TestDataContainer()
    # Ten fake stacks in CREATE_COMPLETE state.
    for i in range(10):
        stack_data = {
            "description": "No description",
            "links": [{
                "href": "http://192.168.1.70:8004/v1/"
                        "051c727ee67040d6a7b7812708485a97/"
                        "stacks/stack-test{0}/"
                        "05b4f39f-ea96-4d91-910c-e758c078a089{0}".format(i),
                "rel": "self"
            }],
            "parameters": {
                'DBUsername': '******',
                'InstanceType': 'm1.small',
                'AWS::StackId': (
                    'arn:openstack:heat::2ce287:stacks/teststack/88553ec'),
                'DBRootPassword': '******',
                'AWS::StackName': "teststack{0}".format(i),
                'DBPassword': '******',
                'AWS::Region': 'ap-southeast-1',
                'DBName': u'wordpress'
            },
            "stack_status_reason": "Stack successfully created",
            "stack_name": "stack-test{0}".format(i),
            "creation_time": "2013-04-22T00:11:39Z",
            "updated_time": "2013-04-22T00:11:39Z",
            "stack_status": "CREATE_COMPLETE",
            "id": "05b4f39f-ea96-4d91-910c-e758c078a089{0}".format(i)
        }
        stack = stacks.Stack(stacks.StackManager(None), stack_data)
        TEST.stacks.add(stack)
    # Ten completed snapshots of a small two-resource Neutron stack.
    for i in range(10):
        snapshot_data = {
            "status": "COMPLETE",
            "name": 'null',
            "data": {
                "files": {},
                "status": "COMPLETE",
                "name": "zhao3",
                "tags": ["a", " 123", " b", " 456"],
                "stack_user_project_id": "3cba4460875444049a2a7cc5420ccddb",
                "environment": {
                    "encrypted_param_names": [],
                    "parameter_defaults": {},
                    "event_sinks": [],
                    "parameters": {},
                    "resource_registry": {
                        "resources": {}
                    }
                },
                "template": {
                    "heat_template_version": "2013-05-23",
                    "description":
                        "HOT template for Test.",
                    "resources": {
                        "private_subnet": {
                            "type": "OS::Neutron::Subnet",
                            "properties": {
                                "network_id": {"get_resource": "private_net"},
                                "cidr": "172.16.2.0/24",
                                "gateway_ip": "172.16.2.1"
                            }
                        },
                        "private_net": {
                            "type": "OS::Neutron::Net",
                            "properties": {"name": "private-net"}
                        }
                    }
                },
                "action": "SNAPSHOT",
                "project_id": "1acd0026829f4d28bb2eff912d7aad0d",
                "id": "70650725-bdbd-419f-b53f-5707767bfe0e",
                "resources": {
                    "private_subnet": {
                        "status": "COMPLETE",
                        "name": "private_subnet",
                        "resource_data": {},
                        "resource_id": "9c7211b3-31c7-41f6-b92a-442ad3f71ef0",
                        "action": "SNAPSHOT",
                        "type": "OS::Neutron::Subnet",
                        "metadata": {}
                    },
                    "private_net": {
                        "status": "COMPLETE",
                        "name": "private_net",
                        "resource_data": {},
                        "resource_id": "ff4fd287-31b2-4d00-bc96-c409bc1db027",
                        "action": "SNAPSHOT",
                        "type": "OS::Neutron::Net",
                        "metadata": {}
                    }
                }
            },
            "creation_time": "2016-02-21T04:02:54",
            "status_reason": "Stack SNAPSHOT completed successfully",
            "id": "01558a3b-ba05-4427-bbb4-1e4ab71cfca{0}".format(i)
        }
        TEST.stack_snapshot.add(snapshot_data)
    TEST.stack_templates.add(Template(TEMPLATE, VALIDATE))
    TEST.stack_environments.add(Environment(ENVIRONMENT))
    TEST.stack_snapshot_create.add(Snapshot(SNAPSHOT_CREATE))
    # Resource types list
    r_type_1 = {
        "resource_type": "AWS::CloudFormation::Stack",
        "attributes": {},
        "properties": {
            "Parameters": {
                "description":
                    "The set of parameters passed to this nested stack.",
                "immutable": False,
                "required": False,
                "type": "map",
                "update_allowed": True},
            "TemplateURL": {
                "description": "The URL of a template that specifies"
                               " the stack to be created as a resource.",
                "immutable": False,
                "required": True,
                "type": "string",
                "update_allowed": True},
            "TimeoutInMinutes": {
                "description": "The length of time, in minutes,"
                               " to wait for the nested stack creation.",
                "immutable": False,
                "required": False,
                "type": "number",
                "update_allowed": True}
        }
    }
    r_type_2 = {
        "resource_type": "OS::Heat::CloudConfig",
        "attributes": {
            "config": {
                "description": "The config value of the software config."}
        },
        "properties": {
            "cloud_config": {
                "description": "Map representing the cloud-config data"
                               " structure which will be formatted as YAML.",
                "immutable": False,
                "required": False,
                "type": "map",
                "update_allowed": False}
        }
    }
    r_types_list = [r_type_1, r_type_2]
    for rt in r_types_list:
        r_type = resource_types.ResourceType(
            resource_types.ResourceTypeManager(None), rt['resource_type'])
        TEST.resource_types.add(r_type)
        TEST.api_resource_types.add(rt)
    # Resources
    resource_1 = resources.Resource(resources.ResourceManager(None), {
        "logical_resource_id": "my_resource",
        "physical_resource_id": "7b5e29b1-c94d-402d-b69c-df9ac6dfc0ce",
        "resource_name": "my_resource",
        "links": [
            {
                "href": "http://192.168.1.70:8004/v1/"
                        "051c727ee67040d6a7b7812708485a97/"
                        "stacks/%s/%s/resources/my_resource" %
                        (TEST.stacks.first().stack_name,
                         TEST.stacks.first().id),
                "rel": "self"
            },
            {
                "href": "http://192.168.1.70:8004/v1/"
                        "051c727ee67040d6a7b7812708485a97/"
                        "stacks/%s/%s" %
                        (TEST.stacks.first().stack_name,
                         TEST.stacks.first().id),
                "rel": "stack"
            }
        ],
        "attributes": {
            "metadata": {}
        }
    })
    TEST.heat_resources.add(resource_1)
    # Template versions
    template_version_1 = template_versions.TemplateVersion(
        template_versions.TemplateVersionManager(None), {
            "version": "HeatTemplateFormatVersion.2012-12-12",
            "type": "cfn"
        })
    template_version_2 = template_versions.TemplateVersion(
        template_versions.TemplateVersionManager(None), {
            "version": "heat_template_version.2013-05-23",
            "type": "hot"
        })
    TEST.template_versions.add(template_version_1)
    TEST.template_versions.add(template_version_2)
    # Template functions
    template_function_1 = template_versions.TemplateVersion(
        template_versions.TemplateVersionManager(None), {
            "functions": "Fn::GetAZs",
            "description": "A function for retrieving the availability zones."
        })
    template_function_2 = template_versions.TemplateVersion(
        template_versions.TemplateVersionManager(None), {
            "functions": "Fn::Join",
            "description": "A function for joining strings."
        })
    TEST.template_functions.add(template_function_1)
    TEST.template_functions.add(template_function_2)
| 27.307443 | 79 | 0.553626 |
21031a79cfa6e1dd3a793bc546a8b9f5ba9afd05 | 2,575 | py | Python | neutron/tests/unit/oneconvergence/test_plugin_helper.py | SnabbCo/neutron | a657c06d10f2171149c6b1863df36522bdc11cd7 | [
"Apache-2.0"
] | 3 | 2015-02-02T02:51:39.000Z | 2015-02-23T10:20:23.000Z | neutron/tests/unit/oneconvergence/test_plugin_helper.py | SnabbCo/neutron | a657c06d10f2171149c6b1863df36522bdc11cd7 | [
"Apache-2.0"
] | 4 | 2015-02-23T10:21:11.000Z | 2015-03-04T09:28:20.000Z | neutron/tests/unit/oneconvergence/test_plugin_helper.py | SnabbCo/neutron | a657c06d10f2171149c6b1863df36522bdc11cd7 | [
"Apache-2.0"
] | 3 | 2015-04-03T08:47:02.000Z | 2020-02-05T10:40:45.000Z | # Copyright 2014 OneConvergence, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Kedar Kulkarni, One Convergence, Inc.
import mock
import requests
from neutron.openstack.common import jsonutils as json
from neutron.plugins.oneconvergence.lib import config # noqa
from neutron.plugins.oneconvergence.lib import plugin_helper as client
from neutron.tests import base
class TestPluginHelper(base.BaseTestCase):
    """Unit tests for the One Convergence NVSD controller client helper."""

    def setUp(self):
        super(TestPluginHelper, self).setUp()
        self.nvsdcontroller = client.NVSDController()

    def get_response(self, *args, **kwargs):
        # Stand-in for NVSDController.do_request: a successful HTTP reply
        # whose body carries a fresh session token.
        fake_response = mock.Mock()
        fake_response.status_code = requests.codes.ok
        fake_response.content = json.dumps({'session_uuid': 'new_auth_token'})
        return fake_response

    def test_login(self):
        expected_url = ('http://127.0.0.1:8082/pluginhandler/ocplugin/'
                        'authmgmt/login')
        expected_headers = {'Content-Type': 'application/json'}
        expected_body = json.dumps({"user_name": "ocplugin", "passwd": "oc123"})
        with mock.patch.object(self.nvsdcontroller, 'do_request',
                               side_effect=self.get_response) as do_request:
            self.nvsdcontroller.login()
            do_request.assert_called_once_with('POST', url=expected_url,
                                               headers=expected_headers,
                                               data=expected_body,
                                               timeout=30.0)

    def test_request(self):
        with mock.patch.object(self.nvsdcontroller, 'do_request',
                               side_effect=self.get_response) as do_request:
            self.nvsdcontroller.login()
            self.nvsdcontroller.request("POST", "/some_url")

            # One call for the login, one for the request itself.
            self.assertEqual(do_request.call_count, 2)
            do_request.assert_called_with(
                'POST',
                url='http://127.0.0.1:8082/some_url?authToken=new_auth_token',
                headers={'Content-Type': 'application/json'}, data='',
                timeout=30.0)
| 42.213115 | 78 | 0.640388 |
e429ecd00df89462223442aef0d99c6bec78cc5c | 23,015 | py | Python | examples/pytorch/question_generation/run_question_generation_iclr.py | amanapte/graph4nlp | 1ec5464b5d6b1f9c36297171cfedf617021273c3 | [
"Apache-2.0"
] | 1 | 2021-12-09T21:56:49.000Z | 2021-12-09T21:56:49.000Z | examples/pytorch/question_generation/run_question_generation_iclr.py | amanapte/graph4nlp | 1ec5464b5d6b1f9c36297171cfedf617021273c3 | [
"Apache-2.0"
] | null | null | null | examples/pytorch/question_generation/run_question_generation_iclr.py | amanapte/graph4nlp | 1ec5464b5d6b1f9c36297171cfedf617021273c3 | [
"Apache-2.0"
] | null | null | null | import argparse
import multiprocessing
import os
import platform
import time
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.multiprocessing
import torch.nn as nn
import torch.optim as optim
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.utils.data import DataLoader
from graph4nlp.pytorch.datasets.squad import SQuADDataset
from graph4nlp.pytorch.models.graph2seq import Graph2Seq
from graph4nlp.pytorch.models.graph2seq_loss import Graph2SeqLoss
from graph4nlp.pytorch.modules.config import get_basic_args
from graph4nlp.pytorch.modules.evaluation import BLEU, METEOR, ROUGE
from graph4nlp.pytorch.modules.graph_construction import (
ConstituencyBasedGraphConstruction,
DependencyBasedGraphConstruction,
NodeEmbeddingBasedGraphConstruction,
NodeEmbeddingBasedRefinedGraphConstruction,
)
from graph4nlp.pytorch.modules.graph_construction.embedding_construction import WordEmbedding
from graph4nlp.pytorch.modules.utils import constants as Constants
from graph4nlp.pytorch.modules.utils.config_utils import get_yaml_config, update_values
from graph4nlp.pytorch.modules.utils.copy_utils import prepare_ext_vocab
from graph4nlp.pytorch.modules.utils.generic_utils import EarlyStopping, grid, to_cuda
from graph4nlp.pytorch.modules.utils.logger import Logger
from fused_embedding_construction import FusedEmbeddingConstruction
class QGModel(nn.Module):
    """Question-generation model.

    Wraps a Graph2Seq encoder-decoder, ties the decoder's target embedding
    to the encoder-side word embedding, and replaces the default embedding
    construction layer with a fused passage-answer alignment embedding layer
    (FusedEmbeddingConstruction).
    """

    def __init__(self, vocab, config):
        super(QGModel, self).__init__()
        self.config = config
        self.vocab = vocab
        self.use_coverage = self.config["decoder_args"]["rnn_decoder_share"]["use_coverage"]

        # build Graph2Seq model
        self.g2s = Graph2Seq.from_args(config, self.vocab)

        # Reuse the encoder's w2v word-embedding layer if present; otherwise
        # build a fresh one from the pretrained vocabulary vectors.
        if "w2v" in self.g2s.graph_topology.embedding_layer.word_emb_layers:
            self.word_emb = self.g2s.graph_topology.embedding_layer.word_emb_layers[
                "w2v"
            ].word_emb_layer
        else:
            self.word_emb = WordEmbedding(
                self.vocab.in_word_vocab.embeddings.shape[0],
                self.vocab.in_word_vocab.embeddings.shape[1],
                pretrained_word_emb=self.vocab.in_word_vocab.embeddings,
                fix_emb=config["graph_construction_args"]["node_embedding"]["fix_word_emb"],
            ).word_emb_layer
        # Tie the decoder's target embedding to the encoder word embedding.
        self.g2s.seq_decoder.tgt_emb = self.word_emb

        self.loss_calc = Graph2SeqLoss(
            ignore_index=self.vocab.out_word_vocab.PAD,
            use_coverage=self.use_coverage,
            coverage_weight=config["coverage_loss_ratio"],
        )

        # Replace the default embedding construction layer
        # with the customized passage-answer alignment embedding construction layer
        # TODO: delete the default layer and clear the memory
        embedding_styles = config["graph_construction_args"]["node_embedding"]["embedding_style"]
        self.g2s.graph_topology.embedding_layer = FusedEmbeddingConstruction(
            self.vocab.in_word_vocab,
            embedding_styles["single_token_item"],
            emb_strategy=embedding_styles["emb_strategy"],
            hidden_size=config["graph_construction_args"]["node_embedding"]["hidden_size"],
            num_rnn_layers=embedding_styles.get("num_rnn_layers", 1),
            fix_word_emb=config["graph_construction_args"]["node_embedding"]["fix_word_emb"],
            fix_bert_emb=config["graph_construction_args"]["node_embedding"]["fix_bert_emb"],
            bert_model_name=embedding_styles.get("bert_model_name", "bert-base-uncased"),
            bert_lower_case=embedding_styles.get("bert_lower_case", True),
            word_dropout=config["graph_construction_args"]["node_embedding"]["word_dropout"],
            bert_dropout=config["graph_construction_args"]["node_embedding"].get(
                "bert_dropout", None
            ),
            rnn_dropout=config["graph_construction_args"]["node_embedding"]["rnn_dropout"],
        )

    def encode_init_node_feature(self, data):
        """Build the initial node features for the batched graph in ``data``."""
        # graph embedding construction
        batch_gd = self.g2s.graph_topology.embedding_layer(data)
        return batch_gd

    def forward(self, data, oov_dict=None, require_loss=True):
        """Run the encoder-decoder.

        Args:
            data: batch dict; must contain "graph_data" (and "tgt_tensor"
                when ``require_loss`` is True).
            oov_dict: extended vocabulary for the copy mechanism (optional).
            require_loss: when True, also compute and return the loss.

        Returns:
            ``(prob, loss)`` when ``require_loss`` is True, otherwise ``prob``.
        """
        batch_gd = self.encode_init_node_feature(data)
        if require_loss:
            tgt = data["tgt_tensor"]
        else:
            tgt = None
        prob, enc_attn_weights, coverage_vectors = self.g2s.encoder_decoder(
            batch_gd, oov_dict=oov_dict, tgt_seq=tgt
        )
        if require_loss:
            tgt = data["tgt_tensor"]
            # Truncate prediction and target to a common length before the loss.
            min_length = min(prob.shape[1], tgt.shape[1])
            prob = prob[:, :min_length, :]
            tgt = tgt[:, :min_length]
            loss = self.loss_calc(
                prob,
                label=tgt,
                enc_attn_weights=enc_attn_weights,
                coverage_vectors=coverage_vectors,
            )
            # NOTE(review): the loss is rescaled by min_length / 2 -- presumably
            # undoing a per-token normalization inside Graph2SeqLoss; confirm.
            return prob, loss * min_length / 2
        else:
            return prob
class ModelHandler:
    """End-to-end runner for the question-generation task.

    Builds the dataset/dataloaders, the QG model, the optimizer (with early
    stopping and LR scheduling) and the text-generation metrics, and drives
    the train / evaluate / beam-search translate / test loops.
    """

    def __init__(self, config):
        super(ModelHandler, self).__init__()
        self.config = config
        self.use_copy = self.config["decoder_args"]["rnn_decoder_share"]["use_copy"]
        self.use_coverage = self.config["decoder_args"]["rnn_decoder_share"]["use_coverage"]
        # Log everything except the (non-serializable) torch.device entry.
        self.logger = Logger(
            config["out_dir"],
            config={k: v for k, v in config.items() if k != "device"},
            overwrite=True,
        )
        self.logger.write(config["out_dir"])
        self._build_dataloader()
        self._build_model()
        self._build_optimizer()
        self._build_evaluation()

    def _build_dataloader(self):
        """Create the SQuAD dataset (with the configured graph construction)
        and the train/val/test dataloaders."""
        if (
            self.config["graph_construction_args"]["graph_construction_share"]["graph_type"]
            == "dependency"
        ):
            topology_builder = DependencyBasedGraphConstruction
            graph_type = "static"
            dynamic_init_topology_builder = None
        elif (
            self.config["graph_construction_args"]["graph_construction_share"]["graph_type"]
            == "constituency"
        ):
            topology_builder = ConstituencyBasedGraphConstruction
            graph_type = "static"
            dynamic_init_topology_builder = None
        elif (
            self.config["graph_construction_args"]["graph_construction_share"]["graph_type"]
            == "node_emb"
        ):
            topology_builder = NodeEmbeddingBasedGraphConstruction
            graph_type = "dynamic"
            dynamic_init_topology_builder = None
        elif (
            self.config["graph_construction_args"]["graph_construction_share"]["graph_type"]
            == "node_emb_refined"
        ):
            topology_builder = NodeEmbeddingBasedRefinedGraphConstruction
            graph_type = "dynamic"
            # Bug fix: the config is a plain nested dict (see the dict-style
            # lookups below, e.g. ["graph_construction_private"]["merge_strategy"]);
            # the previous attribute-style access
            # (.graph_construction_private.dynamic_init_graph_type) raised
            # AttributeError whenever this branch was taken.
            dynamic_init_graph_type = self.config["graph_construction_args"][
                "graph_construction_private"
            ]["dynamic_init_graph_type"]
            if dynamic_init_graph_type is None or dynamic_init_graph_type == "line":
                dynamic_init_topology_builder = None
            elif dynamic_init_graph_type == "dependency":
                dynamic_init_topology_builder = DependencyBasedGraphConstruction
            elif dynamic_init_graph_type == "constituency":
                dynamic_init_topology_builder = ConstituencyBasedGraphConstruction
            else:
                # dynamic_init_topology_builder
                raise RuntimeError("Define your own dynamic_init_topology_builder")
        else:
            raise NotImplementedError("Define your topology builder.")

        dataset = SQuADDataset(
            root_dir=self.config["graph_construction_args"]["graph_construction_share"]["root_dir"],
            pretrained_word_emb_name=self.config["pretrained_word_emb_name"],
            merge_strategy=self.config["graph_construction_args"]["graph_construction_private"][
                "merge_strategy"
            ],
            edge_strategy=self.config["graph_construction_args"]["graph_construction_private"][
                "edge_strategy"
            ],
            max_word_vocab_size=self.config["top_word_vocab"],
            min_word_vocab_freq=self.config["min_word_freq"],
            word_emb_size=self.config["word_emb_size"],
            share_vocab=self.config["share_vocab"],
            seed=self.config["seed"],
            graph_type=graph_type,
            topology_builder=topology_builder,
            topology_subdir=self.config["graph_construction_args"]["graph_construction_share"][
                "topology_subdir"
            ],
            dynamic_graph_type=self.config["graph_construction_args"]["graph_construction_share"][
                "graph_type"
            ],
            dynamic_init_topology_builder=dynamic_init_topology_builder,
            dynamic_init_topology_aux_args={"dummy_param": 0},
            thread_number=self.config["graph_construction_args"]["graph_construction_share"][
                "thread_number"
            ],
            port=self.config["graph_construction_args"]["graph_construction_share"]["port"],
            timeout=self.config["graph_construction_args"]["graph_construction_share"]["timeout"],
        )

        # TODO: use small ratio of the data (Test only)
        dataset.train = dataset.train
        dataset.val = dataset.val
        dataset.test = dataset.test

        self.train_dataloader = DataLoader(
            dataset.train,
            batch_size=self.config["batch_size"],
            shuffle=True,
            num_workers=self.config["num_workers"],
            collate_fn=dataset.collate_fn,
        )
        self.val_dataloader = DataLoader(
            dataset.val,
            batch_size=self.config["batch_size"],
            shuffle=False,
            num_workers=self.config["num_workers"],
            collate_fn=dataset.collate_fn,
        )
        self.test_dataloader = DataLoader(
            dataset.test,
            batch_size=self.config["batch_size"],
            shuffle=False,
            num_workers=self.config["num_workers"],
            collate_fn=dataset.collate_fn,
        )
        self.vocab = dataset.vocab_model
        self.num_train = len(dataset.train)
        self.num_val = len(dataset.val)
        self.num_test = len(dataset.test)
        print(
            "Train size: {}, Val size: {}, Test size: {}".format(
                self.num_train, self.num_val, self.num_test
            )
        )
        self.logger.write(
            "Train size: {}, Val size: {}, Test size: {}".format(
                self.num_train, self.num_val, self.num_test
            )
        )

    def _build_model(self):
        """Instantiate the QG model on the configured device."""
        self.model = QGModel(self.vocab, self.config).to(self.config["device"])

    def _build_optimizer(self):
        """Adam optimizer + early stopping + plateau LR scheduler (maximizing
        the early-stop metric)."""
        parameters = [p for p in self.model.parameters() if p.requires_grad]
        self.optimizer = optim.Adam(parameters, lr=self.config["lr"])
        self.stopper = EarlyStopping(
            os.path.join(self.config["out_dir"], Constants._SAVED_WEIGHTS_FILE),
            patience=self.config["patience"],
        )
        self.scheduler = ReduceLROnPlateau(
            self.optimizer,
            mode="max",
            factor=self.config["lr_reduce_factor"],
            patience=self.config["lr_patience"],
            verbose=True,
        )

    def _build_evaluation(self):
        """Text-generation metrics used for validation and testing."""
        self.metrics = {"BLEU": BLEU(n_grams=[1, 2, 3, 4]), "METEOR": METEOR(), "ROUGE": ROUGE()}

    def train(self):
        """Run the training loop with periodic validation and early stopping.

        Returns:
            The best validation score observed (from the early stopper).
        """
        for epoch in range(self.config["epochs"]):
            self.model.train()
            train_loss = []
            dur = []
            t0 = time.time()
            for i, data in enumerate(self.train_dataloader):
                data = all_to_cuda(data, self.config["device"])
                data["graph_data"] = data["graph_data"].to(self.config["device"])
                oov_dict = None
                if self.use_copy:
                    # Build the extended (copy) vocabulary and re-map targets.
                    oov_dict, tgt = prepare_ext_vocab(
                        data["graph_data"],
                        self.vocab,
                        gt_str=data["tgt_text"],
                        device=self.config["device"],
                    )
                    data["tgt_tensor"] = tgt

                logits, loss = self.model(data, oov_dict=oov_dict, require_loss=True)
                self.optimizer.zero_grad()
                loss.backward()
                if self.config.get("grad_clipping", None) not in (None, 0):
                    # Clip gradients
                    parameters = [p for p in self.model.parameters() if p.requires_grad]
                    torch.nn.utils.clip_grad_norm_(parameters, self.config["grad_clipping"])
                self.optimizer.step()
                train_loss.append(loss.item())
                # pred = torch.max(logits, dim=-1)[1].cpu()
                dur.append(time.time() - t0)
                if (i + 1) % 100 == 0:
                    format_str = (
                        "Epoch: [{} / {}] | Step: {} / {} | Time: {:.2f}s | Loss: {:.4f} |"
                        " Val scores:".format(
                            epoch + 1,
                            self.config["epochs"],
                            i,
                            len(self.train_dataloader),
                            np.mean(dur),
                            np.mean(train_loss),
                        )
                    )
                    print(format_str)
                    self.logger.write(format_str)

            val_scores = self.evaluate(self.val_dataloader)
            # NOTE(review): the LR scheduler is only stepped after epoch 15 --
            # presumably a warm-up period; confirm the magic number.
            if epoch > 15:
                self.scheduler.step(val_scores[self.config["early_stop_metric"]])
            format_str = "Epoch: [{} / {}] | Time: {:.2f}s | Loss: {:.4f} | Val scores:".format(
                epoch + 1, self.config["epochs"], np.mean(dur), np.mean(train_loss)
            )
            format_str += self.metric_to_str(val_scores)
            print(format_str)
            self.logger.write(format_str)

            if epoch > 0 and self.stopper.step(
                val_scores[self.config["early_stop_metric"]], self.model
            ):
                break

        return self.stopper.best_score

    def evaluate(self, dataloader):
        """Greedy-decode ``dataloader`` and score predictions vs. references."""
        self.model.eval()
        with torch.no_grad():
            pred_collect = []
            gt_collect = []
            for data in dataloader:
                data = all_to_cuda(data, self.config["device"])
                data["graph_data"] = data["graph_data"].to(self.config["device"])
                if self.use_copy:
                    oov_dict = prepare_ext_vocab(
                        data["graph_data"], self.vocab, device=self.config["device"]
                    )
                    ref_dict = oov_dict
                else:
                    oov_dict = None
                    ref_dict = self.vocab.out_word_vocab

                prob = self.model(data, oov_dict=oov_dict, require_loss=False)
                pred = prob.argmax(dim=-1)

                pred_str = wordid2str(pred.detach().cpu(), ref_dict)
                pred_collect.extend(pred_str)
                gt_collect.extend(data["tgt_text"])

            scores = self.evaluate_predictions(gt_collect, pred_collect)
            return scores

    def translate(self, dataloader):
        """Beam-search-decode ``dataloader`` and score predictions."""
        self.model.eval()
        with torch.no_grad():
            pred_collect = []
            gt_collect = []
            for data in dataloader:
                data = all_to_cuda(data, self.config["device"])
                data["graph_data"] = data["graph_data"].to(self.config["device"])
                if self.use_copy:
                    oov_dict = prepare_ext_vocab(
                        data["graph_data"], self.vocab, device=self.config["device"]
                    )
                    ref_dict = oov_dict
                else:
                    oov_dict = None
                    ref_dict = self.vocab.out_word_vocab

                batch_gd = self.model.encode_init_node_feature(data)
                prob = self.model.g2s.encoder_decoder_beam_search(
                    batch_gd, self.config["beam_size"], topk=1, oov_dict=oov_dict
                )

                # Pack the per-sample best beams into a fixed-size id matrix,
                # padded with EOS.
                pred_ids = (
                    torch.zeros(
                        len(prob),
                        self.config["decoder_args"]["rnn_decoder_private"]["max_decoder_step"],
                    )
                    .fill_(ref_dict.EOS)
                    .to(self.config["device"])
                    .int()
                )
                for i, item in enumerate(prob):
                    item = item[0]
                    seq = [j.view(1, 1) for j in item]
                    seq = torch.cat(seq, dim=1)
                    pred_ids[i, : seq.shape[1]] = seq

                pred_str = wordid2str(pred_ids.detach().cpu(), ref_dict)
                pred_collect.extend(pred_str)
                gt_collect.extend(data["tgt_text"])

            scores = self.evaluate_predictions(gt_collect, pred_collect)
            return scores

    def test(self):
        """Load the best saved checkpoint and decode/score the test set."""
        # restored best saved model
        self.model = torch.load(
            os.path.join(self.config["out_dir"], Constants._SAVED_WEIGHTS_FILE)
        ).to(self.config["device"])

        t0 = time.time()
        scores = self.translate(self.test_dataloader)
        dur = time.time() - t0
        format_str = "Test examples: {} | Time: {:.2f}s | Test scores:".format(self.num_test, dur)
        format_str += self.metric_to_str(scores)
        print(format_str)
        self.logger.write(format_str)

        return scores

    def evaluate_predictions(self, ground_truth, predict):
        """Apply every configured metric; BLEU is expanded into BLEU_1..BLEU_4."""
        output = {}
        for name, scorer in self.metrics.items():
            score = scorer.calculate_scores(ground_truth=ground_truth, predict=predict)
            if name.upper() == "BLEU":
                for i in range(len(score[0])):
                    output["BLEU_{}".format(i + 1)] = score[0][i]
            else:
                output[name] = score[0]

        return output

    def metric_to_str(self, metrics):
        """Format a metric dict as ' NAME = value,...' (trailing comma stripped)."""
        format_str = ""
        for k in metrics:
            format_str += " {} = {:0.5f},".format(k.upper(), metrics[k])

        return format_str[:-1]
def main(config):
    """Seed RNGs, pick the device, then train and test a ModelHandler.

    Args:
        config: fully-merged config dict (template + model + task configs).

    Returns:
        (val_score, test_scores): best validation score and the test metrics.
    """
    # configure
    np.random.seed(config["seed"])
    torch.manual_seed(config["seed"])

    if not config["no_cuda"] and torch.cuda.is_available():
        print("[ Using CUDA ]")
        config["device"] = torch.device("cuda" if config["gpu"] < 0 else "cuda:%d" % config["gpu"])
        cudnn.benchmark = True
        torch.cuda.manual_seed(config["seed"])
    else:
        config["device"] = torch.device("cpu")

    print("\n" + config["out_dir"])

    runner = ModelHandler(config)
    t0 = time.time()

    val_score = runner.train()
    test_scores = runner.test()

    # Bug fix: the runtime was previously computed twice (once for print, once
    # for the log), so the two reported values could differ. Compute it once.
    runtime = time.time() - t0
    print("Total runtime: {:.2f}s".format(runtime))
    runner.logger.write("Total runtime: {:.2f}s\n".format(runtime))
    runner.logger.close()

    return val_score, test_scores
def wordid2str(word_ids, vocab):
    """Convert a batch of word-id sequences into whitespace-joined strings.

    Decoding of each row stops at the first EOS or PAD id.

    Args:
        word_ids: 2-D tensor of shape (batch, seq_len) holding word ids.
        vocab: vocabulary object exposing ``EOS``, ``PAD`` and ``getWord(id)``.

    Returns:
        A list of ``batch`` decoded strings.
    """
    # Bug fix: the original used `assert cond, print(...)`, whose message is
    # print()'s return value (None); use a real assertion message instead.
    assert len(word_ids.shape) == 2, "expected a 2-D id tensor, got shape {}".format(
        word_ids.shape
    )
    ret = []
    for i in range(word_ids.shape[0]):
        id_list = word_ids[i, :]
        ret_inst = []
        for j in range(id_list.shape[0]):
            # Stop at the first end-of-sequence or padding id.
            if id_list[j] == vocab.EOS or id_list[j] == vocab.PAD:
                break
            token = vocab.getWord(id_list[j])
            ret_inst.append(token)
        ret.append(" ".join(ret_inst))
    return ret
def all_to_cuda(data, device=None):
    """Move tensors found in *data* to *device*.

    A bare tensor is moved directly; for a list or dict, only the top-level
    tensor elements/values are moved in place (no recursion). Any other type
    is returned unchanged.
    """
    if isinstance(data, torch.Tensor):
        return to_cuda(data, device)
    if isinstance(data, list):
        for idx, item in enumerate(data):
            if isinstance(item, torch.Tensor):
                data[idx] = to_cuda(item, device)
    elif isinstance(data, dict):
        for key, val in data.items():
            if isinstance(val, torch.Tensor):
                data[key] = to_cuda(val, device)
    return data
################################################################################
# ArgParse and Helper Functions #
################################################################################
def get_args():
    """Parse command-line arguments and return them as a plain dict."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-task_config", "--task_config", required=True, type=str, help="path to the config file"
    )
    parser.add_argument(
        "-g2s_config", "--g2s_config", required=True, type=str, help="path to the config file"
    )
    parser.add_argument("--grid_search", action="store_true", help="flag: grid search")
    return vars(parser.parse_args())
def print_config(config):
    """Pretty-print the configuration, one left-padded key per line."""
    banner = "**************** MODEL CONFIGURATION ****************"
    print(banner)
    for key in sorted(config.keys()):
        padded_key = "{}".format(key) + (" " * (24 - len(key)))
        print("{} --> {}".format(padded_key, config[key]))
    print(banner)
def grid_search_main(config):
    """Grid-search over every config entry whose value is a list.

    Each candidate configuration is run through ``main``; the configuration
    with the best early-stop test metric is reported and logged.
    """
    # Every list-valued entry defines one searched hyperparameter.
    search_keys = []
    log_path = config["out_dir"]
    for key, value in config.items():
        if isinstance(value, list):
            search_keys.append(key)
            log_path += "_{}_{}".format(key, value)
    logger = Logger(log_path, config=config, overwrite=True)

    best_config = None
    best_score = -1
    best_scores = None
    for candidate in grid(config):
        # Give each candidate its own output directory.
        for key in search_keys:
            candidate["out_dir"] += "_{}_{}".format(key, candidate[key])
        val_score, test_scores = main(candidate)
        if best_score < test_scores[candidate["early_stop_metric"]]:
            best_score = test_scores[candidate["early_stop_metric"]]
            best_scores = test_scores
            best_config = candidate
            print("Found a better configuration: {}".format(best_scores))
            logger.write("Found a better configuration: {}".format(best_scores))

    print("\nBest configuration:")
    logger.write("\nBest configuration:")
    for key in search_keys:
        print("{}: {}".format(key, best_config[key]))
        logger.write("{}: {}".format(key, best_config[key]))

    print("Best score: {}".format(best_scores))
    logger.write("Best score: {}\n".format(best_scores))
    logger.close()
if __name__ == "__main__":
if platform.system() == "Darwin":
multiprocessing.set_start_method("spawn")
cfg = get_args()
task_args = get_yaml_config(cfg["task_config"])
g2s_args = get_yaml_config(cfg["g2s_config"])
# load Graph2Seq template config
g2s_template = get_basic_args(
graph_construction_name=g2s_args["graph_construction_name"],
graph_embedding_name=g2s_args["graph_embedding_name"],
decoder_name=g2s_args["decoder_name"],
)
update_values(to_args=g2s_template, from_args_list=[g2s_args, task_args])
print_config(g2s_template)
if cfg["grid_search"]:
grid_search_main(g2s_template)
else:
main(g2s_template)
| 39.207836 | 100 | 0.58866 |
c6872bc2a138d120ee83352ee94e5ce83cd98d6b | 2,754 | py | Python | Previous_State_On_Repo/StrokeRecoveryOffline/Data/code/txtcreator.py | rohun-tripati/pythonRepo | 91a7d536f7be05adc15e4d5add0a8a4a08c28c62 | [
"Unlicense"
] | 1 | 2018-06-25T19:20:48.000Z | 2018-06-25T19:20:48.000Z | Previous_State_On_Repo/StrokeRecoveryOffline/Data/code/txtcreator.py | rohun-tripati/pythonRepo | 91a7d536f7be05adc15e4d5add0a8a4a08c28c62 | [
"Unlicense"
] | null | null | null | Previous_State_On_Repo/StrokeRecoveryOffline/Data/code/txtcreator.py | rohun-tripati/pythonRepo | 91a7d536f7be05adc15e4d5add0a8a4a08c28c62 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
import os, sys, time
import Image
import numpy as np
import width as WT
from copy import deepcopy
def update(top, bottom, left, right, j, i):
    """Expand a bounding box to include row j and column i.

    Returns the updated (top, bottom, left, right) tuple.
    """
    return min(top, j), max(bottom, j), min(left, i), max(right, i)
def howmuchtocrop(image, path):
    """Compute the tight bounding box of the black (0-valued) pixels.

    `image` must expose PIL-style `getdata()` and `size` (width, height).
    `path` is unused; it is kept for interface compatibility with callers.

    Returns [left, top, right, bottom] in pixel coordinates.
    """
    pixels = list(image.getdata())
    width, height = image.size
    # Start from an "empty" box that the first black pixel will snap onto.
    top, bottom, left, right = height, 0, width, 0
    # Pixels are stored row-major: row `row` spans indices [row*width, (row+1)*width).
    for row in range(height):
        for col in range(width):
            if pixels[row * width + col] == 0:
                top = min(top, row)
                bottom = max(bottom, row)
                left = min(left, col)
                right = max(right, col)
    return [left, top, right, bottom]
def imgtotxt (path, outpath,k, debug =False):
	# Convert a scanned stroke image (bmp/tif/tiff) into a 0/1 text matrix:
	# crop to the ink bounding box (with a 1-pixel margin), iteratively thin
	# the strokes, then write the binarized grid to `outpath`.
	# The original image dimensions (rows, cols) are first printed to the
	# already-open file object `k`.
	# NOTE: this is Python 2 code (`print >>` statements).
	if debug == True : print "In imgtotxt function, path and outpath == ", path, outpath
	if not ".bmp" in path and not ".tiff" in path and not ".tif" in path:
		print "The image obtained is not of format tiff or bmp or tif. Check path == ", path
		print "Exiting"
		sys.exit(0)
	image = Image.open(path)
	if ".bmp" in path:
		# BMPs are converted to 8-bit grayscale ("L" mode)
		image = image.convert("L")
	size = image.size
	# size is (width, height); log as (rows, cols)
	print >> k, size[1], size[0]
	corners = howmuchtocrop(image, path)
	#sequence in box of crop -> left, upper, right, and lower pixel
	# crop with a 1-pixel margin around the ink box, clamped to the image
	cropimage = image.crop( ( max(0, corners[0] - 1), max(0, corners[1] -1), min(size[0], corners[2] + 2), min(size[1], corners[3] + 2) ) )
	size = cropimage.size
	pixels = list(cropimage.getdata())
	# (disabled) optional resize to a fixed height before thinning
	# fixedheight = 400
	# fwidth = int( float(size[0]) * float(fixedheight)/float(size[1]) )
	# resizeimage = cropimage.resize( ( fwidth , fixedheight), Image.ANTIALIAS )
	# resizeimage.show()
	# pixels = list(resizeimage.getdata())
	# size = resizeimage.size
	# Iteratively thin the strokes until a fixed point (at most 100 passes).
	lastcopy = deepcopy(pixels)
	for x in range(100):
		pixels = WT.thinnthick(pixels, size, "thin", False)
		if lastcopy == pixels:
			#print "Thin exited at x == ", x
			break
		else:
			lastcopy = deepcopy(pixels)
	# (disabled) optional thickening pass
	# lastcopy = deepcopy(pixels)
	# for x in range(100):
	#	pixels = WT.thinnthick(pixels, size, "thick", False)
	#	if lastcopy == pixels:
	#		print "Thick exited at x == ", x
	#		break
	#	else:
	#		lastcopy = deepcopy(pixels)
	# (disabled) optional pruning pass
	# lastcopy = deepcopy(pixels)
	# for x in range(100):
	#	pixels = WT.thinnthick(pixels, size, "prune", False)
	#	if lastcopy == pixels:
	#		print "Prune exited at x == ", x
	#		break
	#	else:
	#		lastcopy = deepcopy(pixels)
	# Binarize: ink (value < 255) -> '0', background -> '1', one row per line.
	string = '';
	for j in range(size[1]):
		for i in range(size[0]):
			if pixels[j * size[0] + i] < 255:
				string += '0'
			else:
				string += '1'
		string += "\n"
	printout = open(outpath,"w")
	print >> printout, string
	printout.close()
25280c9ae3f41265f362a1e78f6d06ea4121e0b1 | 735 | py | Python | Data Structures and Algorithms/LeetCode Algo Solutions/EASY DIFFICULTY PROBLEMS/SubtractProductAndSumOfDigits.py | akkik04/Python-DataStructures-and-Algorithms | 8db63173218e5a9205dbb325935c71fec93b695c | [
"MIT"
] | 1 | 2022-01-22T18:19:07.000Z | 2022-01-22T18:19:07.000Z | Data Structures and Algorithms/LeetCode Algo Solutions/EASY DIFFICULTY PROBLEMS/SubtractProductAndSumOfDigits.py | akkik04/Python-DataStructures-and-Algorithms | 8db63173218e5a9205dbb325935c71fec93b695c | [
"MIT"
] | null | null | null | Data Structures and Algorithms/LeetCode Algo Solutions/EASY DIFFICULTY PROBLEMS/SubtractProductAndSumOfDigits.py | akkik04/Python-DataStructures-and-Algorithms | 8db63173218e5a9205dbb325935c71fec93b695c | [
"MIT"
] | null | null | null | # SUBTRACT THE PRODUCT AND SUM OF DIGITS OF AN INTEGER LEETCODE SOLUTION:
class Solution(object):
def subtractProductAndSum(self, n):
# creating a list of the digits for 'n'.
x = map(int, str(n))
# initializing variables to track the product and sum of the digits.
digit_product = 1
digit_sum = 0
# creating a for-loop to iterate for the list of digits.
for i in x:
# code to multiply each element into the product.
digit_product *= i
# code to add each element into the sum.
digit_sum += i
# returning the difference between the product and sum.
return digit_product - digit_sum | 30.625 | 77 | 0.6 |
6b1ec0d746f5d1bc6c5959e4646512ba4c4036a0 | 1,332 | py | Python | dynamic_programming/longest_increasing_subsequence_o(nlogn).py | MKiperszmid/Python | 6b368e6ab2fa1a839b029fd45e127521bbe76005 | [
"MIT"
] | 1 | 2020-08-28T18:25:45.000Z | 2020-08-28T18:25:45.000Z | dynamic_programming/longest_increasing_subsequence_o(nlogn).py | MKiperszmid/Python | 6b368e6ab2fa1a839b029fd45e127521bbe76005 | [
"MIT"
] | 1 | 2020-08-28T18:24:31.000Z | 2020-08-28T19:35:47.000Z | dynamic_programming/longest_increasing_subsequence_o(nlogn).py | MKiperszmid/Python | 6b368e6ab2fa1a839b029fd45e127521bbe76005 | [
"MIT"
] | null | null | null | #############################
# Author: Aravind Kashyap
# File: lis.py
# comments: This programme outputs the Longest Strictly Increasing Subsequence in
# O(NLogN) Where N is the Number of elements in the list
#############################
from typing import List
def CeilIndex(v, l, r, key):  # noqa: E741
    """Binary search: smallest index in (l, r] whose value is >= key.

    Assumes v is sorted ascending on that range and that v[r] >= key.
    """
    while r - l > 1:
        mid = l + (r - l) // 2
        if v[mid] < key:
            l = mid  # noqa: E741
        else:
            r = mid
    return r
def LongestIncreasingSubsequenceLength(v: List[int]) -> int:
    """Length of the longest strictly increasing subsequence, in O(N log N).

    Maintains `tail`, where tail[i] is the smallest possible tail value of an
    increasing subsequence of length i + 1.

    >>> LongestIncreasingSubsequenceLength([2, 5, 3, 7, 11, 8, 10, 13, 6])
    6
    >>> LongestIncreasingSubsequenceLength([])
    0
    >>> LongestIncreasingSubsequenceLength([0, 8, 4, 12, 2, 10, 6, 14, 1, 9, 5, 13, 3,
    ... 11, 7, 15])
    6
    >>> LongestIncreasingSubsequenceLength([5, 4, 3, 2, 1])
    1
    """
    if not v:
        return 0

    tail = [0] * len(v)
    tail[0] = v[0]
    length = 1
    for x in v[1:]:
        if x < tail[0]:
            # New smallest value: best candidate tail for length-1 sequences.
            tail[0] = x
        elif x > tail[length - 1]:
            # Extends the longest subsequence found so far.
            tail[length] = x
            length += 1
        else:
            # Replace the first tail that is >= x to keep tails minimal.
            tail[CeilIndex(tail, -1, length - 1, x)] = x
    return length
if __name__ == "__main__":
import doctest
doctest.testmod()
| 23.785714 | 86 | 0.487237 |
d4f6af1a992873e2fd809a288fb0ed09340a71a0 | 35,200 | py | Python | py/google_drive_api.py | YoelMonsalve/GoogleDrivePythonLibrary | 00ac381a362a0bc885487c23ca185a219baf1944 | [
"MIT"
] | null | null | null | py/google_drive_api.py | YoelMonsalve/GoogleDrivePythonLibrary | 00ac381a362a0bc885487c23ca185a219baf1944 | [
"MIT"
] | null | null | null | py/google_drive_api.py | YoelMonsalve/GoogleDrivePythonLibrary | 00ac381a362a0bc885487c23ca185a219baf1944 | [
"MIT"
] | null | null | null | from __future__ import print_function
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from googleapiclient.http import MediaFileUpload
import sys # sys.path
import os # os.path
import stat # S_IRUSR
import re # regex
from datetime import datetime
import threading
from time import sleep
from pprint import pprint
# this is to include another sources in this module
sys.path.append(os.path.dirname(__file__))
from _enum import MIME_TYPES
__author__ = "Yoel Monsalve"
__mail__ = "yymonsalve@gmail.com"
__date__ = "July, 2021"
__modified__ = "2021.08.01"
__github__ = "github.com/YoelMonsalve/GoogleDrivePythonLibrary"
"""
Requirements:
pip3 install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib
"""
# OAuth scopes requested by default. If modifying these scopes, delete the
# existing token file so the authorization flow runs again.
# Q (yoel): Should it be drive.metadata only, instead of drive.metadata.readonly ???
SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly', 'https://www.googleapis.com/auth/drive']

# MIME type strings, pulled from the _enum.MIME_TYPES definitions.
MIME_TYPE_FOLDER = MIME_TYPES.FOLDER['mime']
MIME_TYPE_FILE = MIME_TYPES.FILE['mime']
MIME_TYPE_SHEET = MIME_TYPES.SPREADSHEET['mime']
MIME_TYPE_DOCUMENT = MIME_TYPES.DOCUMENT['mime']
MIME_TYPE_PHOTO = MIME_TYPES.PHOTO['mime']
class GoogleDriveAPI(object):
"""Custom class to easily manage the Google Drive API, coded in Python
@author Yoel Monsalve
@mail yymonsalve@gmail.com
References:
===============================================================
https://developers.google.com/drive/api/v3/
https://developers.google.com/drive/api/v3/quickstart/python
https://developers.google.com/drive/api/v3/about-files
"""
    def __init__(self):
        """Set up default (empty) state; call init_service() to authenticate."""
        # Name used as a prefix in error messages.
        self.name = "GoogleDriveAPI"
        self.token_file = ''     # the file containing the token, it will be generated automatically
                                 # using the authentication flow & client secret
        self.client_secret = ''  # the client secret file (download it from the Google Cloud Console page
                                 # https://console.cloud.google.com/apis/credentials?project=xxx-yyy)
        self.service = None      # the Google API service, created by init_service()

        # MIME types (module-level constants mirrored as instance attributes)
        self.MIME_TYPE_FOLDER = MIME_TYPE_FOLDER
        self.MIME_TYPE_FILE = MIME_TYPE_FILE
        self.MIME_TYPE_SHEET = MIME_TYPE_SHEET
        self.MIME_TYPE_DOCUMENT = MIME_TYPE_DOCUMENT
        self.MIME_TYPE_PHOTO = MIME_TYPE_PHOTO

        # SCOPES requested when building credentials
        self.SCOPES = SCOPES
def init_service(self, scopes = []):
"""This initializes the API service, using the client authentication
@param scopes The scopes to create the credentials for, if null then takes self.SCOPES
"""
creds = None
# The file token.json stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists(self.token_file):
if scopes:
# allows you to define other scopes each time you invoke the service
self.SCOPES = scopes
if not self.SCOPES:
raise Exception(f"{self.name}.init_service failed: No SCOPE defined")
creds = Credentials.from_authorized_user_file(self.token_file, self.SCOPES)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
# refresh token
creds.refresh(Request())
else:
# create the token by the first time, using the authorization flow
if not self.client_secret:
raise Exception(f"{self.name}.init_service failed: trying to create a new token, and there is no a client secret")
flow = InstalledAppFlow.from_client_secrets_file(
self.client_secret, # the client secret of the App
self.SCOPES
)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open(self.token_file, 'w') as token:
token.write(creds.to_json())
try:
service = build('drive', 'v3', credentials=creds)
self.service = service # service created!
except Exception as e:
raise Exception(f"{self.name}.init_service failed: {str(e)}")
def list_all_files(self, query='', attr=''):
"""Based in the code from: https://developers.google.com/drive/api/v3/search-files
Reference: https://developers.google.com/drive/api/v3/reference/files/list
@param query String. The query to search files for, e.g. "name='foo.txt'"
@param fields (optional) List. A list of metadata attributes to be retrieved, e.g. ['name', 'size', 'mimeType']
@return On success, a list of dictionaries describing each file found, as retrieved by the method service.files().list().
If not found, retrieves [].
"""
if not self.service or not query: return
data = []
page_token = None
while True:
if not attr or not (type(attr) is list):
req_fields = 'nextPageToken, files(id, name, mimeType, parents)'
else:
req_fields = 'nextPageToken, files(' + ','.join(attr) + ')'
#print("query:", query)
response = self.service.files().list(
q = query,
pageSize = 20, # The maximum number of files to return per page. Partial or empty result pages are possible
# even before the end of the files list has been reached. Acceptable values are 1 to 1000, inclusive.
# (Default: 100)
spaces = 'drive', # A comma-separated list of spaces to query within the corpus. Supported values are 'drive',
# 'appDataFolder' and 'photos'.
fields = req_fields,
pageToken = page_token
).execute()
n_max = 20 # LIMIT to n_max items ............. QUIT LATER!!!
for file in response.get('files', []):
data.append(file)
if n_max == 0: break
n_max -= 1
page_token = response.get('nextPageToken', None)
if page_token is None:
break
return data
def list_directory(self, path = '', attr=[], fileId=''):
    """List the content of a directory. The paths '/', and '' (empty) are allowed to refer
    to the root folder.
    @param path String. The path to scan for.
    @param fileId (optional) String. If given, this overwrites path.
    @param attr (optional) List. The attributes to be retrieved for the entries.
    @return A list of dicts, each containing basic attributes for the entry ('name', 'id',
        'mimeType','size','modifiedTime','parents')
    @raise Exception, if the path does not exist, or it is not a directory.
    """
    if not self.service:
        raise Exception(f"{self.name}.list_directory: API service not started")
    if not fileId:
        # resolve the textual path into a folder ID
        ROOT_ID = "root"
        if not path or path == '/':
            parentId = ROOT_ID
        else:
            parent = self.getFileId(path, attr=['mimeType'])
            if not parent:
                raise Exception(f"{self.name}.list_directory: File not found: '{path}'")
            elif parent.get('mimeType','') != MIME_TYPE_FOLDER:
                raise Exception(f"{self.name}.list_directory: It is not a directory: '{path}'")
            parentId = parent['id']
    else:
        # explicit file ID takes precedence over the path
        parentId = fileId
    # all direct children of the resolved folder
    query = f"'{parentId}' in parents"
    if not attr:
        # basic metadata
        # NOTE(review): 'attr = attr=[...]' is a double assignment; it works,
        # but looks like a typo for a single '=' — confirm and simplify
        attr = attr=['name','id','mimeType', 'size', 'modifiedTime','parents']
    return self.list_all_files(query=query, attr=attr)
def list_folders(self, name = '', parentId = ''):
"""List all folders with a specific name. To look sub-folders into a specific
parent folder, the paramenter parentID is the ID of such a parent.
If no parent ID is given, it will list folders under the root location
"""
if not self.service: return
ROOT_ID = 'root'
query = f"mimeType='{self.MIME_TYPE_FOLDER}'"
if name: query += f" and name='{name}'"
if parentId and parentId != '/':
query += f" and '{parentId}' in parents"
else:
query += f" and '{ROOT_ID}' in parents"
return self.list_all_files(query)
def list_files(self, name = '', mimeType = '', parentId = ''):
    """List all files with a specific name into a specific location.
    If not mimeType is given, will list all entries that are not folders.
    The parentId is the ID of the folder into which to look the files.
    If no parent ID is given, it will list files under the root location.
    """
    if not self.service: return
    if not mimeType:
        # no type given: match every entry that is not a folder
        query = f"mimeType!='{self.MIME_TYPE_FOLDER}'"
    else:
        query = f"mimeType='{mimeType}'"
    if name:
        # removing leading '/'
        if name[0] == '/': name = name[1:]
        query += f" and name='{name}'"
    ROOT_ID = 'root'
    if parentId and parentId != '/':
        query += f" and '{parentId}' in parents"
    else:
        # default to the Drive root
        query += f" and '{ROOT_ID}' in parents"
    return self.list_all_files(query)
def delete_files(self, name = '', mimeType = '', parentId = ''):
    """Delete all files with a specific name into a specific location.
    The parameters name, mimeType and parentId are like in the method
    list_files().

    Each match is confirmed interactively on stdin; answering 'c'
    cancels the remaining deletions.
    """
    if not self.service: return
    files = self.list_files(name=name, mimeType=mimeType, parentId=parentId)
    if not files: return
    print(f"{len(files)} results found")
    for file in files:
        ans = input(f"delete \'{file['name']}\' [y]es/[n]o/[c]ancel? This action cannot be undone: ")
        if ans.upper() == 'Y':
            # NOTE(review): 'id' shadows the builtin id() inside this loop
            id = file['id']
            self.service.files().delete(fileId=id).execute()
        elif ans.upper() == 'C':
            # abort the whole batch; any other answer just skips this file
            break
def delete_folders(self, name = '', parentId = ''):
    """Interactively delete folders named *name* under *parentId*.

    Thin wrapper over delete_files() with the MIME type fixed to the
    Drive folder type; parameters behave as in list_files().
    """
    return self.delete_files(
        name=name,
        mimeType=MIME_TYPE_FOLDER,
        parentId=parentId,
    )
def remove(self, path = '', prompt = True, silent = False):
    """Remove a file, given its path. The file can be a normal file, or a directory.
    If it is a directory, the entire folder and all its content will be removed (be careful!)
    The method recognizes paths like '/path/to/folder', or '/path/to/folder/foo.txt'
    @param path String. The path to the file to be removed.
    @param prompt Bool. If True, ask for interactive confirmation before deleting.
    @param silent Bool. If True, don't raise Exception if the file doesn't exist.
        Implies prompt = False.
    @return None.
    @raise Exception, if the file does not exist (unless silent is True).
    """
    if silent: prompt = False  # silent implies not prompt
    fileId = self.getFileId(path)
    if not fileId:
        if silent:
            return
        else:
            raise Exception(f"{self.name}.remove: File not found '{path}'")
    if prompt:
        # interactive confirmation; any answer other than 'y' keeps the file
        ans = input(f"delete '{path}' [y]es/[n]o? This action cannot be undone: ")
        if ans.upper() == 'Y':
            self.service.files().delete(fileId=fileId).execute()
    else:
        self.service.files().delete(fileId=fileId).execute()
def upload_file(self, origin = '', filename = '', originMimeType = '', destMimeType = '',
dest = ''):
"""Upload a file from the local machine up to Drive.
The file will have the new name <filename> if given, or else the same
name as in origin.
If <dest> is passed, the uploaded file will be moved to that folder. Otherwise,
the file will remain in the root folder of Drive.
@param origin String. The path of the file to be uploaded, e.g. 'path/to/file/foo.txt'
@param originMimeType String. The MIME type of the uploaded file, e.g. 'text/csv'
@param filename (optional) String. The name to be given to the new file into Drive.
@param destMimeType (optonal) String. The MIME type to the uploaded file, e.g. MIME_TYPE_DOCUMENT, etc.
@param dest (optional) String. The folder to move the uploaded the file in the destination.
@return String. The uploaded file ID.
"""
if not origin or not filename: return
# cheching if the file exists
if not os.path.exists(origin) or not os.path.isfile(origin):
raise Exception(f"{self.name}.upload_file: File not found")
# and is readable
elif not (os.stat(origin).st_mode & stat.S_IRUSR):
raise Exception(f"{self.name}.upload_file: File is not readable (check permissions)")
file_metadata = {
'name': filename,
}
if destMimeType: file_metadata['mimeType'] = destMimeType
media = MediaFileUpload(
origin,
#mimetype='text/csv',
mimetype=originMimeType,
resumable=True)
file = self.service.files().create(
body=file_metadata,
media_body=media,
fields='id').execute()
fileId = file.get('id')
if fileId and dest:
folderId = self.getFileId(dest)
if folderId:
self.moveToFolderById(fileId, folderId)
return fileId
def searchFile(self, path = ''):
"""This is shorcut method to getFileId, with a predefined set of attributes
@param path String. The path to search for.
@return Metadata (dict) of the file, or {} if not found.
"""
return self.getFileId(path, attr=['id', 'name', 'size', 'mimeType', 'modifiedTime', 'parents', 'md5Checksum'])
def getFileId(self, path='', attr=[]):
"""Get the file ID from a Drive path, e.g.: dir1/dir2/file.txt.
Normally, this method returns only the ID of the file. But if you set a list
of attributes (e.g. attr = ['mimeType', 'size']), then those attributes (plus the ID)
will be appended to the response and returned as in by the method get().
NOTE: if the name contains '/', you must escape it with '\/'
e.g. 'file/a' -> 'file\/a'
@param path String. The path of the Drive file to be located.
@param attr (optional) List. A list of attributes to be retrieved if success.
@return If no attr is passed, returns the ID of the file on success, or an empty string
on failure.
If attr is passed, return a dict of attributes on success, or {} on failure.
"""
# remove dealing '/', e.g. '/path/to/my/folder'
if path[0] == '/': path = path[1:]
if not path: return
# recognizing the escape character \/
# bug 2021.08.1
# as wildcard ('*') is not allowed in a file name, we will replace
# temporarily the '\/' by '*', then split by '/' and newly
# replace back '*' by '/'
if '*' in path:
raise Exception(f"{self.name}.getFileId: path cannot contain wildcard characters ('*')")
path = path.replace("\\/", '*') # using "\\/" to avoid ambiguity
folders = path.split('/')
if folders[-1] == '':
# if the path is ended with '/', e.g. 'path/to/my/folder/'
folders = folders[:-1]
if not folders: return
parentId = "root" # start search in the root folder
for folder in folders: # descend through each folder in the path
# converting '*' into '/'
folder = folder.replace("*", "/")
if not parentId:
# not found
return '' if not attr else {}
q = f"name='{folder}' and '{parentId}' in parents"
# === debug ===
#print(q)
r = self.list_all_files(query=q, attr='id')
# === debug ===
#pprint(r)
if r:
# by the next iteration, take the current folder as the parent
parentId = r[0].get('id', '')
else:
parentId = ''
if not parentId:
# not found
return '' if not attr else {}
# if not attr are given, return only the file ID. Otherwise, return
# a dict of attributes (as retrived by the API method get())
if not attr:
return parentId
else:
fields = 'id'
for a in attr:
fields += f", {a}"
r = self.service.files().get(fileId=parentId, fields=fields).execute()
return r
def getMimeTypeById(self, fileId = ''):
"""Get the MIME type of the file, given its ID
@param fileId String. The file ID.
@return String. The MIME type.
"""
if not fileId: return None
file = self.service.files().get(
fileId=fileId,
fields='mimeType'
).execute()
if file:
return file.get('mimeType')
else:
return None
def getFileNameById(self, fileId = ''):
"""Get the file name, given its ID
@param fileId String. The file ID.
@return String. The file name.
"""
if not fileId: return None
file = self.service.files().get(
fileId=fileId,
fields='name'
).execute()
if file:
return file.get('name')
else:
return None
def moveToFolderById(self, fileId = '', folderId = ''):
    """Move a file to a folder into Drive.
    https://developers.google.com/drive/api/v3/folder#python
    By using this in conjunction with getFileId, you can build sentences like
    - API.moveToFolder(API.getFileId('foo.txt'), API.getFileId('path/to/move'))
    @param fileId String. The ID of the file to be moved (see method getFileId()).
    @param folderId String. The ID of the folder to which move the file.
    @return None
    @raise Exception, if the destination is not a folder.
    """
    if not fileId or not folderId:
        # NOTE: .... maybe raise an Exception instead ?
        return
    drive_service = self.service
    # verifying the destination is a folder
    folder = drive_service.files().get(
        fileId=folderId,
        fields='mimeType'
    ).execute()
    if folder.get('mimeType') != MIME_TYPE_FOLDER:
        raise Exception(f"{self.name}.moveToFolderById: Destination is not a MIME type folder")
    # Retrieve the existing parents to remove
    file = drive_service.files().get(
        fileId=fileId,
        fields='parents'
    ).execute()
    previous_parents = ",".join(file.get('parents'))
    # Move the file to the new folder: add the new parent and drop
    # all previous ones in a single update call
    file = drive_service.files().update(
        fileId=fileId,
        addParents=folderId,
        removeParents=previous_parents,
        fields='id, parents'
    ).execute()
def moveToFolder(self, filename='', foldername=''):
"""Move a file to a folder, but using paths instead of ID's.
E.g.: moveToFolder('foo.txt', 'path/to/move')
@param filename String. The name of the file to be moved.
@param foldername String. The name of the folder to which move the file.
@return None
"""
if not filename or not foldername: return
fileId = self.getFileId(filename)
folderId = self.getFileId(foldername)
if not fileId or not folderId: return # not found
self.moveToFolderById(fileId, folderId)
def copyToFolderById(self, fileId = '', folderId = ''):
"""Make a copy of a file, into another folder.
https://stackoverflow.com/questions/64347544/how-to-rename-a-google-spreadsheet-via-google-drive-api-in-python
By using this in conjunction with getFileId, you can build sentences like
- API.copyToFolder(API.getFileId('foo.txt'), API.getFileId('path/to/move'))
@param fileId String. The ID of the file to be copied (see method getFileId()).
@param folderId String. The ID of the folder to which copy the file.
@return None
"""
if not fileId or not folderId: return
copyId = self.service.files().copy(fileId=fileId).execute()["id"]
self.moveToFolder(copyId, folderId)
def copyToFolder(self, filename='', foldername=''):
"""Make a copy of a file into a folder, but using paths instead of ID's.
e.g.: copyToFolder('foo.txt', 'path/to/move')
@param filename String. The name of the file to be copied (see method getFileId()).
@param foldername String. The name of the folder to which copy the file.
@return None
"""
if not filename or not foldername: return
fileId = getFileId(filename)
folderId = getFileId(foldername)
if not fileId or not folderId: return # not found
self.moveToFolderById(fileId, folderId)
def rename(self, oldFilename='', newFilename=''):
"""Rename a file. The file keeps holding in the same folder/parent.
To move the file no another folder, plus changing the filename, use first
the method moveToFolder(), then rename().
e.g.: rename('path/to/file/foo.txt', 'foo2.txt')
the result is the file 'path/to/file/foo2.txt'
@param oldFilename String. The file to be renamed.
@param newFilename String. The new name (in the same folder/parent where it was)
@return None
@raises Exception, if the file named oldFilename does not exist.
"""
if not oldFilename or not newFilename: return
fileId = getFileId(oldFilename)
if not fileId:
# not found
raise Exception(f"{self.name}.rename: File not found")
body = {"name": newFilename}
self.service.files().update(fileId=fileId, body=body).execute()
def _parse_dest_path(self, path = ''):
    """This is an auxiliary function that helps to parse a path as a folderId, plus
    optionally a filename.
    Examples:
        a) path/to/folder will be parsed as a folderId (if that folder actually exists)
        b) path/to/folder/ is the same than before
        c) path/to/folder/foo.txt will be parsed as a folderId and a filename
    NOTE: in all cases, a leading '/' will be ignored, '/path/to/folder' is the same
    that 'path/to/folder'
    @param path String. The destination path to be parsed.
    @return If the folder doesn't exist, returns None. Otherwise, it can return
        either a tuple (folderId,filename), or (folderId, ''), depending of whether
        a filename is given or not in the path (cases (b) or (c))
    """
    sep = '/'
    if path and path[0] == sep:
        # remove leading '/'
        path = path[1:]
    if not path: return None
    v = path.split(sep)
    parentId = None  # not used; kept for clarity of the walk below
    folderId = "root"
    mimeType = MIME_TYPE_FOLDER
    i = 0
    # iterates through the components of the path (array v), resolving
    # each one as a sub-folder of the previous
    while i < len(v) and mimeType == MIME_TYPE_FOLDER:
        filename = v[i]
        subfolders = self.list_folders(name = filename, parentId = folderId)
        if subfolders:
            # descend into the first matching sub-folder
            folderId = subfolders[0]['id']
        else:
            if i == len(v) - 1:
                # e.g. 'path/to/folder/foo.txt', then folderId = ID_OF('path/to/folder/'),
                # and filename = 'foo.txt'
                return (folderId, filename)
            else:
                # path not found
                return None
        i += 1
    # this is the case 'path/to/folder/', no filename given
    return (folderId, '')
def createFolder(self, path = ''):
"""Create a new folder in Drive. It recognizes string names like
'/path/to/my/new/folder', or '/path/to/my/new/folder/'
@param path String. The path to the folder to be created.
@return On success, return the ID of the new created folder.
"""
ROOT_ID = "root"
return self.createFolderRecursively(path, ROOT_ID)
def createFolderRecursively(self, path = '', parentId = ''):
    """Auxiliary function to createFolder().

    Creates the first component of *path* under *parentId* if it does not
    exist, then recurses on the remainder of the path.
    @param path String. Remaining path components, separated by '/'.
    @param parentId String. ID of the folder to create the next component in.
    @return String. The ID of the deepest (last) folder, or '' if parentId is empty.
    @raise Exception, if the path is empty after stripping slashes.
    """
    if not parentId: return ''
    # remove leading and trailing '/'
    if path:
        if path[0] == '/': path = path[1:]
        if path[-1] == '/': path = path[:-1]
    if not path:
        raise Exception(f"{self.name}.createFolder: Incorrect path: '{path}'")
    # split off the first component 'a'; 'b' is the remainder (or None)
    v = path.split('/', 1)
    a = v[0]
    if len(v) > 1:
        b = v[1]
    else:
        b = None
    # is 'a' already a child of parentId ?
    q = f"name='{a}' and '{parentId}' in parents"
    r = self.list_all_files(query=q, attr='id')
    if not r:
        # create 'a', then re-parent it under parentId
        file_metadata = {
            'name': a,
            'mimeType': 'application/vnd.google-apps.folder'
        }
        file = self.service.files().create(body=file_metadata,
            fields='id').execute()
        # Move the created folder to the new location
        fileId = file.get('id')
        # Retrieve the existing parents to remove
        file = self.service.files().get(
            fileId=fileId,
            fields='parents'
        ).execute()
        previous_parents = ",".join(file.get('parents'))
        file = self.service.files().update(
            fileId=fileId,
            addParents=parentId,
            removeParents=previous_parents,
            fields='id, parents'
        ).execute()
        parentId = file.get('id')
    else:
        # 'a' already exists: reuse it
        parentId = r[0]['id']
    # now, parentId is the id of 'a'
    # then, call recursively searching for the child 'b' of 'a'
    if b:
        return self.createFolderRecursively(b, parentId)
    else:
        return parentId
def sync(self, local_path='', remote_path='', regex = '',
    recursion_level = 1, max_recursion_level = 10):
    """Synchronize local and remote path. Traverses recursively the local directory (*),
    recreates the directory structure in the remote path, and copies only the files
    more recently modified, or with a larger size.
    (*)NOTE: if the local_path corresponds to regular file (instead of a directory) it will
    synchronize that single file to the remote path.
    @param local_path String. The full path of the source folder.
    @param remote_path String. The path of the remote folder.
    @param regex (optional) String. Only sync the local files matching regex.
        e.g. regex = '.*\.txt$' will match 'foo.txt', but not 'foo.csv'
    @param max_recursion_level Int. Max recursion level to look into it. Default 10.
    @return None.
    @raise Exception, if the local path cannot be properly read (e.g., permissions),
        or an exceptions arises on calling other methods of the API (like upload_file())
    """
    if not local_path or not remote_path: return
    if recursion_level > max_recursion_level: return
    # NOTE.- 2021.08.24
    # CAUTION: Removing trailing / to all paths.
    # As the program doesn't distinguish between folder/foo and folder/foo/, and
    # keeping this trailing can bring to folder/foo//folder2 while concatenation
    # with '/' (!!!)
    if local_path[-1] == '/': local_path = local_path[:-1]
    if remote_path[-1] == '/': remote_path = remote_path[:-1]
    print(F"Syncing [Local]:{local_path} to [Drive]:{remote_path}")
    if not os.path.exists(local_path):
        raise Exception(f"{self.name}.sync: Local path not found")
    # try creating the remote folder (if it does not exist), otherwise
    # list its content
    r = self.searchFile(remote_path)
    if not r or r.get('mimeType') != MIME_TYPE_FOLDER:
        # NOTE: if the file exists but it is a regular file, then it will create a
        #       folder with the same name. This is weird, but Google Drive allows
        #       to have several files with the same name.
        #       By future versions, this behavior could be changed to delete the
        #       older (regular) file.
        print(f"+ creating folder '{remote_path}'")
        remoteFolderId = self.createFolder(remote_path)
    else:
        remoteFolderId = r['id']
    if os.path.isfile(local_path):
        # if the source is a file
        self._sync_file(local_path, remote_path)
        return
    elif not os.path.isdir(local_path):
        # is it is not a file, neither a directory: fail
        raise Exception(f"{self.name}.sync: Local path is not a directory")
    # otherwise, the source is a directory ...
    # list the content of the local directory
    local_files = os.listdir(local_path)
    # list the content of the remote directory
    # NOTE(review): remote_files is currently unused; presumably intended
    # for a future "delete remote orphans" pass — confirm before removing
    remote_files = self.list_directory(fileId = remoteFolderId)
    # regex matcher, compiled once for the whole directory
    if regex:
        matcher = re.compile(regex)
    else:
        matcher = None
    for entry in local_files:
        local_file = local_path + '/' + entry
        if os.path.isdir(local_path + '/' + entry):
            # recurse into sub-directories, bumping the depth counter
            self.sync(local_path + '/' + entry, remote_path + '/' + entry,
                regex= regex,
                recursion_level = recursion_level + 1,
                max_recursion_level = max_recursion_level)
        elif os.path.isfile(local_path + '/' + entry):
            # upload this file
            if matcher and not matcher.match(local_file):
                # if regex is given, omit the files not matching the pattern
                continue
            print(f"CALL _sync_file({local_file}, {remote_path})")
            self._sync_file(local_file, remote_path)
def _sync_file(self, local_file, dest):
    """Auxiliary function to sync a single file (not a folder).
    If the file does not exist in the destination, it will be created.
    If a file with that name actually exists, then it will update based in
    a criteria:
    - if the sizes between local and remote are different, or
    - if the modification time is newer in the local file
    @param local_file String. The full path of the source file.
    @param dest String. The path of the destination folder.
    @return None
    """
    if not local_file or not dest: return
    # inspect the destination
    filename = os.path.basename(local_file)
    remote_name = dest + '/' + filename
    r = self.searchFile(remote_name)
    if not r:
        # not present remotely: plain upload
        print(f">> uploading '{local_file}' to '{remote_name}")
        self.upload_file(origin = local_file, filename = filename,
            dest = dest)
    else:
        # file exists, check timestamp and size
        _fstat = os.stat(local_file)
        """ NOTE: how to convert from localtime to utctime
        https://stackoverflow.com/questions/79797/how-to-convert-local-time-string-to-utc
        Option 1 .
        >>> import datetime
        >>> utc_datetime = datetime.datetime.utcnow()
        >>> utc_datetime.strftime("%Y-%m-%d %H:%M:%S")
        '2010-02-01 06:59:19
        Option 2.
        NOTE - If any of your data is in a region that uses DST, use pytz and take a look at John Millikin's answer.
        If you want to obtain the UTC time from a given string and your lucky enough to be in a region in the world that either doesn't use DST, or you have data that is only offset from UTC without DST applied:
        --> using local time as the basis for the offset value:
        >>> # Obtain the UTC Offset for the current system:
        >>> UTC_OFFSET_TIMEDELTA = datetime.datetime.utcnow() - datetime.datetime.now()
        >>> local_datetime = datetime.datetime.strptime("2008-09-17 14:04:00", "%Y-%m-%d %H:%M:%S")
        >>> result_utc_datetime = local_datetime + UTC_OFFSET_TIMEDELTA
        >>> result_utc_datetime.strftime("%Y-%m-%d %H:%M:%S")
        '2008-09-17 04:04:00'
        >>> UTC_OFFSET = 10
        >>> result_utc_datetime = local_datetime - datetime.timedelta(hours=UTC_OFFSET)
        >>> result_utc_datetime.strftime("%Y-%m-%d %H:%M:%S")
        '2008-09-17 04:04:00'
        Option 3.
        >>> import datetime
        >>> timezone_aware_dt = datetime.datetime.now(datetime.timezone.utc)
        """
        # Drive reports modifiedTime as an RFC 3339 UTC timestamp
        remote_mtime = datetime.strptime(r['modifiedTime'], "%Y-%m-%dT%H:%M:%S.%fZ").timestamp()
        UTC_OFFSET_TIMEDELTA = datetime.utcnow() - datetime.now()
        # NOTE: local mtime in UTC (!)
        local_mtime = (datetime.fromtimestamp(_fstat.st_mtime) + UTC_OFFSET_TIMEDELTA).timestamp()
        remote_size = r['size']
        local_size = _fstat.st_size
        # re-upload only when size differs or the local copy is newer
        if local_size != int(remote_size) or local_mtime > remote_mtime:
            print(f"size: [local]{local_size} [remote]{remote_size}")
            print(f"mtime: [local]{datetime.fromtimestamp(local_mtime)} [remote]{datetime.fromtimestamp(remote_mtime)}")
            print(f">> updating '{local_file}'")
            # replace the remote file: delete, then upload the fresh copy
            self.remove(path = remote_name, prompt = False)
            self.upload_file(origin = local_file, filename = os.path.basename(local_file),
                dest = dest)
def __del__(self):
pass | 42.256903 | 215 | 0.578352 |
a2e7163a25c553230c3e6f160f729c009deed9f5 | 34,671 | py | Python | flask_admin/tests/mongoengine/test_basic.py | martjushev/flask-admin | 4880593cd054ff60ce33b6156d62fc33c1a257c4 | [
"BSD-3-Clause"
] | 28 | 2020-02-16T09:59:12.000Z | 2021-05-20T06:52:05.000Z | flask_admin/tests/mongoengine/test_basic.py | d3alek/flask-admin | 3643d78808596f4634c8779e5decde9ef8ebda6e | [
"BSD-3-Clause"
] | 5 | 2020-01-22T12:26:09.000Z | 2020-02-24T04:27:35.000Z | flask_admin/tests/mongoengine/test_basic.py | d3alek/flask-admin | 3643d78808596f4634c8779e5decde9ef8ebda6e | [
"BSD-3-Clause"
] | 4 | 2020-03-31T09:22:49.000Z | 2021-06-18T05:00:05.000Z | from nose.tools import eq_, ok_
from wtforms import fields, validators
from flask_admin import form
from flask_admin._compat import as_unicode
from flask_admin.contrib.mongoengine import ModelView
from . import setup
from datetime import datetime
class CustomModelView(ModelView):
    """ModelView subclass that lets each test override arbitrary view
    attributes (column_filters, column_editable_list, ...) through
    keyword arguments."""

    def __init__(self, model,
                 name=None, category=None, endpoint=None, url=None,
                 **kwargs):
        # Apply extra keyword arguments as class attributes before the
        # base class scaffolds the view from them.
        # bug fix: dict.iteritems() does not exist on Python 3;
        # dict.items() behaves equivalently on both Python 2 and 3 here.
        for k, v in kwargs.items():
            setattr(self, k, v)

        super(CustomModelView, self).__init__(model,
                                              name, category,
                                              endpoint, url)
def create_models(db):
    """Define the two mongoengine test documents and wipe their collections.

    Returns the (Model1, Model2) classes used throughout the tests.
    """
    class Model1(db.Document):
        # short fields scaffold to StringField widgets, unbounded to TextArea
        test1 = db.StringField(max_length=20)
        test2 = db.StringField(max_length=20)
        test3 = db.StringField()
        test4 = db.StringField()
        datetime_field = db.DateTimeField()

        def __str__(self):
            return self.test1

    class Model2(db.Document):
        string_field = db.StringField()
        int_field = db.IntField()
        float_field = db.FloatField()
        bool_field = db.BooleanField()

        model1 = db.ReferenceField(Model1)

    # start each test from empty collections
    Model1.objects.delete()
    Model2.objects.delete()

    return Model1, Model2
def fill_db(Model1, Model2):
    """Populate both collections with the fixture rows the tests query for."""
    Model1('test1_val_1', 'test2_val_1').save()
    Model1('test1_val_2', 'test2_val_2').save()
    Model1('test1_val_3', 'test2_val_3').save()
    Model1('test1_val_4', 'test2_val_4').save()
    # row with an empty first field, for empty-value filter tests
    Model1(None, 'empty_obj').save()

    Model2('string_field_val_1', None, None, True).save()
    Model2('string_field_val_2', None, None, False).save()
    Model2('string_field_val_3', 5000, 25.9).save()
    Model2('string_field_val_4', 9000, 75.5).save()
    # value larger than 32 bits, for big-integer handling
    Model2('string_field_val_5', 6169453081680413441).save()

    Model1('datetime_obj1', datetime_field=datetime(2014, 4, 3, 1, 9, 0)).save()
    Model1('datetime_obj2', datetime_field=datetime(2013, 3, 2, 0, 8, 0)).save()
def test_model():
    """End-to-end CRUD smoke test for a scaffolded Model1 view."""
    app, db, admin = setup()

    Model1, Model2 = create_models(db)

    view = CustomModelView(Model1)
    admin.add_view(view)

    # scaffolding sanity checks
    eq_(view.model, Model1)
    eq_(view.name, 'Model1')
    eq_(view.endpoint, 'model1')

    eq_(view._primary_key, 'id')

    ok_('test1' in view._sortable_columns)
    ok_('test2' in view._sortable_columns)
    ok_('test3' in view._sortable_columns)
    ok_('test4' in view._sortable_columns)

    ok_(view._create_form_class is not None)
    ok_(view._edit_form_class is not None)
    eq_(view._search_supported, False)
    eq_(view._filters, None)

    # bounded StringFields scaffold to StringField, unbounded to TextAreaField
    eq_(view._create_form_class.test1.field_class, fields.StringField)
    eq_(view._create_form_class.test2.field_class, fields.StringField)
    eq_(view._create_form_class.test3.field_class, fields.TextAreaField)
    eq_(view._create_form_class.test4.field_class, fields.TextAreaField)

    # Make some test clients
    client = app.test_client()

    rv = client.get('/admin/model1/')
    eq_(rv.status_code, 200)

    rv = client.get('/admin/model1/new/')
    eq_(rv.status_code, 200)

    # create a record and verify it was persisted
    rv = client.post('/admin/model1/new/',
                     data=dict(test1='test1large', test2='test2'))
    eq_(rv.status_code, 302)

    model = Model1.objects.first()
    eq_(model.test1, 'test1large')
    eq_(model.test2, 'test2')
    eq_(model.test3, '')
    eq_(model.test4, '')

    rv = client.get('/admin/model1/')
    eq_(rv.status_code, 200)
    ok_('test1large' in rv.data)

    # edit the record and verify the update
    url = '/admin/model1/edit/?id=%s' % model.id
    rv = client.get(url)
    eq_(rv.status_code, 200)

    rv = client.post(url,
                     data=dict(test1='test1small', test2='test2large'))
    eq_(rv.status_code, 302)

    model = Model1.objects.first()
    eq_(model.test1, 'test1small')
    eq_(model.test2, 'test2large')
    eq_(model.test3, '')
    eq_(model.test4, '')

    # delete the record
    url = '/admin/model1/delete/?id=%s' % model.id
    rv = client.post(url)
    eq_(rv.status_code, 302)
    eq_(Model1.objects.count(), 0)
def test_column_editable_list():
    """Exercise the x-editable in-line editing endpoint, including
    validation failures, bad primary keys, non-editable columns, and
    editing of reference fields."""
    app, db, admin = setup()

    Model1, Model2 = create_models(db)

    view = CustomModelView(Model1,
                           column_editable_list=['test1', 'datetime_field'])
    admin.add_view(view)

    fill_db(Model1, Model2)

    client = app.test_client()

    # Test in-line edit field rendering
    rv = client.get('/admin/model1/')
    data = rv.data.decode('utf-8')
    ok_('data-role="x-editable"' in data)

    # Form - Test basic in-line edit functionality
    obj1 = Model1.objects.get(test1='test1_val_3')
    rv = client.post('/admin/model1/ajax/update/', data={
        'list_form_pk': str(obj1.id),
        'test1': 'change-success-1',
    })
    data = rv.data.decode('utf-8')
    ok_('Record was successfully saved.' == data)

    # confirm the value has changed
    rv = client.get('/admin/model1/')
    data = rv.data.decode('utf-8')
    ok_('change-success-1' in data)

    # Test validation error (unparseable datetime)
    obj2 = Model1.objects.get(test1='datetime_obj1')
    rv = client.post('/admin/model1/ajax/update/', data={
        'list_form_pk': str(obj2.id),
        'datetime_field': 'problematic-input',
    })
    eq_(rv.status_code, 500)

    # Test invalid primary key
    rv = client.post('/admin/model1/ajax/update/', data={
        'list_form_pk': '1000',
        'test1': 'problematic-input',
    })
    data = rv.data.decode('utf-8')
    eq_(rv.status_code, 500)

    # Test editing column not in column_editable_list
    rv = client.post('/admin/model1/ajax/update/', data={
        'list_form_pk': '1',
        'test2': 'problematic-input',
    })
    data = rv.data.decode('utf-8')
    ok_('problematic-input' not in data)

    # Test in-line editing for relations
    view = CustomModelView(Model2, column_editable_list=['model1'])
    admin.add_view(view)

    obj3 = Model2.objects.get(string_field='string_field_val_1')
    rv = client.post('/admin/model2/ajax/update/', data={
        'list_form_pk': str(obj3.id),
        'model1': str(obj1.id),
    })
    data = rv.data.decode('utf-8')
    ok_('Record was successfully saved.' == data)

    # confirm the value has changed
    rv = client.get('/admin/model2/')
    data = rv.data.decode('utf-8')
    ok_('test1_val_1' in data)
def test_details_view():
    """Details view behaviour driven by ``can_view_details``.

    Checks that the details link is hidden when disabled, shown when
    enabled, that a direct request redirects (302) when disabled, that
    scaffolded fields render, and that ``column_details_list`` limits
    the rendered fields.
    """
    app, db, admin = setup()
    Model1, Model2 = create_models(db)
    view_no_details = CustomModelView(Model1)
    admin.add_view(view_no_details)
    # fields are scaffolded
    view_w_details = CustomModelView(Model2, can_view_details=True)
    admin.add_view(view_w_details)
    # show only specific fields in details w/ column_details_list
    string_field_view = CustomModelView(Model2, can_view_details=True,
                                        column_details_list=["string_field"],
                                        endpoint="sf_view")
    admin.add_view(string_field_view)
    fill_db(Model1, Model2)
    client = app.test_client()
    m1_id = Model1.objects.first().id
    m2_id = Model2.objects.first().id
    # ensure link to details is hidden when can_view_details is disabled
    rv = client.get('/admin/model1/')
    data = rv.data.decode('utf-8')
    ok_('/admin/model1/details/' not in data)
    # ensure link to details view appears
    rv = client.get('/admin/model2/')
    data = rv.data.decode('utf-8')
    ok_('/admin/model2/details/' in data)
    # test redirection when details are disabled
    url = '/admin/model1/details/?url=%2Fadmin%2Fmodel1%2F&id=' + str(m1_id)
    rv = client.get(url)
    eq_(rv.status_code, 302)
    # test if correct data appears in details view when enabled
    url = '/admin/model2/details/?url=%2Fadmin%2Fmodel2%2F&id=' + str(m2_id)
    rv = client.get(url)
    data = rv.data.decode('utf-8')
    ok_('String Field' in data)
    ok_('string_field_val_1' in data)
    ok_('Int Field' in data)
    # test column_details_list
    url = '/admin/sf_view/details/?url=%2Fadmin%2Fsf_view%2F&id=' + str(m2_id)
    rv = client.get(url)
    data = rv.data.decode('utf-8')
    ok_('String Field' in data)
    ok_('string_field_val_1' in data)
    ok_('Int Field' not in data)
def test_column_filters():
    """Exercise every scaffolded list-view filter operation via the
    ``fltN_M`` query arguments, over string, integer, boolean, float and
    datetime columns, including invalid-input validation.

    NOTE: the filter index encoded in ``flt0_<M>`` maps to the operation
    order asserted against ``view._filter_groups`` below (for strings:
    0=contains, 1=not contains, 2=equals, 3=not equal, ...); the section
    comments follow that mapping.
    """
    app, db, admin = setup()
    Model1, Model2 = create_models(db)
    # fill DB with values
    fill_db(Model1, Model2)
    # Test string filter
    view = CustomModelView(Model1, column_filters=['test1'])
    admin.add_view(view)
    eq_(len(view._filters), 7)
    eq_(
        [(f['index'], f['operation']) for f in view._filter_groups[u'Test1']],
        [
            (0, 'contains'),
            (1, 'not contains'),
            (2, 'equals'),
            (3, 'not equal'),
            (4, 'empty'),
            (5, 'in list'),
            (6, 'not in list'),
        ]
    )
    # Make some test clients
    client = app.test_client()
    # string - contains (index 0 per the operation list above)
    rv = client.get('/admin/model1/?flt0_0=test1_val_1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test2_val_1' in data)
    ok_('test1_val_2' not in data)
    # string - not contains (index 1)
    rv = client.get('/admin/model1/?flt0_1=test1_val_1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test2_val_1' not in data)
    ok_('test1_val_2' in data)
    # string - equals (index 2)
    rv = client.get('/admin/model1/?flt0_2=test1_val_1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test2_val_1' in data)
    ok_('test1_val_2' not in data)
    # string - not equal (index 3)
    rv = client.get('/admin/model1/?flt0_3=test1_val_1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test2_val_1' not in data)
    ok_('test1_val_2' in data)
    # string - empty
    rv = client.get('/admin/model1/?flt0_4=1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('empty_obj' in data)
    ok_('test1_val_1' not in data)
    ok_('test1_val_2' not in data)
    # string - not empty
    rv = client.get('/admin/model1/?flt0_4=0')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('empty_obj' not in data)
    ok_('test1_val_1' in data)
    ok_('test1_val_2' in data)
    # string - in list
    rv = client.get('/admin/model1/?flt0_5=test1_val_1%2Ctest1_val_2')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test2_val_1' in data)
    ok_('test2_val_2' in data)
    ok_('test1_val_3' not in data)
    ok_('test1_val_4' not in data)
    # string - not in list
    rv = client.get('/admin/model1/?flt0_6=test1_val_1%2Ctest1_val_2')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test2_val_1' not in data)
    ok_('test2_val_2' not in data)
    ok_('test1_val_3' in data)
    ok_('test1_val_4' in data)
    # Test numeric filter
    view = CustomModelView(Model2, column_filters=['int_field'])
    admin.add_view(view)
    eq_(
        [(f['index'], f['operation']) for f in view._filter_groups[u'Int Field']],
        [
            (0, 'equals'),
            (1, 'not equal'),
            (2, 'greater than'),
            (3, 'smaller than'),
            (4, 'empty'),
            (5, 'in list'),
            (6, 'not in list'),
        ]
    )
    # integer - equals
    rv = client.get('/admin/model2/?flt0_0=5000')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' not in data)
    # integer - equals (huge number)
    rv = client.get('/admin/model2/?flt0_0=6169453081680413441')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_5' in data)
    ok_('string_field_val_4' not in data)
    # integer - equals - test validation
    rv = client.get('/admin/model2/?flt0_0=badval')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('Invalid Filter Value' in data)
    # integer - not equal
    rv = client.get('/admin/model2/?flt0_1=5000')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' in data)
    # integer - greater
    rv = client.get('/admin/model2/?flt0_2=6000')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' in data)
    # integer - smaller
    rv = client.get('/admin/model2/?flt0_3=6000')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' not in data)
    # integer - empty
    rv = client.get('/admin/model2/?flt0_4=1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' in data)
    ok_('string_field_val_2' in data)
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' not in data)
    # integer - not empty
    rv = client.get('/admin/model2/?flt0_4=0')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' not in data)
    ok_('string_field_val_2' not in data)
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' in data)
    # integer - in list
    rv = client.get('/admin/model2/?flt0_5=5000%2C9000')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' not in data)
    ok_('string_field_val_2' not in data)
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' in data)
    # integer - in list (huge number)
    rv = client.get('/admin/model2/?flt0_5=6169453081680413441')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' not in data)
    ok_('string_field_val_5' in data)
    # integer - in list - test validation
    rv = client.get('/admin/model2/?flt0_5=5000%2Cbadval')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('Invalid Filter Value' in data)
    # integer - not in list
    rv = client.get('/admin/model2/?flt0_6=5000%2C9000')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' in data)
    ok_('string_field_val_2' in data)
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' not in data)
    # Test boolean filter
    view = CustomModelView(Model2, column_filters=['bool_field'],
                           endpoint="_bools")
    admin.add_view(view)
    eq_(
        [(f['index'], f['operation']) for f in view._filter_groups[u'Bool Field']],
        [
            (0, 'equals'),
            (1, 'not equal'),
        ]
    )
    # boolean - equals - Yes
    rv = client.get('/admin/_bools/?flt0_0=1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' in data)
    ok_('string_field_val_2' not in data)
    # boolean - equals - No
    rv = client.get('/admin/_bools/?flt0_0=0')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' not in data)
    ok_('string_field_val_2' in data)
    # boolean - not equals - Yes
    rv = client.get('/admin/_bools/?flt0_1=1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' not in data)
    ok_('string_field_val_2' in data)
    # boolean - not equals - No
    rv = client.get('/admin/_bools/?flt0_1=0')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' in data)
    ok_('string_field_val_2' not in data)
    # Test float filter
    view = CustomModelView(Model2, column_filters=['float_field'],
                           endpoint="_float")
    admin.add_view(view)
    eq_(
        [(f['index'], f['operation']) for f in view._filter_groups[u'Float Field']],
        [
            (0, 'equals'),
            (1, 'not equal'),
            (2, 'greater than'),
            (3, 'smaller than'),
            (4, 'empty'),
            (5, 'in list'),
            (6, 'not in list'),
        ]
    )
    # float - equals
    rv = client.get('/admin/_float/?flt0_0=25.9')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' not in data)
    # float - equals - test validation
    rv = client.get('/admin/_float/?flt0_0=badval')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('Invalid Filter Value' in data)
    # float - not equal
    rv = client.get('/admin/_float/?flt0_1=25.9')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' in data)
    # float - greater
    rv = client.get('/admin/_float/?flt0_2=60.5')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' in data)
    # float - smaller
    rv = client.get('/admin/_float/?flt0_3=60.5')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' not in data)
    # float - empty
    rv = client.get('/admin/_float/?flt0_4=1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' in data)
    ok_('string_field_val_2' in data)
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' not in data)
    # float - not empty
    rv = client.get('/admin/_float/?flt0_4=0')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' not in data)
    ok_('string_field_val_2' not in data)
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' in data)
    # float - in list
    rv = client.get('/admin/_float/?flt0_5=25.9%2C75.5')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' not in data)
    ok_('string_field_val_2' not in data)
    ok_('string_field_val_3' in data)
    ok_('string_field_val_4' in data)
    # float - in list - test validation
    rv = client.get('/admin/_float/?flt0_5=25.9%2Cbadval')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('Invalid Filter Value' in data)
    # float - not in list
    rv = client.get('/admin/_float/?flt0_6=25.9%2C75.5')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('string_field_val_1' in data)
    ok_('string_field_val_2' in data)
    ok_('string_field_val_3' not in data)
    ok_('string_field_val_4' not in data)
    # Test datetime filter
    view = CustomModelView(Model1,
                           column_filters=['datetime_field'],
                           endpoint="_datetime")
    admin.add_view(view)
    eq_(
        [(f['index'], f['operation']) for f in view._filter_groups[u'Datetime Field']],
        [
            (0, 'equals'),
            (1, 'not equal'),
            (2, 'greater than'),
            (3, 'smaller than'),
            (4, 'between'),
            (5, 'not between'),
            (6, 'empty'),
        ]
    )
    # datetime - equals
    rv = client.get('/admin/_datetime/?flt0_0=2014-04-03+01%3A09%3A00')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('datetime_obj1' in data)
    ok_('datetime_obj2' not in data)
    # datetime - not equal
    rv = client.get('/admin/_datetime/?flt0_1=2014-04-03+01%3A09%3A00')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('datetime_obj1' not in data)
    ok_('datetime_obj2' in data)
    # datetime - greater
    rv = client.get('/admin/_datetime/?flt0_2=2014-04-03+01%3A08%3A00')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('datetime_obj1' in data)
    ok_('datetime_obj2' not in data)
    # datetime - smaller
    rv = client.get('/admin/_datetime/?flt0_3=2014-04-03+01%3A08%3A00')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('datetime_obj1' not in data)
    ok_('datetime_obj2' in data)
    # datetime - between
    rv = client.get('/admin/_datetime/?flt0_4=2014-04-02+00%3A00%3A00+to+2014-11-20+23%3A59%3A59')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('datetime_obj1' in data)
    ok_('datetime_obj2' not in data)
    # datetime - not between
    rv = client.get('/admin/_datetime/?flt0_5=2014-04-02+00%3A00%3A00+to+2014-11-20+23%3A59%3A59')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('datetime_obj1' not in data)
    ok_('datetime_obj2' in data)
    # datetime - empty
    rv = client.get('/admin/_datetime/?flt0_6=1')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test1_val_1' in data)
    ok_('datetime_obj1' not in data)
    ok_('datetime_obj2' not in data)
    # datetime - not empty
    rv = client.get('/admin/_datetime/?flt0_6=0')
    eq_(rv.status_code, 200)
    data = rv.data.decode('utf-8')
    ok_('test1_val_1' not in data)
    ok_('datetime_obj1' in data)
    ok_('datetime_obj2' in data)
def test_default_sort():
    """Rows returned by ``get_list`` honour ``column_default_sort``, for
    both a single column name and a list of ``(column, invert)`` pairs."""
    app, db, admin = setup()
    M1, _ = create_models(db)

    M1(test1='c', test2='x').save()
    M1(test1='b', test2='x').save()
    M1(test1='a', test2='y').save()
    eq_(M1.objects.count(), 3)

    # single-column default sort: plain ascending order on test1
    single_view = CustomModelView(M1, column_default_sort='test1')
    admin.add_view(single_view)

    _, rows = single_view.get_list(0, None, None, None, None)
    for position, expected in enumerate(['a', 'b', 'c']):
        eq_(rows[position].test1, expected)

    # test default sort with multiple columns: test2 first, then test1
    multi_order = [('test2', False), ('test1', False)]
    multi_view = CustomModelView(M1, column_default_sort=multi_order, endpoint='m1_2')
    admin.add_view(multi_view)

    _, rows = multi_view.get_list(0, None, None, None, None)
    eq_(len(rows), 3)
    for position, expected in enumerate(['b', 'c', 'a']):
        eq_(rows[position].test1, expected)
def test_extra_fields():
    """``form_extra_fields`` adds the extra field to the scaffolded form,
    rendered after the model's own fields ('Test1' precedes 'Extra Field')."""
    app, db, admin = setup()
    Model1, _ = create_models(db)
    view = CustomModelView(
        Model1,
        form_extra_fields={
            'extra_field': fields.StringField('Extra Field')
        }
    )
    admin.add_view(view)
    client = app.test_client()
    rv = client.get('/admin/model1/new/')
    eq_(rv.status_code, 200)
    # Check presence and order
    data = rv.data.decode('utf-8')
    ok_('Extra Field' in data)
    pos1 = data.find('Extra Field')
    pos2 = data.find('Test1')
    ok_(pos2 < pos1)
def test_extra_field_order():
    """Rendering order of a ``form_extra_fields`` field.

    NOTE(review): this test is byte-for-byte identical to
    ``test_extra_fields`` above and does not exercise ``form_columns``
    ordering as its name suggests — probably an incomplete copy/paste;
    consider adding an explicit field-order configuration here.
    """
    app, db, admin = setup()
    Model1, _ = create_models(db)
    view = CustomModelView(
        Model1,
        form_extra_fields={
            'extra_field': fields.StringField('Extra Field')
        }
    )
    admin.add_view(view)
    client = app.test_client()
    rv = client.get('/admin/model1/new/')
    eq_(rv.status_code, 200)
    # Check presence and order
    data = rv.data.decode('utf-8')
    ok_('Extra Field' in data)
    pos1 = data.find('Extra Field')
    pos2 = data.find('Test1')
    ok_(pos2 < pos1)
def test_custom_form_base():
    """``form_base_class`` is honoured: scaffolded fields are present and
    the created form instance derives from the custom base class."""
    app, db, admin = setup()

    class TestForm(form.BaseForm):
        pass

    Model1, _ = create_models(db)

    view = CustomModelView(Model1, form_base_class=TestForm)
    admin.add_view(view)

    # scaffolding still picks up the model's own fields
    ok_(hasattr(view._create_form_class, 'test1'))

    built_form = view.create_form()
    ok_(isinstance(built_form, TestForm))
def test_subdocument_config():
    """Embedded-document form configuration via ``form_subdocuments``:
    ``form_columns`` whitelists fields and ``form_excluded_columns``
    removes them from the generated inner form."""
    app, db, admin = setup()
    class Comment(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)
    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        subdoc = db.EmbeddedDocumentField(Comment)
    # Check only
    view1 = CustomModelView(
        Model1,
        form_subdocuments={
            'subdoc': {
                'form_columns': ('name',)
            }
        }
    )
    ok_(hasattr(view1._create_form_class, 'subdoc'))
    # NOTE: this local name shadows the imported `form` module for the
    # remainder of the function body.
    form = view1.create_form()
    ok_('name' in dir(form.subdoc.form))
    ok_('value' not in dir(form.subdoc.form))
    # Check exclude
    view2 = CustomModelView(
        Model1,
        form_subdocuments={
            'subdoc': {
                'form_excluded_columns': ('value',)
            }
        }
    )
    form = view2.create_form()
    ok_('name' in dir(form.subdoc.form))
    ok_('value' not in dir(form.subdoc.form))
def test_subdocument_class_config():
    """``form_subdocuments`` also accepts an ``EmbeddedForm`` subclass
    instance (instead of a plain dict) to configure the inner form."""
    app, db, admin = setup()
    from flask_admin.contrib.mongoengine import EmbeddedForm
    class Comment(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)
    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        subdoc = db.EmbeddedDocumentField(Comment)
    class EmbeddedConfig(EmbeddedForm):
        form_columns = ('name',)
    # Check only
    view1 = CustomModelView(
        Model1,
        form_subdocuments={
            'subdoc': EmbeddedConfig()
        }
    )
    form = view1.create_form()
    ok_('name' in dir(form.subdoc.form))
    ok_('value' not in dir(form.subdoc.form))
def test_nested_subdocument_config():
    """``form_subdocuments`` can be nested recursively: the configuration
    applies to an embedded document inside another embedded document."""
    app, db, admin = setup()
    # Check recursive
    class Comment(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)
    class Nested(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        comment = db.EmbeddedDocumentField(Comment)
    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        nested = db.EmbeddedDocumentField(Nested)
    view1 = CustomModelView(
        Model1,
        form_subdocuments={
            'nested': {
                'form_subdocuments': {
                    'comment': {
                        'form_columns': ('name',)
                    }
                }
            }
        }
    )
    form = view1.create_form()
    ok_('name' in dir(form.nested.form.comment.form))
    ok_('value' not in dir(form.nested.form.comment.form))
def test_nested_list_subdocument():
    """For a ``ListField(EmbeddedDocumentField)``, the ``None`` key inside
    ``form_subdocuments`` configures the per-item inner form."""
    app, db, admin = setup()
    class Comment(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)
    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        subdoc = db.ListField(db.EmbeddedDocumentField(Comment))
    # Check only
    view1 = CustomModelView(
        Model1,
        form_subdocuments={
            'subdoc': {
                'form_subdocuments': {
                    None: {
                        'form_columns': ('name',)
                    }
                }
            }
        }
    )
    form = view1.create_form()
    # the inner per-item form class is the third positional arg of the
    # unbound list field
    inline_form = form.subdoc.unbound_field.args[2]
    ok_('name' in dir(inline_form))
    ok_('value' not in dir(inline_form))
def test_nested_sortedlist_subdocument():
    """Same as test_nested_list_subdocument, but for a
    ``SortedListField(EmbeddedDocumentField)``."""
    app, db, admin = setup()
    class Comment(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)
    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        subdoc = db.SortedListField(db.EmbeddedDocumentField(Comment))
    # Check only
    view1 = CustomModelView(
        Model1,
        form_subdocuments={
            'subdoc': {
                'form_subdocuments': {
                    None: {
                        'form_columns': ('name',)
                    }
                }
            }
        }
    )
    form = view1.create_form()
    # the inner per-item form class is the third positional arg of the
    # unbound list field
    inline_form = form.subdoc.unbound_field.args[2]
    ok_('name' in dir(inline_form))
    ok_('value' not in dir(inline_form))
def test_sortedlist_subdocument_validation():
    """A required field inside a ``SortedListField(EmbeddedDocumentField)``
    is validated: a complete submission redirects (302) while a missing
    required subdocument field re-renders the form with an error."""
    app, db, admin = setup()

    class Comment(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)

    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        subdoc = db.SortedListField(db.EmbeddedDocumentField(Comment))

    view = CustomModelView(Model1)
    admin.add_view(view)
    client = app.test_client()

    # complete subdocument: saved, redirect to list view
    rv = client.post('/admin/model1/new/',
                     data={'test1': 'test1large', 'subdoc-0-name': 'comment', 'subdoc-0-value': 'test'})
    eq_(rv.status_code, 302)

    # empty required subdocument field: form re-rendered with the error
    rv = client.post('/admin/model1/new/',
                     data={'test1': 'test1large', 'subdoc-0-name': '', 'subdoc-0-value': 'test'})
    eq_(rv.status_code, 200)
    # BUGFIX: rv.data is bytes on Python 3 — a str-in-bytes membership test
    # raises TypeError; decode first, as every other assertion in this file does.
    ok_('This field is required' in rv.data.decode('utf-8'))
def test_list_subdocument_validation():
    """A required field inside a ``ListField(EmbeddedDocumentField)`` is
    validated: a complete submission redirects (302) while a missing
    required subdocument field re-renders the form with an error."""
    app, db, admin = setup()

    class Comment(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)

    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        subdoc = db.ListField(db.EmbeddedDocumentField(Comment))

    view = CustomModelView(Model1)
    admin.add_view(view)
    client = app.test_client()

    # complete subdocument: saved, redirect to list view
    rv = client.post('/admin/model1/new/',
                     data={'test1': 'test1large', 'subdoc-0-name': 'comment', 'subdoc-0-value': 'test'})
    eq_(rv.status_code, 302)

    # empty required subdocument field: form re-rendered with the error
    rv = client.post('/admin/model1/new/',
                     data={'test1': 'test1large', 'subdoc-0-name': '', 'subdoc-0-value': 'test'})
    eq_(rv.status_code, 200)
    # BUGFIX: rv.data is bytes on Python 3 — a str-in-bytes membership test
    # raises TypeError; decode first, as every other assertion in this file does.
    ok_('This field is required' in rv.data.decode('utf-8'))
def test_ajax_fk():
    """End-to-end check of ``form_ajax_refs`` on a reference field:
    loader lookups, AjaxSelectField rendering, the ajax lookup endpoint,
    and form submission resolving the reference."""
    app, db, admin = setup()
    Model1, Model2 = create_models(db)
    view = CustomModelView(
        Model2,
        url='view',
        form_ajax_refs={
            'model1': {
                'fields': ('test1', 'test2')
            }
        }
    )
    admin.add_view(view)
    ok_(u'model1' in view._form_ajax_refs)
    model = Model1(test1=u'first')
    model.save()
    model2 = Model1(test1=u'foo', test2=u'bar').save()
    # Check loader: fetch by pk, then search over the configured fields
    loader = view._form_ajax_refs[u'model1']
    mdl = loader.get_one(model.id)
    eq_(mdl.test1, model.test1)
    items = loader.get_list(u'fir')
    eq_(len(items), 1)
    eq_(items[0].id, model.id)
    items = loader.get_list(u'bar')
    eq_(len(items), 1)
    eq_(items[0].test1, u'foo')
    # Check form generation
    form = view.create_form()
    eq_(form.model1.__class__.__name__, u'AjaxSelectField')
    with app.test_request_context('/admin/view/'):
        ok_(u'value=""' not in form.model1())
        form.model1.data = model
        needle = u'data-json="["%s", "first"]"' % as_unicode(model.id)
        ok_(needle in form.model1())
        ok_(u'value="%s"' % as_unicode(model.id) in form.model1())
    # Check querying
    client = app.test_client()
    req = client.get(u'/admin/view/ajax/lookup/?name=model1&query=foo')
    # BUGFIX: req.data is bytes on Python 3; decode before comparing with
    # the unicode literal, otherwise the equality can never hold.
    eq_(req.data.decode('utf-8'), u'[["%s", "foo"]]' % model2.id)
    # Check submitting
    client.post('/admin/view/new/', data={u'model1': as_unicode(model.id)})
    mdl = Model2.objects.first()
    ok_(mdl is not None)
    ok_(mdl.model1 is not None)
    eq_(mdl.model1.id, model.id)
    eq_(mdl.model1.test1, u'first')
def test_nested_ajax_refs():
    """``form_ajax_refs`` declared inside ``form_subdocuments`` turns a
    nested reference field into an AjaxSelectField, registered under a
    dash-joined key ('nested-comment')."""
    app, db, admin = setup()
    # Check recursive
    class Comment(db.Document):
        name = db.StringField(max_length=20, required=True)
        value = db.StringField(max_length=20)
    class Nested(db.EmbeddedDocument):
        name = db.StringField(max_length=20, required=True)
        comment = db.ReferenceField(Comment)
    class Model1(db.Document):
        test1 = db.StringField(max_length=20)
        nested = db.EmbeddedDocumentField(Nested)
    view1 = CustomModelView(
        Model1,
        form_subdocuments={
            'nested': {
                'form_ajax_refs': {
                    'comment': {
                        'fields': ['name']
                    }
                }
            }
        }
    )
    form = view1.create_form()
    eq_(type(form.nested.form.comment).__name__, 'AjaxSelectField')
    ok_('nested-comment' in view1._form_ajax_refs)
def test_form_flat_choices():
    """Flat (non-tuple) ``choices`` on a StringField are expanded to
    ``(value, label)`` pairs on the generated form field."""
    app, db, admin = setup()

    class Model(db.Document):
        name = db.StringField(max_length=20, choices=('a', 'b', 'c'))

    view = CustomModelView(Model)
    admin.add_view(view)

    generated = view.create_form()
    eq_(generated.name.choices, [(choice, choice) for choice in ('a', 'b', 'c')])
def test_form_args():
    """A shared ``form_args`` dict must not accumulate duplicate
    validators across the create and edit forms (required + regexp = 2)."""
    app, db, admin = setup()

    class Model(db.Document):
        test = db.StringField(required=True)

    shared_form_args = {'test': {'validators': [validators.Regexp('test')]}}

    view = CustomModelView(Model, form_args=shared_form_args)
    admin.add_view(view)

    # building both forms from the same dict must not duplicate validators
    for built_form in (view.create_form(), view.edit_form()):
        eq_(len(built_form.test.validators), 2)
def test_form_args_embeddeddoc():
    """``form_args`` labels apply both to plain fields and to
    EmbeddedDocumentField form fields."""
    app, db, admin = setup()
    class Info(db.EmbeddedDocument):
        name = db.StringField()
        age = db.StringField()
    class Model(db.Document):
        info = db.EmbeddedDocumentField('Info')
        timestamp = db.DateTimeField()
    view = CustomModelView(
        Model,
        form_args={
            'info': {'label': 'Information'},
            'timestamp': {'label': 'Last Updated Time'}
        }
    )
    admin.add_view(view)
    form = view.create_form()
    eq_(form.timestamp.label.text, 'Last Updated Time')
    # This is the failure
    eq_(form.info.label.text, 'Information')
def test_simple_list_pager():
    """With ``simple_list_pager`` enabled the count query is skipped:
    ``get_count_query`` asserting False proves it is never invoked, and
    ``get_list`` returns ``None`` for the count."""
    app, db, admin = setup()
    Model1, _ = create_models(db)
    class TestModelView(CustomModelView):
        simple_list_pager = True
        def get_count_query(self):
            # must never be called when simple_list_pager is on
            assert False
    view = TestModelView(Model1)
    admin.add_view(view)
    count, data = view.get_list(0, None, None, None, None)
    ok_(count is None)
def test_export_csv():
    """CSV export: ``export_max_rows`` truncates the output to the given
    number of rows, and without it all rows are exported."""
    app, db, admin = setup()
    Model1, Model2 = create_models(db)
    view = CustomModelView(Model1, can_export=True,
                           column_list=['test1', 'test2'], export_max_rows=2,
                           endpoint='row_limit_2')
    admin.add_view(view)
    for x in range(5):
        fill_db(Model1, Model2)
    client = app.test_client()
    # test export_max_rows
    rv = client.get('/admin/row_limit_2/export/csv/')
    data = rv.data.decode('utf-8')
    eq_(rv.status_code, 200)
    ok_("Test1,Test2\r\n"
        "test1_val_1,test2_val_1\r\n"
        "test1_val_2,test2_val_2\r\n" == data)
    view = CustomModelView(Model1, can_export=True,
                           column_list=['test1', 'test2'],
                           endpoint='no_row_limit')
    admin.add_view(view)
    # test row limit without export_max_rows
    rv = client.get('/admin/no_row_limit/export/csv/')
    data = rv.data.decode('utf-8')
    eq_(rv.status_code, 200)
    # header + 5 * fill_db worth of rows: more than the truncated 21 lines
    ok_(len(data.splitlines()) > 21)
| 28.630058 | 104 | 0.611577 |
bdc1a1e35704b70ed21ff295a837520907c1dc3e | 5,695 | py | Python | filter_plugins/merge.py | openfun/arnold | 3ffd61d8296d93111e6b5412a53c42d67ca61463 | [
"MIT"
] | 34 | 2018-03-09T13:06:05.000Z | 2021-05-09T10:12:16.000Z | filter_plugins/merge.py | openfun/arnold | 3ffd61d8296d93111e6b5412a53c42d67ca61463 | [
"MIT"
] | 302 | 2018-05-23T12:06:29.000Z | 2022-02-28T08:22:34.000Z | filter_plugins/merge.py | openfun/arnold | 3ffd61d8296d93111e6b5412a53c42d67ca61463 | [
"MIT"
] | 10 | 2018-05-31T08:28:58.000Z | 2022-03-10T09:51:08.000Z | """
Merge Jinja filters
"""
from copy import deepcopy
from pathlib import Path
from ansible.errors import AnsibleFilterError
from ansible.utils.encrypt import random_password
# pylint: disable=invalid-name,too-many-branches
def merge_with_app(base, new):
    """
    Merge the "new" application definition into the "base" application.

    Services listed in "new" that do not exist in "base" are ignored.
    Returns a deep copy; neither input is mutated.
    """
    if base is None:
        raise AnsibleFilterError("input base app is empty")
    if new is None:
        raise AnsibleFilterError("input new app is empty")
    if not (isinstance(base, dict) and isinstance(new, dict)):
        raise AnsibleFilterError("input apps definitions should be 'dict' types")
    if base.get("name") is None or new.get("name") is None:
        raise AnsibleFilterError("input apps should have a 'name' key")
    if base["name"] != new["name"]:
        raise AnsibleFilterError("input apps should have the same name")
    if not base["name"]:
        raise AnsibleFilterError("input apps name cannot be empty")

    merged = deepcopy(base)

    if "services" not in new:
        return merged

    # Override metadata of services that exist in both definitions
    for service in merged["services"]:
        matches = [
            candidate
            for candidate in new["services"]
            if candidate.get("name") == service.get("name")
        ]
        # only merge when exactly one service in "new" has this name
        if len(matches) != 1:
            continue
        override = matches[0]

        for key in ("configs", "templates", "environment_variables"):
            incoming = override.get(key)
            if incoming is None:
                continue
            if isinstance(incoming, list):
                # Files are matched on their base name, so a new template
                # with the same file name overrides the base one, e.g.
                # baz/foo.yml (new) replaces bar/foo.yml (base).
                incoming_names = {Path(entry).name for entry in incoming}
                kept = [
                    entry
                    for entry in service[key]
                    if Path(entry).name not in incoming_names
                ]
                service[key] = sorted(incoming + kept)
            elif isinstance(incoming, str):
                service[key] = incoming

        # Copy service keys that the base service does not define yet
        # (could be meta, such as host, etc.)
        for key, value in override.items():
            if service.get(key) is None:
                service[key] = value

    # Merge volumes (if any), removing duplicates
    if merged.get("volumes") and new.get("volumes"):
        merged["volumes"] = sorted(set(new["volumes"] + merged["volumes"]))

    # Add top-level keys that only exist in the new definition
    for key, value in new.items():
        if key not in merged:
            merged[key] = value

    return merged
def merge_with_database(base, database, app_name, customer, environment):
    """
    Merge a database declaration into an already-existing database structure.

    If a database for this application already exists under the target
    engine/release, the new one is ignored. Returns a deep copy; "base"
    is not mutated.
    """
    if not isinstance(base, dict) or not isinstance(database, dict):
        raise AnsibleFilterError("input database is empty")
    if not isinstance(environment, dict):
        raise AnsibleFilterError("input environment must be a dictionnary")
    if "engine" not in database:
        raise AnsibleFilterError("input database should define an 'engine' key")
    if "release" not in database:
        raise AnsibleFilterError("input database should define a 'release' key")
    if "code" not in environment:
        raise AnsibleFilterError("environment dict should define a 'code' key")
    if "name" not in environment:
        raise AnsibleFilterError("environment dict should define a 'name' key")

    result = deepcopy(base)
    database_name = "_".join([environment.get("code"), customer, app_name])
    new_database = {
        "application": app_name,
        "password": random_password(),
        "name": database_name,
        "user": database_name,
    }
    engine = database.get("engine")
    release = database.get("release")

    if engine not in result:
        # Create a new entry for this database engine
        result[engine] = [{"release": release, "databases": [new_database]}]
        return result

    # Loop over defined engine entries and look for the target release
    for defined_engine in result[engine]:
        if defined_engine.get("release", None) == release:
            # Target release already exists: add the database unless the
            # application already declares one
            databases = defined_engine.get("databases")
            if all(d.get("application") != app_name for d in databases):
                databases.append(new_database)
            # BUGFIX: stop scanning once the release is handled. Without
            # this break the loop's ``else`` clause below (which runs
            # whenever the loop completes without a break) appended a
            # duplicate release entry on every call, even when the
            # release had been found.
            break
    else:
        # Release not present: add a new release entry for this engine
        result[engine].append(
            {"release": release, "databases": [new_database]}
        )
    return result
# pylint: disable=no-self-use,too-few-public-methods
class FilterModule:
    """Expose the deep-merge helpers as Ansible/Jinja2 filters."""

    def filters(self):
        """Return the mapping of filter names to their callables."""
        return dict(
            merge_with_app=merge_with_app,
            merge_with_database=merge_with_database,
        )
| 34.307229 | 83 | 0.60878 |
9d71a58401d0f34bcd8ea3e7adc62e60748b7e42 | 1,983 | py | Python | applications/ppmi_volumes/collect_ppmi_volumes_direct_reg_seg_mjff.py | stnava/superiq | a13befe5f525bbef02cd095031952db62c5d054e | [
"Apache-2.0"
] | null | null | null | applications/ppmi_volumes/collect_ppmi_volumes_direct_reg_seg_mjff.py | stnava/superiq | a13befe5f525bbef02cd095031952db62c5d054e | [
"Apache-2.0"
] | null | null | null | applications/ppmi_volumes/collect_ppmi_volumes_direct_reg_seg_mjff.py | stnava/superiq | a13befe5f525bbef02cd095031952db62c5d054e | [
"Apache-2.0"
] | null | null | null | from superiq import VolumeData
from superiq.pipeline_utils import *
import boto3
import pandas as pd
from datetime import datetime
def collect_brain_age(bucket, version):
    """Download every per-image ``brain_age.csv`` under the pipeline prefix
    and concatenate them, tagging each row with its 'Repeat' identifier
    (the fifth dash-separated token of the object's file name)."""
    prefix = f"superres-pipeline-{version}/"
    csv_keys = [
        key for key in list_images(bucket, prefix)
        if key.endswith('brain_age.csv')
    ]
    frames = []
    for key in csv_keys:
        local_path = get_s3_object(bucket, key, '/tmp')
        name_parts = local_path.split('/')[-1].split('-')
        frame = pd.read_csv(local_path)
        frame['Repeat'] = name_parts[4]
        frames.append(frame)
    return pd.concat(frames)
if __name__ == "__main__":
    # Source bucket/prefix of the per-run segmentation outputs to collect.
    bucket = "mjff-ppmi"
    version = "mjff"
    prefix = f"superres-pipeline-{version}/"
    stack_filename = f'ppmi_stacked_volumes_{version}.csv'
    pivoted_filename = f'ppmi_pivoted_volumes_{version}.csv'
    upload_prefix = "volume_measures/"
    filter_suffixes = ['OR_seg.csv', 'SR_jlfseg.csv','SR_ljflseg.csv', 'SR_seg.csv', 'SR_regseg.csv']
    # Stack the per-image volume CSVs, then pivot to one row per image.
    vd = VolumeData(bucket, prefix, filter_suffixes, upload_prefix)
    local_stack = vd.stack_volumes(stack_filename)
    local_pivot = vd.pivot_data(local_stack, pivoted_filename)
    local_pivot_df = pd.read_csv(local_pivot)
    # (removed a dead no-op: `local_pivot_df = local_pivot_df`)
    ba = collect_brain_age(bucket, version)
    # YYYYMM date key kept for the optional metadata merge below;
    # brain-age rows are joined on the 'Repeat' identifier.
    local_pivot_df['join_date'] = [str(i)[:6] for i in local_pivot_df['Date']]
    local_pivot_df = pd.merge(local_pivot_df, ba, on='Repeat')
    s3 = boto3.client('s3')
    local_pivot_df.to_csv('local_pivot.csv')
    s3.upload_file('local_pivot.csv', bucket, f"volume_measures/direct_reg_seg_ppmi_volumes-{version}.csv")
    metadata = False  # flip to True to also join the PPMI metadata sheet
    if metadata:
        metadata_bucket = 'mjff-ppmi'
        metadata_key = 's3://ppmi-metadata/PPMIFullMetadata.csv'
        metadata_df = pd.read_csv(metadata_key)
        merged = pd.merge(
            metadata_df,
            local_pivot_df,
            right_on=['Subject', 'join_date'],
            left_on=['PATNO', 'join_date'],
            how='outer'
        )
        # NOTE(review): output name says "simple_reg_sr" while the upload
        # above says "direct_reg_seg" — confirm which naming is intended.
        merged_path = "full_" + "simple_reg_sr_ppmi_volumes.csv"
        merged.to_csv(merged_path, index=False)
        s3.upload_file(merged_path, bucket, merged_path)
| 35.410714 | 104 | 0.740292 |
91a17b8eb589cab75848f760561942840385e2a2 | 536 | py | Python | modules/vtk_basic/vtkHierarchicalDataSetGeometryFilter.py | chrisidefix/devide | 99bfe156e710fa47ba7ae88b0ce1eef592a3a439 | [
"BSD-3-Clause"
] | 25 | 2015-08-24T16:05:14.000Z | 2020-12-09T20:07:14.000Z | modules/vtk_basic/vtkHierarchicalDataSetGeometryFilter.py | chrisidefix/devide | 99bfe156e710fa47ba7ae88b0ce1eef592a3a439 | [
"BSD-3-Clause"
] | 1 | 2016-02-16T21:18:10.000Z | 2016-02-16T21:18:10.000Z | modules/vtk_basic/vtkHierarchicalDataSetGeometryFilter.py | chrisidefix/devide | 99bfe156e710fa47ba7ae88b0ce1eef592a3a439 | [
"BSD-3-Clause"
] | 5 | 2016-02-16T20:05:37.000Z | 2020-01-31T11:27:39.000Z | # class generated by DeVIDE::createDeVIDEModuleFromVTKObject
from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase
import vtk


class vtkHierarchicalDataSetGeometryFilter(SimpleVTKClassModuleBase):
    """DeVIDE module wrapping VTK's vtkHierarchicalDataSetGeometryFilter."""

    def __init__(self, module_manager):
        # One vtkMultiGroupDataSet input, one vtkPolyData output; the module
        # docstring is replaced by the wrapped filter's VTK documentation.
        wrapped_filter = vtk.vtkHierarchicalDataSetGeometryFilter()
        SimpleVTKClassModuleBase.__init__(
            self,
            module_manager,
            wrapped_filter,
            'Processing.',
            ('vtkMultiGroupDataSet',),
            ('vtkPolyData',),
            replaceDoc=True,
            inputFunctions=None,
            outputFunctions=None)
| 41.230769 | 70 | 0.73694 |
ec589c003618327870a458e640132c3e9d16a487 | 14,509 | py | Python | plugins/modules/oci_network_subnet_facts.py | hanielburton/oci-ansible-collection | dfdffde637f746d346ba35569be8c3a3407022f2 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_network_subnet_facts.py | hanielburton/oci-ansible-collection | dfdffde637f746d346ba35569be8c3a3407022f2 | [
"Apache-2.0"
] | null | null | null | plugins/modules/oci_network_subnet_facts.py | hanielburton/oci-ansible-collection | dfdffde637f746d346ba35569be8c3a3407022f2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# Copyright (c) 2017, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: preview status, community supported.
ANSIBLE_METADATA = {
    "metadata_version": "1.1",
    "status": ["preview"],
    "supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_network_subnet_facts
short_description: Fetches details about one or multiple Subnet resources in Oracle Cloud Infrastructure
description:
- Fetches details about one or multiple Subnet resources in Oracle Cloud Infrastructure
- Lists the subnets in the specified VCN and the specified compartment.
If the VCN ID is not provided, then the list includes the subnets from all VCNs in the specified compartment.
- If I(subnet_id) is specified, the details of a single Subnet will be returned.
version_added: "2.9"
author: Oracle (@oracle)
options:
subnet_id:
description:
- The OCID of the subnet.
- Required to get a specific subnet.
type: str
aliases: ["id"]
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment.
- Required to list multiple subnets.
type: str
vcn_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the VCN.
type: str
display_name:
description:
- A filter to return only resources that match the given display name exactly.
type: str
aliases: ["name"]
sort_by:
description:
- The field to sort by. You can provide one sort order (`sortOrder`). Default order for
TIMECREATED is descending. Default order for DISPLAYNAME is ascending. The DISPLAYNAME
sort order is case sensitive.
- "**Note:** In general, some \\"List\\" operations (for example, `ListInstances`) let you
optionally filter by availability domain if the scope of the resource type is within a
single availability domain. If you call one of these \\"List\\" operations without specifying
an availability domain, the resources are grouped by availability domain, then sorted."
type: str
choices:
- "TIMECREATED"
- "DISPLAYNAME"
sort_order:
description:
- The sort order to use, either ascending (`ASC`) or descending (`DESC`). The DISPLAYNAME sort order
is case sensitive.
type: str
choices:
- "ASC"
- "DESC"
lifecycle_state:
description:
- A filter to only return resources that match the given lifecycle state. The state value is case-insensitive.
type: str
choices:
- "PROVISIONING"
- "AVAILABLE"
- "TERMINATING"
- "TERMINATED"
- "UPDATING"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: List subnets
oci_network_subnet_facts:
compartment_id: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
- name: Get a specific subnet
oci_network_subnet_facts:
subnet_id: ocid1.subnet.oc1..xxxxxxEXAMPLExxxxxx
"""
RETURN = """
subnets:
description:
- List of Subnet resources
returned: on success
type: complex
contains:
availability_domain:
description:
- The subnet's availability domain. This attribute will be null if this is a regional subnet
instead of an AD-specific subnet. Oracle recommends creating regional subnets.
- "Example: `Uocm:PHX-AD-1`"
returned: on success
type: string
sample: Uocm:PHX-AD-1
cidr_block:
description:
- The subnet's CIDR block.
- "Example: `10.0.1.0/24`"
returned: on success
type: string
sample: 10.0.1.0/24
compartment_id:
description:
- The OCID of the compartment containing the subnet.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a
namespace. For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
dhcp_options_id:
description:
- The OCID of the set of DHCP options that the subnet uses.
returned: on success
type: string
sample: ocid1.dhcpoptions.oc1..xxxxxxEXAMPLExxxxxx
display_name:
description:
- A user-friendly name. Does not have to be unique, and it's changeable.
Avoid entering confidential information.
returned: on success
type: string
sample: display_name_example
dns_label:
description:
- A DNS label for the subnet, used in conjunction with the VNIC's hostname and
VCN's DNS label to form a fully qualified domain name (FQDN) for each VNIC
within this subnet (for example, `bminstance-1.subnet123.vcn1.oraclevcn.com`).
Must be an alphanumeric string that begins with a letter and is unique within the VCN.
The value cannot be changed.
- The absence of this parameter means the Internet and VCN Resolver
will not resolve hostnames of instances in this subnet.
- For more information, see
L(DNS in Your Virtual Cloud Network,https://docs.cloud.oracle.com/Content/Network/Concepts/dns.htm).
- "Example: `subnet123`"
returned: on success
type: string
sample: subnet123
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no
predefined name, type, or namespace. For more information, see L(Resource
Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
id:
description:
- The subnet's Oracle ID (OCID).
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
lifecycle_state:
description:
- The subnet's current state.
returned: on success
type: string
sample: PROVISIONING
prohibit_public_ip_on_vnic:
description:
- Whether VNICs within this subnet can have public IP addresses.
Defaults to false, which means VNICs created in this subnet will
automatically be assigned public IP addresses unless specified
otherwise during instance launch or VNIC creation (with the
`assignPublicIp` flag in
L(CreateVnicDetails,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/iaas/20160918/CreateVnicDetails/)).
If `prohibitPublicIpOnVnic` is set to true, VNICs created in this
subnet cannot have public IP addresses (that is, it's a private
subnet).
- "Example: `true`"
returned: on success
type: bool
sample: true
route_table_id:
description:
- The OCID of the route table that the subnet uses.
returned: on success
type: string
sample: ocid1.routetable.oc1..xxxxxxEXAMPLExxxxxx
security_list_ids:
description:
- "The OCIDs of the security list or lists that the subnet uses. Remember
that security lists are associated *with the subnet*, but the
rules are applied to the individual VNICs in the subnet."
returned: on success
type: list
sample: []
subnet_domain_name:
description:
- The subnet's domain name, which consists of the subnet's DNS label,
the VCN's DNS label, and the `oraclevcn.com` domain.
- For more information, see
L(DNS in Your Virtual Cloud Network,https://docs.cloud.oracle.com/Content/Network/Concepts/dns.htm).
- "Example: `subnet123.vcn1.oraclevcn.com`"
returned: on success
type: string
sample: subnet123.vcn1.oraclevcn.com
time_created:
description:
- The date and time the subnet was created, in the format defined by L(RFC3339,https://tools.ietf.org/html/rfc3339).
- "Example: `2016-08-25T21:10:29.600Z`"
returned: on success
type: string
sample: 2016-08-25T21:10:29.600Z
vcn_id:
description:
- The OCID of the VCN the subnet is in.
returned: on success
type: string
sample: ocid1.vcn.oc1..xxxxxxEXAMPLExxxxxx
virtual_router_ip:
description:
- The IP address of the virtual router.
- "Example: `10.0.14.1`"
returned: on success
type: string
sample: 10.0.14.1
virtual_router_mac:
description:
- The MAC address of the virtual router.
- "Example: `00:00:00:00:00:01`"
returned: on success
type: string
sample: 00:00:00:00:00:01
sample: [{
"availability_domain": "Uocm:PHX-AD-1",
"cidr_block": "10.0.1.0/24",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"dhcp_options_id": "ocid1.dhcpoptions.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"dns_label": "subnet123",
"freeform_tags": {'Department': 'Finance'},
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"lifecycle_state": "PROVISIONING",
"prohibit_public_ip_on_vnic": true,
"route_table_id": "ocid1.routetable.oc1..xxxxxxEXAMPLExxxxxx",
"security_list_ids": [],
"subnet_domain_name": "subnet123.vcn1.oraclevcn.com",
"time_created": "2016-08-25T21:10:29.600Z",
"vcn_id": "ocid1.vcn.oc1..xxxxxxEXAMPLExxxxxx",
"virtual_router_ip": "10.0.14.1",
"virtual_router_mac": "00:00:00:00:00:01"
}]
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import oci_common_utils
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceFactsHelperBase,
get_custom_class,
)
# Probe for the OCI python SDK; availability is re-checked in main() so a
# missing dependency surfaces as a clean fail_json message instead of an
# ImportError traceback.
try:
    from oci.core import VirtualNetworkClient

    HAS_OCI_PY_SDK = True
except ImportError:
    HAS_OCI_PY_SDK = False
class SubnetFactsHelperGen(OCIResourceFactsHelperBase):
    """Facts helper for OCI Subnet resources.

    Supported operations: get, list
    """

    def get_required_params_for_get(self):
        return ["subnet_id"]

    def get_required_params_for_list(self):
        return ["compartment_id"]

    def get_resource(self):
        # Single-subnet lookup, retried with backoff on transient failures.
        return oci_common_utils.call_with_backoff(
            self.client.get_subnet, subnet_id=self.module.params.get("subnet_id"),
        )

    def list_resources(self):
        # Forward only the optional filters the user actually supplied.
        filter_names = (
            "vcn_id",
            "display_name",
            "sort_by",
            "sort_order",
            "lifecycle_state",
        )
        supplied_filters = {
            name: self.module.params[name]
            for name in filter_names
            if self.module.params.get(name) is not None
        }
        return oci_common_utils.list_all_resources(
            self.client.list_subnets,
            compartment_id=self.module.params.get("compartment_id"),
            **supplied_filters
        )
# Hook point: get_custom_class returns a user-supplied override class (or a
# no-op default) so deployments can customize the generated helper.
SubnetFactsHelperCustom = get_custom_class("SubnetFactsHelperCustom")
# The custom class precedes the generated one in the MRO so overrides win.
class ResourceFactsHelper(SubnetFactsHelperCustom, SubnetFactsHelperGen):
    pass
def main():
    """Ansible entry point: build the arg spec, run get or list, exit JSON."""
    spec = oci_common_utils.get_common_arg_spec()
    spec.update(
        {
            "subnet_id": {"aliases": ["id"], "type": "str"},
            "compartment_id": {"type": "str"},
            "vcn_id": {"type": "str"},
            "display_name": {"aliases": ["name"], "type": "str"},
            "sort_by": {"type": "str", "choices": ["TIMECREATED", "DISPLAYNAME"]},
            "sort_order": {"type": "str", "choices": ["ASC", "DESC"]},
            "lifecycle_state": {
                "type": "str",
                "choices": [
                    "PROVISIONING",
                    "AVAILABLE",
                    "TERMINATING",
                    "TERMINATED",
                    "UPDATING",
                ],
            },
        }
    )
    module = AnsibleModule(argument_spec=spec)
    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")
    helper = ResourceFactsHelper(
        module=module,
        resource_type="subnet",
        service_client_class=VirtualNetworkClient,
        namespace="core",
    )
    # Dispatch on the supplied parameters: a subnet_id triggers a single get,
    # a compartment_id triggers a list; anything else is a usage error.
    if helper.is_get():
        facts = [helper.get()]
    elif helper.is_list():
        facts = helper.list()
    else:
        facts = []
        helper.fail()
    module.exit_json(subnets=facts)
if __name__ == "__main__":
main()
| 38.282322 | 144 | 0.599145 |
86f23fb78462d680f148ac045f39fe60ee060d12 | 345 | py | Python | sampleprj/pysubpckB/src/sampleprj/pysubpckB/pysubsub/__init__.py | astyl/AcrobatomaticBuildSystem | a8a4858d723a0673eeeb6f039af05dc86be638a9 | [
"BSD-2-Clause-FreeBSD"
] | 10 | 2019-01-07T20:17:05.000Z | 2022-03-07T20:46:58.000Z | sampleprj/pysubpckB/src/sampleprj/pysubpckB/pysubsub/__init__.py | astyl/AcrobatomaticBuildSystem | a8a4858d723a0673eeeb6f039af05dc86be638a9 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2019-09-04T11:27:10.000Z | 2019-09-04T15:02:36.000Z | sampleprj/pysubpckB/src/sampleprj/pysubpckB/pysubsub/__init__.py | astyl/AcrobatomaticBuildSystem | a8a4858d723a0673eeeb6f039af05dc86be638a9 | [
"BSD-2-Clause-FreeBSD"
] | 3 | 2019-08-30T10:01:55.000Z | 2022-01-27T21:06:39.000Z | # -*-coding:Utf-8 -*
# @file __init__.py
#
# Copyright 2016 Airbus Safran Launchers. All rights reserved.
# Use is subject to license terms.
#
# $Id$
# $Date$
#
# module import entry point
# for subpackage sampleprj.pysubpckB.pysubsub
# Import-time tracer for the build-system sample package.
# BUG FIX: the original used Python 2 print *statements*, which are a syntax
# error under Python 3; the parenthesized single-argument form below behaves
# identically on both Python 2 and Python 3.
print('importing sampleprj.pysubpckB.pysubsub ...')
print('importing sampleprj.pysubpckB.pysubsub ... OK')
| 21.5625 | 62 | 0.724638 |
e2577765e2f5f7f4b23d0d55b15fa9ba7873a610 | 2,792 | py | Python | pkg/v1/providers/tests/clustergen/gencluster_params.py | ykakarap/tanzu-framework | 35a410d8ddc8b2f606a66ea0d1d696744b3c89b8 | [
"Apache-2.0"
] | 1 | 2022-03-24T12:13:01.000Z | 2022-03-24T12:13:01.000Z | pkg/v1/providers/tests/clustergen/gencluster_params.py | ykakarap/tanzu-framework | 35a410d8ddc8b2f606a66ea0d1d696744b3c89b8 | [
"Apache-2.0"
] | 5 | 2022-02-08T09:52:10.000Z | 2022-03-29T11:28:40.000Z | pkg/v1/providers/tests/clustergen/gencluster_params.py | ykakarap/tanzu-framework | 35a410d8ddc8b2f606a66ea0d1d696744b3c89b8 | [
"Apache-2.0"
] | 1 | 2021-11-10T11:01:42.000Z | 2021-11-10T11:01:42.000Z | #!/usr/bin/env python3
# Copyright 2020 The TKG Contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
import os
import sys
import hashlib
def write_test_cases(params_file, test_dir):
    """Generate one ``NNNNNNNN.case`` file per row of the pict-produced CSV.

    Each case file starts with a ``#! (seq) EXE: ...`` command line built
    from the row's special/`--` columns, followed by ``KEY: value`` config
    lines for the remaining columns. The file name is a stable 8-digit hash
    of the row's sorted key/value content.
    """
    with open(params_file, 'r') as csv_file:
        reader = csv.DictReader(csv_file, delimiter=',')
        for case_no, row in enumerate(reader, start=1):
            items = sorted(row.items())
            # Stable content hash identifies the parameter combination.
            digest_src = ''.join('{}{}'.format(key, val) for key, val in items)
            case_id = int(hashlib.sha256(digest_src.encode('utf-8')).hexdigest(), 16) % 10**8
            exe_args = []
            config_lines = []
            for key, val in items:
                # _CNAME leads the command line; _PLAN/_INFRA become flags.
                if key == "_CNAME":
                    exe_args.insert(0, val)
                elif key == "_PLAN":
                    exe_args.append('--plan {}'.format(val))
                elif key == "_INFRA":
                    exe_args.append('-i {}'.format(val))
                if key.startswith('--'):
                    if val != "NOTPROVIDED":
                        flag = key.lower()
                        if flag.startswith('--enable-'):
                            exe_args.append('{}={}'.format(flag, val))
                        else:
                            exe_args.append('{} {}'.format(flag, val))
                elif key == "AZURE_CUSTOM_TAGS" and val.startswith('tagKey1='):
                    # pict has no escape char for commas; restore the
                    # intended two-tag value verbatim.
                    config_lines.append('{}: {}'.format(key, 'tagKey1=tagValue1, tagKey2=tagValue2'))
                elif val != "NA":
                    config_lines.append('{}: {}'.format(key, val.replace("<comma>", ",")))
            out_path = os.path.join(test_dir, '%8.8d.case' % case_id)
            with open(out_path, "w") as out:
                out.write('#! ({:04d}) EXE: {}\n\n'.format(case_no, " ".join(exe_args)))
                out.write('\n'.join(config_lines) + '\n')
def main():
    """CLI entry point: validate the argument count, then emit case files."""
    args = sys.argv
    if len(args) != 3:
        print("Usage {} csv_params_file test_data_dir".format(args[0]))
        sys.exit(1)
    write_test_cases(args[1], args[2])
if __name__ == "__main__":
main()
| 38.246575 | 103 | 0.534742 |
fb6b2964f6421ec8e84a4b6943488bc58137e104 | 1,977 | py | Python | medium/python/c0296_636_exclusive-time-of-functions/00_leetcode_0296.py | drunkwater/leetcode | 8cc4a07763e71efbaedb523015f0c1eff2927f60 | [
"Ruby"
] | null | null | null | medium/python/c0296_636_exclusive-time-of-functions/00_leetcode_0296.py | drunkwater/leetcode | 8cc4a07763e71efbaedb523015f0c1eff2927f60 | [
"Ruby"
] | null | null | null | medium/python/c0296_636_exclusive-time-of-functions/00_leetcode_0296.py | drunkwater/leetcode | 8cc4a07763e71efbaedb523015f0c1eff2927f60 | [
"Ruby"
] | 3 | 2018-02-09T02:46:48.000Z | 2021-02-20T08:32:03.000Z | # DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#636. Exclusive Time of Functions
#Given the running logs of n functions that are executed in a nonpreemptive single threaded CPU, find the exclusive time of these functions.
#Each function has a unique id, start from 0 to n-1. A function may be called recursively or by another function.
#A log is a string has this format : function_id:start_or_end:timestamp. For example, "0:start:0" means function 0 starts from the very beginning of time 0. "0:end:0" means function 0 ends to the very end of time 0.
#Exclusive time of a function is defined as the time spent within this function, the time spent by calling other functions should not be considered as this function's exclusive time. You should return the exclusive time of each function sorted by their function id.
#Example 1:
#Input:
#n = 2
#logs =
#["0:start:0",
# "1:start:2",
# "1:end:5",
# "0:end:6"]
#Output:[3, 4]
#Explanation:
#Function 0 starts at time 0, then it executes 2 units of time and reaches the end of time 1.
#Now function 0 calls function 1, function 1 starts at time 2, executes 4 units of time and end at time 5.
#Function 0 is running again at time 6, and also end at the time 6, thus executes 1 unit of time.
#So function 0 totally execute 2 + 1 = 3 units of time, and function 1 totally execute 4 units of time.
#Note:
#Input logs will be sorted by timestamp, NOT log id.
#Your output should be sorted by function id, which means the 0th element of your output corresponds to the exclusive time of function 0.
#Two functions won't start or end at the same time.
#Functions could be called recursively, and will always end.
#1 <= n <= 100
#class Solution(object):
# def exclusiveTime(self, n, logs):
# """
# :type n: int
# :type logs: List[str]
# :rtype: List[int]
# """
# Time Is Money | 50.692308 | 265 | 0.73394 |
e9fb022673cfa409592e5e23f5e60bb5e1f0ccf0 | 6,181 | py | Python | cs-config/cs_config/functions.py | MaxGhenis/Tax-Cruncher | c06fd06ec4d17cc2795f97a6ff088fcd220d3498 | [
"MIT"
] | 1 | 2019-10-15T04:07:56.000Z | 2019-10-15T04:07:56.000Z | cs-config/cs_config/functions.py | MaxGhenis/Tax-Cruncher | c06fd06ec4d17cc2795f97a6ff088fcd220d3498 | [
"MIT"
] | null | null | null | cs-config/cs_config/functions.py | MaxGhenis/Tax-Cruncher | c06fd06ec4d17cc2795f97a6ff088fcd220d3498 | [
"MIT"
] | 1 | 2020-01-03T02:39:54.000Z | 2020-01-03T02:39:54.000Z | import os
import json
import traceback
import paramtools
import pandas as pd
import inspect
from .helpers import convert_defaults, convert_adj
from .outputs import credit_plot, rate_plot, liability_plot
from .constants import MetaParameters
from bokeh.models import ColumnDataSource
from taxcrunch.cruncher import Cruncher, CruncherParams
from taxcrunch.multi_cruncher import Batch
import taxcrunch
from taxcalc import Policy
from IPython.display import HTML
from collections import OrderedDict
# Resolve the installed Tax-Calculator package directory by locating the
# module that defines its Policy class.
TCPATH = inspect.getfile(Policy)
TCDIR = os.path.dirname(TCPATH)
# Load Tax-Calculator's current-law policy JSON and convert it into the
# ParamTools-style defaults consumed by the Compute Studio interface.
with open(os.path.join(TCDIR, "policy_current_law.json"), "r") as f:
    pcl = json.loads(f.read())
RES = convert_defaults(pcl)
class TCParams(paramtools.Parameters):
    """ParamTools parameters seeded with Tax-Calculator's current-law policy."""
    defaults = RES
def get_version():
    """Return the Tax-Cruncher version string shown in the Compute Studio UI."""
    return "Tax-Cruncher v{}".format(taxcrunch.__version__)
def get_inputs(meta_params_dict):
    """Build the default meta/model parameter payload for Compute Studio."""
    meta = MetaParameters()
    meta.adjust(meta_params_dict)

    # Policy defaults restricted to the selected year; only "section_1"
    # parameters (plus the schema entry) are exposed in the UI.
    policy = TCParams()
    policy.set_state(year=meta.year.tolist())
    policy_defaults = OrderedDict(
        (name, spec)
        for name, spec in policy.dump().items()
        if name == "schema" or spec.get("section_1", False)
    )

    # Subset of Cruncher inputs surfaced on the "Tax Information" tab.
    wanted = {
        "mstat",
        "page",
        "sage",
        "depx",
        "dep13",
        "dep17",
        "dep18",
        "pwages",
        "swages",
        "dividends",
        "intrec",
        "stcg",
        "ltcg",
        "otherprop",
        "nonprop",
        "pensions",
        "gssi",
        "ui",
        "proptax",
        "otheritem",
        "childcare",
        "mortgage",
        "mtr_options",
        "schema",
    }
    tax_info = {
        name: spec
        for name, spec in CruncherParams().dump().items()
        if name in wanted
    }

    return {
        "meta_parameters": meta.dump(),
        "model_parameters": {
            "Tax Information": tax_info,
            "Policy": policy_defaults,
        },
    }
def validate_inputs(meta_params_dict, adjustment, errors_warnings):
    """Validate user adjustments, accumulating errors per section."""
    cruncher = CruncherParams()
    cruncher.adjust(adjustment["Tax Information"], raise_errors=False)
    errors_warnings["Tax Information"]["errors"].update(cruncher.errors)

    # Checkbox parameters are UI-only toggles, not real policy parameters.
    policy_adjustment = {
        name: value
        for name, value in adjustment["Policy"].items()
        if not name.endswith("checkbox")
    }
    policy = TCParams()
    policy.adjust(policy_adjustment, raise_errors=False)
    errors_warnings["Policy"]["errors"].update(policy.errors)
    return {"errors_warnings": errors_warnings}
def run_model(meta_params_dict, adjustment):
    """Run Tax-Cruncher under baseline and reform and build C-S outputs.

    Builds a 5000-row dataset in which the variable selected by the user's
    "Calculation Option" is swept from $0 to $500,000 in $100 steps, then
    computes baseline/reform liabilities and average tax rates for plotting.
    """
    meta_params = MetaParameters()
    meta_params.adjust(meta_params_dict)
    policy_mods = convert_adj(adjustment["Policy"], meta_params.year.tolist())
    adjustment["Tax Information"]["year"] = meta_params.year
    params = CruncherParams()
    params.adjust(adjustment["Tax Information"], raise_errors=False)
    newvals = params.specification()
    crunch = Cruncher(inputs=newvals, custom_reform=policy_mods)
    # make dataset for bokeh plots
    ivar = crunch.ivar
    _, mtr_opt, _ = crunch.taxsim_inputs()
    df = pd.concat([ivar] * 5000, ignore_index=True)
    increments = pd.DataFrame(list(range(0, 500000, 100)))
    # Map each Calculation Option to the taxsim input column it varies.
    # The swept column also supplies `span`, the taxpayer's current value.
    option_col = {
        'Taxpayer Earnings': 9,
        'Spouse Earnings': 10,
        'Short Term Gains': 13,
        'Long Term Gains': 14,
        # BUG FIX: span was previously read from ivar[14] (long-term gains)
        # for 'Qualified Dividends' even though column 11 was incremented;
        # every other option reads span from the column it increments.
        'Qualified Dividends': 11,
        'Interest Received': 12,
        'Pensions': 17,
        'Gross Social Security Benefits': 18,
        'Real Estate Taxes Paid': 20,
        'Mortgage': 23,
    }
    col = option_col[mtr_opt]
    span = int(ivar[col])
    df[col] = increments
    b = Batch(df)
    df_base = b.create_table()
    df_reform = b.create_table(policy_mods)
    # compute average tax rates
    df_base['IATR'] = df_base['Individual Income Tax'] / df_base['AGI']
    df_base['PATR'] = df_base['Payroll Tax'] / df_base['AGI']
    df_reform['IATR'] = df_reform['Individual Income Tax'] / df_reform['AGI']
    df_reform['PATR'] = df_reform['Payroll Tax'] / df_reform['AGI']
    df_base['Axis'] = increments
    df_reform['Axis'] = increments
    return comp_output(crunch, df_base, df_reform, span, mtr_opt)
def comp_output(crunch, df_base, df_reform, span, mtr_opt):
    """Assemble renderable plots/tables and downloadable CSVs for C-S."""
    liabilities = liability_plot(df_base, df_reform, span, mtr_opt)
    rates = rate_plot(df_base, df_reform, span, mtr_opt)
    credits = credit_plot(df_base, df_reform, span, mtr_opt)
    basic = crunch.basic_table()
    detail = crunch.calc_table()
    css_classes = "table table-striped table-hover"
    return {
        "renderable": [
            {
                "media_type": "table",
                "title": "Basic Liabilities",
                "data": basic.to_html(classes=css_classes),
            },
            liabilities,
            rates,
            credits,
            {
                "media_type": "table",
                "title": "Calculation of Liabilities",
                "data": detail.to_html(classes=css_classes),
            },
        ],
        "downloadable": [
            {
                "media_type": "CSV",
                "title": "basic_table",
                "data": basic.to_csv(),
            },
            {
                "media_type": "CSV",
                "title": "calculation_table",
                "data": detail.to_csv(),
            },
        ],
    }
| 28.615741 | 87 | 0.620126 |
6ba4740638ee3de5412f75ad8b3cbf68a3da18fc | 102 | py | Python | novice/03-04/Latihan/hello.py | septiannurtrir/praxis-academy | 1ef7f959c372ae991d74ccd373123142c2fbc542 | [
"MIT"
] | 1 | 2019-08-27T17:06:13.000Z | 2019-08-27T17:06:13.000Z | novice/03-04/Latihan/hello.py | septiannurtrir/praxis-academy | 1ef7f959c372ae991d74ccd373123142c2fbc542 | [
"MIT"
] | null | null | null | novice/03-04/Latihan/hello.py | septiannurtrir/praxis-academy | 1ef7f959c372ae991d74ccd373123142c2fbc542 | [
"MIT"
] | null | null | null | from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return 'Hello, World' | 14.571429 | 25 | 0.666667 |
214f95cc8a01e5b753397f318f6a667a7ae6270b | 327 | py | Python | PreProcessingScript/dbConn.py | svobodam/Deep-Learning-Text-Summariser | b429384d171f3f32a0b2fc55e76b16645c615b1d | [
"MIT"
] | 1 | 2018-01-12T06:22:05.000Z | 2018-01-12T06:22:05.000Z | PreProcessingScript/dbConn.py | svobodam/Deep-Learning-Text-Summariser | b429384d171f3f32a0b2fc55e76b16645c615b1d | [
"MIT"
] | null | null | null | PreProcessingScript/dbConn.py | svobodam/Deep-Learning-Text-Summariser | b429384d171f3f32a0b2fc55e76b16645c615b1d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Database connection for DB manipulation
# Import libraries
import sqlite3
from contextlib import closing
# BUG FIX: the original left the connection (and cursor) open if execute()
# raised, and the commit was not guaranteed. closing() releases the
# connection on any exit path, and the connection's own context manager
# commits on success and rolls back on error. (The intermediate cursor is
# unnecessary: Connection.execute creates one implicitly.)
with closing(sqlite3.connect('Data/DUCDataset.db')) as sqlConnection:
    with sqlConnection:
        sqlConnection.execute("DELETE from summDUC WHERE TopicNo = 'D0702A'")
| 19.235294 | 57 | 0.767584 |
88212e0f29d2130bee1c9f9e2819385beabc61a8 | 56,558 | py | Python | netapp/santricity/api/v2/administration_api.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 5 | 2016-08-23T17:52:22.000Z | 2019-05-16T08:45:30.000Z | netapp/santricity/api/v2/administration_api.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 2 | 2016-11-10T05:30:21.000Z | 2019-04-05T15:03:37.000Z | netapp/santricity/api/v2/administration_api.py | NetApp/santricity-webapi-pythonsdk | 1d3df4a00561192f4cdcdd1890f4d27547ed2de2 | [
"BSD-3-Clause-Clear"
] | 7 | 2016-08-25T16:11:44.000Z | 2021-02-22T05:31:25.000Z | #!/usr/bin/env python
# coding: utf-8
"""
AdministrationApi.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from __future__ import absolute_import
import sys
import os
# python 2 and python 3 compatibility library
from six import iteritems
from ....santricity.configuration import Configuration
from ....santricity.api_client import ApiClient
class AdministrationApi(object):
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient(context_path='/devmgr/v2')
self.api_client = config.api_client
    def get_certificate_signing_request(self, **kwargs):
        """
        Retrieves an x509 certificate signing request
        Mode: Both Embedded and Proxy. The response type of this method is a file stream.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_certificate_signing_request(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str filename: fileName
        :return: File
                 If the method is called asynchronously,
                 returns the request thread.
        :raises: ValueError
                   If the required params are not provided or if the response data format is unknown.
                 TypeError:
                    When the data type of response data is different from what we are expecting
                 ApiException:
                    Occurs when we get a HTTP error code (422 and above).
        """
        # Only the keyword names in all_params are accepted; anything else is
        # a caller programming error and raises TypeError.
        all_params = ['filename']
        all_params.append('callback')
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_certificate_signing_request" % key
                )
            params[key] = val
        del params['kwargs']
        resource_path = '/sslconfig/export'.replace('{format}', 'json')
        path_params = {}
        query_params = {}
        # 'filename' is forwarded as an optional query parameter.
        if 'filename' in params:
            query_params['filename'] = params['filename']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        # The CSR is returned as a raw byte stream, hence octet-stream.
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/octet-stream'])
        if not header_params['Accept']:
            del header_params['Accept']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = ['basicAuth']
        # response_type='File' makes the client stream the body to disk.
        response = self.api_client.call_api(resource_path, 'GET',
                                            path_params,
                                            query_params,
                                            header_params,
                                            body=body_params,
                                            post_params=form_params,
                                            files=local_var_files,
                                            response_type='File',
                                            auth_settings=auth_settings,
                                            callback=params.get('callback'))
        return response
def get_client_token(self, **kwargs):
    """Return a secure random token of 16 bytes.

    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param callback function: callback for an asynchronous request
        (optional)
    :return: str
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    # Reject anything beyond the documented keyword arguments.
    for key in kwargs:
        if key not in ('callback',):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_client_token" % key
            )

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/client-token', 'GET',
        {},                      # path params
        {},                      # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='str',
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def get_embedded_local_users(self, system_id, **kwargs):
    """Retrieve the local users and if their password is set.

    Mode: Embedded only.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param str system_id: The unique identifier of the storage-system.
        This may be the id or the WWN. (required)
    :param callback function: callback for an asynchronous request
        (optional)
    :return: list[EmbeddedLocalUserResponse]
    :raises ValueError: if a required parameter is missing or the
        response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('callback',):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_embedded_local_users" % key
            )
    # The storage-system identifier is mandatory.
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `get_embedded_local_users`")

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/storage-systems/{system-id}/local-users', 'GET',
        {'system-id': system_id},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[EmbeddedLocalUserResponse]',
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def get_embedded_local_users_info(self, system_id, **kwargs):
    """Retrieve local users information.

    Mode: Embedded only.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param str system_id: The unique identifier of the storage-system.
        This may be the id or the WWN. (required)
    :param callback function: callback for an asynchronous request
        (optional)
    :return: EmbeddedLocalUserInfoResponse
    :raises ValueError: if a required parameter is missing or the
        response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('callback',):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_embedded_local_users_info" % key
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `get_embedded_local_users_info`")

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/storage-systems/{system-id}/local-users/info', 'GET',
        {'system-id': system_id},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='EmbeddedLocalUserInfoResponse',
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def get_ssl_configuration(self, **kwargs):
    """GET the SSL Configuration.

    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param callback function: callback for an asynchronous request
        (optional)
    :return: SSLCertConfiguration
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('callback',):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ssl_configuration" % key
            )

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/sslconfig', 'GET',
        {},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SSLCertConfiguration',
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def get_symbol_port_enabled(self, system_id, **kwargs):
    """Retrieve if the SYMbol port is enabled.

    Mode: Embedded.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param str system_id: The unique identifier of the storage-system.
        This may be the id or the WWN. (required)
    :param callback function: callback for an asynchronous request
        (optional)
    :return: SymbolPortResponse
    :raises ValueError: if a required parameter is missing or the
        response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('callback',):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_symbol_port_enabled" % key
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `get_symbol_port_enabled`")

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/storage-systems/{system-id}/symbol-port', 'GET',
        {'system-id': system_id},
        {},
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SymbolPortResponse',
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def get_trusted_certificate_authorities(self, **kwargs):
    """Gets the list of known trusted certificate authorities.

    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param bool use_truststore: True to see CA certificates in the
        truststore, false to see certificates in the keystore.
    :param callback function: callback for an asynchronous request
        (optional)
    :return: list[X509CertInfo]
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('use_truststore', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_trusted_certificate_authorities" % key
            )

    # Forward the optional flag only when the caller supplied it.
    query_params = {}
    if 'use_truststore' in kwargs:
        query_params['useTruststore'] = kwargs['use_truststore']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/sslconfig/ca', 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[X509CertInfo]',
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def reload_ssl_configuration(self, **kwargs):
    """Asynchronously reloads the SSL Configuration.

    When this call returns, the reload has been requested.
    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param bool reload_both: True if wanting to restart both
        controllers SSL Configuration; only applies to embedded
        systems.
    :param callback function: callback for an asynchronous request
        (optional)
    :return: None
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('reload_both', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method reload_ssl_configuration" % key
            )

    query_params = {}
    if 'reload_both' in kwargs:
        query_params['reloadBoth'] = kwargs['reload_both']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/sslconfig/reload', 'POST',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def remove_ca(self, alias, **kwargs):
    """Deletes the CA with the given alias.

    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param str alias: (required)
    :param bool use_truststore: True if this CA certificate needs to
        be deleted from the truststore, false otherwise.
    :param callback function: callback for an asynchronous request
        (optional)
    :return: None
    :raises ValueError: if a required parameter is missing or the
        response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('use_truststore', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method remove_ca" % key
            )
    if alias is None:
        raise ValueError("Missing the required parameter `alias` when calling `remove_ca`")

    query_params = {}
    if 'use_truststore' in kwargs:
        query_params['useTruststore'] = kwargs['use_truststore']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/sslconfig/ca/{alias}', 'DELETE',
        {'alias': alias},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def resets_ssl_configuration(self, **kwargs):
    """Reset the webserver back to a self-signed certificate.

    Removes all previously uploaded certificates from the keystore and
    asynchronously reloads the SSL configuration.
    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param bool reload_ssl: Set to false if you don't want to reload
        the SSL Context immediately; default value is true.
    :param callback function: callback for an asynchronous request
        (optional)
    :return: None
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('reload_ssl', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method resets_ssl_configuration" % key
            )

    query_params = {}
    if 'reload_ssl' in kwargs:
        query_params['reloadSSL'] = kwargs['reload_ssl']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/sslconfig/reset', 'POST',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def set_embedded_local_users(self, system_id, **kwargs):
    """Set the password for local users.

    Mode: Embedded only.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param str system_id: The unique identifier of the storage-system.
        This may be the id or the WWN. (required)
    :param EmbeddedLocalUserRequest body:
    :param callback function: callback for an asynchronous request
        (optional)
    :return: None
    :raises ValueError: if a required parameter is missing or the
        response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('body', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_embedded_local_users" % key
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `set_embedded_local_users`")

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/storage-systems/{system-id}/local-users', 'POST',
        {'system-id': system_id},
        {},
        header_params,
        body=kwargs.get('body'),
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def set_ssl_configuration(self, **kwargs):
    """Set the SSL Configuration causing a regeneration of the SSL Certificate.

    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param SSLCertConfiguration body:
    :param callback function: callback for an asynchronous request
        (optional)
    :return: None
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('body', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_ssl_configuration" % key
            )

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/sslconfig', 'POST',
        {},
        {},
        header_params,
        body=kwargs.get('body'),
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def set_symbol_port_enabled(self, system_id, **kwargs):
    """Set if the SYMbol port is enabled.

    Mode: Embedded.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param str system_id: The unique identifier of the storage-system.
        This may be the id or the WWN. (required)
    :param SymbolPortRequest body:
    :param callback function: callback for an asynchronous request
        (optional)
    :return: None
    :raises ValueError: if a required parameter is missing or the
        response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('body', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_symbol_port_enabled" % key
            )
    if system_id is None:
        raise ValueError("Missing the required parameter `system_id` when calling `set_symbol_port_enabled`")

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['application/json'])

    return self.api_client.call_api(
        '/storage-systems/{system-id}/symbol-port', 'POST',
        {'system-id': system_id},
        {},
        header_params,
        body=kwargs.get('body'),
        post_params=[],
        files={},
        response_type=None,
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def upload_ca_certificate(self, **kwargs):
    """Upload the root/intermediate certificates from a certificate authority that signed the certificate used for this server.

    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param bool use_truststore: True if this CA certificate needs to
        be stored in the truststore, false otherwise.
    :param str alias: The user specified alias for this CA certificate
    :param file file: certificate file
    :param callback function: callback for an asynchronous request
        (optional)
    :return: None
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('use_truststore', 'alias', 'file', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method upload_ca_certificate" % key
            )

    query_params = {}
    if 'use_truststore' in kwargs:
        query_params['useTruststore'] = kwargs['use_truststore']
    if 'alias' in kwargs:
        query_params['alias'] = kwargs['alias']

    # The certificate payload travels as a multipart file upload.
    files = {}
    if 'file' in kwargs:
        files['file'] = kwargs['file']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['multipart/form-data'])

    return self.api_client.call_api(
        '/sslconfig/ca', 'POST',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files=files,
        response_type=None,
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
def upload_certificate_signing_request(self, **kwargs):
    """Upload a previously exported certificate signing request.

    Mode: Both Embedded and Proxy.

    The request is synchronous by default; pass a ``callback`` keyword
    (a function invoked with the response) to issue it asynchronously,
    in which case the request thread is returned instead.

    :param file file: certificate file
    :param callback function: callback for an asynchronous request
        (optional)
    :return: list[FileInfo]
    :raises ValueError: if the response data format is unknown
    :raises TypeError: on an unexpected keyword argument or an
        unexpected response data type
    :raises ApiException: on HTTP error codes (422 and above)
    """
    for key in kwargs:
        if key not in ('file', 'callback'):
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method upload_certificate_signing_request" % key
            )

    # The certificate payload travels as a multipart file upload.
    files = {}
    if 'file' in kwargs:
        files['file'] = kwargs['file']

    header_params = {}
    accept = self.api_client.select_header_accept(['application/json'])
    if accept:
        header_params['Accept'] = accept
    header_params['Content-Type'] = \
        self.api_client.select_header_content_type(['multipart/form-data'])

    return self.api_client.call_api(
        '/sslconfig/import', 'POST',
        {},
        {},
        header_params,
        body=None,
        post_params=[],
        files=files,
        response_type='list[FileInfo]',
        auth_settings=['basicAuth'],
        callback=kwargs.get('callback'))
| 37.111549 | 845 | 0.497047 |
d3341d78d040dda21ac7c017f131edddff6deb20 | 204 | py | Python | fitness_app/users/tests/test_models.py | johanoh/fitness_app | bd8bc093058ab9f3873132d93ac3262d117546e0 | [
"MIT"
] | null | null | null | fitness_app/users/tests/test_models.py | johanoh/fitness_app | bd8bc093058ab9f3873132d93ac3262d117546e0 | [
"MIT"
] | null | null | null | fitness_app/users/tests/test_models.py | johanoh/fitness_app | bd8bc093058ab9f3873132d93ac3262d117546e0 | [
"MIT"
] | null | null | null | import pytest
from fitness_app.users.models import User
pytestmark = pytest.mark.django_db
def test_user_get_absolute_url(user: User):
    """The canonical URL for a user is built from the username."""
    expected = f"/users/{user.username}/"
    assert user.get_absolute_url() == expected
| 20.4 | 64 | 0.77451 |
c0e25365c290441f53d3bd1ca056eb822226a614 | 863 | py | Python | xunit-autolabeler-v2/ast_parser/core/test_data/parser/class_wrapped_tests/class_wrapped_test.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 5 | 2019-07-11T17:35:44.000Z | 2021-10-09T01:49:04.000Z | xunit-autolabeler-v2/ast_parser/core/test_data/parser/class_wrapped_tests/class_wrapped_test.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 36 | 2019-08-27T18:20:21.000Z | 2022-01-12T21:29:00.000Z | xunit-autolabeler-v2/ast_parser/core/test_data/parser/class_wrapped_tests/class_wrapped_test.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 13 | 2019-10-30T19:39:51.000Z | 2021-04-04T09:31:52.000Z | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import class_wrapped
class FirstTestClass(unittest.TestCase):
def test_first():
assert class_wrapped.method() == 'main'
class SecondTestClass(unittest.TestCase):
def test_second():
assert class_wrapped.method() == 'main'
| 30.821429 | 74 | 0.746234 |
50e9cb6a57de9302fad127cbb738bc05921c7685 | 4,242 | py | Python | azure-mgmt-iothubprovisioningservices/azure/mgmt/iothubprovisioningservices/iot_dps_client.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | 4 | 2016-06-17T23:25:29.000Z | 2022-03-30T22:37:45.000Z | azure-mgmt-iothubprovisioningservices/azure/mgmt/iothubprovisioningservices/iot_dps_client.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | 54 | 2016-03-25T17:25:01.000Z | 2018-10-22T17:27:54.000Z | azure-mgmt-iothubprovisioningservices/azure/mgmt/iothubprovisioningservices/iot_dps_client.py | v-Ajnava/azure-sdk-for-python | a1f6f80eb5869c5b710e8bfb66146546697e2a6f | [
"MIT"
] | 3 | 2016-05-03T20:49:46.000Z | 2017-10-05T21:05:27.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.operations import Operations
from .operations.dps_certificate_operations import DpsCertificateOperations
from .operations.iot_dps_resource_operations import IotDpsResourceOperations
from .operations.dps_certificates_operations import DpsCertificatesOperations
from . import models
class IotDpsClientConfiguration(AzureConfiguration):
    """Configuration for :class:`IotDpsClient`.

    All parameters passed in are stored as instance attributes.

    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: The subscription identifier.
    :type subscription_id: str
    :param str base_url: Service URL
    """
    def __init__(self, credentials, subscription_id, base_url=None):
        # Reject missing mandatory parameters up front with the standard message.
        for name, value in (('credentials', credentials),
                            ('subscription_id', subscription_id)):
            if value is None:
                raise ValueError("Parameter '%s' must not be None." % name)
        super(IotDpsClientConfiguration, self).__init__(
            base_url or 'https://management.azure.com')
        for agent in ('azure-mgmt-iothubprovisioningservices/{}'.format(VERSION),
                      'Azure-SDK-For-Python'):
            self.add_user_agent(agent)
        self.credentials = credentials
        self.subscription_id = subscription_id
class IotDpsClient(object):
    """Client for the Azure IoT Hub Device Provisioning Service API.

    The service operation groups are exposed as attributes:

    :ivar config: Configuration for client.
    :vartype config: IotDpsClientConfiguration
    :ivar operations: Operations operations
    :vartype operations: azure.mgmt.iothubprovisioningservices.operations.Operations
    :ivar dps_certificate: DpsCertificate operations
    :vartype dps_certificate: azure.mgmt.iothubprovisioningservices.operations.DpsCertificateOperations
    :ivar iot_dps_resource: IotDpsResource operations
    :vartype iot_dps_resource: azure.mgmt.iothubprovisioningservices.operations.IotDpsResourceOperations
    :ivar dps_certificates: DpsCertificates operations
    :vartype dps_certificates: azure.mgmt.iothubprovisioningservices.operations.DpsCertificatesOperations

    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: The subscription identifier.
    :type subscription_id: str
    :param str base_url: Service URL
    """
    def __init__(self, credentials, subscription_id, base_url=None):
        self.config = IotDpsClientConfiguration(credentials, subscription_id, base_url)
        self._client = ServiceClient(self.config.credentials, self.config)
        # Collect the model classes exported by the generated ``models`` module.
        client_models = {name: value
                         for name, value in models.__dict__.items()
                         if isinstance(value, type)}
        self.api_version = '2017-11-15'
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        # Every operation group is wired with the same four collaborators.
        op_args = (self._client, self.config, self._serialize, self._deserialize)
        self.operations = Operations(*op_args)
        self.dps_certificate = DpsCertificateOperations(*op_args)
        self.iot_dps_resource = IotDpsResourceOperations(*op_args)
        self.dps_certificates = DpsCertificatesOperations(*op_args)
| 43.731959 | 105 | 0.727723 |
7eea3b040ce6ace72b6f6038a0e1d15d4e6ad53a | 975 | py | Python | python/test/test_env_from_source.py | adriangonz/seldon-deploy-sdk | c5504838630a87053387cec57ec2e1e7251971e2 | [
"Apache-2.0"
] | 6 | 2021-02-18T14:37:54.000Z | 2022-01-13T13:27:43.000Z | python/test/test_env_from_source.py | adriangonz/seldon-deploy-sdk | c5504838630a87053387cec57ec2e1e7251971e2 | [
"Apache-2.0"
] | 14 | 2021-01-04T16:32:03.000Z | 2021-12-13T17:53:59.000Z | python/test/test_env_from_source.py | adriangonz/seldon-deploy-sdk | c5504838630a87053387cec57ec2e1e7251971e2 | [
"Apache-2.0"
] | 7 | 2021-03-17T09:05:55.000Z | 2022-01-05T10:39:56.000Z | # coding: utf-8
"""
Seldon Deploy API
API to interact and manage the lifecycle of your machine learning models deployed through Seldon Deploy. # noqa: E501
OpenAPI spec version: v1alpha1
Contact: hello@seldon.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import seldon_deploy_sdk
from seldon_deploy_sdk.models.env_from_source import EnvFromSource # noqa: E501
from seldon_deploy_sdk.rest import ApiException
class TestEnvFromSource(unittest.TestCase):
    """Generated unit test stubs for the EnvFromSource model."""
    def setUp(self):
        # No fixtures are needed for these stubs.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def testEnvFromSource(self):
        """Test EnvFromSource"""
        # FIXME: construct object with mandatory attributes with example values
        # model = seldon_deploy_sdk.models.env_from_source.EnvFromSource()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this stub module directly as a unittest script.
    unittest.main()
| 23.780488 | 122 | 0.718974 |
1905f0888299e7ae9f86d3b4e48425451fb03697 | 1,247 | py | Python | clients/client/python/test/test_v0alpha0_api.py | ory/sdk | 9849c6115f44f4b7612ad246124d80b4401fd730 | [
"Apache-2.0"
] | 77 | 2020-02-14T17:27:36.000Z | 2022-03-25T08:44:52.000Z | clients/client/python/test/test_v0alpha0_api.py | vinckr/sdk | 5b93557835af7ad3662ef620b3ef10729149d484 | [
"Apache-2.0"
] | 125 | 2020-02-07T21:45:52.000Z | 2022-03-31T12:54:24.000Z | clients/client/python/test/test_v0alpha0_api.py | vinckr/sdk | 5b93557835af7ad3662ef620b3ef10729149d484 | [
"Apache-2.0"
] | 44 | 2020-01-31T22:05:47.000Z | 2022-03-09T14:41:22.000Z | """
Ory APIs
Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers. # noqa: E501
The version of the OpenAPI document: v0.0.1-alpha.30
Contact: support@ory.sh
Generated by: https://openapi-generator.tech
"""
import unittest
import ory_client
from ory_client.api.v0alpha0_api import V0alpha0Api # noqa: E501
class TestV0alpha0Api(unittest.TestCase):
    """Generated unit test stubs for the V0alpha0Api operations."""
    def setUp(self):
        # A fresh API client is created for every test.
        self.api = V0alpha0Api()  # noqa: E501
    def tearDown(self):
        # Nothing to clean up.
        pass
    def test_create_project(self):
        """Test case for create_project
        Create a Project  # noqa: E501
        """
        pass
    def test_get_project(self):
        """Test case for get_project
        Get a Project  # noqa: E501
        """
        pass
    def test_list_projects(self):
        """Test case for list_projects
        List All Projects  # noqa: E501
        """
        pass
    def test_update_project(self):
        """Test case for update_project
        Update a Project  # noqa: E501
        """
        pass
if __name__ == '__main__':
    # Allow running this stub module directly as a unittest script.
    unittest.main()
| 21.5 | 194 | 0.630313 |
5a47a06b7186bc9ffd89c97dd255290db55c3086 | 19,610 | py | Python | mars/serialize/tests/test_serialize.py | immortalFrogJiang/mars | 93c786e38bdc0fbb483282d7792379db0345a3b6 | [
"Apache-2.0"
] | 1 | 2019-02-01T07:41:48.000Z | 2019-02-01T07:41:48.000Z | mars/serialize/tests/test_serialize.py | immortalFrogJiang/mars | 93c786e38bdc0fbb483282d7792379db0345a3b6 | [
"Apache-2.0"
] | null | null | null | mars/serialize/tests/test_serialize.py | immortalFrogJiang/mars | 93c786e38bdc0fbb483282d7792379db0345a3b6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 1999-2018 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import os
import tempfile
import unittest
import numpy as np
try:
import pyarrow
except ImportError:
pyarrow = None
try:
import pandas as pd
except ImportError:
pd = None
from mars.compat import six, OrderedDict, BytesIO
from mars.lib import sparse
from mars.serialize.core import Serializable, IdentityField, StringField, UnicodeField, \
BytesField, Int8Field, Int16Field, Int32Field, Int64Field, UInt8Field, UInt16Field, \
UInt32Field, UInt64Field, Float16Field, Float32Field, Float64Field, BoolField, \
Datetime64Field, Timedelta64Field, DataTypeField, KeyField, ReferenceField, OneOfField, \
ListField, NDArrayField, DictField, TupleField, ValueType, serializes, deserializes, \
IndexField, SeriesField, DataFrameField, SliceField, ProviderType, AttributeAsDict
from mars.serialize import dataserializer
from mars.serialize.pbserializer import ProtobufSerializeProvider
from mars.serialize.jsonserializer import JsonSerializeProvider
from mars.core import BaseWithKey
from mars.utils import to_binary, to_text
class Node1(Serializable):
    """Serialization fixture covering every scalar field type plus references.

    NOTE: field declaration order is significant for the serializer metaclass;
    do not reorder these attributes.
    """
    a = IdentityField('a', ValueType.string)
    b1 = Int8Field('b1')
    b2 = Int16Field('b2')
    b3 = Int32Field('b3')
    b4 = Int64Field('b4')
    c1 = UInt8Field('c1')
    c2 = UInt16Field('c2')
    c3 = UInt32Field('c3')
    c4 = UInt64Field('c4')
    d1 = Float16Field('d1')
    d2 = Float32Field('d2')
    d3 = Float64Field('d3')
    e = BoolField('e')
    f = KeyField('f')
    g = ReferenceField('g', 'Node2')
    h = ListField('h')
    # Self-referencing list field: elements are themselves Node1 instances.
    i = ListField('i', ValueType.reference('self'))
    def __new__(cls, *args, **kwargs):
        # Dispatch construction to the Node8 subclass when a == 'test1';
        # the tests rely on this to verify subclass handling on deserialization.
        if 'a' in kwargs and kwargs['a'] == 'test1':
            return object.__new__(Node8)
        return object.__new__(cls)
    @classmethod
    def cls(cls, provider):
        # Protobuf serialization is backed by the generated Node1Def message.
        if provider.type == ProviderType.protobuf:
            from mars.serialize.tests.testser_pb2 import Node1Def
            return Node1Def
        return super(Node1, cls).cls(provider)
class Node8(Node1):
    """Subclass that Node1.__new__ dispatches to when constructed with a == 'test1'."""
    pass
class Node2(BaseWithKey, Serializable):
    """Keyed serialization fixture with nested string lists and an int list."""
    a = ListField('a', ValueType.list(ValueType.string))
    # Private key/id/name fields required by the BaseWithKey mixin.
    _key = StringField('key')
    _id = StringField('id')
    _name = UnicodeField('name')
    data = ListField('data', ValueType.int32)
    @classmethod
    def cls(cls, provider):
        # Protobuf serialization is backed by the generated Node2Def message.
        if provider.type == ProviderType.protobuf:
            from mars.serialize.tests.testser_pb2 import Node2Def
            return Node2Def
        return super(Node2, cls).cls(provider)
class Node3(Serializable):
    """Fixture for OneOfField: ``value`` holds either a Node1 ('n1') or Node2 ('n2')."""
    value = OneOfField('value', n1='Node1', n2='Node2')
    @classmethod
    def cls(cls, provider):
        # Protobuf serialization is backed by the generated Node3Def message.
        if provider.type == ProviderType.protobuf:
            from mars.serialize.tests.testser_pb2 import Node3Def
            return Node3Def
        return super(Node3, cls).cls(provider)
class Node5(AttributeAsDict):
    """Attribute-dict fixture carrying a string and a slice."""
    a = StringField('a')
    b = SliceField('b')
class Node6(AttributeAsDict):
    """Attribute-dict fixture whose construction dispatches to Node7 for non-zero ids."""
    nid = IdentityField('id', ValueType.int64)
    b = Int32Field('b')
    def __new__(cls, *args, **kwargs):
        # Any non-zero identity produces the Node7 subclass instead of Node6.
        if 'nid' in kwargs and kwargs['nid'] != 0:
            return object.__new__(Node7)
        return object.__new__(cls)
class Node7(Node6):
    """Subclass produced by Node6.__new__ for non-zero ``nid`` values."""
    pass
class Node4(AttributeAsDict):
    """Attribute-dict fixture covering bytes, ndarray, datetime/timedelta, dtype,
    dict/tuple fields, references, one-of fields and pandas Index/Series/DataFrame.

    The serialized tags (second argument of each field) are deliberately offset
    from the attribute names (a -> 'b', b -> 'c', ...) to exercise tag mapping.
    """
    attr_tag = 'attr'
    a = BytesField('b')
    b = NDArrayField('c')
    c = Datetime64Field('d')
    d = Timedelta64Field('e')
    e = DataTypeField('f')
    f = DictField('g', ValueType.string, ValueType.list(ValueType.bool))
    g = DictField('h')
    h = TupleField('i', ValueType.int64, ValueType.unicode, ValueType.string, ValueType.float32,
                   ValueType.datetime64, ValueType.timedelta64, ValueType.dtype)
    i = TupleField('j', ValueType.slice)
    j = ReferenceField('k', Node5)
    k = ListField('l', ValueType.reference('Node5'))
    l = OneOfField('m', n5=Node5, n6=Node6)
    m = IndexField('n')
    mm = IndexField('mn')
    n = SeriesField('o')
    o = DataFrameField('p')
    p = ListField('q')
    @classmethod
    def cls(cls, provider):
        # Protobuf serialization is backed by the generated Node4Def message.
        if provider.type == ProviderType.protobuf:
            from mars.serialize.tests.testser_pb2 import Node4Def
            return Node4Def
        return super(Node4, cls).cls(provider)
class Test(unittest.TestCase):
    """Round-trip tests for the protobuf/JSON serialize providers and the
    raw data (de)serialization helpers in ``dataserializer``."""
    def testPBSerialize(self):
        """Serialize/deserialize a Node2/Node3 object graph via the protobuf provider."""
        provider = ProtobufSerializeProvider()
        node2 = Node2(a=[['ss'], ['dd']], data=[3, 7, 212])
        node1 = Node1(a='test1',
                      b1=-2, b2=2000, b3=-5000, b4=500000,
                      c1=2, c2=2000, c3=5000, c4=500000,
                      d1=2.5, d2=7.37, d3=5.976321,
                      e=False,
                      f=node2,
                      g=Node2(a=[['1', '2'], ['3', '4']]),
                      h=[[2, 3], node2, True, {1: node2}, np.datetime64('1066-10-13'), np.timedelta64(1, 'D')],
                      i=[Node1(b1=111), Node1(b1=222)])
        node3 = Node3(value=node1)
        serials = serializes(provider, [node2, node3])
        d_node2, d_node3 = deserializes(provider, [Node2, Node3], serials)
        self.assertIsNot(node2, d_node2)
        self.assertEqual(node2.a, d_node2.a)
        self.assertEqual(node2.data, d_node2.data)
        self.assertIsNot(node3, d_node3)
        # a == 'test1' must dispatch to the Node8 subclass on deserialization.
        self.assertIsInstance(d_node3.value, Node8)
        self.assertIsNot(node3.value, d_node3.value)
        self.assertEqual(node3.value.a, d_node3.value.a)
        self.assertEqual(node3.value.b1, d_node3.value.b1)
        self.assertEqual(node3.value.b2, d_node3.value.b2)
        self.assertEqual(node3.value.b3, d_node3.value.b3)
        self.assertEqual(node3.value.b4, d_node3.value.b4)
        self.assertEqual(node3.value.c1, d_node3.value.c1)
        self.assertEqual(node3.value.c2, d_node3.value.c2)
        self.assertEqual(node3.value.c3, d_node3.value.c3)
        self.assertEqual(node3.value.c4, d_node3.value.c4)
        # Reduced float precision is expected for float16/float32 fields.
        self.assertAlmostEqual(node3.value.d1, d_node3.value.d1, places=2)
        self.assertAlmostEqual(node3.value.d2, d_node3.value.d2, places=4)
        self.assertAlmostEqual(node3.value.d3, d_node3.value.d3)
        self.assertEqual(node3.value.e, d_node3.value.e)
        self.assertIsNot(node3.value.f, d_node3.value.f)
        self.assertEqual(node3.value.f.a, d_node3.value.f.a)
        self.assertIsNot(node3.value.g, d_node3.value.g)
        self.assertEqual(node3.value.g.a, d_node3.value.g.a)
        self.assertEqual(node3.value.h[0], d_node3.value.h[0])
        self.assertNotIsInstance(d_node3.value.h[1], six.string_types)
        # The shared node2 reference must deserialize to the same object.
        self.assertIs(d_node3.value.h[1], d_node3.value.f)
        self.assertEqual(node3.value.h[2], True)
        self.assertEqual([n.b1 for n in node3.value.i], [n.b1 for n in d_node3.value.i])
        self.assertNotIsInstance(node3.value.i[0], Node8)
    def testJSONSerialize(self):
        """Serialize/deserialize a Node2/Node3 object graph via the JSON provider."""
        provider = JsonSerializeProvider()
        node2 = Node2(a=[['ss'], ['dd']], data=[3, 7, 212])
        node1 = Node1(a='test1',
                      b1=2, b2=2000, b3=5000, b4=500000,
                      c1=2, c2=2000, c3=5000, c4=500000,
                      d1=2.5, d2=7.37, d3=5.976321,
                      e=False,
                      f=node2,
                      g=Node2(a=[['1', '2'], ['3', '4']]),
                      h=[[2, 3], node2, True, {1: node2}, np.datetime64('1066-10-13'), np.timedelta64(1, 'D')],
                      i=[Node1(b1=111), Node1(b1=222)])
        node3 = Node3(value=node1)
        serials = serializes(provider, [node2, node3])
        # Force a real JSON round-trip of the serialized payloads.
        serials = [json.loads(json.dumps(s), object_hook=OrderedDict) for s in serials]
        d_node2, d_node3 = deserializes(provider, [Node2, Node3], serials)
        self.assertIsNot(node2, d_node2)
        self.assertEqual(node2.a, d_node2.a)
        self.assertEqual(node2.data, d_node2.data)
        self.assertIsNot(node3, d_node3)
        # a == 'test1' must dispatch to the Node8 subclass on deserialization.
        self.assertIsInstance(d_node3.value, Node8)
        self.assertIsNot(node3.value, d_node3.value)
        self.assertEqual(node3.value.a, d_node3.value.a)
        self.assertEqual(node3.value.b1, d_node3.value.b1)
        self.assertEqual(node3.value.b2, d_node3.value.b2)
        self.assertEqual(node3.value.b3, d_node3.value.b3)
        self.assertEqual(node3.value.b4, d_node3.value.b4)
        self.assertEqual(node3.value.c1, d_node3.value.c1)
        self.assertEqual(node3.value.c2, d_node3.value.c2)
        self.assertEqual(node3.value.c3, d_node3.value.c3)
        self.assertEqual(node3.value.c4, d_node3.value.c4)
        # Reduced float precision is expected for float16/float32 fields.
        self.assertAlmostEqual(node3.value.d1, d_node3.value.d1, places=2)
        self.assertAlmostEqual(node3.value.d2, d_node3.value.d2, places=4)
        self.assertAlmostEqual(node3.value.d3, d_node3.value.d3)
        self.assertEqual(node3.value.e, d_node3.value.e)
        self.assertIsNot(node3.value.f, d_node3.value.f)
        self.assertEqual(node3.value.f.a, d_node3.value.f.a)
        self.assertIsNot(node3.value.g, d_node3.value.g)
        self.assertEqual(node3.value.g.a, d_node3.value.g.a)
        self.assertEqual(node3.value.h[0], d_node3.value.h[0])
        self.assertNotIsInstance(d_node3.value.h[1], six.string_types)
        # The shared node2 reference must deserialize to the same object.
        self.assertIs(d_node3.value.h[1], d_node3.value.f)
        self.assertEqual(node3.value.h[2], True)
        self.assertEqual([n.b1 for n in node3.value.i], [n.b1 for n in d_node3.value.i])
        self.assertNotIsInstance(node3.value.i[0], Node8)
    def testAttributeAsDict(self):
        """Round-trip a Node4 carrying every AttributeAsDict field kind through
        both providers (pandas-backed fields only when pandas is available)."""
        other_data = {}
        if pd:
            df = pd.DataFrame({'a': [1, 2, 3], 'b': [to_text('测试'), to_binary('属性'), 'c']},
                              index=[[0, 0, 1], ['测试', '属性', '测试']])
            other_data['m'] = df.columns
            other_data['mm'] = df.index
            other_data['n'] = df['b']
            other_data['o'] = df
            other_data['p'] = [df.columns, df.index, df['a'], df]
        node4 = Node4(a=to_binary('中文'),
                      b=np.random.randint(4, size=(3, 4)),
                      c=np.datetime64(datetime.datetime.now()),
                      d=np.timedelta64(datetime.timedelta(seconds=1234)),
                      e=np.dtype('int'),
                      f={'a': [True, False, False], 'd': [False, None]},
                      h=(1234, to_text('测试'), '属性', None, np.datetime64('1066-10-13'),
                         np.timedelta64(1, 'D'), np.dtype([('x', 'i4'), ('y', 'f4')])),
                      i=(slice(10), slice(0, 2), None, slice(2, 0, -1)),
                      j=Node5(a='aa', b=slice(1, 100, 3)),
                      k=[Node5(a='bb', b=slice(200, -1, -4)), None],
                      l=Node6(b=3, nid=1), **other_data)
        pbs = ProtobufSerializeProvider()
        serial = node4.serialize(pbs)
        d_node4 = Node4.deserialize(pbs, serial)
        self.assertEqual(node4.a, d_node4.a)
        np.testing.assert_array_equal(node4.b, d_node4.b)
        self.assertEqual(node4.c, d_node4.c)
        self.assertEqual(node4.d, d_node4.d)
        self.assertEqual(node4.e, d_node4.e)
        self.assertEqual(node4.f, d_node4.f)
        # 'g' was never assigned, so it must not appear after deserialization.
        self.assertFalse(hasattr(d_node4, 'g'))
        self.assertEqual(node4.h, d_node4.h)
        self.assertEqual(node4.i, d_node4.i)
        self.assertEqual(node4.j.a, d_node4.j.a)
        self.assertEqual(node4.j.b, d_node4.j.b)
        self.assertEqual(node4.k[0].a, d_node4.k[0].a)
        self.assertEqual(node4.k[0].b, d_node4.k[0].b)
        self.assertIsNone(d_node4.k[1])
        # nid=1 dispatches Node6 construction to Node7.
        self.assertIsInstance(d_node4.l, Node7)
        self.assertEqual(d_node4.l.b, 3)
        if pd:
            pd.testing.assert_index_equal(node4.m, d_node4.m)
            pd.testing.assert_index_equal(node4.mm, d_node4.mm)
            pd.testing.assert_series_equal(node4.n, d_node4.n)
            pd.testing.assert_frame_equal(node4.o, d_node4.o)
            pd.testing.assert_index_equal(node4.p[0], d_node4.p[0])
            pd.testing.assert_index_equal(node4.p[1], d_node4.p[1])
            pd.testing.assert_series_equal(node4.p[2], d_node4.p[2])
            pd.testing.assert_frame_equal(node4.p[3], d_node4.p[3])
        jss = JsonSerializeProvider()
        serial = node4.serialize(jss)
        serial = json.loads(json.dumps(serial), object_hook=OrderedDict)
        d_node4 = Node4.deserialize(jss, serial)
        self.assertEqual(node4.a, d_node4.a)
        np.testing.assert_array_equal(node4.b, d_node4.b)
        self.assertEqual(node4.c, d_node4.c)
        self.assertEqual(node4.d, d_node4.d)
        self.assertEqual(node4.e, d_node4.e)
        self.assertEqual(node4.f, d_node4.f)
        self.assertFalse(hasattr(d_node4, 'g'))
        self.assertEqual(node4.h, d_node4.h)
        self.assertEqual(node4.i, d_node4.i)
        self.assertEqual(node4.j.a, d_node4.j.a)
        # NOTE(review): unlike the protobuf branch, j.b and k[0].b are not
        # asserted here — presumably slice round-trip differs via JSON; confirm.
        self.assertEqual(node4.k[0].a, d_node4.k[0].a)
        self.assertIsNone(d_node4.k[1])
        self.assertIsInstance(d_node4.l, Node7)
        self.assertEqual(d_node4.l.b, 3)
        if pd:
            pd.testing.assert_index_equal(node4.m, d_node4.m)
            pd.testing.assert_index_equal(node4.mm, d_node4.mm)
            pd.testing.assert_series_equal(node4.n, d_node4.n)
            pd.testing.assert_frame_equal(node4.o, d_node4.o)
            pd.testing.assert_index_equal(node4.p[0], d_node4.p[0])
            pd.testing.assert_index_equal(node4.p[1], d_node4.p[1])
            pd.testing.assert_series_equal(node4.p[2], d_node4.p[2])
            pd.testing.assert_frame_equal(node4.p[3], d_node4.p[3])
    def testException(self):
        """Unsupported element types in a plain list field raise TypeError on serialize."""
        node1 = Node1(h=[object()])
        pbs = ProtobufSerializeProvider()
        with self.assertRaises(TypeError):
            node1.serialize(pbs)
        jss = JsonSerializeProvider()
        with self.assertRaises(TypeError):
            node1.serialize(jss)
    def testDataSerialize(self):
        """dataserializer dumps/loads round-trips for ndarrays (optionally
        compressed), file-based dump/load, and sparse matrices."""
        try:
            import numpy as np
            from numpy.testing import assert_array_equal
        except ImportError:
            np = None
        try:
            import scipy.sparse as sps
        except ImportError:
            sps = None
        if np:
            array = np.random.rand(1000, 100)
            assert_array_equal(array, dataserializer.loads(dataserializer.dumps(array)))
            assert_array_equal(array, dataserializer.loads(dataserializer.dumps(
                array, compress=dataserializer.COMPRESS_FLAG_LZ4)))
            array = np.random.rand(1000, 100)
            assert_array_equal(array, dataserializer.load(BytesIO(dataserializer.dumps(array))))
            assert_array_equal(array, dataserializer.load(BytesIO(dataserializer.dumps(
                array, compress=dataserializer.COMPRESS_FLAG_LZ4))))
            array = np.random.rand(1000, 100).T  # test non c-contiguous
            assert_array_equal(array, dataserializer.loads(dataserializer.dumps(array)))
            assert_array_equal(array, dataserializer.loads(dataserializer.dumps(
                array, compress=dataserializer.COMPRESS_FLAG_LZ4)))
            array = np.float64(0.2345)
            assert_array_equal(array, dataserializer.loads(dataserializer.dumps(array)))
            assert_array_equal(array, dataserializer.loads(dataserializer.dumps(
                array, compress=dataserializer.COMPRESS_FLAG_LZ4)))
        fn = os.path.join(tempfile.gettempdir(), 'test_dump_file_%d.bin' % id(self))
        try:
            array = np.random.rand(1000, 100).T  # test non c-contiguous
            with open(fn, 'wb') as dump_file:
                dataserializer.dump(array, dump_file)
            with open(fn, 'rb') as dump_file:
                assert_array_equal(array, dataserializer.load(dump_file))
            with open(fn, 'wb') as dump_file:
                dataserializer.dump(array, dump_file,
                                    compress=dataserializer.COMPRESS_FLAG_LZ4)
            with open(fn, 'rb') as dump_file:
                assert_array_equal(array, dataserializer.load(dump_file))
        finally:
            if os.path.exists(fn):
                os.unlink(fn)
        if sps:
            mat = sparse.SparseMatrix(sps.random(100, 100, 0.1, format='csr'))
            des_mat = dataserializer.loads(dataserializer.dumps(mat))
            self.assertTrue((mat.spmatrix != des_mat.spmatrix).nnz == 0)
            des_mat = dataserializer.loads(dataserializer.dumps(
                mat, compress=dataserializer.COMPRESS_FLAG_LZ4))
            self.assertTrue((mat.spmatrix != des_mat.spmatrix).nnz == 0)
    @unittest.skipIf(pyarrow is None, 'PyArrow is not installed.')
    def testArrowSerialize(self):
        """pyarrow serialize/deserialize round-trips using the Mars serialize context."""
        try:
            import numpy as np
            from numpy.testing import assert_array_equal
        except ImportError:
            np = None
        try:
            import scipy.sparse as sps
        except ImportError:
            sps = None
        from mars.serialize.dataserializer import mars_serialize_context
        context = mars_serialize_context()
        if np:
            array = np.random.rand(1000, 100)
            assert_array_equal(array, pyarrow.deserialize(pyarrow.serialize(array, context).to_buffer(), context))
        if sps:
            mat = sparse.SparseMatrix(sps.random(100, 100, 0.1, format='csr'))
            des_mat = pyarrow.deserialize(pyarrow.serialize(mat, context).to_buffer(), context)
            self.assertTrue((mat.spmatrix != des_mat.spmatrix).nnz == 0)
        if np and sps:
            array = np.random.rand(1000, 100)
            mat = sparse.SparseMatrix(sps.random(100, 100, 0.1, format='csr'))
            tp = (array, mat)
            des_tp = pyarrow.deserialize(pyarrow.serialize(tp, context).to_buffer(), context)
            assert_array_equal(tp[0], des_tp[0])
            self.assertTrue((tp[1].spmatrix != des_tp[1].spmatrix).nnz == 0)
    @unittest.skipIf(pyarrow is None, 'PyArrow is not installed.')
    def testCompressIO(self):
        """Chunked CompressBufferReader/DecompressBufferWriter round-trip."""
        if not np:
            return
        import pyarrow
        from numpy.testing import assert_array_equal
        data = np.random.random((1000, 100))
        serialized = pyarrow.serialize(data).to_buffer()
        bio = BytesIO()
        # Compress in 128-byte chunks to exercise streaming reads.
        reader = dataserializer.CompressBufferReader(pyarrow.py_buffer(serialized),
                                                     dataserializer.COMPRESS_FLAG_LZ4)
        while True:
            block = reader.read(128)
            if not block:
                break
            bio.write(block)
        compressed = bio.getvalue()
        assert_array_equal(data, dataserializer.loads(compressed))
        # Decompress in 128-byte chunks into a pre-sized sink buffer.
        data_sink = bytearray(len(serialized))
        compressed_mv = memoryview(compressed)
        writer = dataserializer.DecompressBufferWriter(pyarrow.py_buffer(data_sink))
        pos = 0
        while pos < len(compressed):
            endpos = min(pos + 128, len(compressed))
            writer.write(compressed_mv[pos:endpos])
            pos = endpos
        assert_array_equal(data, pyarrow.deserialize(data_sink))
| 40.939457 | 114 | 0.618307 |
33fcf628addb572db39df30e31d56f9e18af4d3c | 399 | py | Python | geolocation/asgi.py | GiovannaK/geolocation-system-django | b7938bd852fee314192fb9965174fd67325e77bb | [
"Unlicense"
] | null | null | null | geolocation/asgi.py | GiovannaK/geolocation-system-django | b7938bd852fee314192fb9965174fd67325e77bb | [
"Unlicense"
] | null | null | null | geolocation/asgi.py | GiovannaK/geolocation-system-django | b7938bd852fee314192fb9965174fd67325e77bb | [
"Unlicense"
] | null | null | null | """
ASGI config for geolocation project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings module; ``setdefault`` keeps an
# externally exported DJANGO_SETTINGS_MODULE untouched.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geolocation.settings')
# Module-level ASGI callable that ASGI servers (uvicorn, daphne, ...) look up.
application = get_asgi_application()
| 23.470588 | 78 | 0.789474 |
6e9e47bfe494043e4d70a05208766f1ba74784ed | 4,573 | py | Python | qa/rpc-tests/multi_rpc.py | FusionEnergyX/FusionX | c8d72602f2050b5940fb182f9645cd0712cd2784 | [
"MIT"
] | 1 | 2019-03-20T20:33:48.000Z | 2019-03-20T20:33:48.000Z | qa/rpc-tests/multi_rpc.py | FusionEnerqyX/FusionX | 4e0d6bd3964f3961a16fc8ea8e8f749bb5c61d6a | [
"MIT"
] | null | null | null | qa/rpc-tests/multi_rpc.py | FusionEnerqyX/FusionX | 4e0d6bd3964f3961a16fc8ea8e8f749bb5c61d6a | [
"MIT"
] | 2 | 2019-05-23T15:09:41.000Z | 2019-07-15T13:02:56.000Z | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test multiple rpc user config option rpcauth
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import str_to_b64str, assert_equal
import os
import http.client
import urllib.parse
class HTTPBasicsTest(BitcoinTestFramework):
    """Check that multiple rpcauth entries in fusion.conf each grant RPC access.

    Fixes over the previous version: the six copies of the raw HTTP
    request/response sequence are factored into ``_is_unauthorized``, the
    connection is now closed even when a request raises, and the unused
    ``rpcauth``/``rpcauth2`` locals in ``run_test`` are gone.
    """

    def __init__(self):
        super().__init__()
        self.setup_clean_chain = False
        self.num_nodes = 1

    def setup_chain(self):
        super().setup_chain()
        # Append rpcauth entries to fusion.conf before the node starts so the
        # credentials below are accepted.
        rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
        rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
        with open(os.path.join(self.options.tmpdir+"/node0", "fusion.conf"), 'a', encoding='utf8') as f:
            f.write(rpcauth+"\n")
            f.write(rpcauth2+"\n")

    def setup_network(self):
        self.nodes = self.setup_nodes()

    def _is_unauthorized(self, url, authpair):
        """POST a getbestblockhash RPC with the given 'user:password' pair.

        Returns True when the node rejects the credentials (HTTP 401).
        """
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = http.client.HTTPConnection(url.hostname, url.port)
        try:
            conn.connect()
            conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
            status = conn.getresponse().status
        finally:
            conn.close()
        return status == 401

    def run_test(self):
        ##################################################
        # Check correctness of the rpcauth config option #
        ##################################################
        url = urllib.parse.urlparse(self.nodes[0].url)
        # Old authpair from the node's default credentials.
        authpair = url.username + ':' + url.password
        # Passwords matching the rpcauth hashes written in setup_chain
        # (generated via the share/rpcuser tool).
        password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
        password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="

        # The original credentials still work.
        assert_equal(self._is_unauthorized(url, authpair), False)
        # New rpcauth user 'rt' works with its generated password.
        assert_equal(self._is_unauthorized(url, "rt:" + password), False)
        # Wrong login name with rt's password is rejected.
        assert_equal(self._is_unauthorized(url, "rtwrong:" + password), True)
        # Wrong password for rt is rejected.
        assert_equal(self._is_unauthorized(url, "rt:" + password + "wrong"), True)
        # Second rpcauth user 'rt2' works with its own password.
        assert_equal(self._is_unauthorized(url, "rt2:" + password2), False)
        # Wrong password for rt2 is rejected.
        assert_equal(self._is_unauthorized(url, "rt2:" + password2 + "wrong"), True)
if __name__ == '__main__':
    # Standard entry point for a framework functional test.
    HTTPBasicsTest().main()
| 37.793388 | 129 | 0.645091 |
1d966500aa4d45f6453f87bffaa39a9b09408107 | 6,829 | py | Python | src/command_modules/azure-cli-ams/azure/cli/command_modules/ams/_help.py | diberry/azure-cli | 302999245cbb13b890b0a74f03443c577bd4bfae | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-ams/azure/cli/command_modules/ams/_help.py | diberry/azure-cli | 302999245cbb13b890b0a74f03443c577bd4bfae | [
"MIT"
] | 3 | 2021-03-26T00:25:36.000Z | 2022-03-29T22:03:55.000Z | src/command_modules/azure-cli-ams/azure/cli/command_modules/ams/_help.py | diberry/azure-cli | 302999245cbb13b890b0a74f03443c577bd4bfae | [
"MIT"
] | 1 | 2020-07-13T22:28:09.000Z | 2020-07-13T22:28:09.000Z | # coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long, too-many-lines
from knack.help_files import helps
helps['ams'] = """
type: group
short-summary: Manage Azure Media Services resources.
"""
helps['ams account'] = """
type: group
short-summary: Manage Azure Media Services accounts.
"""
helps['ams account create'] = """
type: command
short-summary: Create an Azure Media Services account.
"""
helps['ams account update'] = """
type: command
short-summary: Update the details of an Azure Media Services account.
"""
helps['ams account list'] = """
type: command
short-summary: List Azure Media Services accounts for the entire subscription.
"""
helps['ams account show'] = """
type: command
short-summary: Show the details of an Azure Media Services account.
"""
helps['ams account delete'] = """
type: command
short-summary: Delete an Azure Media Services account.
"""
helps['ams account storage'] = """
type: group
short-summary: Manage secondary storage for an Azure Media Services account.
"""
helps['ams account storage add'] = """
type: command
short-summary: Attach a secondary storage to an Azure Media Services account.
"""
helps['ams account storage remove'] = """
type: command
short-summary: Detach a secondary storage from an Azure Media Services account.
"""
helps['ams account sp'] = """
type: group
short-summary: Manage service principal and role based access for an Azure Media Services account.
"""
helps['ams account sp create'] = """
type: command
short-summary: Create a service principal and configure its access to an Azure Media Services account.
examples:
- name: Create a service principal with password and configure its access to an Azure Media Services account. Output will be in xml format.
text: >
az ams account sp create -a {myamsaccount} -g {myresourcegroup} -n {mySpName} -password {mySpPassword} --role {rol} --xml
"""
helps['ams account sp reset-credentials'] = """
type: command
short-summary: Generate a new client secret for a service principal configured for an Azure Media Services account.
"""
helps['ams transform'] = """
type: group
short-summary: Manage transforms for an Azure Media Services account.
"""
helps['ams transform list'] = """
type: command
short-summary: List all the transforms of an Azure Media Services account.
"""
helps['ams transform show'] = """
type: command
short-summary: Show the details of a transform.
"""
helps['ams transform create'] = """
type: command
short-summary: Create a transform.
"""
helps['ams transform delete'] = """
type: command
short-summary: Delete a transform.
"""
helps['ams transform update'] = """
type: command
short-summary: Update the details of a transform.
"""
helps['ams transform output'] = """
type: group
short-summary: Manage transform outputs for an Azure Media Services account.
"""
helps['ams transform output add'] = """
type: command
short-summary: Add an output to an existing transform.
"""
helps['ams transform output remove'] = """
type: command
short-summary: Remove an output from an existing transform.
"""
helps['ams asset'] = """
type: group
short-summary: Manage assets for an Azure Media Services account.
"""
helps['ams asset show'] = """
type: command
short-summary: Show the details of an asset.
"""
helps['ams asset list'] = """
type: command
short-summary: List all the assets of an Azure Media Services account.
"""
helps['ams asset create'] = """
type: command
short-summary: Create an asset.
"""
helps['ams asset update'] = """
type: command
short-summary: Update the details of an asset.
"""
helps['ams asset delete'] = """
type: command
short-summary: Delete an asset.
"""
helps['ams asset get-sas-urls'] = """
type: command
short-summary: Lists the asset SAS URLs used for uploading and downloading asset content.
"""
helps['ams job'] = """
type: group
short-summary: Manage jobs for a transform.
"""
helps['ams job start'] = """
type: command
short-summary: Start a job.
"""
helps['ams job list'] = """
type: command
short-summary: List all the jobs of a transform within an Azure Media Services account.
"""
helps['ams job show'] = """
type: command
short-summary: Show the details of a job.
"""
helps['ams job delete'] = """
type: command
short-summary: Delete a job.
"""
helps['ams job cancel'] = """
type: command
short-summary: Cancel a job.
"""
helps['ams streaming'] = """
type: group
short-summary: Manage streamings for an Azure Media Services account.
"""
helps['ams streaming locator'] = """
type: group
short-summary: Manage streaming locators for an Azure Media Services account.
"""
helps['ams streaming locator create'] = """
type: command
short-summary: Create a streaming locator.
"""
helps['ams streaming locator list'] = """
type: command
short-summary: List all the streaming locators within an Azure Media Services account.
"""
helps['ams streaming locator show'] = """
type: command
short-summary: Show the details of a streaming locator.
"""
helps['ams streaming locator get-paths'] = """
type: command
short-summary: List paths supported by a streaming locator.
"""
helps['ams streaming policy'] = """
type: group
short-summary: Manage streaming policies for an Azure Media Services account.
"""
helps['ams streaming policy create'] = """
type: command
short-summary: Create a streaming policy.
"""
helps['ams streaming policy list'] = """
type: command
short-summary: List all the streaming policies within an Azure Media Services account.
"""
helps['ams streaming policy show'] = """
type: command
short-summary: Show the details of a streaming policy.
"""
helps['ams streaming endpoint'] = """
type: group
short-summary: Manage streaming endpoints for an Azure Media Service account.
"""
helps['ams streaming endpoint start'] = """
type: command
short-summary: Start a streaming endpoint.
"""
helps['ams streaming endpoint stop'] = """
type: command
short-summary: Stop a streaming endpoint.
"""
helps['ams streaming endpoint list'] = """
type: command
short-summary: List all the streaming endpoints within an Azure Media Services account.
"""
| 26.265385 | 147 | 0.656026 |
709e995cd31b0abd76a18a285a1dda7405392fe6 | 10,717 | py | Python | intersight/models/content_base_parameter_all_of.py | sdnit-se/intersight-python | 551f7685c0f76bb8af60ec83ffb6f9672d49a4ae | [
"Apache-2.0"
] | 21 | 2018-03-29T14:20:35.000Z | 2021-10-13T05:11:41.000Z | intersight/models/content_base_parameter_all_of.py | sdnit-se/intersight-python | 551f7685c0f76bb8af60ec83ffb6f9672d49a4ae | [
"Apache-2.0"
] | 14 | 2018-01-30T15:45:46.000Z | 2022-02-23T14:23:21.000Z | intersight/models/content_base_parameter_all_of.py | sdnit-se/intersight-python | 551f7685c0f76bb8af60ec83ffb6f9672d49a4ae | [
"Apache-2.0"
] | 18 | 2018-01-03T15:09:56.000Z | 2021-07-16T02:21:54.000Z | # coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from intersight.configuration import Configuration
class ContentBaseParameterAllOf(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps model attribute name -> declared OpenAPI type (consumed by to_dict()).
    openapi_types = {
        'accept_single_value': 'bool',
        'complex_type': 'str',
        'item_type': 'str',
        'name': 'str',
        'path': 'str',
        'type': 'str'
    }

    # Maps model attribute name -> JSON key used on the wire.
    attribute_map = {
        'accept_single_value': 'AcceptSingleValue',
        'complex_type': 'ComplexType',
        'item_type': 'ItemType',
        'name': 'Name',
        'path': 'Path',
        'type': 'Type'
    }

    def __init__(self,
                 accept_single_value=None,
                 complex_type=None,
                 item_type='simple',
                 name=None,
                 path=None,
                 type='simple',
                 local_vars_configuration=None):  # noqa: E501
        """ContentBaseParameterAllOf - a model defined in OpenAPI"""  # noqa: E501
        # A default Configuration controls client-side validation in setters.
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._accept_single_value = None
        self._complex_type = None
        self._item_type = None
        self._name = None
        self._path = None
        self._type = None
        # No polymorphic discriminator for this model.
        self.discriminator = None

        # Route every provided value through its property setter so that
        # client-side validation (allowed_values checks) applies.
        if accept_single_value is not None:
            self.accept_single_value = accept_single_value
        if complex_type is not None:
            self.complex_type = complex_type
        if item_type is not None:
            self.item_type = item_type
        if name is not None:
            self.name = name
        if path is not None:
            self.path = path
        if type is not None:
            self.type = type

    @property
    def accept_single_value(self):
        """Gets the accept_single_value of this ContentBaseParameterAllOf.  # noqa: E501

        The flag that allows single values in content to be extracted as a single element collection in case the parameter is of Collection type. This flag is applicable for parameters of type Collection only.  # noqa: E501

        :return: The accept_single_value of this ContentBaseParameterAllOf.  # noqa: E501
        :rtype: bool
        """
        return self._accept_single_value

    @accept_single_value.setter
    def accept_single_value(self, accept_single_value):
        """Sets the accept_single_value of this ContentBaseParameterAllOf.

        The flag that allows single values in content to be extracted as a single element collection in case the parameter is of Collection type. This flag is applicable for parameters of type Collection only.  # noqa: E501

        :param accept_single_value: The accept_single_value of this ContentBaseParameterAllOf.  # noqa: E501
        :type: bool
        """

        self._accept_single_value = accept_single_value

    @property
    def complex_type(self):
        """Gets the complex_type of this ContentBaseParameterAllOf.  # noqa: E501

        The name of the complex type definition in case this is a complex parameter. The content.Grammar object must have a complex type, content.ComplexType, defined with the specified name in types collection property.  # noqa: E501

        :return: The complex_type of this ContentBaseParameterAllOf.  # noqa: E501
        :rtype: str
        """
        return self._complex_type

    @complex_type.setter
    def complex_type(self, complex_type):
        """Sets the complex_type of this ContentBaseParameterAllOf.

        The name of the complex type definition in case this is a complex parameter. The content.Grammar object must have a complex type, content.ComplexType, defined with the specified name in types collection property.  # noqa: E501

        :param complex_type: The complex_type of this ContentBaseParameterAllOf.  # noqa: E501
        :type: str
        """

        self._complex_type = complex_type

    @property
    def item_type(self):
        """Gets the item_type of this ContentBaseParameterAllOf.  # noqa: E501

        The type of the collection item in case this is a collection parameter.  # noqa: E501

        :return: The item_type of this ContentBaseParameterAllOf.  # noqa: E501
        :rtype: str
        """
        return self._item_type

    @item_type.setter
    def item_type(self, item_type):
        """Sets the item_type of this ContentBaseParameterAllOf.

        The type of the collection item in case this is a collection parameter.  # noqa: E501

        :param item_type: The item_type of this ContentBaseParameterAllOf.  # noqa: E501
        :type: str
        """
        # Validation only fires when the Configuration enables it.
        allowed_values = ["simple", "complex", "collection"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and item_type not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `item_type` ({0}), must be one of {1}"  # noqa: E501
                .format(item_type, allowed_values))

        self._item_type = item_type

    @property
    def name(self):
        """Gets the name of this ContentBaseParameterAllOf.  # noqa: E501

        The name of the parameter.  # noqa: E501

        :return: The name of this ContentBaseParameterAllOf.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this ContentBaseParameterAllOf.

        The name of the parameter.  # noqa: E501

        :param name: The name of this ContentBaseParameterAllOf.  # noqa: E501
        :type: str
        """

        self._name = name

    @property
    def path(self):
        """Gets the path of this ContentBaseParameterAllOf.  # noqa: E501

        The content specific path information that identifies the parameter value within the content. The value is usually a XPath or JSONPath or a regular expression in case of text content.  # noqa: E501

        :return: The path of this ContentBaseParameterAllOf.  # noqa: E501
        :rtype: str
        """
        return self._path

    @path.setter
    def path(self, path):
        """Sets the path of this ContentBaseParameterAllOf.

        The content specific path information that identifies the parameter value within the content. The value is usually a XPath or JSONPath or a regular expression in case of text content.  # noqa: E501

        :param path: The path of this ContentBaseParameterAllOf.  # noqa: E501
        :type: str
        """

        self._path = path

    @property
    def type(self):
        """Gets the type of this ContentBaseParameterAllOf.  # noqa: E501

        The type of the parameter. Accepted values are simple, complex, collection.  # noqa: E501

        :return: The type of this ContentBaseParameterAllOf.  # noqa: E501
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this ContentBaseParameterAllOf.

        The type of the parameter. Accepted values are simple, complex, collection.  # noqa: E501

        :param type: The type of this ContentBaseParameterAllOf.  # noqa: E501
        :type: str
        """
        # Validation only fires when the Configuration enables it.
        allowed_values = ["simple", "complex", "collection"]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and type not in allowed_values:  # noqa: E501
            raise ValueError(
                "Invalid value for `type` ({0}), must be one of {1}"  # noqa: E501
                .format(type, allowed_values))

        self._type = type

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models (anything exposing to_dict()),
        # including models held inside lists and dict values.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(
                    map(lambda x: x.to_dict()
                        if hasattr(x, "to_dict") else x, value))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(
                    map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ContentBaseParameterAllOf):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, ContentBaseParameterAllOf):
            return True

        return self.to_dict() != other.to_dict()
36cfa71aac3d887882303854576284604cf31b97 | 1,437 | py | Python | tests/test-poly1.py | 6tudent/pyemf | 830e61008af8acf1830e5c91dc876313a771afdb | [
"BSD-3-Clause-No-Nuclear-Warranty"
] | 16 | 2016-01-22T01:40:10.000Z | 2021-07-22T02:19:24.000Z | tests/test-poly1.py | 6tudent/pyemf | 830e61008af8acf1830e5c91dc876313a771afdb | [
"BSD-3-Clause-No-Nuclear-Warranty"
] | 1 | 2017-11-07T11:53:14.000Z | 2017-11-07T11:53:14.000Z | tests/test-poly1.py | jeremysanders/pyemf | 830e61008af8acf1830e5c91dc876313a771afdb | [
"BSD-3-Clause-No-Nuclear-Warranty"
] | 11 | 2017-02-28T21:02:26.000Z | 2020-10-21T19:23:29.000Z | #!/usr/bin/env python
# Test of bounds checking and 16bit/32bit versions of polygon, polyline, etc.
import pyemf
width=8
height=6
dpi=300
emf=pyemf.EMF(width,height,dpi)
pen=emf.CreatePen(pyemf.PS_SOLID,10,(0x01,0xa0,0xff))
emf.SelectObject(pen)
brush=emf.CreateSolidBrush((0x50,0x50,0x50))
emf.SelectObject(brush)
emf.SetBkMode(pyemf.TRANSPARENT)
# set baseline for text to be bottom left corner
emf.SetTextAlign(pyemf.TA_BOTTOM|pyemf.TA_LEFT)
emf.SetTextColor((0,0,0))
font = emf.CreateFont( 50, 0, 0, 0, pyemf.FW_BOLD, 0, 0, 0,
pyemf.ANSI_CHARSET, pyemf.OUT_DEFAULT_PRECIS,
pyemf.CLIP_DEFAULT_PRECIS, pyemf.DEFAULT_QUALITY,
pyemf.DEFAULT_PITCH | pyemf.FF_DONTCARE, "Arial");
emf.SelectObject( font );
emf.TextOut(500,50,"Test of polypolygon and polypolyline")
x=100
y=300
emf.TextOut(x,y,"several filled-in squares. OpenOffice doesn't seem to complete the polygons.")
polylist=[]
for x1 in range(x,x+1000,200):
polylist.append([(x1,y),(x1+100,y),(x1+100,y+100),(x1,y+100)])
emf.PolyPolygon(polylist)
x=100
y=800
polylist=[]
emf.TextOut(100,y,"it's ... just a bunch of wavy lines.")
for y1 in range(y,y+500,100):
points=[]
for x in range(100,1000,50):
points.append((x,y1))
points.append((x+25,y1+25))
polylist.append(points)
emf.PolyPolyline(polylist)
ret=emf.save("test-poly1.emf")
print("save returns %s" % str(ret))
| 26.611111 | 96 | 0.690327 |
d73f90910d950c761deea4587ab6b82edca66ce9 | 2,327 | py | Python | mkdocs/main.py | ergonlogic/mkdocs | 4f0ab8af2580b5b8a2e02af57403015f9d1c7df3 | [
"BSD-2-Clause"
] | 1 | 2015-11-08T11:41:46.000Z | 2015-11-08T11:41:46.000Z | mkdocs/main.py | ericholscher/mkdocs | 2cee131ecb39d03a9ea9be0a2cf663c1ca94cadf | [
"BSD-2-Clause"
] | null | null | null | mkdocs/main.py | ericholscher/mkdocs | 2cee131ecb39d03a9ea9be0a2cf663c1ca94cadf | [
"BSD-2-Clause"
] | 1 | 2019-09-10T07:28:37.000Z | 2019-09-10T07:28:37.000Z | #!/usr/bin/env python
# coding: utf-8
from __future__ import print_function
import logging
import sys
from mkdocs import __version__
from mkdocs.build import build
from mkdocs.config import load_config
from mkdocs.exceptions import MkDocsException
from mkdocs.gh_deploy import gh_deploy
from mkdocs.new import new
from mkdocs.serve import serve
def configure_logging(options):
    '''When a --verbose flag is passed, increase the verbosity of mkdocs

    :param options: dict of parsed command-line options, or None.
    '''
    logger = logging.getLogger('mkdocs')
    logger.addHandler(logging.StreamHandler())
    # Guard against options=None (main() declares that default): the original
    # `'verbose' in options` raised TypeError when no options dict was given.
    if options and 'verbose' in options:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.WARNING)
def arg_to_option(arg):
    """
    Convert command line arguments into two-tuples of config key/value pairs.

    '--key' becomes ('key', True); '--key=value' becomes ('key', 'value').
    Dashes in the key are converted to underscores.
    """
    # Remove the literal '--' prefix. The previous str.lstrip('--') stripped
    # *any* run of leading '-' characters (lstrip takes a character set, not
    # a prefix), which mangled arguments such as '---x'.
    if arg.startswith('--'):
        arg = arg[2:]
    option = True
    if '=' in arg:
        arg, option = arg.split('=', 1)
    return (arg.replace('-', '_'), option)
def main(cmd, args, options=None):
    """
    Build the documentation, and optionally start the devserver.

    :param cmd: sub-command name ('serve', 'build', 'json', 'gh-deploy',
                'new'); anything else prints usage.
    :param args: positional arguments following the sub-command.
    :param options: dict of parsed '--' options; may be None.
    """
    # Guard against the declared default: the original crashed with
    # TypeError on `'clean' in options` when options was None.
    if options is None:
        options = {}
    configure_logging(options)
    clean_site_dir = 'clean' in options
    if cmd == 'serve':
        config = load_config(options=options)
        serve(config, options=options)
    elif cmd == 'build':
        config = load_config(options=options)
        build(config, clean_site_dir=clean_site_dir)
    elif cmd == 'json':
        config = load_config(options=options)
        build(config, dump_json=True, clean_site_dir=clean_site_dir)
    elif cmd == 'gh-deploy':
        # Deploying implies a fresh build first.
        config = load_config(options=options)
        build(config, clean_site_dir=clean_site_dir)
        gh_deploy(config)
    elif cmd == 'new':
        new(args, options)
    else:
        print('MkDocs (version {0})'.format(__version__))
        print('mkdocs [help|new|build|serve|gh-deploy|json] {options}')
def run_main():
    """
    Invokes main() with the contents of sys.argv

    This is a separate function so it can be invoked
    by a setuptools console_script.
    """
    argv = sys.argv
    # First positional argument (if any) is the sub-command.
    cmd = argv[1] if len(argv) >= 2 else None
    # Everything after the sub-command that starts with '--' is an option.
    options = dict(
        arg_to_option(arg) for arg in argv[2:] if arg.startswith('--')
    )
    try:
        main(cmd, args=argv[2:], options=options)
    except MkDocsException as e:
        print(e.args[0], file=sys.stderr)


if __name__ == '__main__':
    run_main()
| 28.728395 | 79 | 0.664375 |
5bbd7626b9a6bffcc0bfcc7dd1f70c92eea201f1 | 8,804 | py | Python | venv/Lib/site-packages/tableutil/text_table.py | avim2809/CameraSiteBlocker | bfc0434e75e8f3f95c459a4adc86b7673200816e | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/tableutil/text_table.py | avim2809/CameraSiteBlocker | bfc0434e75e8f3f95c459a4adc86b7673200816e | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/tableutil/text_table.py | avim2809/CameraSiteBlocker | bfc0434e75e8f3f95c459a4adc86b7673200816e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import codecs
from collections import OrderedDict
from future.builtins import str
class BadTextTable(Exception):
    """Raised when a text table is structurally invalid (e.g. missing header row)."""
    pass
def get_headings_for_text_table(text_table,
                                field_delimiter=u'|'):
    """Return the stripped heading names from a text table.

    :param text_table: path to a table file, or the table itself as a string.
    :param field_delimiter: character separating fields in the heading row.
    :return: list of heading name strings.
    """
    try:
        # Only the first line holds the headings. The original read() pulled
        # in the whole file (separator + data rows) and split all of it by
        # the delimiter; it also leaked the file handle.
        with codecs.open(text_table, u'r', encoding=u'utf8') as table_file:
            headings = table_file.readline()
    except IOError:
        # Not a readable path: treat text_table as the table text itself.
        headings = text_table.splitlines()[0]
    return [heading.strip() for heading in headings.split(field_delimiter)]
def is_description(line):
    """Return True when *line* is a description row (starts with '@')."""
    return line.startswith(u'@')
def trim_description(line):
    """Strip the leading '@' marker (and a single following space) from a description row."""
    remainder = line[1:]  # drop the '@'
    # Only the first space after the marker is discarded; further
    # leading whitespace is significant.
    if remainder.startswith(u' '):
        remainder = remainder[1:]
    return remainder
def is_comment(line):
    """Return True when *line* is a comment row (starts with '#')."""
    return line.startswith(u'#')
def detect_list(field,
                detect_lists,
                delimiter):
    """Optionally split *field* into a list on *delimiter*.

    :param field: raw field text.
    :param detect_lists: when True, delimiter-separated fields become lists.
    :param delimiter: the in-field list delimiter.
    :return: a list of stripped elements when more than one element is
             found (and detection is enabled), otherwise the stripped field.
    """
    if detect_lists:
        elements = [element.strip() for element in field.strip().split(delimiter)]
        if len(elements) > 1:
            # Bug fix: the original fell through to field.strip() here,
            # raising AttributeError because lists have no strip() method.
            return elements
        field = elements[0]
    return field.strip()
def extract_keys(line,
                 field_delimiter):
    """Split a heading row on *field_delimiter* and strip each key name."""
    raw_fields = line.split(field_delimiter)
    return [raw_field.strip() for raw_field in raw_fields]
def check_separator_line(separator,
                         keys,
                         separator_line,
                         separator_delimiter):
    """Validate that *separator* is a well-formed separator row for *keys*.

    :raises ValueError: when the number of delimited sections differs from
                        the number of keys, or any section contains a
                        character other than *separator_line*.
    """
    stripped = separator.strip()
    sections = stripped.split(separator_delimiter)
    if len(sections) != len(keys):
        raise ValueError(u"Badly formed separator line. "
                         u"Doesn't match number of fields detected")
    if any(character != separator_line for character in u''.join(sections)):
        raise ValueError(u'Badly formed separator line. \n{separator}\n'
                         u'Use {lsep}{d}{lsep}{d}{lsep} etc'.format(d=separator_delimiter,
                                                                    lsep=separator_line * 4,
                                                                    separator=stripped))
def text_table_to_list(text_table,
                       field_delimiter=u'|',
                       separator_line=u'-',
                       separator_delimiter=u'+',
                       detect_lists=False,
                       list_delimiter=u','):
    """
    Creates a Python list from a text based table.

    ``text_table`` may be either a filename or the table itself as a string.
    For example, a source containing::

        Name    | Device_ID | Bouquet | Sub-Bouquet
        --------+-----------+---------+------------
        @ Description 1.
        # Comment 1
        htx     | 34939046  | 4101    | 1
        kd1     | 34939007  | 4101    | 1
        @ Description 2.

    yields a structure like this::

        {u'Description': [u'Description 1.', u'Description 2.'],
         u'Headings': [u'Name', u'Device_ID', u'Bouquet', u'Sub-Bouquet'],
         u'Data': [{u'Name': u'htx',
                    u'Device_ID': u'34939046',
                    u'Bouquet': u'4101',
                    u'Sub-Bouquet': u'1'},
                   {u'Name': u'kd1',
                    u'Device_ID': u'34939007',
                    u'Bouquet': u'4101',
                    u'Sub-Bouquet': u'1'}]}

    Lines starting with '#' are discarded; lines starting with '@' are
    collected into the u'Description' list.

    :param text_table: filename of, or string containing, the table.
    :param field_delimiter: character separating fields in a row.
    :param separator_line: character the separator row is drawn with.
    :param separator_delimiter: column separator inside the separator row.
    :param detect_lists: when True, fields containing list_delimiter become lists.
    :param list_delimiter: delimiter for in-field lists.
    :return: DICT with u'Headings', u'Description' and u'Data' keys.
    """

    def extract_list(lines):
        # `lines` may be an open file or a list of strings; normalise.
        lines = iter(lines)
        try:
            keys = extract_keys(line=next(lines),
                                field_delimiter=field_delimiter)
        # NOTE(review): an empty source makes next() raise StopIteration,
        # not IndexError, so this guard looks unreachable — confirm intent.
        except IndexError:
            raise BadTextTable(u'No header row detected')

        try:
            check_separator_line(separator=next(lines),
                                 keys=keys,
                                 separator_line=separator_line,
                                 separator_delimiter=separator_delimiter)
        except StopIteration:
            # A bit cheeky...
            # Couldn't find a second line to check as a separator.
            # Assume the original single line was a filename, and raise an
            # IOError — the caller below catches it and falls back to
            # treating text_table as a string.
            if len(keys) == 1:
                filename = keys[0]
                if u'.' in filename or u'\\' in filename or u'/' in filename:
                    raise IOError(u'No file found or only header row supplied for text table')
            raise ValueError(u'Separator line not found\n'
                             u'Use {lsep}{d}{lsep}{d}{lsep} etc'.format(d=separator_delimiter,
                                                                        lsep=separator_line * 4))

        rows = []
        descriptions = []

        while True:
            try:
                line = next(lines).strip()
            except StopIteration:
                break
            if is_comment(line):
                pass  # throw away
            elif is_description(line):
                descriptions.append(trim_description(line))
            elif line:
                # Blank lines are skipped; anything else is a data row.
                values = [detect_list(field=value,
                                      detect_lists=detect_lists,
                                      delimiter=list_delimiter)
                          for value in line.split(field_delimiter)]
                rows.append(OrderedDict(zip(keys, values)))

        return {u'Headings': keys,
                u'Description': descriptions if descriptions else [u''],
                u'Data': rows}

    try:
        # First assume text_table is a filename; any IOError (including the
        # deliberate one raised inside extract_list) falls through to the
        # string interpretation below.
        return extract_list(codecs.open(text_table, u'r', encoding=u'utf8'))
    except IOError:
        pass
    return extract_list(text_table.splitlines())
def text_table_to_dictionary(text_table,
                             field_delimiter=u'|',
                             separator_line=u'-',
                             separator_delimiter=u'+',
                             detect_lists=False,
                             list_delimiter=u','):
    """
    Creates a Python dictionary from a text based table.

    ``text_table`` may be either a filename or the table itself as a string.
    For example, a source containing::

        Name    | Device_ID | Bouquet | Sub-Bouquet
        --------+-----------+---------+------------
        htx     | 34939046  | 4101    | 1
        kd1     | 34939007  | 4101    | 1

    yields::

        {u'Description': [u''],
         u'Headings': [u'Name', u'Device_ID', u'Bouquet', u'Sub-Bouquet'],
         u'Data': {u'htx': {u'Name': u'htx',
                            u'Device_ID': u'34939046',
                            u'Bouquet': u'4101',
                            u'Sub-Bouquet': u'1'},
                   u'kd1': {u'Name': u'kd1',
                            u'Device_ID': u'34939007',
                            u'Bouquet': u'4101',
                            u'Sub-Bouquet': u'1'}}}

    If the source has exactly two columns, the rows are treated as key:value
    pairs and u'Data' maps each key directly to its value (the column header
    names are not significant in that case)::

        Key | Value
        ----+----------
        ABC | 34939046
        DEF | 34939007

    yields a u'Data' value of::

        {u'ABC': u'34939046',
         u'DEF': u'34939007'}

    :param text_table: filename of, or string containing, the table.
    :param field_delimiter: character separating fields in a row.
    :param separator_line: character the separator row is drawn with.
    :param separator_delimiter: column separator inside the separator row.
    :param detect_lists: when True, fields containing list_delimiter become lists.
    :param list_delimiter: delimiter for in-field lists.
    :return: DICT with u'Headings', u'Description' and u'Data' keys.
    """
    as_list = text_table_to_list(text_table=text_table,
                                 field_delimiter=field_delimiter,
                                 separator_line=separator_line,
                                 separator_delimiter=separator_delimiter,
                                 detect_lists=detect_lists,
                                 list_delimiter=list_delimiter)
    descriptions = as_list[u'Description']
    keys = as_list[u'Headings']
    data = as_list[u'Data']
    # Rows are keyed on the value of the first column.
    key_field = keys[0]
    value_field = keys[1] if len(keys) > 1 else None
    lookup = OrderedDict()
    for datum in data:
        # Exactly two columns: map key -> value.
        # Otherwise: map key -> the whole row dictionary.
        lookup[datum[key_field]] = datum[value_field] if len(keys) == 2 else datum
    return {u'Headings': keys,
            u'Description': descriptions,
            u'Data': lookup}
def dictionary_to_list(dictionary):
    """Return the values of *dictionary* in key order, skipping the u'Description' entry."""
    values = []
    for key, value in dictionary.items():
        if key != u'Description':
            values.append(value)
    return values
| 30.675958 | 97 | 0.49841 |
0b41b5fc987bb8c6122a7f1653e1a1a2a37d6e21 | 7,355 | py | Python | autoPyTorch/pipeline/components/setup/network_backbone/ShapedMLPBackbone.py | franchuterivera/Auto-PyTorch | ff5c5c99e424a47b5cdb0e40ec68fbf33943e764 | [
"Apache-2.0"
] | 1 | 2021-05-12T10:11:58.000Z | 2021-05-12T10:11:58.000Z | autoPyTorch/pipeline/components/setup/network_backbone/ShapedMLPBackbone.py | franchuterivera/Auto-PyTorch | ff5c5c99e424a47b5cdb0e40ec68fbf33943e764 | [
"Apache-2.0"
] | 1 | 2021-06-23T21:48:03.000Z | 2021-06-23T21:48:03.000Z | autoPyTorch/pipeline/components/setup/network_backbone/ShapedMLPBackbone.py | franchuterivera/Auto-PyTorch | ff5c5c99e424a47b5cdb0e40ec68fbf33943e764 | [
"Apache-2.0"
] | null | null | null | from typing import Dict, List, Optional, Tuple, Union
import ConfigSpace as CS
from ConfigSpace.configuration_space import ConfigurationSpace
from ConfigSpace.hyperparameters import (
CategoricalHyperparameter,
UniformFloatHyperparameter,
UniformIntegerHyperparameter
)
from torch import nn
from autoPyTorch.datasets.base_dataset import BaseDatasetPropertiesType
from autoPyTorch.pipeline.components.setup.network_backbone.base_network_backbone import NetworkBackboneComponent
from autoPyTorch.pipeline.components.setup.network_backbone.utils import (
_activations,
get_shaped_neuron_counts,
)
from autoPyTorch.utils.common import HyperparameterSearchSpace, add_hyperparameter, get_hyperparameter
class ShapedMLPBackbone(NetworkBackboneComponent):
    """
    Implementation of a Shaped MLP -- an MLP with the number of units
    arranged so that a given shape is honored
    """

    def build_backbone(self, input_shape: Tuple[int, ...]) -> nn.Module:
        # Builds the stack of fully-connected layers whose widths follow the
        # configured shape (funnel, diamond, ...) from in_features down/up to
        # output_dim. Assumes input_shape is 1-D, i.e. (num_features,).
        layers = list()  # type: List[nn.Module]
        in_features = input_shape[0]
        out_features = self.config["output_dim"]
        neuron_counts = get_shaped_neuron_counts(self.config['mlp_shape'],
                                                 in_features,
                                                 out_features,
                                                 self.config['max_units'],
                                                 self.config['num_groups'])
        if self.config["use_dropout"] and self.config["max_dropout"] > 0.05:
            # Dropout probabilities follow the same shape: computed on a
            # 0..1000 scale here, rescaled by max_dropout inside the loop.
            dropout_shape = get_shaped_neuron_counts(
                self.config['mlp_shape'], 0, 0, 1000, self.config['num_groups']
            )

        previous = in_features
        # One hidden group per entry; the last group is the final Linear below.
        for i in range(self.config['num_groups'] - 1):
            if i >= len(neuron_counts):
                break
            if self.config["use_dropout"] and self.config["max_dropout"] > 0.05:
                dropout = dropout_shape[i] / 1000 * self.config["max_dropout"]
            else:
                dropout = 0.0
            self._add_layer(layers, previous, neuron_counts[i], dropout)
            previous = neuron_counts[i]
        # Final projection to the configured output dimensionality.
        layers.append(nn.Linear(previous, out_features))
        backbone = nn.Sequential(*layers)
        self.backbone = backbone
        return backbone

    def get_output_shape(self, input_shape: Tuple[int, ...]) -> Tuple[int, ...]:
        # The backbone always ends in Linear(..., output_dim); see build_backbone.
        return (self.config["output_dim"],)

    def _add_layer(self, layers: List[nn.Module],
                   in_features: int, out_features: int, dropout: float
                   ) -> None:
        # Appends Linear -> activation (-> Dropout, when enabled) to `layers`
        # in place.
        layers.append(nn.Linear(in_features, out_features))
        layers.append(_activations[self.config["activation"]]())
        if self.config["use_dropout"] and self.config["max_dropout"] > 0.05:
            layers.append(nn.Dropout(dropout))

    @staticmethod
    def get_properties(dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None
                       ) -> Dict[str, Union[str, bool]]:
        return {
            'shortname': 'ShapedMLPBackbone',
            'name': 'ShapedMLPBackbone',
            'handles_tabular': True,
            'handles_image': False,
            'handles_time_series': False,
        }

    @staticmethod
    def get_hyperparameter_search_space(
        dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
        num_groups: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="num_groups",
                                                                          value_range=(1, 15),
                                                                          default_value=5,
                                                                          ),
        max_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="max_dropout",
                                                                           value_range=(0, 1),
                                                                           default_value=0.5,
                                                                           ),
        use_dropout: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="use_dropout",
                                                                           value_range=(True, False),
                                                                           default_value=False,
                                                                           ),
        max_units: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="max_units",
                                                                         value_range=(10, 1024),
                                                                         default_value=200,
                                                                         ),
        output_dim: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="output_dim",
                                                                          value_range=(10, 1024),
                                                                          default_value=200,
                                                                          ),
        mlp_shape: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="mlp_shape",
                                                                         value_range=('funnel', 'long_funnel',
                                                                                      'diamond', 'hexagon',
                                                                                      'brick', 'triangle',
                                                                                      'stairs'),
                                                                         default_value='funnel',
                                                                         ),
        activation: HyperparameterSearchSpace = HyperparameterSearchSpace(hyperparameter="activation",
                                                                          value_range=tuple(_activations.keys()),
                                                                          default_value=list(_activations.keys())[0],
                                                                          ),
    ) -> ConfigurationSpace:

        cs = ConfigurationSpace()

        # Number of hidden layer groups the shaped MLP is built from.
        add_hyperparameter(cs, num_groups, UniformIntegerHyperparameter)
        add_hyperparameter(cs, mlp_shape, CategoricalHyperparameter)
        add_hyperparameter(cs, activation, CategoricalHyperparameter)
        add_hyperparameter(cs, max_units, UniformIntegerHyperparameter)
        add_hyperparameter(cs, output_dim, UniformIntegerHyperparameter)

        # We can have dropout in the network for
        # better generalization
        use_dropout = get_hyperparameter(use_dropout, CategoricalHyperparameter)
        max_dropout = get_hyperparameter(max_dropout, UniformFloatHyperparameter)
        cs.add_hyperparameters([use_dropout, max_dropout])
        # max_dropout is only meaningful (active) when use_dropout is True.
        cs.add_condition(CS.EqualsCondition(max_dropout, use_dropout, True))

        return cs
| 53.686131 | 117 | 0.510945 |
ac3644624bef299d91a0beb31bda7f72765b1bb5 | 1,768 | py | Python | dycon_web/contest/migrations/0020_auto_20211118_2121.py | landges/dycon | 314f2c0f9ed682a56d19c741e6a132cc3331ca5f | [
"MIT"
] | null | null | null | dycon_web/contest/migrations/0020_auto_20211118_2121.py | landges/dycon | 314f2c0f9ed682a56d19c741e6a132cc3331ca5f | [
"MIT"
] | null | null | null | dycon_web/contest/migrations/0020_auto_20211118_2121.py | landges/dycon | 314f2c0f9ed682a56d19c741e6a132cc3331ca5f | [
"MIT"
] | null | null | null | # Generated by Django 3.2.7 on 2021-11-18 21:21
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the legacy per-submission file and status columns.

    Every operation removes one now-unused field from
    ``CompetitionSubmission``; the operations are listed one per line to keep
    the migration compact and easy to scan.
    """

    dependencies = [
        ('contest', '0019_auto_20211118_2120'),
    ]

    operations = [
        migrations.RemoveField(model_name='competitionsubmission', name='coopetition_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='detailed_results_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='history_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='prediction_output_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='prediction_runfile'),
        migrations.RemoveField(model_name='competitionsubmission', name='prediction_stderr_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='prediction_stdout_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='private_output_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='scores_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='status_details'),
        migrations.RemoveField(model_name='competitionsubmission', name='stderr_file'),
        migrations.RemoveField(model_name='competitionsubmission', name='stdout_file'),
    ]
| 28.516129 | 47 | 0.575792 |
efd8e8bb5f6084f850f50b736631729309a53d8e | 398 | py | Python | ddtrace/contrib/elasticsearch/elasticsearch.py | zhammer/dd-trace-py | 4c30f6e36bfa34a63cd9b6884677c977f76d2a01 | [
"Apache-2.0",
"BSD-3-Clause"
] | 5 | 2020-03-07T01:12:29.000Z | 2021-04-21T00:53:19.000Z | ddtrace/contrib/elasticsearch/elasticsearch.py | zhammer/dd-trace-py | 4c30f6e36bfa34a63cd9b6884677c977f76d2a01 | [
"Apache-2.0",
"BSD-3-Clause"
] | 4 | 2019-11-22T20:58:01.000Z | 2020-08-17T21:16:13.000Z | ddtrace/contrib/elasticsearch/elasticsearch.py | zhammer/dd-trace-py | 4c30f6e36bfa34a63cd9b6884677c977f76d2a01 | [
"Apache-2.0",
"BSD-3-Clause"
] | 3 | 2020-03-18T16:29:20.000Z | 2020-07-20T16:05:10.000Z | from importlib import import_module
# Candidate distributions of the elasticsearch client, one per major version.
module_names = ('elasticsearch', 'elasticsearch1', 'elasticsearch2', 'elasticsearch5', 'elasticsearch6')
# Bind whichever candidate is installed under the single name
# ``elasticsearch`` so downstream code is version-agnostic.
for module_name in module_names:
    try:
        elasticsearch = import_module(module_name)
    except ImportError:
        continue
    break
else:
    # Reached only when no candidate module could be imported.
    raise ImportError('could not import any of {0!r}'.format(module_names))
__all__ = ['elasticsearch']
| 26.533333 | 104 | 0.718593 |
c7b04472691e6878c863ea243971da107f0dd341 | 66 | py | Python | inventario/urls.py | exildev/pisix | 6e844be31333c7f6cd12fd0e21dc990405f9c27c | [
"MIT"
] | null | null | null | inventario/urls.py | exildev/pisix | 6e844be31333c7f6cd12fd0e21dc990405f9c27c | [
"MIT"
] | null | null | null | inventario/urls.py | exildev/pisix | 6e844be31333c7f6cd12fd0e21dc990405f9c27c | [
"MIT"
] | null | null | null | from django.conf.urls import url
import views
# No routes are exposed yet; add url(...) entries here as views are written.
urlpatterns = [
]
| 9.428571 | 32 | 0.742424 |
48fde2619576bd3c06656763c33a09b757c8f048 | 6,610 | py | Python | snips_nlu/tests/test_intent_loading.py | CharlyBlavier/snips-nlu-Copy | 829d513ac464e0421a264fd64d8b94f59a09875e | [
"Apache-2.0"
] | 3,764 | 2018-02-27T08:25:52.000Z | 2022-03-30T17:59:22.000Z | snips_nlu/tests/test_intent_loading.py | unicorns18/snips-nlu | 74b2893c91fc0bafc919a7e088ecb0b2bd611acf | [
"Apache-2.0"
] | 305 | 2018-02-28T13:45:23.000Z | 2022-03-10T15:33:35.000Z | snips_nlu/tests/test_intent_loading.py | unicorns18/snips-nlu | 74b2893c91fc0bafc919a7e088ecb0b2bd611acf | [
"Apache-2.0"
] | 559 | 2018-03-04T15:44:15.000Z | 2022-03-21T17:00:21.000Z | from __future__ import unicode_literals
import io
from unittest import TestCase
from snips_nlu.dataset import Intent
from snips_nlu.exceptions import IntentFormatError
class TestIntentLoading(TestCase):
    """Tests for parsing ``Intent`` objects from their YAML representation."""

    def test_should_load_from_yaml_file(self):
        """Inline ``[slot:entity](value)`` annotations become slot chunks."""
        # Given
        intent = Intent.from_yaml(io.StringIO("""
# getWeather Intent
---
type: intent
name: getWeather
utterances:
- "what is the weather in [weatherLocation:location](paris)
  [date:snips/datetime](today) ?"
- "Will it rain [date:snips/datetime](tomorrow) in
  [weatherLocation:location](london)?"
"""))
        # When
        intent_dict = intent.json
        # Then
        expected_intent_dict = {
            "utterances": [
                {
                    "data": [
                        {
                            "text": "what is the weather in "
                        },
                        {
                            "text": "paris",
                            "entity": "location",
                            "slot_name": "weatherLocation"
                        },
                        {
                            "text": " "
                        },
                        {
                            "text": "today",
                            "entity": "snips/datetime",
                            "slot_name": "date"
                        },
                        {
                            "text": " ?"
                        }
                    ]
                },
                {
                    "data": [
                        {
                            "text": "Will it rain "
                        },
                        {
                            "text": "tomorrow",
                            "entity": "snips/datetime",
                            "slot_name": "date"
                        },
                        {
                            "text": " in "
                        },
                        {
                            "text": "london",
                            "entity": "location",
                            "slot_name": "weatherLocation"
                        },
                        {
                            "text": "?"
                        }
                    ]
                }
            ]
        }
        self.assertDictEqual(expected_intent_dict, intent_dict)

    def test_should_load_from_yaml_file_using_slot_mapping(self):
        """A top-level ``slots`` mapping lets utterances use ``[slot]`` only."""
        # Given
        intent = Intent.from_yaml(io.StringIO("""
# getWeather Intent
---
type: intent
name: getWeather
slots:
- name: date
  entity: snips/datetime
- name: weatherLocation
  entity: location
utterances:
- what is the weather in [weatherLocation](paris) ?
- Will it rain [date] in [weatherLocation](london)?
"""))
        # When
        intent_dict = intent.json
        # Then
        expected_intent_dict = {
            "utterances": [
                {
                    "data": [
                        {
                            "text": "what is the weather in "
                        },
                        {
                            "text": "paris",
                            "entity": "location",
                            "slot_name": "weatherLocation"
                        },
                        {
                            "text": " ?"
                        }
                    ]
                },
                {
                    "data": [
                        {
                            "text": "Will it rain "
                        },
                        {
                            "text": None,
                            "entity": "snips/datetime",
                            "slot_name": "date"
                        },
                        {
                            "text": " in "
                        },
                        {
                            "text": "london",
                            "entity": "location",
                            "slot_name": "weatherLocation"
                        },
                        {
                            "text": "?"
                        }
                    ]
                }
            ]
        }
        self.assertDictEqual(expected_intent_dict, intent_dict)

    def test_should_load_from_yaml_file_using_implicit_values(self):
        """A bare ``[location]`` implies both entity and slot named location."""
        # Given
        intent = Intent.from_yaml(io.StringIO("""
# getWeather Intent
---
type: intent
name: getWeather
utterances:
- what is the weather in [location] ?
"""))
        # When
        intent_dict = intent.json
        # Then
        expected_intent_dict = {
            "utterances": [
                {
                    "data": [
                        {
                            "text": "what is the weather in "
                        },
                        {
                            "text": None,
                            "entity": "location",
                            "slot_name": "location"
                        },
                        {
                            "text": " ?"
                        }
                    ]
                }
            ]
        }
        self.assertDictEqual(expected_intent_dict, intent_dict)

    def test_should_raise_when_missing_bracket_in_utterance(self):
        """An unclosed ``[`` raises and names the faulty utterance."""
        # Given
        intent_io = io.StringIO("""
# getWeather Intent
---
type: intent
name: getWeather
utterances:
- what is the weather in [location] ?
- give me the weather forecast in [location tomorrow please
- what's the weather in [location] this weekend ?
""")
        # When / Then
        with self.assertRaises(IntentFormatError) as cm:
            Intent.from_yaml(intent_io)
        faulty_utterance = "give me the weather forecast in [location " \
                           "tomorrow please"
        self.assertTrue(faulty_utterance in str(cm.exception))

    def test_should_raise_when_missing_parenthesis_in_utterance(self):
        """An unclosed ``(`` raises and names the faulty utterance."""
        # Given
        intent_io = io.StringIO("""
# getWeather Intent
---
type: intent
name: getWeather
utterances:
- what is the weather in [location] ?
- give me the weather forecast in [location] tomorrow please
- what's the weather in [location](Paris this weekend ?
""")
        # When / Then
        with self.assertRaises(IntentFormatError) as cm:
            Intent.from_yaml(intent_io)
        faulty_utterance = "what's the weather in [location](Paris this " \
                           "weekend ?"
        self.assertTrue(faulty_utterance in str(cm.exception))
| 29.377778 | 75 | 0.38941 |
c9402580a48802756b1f0f042a31820d9e53a2fb | 1,806 | py | Python | tests/test_package.py | randyzwitch/streamlit-embedcode | 996fccadb0d3e9c53d1efc009b6b89a57428d0fb | [
"MIT"
] | 24 | 2020-07-16T17:04:55.000Z | 2022-03-10T00:14:04.000Z | tests/test_package.py | randyzwitch/streamlit-embedcode | 996fccadb0d3e9c53d1efc009b6b89a57428d0fb | [
"MIT"
] | 2 | 2020-07-13T20:23:23.000Z | 2020-07-23T15:03:37.000Z | tests/test_package.py | randyzwitch/streamlit-embedcode | 996fccadb0d3e9c53d1efc009b6b89a57428d0fb | [
"MIT"
] | 6 | 2020-09-02T18:19:28.000Z | 2022-01-03T02:11:26.000Z | from streamlit_embedcode import _clean_link
from seleniumbase import BaseCase # https://seleniumbase.io/
def test_cleanlink_notrailing():
    """A gist URL without a trailing slash passes through unchanged."""
    link = "https://gist.github.com/randyzwitch/be8c5e9fb5b8e7b046afebcac12e5087"
    assert _clean_link(link) == link
def test_cleanlink_trailing():
    """A trailing slash is stripped from the gist URL."""
    trailing = "https://gist.github.com/randyzwitch/be8c5e9fb5b8e7b046afebcac12e5087/"
    expected = "https://gist.github.com/randyzwitch/be8c5e9fb5b8e7b046afebcac12e5087"
    assert _clean_link(trailing) == expected
class ComponentsTest(BaseCase):
    """SeleniumBase browser smoke test for the embed components.

    NOTE(review): assumes the demo Streamlit app is already being served at
    localhost:8501 before pytest runs — confirm in CI setup.
    """

    def test_basic(self):
        """Walk the page and check each embed renders inside its iframe."""
        # open the app
        self.open("http://localhost:8501")
        # check basic properties of all Streamlit apps
        self.assert_title("app_to_test · Streamlit")
        self.assert_element("div.withScreencast")
        self.assert_element("div.stApp")
        # github_gist
        self.assert_text("github_gist test")
        self.is_element_in_an_iframe("div.gist-file")
        self.is_element_in_an_iframe("div.gist-data")
        self.is_element_in_an_iframe("div.gist-meta")
        # gitlab_snippet
        self.assert_text("gitlab_snippet test")
        self.is_element_in_an_iframe("div.gitlab-embed-snippets")
        # pastebin_snippet
        self.assert_text("pastebin_snippet test")
        self.is_element_in_an_iframe("div.embedPastebin")
        # codepen_snippet
        self.assert_text("codepen_snippet test")
        self.is_element_in_an_iframe("div.cp_embed_wrapper")
        # ideone_snippet
        self.assert_text("ideone_snippet test")
        # TODO
        # tagmycode_snippet
        self.assert_text("tagmycode_snippet test")
        self.is_element_in_an_iframe("div#tmc-embed-snippet")
| 30.610169 | 83 | 0.677741 |
b68b5bab9c2101867d6b9e4ce5b6d515bbf67efd | 4,503 | py | Python | test/test_config_command.py | kiefermat/beets | 268dcb0008228912c305fc6e8818dafda3b48555 | [
"MIT"
] | 1 | 2015-11-06T06:39:51.000Z | 2015-11-06T06:39:51.000Z | test/test_config_command.py | kiefermat/beets | 268dcb0008228912c305fc6e8818dafda3b48555 | [
"MIT"
] | null | null | null | test/test_config_command.py | kiefermat/beets | 268dcb0008228912c305fc6e8818dafda3b48555 | [
"MIT"
] | null | null | null | import os
import yaml
from mock import patch
from tempfile import mkdtemp
from shutil import rmtree
from beets import ui
from beets import config
import _common
from _common import unittest
from helper import TestHelper, capture_stdout
from beets.library import Library
class ConfigCommandTest(unittest.TestCase, TestHelper):
    """Tests for the ``beet config`` command (show, locate, edit).

    Each test runs against a throwaway BEETSDIR containing a known user
    config plus a second config file passed via ``--config``.
    """

    def setUp(self):
        self.temp_dir = mkdtemp()
        # Make sure a host EDITOR setting cannot leak into the tests.
        if 'EDITOR' in os.environ:
            del os.environ['EDITOR']
        os.environ['BEETSDIR'] = self.temp_dir
        self.config_path = os.path.join(self.temp_dir, 'config.yaml')
        with open(self.config_path, 'w') as file:
            file.write('library: lib\n')
            file.write('option: value')
        self.cli_config_path = os.path.join(self.temp_dir, 'cli_config.yaml')
        with open(self.cli_config_path, 'w') as file:
            file.write('option: cli overwrite')
        # Force the global config to be rebuilt from the files above.
        config.clear()
        config._materialized = False

    def tearDown(self):
        rmtree(self.temp_dir)

    def test_show_user_config(self):
        """`beet config` prints the user's configuration."""
        with capture_stdout() as output:
            self.run_command('config')
        # safe_load suffices for this plain YAML and, unlike bare yaml.load,
        # keeps working on modern PyYAML where a Loader argument is mandatory.
        output = yaml.safe_load(output.getvalue())
        self.assertEqual(output['option'], 'value')

    def test_show_user_config_with_defaults(self):
        """`beet config -d` merges in beets' built-in defaults."""
        with capture_stdout() as output:
            self.run_command('config', '-d')
        output = yaml.safe_load(output.getvalue())
        self.assertEqual(output['option'], 'value')
        self.assertEqual(output['library'], 'lib')
        self.assertEqual(output['import']['timid'], False)

    def test_show_user_config_with_cli(self):
        """A --config file overlays (but does not replace) the user config."""
        with capture_stdout() as output:
            self.run_command('--config', self.cli_config_path, 'config')
        output = yaml.safe_load(output.getvalue())
        self.assertEqual(output['library'], 'lib')
        self.assertEqual(output['option'], 'cli overwrite')

    def test_config_paths(self):
        """`beet config -p` lists the user config path first."""
        with capture_stdout() as output:
            self.run_command('config', '-p')
        paths = output.getvalue().split('\n')
        self.assertEqual(len(paths), 2)
        self.assertEqual(paths[0], self.config_path)

    def test_config_paths_with_cli(self):
        """With --config, the CLI config path is listed first."""
        with capture_stdout() as output:
            self.run_command('--config', self.cli_config_path, 'config', '-p')
        paths = output.getvalue().split('\n')
        self.assertEqual(len(paths), 3)
        self.assertEqual(paths[0], self.cli_config_path)

    def test_edit_config_with_editor_env(self):
        """$EDITOR is exec'd with the config path when set."""
        os.environ['EDITOR'] = 'myeditor'
        with patch('os.execlp') as execlp:
            self.run_command('config', '-e')
        execlp.assert_called_once_with(
            'myeditor', 'myeditor', self.config_path)

    def test_edit_config_with_open(self):
        """On macOS, fall back to `open -n` when no $EDITOR is set."""
        with _common.system_mock('Darwin'):
            with patch('os.execlp') as execlp:
                self.run_command('config', '-e')
        execlp.assert_called_once_with(
            'open', 'open', '-n', self.config_path)

    def test_edit_config_with_xdg_open(self):
        """On Linux, fall back to `xdg-open` when no $EDITOR is set."""
        with _common.system_mock('Linux'):
            with patch('os.execlp') as execlp:
                self.run_command('config', '-e')
        execlp.assert_called_once_with(
            'xdg-open', 'xdg-open', self.config_path)

    def test_edit_config_with_windows_exec(self):
        """On Windows, the config file itself is exec'd (shell association)."""
        with _common.system_mock('Windows'):
            with patch('os.execlp') as execlp:
                self.run_command('config', '-e')
        execlp.assert_called_once_with(self.config_path, self.config_path)

    def test_config_editor_not_found(self):
        """A failing exec is surfaced as a beets UserError."""
        with self.assertRaises(ui.UserError) as user_error:
            with patch('os.execlp') as execlp:
                execlp.side_effect = OSError()
                self.run_command('config', '-e')
        self.assertIn('Could not edit configuration',
                      str(user_error.exception.args[0]))

    def test_edit_invalid_config_file(self):
        """Editing still works even when the existing config is broken YAML."""
        self.lib = Library(':memory:')
        with open(self.config_path, 'w') as file:
            file.write('invalid: [')
        config.clear()
        config._materialized = False
        os.environ['EDITOR'] = 'myeditor'
        with patch('os.execlp') as execlp:
            self.run_command('config', '-e')
        execlp.assert_called_once_with(
            'myeditor', 'myeditor', self.config_path)
def suite():
    """Build this module's test suite (entry point for `python -m` runners)."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromName(__name__)
# Allow running this test module directly: ``python test_config_command.py``.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| 35.179688 | 78 | 0.631801 |
715ff422a40f65676e33d77e465f8f2e9f90eaef | 469 | py | Python | src/models/helper.py | wdy06/kaggle-data-science-bowl-2019 | 645d690595fccc4a130cd435aef536c3af2e9045 | [
"MIT"
] | null | null | null | src/models/helper.py | wdy06/kaggle-data-science-bowl-2019 | 645d690595fccc4a130cd435aef536c3af2e9045 | [
"MIT"
] | null | null | null | src/models/helper.py | wdy06/kaggle-data-science-bowl-2019 | 645d690595fccc4a130cd435aef536c3af2e9045 | [
"MIT"
] | null | null | null | from models.model_lgbm import ModelLGBMClassifier, ModelLGBMRegressor
from models.model_catboost import ModelCatBoostRegressor
from models.model_xgb import ModelXGBRegressor
from models.model_nn import ModelNNRegressor
# Registry mapping a model's class name (as referenced by configuration
# strings) to the class itself, so models can be built from identifiers.
MODEL_MAP = {
    'ModelLGBMClassifier': ModelLGBMClassifier,
    'ModelLGBMRegressor': ModelLGBMRegressor,
    'ModelCatBoostRegressor': ModelCatBoostRegressor,
    'ModelXGBRegressor': ModelXGBRegressor,
    'ModelNNRegressor': ModelNNRegressor
}
| 36.076923 | 69 | 0.831557 |
e4259f5f12104c37c3927f39e9c07c325fb868d7 | 1,871 | py | Python | src/algorithms/sorting.py | frapa/py_data_structures | ad6f0cc35a6cc896f31d94bc7b20054015376173 | [
"MIT"
] | null | null | null | src/algorithms/sorting.py | frapa/py_data_structures | ad6f0cc35a6cc896f31d94bc7b20054015376173 | [
"MIT"
] | null | null | null | src/algorithms/sorting.py | frapa/py_data_structures | ad6f0cc35a6cc896f31d94bc7b20054015376173 | [
"MIT"
] | null | null | null | from collections.abc import Callable
from typing import Any, Sequence
from src.data_structures.heap import Heap
# Common signature shared by every sorting function in this module.
SortFunction = Callable[[Sequence[Any]], Sequence[Any]]
def insertion_sort(data: Sequence[Any]) -> Sequence[Any]:
    """Sort ``data`` in place with insertion sort and return it.

    ``data`` must be a mutable sequence (e.g. ``list``); it is modified in
    place and also returned for convenience. The sort is stable, O(n^2)
    comparisons in the worst case and O(n) on already-sorted input.

    (The previous implementation re-wrote the item into the list on every
    shift step; this version shifts first and inserts exactly once.)
    """
    if len(data) <= 1:
        return data
    for i in range(1, len(data)):
        item = data[i]
        # Shift elements greater than ``item`` one slot to the right, then
        # drop ``item`` into the hole that opens up.
        j = i - 1
        while j >= 0 and item < data[j]:
            data[j + 1] = data[j]
            j -= 1
        data[j + 1] = item
    return data
def merge_sort(data: Sequence[Any]) -> Sequence[Any]:
    """Return a sorted copy of ``data`` using a stable top-down merge sort."""
    if len(data) <= 1:
        return data
    mid = len(data) // 2
    left = merge_sort(data[:mid])
    right = merge_sort(data[mid:])
    # Merge the two sorted halves; on ties the left element wins, which
    # keeps the sort stable.
    merged = []
    li = ri = 0
    while li < len(left) and ri < len(right):
        if left[li] <= right[ri]:
            merged.append(left[li])
            li += 1
        else:
            merged.append(right[ri])
            ri += 1
    merged.extend(left[li:])
    merged.extend(right[ri:])
    return merged
def heap_sort(data: Sequence[Any]) -> Sequence[Any]:
    """Return ``data`` sorted by building the project's ``Heap`` structure."""
    if len(data) <= 1:
        return data
    return Heap.from_sequence(data).heap_sort()
def quick_sort(data: Sequence[Any]) -> Sequence[Any]:
    """Return a sorted copy of ``data`` using quicksort (Lomuto partition).

    The input sequence itself is never mutated; a fresh list is built first.
    """
    if len(data) <= 1:
        return data
    items = list(data)
    pivot = items[-1]
    # Lomuto partition: ``boundary`` tracks where the pivot will be placed —
    # everything left of it is <= pivot once the scan finishes.
    boundary = 0
    for idx in range(len(items) - 1):
        if items[idx] <= pivot:
            items[boundary], items[idx] = items[idx], items[boundary]
            boundary += 1
    items[boundary], items[-1] = items[-1], items[boundary]
    # Recursively sort both sides of the pivot in place.
    items[:boundary] = quick_sort(items[:boundary])
    items[boundary + 1:] = quick_sort(items[boundary + 1:])
    return items
| 21.755814 | 61 | 0.540887 |
9a506824b46834cc5080402a3c2487e6f62edfbf | 1,217 | py | Python | task.py | nricklin/gdal-multiplex-task | e61c27f1fadcffbf160d662b20fb3a54590b9017 | [
"MIT"
] | null | null | null | task.py | nricklin/gdal-multiplex-task | e61c27f1fadcffbf160d662b20fb3a54590b9017 | [
"MIT"
] | 1 | 2019-05-30T19:09:13.000Z | 2019-05-30T19:09:13.000Z | task.py | nricklin/gdal-multiplex-task | e61c27f1fadcffbf160d662b20fb3a54590b9017 | [
"MIT"
] | null | null | null | import subprocess
import json
# Task I/O contract: inputs are mounted under /mnt/work/input, outputs go to
# /mnt/work/output, and an overall status must be written to status.json.
outdir = '/mnt/work/output/'
indir = '/mnt/work/input/'
out_status = '/mnt/work/status.json'
# ports.json carries the task parameters; 'command' is a shell command
# template using $indir / $outdir placeholders.
input_data = json.load(open('/mnt/work/input/ports.json'))
command = input_data['command']
command = command.replace('$indir', indir)
command = command.replace('$outdir', outdir)
print("Running command: ")
print(command)
print('\n')
# shell=True allows compound shell commands from the task definition.
# NOTE(review): this executes externally supplied text as shell — acceptable
# only if the platform fully controls ports.json; confirm that assumption.
proc = subprocess.Popen([command], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
returncode = proc.returncode
print('STDOUT:')
print(out.decode())
print('\n')
with open(out_status, 'w') as f:
    # Note: return code isn't enough to capture errors that happen in a compound command
    # so use the presence of any error message as an indicator of failure
    if returncode == 0 and err.decode() == '':
        msg = "SUCCESS: Execution of input command completed successfully."
        f.write(json.dumps({'status': 'success',
                            'reason': msg}))
    else:
        msg = "ERROR: Execution of input command failed."
        msg += err.decode()
        f.write(json.dumps({'status': 'failed',
                            'reason': msg}))
        raise subprocess.SubprocessError(msg)
| 28.97619 | 94 | 0.649137 |
5b3b0798a18052058d1272960cad30e2dade40b7 | 2,308 | py | Python | tests/unittests/utils/test_yaml_utils.py | krishnakatyal/towhee | c5e043aa1509cf46644ca6b53f691d6ed2647212 | [
"Apache-2.0"
] | null | null | null | tests/unittests/utils/test_yaml_utils.py | krishnakatyal/towhee | c5e043aa1509cf46644ca6b53f691d6ed2647212 | [
"Apache-2.0"
] | 1 | 2022-01-19T06:21:07.000Z | 2022-01-19T06:21:07.000Z | tests/unittests/utils/test_yaml_utils.py | jennyli-z/towhee | 55c55fd961229575b75eae269b55090c839f8dcd | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
from pathlib import Path
from towhee.utils.yaml_utils import load_yaml, dump_yaml
import ruamel.yaml
# Fixture pipeline YAML read by the load tests below.
PIPELINE_PATH = Path(__file__).parent.parent.resolve() / 'test_util' / 'resnet50_embedding' / 'resnet50_embedding.yaml'
# Scratch file written (and removed again) by the dump tests.
cache_path = Path(__file__).parent.parent.resolve() / 'test_cache' / 'test_yaml.yaml'
class TestYamlUtils(unittest.TestCase):
    """
    Unit test for yaml utils.
    """
    def test_load_yaml(self):
        """load_yaml accepts file streams (safe or round-trip) and strings."""
        with open(PIPELINE_PATH, 'r', encoding='utf-8') as input_file:
            data = load_yaml(stream=input_file, typ='safe')
        self.assertIsInstance(data, dict)
        self.assertIn('name', data.keys())
        self.assertIn('operators', data.keys())
        self.assertIn('dataframes', data.keys())
        # typ=None selects ruamel's round-trip loader, which yields a
        # CommentedMap rather than a plain dict.
        with open(PIPELINE_PATH, 'r', encoding='utf-8') as input_file:
            data_1 = load_yaml(stream=input_file, typ=None)
        self.assertIsInstance(data_1, ruamel.yaml.comments.CommentedMap)
        # Loading straight from a YAML string also works.
        src = '{A: a, B: b, C: c}'
        data = load_yaml(stream=src, typ='safe')
        self.assertIsInstance(data, dict)
        self.assertIn('A', data.keys())
        self.assertIn('B', data.keys())
        self.assertIn('C', data.keys())
    def test_dump_yaml(self):
        """dump_yaml writes to a stream when given one, else returns a str."""
        data = {'A': 'a', 'B': 'b', 'C': 'c'}
        # Start from a clean slate in case an earlier run left the file.
        if cache_path.is_file():
            os.remove(cache_path)
        with open(cache_path, 'w', encoding='utf-8') as output_file:
            dump_yaml(data=data, stream=output_file)
        self.assertTrue(cache_path.is_file())
        self.assertEqual(cache_path.suffix, '.yaml')
        os.remove(cache_path)
        # Without a stream, the serialized text is returned instead.
        string = dump_yaml(data=data)
        self.assertIsInstance(string, str)
| 37.836066 | 119 | 0.665511 |
9b513f6cd0be0db3713a57351d1cb1e2dc1daeee | 540 | py | Python | backend/home/migrations/0001_load_initial_data.py | crowdbotics-apps/ferrol-mola-32222 | 75e7ec9e3fcb45f206725c4bad699002a75d55af | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/home/migrations/0001_load_initial_data.py | crowdbotics-apps/ferrol-mola-32222 | 75e7ec9e3fcb45f206725c4bad699002a75d55af | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/home/migrations/0001_load_initial_data.py | crowdbotics-apps/ferrol-mola-32222 | 75e7ec9e3fcb45f206725c4bad699002a75d55af | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | from django.db import migrations
def create_site(apps, schema_editor):
    """Create or refresh the default ``sites.Site`` row (pk=1)."""
    site_model = apps.get_model("sites", "Site")
    custom_domain = "ferrol-mola-32222.botics.co"
    # Assemble the field defaults; the domain is only included when a custom
    # one is configured.
    defaults = {"name": "Ferrol Mola"}
    if custom_domain:
        defaults["domain"] = custom_domain
    site_model.objects.update_or_create(defaults=defaults, id=1)
class Migration(migrations.Migration):
    # Must run after the sites framework's unique-domain migration so the
    # Site table and its constraints exist.
    dependencies = [
        ("sites", "0002_alter_domain_unique"),
    ]
    operations = [
        # Data migration: seed/refresh the default Site row via create_site.
        migrations.RunPython(create_site),
    ]
| 20.769231 | 61 | 0.655556 |
efd5440b297bad9ea4fb44ac074e36ef065df6c4 | 1,769 | py | Python | 7-Ptrace-Code_injection_and_Syscall_interception/evasion.py | galtoubul/Introduction_to_Information_Security | 565181ef2ec9530de8e4af43d97da454ddcc77be | [
"MIT"
] | null | null | null | 7-Ptrace-Code_injection_and_Syscall_interception/evasion.py | galtoubul/Introduction_to_Information_Security | 565181ef2ec9530de8e4af43d97da454ddcc77be | [
"MIT"
] | null | null | null | 7-Ptrace-Code_injection_and_Syscall_interception/evasion.py | galtoubul/Introduction_to_Information_Security | 565181ef2ec9530de8e4af43d97da454ddcc77be | [
"MIT"
] | null | null | null | import server
from subprocess import check_output
class EvadeAntivirusServer(server.CommandServer):
    """The base class for the servers we will implement"""
    def __init__(self):
        super(EvadeAntivirusServer, self).__init__()
        # Queue the pid-discovery payload; its output is delivered to
        # handle_first_payload when the remote side runs it.
        self.add_payload(
            self.payload_for_getting_antivirus_pid(),
            self.handle_first_payload)
    def payload_for_getting_antivirus_pid(self) -> bytes:
        """This function returns a payload for getting the pid of the antivirus.
        Returns:
            The bytes payload for the malware.
        """
        return b'pidof antivirus'
    def get_antivirus_pid(self, product: bytes) -> int:
        """This function extracts the pid from the given product.
        This product is the result of invoking the payload returned from
        `payload_for_getting_antivirus_pid`.
        Returns:
            The pid of the antivirus (as an integer). If the antivirus is not
            found, return -1.
        """
        text = product.decode('latin-1').strip()
        if not text:
            # `pidof` prints nothing when no matching process exists.
            return -1
        # `pidof` may print several space-separated pids when multiple
        # matching processes run; use the first. (The previous bare int()
        # conversion raised ValueError on such multi-pid output.)
        return int(text.split()[0])
    def handle_first_payload(self, product: bytes):
        """React to the pid-discovery result: evade if the antivirus exists."""
        pid = self.get_antivirus_pid(product)
        if pid != -1:
            print(f'Antivirus process id is: {pid}')
            self.evade_antivirus(pid)
        else:
            print('Antivirus not found')
    def evade_antivirus(self, pid: int):
        # WARNING: Don't modify this function, we will implement it for you in
        # all the questions.
        print(f'Oh noes! I should escape {pid}')
# Run the command server on all interfaces when executed as a script.
if __name__ == '__main__':
    EvadeAntivirusServer().run_server(host='0.0.0.0', port=8000)
| 31.589286 | 80 | 0.622951 |
35f511df4ac1123fbb59faea94673545fc605f22 | 1,446 | py | Python | cirq-google/cirq_google/engine/__init__.py | Saibaba-Alapati/Cirq | 782efcd04c3bbf73a0d630306a3d1cfd9966521d | [
"Apache-2.0"
] | 3,326 | 2018-07-18T23:17:21.000Z | 2022-03-29T22:28:24.000Z | cirq-google/cirq_google/engine/__init__.py | resduo/Cirq | 680f897345eb1c71c9242515edda8f04b8594319 | [
"Apache-2.0"
] | 3,443 | 2018-07-18T21:07:28.000Z | 2022-03-31T20:23:21.000Z | cirq-google/cirq_google/engine/__init__.py | resduo/Cirq | 680f897345eb1c71c9242515edda8f04b8594319 | [
"Apache-2.0"
] | 865 | 2018-07-18T23:30:24.000Z | 2022-03-30T11:43:23.000Z | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for running on Google's Quantum Engine.
"""
from cirq_google.engine.calibration import (
Calibration,
)
from cirq_google.engine.calibration_layer import (
CalibrationLayer,
)
from cirq_google.engine.calibration_result import (
CalibrationResult,
)
from cirq_google.engine.engine import (
Engine,
get_engine,
get_engine_calibration,
get_engine_device,
ProtoVersion,
)
from cirq_google.engine.engine_client import (
EngineException,
)
from cirq_google.engine.engine_job import (
EngineJob,
)
from cirq_google.engine.engine_processor import (
EngineProcessor,
)
from cirq_google.engine.engine_timeslot import (
EngineTimeSlot,
)
from cirq_google.engine.engine_program import (
EngineProgram,
)
from cirq_google.engine.engine_sampler import (
get_engine_sampler,
QuantumEngineSampler,
)
| 24.1 | 74 | 0.763485 |
e36d542366c6c55f5afda2640cd1ae08e6215b19 | 5,616 | py | Python | sdk/python/pulumi_azure_native/synapse/v20210301/get_private_endpoint_connection.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/synapse/v20210301/get_private_endpoint_connection.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/synapse/v20210301/get_private_endpoint_connection.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetPrivateEndpointConnectionResult',
'AwaitableGetPrivateEndpointConnectionResult',
'get_private_endpoint_connection',
]
# NOTE(review): this module is produced by the Pulumi SDK generator (see the
# file header) — prefer regenerating over hand-editing this class.
@pulumi.output_type
class GetPrivateEndpointConnectionResult:
    """
    A private endpoint connection
    """
    # The generated __init__ type-checks each argument and stores it via
    # pulumi.set so the @pulumi.getter properties below can read it back.
    def __init__(__self__, id=None, name=None, private_endpoint=None, private_link_service_connection_state=None, provisioning_state=None, type=None):
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if private_endpoint and not isinstance(private_endpoint, dict):
            raise TypeError("Expected argument 'private_endpoint' to be a dict")
        pulumi.set(__self__, "private_endpoint", private_endpoint)
        if private_link_service_connection_state and not isinstance(private_link_service_connection_state, dict):
            raise TypeError("Expected argument 'private_link_service_connection_state' to be a dict")
        pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="privateEndpoint")
    def private_endpoint(self) -> Optional['outputs.PrivateEndpointResponse']:
        """
        The private endpoint which the connection belongs to.
        """
        return pulumi.get(self, "private_endpoint")
    @property
    @pulumi.getter(name="privateLinkServiceConnectionState")
    def private_link_service_connection_state(self) -> Optional['outputs.PrivateLinkServiceConnectionStateResponse']:
        """
        Connection state of the private endpoint connection.
        """
        return pulumi.get(self, "private_link_service_connection_state")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        Provisioning state of the private endpoint connection.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
class AwaitableGetPrivateEndpointConnectionResult(GetPrivateEndpointConnectionResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # Generated awaitable shim: the never-taken ``if False`` branch makes
        # this a generator, so the already-resolved result can be used with
        # ``await`` uniformly; it returns a plain result immediately.
        if False:
            yield self
        return GetPrivateEndpointConnectionResult(
            id=self.id,
            name=self.name,
            private_endpoint=self.private_endpoint,
            private_link_service_connection_state=self.private_link_service_connection_state,
            provisioning_state=self.provisioning_state,
            type=self.type)
def get_private_endpoint_connection(private_endpoint_connection_name: Optional[str] = None,
                                    resource_group_name: Optional[str] = None,
                                    workspace_name: Optional[str] = None,
                                    opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPrivateEndpointConnectionResult:
    """
    A private endpoint connection
    :param str private_endpoint_connection_name: The name of the private endpoint connection.
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    :param str workspace_name: The name of the workspace.
    """
    # Marshal the arguments into the provider invoke call keyed by the
    # azure-native resource token below.
    __args__ = dict()
    __args__['privateEndpointConnectionName'] = private_endpoint_connection_name
    __args__['resourceGroupName'] = resource_group_name
    __args__['workspaceName'] = workspace_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:synapse/v20210301:getPrivateEndpointConnection', __args__, opts=opts, typ=GetPrivateEndpointConnectionResult).value
    # Re-wrap the raw result in the awaitable variant for the public API.
    return AwaitableGetPrivateEndpointConnectionResult(
        id=__ret__.id,
        name=__ret__.name,
        private_endpoint=__ret__.private_endpoint,
        private_link_service_connection_state=__ret__.private_link_service_connection_state,
        provisioning_state=__ret__.provisioning_state,
        type=__ret__.type)
| 41.6 | 193 | 0.696047 |
264041928f7814a68521dae8a9fd3662ebfc2299 | 3,124 | py | Python | src/globus_sdk/config/environments.py | mh-globus/globus-sdk-python | c740ebd85640d5c5fe92fd22e99ec05b1a280f6d | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/globus_sdk/config/environments.py | mh-globus/globus-sdk-python | c740ebd85640d5c5fe92fd22e99ec05b1a280f6d | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/globus_sdk/config/environments.py | mh-globus/globus-sdk-python | c740ebd85640d5c5fe92fd22e99ec05b1a280f6d | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | import logging
import os
from typing import Dict, List, Optional, Type, cast
log = logging.getLogger(__name__)
# Format string for the per-service URL override environment variable.
# The service name is uppercased when substituted, e.g.
# `GLOBUS_SDK_SERVICE_URL_SEARCH=...`
_SERVICE_URL_VAR_FORMAT = "GLOBUS_SDK_SERVICE_URL_{}"
class EnvConfig:
    """Per-environment service-URL configuration.

    Every subclass is registered automatically (keyed by its ``envname``)
    the moment it is defined, so new environments can be added by simply
    declaring a subclass -- see ``get_by_name`` for lookup.
    """

    envname: str
    domain: str
    no_dotapi: List[str] = ["auth"]

    # this same dict is inherited (and therefore shared!) by all subclasses,
    # which is exactly what makes it work as a global registry
    _registry: Dict[str, Type["EnvConfig"]] = {}

    def __init_subclass__(cls, **kwargs):
        # auto-register each new environment class under its envname
        super().__init_subclass__(**kwargs)
        cls._registry[cls.envname] = cls

    @classmethod
    def get_service_url(cls, service) -> str:
        """Build the base URL for *service* in this environment."""
        # an explicit `<service>_url` class attribute always wins
        override_attr = f"{service}_url"
        if hasattr(cls, override_attr):
            return cast(str, getattr(cls, override_attr))

        # services listed in `no_dotapi` live at <service>.<domain>;
        # everything else follows the <service>.api.<domain> pattern
        host = service if service in cls.no_dotapi else f"{service}.api"
        return f"https://{host}.{cls.domain}/"

    @classmethod
    def get_by_name(cls, envname: str) -> Optional[Type["EnvConfig"]]:
        """Return the registered config class for *envname*, or None."""
        return cls._registry.get(envname)
def get_service_url(environment: str, service: str) -> str:
    """Resolve the base URL for *service* under *environment*.

    A ``GLOBUS_SDK_SERVICE_URL_<SERVICE>`` environment variable, when set,
    takes precedence over the environment's configured URL.

    Raises ValueError if *environment* is not a registered environment.
    """
    log.debug(f'Service URL Lookup for "{service}" under env "{environment}"')

    # check for an environment variable of the form
    # GLOBUS_SDK_SERVICE_URL_* and use it ahead of any env config if set
    varname = _SERVICE_URL_VAR_FORMAT.format(service.upper())
    override = os.getenv(varname)
    if override:
        log.debug(f"Got URL from env var, {varname}={override}")
        return override

    env_config = EnvConfig.get_by_name(environment)
    if env_config is None:
        raise ValueError(f'Unrecognized environment "{environment}"')

    url = env_config.get_service_url(service)
    log.debug(f'Service URL Lookup Result: "{service}" is at "{url}"')
    return url
#
# public environments
#
class ProductionEnvConfig(EnvConfig):
    """Config for the default ("production") Globus environment."""
    envname = "production"
    domain = "globus.org"
    # nexus does not follow the usual <service>.api.<domain> pattern, so its
    # URL is pinned explicitly (picked up by EnvConfig.get_service_url)
    nexus_url = "https://nexus.api.globusonline.org/"
class PreviewEnvConfig(EnvConfig):
    """Config for the public "preview" Globus environment."""
    envname = "preview"
    domain = "preview.globus.org"
#
# environments for internal use only
#
for envname in ["sandbox", "integration", "test", "staging"]:
    # use `type()` rather than the `class` syntax to control classnames
    # (class creation triggers EnvConfig.__init_subclass__, which registers
    # the class, so the returned class object does not need to be kept)
    type(
        f"{envname.title()}EnvConfig",
        (EnvConfig,),
        {"envname": envname, "domain": f"{envname}.globuscs.info"},
    )
| 32.206186 | 88 | 0.663572 |
71ad9149634512d91ca2cbc6769ac926f38bc19c | 342 | py | Python | app/__init__.py | hcanul/jinja-admin-flask | 11914dc7a5cdaf036bf24aecc0d24daf86bbd7f2 | [
"MIT"
] | 6 | 2020-06-15T11:08:19.000Z | 2021-03-28T02:46:16.000Z | app/__init__.py | app-generator/jinja-template-sb-admin | 44c722c83fd5a89be67471b952af7ca7585c08db | [
"MIT"
] | 5 | 2022-01-25T20:48:29.000Z | 2022-01-28T11:45:30.000Z | app/__init__.py | app-generator/jinja-template-sb-admin | 44c722c83fd5a89be67471b952af7ca7585c08db | [
"MIT"
] | 2 | 2020-06-19T01:25:05.000Z | 2020-09-21T19:32:27.000Z | # -*- encoding: utf-8 -*-
"""
Copyright (c) 2019 - present AppSeed.us
"""
# import Flask
from flask import Flask
# Inject Flask magic
app = Flask(__name__)
# App Config - the minimal footprint
app.config['TESTING' ] = True
app.config['SECRET_KEY'] = 'S#perS3crEt_JamesBond'
# Import routing to render the pages
from app import views
| 19 | 51 | 0.69883 |
655bf7d4eb61ba4819bcfcf07acc7bf29501fec1 | 4,436 | py | Python | homeassistant/components/downloader.py | hcchu/home-assistant-clone | dbc91c1d48c6570764bbaa58467aa4dc87f2186b | [
"MIT"
] | 1 | 2019-05-19T01:51:57.000Z | 2019-05-19T01:51:57.000Z | homeassistant/components/downloader.py | hcchu/home-assistant-clone | dbc91c1d48c6570764bbaa58467aa4dc87f2186b | [
"MIT"
] | null | null | null | homeassistant/components/downloader.py | hcchu/home-assistant-clone | dbc91c1d48c6570764bbaa58467aa4dc87f2186b | [
"MIT"
] | 1 | 2022-02-12T23:56:40.000Z | 2022-02-12T23:56:40.000Z | """
homeassistant.components.downloader
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides functionality to download files.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/downloader/
"""
import os
import logging
import re
import threading
from homeassistant.helpers import validate_config
from homeassistant.util import sanitize_filename
DOMAIN = "downloader"
SERVICE_DOWNLOAD_FILE = "download_file"
ATTR_URL = "url"
ATTR_SUBDIR = "subdir"
CONF_DOWNLOAD_DIR = 'download_dir'
# pylint: disable=too-many-branches
def setup(hass, config):
    """ Listens for download events to download files.

    Registers the `downloader.download_file` service.  Returns True when the
    component is set up successfully, False when `requests` is missing, the
    config is invalid, or the configured download directory does not exist.
    """
    logger = logging.getLogger(__name__)

    try:
        import requests
    except ImportError:
        logger.exception(("Failed to import requests. "
                          "Did you maybe not execute 'pip install requests'?"))
        return False

    if not validate_config(config, {DOMAIN: [CONF_DOWNLOAD_DIR]}, logger):
        return False

    download_path = config[DOMAIN][CONF_DOWNLOAD_DIR]

    # If path is relative, we assume relative to HASS config dir
    if not os.path.isabs(download_path):
        download_path = hass.config.path(download_path)

    if not os.path.isdir(download_path):
        logger.error(
            "Download path %s does not exist. File Downloader not active.",
            download_path)
        return False

    def download_file(service):
        """ Starts thread to download file specified in the url. """
        if ATTR_URL not in service.data:
            logger.error("Service called but 'url' parameter not specified.")
            return

        def do_download():
            """ Downloads the file. """
            try:
                url = service.data[ATTR_URL]
                subdir = service.data.get(ATTR_SUBDIR)
                if subdir:
                    subdir = sanitize_filename(subdir)

                final_path = None
                req = requests.get(url, stream=True, timeout=10)

                if req.status_code == 200:
                    filename = None

                    # prefer the server-suggested name, if any
                    if 'content-disposition' in req.headers:
                        match = re.findall(r"filename=(\S+)",
                                           req.headers['content-disposition'])
                        if len(match) > 0:
                            filename = match[0].strip("'\" ")

                    if not filename:
                        filename = os.path.basename(url).strip()

                    if not filename:
                        filename = "ha_download"

                    # Remove stuff to ruin paths
                    filename = sanitize_filename(filename)

                    # Do we want to download to subdir, create if needed
                    if subdir:
                        subdir_path = os.path.join(download_path, subdir)

                        # Ensure subdir exist
                        if not os.path.isdir(subdir_path):
                            os.makedirs(subdir_path)

                        final_path = os.path.join(subdir_path, filename)
                    else:
                        final_path = os.path.join(download_path, filename)

                    path, ext = os.path.splitext(final_path)

                    # If file exist append a number.
                    # We test filename, filename_2..
                    tries = 1
                    final_path = path + ext
                    while os.path.isfile(final_path):
                        tries += 1
                        # BUG FIX: `ext` returned by os.path.splitext already
                        # includes the leading dot, so the old
                        # "{}_{}.{}" format produced names like "file_2..txt".
                        final_path = "{}_{}{}".format(path, tries, ext)

                    logger.info("%s -> %s", url, final_path)

                    with open(final_path, 'wb') as fil:
                        for chunk in req.iter_content(1024):
                            fil.write(chunk)

                    logger.info("Downloading of %s done", url)

            except requests.exceptions.ConnectionError:
                logger.exception("ConnectionError occured for %s", url)

                # Remove file if we started downloading but failed
                if final_path and os.path.isfile(final_path):
                    os.remove(final_path)

        threading.Thread(target=do_download).start()

    hass.services.register(DOMAIN, SERVICE_DOWNLOAD_FILE,
                           download_file)

    return True
| 30.805556 | 79 | 0.538999 |
d2617931f69bb777933e9399ff9d4acbbc9f26bb | 2,734 | py | Python | app/core/models.py | eitan-lukin/recipe-app-api | 13a2781c66bd329748786ba4cc7b380ed8a505c0 | [
"MIT"
] | null | null | null | app/core/models.py | eitan-lukin/recipe-app-api | 13a2781c66bd329748786ba4cc7b380ed8a505c0 | [
"MIT"
] | null | null | null | app/core/models.py | eitan-lukin/recipe-app-api | 13a2781c66bd329748786ba4cc7b380ed8a505c0 | [
"MIT"
] | null | null | null | import uuid
import os
from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager, \
PermissionsMixin
from django.conf import settings
def recipe_image_file_path(instance, filename):
    """Generate a unique upload path for a new recipe image.

    The original name is discarded (only its extension is kept) and replaced
    with a random UUID so uploads can never collide.  `instance` is unused but
    required by Django's ``upload_to`` callback signature.
    """
    # everything after the last dot; for a dot-less name this is the whole
    # name, matching the original split('.')[-1] behaviour
    extension = filename.rsplit('.', 1)[-1]
    unique_name = f'{uuid.uuid4()}.{extension}'
    return os.path.join('uploads/recipe/', unique_name)
class UserManager(BaseUserManager):
    """Manager for the custom email-keyed ``User`` model."""

    def create_user(self, email, password=None, **extra_fields):
        """Creates and saves a new user"""
        if not email:
            raise ValueError('Users must have an email address.')
        # normalize the address before storing it
        user = self.model(email=self.normalize_email(email), **extra_fields)
        # set_password hashes the password rather than storing it raw
        user.set_password(password)
        user.save(using=self._db)

        return user

    def create_superuser(self, email, password):
        """Creates and saves a new superuser."""
        # reuse create_user so normalization/hashing happen exactly once
        user = self.create_user(email, password)
        user.is_staff = True
        user.is_superuser = True
        user.save(using=self._db)

        return user
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model that supports using email instead of username."""
    # unique login identifier (see USERNAME_FIELD below)
    email = models.EmailField(max_length=255, unique=True)
    name = models.CharField(max_length=255)
    is_active = models.BooleanField(default=True)
    # staff flag; set by UserManager.create_superuser
    is_staff = models.BooleanField(default=False)

    objects = UserManager()

    # authenticate with the email field rather than a username
    USERNAME_FIELD = 'email'
class Tag(models.Model):
    """Tag to be used for a recipe"""
    name = models.CharField(max_length=255)
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        # CASCADE: If you delete the user then also delete the tag
        on_delete=models.CASCADE,
    )

    def __str__(self):
        # human-readable representation used e.g. in the admin
        return self.name
class Ingredient(models.Model):
    """Ingredient to be used in a recipe"""
    name = models.CharField(max_length=255)
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        # deleting the owning user deletes their ingredients too
        on_delete=models.CASCADE,
    )

    def __str__(self):
        return self.name
class Recipe(models.Model):
    """Recipe object"""
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE
    )
    title = models.CharField(max_length=255)
    time_minutes = models.IntegerField()
    # up to 999.99 (5 digits, 2 decimal places)
    price = models.DecimalField(max_digits=5, decimal_places=2)
    link = models.CharField(max_length=255, blank=True)
    # string references avoid depending on class definition order
    ingredients = models.ManyToManyField('Ingredient')
    tags = models.ManyToManyField('Tag')
    # stored under the path produced by recipe_image_file_path
    image = models.ImageField(null=True, upload_to=recipe_image_file_path)

    def __str__(self):
        return self.title
| 29.397849 | 76 | 0.673007 |
da731754429ac4058055505d4b000f8f03d00694 | 11,959 | py | Python | deal/linter/_transformer.py | orsinium/condition | 4cb8208a6aefe50a86669d3cdded810244cba597 | [
"MIT"
] | 40 | 2018-01-27T09:49:18.000Z | 2019-05-07T08:02:20.000Z | deal/linter/_transformer.py | orsinium/condition | 4cb8208a6aefe50a86669d3cdded810244cba597 | [
"MIT"
] | 6 | 2018-09-05T10:44:36.000Z | 2019-07-06T07:12:19.000Z | deal/linter/_transformer.py | rpdelaney/deal | ebf4244605bb5cc5da5a910010238437d372d5d0 | [
"MIT"
] | 4 | 2018-03-22T11:28:05.000Z | 2019-08-01T07:43:23.000Z | from __future__ import annotations
from enum import Enum
from pathlib import Path
from typing import Iterator, NamedTuple, Union
import astroid
from ._contract import Category
from ._extractors import get_value
from ._func import Func
from ._rules import CheckMarkers, CheckRaises
Priority = int  # second element of a mutation sort key (see Mutation.key)
class TransformationType(Enum):
    """Kinds of edits the Transformer can make; membership in
    ``Transformer.types`` enables each kind (see ``Transformer._disabled``).
    """
    RAISES = 'raises'
    HAS = 'has'
    SAFE = 'safe'
    PURE = 'pure'
    IMPORT = 'import'
class AppendText(NamedTuple):
    """Mutation that appends *text* to the end of an existing line."""
    line: int
    text: str

    def apply(self, lines: list[str]) -> None:
        """Append the text to 1-based line ``self.line``, keeping '\\n' last."""
        index = self.line - 1
        stripped = lines[index].rstrip('\n')
        lines[index] = f'{stripped}{self.text}\n'

    @property
    def key(self) -> tuple[int, Priority]:
        # (line, priority) sort key; priority 1 is this mutation kind's rank
        return (self.line, 1)
class InsertText(NamedTuple):
    """Mutation that inserts a brand-new line before 1-based line ``line``."""
    line: int
    text: str

    def apply(self, lines: list[str]) -> None:
        lines.insert(self.line - 1, self.text + '\n')

    @property
    def key(self) -> tuple[int, Priority]:
        # (line, priority) sort key; priority 2 is this mutation kind's rank
        return (self.line, 2)
class InsertContract(NamedTuple):
    """Mutation that inserts a ``@deal.<contract>`` decorator line."""
    line: int
    contract: Category
    args: list[str]
    indent: int

    def apply(self, lines: list[str]) -> None:
        lines.insert(self.line - 1, str(self) + '\n')

    @property
    def key(self) -> tuple[int, Priority]:
        # (line, priority) sort key; priority 3 is this mutation kind's rank
        return (self.line, 3)

    def __str__(self) -> str:
        prefix = ' ' * self.indent
        # contracts that allow bare decorators are emitted without "()"
        # when there are no arguments
        if self.contract.brackets_optional and not self.args:
            return f'{prefix}@deal.{self.contract.value}'
        joined = ', '.join(self.args)
        return f'{prefix}@deal.{self.contract.value}({joined})'
class Remove(NamedTuple):
    """Mutation that deletes 1-based line ``line`` entirely."""
    line: int

    def apply(self, lines: list[str]) -> None:
        del lines[self.line - 1]

    @property
    def key(self) -> tuple[int, Priority]:
        # (line, priority) sort key; priority 4 is this mutation kind's rank
        return (self.line, 4)
Mutation = Union[AppendText, InsertText, InsertContract, Remove]  # any single line edit
class Transformer(NamedTuple):
    """Transformer adds deal decorators into the given script.
    """
    # source text of the script to transform
    content: str
    path: Path
    # which TransformationTypes are enabled (checked via _disabled)
    types: set[TransformationType]
    # NOTE(review): mutable default is shared across Transformer instances;
    # transform() clears it before use, so this is safe for sequential runs.
    mutations: list[Mutation] = []
    quote: str = "'"

    def transform(self) -> str:
        """Parse ``content``, collect all mutations, and return the edited text."""
        self.mutations.clear()
        tree = astroid.parse(self.content, path=self.path)
        for func in Func.from_astroid(tree):
            self._collect_mutations(func)
        self.mutations.extend(self._mutations_pure())
        self.mutations.extend(self._mutations_import(tree))
        return self._apply_mutations(self.content)

    def _collect_mutations(self, func: Func) -> None:
        """Gather per-function mutations (exceptions, markers, properties)."""
        self.mutations.extend(self._mutations_excs(func))
        self.mutations.extend(self._mutations_markers(func))
        self.mutations.extend(self._mutations_property(func))

    def _mutations_excs(self, func: Func) -> Iterator[Mutation]:
        """Add @deal.raises or @deal.safe if needed.
        """
        cats = {Category.RAISES, Category.SAFE, Category.PURE}

        # collect declared exceptions
        declared: list[Union[str, type]] = []
        for contract in func.contracts:
            if contract.category not in cats:
                continue
            declared.extend(contract.exceptions)

        # collect undeclared exceptions
        excs: set[str] = set()
        for error in CheckRaises().get_undeclared(func, declared):
            assert isinstance(error.value, str)
            excs.add(error.value)

        # if no new exceptions found, add deal.safe
        if not excs:
            if declared:
                return
            if self._disabled(TransformationType.SAFE, TransformationType.PURE):
                return
            if func.has_contract(Category.PURE, Category.SAFE):
                return
            yield InsertContract(
                line=self._get_insert_line(func),
                indent=func.col,
                contract=Category.SAFE,
                args=[],
            )
            return

        # if new exceptions detected, remove old contracts and add a new deal.raises
        if self._disabled(TransformationType.RAISES):
            return
        for contract in func.contracts:
            if contract.category not in cats:
                continue
            yield Remove(contract.line)
            # a removed @deal.pure also implied "no markers"; keep that part
            # by emitting an empty @deal.has alongside the new @deal.raises
            if contract.category == Category.PURE:
                yield InsertContract(
                    line=self._get_insert_line(func),
                    indent=func.col,
                    contract=Category.HAS,
                    args=[],
                )
        # previously declared exceptions first, newly found ones sorted after
        contract_args = [self._exc_as_str(exc) for exc in declared]
        contract_args.extend(sorted(excs))
        yield InsertContract(
            line=self._get_insert_line(func),
            indent=func.col,
            contract=Category.RAISES,
            args=contract_args,
        )

    @staticmethod
    def _exc_as_str(exc) -> str:
        """Return the source representation of an exception spec
        (already-a-string names pass through; classes use __name__).
        """
        if isinstance(exc, str):
            return exc
        return exc.__name__

    def _mutations_markers(self, func: Func) -> Iterator[Mutation]:
        """Add @deal.has if needed.
        """
        if self._disabled(TransformationType.HAS, TransformationType.PURE):
            return
        cats = {Category.HAS, Category.PURE}

        # collect declared markers
        declared: list[str] = []
        for contract in func.contracts:
            if contract.category not in cats:
                continue
            for arg in contract.args:
                value = get_value(arg)
                if isinstance(value, str):
                    declared.append(value)

        # collect undeclared markers
        markers: set[str] = set()
        for error in CheckMarkers().get_undeclared(func, set(declared)):
            assert isinstance(error.value, str)
            markers.add(error.value)

        # if no new markers found, add deal.has()
        if not markers:
            if func.has_contract(Category.PURE, Category.HAS):
                return
            yield InsertContract(
                line=self._get_insert_line(func),
                indent=func.col,
                contract=Category.HAS,
                args=[],
            )
            return

        # if new markers detected, remove old contracts and add a new deal.has
        for contract in func.contracts:
            if contract.category not in cats:
                continue
            yield Remove(contract.line)
            # a removed @deal.pure also implied "no exceptions"; keep that
            # part by emitting a @deal.safe alongside the new @deal.has
            if contract.category == Category.PURE:
                yield InsertContract(
                    line=self._get_insert_line(func),
                    indent=func.col,
                    contract=Category.SAFE,
                    args=[],
                )
        contract_args = [self._exc_as_str(marker) for marker in declared]
        contract_args.extend(sorted(markers))
        yield InsertContract(
            line=self._get_insert_line(func),
            indent=func.col,
            contract=Category.HAS,
            # markers are emitted as quoted string literals
            args=[f'{self.quote}{arg}{self.quote}' for arg in contract_args],
        )

    def _mutations_property(self, func: Func) -> Iterator[Mutation]:
        """Append a mypy suppression to @property/@cached_property lines when
        a decorator was inserted on the following line.
        """
        assert isinstance(func.node, astroid.FunctionDef)
        if func.node.decorators is None:
            return
        assert isinstance(func.node.decorators, astroid.Decorators)
        for decorator in func.node.decorators.nodes:
            if not isinstance(decorator, astroid.Name):
                continue
            if decorator.name not in {'property', 'cached_property'}:
                continue
            # only annotate when we actually inserted something below it
            if not self._has_mutation_on_line(decorator.lineno + 1):
                continue
            yield AppendText(decorator.lineno, ' # type: ignore[misc]')

    def _has_mutation_on_line(self, line: int) -> bool:
        """Return True if any collected mutation targets the given line."""
        return any(mutation.line == line for mutation in self.mutations)

    def _mutations_import(self, tree: astroid.Module) -> Iterator[Mutation]:
        """Add `import deal` if needed.
        """
        if self._disabled(TransformationType.IMPORT):
            return
        # no edits planned -> no import needed
        if not self.mutations:
            return
        # check if already imported
        for stmt in tree.body:
            if not isinstance(stmt, astroid.Import):
                continue
            for name, _ in stmt.names:
                if name == 'deal':
                    return
        # We insert the import after `__future__` imports and module imports.
        # We don't skip `from` imports, though, because they can be multiline.
        line = 1
        for stmt in tree.body:
            if isinstance(stmt, astroid.Import):
                line = stmt.lineno + 1
            if isinstance(stmt, astroid.ImportFrom):
                if stmt.modname == '__future__':
                    line = stmt.lineno + 1
        yield InsertText(line=line, text='import deal')

    def _mutations_pure(self) -> Iterator[Mutation]:
        """Merge `@deal.safe` and `@deal.has` into `@deal.pure` if needed.
        """
        if self._disabled(TransformationType.PURE):
            return
        if not self.mutations:
            return
        # find lines that have both @deal.has and @deal.safe
        lines_has = set()
        lines_safe = set()
        for mut in self.mutations:
            if not isinstance(mut, InsertContract):
                continue
            # only an argument-less @deal.has qualifies for merging
            if mut.contract == Category.HAS and mut.args == []:
                lines_has.add(mut.line)
            if mut.contract == Category.SAFE:
                lines_safe.add(mut.line)
        lines = lines_safe & lines_has
        # remove @deal.has and @deal.safe mutations, replace by @deal.pure
        old_cats = {Category.HAS, Category.SAFE}
        for mut in self.mutations.copy():
            if not isinstance(mut, InsertContract):
                continue
            if mut.line not in lines:
                continue
            assert mut.contract in old_cats, 'unexpected contract generated'
            self.mutations.remove(mut)
            # yield exactly one @deal.pure per merged pair (from the SAFE half)
            if mut.contract == Category.SAFE:
                yield mut._replace(contract=Category.PURE)
        # If PURE transformation is enabled,
        # we emit @deal.safe and @deal.has even if they are disabled, so they can be
        # merged into @deal.pure. So, if they are disabled and were not merged,
        # drop them here.
        if self._disabled(TransformationType.SAFE):
            for mut in self.mutations.copy():
                if isinstance(mut, InsertContract) and mut.contract == Category.SAFE:
                    self.mutations.remove(mut)
        if self._disabled(TransformationType.HAS):
            for mut in self.mutations.copy():
                if isinstance(mut, InsertContract) and mut.contract == Category.HAS:
                    self.mutations.remove(mut)

    @staticmethod
    def _get_insert_line(func: Func) -> int:
        """Return the line where a new decorator should be inserted:
        after existing decorators (skipping staticmethod/classmethod) but
        before the `def` line.
        """
        assert isinstance(func.node, astroid.FunctionDef)
        line = func.line
        if func.node.decorators is None:
            return line
        assert isinstance(func.node.decorators, astroid.Decorators)
        for decorator in func.node.decorators.nodes:
            # some Python versions point to the first decorator, some to `def`
            if decorator.lineno < func.line:
                return func.line # pragma: no cover
            if not isinstance(decorator, astroid.Name):
                continue
            if decorator.name in {'staticmethod', 'classmethod'}:
                continue
            line = decorator.lineno + 1
        return line

    def _apply_mutations(self, content: str) -> str:
        """Apply all collected mutations to *content* and return the result.

        Mutations are applied bottom-up (sorted by key, descending) so that
        earlier insertions/removals do not shift later line numbers.
        """
        if not self.mutations:
            return content
        lines = content.splitlines(keepends=True)
        self.mutations.sort(key=lambda x: x.key, reverse=True)
        for mutation in self.mutations:
            mutation.apply(lines)
        return ''.join(lines)

    def _disabled(self, *expected: TransformationType) -> bool:
        """Check if all of the given types are disabled.
        """
        return not bool(self.types & set(expected))
| 34.168571 | 85 | 0.583912 |
4706bd2a8ffd7ddfcb6fd46e88565ebb790c6346 | 5,266 | py | Python | qa/rpc-tests/cfund-rawtx-paymentrequest-vote.py | stakecom/stakework | a2110b0ba6aa9638a18c2e7ae12f0f229e074f35 | [
"MIT"
] | null | null | null | qa/rpc-tests/cfund-rawtx-paymentrequest-vote.py | stakecom/stakework | a2110b0ba6aa9638a18c2e7ae12f0f229e074f35 | [
"MIT"
] | null | null | null | qa/rpc-tests/cfund-rawtx-paymentrequest-vote.py | stakecom/stakework | a2110b0ba6aa9638a18c2e7ae12f0f229e074f35 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2018 The Stakework Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import StakeWorkTestFramework
from test_framework.cfund_util import *
class CommunityFundVotePaymentrequestRawTX(StakeWorkTestFramework):
    """Tests the state transition of payment requests of the Community fund."""

    def __init__(self):
        super().__init__()
        # start from a fresh chain with a single node
        self.setup_clean_chain = True
        self.num_nodes = 1

    def setup_network(self, split=False):
        self.nodes = self.setup_nodes()
        self.is_network_split = split

    def run_test(self):
        """Drive an accepted proposal through payment-request voting using
        hand-crafted vote transactions embedded in coinbase outputs, and
        check the yes/no tallies after each block.
        """
        self.nodes[0].staking(False)
        activate_cfund(self.nodes[0])
        self.nodes[0].donatefund(100)

        # Get address
        address = self.nodes[0].getnewaddress()

        # Create a proposal
        proposalid0 = self.nodes[0].createproposal(address, 10, 3600, "testprop")["hash"]
        start_new_cycle(self.nodes[0])

        # Accept the proposal
        self.nodes[0].proposalvote(proposalid0, "yes")
        start_new_cycle(self.nodes[0])

        self.nodes[0].proposalvote(proposalid0, "remove")
        slow_gen(self.nodes[0], 5)
        end_cycle(self.nodes[0])

        # Proposal should be accepted
        assert (self.nodes[0].getproposal(proposalid0)["state"] == 1)
        assert (self.nodes[0].getproposal(proposalid0)["status"] == "accepted")

        # Create payment requests
        paymentrequestid0 = self.nodes[0].createpaymentrequest(proposalid0, 1, "test0")["hash"]
        paymentrequestid1 = self.nodes[0].createpaymentrequest(proposalid0, 1, "test1")["hash"]
        slow_gen(self.nodes[0], 1)

        # pre-flight tests: no votes recorded yet for either request
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesNo'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesYes'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesNo'] == 0)

        # Create valid vote tx's ('c4' = yes vote, 'c5' = no vote)
        pr0_vote_tx_yes = create_vote_tx(self.nodes[0], 'c3', 'c4', paymentrequestid0)
        pr0_vote_tx_no = create_vote_tx(self.nodes[0], 'c3', 'c5', paymentrequestid0)
        pr1_vote_tx_yes = create_vote_tx(self.nodes[0], 'c3', 'c4', paymentrequestid1)

        # Make a proper good vote - yes
        self.nodes[0].coinbaseoutputs([pr0_vote_tx_yes])
        slow_gen(self.nodes[0], 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesNo'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesYes'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesNo'] == 0)

        # Make a proper good vote - no
        self.nodes[0].coinbaseoutputs([pr0_vote_tx_no])
        slow_gen(self.nodes[0], 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesNo'] == 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesYes'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesNo'] == 0)

        # Insert multiple yes votes -- only one vote per block should count
        self.nodes[0].coinbaseoutputs([pr0_vote_tx_yes, pr0_vote_tx_yes, pr0_vote_tx_yes])
        slow_gen(self.nodes[0], 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 2)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesNo'] == 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesYes'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesNo'] == 0)

        # Insert yes and no votes -- only the first (yes) should count
        self.nodes[0].coinbaseoutputs([pr0_vote_tx_yes, pr0_vote_tx_no])
        slow_gen(self.nodes[0], 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 3)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesNo'] == 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesYes'] == 0)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesNo'] == 0)

        # Insert votes for multiple payment requests -- one vote each counts
        self.nodes[0].coinbaseoutputs([pr0_vote_tx_yes, pr1_vote_tx_yes])
        slow_gen(self.nodes[0], 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 4)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesNo'] == 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesYes'] == 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid1)['votesNo'] == 0)

        # Insert bad vote tx with double vote in string -- must not count
        pr0_bad_vote_tx = create_vote_tx(self.nodes[0], 'c3', 'c4c4', paymentrequestid0)
        self.nodes[0].coinbaseoutputs([pr0_bad_vote_tx])
        slow_gen(self.nodes[0], 1)
        assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 4)
assert (self.nodes[0].getpaymentrequest(paymentrequestid0)['votesYes'] == 4)
if __name__ == '__main__':
    # run the functional test directly when executed as a script
    CommunityFundVotePaymentrequestRawTX().main()
| 45.008547 | 95 | 0.676984 |
7abdac626eaaf2f2f6320553fced3eb5a1585484 | 3,710 | py | Python | test/center_loss_test.py | KittenCN/pyFaceNet | 0804d06a3533a83ff865a3c4343cfca2a5cbe063 | [
"MIT"
] | 1 | 2020-04-27T22:52:14.000Z | 2020-04-27T22:52:14.000Z | test/center_loss_test.py | KittenCN/pyFaceNet | 0804d06a3533a83ff865a3c4343cfca2a5cbe063 | [
"MIT"
] | null | null | null | test/center_loss_test.py | KittenCN/pyFaceNet | 0804d06a3533a83ff865a3c4343cfca2a5cbe063 | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2016 David Sandberg
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import unittest
import tensorflow as tf
import numpy as np
import facenet
class CenterLossTest(unittest.TestCase):
    """Unit test for the center-loss implementation in facenet."""

    def testCenterLoss(self):
        """Feed features drawn exactly at known class centers; after many
        updates the estimated centers should converge to the true grid and
        the loss should approach zero.
        """
        batch_size = 16
        nrof_features = 2
        nrof_classes = 16
        alfa = 0.5  # center update rate

        with tf.Graph().as_default():

            features = tf.placeholder(tf.float32, shape=(batch_size, nrof_features), name='features')
            labels = tf.placeholder(tf.int32, shape=(batch_size,), name='labels')

            # Define center loss
            center_loss, centers = facenet.center_loss_new(features, labels, alfa, nrof_classes)

            # true centers: a 4x4 grid in 2-D feature space, one per class
            label_to_center = np.array([
                [-3, -3], [-3, -1], [-3, 1], [-3, 3],
                [-1, -3], [-1, -1], [-1, 1], [-1, 3],
                [1, -3], [1, -1], [1, 1], [1, 3],
                [3, -3], [3, -1], [3, 1], [3, 3]
            ])

            sess = tf.Session()
            with sess.as_default():
                sess.run(tf.global_variables_initializer())
                np.random.seed(seed=666)  # deterministic label sequence

                for _ in range(0, 100):
                    # Create array of random labels
                    lbls = np.random.randint(low=0, high=nrof_classes, size=(batch_size,))
                    feats = create_features(label_to_center, batch_size, nrof_features, lbls)
                    center_loss_, centers_ = sess.run([center_loss, centers], feed_dict={features: feats, labels: lbls})

                # After a large number of updates the estimated centers should be close to the true ones
                np.testing.assert_almost_equal(centers_, label_to_center, decimal=5, err_msg='Incorrect estimated centers')
                np.testing.assert_almost_equal(center_loss_, 0.0, decimal=5, err_msg='Incorrect center loss')
def create_features(label_to_center, batch_size, nrof_features, labels):
    """Build a (batch_size, nrof_features) float array where row ``i`` holds
    the first ``nrof_features`` components of the center assigned to
    ``labels[i]``.
    """
    feats = np.zeros((batch_size, nrof_features))
    for row in range(batch_size):
        center = label_to_center[labels[row]]
        feats[row] = center[:nrof_features]
    return feats
if __name__ == "__main__":
unittest.main()
| 42.159091 | 123 | 0.598922 |
17f0ae0eae6fadf4e4cc3b2765f9ec23057340ce | 5,295 | py | Python | SalishSeaTools/tests/test_unit_conversions.py | remanevy/Package | b8c394c5b886b2a85063f8da3957ea3ca4bbec3f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | SalishSeaTools/tests/test_unit_conversions.py | remanevy/Package | b8c394c5b886b2a85063f8da3957ea3ca4bbec3f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | SalishSeaTools/tests/test_unit_conversions.py | remanevy/Package | b8c394c5b886b2a85063f8da3957ea3ca4bbec3f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright 2013-2016 The Salish Sea MEOPAR Contributors
# and The University of British Columbia
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the unit_conversions module.
"""
from __future__ import division
import arrow
import numpy as np
import pytest
from salishsea_tools import unit_conversions
def test_M_PER_S__KM_PER_HR_constant_value():
    # 3600 s/hr over 1000 m/km: 1 m/s == 3.6 km/hr
    expected = 3600 / 1000
    np.testing.assert_allclose(unit_conversions.M_PER_S__KM_PER_HR, expected)
def test_M_PER_S__KNOTS_constant_value():
    # 3600 s/hr over 1852 m per nautical mile
    expected = 3600 / 1852
    np.testing.assert_allclose(unit_conversions.M_PER_S__KNOTS, expected)
def test_KNOTS__M_PER_S_constant_value():
    # inverse of M_PER_S__KNOTS: 1852 m per nautical mile over 3600 s/hr
    expected = 1852 / 3600
    np.testing.assert_allclose(unit_conversions.KNOTS__M_PER_S, expected)
@pytest.mark.parametrize('m_per_s, expected', [
    (0, 0),
    (1, 3.6),
])
def test_mps_kph(m_per_s, expected):
    # scalar m/s -> km/hr conversion
    np.testing.assert_allclose(
        unit_conversions.mps_kph(m_per_s), expected)
def test_mps_kph_ndarray():
    # conversion works element-wise on arrays too
    kph = unit_conversions.mps_kph(np.array([0, 1]))
    np.testing.assert_allclose(kph, np.array([0, 3.6]))
@pytest.mark.parametrize('m_per_s, expected', [
    (0, 0),
    (1, 1.94384),
])
def test_mps_knots(m_per_s, expected):
    # scalar m/s -> knots; expected value is rounded, hence the rtol
    np.testing.assert_allclose(
        unit_conversions.mps_knots(m_per_s), expected, rtol=1e-05)
def test_mps_knots_ndarray():
    # conversion works element-wise on arrays too
    knots = unit_conversions.mps_knots(np.array([0, 1]))
    np.testing.assert_allclose(knots, np.array([0, 1.94384]), rtol=1e-05)
@pytest.mark.parametrize('knots, expected', [
    (0, 0),
    (1, 0.514444),
])
def test_knots_mps(knots, expected):
    # scalar knots -> m/s; expected value is rounded, hence the rtol
    np.testing.assert_allclose(
        unit_conversions.knots_mps(knots), expected, rtol=1e-05)
def test_knots_mps_ndarray():
    # conversion works element-wise on arrays too
    knots = unit_conversions.knots_mps(np.array([0, 1]))
    np.testing.assert_allclose(knots, np.array([0, 0.514444]), rtol=1e-05)
@pytest.mark.parametrize('wind_to, expected', [
    (0, 270),
    (90, 180),
    (180, 90),
    (270, 0),
    (359, 271),
])
def test_wind_to_from(wind_to, expected):
    # "blowing toward" direction converted to "blowing from" direction
    np.testing.assert_allclose(
        unit_conversions.wind_to_from(wind_to), expected)
def test_wind_to_from_ndarray():
    # conversion works element-wise on arrays too
    wind_from = unit_conversions.wind_to_from(
        np.array([0, 90, 180, 270, 359]))
    np.testing.assert_allclose(wind_from, np.array([270, 180, 90, 0, 271]))
class TestBearingHeading(object):
    """Unit tests for bearing_heading() function.
    """
    @pytest.mark.parametrize('bearing, expected', [
        (0, 'N'),
        (27, 'NNE'),
        (359, 'N'),
    ])
    def test_default_16_points(self, bearing, expected):
        # default compass resolution is 16 points (N, NNE, NE, ...)
        heading = unit_conversions.bearing_heading(bearing)
        assert heading == expected

    @pytest.mark.parametrize('bearing, expected', [
        (0, 'N'),
        (27, 'NE'),
        (359, 'N'),
    ])
    def test_8_points(self, bearing, expected):
        # a coarser compass can be supplied; note the list wraps back to 'N'
        heading = unit_conversions.bearing_heading(
            bearing,
            headings=['N', 'NE', 'E', 'SE', 'S', 'SW', 'W', 'NW', 'N'])
        assert heading == expected
class TestHumanizeTimeOfDay(object):
    """Unit tests for humanize_time_of_day() function.

    Each case pins the lower boundary, an interior time, and the upper
    boundary of one of the seven daily bands:
    overnight [00:00, 06:00), early morning [06:00, 09:00),
    late morning [09:00, 12:00), early afternoon [12:00, 15:00),
    late afternoon [15:00, 18:00), early evening [18:00, 21:00),
    late evening [21:00, 24:00).
    """
    @pytest.mark.parametrize('date_time, expected', [
        # overnight: 00:00 - 05:59:59
        (arrow.get('2015-12-26 00:00:00'), 'overnight Saturday'),
        (arrow.get('2015-12-26 02:15:42'), 'overnight Saturday'),
        (arrow.get('2015-12-26 05:59:59'), 'overnight Saturday'),
        # early morning: 06:00 - 08:59:59
        (arrow.get('2015-12-26 06:00:00'), 'early Saturday morning'),
        (arrow.get('2015-12-26 07:22:51'), 'early Saturday morning'),
        (arrow.get('2015-12-26 08:59:59'), 'early Saturday morning'),
        # late morning: 09:00 - 11:59:59
        (arrow.get('2015-12-26 09:00:00'), 'late Saturday morning'),
        (arrow.get('2015-12-26 09:52:43'), 'late Saturday morning'),
        (arrow.get('2015-12-26 11:59:59'), 'late Saturday morning'),
        # early afternoon: 12:00 - 14:59:59
        (arrow.get('2015-12-25 12:00:00'), 'early Friday afternoon'),
        (arrow.get('2015-12-25 13:36:11'), 'early Friday afternoon'),
        (arrow.get('2015-12-25 14:59:59'), 'early Friday afternoon'),
        # late afternoon: 15:00 - 17:59:59
        (arrow.get('2015-12-25 15:00:00'), 'late Friday afternoon'),
        (arrow.get('2015-12-25 16:09:21'), 'late Friday afternoon'),
        (arrow.get('2015-12-25 17:59:59'), 'late Friday afternoon'),
        # early evening: 18:00 - 20:59:59
        (arrow.get('2015-12-27 18:00:00'), 'early Sunday evening'),
        (arrow.get('2015-12-27 18:01:56'), 'early Sunday evening'),
        (arrow.get('2015-12-27 20:59:59'), 'early Sunday evening'),
        # late evening: 21:00 - 23:59:59
        (arrow.get('2015-12-27 21:00:00'), 'late Sunday evening'),
        (arrow.get('2015-12-27 23:43:43'), 'late Sunday evening'),
        (arrow.get('2015-12-27 23:59:59'), 'late Sunday evening'),
    ])
    def test_humanize_time_of_day(self, date_time, expected):
        # Expected strings include the weekday name, so fixed dates are used.
        result = unit_conversions.humanize_time_of_day(date_time)
        assert result == expected
| 34.16129 | 77 | 0.663645 |
1c7c48878823a68e109244f3c551abf93439812b | 173 | py | Python | demo/DiSC_Demo/DiSC_Demo/urls.py | UMKC-BigDataLab/DiSC | fda81260a9f54a4519cceb1a4b0f501f095b6b4b | [
"Apache-2.0"
] | null | null | null | demo/DiSC_Demo/DiSC_Demo/urls.py | UMKC-BigDataLab/DiSC | fda81260a9f54a4519cceb1a4b0f501f095b6b4b | [
"Apache-2.0"
] | null | null | null | demo/DiSC_Demo/DiSC_Demo/urls.py | UMKC-BigDataLab/DiSC | fda81260a9f54a4519cceb1a4b0f501f095b6b4b | [
"Apache-2.0"
] | 1 | 2018-08-24T17:44:52.000Z | 2018-08-24T17:44:52.000Z | from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('admin/', admin.site.urls),
path('disc/', include('configure.urls')),
]
| 21.625 | 45 | 0.693642 |
485d18cb757cf96780754e925799ef3ea8def27c | 10,025 | py | Python | cherry/base.py | Windsooon/cherry | bc6ae1f0284d837fb95279098538530312313534 | [
"MIT"
] | 433 | 2019-07-23T06:51:05.000Z | 2022-03-29T03:43:49.000Z | cherry/base.py | Windsooon/cherry | bc6ae1f0284d837fb95279098538530312313534 | [
"MIT"
] | 7 | 2019-09-06T09:34:02.000Z | 2022-01-19T07:16:04.000Z | cherry/base.py | Windsooon/cherry | bc6ae1f0284d837fb95279098538530312313534 | [
"MIT"
] | 34 | 2019-08-27T09:50:29.000Z | 2022-03-25T01:55:35.000Z | # -*- coding: utf-8 -*-
"""
cherry.base
~~~~~~~~~~~~
Base method for cherry
:copyright: (c) 2018-2020 by Windson Yang
:license: MIT License, see LICENSE for more details.
"""
import os
import pickle
import tarfile
import hashlib
import codecs
import urllib
import logging
import numpy as np
from collections import namedtuple
from urllib.request import urlretrieve
from .exceptions import *
from .common import *
from sklearn.feature_extraction._stop_words import ENGLISH_STOP_WORDS
from sklearn.feature_extraction.text import CountVectorizer, \
TfidfVectorizer, HashingVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.linear_model import SGDClassifier
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.datasets import load_files
# Directory of the `cherry` package itself (two levels above this file).
CHERRY_DIR = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'cherry')
# All datasets and their pickled caches live under ./datasets of the
# current working directory (created on demand by load_all()).
DATA_DIR = os.path.join(os.getcwd(), 'datasets')

# Public API of this module.
__all__ = ['DATA_DIR',
           'get_stop_words',
           'load_data',
           'write_file',
           'load_all',
           'load_cache',
           'get_vectorizer_and_clf',
           'get_tokenizer',
           'get_vectorizer',
           'get_clf']
def get_stop_words(language='English'):
    '''
    Return the stop-word collection for ``language``.

    'English' uses scikit-learn's built-in list; any other language is
    looked up in STOP_WORDS (from the star-imported .common module).

    There are several known issues in the provided 'english' stop word
    list. It does not aim to be a general, 'one-size-fits-all' solution
    as some tasks may require a more custom solution.
    See https://aclweb.org/anthology/W18-2502 for more details.

    TODO: add IDF after every stop word.

    :raises NotSupportError: when no list exists for ``language``.
    '''
    if language == 'English':
        return ENGLISH_STOP_WORDS
    try:
        return STOP_WORDS[language]
    except KeyError:
        error = 'Cherry didn\'t support {0} at this moment.'.format(language)
        raise NotSupportError(error)
def load_all(model, language=None, preprocessing=None, categories=None, encoding=None, vectorizer=None,
             vectorizer_method=None, clf=None, clf_method=None, x_data=None, y_data=None):
    '''
    Resolve the training corpus and the (vectorizer, classifier) pair.

    When ``x_data``/``y_data`` are not supplied, the corpus is loaded via
    load_data() (fetching a built-in dataset if needed) and the optional
    ``preprocessing`` callable is applied to every document.

    :return: tuple (x_data, y_data, vectorizer, clf).
    :raises FilesNotFoundError: when the dataset cannot be located.
    '''
    # Make sure the datasets directory exists before any load/fetch.
    if not os.path.exists(DATA_DIR):
        os.mkdir(DATA_DIR)
    if not (x_data and y_data):
        try:
            bunch = load_data(model, categories=categories, encoding=encoding)
        except FilesNotFoundError:
            error = ('Please make sure your put the {0} data inside `datasets` '
                     'folder or use model inside "email", "review" or "newsgroups".'.format(model))
            raise FilesNotFoundError(error)
        if preprocessing:
            bunch.data = list(map(preprocessing, bunch.data))
        x_data, y_data = bunch.data, bunch.target
    vectorizer, clf = get_vectorizer_and_clf(
        language, vectorizer, clf,
        vectorizer_method, clf_method)
    return x_data, y_data, vectorizer, clf
def load_data(model, categories=None, encoding=None):
    '''
    Load the dataset named ``model``, preferring a local copy and
    falling back to a remote download for built-in models.
    '''
    has_local_copy = os.path.exists(os.path.join(DATA_DIR, model))
    loader = _load_data_from_local if has_local_copy else _load_data_from_remote
    return loader(model, categories=categories, encoding=encoding)
def _load_data_from_local(
        model, categories=None, encoding=None):
    '''
    Load a dataset for ``model`` from the local ``datasets`` directory.

    1. If a zlib-compressed pickle cache (<model>.pkz) exists, load it.
    2. Otherwise build the cache from the raw files with sklearn's
       ``load_files`` and write it next to them for the next call.

    :raises NotSupportError: when an existing cache file cannot be read
        (corrupt/incompatible pickle); the user is told to delete it.
    '''
    model_path = os.path.join(DATA_DIR, model)
    cache_path = os.path.join(model_path, model + '.pkz')
    if os.path.exists(cache_path):
        try:
            with open(cache_path, 'rb') as f:
                compressed_content = f.read()
            uncompressed_content = codecs.decode(
                compressed_content, 'zlib_codec')
            return pickle.loads(uncompressed_content)['all']
        # Broad except is deliberate: decode/unpickle can fail in many
        # ways on a corrupt cache.  (Fixed: the original bound the
        # exception to an unused variable.)
        except Exception:
            error = ('Can\'t load cached data from {0}. '
                     'Please try again after delete cache files.'.format(model))
            raise NotSupportError(error)
    # No cache yet: read the raw dataset and persist a compressed cache.
    cache = dict(all=load_files(
        model_path, categories=categories, encoding=encoding))
    compressed_content = codecs.encode(pickle.dumps(cache), 'zlib_codec')
    with open(cache_path, 'wb') as f:
        f.write(compressed_content)
    return cache['all']
def _load_data_from_remote(model, categories=None, encoding=None):
    '''
    Download a built-in dataset described in BUILD_IN_MODELS, verify and
    unpack it, then load it from disk via _load_data_from_local().
    '''
    if model not in BUILD_IN_MODELS:
        error = ('{0} is not in BUILD_IN_MODELS.').format(model)
        raise FilesNotFoundError(error)
    filename, url, checksum, default_encoding = BUILD_IN_MODELS[model]
    # The original data can be found at:
    # https://people.csail.mit.edu/jrennie/20Newsgroups/20news-bydate.tar.gz
    meta_data_c = namedtuple(
        'meta_data_c', ['filename', 'url', 'checksum', 'encoding'])
    meta_data = meta_data_c(
        filename=filename, url=url,
        checksum=checksum, encoding=default_encoding)
    _fetch_remote(meta_data, DATA_DIR)
    _decompress_data(meta_data.filename, DATA_DIR)
    return _load_data_from_local(
        model, categories=categories, encoding=default_encoding)
def _fetch_remote(remote, dirname=None):
    """
    Download ``remote.url`` into ``dirname`` and verify its SHA256 checksum.

    Helper copied from sklearn.datasets.base; ``remote`` carries
    ``filename``, ``url`` and ``checksum`` fields.

    :return: path of the downloaded file.
    :raises IOError: when the checksum does not match.
    """
    if dirname is None:
        file_path = remote.filename
    else:
        file_path = os.path.join(dirname, remote.filename)
    print('Downloading data from {0}'.format(remote.url))
    urlretrieve(remote.url, file_path)
    actual_checksum = _sha256(file_path)
    if remote.checksum != actual_checksum:
        raise IOError("{} has an SHA256 checksum ({}) "
                      "differing from expected ({}), "
                      "file may be corrupted.".format(file_path, actual_checksum,
                                                     remote.checksum))
    return file_path
def _sha256(path):
"""
Function from sklearn
Calculate the sha256 hash of the file at path.
"""
sha256hash = hashlib.sha256()
chunk_size = 8192
with open(path, "rb") as f:
while True:
buffer = f.read(chunk_size)
if not buffer:
break
sha256hash.update(buffer)
return sha256hash.hexdigest()
def _decompress_data(filename, model_path):
'''
Function from sklearn
'''
file_path = os.path.join(model_path, filename)
logging.debug("Decompressing %s", file_path)
tarfile.open(file_path, "r:gz").extractall(path=model_path)
os.remove(file_path)
def _train_test_split(cache, test_size=0.1):
    # Split the cached dataset bunch (cache['all']) into train/test sets.
    # NOTE(review): `train_test_split` is not imported in this module's
    # visible import block -- presumably it arrives via one of the
    # star-imports (.common / .exceptions); confirm, otherwise calling
    # this helper raises NameError.
    data_lst = list()
    target = list()
    filenames = list()
    data = cache['all']
    # Copy the bunch's parallel lists, then write normalized versions back
    # (targets and filenames become ndarrays).
    data_lst.extend(data.data)
    target.extend(data.target)
    filenames.extend(data.filenames)
    data.data = data_lst
    data.target = np.array(target)
    data.filenames = np.array(filenames)
    # random_state fixed at 0 for reproducible splits
    return train_test_split(data.data, data.target, test_size=test_size, random_state=0)
def write_file(path, data):
    '''
    Append ``data`` to the file at ``path`` (the file is created if it
    does not exist yet).
    '''
    with open(path, 'a+') as handle:
        handle.write(data)
def write_cache(model, content, path):
    '''
    Pickle and zlib-compress ``content``, storing it at
    ``datasets/<model>/<path>``.
    '''
    target = os.path.join(DATA_DIR, model + '/' + path)
    payload = codecs.encode(pickle.dumps(content), 'zlib_codec')
    with open(target, 'wb') as out:
        out.write(payload)
def load_cache(model, path):
    '''
    Load and return the zlib-compressed pickle cache stored at
    ``datasets/<model>/<path>``.

    :raises CacheNotFoundError: when the file is missing or unreadable.
    '''
    cache_path = os.path.join(DATA_DIR, model + '/' + path)
    # Guard clause first: a missing file is the common, cheap failure.
    if not os.path.exists(cache_path):
        error = (
            'Can\'t find cache files')
        raise CacheNotFoundError(error)
    try:
        with open(cache_path, 'rb') as f:
            compressed_content = f.read()
        uncompressed_content = codecs.decode(
            compressed_content, 'zlib_codec')
        return pickle.loads(uncompressed_content)
    except Exception as exc:
        error = (
            'Can\'t load cached files.')
        # Chain the original exception so the root cause stays visible
        # (the original discarded it).
        raise CacheNotFoundError(error) from exc
def english_tokenizer_wrapper(text):
    """Tokenize English ``text`` with NLTK, dropping 1-character tokens."""
    # Import locally so NLTK is only required when English is used.
    from nltk.tokenize import word_tokenize
    tokens = word_tokenize(text)
    return [token for token in tokens if len(token) > 1]
def chinese_tokenizer_wrapper(text):
    """Tokenize Chinese ``text`` with jieba, dropping 1-character tokens."""
    # Import locally so jieba is only required when Chinese is used.
    import jieba
    tokens = jieba.cut(text)
    return [token for token in tokens if len(token) > 1]
def get_tokenizer(language):
    '''
    Return the tokenizer function matching ``language``.

    :raises NotSupportError: for languages other than English/Chinese;
        the caller must then supply a custom tokenizer.
    '''
    if language == 'English':
        return english_tokenizer_wrapper
    elif language == 'Chinese':
        return chinese_tokenizer_wrapper
    else:
        # Fixed the grammar of the original message
        # ("...when the language is nor English or Chinese").
        raise NotSupportError((
            'You need to specify tokenizer function ' +
            'when the language is neither English nor Chinese.'))
def get_vectorizer_and_clf(
        language, vectorizer, clf, vectorizer_method, clf_method):
    '''
    Fill in default vectorizer/classifier wherever the caller passed
    a falsy value, building them from the *_method names.
    '''
    vectorizer = vectorizer or get_vectorizer(language, vectorizer_method)
    clf = clf or get_clf(clf_method)
    return vectorizer, clf
def get_vectorizer(language, vectorizer_method):
    '''
    Build the sklearn vectorizer named by ``vectorizer_method``,
    wired to the language's tokenizer and stop-word list.

    :raises MethodNotFoundError: for unknown method names.
    '''
    vectorizers = {
        'Count': CountVectorizer,
        'Tfidf': TfidfVectorizer,
        'Hashing': HashingVectorizer,
    }
    if vectorizer_method not in vectorizers:
        error = 'Please make sure vectorizer_method in "Count", "Tfidf" or "Hashing".'
        raise MethodNotFoundError(error)
    vectorizer_cls = vectorizers[vectorizer_method]
    return vectorizer_cls(
        tokenizer=get_tokenizer(language),
        stop_words=get_stop_words(language))
def get_clf(clf_method):
    '''
    Build the default sklearn classifier named by ``clf_method`` with
    cherry's preset hyper-parameters.

    :raises MethodNotFoundError: for unknown method names.
    '''
    classifiers = {
        'MNB': (MultinomialNB, {'alpha': 0.1}),
        'SGD': (SGDClassifier, {'loss': 'hinge', 'penalty': 'l2', 'alpha': 1e-3, 'max_iter': 5, 'tol': None}),
        'RandomForest': (RandomForestClassifier, {'max_depth': 5}),
        'AdaBoost': (AdaBoostClassifier, {}),
    }
    if clf_method not in classifiers:
        error = 'Please make sure clf_method in "MNB", "SGD", "RandomForest" or "AdaBoost".'
        raise MethodNotFoundError(error)
    clf_cls, parameters = classifiers[clf_method]
    return clf_cls(**parameters)
| 35.175439 | 110 | 0.650973 |
44ea836d8adf48a4cd8247338cd9e9f7f7b91557 | 4,269 | py | Python | LF_total_energy_variation_multiplication.py | kimukook/variable_length_oscillating_pendulum | 486aa95fe4b9cbaa6cbeb542209259484f48e191 | [
"MIT"
] | null | null | null | LF_total_energy_variation_multiplication.py | kimukook/variable_length_oscillating_pendulum | 486aa95fe4b9cbaa6cbeb542209259484f48e191 | [
"MIT"
] | null | null | null | LF_total_energy_variation_multiplication.py | kimukook/variable_length_oscillating_pendulum | 486aa95fe4b9cbaa6cbeb542209259484f48e191 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
import pendulum
from pendulum import Pendulum
from functools import partial
from scipy import io
'''
This is a script that computes a specific function over the state space (phase space). The function considered here
is for proving the asymptotic stability for the variable-length pendulum problem.
V * L
=====================================
Author : Muhan Zhao
Date : Dec. 26, 2019
Location: UC San Diego, La Jolla, CA
=====================================
'''
def compute_total_energy_LF(p: Pendulum):
    '''
    Evaluate the Lyapunov-function candidate V * L at the pendulum's
    latest sampled state, where V is the total mechanical energy

        V = 1/2 * m * (L^2 * [d(phi)/dt]^2 + Ldot^2) + m * g * (l0 - L * cos(phi))

    Note: the expression below is V multiplied by one extra factor of L
    (every term carries an additional L), matching this script's
    "total energy variation multiplication" function.

    :param p: Pendulum whose last (phi, dphi) entry is evaluated.
    :return: scalar value of V * L at that state.
    '''
    # assemble the state: latest angle and angular velocity
    state = np.hstack((p.wave_phi[-1], p.wave_dphi[-1]))
    # compute the length and time-change of the length
    L = p.compute_length(state)
    Ldot = p.compute_length_dot(state)
    # Compute the total energy (times L)
    V = 1/2 * p.m * (L**3 * state[1]**2 + Ldot**2 * L) + p.m * p.g * (p.l0 * L - L**2 * np.cos(state[0]))
    # # Disabled clipping of large values (kept for reference):
    # if V >= 50:
    #     V = 50
    return V
def compute_total_energy_LF_derivative(p: Pendulum):
    '''
    Return the sign of the time derivative of the V * L Lyapunov
    candidate at the pendulum's latest state: 0 inside a small
    dead-band (|dV| < 1e-4), otherwise -1 or +1.

    :param p: Pendulum whose last (phi, dphi) entry is evaluated.
    :return: int in {-1, 0, 1}.
    '''
    phi = np.hstack((p.wave_phi[-1], p.wave_dphi[-1]))
    f = p.variable_length_eom(phi)
    # Chain rule: dV/dt = dV/dphi * dphi/dt + dV/ddphi * ddphi/dt,
    # with the partials expanded symbolically below.
    dVdphi = 1/2 * p.m * (p.l0**3*3*(1+p.delta*phi[0]*phi[1])**2*p.delta*phi[1]**3+p.l0**3*p.delta**3*phi[1]*(phi[0]*f[1]+phi[1]**2)**2+p.l0**3*p.delta**2*2*(1+p.delta*phi[0]*phi[1])*(phi[0]*f[1]+phi[1]**2)*f[1])\
             +p.g*p.m*(p.l0**2*p.delta*phi[1]-2*p.l0**2*(1+p.delta*phi[0]*phi[1])*p.delta*phi[1]*np.cos(phi[0])+np.sin(phi[0])*p.l0**2*(1+p.delta*phi[0]*phi[1])**2)
    dVddphi = 1/2 * p.m * (p.l0**3*3*(1+p.delta*phi[0]*phi[1])**2*p.delta*phi[0]*phi[1]**2+p.l0**3*(1+p.delta*phi[0]*phi[1])**3*2*phi[1]+p.l0**3*p.delta**3*phi[0]*(phi[0]*f[1]+phi[1]**2)**2+p.l0**3*(1+p.delta*phi[0]*phi[1])*p.delta**2*2*(phi[0]*f[1]+phi[1]**2)*2*phi[1])\
              +p.g*p.m*(p.l0**2*p.delta*phi[0]-p.l0**2*np.cos(phi[0])*2*(1+p.delta*phi[0]*phi[1])*p.delta*phi[0])
    dV = dVdphi * phi[1] + dVddphi * f[1]
    # Dead-band then sign.  Fixed boundary bug: the original used strict
    # comparisons on both sides, so |dV| == 1e-4 exactly fell through to
    # `pass` and returned None.
    if abs(dV) < 1e-4:
        return 0
    return -1 if dV < 0 else 1
# --- simulation parameters ---
d = .2       # NOTE(review): apparently unused below -- the deltas are set in `attributes`; confirm
T = .6       # simulation horizon passed as max_t [s]
dt = 0.001   # integration time step [s]
g = 9.8      # gravitational acceleration [m/s^2]
l0 = 1       # nominal pendulum length [m]
m = 1        # pendulum mass [kg]
# Configuration handed to every Pendulum instance in the sweep below.
attributes = {
    'm': m,
    'max_t': T,
    'dt': dt,
    'constrain_L': True,        # enforce the L / Ldot bounds below
    'save_data': False,
    'plot': False,
    'save_fig': False,
    'show_fig': False,
    'asymptotic_mode': True,    # use the asymptotic length-variation law
    'delta_asymptotic_const': .1,
    'adaptive_mode': False,
    'delta_adaptive_const': .05,
    'l0': l0,
    'Ldotmax': 5,
    'Ldotmin': -5,
    'Lmax': 1.5,
    'Lmin': 0.5,
    'g': g
}
# design the discretization of the phase space
size = 1000   # grid resolution per axis
width = 2     # half-width of the phi / dphi window
# define the region of interest: a (size x size) grid over [-width, width]^2
x, y = np.meshgrid(np.linspace(-width, width, size), np.linspace(-width, width, size))
# x, y = np.meshgrid(np.linspace(-width, width, size), np.linspace(-4, 3, size))
# W holds the LF value, dW the sign of its derivative (-1/0/+1) per node
W, dW = np.zeros(x.shape), np.zeros(x.shape)
for i in range(x.shape[0]):
    print(f'i = {i}')  # progress indicator, one line per grid row
    for j in range(y.shape[1]):
        phi = x[i, j] * np.ones(1)
        dphi = y[i, j] * np.ones(1)
        wave = {
            'phi': phi,
            'dphi': dphi,
        }
        # A fresh Pendulum per grid point carries the state into the
        # module-level LF helpers above.
        pending_pendulum = Pendulum(wave, attributes)
        # find the LF value of each point
        W[i, j] = compute_total_energy_LF(pending_pendulum)
        dW[i, j] = compute_total_energy_LF_derivative(pending_pendulum)
LF = {
    'width': width,
    'size': size,
    'LF': W,
    'LFdot': dW
}
# Persist grid metadata plus both fields for later inspection in MATLAB/SciPy.
io.savemat('LF_total_energy_variation_multiplication.mat', LF)
# plot: LF level sets (gray contours) over a shaded map of sign(dV/dt)
fig = plt.figure(figsize=[9, 9])
plt.grid()
plt.gca().set_aspect('equal', adjustable='box')
mu = plt.contour(x, y, W, levels=30, colors='gray')
cbar = plt.colorbar(mu)
plt.xlabel(r'$\phi(t)$', size=20)
plt.ylabel(r'$\dot{\phi}(t)$', size=20, rotation=0)
# Red shading marks regions where the LF-derivative sign is positive.
plt.contourf(x, y, dW, cmap='Reds', alpha=.3)
# Optional overlay of a stored trajectory (disabled):
# data = io.loadmat('pendulum_data.mat')
# phi = data['asym_phi']
# dphi = data['asym_dphi']
# plt.plot(phi[0], dphi[0], 'b--', label='Asymptotic', zorder=1)
# plt.show()
plt.savefig('LF_TotalEnergy_Vdot_multiplication.png', format='png', dpi=300)
plt.close(fig)
# # read a specific trajectory
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.