Dataset schema (29 columns; values summarized from the original header, "nullable" marks columns that may be null):

- hexsha: string, length 40
- size: int64, 3 to 1.03M
- ext: string, 10 distinct values
- lang: string, 1 distinct value
- max_stars_repo_path: string, length 3 to 972
- max_stars_repo_name: string, length 6 to 130
- max_stars_repo_head_hexsha: string, length 40 to 78
- max_stars_repo_licenses: list, length 1 to 10
- max_stars_count: int64, 1 to 191k, nullable
- max_stars_repo_stars_event_min_datetime: string, length 24, nullable
- max_stars_repo_stars_event_max_datetime: string, length 24, nullable
- max_issues_repo_path: string, length 3 to 972
- max_issues_repo_name: string, length 6 to 130
- max_issues_repo_head_hexsha: string, length 40 to 78
- max_issues_repo_licenses: list, length 1 to 10
- max_issues_count: int64, 1 to 116k, nullable
- max_issues_repo_issues_event_min_datetime: string, length 24, nullable
- max_issues_repo_issues_event_max_datetime: string, length 24, nullable
- max_forks_repo_path: string, length 3 to 972
- max_forks_repo_name: string, length 6 to 130
- max_forks_repo_head_hexsha: string, length 40 to 78
- max_forks_repo_licenses: list, length 1 to 10
- max_forks_count: int64, 1 to 105k, nullable
- max_forks_repo_forks_event_min_datetime: string, length 24, nullable
- max_forks_repo_forks_event_max_datetime: string, length 24, nullable
- content: string, length 3 to 1.03M
- avg_line_length: float64, 1.13 to 941k
- max_line_length: int64, 2 to 941k
- alphanum_fraction: float64, 0 to 1
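Each record below pairs the raw file content with its repository metadata. As a minimal sketch (not part of the dataset itself), assuming the rows were exported to a hypothetical JSON Lines file named records.jsonl keyed by the column names above, they could be scanned like this:

import json

with open("records.jsonl", encoding="utf-8") as fh:
    for line in fh:
        row = json.loads(line)
        stars = row.get("max_stars_count")  # nullable columns may come back as None
        print(row["max_stars_repo_name"],
              row["max_stars_repo_path"],
              row["size"],
              stars,
              round(row["alphanum_fraction"], 3))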
---

hexsha: 6003ed87d210b31336cb2c8da2c4371379d4efe1 | size: 301 | ext: py | lang: Python
max_stars:  path=classifier/beer/urls.py | repo=RafaelBernardes/beer-classifier | head=68edb99a231d7090d0d6d384de712b4792b3b7d0 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=classifier/beer/urls.py | repo=RafaelBernardes/beer-classifier | head=68edb99a231d7090d0d6d384de712b4792b3b7d0 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=classifier/beer/urls.py | repo=RafaelBernardes/beer-classifier | head=68edb99a231d7090d0d6d384de712b4792b3b7d0 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
content:
from django.urls import path
from django.conf.urls.static import static
from django.conf import settings
from . import views
urlpatterns = [
path('', views.home, name='home'),
path('results', views.results, name='results'),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
avg_line_length: 30.1 | max_line_length: 67 | alphanum_fraction: 0.747508

---

hexsha: 27f46926e35b1d825ee676bdcbb40f58b822d94c | size: 1,520 | ext: py | lang: Python
max_stars:  path=setup.py | repo=daimon99/django-jsonp | head=ff19f2628522a3655b28eb15b8ad4af93a931282 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=setup.py | repo=daimon99/django-jsonp | head=ff19f2628522a3655b28eb15b8ad4af93a931282 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=setup.py | repo=daimon99/django-jsonp | head=ff19f2628522a3655b28eb15b8ad4af93a931282 | licenses=["MIT"] | count=1 | min_datetime=2019-07-03T10:25:51.000Z | max_datetime=2019-07-03T10:25:51.000Z
content:
from distutils.core import setup
import os
import re
with open(os.path.join(os.path.dirname(__file__), 'requirements.txt')) as requirements:
install_requires = requirements.readlines()
# <https://github.com/kennethreitz/requests/blob/master/setup.py#L32>
with open('django_jsonp/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name='django_jsonp',
version=version,
description='Simple JSONP support for django',
long_description=open('README.md').read(),
author='Alexander Zhukov',
author_email='zhukovaa90@gmail.com',
url='http://github.com/ZhukovAlexander/django-jsonp',
license='MIT',
zip_safe=True,
install_requires=install_requires,
packages=['djsonp'],
package_dir={'djsonp': 'django_jsonp'},
classifiers=['Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'],
)
avg_line_length: 41.081081 | max_line_length: 87 | alphanum_fraction: 0.586184

---

hexsha: d679f4c08386e9c543e7007fede6c2bd85d1df7a | size: 5,316 | ext: py | lang: Python
max_stars:  path=python/emr/app.py | repo=marclyo/aws-cdk-examples | head=f041f07ebd4c94897e16d37ff813a38eb32645a1 | licenses=["Apache-2.0"] | count=2,941 | min_datetime=2019-02-08T15:29:36.000Z | max_datetime=2022-03-31T23:57:42.000Z
max_issues: path=python/emr/app.py | repo=marclyo/aws-cdk-examples | head=f041f07ebd4c94897e16d37ff813a38eb32645a1 | licenses=["Apache-2.0"] | count=558 | min_datetime=2019-02-14T23:32:02.000Z | max_datetime=2022-03-30T00:35:11.000Z
max_forks:  path=python/emr/app.py | repo=marclyo/aws-cdk-examples | head=f041f07ebd4c94897e16d37ff813a38eb32645a1 | licenses=["Apache-2.0"] | count=1,409 | min_datetime=2019-02-12T19:13:04.000Z | max_datetime=2022-03-31T18:46:21.000Z
content:
from aws_cdk import aws_ec2 as ec2, aws_iam as iam, core, aws_emr as emr
class EMRClusterStack(core.Stack):
def __init__(
self,
scope: core.Construct,
id: str,
s3_log_bucket: str,
s3_script_bucket: str,
spark_script: str,
**kwargs,
) -> None:
super().__init__(scope, id, **kwargs)
# VPC
vpc = ec2.Vpc(
self,
"vpc",
nat_gateways=0,
subnet_configuration=[
ec2.SubnetConfiguration(
name="public", subnet_type=ec2.SubnetType.PUBLIC
)
],
)
# enable reading scripts from s3 bucket
read_scripts_policy = iam.PolicyStatement(
effect=iam.Effect.ALLOW,
actions=["s3:GetObject",],
resources=[f"arn:aws:s3:::{s3_script_bucket}/*"],
)
read_scripts_document = iam.PolicyDocument()
read_scripts_document.add_statements(read_scripts_policy)
# emr service role
emr_service_role = iam.Role(
self,
"emr_service_role",
assumed_by=iam.ServicePrincipal("elasticmapreduce.amazonaws.com"),
managed_policies=[
iam.ManagedPolicy.from_aws_managed_policy_name(
"service-role/AmazonElasticMapReduceRole"
)
],
inline_policies=[read_scripts_document],
)
# emr job flow role
emr_job_flow_role = iam.Role(
self,
"emr_job_flow_role",
assumed_by=iam.ServicePrincipal("ec2.amazonaws.com"),
managed_policies=[
iam.ManagedPolicy.from_aws_managed_policy_name(
"service-role/AmazonElasticMapReduceforEC2Role"
)
],
)
# emr job flow profile
emr_job_flow_profile = iam.CfnInstanceProfile(
self,
"emr_job_flow_profile",
roles=[emr_job_flow_role.role_name],
instance_profile_name="emrJobFlowProfile_",
)
# create emr cluster
emr.CfnCluster(
self,
"emr_cluster",
instances=emr.CfnCluster.JobFlowInstancesConfigProperty(
core_instance_group=emr.CfnCluster.InstanceGroupConfigProperty(
instance_count=3, instance_type="m4.large", market="SPOT"
),
ec2_subnet_id=vpc.public_subnets[0].subnet_id,
hadoop_version="Amazon",
keep_job_flow_alive_when_no_steps=False,
master_instance_group=emr.CfnCluster.InstanceGroupConfigProperty(
instance_count=1, instance_type="m4.large", market="SPOT"
),
),
# note job_flow_role is an instance profile (not an iam role)
job_flow_role=emr_job_flow_profile.instance_profile_name,
name="cluster_name",
applications=[emr.CfnCluster.ApplicationProperty(name="Spark")],
service_role=emr_service_role.role_name,
configurations=[
# use python3 for pyspark
emr.CfnCluster.ConfigurationProperty(
classification="spark-env",
configurations=[
emr.CfnCluster.ConfigurationProperty(
classification="export",
configuration_properties={
"PYSPARK_PYTHON": "/usr/bin/python3",
"PYSPARK_DRIVER_PYTHON": "/usr/bin/python3",
},
)
],
),
# enable apache arrow
emr.CfnCluster.ConfigurationProperty(
classification="spark-defaults",
configuration_properties={
"spark.sql.execution.arrow.enabled": "true"
},
),
# dedicate cluster to single jobs
emr.CfnCluster.ConfigurationProperty(
classification="spark",
configuration_properties={"maximizeResourceAllocation": "true"},
),
],
log_uri=f"s3://{s3_log_bucket}/{core.Aws.REGION}/elasticmapreduce/",
release_label="emr-6.0.0",
visible_to_all_users=False,
# the job to be done
steps=[
emr.CfnCluster.StepConfigProperty(
hadoop_jar_step=emr.CfnCluster.HadoopJarStepConfigProperty(
jar="command-runner.jar",
args=[
"spark-submit",
"--deploy-mode",
"cluster",
f"s3://{s3_script_bucket}/scripts/{spark_script}",
],
),
name="step_name",
action_on_failure="CONTINUE",
),
],
)
app = core.App()
EMRClusterStack(
app,
"emr-cluster",
s3_log_bucket="s3_bucket_logs",
s3_script_bucket="s3_bucket_scripts",
spark_script="pyspark_script.py",
)
app.synth()
avg_line_length: 35.918919 | max_line_length: 84 | alphanum_fraction: 0.51693

---

hexsha: 09c429028e1d08b324c7e24b9969149ad16a8e96 | size: 1,351 | ext: py | lang: Python
max_stars:  path=django_project_1/users/views.py | repo=KumarPython/Django-Projects | head=f69550debbd0850b4160f8b52d846c8c7e76c988 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=django_project_1/users/views.py | repo=KumarPython/Django-Projects | head=f69550debbd0850b4160f8b52d846c8c7e76c988 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=django_project_1/users/views.py | repo=KumarPython/Django-Projects | head=f69550debbd0850b4160f8b52d846c8c7e76c988 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
content:
from django.shortcuts import render,redirect
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from .forms import UserRegistrationForm,UserUpdateForm,ProfileUpdateForm
def register(request):
if request.method=="POST":
form=UserRegistrationForm(request.POST)
if form.is_valid():
form.save()
username=form.cleaned_data.get('username')
messages.success(request,f'Account created for {username} ! Please Login Now')
return redirect('login')
else:
form=UserRegistrationForm()
return render(request,'users/register.html',{'form':form})
@login_required
def profile(request):
if request.method == "POST":
u_form=UserUpdateForm(request.POST,instance=request.user)
p_form=ProfileUpdateForm(request.POST,request.FILES,instance=request.user.profile)
if u_form.is_valid() and p_form.is_valid():
u_form.save()
p_form.save()
messages.success(request, f'Account updated for {request.user.username}')
return redirect('profile')
else:
u_form = UserUpdateForm(instance=request.user)
p_form = ProfileUpdateForm(instance=request.user.profile)
context={'u_form':u_form,'p_form':p_form}
return render(request,'users/profile.html',context)
avg_line_length: 39.735294 | max_line_length: 90 | alphanum_fraction: 0.695041

---

hexsha: 915a54c444cbbf09dbd738b2af4d4e8c2e29b268 | size: 14,213 | ext: py | lang: Python
max_stars:  path=venv/lib/python3.9/site-packages/google/cloud/videointelligence_v1p2beta1/gapic/video_intelligence_service_client.py | repo=qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | head=630dcef73e6a258b6e9a52f934e2dd912ce741f8 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=venv/lib/python3.9/site-packages/google/cloud/videointelligence_v1p2beta1/gapic/video_intelligence_service_client.py | repo=qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | head=630dcef73e6a258b6e9a52f934e2dd912ce741f8 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=venv/lib/python3.9/site-packages/google/cloud/videointelligence_v1p2beta1/gapic/video_intelligence_service_client.py | repo=qarik-hanrattyjen/apache-airflow-backport-providers-google-2021.3.3 | head=630dcef73e6a258b6e9a52f934e2dd912ce741f8 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
content:
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.cloud.videointelligence.v1p2beta1 VideoIntelligenceService API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.operation
import google.api_core.operations_v1
import grpc
from google.cloud.videointelligence_v1p2beta1.gapic import enums
from google.cloud.videointelligence_v1p2beta1.gapic import (
video_intelligence_service_client_config,
)
from google.cloud.videointelligence_v1p2beta1.gapic.transports import (
video_intelligence_service_grpc_transport,
)
from google.cloud.videointelligence_v1p2beta1.proto import video_intelligence_pb2
from google.cloud.videointelligence_v1p2beta1.proto import video_intelligence_pb2_grpc
from google.longrunning import operations_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
"google-cloud-videointelligence",
).version
class VideoIntelligenceServiceClient(object):
"""Service that implements Google Cloud Video Intelligence API."""
SERVICE_ADDRESS = "videointelligence.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = (
"google.cloud.videointelligence.v1p2beta1.VideoIntelligenceService"
)
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
VideoIntelligenceServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
client_options=None,
):
"""Constructor.
Args:
transport (Union[~.VideoIntelligenceServiceGrpcTransport,
Callable[[~.Credentials, type], ~.VideoIntelligenceServiceGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
client_options (Union[dict, google.api_core.client_options.ClientOptions]):
Client options used to set user options on the client. API Endpoint
should be set through client_options.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = video_intelligence_service_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
api_endpoint = self.SERVICE_ADDRESS
if client_options:
if type(client_options) == dict:
client_options = google.api_core.client_options.from_dict(
client_options
)
if client_options.api_endpoint:
api_endpoint = client_options.api_endpoint
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport,
address=api_endpoint,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport(
address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def annotate_video(
self,
features,
input_uri=None,
input_content=None,
video_context=None,
output_uri=None,
location_id=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Performs asynchronous video annotation. Progress and results can be
retrieved through the ``google.longrunning.Operations`` interface.
``Operation.metadata`` contains ``AnnotateVideoProgress`` (progress).
``Operation.response`` contains ``AnnotateVideoResponse`` (results).
Example:
>>> from google.cloud import videointelligence_v1p2beta1
>>> from google.cloud.videointelligence_v1p2beta1 import enums
>>>
>>> client = videointelligence_v1p2beta1.VideoIntelligenceServiceClient()
>>>
>>> features_element = enums.Feature.LABEL_DETECTION
>>> features = [features_element]
>>> input_uri = 'gs://cloud-samples-data/video/cat.mp4'
>>>
>>> response = client.annotate_video(features, input_uri=input_uri)
>>>
>>> def callback(operation_future):
... # Handle result.
... result = operation_future.result()
>>>
>>> response.add_done_callback(callback)
>>>
>>> # Handle metadata.
>>> metadata = response.metadata()
Args:
features (list[~google.cloud.videointelligence_v1p2beta1.types.Feature]): Required. Requested video annotation features.
input_uri (str): Input video location. Currently, only `Google Cloud
Storage <https://cloud.google.com/storage/>`__ URIs are supported, which
must be specified in the following format: ``gs://bucket-id/object-id``
(other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For
more information, see `Request
URIs <https://cloud.google.com/storage/docs/request-endpoints>`__. A
video URI may include wildcards in ``object-id``, and thus identify
multiple videos. Supported wildcards: '*' to match 0 or more characters;
'?' to match 1 character. If unset, the input video should be embedded
in the request as ``input_content``. If set, ``input_content`` should be
unset.
input_content (bytes): The video data bytes. If unset, the input video(s) should be
specified via ``input_uri``. If set, ``input_uri`` should be unset.
video_context (Union[dict, ~google.cloud.videointelligence_v1p2beta1.types.VideoContext]): Additional video context and/or feature-specific parameters.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.videointelligence_v1p2beta1.types.VideoContext`
output_uri (str): Optional. Location where the output (in JSON format) should be
stored. Currently, only `Google Cloud
Storage <https://cloud.google.com/storage/>`__ URIs are supported, which
must be specified in the following format: ``gs://bucket-id/object-id``
(other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For
more information, see `Request
URIs <https://cloud.google.com/storage/docs/request-endpoints>`__.
location_id (str): Optional. Cloud region where annotation should take place. Supported
cloud regions: ``us-east1``, ``us-west1``, ``europe-west1``,
``asia-east1``. If no region is specified, a region will be determined
based on video file location.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.videointelligence_v1p2beta1.types._OperationFuture` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "annotate_video" not in self._inner_api_calls:
self._inner_api_calls[
"annotate_video"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.annotate_video,
default_retry=self._method_configs["AnnotateVideo"].retry,
default_timeout=self._method_configs["AnnotateVideo"].timeout,
client_info=self._client_info,
)
request = video_intelligence_pb2.AnnotateVideoRequest(
features=features,
input_uri=input_uri,
input_content=input_content,
video_context=video_context,
output_uri=output_uri,
location_id=location_id,
)
operation = self._inner_api_calls["annotate_video"](
request, retry=retry, timeout=timeout, metadata=metadata
)
return google.api_core.operation.from_gapic(
operation,
self.transport._operations_client,
video_intelligence_pb2.AnnotateVideoResponse,
metadata_type=video_intelligence_pb2.AnnotateVideoProgress,
)
avg_line_length: 45.848387 | max_line_length: 163 | alphanum_fraction: 0.644058

---

hexsha: 1abb3ca84b2454d0cab286a04f5ec63f5177b8b9 | size: 4,058 | ext: py | lang: Python
max_stars:  path=src/alias/azext_alias/__init__.py | repo=PoisonousJohn/azure-cli-extensions | head=cf0d7b6c031ba844dd5e43cc4e07533b85ef1269 | licenses=["MIT"] | count=1 | min_datetime=2018-09-22T14:53:04.000Z | max_datetime=2018-09-22T14:53:04.000Z
max_issues: path=src/alias/azext_alias/__init__.py | repo=PoisonousJohn/azure-cli-extensions | head=cf0d7b6c031ba844dd5e43cc4e07533b85ef1269 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=src/alias/azext_alias/__init__.py | repo=PoisonousJohn/azure-cli-extensions | head=cf0d7b6c031ba844dd5e43cc4e07533b85ef1269 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
content:
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import timeit
from knack.log import get_logger
from azure.cli.core import AzCommandsLoader
from azure.cli.core.decorators import Completer
from azure.cli.core.commands.events import EVENT_INVOKER_PRE_CMD_TBL_TRUNCATE
from azext_alias.alias import GLOBAL_ALIAS_PATH, AliasManager
from azext_alias.util import get_config_parser, is_alias_create_command, cache_reserved_commands
from azext_alias._const import DEBUG_MSG_WITH_TIMING
from azext_alias._validators import process_alias_create_namespace
from azext_alias import telemetry
from azext_alias import _help # pylint: disable=unused-import
logger = get_logger(__name__)
"""
We don't have access to load_cmd_tbl_func in custom.py (need the entire command table
for alias and command validation when the user invokes alias create).
This cache saves the entire command table globally so custom.py can have access to it.
Alter this cache through cache_reserved_commands(load_cmd_tbl_func) in util.py
"""
cached_reserved_commands = []
class AliasExtCommandLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
from azure.cli.core.commands import CliCommandType
custom_command_type = CliCommandType(operations_tmpl='azext_alias.custom#{}')
super(AliasExtCommandLoader, self).__init__(cli_ctx=cli_ctx,
custom_command_type=custom_command_type)
self.cli_ctx.register_event(EVENT_INVOKER_PRE_CMD_TBL_TRUNCATE, alias_event_handler)
def load_command_table(self, _):
with self.command_group('alias') as g:
g.custom_command('create', 'create_alias', validator=process_alias_create_namespace)
g.custom_command('list', 'list_alias')
g.custom_command('remove', 'remove_alias')
return self.command_table
def load_arguments(self, _):
with self.argument_context('alias create') as c:
c.argument('alias_name', options_list=['--name', '-n'], help='The name of the alias.')
c.argument('alias_command', options_list=['--command', '-c'], help='The command that the alias points to.')
with self.argument_context('alias remove') as c:
c.argument('alias_name', options_list=['--name', '-n'], help='The name of the alias.',
completer=get_alias_completer)
@Completer
def get_alias_completer(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument
"""
An argument completer for alias name.
"""
try:
alias_table = get_config_parser()
alias_table.read(GLOBAL_ALIAS_PATH)
return alias_table.sections()
except Exception: # pylint: disable=broad-except
return []
def alias_event_handler(_, **kwargs):
"""
An event handler for alias transformation when EVENT_INVOKER_PRE_TRUNCATE_CMD_TBL event is invoked
"""
try:
telemetry.start()
start_time = timeit.default_timer()
args = kwargs.get('args')
alias_manager = AliasManager(**kwargs)
# [:] will keep the reference of the original args
args[:] = alias_manager.transform(args)
if is_alias_create_command(args):
load_cmd_tbl_func = kwargs.get('load_cmd_tbl_func', lambda _: {})
cache_reserved_commands(load_cmd_tbl_func)
elapsed_time = (timeit.default_timer() - start_time) * 1000
logger.debug(DEBUG_MSG_WITH_TIMING, args, elapsed_time)
telemetry.set_execution_time(round(elapsed_time, 2))
except Exception as client_exception: # pylint: disable=broad-except
telemetry.set_exception(client_exception)
raise
finally:
telemetry.conclude()
COMMAND_LOADER_CLS = AliasExtCommandLoader
avg_line_length: 40.58 | max_line_length: 119 | alphanum_fraction: 0.684327

---

hexsha: 602946871925e8584690b18b46728b9d86f15e58 | size: 957 | ext: py | lang: Python
max_stars:  path=tests/test_sensor_xio_ngimu.py | repo=LizShch/scikit-kinematics | head=6819f5fff9ba686164b3e336a03c5f153da68864 | licenses=["BSD-3-Clause"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=tests/test_sensor_xio_ngimu.py | repo=LizShch/scikit-kinematics | head=6819f5fff9ba686164b3e336a03c5f153da68864 | licenses=["BSD-3-Clause"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=tests/test_sensor_xio_ngimu.py | repo=LizShch/scikit-kinematics | head=6819f5fff9ba686164b3e336a03c5f153da68864 | licenses=["BSD-3-Clause"] | count=3 | min_datetime=2020-01-16T17:47:04.000Z | max_datetime=2021-05-09T20:11:37.000Z
content:
"""
Test import of data saved with NGIMU sensors from x-io,
through subclassing 'IMU_Base'
"""
# Author: Thomas Haslwanter
import numpy as np
import sys
import os
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.join(myPath, '..', 'src','skinematics'))
import unittest
import imus
from time import sleep
from sensors.xio_ngimu import NGIMU
class TestSequenceFunctions(unittest.TestCase):
def test_import_xio(self):
# Get data, with a specified input from an XIO system
in_file = os.path.join(myPath, 'data', 'data_ngimu')
sensor = NGIMU(in_file=in_file, q_type=None)
rate = sensor.rate
acc = sensor.acc
omega = sensor.omega
self.assertAlmostEqual((rate - 50), 0)
self.assertAlmostEqual( (np.rad2deg(omega[0,2]) + 0.0020045), 0)
if __name__ == '__main__':
unittest.main()
print('Thanks for using programs from Thomas!')
sleep(0.2)
avg_line_length: 25.864865 | max_line_length: 72 | alphanum_fraction: 0.68652

---

hexsha: 2e40aad037bc41416e4506b7fc2f5c099e2e329f | size: 848 | ext: py | lang: Python
max_stars:  path=code/main.py | repo=tchittesh/EAS- | head=b21acfc77f43db8abbda8f0e4029389b779a05fc | licenses=["MIT"] | count=169 | min_datetime=2017-10-16T07:22:10.000Z | max_datetime=2022-02-28T05:12:56.000Z
max_issues: path=code/main.py | repo=tchittesh/EAS- | head=b21acfc77f43db8abbda8f0e4029389b779a05fc | licenses=["MIT"] | count=5 | min_datetime=2017-10-04T20:32:58.000Z | max_datetime=2019-10-22T11:29:37.000Z
max_forks:  path=code/main.py | repo=tchittesh/EAS- | head=b21acfc77f43db8abbda8f0e4029389b779a05fc | licenses=["MIT"] | count=41 | min_datetime=2017-10-10T04:50:21.000Z | max_datetime=2022-03-16T03:42:07.000Z
content:
from expdir_monitor.expdir_monitor import ExpdirMonitor
import argparse
"""
Given an expdir, run the exp
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'--test', action='store_true',
help='Test model for required dataset if pretrained model exists.'
)
parser.add_argument(
'--valid', action='store_true',
)
parser.add_argument(
'--valid_size', type=int, default=-1,
)
parser.add_argument('--path', type=str)
parser.add_argument('--restore', action='store_true')
args = parser.parse_args()
expdir_monitor = ExpdirMonitor(args.path)
test_performance = expdir_monitor.run(pure=False, restore=args.restore, test=args.test, valid=args.valid,
valid_size=args.valid_size)
if args.valid:
print('validation performance: %s' % test_performance)
else:
print('test performance: %s' % test_performance)
avg_line_length: 29.241379 | max_line_length: 105 | alphanum_fraction: 0.728774

---

hexsha: 5e739e56dce2b8b46ebea82b5ea3a8359f0ee09e | size: 1,027 | ext: py | lang: Python
max_stars:  path=CrawlingServer/Vjudge.py | repo=DirtyBat/LPOJ | head=bc0dc002777b0881e042abfdbb4fe2c3cc9df972 | licenses=["MIT"] | count=196 | min_datetime=2019-01-06T08:51:09.000Z | max_datetime=2022-03-31T06:51:20.000Z
max_issues: path=CrawlingServer/Vjudge.py | repo=lvdat/LPOJ | head=4c77c683df02786fe50ca24baa00181aed09979a | licenses=["MIT"] | count=77 | min_datetime=2019-08-06T02:04:16.000Z | max_datetime=2022-02-26T09:31:08.000Z
max_forks:  path=CrawlingServer/Vjudge.py | repo=lvdat/LPOJ | head=4c77c683df02786fe50ca24baa00181aed09979a | licenses=["MIT"] | count=62 | min_datetime=2019-06-25T08:36:55.000Z | max_datetime=2022-03-16T09:49:51.000Z
content:
import urllib.request
import urllib.parse
import json
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
def get_VJ_data(name):
try:
api_url = "https://vjudge.net/user/"+name
response = urllib.request.urlopen(api_url)
response_data=response.read()
response_data = str(response_data)
ac = response_data[response_data.find("title=\"Overall solved\" target=\"_blank\">")+len("title=\"Overall solved\" target=\"_blank\">"):response_data.find("</a>",response_data.find("title=\"Overall solved\" target=\"_blank\">"))]
submit =response_data[response_data.find("title=\"Overall attempted\" target=\"_blank\">")+len("title=\"Overall attempted\" target=\"_blank\">"):response_data.find("</a>",response_data.find("title=\"Overall attempted\" target=\"_blank\">"))]
return [int(ac),int(submit)]
except:
return [-1,-1]
if __name__ == "__main__":
while(True):
name = input("请输入要爬的ID:")
print(get_VJ_data(name))
avg_line_length: 44.652174 | max_line_length: 249 | alphanum_fraction: 0.667965

---

hexsha: e1de28a39a4172bcf73b2c7b27b8da7eec67fd40 | size: 3,776 | ext: py | lang: Python
max_stars:  path=src/main/python/widgets/dialogs/date_selection_subdialog.py | repo=ivov/admin-stock | head=e2e1d53436878b6db68dcb85d0cca31223066ffb | licenses=["MIT"] | count=8 | min_datetime=2019-11-02T22:32:30.000Z | max_datetime=2021-08-16T08:29:39.000Z
max_issues: path=src/main/python/widgets/dialogs/date_selection_subdialog.py | repo=ivov/admin-stock | head=e2e1d53436878b6db68dcb85d0cca31223066ffb | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=src/main/python/widgets/dialogs/date_selection_subdialog.py | repo=ivov/admin-stock | head=e2e1d53436878b6db68dcb85d0cca31223066ffb | licenses=["MIT"] | count=3 | min_datetime=2019-12-10T16:23:49.000Z | max_datetime=2021-11-01T20:22:16.000Z
content:
from PyQt5 import QtWidgets, QtCore
from utils.styling import movements_dialog_date_selection_subdialog_style
from utils import db_manager
from utils import utils_collection as utils
class DateSelectionSubdialog(QtWidgets.QDialog):
def __init__(self, parent=None):
super(DateSelectionSubdialog, self).__init__(parent)
self.setWindowFlags(
QtCore.Qt.Dialog
| QtCore.Qt.CustomizeWindowHint
| QtCore.Qt.WindowCloseButtonHint
)
self.setFixedWidth(270)
self.selected_date = ""
groupbox = QtWidgets.QGroupBox("Período")
groupbox.setStyleSheet(movements_dialog_date_selection_subdialog_style)
self.combobox_month = QtWidgets.QComboBox()
self.combobox_month.setFixedWidth(85)
self.combobox_month.addItem("Mes")
months = [
"Enero",
"Febrero",
"Marzo",
"Abril",
"Mayo",
"Junio",
"Julio",
"Agosto",
"Septiembre",
"Octubre",
"Noviembre",
"Diciembre",
]
for i in months:
self.combobox_month.addItem(i)
self.combobox_year = QtWidgets.QComboBox()
self.combobox_year.addItem("Año")
self.combobox_year.setFixedWidth(85)
years = []
db = db_manager.DB_Manager()
if self.parent().title.text() == "Historial de movimientos":
years = db.get_years_from_movements()
elif self.parent().title.text() == "Historial de configuraciones":
years = db.get_years_from_configs()
for i in years:
self.combobox_year.addItem(i)
combobox_layout = QtWidgets.QHBoxLayout()
combobox_layout.addWidget(self.combobox_month)
combobox_layout.addWidget(self.combobox_year)
groupbox_inner_layout = QtWidgets.QVBoxLayout()
groupbox_inner_layout.addLayout(combobox_layout)
groupbox.setLayout(groupbox_inner_layout)
self.back_button = QtWidgets.QPushButton("« Volver")
self.back_button.setShortcut("Alt+v")
if self.parent().date_button.text() != "Período":
self.back_button.setText("× Cancelar")
self.back_button.setShortcut("Alt+c")
self.select_button = QtWidgets.QPushButton("Seleccionar »")
self.select_button.setShortcut("Alt+s")
self.select_button.setEnabled(False)
bottom_section = QtWidgets.QHBoxLayout()
bottom_section.addWidget(self.back_button)
bottom_section.addWidget(self.select_button)
layout = QtWidgets.QVBoxLayout()
layout.addWidget(groupbox)
layout.addLayout(bottom_section)
self.setLayout(layout)
self.back_button.clicked.connect(self.on_back)
self.select_button.clicked.connect(self.on_select_button_clicked)
self.combobox_month.currentTextChanged.connect(self.on_combobox_change)
self.combobox_year.currentTextChanged.connect(self.on_combobox_change)
def on_combobox_change(self):
if self.combobox_month.currentText() == "Mes":
return
if self.combobox_year.currentText() == "Año":
return
self.select_button.setEnabled(True)
def on_select_button_clicked(self):
self.selected_date = (
utils.get_month_number(self.combobox_month.currentText())
+ "/"
+ self.combobox_year.currentText()
)
self.parent().on_date_selected()
self.close()
def on_back(self):
if self.back_button.text() == "« Volver":
self.close()
elif self.back_button.text() == "× Cancelar":
self.parent().clear_all_filters()
self.close()
avg_line_length: 33.415929 | max_line_length: 79 | alphanum_fraction: 0.634799

---

hexsha: 74af4c2903ce3145d6f49fd17d82ef7cbc205db1 | size: 1,547 | ext: py | lang: Python
max_stars:  path=bot/migrations/0001_initial.py | repo=CODE-Easyy/tg-managers-bot | head=24889543aa01b0cb562b232908ec7220f99a4e6c | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=bot/migrations/0001_initial.py | repo=CODE-Easyy/tg-managers-bot | head=24889543aa01b0cb562b232908ec7220f99a4e6c | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=bot/migrations/0001_initial.py | repo=CODE-Easyy/tg-managers-bot | head=24889543aa01b0cb562b232908ec7220f99a4e6c | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
content:
# Generated by Django 3.1.3 on 2020-11-07 21:05
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Client',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('chat_id', models.PositiveIntegerField(unique=True)),
('username', models.CharField(blank=True, max_length=255, null=True)),
('manager', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('message', models.CharField(max_length=255)),
('sended_at', models.DateTimeField(auto_now_add=True)),
('status', models.CharField(choices=[('sended', 'sended'), ('received', 'received')], max_length=255)),
('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bot.client')),
('manager', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
avg_line_length: 40.710526 | max_line_length: 121 | alphanum_fraction: 0.618617

---

hexsha: cf61c08324fbf6dccd47f66383d514bec5ce66ac | size: 82 | ext: py | lang: Python
max_stars:  path=DIY_NN/solutions/ff_oneline.py | repo=PinmanHuang/CrashCourseML | head=b59ebf138d42fc9a1669735c6363d50938200e69 | licenses=["MIT"] | count=3 | min_datetime=2019-02-16T05:57:09.000Z | max_datetime=2019-09-16T07:07:18.000Z
max_issues: path=DIY_NN/solutions/ff_oneline.py | repo=PinmanHuang/CrashCourseML | head=b59ebf138d42fc9a1669735c6363d50938200e69 | licenses=["MIT"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=DIY_NN/solutions/ff_oneline.py | repo=PinmanHuang/CrashCourseML | head=b59ebf138d42fc9a1669735c6363d50938200e69 | licenses=["MIT"] | count=8 | min_datetime=2019-02-14T02:51:26.000Z | max_datetime=2019-10-07T07:44:24.000Z
content:
from IPython.display import display, Latex
display(Latex(r"$\sigma (Cf(Ax+b)+d)$"))  # raw string so the backslash in \sigma is not treated as an escape
avg_line_length: 41 | max_line_length: 42 | alphanum_fraction: 0.719512

---

hexsha: 15187434680d6dbca9d3d4af9861ceb4c473eeeb | size: 832 | ext: py | lang: Python
max_stars:  path=assignment_8_4.py | repo=Rajkamalyadav707/python_for_everybody | head=246b607c1c485e86f220deaa968d885ff6992e15 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=assignment_8_4.py | repo=Rajkamalyadav707/python_for_everybody | head=246b607c1c485e86f220deaa968d885ff6992e15 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=assignment_8_4.py | repo=Rajkamalyadav707/python_for_everybody | head=246b607c1c485e86f220deaa968d885ff6992e15 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
content:
#8.4 Open the file romeo.txt and read it line by line.
#For each line, split the line into a list of words using the split() method.
#The program should build a list of words.
#For each word on each line check to see if the word is already in the list and if not append it to the list.
#When the program completes, sort and print the resulting words in alphabetical order.
#You can download the sample data at http://www.py4e.com/code3/romeo.txt
fname = input("Enter file name: ")
fh = open(fname)
x = 0
y = 0
oneword = list()
emptylist = list()
newlist = list()
for line in fh:
x = x + 1
line = line.rstrip()
splitline = line.split()
for element in splitline:
if element in emptylist : continue
emptylist.append(element)
emptylist = sorted(emptylist)
print(emptylist)
#newlist = sorted(n)
avg_line_length: 32 | max_line_length: 109 | alphanum_fraction: 0.700721

---

hexsha: ee80c80459de084bf38c8446832af9b32b8e0aee | size: 6,737 | ext: py | lang: Python
max_stars:  path=sopel/modules/admin.py | repo=Ameenekosan/Yumiko | head=16624f0b3f5c94262104b85866ce2cf7fd96f0db | licenses=["EFL-2.0"] | count=null | min_datetime=null | max_datetime=null
max_issues: path=sopel/modules/admin.py | repo=Ameenekosan/Yumiko | head=16624f0b3f5c94262104b85866ce2cf7fd96f0db | licenses=["EFL-2.0"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=sopel/modules/admin.py | repo=Ameenekosan/Yumiko | head=16624f0b3f5c94262104b85866ce2cf7fd96f0db | licenses=["EFL-2.0"] | count=null | min_datetime=null | max_datetime=null
content:
from __future__ import unicode_literals
from sopel.config.types import (
StaticSection, ValidatedAttribute, FilenameAttribute
)
import sopel.module
class AdminSection(StaticSection):
hold_ground = ValidatedAttribute('hold_ground', bool, default=False)
"""Auto re-join on kick"""
auto_accept_invite = ValidatedAttribute('auto_accept_invite', bool,
default=True)
def configure(config):
config.define_section('admin', AdminSection)
config.admin.configure_setting('hold_ground',
"Automatically re-join after being kicked?")
config.admin.configure_setting('auto_accept_invite',
'Automatically join channels when invited?')
def setup(bot):
bot.config.define_section('admin', AdminSection)
@sopel.module.require_privmsg
@sopel.module.require_admin
@sopel.module.commands('join')
@sopel.module.priority('low')
@sopel.module.example('.join #example or .join #example key')
def join(bot, trigger):
"""Join the specified channel. This is an admin-only command."""
channel, key = trigger.group(3), trigger.group(4)
if not channel:
return
elif not key:
bot.join(channel)
else:
bot.join(channel, key)
@sopel.module.require_privmsg
@sopel.module.require_admin
@sopel.module.commands('part')
@sopel.module.priority('low')
@sopel.module.example('.part #example')
def part(bot, trigger):
"""Part the specified channel. This is an admin-only command."""
channel, _sep, part_msg = trigger.group(2).partition(' ')
if part_msg:
bot.part(channel, part_msg)
else:
bot.part(channel)
@sopel.module.require_privmsg
@sopel.module.require_owner
@sopel.module.commands('quit')
@sopel.module.priority('low')
def quit(bot, trigger):
"""Quit from the server. This is an owner-only command."""
quit_message = trigger.group(2)
if not quit_message:
quit_message = 'Quitting on command from %s' % trigger.nick
bot.quit(quit_message)
@sopel.module.require_privmsg
@sopel.module.require_admin
@sopel.module.commands('msg')
@sopel.module.priority('low')
@sopel.module.example('.msg #YourPants Does anyone else smell neurotoxin?')
def msg(bot, trigger):
"""
Send a message to a given channel or nick. Can only be done in privmsg by an
admin.
"""
if trigger.group(2) is None:
return
channel, _sep, message = trigger.group(2).partition(' ')
message = message.strip()
if not channel or not message:
return
bot.msg(channel, message)
@sopel.module.require_privmsg
@sopel.module.require_admin
@sopel.module.commands('me')
@sopel.module.priority('low')
def me(bot, trigger):
"""
Send an ACTION (/me) to a given channel or nick. Can only be done in privmsg
by an admin.
"""
if trigger.group(2) is None:
return
channel, _sep, action = trigger.group(2).partition(' ')
action = action.strip()
if not channel or not action:
return
msg = '\x01ACTION %s\x01' % action
bot.msg(channel, msg)
@sopel.module.event('INVITE')
@sopel.module.rule('.*')
@sopel.module.priority('low')
def invite_join(bot, trigger):
"""
Join a channel sopel is invited to, if the inviter is an admin.
"""
if trigger.admin or bot.config.admin.auto_accept_invite:
bot.join(trigger.args[1])
return
@sopel.module.event('KICK')
@sopel.module.rule(r'.*')
@sopel.module.priority('low')
def hold_ground(bot, trigger):
"""
This function monitors all kicks across all channels sopel is in. If it
detects that it is the one kicked it'll automatically join that channel.
WARNING: This may not be needed and could cause problems if sopel becomes
annoying. Please use this with caution.
"""
if bot.config.admin.hold_ground:
channel = trigger.sender
if trigger.args[1] == bot.nick:
bot.join(channel)
@sopel.module.require_privmsg
@sopel.module.require_admin
@sopel.module.commands('mode')
@sopel.module.priority('low')
def mode(bot, trigger):
"""Set a user mode on Sopel. Can only be done in privmsg by an admin."""
mode = trigger.group(3)
bot.write(('MODE ', bot.nick + ' ' + mode))
@sopel.module.require_privmsg("This command only works as a private message.")
@sopel.module.require_admin("This command requires admin privileges.")
@sopel.module.commands('set')
@sopel.module.example('.set core.owner Me')
def set_config(bot, trigger):
"""See and modify values of sopels config object.
Trigger args:
arg1 - section and option, in the form "section.option"
arg2 - value
If there is no section, section will default to "core".
If value is None, the option will be deleted.
"""
# Get section and option from first argument.
arg1 = trigger.group(3).split('.')
if len(arg1) == 1:
section_name, option = "core", arg1[0]
elif len(arg1) == 2:
section_name, option = arg1
else:
bot.reply("Usage: .set section.option value")
return
section = getattr(bot.config, section_name)
static_sec = isinstance(section, StaticSection)
if static_sec and not hasattr(section, option):
bot.say('[{}] section has no option {}.'.format(section_name, option))
return
# Display current value if no value is given.
value = trigger.group(4)
if not value:
if not static_sec and bot.config.parser.has_option(section, option):
bot.reply("Option %s.%s does not exist." % (section_name, option))
return
# Except if the option looks like a password. Censor those to stop them
# from being put on log files.
if option.endswith("password") or option.endswith("pass"):
value = "(password censored)"
else:
value = getattr(section, option)
bot.reply("%s.%s = %s" % (section_name, option, value))
return
# Otherwise, set the value to one given as argument 2.
if static_sec:
descriptor = getattr(section.__class__, option)
try:
if isinstance(descriptor, FilenameAttribute):
value = descriptor.parse(bot.config, descriptor, value)
else:
value = descriptor.parse(descriptor, value)
except ValueError as exc:
bot.say("Can't set attribute: " + str(exc))
return
setattr(section, option, value)
@sopel.module.require_privmsg
@sopel.module.require_admin
@sopel.module.commands('save')
@sopel.module.example('.save')
def save_config(bot, trigger):
"""Save state of sopels config object to the configuration file."""
bot.config.save()
avg_line_length: 30.762557 | max_line_length: 80 | alphanum_fraction: 0.660828

---

hexsha: 13300ccfd0a52bc96158a7be43c6539fe28ec9c6 | size: 23,875 | ext: py | lang: Python
max_stars:  path=src/python/serif/xmlio/__init__.py | repo=BBN-E/text-open | head=c508f6caeaa51a43cdb0bc27d8ed77e5750fdda9 | licenses=["Apache-2.0"] | count=2 | min_datetime=2022-03-24T14:37:51.000Z | max_datetime=2022-03-24T19:56:45.000Z
max_issues: path=src/python/serif/xmlio/__init__.py | repo=BBN-E/text-open | head=c508f6caeaa51a43cdb0bc27d8ed77e5750fdda9 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
max_forks:  path=src/python/serif/xmlio/__init__.py | repo=BBN-E/text-open | head=c508f6caeaa51a43cdb0bc27d8ed77e5750fdda9 | licenses=["Apache-2.0"] | count=null | min_datetime=null | max_datetime=null
content:
######################################################################
# { Theory Attribute Specifications
######################################################################
from xml.etree import ElementTree as ET
def escape_cdata_carriage_return(text, encoding='utf-8'):
"""
Source copied from ElementTree.py and modified to add
    '\r' -> '&#13;' replacement. Monkey patch!
"""
# escape character data
try:
# it's worth avoiding do-nothing calls for strings that are
# shorter than 500 character, or so. assume that's, by far,
# the most common case in most applications.
if "&" in text:
text = text.replace("&", "&")
if "<" in text:
text = text.replace("<", "<")
if ">" in text:
text = text.replace(">", ">")
if "\r" in text:
text = text.replace("\r", "
")
# Need to return a string, so after patching up the XML,
# we need to decode it again... I'm not convinced this
# actually does anything. I haven't found a counterexample
# yet. -DJE
return text.encode(encoding, "xmlcharrefreplace").decode(encoding)
except (TypeError, AttributeError):
ET._raise_serialization_error(text)
ET._escape_cdata = escape_cdata_carriage_return
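# Illustration only (not from the original module): with the patch above installed,
# carriage returns in element text survive serialization as character references.
#   elt = ET.Element("s"); elt.text = "a\rb"
#   ET.tostring(elt)  # -> b'<s>a&#13;b</s>' rather than a raw "\r"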
SERIFXML_VERSION = 18
"""If true, then SerifTheory objects will keep a pointer to the
ElementTree.Element that they were constructed from. This
makes it possible for the save() method to preserve any extra
attributes or elements that were present in the original
document."""
KEEP_ORIGINAL_ETREE = False
class _AutoPopulatedXMLAttributeSpec(object):
"""
This is the abstract base class for \"Auto-populated XML attribute
specifications\" (or AttributeSpec's for short). Each
AttributeSpec is used to define a single attribute for a Serif
theory class. Some examples of AttributeSpecs are::
is_downcased = _SimpleAttribute(bool, default=False)
sentences = _ChildTheoryElement('Sentences')
start_token = _ReferenceAttribute('start_token', is_required=True)
Each AttributeSpec defines a `set_value()` method, which is used
to read the attribute's value from the XML input for a given
theory object. The default implementation of `set_value()` calls
the abstract method `get_value()`, which should read the
appropriate value from a given XML node, and stores it in the
theory object (using `setattr`).
The name of the instance variable that is used to store an
attribute's value is always identical to the name of the class
variable that holds the AttributeSpec. For example, the Document
class contains an AttributeSpec named 'docid'; and each instance
of the Document class will have an instance variable with the same
name ('docid') that is initialized by that AttributeSpec. Note
that this instance variable (containing the attribute value)
shadows the class variable containing the AttributeSpec.
"""
# We assign a unique attribute number to each AttributeSpec that
# gets created. This allows us to display attributes in the
# correct order when pretty-printing. (In particular, attributes
# are displayed in the order in which they were defined.)
attribute_counter = 0
def __init__(self):
self._attribute_number = self.attribute_counter
_AutoPopulatedXMLAttributeSpec.attribute_counter += 1
def set_value(self, etree, theory):
"""
Set the value of this attribute.
@param name: The name that should be used to store the attribute.
@param etree: The (input) XML tree corresponding to `theory`.
@param theory: The Serif theory object, to which the attribute
should be added.
"""
setattr(theory, self.__name__, self.get_value(etree, theory))
def get_value(self, etree, theory):
"""
Extract and return the value of this attribute from an input
XML tree.
@param name: The name that should be used to store the attribute.
@param etree: The (input) XML tree corresponding to `theory`.
@param theory: The Serif theory object, to which the attribute
should be added.
"""
raise AssertionError('get_value() is an abstract method.')
def serialize(self, etree, theory, **options):
raise AssertionError('serialize() is an abstract method.')
def default_value(self):
return None
def help(self):
"""
Return a single-line string describing this attribute
"""
raise AssertionError('help() is an abstract method.')
class _SimpleAttribute(_AutoPopulatedXMLAttributeSpec):
"""
A basic serif theory attribute, whose value is copied directly
from a corresonding XML attribute. The value should have a simple
type (such as string, boolean, or integer).
"""
def __init__(self, value_type=str, default=None, attr_name=None,
is_required=False):
"""
@param value_type: The type of value expected for this attribute.
This should be a Python type (such as int or bool), and is
used directly to cast the string value to an appropriate value.
@param default: The default value for this attribute. I.e., if
no value is provided, then the attribute will take this value.
The default value is *not* required to be of the type specified
by value_type -- in particular, the default may be None.
@param attr_name: The name of the XML attribute used to store this
value. If not specified, then the name will default to the
name of the serif theory attribute.
@param is_required: If true, then raise an exception if this
attribute is not defined on the XML input element.
"""
_AutoPopulatedXMLAttributeSpec.__init__(self)
self._value_type = value_type
self._default = default
self._attr_name = attr_name
self._is_required = is_required
def get_value(self, etree, theory):
name = self._attr_name or self.__name__
if name in etree.attrib:
return self._parse_value(name, etree.attrib[name])
elif self._is_required:
raise ValueError('Attribute %s is required for %s' %
(name, etree))
else:
return self._default
def _parse_value(self, name, value):
if self._value_type == bool:
if value.lower() == 'true': return True
if value.lower() == 'false': return False
raise ValueError('Attribute %s must have a boolean value '
'(either TRUE or FALSE)' % name)
else:
return self._value_type(value)
def _encode_value(self, value):
from serif.theory.enumerated_type import EnumeratedType
if value is True:
return 'TRUE'
elif value is False:
return 'FALSE'
elif isinstance(value, bytes):
return value.decode('utf-8')
elif isinstance(value, EnumeratedType._BaseClass):
return value.value
elif not isinstance(value, str):
return str(value)
else:
return value
def serialize(self, etree, theory, **options):
value = getattr(theory, self.__name__, None)
explicit_defaults = options.get('explicit_defaults', True)
if value is not None:
if ((not explicit_defaults) and
(self._default is not None) and
(value == self._default)):
return
attr_name = self._attr_name or self.__name__
value = self._encode_value(value)
etree.attrib[attr_name] = value
_HELP_TEMPLATE = 'a %s value extracted from the XML attribute %r'
def help(self):
name = self._attr_name or self.__name__
s = self._HELP_TEMPLATE % (
self._value_type.__name__, name)
if self._is_required:
s += ' (required)'
else:
s += ' (default=%r)' % self._default
return s
def default_value(self):
return self._default
class _SimpleListAttribute(_SimpleAttribute):
def _parse_value(self, name, value):
return tuple(_SimpleAttribute._parse_value(self, name, v)
for v in value.split())
def _encode_value(self, value):
return ' '.join(_SimpleAttribute._encode_value(self, v)
for v in value)
_HELP_TEMPLATE = 'a list of %s values extracted from the XML attribute %r'
class _IdAttribute(_AutoPopulatedXMLAttributeSpec):
"""
An identifier attribute (copied from the XML attribute \"id\").
In addtion to initializing theory.id, this attribute also
registers the id in the identifier map that is owned by the
theory's document.
"""
def set_value(self, etree, theory):
theory.id = etree.attrib.get('id')
document = theory.document
if document is None:
raise ValueError('Containing document not found!')
document.register_id(theory)
def serialize(self, etree, theory, **options):
xml_id = getattr(theory, 'id', None)
if xml_id is not None:
etree.attrib['id'] = xml_id
def help(self):
return "The XML id for this theory object (default=None)"
class _ReferenceAttribute(_SimpleAttribute):
"""
An attribute that is used to point to another Serif theory object,
using its identifier. When this attribute is initialized, the
target id is copied from the XML attribute with a specified name
(`attr_name`), and stored as a private variable. This id is *not*
looked up during initialization, since its target may not have
been created yet.
Instead, this attribute uses a Python feature called
\"descriptors\" to resolve the target id to a value when the
attribute is accessed.
In particular, each _ReferencedAttribute is a (non-data)
descriptor on the Serif theory class, which means that its
`__get__()` method is called whenever the corresponding Serif
theory attribute is read. The `__get__()` method looks up the
target id in the identifier map that is owned by the theory's
document. If the identifier is found, then the corresponding
theory object is returned; otherwise, a special `DanglingPointer`
object is returned.
"""
def __init__(self, attr_name, is_required=False, cls=None):
"""
@param attr_name: The name of the XML idref attribute used to
hold the pointer to a theory object. Typically, these
attribute names will end in '_id'.
@param is_required: If true, then raise an exception if this
attribute is not defined on the XML input element. If
is_required is false and the attribute is not defined on
the XML input element, then the Serif theory attribute's
value will be None.
@param cls: The Serif theory class (or name of the class)
that the target value should belong to.
"""
self._attr_name = attr_name
self._private_attr_name = '_' + attr_name
self._cls = cls
_SimpleAttribute.__init__(self, is_required=is_required,
attr_name=attr_name)
def set_value(self, etree, theory):
# This stores the id, but does *not* look it up -- the target
# for the pointer might not have been deserialized from xml yet.
setattr(theory, self._private_attr_name,
self.get_value(etree, theory))
def serialize(self, etree, theory, **options):
child = getattr(theory, self.__name__, None)
if child is not None:
etree.attrib[self._attr_name] = self._get_child_id(child)
def _get_child_id(self, child):
child_id = getattr(child, 'id', None)
if child_id is None:
raise ValueError('Serialization Error: attempt to serialize '
'a pointer to an object that has no id (%r)'
% child)
return child_id
def __get__(self, instance, owner=None):
from serif.theory.serif_theory import SerifTheory
# We look up the id only when the attribute is accessed.
if instance is None: return self
theory_id = getattr(instance, self._private_attr_name)
if theory_id is None: return None
document = instance.document
if document is None:
return DanglingPointer(theory_id)
target = document.lookup_id(theory_id)
if target is None:
return DanglingPointer(theory_id)
if self._cls is not None:
if isinstance(self._cls, str):
self._cls = SerifTheory._theory_classes[self._cls]
if not isinstance(target, self._cls):
raise ValueError('Expected %s to point to a %s' % (
self._attr_name, self._cls.__name__))
return target
def _cls_name(self):
if self._cls is None:
return 'theory object'
elif isinstance(self._cls, str):
return self._cls
else:
return self._cls.__name__
def help(self):
name = self._attr_name or self.__name__
s = 'a pointer to a %s extracted from the XML attribute %r' % (
self._cls_name(), name)
if self._is_required: s += ' (required)'
return s
class _ReferenceListAttribute(_ReferenceAttribute):
"""
An attribute that is used to point to a sequence of Serif theory
objects, using their identifiers. This AttributeSpec is similar
to `_ReferenceAttribute`, except that its value is a list of
theory objects, rather than a single theory object.
"""
def __get__(self, instance, owner=None):
from serif.theory.serif_theory import SerifTheory
theory_ids = getattr(instance, self._private_attr_name)
theory_ids = (theory_ids or '').split()
document = instance.document
if document is None:
return [DanglingPointer(tid) for tid in theory_ids]
targets = [(document.lookup_id(tid) or DanglingPointer(tid))
for tid in theory_ids]
if self._cls is not None:
if isinstance(self._cls, str):
self._cls = SerifTheory._theory_classes[self._cls]
for t in targets:
if not isinstance(t, (self._cls, DanglingPointer)):
raise ValueError('Expected %s to point to a %s; got a %s' % (
self._attr_name, self._cls.__name__, t.__class__.__name__))
return targets
def serialize(self, etree, theory, **options):
child_ids = [self._get_child_id(child)
for child in getattr(theory, self.__name__, ())]
if child_ids:
etree.attrib[self._attr_name] = ' '.join(child_ids)
def default_value(self):
return []
def help(self):
name = self._attr_name or self.__name__
s = ('a list of pointers to %ss extracted from '
'the XML attribute %r' % (self._cls_name(), name))
return s
class DanglingPointer(object):
"""
A class used by `_ReferenceAttribute` to indicate that the target
id has not yet been read. In particular, a DanglingPointer will
be returned by `ReferenceAttribute.__get__()` if a target pointer
id is not found in the identifier map.
"""
def __init__(self, id):
self.id = id
def __repr__(self):
return "<Dangling Pointer: id=%r>" % self.id
def _get_summary(self):
return "<Dangling Pointer: id=%r>" % self.id
class _OffsetAttribute(_AutoPopulatedXMLAttributeSpec):
"""
An attribute used to store a start or end offset. These
attributes may be stored in the XML in two different ways: either
using separate XML attributes for the begin and end offsets; or
using a single XML attribute for both. This AttributeSpec
subclass is responsible for reading both formats.
"""
def __init__(self, offset_side, offset_name, value_type=int):
_AutoPopulatedXMLAttributeSpec.__init__(self)
assert offset_side in ('start', 'end')
self.is_start = (offset_side == 'start')
self.offset_name = offset_name
self.offset_attr = '%s_%s' % (offset_side, offset_name)
self.condensed_offsets_attr = '%s_offsets' % offset_name
self._value_type = value_type
def get_value(self, etree, theory):
if self.offset_attr in etree.attrib:
return self._value_type(etree.attrib[self.offset_attr])
elif self.condensed_offsets_attr in etree.attrib:
s, e = etree.attrib[self.condensed_offsets_attr].split(':')
if self.is_start:
return self._value_type(s)
else:
return self._value_type(e)
else:
return None
def serialize(self, etree, theory, **options):
value = getattr(theory, self.__name__, None)
if value is not None:
if options.get('condensed_offsets', True):
etree.attrib[self.condensed_offsets_attr] = '%s:%s' % (
getattr(theory, 'start_%s' % self.offset_name),
getattr(theory, 'end_%s' % self.offset_name))
else:
etree.attrib[self.offset_attr] = '%s' % value
def help(self):
return 'an offset extracted from XML attribute %r or %r' % (
(self.offset_attr, self.condensed_offsets_attr))
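# A minimal sketch of the two offset encodings handled above: separate
# 'start_<name>'/'end_<name>' attributes versus a condensed '<name>_offsets'
# attribute holding 'start:end'.  The element tag used here is hypothetical.
def _example_offset_encodings():
    import xml.etree.ElementTree as _ET
    separate = _ET.fromstring('<Token start_char="4" end_char="9"/>')
    condensed = _ET.fromstring('<Token char_offsets="4:9"/>')
    start1, end1 = int(separate.get('start_char')), int(separate.get('end_char'))
    start2, end2 = (int(v) for v in condensed.get('char_offsets').split(':'))
    assert (start1, end1) == (start2, end2) == (4, 9)
    return start1, end1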
class _ChildTheoryElement(_AutoPopulatedXMLAttributeSpec):
"""
An attribute used to hold a child theory that is described in
a child XML element.
"""
def __init__(self, cls_name, is_required=False):
"""
@param cls_name: The name of the Serif theory class for the
child value.
"""
_AutoPopulatedXMLAttributeSpec.__init__(self)
self._is_required = is_required
self._cls_name = cls_name
def _get_child_elt(self, name, etree):
if isinstance(name, tuple):
elts = [elt for elt in etree if elt.tag in name]
name = ' or '.join(name) # for error messages.
else:
elts = [elt for elt in etree if elt.tag == name]
if len(elts) == 1:
return elts[0]
elif len(elts) > 1:
raise ValueError('Expected at most one %s' % name)
elif self._is_required:
raise ValueError('Expected exactly one %s' % name)
else:
return None
def serialize(self, etree, theory, **options):
child = getattr(theory, self.__name__, None)
if child is not None:
if (hasattr(child, '_etree') and child._etree in etree):
child_etree = child.toxml(child._etree, **options)
else:
child_etree = child.toxml(**options)
etree.append(child_etree)
if isinstance(self._cls_name, tuple):
assert child_etree.tag in self._cls_name
else:
assert child_etree.tag == self._cls_name
def get_value(self, etree, theory):
from serif.theory.serif_theory import SerifTheory
name = self._cls_name or self.__name__
child_elt = self._get_child_elt(name, etree)
if child_elt is None:
return None
cls = SerifTheory._theory_classes.get(child_elt.tag)
if cls is None:
raise AssertionError('Theory class %s not defined!' % name)
return cls(child_elt, theory)
def help(self):
s = 'a child %s theory' % self._cls_name
if self._is_required:
s += ' (required)'
else:
s += ' (optional)'
return s
class _ChildTextElement(_ChildTheoryElement):
"""
An attribute whose value should be extracted from the string text
of a child XML element. (c.f. _TextOfElement)
"""
def set_text(self, text):
self.text = text
def get_value(self, etree, theory):
child_elt = self._get_child_elt(self._cls_name, etree)
if KEEP_ORIGINAL_ETREE:
self._child_elt = child_elt
if child_elt is None:
return None
else:
return child_elt.text
def serialize(self, etree, theory, **options):
text = getattr(theory, self.__name__, None)
if text is not None:
if hasattr(self, '_child_elt') and self._child_elt in etree:
child_etree = self._child_elt
else:
del etree[:]
child_etree = ET.Element(self._cls_name or self.__name__)
etree.append(child_etree)
child_etree.text = text
child_etree.tail = '\n' + options.get('indent', '')
def help(self):
return 'a text string extracted from the XML element %r' % (
self._cls_name)
class _TextOfElement(_AutoPopulatedXMLAttributeSpec):
"""
An attribute whose value should be extracted from the string text
of *this* XML element. (c.f. _ChildTextElement)
"""
def __init__(self, is_required=False, strip=False):
_AutoPopulatedXMLAttributeSpec.__init__(self)
self._strip = strip
self._is_required = is_required
def get_value(self, etree, theory):
text = etree.text or ''
if self._strip: text = text.strip()
if self._is_required and not text:
raise ValueError('Text content is required for %s' %
self.__name__)
return text
def serialize(self, etree, theory, **options):
text = getattr(theory, self.__name__, None)
if text is not None:
# assert etree.text is None # only one text string!
etree.text = text
def help(self):
return ("a text string extracted from this "
"theory's XML element text")
class _ChildTheoryElementList(_AutoPopulatedXMLAttributeSpec):
"""
An attribute whose value is a list of child theories. Each child
theory is deserialized from a single child XML element.
"""
def __init__(self, cls_name, index_attrib=None):
_AutoPopulatedXMLAttributeSpec.__init__(self)
self._cls_name = cls_name
self._index_attrib = index_attrib
def get_value(self, etree, theory):
from serif.theory.serif_theory import SerifTheory
name = self._cls_name or self.__name__
elts = [elt for elt in etree if elt.tag == name]
cls = SerifTheory._theory_classes.get(name)
if cls is None:
raise AssertionError('Theory class %s not defined!' % name)
result = [cls(elt, theory) for elt in elts]
if self._index_attrib:
for i, child in enumerate(result):
child.__dict__[self._index_attrib] = i
return result
def serialize(self, etree, theory, **options):
children = getattr(theory, self.__name__, ())
if KEEP_ORIGINAL_ETREE:
child_etrees = set(etree)
else:
child_etrees = set()
for child in children:
if (hasattr(child, '_etree') and child._etree in child_etrees):
child_etree = child.toxml(child._etree, **options)
else:
child_etree = child.toxml(**options)
etree.append(child_etree)
assert child_etree.tag == self._cls_name
def default_value(self):
return []
def help(self):
s = 'a list of child %s theory objects' % self._cls_name
return s
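# A minimal sketch of the list-valued pattern above: child elements are
# gathered by tag and, when an index attribute is requested, each child is
# tagged with its position.  Tag and key names are hypothetical.
def _example_child_list_indexing():
    import xml.etree.ElementTree as _ET
    parent = _ET.fromstring('<TokenSequence><Token/><Token/><Other/></TokenSequence>')
    children = [elt for elt in parent if elt.tag == 'Token']
    indexed = [{'index': i, 'tag': elt.tag} for i, elt in enumerate(children)]
    assert [c['index'] for c in indexed] == [0, 1]
    return indexed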
| 38.384244
| 83
| 0.624042
|
b531e3fd9488ed783a281441b42432fd3cea1ce0
| 1,960
|
py
|
Python
|
nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py
|
Conxz/nipype
|
1281723ae56eacd103597ff4081a205583706e62
|
[
"Apache-2.0"
] | null | null | null |
nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py
|
Conxz/nipype
|
1281723ae56eacd103597ff4081a205583706e62
|
[
"Apache-2.0"
] | null | null | null |
nipype/interfaces/freesurfer/tests/test_auto_RobustTemplate.py
|
Conxz/nipype
|
1281723ae56eacd103597ff4081a205583706e62
|
[
"Apache-2.0"
] | null | null | null |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ....testing import assert_equal
from ..longitudinal import RobustTemplate
def test_RobustTemplate_inputs():
input_map = dict(args=dict(argstr='%s',
),
auto_detect_sensitivity=dict(argstr='--satit',
mandatory=True,
xor=[u'outlier_sensitivity'],
),
average_metric=dict(argstr='--average %d',
),
environ=dict(nohash=True,
usedefault=True,
),
fixed_timepoint=dict(argstr='--fixtp',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_files=dict(argstr='--mov %s',
mandatory=True,
),
in_intensity_scales=dict(argstr='--iscalein %s',
),
initial_timepoint=dict(argstr='--inittp %d',
),
initial_transforms=dict(argstr='--ixforms %s',
),
intensity_scaling=dict(argstr='--iscale',
),
no_iteration=dict(argstr='--noit',
),
out_file=dict(argstr='--template %s',
mandatory=True,
usedefault=True,
),
outlier_sensitivity=dict(argstr='--sat %.4f',
mandatory=True,
xor=[u'auto_detect_sensitivity'],
),
scaled_intensity_outputs=dict(argstr='--iscaleout %s',
),
subjects_dir=dict(),
subsample_threshold=dict(argstr='--subsample %d',
),
terminal_output=dict(nohash=True,
),
transform_outputs=dict(argstr='--lta %s',
),
)
inputs = RobustTemplate.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_RobustTemplate_outputs():
output_map = dict(out_file=dict(),
scaled_intensity_outputs=dict(),
transform_outputs=dict(),
)
outputs = RobustTemplate.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| 27.605634
| 78
| 0.65
|
434ce31b1dc43ffd55f4001f40f98f3fdd52738b
| 6,676
|
py
|
Python
|
tests/python/sync_test_app.py
|
shuryanc/sdk
|
b7ece50cfc546fa6c3620c28ee4d9aa05059678b
|
[
"BSD-2-Clause"
] | 1,296
|
2015-01-04T17:27:12.000Z
|
2022-03-31T12:28:43.000Z
|
tests/python/sync_test_app.py
|
shuryanc/sdk
|
b7ece50cfc546fa6c3620c28ee4d9aa05059678b
|
[
"BSD-2-Clause"
] | 2,167
|
2015-01-01T14:00:45.000Z
|
2022-03-08T09:40:02.000Z
|
tests/python/sync_test_app.py
|
shuryanc/sdk
|
b7ece50cfc546fa6c3620c28ee4d9aa05059678b
|
[
"BSD-2-Clause"
] | 538
|
2015-01-01T14:12:21.000Z
|
2022-03-27T06:17:18.000Z
|
"""
Application for testing syncing algorithm
(c) 2013-2014 by Mega Limited, Wellsford, New Zealand
This file is part of the MEGA SDK - Client Access Engine.
Applications using the MEGA API must present a valid application key
and comply with the the rules set forth in the Terms of Service.
The MEGA SDK is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
@copyright Simplified (2-clause) BSD License.
You should have received a copy of the license along with this
program.
"""
import os
import time
import random
from sync_test_base import get_random_str
import shutil
import logging
import datetime
class SyncTestApp(object):
"""
test application base class
"""
def __init__(self, local_mount_in, local_mount_out, work_folder, delete_tmp_files=True, use_large_files=True):
"""
        local_mount_in / local_mount_out: the two synced local mount points
        work_folder: a temporary folder in which generated files are placed
"""
self.start_time = time.time()
random.seed(time.time())
self.local_mount_in = local_mount_in
self.local_mount_out = local_mount_out
self.rnd_folder = get_random_str()
self.local_folder_in = os.path.join(self.local_mount_in, self.rnd_folder)
self.local_folder_out = os.path.join(self.local_mount_out, self.rnd_folder)
self.work_folder = os.path.join(work_folder, self.rnd_folder)
self.nr_retries = 200
self.delete_tmp_files = delete_tmp_files
self.use_large_files = use_large_files
def change_folders(self):
"""
        delete the current test folders and switch to a freshly named set
"""
        time.sleep(0.2)  # brief pause so the sync algorithm does not treat this as a rename
if self.delete_tmp_files:
try:
shutil.rmtree(self.local_folder_in)
except OSError:
pass
        time.sleep(1.2)  # brief pause so the sync algorithm does not treat this as a rename
self.rnd_folder = get_random_str()
self.local_folder_in = os.path.join(self.local_mount_in, self.rnd_folder)
self.local_folder_out = os.path.join(self.local_mount_out, self.rnd_folder)
self.work_folder = os.path.join(self.work_folder, self.rnd_folder)
self.prepare_folders();
def __enter__(self):
# call subclass function
res = self.start()
if not res:
self.stop()
raise Exception('Failed to start app!')
res = self.prepare_folders()
if not res:
self.stop()
raise Exception('Failed to start app!')
return self
def __exit__(self, exc_type, exc_value, traceback):
# remove tmp folders
if self.delete_tmp_files:
try:
logging.debug("Deleting %s" % self.local_folder_in)
shutil.rmtree(self.local_folder_in)
except OSError:
pass
try:
logging.debug("Deleting %s" % self.local_folder_out)
shutil.rmtree(self.local_folder_out)
except OSError:
pass
try:
logging.debug("Deleting %s" % self.work_folder)
shutil.rmtree(self.work_folder)
except OSError:
pass
# terminate apps
self.stop()
logging.info("Execution time: %s" % str(datetime.timedelta(seconds=time.time()-self.start_time)))
@staticmethod
def touch(path):
"""
create an empty file
update utime
"""
with open(path, 'a'):
os.utime(path, None)
def prepare_folders(self):
"""
prepare upsync, downsync and work directories
"""
# create "in" folder
logging.info("IN folder: %s" % self.local_folder_in)
try:
os.makedirs(self.local_folder_in)
        except OSError as e:
logging.error("Failed to create directory: %s (%s)" % (self.local_folder_in, e))
return False
logging.info("OUT folder: %s" % self.local_folder_out)
self.sync()
# temporary workaround
#tmp_fix_file = os.path.join(self.local_mount_out, "tmp_fix")
success = False
# try to access the dir
for r in range(0, self.nr_retries):
self.attempt=r
try:
if os.path.isdir(self.local_folder_out):
success = True
break
else:
# wait for a dir
logging.debug("Directory %s not found! Retrying [%d/%d] .." % (self.local_folder_out, r + 1, self.nr_retries))
#self.touch(tmp_fix_file)
self.sync()
except OSError:
# wait for a dir
logging.debug("Directory %s not found! Retrying [%d/%d] .." % (self.local_folder_out, r + 1, self.nr_retries))
#self.touch(tmp_fix_file)
self.sync()
if success is False:
logging.error("Failed to access directory: %s" % self.local_folder_out)
return False
# create work folder
logging.debug("Work folder: %s" % self.work_folder)
try:
os.makedirs(self.work_folder)
        except OSError as e:
logging.error("Failed to create directory: %s (%s)" % (self.work_folder, e))
return False
return True
def stop(self):
"""
        cleans directories and calls finish
"""
if self.delete_tmp_files:
try:
shutil.rmtree(self.local_folder_in)
except OSError:
pass
self.sync()
self.finish()
# virtual methods
def start(self):
"""
start application
"""
raise NotImplementedError("Not Implemented !")
def finish(self):
"""
stop application
"""
raise NotImplementedError("Not Implemented !")
def sync(self):
"""
wait for full synchronization
"""
raise NotImplementedError("Not Implemented !")
def pause(self):
"""
pause application
"""
raise NotImplementedError("Not Implemented !")
def unpause(self):
"""
unpause application
"""
raise NotImplementedError("Not Implemented !")
def is_alive(self):
"""
return True if application instance is running
"""
raise NotImplementedError("Not Implemented !")
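# A minimal sketch of the subclassing contract described by the virtual
# methods above.  This stub does not drive a real sync client; the class
# name and its no-op behaviour are hypothetical and for illustration only.
class _ExampleNoopSyncApp(SyncTestApp):
    def start(self):
        return True   # pretend the application started successfully
    def finish(self):
        pass          # nothing to shut down
    def sync(self):
        pass          # pretend synchronization completes instantly
    def pause(self):
        pass
    def unpause(self):
        pass
    def is_alive(self):
        return True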
| 30.345455
| 130
| 0.584781
|
633ce44ed70b47e798377bbf188aaaa40f36bfae
| 56,026
|
py
|
Python
|
impala/hiveserver2.py
|
t3rmin4t0r/impyla
|
0914895830609001b9d4f535573cba8db487d45e
|
[
"Apache-2.0"
] | null | null | null |
impala/hiveserver2.py
|
t3rmin4t0r/impyla
|
0914895830609001b9d4f535573cba8db487d45e
|
[
"Apache-2.0"
] | null | null | null |
impala/hiveserver2.py
|
t3rmin4t0r/impyla
|
0914895830609001b9d4f535573cba8db487d45e
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import getpass
import re
import socket
import datetime
import operator
import six
import sys
import time
from bitarray import bitarray
from six.moves import range
from impala._thrift_api import (
get_socket, get_http_transport, get_transport, TTransportException, TBinaryProtocol, TOpenSessionReq,
TFetchResultsReq,
TCloseSessionReq, TExecuteStatementReq, TGetInfoReq, TGetInfoType, TTypeId,
TFetchOrientation, TGetResultSetMetadataReq, TStatusCode, TGetColumnsReq,
TGetSchemasReq, TGetTablesReq, TGetFunctionsReq, TGetOperationStatusReq,
TOperationState, TCancelOperationReq, TCloseOperationReq, TGetLogReq,
TProtocolVersion, TGetRuntimeProfileReq, TRuntimeProfileFormat,
TGetExecSummaryReq, ImpalaHiveServer2Service, TExecStats, ThriftClient,
TApplicationException)
from impala.compat import Decimal
from impala.error import (NotSupportedError, OperationalError,
ProgrammingError, HiveServer2Error, HttpError)
from impala.interface import Connection, Cursor, _bind_parameters
from impala.util import get_logger_and_init_null
log = get_logger_and_init_null(__name__)
V6_VERSION = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6
class HiveServer2Connection(Connection):
# PEP 249
# HiveServer2Connection objects are associated with a TCLIService.Client
# thrift service
# it's instantiated with an alive TCLIService.Client
def __init__(self, service, default_db=None):
log.debug('HiveServer2Connection(service=%s, default_db=%s)', service,
default_db)
self.service = service
self.default_db = default_db
def close(self):
"""Close the session and the Thrift transport."""
# PEP 249
log.debug('Closing HS2 connection')
self.service.close()
def reconnect(self):
self.service.reconnect()
def commit(self):
"""Impala doesn't support transactions; does nothing."""
# PEP 249
pass
def rollback(self):
"""Impala doesn't support transactions; raises NotSupportedError"""
# PEP 249
raise NotSupportedError
def cursor(self, user=None, configuration=None, convert_types=True,
dictify=False, fetch_error=True):
"""Get a cursor from the HiveServer2 (HS2) connection.
Parameters
----------
user : str, optional
configuration : dict of str keys and values, optional
Configuration overlay for the HS2 session.
convert_types : bool, optional
When `False`, timestamps and decimal values will not be converted
to Python `datetime` and `Decimal` values. (These conversions are
expensive.) Only applies when using HS2 protocol versions > 6.
dictify : bool, optional
            When `True`, the cursor will return each row as a dict mapping
            column names to values instead of a tuple.
fetch_error : bool, optional
In versions of impala prior to 2.7.0, when an operation fails and
the impalad returns an error state, the error message is not always
returned. In these cases the error message can be retrieved by a
subsequent fetch rpc call but this has a side effect of invalidating
the query handle and causing any further operations against it to
fail. e.g. calling log() or profile().
When set to `True` impyla will attempt to fetch the error message.
            When set to `False`, impyla will not attempt to fetch the message
            with a fetch call. In this case the query handle remains valid and
            impyla will raise an exception with a message of
            "Operation is in ERROR_STATE".
            The default is `True`.
Returns
-------
HiveServer2Cursor
A `Cursor` object (DB API 2.0-compliant).
"""
# PEP 249
log.debug('Getting a cursor (Impala session)')
if user is None:
user = getpass.getuser()
log.debug('.cursor(): getting new session_handle')
session = self.service.open_session(user, configuration)
log.debug('HiveServer2Cursor(service=%s, session_handle=%s, '
'default_config=%s, hs2_protocol_version=%s)',
self.service, session.handle,
session.config, session.hs2_protocol_version)
cursor_class = HiveServer2DictCursor if dictify else HiveServer2Cursor
cursor = cursor_class(session, convert_types=convert_types,
fetch_error=fetch_error)
if self.default_db is not None:
log.info('Using database %s as default', self.default_db)
cursor.execute('USE %s' % self.default_db)
return cursor
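# A minimal sketch of the cursor life-cycle documented above, written against
# a connection assumed to have been built around an open HS2 service.  The
# query text is hypothetical; only methods defined in this module are used.
def _example_cursor_lifecycle(conn):
    cur = conn.cursor(convert_types=True)   # dictify=True would yield dict rows instead
    try:
        cur.execute('SELECT 1')             # blocks until the query finishes
        rows = cur.fetchall()
    finally:
        cur.close()
    return rows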
class HiveServer2Cursor(Cursor):
"""The DB API 2.0 Cursor object.
See the PEP 249 specification for more details.
"""
# PEP 249
# HiveServer2Cursor objects are associated with a Session
# they are instantiated with alive session_handles
def __init__(self, session, convert_types=True, fetch_error=True):
self.session = session
self.convert_types = convert_types
self.fetch_error = fetch_error
self._last_operation = None
self._last_operation_string = None
self._last_operation_active = False
self._buffersize = None
self._buffer = Batch() # zero-length
# initial values, per PEP 249
self._description = None
self._rowcount = -1
self._closed = False
def __del__(self):
if self._closed:
return
try:
self.close_operation()
except Exception:
pass
try:
self.session.close()
except Exception:
pass
@property
def description(self):
# PEP 249
if self._description is None and self.has_result_set:
log.debug('description=None has_result_set=True => getting schema')
schema = self._last_operation.get_result_schema()
self._description = schema
return self._description
@property
def rowcount(self):
# PEP 249
return self._rowcount
@property
def rowcounts(self):
        # Workaround to get the number of rows modified by INSERT/UPDATE/DELETE statements
modifiedRows, errorRows = -1, -1
if self._last_operation_active:
logList = self.get_profile().split('\n')
resultDict = {}
subs = ['NumModifiedRows', 'NumRowErrors']
resultSet = [s for s in logList if any(item in s for item in subs)]
if resultSet:
for items in resultSet:
key, value = items.split(':')
key, value = key.strip(), value.strip()
resultDict[key] = value
modifiedRows = int(resultDict.get('NumModifiedRows', -1))
errorRows = int(resultDict.get('NumRowErrors', -1))
return (modifiedRows, errorRows)
@property
def lastrowid(self):
# PEP 249
return None
@property
def query_string(self):
return self._last_operation_string
def get_arraysize(self):
# PEP 249
return self._buffersize if self._buffersize else 1
def set_arraysize(self, arraysize):
# PEP 249
log.debug('set_arraysize: arraysize=%s', arraysize)
self._buffersize = arraysize
arraysize = property(get_arraysize, set_arraysize)
@property
def buffersize(self):
# this is for internal use. it provides an alternate default value for
# the size of the buffer, so that calling .next() will read multiple
# rows into a buffer if arraysize hasn't been set. (otherwise, we'd
# get an unbuffered impl because the PEP 249 default value of arraysize
# is 1)
return self._buffersize if self._buffersize else 1024
@property
def has_result_set(self):
return (self._last_operation is not None and
self._last_operation.has_result_set)
def close(self):
# PEP 249
if self._closed:
return
# If an operation is active and isn't closed before the session is
# closed, then the server will cancel the operation upon closing
# the session. Cancellation could be problematic for some DDL
# operations. This avoids requiring the user to call the non-PEP 249
# close_operation().
exc_info = None
try:
self.close_operation()
except Exception:
exc_info = sys.exc_info()
log.debug('Closing HiveServer2Cursor')
try:
self.session.close()
except Exception:
# If we encountered an error when closing the session
# then print operation close exception to logs and
# raise the session close exception
if exc_info:
log.error('Failure encountered closing last operation.',
exc_info=exc_info)
raise
self._closed = True
# If there was an error when closing last operation then
# raise exception
if exc_info:
six.reraise(*exc_info)
def cancel_operation(self, reset_state=True):
if self._last_operation_active:
log.info('Canceling active operation')
self._last_operation.cancel()
if reset_state:
self._reset_state()
def close_operation(self):
if self._last_operation_active:
log.info('Closing active operation')
self._reset_state()
def _reset_state(self):
log.debug('_reset_state: Resetting cursor state')
self._buffer = Batch()
self._description = None
if self._last_operation_active:
self._last_operation_active = False
self._last_operation.close()
self._last_operation_string = None
self._last_operation = None
def execute(self, operation, parameters=None, configuration=None):
"""Synchronously execute a SQL query.
Blocks until results are available.
Parameters
----------
operation : str
The SQL query to execute.
parameters : str, optional
Parameters to be bound to variables in the SQL query, if any.
Impyla supports all DB API `paramstyle`s, including `qmark`,
`numeric`, `named`, `format`, `pyformat`.
configuration : dict of str keys and values, optional
Configuration overlay for this query.
Returns
-------
NoneType
Results are available through a call to `fetch*`.
"""
# PEP 249
self.execute_async(operation, parameters=parameters,
configuration=configuration)
log.debug('Waiting for query to finish')
self._wait_to_finish() # make execute synchronous
log.debug('Query finished')
def execute_async(self, operation, parameters=None, configuration=None):
"""Asynchronously execute a SQL query.
Immediately returns after query is sent to the HS2 server. Poll with
`is_executing`. A call to `fetch*` will block.
Parameters
----------
operation : str
The SQL query to execute.
parameters : str, optional
Parameters to be bound to variables in the SQL query, if any.
Impyla supports all DB API `paramstyle`s, including `qmark`,
`numeric`, `named`, `format`, `pyformat`.
configuration : dict of str keys and values, optional
Configuration overlay for this query.
Returns
-------
NoneType
Results are available through a call to `fetch*`.
"""
log.debug('Executing query %s', operation)
paramstyle = None
if configuration:
paramstyle = configuration.pop('paramstyle', None)
def op():
if parameters:
self._last_operation_string = _bind_parameters(operation,
parameters,
paramstyle)
else:
self._last_operation_string = operation
op = self.session.execute(self._last_operation_string,
configuration,
run_async=True)
self._last_operation = op
self._execute_async(op)
def _debug_log_state(self):
if self._last_operation_active:
handle = self._last_operation.handle
else:
handle = None
log.debug('_execute_async: self._buffer=%s self._description=%s '
'self._last_operation_active=%s '
'self._last_operation=%s',
self._buffer, self._description,
self._last_operation_active, handle)
def _execute_async(self, operation_fn):
# operation_fn should set self._last_operation_string and
# self._last_operation
self._debug_log_state()
self._reset_state()
self._debug_log_state()
operation_fn()
self._last_operation_active = True
self._debug_log_state()
def _wait_to_finish(self):
# Prior to IMPALA-1633 GetOperationStatus does not populate errorMessage
# in case of failure. If not populated, queries that return results
# can get a failure description with a further call to FetchResults rpc.
loop_start = time.time()
while True:
req = TGetOperationStatusReq(operationHandle=self._last_operation.handle)
resp = self._last_operation._rpc('GetOperationStatus', req, True)
self._last_operation.update_has_result_set(resp)
operation_state = TOperationState._VALUES_TO_NAMES[resp.operationState]
log.debug('_wait_to_finish: waited %s seconds so far',
time.time() - loop_start)
if self._op_state_is_error(operation_state):
if resp.errorMessage:
raise OperationalError(resp.errorMessage)
else:
if self.fetch_error and self.has_result_set:
self._last_operation_active=False
self._last_operation.fetch()
else:
raise OperationalError("Operation is in ERROR_STATE")
if not self._op_state_is_executing(operation_state):
break
time.sleep(self._get_sleep_interval(loop_start))
def status(self):
if self._last_operation is None:
raise ProgrammingError("Operation state is not available")
return self._last_operation.get_status()
def execution_failed(self):
if self._last_operation is None:
raise ProgrammingError("Operation state is not available")
operation_state = self._last_operation.get_status()
return self._op_state_is_error(operation_state)
def _op_state_is_error(self, operation_state):
return operation_state == 'ERROR_STATE'
def is_executing(self):
if self._last_operation is None:
raise ProgrammingError("Operation state is not available")
operation_state = self._last_operation.get_status()
return self._op_state_is_executing(operation_state)
def _op_state_is_executing(self, operation_state):
return operation_state in (
'PENDING_STATE', 'INITIALIZED_STATE', 'RUNNING_STATE')
def _get_sleep_interval(self, start_time):
"""Returns a step function of time to sleep in seconds before polling
again. Maximum sleep is 1s, minimum is 0.1s"""
elapsed = time.time() - start_time
if elapsed < 0.05:
return 0.01
elif elapsed < 1.0:
return 0.05
elif elapsed < 10.0:
return 0.1
elif elapsed < 60.0:
return 0.5
return 1.0
def executemany(self, operation, seq_of_parameters, configuration=None):
# PEP 249
log.debug('Attempting to execute %s queries', len(seq_of_parameters))
for parameters in seq_of_parameters:
self.execute(operation, parameters, configuration)
if self.has_result_set:
raise ProgrammingError("Operations that have result sets are "
"not allowed with executemany.")
def fetchone(self):
# PEP 249
self._wait_to_finish()
if not self.has_result_set:
raise ProgrammingError("Tried to fetch but no results.")
log.debug('Fetching a single row')
try:
return next(self)
except StopIteration:
return None
def fetchcbatch(self):
'''Return a CBatch object containing the next rows to be fetched. If data is
currently buffered, returns that data, otherwise fetches the next batch.
        Returns None if no more rows are currently available. Note that if None
        is returned, more rows may still become available in the future.'''
if not self._last_operation.is_columnar:
raise NotSupportedError("Server does not support columnar "
"fetching")
if not self.has_result_set:
raise ProgrammingError(
"Trying to fetch results on an operation with no results.")
if len(self._buffer) > 0:
log.debug('fetchcbatch: buffer has data in. Returning it and wiping buffer')
batch = self._buffer
self._buffer = Batch()
return batch
elif self._last_operation_active:
log.debug('fetchcbatch: buffer empty and op is active => fetching '
'more data')
batch = (self._last_operation.fetch(
self.description,
self.buffersize,
convert_types=self.convert_types))
if len(batch) == 0:
return None
return batch
else:
return None
def fetchmany(self, size=None):
# PEP 249
self._wait_to_finish()
if not self.has_result_set:
raise ProgrammingError("Tried to fetch but no results.")
if size is None:
size = self.arraysize
log.debug('Fetching up to %s result rows', size)
local_buffer = []
i = 0
while i < size:
try:
local_buffer.append(next(self))
i += 1
except StopIteration:
break
return local_buffer
def fetchall(self):
# PEP 249
self._wait_to_finish()
log.debug('Fetching all result rows')
try:
return list(self)
except StopIteration:
return []
def fetchcolumnar(self):
"""Executes a fetchall operation returning a list of CBatches"""
self._wait_to_finish()
if not self._last_operation.is_columnar:
raise NotSupportedError("Server does not support columnar "
"fetching")
batches = []
while True:
batch = (self._last_operation.fetch(
self.description,
self.buffersize,
convert_types=self.convert_types))
if len(batch) == 0:
break
batches.append(batch)
return batches
def setinputsizes(self, sizes):
# PEP 249
pass
def setoutputsize(self, size, column=None):
# PEP 249
pass
def __iter__(self):
return self
def next(self):
return self.__next__()
def __next__(self):
while True:
if not self.has_result_set:
raise ProgrammingError(
"Trying to fetch results on an operation with no results.")
if len(self._buffer) > 0:
log.debug('__next__: popping row out of buffer')
return self._buffer.pop()
elif self._last_operation_active:
log.debug('__next__: buffer empty and op is active => fetching '
'more data')
self._buffer = self._last_operation.fetch(self.description,
self.buffersize,
convert_types=self.convert_types)
if len(self._buffer) > 0:
log.debug('__next__: popping row out of buffer')
return self._buffer.pop()
if not self._buffer.expect_more_rows:
log.debug('__next__: no more data to fetch')
raise StopIteration
# If we didn't get rows, but more are expected, need to iterate again.
else:
log.debug('__next__: buffer empty')
raise StopIteration
def ping(self):
"""Checks connection to server by requesting some info."""
log.info('Pinging the impalad')
return self.session.ping()
def get_log(self):
if self._last_operation is None:
raise ProgrammingError("Operation state is not available")
return self._last_operation.get_log()
def get_profile(self, profile_format=TRuntimeProfileFormat.STRING):
if self._last_operation is None:
raise ProgrammingError("Operation state is not available")
return self._last_operation.get_profile(profile_format=profile_format)
def get_summary(self):
return self._last_operation.get_summary()
def build_summary_table(self, summary, output, idx=0,
is_fragment_root=False, indent_level=0):
return build_summary_table(summary, idx, is_fragment_root,
indent_level, output)
def get_databases(self):
def op():
self._last_operation_string = "RPC_GET_DATABASES"
self._last_operation = self.session.get_databases()
self._execute_async(op)
self._wait_to_finish()
def database_exists(self, db_name):
return self.session.database_exists(db_name)
def get_tables(self, database_name=None):
if database_name is None:
database_name = '.*'
def op():
self._last_operation_string = "RPC_GET_TABLES"
self._last_operation = self.session.get_tables(database_name)
self._execute_async(op)
self._wait_to_finish()
def table_exists(self, table_name, database_name=None):
if database_name is None:
database_name = '.*'
return self.session.table_exists(table_name,
database=database_name)
def get_table_schema(self, table_name, database_name=None):
if database_name is None:
database_name = '.*'
def op():
self._last_operation_string = "RPC_DESCRIBE_TABLE"
self._last_operation = self.session.get_table_schema(
table_name, database_name)
self._execute_async(op)
self._wait_to_finish()
results = self.fetchall()
if len(results) == 0:
# TODO: the error raised here should be different
raise OperationalError(
"no schema results for table %s.%s" % (
database_name, table_name))
# check that results are derived from a unique table
tables = set()
for col in results:
tables.add((col[1], col[2]))
if len(tables) > 1:
# TODO: the error raised here should be different
raise ProgrammingError(
"db: %s, table: %s is not unique" % (
database_name, table_name))
return [(r[3], r[5]) for r in results]
def get_functions(self, database_name=None):
if database_name is None:
database_name = '.*'
def op():
self._last_operation_string = "RPC_GET_FUNCTIONS"
self._last_operation = self.session.get_functions(database_name)
self._execute_async(op)
self._wait_to_finish()
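# A minimal sketch of the asynchronous pattern described in execute_async():
# submit the statement, poll with is_executing(), then fetch.  The cursor is
# assumed to come from HiveServer2Connection.cursor(); the query and the
# polling interval are hypothetical.
def _example_async_execution(cursor, poll_interval=0.5):
    cursor.execute_async('SELECT 1')        # returns as soon as the statement is submitted
    while cursor.is_executing():            # poll the operation state
        time.sleep(poll_interval)
    if cursor.execution_failed():
        raise OperationalError('query failed: %s' % cursor.get_log())
    return cursor.fetchall()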
class HiveServer2DictCursor(HiveServer2Cursor):
"""The cursor that returns each element as a dictionary"""
def execute(self, operation, parameters=None, configuration=None):
super(self.__class__, self).execute(operation, parameters,
configuration)
if self.description is not None:
self.fields = [d[0] for d in self.description]
else:
self.fields = None
def __next__(self):
record = super(self.__class__, self).__next__()
return dict(zip(self.fields, record))
# This work builds off of:
# 1. the Hue interface:
# hue/apps/beeswax/src/beeswax/server/dbms.py
# hue/apps/beeswax/src/beeswax/server/hive_server2_lib.py
# hue/desktop/core/src/desktop/lib/thrift_util.py
# 2. the Impala shell:
# Impala/shell/original_impala_shell.py
# mapping between the schema types (based on
# com.cloudera.impala.catalog.PrimitiveType) and TColumnValue (in returned
# rows) helper object for converting from TRow to something friendlier
_TTypeId_to_TColumnValue_getters = {
'BOOLEAN': operator.attrgetter('boolVal'),
'TINYINT': operator.attrgetter('byteVal'),
'SMALLINT': operator.attrgetter('i16Val'),
'INT': operator.attrgetter('i32Val'),
'BIGINT': operator.attrgetter('i64Val'),
'TIMESTAMP': operator.attrgetter('stringVal'),
'FLOAT': operator.attrgetter('doubleVal'),
'DOUBLE': operator.attrgetter('doubleVal'),
'STRING': operator.attrgetter('stringVal'),
'DECIMAL': operator.attrgetter('stringVal'),
'BINARY': operator.attrgetter('binaryVal'),
'VARCHAR': operator.attrgetter('stringVal'),
'CHAR': operator.attrgetter('stringVal'),
'MAP': operator.attrgetter('stringVal'),
'ARRAY': operator.attrgetter('stringVal'),
'STRUCT': operator.attrgetter('stringVal'),
'UNIONTYPE': operator.attrgetter('stringVal'),
'NULL': operator.attrgetter('stringVal'),
'DATE': operator.attrgetter('stringVal')
}
_pre_columnar_protocols = [
TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1,
TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V2,
TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V3,
TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V4,
TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V5,
]
def err_if_rpc_not_ok(resp):
if (resp.status.statusCode != TStatusCode.SUCCESS_STATUS and
resp.status.statusCode != TStatusCode.SUCCESS_WITH_INFO_STATUS and
resp.status.statusCode != TStatusCode.STILL_EXECUTING_STATUS):
raise HiveServer2Error(resp.status.errorMessage)
# datetime only supports 6 digits of microseconds but Impala supports 9.
# If present, the trailing 3 digits will be ignored without warning.
_TIMESTAMP_PATTERN = re.compile(r'(\d+-\d+-\d+ \d+:\d+:\d+(\.\d{,6})?)')
# Regex to extract year/month/date from date.
_DATE_PATTERN = re.compile(r'(\d+)-(\d+)-(\d+)')
def _parse_timestamp(value):
input_value = value
if value:
match = _TIMESTAMP_PATTERN.match(value)
if match:
if match.group(2):
format = '%Y-%m-%d %H:%M:%S.%f'
# use the pattern to truncate the value
value = match.group()
else:
format = '%Y-%m-%d %H:%M:%S'
value = datetime.datetime.strptime(value, format)
else:
raise Exception(
'Cannot convert "{}" into a datetime'.format(value))
else:
value = None
log.debug('%s => %s', input_value, value)
return value
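# A minimal sketch of the truncation behaviour noted above: nanosecond
# fractions are cut down to the six microsecond digits datetime can hold.
def _example_parse_timestamp():
    with_nanos = _parse_timestamp('2014-05-01 08:30:15.123456789')
    without_fraction = _parse_timestamp('2014-05-01 08:30:15')
    assert with_nanos.microsecond == 123456    # trailing three digits dropped
    assert without_fraction.microsecond == 0
    return with_nanos, without_fraction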
def _parse_date(value):
if value:
match = _DATE_PATTERN.match(value)
if match:
return datetime.date(int(match.group(1)), int(match.group(2)), int(match.group(3)))
else:
raise Exception(
'Cannot convert "{}" into a date'.format(value))
return value
# TODO: Add another decorator that runs the function in its own thread
def threaded(func):
# pylint: disable=unused-argument
raise NotImplementedError
def connect(host, port, timeout=None, use_ssl=False, ca_cert=None,
user=None, password=None, kerberos_service_name='impala',
auth_mechanism=None, krb_host=None, use_http_transport=False,
http_path='', auth_cookie_names=None, retries=3):
log.debug('Connecting to HiveServer2 %s:%s with %s authentication '
'mechanism', host, port, auth_mechanism)
if krb_host:
kerberos_host = krb_host
else:
kerberos_host = host
if use_http_transport:
# TODO(#362): Add server authentication with thrift 0.12.
if ca_cert:
raise NotSupportedError("Server authentication is not supported " +
"with HTTP endpoints")
transport = get_http_transport(host, port, http_path=http_path,
use_ssl=use_ssl, ca_cert=ca_cert,
auth_mechanism=auth_mechanism,
user=user, password=password,
kerberos_host=kerberos_host,
kerberos_service_name=kerberos_service_name,
auth_cookie_names=auth_cookie_names)
else:
sock = get_socket(host, port, use_ssl, ca_cert)
if timeout is not None:
timeout = timeout * 1000. # TSocket expects millis
if six.PY2:
sock.setTimeout(timeout)
elif six.PY3:
try:
# thriftpy has a release where set_timeout is missing
sock.set_timeout(timeout)
except AttributeError:
sock.socket_timeout = timeout
sock.connect_timeout = timeout
log.debug('sock=%s', sock)
transport = get_transport(sock, kerberos_host, kerberos_service_name,
auth_mechanism, user, password)
transport.open()
protocol = TBinaryProtocol(transport)
if six.PY2:
# ThriftClient == ImpalaHiveServer2Service.Client
service = ThriftClient(protocol)
elif six.PY3:
# ThriftClient == TClient
service = ThriftClient(ImpalaHiveServer2Service, protocol)
log.debug('transport=%s protocol=%s service=%s', transport, protocol,
service)
return HS2Service(service, retries=retries)
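# A minimal sketch of using connect(): it returns an HS2Service, which is then
# wrapped in a HiveServer2Connection to obtain cursors.  The host, port and
# query shown here are hypothetical placeholders.
def _example_connect_and_query(host='impalad.example.com', port=21050):
    service = connect(host, port, timeout=30, use_ssl=False)
    conn = HiveServer2Connection(service, default_db=None)
    try:
        cur = conn.cursor()
        cur.execute('SHOW DATABASES')
        return cur.fetchall()
    finally:
        conn.close()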
def _is_columnar_protocol(hs2_protocol_version):
return (hs2_protocol_version ==
TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6)
def _is_precolumnar_protocol(hs2_protocol_version):
return hs2_protocol_version in _pre_columnar_protocols
class Batch(object):
def __init__(self):
pass
def __len__(self):
return 0
def pop(self):
raise NotImplementedError("Cannot pop a Batch object")
def __iter__(self):
return self
def next(self):
return self.__next__()
def __next__(self):
if len(self) > 0:
return self.pop()
raise StopIteration
def __str__(self):
return 'Batch()'
class Column(object):
def __init__(self, data_type, values, nulls):
self.data_type = data_type
self.values = values
self.nulls = nulls
self.rows_left = len(self.values)
self.num_rows = self.rows_left
def __len__(self):
return self.rows_left
def __str__(self):
return 'Column(type={0}, values={1}, nulls={2})'.format(
self.data_type, self.values, self.nulls)
def pop(self):
if self.rows_left < 1:
raise StopIteration
pos = self.num_rows-self.rows_left
self.rows_left -= 1
if self.nulls[pos]:
return None
value = self.values[pos]
return value
class CBatch(Batch):
def __init__(self, trowset, expect_more_rows, schema, convert_types=True):
self.expect_more_rows = expect_more_rows
self.schema = schema
tcols = [_TTypeId_to_TColumnValue_getters[schema[i][1]](col)
for (i, col) in enumerate(trowset.columns)]
num_cols = len(tcols)
num_rows = len(tcols[0].values)
log.debug('CBatch: input TRowSet num_cols=%s num_rows=%s tcols=%s',
num_cols, num_rows, tcols)
self.columns = []
for j in range(num_cols):
type_ = schema[j][1]
nulls = tcols[j].nulls
values = tcols[j].values
# thriftpy sometimes returns unicode instead of bytes
if six.PY3 and isinstance(nulls, str):
nulls = nulls.encode('utf-8')
is_null = bitarray(endian='little')
is_null.frombytes(nulls)
# Ref HUE-2722, HiveServer2 sometimes does not add trailing '\x00'
if len(values) > len(nulls):
to_append = ((len(values) - len(nulls) + 7) // 8)
is_null.frombytes(b'\x00' * to_append)
if convert_types:
values = self._convert_values(type_, is_null, values)
self.columns.append(Column(type_, values, is_null))
def _convert_values(self, type_, is_null, values):
# pylint: disable=consider-using-enumerate
if type_ == 'TIMESTAMP':
for i in range(len(values)):
values[i] = (None if is_null[i] else
_parse_timestamp(values[i]))
elif type_ == 'DECIMAL':
for i in range(len(values)):
values[i] = (None if is_null[i] else Decimal(values[i]))
elif type_ == 'DATE':
for i in range(len(values)):
values[i] = (None if is_null[i] else _parse_date(values[i]))
return values
def __len__(self):
return len(self.columns[0]) if len(self.columns) > 0 else 0
def pop(self):
return tuple([c.pop() for c in self.columns])
def __str__(self):
col_string = ','.join([str(col) for col in self.columns])
return 'CBatch({0})'.format(col_string)
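# A minimal sketch of the little-endian null bitmap handling in CBatch,
# including the HUE-2722 case where the server sends fewer bitmap bytes than
# there are values.  The column values are hypothetical.
def _example_null_bitmap():
    values = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']   # nine values
    nulls = b'\x02'                                          # one bitmap byte: row 1 is NULL
    is_null = bitarray(endian='little')
    is_null.frombytes(nulls)
    if len(values) > len(nulls):                             # pad so every row has a bit
        is_null.frombytes(b'\x00' * ((len(values) - len(nulls) + 7) // 8))
    decoded = [None if is_null[i] else v for i, v in enumerate(values)]
    assert decoded[1] is None and decoded[0] == 'a'
    return decoded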
class RBatch(Batch):
def __init__(self, trowset, expect_more_rows, schema):
log.debug('RBatch: input TRowSet: %s', trowset)
self.expect_more_rows = expect_more_rows
self.schema = schema
self.rows = []
for trow in trowset.rows:
row = []
for (i, col_val) in enumerate(trow.colVals):
type_ = schema[i][1]
value = _TTypeId_to_TColumnValue_getters[type_](col_val).value
if type_ == 'TIMESTAMP':
value = _parse_timestamp(value)
elif type_ == 'DECIMAL':
if value:
value = Decimal(value)
row.append(value)
self.rows.append(tuple(row))
def __len__(self):
return len(self.rows)
def pop(self):
return self.rows.pop(0)
class ThriftRPC(object):
def __init__(self, client, retries=3):
self.client = client
self.retries = retries
def _rpc(self, func_name, request, retry_on_http_error=False):
self._log_request(func_name, request)
response = self._execute(func_name, request, retry_on_http_error)
self._log_response(func_name, response)
err_if_rpc_not_ok(response)
return response
def _execute(self, func_name, request, retry_on_http_error=False):
# pylint: disable=protected-access
# get the thrift transport
transport = self.client._iprot.trans
tries_left = self.retries
last_http_exception = None
while tries_left > 0:
try:
log.debug('Attempting to open transport (tries_left=%s)',
tries_left)
open_transport(transport)
log.debug('Transport opened')
func = getattr(self.client, func_name)
return func(request)
except socket.error:
log.exception('Failed to open transport (tries_left=%s)',
tries_left)
last_http_exception = None
except TTransportException:
log.exception('Failed to open transport (tries_left=%s)',
tries_left)
last_http_exception = None
except HttpError as h:
if not retry_on_http_error:
log.debug('Caught HttpError %s %s in %s which is not retryable',
h, str(h.body or ''), func_name)
raise
last_http_exception = h
if tries_left > 1:
retry_secs = None
retry_after = h.http_headers.get('Retry-After', None)
if retry_after:
try:
retry_secs = int(retry_after)
except ValueError:
retry_secs = None
if retry_secs:
log.debug("sleeping after seeing Retry-After value of %d", retry_secs)
log.debug('Caught HttpError %s %s in %s (tries_left=%s), retry after %d secs',
h, str(h.body or ''), func_name, tries_left, retry_secs)
time.sleep(retry_secs)
else:
retry_secs = 1 # Future: use exponential backoff?
log.debug("sleeping for %d second before retrying", retry_secs)
time.sleep(retry_secs)
log.debug('Caught HttpError %s %s in %s (tries_left=%s)',
h, str(h.body or ''), func_name, tries_left)
except Exception:
raise
log.debug('Closing transport (tries_left=%s)', tries_left)
transport.close()
tries_left -= 1
if last_http_exception is not None:
raise last_http_exception
raise HiveServer2Error('Failed after retrying {0} times'
.format(self.retries))
def _operation(self, kind, request, retry_on_http_error=False):
resp = self._rpc(kind, request, retry_on_http_error)
return self._get_operation(resp.operationHandle)
def _log_request(self, kind, request):
log.debug('%s: req=%s', kind, request)
def _log_response(self, kind, response):
log.debug('%s: resp=%s', kind, response)
def open_transport(transport):
"""
Open transport, accounting for API differences between thrift versus thriftpy2,
as well as TBufferedTransport versus THttpClient.
"""
# python2 and thrift, or any THttpClient
if 'isOpen' in dir(transport):
transport_is_open = transport.isOpen()
# python3 and thriftpy2 (for TBufferedTransport only)
if 'is_open' in dir(transport):
transport_is_open = transport.is_open()
if not transport_is_open:
transport.open()
class HS2Service(ThriftRPC):
def __init__(self, thrift_client, retries=3):
ThriftRPC.__init__(self, thrift_client, retries=retries)
def close(self):
# pylint: disable=protected-access
log.debug('close_service: client=%s', self.client)
self.client._iprot.trans.close()
def reconnect(self):
# pylint: disable=protected-access
log.debug('reconnect: client=%s', self.client)
self.client._iprot.trans.close()
self.client._iprot.trans.open()
def open_session(self, user, configuration=None):
protocol = TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6
req = TOpenSessionReq(client_protocol=protocol,
username=user,
configuration=configuration)
# OpenSession rpcs are idempotent and so ok to retry. If the client gets
# disconnected and the server successfully opened a session, the client
# will retry and rely on server to clean up the session.
resp = self._rpc('OpenSession', req, True)
return HS2Session(self, resp.sessionHandle,
resp.configuration,
resp.serverProtocolVersion)
class HS2Session(ThriftRPC):
def __init__(self, service, handle, config, hs2_protocol_version,
retries=3):
# pylint: disable=protected-access
self.service = service
self.handle = handle
self.config = config
self.hs2_protocol_version = hs2_protocol_version
if hs2_protocol_version not in TProtocolVersion._VALUES_TO_NAMES:
raise HiveServer2Error("Got HiveServer2 version {0}; "
"expected V1 - V6"
.format(hs2_protocol_version))
ThriftRPC.__init__(self, self.service.client, retries=retries)
def close(self):
req = TCloseSessionReq(sessionHandle=self.handle)
# CloseSession rpcs don't retry as a session cannot be closed twice.
self._rpc('CloseSession', req, False)
def execute(self, statement, configuration=None, run_async=False):
req = TExecuteStatementReq(sessionHandle=self.handle,
statement=statement,
confOverlay=configuration,
runAsync=run_async)
# Do not try to retry http requests.
        # Read queries should be idempotent, but most DML queries are not. Retrying
        # query execution from the client can also be expensive, so it only makes
        # sense to do so if the server is aware of the retries.
return self._operation('ExecuteStatement', req, False)
def get_databases(self, schema='.*'):
req = TGetSchemasReq(sessionHandle=self.handle, schemaName=schema)
return self._operation('GetSchemas', req, True)
def get_tables(self, database='.*', table_like='.*'):
req = TGetTablesReq(sessionHandle=self.handle,
schemaName=database,
tableName=table_like)
return self._operation('GetTables', req, True)
def get_table_schema(self, table, database='.*'):
req = TGetColumnsReq(sessionHandle=self.handle,
schemaName=database,
tableName=table, columnName='.*')
return self._operation('GetColumns', req, True)
def get_functions(self, database='.*'):
# TODO: need to test this one especially
req = TGetFunctionsReq(sessionHandle=self.handle,
schemaName=database,
functionName='.*')
return self._operation('GetFunctions', req, True)
def database_exists(self, db_name):
op = self.get_databases(schema=db_name)
# this only fetches default max_rows, but there should only be one row
# ideally
results = op.fetch()
exists = False
for result in results:
if result[0].lower() == db_name.lower():
exists = True
op.close()
return exists
def table_exists(self, table, database='.*'):
op = self.get_tables(database=database, table_like=table)
results = op.fetch()
exists = False
for result in results:
if result[2].lower() == table.lower():
exists = True
op.close()
return exists
def ping(self):
req = TGetInfoReq(sessionHandle=self.handle,
infoType=TGetInfoType.CLI_SERVER_NAME)
log.debug('ping: req=%s', req)
try:
resp = self.client.GetInfo(req)
except TTransportException:
log.exception('ping: failed')
return False
log.debug('ping: resp=%s', resp)
try:
err_if_rpc_not_ok(resp)
except HiveServer2Error:
log.exception('ping: failed')
return False
return True
def _get_operation(self, handle):
return Operation(self, handle)
class Operation(ThriftRPC):
def __init__(self, session, handle, retries=3):
self.session = session
self.handle = handle
self._schema = None
self._state_has_result_set = None
ThriftRPC.__init__(self, self.session.client, retries=retries)
@property
def has_result_set(self):
# When HIVE_CLI_SERVICE_PROTOCOL_V10 or later API is used and async compilation is
        # enabled, self.handle.hasResultSet is no longer set.
# In this case self._state_has_result_set should be used instead.
if self._state_has_result_set is not None:
return self._state_has_result_set
else:
return self.handle.hasResultSet
def update_has_result_set(self, state):
self._state_has_result_set = state.hasResultSet
def get_status(self):
# pylint: disable=protected-access
req = TGetOperationStatusReq(operationHandle=self.handle)
# GetOperationStatus rpc is idempotent and so safe to retry.
resp = self._rpc('GetOperationStatus', req, True)
self.update_has_result_set(resp)
return TOperationState._VALUES_TO_NAMES[resp.operationState]
def get_state(self):
req = TGetOperationStatusReq(operationHandle=self.handle)
# GetOperationStatus rpc is idempotent and so safe to retry.
resp = self._rpc('GetOperationStatus', req, True)
self.update_has_result_set(resp)
return resp
def get_log(self, max_rows=1024, orientation=TFetchOrientation.FETCH_NEXT):
try:
req = TGetLogReq(operationHandle=self.handle)
# GetLog rpc is idempotent and so safe to retry.
log = self._rpc('GetLog', req, True).log
except TApplicationException as e: # raised if Hive is used
if not e.type == TApplicationException.UNKNOWN_METHOD:
raise
req = TFetchResultsReq(operationHandle=self.handle,
orientation=orientation,
maxRows=max_rows,
fetchType=1)
resp = self._rpc('FetchResults', req, False)
schema = [('Log', 'STRING', None, None, None, None, None)]
log = self._wrap_results(resp.results, schema, convert_types=True)
log = '\n'.join(l[0] for l in log)
return log
def cancel(self):
req = TCancelOperationReq(operationHandle=self.handle)
# CancelOperation rpc is idempotent and so safe to retry.
return self._rpc('CancelOperation', req, True)
def close(self):
req = TCloseOperationReq(operationHandle=self.handle)
# CloseOperation rpc is not idempotent for dml and we're not sure
# here if this is dml or not.
return self._rpc('CloseOperation', req, False)
def get_profile(self, profile_format=TRuntimeProfileFormat.STRING):
req = TGetRuntimeProfileReq(operationHandle=self.handle,
sessionHandle=self.session.handle,
format=profile_format)
# GetRuntimeProfile rpc is idempotent and so safe to retry.
resp = self._rpc('GetRuntimeProfile', req, True)
if profile_format == TRuntimeProfileFormat.THRIFT:
return resp.thrift_profile
return resp.profile
def get_summary(self):
req = TGetExecSummaryReq(operationHandle=self.handle,
sessionHandle=self.session.handle)
# GetExecSummary rpc is idempotent and so safe to retry.
resp = self._rpc('GetExecSummary', req, True)
return resp.summary
def fetch(self, schema=None, max_rows=1024,
orientation=TFetchOrientation.FETCH_NEXT,
convert_types=True):
if not self.has_result_set:
log.debug('fetch_results: has_result_set=False')
return None
# the schema is necessary to pull the proper values (i.e., coalesce)
if schema is None:
schema = self.get_result_schema()
req = TFetchResultsReq(operationHandle=self.handle,
orientation=orientation,
maxRows=max_rows)
# FetchResults rpc is not idempotent unless the client and server communicate and
# results are kept around for retry to be successful.
resp = self._rpc('FetchResults', req, False)
return self._wrap_results(resp.results, resp.hasMoreRows, schema,
convert_types=convert_types)
def _wrap_results(self, results, expect_more_rows, schema, convert_types=True):
if self.is_columnar:
log.debug('fetch_results: constructing CBatch')
return CBatch(results, expect_more_rows, schema, convert_types=convert_types)
else:
log.debug('fetch_results: constructing RBatch')
return RBatch(results, expect_more_rows, schema)
@property
def is_columnar(self):
protocol = self.session.hs2_protocol_version
return _is_columnar_protocol(protocol)
def get_result_schema(self):
if not self.has_result_set:
log.debug('get_result_schema: has_result_set=False')
return None
req = TGetResultSetMetadataReq(operationHandle=self.handle)
resp = self._rpc('GetResultSetMetadata', req, True)
schema = []
for column in resp.schema.columns:
# pylint: disable=protected-access
name = column.columnName
entry = column.typeDesc.types[0].primitiveEntry
type_ = TTypeId._VALUES_TO_NAMES[entry.type].split('_')[0]
if type_ == 'DECIMAL':
qualifiers = entry.typeQualifiers.qualifiers
precision = qualifiers['precision'].i32Value
scale = qualifiers['scale'].i32Value
schema.append((name, type_, None, None,
precision, scale, None))
else:
schema.append((name, type_, None, None, None, None, None))
log.debug('get_result_schema: schema=%s', schema)
return schema
def build_summary_table(summary, idx, is_fragment_root, indent_level, output):
"""Direct translation of Coordinator::PrintExecSummary() to recursively
build a list of rows of summary statistics, one per exec node
summary: the TExecSummary object that contains all the summary data
idx: the index of the node to print
is_fragment_root: true if the node to print is the root of a fragment (and
therefore feeds into an exchange)
indent_level: the number of spaces to print before writing the node's
label, to give the appearance of a tree. The 0th child of a node has the
same indent_level as its parent. All other children have an indent_level
of one greater than their parent.
output: the list of rows into which to append the rows produced for this
node and its children.
Returns the index of the next exec node in summary.exec_nodes that should
be processed, used internally to this method only.
"""
# pylint: disable=too-many-locals
attrs = ["latency_ns", "cpu_time_ns", "cardinality", "memory_used"]
# Initialise aggregate and maximum stats
agg_stats, max_stats = TExecStats(), TExecStats()
for attr in attrs:
setattr(agg_stats, attr, 0)
setattr(max_stats, attr, 0)
node = summary.nodes[idx]
for stats in node.exec_stats:
for attr in attrs:
val = getattr(stats, attr)
if val is not None:
setattr(agg_stats, attr, getattr(agg_stats, attr) + val)
setattr(max_stats, attr, max(getattr(max_stats, attr), val))
if len(node.exec_stats) > 0:
avg_time = agg_stats.latency_ns / len(node.exec_stats)
else:
avg_time = 0
# If the node is a broadcast-receiving exchange node, the cardinality of
# rows produced is the max over all instances (which should all have
# received the same number of rows). Otherwise, the cardinality is the sum
# over all instances which process disjoint partitions.
if node.is_broadcast and is_fragment_root:
cardinality = max_stats.cardinality
else:
cardinality = agg_stats.cardinality
est_stats = node.estimated_stats
label_prefix = ""
if indent_level > 0:
label_prefix = "|"
if is_fragment_root:
label_prefix += " " * indent_level
else:
label_prefix += "--" * indent_level
def prettyprint(val, units, divisor):
for unit in units:
if val < divisor:
if unit == units[0]:
return "%d%s" % (val, unit)
else:
return "%3.2f%s" % (val, unit)
val /= divisor
def prettyprint_bytes(byte_val):
return prettyprint(
byte_val, [' B', ' KB', ' MB', ' GB', ' TB'], 1024.0)
def prettyprint_units(unit_val):
return prettyprint(unit_val, ["", "K", "M", "B"], 1000.0)
def prettyprint_time(time_val):
return prettyprint(time_val, ["ns", "us", "ms", "s"], 1000.0)
row = [label_prefix + node.label,
len(node.exec_stats),
prettyprint_time(avg_time),
prettyprint_time(max_stats.latency_ns),
prettyprint_units(cardinality),
prettyprint_units(est_stats.cardinality),
prettyprint_bytes(max_stats.memory_used),
prettyprint_bytes(est_stats.memory_used),
node.label_detail]
output.append(row)
try:
sender_idx = summary.exch_to_sender_map[idx]
# This is an exchange node, so the sender is a fragment root, and
# should be printed next.
build_summary_table(summary, sender_idx, True, indent_level, output)
except (KeyError, TypeError):
# Fall through if idx not in map, or if exch_to_sender_map itself is
# not set
pass
idx += 1
if node.num_children > 0:
first_child_output = []
idx = build_summary_table(summary, idx, False, indent_level,
first_child_output)
# pylint: disable=unused-variable
# TODO: is child_idx supposed to be unused? See #120
for child_idx in range(1, node.num_children):
# All other children are indented (we only have 0, 1 or 2 children
# for every exec node at the moment)
idx = build_summary_table(summary, idx, False, indent_level + 1,
output)
output += first_child_output
return idx
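
# Illustrative usage sketch (not part of the original client): drive
# build_summary_table over a tiny stand-in exec summary.  In a real run
# `summary` is the TExecSummary taken from a query profile; the attribute-bag
# objects below only mimic the fields the function reads.
class _AttrBag(object):
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)

def _summary_table_sketch():
    stats = _AttrBag(latency_ns=1200000, cpu_time_ns=900000,
                     cardinality=42, memory_used=1024)
    node = _AttrBag(label='00:SCAN', label_detail='example table',
                    exec_stats=[stats], estimated_stats=stats,
                    is_broadcast=False, num_children=0)
    summary = _AttrBag(nodes=[node], exch_to_sender_map={})
    rows = []
    build_summary_table(summary, 0, True, 0, rows)
    for row in rows:
        print(row)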
| 37.550938
| 105
| 0.6072
|
b409bdbc44fbcab668c63ba1560d1d9b02d4e573
| 3,433
|
py
|
Python
|
dexter/tasks.py
|
CodeForAfrica/mma-dexter
|
10d7f0c51bb935399c708a432699e06418049a33
|
[
"Apache-2.0"
] | null | null | null |
dexter/tasks.py
|
CodeForAfrica/mma-dexter
|
10d7f0c51bb935399c708a432699e06418049a33
|
[
"Apache-2.0"
] | 32
|
2019-07-25T06:17:31.000Z
|
2019-08-05T02:41:42.000Z
|
dexter/tasks.py
|
CodeForAfricaLabs/mma-dexter
|
10d7f0c51bb935399c708a432699e06418049a33
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import
import logging
from datetime import date, timedelta
from dateutil.parser import parse
from dexter.app import celery_app as app
from dexter.processing import DocumentProcessor, DocumentProcessorNT
# force configs for API keys to be set
import dexter.core
# This is a collection of periodic tasks for Dexter, using
# Celery to drive task completion.
log = logging.getLogger(__name__)
@app.task
def back_process_feeds():
""" Enqueue a task to fetch yesterday's feeds. """
if date.today() == date(2019, 6, 6):
d1 = date(2019, 6, 3)
d2 = date(2019, 4, 21)
# days = [d1 + timedelta(days=x) for x in range((d2 - d1).days + 1)]
days = [d1]
filter_parm = ''
for d in days:
fetch_filtered_daily_feeds.delay(d.isoformat(), filter_parm)
elif date.today() == date(2019, 6, 7):
d1 = date(2019, 6, 4)
d2 = date(2019, 4, 21)
# days = [d1 + timedelta(days=x) for x in range((d2 - d1).days + 1)]
days = [d1]
filter_parm = ''
for d in days:
fetch_filtered_daily_feeds.delay(d.isoformat(), filter_parm)
else:
print 'Already Done!'
@app.task
def fetch_yesterdays_feeds():
""" Enqueue a task to fetch yesterday's feeds. """
yesterday = date.today() - timedelta(days=1)
fetch_daily_feeds.delay(yesterday.isoformat())
# retry after 30 minutes, retry for up to 7 days
@app.task(bind=True, default_retry_delay=30*60, max_retries=7*24*2)
def fetch_filtered_daily_feeds(self, day, filter_parm):
""" Fetch feed of URLs to crawl and queue up a task to grab and process
each url. """
try:
day = parse(day)
dp = DocumentProcessorNT()
count = 0
for item in dp.fetch_filtered_daily_feed_items(day, filter_parm):
get_feed_item.delay(item)
count += 1
except Exception as e:
log.error("Error processing daily feeds for %s" % day, exc_info=e)
self.retry(exc=e)
if count == 0:
# nothing to do, retry later
self.retry()
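
# Note (sketch, not in the original source): the "7 days" retry window quoted in
# the comments here follows directly from the decorator arguments --
# default_retry_delay=30*60 gives 1800 seconds between attempts and
# max_retries=7*24*2 gives 336 attempts, so 1800 * 336 = 604800 seconds,
# which is exactly seven days.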
# retry after 30 minutes, retry for up to 7 days
@app.task(bind=True, default_retry_delay=30*60, max_retries=7*24*2)
def fetch_daily_feeds(self, day):
""" Fetch feed of URLs to crawl and queue up a task to grab and process
each url. """
try:
day = parse(day)
dp = DocumentProcessorNT()
count = 0
for item in dp.fetch_daily_feed_items(day):
get_feed_item.delay(item)
count += 1
except Exception as e:
log.error("Error processing daily feeds for %s" % day, exc_info=e)
self.retry(exc=e)
if count == 0:
# nothing to do, retry later
self.retry()
# retry after 30 seconds, at most 2 retries, rate-limited to 10 tasks per minute
@app.task(bind=True, rate_limit="10/m", default_retry_delay=30, max_retries=2)
def get_feed_item(self, item):
""" Fetch and process a document feed item. """
try:
dp = DocumentProcessorNT()
dp.process_feed_item(item)
except Exception as e:
log.error("Error processing feed item: %s" % item, exc_info=e)
self.retry()
@app.task
def backfill_taxonomies():
""" Enqueue a task to backfill taxonomies """
try:
dp = DocumentProcessorNT()
dp.backfill_taxonomies()
except Exception as e:
log.error("Error backfilling taxonomies: %s" % e.message, exc_info=e)
| 28.371901
| 78
| 0.635304
|
610c0fde4dbf60c131f13bf71b6057b89daf8015
| 2,205
|
py
|
Python
|
tests/data/metrics/confusion_matrix/test_confusion_matrix_data_row.py
|
Recycleye/labelbox-python
|
0a7135b10c3cdf5fb0e51d9ba15ac8c6ada648fb
|
[
"Apache-2.0"
] | null | null | null |
tests/data/metrics/confusion_matrix/test_confusion_matrix_data_row.py
|
Recycleye/labelbox-python
|
0a7135b10c3cdf5fb0e51d9ba15ac8c6ada648fb
|
[
"Apache-2.0"
] | null | null | null |
tests/data/metrics/confusion_matrix/test_confusion_matrix_data_row.py
|
Recycleye/labelbox-python
|
0a7135b10c3cdf5fb0e51d9ba15ac8c6ada648fb
|
[
"Apache-2.0"
] | null | null | null |
from pytest_cases import pytest_parametrize_plus, fixture_ref
from labelbox.data.metrics.confusion_matrix.confusion_matrix import confusion_matrix_metric
@pytest_parametrize_plus("tool_examples", [
fixture_ref('polygon_pairs'),
fixture_ref('rectangle_pairs'),
fixture_ref('mask_pairs'),
fixture_ref('line_pairs'),
fixture_ref('point_pairs')
])
def test_overlapping_objects(tool_examples):
for example in tool_examples:
score = confusion_matrix_metric(example.ground_truths,
example.predictions)
if len(example.expected) == 0:
assert len(score) == 0
else:
expected = [0, 0, 0, 0]
for expected_values in example.expected.values():
for idx in range(4):
expected[idx] += expected_values[idx]
assert score[0].value == tuple(
expected), f"{example.predictions},{example.ground_truths}"
@pytest_parametrize_plus(
"tool_examples",
[fixture_ref('checklist_pairs'),
fixture_ref('radio_pairs')])
def test_overlapping_classifications(tool_examples):
for example in tool_examples:
score = confusion_matrix_metric(example.ground_truths,
example.predictions)
if len(example.expected) == 0:
assert len(score) == 0
else:
expected = [0, 0, 0, 0]
for expected_values in example.expected.values():
for idx in range(4):
expected[idx] += expected_values[idx]
assert score[0].value == tuple(
expected), f"{example.predictions},{example.ground_truths}"
def test_partial_overlap(pair_iou_thresholds):
for example in pair_iou_thresholds:
for iou in example.expected.keys():
score = confusion_matrix_metric(example.predictions,
example.ground_truths,
iou=iou)
assert score[0].value == tuple(
example.expected[iou]
), f"{example.predictions},{example.ground_truths}"
| 38.017241
| 91
| 0.606803
|
a5356a027b7ce61dd39525a06a08aaf1266490ff
| 89
|
py
|
Python
|
src/ape/convert/__init__.py
|
benjyz/ape
|
b5f3ff28c97c463a764881032cb2cfcd21201d07
|
[
"Apache-2.0"
] | 210
|
2021-04-29T05:42:42.000Z
|
2022-03-31T15:50:17.000Z
|
src/ape/convert/__init__.py
|
benjyz/ape
|
b5f3ff28c97c463a764881032cb2cfcd21201d07
|
[
"Apache-2.0"
] | 370
|
2021-04-29T01:54:32.000Z
|
2022-03-31T19:19:29.000Z
|
src/ape/convert/__init__.py
|
benjyz/ape
|
b5f3ff28c97c463a764881032cb2cfcd21201d07
|
[
"Apache-2.0"
] | 25
|
2021-04-29T05:08:50.000Z
|
2022-03-11T20:43:56.000Z
|
from eth_utils import to_checksum_address as to_address
__all__ = [
"to_address",
]
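
# Usage sketch (not part of the original module): `to_address` is simply
# eth_utils.to_checksum_address re-exported under a shorter name, so it takes a
# hex address string and returns the same address with the EIP-55 mixed-case
# checksum applied.  The address below is illustrative only.
#
#   checksummed = to_address("0xd3cda913deb6f67967b99d67acdfa1712c293601")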
| 14.833333
| 55
| 0.752809
|
3ea677dfa854e664438cc9ba559123b44399684a
| 21,560
|
py
|
Python
|
ufss/HLG/dipole_operator.py
|
peterarose/UFSS
|
1dded9c94493b8d681cd8620d45d883a79e41ae3
|
[
"MIT"
] | 8
|
2020-08-18T12:19:34.000Z
|
2022-01-26T17:33:47.000Z
|
ufss/vibronic_eigenstates/dipole_operator.py
|
gharib85/ufss
|
9aea2da19127c697e5b344548dbd8e152925b8b2
|
[
"MIT"
] | 4
|
2020-09-03T11:43:44.000Z
|
2022-03-25T04:13:41.000Z
|
ufss/vibronic_eigenstates/dipole_operator.py
|
gharib85/ufss
|
9aea2da19127c697e5b344548dbd8e152925b8b2
|
[
"MIT"
] | 2
|
2020-08-18T12:19:39.000Z
|
2020-09-14T00:58:58.000Z
|
#Standard python libraries
import numpy as np
import os
import itertools
import warnings
from scipy.sparse import csr_matrix, kron, identity
from .eigen_generator import EigenGenerator
from .eigenstates import LadderOperators
class CalculateCartesianDipoleOperatorLowMemory(EigenGenerator):
"""This class calculates the dipole operator in the eigenbasis of
the system hamiltonian directly in the cartesian basis"""
def __init__(self,parameter_file_path,*,mask_by_occupation_num=True):
super().__init__(parameter_file_path,mask_by_occupation_num=mask_by_occupation_num)
self.base_path = parameter_file_path
self.load_params()
self.set_vibrations()
self.set_H()
self.set_molecular_dipoles()
self.calculate_mu()
self.save_mu()
def set_molecular_dipoles(self,*,dipoles = None):
"""Load molecular dipoles from params file, or override with input
dipoles - must be a numpy ndarray, with shape (n,3) where n is the number of sites"""
if type(dipoles) is np.ndarray:
self.molecular_dipoles = dipoles
else:
self.molecular_dipoles = np.array(self.params['dipoles'],dtype='float')
self.set_single_to_double_dipole_matrix()
def set_single_to_double_dipole_matrix(self):
"""Given a set of dipoles for transitions from the ground to the
singly excited states, constructs the dipole transitions that take the
system from the singly excited states to the various doubly excited states
"""
singly_excited = np.arange(self.molecular_dipoles.shape[0])
doubly_excited = list(itertools.combinations(singly_excited,2))
mat = np.zeros((len(singly_excited),len(doubly_excited),3))
for i in range(len(singly_excited)):
for j in range(len(doubly_excited)):
tup = doubly_excited[j]
if i == tup[0]:
mat[i,j,:] = self.molecular_dipoles[singly_excited[tup[1]]]
elif i == tup[1]:
mat[i,j,:] = self.molecular_dipoles[singly_excited[tup[0]]]
self.molecular_dipoles_SEM_to_DEM = mat
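        # Worked example (comment only, not in the original source): for three
        # sites the doubly excited states are the pairs produced by
        # itertools.combinations, i.e. (0,1), (0,2), (1,2).  Row i of `mat` is
        # nonzero only in the columns whose pair contains site i, and the entry
        # is the dipole of the other site in the pair: mat[0, (0,1)] = dipole_1,
        # mat[0, (0,2)] = dipole_2, and mat[0, (1,2)] = 0.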
def set_H(self,*,truncation_size = None):
if truncation_size:
self.truncation_size = truncation_size
self.set_vibrations()
self.H0 = self.manifold_hamiltonian(0)
self.H1 = self.manifold_hamiltonian(1)
if 'DEM' in self.manifolds:
self.H2 = self.manifold_hamiltonian(2)
def dipole_matrix(self,starting_manifold_num,next_manifold_num,pol):
"""Calculates the dipole matrix that connects from one
manifold to the next, using the known dipole moments and the efield
polarization, determined by the pulse number.
"""
upper_manifold_num = max(starting_manifold_num,next_manifold_num)
if abs(starting_manifold_num - next_manifold_num) != 1:
warnings.warn('Can only move from manifolds 0 to 1 or 1 to 2')
return None
# Condon approximation
vib_identity = identity(self.H0.shape[0])
if upper_manifold_num == 1:
d_vec = self.molecular_dipoles.dot(pol)
d_mat = d_vec[:,np.newaxis]
overlap_matrix = kron(d_mat,vib_identity)
elif upper_manifold_num == 2:
d_mat = self.molecular_dipoles_SEM_to_DEM.dot(pol)
overlap_matrix = kron(d_mat.T,vib_identity)
if starting_manifold_num > next_manifold_num:
# Take transpose if transition is down rather than up
overlap_matrix = np.conjugate(overlap_matrix.T)
return overlap_matrix.tocsr()
    def calculate_mu(self):
        """Assemble the Cartesian dipole components one axis at a time, which
        keeps only a single component in memory while it is being built."""
        self.calculate_mu_x()
        self.calculate_mu_y()
        self.calculate_mu_z()
        self.combine_mu()

    def calculate_mu_x(self):
        x = np.array([1,0,0])
        e0 = self.eigenvectors[0]
        e1 = self.eigenvectors[1]
        mu10_x = self.dipole_matrix(0,1,x)
        mu10_x = mu10_x.dot(e0)
        # store the component so combine_mu can assemble the full array
        self.mu10_x = e1.T.dot(mu10_x)
        if 'DEM' in self.manifolds:
            mu21_x = self.dipole_matrix(1,2,x)
            e2 = self.eigenvectors[2]
            mu21_x = mu21_x.dot(e1)
            self.mu21_x = e2.T.dot(mu21_x)

    def calculate_mu_y(self):
        y = np.array([0,1,0])
        e0 = self.eigenvectors[0]
        e1 = self.eigenvectors[1]
        mu10_y = self.dipole_matrix(0,1,y)
        mu10_y = mu10_y.dot(e0)
        self.mu10_y = e1.T.dot(mu10_y)
        if 'DEM' in self.manifolds:
            mu21_y = self.dipole_matrix(1,2,y)
            e2 = self.eigenvectors[2]
            mu21_y = mu21_y.dot(e1)
            self.mu21_y = e2.T.dot(mu21_y)

    def calculate_mu_z(self):
        z = np.array([0,0,1])
        e0 = self.eigenvectors[0]
        e1 = self.eigenvectors[1]
        mu10_z = self.dipole_matrix(0,1,z)
        mu10_z = mu10_z.dot(e0)
        self.mu10_z = e1.T.dot(mu10_z)
        if 'DEM' in self.manifolds:
            mu21_z = self.dipole_matrix(1,2,z)
            e2 = self.eigenvectors[2]
            mu21_z = mu21_z.dot(e1)
            self.mu21_z = e2.T.dot(mu21_z)

    def combine_mu(self):
        mu10 = np.zeros((self.mu10_x.shape[0],self.mu10_x.shape[1],3))
        mu10[:,:,0] = self.mu10_x
        mu10[:,:,1] = self.mu10_y
        mu10[:,:,2] = self.mu10_z
        self.mu = {'GSM_to_SEM':mu10}
        if 'DEM' in self.manifolds:
            mu21 = np.zeros((self.mu21_x.shape[0],self.mu21_x.shape[1],3))
            mu21[:,:,0] = self.mu21_x
            mu21[:,:,1] = self.mu21_y
            mu21[:,:,2] = self.mu21_z
            self.mu['SEM_to_DEM'] = mu21
def save_mu(self):
np.savez(os.path.join(self.base_path,'mu.npz'),**self.mu)
class CalculateCartesianDipoleOperator(EigenGenerator):
"""This class calculates the dipole operator in the eigenbasis of
the system hamiltonian directly in the cartesian basis"""
def __init__(self,parameter_file_path,*,mask_by_occupation_num=True):
super().__init__(parameter_file_path,mask_by_occupation_num=mask_by_occupation_num)
self.base_path = parameter_file_path
self.load_params()
self.Ladder = LadderOperators(self.truncation_size)
self.set_vibrations()
self.set_H()
self.set_molecular_dipoles()
self.calculate_mu()
self.save_mu()
def set_molecular_dipoles(self,*,dipoles = None):
"""Load molecular dipoles from params file, or override with input
dipoles - must be a numpy ndarray, with shape (n,3) where n is the number of sites"""
if type(dipoles) is np.ndarray:
self.molecular_dipoles = dipoles
else:
self.molecular_dipoles = np.array(self.params['dipoles'],dtype='float')
self.set_single_to_double_dipole_matrix()
def set_single_to_double_dipole_matrix(self):
"""Given a set of dipoles for transitions from the ground to the
singly excited states, constructs the dipole transitions that take the
system from the singly excited states to the various doubly excited states
"""
singly_excited = np.arange(self.molecular_dipoles.shape[0])
doubly_excited = list(itertools.combinations(singly_excited,2))
mat = np.zeros((len(singly_excited),len(doubly_excited),3))
for i in range(len(singly_excited)):
for j in range(len(doubly_excited)):
tup = doubly_excited[j]
if i == tup[0]:
mat[i,j,:] = self.molecular_dipoles[singly_excited[tup[1]]]
elif i == tup[1]:
mat[i,j,:] = self.molecular_dipoles[singly_excited[tup[0]]]
self.molecular_dipoles_SEM_to_DEM = mat
def set_H(self,*,truncation_size = None):
if truncation_size:
self.truncation_size = truncation_size
self.set_vibrations()
self.H0 = self.manifold_hamiltonian(0)
self.H1 = self.manifold_hamiltonian(1)
if 'DEM' in self.manifolds:
self.H2 = self.manifold_hamiltonian(2)
def dipole_matrix(self,starting_manifold_num,next_manifold_num,pol):
"""Calculates the dipole matrix that connects from one
manifold to the next, using the known dipole moments and the efield
polarization, determined by the pulse number.
"""
upper_manifold_num = max(starting_manifold_num,next_manifold_num)
if abs(starting_manifold_num - next_manifold_num) != 1:
warnings.warn('Can only move from manifolds 0 to 1 or 1 to 2')
return None
# Condon approximation
vib_identity = identity(self.H0.shape[0])
if upper_manifold_num == 1:
d_vec = self.molecular_dipoles.dot(pol)
d_mat = d_vec[:,np.newaxis]
overlap_matrix = kron(d_mat,vib_identity)
elif upper_manifold_num == 2:
d_mat = self.molecular_dipoles_SEM_to_DEM.dot(pol)
overlap_matrix = kron(d_mat.T,vib_identity)
if starting_manifold_num > next_manifold_num:
# Take transpose if transition is down rather than up
overlap_matrix = np.conjugate(overlap_matrix.T)
return overlap_matrix.tocsr()
def dipole_matrix_nonCondon(self,starting_manifold_num,next_manifold_num,nonCondonList,*,order=1):
"""Calculates the dipole matrix that connects from one
manifold to the next, using the known dipole moments and the efield
polarization, determined by the pulse number.
Args:
nonCondonList (list): list of floats describing condon violation of each vibrational mode
Kwargs:
order (int): order of non-condon violation
"""
if len(self.energies[1]) > 1:
warnings.warn('This only works correctly for 1 excited state')
upper_manifold_num = max(starting_manifold_num,next_manifold_num)
if abs(starting_manifold_num - next_manifold_num) != 1:
warnings.warn('Can only move from manifolds 0 to 1 or 1 to 2')
return None
#This is broken except for 1st order
if order > 1:
warnings.warn('Basis correction not implemented for order > 1')
xn = self.Ladder.x_power_n(order).copy()
basis_correction = -self.params['vibrations'][0]['displacement0'][0] * np.eye(xn.shape[0])
xn += basis_correction
# 1st order Condon violation
XN = nonCondonList[0] * self.vibration_identity_kron(0,xn,0)
for i in range(1,len(nonCondonList)):
XN += nonCondonList[i] * self.vibration_identity_kron(i,xn,0)
# if upper_manifold_num == 1:
# d_vec = self.molecular_dipoles.dot(pol)
# d_mat = d_vec[:,np.newaxis]
# overlap_matrix = kron(d_mat,XN)
# elif upper_manifold_num == 2:
# d_mat = self.molecular_dipoles_SEM_to_DEM.dot(pol)
# overlap_matrix = kron(d_mat.T,XN)
if starting_manifold_num > next_manifold_num:
# Take transpose if transition is down rather than up
XN = np.conjugate(XN.T)
starting_manifold_indices = self.manifold_indices[starting_manifold_num][0]
next_manifold_indices = self.manifold_indices[next_manifold_num][0]
XN = XN[next_manifold_indices,:]
XN = XN[:,starting_manifold_indices]
return csr_matrix(XN)
def calculate_mu(self):
x = np.array([1,0,0])
y = np.array([0,1,0])
z = np.array([0,0,1])
e0 = self.eigenvectors[0]
e1 = self.eigenvectors[1]
mu10_x = self.dipole_matrix(0,1,x)
mu10_y = self.dipole_matrix(0,1,y)
mu10_z = self.dipole_matrix(0,1,z)
try:
linear_non_condon_list_x = [self.params['vibrations'][i]['condon_violation'][0][0] for i in range(self.num_vibrations)]
linear_non_condon_list_y = [self.params['vibrations'][i]['condon_violation'][0][1] for i in range(self.num_vibrations)]
linear_non_condon_list_z = [self.params['vibrations'][i]['condon_violation'][0][2] for i in range(self.num_vibrations)]
linear_violation_x = self.dipole_matrix_nonCondon(0,1,linear_non_condon_list_x)
mu10_x += linear_violation_x
mu10_y += self.dipole_matrix_nonCondon(0,1,linear_non_condon_list_y)
mu10_z += self.dipole_matrix_nonCondon(0,1,linear_non_condon_list_z)
except KeyError:
pass
mu10_x = mu10_x.dot(e0)
mu10_x = e1.T.dot(mu10_x)
mu10_y = mu10_y.dot(e0)
mu10_y = e1.T.dot(mu10_y)
mu10_z = mu10_z.dot(e0)
mu10_z = e1.T.dot(mu10_z)
mu10 = np.zeros((mu10_x.shape[0],mu10_x.shape[1],3))
mu10[:,:,0] = mu10_x
mu10[:,:,1] = mu10_y
mu10[:,:,2] = mu10_z
self.mu = {'GSM_to_SEM':mu10}
if 'DEM' in self.manifolds:
mu21_x = self.dipole_matrix(1,2,x)
mu21_y = self.dipole_matrix(1,2,y)
mu21_z = self.dipole_matrix(1,2,z)
e2 = self.eigenvectors[2]
mu21_x = mu21_x.dot(e1)
mu21_x = e2.T.dot(mu21_x)
mu21_y = mu21_y.dot(e1)
mu21_y = e2.T.dot(mu21_y)
mu21_z = mu21_z.dot(e1)
mu21_z = e2.T.dot(mu21_z)
mu21 = np.zeros((mu21_x.shape[0],mu21_x.shape[1],3))
mu21[:,:,0] = mu21_x
mu21[:,:,1] = mu21_y
mu21[:,:,2] = mu21_z
self.mu['SEM_to_DEM'] = mu21
def save_mu(self):
np.savez(os.path.join(self.base_path,'mu.npz'),**self.mu)
class CalculateDipoleOperator(EigenGenerator):
"""This class calculates the dipole operator in the eigenbasis of
the system Hamiltonian using the eigenvectors"""
def __init__(self,parameter_file_path,*,mask_by_occupation_num=True):
super().__init__(parameter_file_path,mask_by_occupation_num=mask_by_occupation_num)
self.base_path = parameter_file_path
self.load_params()
self.set_mu()
def x0(self,size):
"""Defines the identity operator in the vibrational space"""
ham = np.diag(np.ones(size))
return csr_matrix(ham)
def x1(self,size):
"""Defines the position operator in the vibrational space"""
def offdiag1(n):
return np.sqrt((n+1)/2)
n = np.arange(0,size)
off1 = offdiag1(n[0:-1])
ham = np.zeros((size,size))
ham += np.diag(off1,k=1) + np.diag(off1,k=-1)
return csr_matrix(ham)
def new_vibration_identity_kron(self,position,item):
"""Takes in an operator on a single vibrational and krons it with the
correct number of vibrational identities, inserting it into its
position as indexed by its mode position as specified in the
input file
"""
identities = [np.identity(self.truncation_size) for n in
range(self.num_vibrations-1)]
identities.insert(position,item)
mat = identities.pop(0)
for next_item in identities:
mat = kron(mat,next_item)
return mat
def mu_vibrational_space(self):
"""Untested for condon violations """
ident = self.x0(self.truncation_size)
mu = self.new_vibration_identity_kron(0,ident) # Condon Approximation
try:
kappas = np.array(self.params['kappa'])
except KeyError:
# If parameters file does not specify a condon violation,
# Assume condon approximation holds
kappas = np.zeros(self.num_vibrations)
if np.all(kappas == 0):
# Assumes condon approximation
pass
else:
# Linear condon violation supported so far
x = self.x1(self.truncation_size)
for i in range(kappas.size):
mu += kappas[i] * self.new_vibration_identity_kron(i,x)
return mu
def mu_inner_product(self,eigmats1,eigmats2):
"""Example of how to write a potentially more complicated
dipole operator on the vibrational space.
Args:
eigmats1 (np.ndarray): 3d numpy array with indices [n,m,o]
where n is the site index, m is the vibrational-space index
and o is the eigen index
            eigmats2 (np.ndarray): 3d numpy array with indices [n,m,o] (same
as eigmats1)
"""
sites1, vib, num_eig1 = eigmats1.shape
sites2, vib, num_eig2 = eigmats2.shape
in_prod = np.zeros((num_eig1,num_eig2,sites1,sites2))
vib_mu = self.mu_vibrational_space()
# iterate over all sites
for i in range(sites1):
eigvecs1 = eigmats1[i,...]
# Take matrix product with vibrational space mu
eigvecs1 = vib_mu.dot(eigvecs1)
for j in range(sites2):
eigvecs2 = eigmats2[j,...]
in_prod[...,i,j] = np.dot(eigvecs1.T,eigvecs2)
return in_prod
def simple_inner_product(self,eigmats1,eigmats2):
return np.einsum('mji,njk',eigmats1,eigmats2)
def make_overlap_matrix(self,manifold1,manifold2):
eigvecs1 = self.eigenvectors[manifold1]
eigvecs2 = self.eigenvectors[manifold2]
num_eigvals1 = self.eigenvalues[manifold1].size
num_eigvals2 = self.eigenvalues[manifold2].size
num_sites1 = len(self.energies[manifold1])
num_sites2 = len(self.energies[manifold2])
vibration_space_size = len(self.eigenvectors[0][:,0])
eigmats1 = eigvecs1.reshape((num_sites1,vibration_space_size,num_eigvals1))
eigmats2 = eigvecs2.reshape((num_sites2,vibration_space_size,num_eigvals2))
overlap_matrix = self.simple_inner_product(eigmats1,eigmats2)
return overlap_matrix
def calculate_mu(self):
self.mu_GSM_to_SEM_site = self.make_overlap_matrix(1,0)
if 'DEM' in self.manifolds:
self.mu_SEM_to_DEM_site = self.make_overlap_matrix(2,1)
def set_mu(self):
file_name = os.path.join(self.base_path,'mu_site_basis.npz')
try:
mu_archive = np.load(file_name)
self.mu_GSM_to_SEM_site = mu_archive['GSM_to_SEM']
if 'DEM' in self.manifolds:
self.mu_SEM_to_DEM_site = mu_archive['SEM_to_DEM']
except (FileNotFoundError, KeyError):
self.calculate_mu()
self.save_mu()
def save_mu(self):
file_name = os.path.join(self.base_path,'mu_site_basis.npz')
mu_site_dict = {'GSM_to_SEM':self.mu_GSM_to_SEM_site}
if 'DEM' in self.manifolds:
mu_site_dict['SEM_to_DEM'] = self.mu_SEM_to_DEM_site
np.savez(file_name,**mu_site_dict)
class DipoleConverter(CalculateDipoleOperator):
"""Converts mu represented in the site basis into mu represented
in cartesian coordinates
"""
def __init__(self,parameter_file_path):
super().__init__(parameter_file_path)
self.set_molecular_dipoles()
self.save_cartesian_mu()
### Setting the molecular dipole
def set_molecular_dipoles(self,*,dipoles = None):
"""Load molecular dipoles from params file, or override with input
dipoles - must be a numpy ndarray, with shape (n,3) where n is the number of sites"""
if type(dipoles) is np.ndarray:
self.molecular_dipoles = dipoles
else:
self.molecular_dipoles = np.array(self.params['dipoles'],dtype='float')
self.set_single_to_double_dipole_matrix()
def set_single_to_double_dipole_matrix(self):
"""Given a set of dipoles for transitions from the ground to the
singly excited states, constructs the dipole transitions that take the
system from the singly excited states to the various doubly excited states
"""
singly_excited = np.arange(self.molecular_dipoles.shape[0])
doubly_excited = list(itertools.combinations(singly_excited,2))
mat = np.zeros((len(singly_excited),len(doubly_excited),3))
for i in range(len(singly_excited)):
for j in range(len(doubly_excited)):
tup = doubly_excited[j]
if i == tup[0]:
mat[i,j,:] = self.molecular_dipoles[singly_excited[tup[1]]]
elif i == tup[1]:
mat[i,j,:] = self.molecular_dipoles[singly_excited[tup[0]]]
self.molecular_dipoles_SEM_to_DEM = mat
def conv_mu_site_basis_to_cartesian(self,overlap_matrix,manifold1,manifold2):
if np.abs(manifold1 - manifold2) != 1:
raise ValueError('Dipole only moves between adjacent manifolds')
if manifold1 == 0 or manifold2 == 0:
d = self.molecular_dipoles
# 4th index of overlap_matrix can only be 0
# There is only 1 "site" in the GSM
overlap_matrix = np.dot(overlap_matrix[...,0],d)
elif manifold1 == 2 or manifold2 == 2:
d = self.molecular_dipoles_SEM_to_DEM
overlap_matrix = np.einsum('abij,jic',overlap_matrix,d)
# I have to swap the indices of d to match the convention of overlap_matrix
return overlap_matrix
def save_cartesian_mu(self):
file_name = os.path.join(self.base_path,'mu.npz')
mu_GSM_to_SEM_cartesian = self.conv_mu_site_basis_to_cartesian(self.mu_GSM_to_SEM_site,1,0)
mu_dict = {'GSM_to_SEM':mu_GSM_to_SEM_cartesian}
if 'DEM' in self.manifolds:
mu_SEM_to_DEM_cartesian = self.conv_mu_site_basis_to_cartesian(self.mu_SEM_to_DEM_site,2,1)
mu_dict['SEM_to_DEM'] = mu_SEM_to_DEM_cartesian
np.savez(file_name,**mu_dict)
| 39.057971
| 131
| 0.628942
|
5d747b1dfa334f867366d19ca8dc8ee0579d132f
| 632
|
py
|
Python
|
support/tests/api/tests_profile/test_serializers.py
|
UladzislauBaranau/support-api
|
c453fd6ecc09027ee49d8f582c54521627ddf1a6
|
[
"MIT"
] | null | null | null |
support/tests/api/tests_profile/test_serializers.py
|
UladzislauBaranau/support-api
|
c453fd6ecc09027ee49d8f582c54521627ddf1a6
|
[
"MIT"
] | null | null | null |
support/tests/api/tests_profile/test_serializers.py
|
UladzislauBaranau/support-api
|
c453fd6ecc09027ee49d8f582c54521627ddf1a6
|
[
"MIT"
] | null | null | null |
from api.v1.profiles.serializers import ProfileSerializer, UpdateSupportStatusSerializer
from django.forms.models import model_to_dict
def test_profile_serializer(test_user):
serialized_data = model_to_dict(test_user)
serializer = ProfileSerializer(data=serialized_data)
assert serializer.is_valid()
assert serializer.errors == {}
def test_update_support_status_serializer(test_user):
serialized_data = model_to_dict(test_user)
serializer = UpdateSupportStatusSerializer(data=serialized_data)
# only 'user' and 'is_support' fields are used for serialization
assert len(serializer.fields) == 2
| 31.6
| 88
| 0.797468
|
9ff74c99efd0cd653c4dcaf9dc3602e01605d5d6
| 189
|
py
|
Python
|
backend/database/cron/config_en.py
|
elexis-eu/word-game
|
146aa14d5fbb2fd5e33c6021b60b09e0552c948d
|
[
"Apache-2.0"
] | null | null | null |
backend/database/cron/config_en.py
|
elexis-eu/word-game
|
146aa14d5fbb2fd5e33c6021b60b09e0552c948d
|
[
"Apache-2.0"
] | 4
|
2020-09-22T11:04:13.000Z
|
2020-09-22T11:04:27.000Z
|
backend/database/cron/config_en.py
|
elexis-eu/word-games
|
146aa14d5fbb2fd5e33c6021b60b09e0552c948d
|
[
"Apache-2.0"
] | null | null | null |
class DBconfig:
    host = "localhost"
    port = "3306"
    user = "root"
    password = "root"
    database = "igra_english"


class CronRoot:
    path = "/home/igrabesed/database/cron/"
| 21
| 43
| 0.613757
|
4ad70a4fdf0097d4887a373181cda1317dfb226f
| 48
|
py
|
Python
|
BAClangUtils/__init__.py
|
BenArvin/BAClangUtils
|
d1cc1836c2def6f344c36f39570ed9dc4300bf34
|
[
"MIT"
] | 2
|
2019-04-18T03:58:13.000Z
|
2021-01-08T09:27:46.000Z
|
BAClangUtils/__init__.py
|
BenArvin/BAClangUtils
|
d1cc1836c2def6f344c36f39570ed9dc4300bf34
|
[
"MIT"
] | null | null | null |
BAClangUtils/__init__.py
|
BenArvin/BAClangUtils
|
d1cc1836c2def6f344c36f39570ed9dc4300bf34
|
[
"MIT"
] | 1
|
2019-04-18T03:58:19.000Z
|
2019-04-18T03:58:19.000Z
|
__all__ = [
'ShellUtil',
'RawTokenUtil']
| 16
| 19
| 0.583333
|
0284f9ff42933a0963609d5590765427a1aa77dd
| 106
|
py
|
Python
|
US-bank-experiments-source-code/unfreeze/test.py
|
Namir0806/FETILDA
|
d4a3e720dccef3ba0221e6d59214e54a11c6fc5b
|
[
"MIT"
] | null | null | null |
US-bank-experiments-source-code/unfreeze/test.py
|
Namir0806/FETILDA
|
d4a3e720dccef3ba0221e6d59214e54a11c6fc5b
|
[
"MIT"
] | null | null | null |
US-bank-experiments-source-code/unfreeze/test.py
|
Namir0806/FETILDA
|
d4a3e720dccef3ba0221e6d59214e54a11c6fc5b
|
[
"MIT"
] | null | null | null |
import sys
learning_rate = float(sys.argv[5])
print(learning_rate)
print('{:.1e}'.format(learning_rate))
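
# Illustrative addition (not in the original script): '{:.1e}' renders the value
# in scientific notation with one digit after the decimal point, which is handy
# for embedding a learning rate in run names or log tags.
print('{:.1e}'.format(0.0005))   # -> 5.0e-04
print('{:.1e}'.format(12345.0))  # -> 1.2e+04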
| 17.666667
| 37
| 0.745283
|
ef3198a937db705e2825eb900e011ab9dac1627d
| 2,034
|
py
|
Python
|
tensorflow/python/keras/applications/applications_test.py
|
kuo1220/verbose-barnacle
|
0a1b9ed01e48092f4167e366cf7496c2b111ef6d
|
[
"Apache-2.0"
] | 5
|
2018-10-20T03:54:49.000Z
|
2021-01-02T07:19:53.000Z
|
tensorflow/python/keras/applications/applications_test.py
|
kuo1220/verbose-barnacle
|
0a1b9ed01e48092f4167e366cf7496c2b111ef6d
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/python/keras/applications/applications_test.py
|
kuo1220/verbose-barnacle
|
0a1b9ed01e48092f4167e366cf7496c2b111ef6d
|
[
"Apache-2.0"
] | 2
|
2018-11-03T01:19:26.000Z
|
2021-04-23T02:34:07.000Z
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Integration tests for Keras applications."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.keras import applications
from tensorflow.python.platform import test
MODEL_LIST = [
(applications.ResNet50, 2048),
(applications.VGG16, 512),
(applications.VGG19, 512),
(applications.Xception, 2048),
(applications.InceptionV3, 2048),
(applications.InceptionResNetV2, 1536),
(applications.MobileNet, 1024),
# TODO(fchollet): enable MobileNetV2 in next version.
(applications.DenseNet121, 1024),
(applications.DenseNet169, 1664),
(applications.DenseNet201, 1920),
(applications.NASNetMobile, 1056),
(applications.NASNetLarge, 4032),
]
class ApplicationsTest(test.TestCase, parameterized.TestCase):
@parameterized.parameters(*MODEL_LIST)
def test_classification_model(self, model_fn, _):
model = model_fn(classes=1000, weights=None)
self.assertEqual(model.output_shape[-1], 1000)
@parameterized.parameters(*MODEL_LIST)
def test_feature_extration_model(self, model_fn, output_dim):
model = model_fn(include_top=False, weights=None)
self.assertEqual(model.output_shape, (None, None, None, output_dim))
if __name__ == '__main__':
test.main()
| 34.474576
| 80
| 0.728122
|
eb4b524e6535488d1095e43edd973cb8232ddf5e
| 275
|
py
|
Python
|
0738 Hit Counter.py
|
ansabgillani/binarysearchcomproblems
|
12fe8632f8cbb5058c91a55bae53afa813a3247e
|
[
"MIT"
] | 1
|
2020-12-29T21:17:26.000Z
|
2020-12-29T21:17:26.000Z
|
0738 Hit Counter.py
|
ansabgillani/binarysearchcomproblems
|
12fe8632f8cbb5058c91a55bae53afa813a3247e
|
[
"MIT"
] | null | null | null |
0738 Hit Counter.py
|
ansabgillani/binarysearchcomproblems
|
12fe8632f8cbb5058c91a55bae53afa813a3247e
|
[
"MIT"
] | 4
|
2021-09-09T17:42:43.000Z
|
2022-03-18T04:54:03.000Z
|
from collections import deque

class HitCounter:
def __init__(self):
self.queue = deque()
def add(self, timestamp):
self.queue.append(timestamp)
def count(self, timestamp):
        while self.queue and self.queue[0] < timestamp - 60:
            self.queue.popleft()
        return len(self.queue)
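
# Usage sketch (not part of the original solution): hits older than 60 seconds
# relative to the queried timestamp are evicted before counting.
#
#   counter = HitCounter()
#   counter.add(1)
#   counter.add(2)
#   counter.count(2)     # -> 2, both hits fall inside the 60-second window
#   counter.add(300)
#   counter.count(300)   # -> 1, the hits at t=1 and t=2 have aged out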
| 27.5
| 102
| 0.654545
|
1a960b364ad9fbe632edde3295ac8d04c03e60a2
| 487
|
py
|
Python
|
setup.py
|
evandrosouza89/dods-match-stats
|
4d4e9878b3fbb61b02af8ee805056aa781d03e82
|
[
"MIT"
] | 3
|
2020-09-22T01:59:38.000Z
|
2021-06-19T15:43:10.000Z
|
setup.py
|
evandrosouza89/dods-match-stats
|
4d4e9878b3fbb61b02af8ee805056aa781d03e82
|
[
"MIT"
] | null | null | null |
setup.py
|
evandrosouza89/dods-match-stats
|
4d4e9878b3fbb61b02af8ee805056aa781d03e82
|
[
"MIT"
] | null | null | null |
from setuptools import setup
setup(
name='dods_match_stats',
version='1.0',
packages=['dods_match_stats'],
url='https://github.com/evandrosouza89/dods-match-stats',
license='MIT',
author='Evandro Souza',
author_email='evandro.souza89@gmail.com',
description='A HL Log Standard parser and competitive match stats generator for Day of Defeat Source game.',
install_requires=[
'setuptools',
'configparser',
'SQLAlchemy'
],
)
| 27.055556
| 112
| 0.669405
|
10ac9a743d4578f9d646cf61cc6b91b37e6220e6
| 1,298
|
py
|
Python
|
lib/storage.py
|
nkrios/omnibus
|
d73c1e720f1d97aa104a4286187f785ef0dcaae5
|
[
"MIT"
] | 251
|
2018-05-08T20:40:37.000Z
|
2022-03-22T22:31:17.000Z
|
lib/storage.py
|
samyoyo/omnibus
|
65f6251137d6e38128c19120aa204a577b2cbcaf
|
[
"MIT"
] | 33
|
2018-05-08T21:30:54.000Z
|
2020-08-19T16:24:28.000Z
|
lib/storage.py
|
samyoyo/omnibus
|
65f6251137d6e38128c19120aa204a577b2cbcaf
|
[
"MIT"
] | 70
|
2018-05-16T12:53:05.000Z
|
2022-03-22T22:31:20.000Z
|
#!/usr/bin/env python
##
# omnibus - deadbits.
# output storage management
##
import os
import json
from common import timestamp
from common import error
from common import success
from common import warning
class JSON(object):
def __init__(self, data, file_path=None, file_name='report.json', create=True):
self.data = data
self.file_path = None
if file_name == 'report.json':
self.file_name = '%s_%s.json' % (data['name'], timestamp)
else:
self.file_name = file_name
if file_path:
self.set_filepath(file_path, file_name, create)
def set_filepath(self, file_path, file_name, create=True):
if os.path.isdir(file_path):
self.file_path = os.path.join(file_path, file_name)
if not os.path.exists(self.file_path):
self.save()
success('saved report to %s' % self.file_path)
return False
else:
error('unable to find directory %s - cannot save report' % file_path)
return False
def save(self):
if self.file_path:
with open(self.file_path, 'wb') as fp:
json.dump(self.data, fp)
else:
warning('file path not correctly set - cannot save report')
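
# Usage sketch (illustrative only, not part of the original module; the
# directory below is an assumption).  Passing a directory triggers an immediate
# save of the report data as JSON inside it:
#
#   report = JSON({'name': 'example-artifact', 'hosts': []},
#                 file_path='/tmp/omnibus-reports')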
| 27.041667
| 83
| 0.604006
|
7828807d37dfa40a72485c174c38c84882709206
| 1,006
|
py
|
Python
|
venv/Lib/site-packages/pyo/examples/06-filters/02-bandpass-filters.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/pyo/examples/06-filters/02-bandpass-filters.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/pyo/examples/06-filters/02-bandpass-filters.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
"""
02-bandpass-filters.py - Narrowing a bandpass filter bandwidth.
This example illustrates the difference between a simple IIR second-order
bandpass filter and a cascade of second-order bandpass filters. A cascade
of four bandpass filters with a high Q can be used as an efficient resonator
on the signal.
"""
from pyo import *
s = Server().boot()
# White noise generator
n = Noise(0.5)
# Common cutoff frequency control
freq = Sig(1000)
freq.ctrl([SLMap(50, 5000, "lin", "value", 1000)], title="Cutoff Frequency")
# Common filter's Q control
q = Sig(5)
q.ctrl([SLMap(0.7, 20, "log", "value", 5)], title="Filter's Q")
# Second-order bandpass filter
bp1 = Reson(n, freq, q=q)
# Cascade of second-order bandpass filters
bp2 = Resonx(n, freq, q=q, stages=4)
# Interpolates between input objects to produce a single output
sel = Selector([bp1, bp2]).out()
sel.ctrl(title="Filter selector (0=Reson, 1=Resonx)")
# Displays the spectrum contents of the chosen source
sp = Spectrum(sel)
s.gui(locals())
| 26.473684
| 76
| 0.723658
|
3e9f5259c2f2f906e2aea8107a6244f9dffcd2bf
| 8,431
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20190801/get_connection_monitor.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20190801/get_connection_monitor.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20190801/get_connection_monitor.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetConnectionMonitorResult',
'AwaitableGetConnectionMonitorResult',
'get_connection_monitor',
]
@pulumi.output_type
class GetConnectionMonitorResult:
"""
Information about the connection monitor.
"""
def __init__(__self__, auto_start=None, destination=None, etag=None, id=None, location=None, monitoring_interval_in_seconds=None, monitoring_status=None, name=None, provisioning_state=None, source=None, start_time=None, tags=None, type=None):
if auto_start and not isinstance(auto_start, bool):
raise TypeError("Expected argument 'auto_start' to be a bool")
pulumi.set(__self__, "auto_start", auto_start)
if destination and not isinstance(destination, dict):
raise TypeError("Expected argument 'destination' to be a dict")
pulumi.set(__self__, "destination", destination)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if monitoring_interval_in_seconds and not isinstance(monitoring_interval_in_seconds, int):
raise TypeError("Expected argument 'monitoring_interval_in_seconds' to be a int")
pulumi.set(__self__, "monitoring_interval_in_seconds", monitoring_interval_in_seconds)
if monitoring_status and not isinstance(monitoring_status, str):
raise TypeError("Expected argument 'monitoring_status' to be a str")
pulumi.set(__self__, "monitoring_status", monitoring_status)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if source and not isinstance(source, dict):
raise TypeError("Expected argument 'source' to be a dict")
pulumi.set(__self__, "source", source)
if start_time and not isinstance(start_time, str):
raise TypeError("Expected argument 'start_time' to be a str")
pulumi.set(__self__, "start_time", start_time)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="autoStart")
def auto_start(self) -> Optional[bool]:
"""
Determines if the connection monitor will start automatically once created.
"""
return pulumi.get(self, "auto_start")
@property
@pulumi.getter
def destination(self) -> 'outputs.ConnectionMonitorDestinationResponse':
"""
Describes the destination of connection monitor.
"""
return pulumi.get(self, "destination")
@property
@pulumi.getter
def etag(self) -> Optional[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def id(self) -> str:
"""
ID of the connection monitor.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Connection monitor location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="monitoringIntervalInSeconds")
def monitoring_interval_in_seconds(self) -> Optional[int]:
"""
Monitoring interval in seconds.
"""
return pulumi.get(self, "monitoring_interval_in_seconds")
@property
@pulumi.getter(name="monitoringStatus")
def monitoring_status(self) -> Optional[str]:
"""
The monitoring status of the connection monitor.
"""
return pulumi.get(self, "monitoring_status")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the connection monitor.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the connection monitor.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def source(self) -> 'outputs.ConnectionMonitorSourceResponse':
"""
Describes the source of connection monitor.
"""
return pulumi.get(self, "source")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> Optional[str]:
"""
The date and time when the connection monitor was started.
"""
return pulumi.get(self, "start_time")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Connection monitor tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Connection monitor type.
"""
return pulumi.get(self, "type")
class AwaitableGetConnectionMonitorResult(GetConnectionMonitorResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetConnectionMonitorResult(
auto_start=self.auto_start,
destination=self.destination,
etag=self.etag,
id=self.id,
location=self.location,
monitoring_interval_in_seconds=self.monitoring_interval_in_seconds,
monitoring_status=self.monitoring_status,
name=self.name,
provisioning_state=self.provisioning_state,
source=self.source,
start_time=self.start_time,
tags=self.tags,
type=self.type)
def get_connection_monitor(connection_monitor_name: Optional[str] = None,
network_watcher_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetConnectionMonitorResult:
"""
Information about the connection monitor.
:param str connection_monitor_name: The name of the connection monitor.
:param str network_watcher_name: The name of the Network Watcher resource.
:param str resource_group_name: The name of the resource group containing Network Watcher.
"""
__args__ = dict()
__args__['connectionMonitorName'] = connection_monitor_name
__args__['networkWatcherName'] = network_watcher_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network/v20190801:getConnectionMonitor', __args__, opts=opts, typ=GetConnectionMonitorResult).value
return AwaitableGetConnectionMonitorResult(
auto_start=__ret__.auto_start,
destination=__ret__.destination,
etag=__ret__.etag,
id=__ret__.id,
location=__ret__.location,
monitoring_interval_in_seconds=__ret__.monitoring_interval_in_seconds,
monitoring_status=__ret__.monitoring_status,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
source=__ret__.source,
start_time=__ret__.start_time,
tags=__ret__.tags,
type=__ret__.type)
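
# Usage sketch (illustrative; the resource names are placeholders and the import
# path assumes the generated version package re-exports this module, as the
# Pulumi SDK generator normally arranges):
#
#   from pulumi_azure_native.network import v20190801 as network
#
#   monitor = network.get_connection_monitor(
#       connection_monitor_name="example-monitor",
#       network_watcher_name="example-watcher",
#       resource_group_name="example-rg")
#   pulumi.export("monitoringStatus", monitor.monitoring_status)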
| 37.30531
| 246
| 0.655675
|
191e602d68fac777dd789c328bebdec21b20c751
| 10,811
|
py
|
Python
|
tests/px_loginhistory_test.py
|
noahfx/px
|
1b49febda9d387f750b65a662ff662defce9acf6
|
[
"MIT"
] | null | null | null |
tests/px_loginhistory_test.py
|
noahfx/px
|
1b49febda9d387f750b65a662ff662defce9acf6
|
[
"MIT"
] | null | null | null |
tests/px_loginhistory_test.py
|
noahfx/px
|
1b49febda9d387f750b65a662ff662defce9acf6
|
[
"MIT"
] | null | null | null |
import datetime
import pytest
import dateutil.tz
from px import px_loginhistory
@pytest.yield_fixture
def check_output(capfd):
yield None
out, err = capfd.readouterr()
assert not err
assert not out
def get_users_at(last_output, now, testtime):
"""
Ask px_loginhistory to parse last_output given the current timestamp of now.
Then return the users px_loginhistory claims were logged in at testtime.
"""
return px_loginhistory.get_users_at(testtime, last_output=last_output, now=now)
def test_get_users_at_range(check_output):
# Test user logged in between two timestamps
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "johan ttys000 Thu Mar 31 14:39 - 11:08 (20:29)"
# Before
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 3, 31, 14, 38, tzinfo=dateutil.tz.tzlocal()))
# During
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 3, 31, 14, 39, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 3, 31, 17, 46, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 4, 1, 11, 8, tzinfo=dateutil.tz.tzlocal()))
# After
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 4, 1, 11, 9, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_still_logged_in(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "johan ttys000 Sun Apr 3 11:54 still logged in"
# Before
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 4, 3, 11, 53, tzinfo=dateutil.tz.tzlocal()))
# During
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 4, 3, 11, 54, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime.now(dateutil.tz.tzlocal()))
def test_get_users_at_remote(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "root pts/1 10.1.6.120 Tue Jan 28 05:59 still logged in"
assert set(["root from 10.1.6.120"]) == get_users_at(
lastline, now,
datetime.datetime.now(dateutil.tz.tzlocal()))
def test_get_users_at_local_osx(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "johan ttys000 Sun Apr 3 11:54 still logged in"
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime.now(dateutil.tz.tzlocal()))
def test_get_users_at_local_linux(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "johan pts/2 :0 Wed Mar 9 13:25 - 13:38 (00:12)"
assert set(["johan from :0"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 3, 9, 13, 26, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_until_crash(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "johan ttys001 Thu Nov 26 19:55 - crash (27+07:11)"
# Before
assert not get_users_at(
lastline, now,
datetime.datetime(2015, 11, 26, 19, 54, tzinfo=dateutil.tz.tzlocal()))
# During
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime(2015, 11, 26, 19, 55, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan"]) == get_users_at(
lastline, now,
datetime.datetime(2015, 12, 10, 19, 53, tzinfo=dateutil.tz.tzlocal()))
# A bit after
assert not get_users_at(
lastline, now,
datetime.datetime(2015, 12, 26, 19, 55, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_until_shutdown_osx(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "_mbsetupuser console Mon Jan 18 20:31 - shutdown (34+01:29)"
# Before
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 1, 18, 20, 30, tzinfo=dateutil.tz.tzlocal()))
# During
assert set(["_mbsetupuser"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 1, 18, 20, 31, tzinfo=dateutil.tz.tzlocal()))
assert set(["_mbsetupuser"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 2, 18, 20, 30, tzinfo=dateutil.tz.tzlocal()))
# A bit after
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 2, 28, 20, 30, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_until_shutdown_linux(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "johan :0 :0 Sat Mar 26 22:04 - down (00:08)"
# Before
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 3, 26, 22, 3, tzinfo=dateutil.tz.tzlocal()))
# During
assert set(["johan from :0"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 3, 26, 22, 4, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan from :0"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 3, 26, 22, 9, tzinfo=dateutil.tz.tzlocal()))
# A bit after
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 3, 26, 22, 15, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_multiple(check_output):
# Test multiple users logged in between two timestamps
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "\n".join([
"johan1 ttys000 Thu Mar 31 14:39 - 11:08 (20:29)",
"johan2 ttys000 Thu Mar 31 14:39 - 11:08 (20:29)",
])
# Before
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 3, 31, 14, 38, tzinfo=dateutil.tz.tzlocal()))
# During
assert set(["johan1", "johan2"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 3, 31, 14, 39, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan1", "johan2"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 3, 31, 17, 46, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan1", "johan2"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 4, 1, 11, 8, tzinfo=dateutil.tz.tzlocal()))
# After
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 4, 1, 11, 9, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_pseudousers_osx(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
# Note trailing space in test string, we get that from last on OS X 10.11.3
lastline = "reboot ~ Fri Oct 23 06:50 "
# "reboot" is not a real user, it shouldn't be listed
assert not get_users_at(
lastline, now,
datetime.datetime(2015, 10, 23, 6, 50, tzinfo=dateutil.tz.tzlocal()))
# Note trailing space in test string, we get that from last on OS X 10.11.3
lastline = "shutdown ~ Fri Oct 23 06:49 "
# "shutdown" is not a real user, it shouldn't be listed
assert not get_users_at(
lastline, now,
datetime.datetime(2015, 10, 23, 6, 49, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_pseudousers_linux(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "reboot system boot 4.2.0-30-generic Thu Mar 3 11:19 - 13:38 (6+02:18)"
# "reboot" is not a real user, it shouldn't be listed
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 3, 3, 11, 19, tzinfo=dateutil.tz.tzlocal()))
def test_get_users_at_gone_no_logout(check_output):
"""
Treat "gone - no logout" as "still logged in".
That's the only place I've seen it.
"""
now = datetime.datetime(2016, 4, 7, 12, 8, tzinfo=dateutil.tz.tzlocal())
lastline = "johan pts/3 :0 Mon Apr 4 23:10 gone - no logout"
# Before
assert not get_users_at(
lastline, now,
datetime.datetime(2016, 4, 4, 23, 9, tzinfo=dateutil.tz.tzlocal()))
# During
assert set(["johan from :0"]) == get_users_at(
lastline, now,
datetime.datetime(2016, 4, 4, 23, 10, tzinfo=dateutil.tz.tzlocal()))
assert set(["johan from :0"]) == get_users_at(
lastline, now,
datetime.datetime.now(dateutil.tz.tzlocal()))
def test_get_users_at_trailing_noise(check_output):
now = datetime.datetime(2016, 4, 7, 12, 8, tzinfo=dateutil.tz.tzlocal())
assert not get_users_at("", now, now)
# Note trailing space in test string, we get that from last on OS X 10.11.3
assert not get_users_at("wtmp begins Thu Oct 1 22:54 ", now, now)
def test_get_users_at_unexpected_last_output(capfd):
UNEXPECTED = "glasskiosk"
now = datetime.datetime(2016, 4, 7, 12, 8, tzinfo=dateutil.tz.tzlocal())
assert not get_users_at(UNEXPECTED, now, now)
out, err = capfd.readouterr()
assert not out
assert UNEXPECTED in err
assert 'https://github.com/walles/px/issues' in err
def test_get_users_at_just_run_it(check_output):
# Just tyre kick it live wherever we happen to be. This shouldn't crash.
px_loginhistory.get_users_at(datetime.datetime.now(dateutil.tz.tzlocal()))
def test_to_timestamp(check_output):
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
expected = datetime.datetime(2016, 3, 5, 11, 19, tzinfo=dateutil.tz.tzlocal())
assert px_loginhistory._to_timestamp("Thu Mar 5 11:19", now) == expected
now = datetime.datetime(2016, 4, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
expected = datetime.datetime(2016, 2, 29, 13, 19, tzinfo=dateutil.tz.tzlocal())
assert px_loginhistory._to_timestamp("Mon Feb 29 13:19", now) == expected
now = datetime.datetime(2017, 1, 3, 12, 8, tzinfo=dateutil.tz.tzlocal())
expected = datetime.datetime(2016, 2, 29, 13, 19, tzinfo=dateutil.tz.tzlocal())
assert px_loginhistory._to_timestamp("Mon Feb 29 13:19", now) == expected
def test_to_timedelta(check_output):
assert px_loginhistory._to_timedelta("01:29") == datetime.timedelta(0, hours=1, minutes=29)
assert px_loginhistory._to_timedelta("4+01:29") == datetime.timedelta(4, hours=1, minutes=29)
assert px_loginhistory._to_timedelta("34+01:29") == datetime.timedelta(34, hours=1, minutes=29)
| 37.023973
| 99
| 0.639441
|
ce9ddeca05ccf6da52711467ea4cd2b380f4c785
| 474
|
py
|
Python
|
marketing/migrations/0005_auto_20210125_0600.py
|
Dogechi/Me2U
|
0852600983dc1058ee347f4065ee801e16c1249e
|
[
"MIT"
] | null | null | null |
marketing/migrations/0005_auto_20210125_0600.py
|
Dogechi/Me2U
|
0852600983dc1058ee347f4065ee801e16c1249e
|
[
"MIT"
] | 9
|
2020-06-06T01:16:25.000Z
|
2021-06-04T23:20:37.000Z
|
marketing/migrations/0005_auto_20210125_0600.py
|
Me2U-Afrika/Me2U
|
aee054afedff1e6c87f87494eaddf044e217aa95
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.1 on 2021-01-25 04:00
from django.db import migrations
import stdimage.models
class Migration(migrations.Migration):
dependencies = [
('marketing', '0004_auto_20210124_0711'),
]
operations = [
migrations.AlterField(
model_name='trendinfo',
name='trend_background',
field=stdimage.models.StdImageField(blank=True, null=True, upload_to='images/marketing/banner'),
),
]
| 23.7
| 108
| 0.647679
|
aaa85a2a0d4e7b46e1822ebb47756c903994f126
| 2,104
|
py
|
Python
|
day04/solve.py
|
agobi/aoc-2020
|
71bf029bf95fc84147ce0ffbbe51af0d408fdead
|
[
"BSD-3-Clause"
] | null | null | null |
day04/solve.py
|
agobi/aoc-2020
|
71bf029bf95fc84147ce0ffbbe51af0d408fdead
|
[
"BSD-3-Clause"
] | null | null | null |
day04/solve.py
|
agobi/aoc-2020
|
71bf029bf95fc84147ce0ffbbe51af0d408fdead
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2020 Attila Gobi
# SPDX-License-Identifier: BSD-3-Clause
"""
Solution for https://adventofcode.com/2020/day/4
>>> passports = parse("day04/test.txt")
>>> solve1(passports)
2
>>> solve2(passports)
2
"""
import sys
import re
def parse(fn):
ret = []
current = {}
with open(fn, "rt") as f:
for line in f:
line = line.strip()
if line == "":
ret.append(current)
current = {}
else:
for k, v in [x.split(":") for x in line.split(" ")]:
current[k] = v
ret.append(current)
return ret
def solve1(data):
fields = set(["byr", "iyr", "eyr", "hgt", "hcl", "ecl", "pid"])
count = 0
for passport in data:
if not fields - set(passport.keys()):
count += 1
return count
def solve2(data):
pid_re = re.compile(r'\d{9}')
hcl_re = re.compile(r'#[0-9a-f]{6}')
ecl_set = set(["amb", "blu", "brn", "gry", "grn", "hzl", "oth"])
def valid_hgt(x):
try:
int_x = int(x[:-2])
if x.endswith("in"):
return int_x >= 59 and int_x <= 76
elif x.endswith("cm"):
return int_x >= 150 and int_x <= 193
except ValueError:
pass
return False
fields = {
"byr": lambda x: int(x) >= 1920 and int(x) <= 2002,
"iyr": lambda x: int(x) >= 2010 and int(x) <= 2020,
"eyr": lambda x: int(x) >= 2020 and int(x) <= 2030,
"hgt": valid_hgt,
"hcl": lambda x: hcl_re.fullmatch(x),
"ecl": lambda x: x in ecl_set,
"pid": lambda x: pid_re.fullmatch(x)
}
    def validate(x):
        for k, v in fields.items():
            if k not in x or not v(x[k]):
                # print("ERROR:", k, x)
                return False
        return True
count = 0
for passport in data:
if validate(passport):
count += 1
return count
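# A record that passes every rule in `fields` above would look like
#   {'byr': '1980', 'iyr': '2012', 'eyr': '2025', 'hgt': '180cm',
#    'hcl': '#123abc', 'ecl': 'brn', 'pid': '000123456'}
# (illustrative values: the years fall in their ranges, 180cm is within
# 150-193, the hair colour matches #[0-9a-f]{6}, 'brn' is in ecl_set and the
# pid has exactly nine digits).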
if __name__ == '__main__':
data = parse(sys.argv[1])
print(solve1(data))
print(solve2(data))
| 22.382979
| 68
| 0.498099
|
c6714cc558f8af867e7ffa776318b0cc1cb41c77
| 3,999
|
py
|
Python
|
eval.py
|
zhongerqiandan/OpenDialog
|
f478b2a912c8c742da5ced510ac40da59217ddb3
|
[
"MIT"
] | 98
|
2020-07-16T06:27:29.000Z
|
2022-03-12T15:21:51.000Z
|
eval.py
|
zhongerqiandan/OpenDialog
|
f478b2a912c8c742da5ced510ac40da59217ddb3
|
[
"MIT"
] | 2
|
2020-07-22T12:00:17.000Z
|
2021-02-24T01:19:14.000Z
|
eval.py
|
gmftbyGMFTBY/OpenDialog
|
8eb56b7a21cea1172131db7a56d2656364144771
|
[
"MIT"
] | 19
|
2020-07-16T08:36:09.000Z
|
2021-09-14T05:36:54.000Z
|
from header import *
from metrics import *
from utils import *
def cal_ir_metric(rest):
# P@1
p_1 = np.mean([precision_at_k(y_true, y_pred, k=1) for y_true, y_pred in rest])
# R10@1, R10@2, R10@5
recalls = [recall(y_true, y_pred, cutoffs=[1, 2, 5]) for y_true, y_pred in rest]
r10_1 = np.mean([i[0][1] for i in recalls])
r10_2 = np.mean([i[1][1] for i in recalls])
r10_5 = np.mean([i[2][1] for i in recalls])
    # R2@1: rebuild each ranking so it holds only the ground truth (id 0) and one random negative
new_rest = []
for l, p in deepcopy(rest):
candidate = random.sample(p, 1)[0]
while candidate == 0:
candidate = random.sample(p, 1)[0]
if p.index(candidate) > p.index(0):
new_rest.append((l, [0, candidate]))
else:
new_rest.append((l, [candidate, 0]))
recalls = [recall(y_true, y_pred, cutoffs=[1]) for y_true, y_pred in new_rest]
r2_1 = np.mean([i[0][1] for i in recalls])
# MAP and MRR
y_true = [i[0] for i in rest]
y_pred = [i[1] for i in rest]
MAP = mean_avg_precision_at_k(y_true, y_pred)
MRR = mean_reciprocal_rank(y_true, y_pred)
#
p_1 = round(p_1, 4)
r2_1 = round(r2_1, 4)
r10_1 = round(r10_1, 4)
r10_2 = round(r10_2, 4)
r10_5 = round(r10_5, 4)
MAP = round(MAP, 4)
MRR = round(MRR, 4)
return p_1, r2_1, r10_1, r10_2, r10_5, MAP, MRR
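# A minimal usage sketch, assuming each element of `rest` is a (y_true, y_pred)
# pair where y_pred is a ranked list of candidate ids and id 0 marks the ground
# truth (the convention the R2@1 block above relies on):
#   rest = [([0], [3, 0, 1, 2, 4, 5, 6, 7, 8, 9]),
#           ([0], [0, 5, 2, 1, 7, 3, 4, 6, 8, 9])]
#   p_1, r2_1, r10_1, r10_2, r10_5, MAP, MRR = cal_ir_metric(rest)
# The exact input shapes are defined by precision_at_k/recall in the local
# metrics module, which is not shown here.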
def cal_generative_metric(path, batch_size=16, lang='zh'):
# read the generated rest
with open(path) as f:
data = f.read().split('\n\n')
data = [i.split('\n') for i in data if i.strip()]
# filter the prefix, collect the refs and tgts
rest, refs, tgts = [], [], []
for example in data:
example = [i[5:].replace('[SEP]', '').replace('[CLS]', '') for i in example]
rest.append(example)
if lang == 'en':
refs.append(example[1].split())
tgts.append(example[2].split())
else:
            # use jieba to tokenize the Chinese text
refs.append(list(jieba.cut(example[1])))
tgts.append(list(jieba.cut(example[2])))
# refs.append(list(example[1]))
# tgts.append(list(example[2]))
# performance: (bleu4, dist-1/2, average, extrema, greedy)
# length
r_max_l, r_min_l, r_avg_l = cal_length(refs)
c_max_l, c_min_l, c_avg_l = cal_length(tgts)
# BLEU-4
b_refs, b_tgts = [' '.join(i) for i in refs], [' '.join(i) for i in tgts]
bleu1, bleu2, bleu3, bleu4 = cal_BLEU(b_refs, b_tgts)
# Dist-1/2
candidates, references = [], []
for t, r in zip(tgts, refs):
candidates.extend(t)
references.extend(r)
dist1, dist2 = cal_Distinct(candidates)
r_dist1, r_dist2 = cal_Distinct(references)
    # embedding-based metrics (average, extrema, greedy), computed over the tokenized words
# word embeddings from: https://github.com/Embedding/Chinese-Word-Vectors
if lang == 'zh':
w2v = gensim.models.KeyedVectors.load_word2vec_format('data/chinese_w2v.txt', binary=False)
else:
w2v = gensim.models.KeyedVectors.load_word2vec_format('data/english_w2v.bin', binary=True)
        print(f'[!] load english word2vec by gensim; GoogleNews WordVector: data/english_w2v.bin')
es, vs, gs = [], [], []
for r, c in tqdm(list(zip(refs, tgts))):
es.append(cal_embedding_average(r, c, w2v))
vs.append(cal_vector_extrema(r, c, w2v))
gs.append(cal_greedy_matching_matrix(r, c, w2v))
average = np.mean(es)
extrema = np.mean(vs)
greedy = np.mean(gs)
# round, 4
bleu1, bleu2, bleu3, bleu4 = round(bleu1, 4), round(bleu2, 4), round(bleu3, 4), round(bleu4, 4)
dist1, dist2, r_dist1, r_dist2 = round(dist1, 4), round(dist2, 4), round(r_dist1, 4), round(r_dist2, 4)
average, extrema, greedy = round(average, 4), round(extrema, 4), round(greedy, 4)
return (bleu1, bleu2, bleu3, bleu4), ((r_max_l, r_min_l, r_avg_l), (c_max_l, c_min_l, c_avg_l)), (dist1, dist2, r_dist1, r_dist2), (average, extrema, greedy)
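# A hedged usage sketch: the result file is assumed to hold blank-line
# separated blocks of three lines (context, reference, generated reply), each
# carrying a fixed 5-character prefix that the `i[5:]` slice above strips;
# the word2vec files under data/ must exist before the embedding metrics run.
#   bleus, lengths, dists, embs = cal_generative_metric('rest.txt', lang='zh')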
| 43
| 161
| 0.612153
|
d82d289aa11f6fd7a29a3ef854f2582add7748bf
| 928
|
py
|
Python
|
D/test_pasted_from_page.py
|
staguchi0703/ABC178
|
30f2a28659008d62ae18b08be8548c864530d143
|
[
"MIT"
] | null | null | null |
D/test_pasted_from_page.py
|
staguchi0703/ABC178
|
30f2a28659008d62ae18b08be8548c864530d143
|
[
"MIT"
] | null | null | null |
D/test_pasted_from_page.py
|
staguchi0703/ABC178
|
30f2a28659008d62ae18b08be8548c864530d143
|
[
"MIT"
] | null | null | null |
#
from resolve import resolve
####################################
####################################
# Paste the contents generated by the plugin below
#
import sys
from io import StringIO
import unittest
class TestClass(unittest.TestCase):
def assertIO(self, input, output):
stdout, stdin = sys.stdout, sys.stdin
sys.stdout, sys.stdin = StringIO(), StringIO(input)
resolve()
sys.stdout.seek(0)
out = sys.stdout.read()[:-1]
sys.stdout, sys.stdin = stdout, stdin
self.assertEqual(out, output)
def test_入力例_1(self):
input = """7"""
output = """3"""
self.assertIO(input, output)
def test_入力例_2(self):
input = """2"""
output = """0"""
self.assertIO(input, output)
def test_入力例_3(self):
input = """1729"""
output = """294867501"""
self.assertIO(input, output)
if __name__ == "__main__":
unittest.main()
| 23.2
| 59
| 0.53556
|
d3aa77298e312562d153b1a2d5af95658d568623
| 97
|
py
|
Python
|
alternatives/server-flask/src/core/resolvers/__init__.py
|
TaitoUnited/full-stack-template
|
58529515c2f3dd765074b4c5f326f6336646f4e7
|
[
"MIT"
] | 21
|
2019-10-12T06:04:43.000Z
|
2022-03-31T06:03:34.000Z
|
alternatives/server-flask/src/core/resolvers/__init__.py
|
TaitoUnited/server-template
|
67f370f212adefd96da2404077e575764f6a1b11
|
[
"MIT"
] | 64
|
2018-04-22T09:39:19.000Z
|
2019-06-14T12:32:08.000Z
|
alternatives/server-flask/src/core/resolvers/__init__.py
|
TaitoUnited/full-stack-template
|
58529515c2f3dd765074b4c5f326f6336646f4e7
|
[
"MIT"
] | 4
|
2019-11-03T22:47:56.000Z
|
2022-01-09T11:52:15.000Z
|
# GraphQL API resolvers
from . import media_resolver # noqa
from . import post_resolver # noqa
| 24.25
| 36
| 0.762887
|
0536d2d272fd292dd96cac951041f07e4e3e068e
| 663
|
py
|
Python
|
var/spack/repos/builtin/packages/py-webencodings/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11
|
2015-10-04T02:17:46.000Z
|
2018-02-07T18:23:00.000Z
|
var/spack/repos/builtin/packages/py-webencodings/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 22
|
2017-08-01T22:45:10.000Z
|
2022-03-10T07:46:31.000Z
|
var/spack/repos/builtin/packages/py-webencodings/package.py
|
player1537-forks/spack
|
822b7632222ec5a91dc7b7cda5fc0e08715bd47c
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 4
|
2016-06-10T17:57:39.000Z
|
2018-09-11T04:59:38.000Z
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyWebencodings(PythonPackage):
"""This is a Python implementation of the WHATWG Encoding standard."""
homepage = "https://github.com/gsnedders/python-webencodings"
pypi = "webencodings/webencodings-0.5.1.tar.gz"
version('0.5.1', sha256='b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923')
depends_on('py-setuptools', type='build')
depends_on('python@2.6:2.8,3.3:', type=('build', 'run'))
| 34.894737
| 95
| 0.731523
|
51c76c51621c1411470fc41191ff51e0ccb30e7b
| 3,849
|
py
|
Python
|
get_db_layer_data.py
|
erochest/problayers
|
a7cba057a4ede6504c461ec794d0557cdcc85281
|
[
"BSD-3-Clause"
] | null | null | null |
get_db_layer_data.py
|
erochest/problayers
|
a7cba057a4ede6504c461ec794d0557cdcc85281
|
[
"BSD-3-Clause"
] | null | null | null |
get_db_layer_data.py
|
erochest/problayers
|
a7cba057a4ede6504c461ec794d0557cdcc85281
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import fileinput
import sys
import requests
GEOSERVER = 'http://libsvr35.lib.virginia.edu:8080/geoserver/rest'
GEOSERVER_AUTH = ('slabadmin', 'GIS4slab!')
def debug(msg):
sys.stderr.write(msg + '\n')
sys.stderr.flush()
def rest_req(uri, method='get', headers=None, data=None):
method_fn = getattr(requests, method)
headers = headers or {}
data = data or {}
debug('FETCHING "{0}"'.format(uri))
r = method_fn(uri, auth=GEOSERVER_AUTH, headers=headers, data=data)
r.raise_for_status()
return r
def rest_text(uri, *args, **kwargs):
return rest_req(uri, *args, **kwargs).text
def rest_json(uri, *args, **kwargs):
return rest_req(uri, *args, **kwargs).json
def geoserver(request, *args, **kwargs):
return rest_json(GEOSERVER + request, *args, **kwargs)
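# Usage note: geoserver('/workspaces/{0}/datastores/{0}.json'.format(name)),
# as used in get_dstore() below, issues a GET against GEOSERVER plus that path
# with the credentials above. rest_json() returns the response's `.json`
# attribute without calling it, which only yields a parsed dict on the old
# requests API (Response.json as a property); on current requests versions it
# is a method and would need to be invoked.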
def to_dict(cxn_params):
"""\
This takes the @key / $ encoding of the connection parameters and returns a
dict.
"""
return dict( (p['@key'], p['$']) for p in cxn_params['entry'] )
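# For illustration, connectionParameters arrive from GeoServer in the form
#   {'entry': [{'@key': 'dbtype', '$': 'postgis'},
#              {'@key': 'host', '$': 'lon.lib.virginia.edu'}]}
# and to_dict() flattens that into {'dbtype': 'postgis', 'host': 'lon.lib.virginia.edu'},
# which is what the dbtype/host checks in main() rely on.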
class DataStore(object):
def __init__(self, workspace_name, data_store):
self.workspace_name = workspace_name
self.data_store = data_store
self.__feature_type_cache = {}
def __getitem__(self, key):
return self.data_store[key]
def _load_feature_types(self):
ft_url = self.data_store['featureTypes']
json = rest_json(ft_url)
if not json['featureTypes']:
return {}
else:
try:
return dict(
(ft['name'], ft['href'])
for ft in json['featureTypes']['featureType']
)
except:
import pprint
debug('ERROR ON')
pprint.pprint(json)
raise
@property
def feature_types(self):
try:
fts = self.__feature_types
except AttributeError:
self.__feature_types = fts = self._load_feature_types()
return fts
def get_dstore(workspace_name, cache):
dstore = cache.get(workspace_name)
if dstore is None:
ds_url = '/workspaces/{0}/datastores/{0}.json'.format(workspace_name)
try:
cache[workspace_name] = dstore = DataStore(
workspace_name, geoserver(ds_url)['dataStore']
)
except:
dstore = None
return dstore
def main():
dstores = {}
for line in fileinput.input():
debug('INPUT: "{0}"'.format(line.strip()))
(workspace_name, layer_name) = line.strip().split('.')
dstore = get_dstore(workspace_name, dstores)
if dstore is None:
debug('Cannot load datastore {0}. skipping.'.format(
workspace_name,
))
continue
cxn_params = to_dict(dstore['connectionParameters'])
if (cxn_params['dbtype'] == 'postgis' and
cxn_params['host'] == 'lon.lib.virginia.edu'):
db_name = cxn_params['database']
fts = dstore.feature_types
layer_uri = fts.get(layer_name)
if layer_uri is not None:
layer_uri = layer_uri.replace('.json', '.xml')
layer_data = rest_text(layer_uri)
out_file = 'layers/{0}.{1}.xml'.format(
db_name, layer_name,
)
sys.stderr.write(
'writing data for {0}.{1} => {2} ({3} bytes).\n'.format(
db_name, layer_name, out_file, len(layer_data),
))
with open(out_file, 'wb') as fout:
fout.write(layer_data)
if layer_data:
sys.stdout.write(line)
if __name__ == '__main__':
main()
| 27.891304
| 80
| 0.551052
|
67584068db143adb81a650a04bf5f529ed4428df
| 57,546
|
py
|
Python
|
laygo/generators/adc_sar/adc_sar_sf_layout_generator.py
|
tinapiao/Software-IC-Automation
|
74b23cd94aa6e4658b110e93b5deb635e014f3a6
|
[
"BSD-3-Clause"
] | 26
|
2017-07-07T08:06:31.000Z
|
2021-11-25T06:41:24.000Z
|
laygo/generators/adc_sar/adc_sar_sf_layout_generator.py
|
tinapiao/Software-IC-Automation
|
74b23cd94aa6e4658b110e93b5deb635e014f3a6
|
[
"BSD-3-Clause"
] | 9
|
2016-12-28T03:08:29.000Z
|
2019-01-30T16:00:28.000Z
|
laygo/generators/adc_sar/adc_sar_sf_layout_generator.py
|
tinapiao/Software-IC-Automation
|
74b23cd94aa6e4658b110e93b5deb635e014f3a6
|
[
"BSD-3-Clause"
] | 10
|
2018-07-14T01:31:28.000Z
|
2021-08-21T10:18:30.000Z
|
#!/usr/bin/python
########################################################################################################################
#
# Copyright (c) 2014, Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
########################################################################################################################
"""ADC library
"""
import laygo
import numpy as np
import os
import yaml
#import logging;logging.basicConfig(level=logging.DEBUG)
def create_power_pin_from_inst(laygen, layer, gridname, inst_left, inst_right):
"""create power pin"""
rvdd0_pin_xy = laygen.get_inst_pin_xy(inst_left.name, 'VDD', gridname, sort=True)
rvdd1_pin_xy = laygen.get_inst_pin_xy(inst_right.name, 'VDD', gridname, sort=True)
rvss0_pin_xy = laygen.get_inst_pin_xy(inst_left.name, 'VSS', gridname, sort=True)
rvss1_pin_xy = laygen.get_inst_pin_xy(inst_right.name, 'VSS', gridname, sort=True)
laygen.pin(name='VDD', layer=layer, xy=np.vstack((rvdd0_pin_xy[0],rvdd1_pin_xy[1])), gridname=gridname)
laygen.pin(name='VSS', layer=layer, xy=np.vstack((rvss0_pin_xy[0],rvss1_pin_xy[1])), gridname=gridname)
def generate_boundary(laygen, objectname_pfix, placement_grid,
devname_bottom, devname_top, devname_left, devname_right,
shape_bottom=None, shape_top=None, shape_left=None, shape_right=None,
transform_bottom=None, transform_top=None, transform_left=None, transform_right=None,
origin=np.array([0, 0])):
#generate a boundary structure to resolve boundary design rules
pg = placement_grid
#parameters
if shape_bottom == None:
shape_bottom = [np.array([1, 1]) for d in devname_bottom]
if shape_top == None:
shape_top = [np.array([1, 1]) for d in devname_top]
if shape_left == None:
shape_left = [np.array([1, 1]) for d in devname_left]
if shape_right == None:
shape_right = [np.array([1, 1]) for d in devname_right]
if transform_bottom == None:
transform_bottom = ['R0' for d in devname_bottom]
if transform_top == None:
transform_top = ['R0' for d in devname_top]
if transform_left == None:
transform_left = ['R0' for d in devname_left]
if transform_right == None:
transform_right = ['R0' for d in devname_right]
#bottom
dev_bottom=[]
dev_bottom.append(laygen.place("I" + objectname_pfix + 'BNDBTM0', devname_bottom[0], pg, xy=origin,
shape=shape_bottom[0], transform=transform_bottom[0]))
for i, d in enumerate(devname_bottom[1:]):
dev_bottom.append(laygen.relplace("I" + objectname_pfix + 'BNDBTM'+str(i+1), d, pg, dev_bottom[-1].name,
shape=shape_bottom[i+1], transform=transform_bottom[i+1]))
dev_left=[]
dev_left.append(laygen.relplace("I" + objectname_pfix + 'BNDLFT0', devname_left[0], pg, dev_bottom[0].name, direction='top',
shape=shape_left[0], transform=transform_left[0]))
for i, d in enumerate(devname_left[1:]):
dev_left.append(laygen.relplace("I" + objectname_pfix + 'BNDLFT'+str(i+1), d, pg, dev_left[-1].name, direction='top',
shape=shape_left[i+1], transform=transform_left[i+1]))
dev_right=[]
dev_right.append(laygen.relplace("I" + objectname_pfix + 'BNDRHT0', devname_right[0], pg, dev_bottom[-1].name, direction='top',
shape=shape_right[0], transform=transform_right[0]))
for i, d in enumerate(devname_right[1:]):
dev_right.append(laygen.relplace("I" + objectname_pfix + 'BNDRHT'+str(i+1), d, pg, dev_right[-1].name, direction='top',
shape=shape_right[i+1], transform=transform_right[i+1]))
dev_top=[]
dev_top.append(laygen.relplace("I" + objectname_pfix + 'BNDTOP0', devname_top[0], pg, dev_left[-1].name, direction='top',
shape=shape_top[0], transform=transform_top[0]))
for i, d in enumerate(devname_top[1:]):
dev_top.append(laygen.relplace("I" + objectname_pfix + 'BNDTOP'+str(i+1), d, pg, dev_top[-1].name,
shape=shape_top[i+1], transform=transform_top[i+1]))
    dev_right=[]  # note: this re-initialization discards the right-boundary instances placed above, so the returned list is empty
return [dev_bottom, dev_top, dev_left, dev_right]
def generate_tap(laygen, objectname_pfix, placement_grid, routing_grid_m1m2_thick, devname_tap_boundary, devname_tap_body,
m=1, origin=np.array([0,0]), double_rail=False, transform='R0'):
"""generate a tap primitive"""
pg = placement_grid
rg12t = routing_grid_m1m2_thick
# placement
taprow = laygen.relplace(name=[None, None, None],
templatename=[devname_tap_boundary, devname_tap_body, devname_tap_boundary],
gridname=pg, xy=[origin, [0, 0], [0, 0]], shape=[[1, 1], [m, 1], [1, 1]],
transform=transform)
itapbl0, itap0, itapbr0 = taprow
#power route
laygen.route(name=None, xy0=[0, 0], xy1=[0, 0], gridname0=rg12t,
refobj0=itap0.elements[0, 0].pins['TAP0'], refobj1=itap0.elements[m-1, 0].pins['TAP1'])
for i in range(0, m, 1):
laygen.via(name=None, xy=[0, 0], refobj=itap0.elements[i, 0].pins['TAP1'], gridname=rg12t)
if double_rail==False: #location of track
laygen.route(name=None, xy0=[0, 1], xy1=[0, 1], gridname0=rg12t,
refobj0=itap0.elements[0, 0].pins['TAP0'], refobj1=itap0.elements[m - 1, 0].pins['TAP1'])
for i in range(0, m, 1):
laygen.via(name=None, xy=[0, 1], refobj=itap0.elements[i, 0].pins['TAP0'], gridname=rg12t)
if m % 2 == 0:
laygen.via(name=None, xy=[0, 1], refobj=itap0.elements[m - 1, 0].pins['TAP2'], gridname=rg12t)
if double_rail==True: #location of track
laygen.route(name=None, xy0=[0, -1], xy1=[0, -1], gridname0=rg12t,
refobj0=itap0.elements[0, 0].pins['TAP0'], refobj1=itap0.elements[m-1, 0].pins['TAP1'])
for i in range(0, m, 1):
laygen.route(None, xy0=np.array([0, -1]), xy1=np.array([0, 1]), gridname0=rg12t,
refobj0=itap0.elements[i, 0].pins['TAP0'], refobj1=itap0.elements[i, 0].pins['TAP0'])
laygen.via(None, np.array([0, -1]), refobj=itap0.elements[i, 0].pins['TAP0'], gridname=rg12t)
if m%2==0:
laygen.route(None, xy0=np.array([0, -1]), xy1=np.array([0, 1]), gridname0=rg12t,
refobj0=itap0.elements[m-1, 0].pins['TAP1'], refobj1=itap0.elements[m-1, 0].pins['TAP1'])
laygen.via(name=None, xy=[0, -1], refobj=itap0.elements[m-1, 0].pins['TAP1'], gridname=rg12t)
return [itapbl0, itap0, itapbr0]
def generate_mos(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_mos_boundary, devname_mos_body,
devname_mos_dmy, m=1, m_dmy=0, origin=np.array([0,0])):
"""generate a analog mos primitive with dummies"""
pg = placement_grid
rg_m1m2 = routing_grid_m1m2
# placement
if not m_dmy==0:
imbl0 = laygen.place("I" + objectname_pfix + 'BL0', devname_mos_boundary, pg, xy=origin)
imdmyl0 = laygen.relplace("I" + objectname_pfix + 'DMYL0', devname_mos_dmy, pg, imbl0.name, shape=np.array([m_dmy, 1]))
im0 = laygen.relplace("I" + objectname_pfix + '0', devname_mos_body, pg, imdmyl0.name, shape=np.array([m, 1]))
imdmyr0 = laygen.relplace("I" + objectname_pfix + 'DMYR0', devname_mos_dmy, pg, im0.name, shape=np.array([m_dmy, 1]))
imbr0 = laygen.relplace("I" + objectname_pfix + 'BR0', devname_mos_boundary, pg, imdmyr0.name)
else:
imbl0 = laygen.place("I" + objectname_pfix + 'BL0', devname_mos_boundary, pg, xy=origin)
imdmyl0 = None
im0 = laygen.relplace("I" + objectname_pfix + '0', devname_mos_body, pg, imbl0.name, shape=np.array([m, 1]))
imdmyr0 = None
imbr0 = laygen.relplace("I" + objectname_pfix + 'BR0', devname_mos_boundary, pg, im0.name)
#route
#gate
rg0=laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=im0.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
refinstname1=im0.name, refpinname1='G0', refinstindex1=np.array([m-1, 0])
)
for i in range(m):
laygen.via(None, np.array([0, 0]), refinstname=im0.name, refpinname='G0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
#drain
rdl0=laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
refinstname0=im0.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
refinstname1=im0.name, refpinname1='D0', refinstindex1=np.array([m-1, 0])
)
for i in range(m):
laygen.via(None, np.array([0, 1]), refinstname=im0.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
#source
rs0=laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=im0.name, refpinname0='S0', refinstindex0=np.array([0, 0]),
refinstname1=im0.name, refpinname1='S1', refinstindex1=np.array([m-1, 0])
)
for i in range(m):
laygen.via(None, np.array([0, 0]), refinstname=im0.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
laygen.via(None, np.array([0, 0]), refinstname=im0.name, refpinname='S1', refinstindex=np.array([m - 1, 0]), gridname=rg_m1m2)
#dmy
if m_dmy>=2:
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
refinstname0=imdmyl0.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
refinstname1=imdmyl0.name, refpinname1='D0', refinstindex1=np.array([m_dmy-1, 0])
)
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
refinstname0=imdmyr0.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
refinstname1=imdmyr0.name, refpinname1='D0', refinstindex1=np.array([m_dmy-1, 0])
)
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imdmyl0.name, refpinname0='S0', refinstindex0=np.array([0, 0]),
refinstname1=imdmyl0.name, refpinname1='S0', refinstindex1=np.array([m_dmy-1, 0])
)
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imdmyr0.name, refpinname0='S1', refinstindex0=np.array([0, 0]),
refinstname1=imdmyr0.name, refpinname1='S1', refinstindex1=np.array([m_dmy-1, 0])
)
for i in range(m_dmy):
laygen.via(None, np.array([0, 1]), refinstname=imdmyl0.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
laygen.via(None, np.array([0, 1]), refinstname=imdmyr0.name, refpinname='D0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
laygen.via(None, np.array([0, 0]), refinstname=imdmyl0.name, refpinname='S0', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
laygen.via(None, np.array([0, 0]), refinstname=imdmyr0.name, refpinname='S1', refinstindex=np.array([i, 0]), gridname=rg_m1m2)
return [imbl0, imdmyl0, im0, imdmyr0, imbr0]
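# generate_mos is not instantiated in the portion of the generator shown here;
# a call would look roughly like the sketch below, reusing the nmos4_fast_*
# template names passed to generate_source_follower further down (the sizes
# are placeholders, not values from the ADC sizing flow):
#   generate_mos(laygen, 'M0', pg, rg_m1m2,
#                devname_mos_boundary='nmos4_fast_boundary',
#                devname_mos_body='nmos4_fast_center_nf2',
#                devname_mos_dmy='nmos4_fast_dmy_nf2',
#                m=4, m_dmy=2)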
def generate_source_follower(laygen, objectname_pfix, placement_grid, routing_grid_m1m2, devname_mos_boundary, devname_mos_body,
devname_mos_dmy, devname_tap_boundary, devname_tap_body,
devname_mos_space_4x, devname_mos_space_2x, devname_mos_space_1x,
devname_tap_space_4x, devname_tap_space_2x, devname_tap_space_1x,
m_mir=2, m_bias=2, m_in=2, m_ofst=2, m_bias_dum=2, m_in_dum=2, m_byp=2, m_byp_bias=2, bias_current=True, origin=np.array([0,0])):
"""generate an analog differential mos structure with dummmies """
pg = placement_grid
rg_m1m2 = routing_grid_m1m2
# placement
# generate boundary
x0=laygen.get_template_size('sar', gridname=pg, libname=workinglib)[0]/2
x1=laygen.get_template_size('boundary_bottomleft', gridname=pg, libname=utemplib)[0]
m_bnd=int((x0-x1*2)/laygen.get_template_size('boundary_bottom', gridname=pg, libname=utemplib)[0])
[bnd_bottom, bnd_top, bnd_left, bnd_right]=generate_boundary(laygen, objectname_pfix='BND0',
placement_grid=pg,
devname_bottom = ['boundary_bottomleft', 'boundary_bottom', 'boundary_bottomright'],
shape_bottom = [np.array([1, 1]), np.array([m_bnd, 1]), np.array([1, 1])],
devname_top = ['boundary_topleft', 'boundary_top', 'boundary_topright'],
shape_top = [np.array([1, 1]), np.array([m_bnd, 1]), np.array([1, 1])],
devname_left = ['ptap_fast_left', 'nmos4_fast_left', 'ptap_fast_left', 'ptap_fast_left', 'nmos4_fast_left', 'ptap_fast_left'],
transform_left=['MX', 'R0', 'MX', 'R0', 'MX', 'R0'],
devname_right=['ptap_fast_right', 'nmos4_fast_right','ptap_fast_right','ptap_fast_right', 'nmos4_fast_right','ptap_fast_right'],
transform_right = ['MX', 'R0', 'MX', 'R0', 'MX', 'R0'],
origin=np.array([0, 0]))
# generate the first tap row
m_tap = max((m_bias_dum*6+m_ofst+int(m_mir/2)*2+m_bias+m_byp_bias), (m_in_dum*3+m_in+m_byp+2))+4
tap_origin = laygen.get_inst_xy(bnd_left[0].name, pg) + laygen.get_template_size('ptap_fast_left', pg)[0]*np.array([1,0])
[itapbl0, itap0, itapbr0] = generate_tap(laygen, objectname_pfix=objectname_pfix+'PTAP0', placement_grid=pg,
routing_grid_m1m2_thick=rg_m1m2_thick,
devname_tap_boundary=devname_tap_boundary, devname_tap_body=devname_tap_body,
m=m_tap, double_rail=False, origin=tap_origin, transform='MX')
# generate the second current mirror & bias devices row
if m_bias_dum * 6 + m_ofst + int(m_mir / 2) * 2 + m_bias + m_byp_bias > m_in_dum * 3 + m_in + m_byp + 2:
m_bias_dum_r = m_bias_dum
else:
m_bias_dum_r = (m_in_dum*3+m_in+m_byp+2) - (m_bias_dum*5+m_ofst+int(m_mir/2)*2+m_bias+m_byp_bias)
imspl0 = laygen.relplace("I" + objectname_pfix + 'SPL0', devname_mos_space_4x, pg, bnd_left[1].name, shape=np.array([2, 1]))
imbl0 = laygen.relplace("I" + objectname_pfix + 'BL0', devname_mos_boundary, pg, imspl0.name)
imdmyl0 = laygen.relplace("I" + objectname_pfix + 'DMYL0', devname_mos_dmy, pg, imbl0.name, shape=np.array([m_bias_dum, 1]))
imofst0 = laygen.relplace("I" + objectname_pfix + 'OFST0', devname_mos_body, pg, imdmyl0.name, shape=np.array([m_ofst, 1]))
imdmyl1 = laygen.relplace("I" + objectname_pfix + 'DMYL1', devname_mos_dmy, pg, imofst0.name, shape=np.array([m_bias_dum, 1]))
if bias_current == True:
immirl = laygen.relplace("I" + objectname_pfix + 'MIRL0', devname_mos_body, pg, imdmyl1.name, shape=np.array([int(m_mir/2), 1]))
else:
# immirl = laygen.relplace("I" + objectname_pfix + 'MIRL0', devname_mos_space_2x, pg, imdmyl1.name, shape=np.array([int(m_mir/2), 1]))
immirl = laygen.relplace("I" + objectname_pfix + 'MIRL0', devname_mos_boundary, pg, imdmyl1.name, shape=np.array([int(m_mir), 1]))
imdmyl2 = laygen.relplace("I" + objectname_pfix + 'DMYL2', devname_mos_dmy, pg, immirl.name, shape=np.array([m_bias_dum, 1]))
imbias0 = laygen.relplace("I" + objectname_pfix + 'BIAS0', devname_mos_body, pg, imdmyl2.name, shape=np.array([m_bias, 1]))
imdmyr0 = laygen.relplace("I" + objectname_pfix + 'DMYR0', devname_mos_dmy, pg, imbias0.name, shape=np.array([m_bias_dum, 1]), transform='MY')
if bias_current == True:
immirr = laygen.relplace("I" + objectname_pfix + 'MIRR0', devname_mos_body, pg, imdmyr0.name, shape=np.array([int(m_mir/2), 1]), transform='MY')
else:
# immirr = laygen.relplace("I" + objectname_pfix + 'MIRR0', devname_mos_space_2x, pg, imdmyr0.name, shape=np.array([int(m_mir/2), 1]), transform='MY')
immirr = laygen.relplace("I" + objectname_pfix + 'MIRR0', devname_mos_boundary, pg, imdmyr0.name, shape=np.array([int(m_mir), 1]), transform='MY')
imdmyr0_1 = laygen.relplace("I" + objectname_pfix + 'DMYR0_1', devname_mos_dmy, pg, immirr.name, shape=np.array([m_bias_dum, 1]), transform='MY')
imbyp_bias = laygen.relplace("I" + objectname_pfix + 'BYPBIAS', devname_mos_body, pg, imdmyr0_1.name, shape=np.array([m_byp_bias, 1]), transform='MY')
imdmyr1 = laygen.relplace("I" + objectname_pfix + 'DMYR1', devname_mos_dmy, pg, imbyp_bias.name, shape=np.array([m_bias_dum_r, 1]), transform='MY')
imbr0 = laygen.relplace("I" + objectname_pfix + 'BR0', devname_mos_boundary, pg, imdmyr1.name, transform='MY')
# generate the third tap row
tap_origin = laygen.get_inst_xy(bnd_left[2].name, pg) + laygen.get_template_size('ptap_fast_left', pg)[0]*np.array([1,0])
[itapbl2, itap2, itapbr2] = generate_tap(laygen, objectname_pfix=objectname_pfix+'PTAP1', placement_grid=pg,
routing_grid_m1m2_thick=rg_m1m2_thick,
devname_tap_boundary=devname_tap_boundary, devname_tap_body=devname_tap_body,
m=m_tap, double_rail=False, origin=tap_origin, transform='MX')
# generate the fourth tap row
tap_origin = laygen.get_inst_xy(bnd_left[3].name, pg) + laygen.get_template_size('ptap_fast_left', pg)[0]*np.array([1,0])
[itapbl3, itap3, itapbr3] = generate_tap(laygen, objectname_pfix=objectname_pfix+'PTAP2', placement_grid=pg,
routing_grid_m1m2_thick=rg_m1m2_thick,
devname_tap_boundary=devname_tap_boundary, devname_tap_body=devname_tap_body,
m=m_tap, double_rail=False, origin=tap_origin, transform='R0')
# generate the fifth input device row
if m_bias_dum * 6 + m_ofst + int(m_mir / 2) * 2 + m_bias + m_byp_bias < m_in_dum * 3 + m_in + m_byp + 2:
m_in_dum_r = m_in_dum
else:
m_in_dum_r = (m_bias_dum * 6 + m_ofst + int(m_mir / 2) * 2 + m_bias) - (m_in_dum * 2 + m_in)
imspl_in0 = laygen.relplace("I" + objectname_pfix + 'SPLin0', devname_mos_space_4x, pg, bnd_left[4].name, shape=np.array([2, 1]), transform='MX')
imbl_in0 = laygen.relplace("I" + objectname_pfix + 'BLin0', devname_mos_boundary, pg, imspl_in0.name, transform='MX')
imdmyl_in0 = laygen.relplace("I" + objectname_pfix + 'DMYLin0', devname_mos_body, pg, imbl_in0.name, shape=np.array([m_in_dum, 1]), transform='MX')
imin0 = laygen.relplace("I" + objectname_pfix + 'IN0', devname_mos_body, pg, imdmyl_in0.name, shape=np.array([m_in, 1]), transform='MX')
imdmyr_in0_0 = laygen.relplace("I" + objectname_pfix + 'DMYRin0_0', devname_mos_body, pg, imin0.name, shape=np.array([m_in_dum, 1]), transform='MX')
imbyp_bnl = laygen.relplace("I" + objectname_pfix + 'BYP_BNL', devname_mos_boundary, pg, imdmyr_in0_0.name, shape=np.array([2, 1]), transform='MX')
imbyp = laygen.relplace("I" + objectname_pfix + 'BYP', devname_mos_body, pg, imbyp_bnl.name, shape=np.array([m_byp, 1]), transform='MX')
imdmyr_bnr = laygen.relplace("I" + objectname_pfix + 'BYP_BNR', devname_mos_boundary, pg, imbyp.name, shape=np.array([2, 1]), transform='R180')
imdmyr_in0 = laygen.relplace("I" + objectname_pfix + 'DMYRin0', devname_mos_body, pg, imdmyr_bnr.name, shape=np.array([m_in_dum_r, 1]), transform='R180')
imbr_in0 = laygen.relplace("I" + objectname_pfix + 'BRin0', devname_mos_boundary, pg, imdmyr_in0.name, transform='R180')
# generate the sixth tap row
tap_origin = laygen.get_inst_xy(bnd_left[5].name, pg) + laygen.get_template_size('ptap_fast_left', pg)[0]*np.array([1,0])
[itapbl1, itap1, itapbr1] = generate_tap(laygen, objectname_pfix=objectname_pfix+'PTAP3', placement_grid=pg,
routing_grid_m1m2_thick=rg_m1m2_thick,
devname_tap_boundary=devname_tap_boundary, devname_tap_body=devname_tap_body,
m=m_tap, double_rail=False, origin=tap_origin)
# generate space
x_sp4 = laygen.get_template_size('nmos4_fast_space_nf4', gridname=pg, libname=utemplib)[0]
x_sp2 = laygen.get_template_size('nmos4_fast_space_nf2', gridname=pg, libname=utemplib)[0]
x_sp1 = laygen.get_template_size('nmos4_fast_space', gridname=pg, libname=utemplib)[0]
x_sp = x0 - x1 - laygen.get_inst_bbox(itapbr0.name, gridname=pg)[1][0]
m_sp4x = int(x_sp/x_sp1/4)
m_sp1x = int(x_sp/x_sp1) - 4*int(x_sp/x_sp1/4)
isp0_4x = laygen.relplace("I" + objectname_pfix + 'sp0_4x', devname_tap_space_4x, pg, itapbr0.name, shape=[m_sp4x,1], transform='MX')
isp0_1x = laygen.relplace("I" + objectname_pfix + 'sp0_1x', devname_tap_space_1x, pg, isp0_4x.name, shape=[m_sp1x,1], transform='MX')
isp1_4x = laygen.relplace("I" + objectname_pfix + 'sp1_4x', devname_mos_space_4x, pg, imbr0.name,
shape=[m_sp4x, 1])
isp1_1x = laygen.relplace("I" + objectname_pfix + 'sp1_1x', devname_mos_space_1x, pg, isp1_4x.name,
shape=[m_sp1x, 1])
isp2_4x = laygen.relplace("I" + objectname_pfix + 'sp2_4x', devname_tap_space_4x, pg, itapbr2.name,
shape=[m_sp4x, 1], transform='MX')
isp2_1x = laygen.relplace("I" + objectname_pfix + 'sp2_1x', devname_tap_space_1x, pg, isp2_4x.name,
shape=[m_sp1x, 1], transform='MX')
isp3_4x = laygen.relplace("I" + objectname_pfix + 'sp3_4x', devname_tap_space_4x, pg, itapbr3.name,
shape=[m_sp4x, 1])
isp3_1x = laygen.relplace("I" + objectname_pfix + 'sp3_1x', devname_tap_space_1x, pg, isp3_4x.name,
shape=[m_sp1x, 1])
isp4_4x = laygen.relplace("I" + objectname_pfix + 'sp4_4x', devname_mos_space_4x, pg, imbr_in0.name,
shape=[m_sp4x, 1], transform='MX')
isp4_1x = laygen.relplace("I" + objectname_pfix + 'sp4_1x', devname_mos_space_1x, pg, isp4_4x.name,
shape=[m_sp1x, 1], transform='MX')
isp5_4x = laygen.relplace("I" + objectname_pfix + 'sp5_4x', devname_tap_space_4x, pg, itapbr1.name, shape=[m_sp4x,1])
isp5_1x = laygen.relplace("I" + objectname_pfix + 'sp5_1x', devname_tap_space_1x, pg, isp5_4x.name, shape=[m_sp1x,1])
# route
# VBIAS
for i in range(m_bias-1):
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imbias0.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=imbias0.name, refpinname1='G0', refinstindex1=np.array([i+1, 0]),
via0=[0,0], via1=[0,0])
rvb = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, -10]), gridname0=rg_m2m3,
refinstname0=imbias0.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=imbias0.name, refpinname1='G0', refinstindex1=np.array([i, 0]), via0=[0,0])
laygen.boundary_pin_from_rect(rvb, rg_m2m3, 'VBIAS'+str(i), laygen.layers['pin'][3], size=4, direction='bottom', netname='VBIAS')
if bias_current == True:
if int(m_mir/2)-1>0:
for i in range(int(m_mir/2)-1):
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=immirl.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=immirl.name, refpinname1='G0', refinstindex1=np.array([i+1, 0]),
via0=[0,0], via1=[0,0])
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=immirr.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=immirr.name, refpinname1='G0', refinstindex1=np.array([i + 1, 0]),
via0=[0, 0], via1=[0, 0])
else:
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=immirl.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
refinstname1=immirl.name, refpinname1='G0', refinstindex1=np.array([1, 0]),
via0=[0, 0])
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=immirr.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
refinstname1=immirr.name, refpinname1='G0', refinstindex1=np.array([1, 0]),
via0=[0, 0])
for i in range(int(m_mir / 2)): #diode connected
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=immirl.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=immirl.name, refpinname1='D0', refinstindex1=np.array([i, 0]))
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=immirr.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=immirr.name, refpinname1='D0', refinstindex1=np.array([i, 0]))
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imbias0.name, refpinname0='G0', refinstindex0=np.array([m_bias-1, 0]),
refinstname1=immirr.name, refpinname1='G0', refinstindex1=np.array([0, 0]))
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=immirl.name, refpinname0='G0', refinstindex0=np.array([int(m_mir/2)-1, 0]),
refinstname1=imbias0.name, refpinname1='G0', refinstindex1=np.array([0, 0]))
# IBYP_BIAS routing
for i in range(m_byp_bias-1):
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imbyp_bias.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=imbyp_bias.name, refpinname1='G0', refinstindex1=np.array([i+1, 0]),
via0=[0,0], via1=[0,0])
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imbyp_bias.name, refpinname0='D0', refinstindex0=np.array([i, 0]),
refinstname1=imbyp_bias.name, refpinname1='D0', refinstindex1=np.array([i+1, 0]),
via0=[0,0], via1=[0,0])
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imbyp_bias.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
refinstname1=imbias0.name, refpinname1='D0', refinstindex1=np.array([0, 0]))
for i in range(m_bias-1):
laygen.via(None, np.array([0, 0]), rg_m2m3, refinstname=imbias0.name, refpinname='S1', refinstindex=np.array([i, 0]))
# IBIAS/IMIR/IBYP_BIAS VSS connection
for i in range(m_tap):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -3]), gridname0=rg_m1m2,
refinstname0=itap0.name, refpinname0='TAP0', refinstindex0=np.array([i, 0]),
refinstname1=itap0.name, refpinname1='TAP0', refinstindex1=np.array([i, 0]))
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -3]), gridname0=rg_m1m2,
refinstname0=itap0.name, refpinname0='TAP2', refinstindex0=np.array([m_tap-1, 0]),
refinstname1=itap0.name, refpinname1='TAP2', refinstindex1=np.array([m_tap-1, 0]))
# IBIAS/IMIR Dummy VSS connection
idmy_list = [imdmyl0, imdmyl1, imdmyl2, imdmyr0, imdmyr0_1]
for i in range(len(idmy_list)):
for j in range(m_bias_dum):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -3]), gridname0=rg_m1m2,
refinstname0=idmy_list[i].name, refpinname0='D0', refinstindex0=np.array([j, 0]),
refinstname1=idmy_list[i].name, refpinname1='D0', refinstindex1=np.array([j, 0]))
for i in range(m_bias_dum_r):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -3]), gridname0=rg_m1m2,
refinstname0=imdmyr1.name, refpinname0='D0', refinstindex0=np.array([i, 0]),
refinstname1=imdmyr1.name, refpinname1='D0', refinstindex1=np.array([i, 0]))
# Output
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
refinstname0=imdmyl_in0.name, refpinname0='S0', refinstindex0=np.array([m_in_dum - 1, 0]),
refinstname1=imdmyr_in0_0.name, refpinname1='S0', refinstindex1=np.array([m_in_dum - 1, 0]))
for i in range(m_in):
laygen.via(None, np.array([0, 1]), rg_m1m2, refinstname=imin0.name, refpinname='D0',refinstindex=np.array([i, 0]))
ro_v0, ro_h0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]),
gridname0=rg_m2m3, refinstname0=imin0.name, refpinname0='D0', refinstindex0=np.array([i, 0]),
refinstname1 = imbias0.name, refpinname1 = 'D0', refinstindex1 = np.array([0, 0]), via0=[0,0])
# laygen.boundary_pin_from_rect(ro_v0, rg_m2m3, 'out'+str(i), laygen.layers['pin'][3], size=4, direction='top', netname='out')
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
refinstname0=imdmyl0.name, refpinname0='S0', refinstindex0=np.array([m_bias_dum - 1, 0]),
refinstname1=imdmyr1.name, refpinname1='S0', refinstindex1=np.array([0, 0]))
for i in range(m_bias):
laygen.via(None, np.array([0, 1]), rg_m1m2, refinstname=imbias0.name, refpinname='D0',refinstindex=np.array([i, 0]))
ro_v0, ro_h0 = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][2], xy0=np.array([0, 1]),
xy1=np.array([0, 1]), gridname0=rg_m2m3, refinstname0=imbias0.name, refpinname0='D0',
refinstindex0=np.array([i, 0]), refinstname1=imin0.name, refpinname1='D0',
refinstindex1=np.array([0, 0]), via0=[0, 0])
laygen.via(None, np.array([0, 0]), rg_m3m4_thick, refinstname=imbias0.name, refpinname='D0',refinstindex=np.array([i, 0]))
# laygen.boundary_pin_from_rect(ro_v0, rg_m2m3, 'out' + str(m_in+i), laygen.layers['pin'][3], size=4, direction='top',
# netname='out')
ro_m4=laygen.route(None, laygen.layers['metal'][4], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m3m4_thick,
refinstname0=imbias0.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
refinstname1=imbias0.name, refpinname1='D0', refinstindex1=np.array([m_bias-1, 0]))
laygen.pin(name='out', layer=laygen.layers['pin'][4], xy=laygen.get_rect_xy(ro_m4.name, rg_m3m4_thick), gridname=rg_m3m4_thick)
for i in range(m_ofst):
laygen.via(None, np.array([0, 1]), rg_m1m2, refinstname=imofst0.name, refpinname='D0',refinstindex=np.array([i, 0]))
# Input
for i in range(m_in-1):
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imin0.name, refpinname0='G0', refinstindex0=np.array([i, 0]), via0=[0,0],
refinstname1=imin0.name, refpinname1='G0', refinstindex1=np.array([i+1, 0]), via1=[0,0])
rin_m3 = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, 3]), gridname0=rg_m2m3,
refinstname0=imin0.name, refpinname0='G0', refinstindex0=np.array([0, 0]), via0=[0, 0],
refinstname1=imin0.name, refpinname1='G0', refinstindex1=np.array([0, 0]))
rin_m4 = laygen.route(None, laygen.layers['metal'][4], xy0=laygen.get_rect_xy(rin_m3.name, rg_m3m4)[0],
xy1=laygen.get_rect_xy(rin_m3.name, rg_m3m4)[0]-np.array([5, 0]), gridname0=rg_m3m4,
via0=[0, 0])
rin = laygen.route(None, laygen.layers['metal'][5], xy0=laygen.get_rect_xy(rin_m4.name, rg_m4m5_thick)[1],
xy1=laygen.get_rect_xy(rin_m4.name, rg_m4m5_thick)[1]+np.array([0, 6]), gridname0=rg_m4m5_thick,
via0=[0, 0])
# rin_m4, rin = laygen.route_hv(laygen.layers['metal'][4], laygen.layers['metal'][5], xy0=np.array([1, 3]),
# xy1=np.array([4, -4]), gridname0=rg_m3m4, gridname1=rg_m4m5_thick,
# refinstname0=imin0.name, refpinname0='G0', refinstindex0=np.array([0, 0]),
# refinstname1=imin0.name, refpinname1='G0', refinstindex1=np.array([0, 0]))
laygen.boundary_pin_from_rect(rin, rg_m4m5_thick, 'in', laygen.layers['pin'][5], size=4, direction='top')
# In-Out bypass
for i in range(m_byp):
if not i == m_byp-1:
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imbyp.name, refpinname0='G0', refinstindex0=np.array([i, 0]), via0=[0,0],
refinstname1=imbyp.name, refpinname1='G0', refinstindex1=np.array([i+1, 0]), via1=[0,0])
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
refinstname0=imbyp.name, refpinname0='D0', refinstindex0=np.array([i, 0]), via0=[0, 0],
refinstname1=imbyp.name, refpinname1='D0', refinstindex1=np.array([i + 1, 0]), via1=[0, 0])
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imbyp.name, refpinname0='S0', refinstindex0=np.array([i, 0]), via0=[0, 0],
refinstname1=imbyp.name, refpinname1='S0', refinstindex1=np.array([i + 1, 0]), via1=[0, 0])
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 1]), xy1=np.array([0, 1]), gridname0=rg_m1m2,
refinstname0=imbyp.name, refpinname0='D0', refinstindex0=np.array([0, 0]),
refinstname1=imin0.name, refpinname1='D0', refinstindex1=np.array([0, 0]))
# laygen.route_hv(laygen.layers['metal'][2], laygen.layers['metal'][3], xy0=np.array([0, 0]),
# xy1=np.array([1, 0]), gridname0=rg_m2m3,
# refinstname0=imbyp.name, refpinname0='S0', refinstindex0=np.array([0, 0]),
# refinstname1=imin0.name, refpinname1='G0', refinstindex1=np.array([0, 0]))
laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][2], xy1=np.array([0, 0]),
xy0=np.array([1, 0]), gridname0=rg_m2m3,
refinstname1=imbyp.name, refpinname1='S0', refinstindex1=np.array([0, 0]),
refinstname0=imin0.name, refpinname0='G0', refinstindex0=np.array([0, 0]))
# Input dummy
idmy_in_list = [imdmyl_in0, imdmyr_in0_0]
for j in range(len(idmy_in_list)):
for i in range(m_in_dum-1):
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=idmy_in_list[j].name, refpinname0='G0', refinstindex0=np.array([i, 0]), via0=[0,0],
refinstname1=idmy_in_list[j].name, refpinname1='G0', refinstindex1=np.array([i+1, 0]), via1=[0,0])
for i in range(m_in_dum):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=idmy_in_list[j].name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=idmy_in_list[j].name, refpinname1='D0', refinstindex1=np.array([i, 0]))
# laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][2], xy0=np.array([0, 0]),
# xy1=np.array([0, 0]), gridname0=rg_m2m3, gridname1=rg_m2m3_thick,
# refinstname0=idmy_in_list[j].name, refpinname0='G0',
# refinstindex0=np.array([0, 0]), refinstname1=itap1.name, refpinname1='TAP0',
# refinstindex1=np.array([0, 0]), via0=[0, 0]) #gate to VSS
laygen.route_hv(laygen.layers['metal'][2], laygen.layers['metal'][3], xy0=np.array([0, 0]),
xy1=np.array([0, 0]), gridname0=rg_m2m3_thick, gridname1=rg_m2m3,
refinstname0=itap1.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=idmy_in_list[j].name, refpinname1='G0', refinstindex1=np.array([0, 0]), via1=[0, 0]
) #gate to VSS
for i in range(m_in_dum_r-1):
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imdmyr_in0.name, refpinname0='G0', refinstindex0=np.array([i, 0]), via0=[0,0],
refinstname1=imdmyr_in0.name, refpinname1='G0', refinstindex1=np.array([i+1, 0]), via1=[0,0])
for i in range(m_in_dum_r):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imdmyr_in0.name, refpinname0='G0', refinstindex0=np.array([i, 0]),
refinstname1=imdmyr_in0.name, refpinname1='D0', refinstindex1=np.array([i, 0]))
# laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
# refinstname0=imdmyr_in0.name, refpinname0='S0', refinstindex0=np.array([i, 0]), via0=[0,0],
# refinstname1=imdmyr_in0.name, refpinname1='S0', refinstindex1=np.array([i+1, 0]), via1=[0,0])
# laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][2], xy0=np.array([0, 0]),
# xy1=np.array([0, 0]), gridname0=rg_m2m3, gridname1=rg_m2m3_thick,
# refinstname0=imdmyr_in0.name, refpinname0='G0',
# refinstindex0=np.array([1, 0]), refinstname1=itap1.name, refpinname1='TAP0',
# refinstindex1=np.array([0, 0]), via0=[0, 0]) #gate to VSS
laygen.route_hv(laygen.layers['metal'][2], laygen.layers['metal'][3], xy0=np.array([0, 0]),
xy1=np.array([0, 0]), gridname0=rg_m2m3_thick, gridname1=rg_m2m3,
refinstname0=itap1.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=imdmyr_in0.name, refpinname1='G0', refinstindex1=np.array([0, 0]), via1=[0, 0]
) # gate to VSS
# Voff
for i in range(m_ofst):
laygen.route(None, laygen.layers['metal'][2], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m1m2,
refinstname0=imofst0.name, refpinname0='G0', refinstindex0=np.array([i, 0]), via0=[0,0],
refinstname1=imofst0.name, refpinname1='G0', refinstindex1=np.array([i+1, 0]))
roff = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([1, 0]), xy1=np.array([1, -10]), gridname0=rg_m2m3,
refinstname0=imofst0.name, refpinname0='G0', refinstindex0=np.array([0, 0]), via0=[0, 0],
refinstname1=imofst0.name, refpinname1='G0', refinstindex1=np.array([0, 0]))
laygen.boundary_pin_from_rect(roff, rg_m2m3, 'Voff', laygen.layers['pin'][3], size=4, direction='bottom')
# Bypass signal
# rbyp, rv = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([0, 0]), xy1=np.array([0, 0]), gridname0=rg_m2m3,
# refinstname0=imbyp.name, refpinname0='G0', refinstindex0=np.array([0, 0]), via0=[0, 0],
# refinstname1=imbyp_bias.name, refpinname1='G0', refinstindex1=np.array([0, 0]), via1=[0, 0])
rbyp, rv = laygen.route_vh(laygen.layers['metal'][3], laygen.layers['metal'][2], xy0=np.array([0, 0]),
xy1=np.array([0, 0]), gridname0=rg_m2m3, refinstname0=imbyp.name, refpinname0='G0',
refinstindex0=np.array([0, 0]), refinstname1=imbyp_bias.name, refpinname1='G0',
refinstindex1=np.array([0, 0]), via0=[0, 0]) #gate to VSS
laygen.boundary_pin_from_rect(rbyp, rg_m2m3, 'bypass', laygen.layers['pin'][3], size=4, direction='bottom')
# Input device VDD connection
for i in range(m_in):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2,
refinstname0=imin0.name, refpinname0='S0', refinstindex0=np.array([i, 0]),
refinstname1=imin0.name, refpinname1='S0', refinstindex1=np.array([i, 0]), via1=[0,0])
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2,
refinstname0=imin0.name, refpinname0='S1', refinstindex0=np.array([i, 0]),
refinstname1=imin0.name, refpinname1='S1', refinstindex1=np.array([i, 0]), via1=[0,0])
# Input dummy VDD connection
for j in range(len(idmy_in_list)):
for i in range(m_in_dum):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2,
refinstname0=idmy_in_list[j].name, refpinname0='S0', refinstindex0=np.array([i, 0]),
refinstname1=idmy_in_list[j].name, refpinname1='S0', refinstindex1=np.array([i, 0]), via1=[0, 0])
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]),
gridname0=rg_m1m2,
refinstname0=idmy_in_list[j].name, refpinname0='S1', refinstindex0=np.array([i, 0]),
refinstname1=idmy_in_list[j].name, refpinname1='S1', refinstindex1=np.array([i, 0]), via1=[0, 0])
# Input dummy R VDD connection
for i in range(m_in_dum_r):
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2,
refinstname0=imdmyr_in0.name, refpinname0='S0', refinstindex0=np.array([i, 0]),
refinstname1=imdmyr_in0.name, refpinname1='S0', refinstindex1=np.array([i, 0]), via1=[0, 0])
laygen.route(None, laygen.layers['metal'][1], xy0=np.array([0, 0]), xy1=np.array([0, -1]), gridname0=rg_m1m2,
refinstname0=imdmyr_in0.name, refpinname0='S1', refinstindex0=np.array([i, 0]),
refinstname1=imdmyr_in0.name, refpinname1='S1', refinstindex1=np.array([i, 0]), via1=[0, 0])
# VSS/VDD
num_vert_pwr_l = 3
num_vert_pwr_r = 0 + m_sp4x*2
# M2 VSS rails
rvss0 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 0]), xy1=np.array([2*num_vert_pwr_r, 0]), gridname0=rg_m2m3_thick,
refinstname0=itap0.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap0.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
rvss0_0 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 1]), xy1=np.array([2*num_vert_pwr_r, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap0.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap0.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
rvss1 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 0]), xy1=np.array([2*num_vert_pwr_r, 0]), gridname0=rg_m2m3_thick,
refinstname0=itap1.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap1.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
rvss1_0 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 1]), xy1=np.array([2*num_vert_pwr_r, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap1.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap1.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
rvss2 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 0]), xy1=np.array([2*num_vert_pwr_r, 0]), gridname0=rg_m2m3_thick,
refinstname0=itap2.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap2.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
rvss2_0 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 1]), xy1=np.array([2*num_vert_pwr_r, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap2.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap2.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
rvss3 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 0]), xy1=np.array([2*num_vert_pwr_r, 0]), gridname0=rg_m2m3_thick,
refinstname0=itap3.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap3.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
rvss3_0 = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l, 1]), xy1=np.array([2*num_vert_pwr_r, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap3.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap3.name, refpinname1='TAP1', refinstindex1=np.array([m_tap-1, 0]))
# M2 VDD rail
rvdd = laygen.route(None, laygen.layers['metal'][2], xy0=np.array([-2*num_vert_pwr_l-8, -1]), xy1=np.array([-2*num_vert_pwr_r, -1]), gridname0=rg_m2m3,
refinstname0=imdmyl_in0.name, refpinname0='S0', refinstindex0=np.array([0, 0]),
refinstname1=imdmyr_in0.name, refpinname1='S0', refinstindex1=np.array([0, 0]))
# M3 VDD/VSS vertical
for i in range(num_vert_pwr_l):
rvvss_l = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+1, 1]), xy1=np.array([2*i+1, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap0.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap1.name, refpinname1='TAP0', refinstindex1=np.array([0, 0]))
rvvdd_l = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2*i+0, 1]), xy1=np.array([2*i+0, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap0.name, refpinname0='TAP0', refinstindex0=np.array([0, 0]),
refinstname1=itap1.name, refpinname1='TAP0', refinstindex1=np.array([0, 0]))
laygen.via(None, np.array([2*i+1, 1]), refinstname=itap0.name, refpinname='TAP0', refinstindex=np.array([0, 0]),
gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+1, 0]), refinstname=itap0.name, refpinname='TAP0', refinstindex=np.array([0, 0]),
gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+1, 1]), refinstname=itap1.name, refpinname='TAP0',
refinstindex=np.array([0, 0]), gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+1, 0]), refinstname=itap1.name, refpinname='TAP0', refinstindex=np.array([0, 0]),
gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+1, 1]), refinstname=itap2.name, refpinname='TAP0', refinstindex=np.array([0, 0]),
gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+1, 0]), refinstname=itap2.name, refpinname='TAP0', refinstindex=np.array([0, 0]),
gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+1, 1]), refinstname=itap3.name, refpinname='TAP0',
refinstindex=np.array([0, 0]), gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+1, 0]), refinstname=itap3.name, refpinname='TAP0', refinstindex=np.array([0, 0]),
gridname=rg_m2m3_thick)
laygen.via(None, np.array([2*i+0-8, -1]), refinstname=imdmyl_in0.name, refpinname='S0', refinstindex=np.array([0, 0]),
gridname=rg_m2m3)
laygen.pin(name='VSS'+str(i), layer=laygen.layers['pin'][3], xy=laygen.get_rect_xy(rvvss_l.name, rg_m2m3_thick),
gridname=rg_m2m3_thick, netname='VSS')
laygen.pin(name='VDD'+str(i), layer=laygen.layers['pin'][3], xy=laygen.get_rect_xy(rvvdd_l.name, rg_m2m3_thick),
gridname=rg_m2m3_thick, netname='VDD')
for i in range(num_vert_pwr_r):
rvvss_r = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2 * i + 1, 1]),
xy1=np.array([2 * i + 1, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap0.name, refpinname0='TAP2', refinstindex0=np.array([m_tap-1, 0]),
refinstname1=itap1.name, refpinname1='TAP2', refinstindex1=np.array([m_tap-1, 0]))
rvvdd_r = laygen.route(None, laygen.layers['metal'][3], xy0=np.array([2 * i + 2, 1]),
xy1=np.array([2 * i + 2, 1]), gridname0=rg_m2m3_thick,
refinstname0=itap0.name, refpinname0='TAP2', refinstindex0=np.array([m_tap-1, 0]),
refinstname1=itap1.name, refpinname1='TAP2', refinstindex1=np.array([m_tap-1, 0]))
laygen.via(None, np.array([2 * i + 1, 1]), refinstname=itap0.name, refpinname='TAP2',
refinstindex=np.array([m_tap-1, 0]), gridname=rg_m2m3_thick)
laygen.via(None, np.array([2 * i + 1, 0]), refinstname=itap0.name, refpinname='TAP2',
refinstindex=np.array([m_tap-1, 0]), gridname=rg_m2m3_thick)
laygen.via(None, np.array([2 * i + 1, 1]), refinstname=itap1.name, refpinname='TAP2',
refinstindex=np.array([m_tap-1, 0]), gridname=rg_m2m3_thick)
laygen.via(None, np.array([2 * i + 1, 0]), refinstname=itap1.name, refpinname='TAP2',
refinstindex=np.array([m_tap-1, 0]), gridname=rg_m2m3_thick)
laygen.via(None, np.array([-2 * i - 2, -1]), refinstname=imdmyr_in0.name, refpinname='S0',
refinstindex=np.array([0, 0]), gridname=rg_m2m3)
laygen.pin(name='VSS' + str(num_vert_pwr_l+i), layer=laygen.layers['pin'][3], xy=laygen.get_rect_xy(rvvss_r.name, rg_m2m3_thick),
gridname=rg_m2m3_thick, netname='VSS')
laygen.pin(name='VDD' + str(num_vert_pwr_l+i), layer=laygen.layers['pin'][3], xy=laygen.get_rect_xy(rvvdd_r.name, rg_m2m3_thick),
gridname=rg_m2m3_thick, netname='VDD')
if __name__ == '__main__':
laygen = laygo.GridLayoutGenerator(config_file="laygo_config.yaml")
import imp
try:
imp.find_module('bag')
laygen.use_phantom = False
except ImportError:
laygen.use_phantom = True
tech=laygen.tech
utemplib = tech+'_microtemplates_dense'
logictemplib = tech+'_logic_templates'
laygen.load_template(filename=tech+'_microtemplates_dense_templates.yaml', libname=utemplib)
laygen.load_grid(filename=tech+'_microtemplates_dense_grids.yaml', libname=utemplib)
laygen.load_template(filename=logictemplib+'.yaml', libname=logictemplib)
laygen.templates.sel_library(utemplib)
laygen.grids.sel_library(utemplib)
#library load or generation
workinglib = 'adc_sar_generated'
laygen.add_library(workinglib)
laygen.sel_library(workinglib)
if os.path.exists(workinglib+'.yaml'): #generated layout file exists
laygen.load_template(filename=workinglib+'.yaml', libname=workinglib)
laygen.templates.sel_library(utemplib)
#grid
pg = 'placement_basic' #placement grid
rg_m1m2 = 'route_M1_M2_cmos'
rg_m1m2_thick = 'route_M1_M2_basic_thick'
rg_m2m3 = 'route_M2_M3_cmos'
rg_m2m3_thick = 'route_M2_M3_thick_basic'
rg_m3m4 = 'route_M3_M4_basic'
rg_m3m4_thick = 'route_M3_M4_basic_thick'
rg_m4m5 = 'route_M4_M5_basic'
rg_m4m5_thick = 'route_M4_M5_basic_thick'
rg_m5m6 = 'route_M5_M6_basic'
rg_m1m2_pin = 'route_M1_M2_basic'
rg_m2m3_pin = 'route_M2_M3_basic'
mycell_list = []
    #source follower generation (with boundary)
cellname = 'sourceFollower'
print(cellname+" generating")
mycell_list.append(cellname)
m_sa=8
m_rst_sa=8
m_rgnn_sa=2
m_buf_sa=8
#load from preset
load_from_file=True
yamlfile_spec="adc_sar_spec.yaml"
yamlfile_size="adc_sar_size.yaml"
if load_from_file==True:
with open(yamlfile_spec, 'r') as stream:
specdict = yaml.load(stream)
with open(yamlfile_size, 'r') as stream:
sizedict = yaml.load(stream)
m_mir=sizedict['sourceFollower']['m_mirror']
m_bias=sizedict['sourceFollower']['m_bias']
m_in=sizedict['sourceFollower']['m_in']
m_ofst=sizedict['sourceFollower']['m_off']
m_in_dum=sizedict['sourceFollower']['m_in_dum']
m_bias_dum=sizedict['sourceFollower']['m_bias_dum']
m_byp=sizedict['sourceFollower']['m_byp']
m_byp_bias=sizedict['sourceFollower']['m_byp_bias']
bias_current=sizedict['sourceFollower']['bias_current']
laygen.add_cell(cellname)
laygen.sel_cell(cellname)
sf_origin=np.array([0, 0])
    #source follower generation
generate_source_follower(laygen, objectname_pfix='SF0', placement_grid=pg, routing_grid_m1m2=rg_m1m2,
devname_mos_boundary='nmos4_fast_boundary', devname_mos_body='nmos4_fast_center_nf2',
devname_mos_dmy='nmos4_fast_dmy_nf2', devname_tap_boundary='ptap_fast_boundary', devname_tap_body='ptap_fast_center_nf2',
devname_mos_space_4x='nmos4_fast_space_nf4', devname_mos_space_2x='nmos4_fast_space_nf2', devname_mos_space_1x='nmos4_fast_space',
devname_tap_space_4x='ptap_fast_space_nf4', devname_tap_space_2x='ptap_fast_space_nf2', devname_tap_space_1x='ptap_fast_space',
m_mir=m_mir, m_bias=m_bias, m_in=m_in,
m_ofst=m_ofst, m_bias_dum=m_bias_dum, m_in_dum=m_in_dum, m_byp=m_byp, m_byp_bias=m_byp_bias,
bias_current=bias_current, origin=sf_origin)
laygen.add_template_from_cell()
laygen.save_template(filename=workinglib+'.yaml', libname=workinglib)
#bag export, if bag does not exist, gds export
import imp
try:
imp.find_module('bag')
import bag
prj = bag.BagProject()
for mycell in mycell_list:
laygen.sel_cell(mycell)
laygen.export_BAG(prj, array_delimiter=['[', ']'])
except ImportError:
laygen.export_GDS('output.gds', cellname=mycell_list, layermapfile=tech+".layermap") # change layermapfile
| 75.818182
| 159
| 0.621885
|
9a71bad8ed1c7a2462aea183009fd4a930e10790
| 11,152
|
py
|
Python
|
tethysapp/earth_engine/gee/methods.py
|
jhoanse/tethysapp-earth_engine
|
c3fab4b2f9ba4ab1cfe4c993a13794f9195c811f
|
[
"BSD-3-Clause"
] | null | null | null |
tethysapp/earth_engine/gee/methods.py
|
jhoanse/tethysapp-earth_engine
|
c3fab4b2f9ba4ab1cfe4c993a13794f9195c811f
|
[
"BSD-3-Clause"
] | null | null | null |
tethysapp/earth_engine/gee/methods.py
|
jhoanse/tethysapp-earth_engine
|
c3fab4b2f9ba4ab1cfe4c993a13794f9195c811f
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import math
import logging
import ee
from ee.ee_exception import EEException
import geojson
import pandas as pd
from . import cloud_mask as cm
from .products import EE_PRODUCTS
from ..app import EarthEngine as app
log = logging.getLogger(f'tethys.apps.{__name__}')
service_account = app.get_custom_setting('service_account_email')
private_key_path = app.get_custom_setting('private_key_file')
if service_account and private_key_path and os.path.isfile(private_key_path):
try:
credentials = ee.ServiceAccountCredentials(service_account, private_key_path)
ee.Initialize(credentials)
log.info('Successfully initialized GEE using service account.')
except EEException as e:
log.warning('Unable to initialize GEE using service account. If installing ignore this warning.')
else:
try:
ee.Initialize()
except EEException as e:
log.warning('Unable to initialize GEE with local credentials. If installing ignore this warning.')
def image_to_map_id(image_name, vis_params={}):
"""
Get map_id parameters
"""
try:
ee_image = ee.Image(image_name)
map_id = ee_image.getMapId(vis_params)
tile_url = map_id['tile_fetcher'].url_format
return tile_url
except EEException:
log.exception('An error occurred while attempting to retrieve the map id.')
def get_image_collection_asset(request, platform, sensor, product, date_from=None, date_to=None, reducer='median'):
"""
Get tile url for image collection asset.
"""
ee_product = EE_PRODUCTS[platform][sensor][product]
collection = ee_product['collection']
index = ee_product.get('index', None)
vis_params = ee_product.get('vis_params', {})
cloud_mask = ee_product.get('cloud_mask', None)
log.debug(f'Image Collection Name: {collection}')
log.debug(f'Band Selector: {index}')
log.debug(f'Vis Params: {vis_params}')
try:
ee_collection = ee.ImageCollection(collection)
if date_from and date_to:
ee_filter_date = ee.Filter.date(date_from, date_to)
ee_collection = ee_collection.filter(ee_filter_date)
if index:
ee_collection = ee_collection.select(index)
if cloud_mask:
cloud_mask_func = getattr(cm, cloud_mask, None)
if cloud_mask_func:
ee_collection = ee_collection.map(cloud_mask_func)
if reducer:
ee_collection = getattr(ee_collection, reducer)()
# Attempt to clip the image by the boundary provided by the user
clip_features = get_boundary_fc_for_user(request.user)
if clip_features:
ee_collection = ee_collection.clipToCollection(clip_features)
tile_url = image_to_map_id(ee_collection, vis_params)
return tile_url
except EEException:
log.exception('An error occurred while attempting to retrieve the image collection asset.')
def get_time_series_from_image_collection(platform, sensor, product, index_name, scale=30, geometry=None,
date_from=None, date_to=None, reducer='median', orient='df'):
"""
Derive time series at given geometry.
"""
time_series = []
ee_product = EE_PRODUCTS[platform][sensor][product]
collection_name = ee_product['collection']
if not isinstance(geometry, geojson.GeometryCollection):
raise ValueError('Geometry must be a valid GeoJSON GeometryCollection.')
for geom in geometry.geometries:
log.debug(f'Computing Time Series for Geometry of Type: {geom.type}')
try:
ee_geometry = None
if isinstance(geom, geojson.Polygon):
ee_geometry = ee.Geometry.Polygon(geom.coordinates)
elif isinstance(geom, geojson.Point):
ee_geometry = ee.Geometry.Point(geom.coordinates)
else:
raise ValueError('Only Points and Polygons are supported.')
if date_from is not None:
if index_name is not None:
indexCollection = ee.ImageCollection(collection_name) \
.filterDate(date_from, date_to) \
.select(index_name)
else:
indexCollection = ee.ImageCollection(collection_name) \
.filterDate(date_from, date_to)
else:
indexCollection = ee.ImageCollection(collection_name)
def get_index(image):
if reducer:
the_reducer = getattr(ee.Reducer, reducer)()
if index_name is not None:
index_value = image.reduceRegion(the_reducer, ee_geometry, scale).get(index_name)
else:
index_value = image.reduceRegion(the_reducer, ee_geometry, scale)
date = image.get('system:time_start')
index_image = ee.Image().set('indexValue', [ee.Number(date), index_value])
return index_image
index_collection = indexCollection.map(get_index)
index_collection_agg = index_collection.aggregate_array('indexValue')
values = index_collection_agg.getInfo()
log.debug('Values acquired.')
df = pd.DataFrame(values, columns=['Time', index_name.replace("_", " ")])
if orient == 'df':
time_series.append(df)
else:
time_series.append(df.to_dict(orient=orient))
except EEException:
log.exception('An error occurred while attempting to retrieve the time series.')
log.debug(f'Time Series: {time_series}')
return time_series
def upload_shapefile_to_gee(user, shp_file):
"""
Upload a shapefile to Google Earth Engine as an asset.
Args:
user (django.contrib.auth.User): the request user.
shp_file (shapefile.Reader): A shapefile reader object.
"""
features = []
fields = shp_file.fields[1:]
field_names = [field[0] for field in fields]
# Convert Shapefile to ee.Features
for record in shp_file.shapeRecords():
# First convert to geojson
attributes = dict(zip(field_names, record.record))
geojson_geom = record.shape.__geo_interface__
geojson_feature = {
'type': 'Feature',
'geometry': geojson_geom,
'properties': attributes
}
# Create ee.Feature from geojson (this is the Upload, b/c ee.Feature is a server object)
features.append(ee.Feature(geojson_feature))
feature_collection = ee.FeatureCollection(features)
    # Get unique folder for each user to store the boundary asset
user_boundary_asset_path = get_user_boundary_path(user)
# Overwrite an existing asset with this name by deleting it first
try:
ee.batch.data.deleteAsset(user_boundary_asset_path)
except EEException as e:
# Nothing to delete, so pass
if 'Asset not found' not in str(e):
log.exception('Encountered an unhandled EEException.')
raise e
# Export ee.Feature to ee.Asset
task = ee.batch.Export.table.toAsset(
collection=feature_collection,
description='uploadToTableAsset',
assetId=user_boundary_asset_path
)
task.start()
def get_asset_dir_for_user(user):
"""
Get a unique asset directory for given user.
Args:
user (django.contrib.auth.User): the request user.
Returns:
str: asset directory path for given user.
"""
asset_roots = ee.batch.data.getAssetRoots()
    if len(asset_roots) < 1:
        # Initialize the asset root directory if one doesn't exist already,
        # then re-fetch the roots so the lookup below does not index an empty list
        ee.batch.data.createAssetHome('users/earth_engine_app')
        asset_roots = ee.batch.data.getAssetRoots()
    asset_root_dir = asset_roots[0]['id']
earth_engine_root_dir = os.path.join(asset_root_dir, 'earth_engine_app')
user_root_dir = os.path.join(earth_engine_root_dir, user.username)
# Create earth engine directory, will raise exception if it already exists
try:
ee.batch.data.createAsset({
'type': 'Folder',
'name': earth_engine_root_dir
})
except EEException as e:
if 'Cannot overwrite asset' not in str(e):
raise e
# Create user directory, will raise exception if it already exists
try:
ee.batch.data.createAsset({
'type': 'Folder',
'name': user_root_dir
})
except EEException as e:
if 'Cannot overwrite asset' not in str(e):
raise e
return user_root_dir
def get_user_boundary_path(user):
"""
Get a unique path for the user boundary asset.
Args:
user (django.contrib.auth.User): the request user.
Returns:
str: the unique path for the user boundary asset.
"""
user_asset_dir = get_asset_dir_for_user(user)
user_boundary_asset_path = os.path.join(user_asset_dir, 'boundary')
return user_boundary_asset_path
def get_boundary_fc_for_user(user):
"""
    Get the boundary FeatureCollection for the given user if it exists.
Args:
user (django.contrib.auth.User): the request user.
Returns:
ee.FeatureCollection: boundary feature collection or None
"""
try:
boundary_path = get_user_boundary_path(user)
        # If no boundary exists for the user, an exception will occur when calling this and clipping will be skipped
ee.batch.data.getAsset(boundary_path)
# Add the clip option
fc = ee.FeatureCollection(boundary_path)
return fc
except EEException:
pass
return None
def get_boundary_fc_props_for_user(user):
"""
Get various properties of the boundary FeatureCollection.
Args:
user (django.contrib.auth.User): Get the properties of the boundary uploaded by this user.
Returns:
dict<zoom,bbox,centroid>: Dictionary containing the centroid and bounding box of the boundary and the approximate OpenLayers zoom level to frame the boundary around the centroid. Empty dictionary if no boundary FeatureCollection is found for the given user.
"""
fc = get_boundary_fc_for_user(user)
if not fc:
return dict()
# Compute bounding box
bounding_rect = fc.geometry().bounds().getInfo()
bounding_coords = bounding_rect.get('coordinates')[0]
# Derive bounding box from two corners of the bounding rectangle
bbox = [bounding_coords[0][0], bounding_coords[0][1], bounding_coords[2][0], bounding_coords[2][1]]
# Get centroid
centroid = fc.geometry().centroid().getInfo()
# Compute length diagonal of bbox for zoom calculation
diag = math.sqrt((bbox[0] - bbox[2])**2 + (bbox[1] - bbox[3])**2)
# Found the diagonal length and zoom level for US and Kenya boundaries
# Used equation of a line to develop the relationship between zoom and diagonal of bounding box
zoom = round((-0.0701 * diag) + 8.34, 0)
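    # Illustrative (assumed numbers): a bbox diagonal of roughly 60 degrees gives
    # zoom = round(-0.0701 * 60 + 8.34, 0) = 4.0, i.e. a continent-scale view;
    # smaller boundaries produce larger zoom values.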
    # The returned properties of the boundary FeatureCollection
fc_props = {
'zoom': zoom,
'bbox': bbox,
'centroid': centroid.get('coordinates')
}
return fc_props
| 33.896657
| 265
| 0.656743
|
910be802878b3dd52f8b4fc63f02170581f90d70
| 2,007
|
py
|
Python
|
awx/main/migrations/_squashed_30.py
|
antonionovaesjr/awx
|
ad482708d4f6f18941afb81072ef61c5ca48446a
|
[
"Apache-2.0"
] | 2
|
2018-11-12T18:52:24.000Z
|
2020-05-22T18:41:21.000Z
|
awx/main/migrations/_squashed_30.py
|
antonionovaesjr/awx
|
ad482708d4f6f18941afb81072ef61c5ca48446a
|
[
"Apache-2.0"
] | 3
|
2020-12-30T21:13:58.000Z
|
2022-03-29T22:06:22.000Z
|
awx/main/migrations/_squashed_30.py
|
antonionovaesjr/awx
|
ad482708d4f6f18941afb81072ef61c5ca48446a
|
[
"Apache-2.0"
] | 9
|
2019-05-11T00:03:30.000Z
|
2021-07-07T16:09:17.000Z
|
from django.db import (
migrations,
models,
)
import jsonfield.fields
import awx.main.fields
from awx.main.migrations import _save_password_keys
from awx.main.migrations import _migration_utils as migration_utils
def update_dashed_host_variables(apps, schema_editor):
Host = apps.get_model('main', 'Host')
for host in Host.objects.filter(variables='---'):
host.variables = ''
host.save()
SQUASHED_30 = {
'0029_v302_add_ask_skip_tags': [
# add ask skip tags
migrations.AddField(
model_name='jobtemplate',
name='ask_skip_tags_on_launch',
field=models.BooleanField(default=False),
),
],
'0030_v302_job_survey_passwords': [
        # job survey passwords
migrations.AddField(
model_name='job',
name='survey_passwords',
field=jsonfield.fields.JSONField(default={}, editable=False, blank=True),
),
],
'0031_v302_migrate_survey_passwords': [
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
migrations.RunPython(_save_password_keys.migrate_survey_passwords),
],
'0032_v302_credential_permissions_update': [
# RBAC credential permission updates
migrations.AlterField(
model_name='credential',
name='admin_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['singleton:system_administrator', 'organization.admin_role'], to='main.Role', null='True'),
),
migrations.AlterField(
model_name='credential',
name='use_role',
field=awx.main.fields.ImplicitRoleField(related_name='+', parent_role=['admin_role'], to='main.Role', null='True'),
),
],
'0033_v303_v245_host_variable_fix': [
migrations.RunPython(migration_utils.set_current_apps_for_migrations),
migrations.RunPython(update_dashed_host_variables),
],
}
__all__ = ['SQUASHED_30']
| 32.901639
| 174
| 0.663677
|
9b3081712fff463bd4621dd633bbe9d9c9a22908
| 53
|
py
|
Python
|
python/eskapade/analysis/__init__.py
|
mbaak/Eskapade
|
00c8f6ca52eb5b738b4268257e277dab71b804cb
|
[
"Apache-2.0"
] | 16
|
2016-10-10T08:39:30.000Z
|
2020-12-22T01:00:56.000Z
|
python/eskapade/analysis/__init__.py
|
mbaak/Eskapade
|
00c8f6ca52eb5b738b4268257e277dab71b804cb
|
[
"Apache-2.0"
] | null | null | null |
python/eskapade/analysis/__init__.py
|
mbaak/Eskapade
|
00c8f6ca52eb5b738b4268257e277dab71b804cb
|
[
"Apache-2.0"
] | 6
|
2017-06-14T12:01:41.000Z
|
2018-04-03T17:01:04.000Z
|
# flake8: noqa
from eskapade.analysis.links import *
| 17.666667
| 37
| 0.773585
|
b1593848bb279fe6795853ec17b504fa9e53c37e
| 1,690
|
py
|
Python
|
Cryptography/Man In Middle Attack Simulation/Bob.py
|
kartik2112/Interesting-Codes
|
423c93b7b2b66ab46d3c188e357d1f8af31251b2
|
[
"MIT"
] | null | null | null |
Cryptography/Man In Middle Attack Simulation/Bob.py
|
kartik2112/Interesting-Codes
|
423c93b7b2b66ab46d3c188e357d1f8af31251b2
|
[
"MIT"
] | null | null | null |
Cryptography/Man In Middle Attack Simulation/Bob.py
|
kartik2112/Interesting-Codes
|
423c93b7b2b66ab46d3c188e357d1f8af31251b2
|
[
"MIT"
] | null | null | null |
import socket
import thread
import time
import sys
import subsCipher1411113 as scEnc
import random
print "\n\nHi! This is ---------- Bob's Machine -----------\n\n"
print "I am attempting to connect to Adam and receive his some spicy secret messages :-P\n\n"
prvtKey = random.randint(10,1000)
#prvtKey = 31
p,q = 231, 237
print "p = " + str(p) + ", q = " + str(q)
print "Private Key: " + str(prvtKey) +"\n"
def receiverThread(c,key):
while True:
#data = c.recv(1024).decode('utf-8')
data = c.recv(1024)
decData = scEnc.decryptThis(data,key)
if decData == "quit":
sys.exit(0)
#print("\r",data,"Eve: ")
print "\rAdam: " + decData + "\t( Actual message received: "+ data +" )" +"\nBob: ",
s = socket.socket()
print "Bob Server Socket created"
port = int(input("Enter port for Bob: "))
s.bind(('',port))
print "Bob Server Socket binded to",port
s.listen(5)
print "Bob Server Socket is listening"
c, addr = s.accept()
print "Bob received connection from Adam:",addr
# Share keys
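# Diffie-Hellman style handshake: Bob sends R1 = q^b mod p (b = his private key),
# receives R2 = q^a mod p from Adam, and both ends derive the same shared key
# R2^b mod p == R1^a mod p == q^(a*b) mod p.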
R1 = q**prvtKey % p
print "R1 calculated by Bob's machine: " + str(R1)
c.send(str(R1))
R2 = c.recv(1024)
print "R2 received from Adam's machine: " + R2
R2=int(R2)
print "Calculating key ..."
key = R2**prvtKey % p
print "Handshake complete!\nKey that has been calculated is: " + str(key)
time.sleep(2)
# Start Sharing messages
thread.start_new_thread(receiverThread,(c,key))
while True:
message = raw_input("Bob: ")
encMsg = scEnc.encryptThis(message,key)
#c.send(message.encode('utf-8'))
print "\tEncrypted Message Sent: "+encMsg
c.send(encMsg)
if message == "quit":
sys.exit(0)
c.close()
| 23.150685
| 93
| 0.631953
|
3bd38a5575b39d6a0f0d63ca7208ab9efab6a3ef
| 3,753
|
py
|
Python
|
jorldy/sync_distributed_train.py
|
zenoengine/JORLDY
|
1eb867e52a03e0282a55fa612cbc5b5de701ffe7
|
[
"Apache-2.0"
] | null | null | null |
jorldy/sync_distributed_train.py
|
zenoengine/JORLDY
|
1eb867e52a03e0282a55fa612cbc5b5de701ffe7
|
[
"Apache-2.0"
] | null | null | null |
jorldy/sync_distributed_train.py
|
zenoengine/JORLDY
|
1eb867e52a03e0282a55fa612cbc5b5de701ffe7
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import multiprocessing as mp
import traceback
from core import *
from manager import *
from process import *
# default_config_path = "config.YOUR_AGENT.YOUR_ENV"
default_config_path = "config.dqn.cartpole"
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--config", type=str, help="config.dqn.cartpole")
args, unknown = parser.parse_known_args()
config_path = args.config if args.config else default_config_path
config_manager = ConfigManager(config_path, unknown)
config = config_manager.config
env = Env(**config.env)
agent_config = {
"state_size": env.state_size,
"action_size": env.action_size,
"optim_config": config.optim,
"run_step": config.train.run_step,
"num_workers": config.train.num_workers,
}
env.close()
agent_config.update(config.agent)
if config.train.distributed_batch_size:
agent_config["batch_size"] = config.train.distributed_batch_size
result_queue = mp.Queue()
manage_sync_queue = mp.Queue(1)
path_queue = mp.Queue(1)
record_period = (
config.train.record_period
if config.train.record_period
else config.train.run_step // 10
)
eval_manager_config = (
Env,
config.env,
config.train.eval_iteration,
config.train.record,
record_period,
)
log_id = config.train.id if config.train.id else config.agent.name
log_manager_config = (config.env.name, log_id, config.train.experiment)
manage = mp.Process(
target=manage_process,
args=(
Agent,
{"device": "cpu", **agent_config},
result_queue,
manage_sync_queue,
path_queue,
config.train.run_step,
config.train.print_period,
MetricManager,
EvalManager,
eval_manager_config,
LogManager,
log_manager_config,
config_manager,
),
)
manage.start()
try:
distributed_manager = DistributedManager(
Env,
config.env,
Agent,
{"device": "cpu", **agent_config},
config.train.num_workers,
"sync",
)
agent = Agent(**agent_config)
assert agent.action_type == env.action_type
if config.train.load_path:
agent.load(config.train.load_path)
save_path = path_queue.get()
step, print_stamp, save_stamp = 0, 0, 0
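        # Synchronous loop: workers collect update_period env steps, the learner
        # processes the gathered transitions, and the refreshed weights are synced
        # back to every worker before the next collection round.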
while step < config.train.run_step:
transitions = distributed_manager.run(config.train.update_period)
step += config.train.update_period
print_stamp += config.train.update_period
save_stamp += config.train.update_period
result = agent.process(transitions, step)
distributed_manager.sync(agent.sync_out())
result_queue.put((step, result))
if (
print_stamp >= config.train.print_period
or step >= config.train.run_step
):
try:
manage_sync_queue.get_nowait()
except:
pass
manage_sync_queue.put(agent.sync_out())
print_stamp = 0
if save_stamp >= config.train.save_period or step >= config.train.run_step:
agent.save(save_path)
save_stamp = 0
except Exception as e:
traceback.print_exc()
manage.terminate()
else:
print("Main process done.")
manage.join()
print("Manage process done.")
finally:
result_queue.close()
manage_sync_queue.close()
path_queue.close()
| 31.016529
| 87
| 0.596589
|
3f7948bb4c1d2aff98143cf17184026276c11073
| 1,692
|
py
|
Python
|
azure/mgmt/compute/compute/v2017_03_30/models/virtual_machine_identity.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 2
|
2020-07-29T14:22:17.000Z
|
2020-11-06T18:47:40.000Z
|
azure/mgmt/compute/compute/v2017_03_30/models/virtual_machine_identity.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2016-08-01T07:37:04.000Z
|
2016-08-01T07:37:04.000Z
|
azure/mgmt/compute/compute/v2017_03_30/models/virtual_machine_identity.py
|
EnjoyLifeFund/py36pkgs
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
[
"MIT",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1
|
2020-12-12T21:04:41.000Z
|
2020-12-12T21:04:41.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VirtualMachineIdentity(Model):
"""Identity for the virtual machine.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar principal_id: The principal id of virtual machine identity.
:vartype principal_id: str
:ivar tenant_id: The tenant id associated with the virtual machine.
:vartype tenant_id: str
:param type: The type of identity used for the virtual machine. Currently,
the only supported type is 'SystemAssigned', which implicitly creates an
identity. Possible values include: 'SystemAssigned'
:type type: str or :class:`ResourceIdentityType
<azure.mgmt.compute.compute.v2017_03_30.models.ResourceIdentityType>`
"""
_validation = {
'principal_id': {'readonly': True},
'tenant_id': {'readonly': True},
}
_attribute_map = {
'principal_id': {'key': 'principalId', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'type': {'key': 'type', 'type': 'ResourceIdentityType'},
}
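    # _validation marks the server-populated fields as read-only; _attribute_map maps
    # each Python attribute to its JSON key and type for msrest (de)serialization.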
def __init__(self, type=None):
self.principal_id = None
self.tenant_id = None
self.type = type
| 36
| 78
| 0.625296
|
a968866cdb260304020fd3637358c3567f374b69
| 547
|
py
|
Python
|
migrations/versions/0305h_smtp_columns.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 41
|
2019-11-28T16:58:41.000Z
|
2022-01-28T21:11:16.000Z
|
migrations/versions/0305h_smtp_columns.py
|
cds-snc/notification-api
|
b1c1064f291eb860b494c3fa65ac256ad70bf47c
|
[
"MIT"
] | 1,083
|
2019-07-08T12:57:24.000Z
|
2022-03-08T18:53:40.000Z
|
migrations/versions/0305h_smtp_columns.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 9
|
2020-01-24T19:56:43.000Z
|
2022-01-27T21:36:53.000Z
|
"""
Revision ID: 0305h_smtp_columns
Revises: 0305g_remove_letter_branding
Create Date: 2019-12-13 17:08:21.019759
"""
import sqlalchemy as sa
from alembic import op
revision = "0305h_smtp_columns"
down_revision = "0305g_remove_letter_branding"
def upgrade():
op.add_column("services", sa.Column("smtp_user", sa.Text(), nullable=True))
op.add_column("services_history", sa.Column("smtp_user", sa.Text(), nullable=True))
def downgrade():
op.drop_column("services", "smtp_user")
op.drop_column("services_history", "smtp_user")
| 23.782609
| 87
| 0.744059
|
e5c9c055de52ac64b4cf5a896101f35d90014250
| 214
|
py
|
Python
|
models/debug_unet.py
|
Curli-quan/fewshot-select
|
34f8ce5069ed1fbd01c1fa73a3ef264c98dadafe
|
[
"Apache-2.0"
] | null | null | null |
models/debug_unet.py
|
Curli-quan/fewshot-select
|
34f8ce5069ed1fbd01c1fa73a3ef264c98dadafe
|
[
"Apache-2.0"
] | null | null | null |
models/debug_unet.py
|
Curli-quan/fewshot-select
|
34f8ce5069ed1fbd01c1fa73a3ef264c98dadafe
|
[
"Apache-2.0"
] | null | null | null |
import torch
from .UNet3D import UNet3DEncoder2
import numpy as np
unet = UNet3DEncoder2(1,1,emb_len=64)
im = torch.rand((2,1,128,128,64))
feas = unet(im)
print(len(feas))
import ipdb; ipdb.set_trace()
| 19.454545
| 38
| 0.705607
|
c536802cae72817c514793a89a9b60ca50010a61
| 8,321
|
py
|
Python
|
sent_selectors/modified_greedy.py
|
blendle/summblogcode
|
93ae3f6bba8aba4557f4ccfa1b9fcc8cc8727206
|
[
"0BSD"
] | 8
|
2018-04-18T20:10:36.000Z
|
2019-11-19T08:33:07.000Z
|
sent_selectors/modified_greedy.py
|
blendle/summblogcode
|
93ae3f6bba8aba4557f4ccfa1b9fcc8cc8727206
|
[
"0BSD"
] | 2
|
2018-09-07T03:13:19.000Z
|
2019-11-07T13:58:48.000Z
|
sent_selectors/modified_greedy.py
|
blendle/research-summarization
|
93ae3f6bba8aba4557f4ccfa1b9fcc8cc8727206
|
[
"0BSD"
] | null | null | null |
import numpy as np
from sklearn.metrics import pairwise_distances
from sklearn.cluster import KMeans
import string
def modified_greedy(sentences,
tokenized,
model,
stopwords,
original_indices,
sent_representations,
objective_function,
min_sentence_length):
"""Implementation of the MMR summarizer as described in Lin & Bilmes (2010)."""
# Initialize stuff
# Ground set indices: all indices, stays constant throughout the function
all_indices = tuple(range(len(original_indices)))
# Candidate indices: all candidates (gets smaller every iteration)
candidate_indices = list(range(len(original_indices)))
# Summary indices: indices of represented sentences added to summary
summary_indices = []
# Scaling factor (r) is taken from original paper: r = 0.3
scaling_factor = .3
# Tf-idf clustering, as described in Lin & Bilmes (2011)
n_clusters = len(original_indices) // 5
k_means = KMeans(n_clusters=n_clusters, random_state=42)
clustering = k_means.fit_predict(sent_representations)
clustered_indices = [np.array(all_indices)[np.where(clustering == i)].tolist()
for i in range(n_clusters)]
# Make document vector (since w2v sentences are now sums, it is this easy):
document_vector = np.sum(sent_representations, axis=0)
# Pick the right sentences from sentence list (to match representation matrix)
sentences = [sentences[i] for i in original_indices]
tokenized = [tokenized[i] for i in original_indices]
# Construct bag of words from representable sentences
preprocessed = (sentence.lower().split(' ')
for i, sentence in enumerate(tokenized))
# POS-tag filtering, and punctuation removal
preprocessed = [[word.translate(str.maketrans('', '', string.punctuation))
for word in sentence] for sentence in preprocessed]
# Remove OOV words
sentence_words = [[word for word in sentence if word in model.model.vocab]
for sentence in preprocessed]
# Deduplicate & flatten
bag_of_words = list(set([word for sentence in sentence_words for word in sentence]))
# Look up in-vocabulary word vectors
vectorized = [(word, model.model[word]) for word in bag_of_words]
# Construct word similarity matrix for all words in article object
names, vectors = zip(*vectorized)
# word_distance_matrix = pairwise_distances(vectors, metric='euclidean')
word_distance_matrix = pairwise_distances(vectors, metric='cosine')
# Pandas workaround
name_index_tuples = list(zip(list(range(len(names))), names))
# Fill diagonal with nan, to make sure it's never the minimum
np.fill_diagonal(word_distance_matrix, np.nan)
# Compute sentence similarity matrix based on sentence representations
distance_matrix = pairwise_distances(sent_representations, metric='cosine')
similarity_matrix = np.subtract(1, distance_matrix)
np.fill_diagonal(similarity_matrix, np.nan)
# Compute sentence lengths
sentence_lengths = [len(s.split()) for s in sentences]
length_scaler = np.power(sentence_lengths, scaling_factor).tolist()
# Remove sentences that do not have similarity with other sentences from candidate set
similarity_sum_per_sentence = np.nansum(similarity_matrix, axis=0)
irrelevant_indices = np.where(similarity_sum_per_sentence == 0)[0].tolist()
candidate_indices = [index for index in candidate_indices
if index not in irrelevant_indices]
# Already save the best singleton summary, for comparison to iterative result later
singleton_scores = [objective_function(similarity_matrix,
sent_representations,
name_index_tuples,
sentence_words,
word_distance_matrix,
document_vector,
clustered_indices,
all_indices,
[i])
if sentence_lengths[i] <= 100
else np.nan for i in candidate_indices]
best_singleton_score = np.nanmax(singleton_scores)
# Note that the singleton index is directly translated to a sentence representation index
best_singleton_index = candidate_indices[np.nanargmax(singleton_scores)]
# Greedily add sentences to summary
summary_length = 0
for iteration in range(len(sentence_lengths)):
print("Iteration {}".format(iteration))
# Edge case: value of objective function when summary is empty.
if iteration == 0:
current_score = 0.
else:
current_score = objective_function(similarity_matrix,
sent_representations,
name_index_tuples,
sentence_words,
word_distance_matrix,
document_vector,
clustered_indices,
all_indices,
summary_indices)
# Compute all relevant new scores
new_scores = [objective_function(similarity_matrix,
sent_representations,
name_index_tuples,
sentence_words,
word_distance_matrix,
document_vector,
clustered_indices,
all_indices,
summary_indices+[i])
if sentence_lengths[i] > min_sentence_length
else np.nan
for i in candidate_indices]
# If there are no candidates left, break the loop
if all(np.isnan(score) for score in new_scores):
break
# Remove non-candidate elements from length scaler to fit arrays
current_length_scaler = [v for i, v in enumerate(length_scaler) if i in candidate_indices]
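        # Scaled greedy step of Lin & Bilmes (2010): rank candidates by
        # (F(S + {i}) - F(S)) / cost(i)^r, where cost is the sentence length and
        # r = 0.3, so longer sentences must add proportionally more value to be picked.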
added_values = np.divide(np.subtract(new_scores, current_score), current_length_scaler)
best_index = np.nanargmax(added_values)
# Pass best index if the sentence does not increase MMR-score (+ empty summary edge case)
if not new_scores[best_index] - current_score >= 0 and summary_indices:
candidate_indices.pop(best_index)
else:
summary_indices.append(candidate_indices[best_index])
summary_length += sentence_lengths[candidate_indices[best_index]]
candidate_indices.pop(best_index)
if summary_length >= 100:
break
# Last step: compare singleton score with summary score, and pick best as summary
final_summary_score = objective_function(similarity_matrix,
sent_representations,
name_index_tuples,
sentence_words,
word_distance_matrix,
document_vector,
clustered_indices,
all_indices,
summary_indices)
if best_singleton_score >= final_summary_score:
ranked_sentences = [sentences[i] for i in [best_singleton_index]]
ranking = list(zip([best_singleton_index], ranked_sentences))
else:
ranked_sentences = [sentences[i] for i in summary_indices]
ranking = list(zip(summary_indices, ranked_sentences))
# Replace filtered indices with original ones
ranking = [(original_indices[i], s) for i, s in ranking]
return ranking
| 49.826347
| 98
| 0.585747
|
fc57aac74cef17491271e0e098cb712044677f34
| 458
|
py
|
Python
|
taxdata/cps/__init__.py
|
jdebacker/taxdata
|
c32d401a10a6c8f6e889d87c6cc72fd4338017b2
|
[
"CC0-1.0"
] | 12
|
2019-02-07T14:06:28.000Z
|
2021-12-04T19:19:50.000Z
|
taxdata/cps/__init__.py
|
jdebacker/taxdata
|
c32d401a10a6c8f6e889d87c6cc72fd4338017b2
|
[
"CC0-1.0"
] | 230
|
2015-10-20T18:38:10.000Z
|
2018-12-05T16:04:04.000Z
|
taxdata/cps/__init__.py
|
jdebacker/taxdata
|
c32d401a10a6c8f6e889d87c6cc72fd4338017b2
|
[
"CC0-1.0"
] | 19
|
2015-12-21T18:25:11.000Z
|
2018-11-10T16:53:38.000Z
|
# flake8: noqa
from taxdata.cps import benefits
from taxdata.cps import cps_meta
from taxdata.cps import cpsmar
from taxdata.cps.create import create
from taxdata.cps.finalprep import finalprep
from taxdata.cps import helpers
from taxdata.cps import impute
from taxdata.cps import pycps
from taxdata.cps import splitincome
from taxdata.cps import targeting
from taxdata.cps import taxunit
from taxdata.cps import validation
from taxdata.cps import constants
| 30.533333
| 43
| 0.842795
|
66911154aa71843c2e716af6114ea83fdbc19443
| 1,707
|
py
|
Python
|
setup.py
|
Malachov/mylogging
|
df1618f7a9893fd3a6ae3912d5c7d57eebffaadb
|
[
"MIT"
] | null | null | null |
setup.py
|
Malachov/mylogging
|
df1618f7a9893fd3a6ae3912d5c7d57eebffaadb
|
[
"MIT"
] | null | null | null |
setup.py
|
Malachov/mylogging
|
df1618f7a9893fd3a6ae3912d5c7d57eebffaadb
|
[
"MIT"
] | null | null | null |
#%%
from setuptools import setup, find_packages
import pkg_resources
import mylogging
version = mylogging.__version__
with open("README.md") as readme_file:
readme = readme_file.read()
with open("requirements.txt") as f:
myreqs = [str(requirement) for requirement in pkg_resources.parse_requirements(f)]
setup(
author_email="malachovd@seznam.cz",
author="Daniel Malachov",
description="Small library for printing warnings and creating logs.",
include_package_data=True,
install_requires=myreqs,
license="mit",
long_description_content_type="text/markdown",
long_description=readme,
name="mylogging",
packages=find_packages(exclude=("tests",)),
platforms="any",
project_urls={
"Documentation": "https://mylogging.readthedocs.io/",
"Home": "https://github.com/Malachov/mylogging",
},
python_requires=">=3.7",
url="https://github.com/Malachov/mylogging",
version=version,
classifiers=[
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Natural Language :: English",
"Environment :: Other Environment",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Intended Audience :: Developers",
"Intended Audience :: Education",
],
extras_require={},
)
| 34.14
| 87
| 0.639719
|
94436973fed695997818295ba99b48ab10bbdde8
| 1,285
|
py
|
Python
|
tests/conftest.py
|
jmann277/oura_cdm
|
de51c780d49744234757ddce2718a59abd8d8a03
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
jmann277/oura_cdm
|
de51c780d49744234757ddce2718a59abd8d8a03
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
jmann277/oura_cdm
|
de51c780d49744234757ddce2718a59abd8d8a03
|
[
"MIT"
] | null | null | null |
import pytest
from oura_cdm.concepts import ObservationConcept
from oura_cdm.extract_oura import get_oura_data
from oura_cdm.observation import get_observation_table
from oura_cdm.pipeline import run
from oura_cdm.concepts import Ontology
@pytest.fixture(scope='session')
def start_date():
return '2022-01-17'
@pytest.fixture(scope='session')
def end_date():
return '2022-02-02'
@pytest.fixture(scope='session')
def oura_data(start_date, end_date):
return get_oura_data(start_date=start_date, end_date=end_date)
@pytest.fixture(scope='session')
def target_folder_name():
return 'sleep_data_test'
@pytest.fixture(scope='session')
def pipeline_inputs(target_folder_name):
return {
"target_folder_name": target_folder_name
}
@pytest.fixture(scope='session')
def pipeline_artifacts(pipeline_inputs):
return run(**pipeline_inputs)
@pytest.fixture(scope='session')
def observation_df(oura_data):
observation_df = get_observation_table(oura_data)
return observation_df
@pytest.fixture(params=[c for c in ObservationConcept])
def observation_concept(request):
return request.param
@pytest.fixture
def raw_observation(oura_data):
return oura_data[0]
@pytest.fixture(scope='session')
def ontology():
return Ontology()
| 21.065574
| 66
| 0.771206
|
9ea42e99df11cb036c494bb5f3f389f1b580e5c8
| 6,736
|
py
|
Python
|
DPO/DataProcess/TechIndicator/TAIndicator.py
|
xuyuanjian/Dynamic-Portfolio-Optimization
|
83c744d7914fa0965595e75797760c9eb4e87772
|
[
"MIT"
] | 1
|
2021-06-15T09:31:24.000Z
|
2021-06-15T09:31:24.000Z
|
DPO/DataProcess/TechIndicator/TAIndicator.py
|
dfhby0/Dynamic-Portfolio-Optimization
|
6d40f6c3f9e7c27adb4ec63dc1339d717cdbc5d4
|
[
"MIT"
] | 1
|
2021-06-21T10:04:09.000Z
|
2021-06-21T10:04:09.000Z
|
DPO/DataProcess/TechIndicator/TAIndicator.py
|
dfhby0/Dynamic-Portfolio-Optimization
|
6d40f6c3f9e7c27adb4ec63dc1339d717cdbc5d4
|
[
"MIT"
] | 1
|
2021-06-15T09:31:25.000Z
|
2021-06-15T09:31:25.000Z
|
import numpy as np
import talib
def DPOGetTATech(Open, High, Close, Low, Volume, techlist=['EMA', 'MA']):
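    # Builds a per-bar feature matrix by column-stacking the requested indicators.
    # Illustrative call (argument names assumed): DPOGetTATech(o, h, c, l, v,
    # techlist=['MACD', 'EMA']) returns one row of features per price bar.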
ta_indicator = np.array([])
Open = Open.astype(float)
High = High.astype(float)
Close = Close.astype(float)
Low = Low.astype(float)
Volume = Volume.astype(float)
for tech_name in techlist:
if tech_name == 'ADX':
            # ADX directional movement indicator
temp_indicator = TA_ADX(High, Low, Close)
            # Fill missing values with 0
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
ta_indicator = np.c_[
ta_indicator,
temp_indicator] if len(ta_indicator) > 0 else temp_indicator
temp_indicator = TA_ADXR(High, Low, Close)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[
ta_indicator,
temp_indicator] if len(ta_indicator) > 0 else temp_indicator
elif tech_name == 'MACD':
            # MACD lines
temp_indicator = TA_MACD(Close)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[
ta_indicator,
temp_indicator] if len(ta_indicator) > 0 else temp_indicator
elif tech_name == 'BBANDS':
            # Bollinger Bands
temp_indicator = TA_BBANDS(Close)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[
ta_indicator,
temp_indicator] if len(ta_indicator) > 0 else temp_indicator
elif tech_name == 'CCI':
# CCI
temp_indicator = TA_CCI(High, Low, Close)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[
ta_indicator,
temp_indicator] if len(ta_indicator) > 0 else temp_indicator
elif tech_name == 'EMA':
            # Exponential (weighted) moving averages
temp_indicator = talib.EMA(Close, 12)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[
ta_indicator,
temp_indicator] if len(ta_indicator) > 0 else temp_indicator
temp_indicator = talib.EMA(Close, 29)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[ta_indicator, temp_indicator]
temp_indicator = talib.EMA(Close, 5)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[ta_indicator, temp_indicator]
elif tech_name == 'MA':
            # Simple moving averages
temp_indicator = talib.MA(Close, 12)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[
ta_indicator,
temp_indicator] if len(ta_indicator) > 0 else temp_indicator
temp_indicator = talib.MA(Close, 29)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[ta_indicator, temp_indicator]
temp_indicator = talib.MA(Close, 5)
temp_indicator[np.where(np.isnan(temp_indicator))[0]] = 0
temp_indicator[np.where(temp_indicator == np.inf)[0]] = 0
ta_indicator = np.c_[ta_indicator, temp_indicator]
return ta_indicator
def TA_MACD(prices: np.ndarray,
fastperiod: int = 12,
slowperiod: int = 26,
signalperiod: int = 9) -> np.ndarray:
macd, signal, hist = talib.MACD(prices,
fastperiod=fastperiod,
slowperiod=slowperiod,
signalperiod=signalperiod)
hist = (macd - signal) * 2
delta = np.r_[np.nan, np.diff(hist)]
return np.c_[macd, signal, hist, delta]
def TA_RSI(prices: np.ndarray, timeperiod: int = 12) -> np.ndarray:
rsi = talib.RSI(prices, timeperiod=timeperiod)
delta = np.r_[np.nan, np.diff(rsi)]
return np.c_[rsi, delta]
def TA_BBANDS(prices: np.ndarray,
timeperiod: int = 5,
nbdevup: int = 2,
nbdevdn: int = 2,
matype: int = 0) -> np.ndarray:
up, middle, low = talib.BBANDS(prices, timeperiod, nbdevup, nbdevdn,
matype)
ch = (up - low) / middle
delta = np.r_[np.nan, np.diff(ch)]
return np.c_[up, middle, low, ch, delta]
def TA_KDJ(high: np.ndarray,
low: np.ndarray,
close: np.ndarray,
fastk_period: int = 9,
slowk_matype: int = 0,
slowk_period: int = 3,
slowd_period: int = 3) -> np.ndarray:
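    # Stochastic oscillator: K and D come from talib.STOCH, J = 3*K - 2*D is the
    # usual KDJ extension, and delta is the first difference of J.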
K, D = talib.STOCH(high,
low,
close,
fastk_period=fastk_period,
slowk_matype=slowk_matype,
slowk_period=slowk_period,
slowd_period=slowd_period)
J = 3 * K - 2 * D
delta = np.r_[np.nan, np.diff(J)]
return np.c_[K, D, J, delta]
def TA_ADX(high: np.ndarray,
low: np.ndarray,
close: np.ndarray,
timeperiod: int = 14) -> np.ndarray:
real = talib.ADX(high, low, close, timeperiod=timeperiod)
return np.c_[real]
def TA_ADXR(high: np.ndarray,
low: np.ndarray,
close: np.ndarray,
timeperiod: int = 14) -> np.ndarray:
real = talib.ADXR(high, low, close, timeperiod=timeperiod)
return np.c_[real]
def TA_CCI(high: np.ndarray,
low: np.ndarray,
close: np.ndarray,
timeperiod: int = 14) -> np.ndarray:
real = talib.CCI(high, low, close, timeperiod=timeperiod)
delta = np.r_[np.nan, np.diff(real)]
return np.c_[real, delta]
def TA_KAMA(prices: np.ndarray, timeperiod: int = 30):
real = talib.KAMA(prices, timeperiod=timeperiod)
return np.c_[real]
def TA_HMA(prices: np.ndarray, timeperiod: int = 12):
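    # Hull Moving Average: WMA(2*WMA(price, n/2) - WMA(price, n), sqrt(n)),
    # which follows price more closely than a plain WMA while staying smooth.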
hma = talib.WMA(
2 * talib.WMA(prices, int(timeperiod / 2)) -
talib.WMA(prices, timeperiod), int(np.sqrt(timeperiod)))
return hma
| 36.410811
| 76
| 0.565024
|
b5e16dc7bd8f686e393e172fb84746f95c7c7cb2
| 3,242
|
py
|
Python
|
cvtron/model_zoo/lapsrn/lapsrn.py
|
opencollective/CVTron
|
fd111d734fed4008ba9d04d18c359dbd441a1897
|
[
"Apache-2.0"
] | 94
|
2018-03-31T06:32:50.000Z
|
2020-09-25T10:19:23.000Z
|
cvtron/model_zoo/lapsrn/lapsrn.py
|
opencollective/CVTron
|
fd111d734fed4008ba9d04d18c359dbd441a1897
|
[
"Apache-2.0"
] | 98
|
2018-04-02T15:55:25.000Z
|
2020-04-06T09:35:36.000Z
|
cvtron/model_zoo/lapsrn/lapsrn.py
|
opencollective/CVTron
|
fd111d734fed4008ba9d04d18c359dbd441a1897
|
[
"Apache-2.0"
] | 18
|
2018-07-10T22:54:19.000Z
|
2021-03-23T05:17:39.000Z
|
#coding:utf-8
import numpy as np
import tensorflow as tf
import tensorlayer as tl
from tensorlayer.layers import (Conv2dLayer, ElementwiseLayer, InputLayer,
PReluLayer, SubpixelConv2d)
from config import config
def lrelu(x):
return tf.maximum(x*0.2,x)
def LapSRNSingleLevel(net_image, net_feature, reuse=False):
with tf.variable_scope("Model_level", reuse=reuse):
tl.layers.set_name_reuse(reuse)
net_tmp = net_feature
# recursive block
for d in range(config.model.resblock_depth):
net_tmp = PReluLayer(net_tmp, name='prelu_D%s'%(d))
net_tmp = Conv2dLayer(net_tmp,shape=[3,3,64,64],strides=[1,1,1,1],
name='conv_D%s'%(d), W_init=tf.contrib.layers.xavier_initializer())
# for r in range(1,config.model.recursive_depth):
# for d in range(config.model.resblock_depth):
# net_tmp = PReluLayer(net_tmp, name='prelu_R%s_D%s'%(r,d))
# net_tmp = Conv2dLayer(net_tmp,shape=[3,3,64,64],strides=[1,1,1,1],
# name='conv_R%s_D%s'%(r,d), W_init=tf.contrib.layers.xavier_initializer())
net_feature = ElementwiseLayer(layer=[net_feature,net_tmp],combine_fn=tf.add,name='add_feature')
net_feature = PReluLayer(net_feature, name='prelu_feature')
net_feature = Conv2dLayer(net_feature,shape=[3,3,64,256],strides=[1,1,1,1],
name='upconv_feature', W_init=tf.contrib.layers.xavier_initializer())
net_feature = SubpixelConv2d(net_feature,scale=2,n_out_channel=64,
name='subpixel_feature')
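        # Sub-pixel (pixel shuffle) upsampling: the 256-channel feature map is
        # rearranged into 64 channels at 2x spatial resolution (256 = 64 * 2**2).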
# add image back
gradient_level = Conv2dLayer(net_feature,shape=[3,3,64,3],strides=[1,1,1,1],act=lrelu,
name='grad', W_init=tf.contrib.layers.xavier_initializer())
net_image = Conv2dLayer(net_image,shape=[3,3,3,12],strides=[1,1,1,1],
name='upconv_image', W_init=tf.contrib.layers.xavier_initializer())
net_image = SubpixelConv2d(net_image,scale=2,n_out_channel=3,
name='subpixel_image')
net_image = ElementwiseLayer(layer=[gradient_level,net_image],combine_fn=tf.add,name='add_image')
return net_image, net_feature, gradient_level
def LapSRN(inputs, is_train=False, reuse=False):
n_level = int(np.log2(config.model.scale))
if not n_level >= 1:
raise ValueError('n_level >=1 expected but not satisfied')
with tf.variable_scope("LapSRN", reuse=reuse) as vs:
tl.layers.set_name_reuse(reuse)
shapes = tf.shape(inputs)
inputs_level = InputLayer(inputs, name='input_level')
net_feature = Conv2dLayer(inputs_level, shape=[3,3,3,64], strides=[1,1,1,1],
W_init=tf.contrib.layers.xavier_initializer(),
name='init_conv')
net_image = inputs_level
net_image1, net_feature1, net_gradient1 = LapSRNSingleLevel(net_image, net_feature, reuse=reuse)
net_image2, net_feature2, net_gradient2 = LapSRNSingleLevel(net_image1, net_feature1, reuse=True)
return net_image2, net_gradient2, net_image1, net_gradient1
| 45.661972
| 105
| 0.642813
|
fc01905abf8b1a891f7263df0b6f28a568564866
| 5,484
|
py
|
Python
|
tests/test_cache_decorator.py
|
marcinkuzminski/beaker
|
1cbd0ed67f8bc3b461c9571f62b303b8900666cb
|
[
"BSD-3-Clause"
] | 1
|
2015-04-22T02:00:38.000Z
|
2015-04-22T02:00:38.000Z
|
tests/test_cache_decorator.py
|
TurboGears/beaker
|
533daa6411a18e39b1072451a959deb0ecf6f97f
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_cache_decorator.py
|
TurboGears/beaker
|
533daa6411a18e39b1072451a959deb0ecf6f97f
|
[
"BSD-3-Clause"
] | null | null | null |
import time
from datetime import datetime
import beaker.cache as cache
from beaker.cache import CacheManager, cache_region, region_invalidate
from beaker import util
defaults = {'cache.data_dir':'./cache', 'cache.type':'dbm', 'cache.expire': 2}
def teardown():
import shutil
shutil.rmtree('./cache', True)
@cache_region('short_term')
def fred(x):
return time.time()
@cache_region('short_term')
def george(x):
return time.time()
def make_cache_obj(**kwargs):
opts = defaults.copy()
opts.update(kwargs)
cache = CacheManager(**util.parse_cache_config_options(opts))
return cache
def make_cached_func(**opts):
cache = make_cache_obj(**opts)
@cache.cache()
def load(person):
now = datetime.now()
return "Hi there %s, its currently %s" % (person, now)
return cache, load
def make_region_cached_func():
opts = {}
opts['cache.regions'] = 'short_term, long_term'
opts['cache.short_term.expire'] = '2'
cache = make_cache_obj(**opts)
@cache_region('short_term', 'region_loader')
def load(person):
now = datetime.now()
return "Hi there %s, its currently %s" % (person, now)
return load
def make_region_cached_func_2():
opts = {}
opts['cache.regions'] = 'short_term, long_term'
opts['cache.short_term.expire'] = '2'
cache = make_cache_obj(**opts)
@cache_region('short_term')
def load_person(person):
now = datetime.now()
return "Hi there %s, its currently %s" % (person, now)
return load_person
def test_check_region_decorator():
func = make_region_cached_func()
result = func('Fred')
assert 'Fred' in result
result2 = func('Fred')
assert result == result2
result3 = func('George')
assert 'George' in result3
result4 = func('George')
assert result3 == result4
time.sleep(2)
result2 = func('Fred')
assert result != result2
def test_different_default_names():
result = fred(1)
time.sleep(1)
result2 = george(1)
assert result != result2
def test_check_invalidate_region():
func = make_region_cached_func()
result = func('Fred')
assert 'Fred' in result
result2 = func('Fred')
assert result == result2
region_invalidate(func, None, 'region_loader', 'Fred')
result3 = func('Fred')
assert result3 != result2
result2 = func('Fred')
assert result3 == result2
# Invalidate a non-existent key
region_invalidate(func, None, 'region_loader', 'Fredd')
assert result3 == result2
def test_check_invalidate_region_2():
func = make_region_cached_func_2()
result = func('Fred')
assert 'Fred' in result
result2 = func('Fred')
assert result == result2
region_invalidate(func, None, 'Fred')
result3 = func('Fred')
assert result3 != result2
result2 = func('Fred')
assert result3 == result2
# Invalidate a non-existent key
region_invalidate(func, None, 'Fredd')
assert result3 == result2
def test_invalidate_cache():
cache, func = make_cached_func()
val = func('foo')
time.sleep(.1)
val2 = func('foo')
assert val == val2
cache.invalidate(func, 'foo')
val3 = func('foo')
assert val3 != val
def test_class_key_cache():
cache = make_cache_obj()
class Foo(object):
@cache.cache('method')
def go(self, x, y):
return "hi foo"
@cache.cache('standalone')
def go(x, y):
return "hi standalone"
x = Foo().go(1, 2)
y = go(1, 2)
ns = go._arg_namespace
assert cache.get_cache(ns).get('method 1 2') == x
assert cache.get_cache(ns).get('standalone 1 2') == y
def test_func_namespace():
def go(x, y):
return "hi standalone"
assert 'test_cache_decorator' in util.func_namespace(go)
assert util.func_namespace(go).endswith('go')
def test_class_key_region():
opts = {}
opts['cache.regions'] = 'short_term'
opts['cache.short_term.expire'] = '2'
cache = make_cache_obj(**opts)
class Foo(object):
@cache_region('short_term', 'method')
def go(self, x, y):
return "hi foo"
@cache_region('short_term', 'standalone')
def go(x, y):
return "hi standalone"
x = Foo().go(1, 2)
y = go(1, 2)
ns = go._arg_namespace
assert cache.get_cache_region(ns, 'short_term').get('method 1 2') == x
assert cache.get_cache_region(ns, 'short_term').get('standalone 1 2') == y
def test_classmethod_key_region():
opts = {}
opts['cache.regions'] = 'short_term'
opts['cache.short_term.expire'] = '2'
cache = make_cache_obj(**opts)
class Foo(object):
@classmethod
@cache_region('short_term', 'method')
def go(cls, x, y):
return "hi"
x = Foo.go(1, 2)
ns = Foo.go._arg_namespace
assert cache.get_cache_region(ns, 'short_term').get('method 1 2') == x
def test_class_key_region_invalidate():
opts = {}
opts['cache.regions'] = 'short_term'
opts['cache.short_term.expire'] = '2'
cache = make_cache_obj(**opts)
class Foo(object):
@cache_region('short_term', 'method')
def go(self, x, y):
now = datetime.now()
return "hi %s" % now
def invalidate(self, x, y):
region_invalidate(self.go, None, "method", x, y)
x = Foo().go(1, 2)
time.sleep(1)
y = Foo().go(1, 2)
Foo().invalidate(1, 2)
z = Foo().go(1, 2)
assert x == y
assert x != z
| 25.041096
| 78
| 0.623997
|
2824e6e97ca997c114ef7fc4d6eddf12ff1bdc76
| 5,936
|
py
|
Python
|
test/continuous/test_trpo.py
|
mahi97/tianshou
|
c248b4f87e46d8fca229f29d5cabb15211c842e9
|
[
"MIT"
] | null | null | null |
test/continuous/test_trpo.py
|
mahi97/tianshou
|
c248b4f87e46d8fca229f29d5cabb15211c842e9
|
[
"MIT"
] | null | null | null |
test/continuous/test_trpo.py
|
mahi97/tianshou
|
c248b4f87e46d8fca229f29d5cabb15211c842e9
|
[
"MIT"
] | null | null | null |
import argparse
import os
import pprint
import gym
import numpy as np
import torch
from torch import nn
from torch.distributions import Independent, Normal
from torch.utils.tensorboard import SummaryWriter
from tianshou.data import Collector, VectorReplayBuffer
from tianshou.env import DummyVectorEnv
from tianshou.policy import TRPOPolicy
from tianshou.trainer import onpolicy_trainer
from tianshou.utils import TensorboardLogger
from tianshou.utils.net.common import Net
from tianshou.utils.net.continuous import ActorProb, Critic
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument('--task', type=str, default='Pendulum-v1')
parser.add_argument('--seed', type=int, default=1)
parser.add_argument('--buffer-size', type=int, default=50000)
parser.add_argument('--lr', type=float, default=1e-3)
parser.add_argument('--gamma', type=float, default=0.95)
parser.add_argument('--epoch', type=int, default=5)
parser.add_argument('--step-per-epoch', type=int, default=50000)
parser.add_argument('--step-per-collect', type=int, default=2048)
parser.add_argument(
'--repeat-per-collect', type=int, default=2
) # theoretically it should be 1
parser.add_argument('--batch-size', type=int, default=99999)
parser.add_argument('--hidden-sizes', type=int, nargs='*', default=[64, 64])
parser.add_argument('--training-num', type=int, default=16)
parser.add_argument('--test-num', type=int, default=10)
parser.add_argument('--logdir', type=str, default='log')
parser.add_argument('--render', type=float, default=0.)
parser.add_argument(
'--device', type=str, default='cuda' if torch.cuda.is_available() else 'cpu'
)
# trpo special
parser.add_argument('--gae-lambda', type=float, default=0.95)
parser.add_argument('--rew-norm', type=int, default=1)
parser.add_argument('--norm-adv', type=int, default=1)
parser.add_argument('--optim-critic-iters', type=int, default=5)
parser.add_argument('--max-kl', type=float, default=0.005)
parser.add_argument('--backtrack-coeff', type=float, default=0.8)
parser.add_argument('--max-backtracks', type=int, default=10)
args = parser.parse_known_args()[0]
return args
def test_trpo(args=get_args()):
env = gym.make(args.task)
if args.task == 'Pendulum-v1':
env.spec.reward_threshold = -250
args.state_shape = env.observation_space.shape or env.observation_space.n
args.action_shape = env.action_space.shape or env.action_space.n
args.max_action = env.action_space.high[0]
# you can also use tianshou.env.SubprocVectorEnv
# train_envs = gym.make(args.task)
train_envs = DummyVectorEnv(
[lambda: gym.make(args.task) for _ in range(args.training_num)]
)
# test_envs = gym.make(args.task)
test_envs = DummyVectorEnv(
[lambda: gym.make(args.task) for _ in range(args.test_num)]
)
# seed
np.random.seed(args.seed)
torch.manual_seed(args.seed)
train_envs.seed(args.seed)
test_envs.seed(args.seed)
# model
net = Net(
args.state_shape,
hidden_sizes=args.hidden_sizes,
activation=nn.Tanh,
device=args.device
)
actor = ActorProb(
net,
args.action_shape,
max_action=args.max_action,
unbounded=True,
device=args.device
).to(args.device)
critic = Critic(
Net(
args.state_shape,
hidden_sizes=args.hidden_sizes,
device=args.device,
activation=nn.Tanh
),
device=args.device
).to(args.device)
# orthogonal initialization
for m in list(actor.modules()) + list(critic.modules()):
if isinstance(m, torch.nn.Linear):
torch.nn.init.orthogonal_(m.weight)
torch.nn.init.zeros_(m.bias)
optim = torch.optim.Adam(critic.parameters(), lr=args.lr)
# replace DiagGuassian with Independent(Normal) which is equivalent
# pass *logits to be consistent with policy.forward
def dist(*logits):
return Independent(Normal(*logits), 1)
policy = TRPOPolicy(
actor,
critic,
optim,
dist,
discount_factor=args.gamma,
reward_normalization=args.rew_norm,
advantage_normalization=args.norm_adv,
gae_lambda=args.gae_lambda,
action_space=env.action_space,
optim_critic_iters=args.optim_critic_iters,
max_kl=args.max_kl,
backtrack_coeff=args.backtrack_coeff,
max_backtracks=args.max_backtracks
)
# collector
train_collector = Collector(
policy, train_envs, VectorReplayBuffer(args.buffer_size, len(train_envs))
)
test_collector = Collector(policy, test_envs)
# log
log_path = os.path.join(args.logdir, args.task, 'trpo')
writer = SummaryWriter(log_path)
logger = TensorboardLogger(writer)
def save_fn(policy):
torch.save(policy.state_dict(), os.path.join(log_path, 'policy.pth'))
def stop_fn(mean_rewards):
return mean_rewards >= env.spec.reward_threshold
# trainer
result = onpolicy_trainer(
policy,
train_collector,
test_collector,
args.epoch,
args.step_per_epoch,
args.repeat_per_collect,
args.test_num,
args.batch_size,
step_per_collect=args.step_per_collect,
stop_fn=stop_fn,
save_fn=save_fn,
logger=logger
)
assert stop_fn(result['best_reward'])
if __name__ == '__main__':
pprint.pprint(result)
# Let's watch its performance!
env = gym.make(args.task)
policy.eval()
collector = Collector(policy, env)
result = collector.collect(n_episode=1, render=args.render)
rews, lens = result["rews"], result["lens"]
print(f"Final reward: {rews.mean()}, length: {lens.mean()}")
if __name__ == '__main__':
test_trpo()
| 34.312139
| 84
| 0.668632
|
2b3d6c03c12e19845bb291de1232765b9bf6403b
| 255
|
py
|
Python
|
examples/matplotlib/mpl_plot_energy.py
|
sudojarvis/arviz
|
73531be4f23df7d764b2e3bec8c5ef5cb882590d
|
[
"Apache-2.0"
] | 1,159
|
2018-04-03T08:50:54.000Z
|
2022-03-31T18:03:52.000Z
|
examples/matplotlib/mpl_plot_energy.py
|
sudojarvis/arviz
|
73531be4f23df7d764b2e3bec8c5ef5cb882590d
|
[
"Apache-2.0"
] | 1,656
|
2018-03-23T14:15:05.000Z
|
2022-03-31T14:00:28.000Z
|
examples/matplotlib/mpl_plot_energy.py
|
sudojarvis/arviz
|
73531be4f23df7d764b2e3bec8c5ef5cb882590d
|
[
"Apache-2.0"
] | 316
|
2018-04-03T14:25:52.000Z
|
2022-03-25T10:41:29.000Z
|
"""
Energy Plot
===========
_thumb: .7, .5
_example_title: Plot energy
"""
import matplotlib.pyplot as plt
import arviz as az
az.style.use("arviz-darkgrid")
data = az.load_arviz_data("centered_eight")
az.plot_energy(data, figsize=(12, 8))
plt.show()
| 14.166667
| 43
| 0.690196
|
6c0f3a17bea826be3caa38928b6fc4e603a306de
| 2,640
|
py
|
Python
|
pony/orm/tests/test_exists.py
|
ProgHaj/pony
|
52720af1728ab2931364be8615e18ad8714a7c9e
|
[
"Apache-2.0"
] | 2,628
|
2015-01-02T17:55:28.000Z
|
2022-03-31T10:36:42.000Z
|
pony/orm/tests/test_exists.py
|
ProgHaj/pony
|
52720af1728ab2931364be8615e18ad8714a7c9e
|
[
"Apache-2.0"
] | 525
|
2015-01-03T20:30:08.000Z
|
2022-03-23T12:30:01.000Z
|
pony/orm/tests/test_exists.py
|
ProgHaj/pony
|
52720af1728ab2931364be8615e18ad8714a7c9e
|
[
"Apache-2.0"
] | 256
|
2015-01-02T17:55:31.000Z
|
2022-03-20T17:01:37.000Z
|
import unittest
from pony.orm.core import *
from pony.orm.tests.testutils import *
from pony.orm.tests import setup_database, teardown_database
db = Database()
class Group(db.Entity):
students = Set('Student')
class Student(db.Entity):
first_name = Required(str)
last_name = Required(str)
login = Optional(str, nullable=True)
graduated = Optional(bool, default=False)
group = Required(Group)
passport = Optional('Passport', column='passport')
class Passport(db.Entity):
student = Optional(Student)
class TestExists(unittest.TestCase):
@classmethod
def setUpClass(cls):
setup_database(db)
with db_session:
g1 = Group(id=1)
g2 = Group(id=2)
p = Passport(id=1)
Student(id=1, first_name='Mashu', last_name='Kyrielight', login='Shielder', group=g1)
Student(id=2, first_name='Okita', last_name='Souji', login='Sakura', group=g1)
Student(id=3, first_name='Francis', last_name='Drake', group=g2, graduated=True)
Student(id=4, first_name='Oda', last_name='Nobunaga', group=g2, graduated=True)
Student(id=5, first_name='William', last_name='Shakespeare', group=g2, graduated=True, passport=p)
@classmethod
def tearDownClass(cls):
teardown_database(db)
def setUp(self):
rollback()
db_session.__enter__()
def tearDown(self):
rollback()
db_session.__exit__()
def test_1(self):
q = select(g for g in Group if exists(s.login for s in g.students))[:]
self.assertEqual(q[0], Group[1])
def test_2(self):
q = select(g for g in Group if exists(s.graduated for s in g.students))[:]
self.assertEqual(q[0], Group[2])
def test_3(self):
q = select(s for s in Student if
exists(len(s2.first_name) == len(s.first_name) and s != s2 for s2 in Student))[:]
self.assertEqual(set(q), {Student[1], Student[2], Student[3], Student[5]})
def test_4(self):
q = select(g for g in Group if not exists(not s.graduated for s in g.students))[:]
self.assertEqual(q[0], Group[2])
def test_5(self):
q = select(g for g in Group if exists(s for s in g.students))[:]
self.assertEqual(set(q), {Group[1], Group[2]})
def test_6(self):
q = select(g for g in Group if exists(s.login for s in g.students if s.first_name != 'Okita') and g.id != 10)[:]
self.assertEqual(q[0], Group[1])
def test_7(self):
q = select(g for g in Group if exists(s.passport for s in g.students))[:]
self.assertEqual(q[0], Group[2])
| 33.417722
| 120
| 0.623106
|
3df10ff5fa39204ca02775869c55d236057788e8
| 594
|
py
|
Python
|
NetworkApp/ip_addr_valid.py
|
khtran1994/Python_Networking_Applications
|
3dfffa85582ee07ca9d9df05d720f65c1953a667
|
[
"MIT"
] | null | null | null |
NetworkApp/ip_addr_valid.py
|
khtran1994/Python_Networking_Applications
|
3dfffa85582ee07ca9d9df05d720f65c1953a667
|
[
"MIT"
] | null | null | null |
NetworkApp/ip_addr_valid.py
|
khtran1994/Python_Networking_Applications
|
3dfffa85582ee07ca9d9df05d720f65c1953a667
|
[
"MIT"
] | null | null | null |
import sys
#Checking octets
def ip_addr_valid(list):
for ip in list:
ip = ip.rstrip("\n")
octet_list = ip.split('.')
        if (len(octet_list) == 4) and (1 <= int(octet_list[0]) <= 223) \
                and (int(octet_list[0]) != 127) \
                and (int(octet_list[0]) != 169 or int(octet_list[1]) != 254) \
                and (0 <= int(octet_list[1]) <= 255) \
                and (0 <= int(octet_list[2]) <= 255) \
                and (0 <= int(octet_list[3]) <= 255):
continue
else:
print('\n* There was an invalid IP address in the file: {} :(\n'.format(ip))
sys.exit()
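# --- Illustrative usage sketch (not part of the original script). The file name
# below is an assumption; any text file with one IPv4 address per line works.
if __name__ == "__main__":
    with open("ip_addresses.txt") as ip_file:
        ip_addr_valid(ip_file.readlines())
    print("All IP addresses in the file are valid :)")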
| 39.6
| 272
| 0.521886
|
077239508b35a46fb57b9a5aad6040d1c47dc3e6
| 545
|
py
|
Python
|
world/exploration/migrations/0041_shardhavenobstacle_obstacle_class.py
|
tellg/arxcode
|
f04340f9466c31f59bc13b8e1afd4f5734da4848
|
[
"MIT"
] | 5
|
2019-03-16T08:26:53.000Z
|
2019-11-27T15:42:16.000Z
|
world/exploration/migrations/0041_shardhavenobstacle_obstacle_class.py
|
tellg/arxcode
|
f04340f9466c31f59bc13b8e1afd4f5734da4848
|
[
"MIT"
] | 7
|
2018-09-29T05:08:15.000Z
|
2021-06-10T21:35:32.000Z
|
world/exploration/migrations/0041_shardhavenobstacle_obstacle_class.py
|
tellg/arxcode
|
f04340f9466c31f59bc13b8e1afd4f5734da4848
|
[
"MIT"
] | 7
|
2018-09-19T21:11:29.000Z
|
2019-11-19T12:46:14.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-11-22 02:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('exploration', '0040_auto_20181120_2016'),
]
operations = [
migrations.AddField(
model_name='shardhavenobstacle',
name='obstacle_class',
field=models.PositiveSmallIntegerField(choices=[(0, b'Pass an Exit'), (1, b'Obtain a Treasure')], default=0),
),
]
| 25.952381
| 121
| 0.640367
|
e96f53f4360a40e2b3b32e710517e8a5f2d686f9
| 739
|
py
|
Python
|
newssourceaggregator/test/query_authdb.py
|
hayj/NewsSourceAggregator
|
f57803511ecb28be2f0c088d2f7d00bec4a8fe4e
|
[
"MIT"
] | null | null | null |
newssourceaggregator/test/query_authdb.py
|
hayj/NewsSourceAggregator
|
f57803511ecb28be2f0c088d2f7d00bec4a8fe4e
|
[
"MIT"
] | 4
|
2021-02-08T20:25:35.000Z
|
2021-12-13T19:48:38.000Z
|
newssourceaggregator/test/query_authdb.py
|
hayj/NewsSourceAggregator
|
f57803511ecb28be2f0c088d2f7d00bec4a8fe4e
|
[
"MIT"
] | null | null | null |
import sys, os
sys.path.append("/".join(os.path.abspath(__file__).split("/")[0:-2]))
from sqlalchemy_declarative import User, Base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
engine = create_engine('sqlite:///sqlalchemy_example_auth.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
DBSession.bind = engine
session = DBSession()
usr = User(email="test_user", password="test_user", token="test_token")
session.add(usr)
session.commit()
print(session.query(User).all())
#usr = session.query(User).first()
#print(usr.email)
#print(usr.password)
usr = session.query(User).filter(User.email == "ajodar@test.fr").first()
print(usr)
# Guard against the queried account not existing in the example database.
if usr is not None:
    print(usr.email)
    print(usr.password)
    print(usr.token)
| 26.392857
| 72
| 0.756428
|
535a9e27f210103705b60ec64ff112f7dc7c54a5
| 1,537
|
py
|
Python
|
Python-Scripts/pearson_correlation.py
|
Peshal1067/climate-data-science
|
2664d8f0fb8cad2ef353f4ec26d0a0280ac2d181
|
[
"MIT"
] | 79
|
2020-02-03T22:59:53.000Z
|
2022-03-09T05:50:08.000Z
|
Python-Scripts/pearson_correlation.py
|
zexuanxu/climate-data-science
|
2664d8f0fb8cad2ef353f4ec26d0a0280ac2d181
|
[
"MIT"
] | 1
|
2019-06-01T03:09:22.000Z
|
2019-06-01T03:09:22.000Z
|
Python-Scripts/pearson_correlation.py
|
zexuanxu/climate-data-science
|
2664d8f0fb8cad2ef353f4ec26d0a0280ac2d181
|
[
"MIT"
] | 25
|
2020-02-27T20:37:16.000Z
|
2022-01-05T21:57:31.000Z
|
#!/usr/bin/env python
'''
File Name: correl.py
Description: Pearson Linear Correlation for Gridded Data and Climate Indices.
Author: Willy Hagi
E-mail: hagi.willy@gmail.com
Python Version: 3.6.7
'''
import numpy as np
import xarray as xr
import proplot as plot
import matplotlib.pyplot as plt
from esmtools.stats import *
# --- read netcdf file
dset = xr.open_dataset('asstdt_pacific.nc')
# --- select djf months
sst = dset['sst'].sel(time=np.in1d(dset['time.month'], [1, 2, 12]))
print(sst)
# --- make niño 3.4 index
nino34 = sst.sel(lat=slice(5, -5), lon=slice(360 - 170, 360 - 120))
nino34 = nino34.mean(dim=('lat', 'lon'))
# --- pearson linear correlation
pearson_r, p_values = corr(sst, nino34, dim='time', return_p=True)
# --- plotting
fig, ax = plot.subplots(axwidth=6., tight=True,
proj='pcarree', proj_kw={'lon_0': 180},)
# format options
ax.format(land=False, coast=True, innerborders=True, borders=True,
large='15px', labels=True,
latlim=(31, -31), lonlim=(119, 291),
lonlines=plot.arange(130, 280, 20),
geogridlinewidth=0,)
# plot correlation values
map1 = ax.contourf(dset['lon'], dset['lat'], pearson_r,
levels=50, cmap='ColdHot', extend='both')
# plot p_values
ax.contourf(dset['lon'], dset['lat'], p_values,
levels=np.arange(0, 0.05, 0.01), hatches=['....'], alpha=0)
# colorbar
ax.colorbar(map1, loc='b', shrink=0.5, extendrect=True)
ax.format(title='Correlation between Niño 3.4 Index and ASST')
plt.show()
| 26.5
| 78
| 0.65257
|
d6a7316e93d54b3411dcb1fadc27e030f0244040
| 4,492
|
py
|
Python
|
glove/glove.py
|
AlexisKoko/AlexisKoko
|
428a514704879004667e513c6509b27be28995f9
|
[
"Apache-2.0"
] | null | null | null |
glove/glove.py
|
AlexisKoko/AlexisKoko
|
428a514704879004667e513c6509b27be28995f9
|
[
"Apache-2.0"
] | null | null | null |
glove/glove.py
|
AlexisKoko/AlexisKoko
|
428a514704879004667e513c6509b27be28995f9
|
[
"Apache-2.0"
] | 3
|
2022-03-28T13:05:24.000Z
|
2022-03-31T10:00:52.000Z
|
import time
import io
import sys
import numpy
import tensorflow as tf
from .normalise_text import normalise as normalise_text
# / removed from being the last param, ref https://stackoverflow.com/a/56514307
def removeprefix(self: str, prefix: str) -> str:
"""
Removes a prefix from a string.
Polyfills string.removeprefix(), which is introduced in Python 3.9+.
Ref https://www.python.org/dev/peps/pep-0616/#specification
"""
if self.startswith(prefix):
return self[len(prefix):]
else:
return self[:]
# / removed from being the last param, ref https://stackoverflow.com/a/56514307
def removesuffix(self: str, suffix: str) -> str:
"""
Removes a suffix from a string.
Polyfills string.removesuffix(), which is introduced in Python 3.9+.
Ref https://www.python.org/dev/peps/pep-0616/#specification
"""
# suffix='' should not call self[:-0].
if suffix and self.endswith(suffix):
return self[:-len(suffix)]
else:
return self[:]
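# --- Illustrative behaviour note (not part of the original module): the polyfills
# above mirror str.removeprefix()/str.removesuffix() from PEP 616, e.g.
#   removeprefix("<token>", "<") -> "token>"
#   removesuffix("token>", ">")  -> "token"
# At most one leading/trailing occurrence is removed, unlike str.strip("<>").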
class GloVe:
"""
Manages pre-trained GloVe word vectors.
Ref https://www.damienpontifex.com/posts/using-pre-trained-glove-embeddings-in-tensorflow/
Download pre-trained word vectors from here: https://nlp.stanford.edu/projects/glove/
"""
def __init__(self, filepath):
"""
Initialises a new GloVe class instance.
filepath (string): The path to the file to load the pre-trained GloVe embeddings from.
"""
super(GloVe, self).__init__()
self.data = {}
self.word_length = None
self.filepath = filepath
self.load()
def load(self):
"""Loads the GloVe database from a given file."""
sys.stderr.write("\n")
start = time.time()
handle = io.open(self.filepath, "r", encoding="utf8")
for i, line in enumerate(handle):
parts = line.split(" ", maxsplit=1)
# We do NOT strip < and > here, because we do a lookup later on that.
self.data[parts[0]] = list(map(
lambda el: float(el),
parts[1].split(" ")
))
if self.word_length is None:
self.word_length = len(self.data[parts[0]])
# Update the CLI
if i % 10000 == 0:
sys.stderr.write(f"\rLoading GloVe from '{self.filepath}': {i}...")
handle.close()
sys.stderr.write(f" done in {round(time.time() - start, 3)}s.\n")
def lookup(self, token: str):
"""Looks up the given token in the loaded embeddings."""
key = token
if key not in self.data:
key = self.strip_outer(token) # Try removing < and >
if key not in self.data:
key = f"<{token}>" # Try wrapping in < and >
if key not in self.data:
return None # Give up
return self.data[key] # We found it!
def strip_outer(self, str: str) -> str:
"""Strips < and > from the given input string."""
return removesuffix(removeprefix(str, "<"), ">")
def _tokenise(self, str: str):
"""Splits the input string into tokens using Keras."""
return tf.keras.preprocessing.text.text_to_word_sequence(
self._normalise(str),
filters = ", \t\n",
lower = True, split = " "
)
def _normalise(self, str):
"""Normalises input text to be suitable to GloVe lookup."""
return normalise_text(str)
###########################################################################
def word_vector_length(self):
"""Returns the length of a single word vector."""
return self.word_length
def tweetvision(self, str):
"""
Convert a string to a list of tokens as the AI will see it.
Basically the same as .embeddings(str), but returns the tokens instead of the embeddings.
"""
result = []
for i, token in enumerate(self._tokenise(str)):
if self.lookup(token) is None:
continue
else:
result.append(token)
return result
def embeddings(self, str, length=-1):
"""
Converts the given string to a list of word embeddings.
str (string): The string to convert to an embedding.
length (number): The number of tokens that the returned embedding should have. -1 (the default value) indicates that no length normalisation should be performed.
"""
result = []
# TODO: Handle out-of-vocabulary words better than just stripping them
for i, token in enumerate(self._tokenise(str)):
embedding = self.lookup(token)
if embedding is None:
# logger.debug(f"[DEBUG] {token} was none")
continue
result.append(embedding)
# Normalise the embedding length if we're asked to
if length > -1:
result = result[-length:]
shortfall = length - len(result)
for _ in range(shortfall):
result.append(numpy.zeros(self.word_vector_length()))
return result
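# --- Illustrative usage sketch (not part of the original module). The embedding
# file name is an assumption; because of the relative import above, run this via
# `python -m <package>.glove` rather than executing the file directly.
if __name__ == "__main__":
    glove = GloVe("glove.6B.50d.txt")
    print(glove.word_vector_length())           # e.g. 50 for the 50d vectors
    print(glove.tweetvision("Hello, world!"))   # tokens as the model will see them
    vectors = glove.embeddings("Hello, world!", length=10)
    print(len(vectors), len(vectors[0]))        # padded to 10 tokens of length 50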
| 28.794872
| 163
| 0.667409
|
c1691c5c56fadc21b2dadc7ad422e9fb6cd9a3dc
| 5,602
|
py
|
Python
|
run.py
|
BA3000/openvqa
|
af247b61ee4c25f64c542de73dcc3e334fa5a2c3
|
[
"Apache-2.0"
] | null | null | null |
run.py
|
BA3000/openvqa
|
af247b61ee4c25f64c542de73dcc3e334fa5a2c3
|
[
"Apache-2.0"
] | null | null | null |
run.py
|
BA3000/openvqa
|
af247b61ee4c25f64c542de73dcc3e334fa5a2c3
|
[
"Apache-2.0"
] | null | null | null |
# --------------------------------------------------------
# OpenVQA
# Written by Yuhao Cui https://github.com/cuiyuhao1996
# --------------------------------------------------------
from openvqa.models.model_loader import CfgLoader
from utils.exec import Execution
import argparse, yaml
def parse_args():
'''
Parse input arguments
'''
parser = argparse.ArgumentParser(description='OpenVQA Args')
parser.add_argument('--RUN', dest='RUN_MODE',
choices=['train', 'val', 'test'],
help='{train, val, test}',
type=str, required=True)
parser.add_argument('--MODEL', dest='MODEL',
choices=[
'mcan_small',
'mcan_large',
'ban_4',
'ban_8',
'mfb',
'mfh',
'mem',
'butd',
'mmnasnet'
]
,
help='{'
'mcan_small,'
'mcan_large,'
'ban_4,'
'ban_8,'
'mfb,'
                             'mfh,'
                             'mem,'
'butd,'
'mmnasnet,'
'}'
,
type=str, required=True)
parser.add_argument('--DATASET', dest='DATASET',
choices=['vqa', 'gqa', 'clevr'],
help='{'
'vqa,'
'gqa,'
'clevr,'
'}'
,
type=str, required=True)
parser.add_argument('--SPLIT', dest='TRAIN_SPLIT',
choices=['train', 'train+val', 'train+val+vg'],
help="set training split, "
"vqa: {'train', 'train+val', 'train+val+vg'}"
"gqa: {'train', 'train+val'}"
"clevr: {'train', 'train+val'}"
,
type=str)
parser.add_argument('--EVAL_EE', dest='EVAL_EVERY_EPOCH',
choices=['True', 'False'],
                        help='True: evaluate the val split when an epoch finishes,'
                             'False: do not evaluate locally',
type=str)
parser.add_argument('--SAVE_PRED', dest='TEST_SAVE_PRED',
choices=['True', 'False'],
help='True: save the prediction vectors,'
'False: do not save the prediction vectors',
type=str)
parser.add_argument('--BS', dest='BATCH_SIZE',
help='batch size in training',
type=int)
parser.add_argument('--GPU', dest='GPU',
help="gpu choose, eg.'0, 1, 2, ...'",
type=str)
parser.add_argument('--SEED', dest='SEED',
help='fix random seed',
type=int)
parser.add_argument('--VERSION', dest='VERSION',
help='version control',
type=str)
parser.add_argument('--RESUME', dest='RESUME',
choices=['True', 'False'],
help='True: use checkpoint to resume training,'
'False: start training with random init',
type=str)
parser.add_argument('--CKPT_V', dest='CKPT_VERSION',
help='checkpoint version',
type=str)
parser.add_argument('--CKPT_E', dest='CKPT_EPOCH',
help='checkpoint epoch',
type=int)
parser.add_argument('--CKPT_PATH', dest='CKPT_PATH',
help='load checkpoint path, we '
'recommend that you use '
'CKPT_VERSION and CKPT_EPOCH '
                             'instead, it will override '
'CKPT_VERSION and CKPT_EPOCH',
type=str)
parser.add_argument('--ACCU', dest='GRAD_ACCU_STEPS',
help='split batch to reduce gpu memory usage',
type=int)
parser.add_argument('--NW', dest='NUM_WORKERS',
help='multithreaded loading to accelerate IO',
type=int)
parser.add_argument('--PINM', dest='PIN_MEM',
choices=['True', 'False'],
help='True: use pin memory, False: not use pin memory',
type=str)
parser.add_argument('--VERB', dest='VERBOSE',
choices=['True', 'False'],
help='True: verbose print, False: simple print',
type=str)
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
cfg_file = "configs/{}/{}.yml".format(args.DATASET, args.MODEL)
with open(cfg_file, 'r') as f:
yaml_dict = yaml.safe_load(f)
__C = CfgLoader(yaml_dict['MODEL_USE']).load()
args = __C.str_to_bool(args)
args_dict = __C.parse_to_dict(args)
args_dict = {**yaml_dict, **args_dict}
__C.add_args(args_dict)
__C.proc()
print('Hyper Parameters:')
print(__C)
execution = Execution(__C)
execution.run(__C.RUN_MODE)
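# --- Illustrative invocation sketch (not part of the original script). The flag
# values are taken from the choices defined above; the GPU id is an assumption:
#   python run.py --RUN train --MODEL mcan_small --DATASET vqa --SPLIT train --GPU 0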
| 33.951515
| 81
| 0.419315
|
0d83d2a82d558c8e9126526ee9732d3b0b6bcf25
| 1,125
|
py
|
Python
|
setup.py
|
acarrasco/dacite
|
ece070cc3c25e86634086db8ee4f2e45bdfe6fe5
|
[
"MIT"
] | null | null | null |
setup.py
|
acarrasco/dacite
|
ece070cc3c25e86634086db8ee4f2e45bdfe6fe5
|
[
"MIT"
] | 1
|
2019-03-20T17:30:34.000Z
|
2019-03-20T17:30:34.000Z
|
setup.py
|
acarrasco/dacite
|
ece070cc3c25e86634086db8ee4f2e45bdfe6fe5
|
[
"MIT"
] | null | null | null |
import sys
from setuptools import setup
if sys.version_info < (3, 7):
requirements = ["dataclasses"]
else:
requirements = []
setup(
name="dacite",
version="0.0.24",
description="Simple creation of data classes from dictionaries.",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
author="Konrad Hałas",
author_email="halas.konrad@gmail.com",
url="https://github.com/konradhalas/dacite",
license="MIT",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
python_requires=">=3.6",
keywords="dataclasses",
packages=["dacite"],
install_requires=requirements,
extras_require={"dev": ["pytest>=4", "pytest-cov", "coveralls", "black", "mypy", "pylint"]},
)
| 31.25
| 96
| 0.633778
|
09e639ab8407ec6803e00fb0e08fb09e57908f63
| 7,181
|
py
|
Python
|
src/demos/python/irrlicht/demo_IRR_ujoint.py
|
Benatti1991/chrono
|
d927a7fae8ed2f4e6695cacaef28c605fcd9ffaf
|
[
"BSD-3-Clause"
] | 1,383
|
2015-02-04T14:17:40.000Z
|
2022-03-30T04:58:16.000Z
|
src/demos/python/irrlicht/demo_IRR_ujoint.py
|
Benatti1991/chrono
|
d927a7fae8ed2f4e6695cacaef28c605fcd9ffaf
|
[
"BSD-3-Clause"
] | 245
|
2015-01-11T15:30:51.000Z
|
2022-03-30T21:28:54.000Z
|
src/demos/python/irrlicht/demo_IRR_ujoint.py
|
Benatti1991/chrono
|
d927a7fae8ed2f4e6695cacaef28c605fcd9ffaf
|
[
"BSD-3-Clause"
] | 351
|
2015-02-04T14:17:47.000Z
|
2022-03-30T04:42:52.000Z
|
#------------------------------------------------------------------------------
# Name: pychrono example
# Purpose:
#
# Author: Lijing Yang
#
# Created: 6/10/2020
# Copyright: (c) ProjectChrono 2019
#------------------------------------------------------------------------------
import pychrono.core as chrono
import pychrono.irrlicht as chronoirr
import math
print ("Example: demonstration of a universal joint")
# The path to the Chrono data directory containing various assets (meshes, textures, data files)
# is automatically set, relative to the default location of this demo.
# If running from a different directory, you must change the path to the data directory with:
#chrono.SetChronoDataPath('relative/path/to/data/directory/')
# ---------------------------------------------------------------------
#
# Create the simulation system and add items
#
mysystem = chrono.ChSystemNSC()
mysystem.Set_G_acc(chrono.ChVectorD(0, 0, 0))
# Set the half-length of the two shafts
hl = 2
# Set the bend angle between the two shafts (positive rotation
# about the global X axis)
angle = math.pi / 6.
cosa = math.cos(angle)
sina = math.sin(angle)
rot = chrono.Q_from_AngX(angle)
# Create the ground body
# ----------------------
ground = chrono.ChBody()
ground.SetIdentifier(-1)
ground.SetBodyFixed(True)
ground.SetCollide(False)
mysystem.Add(ground)
# attach visualization assets to represent the revolute and cylindrical
# joints that connect the two shafts to ground
cyl_1 = chrono.ChCylinderShape()
cyl_1.GetCylinderGeometry().p1 = chrono.ChVectorD(0, 0, -hl - 0.2)
cyl_1.GetCylinderGeometry().p2 = chrono.ChVectorD(0, 0, -hl + 0.2)
cyl_1.GetCylinderGeometry().rad = 0.3
ground.AddAsset(cyl_1)
cyl_2 = chrono.ChCylinderShape()
cyl_2.GetCylinderGeometry().p1 = chrono.ChVectorD(0, -(hl - 0.2) * sina, (hl - 0.2) * cosa)
cyl_2.GetCylinderGeometry().p2 = chrono.ChVectorD(0, -(hl + 0.2) * sina, (hl + 0.2) * cosa)
cyl_2.GetCylinderGeometry().rad = 0.3
ground.AddAsset(cyl_2)
# Create the first shaft body
# ---------------------------
shaft_1 = chrono.ChBody()
mysystem.AddBody(shaft_1)
shaft_1.SetIdentifier(1)
shaft_1.SetBodyFixed(False)
shaft_1.SetCollide(False)
shaft_1.SetMass(1)
shaft_1.SetInertiaXX(chrono.ChVectorD(1, 1, 0.2))
shaft_1.SetPos(chrono.ChVectorD(0, 0, -hl))
shaft_1.SetRot(chrono.ChQuaternionD(1, 0, 0, 0))
# Add visualization assets to represent the shaft (a box) and the arm of the
# universal joint's cross associated with this shaft (a cylinder)
box_1 = chrono.ChBoxShape()
box_1.GetBoxGeometry().Size = chrono.ChVectorD(0.15, 0.15, 0.9 * hl)
shaft_1.AddAsset(box_1)
cyl_2 = chrono.ChCylinderShape()
cyl_2.GetCylinderGeometry().p1 = chrono.ChVectorD(-0.2, 0, hl)
cyl_2.GetCylinderGeometry().p2 = chrono.ChVectorD(0.2, 0, hl)
cyl_2.GetCylinderGeometry().rad = 0.05
shaft_1.AddAsset(cyl_2)
col = chrono.ChColorAsset()
col.SetColor(chrono.ChColor(0.9, 0.4, 0.1))
shaft_1.AddAsset(col)
# Create the second shaft body
# ----------------------------
# The second shaft is identical to the first one, but initialized at an angle
# equal to the specified bend angle.
shaft_2 = chrono.ChBody()
mysystem.AddBody(shaft_2)
shaft_2.SetIdentifier(1)
shaft_2.SetBodyFixed(False)
shaft_2.SetCollide(False)
shaft_2.SetMass(1)
shaft_2.SetInertiaXX(chrono.ChVectorD(1, 1, 0.2))
shaft_2.SetPos(chrono.ChVectorD(0, -hl * sina, hl * cosa))
shaft_2.SetRot(rot)
# Add visualization assets to represent the shaft (a box) and the arm of the
# universal joint's cross associated with this shaft (a cylinder)
box_1 = chrono.ChBoxShape()
box_1.GetBoxGeometry().Size = chrono.ChVectorD(0.15, 0.15, 0.9 * hl)
shaft_2.AddAsset(box_1)
cyl_2 = chrono.ChCylinderShape()
cyl_2.GetCylinderGeometry().p1 = chrono.ChVectorD(0, -0.2, -hl)
cyl_2.GetCylinderGeometry().p2 = chrono.ChVectorD(0, 0.2, -hl)
cyl_2.GetCylinderGeometry().rad = 0.05
shaft_2.AddAsset(cyl_2)
col = chrono.ChColorAsset()
col.SetColor(chrono.ChColor(0.2, 0.4, 0.8))
shaft_2.AddAsset(col)
# Connect the first shaft to ground
# ---------------------------------
# Use a rotational motor to impose both the revolute joint constraints, as well
# as constant angular velocity. Here, we drive the motor angle with a ramp function.
# Alternatively, we could use a ChLinkMotorAngularSpeed with constant speed.
# The joint is located at the origin of the first shaft.
motor = chrono.ChLinkMotorRotationAngle()
motor.Initialize(ground,
shaft_1,
chrono.ChFrameD(chrono.ChVectorD(0, 0, -hl), chrono.ChQuaternionD(1, 0, 0, 0)))
motor.SetAngleFunction(chrono.ChFunction_Ramp(0, 1))
mysystem.AddLink(motor)
# Connect the second shaft to ground through a cylindrical joint
# --------------------------------------------------------------
# Use a cylindrical joint so that we do not have redundant constraints
# (note that, technically Chrono could deal with a revolute joint here).
# the joint is located at the origin of the second shaft.
cyljoint = chrono.ChLinkLockCylindrical()
mysystem.AddLink(cyljoint)
cyljoint.Initialize(ground,
shaft_2,
chrono.ChCoordsysD(chrono.ChVectorD(0, -hl * sina, hl * cosa), rot))
# Connect the two shafts through a universal joint
# ------------------------------------------------
# The joint is located at the global origin. Its kinematic constraints will
# enforce orthogonality of the associated cross.
ujoint = chrono.ChLinkUniversal()
mysystem.AddLink(ujoint)
ujoint.Initialize(shaft_1,
shaft_2,
chrono.ChFrameD(chrono.ChVectorD(0, 0, 0), rot))
# ---------------------------------------------------------------------
#
# Create an Irrlicht application to visualize the system
#
myapplication = chronoirr.ChIrrApp(mysystem, 'PyChrono example: universal joint', chronoirr.dimension2du(1024,768))
myapplication.AddTypicalSky()
myapplication.AddTypicalLogo(chrono.GetChronoDataFile('logo_pychrono_alpha.png'))
myapplication.AddTypicalCamera(chronoirr.vector3df(3, 1, -1.5))
myapplication.AddTypicalLights()
# ==IMPORTANT!== Use this function for adding a ChIrrNodeAsset to all items
# in the system. These ChIrrNodeAsset assets are 'proxies' to the Irrlicht meshes.
# If you need a finer control on which item really needs a visualization proxy in
# Irrlicht, just use application.AssetBind(myitem) on a per-item basis.
myapplication.AssetBindAll()
# ==IMPORTANT!== Use this function for 'converting' into Irrlicht meshes the assets
# that you added to the bodies into 3D shapes, they can be visualized by Irrlicht!
myapplication.AssetUpdateAll()
# ---------------------------------------------------------------------
#
# Run the simulation
#
myapplication.SetTimestep(0.001)
myapplication.SetTryRealtime(True)
frame = 0
while(myapplication.GetDevice().run()):
myapplication.BeginScene()
myapplication.DrawAll()
myapplication.DoStep()
myapplication.EndScene()
frame += 1
if frame % 20 == 0:
omega_1 = shaft_1.GetWvel_loc().z
omega_2 = shaft_2.GetWvel_loc().z
print('{:.4}'.format(str(mysystem.GetChTime())), '{:.6}'.format(str(omega_1)), '{:.6}'.format(str(omega_2)))
| 33.713615
| 116
| 0.678179
|
873bceb2fa848bc5d66c4dd57168acb11f17605b
| 131
|
py
|
Python
|
configs/gcnet/gcnet_r101-d8_512x512_160k_ade20k.py
|
Xlinford/mmsegmentation
|
8b444de5e6db2af2538a73a93ac75204f5c3bb2f
|
[
"Apache-2.0"
] | null | null | null |
configs/gcnet/gcnet_r101-d8_512x512_160k_ade20k.py
|
Xlinford/mmsegmentation
|
8b444de5e6db2af2538a73a93ac75204f5c3bb2f
|
[
"Apache-2.0"
] | null | null | null |
configs/gcnet/gcnet_r101-d8_512x512_160k_ade20k.py
|
Xlinford/mmsegmentation
|
8b444de5e6db2af2538a73a93ac75204f5c3bb2f
|
[
"Apache-2.0"
] | null | null | null |
_base_ = './gcnet_r50-d8_512x512_160k_ade20k.py'
model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
| 43.666667
| 80
| 0.763359
|
ce6fe2dcb0606bda51a67fd563fc8568fa4ed3de
| 4,720
|
py
|
Python
|
Metadataextraction/test_whole.py
|
corneliazy/Geosoftware2
|
8604c79c58a61b84c602f16b5f1e74e30dfcbd0e
|
[
"MIT"
] | null | null | null |
Metadataextraction/test_whole.py
|
corneliazy/Geosoftware2
|
8604c79c58a61b84c602f16b5f1e74e30dfcbd0e
|
[
"MIT"
] | 47
|
2018-11-13T13:55:01.000Z
|
2019-09-16T13:38:11.000Z
|
Metadataextraction/test_whole.py
|
corneliazy/Geosoftware2
|
8604c79c58a61b84c602f16b5f1e74e30dfcbd0e
|
[
"MIT"
] | 4
|
2018-11-27T12:36:51.000Z
|
2020-10-14T18:07:04.000Z
|
import click # used to print something
import os # used to get the location of the testdata
import extractTool # used to invoke the getMetadata function
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
# Tests to check folders
###############################
# --detail=bbox --folder=whole
###############################
def test_whole_mix_1():
filepath=__location__+'/testdata/mischung_bbox1'
assert extractTool.getMetadata(filepath, 'bbox' , False) == [[5.9153007564753155, -43.7405, 167.998, 52.5307755328733], [None], [None]]
def test_whole_mix_2():
filepath=__location__+"/testdata/mischung_bbox2"
assert extractTool.getMetadata(filepath, 'bbox' , False) == [[5.520648869321924, 49.87014441103477, 10.114607987362609, 52.88446415203189], [None], [None]]
def test_whole_geotiff():
filepath=__location__+'/testdata/geotifftest'
assert extractTool.getMetadata(filepath, 'bbox' , False) == [[5.520648869321924, 49.87014441103477, 10.114607987362609, 52.88446415203189], [None], [None]]
def test_whole_geopackage():
filepath=__location__+"/testdata/geopackagetest"
assert extractTool.getMetadata(filepath, 'bbox' , False) == [[96.8169, -43.7405, 167.998, -9.14218], [None], [None]]
def test_whole_csv():
filepath=__location__+"/testdata/csvordnertest"
assert extractTool.getMetadata(filepath, 'bbox' , False) == [[4.3175, 47.988889, 9.731219, 53.217222], [None], [None]]
def test_whole_geojson():
filepath=__location__+"/testdata/innergeoj"
assert extractTool.getMetadata(filepath, 'bbox', False) == [[6.60864, 51.2380774, 6.71483, 51.31549], [None], [None]]
#####################################
# --detail=bbox --folder=whole --time
#####################################
def test_whole_time_geojson():
filepath=__location__+"/testdata/timegeo/timegeo"
assert extractTool.getMetadata(filepath, 'bbox' , True) == [[6.220493316650391, 50.52150360276628, 7.647256851196289, 51.974624029877454], [None], ['2018-11-14 00:00:00', '2018-11-14 00:00:00']]
def test_whole_time_mix():
filepath=__location__+"/testdata/time_mischung"
assert extractTool.getMetadata(filepath, 'bbox', True) == [[0.0,-90.0, 357.5, 90.0], [None], ['2002-07-01 12:00:00', '2018-11-14 00:00:00']]
def test_whole_time_empty():
filepath=__location__+"/testdata/leer"
assert extractTool.getMetadata(filepath, 'bbox', True) == None
#####################################
# --detail=convexHull --folder=whole
#####################################
def test_whole_hull_mix_1():
filepath=__location__+'/testdata/mischung_bbox1'
assert extractTool.getMetadata(filepath, 'convexHull', False) == [[None], [None], [None]]
def test_whole_hull_mix_2():
filepath=__location__+"/testdata/mischung_bbox2"
assert extractTool.getMetadata(filepath, 'convexHull', False) == [[None], [None], [None]]
def test_whole_hull_geotiff():
filepath=__location__+'/testdata/geotifftest'
assert extractTool.getMetadata(filepath, 'convexHull', False) == [[None], [None], [None]]
def test_whole_hull_geopackage():
filepath=__location__+"/testdata/geopackagetest"
assert extractTool.getMetadata(filepath, 'convexHull', False) == [[None], [None], [None]]
def test_whole_hull_csv():
filepath=__location__+"/testdata/csvordnertest"
assert extractTool.getMetadata(filepath, 'convexHull', False) == [[None], [None], [None]]
def test_whole_hull_geojson():
filepath=__location__+"/testdata/innergeoj"
assert extractTool.getMetadata(filepath, 'convexHull', False) == [[None], [None], [None]]
###########################################
# --detail=convexHull --folder=whole --time
###########################################
def test_whole_hull_time_geojson():
filepath=__location__+"/testdata/timegeo/timegeo"
assert extractTool.getMetadata(filepath, 'convexHull', True) == [[None], [None], ['2018-11-14 00:00:00', '2018-11-14 00:00:00']]
def test_whole_hull_time_mix():
filepath=__location__+"/testdata/time_mischung"
assert extractTool.getMetadata(filepath, 'convexHull', True) == [[None], [None], ['2002-07-01 12:00:00', '2018-11-14 00:00:00']]
def test_whole_hull_time_empty():
filepath=__location__+"/testdata/leer"
assert extractTool.getMetadata(filepath, 'convexHull', True) == None
###########################################
# --detail=bbox folder in folder
###########################################
def test_folder_folder():
filepath=__location__+"/testdata/folder"
assert extractTool.getMetadata(filepath, 'bbox', False) == [[6.59663465544554, 51.2380774, 6.71483, 51.486636388722296], [None], [None]]
| 44.952381
| 198
| 0.654661
|
e90e96aecc90c38189b66f3fdb5030c9e29a1ec8
| 7,849
|
py
|
Python
|
dmi_scheduler/database.py
|
digitalmethodsinitiative/dmi-scheduler
|
99065fef3d9bdb6ec0a84ec4499fe2300f92021b
|
[
"MIT"
] | 1
|
2020-08-31T11:46:27.000Z
|
2020-08-31T11:46:27.000Z
|
dmi_scheduler/database.py
|
digitalmethodsinitiative/dmi-scheduler
|
99065fef3d9bdb6ec0a84ec4499fe2300f92021b
|
[
"MIT"
] | 1
|
2021-11-03T10:44:48.000Z
|
2021-11-03T11:05:45.000Z
|
dmi_scheduler/database.py
|
digitalmethodsinitiative/dmi-scheduler
|
99065fef3d9bdb6ec0a84ec4499fe2300f92021b
|
[
"MIT"
] | null | null | null |
"""
Database wrapper
"""
import psycopg2.extras
import psycopg2
from psycopg2 import sql
from psycopg2.extras import execute_values
class Database:
"""
Simple database handler
Offers a number of abstraction methods that limit how much SQL one is
required to write. Also makes the database connection mostly multithreading
proof by instantiating a new cursor for each query (and closing it afterwards)
"""
_cursor = None
_log = None
def __init__(self, logger, dbname, user, password, host, port, appname=""):
"""
Set up database connection
:param logger: Logger instance
:param dbname: Database name
:param user: Database username
:param password: Database password
:param host: Database server address
:param port: Database port
:param appname: App name, mostly useful to trace connections in pg_stat_activity
"""
appname = "dmi-db" if not appname else "dmi-db-%s" % appname
self.connection = psycopg2.connect(dbname=dbname, user=user, password=password, host=host, port=port,
application_name=appname)
self._cursor = self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
self._log = logger
if self._log is None:
raise NotImplementedError()
self.commit()
def query(self, query, replacements=None, cursor=None):
"""
Execute a query
:param string query: Query
:param args: Replacement values
:param cursor: Cursor to use. Default - use common cursor
:return None:
"""
if not cursor:
cursor = self.get_cursor()
self._log.debug("Executing query %s" % self._cursor.mogrify(query, replacements))
return cursor.execute(query, replacements)
def execute(self, query, replacements=None):
"""
Execute a query, and commit afterwards
This is required for UPDATE/INSERT/DELETE/etc to stick
:param string query: Query
:param replacements: Replacement values
"""
cursor = self.get_cursor()
self._log.debug("Executing query %s" % self._cursor.mogrify(query, replacements))
cursor.execute(query, replacements)
self.commit()
cursor.close()
def execute_many(self, query, replacements=None):
"""
Execute a query multiple times, each time with different values
This makes it particularly suitable for INSERT queries, but other types
of query using VALUES are possible too.
:param string query: Query
:param replacements: A list of replacement values
"""
cursor = self.get_cursor()
execute_values(cursor, query, replacements)
cursor.close()
def update(self, table, data, where=None, commit=True):
"""
Update a database record
:param string table: Table to update
:param dict where: Simple conditions, parsed as "column1 = value1 AND column2 = value2" etc
:param dict data: Data to set, Column => Value
:param bool commit: Whether to commit after executing the query
:return int: Number of affected rows. Note that this may be unreliable if `commit` is `False`
"""
if where is None:
where = {}
# build query
identifiers = [sql.Identifier(column) for column in data.keys()]
identifiers.insert(0, sql.Identifier(table))
replacements = list(data.values())
query = "UPDATE {} SET " + ", ".join(["{} = %s" for column in data])
if where:
query += " WHERE " + " AND ".join(["{} = %s" for column in where])
for column in where.keys():
identifiers.append(sql.Identifier(column))
replacements.append(where[column])
query = sql.SQL(query).format(*identifiers)
cursor = self.get_cursor()
self._log.debug("Executing query: %s" % cursor.mogrify(query, replacements))
cursor.execute(query, replacements)
if commit:
self.commit()
result = cursor.rowcount
cursor.close()
return result
def delete(self, table, where, commit=True):
"""
Delete a database record
:param string table: Table to delete from
:param dict where: Simple conditions, parsed as "column1 = value1 AND column2 = value2" etc
:param bool commit: Whether to commit after executing the query
:return int: Number of affected rows. Note that this may be unreliable if `commit` is `False`
"""
where_sql = ["{} = %s" for column in where.keys()]
replacements = list(where.values())
# build query
identifiers = [sql.Identifier(column) for column in where.keys()]
identifiers.insert(0, sql.Identifier(table))
query = sql.SQL("DELETE FROM {} WHERE " + " AND ".join(where_sql)).format(*identifiers)
cursor = self.get_cursor()
self._log.debug("Executing query: %s" % cursor.mogrify(query, replacements))
cursor.execute(query, replacements)
if commit:
self.commit()
result = cursor.rowcount
cursor.close()
return result
def insert(self, table, data, commit=True, safe=False, constraints=None):
"""
Create database record
:param string table: Table to insert record into
:param dict data: Data to insert
:param bool commit: Whether to commit after executing the query
:param bool safe: If set to `True`, "ON CONFLICT DO NOTHING" is added to the insert query, so that no error is
thrown when the insert violates a unique index or other constraint
:param tuple constraints: If `safe` is `True`, this tuple may contain the columns that should be used as a
constraint, e.g. ON CONFLICT (name, lastname) DO NOTHING
:return int: Number of affected rows. Note that this may be unreliable if `commit` is `False`
"""
if constraints is None:
constraints = []
# escape identifiers
identifiers = [sql.Identifier(column) for column in data.keys()]
identifiers.insert(0, sql.Identifier(table))
# construct ON NOTHING bit of query
if safe:
safe_bit = " ON CONFLICT "
if constraints:
safe_bit += "(" + ", ".join(["{}" for each in constraints]) + ")"
for column in constraints:
identifiers.append(sql.Identifier(column))
safe_bit += " DO NOTHING"
else:
safe_bit = ""
# prepare parameter replacements
protoquery = "INSERT INTO {} (%s) VALUES %%s" % ", ".join(["{}" for column in data.keys()]) + safe_bit
query = sql.SQL(protoquery).format(*identifiers)
replacements = (tuple(data.values()),)
cursor = self.get_cursor()
self._log.debug("Executing query: %s" % cursor.mogrify(query, replacements))
cursor.execute(query, replacements)
if commit:
self.commit()
result = cursor.rowcount
cursor.close()
return result
def fetchall(self, query, *args):
"""
Fetch all rows for a query
:param string query: Query
:param args: Replacement values
:return list: The result rows, as a list
"""
cursor = self.get_cursor()
self._log.debug("Executing query: %s" % cursor.mogrify(query, *args))
self.query(query, cursor=cursor, *args)
try:
result = cursor.fetchall()
except AttributeError:
result = []
cursor.close()
return result
def fetchone(self, query, *args):
"""
Fetch one result row
:param string query: Query
:param args: Replacement values
:return: The row, as a dictionary, or None if there were no rows
"""
cursor = self.get_cursor()
self.query(query, cursor=cursor, *args)
try:
result = cursor.fetchone()
except psycopg2.ProgrammingError as e:
# no results to fetch
self.commit()
result = None
cursor.close()
return result
def commit(self):
"""
Commit the current transaction
This is required for UPDATE etc to stick.
"""
self.connection.commit()
def rollback(self):
"""
Roll back the current transaction
"""
self.connection.rollback()
def close(self):
"""
Close connection
Running queries after this is probably a bad idea!
"""
self.connection.close()
def get_cursor(self):
"""
Get a new cursor
Re-using cursors seems to give issues when using per-thread
connections, so simply instantiate a new one each time
:return: Cursor
"""
return self.connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
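# --- Illustrative usage sketch (not part of the original module). The connection
# parameters and the "jobs" table/columns below are assumptions for demonstration.
if __name__ == "__main__":
    import logging
    logging.basicConfig(level=logging.DEBUG)
    db = Database(logging.getLogger("dmi-db-demo"), dbname="demo", user="demo",
                  password="secret", host="localhost", port=5432, appname="demo")
    db.insert("jobs", {"jobtype": "example", "status": "queued"}, safe=True)
    print(db.fetchone("SELECT * FROM jobs WHERE jobtype = %s", ("example",)))
    db.close()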
| 27.932384
| 112
| 0.70251
|
3b03d2555eb9146dca6f90881c25f87c53255921
| 5,866
|
py
|
Python
|
pycspr/serialisation/deploy_from_json.py
|
casper-network/pycspr
|
24244a82396db164beec23a461d13adab98aba0e
|
[
"Apache-2.0"
] | 11
|
2021-09-27T08:41:18.000Z
|
2022-03-24T11:25:20.000Z
|
pycspr/serialisation/deploy_from_json.py
|
casper-network/pycspr
|
24244a82396db164beec23a461d13adab98aba0e
|
[
"Apache-2.0"
] | 13
|
2021-09-13T19:08:45.000Z
|
2022-02-08T10:01:12.000Z
|
pycspr/serialisation/deploy_from_json.py
|
casper-network/pycspr
|
24244a82396db164beec23a461d13adab98aba0e
|
[
"Apache-2.0"
] | 14
|
2021-07-12T10:46:33.000Z
|
2022-03-01T08:25:07.000Z
|
from pycspr.factory import create_public_key_from_account_key
from pycspr.serialisation.cl_value_from_json import decode as cl_value_from_json
from pycspr.types.deploys import Deploy
from pycspr.types.deploys import DeployApproval
from pycspr.types.deploys import DeployArgument
from pycspr.types.deploys import DeployExecutableItem
from pycspr.types.deploys import DeployHeader
from pycspr.types.deploys import DeployTimeToLive
from pycspr.types.deploys import ModuleBytes
from pycspr.types.deploys import StoredContractByHash
from pycspr.types.deploys import StoredContractByHashVersioned
from pycspr.types.deploys import StoredContractByName
from pycspr.types.deploys import StoredContractByNameVersioned
from pycspr.types.deploys import Transfer
from pycspr.types import Timestamp
def decode(typedef: object, obj: dict) -> object:
"""Decodes a deploy related type from a JSON object.
:param obj: A JSON compatible dictionary.
:param typedef: Deploy related type definition.
:returns: A deploy related type.
"""
try:
decoder = _DECODERS[typedef]
except KeyError:
raise ValueError(f"Cannot decode {typedef} from json")
else:
return decoder(_get_parsed_json(typedef, obj))
def _decode_deploy(obj: dict) -> Deploy:
return Deploy(
approvals=[decode(DeployApproval, i) for i in obj["approvals"]],
hash=bytes.fromhex(obj["hash"]),
header=decode(DeployHeader, obj["header"]),
payment=decode(DeployExecutableItem, obj["payment"]),
session=decode(DeployExecutableItem, obj["session"])
)
def _decode_deploy_approval(obj: dict) -> DeployApproval:
return DeployApproval(
signer=create_public_key_from_account_key(bytes.fromhex(obj["signer"])),
signature=bytes.fromhex(obj["signature"]),
)
def _decode_deploy_argument(obj: dict) -> DeployArgument:
return DeployArgument(
name=obj[0],
value=cl_value_from_json(obj[1])
)
def _decode_deploy_executable_item(obj: dict) -> DeployExecutableItem:
if "ModuleBytes" in obj:
return decode(ModuleBytes, obj)
elif "StoredContractByHash" in obj:
return decode(StoredContractByHash, obj)
elif "StoredVersionedContractByHash" in obj:
return decode(StoredContractByHashVersioned, obj)
elif "StoredContractByName" in obj:
return decode(StoredContractByName, obj)
elif "StoredVersionedContractByName" in obj:
return decode(StoredContractByNameVersioned, obj)
elif "Transfer" in obj:
return decode(Transfer, obj)
else:
raise NotImplementedError("Unsupported DeployExecutableItem variant")
def _decode_deploy_header(obj: dict) -> DeployHeader:
return DeployHeader(
account_public_key=create_public_key_from_account_key(bytes.fromhex(obj["account"])),
body_hash=bytes.fromhex(obj["body_hash"]),
chain_name=obj["chain_name"],
dependencies=[],
gas_price=obj["gas_price"],
timestamp=Timestamp.from_string(obj["timestamp"]),
ttl=DeployTimeToLive.from_string(obj["ttl"])
)
def _decode_module_bytes(obj: dict) -> ModuleBytes:
return ModuleBytes(
args=[decode(DeployArgument, i) for i in obj["args"]],
module_bytes=bytes.fromhex(obj["module_bytes"])
)
def _decode_stored_contract_by_hash(obj: dict) -> StoredContractByHash:
return StoredContractByHash(
args=[decode(DeployArgument, i) for i in obj["args"]],
entry_point=obj["entry_point"],
hash=bytes.fromhex(obj["hash"])
)
def _decode_stored_contract_by_hash_versioned(obj: dict) -> StoredContractByHashVersioned:
return StoredContractByHashVersioned(
args=[decode(DeployArgument, i) for i in obj["args"]],
entry_point=obj["entry_point"],
hash=bytes.fromhex(obj["hash"]),
version=obj["version"]
)
def _decode_stored_contract_by_name(obj: dict) -> StoredContractByName:
return StoredContractByName(
args=[decode(DeployArgument, i) for i in obj["args"]],
entry_point=obj["entry_point"],
name=obj["name"],
)
def _decode_stored_contract_by_name_versioned(obj: dict) -> StoredContractByNameVersioned:
return StoredContractByNameVersioned(
args=[decode(DeployArgument, i) for i in obj["args"]],
entry_point=obj["entry_point"],
name=obj["name"],
version=obj["version"]
)
def _decode_transfer(obj: dict) -> Transfer:
return Transfer(
args=[decode(DeployArgument, i) for i in obj["args"]],
)
def _get_parsed_json(typedef: object, obj: dict) -> dict:
if typedef is DeployArgument:
if isinstance(obj[1]["bytes"], str):
obj[1]["bytes"] = bytes.fromhex(obj[1]["bytes"])
elif typedef is ModuleBytes:
return obj["ModuleBytes"]
elif typedef is StoredContractByHash:
return obj["StoredContractByHash"]
elif typedef is StoredContractByHashVersioned:
return obj["StoredContractByHashVersioned"]
elif typedef is StoredContractByName:
return obj["StoredContractByName"]
elif typedef is StoredContractByNameVersioned:
return obj["StoredContractByNameVersioned"]
elif typedef is Transfer:
return obj["Transfer"]
return obj
_DECODERS = {
Deploy: _decode_deploy,
DeployApproval: _decode_deploy_approval,
DeployArgument: _decode_deploy_argument,
DeployExecutableItem: _decode_deploy_executable_item,
DeployHeader: _decode_deploy_header,
ModuleBytes: _decode_module_bytes,
StoredContractByHash: _decode_stored_contract_by_hash,
StoredContractByHashVersioned: _decode_stored_contract_by_hash_versioned,
StoredContractByName: _decode_stored_contract_by_name,
StoredContractByNameVersioned: _decode_stored_contract_by_name_versioned,
Transfer: _decode_transfer
}
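# --- Illustrative usage sketch (not part of the original module). The file name
# "deploy.json" is an assumption; it should hold a JSON-serialised deploy object.
if __name__ == "__main__":
    import json
    with open("deploy.json") as fh:
        deploy = decode(Deploy, json.load(fh))
    print(deploy.hash.hex())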
| 35.337349
| 93
| 0.724514
|
9d725af34977eca1db8a6935a5b941719cd96ae1
| 6,898
|
py
|
Python
|
project/trails/add-digits.py
|
shivanshseth/Digit-Addition
|
f4b3fd936f0bc4d208fb7ea460a05afa37911379
|
[
"Apache-2.0"
] | null | null | null |
project/trails/add-digits.py
|
shivanshseth/Digit-Addition
|
f4b3fd936f0bc4d208fb7ea460a05afa37911379
|
[
"Apache-2.0"
] | null | null | null |
project/trails/add-digits.py
|
shivanshseth/Digit-Addition
|
f4b3fd936f0bc4d208fb7ea460a05afa37911379
|
[
"Apache-2.0"
] | null | null | null |
import torch
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from torch.utils.data import DataLoader, Dataset
import torch.nn as nn
import torch.nn.functional as F
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
#loading data
X = np.load('../Data/data0.npy')
y = np.load('../Data/lab0.npy')
for i in [1, 2]:
Xt = np.load('../Data/data' + str(i) + '.npy')
yt = np.load('../Data/lab' + str(i) + '.npy')
X = np.concatenate((X, Xt))
y = np.concatenate((y, yt))
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
print("split done")
## Padding to make it square
#tp = ((0, 0), (64, 64), (0, 0))
#X_train = np.pad(X_train, pad_width=tp, mode='constant', constant_values=0)
X_train = torch.Tensor([[i] for i in X_train])
#X_test = np.pad(X_test, pad_width=tp, mode='constant', constant_values=0)
X_test = torch.Tensor([[i] for i in X_test])
batch_size = 300
print("Converted to tensor")
class DigitAdditionDataset(Dataset):
def __init__(self, X, y):
self.x = X
self.n_samples = X.shape[0]
self.y = torch.Tensor(y).long()
def __getitem__(self, index):
return self.x[index], self.y[index]
def __len__(self):
return self.n_samples
traindataset = DigitAdditionDataset(X_train, y_train)
valdataset = DigitAdditionDataset(X_test, y_test)
valoader = DataLoader(dataset=valdataset, batch_size=batch_size, shuffle=True, num_workers=1)
trainloader = DataLoader(dataset=traindataset, batch_size=batch_size, shuffle=True, num_workers=1)
print("dataloader made")
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=5, stride=1, padding=2),
nn.ReLU()
)
self.layer01 = nn.Sequential(
nn.Conv2d(8, 16, kernel_size=5, stride=1, padding=2),
nn.ReLU()
)
self.layer1 = nn.Sequential(
nn.Conv2d(16, 32, kernel_size=5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(kernel_size=(1,2), stride=(1,2)))
# (16, 40 , 168) -> (32, 40, 84)
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(kernel_size=(1,2), stride=(1,2)))
# (32, 40, 84) -> (64, 40, 42)
self.layer3 = nn.Sequential(
nn.Conv2d(64, 64, kernel_size=5, stride=1, padding=(4,3)),
nn.ReLU(),
nn.MaxPool2d(kernel_size=2, stride=2))
# (64, 40, 42) -> (64, 44, 44) -> (64, 22, 22)
self.layer4 = nn.Sequential(
nn.Conv2d(64, 64, kernel_size=5, stride=1, padding=2),
nn.ReLU(),
nn.MaxPool2d(kernel_size=2, stride=2))
# (64, 22, 22) -> (64, 11, 11)
self.fc1 = nn.Linear(64*11*11, 2000)
self.drop1 = nn.Dropout(p=0.5)
self.fc2 = nn.Linear(2000, 37)
def forward(self, x):
out = self.layer0(x)
out = self.layer01(out)
out = self.layer1(out)
out = self.layer2(out)
out = self.layer3(out)
out = self.layer4(out)
out = out.reshape(out.size(0), -1)
out = F.relu(self.fc1(out))
out = self.drop1(out)
out = self.fc2(out)
return out
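# --- Illustrative shape check (not part of the original script): the per-layer
# comments above imply 40x168 single-channel inputs, so a sanity check would be
#     with torch.no_grad():
#         print(Net()(torch.zeros(1, 1, 40, 168)).shape)  # torch.Size([1, 37])
# (kept commented out here to avoid building an extra model at import time).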
# In[153]:
model = Net()
model= nn.DataParallel(model)
model = model.cuda()
# Loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
print("model made")
# In[154]:
# Train the model
def train_model(model, trainloader, valoader, num_epochs=100, saveweights=True, eval_pass=False, weightsfile="./trained_model"):
print("starting train")
torch.cuda.empty_cache()
if eval_pass:
num_epochs = 1
total_step = len(trainloader)
train_loss_list = []
train_acc_list = []
val_acc_list = []
val_loss_list = []
for epoch in range(num_epochs):
if not eval_pass:
for i, (images, label) in enumerate(trainloader):
model.train()
# Run the forward pass
images = images.cuda()
label = label.cuda()
outputs = model(images)
#print("OUTPUT DEVICE", outputs.device, label.device)
loss = criterion(outputs, label)
#train_loss_list.append(loss.item())
# Backprop and perform Adam optimisation
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Track the accuracy
total = label.size(0)
_, predicted = torch.max(outputs.data, 1)
correct = (predicted == label).sum().item()
del label
del images
#train_acc_list.append(correct / total)
print('Training: Epoch [{}/{}], Step [{}/{}], Loss: {:.4f}, Accuracy: {:.2f}%'
.format(epoch + 1, num_epochs, i + 1, total_step, loss.item(),
(correct / total) * 100))
train_acc_list.append(correct / total)
train_loss_list.append(loss.item())
torch.cuda.empty_cache()
        # Aggregate validation metrics over the whole epoch so the validation
        # curves have one point per epoch, matching the training curves above.
        model.eval()
        val_correct, val_total, val_loss_sum, val_batches = 0, 0, 0.0, 0
        with torch.no_grad():
            for images, label in valoader:
                # Run the forward pass
                images = images.cuda()
                label = label.cuda()
                outputs = model(images)
                loss = criterion(outputs, label)
                # Track the accuracy
                val_total += label.size(0)
                _, predicted = torch.max(outputs.data, 1)
                val_correct += (predicted == label).sum().item()
                val_loss_sum += loss.item()
                val_batches += 1
        val_acc_list.append(val_correct / val_total)
        val_loss_list.append(val_loss_sum / val_batches)
        print('Validation: Epoch [{}/{}], Loss: {:.4f}, Accuracy: {:.2f}%'
              .format(epoch + 1, num_epochs, val_loss_sum / val_batches,
                      (val_correct / val_total) * 100))
if saveweights:
        torch.save(model.state_dict(), weightsfile)
plt.title("Curve:Loss")
plt.plot(range(len(train_loss_list)), train_loss_list, label="Train")
plt.plot(range(len(train_loss_list)), val_loss_list, label="Validation")
plt.xlabel("Iterations")
plt.ylabel("Loss")
plt.legend()
plt.savefig('loss_curve.png')
plt.close()
plt.title("Curve:Accuracy")
plt.plot(range(len(train_loss_list)), train_acc_list, label="Train")
plt.plot(range(len(train_loss_list)), val_acc_list, label="Validation")
plt.xlabel("Iterations")
plt.ylabel("Loss")
plt.legend()
plt.savefig('acc_curve.png')
train_model(model, trainloader, valoader, 100)
| 34.49
| 128
| 0.575239
|
97c63ed1a04029e9c6b980de3265d2388c5665eb
| 479
|
py
|
Python
|
examples/spline_form_arc.py
|
mamofejo/ezdxf
|
bd5a08a85608360266eb8702d48638195c72c247
|
[
"MIT"
] | 1
|
2021-06-05T09:15:15.000Z
|
2021-06-05T09:15:15.000Z
|
examples/spline_form_arc.py
|
luoyu-123/ezdxf
|
40963a2010028f87846241e08434f43ab421f3fb
|
[
"MIT"
] | null | null | null |
examples/spline_form_arc.py
|
luoyu-123/ezdxf
|
40963a2010028f87846241e08434f43ab421f3fb
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2020, Manfred Moitzi
# License: MIT License
from pathlib import Path
import ezdxf
DIR = Path('~/Desktop/outbox').expanduser()
doc = ezdxf.new()
msp = doc.modelspace()
arc = msp.add_arc(
center=(0, 0),
radius=1.0,
start_angle=0,
end_angle=360,
dxfattribs={'layer': 'arc'},
)
spline = arc.to_spline(msp, replace=False)
spline.dxf.layer = 'B-spline'
spline.dxf.color = 1
doc.set_modelspace_vport(2)
doc.saveas(DIR / 'spline_from_arc.dxf')
| 17.740741
| 43
| 0.682672
|
bcc7b405f410fb7ff4fd705e210ff53b3c2fbe49
| 7,209
|
py
|
Python
|
kubernetes/client/models/apps_v1beta1_deployment.py
|
TomasTomecek/kubernetes-python
|
c37c074303a13c72662b9201ccc023fb0ca45755
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/client/models/apps_v1beta1_deployment.py
|
TomasTomecek/kubernetes-python
|
c37c074303a13c72662b9201ccc023fb0ca45755
|
[
"Apache-2.0"
] | 1
|
2021-04-30T20:41:19.000Z
|
2021-04-30T20:41:19.000Z
|
venv/lib/python2.7/site-packages/kubernetes/client/models/apps_v1beta1_deployment.py
|
784134748/kubernetes-install
|
5df59632c2619632e422948b667fb68eab9ff5be
|
[
"MIT"
] | 1
|
2020-05-09T07:16:55.000Z
|
2020-05-09T07:16:55.000Z
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.12.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class AppsV1beta1Deployment(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'AppsV1beta1DeploymentSpec',
'status': 'AppsV1beta1DeploymentStatus'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None):
"""
AppsV1beta1Deployment - a model defined in Swagger
"""
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
if status is not None:
self.status = status
@property
def api_version(self):
"""
Gets the api_version of this AppsV1beta1Deployment.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this AppsV1beta1Deployment.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this AppsV1beta1Deployment.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this AppsV1beta1Deployment.
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""
Gets the kind of this AppsV1beta1Deployment.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this AppsV1beta1Deployment.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this AppsV1beta1Deployment.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this AppsV1beta1Deployment.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this AppsV1beta1Deployment.
Standard object metadata.
:return: The metadata of this AppsV1beta1Deployment.
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this AppsV1beta1Deployment.
Standard object metadata.
:param metadata: The metadata of this AppsV1beta1Deployment.
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""
Gets the spec of this AppsV1beta1Deployment.
Specification of the desired behavior of the Deployment.
:return: The spec of this AppsV1beta1Deployment.
:rtype: AppsV1beta1DeploymentSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""
Sets the spec of this AppsV1beta1Deployment.
Specification of the desired behavior of the Deployment.
:param spec: The spec of this AppsV1beta1Deployment.
:type: AppsV1beta1DeploymentSpec
"""
self._spec = spec
@property
def status(self):
"""
Gets the status of this AppsV1beta1Deployment.
Most recently observed status of the Deployment.
:return: The status of this AppsV1beta1Deployment.
:rtype: AppsV1beta1DeploymentStatus
"""
return self._status
@status.setter
def status(self, status):
"""
Sets the status of this AppsV1beta1Deployment.
Most recently observed status of the Deployment.
:param status: The status of this AppsV1beta1Deployment.
:type: AppsV1beta1DeploymentStatus
"""
self._status = status
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, AppsV1beta1Deployment):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 30.16318
| 281
| 0.609238
|
d853b1fd917850845d3dba12979ae0b3de168423
| 1,981
|
py
|
Python
|
booldog/utils/utils.py
|
NIB-SI/squad-reboot
|
8d32527eddc1ee04d8f0f00b8a744dbd84f811f1
|
[
"MIT"
] | null | null | null |
booldog/utils/utils.py
|
NIB-SI/squad-reboot
|
8d32527eddc1ee04d8f0f00b8a744dbd84f811f1
|
[
"MIT"
] | 1
|
2021-05-13T12:36:35.000Z
|
2021-05-13T12:36:35.000Z
|
booldog/utils/utils.py
|
NIB-SI/BoolDoG
|
a638f9b2e131d2a381f6a81d6791e1436d101b9e
|
[
"MIT"
] | 1
|
2020-06-10T09:08:02.000Z
|
2020-06-10T09:08:02.000Z
|
import errno
import numpy as np
def ensure_ndarray(v):
if not type(v) == np.ndarray:
return np.array([*v])
else:
return v
def parameter_to_array(parameter, graph_keys, default=1):
'''
Parameter argument to numpy array
Parameters
----------
parameter : int, float or dict
if int or float, returns an array of length n with every value set to parameter
if dict, returns an array of length n with values set according to
parameter (nodes indexed by graph_keys), and the rest set to default
graph_keys : dict
default : int or float, optional
default value for parameters not set in parameter dict
Returns
----------
parameter_array: numpy array
array of length n
'''
if isinstance(parameter, np.ndarray) and \
(len(parameter) == len(graph_keys)):
return parameter
parameter_array = np.ones(len(graph_keys))
if isinstance(parameter, (int, float)):
parameter_array = parameter_array * parameter
elif isinstance(parameter, dict):
if 'default' in parameter.keys():
parameter_array = parameter_array * parameter['default']
for key, value in parameter.items():
if key == 'default':
continue
parameter_array[graph_keys[key]] = value
else:
print("'parameters must be int, float, or dict.")
parameter_array = parameter_array*default
return parameter_array
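# Illustrative sketch of the mapping above (hypothetical node indexing):
#   graph_keys = {"a": 0, "b": 1, "c": 2}
#   parameter_to_array(0.5, graph_keys)                          # -> array([0.5, 0.5, 0.5])
#   parameter_to_array({"default": 2.0, "b": 0.1}, graph_keys)   # -> array([2.0, 0.1, 2.0])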
def file_writeable(path):
'''
Checks if path is writable. If not, attempts to print reason, and raises
an Exception.
'''
if path.exists():
print(f'{path} already exists and will be overwritten')
try:
with open(path, 'w') as f:
pass
except IOError as x:
if x.errno == errno.EACCES:
print(f'No permission to write to {path}')
elif x.errno == errno.EISDIR:
print(f'{path} is directory.')
raise
| 23.034884
| 76
| 0.610803
|
8ae4daf0705ed4d198b7b16679eb5e81e6dc9d72
| 3,938
|
py
|
Python
|
easyquotation/basequotation.py
|
fanwz/easyquotation
|
a829c2f2e222be3e4cb79575256f3b3ad26a0929
|
[
"MIT"
] | 1
|
2020-11-09T02:19:36.000Z
|
2020-11-09T02:19:36.000Z
|
easyquotation/basequotation.py
|
fanwz/easyquotation
|
a829c2f2e222be3e4cb79575256f3b3ad26a0929
|
[
"MIT"
] | null | null | null |
easyquotation/basequotation.py
|
fanwz/easyquotation
|
a829c2f2e222be3e4cb79575256f3b3ad26a0929
|
[
"MIT"
] | 1
|
2019-08-02T01:02:15.000Z
|
2019-08-02T01:02:15.000Z
|
# coding:utf8
import abc
import json
import multiprocessing.pool
import warnings
import requests
from . import helpers
class BaseQuotation(metaclass=abc.ABCMeta):
"""行情获取基类"""
max_num = 800  # maximum number of stock codes per request
@property
@abc.abstractmethod
def stock_api(self) -> str:
"""
URL of the quotation API
"""
pass
def __init__(self):
self._session = requests.session()
stock_codes = self.load_stock_codes()
self.stock_list = self.gen_stock_list(stock_codes)
def gen_stock_list(self, stock_codes):
stock_with_exchange_list = self._gen_stock_prefix(stock_codes)
if self.max_num > len(stock_with_exchange_list):
request_list = ",".join(stock_with_exchange_list)
return [request_list]
stock_list = []
request_num = len(stock_codes) // (self.max_num + 1) + 1
for range_start in range(request_num):
num_start = self.max_num * range_start
num_end = self.max_num * (range_start + 1)
request_list = ",".join(
stock_with_exchange_list[num_start:num_end]
)
stock_list.append(request_list)
return stock_list
def _gen_stock_prefix(self, stock_codes):
return [
helpers.get_stock_type(code) + code[-6:]
for code in stock_codes
]
@staticmethod
def load_stock_codes():
with open(helpers.STOCK_CODE_PATH) as f:
return json.load(f)["stock"]
@property
def all(self):
warnings.warn("use market_snapshot instead", DeprecationWarning)
return self.get_stock_data(self.stock_list)
@property
def all_market(self):
"""return quotation with stock_code prefix key"""
return self.get_stock_data(self.stock_list, prefix=True)
def stocks(self, stock_codes, prefix=False):
return self.real(stock_codes, prefix)
def real(self, stock_codes, prefix=False):
"""return specific stocks real quotation
:param stock_codes: stock code or list of stock code,
when prefix is True, stock code must start with sh/sz
:param prefix: if prefix i True, stock_codes must contain sh/sz market
flag. If prefix is False, index quotation can't return
:return quotation dict, key is stock_code, value is real quotation.
If prefix with True, key start with sh/sz market flag
"""
if not isinstance(stock_codes, list):
stock_codes = [stock_codes]
stock_list = self.gen_stock_list(stock_codes)
return self.get_stock_data(stock_list, prefix=prefix)
def market_snapshot(self, prefix=False):
"""return all market quotation snapshot
:param prefix: if prefix is True, return quotation dict's stock_code
key start with sh/sz market flag
"""
return self.get_stock_data(self.stock_list, prefix=prefix)
def get_stocks_by_range(self, params):
headers = {
"Accept-Encoding": "gzip, deflate, sdch",
"User-Agent": (
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
"(KHTML, like Gecko) Chrome/54.0.2840.100 "
"Safari/537.36"
),
}
r = self._session.get(self.stock_api + params, headers=headers)
return r.text
def get_stock_data(self, stock_list, **kwargs):
"""获取并格式化股票信息"""
res = self._fetch_stock_data(stock_list)
return self.format_response_data(res, **kwargs)
def _fetch_stock_data(self, stock_list):
"""获取股票信息"""
pool = multiprocessing.pool.ThreadPool(len(stock_list))
try:
res = pool.map(self.get_stocks_by_range, stock_list)
finally:
pool.close()
return [d for d in res if d is not None]
def format_response_data(self, rep_data, **kwargs):
pass
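# Minimal subclass sketch (illustrative only; the endpoint below is a hypothetical placeholder,
# real data sources are provided elsewhere in this package):
#   class DemoQuotation(BaseQuotation):
#       stock_api = "https://example.invalid/quote?list="
#       def format_response_data(self, rep_data, **kwargs):
#           return rep_data
#   DemoQuotation().real(["000001"])  # returns the raw response chunks for the requested code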
| 32.01626
| 78
| 0.624429
|
7e295903258435d70d120b89ce9deaf532e7a733
| 3,739
|
py
|
Python
|
benchmark/startCirq1139.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startCirq1139.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startCirq1139.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=46
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=3
c.append(cirq.rx(-1.3603096190043806).on(input_qubit[2])) # number=28
c.append(cirq.H.on(input_qubit[1])) # number=4
c.append(cirq.H.on(input_qubit[2])) # number=5
c.append(cirq.H.on(input_qubit[3])) # number=6
c.append(cirq.H.on(input_qubit[4])) # number=21
for i in range(2):
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=7
c.append(cirq.H.on(input_qubit[3])) # number=8
c.append(cirq.H.on(input_qubit[3])) # number=34
c.append(cirq.CZ.on(input_qubit[4],input_qubit[3])) # number=35
c.append(cirq.H.on(input_qubit[3])) # number=36
c.append(cirq.H.on(input_qubit[0])) # number=17
c.append(cirq.H.on(input_qubit[1])) # number=18
c.append(cirq.X.on(input_qubit[2])) # number=29
c.append(cirq.rx(-1.9697785938008003).on(input_qubit[1])) # number=37
c.append(cirq.H.on(input_qubit[2])) # number=19
c.append(cirq.H.on(input_qubit[3])) # number=20
c.append(cirq.H.on(input_qubit[0])) # number=38
c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=39
c.append(cirq.H.on(input_qubit[0])) # number=40
c.append(cirq.X.on(input_qubit[0])) # number=32
c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=33
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=24
c.append(cirq.X.on(input_qubit[1])) # number=25
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=43
c.append(cirq.X.on(input_qubit[1])) # number=44
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=45
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[1])) # number=26
c.append(cirq.X.on(input_qubit[2])) # number=11
c.append(cirq.CNOT.on(input_qubit[2],input_qubit[3])) # number=30
c.append(cirq.X.on(input_qubit[3])) # number=12
c.append(cirq.H.on(input_qubit[2])) # number=42
c.append(cirq.X.on(input_qubit[0])) # number=13
c.append(cirq.X.on(input_qubit[1])) # number=14
c.append(cirq.X.on(input_qubit[2])) # number=15
c.append(cirq.X.on(input_qubit[3])) # number=16
c.append(cirq.X.on(input_qubit[1])) # number=22
c.append(cirq.X.on(input_qubit[1])) # number=23
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 5
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq1139.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
| 37.767677
| 77
| 0.650976
|
e72459afd6f6b83bfca461fa7242c21086fe2386
| 734
|
py
|
Python
|
src/app/forms.py
|
lsiksous/mauviette
|
06a985846b34929f22396ed16f3b4d2647025f21
|
[
"FTL"
] | null | null | null |
src/app/forms.py
|
lsiksous/mauviette
|
06a985846b34929f22396ed16f3b4d2647025f21
|
[
"FTL"
] | null | null | null |
src/app/forms.py
|
lsiksous/mauviette
|
06a985846b34929f22396ed16f3b4d2647025f21
|
[
"FTL"
] | null | null | null |
from flask_wtf import FlaskForm
from wtforms import (
SubmitField,
SelectField,
TextAreaField,
StringField,
BooleanField,
PasswordField,
)
from wtforms.validators import DataRequired, Length
class LoginForm(FlaskForm):
username = StringField("Username", validators=[DataRequired()])
password = PasswordField("Password", validators=[DataRequired()])
remember_me = BooleanField("Remember Me")
submit = SubmitField("Sign In")
class MessageForm(FlaskForm):
message = TextAreaField("", validators=[Length(max=140)])
dialect = SelectField(
u"Product",
choices=[("GOMA", "Goeland ou Malouine"), ("LPBF", "Lolly Pop ou Baby Flac")],
)
submit = SubmitField("Submit")
| 27.185185
| 86
| 0.689373
|
33cfbddcab6c846b8f0f01935d44df05fa41b7ed
| 869
|
py
|
Python
|
pack.py
|
TonyBCooper/PinningCalc
|
1733613f5f9c3186a09a0666b94cc5c497d18749
|
[
"MIT"
] | null | null | null |
pack.py
|
TonyBCooper/PinningCalc
|
1733613f5f9c3186a09a0666b94cc5c497d18749
|
[
"MIT"
] | null | null | null |
pack.py
|
TonyBCooper/PinningCalc
|
1733613f5f9c3186a09a0666b94cc5c497d18749
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: Tony Cooper
Created On: 2021-01-05
"""
import os
import py_compile
import shutil
import zipapp
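# Build script: byte-compile the listed sources into ./tmp, copy the static assets,
# then bundle the directory into a runnable PinningCalc.pyz zipapp.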
sourceFiles = ['data', 'FrmAbout', 'FrmMain', 'SysBase', 'SysCorbinRusswinSystem70IC', 'UIBase']
copyFiles = ['__main__.py','app.icns', 'app.ico', 'app.png', 'app.gif']
d = os.path.dirname(__file__)
sourcePath = d+'/PinningCalc/'
targetPath = d+'/tmp/'
if (os.path.exists(targetPath)):
shutil.rmtree(targetPath)
os.mkdir(targetPath)
os.mkdir(targetPath+'__pycache__')
for fn in sourceFiles:
py_compile.compile(sourcePath+fn+'.py', cfile=targetPath+fn+'.pyc', optimize=2)
shutil.copyfile(sourcePath+fn+'.py', targetPath+fn+'.py')
for fn in copyFiles:
shutil.copyfile(sourcePath+fn, targetPath+fn)
zipapp.create_archive(d+'/tmp', d+'/PinningCalc.pyz', '/usr/bin/env python3')
shutil.rmtree(targetPath)
| 26.333333
| 96
| 0.713464
|
c2c2a5c89d76781334900e3b8ab1e49941b3de88
| 9,347
|
py
|
Python
|
src/models/CORAL_BART/metrics.py
|
behavioral-data/multiverse
|
82b7265de0aa3e9d229ce9f3f86b8b48435ca365
|
[
"MIT"
] | null | null | null |
src/models/CORAL_BART/metrics.py
|
behavioral-data/multiverse
|
82b7265de0aa3e9d229ce9f3f86b8b48435ca365
|
[
"MIT"
] | null | null | null |
src/models/CORAL_BART/metrics.py
|
behavioral-data/multiverse
|
82b7265de0aa3e9d229ce9f3f86b8b48435ca365
|
[
"MIT"
] | 1
|
2021-08-19T15:21:50.000Z
|
2021-08-19T15:21:50.000Z
|
from transformers import EvalPrediction
from rouge import Rouge
import numpy as np
from scipy.special import softmax
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score, roc_auc_score
from nltk.translate.gleu_score import corpus_gleu
from functools import partial
from typing import NamedTuple
import logging
logger = logging.getLogger(__name__)
from src.models.CORAL_BART.utils import safe_decode, safe_decode_coral
from tokenize import tokenize, untokenize, NUMBER, STRING, NAME, OP
from io import BytesIO
def get_avg_scores(hyps, refs):
rouge = Rouge(metrics=["rouge-l"])
scores = {m: {s: 0 for s in rouge.stats} for m in rouge.metrics}
if rouge.return_lengths:
scores["lengths"] = {"hyp": 0, "ref": 0}
count = 0
for (hyp, ref) in zip(hyps, refs):
# hyp = [" ".join(_.split()) for _ in hyp.split(".") if len(_) > 0]
# ref = [" ".join(_.split()) for _ in ref.split(".") if len(_) > 0]
# hyp = hyp.split()
# ref = ref.split()
for m in rouge.metrics:
fn = Rouge.AVAILABLE_METRICS[m]
sc = fn(hyp, ref, exclusive=rouge.exclusive)
scores[m] = {s: scores[m][s] + sc[s] for s in rouge.stats}
count += 1
avg_scores = {
m: {s: scores[m][s] / count for s in rouge.stats}
for m in rouge.metrics
}
return avg_scores
def find_all_between_tags(lst, start_tag, end_tag):
search_from = 0
try:
while True:
start_index = lst.index(start_tag, search_from)
end_index = lst.index(end_tag, start_index + 1)
yield lst[start_index + 1:end_index]
search_from = end_index + 1
except ValueError:
pass
def insert_space_to_tokenized_code(string):
try:
g = tokenize(BytesIO(string.encode('utf-8')).readline)
result = []
for toknum, tokval, _, _, _ in g:
result.append(tokval)
result = result[1:]
except:
result = string.split()
return ' '.join(result)
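# Classic dynamic-programming edit distance; only the previous DP row is kept in memory
# and the shorter string is iterated in the inner loop.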
def levenshteinDistance(s1, s2):
if len(s1) > len(s2):
s1, s2 = s2, s1
distances = range(len(s1) + 1)
for i2, c2 in enumerate(s2):
distances_ = [i2 + 1]
for i1, c1 in enumerate(s1):
if c1 == c2:
distances_.append(distances[i1])
else:
distances_.append(
1 + min((distances[i1], distances[i1 + 1], distances_[-1])))
distances = distances_
return distances[-1]
def ids_to_space_sep_tokens(ids,tokenizer):
token_str = " ".join(tokenizer.convert_ids_to_tokens(ids,skip_special_tokens=True))
if len(token_str) == 0:
return "<EMPTY>"
return token_str
def calc_rouge_from_tokens(pred_tokens,label_tokens, tokenizer):
predictions = [ids_to_space_sep_tokens(x,tokenizer) for x in pred_tokens]
labels = [ids_to_space_sep_tokens(x,tokenizer) for x in label_tokens]
return get_avg_scores(predictions,labels)
def calc_gleu_from_tokens(pred_tokens, label_tokens, tokenizer):
predictions = [tokenizer.convert_ids_to_tokens(x, skip_special_tokens=True) for x in pred_tokens]
labels = [[tokenizer.convert_ids_to_tokens(x, skip_special_tokens=True)] for x in label_tokens]
return corpus_gleu(labels,predictions)
def remove_ids(ids):
return [x for x in ids if not x in [-100]]
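# Span-aware ROUGE: score only the tokens that fall between the <INSERTED> ... </INSERTED>
# markers in both the predicted and the reference sequences.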
def calc_span_aware_rouge(prediction_ids,label_ids, tokenizer):
# What if there's nothing in the span?
# What if there's no span?
start_token = tokenizer.convert_tokens_to_ids(["<INSERTED>"])[0]
end_token = tokenizer.convert_tokens_to_ids(["</INSERTED>"])[0]
all_pred_tokens = []
all_label_tokens = []
for pred, label in zip(prediction_ids,label_ids):
pred_spans = list(find_all_between_tags(pred,start_token,end_token))
label_spans = list(find_all_between_tags(label,start_token,end_token))
# In cases where there's just a deletion, the correct result is
# an empty span, which looks like this:
# if len(label_spans) == 0:
# label_spans = [[end_token]]
# if len(pred_spans) == 0:
# pred_spans = [[end_token]]
pred_tokens = [item for sublist in pred_spans for item in sublist]
label_tokens = [item for sublist in label_spans for item in sublist]
all_pred_tokens.append(pred_tokens)
all_label_tokens.append(label_tokens)
return calc_rouge_from_tokens(all_pred_tokens,all_label_tokens,tokenizer)
def remove_masked_ids(ids, masks):
span_start_ids = [i for i, v in enumerate(
masks) if masks[i] == 1 and (i == 0 or masks[i - 1] == 0)]
span_end_ids = [i for i, v in enumerate(
masks) if i > 0 and masks[i] == 0 and masks[i - 1] == 1]
assert len(span_start_ids) == len(span_end_ids)
spans = []
spans = [ids[s:e] for s, e in zip(span_start_ids, span_end_ids)]
return spans
def get_seq2seq_eval(tokenizer, coral=False, idx2word=None, word2idx=None, span_aware_rouge=True):
if coral:
assert idx2word is not None and word2idx is not None
def seq2seq_eval(tokenizer, predictions: NamedTuple, span_aware_rouge=span_aware_rouge):
scores = {}
# rouge = Rouge()
prediction_ids = list(map(remove_ids, predictions.predictions))
label_ids = list(map(remove_ids, predictions.label_ids))
# Rouge scores
rouge_scores = calc_rouge_from_tokens(predictions.predictions, predictions.label_ids, tokenizer)
logger.info("Full Rouge:")
logger.info(rouge_scores)
scores["rouge-l-p"] = rouge_scores["rouge-l"]["p"]
scores["rouge-l-f"] = rouge_scores["rouge-l"]["f"]
scores["rouge-l-r"] = rouge_scores["rouge-l"]["r"]
scores["gleu"] = calc_gleu_from_tokens(predictions.predictions, predictions.label_ids, tokenizer)
if span_aware_rouge:
span_aware_rouge = calc_span_aware_rouge(predictions.predictions ,predictions.label_ids, tokenizer)
logger.info("Span Aware Rouge:")
logger.info(span_aware_rouge)
scores["span_aware_rouge_l_p"] = span_aware_rouge["rouge-l"]["p"]
scores["span_aware_rouge_l_r"] = span_aware_rouge["rouge-l"]["r"]
scores["span_aware_rouge_l_f"] = span_aware_rouge["rouge-l"]["f"]
if not coral:
prediction_tokens = [tokenizer.convert_ids_to_tokens(
x, skip_special_tokens=True) for x in prediction_ids]
label_tokens = [tokenizer.convert_ids_to_tokens(
x, skip_special_tokens=True) for x in label_ids]
avg_edit_distance = np.mean([levenshteinDistance(
a, b) for a, b in zip(prediction_tokens, label_tokens)])
scores["avg_edit_distance"] = avg_edit_distance
return scores
return partial(seq2seq_eval, tokenizer)
def get_multitask_eval(tokenizer, coral=False, idx2word=None, word2idx=None, wandb=False, threshold=0.1):
if coral:
assert idx2word is not None and word2idx is not None
seq2seq_eval = get_seq2seq_eval(
tokenizer, coral=False, idx2word=None, word2idx=None)
def multitask_eval(wandb, predictions):
results = seq2seq_eval(predictions)
input_ids = np.array([x for y in predictions.input_ids for x in y])
pad_mask = input_ids != 1
input_labels = np.array(
[x for y in predictions.input_labels for x in y])
input_logits = np.array(
[x for y in predictions.input_logits for x in y])
# Remove indices with pad tokens:
input_labels = input_labels[pad_mask]
input_logits = input_logits[pad_mask]
input_probs = softmax(input_logits, axis=1)
classes = (input_probs[:, -1] >= threshold)[input_labels != -100]
results["classification_precision"] = precision_score(
input_labels, classes)
results["classification_recall"] = recall_score(input_labels, classes)
results["classification_accuracy"] = accuracy_score(
input_labels, classes)
results["classification_f1"] = f1_score(input_labels, classes)
results["classification_roc_auc"] = roc_auc_score(input_labels,input_probs[:,-1])
return results
return partial(multitask_eval, wandb)
def classification_eval(predictions):
input_ids = np.array([x for y in predictions.input_ids for x in y])
pad_mask = input_ids != 1
input_labels = np.array(
[x for y in predictions.input_labels for x in y])
input_logits = np.array(predictions.predictions)
# Remove indices with pad tokens:
input_labels = input_labels[pad_mask]
input_logits = np.array(
[x for y in predictions.predictions for x in y])[pad_mask]
input_probs = softmax(input_logits, axis=1)
classes = (input_probs[:, -1] >= 0.15)
results = {}
results["classification_precision"] = precision_score(
input_labels, classes)
results["classification_recall"] = recall_score(input_labels, classes)
results["classification_accuracy"] = accuracy_score(
input_labels, classes)
results["classification_f1"] = f1_score(input_labels, classes)
results["roc_auc"] = roc_auc_score(input_labels,input_probs[:,-1])
return results
| 36.228682
| 111
| 0.659677
|
d3b0d865e2fabe210680dc99007df8f623b06ea9
| 692
|
py
|
Python
|
src/main/forms.py
|
itmo-wad/Task6_Tatarov_Dmitriy
|
29f4caf9717c6ce9dc322fadcf77cfb3162720d1
|
[
"Apache-2.0"
] | null | null | null |
src/main/forms.py
|
itmo-wad/Task6_Tatarov_Dmitriy
|
29f4caf9717c6ce9dc322fadcf77cfb3162720d1
|
[
"Apache-2.0"
] | null | null | null |
src/main/forms.py
|
itmo-wad/Task6_Tatarov_Dmitriy
|
29f4caf9717c6ce9dc322fadcf77cfb3162720d1
|
[
"Apache-2.0"
] | null | null | null |
from flask_wtf import Form
from wtforms import TextField, TextAreaField, SubmitField, PasswordField, BooleanField
from wtforms.validators import DataRequired
class SignUpForm(Form):
username = TextField('User Name', validators= [ DataRequired()])
password = PasswordField('Password',validators=[ DataRequired()])
submit = SubmitField('Sign Up')
class SignInForm(Form):
username = TextField('Username', validators = [DataRequired()])
password = PasswordField('Password', validators = [DataRequired()])
submit = SubmitField('Sign In')
class ChangeForm(Form):
password = TextField('Password', validators = [DataRequired()])
submit = SubmitField('Change Pass')
| 38.444444
| 86
| 0.736994
|
fde476069169ad0861f488e03a7a59d5ca0f9423
| 1,079
|
py
|
Python
|
or.py
|
shivendrd/OneNeuron
|
43fdcaa4ebea44bbf4f65edce0e58cf838db1a8a
|
[
"MIT"
] | null | null | null |
or.py
|
shivendrd/OneNeuron
|
43fdcaa4ebea44bbf4f65edce0e58cf838db1a8a
|
[
"MIT"
] | null | null | null |
or.py
|
shivendrd/OneNeuron
|
43fdcaa4ebea44bbf4f65edce0e58cf838db1a8a
|
[
"MIT"
] | null | null | null |
from utils.model import Perceptron
from utils.all_utils import prepare_data,save_plot, save_model
import numpy as np
import pandas as pd
def main(data, eta, epochs, plotFilename, filename):
"""it will give the data send the data to dataframe create a plot creating a model
Args:
data ([pd.DataFrame]): its the pandas dataframe
eta ([type]): step size
epochs ([epochs]): total no of iteration does
filename ([model]): filename of model
plotFilename ([plot]): name of plot which are created
"""
df = pd.DataFrame(data)
print(df)
X,y = prepare_data(df)
model = Perceptron(eta=eta, epochs=epochs)
model.fit(X,y)
_ = model.total_loss
save_model(model, filename=filename)
save_plot(df, plotFilename, model)
if __name__=='__main__': #'<<<entry point'
OR = {
"x1": [0,0,1,1],
"x2": [0,1,0,1],
"y": [0,1,1,1],
}
ETA = 0.3
EPOCHS = 10
main(data=OR, eta=ETA, epochs=EPOCHS, filename="or.model", plotFilename="or.png")
| 24.522727
| 87
| 0.620019
|
99d67518b42bd84b66f4b24dd98e24be62a8fc78
| 374
|
py
|
Python
|
Atividade-1/questao-4.py
|
David-Marcoss/POO1-PYTHON
|
b0a49fb95e8b87fdfe2fc40eca547bf25f93c3c4
|
[
"MIT"
] | null | null | null |
Atividade-1/questao-4.py
|
David-Marcoss/POO1-PYTHON
|
b0a49fb95e8b87fdfe2fc40eca547bf25f93c3c4
|
[
"MIT"
] | null | null | null |
Atividade-1/questao-4.py
|
David-Marcoss/POO1-PYTHON
|
b0a49fb95e8b87fdfe2fc40eca547bf25f93c3c4
|
[
"MIT"
] | null | null | null |
def fatorial(n):
fat =1
for i in range(2,n+1):
fat*= i
return fat
def fatorial2(n):
if n == 0:
return 1  # 0! = 1, matching the iterative version
if n == 1:
return 1
if n > 1:
return fatorial2(n - 1) * n
n = int(input('digite um valor: '))
print('fatorial iterativo {} = {}'.format(n,fatorial(n)))
print('fatorial recursivo {} = {}'.format(n,fatorial2(n)))
| 19.684211
| 58
| 0.532086
|
b62647b20f0c225308435084d96a965463cf313d
| 2,154
|
py
|
Python
|
sentinelhub/sentinelhub_session.py
|
AbnerErnaniADSFatec/sentinelhub-py
|
a27204e666368e29796be947e9d7cfd2b4a2f1ac
|
[
"MIT"
] | 1
|
2020-06-23T21:52:56.000Z
|
2020-06-23T21:52:56.000Z
|
sentinelhub/sentinelhub_session.py
|
AbnerErnaniADSFatec/sentinelhub-py
|
a27204e666368e29796be947e9d7cfd2b4a2f1ac
|
[
"MIT"
] | null | null | null |
sentinelhub/sentinelhub_session.py
|
AbnerErnaniADSFatec/sentinelhub-py
|
a27204e666368e29796be947e9d7cfd2b4a2f1ac
|
[
"MIT"
] | null | null | null |
"""
Module implementing Sentinel Hub session object
"""
import time
from oauthlib.oauth2 import BackendApplicationClient
from requests_oauthlib import OAuth2Session
from .config import SHConfig
class SentinelHubSession:
""" Sentinel Hub authentication class
The class will do OAuth2 authentication with Sentinel Hub service and store the token. It will make sure that the
token is never expired by automatically refreshing it if expiry time is close.
"""
SECONDS_BEFORE_EXPIRY = 60
def __init__(self, config=None):
"""
:param config: An instance of package configuration class
:type config: SHConfig
"""
self.config = config or SHConfig()
if not (self.config.sh_client_id and self.config.sh_client_secret):
raise ValueError("Configuration parameters 'sh_client_id' and 'sh_client_secret' have to be set in order"
"to authenticate with Sentinel Hub service")
self._token = None
@property
def token(self):
""" Always up-to-date session's token
:return: A token in a form of dictionary of parameters
:rtype: dict
"""
if self._token and self._token['expires_at'] > time.time() + self.SECONDS_BEFORE_EXPIRY:
return self._token
self._token = self._fetch_token()
return self._token
@property
def session_headers(self):
""" Provides session authorization headers
:return: A dictionary with authorization headers
:rtype: dict
"""
return {
'Authorization': 'Bearer {}'.format(self.token['access_token'])
}
def _fetch_token(self):
""" Collects a new token from Sentinel Hub service
"""
oauth_client = BackendApplicationClient(client_id=self.config.sh_client_id)
with OAuth2Session(client=oauth_client) as oauth_session:
return oauth_session.fetch_token(
token_url=self.config.get_sh_oauth_url(),
client_id=self.config.sh_client_id,
client_secret=self.config.sh_client_secret
)
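# Minimal usage sketch (illustrative; assumes valid OAuth client credentials are configured):
#   config = SHConfig()
#   config.sh_client_id = "<client-id>"          # hypothetical placeholder
#   config.sh_client_secret = "<client-secret>"  # hypothetical placeholder
#   session = SentinelHubSession(config=config)
#   headers = session.session_headers            # token is fetched/refreshed on demand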
| 31.676471
| 117
| 0.654596
|
453334f68683cccdb4b15b0ff25c1a314e1261c2
| 89
|
py
|
Python
|
tccli/services/bmvpc/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 47
|
2018-05-31T11:26:25.000Z
|
2022-03-08T02:12:45.000Z
|
tccli/services/bmvpc/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 23
|
2018-06-14T10:46:30.000Z
|
2022-02-28T02:53:09.000Z
|
tccli/services/bmvpc/__init__.py
|
zqfan/tencentcloud-cli
|
b6ad9fced2a2b340087e4e5522121d405f68b615
|
[
"Apache-2.0"
] | 22
|
2018-10-22T09:49:45.000Z
|
2022-03-30T08:06:04.000Z
|
# -*- coding: utf-8 -*-
from tccli.services.bmvpc.bmvpc_client import action_caller
| 22.25
| 59
| 0.707865
|
5064034c350c527f4325d63f3db516c3125a1089
| 2,460
|
py
|
Python
|
examples/legacy/plugins/workbench/AcmeLabUsingEggs/src/acme.workbench/acme/workbench/view/color_view.py
|
enthought/envisage
|
ca57225c4e9022d1ed5299a60e13dc2290d7d94e
|
[
"BSD-3-Clause"
] | 51
|
2015-05-12T01:34:15.000Z
|
2022-03-20T19:11:22.000Z
|
examples/legacy/plugins/workbench/AcmeLabUsingEggs/src/acme.workbench/acme/workbench/view/color_view.py
|
enthought/envisage
|
ca57225c4e9022d1ed5299a60e13dc2290d7d94e
|
[
"BSD-3-Clause"
] | 347
|
2015-02-27T19:51:09.000Z
|
2022-03-21T16:03:01.000Z
|
examples/legacy/plugins/workbench/AcmeLabUsingEggs/src/acme.workbench/acme/workbench/view/color_view.py
|
enthought/envisage
|
ca57225c4e9022d1ed5299a60e13dc2290d7d94e
|
[
"BSD-3-Clause"
] | 11
|
2015-02-11T04:32:54.000Z
|
2021-09-13T10:50:05.000Z
|
""" A view containing a colored panel! """
# Enthought library imports.
from traits.etsconfig.api import ETSConfig
from pyface.workbench.api import View
class ColorView(View):
""" A view containing a colored panel!
This view is written so that it works with *both* wx and Qt4. Your own
views obviously do not have to do this!
"""
#### 'IView' interface ####################################################
# The category that the view belongs to.
category = "Color"
###########################################################################
# 'IWorkbenchPart' interface.
###########################################################################
#### Trait initializers ###################################################
def _id_default(self):
""" Trait initializer. """
# By making the Id the same as the name, we make it easy to specify
# the views in the example perspectives. Note for larger applications
# the Id should be globally unique, and by default we use the module
# name and class name.
return self.name
#### Methods ##############################################################
def create_control(self, parent):
""" Creates the toolkit-specific control that represents the view.
'parent' is the toolkit-specific control that is the view's parent.
"""
method = getattr(self, "_%s_create_control" % ETSConfig.toolkit, None)
if method is None:
raise SystemError("Unknown toolkit %s" % ETSConfig.toolkit)
color = self.name.lower()
return method(parent, color)
###########################################################################
# Private interface.
###########################################################################
def _wx_create_control(self, parent, color):
""" Create a wx version of the control. """
import wx
panel = wx.Panel(parent, -1)
panel.SetBackgroundColour(color)
return panel
def _qt4_create_control(self, parent, color):
""" Create a Qt4 version of the control. """
from pyface.qt import QtGui
widget = QtGui.QWidget(parent)
palette = widget.palette()
palette.setColor(QtGui.QPalette.Window, QtGui.QColor(color))
widget.setPalette(palette)
widget.setAutoFillBackground(True)
return widget
| 30.37037
| 79
| 0.513821
|
b42a41648d58a9d3cfcf6299a9275c63a2e23151
| 127
|
py
|
Python
|
tests/test_world.py
|
thorium-cloud/boto3-assistant
|
480551afbb28b5348aa54e6dee987f2448544e33
|
[
"MIT"
] | null | null | null |
tests/test_world.py
|
thorium-cloud/boto3-assistant
|
480551afbb28b5348aa54e6dee987f2448544e33
|
[
"MIT"
] | null | null | null |
tests/test_world.py
|
thorium-cloud/boto3-assistant
|
480551afbb28b5348aa54e6dee987f2448544e33
|
[
"MIT"
] | null | null | null |
from boto3_assistant import world
def test_world():
response = world.invoke({}, {})
assert response == "Hello World"
| 18.142857
| 36
| 0.677165
|
f9e78bc8f47e6c8d9898a977540f50e0fe9a2b3c
| 967
|
py
|
Python
|
src/kol/request/DeclineTradeResponseRequest.py
|
danheath/temppykol
|
7f9621b44df9f9d2d9fc0a5b2a06db116b9ccfab
|
[
"BSD-3-Clause"
] | 19
|
2015-02-16T08:30:49.000Z
|
2020-05-01T06:06:33.000Z
|
src/kol/request/DeclineTradeResponseRequest.py
|
danheath/temppykol
|
7f9621b44df9f9d2d9fc0a5b2a06db116b9ccfab
|
[
"BSD-3-Clause"
] | 5
|
2015-01-13T23:01:54.000Z
|
2016-11-30T15:23:43.000Z
|
src/kol/request/DeclineTradeResponseRequest.py
|
danheath/temppykol
|
7f9621b44df9f9d2d9fc0a5b2a06db116b9ccfab
|
[
"BSD-3-Clause"
] | 19
|
2015-05-28T09:36:19.000Z
|
2022-03-15T23:19:29.000Z
|
from kol.request.GenericRequest import GenericRequest
from kol.manager import PatternManager
from kol.util import Report
import kol.Error as Error
class DeclineTradeResponseRequest(GenericRequest):
def __init__(self, session, tradeid):
super(DeclineTradeResponseRequest, self).__init__(session)
self.url = session.serverURL + 'makeoffer.php'
self.requestData['pwd'] = session.pwd
self.requestData['action'] = 'decline2'
self.requestData['whichoffer'] = tradeid
def parseResponse(self):
successPattern = PatternManager.getOrCompilePattern('tradeCancelledSuccessfully')
if successPattern.search(self.responseText):
Report.trace('request', "Trade response " + str(self.requestData['whichoffer']) + " cancelled successfully.")
else:
raise Error.Error("Unknown error declining trade response for trade " + str(self.requestData['whichoffer']), Error.REQUEST_GENERIC)
| 48.35
| 143
| 0.719752
|
1990bc3f7148d809135c4f489d99d934c693a561
| 2,124
|
py
|
Python
|
api_tests/nodes/views/test_node_implicit_contributors_list.py
|
chennan47/osf.io
|
270608592b39a94941a3e329c0dc16d295a82472
|
[
"Apache-2.0"
] | null | null | null |
api_tests/nodes/views/test_node_implicit_contributors_list.py
|
chennan47/osf.io
|
270608592b39a94941a3e329c0dc16d295a82472
|
[
"Apache-2.0"
] | 3
|
2019-01-24T05:49:15.000Z
|
2019-01-28T03:00:14.000Z
|
api_tests/nodes/views/test_node_implicit_contributors_list.py
|
udzuki/RDM-osf.io
|
cf5665dd7c933d00e56fd1c8cd795caa770bae0a
|
[
"Apache-2.0"
] | 1
|
2018-11-06T11:03:48.000Z
|
2018-11-06T11:03:48.000Z
|
import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
ProjectFactory,
AuthUserFactory,
NodeFactory
)
@pytest.fixture()
def admin_contributor():
return AuthUserFactory()
@pytest.fixture()
def implicit_contributor():
return AuthUserFactory(given_name='Henrique')
@pytest.fixture()
def parent(implicit_contributor):
return ProjectFactory(
title='Parent Project',
creator=implicit_contributor
)
@pytest.fixture()
def component(admin_contributor, parent):
return NodeFactory(parent=parent, creator=admin_contributor)
@pytest.mark.django_db
class TestNodeImplicitContributors:
def test_list_and_filter_implicit_contributors(self, app, component, admin_contributor, implicit_contributor):
url = '/{}nodes/{}/implicit_contributors/'.format(API_BASE, component._id)
res = app.get(url, auth=admin_contributor.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 1
assert res.json['data'][0]['id'] == implicit_contributor._id
url = '/{}nodes/{}/implicit_contributors/?filter[given_name]={}'.format(API_BASE, component._id, implicit_contributor.given_name)
res = app.get(url, auth=admin_contributor.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 1
assert res.json['data'][0]['id'] == implicit_contributor._id
url = '/{}nodes/{}/implicit_contributors/?filter[given_name]=NOT_EVEN_A_NAME'.format(API_BASE, component._id)
res = app.get(url, auth=admin_contributor.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 0
component.add_contributor(implicit_contributor, save=True)
res = app.get(url, auth=admin_contributor.auth)
assert res.status_code == 200
assert res.content_type == 'application/vnd.api+json'
assert len(res.json['data']) == 0
| 35.4
| 137
| 0.696798
|
acd7e7047b359bb2033871d4fba886f1fad66dd8
| 1,471
|
py
|
Python
|
helper_nodes/pointcloud_image_sync.py
|
apl-ocean-engineering/tof_sensor
|
1a18b17f8ede8ab8cde86e80edcbe7dc4681c498
|
[
"BSD-3-Clause"
] | null | null | null |
helper_nodes/pointcloud_image_sync.py
|
apl-ocean-engineering/tof_sensor
|
1a18b17f8ede8ab8cde86e80edcbe7dc4681c498
|
[
"BSD-3-Clause"
] | null | null | null |
helper_nodes/pointcloud_image_sync.py
|
apl-ocean-engineering/tof_sensor
|
1a18b17f8ede8ab8cde86e80edcbe7dc4681c498
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
"""
Created on Wed Feb 27 12:32:07 2019
@author: mitchell
"""
import rospy
import std_msgs.msg
from sensor_msgs.msg import Image, PointCloud2
from cv_bridge import CvBridge, CvBridgeError
import cv2
class image_republish:
def __init__(self):
self.image_pub = rospy.Publisher("/camera/rgb/image_color",Image, queue_size=1)
self.pointcloud_pub = rospy.Publisher("/camera/depth/points",PointCloud2, queue_size=1)
self.bridge = CvBridge()
rospy.Subscriber("/camera/left/image_raw",Image,self.img_callback)
rospy.Subscriber("/seikowave_node/cloud",PointCloud2,self.pointcloud_callback)
self.img = Image()
def img_callback(self, msg):
try:
cv_image = self.bridge.imgmsg_to_cv2(msg, "mono8")
except CvBridgeError as e:
print(e)
return  # skip this frame if conversion fails, instead of flipping an undefined image
cv_image = cv2.flip(cv_image, -1)
try:
self.img = self.bridge.cv2_to_imgmsg(cv_image, "mono8")
except CvBridgeError as e:
print(e)
def pointcloud_callback(self, msg):
self.image_pub.publish(self.img)
self.pointcloud_pub.publish(msg)
def run(self):
r = rospy.Rate(10)
i = 0
while not rospy.is_shutdown():
i+=1
r.sleep()
if __name__ == '__main__':
rospy.init_node("image_sync")
IR = image_republish()
rospy.spin()
| 25.807018
| 103
| 0.60707
|
1a20e3032a2fcab17de308f00eb66876f7bad3b9
| 368
|
py
|
Python
|
app/repositories/student_event_repo.py
|
jattoabdul/vanhack-cms
|
ab2cb054e35765531833afd98051027d891baf10
|
[
"MIT"
] | null | null | null |
app/repositories/student_event_repo.py
|
jattoabdul/vanhack-cms
|
ab2cb054e35765531833afd98051027d891baf10
|
[
"MIT"
] | null | null | null |
app/repositories/student_event_repo.py
|
jattoabdul/vanhack-cms
|
ab2cb054e35765531833afd98051027d891baf10
|
[
"MIT"
] | null | null | null |
from app.repositories.base_repo import BaseRepo
from app.models.student_event import StudentEvent
class StudentEventRepo(BaseRepo):
def __init__(self):
BaseRepo.__init__(self, StudentEvent)
def new_student_event(self, event_id, student_id):
student_event = StudentEvent(event_id=event_id, student_id=student_id)
student_event.save()
return student_event
| 28.307692
| 72
| 0.817935
|
2d916d8de079410812c5c22e8f53e62980973d4b
| 13,029
|
py
|
Python
|
keras_question_and_answering_system/library/seq2seq_v2.py
|
chen0040/keras-question-and-answering-web-api
|
8763beb11ac03a5f490ff6f02c41e29ccfbfc9b8
|
[
"MIT"
] | 25
|
2017-12-29T03:55:43.000Z
|
2022-03-22T03:55:04.000Z
|
keras_question_and_answering_system/library/seq2seq_v2.py
|
chen0040/keras-question-and-answering-web-api
|
8763beb11ac03a5f490ff6f02c41e29ccfbfc9b8
|
[
"MIT"
] | 3
|
2018-02-15T19:19:56.000Z
|
2019-12-24T09:11:47.000Z
|
keras_question_and_answering_system/library/seq2seq_v2.py
|
chen0040/keras-question-and-answering-web-api
|
8763beb11ac03a5f490ff6f02c41e29ccfbfc9b8
|
[
"MIT"
] | 16
|
2018-01-11T15:09:47.000Z
|
2022-01-09T08:44:40.000Z
|
from keras.callbacks import ModelCheckpoint
from keras.models import Model
from keras.layers import Input, LSTM, Dense, Embedding, Dropout, add, RepeatVector
from keras.preprocessing.sequence import pad_sequences
from keras_question_and_answering_system.library.utility import text_utils
from keras_question_and_answering_system.library.utility.qa_data_utils import Seq2SeqTripleSamples
import numpy as np
import nltk
import os
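# Infinite batch generator for fit_generator: pads the paragraph/question encoder inputs and
# builds one-hot decoder tensors, where the target sequence is the decoder input shifted one
# step to the left (teacher forcing).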
def generate_batch(ds, input_data, output_data, batch_size):
num_batches = len(input_data) // batch_size
while True:
for batchIdx in range(0, num_batches):
start = batchIdx * batch_size
end = (batchIdx + 1) * batch_size
encoder_input_paragraph_data_batch = []
encoder_input_question_data_batch = []
for input_paragraph_data, input_question_data in input_data[start:end]:
encoder_input_paragraph_data_batch.append(input_paragraph_data)
encoder_input_question_data_batch.append(input_question_data)
encoder_input_paragraph_data_batch = pad_sequences(encoder_input_paragraph_data_batch,
ds.input_paragraph_max_seq_length)
encoder_input_question_data_batch = pad_sequences(encoder_input_question_data_batch,
ds.input_question_max_seq_length)
decoder_target_data_batch = np.zeros(shape=(batch_size, ds.target_max_seq_length, ds.num_target_tokens))
decoder_input_data_batch = np.zeros(shape=(batch_size, ds.target_max_seq_length, ds.num_target_tokens))
for lineIdx, target_wid_list in enumerate(output_data[start:end]):
for idx, wid in enumerate(target_wid_list):
if wid == 0: # UNKNOWN
continue
decoder_input_data_batch[lineIdx, idx, wid] = 1
if idx > 0:
decoder_target_data_batch[lineIdx, idx - 1, wid] = 1
yield [encoder_input_paragraph_data_batch, encoder_input_question_data_batch,
decoder_input_data_batch], decoder_target_data_batch
class Seq2SeqV2QA(object):
model_name = 'seq2seq-qa-v2'
def __init__(self):
self.model = None
self.encoder_model = None
self.decoder_model = None
self.input_paragraph_word2idx = None
self.input_paragraph_idx2word = None
self.input_question_word2idx = None
self.input_question_idx2word = None
self.target_word2idx = None
self.target_idx2word = None
self.max_encoder_paragraph_seq_length = None
self.max_encoder_question_seq_length = None
self.max_decoder_seq_length = None
self.num_encoder_paragraph_tokens = None
self.num_encoder_question_tokens = None
self.num_decoder_tokens = None
@staticmethod
def get_architecture_file_path(model_dir_path):
return os.path.join(model_dir_path, Seq2SeqV2QA.model_name + '-architecture.json')
@staticmethod
def get_weight_file_path(model_dir_path):
return os.path.join(model_dir_path, Seq2SeqV2QA.model_name + '-weights.h5')
def load_model(self, model_dir_path):
self.input_paragraph_word2idx = np.load(
model_dir_path + '/' + self.model_name + '-input-paragraph-word2idx.npy').item()
self.input_paragraph_idx2word = np.load(
model_dir_path + '/' + self.model_name + '-input-paragraph-idx2word.npy').item()
self.input_question_word2idx = np.load(
model_dir_path + '/' + self.model_name + '-input-question-word2idx.npy').item()
self.input_question_idx2word = np.load(
model_dir_path + '/' + self.model_name + '-input-question-idx2word.npy').item()
self.target_word2idx = np.load(model_dir_path + '/' + self.model_name + '-target-word2idx.npy').item()
self.target_idx2word = np.load(model_dir_path + '/' + self.model_name + '-target-idx2word.npy').item()
context = np.load(model_dir_path + '/' + self.model_name + '-config.npy').item()
self.max_encoder_paragraph_seq_length = context['input_paragraph_max_seq_length']
self.max_encoder_question_seq_length = context['input_question_max_seq_length']
self.max_decoder_seq_length = context['target_max_seq_length']
self.num_encoder_paragraph_tokens = context['num_input_paragraph_tokens']
self.num_encoder_question_tokens = context['num_input_question_tokens']
self.num_decoder_tokens = context['num_target_tokens']
print(self.max_encoder_paragraph_seq_length)
print(self.max_encoder_question_seq_length)
print(self.max_decoder_seq_length)
print(self.num_encoder_paragraph_tokens)
print(self.num_encoder_question_tokens)
print(self.num_decoder_tokens)
self.create_model()
weight_file_path = self.get_weight_file_path(model_dir_path)
self.model.load_weights(weight_file_path)
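# Architecture sketch: paragraph and question are embedded separately, the question is
# summarised by an LSTM and repeated across the paragraph length, the two are added and
# passed through the encoder LSTM; the decoder LSTM is initialised with the encoder states.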
def create_model(self):
hidden_units = 256
embed_hidden_units = 100
context_inputs = Input(shape=(None,), name='context_inputs')
encoded_context = Embedding(input_dim=self.num_encoder_paragraph_tokens, output_dim=embed_hidden_units,
input_length=self.max_encoder_paragraph_seq_length,
name='context_embedding')(context_inputs)
encoded_context = Dropout(0.3)(encoded_context)
question_inputs = Input(shape=(None,), name='question_inputs')
encoded_question = Embedding(input_dim=self.num_encoder_question_tokens, output_dim=embed_hidden_units,
input_length=self.max_encoder_question_seq_length,
name='question_embedding')(question_inputs)
encoded_question = Dropout(0.3)(encoded_question)
encoded_question = LSTM(units=embed_hidden_units, name='question_lstm')(encoded_question)
encoded_question = RepeatVector(self.max_encoder_paragraph_seq_length)(encoded_question)
merged = add([encoded_context, encoded_question])
encoder_lstm = LSTM(units=hidden_units, return_state=True, name='encoder_lstm')
encoder_outputs, encoder_state_h, encoder_state_c = encoder_lstm(merged)
encoder_states = [encoder_state_h, encoder_state_c]
decoder_inputs = Input(shape=(None, self.num_decoder_tokens), name='decoder_inputs')
decoder_lstm = LSTM(units=hidden_units, return_state=True, return_sequences=True, name='decoder_lstm')
decoder_outputs, decoder_state_h, decoder_state_c = decoder_lstm(decoder_inputs,
initial_state=encoder_states)
decoder_dense = Dense(units=self.num_decoder_tokens, activation='softmax', name='decoder_dense')
decoder_outputs = decoder_dense(decoder_outputs)
self.model = Model([context_inputs, question_inputs, decoder_inputs], decoder_outputs)
self.model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
self.encoder_model = Model([context_inputs, question_inputs], encoder_states)
decoder_state_inputs = [Input(shape=(hidden_units,)), Input(shape=(hidden_units,))]
decoder_outputs, state_h, state_c = decoder_lstm(decoder_inputs, initial_state=decoder_state_inputs)
decoder_states = [state_h, state_c]
decoder_outputs = decoder_dense(decoder_outputs)
self.decoder_model = Model([decoder_inputs] + decoder_state_inputs, [decoder_outputs] + decoder_states)
def reply(self, paragraph, question):
input_paragraph_seq = []
input_question_seq = []
input_paragraph_wid_list = []
input_question_wid_list = []
input_paragraph_text = paragraph.lower()
input_question_text = question.lower()
for word in nltk.word_tokenize(input_paragraph_text):
if not text_utils.in_white_list(word):
continue
idx = 1 # default [UNK]
if word in self.input_paragraph_word2idx:
idx = self.input_paragraph_word2idx[word]
input_paragraph_wid_list.append(idx)
for word in nltk.word_tokenize(input_question_text):
if not text_utils.in_white_list(word):
continue
idx = 1 # default [UNK]
if word in self.input_question_word2idx:
idx = self.input_question_word2idx[word]
input_question_wid_list.append(idx)
input_paragraph_seq.append(input_paragraph_wid_list)
input_question_seq.append(input_question_wid_list)
input_paragraph_seq = pad_sequences(input_paragraph_seq, self.max_encoder_paragraph_seq_length)
input_question_seq = pad_sequences(input_question_seq, self.max_encoder_question_seq_length)
states_value = self.encoder_model.predict([input_paragraph_seq, input_question_seq])
target_seq = np.zeros((1, 1, self.num_decoder_tokens))
target_seq[0, 0, self.target_word2idx['START']] = 1
target_text = ''
target_text_len = 0
terminated = False
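# Greedy decoding: starting from START, repeatedly feed the most probable word back into
# the decoder until END is produced or the maximum target length is reached.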
while not terminated:
output_tokens, h, c = self.decoder_model.predict([target_seq] + states_value)
sample_token_idx = np.argmax(output_tokens[0, -1, :])
sample_word = self.target_idx2word[sample_token_idx]
target_text_len += 1
if sample_word != 'START' and sample_word != 'END':
target_text += ' ' + sample_word
if sample_word == 'END' or target_text_len >= self.max_decoder_seq_length:
terminated = True
target_seq = np.zeros((1, 1, self.num_decoder_tokens))
target_seq[0, 0, sample_token_idx] = 1
states_value = [h, c]
return target_text.strip()
def test_run(self, ds, index=None):
if index is None:
index = 0
paragraph, question, actual_answer = ds.get_data(index)
predicted_answer = self.reply(paragraph, question)
# print({'context': paragraph, 'question': question})
print({'predict': predicted_answer, 'actual': actual_answer})
def fit(self, data_set, model_dir_path, epochs=None, batch_size=None, test_size=None, random_state=None,
save_best_only=False, max_input_vocab_size=None, max_target_vocab_size=None):
if batch_size is None:
batch_size = 64
if epochs is None:
epochs = 100
if test_size is None:
test_size = 0.2
if random_state is None:
random_state = 42
if max_input_vocab_size is None:
max_input_vocab_size = 5000
if max_target_vocab_size is None:
max_target_vocab_size = 5000
data_set_seq2seq = Seq2SeqTripleSamples(data_set, max_input_vocab_size=max_input_vocab_size,
max_target_vocab_size=max_target_vocab_size)
data_set_seq2seq.save(model_dir_path, 'qa-v2')
x_train, x_test, y_train, y_test = data_set_seq2seq.split(test_size=test_size, random_state=random_state)
print(len(x_train))
print(len(x_test))
self.max_encoder_question_seq_length = data_set_seq2seq.input_question_max_seq_length
self.max_encoder_paragraph_seq_length = data_set_seq2seq.input_paragraph_max_seq_length
self.max_decoder_seq_length = data_set_seq2seq.target_max_seq_length
self.num_encoder_question_tokens = data_set_seq2seq.num_input_question_tokens
self.num_encoder_paragraph_tokens = data_set_seq2seq.num_input_paragraph_tokens
self.num_decoder_tokens = data_set_seq2seq.num_target_tokens
weight_file_path = self.get_weight_file_path(model_dir_path)
architecture_file_path = self.get_architecture_file_path(model_dir_path)
self.create_model()
with open(architecture_file_path, 'w') as f:
f.write(self.model.to_json())
train_gen = generate_batch(data_set_seq2seq, x_train, y_train, batch_size)
test_gen = generate_batch(data_set_seq2seq, x_test, y_test, batch_size)
train_num_batches = len(x_train) // batch_size
test_num_batches = len(x_test) // batch_size
checkpoint = ModelCheckpoint(filepath=weight_file_path, save_best_only=save_best_only)
history = self.model.fit_generator(generator=train_gen, steps_per_epoch=train_num_batches,
epochs=epochs,
verbose=1, validation_data=test_gen, validation_steps=test_num_batches,
callbacks=[checkpoint])
self.model.save_weights(weight_file_path)
np.save(os.path.join(model_dir_path, Seq2SeqV2QA.model_name + '-history.npy'), history.history)
return history
| 50.696498
| 116
| 0.681019
|
71a6826a3a6beb118e7fb5071c6002e19ca9d91d
| 9,885
|
py
|
Python
|
tosca/services/wget2.py
|
aria-jpl/sar-availability
|
b553819fe3ba3bd75e533603294b6bb29bdbd9e2
|
[
"Apache-2.0"
] | null | null | null |
tosca/services/wget2.py
|
aria-jpl/sar-availability
|
b553819fe3ba3bd75e533603294b6bb29bdbd9e2
|
[
"Apache-2.0"
] | 2
|
2020-07-06T15:17:58.000Z
|
2020-09-08T14:55:55.000Z
|
tosca/services/wget2.py
|
aria-jpl/sar-availability
|
b553819fe3ba3bd75e533603294b6bb29bdbd9e2
|
[
"Apache-2.0"
] | null | null | null |
import os, json, requests, math, re
import hashlib
from datetime import datetime
import dateutil.parser
from flask import jsonify, Blueprint, request, url_for, Response
from flask_login import login_required
from pprint import pformat
import base64
import simplekml
from tosca import app
mod = Blueprint("services/kml", __name__)
@mod.route("/services/kml/<dataset>", methods=["GET"])
def get_kml(dataset=None):
"""Return kml for dataset."""
# get callback, source, and dataset
source_b64 = request.args.get("base64")
source = request.args.get("source")
if source_b64 is not None:
source = base64.b64decode(source_b64)
if dataset is None:
return (
jsonify(
{"success": False, "message": "Cannot recognize dataset: %s" % dataset,}
),
500,
)
app.logger.info("source: {}".format(source))
app.logger.info("source_b64: {}".format(source_b64))
# query
decoded_query = json.loads(source)
results = get_es_results(query=decoded_query)
# build kml
kml_obj = gen_kml(results)
# return result
fname = "sar_availability-acquisitions-{}.kml".format(
datetime.utcnow().strftime("%Y%m%dT%H%M%S")
)
return Response(
kml_obj,
headers={
"Content-Type": "application/vnd.google-earth.kml+xml",
"Content-Disposition": "attachment; filename={}".format(fname),
},
)
def gen_poly(kmlobj, acq):
"""Create a new polygon for the KML for an acquisition"""
# generate params from acquisition
prm = gen_acq_dict(acq)
# save the params as a polygon
pol = kmlobj.newpolygon(name=prm["name"])
pol.outerboundaryis = prm["coord"]
pol.timespan.begin = prm["starttime"]
pol.timespan.end = prm["endtime"]
pol.style.linestyle.color = gen_color(acq)
pol.style.linestyle.width = 1
pol.style.polystyle.color = simplekml.Color.changealphaint(100, gen_color(acq))
pol.description = gen_kml_bubble(prm)
def gen_acq_dict(acq):
"""returns a dict of acquisition metadata & handles both ESA & BOS SARCAT datatypes"""
uid = re.sub("\_+", "_", acq["_id"])
coordinates = acq["_source"]["location"]["coordinates"][0]
coord = convert_coord(coordinates)
platform = walk(acq, "platform")
download_url = walk(acq, "download_url")
name = walk(acq, "title")
if name:
name = re.sub("\_+", "_", name)
if name is None:
name = (
uid.replace("-bos_sarcat-predicted", "")
.replace("-", "_")
.replace("acquisition_", "")
.replace("Sentinel_1", "S1")
)
start = walk(acq, "starttime")
end = walk(acq, "endtime")
source = walk(acq, "source")
starttime = dateutil.parser.parse(start).strftime("%Y-%m-%d")
endtime = dateutil.parser.parse(end).strftime("%Y-%m-%d")
if dateutil.parser.parse(start) > dateutil.parser.parse(end):
end = start
endtime = starttime
track = walk(acq, "track_number")
location = walk(acq, "continent")
status = walk(acq, "status")
if status:
status.replace("\n", "")
center = walk(acq, "center")
center_str = None
if center:
center_str = "{:.1f}, {:.1f}".format(
center["coordinates"][1], center["coordinates"][0]
)
location = get_loc_string(acq)
orbitnum = walk(acq, "orbitNumber")
dct = {
"uid": uid,
"coord": coord,
"coordinates": coordinates,
"start": start,
"end": end,
"starttime": starttime,
"endtime": endtime,
"track": track,
"source": source,
"platform": platform,
"orbitnum": orbitnum,
"name": name,
"download_url": download_url,
"center": center_str,
"location": location,
"status": status,
}
return dct
def gen_color(acq):
"""returns color based on acquisition source"""
platform = walk(acq, "platform")
if platform is None:
return simplekml.Color.blue
platform = platform.lower()
if platform.find("sentinel") != -1:
return simplekml.Color.white
elif platform.find("alos") != -1:
return simplekml.Color.green
elif platform.find("csk") != -1:
return simplekml.Color.blue
elif platform.find("radarsat") != -1:
return simplekml.Color.red
return convert_str_to_color(platform)
def convert_str_to_color(instr):
"""converts an input string into simplekml color deterministically"""
hexstr = hashlib.md5(instr).hexdigest()
r = int(hexstr[0], 16) * int(hexstr[1], 16)
g = int(hexstr[2], 16) * int(hexstr[3], 16)
b = int(hexstr[4], 16) * int(hexstr[5], 16)
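    # shift all channels up so the brightest one saturates at 255, keeping the generated colors light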
d = 255 - max([r, g, b])
r += d
g += d
b += d
return simplekml.Color.rgb(r, g, b)
def get_loc_string(acq):
"""determines the location from the acquisition metadata and returns it as a string"""
city_list = walk(acq, "city")
center = walk(acq, "center")
lat = center["coordinates"][1]
lon = center["coordinates"][0]
center = [float(lon), float(lat)]
loc_str = None
distance = None
for item in city_list:
city_point = [float(item["longitude"]), float(item["latitude"])]
cur_dist = get_distance(center, city_point)
if loc_str is None:
distance = cur_dist
# build region
if item["admin2_name"]:
loc_str = build_loc_name(item)
else:
            if cur_dist < distance:
                distance = cur_dist
                # build region
                loc_str = build_loc_name(item)
return loc_str
def build_loc_name(item):
"""builds the location name from the item"""
local = item["admin2_name"]
region = item["admin1_name"]
country = item["country_name"]
if local:
loc_str = "{}, {}, {}".format(
local.encode("ascii", "ignore"),
region.encode("ascii", "ignore"),
country.encode("ascii", "ignore"),
)
else:
loc_str = "{}, {}".format(
region.encode("ascii", "ignore"), country.encode("ascii", "ignore")
)
return loc_str
def get_distance(point1, point2):
"""returns the distance between the two points in kilometers"""
distance = (
math.acos(
math.sin(math.radians(point1[1])) * math.sin(math.radians(point2[1]))
+ math.cos(math.radians(point1[1]))
* math.cos(math.radians(point2[1]))
* math.cos(math.radians(point2[0]) - math.radians(point1[0]))
)
* 6371
)
return distance
def gen_kml_bubble(dct):
"""generates the html for the kml polygon"""
lst = [
"name",
"platform",
"location",
"start",
"end",
"source",
"track",
"orbitnum",
"coordinates",
"uid",
"status",
"download_url",
]
outstr = "<table>"
for item in lst:
if dct[item]:
if item == "download_url":
outstr += '<tr><td><b><font color=blue>{}</font></b></td><td> <a href="{}">{}</a></td></tr>'.format(
item.capitalize(), dct[item], dct[item]
)
else:
outstr += "<tr><td><b><font color=blue>{}</font></b></td><td>{}</td></tr>".format(
item.capitalize(), dct[item]
)
return outstr + "</table>"
def convert_coord(es_coord):
"""gen coords for kml"""
coord = []
for point in es_coord:
coord.append((str(point[0]), str(point[1])))
return coord
def gen_kml(acquisitions_list, verbose=False):
"""Create a KML file showing acquisition coverage"""
kmlobj = simplekml.Kml()
for acquisition in acquisitions_list:
gen_poly(kmlobj, acquisition)
return kmlobj.kml()
def query_es(query, url):
"""query elastic search"""
iterator_size = query["size"] = 2000 # override input query size
data = json.dumps(query, indent=2)
response = requests.get(url, data=data, verify=False, timeout=15)
response.raise_for_status()
results = json.loads(response.text, encoding="ascii")
results_list = results["hits"]["hits"]
total_results = int(results["hits"]["total"])
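    # if more hits exist than one page can hold, page through the rest iterator_size records at a time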
if total_results > iterator_size:
for i in range(iterator_size, total_results, iterator_size):
query["from"] = i
response = requests.get(url, data=data, verify=False, timeout=15)
response.raise_for_status()
results = json.loads(response.text, encoding="ascii")
results_list.extend(results["hits"]["hits"])
return results_list
def walk(node, key_match):
"""recursive node walk, returns None if nothing found, returns the value if a key matches key_match"""
if isinstance(node, dict):
for key, item in node.items():
# print('searching {} for {}'.format(key, key_match))
if str(key) == str(key_match):
# print('found {}: {}'.format(key, item))
return item
result = walk(item, key_match)
            if result is not None:
return result
return None
if isinstance(node, list):
for item in node:
            if isinstance(item, (dict, list)):
                result = walk(item, key_match)
                if result is not None:
return result
return None
return None
def get_es_results(query=None, source="bos", verbose=False):
"""get the elasticsearch results from the given query"""
index = "grq_*_acquisition-*"
grq_ip = app.config["ES_URL"]
url = "{}/{}/_search".format(grq_ip, index)
# run the es query & return the results
if verbose:
print("query: {}".format(query))
results = query_es(query, url)
return results
| 31.682692
| 116
| 0.585837
|
aec1bf5d9e8708562a844d85d51417ff74b6c1aa
| 13,887
|
py
|
Python
|
mayan/apps/sources/views/document_views.py
|
lisuen/Mayan-EDMS
|
a3bda53893b8908e7f62b6d3242901901d27b069
|
[
"Apache-2.0"
] | null | null | null |
mayan/apps/sources/views/document_views.py
|
lisuen/Mayan-EDMS
|
a3bda53893b8908e7f62b6d3242901901d27b069
|
[
"Apache-2.0"
] | null | null | null |
mayan/apps/sources/views/document_views.py
|
lisuen/Mayan-EDMS
|
a3bda53893b8908e7f62b6d3242901901d27b069
|
[
"Apache-2.0"
] | null | null | null |
import logging
from django.contrib import messages
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.utils.encoding import force_text
from django.utils.translation import ugettext_lazy as _
from mayan.apps.acls.models import AccessControlList
from mayan.apps.documents.models import (
DocumentType, Document, DocumentFile
)
from mayan.apps.documents.permissions import permission_document_create
from mayan.apps.navigation.classes import Link
from mayan.apps.storage.models import SharedUploadedFile
from mayan.apps.views.generics import MultiFormView
from ..exceptions import SourceException
from ..forms import (
NewDocumentForm, WebFormUploadForm, WebFormUploadFormHTML5
)
from ..icons import icon_staging_folder_file, icon_upload_view_link
from ..literals import SOURCE_UNCOMPRESS_CHOICE_ASK, SOURCE_UNCOMPRESS_CHOICE_Y
from ..links import factory_conditional_active_by_source
from ..menus import menu_sources
from ..models import (
InteractiveSource, Source, SaneScanner, StagingFolderSource
)
from ..tasks import task_source_handle_upload
from ..utils import get_upload_form_class
# import shutil
# from ..ocr import ocrFile
from django.http import HttpResponse
__all__ = ('UploadBaseView', 'UploadInteractiveView')
logger = logging.getLogger(name=__name__)
class UploadBaseView(MultiFormView):
prefixes = {'source_form': 'source', 'document_form': 'document'}
template_name = 'appearance/generic_form.html'
@staticmethod
def get_active_tab_links(document=None):
return [
UploadBaseView.get_tab_link_for_source(source, document)
for source in InteractiveSource.objects.filter(enabled=True).select_subclasses()
]
@staticmethod
def get_tab_link_for_source(source, document=None):
if document:
view = 'sources:document_file_upload'
args = ('"{}"'.format(document.pk), '"{}"'.format(source.pk),)
else:
view = 'sources:document_upload_interactive'
args = ('"{}"'.format(source.pk),)
return Link(
args=args,
conditional_active=factory_conditional_active_by_source(
source=source
), icon=icon_upload_view_link, keep_query=True,
remove_from_query=['page'], text=source.label, view=view
)
def dispatch(self, request, *args, **kwargs):
if 'source_id' in kwargs:
self.source = get_object_or_404(
klass=Source.objects.filter(enabled=True).select_subclasses(),
pk=kwargs['source_id']
)
else:
self.source = InteractiveSource.objects.filter(
enabled=True
).select_subclasses().first()
if not InteractiveSource.objects.filter(enabled=True).exists():
messages.error(
message=_(
'No interactive document sources have been defined or '
'none have been enabled, create one before proceeding.'
), request=request
)
return HttpResponseRedirect(
redirect_to=reverse(viewname='sources:setup_source_list')
)
return super().dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
subtemplates_list = []
context['source'] = self.source
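        # Choose the sub-templates by source type: staging folders also list their files,
        # scanners relabel the submit button, and plain web forms use the default multiform layout.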
if isinstance(self.source, StagingFolderSource):
try:
staging_filelist = list(self.source.get_files())
except Exception as exception:
messages.error(message=exception, request=self.request)
staging_filelist = []
finally:
subtemplates_list = [
{
'name': 'appearance/generic_multiform_subtemplate.html',
'context': {
'forms': context['forms'],
'title': _('Document properties'),
}
},
{
'name': 'appearance/generic_list_subtemplate.html',
'context': {
'hide_link': True,
'no_results_icon': icon_staging_folder_file,
'no_results_text': _(
'This could mean that the staging folder is '
'empty. It could also mean that the '
'operating system user account being used '
'for Mayan EDMS doesn\'t have the necessary '
'file system permissions for the folder.'
),
'no_results_title': _(
'No staging files available'
),
'object_list': staging_filelist,
'title': _('Files in staging path'),
}
},
]
elif isinstance(self.source, SaneScanner):
subtemplates_list.append({
'name': 'appearance/generic_multiform_subtemplate.html',
'context': {
'forms': context['forms'],
'is_multipart': True,
'title': _('Document properties'),
'submit_label': _('Scan'),
},
})
else:
subtemplates_list.append({
'name': 'appearance/generic_multiform_subtemplate.html',
'context': {
'forms': context['forms'],
'is_multipart': True,
'title': _('Document properties'),
},
})
menu_sources.bound_links['sources:document_upload_interactive'] = self.tab_links
menu_sources.bound_links['sources:document_file_upload'] = self.tab_links
context.update(
{
'subtemplates_list': subtemplates_list,
}
)
return context
class UploadInteractiveView(UploadBaseView):
def create_source_form_form(self, **kwargs):
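        # Only offer the "expand compressed file" option when the source defers that decision to the user.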
if hasattr(self.source, 'uncompress'):
show_expand = self.source.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK
else:
show_expand = False
return self.get_form_classes()['source_form'](
prefix=kwargs['prefix'],
source=self.source,
show_expand=show_expand,
data=kwargs.get('data', None),
files=kwargs.get('files', None),
)
def create_document_form_form(self, **kwargs):
return self.get_form_classes()['document_form'](
prefix=kwargs['prefix'],
document_type=self.document_type,
data=kwargs.get('data', None),
files=kwargs.get('files', None),
)
def dispatch(self, request, *args, **kwargs):
self.subtemplates_list = []
self.document_type = get_object_or_404(
klass=DocumentType, pk=self.request.GET.get(
'document_type_id', self.request.POST.get('document_type_id')
)
)
AccessControlList.objects.check_access(
obj=self.document_type, permissions=(permission_document_create,),
user=request.user
)
self.tab_links = UploadBaseView.get_active_tab_links()
try:
return super().dispatch(request, *args, **kwargs)
except Exception as exception:
if request.is_ajax():
return JsonResponse(
data={'error': force_text(s=exception)}, status=500
)
else:
raise
def forms_valid(self, forms):
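        # Decide whether to expand (uncompress) the upload: use the form value when the source
        # asks the user, otherwise follow the source's fixed setting.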
if self.source.can_compress:
if self.source.uncompress == SOURCE_UNCOMPRESS_CHOICE_ASK:
expand = forms['source_form'].cleaned_data.get('expand')
else:
if self.source.uncompress == SOURCE_UNCOMPRESS_CHOICE_Y:
expand = True
else:
expand = False
else:
expand = False
try:
uploaded_file = self.source.get_upload_file_object(
forms['source_form'].cleaned_data
)
# print(uploaded_file)
except SourceException as exception:
messages.error(message=exception, request=self.request)
else:
shared_uploaded_file = SharedUploadedFile.objects.create(
file=uploaded_file.file
)
if not self.request.user.is_anonymous:
user = self.request.user
user_id = self.request.user.pk
else:
user = None
user_id = None
try:
self.source.clean_up_upload_file(uploaded_file)
except Exception as exception:
messages.error(message=exception, request=self.request)
querystring = self.request.GET.copy()
querystring.update(self.request.POST)
try:
Document.execute_pre_create_hooks(
kwargs={
'document_type': self.document_type,
'user': user
}
)
DocumentFile.execute_pre_create_hooks(
kwargs={
'document_type': self.document_type,
'shared_uploaded_file': shared_uploaded_file,
'user': user
}
)
ocr_result = forms['document_form'].cleaned_data.get('description')
# if '.png' in str(shared_uploaded_file) or '.jpg' in str(shared_uploaded_file):
#
# with shared_uploaded_file.open() as file_object:
# file_path = str(file_object)
# temp_path = 'temp/temp.png'
# print('start copy file.........')
# shutil.copy(file_path, temp_path)
# print('start ocr.........')
# ocr_result = ocrFile(temp_path)
# # print(ocr_result)
# print('ocr done......')
task_source_handle_upload.apply_async(
kwargs={
# 'description': forms['document_form'].cleaned_data.get('description'),
'description': ocr_result,
'document_type_id': self.document_type.pk,
'expand': expand,
'label': forms['document_form'].get_final_label(
filename=force_text(s=shared_uploaded_file)
),
'language': forms['document_form'].cleaned_data.get('language'),
'querystring': querystring.urlencode(),
'shared_uploaded_file_id': shared_uploaded_file.pk,
'source_id': self.source.pk,
'user_id': user_id,
}
)
except Exception as exception:
message = _(
'Error executing document upload task; '
'%(exception)s'
) % {
'exception': exception,
}
logger.critical(msg=message, exc_info=True)
raise type(exception)(message)
else:
messages.success(
message=_(
'New document queued for upload and will be available '
'shortly.'
), request=self.request
)
r = HttpResponseRedirect(
redirect_to='{}?{}'.format(
reverse(
viewname=self.request.resolver_match.view_name,
kwargs=self.request.resolver_match.kwargs
), self.request.META['QUERY_STRING']
),
)
# print(r)
return r
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['title'] = _(
'Upload a document of type "%(document_type)s" from '
'source: %(source)s'
) % {'document_type': self.document_type, 'source': self.source.label}
if not isinstance(self.source, StagingFolderSource) and not isinstance(self.source, SaneScanner):
context['subtemplates_list'][0]['context'].update(
{
'form_action': '{}?{}'.format(
reverse(
viewname=self.request.resolver_match.view_name,
kwargs=self.request.resolver_match.kwargs
), self.request.META['QUERY_STRING']
),
'form_css_classes': 'dropzone',
'form_disable_submit': True,
'form_id': 'html5upload',
}
)
# print(context)
return context
def get_form_classes(self):
source_form_class = get_upload_form_class(
source_type_name=self.source.source_type
)
# Override source form class to enable the HTML5 file uploader
if source_form_class == WebFormUploadForm:
source_form_class = WebFormUploadFormHTML5
return {
'document_form': NewDocumentForm,
'source_form': source_form_class
}
| 37.736413
| 105
| 0.537121
|
dab191c1e89611441d84f991721c9cacef0e7652
| 6,696
|
py
|
Python
|
tests/test_pysendyit.py
|
0x6f736f646f/sendit-python
|
c7f2c8d7837547f691ed60b15d62166a15a7dbaa
|
[
"MIT"
] | 1
|
2021-07-12T09:36:25.000Z
|
2021-07-12T09:36:25.000Z
|
tests/test_pysendyit.py
|
0x6f736f646f/sendit-python
|
c7f2c8d7837547f691ed60b15d62166a15a7dbaa
|
[
"MIT"
] | 26
|
2020-05-24T23:33:30.000Z
|
2021-07-04T13:48:30.000Z
|
tests/test_pysendyit.py
|
0x6f736f646f/sendyit-python
|
c7f2c8d7837547f691ed60b15d62166a15a7dbaa
|
[
"MIT"
] | null | null | null |
import unittest
import os
from pysendyit.pysendyit import Sendy
from pysendyit.errors import SendyException
api_username = os.getenv('API_USERNAME')
api_key = os.getenv('API_KEY')
base_url = os.getenv('BASE_URL')
class SendyTest(unittest.TestCase):
def setUp(self):
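        # Build a client from environment credentials plus reusable fixture payloads for the calls under test.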
self.sendy = Sendy(api_key=api_key, api_username=api_username, base_url=base_url)
self.url_parameter = "track"
self.request_data = {"command":"track", "order_no":"AA2395374", "request_token_id":"request_token_id"}
self.location_data = {"name":"test", "latitude":-1.300577, "longitude": 36.78183, "description": "test"}
self.person_data = {"name":"test", "phone":"0712345678", "email": "test@mail.com", "notes": "test"}
self.package_size = {"weight":"2", "height":"2", "width": "2", "length":"2", "item_name":"test"}
self.payment_data = {"status":"test", "pay_method":"test", "amount": "200"}
self.delivery_data = {
"pick_up_date": "pick_up_date", "collect_payment": self.payment_data,
"carrier_type": "carrier_type", "return": "return_type", "note": "note",
"note_status": "note_status", "request_type": "request_type",
"order_type": "order_type", "ecommerce_order": "ecommerce_order",
"express": "express", "skew": "skew",
"package_size": [self.package_size, self.package_size]
}
def test_prepare_location_details_1(self):
self.assertNotEqual(self.location_data.values(), self.sendy.prepare_location_details("from", self.location_data['name'], self.location_data['latitude'], self.location_data['longitude'], self.location_data['description']).values())
def test_prepare_location_details_2(self):
self.assertNotEqual(self.location_data.values(), self.sendy.prepare_location_details("to", self.location_data['name'], self.location_data['latitude'], self.location_data['longitude'], self.location_data['description']).values())
def test_prepare_location_details_3(self):
self.assertIsNotNone(self.sendy.prepare_location_details("to", self.location_data['name'], self.location_data['latitude'], self.location_data['longitude'], self.location_data['description']))
def test_prepare_location_details_4(self):
self.assertRaises(SendyException, self.sendy.prepare_location_details, "test", self.location_data['name'], self.location_data['latitude'], self.location_data['longitude'], self.location_data['description'])
def test_prepare_person_details_1(self):
self.assertNotEqual(self.person_data.values(), self.sendy.prepare_person_details("recepient", self.person_data['name'], self.person_data['phone'], self.person_data['email'], self.person_data['notes']).values())
def test_prepare_person_details_2(self):
self.assertNotEqual(self.person_data.values(), self.sendy.prepare_person_details("sender", self.person_data['name'], self.person_data['phone'], self.person_data['email'], self.person_data['notes']).values())
def test_prepare_person_details_3(self):
self.assertIsNotNone(self.sendy.prepare_person_details("sender", self.person_data['name'], self.person_data['phone'], self.person_data['email'], self.person_data['notes']))
def test_prepare_person_details_4(self):
self.assertRaises(SendyException, self.sendy.prepare_person_details, "test", self.person_data['name'], self.person_data['phone'], self.person_data['email'], self.person_data['notes'])
def test_prepare_package_size_1(self):
self.assertDictEqual(self.package_size, self.sendy.prepare_package_size(self.package_size['weight'], self.package_size['height'], self.package_size['width'], self.package_size['length'], self.package_size['item_name']))
def test_prepare_package_size_2(self):
self.assertIsNotNone(self.sendy.prepare_package_size(self.package_size['weight'], self.package_size['height'], self.package_size['width'], self.package_size['length'], self.package_size['item_name']))
def test_prepare_collect_payment_1(self):
self.assertDictEqual(self.payment_data, self.sendy.prepare_collect_payment(self.payment_data['status'], self.payment_data['pay_method'], self.payment_data['amount']))
def test_prepare_collect_payment_2(self):
self.assertIsNotNone(self.sendy.prepare_collect_payment(self.payment_data['status'], self.payment_data['pay_method'], self.payment_data['amount']))
def test_prepare_delivery_details_1(self):
self.delivery_data['package_size'] = [self.delivery_data['package_size'][0]]
self.assertEqual(self.delivery_data, self.sendy.prepare_delivery_details(self.delivery_data['pick_up_date'], self.delivery_data['collect_payment'], self.delivery_data['carrier_type'], self.delivery_data['return'], self.delivery_data['note'], self.delivery_data['note_status'], self.delivery_data['request_type'], self.delivery_data['order_type'], self.delivery_data['ecommerce_order'], self.delivery_data['express'], self.delivery_data['skew'], self.delivery_data['package_size'][0]))
def test_prepare_delivery_details_2(self):
self.assertIsNotNone(self.sendy.prepare_delivery_details(self.delivery_data['pick_up_date'], self.delivery_data['collect_payment'], self.delivery_data['carrier_type'], self.delivery_data['return'], self.delivery_data['note'], self.delivery_data['note_status'], self.delivery_data['request_type'], self.delivery_data['order_type'], self.delivery_data['ecommerce_order'], self.delivery_data['express'], self.delivery_data['skew'], self.delivery_data['package_size'][0]))
def test_request_delivery(self):
self.assertIsNotNone(self.sendy.request_delivery(self.location_data, self.location_data, self.person_data, self.person_data, self.delivery_data))
def test_request_multi_destination_delivery(self):
self.assertIsNotNone(self.sendy.request_multi_destination_delivery(self.location_data, self.location_data, self.location_data, self.person_data, self.person_data, self.delivery_data))
def test_request_multi_pickup_delivery(self):
self.assertIsNotNone(self.sendy.request_multi_pickup_delivery("0712345678", self.location_data, self.location_data, self.location_data, self.person_data, self.person_data, self.delivery_data))
def test_complete_delivery(self):
self.assertIsNotNone(self.sendy.complete_delivery(self.delivery_data, order_no="AA2395374", request_token_id="request_token_id"))
def test_track_or_cancel_delivery(self):
self.assertIsNotNone(self.sendy.track_or_cancel_delivery(command="track", order_no="AA2395374", request_token_id="request_token_id"))
if __name__ == '__main__':
unittest.main()
| 73.582418
| 492
| 0.750597
|
02663aa5470c59d11ca07400e80efc83041b3308
| 8,652
|
py
|
Python
|
statsmodels/examples/ex_arch_canada.py
|
ginggs/statsmodels
|
a74a179d2a3267ed992871f8d9ef6c6d86c9b934
|
[
"BSD-3-Clause"
] | 76
|
2019-12-28T08:37:10.000Z
|
2022-03-29T02:19:41.000Z
|
statsmodels/examples/ex_arch_canada.py
|
ginggs/statsmodels
|
a74a179d2a3267ed992871f8d9ef6c6d86c9b934
|
[
"BSD-3-Clause"
] | 1
|
2019-07-29T08:35:08.000Z
|
2019-07-29T08:35:08.000Z
|
statsmodels/examples/ex_arch_canada.py
|
ginggs/statsmodels
|
a74a179d2a3267ed992871f8d9ef6c6d86c9b934
|
[
"BSD-3-Clause"
] | 35
|
2020-02-04T14:46:25.000Z
|
2022-03-24T03:56:17.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Sat Dec 24 07:31:47 2011
Author: Josef Perktold
"""
import numpy as np
import statsmodels.sandbox.stats.diagnostic as dia
canada_raw = '''\
405.36646642737 929.610513893698 7.52999999999884 386.136109062605
404.639833965913 929.803984550587 7.69999999999709 388.135759111711
403.814883043744 930.318387567177 7.47000000000116 390.540112911955
404.215773188006 931.427687420772 7.2699999999968 393.963817246136
405.046713585284 932.662005594273 7.37000000000262 396.764690917547
404.416738673847 933.550939726636 7.12999999999738 400.021701616327
402.81912737043 933.531526191785 7.40000000000146 400.751498688807
401.977334663103 933.076879439814 8.33000000000175 405.733473658807
402.089724946428 932.12375320915 8.83000000000175 409.05038628366
401.306688373207 930.635939140315 10.429999999993 411.398377747425
401.630171263522 929.097059933419 12.1999999999971 413.019421511595
401.56375463175 928.563335601161 12.7700000000041 415.166962884156
402.815698906973 929.069380060201 12.429999999993 414.662070678749
403.142107624713 930.265516098198 12.2299999999959 415.731936138368
403.078619166324 931.677031559203 11.6999999999971 416.231468866173
403.718785733801 932.138967575148 11.1999999999971 418.14392690728
404.866799027579 932.276686471608 11.2700000000041 419.735231229658
405.636186735378 932.832783118083 11.4700000000012 420.484186198549
405.136285378794 933.733419116009 11.3000000000029 420.930881402259
406.024639922986 934.177206176622 11.1699999999983 422.112404525291
406.412269729241 934.592839827856 11 423.627805811063
406.300932644569 935.606709830033 10.6300000000047 423.988686751336
406.335351723382 936.511085968336 10.2700000000041 424.190212657915
406.773695329549 937.420090112655 10.1999999999971 426.127043353785
405.152547649247 938.415921627889 9.66999999999825 426.857794216679
404.929830809648 938.999170021426 9.60000000000582 426.745717993024
404.576546350926 939.235354789206 9.60000000000582 426.885793656802
404.199492630983 939.679504234357 9.5 428.840253264144
405.94985619596 940.249674139969 9.5 430.122322107039
405.82209202516 941.435818685214 9.02999999999884 430.230679154048
406.446282537108 942.29809597644 8.69999999999709 430.392994893689
407.051247525876 943.532223256403 8.13000000000466 432.028420083791
407.946023990985 944.34896981513 7.87000000000262 433.388625934544
408.179584663105 944.821488789039 7.66999999999825 433.964091817787
408.599812740441 945.067136927327 7.80000000000291 434.484384354647
409.090560656008 945.80672616174 7.7300000000032 436.156879277168
408.704215141145 946.869661504613 7.56999999999971 438.265143944308
408.980275213206 946.876612143542 7.56999999999971 438.763587343863
408.328690037174 947.249692256472 7.33000000000175 439.949811558539
407.885696563307 947.651276093962 7.56999999999971 441.835856392131
407.260532233258 948.183970741596 7.62999999999738 443.176872656863
406.775150765526 948.349239264364 7.59999999999854 444.359199033223
406.179413590339 948.032170661406 8.16999999999825 444.523614807208
405.439793348166 947.106483115935 9.19999999999709 446.969404642587
403.279970790458 946.079554231134 10.1699999999983 450.158586973168
403.364855995771 946.183811678692 10.3300000000017 451.546427290378
403.380680430043 946.22579516585 10.3999999999942 452.298351499968
404.003182812546 945.997783938785 10.3699999999953 453.120066578834
404.47739841708 945.518279080208 10.6000000000058 453.999145996277
404.786782762866 945.351397570438 11 454.955176222477
405.271003921828 945.291785517556 11.3999999999942 455.482381155116
405.382993140508 945.400785900878 11.7299999999959 456.100929020225
405.156416006566 945.905809840959 11.070000000007 457.202696739531
406.470043094757 945.90347041344 11.6699999999983 457.388589594786
406.229308967752 946.319028746014 11.4700000000012 457.779898919191
406.726483850871 946.579621275764 11.3000000000029 457.553538085846
408.578504884277 946.780032223884 10.9700000000012 458.80240271533
409.67671010704 947.628284240641 10.6300000000047 459.05640335985
410.385763295936 948.622057553611 10.1000000000058 459.15782324686
410.539523677181 949.399183241404 9.66999999999825 459.703720275789
410.445258303139 949.948137966398 9.52999999999884 459.703720275789
410.625605270832 949.794494142446 9.47000000000116 460.025814162716
410.867239714014 949.953380175189 9.5 461.025722503696
411.235917829196 950.250239444989 9.27000000000407 461.30391443673
410.663655285725 950.538030883093 9.5 461.4030814421
410.808508412624 950.787128498243 9.42999999999302 462.927726133156
412.115961520089 950.869528648471 9.69999999999709 464.688777934061
412.999407129539 950.928132469716 9.89999999999418 465.071700094375
412.955056755303 951.845722481401 9.42999999999302 464.285125295526
412.82413309368 952.6004761952 9.30000000000291 464.034426099541
413.048874899 953.597552755418 8.86999999999534 463.453479461824
413.611017876145 954.143388344158 8.77000000000407 465.071700094375
413.604781916778 954.542593332134 8.60000000000582 466.088867474481
412.968388225217 955.263136106029 8.33000000000175 466.617120754625
412.265886525002 956.056052852469 8.16999999999825 465.747796561181
412.910594097915 956.79658640007 8.02999999999884 465.899527268299
413.829416419695 957.386480451857 7.90000000000146 466.409925351738
414.22415210314 958.06341570725 7.87000000000262 466.955244491812
415.1677707968 958.716592187518 7.52999999999884 467.628081344681
415.701580225863 959.488142422254 6.93000000000029 467.70256230891
416.867407108435 960.362493080892 6.80000000000291 469.134788222928
417.610399060359 960.783379042937 6.69999999999709 469.336419672322
418.002980476361 961.029029939624 6.93000000000029 470.011666329664
417.266680178544 961.765709811429 6.87000000000262 469.647234439539'''
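# parse the whitespace-separated block above into an (n_observations, 4) array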
canada = np.array(canada_raw.split(), float).reshape(-1,4)
k=2
resarch2 = dia.acorr_lm((canada[:,k]-canada[:,k].mean())**2, maxlag=2, autolag=None, store=1)
print(resarch2)
resarch5 = dia.acorr_lm(canada[:,k]**2, maxlag=12, autolag=None, store=1)
ss = '''\
ARCH LM-test; Null hypothesis: no ARCH effects
Chi-squared = %(chi)-8.4f df = %(df)-4d p-value = %(pval)8.4g
'''
resarch = resarch5
print()
print(ss % dict(chi=resarch[2], df=resarch[-1].resols.df_model, pval=resarch[3]))
#R:FinTS: ArchTest(as.vector(Canada[,3]), lag=5)
'''
ARCH LM-test; Null hypothesis: no ARCH effects
data: as.vector(Canada[, 3])
Chi-squared = 78.878, df = 5, p-value = 1.443e-15
'''
#from ss above
'''
ARCH LM-test; Null hypothesis: no ARCH effects
Chi-squared = 78.849 df = 5 p-value = 1.461e-15
'''
#k=2
#R
'''
ARCH LM-test; Null hypothesis: no ARCH effects
data: as.vector(Canada[, 4])
Chi-squared = 74.6028, df = 5, p-value = 1.121e-14
'''
#mine
'''
ARCH LM-test; Null hypothesis: no ARCH effects
Chi-squared = 74.6028 df = 5 p-value = 1.126e-14
'''
'''
> ArchTest(as.vector(Canada[,4]), lag=12)
ARCH LM-test; Null hypothesis: no ARCH effects
data: as.vector(Canada[, 4])
Chi-squared = 69.6359, df = 12, p-value = 3.747e-10
'''
#mine:
'''
ARCH LM-test; Null hypothesis: no ARCH effects
Chi-squared = 69.6359 df = 12 p-value = 3.747e-10
'''
| 54.415094
| 93
| 0.684928
|
7e34ec561c4b30254110a22eece3391805a5024b
| 277
|
py
|
Python
|
02. Identify numerical and categorial data/categorial_trait.py
|
SausanCantik/descriptive-stats-phenotype-data
|
04aab439530013b78cdf8f850d2f1c06fa73cbd7
|
[
"MIT"
] | null | null | null |
02. Identify numerical and categorial data/categorial_trait.py
|
SausanCantik/descriptive-stats-phenotype-data
|
04aab439530013b78cdf8f850d2f1c06fa73cbd7
|
[
"MIT"
] | null | null | null |
02. Identify numerical and categorial data/categorial_trait.py
|
SausanCantik/descriptive-stats-phenotype-data
|
04aab439530013b78cdf8f850d2f1c06fa73cbd7
|
[
"MIT"
] | null | null | null |
# A function to show the list of traits with categorical data.
# classifying_column is assumed to be defined elsewhere in this project; it splits
# the dataframe's columns into numeric and categorical lists.
def categorial_trait(dataframe):
    numeric, categorial = classifying_column(dataframe)
    print('Traits with categorical data : ', '\n', categorial, '\n')
    print('Total count : ', len(categorial), 'Traits')
| 39.571429
| 66
| 0.700361
|
55ae7d649b0d97d7a64edaffcea6319e916602d8
| 865
|
py
|
Python
|
python/paddle/nn/initalizer.py
|
slf12/Paddle
|
fa43d74a3a16ac696db5dc893c9a7b1c6913dc85
|
[
"Apache-2.0"
] | 1
|
2020-05-02T00:00:20.000Z
|
2020-05-02T00:00:20.000Z
|
python/paddle/nn/initalizer.py
|
slf12/Paddle
|
fa43d74a3a16ac696db5dc893c9a7b1c6913dc85
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/nn/initalizer.py
|
slf12/Paddle
|
fa43d74a3a16ac696db5dc893c9a7b1c6913dc85
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TODO: define the initializers to create a Parameter in neural network
__all__ = [
# 'Bilinear',
# 'Constant',
# 'MSRA',
# 'Normal',
# 'TruncatedNormal',
# 'Uniform',
# 'Xavier'
]
| 33.269231
| 74
| 0.676301
|
e983b1fdab64d89fd3421b3177bdc203c5e8e42c
| 1,332
|
py
|
Python
|
stonehenge/db/migrations/migration.py
|
RobertTownley/stonehenge
|
376b8e1501dd12ac1bcec5de680a5b521b0d949c
|
[
"MIT"
] | 1
|
2018-09-07T14:15:31.000Z
|
2018-09-07T14:15:31.000Z
|
stonehenge/db/migrations/migration.py
|
RobertTownley/stonehenge
|
376b8e1501dd12ac1bcec5de680a5b521b0d949c
|
[
"MIT"
] | 5
|
2018-09-06T01:48:12.000Z
|
2021-05-08T10:47:00.000Z
|
stonehenge/db/migrations/migration.py
|
RobertTownley/stonehenge
|
376b8e1501dd12ac1bcec5de680a5b521b0d949c
|
[
"MIT"
] | null | null | null |
import json
import os
from typing import List
from stonehenge.db.operations import Operation
from stonehenge.db.migrations.exceptions import UnappliedMigrationException
class Migration:
def __init__(
self,
operations: List[Operation],
migrations_dir: str,
):
self.operations = operations
self.migrations_dir = migrations_dir
def save_to_file(self) -> str:
next_migration_index = self.get_next_migration_index()
filename = f"Migration_{next_migration_index}.json"
filepath = os.path.join(self.migrations_dir, filename)
if os.path.isfile(filepath):
raise UnappliedMigrationException(filename)
with open(filepath, "w+") as f:
content = self.to_json()
content_str = json.dumps(content, indent=4)
f.write(content_str)
return filename
def get_next_migration_index(self) -> int:
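        # Scan the migrations directory and return one past the highest index already on disk (1 if none exist).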
highest = 1
for filename in os.listdir(self.migrations_dir):
try:
                # parse the numeric suffix of "Migration_<n>.json"; non-matching names raise ValueError and are skipped
                index = int(filename[10:].split(".", 1)[0])
except ValueError:
continue
if index >= highest:
highest = index + 1
return highest
def to_json(self):
return {
"operations": [o.to_json() for o in self.operations],
}
| 28.340426
| 75
| 0.615616
|
8cb01eea00fa6ec102480781a60ee883b01c4f18
| 517
|
py
|
Python
|
Packages/anaconda_php/plugin_version.py
|
prisis/sublime-text-packages
|
99ae8a5496613e27a75e5bd91723549b21476e60
|
[
"MIT"
] | null | null | null |
Packages/anaconda_php/plugin_version.py
|
prisis/sublime-text-packages
|
99ae8a5496613e27a75e5bd91723549b21476e60
|
[
"MIT"
] | 1
|
2016-02-10T09:50:09.000Z
|
2016-02-10T09:50:09.000Z
|
Packages/anaconda_php/plugin_version.py
|
prisis/sublime-text-packages
|
99ae8a5496613e27a75e5bd91723549b21476e60
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2014 - Oscar Campos <oscar.campos@member.fsf.org>
# This program is Free Software see LICENSE file for details
import os
# Plugin version
messages_json = os.path.join(os.path.dirname(__file__), 'messages.json')
with open(messages_json, 'r') as message_file:
message_data = message_file.read()
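# the plugin version is taken from the next-to-last line of messages.json: its key (before the colon), quotes stripped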
ver = message_data.splitlines()[-2].split(':')[0].strip().replace('"', '')
version = tuple([int(i) for i in ver.split('.')])
# Minimum required anaconda version
anaconda_required_version = (1, 3, 0)
| 30.411765
| 74
| 0.715667
|