hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ea4838d1b97595b551548482c1d2bb4259ba3a7e | 8,103 | py | Python | views/view.py | edchelstephens/django-rest-utils | 15cee427149217d1e53384281894f91e9653b6b4 | [
"BSD-3-Clause"
] | 1 | 2022-02-20T01:37:25.000Z | 2022-02-20T01:37:25.000Z | views/view.py | edchelstephens/django-rest-utils | 15cee427149217d1e53384281894f91e9653b6b4 | [
"BSD-3-Clause"
] | null | null | null | views/view.py | edchelstephens/django-rest-utils | 15cee427149217d1e53384281894f91e9653b6b4 | [
"BSD-3-Clause"
] | null | null | null | """Module for views wrappers."""
from typing import Optional, List, Dict, Union
from django.conf import settings
from django.views.generic.base import View
from django.http.response import JsonResponse, HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.status import (
is_success,
is_client_error,
is_server_error,
HTTP_500_INTERNAL_SERVER_ERROR,
)
from views.request import DjangoRequestMixin, RestRequestMixin
from exceptions import HumanReadableError
from logging.debug import DebuggerMixin
RequestResponseData = Union[List, Dict]
class DjangoViewAPIMixin(DebuggerMixin):
"""Django View API base class mixin.*
Created to standardize handling request and response.
This class has a RESPONSE class attribute, that will serve as the
response object to be returned by child instances of this base class.
RESPONSE class attribute must be set on child classes as class attributes as well.
The values of which is either one of the valid RESPONSE_CLASSES listed:
- rest_framework's Response
or
- django's JsonResponse
"""
status = 200
RESPONSE = JsonResponse
CONTENT_TYPE = "application/json"
error_dict = {
"title": "Error",
"message": "Unable to process request.",
"errors": None,
}
def get_content_type(self, content_type: Optional[str] = None) -> str:
"""Get view response content_type*.
content_type possible values:
None
'text/html'
'text/plain'
'application/json'
# and others
Defaults to class attribute CONTENT_TYPE, which can be set by children classes.
https://docs.djangoproject.com/en/3.2/ref/request-response/#django.http.HttpRequest.content_type
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type
https://www.iana.org/assignments/media-types/media-types.xhtml
"""
return content_type if content_type is not None else self.CONTENT_TYPE
def get_response(
self, data: RequestResponseData, status: int, content_type: str, **kwargs
) -> HttpResponse:
"""Get the returned response."""
try:
return self.RESPONSE(
data=data, status=status, content_type=content_type, **kwargs
)
except Exception:
return self.RESPONSE(data=data, status=status, content_type=content_type)
def success_response(
self,
data: RequestResponseData,
status=200,
content_type: Optional[str] = None,
) -> HttpResponse:
"""Method for returning success response from api."""
if not is_success(status):
status = 200
content_type = self.get_content_type(content_type)
response = self.get_response(data, status, content_type)
return response
def error_response(
self,
exception: Exception,
error_data: Optional[dict] = None,
status: Optional[int] = None,
content_type: Optional[str] = None,
) -> HttpResponse:
"""Method for returning error response from api.
NOTE: This should be called in the context of an except block.
Arguments:
exception
- The exception instance on the Except block
error_data
- this is a mapping object with the same format as error_dict above.
"""
if settings.DEBUG:
self.debug_exception(exception)
if error_data is None:
error_data = self.error_dict
if status is None:
status = self.status
error_data = self.get_error_data(exception, error_data)
status = self.get_error_status_code(status)
content_type = self.get_content_type(content_type)
response = self.get_response(error_data, status, content_type)
return response
def server_error_response(
self,
exception: Exception,
title="Server Error",
message="Please contact developer.",
status=HTTP_500_INTERNAL_SERVER_ERROR,
errors: Optional[List] = None,
) -> HttpResponse:
"""Return default server error response with debugging."""
self.status = status
self.error_dict["title"] = title
self.error_dict["message"] = message
self.error_dict["errors"] = errors if errors else [str(exception)]
return self.error_response(
exception, error_data=self.error_dict, status=self.status
)
def raise_error(
self,
title="Error",
message="Unable to process request.",
status=400,
errors: Optional[List] = None,
) -> None:
"""Set status error status code and raise the human readable error."""
self.status = status
self.error_dict["title"] = title
self.error_dict["message"] = message
self.error_dict["errors"] = errors if errors else []
raise HumanReadableError(message)
def stopper(self) -> None:
"""For testing human readable exception clauses.
Raises HumanReadableError.
"""
self.raise_error(title="Testing", message="Stopper")
def is_error_human_readable(self, exception: Exception) -> bool:
"""Check if error exception is human readable."""
return isinstance(exception, HumanReadableError)
def get_error_data(self, exception: Exception, error_data: dict) -> None:
"""Ensure correct error response data - a maping object serializable to JSON.*
* On this format: {
"title":<title>,
"message": <message>
}
"""
try:
if self.is_valid_error_dict(error_data):
error_data = error_data
else:
error_data = self.get_default_error_dict()
if self.is_error_human_readable(exception):
exception_message = str(exception)
error_data["message"] = exception_message
except Exception:
error_data = self.get_default_error_dict()
finally:
return error_data
def get_default_error_dict(self) -> dict:
"""Get default error dict."""
return {
"title": "Error",
"message": "Unable to process request.",
"errors": None,
}
def is_valid_error_dict(self, error_data: dict) -> bool:
"""Check if error_data is in valid mapping format same as default_error_dict."""
try:
valid = all(
(
isinstance(error_data, dict),
isinstance(error_data.get("title"), str),
isinstance(error_data.get("message"), str),
)
)
return valid
except Exception:
return False
def get_error_status_code(self, code: int) -> int:
"""Get correct error status code, defaults to 400."""
try:
error_code = 400
if self.is_valid_error_code(self.status):
error_code = self.status
elif self.is_valid_error_code(code):
error_code = code
return error_code
except Exception:
return 400
def is_valid_error_code(self, code: int) -> bool:
"""Check if code is valid error status code."""
return is_client_error(code) or is_server_error(code)
class RestAPIView(DjangoViewAPIMixin, RestRequestMixin, APIView):
"""Our class based view for rest_framework api views.
https://www.django-rest-framework.org/api-guide/views/#class-based-views
"""
RESPONSE = Response
class DjangoView(DjangoViewAPIMixin, DjangoRequestMixin, View):
"""Our class based view for django views.
https://docs.djangoproject.com/en/3.2/topics/class-based-views/#class-based-views
https://docs.djangoproject.com/en/3.2/ref/class-based-views/base/#view
"""
RESPONSE = JsonResponse
| 31.165385 | 104 | 0.629273 | """Module for views wrappers."""
from typing import Optional, List, Dict, Union
from django.conf import settings
from django.views.generic.base import View
from django.http.response import JsonResponse, HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.status import (
is_success,
is_client_error,
is_server_error,
HTTP_500_INTERNAL_SERVER_ERROR,
)
from views.request import DjangoRequestMixin, RestRequestMixin
from exceptions import HumanReadableError
from logging.debug import DebuggerMixin
RequestResponseData = Union[List, Dict]
class DjangoViewAPIMixin(DebuggerMixin):
"""Django View API base class mixin.*
Created to standardize handling request and response.
This class has a RESPONSE class attribute, that will serve as the
response object to be returned by child instances of this base class.
RESPONSE class attribute must be set on child classes as class attributes as well.
The values of which is either one of the valid RESPONSE_CLASSES listed:
- rest_framework's Response
or
- django's JsonResponse
"""
status = 200
RESPONSE = JsonResponse
CONTENT_TYPE = "application/json"
error_dict = {
"title": "Error",
"message": "Unable to process request.",
"errors": None,
}
def get_content_type(self, content_type: Optional[str] = None) -> str:
"""Get view response content_type*.
content_type possible values:
None
'text/html'
'text/plain'
'application/json'
# and others
Defaults to class attribute CONTENT_TYPE, which can be set by children classes.
https://docs.djangoproject.com/en/3.2/ref/request-response/#django.http.HttpRequest.content_type
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type
https://www.iana.org/assignments/media-types/media-types.xhtml
"""
return content_type if content_type is not None else self.CONTENT_TYPE
def get_response(
self, data: RequestResponseData, status: int, content_type: str, **kwargs
) -> HttpResponse:
"""Get the returned response."""
try:
return self.RESPONSE(
data=data, status=status, content_type=content_type, **kwargs
)
except Exception:
return self.RESPONSE(data=data, status=status, content_type=content_type)
def success_response(
self,
data: RequestResponseData,
status=200,
content_type: Optional[str] = None,
) -> HttpResponse:
"""Method for returning success response from api."""
if not is_success(status):
status = 200
content_type = self.get_content_type(content_type)
response = self.get_response(data, status, content_type)
return response
def error_response(
self,
exception: Exception,
error_data: Optional[dict] = None,
status: Optional[int] = None,
content_type: Optional[str] = None,
) -> HttpResponse:
"""Method for returning error response from api.
NOTE: This should be called in the context of an except block.
Arguments:
exception
- The exception instance on the Except block
error_data
- this is a mapping object with the same format as error_dict above.
"""
if settings.DEBUG:
self.debug_exception(exception)
if error_data is None:
error_data = self.error_dict
if status is None:
status = self.status
error_data = self.get_error_data(exception, error_data)
status = self.get_error_status_code(status)
content_type = self.get_content_type(content_type)
response = self.get_response(error_data, status, content_type)
return response
def server_error_response(
self,
exception: Exception,
title="Server Error",
message="Please contact developer.",
status=HTTP_500_INTERNAL_SERVER_ERROR,
errors: Optional[List] = None,
) -> HttpResponse:
"""Return default server error response with debugging."""
self.status = status
self.error_dict["title"] = title
self.error_dict["message"] = message
self.error_dict["errors"] = errors if errors else [str(exception)]
return self.error_response(
exception, error_data=self.error_dict, status=self.status
)
def raise_error(
self,
title="Error",
message="Unable to process request.",
status=400,
errors: Optional[List] = None,
) -> None:
"""Set status error status code and raise the human readable error."""
self.status = status
self.error_dict["title"] = title
self.error_dict["message"] = message
self.error_dict["errors"] = errors if errors else []
raise HumanReadableError(message)
def stopper(self) -> None:
"""For testing human readable exception clauses.
Raises HumanReadableError.
"""
self.raise_error(title="Testing", message="Stopper")
def is_error_human_readable(self, exception: Exception) -> bool:
"""Check if error exception is human readable."""
return isinstance(exception, HumanReadableError)
def get_error_data(self, exception: Exception, error_data: dict) -> None:
"""Ensure correct error response data - a maping object serializable to JSON.*
* On this format: {
"title":<title>,
"message": <message>
}
"""
try:
if self.is_valid_error_dict(error_data):
error_data = error_data
else:
error_data = self.get_default_error_dict()
if self.is_error_human_readable(exception):
exception_message = str(exception)
error_data["message"] = exception_message
except Exception:
error_data = self.get_default_error_dict()
finally:
return error_data
def get_default_error_dict(self) -> dict:
"""Get default error dict."""
return {
"title": "Error",
"message": "Unable to process request.",
"errors": None,
}
def is_valid_error_dict(self, error_data: dict) -> bool:
"""Check if error_data is in valid mapping format same as default_error_dict."""
try:
valid = all(
(
isinstance(error_data, dict),
isinstance(error_data.get("title"), str),
isinstance(error_data.get("message"), str),
)
)
return valid
except Exception:
return False
def get_error_status_code(self, code: int) -> int:
"""Get correct error status code, defaults to 400."""
try:
error_code = 400
if self.is_valid_error_code(self.status):
error_code = self.status
elif self.is_valid_error_code(code):
error_code = code
return error_code
except Exception:
return 400
def is_valid_error_code(self, code: int) -> bool:
"""Check if code is valid error status code."""
return is_client_error(code) or is_server_error(code)
class RestAPIView(DjangoViewAPIMixin, RestRequestMixin, APIView):
"""Our class based view for rest_framework api views.
https://www.django-rest-framework.org/api-guide/views/#class-based-views
"""
RESPONSE = Response
class DjangoView(DjangoViewAPIMixin, DjangoRequestMixin, View):
"""Our class based view for django views.
https://docs.djangoproject.com/en/3.2/topics/class-based-views/#class-based-views
https://docs.djangoproject.com/en/3.2/ref/class-based-views/base/#view
"""
RESPONSE = JsonResponse
| 0 | 0 | 0 |
bf409a2f226a3e16369d3334f9022698965c2407 | 691 | py | Python | setup.py | hulk-1/sublime-backup-cli | 1508426c320d233ccf7639df75e2c9dbee8777bd | [
"Apache-2.0"
] | 3 | 2019-07-16T05:21:53.000Z | 2019-10-22T13:50:23.000Z | setup.py | hulk-1/sublime-backup-cli | 1508426c320d233ccf7639df75e2c9dbee8777bd | [
"Apache-2.0"
] | 1 | 2019-10-22T13:52:49.000Z | 2019-10-22T13:52:49.000Z | setup.py | hulk-1/sublime-backup-cli | 1508426c320d233ccf7639df75e2c9dbee8777bd | [
"Apache-2.0"
] | 5 | 2019-10-22T13:50:24.000Z | 2021-07-27T20:34:33.000Z | from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='sublime-backup',
version='0.3',
py_modules=['cli'],
author = 'nishantwrp',
author_email = 'mittalnishant14@outlook.com',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/nishantwrp/sublime-backup-cli',
license = 'Apache 2.0',
description = 'A simple command line tool to backup / sync your sublime snippets',
install_requires=[
'Click','configparser','appdirs','requests'
],
entry_points='''
[console_scripts]
sublime-backup=cli:cli
''',
)
| 27.64 | 86 | 0.657019 | from setuptools import setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='sublime-backup',
version='0.3',
py_modules=['cli'],
author = 'nishantwrp',
author_email = 'mittalnishant14@outlook.com',
long_description=long_description,
long_description_content_type="text/markdown",
url = 'https://github.com/nishantwrp/sublime-backup-cli',
license = 'Apache 2.0',
description = 'A simple command line tool to backup / sync your sublime snippets',
install_requires=[
'Click','configparser','appdirs','requests'
],
entry_points='''
[console_scripts]
sublime-backup=cli:cli
''',
)
| 0 | 0 | 0 |
5eb0eb9854fe86ca24d110dc547435616474b02a | 4,919 | py | Python | paneldata_pipeline/topics.py | ddionrails/data-pipeline | 40ca8c1e5eaead6db8008a8fd5c589ea3ebaf7d2 | [
"BSD-3-Clause"
] | null | null | null | paneldata_pipeline/topics.py | ddionrails/data-pipeline | 40ca8c1e5eaead6db8008a8fd5c589ea3ebaf7d2 | [
"BSD-3-Clause"
] | 11 | 2019-08-28T15:13:46.000Z | 2021-11-02T06:44:41.000Z | paneldata_pipeline/topics.py | ddionrails/data-pipeline | 40ca8c1e5eaead6db8008a8fd5c589ea3ebaf7d2 | [
"BSD-3-Clause"
] | null | null | null | """Provides the functionality to create a topic tree JSON file."""
import json
from pathlib import Path
from typing import Any, Dict, List, Optional, TypedDict, Union
import pandas
LANGUAGES = dict(en="", de="_de")
class LeafNode(TypedDict):
"""A Node referencing a Concept"""
title: str
key: str
type: str
class Node(LeafNode):
"""A Node referencing a Topic"""
children: List[Any]
class TopicParser:
"""
Generate ``topics.json`` from ``topics.csv`` and ``concepts.csv``::
TopicParser().to_json()
"""
| 31.735484 | 90 | 0.589144 | """Provides the functionality to create a topic tree JSON file."""
import json
from pathlib import Path
from typing import Any, Dict, List, Optional, TypedDict, Union
import pandas
LANGUAGES = dict(en="", de="_de")
class LeafNode(TypedDict):
"""A Node referencing a Concept"""
title: str
key: str
type: str
class Node(LeafNode):
"""A Node referencing a Topic"""
children: List[Any]
class Topic:
all_objects: List[Any] = []
def __init__(
self, name: str = "", parent_name: Optional[str] = None, label: str = ""
):
self.name = name
self.parent_name = parent_name
self.label = label if str(label) != "nan" else ""
self.children: List[Topic] = []
self.concepts: List[Concept] = []
self.all_objects.append(self)
def to_dict(self) -> Node:
children: List[Union[Node, LeafNode]] = [x.to_dict() for x in self.children]
children += [x.to_dict() for x in self.concepts]
return dict(
title=self.label, key="topic_%s" % self.name, type="topic", children=children
)
@classmethod
def get_by_name(cls, name: Optional[str]) -> Optional[Any]:
"""Get topic from all_objects by name"""
if name is None:
return None
for topic in cls.all_objects:
if topic.name == name:
return topic
return None
@classmethod
def get_root_topics(cls) -> List[Any]:
"""Return topics with no parents (== root topics)"""
return [x for x in cls.all_objects if x.parent_name is None]
@classmethod
def add_topics_to_parents(cls) -> None:
for topic in cls.all_objects:
parent = Topic.get_by_name(topic.parent_name)
if parent:
parent.children.append(topic)
class Concept:
all_objects: List[Any] = []
def __init__(self, name: str, topic_name: str, label: str):
self.name = name
self.topic_name = topic_name
self.label = label if str(label) != "nan" else ""
self.all_objects.append(self)
def to_dict(self) -> LeafNode:
return dict(title=self.label, key="concept_%s" % self.name, type="concept")
@classmethod
def add_concepts_to_topics(cls) -> None:
for concept in cls.all_objects:
topic = Topic.get_by_name(concept.topic_name)
if topic:
topic.concepts.append(concept)
else:
print("Topic not found: %s" % concept.topic_name)
class TopicParser:
"""
Generate ``topics.json`` from ``topics.csv`` and ``concepts.csv``::
TopicParser().to_json()
"""
def __init__(
self,
input_folder: Path,
output_folder: Path,
languages: Optional[List[str]] = None,
):
topics_input_csv = input_folder.joinpath("topics.csv")
concepts_input_csv = input_folder.joinpath("concepts.csv")
self.output_json = output_folder.joinpath("topics.json")
if not languages:
languages = ["en", "de"]
self.topics_input_csv = topics_input_csv
self.concepts_input_csv = concepts_input_csv
self.topics_data = pandas.read_csv(topics_input_csv)
self.concepts_data = pandas.read_csv(concepts_input_csv)
self.languages = languages
def to_json(self) -> None:
json_dict = self._create_json()
with open(self.output_json, "w") as json_file:
json_file.write(json.dumps(json_dict))
def _create_json(self) -> List[Dict[str, Any]]:
result = []
for language in self.languages:
result.append(dict(language=language, topics=self._convert_to_dict(language)))
return result
def _convert_to_dict(self, language: str) -> List[Node]:
for row in self.topics_data.to_dict("records"):
if str(row.get("parent")) == "nan":
parent_name = None
else:
parent_name = row.get("parent")
Topic(
name=row.get("name"),
label=row.get("label" + LANGUAGES[language], row.get("name")),
parent_name=parent_name,
)
for row in self.concepts_data.to_dict("records"):
if str(row.get("name", "nan")) != "nan":
Concept(
name=row.get("name"),
topic_name=row.get("topic"),
label=row.get("label" + LANGUAGES[language], row.get("name")),
)
Topic.add_topics_to_parents()
Concept.add_concepts_to_topics()
print("Language: %s" % language)
print("Topics: %s" % len(Topic.all_objects))
print("Concepts: %s" % len(Concept.all_objects))
result = [topic.to_dict() for topic in Topic.get_root_topics()]
Topic.all_objects = []
Concept.all_objects = []
return result
| 3,468 | 739 | 154 |
6190c399878fa6349ae57d322b87254242345881 | 856 | py | Python | cmdb/util/perms.py | 6cloud/6cloud_cmdb | 7e36eb6da13d10362c9812774ba3c82e82a76eec | [
"Apache-2.0"
] | 1 | 2018-10-27T17:31:18.000Z | 2018-10-27T17:31:18.000Z | cmdb/util/perms.py | 6cloud/6cloud_cmdb | 7e36eb6da13d10362c9812774ba3c82e82a76eec | [
"Apache-2.0"
] | 2 | 2021-02-08T20:23:48.000Z | 2021-06-10T20:42:56.000Z | cmdb/util/perms.py | 6cloud/6cloud_cmdb | 7e36eb6da13d10362c9812774ba3c82e82a76eec | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from rest_framework.permissions import BasePermission, DjangoModelPermissions, DjangoObjectPermissions
from system.models import User, Role
| 29.517241 | 102 | 0.725467 | # -*- coding: utf-8 -*-
from rest_framework.permissions import BasePermission, DjangoModelPermissions, DjangoObjectPermissions
from system.models import User, Role
class CustomPermission(BasePermission):
def has_object_permission(self, request, view, obj):
return request.username == obj
def has_permission(self, request, view):
user = User.objects.get(username=request.username)
if request.method in user.get_all_permissions():
return True
else:
return False
class CustomDjangoModelPermission(DjangoModelPermissions):
def has_permission(self, request, view):
self.perms_map['GET'] = ['%(app_label)s.get_%(model_name)s']
super(CustomDjangoModelPermission, self).has_permission(request, view)
class CustomDjangoObjectPermission(DjangoObjectPermissions):
pass | 435 | 103 | 150 |
1746246f4d9d3b9861ee94c29e33ea27526d921c | 23 | py | Python | dynamic_preferences/__init__.py | mirk8xr/django-dynamic-preferences | 456df7a1d0d36e8cf3d613438e9cd76a241acc1e | [
"BSD-3-Clause"
] | null | null | null | dynamic_preferences/__init__.py | mirk8xr/django-dynamic-preferences | 456df7a1d0d36e8cf3d613438e9cd76a241acc1e | [
"BSD-3-Clause"
] | null | null | null | dynamic_preferences/__init__.py | mirk8xr/django-dynamic-preferences | 456df7a1d0d36e8cf3d613438e9cd76a241acc1e | [
"BSD-3-Clause"
] | 2 | 2017-04-10T09:34:18.000Z | 2021-02-25T05:35:40.000Z | __version__ = "0.7.15"
| 11.5 | 22 | 0.652174 | __version__ = "0.7.15"
| 0 | 0 | 0 |
62667c3d7687034bbce3f7e93685dc9d61db1e90 | 123 | py | Python | halotools/sim_manager/setup_package.py | pllim/halotools | 6499cff09e7e0f169e4f425ee265403f6be816e8 | [
"BSD-3-Clause"
] | 83 | 2015-01-15T14:54:16.000Z | 2021-12-09T11:28:02.000Z | halotools/sim_manager/setup_package.py | pllim/halotools | 6499cff09e7e0f169e4f425ee265403f6be816e8 | [
"BSD-3-Clause"
] | 579 | 2015-01-14T15:57:37.000Z | 2022-01-13T18:58:44.000Z | halotools/sim_manager/setup_package.py | pllim/halotools | 6499cff09e7e0f169e4f425ee265403f6be816e8 | [
"BSD-3-Clause"
] | 70 | 2015-01-14T15:15:58.000Z | 2021-12-22T18:18:31.000Z | """
"""
| 15.375 | 75 | 0.634146 | """
"""
def get_package_data():
return {
'halotools.sim_manager.tests': ['data/dummy_halocat_0.07812.list']}
| 91 | 0 | 23 |
59833554f2ba906974c362f5903e733e23c4744a | 876 | py | Python | algorithms/math/recursive_greatest_common_divisor.py | yashaswiadyalu/python | f12050990c064f20d27947084edd3ce57863efb6 | [
"MIT"
] | 204 | 2018-09-19T15:34:56.000Z | 2022-03-16T20:45:55.000Z | algorithms/math/recursive_greatest_common_divisor.py | yashaswiadyalu/python | f12050990c064f20d27947084edd3ce57863efb6 | [
"MIT"
] | 44 | 2018-10-02T03:02:29.000Z | 2021-10-06T15:18:08.000Z | algorithms/math/recursive_greatest_common_divisor.py | yashaswiadyalu/python | f12050990c064f20d27947084edd3ce57863efb6 | [
"MIT"
] | 177 | 2018-10-02T02:47:08.000Z | 2022-03-17T03:51:36.000Z | """
In mathematics, the greatest common divisor (gcd) of two or more integers,
which are not all zero, is the largest positive integer that divides each of the integers.
For example, the gcd of 8 and 12 is 4.
» https://en.wikipedia.org/wiki/Greatest_common_divisor
Due to limited recursion depth this algorithm is not suited for calculating the GCD of big integers.
"""
x = int(input("x = "))
y = int(input("y = "))
print(f"gcd({x}, {y}) = {recGCD(x,y)}")
| 32.444444 | 100 | 0.636986 | """
In mathematics, the greatest common divisor (gcd) of two or more integers,
which are not all zero, is the largest positive integer that divides each of the integers.
For example, the gcd of 8 and 12 is 4.
» https://en.wikipedia.org/wiki/Greatest_common_divisor
Due to limited recursion depth this algorithm is not suited for calculating the GCD of big integers.
"""
def recGCD(x, y, div = 0):
# Detemine which integer is greater and set the divisor accordingly
if div == 0:
if x > y:
div = x
else:
div = y
# If both integers can be divided without a remainder the gcd has been found
if x % div == 0 and y % div == 0:
return div
# Decrease divisor by one and try again
else:
return recGCD(x, y, div-1)
x = int(input("x = "))
y = int(input("y = "))
print(f"gcd({x}, {y}) = {recGCD(x,y)}")
| 393 | 0 | 23 |
60c94ed5f163fd983bec8804d12d89541e6a8831 | 1,397 | py | Python | mao_era/management/commands/reparent_pages.py | kingsdigitallab/mao-django | 79225212868f35d96d323ec8878bfd2ec9187978 | [
"MIT"
] | null | null | null | mao_era/management/commands/reparent_pages.py | kingsdigitallab/mao-django | 79225212868f35d96d323ec8878bfd2ec9187978 | [
"MIT"
] | null | null | null | mao_era/management/commands/reparent_pages.py | kingsdigitallab/mao-django | 79225212868f35d96d323ec8878bfd2ec9187978 | [
"MIT"
] | null | null | null | from django.core.management.base import BaseCommand
from django.db import transaction
from ...models import (HomePage, ObjectBiographiesPage, ObjectBiographyPage,
SourcePage, SourcesPage)
| 41.088235 | 76 | 0.604152 | from django.core.management.base import BaseCommand
from django.db import transaction
from ...models import (HomePage, ObjectBiographiesPage, ObjectBiographyPage,
SourcePage, SourcesPage)
class Command(BaseCommand):
help = 'Move all object biography pages and source pages under their ' \
'respective index pages (creating the latter, if necessary).'
def handle(self, *args, **options):
with transaction.atomic():
home = HomePage.objects.all()[0]
try:
biographies_page = ObjectBiographiesPage.objects.all()[0]
except IndexError:
biographies_page = ObjectBiographiesPage(
title='Object Biographies',
body='List of object biographies')
home.add_child(instance=biographies_page)
for biography_page in ObjectBiographyPage.objects.all():
biography_page.move(biographies_page, 'last-child')
try:
sources_page = SourcesPage.objects.all()[0]
except IndexError:
sources_page = SourcesPage(title='Sources',
body='List of sources')
home.add_child(instance=sources_page)
for source_page in SourcePage.objects.all():
source_page.move(sources_page, 'last-child')
| 980 | 181 | 23 |
53f3f04082698e9f1805080ec268f4a4de9614ee | 3,727 | py | Python | zhym/TestImg_hande_only_table1.py | yanmeizhao/MyMmdetection | ae369f88b1faa87b32688c7ca770ec67f0a5c8d1 | [
"Apache-2.0"
] | 1 | 2019-07-30T01:31:30.000Z | 2019-07-30T01:31:30.000Z | zhym/TestImg_hande_only_table1.py | yanmeizhao/MyMmdetection | ae369f88b1faa87b32688c7ca770ec67f0a5c8d1 | [
"Apache-2.0"
] | null | null | null | zhym/TestImg_hande_only_table1.py | yanmeizhao/MyMmdetection | ae369f88b1faa87b32688c7ca770ec67f0a5c8d1 | [
"Apache-2.0"
] | null | null | null | import os
import cv2
import mmcv
import numpy as np
from mmcv.image import imread, imwrite
from mmcv import color_val
from mmdet.apis import init_detector, inference_detector
config_file = 'configs_zhym/faster_rcnn_r50_fpn_1x_voc_handeonlytable.py'
checkpoint_file = 'work_dirs/faster_rcnn_r50_fpn_1x_handeonlytable/epoch_10.pth'
#config_file = 'configs_zhym/cascade_mask_rcnn_r101_fpn_1x_four_points.py'
#checkpoint_file = 'work_dirs/cascade_mask_rcnn_r101_fpn_1x/epoch_12.pth'
# build the model from a config file and a checkpoint file
model = init_detector(config_file, checkpoint_file, device='cuda:0')
# test a single image and show the results
img_root_dir = '/home/zhaoyanmei/data/HANDE/HandeOnlyTable/PDF4_new_JPEGs/'
#img_root_dir = '/home/zhaoyanmei/mmdetection/data/CoCoFourPoint/test/'
dst_dir = '/home/zhaoyanmei/data/HANDE/HandeOnlyTable/visualize_PDF4/'
dst_pred_txt = dst_dir + 'pred_result.txt'
pred_txt_file = open(dst_pred_txt, 'w')
for i, img_file in enumerate(os.listdir(img_root_dir)):
print(i)
img = os.path.join(img_root_dir, img_file)
result = inference_detector(model, img)
show_result(img, result, model.CLASSES, out_file=os.path.join(dst_dir, img_file))
# test a list of images and write the results to image files
#imgs = ['000000000060.jpg']
#for i, result in enumerate(inference_detector(model, imgs)):
# show_result(imgs[i], result, model.CLASSES, out_file='result_{}.jpg'.format(i))
| 39.231579 | 204 | 0.696002 | import os
import cv2
import mmcv
import numpy as np
from mmcv.image import imread, imwrite
from mmcv import color_val
from mmdet.apis import init_detector, inference_detector
config_file = 'configs_zhym/faster_rcnn_r50_fpn_1x_voc_handeonlytable.py'
checkpoint_file = 'work_dirs/faster_rcnn_r50_fpn_1x_handeonlytable/epoch_10.pth'
#config_file = 'configs_zhym/cascade_mask_rcnn_r101_fpn_1x_four_points.py'
#checkpoint_file = 'work_dirs/cascade_mask_rcnn_r101_fpn_1x/epoch_12.pth'
# build the model from a config file and a checkpoint file
model = init_detector(config_file, checkpoint_file, device='cuda:0')
# test a single image and show the results
img_root_dir = '/home/zhaoyanmei/data/HANDE/HandeOnlyTable/PDF4_new_JPEGs/'
#img_root_dir = '/home/zhaoyanmei/mmdetection/data/CoCoFourPoint/test/'
dst_dir = '/home/zhaoyanmei/data/HANDE/HandeOnlyTable/visualize_PDF4/'
dst_pred_txt = dst_dir + 'pred_result.txt'
pred_txt_file = open(dst_pred_txt, 'w')
def show_result(img, result, class_names, score_thr=0.5, out_file=None):
assert isinstance(class_names, (tuple, list))
img_name = os.path.basename(img)
img = mmcv.imread(img)
if isinstance(result, tuple):
bbox_result, segm_result = result
else:
bbox_result, segm_result = result, None
bboxes = np.vstack(bbox_result)
# draw bounding boxes
labels = [np.full(bbox.shape[0], i, dtype=np.int32) for i, bbox in enumerate(bbox_result)]
labels = np.concatenate(labels)
imshow_det_bboxes(
img.copy(),
bboxes,
labels,
img_name,
class_names=class_names,
score_thr=score_thr,
show=out_file is None,
out_file=out_file)
def imshow_det_bboxes(img, bboxes, labels, img_name, class_names=None, score_thr=0.7, bbox_color='green', text_color='green', thickness=1, font_scale=0.5, show=True, win_name='', wait_time=0, out_file=None):
    """Draw detection boxes and labels on an image and log each kept box.

    bboxes: (n, 4) or (n, 5) array (5th column = confidence score);
    labels: (n,) int class indices. Each kept box is appended as a CSV line
    "img_name,<bbox values...>,<label|score>" to the module-level pred_txt_file.
    Shows the image when show=True, writes it when out_file is given.
    """
    assert bboxes.ndim == 2
    assert labels.ndim == 1
    assert bboxes.shape[0] == labels.shape[0]
    assert bboxes.shape[1] == 4 or bboxes.shape[1] == 5
    img = imread(img)
    if score_thr > 0:
        # thresholding requires the confidence column
        assert bboxes.shape[1] == 5
        scores = bboxes[:, -1]
        inds = scores > score_thr
        # BUGFIX: was "bboxeses = bboxes[inds, :]" -- a typo that left bboxes
        # unfiltered while labels WERE filtered, so the zip below paired
        # unthresholded boxes with the wrong (truncated) label set.
        bboxes = bboxes[inds, :]
        labels = labels[inds]
    bbox_color = color_val(bbox_color)
    text_color = color_val(text_color)
    for bbox, label in zip(bboxes, labels):
        bbox_int = bbox.astype(np.int32)
        left_top = (bbox_int[0], bbox_int[1])
        right_bottom = (bbox_int[2], bbox_int[3])
        cv2.rectangle(img, left_top, right_bottom, bbox_color, thickness=thickness)
        label_text = class_names[label] if class_names is not None else 'cls {}'.format(label)
        if len(bbox) > 4:
            label_text += '|{:.02f}'.format(bbox[-1])
        cv2.putText(img, label_text, (bbox_int[0], bbox_int[1]-2), cv2.FONT_HERSHEY_COMPLEX, font_scale, text_color)
        # persist this prediction to the global results file
        bbox_str = [str(v) for v in bbox]
        bbox_str.insert(0, img_name)
        bbox_str.append(label_text)
        pred_txt_file.write(','.join(bbox_str) + '\n')
    if show:
        # BUGFIX: bare imshow() is not defined in this script (only imread and
        # imwrite are imported from mmcv.image); use mmcv.imshow.
        mmcv.imshow(img, win_name, wait_time)
    if out_file is not None:
        imwrite(img, out_file)
# Batch-run the detector over img_root_dir, writing annotated images into
# dst_dir (predictions are also appended to pred_txt_file by imshow_det_bboxes).
for i, img_file in enumerate(os.listdir(img_root_dir)):
    print(i)  # progress indicator
    img = os.path.join(img_root_dir, img_file)
    result = inference_detector(model, img)
    show_result(img, result, model.CLASSES, out_file=os.path.join(dst_dir, img_file))
# test a list of images and write the results to image files
#imgs = ['000000000060.jpg']
#for i, result in enumerate(inference_detector(model, imgs)):
#    show_result(imgs[i], result, model.CLASSES, out_file='result_{}.jpg'.format(i))
| 2,240 | 0 | 46 |
518f42be8ddf60dfeeab9843302688710e9fd4a0 | 948 | py | Python | done/18-4-python-oo-practice/serial.py | demohack/yute | 2fb136118733394e3595bf707cb32f1b7b2aede0 | [
"MIT"
] | null | null | null | done/18-4-python-oo-practice/serial.py | demohack/yute | 2fb136118733394e3595bf707cb32f1b7b2aede0 | [
"MIT"
] | 17 | 2021-03-24T14:59:50.000Z | 2022-03-05T23:52:31.000Z | done/18-4-python-oo-practice/serial.py | demohack/yute | 2fb136118733394e3595bf707cb32f1b7b2aede0 | [
"MIT"
] | null | null | null | """Python serial number generator."""
class SerialGenerator:
    """Machine to create unique incrementing serial numbers.

    >>> serial = SerialGenerator(start=100)
    >>> serial.generate()
    100
    >>> serial.generate()
    101
    >>> serial.generate()
    102
    >>> serial.reset()
    >>> serial.generate()
    100
    """

    def __init__(self, start=0):
        """Create a generator; the first serial issued will be `start`."""
        self.start = start
        self.next = start

    def __repr__(self):
        """Developer representation showing start and next serial."""
        return f"<SerialGenerator start={self.start} next={self.next}>"

    def generate(self):
        """Return the next serial number and advance the counter.

        BUGFIX: removed a stray debug print(self.next) -- it made the
        documented doctest fail (the value appeared twice in a REPL session).
        """
        n = self.next
        self.next += 1
        return n

    def reset(self):
        """Rewind so the next serial issued is the original start value."""
        self.next = self.start
# Module-level smoke test: issues serials 4 and 5 on import/run.
s = SerialGenerator(4)
s.generate()
s.generate()
| 20.170213 | 76 | 0.57384 | """Python serial number generator."""
class SerialGenerator:
    """Hand out ever-increasing serial numbers, restartable via reset().

    Each call to generate() returns the current serial (also echoed to
    stdout) and advances the internal counter by one.
    """
    def __init__(self, start=0):
        """Remember the starting value and prime the counter."""
        self.start = start
        self.next = start
    def __repr__(self):
        """String form used when inspecting instances."""
        return f"<SerialGenerator start={self.start} next={self.next}>"
    def generate(self):
        """Issue the current serial (printed to stdout) and advance."""
        issued = self.next
        print(issued)
        self.next = issued + 1
        return issued
    def reset(self):
        """Make the next serial equal to the original start value."""
        self.next = self.start
# Module-level smoke test: issues serials 4 and 5 on import/run.
s = SerialGenerator(4)
s.generate()
s.generate()
| 0 | 0 | 0 |
0ad29b03ae8b324bb53dd5a8e9ccb0f6355cebd6 | 107,999 | py | Python | pst.py | btolab/PANhunt | 3fca018d1c6e36d374c18d7345c4a71059c73d3a | [
"BSD-3-Clause"
] | null | null | null | pst.py | btolab/PANhunt | 3fca018d1c6e36d374c18d7345c4a71059c73d3a | [
"BSD-3-Clause"
] | null | null | null | pst.py | btolab/PANhunt | 3fca018d1c6e36d374c18d7345c4a71059c73d3a | [
"BSD-3-Clause"
] | null | null | null | #! /usr/bin/env python
#
# Copyright (c) 2014, Dionach Ltd. All rights reserved. See LICENSE file.
#
# By BB
# based on MS-PST Microsoft specification for PST file format [MS-PST].pdf v2.1
#
import struct, datetime, math, os, sys, unicodedata, re, argparse, itertools, string
import progressbar
# Accumulates non-fatal error messages encountered while parsing.
error_log_list = []

# Python 2/3 compatibility shims. BUGFIX: the bodies of this if/else were
# stripped in this copy (the bare "if ...:" / "else:" was a SyntaxError);
# restored from the full version of this module.
if sys.hexversion >= 0x03000000:
    def to_byte(x):
        """Py3: indexing bytes already yields an int, so this is identity."""
        return x
    def is_int(x):
        """Py3: there is a single integer type."""
        return isinstance(x, int)
else:
    to_byte = ord  # Py2: bytes indexing yields a 1-char str
    def is_int(x):
        """Py2: accept both int and long."""
        return isinstance(x, (int, long))
##############################################################################################################################
# _ _ _ ____ _ _ ___ _ ____ ______ _
# | \ | | ___ __| | ___ | _ \ __ _| |_ __ _| |__ __ _ ___ ___ / / \ | | _ \| __ ) \ | | __ _ _ _ ___ _ __
# | \| |/ _ \ / _` |/ _ \ | | | |/ _` | __/ _` | '_ \ / _` / __|/ _ \ | || \| | | | | _ \| | | | / _` | | | |/ _ \ '__|
# | |\ | (_) | (_| | __/ | |_| | (_| | || (_| | |_) | (_| \__ \ __/ | || |\ | |_| | |_) | | | |__| (_| | |_| | __/ |
# |_| \_|\___/ \__,_|\___| |____/ \__,_|\__\__,_|_.__/ \__,_|___/\___| | ||_| \_|____/|____/| | |_____\__,_|\__, |\___|_|
# \_\ /_/ |___/
##############################################################################################################################
class NBD:
    """Node Database (NDB) layer: BTree-indexed access to the PST's nodes/blocks.

    BUGFIX: __init__, fetch_page and fetch_block were missing from this copy
    (the remaining methods call them); restored from the full version of this
    module. Also fixed the duplicate-subnode check in fetch_subnodes, which
    compared a NID object against integer dict keys and so could never fire.
    """

    def __init__(self, fd, header):
        """fd: open PST file object; header: parsed header providing root BREFs."""
        self.fd = fd
        self.header = header
        # cache leaf entries of the Node BTree and Block BTree, keyed by nid/bid
        self.nbt_entries = self.get_page_leaf_entries(NBTENTRY, self.header.root.BREFNBT.ib)
        self.bbt_entries = self.get_page_leaf_entries(BBTENTRY, self.header.root.BREFBBT.ib)

    def fetch_page(self, offset):
        """Read and parse the fixed-size page at the absolute file offset."""
        self.fd.seek(offset)
        return Page(self.fd.read(Page.PAGE_SIZE), self.header.is_ansi)

    def fetch_block(self, bid):
        """Read, validate and (if encrypted) decode the block with this BID."""
        try:
            bbt_entry = self.bbt_entries[bid.bid]
        except KeyError:
            raise PSTException('Invalid BBTEntry: %s' % bid)
        offset = bbt_entry.BREF.ib
        data_size = bbt_entry.cb
        # block trailer is 12 bytes (ANSI) or 16 bytes (Unicode)
        if self.header.is_ansi:
            block_trailer_size = 12
        else: # unicode
            block_trailer_size = 16
        # on-disk blocks are padded so data + trailer aligns on 64 bytes
        size_diff = (data_size + block_trailer_size) % 64
        if size_diff == 0:
            block_size = data_size + block_trailer_size
        else:
            block_size = data_size + block_trailer_size + 64 - size_diff
        self.fd.seek(offset)
        return Block(self.fd.read(block_size), offset, data_size, self.header.is_ansi, bid, self.header.bCryptMethod)

    def fetch_all_block_data(self, bid):
        """Return list of data-block payloads for bid, expanding XBLOCK/XXBLOCK trees."""
        datas = []
        block = self.fetch_block(bid)
        if block.block_type == Block.btypeData:
            datas.append(block.data)
        elif block.block_type == Block.btypeXBLOCK:
            for xbid in block.rgbid:
                xblock = self.fetch_block(xbid)
                if xblock.block_type != Block.btypeData:
                    raise PSTException('Expecting data block, got block type %s' % xblock.block_type)
                datas.append(xblock.data)
        elif block.block_type == Block.btypeXXBLOCK:
            for xxbid in block.rgbid:
                xxblock = self.fetch_block(xxbid)
                if xxblock.block_type != Block.btypeXBLOCK:
                    raise PSTException('Expecting XBLOCK, got block type %s' % xxblock.block_type)
                datas.extend(self.fetch_all_block_data(xxbid))
        else:
            raise PSTException('Invalid block type (not data/XBLOCK/XXBLOCK), got %s' % block.block_type)
        return datas

    def fetch_subnodes(self, bid):
        """Return {nid value: SLENTRY} for the subnode BTree rooted at bid."""
        subnodes = {}
        block = self.fetch_block(bid)
        if block.block_type == Block.btypeSLBLOCK:
            for slentry in block.rgentries:
                # BUGFIX: compare the integer nid, not the NID object
                if slentry.nid.nid in subnodes:
                    raise PSTException('Duplicate subnode %s' % slentry.nid)
                subnodes[slentry.nid.nid] = slentry
        elif block.block_type == Block.btypeSIBLOCK:
            for sientry in block.rgentries:
                subnodes.update(self.fetch_subnodes(sientry.bid))
        else:
            raise PSTException('Invalid block type (not SLBLOCK/SIBLOCK), got %s' % block.block_type)
        return subnodes

    def get_page_leaf_entries(self, entry_type, page_offset):
        """Recursively collect {key: entry} leaves; entry_type is NBTENTRY or BBTENTRY."""
        leaf_entries = {}
        page = self.fetch_page(page_offset)
        for entry in page.rgEntries:
            if isinstance(entry, entry_type):
                if entry.key in leaf_entries:
                    raise PSTException('Invalid Leaf Key %s' % entry)
                leaf_entries[entry.key] = entry
            elif isinstance(entry, BTENTRY):
                leaf_entries.update(self.get_page_leaf_entries(entry_type, entry.BREF.ib))
            else:
                raise PSTException('Invalid Entry Type')
        return leaf_entries
return leaf_entries
################################################################################################################################################################################
# _ _ _ _____ _ _ _ ____ _ _ ___ _____ ______ _
# | | (_)___| |_ ___ |_ _|_ _| |__ | | ___ ___ __ _ _ __ __| | | _ \ _ __ ___ _ __ ___ _ __| |_(_) ___ ___ / / | |_ _| _ \ \ | | __ _ _ _ ___ _ __
# | | | / __| __/ __| | |/ _` | '_ \| |/ _ \/ __| / _` | '_ \ / _` | | |_) | '__/ _ \| '_ \ / _ \ '__| __| |/ _ \/ __| | || | | | | |_) | | | | / _` | | | |/ _ \ '__|
# | |___| \__ \ |_\__ \_ | | (_| | |_) | | __/\__ \_ | (_| | | | | (_| | | __/| | | (_) | |_) | __/ | | |_| | __/\__ \ | || |___| | | __/| | | |__| (_| | |_| | __/ |
# |_____|_|___/\__|___( ) |_|\__,_|_.__/|_|\___||___( ) \__,_|_| |_|\__,_| |_| |_| \___/| .__/ \___|_| \__|_|\___||___/ | ||_____|_| |_| | | |_____\__,_|\__, |\___|_|
# |/ |/ |_| \_\ /_/ |___/
################################################################################################################################################################################
class LTP:
    """LTP (Lists, Tables and Properties) layer built on top of the NDB layer.

    NOTE(review): only this class shell survives in this copy of the file --
    the implementation body is missing and must be restored from the full
    upstream pst.py before use.
    """
#############################################################################################################################
# __ __ _ _
# | \/ | ___ ___ ___ __ _ __ _(_)_ __ __ _ | | __ _ _ _ ___ _ __
# | |\/| |/ _ \/ __/ __|/ _` |/ _` | | '_ \ / _` | | | / _` | | | |/ _ \ '__|
# | | | | __/\__ \__ \ (_| | (_| | | | | | (_| | | |__| (_| | |_| | __/ |
# |_| |_|\___||___/___/\__,_|\__, |_|_| |_|\__, | |_____\__,_|\__, |\___|_|
# |___/ |___/ |___/
#############################################################################################################################
class Messaging:
    """Messaging layer (folders, messages, attachments) above the LTP layer.

    NOTE(review): only this class shell survives in this copy of the file --
    the implementation body is missing and must be restored from the full
    upstream pst.py before use.
    """
#############################################################################################################################
# ____ ____ _____ _
# | _ \/ ___|_ _| | | __ _ _ _ ___ _ __
# | |_) \___ \ | | | | / _` | | | |/ _ \ '__|
# | __/ ___) || | | |__| (_| | |_| | __/ |
# |_| |____/ |_| |_____\__,_|\__, |\___|_|
# |___/
#############################################################################################################################
###################################################################################################################################
# _ _ _ _ _ _ _ _____ _ _
# | | | | |_(_) (_) |_ _ _ | ___| _ _ __ ___| |_(_) ___ _ __ ___
# | | | | __| | | | __| | | | | |_ | | | | '_ \ / __| __| |/ _ \| '_ \/ __|
# | |_| | |_| | | | |_| |_| | | _|| |_| | | | | (__| |_| | (_) | | | \__ \
# \___/ \__|_|_|_|\__|\__, | |_| \__,_|_| |_|\___|\__|_|\___/|_| |_|___/
# |___/
###################################################################################################################################
def get_unused_filename(filepath):
    """Return filepath unchanged if free, else the first '<stem>-N<ext>' not on disk."""
    if not os.path.exists(filepath):
        return filepath
    stem, ext = os.path.splitext(filepath)
    counter = 1
    candidate = '%s-%s%s' % (stem, counter, ext)
    while os.path.exists(candidate):
        counter += 1
        candidate = '%s-%s%s' % (stem, counter, ext)
    return candidate
###############################################################################################################################
#
# _____ _ _____ _ _
# |_ _|__ ___| |_ | ___| _ _ __ ___| |_(_) ___ _ __ ___
# | |/ _ \/ __| __| | |_ | | | | '_ \ / __| __| |/ _ \| '_ \/ __|
# | | __/\__ \ |_ | _|| |_| | | | | (__| |_| | (_) | | | \__ \
# |_|\___||___/\__| |_| \__,_|_| |_|\___|\__|_|\___/|_| |_|___/
#
###############################################################################################################################
def test_dump_pst(pst_filepath, output_path):
    """Dump every email (as text) and every attachment from a PST into output_path."""
    pst = PST(pst_filepath)
    print(pst.get_pst_status())

    progress = get_simple_progressbar('Messages: ')
    pst.export_all_messages(output_path, progress, pst.get_total_message_count())
    progress.finish()

    progress = get_simple_progressbar('Attachments: ')
    pst.export_all_attachments(output_path, progress, pst.get_total_attachment_count())
    progress.finish()

    pst.close()
###################################################################################################################################
# __ __ _
# | \/ | __ _(_)_ __
# | |\/| |/ _` | | '_ \
# | | | | (_| | | | | |
# |_| |_|\__,_|_|_| |_|
#
###################################################################################################################################
if __name__=="__main__":
input_pst_file = ''
output_folder = 'dump'
arg_parser = argparse.ArgumentParser(prog='pst', description='PST: parses PST files. Can dump emails and attachments.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
arg_parser.add_argument('-i', dest='input_pst_file', default=input_pst_file, help='input PST file to dump')
arg_parser.add_argument('-o', dest='output_folder', default=output_folder, help='output folder')
arg_parser.add_argument('-t', dest='debug', help=argparse.SUPPRESS, action='store_true', default=False) # hidden argument
args = arg_parser.parse_args()
if not args.debug:
input_pst_file = args.input_pst_file
output_folder = args.output_folder
if not os.path.exists(input_pst_file):
print('Input PST file does not exist')
sys.exit(1)
if not os.path.exists(output_folder):
print('Output folder does not exist')
sys.exit(1)
test_dump_pst(input_pst_file,output_folder)
else: # debug
pass
#test_folder = 'D:\\'
#test_status_pst(test_folder+'sample.pst')
#test_dump_pst(test_folder+'sample.pst', test_folder+'dump')
#test_folder_psts(test_folder)
| 48.08504 | 263 | 0.612182 | #! /usr/bin/env python
#
# Copyright (c) 2014, Dionach Ltd. All rights reserved. See LICENSE file.
#
# By BB
# based on MS-PST Microsoft specification for PST file format [MS-PST].pdf v2.1
#
import struct, datetime, math, os, sys, unicodedata, re, argparse, itertools, string
import progressbar
class PSTException(Exception):
    """Raised for any structural or parsing error encountered in a PST file."""
    pass
# Accumulates non-fatal error messages encountered while parsing.
error_log_list = []
# Python 2/3 compatibility shims for working with individual bytes/ints.
if sys.hexversion >= 0x03000000:
    def to_byte(x):
        # Py3: indexing bytes already yields an int
        return x
    def is_int(x):
        # Py3: single integer type
        return isinstance(x, int)
else:
    to_byte = ord  # Py2: bytes indexing yields a 1-char str
    def is_int(x):
        # Py2: accept both int and long
        return isinstance(x, (int, long))
##############################################################################################################################
# _ _ _ ____ _ _ ___ _ ____ ______ _
# | \ | | ___ __| | ___ | _ \ __ _| |_ __ _| |__ __ _ ___ ___ / / \ | | _ \| __ ) \ | | __ _ _ _ ___ _ __
# | \| |/ _ \ / _` |/ _ \ | | | |/ _` | __/ _` | '_ \ / _` / __|/ _ \ | || \| | | | | _ \| | | | / _` | | | |/ _ \ '__|
# | |\ | (_) | (_| | __/ | |_| | (_| | || (_| | |_) | (_| \__ \ __/ | || |\ | |_| | |_) | | | |__| (_| | |_| | __/ |
# |_| \_|\___/ \__,_|\___| |____/ \__,_|\__\__,_|_.__/ \__,_|___/\___| | ||_| \_|____/|____/| | |_____\__,_|\__, |\___|_|
# \_\ /_/ |___/
##############################################################################################################################
class NID:
    """Node ID ([MS-PST] 2.2.2.1): 32-bit identifier of a node in the NDB layer.

    Low 5 bits encode the node type (NID_TYPE_*); the remaining bits are the
    index. Also defines the well-known fixed NIDs (NID_MESSAGE_STORE etc.).
    """
    # nidType values (low 5 bits)
    NID_TYPE_HID = 0x00
    NID_TYPE_INTERNAL = 0x01
    NID_TYPE_NORMAL_FOLDER = 0x02
    NID_TYPE_SEARCH_FOLDER = 0x03
    NID_TYPE_NORMAL_MESSAGE = 0x04
    NID_TYPE_ATTACHMENT = 0x05
    NID_TYPE_SEARCH_UPDATE_QUEUE = 0x06
    NID_TYPE_SEARCH_CRITERIA_OBJECT = 0x07
    NID_TYPE_ASSOC_MESSAGE = 0x08
    NID_TYPE_CONTENTS_TABLE_INDEX = 0x0a
    NID_TYPE_RECEIVE_FOLDER_TABLE = 0x0b
    NID_TYPE_OUTGOING_QUEUE_TABLE = 0x0c
    NID_TYPE_HIERARCHY_TABLE = 0x0d
    NID_TYPE_CONTENTS_TABLE = 0x0e
    NID_TYPE_ASSOC_CONTENTS_TABLE = 0x0f
    NID_TYPE_SEARCH_CONTENTS_TABLE = 0x10
    NID_TYPE_ATTACHMENT_TABLE = 0x11
    NID_TYPE_RECIPIENT_TABLE = 0x12
    NID_TYPE_SEARCH_TABLE_INDEX = 0x13
    NID_TYPE_LTP = 0x1f
    # well-known fixed node ids
    NID_MESSAGE_STORE = 0x21
    NID_NAME_TO_ID_MAP = 0x61
    NID_NORMAL_FOLDER_TEMPLATE = 0xA1
    NID_SEARCH_FOLDER_TEMPLATE = 0xC1
    NID_ROOT_FOLDER = 0x122
    NID_SEARCH_MANAGEMENT_QUEUE = 0x1E1
    NID_SEARCH_ACTIVITY_LIST = 0x201
    NID_RESERVED1 = 0x241
    NID_SEARCH_DOMAIN_OBJECT = 0x261
    NID_SEARCH_GATHERER_QUEUE = 0x281
    NID_SEARCH_GATHERER_DESCRIPTOR = 0x2A1
    NID_RESERVED2 = 0x2E1
    NID_RESERVED3 = 0x301
    NID_SEARCH_GATHERER_FOLDER_QUEUE = 0x321

    def __init__(self, bytes_or_nid):
        # accepts either the raw 32-bit int or 4 little-endian bytes
        if is_int(bytes_or_nid):
            self.nid = bytes_or_nid
        else:
            self.nid = struct.unpack('I', bytes_or_nid)[0]
        self.nidType = self.nid & 0x1f
        self.nidIndex = self.nid & 0xffffffe0
        # discriminator flags shared with HID (both can appear in HNID fields)
        self.is_hid = False
        self.is_nid = True
    def __repr__(self):
        return 'nid: %s, %s' % (hex(self.nid), hex(self.nidType))
class BID:
    """Block ID ([MS-PST] 2.2.2.2): identifies a block; bit 1 marks internal blocks."""
    def __init__(self, bytes):
        # ANSI files store BIDs in 4 bytes, Unicode files in 8
        layout = 'I' if len(bytes) == 4 else 'Q'
        raw = struct.unpack(layout, bytes)[0]
        if raw & 1:  # clear the reserved "A" bit
            raw -= 1
        self.bid = raw
        self.is_internal = bool(raw & 2)  # "B" bit: internal (metadata) block
    def __repr__(self):
        return 'bid: %s %s' % (self.bid, 'I' if self.is_internal else 'E')
class BREF:
    """Block reference ([MS-PST] 2.2.2.4): a BID plus the block's file offset (ib)."""
    def __init__(self, bytes):
        # 8 bytes total for ANSI files, 16 for Unicode
        layout = '4sI' if len(bytes) == 8 else '8sQ'
        raw_bid, self.ib = struct.unpack(layout, bytes)
        self.bid = BID(raw_bid)
    def __repr__(self):
        return '%s, ib: %s' % (self.bid, hex(self.ib))
class Page:
    """A fixed 512-byte NDB page; parses BBT/NBT pages into their entries.

    Non-BTree page types (maps etc.) are validated but their bodies are not
    parsed; only the trailer fields are kept for them.
    """
    PAGE_SIZE = 512
    # page types from the PAGETRAILER ptype field
    ptypeBBT = 0x80
    ptypeNBT = 0x81
    ptypeFMap = 0x82
    ptypePMap = 0x83
    ptypeAMap = 0x84
    ptypeFPMap = 0x85
    ptypeDL = 0x86
    def __init__(self, bytes, is_ansi):
        # fixed 512 bytes
        if len(bytes) != Page.PAGE_SIZE:
            raise PSTException('Invalid Page size')
        # trailer layout (and field order) differs between ANSI and Unicode
        if is_ansi:
            self.ptype, self.ptypeRepeat, self.wSig, self.bid, self.dwCRC = struct.unpack('BBHII', bytes[-12:])
        else: # unicode
            self.ptype, self.ptypeRepeat, self.wSig, self.dwCRC, self.bid = struct.unpack('BBHIQ', bytes[-16:])
        if self.ptype < Page.ptypeBBT or self.ptype > Page.ptypeDL:
            raise PSTException('Invalid Page Type %s ' % hex(self.ptype))
        if self.ptype != self.ptypeRepeat:
            raise PSTException('Page Type does not match Page Type Repeat %s!=%s ' % (hex(self.ptype), hex(self.ptypeRepeat)))
        if self.ptype in (Page.ptypeBBT, Page.ptypeNBT):
            if is_ansi:
                self.cEnt, self.cEntMax, self.cbEnt, self.cLevel = struct.unpack('BBBB', bytes[-16:-12])
                # rgEntries 492 (cLevel>0) or 496 bytes (cLevel=0)
                entry_size = 12
            else: # unicode
                self.cEnt, self.cEntMax, self.cbEnt, self.cLevel = struct.unpack('BBBB', bytes[-24:-20])
                # rgEntries 488 bytes
                entry_size = 24
            if self.cLevel == 0:
                # leaf page: BBTENTRY for the block BTree, NBTENTRY for the node BTree
                if self.ptype == Page.ptypeBBT:
                    entry_type = BBTENTRY
                else: # ptypeNBT
                    entry_type = NBTENTRY
                    # NBTENTRYs are larger: 16 (ANSI) / 32 (Unicode) bytes
                    entry_size = entry_size + entry_size//3
            else: # BTENTRY
                entry_type = BTENTRY
            self.rgEntries = []
            for i in range(self.cEnt): # self.cbEnt is size of each entry which may be different to entry_size
                self.rgEntries.append(entry_type(bytes[i*self.cbEnt:i*self.cbEnt+entry_size]))
    def __repr__(self):
        # NOTE(review): cEnt/cLevel only exist for BBT/NBT pages; repr of other
        # page types would raise AttributeError -- confirm callers only repr BTree pages
        return 'PageType: %s, Entries: %s, Level: %s' % (hex(self.ptype), self.cEnt, self.cLevel)
class BTENTRY:
    """Intermediate BTree page entry: a search key plus a BREF to the child page."""
    def __init__(self, bytes):
        # ANSI entries are 12 bytes (4-byte key), Unicode 24 (8-byte key)
        if len(bytes) == 12: # ansi
            self.btkey = struct.unpack('I',bytes[:4])[0]
            self.BREF = BREF(bytes[4:])
        else: # unicode 24
            self.btkey = struct.unpack('Q',bytes[:8])[0]
            self.BREF = BREF(bytes[8:])
    def __repr__(self):
        return '%s' % (self.BREF)
class BBTENTRY:
    """Block BTree leaf entry: BREF plus byte count (cb) and ref count (cRef)."""
    def __init__(self, bytes):
        if len(bytes) == 12: #ansi
            self.BREF = BREF(bytes[:8])
            self.cb, self.cRef = struct.unpack('HH',bytes[8:12])
        else: # unicode (24)
            self.BREF = BREF(bytes[:16])
            self.cb, self.cRef = struct.unpack('HH',bytes[16:20])
        # blocks are looked up by their integer bid value
        self.key = self.BREF.bid.bid
    def __repr__(self):
        return '%s, data size: %s' % (self.BREF, self.cb)
class NBTENTRY:
    """Node BTree leaf entry: nid plus data/subnode BIDs and the parent nid."""
    def __init__(self, bytes):
        if len(bytes) == 16: #ansi
            self.nid, self.bidData, self.bidSub, self.nidParent = struct.unpack('4s4s4s4s',bytes)
        else: # unicode (32)
            # 4 bytes of padding follow the nid; trailing dwPadding dropped via [:-4]
            self.nid, padding, self.bidData, self.bidSub, self.nidParent = struct.unpack('4s4s8s8s4s',bytes[:-4])
        self.nid = NID(self.nid)
        self.bidData = BID(self.bidData)
        self.bidSub = BID(self.bidSub)
        self.nidParent = NID(self.nidParent)
        # nodes are looked up by their integer nid value
        self.key = self.nid.nid
    def __repr__(self):
        return '%s, bidData: %s, bidSub: %s' % (self.nid, self.bidData, self.bidSub)
class SLENTRY:
    """Subnode leaf entry (SLBLOCK): nid plus data/subnode BIDs."""
    def __init__(self, bytes):
        if len(bytes) == 12: #ansi
            self.nid, self.bidData, self.bidSub = struct.unpack('4s4s4s',bytes)
        else: # unicode 24
            # 4 bytes of padding follow the nid in Unicode files
            self.nid, padding, self.bidData, self.bidSub = struct.unpack('4s4s8s8s',bytes)
        self.nid = NID(self.nid)
        self.bidData = BID(self.bidData)
        self.bidSub = BID(self.bidSub)
    def __repr__(self):
        return '%s %s sub%s' % (self.nid, self.bidData, self.bidSub)
class SIENTRY:
    """Subnode intermediate entry (SIBLOCK): nid plus BID of the child SLBLOCK."""
    def __init__(self, bytes):
        if len(bytes) == 8: #ansi
            self.nid, self.bid = struct.unpack('4s4s',bytes)
        else: # unicode 16
            # 4 bytes of padding follow the nid in Unicode files
            self.nid, padding, self.bid = struct.unpack('4s4s8s',bytes)
        self.nid = NID(self.nid)
        self.bid = BID(self.bid)
class Block:
    """An on-disk NDB block: data block (optionally permute-decoded) or an
    internal XBLOCK/XXBLOCK/SLBLOCK/SIBLOCK parsed into its entries."""
    # this has the first 512 entries removed, as decoding only uses from 512 onwards
    mpbbCryptFrom512 = (71, 241, 180, 230, 11, 106, 114, 72, 133, 78, 158, 235, 226, 248, 148, 83, 224, 187, 160, 2, 232, 90, 9, 171, 219, 227, 186, 198, 124, 195, 16, 221,
                57, 5, 150, 48, 245, 55, 96, 130, 140, 201, 19, 74, 107, 29, 243, 251, 143, 38, 151, 202, 145, 23, 1, 196, 50, 45, 110, 49, 149, 255, 217, 35,
                209, 0, 94, 121, 220, 68, 59, 26, 40, 197, 97, 87, 32, 144, 61, 131, 185, 67, 190, 103, 210, 70, 66, 118, 192, 109, 91, 126, 178, 15, 22, 41,
                60, 169, 3, 84, 13, 218, 93, 223, 246, 183, 199, 98, 205, 141, 6, 211, 105, 92, 134, 214, 20, 247, 165, 102, 117, 172, 177, 233, 69, 33, 112, 12,
                135, 159, 116, 164, 34, 76, 111, 191, 31, 86, 170, 46, 179, 120, 51, 80, 176, 163, 146, 188, 207, 25, 28, 167, 99, 203, 30, 77, 62, 75, 27, 155,
                79, 231, 240, 238, 173, 58, 181, 89, 4, 234, 64, 85, 37, 81, 229, 122, 137, 56, 104, 82, 123, 252, 39, 174, 215, 189, 250, 7, 244, 204, 142, 95,
                239, 53, 156, 132, 43, 21, 213, 119, 52, 73, 182, 18, 10, 127, 113, 136, 253, 157, 24, 65, 125, 147, 216, 88, 44, 206, 254, 36, 175, 222, 184, 54,
                200, 161, 128, 166, 153, 152, 168, 47, 14, 129, 101, 115, 228, 194, 162, 138, 212, 225, 17, 208, 8, 139, 42, 242, 237, 154, 100, 63, 193, 108, 249, 236)
    # byte translation table for the NDB_CRYPT_PERMUTE decode, built once
    if sys.hexversion >= 0x03000000:
        decrypt_table = bytes.maketrans(bytearray(range(256)), bytearray(mpbbCryptFrom512))
    else:
        decrypt_table = string.maketrans(b''.join(map(chr, range(256))), b''.join(map(chr, mpbbCryptFrom512)))
    # block_type values
    btypeData = 0
    btypeXBLOCK = 1
    btypeXXBLOCK = 2
    btypeSLBLOCK = 3
    btypeSIBLOCK = 4
    def __init__(self, bytes, offset, data_size, is_ansi, bid_check, bCryptMethod):
        """bytes: padded on-disk block incl. trailer; data_size: payload size from
        the BBT entry; bid_check: expected BID; bCryptMethod: header crypt method."""
        self.is_ansi = is_ansi
        self.offset = offset # for debugging
        # trailer layout differs between ANSI (12 bytes) and Unicode (16 bytes)
        if self.is_ansi: # 12
            self.cb, self.wSig, self.bid, self.dwCRC = struct.unpack('HH4sI',bytes[-12:])
            bid_size = 4
            slentry_size = 12
            sientry_size = 8
            sl_si_entries_offset = 4 # [MS-PST] WRONG for SLBLOCK and SIBLOCK for ANSI: there is no 4 byte padding
        else: # unicode 16
            self.cb, self.wSig, self.dwCRC, self.bid = struct.unpack('HHI8s',bytes[-16:])
            bid_size = 8
            slentry_size = 24
            sientry_size = 16
            sl_si_entries_offset = 8
        self.bid = BID(self.bid)
        if self.bid.bid != bid_check.bid:
            raise PSTException('Block bid %s != ref bid %s' % (self.bid, bid_check))
        if data_size != self.cb:
            raise PSTException('BBT Entry data size %s != Block data size %s' % (data_size, self.cb) )
        if not self.bid.is_internal:
            # external block: raw data, possibly permute-encoded
            self.block_type = Block.btypeData
            self.btype = 0
            self.cLevel = 0
            if bCryptMethod == 1: #NDB_CRYPT_PERMUTE
                self.data = bytes[:data_size].translate(Block.decrypt_table)
            else: # no data encoding
                self.data = bytes[:data_size] # data block
        else: # XBLOCK, XXBLOCK, SLBLOCK or SIBLOCK
            self.btype, self.cLevel, self.cEnt = struct.unpack('BBH',bytes[:4])
            if self.btype == 1: #XBLOCK, XXBLOCK
                self.lcbTotal = struct.unpack('I',bytes[4:8])[0]
                if self.cLevel == 1: #XBLOCK
                    self.block_type = Block.btypeXBLOCK
                elif self.cLevel == 2: #XXBLOCK
                    self.block_type = Block.btypeXXBLOCK
                else:
                    raise PSTException('Invalid Block Level %s' % self.cLevel)
                # child BIDs follow the header
                self.rgbid = []
                for i in range(self.cEnt):
                    self.rgbid.append(BID(bytes[8+i*bid_size:8+(i+1)*bid_size]))
            elif self.btype == 2: # SLBLOCK, SIBLOCK
                self.rgentries = []
                if self.cLevel == 0: #SLBLOCK
                    self.block_type = Block.btypeSLBLOCK
                    for i in range(self.cEnt):
                        self.rgentries.append(SLENTRY(bytes[sl_si_entries_offset + i*slentry_size:sl_si_entries_offset + (i+1)*slentry_size]))
                elif self.cLevel ==1: #SIBLOCK
                    self.block_type = Block.btypeSIBLOCK
                    for i in range(self.cEnt):
                        self.rgentries.append(SIENTRY(bytes[sl_si_entries_offset + i*sientry_size:sl_si_entries_offset + (i+1)*sientry_size]))
                else:
                    raise PSTException('Invalid Block Level %s' % self.cLevel)
            else:
                raise PSTException('Invalid Block Type %s' % self.btype)
    def __repr__(self):
        return 'Block %s %s %s' % (self.bid, self.btype, self.cLevel)
class NBD:
    """Node Database (NDB) layer: BTree-indexed access to the PST's nodes/blocks.

    Walks the Node BTree (NBT) and Block BTree (BBT) once at construction and
    caches their leaf entries keyed by nid/bid.
    """

    def __init__(self, fd, header):
        """fd: open PST file object; header: parsed header providing root BREFs."""
        self.fd = fd
        self.header = header
        self.nbt_entries = self.get_page_leaf_entries(NBTENTRY, self.header.root.BREFNBT.ib)
        self.bbt_entries = self.get_page_leaf_entries(BBTENTRY, self.header.root.BREFBBT.ib)

    def fetch_page(self, offset):
        """Read and parse the fixed-size page at the absolute file offset."""
        self.fd.seek(offset)
        return Page(self.fd.read(Page.PAGE_SIZE), self.header.is_ansi)

    def fetch_block(self, bid):
        """Read, validate and (if encrypted) decode the block with this BID."""
        try:
            bbt_entry = self.bbt_entries[bid.bid]
        except KeyError:
            raise PSTException('Invalid BBTEntry: %s' % bid)
        offset = bbt_entry.BREF.ib
        data_size = bbt_entry.cb
        # block trailer is 12 bytes (ANSI) or 16 bytes (Unicode)
        if self.header.is_ansi:
            block_trailer_size = 12
        else: # unicode
            block_trailer_size = 16
        # on-disk blocks are padded so data + trailer aligns on 64 bytes
        size_diff = (data_size + block_trailer_size) % 64
        if size_diff == 0:
            block_size = data_size + block_trailer_size
        else:
            block_size = data_size + block_trailer_size + 64 - size_diff
        self.fd.seek(offset)
        return Block(self.fd.read(block_size), offset, data_size, self.header.is_ansi, bid, self.header.bCryptMethod)

    def fetch_all_block_data(self, bid):
        """Return list of data-block payloads for bid, expanding XBLOCK/XXBLOCK trees."""
        datas = []
        block = self.fetch_block(bid)
        if block.block_type == Block.btypeData:
            datas.append(block.data)
        elif block.block_type == Block.btypeXBLOCK:
            for xbid in block.rgbid:
                xblock = self.fetch_block(xbid)
                if xblock.block_type != Block.btypeData:
                    raise PSTException('Expecting data block, got block type %s' % xblock.block_type)
                datas.append(xblock.data)
        elif block.block_type == Block.btypeXXBLOCK:
            for xxbid in block.rgbid:
                xxblock = self.fetch_block(xxbid)
                if xxblock.block_type != Block.btypeXBLOCK:
                    raise PSTException('Expecting XBLOCK, got block type %s' % xxblock.block_type)
                datas.extend(self.fetch_all_block_data(xxbid))
        else:
            raise PSTException('Invalid block type (not data/XBLOCK/XXBLOCK), got %s' % block.block_type)
        return datas

    def fetch_subnodes(self, bid):
        """Return {nid value: SLENTRY} for the subnode BTree rooted at bid."""
        subnodes = {}
        block = self.fetch_block(bid)
        if block.block_type == Block.btypeSLBLOCK:
            for slentry in block.rgentries:
                # BUGFIX: the old check compared the NID *object* against the
                # integer dict keys, so duplicates were silently overwritten
                # instead of raising; compare the integer nid value instead.
                if slentry.nid.nid in subnodes:
                    raise PSTException('Duplicate subnode %s' % slentry.nid)
                subnodes[slentry.nid.nid] = slentry
        elif block.block_type == Block.btypeSIBLOCK:
            for sientry in block.rgentries:
                subnodes.update(self.fetch_subnodes(sientry.bid))
        else:
            raise PSTException('Invalid block type (not SLBLOCK/SIBLOCK), got %s' % block.block_type)
        return subnodes

    def get_page_leaf_entries(self, entry_type, page_offset):
        """Recursively collect {key: entry} leaves; entry_type is NBTENTRY or BBTENTRY."""
        leaf_entries = {}
        page = self.fetch_page(page_offset)
        for entry in page.rgEntries:
            if isinstance(entry, entry_type):
                if entry.key in leaf_entries:
                    raise PSTException('Invalid Leaf Key %s' % entry)
                leaf_entries[entry.key] = entry
            elif isinstance(entry, BTENTRY):
                leaf_entries.update(self.get_page_leaf_entries(entry_type, entry.BREF.ib))
            else:
                raise PSTException('Invalid Entry Type')
        return leaf_entries
################################################################################################################################################################################
# _ _ _ _____ _ _ _ ____ _ _ ___ _____ ______ _
# | | (_)___| |_ ___ |_ _|_ _| |__ | | ___ ___ __ _ _ __ __| | | _ \ _ __ ___ _ __ ___ _ __| |_(_) ___ ___ / / | |_ _| _ \ \ | | __ _ _ _ ___ _ __
# | | | / __| __/ __| | |/ _` | '_ \| |/ _ \/ __| / _` | '_ \ / _` | | |_) | '__/ _ \| '_ \ / _ \ '__| __| |/ _ \/ __| | || | | | | |_) | | | | / _` | | | |/ _ \ '__|
# | |___| \__ \ |_\__ \_ | | (_| | |_) | | __/\__ \_ | (_| | | | | (_| | | __/| | | (_) | |_) | __/ | | |_| | __/\__ \ | || |___| | | __/| | | |__| (_| | |_| | __/ |
# |_____|_|___/\__|___( ) |_|\__,_|_.__/|_|\___||___( ) \__,_|_| |_|\__,_| |_| |_| \___/| .__/ \___|_| \__|_|\___||___/ | ||_____|_| |_| | | |_____\__,_|\__, |\___|_|
# |/ |/ |_| \_\ /_/ |___/
################################################################################################################################################################################
class HID:
    """Heap ID: locates one allocation inside a heap node (HN)."""
    def __init__(self, bytes):
        raw_index, block_index = struct.unpack('HH', bytes)
        # low 5 bits carry the type (must be zero for an HID); a valid
        # allocation index is never zero
        self.hidType = raw_index & 0x1F
        self.hidIndex = (raw_index >> 5) & 0x7FF
        self.hidBlockIndex = block_index
        # discriminator flags shared with NID (both can appear in HNID fields)
        self.is_hid = True
        self.is_nid = False
class HNPAGEMAP:
    """Heap-node page map: allocation offsets for one heap block."""
    def __init__(self, bytes):
        self.cAlloc, self.cFree = struct.unpack('HH', bytes[:4])
        # cAlloc+1 offsets are stored: the extra one marks the start of free space
        count = self.cAlloc + 1
        self.rgibAlloc = list(struct.unpack('%sH' % count, bytes[4:4 + 2 * count]))
class HN:
    """Heap-on-Node: heap structure laid over a node's data blocks; client
    signatures identify what the heap stores (TC table, BTH tree or PC)."""
    # bClientSig values
    bTypeTC = 0x7C
    bTypeBTH = 0xB5
    bTypePC = 0xBC
    def __init__(self, nbt_entry, ltp, datas):
        """datas = list of data sections from blocks"""
        self.nbt_entry = nbt_entry
        self.datas = datas
        self.ltp = ltp
        self.hnpagemaps = []
        for i in range(len(datas)):
            bytes = datas[i]
            # only the first block carries the full HNHDR; later blocks have a
            # short header that just locates the page map
            if i == 0: # HNHDR
                ibHnpm, self.bSig, self.bClientSig, self.hidUserRoot, self.rgbFillLevel = struct.unpack('HBB4sI', bytes[:12])
                self.hidUserRoot = HID(self.hidUserRoot)
                if self.bSig != 0xEC:
                    raise PSTException('Invalid HN Signature %s' % self.bSig)
            else: # HNPAGEHDR or HNBITMAPHDR
                ibHnpm = struct.unpack('H', bytes[:2])[0]
            self.hnpagemaps.append(HNPAGEMAP(bytes[ibHnpm:]))
        # subnode SLENTRYs
        self.subnodes = None
        if self.nbt_entry.bidSub.bid != 0:
            self.subnodes = self.ltp.nbd.fetch_subnodes(self.nbt_entry.bidSub)
    def get_hid_data(self, hid):
        # hidIndex is 1-based: allocation N spans rgibAlloc[N-1]..rgibAlloc[N]
        start_offset = self.hnpagemaps[hid.hidBlockIndex].rgibAlloc[hid.hidIndex-1]
        end_offset = self.hnpagemaps[hid.hidBlockIndex].rgibAlloc[hid.hidIndex]
        return self.datas[hid.hidBlockIndex][start_offset:end_offset]
    def __repr__(self):
        return 'HN: %s, Blocks: %s' % (self.nbt_entry, len(self.datas))
class BTHData:
    """Leaf record of a BTree-on-Heap: raw key bytes plus raw data bytes."""
    def __init__(self, key, data):
        self.key, self.data = key, data
class BTHIntermediate:
    """Intermediate BTH record: key plus HID of the next-level node and its level."""
    def __init__(self, key, hidNextLevel, bIdxLevel):
        self.key, self.hidNextLevel, self.bIdxLevel = key, hidNextLevel, bIdxLevel
class BTH:
    """BTree-on-Heap: collects all leaf BTHData records reachable from a BTHHEADER."""

    def __init__(self, hn, bth_hid):
        """hn = HN heapnode, bth_hid is hid of BTH header"""
        #BTHHEADER
        bth_header_bytes = hn.get_hid_data(bth_hid)
        self.bType, self.cbKey, self.cbEnt, self.bIdxLevels, self.hidRoot = struct.unpack('BBBB4s', bth_header_bytes)
        self.hidRoot = HID(self.hidRoot)
        if self.bType != HN.bTypeBTH:
            raise PSTException('Invalid BTH Type %s' % self.bType)
        self.bth_datas = []
        # BUGFIX: "self.hidRoot != 0" compared an HID object to an int and was
        # therefore always True (HID defines no __eq__). An all-zero hidRoot
        # marks an empty BTH; a valid allocation index is never zero (see the
        # comment in HID), so test the parsed index instead.
        if self.hidRoot.hidIndex != 0:
            node_bytes = hn.get_hid_data(self.hidRoot)
            bth_record_list = self.get_bth_records(node_bytes, self.bIdxLevels)
            if self.bIdxLevels == 0: # root records are already the leaves
                self.bth_datas = bth_record_list
            else:
                # depth-first walk of the intermediate levels down to the leaves
                bth_working_stack = bth_record_list
                while bth_working_stack:
                    bth_intermediate = bth_working_stack.pop()
                    node_bytes = hn.get_hid_data(bth_intermediate.hidNextLevel)
                    bth_record_list = self.get_bth_records(node_bytes, bth_intermediate.bIdxLevel - 1)
                    if bth_intermediate.bIdxLevel - 1 == 0: # leafs
                        self.bth_datas.extend(bth_record_list)
                    else:
                        bth_working_stack.extend(bth_record_list)

    def get_bth_records(self, bytes, bIdxLevel):
        """Parse one BTH node into BTHData leaves (level 0) or BTHIntermediate records."""
        bth_record_list = []
        if bIdxLevel == 0: # leaf
            record_size = self.cbKey + self.cbEnt
            records = len(bytes) // record_size
            for i in range(records):
                key, data = struct.unpack('%ss%ss' % (self.cbKey, self.cbEnt), bytes[i*record_size:(i+1)*record_size])
                bth_record_list.append(BTHData(key, data))
        else: # intermediate
            record_size = self.cbKey + 4
            records = len(bytes) // record_size
            for i in range(records):
                key, hidNextLevel = struct.unpack('%ss4s' % self.cbKey, bytes[i*record_size:(i+1)*record_size])
                hidNextLevel = HID(hidNextLevel)
                bth_record_list.append(BTHIntermediate(key, hidNextLevel, bIdxLevel))
        return bth_record_list
class PCBTHData:
    """One property-context record: property id/type plus the decoded value,
    resolved inline, via an HID in the heap, or via a subnode, depending on size."""
    def __init__(self, bth_data, hn):
        self.wPropId = struct.unpack('H', bth_data.key)[0]
        self.wPropType, self.dwValueHnid = struct.unpack('H4s', bth_data.data)
        ptype = hn.ltp.ptypes[self.wPropType]
        if not ptype.is_variable and not ptype.is_multi:
            # fixed-size value: <=4 bytes is stored inline in dwValueHnid,
            # otherwise dwValueHnid is an HID into the heap
            if ptype.byte_count <= 4:
                self.value = ptype.value(self.dwValueHnid[:ptype.byte_count])
            else:
                self.hid = HID(self.dwValueHnid)
                self.value = ptype.value(hn.get_hid_data(self.hid))
        else:
            # variable/multi-valued: HNID is an HID (nidType 0) or a subnode NID
            if NID(self.dwValueHnid).nidType == NID.NID_TYPE_HID:
                self.hid = HID(self.dwValueHnid)
                self.value = ptype.value(hn.get_hid_data(self.hid))
            else:
                self.subnode_nid = NID(self.dwValueHnid)
                if self.subnode_nid.nid in hn.subnodes.keys():
                    subnode_nid_bid = hn.subnodes[self.subnode_nid.nid].bidData
                else:
                    raise PSTException('Invalid NID subnode reference %s' % self.subnode_nid)
                datas = hn.ltp.nbd.fetch_all_block_data(subnode_nid_bid)
                self.value = ptype.value(b''.join(datas))
    def __repr__(self):
        return '%s (%s) = %s' % (hex(self.wPropId), hex(self.wPropType), repr(self.value))
class PTypeEnum:
    """MAPI property type codes; the 0x1000 bit marks multi-valued variants."""
    PtypInteger16 = 0x02
    PtypInteger32 = 0x03
    PtypFloating32 = 0x04
    PtypFloating64 = 0x05
    PtypCurrency = 0x06
    PtypFloatingTime = 0x07
    PtypErrorCode = 0x0A
    PtypBoolean = 0x0B
    PtypInteger64 = 0x14
    PtypString = 0x1F
    PtypString8 = 0x1E
    PtypTime = 0x40
    PtypGuid = 0x48
    PtypServerId = 0xFB
    PtypRestriction = 0xFD
    PtypRuleAction = 0xFE
    PtypBinary = 0x102
    # multi-valued variants (single-value code | 0x1000)
    PtypMultipleInteger16 = 0x1002
    PtypMultipleInteger32 = 0x1003
    PtypMultipleFloating32 = 0x1004
    PtypMultipleFloating64 = 0x1005
    PtypMultipleCurrency = 0x1006
    PtypMultipleFloatingTime = 0x1007
    PtypMultipleInteger64 = 0x1014
    PtypMultipleString = 0x101F
    PtypMultipleString8 = 0x101E
    PtypMultipleTime = 0x1040
    PtypMultipleGuid = 0x1048
    PtypMultipleBinary = 0x1102
    PtypUnspecified = 0x0
    PtypNull = 0x01
    PtypObject = 0x0D
class PType:
    """Decoder for one MAPI property type.

    byte_count is the fixed on-disk size (0 when variable), is_variable marks
    variable-length types, is_multi marks multi-valued types.
    """
    def __init__(self, ptype, byte_count, is_variable, is_multi):
        self.ptype, self.byte_count, self.is_variable, self.is_multi = ptype, byte_count, is_variable, is_multi
    def value(self, bytes):
        """Decode raw property bytes into a Python value for this ptype.

        Raises PSTException for property types this parser does not implement.
        """
        if self.ptype == PTypeEnum.PtypInteger16:
            return struct.unpack('h', bytes)[0]
        elif self.ptype == PTypeEnum.PtypInteger32:
            return struct.unpack('i', bytes)[0]
        elif self.ptype == PTypeEnum.PtypFloating32:
            return struct.unpack('f', bytes)[0]
        elif self.ptype == PTypeEnum.PtypFloating64:
            return struct.unpack('d', bytes)[0]
        elif self.ptype == PTypeEnum.PtypCurrency:
            raise PSTException('PtypCurrency value not implemented')
        elif self.ptype == PTypeEnum.PtypFloatingTime:
            return self.get_floating_time(bytes)
        elif self.ptype == PTypeEnum.PtypErrorCode:
            return struct.unpack('I', bytes)[0]
        elif self.ptype == PTypeEnum.PtypBoolean:
            return (struct.unpack('B', bytes)[0] != 0)
        elif self.ptype == PTypeEnum.PtypInteger64:
            return struct.unpack('q', bytes)[0]
        elif self.ptype == PTypeEnum.PtypString:
            return bytes.decode('utf-16-le') # unicode
        elif self.ptype == PTypeEnum.PtypString8:
            return bytes
        elif self.ptype == PTypeEnum.PtypTime:
            return self.get_time(bytes)
        elif self.ptype == PTypeEnum.PtypGuid:
            return bytes
        elif self.ptype == PTypeEnum.PtypServerId:
            raise PSTException('PtypServerId value not implemented')
        elif self.ptype == PTypeEnum.PtypRestriction:
            raise PSTException('PtypRestriction value not implemented')
        elif self.ptype == PTypeEnum.PtypRuleAction:
            raise PSTException('PtypRuleAction value not implemented')
        elif self.ptype == PTypeEnum.PtypBinary:
            return bytes
        elif self.ptype == PTypeEnum.PtypMultipleInteger16:
            count = len(bytes) // 2
            return [struct.unpack('h', bytes[i*2:(i+1)*2])[0] for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleInteger32:
            count = len(bytes) // 4
            return [struct.unpack('i', bytes[i*4:(i+1)*4])[0] for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleFloating32:
            count = len(bytes) // 4
            return [struct.unpack('f', bytes[i*4:(i+1)*4])[0] for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleFloating64:
            # BUGFIX: was 'ccount = len(bytes) // 8', leaving 'count' undefined
            # and raising NameError on every PtypMultipleFloating64 property
            count = len(bytes) // 8
            return [struct.unpack('d', bytes[i*8:(i+1)*8])[0] for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleCurrency:
            raise PSTException('PtypMultipleCurrency value not implemented')
        elif self.ptype == PTypeEnum.PtypMultipleFloatingTime:
            count = len(bytes) // 8
            return [self.get_floating_time(bytes[i*8:(i+1)*8]) for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleInteger64:
            count = len(bytes) // 8
            return [struct.unpack('q', bytes[i*8:(i+1)*8])[0] for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleString:
            ulCount, rgulDataOffsets = self.get_multi_value_offsets(bytes)
            s = []
            for i in range(ulCount):
                s.append(bytes[rgulDataOffsets[i]:rgulDataOffsets[i+1]].decode('utf-16-le'))
            return s
        elif self.ptype == PTypeEnum.PtypMultipleString8:
            ulCount, rgulDataOffsets = self.get_multi_value_offsets(bytes)
            datas = []
            for i in range(ulCount):
                datas.append(bytes[rgulDataOffsets[i]:rgulDataOffsets[i+1]])
            return datas
        elif self.ptype == PTypeEnum.PtypMultipleTime:
            count = len(bytes) // 8
            return [self.get_time(bytes[i*8:(i+1)*8]) for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleGuid:
            count = len(bytes) // 16
            return [bytes[i*16:(i+1)*16] for i in range(count)]
        elif self.ptype == PTypeEnum.PtypMultipleBinary:
            ulCount, rgulDataOffsets = self.get_multi_value_offsets(bytes)
            datas = []
            for i in range(ulCount):
                datas.append(bytes[rgulDataOffsets[i]:rgulDataOffsets[i+1]])
            return datas
        elif self.ptype == PTypeEnum.PtypUnspecified:
            return bytes
        elif self.ptype == PTypeEnum.PtypNull:
            return None
        elif self.ptype == PTypeEnum.PtypObject:
            return bytes[:4]
        else:
            raise PSTException('Invalid PTypeEnum for value %s ' % self.ptype)
    def get_floating_time(self, bytes):
        """Decode an OLE date: fractional days since 1899-12-30."""
        return datetime.datetime(year=1899, month=12, day=30) + datetime.timedelta(days=struct.unpack('d', bytes)[0])
    def get_time(self, bytes):
        """Decode a FILETIME: 100ns intervals since 1601-01-01."""
        return datetime.datetime(year=1601, month=1, day=1) + datetime.timedelta(microseconds = struct.unpack('q', bytes)[0]/10.0)
    def get_multi_value_offsets(self, bytes):
        """Return (count, offsets) for a multi-valued buffer; a sentinel offset
        equal to len(bytes) is appended so entry i spans offsets[i]:offsets[i+1]."""
        ulCount = struct.unpack('I', bytes[:4])[0]
        rgulDataOffsets = [struct.unpack('I', bytes[(i+1)*4:(i+2)*4])[0] for i in range(ulCount)]
        rgulDataOffsets.append(len(bytes))
        return ulCount, rgulDataOffsets
class PropIdEnum:
    """MAPI/PST property identifiers used by this parser.

    Values are the 16-bit property ids; the property type word is stored
    separately. PidTagAttachDataBinary and PidTagAttachDataObject deliberately
    share 0x3701 (same tag, different property types).
    Fix: removed a duplicate definition of PidTagMapiFormComposeCommand that
    redundantly re-assigned the same value further down the class body.
    """
    PidTagNameidBucketCount = 0x0001
    PidTagNameidStreamGuid = 0x0002
    PidTagNameidStreamEntry = 0x0003
    PidTagNameidStreamString = 0x0004
    PidTagNameidBucketBase = 0x1000
    PidTagItemTemporaryFlags = 0x1097
    PidTagPstBestBodyProptag = 0x661D
    PidTagPstIpmsubTreeDescendant = 0x6705
    PidTagPstSubTreeContainer = 0x6772
    PidTagLtpParentNid = 0x67F1
    PidTagLtpRowId = 0x67F2
    PidTagLtpRowVer = 0x67F3
    PidTagPstPassword = 0x67FF
    PidTagMapiFormComposeCommand = 0x682F
    PidTagRecordKey = 0x0FF9
    PidTagDisplayName = 0x3001
    PidTagIpmSubTreeEntryId = 0x35E0
    PidTagIpmWastebasketEntryId = 0x35E3
    PidTagFinderEntryId = 0x35E7
    PidTagContentCount = 0x3602
    PidTagContentUnreadCount = 0x3603
    PidTagSubfolders = 0x360A
    PidTagReplItemid = 0x0E30
    PidTagReplChangenum = 0x0E33
    PidTagReplVersionHistory = 0x0E34
    PidTagReplFlags = 0x0E38
    PidTagContainerClass = 0x3613
    PidTagPstHiddenCount = 0x6635
    PidTagPstHiddenUnread = 0x6636
    PidTagImportance = 0x0017
    PidTagMessageClassW = 0x001A
    PidTagSensitivity = 0x0036
    PidTagSubjectW = 0x0037
    PidTagClientSubmitTime = 0x0039
    PidTagSentRepresentingNameW = 0x0042
    PidTagMessageToMe = 0x0057
    PidTagMessageCcMe = 0x0058
    PidTagConversationTopicW = 0x0070
    PidTagConversationIndex = 0x0071
    PidTagDisplayCcW = 0x0E03
    PidTagDisplayToW = 0x0E04
    PidTagMessageDeliveryTime = 0x0E06
    PidTagMessageFlags = 0x0E07
    PidTagMessageSize = 0x0E08
    PidTagMessageStatus = 0x0E17
    PidTagReplCopiedfromVersionhistory = 0x0E3C
    PidTagReplCopiedfromItemid = 0x0E3D
    PidTagLastModificationTime = 0x3008
    PidTagSecureSubmitFlags = 0x65C6
    PidTagOfflineAddressBookName = 0x6800
    PidTagSendOutlookRecallReport = 0x6803
    PidTagOfflineAddressBookTruncatedProperties = 0x6805
    PidTagViewDescriptorFlags = 0x7003
    PidTagViewDescriptorLinkTo = 0x7004
    PidTagViewDescriptorViewFolder = 0x7005
    PidTagViewDescriptorName = 0x7006
    PidTagViewDescriptorVersion = 0x7007
    PidTagCreationTime = 0x3007
    PidTagSearchKey = 0x300B
    PidTagRecipientType = 0x0c15
    PidTagResponsibility = 0x0E0F
    PidTagObjectType = 0x0FFE
    PidTagEntryID = 0x0FFF
    PidTagAddressType = 0x3002
    PidTagEmailAddress = 0x3003
    PidTagDisplayType = 0x3900
    PidTag7BitDisplayName = 0x39FF
    PidTagSendRichInfo = 0x3A40
    PidTagAttachmentSize = 0x0E20
    PidTagAttachFilename = 0x3704
    PidTagAttachMethod = 0x3705
    PidTagRenderingPosition = 0x370B
    PidTagSenderName = 0x0C1A
    PidTagRead = 0x0E69
    PidTagHasAttachments = 0x0E1B
    PidTagBody = 0x1000
    PidTagRtfCompressed = 0x1009
    PidTagAttachDataBinary = 0x3701
    PidTagAttachDataObject = 0x3701
    PidTagOriginalDisplayTo = 0x0074
    PidTagTransportMessageHeaders = 0x007D
    PidTagSenderSmtpAddress = 0x5D01
    PidTagSentRepresentingSmtpAddress = 0x5D02
    PidTagAttachMimeTag = 0x370E
    PidTagAttachExtension = 0x3703
    PidTagAttachLongFilename = 0x3707
class PC: # Property Context
    """Property Context: a map of property id -> PCBTHData built from a BTH
    with 2-byte keys and 6-byte entries; entry-id-like properties are wrapped
    in EntryID objects."""
    def __init__(self, hn):
        self.hn = hn
        if hn.bClientSig != HN.bTypePC:
            raise PSTException('Invalid HN bClientSig, not bTypePC, is %s' % hn.bClientSig)
        self.bth = BTH(hn, hn.hidUserRoot)
        # PC BTHs always use 2-byte keys (property id) and 6-byte entries
        if self.bth.cbKey != 2:
            raise PSTException('Invalid PC BTH key size: %s' % self.bth.cbKey)
        if self.bth.cbEnt != 6:
            raise PSTException('Invalid PC BTH data size: %s' % self.bth.cbEnt)
        self.props = {}
        for bth_data in self.bth.bth_datas:
            pc_prop = PCBTHData(bth_data, hn)
            # these properties hold serialized ENTRYID structures
            if pc_prop.wPropId in (PropIdEnum.PidTagFinderEntryId, PropIdEnum.PidTagIpmSubTreeEntryId, PropIdEnum.PidTagIpmWastebasketEntryId, PropIdEnum.PidTagEntryID):
                pc_prop.value = EntryID(pc_prop.value)
            self.props[pc_prop.wPropId] = pc_prop
    def getval(self, propid):
        """Return the value of property propid, or None when absent."""
        if propid in self.props.keys():
            return self.props[propid].value
        else:
            return None
    def __repr__(self):
        s = 'PC %s\n' % self.hn
        s += '\n'.join(['Property %s' % self.props[wPropId] for wPropId in sorted(self.props.keys())])
        return s
class TCOLDESC:
    """Table Context column descriptor.

    Note: the 4-byte column tag is serialized as (wPropType, wPropId), i.e.
    type word first, little-endian.
    """
    def __init__(self, bytes):
        fields = struct.unpack('HHHBB', bytes)
        self.wPropType = fields[0]   # property type word
        self.wPropId = fields[1]     # property id word
        self.ibData = fields[2]      # offset of the cell within a row
        self.cbData = fields[3]      # size of the cell in bytes
        self.iBit = fields[4]        # bit index into the cell-existence bitmap
    def __repr__(self):
        return 'Tag: %s/%s, Offset+Size: %s+%s' % (hex(self.wPropId), hex(self.wPropType), self.ibData, self.cbData)
class TCROWID:
    """A row-index record: maps a row id to its position in the row matrix."""
    def __init__(self, bth_data):
        # The 4-byte BTH key is the row identifier; hierarchy TCs interpret
        # the same bytes as an NID.
        self.dwRowID = struct.unpack('I', bth_data.key)[0]
        self.nid = NID(bth_data.key)
        # ANSI files store the row index in 2 bytes, Unicode files in 4.
        index_fmt = 'H' if len(bth_data.data) == 2 else 'I'
        self.dwRowIndex = struct.unpack(index_fmt, bth_data.data)[0]
class TC: # Table Context
    """Table Context: a 2-D table stored in a heap node.

    Rows are located through a RowIndex BTH and read out of a row matrix that
    lives either in the heap itself or in a subnode block chain.
    """
    # Indices into rgib: end offsets of the 4-byte, 2-byte and 1-byte column
    # groups, and of the cell-existence bitmap (i.e. the full row width).
    TCI_4b = 0
    TCI_2b = 1
    TCI_1b = 2
    TCI_bm = 3
    def __init__(self, hn):
        self.hn = hn
        if hn.bClientSig != HN.bTypeTC:
            raise PSTException('Invalid HN bClientSig, not bTypeTC, is %s' % hn.bClientSig)
        # TCINFO header lives at the heap node's user root HID
        tcinfo_bytes = hn.get_hid_data(hn.hidUserRoot)
        self.bType, self.cCols = struct.unpack('BB', tcinfo_bytes[:2])
        if self.bType != HN.bTypeTC:
            raise PSTException('Invalid TCINFO bType, not bTypeTC, is %s' % self.bType)
        self.rgib = struct.unpack('HHHH', tcinfo_bytes[2:10])
        self.hidRowIndex, self.hnidRows, self.hidIndex = struct.unpack('4s4s4s', tcinfo_bytes[10:22])
        self.hidRowIndex = HID(self.hidRowIndex)
        # hnidRows is an HNID: an in-heap HID when the rows fit in the heap,
        # otherwise an NID referencing a subnode block chain
        if NID(self.hnidRows).nidType == NID.NID_TYPE_HID:
            self.hnidRows = HID(self.hnidRows)
        else:
            self.hnidRows = NID(self.hnidRows)
        self.rgTCOLDESC = []
        # column descriptors follow the fixed 22-byte TCINFO header, 8 bytes each
        for i in range(self.cCols):
            self.rgTCOLDESC.append(TCOLDESC(tcinfo_bytes[22+i*8:22+(i+1)*8]))
        self.setup_row_index()
        self.setup_row_matrix()
    def setup_row_index(self):
        """Read the RowIndex BTH into self.RowIndex."""
        self.RowIndex = {} # key is dwRowIndex (0-based position), value is a TCROWID
        if not (self.hnidRows.is_hid and self.hnidRows.hidIndex == 0): # an all-zero HID means an empty table
            row_index_bth = BTH(self.hn, self.hidRowIndex)
            if row_index_bth.cbKey != 4:
                raise PSTException('Invalid TC RowIndex key size %s' % row_index_bth.cbKey)
            for bth_data in row_index_bth.bth_datas:
                tcrowid = TCROWID(bth_data)
                self.RowIndex[tcrowid.dwRowIndex] = tcrowid
    def setup_row_matrix(self):
        """Read every row's cells into self.RowMatrix: dwRowID -> {wPropId: value}."""
        self.RowMatrix = {}
        if self.RowIndex:
            if self.hn.ltp.nbd.header.is_ansi:
                size_BlockTrailer = 12
            else: # unicode
                size_BlockTrailer = 16
            row_size = self.rgib[TC.TCI_bm]
            # rows never straddle an 8 KiB block boundary
            RowsPerBlock = int(math.floor((8192.0 - size_BlockTrailer) / row_size))
            if self.hnidRows.is_hid:
                row_matrix_datas = [self.hn.get_hid_data(self.hnidRows)] # block data list
            else:
                if self.hnidRows.nid in self.hn.subnodes.keys():
                    subnode_nid_bid = self.hn.subnodes[self.hnidRows.nid].bidData
                else:
                    raise PSTException('Row Matrix HNID not in Subnodes: %s' % self.hnidRows.nid)
                row_matrix_datas = self.hn.ltp.nbd.fetch_all_block_data(subnode_nid_bid)
            for irow in range(len(self.RowIndex)):
                BlockIndex = irow // RowsPerBlock
                RowIndex = irow % RowsPerBlock
                row_bytes = row_matrix_datas[BlockIndex][RowIndex * row_size:(RowIndex+1) * row_size]
                dwRowID = struct.unpack('I', row_bytes[:4])[0]
                # cell-existence bitmap: one bit per column, stored after the 1-byte columns
                rgbCEB = row_bytes[self.rgib[TC.TCI_1b]:]
                rowvals = {}
                for tcoldesc in self.rgTCOLDESC:
                    # bit set => this row has a value for this column
                    is_fCEB = ((to_byte(rgbCEB[tcoldesc.iBit // 8]) & (1 << (7 - (tcoldesc.iBit % 8)))) != 0)
                    if is_fCEB:
                        data_bytes = row_bytes[tcoldesc.ibData:tcoldesc.ibData+tcoldesc.cbData]
                    else:
                        data_bytes = None
                    if tcoldesc.wPropId in rowvals.keys():
                        raise PSTException('Property ID %s already in row data' % hex(tcoldesc.wPropId))
                    rowvals[tcoldesc.wPropId] = self.get_row_cell_value(data_bytes, tcoldesc)
                self.RowMatrix[dwRowID] = rowvals
    def get_row_cell_value(self, data_bytes, tcoldesc):
        """Decode one cell: inline value, in-heap HID data, or subnode data."""
        if data_bytes is None:
            return None
        else:
            ptype = self.hn.ltp.ptypes[tcoldesc.wPropType]
            if not ptype.is_variable and not ptype.is_multi:
                if ptype.byte_count <= 8:
                    # fixed-size values up to 8 bytes are stored inline in the row
                    return ptype.value(data_bytes)
                else:
                    hid = HID(data_bytes)
                    return ptype.value(self.hn.get_hid_data(hid))
            else:
                if NID(data_bytes).nidType == NID.NID_TYPE_HID:
                    # variable-size data held in the heap node
                    hid = HID(data_bytes)
                    return ptype.value(self.hn.get_hid_data(hid))
                else:
                    # data too large for the heap: stored in a subnode block chain
                    subnode_nid = NID(data_bytes)
                    if subnode_nid.nid in self.hn.subnodes.keys():
                        subnode_nid_bid = self.hn.subnodes[subnode_nid.nid].bidData
                    else:
                        raise PSTException('Row Matrix Value HNID Subnode invalid: %s' % subnode_nid)
                    datas = self.hn.ltp.nbd.fetch_all_block_data(subnode_nid_bid)
                    return ptype.value(b''.join(datas))
    def get_row_ID(self, RowIndex):
        """Return the dwRowID for a 0-based row index."""
        return self.RowIndex[RowIndex].dwRowID
    def getval(self, RowIndex, wPropId):
        """Return the value of wPropId in row RowIndex, or None when absent."""
        dwRowID = self.get_row_ID(RowIndex)
        rowvals = self.RowMatrix[dwRowID]
        if wPropId in rowvals.keys():
            return rowvals[wPropId]
        else:
            return None
    def __repr__(self):
        s = 'TC Rows: %s, %s\n' % (len(self.RowIndex), self.hn)
        s += 'Columns: ' + ''.join([' %s' % tcoldesc for tcoldesc in self.rgTCOLDESC])
        s += '\nData:\n' + '\n'.join(['%s: %s' % (hex(dwRowID), rowvals) for dwRowID,rowvals in self.RowMatrix.items()])
        return s
class LTP:
    """LTP layer: builds Property Contexts and Table Contexts on top of the
    node/block database (NBD), and owns the property-type decoder table."""
    def __init__(self, nbd):
        self.nbd = nbd
        # PType(ptype, byte_count, is_variable, is_multi) per property type
        self.ptypes = {
            PTypeEnum.PtypInteger16:PType(PTypeEnum.PtypInteger16, 2, False, False),
            PTypeEnum.PtypInteger32:PType(PTypeEnum.PtypInteger32, 4, False, False),
            PTypeEnum.PtypFloating32:PType(PTypeEnum.PtypFloating32, 4, False, False),
            PTypeEnum.PtypFloating64:PType(PTypeEnum.PtypFloating64, 8, False, False),
            PTypeEnum.PtypCurrency:PType(PTypeEnum.PtypCurrency, 8, False, False),
            PTypeEnum.PtypFloatingTime:PType(PTypeEnum.PtypFloatingTime, 8, False, False),
            PTypeEnum.PtypErrorCode:PType(PTypeEnum.PtypErrorCode, 4, False, False),
            PTypeEnum.PtypBoolean:PType(PTypeEnum.PtypBoolean, 1, False, False),
            PTypeEnum.PtypInteger64:PType(PTypeEnum.PtypInteger64, 8, False, False),
            PTypeEnum.PtypString:PType(PTypeEnum.PtypString, 0, True, False),
            PTypeEnum.PtypString8:PType(PTypeEnum.PtypString8, 0, True, False),
            PTypeEnum.PtypTime:PType(PTypeEnum.PtypTime, 8, False, False),
            PTypeEnum.PtypGuid:PType(PTypeEnum.PtypGuid, 16, False, False),
            PTypeEnum.PtypServerId:PType(PTypeEnum.PtypServerId, 2, False, True),
            PTypeEnum.PtypRestriction:PType(PTypeEnum.PtypRestriction, 0, True, False),
            PTypeEnum.PtypRuleAction:PType(PTypeEnum.PtypRuleAction, 2, False, True),
            PTypeEnum.PtypBinary:PType(PTypeEnum.PtypBinary, 2, False, True),
            PTypeEnum.PtypMultipleInteger16:PType(PTypeEnum.PtypMultipleInteger16, 2, False, True),
            PTypeEnum.PtypMultipleInteger32:PType(PTypeEnum.PtypMultipleInteger32, 2, False, True),
            PTypeEnum.PtypMultipleFloating32:PType(PTypeEnum.PtypMultipleFloating32, 2, False, True),
            PTypeEnum.PtypMultipleFloating64:PType(PTypeEnum.PtypMultipleFloating64, 2, False, True),
            PTypeEnum.PtypMultipleCurrency:PType(PTypeEnum.PtypMultipleCurrency, 2, False, True),
            PTypeEnum.PtypMultipleFloatingTime:PType(PTypeEnum.PtypMultipleFloatingTime, 2, False, True),
            PTypeEnum.PtypMultipleInteger64:PType(PTypeEnum.PtypMultipleInteger64, 2, False, True),
            PTypeEnum.PtypMultipleString:PType(PTypeEnum.PtypMultipleString, 2, True, True),
            PTypeEnum.PtypMultipleString8:PType(PTypeEnum.PtypMultipleString8, 2, True, True),
            PTypeEnum.PtypMultipleTime:PType(PTypeEnum.PtypMultipleTime, 2, False, True),
            PTypeEnum.PtypMultipleGuid:PType(PTypeEnum.PtypMultipleGuid, 2, False, True),
            PTypeEnum.PtypMultipleBinary:PType(PTypeEnum.PtypMultipleBinary, 2, False, True),
            PTypeEnum.PtypUnspecified:PType(PTypeEnum.PtypUnspecified, 0, False, False),
            PTypeEnum.PtypNull:PType(PTypeEnum.PtypNull, 0, False, False),
            PTypeEnum.PtypObject:PType(PTypeEnum.PtypObject, 4, False, True)
        }
    def get_pc_by_nid(self, nid):
        """Build the Property Context for a top-level node."""
        nbt_entry = self.nbd.nbt_entries[nid.nid]
        datas = self.nbd.fetch_all_block_data(nbt_entry.bidData)
        hn = HN(nbt_entry, self, datas)
        return PC(hn)
    def get_pc_by_slentry(self, slentry):
        """Build the Property Context stored in a subnode (SLENTRY)."""
        datas = self.nbd.fetch_all_block_data(slentry.bidData)
        hn = HN(slentry, self, datas)
        return PC(hn)
    def get_tc_by_nid(self, nid):
        """Build the Table Context for a top-level node."""
        nbt_entry = self.nbd.nbt_entries[nid.nid]
        datas = self.nbd.fetch_all_block_data(nbt_entry.bidData)
        hn = HN(nbt_entry, self, datas)
        return TC(hn)
    def get_tc_by_slentry(self, slentry):
        """Build the Table Context stored in a subnode (SLENTRY)."""
        datas = self.nbd.fetch_all_block_data(slentry.bidData)
        hn = HN(slentry, self, datas)
        return TC(hn)
    def strip_SubjectPrefix(self, Subject):
        """Strip the 2-byte control prefix (leading 0x01 marker plus prefix
        length) that PSTs prepend to subject strings; pass through otherwise."""
        if Subject and ord(Subject[:1]) == 0x01:
            return Subject[2:]
        else:
            return Subject
#############################################################################################################################
# __ __ _ _
# | \/ | ___ ___ ___ __ _ __ _(_)_ __ __ _ | | __ _ _ _ ___ _ __
# | |\/| |/ _ \/ __/ __|/ _` |/ _` | | '_ \ / _` | | | / _` | | | |/ _ \ '__|
# | | | | __/\__ \__ \ (_| | (_| | | | | | (_| | | |__| (_| | |_| | __/ |
# |_| |_|\___||___/___/\__,_|\__, |_|_| |_|\__, | |_____\__,_|\__, |\___|_|
# |___/ |___/ |___/
#############################################################################################################################
class EntryID:
    """A PST ENTRYID: 4 flag bytes, a 16-byte provider UID, and the node id."""
    def __init__(self, bytes):
        self.rgbFlags, self.uid, raw_nid = struct.unpack('4s16s4s', bytes)
        self.nid = NID(raw_nid)
    def __repr__(self):
        return 'EntryID %s' % self.nid
class SubFolder:
    """Lightweight reference to a child folder found in a hierarchy table."""
    def __init__(self, nid, name, parent_path):
        self.nid, self.name, self.parent_path = nid, name, parent_path
    def __repr__(self):
        return '%s (%s)' % (self.name, self.nid)
class SubMessage:
    """Lightweight summary of a message row from a folder's contents table."""
    def __init__(self, nid, SentRepresentingName, Subject, ClientSubmitTime):
        self.nid, self.SentRepresentingName = nid, SentRepresentingName
        self.Subject, self.ClientSubmitTime = Subject, ClientSubmitTime
    def __repr__(self):
        return '%s (%s)' % (self.Subject, self.nid)
class Folder:
    """A PST folder: display properties plus subfolder and message listings.

    The hierarchy, contents and FAI tables are loaded best-effort: a missing
    or unreadable table is logged and leaves the corresponding list empty.
    """
    def __init__(self, nid, ltp, parent_path='', messaging=None):
        if nid.nidType != NID.NID_TYPE_NORMAL_FOLDER:
            raise PSTException('Invalid Folder NID Type: %s' % nid.nidType)
        self.pc = ltp.get_pc_by_nid(nid)
        self.DisplayName = self.pc.getval(PropIdEnum.PidTagDisplayName)
        self.path = parent_path+'\\'+self.DisplayName
        # entryids in PST are stored as nids
        # NOTE(review): EntryId is only set when messaging is supplied, so the
        # attribute does not exist on folders built without it
        if messaging:
            self.EntryId = 4*b'\x00' + messaging.store_record_key + struct.pack('I', nid.nid)
        self.ContentCount = self.pc.getval(PropIdEnum.PidTagContentCount)
        self.ContainerClass = self.pc.getval(PropIdEnum.PidTagContainerClass)
        self.HasSubfolders = self.pc.getval(PropIdEnum.PidTagSubfolders)
        # sibling tables share the folder's NID index with different NID types
        nid_hierachy = NID(nid.nidIndex | NID.NID_TYPE_HIERARCHY_TABLE)
        nid_contents = NID(nid.nidIndex | NID.NID_TYPE_CONTENTS_TABLE)
        nid_fai = NID(nid.nidIndex | NID.NID_TYPE_ASSOC_CONTENTS_TABLE) # FAI = Folder Associated Information
        try:
            self.tc_hierachy = None
            self.subfolders = []
            self.tc_hierachy = ltp.get_tc_by_nid(nid_hierachy)
            self.subfolders = [SubFolder(self.tc_hierachy.RowIndex[RowIndex].nid, self.tc_hierachy.getval(RowIndex,PropIdEnum.PidTagDisplayName), self.path) for RowIndex in range(len(self.tc_hierachy.RowIndex))]
        except PSTException as e:
            # best-effort: folder may legitimately lack a hierarchy table
            log_error(e)
        try:
            self.tc_contents = None
            self.submessages = []
            self.tc_contents = ltp.get_tc_by_nid(nid_contents)
            self.submessages = [SubMessage(self.tc_contents.RowIndex[RowIndex].nid, \
                    self.tc_contents.getval(RowIndex,PropIdEnum.PidTagSentRepresentingNameW), ltp.strip_SubjectPrefix(self.tc_contents.getval(RowIndex,PropIdEnum.PidTagSubjectW)), \
                    self.tc_contents.getval(RowIndex,PropIdEnum.PidTagClientSubmitTime)) \
                    for RowIndex in range(len(self.tc_contents.RowIndex)) if RowIndex in self.tc_contents.RowIndex.keys()]
        except PSTException as e:
            log_error(e)
        try:
            self.tc_fai = None
            self.tc_fai = ltp.get_tc_by_nid(nid_fai)
        except PSTException as e:
            log_error(e)
    def __repr__(self):
        return 'Folder: %s, submessages: %s, subfolders: %s' % (self.DisplayName, len(self.submessages), self.subfolders)
class SubAttachment:
    """Summary of an attachment row; the long filename wins over the 8.3 one."""
    def __init__(self, nid, AttachmentSize, AttachFilename, AttachLongFilename):
        self.nid = nid
        self.AttachmentSize = AttachmentSize
        self.AttachFilename = AttachFilename
        self.AttachLongFilename = AttachLongFilename
        # prefer the long filename, fall back to the short one
        chosen = self.AttachLongFilename if self.AttachLongFilename else self.AttachFilename
        if chosen:
            self.Filename = os.path.basename(chosen)
        else:
            self.Filename = '[None]'
    def __repr__(self):
        return '%s (%s)' % (self.Filename, size_friendly(self.AttachmentSize))
class SubRecipient:
    """One row of a message's recipient table."""
    def __init__(self, RecipientType, DisplayName, ObjectType, AddressType, EmailAddress, DisplayType, EntryID):
        self.RecipientType = RecipientType
        self.DisplayName = DisplayName
        self.ObjectType = ObjectType
        self.AddressType = AddressType
        self.EmailAddress = EmailAddress
        self.DisplayType = DisplayType
        self.EntryID = EntryID
    def __repr__(self):
        return '%s (%s)' % (self.DisplayName, self.EmailAddress)
class Message:
    """A message (or embedded message) assembled from its Property Context
    plus its recipient and attachment subnode tables."""
    # PidTagMessageFlags bit values
    mfRead = 0x01
    mfUnsent = 0x08
    mfUnmodified = 0x02
    mfHasAttach = 0x10
    mfFromMe = 0x20
    mfFAI = 0x40
    mfNotifyRead = 0x100
    mfNotifyUnread = 0x200
    mfInternet = 0x2000
    # PidTagAttachMethod values
    afByValue = 0x01
    afEmbeddedMessage = 0x05
    afStorage = 0x06
    def __init__(self, nid, ltp, nbd=None, parent_message=None, messaging=None):
        """Load the message identified by nid.

        nid is an NID for a top-level message; with parent_message set, it is
        the subnode key of the embedded message inside the parent's heap node.
        """
        self.ltp = ltp
        if parent_message:
            # embedded message: its PC lives in a subnode of the parent message
            subnode = parent_message.pc.hn.subnodes[nid]
            datas = nbd.fetch_all_block_data(subnode.bidData)
            hn = HN(subnode, ltp, datas)
            self.pc = PC(hn)
        else:
            if nid.nidType != NID.NID_TYPE_NORMAL_MESSAGE:
                # BUGFIX: this previously read 'nid_pc.nidType' — an undefined
                # name that raised NameError instead of the intended PSTException
                raise PSTException('Invalid Message NID Type: %s' % nid.nidType)
            self.pc = ltp.get_pc_by_nid(nid)
        # entryids in PST are stored as nids
        if messaging:
            self.EntryId = 4*b'\x00' + messaging.store_record_key + struct.pack('I', nid.nid)
        self.MessageClass = self.pc.getval(PropIdEnum.PidTagMessageClassW)
        self.Subject = ltp.strip_SubjectPrefix(self.pc.getval(PropIdEnum.PidTagSubjectW))
        self.ClientSubmitTime = self.pc.getval(PropIdEnum.PidTagClientSubmitTime)
        self.SentRepresentingName = self.pc.getval(PropIdEnum.PidTagSentRepresentingNameW)
        self.SenderName = self.pc.getval(PropIdEnum.PidTagSenderName)
        self.SenderSmtpAddress = self.pc.getval(PropIdEnum.PidTagSenderSmtpAddress)
        self.MessageDeliveryTime = self.pc.getval(PropIdEnum.PidTagMessageDeliveryTime)
        self.MessageFlags = self.pc.getval(PropIdEnum.PidTagMessageFlags)
        self.MessageStatus = self.pc.getval(PropIdEnum.PidTagMessageStatus)
        # Robustness: PidTagMessageFlags may be absent (getval returns None);
        # treat a missing flags property as no flags set rather than crash.
        flags = self.MessageFlags if self.MessageFlags is not None else 0
        self.HasAttachments = (flags & Message.mfHasAttach == Message.mfHasAttach)
        self.MessageSize = self.pc.getval(PropIdEnum.PidTagMessageSize)
        self.Body = self.pc.getval(PropIdEnum.PidTagBody)
        self.Read = (flags & Message.mfRead == Message.mfRead)
        self.TransportMessageHeaders = self.pc.getval(PropIdEnum.PidTagTransportMessageHeaders)
        self.DisplayTo = self.pc.getval(PropIdEnum.PidTagDisplayToW)
        self.XOriginatingIP = self.pc.getval(0x8028) # x-originating-ip
        # locate the attachment and recipient tables among the message subnodes
        self.tc_attachments = None
        self.tc_recipients = None
        if self.pc.hn.subnodes:
            for subnode in self.pc.hn.subnodes.values(): # SLENTRYs
                if subnode.nid.nidType == NID.NID_TYPE_ATTACHMENT_TABLE:
                    self.tc_attachments = self.ltp.get_tc_by_slentry(subnode)
                elif subnode.nid.nidType == NID.NID_TYPE_RECIPIENT_TABLE:
                    self.tc_recipients = ltp.get_tc_by_slentry(subnode)
        self.subattachments = []
        if self.tc_attachments:
            self.subattachments = [
                SubAttachment(
                    self.tc_attachments.RowIndex[RowIndex].nid,
                    self.tc_attachments.getval(RowIndex, PropIdEnum.PidTagAttachmentSize),
                    self.tc_attachments.getval(RowIndex, PropIdEnum.PidTagAttachFilename),
                    self.tc_attachments.getval(RowIndex, PropIdEnum.PidTagAttachLongFilename))
                for RowIndex in range(len(self.tc_attachments.RowIndex))]
        self.subrecipients = []
        if self.tc_recipients:
            self.subrecipients = [
                SubRecipient(
                    self.tc_recipients.getval(RowIndex, PropIdEnum.PidTagRecipientType),
                    self.tc_recipients.getval(RowIndex, PropIdEnum.PidTagDisplayName),
                    self.tc_recipients.getval(RowIndex, PropIdEnum.PidTagObjectType),
                    self.tc_recipients.getval(RowIndex, PropIdEnum.PidTagAddressType),
                    self.tc_recipients.getval(RowIndex, PropIdEnum.PidTagEmailAddress),
                    self.tc_recipients.getval(RowIndex, PropIdEnum.PidTagDisplayType),
                    self.tc_recipients.getval(RowIndex, PropIdEnum.PidTagEntryID))
                for RowIndex in range(len(self.tc_recipients.RowIndex))]
    def get_attachment(self, subattachment):
        """ fetch attachment on demand, not when Message instanced"""
        return Attachment(self.ltp, self.pc.hn.subnodes[subattachment.nid.nid])
    def get_all_properties(self):
        """Return the repr of the underlying PC (all raw properties)."""
        return self.pc.__repr__()
    def __repr__(self):
        attachments = ', '.join(['%s' % subattachment for subattachment in self.subattachments])
        return 'Message: %s, From: %s, %s, Size: %s, Attachments: %s' % (repr(self.Subject), repr(self.SentRepresentingName), self.ClientSubmitTime, size_friendly(self.MessageSize), attachments)
class Attachment:
    """An attachment loaded from a message subnode's Property Context."""
    def __init__(self, ltp, slentry):
        self.ltp = ltp
        self.slentry = slentry
        self.pc = self.ltp.get_pc_by_slentry(slentry)
        self.DisplayName = self.pc.getval(PropIdEnum.PidTagDisplayName)
        self.AttachMethod = self.pc.getval(PropIdEnum.PidTagAttachMethod)
        self.AttachmentSize = self.pc.getval(PropIdEnum.PidTagAttachmentSize)
        self.AttachFilename = self.pc.getval(PropIdEnum.PidTagAttachFilename) # 8.3 short name
        self.AttachLongFilename = self.pc.getval(PropIdEnum.PidTagAttachLongFilename)
        # prefer the long filename over the 8.3 short one
        if self.AttachLongFilename:
            self.Filename = self.AttachLongFilename
        else:
            self.Filename = self.AttachFilename
        if self.Filename:
            self.Filename = os.path.basename(self.Filename)
        else:
            self.Filename = '[NoFilename_Method%s]' % self.AttachMethod
        if self.AttachMethod == Message.afByValue:
            self.data = self.pc.getval(PropIdEnum.PidTagAttachDataBinary)
        else:
            # NOTE(review): both data tags share id 0x3701; for non-by-value
            # methods this fetches whatever the object tag holds — confirm
            # behavior for embedded-message attachments
            self.data = self.pc.getval(PropIdEnum.PidTagAttachDataObject)
        self.AttachMimeTag = self.pc.getval(PropIdEnum.PidTagAttachMimeTag)
        self.AttachExtension = self.pc.getval(PropIdEnum.PidTagAttachExtension)
    def get_all_properties(self):
        """Return the repr of the underlying PC (all raw properties)."""
        return self.pc.__repr__()
class NAMEID:
    """One 8-byte record of the name-to-id map's entry stream."""
    def __init__(self, bytes):
        dwPropertyID, packed_guid, wPropIdx = struct.unpack('IHH', bytes)
        self.dwPropertyID = dwPropertyID
        self.N = packed_guid & 0x01        # 1 => identified by name string, 0 => by numeric id
        self.wGuid = packed_guid >> 1      # GUID selector (remaining 15 bits)
        self.wPropIdx = wPropIdx
        self.NPID = wPropIdx + 0x8000      # named property ids start at 0x8000
class Messaging:
    """Messaging Layer: message store properties and the name-to-id map."""
    def __init__(self, ltp):
        self.ltp = ltp
        self.set_message_store()
        try:
            self.set_name_to_id_map()
        except PSTException as e:
            # a missing/corrupt name-to-id map is survivable; log and continue
            log_error(e)
    def set_message_store(self):
        """Load the message store PC: record key, password hash, root entryids."""
        self.message_store = self.ltp.get_pc_by_nid(NID(NID.NID_MESSAGE_STORE))
        self.store_record_key = self.message_store.getval(PropIdEnum.PidTagRecordKey)
        if PropIdEnum.PidTagPstPassword in self.message_store.props.keys():
            # stored as a signed int32; reinterpret the bits as unsigned CRC32
            self.PasswordCRC32Hash = struct.unpack('I', struct.pack('i', self.message_store.getval(PropIdEnum.PidTagPstPassword)))[0]
        else:
            self.PasswordCRC32Hash = None
        self.root_entryid = self.message_store.getval(PropIdEnum.PidTagIpmSubTreeEntryId)
        self.deleted_items_entryid = self.message_store.getval(PropIdEnum.PidTagIpmWastebasketEntryId)
    def set_name_to_id_map(self):
        """Parse the name-to-id map node into NAMEID entries (names and GUIDs)."""
        self.nameid_entries = []
        self.pc_name_to_id_map = self.ltp.get_pc_by_nid(NID(NID.NID_NAME_TO_ID_MAP))
        nameid_entrystream = self.pc_name_to_id_map.getval(PropIdEnum.PidTagNameidStreamEntry)
        self.nameid_entries = [NAMEID(nameid_entrystream[i*8:(i+1)*8]) for i in range(len(nameid_entrystream)//8)]
        nameid_stringstream = self.pc_name_to_id_map.getval(PropIdEnum.PidTagNameidStreamString)
        nameid_guidstream = self.pc_name_to_id_map.getval(PropIdEnum.PidTagNameidStreamGuid)
        for nameid in self.nameid_entries:
            if nameid.N == 1:
                # string-named property: dwPropertyID is a byte offset into the
                # string stream (4-byte length followed by the UTF-16LE name)
                name_len = struct.unpack('I', nameid_stringstream[nameid.dwPropertyID:nameid.dwPropertyID+4])[0]
                nameid.name = nameid_stringstream[nameid.dwPropertyID+4:nameid.dwPropertyID+4+name_len].decode('utf-16-le') # unicode
            if nameid.wGuid == 0:
                nameid.guid = None
            elif nameid.wGuid == 1: # PS_MAPI
                nameid.guid = b'(\x03\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00F'
            elif nameid.wGuid == 2: # PS_PUBLIC_STRINGS
                nameid.guid = b')\x03\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00F'
            else:
                # selectors 3+ index 16-byte GUIDs in the GUID stream
                nameid.guid = nameid_guidstream[16*(nameid.wGuid-3):16*(nameid.wGuid-2)]
    def get_folder(self, entryid, parent_path=''):
        """Return the Folder object for an EntryID."""
        return Folder(entryid.nid, self.ltp, parent_path, self)
    def get_named_properties(self):
        """Return a printable dump of all string-named properties."""
        return '\n'.join(['%s = %s' % (hex(nameid.NPID), repr(nameid.name)) for nameid in self.nameid_entries if nameid.N==1])
#############################################################################################################################
# ____ ____ _____ _
# | _ \/ ___|_ _| | | __ _ _ _ ___ _ __
# | |_) \___ \ | | | | / _` | | | |/ _ \ '__|
# | __/ ___) || | | |__| (_| | |_| | __/ |
# |_| |____/ |_| |_____\__,_|\__, |\___|_|
# |___/
#############################################################################################################################
class CRC:
CrcTableOffset32 = (0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA, 0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3,
0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988, 0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91,
0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE, 0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7,
0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC, 0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5,
0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172, 0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B,
0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940, 0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59,
0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116, 0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F,
0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924, 0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D,
0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A, 0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433,
0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818, 0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01,
0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E, 0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457,
0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C, 0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65,
0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2, 0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB,
0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0, 0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9,
0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086, 0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F,
0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4, 0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD,
0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A, 0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683,
0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8, 0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1,
0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE, 0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7,
0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC, 0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5,
0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252, 0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B,
0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60, 0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79,
0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236, 0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F,
0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04, 0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D,
0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A, 0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713,
0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38, 0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21,
0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E, 0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777,
0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C, 0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45,
0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2, 0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB,
0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0, 0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9,
0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6, 0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF,
0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94, 0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D)
CrcTableOffset40 = (0x00000000, 0x191B3141, 0x32366282, 0x2B2D53C3, 0x646CC504, 0x7D77F445, 0x565AA786, 0x4F4196C7,
0xC8D98A08, 0xD1C2BB49, 0xFAEFE88A, 0xE3F4D9CB, 0xACB54F0C, 0xB5AE7E4D, 0x9E832D8E, 0x87981CCF,
0x4AC21251, 0x53D92310, 0x78F470D3, 0x61EF4192, 0x2EAED755, 0x37B5E614, 0x1C98B5D7, 0x05838496,
0x821B9859, 0x9B00A918, 0xB02DFADB, 0xA936CB9A, 0xE6775D5D, 0xFF6C6C1C, 0xD4413FDF, 0xCD5A0E9E,
0x958424A2, 0x8C9F15E3, 0xA7B24620, 0xBEA97761, 0xF1E8E1A6, 0xE8F3D0E7, 0xC3DE8324, 0xDAC5B265,
0x5D5DAEAA, 0x44469FEB, 0x6F6BCC28, 0x7670FD69, 0x39316BAE, 0x202A5AEF, 0x0B07092C, 0x121C386D,
0xDF4636F3, 0xC65D07B2, 0xED705471, 0xF46B6530, 0xBB2AF3F7, 0xA231C2B6, 0x891C9175, 0x9007A034,
0x179FBCFB, 0x0E848DBA, 0x25A9DE79, 0x3CB2EF38, 0x73F379FF, 0x6AE848BE, 0x41C51B7D, 0x58DE2A3C,
0xF0794F05, 0xE9627E44, 0xC24F2D87, 0xDB541CC6, 0x94158A01, 0x8D0EBB40, 0xA623E883, 0xBF38D9C2,
0x38A0C50D, 0x21BBF44C, 0x0A96A78F, 0x138D96CE, 0x5CCC0009, 0x45D73148, 0x6EFA628B, 0x77E153CA,
0xBABB5D54, 0xA3A06C15, 0x888D3FD6, 0x91960E97, 0xDED79850, 0xC7CCA911, 0xECE1FAD2, 0xF5FACB93,
0x7262D75C, 0x6B79E61D, 0x4054B5DE, 0x594F849F, 0x160E1258, 0x0F152319, 0x243870DA, 0x3D23419B,
0x65FD6BA7, 0x7CE65AE6, 0x57CB0925, 0x4ED03864, 0x0191AEA3, 0x188A9FE2, 0x33A7CC21, 0x2ABCFD60,
0xAD24E1AF, 0xB43FD0EE, 0x9F12832D, 0x8609B26C, 0xC94824AB, 0xD05315EA, 0xFB7E4629, 0xE2657768,
0x2F3F79F6, 0x362448B7, 0x1D091B74, 0x04122A35, 0x4B53BCF2, 0x52488DB3, 0x7965DE70, 0x607EEF31,
0xE7E6F3FE, 0xFEFDC2BF, 0xD5D0917C, 0xCCCBA03D, 0x838A36FA, 0x9A9107BB, 0xB1BC5478, 0xA8A76539,
0x3B83984B, 0x2298A90A, 0x09B5FAC9, 0x10AECB88, 0x5FEF5D4F, 0x46F46C0E, 0x6DD93FCD, 0x74C20E8C,
0xF35A1243, 0xEA412302, 0xC16C70C1, 0xD8774180, 0x9736D747, 0x8E2DE606, 0xA500B5C5, 0xBC1B8484,
0x71418A1A, 0x685ABB5B, 0x4377E898, 0x5A6CD9D9, 0x152D4F1E, 0x0C367E5F, 0x271B2D9C, 0x3E001CDD,
0xB9980012, 0xA0833153, 0x8BAE6290, 0x92B553D1, 0xDDF4C516, 0xC4EFF457, 0xEFC2A794, 0xF6D996D5,
0xAE07BCE9, 0xB71C8DA8, 0x9C31DE6B, 0x852AEF2A, 0xCA6B79ED, 0xD37048AC, 0xF85D1B6F, 0xE1462A2E,
0x66DE36E1, 0x7FC507A0, 0x54E85463, 0x4DF36522, 0x02B2F3E5, 0x1BA9C2A4, 0x30849167, 0x299FA026,
0xE4C5AEB8, 0xFDDE9FF9, 0xD6F3CC3A, 0xCFE8FD7B, 0x80A96BBC, 0x99B25AFD, 0xB29F093E, 0xAB84387F,
0x2C1C24B0, 0x350715F1, 0x1E2A4632, 0x07317773, 0x4870E1B4, 0x516BD0F5, 0x7A468336, 0x635DB277,
0xCBFAD74E, 0xD2E1E60F, 0xF9CCB5CC, 0xE0D7848D, 0xAF96124A, 0xB68D230B, 0x9DA070C8, 0x84BB4189,
0x03235D46, 0x1A386C07, 0x31153FC4, 0x280E0E85, 0x674F9842, 0x7E54A903, 0x5579FAC0, 0x4C62CB81,
0x8138C51F, 0x9823F45E, 0xB30EA79D, 0xAA1596DC, 0xE554001B, 0xFC4F315A, 0xD7626299, 0xCE7953D8,
0x49E14F17, 0x50FA7E56, 0x7BD72D95, 0x62CC1CD4, 0x2D8D8A13, 0x3496BB52, 0x1FBBE891, 0x06A0D9D0,
0x5E7EF3EC, 0x4765C2AD, 0x6C48916E, 0x7553A02F, 0x3A1236E8, 0x230907A9, 0x0824546A, 0x113F652B,
0x96A779E4, 0x8FBC48A5, 0xA4911B66, 0xBD8A2A27, 0xF2CBBCE0, 0xEBD08DA1, 0xC0FDDE62, 0xD9E6EF23,
0x14BCE1BD, 0x0DA7D0FC, 0x268A833F, 0x3F91B27E, 0x70D024B9, 0x69CB15F8, 0x42E6463B, 0x5BFD777A,
0xDC656BB5, 0xC57E5AF4, 0xEE530937, 0xF7483876, 0xB809AEB1, 0xA1129FF0, 0x8A3FCC33, 0x9324FD72)
CrcTableOffset48 = (0x00000000, 0x01C26A37, 0x0384D46E, 0x0246BE59, 0x0709A8DC, 0x06CBC2EB, 0x048D7CB2, 0x054F1685,
0x0E1351B8, 0x0FD13B8F, 0x0D9785D6, 0x0C55EFE1, 0x091AF964, 0x08D89353, 0x0A9E2D0A, 0x0B5C473D,
0x1C26A370, 0x1DE4C947, 0x1FA2771E, 0x1E601D29, 0x1B2F0BAC, 0x1AED619B, 0x18ABDFC2, 0x1969B5F5,
0x1235F2C8, 0x13F798FF, 0x11B126A6, 0x10734C91, 0x153C5A14, 0x14FE3023, 0x16B88E7A, 0x177AE44D,
0x384D46E0, 0x398F2CD7, 0x3BC9928E, 0x3A0BF8B9, 0x3F44EE3C, 0x3E86840B, 0x3CC03A52, 0x3D025065,
0x365E1758, 0x379C7D6F, 0x35DAC336, 0x3418A901, 0x3157BF84, 0x3095D5B3, 0x32D36BEA, 0x331101DD,
0x246BE590, 0x25A98FA7, 0x27EF31FE, 0x262D5BC9, 0x23624D4C, 0x22A0277B, 0x20E69922, 0x2124F315,
0x2A78B428, 0x2BBADE1F, 0x29FC6046, 0x283E0A71, 0x2D711CF4, 0x2CB376C3, 0x2EF5C89A, 0x2F37A2AD,
0x709A8DC0, 0x7158E7F7, 0x731E59AE, 0x72DC3399, 0x7793251C, 0x76514F2B, 0x7417F172, 0x75D59B45,
0x7E89DC78, 0x7F4BB64F, 0x7D0D0816, 0x7CCF6221, 0x798074A4, 0x78421E93, 0x7A04A0CA, 0x7BC6CAFD,
0x6CBC2EB0, 0x6D7E4487, 0x6F38FADE, 0x6EFA90E9, 0x6BB5866C, 0x6A77EC5B, 0x68315202, 0x69F33835,
0x62AF7F08, 0x636D153F, 0x612BAB66, 0x60E9C151, 0x65A6D7D4, 0x6464BDE3, 0x662203BA, 0x67E0698D,
0x48D7CB20, 0x4915A117, 0x4B531F4E, 0x4A917579, 0x4FDE63FC, 0x4E1C09CB, 0x4C5AB792, 0x4D98DDA5,
0x46C49A98, 0x4706F0AF, 0x45404EF6, 0x448224C1, 0x41CD3244, 0x400F5873, 0x4249E62A, 0x438B8C1D,
0x54F16850, 0x55330267, 0x5775BC3E, 0x56B7D609, 0x53F8C08C, 0x523AAABB, 0x507C14E2, 0x51BE7ED5,
0x5AE239E8, 0x5B2053DF, 0x5966ED86, 0x58A487B1, 0x5DEB9134, 0x5C29FB03, 0x5E6F455A, 0x5FAD2F6D,
0xE1351B80, 0xE0F771B7, 0xE2B1CFEE, 0xE373A5D9, 0xE63CB35C, 0xE7FED96B, 0xE5B86732, 0xE47A0D05,
0xEF264A38, 0xEEE4200F, 0xECA29E56, 0xED60F461, 0xE82FE2E4, 0xE9ED88D3, 0xEBAB368A, 0xEA695CBD,
0xFD13B8F0, 0xFCD1D2C7, 0xFE976C9E, 0xFF5506A9, 0xFA1A102C, 0xFBD87A1B, 0xF99EC442, 0xF85CAE75,
0xF300E948, 0xF2C2837F, 0xF0843D26, 0xF1465711, 0xF4094194, 0xF5CB2BA3, 0xF78D95FA, 0xF64FFFCD,
0xD9785D60, 0xD8BA3757, 0xDAFC890E, 0xDB3EE339, 0xDE71F5BC, 0xDFB39F8B, 0xDDF521D2, 0xDC374BE5,
0xD76B0CD8, 0xD6A966EF, 0xD4EFD8B6, 0xD52DB281, 0xD062A404, 0xD1A0CE33, 0xD3E6706A, 0xD2241A5D,
0xC55EFE10, 0xC49C9427, 0xC6DA2A7E, 0xC7184049, 0xC25756CC, 0xC3953CFB, 0xC1D382A2, 0xC011E895,
0xCB4DAFA8, 0xCA8FC59F, 0xC8C97BC6, 0xC90B11F1, 0xCC440774, 0xCD866D43, 0xCFC0D31A, 0xCE02B92D,
0x91AF9640, 0x906DFC77, 0x922B422E, 0x93E92819, 0x96A63E9C, 0x976454AB, 0x9522EAF2, 0x94E080C5,
0x9FBCC7F8, 0x9E7EADCF, 0x9C381396, 0x9DFA79A1, 0x98B56F24, 0x99770513, 0x9B31BB4A, 0x9AF3D17D,
0x8D893530, 0x8C4B5F07, 0x8E0DE15E, 0x8FCF8B69, 0x8A809DEC, 0x8B42F7DB, 0x89044982, 0x88C623B5,
0x839A6488, 0x82580EBF, 0x801EB0E6, 0x81DCDAD1, 0x8493CC54, 0x8551A663, 0x8717183A, 0x86D5720D,
0xA9E2D0A0, 0xA820BA97, 0xAA6604CE, 0xABA46EF9, 0xAEEB787C, 0xAF29124B, 0xAD6FAC12, 0xACADC625,
0xA7F18118, 0xA633EB2F, 0xA4755576, 0xA5B73F41, 0xA0F829C4, 0xA13A43F3, 0xA37CFDAA, 0xA2BE979D,
0xB5C473D0, 0xB40619E7, 0xB640A7BE, 0xB782CD89, 0xB2CDDB0C, 0xB30FB13B, 0xB1490F62, 0xB08B6555,
0xBBD72268, 0xBA15485F, 0xB853F606, 0xB9919C31, 0xBCDE8AB4, 0xBD1CE083, 0xBF5A5EDA, 0xBE9834ED)
CrcTableOffset56 = (0x00000000, 0xB8BC6765, 0xAA09C88B, 0x12B5AFEE, 0x8F629757, 0x37DEF032, 0x256B5FDC, 0x9DD738B9,
0xC5B428EF, 0x7D084F8A, 0x6FBDE064, 0xD7018701, 0x4AD6BFB8, 0xF26AD8DD, 0xE0DF7733, 0x58631056,
0x5019579F, 0xE8A530FA, 0xFA109F14, 0x42ACF871, 0xDF7BC0C8, 0x67C7A7AD, 0x75720843, 0xCDCE6F26,
0x95AD7F70, 0x2D111815, 0x3FA4B7FB, 0x8718D09E, 0x1ACFE827, 0xA2738F42, 0xB0C620AC, 0x087A47C9,
0xA032AF3E, 0x188EC85B, 0x0A3B67B5, 0xB28700D0, 0x2F503869, 0x97EC5F0C, 0x8559F0E2, 0x3DE59787,
0x658687D1, 0xDD3AE0B4, 0xCF8F4F5A, 0x7733283F, 0xEAE41086, 0x525877E3, 0x40EDD80D, 0xF851BF68,
0xF02BF8A1, 0x48979FC4, 0x5A22302A, 0xE29E574F, 0x7F496FF6, 0xC7F50893, 0xD540A77D, 0x6DFCC018,
0x359FD04E, 0x8D23B72B, 0x9F9618C5, 0x272A7FA0, 0xBAFD4719, 0x0241207C, 0x10F48F92, 0xA848E8F7,
0x9B14583D, 0x23A83F58, 0x311D90B6, 0x89A1F7D3, 0x1476CF6A, 0xACCAA80F, 0xBE7F07E1, 0x06C36084,
0x5EA070D2, 0xE61C17B7, 0xF4A9B859, 0x4C15DF3C, 0xD1C2E785, 0x697E80E0, 0x7BCB2F0E, 0xC377486B,
0xCB0D0FA2, 0x73B168C7, 0x6104C729, 0xD9B8A04C, 0x446F98F5, 0xFCD3FF90, 0xEE66507E, 0x56DA371B,
0x0EB9274D, 0xB6054028, 0xA4B0EFC6, 0x1C0C88A3, 0x81DBB01A, 0x3967D77F, 0x2BD27891, 0x936E1FF4,
0x3B26F703, 0x839A9066, 0x912F3F88, 0x299358ED, 0xB4446054, 0x0CF80731, 0x1E4DA8DF, 0xA6F1CFBA,
0xFE92DFEC, 0x462EB889, 0x549B1767, 0xEC277002, 0x71F048BB, 0xC94C2FDE, 0xDBF98030, 0x6345E755,
0x6B3FA09C, 0xD383C7F9, 0xC1366817, 0x798A0F72, 0xE45D37CB, 0x5CE150AE, 0x4E54FF40, 0xF6E89825,
0xAE8B8873, 0x1637EF16, 0x048240F8, 0xBC3E279D, 0x21E91F24, 0x99557841, 0x8BE0D7AF, 0x335CB0CA,
0xED59B63B, 0x55E5D15E, 0x47507EB0, 0xFFEC19D5, 0x623B216C, 0xDA874609, 0xC832E9E7, 0x708E8E82,
0x28ED9ED4, 0x9051F9B1, 0x82E4565F, 0x3A58313A, 0xA78F0983, 0x1F336EE6, 0x0D86C108, 0xB53AA66D,
0xBD40E1A4, 0x05FC86C1, 0x1749292F, 0xAFF54E4A, 0x322276F3, 0x8A9E1196, 0x982BBE78, 0x2097D91D,
0x78F4C94B, 0xC048AE2E, 0xD2FD01C0, 0x6A4166A5, 0xF7965E1C, 0x4F2A3979, 0x5D9F9697, 0xE523F1F2,
0x4D6B1905, 0xF5D77E60, 0xE762D18E, 0x5FDEB6EB, 0xC2098E52, 0x7AB5E937, 0x680046D9, 0xD0BC21BC,
0x88DF31EA, 0x3063568F, 0x22D6F961, 0x9A6A9E04, 0x07BDA6BD, 0xBF01C1D8, 0xADB46E36, 0x15080953,
0x1D724E9A, 0xA5CE29FF, 0xB77B8611, 0x0FC7E174, 0x9210D9CD, 0x2AACBEA8, 0x38191146, 0x80A57623,
0xD8C66675, 0x607A0110, 0x72CFAEFE, 0xCA73C99B, 0x57A4F122, 0xEF189647, 0xFDAD39A9, 0x45115ECC,
0x764DEE06, 0xCEF18963, 0xDC44268D, 0x64F841E8, 0xF92F7951, 0x41931E34, 0x5326B1DA, 0xEB9AD6BF,
0xB3F9C6E9, 0x0B45A18C, 0x19F00E62, 0xA14C6907, 0x3C9B51BE, 0x842736DB, 0x96929935, 0x2E2EFE50,
0x2654B999, 0x9EE8DEFC, 0x8C5D7112, 0x34E11677, 0xA9362ECE, 0x118A49AB, 0x033FE645, 0xBB838120,
0xE3E09176, 0x5B5CF613, 0x49E959FD, 0xF1553E98, 0x6C820621, 0xD43E6144, 0xC68BCEAA, 0x7E37A9CF,
0xD67F4138, 0x6EC3265D, 0x7C7689B3, 0xC4CAEED6, 0x591DD66F, 0xE1A1B10A, 0xF3141EE4, 0x4BA87981,
0x13CB69D7, 0xAB770EB2, 0xB9C2A15C, 0x017EC639, 0x9CA9FE80, 0x241599E5, 0x36A0360B, 0x8E1C516E,
0x866616A7, 0x3EDA71C2, 0x2C6FDE2C, 0x94D3B949, 0x090481F0, 0xB1B8E695, 0xA30D497B, 0x1BB12E1E,
0x43D23E48, 0xFB6E592D, 0xE9DBF6C3, 0x516791A6, 0xCCB0A91F, 0x740CCE7A, 0x66B96194, 0xDE0506F1)
CrcTableOffset64 = (0x00000000, 0x3D6029B0, 0x7AC05360, 0x47A07AD0, 0xF580A6C0, 0xC8E08F70, 0x8F40F5A0, 0xB220DC10,
0x30704BC1, 0x0D106271, 0x4AB018A1, 0x77D03111, 0xC5F0ED01, 0xF890C4B1, 0xBF30BE61, 0x825097D1,
0x60E09782, 0x5D80BE32, 0x1A20C4E2, 0x2740ED52, 0x95603142, 0xA80018F2, 0xEFA06222, 0xD2C04B92,
0x5090DC43, 0x6DF0F5F3, 0x2A508F23, 0x1730A693, 0xA5107A83, 0x98705333, 0xDFD029E3, 0xE2B00053,
0xC1C12F04, 0xFCA106B4, 0xBB017C64, 0x866155D4, 0x344189C4, 0x0921A074, 0x4E81DAA4, 0x73E1F314,
0xF1B164C5, 0xCCD14D75, 0x8B7137A5, 0xB6111E15, 0x0431C205, 0x3951EBB5, 0x7EF19165, 0x4391B8D5,
0xA121B886, 0x9C419136, 0xDBE1EBE6, 0xE681C256, 0x54A11E46, 0x69C137F6, 0x2E614D26, 0x13016496,
0x9151F347, 0xAC31DAF7, 0xEB91A027, 0xD6F18997, 0x64D15587, 0x59B17C37, 0x1E1106E7, 0x23712F57,
0x58F35849, 0x659371F9, 0x22330B29, 0x1F532299, 0xAD73FE89, 0x9013D739, 0xD7B3ADE9, 0xEAD38459,
0x68831388, 0x55E33A38, 0x124340E8, 0x2F236958, 0x9D03B548, 0xA0639CF8, 0xE7C3E628, 0xDAA3CF98,
0x3813CFCB, 0x0573E67B, 0x42D39CAB, 0x7FB3B51B, 0xCD93690B, 0xF0F340BB, 0xB7533A6B, 0x8A3313DB,
0x0863840A, 0x3503ADBA, 0x72A3D76A, 0x4FC3FEDA, 0xFDE322CA, 0xC0830B7A, 0x872371AA, 0xBA43581A,
0x9932774D, 0xA4525EFD, 0xE3F2242D, 0xDE920D9D, 0x6CB2D18D, 0x51D2F83D, 0x167282ED, 0x2B12AB5D,
0xA9423C8C, 0x9422153C, 0xD3826FEC, 0xEEE2465C, 0x5CC29A4C, 0x61A2B3FC, 0x2602C92C, 0x1B62E09C,
0xF9D2E0CF, 0xC4B2C97F, 0x8312B3AF, 0xBE729A1F, 0x0C52460F, 0x31326FBF, 0x7692156F, 0x4BF23CDF,
0xC9A2AB0E, 0xF4C282BE, 0xB362F86E, 0x8E02D1DE, 0x3C220DCE, 0x0142247E, 0x46E25EAE, 0x7B82771E,
0xB1E6B092, 0x8C869922, 0xCB26E3F2, 0xF646CA42, 0x44661652, 0x79063FE2, 0x3EA64532, 0x03C66C82,
0x8196FB53, 0xBCF6D2E3, 0xFB56A833, 0xC6368183, 0x74165D93, 0x49767423, 0x0ED60EF3, 0x33B62743,
0xD1062710, 0xEC660EA0, 0xABC67470, 0x96A65DC0, 0x248681D0, 0x19E6A860, 0x5E46D2B0, 0x6326FB00,
0xE1766CD1, 0xDC164561, 0x9BB63FB1, 0xA6D61601, 0x14F6CA11, 0x2996E3A1, 0x6E369971, 0x5356B0C1,
0x70279F96, 0x4D47B626, 0x0AE7CCF6, 0x3787E546, 0x85A73956, 0xB8C710E6, 0xFF676A36, 0xC2074386,
0x4057D457, 0x7D37FDE7, 0x3A978737, 0x07F7AE87, 0xB5D77297, 0x88B75B27, 0xCF1721F7, 0xF2770847,
0x10C70814, 0x2DA721A4, 0x6A075B74, 0x576772C4, 0xE547AED4, 0xD8278764, 0x9F87FDB4, 0xA2E7D404,
0x20B743D5, 0x1DD76A65, 0x5A7710B5, 0x67173905, 0xD537E515, 0xE857CCA5, 0xAFF7B675, 0x92979FC5,
0xE915E8DB, 0xD475C16B, 0x93D5BBBB, 0xAEB5920B, 0x1C954E1B, 0x21F567AB, 0x66551D7B, 0x5B3534CB,
0xD965A31A, 0xE4058AAA, 0xA3A5F07A, 0x9EC5D9CA, 0x2CE505DA, 0x11852C6A, 0x562556BA, 0x6B457F0A,
0x89F57F59, 0xB49556E9, 0xF3352C39, 0xCE550589, 0x7C75D999, 0x4115F029, 0x06B58AF9, 0x3BD5A349,
0xB9853498, 0x84E51D28, 0xC34567F8, 0xFE254E48, 0x4C059258, 0x7165BBE8, 0x36C5C138, 0x0BA5E888,
0x28D4C7DF, 0x15B4EE6F, 0x521494BF, 0x6F74BD0F, 0xDD54611F, 0xE03448AF, 0xA794327F, 0x9AF41BCF,
0x18A48C1E, 0x25C4A5AE, 0x6264DF7E, 0x5F04F6CE, 0xED242ADE, 0xD044036E, 0x97E479BE, 0xAA84500E,
0x4834505D, 0x755479ED, 0x32F4033D, 0x0F942A8D, 0xBDB4F69D, 0x80D4DF2D, 0xC774A5FD, 0xFA148C4D,
0x78441B9C, 0x4524322C, 0x028448FC, 0x3FE4614C, 0x8DC4BD5C, 0xB0A494EC, 0xF704EE3C, 0xCA64C78C)
CrcTableOffset72 = (0x00000000, 0xCB5CD3A5, 0x4DC8A10B, 0x869472AE, 0x9B914216, 0x50CD91B3, 0xD659E31D, 0x1D0530B8,
0xEC53826D, 0x270F51C8, 0xA19B2366, 0x6AC7F0C3, 0x77C2C07B, 0xBC9E13DE, 0x3A0A6170, 0xF156B2D5,
0x03D6029B, 0xC88AD13E, 0x4E1EA390, 0x85427035, 0x9847408D, 0x531B9328, 0xD58FE186, 0x1ED33223,
0xEF8580F6, 0x24D95353, 0xA24D21FD, 0x6911F258, 0x7414C2E0, 0xBF481145, 0x39DC63EB, 0xF280B04E,
0x07AC0536, 0xCCF0D693, 0x4A64A43D, 0x81387798, 0x9C3D4720, 0x57619485, 0xD1F5E62B, 0x1AA9358E,
0xEBFF875B, 0x20A354FE, 0xA6372650, 0x6D6BF5F5, 0x706EC54D, 0xBB3216E8, 0x3DA66446, 0xF6FAB7E3,
0x047A07AD, 0xCF26D408, 0x49B2A6A6, 0x82EE7503, 0x9FEB45BB, 0x54B7961E, 0xD223E4B0, 0x197F3715,
0xE82985C0, 0x23755665, 0xA5E124CB, 0x6EBDF76E, 0x73B8C7D6, 0xB8E41473, 0x3E7066DD, 0xF52CB578,
0x0F580A6C, 0xC404D9C9, 0x4290AB67, 0x89CC78C2, 0x94C9487A, 0x5F959BDF, 0xD901E971, 0x125D3AD4,
0xE30B8801, 0x28575BA4, 0xAEC3290A, 0x659FFAAF, 0x789ACA17, 0xB3C619B2, 0x35526B1C, 0xFE0EB8B9,
0x0C8E08F7, 0xC7D2DB52, 0x4146A9FC, 0x8A1A7A59, 0x971F4AE1, 0x5C439944, 0xDAD7EBEA, 0x118B384F,
0xE0DD8A9A, 0x2B81593F, 0xAD152B91, 0x6649F834, 0x7B4CC88C, 0xB0101B29, 0x36846987, 0xFDD8BA22,
0x08F40F5A, 0xC3A8DCFF, 0x453CAE51, 0x8E607DF4, 0x93654D4C, 0x58399EE9, 0xDEADEC47, 0x15F13FE2,
0xE4A78D37, 0x2FFB5E92, 0xA96F2C3C, 0x6233FF99, 0x7F36CF21, 0xB46A1C84, 0x32FE6E2A, 0xF9A2BD8F,
0x0B220DC1, 0xC07EDE64, 0x46EAACCA, 0x8DB67F6F, 0x90B34FD7, 0x5BEF9C72, 0xDD7BEEDC, 0x16273D79,
0xE7718FAC, 0x2C2D5C09, 0xAAB92EA7, 0x61E5FD02, 0x7CE0CDBA, 0xB7BC1E1F, 0x31286CB1, 0xFA74BF14,
0x1EB014D8, 0xD5ECC77D, 0x5378B5D3, 0x98246676, 0x852156CE, 0x4E7D856B, 0xC8E9F7C5, 0x03B52460,
0xF2E396B5, 0x39BF4510, 0xBF2B37BE, 0x7477E41B, 0x6972D4A3, 0xA22E0706, 0x24BA75A8, 0xEFE6A60D,
0x1D661643, 0xD63AC5E6, 0x50AEB748, 0x9BF264ED, 0x86F75455, 0x4DAB87F0, 0xCB3FF55E, 0x006326FB,
0xF135942E, 0x3A69478B, 0xBCFD3525, 0x77A1E680, 0x6AA4D638, 0xA1F8059D, 0x276C7733, 0xEC30A496,
0x191C11EE, 0xD240C24B, 0x54D4B0E5, 0x9F886340, 0x828D53F8, 0x49D1805D, 0xCF45F2F3, 0x04192156,
0xF54F9383, 0x3E134026, 0xB8873288, 0x73DBE12D, 0x6EDED195, 0xA5820230, 0x2316709E, 0xE84AA33B,
0x1ACA1375, 0xD196C0D0, 0x5702B27E, 0x9C5E61DB, 0x815B5163, 0x4A0782C6, 0xCC93F068, 0x07CF23CD,
0xF6999118, 0x3DC542BD, 0xBB513013, 0x700DE3B6, 0x6D08D30E, 0xA65400AB, 0x20C07205, 0xEB9CA1A0,
0x11E81EB4, 0xDAB4CD11, 0x5C20BFBF, 0x977C6C1A, 0x8A795CA2, 0x41258F07, 0xC7B1FDA9, 0x0CED2E0C,
0xFDBB9CD9, 0x36E74F7C, 0xB0733DD2, 0x7B2FEE77, 0x662ADECF, 0xAD760D6A, 0x2BE27FC4, 0xE0BEAC61,
0x123E1C2F, 0xD962CF8A, 0x5FF6BD24, 0x94AA6E81, 0x89AF5E39, 0x42F38D9C, 0xC467FF32, 0x0F3B2C97,
0xFE6D9E42, 0x35314DE7, 0xB3A53F49, 0x78F9ECEC, 0x65FCDC54, 0xAEA00FF1, 0x28347D5F, 0xE368AEFA,
0x16441B82, 0xDD18C827, 0x5B8CBA89, 0x90D0692C, 0x8DD55994, 0x46898A31, 0xC01DF89F, 0x0B412B3A,
0xFA1799EF, 0x314B4A4A, 0xB7DF38E4, 0x7C83EB41, 0x6186DBF9, 0xAADA085C, 0x2C4E7AF2, 0xE712A957,
0x15921919, 0xDECECABC, 0x585AB812, 0x93066BB7, 0x8E035B0F, 0x455F88AA, 0xC3CBFA04, 0x089729A1,
0xF9C19B74, 0x329D48D1, 0xB4093A7F, 0x7F55E9DA, 0x6250D962, 0xA90C0AC7, 0x2F987869, 0xE4C4ABCC)
CrcTableOffset80 = (0x00000000, 0xA6770BB4, 0x979F1129, 0x31E81A9D, 0xF44F2413, 0x52382FA7, 0x63D0353A, 0xC5A73E8E,
0x33EF4E67, 0x959845D3, 0xA4705F4E, 0x020754FA, 0xC7A06A74, 0x61D761C0, 0x503F7B5D, 0xF64870E9,
0x67DE9CCE, 0xC1A9977A, 0xF0418DE7, 0x56368653, 0x9391B8DD, 0x35E6B369, 0x040EA9F4, 0xA279A240,
0x5431D2A9, 0xF246D91D, 0xC3AEC380, 0x65D9C834, 0xA07EF6BA, 0x0609FD0E, 0x37E1E793, 0x9196EC27,
0xCFBD399C, 0x69CA3228, 0x582228B5, 0xFE552301, 0x3BF21D8F, 0x9D85163B, 0xAC6D0CA6, 0x0A1A0712,
0xFC5277FB, 0x5A257C4F, 0x6BCD66D2, 0xCDBA6D66, 0x081D53E8, 0xAE6A585C, 0x9F8242C1, 0x39F54975,
0xA863A552, 0x0E14AEE6, 0x3FFCB47B, 0x998BBFCF, 0x5C2C8141, 0xFA5B8AF5, 0xCBB39068, 0x6DC49BDC,
0x9B8CEB35, 0x3DFBE081, 0x0C13FA1C, 0xAA64F1A8, 0x6FC3CF26, 0xC9B4C492, 0xF85CDE0F, 0x5E2BD5BB,
0x440B7579, 0xE27C7ECD, 0xD3946450, 0x75E36FE4, 0xB044516A, 0x16335ADE, 0x27DB4043, 0x81AC4BF7,
0x77E43B1E, 0xD19330AA, 0xE07B2A37, 0x460C2183, 0x83AB1F0D, 0x25DC14B9, 0x14340E24, 0xB2430590,
0x23D5E9B7, 0x85A2E203, 0xB44AF89E, 0x123DF32A, 0xD79ACDA4, 0x71EDC610, 0x4005DC8D, 0xE672D739,
0x103AA7D0, 0xB64DAC64, 0x87A5B6F9, 0x21D2BD4D, 0xE47583C3, 0x42028877, 0x73EA92EA, 0xD59D995E,
0x8BB64CE5, 0x2DC14751, 0x1C295DCC, 0xBA5E5678, 0x7FF968F6, 0xD98E6342, 0xE86679DF, 0x4E11726B,
0xB8590282, 0x1E2E0936, 0x2FC613AB, 0x89B1181F, 0x4C162691, 0xEA612D25, 0xDB8937B8, 0x7DFE3C0C,
0xEC68D02B, 0x4A1FDB9F, 0x7BF7C102, 0xDD80CAB6, 0x1827F438, 0xBE50FF8C, 0x8FB8E511, 0x29CFEEA5,
0xDF879E4C, 0x79F095F8, 0x48188F65, 0xEE6F84D1, 0x2BC8BA5F, 0x8DBFB1EB, 0xBC57AB76, 0x1A20A0C2,
0x8816EAF2, 0x2E61E146, 0x1F89FBDB, 0xB9FEF06F, 0x7C59CEE1, 0xDA2EC555, 0xEBC6DFC8, 0x4DB1D47C,
0xBBF9A495, 0x1D8EAF21, 0x2C66B5BC, 0x8A11BE08, 0x4FB68086, 0xE9C18B32, 0xD82991AF, 0x7E5E9A1B,
0xEFC8763C, 0x49BF7D88, 0x78576715, 0xDE206CA1, 0x1B87522F, 0xBDF0599B, 0x8C184306, 0x2A6F48B2,
0xDC27385B, 0x7A5033EF, 0x4BB82972, 0xEDCF22C6, 0x28681C48, 0x8E1F17FC, 0xBFF70D61, 0x198006D5,
0x47ABD36E, 0xE1DCD8DA, 0xD034C247, 0x7643C9F3, 0xB3E4F77D, 0x1593FCC9, 0x247BE654, 0x820CEDE0,
0x74449D09, 0xD23396BD, 0xE3DB8C20, 0x45AC8794, 0x800BB91A, 0x267CB2AE, 0x1794A833, 0xB1E3A387,
0x20754FA0, 0x86024414, 0xB7EA5E89, 0x119D553D, 0xD43A6BB3, 0x724D6007, 0x43A57A9A, 0xE5D2712E,
0x139A01C7, 0xB5ED0A73, 0x840510EE, 0x22721B5A, 0xE7D525D4, 0x41A22E60, 0x704A34FD, 0xD63D3F49,
0xCC1D9F8B, 0x6A6A943F, 0x5B828EA2, 0xFDF58516, 0x3852BB98, 0x9E25B02C, 0xAFCDAAB1, 0x09BAA105,
0xFFF2D1EC, 0x5985DA58, 0x686DC0C5, 0xCE1ACB71, 0x0BBDF5FF, 0xADCAFE4B, 0x9C22E4D6, 0x3A55EF62,
0xABC30345, 0x0DB408F1, 0x3C5C126C, 0x9A2B19D8, 0x5F8C2756, 0xF9FB2CE2, 0xC813367F, 0x6E643DCB,
0x982C4D22, 0x3E5B4696, 0x0FB35C0B, 0xA9C457BF, 0x6C636931, 0xCA146285, 0xFBFC7818, 0x5D8B73AC,
0x03A0A617, 0xA5D7ADA3, 0x943FB73E, 0x3248BC8A, 0xF7EF8204, 0x519889B0, 0x6070932D, 0xC6079899,
0x304FE870, 0x9638E3C4, 0xA7D0F959, 0x01A7F2ED, 0xC400CC63, 0x6277C7D7, 0x539FDD4A, 0xF5E8D6FE,
0x647E3AD9, 0xC209316D, 0xF3E12BF0, 0x55962044, 0x90311ECA, 0x3646157E, 0x07AE0FE3, 0xA1D90457,
0x579174BE, 0xF1E67F0A, 0xC00E6597, 0x66796E23, 0xA3DE50AD, 0x05A95B19, 0x34414184, 0x92364A30)
CrcTableOffset88 = (0x00000000, 0xCCAA009E, 0x4225077D, 0x8E8F07E3, 0x844A0EFA, 0x48E00E64, 0xC66F0987, 0x0AC50919,
0xD3E51BB5, 0x1F4F1B2B, 0x91C01CC8, 0x5D6A1C56, 0x57AF154F, 0x9B0515D1, 0x158A1232, 0xD92012AC,
0x7CBB312B, 0xB01131B5, 0x3E9E3656, 0xF23436C8, 0xF8F13FD1, 0x345B3F4F, 0xBAD438AC, 0x767E3832,
0xAF5E2A9E, 0x63F42A00, 0xED7B2DE3, 0x21D12D7D, 0x2B142464, 0xE7BE24FA, 0x69312319, 0xA59B2387,
0xF9766256, 0x35DC62C8, 0xBB53652B, 0x77F965B5, 0x7D3C6CAC, 0xB1966C32, 0x3F196BD1, 0xF3B36B4F,
0x2A9379E3, 0xE639797D, 0x68B67E9E, 0xA41C7E00, 0xAED97719, 0x62737787, 0xECFC7064, 0x205670FA,
0x85CD537D, 0x496753E3, 0xC7E85400, 0x0B42549E, 0x01875D87, 0xCD2D5D19, 0x43A25AFA, 0x8F085A64,
0x562848C8, 0x9A824856, 0x140D4FB5, 0xD8A74F2B, 0xD2624632, 0x1EC846AC, 0x9047414F, 0x5CED41D1,
0x299DC2ED, 0xE537C273, 0x6BB8C590, 0xA712C50E, 0xADD7CC17, 0x617DCC89, 0xEFF2CB6A, 0x2358CBF4,
0xFA78D958, 0x36D2D9C6, 0xB85DDE25, 0x74F7DEBB, 0x7E32D7A2, 0xB298D73C, 0x3C17D0DF, 0xF0BDD041,
0x5526F3C6, 0x998CF358, 0x1703F4BB, 0xDBA9F425, 0xD16CFD3C, 0x1DC6FDA2, 0x9349FA41, 0x5FE3FADF,
0x86C3E873, 0x4A69E8ED, 0xC4E6EF0E, 0x084CEF90, 0x0289E689, 0xCE23E617, 0x40ACE1F4, 0x8C06E16A,
0xD0EBA0BB, 0x1C41A025, 0x92CEA7C6, 0x5E64A758, 0x54A1AE41, 0x980BAEDF, 0x1684A93C, 0xDA2EA9A2,
0x030EBB0E, 0xCFA4BB90, 0x412BBC73, 0x8D81BCED, 0x8744B5F4, 0x4BEEB56A, 0xC561B289, 0x09CBB217,
0xAC509190, 0x60FA910E, 0xEE7596ED, 0x22DF9673, 0x281A9F6A, 0xE4B09FF4, 0x6A3F9817, 0xA6959889,
0x7FB58A25, 0xB31F8ABB, 0x3D908D58, 0xF13A8DC6, 0xFBFF84DF, 0x37558441, 0xB9DA83A2, 0x7570833C,
0x533B85DA, 0x9F918544, 0x111E82A7, 0xDDB48239, 0xD7718B20, 0x1BDB8BBE, 0x95548C5D, 0x59FE8CC3,
0x80DE9E6F, 0x4C749EF1, 0xC2FB9912, 0x0E51998C, 0x04949095, 0xC83E900B, 0x46B197E8, 0x8A1B9776,
0x2F80B4F1, 0xE32AB46F, 0x6DA5B38C, 0xA10FB312, 0xABCABA0B, 0x6760BA95, 0xE9EFBD76, 0x2545BDE8,
0xFC65AF44, 0x30CFAFDA, 0xBE40A839, 0x72EAA8A7, 0x782FA1BE, 0xB485A120, 0x3A0AA6C3, 0xF6A0A65D,
0xAA4DE78C, 0x66E7E712, 0xE868E0F1, 0x24C2E06F, 0x2E07E976, 0xE2ADE9E8, 0x6C22EE0B, 0xA088EE95,
0x79A8FC39, 0xB502FCA7, 0x3B8DFB44, 0xF727FBDA, 0xFDE2F2C3, 0x3148F25D, 0xBFC7F5BE, 0x736DF520,
0xD6F6D6A7, 0x1A5CD639, 0x94D3D1DA, 0x5879D144, 0x52BCD85D, 0x9E16D8C3, 0x1099DF20, 0xDC33DFBE,
0x0513CD12, 0xC9B9CD8C, 0x4736CA6F, 0x8B9CCAF1, 0x8159C3E8, 0x4DF3C376, 0xC37CC495, 0x0FD6C40B,
0x7AA64737, 0xB60C47A9, 0x3883404A, 0xF42940D4, 0xFEEC49CD, 0x32464953, 0xBCC94EB0, 0x70634E2E,
0xA9435C82, 0x65E95C1C, 0xEB665BFF, 0x27CC5B61, 0x2D095278, 0xE1A352E6, 0x6F2C5505, 0xA386559B,
0x061D761C, 0xCAB77682, 0x44387161, 0x889271FF, 0x825778E6, 0x4EFD7878, 0xC0727F9B, 0x0CD87F05,
0xD5F86DA9, 0x19526D37, 0x97DD6AD4, 0x5B776A4A, 0x51B26353, 0x9D1863CD, 0x1397642E, 0xDF3D64B0,
0x83D02561, 0x4F7A25FF, 0xC1F5221C, 0x0D5F2282, 0x079A2B9B, 0xCB302B05, 0x45BF2CE6, 0x89152C78,
0x50353ED4, 0x9C9F3E4A, 0x121039A9, 0xDEBA3937, 0xD47F302E, 0x18D530B0, 0x965A3753, 0x5AF037CD,
0xFF6B144A, 0x33C114D4, 0xBD4E1337, 0x71E413A9, 0x7B211AB0, 0xB78B1A2E, 0x39041DCD, 0xF5AE1D53,
0x2C8E0FFF, 0xE0240F61, 0x6EAB0882, 0xA201081C, 0xA8C40105, 0x646E019B, 0xEAE10678, 0x264B06E6)
@staticmethod
def ComputeCRC(pv):
""" from [MS-PST]. dwCRC is zero. pv is bytes to CRC. cbLength is length of pv """
dwCRC = 0
cbLength = len(pv)
if cbLength < 4:
cbRunningLength = 0
else:
cbRunningLength = (cbLength//8)*8
cbEndUnalignedBytes = cbLength - cbRunningLength
index = 0
for i in range(1, (cbRunningLength//8) + 1):
dwCRC ^= struct.unpack('I',pv[index:index+4])[0]
dwCRC = CRC.CrcTableOffset88[dwCRC & 0x000000FF] ^ CRC.CrcTableOffset80[(dwCRC >> 8) & 0x000000FF] ^ CRC.CrcTableOffset72[(dwCRC >> 16) & 0x000000FF] ^ CRC.CrcTableOffset64[(dwCRC >> 24) & 0x000000FF]
index += 4
dw2nd32 = struct.unpack('I',pv[index:index+4])[0]
dwCRC = dwCRC ^ CRC.CrcTableOffset56[dw2nd32 & 0x000000FF] ^ CRC.CrcTableOffset48[(dw2nd32 >> 8) & 0x000000FF] ^ CRC.CrcTableOffset40[(dw2nd32 >> 16) & 0x000000FF] ^ CRC.CrcTableOffset32[(dw2nd32 >> 24) & 0x000000FF]
index += 4
for i in range(1, cbEndUnalignedBytes + 1):
dwCRC = CRC.CrcTableOffset32[(dwCRC ^ struct.unpack('B',pv[index:index+1])[0]) & 0x000000FF] ^ (dwCRC >> 8)
index += 1
return dwCRC
class FieldSize:
    """Byte widths of the primitive field types read while parsing the PST header."""
    BYTE = 1
    WORD = 2
    DWORD = 4
    ANSIDWORD = 8  # 8-byte quantity; used for the 64-bit fields of the unicode header
class Header:
    """Parses the fixed HEADER structure at the start of a PST file.

    After construction, ``validPST`` says whether the file looks like a PST;
    when valid, ``is_ansi``/``is_unicode`` give the format and ``root`` holds
    the parsed ROOT structure.  Field names follow the [MS-PST] HEADER layout.
    Reads are strictly sequential from *fd*, so statement order matters.
    """
    def __init__(self, fd):
        # common ansi/unicode fields
        fd.seek(0)
        self.dwMagic = fd.read(FieldSize.DWORD)
        self.dwCRCPartial = fd.read(FieldSize.DWORD) # ignore
        self.wMagicClient = fd.read(FieldSize.WORD)
        try:
            self.wVer, self.wVerClient, self.bPlatformCreate, self.bPlatformAccess = struct.unpack('HHBB',fd.read(FieldSize.WORD+FieldSize.WORD+FieldSize.BYTE+FieldSize.BYTE))
        except struct.error:
            # file too short to contain even the fixed header: not a PST
            self.validPST = False
            return
        self.dwReserved1 = fd.read(FieldSize.DWORD) # ignore
        self.dwReserved2 = fd.read(FieldSize.DWORD) # ignore
        # '!BDN' file magic plus 'SM' client magic identify a PST
        self.validPST = (self.dwMagic == b'!BDN' and self.wMagicClient == b'SM')
        if not self.validPST:
            return
        # wVer 14/15 => ANSI format; wVer 23 => unicode format
        self.is_ansi = (self.wVer in (14, 15))
        self.is_unicode = (self.wVer == 23)
        if not (self.is_ansi or self.is_unicode):
            self.validPST = False
            return
        if self.is_ansi:
            # ANSI layout: BIDs and file offsets are 32-bit
            self.bidNextB = BID(fd.read(FieldSize.DWORD))
            self.bidNextP = BID(fd.read(FieldSize.DWORD))
            self.dwUnique = fd.read(FieldSize.DWORD)
            self.rgnid = struct.unpack('IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII', fd.read(128))
            self.root = Root(fd.read(40), True)  # 40-byte ANSI ROOT
            self.rgbFM = fd.read(128) # unused
            self.rgbFP = fd.read(128) # unused
            self.bSentinel, self.bCryptMethod = struct.unpack('BB', fd.read(FieldSize.BYTE+FieldSize.BYTE))
            self.rgbReserved = fd.read(FieldSize.WORD) # unused
            self.ullReserved = fd.read(8) # unused
            self.dwReserved = fd.read(FieldSize.DWORD) # unused
            self.rgbReserved2 = fd.read(3) # unused
            self.bReserved = fd.read(1) # unused
            self.rgbReserved3 = fd.read(32) # unused
        if self.is_unicode:
            # Unicode layout: BIDs and file offsets are 64-bit
            self.bidUnused = fd.read(FieldSize.ANSIDWORD) # unused
            self.bidNextP = BID(fd.read(FieldSize.ANSIDWORD))
            #self.bidNextB = fd.read(FieldSize.ANSIDWORD) # the spec is wrong, example in appendix is correct
            self.dwUnique = fd.read(FieldSize.DWORD) # ignore
            self.rgnid = struct.unpack('IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII', fd.read(128))
            self.qwUnused = fd.read(FieldSize.ANSIDWORD) # unused
            self.root = Root(fd.read(72), False)  # 72-byte unicode ROOT
            self.dwAlign = fd.read(FieldSize.DWORD) # unused
            self.rgbFM = fd.read(128) # unused
            self.rgbFP = fd.read(128) # unused
            self.bSentinel, self.bCryptMethod = struct.unpack('BB', fd.read(FieldSize.BYTE+FieldSize.BYTE))
            self.rgbReserved = fd.read(FieldSize.WORD) # unused
            self.bidNextB = BID(fd.read(FieldSize.ANSIDWORD)) # repeated from above in spec
            self.dwCRCFull = fd.read(FieldSize.DWORD) # ignored
            self.rgbReserved2 = fd.read(3) # unused
            self.bReserved = fd.read(1) # unused
            self.rgbReserved3 = fd.read(32) # unused
class Root:
    """Parses the ROOT structure embedded in the PST header.

    *bytes* is the raw ROOT blob (40 bytes for ANSI, 72 for unicode);
    *is_ansi* selects the layout.  The reserved bytes at either end of the
    blob are skipped via the [4:-3] slice; the node- and block-BTree roots
    are wrapped in BREF objects.
    """
    def __init__(self, bytes, is_ansi):
        # ANSI uses 32-bit counts/offsets with 8-byte BREFs; unicode uses
        # 64-bit values with 16-byte BREFs.
        layout = 'IIII8s8sB' if is_ansi else 'QQQQ16s16sB'
        (self.ibFileEof, self.ibAMapLast, self.cbAMapFree, self.cbPMapFree,
            raw_nbt, raw_bbt, self.fAMapValid) = struct.unpack(layout, bytes[4:-3])
        self.BREFNBT = BREF(raw_nbt)
        self.BREFBBT = BREF(raw_bbt)
class PST:
def __init__(self, pst_file):
self.fd = open(pst_file,'rb')
self.header = Header(self.fd)
if not self.header.validPST:
raise PSTException('PST file is not a valid PST')
if self.header.bCryptMethod not in (0,1): # unencoded or NDB_CRYPT_PERMUTE
raise PSTException('Unsupported encoding/crypt method %s' % self.header.bCryptMethod)
self.nbd = NBD(self.fd, self.header)
self.ltp = LTP(self.nbd)
self.messaging = Messaging(self.ltp)
def close(self):
self.fd.close()
def folder_generator(self):
root_folder = self.messaging.get_folder(self.messaging.root_entryid, '')
subfolder_stack = root_folder.subfolders
yield root_folder
#Deleted Items should also be in root folder, so don't need to get this one
#bin_folder = self.messaging.get_folder(self.messaging.deleted_items_entryid, '')
#subfolder_stack.extend(bin_folder.subfolders)
#yield bin_folder
while subfolder_stack:
subfolder = subfolder_stack.pop()
try:
folder = Folder(subfolder.nid, self.ltp, subfolder.parent_path, self.messaging)
subfolder_stack.extend(folder.subfolders)
yield folder
except PSTException as e:
log_error(e)
def message_generator(self, folder):
try:
for submessage in folder.submessages:
try:
message = Message(submessage.nid, self.ltp, messaging=self.messaging)
yield message
except PSTException as e:
log_error(e)
except GeneratorExit:
pass
finally:
pass
def export_all_attachments(self, path='', progressbar = None, total_attachments = 0, overwrite=True):
"""dumps all attachments in the PST to a path"""
attachments_completed = 0
for folder in self.folder_generator():
for message in self.message_generator(folder):
if message.HasAttachments:
for subattachment in message.subattachments:
attachment = message.get_attachment(subattachment)
if len(attachment.data) !=0:
filepath = os.path.join(path, attachment.Filename)
if overwrite:
if os.path.exists(filepath):
os.remove(filepath)
else:
filepath = get_unused_filename(filepath)
write_file(filepath, attachment.data, 'wb')
attachments_completed += 1
if progressbar:
progressbar.update(attachments_completed * 100.0 / total_attachments)
def export_all_messages(self, path='', progressbar = None, total_messages = 0):
messages_completed = 0
for folder in self.folder_generator():
filepath = get_unused_filename(os.path.join(path, get_safe_filename(folder.path.replace('\\','_'))+'.txt'))
msg_txt = u''
for message in self.message_generator(folder):
msg_txt += u'Subject: %s\nFrom: %s (%s)\n' % (message.Subject, message.SenderName, message.SenderSmtpAddress)
msg_txt += u'To: %s\n' % ('; '.join([u'%s (%s)' % (subrecipient.DisplayName, subrecipient.EmailAddress) for subrecipient in message.subrecipients]))
msg_txt += u'Sent: %s\nDelivered: %s\n' % (message.ClientSubmitTime, message.MessageDeliveryTime)
msg_txt += u'MessageClass: %s\n' % (message.MessageClass)
if message.HasAttachments:
msg_txt += u'Attachments: %s\n' % (u', '.join([subattachment.__repr__() for subattachment in message.subattachments]))
msg_txt += u'\n%s\n\n\n' % message.Body
if msg_txt:
write_file(filepath, unicode2ascii(msg_txt), 'w')
messages_completed += 1
if progressbar:
progressbar.update(messages_completed * 100.0 / total_messages)
def get_total_message_count(self):
total_message_count = 0
for folder in self.folder_generator():
total_message_count += len(folder.submessages)
return total_message_count
def get_total_attachment_count(self):
    """Return the total number of attachment entries on all messages in the PST."""
    return sum(
        len(message.subattachments)
        for folder in self.folder_generator()
        for message in self.message_generator(folder)
        if message.HasAttachments
    )
def get_pst_status(self):
    """Return a one-line human-readable summary of the PST header and store."""
    display_name = self.messaging.message_store.getval(PropIdEnum.PidTagDisplayName)
    return u'Valid PST: %s, Unicode: %s, CryptMethod: %s, Name: %s, Password: %s' % (
        self.header.validPST,
        self.header.is_unicode,
        self.header.bCryptMethod,
        display_name,
        self.messaging.PasswordCRC32Hash,
    )
@staticmethod
def bruteforce(charset, maxlength):
return (''.join(candidate) for candidate in itertools.chain.from_iterable(itertools.product(charset, repeat=i) for i in range(1, maxlength + 1)))
@staticmethod
def crack_password(crc, dictionary_file=''):
    """either does a dictionary attack against the PST password CRC hash, or does a brute force of up to 4 chars

    crc: the stored PasswordCRC32Hash to match.
    dictionary_file: optional newline-separated word list; when given, only
        the dictionary attack runs (no brute force fallback).
    Returns the recovered password, or '' when nothing matches.
    """
    if dictionary_file:
        dic_entries = read_file(dictionary_file).split('\n')
        for password_check in dic_entries:
            # strip() removes trailing '\r'/whitespace from the word list line.
            crc_check = CRC.ComputeCRC(password_check.strip())
            if crc == crc_check:
                return password_check
    else: # brute force
        # Lowercase letters + digits only, lengths 1..4 (range stops at 5).
        charset = string.ascii_lowercase + string.digits
        for password_length in range(1,5):
            for password_check in PST.bruteforce(charset, password_length):
                crc_check = CRC.ComputeCRC(password_check)
                if crc == crc_check:
                    return password_check
    # No match from either strategy.
    return ''
###################################################################################################################################
# _ _ _ _ _ _ _ _____ _ _
# | | | | |_(_) (_) |_ _ _ | ___| _ _ __ ___| |_(_) ___ _ __ ___
# | | | | __| | | | __| | | | | |_ | | | | '_ \ / __| __| |/ _ \| '_ \/ __|
# | |_| | |_| | | | |_| |_| | | _|| |_| | | | | (__| |_| | (_) | | | \__ \
# \___/ \__|_|_|_|\__|\__, | |_| \__,_|_| |_|\___|\__|_|\___/|_| |_|___/
# |___/
###################################################################################################################################
def hex(i):
    """Format integer *i* as a '0x'-prefixed lowercase hex string.

    NOTE: shadows the builtin ``hex`` inside this module; kept as-is because
    existing callers rely on the name.
    """
    return '0x' + format(i, 'x')
def size_friendly(size):
    """Return *size* (bytes) as a coarse human-readable string, e.g. '2KB'.

    Values are floored to whole units: B below 1 KiB, then KB, MB and GB.
    """
    units = (('GB', 1024 ** 3), ('MB', 1024 ** 2), ('KB', 1024))
    for unit, divisor in units:
        if size >= divisor:
            return '%s%s' % (size // divisor, unit)
    return '%sB' % (size)
def unicode2ascii(unicode_str):
    """Best-effort ASCII transliteration: NFKD-decompose, then drop non-ASCII.

    Accented characters degrade to their base letter ('é' -> 'e'); anything
    with no ASCII decomposition is silently dropped. Returns bytes.
    """
    decomposed = unicodedata.normalize('NFKD', unicode_str)
    return decomposed.encode('ascii', 'ignore')
def write_file(fn, s, write_mode='w'):
    """Write *s* to file *fn* ('w' for text, 'wb' for bytes).

    Uses a context manager so the handle is closed even if write() raises;
    the original leaked the file object on error.
    """
    with open(fn, write_mode) as f:
        f.write(s)
def read_file(fn, open_mode="r"):
    """Read and return the full contents of *fn* ('r' text, 'rb' bytes).

    Uses a context manager so the handle is closed even if read() raises;
    the original leaked the file object on error.
    """
    with open(fn, open_mode) as f:
        return f.read()
def get_unused_filename(filepath):
    """adds numbered suffix to filepath if filename already exists

    'x.txt' becomes 'x-1.txt', then 'x-2.txt', ... until a free name is
    found; a non-existing path is returned unchanged.
    """
    if os.path.exists(filepath):
        # Hoist the split out of the loop (the original re-split on every probe).
        root, ext = os.path.splitext(filepath)
        suffix = 1
        while os.path.exists('%s-%s%s' % (root, suffix, ext)):
            suffix += 1
        filepath = '%s-%s%s' % (root, suffix, ext)
    return filepath
def get_safe_filename(filename):
    """Strip characters that are unsafe or special in file names."""
    unsafe_chars = '/\\;,><&*:%=+@!#^()|?'
    return filename.translate({ord(ch): None for ch in unsafe_chars})
def log_error(e):
    """Append the exception's message to the global error log and echo it to stderr."""
    global error_log_list
    # BUG FIX: Exception.message does not exist on Python 3 (and was
    # deprecated on 2.6+); str(e) is portable and equivalent for
    # single-argument exceptions.
    error_message = str(e)
    error_log_list.append(error_message)
    sys.stderr.write(error_message + '\n')
###############################################################################################################################
#
# _____ _ _____ _ _
# |_ _|__ ___| |_ | ___| _ _ __ ___| |_(_) ___ _ __ ___
# | |/ _ \/ __| __| | |_ | | | | '_ \ / __| __| |/ _ \| '_ \/ __|
# | | __/\__ \ |_ | _|| |_| | | | | (__| |_| | (_) | | | \__ \
# |_|\___||___/\__| |_| \__,_|_| |_|\___|\__|_|\___/|_| |_|___/
#
###############################################################################################################################
def test_status_pst(pst_filepath):
    """Print a status summary plus message/attachment totals for one PST file.

    pst_filepath: path of the PST file to inspect.
    """
    pst = PST(pst_filepath)
    try:
        print(unicode2ascii(pst.get_pst_status()))
        print('Total Messages: %s' % pst.get_total_message_count())
        print('Total Attachments: %s' % pst.get_total_attachment_count())
    finally:
        # Close the PST even if one of the counting passes raises
        # (the original leaked the open file on error).
        pst.close()
def get_simple_progressbar(title):
    """Build and start a progress bar showing *title*, percent, a rotating bar and ETA."""
    widgets = [
        title,
        progressbar.Percentage(),
        ' ',
        progressbar.Bar(marker=progressbar.RotatingMarker()),
        ' ',
        progressbar.ETA(),
    ]
    return progressbar.ProgressBar(widgets=widgets).start()
def test_dump_pst(pst_filepath, output_path):
    """ dump out all PST email attachments and emails (into text files) to output_path folder

    pst_filepath: PST file to read.
    output_path: existing directory that receives the text/attachment dumps.
    """
    pst = PST(pst_filepath)
    print(pst.get_pst_status())
    try:
        pbar = get_simple_progressbar('Messages: ')
        total_messages = pst.get_total_message_count()
        pst.export_all_messages(output_path, pbar, total_messages)
        pbar.finish()
        pbar = get_simple_progressbar('Attachments: ')
        total_attachments = pst.get_total_attachment_count()
        pst.export_all_attachments(output_path, pbar, total_attachments)
        pbar.finish()
    finally:
        # Close the PST even if an export step raises
        # (the original leaked the open file on error).
        pst.close()
def test_folder_psts(psts_folder):
    """Open every .pst file in *psts_folder*, collect a status line (plus a
    cracked password, when one is set) per file, and write the combined
    report to psts_test.txt inside the same folder.
    """
    global error_log_list
    s = ''
    for pst_filepath in [os.path.join(psts_folder, filename) for filename in os.listdir(psts_folder) if os.path.isfile(os.path.join(psts_folder, filename)) and os.path.splitext(filename.lower())[1] == '.pst']:
        try:
            s += 'Opening %s\n' % pst_filepath
            # Reset the per-file error log that log_error() appends to.
            error_log_list = []
            pst = PST(pst_filepath)
            status = unicode2ascii(pst.get_pst_status())
            print(status)
            password = ''
            if pst.messaging.PasswordCRC32Hash:
                password = pst.crack_password(pst.messaging.PasswordCRC32Hash)
                if password:
                    password = ' (%s)' % password
            # NOTE(review): unicode2ascii() returns bytes, so on Python 3
            # 'status + password' would raise TypeError (bytes + str) —
            # confirm the target interpreter version.
            s += status + password + '\n'
            pst.close()
            s += '\n'.join(error_log_list)
            s += '\n\n\n'
        except Exception as e:
            # Best-effort batch run: record the failure and continue with
            # the next PST file.
            s += 'ERROR: %s\n' % e
    write_file(os.path.join(psts_folder, 'psts_test.txt'), s)
###################################################################################################################################
# __ __ _
# | \/ | __ _(_)_ __
# | |\/| |/ _` | | '_ \
# | | | | (_| | | | | |
# |_| |_|\__,_|_|_| |_|
#
###################################################################################################################################
if __name__=="__main__":
    # Command-line entry point: parse -i/-o and dump the PST's messages and
    # attachments into the output folder.
    input_pst_file = ''
    output_folder = 'dump'
    arg_parser = argparse.ArgumentParser(prog='pst', description='PST: parses PST files. Can dump emails and attachments.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument('-i', dest='input_pst_file', default=input_pst_file, help='input PST file to dump')
    arg_parser.add_argument('-o', dest='output_folder', default=output_folder, help='output folder')
    arg_parser.add_argument('-t', dest='debug', help=argparse.SUPPRESS, action='store_true', default=False) # hidden argument
    args = arg_parser.parse_args()
    if not args.debug:
        input_pst_file = args.input_pst_file
        output_folder = args.output_folder
        # Validate both paths before doing any work; exit code 1 on error.
        if not os.path.exists(input_pst_file):
            print('Input PST file does not exist')
            sys.exit(1)
        if not os.path.exists(output_folder):
            print('Output folder does not exist')
            sys.exit(1)
        test_dump_pst(input_pst_file,output_folder)
    else: # debug
        # Developer playground: uncomment one of the helpers below.
        pass
        #test_folder = 'D:\\'
        #test_status_pst(test_folder+'sample.pst')
        #test_dump_pst(test_folder+'sample.pst', test_folder+'dump')
        #test_folder_psts(test_folder)
| 49,587 | 44,635 | 2,881 |
97670336a25763b3b30ba76cb087cbb3a93e0734 | 1,926 | py | Python | masd/algorithms/ungherese.py | ale-cci/Appunti | 9aa76eb46b7a984776f50260e598c9de1fa6317a | [
"MIT"
] | null | null | null | masd/algorithms/ungherese.py | ale-cci/Appunti | 9aa76eb46b7a984776f50260e598c9de1fa6317a | [
"MIT"
] | null | null | null | masd/algorithms/ungherese.py | ale-cci/Appunti | 9aa76eb46b7a984776f50260e598c9de1fa6317a | [
"MIT"
] | null | null | null | '''
d_ij = matrice dei costi
per ogni colonna trovo il minimo della colonna
a ogni elemento della colonna sottraggo il minimo (d0)
si ottiene una nuova matrice con pesi >= 0
---
stessa operazione con le righe (d1)
---
ottengo matrice di coefficienti d2
assegnamento rappresentato da matrice X
dove x_ij = 1 se i assegnato a j, 0 else
Nota: sum(x[i][j] for j in V) == 1 # 1 solo nodo assegnato
stesso vale per j
Nota: costo assegnamento = sum(d_ij * x_ij)
= sum(d2 * x_ij) + sum(d1) + sum(d0)
quindi D1 = sum(d1) + D0 = sum(d0) <= sum(d_ij * x_ij) rappresenta
un lower bound del costo
se si trova un matching di costo D1 + D0 allora è ottimo
---
trovare sottinsieme di 0 in d2 di cardinalità max (1 solo x riga e colonna)
se insieme ammette soluzione di cardinalità
--- per trovare delta:
dati due insiemi di vertici A e B traccio un arco sse d2_ij = 0, il problema diventa quindi di trovare
un matching di cardinalità massima (delta)
se delta = n
1h02 dimostraz: siccome gli elementi devono essere indipendenti per
il principio della piccionaia e delta ha cardinalità n, sum_i(x_ij) == 1 e sum_j(x_ij) == 1
se delta < n
in caso non si riesca a trovare
va risolto un sottoproblema
---
determinare il valore minimo lambda tra gli elementi T2 non coperti da nessuna linea.
(gli elementi non ricoperti sono tutti strettamente positivi siccome tutti gli zeri sono coperti)
* incremento tutti gli elementi ricoperti da due linee di lambda.
* decremento tutti gli elementi non ricoperti di lambda.
indicato con h1 il numero di riche nel ricoprimento e con h2 il numero di colonne nel ricoprimento,
: h1 + h2 = delta
si ha
sum_i(d3_i) = - lambda * (#righe notin ricoprimento) = - lambda (n - h1)
sum_j(d3_j) = lambda * (#colonne ricoprimento) = lambda (h2)
---
'''
if __name__ == '__main__':
    # Smoke test: run the Hungarian assignment algorithm on a 3x3 cost
    # matrix. NOTE(review): ungherese() must be defined above for this to
    # run — it is absent in this copy of the file; confirm.
    mtx = [[1, 2, 3],
           [4, 5, 6],
           [7, 8, 9]]
    ungherese(mtx)
| 26.027027 | 102 | 0.7108 | '''
d_ij = matrice dei costi
per ogni colonna trovo il minimo della colonna
a ogni elemento della colonna sottraggo il minimo (d0)
si ottiene una nuova matrice con pesi >= 0
---
stessa operazione con le righe (d1)
---
ottengo matrice di coefficienti d2
assegnamento rappresentato da matrice X
dove x_ij = 1 se i assegnato a j, 0 else
Nota: sum(x[i][j] for j in V) == 1 # 1 solo nodo assegnato
stesso vale per j
Nota: costo assegnamento = sum(d_ij * x_ij)
= sum(d2 * x_ij) + sum(d1) + sum(d0)
quindi D1 = sum(d1) + D0 = sum(d0) <= sum(d_ij * x_ij) rappresenta
un lower bound del costo
se si trova un matching di costo D1 + D0 allora è ottimo
---
trovare sottinsieme di 0 in d2 di cardinalità max (1 solo x riga e colonna)
se insieme ammette soluzione di cardinalità
--- per trovare delta:
dati due insiemi di vertici A e B traccio un arco sse d2_ij = 0, il problema diventa quindi di trovare
un matching di cardinalità massima (delta)
se delta = n
1h02 dimostraz: siccome gli elementi devono essere indipendenti per
il principio della piccionaia e delta ha cardinalità n, sum_i(x_ij) == 1e sum_j(x_ij) == 1
se delta < n
in caso non si riesca a trovare
va risolto un sottoproblema
---
determinare il valore minimo lambda tra gli elementi T2 non coperti da nessuna linea.
(gli elementi non ricoperti sono tutti strettamente positivi siccome tutti gli zeri sono coperti)
* incremento tutti gli elementi ricoperti da due linee di lambda.
* decremento tutti gli elementi non ricoperti di lambda.
indicato con h1 il numero di riche nel ricoprimento e con h2 il numero di colonne nel ricoprimento,
: h1 + h2 = delta
si ha
sum_i(d3_i) = - lambda * (#righe notin ricoprimento) = - lambda (n - h1)
sum_j(d3_j) = lambda * (#colonne ricoprimento) = lambda (h2)
---
'''
def ungherese(d):
    """Hungarian (Kuhn-Munkres) assignment algorithm over cost matrix *d*.

    Stub: the intended algorithm is described step by step in the module
    docstring above; no implementation exists yet.
    """
    pass
if __name__ == '__main__':
    # Smoke test on a small 3x3 cost matrix.
    cost_matrix = [
        [1, 2, 3],
        [4, 5, 6],
        [7, 8, 9],
    ]
    ungherese(cost_matrix)
5866eef41c41e230d1e5d244e9515821ab57b038 | 12,579 | py | Python | okama/plots.py | servnk/okama | 50232226437496aae67f98906c5c0634ee285cc7 | [
"MIT"
] | null | null | null | okama/plots.py | servnk/okama | 50232226437496aae67f98906c5c0634ee285cc7 | [
"MIT"
] | null | null | null | okama/plots.py | servnk/okama | 50232226437496aae67f98906c5c0634ee285cc7 | [
"MIT"
] | null | null | null | import itertools
from typing import List, Optional, Union
from matplotlib import pyplot as plt
from .asset_list import AssetList
from .common.helpers import Float
from .frontier.single_period import EfficientFrontier
from .settings import default_ticker
class Plots(AssetList):
    """
    Plotting tools collection to use with financial charts (Efficient Frontier, Assets and Transition map etc.)

    Parameters
    ----------
    assets : list, default None
        List of assets. Could include tickers or asset like objects (Asset, Portfolio).
        If None a single asset list with a default ticker is used.
    first_date : str, default None
        First date of monthly return time series.
        If None the first date is calculated automatically as the oldest available date for the listed assets.
    last_date : str, default None
        Last date of monthly return time series.
        If None the last date is calculated automatically as the newest available date for the listed assets.
    ccy : str, default 'USD'
        Base currency for the list of assets. All risk metrics and returns are adjusted to the base currency.
    inflation : bool, default True
        Defines whether to take inflation data into account in the calculations.
        Including inflation could limit available data (last_date, first_date)
        as the inflation data is usually published with a one-month delay.
        With inflation = False some properties like real return are not available.
    """

    def __init__(
        self,
        assets: Optional[List[str]] = None,
        first_date: Optional[str] = None,
        last_date: Optional[str] = None,
        ccy: str = "USD",
        inflation: bool = True,
    ):
        # RESTORED: this copy of the class referenced self.ax /
        # self._verify_axes() without defining them. Also, 'None' replaces
        # the mutable default list [default_ticker] (shared between calls).
        if assets is None:
            assets = [default_ticker]
        super().__init__(
            assets,
            first_date=first_date,
            last_date=last_date,
            ccy=ccy,
            inflation=inflation,
        )
        self.ax = None  # matplotlib axes the plotting methods draw on
        self._bool_inflation = inflation

    def _verify_axes(self):
        # Drop any stale axes reference and grab the current axes.
        if self.ax:
            del self.ax
        self.ax = plt.gca()

    def plot_assets(
        self,
        kind: str = "mean",
        tickers: Union[str, list] = "tickers",
        pct_values: bool = False,
    ) -> plt.axes:
        """
        Plot the assets points on the risk-return chart with annotations.

        Annualized values for risk and return are used.
        Risk is a standard deviation of monthly rate of return time series.
        Return can be an annualized mean return (expected return) or CAGR (Compound annual growth rate).

        Returns
        -------
        Axes : 'matplotlib.axes._subplots.AxesSubplot'

        Parameters
        ----------
        kind : {'mean', 'cagr'}, default 'mean'
            Type of Return: annualized mean return (expected return) or CAGR (Compound annual growth rate).
        tickers : {'tickers', 'names'} or list of str, default 'tickers'
            Annotation type for assets.
            'tickers' - assets symbols are shown in form of 'SPY.US'
            'names' - assets names are used like - 'SPDR S&P 500 ETF Trust'
            To show custom annotations for each asset pass the list of names.
        pct_values : bool, default False
            Risk and return values in the axes:
            Algebraic annotation (False)
            Percents (True)

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> x = ok.Plots(['SPY.US', 'AGG.US'], ccy='USD', inflation=False)
        >>> x.plot_assets()
        >>> plt.show()

        Plotting with default parameters values shows expected return, ticker annotations and algebraic values
        for risk and return.
        To use CAGR instead of expected return use kind='cagr'.

        >>> x.plot_assets(kind='cagr',
        ...               tickers=['US Stocks', 'US Bonds'],  # use custom annotations for the assets
        ...               pct_values=True  # risk and return values are in percents
        ...               )
        >>> plt.show()
        """
        if kind == "mean":
            risks = self.risk_annual
            returns = Float.annualize_return(self.assets_ror.mean())
        elif kind == "cagr":
            risks = self.risk_annual
            returns = self.get_cagr().loc[self.symbols]
        else:
            raise ValueError('kind should be "mean" or "cagr".')
        # set lists for single point scatter
        if len(self.symbols) < 2:
            risks = [risks]
            returns = [returns]
        # set the plot
        self._verify_axes()
        # BUG FIX: the original passed axis="year", which is not a valid
        # matplotlib axis name ('x', 'y' or 'both') and raises ValueError on
        # recent matplotlib; "y" is assumed to be the intent — TODO confirm.
        plt.autoscale(enable=True, axis="y", tight=False)
        m = 100 if pct_values else 1
        # Scale element-wise: with a single asset `risks` is a plain list,
        # and `risks * m` would replicate the point m times instead of
        # scaling it.
        self.ax.scatter([risk * m for risk in risks], [ret * m for ret in returns])
        # Set the labels
        if tickers == "tickers":
            asset_labels = self.symbols
        elif tickers == "names":
            asset_labels = list(self.names.values())
        else:
            if not isinstance(tickers, list):
                raise ValueError(
                    f"tickers parameter should be a list of string labels."
                )
            if len(tickers) != len(self.symbols):
                raise ValueError("labels and tickers must be of the same length")
            asset_labels = tickers
        # draw the points and print the labels
        for label, x, y in zip(asset_labels, risks, returns):
            self.ax.annotate(
                label,  # this is the text
                (x * m, y * m),  # this is the point to label
                textcoords="offset points",  # how to position the text
                xytext=(0, 10),  # distance from text to points (x,y)
                ha="center",  # horizontal alignment can be left, right or center
            )
        return self.ax

    def plot_transition_map(
        self, bounds=None, full_frontier=False, cagr=True
    ) -> plt.axes:
        """
        Plot Transition Map for optimized portfolios on the single period Efficient Frontier.

        Transition Map shows the relation between asset weights and optimized portfolios properties:

        - CAGR (Compound annual growth rate)
        - Risk (annualized standard deviation of return)

        Wights are displayed on the y-axis.
        CAGR or Risk - on the x-axis.

        Constrained optimization with weights bounds is available.

        Returns
        -------
        Axes : 'matplotlib.axes._subplots.AxesSubplot'

        Parameters
        ----------
        bounds: tuple of ((float, float),...)
            Bounds for the assets weights. Each asset can have weights limitation from 0 to 1.0.
            If an asset has limitation for 10 to 20%, bounds are defined as (0.1, 0.2).
            bounds = ((0, .5), (0, 1)) shows that in Portfolio with two assets first one has weight limitations
            from 0 to 50%. The second asset has no limitations.
        full_frontier : bool, default False
            Defines whether to show the Transition Map for portfolios on the full Efficient Frontier or
            only on its upper part.
            If 'False' only portfolios with the return above Global Minimum Volatility (GMV) point are shown.
        cagr : bool, default True
            Show the relation between weights and CAGR (if True) or between weights and Risk (if False).
            of - sets X axe to CAGR (if true) or to risk (if false).
            CAGR or Risk are displayed on the x-axis.

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> x = ok.Plots(['SPY.US', 'AGG.US', 'GLD.US'], ccy='USD', inflation=False)
        >>> x.plot_transition_map()
        >>> plt.show()

        Transition Map with default setting show the relation between Return (CAGR) and assets weights for optimized portfolios.
        The same relation for Risk can be shown setting cagr=False.

        >>> x.plot_transition_map(cagr=False,
        ...                       full_frontier=True,  # to see the relation for the full Efficient Frontier
        ...                       )
        >>> plt.show()
        """
        ef = EfficientFrontier(
            assets=self.symbols,
            first_date=self.first_date,
            last_date=self.last_date,
            ccy=self.currency,
            inflation=self._bool_inflation,
            bounds=bounds,
            full_frontier=full_frontier,
            n_points=20,
        ).ef_points
        # BUG FIX: create the figure *before* grabbing the current axes;
        # the original called _verify_axes() first, so the lines were drawn
        # on the previous figure while tight_layout() ran on an empty one.
        fig = plt.figure(figsize=(12, 6))
        self._verify_axes()
        linestyle = itertools.cycle(("-", "--", ":", "-."))
        x_axe = "CAGR" if cagr else "Risk"
        for i in ef:
            if i not in (
                "Risk",
                "Mean return",
                "CAGR",
            ):  # select only columns with tickers
                self.ax.plot(
                    ef[x_axe], ef.loc[:, i], linestyle=next(linestyle), label=i
                )
        self.ax.set_xlim(ef[x_axe].min(), ef[x_axe].max())
        if cagr:
            self.ax.set_xlabel("CAGR (Compound Annual Growth Rate)")
        else:
            self.ax.set_xlabel("Risk (volatility)")
        self.ax.set_ylabel("Weights of assets")
        self.ax.legend(loc="upper left", frameon=False)
        fig.tight_layout()
        return self.ax

    def plot_pair_ef(self, tickers="tickers", bounds=None) -> plt.axes:
        """
        Plot Efficient Frontier of every pair of assets.

        Efficient Frontier is a set of portfolios which satisfy the condition that no other portfolio exists
        with a higher expected return but with the same risk (standard deviation of return).

        Arithmetic mean (expected return) is used for optimized portfolios.

        Returns
        -------
        Axes : 'matplotlib.axes._subplots.AxesSubplot'

        Parameters
        ----------
        tickers : {'tickers', 'names'} or list of str, default 'tickers'
            Annotation type for assets.
            'tickers' - assets symbols are shown in form of 'SPY.US'
            'names' - assets names are used like - 'SPDR S&P 500 ETF Trust'
            To show custom annotations for each asset pass the list of names.
        bounds: tuple of ((float, float),...)
            Bounds for the assets weights. Each asset can have weights limitation from 0 to 1.0.
            If an asset has limitation for 10 to 20%, bounds are defined as (0.1, 0.2).
            bounds = ((0, .5), (0, 1)) shows that in Portfolio with two assets first one has weight limitations
            from 0 to 50%. The second asset has no limitations.

        Notes
        -----
        It should be at least 3 assets.

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> ls4 = ['SPY.US', 'BND.US', 'GLD.US', 'VNQ.US']
        >>> curr = 'USD'
        >>> last_date = '07-2021'
        >>> ok.Plots(ls4, ccy=curr, last_date=last_date).plot_pair_ef()
        >>> plt.show()

        It can be useful to plot the full Efficent Frontier (EF) with optimized 4 assets portfolios
        together with the EFs for each pair of assets.

        >>> ef4 = ok.EfficientFrontier(assets=ls4, ccy=curr, n_points=100)
        >>> df4 = ef4.ef_points
        >>> fig = plt.figure()
        >>> # Plot Efficient Frontier of every pair of assets. Optimized portfolios will have 2 assets.
        >>> ok.Plots(ls4, ccy=curr, last_date=last_date).plot_pair_ef()  # mean return is used for optimized portfolios.
        >>> ax = plt.gca()
        >>> # Plot the full Efficient Frontier for 4 asset portfolios.
        >>> ax.plot(df4['Risk'], df4['Mean return'], color = 'black', linestyle='--')
        >>> plt.show()
        """
        if len(self.symbols) < 3:
            raise ValueError("The number of symbols cannot be less than 3")
        self._verify_axes()
        for i in itertools.combinations(self.symbols, 2):
            sym_pair = list(i)
            # Map the pair back to its positions so per-asset bounds follow.
            index0 = self.symbols.index(sym_pair[0])
            index1 = self.symbols.index(sym_pair[1])
            if bounds:
                bounds_pair = (bounds[index0], bounds[index1])
            else:
                bounds_pair = None
            ef = EfficientFrontier(
                assets=sym_pair,
                ccy=self.currency,
                first_date=self.first_date,
                last_date=self.last_date,
                inflation=self._bool_inflation,
                full_frontier=True,
                bounds=bounds_pair,
            ).ef_points
            self.ax.plot(ef["Risk"], ef["Mean return"])
        self.plot_assets(kind="mean", tickers=tickers)
        return self.ax
| 39.806962 | 128 | 0.584546 | import itertools
from typing import List, Optional, Union
from matplotlib import pyplot as plt
from .asset_list import AssetList
from .common.helpers import Float
from .frontier.single_period import EfficientFrontier
from .settings import default_ticker
class Plots(AssetList):
    """
    Plotting tools collection to use with financial charts (Efficient Frontier, Assets and Transition map etc.)

    Parameters
    ----------
    assets : list, default None
        List of assets. Could include tickers or asset like objects (Asset, Portfolio).
        If None a single asset list with a default ticker is used.
    first_date : str, default None
        First date of monthly return time series.
        If None the first date is calculated automatically as the oldest available date for the listed assets.
    last_date : str, default None
        Last date of monthly return time series.
        If None the last date is calculated automatically as the newest available date for the listed assets.
    ccy : str, default 'USD'
        Base currency for the list of assets. All risk metrics and returns are adjusted to the base currency.
    inflation : bool, default True
        Defines whether to take inflation data into account in the calculations.
        Including inflation could limit available data (last_date, first_date)
        as the inflation data is usually published with a one-month delay.
        With inflation = False some properties like real return are not available.
    """

    def __init__(
        self,
        assets: Optional[List[str]] = None,
        first_date: Optional[str] = None,
        last_date: Optional[str] = None,
        ccy: str = "USD",
        inflation: bool = True,
    ):
        # BUG FIX: 'None' replaces the mutable default list [default_ticker],
        # which was shared between all calls; behavior for callers is unchanged.
        if assets is None:
            assets = [default_ticker]
        super().__init__(
            assets,
            first_date=first_date,
            last_date=last_date,
            ccy=ccy,
            inflation=inflation,
        )
        self.ax = None  # matplotlib axes the plotting methods draw on
        self._bool_inflation = inflation

    def _verify_axes(self):
        # Drop any stale axes reference and grab the current axes.
        if self.ax:
            del self.ax
        self.ax = plt.gca()

    def plot_assets(
        self,
        kind: str = "mean",
        tickers: Union[str, list] = "tickers",
        pct_values: bool = False,
    ) -> plt.axes:
        """
        Plot the assets points on the risk-return chart with annotations.

        Annualized values for risk and return are used.
        Risk is a standard deviation of monthly rate of return time series.
        Return can be an annualized mean return (expected return) or CAGR (Compound annual growth rate).

        Returns
        -------
        Axes : 'matplotlib.axes._subplots.AxesSubplot'

        Parameters
        ----------
        kind : {'mean', 'cagr'}, default 'mean'
            Type of Return: annualized mean return (expected return) or CAGR (Compound annual growth rate).
        tickers : {'tickers', 'names'} or list of str, default 'tickers'
            Annotation type for assets.
            'tickers' - assets symbols are shown in form of 'SPY.US'
            'names' - assets names are used like - 'SPDR S&P 500 ETF Trust'
            To show custom annotations for each asset pass the list of names.
        pct_values : bool, default False
            Risk and return values in the axes:
            Algebraic annotation (False)
            Percents (True)

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> x = ok.Plots(['SPY.US', 'AGG.US'], ccy='USD', inflation=False)
        >>> x.plot_assets()
        >>> plt.show()

        Plotting with default parameters values shows expected return, ticker annotations and algebraic values
        for risk and return.
        To use CAGR instead of expected return use kind='cagr'.

        >>> x.plot_assets(kind='cagr',
        ...               tickers=['US Stocks', 'US Bonds'],  # use custom annotations for the assets
        ...               pct_values=True  # risk and return values are in percents
        ...               )
        >>> plt.show()
        """
        if kind == "mean":
            risks = self.risk_annual
            returns = Float.annualize_return(self.assets_ror.mean())
        elif kind == "cagr":
            risks = self.risk_annual
            returns = self.get_cagr().loc[self.symbols]
        else:
            raise ValueError('kind should be "mean" or "cagr".')
        # set lists for single point scatter
        if len(self.symbols) < 2:
            risks = [risks]
            returns = [returns]
        # set the plot
        self._verify_axes()
        # BUG FIX: the original passed axis="year", which is not a valid
        # matplotlib axis name ('x', 'y' or 'both') and raises ValueError on
        # recent matplotlib; "y" is assumed to be the intent — TODO confirm.
        plt.autoscale(enable=True, axis="y", tight=False)
        m = 100 if pct_values else 1
        # Scale element-wise: with a single asset `risks` is a plain list,
        # and `risks * m` would replicate the point m times instead of
        # scaling it.
        self.ax.scatter([risk * m for risk in risks], [ret * m for ret in returns])
        # Set the labels
        if tickers == "tickers":
            asset_labels = self.symbols
        elif tickers == "names":
            asset_labels = list(self.names.values())
        else:
            if not isinstance(tickers, list):
                raise ValueError(
                    f"tickers parameter should be a list of string labels."
                )
            if len(tickers) != len(self.symbols):
                raise ValueError("labels and tickers must be of the same length")
            asset_labels = tickers
        # draw the points and print the labels
        for label, x, y in zip(asset_labels, risks, returns):
            self.ax.annotate(
                label,  # this is the text
                (x * m, y * m),  # this is the point to label
                textcoords="offset points",  # how to position the text
                xytext=(0, 10),  # distance from text to points (x,y)
                ha="center",  # horizontal alignment can be left, right or center
            )
        return self.ax

    def plot_transition_map(
        self, bounds=None, full_frontier=False, cagr=True
    ) -> plt.axes:
        """
        Plot Transition Map for optimized portfolios on the single period Efficient Frontier.

        Transition Map shows the relation between asset weights and optimized portfolios properties:

        - CAGR (Compound annual growth rate)
        - Risk (annualized standard deviation of return)

        Wights are displayed on the y-axis.
        CAGR or Risk - on the x-axis.

        Constrained optimization with weights bounds is available.

        Returns
        -------
        Axes : 'matplotlib.axes._subplots.AxesSubplot'

        Parameters
        ----------
        bounds: tuple of ((float, float),...)
            Bounds for the assets weights. Each asset can have weights limitation from 0 to 1.0.
            If an asset has limitation for 10 to 20%, bounds are defined as (0.1, 0.2).
            bounds = ((0, .5), (0, 1)) shows that in Portfolio with two assets first one has weight limitations
            from 0 to 50%. The second asset has no limitations.
        full_frontier : bool, default False
            Defines whether to show the Transition Map for portfolios on the full Efficient Frontier or
            only on its upper part.
            If 'False' only portfolios with the return above Global Minimum Volatility (GMV) point are shown.
        cagr : bool, default True
            Show the relation between weights and CAGR (if True) or between weights and Risk (if False).
            of - sets X axe to CAGR (if true) or to risk (if false).
            CAGR or Risk are displayed on the x-axis.

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> x = ok.Plots(['SPY.US', 'AGG.US', 'GLD.US'], ccy='USD', inflation=False)
        >>> x.plot_transition_map()
        >>> plt.show()

        Transition Map with default setting show the relation between Return (CAGR) and assets weights for optimized portfolios.
        The same relation for Risk can be shown setting cagr=False.

        >>> x.plot_transition_map(cagr=False,
        ...                       full_frontier=True,  # to see the relation for the full Efficient Frontier
        ...                       )
        >>> plt.show()
        """
        ef = EfficientFrontier(
            assets=self.symbols,
            first_date=self.first_date,
            last_date=self.last_date,
            ccy=self.currency,
            inflation=self._bool_inflation,
            bounds=bounds,
            full_frontier=full_frontier,
            n_points=20,
        ).ef_points
        # BUG FIX: create the figure *before* grabbing the current axes;
        # the original called _verify_axes() first, so the lines were drawn
        # on the previous figure while tight_layout() ran on an empty one.
        fig = plt.figure(figsize=(12, 6))
        self._verify_axes()
        linestyle = itertools.cycle(("-", "--", ":", "-."))
        x_axe = "CAGR" if cagr else "Risk"
        for i in ef:
            if i not in (
                "Risk",
                "Mean return",
                "CAGR",
            ):  # select only columns with tickers
                self.ax.plot(
                    ef[x_axe], ef.loc[:, i], linestyle=next(linestyle), label=i
                )
        self.ax.set_xlim(ef[x_axe].min(), ef[x_axe].max())
        if cagr:
            self.ax.set_xlabel("CAGR (Compound Annual Growth Rate)")
        else:
            self.ax.set_xlabel("Risk (volatility)")
        self.ax.set_ylabel("Weights of assets")
        self.ax.legend(loc="upper left", frameon=False)
        fig.tight_layout()
        return self.ax

    def plot_pair_ef(self, tickers="tickers", bounds=None) -> plt.axes:
        """
        Plot Efficient Frontier of every pair of assets.

        Efficient Frontier is a set of portfolios which satisfy the condition that no other portfolio exists
        with a higher expected return but with the same risk (standard deviation of return).

        Arithmetic mean (expected return) is used for optimized portfolios.

        Returns
        -------
        Axes : 'matplotlib.axes._subplots.AxesSubplot'

        Parameters
        ----------
        tickers : {'tickers', 'names'} or list of str, default 'tickers'
            Annotation type for assets.
            'tickers' - assets symbols are shown in form of 'SPY.US'
            'names' - assets names are used like - 'SPDR S&P 500 ETF Trust'
            To show custom annotations for each asset pass the list of names.
        bounds: tuple of ((float, float),...)
            Bounds for the assets weights. Each asset can have weights limitation from 0 to 1.0.
            If an asset has limitation for 10 to 20%, bounds are defined as (0.1, 0.2).
            bounds = ((0, .5), (0, 1)) shows that in Portfolio with two assets first one has weight limitations
            from 0 to 50%. The second asset has no limitations.

        Notes
        -----
        It should be at least 3 assets.

        Examples
        --------
        >>> import matplotlib.pyplot as plt
        >>> ls4 = ['SPY.US', 'BND.US', 'GLD.US', 'VNQ.US']
        >>> curr = 'USD'
        >>> last_date = '07-2021'
        >>> ok.Plots(ls4, ccy=curr, last_date=last_date).plot_pair_ef()
        >>> plt.show()

        It can be useful to plot the full Efficent Frontier (EF) with optimized 4 assets portfolios
        together with the EFs for each pair of assets.

        >>> ef4 = ok.EfficientFrontier(assets=ls4, ccy=curr, n_points=100)
        >>> df4 = ef4.ef_points
        >>> fig = plt.figure()
        >>> # Plot Efficient Frontier of every pair of assets. Optimized portfolios will have 2 assets.
        >>> ok.Plots(ls4, ccy=curr, last_date=last_date).plot_pair_ef()  # mean return is used for optimized portfolios.
        >>> ax = plt.gca()
        >>> # Plot the full Efficient Frontier for 4 asset portfolios.
        >>> ax.plot(df4['Risk'], df4['Mean return'], color = 'black', linestyle='--')
        >>> plt.show()
        """
        if len(self.symbols) < 3:
            raise ValueError("The number of symbols cannot be less than 3")
        self._verify_axes()
        for i in itertools.combinations(self.symbols, 2):
            sym_pair = list(i)
            # Map the pair back to its positions so per-asset bounds follow.
            index0 = self.symbols.index(sym_pair[0])
            index1 = self.symbols.index(sym_pair[1])
            if bounds:
                bounds_pair = (bounds[index0], bounds[index1])
            else:
                bounds_pair = None
            ef = EfficientFrontier(
                assets=sym_pair,
                ccy=self.currency,
                first_date=self.first_date,
                last_date=self.last_date,
                inflation=self._bool_inflation,
                full_frontier=True,
                bounds=bounds_pair,
            ).ef_points
            self.ax.plot(ef["Risk"], ef["Mean return"])
        self.plot_assets(kind="mean", tickers=tickers)
        return self.ax
| 516 | 0 | 54 |
d1a9bd1097cbb623ba8b10a3de4121e881e15bcd | 1,357 | py | Python | code/alert_properties.py | rmorgan10/AlertMonitoring | 40965ea36fb001ac7d0fc0248478768cd9a89bec | [
"BSD-3-Clause"
] | 1 | 2020-10-18T18:08:48.000Z | 2020-10-18T18:08:48.000Z | code/alert_properties.py | rmorgan10/AlertMonitoring | 40965ea36fb001ac7d0fc0248478768cd9a89bec | [
"BSD-3-Clause"
] | 4 | 2020-10-19T15:51:27.000Z | 2021-01-22T22:01:26.000Z | code/alert_properties.py | rmorgan10/AlertMonitoring | 40965ea36fb001ac7d0fc0248478768cd9a89bec | [
"BSD-3-Clause"
] | null | null | null | # A class to collect alert properties
import glob
import os
import pandas as pd
| 30.840909 | 83 | 0.557848 | # A class to collect alert properties
import glob
import os
import pandas as pd
class Alert:
    """Container for the properties of a single AMON/IceCube alert row.

    Parses one notice-table row (a pandas Series or any mapping with the
    AMON column names) into typed attributes and derives a unique alert
    name of the form ``IC<yymmdd><letter>``.
    """

    def __init__(self, row):
        """Extract typed alert properties from a notice-table *row*."""
        self.ra = float(row['RA [deg]'])
        self.dec = float(row['Dec [deg]'])
        self.err90 = float(row['Error90 [arcmin]'])
        self.err50 = float(row['Error50 [arcmin]'])
        self.run_num = str(row['RunNum_EventNum'].split('_')[0])
        self.event_num = str(row['RunNum_EventNum'].split('_')[1])
        self.notice_type = str(row['NoticeType'])
        self.energy = float(row['Energy'])
        self.signalness = float(row['Signalness'])
        self.far = float(row['FAR [#/yr]'])
        self.revision = int(row['Rev'])
        self.time_UT = pd.to_datetime(str(row['Date']) + 'T' + str(row['Time UT']),
                                      format="%y/%m/%dT%H:%M:%S.%f")
        self._name = "IC" + self.time_UT.strftime("%y%m%d")
        self.name = self._name + self._find_suffix()

    def _find_suffix(self):
        """Return the next free letter suffix ('A', 'B', ...) for this date.

        Scans the parent directory for alerts with the same date prefix and
        revision. BUG FIX: the original did os.chdir('..') / os.chdir('code'),
        which assumed the process started in 'code' and left the cwd wrong if
        glob raised in between; globbing a relative pattern avoids mutating
        process-global state.
        """
        pattern = os.path.join('..', self._name + '*_' + str(self.revision))
        today_alerts = [os.path.basename(p) for p in glob.glob(pattern)]
        # Character 8 of 'IC<yymmdd>X...' is the letter suffix X.
        letters = set([name[8] for name in today_alerts])
        return chr(65 + len(letters))
def make_alert_list(amon_df):
    """Build one Alert object per row of the AMON alert DataFrame."""
    return [Alert(entry) for _, entry in amon_df.iterrows()]
| 1,174 | -9 | 107 |
04edfc128cc7fef52c0869ec8e34b5915ce793c0 | 1,180 | py | Python | examples/M4_competition/download_data_M4.py | LeoTafti/darts | 210605fafb730de564e3d723ab3919ed94da42b9 | [
"Apache-2.0"
] | 3,756 | 2020-06-22T13:33:41.000Z | 2022-03-31T23:28:40.000Z | examples/M4_competition/download_data_M4.py | LeoTafti/darts | 210605fafb730de564e3d723ab3919ed94da42b9 | [
"Apache-2.0"
] | 578 | 2020-06-17T17:04:14.000Z | 2022-03-31T11:42:37.000Z | examples/M4_competition/download_data_M4.py | LeoTafti/darts | 210605fafb730de564e3d723ab3919ed94da42b9 | [
"Apache-2.0"
] | 384 | 2020-06-22T13:33:54.000Z | 2022-03-31T22:55:35.000Z | """Downloading data from the M4 competition
"""
import os
import requests
if __name__ == "__main__":
data_frequencies = ['Yearly', 'Quarterly', 'Monthly', 'Weekly', 'Daily', 'Hourly']
datapath = "./dataset/"
url = "https://github.com/Mcompetitions/M4-methods/raw/master/Dataset/{}.csv"
download(datapath, url, 'M4-info')
for freq in data_frequencies:
for split in ['train', 'test']:
download(datapath+split, url, '{}-{}'.format(freq, split), split.capitalize())
| 27.44186 | 90 | 0.602542 | """Downloading data from the M4 competition
"""
import os
import requests
def download(datapath, url, name, split=None):
    """Download one M4-competition CSV into ``datapath``.

    ``url`` is a format template with a single ``{}`` slot; when ``split``
    is given it is prefixed to the remote file name ("Split/Name").
    Files that already exist locally are left untouched.
    """
    os.makedirs(datapath, exist_ok=True)
    remote_name = name if split is None else split + "/" + name
    url = url.format(remote_name)
    file_path = os.path.join(datapath, name) + ".csv"
    if os.path.exists(file_path):
        print(name + " already exists")
        return
    print('Downloading ' + url)
    response = requests.get(url, stream=True)
    with open(file_path, 'wb') as out_file:
        for chunk in response.iter_content(chunk_size=16 * 1024 ** 2):
            if chunk:  # skip empty keep-alive chunks
                out_file.write(chunk)
                out_file.flush()
    return
if __name__ == "__main__":
    # Fetch the metadata file plus the train/test CSV for every sampling frequency.
    data_frequencies = ['Yearly', 'Quarterly', 'Monthly', 'Weekly', 'Daily', 'Hourly']
    datapath = "./dataset/"
    url = "https://github.com/Mcompetitions/M4-methods/raw/master/Dataset/{}.csv"
    download(datapath, url, 'M4-info')
    for freq in data_frequencies:
        for split in ['train', 'test']:
            # Remote layout capitalizes the split directory ('Train'/'Test').
            download(datapath+split, url, '{}-{}'.format(freq, split), split.capitalize())
| 646 | 0 | 23 |
b7a0960b05d13c74be641bedd1ea16d745df8dc9 | 1,738 | py | Python | GoldenGrinAnonymous.py | ohmyitsdan/GoldenGrinAnonymous | dc7f65af12e49e3de4869d60ee9221d109ae1bc7 | [
"MIT"
] | null | null | null | GoldenGrinAnonymous.py | ohmyitsdan/GoldenGrinAnonymous | dc7f65af12e49e3de4869d60ee9221d109ae1bc7 | [
"MIT"
] | null | null | null | GoldenGrinAnonymous.py | ohmyitsdan/GoldenGrinAnonymous | dc7f65af12e49e3de4869d60ee9221d109ae1bc7 | [
"MIT"
] | null | null | null | import time
import math
import keyboard
# Betting Breakdown
initFee = 1500000
prefCard = 500000
infamous = 3000000
safe1 = 1000000
safe2 = 3300000
safe3 = 6500000
target = 1000000000
ct = 1
print(f'\nWelcome to your grind-less \'Golden Grin Anonymous\' Achievement!\n')
currentSpend = int(input('What is your current spend amount? $').replace(',',''))
leftToSpend = '{:,}'.format(target - int(currentSpend))
print(f'OK, You have ${leftToSpend} left to spend.')
safedCards = int(input('\nHow many cards do you have safed? '))
isitInfamous = input('Is It Infamous? (y/n) ').upper()
if safedCards == 0:
spin = initFee
elif safedCards == 1:
spin = initFee + prefCard + safe1
elif safedCards == 2:
spin = initFee + prefCard + safe1 + safe2
else:
spin = initFee + prefCard + safe1 + safe2 + safe3
if isitInfamous == 'Y':
spin += infamous
numSpins = math.ceil(currentSpend / spin)
print(f'\nAt your current settings you would need {numSpins} card turns to reach the target.')
start = input('Are you ready to start? (y/n) ').upper()
if start != 'Y':
print('\nThanks for playing.')
else:
print('\nLoad the game, enter your settings into Offshore Payday')
print('Press any key to start...')
keyboard.read_key()
print('HOLD \'Q\' to quit.')
spinThatWheel()
| 25.940299 | 94 | 0.629459 | import time
import math
import keyboard
# Betting Breakdown
initFee = 1500000
prefCard = 500000
infamous = 3000000
safe1 = 1000000
safe2 = 3300000
safe3 = 6500000
target = 1000000000
ct = 1
print(f'\nWelcome to your grind-less \'Golden Grin Anonymous\' Achievement!\n')
currentSpend = int(input('What is your current spend amount? $').replace(',',''))
leftToSpend = '{:,}'.format(target - int(currentSpend))
print(f'OK, You have ${leftToSpend} left to spend.')
safedCards = int(input('\nHow many cards do you have safed? '))
isitInfamous = input('Is It Infamous? (y/n) ').upper()
if safedCards == 0:
spin = initFee
elif safedCards == 1:
spin = initFee + prefCard + safe1
elif safedCards == 2:
spin = initFee + prefCard + safe1 + safe2
else:
spin = initFee + prefCard + safe1 + safe2 + safe3
if isitInfamous == 'Y':
spin += infamous
numSpins = math.ceil(currentSpend / spin)
def spinThatWheel():
global ct
while keyboard.is_pressed('q') == False:
while ct <= numSpins:
print(f'Spin {ct}/{numSpins}.')
keyboard.press('space')
time.sleep(0.5)
keyboard.release('space')
time.sleep(7)
keyboard.press('enter')
time.sleep(0.5)
keyboard.release('enter')
time.sleep(2)
ct += 1
print(f'\nAt your current settings you would need {numSpins} card turns to reach the target.')
start = input('Are you ready to start? (y/n) ').upper()
if start != 'Y':
print('\nThanks for playing.')
else:
print('\nLoad the game, enter your settings into Offshore Payday')
print('Press any key to start...')
keyboard.read_key()
print('HOLD \'Q\' to quit.')
spinThatWheel()
| 408 | 0 | 23 |
f8570fcb4bce71708acbc200208e26e570367cbe | 375 | py | Python | statify/renderers.py | aufdenpunkt/django-statify | c99b527683d326ecd70dea714cd1522fbc5608c4 | [
"BSD-3-Clause"
] | null | null | null | statify/renderers.py | aufdenpunkt/django-statify | c99b527683d326ecd70dea714cd1522fbc5608c4 | [
"BSD-3-Clause"
] | null | null | null | statify/renderers.py | aufdenpunkt/django-statify | c99b527683d326ecd70dea714cd1522fbc5608c4 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# 3rd party imports
from django_medusa.renderers import DiskStaticSiteRenderer
# Project imports
from statify.models import URL
| 20.833333 | 58 | 0.669333 | # -*- coding: utf-8 -*-
#
# 3rd party imports
from django_medusa.renderers import DiskStaticSiteRenderer
# Project imports
from statify.models import URL
class UrlsRenderer(DiskStaticSiteRenderer):
def get_paths(self):
paths = []
urls = URL.objects.filter(is_valid=True)
for url in urls:
paths.append(url.url)
return paths
| 147 | 22 | 49 |
7bddec9da1fac479748f377b304673d3cb459745 | 949 | py | Python | condense.py | innovate-invent/python-musings | 0b58a3aea54adfcb55ef3dbd4652828efcb25c8e | [
"MIT"
] | null | null | null | condense.py | innovate-invent/python-musings | 0b58a3aea54adfcb55ef3dbd4652828efcb25c8e | [
"MIT"
] | null | null | null | condense.py | innovate-invent/python-musings | 0b58a3aea54adfcb55ef3dbd4652828efcb25c8e | [
"MIT"
] | null | null | null | import pysam
from CigarIterator import CigarIterator, appendOrInc
if __name__ == "__main__":
pass #TODO | 28.757576 | 77 | 0.600632 | import pysam
from CigarIterator import CigarIterator, appendOrInc
def condense(record: pysam.AlignedSegment):
  """Strip reference-matching bases from ``record`` in place.

  Rewrites the read so that only clipped/non-reference positions keep
  sequence and quality data, with the CIGAR rebuilt from hard clips,
  reference skips, and the retained ops (CMATCH is re-coded as CDIFF).
  Mutates record.query_sequence, .query_qualities and .cigartuples.
  """
  if record.reference_length == 0:
    # No work needs to be done
    return
  seq = ""
  qual = []
  ops = []
  itr = CigarIterator(record)
  # Leading soft/hard clips collapse into a single hard clip of the same length.
  clipped = itr.skipClipped()
  if clipped:
    ops.append((pysam.CHARD_CLIP, clipped))
  lastPos = itr.opPos
  while itr.skipToNonRef(): # TODO: check for MD tag and add if not present
    if itr.inSeq:
      seq += itr.seqBase
      qual.append(itr.baseQual)
    # NOTE(review): lastPos is never advanced inside this loop, so every
    # distance is measured from the position after the clip — confirm that
    # appendOrInc/CREF_SKIP accounting is intended to work this way.
    dist = itr.opPos - lastPos
    if dist > 0:
      appendOrInc(ops, [pysam.CREF_SKIP, dist])
    if itr.op == pysam.CMATCH:
      appendOrInc(ops, [pysam.CDIFF, 1])
    else:
      appendOrInc(ops, [itr.op, 1])
  record.query_sequence = seq
  record.query_qualities = qual
  record.cigartuples = ops
if __name__ == "__main__":
pass #TODO | 818 | 0 | 23 |
32aa4df811b3d6088038a5059a9de369672b42c4 | 6,894 | py | Python | waveglow/models/waveglow_teacher.py | yandex-research/distill-nf | b804289b88e9bf1736f29b7e8b0b25614b08a924 | [
"Apache-2.0"
] | 7 | 2021-06-23T08:21:50.000Z | 2021-11-18T17:32:58.000Z | waveglow/models/waveglow_teacher.py | yandex-research/distill-nf | b804289b88e9bf1736f29b7e8b0b25614b08a924 | [
"Apache-2.0"
] | 1 | 2021-12-09T05:19:40.000Z | 2021-12-10T06:23:25.000Z | waveglow/models/waveglow_teacher.py | yandex-research/distill-nf | b804289b88e9bf1736f29b7e8b0b25614b08a924 | [
"Apache-2.0"
] | null | null | null | """
WaveGlow teacher wrapper
"""
from functools import lru_cache
import numpy as np
import torch
from models.waveglow import WaveGlow
class WaveGlowTeacher(WaveGlow):
""" A WaveGlow model optimized for use as a teacher in distillation """
@classmethod
def sample_inputs_for(self, spect, sigma=1.0):
""" upsample and generate noise: the non-distilled part of waveglow """
assert self.n_early_every > 0 and self.n_flows > 0
spect = self.upsample(spect)
# trim conv artifacts. maybe pad spec to kernel multiple
if not self.upsample_multistage:
time_cutoff = self.upsample.kernel_size[0] - self.upsample.stride[0]
spect = spect[:, :, :-time_cutoff]
spect = spect.unfold(2, self.n_group, self.n_group).permute(0, 2, 1, 3)
spect = spect.contiguous().view(spect.size(0), spect.size(1), -1).permute(0, 2, 1)
num_noise_vectors = max(1, (self.n_flows - 1) // self.n_early_every)
noise_audio = sigma * torch.randn(spect.shape[0], self.n_remaining_channels, spect.shape[2],
device=spect.device, dtype=spect.dtype)
noise_vectors = [sigma * torch.randn(spect.shape[0], self.n_early_size, spect.shape[2],
device=spect.device, dtype=spect.dtype)
for _ in range(num_noise_vectors)]
return (spect, noise_audio, *noise_vectors)
def forward(self, spect, noise_audio, *noise_vectors):
""" A deterministic version of waveglow.infer; use compute_inputs_for(mel_spect) for inputs """
audio = noise_audio
noise_index = 0
for k in reversed(range(self.n_flows)):
n_half = audio.size(1) // 2
audio_0 = audio[:, :n_half, :]
audio_1 = audio[:, n_half:, :]
wn_input = (audio_0, spect)
output = self.WN[k](wn_input)
s = output[:, n_half:, :]
b = output[:, :n_half, :]
audio_1 = (audio_1 - b) / torch.exp(s)
audio = torch.cat([audio_0, audio_1], 1)
audio = self.convinv[k](audio, reverse=True)
if k % self.n_early_every == 0 and k > 0:
z = noise_vectors[noise_index]
noise_index += 1
audio = torch.cat((z, audio), 1)
assert noise_index == len(noise_vectors), f"Used {noise_index} noise vectors, but got {len(noise_vectors)}"
audio = audio.permute(0, 2, 1).contiguous().view(audio.size(0), -1)
return audio
class DeterministicWaveGlowTeacher(WaveGlowTeacher):
""" WaveGlowTeacher that predicts deterministically based on seed """
@lru_cache(maxsize=None)
def sample_inputs_for(self, spect, **kwargs):
""" upsample and generate noise: the non-distilled part of waveglow """
assert self.n_early_every > 0 and self.n_flows > 0
spect = self.upsample(spect)
# trim conv artifacts. maybe pad spec to kernel multiple
if not self.upsample_multistage:
time_cutoff = self.upsample.kernel_size[0] - self.upsample.stride[0]
spect = spect[:, :, :-time_cutoff]
spect = spect.unfold(2, self.n_group, self.n_group).permute(0, 2, 1, 3)
spect = spect.contiguous().view(spect.size(0), spect.size(1), -1).permute(0, 2, 1)
num_noise_vectors = max(1, (self.n_flows - 1) // self.n_early_every)
noise_num_channels = (self.n_remaining_channels,) + (self.n_early_size,) * num_noise_vectors
noise_audio, *noise_vectors = self.generate_noise_inputs(
spect.shape[0], noise_num_channels, spect.shape[2], device=spect.device, dtype=spect.dtype, **kwargs)
return (spect, noise_audio, *noise_vectors) | 43.0875 | 145 | 0.589788 | """
WaveGlow teacher wrapper
"""
from functools import lru_cache
import numpy as np
import torch
from models.waveglow import WaveGlow
class WaveGlowTeacher(WaveGlow):
  """ A WaveGlow model optimized for use as a teacher in distillation """
  @classmethod
  def load(cls, path, fp16=True, train=False, device='cuda'):
    """Build a teacher from a checkpoint file.

    Deletes the speaker-related config keys before construction, removes
    weight norm, and (optionally) precomputes fp16 inverse convolution
    weights. Returns the model moved to ``device`` in the given train mode.
    """
    ckpt = torch.load(path)
    del ckpt['config']['n_speakers']
    del ckpt['config']['speaker_embedding_dim']
    waveglow = cls(**ckpt['config'])
    waveglow.remove_weightnorm()
    waveglow.load_state_dict(ckpt['state_dict'])
    if fp16:
      print("Cast WaveGlow to fp16")
      for convinv in waveglow.convinv:
        # precompute W_inverse
        # it should be used by WG automatically
        with torch.no_grad():
          W = convinv.conv.weight.data.squeeze()
          W_inverse = W.inverse()
          convinv.register_buffer("W_inverse", W_inverse[..., None])
      waveglow = waveglow.half()
    return waveglow.to(device).train(train)
  def sample_inputs_for(self, spect, sigma=1.0):
    """ upsample and generate noise: the non-distilled part of waveglow """
    assert self.n_early_every > 0 and self.n_flows > 0
    spect = self.upsample(spect)
    # trim conv artifacts. maybe pad spec to kernel multiple
    if not self.upsample_multistage:
      time_cutoff = self.upsample.kernel_size[0] - self.upsample.stride[0]
      spect = spect[:, :, :-time_cutoff]
    # Group n_group consecutive frames into the channel dimension.
    spect = spect.unfold(2, self.n_group, self.n_group).permute(0, 2, 1, 3)
    spect = spect.contiguous().view(spect.size(0), spect.size(1), -1).permute(0, 2, 1)
    # One early-output noise vector per n_early_every flows (at least one).
    num_noise_vectors = max(1, (self.n_flows - 1) // self.n_early_every)
    noise_audio = sigma * torch.randn(spect.shape[0], self.n_remaining_channels, spect.shape[2],
                                      device=spect.device, dtype=spect.dtype)
    noise_vectors = [sigma * torch.randn(spect.shape[0], self.n_early_size, spect.shape[2],
                                         device=spect.device, dtype=spect.dtype)
                     for _ in range(num_noise_vectors)]
    return (spect, noise_audio, *noise_vectors)
  def forward(self, spect, noise_audio, *noise_vectors):
    """ A deterministic version of waveglow.infer; use compute_inputs_for(mel_spect) for inputs """
    audio = noise_audio
    noise_index = 0
    # Run the flows in reverse order (inference direction).
    for k in reversed(range(self.n_flows)):
      n_half = audio.size(1) // 2
      audio_0 = audio[:, :n_half, :]
      audio_1 = audio[:, n_half:, :]
      wn_input = (audio_0, spect)
      output = self.WN[k](wn_input)
      s = output[:, n_half:, :]
      b = output[:, :n_half, :]
      # Invert the affine coupling: (x - b) / exp(s).
      audio_1 = (audio_1 - b) / torch.exp(s)
      audio = torch.cat([audio_0, audio_1], 1)
      audio = self.convinv[k](audio, reverse=True)
      # Re-inject the early-output noise where training emitted early outputs.
      if k % self.n_early_every == 0 and k > 0:
        z = noise_vectors[noise_index]
        noise_index += 1
        audio = torch.cat((z, audio), 1)
    assert noise_index == len(noise_vectors), f"Used {noise_index} noise vectors, but got {len(noise_vectors)}"
    # Ungroup channels back into a flat waveform per batch element.
    audio = audio.permute(0, 2, 1).contiguous().view(audio.size(0), -1)
    return audio
  def flow_forward(self, spect, audio):
    """Forward (training-direction) pass: map audio to latent z given spect."""
    # Upsample spectrogram to size of audio
    spect = self.upsample(spect) # (B, M, F) -> (B, M, T)
    assert(spect.size(2) >= audio.size(1))
    if spect.size(2) > audio.size(1):
      spect = spect[:, :, :audio.size(1)]
    spect = spect.unfold(2, self.n_group, self.n_group).permute(0, 2, 1, 3) # (B, M, T) -> (B, M, T//G, G) -> (B, T//G, M, G)
    spect = spect.contiguous().view(spect.size(0), spect.size(1), -1).permute(0, 2, 1) # (B, T//G, M, G) -> (B, T//G, M*G) -> (B, M*G, T//G)
    audio = audio.unfold(1, self.n_group, self.n_group).permute(0, 2, 1) # (B, T) -> (B, T//G, G) -> (B, G, T//G)
    output_audio = []
    for k in range(self.n_flows):
      # Emit early outputs every n_early_every flows (mirrors forward()).
      if k % self.n_early_every == 0 and k > 0:
        output_audio.append(audio[:, :self.n_early_size, :])
        audio = audio[:, self.n_early_size:, :]
      audio, _ = self.convinv[k](audio)
      n_half = int(audio.size(1)/2)
      audio_0 = audio[:, :n_half, :]
      audio_1 = audio[:, n_half:, :]
      wn_input = (audio_0, spect)
      output = self.WN[k](wn_input)
      log_s = output[:, n_half:, :]
      b = output[:, :n_half, :]
      # Affine coupling in the forward direction: exp(log_s) * x + b.
      audio_1 = torch.exp(log_s)*audio_1 + b
      audio = torch.cat([audio_0, audio_1], 1)
    output_audio.append(audio)
    return torch.cat(output_audio, 1), spect
class DeterministicWaveGlowTeacher(WaveGlowTeacher):
  """ WaveGlowTeacher that predicts deterministically based on seed """
  # NOTE(review): lru_cache on an instance method keys on `self` and keeps
  # every instance alive for the cache's lifetime (ruff B019) — acceptable
  # here only if few teacher instances are ever created; confirm.
  @lru_cache(maxsize=None)
  def generate_noise_common(self, *, seed=1337, device=torch.device('cpu'),
                            dtype=torch.float32, channels=8, max_length=50_000):
    """Return a cached (1, channels, max_length) tensor of seeded Gaussian noise."""
    noise = np.random.RandomState(seed=seed).randn(1, channels, max_length)
    return torch.as_tensor(noise, device=device, dtype=dtype)
  def generate_noise_inputs(self, batch_size: int, channels: tuple, length: int, **kwargs):
    """Slice the common noise tensor into per-stage chunks of the given channel sizes.

    Each chunk is repeated across the batch, so every batch element sees
    identical noise. ``kwargs`` is forwarded to generate_noise_common.
    """
    assert kwargs.get('max_length', 50_000) >= length
    common = self.generate_noise_common(channels=sum(channels), **kwargs)
    noise_inputs = []
    split_index = 0
    for num_channels in channels:
      noise_inputs.append(common[:, split_index: split_index + num_channels, :length].repeat(batch_size, 1, 1))
      split_index += num_channels
    return noise_inputs
  def sample_inputs_for(self, spect, **kwargs):
    """ upsample and generate noise: the non-distilled part of waveglow """
    assert self.n_early_every > 0 and self.n_flows > 0
    spect = self.upsample(spect)
    # trim conv artifacts. maybe pad spec to kernel multiple
    if not self.upsample_multistage:
      time_cutoff = self.upsample.kernel_size[0] - self.upsample.stride[0]
      spect = spect[:, :, :-time_cutoff]
    # Group n_group consecutive frames into channels (same as the parent class).
    spect = spect.unfold(2, self.n_group, self.n_group).permute(0, 2, 1, 3)
    spect = spect.contiguous().view(spect.size(0), spect.size(1), -1).permute(0, 2, 1)
    num_noise_vectors = max(1, (self.n_flows - 1) // self.n_early_every)
    # Channel layout: the remaining-audio noise first, then one early chunk per stage.
    noise_num_channels = (self.n_remaining_channels,) + (self.n_early_size,) * num_noise_vectors
    noise_audio, *noise_vectors = self.generate_noise_inputs(
        spect.shape[0], noise_num_channels, spect.shape[2], device=spect.device, dtype=spect.dtype, **kwargs)
    return (spect, noise_audio, *noise_vectors)
43278a4f75adc3ba6f1e211712961e9f3b1fe18e | 4,535 | py | Python | tests/framework/AnalyticModels/optimizing/plotWalk3d.py | rinelson456/raven | 1114246136a2f72969e75b5e99a11b35500d4eef | [
"Apache-2.0"
] | 159 | 2017-03-24T21:07:06.000Z | 2022-03-20T13:44:40.000Z | tests/framework/AnalyticModels/optimizing/plotWalk3d.py | rinelson456/raven | 1114246136a2f72969e75b5e99a11b35500d4eef | [
"Apache-2.0"
] | 1,667 | 2017-03-27T14:41:22.000Z | 2022-03-31T19:50:06.000Z | tests/framework/AnalyticModels/optimizing/plotWalk3d.py | rinelson456/raven | 1114246136a2f72969e75b5e99a11b35500d4eef | [
"Apache-2.0"
] | 95 | 2017-03-24T21:05:03.000Z | 2022-03-08T17:30:22.000Z | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for producing an animation of optimization histories
for 3D problems given pickled mesh grid data. For examples
of the mesh grid data, see raven/tests/framework/AnalyticModels/optimizing/plot_functions.py.
"""
import pickle as pk
import matplotlib.pyplot as plt
from matplotlib import colors
from matplotlib import cm
from mpl_toolkits.mplot3d import axes3d, Axes3D
from matplotlib import animation
import numpy as np
# load function data
bX,bY,bZ = pk.load(open('dvalley_plotdata.pk','rb'))
norm = plt.Normalize(bZ.min()-1, bZ.max()+5)
colors = cm.BuGn(norm(bZ))
rcount, ccount, _ = colors.shape
fig = plt.figure(figsize=(10,8))
ax = fig.gca(projection='3d')
ax.view_init(70, 0)
surf = ax.plot_surface(bX, bY, bZ, rcount=rcount, ccount=ccount,
facecolors=colors,alpha=0.3)
ax.set_xlabel('x')
ax.set_ylabel('y')
# load walk data
cases = range(5)
data = {}
for c,case in enumerate(cases):
try:
with open('opt_export_{}.csv'.format(case+1),'r', encoding = "utf-8-sig") as infile:
data[case] = {'x':[],'y':[],'z':[],'a':[]}
for l,line in enumerate(infile):
line = line.strip().split(',')
if l==0:
ix = line.index('x')
iy = line.index('y')
iz = line.index('ans')
ia = line.index('accepted')
continue
data[case]['x'].append(float(line[ix]))
data[case]['y'].append(float(line[iy]))
data[case]['z'].append(float(line[iz]))
data[case]['a'].append(bool(float(line[ia])))
except IOError:
cases = cases[:c]
break
# point the first dot
points = []
trails = []
rejects = []
clr = ax._get_lines.prop_cycler
for case in cases:
c = next(clr)['color']
point, = ax.plot3D([data[case]['x'][0]],[data[case]['y'][0]],[data[case]['z'][0]],color=c,alpha=0.9,marker='${}$'.format(case))
trail, = ax.plot3D([data[case]['x'][0]],[data[case]['y'][0]],[data[case]['z'][0]],'.-',color=c,alpha=0.9)
reject, = ax.plot3D([],[],[],'x',color=c,alpha=0.9)
points.append(point)
trails.append(trail)
rejects.append(reject)
def updatePoint(n,data,points,trails,rejects):
"""
Function to be called to update the animation points, one iteration at a time.
@ In, n, int, the iteration to use
@ In, data, dict, all the data collected from the RAVEN output
@ In, points, list, plotted points in the animation
@ In, trails, list, currently unused, finite number of trailing points to track in animation
@ In, rejects, list, rejected samples from evaluations
@ Out, point, matplotlib.pyplot line, last plotted point object
"""
print('Animating iteration',n)
for c,case in enumerate(cases):
point = points[c]
trail = trails[c]
reject = rejects[c]
N = len(data[case]['x'])
# truncate data
x = np.array(data[case]['x'][:n+1] if n+1 < N else data[case]['x'])
y = np.array(data[case]['y'][:n+1] if n+1 < N else data[case]['y'])
z = np.array(data[case]['z'][:n+1] if n+1 < N else data[case]['z'])
a = np.array(data[case]['a'][:n+1] if n+1 < N else data[case]['a'])
# split data into accepted, rejected points
xA = np.atleast_1d(x[a])
yA = np.atleast_1d(y[a])
zA = np.atleast_1d(z[a])
xR = np.atleast_1d(x[np.logical_not(a)])
yR = np.atleast_1d(y[np.logical_not(a)])
zR = np.atleast_1d(z[np.logical_not(a)])
try:
point.set_data([xA[-1]],[yA[-1]])
point.set_3d_properties(zA[-1])
trail.set_data(xA,yA)
trail.set_3d_properties(zA)
reject.set_data(xR,yR)
reject.set_3d_properties(zR)
except IndexError:
continue
ax.set_title('iteration {}'.format(n),loc='center',pad=20)
return point
ani=animation.FuncAnimation(fig,updatePoint,max(len(data[case]['x']) for case in cases),fargs=(data,points,trails,rejects),interval=100,repeat_delay=3000)
Writer = animation.writers['ffmpeg']
writer = Writer(fps=15,bitrate=1800)
ani.save('path3d.mp4',writer=writer)
| 34.356061 | 154 | 0.655347 | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for producing an animation of optimization histories
for 3D problems given pickled mesh grid data. For examples
of the mesh grid data, see raven/tests/framework/AnalyticModels/optimizing/plot_functions.py.
"""
import pickle as pk
import matplotlib.pyplot as plt
from matplotlib import colors
from matplotlib import cm
from mpl_toolkits.mplot3d import axes3d, Axes3D
from matplotlib import animation
import numpy as np
# load function data
bX,bY,bZ = pk.load(open('dvalley_plotdata.pk','rb'))
norm = plt.Normalize(bZ.min()-1, bZ.max()+5)
colors = cm.BuGn(norm(bZ))
rcount, ccount, _ = colors.shape
fig = plt.figure(figsize=(10,8))
ax = fig.gca(projection='3d')
ax.view_init(70, 0)
surf = ax.plot_surface(bX, bY, bZ, rcount=rcount, ccount=ccount,
facecolors=colors,alpha=0.3)
ax.set_xlabel('x')
ax.set_ylabel('y')
# load walk data
cases = range(5)
data = {}
for c,case in enumerate(cases):
try:
with open('opt_export_{}.csv'.format(case+1),'r', encoding = "utf-8-sig") as infile:
data[case] = {'x':[],'y':[],'z':[],'a':[]}
for l,line in enumerate(infile):
line = line.strip().split(',')
if l==0:
ix = line.index('x')
iy = line.index('y')
iz = line.index('ans')
ia = line.index('accepted')
continue
data[case]['x'].append(float(line[ix]))
data[case]['y'].append(float(line[iy]))
data[case]['z'].append(float(line[iz]))
data[case]['a'].append(bool(float(line[ia])))
except IOError:
cases = cases[:c]
break
# point the first dot
points = []
trails = []
rejects = []
clr = ax._get_lines.prop_cycler
for case in cases:
c = next(clr)['color']
point, = ax.plot3D([data[case]['x'][0]],[data[case]['y'][0]],[data[case]['z'][0]],color=c,alpha=0.9,marker='${}$'.format(case))
trail, = ax.plot3D([data[case]['x'][0]],[data[case]['y'][0]],[data[case]['z'][0]],'.-',color=c,alpha=0.9)
reject, = ax.plot3D([],[],[],'x',color=c,alpha=0.9)
points.append(point)
trails.append(trail)
rejects.append(reject)
def updatePoint(n,data,points,trails,rejects):
  """
    Function to be called to update the animation points, one iteration at a time.
    @ In, n, int, the iteration to use
    @ In, data, dict, all the data collected from the RAVEN output
    @ In, points, list, plotted points in the animation
    @ In, trails, list, currently unused, finite number of trailing points to track in animation
    @ In, rejects, list, rejected samples from evaluations
    @ Out, point, matplotlib.pyplot line, last plotted point object
  """
  print('Animating iteration',n)
  for c,case in enumerate(cases):
    point = points[c]
    trail = trails[c]
    reject = rejects[c]
    N = len(data[case]['x'])
    # truncate data
    # Show only the first n+1 samples; shorter walks are shown in full.
    x = np.array(data[case]['x'][:n+1] if n+1 < N else data[case]['x'])
    y = np.array(data[case]['y'][:n+1] if n+1 < N else data[case]['y'])
    z = np.array(data[case]['z'][:n+1] if n+1 < N else data[case]['z'])
    a = np.array(data[case]['a'][:n+1] if n+1 < N else data[case]['a'])
    # split data into accepted, rejected points
    xA = np.atleast_1d(x[a])
    yA = np.atleast_1d(y[a])
    zA = np.atleast_1d(z[a])
    xR = np.atleast_1d(x[np.logical_not(a)])
    yR = np.atleast_1d(y[np.logical_not(a)])
    zR = np.atleast_1d(z[np.logical_not(a)])
    try:
      # Current point = last accepted sample; trail = all accepted so far.
      point.set_data([xA[-1]],[yA[-1]])
      point.set_3d_properties(zA[-1])
      trail.set_data(xA,yA)
      trail.set_3d_properties(zA)
      reject.set_data(xR,yR)
      reject.set_3d_properties(zR)
    except IndexError:
      # No accepted samples yet for this case; leave its artists unchanged.
      continue
  ax.set_title('iteration {}'.format(n),loc='center',pad=20)
  # Returns whichever case's point was updated last in the loop.
  return point
ani=animation.FuncAnimation(fig,updatePoint,max(len(data[case]['x']) for case in cases),fargs=(data,points,trails,rejects),interval=100,repeat_delay=3000)
Writer = animation.writers['ffmpeg']
writer = Writer(fps=15,bitrate=1800)
ani.save('path3d.mp4',writer=writer)
| 0 | 0 | 0 |
de273ff0b25434b0dfa52b5e87d8dfccaab15b41 | 3,009 | py | Python | coldwallet/crypto.py | Anthchirp/python-coldwallet | 3d36b9322a4a596d06a78793d68aca17f6d13077 | [
"BSD-3-Clause"
] | null | null | null | coldwallet/crypto.py | Anthchirp/python-coldwallet | 3d36b9322a4a596d06a78793d68aca17f6d13077 | [
"BSD-3-Clause"
] | null | null | null | coldwallet/crypto.py | Anthchirp/python-coldwallet | 3d36b9322a4a596d06a78793d68aca17f6d13077 | [
"BSD-3-Clause"
] | 1 | 2020-08-14T02:30:35.000Z | 2020-08-14T02:30:35.000Z | # Coldwallet encryption related functions. This is the heart of coldwallet.
from __future__ import absolute_import, division, print_function
import base64
import math
import os
import coldwallet.aes
import pylibscrypt
def disable_randomness():
'''Make the random number generator produce deterministic (ie. non-random)
output. This is used for internal testing only, and must never be used
otherwise! A warning is printed.
'''
import random
import sys
print('', file=sys.stderr)
print('-- coldwallet running in test mode - random number generation disabled --', file=sys.stderr)
print('', file=sys.stderr)
if sys.hexversion >= 0x03000000:
random.seed(a=0, version=1)
else:
random.seed(a=0)
os.urandom = fakerandom
def generate_random_string(bits):
'''Generate a random string with a given number of bits. If the number of bits is
not divisible by 8 then pad with 0s.
'''
assert bits >= 1, "Cannot create 0 bit random strings"
stringbytes = int(math.ceil(bits / 8))
rand_string = os.urandom(stringbytes)
zero_padding = stringbytes * 8 - bits
if zero_padding:
mask = 0x100 - (2 ** zero_padding)
rand_string = rand_string[:-1] + bytes(bytearray((ord(rand_string[-1:]) & mask,)))
return rand_string
def encrypt_secret_key(secret, coldkey, public_address, scrypt_N=2**14, scrypt_p=1):
'''Encrypt a secret exponent using an individual symmetric key. The symmetric
key is generated from the shared coldwallet key (given as byte string) and
the public bitcoin address (given in human readable format) using the
memory-hard scrypt hash. The result is returned in base64 encoding.
'''
# Generate a 256 bit symmetric key from the coldwallet key and the public bitcoin address
symmetric_key = pylibscrypt.scrypt(coldkey, public_address.encode('ascii'), olen=32, N=scrypt_N, p=scrypt_p)
# Encrypt the secret exponent with the symmetric key
encrypted_secret = coldwallet.aes.encrypt_block(secret, symmetric_key)
# Base64 encode the result
return base64.b64encode(encrypted_secret).decode('ascii')
def decrypt_secret_key(code, coldkey, public_address, scrypt_N=2**14, scrypt_p=1):
'''Decrypt a secret exponent, given in base64 encoding, using an individual
symmetric key. The symmetric key is generated as above. The result is
returned as a byte string.
'''
# Base64 decode the input
code = base64.b64decode(code.encode('ascii'))
# Generate a 256 bit symmetric key from the coldwallet key and the public bitcoin address
symmetric_key = pylibscrypt.scrypt(coldkey, public_address.encode('ascii'), olen=32, N=scrypt_N, p=scrypt_p)
# Decrypt the secret exponent with the symmetric key
secret = coldwallet.aes.decrypt_block(code, symmetric_key)
return secret
| 38.576923 | 110 | 0.740445 | # Coldwallet encryption related functions. This is the heart of coldwallet.
from __future__ import absolute_import, division, print_function
import base64
import math
import os
import coldwallet.aes
import pylibscrypt
def disable_randomness():
  '''Make the random number generator produce deterministic (ie. non-random)
     output. This is used for internal testing only, and must never be used
     otherwise! A warning is printed.
  '''
  import random
  import sys
  print('', file=sys.stderr)
  print('-- coldwallet running in test mode - random number generation disabled --', file=sys.stderr)
  print('', file=sys.stderr)
  def fakerandom(b):
    # bytes/bytearray conversion and use and rounding of
    # random.random() is required for Python2/3 compatibility.
    return bytes(bytearray((int(round(random.random(), 10) * 256) for x in range(b))))
  # Seed identically on Python 2 and 3: version=1 reproduces the Py2 algorithm.
  if sys.hexversion >= 0x03000000:
    random.seed(a=0, version=1)
  else:
    random.seed(a=0)
  # Replace os.urandom process-wide so all key generation becomes deterministic.
  os.urandom = fakerandom
def generate_random_string(bits):
  '''Return a byte string carrying the requested number of random bits.
     The string is rounded up to whole bytes; when `bits` is not a multiple
     of 8 the surplus low-order bits of the final byte are forced to zero.
  '''
  assert bits >= 1, "Cannot create 0 bit random strings"
  n_bytes = int(math.ceil(bits / 8))
  raw = os.urandom(n_bytes)
  surplus = n_bytes * 8 - bits
  if not surplus:
    return raw
  # Keep only the high (8 - surplus) bits of the last byte.
  keep_mask = 0x100 - (1 << surplus)
  last_byte = ord(raw[-1:]) & keep_mask
  return raw[:-1] + bytes(bytearray((last_byte,)))
def encrypt_secret_key(secret, coldkey, public_address, scrypt_N=2**14, scrypt_p=1):
    '''Encrypt a secret exponent using an individual symmetric key. The symmetric
    key is generated from the shared coldwallet key (given as byte string) and
    the public bitcoin address (given in human readable format) using the
    memory-hard scrypt hash. The result is returned in base64 encoding.
    '''
    # Derive a per-address 256-bit symmetric key with the scrypt KDF.
    key = pylibscrypt.scrypt(
        coldkey,
        public_address.encode('ascii'),
        olen=32,
        N=scrypt_N,
        p=scrypt_p,
    )
    # AES-encrypt the secret exponent, then make the ciphertext printable.
    ciphertext = coldwallet.aes.encrypt_block(secret, key)
    return base64.b64encode(ciphertext).decode('ascii')
def decrypt_secret_key(code, coldkey, public_address, scrypt_N=2**14, scrypt_p=1):
    '''Decrypt a secret exponent, given in base64 encoding, using an individual
    symmetric key. The symmetric key is generated as above. The result is
    returned as a byte string.
    '''
    # Undo the base64 transport encoding first.
    ciphertext = base64.b64decode(code.encode('ascii'))
    # Re-derive the same per-address 256-bit key used for encryption.
    key = pylibscrypt.scrypt(
        coldkey,
        public_address.encode('ascii'),
        olen=32,
        N=scrypt_N,
        p=scrypt_p,
    )
    # AES-decrypt back to the raw secret exponent bytes.
    return coldwallet.aes.decrypt_block(ciphertext, key)
| 204 | 0 | 24 |
0df61cd55b30e0b3fa9f65343e0cc0106a7172fa | 4,958 | py | Python | src/ramstk/models/programdb/action/record.py | TahaEntezari/ramstk | f82e5b31ef5c4e33cc02252263247b99a9abe129 | [
"BSD-3-Clause"
] | 26 | 2019-05-15T02:03:47.000Z | 2022-02-21T07:28:11.000Z | src/ramstk/models/programdb/action/record.py | TahaEntezari/ramstk | f82e5b31ef5c4e33cc02252263247b99a9abe129 | [
"BSD-3-Clause"
] | 815 | 2019-05-10T12:31:52.000Z | 2022-03-31T12:56:26.000Z | src/ramstk/models/programdb/action/record.py | TahaEntezari/ramstk | f82e5b31ef5c4e33cc02252263247b99a9abe129 | [
"BSD-3-Clause"
] | 9 | 2019-04-20T23:06:29.000Z | 2022-01-24T21:21:04.000Z | # pylint: disable=duplicate-code
# -*- coding: utf-8 -*-
#
# ramstk.models.action.record.py is part of The RAMSTK Project
#
# All rights reserved.
# Copyright since 2007 Doyle "weibullguy" Rowland doyle.rowland <AT> reliaqual <DOT> com
"""RAMSTKAction Table Module."""
# Standard Library Imports
from datetime import date, timedelta
# Third Party Imports
from sqlalchemy import Column, Date, ForeignKeyConstraint, Integer, String
from sqlalchemy.orm import relationship
# RAMSTK Package Imports
from ramstk.db import RAMSTK_BASE
from ramstk.models import RAMSTKBaseRecord
class RAMSTKActionRecord(RAMSTK_BASE, RAMSTKBaseRecord):
    """Class to represent table ramstk_action in the RAMSTK Program database.
    This table shares a Many-to-One relationship with ramstk_cause.
    """

    # Default attribute values; consumed by the RAMSTKBaseRecord machinery
    # (base class not visible here -- confirm).
    # NOTE(review): the date defaults are evaluated once at import time, so
    # "today + 30 days" is frozen to the day the module was loaded.
    __defaults__ = {
        "action_recommended": "",
        "action_category": "",
        "action_owner": "",
        "action_due_date": date.today() + timedelta(days=30),
        "action_status": "",
        "action_taken": "",
        "action_approved": 0,
        "action_approve_date": date.today() + timedelta(days=30),
        "action_closed": 0,
        "action_close_date": date.today() + timedelta(days=30),
    }
    __tablename__ = "ramstk_action"
    # Composite foreign key back to the owning failure cause row.
    __table_args__ = (
        ForeignKeyConstraint(
            [
                "fld_revision_id",
                "fld_hardware_id",
                "fld_mode_id",
                "fld_mechanism_id",
                "fld_cause_id",
            ],
            [
                "ramstk_cause.fld_revision_id",
                "ramstk_cause.fld_hardware_id",
                "ramstk_cause.fld_mode_id",
                "ramstk_cause.fld_mechanism_id",
                "ramstk_cause.fld_cause_id",
            ],
        ),
        {"extend_existing": True},
    )

    # Composite primary key columns identifying the action's position in the
    # revision/hardware/mode/mechanism/cause hierarchy.
    revision_id = Column("fld_revision_id", Integer, primary_key=True, nullable=False)
    hardware_id = Column(
        "fld_hardware_id", Integer, primary_key=True, default=-1, nullable=False
    )
    mode_id = Column("fld_mode_id", Integer, primary_key=True, nullable=False)
    mechanism_id = Column("fld_mechanism_id", Integer, primary_key=True, nullable=False)
    cause_id = Column(
        "fld_cause_id", Integer, primary_key=True, nullable=False, unique=True
    )
    action_id = Column(
        "fld_action_id", Integer, primary_key=True, autoincrement=True, nullable=False
    )

    # Payload columns; each default comes from __defaults__ above.
    action_recommended = Column(
        "fld_action_recommended", String, default=__defaults__["action_recommended"]
    )
    action_category = Column(
        "fld_action_category", String(512), default=__defaults__["action_category"]
    )
    action_owner = Column(
        "fld_action_owner", String(512), default=__defaults__["action_owner"]
    )
    action_due_date = Column(
        "fld_action_due_date", Date, default=__defaults__["action_due_date"]
    )
    action_status = Column(
        "fld_action_status", String(512), default=__defaults__["action_status"]
    )
    action_taken = Column(
        "fld_action_taken", String, default=__defaults__["action_taken"]
    )
    action_approved = Column(
        "fld_action_approved", Integer, default=__defaults__["action_approved"]
    )
    action_approve_date = Column(
        "fld_action_approve_date", Date, default=__defaults__["action_approve_date"]
    )
    action_closed = Column(
        "fld_action_closed", Integer, default=__defaults__["action_closed"]
    )
    action_close_date = Column(
        "fld_action_close_date", Date, default=__defaults__["action_close_date"]
    )

    # Define the relationships to other tables in the RAMSTK Program database.
    cause = relationship("RAMSTKCauseRecord", back_populates="action")  # type: ignore

    # Discriminator flags used by callers to identify the FMEA entity kind.
    is_mode = False
    is_mechanism = False
    is_cause = False
    is_control = False
    is_action = True

    def get_attributes(self):
        """Retrieve current values of the RAMSTKAction data model attributes.
        :return: {cause_id, action_id, action_recommended,
                  action_category, action_owner, action_due_date,
                  action_status, action_taken, action_approved,
                  action_approved_date, action_closed,
                  action_closed_date} pairs.
        :rtype: dict
        """
        _attributes = {
            "cause_id": self.cause_id,
            "action_id": self.action_id,
            "action_recommended": self.action_recommended,
            "action_category": self.action_category,
            "action_owner": self.action_owner,
            "action_due_date": self.action_due_date,
            "action_status": self.action_status,
            "action_taken": self.action_taken,
            "action_approved": self.action_approved,
            "action_approve_date": self.action_approve_date,
            "action_closed": self.action_closed,
            "action_close_date": self.action_close_date,
        }
        return _attributes
| 35.414286 | 88 | 0.64885 | # pylint: disable=duplicate-code
# -*- coding: utf-8 -*-
#
# ramstk.models.action.record.py is part of The RAMSTK Project
#
# All rights reserved.
# Copyright since 2007 Doyle "weibullguy" Rowland doyle.rowland <AT> reliaqual <DOT> com
"""RAMSTKAction Table Module."""
# Standard Library Imports
from datetime import date, timedelta
# Third Party Imports
from sqlalchemy import Column, Date, ForeignKeyConstraint, Integer, String
from sqlalchemy.orm import relationship
# RAMSTK Package Imports
from ramstk.db import RAMSTK_BASE
from ramstk.models import RAMSTKBaseRecord
class RAMSTKActionRecord(RAMSTK_BASE, RAMSTKBaseRecord):
    """Class to represent table ramstk_action in the RAMSTK Program database.
    This table shares a Many-to-One relationship with ramstk_cause.
    """

    # Default attribute values; consumed by the RAMSTKBaseRecord machinery
    # (base class not visible here -- confirm).
    # NOTE(review): the date defaults are evaluated once at import time, so
    # "today + 30 days" is frozen to the day the module was loaded.
    __defaults__ = {
        "action_recommended": "",
        "action_category": "",
        "action_owner": "",
        "action_due_date": date.today() + timedelta(days=30),
        "action_status": "",
        "action_taken": "",
        "action_approved": 0,
        "action_approve_date": date.today() + timedelta(days=30),
        "action_closed": 0,
        "action_close_date": date.today() + timedelta(days=30),
    }
    __tablename__ = "ramstk_action"
    # Composite foreign key back to the owning failure cause row.
    __table_args__ = (
        ForeignKeyConstraint(
            [
                "fld_revision_id",
                "fld_hardware_id",
                "fld_mode_id",
                "fld_mechanism_id",
                "fld_cause_id",
            ],
            [
                "ramstk_cause.fld_revision_id",
                "ramstk_cause.fld_hardware_id",
                "ramstk_cause.fld_mode_id",
                "ramstk_cause.fld_mechanism_id",
                "ramstk_cause.fld_cause_id",
            ],
        ),
        {"extend_existing": True},
    )

    # Composite primary key columns identifying the action's position in the
    # revision/hardware/mode/mechanism/cause hierarchy.
    revision_id = Column("fld_revision_id", Integer, primary_key=True, nullable=False)
    hardware_id = Column(
        "fld_hardware_id", Integer, primary_key=True, default=-1, nullable=False
    )
    mode_id = Column("fld_mode_id", Integer, primary_key=True, nullable=False)
    mechanism_id = Column("fld_mechanism_id", Integer, primary_key=True, nullable=False)
    cause_id = Column(
        "fld_cause_id", Integer, primary_key=True, nullable=False, unique=True
    )
    action_id = Column(
        "fld_action_id", Integer, primary_key=True, autoincrement=True, nullable=False
    )

    # Payload columns; each default comes from __defaults__ above.
    action_recommended = Column(
        "fld_action_recommended", String, default=__defaults__["action_recommended"]
    )
    action_category = Column(
        "fld_action_category", String(512), default=__defaults__["action_category"]
    )
    action_owner = Column(
        "fld_action_owner", String(512), default=__defaults__["action_owner"]
    )
    action_due_date = Column(
        "fld_action_due_date", Date, default=__defaults__["action_due_date"]
    )
    action_status = Column(
        "fld_action_status", String(512), default=__defaults__["action_status"]
    )
    action_taken = Column(
        "fld_action_taken", String, default=__defaults__["action_taken"]
    )
    action_approved = Column(
        "fld_action_approved", Integer, default=__defaults__["action_approved"]
    )
    action_approve_date = Column(
        "fld_action_approve_date", Date, default=__defaults__["action_approve_date"]
    )
    action_closed = Column(
        "fld_action_closed", Integer, default=__defaults__["action_closed"]
    )
    action_close_date = Column(
        "fld_action_close_date", Date, default=__defaults__["action_close_date"]
    )

    # Define the relationships to other tables in the RAMSTK Program database.
    cause = relationship("RAMSTKCauseRecord", back_populates="action")  # type: ignore

    # Discriminator flags used by callers to identify the FMEA entity kind.
    is_mode = False
    is_mechanism = False
    is_cause = False
    is_control = False
    is_action = True

    def get_attributes(self):
        """Retrieve current values of the RAMSTKAction data model attributes.
        :return: {cause_id, action_id, action_recommended,
                  action_category, action_owner, action_due_date,
                  action_status, action_taken, action_approved,
                  action_approved_date, action_closed,
                  action_closed_date} pairs.
        :rtype: dict
        """
        _attributes = {
            "cause_id": self.cause_id,
            "action_id": self.action_id,
            "action_recommended": self.action_recommended,
            "action_category": self.action_category,
            "action_owner": self.action_owner,
            "action_due_date": self.action_due_date,
            "action_status": self.action_status,
            "action_taken": self.action_taken,
            "action_approved": self.action_approved,
            "action_approve_date": self.action_approve_date,
            "action_closed": self.action_closed,
            "action_close_date": self.action_close_date,
        }
        return _attributes
| 0 | 0 | 0 |
f5100452d343a876e37e519f48919a3f203b63e3 | 13,293 | py | Python | src/api/dcps/python/test/test_type_checkers.py | brezillon/opensplice | 725ae9d949c83fce1746bd7d8a154b9d0a81fe3e | [
"Apache-2.0"
] | 133 | 2017-11-09T02:10:00.000Z | 2022-03-29T09:45:10.000Z | src/api/dcps/python/test/test_type_checkers.py | brezillon/opensplice | 725ae9d949c83fce1746bd7d8a154b9d0a81fe3e | [
"Apache-2.0"
] | 131 | 2017-11-07T14:48:43.000Z | 2022-03-13T15:30:47.000Z | src/api/dcps/python/test/test_type_checkers.py | brezillon/opensplice | 725ae9d949c83fce1746bd7d8a154b9d0a81fe3e | [
"Apache-2.0"
] | 94 | 2017-11-09T02:26:19.000Z | 2022-02-24T06:38:25.000Z | #
# Vortex OpenSplice
#
# This software and documentation are Copyright 2006 to TO_YEAR ADLINK
# Technology Limited, its affiliated companies and licensors. All rights
# reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
Created on Dec 22, 2017
@author: prismtech
'''
import unittest
import ddsutil
import enum
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | 51.523256 | 109 | 0.698262 | #
# Vortex OpenSplice
#
# This software and documentation are Copyright 2006 to TO_YEAR ADLINK
# Technology Limited, its affiliated companies and licensors. All rights
# reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
Created on Dec 22, 2017
@author: prismtech
'''
import unittest
import ddsutil
import enum
class MyEnum(enum.Enum):
    """Two-member sample enumeration used to exercise ddsutil's class checker."""
    ONE = 0
    TWO = 1
class TestTypeCheckers(unittest.TestCase):
    """Validate the type-checker callables produced by ddsutil.

    Every checker must raise TypeError for values outside its domain and
    return silently for values inside it.  The eight bounded-integer tests
    all share one contract, factored into _check_integer below.
    """

    # Checkers under test, bound as class attributes exactly as before.
    _bool_checker = ddsutil._bool_checker
    _octet_checker = ddsutil._octet_checker
    _ushort_checker = ddsutil._ushort_checker
    _ulong_checker = ddsutil._ulong_checker
    _ulonglong_checker = ddsutil._ulonglong_checker
    _short_checker = ddsutil._short_checker
    _long_checker = ddsutil._long_checker
    _longlong_checker = ddsutil._longlong_checker
    _char_checker = ddsutil._char_checker
    _str_checker = ddsutil._str_checker
    _str2_checker = ddsutil._bounded_str_checker(2)
    _enum_checker = ddsutil._class_checker(MyEnum)
    _float_checker = ddsutil._float_checker
    _long_array_checker = ddsutil._array_checker(2, ddsutil._long_checker)
    _long_seq_checker = ddsutil._seq_checker(2, ddsutil._long_checker)
    _long_ubseq_checker = ddsutil._seq_checker(0, ddsutil._long_checker)
    _long_matrix_checker = ddsutil._array_checker(1, ddsutil._array_checker(2, ddsutil._long_checker))
    _long_seqseq_checker = ddsutil._seq_checker(1, ddsutil._seq_checker(2, ddsutil._long_checker))

    def _assert_rejects(self, checker, values):
        """Assert that *checker* raises TypeError for every value in *values*."""
        for value in values:
            with self.assertRaises(TypeError):
                checker(value)

    def _assert_accepts(self, checker, values):
        """Assert that *checker* silently accepts every value in *values*."""
        for value in values:
            checker(value)

    def _check_integer(self, checker, lo, hi):
        """Common contract of a bounded integer checker over [lo, hi].

        Non-integers and integers just outside / well outside the bounds
        must raise; the bounds themselves and interior values must pass.
        """
        self._assert_rejects(checker, [
            None, 3.5, 'a', b'a',
            lo - 1, hi + 1,          # just outside the bounds
            lo - 1000, hi + 1000,    # well outside the bounds
        ])
        interior = [lo, hi, 0, (lo + hi) // 2]
        if lo < 0:
            interior += [-5, 5]
        self._assert_accepts(checker, interior)

    def testBool(self):
        self._assert_rejects(self._bool_checker, [None, 1])
        self._assert_accepts(self._bool_checker, [True, False])

    def testOctet(self):
        self._check_integer(self._octet_checker, 0, 255)

    def testUShort(self):
        self._check_integer(self._ushort_checker, 0, (1 << 16) - 1)

    def testULong(self):
        self._check_integer(self._ulong_checker, 0, (1 << 32) - 1)

    def testULongLong(self):
        self._check_integer(self._ulonglong_checker, 0, (1 << 64) - 1)

    def testShort(self):
        self._check_integer(self._short_checker, -(1 << 15), (1 << 15) - 1)

    def testLong(self):
        self._check_integer(self._long_checker, -(1 << 31), (1 << 31) - 1)

    def testLongLong(self):
        self._check_integer(self._longlong_checker, -(1 << 63), (1 << 63) - 1)

    def testChar(self):
        # Exactly one character in the Latin-1 range is required.
        self._assert_rejects(self._char_checker, [None, 3, b'a', '', 'aa', chr(256)])
        self._assert_accepts(self._char_checker, [chr(0), '1', chr(255)])

    def testStr(self):
        # Bounded string of at most two Latin-1 characters.
        self._assert_rejects(self._str2_checker, [None, 3, b'a', 'aab', chr(256)])
        self._assert_accepts(self._str2_checker, ['', chr(0), '1', '11', chr(255)])

    def testClass(self):
        self._assert_rejects(self._enum_checker, [None, 1, 'A'])
        self._assert_accepts(self._enum_checker, [MyEnum.ONE, MyEnum.TWO])

    def testFloat(self):
        self._assert_rejects(self._float_checker, [None, 1, 'A'])
        self._assert_accepts(self._float_checker, [3.15, 1.0, -9.183])

    def testLongArray(self):
        # Fixed-length array: exactly two in-range longs.
        self._assert_rejects(self._long_array_checker, [
            None, 1, 1.0, 'A', [], [1], [1, 2, 3],
            [1.0, 2], [1, 2.0], [1 << 32, 2], [1, 1 << 32],
        ])
        self._assert_accepts(self._long_array_checker, [
            [1, 2], [-(1 << 31), (1 << 31) - 1],
        ])

    def testLongSeq(self):
        # Bounded sequence: at most two in-range longs.
        self._assert_rejects(self._long_seq_checker, [
            None, 1, 1.0, 'A', [1, 2, 3],
            [1.0, 2], [1, 2.0], [1 << 32, 2], [1, 1 << 32],
        ])
        self._assert_accepts(self._long_seq_checker, [
            [], [1], [1, 2], [-(1 << 31), (1 << 31) - 1],
        ])

    def testLongUBSeq(self):
        # Unbounded sequence: any number of in-range longs.
        self._assert_rejects(self._long_ubseq_checker, [
            None, 1, 1.0, 'A',
            [1.0, 2], [1, 2.0], [1 << 32, 2], [1, 1 << 32],
        ])
        self._assert_accepts(self._long_ubseq_checker, [
            [], [1], [1, 2], [1, 2, 3, 4, 5], [-(1 << 31), (1 << 31) - 1],
        ])

    def testLongMatrix(self):
        # Array of exactly one array of exactly two longs.
        self._assert_rejects(self._long_matrix_checker, [
            [], [[]], [[1]], [[1, 2, 3]], [[1, 2], [3, 4]],
        ])
        self._assert_accepts(self._long_matrix_checker, [[[1, 2]]])

    def testLongSeqSeq(self):
        # Sequence (<=1) of sequences (<=2) of longs.
        self._assert_accepts(self._long_seqseq_checker, [
            [], [[]], [[1]], [[1, 2]],
        ])
        self._assert_rejects(self._long_seqseq_checker, [
            [[1, 2, 3]], [[1, 2], [3, 4]],
        ])
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    # Discover and run every test in this module.
    unittest.main()
a45c6650efeef92d6157c4a9a8948139dbd687f7 | 1,730 | py | Python | teamspirit/users/models.py | etienne86/oc_p13_team_spirit | fd3d45618d349ecd0a03e63c4a7e9c1044eeffaa | [
"MIT"
] | null | null | null | teamspirit/users/models.py | etienne86/oc_p13_team_spirit | fd3d45618d349ecd0a03e63c4a7e9c1044eeffaa | [
"MIT"
] | null | null | null | teamspirit/users/models.py | etienne86/oc_p13_team_spirit | fd3d45618d349ecd0a03e63c4a7e9c1044eeffaa | [
"MIT"
] | null | null | null | """Contain the models related to the app ``users``."""
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from teamspirit.users.managers import UserManager
| 29.322034 | 73 | 0.669942 | """Contain the models related to the app ``users``."""
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from teamspirit.users.managers import UserManager
class User(AbstractBaseUser, PermissionsMixin):
    """Custom user model that authenticates with an e-mail address."""

    email = models.EmailField(
        verbose_name=_("Email"),
        unique=True,
        error_messages={
            'unique': _("A user with that email already exists."),
        }
    )
    # NOTE(review): ``default=email`` passes the *field object* declared
    # above, not a string value -- looks unintended; confirm against the
    # generated migration before relying on this default.
    username = models.EmailField(default=email)
    first_name = models.CharField(
        max_length=30,
        verbose_name=_("first name")
    )
    last_name = models.CharField(
        max_length=150,
        verbose_name=_("last name")
    )
    # Link to the member's personal profile data (app ``profiles``).
    personal = models.ForeignKey(
        to='profiles.Personal',
        on_delete=models.CASCADE,
        null=True,
        blank=False,
    )
    is_active = models.BooleanField(default=True)
    is_admin = models.BooleanField(default=False)
    is_staff = models.BooleanField(default=False)

    # Authenticate with the e-mail address instead of a username.
    USERNAME_FIELD = 'email'

    objects = UserManager()

    def __str__(self):
        """Return the user's display name, e.g. ``Jane DOE``."""
        return f"{self.first_name.capitalize()} {self.last_name.upper()}"

    def has_perm(self, perm, obj=None):
        "Does the user have a specific permission?"
        # Simplest possible answer: Yes, always
        return True

    def has_module_perms(self, app_label):
        "Does the user have permissions to view the app `app_label`?"
        # Simplest possible answer: Yes, always
        return True

    def get_absolute_url(self):
        """Return the canonical URL of this user's detail page."""
        return reverse("users:detail", kwargs={"email": self.email})
| 146 | 1,221 | 23 |
7180c2cb782283af9965665c625700a77fe90027 | 2,094 | py | Python | scripts/ingestors/rwis/process_idot_awos.py | trentford/iem | 7264d24f2d79a3cd69251a09758e6531233a732f | [
"MIT"
] | 1 | 2019-10-07T17:01:24.000Z | 2019-10-07T17:01:24.000Z | scripts/ingestors/rwis/process_idot_awos.py | trentford/iem | 7264d24f2d79a3cd69251a09758e6531233a732f | [
"MIT"
] | null | null | null | scripts/ingestors/rwis/process_idot_awos.py | trentford/iem | 7264d24f2d79a3cd69251a09758e6531233a732f | [
"MIT"
] | null | null | null | """Process AWOS METAR file"""
from __future__ import print_function
import re
import sys
import os
import datetime
import ftplib
import subprocess
import tempfile
from io import StringIO
from pyiem import util
INCOMING = "/mesonet/data/incoming"
def fetch_files():
    """Download the IDOT AWOS METAR file via FTP.

    Exits the process after printing a message when the FTP server
    cannot be reached.

    :return: path of the local file the METAR data was written to.
    :rtype: str
    """
    props = util.get_properties()
    fn = "%s/iaawos_metar.txt" % (INCOMING, )
    try:
        ftp = ftplib.FTP('165.206.203.34')
    except TimeoutError as _exp:
        print("process_idot_awos FTP server timeout error")
        sys.exit()
    try:
        ftp.login('rwis', props['rwis_ftp_password'])
        # Write through a context manager so the local file handle is
        # closed deterministically (the original passed an anonymous
        # open() whose handle was never closed).
        with open(fn, 'wb') as fh:
            ftp.retrbinary('RETR METAR.txt', fh.write)
    finally:
        # Close the control connection even if login/transfer fails.
        ftp.close()
    return fn
def main():
    """Build a METAR collective from the AWOS file and pqinsert it.

    Fetches the raw file, keeps only lines that look like METAR reports
    for K??? identifiers, wraps them in a WMO SAUS00 product and hands
    the result to LDM's pqinsert.
    """
    fn = fetch_files()
    utc = datetime.datetime.utcnow().strftime("%Y%m%d%H%M")
    data = {}
    # Sometimes, the file gets gobbled it seems, so keep only well-formed
    # METAR lines.  The context manager closes the handle deterministically
    # (the original left the file open to the garbage collector).
    with open(fn, 'rb') as fh:
        for line in fh:
            line = line.decode('utf-8', 'ignore')
            match = re.match("METAR K(?P<id>[A-Z1-9]{3})", line)
            if not match:
                continue
            gd = match.groupdict()
            data[gd['id']] = line
    # Assemble the WMO product: SOH header, collective body, ETX trailer.
    sio = StringIO()
    sio.write("\001\r\r\n")
    sio.write(("SAUS00 KISU %s\r\r\n"
               ) % (datetime.datetime.utcnow().strftime("%d%H%M"), ))
    sio.write("METAR\r\r\n")
    for sid in data:
        sio.write('%s=\r\r\n' % (data[sid].strip().replace("METAR ", ""), ))
    sio.write("\003")
    sio.seek(0)
    # Stage the product in a temp file for pqinsert, then clean it up.
    (tmpfd, tmpname) = tempfile.mkstemp()
    os.write(tmpfd, sio.getvalue().encode('utf-8'))
    os.close(tmpfd)
    proc = subprocess.Popen(("/home/ldm/bin/pqinsert -i -p 'data c %s "
                             "LOCDSMMETAR.dat LOCDSMMETAR.dat txt' %s"
                             ) % (utc, tmpname), shell=True,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    (stdout, stderr) = proc.communicate()
    os.remove(tmpname)
    # stderr is merged into stdout above, so any output signals a problem.
    if stdout != b"" or stderr is not None:
        print("process_idot_awos\nstdout: %s\nstderr: %s" % (stdout, stderr))
if __name__ == '__main__':
    # Run the ingest when executed as a script.
    main()
| 28.684932 | 77 | 0.579274 | """Process AWOS METAR file"""
from __future__ import print_function
import re
import sys
import os
import datetime
import ftplib
import subprocess
import tempfile
from io import StringIO
from pyiem import util
INCOMING = "/mesonet/data/incoming"
def fetch_files():
    """Download the IDOT AWOS METAR file via FTP.

    Exits the process after printing a message when the FTP server
    cannot be reached.

    :return: path of the local file the METAR data was written to.
    :rtype: str
    """
    props = util.get_properties()
    fn = "%s/iaawos_metar.txt" % (INCOMING, )
    try:
        ftp = ftplib.FTP('165.206.203.34')
    except TimeoutError as _exp:
        print("process_idot_awos FTP server timeout error")
        sys.exit()
    try:
        ftp.login('rwis', props['rwis_ftp_password'])
        # Write through a context manager so the local file handle is
        # closed deterministically (the original passed an anonymous
        # open() whose handle was never closed).
        with open(fn, 'wb') as fh:
            ftp.retrbinary('RETR METAR.txt', fh.write)
    finally:
        # Close the control connection even if login/transfer fails.
        ftp.close()
    return fn
def main():
    """Build a METAR collective from the AWOS file and pqinsert it.

    Fetches the raw file, keeps only lines that look like METAR reports
    for K??? identifiers, wraps them in a WMO SAUS00 product and hands
    the result to LDM's pqinsert.
    """
    fn = fetch_files()
    utc = datetime.datetime.utcnow().strftime("%Y%m%d%H%M")
    data = {}
    # Sometimes, the file gets gobbled it seems, so keep only well-formed
    # METAR lines.  The context manager closes the handle deterministically
    # (the original left the file open to the garbage collector).
    with open(fn, 'rb') as fh:
        for line in fh:
            line = line.decode('utf-8', 'ignore')
            match = re.match("METAR K(?P<id>[A-Z1-9]{3})", line)
            if not match:
                continue
            gd = match.groupdict()
            data[gd['id']] = line
    # Assemble the WMO product: SOH header, collective body, ETX trailer.
    sio = StringIO()
    sio.write("\001\r\r\n")
    sio.write(("SAUS00 KISU %s\r\r\n"
               ) % (datetime.datetime.utcnow().strftime("%d%H%M"), ))
    sio.write("METAR\r\r\n")
    for sid in data:
        sio.write('%s=\r\r\n' % (data[sid].strip().replace("METAR ", ""), ))
    sio.write("\003")
    sio.seek(0)
    # Stage the product in a temp file for pqinsert, then clean it up.
    (tmpfd, tmpname) = tempfile.mkstemp()
    os.write(tmpfd, sio.getvalue().encode('utf-8'))
    os.close(tmpfd)
    proc = subprocess.Popen(("/home/ldm/bin/pqinsert -i -p 'data c %s "
                             "LOCDSMMETAR.dat LOCDSMMETAR.dat txt' %s"
                             ) % (utc, tmpname), shell=True,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    (stdout, stderr) = proc.communicate()
    os.remove(tmpname)
    # stderr is merged into stdout above, so any output signals a problem.
    if stdout != b"" or stderr is not None:
        print("process_idot_awos\nstdout: %s\nstderr: %s" % (stdout, stderr))
if __name__ == '__main__':
    # Run the ingest when executed as a script.
    main()
| 0 | 0 | 0 |
c0d79cf9add0c3046e65ed825aa8c5eb35aeb539 | 1,172 | py | Python | bvc/models/treasury.py | Vayel/GUCEM-BVC | e5645dec332756d3c9db083abf2c8f3625a10d4d | [
"WTFPL"
] | 2 | 2016-09-23T18:02:40.000Z | 2017-04-28T18:35:59.000Z | bvc/models/treasury.py | Vayel/GUCEM-BVC | e5645dec332756d3c9db083abf2c8f3625a10d4d | [
"WTFPL"
] | 82 | 2016-09-26T14:38:31.000Z | 2018-02-12T18:47:12.000Z | bvc/models/treasury.py | Vayel/GUCEM-BVC | e5645dec332756d3c9db083abf2c8f3625a10d4d | [
"WTFPL"
] | null | null | null | from django.db import models
from django.core.exceptions import ObjectDoesNotExist
| 25.478261 | 86 | 0.693686 | from django.db import models
from django.core.exceptions import ObjectDoesNotExist
def get_previous_treasury(id_):
    """Return the stock recorded by the operation preceding *id_* (0 if none)."""
    try:
        previous_op = TreasuryOperation.objects.get(id=id_ - 1)
    except ObjectDoesNotExist:
        return 0
    return previous_op.stock
def get_treasury():
    """Return the current treasury stock (0 when no operation exists yet)."""
    try:
        latest_op = TreasuryOperation.objects.latest('id')
    except ObjectDoesNotExist:
        return 0
    return latest_op.stock
def treasury_op_from_delta(delta, reason):
    """Create and save a treasury operation shifting the stock by *delta*.

    :param delta: signed amount added to the current treasury stock.
    :param reason: human-readable justification for the operation.
    :raises ValueError: if the resulting stock would be negative.
    :return: the saved TreasuryOperation instance.
    """
    stock = get_treasury() + delta
    if stock < 0:
        # Refuse to drive the treasury negative; the original raised a bare
        # ValueError() with no context, which made failures hard to debug.
        raise ValueError(
            'Treasury operation (delta: %s, reason: %s) would make the '
            'stock negative (%s)' % (delta, reason, stock)
        )
    op = TreasuryOperation(stock=stock, reason=reason)
    op.save()
    return op
class TreasuryOperation(models.Model):
    """A snapshot of the treasury stock after one operation."""

    REASON_MAX_LEN = 200

    # Absolute stock after the operation; the signed change is derived
    # via the ``delta`` property below.
    stock = models.DecimalField(max_digits=7, decimal_places=2, verbose_name='stock',)
    reason = models.CharField(max_length=REASON_MAX_LEN, verbose_name='raison',)
    date = models.DateField(auto_now_add=True,)

    class Meta:
        verbose_name = 'opération de trésorerie'
        verbose_name_plural = 'opérations de trésorerie'

    def __str__(self):
        return '{} (delta : {})'.format(self.stock, self.delta)

    @property
    def delta(self):
        # Signed change relative to the previous operation's stock.
        # NOTE(review): assumes ids are consecutive (looks up id - 1);
        # confirm operations are never deleted.
        return self.stock - get_previous_treasury(self.id)
| 543 | 454 | 92 |
be915c81082067a81a8b1706489e596e884a6466 | 101 | py | Python | abc/abc033/abc033c.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | 1 | 2019-08-21T00:49:34.000Z | 2019-08-21T00:49:34.000Z | abc/abc033/abc033c.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | abc/abc033/abc033c.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | S = input()
result = 0
# Count the '+'-separated terms of S that contain no '0' digit
# (a term's product of digits is non-zero exactly when no digit is 0).
for s in S.split('+'):
    if s.count('0') == 0:
        result += 1
print(result)
| 12.625 | 23 | 0.524752 | S = input()
# Count the '+'-separated terms of S that contain no '0' digit.
result = sum(1 for term in S.split('+') if '0' not in term)
print(result)
| 0 | 0 | 0 |
eac53b2d01f44a07353fd2b12387db5da97ec33d | 246 | py | Python | config.py | sweetcolor/internet_market_scraper | f7eb8c9ade2c0a956ba5d5b7e6173010c85afed6 | [
"MIT"
] | null | null | null | config.py | sweetcolor/internet_market_scraper | f7eb8c9ade2c0a956ba5d5b7e6173010c85afed6 | [
"MIT"
] | null | null | null | config.py | sweetcolor/internet_market_scraper | f7eb8c9ade2c0a956ba5d5b7e6173010c85afed6 | [
"MIT"
] | null | null | null | import os
# Scraper storage locations, rooted under the user's home directory so runs
# do not pollute the working tree.
CACHE_DIRECTORY = os.path.join(os.path.expanduser('~'), os.path.join('temp', 'cache'))
RESULT_DIRECTORY = os.path.join(os.path.expanduser('~'), os.path.join('temp', 'result'))
# Alternative: paths relative to the current working directory.
# CACHE_DIRECTORY = 'cache'
# RESULT_DIRECTORY = 'result'
| 30.75 | 88 | 0.699187 | import os
# Scraper storage locations, rooted at ~/temp (switch to plain 'cache' /
# 'result' for paths relative to the working directory).
_TEMP_ROOT = os.path.join(os.path.expanduser('~'), 'temp')
CACHE_DIRECTORY = os.path.join(_TEMP_ROOT, 'cache')
RESULT_DIRECTORY = os.path.join(_TEMP_ROOT, 'result')
| 0 | 0 | 0 |
fa279acec731fd90c91aff4d02f5d936763c2cc8 | 6,825 | py | Python | robot_ws/src/robot_bringup/launch/nsra_moveit.launch.py | NS-Robotics/NSRA2 | 86dbd504ed268fa951c61b010924bea6faff5a43 | [
"BSD-3-Clause"
] | null | null | null | robot_ws/src/robot_bringup/launch/nsra_moveit.launch.py | NS-Robotics/NSRA2 | 86dbd504ed268fa951c61b010924bea6faff5a43 | [
"BSD-3-Clause"
] | null | null | null | robot_ws/src/robot_bringup/launch/nsra_moveit.launch.py | NS-Robotics/NSRA2 | 86dbd504ed268fa951c61b010924bea6faff5a43 | [
"BSD-3-Clause"
] | null | null | null | import os
import yaml
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import ExecuteProcess, DeclareLaunchArgument
from launch.substitutions import Command, FindExecutable, LaunchConfiguration, PathJoinSubstitution
from ament_index_python.packages import get_package_share_directory
from launch_ros.substitutions import FindPackageShare
import xacro #sudo apt install ros-foxy-xacro
| 31.597222 | 325 | 0.619487 | import os
import yaml
from launch import LaunchDescription
from launch_ros.actions import Node
from launch.actions import ExecuteProcess, DeclareLaunchArgument
from launch.substitutions import Command, FindExecutable, LaunchConfiguration, PathJoinSubstitution
from ament_index_python.packages import get_package_share_directory
from launch_ros.substitutions import FindPackageShare
import xacro #sudo apt install ros-foxy-xacro
def load_file(file_path):
    """Return the text content of ``file_path``, or None when it cannot be read."""
    try:
        with open(file_path, "r") as file:
            return file.read()
    except EnvironmentError:  # parent of IOError, OSError *and* WindowsError where available
        # Include the offending path so a launch failure is diagnosable.
        print("file error: {}".format(file_path))
        return None
def load_yaml(file_path):
    """Return the parsed YAML content of ``file_path``, or None when unreadable.

    NOTE(review): only I/O errors are caught; a malformed YAML file still
    raises ``yaml.YAMLError`` — same as the original behavior.
    """
    try:
        with open(file_path, "r") as file:
            return yaml.safe_load(file)
    except EnvironmentError:  # parent of IOError, OSError *and* WindowsError where available
        # Include the offending path so a launch failure is diagnosable.
        print("yaml error: {}".format(file_path))
        return None
def generate_launch_description():
    """Build the ROS2 launch description for the NSRA2 MoveIt stack.

    Assembles the robot description (xacro URDF + SRDF + kinematics),
    OMPL planning and trajectory-execution configuration, then starts the
    move_group node, a static world->base_link transform and a MongoDB
    warehouse wrapper. The RViz node is currently disabled (commented out).
    """
    # Initialize Arguments — hard-coded for now; the commented
    # LaunchConfiguration calls show the intended parametrization.
    runtime_config_package = "robot_descriptions" #LaunchConfiguration("runtime_config_package")
    runtime_config_robot = "nsra2" #LaunchConfiguration("runtime_config_robot")
    # planning_context: expand the xacro URDF at launch time with fixed args.
    robot_description_config = Command(
        [
            PathJoinSubstitution([FindExecutable(name="xacro")]),
            " ",
            PathJoinSubstitution(
                [get_package_share_directory(runtime_config_package), runtime_config_robot, "urdf", "nsra2.urdf.xacro"]
            ),
            " ",
            "prefix:=",
            "",
            " ",
            "use_sim:=",
            "false",
            " ",
            "use_fake_hardware:=",
            "false",
            " ",
            "fake_sensor_commands:=",
            "false",
            " ",
            "slowdown:=",
            "3.0",
        ]
    )
    robot_description = {"robot_description": robot_description_config}
    # Semantic description (SRDF) read as plain text.
    robot_description_semantic_config = load_file(
        os.path.join(
            get_package_share_directory(runtime_config_package),
            runtime_config_robot,
            "moveit",
            "nsra.srdf",
        )
    )
    robot_description_semantic = {
        "robot_description_semantic": robot_description_semantic_config
    }
    kinematics_yaml = load_yaml(
        os.path.join(
            get_package_share_directory(runtime_config_package),
            runtime_config_robot,
            "moveit",
            "kinematics.yaml",
        )
    )
    robot_description_kinematics = {"robot_description_kinematics": kinematics_yaml}
    # Planning Functionality — OMPL pipeline plus the standard request adapters.
    ompl_planning_pipeline_config = {
        "planning_pipelines": ["ompl"],
        "ompl": {
            "planning_plugin": "ompl_interface/OMPLPlanner",
            "request_adapters": """default_planner_request_adapters/AddTimeOptimalParameterization default_planner_request_adapters/FixWorkspaceBounds default_planner_request_adapters/FixStartStateBounds default_planner_request_adapters/FixStartStateCollision default_planner_request_adapters/FixStartStatePathConstraints""",
            "start_state_max_bounds_error": 0.1,
        },
    }
    ompl_planning_yaml = load_yaml(
        os.path.join(
            get_package_share_directory(runtime_config_package),
            runtime_config_robot,
            "moveit",
            "ompl_planning.yaml",
        )
    )
    # Merge the per-robot OMPL settings into the pipeline defaults above.
    ompl_planning_pipeline_config["ompl"].update(ompl_planning_yaml)
    # Trajectory Execution Functionality
    moveit_simple_controllers_yaml = load_yaml(
        os.path.join(
            get_package_share_directory(runtime_config_package),
            runtime_config_robot,
            "config",
            "moveit_controllers.yaml",
        )
    )
    moveit_controllers = {
        "moveit_simple_controller_manager": moveit_simple_controllers_yaml,
        "moveit_controller_manager": "moveit_simple_controller_manager/MoveItSimpleControllerManager",
    }
    trajectory_execution = {
        "moveit_manage_controllers": True,
        "trajectory_execution.allowed_execution_duration_scaling": 1.2,
        "trajectory_execution.allowed_goal_duration_margin": 0.5,
        "trajectory_execution.allowed_start_tolerance": 0.01,
    }
    planning_scene_monitor_parameters = {
        "publish_planning_scene": True,
        "publish_geometry_updates": True,
        "publish_state_updates": True,
        "publish_transforms_updates": True,
    }
    joint_limits_yaml = {
        "robot_description_planning": load_yaml(
            os.path.join(
                get_package_share_directory(runtime_config_package),
                runtime_config_robot,
                "moveit",
                "joint_limits.yaml",
            )
        )
    }
    # Start the actual move_group node/action server
    run_move_group_node = Node(
        package="moveit_ros_move_group",
        executable="move_group",
        output="screen",
        parameters=[
            robot_description,
            robot_description_semantic,
            kinematics_yaml,
            ompl_planning_pipeline_config,
            trajectory_execution,
            moveit_controllers,
            planning_scene_monitor_parameters,
            joint_limits_yaml,
        ],
    )
    # RViz (disabled — kept for reference)
    # rviz_config_file = PathJoinSubstitution(
    #     [
    #         FindPackageShare(runtime_config_package),
    #         runtime_config_robot,
    #         "config",
    #         "robot.rviz"
    #     ]
    # )
    # rviz_node = Node(
    #     package="rviz2",
    #     executable="rviz2",
    #     name="rviz2",
    #     output="log",
    #     arguments=["-d", rviz_config_file],
    #     parameters=[
    #         robot_description,
    #         robot_description_semantic,
    #         ompl_planning_pipeline_config,
    #         kinematics_yaml,
    #         joint_limits_yaml,
    #     ],
    # )
    # Static TF: identity transform publishing world -> base_link.
    static_tf = Node(
        package="tf2_ros",
        executable="static_transform_publisher",
        name="static_transform_publisher",
        output="log",
        arguments=["0.0", "0.0", "0.0", "0.0", "0.0", "0.0", "world", "base_link"],
    )
    # Warehouse mongodb server (hard-coded host/port — TODO: parametrize).
    mongodb_server_node = Node(
        package="warehouse_ros_mongo",
        executable="mongo_wrapper_ros.py",
        parameters=[
            {"warehouse_port": 33829},
            {"warehouse_host": "192.168.1.110"},
            {"warehouse_plugin": "warehouse_ros_mongo::MongoDatabaseConnection"},
        ],
        output="screen",
    )
    return LaunchDescription(
        [
            #rviz_node,
            static_tf,
            #robot_state_publisher,
            run_move_group_node,
            #ros2_control_node,
            mongodb_server_node,
        ]
        #+ load_controllers
    )
ed10715519696688a209a2c646cfeefc39085f2d | 4,443 | py | Python | SimpleIncrementor.py | Gru80/SimpleIncrementor | e470800d55d27485394c612e654b7624310b84d9 | [
"MIT"
] | null | null | null | SimpleIncrementor.py | Gru80/SimpleIncrementor | e470800d55d27485394c612e654b7624310b84d9 | [
"MIT"
] | null | null | null | SimpleIncrementor.py | Gru80/SimpleIncrementor | e470800d55d27485394c612e654b7624310b84d9 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
This is a plugin for the Sublime Text Editor
https://www.sublimetext.com/
Replace all occurences of the currently selected text in the document with an incrementing number.
Some options are provided:
* Start with an offset
* Use fixed number of digits (fill up with leading 0s)
* Define a preceding text in front of the iterator
'''
import sublime, sublime_plugin
import re
SETTINGS_FILE = "SimpleIncrementor.sublime-settings"
EXPHELP = '''Use key:value pairs separated by a blank character to pass options.
Valid Keys:
digits, offset, prectext, step
Example:
digits:5 offset:10
To re-show this dialogue, enable show_help in the Plugin Settings.
'''
class SimpleIncrementExpertParseCommand(sublime_plugin.TextCommand):
''' Take the arguments from expert mode and create a dictionary from it to
call the main function
'''
class SimpleIncrementExpertCommand(sublime_plugin.TextCommand):
''' Get the user input for expert-mode execution '''
class SimpleIncrementCommand(sublime_plugin.TextCommand):
''' The main component for doing the replacement '''
class SimpleIncrementDigitsCommand(sublime_plugin.TextCommand):
''' Fill up the left part with leading zeros to match the given number of digits '''
prectext = ''
class SimpleIncrementPrectextCommand(sublime_plugin.TextCommand):
''' Get the preceding text from the user '''
class SimpleIncrementPrectextDigitsCommand(sublime_plugin.TextCommand):
''' Combination of preceding text and fill-up with leading zeros '''
class SimpleIncrementOffsetCommand(sublime_plugin.TextCommand):
''' Start incrementation with an offset '''
| 29.818792 | 98 | 0.610623 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
'''
This is a plugin for the Sublime Text Editor
https://www.sublimetext.com/
Replace all occurences of the currently selected text in the document with an incrementing number.
Some options are provided:
* Start with an offset
* Use fixed number of digits (fill up with leading 0s)
* Define a preceding text in front of the iterator
'''
import sublime, sublime_plugin
import re
SETTINGS_FILE = "SimpleIncrementor.sublime-settings"
EXPHELP = '''Use key:value pairs separated by a blank character to pass options.
Valid Keys:
digits, offset, prectext, step
Example:
digits:5 offset:10
To re-show this dialogue, enable show_help in the Plugin Settings.
'''
def settings():
    """Load and return this plugin's Sublime Text settings object."""
    plugin_settings = sublime.load_settings(SETTINGS_FILE)
    return plugin_settings
class SimpleIncrementExpertParseCommand(sublime_plugin.TextCommand):
    ''' Take the arguments from expert mode and create a dictionary from it to
        call the main function
    '''
    def run(self, edit, cmd):
        # Parse whitespace-separated "key:value" pairs into kwargs for the
        # main simple_increment command (e.g. "digits:5 offset:10").
        cmds = dict(re.findall(r'(\S+):(\S+)', cmd))
        sublime.active_window().run_command('simple_increment', cmds)
        #print(cmds)
class SimpleIncrementExpertCommand(sublime_plugin.TextCommand):
    ''' Get the user input for expert-mode execution '''
    def run(self, edit):
        # Show the expert-mode help dialog once, then persist show_help=False
        # so it is not shown again (user can re-enable it in settings).
        shelp = False
        if settings().has("show_help"):
            shelp = settings().get("show_help")
        if shelp:
            sublime.message_dialog(EXPHELP)
            settings().set("show_help", False)
            sublime.save_settings(SETTINGS_FILE)
        # Collect the raw expert command string and hand it to the parser.
        self.view.window().show_input_panel(
            'Simple Incrementor - Expert Mode:',
            '',
            lambda x: sublime.active_window().run_command('simple_increment_expert_parse', {
                'cmd': x
            }),
            None,
            None)
class SimpleIncrementCommand(sublime_plugin.TextCommand):
    ''' The main component for doing the replacement '''
    def run(self, edit, **kwargs):
        # Options arrive as strings from the input panels; normalise them.
        start = int(kwargs.get('offset', 0))
        width = int(kwargs.get('digits', 0))
        increment = int(kwargs.get('step', 1))
        prefix = kwargs.get('prectext', '')
        # Extend the selection to every occurrence of the selected text.
        sublime.active_window().run_command('find_all_under')
        replaced = 0
        for index, region in enumerate(self.view.sel()):
            value = start + index * increment
            self.view.replace(edit, region, prefix + str(value).zfill(width))
            replaced += 1
        self.view.window().status_message('Replaced {} occurances'.format(replaced))
class SimpleIncrementDigitsCommand(sublime_plugin.TextCommand):
    ''' Fill up the left part with leading zeros to match the given number of digits '''
    # Preceding text carried over from a previous prompt; overwritten on each
    # run() call. NOTE(review): stored on the command instance, so it is
    # per-view state — confirm that is intended.
    prectext = ''
    def run(self, edit, prectext = ''):
        self.prectext = prectext
        # Ask for the total digit count, then run the main command with both
        # the digits and any preceding text collected earlier.
        self.view.window().show_input_panel(
            'Simple Incrementor: How many total digits?',
            '',
            lambda x: sublime.active_window().run_command('simple_increment', {
                'digits': x,
                'prectext': self.prectext
            }),
            None,
            None)
class SimpleIncrementPrectextCommand(sublime_plugin.TextCommand):
    ''' Get the preceding text from the user '''
    def run(self, edit):
        # Ask for the text to place in front of every generated number, then
        # run the main command with it.
        self.view.window().show_input_panel(
            'Simple Incrementor: Preceding Text?',
            '',
            lambda x: sublime.active_window().run_command('simple_increment', {
                'prectext': x
            }),
            None,
            None)
class SimpleIncrementPrectextDigitsCommand(sublime_plugin.TextCommand):
    ''' Combination of preceding text and fill-up with leading zeros '''
    def run(self, edit):
        # Ask for the preceding text first; the digits command then asks for
        # the digit count and finally invokes the main command.
        self.view.window().show_input_panel(
            'Simple Incrementor: Preceding Text?',
            '',
            lambda x: sublime.active_window().run_command('simple_increment_digits', {
                'prectext': x
            }),
            None,
            None)
class SimpleIncrementOffsetCommand(sublime_plugin.TextCommand):
    ''' Start incrementation with an offset '''
    def run(self, edit):
        # Ask for the starting value, then run the main command with it.
        self.view.window().show_input_panel(
            'Simple Incrementor: Offset?',
            '',
            lambda x: sublime.active_window().run_command('simple_increment', {
                'offset': x
            }),
            None,
            None)
| 2,543 | 0 | 211 |
73660730c4b825462ce529960c64f5330d37ce50 | 1,421 | py | Python | s3-lambda-textract-cdktf/helpers/layers.py | iobreaker/serverless-patterns | 58991f19f6566637c336471a37ba7def49e8a9ad | [
"MIT-0"
] | null | null | null | s3-lambda-textract-cdktf/helpers/layers.py | iobreaker/serverless-patterns | 58991f19f6566637c336471a37ba7def49e8a9ad | [
"MIT-0"
] | null | null | null | s3-lambda-textract-cdktf/helpers/layers.py | iobreaker/serverless-patterns | 58991f19f6566637c336471a37ba7def49e8a9ad | [
"MIT-0"
] | null | null | null | """
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import urllib3
import shutil
import zipfile
from .hash import getFileSha256Hash
import os
PILLOW_MODULE_DOWNLOAD_FILE_NAME = "pillow_layer_module.zip"
def downloadPillowLayerFile(directory: str, url: str, file_hash: str):
    """
    Download the Pillow layer zip, verify its sha256, and extract it.

    :param directory: Target directory to store the downloaded file
    :param url: The url for the download
    :param file_hash: The verification sha256 hash
    :return: None
    :raises IOError: on download or extraction failure
    :raises ImportError: when the downloaded file's hash does not match
    """
    if not os.path.exists(directory):
        os.mkdir(directory)
    http = urllib3.PoolManager()
    # Stream the response to disk (preload_content=False avoids buffering
    # the whole archive in memory).
    try:
        with http.request('GET', url, preload_content=False) as r, open(
                f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}", 'wb') as f:
            shutil.copyfileobj(r, f)
    except Exception as err:
        raise IOError(err)
    # Verify integrity before trusting the archive; delete it on mismatch.
    if getFileSha256Hash(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}") != file_hash:
        os.unlink(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}")
        raise ImportError(f"Bad Pillow module file sha256 signature : {url}")
    # Lambda layers expect the module under a "python" sub-directory.
    try:
        with zipfile.ZipFile(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}", 'r') as f:
            f.extractall(f"{directory}/python")
    except Exception as err:
        raise IOError(err)
    # Remove the archive once successfully extracted.
    os.unlink(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}")
| 30.234043 | 90 | 0.693878 | """
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import urllib3
import shutil
import zipfile
from .hash import getFileSha256Hash
import os
PILLOW_MODULE_DOWNLOAD_FILE_NAME = "pillow_layer_module.zip"
def downloadPillowLayerFile(directory: str, url: str, file_hash: str):
    """
    Download the Pillow layer zip, verify its sha256, and extract it.

    :param directory: Target directory to store the downloaded file
    :param url: The url for the download
    :param file_hash: The verification sha256 hash
    :return: None
    :raises IOError: on download or extraction failure (root cause chained)
    :raises ImportError: when the downloaded file's hash does not match
    """
    if not os.path.exists(directory):
        os.mkdir(directory)
    http = urllib3.PoolManager()
    # Stream the response to disk (preload_content=False avoids buffering
    # the whole archive in memory).
    try:
        with http.request('GET', url, preload_content=False) as r, open(
                f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}", 'wb') as f:
            shutil.copyfileobj(r, f)
    except Exception as err:
        # Chain the underlying network/filesystem error as the cause so the
        # original failure is preserved in the traceback.
        raise IOError(err) from err
    # Verify integrity before trusting the archive; delete it on mismatch.
    if getFileSha256Hash(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}") != file_hash:
        os.unlink(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}")
        raise ImportError(f"Bad Pillow module file sha256 signature : {url}")
    # Lambda layers expect the module under a "python" sub-directory.
    try:
        with zipfile.ZipFile(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}", 'r') as f:
            f.extractall(f"{directory}/python")
    except Exception as err:
        raise IOError(err) from err
    # Remove the archive once successfully extracted.
    os.unlink(f"{directory}/{PILLOW_MODULE_DOWNLOAD_FILE_NAME}")
| 0 | 0 | 0 |
7a18b8fb25edcb421a3fd9ecf0b06851c9c9dd7f | 966 | py | Python | tests/unit/apis/test_settings.py | linji0801/amazon-s3-find-and-forget | 9005fded8176faaee1e12cc42dc57f7d7b861a0d | [
"Apache-2.0"
] | null | null | null | tests/unit/apis/test_settings.py | linji0801/amazon-s3-find-and-forget | 9005fded8176faaee1e12cc42dc57f7d7b861a0d | [
"Apache-2.0"
] | null | null | null | tests/unit/apis/test_settings.py | linji0801/amazon-s3-find-and-forget | 9005fded8176faaee1e12cc42dc57f7d7b861a0d | [
"Apache-2.0"
] | null | null | null | import json
from types import SimpleNamespace
import pytest
from mock import patch
from backend.lambdas.settings import handlers
pytestmark = [pytest.mark.unit, pytest.mark.api, pytest.mark.settings]
@patch("backend.lambdas.settings.handlers.get_config")
| 28.411765 | 70 | 0.668737 | import json
from types import SimpleNamespace
import pytest
from mock import patch
from backend.lambdas.settings import handlers
pytestmark = [pytest.mark.unit, pytest.mark.api, pytest.mark.settings]
@patch("backend.lambdas.settings.handlers.get_config")
def test_it_process_queue(mock_config):
    # NOTE(review): the name looks copy-pasted from a queue test — it
    # actually verifies list_settings_handler echoes the config verbatim.
    mock_config.return_value = {
        "AthenaConcurrencyLimit": 15,
        "DeletionTasksMaxNumber": 50,
        "QueryExecutionWaitSeconds": 5,
        "QueryQueueWaitSeconds": 5,
        "ForgetQueueWaitSeconds": 30,
    }
    response = handlers.list_settings_handler({}, SimpleNamespace())
    assert 200 == response["statusCode"]
    assert "headers" in response
    # The body must wrap the mocked config under a "Settings" key, unchanged.
    assert {
        "Settings": {
            "AthenaConcurrencyLimit": 15,
            "DeletionTasksMaxNumber": 50,
            "QueryExecutionWaitSeconds": 5,
            "QueryQueueWaitSeconds": 5,
            "ForgetQueueWaitSeconds": 30,
        }
    } == json.loads(response["body"])
| 684 | 0 | 22 |
668d543614e31bc7e1ee6ac14f929debe3906495 | 916 | py | Python | localization/apps.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | 1 | 2017-04-27T19:35:42.000Z | 2017-04-27T19:35:42.000Z | localization/apps.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | null | null | null | localization/apps.py | establishment/django-establishment | ad1d04fe9efc748e2fba5b4bc67446d2a4cf12f6 | [
"CC0-1.0"
] | null | null | null | from django.apps import AppConfig
from django.conf import settings
| 29.548387 | 89 | 0.724891 | from django.apps import AppConfig
from django.conf import settings
def ensure_default_language():
    """Best-effort creation of the default language row (id=1, English)."""
    # Imported lazily: this runs from AppConfig.ready(), before model imports
    # at module level would be safe.
    from .models import Language
    try:
        Language.objects.get(id=1)
    except Exception:
        # Row missing (or DB not ready, e.g. during migrations) — try to
        # create it, but never let startup fail because of this.
        try:
            Language.objects.create(name="English", local_name="English", iso_code="eng")
        except Exception:
            print("Failed to create default language")
def collect_public_state(state, global_constants, context_dict):
    """Add all localization objects to the public state (order preserved)."""
    from .models import Language, Country, TranslationKey, TranslationEntry
    querysets = (
        Language.objects.all(),
        TranslationEntry.objects.all(),
        TranslationKey.objects.all(),
        Country.objects.all(),
    )
    for queryset in querysets:
        state.add_all(queryset)
class LocalizationAppConfig(AppConfig):
    """Django app config for the localization app."""
    # Dotted path of the Django app this config belongs to.
    name = "establishment.localization"
    def ready(self):
        # Runs once Django has loaded all apps: seed the default language and
        # register this app's public-state collector.
        ensure_default_language()
        settings.PUBLIC_STATE_COLLECTORS.append(collect_public_state)
| 692 | 85 | 69 |
8dc2e570aad8b8b5478e75861962a8499c8352ea | 4,036 | py | Python | venv/Lib/site-packages/keystoneauth1/identity/v3/application_credential.py | prasoon-uta/IBM-coud-storage | 82a6876316715efbd0b492d0d467dde0ab26a56b | [
"Apache-2.0"
] | 48 | 2015-05-02T16:19:10.000Z | 2021-12-17T19:01:17.000Z | venv/Lib/site-packages/keystoneauth1/identity/v3/application_credential.py | prasoon-uta/IBM-coud-storage | 82a6876316715efbd0b492d0d467dde0ab26a56b | [
"Apache-2.0"
] | 1 | 2019-12-04T13:48:10.000Z | 2019-12-04T13:48:10.000Z | venv/Lib/site-packages/keystoneauth1/identity/v3/application_credential.py | prasoon-uta/IBM-coud-storage | 82a6876316715efbd0b492d0d467dde0ab26a56b | [
"Apache-2.0"
] | 46 | 2015-05-23T14:04:35.000Z | 2022-02-17T12:33:50.000Z | # Copyright 2018 SUSE Linux GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneauth1.identity.v3 import base
__all__ = ('ApplicationCredentialMethod', 'ApplicationCredential')
class ApplicationCredentialMethod(base.AuthMethod):
"""Construct a User/Passcode based authentication method.
:param string application_credential_secret: Application credential secret.
:param string application_credential_id: Application credential id.
:param string application_credential_name: The name of the application
credential, if an ID is not
provided.
:param string username: Username for authentication, if an application
credential ID is not provided.
:param string user_id: User ID for authentication, if an application
credential ID is not provided.
:param string user_domain_id: User's domain ID for authentication, if an
application credential ID is not provided.
:param string user_domain_name: User's domain name for authentication, if
an application credential ID is not
provided.
"""
_method_parameters = ['application_credential_secret',
'application_credential_id',
'application_credential_name',
'user_id',
'username',
'user_domain_id',
'user_domain_name']
class ApplicationCredential(base.AuthConstructor):
"""A plugin for authenticating with an application credential.
:param string auth_url: Identity service endpoint for authentication.
:param string application_credential_secret: Application credential secret.
:param string application_credential_id: Application credential ID.
:param string application_credential_name: Application credential name.
:param string username: Username for authentication.
:param string user_id: User ID for authentication.
:param string user_domain_id: User's domain ID for authentication.
:param string user_domain_name: User's domain name for authentication.
:param bool reauthenticate: Allow fetching a new token if the current one
is going to expire. (optional) default True
"""
_auth_method_class = ApplicationCredentialMethod
| 44.844444 | 79 | 0.644698 | # Copyright 2018 SUSE Linux GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneauth1.identity.v3 import base
__all__ = ('ApplicationCredentialMethod', 'ApplicationCredential')
class ApplicationCredentialMethod(base.AuthMethod):
    """Construct an application-credential based authentication method.

    :param string application_credential_secret: Application credential secret.
    :param string application_credential_id: Application credential id.
    :param string application_credential_name: The name of the application
                                               credential, if an ID is not
                                               provided.
    :param string username: Username for authentication, if an application
                            credential ID is not provided.
    :param string user_id: User ID for authentication, if an application
                           credential ID is not provided.
    :param string user_domain_id: User's domain ID for authentication, if an
                                  application credential ID is not provided.
    :param string user_domain_name: User's domain name for authentication, if
                                    an application credential ID is not
                                    provided.
    """
    _method_parameters = ['application_credential_secret',
                          'application_credential_id',
                          'application_credential_name',
                          'user_id',
                          'username',
                          'user_domain_id',
                          'user_domain_name']
    def get_auth_data(self, session, auth, headers, **kwargs):
        # Build the "application_credential" auth payload. The secret is
        # always sent; identification is either by credential ID, or by
        # credential name plus user information.
        auth_data = {'secret': self.application_credential_secret}
        if self.application_credential_id:
            auth_data['id'] = self.application_credential_id
        else:
            auth_data['name'] = self.application_credential_name
            auth_data['user'] = {}
            if self.user_id:
                auth_data['user']['id'] = self.user_id
            elif self.username:
                auth_data['user']['name'] = self.username
            # Domain scoping disambiguates the user; presumably only needed
            # when the user is identified by name rather than unique ID.
            if self.user_domain_id:
                auth_data['user']['domain'] = {'id': self.user_domain_id}
            elif self.user_domain_name:
                auth_data['user']['domain'] = {
                    'name': self.user_domain_name}
        return 'application_credential', auth_data
    def get_cache_id_elements(self):
        # Prefixed parameter values used by the session to build a cache key.
        return dict(('application_credential_%s' % p, getattr(self, p))
                    for p in self._method_parameters)
class ApplicationCredential(base.AuthConstructor):
    """A plugin for authenticating with an application credential.

    :param string auth_url: Identity service endpoint for authentication.
    :param string application_credential_secret: Application credential secret.
    :param string application_credential_id: Application credential ID.
    :param string application_credential_name: Application credential name.
    :param string username: Username for authentication.
    :param string user_id: User ID for authentication.
    :param string user_domain_id: User's domain ID for authentication.
    :param string user_domain_name: User's domain name for authentication.
    :param bool reauthenticate: Allow fetching a new token if the current one
                                is going to expire. (optional) default True
    """
    # AuthConstructor forwards its kwargs to this method class.
    _auth_method_class = ApplicationCredentialMethod
| 956 | 0 | 54 |
14b28a98370a160b7a6bac3ee8dfd1c783eb75ed | 8,559 | py | Python | examples/textbook/explode_evolved_star.py | joshuawall/amuse | c2034074ee76c08057c4faa96c32044ab40952e9 | [
"Apache-2.0"
] | 1 | 2019-12-28T22:47:51.000Z | 2019-12-28T22:47:51.000Z | examples/textbook/explode_evolved_star.py | joshuawall/amuse | c2034074ee76c08057c4faa96c32044ab40952e9 | [
"Apache-2.0"
] | null | null | null | examples/textbook/explode_evolved_star.py | joshuawall/amuse | c2034074ee76c08057c4faa96c32044ab40952e9 | [
"Apache-2.0"
] | 2 | 2021-11-19T04:41:37.000Z | 2021-11-20T02:11:17.000Z | import numpy
import os.path
from amuse.test.amusetest import get_path_to_results
try:
from matplotlib import pyplot
HAS_MATPLOTLIB = True
from amuse.plot import plot, semilogy, xlabel, ylabel, loglog
except ImportError:
HAS_MATPLOTLIB = False
from amuse.units import units
from amuse.units import generic_unit_system
from amuse.units import nbody_system
from amuse.units import constants
from amuse.units.generic_unit_converter import ConvertBetweenGenericAndSiUnits
from amuse.support.exceptions import AmuseException
from amuse.community.mesa.interface import MESA
from amuse.community.gadget2.interface import Gadget2
from amuse.community.fi.interface import Fi
from amuse.ext.star_to_sph import *
from amuse.datamodel import Particles
from amuse.datamodel import Grid
from prepare_figure import single_frame, figure_frame, set_tickmarks
from distinct_colours import get_distinct
def hydro_plot(view, hydro_code, image_size, time, figname):
    """
    Produce a series of images suitable for conversion into a movie.
    view: the (physical) region to plot [xmin, xmax, ymin, ymax]
    hydro_code: hydrodynamics code in which the gas to be plotted is defined
    image_size: size of the output image in pixels (x, y)
    time: current hydro code time
    figname: output file name for the saved image

    The RGBA channels encode: red = log density, green = log speed,
    blue = log specific energy, alpha = density above background.
    (Python 2 code: uses print statements.)
    """
    if not HAS_MATPLOTLIB:
        return
    # One grid point per output pixel, in the z=0 plane.
    shape = (image_size[0], image_size[1], 1)
    size = image_size[0] * image_size[1]
    axis_lengths = [0.0, 0.0, 0.0] | units.m
    axis_lengths[0] = view[1] - view[0]
    axis_lengths[1] = view[3] - view[2]
    grid = Grid.create(shape, axis_lengths)
    grid.x += view[0]
    grid.y += view[2]
    # Zero sampling velocity at each query point.
    speed = grid.z.reshape(size) * (0 | 1/units.s)
    rho, rhovx, rhovy, rhovz, rhoe \
        = hydro_code.get_hydro_state_at_point(
            grid.x.reshape(size), grid.y.reshape(size), grid.z.reshape(size),
            speed, speed, speed)
    # Display ranges for the color mapping (clipped to [0, 1] below).
    min_v = 800.0 | units.km / units.s
    max_v = 3000.0 | units.km / units.s
    min_rho = 3.0e-9 | units.g / units.cm**3
    max_rho = 1.0e-5 | units.g / units.cm**3
    min_E = 1.0e11 | units.J / units.kg
    max_E = 1.0e13 | units.J / units.kg
    v_sqr = (rhovx**2 + rhovy**2 + rhovz**2) / rho**2
    E = rhoe / rho
    log_v = numpy.log((v_sqr/min_v**2)) / numpy.log((max_v**2/min_v**2))
    log_rho = numpy.log((rho/min_rho)) / numpy.log((max_rho/min_rho))
    log_E = numpy.log((E/min_E)) / numpy.log((max_E/min_E))
    red = numpy.minimum(numpy.ones_like(rho.number),
                        numpy.maximum(numpy.zeros_like(rho.number),
                                      log_rho)).reshape(shape)
    green = numpy.minimum(numpy.ones_like(rho.number),
                          numpy.maximum(numpy.zeros_like(rho.number),
                                        log_v)).reshape(shape)
    blue = numpy.minimum(numpy.ones_like(rho.number),
                         numpy.maximum(numpy.zeros_like(rho.number),
                                       log_E)).reshape(shape)
    # Alpha fades out pixels whose density is near the background floor.
    alpha = numpy.minimum(numpy.ones_like(log_v),
                          numpy.maximum(numpy.zeros_like(log_v),
                                        numpy.log((rho
                                                   / (10*min_rho))))).reshape(shape)
    rgba = numpy.concatenate((red, green, blue, alpha), axis = 2)
    pyplot.figure(figsize = (image_size[0]/100.0,
                             image_size[1]/100.0), dpi = 100)
    im = pyplot.figimage(rgba, origin='lower')
    pyplot.savefig(figname, transparent=True, dpi = 100,
                   facecolor='k', edgecolor='k')
    print "Saved hydroplot at time", time, "in file"
    print '  ', figname
    pyplot.close()
# Script entry point (Python 2). run_supernova is presumably defined
# elsewhere in the full module — not visible in this excerpt.
if __name__ == "__main__":
    print "Test run to mimic a supernova in SPH"
    print "Details:"
    print "  A high-mass star is evolved to the supergiant phase using MESA."
    print "  Then it is converted to SPH particles using", \
        "convert_stellar_model_to_SPH"
    print "  (with a non-SPH 'core' particle).", \
        "Finally the internal energies of"
    print "  the innermost particles are increased so that the star gains the"
    print "  10^51 erg released in a typical supernova explosion."
    run_supernova()
| 39.995327 | 80 | 0.628578 | import numpy
import os.path
from amuse.test.amusetest import get_path_to_results
try:
from matplotlib import pyplot
HAS_MATPLOTLIB = True
from amuse.plot import plot, semilogy, xlabel, ylabel, loglog
except ImportError:
HAS_MATPLOTLIB = False
from amuse.units import units
from amuse.units import generic_unit_system
from amuse.units import nbody_system
from amuse.units import constants
from amuse.units.generic_unit_converter import ConvertBetweenGenericAndSiUnits
from amuse.support.exceptions import AmuseException
from amuse.community.mesa.interface import MESA
from amuse.community.gadget2.interface import Gadget2
from amuse.community.fi.interface import Fi
from amuse.ext.star_to_sph import *
from amuse.datamodel import Particles
from amuse.datamodel import Grid
def setup_stellar_evolution_model():
    """Evolve a 10 MSun star with MESA past stellar type 12 and pickle it.

    Returns the path to the pickled stellar structure; reuses an existing
    pickle file if one is already present (expensive computation otherwise).
    """
    out_pickle_file = os.path.join(get_path_to_results(),
                                   "super_giant_stellar_structure.pkl")
    if os.path.exists(out_pickle_file):
        return out_pickle_file
    print "Creating initial conditions from a MESA stellar evolution model..."
    stellar_evolution = MESA(redirection = "none")
    stars = Particles(1)
    stars.mass = 10.0 | units.MSun
    stellar_evolution.particles.add_particles(stars)
    # Evolve until the star passes stellar type 12 (late giant stage).
    while stellar_evolution.particles[0].stellar_type <= 12|units.stellar_type:
        stellar_evolution.evolve_model()
    pickle_stellar_model(stellar_evolution.particles[0], out_pickle_file)
    stellar_evolution.stop()
    return out_pickle_file
def inject_supernova_energy(gas_particles):
    """Deposit the canonical supernova energy release (1.0e51 erg) into
    the SPH particles lying within 10 RSun of the origin, by raising
    their per-unit-mass internal energy attribute ``u``.
    """
    Rinner = 10|units.RSun
    # Select the particles inside the injection radius.
    inner = gas_particles.select(
        lambda pos : pos.length_squared() < Rinner**2, ["position"])
    print "Adding", (1.0e51 | units.erg) / inner.total_mass(),
    print "to each of", len(inner), "innermost particles"
    print " of the exploding star"
    inner.u += (1.0e51 | units.erg) / inner.total_mass()
from prepare_figure import single_frame, figure_frame, set_tickmarks
from distinct_colours import get_distinct
def hydro_plot(view, hydro_code, image_size, time, figname):
    """
    Produce a series of images suitable for conversion into a movie.

    view: the (physical) region to plot [xmin, xmax, ymin, ymax]
    hydro_code: hydrodynamics code in which the gas to be plotted is defined
    image_size: size of the output image in pixels (x, y)
    time: current hydro code time
    figname: name of the file the resulting image is saved to
    """
    if not HAS_MATPLOTLIB:
        return
    # One grid cell per output pixel, all in a single z-plane.
    shape = (image_size[0], image_size[1], 1)
    size = image_size[0] * image_size[1]
    axis_lengths = [0.0, 0.0, 0.0] | units.m
    axis_lengths[0] = view[1] - view[0]
    axis_lengths[1] = view[3] - view[2]
    grid = Grid.create(shape, axis_lengths)
    grid.x += view[0]
    grid.y += view[2]
    # Zero-velocity array (the sampling points are at rest), reusing the
    # flat z coordinates only for their shape.
    speed = grid.z.reshape(size) * (0 | 1/units.s)
    # Sample density, momentum density and energy density at each pixel.
    rho, rhovx, rhovy, rhovz, rhoe \
        = hydro_code.get_hydro_state_at_point(
            grid.x.reshape(size), grid.y.reshape(size), grid.z.reshape(size),
            speed, speed, speed)
    # Fixed color-scale limits for speed, density and specific energy.
    min_v = 800.0 | units.km / units.s
    max_v = 3000.0 | units.km / units.s
    min_rho = 3.0e-9 | units.g / units.cm**3
    max_rho = 1.0e-5 | units.g / units.cm**3
    min_E = 1.0e11 | units.J / units.kg
    max_E = 1.0e13 | units.J / units.kg
    v_sqr = (rhovx**2 + rhovy**2 + rhovz**2) / rho**2
    E = rhoe / rho
    # Log-rescale each quantity to [0, 1] between its min/max limits.
    log_v = numpy.log((v_sqr/min_v**2)) / numpy.log((max_v**2/min_v**2))
    log_rho = numpy.log((rho/min_rho)) / numpy.log((max_rho/min_rho))
    log_E = numpy.log((E/min_E)) / numpy.log((max_E/min_E))
    # RGB mapping: density -> red, speed -> green, specific energy -> blue,
    # each clipped to [0, 1].
    red = numpy.minimum(numpy.ones_like(rho.number),
                        numpy.maximum(numpy.zeros_like(rho.number),
                                      log_rho)).reshape(shape)
    green = numpy.minimum(numpy.ones_like(rho.number),
                          numpy.maximum(numpy.zeros_like(rho.number),
                                        log_v)).reshape(shape)
    blue = numpy.minimum(numpy.ones_like(rho.number),
                         numpy.maximum(numpy.zeros_like(rho.number),
                                       log_E)).reshape(shape)
    # Alpha fades out pixels whose density is below ~10x the minimum.
    alpha = numpy.minimum(numpy.ones_like(log_v),
                          numpy.maximum(numpy.zeros_like(log_v),
                                        numpy.log((rho
                                                   / (10*min_rho))))).reshape(shape)
    rgba = numpy.concatenate((red, green, blue, alpha), axis = 2)
    pyplot.figure(figsize = (image_size[0]/100.0,
                             image_size[1]/100.0), dpi = 100)
    im = pyplot.figimage(rgba, origin='lower')
    pyplot.savefig(figname, transparent=True, dpi = 100,
                   facecolor='k', edgecolor='k')
    print "Saved hydroplot at time", time, "in file"
    print ' ', figname
    pyplot.close()
def energy_plot(time, E_kin, E_pot, E_therm, figname):
    """Plot kinetic, potential, thermal and total energy (in units of
    1.0e51 erg, a 'foe') against time in hours, and save to figname.
    """
    if not HAS_MATPLOTLIB:
        return
    x_label = 'Time [hour]'
    y_label = 'Energy [foe]'
    single_frame(x_label, y_label, logx=False, logy=False,
                 xsize=14, ysize=10, ymin=-1, ymax=-1)
    cols = get_distinct(4)
    # 1 foe = 1.0e51 erg, the canonical supernova energy scale.
    FOE = 1.e+51 | units.erg
    hour = 1|units.hour
    pyplot.plot(time/hour, E_kin/FOE, label='E_kin', c=cols[0])
    pyplot.plot(time/hour, E_pot/FOE, label='E_pot', c=cols[1])
    pyplot.plot(time/hour, E_therm/FOE, label='E_therm', c=cols[2])
    pyplot.plot(time/hour, (E_kin+E_pot+E_therm)/FOE, label='E_total',
                c=cols[3])
    pyplot.legend(loc='best')
    pyplot.savefig(figname)
    print '\nSaved energy evolution figure in file', figname, '\n'
    pyplot.show()
    pyplot.close()
def run_supernova():
    """Drive the full experiment: convert the evolved MESA star to SPH
    particles, inject the supernova energy, then evolve the explosion
    with Gadget2, writing one hydro image per step and a final
    energy-evolution plot.
    """
    use_hydro_code = Gadget2
    hydro_code_options = dict(number_of_workers=3)
    number_of_sph_particles = 3000
    t_end = 1.0e4 | units.s
    pickle_file = setup_stellar_evolution_model()
    # Convert the pickled stellar model to SPH particles; the inner
    # 1.4 MSun becomes a single non-SPH "core" particle.
    model = convert_stellar_model_to_SPH(None,
                                         number_of_sph_particles,
                                         seed = 12345,
                                         pickle_file = pickle_file,
                                         with_core_particle = True,
                                         target_core_mass = 1.4|units.MSun)
    print "model=", model.core_particle
    core, gas_without_core, core_radius \
        = model.core_particle, model.gas_particles, model.core_radius
    inject_supernova_energy(gas_without_core)
    print "\nEvolving (SPH) to:", t_end
    n_steps = 100
    unit_converter = ConvertBetweenGenericAndSiUnits(1.0 | units.RSun,
                                                     constants.G, t_end)
    hydro_code = use_hydro_code(unit_converter, **hydro_code_options)
    try:
        hydro_code.parameters.timestep = t_end / n_steps
    except Exception as exc:
        # Some hydro codes expose the timestep as a read-only parameter;
        # that is acceptable here, anything else is a real error.
        if not "parameter is read-only" in str(exc): raise
    # Soften gravity on the scale of the core particle.
    hydro_code.parameters.epsilon_squared = core_radius**2
    hydro_code.parameters.n_smooth_tol = 0.01
    hydro_code.gas_particles.add_particles(gas_without_core)
    hydro_code.dm_particles.add_particle(core)
    times = [] | units.s
    potential_energies = [] | units.J
    kinetic_energies = [] | units.J
    thermal_energies = [] | units.J
    # Evolve in n_steps equal steps, recording energies and writing one
    # numbered hydro image per step.
    for time, i_step in [(i*t_end/n_steps, i) for i in range(0, n_steps+1)]:
        hydro_code.evolve_model(time)
        times.append(time)
        potential_energies.append(hydro_code.potential_energy)
        kinetic_energies.append(  hydro_code.kinetic_energy)
        thermal_energies.append(  hydro_code.thermal_energy)
        hydro_plot([-1.0, 1.0, -1.0, 1.0] * (350 | units.RSun),
                   hydro_code,
                   (100, 100),
                   time,
                   os.path.join(get_path_to_results(),
                                "supernova_hydro_image{0:=03}.png".format(i_step))
                   )
    energy_plot(times, kinetic_energies, potential_energies, thermal_energies,
                "supernova_energy_evolution.pdf")
    hydro_code.stop()
if __name__ == "__main__":
    # Script entry point: describe the experiment, then run it.
    print "Test run to mimic a supernova in SPH"
    print "Details:"
    print " A high-mass star is evolved to the supergiant phase using MESA."
    print " Then it is converted to SPH particles using", \
        "convert_stellar_model_to_SPH"
    print " (with a non-SPH 'core' particle).", \
        "Finally the internal energies of"
    print " the innermost particles are increased so that the star gains the"
    print " 10^51 erg released in a typical supernova explosion."
    run_supernova()
| 4,240 | 0 | 92 |
135557c6b0473025cabcad2d123a29466285591d | 1,317 | py | Python | client/verta/verta/_cli/__init__.py | Vafilor/modeldb | 57e617fa64368e7c5c8ae186955e3e28b39fd5a9 | [
"Apache-2.0"
] | null | null | null | client/verta/verta/_cli/__init__.py | Vafilor/modeldb | 57e617fa64368e7c5c8ae186955e3e28b39fd5a9 | [
"Apache-2.0"
] | null | null | null | client/verta/verta/_cli/__init__.py | Vafilor/modeldb | 57e617fa64368e7c5c8ae186955e3e28b39fd5a9 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import click
from .._internal_utils import _config_utils
from . import remote
from . import branch
from . import commit
from . import blob
@click.group()
def cli():
    """ModelDB versioning CLI for snapshotting and tracking model ingredients."""
    # Root click group; subcommands are attached below via cli.add_command().
    # The docstring doubles as the CLI's help text.
    pass
@click.command()
def init():
    """
    Create a Verta config file in the current directory.

    Running verta init in an existing repository is safe. It will not overwrite things that are
    already there.
    """
    # Reuse the first existing config file from the known filenames, if any.
    existing = next(
        (name for name in _config_utils.CONFIG_FILENAMES
         if os.path.isfile(name)),
        None,
    )
    if existing is not None:
        click.echo("found existing config file {}".format(os.path.abspath(existing)))
        return
    # No config found: create a fresh, empty one in the current directory.
    created = _config_utils.create_empty_config_file('.')
    click.echo("initialized empty config file {}".format(created))
# Wire every subcommand onto the root group at import time.
cli.add_command(init)
# Remote management.
cli.add_command(remote.remote)
# Branch operations.
cli.add_command(branch.branch)
cli.add_command(branch.checkout)
cli.add_command(branch.log)
# Commit / staging operations.
cli.add_command(commit.add)
cli.add_command(commit.rm)
cli.add_command(commit.commit)
cli.add_command(commit.tag)
cli.add_command(commit.status)
cli.add_command(commit.diff)
# Blob operations; "import" is a Python keyword, hence the trailing
# underscore in the function name plus an explicit command name.
cli.add_command(blob.pull)
cli.add_command(blob.import_, name="import")
| 24.849057 | 95 | 0.736522 | # -*- coding: utf-8 -*-
import os
import click
from .._internal_utils import _config_utils
from . import remote
from . import branch
from . import commit
from . import blob
@click.group()
def cli():
"""ModelDB versioning CLI for snapshotting and tracking model ingredients."""
pass
@click.command()
def init():
"""
Create a Verta config file in the current directory.
Running verta init in an existing repository is safe. It will not overwrite things that are
already there.
"""
for config_filename in _config_utils.CONFIG_FILENAMES:
if os.path.isfile(config_filename):
config_filepath = os.path.abspath(config_filename)
click.echo("found existing config file {}".format(config_filepath))
return
config_filepath = _config_utils.create_empty_config_file('.')
click.echo("initialized empty config file {}".format(config_filepath))
cli.add_command(init)
cli.add_command(remote.remote)
cli.add_command(branch.branch)
cli.add_command(branch.checkout)
cli.add_command(branch.log)
cli.add_command(commit.add)
cli.add_command(commit.rm)
cli.add_command(commit.commit)
cli.add_command(commit.tag)
cli.add_command(commit.status)
cli.add_command(commit.diff)
cli.add_command(blob.pull)
cli.add_command(blob.import_, name="import")
| 0 | 0 | 0 |
a4f449361d04f73e246062e6808f58b5fa4c8ccd | 1,083 | py | Python | oelint_adv/rule_base/rule_vars_bbclassextends.py | skycaptain/oelint-adv | ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46 | [
"BSD-2-Clause"
] | null | null | null | oelint_adv/rule_base/rule_vars_bbclassextends.py | skycaptain/oelint-adv | ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46 | [
"BSD-2-Clause"
] | null | null | null | oelint_adv/rule_base/rule_vars_bbclassextends.py | skycaptain/oelint-adv | ff67d3149cf8b1de2b0b2d158a68f4e2cf5e9e46 | [
"BSD-2-Clause"
] | null | null | null | from oelint_adv.cls_rule import Rule
from oelint_parser.cls_item import Variable
| 41.653846 | 114 | 0.588181 | from oelint_adv.cls_rule import Rule
from oelint_parser.cls_item import Variable
class VarBbclassextend(Rule):
    """Info-level rule: suggest setting BBCLASSEXTEND in recipes that do
    not set it, unless the file is a .bbappend or already inherits a
    native/nativesdk/cross class."""

    def __init__(self):
        super().__init__(id='oelint.var.bbclassextend',
                         severity='info',
                         message='BBCLASSEXTEND should be set if possible')

    def check(self, _file, stash):
        findings = []
        bbclassextend_items = stash.GetItemsFor(
            filename=_file, classifier=Variable.CLASSIFIER,
            attribute=Variable.ATTR_VAR, attributeValue='BBCLASSEXTEND')
        inherit_items = stash.GetItemsFor(
            filename=_file, classifier=Variable.CLASSIFIER,
            attribute=Variable.ATTR_VAR, attributeValue='inherit')
        if not any(bbclassextend_items):
            # A recipe that already inherits one of these classes does not
            # need BBCLASSEXTEND (substring match, as in the original rule).
            inherits_special_class = any(
                item.VarValue.find(klass) != -1
                for klass in ('native', 'nativesdk', 'cross')
                for item in inherit_items
            )
            if not _file.endswith('.bbappend') and not inherits_special_class:
                findings += self.finding(_file, 0)
        return findings
| 917 | 8 | 76 |
61dd8412dbdb1f81e6f9fd7fcaad21f34bed6416 | 5,382 | py | Python | tests/test_scientific/test_brownian.py | jfaccioni/clovars | 64e24286a2dc185490384aeb08027d88eb9462c4 | [
"MIT"
] | null | null | null | tests/test_scientific/test_brownian.py | jfaccioni/clovars | 64e24286a2dc185490384aeb08027d88eb9462c4 | [
"MIT"
] | null | null | null | tests/test_scientific/test_brownian.py | jfaccioni/clovars | 64e24286a2dc185490384aeb08027d88eb9462c4 | [
"MIT"
] | null | null | null | import random
import unittest
from clovars.scientific import brownian_motion, bounded_brownian_motion, reflect_around_interval, triangular_wave
class TestBrownian(unittest.TestCase):
    """Class representing unit-tests for clovars.scientific.brownian_motion module."""

    def test_bounded_brownian_motion_returns_values_between_bounds(self) -> None:
        """
        Tests whether the "bounded_brownian_motion" function always returns values
        between the lower and upper bounds used.
        """
        for _ in range(30):
            current_value, scale = random.random(), random.random()
            # upper_bound is drawn from [1, 2), so it always exceeds
            # lower_bound, which is drawn from [0, 1).
            lower_bound, upper_bound = random.random(), random.random() + 1
            value = bounded_brownian_motion(
                current_value=current_value,
                scale=scale,
                lower_bound=lower_bound,
                upper_bound=upper_bound,
            )
            with self.subTest(value=value, lower_bound=lower_bound, upper_bound=upper_bound):
                self.assertGreaterEqual(value, lower_bound)
                self.assertLessEqual(value, upper_bound)

    def test_brownian_motion_returns_values_close_to_the_input_value(self) -> None:
        """Tests whether the "brownian_motion" function returns new values within ~ 7 SDs of the input value."""
        # NOTE(review): statistical bound -- an excursion beyond ~7 sigma
        # would fail this test, although with vanishing probability.
        for current_value in [0, 17.98, 999, 312, -73.4]:
            for scale in [0.2, 0.5, 0.8]:
                tolerance = 7 * scale
                with self.subTest(current_value=current_value, scale=scale, tolerance=tolerance):
                    result = brownian_motion(current_value=current_value, scale=scale)
                    self.assertGreaterEqual(result, current_value - tolerance)
                    self.assertLessEqual(result, current_value + tolerance)

    def test_brownian_motion_returns_input_value_if_scale_is_one(self) -> None:
        """Tests whether the "brownian_motion" function returns the exact input value if the scale argument is one."""
        for _ in range(30):
            current_value = random.random()
            with self.subTest(current_value=current_value):
                result = brownian_motion(current_value=current_value, scale=1.0)
                self.assertEqual(current_value, result)

    def test_reflect_around_interval_returns_input_value_reflected_between_bounds(self) -> None:
        """
        Tests whether the "reflect_around_interval" function returns the input value
        after reflecting it between two bounds.
        """
        # Each tuple is (x, lower_bound, upper_bound, expected_x).
        reflect_test_cases = [
            (0.5, 0.0, 1.0, 0.5),
            (1.5, 0.0, 1.0, 0.5),
            (-.5, 0.0, 1.0, 0.5),
            (3.6, 2.0, 3.0, 2.4),
            (3.2, 1.0, 3.0, 2.8),
            (6.4, 4.0, 6.0, 5.6),
            (10., 5.0, 8.0, 6.0),
            (10., 1.0, 3.0, 2.0),
        ]
        for x, lower_bound, upper_bound, expected_x in reflect_test_cases:
            with self.subTest(x=x, lower_bound=lower_bound, upper_bound=upper_bound, expected_x=expected_x):
                actual_x = reflect_around_interval(x=x, lower_bound=lower_bound, upper_bound=upper_bound)
                self.assertAlmostEqual(expected_x, actual_x)

    def test_triangular_wave_behaves_as_a_triangular_wave(self) -> None:
        """Tests whether the "triangular_wave" function returns values as expected by a triangular wave function."""
        # Each tuple is (x, period, amplitude, expected_y): one full period
        # sampled at its quarter points.
        triangular_test_cases = [
            (0.0, 1.0, 1.0, 0.0),
            (.25, 1.0, 1.0, 1.0),
            (.50, 1.0, 1.0, 0.0),
            (.75, 1.0, 1.0, -1.),
            (1.0, 1.0, 1.0, 0.0),
        ]
        for x, period, amplitude, expected_y in triangular_test_cases:
            with self.subTest(x=x, period=period, amplitude=amplitude, expected_y=expected_y):
                actual_y = triangular_wave(x=x, period=period, amplitude=amplitude)
                self.assertEqual(expected_y, actual_y)

    def test_triangular_wave_scales_with_period(self) -> None:
        """Tests whether the "triangular_wave" function scales with its period argument properly."""
        # Each tuple is (x, period, amplitude, expected_y).
        triangular_test_cases = [
            (.25, 1.0, 1.0, 1.0),
            (.25, 2.0, 1.0, 0.5),
            (.25, 4.0, 1.0, .25),
            (.25, 0.5, 1.0, 0.0),
        ]
        for x, period, amplitude, expected_y in triangular_test_cases:
            with self.subTest(x=x, period=period, amplitude=amplitude, expected_y=expected_y):
                actual_y = triangular_wave(x=x, period=period, amplitude=amplitude)
                self.assertEqual(expected_y, actual_y)

    def test_triangular_wave_scales_with_amplitude(self) -> None:
        """Tests whether the "triangular_wave" function scales with its amplitude argument properly."""
        # Each tuple is (x, period, amplitude, expected_y).
        triangular_test_cases = [
            (0.25, 1.0, 1.0, 1.0),
            (0.25, 1.0, 2.0, 2.0),
            (0.25, 1.0, 4.0, 4.0),
            (0.25, 1.0, 5.0, 5.0),
            (0.75, 1.0, 1.0, -1.),
            (0.75, 1.0, 2.0, -2.),
            (0.75, 1.0, 4.0, -4.),
            (0.75, 1.0, 5.0, -5.),
        ]
        for x, period, amplitude, expected_y in triangular_test_cases:
            with self.subTest(x=x, period=period, amplitude=amplitude, expected_y=expected_y):
                actual_y = triangular_wave(x=x, period=period, amplitude=amplitude)
                self.assertEqual(expected_y, actual_y)
if __name__ == '__main__':
    # Allow running this test module directly with `python`.
    unittest.main()
| 48.053571 | 118 | 0.607952 | import random
import unittest
from clovars.scientific import brownian_motion, bounded_brownian_motion, reflect_around_interval, triangular_wave
class TestBrownian(unittest.TestCase):
"""Class representing unit-tests for clovars.scientific.brownian_motion module."""
def test_bounded_brownian_motion_returns_values_between_bounds(self) -> None:
"""
Tests whether the "bounded_brownian_motion" function always returns values
between the lower and upper bounds used.
"""
for _ in range(30):
current_value, scale = random.random(), random.random()
lower_bound, upper_bound = random.random(), random.random() + 1
value = bounded_brownian_motion(
current_value=current_value,
scale=scale,
lower_bound=lower_bound,
upper_bound=upper_bound,
)
with self.subTest(value=value, lower_bound=lower_bound, upper_bound=upper_bound):
self.assertGreaterEqual(value, lower_bound)
self.assertLessEqual(value, upper_bound)
def test_brownian_motion_returns_values_close_to_the_input_value(self) -> None:
"""Tests whether the "brownian_motion" function returns new values within ~ 7 SDs of the input value."""
for current_value in [0, 17.98, 999, 312, -73.4]:
for scale in [0.2, 0.5, 0.8]:
tolerance = 7 * scale
with self.subTest(current_value=current_value, scale=scale, tolerance=tolerance):
result = brownian_motion(current_value=current_value, scale=scale)
self.assertGreaterEqual(result, current_value - tolerance)
self.assertLessEqual(result, current_value + tolerance)
def test_brownian_motion_returns_input_value_if_scale_is_one(self) -> None:
"""Tests whether the "brownian_motion" function returns the exact input value if the scale argument is one."""
for _ in range(30):
current_value = random.random()
with self.subTest(current_value=current_value):
result = brownian_motion(current_value=current_value, scale=1.0)
self.assertEqual(current_value, result)
def test_reflect_around_interval_returns_input_value_reflected_between_bounds(self) -> None:
"""
Tests whether the "reflect_around_interval" function returns the input value
after reflecting it between two bounds.
"""
reflect_test_cases = [
(0.5, 0.0, 1.0, 0.5),
(1.5, 0.0, 1.0, 0.5),
(-.5, 0.0, 1.0, 0.5),
(3.6, 2.0, 3.0, 2.4),
(3.2, 1.0, 3.0, 2.8),
(6.4, 4.0, 6.0, 5.6),
(10., 5.0, 8.0, 6.0),
(10., 1.0, 3.0, 2.0),
]
for x, lower_bound, upper_bound, expected_x in reflect_test_cases:
with self.subTest(x=x, lower_bound=lower_bound, upper_bound=upper_bound, expected_x=expected_x):
actual_x = reflect_around_interval(x=x, lower_bound=lower_bound, upper_bound=upper_bound)
self.assertAlmostEqual(expected_x, actual_x)
def test_triangular_wave_behaves_as_a_triangular_wave(self) -> None:
"""Tests whether the "triangular_wave" function returns values as expected by a triangular wave function."""
triangular_test_cases = [
(0.0, 1.0, 1.0, 0.0),
(.25, 1.0, 1.0, 1.0),
(.50, 1.0, 1.0, 0.0),
(.75, 1.0, 1.0, -1.),
(1.0, 1.0, 1.0, 0.0),
]
for x, period, amplitude, expected_y in triangular_test_cases:
with self.subTest(x=x, period=period, amplitude=amplitude, expected_y=expected_y):
actual_y = triangular_wave(x=x, period=period, amplitude=amplitude)
self.assertEqual(expected_y, actual_y)
def test_triangular_wave_scales_with_period(self) -> None:
"""Tests whether the "triangular_wave" function scales with its period argument properly."""
triangular_test_cases = [
(.25, 1.0, 1.0, 1.0),
(.25, 2.0, 1.0, 0.5),
(.25, 4.0, 1.0, .25),
(.25, 0.5, 1.0, 0.0),
]
for x, period, amplitude, expected_y in triangular_test_cases:
with self.subTest(x=x, period=period, amplitude=amplitude, expected_y=expected_y):
actual_y = triangular_wave(x=x, period=period, amplitude=amplitude)
self.assertEqual(expected_y, actual_y)
def test_triangular_wave_scales_with_amplitude(self) -> None:
"""Tests whether the "triangular_wave" function scales with its amplitude argument properly."""
triangular_test_cases = [
(0.25, 1.0, 1.0, 1.0),
(0.25, 1.0, 2.0, 2.0),
(0.25, 1.0, 4.0, 4.0),
(0.25, 1.0, 5.0, 5.0),
(0.75, 1.0, 1.0, -1.),
(0.75, 1.0, 2.0, -2.),
(0.75, 1.0, 4.0, -4.),
(0.75, 1.0, 5.0, -5.),
]
for x, period, amplitude, expected_y in triangular_test_cases:
with self.subTest(x=x, period=period, amplitude=amplitude, expected_y=expected_y):
actual_y = triangular_wave(x=x, period=period, amplitude=amplitude)
self.assertEqual(expected_y, actual_y)
if __name__ == '__main__':
unittest.main()
| 0 | 0 | 0 |
1ba5ae030771231bf80a5f38f201d1e26faa28a1 | 4,612 | py | Python | pyformlang/finite_automaton/nondeterministic_transition_function.py | YaccConstructor/pyformlang | df640e13524c5d835ddcdedf25d8246fc73d7b88 | [
"MIT"
] | null | null | null | pyformlang/finite_automaton/nondeterministic_transition_function.py | YaccConstructor/pyformlang | df640e13524c5d835ddcdedf25d8246fc73d7b88 | [
"MIT"
] | 1 | 2020-07-22T11:40:30.000Z | 2020-07-22T11:40:30.000Z | pyformlang/finite_automaton/nondeterministic_transition_function.py | YaccConstructor/pyformlang | df640e13524c5d835ddcdedf25d8246fc73d7b88 | [
"MIT"
] | null | null | null | """
A nondeterministic transition function
"""
import copy
from typing import Set
from .state import State
from .symbol import Symbol
class NondeterministicTransitionFunction(object):
""" A nondeterministic transition function in a finite automaton.
The difference with a deterministic transition is that the return value is
a set of States
"""
def add_transition(self, s_from: State, symb_by: Symbol, s_to: State) -> int:
""" Adds a new transition to the function
Parameters
----------
s_from : :class:`~pyformlang.finite_automaton.State`
The source state
symb_by : :class:`~pyformlang.finite_automaton.Symbol`
The transition symbol
s_to : :class:`~pyformlang.finite_automaton.State`
The destination state
Returns
--------
done : int
Always 1
"""
if s_from in self._transitions:
if symb_by in self._transitions[s_from]:
self._transitions[s_from][symb_by].add(s_to)
else:
self._transitions[s_from][symb_by] = {s_to}
else:
self._transitions[s_from] = dict()
self._transitions[s_from][symb_by] = {s_to}
return 1
def remove_transition(self, s_from: State, symb_by: Symbol, s_to: State) -> int:
""" Removes a transition to the function
Parameters
----------
s_from : :class:`~pyformlang.finite_automaton.State`
The source state
symb_by : :class:`~pyformlang.finite_automaton.Symbol`
The transition symbol
s_to : :class:`~pyformlang.finite_automaton.State`
The destination state
Returns
--------
done : int
1 is the transition was found, 0 otherwise
"""
if s_from in self._transitions and \
symb_by in self._transitions[s_from] and \
s_to in self._transitions[s_from][symb_by]:
self._transitions[s_from][symb_by].remove(s_to)
return 1
return 0
def get_number_transitions(self) -> int:
""" Gives the number of transitions describe by the function
Returns
----------
n_transitions : int
The number of transitions
"""
counter = 0
for s_from in self._transitions:
for symb_by in self._transitions[s_from]:
counter += len(self._transitions[s_from][symb_by])
return counter
def __call__(self, s_from: State, symb_by: Symbol=None) -> Set[State]:
""" Calls the transition function as a real function
Parameters
----------
s_from : :class:`~pyformlang.finite_automaton.State`
The source state
symb_by : :class:`~pyformlang.finite_automaton.Symbol`
The transition symbol
Returns
----------
s_from : :class:`~pyformlang.finite_automaton.State` or None
The destination state or None if it does not exists
"""
if s_from in self._transitions:
if symb_by is not None:
if symb_by in self._transitions[s_from]:
return self._transitions[s_from][symb_by]
else:
return self._transitions[s_from].items()
return set()
def is_deterministic(self):
""" Whether the transition function is deterministic
Returns
----------
is_deterministic : bool
Whether the function is deterministic
"""
for s_from in self._transitions:
for symb in self._transitions[s_from]:
if len(self._transitions[s_from][symb]) > 1:
return False
return True
def get_edges(self):
""" Gets the edges
Returns
----------
edges : generator of (:class:`~pyformlang.finite_automaton.State`, \
:class:`~pyformlang.finite_automaton.Symbol`,\
:class:`~pyformlang.finite_automaton.State`)
A generator of edges
"""
for state in self._transitions:
for symbol in self._transitions[state]:
for next_state in self._transitions[state][symbol]:
yield state, symbol, next_state
| 30.143791 | 84 | 0.58196 | """
A nondeterministic transition function
"""
import copy
from typing import Set
from .state import State
from .symbol import Symbol
class NondeterministicTransitionFunction(object):
    """ A nondeterministic transition function in a finite automaton.

    The difference with a deterministic transition is that the return value is
    a set of States
    """

    def __init__(self):
        # Nested mapping: state -> {symbol -> set of destination states}.
        self._transitions = dict()

    def add_transition(self, s_from: State, symb_by: Symbol, s_to: State) -> int:
        """ Adds a new transition to the function

        Parameters
        ----------
        s_from : :class:`~pyformlang.finite_automaton.State`
            The source state
        symb_by : :class:`~pyformlang.finite_automaton.Symbol`
            The transition symbol
        s_to : :class:`~pyformlang.finite_automaton.State`
            The destination state


        Returns
        --------
        done : int
            Always 1
        """
        if s_from in self._transitions:
            if symb_by in self._transitions[s_from]:
                self._transitions[s_from][symb_by].add(s_to)
            else:
                self._transitions[s_from][symb_by] = {s_to}
        else:
            # First transition out of s_from: create its symbol map.
            self._transitions[s_from] = dict()
            self._transitions[s_from][symb_by] = {s_to}
        return 1

    def remove_transition(self, s_from: State, symb_by: Symbol, s_to: State) -> int:
        """ Removes a transition to the function

        Parameters
        ----------
        s_from : :class:`~pyformlang.finite_automaton.State`
            The source state
        symb_by : :class:`~pyformlang.finite_automaton.Symbol`
            The transition symbol
        s_to : :class:`~pyformlang.finite_automaton.State`
            The destination state


        Returns
        --------
        done : int
            1 is the transition was found, 0 otherwise
        """
        if s_from in self._transitions and \
                symb_by in self._transitions[s_from] and \
                s_to in self._transitions[s_from][symb_by]:
            self._transitions[s_from][symb_by].remove(s_to)
            return 1
        return 0

    def get_number_transitions(self) -> int:
        """ Gives the number of transitions describe by the function

        Returns
        ----------
        n_transitions : int
            The number of transitions
        """
        # Count every (state, symbol, destination) triple.
        counter = 0
        for s_from in self._transitions:
            for symb_by in self._transitions[s_from]:
                counter += len(self._transitions[s_from][symb_by])
        return counter

    def __len__(self):
        # len(f) is the total number of transitions, not of states.
        return self.get_number_transitions()

    def __call__(self, s_from: State, symb_by: Symbol=None) -> Set[State]:
        """ Calls the transition function as a real function

        Parameters
        ----------
        s_from : :class:`~pyformlang.finite_automaton.State`
            The source state
        symb_by : :class:`~pyformlang.finite_automaton.Symbol`
            The transition symbol (optional; when omitted, all outgoing
            transitions of *s_from* are returned)

        Returns
        ----------
        s_to : set of :class:`~pyformlang.finite_automaton.State`
            The set of destination states (empty set if none exist).
            NOTE: when *symb_by* is None and *s_from* has transitions,
            a ``dict_items`` view of (symbol, states) pairs is returned
            instead of a set -- callers rely on this historical quirk.
        """
        if s_from in self._transitions:
            if symb_by is not None:
                if symb_by in self._transitions[s_from]:
                    return self._transitions[s_from][symb_by]
            else:
                return self._transitions[s_from].items()
        return set()

    def is_deterministic(self):
        """ Whether the transition function is deterministic

        Returns
        ----------
        is_deterministic : bool
            Whether the function is deterministic
        """
        # Deterministic iff no (state, symbol) pair has more than one
        # destination state.
        for s_from in self._transitions:
            for symb in self._transitions[s_from]:
                if len(self._transitions[s_from][symb]) > 1:
                    return False
        return True

    def get_edges(self):
        """ Gets the edges

        Returns
        ----------
        edges : generator of (:class:`~pyformlang.finite_automaton.State`, \
            :class:`~pyformlang.finite_automaton.Symbol`,\
            :class:`~pyformlang.finite_automaton.State`)
            A generator of edges
        """
        for state in self._transitions:
            for symbol in self._transitions[state]:
                for next_state in self._transitions[state][symbol]:
                    yield state, symbol, next_state

    def __iter__(self):
        # Iterating the function yields its (state, symbol, state) edges.
        yield from self.get_edges()

    def to_dict(self):
        """Return a deep copy of the underlying transition mapping,
        so callers cannot mutate the internal state."""
        return copy.deepcopy(self._transitions)
| 154 | 0 | 108 |
64128d742c77eb39ea55a053c783a5b7068a25bc | 1,071 | py | Python | services/core/VolttronCentral/volttroncentral/resource_directory.py | Entek-Technical-Services/BEMOSS3.5 | 581a205b4129530474a5ceee93cb36ef62992d4c | [
"BSD-3-Clause"
] | 73 | 2017-07-11T21:46:41.000Z | 2022-03-11T03:35:25.000Z | services/core/VolttronCentral/volttroncentral/resource_directory.py | Entek-Technical-Services/BEMOSS3.5 | 581a205b4129530474a5ceee93cb36ef62992d4c | [
"BSD-3-Clause"
] | 19 | 2017-10-10T22:06:15.000Z | 2022-03-28T21:03:33.000Z | services/core/VolttronCentral/volttroncentral/resource_directory.py | Entek-Technical-Services/BEMOSS3.5 | 581a205b4129530474a5ceee93cb36ef62992d4c | [
"BSD-3-Clause"
] | 36 | 2017-06-24T00:17:03.000Z | 2022-03-31T13:58:36.000Z | import contextlib
import os
import shelve
from .registry import PlatformRegistry
| 26.775 | 71 | 0.580766 | import contextlib
import os
import shelve
from .registry import PlatformRegistry
class ResourceDirectory:
    """Persistent key/value store (a shelve file under VOLTTRON_HOME)
    wired into a PlatformRegistry as its save/retrieve hooks."""

    def __init__(self):
        self._datafile = os.path.join(os.environ['VOLTTRON_HOME'],
                                      'data/resources.shelve')

        def save_object(key, data):
            # Persist `data` under `key`; mode 'c' creates the shelf file
            # if it does not exist yet.
            if not isinstance(key, basestring):
                raise ValueError('keys must be a string')
            with contextlib.closing(
                    shelve.open(self._datafile, 'c')) as shelf:
                shelf[key] = data

        def retrieve_object(key):
            # Look up `key`; a missing data file is reported as a bad key.
            if not isinstance(key, basestring):
                raise ValueError('keys must be a string')
            if not os.path.exists(self._datafile):
                raise KeyError('invalid key')
            with contextlib.closing(
                    shelve.open(self._datafile, 'r')) as shelf:
                return shelf[key]

        # The two closures above become the registry's persistence hooks.
        # NOTE: `basestring` makes this Python 2 code.
        self._registry = PlatformRegistry(retrieve_object, save_object)

    @property
    def platform_registry(self):
        # Read-only access to the registry built in __init__.
        return self._registry
| 892 | 71 | 23 |
df580d90dc51102943202cb2497c0b87fabad9ff | 4,352 | py | Python | papi_sdk/models/hotel_info.py | stanislav-losev/papi-sdk-python | 4a296745d626ef13c6d1170e9d3569cb1c37eb3c | [
"MIT"
] | 1 | 2020-12-30T13:06:41.000Z | 2020-12-30T13:06:41.000Z | papi_sdk/models/hotel_info.py | stanislav-losev/papi-sdk-python | 4a296745d626ef13c6d1170e9d3569cb1c37eb3c | [
"MIT"
] | 2 | 2021-01-18T07:57:29.000Z | 2021-06-23T11:04:14.000Z | papi_sdk/models/hotel_info.py | stanislav-losev/papi-sdk-python | 4a296745d626ef13c6d1170e9d3569cb1c37eb3c | [
"MIT"
] | 3 | 2020-12-30T13:09:45.000Z | 2020-12-30T13:42:33.000Z | from datetime import time
from typing import List, Optional
from pydantic import BaseModel, Field
from papi_sdk.models.base import BaseResponse
| 21.229268 | 56 | 0.700827 | from datetime import time
from typing import List, Optional
from pydantic import BaseModel, Field
from papi_sdk.models.base import BaseResponse
class HotelInfoRequest(BaseModel):
id: str
language: str
class AmenityGroup(BaseModel):
amenities: List[str]
group_name: Optional[str]
class DescriptionItem(BaseModel):
paragraphs: List[str]
title: Optional[str]
class PolicyItem(BaseModel):
paragraphs: List[str]
title: Optional[str]
class Region(BaseModel):
country_code: str
iata: Optional[str]
id: int
name: Optional[str]
type: str
class RgExt(BaseModel):
rg_class: Optional[int] = Field(None, alias="class")
quality: Optional[int]
sex: Optional[int]
bathroom: Optional[int]
bedding: Optional[int]
family: Optional[int]
capacity: Optional[int]
club: Optional[int]
class RoomGroup(BaseModel):
images: List[str]
name: str
room_amenities: List[str]
room_group_id: Optional[int]
rg_ext: RgExt
class Visa(BaseModel):
visa_support: Optional[str]
class Shuttle(BaseModel):
currency: Optional[str]
inclusion: Optional[str]
shuttle_type: Optional[str]
price: Optional[float]
destination_type: Optional[str]
class Pets(BaseModel):
currency: Optional[str]
inclusion: Optional[str]
pets_type: Optional[str]
price: Optional[float]
price_unit: Optional[str]
class Parking(BaseModel):
currency: Optional[str]
inclusion: Optional[str]
territory_type: Optional[str]
price: Optional[float]
price_unit: Optional[str]
class NoShow(BaseModel):
availability: Optional[str]
day_period: Optional[str]
time: Optional[str]
class Meal(BaseModel):
currency: Optional[str]
inclusion: Optional[str]
meal_type: Optional[str]
price: Optional[float]
class Internet(BaseModel):
currency: Optional[str]
inclusion: Optional[str]
internet_type: Optional[str]
price: Optional[float]
price_unit: Optional[str]
work_area: Optional[str]
class ExtraBed(BaseModel):
amount: Optional[int]
currency: Optional[str]
inclusion: Optional[str]
price: Optional[float]
price_unit: Optional[str]
class Deposit(BaseModel):
availability: Optional[str]
currency: Optional[str]
deposit_type: Optional[str]
payment_type: Optional[str]
price: Optional[float]
price_unit: Optional[str]
pricing_method: Optional[str]
class Cradle(BaseModel):
amount: Optional[int]
currency: Optional[str]
inclusion: Optional[str]
price: Optional[float]
price_unit: Optional[str]
class ChildrenMeal(BaseModel):
age_end: Optional[int]
age_start: Optional[int]
currency: Optional[str]
inclusion: Optional[str]
meal_type: Optional[str]
price: Optional[float]
class Children(BaseModel):
age_end: Optional[int]
age_start: Optional[int]
currency: Optional[str]
extra_bed: Optional[str]
price: Optional[float]
class CheckinCheckout(BaseModel):
currency: Optional[str]
inclusion: Optional[str]
price: Optional[float]
class AddFee(BaseModel):
currency: Optional[str]
fee_type: Optional[str]
price: Optional[float]
price_unit: Optional[str]
class MetapolicyStruct(BaseModel):
    """Aggregated property policies (internet, fees, meals, parking, etc.).

    NOTE(review): some list fields are Optional while others are not --
    presumably mirroring the upstream API schema; confirm before changing.
    """
    internet: List[Internet]
    add_fee: List[AddFee]
    check_in_check_out: Optional[List[CheckinCheckout]]
    children: List[Children]
    children_meal: List[ChildrenMeal]
    cradle: Optional[List[Cradle]]
    deposit: List[Deposit]
    extra_bed: List[ExtraBed]
    meal: List[Meal]
    no_show: Optional[NoShow]
    parking: List[Parking]
    pets: List[Pets]
    shuttle: List[Shuttle]
    visa: Optional[Visa]
class HotelInfoData(BaseModel):
    """Static hotel content: location, amenities, policies and room groups."""
    address: str
    amenity_groups: List[AmenityGroup]
    check_in_time: time
    check_out_time: time
    description_struct: List[DescriptionItem]
    email: Optional[str]
    id: str
    images: List[str]
    kind: str
    latitude: float
    longitude: float
    name: str
    metapolicy_struct: MetapolicyStruct
    phone: Optional[str]
    policy_struct: List[PolicyItem]
    postal_code: Optional[str]
    region: Region
    room_groups: List[RoomGroup]
    star_rating: int
    serp_filters: List[str]
    is_closed: bool
class HotelInfoResponse(BaseResponse):
    """Response wrapper for a hotel-info request; ``data`` may be None."""
    data: Optional[HotelInfoData]
| 0 | 3,630 | 552 |
e9493de09e87b4f9f1b79fed10ada95277c07938 | 663 | py | Python | scripts/__V2/login.py | AutoCoinDCF/NEW_API | f4abc48fff907a0785372b941afcd67e62eec825 | [
"Apache-2.0"
] | null | null | null | scripts/__V2/login.py | AutoCoinDCF/NEW_API | f4abc48fff907a0785372b941afcd67e62eec825 | [
"Apache-2.0"
] | null | null | null | scripts/__V2/login.py | AutoCoinDCF/NEW_API | f4abc48fff907a0785372b941afcd67e62eec825 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*-coding:utf-8-*-
# net login
#
# Log in to the gw.ict.ac.cn network portal with a headless browser and
# dump the resulting page to dump.html for inspection.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import codecs
import os

driver = webdriver.PhantomJS()
#driver = webdriver.Firefox(executable_path='/usr/local/bin/geckodriver')
try:
    driver.get('https://gw.ict.ac.cn/srun_portal_pc.php?ac_id=1&')
    # Fill in the credentials and submit the form via RETURN.
    name = driver.find_element_by_name("username")
    name.send_keys('user_key')
    password = driver.find_element_by_id('password')
    password.send_keys('user_value')
    password.send_keys(Keys.RETURN)
    time.sleep(1)  # give the portal a moment to process the login
    html = driver.page_source
    # `with` guarantees the dump file is closed -- the original leaked
    # the handle opened by codecs.open().
    with codecs.open("dump.html", "w", "utf-8") as file_object:
        file_object.write(html)
finally:
    # Always release the browser process, even when login fails; the
    # original skipped driver.quit() on any exception.
    driver.quit()
| 23.678571 | 73 | 0.766214 | #!/usr/bin/env python
# -*-coding:utf-8-*-
# net login
#
# Log in to the gw.ict.ac.cn network portal with a headless browser and
# dump the resulting page to dump.html for inspection.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
import codecs
import os

driver = webdriver.PhantomJS()
#driver = webdriver.Firefox(executable_path='/usr/local/bin/geckodriver')
try:
    driver.get('https://gw.ict.ac.cn/srun_portal_pc.php?ac_id=1&')
    # Fill in the credentials and submit the form via RETURN.
    name = driver.find_element_by_name("username")
    name.send_keys('user_key')
    password = driver.find_element_by_id('password')
    password.send_keys('user_value')
    password.send_keys(Keys.RETURN)
    time.sleep(1)  # give the portal a moment to process the login
    html = driver.page_source
    # `with` guarantees the dump file is closed -- the original leaked
    # the handle opened by codecs.open().
    with codecs.open("dump.html", "w", "utf-8") as file_object:
        file_object.write(html)
finally:
    # Always release the browser process, even when login fails; the
    # original skipped driver.quit() on any exception.
    driver.quit()
| 0 | 0 | 0 |
c854f1df1820a7eee9b465b150768785d9183c01 | 6,520 | py | Python | research/cv/PDarts/src/model.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 77 | 2021-10-15T08:32:37.000Z | 2022-03-30T13:09:11.000Z | research/cv/PDarts/src/model.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 3 | 2021-10-30T14:44:57.000Z | 2022-02-14T06:57:57.000Z | research/cv/PDarts/src/model.py | mindspore-ai/models | 9127b128e2961fd698977e918861dadfad00a44c | [
"Apache-2.0"
] | 24 | 2021-10-15T08:32:45.000Z | 2022-03-24T18:45:20.000Z | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The PDarts model file."""
import mindspore.nn as nn
import mindspore.ops as P
from src.operations import FactorizedReduce, ReLUConvBN, OPS
from src.my_utils import drop_path
class Module(nn.Cell):
    """
    The module of PDarts.

    One network cell: applies a fixed list of candidate operations to
    intermediate states and incrementally concatenates a window of the
    resulting states.

    NOTE(review): no ``__init__`` appears in this excerpt; the attributes
    read below (``preprocess0``, ``preprocess1``, ``div``, ``mul``,
    ``concat_1``, ``concat_start``, ``concat_end``) must be provided by an
    initializer defined elsewhere -- confirm before use.
    """
    def _compile(self, C, op_names, indices, concat, reduction):
        """
        Combine the functions of model.

        Builds ``self._ops`` from ``op_names`` (two operations per step);
        ``indices`` records which earlier state each operation consumes.
        """
        assert len(op_names) == len(indices)
        self._steps = len(op_names) // 2
        self._concat = concat
        self.multiplier = len(concat)
        self._ops = nn.CellList()
        for name, index in zip(op_names, indices):
            # Only ops fed by the two cell inputs (index < 2) downsample
            # in a reduction cell; later states already have reduced size.
            stride = 2 if reduction and index < 2 else 1
            op = OPS[name](C, stride, True)
            self._ops += [op]
        self._indices = indices
    def construct(self, s0, s1, drop_prob, layer_mask):
        """
        Do the module.

        ``s0``/``s1`` are the outputs of the two preceding cells;
        ``drop_prob`` is the drop-path probability and ``layer_mask``
        supplies one mask per operation.  Returns the concatenation of
        the states in [concat_start, concat_end].
        """
        s0 = self.preprocess0(s0)
        s1 = self.preprocess1(s1)
        concat_result = None
        states = [s0, s1]
        for i in range(self._steps):
            h1 = states[self._indices[2 * i]]
            h2 = states[self._indices[2 * i + 1]]
            op1 = self._ops[2 * i]
            op2 = self._ops[2 * i + 1]
            h1 = op1(h1)
            h2 = op2(h2)
            if self.training and drop_prob > 0.:
                # Drop-path regularization is applied during training only.
                h1 = drop_path(self.div, self.mul, h1,
                               drop_prob, layer_mask[i * 2])
                h2 = drop_path(self.div, self.mul, h2,
                               drop_prob, layer_mask[i * 2 + 1])
            s = h1 + h2
            states.append(s)
            # Concatenate states incrementally as each one enters the
            # concat window instead of collecting them all first.
            if len(states) - 1 == self.concat_start + 1 and len(states) - 1 <= self.concat_end:
                concat_result = self.concat_1(
                    (states[len(states) - 2], states[len(states) - 1]))
            elif len(states) - 1 > self.concat_start + 1 and len(states) - 1 <= self.concat_end:
                concat_result = self.concat_1(
                    (concat_result, states[len(states) - 1]))
        return concat_result
class AuxiliaryHeadCIFAR(nn.Cell):
    """
    Define the Auxiliary Head.

    Side classifier applied to an intermediate feature map.
    NOTE(review): this excerpt defines no ``construct`` method; it is
    presumably defined elsewhere.
    """
    def __init__(self, C, num_classes):
        """assuming input size 8x8

        :param C: number of input channels.
        :param num_classes: output size of the final dense layer.
        """
        super(AuxiliaryHeadCIFAR, self).__init__()
        self.features = nn.SequentialCell(
            nn.ReLU(),
            nn.AvgPool2d(5, stride=3),  # image size = 2 x 2
            nn.Conv2d(C, 128, 1, pad_mode='pad', has_bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(),
            nn.Conv2d(128, 768, 2, pad_mode='pad', has_bias=False),
            nn.BatchNorm2d(768),
            nn.ReLU()
        )
        self.reshape = P.Reshape()
        self.classifier = nn.Dense(768, num_classes)
class NetworkCIFAR(nn.Cell):
    """
    The PDarts model define

    NOTE(review): no ``__init__`` appears in this excerpt; ``construct``
    reads ``self.stem``, ``self.cell_list``, ``self.auxiliary_head``,
    ``self.global_pooling``, ``self.reshape``, ``self.classifier``,
    ``self._layers``, ``self._auxiliary`` as well as
    ``self.drop_path_prob`` and ``self.epoch_mask`` -- all must be set
    elsewhere before calling this.
    """
    def construct(self, x):
        """
        Do the model.

        Returns ``(logits, logits_aux)`` while training with the
        auxiliary head enabled, otherwise just ``logits``.
        """
        logits_aux = None
        s0 = s1 = self.stem(x)
        for i in range(len(self.cell_list)):
            cell = self.cell_list[i]
            s0, s1 = s1, cell(s0, s1, self.drop_path_prob, self.epoch_mask[i])
            if i == 2 * self._layers // 3:
                if self._auxiliary and self.training:
                    # The auxiliary classifier taps the features two
                    # thirds of the way through the network.
                    logits_aux = self.auxiliary_head(s1)
        out = self.global_pooling(s1, (2, 3))  # average over spatial dims
        out = self.reshape(out, (out.shape[0], -1))
        logits = self.classifier(out)
        if self._auxiliary and self.training:
            return logits, logits_aux
        return logits
| 35.824176 | 96 | 0.560276 | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""The PDarts model file."""
import mindspore.nn as nn
import mindspore.ops as P
from src.operations import FactorizedReduce, ReLUConvBN, OPS
from src.my_utils import drop_path
class Module(nn.Cell):
    """
    The module of PDarts.

    One network cell built from a genotype: the genotype selects the
    operations and their input states; the cell mixes them and
    concatenates a window of the intermediate states.
    """
    def __init__(self, genotype, C_prev_prev, C_prev, C, reduction, reduction_prev):
        # Initialize the cell from the genotype.
        #
        # :param genotype: holds ``normal``/``reduce`` op lists and the
        #     corresponding ``*_concat`` ranges.
        # :param C_prev_prev, C_prev: channel counts of the two inputs.
        # :param C: channel count of this cell's operations.
        # :param reduction: whether this cell downsamples.
        # :param reduction_prev: whether the previous cell downsampled
        #     (then input 0 needs a FactorizedReduce to match sizes).
        super(Module, self).__init__()
        if reduction_prev:
            self.preprocess0 = FactorizedReduce(C_prev_prev, C)
        else:
            self.preprocess0 = ReLUConvBN(C_prev_prev, C, 1, 1, 0)
        self.preprocess1 = ReLUConvBN(C_prev, C, 1, 1, 0)
        if reduction:
            op_names, indices = zip(*genotype.reduce)
            concat = genotype.reduce_concat
        else:
            op_names, indices = zip(*genotype.normal)
            concat = genotype.normal_concat
        self._compile(C, op_names, indices, concat, reduction)
        self.div = P.Div()
        self.mul = P.Mul()
        self.concat_1 = P.Concat(axis=1)
        # concat is expected to be a range-like object (it has .start).
        self.concat_start = self._concat.start
        self.concat_end = self.concat_start + len(self._concat) - 1
    def _compile(self, C, op_names, indices, concat, reduction):
        """
        Combine the functions of model.

        Builds ``self._ops`` from ``op_names`` (two operations per step);
        ``indices`` records which earlier state each operation consumes.
        """
        assert len(op_names) == len(indices)
        self._steps = len(op_names) // 2
        self._concat = concat
        self.multiplier = len(concat)
        self._ops = nn.CellList()
        for name, index in zip(op_names, indices):
            # Only ops fed by the two cell inputs (index < 2) downsample
            # in a reduction cell.
            stride = 2 if reduction and index < 2 else 1
            op = OPS[name](C, stride, True)
            self._ops += [op]
        self._indices = indices
    def construct(self, s0, s1, drop_prob, layer_mask):
        """
        Do the module.

        ``s0``/``s1`` are the outputs of the two preceding cells;
        ``drop_prob`` is the drop-path probability and ``layer_mask``
        supplies one mask per operation.  Returns the concatenation of
        the states in [concat_start, concat_end].
        """
        s0 = self.preprocess0(s0)
        s1 = self.preprocess1(s1)
        concat_result = None
        states = [s0, s1]
        for i in range(self._steps):
            h1 = states[self._indices[2 * i]]
            h2 = states[self._indices[2 * i + 1]]
            op1 = self._ops[2 * i]
            op2 = self._ops[2 * i + 1]
            h1 = op1(h1)
            h2 = op2(h2)
            if self.training and drop_prob > 0.:
                # Drop-path regularization is applied during training only.
                h1 = drop_path(self.div, self.mul, h1,
                               drop_prob, layer_mask[i * 2])
                h2 = drop_path(self.div, self.mul, h2,
                               drop_prob, layer_mask[i * 2 + 1])
            s = h1 + h2
            states.append(s)
            # Concatenate states incrementally as each one enters the
            # concat window instead of collecting them all first.
            if len(states) - 1 == self.concat_start + 1 and len(states) - 1 <= self.concat_end:
                concat_result = self.concat_1(
                    (states[len(states) - 2], states[len(states) - 1]))
            elif len(states) - 1 > self.concat_start + 1 and len(states) - 1 <= self.concat_end:
                concat_result = self.concat_1(
                    (concat_result, states[len(states) - 1]))
        return concat_result
"""
Define the Auxiliary Head.
"""
def __init__(self, C, num_classes):
"""assuming input size 8x8"""
super(AuxiliaryHeadCIFAR, self).__init__()
self.features = nn.SequentialCell(
nn.ReLU(),
nn.AvgPool2d(5, stride=3), # image size = 2 x 2
nn.Conv2d(C, 128, 1, pad_mode='pad', has_bias=False),
nn.BatchNorm2d(128),
nn.ReLU(),
nn.Conv2d(128, 768, 2, pad_mode='pad', has_bias=False),
nn.BatchNorm2d(768),
nn.ReLU()
)
self.reshape = P.Reshape()
self.classifier = nn.Dense(768, num_classes)
def construct(self, x):
x = self.features(x)
x = self.reshape(x, (x.shape[0], -1))
x = self.classifier(x)
return x
class NetworkCIFAR(nn.Cell):
    """
    The PDarts model define

    NOTE(review): ``construct`` reads ``self.drop_path_prob`` and
    ``self.epoch_mask`` which are never assigned in ``__init__`` -- they
    are presumably set externally by the training loop; confirm.
    """
    def __init__(self, C, num_classes, layers, auxiliary, genotype):
        # Build the stem plus ``layers`` cells; cells at 1/3 and 2/3 of
        # the depth are reduction cells (channels double there).
        super(NetworkCIFAR, self).__init__()
        self._layers = layers
        self._auxiliary = auxiliary
        stem_multiplier = 3
        C_curr = stem_multiplier * C
        self.stem = nn.SequentialCell(
            nn.Conv2d(3, C_curr, 3, padding=1, pad_mode='pad', has_bias=False),
            nn.BatchNorm2d(C_curr)
        )
        C_prev_prev, C_prev, C_curr = C_curr, C_curr, C
        self.cell_list = nn.CellList()
        reduction_prev = False
        for i in range(layers):
            if i in [layers // 3, 2 * layers // 3]:
                C_curr *= 2
                reduction = True
            else:
                reduction = False
            cell = Module(genotype, C_prev_prev, C_prev,
                          C_curr, reduction, reduction_prev)
            reduction_prev = reduction
            self.cell_list += [cell]
            C_prev_prev, C_prev = C_prev, cell.multiplier * C_curr
            if i == 2 * layers // 3:
                # Remember the channel count where the auxiliary head taps in.
                C_to_auxiliary = C_prev
        if auxiliary:
            self.auxiliary_head = AuxiliaryHeadCIFAR(
                C_to_auxiliary, num_classes)
        self.global_pooling = P.ReduceMean(keep_dims=True)
        self.reshape = P.Reshape()
        self.classifier = nn.Dense(C_prev, num_classes)
    def construct(self, x):
        """
        Do the model.

        Returns ``(logits, logits_aux)`` while training with the
        auxiliary head enabled, otherwise just ``logits``.
        """
        logits_aux = None
        s0 = s1 = self.stem(x)
        for i in range(len(self.cell_list)):
            cell = self.cell_list[i]
            s0, s1 = s1, cell(s0, s1, self.drop_path_prob, self.epoch_mask[i])
            if i == 2 * self._layers // 3:
                if self._auxiliary and self.training:
                    # The auxiliary classifier taps the features two
                    # thirds of the way through the network.
                    logits_aux = self.auxiliary_head(s1)
        out = self.global_pooling(s1, (2, 3))  # average over spatial dims
        out = self.reshape(out, (out.shape[0], -1))
        logits = self.classifier(out)
        if self._auxiliary and self.training:
            return logits, logits_aux
        return logits
| 2,273 | 0 | 81 |
d0c0672955a392d3debf27ac64849b0b73eb088c | 1,422 | py | Python | dummyapis/views.py | chYash/Testing-Api | bf7e8035f81c723496f1a6adddeab0bfeb2752c5 | [
"MIT"
] | null | null | null | dummyapis/views.py | chYash/Testing-Api | bf7e8035f81c723496f1a6adddeab0bfeb2752c5 | [
"MIT"
] | null | null | null | dummyapis/views.py | chYash/Testing-Api | bf7e8035f81c723496f1a6adddeab0bfeb2752c5 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from rest_framework.decorators import api_view
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework import status
from rest_framework.response import Response
from .models import *
from .serializers import *
from django.core import serializers
from rest_framework_jwt.utils import jwt_decode_handler
import jwt
from rest_framework.views import APIView
from rest_framework.generics import ListAPIView, RetrieveAPIView
from rest_framework.pagination import PageNumberPagination
from django_filters.rest_framework import DjangoFilterBackend
from django.utils.decorators import method_decorator
from rest_framework import viewsets | 30.255319 | 64 | 0.81083 | from django.shortcuts import render
from rest_framework.decorators import api_view
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework import status
from rest_framework.response import Response
from .models import *
from .serializers import *
from django.core import serializers
from rest_framework_jwt.utils import jwt_decode_handler
import jwt
from rest_framework.views import APIView
from rest_framework.generics import ListAPIView, RetrieveAPIView
from rest_framework.pagination import PageNumberPagination
from django_filters.rest_framework import DjangoFilterBackend
from django.utils.decorators import method_decorator
from rest_framework import viewsets
class BugViewsetAPI(viewsets.ModelViewSet):
    """Full CRUD endpoints (list/retrieve/create/update/delete) for Bug records."""
    queryset = Bug.objects.all()
    serializer_class = BugSerializer
class ExpenseViewsetAPI(viewsets.ModelViewSet):
    """Full CRUD endpoints (list/retrieve/create/update/delete) for Expense records."""
    queryset = Expense.objects.all()
    serializer_class = ExpenseSerializer
class UserViewSetAPI(viewsets.ModelViewSet):
    """Full CRUD endpoints (list/retrieve/create/update/delete) for User records."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
class MonthlyExpenseViewSetAPI(RetrieveAPIView):
    """Read-only endpoint serializing one user's monthly-expense summary.

    Looks the user up by the ``id`` URL keyword argument (not the DRF
    default ``pk`` lookup).
    """
    queryset = User.objects.all()
    serializer_class = MonthlyExpenseSerializer

    def retrieve(self, request, *args, **kwargs):
        """Return the serialized user identified by ``kwargs['id']``.

        Fix: the original let ``User.DoesNotExist`` escape, which Django
        surfaced as a 500; a missing user now yields a proper 404.
        """
        try:
            user = self.queryset.get(id=kwargs["id"])
        except User.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)
        serializer = self.serializer_class(user)
        return Response(serializer.data)
b4e81a402d50896e82c564f3d102fbcebb31b5ca | 196 | py | Python | Crafting_Quality_Code_UniToronto/week2_testing/lecture/dict.py | bounty030/Coursera | ff265343635a0109b6deab31f2a112d304d020cb | [
"MIT"
] | 1 | 2021-01-17T15:13:49.000Z | 2021-01-17T15:13:49.000Z | Crafting_Quality_Code_UniToronto/week2_testing/lecture/dict.py | bounty030/Coursera | ff265343635a0109b6deab31f2a112d304d020cb | [
"MIT"
] | null | null | null | Crafting_Quality_Code_UniToronto/week2_testing/lecture/dict.py | bounty030/Coursera | ff265343635a0109b6deab31f2a112d304d020cb | [
"MIT"
] | 1 | 2021-01-17T15:13:16.000Z | 2021-01-17T15:13:16.000Z |
if __name__ == '__main__':
    # Lecture demo of in-place dict mutation.  NOTE(review): `change` is
    # not defined in this excerpt; elsewhere in the file it mutates its
    # argument and returns None, so `a` ends up as None.
    expected1 = {'c': 2, 'b': 2, 'a': 1}
    expected2 = {'a': 1, 'b': 2, 'c': 2}
    a = change(expected1)
print(expected1 == a) | 15.076923 | 40 | 0.469388 |
def change(d):
    """Mutate *d* in place, setting the value stored under key 'a' to 0."""
    d.update(a=0)
if __name__ == '__main__':
    # Demonstrates mutation vs. return value: change() updates expected1
    # in place and returns None, so comparing expected1 with `a` below
    # prints False.
    expected1 = {'c': 2, 'b': 2, 'a': 1}
    expected2 = {'a': 1, 'b': 2, 'c': 2}
    a = change(expected1)
print(expected1 == a) | 9 | 0 | 23 |
5f7ed9bbec03310b3305a59cb7263e14fbf57ac3 | 353,613 | py | Python | src/sage/combinat/finite_state_machine.py | drvinceknight/sage | 00199fb220aa173d8585b9e90654dafd3247d82d | [
"BSL-1.0"
] | 2 | 2015-08-11T05:05:47.000Z | 2019-05-15T17:27:25.000Z | src/sage/combinat/finite_state_machine.py | kaushik94/sage | 00199fb220aa173d8585b9e90654dafd3247d82d | [
"BSL-1.0"
] | null | null | null | src/sage/combinat/finite_state_machine.py | kaushik94/sage | 00199fb220aa173d8585b9e90654dafd3247d82d | [
"BSL-1.0"
] | 1 | 2020-07-24T12:04:03.000Z | 2020-07-24T12:04:03.000Z | # -*- coding: utf-8 -*-
r"""
Finite State Machines, Automata, Transducers
This module adds support for finite state machines, automata and
transducers. See classes :class:`Automaton` and :class:`Transducer`
(or the more general class :class:`FiniteStateMachine`) and the
:ref:`examples <finite_state_machine_examples>` below for
details creating one.
Contents
========
:class:`FiniteStateMachine` and derived classes :class:`Transducer` and :class:`Automaton`
------------------------------------------------------------------------------------------
Accessing parts of a finite state machine
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.state` | Get a state by its label
:meth:`~FiniteStateMachine.states` | List of states
:meth:`~FiniteStateMachine.iter_states` | Iterator over the states
:meth:`~FiniteStateMachine.initial_states` | List of initial states
:meth:`~FiniteStateMachine.iter_initial_states` | Iterator over initial states
:meth:`~FiniteStateMachine.final_states` | List of final states
:meth:`~FiniteStateMachine.iter_final_states` | Iterator over final states
:meth:`~FiniteStateMachine.transition` | Get a transition by its states and labels
:meth:`~FiniteStateMachine.transitions` | List of transitions
:meth:`~FiniteStateMachine.iter_transitions` | Iterator over the transitions
:meth:`~FiniteStateMachine.predecessors` | List of predecessors of a state
:meth:`~FiniteStateMachine.induced_sub_finite_state_machine` | Induced sub-machine
:meth:`~FiniteStateMachine.accessible_components` | Accessible components
:meth:`~FiniteStateMachine.final_components` | Final components (connected components which cannot be left again)
(Modified) Copies
^^^^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.empty_copy` | Returns an empty deep copy
:meth:`~FiniteStateMachine.deepcopy` | Returns a deep copy
:meth:`~FiniteStateMachine.relabeled` | Returns a relabeled deep copy
Manipulation
^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.add_state` | Add a state
:meth:`~FiniteStateMachine.add_states` | Add states
:meth:`~FiniteStateMachine.delete_state` | Delete a state
:meth:`~FiniteStateMachine.add_transition` | Add a transition
:meth:`~FiniteStateMachine.add_transitions_from_function` | Add transitions
:attr:`~FiniteStateMachine.on_duplicate_transition` | Hook for handling duplicate transitions
:meth:`~FiniteStateMachine.add_from_transition_function` | Add transitions by a transition function
:meth:`~FiniteStateMachine.delete_transition` | Delete a transition
:meth:`~FiniteStateMachine.remove_epsilon_transitions` | Remove epsilon transitions (not implemented)
:meth:`~FiniteStateMachine.split_transitions` | Split transitions with input words of length ``> 1``
:meth:`~FiniteStateMachine.determine_alphabets` | Determines input and output alphabets
:meth:`~FiniteStateMachine.construct_final_word_out` | Construct final output by implicitly reading trailing letters; cf. :meth:`~FiniteStateMachine.with_final_word_out`
Properties
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.has_state` | Checks for a state
:meth:`~FiniteStateMachine.has_initial_state` | Checks for an initial state
:meth:`~FiniteStateMachine.has_initial_states` | Checks for initial states
:meth:`~FiniteStateMachine.has_final_state` | Checks for an final state
:meth:`~FiniteStateMachine.has_final_states` | Checks for final states
:meth:`~FiniteStateMachine.has_transition` | Checks for a transition
:meth:`~FiniteStateMachine.is_deterministic` | Checks for a deterministic machine
:meth:`~FiniteStateMachine.is_complete` | Checks for a complete machine
:meth:`~FiniteStateMachine.is_connected` | Checks for a connected machine
:meth:`~FiniteStateMachine.is_Markov_chain` | Checks for a Markov chain
:meth:`~FiniteStateMachine.is_monochromatic` | Checks whether the colors of all states are equal
:meth:`~FiniteStateMachine.asymptotic_moments` | Main terms of expectation and variance of sums of labels
Operations
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.disjoint_union` | Disjoint union (not implemented)
:meth:`~FiniteStateMachine.concatenation` | Concatenation (not implemented)
:meth:`~FiniteStateMachine.Kleene_closure` | Kleene closure (not implemented)
:meth:`Automaton.intersection` | Intersection of automata
:meth:`Transducer.intersection` | Intersection of transducers
:meth:`Transducer.cartesian_product` | Cartesian product of a transducer with another finite state machine
:meth:`~FiniteStateMachine.product_FiniteStateMachine` | Product of finite state machines
:meth:`~FiniteStateMachine.composition` | Composition (output of other is input of self)
:meth:`~FiniteStateMachine.input_projection` | Input projection (output is deleted)
:meth:`~FiniteStateMachine.output_projection` | Output projection (old output is new input)
:meth:`~FiniteStateMachine.projection` | Input or output projection
:meth:`~FiniteStateMachine.transposition` | Transposition (all transitions are reversed)
:meth:`~FiniteStateMachine.with_final_word_out` | Machine with final output constructed by implicitly reading trailing letters, cf. :meth:`~FiniteStateMachine.construct_final_word_out` for inplace version
:meth:`Automaton.determinisation` | Determinisation of an automaton
:meth:`~FiniteStateMachine.process` | Process input
:meth:`Automaton.process` | Process input of an automaton (output differs from general case)
:meth:`Transducer.process` | Process input of a transducer (output differs from general case)
:meth:`~FiniteStateMachine.iter_process` | Return process iterator
Simplification
^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.prepone_output` | Prepone output where possible
:meth:`~FiniteStateMachine.equivalence_classes` | List of equivalent states
:meth:`~FiniteStateMachine.quotient` | Quotient with respect to equivalence classes
:meth:`~FiniteStateMachine.merged_transitions` | Merge transitions while adding input
:meth:`~FiniteStateMachine.markov_chain_simplification` | Simplification of a Markov chain
:meth:`Automaton.minimization` | Minimization of an automaton
:meth:`Transducer.simplification` | Simplification of a transducer
Conversion
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.adjacency_matrix` | (Weighted) adjacency :class:`matrix <Matrix>`
:meth:`~FiniteStateMachine.graph` | Underlying :class:`DiGraph`
:meth:`~FiniteStateMachine.plot` | Plot
LaTeX output
++++++++++++
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.latex_options` | Set options
:meth:`~FiniteStateMachine.set_coordinates` | Set coordinates of the states
:meth:`~FiniteStateMachine.default_format_transition_label` | Default formatting of words in transition labels
:meth:`~FiniteStateMachine.format_letter_negative` | Format negative numbers as overlined number
:meth:`~FiniteStateMachine.format_transition_label_reversed` | Format words in transition labels in reversed order
:class:`FSMState`
-----------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:attr:`~FSMState.final_word_out` | Final output of a state
:attr:`~FSMState.is_final` | Describes whether a state is final or not
:attr:`~FSMState.is_initial` | Describes whether a state is initial or not
:meth:`~FSMState.label` | Label of a state
:meth:`~FSMState.relabeled` | Returns a relabeled deep copy of a state
:meth:`~FSMState.fully_equal` | Checks whether two states are fully equal (including all attributes)
:class:`FSMTransition`
----------------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:attr:`~FSMTransition.from_state` | State in which transition starts
:attr:`~FSMTransition.to_state` | State in which transition ends
:attr:`~FSMTransition.word_in` | Input word of the transition
:attr:`~FSMTransition.word_out` | Output word of the transition
:meth:`~FSMTransition.deepcopy` | Returns a deep copy of the transition
Helper Functions
----------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:func:`equal` | Checks whether all elements of ``iterator`` are equal
:func:`full_group_by` | Group iterable by values of some key
:func:`startswith` | Determine whether list starts with the given prefix
:func:`FSMLetterSymbol` | Returns a string associated to the input letter
:func:`FSMWordSymbol` | Returns a string associated to a word
:func:`is_FSMState` | Tests whether an object inherits from :class:`FSMState`
:func:`is_FSMTransition` | Tests whether an object inherits from :class:`FSMTransition`
:func:`is_FiniteStateMachine` | Tests whether an object inherits from :class:`FiniteStateMachine`
:func:`duplicate_transition_ignore` | Default function for handling duplicate transitions
:func:`duplicate_transition_raise_error` | Raise error when inserting a duplicate transition
:func:`duplicate_transition_add_input` | Add input when inserting a duplicate transition
.. _finite_state_machine_examples:
Examples
========
We start with a general :class:`FiniteStateMachine`. Later there will
be also an :class:`Automaton` and a :class:`Transducer`.
A simple finite state machine
-----------------------------
We can easily create a finite state machine by
::
sage: fsm = FiniteStateMachine()
sage: fsm
Finite state machine with 0 states
By default this is the empty finite state machine, so not very
interesting. Let's create and add some states and transitions::
sage: day = fsm.add_state('day')
sage: night = fsm.add_state('night')
sage: sunrise = fsm.add_transition(night, day)
sage: sunset = fsm.add_transition(day, night)
Let us look at ``sunset`` more closely::
sage: sunset
Transition from 'day' to 'night': -|-
Note that we could also have created and added the transitions directly
by::
sage: fsm.add_transition('day', 'night')
Transition from 'day' to 'night': -|-
This would have added the states automatically, since they are
present in the transitions.
Anyhow, we got the following finite state machine::
sage: fsm
Finite state machine with 2 states
We can also obtain the underlying directed graph by
::
sage: fsm.graph()
Digraph on 2 vertices
To visualize a finite state machine, we can use
:func:`~sage.misc.latex.latex` and run the result through LaTeX,
see the section on :ref:`finite_state_machine_LaTeX_output`
below.
Alternatively, we could have created the finite state machine above
simply by
::
sage: FiniteStateMachine([('night', 'day'), ('day', 'night')])
Finite state machine with 2 states
See :class:`FiniteStateMachine` for a lot of possibilities to create
finite state machines.
.. _finite_state_machine_recognizing_NAFs_example:
A simple Automaton (recognizing NAFs)
---------------------------------------
We want to build an automaton which recognizes non-adjacent forms
(NAFs), i.e., sequences which have no adjacent non-zeros.
We use `0`, `1`, and `-1` as digits::
sage: NAF = Automaton(
....: {'A': [('A', 0), ('B', 1), ('B', -1)], 'B': [('A', 0)]})
sage: NAF.state('A').is_initial = True
sage: NAF.state('A').is_final = True
sage: NAF.state('B').is_final = True
sage: NAF
Automaton with 2 states
Of course, we could have specified the initial and final states
directly in the definition of ``NAF`` by ``initial_states=['A']`` and
``final_states=['A', 'B']``.
So let's test the automaton with some input::
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False # activate new output behavior
sage: NAF([0])
True
sage: NAF([0, 1])
True
sage: NAF([1, -1])
False
sage: NAF([0, -1, 0, 1])
True
sage: NAF([0, -1, -1, -1, 0])
False
sage: NAF([-1, 0, 0, 1, 1])
False
Alternatively, we could call that by
::
sage: NAF.process([0, -1, 0, 1])
(True, 'B')
which gives additionally the state in which we arrived.
.. _finite_state_machine_LaTeX_output:
LaTeX output
------------
We can visualize a finite state machine by converting it to LaTeX by
using the usual function :func:`~sage.misc.latex.latex`. Within LaTeX,
TikZ is used for typesetting the graphics, see the
:wikipedia:`PGF/TikZ`.
::
sage: print latex(NAF)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state, accepting, initial] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
\node[state, accepting] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
\path[->] (v0) edge[loop above] node {$0$} ();
\path[->] (v0.185.00) edge node[rotate=360.00, anchor=north] {$1, -1$} (v1.355.00);
\path[->] (v1.5.00) edge node[rotate=0.00, anchor=south] {$0$} (v0.175.00);
\end{tikzpicture}
We can turn this into a graphical representation.
::
sage: view(NAF) # not tested
To actually see this, use the live documentation in the Sage notebook
and execute the cells in this and the previous section.
Several options can be set to customize the output, see
:meth:`~FiniteStateMachine.latex_options` for details. In particular,
we use :meth:`~FiniteStateMachine.format_letter_negative` to format
`-1` as `\overline{1}`.
::
sage: NAF.latex_options(
....: coordinates={'A': (0, 0),
....: 'B': (6, 0)},
....: initial_where={'A': 'below'},
....: format_letter=NAF.format_letter_negative,
....: format_state_label=lambda x:
....: r'\mathcal{%s}' % x.label()
....: )
sage: print latex(NAF)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state, accepting, initial, initial where=below] (v0) at (0.000000, 0.000000) {$\mathcal{A}$};
\node[state, accepting] (v1) at (6.000000, 0.000000) {$\mathcal{B}$};
\path[->] (v0) edge[loop above] node {$0$} ();
\path[->] (v0.5.00) edge node[rotate=0.00, anchor=south] {$1, \overline{1}$} (v1.175.00);
\path[->] (v1.185.00) edge node[rotate=360.00, anchor=north] {$0$} (v0.355.00);
\end{tikzpicture}
sage: view(NAF) # not tested
A simple transducer (binary inverter)
-------------------------------------
Let's build a simple transducer, which rewrites a binary word by
inverting each bit::
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
We can look at the states and transitions::
sage: inverter.states()
['A']
sage: for t in inverter.transitions():
....: print t
Transition from 'A' to 'A': 0|1
Transition from 'A' to 'A': 1|0
Now we apply a word to it and see what the transducer does::
sage: inverter([0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1])
[1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0]
``True`` means that we landed in a final state, that state is labeled
``'A'``, and we also got an output.
A transducer which performs division by `3` in binary
-----------------------------------------------------
Now we build a transducer, which divides a binary number by `3`.
The labels of the states are the remainder of the division.
The transition function is
::
sage: def f(state_from, read):
....: if state_from + read <= 1:
....: state_to = 2*state_from + read
....: write = 0
....: else:
....: state_to = 2*state_from + read - 3
....: write = 1
....: return (state_to, write)
which assumes reading a binary number from left to right.
We get the transducer with
::
sage: D = Transducer(f, initial_states=[0], final_states=[0],
....: input_alphabet=[0, 1])
Let us try to divide `12` by `3`::
sage: D([1, 1, 0, 0])
[0, 1, 0, 0]
Now we want to divide `13` by `3`::
sage: D([1, 1, 0, 1])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
The raised ``ValueError``
means `13` is not divisible by `3`.
.. _finite_state_machine_gray_code_example:
Gray Code
---------
The Gray code is a binary :wikipedia:`numeral system <Numeral_system>`
where two successive values differ in only one bit, cf. the
:wikipedia:`Gray_code`. The Gray code of an integer `n` is obtained by
a bitwise xor between the binary expansion of `n` and the binary
expansion of `\lfloor n/2\rfloor`; the latter corresponds to a
shift by one position in binary.
The purpose of this example is to construct a transducer converting the
standard binary expansion to the Gray code by translating this
construction into operations with transducers.
For this construction, the least significant digit is at
the left-most position.
Note that it is easier to shift everything to
the right first, i.e., multiply by `2` instead of building
`\lfloor n/2\rfloor`. Then, we take the input xor with the right
shift of the input and forget the first letter.
We first construct a transducer shifting the binary expansion to the
right. This requires storing the previously read digit in a state.
::
sage: def shift_right_transition(state, digit):
....: if state == 'I':
....: return (digit, None)
....: else:
....: return (digit, state)
sage: shift_right_transducer = Transducer(
....: shift_right_transition,
....: initial_states=['I'],
....: input_alphabet=[0, 1],
....: final_states=[0])
sage: shift_right_transducer.transitions()
[Transition from 'I' to 0: 0|-,
Transition from 'I' to 1: 1|-,
Transition from 0 to 0: 0|0,
Transition from 0 to 1: 1|0,
Transition from 1 to 0: 0|1,
Transition from 1 to 1: 1|1]
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False
sage: shift_right_transducer([0, 1, 1, 0])
[0, 1, 1]
sage: shift_right_transducer([1, 0, 0])
[1, 0]
The output of the shifts above look a bit weird (from a right-shift
transducer, we would expect, for example, that ``[1, 0, 0]`` was
mapped to ``[0, 1, 0]``), since we write ``None`` instead of the zero
at the left. Further, note that only `0` is listed as a final state
as we have to enforce that a most significant zero is read as the last
input letter in order to flush the last digit::
sage: shift_right_transducer([0, 1, 0, 1])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
Next, we construct the transducer performing the xor operation. We also
have to take ``None`` into account as our ``shift_right_transducer``
waits one iteration until it starts writing output. This corresponds
with our intention to forget the first letter.
::
sage: def xor_transition(state, digits):
....: if digits[0] is None or digits[1] is None:
....: return (0, None)
....: else:
....: return (0, digits[0].__xor__(digits[1]))
sage: from itertools import product
sage: xor_transducer = Transducer(
....: xor_transition,
....: initial_states=[0],
....: final_states=[0],
....: input_alphabet=list(product([None, 0, 1], [0, 1])))
sage: xor_transducer.transitions()
[Transition from 0 to 0: (None, 0)|-,
Transition from 0 to 0: (None, 1)|-,
Transition from 0 to 0: (0, 0)|0,
Transition from 0 to 0: (0, 1)|1,
Transition from 0 to 0: (1, 0)|1,
Transition from 0 to 0: (1, 1)|0]
sage: xor_transducer([(None, 0), (None, 1), (0, 0), (0, 1), (1, 0), (1, 1)])
[0, 1, 1, 0]
sage: xor_transducer([(0, None)])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
The transducer computing the Gray code is then constructed as a
:meth:`cartesian product <Transducer.cartesian_product>` between the
shifted version and the original input (represented here by the
``shift_right_transducer`` and the :meth:`identity transducer
<sage.combinat.finite_state_machine_generators.TransducerGenerators.Identity>`,
respectively). This cartesian product is then fed into the
``xor_transducer`` as a :meth:`composition
<FiniteStateMachine.composition>` of transducers.
As described in :meth:`Transducer.cartesian_product`, we have to
temporarily set
``finite_state_machine.FSMOldCodeTransducerCartesianProduct`` to
``False`` in order to disable backwards compatible code.
::
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
sage: product_transducer = shift_right_transducer.cartesian_product(transducers.Identity([0, 1]))
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = True
sage: Gray_transducer = xor_transducer(product_transducer)
We use :meth:`~FiniteStateMachine.construct_final_word_out` to make sure that all output
is written; otherwise, we would have to make sure that a sufficient number of trailing
zeros is read.
::
sage: Gray_transducer.construct_final_word_out([0])
sage: Gray_transducer.transitions()
[Transition from (('I', 0), 0) to ((0, 0), 0): 0|-,
Transition from (('I', 0), 0) to ((1, 0), 0): 1|-,
Transition from ((0, 0), 0) to ((0, 0), 0): 0|0,
Transition from ((0, 0), 0) to ((1, 0), 0): 1|1,
Transition from ((1, 0), 0) to ((0, 0), 0): 0|1,
Transition from ((1, 0), 0) to ((1, 0), 0): 1|0]
There is a :meth:`prepackaged transducer
<sage.combinat.finite_state_machine_generators.TransducerGenerators.GrayCode>`
for Gray code, let's see whether they agree. We have to use
:meth:`~FiniteStateMachine.relabeled` to relabel our states with
integers.
::
sage: constructed = Gray_transducer.relabeled()
sage: packaged = transducers.GrayCode()
sage: constructed == packaged
True
Finally, we check that this indeed computes the Gray code of the first
10 non-negative integers.
::
sage: for n in srange(10):
....: Gray_transducer(n.bits())
[]
[1]
[1, 1]
[0, 1]
[0, 1, 1]
[1, 1, 1]
[1, 0, 1]
[0, 0, 1]
[0, 0, 1, 1]
[1, 0, 1, 1]
Using the hook-functions
------------------------
Let's use the previous example "division by `3`" to demonstrate the
optional state and transition parameters ``hook``.
First, we define, what those functions should do. In our case, this is
just saying in which state we are and which transition we take
::
sage: def state_hook(state, process):
....: print "We are now in State %s." % (state.label(),)
sage: from sage.combinat.finite_state_machine import FSMWordSymbol
sage: def transition_hook(transition, process):
....: print ("Currently we go from %s to %s, "
....: "reading %s and writing %s." % (
....: transition.from_state, transition.to_state,
....: FSMWordSymbol(transition.word_in),
....: FSMWordSymbol(transition.word_out)))
Now, let's add these hook-functions to the existing transducer::
sage: for s in D.iter_states():
....: s.hook = state_hook
sage: for t in D.iter_transitions():
....: t.hook = transition_hook
Rerunning the process again now gives the following output::
sage: D.process([1, 1, 0, 1])
We are now in State 0.
Currently we go from 0 to 1, reading 1 and writing 0.
We are now in State 1.
Currently we go from 1 to 0, reading 1 and writing 1.
We are now in State 0.
Currently we go from 0 to 0, reading 0 and writing 0.
We are now in State 0.
Currently we go from 0 to 1, reading 1 and writing 0.
We are now in State 1.
(False, 1, [0, 1, 0, 0])
The example above just explains the basic idea of using
hook-functions. In the following, we will use those hooks more seriously.
Detecting sequences with same number of `0` and `1`
---------------------------------------------------
Suppose we have a binary input and want to accept all sequences with
the same number of `0` and `1`. This cannot be done with a finite
automaton. Anyhow, we can make usage of the hook functions to extend
our finite automaton by a counter::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
sage: C = FiniteStateMachine()
sage: def update_counter(state, process):
....: l = process.read_letter()
....: process.fsm.counter += 1 if l == 1 else -1
....: if process.fsm.counter > 0:
....: next_state = 'positive'
....: elif process.fsm.counter < 0:
....: next_state = 'negative'
....: else:
....: next_state = 'zero'
....: return FSMTransition(state, process.fsm.state(next_state),
....: l, process.fsm.counter)
sage: C.add_state(FSMState('zero', hook=update_counter,
....: is_initial=True, is_final=True))
'zero'
sage: C.add_state(FSMState('positive', hook=update_counter))
'positive'
sage: C.add_state(FSMState('negative', hook=update_counter))
'negative'
Now, let's input some sequence::
sage: C.counter = 0; C([1, 1, 1, 1, 0, 0])
(False, 'positive', [1, 2, 3, 4, 3, 2])
The result is False, since there are four `1` but only two `0`. We
land in the state ``positive`` and we can also see the values of the
counter in each step.
Let's try some other examples::
sage: C.counter = 0; C([1, 1, 0, 0])
(True, 'zero', [1, 2, 1, 0])
sage: C.counter = 0; C([0, 1, 0, 0])
(False, 'negative', [-1, 0, -1, -2])
See also methods :meth:`Automaton.process` and
:meth:`Transducer.process` (or even
:meth:`FiniteStateMachine.process`), the explanation of the parameter
``hook`` and the examples in :class:`FSMState` and
:class:`FSMTransition`, and the description and examples in
:class:`FSMProcessIterator` for more information on processing and
hooks.
AUTHORS:
- Daniel Krenn (2012-03-27): initial version
- Clemens Heuberger (2012-04-05): initial version
- Sara Kropf (2012-04-17): initial version
- Clemens Heuberger (2013-08-21): release candidate for Sage patch
- Daniel Krenn (2013-08-21): release candidate for Sage patch
- Sara Kropf (2013-08-21): release candidate for Sage patch
- Clemens Heuberger (2013-09-02): documentation improved
- Daniel Krenn (2013-09-13): comments from trac worked in
- Clemens Heuberger (2013-11-03): output (labels) of determinisation,
product, composition, etc. changed (for consistency),
representation of state changed, documentation improved
- Daniel Krenn (2013-11-04): whitespaces in documentation corrected
- Clemens Heuberger (2013-11-04): full_group_by added
- Daniel Krenn (2013-11-04): next release candidate for Sage patch
- Sara Kropf (2013-11-08): fix for adjacency matrix
- Clemens Heuberger (2013-11-11): fix for prepone_output
- Daniel Krenn (2013-11-11): comments from trac #15078 included:
docstring of FiniteStateMachine rewritten, Automaton and Transducer
inherited from FiniteStateMachine
- Daniel Krenn (2013-11-25): documentation improved according to
comments from trac #15078
- Clemens Heuberger, Daniel Krenn, Sara Kropf (2014-02-21--2014-07-18):
A huge bunch of improvements. Details see
#15841, #15847, #15848, #15849, #15850, #15922, #15923, #15924,
#15925, #15928, #15960, #15961, #15962, #15963, #15975, #16016,
#16024, #16061, #16128, #16132, #16138, #16139, #16140, #16143,
#16144, #16145, #16146, #16191, #16200, #16205, #16206, #16207,
#16229, #16253, #16254, #16255, #16266, #16355, #16357, #16387,
#16425, #16539, #16555, #16557, #16588, #16589, #16666, #16668,
#16674, #16675, #16677.
ACKNOWLEDGEMENT:
- Clemens Heuberger, Daniel Krenn and Sara Kropf are supported by the
Austrian Science Fund (FWF): P 24644-N26.
Methods
=======
"""
#*****************************************************************************
# Copyright (C) 2012--2014 Clemens Heuberger <clemens.heuberger@aau.at>
# 2012--2014 Daniel Krenn <dev@danielkrenn.at>
# 2012--2014 Sara Kropf <sara.kropf@aau.at>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.structure.sage_object import SageObject
from sage.graphs.digraph import DiGraph
from sage.matrix.constructor import matrix
from sage.rings.integer_ring import ZZ
from sage.rings.real_mpfr import RR
from sage.symbolic.ring import SR
from sage.calculus.var import var
from sage.misc.cachefunc import cached_function
from sage.misc.latex import latex
from sage.misc.misc import verbose
from sage.functions.trig import cos, sin, atan2
from sage.symbolic.constants import pi
from copy import copy
from copy import deepcopy
import itertools
from itertools import imap
from collections import defaultdict, OrderedDict
def full_group_by(l, key=lambda x: x):
    """
    Group iterable ``l`` by values of ``key``.

    INPUT:

    - iterable ``l``

    - key function ``key``

    OUTPUT:

    A list of pairs ``(k, elements)`` such that ``key(e)=k`` for all
    ``e`` in ``elements``. The pairs are returned in the order of the
    first occurrence of each key in ``l``; within each pair, the
    elements keep their original relative order.

    This is similar to ``itertools.groupby`` except that lists are
    returned instead of iterables and no prior sorting is required.

    We do not require

    - that the keys are sortable (in contrast to the
      approach via ``sorted`` and ``itertools.groupby``) and

    - that the keys are hashable (in contrast to the
      implementation proposed in `<http://stackoverflow.com/a/15250161>`_).

    However, it is required

    - that distinct keys have distinct ``str``-representations.

    The implementation is inspired by
    `<http://stackoverflow.com/a/15250161>`_, but non-hashable keys are
    allowed.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import full_group_by
        sage: t = [2/x, 1/x, 2/x]
        sage: r = full_group_by([0, 1, 2], key=lambda i:t[i])
        sage: sorted(r, key=lambda p:p[1])
        [(2/x, [0, 2]), (1/x, [1])]
        sage: from itertools import groupby
        sage: for k, elements in groupby(sorted([0, 1, 2],
        ....:                            key=lambda i:t[i]),
        ....:                            key=lambda i:t[i]):
        ....:     print k, list(elements)
        2/x [0]
        1/x [1]
        2/x [2]

    Note that the behavior is different from ``itertools.groupby``
    because neither `1/x<2/x` nor `2/x<1/x` does hold.

    Here, the result ``r`` has been sorted in order to guarantee a
    consistent order for the doctest suite.
    """
    # Group by the str-representation of the key so that non-hashable
    # keys can be used; original_keys maps the representation back to
    # the actual key object.
    elements = OrderedDict()  # str(key) -> list of grouped items
    original_keys = {}        # str(key) -> key object
    for item in l:
        k = key(item)
        s = str(k)
        if s in original_keys:
            # Distinct keys with equal str() would be merged silently;
            # refuse to do that.
            if original_keys[s] != k:
                raise ValueError("Two distinct elements with representation "
                                 "%s " % s)
        else:
            original_keys[s] = k
        elements.setdefault(s, []).append(item)
    # OrderedDict preserves first-occurrence order, making the output
    # deterministic (the previous defaultdict implementation did not).
    return [(original_keys[s], values) for (s, values) in elements.items()]
def equal(iterator):
    """
    Checks whether all elements of ``iterator`` are equal.

    INPUT:

    - ``iterator`` -- an iterator of the elements to check

    OUTPUT:

    ``True`` or ``False``.

    This implements `<http://stackoverflow.com/a/3844832/1052778>`_.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import equal
        sage: equal([0, 0, 0])
        True
        sage: equal([0, 1, 0])
        False
        sage: equal([])
        True
        sage: equal(iter([None, None]))
        True

    We can test other properties of the elements than the elements
    themselves. In the following example, we check whether all tuples
    have the same lengths::

        sage: equal(len(x) for x in [(1, 2), (2, 3), (3, 1)])
        True
        sage: equal(len(x) for x in [(1, 2), (1, 2, 3), (3, 1)])
        False
    """
    it = iter(iterator)
    try:
        reference = next(it)
    except StopIteration:
        # An empty iterator is vacuously "all equal".
        return True
    # Compare every remaining element against the first one.
    return all(element == reference for element in it)
def startswith(list, prefix):
    """
    Determine whether list starts with the given prefix.

    INPUT:

    - ``list`` -- list

    - ``prefix`` -- list representing the prefix

    OUTPUT:

    ``True`` or ``False``.

    Similar to :meth:`str.startswith`.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import startswith
        sage: startswith([1, 2, 3], [1, 2])
        True
        sage: startswith([1], [1, 2])
        False
        sage: startswith([1, 3, 2], [1, 2])
        False
    """
    # Compare the prefix against the slice of equal length; an empty
    # prefix therefore always matches.
    return prefix == list[:len(prefix)]
#*****************************************************************************

# Symbol used in string representations to denote the empty word
# (e.g. for transitions without input or output letters).
FSMEmptyWordSymbol = '-'
# LaTeX code for the empty word and the end-of-word marker.
EmptyWordLaTeX = r'\varepsilon'
EndOfWordLaTeX = r'\$'
# Backwards-compatibility switches for deprecated behavior; see the
# referenced trac tickets.
FSMOldCodeTransducerCartesianProduct = True
FSMOldProcessOutput = True  # See trac #16132 (deprecation).
# Angles (in degrees) corresponding to the textual placement keywords
# used when drawing initial-state arrows in the TikZ output.
tikz_automata_where = {"right": 0,
                       "above": 90,
                       "left": 180,
                       "below": 270}
def FSMLetterSymbol(letter):
    """
    Returns a string associated to the input letter.

    INPUT:

    - ``letter`` -- the input letter or ``None`` (representing the
      empty word).

    OUTPUT:

    If ``letter`` is ``None`` the symbol for the empty word
    ``FSMEmptyWordSymbol`` is returned, otherwise the string
    associated to the letter.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMLetterSymbol
        sage: FSMLetterSymbol(0)
        '0'
        sage: FSMLetterSymbol(None)
        '-'
    """
    # None stands for the empty word and is rendered by the
    # module-wide symbol; everything else via its repr.
    if letter is None:
        return FSMEmptyWordSymbol
    return repr(letter)
def FSMWordSymbol(word):
    """
    Returns a string of ``word``. It may returns the symbol of the
    empty word ``FSMEmptyWordSymbol``.

    INPUT:

    - ``word`` -- the input word.

    OUTPUT:

    A string of ``word``.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMWordSymbol
        sage: FSMWordSymbol([0, 1, 1])
        '0,1,1'
    """
    if not isinstance(word, list):
        # A single letter is rendered directly.
        return FSMLetterSymbol(word)
    if not word:
        return FSMEmptyWordSymbol
    # Render each letter and join with commas.
    return ','.join(FSMLetterSymbol(letter) for letter in word)
#*****************************************************************************
def is_FSMState(S):
    """
    Tests whether or not ``S`` inherits from :class:`FSMState`.

    INPUT:

    - ``S`` -- an arbitrary object.

    OUTPUT:

    ``True`` or ``False``.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FSMState, FSMState
        sage: is_FSMState(FSMState('A'))
        True
    """
    return isinstance(S, FSMState)
class FSMState(SageObject):
"""
Class for a state of a finite state machine.
INPUT:
- ``label`` -- the label of the state.
- ``word_out`` -- (default: ``None``) a word that is written when
the state is reached.
- ``is_initial`` -- (default: ``False``)
- ``is_final`` -- (default: ``False``)
- ``final_word_out`` -- (default: ``None``) a word that is written when
the state is reached as the last state of some input; only for final
states.
- ``hook`` -- (default: ``None``) A function which is called when
the state is reached during processing input. It takes two input
parameters: the first is the current state (to allow using the same
hook for several states), the second is the current process
iterator object (to have full access to everything; e.g. the
next letter from the input tape can be read in). It can output
the next transition, i.e. the transition to take next. If it
returns ``None`` the process iterator chooses. Moreover, this
function can raise a ``StopIteration`` exception to stop
processing of a finite state machine the input immediately. See
also the example below.
- ``color`` -- (default: ``None``) In order to distinguish states,
they can be given an arbitrary "color" (an arbitrary object).
This is used in :meth:`FiniteStateMachine.equivalence_classes`:
states of different colors are never considered to be
equivalent. Note that :meth:`Automaton.determinisation` requires
that ``color`` is hashable.
- ``allow_label_None`` -- (default: ``False``) If ``True`` allows also
``None`` as label. Note that a state with label ``None`` is used in
:class:`FSMProcessIterator`.
OUTPUT:
Returns a state of a finite state machine.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('state 1', word_out=0, is_initial=True)
sage: A
'state 1'
sage: A.label()
'state 1'
sage: B = FSMState('state 2')
sage: A == B
False
We can also define a final output word of a final state which is
used if the input of a transducer leads to this state. Such final
output words are used in subsequential transducers. ::
sage: C = FSMState('state 3', is_final=True, final_word_out='end')
sage: C.final_word_out
['end']
The final output word can be a single letter, ``None`` or a list of
letters::
sage: A = FSMState('A')
sage: A.is_final = True
sage: A.final_word_out = 2
sage: A.final_word_out
[2]
sage: A.final_word_out = [2, 3]
sage: A.final_word_out
[2, 3]
Only final states can have a final output word which is not
``None``::
sage: B = FSMState('B')
sage: B.final_word_out is None
True
sage: B.final_word_out = 2
Traceback (most recent call last):
...
ValueError: Only final states can have a final output word,
but state B is not final.
Setting the ``final_word_out`` of a final state to ``None`` is the
same as setting it to ``[]`` and is also the default for a final
state::
sage: C = FSMState('C', is_final=True)
sage: C.final_word_out
[]
sage: C.final_word_out = None
sage: C.final_word_out
[]
sage: C.final_word_out = []
sage: C.final_word_out
[]
It is not allowed to use ``None`` as a label::
sage: from sage.combinat.finite_state_machine import FSMState
sage: FSMState(None)
Traceback (most recent call last):
...
ValueError: Label None reserved for a special state,
choose another label.
This can be overridden by::
sage: FSMState(None, allow_label_None=True)
None
Note that :meth:`Automaton.determinisation` requires that ``color``
is hashable::
sage: A = Automaton([[0, 0, 0]], initial_states=[0])
sage: A.state(0).color = []
sage: A.determinisation()
Traceback (most recent call last):
...
TypeError: unhashable type: 'list'
sage: A.state(0).color = ()
sage: A.determinisation()
Automaton with 1 states
We can use a hook function of a state to stop processing. This is
done by raising a ``StopIteration`` exception. The following code
demonstrates this::
sage: T = Transducer([(0, 1, 9, 'a'), (1, 2, 9, 'b'),
....: (2, 3, 9, 'c'), (3, 4, 9, 'd')],
....: initial_states=[0],
....: final_states=[4],
....: input_alphabet=[9])
sage: def stop(current_state, process_iterator):
....: raise StopIteration()
sage: T.state(3).hook = stop
sage: T.process([9, 9, 9, 9])
(False, 3, ['a', 'b', 'c'])
"""
is_initial = False
"""
Describes whether the state is initial.
EXAMPLES::
sage: T = Automaton([(0,0,0)])
sage: T.initial_states()
[]
sage: T.state(0).is_initial = True
sage: T.initial_states()
[0]
"""
def __init__(self, label, word_out=None,
is_initial=False, is_final=False, final_word_out=None,
hook=None, color=None, allow_label_None=False):
"""
See :class:`FSMState` for more information.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: FSMState('final', is_final=True)
'final'
TESTS::
sage: A = FSMState('A', is_final=True)
sage: A.final_word_out
[]
sage: A.is_final = True
sage: A = FSMState('A', is_final=True, final_word_out='end')
sage: A.final_word_out
['end']
sage: A = FSMState('A', is_final=True,
....: final_word_out=['e', 'n', 'd'])
sage: A.final_word_out
['e', 'n', 'd']
sage: A = FSMState('A', is_final=True, final_word_out=[])
sage: A.final_word_out
[]
sage: A = FSMState('A', is_final=True, final_word_out=None)
sage: A.final_word_out
[]
sage: A = FSMState('A', is_final=False)
sage: A.final_word_out is None
True
sage: A.is_final = False
sage: A = FSMState('A', is_final=False, final_word_out='end')
Traceback (most recent call last):
...
ValueError: Only final states can have a final output word,
but state A is not final.
sage: A = FSMState('A', is_final=False,
....: final_word_out=['e', 'n', 'd'])
Traceback (most recent call last):
...
ValueError: Only final states can have a final output word,
but state A is not final.
sage: A = FSMState('A', is_final=False, final_word_out=None)
sage: A.final_word_out is None
True
sage: A = FSMState('A', is_final=False, final_word_out=[])
Traceback (most recent call last):
...
ValueError: Only final states can have a final output word,
but state A is not final.
"""
if not allow_label_None and label is None:
raise ValueError("Label None reserved for a special state, "
"choose another label.")
self._label_ = label
if isinstance(word_out, list):
self.word_out = word_out
elif word_out is not None:
self.word_out = [word_out]
else:
self.word_out = []
self.is_initial = is_initial
self._final_word_out_ = None
self.is_final = is_final
self.final_word_out = final_word_out
if hook is not None:
if hasattr(hook, '__call__'):
self.hook = hook
else:
raise TypeError('Wrong argument for hook.')
self.color = color
def __lt__(self, other):
"""
Returns True if label of ``self`` is less than label of
``other``.
INPUT:
- `other` -- a state.
OUTPUT:
True or False.
EXAMPLE::
sage: from sage.combinat.finite_state_machine import FSMState
sage: FSMState(0) < FSMState(1)
True
"""
return self.label() < other.label()
    @property
    def final_word_out(self):
        """
        The final output word of a final state which is written if the
        state is reached as the last state of the input of the finite
        state machine. For a non-final state, the value is ``None``.

        ``final_word_out`` can be a single letter, a list or ``None``,
        but for a final-state, it is always saved as a list.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_final=True, final_word_out=2)
            sage: A.final_word_out
            [2]
            sage: A.final_word_out = 3
            sage: A.final_word_out
            [3]
            sage: A.final_word_out = [3, 4]
            sage: A.final_word_out
            [3, 4]
            sage: A.final_word_out = None
            sage: A.final_word_out
            []
            sage: B = FSMState('B')
            sage: B.final_word_out is None
            True

        A non-final state cannot have a final output word::

            sage: B.final_word_out = [3, 4]
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final
            output word, but state B is not final.
        """
        # None encodes "non-final"; a final state always stores a list
        # (possibly empty).
        return self._final_word_out_
@final_word_out.setter
def final_word_out(self, final_word_out):
"""
Sets the value of the final output word of a final state.
INPUT:
- ``final_word_out`` -- a list, any element or ``None``.
OUTPUT:
Nothing.
TESTS::
sage: from sage.combinat.finite_state_machine import FSMState
sage: B = FSMState('B')
sage: B.final_word_out = []
Traceback (most recent call last):
...
ValueError: Only final states can have a final
output word, but state B is not final.
sage: B.final_word_out = None
sage: B.final_word_out is None
True
"""
if not self.is_final:
if final_word_out is not None:
raise ValueError("Only final states can have a "
"final output word, but state %s is not final."
% (self.label()))
else:
self._final_word_out_ = None
elif isinstance(final_word_out, list):
self._final_word_out_ = final_word_out
elif final_word_out is not None:
self._final_word_out_ = [final_word_out]
else:
self._final_word_out_ = []
@property
def is_final(self):
"""
Describes whether the state is final or not.
``True`` if the state is final and ``False`` otherwise.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True, final_word_out=3)
sage: A.is_final
True
sage: A.is_final = False
Traceback (most recent call last):
...
ValueError: State A cannot be non-final, because it has a
final output word. Only final states can have a final output
word.
sage: A.final_word_out = None
sage: A.is_final = False
sage: A.is_final
False
"""
return (self.final_word_out is not None)
@is_final.setter
def is_final(self, is_final):
"""
Defines the state as a final state or a non-final state.
INPUT:
- ``is_final`` -- ``True`` if the state should be final and
``False`` otherwise.
OUTPUT:
Nothing.
TESTS::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True)
sage: A.final_word_out
[]
sage: A.is_final = False
sage: A.final_word_out is None
True
sage: A = FSMState('A', is_final=True, final_word_out='a')
sage: A.is_final = False
Traceback (most recent call last):
...
ValueError: State A cannot be non-final, because it has a
final output word. Only final states can have a final output
word.
sage: A = FSMState('A', is_final=True, final_word_out=[])
sage: A.is_final = False
sage: A.final_word_out is None
True
"""
if is_final and self.final_word_out is None:
self._final_word_out_ = []
elif not is_final:
if not self.final_word_out:
self._final_word_out_ = None
else:
raise ValueError("State %s cannot be non-final, because it "
"has a final output word. Only final states "
"can have a final output word. "
% self.label())
    def label(self):
        """
        Returns the label of the state.

        INPUT:

        Nothing.

        OUTPUT:

        The label of the state.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('state')
            sage: A.label()
            'state'
        """
        # The label is stored in a private attribute; it is also used
        # for hashing (see __hash__), so it must not change in place.
        return self._label_
def __copy__(self):
"""
Returns a (shallow) copy of the state.
INPUT:
Nothing.
OUTPUT:
A new state.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: copy(A)
'A'
"""
new = FSMState(self.label(), self.word_out,
self.is_initial, self.is_final,
color=self.color,
final_word_out=self.final_word_out)
if hasattr(self, 'hook'):
new.hook = self.hook
return new
copy = __copy__
def __deepcopy__(self, memo):
"""
Returns a deep copy of the state.
INPUT:
- ``memo`` -- a dictionary storing already processed elements.
OUTPUT:
A new state.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: deepcopy(A)
'A'
"""
try:
label = self._deepcopy_relabel_
except AttributeError:
label = deepcopy(self.label(), memo)
new = FSMState(label, deepcopy(self.word_out, memo),
self.is_initial, self.is_final)
if hasattr(self, 'hook'):
new.hook = deepcopy(self.hook, memo)
new.color = deepcopy(self.color, memo)
new.final_word_out = deepcopy(self.final_word_out, memo)
return new
def deepcopy(self, memo=None):
"""
Returns a deep copy of the state.
INPUT:
- ``memo`` -- (default: ``None``) a dictionary storing already
processed elements.
OUTPUT:
A new state.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState((1, 3), color=[1, 2],
....: is_final=True, final_word_out=3)
sage: B = deepcopy(A)
sage: B
(1, 3)
sage: B.label == A.label
True
sage: B.label is A.label
False
sage: B.color == A.color
True
sage: B.color is A.color
False
sage: B.is_final == A.is_final
True
sage: B.is_final is A.is_final
True
sage: B.final_word_out == A.final_word_out
True
sage: B.final_word_out is A.final_word_out
False
"""
return deepcopy(self, memo)
def relabeled(self, label, memo=None):
"""
Returns a deep copy of the state with a new label.
INPUT:
- ``label`` -- the label of new state.
- ``memo`` -- (default: ``None``) a dictionary storing already
processed elements.
OUTPUT:
A new state.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: A.relabeled('B')
'B'
"""
self._deepcopy_relabel_ = label
new = deepcopy(self, memo)
del self._deepcopy_relabel_
return new
def __hash__(self):
"""
Returns a hash value for the object.
INPUT:
Nothing.
OUTPUT:
The hash of this state.
TESTS::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: hash(A) #random
-269909568
"""
return hash(self.label())
def _repr_(self):
"""
Returns the string "label".
INPUT:
Nothing.
OUTPUT:
A string.
TESTS:
sage: from sage.combinat.finite_state_machine import FSMState
sage: FSMState('A')._repr_()
"'A'"
"""
return repr(self.label())
def __eq__(left, right):
"""
Returns True if two states are the same, i.e., if they have
the same labels.
INPUT:
- ``left`` -- a state.
- ``right`` -- a state.
OUTPUT:
True or False.
Note that the hooks and whether the states are initial or
final are not checked. To fully compare two states (including
these attributes), use :meth:`.fully_equal`.
As only the labels are used when hashing a state, only the
labels can actually be compared by the equality relation.
Note that the labels are unique within one finite state machine,
so this may only lead to ambiguities when comparing states
belonging to different finite state machines.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: B = FSMState('A', is_initial=True)
sage: A == B
True
"""
if not is_FSMState(right):
return False
return left.label() == right.label()
def __ne__(left, right):
"""
Tests for inequality, complement of __eq__.
INPUT:
- ``left`` -- a state.
- ``right`` -- a state.
OUTPUT:
True or False.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True)
sage: B = FSMState('A', is_final=True)
sage: A != B
False
"""
return (not (left == right))
def fully_equal(left, right, compare_color=True):
"""
Checks whether two states are fully equal, i.e., including all
attributes except ``hook``.
INPUT:
- ``left`` -- a state.
- ``right`` -- a state.
- ``compare_color`` -- If ``True`` (default) colors are
compared as well, otherwise not.
OUTPUT:
``True`` or ``False``.
Note that usual comparison by ``==`` does only compare the labels.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: B = FSMState('A', is_initial=True)
sage: A.fully_equal(B)
False
sage: A == B
True
sage: A.is_initial = True; A.color = 'green'
sage: A.fully_equal(B)
False
sage: A.fully_equal(B, compare_color=False)
True
"""
color = not compare_color or left.color == right.color
return (left.__eq__(right) and
left.is_initial == right.is_initial and
left.is_final == right.is_final and
left.final_word_out == right.final_word_out and
left.word_out == right.word_out and
color)
def __nonzero__(self):
"""
Returns True.
INPUT:
Nothing.
OUTPUT:
True or False.
TESTS::
sage: from sage.combinat.finite_state_machine import FSMState
sage: FSMState('A').__nonzero__()
True
"""
return True # A state cannot be zero (see __init__)
#*****************************************************************************
def is_FSMTransition(T):
    """
    Test whether ``T`` is an instance of :class:`FSMTransition`.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FSMTransition, FSMTransition
        sage: is_FSMTransition(FSMTransition('A', 'B'))
        True
    """
    return isinstance(T, FSMTransition)
class FSMTransition(SageObject):
    """
    A transition of a finite state machine.

    INPUT:

    - ``from_state`` -- state from which transition starts.

    - ``to_state`` -- state in which transition ends.

    - ``word_in`` -- the input word of the transitions (when the
      finite state machine is used as automaton)

    - ``word_out`` -- the output word of the transitions (when the
      finite state machine is used as transducer)

    OUTPUT:

    A transition of a finite state machine.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
        sage: A = FSMState('A')
        sage: B = FSMState('B')
        sage: S = FSMTransition(A, B, 0, 1)
        sage: T = FSMTransition('A', 'B', 0, 1)
        sage: T == S
        True
        sage: U = FSMTransition('A', 'B', 0)
        sage: U == T
        False
    """

    from_state = None  # state from which the transition starts (read-only)

    to_state = None  # state in which the transition ends (read-only)

    word_in = None  # input word of the transition (read-only)

    word_out = None  # output word of the transition (read-only)

    def __init__(self, from_state, to_state,
                 word_in=None, word_out=None,
                 hook=None):
        """
        See :class:`FSMTransition` for more information.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 1)
            Transition from 'A' to 'B': 0|1
        """
        # Coerce plain labels to FSMState instances.
        if is_FSMState(from_state):
            self.from_state = from_state
        else:
            self.from_state = FSMState(from_state)
        if is_FSMState(to_state):
            self.to_state = to_state
        else:
            self.to_state = FSMState(to_state)

        # Normalize input/output words to lists: a list is kept as is,
        # a single letter is wrapped, ``None`` becomes the empty word.
        if isinstance(word_in, list):
            self.word_in = word_in
        else:
            self.word_in = [] if word_in is None else [word_in]
        if isinstance(word_out, list):
            self.word_out = word_out
        else:
            self.word_out = [] if word_out is None else [word_out]

        if hook is not None:
            if not hasattr(hook, '__call__'):
                raise TypeError('Wrong argument for hook.')
            self.hook = hook

    def __lt__(self, other):
        """
        Return ``True`` if ``self`` is less than ``other`` with respect
        to the key ``(self.from_state, self.word_in, self.to_state,
        self.word_out)``.

        INPUT:

        - `other` -- a transition.

        OUTPUT:

        True or False.

        EXAMPLE::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition(0,1,0,0) < FSMTransition(1,0,0,0)
            True
        """
        key_self = (self.from_state, self.word_in,
                    self.to_state, self.word_out)
        key_other = (other.from_state, other.word_in,
                     other.to_state, other.word_out)
        return key_self < key_other

    def __copy__(self):
        """
        Return a (shallow) copy of the transition.

        INPUT:

        Nothing.

        OUTPUT:

        A new transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: copy(t)
            Transition from 'A' to 'B': 0|-
        """
        clone = FSMTransition(self.from_state, self.to_state,
                              self.word_in, self.word_out)
        # ``hook`` is optional, so copy it only when it exists.
        if hasattr(self, 'hook'):
            clone.hook = self.hook
        return clone

    copy = __copy__

    def __deepcopy__(self, memo):
        """
        Return a deep copy of the transition.

        INPUT:

        - ``memo`` -- a dictionary storing already processed elements.

        OUTPUT:

        A new transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: deepcopy(t)
            Transition from 'A' to 'B': 0|-
        """
        clone = FSMTransition(deepcopy(self.from_state, memo),
                              deepcopy(self.to_state, memo),
                              deepcopy(self.word_in, memo),
                              deepcopy(self.word_out, memo))
        if hasattr(self, 'hook'):
            clone.hook = deepcopy(self.hook, memo)
        return clone

    def deepcopy(self, memo=None):
        """
        Return a deep copy of the transition.

        INPUT:

        - ``memo`` -- (default: ``None``) a dictionary storing already
          processed elements.

        OUTPUT:

        A new transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: deepcopy(t)
            Transition from 'A' to 'B': 0|-
        """
        return deepcopy(self, memo)

    def __hash__(self):
        """
        Since transitions are mutable, they should not be hashable, so
        we raise a type error.

        INPUT:

        Nothing.

        OUTPUT:

        The hash of this transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: hash(FSMTransition('A', 'B'))
            Traceback (most recent call last):
            ...
            TypeError: Transitions are mutable, and thus not hashable.
        """
        raise TypeError("Transitions are mutable, and thus not hashable.")

    def _repr_(self):
        """
        Represent a transition as from-state, to-state and its
        input/output labels.

        INPUT:

        Nothing.

        OUTPUT:

        A string.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 0)._repr_()
            "Transition from 'A' to 'B': 0|0"
        """
        return "Transition from %s to %s: %s" % (repr(self.from_state),
                                                 repr(self.to_state),
                                                 self._in_out_label_())

    def _in_out_label_(self):
        """
        Return the input and output of a transition as
        "word_in|word_out".

        INPUT:

        Nothing.

        OUTPUT:

        A string of the input and output labels.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 1)._in_out_label_()
            '0|1'
        """
        return "%s|%s" % (FSMWordSymbol(self.word_in),
                          FSMWordSymbol(self.word_out))

    def __eq__(left, right):
        """
        Return ``True`` if the two transitions are the same, i.e., if
        both go from the same state to the same state and read and
        write the same words.

        Note that the hooks are not checked.

        INPUT:

        - ``left`` -- a transition.

        - ``right`` -- a transition.

        OUTPUT:

        True or False.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
            sage: A = FSMState('A', is_initial=True)
            sage: t1 = FSMTransition('A', 'B', 0, 1)
            sage: t2 = FSMTransition(A, 'B', 0, 1)
            sage: t1 == t2
            True
        """
        # Comparison with non-transitions is an error by design here
        # (in contrast to FSMState.__eq__, which returns False).
        if not is_FSMTransition(right):
            raise TypeError('Only instances of FSMTransition ' \
                            'can be compared.')
        return (left.from_state == right.from_state
                and left.to_state == right.to_state
                and left.word_in == right.word_in
                and left.word_out == right.word_out)

    def __ne__(left, right):
        """
        Test for inequality; the complement of :meth:`.__eq__`.

        INPUT:

        - ``left`` -- a transition.

        - ``right`` -- a transition.

        OUTPUT:

        True or False.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
            sage: A = FSMState('A', is_initial=True)
            sage: t1 = FSMTransition('A', 'B', 0, 1)
            sage: t2 = FSMTransition(A, 'B', 0, 1)
            sage: t1 != t2
            False
        """
        equal = (left == right)
        return not equal

    def __nonzero__(self):
        """
        Return ``True``: a transition is never falsy.

        INPUT:

        Nothing.

        OUTPUT:

        True or False.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0).__nonzero__()
            True
        """
        # A transition cannot be zero (see __init__).
        return True
#*****************************************************************************
def is_FiniteStateMachine(FSM):
    """
    Test whether ``FSM`` is an instance of :class:`FiniteStateMachine`.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine
        sage: is_FiniteStateMachine(FiniteStateMachine())
        True
        sage: is_FiniteStateMachine(Automaton())
        True
        sage: is_FiniteStateMachine(Transducer())
        True
    """
    return isinstance(FSM, FiniteStateMachine)
def duplicate_transition_ignore(old_transition, new_transition):
    """
    Default function for handling duplicate transitions in finite
    state machines. This implementation ignores the occurrence.

    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.

    INPUT:

    - ``old_transition`` -- A transition in a finite state machine.

    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.

    OUTPUT:

    The same transition, unchanged.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_ignore
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_ignore(FSMTransition(0, 0, 1),
        ....:                             FSMTransition(0, 0, 1))
        Transition from 0 to 0: 1|-
    """
    # The duplicate is silently discarded; the existing transition wins.
    return old_transition
def duplicate_transition_raise_error(old_transition, new_transition):
    """
    Alternative function for handling duplicate transitions in finite
    state machines. This implementation raises a ``ValueError``.

    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.

    INPUT:

    - ``old_transition`` -- A transition in a finite state machine.

    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.

    OUTPUT:

    Nothing. A ``ValueError`` is raised.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_raise_error(FSMTransition(0, 0, 1),
        ....:                                  FSMTransition(0, 0, 1))
        Traceback (most recent call last):
        ...
        ValueError: Attempting to re-insert transition Transition from 0 to 0: 1|-
    """
    message = "Attempting to re-insert transition %s" % old_transition
    raise ValueError(message)
def duplicate_transition_add_input(old_transition, new_transition):
    """
    Alternative function for handling duplicate transitions in finite
    state machines. This implementation adds the input label of the
    new transition to the input label of the old transition. This is
    intended for the case where a Markov chain is modelled by a finite
    state machine using the input labels as transition probabilities.

    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.

    INPUT:

    - ``old_transition`` -- A transition in a finite state machine.

    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.

    OUTPUT:

    A transition whose input weight is the sum of the input
    weights of ``old_transition`` and ``new_transition``.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_add_input(FSMTransition('a', 'a', 1/2),
        ....:                                FSMTransition('a', 'a', 1/2))
        Transition from 'a' to 'a': 1|-

    Input labels must be lists of length 1::

        sage: duplicate_transition_add_input(FSMTransition('a', 'a', [1, 1]),
        ....:                                FSMTransition('a', 'a', [1, 1]))
        Traceback (most recent call last):
        ...
        TypeError: Trying to use duplicate_transition_add_input on
        "Transition from 'a' to 'a': 1,1|-" and
        "Transition from 'a' to 'a': 1,1|-",
        but input words are assumed to be lists of length 1
    """
    # Both input words must be single-letter lists; the letters are
    # then added (e.g. summing transition probabilities).
    if (hasattr(old_transition.word_in, '__iter__')
            and len(old_transition.word_in) == 1
            and hasattr(new_transition.word_in, '__iter__')
            and len(new_transition.word_in) == 1):
        combined = old_transition.word_in[0] + new_transition.word_in[0]
        old_transition.word_in = [combined]
        return old_transition
    raise TypeError('Trying to use duplicate_transition_add_input on ' +
                    '"%s" and "%s", ' % (old_transition, new_transition) +
                    'but input words are assumed to be lists of length 1')
class FiniteStateMachine(SageObject):
"""
Class for a finite state machine.
A finite state machine is a finite set of states connected by
transitions.
INPUT:
- ``data`` -- can be any of the following:
#. a dictionary of dictionaries (of transitions),
#. a dictionary of lists (of states or transitions),
#. a list (of transitions),
#. a function (transition function),
#. an other instance of a finite state machine.
- ``initial_states`` and ``final_states`` -- the initial and
final states of this machine
- ``input_alphabet`` and ``output_alphabet`` -- the input and
output alphabets of this machine
- ``determine_alphabets`` -- If ``True``, then the function
:meth:`.determine_alphabets` is called after ``data`` was read and
processed, if ``False``, then not. If it is ``None``, then it is
decided during the construction of the finite state machine
whether :meth:`.determine_alphabets` should be called.
- ``with_final_word_out`` -- If given (not ``None``), then the
function :meth:`.with_final_word_out` (more precisely, its inplace
pendant :meth:`.construct_final_word_out`) is called with input
``letters=with_final_word_out`` at the end of the creation
process.
- ``store_states_dict`` -- If ``True``, then additionally the states
are stored in an interal dictionary for speed up.
- ``on_duplicate_transition`` -- A function which is called when a
transition is inserted into ``self`` which already existed (same
``from_state``, same ``to_state``, same ``word_in``, same ``word_out``).
This function is assumed to take two arguments, the first being
the already existing transition, the second being the new
transition (as an :class:`FSMTransition`). The function must
return the (possibly modified) original transition.
By default, we have ``on_duplicate_transition=None``, which is
interpreted as
``on_duplicate_transition=duplicate_transition_ignore``, where
``duplicate_transition_ignore`` is a predefined function
ignoring the occurrence. Other such predefined functions are
``duplicate_transition_raise_error`` and
``duplicate_transition_add_input``.
OUTPUT:
A finite state machine.
The object creation of :class:`Automaton` and :class:`Transducer`
is the same as the one described here (i.e. just replace the word
``FiniteStateMachine`` by ``Automaton`` or ``Transducer``).
Each transition of an automaton has an input label. Automata can,
for example, be determinised (see
:meth:`Automaton.determinisation`) and minimized (see
:meth:`Automaton.minimization`). Each transition of a transducer
has an input and an output label. Transducers can, for example, be
simplified (see :meth:`Transducer.simplification`).
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
See documentation for more examples.
We illustrate the different input formats:
#. The input-data can be a dictionary of dictionaries, where
- the keys of the outer dictionary are state-labels (from-states of
transitions),
- the keys of the inner dictionaries are state-labels (to-states of
transitions),
- the values of the inner dictionaries specify the transition
more precisely.
The easiest is to use a tuple consisting of an input and an
output word::
sage: FiniteStateMachine({'a':{'b':(0, 1), 'c':(1, 1)}})
Finite state machine with 3 states
Instead of the tuple anything iterable (e.g. a list) can be
used as well.
If you want to use the arguments of :class:`FSMTransition`
directly, you can use a dictionary::
sage: FiniteStateMachine({'a':{'b':{'word_in':0, 'word_out':1},
....: 'c':{'word_in':1, 'word_out':1}}})
Finite state machine with 3 states
In the case you already have instances of
:class:`FSMTransition`, it is possible to use them directly::
sage: FiniteStateMachine({'a':{'b':FSMTransition('a', 'b', 0, 1),
....: 'c':FSMTransition('a', 'c', 1, 1)}})
Finite state machine with 3 states
#. The input-data can be a dictionary of lists, where the keys
are states or label of states.
The list-elements can be states::
sage: a = FSMState('a')
sage: b = FSMState('b')
sage: c = FSMState('c')
sage: FiniteStateMachine({a:[b, c]})
Finite state machine with 3 states
Or the list-elements can simply be labels of states::
sage: FiniteStateMachine({'a':['b', 'c']})
Finite state machine with 3 states
The list-elements can also be transitions::
sage: FiniteStateMachine({'a':[FSMTransition('a', 'b', 0, 1),
....: FSMTransition('a', 'c', 1, 1)]})
Finite state machine with 3 states
Or they can be tuples of a label, an input word and an output
word specifying a transition::
sage: FiniteStateMachine({'a':[('b', 0, 1), ('c', 1, 1)]})
Finite state machine with 3 states
#. The input-data can be a list, where its elements specify
transitions::
sage: FiniteStateMachine([FSMTransition('a', 'b', 0, 1),
....: FSMTransition('a', 'c', 1, 1)])
Finite state machine with 3 states
It is possible to skip ``FSMTransition`` in the example above::
sage: FiniteStateMachine([('a', 'b', 0, 1), ('a', 'c', 1, 1)])
Finite state machine with 3 states
The parameters of the transition are given in tuples. Anyhow,
anything iterable (e.g. a list) is possible.
You can also name the parameters of the transition. For this
purpose you take a dictionary::
sage: FiniteStateMachine([{'from_state':'a', 'to_state':'b',
....: 'word_in':0, 'word_out':1},
....: {'from_state':'a', 'to_state':'c',
....: 'word_in':1, 'word_out':1}])
Finite state machine with 3 states
Other arguments, which :class:`FSMTransition` accepts, can be
added, too.
#. The input-data can also be function acting as transition
function:
This function has two input arguments:
#. a label of a state (from which the transition starts),
#. a letter of the (input-)alphabet (as input-label of the transition).
It returns a tuple with the following entries:
#. a label of a state (to which state the transition goes),
#. a letter of or a word over the (output-)alphabet (as
output-label of the transition).
It may also output a list of such tuples if several
transitions from the from-state and the input letter exist
(this means that the finite state machine is
non-deterministic).
If the transition does not exist, the function should raise a
``LookupError`` or return an empty list.
When constructing a finite state machine in this way, some
inital states and an input alphabet have to be specified.
::
sage: def f(state_from, read):
....: if int(state_from) + read <= 2:
....: state_to = 2*int(state_from)+read
....: write = 0
....: else:
....: state_to = 2*int(state_from) + read - 5
....: write = 1
....: return (str(state_to), write)
sage: F = FiniteStateMachine(f, input_alphabet=[0, 1],
....: initial_states=['0'],
....: final_states=['0'])
sage: F([1, 0, 1])
(True, '0', [0, 0, 1])
#. The input-data can be an other instance of a finite state machine::
sage: FiniteStateMachine(FiniteStateMachine([]))
Traceback (most recent call last):
...
NotImplementedError
The following examples demonstrate the use of ``on_duplicate_transition``::
sage: F = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]])
sage: F.transitions()
[Transition from 'a' to 'a': 1/2|-]
::
sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
sage: F1 = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]],
....: on_duplicate_transition=duplicate_transition_raise_error)
Traceback (most recent call last):
...
ValueError: Attempting to re-insert transition Transition from 'a' to 'a': 1/2|-
Use ``duplicate_transition_add_input`` to emulate a Markov chain,
the input labels are considered as transition probabilities::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: F = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: F.transitions()
[Transition from 'a' to 'a': 1|-]
Use ``with_final_word_out`` to construct final output::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
....: initial_states=[0],
....: final_states=[0],
....: with_final_word_out=0)
sage: for s in T.iter_final_states():
....: print s, s.final_word_out
0 []
1 [0]
TESTS::
sage: a = FSMState('S_a', 'a')
sage: b = FSMState('S_b', 'b')
sage: c = FSMState('S_c', 'c')
sage: d = FSMState('S_d', 'd')
sage: FiniteStateMachine({a:[b, c], b:[b, c, d],
....: c:[a, b], d:[a, c]})
Finite state machine with 4 states
We have several constructions which lead to the same finite
state machine::
sage: A = FSMState('A')
sage: B = FSMState('B')
sage: C = FSMState('C')
sage: FSM1 = FiniteStateMachine(
....: {A:{B:{'word_in':0, 'word_out':1},
....: C:{'word_in':1, 'word_out':1}}})
sage: FSM2 = FiniteStateMachine({A:{B:(0, 1), C:(1, 1)}})
sage: FSM3 = FiniteStateMachine(
....: {A:{B:FSMTransition(A, B, 0, 1),
....: C:FSMTransition(A, C, 1, 1)}})
sage: FSM4 = FiniteStateMachine({A:[(B, 0, 1), (C, 1, 1)]})
sage: FSM5 = FiniteStateMachine(
....: {A:[FSMTransition(A, B, 0, 1), FSMTransition(A, C, 1, 1)]})
sage: FSM6 = FiniteStateMachine(
....: [{'from_state':A, 'to_state':B, 'word_in':0, 'word_out':1},
....: {'from_state':A, 'to_state':C, 'word_in':1, 'word_out':1}])
sage: FSM7 = FiniteStateMachine([(A, B, 0, 1), (A, C, 1, 1)])
sage: FSM8 = FiniteStateMachine(
....: [FSMTransition(A, B, 0, 1), FSMTransition(A, C, 1, 1)])
sage: FSM1 == FSM2 == FSM3 == FSM4 == FSM5 == FSM6 == FSM7 == FSM8
True
It is possible to skip ``FSMTransition`` in the example above.
Some more tests for different input-data::
sage: FiniteStateMachine({'a':{'a':[0, 0], 'b':[1, 1]},
....: 'b':{'b':[1, 0]}})
Finite state machine with 2 states
sage: a = FSMState('S_a', 'a')
sage: b = FSMState('S_b', 'b')
sage: c = FSMState('S_c', 'c')
sage: d = FSMState('S_d', 'd')
sage: t1 = FSMTransition(a, b)
sage: t2 = FSMTransition(b, c)
sage: t3 = FSMTransition(b, d)
sage: t4 = FSMTransition(c, d)
sage: FiniteStateMachine([t1, t2, t3, t4])
Finite state machine with 4 states
"""
on_duplicate_transition = duplicate_transition_ignore
"""
Which function to call when a duplicate transition is inserted. See
the documentation of the parameter ``on_duplicate_transition`` of
the class :class:`FiniteStateMachine` for details.
"""
#*************************************************************************
# init
#*************************************************************************
    def __init__(self,
                 data=None,
                 initial_states=None, final_states=None,
                 input_alphabet=None, output_alphabet=None,
                 determine_alphabets=None,
                 with_final_word_out=None,
                 store_states_dict=True,
                 on_duplicate_transition=None):
        """
        See :class:`FiniteStateMachine` for more information on the
        accepted input formats and keyword arguments.
        TEST::
            sage: FiniteStateMachine()
            Finite state machine with 0 states
        """
        self._states_ = []  # List of states in the finite state
                            # machine.  Each state stores a list of
                            # outgoing transitions.
        if store_states_dict:
            # Optional label -> state mapping used to speed up lookups.
            self._states_dict_ = {}
        # Register initial/final states first; add_state deduplicates,
        # so these may reappear in ``data`` below.
        if initial_states is not None:
            if not hasattr(initial_states, '__iter__'):
                raise TypeError('Initial states must be iterable ' \
                    '(e.g. a list of states).')
            for s in initial_states:
                state = self.add_state(s)
                state.is_initial = True
        if final_states is not None:
            if not hasattr(final_states, '__iter__'):
                raise TypeError('Final states must be iterable ' \
                    '(e.g. a list of states).')
            for s in final_states:
                state = self.add_state(s)
                state.is_final = True
        self.input_alphabet = input_alphabet
        self.output_alphabet = output_alphabet
        # Duplicate transitions are ignored unless a handler is given.
        if on_duplicate_transition is None:
            on_duplicate_transition = duplicate_transition_ignore
        if hasattr(on_duplicate_transition, '__call__'):
            self.on_duplicate_transition=on_duplicate_transition
        else:
            raise TypeError('on_duplicate_transition must be callable')
        # Dispatch on the (duck-typed) shape of ``data``; see the class
        # docstring for the supported formats.  The branch order
        # matters: dict-likes also have '__iter__', so 'iteritems' is
        # tested first.
        if data is None:
            pass
        elif is_FiniteStateMachine(data):
            raise NotImplementedError
        elif hasattr(data, 'iteritems'):
            # data is a dict (or something similar),
            # format: key = from_state, value = iterator of transitions
            for (sf, iter_transitions) in data.iteritems():
                self.add_state(sf)
                if hasattr(iter_transitions, 'iteritems'):
                    # Inner dict: key = to_state, value = transition spec.
                    for (st, transition) in iter_transitions.iteritems():
                        self.add_state(st)
                        if is_FSMTransition(transition):
                            self.add_transition(transition)
                        elif hasattr(transition, 'iteritems'):
                            # Dict of FSMTransition keyword arguments.
                            self.add_transition(sf, st, **transition)
                        elif hasattr(transition, '__iter__'):
                            # Iterable of positional arguments.
                            self.add_transition(sf, st, *transition)
                        else:
                            # A single input letter.
                            self.add_transition(sf, st, transition)
                elif hasattr(iter_transitions, '__iter__'):
                    # Inner list: states, transitions or spec tuples.
                    for transition in iter_transitions:
                        if hasattr(transition, '__iter__'):
                            L = [sf]
                            L.extend(transition)
                        elif is_FSMTransition(transition):
                            L = transition
                        else:
                            L = [sf, transition]
                        self.add_transition(L)
                else:
                    raise TypeError('Wrong input data for transition.')
            if determine_alphabets is None and input_alphabet is None \
                    and output_alphabet is None:
                determine_alphabets = True
        elif hasattr(data, '__iter__'):
            # data is a something that is iterable,
            # items are transitions
            for transition in data:
                if is_FSMTransition(transition):
                    self.add_transition(transition)
                elif hasattr(transition, 'iteritems'):
                    self.add_transition(transition)
                elif hasattr(transition, '__iter__'):
                    self.add_transition(transition)
                else:
                    raise TypeError('Wrong input data for transition.')
            if determine_alphabets is None and input_alphabet is None \
                    and output_alphabet is None:
                determine_alphabets = True
        elif hasattr(data, '__call__'):
            # data is a transition function; requires initial states
            # and an input alphabet to be explored.
            self.add_from_transition_function(data)
        else:
            raise TypeError('Cannot decide what to do with data.')
        if determine_alphabets:
            self.determine_alphabets()
        if with_final_word_out is not None:
            self.construct_final_word_out(with_final_word_out)
        self._allow_composition_ = True
#*************************************************************************
# copy and hash
#*************************************************************************
def __copy__(self):
"""
Returns a (shallow) copy of the finite state machine.
INPUT:
Nothing.
OUTPUT:
A new finite state machine.
TESTS::
sage: copy(FiniteStateMachine())
Traceback (most recent call last):
...
NotImplementedError
"""
raise NotImplementedError
copy = __copy__
def empty_copy(self, memo=None, new_class=None):
"""
Returns an empty deep copy of the finite state machine, i.e.,
``input_alphabet``, ``output_alphabet``, ``on_duplicate_transition``
are preserved, but states and transitions are not.
INPUT:
- ``memo`` -- a dictionary storing already processed elements.
- ``new_class`` -- a class for the copy. By default
(``None``), the class of ``self`` is used.
OUTPUT:
A new finite state machine.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
sage: F = FiniteStateMachine([('A', 'A', 0, 2), ('A', 'A', 1, 3)],
....: input_alphabet=[0, 1],
....: output_alphabet=[2, 3],
....: on_duplicate_transition=duplicate_transition_raise_error)
sage: FE = F.empty_copy(); FE
Finite state machine with 0 states
sage: FE.input_alphabet
[0, 1]
sage: FE.output_alphabet
[2, 3]
sage: FE.on_duplicate_transition == duplicate_transition_raise_error
True
TESTS::
sage: T = Transducer()
sage: type(T.empty_copy())
<class 'sage.combinat.finite_state_machine.Transducer'>
sage: type(T.empty_copy(new_class=Automaton))
<class 'sage.combinat.finite_state_machine.Automaton'>
"""
if new_class is None:
new = self.__class__()
else:
new = new_class()
new.input_alphabet = deepcopy(self.input_alphabet, memo)
new.output_alphabet = deepcopy(self.output_alphabet, memo)
new.on_duplicate_transition = self.on_duplicate_transition
return new
    def __deepcopy__(self, memo):
        """
        Returns a deep copy of the finite state machine.
        INPUT:
        - ``memo`` -- a dictionary storing already processed elements.
        OUTPUT:
        A new finite state machine.
        EXAMPLES::
            sage: F = FiniteStateMachine([('A', 'A', 0, 1), ('A', 'A', 1, 0)])
            sage: deepcopy(F)
            Finite state machine with 1 states
        """
        # :meth:`relabeled` signals a relabeling deep copy by setting
        # ``_deepcopy_relabel_`` (and ``_deepcopy_labels_``) on self.
        relabel = hasattr(self, '_deepcopy_relabel_')
        new = self.empty_copy(memo=memo)
        relabel_iter = itertools.count(0)
        for state in self.iter_states():
            if relabel:
                if self._deepcopy_labels_ is None:
                    # Default relabeling: consecutive integers 0, 1, ...
                    state._deepcopy_relabel_ = next(relabel_iter)
                elif hasattr(self._deepcopy_labels_, '__call__'):
                    # Callable: map old label to new label.
                    state._deepcopy_relabel_ = self._deepcopy_labels_(state.label())
                elif hasattr(self._deepcopy_labels_, '__getitem__'):
                    # Dictionary-like: look the new label up.
                    state._deepcopy_relabel_ = self._deepcopy_labels_[state.label()]
                else:
                    raise TypeError("labels must be None, a callable "
                                    "or a dictionary.")
            # FSMState.__deepcopy__ picks up ``_deepcopy_relabel_``.
            s = deepcopy(state, memo)
            if relabel:
                del state._deepcopy_relabel_
            new.add_state(s)
        # Copy the transitions afterwards; through ``memo`` their
        # from/to states resolve to the state copies created above.
        for transition in self.iter_transitions():
            new.add_transition(deepcopy(transition, memo))
        return new
def deepcopy(self, memo=None):
"""
Returns a deep copy of the finite state machine.
INPUT:
- ``memo`` -- (default: ``None``) a dictionary storing already
processed elements.
OUTPUT:
A new finite state machine.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'A', 0, 1), ('A', 'A', 1, 0)])
sage: deepcopy(F)
Finite state machine with 1 states
TESTS:
Make sure that the links between transitions and states
are still intact::
sage: C = deepcopy(F)
sage: C.transitions()[0].from_state is C.state('A')
True
sage: C.transitions()[0].to_state is C.state('A')
True
"""
return deepcopy(self, memo)
def relabeled(self, memo=None, labels=None):
    """
    Return a deep copy of the finite state machine with relabeled states.

    INPUT:

    - ``memo`` -- (default: ``None``) a dictionary storing already
      processed elements.

    - ``labels`` -- (default: ``None``) a dictionary or callable mapping
      old labels to new labels. If ``None``, the new labels are the
      integers starting with 0.

    OUTPUT:

    A new finite state machine.

    EXAMPLES::

        sage: FSM1 = FiniteStateMachine([('A', 'B'), ('B', 'C'), ('C', 'A')])
        sage: FSM1.relabeled().states()
        [0, 1, 2]
        sage: FSM1.relabeled(labels={'A': 'a', 'B': 'b', 'C': 'c'}).states()
        ['a', 'b', 'c']

    TESTS::

        sage: FSM1.relabeled().relabeled(labels=1)
        Traceback (most recent call last):
        ...
        TypeError: labels must be None, a callable or a dictionary.
    """
    # The two marker attributes are read by __deepcopy__, which performs
    # the actual relabeling while copying each state; they are removed
    # again afterwards so self stays unmodified.
    self._deepcopy_relabel_ = True
    self._deepcopy_labels_ = labels
    relabeled_copy = deepcopy(self, memo)
    del self._deepcopy_relabel_
    del self._deepcopy_labels_
    return relabeled_copy
def induced_sub_finite_state_machine(self, states):
    """
    Return the sub-finite-state-machine induced by the given states.

    INPUT:

    - ``states`` -- a list (or an iterator) of states (labels or
      instances of :class:`FSMState`) of the sub-finite-state-machine.

    OUTPUT:

    A new finite state machine consisting of deep copies of the given
    states and of all transitions of ``self`` between these states.

    EXAMPLE::

        sage: FSM = FiniteStateMachine([(0, 1, 0), (0, 2, 0),
        ....:                           (1, 2, 0), (2, 0, 0)])
        sage: sub_FSM = FSM.induced_sub_finite_state_machine([0, 1])
        sage: sub_FSM.states()
        [0, 1]
        sage: sub_FSM.transitions()
        [Transition from 0 to 1: 0|-]
        sage: FSM.induced_sub_finite_state_machine([3])
        Traceback (most recent call last):
        ...
        ValueError: 3 is not a state of this finite state machine.
    """
    # Resolve and validate all requested states first.
    selected = set()
    for requested in states:
        if not self.has_state(requested):
            raise ValueError("%s is not a state of this finite state machine." % requested)
        selected.add(self.state(requested))
    memo = {}
    machine = self.empty_copy(memo=memo)
    # Copy the selected states ...
    for state in selected:
        machine.add_state(deepcopy(state, memo))
    # ... and every transition staying inside the selection.
    for state in selected:
        for transition in self.iter_transitions(state):
            if transition.to_state in selected:
                machine.add_transition(deepcopy(transition, memo))
    return machine
def __hash__(self):
    """
    Return a hash value for an immutable finite state machine.

    Since mutable finite state machines should not be hashable, a
    ``TypeError`` is raised unless the machine carries the
    ``_immutable`` flag.

    OUTPUT:

    The hash of this finite state machine.

    EXAMPLES::

        sage: hash(FiniteStateMachine())
        Traceback (most recent call last):
        ...
        TypeError: Finite state machines are mutable, and thus not hashable.
    """
    if not getattr(self, "_immutable", False):
        raise TypeError("Finite state machines are mutable, " \
                        "and thus not hashable.")
    return hash((tuple(self.states()), tuple(self.transitions())))
#*************************************************************************
# operators
#*************************************************************************
def __or__(self, other):
    """
    Return the disjoint union of the finite state machines ``self``
    and ``other``.

    INPUT:

    - ``other`` -- a finite state machine.

    OUTPUT:

    A new finite state machine.

    TESTS::

        sage: FiniteStateMachine() | FiniteStateMachine([('A', 'B')])
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    if is_FiniteStateMachine(other):
        return self.disjoint_union(other)
    # Previously control fell off the end and silently returned None
    # for a non-machine operand; fail loudly instead.
    raise TypeError("Can only add finite state machine")

__add__ = __or__
def __iadd__(self, other):
    """
    In-place addition is not implemented.

    TESTS::

        sage: F = FiniteStateMachine()
        sage: F += FiniteStateMachine()
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    raise NotImplementedError
def __and__(self, other):
    """
    Return the intersection of ``self`` with ``other``.

    INPUT:

    - ``other`` -- a finite state machine.

    TESTS::

        sage: FiniteStateMachine() & FiniteStateMachine([('A', 'B')])
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    if is_FiniteStateMachine(other):
        return self.intersection(other)
    # Previously control fell off the end and silently returned None
    # for a non-machine operand; fail loudly instead.
    raise NotImplementedError
def __imul__(self, other):
    """
    In-place multiplication is not implemented.

    TESTS::

        sage: F = FiniteStateMachine()
        sage: F *= FiniteStateMachine()
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    raise NotImplementedError
def __call__(self, *args, **kwargs):
    """
    .. WARNING::

        The default output of this method is scheduled to change.
        This docstring describes the new default behaviour, which can
        already be achieved by setting
        ``FSMOldProcessOutput`` to ``False``.

    Calls either method :meth:`.composition` (when the first argument
    is a finite state machine) or :meth:`.process` (with
    ``full_output=False``) when the first argument is iterable.

    EXAMPLES::

        sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False  # activate new output behavior
        sage: from sage.combinat.finite_state_machine import FSMState
        sage: A = FSMState('A', is_initial=True, is_final=True)
        sage: binary_inverter = Transducer({A:[(A, 0, 1), (A, 1, 0)]})
        sage: binary_inverter([0, 1, 0, 0, 1, 1])
        [1, 0, 1, 1, 0, 0]
    """
    if not args:
        raise TypeError("Called with too few arguments.")
    if is_FiniteStateMachine(args[0]):
        return self.composition(*args, **kwargs)
    if hasattr(args[0], '__iter__'):
        # ``dict.has_key`` is Python-2-only; the ``in`` operator is
        # equivalent and forward-compatible.
        if 'full_output' not in kwargs:
            kwargs['full_output'] = False
        return self.process(*args, **kwargs)
    raise TypeError("Do not know what to do with that arguments.")
#*************************************************************************
# tests
#*************************************************************************
def __nonzero__(self):
    """
    Return ``True`` if the finite state machine has at least one state.

    OUTPUT:

    ``True`` or ``False``.

    TESTS::

        sage: FiniteStateMachine().__nonzero__()
        False
    """
    # A non-empty state list makes the machine truthy.
    return bool(self._states_)
def __eq__(left, right):
    """
    Return ``True`` if the two finite state machines are equal,
    i.e., if they have the same states and the same transitions.

    INPUT:

    - ``left`` -- a finite state machine.

    - ``right`` -- a finite state machine.

    OUTPUT:

    ``True`` or ``False``.

    All attributes of a state are compared (via
    :meth:`FSMState.fully_equal`) except for colors: if the colors
    coincide the machines are considered equal; otherwise they are
    still considered equal if both machines are monochromatic.

    EXAMPLES::

        sage: F = FiniteStateMachine([('A', 'B', 1)])
        sage: F == FiniteStateMachine()
        False
        sage: G = FiniteStateMachine([('A', 'B', 1)],
        ....:                        initial_states=['A'])
        sage: F == G
        False
        sage: F.state('A').is_initial = True
        sage: F == G
        True
    """
    if not is_FiniteStateMachine(right):
        raise TypeError('Only instances of FiniteStateMachine '
                        'can be compared.')
    if len(left._states_) != len(right._states_):
        return False
    colors_equal = True
    for state in left.iter_states():
        try:
            right_state = right.state(state.label())
        except LookupError:
            return False

        # we handle colors separately
        if not state.fully_equal(right_state, compare_color=False):
            return False
        if state.color != right_state.color:
            colors_equal = False

        left_transitions = state.transitions
        # Reuse right_state instead of a second right.state() lookup.
        right_transitions = right_state.transitions
        if len(left_transitions) != len(right_transitions):
            return False
        for t in left_transitions:
            if t not in right_transitions:
                return False

    # handle colors
    if colors_equal:
        return True
    if left.is_monochromatic() and right.is_monochromatic():
        return True
    return False
def __ne__(left, right):
    """
    Test for inequality; the complement of :meth:`.__eq__`.

    INPUT:

    - ``left`` -- a finite state machine.

    - ``right`` -- a finite state machine.

    OUTPUT:

    ``True`` or ``False``.

    EXAMPLES::

        sage: E = FiniteStateMachine([('A', 'B', 0)])
        sage: F = Automaton([('A', 'B', 0)])
        sage: E == F
        True
    """
    machines_equal = (left == right)
    return not machines_equal
def __contains__(self, item):
    """
    Return ``True`` if the finite state machine contains the state
    or transition ``item``. Only the labels of the states and the
    input and output words are tested.

    INPUT:

    - ``item`` -- a state or a transition.

    OUTPUT:

    ``True`` or ``False``.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
        sage: F = FiniteStateMachine([('A', 'B', 0), ('B', 'A', 1)])
        sage: FSMState('A', is_initial=True) in F
        True
        sage: 'A' in F
        False
        sage: FSMTransition('A', 'B', 0) in F
        True
    """
    if is_FSMState(item):
        return self.has_state(item)
    elif is_FSMTransition(item):
        return self.has_transition(item)
    else:
        # Anything that is neither a state nor a transition (e.g. a
        # bare label) is not considered contained.
        return False
def is_Markov_chain(self, is_zero=None):
    """
    Check whether ``self`` is a Markov chain where the transition
    probabilities are modeled as input labels.

    INPUT:

    - ``is_zero`` -- by default (``is_zero=None``), checking for zero
      is done by :meth:`~sage.structure.element.Element.is_zero`. This
      parameter can provide a more sophisticated zero test, e.g. for
      symbolic probabilities.

    OUTPUT:

    ``True`` or ``False``.

    :attr:`on_duplicate_transition` must be
    :func:`duplicate_transition_add_input` and the input weights of
    the transitions leaving each state must sum to 1.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: F = Transducer([[0, 0, 1/4, 0], [0, 1, 3/4, 1],
        ....:                 [1, 0, 1/2, 0], [1, 1, 1/2, 1]],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: F.is_Markov_chain()
        True
    """
    # Without the adding-input merge strategy, duplicate transitions
    # would not accumulate probabilities, so this cannot be a chain.
    if self.on_duplicate_transition != duplicate_transition_add_input:
        return False
    zero_test = default_is_zero if is_zero is None else is_zero
    # Each state's outgoing probabilities (first input letters) must
    # sum to one.
    return all(zero_test(sum(t.word_in[0] for t in state.transitions) - 1)
               for state in self.states())
#*************************************************************************
# representations / LaTeX
#*************************************************************************
def _repr_(self):
    """
    Represent the finite state machine as "Finite state machine
    with n states" where n is the number of states.

    OUTPUT:

    A string.

    EXAMPLES::

        sage: FiniteStateMachine()._repr_()
        'Finite state machine with 0 states'
    """
    number_of_states = len(self._states_)
    return "Finite state machine with %s states" % number_of_states
# Default LaTeX formatting of a single letter: delegate to latex().
# ``format_letter`` can be overridden per machine via latex_options().
default_format_letter = latex
format_letter = default_format_letter
def format_letter_negative(self, letter):
    r"""
    Format negative integers as overlined absolute values; everything
    else by standard LaTeX formatting.

    INPUT:

    ``letter`` -- anything.

    OUTPUT:

    Overlined absolute value if ``letter`` is a negative integer,
    :func:`latex(letter) <sage.misc.latex.latex>` otherwise.

    EXAMPLES::

        sage: A = Automaton([(0, 0, -1)])
        sage: A.format_letter_negative(-1)
        '\\overline{1}'
    """
    is_negative_integer = letter in ZZ and letter < 0
    if is_negative_integer:
        return r'\overline{%d}' % -letter
    return latex(letter)
def format_transition_label_reversed(self, word):
    r"""
    Format words in transition labels in reversed order.

    INPUT:

    ``word`` -- list of letters.

    OUTPUT:

    String representation of ``word`` suitable for LaTeX mathematics
    mode, with letters written in reversed order.

    This is the reversed variant of
    :meth:`.default_format_transition_label`. In digit expansions,
    digits are processed least significant first but customarily
    written with the least significant digit right-most, hence the
    reversal.

    EXAMPLE::

        sage: T = Transducer([(0, 0, 0, [1, 2, 3])])
        sage: T.format_transition_label_reversed([1, 2, 3])
        '3 2 1'

    TEST:

    Check that #16357 is fixed::

        sage: T = Transducer()
        sage: T.format_transition_label_reversed([])
        '\\varepsilon'
    """
    reversed_word = reversed(word)
    return self.default_format_transition_label(reversed_word)
def default_format_transition_label(self, word):
    r"""
    Default formatting of words in transition labels for LaTeX output.

    INPUT:

    ``word`` -- list of letters.

    OUTPUT:

    String representation of ``word`` suitable for LaTeX mathematics
    mode:

    - for a non-empty word, the letters piped through
      ``self.format_letter`` and joined by blanks;

    - for an empty word,
      ``sage.combinat.finite_state_machine.EmptyWordLaTeX``.

    See also :meth:`.format_transition_label_reversed` for the
    reversed-order variant.

    EXAMPLES::

        sage: T = Transducer()
        sage: T.default_format_transition_label([])
        '\\varepsilon'
        sage: T.default_format_transition_label(iter([]))
        '\\varepsilon'
    """
    formatted = " ".join(self.format_letter(letter) for letter in word)
    if not formatted:
        # Empty (or exhausted) input word: use the module-wide symbol.
        return EmptyWordLaTeX
    return formatted

format_transition_label = default_format_transition_label
def latex_options(self,
                  coordinates=None,
                  format_state_label=None,
                  format_letter=None,
                  format_transition_label=None,
                  loop_where=None,
                  initial_where=None,
                  accepting_style=None,
                  accepting_distance=None,
                  accepting_where=None,
                  accepting_show_empty=None):
    r"""
    Set options for LaTeX output via :func:`~sage.misc.latex.latex`
    and therefore :func:`~sage.misc.latex.view`.

    INPUT:

    - ``coordinates`` -- a dictionary or a function mapping state
      labels to coordinate pairs. If not given, states are placed
      equidistantly on a circle of radius `3`. See also
      :meth:`.set_coordinates`.

    - ``format_state_label`` -- a function mapping state labels to a
      string for LaTeX mathematics mode. Defaults to
      :func:`~sage.misc.latex.latex`.

    - ``format_letter`` -- a function mapping letters of the input and
      output alphabets to a string for LaTeX mathematics mode. Used by
      :meth:`.default_format_transition_label`.

    - ``format_transition_label`` -- a function mapping words over the
      alphabets to a string for LaTeX mathematics mode. Defaults to
      :meth:`.default_format_transition_label`.

    - ``loop_where`` -- a dictionary or a function mapping state labels
      to one of ``'above'``, ``'left'``, ``'below'``, ``'right'``
      (default ``'above'``).

    - ``initial_where`` -- a dictionary or a function mapping labels of
      initial states to one of ``'above'``, ``'left'``, ``'below'``,
      ``'right'`` (TikZ' default is used otherwise).

    - ``accepting_style`` -- one of ``'accepting by double'`` and
      ``'accepting by arrow'``.

    - ``accepting_distance`` -- a LaTeX length for the arrow leading
      from a final state.

    - ``accepting_where`` -- a dictionary or a function mapping labels
      of final states to one of ``'above'``, ``'left'``, ``'below'``,
      ``'right'``; for a final state with a final output word also an
      angle in degrees.

    - ``accepting_show_empty`` -- if ``True``, the arrow of an empty
      final output word is labeled as well (this implies
      ``accepting_style='accepting by arrow'``). Default ``False``.

    OUTPUT:

    Nothing.

    This convenience method collects all LaTeX options; only arguments
    which are not ``None`` are taken into account, so it can be called
    several times and combined with setting the corresponding
    attributes of :class:`FSMState`, :class:`FSMTransition` and
    :class:`FiniteStateMachine` directly.

    TESTS::

        sage: T = Transducer(initial_states=[0], final_states=[0])
        sage: T.latex_options(format_state_label='Nothing')
        Traceback (most recent call last):
        ...
        TypeError: format_state_label must be callable.
        sage: T.latex_options(loop_where=37)
        Traceback (most recent call last):
        ...
        TypeError: loop_where must be a callable or a
        dictionary.
        sage: T.latex_options(accepting_style='fancy')
        Traceback (most recent call last):
        ...
        ValueError: accepting_style must be in ['accepting by
        double', 'accepting by arrow'].
    """
    def _resolve_where(spec, label, option_name):
        # Accept a callable or a dictionary; a KeyError (missing label)
        # propagates to the caller, any other lookup failure means the
        # specification itself has the wrong type.
        if hasattr(spec, '__call__'):
            return spec(label)
        try:
            return spec[label]
        except TypeError:
            raise TypeError("%s must be a "
                            "callable or a dictionary." % option_name)

    if coordinates is not None:
        self.set_coordinates(coordinates)

    # The three formatting hooks share the same validation.
    if format_state_label is not None:
        if not hasattr(format_state_label, '__call__'):
            raise TypeError('format_state_label must be callable.')
        self.format_state_label = format_state_label

    if format_letter is not None:
        if not hasattr(format_letter, '__call__'):
            raise TypeError('format_letter must be callable.')
        self.format_letter = format_letter

    if format_transition_label is not None:
        if not hasattr(format_transition_label, '__call__'):
            raise TypeError('format_transition_label must be callable.')
        self.format_transition_label = format_transition_label

    if loop_where is not None:
        admissible = list(tikz_automata_where)
        for state in self.states():
            try:
                where = _resolve_where(loop_where, state.label(),
                                       'loop_where')
            except KeyError:
                continue
            if where not in admissible:
                raise ValueError('loop_where for %s must be in %s.' %
                                 (state.label(), admissible))
            state.loop_where = where

    if initial_where is not None:
        admissible = list(tikz_automata_where)
        for state in self.iter_initial_states():
            try:
                where = _resolve_where(initial_where, state.label(),
                                       'initial_where')
            except KeyError:
                continue
            if where not in admissible:
                raise ValueError('initial_where for %s must be in %s.' %
                                 (state.label(), admissible))
            state.initial_where = where

    if accepting_style is not None:
        admissible_styles = ['accepting by double',
                             'accepting by arrow']
        if accepting_style not in admissible_styles:
            raise ValueError('accepting_style must be in %s.' %
                             admissible_styles)
        self.accepting_style = accepting_style

    if accepting_distance is not None:
        self.accepting_distance = accepting_distance

    if accepting_where is not None:
        admissible = list(tikz_automata_where)
        for state in self.iter_final_states():
            try:
                where = _resolve_where(accepting_where, state.label(),
                                       'accepting_where')
            except KeyError:
                continue
            if where in admissible:
                state.accepting_where = where
            elif hasattr(state, 'final_word_out') \
                    and state.final_word_out:
                # With a final output word, an angle (real number) is
                # also acceptable.
                if where not in RR:
                    raise ValueError('accepting_where for %s must '
                                     'be a real number or be in %s.' %
                                     (state.label(), admissible))
                state.accepting_where = where
            else:
                raise ValueError('accepting_where for %s must be in %s.' %
                                 (state.label(), admissible))

    if accepting_show_empty is not None:
        self.accepting_show_empty = accepting_show_empty
def _latex_(self):
r"""
Returns a LaTeX code for the graph of the finite state machine.
INPUT:
Nothing.
OUTPUT:
A string.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 1, 2)],
....: initial_states=['A'],
....: final_states=['B'])
sage: F.state('A').initial_where='below'
sage: print latex(F) # indirect doctest
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state, initial, initial where=below] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
\node[state, accepting] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
\path[->] (v0) edge node[rotate=360.00, anchor=south] {$ $} (v1);
\end{tikzpicture}
"""
def label_rotation(angle, both_directions):
"""
Given an angle of a transition, compute the TikZ string to
rotate the label.
"""
angle_label = angle
anchor_label = "south"
if angle > 90 or angle <= -90:
angle_label = angle + 180
if both_directions:
# if transitions in both directions, the transition to the
# left has its label below the transition, otherwise above
anchor_label = "north"
return "rotate=%.2f, anchor=%s" % (angle_label, anchor_label)
setup_latex_preamble()
options = ["auto", "initial text=", ">=latex"]
nonempty_final_word_out = False
for state in self.iter_final_states():
if state.final_word_out:
nonempty_final_word_out = True
break
if hasattr(self, "accepting_style"):
accepting_style = self.accepting_style
elif nonempty_final_word_out:
accepting_style = "accepting by arrow"
else:
accepting_style = "accepting by double"
if accepting_style == "accepting by arrow":
options.append("accepting text=")
options.append("accepting/.style=%s" % accepting_style)
if hasattr(self, "accepting_distance"):
accepting_distance = self.accepting_distance
elif nonempty_final_word_out:
accepting_distance = "7ex"
else:
accepting_distance = None
if accepting_style == "accepting by arrow" and accepting_distance:
options.append("accepting distance=%s"
% accepting_distance)
if hasattr(self, "accepting_show_empty"):
accepting_show_empty = self.accepting_show_empty
else:
accepting_show_empty = False
result = "\\begin{tikzpicture}[%s]\n" % ", ".join(options)
j = 0;
for vertex in self.iter_states():
if not hasattr(vertex, "coordinates"):
vertex.coordinates = (3*cos(2*pi*j/len(self.states())),
3*sin(2*pi*j/len(self.states())))
options = ""
if vertex.is_final:
if not (vertex.final_word_out
and accepting_style == "accepting by arrow") \
and not accepting_show_empty:
# otherwise, we draw a custom made accepting path
# with label below
options += ", accepting"
if hasattr(vertex, "accepting_where"):
options += ", accepting where=%s" % (
vertex.accepting_where,)
if vertex.is_initial:
options += ", initial"
if hasattr(vertex, "initial_where"):
options += ", initial where=%s" % vertex.initial_where
if hasattr(vertex, "format_label"):
label = vertex.format_label()
elif hasattr(self, "format_state_label"):
label = self.format_state_label(vertex)
else:
label = latex(vertex.label())
result += "\\node[state%s] (v%d) at (%f, %f) {$%s$};\n" % (
options, j, vertex.coordinates[0],
vertex.coordinates[1], label)
vertex._number_ = j
if vertex.is_final and (vertex.final_word_out or accepting_show_empty):
angle = 0
if hasattr(vertex, "accepting_where"):
angle = tikz_automata_where.get(vertex.accepting_where,
vertex.accepting_where)
result += "\\path[->] (v%d.%.2f) edge node[%s] {$%s \mid %s$} ++(%.2f:%s);\n" % (
j, angle,
label_rotation(angle, False),
EndOfWordLaTeX,
self.format_transition_label(vertex.final_word_out),
angle, accepting_distance)
j += 1
# We use an OrderedDict instead of a dict in order to have a
# defined ordering of the transitions in the output. See
# http://trac.sagemath.org/ticket/16580#comment:3 . As the
# transitions have to be sorted anyway, the performance
# penalty should be bearable; nevertheless, this is only
# required for doctests.
adjacent = OrderedDict(
(pair, list(transitions))
for pair, transitions in
itertools.groupby(
sorted(self.iter_transitions(),
key=key_function),
key=key_function
))
for ((source, target), transitions) in adjacent.iteritems():
if len(transitions) > 0:
labels = []
for transition in transitions:
if hasattr(transition, "format_label"):
labels.append(transition.format_label())
else:
labels.append(self._latex_transition_label_(
transition, self.format_transition_label))
label = ", ".join(labels)
if source != target:
angle = atan2(
target.coordinates[1] - source.coordinates[1],
target.coordinates[0] - source.coordinates[0]) * 180/pi
both_directions = (target, source) in adjacent
if both_directions:
angle_source = ".%.2f" % ((angle + 5).n(),)
angle_target = ".%.2f" % ((angle + 175).n(),)
else:
angle_source = ""
angle_target = ""
result += "\\path[->] (v%d%s) edge node[%s] {$%s$} (v%d%s);\n" % (
source._number_, angle_source,
label_rotation(angle, both_directions),
label,
target._number_, angle_target)
else:
loop_where = "above"
if hasattr(source, "loop_where"):
loop_where = source.loop_where
rotation = {'left': '[rotate=90, anchor=south]',
'right': '[rotate=90, anchor=north]'}
result += "\\path[->] (v%d) edge[loop %s] node%s {$%s$} ();\n" % (
source._number_,
loop_where, rotation.get(loop_where, ''),
label)
result += "\\end{tikzpicture}"
return result
def _latex_transition_label_(self, transition, format_function=latex):
r"""
Returns the proper transition label.
INPUT:
- ``transition`` - a transition
- ``format_function`` - a function formatting the labels
OUTPUT:
A string.
TESTS::
sage: F = FiniteStateMachine([('A', 'B', 0, 1)])
sage: t = F.transitions()[0]
sage: F._latex_transition_label_(t)
' '
"""
return ' '
def set_coordinates(self, coordinates, default=True):
"""
Set coordinates of the states for the LaTeX representation by
a dictionary or a function mapping labels to coordinates.
INPUT:
- ``coordinates`` -- a dictionary or a function mapping labels
of states to pairs interpreted as coordinates.
- ``default`` -- If ``True``, then states not given by
``coordinates`` get a default position on a circle of
radius 3.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = Automaton([[0, 1, 1], [1, 2, 2], [2, 0, 0]])
sage: F.set_coordinates({0: (0, 0), 1: (2, 0), 2: (1, 1)})
sage: F.state(0).coordinates
(0, 0)
We can also use a function to determine the coordinates::
sage: F = Automaton([[0, 1, 1], [1, 2, 2], [2, 0, 0]])
sage: F.set_coordinates(lambda l: (l, 3/(l+1)))
sage: F.state(2).coordinates
(2, 1)
"""
states_without_coordinates = []
for state in self.iter_states():
try:
state.coordinates = coordinates[state.label()]
continue
except (KeyError, TypeError):
pass
try:
state.coordinates = coordinates(state.label())
continue
except TypeError:
pass
states_without_coordinates.append(state)
if default:
n = len(states_without_coordinates)
for j, state in enumerate(states_without_coordinates):
state.coordinates = (3*cos(2*pi*j/n),
3*sin(2*pi*j/n))
#*************************************************************************
# other
#*************************************************************************
def _matrix_(self, R=None):
"""
Returns the adjacency matrix of the finite state machine.
See :meth:`.adjacency_matrix` for more information.
EXAMPLES::
sage: B = FiniteStateMachine({0: {0: (0, 0), 'a': (1, 0)},
....: 'a': {2: (0, 0), 3: (1, 0)},
....: 2:{0:(1, 1), 4:(0, 0)},
....: 3:{'a':(0, 1), 2:(1, 1)},
....: 4:{4:(1, 1), 3:(0, 1)}},
....: initial_states=[0])
sage: B._matrix_()
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
"""
return self.adjacency_matrix()
def adjacency_matrix(self, input=None,
entry=None):
"""
Returns the adjacency matrix of the underlying graph.
INPUT:
- ``input`` -- Only transitions with input label ``input`` are
respected.
- ``entry`` -- The function ``entry`` takes a transition and the
return value is written in the matrix as the entry
``(transition.from_state, transition.to_state)``. The default
value (``None``) of entry takes the variable ``x`` to the
power of the sum of the output word of the transition.
OUTPUT:
A matrix.
If any label of a state is not an integer, the finite state
machine is relabeled at the beginning. If there are more than
one transitions between two states, then the different return
values of ``entry`` are added up.
EXAMPLES::
sage: B = FiniteStateMachine({0:{0:(0, 0), 'a':(1, 0)},
....: 'a':{2:(0, 0), 3:(1, 0)},
....: 2:{0:(1, 1), 4:(0, 0)},
....: 3:{'a':(0, 1), 2:(1, 1)},
....: 4:{4:(1, 1), 3:(0, 1)}},
....: initial_states=[0])
sage: B.adjacency_matrix()
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
This is equivalent to::
sage: matrix(B)
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
It is also possible to use other entries in the adjacency matrix::
sage: B.adjacency_matrix(entry=(lambda transition: 1))
[1 1 0 0 0]
[0 0 1 1 0]
[1 0 0 0 1]
[0 1 1 0 0]
[0 0 0 1 1]
sage: B.adjacency_matrix(1, entry=(lambda transition:
....: exp(I*transition.word_out[0]*var('t'))))
[ 0 1 0 0 0]
[ 0 0 0 1 0]
[e^(I*t) 0 0 0 0]
[ 0 0 e^(I*t) 0 0]
[ 0 0 0 0 e^(I*t)]
sage: a = Automaton([(0, 1, 0),
....: (1, 2, 0),
....: (2, 0, 1),
....: (2, 1, 0)],
....: initial_states=[0],
....: final_states=[0])
sage: a.adjacency_matrix()
[0 1 0]
[0 0 1]
[1 1 0]
"""
if entry is None:
entry = default_function
relabeledFSM = self
l = len(relabeledFSM.states())
for state in self.iter_states():
if state.label() not in ZZ or state.label() >= l \
or state.label() < 0:
relabeledFSM = self.relabeled()
break
dictionary = {}
for transition in relabeledFSM.iter_transitions():
if input is None or transition.word_in == [input]:
if (transition.from_state.label(),
transition.to_state.label()) in dictionary:
dictionary[(transition.from_state.label(),
transition.to_state.label())] \
+= entry(transition)
else:
dictionary[(transition.from_state.label(),
transition.to_state.label())] \
= entry(transition)
return matrix(len(relabeledFSM.states()), dictionary)
def determine_alphabets(self, reset=True):
"""
Determines the input and output alphabet according to the
transitions in self.
INPUT:
- ``reset`` -- If reset is ``True``, then the existing input
and output alphabets are erased, otherwise new letters are
appended to the existing alphabets.
OUTPUT:
Nothing.
After this operation the input alphabet and the output
alphabet of self are a list of letters.
.. TODO::
At the moment, the letters of the alphabets need to be hashable.
EXAMPLES::
sage: T = Transducer([(1, 1, 1, 0), (1, 2, 2, 1),
....: (2, 2, 1, 1), (2, 2, 0, 0)],
....: final_states=[1],
....: determine_alphabets=False)
sage: T.state(1).final_word_out = [1, 4]
sage: (T.input_alphabet, T.output_alphabet)
(None, None)
sage: T.determine_alphabets()
sage: (T.input_alphabet, T.output_alphabet)
([0, 1, 2], [0, 1, 4])
"""
if reset:
ain = set()
aout = set()
else:
ain = set(self.input_alphabet)
aout = set(self.output_alphabet)
for t in self.iter_transitions():
for letter in t.word_in:
ain.add(letter)
for letter in t.word_out:
aout.add(letter)
for s in self.iter_final_states():
for letter in s.final_word_out:
aout.add(letter)
self.input_alphabet = list(ain)
self.output_alphabet = list(aout)
#*************************************************************************
# get states and transitions
#*************************************************************************
def states(self):
"""
Returns the states of the finite state machine.
INPUT:
Nothing.
OUTPUT:
The states of the finite state machine as list.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: FSM.states()
['1', '2']
"""
return copy(self._states_)
def iter_states(self):
"""
Returns an iterator of the states.
INPUT:
Nothing.
OUTPUT:
An iterator of the states of the finite state machine.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [s.label() for s in FSM.iter_states()]
['1', '2']
"""
return iter(self._states_)
def transitions(self, from_state=None):
"""
Returns a list of all transitions.
INPUT:
- ``from_state`` -- (default: ``None``) If ``from_state`` is
given, then a list of transitions starting there is given.
OUTPUT:
A list of all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: FSM.transitions()
[Transition from '1' to '2': 1|-,
Transition from '2' to '2': 0|-]
"""
return list(self.iter_transitions(from_state))
def iter_transitions(self, from_state=None):
"""
Returns an iterator of all transitions.
INPUT:
- ``from_state`` -- (default: ``None``) If ``from_state`` is
given, then a list of transitions starting there is given.
OUTPUT:
An iterator of all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions('1')]
[('1', '2')]
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions('2')]
[('2', '2')]
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions()]
[('1', '2'), ('2', '2')]
"""
if from_state is None:
return self._iter_transitions_all_()
else:
return iter(self.state(from_state).transitions)
def _iter_transitions_all_(self):
"""
Returns an iterator over all transitions.
INPUT:
Nothing.
OUTPUT:
An iterator over all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM._iter_transitions_all_()]
[('1', '2'), ('2', '2')]
"""
for state in self.iter_states():
for t in state.transitions:
yield t
def initial_states(self):
"""
Returns a list of all initial states.
INPUT:
Nothing.
OUTPUT:
A list of all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True)
sage: B = FSMState('B')
sage: F = FiniteStateMachine([(A, B, 1, 0)])
sage: F.initial_states()
['A']
"""
return list(self.iter_initial_states())
def iter_initial_states(self):
"""
Returns an iterator of the initial states.
INPUT:
Nothing.
OUTPUT:
An iterator over all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True)
sage: B = FSMState('B')
sage: F = FiniteStateMachine([(A, B, 1, 0)])
sage: [s.label() for s in F.iter_initial_states()]
['A']
"""
return itertools.ifilter(lambda s:s.is_initial, self.iter_states())
def final_states(self):
"""
Returns a list of all final states.
INPUT:
Nothing.
OUTPUT:
A list of all final states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True)
sage: B = FSMState('B', is_initial=True)
sage: C = FSMState('C', is_final=True)
sage: F = FiniteStateMachine([(A, B), (A, C)])
sage: F.final_states()
['A', 'C']
"""
return list(self.iter_final_states())
def iter_final_states(self):
"""
Returns an iterator of the final states.
INPUT:
Nothing.
OUTPUT:
An iterator over all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True)
sage: B = FSMState('B', is_initial=True)
sage: C = FSMState('C', is_final=True)
sage: F = FiniteStateMachine([(A, B), (A, C)])
sage: [s.label() for s in F.iter_final_states()]
['A', 'C']
"""
return itertools.ifilter(lambda s:s.is_final, self.iter_states())
    def state(self, state):
        """
        Returns the state of the finite state machine.
        INPUT:
        - ``state`` -- If ``state`` is not an instance of
          :class:`FSMState`, then it is assumed that it is the label
          of a state.
        OUTPUT:
        Returns the state of the finite state machine corresponding to
        ``state``.
        If no state is found, then a ``LookupError`` is thrown.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: FSM = FiniteStateMachine([(A, 'B'), ('C', A)])
            sage: FSM.state('A') == A
            True
            sage: FSM.state('xyz')
            Traceback (most recent call last):
            ...
            LookupError: No state with label xyz found.
        """
        # ``switch`` is True when ``state`` is an FSMState instance.
        # ``what`` is a helper defined elsewhere in this file;
        # presumably it maps a state to its label (or vice versa)
        # depending on the flag -- TODO confirm against its definition.
        switch = is_FSMState(state)
        try:
            # Fast path: the label-to-state cache ``_states_dict_``.
            # It may not exist yet (AttributeError) or may not contain
            # the label (KeyError).
            return self._states_dict_[what(state, switch)]
        except AttributeError:
            # No cache available: fall back to a linear scan over all
            # states, comparing on the other representation.
            for s in self.iter_states():
                if what(s, not switch) == state:
                    return s
        except KeyError:
            pass
        # Neither the cache nor the scan produced a match.
        raise LookupError("No state with label %s found." % (what(state, switch),))
def transition(self, transition):
"""
Returns the transition of the finite state machine.
INPUT:
- ``transition`` -- If ``transition`` is not an instance of
:class:`FSMTransition`, then it is assumed that it is a
tuple ``(from_state, to_state, word_in, word_out)``.
OUTPUT:
Returns the transition of the finite state machine
corresponding to ``transition``.
If no transition is found, then a ``LookupError`` is thrown.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t = FSMTransition('A', 'B', 0)
sage: F = FiniteStateMachine([t])
sage: F.transition(('A', 'B', 0))
Transition from 'A' to 'B': 0|-
sage: id(t) == id(F.transition(('A', 'B', 0)))
True
"""
if not is_FSMTransition(transition):
transition = FSMTransition(*transition)
for s in self.iter_transitions(transition.from_state):
if s == transition:
return s
raise LookupError("No transition found.")
#*************************************************************************
# properties (state and transitions)
#*************************************************************************
def has_state(self, state):
"""
Returns whether ``state`` is one of the states of the finite
state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label of a state.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_state('A')
False
"""
try:
self.state(state)
return True
except LookupError:
return False
def has_transition(self, transition):
"""
Returns whether ``transition`` is one of the transitions of
the finite state machine.
INPUT:
- ``transition`` has to be a :class:`FSMTransition`.
OUTPUT:
True or False.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t = FSMTransition('A', 'A', 0, 1)
sage: FiniteStateMachine().has_transition(t)
False
sage: FiniteStateMachine().has_transition(('A', 'A', 0, 1))
Traceback (most recent call last):
...
TypeError: Transition is not an instance of FSMTransition.
"""
if is_FSMTransition(transition):
return transition in self.iter_transitions()
raise TypeError("Transition is not an instance of FSMTransition.")
def has_initial_state(self, state):
"""
Returns whether ``state`` is one of the initial states of the
finite state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label.
OUTPUT:
True or False.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'A')], initial_states=['A'])
sage: F.has_initial_state('A')
True
"""
try:
return self.state(state).is_initial
except LookupError:
return False
def has_initial_states(self):
"""
Returns whether the finite state machine has an initial state.
INPUT:
Nothing.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_initial_states()
False
"""
return len(self.initial_states()) > 0
def has_final_state(self, state):
"""
Returns whether ``state`` is one of the final states of the
finite state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine(final_states=['A']).has_final_state('A')
True
"""
try:
return self.state(state).is_final
except LookupError:
return False
def has_final_states(self):
"""
Returns whether the finite state machine has a final state.
INPUT:
Nothing.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_final_states()
False
"""
return len(self.final_states()) > 0
#*************************************************************************
# properties
#*************************************************************************
def is_deterministic(self):
"""
Returns whether the finite finite state machine is deterministic.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
A finite state machine is considered to be deterministic if
each transition has input label of length one and for each
pair `(q,a)` where `q` is a state and `a` is an element of the
input alphabet, there is at most one transition from `q` with
input label `a`.
TESTS::
sage: fsm = FiniteStateMachine()
sage: fsm.add_transition(('A', 'B', 0, []))
Transition from 'A' to 'B': 0|-
sage: fsm.is_deterministic()
True
sage: fsm.add_transition(('A', 'C', 0, []))
Transition from 'A' to 'C': 0|-
sage: fsm.is_deterministic()
False
sage: fsm.add_transition(('A', 'B', [0,1], []))
Transition from 'A' to 'B': 0,1|-
sage: fsm.is_deterministic()
False
"""
for state in self.iter_states():
for transition in state.transitions:
if len(transition.word_in) != 1:
return False
transition_classes_by_word_in = full_group_by(
state.transitions,
key=lambda t: t.word_in)
for key,transition_class in transition_classes_by_word_in:
if len(transition_class) > 1:
return False
return True
def is_complete(self):
"""
Returns whether the finite state machine is complete.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
A finite state machine is considered to be complete if
each transition has an input label of length one and for each
pair `(q, a)` where `q` is a state and `a` is an element of the
input alphabet, there is exactly one transition from `q` with
input label `a`.
EXAMPLES::
sage: fsm = FiniteStateMachine([(0, 0, 0, 0),
....: (0, 1, 1, 1),
....: (1, 1, 0, 0)],
....: determine_alphabets=False)
sage: fsm.is_complete()
Traceback (most recent call last):
...
ValueError: No input alphabet is given. Try calling determine_alphabets().
sage: fsm.input_alphabet = [0, 1]
sage: fsm.is_complete()
False
sage: fsm.add_transition((1, 1, 1, 1))
Transition from 1 to 1: 1|1
sage: fsm.is_complete()
True
sage: fsm.add_transition((0, 0, 1, 0))
Transition from 0 to 0: 1|0
sage: fsm.is_complete()
False
"""
if self.input_alphabet is None:
raise ValueError("No input alphabet is given. "
"Try calling determine_alphabets().")
for state in self.iter_states():
for transition in state.transitions:
if len(transition.word_in) != 1:
return False
transition_classes_by_word_in = full_group_by(
state.transitions,
key=lambda t: t.word_in)
for key, transition_class in transition_classes_by_word_in:
if len(transition_class) > 1:
return False
# all input labels are lists, extract the only element
outgoing_alphabet = [key[0] for key, transition_class in
transition_classes_by_word_in]
if not sorted(self.input_alphabet) == sorted(outgoing_alphabet):
return False
return True
def is_connected(self):
"""
TESTS::
sage: FiniteStateMachine().is_connected()
Traceback (most recent call last):
...
NotImplementedError
"""
raise NotImplementedError
#*************************************************************************
# let the finite state machine work
#*************************************************************************
def process(self, *args, **kwargs):
"""
Returns whether the finite state machine accepts the input, the state
where the computation stops and which output is generated.
INPUT:
- ``input_tape`` -- The input tape can be a list with entries from
the input alphabet.
- ``initial_state`` -- (default: ``None``) The state in which
to start. If this parameter is ``None`` and there is only
one initial state in the machine, then this state is taken.
OUTPUT:
A triple, where
- the first entry is ``True`` if the input string is accepted,
- the second gives the reached state after processing the
input tape (This is a state with label ``None`` if the input
could not be processed, i.e., when at one point no
transition to go could be found.), and
- the third gives a list of the output labels used during
processing (in the case the finite state machine runs as
transducer).
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial = True, is_final = True)
sage: binary_inverter = FiniteStateMachine({A:[(A, 0, 1), (A, 1, 0)]})
sage: binary_inverter.process([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
Alternatively, we can invoke this function by::
sage: binary_inverter([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
::
sage: NAF_ = FSMState('_', is_initial = True, is_final = True)
sage: NAF1 = FSMState('1', is_final = True)
sage: NAF = FiniteStateMachine(
....: {NAF_: [(NAF_, 0), (NAF1, 1)], NAF1: [(NAF_, 0)]})
sage: [NAF.process(w)[0] for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
....: [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
[True, True, False, True, False, False]
Non-deterministic finite state machines cannot be handeled.
::
sage: T = Transducer([(0, 1, 0, 0), (0, 2, 0, 0)],
....: initial_states=[0])
sage: T.process([0])
Traceback (most recent call last):
...
NotImplementedError: Non-deterministic path encountered when processing input.
sage: T = Transducer([(0, 1, [0, 0], 0), (0, 2, [0, 0, 1], 0),
....: (0, 1, 1, 2), (1, 0, [], 1), (1, 1, 1, 3)],
....: initial_states=[0], final_states=[0, 1])
sage: T.process([0])
(False, None, None)
sage: T.process([0, 0])
Traceback (most recent call last):
...
NotImplementedError: Non-deterministic path encountered when processing input.
sage: T.process([1])
(True, 1, [2])
sage: T.process([1, 1])
Traceback (most recent call last):
...
NotImplementedError: process cannot handle epsilon transition leaving state 1.
"""
it = self.iter_process(*args, **kwargs)
for _ in it:
pass
return (it.accept_input, it.current_state, it.output_tape)
def iter_process(self, input_tape=None, initial_state=None, **kwargs):
"""
See :meth:`.process` for more informations.
EXAMPLES::
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
sage: it = inverter.iter_process(input_tape=[0, 1, 1])
sage: for _ in it:
....: pass
sage: it.output_tape
[1, 0, 0]
"""
return FSMProcessIterator(self, input_tape, initial_state, **kwargs)
#*************************************************************************
# change finite state machine (add/remove state/transitions)
#*************************************************************************
def add_state(self, state):
"""
Adds a state to the finite state machine and returns the new
state. If the state already exists, that existing state is
returned.
INPUT:
- ``state`` is either an instance of
:class:`FSMState` or,
otherwise, a label of a state.
OUTPUT:
The new or existing state.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: F = FiniteStateMachine()
sage: A = FSMState('A', is_initial=True)
sage: F.add_state(A)
'A'
"""
try:
return self.state(state)
except LookupError:
pass
# at this point we know that we have a new state
if is_FSMState(state):
s = state
else:
s = FSMState(state)
s.transitions = list()
self._states_.append(s)
try:
self._states_dict_[s.label()] = s
except AttributeError:
pass
return s
def add_states(self, states):
"""
Adds several states. See add_state for more information.
INPUT:
- ``states`` -- a list of states or iterator over states.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine()
sage: F.add_states(['A', 'B'])
sage: F.states()
['A', 'B']
"""
for state in states:
self.add_state(state)
    def add_transition(self, *args, **kwargs):
        """
        Adds a transition to the finite state machine and returns the
        new transition.
        If the transition already exists, the return value of
        ``self.on_duplicate_transition`` is returned. See the
        documentation of :class:`FiniteStateMachine`.
        INPUT:
        The following forms are all accepted:
        ::
            sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
            sage: A = FSMState('A')
            sage: B = FSMState('B')
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition(FSMTransition(A, B, 0, 1))
            Transition from 'A' to 'B': 0|1
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition(A, B, 0, 1)
            Transition from 'A' to 'B': 0|1
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition(A, B, word_in=0, word_out=1)
            Transition from 'A' to 'B': 0|1
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition('A', 'B', {'word_in': 0, 'word_out': 1})
            Transition from 'A' to 'B': {'word_in': 0, 'word_out': 1}|-
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition(from_state=A, to_state=B,
            ....:                    word_in=0, word_out=1)
            Transition from 'A' to 'B': 0|1
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition({'from_state': A, 'to_state': B,
            ....:                    'word_in': 0, 'word_out': 1})
            Transition from 'A' to 'B': 0|1
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition((A, B, 0, 1))
            Transition from 'A' to 'B': 0|1
            sage: FSM = FiniteStateMachine()
            sage: FSM.add_transition([A, B, 0, 1])
            Transition from 'A' to 'B': 0|1
        If the states ``A`` and ``B`` are not instances of
        :class:`FSMState`, then it is assumed that they are labels of
        states.
        OUTPUT:
        The new transition.
        """
        # No arguments at all: silently do nothing.
        if len(args) + len(kwargs) == 0:
            return
        if len(args) + len(kwargs) == 1:
            # Exactly one argument: it is either a ready-made
            # FSMTransition, a dictionary of constructor keywords, or
            # an iterable of positional constructor arguments.
            if len(args) == 1:
                d = args[0]
                if is_FSMTransition(d):
                    return self._add_fsm_transition_(d)
            else:
                # Single keyword argument: use its value.
                # NOTE(review): kwargs.itervalues() is Python-2-only.
                d = next(kwargs.itervalues())
            if hasattr(d, 'iteritems'):
                # Duck-typed check for a (Python 2) dictionary of
                # keyword arguments.
                args = []
                kwargs = d
            elif hasattr(d, '__iter__'):
                # An iterable of positional arguments.
                args = d
                kwargs = {}
            else:
                raise TypeError("Cannot decide what to do with input.")
        # Map positional arguments onto the FSMTransition parameters,
        # then let explicit keywords override.
        data = dict(zip(
            ('from_state', 'to_state', 'word_in', 'word_out', 'hook'),
            args))
        data.update(kwargs)
        # Make sure both endpoint states exist in this machine and use
        # the stored state objects in the new transition.
        data['from_state'] = self.add_state(data['from_state'])
        data['to_state'] = self.add_state(data['to_state'])
        return self._add_fsm_transition_(FSMTransition(**data))
def _add_fsm_transition_(self, t):
"""
Adds a transition.
INPUT:
- ``t`` -- an instance of :class:`FSMTransition`.
OUTPUT:
The new transition.
TESTS::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: F = FiniteStateMachine()
sage: F._add_fsm_transition_(FSMTransition('A', 'B'))
Transition from 'A' to 'B': -|-
"""
try:
existing_transition = self.transition(t)
except LookupError:
pass
else:
return self.on_duplicate_transition(existing_transition, t)
from_state = self.add_state(t.from_state)
self.add_state(t.to_state)
from_state.transitions.append(t)
return t
    def add_from_transition_function(self, function, initial_states=None,
                                     explore_existing_states=True):
        """
        Constructs a finite state machine from a transition function.
        INPUT:
        - ``function`` may return a tuple (new_state, output_word) or a
          list of such tuples.
        - ``initial_states`` -- If no initial states are given, the
          already existing initial states of self are taken.
        - If ``explore_existing_states`` is True (default), then
          already existing states in self (e.g. already given final
          states) will also be processed if they are reachable from
          the initial states.
        OUTPUT:
        Nothing.
        EXAMPLES::
            sage: F = FiniteStateMachine(initial_states=['A'],
            ....:                        input_alphabet=[0, 1])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f)
            sage: F.transitions()
            [Transition from 'A' to 'A': 0|0,
             Transition from 'A' to 'B': 0|1,
             Transition from 'A' to 'A': 1|1,
             Transition from 'A' to 'B': 1|0,
             Transition from 'B' to 'A': 0|0,
             Transition from 'B' to 'B': 0|1,
             Transition from 'B' to 'A': 1|1,
             Transition from 'B' to 'B': 1|0]
        Initial states can also be given as a parameter::
            sage: F = FiniteStateMachine(input_alphabet=[0,1])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f,initial_states=['A'])
            sage: F.initial_states()
            ['A']
        Already existing states in the finite state machine (the final
        states in the example below) are also explored::
            sage: F = FiniteStateMachine(initial_states=[0],
            ....:                        final_states=[1],
            ....:                        input_alphabet=[0])
            sage: def transition_function(state, letter):
            ....:     return(1-state, [])
            sage: F.add_from_transition_function(transition_function)
            sage: F.transitions()
            [Transition from 0 to 1: 0|-,
             Transition from 1 to 0: 0|-]
        If ``explore_existing_states=False``, however, this behavior
        is turned off, i.e., already existing states are not
        explored::
            sage: F = FiniteStateMachine(initial_states=[0],
            ....:                        final_states=[1],
            ....:                        input_alphabet=[0])
            sage: def transition_function(state, letter):
            ....:     return(1-state, [])
            sage: F.add_from_transition_function(transition_function,
            ....:                               explore_existing_states=False)
            sage: F.transitions()
            [Transition from 0 to 1: 0|-]
        TEST::
            sage: F = FiniteStateMachine(initial_states=['A'])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f)
            Traceback (most recent call last):
            ...
            ValueError: No input alphabet is given.
            Try calling determine_alphabets().
        ::
            sage: def transition(state, where):
            ....:     return (vector([0, 0]), 1)
            sage: Transducer(transition, input_alphabet=[0], initial_states=[0])
            Traceback (most recent call last):
            ...
            TypeError: mutable vectors are unhashable
        """
        if self.input_alphabet is None:
            raise ValueError("No input alphabet is given. "
                             "Try calling determine_alphabets().")
        # ``not_done`` is the queue of states whose outgoing
        # transitions still have to be computed (breadth-first
        # exploration starting at the initial states).
        if initial_states is None:
            not_done = self.initial_states()
        elif hasattr(initial_states, '__iter__'):
            not_done = []
            for s in initial_states:
                state = self.add_state(s)
                state.is_initial = True
                not_done.append(state)
        else:
            raise TypeError('Initial states must be iterable ' \
                                '(e.g. a list of states).')
        if len(not_done) == 0:
            raise ValueError("No state is initial.")
        if explore_existing_states:
            # States already present in the machine; they are put back
            # on the queue as soon as they become reachable.
            ignore_done = self.states()
            for s in not_done:
                try:
                    ignore_done.remove(s)
                except ValueError:
                    pass
        else:
            ignore_done = []
        while len(not_done) > 0:
            s = not_done.pop(0)
            for letter in self.input_alphabet:
                # A LookupError from the callback means: no transition
                # for this (state, letter) pair.
                try:
                    return_value = function(s.label(), letter)
                except LookupError:
                    continue
                # A single pair is also accepted; normalize it to a
                # one-element list (lists have a ``pop`` attribute).
                if not hasattr(return_value, "pop"):
                    return_value = [return_value]
                # Validate that the callback returned (state, word)
                # pairs before mutating the machine.
                try:
                    for (st_label, word) in return_value:
                        pass
                except TypeError:
                    raise ValueError("The callback function for "
                                     "add_from_transition is expected "
                                     "to return a pair (new_state, "
                                     "output_label) or a list of such pairs. "
                                     "For the state %s and the input "
                                     "letter %s, it however returned %s, "
                                     "which is not acceptable."
                                     % (s.label(), letter, return_value))
                for (st_label, word) in return_value:
                    if not self.has_state(st_label):
                        # A brand-new state: schedule it for
                        # exploration.
                        not_done.append(self.add_state(st_label))
                    elif len(ignore_done) > 0:
                        # A pre-existing state that just became
                        # reachable: explore it exactly once.
                        u = self.state(st_label)
                        if u in ignore_done:
                            not_done.append(u)
                            ignore_done.remove(u)
                    self.add_transition(s, st_label,
                                        word_in=letter, word_out=word)
def add_transitions_from_function(self, function, labels_as_input=True):
"""
Adds one or more transitions if ``function(state, state)``
says that there are some.
INPUT:
- ``function`` -- a transition function. Given two states
``from_state`` and ``to_state`` (or their labels if
``label_as_input`` is true), this function shall return a
tuple ``(word_in, word_out)`` to add a transition from
``from_state`` to ``to_state`` with input and output labels
``word_in`` and ``word_out``, respectively. If no such
addition is to be added, the transition function shall
return ``None``. The transition function may also return
a list of such tuples in order to add multiple transitions
between the pair of states.
- ``label_as_input`` -- (default: ``True``)
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine()
sage: F.add_states(['A', 'B', 'C'])
sage: def f(state1, state2):
....: if state1 == 'C':
....: return None
....: return (0, 1)
sage: F.add_transitions_from_function(f)
sage: len(F.transitions())
6
Multiple transitions are also possible::
sage: F = FiniteStateMachine()
sage: F.add_states([0, 1])
sage: def f(state1, state2):
....: if state1 != state2:
....: return [(0, 1), (1, 0)]
....: else:
....: return None
sage: F.add_transitions_from_function(f)
sage: F.transitions()
[Transition from 0 to 1: 0|1,
Transition from 0 to 1: 1|0,
Transition from 1 to 0: 0|1,
Transition from 1 to 0: 1|0]
TESTS::
sage: F = FiniteStateMachine()
sage: F.add_state(0)
0
sage: def f(state1, state2):
....: return 1
sage: F.add_transitions_from_function(f)
Traceback (most recent call last):
...
ValueError: The callback function for add_transitions_from_function
is expected to return a pair (word_in, word_out) or a list of such
pairs. For states 0 and 0 however, it returned 1,
which is not acceptable.
"""
for s_from in self.iter_states():
for s_to in self.iter_states():
try:
if labels_as_input:
return_value = function(s_from.label(), s_to.label())
else:
return_value = function(s_from, s_to)
except LookupError:
continue
if return_value is None:
continue
if not hasattr(return_value, "pop"):
transitions = [return_value]
else:
transitions = return_value
for t in transitions:
if not hasattr(t, '__getitem__'):
raise ValueError("The callback function for "
"add_transitions_from_function "
"is expected to return a "
"pair (word_in, word_out) or a "
"list of such pairs. For "
"states %s and %s however, it "
"returned %s, which is not "
"acceptable." % (s_from, s_to, return_value))
label_in = t[0]
try:
label_out = t[1]
except LookupError:
label_out = None
self.add_transition(s_from, s_to, label_in, label_out)
def delete_transition(self, t):
"""
Deletes a transition by removing it from the list of transitions of
the state, where the transition starts.
INPUT:
- ``t`` -- a transition.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 0), ('B', 'A', 1)])
sage: F.delete_transition(('A', 'B', 0))
sage: F.transitions()
[Transition from 'B' to 'A': 1|-]
"""
transition = self.transition(t)
transition.from_state.transitions.remove(transition)
def delete_state(self, s):
"""
Deletes a state and all transitions coming or going to this state.
INPUT:
- ``s`` -- a label of a state or an :class:`FSMState`.
OUTPUT:
Nothing.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t1 = FSMTransition('A', 'B', 0)
sage: t2 = FSMTransition('B', 'B', 1)
sage: F = FiniteStateMachine([t1, t2])
sage: F.delete_state('A')
sage: F.transitions()
[Transition from 'B' to 'B': 1|-]
TESTS::
sage: F._states_
['B']
sage: F._states_dict_ # This shows that #16024 is fixed.
{'B': 'B'}
"""
state = self.state(s)
for transition in self.transitions():
if transition.to_state == state:
self.delete_transition(transition)
self._states_.remove(state)
try:
del self._states_dict_[state.label()]
except AttributeError:
pass
    def remove_epsilon_transitions(self):
        """
        Remove transitions with empty input word (not implemented).

        TESTS::

            sage: FiniteStateMachine().remove_epsilon_transitions()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        raise NotImplementedError
def accessible_components(self):
"""
Returns a new finite state machine with the accessible states
of self and all transitions between those states.
INPUT:
Nothing.
OUTPUT:
A finite state machine with the accessible states of self and
all transitions between those states.
A state is accessible if there is a directed path from an
initial state to the state. If self has no initial states then
a copy of the finite state machine self is returned.
EXAMPLES::
sage: F = Automaton([(0, 0, 0), (0, 1, 1), (1, 1, 0), (1, 0, 1)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 2 states
::
sage: F = Automaton([(0, 0, 1), (0, 0, 1), (1, 1, 0), (1, 0, 1)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 1 states
TESTS:
Check whether input of length > 1 works::
sage: F = Automaton([(0, 1, [0, 1]), (0, 2, 0)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 3 states
"""
if len(self.initial_states()) == 0:
return deepcopy(self)
memo = {}
new_initial_states=map(lambda x: deepcopy(x, memo),
self.initial_states())
result = self.empty_copy()
result.add_from_transition_function(accessible,
initial_states=new_initial_states)
for final_state in self.iter_final_states():
try:
new_final_state=result.state(final_state.label)
new_final_state.is_final=True
except LookupError:
pass
return result
# *************************************************************************
# creating new finite state machines
# *************************************************************************
    def disjoint_union(self, other):
        """
        Return the disjoint union of ``self`` and ``other``
        (not implemented).

        TESTS::

            sage: F = FiniteStateMachine([('A', 'A')])
            sage: FiniteStateMachine().disjoint_union(F)
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        raise NotImplementedError
    def concatenation(self, other):
        """
        Return the concatenation of ``self`` and ``other``
        (not implemented).

        TESTS::

            sage: F = FiniteStateMachine([('A', 'A')])
            sage: FiniteStateMachine().concatenation(F)
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        raise NotImplementedError
    def Kleene_closure(self):
        """
        Return the Kleene closure of ``self`` (not implemented).

        TESTS::

            sage: FiniteStateMachine().Kleene_closure()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        raise NotImplementedError
    def intersection(self, other):
        """
        Return the intersection of ``self`` and ``other``
        (not implemented).

        TESTS::

            sage: FiniteStateMachine().intersection(FiniteStateMachine())
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        raise NotImplementedError
    def product_FiniteStateMachine(self, other, function,
                                   new_input_alphabet=None,
                                   only_accessible_components=True,
                                   final_function=None,
                                   new_class=None):
        r"""
        Returns a new finite state machine whose states are
        `d`-tuples of states of the original finite state machines.

        INPUT:

        - ``other`` -- a finite state machine (for `d=2`) or a list
          (or iterable) of `d-1` finite state machines.

        - ``function`` has to accept `d` transitions from `A_j` to `B_j`
          for `j\in\{1, \ldots, d\}` and returns a pair ``(word_in, word_out)``
          which is the label of the transition `A=(A_1, \ldots, A_d)` to `B=(B_1,
          \ldots, B_d)`. If there is no transition from `A` to `B`,
          then ``function`` should raise a ``LookupError``.

        - ``new_input_alphabet`` (optional) -- the new input alphabet
          as a list.

        - ``only_accessible_components`` -- If ``True`` (default), then
          the result is piped through :meth:`.accessible_components`. If no
          ``new_input_alphabet`` is given, it is determined by
          :meth:`.determine_alphabets`.

        - ``final_function`` -- A function mapping `d` final states of
          the original finite state machines to the final output of
          the corresponding state in the new finite state machine. By
          default, the final output is the empty word if both final
          outputs of the constituent states are empty; otherwise, a
          ``ValueError`` is raised.

        - ``new_class`` -- Class of the new finite state machine. By
          default (``None``), the class of ``self`` is used.

        OUTPUT:

        A finite state machine whose states are `d`-tuples of states of the
        original finite state machines. A state is initial or
        final if all constituent states are initial or final,
        respectively.

        The labels of the transitions are defined by ``function``.

        The final output of a final state is determined by calling
        ``final_function`` on the constituent states.

        The color of a new state is the tuple of colors of the
        constituent states of ``self`` and ``other``.

        EXAMPLES::

            sage: F = Automaton([('A', 'B', 1), ('A', 'A', 0), ('B', 'A', 2)],
            ....:               initial_states=['A'], final_states=['B'],
            ....:               determine_alphabets=True)
            sage: G = Automaton([(1, 1, 1)], initial_states=[1], final_states=[1])
            sage: def addition(transition1, transition2):
            ....:     return (transition1.word_in[0] + transition2.word_in[0],
            ....:             None)
            sage: H = F.product_FiniteStateMachine(G, addition, [0, 1, 2, 3], only_accessible_components=False)
            sage: H.transitions()
            [Transition from ('A', 1) to ('B', 1): 2|-,
             Transition from ('A', 1) to ('A', 1): 1|-,
             Transition from ('B', 1) to ('A', 1): 3|-]
            sage: H1 = F.product_FiniteStateMachine(G, addition, [0, 1, 2, 3], only_accessible_components=False)
            sage: H1.states()[0].label()[0] is F.states()[0]
            True
            sage: H1.states()[0].label()[1] is G.states()[0]
            True

        ::

            sage: F = Automaton([(0,1,1/4), (0,0,3/4), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: G = Automaton([(0,0,1), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: H = F.product_FiniteStateMachine(
            ....:     G, lambda t1,t2: (t1.word_in[0]*t2.word_in[0], None))
            sage: H.states()
            [(0, 0), (1, 0)]

        ::

            sage: F = Automaton([(0,1,1/4), (0,0,3/4), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: G = Automaton([(0,0,1), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: H = F.product_FiniteStateMachine(G,
            ....:     lambda t1,t2: (t1.word_in[0]*t2.word_in[0], None),
            ....:     only_accessible_components=False)
            sage: H.states()
            [(0, 0), (1, 0), (0, 1), (1, 1)]

        Also final output words are considered according to the function
        ``final_function``::

            sage: F = Transducer([(0, 1, 0, 1), (1, 1, 1, 1), (1, 1, 0, 1)],
            ....:                final_states=[1])
            sage: F.state(1).final_word_out = 1
            sage: G = Transducer([(0, 0, 0, 1), (0, 0, 1, 0)], final_states=[0])
            sage: G.state(0).final_word_out = 1
            sage: def minus(t1, t2):
            ....:     return (t1.word_in[0] - t2.word_in[0],
            ....:             t1.word_out[0] - t2.word_out[0])
            sage: H = F.product_FiniteStateMachine(G, minus)
            Traceback (most recent call last):
            ...
            ValueError: A final function must be given.
            sage: def plus(s1, s2):
            ....:     return s1.final_word_out[0] + s2.final_word_out[0]
            sage: H = F.product_FiniteStateMachine(G, minus,
            ....:                                  final_function=plus)
            sage: H.final_states()
            [(1, 0)]
            sage: H.final_states()[0].final_word_out
            [2]

        Products of more than two finite state machines are also possible::

            sage: def plus(s1, s2, s3):
            ....:     if s1.word_in == s2.word_in == s3.word_in:
            ....:         return (s1.word_in,
            ....:                 sum(s.word_out[0] for s in (s1, s2, s3)))
            ....:     else:
            ....:         raise LookupError
            sage: T0 = transducers.CountSubblockOccurrences([0, 0], [0, 1, 2])
            sage: T1 = transducers.CountSubblockOccurrences([1, 1], [0, 1, 2])
            sage: T2 = transducers.CountSubblockOccurrences([2, 2], [0, 1, 2])
            sage: T = T0.product_FiniteStateMachine([T1, T2], plus)
            sage: T.transitions()
            [Transition from ((), (), ()) to ((0,), (), ()): 0|0,
             Transition from ((), (), ()) to ((), (1,), ()): 1|0,
             Transition from ((), (), ()) to ((), (), (2,)): 2|0,
             Transition from ((0,), (), ()) to ((0,), (), ()): 0|1,
             Transition from ((0,), (), ()) to ((), (1,), ()): 1|0,
             Transition from ((0,), (), ()) to ((), (), (2,)): 2|0,
             Transition from ((), (1,), ()) to ((0,), (), ()): 0|0,
             Transition from ((), (1,), ()) to ((), (1,), ()): 1|1,
             Transition from ((), (1,), ()) to ((), (), (2,)): 2|0,
             Transition from ((), (), (2,)) to ((0,), (), ()): 0|0,
             Transition from ((), (), (2,)) to ((), (1,), ()): 1|0,
             Transition from ((), (), (2,)) to ((), (), (2,)): 2|1]
            sage: T([0, 0, 1, 1, 2, 2, 0, 1, 2, 2])
            [0, 1, 0, 1, 0, 1, 0, 0, 0, 1]

        ``other`` can also be an iterable::

            sage: T == T0.product_FiniteStateMachine(iter([T1, T2]), plus)
            True

        TESTS:

        Check that colors are correctly dealt with. In particular, the
        new colors have to be hashable such that
        :meth:`Automaton.determinisation` does not fail::

            sage: A = Automaton([[0, 0, 0]], initial_states=[0])
            sage: B = A.product_FiniteStateMachine(A,
            ....:                                  lambda t1, t2: (0, None))
            sage: B.states()[0].color
            (None, None)
            sage: B.determinisation()
            Automaton with 1 states

        Check handling of the parameter ``other``::

            sage: A.product_FiniteStateMachine(None, plus)
            Traceback (most recent call last):
            ...
            ValueError: other must be a finite state machine or a list
            of finite state machines.
            sage: A.product_FiniteStateMachine([None], plus)
            Traceback (most recent call last):
            ...
            ValueError: other must be a finite state machine or a list
            of finite state machines.

        Test whether ``new_class`` works::

            sage: T = Transducer()
            sage: type(T.product_FiniteStateMachine(T, None))
            <class 'sage.combinat.finite_state_machine.Transducer'>
            sage: type(T.product_FiniteStateMachine(T, None,
            ....:                                   new_class=Automaton))
            <class 'sage.combinat.finite_state_machine.Automaton'>
        """
        # The default final function yields the empty word (and raises a
        # ValueError for non-empty constituent final outputs, see docstring).
        if final_function is None:
            final_function = default_final_function
        result = self.empty_copy(new_class=new_class)
        if new_input_alphabet is not None:
            result.input_alphabet = new_input_alphabet
        else:
            result.input_alphabet = None
        # Normalize ``other`` into the full list of factor machines
        # (``self`` always being the first factor).
        if hasattr(other, '__iter__'):
            machines = [self]
            machines.extend(other)
            if not all(is_FiniteStateMachine(m) for m in machines):
                raise ValueError("other must be a finite state machine "
                                 "or a list of finite state machines.")
        elif is_FiniteStateMachine(other):
            machines = [self, other]
        else:
            raise ValueError("other must be a finite state machine or "
                             "a list of finite state machines.")
        # A product transition is built from one transition per factor;
        # ``function`` signals "no such transition" by raising LookupError.
        for transitions in itertools.product(
                *(m.iter_transitions() for m in machines)):
            try:
                word = function(*transitions)
            except LookupError:
                continue
            result.add_transition(tuple(t.from_state for t in transitions),
                                  tuple(t.to_state for t in transitions),
                                  word[0], word[1])
        # A product state is initial/final iff all its components are;
        # its color is the tuple of component colors.
        for state in result.states():
            if all(s.is_initial for s in state.label()):
                state.is_initial = True
            if all(s.is_final for s in state.label()):
                state.is_final = True
                state.final_word_out = final_function(*state.label())
            state.color = tuple(s.color for s in state.label())
        if only_accessible_components:
            if result.input_alphabet is None:
                result.determine_alphabets()
            return result.accessible_components()
        else:
            return result
    def composition(self, other, algorithm=None,
                    only_accessible_components=True):
        """
        Returns a new transducer which is the composition of ``self``
        and ``other``.

        INPUT:

        - ``other`` -- a transducer

        - ``algorithm`` -- can be one of the following

          - ``direct`` -- The composition is calculated directly.

            There can be arbitrarily many initial and final states,
            but the input and output labels must have length 1.

            WARNING: The output of other is fed into self.

          - ``explorative`` -- An explorative algorithm is used.

            At least the following restrictions apply, but are not
            checked:

            - both self and other have exactly one initial state
            - all input labels of transitions have length exactly 1

            The input alphabet of self has to be specified.

            This is a very limited implementation of composition.

            WARNING: The output of ``other`` is fed into ``self``.

          If algorithm is ``None``, then the algorithm is chosen
          automatically (at the moment always ``direct``).

        OUTPUT:

        A new transducer.

        The labels of the new finite state machine are pairs of states
        of the original finite state machines. The color of a new
        state is the tuple of colors of the constituent states.

        EXAMPLES::

            sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
            ....:                initial_states=['A', 'B'], final_states=['B'],
            ....:                determine_alphabets=True)
            sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
            ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
            ....:                initial_states=[1], final_states=[2],
            ....:                determine_alphabets=True)
            sage: Hd = F.composition(G, algorithm='direct')
            sage: Hd.initial_states()
            [(1, 'B'), (1, 'A')]
            sage: Hd.transitions()
            [Transition from (1, 'B') to (1, 'A'): 1|1,
             Transition from (1, 'A') to (2, 'B'): 0|0,
             Transition from (2, 'B') to (2, 'A'): 0|1,
             Transition from (2, 'A') to (2, 'B'): 1|0]

        ::

            sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
            ....:                 ('B', 'B', 0, 0)],
            ....:                initial_states=['A'], final_states=['B'])
            sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
            ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
            ....:                initial_states=[1], final_states=[1])
            sage: He = G.composition(F, algorithm='explorative')
            sage: He.transitions()
            [Transition from ('A', 1) to ('B', 2): 1|0,1,
             Transition from ('B', 2) to ('B', 2): 0|1,
             Transition from ('B', 2) to ('B', 1): 1|1,
             Transition from ('B', 1) to ('B', 1): 0|0,
             Transition from ('B', 1) to ('B', 2): 1|0]

        Also final output words are considered if ``algorithm='direct'`` or
        ``None``::

            sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
            ....:                initial_states=['A', 'B'],
            ....:                final_states=['A', 'B'])
            sage: F.state('A').final_word_out = 0
            sage: F.state('B').final_word_out = 1
            sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
            ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
            ....:                initial_states=[1], final_states=[2])
            sage: G.state(2).final_word_out = 0
            sage: Hd = F.composition(G, algorithm='direct')
            sage: Hd.final_states()
            [(2, 'B')]

        Note that ``(2, 'A')`` is not final, as the final output `0`
        of state `2` of `G` cannot be processed in state ``'A'`` of
        `F`.

        ::

            sage: [s.final_word_out for s in Hd.final_states()]
            [[1, 0]]

        Be aware that after composition, different transitions may
        share the same output label (same python object)::

            sage: F = Transducer([ ('A','B',0,0), ('B','A',0,0)],
            ....:                initial_states=['A'],
            ....:                final_states=['A'])
            sage: F.transitions()[0].word_out is F.transitions()[1].word_out
            False
            sage: G = Transducer([('C','C',0,1)],
            ....:                initial_states=['C'],
            ....:                final_states=['C'])
            sage: H = G.composition(F)
            sage: H.transitions()[0].word_out is H.transitions()[1].word_out
            True

        In the explorative algorithm, transducers with non-empty final
        output words are currently not implemented::

            sage: A = transducers.GrayCode()
            sage: B = transducers.abs([0, 1])
            sage: A.composition(B, algorithm='explorative')
            Traceback (most recent call last):
            ...
            NotImplementedError: Explorative composition is not
            implemented for transducers with non-empty final output
            words. Try the direct algorithm instead.

        Similarly, the explorative algorithm cannot handle
        non-deterministic finite state machines::

            sage: A = Transducer([(0, 0, 0, 0), (0, 1, 0, 0)])
            sage: B = transducers.Identity([0])
            sage: A.composition(B, algorithm='explorative')
            Traceback (most recent call last):
            ...
            NotImplementedError: Explorative composition is currently
            not implemented for non-deterministic transducers.
            sage: B.composition(A, algorithm='explorative')
            Traceback (most recent call last):
            ...
            NotImplementedError: Explorative composition is currently
            not implemented for non-deterministic transducers.

        TESTS:

        Due to the limitations of the two algorithms the following
        (examples from above, but different algorithm used) does not
        give a full answer or does not work.

        In the following, ``algorithm='explorative'`` is inadequate,
        as ``F`` has more than one initial state::

            sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
            ....:                initial_states=['A', 'B'], final_states=['B'],
            ....:                determine_alphabets=True)
            sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
            ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
            ....:                initial_states=[1], final_states=[2],
            ....:                determine_alphabets=True)
            sage: He = F.composition(G, algorithm='explorative')
            sage: He.initial_states()
            [(1, 'A')]
            sage: He.transitions()
            [Transition from (1, 'A') to (2, 'B'): 0|0,
             Transition from (2, 'B') to (2, 'A'): 0|1,
             Transition from (2, 'A') to (2, 'B'): 1|0]

        In the following example, ``algorithm='direct'`` is inappropriate
        as there are edges with output labels of length greater than 1::

            sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
            ....:                 ('B', 'B', 0, 0)],
            ....:                initial_states=['A'], final_states=['B'])
            sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
            ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
            ....:                initial_states=[1], final_states=[1])
            sage: Hd = G.composition(F, algorithm='direct')

        In the following examples, we compose transducers and automata
        and check whether the types are correct. ::

            sage: from sage.combinat.finite_state_machine import (
            ....:     is_Automaton, is_Transducer)
            sage: T = Transducer([(0, 0, 0, 0)], initial_states=[0])
            sage: A = Automaton([(0, 0, 0)], initial_states=[0])
            sage: is_Transducer(T.composition(T, algorithm='direct'))
            True
            sage: is_Transducer(T.composition(T, algorithm='explorative'))
            True
            sage: T.composition(A, algorithm='direct')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: T.composition(A, algorithm='explorative')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: A.composition(A, algorithm='direct')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: A.composition(A, algorithm='explorative')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: is_Automaton(A.composition(T, algorithm='direct'))
            True
            sage: is_Automaton(A.composition(T, algorithm='explorative'))
            True
        """
        # ``other`` must produce output that can be fed into ``self``
        # (its error message shows this flag rules out automata).
        if not other._allow_composition_:
            raise TypeError("Composition with automaton is not "
                            "possible.")

        # Automatic choice: currently always the direct algorithm.
        if algorithm is None:
            algorithm = 'direct'
        if algorithm == 'direct':
            return self._composition_direct_(other, only_accessible_components)
        elif algorithm == 'explorative':
            return self._composition_explorative_(other)
        else:
            raise ValueError("Unknown algorithm %s." % (algorithm,))
def _composition_direct_(self, other, only_accessible_components=True):
"""
See :meth:`.composition` for details.
TESTS::
sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
....: initial_states=['A', 'B'], final_states=['B'],
....: determine_alphabets=True)
sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
....: (2, 2, 1, 1), (2, 2, 0, 0)],
....: initial_states=[1], final_states=[2],
....: determine_alphabets=True)
sage: Hd = F._composition_direct_(G)
sage: Hd.initial_states()
[(1, 'B'), (1, 'A')]
sage: Hd.transitions()
[Transition from (1, 'B') to (1, 'A'): 1|1,
Transition from (1, 'A') to (2, 'B'): 0|0,
Transition from (2, 'B') to (2, 'A'): 0|1,
Transition from (2, 'A') to (2, 'B'): 1|0]
"""
result = other.product_FiniteStateMachine(
self, function,
only_accessible_components=only_accessible_components,
final_function=lambda s1, s2: [],
new_class=self.__class__)
for state_result in result.iter_states():
state = state_result.label()[0]
if state.is_final:
accept, state_to, output = self.process(
state.final_word_out,
initial_state=self.state(state_result.label()[1]))
if not accept:
state_result.is_final = False
else:
state_result.is_final = True
state_result.final_word_out = output
return result
    def _composition_explorative_(self, other):
        """
        See :meth:`.composition` for details.

        TESTS::

            sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
            ....:                 ('B', 'B', 0, 0)],
            ....:                initial_states=['A'], final_states=['B'])
            sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
            ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
            ....:                initial_states=[1], final_states=[1])
            sage: He = G._composition_explorative_(F)
            sage: He.transitions()
            [Transition from ('A', 1) to ('B', 2): 1|0,1,
             Transition from ('B', 2) to ('B', 2): 0|1,
             Transition from ('B', 2) to ('B', 1): 1|1,
             Transition from ('B', 1) to ('B', 1): 0|0,
             Transition from ('B', 1) to ('B', 2): 1|0]

        Check that colors are correctly dealt with. In particular, the
        new colors have to be hashable such that
        :meth:`Automaton.determinisation` does not fail::

            sage: T = Transducer([[0, 0, 0, 0]], initial_states=[0])
            sage: A = T.input_projection()
            sage: B = A.composition(T, algorithm='explorative')
            sage: B.states()[0].color
            (None, None)
            sage: B.determinisation()
            Automaton with 1 states

        .. TODO::

            The explorative algorithm should be re-implemented using the
            process iterators of both finite state machines.
        """
        # Neither machine may have final output words (not supported here).
        if any(s.final_word_out for s in self.iter_final_states()) or \
                any(s.final_word_out for s in other.iter_final_states()):
            raise NotImplementedError("Explorative composition is not "
                                      "implemented for transducers with "
                                      "non-empty final output words. Try "
                                      "the direct algorithm instead.")
        if not self.is_deterministic() or not other.is_deterministic():
            raise NotImplementedError("Explorative composition is "
                                      "currently not implemented for "
                                      "non-deterministic transducers.")

        F = other.empty_copy(new_class=self.__class__)
        # Exactly one initial state per machine is assumed (see docstring
        # of :meth:`composition`); the composed initial state pairs them.
        new_initial_states = [(other.initial_states()[0], self.initial_states()[0])]
        # NOTE(review): ``composition_transition`` is not defined in this
        # excerpt; presumably a nested helper (feeding the output of the
        # ``other`` component through ``self``) is missing here — confirm
        # against the upstream source.
        F.add_from_transition_function(composition_transition,
                                       initial_states=new_initial_states)

        # A composed state is final iff both components are final; colors
        # are paired (and thereby kept hashable).
        for state in F.states():
            if all(map(lambda s: s.is_final, state.label())):
                state.is_final = True
            state.color = tuple(map(lambda s: s.color, state.label()))
        return F
    def input_projection(self):
        """
        Returns an automaton where the output of each transition of
        ``self`` is deleted.

        INPUT:

        Nothing

        OUTPUT:

        An automaton.

        EXAMPLES::

            sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
            ....:                         ('B', 'B', 1, 0)])
            sage: G = F.input_projection()
            sage: G.transitions()
            [Transition from 'A' to 'B': 0|-,
             Transition from 'A' to 'A': 1|-,
             Transition from 'B' to 'B': 1|-]
        """
        # Delegate to the generic projection with the input component.
        return self.projection(what='input')
    def output_projection(self):
        """
        Returns an automaton where the input of each transition of
        ``self`` is deleted and the new input is the original output.

        INPUT:

        Nothing

        OUTPUT:

        An automaton.

        EXAMPLES::

            sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
            ....:                         ('B', 'B', 1, 0)])
            sage: G = F.output_projection()
            sage: G.transitions()
            [Transition from 'A' to 'B': 1|-,
             Transition from 'A' to 'A': 1|-,
             Transition from 'B' to 'B': 0|-]

        Final output words are also considered correctly::

            sage: H = Transducer([('A', 'B', 0, 1), ('A', 'A', 1, 1),
            ....:                 ('B', 'B', 1, 0), ('A', ('final', 0), 0, 0)],
            ....:                final_states=['A', 'B'])
            sage: H.state('B').final_word_out = 2
            sage: J = H.output_projection()
            sage: J.states()
            ['A', 'B', ('final', 0), ('final', 1)]
            sage: J.transitions()
            [Transition from 'A' to 'B': 1|-,
             Transition from 'A' to 'A': 1|-,
             Transition from 'A' to ('final', 0): 0|-,
             Transition from 'B' to 'B': 0|-,
             Transition from 'B' to ('final', 1): 2|-]
            sage: J.final_states()
            ['A', ('final', 1)]
        """
        # Delegate to the generic projection with the output component.
        return self.projection(what='output')
def projection(self, what='input'):
"""
Returns an Automaton which transition labels are the projection
of the transition labels of the input.
INPUT:
- ``what`` -- (default: ``input``) either ``input`` or ``output``.
OUTPUT:
An automaton.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
....: ('B', 'B', 1, 0)])
sage: G = F.projection(what='output')
sage: G.transitions()
[Transition from 'A' to 'B': 1|-,
Transition from 'A' to 'A': 1|-,
Transition from 'B' to 'B': 0|-]
"""
new = Automaton()
# TODO: use empty_copy() in order to
# preserve on_duplicate_transition and future extensions.
# for this, empty_copy would need a new optional argument
# use_class=None ?
if what == 'input':
new.input_alphabet = copy(self.input_alphabet)
elif what == 'output':
new.input_alphabet = copy(self.output_alphabet)
else:
raise NotImplementedError
state_mapping = {}
for state in self.iter_states():
state_mapping[state] = new.add_state(deepcopy(state))
for transition in self.iter_transitions():
if what == 'input':
new_word_in = transition.word_in
elif what == 'output':
new_word_in = transition.word_out
else:
raise NotImplementedError
new.add_transition((state_mapping[transition.from_state],
state_mapping[transition.to_state],
new_word_in, None))
if what == 'output':
states = [s for s in self.iter_final_states() if s.final_word_out]
if not states:
return new
number = 0
while new.has_state(('final', number)):
number += 1
final = new.add_state(('final', number))
final.is_final = True
for state in states:
output = state.final_word_out
new.state(state_mapping[state]).final_word_out = []
new.state(state_mapping[state]).is_final = False
new.add_transition((state_mapping[state], final, output, None))
return new
def transposition(self):
"""
Returns a new finite state machine, where all transitions of the
input finite state machine are reversed.
INPUT:
Nothing.
OUTPUT:
A new finite state machine.
EXAMPLES::
sage: aut = Automaton([('A', 'A', 0), ('A', 'A', 1), ('A', 'B', 0)],
....: initial_states=['A'], final_states=['B'])
sage: aut.transposition().transitions('B')
[Transition from 'B' to 'A': 0|-]
::
sage: aut = Automaton([('1', '1', 1), ('1', '2', 0), ('2', '2', 0)],
....: initial_states=['1'], final_states=['1', '2'])
sage: aut.transposition().initial_states()
['1', '2']
TESTS:
If a final state of ``self`` has a non-empty final output word,
transposition is not implemented::
sage: T = Transducer([('1', '1', 1, 0), ('1', '2', 0, 1),
....: ('2', '2', 0, 2)],
....: initial_states=['1'],
....: final_states=['1', '2'])
sage: T.state('1').final_word_out = [2, 5]
sage: T.transposition()
Traceback (most recent call last):
...
NotImplementedError: Transposition for transducers with
final output words is not implemented.
"""
transposition = self.empty_copy()
for state in self.iter_states():
transposition.add_state(deepcopy(state))
for transition in self.iter_transitions():
transposition.add_transition(
transition.to_state.label(), transition.from_state.label(),
transition.word_in, transition.word_out)
for initial in self.iter_initial_states():
state = transposition.state(initial.label())
if not initial.is_final:
state.is_final = True
state.is_initial = False
for final in self.iter_final_states():
state = transposition.state(final.label())
if final.final_word_out:
raise NotImplementedError("Transposition for transducers "
"with final output words is not "
"implemented.")
if not final.is_initial:
state.is_final = False
state.is_initial = True
return transposition
def split_transitions(self):
"""
Returns a new transducer, where all transitions in self with input
labels consisting of more than one letter
are replaced by a path of the corresponding length.
INPUT:
Nothing.
OUTPUT:
A new transducer.
EXAMPLES::
sage: A = Transducer([('A', 'B', [1, 2, 3], 0)],
....: initial_states=['A'], final_states=['B'])
sage: A.split_transitions().states()
[('A', ()), ('B', ()),
('A', (1,)), ('A', (1, 2))]
"""
new = self.empty_copy()
for state in self.states():
new.add_state(FSMState((state, ()), is_initial=state.is_initial,
is_final=state.is_final))
for transition in self.transitions():
for j in range(len(transition.word_in)-1):
new.add_transition((
(transition.from_state, tuple(transition.word_in[:j])),
(transition.from_state, tuple(transition.word_in[:j+1])),
transition.word_in[j],
[]))
new.add_transition((
(transition.from_state, tuple(transition.word_in[:-1])),
(transition.to_state, ()),
transition.word_in[-1:],
transition.word_out))
return new
def final_components(self):
"""
Returns the final components of a finite state machine as finite
state machines.
INPUT:
Nothing.
OUTPUT:
A list of finite state machines, each representing a final
component of ``self``.
A final component of a transducer ``T`` is a strongly connected
component ``C`` such that there are no transitions of ``T``
leaving ``C``.
The final components are the only parts of a transducer which
influence the main terms of the asympotic behaviour of the sum
of output labels of a transducer, see [HKP2014]_ and [HKW2014]_.
EXAMPLES::
sage: T = Transducer([['A', 'B', 0, 0], ['B', 'C', 0, 1],
....: ['C', 'B', 0, 1], ['A', 'D', 1, 0],
....: ['D', 'D', 0, 0], ['D', 'B', 1, 0],
....: ['A', 'E', 2, 0], ['E', 'E', 0, 0]])
sage: FC = T.final_components()
sage: sorted(FC[0].transitions())
[Transition from 'B' to 'C': 0|1,
Transition from 'C' to 'B': 0|1]
sage: FC[1].transitions()
[Transition from 'E' to 'E': 0|0]
Another example (cycle of length 2)::
sage: T = Automaton([[0, 1, 0], [1, 0, 0]])
sage: len(T.final_components()) == 1
True
sage: T.final_components()[0].transitions()
[Transition from 0 to 1: 0|-,
Transition from 1 to 0: 0|-]
REFERENCES:
.. [HKP2014] Clemens Heuberger, Sara Kropf, and Helmut
Prodinger, *Asymptotic analysis of the sum of the output of
transducer*, in preparation.
"""
DG = self.digraph()
condensation = DG.strongly_connected_components_digraph()
return [self.induced_sub_finite_state_machine(map(self.state, component))
for component in condensation.vertices()
if condensation.out_degree(component) == 0]
# *************************************************************************
# simplifications
# *************************************************************************
def prepone_output(self):
"""
For all paths, shift the output of the path from one
transition to the earliest possible preceeding transition of
the path.
INPUT:
Nothing.
OUTPUT:
Nothing.
Apply the following to each state `s` (except initial states) of the
finite state machine as often as possible:
If the letter `a` is a prefix of the output label of all transitions from
`s` (including the final output of `s`), then remove it from all these
labels and append it to all output labels of all transitions leading
to `s`.
We assume that the states have no output labels, but final outputs are
allowed.
EXAMPLES::
sage: A = Transducer([('A', 'B', 1, 1),
....: ('B', 'B', 0, 0),
....: ('B', 'C', 1, 0)],
....: initial_states=['A'],
....: final_states=['C'])
sage: A.prepone_output()
sage: A.transitions()
[Transition from 'A' to 'B': 1|1,0,
Transition from 'B' to 'B': 0|0,
Transition from 'B' to 'C': 1|-]
::
sage: B = Transducer([('A', 'B', 0, 1),
....: ('B', 'C', 1, [1, 1]),
....: ('B', 'C', 0, 1)],
....: initial_states=['A'],
....: final_states=['C'])
sage: B.prepone_output()
sage: B.transitions()
[Transition from 'A' to 'B': 0|1,1,
Transition from 'B' to 'C': 1|1,
Transition from 'B' to 'C': 0|-]
If initial states are not labeled as such, unexpected results may be
obtained::
sage: C = Transducer([(0,1,0,0)])
sage: C.prepone_output()
verbose 0 (...: finite_state_machine.py, prepone_output)
All transitions leaving state 0 have an output label with
prefix 0. However, there is no inbound transition and it
is not an initial state. This routine (possibly called by
simplification) therefore erased this prefix from all
outbound transitions.
sage: C.transitions()
[Transition from 0 to 1: 0|-]
Also the final output of final states can be changed::
sage: T = Transducer([('A', 'B', 0, 1),
....: ('B', 'C', 1, [1, 1]),
....: ('B', 'C', 0, 1)],
....: initial_states=['A'],
....: final_states=['B'])
sage: T.state('B').final_word_out = [1]
sage: T.prepone_output()
sage: T.transitions()
[Transition from 'A' to 'B': 0|1,1,
Transition from 'B' to 'C': 1|1,
Transition from 'B' to 'C': 0|-]
sage: T.state('B').final_word_out
[]
::
sage: S = Transducer([('A', 'B', 0, 1),
....: ('B', 'C', 1, [1, 1]),
....: ('B', 'C', 0, 1)],
....: initial_states=['A'],
....: final_states=['B'])
sage: S.state('B').final_word_out = [0]
sage: S.prepone_output()
sage: S.transitions()
[Transition from 'A' to 'B': 0|1,
Transition from 'B' to 'C': 1|1,1,
Transition from 'B' to 'C': 0|1]
sage: S.state('B').final_word_out
[0]
Output labels do not have to be hashable::
sage: C = Transducer([(0, 1, 0, []),
....: (1, 0, 0, [vector([0, 0]), 0]),
....: (1, 1, 1, [vector([0, 0]), 1]),
....: (0, 0, 1, 0)],
....: determine_alphabets=False,
....: initial_states=[0])
sage: C.prepone_output()
sage: sorted(C.transitions())
[Transition from 0 to 1: 0|(0, 0),
Transition from 0 to 0: 1|0,
Transition from 1 to 0: 0|0,
Transition from 1 to 1: 1|1,(0, 0)]
"""
changed = 1
iteration = 0
while changed > 0:
changed = 0
iteration += 1
for state in self.iter_states():
if state.is_initial:
continue
if state.word_out:
raise NotImplementedError(
"prepone_output assumes that all states have "
"empty output word, but state %s has output "
"word %s" % (state, state.word_out))
common_output = find_common_output(state)
if common_output:
changed += 1
if state.is_final:
assert state.final_word_out[0] == common_output[0]
state.final_word_out = state.final_word_out[1:]
for transition in self.transitions(state):
assert transition.word_out[0] == common_output[0]
transition.word_out = transition.word_out[1:]
found_inbound_transition = False
for transition in self.iter_transitions():
if transition.to_state == state:
transition.word_out = transition.word_out \
+ [common_output[0]]
found_inbound_transition = True
if not found_inbound_transition:
verbose(
"All transitions leaving state %s have an "
"output label with prefix %s. However, "
"there is no inbound transition and it is "
"not an initial state. This routine "
"(possibly called by simplification) "
"therefore erased this prefix from all "
"outbound transitions." %
(state, common_output[0]),
level=0)
def equivalence_classes(self):
r"""
Returns a list of equivalence classes of states.
INPUT:
Nothing.
OUTPUT:
A list of equivalence classes of states.
Two states `a` and `b` are equivalent if and only if there is
a bijection `\varphi` between paths starting at `a` and paths
starting at `b` with the following properties: Let `p_a` be a
path from `a` to `a'` and `p_b` a path from `b` to `b'` such
that `\varphi(p_a)=p_b`, then
- `p_a.\mathit{word}_\mathit{in}=p_b.\mathit{word}_\mathit{in}`,
- `p_a.\mathit{word}_\mathit{out}=p_b.\mathit{word}_\mathit{out}`,
- `a'` and `b'` have the same output label, and
- `a'` and `b'` are both final or both non-final and have the
same final output word.
The function :meth:`.equivalence_classes` returns a list of
the equivalence classes to this equivalence relation.
This is one step of Moore's minimization algorithm.
.. SEEALSO::
:meth:`.minimization`
EXAMPLES::
sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
....: ("B", "C", 0, 0), ("B", "C", 1, 1),
....: ("C", "D", 0, 1), ("C", "D", 1, 0),
....: ("D", "A", 0, 0), ("D", "A", 1, 1)])
sage: sorted(fsm.equivalence_classes())
[['A', 'C'], ['B', 'D']]
sage: fsm.state("A").is_final = True
sage: sorted(fsm.equivalence_classes())
[['A'], ['B'], ['C'], ['D']]
sage: fsm.state("C").is_final = True
sage: sorted(fsm.equivalence_classes())
[['A', 'C'], ['B', 'D']]
sage: fsm.state("A").final_word_out = 1
sage: sorted(fsm.equivalence_classes())
[['A'], ['B'], ['C'], ['D']]
sage: fsm.state("C").final_word_out = 1
sage: sorted(fsm.equivalence_classes())
[['A', 'C'], ['B', 'D']]
"""
# Two states `a` and `b` are j-equivalent if and only if there
# is a bijection `\varphi` between paths of length <= j
# starting at `a` and paths starting at `b` with the following
# properties: Let `p_a` be a path from `a` to `a'` and `p_b` a
# path from `b` to `b'` such that `\varphi(p_a)=p_b`, then
#
# - `p_a.\mathit{word}_{in}=p_b.\mathit{word}_{in}`,
# - `p_a.\mathit{word}_{out}=p_b.\mathit{word}_{out}`,
# - `a'` and `b'` have the same output label, and
# - `a'` and `b'` are both final or both non-final.
# If for some j the relations j-1 equivalent and j-equivalent
# coincide, then they are equal to the equivalence relation
# described in the docstring.
# classes_current holds the equivalence classes of
# j-equivalence, classes_previous holds the equivalence
# classes of j-1 equivalence.
# initialize with 0-equivalence
classes_previous = []
key_0 = lambda state: (state.is_final, state.color, state.word_out,
state.final_word_out)
states_grouped = full_group_by(self.states(), key=key_0)
classes_current = [equivalence_class for
(key,equivalence_class) in states_grouped]
while len(classes_current) != len(classes_previous):
class_of = {}
classes_previous = classes_current
classes_current = []
for k in range(len(classes_previous)):
for state in classes_previous[k]:
class_of[state] = k
key_current = lambda state: sorted(
[(transition.word_in,
transition.word_out,
class_of[transition.to_state])
for transition in state.transitions])
for class_previous in classes_previous:
states_grouped = full_group_by(class_previous, key=key_current)
classes_current.extend([equivalence_class for
(key,equivalence_class) in states_grouped])
return classes_current
def quotient(self, classes):
r"""
Constructs the quotient with respect to the equivalence
classes.
INPUT:
- ``classes`` is a list of equivalence classes of states.
OUTPUT:
A finite state machine.
The labels of the new states are tuples of states of the
``self``, corresponding to ``classes``.
Assume that `c` is a class, and `a` and `b` are states in
`c`. Then there is a bijection `\varphi` between the
transitions from `a` and the transitions from `b` with the
following properties: if `\varphi(t_a)=t_b`, then
- `t_a.\mathit{word}_\mathit{in}=t_b.\mathit{word}_\mathit{in}`,
- `t_a.\mathit{word}_\mathit{out}=t_b.\mathit{word}_\mathit{out}`, and
- `t_a` and `t_b` lead to some equivalent states `a'` and `b'`.
Non-initial states may be merged with initial states, the
resulting state is an initial state.
All states in a class must have the same ``is_final``,
``final_word_out`` and ``word_out`` values.
EXAMPLES::
sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
....: ("B", "C", 0, 0), ("B", "C", 1, 1),
....: ("C", "D", 0, 1), ("C", "D", 1, 0),
....: ("D", "A", 0, 0), ("D", "A", 1, 1)])
sage: fsmq = fsm.quotient([[fsm.state("A"), fsm.state("C")],
....: [fsm.state("B"), fsm.state("D")]])
sage: fsmq.transitions()
[Transition from ('A', 'C')
to ('B', 'D'): 0|1,
Transition from ('A', 'C')
to ('B', 'D'): 1|0,
Transition from ('B', 'D')
to ('A', 'C'): 0|0,
Transition from ('B', 'D')
to ('A', 'C'): 1|1]
sage: fsmq.relabeled().transitions()
[Transition from 0 to 1: 0|1,
Transition from 0 to 1: 1|0,
Transition from 1 to 0: 0|0,
Transition from 1 to 0: 1|1]
sage: fsmq1 = fsm.quotient(fsm.equivalence_classes())
sage: fsmq1 == fsmq
True
sage: fsm.quotient([[fsm.state("A"), fsm.state("B"), fsm.state("C"), fsm.state("D")]])
Traceback (most recent call last):
...
AssertionError: Transitions of state 'A' and 'B' are incompatible.
TESTS::
sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
....: ("B", "C", 0, 0), ("B", "C", 1, 1),
....: ("C", "D", 0, 1), ("C", "D", 1, 0),
....: ("D", "A", 0, 0), ("D", "A", 1, 1)],
....: final_states=["A", "C"])
sage: fsm.state("A").final_word_out = 1
sage: fsm.state("C").final_word_out = 2
sage: fsmq = fsm.quotient([[fsm.state("A"), fsm.state("C")],
....: [fsm.state("B"), fsm.state("D")]])
Traceback (most recent call last):
...
AssertionError: Class ['A', 'C'] mixes
final states with different final output words.
"""
new = self.empty_copy()
state_mapping = {}
# Create new states and build state_mapping
for c in classes:
new_label = tuple(c)
new_state = c[0].relabeled(new_label)
new.add_state(new_state)
for state in c:
state_mapping[state] = new_state
# Copy data from old transducer
for c in classes:
new_state = state_mapping[c[0]]
sorted_transitions = sorted(
[(state_mapping[t.to_state], t.word_in, t.word_out)
for t in c[0].transitions])
for transition in self.iter_transitions(c[0]):
new.add_transition(
from_state = new_state,
to_state = state_mapping[transition.to_state],
word_in = transition.word_in,
word_out = transition.word_out)
# check that all class members have the same information (modulo classes)
for state in c:
new_state.is_initial = new_state.is_initial or state.is_initial
assert new_state.is_final == state.is_final, \
"Class %s mixes final and non-final states" % (c,)
assert new_state.word_out == state.word_out, \
"Class %s mixes different word_out" % (c,)
assert new_state.color == state.color, \
"Class %s mixes different colors" % (c,)
assert sorted_transitions == sorted(
[(state_mapping[t.to_state], t.word_in, t.word_out)
for t in state.transitions]), \
"Transitions of state %s and %s are incompatible." % (c[0], state)
assert new_state.final_word_out == state.final_word_out, \
"Class %s mixes final states with different " \
"final output words." % (c,)
return new
def merged_transitions(self):
"""
Merges transitions which have the same ``from_state``,
``to_state`` and ``word_out`` while adding their ``word_in``.
INPUT:
Nothing.
OUTPUT:
A finite state machine with merged transitions. If no mergers occur,
return ``self``.
EXAMPLE::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: T = Transducer([[1, 2, 1/4, 1], [1, -2, 1/4, 1], [1, -2, 1/2, 1],
....: [2, 2, 1/4, 1], [2, -2, 1/4, 1], [-2, -2, 1/4, 1],
....: [-2, 2, 1/4, 1], [2, 3, 1/2, 1], [-2, 3, 1/2, 1]],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: T1 = T.merged_transitions()
sage: T1 is T
False
sage: sorted(T1.transitions())
[Transition from -2 to -2: 1/4|1,
Transition from -2 to 2: 1/4|1,
Transition from -2 to 3: 1/2|1,
Transition from 1 to 2: 1/4|1,
Transition from 1 to -2: 3/4|1,
Transition from 2 to -2: 1/4|1,
Transition from 2 to 2: 1/4|1,
Transition from 2 to 3: 1/2|1]
Applying the function again does not change the result::
sage: T2 = T1.merged_transitions()
sage: T2 is T1
True
"""
new = self.empty_copy()
changed = False
state_dict = {}
memo = {}
for state in self.states():
new_state = deepcopy(state,memo)
state_dict[state] = new_state
new.add_state(new_state)
for state in self.states():
grouped_transitions = itertools.groupby(sorted(state.transitions, key=key), key=key)
for (to_state, word_out), transitions in grouped_transitions:
transition_list = list(transitions)
changed = changed or len(transition_list) > 1
word_in = 0
for transition in transition_list:
if hasattr(transition.word_in, '__iter__') and len(transition.word_in) == 1:
word_in += transition.word_in[0]
else:
raise TypeError('%s does not have a list of length 1 as word_in' % transition)
new.add_transition((state, to_state, word_in, word_out))
if changed:
return new
else:
return self
def markov_chain_simplification(self):
"""
Consider ``self`` as Markov chain with probabilities as input labels
and simplify it.
INPUT:
Nothing.
OUTPUT:
Simplified version of ``self``.
EXAMPLE::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: T = Transducer([[1, 2, 1/4, 0], [1, -2, 1/4, 0], [1, -2, 1/2, 0],
....: [2, 2, 1/4, 1], [2, -2, 1/4, 1], [-2, -2, 1/4, 1],
....: [-2, 2, 1/4, 1], [2, 3, 1/2, 2], [-2, 3, 1/2, 2]],
....: initial_states=[1],
....: final_states=[3],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: T1 = T.markov_chain_simplification()
sage: sorted(T1.transitions())
[Transition from ((1,),) to ((2, -2),): 1|0,
Transition from ((2, -2),) to ((2, -2),): 1/2|1,
Transition from ((2, -2),) to ((3,),): 1/2|2]
"""
current = self.merged_transitions()
number_states = len(current.states())
while True:
current = current.simplification()
new_number_states = len(current.states())
new = current.merged_transitions()
if new is current and number_states == new_number_states:
return new
current = new
number_states = new_number_states
def with_final_word_out(self, letters, allow_non_final=True):
"""
Constructs a new finite state machine with final output words
for all states by implicitly reading trailing letters until a
final state is reached.
INPUT:
- ``letters`` -- either an element of the input alphabet or a
list of such elements. This is repeated cyclically when
needed.
- ``allow_non_final`` -- a boolean (default: ``True``) which
indicates whether we allow that some states may be non-final
in the resulting finite state machine. I.e., if ``False`` then
each state has to have a path to a final state with input
label matching ``letters``.
OUTPUT:
A finite state machine.
The inplace version of this function is
:meth:`.construct_final_word_out`.
Suppose for the moment a single element ``letter`` as input
for ``letters``. This is equivalent to ``letters = [letter]``.
We will discuss the general case below.
Let ``word_in`` be a word over the input alphabet and assume
that the original finite state machine transforms ``word_in`` to
``word_out`` reaching a possibly non-final state ``s``. Let
further `k` be the minimum number of letters ``letter`` such
that there is a path from ``s`` to some final state ``f`` whose
input label consists of `k` copies of ``letter`` and whose
output label is ``path_word_out``. Then the state ``s`` of the
resulting finite state machine is a final state with final
output ``path_word_out + f.final_word_out``. Therefore, the new
finite state machine transforms ``word_in`` to ``word_out +
path_word_out + f.final_word_out``.
This is e.g. useful for finite state machines operating on digit
expansions: there, it is sometimes required to read a sufficient
number of trailing zeros (at the most significant positions) in
order to reach a final state and to flush all carries. In this
case, this method constructs an essentially equivalent finite
state machine in the sense that it not longer requires adding
sufficiently many trailing zeros. However, it is the
responsibility of the user to make sure that if adding trailing
zeros to the input anyway, the output is equivalent.
If ``letters`` consists of more than one letter, then it is
assumed that (not necessarily complete) cycles of ``letters``
are appended as trailing input.
.. SEEALSO::
:ref:`example on Gray code <finite_state_machine_gray_code_example>`
EXAMPLES:
#. A simple transducer transforming `00` blocks to `01`
blocks::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 1)],
....: initial_states=[0],
....: final_states=[0])
sage: T.process([0, 0, 0])
(False, 1, [0, 1, 0])
sage: T.process([0, 0, 0, 0])
(True, 0, [0, 1, 0, 1])
sage: F = T.with_final_word_out(0)
sage: for f in F.iter_final_states():
....: print f, f.final_word_out
0 []
1 [1]
sage: F.process([0, 0, 0])
(True, 1, [0, 1, 0, 1])
sage: F.process([0, 0, 0, 0])
(True, 0, [0, 1, 0, 1])
#. A more realistic example: Addition of `1` in binary. We
construct a transition function transforming the input
to its binary expansion::
sage: def binary_transition(carry, input):
....: value = carry + input
....: if value.mod(2) == 0:
....: return (value/2, 0)
....: else:
....: return ((value-1)/2, 1)
Now, we only have to start with a carry of `1` to
get the required transducer::
sage: T = Transducer(binary_transition,
....: input_alphabet=[0, 1],
....: initial_states=[1],
....: final_states=[0])
We test this for the binary expansion of `7`::
sage: T.process([1, 1, 1])
(False, 1, [0, 0, 0])
The final carry `1` has not be flushed yet, we have to add a
trailing zero::
sage: T.process([1, 1, 1, 0])
(True, 0, [0, 0, 0, 1])
We check that with this trailing zero, the transducer
performs as advertised::
sage: all(ZZ(T(k.bits()+[0]), base=2) == k + 1
....: for k in srange(16))
True
However, most of the time, we produce superfluous trailing
zeros::
sage: T(11.bits()+[0])
[0, 0, 1, 1, 0]
We now use this method::
sage: F = T.with_final_word_out(0)
sage: for f in F.iter_final_states():
....: print f, f.final_word_out
1 [1]
0 []
The same tests as above, but we do not have to pad with
trailing zeros anymore::
sage: F.process([1, 1, 1])
(True, 1, [0, 0, 0, 1])
sage: all(ZZ(F(k.bits()), base=2) == k + 1
....: for k in srange(16))
True
No more trailing zero in the output::
sage: F(11.bits())
[0, 0, 1, 1]
sage: all(F(k.bits())[-1] == 1
....: for k in srange(16))
True
#. Here is an example, where we allow trailing repeated `10`::
sage: T = Transducer([(0, 1, 0, 'a'),
....: (1, 2, 1, 'b'),
....: (2, 0, 0, 'c')],
....: initial_states=[0],
....: final_states=[0])
sage: F = T.with_final_word_out([1, 0])
sage: for f in F.iter_final_states():
....: print f, ''.join(f.final_word_out)
0
1 bc
Trying this with trailing repeated `01` does not produce
a ``final_word_out`` for state ``1``, but for state ``2``::
sage: F = T.with_final_word_out([0, 1])
sage: for f in F.iter_final_states():
....: print f, ''.join(f.final_word_out)
0
2 c
#. Here another example with a more-letter trailing input::
sage: T = Transducer([(0, 1, 0, 'a'),
....: (1, 2, 0, 'b'), (1, 2, 1, 'b'),
....: (2, 3, 0, 'c'), (2, 0, 1, 'e'),
....: (3, 1, 0, 'd'), (3, 1, 1, 'd')],
....: initial_states=[0],
....: final_states=[0],
....: with_final_word_out=[0, 0, 1, 1])
sage: for f in T.iter_final_states():
....: print f, ''.join(f.final_word_out)
0
1 bcdbcdbe
2 cdbe
3 dbe
TESTS:
#. Reading copies of ``letter`` may result in a cycle. In
this simple example, we have no final state at all::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
....: initial_states=[0])
sage: T.with_final_word_out(0)
Traceback (most recent call last):
...
ValueError: The finite state machine contains
a cycle starting at state 0 with input label 0
and no final state.
#. A unique transition with input word ``letter`` is
required::
sage: T = Transducer([(0, 1, 0, 0), (0, 2, 0, 0)])
sage: T.with_final_word_out(0)
Traceback (most recent call last):
...
ValueError: No unique transition leaving state 0
with input label 0.
It is not a problem if there is no transition starting
at state ``1`` with input word ``letter``::
sage: T = Transducer([(0, 1, 0, 0)])
sage: F = T.with_final_word_out(0)
sage: for f in F.iter_final_states():
....: print f, f.final_word_out
Anyhow, you can override this by::
sage: T = Transducer([(0, 1, 0, 0)])
sage: T.with_final_word_out(0, allow_non_final=False)
Traceback (most recent call last):
...
ValueError: No unique transition leaving state 1
with input label 0.
#. All transitions must have input labels of length `1`::
sage: T = Transducer([(0, 0, [], 0)])
sage: T.with_final_word_out(0)
Traceback (most recent call last):
...
NotImplementedError: All transitions must have input
labels of length 1. Consider calling split_transitions().
sage: T = Transducer([(0, 0, [0, 1], 0)])
sage: T.with_final_word_out(0)
Traceback (most recent call last):
...
NotImplementedError: All transitions must have input
labels of length 1. Consider calling split_transitions().
#. An empty list as input is not allowed::
sage: T = Transducer([(0, 0, [], 0)])
sage: T.with_final_word_out([])
Traceback (most recent call last):
...
ValueError: letters is not allowed to be an empty list.
"""
new = deepcopy(self)
new.construct_final_word_out(letters, allow_non_final)
return new
def construct_final_word_out(self, letters, allow_non_final=True):
"""
This is an inplace version of :meth:`.with_final_word_out`. See
:meth:`.with_final_word_out` for documentation and examples.
TESTS::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 1)],
....: initial_states=[0],
....: final_states=[0])
sage: F = T.with_final_word_out(0)
sage: T.construct_final_word_out(0)
sage: T == F # indirect doctest
True
sage: T = Transducer([(0, 1, 0, None)],
....: final_states=[1])
sage: F = T.with_final_word_out(0)
sage: F.state(0).final_word_out
[]
"""
from itertools import cycle, izip_longest
if not isinstance(letters, list):
letters = [letters]
elif not letters:
raise ValueError(
"letters is not allowed to be an empty list.")
in_progress = set()
cache = {}
for state in self.iter_states():
assert(not in_progress)
# trailing_letters is an infinite iterator additionally
# marking positions
trailing_letters = cycle(enumerate(letters))
find_final_word_out(state)
# actual modifications can only be carried out after all final words
# have been computed as it may not be permissible to stop at a
# formerly non-final state unless a cycle has been completed.
for (state, position), final_word_out in cache.iteritems():
if position == 0 and final_word_out is not None:
state.is_final = True
state.final_word_out = final_word_out
# *************************************************************************
# other
# *************************************************************************
def graph(self, edge_labels='words_in_out'):
"""
Returns the graph of the finite state machine with labeled
vertices and labeled edges.
INPUT:
- ``edge_label``: (default: ``'words_in_out'``) can be
- ``'words_in_out'`` (labels will be strings ``'i|o'``)
- a function with which takes as input a transition
and outputs (returns) the label
OUTPUT:
A graph.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: T = Transducer()
sage: T.graph()
Digraph on 0 vertices
sage: T.add_state(A)
'A'
sage: T.graph()
Digraph on 1 vertex
sage: T.add_transition(('A', 'A', 0, 1))
Transition from 'A' to 'A': 0|1
sage: T.graph()
Looped digraph on 1 vertex
"""
if edge_labels == 'words_in_out':
label_fct = lambda t:t._in_out_label_()
elif hasattr(edge_labels, '__call__'):
label_fct = edge_labels
else:
raise TypeError('Wrong argument for edge_labels.')
graph_data = []
isolated_vertices = []
for state in self.iter_states():
transitions = state.transitions
if len(transitions) == 0:
isolated_vertices.append(state.label())
for t in transitions:
graph_data.append((t.from_state.label(), t.to_state.label(),
label_fct(t)))
G = DiGraph(graph_data)
G.add_vertices(isolated_vertices)
return G
digraph = graph
def plot(self):
"""
Plots a graph of the finite state machine with labeled
vertices and labeled edges.
INPUT:
Nothing.
OUTPUT:
A plot of the graph of the finite state machine.
TESTS::
sage: FiniteStateMachine([('A', 'A', 0)]).plot()
"""
return self.graph(edge_labels='words_in_out').plot()
def predecessors(self, state, valid_input=None):
"""
Lists all predecessors of a state.
INPUT:
- ``state`` -- the state from which the predecessors should be
listed.
- ``valid_input`` -- If ``valid_input`` is a list, then we
only consider transitions whose input labels are contained
in ``valid_input``. ``state`` has to be a :class:`FSMState`
(not a label of a state). If input labels of length larger
than `1` are used, then ``valid_input`` has to be a list of
lists.
OUTPUT:
A list of states.
EXAMPLES::
sage: A = Transducer([('I', 'A', 'a', 'b'), ('I', 'B', 'b', 'c'),
....: ('I', 'C', 'c', 'a'), ('A', 'F', 'b', 'a'),
....: ('B', 'F', ['c', 'b'], 'b'), ('C', 'F', 'a', 'c')],
....: initial_states=['I'], final_states=['F'])
sage: A.predecessors(A.state('A'))
['A', 'I']
sage: A.predecessors(A.state('F'), valid_input=['b', 'a'])
['F', 'C', 'A', 'I']
sage: A.predecessors(A.state('F'), valid_input=[['c', 'b'], 'a'])
['F', 'C', 'B']
"""
if valid_input is not None:
valid_list = list()
for input in valid_input:
input_list = input
if not isinstance(input_list, list):
input_list = [input]
valid_list.append(input_list)
valid_input = valid_list
unhandeled_direct_predecessors = {s:[] for s in self.states() }
for t in self.transitions():
if valid_input is None or t.word_in in valid_input:
unhandeled_direct_predecessors[t.to_state].append(t.from_state)
done = []
open = [state]
while len(open) > 0:
s = open.pop()
candidates = unhandeled_direct_predecessors[s]
if candidates is not None:
open.extend(candidates)
unhandeled_direct_predecessors[s] = None
done.append(s)
return(done)
def asymptotic_moments(self, variable=SR.symbol('n')):
r"""
Returns the main terms of expectation and variance of the sum
of output labels and its covariance with the sum of input
labels.
INPUT:
- ``variable`` -- a symbol denoting the length of the input,
by default `n`.
OUTPUT:
A dictionary consisting of
- ``expectation`` -- `e n + \operatorname{Order}(1)`,
- ``variance`` -- `v n + \operatorname{Order}(1)`,
- ``covariance`` -- `c n + \operatorname{Order}(1)`
for suitable constants `e`, `v` and `c`.
Assume that all input and output labels are numbers and that
``self`` is complete and has only one final component. Assume
further that this final component is aperiodic. Furthermore,
assume that there is exactly one initial state and that all
states are final.
Denote by `X_n` the sum of output labels written by the
finite state machine when reading a random input word of
length `n` over the input alphabet (assuming
equidistribution).
Then the expectation of `X_n` is `en+O(1)`, the variance
of `X_n` is `vn+O(1)` and the covariance of `X_n` and
the sum of input labels is `cn+O(1)`, cf. [HKW2014]_,
Theorem 2.
In the case of non-integer input or output labels, performance
degrades significantly. For rational input and output labels,
consider rescaling to integers. This limitation comes from the
fact that determinants over polynomial rings can be computed
much more efficiently than over the symbolic ring. In fact, we
compute (parts) of a trivariate generating function where the
input and output labels are exponents of some indeterminates,
see [HKW2014]_, Theorem 2 for details. If those exponents are
integers, we can use a polynomial ring.
EXAMPLES:
#. A trivial example: write the negative of the input::
sage: T = Transducer([(0, 0, 0, 0), (0, 0, 1, -1)],
....: initial_states=[0],
....: final_states=[0])
sage: T([0, 1, 1])
[0, -1, -1]
sage: moments = T.asymptotic_moments()
sage: moments['expectation']
-1/2*n + Order(1)
sage: moments['variance']
1/4*n + Order(1)
sage: moments['covariance']
-1/4*n + Order(1)
#. For the case of the Hamming weight of the non-adjacent-form
(NAF) of integers, cf. the :wikipedia:`Non-adjacent_form`
and the :ref:`example on recognizing NAFs
<finite_state_machine_recognizing_NAFs_example>`, the
following agrees with the results in [HP2007]_.
We first use the transducer to convert the standard binary
expansion to the NAF given in [HP2007]_. We use the parameter
``with_final_word_out`` such that we do not have to add
sufficiently many trailing zeros::
sage: NAF = Transducer([(0, 0, 0, 0),
....: (0, '.1', 1, None),
....: ('.1', 0, 0, [1, 0]),
....: ('.1', 1, 1, [-1, 0]),
....: (1, 1, 1, 0),
....: (1, '.1', 0, None)],
....: initial_states=[0],
....: final_states=[0],
....: with_final_word_out=[0])
As an example, we compute the NAF of `27` by this
transducer.
::
sage: binary_27 = 27.bits()
sage: binary_27
[1, 1, 0, 1, 1]
sage: NAF_27 = NAF(binary_27)
sage: NAF_27
[-1, 0, -1, 0, 0, 1, 0]
sage: ZZ(NAF_27, base=2)
27
Next, we are only interested in the Hamming weight::
sage: def weight(state, input):
....: if input is None:
....: result = 0
....: else:
....: result = ZZ(input != 0)
....: return (0, result)
sage: weight_transducer = Transducer(weight,
....: input_alphabet=[-1, 0, 1],
....: initial_states=[0],
....: final_states=[0])
At the moment, we can not use composition with ``NAF``,
because it has non-empty final output words::
sage: NAFweight = weight_transducer.composition(
....: NAF,
....: algorithm='explorative')
Traceback (most recent call last):
...
NotImplementedError: Explorative composition is not
implemented for transducers with non-empty final output
words. Try the direct algorithm instead.
Thus, we change ``NAF``, then compose and again construct
the final output words::
sage: for s in NAF.final_states():
....: s.final_word_out = []
sage: NAFweight = weight_transducer.composition(
....: NAF,
....: algorithm='explorative').relabeled()
sage: NAFweight.construct_final_word_out(0)
sage: sorted(NAFweight.transitions())
[Transition from 0 to 0: 0|0,
Transition from 0 to 1: 1|-,
Transition from 1 to 0: 0|1,0,
Transition from 1 to 2: 1|1,0,
Transition from 2 to 1: 0|-,
Transition from 2 to 2: 1|0]
sage: NAFweight(binary_27 + [0, 0])
[1, 0, 1, 0, 0, 1, 0]
Now, we actually compute the asymptotic moments::
sage: moments = NAFweight.asymptotic_moments()
sage: moments['expectation']
1/3*n + Order(1)
sage: moments['variance']
2/27*n + Order(1)
sage: moments['covariance']
Order(1)
#. This is Example 3.1 in [HKW2014]_, where a transducer with
variable output labels is given. There, the aim was to
choose the output labels of this very simple transducer such
that the input and output sum are asymptotically
independent, i.e., the constant `c` vanishes.
::
sage: var('a_1, a_2, a_3, a_4')
(a_1, a_2, a_3, a_4)
sage: T = Transducer([[0, 0, 0, a_1], [0, 1, 1, a_3],
....: [1, 0, 0, a_4], [1, 1, 1, a_2]],
....: initial_states=[0], final_states=[0, 1])
sage: moments = T.asymptotic_moments()
verbose 0 (...) Non-integer output weights lead to
significant performance degradation.
sage: moments['expectation']
1/4*(a_1 + a_2 + a_3 + a_4)*n + Order(1)
sage: moments['covariance']
-1/4*(a_1 - a_2)*n + Order(1)
Therefore, the asymptotic covariance vanishes if and only if
`a_2=a_1`.
#. This is Example 6.2 in [HKW2014]_, dealing with the
transducer converting the binary expansion of an integer
into Gray code (cf. the :wikipedia:`Gray_code` and the
:ref:`example on Gray code
<finite_state_machine_gray_code_example>`)::
sage: moments = transducers.GrayCode().asymptotic_moments()
sage: moments['expectation']
1/2*n + Order(1)
sage: moments['variance']
1/4*n + Order(1)
sage: moments['covariance']
Order(1)
#. This is the first part of Example 6.3 in [HKW2014]_,
counting the number of 10 blocks in the standard binary
expansion. The least significant digit is at the left-most
position::
sage: block10 = transducers.CountSubblockOccurrences(
....: [1, 0],
....: input_alphabet=[0, 1])
sage: sorted(block10.transitions())
[Transition from () to (): 0|0,
Transition from () to (1,): 1|0,
Transition from (1,) to (): 0|1,
Transition from (1,) to (1,): 1|0]
sage: moments = block10.asymptotic_moments()
sage: moments['expectation']
1/4*n + Order(1)
sage: moments['variance']
1/16*n + Order(1)
sage: moments['covariance']
Order(1)
#. This is the second part of Example 6.3 in [HKW2014]_,
counting the number of 11 blocks in the standard binary
expansion. The least significant digit is at the left-most
position::
sage: block11 = transducers.CountSubblockOccurrences(
....: [1, 1],
....: input_alphabet=[0, 1])
sage: sorted(block11.transitions())
[Transition from () to (): 0|0,
Transition from () to (1,): 1|0,
Transition from (1,) to (): 0|0,
Transition from (1,) to (1,): 1|1]
sage: var('N')
N
sage: moments = block11.asymptotic_moments(N)
sage: moments['expectation']
1/4*N + Order(1)
sage: moments['variance']
5/16*N + Order(1)
sage: correlation = (moments['covariance'].coefficient(N) /
....: (1/2 * sqrt(moments['variance'].coefficient(N))))
sage: correlation
2/5*sqrt(5)
#. This is Example 6.4 in [HKW2014]_, counting the number of
01 blocks minus the number of 10 blocks in the standard binary
expansion. The least significant digit is at the left-most
position::
sage: block01 = transducers.CountSubblockOccurrences(
....: [0, 1],
....: input_alphabet=[0, 1])
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
sage: product_01x10 = block01.cartesian_product(block10)
sage: block_difference = transducers.sub([0, 1])(product_01x10)
sage: T = block_difference.simplification().relabeled()
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = True
sage: T.transitions()
[Transition from 0 to 1: 0|-1,
Transition from 0 to 0: 1|0,
Transition from 1 to 1: 0|0,
Transition from 1 to 0: 1|1,
Transition from 2 to 1: 0|0,
Transition from 2 to 0: 1|0]
sage: moments = T.asymptotic_moments()
sage: moments['expectation']
Order(1)
sage: moments['variance']
Order(1)
sage: moments['covariance']
Order(1)
#. The finite state machine must have a unique final component::
sage: T = Transducer([(0, -1, -1, -1), (0, 1, 1, 1),
....: (-1, -1, -1, -1), (-1, -1, 1, -1),
....: (1, 1, -1, 1), (1, 1, 1, 1)],
....: initial_states=[0],
....: final_states=[0, 1, -1])
sage: T.asymptotic_moments()
Traceback (most recent call last):
...
NotImplementedError: asymptotic_moments is only
implemented for finite state machines with one final
component.
In this particular example, the first letter of the input
decides whether we reach the loop at `-1` or the loop at
`1`. In the first case, we have `X_n = -n`, while we have
`X_n = n` in the second case. Therefore, the expectation
`E(X_n)` of `X_n` is `E(X_n) = 0`. We get `(X_n-E(X_n))^2 =
n^2` in all cases, which results in a variance of `n^2`.
So this example shows that the variance may be non-linear if
there is more than one final component.
TESTS:
#. An input alphabet must be given::
sage: T = Transducer([[0, 0, 0, 0]],
....: initial_states=[0], final_states=[0],
....: determine_alphabets=False)
sage: T.asymptotic_moments()
Traceback (most recent call last):
...
ValueError: No input alphabet is given.
Try calling determine_alphabets().
#. The finite state machine must have a unique initial state::
sage: T = Transducer([(0, 0, 0, 0)])
sage: T.asymptotic_moments()
Traceback (most recent call last):
...
ValueError: A unique initial state is required.
#. The finite state machine must be complete::
sage: T = Transducer([[0, 0, 0, 0]],
....: initial_states=[0], final_states=[0],
....: input_alphabet=[0, 1])
sage: T.asymptotic_moments()
Traceback (most recent call last):
...
NotImplementedError: This finite state machine is
not complete.
#. The final component of the finite state machine must be
aperiodic::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
....: initial_states=[0], final_states=[0, 1])
sage: T.asymptotic_moments()
Traceback (most recent call last):
...
NotImplementedError: asymptotic_moments is only
implemented for finite state machines whose unique final
component is aperiodic.
#. Non-integer input or output labels lead to a warning::
sage: T = Transducer([[0, 0, 0, 0], [0, 0, 1, -1/2]],
....: initial_states=[0], final_states=[0])
sage: moments = T.asymptotic_moments()
verbose 0 (...) Non-integer output weights lead to
significant performance degradation.
sage: moments['expectation']
-1/4*n + Order(1)
sage: moments['variance']
1/16*n + Order(1)
sage: moments['covariance']
-1/8*n + Order(1)
This warning can be silenced by :func:`~sage.misc.misc.set_verbose`::
sage: set_verbose(-1, "finite_state_machine.py")
sage: moments = T.asymptotic_moments()
sage: moments['expectation']
-1/4*n + Order(1)
sage: moments['variance']
1/16*n + Order(1)
sage: moments['covariance']
-1/8*n + Order(1)
sage: set_verbose(0, "finite_state_machine.py")
#. Check whether ``word_out`` of ``FSMState`` are correctly
dealt with::
sage: from sage.combinat.finite_state_machine import FSMState
sage: s = FSMState(0, word_out=2,
....: is_initial=True,
....: is_final=True)
sage: T = Transducer([(s, s, 0, 1)],
....: initial_states=[s], final_states=[s])
sage: T([0, 0])
[2, 1, 2, 1, 2]
sage: T.asymptotic_moments()['expectation']
3*n + Order(1)
The same test for non-integer output::
sage: from sage.combinat.finite_state_machine import FSMState
sage: s = FSMState(0, word_out=2/3)
sage: T = Transducer([(s, s, 0, 1/2)],
....: initial_states=[s], final_states=[s])
sage: T.asymptotic_moments()['expectation']
verbose 0 (...) Non-integer output weights lead to
significant performance degradation.
7/6*n + Order(1)
#. All states of ``self`` have to be final::
sage: T = Transducer([(0, 1, 1, 4)], initial_states=[0])
sage: T.asymptotic_moments()
Traceback (most recent call last):
...
ValueError: Not all states are final.
ALGORITHM:
See [HKW2014]_, Theorem 2.
REFERENCES:
.. [HKW2014] Clemens Heuberger, Sara Kropf and Stephan Wagner,
*Combinatorial Characterization of Independent Transducers via
Functional Digraphs*, :arxiv:`1404.3680`.
.. [HP2007] Clemens Heuberger and Helmut Prodinger, *The Hamming
Weight of the Non-Adjacent-Form under Various Input Statistics*,
Periodica Mathematica Hungarica Vol. 55 (1), 2007, pp. 81–96,
:doi:`10.1007/s10998-007-3081-z`.
"""
from sage.calculus.functional import derivative
from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
from sage.rings.rational_field import QQ
if self.input_alphabet is None:
raise ValueError("No input alphabet is given. "
"Try calling determine_alphabets().")
if len(self.initial_states()) != 1:
raise ValueError("A unique initial state is required.")
if not all(state.is_final for state in self.iter_states()):
raise ValueError("Not all states are final.")
if not self.is_complete():
raise NotImplementedError("This finite state machine is "
"not complete.")
final_components = self.final_components()
if len(final_components) != 1:
raise NotImplementedError("asymptotic_moments is only "
"implemented for finite state machines "
"with one final component.")
final_component = final_components[0]
if not final_component.digraph().is_aperiodic():
raise NotImplementedError("asymptotic_moments is only "
"implemented for finite state machines "
"whose unique final component is "
"aperiodic.")
K = len(self.input_alphabet)
R = PolynomialRing(QQ, ("x", "y", "z"))
(x, y, z) = R.gens()
try:
M = get_matrix(self, x, y)
except TypeError:
verbose("Non-integer output weights lead to "
"significant performance degradation.", level=0)
# fall back to symbolic ring
R = SR
x = R.symbol()
y = R.symbol()
z = R.symbol()
M = get_matrix(self, x, y)
else:
f = (M.parent().identity_matrix() - z/K*M).det()
f_x = substitute_one(derivative(f, x))
f_y = substitute_one(derivative(f, y))
f_z = substitute_one(derivative(f, z))
f_xy = substitute_one(derivative(f, x, y))
f_xz = substitute_one(derivative(f, x, z))
f_yz = substitute_one(derivative(f, y, z))
f_yy = substitute_one(derivative(f, y, y))
f_zz = substitute_one(derivative(f, z, z))
e_2 = f_y / f_z
v_2 = (f_y**2 * (f_zz+f_z) + f_z**2 * (f_yy+f_y)
- 2*f_y*f_z*f_yz) / f_z**3
c = (f_x * f_y * (f_zz+f_z) + f_z**2 * f_xy - f_y*f_z*f_xz
- f_x*f_z*f_yz) / f_z**3
return {'expectation': e_2*variable + SR(1).Order(),
'variance': v_2*variable + SR(1).Order(),
'covariance': c*variable + SR(1).Order()}
def is_monochromatic(self):
"""
Checks whether the colors of all states are equal.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
EXAMPLES::
sage: G = transducers.GrayCode()
sage: [s.color for s in G.iter_states()]
[None, None, None]
sage: G.is_monochromatic()
True
sage: G.state(1).color = 'blue'
sage: G.is_monochromatic()
False
"""
return equal(s.color for s in self.iter_states())
#*****************************************************************************
def is_Automaton(FSM):
    """
    Tests whether or not ``FSM`` inherits from :class:`Automaton`.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine, is_Automaton
        sage: is_Automaton(FiniteStateMachine())
        False
        sage: is_Automaton(Automaton())
        True
        sage: is_FiniteStateMachine(Automaton())
        True
    """
    # Instances of Automaton and of any of its subclasses qualify.
    return isinstance(FSM, Automaton)
class Automaton(FiniteStateMachine):
    """
    This creates an automaton, which is a finite state machine, whose
    transitions have input labels.

    An automaton has additional features like creating a deterministic
    and a minimized automaton.

    See class :class:`FiniteStateMachine` for more information.

    EXAMPLES:

    We can create an automaton recognizing even numbers (given in
    binary and read from left to right) in the following way::

        sage: A = Automaton([('P', 'Q', 0), ('P', 'P', 1),
        ....:                ('Q', 'P', 1), ('Q', 'Q', 0)],
        ....:               initial_states=['P'], final_states=['Q'])
        sage: A
        Automaton with 2 states
        sage: A([0])
        True
        sage: A([1, 1, 0])
        True
        sage: A([1, 0, 1])
        False

    Note that the full output of the commands can be obtained by
    calling :meth:`.process` and looks like this::

        sage: A.process([1, 0, 1])
        (False, 'P')

    TESTS::

        sage: Automaton()
        Automaton with 0 states
    """

    def __init__(self, *args, **kwargs):
        """
        Initialize an automaton. See :class:`Automaton` and its parent
        :class:`FiniteStateMachine` for more information.

        TESTS::

            sage: Transducer()._allow_composition_
            True
            sage: Automaton()._allow_composition_
            False
        """
        super(Automaton, self).__init__(*args, **kwargs)
        # Composition needs output labels, which automata do not have.
        self._allow_composition_ = False

    def _repr_(self):
        """
        Represents the finite state machine as "Automaton with n
        states" where n is the number of states.

        INPUT:

        Nothing.

        OUTPUT:

        A string.

        EXAMPLES::

            sage: Automaton()._repr_()
            'Automaton with 0 states'
        """
        return "Automaton with %s states" % len(self._states_)

    def _latex_transition_label_(self, transition, format_function=latex):
        r"""
        Returns the proper transition label.

        INPUT:

        - ``transition`` - a transition

        - ``format_function`` - a function formatting the labels

        OUTPUT:

        A string.

        EXAMPLES::

            sage: F = Automaton([('A', 'B', 1)])
            sage: print latex(F)  # indirect doctest
            \begin{tikzpicture}[auto, initial text=, >=latex]
            \node[state] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
            \node[state] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
            \path[->] (v0) edge node[rotate=360.00, anchor=south] {$1$} (v1);
            \end{tikzpicture}

        TESTS::

            sage: F = Automaton([('A', 'B', 0, 1)])
            sage: t = F.transitions()[0]
            sage: F._latex_transition_label_(t)
            \left[0\right]
        """
        # Automata only have input labels; no output is printed.
        return format_function(transition.word_in)

    def intersection(self, other, only_accessible_components=True):
        """
        Returns a new automaton which accepts an input if it is
        accepted by both given automata.

        INPUT:

        - ``other`` -- an automaton

        - ``only_accessible_components`` -- If ``True`` (default), then
          the result is piped through :meth:`.accessible_components`. If no
          ``new_input_alphabet`` is given, it is determined by
          :meth:`.determine_alphabets`.

        OUTPUT:

        A new automaton which computes the intersection
        (see below) of the languages of ``self`` and ``other``.

        The set of states of the new automaton is the cartesian product of the
        set of states of both given automata. There is a transition `((A, B),
        (C, D), a)` in the new automaton if there are transitions `(A, C, a)`
        and `(B, D, a)` in the old automata.

        The methods :meth:`.intersection` and
        :meth:`.cartesian_product` are the same (for automata).

        EXAMPLES::

            sage: aut1 = Automaton([('1', '2', 1),
            ....:                   ('2', '2', 1),
            ....:                   ('2', '2', 0)],
            ....:                  initial_states=['1'],
            ....:                  final_states=['2'],
            ....:                  determine_alphabets=True)
            sage: aut2 = Automaton([('A', 'A', 1),
            ....:                   ('A', 'B', 0),
            ....:                   ('B', 'B', 0),
            ....:                   ('B', 'A', 1)],
            ....:                  initial_states=['A'],
            ....:                  final_states=['B'],
            ....:                  determine_alphabets=True)
            sage: res = aut1.intersection(aut2)
            sage: (aut1([1, 1]), aut2([1, 1]), res([1, 1]))
            (True, False, False)
            sage: (aut1([1, 0]), aut2([1, 0]), res([1, 0]))
            (True, True, True)
            sage: res.transitions()
            [Transition from ('1', 'A') to ('2', 'A'): 1|-,
             Transition from ('2', 'A') to ('2', 'B'): 0|-,
             Transition from ('2', 'A') to ('2', 'A'): 1|-,
             Transition from ('2', 'B') to ('2', 'B'): 0|-,
             Transition from ('2', 'B') to ('2', 'A'): 1|-]

        For automata with epsilon-transitions, intersection is not well
        defined. But for any finite state machine, epsilon-transitions can be
        removed by :meth:`.remove_epsilon_transitions`.

        ::

            sage: a1 = Automaton([(0, 0, 0),
            ....:                 (0, 1, None),
            ....:                 (1, 1, 1),
            ....:                 (1, 2, 1)],
            ....:                initial_states=[0],
            ....:                final_states=[1],
            ....:                determine_alphabets=True)
            sage: a2 = Automaton([(0, 0, 0), (0, 1, 1), (1, 1, 1)],
            ....:                initial_states=[0],
            ....:                final_states=[1],
            ....:                determine_alphabets=True)
            sage: a1.intersection(a2)
            Traceback (most recent call last):
            ...
            ValueError: An epsilon-transition (with empty input)
            was found.
            sage: a1.remove_epsilon_transitions()  # not tested (since not implemented yet)
            sage: a1.intersection(a2)  # not tested
        """
        if not is_Automaton(other):
            raise TypeError(
                "Only an automaton can be intersected with an automaton.")

        def function(transition1, transition2):
            # Pair two transitions iff their input words agree; a
            # LookupError tells product_FiniteStateMachine to skip the
            # combination.  NOTE(review): this helper had been lost in a
            # previous revision although it is passed below;
            # reconstructed from the documented behavior.
            if not transition1.word_in or not transition2.word_in:
                raise ValueError(
                    "An epsilon-transition (with empty input) was found.")
            if transition1.word_in == transition2.word_in:
                return (transition1.word_in, None)
            else:
                raise LookupError

        return self.product_FiniteStateMachine(
            other,
            function,
            only_accessible_components=only_accessible_components)

    cartesian_product = intersection

    def determinisation(self):
        """
        Returns a deterministic automaton which accepts the same input
        words as the original one.

        INPUT:

        Nothing.

        OUTPUT:

        A new automaton, which is deterministic.

        The labels of the states of the new automaton are frozensets
        of states of ``self``. The color of a new state is the
        frozenset of colors of the constituent states of ``self``.
        Therefore, the colors of the constituent states have to be
        hashable.

        The input alphabet must be specified.

        EXAMPLES::

            sage: aut = Automaton([('A', 'A', 0), ('A', 'B', 1), ('B', 'B', 1)],
            ....:                 initial_states=['A'], final_states=['B'])
            sage: aut.determinisation().transitions()
            [Transition from frozenset(['A'])
                          to frozenset(['A']): 0|-,
             Transition from frozenset(['A'])
                          to frozenset(['B']): 1|-,
             Transition from frozenset(['B'])
                          to frozenset([]): 0|-,
             Transition from frozenset(['B'])
                          to frozenset(['B']): 1|-,
             Transition from frozenset([])
                          to frozenset([]): 0|-,
             Transition from frozenset([])
                          to frozenset([]): 1|-]

        ::

            sage: A = Automaton([('A', 'A', 1), ('A', 'A', 0), ('A', 'B', 1),
            ....:                ('B', 'C', 0), ('C', 'C', 1), ('C', 'C', 0)],
            ....:               initial_states=['A'], final_states=['C'])
            sage: A.determinisation().states()
            [frozenset(['A']), frozenset(['A', 'B']),
             frozenset(['A', 'C']), frozenset(['A', 'C', 'B'])]

        ::

            sage: A = Automaton([(0, 1, 1), (0, 2, [1, 1]), (0, 3, [1, 1, 1]),
            ....:                (1, 0, -1), (2, 0, -2), (3, 0, -3)],
            ....:               initial_states=[0], final_states=[0, 1, 2, 3])
            sage: B = A.determinisation().relabeled()
            sage: all(t.to_state.label() == 2 for t in
            ....:     B.state(2).transitions)
            True
            sage: B.state(2).is_final
            False
            sage: B.delete_state(2)  # this is a sink
            sage: sorted(B.transitions())
            [Transition from 0 to 1: 1|-,
             Transition from 1 to 0: -1|-,
             Transition from 1 to 3: 1|-,
             Transition from 3 to 0: -2|-,
             Transition from 3 to 4: 1|-,
             Transition from 4 to 0: -3|-]

        Note that colors of states have to be hashable::

            sage: A = Automaton([[0, 0, 0]], initial_states=[0])
            sage: A.state(0).color = []
            sage: A.determinisation()
            Traceback (most recent call last):
            ...
            TypeError: unhashable type: 'list'
            sage: A.state(0).color = ()
            sage: A.determinisation()
            Automaton with 1 states

        TESTS:

        This is from #15078, comment 13.

        ::

            sage: D = {'A': [('A', 'a'), ('B', 'a'), ('A', 'b')],
            ....:      'C': [], 'B': [('C', 'b')]}
            sage: auto = Automaton(D, initial_states=['A'], final_states=['C'])
            sage: auto.is_deterministic()
            False
            sage: auto.process(list('aaab'))
            Traceback (most recent call last):
            ...
            NotImplementedError: Non-deterministic path encountered
            when processing input.
            sage: auto.states()
            ['A', 'C', 'B']
            sage: Ddet = auto.determinisation()
            sage: Ddet
            Automaton with 3 states
            sage: Ddet.is_deterministic()
            True
            sage: sorted(Ddet.transitions())
            [Transition from frozenset(['A']) to frozenset(['A', 'B']): 'a'|-,
             Transition from frozenset(['A']) to frozenset(['A']): 'b'|-,
             Transition from frozenset(['A', 'B']) to frozenset(['A', 'B']): 'a'|-,
             Transition from frozenset(['A', 'B']) to frozenset(['A', 'C']): 'b'|-,
             Transition from frozenset(['A', 'C']) to frozenset(['A', 'B']): 'a'|-,
             Transition from frozenset(['A', 'C']) to frozenset(['A']): 'b'|-]
            sage: Ddet.initial_states()
            [frozenset(['A'])]
            sage: Ddet.final_states()
            [frozenset(['A', 'C'])]
        """
        # Transitions with words of length > 1 are first split into
        # letter-by-letter transitions.
        if any(len(t.word_in) > 1 for t in self.iter_transitions()):
            return self.split_transitions().determinisation()

        # Compute the epsilon-closure of every state by iterating the
        # direct epsilon successors until a fixed point is reached.
        epsilon_successors = {}
        direct_epsilon_successors = {}
        for state in self.iter_states():
            direct_epsilon_successors[state] = set(
                t.to_state
                for t in self.iter_transitions(state)
                if not t.word_in)
            epsilon_successors[state] = set([state])

        old_count_epsilon_successors = 0
        count_epsilon_successors = len(epsilon_successors)

        while old_count_epsilon_successors < count_epsilon_successors:
            old_count_epsilon_successors = count_epsilon_successors
            count_epsilon_successors = 0
            for state in self.iter_states():
                for direct_successor in direct_epsilon_successors[state]:
                    epsilon_successors[state] = epsilon_successors[state].union(epsilon_successors[direct_successor])
                count_epsilon_successors += len(epsilon_successors[state])

        def set_transition(states, letter):
            # Subset construction: from a set of states, follow all
            # transitions labeled with ``letter`` and close the result
            # under epsilon-transitions.  NOTE(review): this helper had
            # been lost in a previous revision although it is passed
            # below; reconstructed from the subset-construction
            # semantics documented above.
            result = set()
            for state in states:
                for transition in self.iter_transitions(state):
                    if transition.word_in == [letter]:
                        result.add(transition.to_state)
            result = result.union(*(epsilon_successors[s] for s in result))
            return (frozenset(result), [])

        result = self.empty_copy()
        new_initial_states = [frozenset(self.iter_initial_states())]
        result.add_from_transition_function(set_transition,
                                            initial_states=new_initial_states)

        for state in result.iter_states():
            state.is_final = any(s.is_final for s in state.label())
            state.color = frozenset(s.color for s in state.label())

        return result

    def minimization(self, algorithm=None):
        """
        Returns the minimization of the input automaton as a new automaton.

        INPUT:

        - ``algorithm`` -- Either Moore's algorithm (by
          ``algorithm='Moore'`` or as default for deterministic
          automata) or Brzozowski's algorithm (when
          ``algorithm='Brzozowski'`` or when the automaton is not
          deterministic) is used.

        OUTPUT:

        A new automaton.

        The resulting automaton is deterministic and has a minimal
        number of states.

        EXAMPLES::

            sage: A = Automaton([('A', 'A', 1), ('A', 'A', 0), ('A', 'B', 1),
            ....:                ('B', 'C', 0), ('C', 'C', 1), ('C', 'C', 0)],
            ....:               initial_states=['A'], final_states=['C'])
            sage: B = A.minimization(algorithm='Brzozowski')
            sage: B.transitions(B.states()[1])
            [Transition from frozenset([frozenset(['A', 'C', 'B']),
            frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
            frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
            frozenset(['A', 'C']), frozenset(['C'])]): 0|-,
            Transition from frozenset([frozenset(['A', 'C', 'B']),
            frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
            frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
            frozenset(['A', 'C'])]): 1|-]
            sage: len(B.states())
            3
            sage: C = A.minimization(algorithm='Brzozowski')
            sage: C.transitions(C.states()[1])
            [Transition from frozenset([frozenset(['A', 'C', 'B']),
            frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
            frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
            frozenset(['A', 'C']), frozenset(['C'])]): 0|-,
            Transition from frozenset([frozenset(['A', 'C', 'B']),
            frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
            frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
            frozenset(['A', 'C'])]): 1|-]
            sage: len(C.states())
            3

        ::

            sage: aut = Automaton([('1', '2', 'a'), ('2', '3', 'b'),
            ....:                  ('3', '2', 'a'), ('2', '1', 'b'),
            ....:                  ('3', '4', 'a'), ('4', '3', 'b')],
            ....:                 initial_states=['1'], final_states=['1'])
            sage: min = aut.minimization(algorithm='Brzozowski')
            sage: [len(min.states()), len(aut.states())]
            [3, 4]
            sage: min = aut.minimization(algorithm='Moore')
            Traceback (most recent call last):
            ...
            NotImplementedError: Minimization via Moore's Algorithm is only
            implemented for deterministic finite state machines
        """
        deterministic = self.is_deterministic()

        if algorithm == "Moore" or (algorithm is None and deterministic):
            return self._minimization_Moore_()
        elif algorithm == "Brzozowski" or (algorithm is None and not deterministic):
            return self._minimization_Brzozowski_()
        else:
            raise NotImplementedError("Algorithm '%s' is not implemented. Choose 'Moore' or 'Brzozowski'" % algorithm)

    def _minimization_Brzozowski_(self):
        """
        Returns a minimized automaton by using Brzozowski's algorithm.

        See also :meth:`.minimization`.

        TESTS::

            sage: A = Automaton([('A', 'A', 1), ('A', 'A', 0), ('A', 'B', 1),
            ....:                ('B', 'C', 0), ('C', 'C', 1), ('C', 'C', 0)],
            ....:               initial_states=['A'], final_states=['C'])
            sage: B = A._minimization_Brzozowski_()
            sage: len(B.states())
            3
        """
        # Brzozowski: reverse-determinize twice.
        return self.transposition().determinisation().transposition().determinisation()

    def _minimization_Moore_(self):
        """
        Returns a minimized automaton by using Moore's algorithm.

        See also :meth:`.minimization`.

        TESTS::

            sage: aut = Automaton([('1', '2', 'a'), ('2', '3', 'b'),
            ....:                  ('3', '2', 'a'), ('2', '1', 'b'),
            ....:                  ('3', '4', 'a'), ('4', '3', 'b')],
            ....:                 initial_states=['1'], final_states=['1'])
            sage: min = aut._minimization_Moore_()
            Traceback (most recent call last):
            ...
            NotImplementedError: Minimization via Moore's Algorithm is only
            implemented for deterministic finite state machines
        """
        if self.is_deterministic():
            return self.quotient(self.equivalence_classes())
        else:
            raise NotImplementedError("Minimization via Moore's Algorithm is only " \
                                          "implemented for deterministic finite state machines")

    def process(self, *args, **kwargs):
        """
        .. WARNING::

            The default output of this method is scheduled to change.
            This docstring describes the new default behaviour, which can
            already be achieved by setting
            ``FSMOldProcessOutput`` to ``False``.

        Returns whether the automaton accepts the input and the state
        where the computation stops.

        INPUT:

        - ``input_tape`` -- The input tape can be a list with entries from
          the input alphabet.

        - ``initial_state`` -- (default: ``None``) The state in which
          to start. If this parameter is ``None`` and there is only
          one initial state in the machine, then this state is taken.

        - ``full_output`` -- (default: ``True``) If set, then the full
          output is given, otherwise only whether the sequence is accepted
          or not (the first entry below only).

        OUTPUT:

        The full output is a pair, where

        - the first entry is ``True`` if the input string is accepted and

        - the second gives the state reached after processing the
          input tape (This is a state with label ``None`` if the input
          could not be processed, i.e., when at one point no
          transition to go could be found.).

        By setting ``FSMOldProcessOutput`` to ``False``
        the new desired output is produced.

        EXAMPLES::

            sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False  # activate new output behavior
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: NAF_ = FSMState('_', is_initial = True, is_final = True)
            sage: NAF1 = FSMState('1', is_final = True)
            sage: NAF = Automaton(
            ....:     {NAF_: [(NAF_, 0), (NAF1, 1)], NAF1: [(NAF_, 0)]})
            sage: [NAF.process(w) for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
            ....:                           [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
            [(True, '_'), (True, '1'), (False, None),
             (True, '1'), (False, None), (False, None)]

        If we just want a condensed output, we use::

            sage: [NAF.process(w, full_output=False)
            ....:     for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
            ....:               [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
            [True, True, False, True, False, False]

        It is equivalent to::

            sage: [NAF(w) for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
            ....:                   [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
            [True, True, False, True, False, False]

        The following example illustrates the difference between
        non-existing paths and reaching a non-final state::

            sage: NAF.process([2])
            (False, None)
            sage: NAF.add_transition(('_', 's', 2))
            Transition from '_' to 's': 2|-
            sage: NAF.process([2])
            (False, 's')
        """
        if FSMOldProcessOutput:
            from sage.misc.superseded import deprecation
            deprecation(16132, "The output of Automaton.process "
                               "(and thus of Automaton.__call__) "
                               "will change. Please use the corresponding "
                               "functions from FiniteStateMachine "
                               "for the original output.")
            return super(Automaton, self).process(*args, **kwargs)

        # ``dict.has_key`` is deprecated (and gone in Python 3);
        # ``setdefault`` does the same in one call.
        kwargs.setdefault('full_output', True)

        it = self.iter_process(*args, **kwargs)
        for _ in it:
            pass

        # process output
        if kwargs['full_output']:
            return (it.accept_input, it.current_state)
        else:
            return it.accept_input
#*****************************************************************************
def is_Transducer(FSM):
    """
    Tests whether or not ``FSM`` inherits from :class:`Transducer`.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine, is_Transducer
        sage: is_Transducer(FiniteStateMachine())
        False
        sage: is_Transducer(Transducer())
        True
        sage: is_FiniteStateMachine(Transducer())
        True
    """
    # Instances of Transducer and of any of its subclasses qualify.
    return isinstance(FSM, Transducer)
class Transducer(FiniteStateMachine):
"""
This creates a transducer, which is a finite state machine, whose
transitions have input and output labels.
    A transducer has additional features like creating a simplified
    transducer.
See class :class:`FiniteStateMachine` for more information.
EXAMPLES:
We can create a transducer performing the addition of 1 (for
numbers given in binary and read from right to left) in the
following way::
sage: T = Transducer([('C', 'C', 1, 0), ('C', 'N', 0, 1),
....: ('N', 'N', 0, 0), ('N', 'N', 1, 1)],
....: initial_states=['C'], final_states=['N'])
sage: T
Transducer with 2 states
sage: T([0])
[1]
sage: T([1,1,0])
[0, 0, 1]
sage: ZZ(T(15.digits(base=2)+[0]), base=2)
16
Note that we have padded the binary input sequence by a `0` so
that the transducer can reach its final state.
TESTS::
sage: Transducer()
Transducer with 0 states
"""
def _repr_(self):
"""
Represents the transducer as "Transducer with n states" where
n is the number of states.
INPUT:
Nothing.
OUTPUT:
A string.
EXAMPLES::
sage: Transducer()._repr_()
'Transducer with 0 states'
"""
return "Transducer with %s states" % len(self._states_)
def _latex_transition_label_(self, transition, format_function=latex):
r"""
Returns the proper transition label.
INPUT:
- ``transition`` - a transition
- ``format_function`` - a function formatting the labels
OUTPUT:
A string.
EXAMPLES::
sage: F = Transducer([('A', 'B', 1, 2)])
sage: print latex(F) # indirect doctest
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
\node[state] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
\path[->] (v0) edge node[rotate=360.00, anchor=south] {$1\mid 2$} (v1);
\end{tikzpicture}
TESTS::
sage: F = Transducer([('A', 'B', 0, 1)])
sage: t = F.transitions()[0]
sage: F._latex_transition_label_(t)
\left[0\right] \mid \left[1\right]
"""
return (format_function(transition.word_in) + "\\mid "
+ format_function(transition.word_out))
def intersection(self, other, only_accessible_components=True):
"""
Returns a new transducer which accepts an input if it is accepted by
both given finite state machines producing the same output.
INPUT:
- ``other`` -- a transducer
- ``only_accessible_components`` -- If ``True`` (default), then
the result is piped through :meth:`.accessible_components`. If no
``new_input_alphabet`` is given, it is determined by
:meth:`.determine_alphabets`.
OUTPUT:
A new transducer which computes the intersection
(see below) of the languages of ``self`` and ``other``.
The set of states of the transducer is the cartesian product of the
set of states of both given transducer. There is a transition `((A,
B), (C, D), a, b)` in the new transducer if there are
transitions `(A, C, a, b)` and `(B, D, a, b)` in the old transducers.
EXAMPLES::
sage: transducer1 = Transducer([('1', '2', 1, 0),
....: ('2', '2', 1, 0),
....: ('2', '2', 0, 1)],
....: initial_states=['1'],
....: final_states=['2'])
sage: transducer2 = Transducer([('A', 'A', 1, 0),
....: ('A', 'B', 0, 0),
....: ('B', 'B', 0, 1),
....: ('B', 'A', 1, 1)],
....: initial_states=['A'],
....: final_states=['B'])
sage: res = transducer1.intersection(transducer2)
sage: res.transitions()
[Transition from ('1', 'A') to ('2', 'A'): 1|0,
Transition from ('2', 'A') to ('2', 'A'): 1|0]
In general, transducers are not closed under intersection. But
for transducer which do not have epsilon-transitions, the
intersection is well defined (cf. [BaWo2012]_). However, in
the next example the intersection of the two transducers is
not well defined. The intersection of the languages consists
of `(a^n, b^n c^n)`. This set is not recognizable by a
*finite* transducer.
::
sage: t1 = Transducer([(0, 0, 'a', 'b'),
....: (0, 1, None, 'c'),
....: (1, 1, None, 'c')],
....: initial_states=[0],
....: final_states=[0, 1])
sage: t2 = Transducer([('A', 'A', None, 'b'),
....: ('A', 'B', 'a', 'c'),
....: ('B', 'B', 'a', 'c')],
....: initial_states=['A'],
....: final_states=['A', 'B'])
sage: t2.intersection(t1)
Traceback (most recent call last):
...
ValueError: An epsilon-transition (with empty input or output)
was found.
TESTS::
sage: transducer1 = Transducer([('1', '2', 1, 0)],
....: initial_states=['1'],
....: final_states=['2'])
sage: transducer2 = Transducer([('A', 'B', 1, 0)],
....: initial_states=['A'],
....: final_states=['B'])
sage: res = transducer1.intersection(transducer2)
sage: res.final_states()
[('2', 'B')]
sage: transducer1.state('2').final_word_out = 1
sage: transducer2.state('B').final_word_out = 2
sage: res = transducer1.intersection(transducer2)
sage: res.final_states()
[]
REFERENCES:
.. [BaWo2012] Javier Baliosian and Dina Wonsever, *Finite State
Transducers*, chapter in *Handbook of Finite State Based Models and
Applications*, edited by Jiacun Wang, Chapman and Hall/CRC, 2012.
"""
if not is_Transducer(other):
raise TypeError(
"Only a transducer can be intersected with a transducer.")
new = self.product_FiniteStateMachine(
other,
function,
only_accessible_components=only_accessible_components,
final_function=lambda s1, s2: s1.final_word_out)
for state in new.iter_final_states():
state0 = self.state(state.label()[0])
state1 = other.state(state.label()[1])
if state0.final_word_out != state1.final_word_out:
state.final_word_out = None
state.is_final = False
return new
def cartesian_product(self, other, only_accessible_components=True):
"""
.. WARNING::
The default output of this method is scheduled to change.
This docstring describes the new default behaviour, which can
already be achieved by setting
``FSMOldCodeTransducerCartesianProduct`` to ``False``.
Return a new transducer which can simultaneously process an
input with the machines ``self`` and ``other`` where the
output labels are `d`-tuples of the original output labels.
INPUT:
- ``other`` - a finite state machine (if `d=2`) or a list (or
other iterable) of `d-1` finite state machines
- ``only_accessible_components`` -- If ``True`` (default), then
the result is piped through :meth:`.accessible_components`. If no
``new_input_alphabet`` is given, it is determined by
:meth:`.determine_alphabets`.
OUTPUT:
A transducer which can simultaneously process an input with ``self``
and the machine(s) in ``other``.
The set of states of the new transducer is the cartesian product of
the set of states of ``self`` and ``other``.
Let `(A_j, B_j, a_j, b_j)` for `j\in\{1, \ldots, d\}` be
transitions in the machines ``self`` and in ``other``. Then
there is a transition `((A_1, \ldots, A_d), (B_1, \ldots,
B_d), a, (b_1, \ldots, b_d))` in the new transducer if `a_1 =
\cdots = a_d =: a`.
EXAMPLES:
Originally a different output was constructed by
:meth:`Transducer.cartesian_product`. This output is now produced by
:meth:`Transducer.intersection`.
::
sage: transducer1 = Transducer([('A', 'A', 0, 0),
....: ('A', 'A', 1, 1)],
....: initial_states=['A'],
....: final_states=['A'],
....: determine_alphabets=True)
sage: transducer2 = Transducer([(0, 1, 0, ['b', 'c']),
....: (0, 0, 1, 'b'),
....: (1, 1, 0, 'a')],
....: initial_states=[0],
....: final_states=[1],
....: determine_alphabets=True)
sage: result = transducer1.cartesian_product(transducer2)
doctest:...: DeprecationWarning: The output of
Transducer.cartesian_product will change.
Please use Transducer.intersection for the original output.
See http://trac.sagemath.org/16061 for details.
sage: result
Transducer with 0 states
By setting ``FSMOldCodeTransducerCartesianProduct`` to ``False``
the new desired output is produced.
::
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
sage: result = transducer1.cartesian_product(transducer2)
sage: result
Transducer with 2 states
sage: result.transitions()
[Transition from ('A', 0) to ('A', 1): 0|(0, 'b'),(None, 'c'),
Transition from ('A', 0) to ('A', 0): 1|(1, 'b'),
Transition from ('A', 1) to ('A', 1): 0|(0, 'a')]
sage: result([1, 0, 0])
[(1, 'b'), (0, 'b'), (None, 'c'), (0, 'a')]
sage: (transducer1([1, 0, 0]), transducer2([1, 0, 0]))
([1, 0, 0], ['b', 'b', 'c', 'a'])
Also final output words are correctly processed::
sage: transducer1.state('A').final_word_out = 2
sage: result = transducer1.cartesian_product(transducer2)
sage: result.final_states()[0].final_word_out
[(2, None)]
The following transducer counts the number of 11 blocks minus
the number of 10 blocks over the alphabet ``[0, 1]``.
::
sage: count_11 = transducers.CountSubblockOccurrences(
....: [1, 1],
....: input_alphabet=[0, 1])
sage: count_10 = transducers.CountSubblockOccurrences(
....: [1, 0],
....: input_alphabet=[0, 1])
sage: count_11x10 = count_11.cartesian_product(count_10)
sage: difference = transducers.sub([0, 1])(count_11x10)
sage: T = difference.simplification().relabeled()
sage: T.initial_states()
[1]
sage: sorted(T.transitions())
[Transition from 0 to 1: 0|-1,
Transition from 0 to 0: 1|1,
Transition from 1 to 1: 0|0,
Transition from 1 to 0: 1|0]
sage: input = [0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0]
sage: output = [0, 0, 1, -1, 0, -1, 0, 0, 0, 1, 1, -1]
sage: T(input) == output
True
If ``other`` is an automaton, then :meth:`.cartesian_product` returns
``self`` where the input is restricted to the input accepted by
``other``.
For example, if the transducer transforms the standard
binary expansion into the non-adjacent form and the automaton
recognizes the binary expansion without adjacent ones, then the
cartesian product of these two is a transducer which does not change
the input (except for changing ``a`` to ``(a, None)`` and ignoring a
leading `0`).
::
sage: NAF = Transducer([(0, 1, 0, None),
....: (0, 2, 1, None),
....: (1, 1, 0, 0),
....: (1, 2, 1, 0),
....: (2, 1, 0, 1),
....: (2, 3, 1, -1),
....: (3, 2, 0, 0),
....: (3, 3, 1, 0)],
....: initial_states=[0],
....: final_states=[1],
....: determine_alphabets=True)
sage: aut11 = Automaton([(0, 0, 0), (0, 1, 1), (1, 0, 0)],
....: initial_states=[0],
....: final_states=[0, 1],
....: determine_alphabets=True)
sage: res = NAF.cartesian_product(aut11)
sage: res([1, 0, 0, 1, 0, 1, 0])
[(1, None), (0, None), (0, None), (1, None), (0, None), (1, None)]
This is obvious because if the standard binary expansion does not have
adjacent ones, then it is the same as the non-adjacent form.
Be aware that :meth:`.cartesian_product` is not commutative.
::
sage: aut11.cartesian_product(NAF)
Traceback (most recent call last):
...
TypeError: Only an automaton can be intersected with an automaton.
The cartesian product of more than two finite state machines can also
be computed::
sage: T0 = transducers.CountSubblockOccurrences([0, 0], [0, 1, 2])
sage: T1 = transducers.CountSubblockOccurrences([1, 1], [0, 1, 2])
sage: T2 = transducers.CountSubblockOccurrences([2, 2], [0, 1, 2])
sage: T = T0.cartesian_product([T1, T2])
sage: T.transitions()
[Transition from ((), (), ()) to ((0,), (), ()): 0|(0, 0, 0),
Transition from ((), (), ()) to ((), (1,), ()): 1|(0, 0, 0),
Transition from ((), (), ()) to ((), (), (2,)): 2|(0, 0, 0),
Transition from ((0,), (), ()) to ((0,), (), ()): 0|(1, 0, 0),
Transition from ((0,), (), ()) to ((), (1,), ()): 1|(0, 0, 0),
Transition from ((0,), (), ()) to ((), (), (2,)): 2|(0, 0, 0),
Transition from ((), (1,), ()) to ((0,), (), ()): 0|(0, 0, 0),
Transition from ((), (1,), ()) to ((), (1,), ()): 1|(0, 1, 0),
Transition from ((), (1,), ()) to ((), (), (2,)): 2|(0, 0, 0),
Transition from ((), (), (2,)) to ((0,), (), ()): 0|(0, 0, 0),
Transition from ((), (), (2,)) to ((), (1,), ()): 1|(0, 0, 0),
Transition from ((), (), (2,)) to ((), (), (2,)): 2|(0, 0, 1)]
sage: T([0, 0, 1, 1, 2, 2, 0, 1, 2, 2])
[(0, 0, 0),
(1, 0, 0),
(0, 0, 0),
(0, 1, 0),
(0, 0, 0),
(0, 0, 1),
(0, 0, 0),
(0, 0, 0),
(0, 0, 0),
(0, 0, 1)]
"""
if FSMOldCodeTransducerCartesianProduct:
from sage.misc.superseded import deprecation
deprecation(16061, "The output of Transducer.cartesian_product "
"will change. Please use "
"Transducer.intersection for the original "
"output.")
return self.intersection(
other,
only_accessible_components=only_accessible_components)
return self.product_FiniteStateMachine(
other,
function,
final_function=final_function,
only_accessible_components=only_accessible_components)
def simplification(self):
"""
Returns a simplified transducer.
INPUT:
Nothing.
OUTPUT:
A new transducer.
This function simplifies a transducer by Moore's algorithm,
first moving common output labels of transitions leaving a
state to output labels of transitions entering the state
(cf. :meth:`.prepone_output`).
The resulting transducer implements the same function as the
original transducer.
EXAMPLES::
sage: fsm = Transducer([("A", "B", 0, 1), ("A", "B", 1, 0),
....: ("B", "C", 0, 0), ("B", "C", 1, 1),
....: ("C", "D", 0, 1), ("C", "D", 1, 0),
....: ("D", "A", 0, 0), ("D", "A", 1, 1)])
sage: fsms = fsm.simplification()
sage: fsms
Transducer with 2 states
sage: fsms.transitions()
[Transition from ('A', 'C')
to ('B', 'D'): 0|1,
Transition from ('A', 'C')
to ('B', 'D'): 1|0,
Transition from ('B', 'D')
to ('A', 'C'): 0|0,
Transition from ('B', 'D')
to ('A', 'C'): 1|1]
sage: fsms.relabeled().transitions()
[Transition from 0 to 1: 0|1,
Transition from 0 to 1: 1|0,
Transition from 1 to 0: 0|0,
Transition from 1 to 0: 1|1]
::
sage: fsm = Transducer([("A", "A", 0, 0),
....: ("A", "B", 1, 1),
....: ("A", "C", 1, -1),
....: ("B", "A", 2, 0),
....: ("C", "A", 2, 0)])
sage: fsm_simplified = fsm.simplification()
sage: fsm_simplified
Transducer with 2 states
sage: fsm_simplified.transitions()
[Transition from ('A',) to ('A',): 0|0,
Transition from ('A',) to ('B', 'C'): 1|1,0,
Transition from ('A',) to ('B', 'C'): 1|-1,0,
Transition from ('B', 'C') to ('A',): 2|-]
::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: T = Transducer([('A', 'A', 1/2, 0),
....: ('A', 'B', 1/4, 1),
....: ('A', 'C', 1/4, 1),
....: ('B', 'A', 1, 0),
....: ('C', 'A', 1, 0)],
....: initial_states=[0],
....: final_states=['A', 'B', 'C'],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: sorted(T.simplification().transitions())
[Transition from ('A',) to ('A',): 1/2|0,
Transition from ('A',) to ('B', 'C'): 1/2|1,
Transition from ('B', 'C') to ('A',): 1|0]
Illustrating the use of colors in order to avoid identification of states::
sage: T = Transducer( [[0,0,0,0], [0,1,1,1],
....: [1,0,0,0], [1,1,1,1]],
....: initial_states=[0],
....: final_states=[0,1])
sage: sorted(T.simplification().transitions())
[Transition from (0, 1) to (0, 1): 0|0,
Transition from (0, 1) to (0, 1): 1|1]
sage: T.state(0).color = 0
sage: T.state(0).color = 1
sage: sorted(T.simplification().transitions())
[Transition from (0,) to (0,): 0|0,
Transition from (0,) to (1,): 1|1,
Transition from (1,) to (0,): 0|0,
Transition from (1,) to (1,): 1|1]
"""
fsm = deepcopy(self)
fsm.prepone_output()
return fsm.quotient(fsm.equivalence_classes())
def process(self, *args, **kwargs):
"""
.. WARNING::
The default output of this method is scheduled to change.
This docstring describes the new default behaviour, which can
already be achieved by setting
``FSMOldProcessOutput`` to ``False``.
Returns whether the transducer accepts the input, the state
where the computation stops and which output is generated.
INPUT:
- ``input_tape`` -- The input tape can be a list with entries from
the input alphabet.
- ``initial_state`` -- (default: ``None``) The state in which
to start. If this parameter is ``None`` and there is only
one initial state in the machine, then this state is taken.
- ``full_output`` -- (default: ``True``) If set, then the full
output is given, otherwise only the generated output (the
third entry below only). If the input is not accepted, a
``ValueError`` is raised.
OUTPUT:
The full output is a triple, where
- the first entry is ``True`` if the input string is accepted,
- the second gives the reached state after processing the
input tape (This is a state with label ``None`` if the input
could not be processed, i.e., when at one point no
transition to go could be found.), and
- the third gives a list of the output labels used during
processing.
By setting ``FSMOldProcessOutput`` to ``False``
the new desired output is produced.
EXAMPLES::
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False # activate new output behavior
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial = True, is_final = True)
sage: binary_inverter = Transducer({A:[(A, 0, 1), (A, 1, 0)]})
sage: binary_inverter.process([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
If we are only interested in the output, we can also use::
sage: binary_inverter([0, 1, 0, 0, 1, 1])
[1, 0, 1, 1, 0, 0]
The following transducer transforms `0^n 1` to `1^n 2`::
sage: T = Transducer([(0, 0, 0, 1), (0, 1, 1, 2)])
sage: T.state(0).is_initial = True
sage: T.state(1).is_final = True
We can see the different possibilites of the output by::
sage: [T.process(w) for w in [[1], [0, 1], [0, 0, 1], [0, 1, 1],
....: [0], [0, 0], [2, 0], [0, 1, 2]]]
[(True, 1, [2]), (True, 1, [1, 2]),
(True, 1, [1, 1, 2]), (False, None, None),
(False, 0, [1]), (False, 0, [1, 1]),
(False, None, None), (False, None, None)]
If we just want a condensed output, we use::
sage: [T.process(w, full_output=False)
....: for w in [[1], [0, 1], [0, 0, 1]]]
[[2], [1, 2], [1, 1, 2]]
sage: T.process([0, 1, 2], full_output=False)
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
It is equivalent to::
sage: [T(w) for w in [[1], [0, 1], [0, 0, 1]]]
[[2], [1, 2], [1, 1, 2]]
sage: T([0, 1, 2])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
"""
if FSMOldProcessOutput:
from sage.misc.superseded import deprecation
deprecation(16132, "The output of Transducer.process "
"(and thus of Transducer.__call__) "
"will change. Please use the corresponding "
"functions from FiniteStateMachine "
"for the original output.")
return super(Transducer, self).process(*args, **kwargs)
if not kwargs.has_key('full_output'):
kwargs['full_output'] = True
it = self.iter_process(*args, **kwargs)
for _ in it:
pass
# process output
if kwargs['full_output']:
if it.current_state.label() is None:
return (it.accept_input, it.current_state, None)
else:
return (it.accept_input, it.current_state, it.output_tape)
else:
if not it.accept_input:
raise ValueError("Invalid input sequence.")
return it.output_tape
#*****************************************************************************
def is_FSMProcessIterator(PI):
    """
    Tests whether or not ``PI`` inherits from :class:`FSMProcessIterator`.

    INPUT:

    - ``PI`` -- an arbitrary object.

    OUTPUT:

    ``True`` if ``PI`` is an instance of :class:`FSMProcessIterator`
    (or of a subclass thereof), ``False`` otherwise.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FSMProcessIterator, FSMProcessIterator
        sage: is_FSMProcessIterator(FSMProcessIterator(FiniteStateMachine([[0, 0, 0, 0]], initial_states=[0])))
        True
    """
    return isinstance(PI, FSMProcessIterator)
class FSMProcessIterator(SageObject):
"""
This class is for processing an input string on a finite state
machine.
An instance of this class is generated when
:meth:`FiniteStateMachine.process` or
:meth:`FiniteStateMachine.iter_process` of the finite state
machine is invoked. It behaves like an iterator which, in each
step, takes one letter of the input and runs (one step on) the
finite state machine with this input. More precisely, in each
step, the process iterator takes an outgoing transition of the
current state, whose input label equals the input letter of the
tape. The output label of the transition, if present, is written
on the output tape.
INPUT:
- ``fsm`` -- The finite state machine on which the input should be
processed.
- ``input_tape`` -- The input tape. It can be anything that is
iterable.
    - ``initial_state`` -- The initial state in which the machine
      starts. If this is ``None``, the unique initial state of the
      finite state machine is taken. If there are several, a
      ``ValueError`` is raised.
    The process (iteration) stops if there are no more input letters
    on the tape. In this case a StopIteration exception is thrown. As a
    result the following attributes are available:
- ``accept_input`` -- Is ``True`` if the reached state is a final state.
- ``current_state`` -- The current/reached state in the process.
- ``output_tape`` -- The written output.
Current values of those attributes (except ``accept_input``) are
(also) available during the iteration.
OUTPUT:
An iterator.
EXAMPLES:
The following transducer reads binary words and outputs a word,
where blocks of ones are replaced by just a single one. Further
only words that end with a zero are accepted.
::
sage: T = Transducer({'A': [('A', 0, 0), ('B', 1, None)],
....: 'B': [('B', 1, None), ('A', 0, [1, 0])]},
....: initial_states=['A'], final_states=['A'])
sage: input = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0]
sage: T.process(input)
(True, 'A', [1, 0, 0, 1, 0, 1, 0])
The function :meth:`FiniteStateMachine.process` created a new
``FSMProcessIterator``. We can do that manually, too, and get full
access to the iteration process::
sage: from sage.combinat.finite_state_machine import FSMProcessIterator
sage: it = FSMProcessIterator(T, input_tape=input)
sage: for _ in it:
....: print (it.current_state, it.output_tape)
('B', [])
('B', [])
('A', [1, 0])
('A', [1, 0, 0])
('B', [1, 0, 0])
('A', [1, 0, 0, 1, 0])
('B', [1, 0, 0, 1, 0])
('B', [1, 0, 0, 1, 0])
('B', [1, 0, 0, 1, 0])
('A', [1, 0, 0, 1, 0, 1, 0])
sage: it.accept_input
True
TESTS::
sage: T = Transducer([[0, 0, 0, 0]])
sage: T.process([])
Traceback (most recent call last):
...
ValueError: No state is initial.
::
sage: T = Transducer([[0, 1, 0, 0]], initial_states=[0, 1])
sage: T.process([])
Traceback (most recent call last):
...
ValueError: Several initial states.
"""
def __init__(self, fsm, input_tape=None, initial_state=None, **kwargs):
"""
See :class:`FSMProcessIterator` for more information.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMProcessIterator
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
sage: for _ in it:
....: pass
sage: it.output_tape
[1, 0]
"""
self.fsm = fsm
if initial_state is None:
fsm_initial_states = self.fsm.initial_states()
try:
self.current_state = fsm_initial_states[0]
except IndexError:
raise ValueError("No state is initial.")
if len(fsm_initial_states) > 1:
raise ValueError("Several initial states.")
else:
self.current_state = initial_state
self.output_tape = []
if input_tape is None:
self._input_tape_iter_ = iter([])
else:
if hasattr(input_tape, '__iter__'):
self._input_tape_iter_ = iter(input_tape)
else:
raise ValueError("Given input tape is not iterable.")
    def __iter__(self):
        """
        Returns ``self``.

        The process iterator is its own iterator, so iterating it a
        second time continues where the first iteration stopped.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: id(it) == id(iter(it))
            True
        """
        return self
    def next(self):
        """
        Makes one step in processing the input tape.

        INPUT:

        Nothing.

        OUTPUT:

        It returns the taken transition. A ``StopIteration`` exception is
        thrown when there is nothing more to read.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.next()
            Transition from 'A' to 'A': 0|1
            sage: it.next()
            Transition from 'A' to 'A': 1|0
            sage: it.next()
            Traceback (most recent call last):
            ...
            StopIteration

        TESTS::

            sage: Z = Transducer()
            sage: s = Z.add_state(0)
            sage: s.is_initial = True
            sage: s.is_final = True
            sage: s.final_word_out = [1, 2]
            sage: Z.process([])
            (True, 0, [1, 2])
        """
        # ``accept_input`` is only set when the iteration has
        # terminated, so its presence marks an exhausted iterator.
        if hasattr(self, 'accept_input'):
            raise StopIteration
        try:
            # process current state: a state hook may select the next
            # transition itself (states without a hook raise
            # AttributeError, which is deliberately ignored)
            transition = None
            try:
                transition = self.current_state.hook(
                    self.current_state, self)
            except AttributeError:
                pass
            self.write_word(self.current_state.word_out)
            # get next transition by reading letters from the tape
            # until exactly one transition's input word matches
            if not isinstance(transition, FSMTransition):
                next_word = []
                found = False
                try:
                    while not found:
                        next_word.append(self.read_letter())
                        # epsilon transitions (empty input word) would
                        # never be matched by this letter-driven search
                        if len(next_word) == 1 and any(not t.word_in
                                                       for t in self.current_state.transitions):
                            raise NotImplementedError(
                                "process cannot handle epsilon transition "
                                "leaving state %s." % self.current_state.label())
                        try:
                            transition = self.get_next_transition(
                                next_word)
                            found = True
                        except ValueError:
                            pass
                        # a second transition whose input word starts
                        # with the same letters would make the choice
                        # ambiguous
                        if found and any(
                                t is not transition and startswith(t.word_in,
                                                                   next_word)
                                for t in self.current_state.transitions):
                            raise NotImplementedError("Non-deterministic "
                                                      "path encountered "
                                                      "when processing "
                                                      "input.")
                except StopIteration:
                    # this means input tape is finished; letters read
                    # without a matching transition leave the machine
                    # in the sink state with label None
                    if len(next_word) > 0:
                        self.current_state = FSMState(None,
                                                      allow_label_None=True)
                    raise StopIteration
            # process transition (transition hooks are optional, too)
            try:
                transition.hook(transition, self)
            except AttributeError:
                pass
            self.write_word(transition.word_out)
            # go to next state
            self.current_state = transition.to_state
        except StopIteration:
            # this means, either input tape is finished or
            # someone has thrown StopIteration manually (in one
            # of the hooks)
            # NOTE(review): ``label`` is a method here, so ``label is
            # None`` looks always False; the ``is_final`` test covers
            # the label-None sink state anyway -- confirm upstream.
            if self.current_state.label is None or not self.current_state.is_final:
                self.accept_input = False
            if not hasattr(self, 'accept_input'):
                self.accept_input = True
            # an accepting run additionally emits the final output word
            if self.current_state.is_final:
                self.write_word(self.current_state.final_word_out)
            raise StopIteration
        # return the transition that was taken in this step
        return transition
def read_letter(self):
"""
Reads a letter from the input tape.
INPUT:
Nothing.
OUTPUT:
A letter.
Exception ``StopIteration`` is thrown if tape has reached
the end.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMProcessIterator
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
sage: it.read_letter()
0
"""
return self._input_tape_iter_.next()
    def write_letter(self, letter):
        """
        Writes a letter on the output tape.

        INPUT:

        - ``letter`` -- the letter to be written.

        OUTPUT:

        Nothing.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.write_letter(42)
            sage: it.output_tape
            [42]
        """
        # the output tape is a plain list of letters
        self.output_tape.append(letter)
def write_word(self, word):
"""
Writes a word on the output tape.
INPUT:
- ``word`` -- the word to be written.
OUTPUT:
Nothing.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMProcessIterator
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
sage: it.write_word([4, 2])
sage: it.output_tape
[4, 2]
"""
for letter in word:
self.write_letter(letter)
def get_next_transition(self, word_in):
"""
Returns the next transition according to ``word_in``. It is
assumed that we are in state ``self.current_state``.
INPUT:
- ``word_in`` -- the input word.
OUTPUT:
The next transition according to ``word_in``. It is assumed
that we are in state ``self.current_state``. If no transition
matches, a ``ValueError`` is thrown.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMProcessIterator
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
sage: it.get_next_transition([0])
Transition from 'A' to 'A': 0|1
sage: it.get_next_transition([2])
Traceback (most recent call last):
...
ValueError: No transition with input [2] found.
"""
for transition in self.current_state.transitions:
if transition.word_in == word_in:
return transition
raise ValueError("No transition with input %s found." % (word_in,))
#*****************************************************************************
@cached_function
def setup_latex_preamble():
    r"""
    This function adds the package ``tikz`` with support for automata
    to the preamble of Latex so that the finite state machines can be
    drawn nicely.

    INPUT:

    Nothing.

    OUTPUT:

    Nothing.

    The function is cached, so the preamble is only modified once per
    session.

    See the section on :ref:`finite_state_machine_LaTeX_output`
    in the introductory examples of this module.

    TESTS::

        sage: from sage.combinat.finite_state_machine import setup_latex_preamble
        sage: setup_latex_preamble()
        sage: ("\usepackage{tikz}" in latex.extra_preamble()) == latex.has_file("tikz.sty")
        True
    """
    latex.add_package_to_preamble_if_available('tikz')
    # presumably this keeps MathJax from trying to render the tikz
    # code itself -- confirm against the latex object's documentation
    latex.add_to_mathjax_avoid_list("tikz")
    # the automata library is only useful when tikz is available
    if latex.has_file("tikz.sty"):
        latex.add_to_preamble(r'\usetikzlibrary{automata}')
#*****************************************************************************
| 35.965521 | 208 | 0.522803 | # -*- coding: utf-8 -*-
r"""
Finite State Machines, Automata, Transducers
This module adds support for finite state machines, automata and
transducers. See classes :class:`Automaton` and :class:`Transducer`
(or the more general class :class:`FiniteStateMachine`) and the
:ref:`examples <finite_state_machine_examples>` below for
details on creating one.
Contents
========
:class:`FiniteStateMachine` and derived classes :class:`Transducer` and :class:`Automaton`
------------------------------------------------------------------------------------------
Accessing parts of a finite state machine
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.state` | Get a state by its label
:meth:`~FiniteStateMachine.states` | List of states
:meth:`~FiniteStateMachine.iter_states` | Iterator over the states
:meth:`~FiniteStateMachine.initial_states` | List of initial states
:meth:`~FiniteStateMachine.iter_initial_states` | Iterator over initial states
:meth:`~FiniteStateMachine.final_states` | List of final states
:meth:`~FiniteStateMachine.iter_final_states` | Iterator over final states
:meth:`~FiniteStateMachine.transition` | Get a transition by its states and labels
:meth:`~FiniteStateMachine.transitions` | List of transitions
:meth:`~FiniteStateMachine.iter_transitions` | Iterator over the transitions
:meth:`~FiniteStateMachine.predecessors` | List of predecessors of a state
:meth:`~FiniteStateMachine.induced_sub_finite_state_machine` | Induced sub-machine
:meth:`~FiniteStateMachine.accessible_components` | Accessible components
:meth:`~FiniteStateMachine.final_components` | Final components (connected components which cannot be left again)
(Modified) Copies
^^^^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.empty_copy` | Returns an empty deep copy
:meth:`~FiniteStateMachine.deepcopy` | Returns a deep copy
:meth:`~FiniteStateMachine.relabeled` | Returns a relabeled deep copy
Manipulation
^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.add_state` | Add a state
:meth:`~FiniteStateMachine.add_states` | Add states
:meth:`~FiniteStateMachine.delete_state` | Delete a state
:meth:`~FiniteStateMachine.add_transition` | Add a transition
:meth:`~FiniteStateMachine.add_transitions_from_function` | Add transitions
:attr:`~FiniteStateMachine.on_duplicate_transition` | Hook for handling duplicate transitions
:meth:`~FiniteStateMachine.add_from_transition_function` | Add transitions by a transition function
:meth:`~FiniteStateMachine.delete_transition` | Delete a transition
:meth:`~FiniteStateMachine.remove_epsilon_transitions` | Remove epsilon transitions (not implemented)
:meth:`~FiniteStateMachine.split_transitions` | Split transitions with input words of length ``> 1``
:meth:`~FiniteStateMachine.determine_alphabets` | Determines input and output alphabets
:meth:`~FiniteStateMachine.construct_final_word_out` | Construct final output by implicitly reading trailing letters; cf. :meth:`~FiniteStateMachine.with_final_word_out`
Properties
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.has_state` | Checks for a state
:meth:`~FiniteStateMachine.has_initial_state` | Checks for an initial state
:meth:`~FiniteStateMachine.has_initial_states` | Checks for initial states
:meth:`~FiniteStateMachine.has_final_state` | Checks for an final state
:meth:`~FiniteStateMachine.has_final_states` | Checks for final states
:meth:`~FiniteStateMachine.has_transition` | Checks for a transition
:meth:`~FiniteStateMachine.is_deterministic` | Checks for a deterministic machine
:meth:`~FiniteStateMachine.is_complete` | Checks for a complete machine
:meth:`~FiniteStateMachine.is_connected` | Checks for a connected machine
:meth:`~FiniteStateMachine.is_Markov_chain` | Checks for a Markov chain
:meth:`~FiniteStateMachine.is_monochromatic` | Checks whether the colors of all states are equal
:meth:`~FiniteStateMachine.asymptotic_moments` | Main terms of expectation and variance of sums of labels
Operations
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.disjoint_union` | Disjoint union (not implemented)
:meth:`~FiniteStateMachine.concatenation` | Concatenation (not implemented)
:meth:`~FiniteStateMachine.Kleene_closure` | Kleene closure (not implemented)
:meth:`Automaton.intersection` | Intersection of automata
:meth:`Transducer.intersection` | Intersection of transducers
:meth:`Transducer.cartesian_product` | Cartesian product of a transducer with another finite state machine
:meth:`~FiniteStateMachine.product_FiniteStateMachine` | Product of finite state machines
:meth:`~FiniteStateMachine.composition` | Composition (output of other is input of self)
:meth:`~FiniteStateMachine.input_projection` | Input projection (output is deleted)
:meth:`~FiniteStateMachine.output_projection` | Output projection (old output is new input)
:meth:`~FiniteStateMachine.projection` | Input or output projection
:meth:`~FiniteStateMachine.transposition` | Transposition (all transitions are reversed)
:meth:`~FiniteStateMachine.with_final_word_out` | Machine with final output constructed by implicitly reading trailing letters, cf. :meth:`~FiniteStateMachine.construct_final_word_out` for inplace version
:meth:`Automaton.determinisation` | Determinisation of an automaton
:meth:`~FiniteStateMachine.process` | Process input
:meth:`Automaton.process` | Process input of an automaton (output differs from general case)
:meth:`Transducer.process` | Process input of a transducer (output differs from general case)
:meth:`~FiniteStateMachine.iter_process` | Return process iterator
Simplification
^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.prepone_output` | Prepone output where possible
:meth:`~FiniteStateMachine.equivalence_classes` | List of equivalent states
:meth:`~FiniteStateMachine.quotient` | Quotient with respect to equivalence classes
:meth:`~FiniteStateMachine.merged_transitions` | Merge transitions while adding input
:meth:`~FiniteStateMachine.markov_chain_simplification` | Simplification of a Markov chain
:meth:`Automaton.minimization` | Minimization of an automaton
:meth:`Transducer.simplification` | Simplification of a transducer
Conversion
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.adjacency_matrix` | (Weighted) adjacency :class:`matrix <Matrix>`
:meth:`~FiniteStateMachine.graph` | Underlying :class:`DiGraph`
:meth:`~FiniteStateMachine.plot` | Plot
LaTeX output
++++++++++++
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.latex_options` | Set options
:meth:`~FiniteStateMachine.set_coordinates` | Set coordinates of the states
:meth:`~FiniteStateMachine.default_format_transition_label` | Default formatting of words in transition labels
:meth:`~FiniteStateMachine.format_letter_negative` | Format negative numbers as overlined number
:meth:`~FiniteStateMachine.format_transition_label_reversed` | Format words in transition labels in reversed order
:class:`FSMState`
-----------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:attr:`~FSMState.final_word_out` | Final output of a state
:attr:`~FSMState.is_final` | Describes whether a state is final or not
:attr:`~FSMState.is_initial` | Describes whether a state is initial or not
:meth:`~FSMState.label` | Label of a state
:meth:`~FSMState.relabeled` | Returns a relabeled deep copy of a state
:meth:`~FSMState.fully_equal` | Checks whether two states are fully equal (including all attributes)
:class:`FSMTransition`
----------------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:attr:`~FSMTransition.from_state` | State in which transition starts
:attr:`~FSMTransition.to_state` | State in which transition ends
:attr:`~FSMTransition.word_in` | Input word of the transition
:attr:`~FSMTransition.word_out` | Output word of the transition
:meth:`~FSMTransition.deepcopy` | Returns a deep copy of the transition
Helper Functions
----------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:func:`equal` | Checks whether all elements of ``iterator`` are equal
:func:`full_group_by` | Group iterable by values of some key
:func:`startswith` | Determine whether list starts with the given prefix
:func:`FSMLetterSymbol` | Returns a string associated to the input letter
:func:`FSMWordSymbol` | Returns a string associated to a word
:func:`is_FSMState` | Tests whether an object inherits from :class:`FSMState`
:func:`is_FSMTransition` | Tests whether an object inherits from :class:`FSMTransition`
:func:`is_FiniteStateMachine` | Tests whether an object inherits from :class:`FiniteStateMachine`
:func:`duplicate_transition_ignore` | Default function for handling duplicate transitions
:func:`duplicate_transition_raise_error` | Raise error when inserting a duplicate transition
:func:`duplicate_transition_add_input` | Add input when inserting a duplicate transition
.. _finite_state_machine_examples:
Examples
========
We start with a general :class:`FiniteStateMachine`. Later there will
be also an :class:`Automaton` and a :class:`Transducer`.
A simple finite state machine
-----------------------------
We can easily create a finite state machine by
::
sage: fsm = FiniteStateMachine()
sage: fsm
Finite state machine with 0 states
By default this is the empty finite state machine, so not very
interesting. Let's create and add some states and transitions::
sage: day = fsm.add_state('day')
sage: night = fsm.add_state('night')
sage: sunrise = fsm.add_transition(night, day)
sage: sunset = fsm.add_transition(day, night)
Let us look at ``sunset`` more closely::
sage: sunset
Transition from 'day' to 'night': -|-
Note that we could also have created and added the transitions directly
by::
sage: fsm.add_transition('day', 'night')
Transition from 'day' to 'night': -|-
This would have added the states automatically, since they are
present in the transitions.
Anyhow, we got the following finite state machine::
sage: fsm
Finite state machine with 2 states
We can also obtain the underlying directed graph by
::
sage: fsm.graph()
Digraph on 2 vertices
To visualize a finite state machine, we can use
:func:`~sage.misc.latex.latex` and run the result through LaTeX,
see the section on :ref:`finite_state_machine_LaTeX_output`
below.
Alternatively, we could have created the finite state machine above
simply by
::
sage: FiniteStateMachine([('night', 'day'), ('day', 'night')])
Finite state machine with 2 states
See :class:`FiniteStateMachine` for a lot of possibilities to create
finite state machines.
.. _finite_state_machine_recognizing_NAFs_example:
A simple Automaton (recognizing NAFs)
---------------------------------------
We want to build an automaton which recognizes non-adjacent forms
(NAFs), i.e., sequences which have no adjacent non-zeros.
We use `0`, `1`, and `-1` as digits::
sage: NAF = Automaton(
....: {'A': [('A', 0), ('B', 1), ('B', -1)], 'B': [('A', 0)]})
sage: NAF.state('A').is_initial = True
sage: NAF.state('A').is_final = True
sage: NAF.state('B').is_final = True
sage: NAF
Automaton with 2 states
Of course, we could have specified the initial and final states
directly in the definition of ``NAF`` by ``initial_states=['A']`` and
``final_states=['A', 'B']``.
So let's test the automaton with some input::
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False # activate new output behavior
sage: NAF([0])
True
sage: NAF([0, 1])
True
sage: NAF([1, -1])
False
sage: NAF([0, -1, 0, 1])
True
sage: NAF([0, -1, -1, -1, 0])
False
sage: NAF([-1, 0, 0, 1, 1])
False
Alternatively, we could call that by
::
sage: NAF.process([0, -1, 0, 1])
(True, 'B')
which gives additionally the state in which we arrived.
.. _finite_state_machine_LaTeX_output:
LaTeX output
------------
We can visualize a finite state machine by converting it to LaTeX by
using the usual function :func:`~sage.misc.latex.latex`. Within LaTeX,
TikZ is used for typesetting the graphics, see the
:wikipedia:`PGF/TikZ`.
::
sage: print latex(NAF)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state, accepting, initial] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
\node[state, accepting] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
\path[->] (v0) edge[loop above] node {$0$} ();
\path[->] (v0.185.00) edge node[rotate=360.00, anchor=north] {$1, -1$} (v1.355.00);
\path[->] (v1.5.00) edge node[rotate=0.00, anchor=south] {$0$} (v0.175.00);
\end{tikzpicture}
We can turn this into a graphical representation.
::
sage: view(NAF) # not tested
To actually see this, use the live documentation in the Sage notebook
and execute the cells in this and the previous section.
Several options can be set to customize the output, see
:meth:`~FiniteStateMachine.latex_options` for details. In particular,
we use :meth:`~FiniteStateMachine.format_letter_negative` to format
`-1` as `\overline{1}`.
::
sage: NAF.latex_options(
....: coordinates={'A': (0, 0),
....: 'B': (6, 0)},
....: initial_where={'A': 'below'},
....: format_letter=NAF.format_letter_negative,
....: format_state_label=lambda x:
....: r'\mathcal{%s}' % x.label()
....: )
sage: print latex(NAF)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state, accepting, initial, initial where=below] (v0) at (0.000000, 0.000000) {$\mathcal{A}$};
\node[state, accepting] (v1) at (6.000000, 0.000000) {$\mathcal{B}$};
\path[->] (v0) edge[loop above] node {$0$} ();
\path[->] (v0.5.00) edge node[rotate=0.00, anchor=south] {$1, \overline{1}$} (v1.175.00);
\path[->] (v1.185.00) edge node[rotate=360.00, anchor=north] {$0$} (v0.355.00);
\end{tikzpicture}
sage: view(NAF) # not tested
A simple transducer (binary inverter)
-------------------------------------
Let's build a simple transducer, which rewrites a binary word by
inverting each bit::
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
We can look at the states and transitions::
sage: inverter.states()
['A']
sage: for t in inverter.transitions():
....: print t
Transition from 'A' to 'A': 0|1
Transition from 'A' to 'A': 1|0
Now we apply a word to it and see what the transducer does::
sage: inverter([0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1])
[1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0]
``True`` means that we landed in a final state, that state is labeled
``'A'``, and we also got an output.
A transducer which performs division by `3` in binary
-----------------------------------------------------
Now we build a transducer, which divides a binary number by `3`.
The labels of the states are the remainder of the division.
The transition function is
::
sage: def f(state_from, read):
....: if state_from + read <= 1:
....: state_to = 2*state_from + read
....: write = 0
....: else:
....: state_to = 2*state_from + read - 3
....: write = 1
....: return (state_to, write)
which assumes reading a binary number from left to right.
We get the transducer with
::
sage: D = Transducer(f, initial_states=[0], final_states=[0],
....: input_alphabet=[0, 1])
Let us try to divide `12` by `3`::
sage: D([1, 1, 0, 0])
[0, 1, 0, 0]
Now we want to divide `13` by `3`::
sage: D([1, 1, 0, 1])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
The raised ``ValueError``
means `13` is not divisible by `3`.
.. _finite_state_machine_gray_code_example:
Gray Code
---------
The Gray code is a binary :wikipedia:`numeral system <Numeral_system>`
where two successive values differ in only one bit, cf. the
:wikipedia:`Gray_code`. The Gray code of an integer `n` is obtained by
a bitwise xor between the binary expansion of `n` and the binary
expansion of `\lfloor n/2\rfloor`; the latter corresponds to a
shift by one position in binary.
The purpose of this example is to construct a transducer converting the
standard binary expansion to the Gray code by translating this
construction into operations with transducers.
For this construction, the least significant digit is at
the left-most position.
Note that it is easier to shift everything to
the right first, i.e., multiply by `2` instead of building
`\lfloor n/2\rfloor`. Then, we take the input xor with the right
shift of the input and forget the first letter.
We first construct a transducer shifting the binary expansion to the
right. This requires storing the previously read digit in a state.
::
sage: def shift_right_transition(state, digit):
....: if state == 'I':
....: return (digit, None)
....: else:
....: return (digit, state)
sage: shift_right_transducer = Transducer(
....: shift_right_transition,
....: initial_states=['I'],
....: input_alphabet=[0, 1],
....: final_states=[0])
sage: shift_right_transducer.transitions()
[Transition from 'I' to 0: 0|-,
Transition from 'I' to 1: 1|-,
Transition from 0 to 0: 0|0,
Transition from 0 to 1: 1|0,
Transition from 1 to 0: 0|1,
Transition from 1 to 1: 1|1]
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False
sage: shift_right_transducer([0, 1, 1, 0])
[0, 1, 1]
sage: shift_right_transducer([1, 0, 0])
[1, 0]
The output of the shifts above look a bit weird (from a right-shift
transducer, we would expect, for example, that ``[1, 0, 0]`` was
mapped to ``[0, 1, 0]``), since we write ``None`` instead of the zero
at the left. Further, note that only `0` is listed as a final state
as we have to enforce that a most significant zero is read as the last
input letter in order to flush the last digit::
sage: shift_right_transducer([0, 1, 0, 1])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
Next, we construct the transducer performing the xor operation. We also
have to take ``None`` into account as our ``shift_right_transducer``
waits one iteration until it starts writing output. This corresponds
with our intention to forget the first letter.
::
sage: def xor_transition(state, digits):
....: if digits[0] is None or digits[1] is None:
....: return (0, None)
....: else:
....: return (0, digits[0].__xor__(digits[1]))
sage: from itertools import product
sage: xor_transducer = Transducer(
....: xor_transition,
....: initial_states=[0],
....: final_states=[0],
....: input_alphabet=list(product([None, 0, 1], [0, 1])))
sage: xor_transducer.transitions()
[Transition from 0 to 0: (None, 0)|-,
Transition from 0 to 0: (None, 1)|-,
Transition from 0 to 0: (0, 0)|0,
Transition from 0 to 0: (0, 1)|1,
Transition from 0 to 0: (1, 0)|1,
Transition from 0 to 0: (1, 1)|0]
sage: xor_transducer([(None, 0), (None, 1), (0, 0), (0, 1), (1, 0), (1, 1)])
[0, 1, 1, 0]
sage: xor_transducer([(0, None)])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
The transducer computing the Gray code is then constructed as a
:meth:`cartesian product <Transducer.cartesian_product>` between the
shifted version and the original input (represented here by the
``shift_right_transducer`` and the :meth:`identity transducer
<sage.combinat.finite_state_machine_generators.TransducerGenerators.Identity>`,
respectively). This cartesian product is then fed into the
``xor_transducer`` as a :meth:`composition
<FiniteStateMachine.composition>` of transducers.
As described in :meth:`Transducer.cartesian_product`, we have to
temporarily set
``finite_state_machine.FSMOldCodeTransducerCartesianProduct`` to
``False`` in order to disable backwards compatible code.
::
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
sage: product_transducer = shift_right_transducer.cartesian_product(transducers.Identity([0, 1]))
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = True
sage: Gray_transducer = xor_transducer(product_transducer)
We use :meth:`~FiniteStateMachine.construct_final_word_out` to make sure that all output
is written; otherwise, we would have to make sure that a sufficient number of trailing
zeros is read.
::
sage: Gray_transducer.construct_final_word_out([0])
sage: Gray_transducer.transitions()
[Transition from (('I', 0), 0) to ((0, 0), 0): 0|-,
Transition from (('I', 0), 0) to ((1, 0), 0): 1|-,
Transition from ((0, 0), 0) to ((0, 0), 0): 0|0,
Transition from ((0, 0), 0) to ((1, 0), 0): 1|1,
Transition from ((1, 0), 0) to ((0, 0), 0): 0|1,
Transition from ((1, 0), 0) to ((1, 0), 0): 1|0]
There is a :meth:`prepackaged transducer
<sage.combinat.finite_state_machine_generators.TransducerGenerators.GrayCode>`
for Gray code, let's see whether they agree. We have to use
:meth:`~FiniteStateMachine.relabeled` to relabel our states with
integers.
::
sage: constructed = Gray_transducer.relabeled()
sage: packaged = transducers.GrayCode()
sage: constructed == packaged
True
Finally, we check that this indeed computes the Gray code of the first
10 non-negative integers.
::
sage: for n in srange(10):
....: Gray_transducer(n.bits())
[]
[1]
[1, 1]
[0, 1]
[0, 1, 1]
[1, 1, 1]
[1, 0, 1]
[0, 0, 1]
[0, 0, 1, 1]
[1, 0, 1, 1]
Using the hook-functions
------------------------
Let's use the previous example "division by `3`" to demonstrate the
optional state and transition parameters ``hook``.
First, we define, what those functions should do. In our case, this is
just saying in which state we are and which transition we take
::
sage: def state_hook(state, process):
....: print "We are now in State %s." % (state.label(),)
sage: from sage.combinat.finite_state_machine import FSMWordSymbol
sage: def transition_hook(transition, process):
....: print ("Currently we go from %s to %s, "
....: "reading %s and writing %s." % (
....: transition.from_state, transition.to_state,
....: FSMWordSymbol(transition.word_in),
....: FSMWordSymbol(transition.word_out)))
Now, let's add these hook-functions to the existing transducer::
sage: for s in D.iter_states():
....: s.hook = state_hook
sage: for t in D.iter_transitions():
....: t.hook = transition_hook
Rerunning the process again now gives the following output::
sage: D.process([1, 1, 0, 1])
We are now in State 0.
Currently we go from 0 to 1, reading 1 and writing 0.
We are now in State 1.
Currently we go from 1 to 0, reading 1 and writing 1.
We are now in State 0.
Currently we go from 0 to 0, reading 0 and writing 0.
We are now in State 0.
Currently we go from 0 to 1, reading 1 and writing 0.
We are now in State 1.
(False, 1, [0, 1, 0, 0])
The example above just explains the basic idea of using
hook-functions. In the following, we will use those hooks more seriously.
Detecting sequences with same number of `0` and `1`
---------------------------------------------------
Suppose we have a binary input and want to accept all sequences with
the same number of `0` and `1`. This cannot be done with a finite
automaton. Anyhow, we can make usage of the hook functions to extend
our finite automaton by a counter::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
sage: C = FiniteStateMachine()
sage: def update_counter(state, process):
....: l = process.read_letter()
....: process.fsm.counter += 1 if l == 1 else -1
....: if process.fsm.counter > 0:
....: next_state = 'positive'
....: elif process.fsm.counter < 0:
....: next_state = 'negative'
....: else:
....: next_state = 'zero'
....: return FSMTransition(state, process.fsm.state(next_state),
....: l, process.fsm.counter)
sage: C.add_state(FSMState('zero', hook=update_counter,
....: is_initial=True, is_final=True))
'zero'
sage: C.add_state(FSMState('positive', hook=update_counter))
'positive'
sage: C.add_state(FSMState('negative', hook=update_counter))
'negative'
Now, let's input some sequence::
sage: C.counter = 0; C([1, 1, 1, 1, 0, 0])
(False, 'positive', [1, 2, 3, 4, 3, 2])
The result is False, since there are four `1` but only two `0`. We
land in the state ``positive`` and we can also see the values of the
counter in each step.
Let's try some other examples::
sage: C.counter = 0; C([1, 1, 0, 0])
(True, 'zero', [1, 2, 1, 0])
sage: C.counter = 0; C([0, 1, 0, 0])
(False, 'negative', [-1, 0, -1, -2])
See also methods :meth:`Automaton.process` and
:meth:`Transducer.process` (or even
:meth:`FiniteStateMachine.process`), the explanation of the parameter
``hook`` and the examples in :class:`FSMState` and
:class:`FSMTransition`, and the description and examples in
:class:`FSMProcessIterator` for more information on processing and
hooks.
AUTHORS:
- Daniel Krenn (2012-03-27): initial version
- Clemens Heuberger (2012-04-05): initial version
- Sara Kropf (2012-04-17): initial version
- Clemens Heuberger (2013-08-21): release candidate for Sage patch
- Daniel Krenn (2013-08-21): release candidate for Sage patch
- Sara Kropf (2013-08-21): release candidate for Sage patch
- Clemens Heuberger (2013-09-02): documentation improved
- Daniel Krenn (2013-09-13): comments from trac worked in
- Clemens Heuberger (2013-11-03): output (labels) of determinisation,
product, composition, etc. changed (for consistency),
representation of state changed, documentation improved
- Daniel Krenn (2013-11-04): whitespaces in documentation corrected
- Clemens Heuberger (2013-11-04): full_group_by added
- Daniel Krenn (2013-11-04): next release candidate for Sage patch
- Sara Kropf (2013-11-08): fix for adjacency matrix
- Clemens Heuberger (2013-11-11): fix for prepone_output
- Daniel Krenn (2013-11-11): comments from trac #15078 included:
docstring of FiniteStateMachine rewritten, Automaton and Transducer
inherited from FiniteStateMachine
- Daniel Krenn (2013-11-25): documentation improved according to
comments from trac #15078
- Clemens Heuberger, Daniel Krenn, Sara Kropf (2014-02-21--2014-07-18):
A huge bunch of improvements. Details see
#15841, #15847, #15848, #15849, #15850, #15922, #15923, #15924,
#15925, #15928, #15960, #15961, #15962, #15963, #15975, #16016,
#16024, #16061, #16128, #16132, #16138, #16139, #16140, #16143,
#16144, #16145, #16146, #16191, #16200, #16205, #16206, #16207,
#16229, #16253, #16254, #16255, #16266, #16355, #16357, #16387,
#16425, #16539, #16555, #16557, #16588, #16589, #16666, #16668,
#16674, #16675, #16677.
ACKNOWLEDGEMENT:
- Clemens Heuberger, Daniel Krenn and Sara Kropf are supported by the
Austrian Science Fund (FWF): P 24644-N26.
Methods
=======
"""
#*****************************************************************************
# Copyright (C) 2012--2014 Clemens Heuberger <clemens.heuberger@aau.at>
# 2012--2014 Daniel Krenn <dev@danielkrenn.at>
# 2012--2014 Sara Kropf <sara.kropf@aau.at>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.structure.sage_object import SageObject
from sage.graphs.digraph import DiGraph
from sage.matrix.constructor import matrix
from sage.rings.integer_ring import ZZ
from sage.rings.real_mpfr import RR
from sage.symbolic.ring import SR
from sage.calculus.var import var
from sage.misc.cachefunc import cached_function
from sage.misc.latex import latex
from sage.misc.misc import verbose
from sage.functions.trig import cos, sin, atan2
from sage.symbolic.constants import pi
from copy import copy
from copy import deepcopy
import itertools
from itertools import imap
from collections import defaultdict, OrderedDict
def full_group_by(l, key=lambda x: x):
    """
    Group iterable ``l`` by values of ``key``.

    INPUT:

    - iterable ``l``

    - key function ``key``

    OUTPUT:

    A list of pairs ``(k, elements)`` such that ``key(e)=k`` for all
    ``e`` in ``elements``.

    This is similar to ``itertools.groupby`` except that lists are
    returned instead of iterables and no prior sorting is required.

    We do not require

    - that the keys are sortable (in contrast to the
      approach via ``sorted`` and ``itertools.groupby``) and
    - that the keys are hashable (in contrast to the
      implementation proposed in `<http://stackoverflow.com/a/15250161>`_).

    However, it is required

    - that distinct keys have distinct ``str``-representations.

    The implementation is inspired by
    `<http://stackoverflow.com/a/15250161>`_, but non-hashable keys are
    allowed.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import full_group_by
        sage: t = [2/x, 1/x, 2/x]
        sage: r = full_group_by([0, 1, 2], key=lambda i:t[i])
        sage: sorted(r, key=lambda p:p[1])
        [(2/x, [0, 2]), (1/x, [1])]

    Note that the behavior is different from ``itertools.groupby``
    because neither `1/x<2/x` nor `2/x<1/x` does hold.
    """
    # Buckets are indexed by the string representation of the key, so the
    # key itself need not be hashable; ``representatives`` remembers the
    # actual key object behind each string.
    buckets = defaultdict(list)
    representatives = {}
    for element in l:
        element_key = key(element)
        label = str(element_key)
        if label in representatives:
            # Distinct keys sharing a string representation would be
            # silently merged, so refuse them explicitly.
            if representatives[label] != element_key:
                raise ValueError("Two distinct elements with representation "
                                 "%s " % label)
        else:
            representatives[label] = element_key
        buckets[label].append(element)
    return [(representatives[label], members)
            for (label, members) in buckets.items()]
def equal(iterator):
    """
    Checks whether all elements of ``iterator`` are equal.

    INPUT:

    - ``iterator`` -- an iterator of the elements to check

    OUTPUT:

    ``True`` or ``False``.

    An empty iterator counts as all-equal. Every remaining element is
    compared against the first one; this implements
    `<http://stackoverflow.com/a/3844832/1052778>`_.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import equal
        sage: equal([0, 0, 0])
        True
        sage: equal([0, 1, 0])
        False
        sage: equal([])
        True
        sage: equal(iter([None, None]))
        True

    We can test other properties of the elements than the elements
    themselves. In the following example, we check whether all tuples
    have the same lengths::

        sage: equal(len(x) for x in [(1, 2), (2, 3), (3, 1)])
        True
        sage: equal(len(x) for x in [(1, 2), (1, 2, 3), (3, 1)])
        False
    """
    items = iter(iterator)
    # The loop body runs at most once: it grabs the first element (if
    # any) and compares the remainder of the iterator against it.
    for reference in items:
        return all(reference == item for item in items)
    # Reaching here means the iterator was empty.
    return True
def startswith(list, prefix):
    """
    Determine whether ``list`` starts with the given ``prefix``.

    INPUT:

    - ``list`` -- list

    - ``prefix`` -- list representing the prefix

    OUTPUT:

    ``True`` or ``False``.

    Similar to :meth:`str.startswith`. An empty ``prefix`` matches
    any list.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import startswith
        sage: startswith([1, 2, 3], [1, 2])
        True
        sage: startswith([1], [1, 2])
        False
        sage: startswith([1, 3, 2], [1, 2])
        False
    """
    # NOTE(review): the parameter name ``list`` shadows the builtin, but
    # it is part of the public signature and must stay.
    return prefix == list[:len(prefix)]
#*****************************************************************************
FSMEmptyWordSymbol = '-'
EmptyWordLaTeX = r'\varepsilon'
EndOfWordLaTeX = r'\$'
FSMOldCodeTransducerCartesianProduct = True
FSMOldProcessOutput = True # See trac #16132 (deprecation).
tikz_automata_where = {"right": 0,
"above": 90,
"left": 180,
"below": 270}
def FSMLetterSymbol(letter):
"""
Returns a string associated to the input letter.
INPUT:
- ``letter`` -- the input letter or ``None`` (representing the
empty word).
OUTPUT:
If ``letter`` is ``None`` the symbol for the empty word
``FSMEmptyWordSymbol`` is returned, otherwise the string
associated to the letter.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMLetterSymbol
sage: FSMLetterSymbol(0)
'0'
sage: FSMLetterSymbol(None)
'-'
"""
return FSMEmptyWordSymbol if letter is None else repr(letter)
def FSMWordSymbol(word):
    """
    Returns a string of ``word``. It may return the symbol of the
    empty word ``FSMEmptyWordSymbol``.
    INPUT:
    - ``word`` -- the input word.
    OUTPUT:
    A string of ``word``.
    EXAMPLES::
        sage: from sage.combinat.finite_state_machine import FSMWordSymbol
        sage: FSMWordSymbol([0, 1, 1])
        '0,1,1'
    """
    # A non-list argument is treated as a single letter.
    if not isinstance(word, list):
        return FSMLetterSymbol(word)
    # The empty word has its own dedicated symbol.
    if not word:
        return FSMEmptyWordSymbol
    # Join the letter symbols with commas; this replaces the previous
    # quadratic string concatenation loop.
    return ','.join(FSMLetterSymbol(letter) for letter in word)
#*****************************************************************************
def is_FSMState(S):
    """
    Test whether ``S`` is an instance of :class:`FSMState`.
    TESTS::
        sage: from sage.combinat.finite_state_machine import is_FSMState, FSMState
        sage: is_FSMState(FSMState('A'))
        True
    """
    return isinstance(S, FSMState)
class FSMState(SageObject):
    """
    Class for a state of a finite state machine.
    INPUT:
    - ``label`` -- the label of the state.
    - ``word_out`` -- (default: ``None``) a word that is written when
      the state is reached.
    - ``is_initial`` -- (default: ``False``)
    - ``is_final`` -- (default: ``False``)
    - ``final_word_out`` -- (default: ``None``) a word that is written when
      the state is reached as the last state of some input; only for final
      states.
    - ``hook`` -- (default: ``None``) A function which is called when
      the state is reached during processing input. It takes two input
      parameters: the first is the current state (to allow using the same
      hook for several states), the second is the current process
      iterator object (to have full access to everything; e.g. the
      next letter from the input tape can be read in). It can output
      the next transition, i.e. the transition to take next. If it
      returns ``None`` the process iterator chooses. Moreover, this
      function can raise a ``StopIteration`` exception to stop
      processing of a finite state machine the input immediately. See
      also the example below.
    - ``color`` -- (default: ``None``) In order to distinguish states,
      they can be given an arbitrary "color" (an arbitrary object).
      This is used in :meth:`FiniteStateMachine.equivalence_classes`:
      states of different colors are never considered to be
      equivalent. Note that :meth:`Automaton.determinisation` requires
      that ``color`` is hashable.
    - ``allow_label_None`` -- (default: ``False``) If ``True`` allows also
      ``None`` as label. Note that a state with label ``None`` is used in
      :class:`FSMProcessIterator`.
    OUTPUT:
    Returns a state of a finite state machine.
    EXAMPLES::
        sage: from sage.combinat.finite_state_machine import FSMState
        sage: A = FSMState('state 1', word_out=0, is_initial=True)
        sage: A
        'state 1'
        sage: A.label()
        'state 1'
        sage: B = FSMState('state 2')
        sage: A == B
        False
    We can also define a final output word of a final state which is
    used if the input of a transducer leads to this state. Such final
    output words are used in subsequential transducers. ::
        sage: C = FSMState('state 3', is_final=True, final_word_out='end')
        sage: C.final_word_out
        ['end']
    The final output word can be a single letter, ``None`` or a list of
    letters::
        sage: A = FSMState('A')
        sage: A.is_final = True
        sage: A.final_word_out = 2
        sage: A.final_word_out
        [2]
        sage: A.final_word_out = [2, 3]
        sage: A.final_word_out
        [2, 3]
    Only final states can have a final output word which is not
    ``None``::
        sage: B = FSMState('B')
        sage: B.final_word_out is None
        True
        sage: B.final_word_out = 2
        Traceback (most recent call last):
        ...
        ValueError: Only final states can have a final output word,
        but state B is not final.
    Setting the ``final_word_out`` of a final state to ``None`` is the
    same as setting it to ``[]`` and is also the default for a final
    state::
        sage: C = FSMState('C', is_final=True)
        sage: C.final_word_out
        []
        sage: C.final_word_out = None
        sage: C.final_word_out
        []
        sage: C.final_word_out = []
        sage: C.final_word_out
        []
    It is not allowed to use ``None`` as a label::
        sage: from sage.combinat.finite_state_machine import FSMState
        sage: FSMState(None)
        Traceback (most recent call last):
        ...
        ValueError: Label None reserved for a special state,
        choose another label.
    This can be overridden by::
        sage: FSMState(None, allow_label_None=True)
        None
    Note that :meth:`Automaton.determinisation` requires that ``color``
    is hashable::
        sage: A = Automaton([[0, 0, 0]], initial_states=[0])
        sage: A.state(0).color = []
        sage: A.determinisation()
        Traceback (most recent call last):
        ...
        TypeError: unhashable type: 'list'
        sage: A.state(0).color = ()
        sage: A.determinisation()
        Automaton with 1 states
    We can use a hook function of a state to stop processing. This is
    done by raising a ``StopIteration`` exception. The following code
    demonstrates this::
        sage: T = Transducer([(0, 1, 9, 'a'), (1, 2, 9, 'b'),
        ....:                 (2, 3, 9, 'c'), (3, 4, 9, 'd')],
        ....:                initial_states=[0],
        ....:                final_states=[4],
        ....:                input_alphabet=[9])
        sage: def stop(current_state, process_iterator):
        ....:     raise StopIteration()
        sage: T.state(3).hook = stop
        sage: T.process([9, 9, 9, 9])
        (False, 3, ['a', 'b', 'c'])
    """
    # Class-level default; overridden per instance in __init__.
    is_initial = False
    """
    Describes whether the state is initial.
    EXAMPLES::
        sage: T = Automaton([(0,0,0)])
        sage: T.initial_states()
        []
        sage: T.state(0).is_initial = True
        sage: T.initial_states()
        [0]
    """
    def __init__(self, label, word_out=None,
                 is_initial=False, is_final=False, final_word_out=None,
                 hook=None, color=None, allow_label_None=False):
        """
        See :class:`FSMState` for more information.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState('final', is_final=True)
            'final'
        TESTS::
            sage: A = FSMState('A', is_final=True)
            sage: A.final_word_out
            []
            sage: A.is_final = True
            sage: A = FSMState('A', is_final=True, final_word_out='end')
            sage: A.final_word_out
            ['end']
            sage: A = FSMState('A', is_final=True,
            ....:              final_word_out=['e', 'n', 'd'])
            sage: A.final_word_out
            ['e', 'n', 'd']
            sage: A = FSMState('A', is_final=True, final_word_out=[])
            sage: A.final_word_out
            []
            sage: A = FSMState('A', is_final=True, final_word_out=None)
            sage: A.final_word_out
            []
            sage: A = FSMState('A', is_final=False)
            sage: A.final_word_out is None
            True
            sage: A.is_final = False
            sage: A = FSMState('A', is_final=False, final_word_out='end')
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final output word,
            but state A is not final.
            sage: A = FSMState('A', is_final=False,
            ....:              final_word_out=['e', 'n', 'd'])
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final output word,
            but state A is not final.
            sage: A = FSMState('A', is_final=False, final_word_out=None)
            sage: A.final_word_out is None
            True
            sage: A = FSMState('A', is_final=False, final_word_out=[])
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final output word,
            but state A is not final.
        """
        # A label of None is reserved for the special state used by
        # FSMProcessIterator, unless explicitly allowed.
        if not allow_label_None and label is None:
            raise ValueError("Label None reserved for a special state, "
                             "choose another label.")
        self._label_ = label
        # Normalize word_out to a list: single letter -> [letter], None -> [].
        if isinstance(word_out, list):
            self.word_out = word_out
        elif word_out is not None:
            self.word_out = [word_out]
        else:
            self.word_out = []
        self.is_initial = is_initial
        # Initialize the backing attribute first; is_final must then be set
        # before final_word_out, because the final_word_out setter checks it.
        self._final_word_out_ = None
        self.is_final = is_final
        self.final_word_out = final_word_out
        if hook is not None:
            # The hook must be callable; it is invoked when the state is
            # reached while processing input (see the class docstring).
            if hasattr(hook, '__call__'):
                self.hook = hook
            else:
                raise TypeError('Wrong argument for hook.')
        self.color = color
    def __lt__(self, other):
        """
        Returns True if label of ``self`` is less than label of
        ``other``.
        INPUT:
        - `other` -- a state.
        OUTPUT:
        True or False.
        EXAMPLE::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState(0) < FSMState(1)
            True
        """
        # States are ordered by their labels.
        return self.label() < other.label()
    @property
    def final_word_out(self):
        """
        The final output word of a final state which is written if the
        state is reached as the last state of the input of the finite
        state machine. For a non-final state, the value is ``None``.
        ``final_word_out`` can be a single letter, a list or ``None``,
        but for a final-state, it is always saved as a list.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_final=True, final_word_out=2)
            sage: A.final_word_out
            [2]
            sage: A.final_word_out = 3
            sage: A.final_word_out
            [3]
            sage: A.final_word_out = [3, 4]
            sage: A.final_word_out
            [3, 4]
            sage: A.final_word_out = None
            sage: A.final_word_out
            []
            sage: B = FSMState('B')
            sage: B.final_word_out is None
            True
        A non-final state cannot have a final output word::
            sage: B.final_word_out = [3, 4]
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final
            output word, but state B is not final.
        """
        return self._final_word_out_
    @final_word_out.setter
    def final_word_out(self, final_word_out):
        """
        Sets the value of the final output word of a final state.
        INPUT:
        - ``final_word_out`` -- a list, any element or ``None``.
        OUTPUT:
        Nothing.
        TESTS::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: B = FSMState('B')
            sage: B.final_word_out = []
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final
            output word, but state B is not final.
            sage: B.final_word_out = None
            sage: B.final_word_out is None
            True
        """
        if not self.is_final:
            if final_word_out is not None:
                raise ValueError("Only final states can have a "
                                 "final output word, but state %s is not final."
                                 % (self.label()))
            else:
                # Non-final states carry no final output word at all.
                self._final_word_out_ = None
        elif isinstance(final_word_out, list):
            self._final_word_out_ = final_word_out
        elif final_word_out is not None:
            # Wrap a single letter into a list.
            self._final_word_out_ = [final_word_out]
        else:
            # None means the empty final output word for a final state.
            self._final_word_out_ = []
    @property
    def is_final(self):
        """
        Describes whether the state is final or not.
        ``True`` if the state is final and ``False`` otherwise.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_final=True, final_word_out=3)
            sage: A.is_final
            True
            sage: A.is_final = False
            Traceback (most recent call last):
            ...
            ValueError: State A cannot be non-final, because it has a
            final output word. Only final states can have a final output
            word.
            sage: A.final_word_out = None
            sage: A.is_final = False
            sage: A.is_final
            False
        """
        # Final states are exactly those with a non-None final output word.
        return (self.final_word_out is not None)
    @is_final.setter
    def is_final(self, is_final):
        """
        Defines the state as a final state or a non-final state.
        INPUT:
        - ``is_final`` -- ``True`` if the state should be final and
          ``False`` otherwise.
        OUTPUT:
        Nothing.
        TESTS::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_final=True)
            sage: A.final_word_out
            []
            sage: A.is_final = False
            sage: A.final_word_out is None
            True
            sage: A = FSMState('A', is_final=True, final_word_out='a')
            sage: A.is_final = False
            Traceback (most recent call last):
            ...
            ValueError: State A cannot be non-final, because it has a
            final output word. Only final states can have a final output
            word.
            sage: A = FSMState('A', is_final=True, final_word_out=[])
            sage: A.is_final = False
            sage: A.final_word_out is None
            True
        """
        if is_final and self.final_word_out is None:
            # Becoming final: default to the empty final output word.
            self._final_word_out_ = []
        elif not is_final:
            # Becoming non-final is only allowed while the final output
            # word is empty (or already None).
            if not self.final_word_out:
                self._final_word_out_ = None
            else:
                raise ValueError("State %s cannot be non-final, because it "
                                 "has a final output word. Only final states "
                                 "can have a final output word. "
                                 % self.label())
    def label(self):
        """
        Returns the label of the state.
        INPUT:
        Nothing.
        OUTPUT:
        The label of the state.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('state')
            sage: A.label()
            'state'
        """
        return self._label_
    def __copy__(self):
        """
        Returns a (shallow) copy of the state.
        INPUT:
        Nothing.
        OUTPUT:
        A new state.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: copy(A)
            'A'
        """
        new = FSMState(self.label(), self.word_out,
                       self.is_initial, self.is_final,
                       color=self.color,
                       final_word_out=self.final_word_out)
        # hook is an optional attribute, so copy it only if present.
        if hasattr(self, 'hook'):
            new.hook = self.hook
        return new
    copy = __copy__
    def __deepcopy__(self, memo):
        """
        Returns a deep copy of the state.
        INPUT:
        - ``memo`` -- a dictionary storing already processed elements.
        OUTPUT:
        A new state.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: deepcopy(A)
            'A'
        """
        try:
            # relabeled() temporarily stores a replacement label here.
            label = self._deepcopy_relabel_
        except AttributeError:
            label = deepcopy(self.label(), memo)
        new = FSMState(label, deepcopy(self.word_out, memo),
                       self.is_initial, self.is_final)
        if hasattr(self, 'hook'):
            new.hook = deepcopy(self.hook, memo)
        new.color = deepcopy(self.color, memo)
        new.final_word_out = deepcopy(self.final_word_out, memo)
        return new
    def deepcopy(self, memo=None):
        """
        Returns a deep copy of the state.
        INPUT:
        - ``memo`` -- (default: ``None``) a dictionary storing already
          processed elements.
        OUTPUT:
        A new state.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState((1, 3), color=[1, 2],
            ....:              is_final=True, final_word_out=3)
            sage: B = deepcopy(A)
            sage: B
            (1, 3)
            sage: B.label == A.label
            True
            sage: B.label is A.label
            False
            sage: B.color == A.color
            True
            sage: B.color is A.color
            False
            sage: B.is_final == A.is_final
            True
            sage: B.is_final is A.is_final
            True
            sage: B.final_word_out == A.final_word_out
            True
            sage: B.final_word_out is A.final_word_out
            False
        """
        return deepcopy(self, memo)
    def relabeled(self, label, memo=None):
        """
        Returns a deep copy of the state with a new label.
        INPUT:
        - ``label`` -- the label of new state.
        - ``memo`` -- (default: ``None``) a dictionary storing already
          processed elements.
        OUTPUT:
        A new state.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: A.relabeled('B')
            'B'
        """
        # Communicate the new label to __deepcopy__ via a temporary
        # attribute, then clean it up again.
        self._deepcopy_relabel_ = label
        new = deepcopy(self, memo)
        del self._deepcopy_relabel_
        return new
    def __hash__(self):
        """
        Returns a hash value for the object.
        INPUT:
        Nothing.
        OUTPUT:
        The hash of this state.
        TESTS::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: hash(A) #random
            -269909568
        """
        # Hash by label only, consistent with __eq__ comparing labels only.
        return hash(self.label())
    def _repr_(self):
        """
        Returns the string "label".
        INPUT:
        Nothing.
        OUTPUT:
        A string.
        TESTS:
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState('A')._repr_()
            "'A'"
        """
        return repr(self.label())
    def __eq__(left, right):
        """
        Returns True if two states are the same, i.e., if they have
        the same labels.
        INPUT:
        - ``left`` -- a state.
        - ``right`` -- a state.
        OUTPUT:
        True or False.
        Note that the hooks and whether the states are initial or
        final are not checked. To fully compare two states (including
        these attributes), use :meth:`.fully_equal`.
        As only the labels are used when hashing a state, only the
        labels can actually be compared by the equality relation.
        Note that the labels are unique within one finite state machine,
        so this may only lead to ambiguities when comparing states
        belonging to different finite state machines.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: B = FSMState('A', is_initial=True)
            sage: A == B
            True
        """
        # Comparison with a non-state is simply unequal (no TypeError).
        if not is_FSMState(right):
            return False
        return left.label() == right.label()
    def __ne__(left, right):
        """
        Tests for inequality, complement of __eq__.
        INPUT:
        - ``left`` -- a state.
        - ``right`` -- a state.
        OUTPUT:
        True or False.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_initial=True)
            sage: B = FSMState('A', is_final=True)
            sage: A != B
            False
        """
        return (not (left == right))
    def fully_equal(left, right, compare_color=True):
        """
        Checks whether two states are fully equal, i.e., including all
        attributes except ``hook``.
        INPUT:
        - ``left`` -- a state.
        - ``right`` -- a state.
        - ``compare_color`` -- If ``True`` (default) colors are
          compared as well, otherwise not.
        OUTPUT:
        ``True`` or ``False``.
        Note that usual comparison by ``==`` does only compare the labels.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: B = FSMState('A', is_initial=True)
            sage: A.fully_equal(B)
            False
            sage: A == B
            True
            sage: A.is_initial = True; A.color = 'green'
            sage: A.fully_equal(B)
            False
            sage: A.fully_equal(B, compare_color=False)
            True
        """
        # When colors are not compared, treat them as equal.
        color = not compare_color or left.color == right.color
        return (left.__eq__(right) and
                left.is_initial == right.is_initial and
                left.is_final == right.is_final and
                left.final_word_out == right.final_word_out and
                left.word_out == right.word_out and
                color)
    def __nonzero__(self):
        """
        Returns True.
        INPUT:
        Nothing.
        OUTPUT:
        True or False.
        TESTS::
            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState('A').__nonzero__()
            True
        """
        return True  # A state cannot be zero (see __init__)
#*****************************************************************************
def is_FSMTransition(T):
    """
    Test whether ``T`` is an instance of :class:`FSMTransition`.
    TESTS::
        sage: from sage.combinat.finite_state_machine import is_FSMTransition, FSMTransition
        sage: is_FSMTransition(FSMTransition('A', 'B'))
        True
    """
    return isinstance(T, FSMTransition)
class FSMTransition(SageObject):
    """
    Class for a transition of a finite state machine.
    INPUT:
    - ``from_state`` -- state from which transition starts.
    - ``to_state`` -- state in which transition ends.
    - ``word_in`` -- the input word of the transitions (when the
      finite state machine is used as automaton)
    - ``word_out`` -- the output word of the transitions (when the
      finite state machine is used as transducer)
    OUTPUT:
    A transition of a finite state machine.
    EXAMPLES::
        sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
        sage: A = FSMState('A')
        sage: B = FSMState('B')
        sage: S = FSMTransition(A, B, 0, 1)
        sage: T = FSMTransition('A', 'B', 0, 1)
        sage: T == S
        True
        sage: U = FSMTransition('A', 'B', 0)
        sage: U == T
        False
    """
    # Class-level defaults; each is set per instance in __init__.
    from_state = None
    """State from which the transition starts. Read-only."""
    to_state = None
    """State in which the transition ends. Read-only."""
    word_in = None
    """Input word of the transition. Read-only."""
    word_out = None
    """Output word of the transition. Read-only."""
    def __init__(self, from_state, to_state,
                 word_in=None, word_out=None,
                 hook=None):
        """
        See :class:`FSMTransition` for more information.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 1)
            Transition from 'A' to 'B': 0|1
        """
        # Wrap plain labels into FSMState instances.
        if is_FSMState(from_state):
            self.from_state = from_state
        else:
            self.from_state = FSMState(from_state)
        if is_FSMState(to_state):
            self.to_state = to_state
        else:
            self.to_state = FSMState(to_state)
        # Normalize word_in to a list: single letter -> [letter], None -> [].
        if isinstance(word_in, list):
            self.word_in = word_in
        elif word_in is not None:
            self.word_in = [word_in]
        else:
            self.word_in = []
        # Normalize word_out in the same way.
        if isinstance(word_out, list):
            self.word_out = word_out
        elif word_out is not None:
            self.word_out = [word_out]
        else:
            self.word_out = []
        if hook is not None:
            # The hook, if given, must be callable.
            if hasattr(hook, '__call__'):
                self.hook = hook
            else:
                raise TypeError('Wrong argument for hook.')
    def __lt__(self, other):
        """
        Returns True if ``self`` is less than ``other`` with respect to the
        key ``(self.from_state, self.word_in, self.to_state, self.word_out)``.
        INPUT:
        - `other` -- a transition.
        OUTPUT:
        True or False.
        EXAMPLE::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition(0,1,0,0) < FSMTransition(1,0,0,0)
            True
        """
        # Lexicographic comparison of the defining tuples.
        return (self.from_state, self.word_in, self.to_state, self.word_out) < \
            (other.from_state, other.word_in, other.to_state, other.word_out)
    def __copy__(self):
        """
        Returns a (shallow) copy of the transition.
        INPUT:
        Nothing.
        OUTPUT:
        A new transition.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: copy(t)
            Transition from 'A' to 'B': 0|-
        """
        new = FSMTransition(self.from_state, self.to_state,
                            self.word_in, self.word_out)
        # hook is an optional attribute, so copy it only if present.
        if hasattr(self, 'hook'):
            new.hook = self.hook
        return new
    copy = __copy__
    def __deepcopy__(self, memo):
        """
        Returns a deep copy of the transition.
        INPUT:
        - ``memo`` -- a dictionary storing already processed elements.
        OUTPUT:
        A new transition.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: deepcopy(t)
            Transition from 'A' to 'B': 0|-
        """
        new = FSMTransition(deepcopy(self.from_state, memo),
                            deepcopy(self.to_state, memo),
                            deepcopy(self.word_in, memo),
                            deepcopy(self.word_out, memo))
        if hasattr(self, 'hook'):
            new.hook = deepcopy(self.hook, memo)
        return new
    def deepcopy(self, memo=None):
        """
        Returns a deep copy of the transition.
        INPUT:
        - ``memo`` -- (default: ``None``) a dictionary storing already
          processed elements.
        OUTPUT:
        A new transition.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: deepcopy(t)
            Transition from 'A' to 'B': 0|-
        """
        return deepcopy(self, memo)
    def __hash__(self):
        """
        Since transitions are mutable, they should not be hashable, so
        we return a type error.
        INPUT:
        Nothing.
        OUTPUT:
        The hash of this transition.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: hash(FSMTransition('A', 'B'))
            Traceback (most recent call last):
            ...
            TypeError: Transitions are mutable, and thus not hashable.
        """
        raise TypeError("Transitions are mutable, and thus not hashable.")
    def _repr_(self):
        """
        Represents a transitions as from state to state and input, output.
        INPUT:
        Nothing.
        OUTPUT:
        A string.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 0)._repr_()
            "Transition from 'A' to 'B': 0|0"
        """
        return "Transition from %s to %s: %s" % (repr(self.from_state),
                                                 repr(self.to_state),
                                                 self._in_out_label_())
    def _in_out_label_(self):
        """
        Returns the input and output of a transition as
        "word_in|word_out".
        INPUT:
        Nothing.
        OUTPUT:
        A string of the input and output labels.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 1)._in_out_label_()
            '0|1'
        """
        return "%s|%s" % (FSMWordSymbol(self.word_in),
                          FSMWordSymbol(self.word_out))
    def __eq__(left, right):
        """
        Returns True if the two transitions are the same, i.e., if the
        both go from the same states to the same states and read and
        write the same words.
        Note that the hooks are not checked.
        INPUT:
        - ``left`` -- a transition.
        - ``right`` -- a transition.
        OUTPUT:
        True or False.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
            sage: A = FSMState('A', is_initial=True)
            sage: t1 = FSMTransition('A', 'B', 0, 1)
            sage: t2 = FSMTransition(A, 'B', 0, 1)
            sage: t1 == t2
            True
        """
        # Unlike FSMState.__eq__ (which returns False), comparing with a
        # non-transition raises here.
        if not is_FSMTransition(right):
            raise TypeError('Only instances of FSMTransition ' \
                'can be compared.')
        return left.from_state == right.from_state \
            and left.to_state == right.to_state \
            and left.word_in == right.word_in \
            and left.word_out == right.word_out
    def __ne__(left, right):
        """
        INPUT:
        - ``left`` -- a transition.
        - ``right`` -- a transition.
        OUTPUT:
        True or False.
        Tests for inequality, complement of __eq__.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
            sage: A = FSMState('A', is_initial=True)
            sage: t1 = FSMTransition('A', 'B', 0, 1)
            sage: t2 = FSMTransition(A, 'B', 0, 1)
            sage: t1 != t2
            False
        """
        return (not (left == right))
    def __nonzero__(self):
        """
        Returns True.
        INPUT:
        Nothing.
        OUTPUT:
        True or False.
        EXAMPLES::
            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0).__nonzero__()
            True
        """
        return True  # A transition cannot be zero (see __init__)
#*****************************************************************************
def is_FiniteStateMachine(FSM):
    """
    Test whether ``FSM`` is an instance of :class:`FiniteStateMachine`.
    TESTS::
        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine
        sage: is_FiniteStateMachine(FiniteStateMachine())
        True
        sage: is_FiniteStateMachine(Automaton())
        True
        sage: is_FiniteStateMachine(Transducer())
        True
    """
    return isinstance(FSM, FiniteStateMachine)
def duplicate_transition_ignore(old_transition, new_transition):
    """
    Default handler for duplicate transitions in finite state
    machines: the new occurrence is simply ignored.
    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.
    INPUT:
    - ``old_transition`` -- A transition in a finite state machine.
    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.
    OUTPUT:
    The same transition, unchanged.
    EXAMPLES::
        sage: from sage.combinat.finite_state_machine import duplicate_transition_ignore
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_ignore(FSMTransition(0, 0, 1),
        ....:                             FSMTransition(0, 0, 1))
        Transition from 0 to 0: 1|-
    """
    # Keep the existing transition; the duplicate is dropped.
    return old_transition
def duplicate_transition_raise_error(old_transition, new_transition):
    """
    Handler for duplicate transitions in finite state machines that
    refuses the insertion by raising a ``ValueError``.
    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.
    INPUT:
    - ``old_transition`` -- A transition in a finite state machine.
    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.
    OUTPUT:
    Nothing. A ``ValueError`` is raised.
    EXAMPLES::
        sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_raise_error(FSMTransition(0, 0, 1),
        ....:                                  FSMTransition(0, 0, 1))
        Traceback (most recent call last):
        ...
        ValueError: Attempting to re-insert transition Transition from 0 to 0: 1|-
    """
    # Re-inserting an existing transition is treated as an error.
    raise ValueError("Attempting to re-insert transition %s"
                     % old_transition)
def duplicate_transition_add_input(old_transition, new_transition):
    """
    Handler for duplicate transitions in finite state machines that
    adds the input label of the new transition to the input label of
    the old transition. This is intended for the case where a Markov
    chain is modelled by a finite state machine using the input labels
    as transition probabilities.
    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.
    INPUT:
    - ``old_transition`` -- A transition in a finite state machine.
    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.
    OUTPUT:
    A transition whose input weight is the sum of the input
    weights of ``old_transition`` and ``new_transition``.
    EXAMPLES::
        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_add_input(FSMTransition('a', 'a', 1/2),
        ....:                                FSMTransition('a', 'a', 1/2))
        Transition from 'a' to 'a': 1|-
    Input labels must be lists of length 1::
        sage: duplicate_transition_add_input(FSMTransition('a', 'a', [1, 1]),
        ....:                                FSMTransition('a', 'a', [1, 1]))
        Traceback (most recent call last):
        ...
        TypeError: Trying to use duplicate_transition_add_input on
        "Transition from 'a' to 'a': 1,1|-" and
        "Transition from 'a' to 'a': 1,1|-",
        but input words are assumed to be lists of length 1
    """
    old_word = old_transition.word_in
    new_word = new_transition.word_in
    # Both input labels must be iterables containing exactly one letter.
    if not (hasattr(old_word, '__iter__') and len(old_word) == 1
            and hasattr(new_word, '__iter__') and len(new_word) == 1):
        raise TypeError('Trying to use duplicate_transition_add_input on ' +
                        '"%s" and "%s", ' % (old_transition, new_transition) +
                        'but input words are assumed to be lists of length 1')
    # Sum the single letters (interpreted as transition weights).
    old_transition.word_in = [old_word[0] + new_word[0]]
    return old_transition
class FiniteStateMachine(SageObject):
"""
Class for a finite state machine.
A finite state machine is a finite set of states connected by
transitions.
INPUT:
- ``data`` -- can be any of the following:
#. a dictionary of dictionaries (of transitions),
#. a dictionary of lists (of states or transitions),
#. a list (of transitions),
#. a function (transition function),
#. an other instance of a finite state machine.
- ``initial_states`` and ``final_states`` -- the initial and
final states of this machine
- ``input_alphabet`` and ``output_alphabet`` -- the input and
output alphabets of this machine
- ``determine_alphabets`` -- If ``True``, then the function
:meth:`.determine_alphabets` is called after ``data`` was read and
processed, if ``False``, then not. If it is ``None``, then it is
decided during the construction of the finite state machine
whether :meth:`.determine_alphabets` should be called.
- ``with_final_word_out`` -- If given (not ``None``), then the
function :meth:`.with_final_word_out` (more precisely, its inplace
pendant :meth:`.construct_final_word_out`) is called with input
``letters=with_final_word_out`` at the end of the creation
process.
    - ``store_states_dict`` -- If ``True``, then additionally the states
      are stored in an internal dictionary to speed up lookups.
- ``on_duplicate_transition`` -- A function which is called when a
transition is inserted into ``self`` which already existed (same
``from_state``, same ``to_state``, same ``word_in``, same ``word_out``).
This function is assumed to take two arguments, the first being
the already existing transition, the second being the new
transition (as an :class:`FSMTransition`). The function must
return the (possibly modified) original transition.
By default, we have ``on_duplicate_transition=None``, which is
interpreted as
``on_duplicate_transition=duplicate_transition_ignore``, where
``duplicate_transition_ignore`` is a predefined function
ignoring the occurrence. Other such predefined functions are
``duplicate_transition_raise_error`` and
``duplicate_transition_add_input``.
OUTPUT:
A finite state machine.
The object creation of :class:`Automaton` and :class:`Transducer`
is the same as the one described here (i.e. just replace the word
``FiniteStateMachine`` by ``Automaton`` or ``Transducer``).
Each transition of an automaton has an input label. Automata can,
for example, be determinised (see
:meth:`Automaton.determinisation`) and minimized (see
:meth:`Automaton.minimization`). Each transition of a transducer
has an input and an output label. Transducers can, for example, be
simplified (see :meth:`Transducer.simplification`).
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
See documentation for more examples.
We illustrate the different input formats:
#. The input-data can be a dictionary of dictionaries, where
- the keys of the outer dictionary are state-labels (from-states of
transitions),
- the keys of the inner dictionaries are state-labels (to-states of
transitions),
- the values of the inner dictionaries specify the transition
more precisely.
The easiest is to use a tuple consisting of an input and an
output word::
sage: FiniteStateMachine({'a':{'b':(0, 1), 'c':(1, 1)}})
Finite state machine with 3 states
Instead of the tuple anything iterable (e.g. a list) can be
used as well.
If you want to use the arguments of :class:`FSMTransition`
directly, you can use a dictionary::
sage: FiniteStateMachine({'a':{'b':{'word_in':0, 'word_out':1},
....: 'c':{'word_in':1, 'word_out':1}}})
Finite state machine with 3 states
In the case you already have instances of
:class:`FSMTransition`, it is possible to use them directly::
sage: FiniteStateMachine({'a':{'b':FSMTransition('a', 'b', 0, 1),
....: 'c':FSMTransition('a', 'c', 1, 1)}})
Finite state machine with 3 states
#. The input-data can be a dictionary of lists, where the keys
are states or label of states.
The list-elements can be states::
sage: a = FSMState('a')
sage: b = FSMState('b')
sage: c = FSMState('c')
sage: FiniteStateMachine({a:[b, c]})
Finite state machine with 3 states
Or the list-elements can simply be labels of states::
sage: FiniteStateMachine({'a':['b', 'c']})
Finite state machine with 3 states
The list-elements can also be transitions::
sage: FiniteStateMachine({'a':[FSMTransition('a', 'b', 0, 1),
....: FSMTransition('a', 'c', 1, 1)]})
Finite state machine with 3 states
Or they can be tuples of a label, an input word and an output
word specifying a transition::
sage: FiniteStateMachine({'a':[('b', 0, 1), ('c', 1, 1)]})
Finite state machine with 3 states
#. The input-data can be a list, where its elements specify
transitions::
sage: FiniteStateMachine([FSMTransition('a', 'b', 0, 1),
....: FSMTransition('a', 'c', 1, 1)])
Finite state machine with 3 states
It is possible to skip ``FSMTransition`` in the example above::
sage: FiniteStateMachine([('a', 'b', 0, 1), ('a', 'c', 1, 1)])
Finite state machine with 3 states
The parameters of the transition are given in tuples. Anyhow,
anything iterable (e.g. a list) is possible.
You can also name the parameters of the transition. For this
purpose you take a dictionary::
sage: FiniteStateMachine([{'from_state':'a', 'to_state':'b',
....: 'word_in':0, 'word_out':1},
....: {'from_state':'a', 'to_state':'c',
....: 'word_in':1, 'word_out':1}])
Finite state machine with 3 states
Other arguments, which :class:`FSMTransition` accepts, can be
added, too.
#. The input-data can also be function acting as transition
function:
This function has two input arguments:
#. a label of a state (from which the transition starts),
#. a letter of the (input-)alphabet (as input-label of the transition).
It returns a tuple with the following entries:
#. a label of a state (to which state the transition goes),
#. a letter of or a word over the (output-)alphabet (as
output-label of the transition).
It may also output a list of such tuples if several
transitions from the from-state and the input letter exist
(this means that the finite state machine is
non-deterministic).
If the transition does not exist, the function should raise a
``LookupError`` or return an empty list.
       When constructing a finite state machine in this way, some
       initial states and an input alphabet have to be specified.
::
sage: def f(state_from, read):
....: if int(state_from) + read <= 2:
....: state_to = 2*int(state_from)+read
....: write = 0
....: else:
....: state_to = 2*int(state_from) + read - 5
....: write = 1
....: return (str(state_to), write)
sage: F = FiniteStateMachine(f, input_alphabet=[0, 1],
....: initial_states=['0'],
....: final_states=['0'])
sage: F([1, 0, 1])
(True, '0', [0, 0, 1])
#. The input-data can be an other instance of a finite state machine::
sage: FiniteStateMachine(FiniteStateMachine([]))
Traceback (most recent call last):
...
NotImplementedError
The following examples demonstrate the use of ``on_duplicate_transition``::
sage: F = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]])
sage: F.transitions()
[Transition from 'a' to 'a': 1/2|-]
::
sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
sage: F1 = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]],
....: on_duplicate_transition=duplicate_transition_raise_error)
Traceback (most recent call last):
...
ValueError: Attempting to re-insert transition Transition from 'a' to 'a': 1/2|-
Use ``duplicate_transition_add_input`` to emulate a Markov chain,
the input labels are considered as transition probabilities::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: F = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: F.transitions()
[Transition from 'a' to 'a': 1|-]
Use ``with_final_word_out`` to construct final output::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
....: initial_states=[0],
....: final_states=[0],
....: with_final_word_out=0)
sage: for s in T.iter_final_states():
....: print s, s.final_word_out
0 []
1 [0]
TESTS::
sage: a = FSMState('S_a', 'a')
sage: b = FSMState('S_b', 'b')
sage: c = FSMState('S_c', 'c')
sage: d = FSMState('S_d', 'd')
sage: FiniteStateMachine({a:[b, c], b:[b, c, d],
....: c:[a, b], d:[a, c]})
Finite state machine with 4 states
We have several constructions which lead to the same finite
state machine::
sage: A = FSMState('A')
sage: B = FSMState('B')
sage: C = FSMState('C')
sage: FSM1 = FiniteStateMachine(
....: {A:{B:{'word_in':0, 'word_out':1},
....: C:{'word_in':1, 'word_out':1}}})
sage: FSM2 = FiniteStateMachine({A:{B:(0, 1), C:(1, 1)}})
sage: FSM3 = FiniteStateMachine(
....: {A:{B:FSMTransition(A, B, 0, 1),
....: C:FSMTransition(A, C, 1, 1)}})
sage: FSM4 = FiniteStateMachine({A:[(B, 0, 1), (C, 1, 1)]})
sage: FSM5 = FiniteStateMachine(
....: {A:[FSMTransition(A, B, 0, 1), FSMTransition(A, C, 1, 1)]})
sage: FSM6 = FiniteStateMachine(
....: [{'from_state':A, 'to_state':B, 'word_in':0, 'word_out':1},
....: {'from_state':A, 'to_state':C, 'word_in':1, 'word_out':1}])
sage: FSM7 = FiniteStateMachine([(A, B, 0, 1), (A, C, 1, 1)])
sage: FSM8 = FiniteStateMachine(
....: [FSMTransition(A, B, 0, 1), FSMTransition(A, C, 1, 1)])
sage: FSM1 == FSM2 == FSM3 == FSM4 == FSM5 == FSM6 == FSM7 == FSM8
True
It is possible to skip ``FSMTransition`` in the example above.
Some more tests for different input-data::
sage: FiniteStateMachine({'a':{'a':[0, 0], 'b':[1, 1]},
....: 'b':{'b':[1, 0]}})
Finite state machine with 2 states
sage: a = FSMState('S_a', 'a')
sage: b = FSMState('S_b', 'b')
sage: c = FSMState('S_c', 'c')
sage: d = FSMState('S_d', 'd')
sage: t1 = FSMTransition(a, b)
sage: t2 = FSMTransition(b, c)
sage: t3 = FSMTransition(b, d)
sage: t4 = FSMTransition(c, d)
sage: FiniteStateMachine([t1, t2, t3, t4])
Finite state machine with 4 states
"""
on_duplicate_transition = duplicate_transition_ignore
"""
Which function to call when a duplicate transition is inserted. See
the documentation of the parameter ``on_duplicate_transition`` of
the class :class:`FiniteStateMachine` for details.
"""
#*************************************************************************
# init
#*************************************************************************
    def __init__(self,
                 data=None,
                 initial_states=None, final_states=None,
                 input_alphabet=None, output_alphabet=None,
                 determine_alphabets=None,
                 with_final_word_out=None,
                 store_states_dict=True,
                 on_duplicate_transition=None):
        """
        Construct a finite state machine from ``data``.

        See :class:`FiniteStateMachine` for the accepted formats of
        ``data`` and the meaning of all keyword arguments.

        TEST::

            sage: FiniteStateMachine()
            Finite state machine with 0 states
        """
        self._states_ = []  # List of states in the finite state
                            # machine. Each state stores a list of
                            # outgoing transitions.
        if store_states_dict:
            # Optional label -> state mapping; speeds up state lookups.
            self._states_dict_ = {}
        # Register initial states first so that states mentioned both
        # here and in ``data`` are created only once (add_state is
        # idempotent on labels).
        if initial_states is not None:
            if not hasattr(initial_states, '__iter__'):
                raise TypeError('Initial states must be iterable ' \
                                    '(e.g. a list of states).')
            for s in initial_states:
                state = self.add_state(s)
                state.is_initial = True
        if final_states is not None:
            if not hasattr(final_states, '__iter__'):
                raise TypeError('Final states must be iterable ' \
                                    '(e.g. a list of states).')
            for s in final_states:
                state = self.add_state(s)
                state.is_final = True
        self.input_alphabet = input_alphabet
        self.output_alphabet = output_alphabet
        # Default duplicate policy: silently keep the existing transition.
        if on_duplicate_transition is None:
            on_duplicate_transition = duplicate_transition_ignore
        if hasattr(on_duplicate_transition, '__call__'):
            self.on_duplicate_transition=on_duplicate_transition
        else:
            raise TypeError('on_duplicate_transition must be callable')
        # Dispatch on the type/protocol of ``data``; the order of the
        # checks matters (e.g. dicts are iterable, so 'iteritems' must
        # be tested before '__iter__').
        if data is None:
            pass
        elif is_FiniteStateMachine(data):
            raise NotImplementedError
        elif hasattr(data, 'iteritems'):
            # data is a dict (or something similar),
            # format: key = from_state, value = iterator of transitions
            for (sf, iter_transitions) in data.iteritems():
                self.add_state(sf)
                if hasattr(iter_transitions, 'iteritems'):
                    # inner dict: keys are to-states; each value is an
                    # FSMTransition, a dict of keyword arguments, an
                    # iterable of positional arguments, or a single
                    # input label
                    for (st, transition) in iter_transitions.iteritems():
                        self.add_state(st)
                        if is_FSMTransition(transition):
                            self.add_transition(transition)
                        elif hasattr(transition, 'iteritems'):
                            self.add_transition(sf, st, **transition)
                        elif hasattr(transition, '__iter__'):
                            self.add_transition(sf, st, *transition)
                        else:
                            self.add_transition(sf, st, transition)
                elif hasattr(iter_transitions, '__iter__'):
                    # inner list: entries are tuples
                    # (to_state, word_in, word_out), FSMTransitions,
                    # or plain to-state labels
                    for transition in iter_transitions:
                        if hasattr(transition, '__iter__'):
                            L = [sf]
                            L.extend(transition)
                        elif is_FSMTransition(transition):
                            L = transition
                        else:
                            L = [sf, transition]
                        self.add_transition(L)
                else:
                    raise TypeError('Wrong input data for transition.')
            # Alphabets were given implicitly via the transitions, so
            # determine them automatically unless told otherwise.
            if determine_alphabets is None and input_alphabet is None \
                    and output_alphabet is None:
                determine_alphabets = True
        elif hasattr(data, '__iter__'):
            # data is a something that is iterable,
            # items are transitions
            for transition in data:
                if is_FSMTransition(transition):
                    self.add_transition(transition)
                elif hasattr(transition, 'iteritems'):
                    self.add_transition(transition)
                elif hasattr(transition, '__iter__'):
                    self.add_transition(transition)
                else:
                    raise TypeError('Wrong input data for transition.')
            if determine_alphabets is None and input_alphabet is None \
                    and output_alphabet is None:
                determine_alphabets = True
        elif hasattr(data, '__call__'):
            # data is a transition function; this requires
            # initial_states and input_alphabet to have been given
            self.add_from_transition_function(data)
        else:
            raise TypeError('Cannot decide what to do with data.')
        if determine_alphabets:
            self.determine_alphabets()
        if with_final_word_out is not None:
            self.construct_final_word_out(with_final_word_out)
        self._allow_composition_ = True
#*************************************************************************
# copy and hash
#*************************************************************************
    def __copy__(self):
        """
        Returns a (shallow) copy of the finite state machine.

        INPUT:

        Nothing.

        OUTPUT:

        A new finite state machine.

        TESTS::

            sage: copy(FiniteStateMachine())
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Shallow copying is deliberately unsupported: states and
        # transitions reference each other, so a shallow copy would
        # share mutable structure with the original. Use deepcopy().
        raise NotImplementedError

    copy = __copy__
def empty_copy(self, memo=None, new_class=None):
"""
Returns an empty deep copy of the finite state machine, i.e.,
``input_alphabet``, ``output_alphabet``, ``on_duplicate_transition``
are preserved, but states and transitions are not.
INPUT:
- ``memo`` -- a dictionary storing already processed elements.
- ``new_class`` -- a class for the copy. By default
(``None``), the class of ``self`` is used.
OUTPUT:
A new finite state machine.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
sage: F = FiniteStateMachine([('A', 'A', 0, 2), ('A', 'A', 1, 3)],
....: input_alphabet=[0, 1],
....: output_alphabet=[2, 3],
....: on_duplicate_transition=duplicate_transition_raise_error)
sage: FE = F.empty_copy(); FE
Finite state machine with 0 states
sage: FE.input_alphabet
[0, 1]
sage: FE.output_alphabet
[2, 3]
sage: FE.on_duplicate_transition == duplicate_transition_raise_error
True
TESTS::
sage: T = Transducer()
sage: type(T.empty_copy())
<class 'sage.combinat.finite_state_machine.Transducer'>
sage: type(T.empty_copy(new_class=Automaton))
<class 'sage.combinat.finite_state_machine.Automaton'>
"""
if new_class is None:
new = self.__class__()
else:
new = new_class()
new.input_alphabet = deepcopy(self.input_alphabet, memo)
new.output_alphabet = deepcopy(self.output_alphabet, memo)
new.on_duplicate_transition = self.on_duplicate_transition
return new
    def __deepcopy__(self, memo):
        """
        Returns a deep copy of the finite state machine.

        INPUT:

        - ``memo`` -- a dictionary storing already processed elements.

        OUTPUT:

        A new finite state machine.

        EXAMPLES::

            sage: F = FiniteStateMachine([('A', 'A', 0, 1), ('A', 'A', 1, 0)])
            sage: deepcopy(F)
            Finite state machine with 1 states
        """
        # relabeled() sets _deepcopy_relabel_ before calling deepcopy;
        # in that case each state is temporarily tagged with its new
        # label so that the state's own __deepcopy__ picks it up.
        relabel = hasattr(self, '_deepcopy_relabel_')
        new = self.empty_copy(memo=memo)
        relabel_iter = itertools.count(0)
        for state in self.iter_states():
            if relabel:
                if self._deepcopy_labels_ is None:
                    # default relabeling: consecutive integers from 0
                    state._deepcopy_relabel_ = next(relabel_iter)
                elif hasattr(self._deepcopy_labels_, '__call__'):
                    state._deepcopy_relabel_ = self._deepcopy_labels_(state.label())
                elif hasattr(self._deepcopy_labels_, '__getitem__'):
                    state._deepcopy_relabel_ = self._deepcopy_labels_[state.label()]
                else:
                    raise TypeError("labels must be None, a callable "
                                    "or a dictionary.")
            s = deepcopy(state, memo)
            if relabel:
                # remove the temporary tag from the *original* state
                del state._deepcopy_relabel_
            new.add_state(s)
        for transition in self.iter_transitions():
            # memo guarantees that the copied transitions reference the
            # already-copied states, keeping the links intact
            new.add_transition(deepcopy(transition, memo))
        return new
def deepcopy(self, memo=None):
"""
Returns a deep copy of the finite state machine.
INPUT:
- ``memo`` -- (default: ``None``) a dictionary storing already
processed elements.
OUTPUT:
A new finite state machine.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'A', 0, 1), ('A', 'A', 1, 0)])
sage: deepcopy(F)
Finite state machine with 1 states
TESTS:
Make sure that the links between transitions and states
are still intact::
sage: C = deepcopy(F)
sage: C.transitions()[0].from_state is C.state('A')
True
sage: C.transitions()[0].to_state is C.state('A')
True
"""
return deepcopy(self, memo)
def relabeled(self, memo=None, labels=None):
"""
Returns a deep copy of the finite state machine, but the
states are relabeled.
INPUT:
- ``memo`` -- (default: ``None``) a dictionary storing already
processed elements.
- ``labels`` -- (default: ``None``) a dictionary or callable
mapping old labels to new labels. If ``None``, then the new
labels are integers starting with 0.
OUTPUT:
A new finite state machine.
EXAMPLES::
sage: FSM1 = FiniteStateMachine([('A', 'B'), ('B', 'C'), ('C', 'A')])
sage: FSM1.states()
['A', 'B', 'C']
sage: FSM2 = FSM1.relabeled()
sage: FSM2.states()
[0, 1, 2]
sage: FSM3 = FSM1.relabeled(labels={'A': 'a', 'B': 'b', 'C': 'c'})
sage: FSM3.states()
['a', 'b', 'c']
sage: FSM4 = FSM2.relabeled(labels=lambda x: 2*x)
sage: FSM4.states()
[0, 2, 4]
TESTS::
sage: FSM2.relabeled(labels=1)
Traceback (most recent call last):
...
TypeError: labels must be None, a callable or a dictionary.
"""
self._deepcopy_relabel_ = True
self._deepcopy_labels_ = labels
new = deepcopy(self, memo)
del self._deepcopy_relabel_
del self._deepcopy_labels_
return new
def induced_sub_finite_state_machine(self, states):
"""
Returns a sub-finite-state-machine of the finite state machine
induced by the given states.
INPUT:
- ``states`` -- a list (or an iterator) of states (either labels or
instances of :class:`FSMState`) of the sub-finite-state-machine.
OUTPUT:
A new finite state machine. It consists (of deep copies) of
the given states and (deep copies) of all transitions of ``self``
between these states.
EXAMPLE::
sage: FSM = FiniteStateMachine([(0, 1, 0), (0, 2, 0),
....: (1, 2, 0), (2, 0, 0)])
sage: sub_FSM = FSM.induced_sub_finite_state_machine([0, 1])
sage: sub_FSM.states()
[0, 1]
sage: sub_FSM.transitions()
[Transition from 0 to 1: 0|-]
sage: FSM.induced_sub_finite_state_machine([3])
Traceback (most recent call last):
...
ValueError: 3 is not a state of this finite state machine.
TESTS:
Make sure that the links between transitions and states
are still intact::
sage: sub_FSM.transitions()[0].from_state is sub_FSM.state(0)
True
"""
good_states = set()
for state in states:
if not self.has_state(state):
raise ValueError("%s is not a state of this finite state machine." % state)
good_states.add(self.state(state))
memo = {}
new = self.empty_copy(memo=memo)
for state in good_states:
s = deepcopy(state, memo)
new.add_state(s)
for state in good_states:
for transition in self.iter_transitions(state):
if transition.to_state in good_states:
new.add_transition(deepcopy(transition, memo))
return new
    def __hash__(self):
        """
        Since finite state machines are mutable, they should not be
        hashable, so we return a type error.

        INPUT:

        Nothing.

        OUTPUT:

        The hash of this finite state machine (only if the machine has
        been marked immutable); otherwise a ``TypeError`` is raised.

        EXAMPLES::

            sage: hash(FiniteStateMachine())
            Traceback (most recent call last):
            ...
            TypeError: Finite state machines are mutable, and thus not hashable.
        """
        # Machines explicitly marked immutable (attribute ``_immutable``
        # set to True elsewhere) do get a hash, built from their states
        # and transitions.
        if getattr(self, "_immutable", False):
            return hash((tuple(self.states()), tuple(self.transitions())))
        raise TypeError("Finite state machines are mutable, " \
            "and thus not hashable.")
#*************************************************************************
# operators
#*************************************************************************
def __or__(self, other):
"""
Returns the disjoint union of the finite state machines self and other.
INPUT:
- ``other`` -- a finite state machine.
OUTPUT:
A new finite state machine.
TESTS::
sage: FiniteStateMachine() | FiniteStateMachine([('A', 'B')])
Traceback (most recent call last):
...
NotImplementedError
"""
if is_FiniteStateMachine(other):
return self.disjoint_union(other)
__add__ = __or__
    def __iadd__(self, other):
        """
        In-place addition (disjoint union) is not implemented.

        TESTS::

            sage: F = FiniteStateMachine()
            sage: F += FiniteStateMachine()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        raise NotImplementedError
def __and__(self, other):
"""
Returns the intersection of ``self`` with ``other``.
TESTS::
sage: FiniteStateMachine() & FiniteStateMachine([('A', 'B')])
Traceback (most recent call last):
...
NotImplementedError
"""
if is_FiniteStateMachine(other):
return self.intersection(other)
    def __imul__(self, other):
        """
        In-place multiplication is not implemented.

        TESTS::

            sage: F = FiniteStateMachine()
            sage: F *= FiniteStateMachine()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        raise NotImplementedError
def __call__(self, *args, **kwargs):
"""
.. WARNING::
The default output of this method is scheduled to change.
This docstring describes the new default behaviour, which can
already be achieved by setting
``FSMOldProcessOutput`` to ``False``.
Calls either method :meth:`.composition` or :meth:`.process`
(with ``full_output=False``).
By setting ``FSMOldProcessOutput`` to ``False``
the new desired output is produced.
EXAMPLES::
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False # activate new output behavior
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True, is_final=True)
sage: binary_inverter = Transducer({A:[(A, 0, 1), (A, 1, 0)]})
sage: binary_inverter([0, 1, 0, 0, 1, 1])
[1, 0, 1, 1, 0, 0]
::
sage: F = Transducer([('A', 'B', 1, 0), ('B', 'B', 1, 1),
....: ('B', 'B', 0, 0)],
....: initial_states=['A'], final_states=['B'])
sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
....: (2, 2, 0, 1), (2, 1, 1, 1)],
....: initial_states=[1], final_states=[1])
sage: H = G(F)
sage: H.states()
[('A', 1), ('B', 1), ('B', 2)]
"""
if len(args) == 0:
raise TypeError("Called with too few arguments.")
if is_FiniteStateMachine(args[0]):
return self.composition(*args, **kwargs)
if hasattr(args[0], '__iter__'):
if not kwargs.has_key('full_output'):
kwargs['full_output'] = False
return self.process(*args, **kwargs)
raise TypeError("Do not know what to do with that arguments.")
#*************************************************************************
# tests
#*************************************************************************
def __nonzero__(self):
"""
Returns True if the finite state machine consists of at least
one state.
INPUT:
Nothing.
OUTPUT:
True or False.
TESTS::
sage: FiniteStateMachine().__nonzero__()
False
"""
return len(self._states_) > 0
def __eq__(left, right):
"""
Returns ``True`` if the two finite state machines are equal,
i.e., if they have the same states and the same transitions.
INPUT:
- ``left`` -- a finite state machine.
- ``right`` -- a finite state machine.
OUTPUT:
``True`` or ``False``.
Note that this function compares all attributes of a state (by
using :meth:`FSMState.fully_equal`) except for colors. Colors
are handled as follows: If the colors coincide, then the
finite state machines are also considered equal. If not, then
they are considered as equal if both finite state machines are
monochromatic.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 1)])
sage: F == FiniteStateMachine()
False
sage: G = FiniteStateMachine([('A', 'B', 1)],
....: initial_states=['A'])
sage: F == G
False
sage: F.state('A').is_initial = True
sage: F == G
True
This shows the behavior when the states have colors::
sage: F.state('A').color = 'red'
sage: G.state('A').color = 'red'
sage: F == G
True
sage: G.state('A').color = 'blue'
sage: F == G
False
sage: F.state('B').color = 'red'
sage: F.is_monochromatic()
True
sage: G.state('B').color = 'blue'
sage: G.is_monochromatic()
True
sage: F == G
True
"""
if not is_FiniteStateMachine(right):
raise TypeError('Only instances of FiniteStateMachine '
'can be compared.')
if len(left._states_) != len(right._states_):
return False
colors_equal = True
for state in left.iter_states():
try:
right_state = right.state(state.label())
except LookupError:
return False
# we handle colors separately
if not state.fully_equal(right_state, compare_color=False):
return False
if state.color != right_state.color:
colors_equal = False
left_transitions = state.transitions
right_transitions = right.state(state).transitions
if len(left_transitions) != len(right_transitions):
return False
for t in left_transitions:
if t not in right_transitions:
return False
# handle colors
if colors_equal:
return True
if left.is_monochromatic() and right.is_monochromatic():
return True
return False
def __ne__(left, right):
"""
Tests for inequality, complement of :meth:`.__eq__`.
INPUT:
- ``left`` -- a finite state machine.
- ``right`` -- a finite state machine.
OUTPUT:
True or False.
EXAMPLES::
sage: E = FiniteStateMachine([('A', 'B', 0)])
sage: F = Automaton([('A', 'B', 0)])
sage: G = Transducer([('A', 'B', 0, 1)])
sage: E == F
True
sage: E == G
False
"""
return (not (left == right))
def __contains__(self, item):
"""
Returns true, if the finite state machine contains the
state or transition item. Note that only the labels of the
states and the input and output words are tested.
INPUT:
- ``item`` -- a state or a transition.
OUTPUT:
True or False.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
sage: F = FiniteStateMachine([('A', 'B', 0), ('B', 'A', 1)])
sage: FSMState('A', is_initial=True) in F
True
sage: 'A' in F
False
sage: FSMTransition('A', 'B', 0) in F
True
"""
if is_FSMState(item):
return self.has_state(item)
if is_FSMTransition(item):
return self.has_transition(item)
return False
def is_Markov_chain(self, is_zero=None):
"""
Checks whether ``self`` is a Markov chain where the transition
probabilities are modeled as input labels.
INPUT:
- ``is_zero`` -- by default (``is_zero=None``), checking for
zero is simply done by
:meth:`~sage.structure.element.Element.is_zero`. This
parameter can be used to provide a more sophisticated check
for zero, e.g. in the case of symbolic probabilities, see
the examples below.
OUTPUT:
``True`` or ``False``.
:attr:`on_duplicate_transition` must be
:func:`duplicate_transition_add_input` and the sum of the input
weights of the transitions leaving a state must add up to 1.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: F = Transducer([[0, 0, 1/4, 0], [0, 1, 3/4, 1],
....: [1, 0, 1/2, 0], [1, 1, 1/2, 1]],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: F.is_Markov_chain()
True
:attr:`on_duplicate_transition` must be
:func:`duplicate_transition_add_input`::
sage: F = Transducer([[0, 0, 1/4, 0], [0, 1, 3/4, 1],
....: [1, 0, 1/2, 0], [1, 1, 1/2, 1]])
sage: F.is_Markov_chain()
False
Sum of input labels of the transitions leaving states must be 1::
sage: F = Transducer([[0, 0, 1/4, 0], [0, 1, 3/4, 1],
....: [1, 0, 1/2, 0]],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: F.is_Markov_chain()
False
If the probabilities are variables in the symbolic ring,
:func:`~sage.symbolic.assumptions.assume` will do the trick::
sage: var('p q')
(p, q)
sage: F = Transducer([(0, 0, p, 1), (0, 0, q, 0)],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: assume(p + q == 1)
sage: (p + q - 1).is_zero()
True
sage: F.is_Markov_chain()
True
sage: forget()
sage: del(p, q)
If the probabilities are variables in some polynomial ring,
the parameter ``is_zero`` can be used::
sage: R.<p, q> = PolynomialRing(QQ)
sage: def is_zero_polynomial(polynomial):
....: return polynomial in (p + q - 1)*R
sage: F = Transducer([(0, 0, p, 1), (0, 0, q, 0)],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: F.is_Markov_chain()
False
sage: F.is_Markov_chain(is_zero_polynomial)
True
"""
def default_is_zero(expression):
return expression.is_zero()
is_zero_function = default_is_zero
if is_zero is not None:
is_zero_function = is_zero
if self.on_duplicate_transition != duplicate_transition_add_input:
return False
return all(is_zero_function(sum(t.word_in[0] for t in state.transitions) - 1)
for state in self.states())
#*************************************************************************
# representations / LaTeX
#*************************************************************************
def _repr_(self):
"""
Represents the finite state machine as "Finite state machine
with n states" where n is the number of states.
INPUT:
Nothing.
OUTPUT:
A string.
EXAMPLES::
sage: FiniteStateMachine()._repr_()
'Finite state machine with 0 states'
"""
return "Finite state machine with %s states" % len(self._states_)
default_format_letter = latex
format_letter = default_format_letter
def format_letter_negative(self, letter):
r"""
Format negative numbers as overlined numbers, everything
else by standard LaTeX formatting.
INPUT:
``letter`` -- anything.
OUTPUT:
Overlined absolute value if letter is a negative integer,
:func:`latex(letter) <sage.misc.latex.latex>` otherwise.
EXAMPLES::
sage: A = Automaton([(0, 0, -1)])
sage: map(A.format_letter_negative, [-1, 0, 1, 'a', None])
['\\overline{1}', 0, 1, \text{\texttt{a}}, \mbox{\rm None}]
sage: A.latex_options(format_letter=A.format_letter_negative)
sage: print(latex(A))
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state] (v0) at (3.000000, 0.000000) {$0$};
\path[->] (v0) edge[loop above] node {$\overline{1}$} ();
\end{tikzpicture}
"""
if letter in ZZ and letter < 0:
return r'\overline{%d}' % -letter
else:
return latex(letter)
def format_transition_label_reversed(self, word):
r"""
Format words in transition labels in reversed order.
INPUT:
``word`` -- list of letters.
OUTPUT:
String representation of ``word`` suitable to be typeset in
mathematical mode, letters are written in reversed order.
This is the reversed version of
:meth:`.default_format_transition_label`.
In digit expansions, digits are frequently processed from the
least significant to the most significant position, but it is
customary to write the least significant digit at the
right-most position. Therefore, the labels have to be
reversed.
EXAMPLE::
sage: T = Transducer([(0, 0, 0, [1, 2, 3])])
sage: T.format_transition_label_reversed([1, 2, 3])
'3 2 1'
sage: T.latex_options(format_transition_label=T.format_transition_label_reversed)
sage: print latex(T)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state] (v0) at (3.000000, 0.000000) {$0$};
\path[->] (v0) edge[loop above] node {$0\mid 3 2 1$} ();
\end{tikzpicture}
TEST:
Check that #16357 is fixed::
sage: T = Transducer()
sage: T.format_transition_label_reversed([])
'\\varepsilon'
"""
return self.default_format_transition_label(reversed(word))
def default_format_transition_label(self, word):
r"""
Default formatting of words in transition labels for LaTeX output.
INPUT:
``word`` -- list of letters
OUTPUT:
String representation of ``word`` suitable to be typeset in
mathematical mode.
- For a non-empty word: Concatenation of the letters, piped through
``self.format_letter`` and separated by blanks.
- For an empty word:
``sage.combinat.finite_state_machine.EmptyWordLaTeX``.
There is also a variant :meth:`.format_transition_label_reversed`
writing the words in reversed order.
EXAMPLES:
#. Example of a non-empty word::
sage: T = Transducer()
sage: print T.default_format_transition_label(
....: ['a', 'alpha', 'a_1', '0', 0, (0, 1)])
\text{\texttt{a}} \text{\texttt{alpha}}
\text{\texttt{a{\char`\_}1}} 0 0 \left(0, 1\right)
#. In the example above, ``'a'`` and ``'alpha'`` should perhaps
be symbols::
sage: var('a alpha a_1')
(a, alpha, a_1)
sage: print T.default_format_transition_label([a, alpha, a_1])
a \alpha a_{1}
#. Example of an empty word::
sage: print T.default_format_transition_label([])
\varepsilon
We can change this by setting
``sage.combinat.finite_state_machine.EmptyWordLaTeX``::
sage: sage.combinat.finite_state_machine.EmptyWordLaTeX = ''
sage: T.default_format_transition_label([])
''
Finally, we restore the default value::
sage: sage.combinat.finite_state_machine.EmptyWordLaTeX = r'\varepsilon'
#. This method is the default value for
``FiniteStateMachine.format_transition_label``. That can be changed to be
any other function::
sage: A = Automaton([(0, 1, 0)])
sage: def custom_format_transition_label(word):
....: return "t"
sage: A.latex_options(format_transition_label=custom_format_transition_label)
sage: print latex(A)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state] (v0) at (3.000000, 0.000000) {$0$};
\node[state] (v1) at (-3.000000, 0.000000) {$1$};
\path[->] (v0) edge node[rotate=360.00, anchor=south] {$t$} (v1);
\end{tikzpicture}
TEST:
Check that #16357 is fixed::
sage: T = Transducer()
sage: T.default_format_transition_label([])
'\\varepsilon'
sage: T.default_format_transition_label(iter([]))
'\\varepsilon'
"""
result = " ".join(imap(self.format_letter, word))
if result:
return result
else:
return EmptyWordLaTeX
format_transition_label = default_format_transition_label
    def latex_options(self,
                      coordinates=None,
                      format_state_label=None,
                      format_letter=None,
                      format_transition_label=None,
                      loop_where=None,
                      initial_where=None,
                      accepting_style=None,
                      accepting_distance=None,
                      accepting_where=None,
                      accepting_show_empty=None):
        r"""
        Set options for LaTeX output via
        :func:`~sage.misc.latex.latex` and therefore
        :func:`~sage.misc.latex.view`.

        INPUT:

        - ``coordinates`` -- a dictionary or a function mapping labels
          of states to pairs interpreted as coordinates. If no
          coordinates are given, states a placed equidistantly on a
          circle of radius `3`. See also :meth:`.set_coordinates`.

        - ``format_state_label`` -- a function mapping labels of
          states to a string suitable for typesetting in LaTeX's
          mathematics mode. If not given, :func:`~sage.misc.latex.latex`
          is used.

        - ``format_letter`` -- a function mapping letters of the input
          and output alphabets to a string suitable for typesetting in
          LaTeX's mathematics mode. If not given,
          :meth:`.default_format_transition_label` uses
          :func:`~sage.misc.latex.latex`.

        - ``format_transition_label`` -- a function mapping words over
          the input and output alphabets to a string suitable for
          typesetting in LaTeX's mathematics mode. If not given,
          :meth:`.default_format_transition_label` is used.

        - ``loop_where`` -- a dictionary or a function mapping labels of
          initial states to one of ``'above'``, ``'left'``, ``'below'``,
          ``'right'``. If not given, ``'above'`` is used.

        - ``initial_where`` -- a dictionary or a function mapping
          labels of initial states to one of ``'above'``, ``'left'``,
          ``'below'``, ``'right'``. If not given, TikZ' default
          (currently ``'left'``) is used.

        - ``accepting_style`` -- one of ``'accepting by double'`` and
          ``'accepting by arrow'``. If not given, ``'accepting by
          double'`` is used unless there are non-empty final output
          words.

        - ``accepting_distance`` -- a string giving a LaTeX length
          used for the length of the arrow leading from a final state.
          If not given, TikZ' default (currently ``'3ex'``) is used
          unless there are non-empty final output words, in which case
          ``'7ex'`` is used.

        - ``accepting_where`` -- a dictionary or a function mapping
          labels of final states to one of ``'above'``, ``'left'``,
          ``'below'``, ``'right'``. If not given, TikZ' default
          (currently ``'right'``) is used. If the final state has a
          final output word, it is also possible to give an angle
          in degrees.

        - ``accepting_show_empty`` -- if ``True`` the arrow of an
          empty final output word is labeled as well. Note that this
          implicitly implies ``accepting_style='accepting by
          arrow'``. If not given, the default ``False`` is used.

        OUTPUT:

        Nothing.

        As TikZ (cf. the :wikipedia:`PGF/TikZ`) is used to typeset
        the graphics, the syntax is oriented on TikZ' syntax.

        This is a convenience function collecting all options for
        LaTeX output. All of its functionality can also be achieved by
        directly setting the attributes

        - ``coordinates``, ``format_label``, ``loop_where``,
          ``initial_where``, and ``accepting_where`` of
          :class:`FSMState` (here, ``format_label`` is a callable
          without arguments, everything else is a specific value);

        - ``format_label`` of :class:`FSMTransition` (``format_label``
          is a callable without arguments);

        - ``format_state_label``, ``format_letter``,
          ``format_transition_label``, ``accepting_style``,
          ``accepting_distance``, and ``accepting_show_empty``
          of :class:`FiniteStateMachine`.

        This function, however, also (somewhat) checks its input and
        serves to collect documentation on all these options.

        The function can be called several times, only those arguments
        which are not ``None`` are taken into account. By the same
        means, it can be combined with directly setting some
        attributes as outlined above.

        EXAMPLES:

        See also the section on :ref:`finite_state_machine_LaTeX_output`
        in the introductory examples of this module.

        ::

            sage: T = Transducer(initial_states=[4],
            ....:                final_states=[0, 3])
            sage: for j in srange(4):
            ....:     T.add_transition(4, j, 0, [0, j])
            ....:     T.add_transition(j, 4, 0, [0, -j])
            ....:     T.add_transition(j, j, 0, 0)
            Transition from 4 to 0: 0|0,0
            Transition from 0 to 4: 0|0,0
            Transition from 0 to 0: 0|0
            Transition from 4 to 1: 0|0,1
            Transition from 1 to 4: 0|0,-1
            Transition from 1 to 1: 0|0
            Transition from 4 to 2: 0|0,2
            Transition from 2 to 4: 0|0,-2
            Transition from 2 to 2: 0|0
            Transition from 4 to 3: 0|0,3
            Transition from 3 to 4: 0|0,-3
            Transition from 3 to 3: 0|0
            sage: T.add_transition(4, 4, 0, 0)
            Transition from 4 to 4: 0|0
            sage: T.state(3).final_word_out = [0, 0]
            sage: T.latex_options(
            ....:     coordinates={4: (0, 0),
            ....:                  0: (-6, 3),
            ....:                  1: (-2, 3),
            ....:                  2: (2, 3),
            ....:                  3: (6, 3)},
            ....:     format_state_label=lambda x: r'\mathbf{%s}' % x,
            ....:     format_letter=lambda x: r'w_{%s}' % x,
            ....:     format_transition_label=lambda x:
            ....:         r"{\scriptstyle %s}" % T.default_format_transition_label(x),
            ....:     loop_where={4: 'below', 0: 'left', 1: 'above',
            ....:                 2: 'right', 3:'below'},
            ....:     initial_where=lambda x: 'above',
            ....:     accepting_style='accepting by double',
            ....:     accepting_distance='10ex',
            ....:     accepting_where={0: 'left', 3: 45}
            ....:     )
            sage: T.state(4).format_label=lambda: r'\mathcal{I}'
            sage: latex(T)
            \begin{tikzpicture}[auto, initial text=, >=latex]
            \node[state, initial, initial where=above] (v0) at (0.000000, 0.000000) {$\mathcal{I}$};
            \node[state, accepting, accepting where=left] (v1) at (-6.000000, 3.000000) {$\mathbf{0}$};
            \node[state, accepting, accepting where=45] (v2) at (6.000000, 3.000000) {$\mathbf{3}$};
            \path[->] (v2.45.00) edge node[rotate=45.00, anchor=south] {$\$ \mid {\scriptstyle w_{0} w_{0}}$} ++(45.00:10ex);
            \node[state] (v3) at (-2.000000, 3.000000) {$\mathbf{1}$};
            \node[state] (v4) at (2.000000, 3.000000) {$\mathbf{2}$};
            \path[->] (v1) edge[loop left] node[rotate=90, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v1.-21.57) edge node[rotate=-26.57, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{0}}$} (v0.148.43);
            \path[->] (v3) edge[loop above] node {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v3.-51.31) edge node[rotate=-56.31, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{-1}}$} (v0.118.69);
            \path[->] (v4) edge[loop right] node[rotate=90, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v4.-118.69) edge node[rotate=56.31, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{-2}}$} (v0.51.31);
            \path[->] (v2) edge[loop below] node {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v2.-148.43) edge node[rotate=26.57, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{-3}}$} (v0.21.57);
            \path[->] (v0.158.43) edge node[rotate=333.43, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{0}}$} (v1.328.43);
            \path[->] (v0.128.69) edge node[rotate=303.69, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{1}}$} (v3.298.69);
            \path[->] (v0.61.31) edge node[rotate=56.31, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{2}}$} (v4.231.31);
            \path[->] (v0.31.57) edge node[rotate=26.57, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{3}}$} (v2.201.57);
            \path[->] (v0) edge[loop below] node {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \end{tikzpicture}
            sage: view(T) # not tested

        To actually see this, use the live documentation in the Sage notebook
        and execute the cells.

        By changing some of the options, we get the following output::

            sage: T.latex_options(
            ....:     format_transition_label=T.default_format_transition_label,
            ....:     accepting_style='accepting by arrow',
            ....:     accepting_show_empty=True
            ....:     )
            sage: latex(T)
            \begin{tikzpicture}[auto, initial text=, >=latex, accepting text=, accepting/.style=accepting by arrow, accepting distance=10ex]
            \node[state, initial, initial where=above] (v0) at (0.000000, 0.000000) {$\mathcal{I}$};
            \node[state] (v1) at (-6.000000, 3.000000) {$\mathbf{0}$};
            \path[->] (v1.180.00) edge node[rotate=360.00, anchor=south] {$\$ \mid \varepsilon$} ++(180.00:10ex);
            \node[state] (v2) at (6.000000, 3.000000) {$\mathbf{3}$};
            \path[->] (v2.45.00) edge node[rotate=45.00, anchor=south] {$\$ \mid w_{0} w_{0}$} ++(45.00:10ex);
            \node[state] (v3) at (-2.000000, 3.000000) {$\mathbf{1}$};
            \node[state] (v4) at (2.000000, 3.000000) {$\mathbf{2}$};
            \path[->] (v1) edge[loop left] node[rotate=90, anchor=south] {$w_{0}\mid w_{0}$} ();
            \path[->] (v1.-21.57) edge node[rotate=-26.57, anchor=south] {$w_{0}\mid w_{0} w_{0}$} (v0.148.43);
            \path[->] (v3) edge[loop above] node {$w_{0}\mid w_{0}$} ();
            \path[->] (v3.-51.31) edge node[rotate=-56.31, anchor=south] {$w_{0}\mid w_{0} w_{-1}$} (v0.118.69);
            \path[->] (v4) edge[loop right] node[rotate=90, anchor=north] {$w_{0}\mid w_{0}$} ();
            \path[->] (v4.-118.69) edge node[rotate=56.31, anchor=north] {$w_{0}\mid w_{0} w_{-2}$} (v0.51.31);
            \path[->] (v2) edge[loop below] node {$w_{0}\mid w_{0}$} ();
            \path[->] (v2.-148.43) edge node[rotate=26.57, anchor=north] {$w_{0}\mid w_{0} w_{-3}$} (v0.21.57);
            \path[->] (v0.158.43) edge node[rotate=333.43, anchor=north] {$w_{0}\mid w_{0} w_{0}$} (v1.328.43);
            \path[->] (v0.128.69) edge node[rotate=303.69, anchor=north] {$w_{0}\mid w_{0} w_{1}$} (v3.298.69);
            \path[->] (v0.61.31) edge node[rotate=56.31, anchor=south] {$w_{0}\mid w_{0} w_{2}$} (v4.231.31);
            \path[->] (v0.31.57) edge node[rotate=26.57, anchor=south] {$w_{0}\mid w_{0} w_{3}$} (v2.201.57);
            \path[->] (v0) edge[loop below] node {$w_{0}\mid w_{0}$} ();
            \end{tikzpicture}
            sage: view(T) # not tested

        TESTS::

            sage: T.latex_options(format_state_label='Nothing')
            Traceback (most recent call last):
            ...
            TypeError: format_state_label must be callable.
            sage: T.latex_options(format_letter='')
            Traceback (most recent call last):
            ...
            TypeError: format_letter must be callable.
            sage: T.latex_options(format_transition_label='')
            Traceback (most recent call last):
            ...
            TypeError: format_transition_label must be callable.
            sage: T.latex_options(loop_where=37)
            Traceback (most recent call last):
            ...
            TypeError: loop_where must be a callable or a
            dictionary.
            sage: T.latex_options(loop_where=lambda x: 'top')
            Traceback (most recent call last):
            ...
            ValueError: loop_where for 4 must be in ['below',
            'right', 'above', 'left'].
            sage: T.latex_options(initial_where=90)
            Traceback (most recent call last):
            ...
            TypeError: initial_where must be a callable or a
            dictionary.
            sage: T.latex_options(initial_where=lambda x: 'top')
            Traceback (most recent call last):
            ...
            ValueError: initial_where for 4 must be in ['below',
            'right', 'above', 'left'].
            sage: T.latex_options(accepting_style='fancy')
            Traceback (most recent call last):
            ...
            ValueError: accepting_style must be in ['accepting by
            double', 'accepting by arrow'].
            sage: T.latex_options(accepting_where=90)
            Traceback (most recent call last):
            ...
            TypeError: accepting_where must be a callable or a
            dictionary.
            sage: T.latex_options(accepting_where=lambda x: 'top')
            Traceback (most recent call last):
            ...
            ValueError: accepting_where for 0 must be in ['below',
            'right', 'above', 'left'].
            sage: T.latex_options(accepting_where={0: 'above', 3: 'top'})
            Traceback (most recent call last):
            ...
            ValueError: accepting_where for 3 must be a real number or
            be in ['below', 'right', 'above', 'left'].
        """
        # Only arguments that are not ``None`` are processed, so the
        # method can be called repeatedly to set options incrementally.
        if coordinates is not None:
            self.set_coordinates(coordinates)
        if format_state_label is not None:
            if not hasattr(format_state_label, '__call__'):
                raise TypeError('format_state_label must be callable.')
            self.format_state_label = format_state_label
        if format_letter is not None:
            if not hasattr(format_letter, '__call__'):
                raise TypeError('format_letter must be callable.')
            self.format_letter = format_letter
        if format_transition_label is not None:
            if not hasattr(format_transition_label, '__call__'):
                raise TypeError('format_transition_label must be callable.')
            self.format_transition_label = format_transition_label
        if loop_where is not None:
            # Valid positions are the keys of tikz_automata_where
            # ('above', 'left', 'below', 'right'); the option is stored
            # per state, either from a callable or a dictionary (states
            # missing from the dictionary are skipped).
            permissible = list(tikz_automata_where.iterkeys())
            for state in self.states():
                if hasattr(loop_where, '__call__'):
                    where = loop_where(state.label())
                else:
                    try:
                        where = loop_where[state.label()]
                    except TypeError:
                        raise TypeError("loop_where must be a "
                                        "callable or a dictionary.")
                    except KeyError:
                        continue
                if where in permissible:
                    state.loop_where = where
                else:
                    raise ValueError('loop_where for %s must be in %s.' %
                                     (state.label(), permissible))
        if initial_where is not None:
            # Same scheme as loop_where, but only initial states are
            # considered.
            permissible = list(tikz_automata_where.iterkeys())
            for state in self.iter_initial_states():
                if hasattr(initial_where, '__call__'):
                    where = initial_where(state.label())
                else:
                    try:
                        where = initial_where[state.label()]
                    except TypeError:
                        raise TypeError("initial_where must be a "
                                        "callable or a dictionary.")
                    except KeyError:
                        continue
                if where in permissible:
                    state.initial_where = where
                else:
                    raise ValueError('initial_where for %s must be in %s.' %
                                     (state.label(), permissible))
        if accepting_style is not None:
            permissible = ['accepting by double',
                           'accepting by arrow']
            if accepting_style in permissible:
                self.accepting_style = accepting_style
            else:
                raise ValueError('accepting_style must be in %s.' %
                                 permissible)
        if accepting_distance is not None:
            self.accepting_distance = accepting_distance
        if accepting_where is not None:
            # Like initial_where, for final states; additionally, a
            # final state with a non-empty final output word may get a
            # real number (an angle in degrees) instead of a keyword.
            permissible = list(tikz_automata_where.iterkeys())
            for state in self.iter_final_states():
                if hasattr(accepting_where, '__call__'):
                    where = accepting_where(state.label())
                else:
                    try:
                        where = accepting_where[state.label()]
                    except TypeError:
                        raise TypeError("accepting_where must be a "
                                        "callable or a dictionary.")
                    except KeyError:
                        continue
                if where in permissible:
                    state.accepting_where = where
                elif hasattr(state, 'final_word_out') \
                        and state.final_word_out:
                    if where in RR:
                        state.accepting_where = where
                    else:
                        raise ValueError('accepting_where for %s must '
                                         'be a real number or be in %s.' %
                                         (state.label(), permissible))
                else:
                    raise ValueError('accepting_where for %s must be in %s.' %
                                     (state.label(), permissible))
        if accepting_show_empty is not None:
            self.accepting_show_empty = accepting_show_empty
    def _latex_(self):
        r"""
        Returns a LaTeX code for the graph of the finite state machine.

        INPUT:

        Nothing.

        OUTPUT:

        A string.

        EXAMPLES::

            sage: F = FiniteStateMachine([('A', 'B', 1, 2)],
            ....:                        initial_states=['A'],
            ....:                        final_states=['B'])
            sage: F.state('A').initial_where='below'
            sage: print latex(F)  # indirect doctest
            \begin{tikzpicture}[auto, initial text=, >=latex]
            \node[state, initial, initial where=below] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
            \node[state, accepting] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
            \path[->] (v0) edge node[rotate=360.00, anchor=south] {$ $} (v1);
            \end{tikzpicture}
        """
        def label_rotation(angle, both_directions):
            """
            Given an angle of a transition, compute the TikZ string to
            rotate the label.
            """
            angle_label = angle
            anchor_label = "south"
            if angle > 90 or angle <= -90:
                # Flip the label by 180 degrees so that it is never
                # typeset upside down.
                angle_label = angle + 180
                if both_directions:
                    # if transitions in both directions, the transition to the
                    # left has its label below the transition, otherwise above
                    anchor_label = "north"
            return "rotate=%.2f, anchor=%s" % (angle_label, anchor_label)
        setup_latex_preamble()
        # TikZ picture options; more are appended below depending on
        # the accepting style and distance.
        options = ["auto", "initial text=", ">=latex"]
        # A non-empty final output word anywhere changes the defaults
        # for accepting_style and accepting_distance.
        nonempty_final_word_out = False
        for state in self.iter_final_states():
            if state.final_word_out:
                nonempty_final_word_out = True
                break
        if hasattr(self, "accepting_style"):
            accepting_style = self.accepting_style
        elif nonempty_final_word_out:
            accepting_style = "accepting by arrow"
        else:
            accepting_style = "accepting by double"
        if accepting_style == "accepting by arrow":
            options.append("accepting text=")
            options.append("accepting/.style=%s" % accepting_style)
        if hasattr(self, "accepting_distance"):
            accepting_distance = self.accepting_distance
        elif nonempty_final_word_out:
            accepting_distance = "7ex"
        else:
            accepting_distance = None
        if accepting_style == "accepting by arrow" and accepting_distance:
            options.append("accepting distance=%s"
                           % accepting_distance)
        if hasattr(self, "accepting_show_empty"):
            accepting_show_empty = self.accepting_show_empty
        else:
            accepting_show_empty = False
        result = "\\begin{tikzpicture}[%s]\n" % ", ".join(options)
        # j numbers the states consecutively; it is stored in
        # vertex._number_ so the transition pass below can refer to the
        # TikZ node names (v0, v1, ...).
        j = 0;
        for vertex in self.iter_states():
            if not hasattr(vertex, "coordinates"):
                # Default: place states equidistantly on a circle of
                # radius 3.
                vertex.coordinates = (3*cos(2*pi*j/len(self.states())),
                                      3*sin(2*pi*j/len(self.states())))
            options = ""
            if vertex.is_final:
                if not (vertex.final_word_out
                        and accepting_style == "accepting by arrow") \
                        and not accepting_show_empty:
                    # otherwise, we draw a custom made accepting path
                    # with label below
                    options += ", accepting"
                    if hasattr(vertex, "accepting_where"):
                        options += ", accepting where=%s" % (
                            vertex.accepting_where,)
            if vertex.is_initial:
                options += ", initial"
            if hasattr(vertex, "initial_where"):
                options += ", initial where=%s" % vertex.initial_where
            # Precedence for the state label: per-state format_label,
            # then the machine's format_state_label, then plain latex().
            if hasattr(vertex, "format_label"):
                label = vertex.format_label()
            elif hasattr(self, "format_state_label"):
                label = self.format_state_label(vertex)
            else:
                label = latex(vertex.label())
            result += "\\node[state%s] (v%d) at (%f, %f) {$%s$};\n" % (
                options, j, vertex.coordinates[0],
                vertex.coordinates[1], label)
            vertex._number_ = j
            if vertex.is_final and (vertex.final_word_out or accepting_show_empty):
                # Custom accepting arrow labeled with the end-of-word
                # symbol and the final output word.
                angle = 0
                if hasattr(vertex, "accepting_where"):
                    angle = tikz_automata_where.get(vertex.accepting_where,
                                                    vertex.accepting_where)
                result += "\\path[->] (v%d.%.2f) edge node[%s] {$%s \mid %s$} ++(%.2f:%s);\n" % (
                    j, angle,
                    label_rotation(angle, False),
                    EndOfWordLaTeX,
                    self.format_transition_label(vertex.final_word_out),
                    angle, accepting_distance)
            j += 1
        def key_function(s):
            return (s.from_state, s.to_state)
        # We use an OrderedDict instead of a dict in order to have a
        # defined ordering of the transitions in the output. See
        # http://trac.sagemath.org/ticket/16580#comment:3 . As the
        # transitions have to be sorted anyway, the performance
        # penalty should be bearable; nevertheless, this is only
        # required for doctests.
        adjacent = OrderedDict(
            (pair, list(transitions))
            for pair, transitions in
            itertools.groupby(
                sorted(self.iter_transitions(),
                       key=key_function),
                key=key_function
            ))
        for ((source, target), transitions) in adjacent.iteritems():
            if len(transitions) > 0:
                labels = []
                for transition in transitions:
                    if hasattr(transition, "format_label"):
                        labels.append(transition.format_label())
                    else:
                        labels.append(self._latex_transition_label_(
                            transition, self.format_transition_label))
                label = ", ".join(labels)
                if source != target:
                    # Straight edge; if there are also transitions in
                    # the opposite direction, offset the attachment
                    # angles so the two edges do not overlap.
                    angle = atan2(
                        target.coordinates[1] - source.coordinates[1],
                        target.coordinates[0] - source.coordinates[0]) * 180/pi
                    both_directions = (target, source) in adjacent
                    if both_directions:
                        angle_source = ".%.2f" % ((angle + 5).n(),)
                        angle_target = ".%.2f" % ((angle + 175).n(),)
                    else:
                        angle_source = ""
                        angle_target = ""
                    result += "\\path[->] (v%d%s) edge node[%s] {$%s$} (v%d%s);\n" % (
                        source._number_, angle_source,
                        label_rotation(angle, both_directions),
                        label,
                        target._number_, angle_target)
                else:
                    # Loop at a single state; position defaults to
                    # 'above' unless the state sets loop_where.
                    loop_where = "above"
                    if hasattr(source, "loop_where"):
                        loop_where = source.loop_where
                    rotation = {'left': '[rotate=90, anchor=south]',
                                'right': '[rotate=90, anchor=north]'}
                    result += "\\path[->] (v%d) edge[loop %s] node%s {$%s$} ();\n" % (
                        source._number_,
                        loop_where, rotation.get(loop_where, ''),
                        label)
        result += "\\end{tikzpicture}"
        return result
def _latex_transition_label_(self, transition, format_function=latex):
r"""
Returns the proper transition label.
INPUT:
- ``transition`` - a transition
- ``format_function`` - a function formatting the labels
OUTPUT:
A string.
TESTS::
sage: F = FiniteStateMachine([('A', 'B', 0, 1)])
sage: t = F.transitions()[0]
sage: F._latex_transition_label_(t)
' '
"""
return ' '
def set_coordinates(self, coordinates, default=True):
"""
Set coordinates of the states for the LaTeX representation by
a dictionary or a function mapping labels to coordinates.
INPUT:
- ``coordinates`` -- a dictionary or a function mapping labels
of states to pairs interpreted as coordinates.
- ``default`` -- If ``True``, then states not given by
``coordinates`` get a default position on a circle of
radius 3.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = Automaton([[0, 1, 1], [1, 2, 2], [2, 0, 0]])
sage: F.set_coordinates({0: (0, 0), 1: (2, 0), 2: (1, 1)})
sage: F.state(0).coordinates
(0, 0)
We can also use a function to determine the coordinates::
sage: F = Automaton([[0, 1, 1], [1, 2, 2], [2, 0, 0]])
sage: F.set_coordinates(lambda l: (l, 3/(l+1)))
sage: F.state(2).coordinates
(2, 1)
"""
states_without_coordinates = []
for state in self.iter_states():
try:
state.coordinates = coordinates[state.label()]
continue
except (KeyError, TypeError):
pass
try:
state.coordinates = coordinates(state.label())
continue
except TypeError:
pass
states_without_coordinates.append(state)
if default:
n = len(states_without_coordinates)
for j, state in enumerate(states_without_coordinates):
state.coordinates = (3*cos(2*pi*j/n),
3*sin(2*pi*j/n))
#*************************************************************************
# other
#*************************************************************************
def _matrix_(self, R=None):
"""
Returns the adjacency matrix of the finite state machine.
See :meth:`.adjacency_matrix` for more information.
EXAMPLES::
sage: B = FiniteStateMachine({0: {0: (0, 0), 'a': (1, 0)},
....: 'a': {2: (0, 0), 3: (1, 0)},
....: 2:{0:(1, 1), 4:(0, 0)},
....: 3:{'a':(0, 1), 2:(1, 1)},
....: 4:{4:(1, 1), 3:(0, 1)}},
....: initial_states=[0])
sage: B._matrix_()
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
"""
return self.adjacency_matrix()
def adjacency_matrix(self, input=None,
entry=None):
"""
Returns the adjacency matrix of the underlying graph.
INPUT:
- ``input`` -- Only transitions with input label ``input`` are
respected.
- ``entry`` -- The function ``entry`` takes a transition and the
return value is written in the matrix as the entry
``(transition.from_state, transition.to_state)``. The default
value (``None``) of entry takes the variable ``x`` to the
power of the sum of the output word of the transition.
OUTPUT:
A matrix.
If any label of a state is not an integer, the finite state
machine is relabeled at the beginning. If there are more than
one transitions between two states, then the different return
values of ``entry`` are added up.
EXAMPLES::
sage: B = FiniteStateMachine({0:{0:(0, 0), 'a':(1, 0)},
....: 'a':{2:(0, 0), 3:(1, 0)},
....: 2:{0:(1, 1), 4:(0, 0)},
....: 3:{'a':(0, 1), 2:(1, 1)},
....: 4:{4:(1, 1), 3:(0, 1)}},
....: initial_states=[0])
sage: B.adjacency_matrix()
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
This is equivalent to::
sage: matrix(B)
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
It is also possible to use other entries in the adjacency matrix::
sage: B.adjacency_matrix(entry=(lambda transition: 1))
[1 1 0 0 0]
[0 0 1 1 0]
[1 0 0 0 1]
[0 1 1 0 0]
[0 0 0 1 1]
sage: B.adjacency_matrix(1, entry=(lambda transition:
....: exp(I*transition.word_out[0]*var('t'))))
[ 0 1 0 0 0]
[ 0 0 0 1 0]
[e^(I*t) 0 0 0 0]
[ 0 0 e^(I*t) 0 0]
[ 0 0 0 0 e^(I*t)]
sage: a = Automaton([(0, 1, 0),
....: (1, 2, 0),
....: (2, 0, 1),
....: (2, 1, 0)],
....: initial_states=[0],
....: final_states=[0])
sage: a.adjacency_matrix()
[0 1 0]
[0 0 1]
[1 1 0]
"""
def default_function(transitions):
var('x')
return x**sum(transition.word_out)
if entry is None:
entry = default_function
relabeledFSM = self
l = len(relabeledFSM.states())
for state in self.iter_states():
if state.label() not in ZZ or state.label() >= l \
or state.label() < 0:
relabeledFSM = self.relabeled()
break
dictionary = {}
for transition in relabeledFSM.iter_transitions():
if input is None or transition.word_in == [input]:
if (transition.from_state.label(),
transition.to_state.label()) in dictionary:
dictionary[(transition.from_state.label(),
transition.to_state.label())] \
+= entry(transition)
else:
dictionary[(transition.from_state.label(),
transition.to_state.label())] \
= entry(transition)
return matrix(len(relabeledFSM.states()), dictionary)
def determine_alphabets(self, reset=True):
"""
Determines the input and output alphabet according to the
transitions in self.
INPUT:
- ``reset`` -- If reset is ``True``, then the existing input
and output alphabets are erased, otherwise new letters are
appended to the existing alphabets.
OUTPUT:
Nothing.
After this operation the input alphabet and the output
alphabet of self are a list of letters.
.. TODO::
At the moment, the letters of the alphabets need to be hashable.
EXAMPLES::
sage: T = Transducer([(1, 1, 1, 0), (1, 2, 2, 1),
....: (2, 2, 1, 1), (2, 2, 0, 0)],
....: final_states=[1],
....: determine_alphabets=False)
sage: T.state(1).final_word_out = [1, 4]
sage: (T.input_alphabet, T.output_alphabet)
(None, None)
sage: T.determine_alphabets()
sage: (T.input_alphabet, T.output_alphabet)
([0, 1, 2], [0, 1, 4])
"""
if reset:
ain = set()
aout = set()
else:
ain = set(self.input_alphabet)
aout = set(self.output_alphabet)
for t in self.iter_transitions():
for letter in t.word_in:
ain.add(letter)
for letter in t.word_out:
aout.add(letter)
for s in self.iter_final_states():
for letter in s.final_word_out:
aout.add(letter)
self.input_alphabet = list(ain)
self.output_alphabet = list(aout)
#*************************************************************************
# get states and transitions
#*************************************************************************
def states(self):
"""
Returns the states of the finite state machine.
INPUT:
Nothing.
OUTPUT:
The states of the finite state machine as list.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: FSM.states()
['1', '2']
"""
return copy(self._states_)
def iter_states(self):
"""
Returns an iterator of the states.
INPUT:
Nothing.
OUTPUT:
An iterator of the states of the finite state machine.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [s.label() for s in FSM.iter_states()]
['1', '2']
"""
return iter(self._states_)
def transitions(self, from_state=None):
"""
Returns a list of all transitions.
INPUT:
- ``from_state`` -- (default: ``None``) If ``from_state`` is
given, then a list of transitions starting there is given.
OUTPUT:
A list of all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: FSM.transitions()
[Transition from '1' to '2': 1|-,
Transition from '2' to '2': 0|-]
"""
return list(self.iter_transitions(from_state))
def iter_transitions(self, from_state=None):
"""
Returns an iterator of all transitions.
INPUT:
- ``from_state`` -- (default: ``None``) If ``from_state`` is
given, then a list of transitions starting there is given.
OUTPUT:
An iterator of all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions('1')]
[('1', '2')]
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions('2')]
[('2', '2')]
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions()]
[('1', '2'), ('2', '2')]
"""
if from_state is None:
return self._iter_transitions_all_()
else:
return iter(self.state(from_state).transitions)
def _iter_transitions_all_(self):
"""
Returns an iterator over all transitions.
INPUT:
Nothing.
OUTPUT:
An iterator over all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM._iter_transitions_all_()]
[('1', '2'), ('2', '2')]
"""
for state in self.iter_states():
for t in state.transitions:
yield t
def initial_states(self):
"""
Returns a list of all initial states.
INPUT:
Nothing.
OUTPUT:
A list of all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True)
sage: B = FSMState('B')
sage: F = FiniteStateMachine([(A, B, 1, 0)])
sage: F.initial_states()
['A']
"""
return list(self.iter_initial_states())
def iter_initial_states(self):
"""
Returns an iterator of the initial states.
INPUT:
Nothing.
OUTPUT:
An iterator over all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True)
sage: B = FSMState('B')
sage: F = FiniteStateMachine([(A, B, 1, 0)])
sage: [s.label() for s in F.iter_initial_states()]
['A']
"""
return itertools.ifilter(lambda s:s.is_initial, self.iter_states())
def final_states(self):
"""
Returns a list of all final states.
INPUT:
Nothing.
OUTPUT:
A list of all final states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True)
sage: B = FSMState('B', is_initial=True)
sage: C = FSMState('C', is_final=True)
sage: F = FiniteStateMachine([(A, B), (A, C)])
sage: F.final_states()
['A', 'C']
"""
return list(self.iter_final_states())
def iter_final_states(self):
"""
Returns an iterator of the final states.
INPUT:
Nothing.
OUTPUT:
An iterator over all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True)
sage: B = FSMState('B', is_initial=True)
sage: C = FSMState('C', is_final=True)
sage: F = FiniteStateMachine([(A, B), (A, C)])
sage: [s.label() for s in F.iter_final_states()]
['A', 'C']
"""
return itertools.ifilter(lambda s:s.is_final, self.iter_states())
def state(self, state):
"""
Returns the state of the finite state machine.
INPUT:
- ``state`` -- If ``state`` is not an instance of
:class:`FSMState`, then it is assumed that it is the label
of a state.
OUTPUT:
Returns the state of the finite state machine corresponding to
``state``.
If no state is found, then a ``LookupError`` is thrown.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: FSM = FiniteStateMachine([(A, 'B'), ('C', A)])
sage: FSM.state('A') == A
True
sage: FSM.state('xyz')
Traceback (most recent call last):
...
LookupError: No state with label xyz found.
"""
def what(s, switch):
if switch:
return s.label()
else:
return s
switch = is_FSMState(state)
try:
return self._states_dict_[what(state, switch)]
except AttributeError:
for s in self.iter_states():
if what(s, not switch) == state:
return s
except KeyError:
pass
raise LookupError("No state with label %s found." % (what(state, switch),))
def transition(self, transition):
"""
Returns the transition of the finite state machine.
INPUT:
- ``transition`` -- If ``transition`` is not an instance of
:class:`FSMTransition`, then it is assumed that it is a
tuple ``(from_state, to_state, word_in, word_out)``.
OUTPUT:
Returns the transition of the finite state machine
corresponding to ``transition``.
If no transition is found, then a ``LookupError`` is thrown.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t = FSMTransition('A', 'B', 0)
sage: F = FiniteStateMachine([t])
sage: F.transition(('A', 'B', 0))
Transition from 'A' to 'B': 0|-
sage: id(t) == id(F.transition(('A', 'B', 0)))
True
"""
if not is_FSMTransition(transition):
transition = FSMTransition(*transition)
for s in self.iter_transitions(transition.from_state):
if s == transition:
return s
raise LookupError("No transition found.")
#*************************************************************************
# properties (state and transitions)
#*************************************************************************
def has_state(self, state):
"""
Returns whether ``state`` is one of the states of the finite
state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label of a state.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_state('A')
False
"""
try:
self.state(state)
return True
except LookupError:
return False
def has_transition(self, transition):
"""
Returns whether ``transition`` is one of the transitions of
the finite state machine.
INPUT:
- ``transition`` has to be a :class:`FSMTransition`.
OUTPUT:
True or False.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t = FSMTransition('A', 'A', 0, 1)
sage: FiniteStateMachine().has_transition(t)
False
sage: FiniteStateMachine().has_transition(('A', 'A', 0, 1))
Traceback (most recent call last):
...
TypeError: Transition is not an instance of FSMTransition.
"""
if is_FSMTransition(transition):
return transition in self.iter_transitions()
raise TypeError("Transition is not an instance of FSMTransition.")
def has_initial_state(self, state):
"""
Returns whether ``state`` is one of the initial states of the
finite state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label.
OUTPUT:
True or False.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'A')], initial_states=['A'])
sage: F.has_initial_state('A')
True
"""
try:
return self.state(state).is_initial
except LookupError:
return False
def has_initial_states(self):
"""
Returns whether the finite state machine has an initial state.
INPUT:
Nothing.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_initial_states()
False
"""
return len(self.initial_states()) > 0
def has_final_state(self, state):
"""
Returns whether ``state`` is one of the final states of the
finite state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine(final_states=['A']).has_final_state('A')
True
"""
try:
return self.state(state).is_final
except LookupError:
return False
def has_final_states(self):
"""
Returns whether the finite state machine has a final state.
INPUT:
Nothing.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_final_states()
False
"""
return len(self.final_states()) > 0
#*************************************************************************
# properties
#*************************************************************************
def is_deterministic(self):
"""
Returns whether the finite finite state machine is deterministic.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
A finite state machine is considered to be deterministic if
each transition has input label of length one and for each
pair `(q,a)` where `q` is a state and `a` is an element of the
input alphabet, there is at most one transition from `q` with
input label `a`.
TESTS::
sage: fsm = FiniteStateMachine()
sage: fsm.add_transition(('A', 'B', 0, []))
Transition from 'A' to 'B': 0|-
sage: fsm.is_deterministic()
True
sage: fsm.add_transition(('A', 'C', 0, []))
Transition from 'A' to 'C': 0|-
sage: fsm.is_deterministic()
False
sage: fsm.add_transition(('A', 'B', [0,1], []))
Transition from 'A' to 'B': 0,1|-
sage: fsm.is_deterministic()
False
"""
for state in self.iter_states():
for transition in state.transitions:
if len(transition.word_in) != 1:
return False
transition_classes_by_word_in = full_group_by(
state.transitions,
key=lambda t: t.word_in)
for key,transition_class in transition_classes_by_word_in:
if len(transition_class) > 1:
return False
return True
def is_complete(self):
"""
Returns whether the finite state machine is complete.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
A finite state machine is considered to be complete if
each transition has an input label of length one and for each
pair `(q, a)` where `q` is a state and `a` is an element of the
input alphabet, there is exactly one transition from `q` with
input label `a`.
EXAMPLES::
sage: fsm = FiniteStateMachine([(0, 0, 0, 0),
....: (0, 1, 1, 1),
....: (1, 1, 0, 0)],
....: determine_alphabets=False)
sage: fsm.is_complete()
Traceback (most recent call last):
...
ValueError: No input alphabet is given. Try calling determine_alphabets().
sage: fsm.input_alphabet = [0, 1]
sage: fsm.is_complete()
False
sage: fsm.add_transition((1, 1, 1, 1))
Transition from 1 to 1: 1|1
sage: fsm.is_complete()
True
sage: fsm.add_transition((0, 0, 1, 0))
Transition from 0 to 0: 1|0
sage: fsm.is_complete()
False
"""
if self.input_alphabet is None:
raise ValueError("No input alphabet is given. "
"Try calling determine_alphabets().")
for state in self.iter_states():
for transition in state.transitions:
if len(transition.word_in) != 1:
return False
transition_classes_by_word_in = full_group_by(
state.transitions,
key=lambda t: t.word_in)
for key, transition_class in transition_classes_by_word_in:
if len(transition_class) > 1:
return False
# all input labels are lists, extract the only element
outgoing_alphabet = [key[0] for key, transition_class in
transition_classes_by_word_in]
if not sorted(self.input_alphabet) == sorted(outgoing_alphabet):
return False
return True
    def is_connected(self):
        """
        TESTS::
            sage: FiniteStateMachine().is_connected()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: connectivity of the underlying digraph is not
        # implemented yet.
        raise NotImplementedError
#*************************************************************************
# let the finite state machine work
#*************************************************************************
def process(self, *args, **kwargs):
"""
Returns whether the finite state machine accepts the input, the state
where the computation stops and which output is generated.
INPUT:
- ``input_tape`` -- The input tape can be a list with entries from
the input alphabet.
- ``initial_state`` -- (default: ``None``) The state in which
to start. If this parameter is ``None`` and there is only
one initial state in the machine, then this state is taken.
OUTPUT:
A triple, where
- the first entry is ``True`` if the input string is accepted,
- the second gives the reached state after processing the
input tape (This is a state with label ``None`` if the input
could not be processed, i.e., when at one point no
transition to go could be found.), and
- the third gives a list of the output labels used during
processing (in the case the finite state machine runs as
transducer).
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial = True, is_final = True)
sage: binary_inverter = FiniteStateMachine({A:[(A, 0, 1), (A, 1, 0)]})
sage: binary_inverter.process([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
Alternatively, we can invoke this function by::
sage: binary_inverter([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
::
sage: NAF_ = FSMState('_', is_initial = True, is_final = True)
sage: NAF1 = FSMState('1', is_final = True)
sage: NAF = FiniteStateMachine(
....: {NAF_: [(NAF_, 0), (NAF1, 1)], NAF1: [(NAF_, 0)]})
sage: [NAF.process(w)[0] for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
....: [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
[True, True, False, True, False, False]
Non-deterministic finite state machines cannot be handeled.
::
sage: T = Transducer([(0, 1, 0, 0), (0, 2, 0, 0)],
....: initial_states=[0])
sage: T.process([0])
Traceback (most recent call last):
...
NotImplementedError: Non-deterministic path encountered when processing input.
sage: T = Transducer([(0, 1, [0, 0], 0), (0, 2, [0, 0, 1], 0),
....: (0, 1, 1, 2), (1, 0, [], 1), (1, 1, 1, 3)],
....: initial_states=[0], final_states=[0, 1])
sage: T.process([0])
(False, None, None)
sage: T.process([0, 0])
Traceback (most recent call last):
...
NotImplementedError: Non-deterministic path encountered when processing input.
sage: T.process([1])
(True, 1, [2])
sage: T.process([1, 1])
Traceback (most recent call last):
...
NotImplementedError: process cannot handle epsilon transition leaving state 1.
"""
it = self.iter_process(*args, **kwargs)
for _ in it:
pass
return (it.accept_input, it.current_state, it.output_tape)
def iter_process(self, input_tape=None, initial_state=None, **kwargs):
"""
See :meth:`.process` for more informations.
EXAMPLES::
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
sage: it = inverter.iter_process(input_tape=[0, 1, 1])
sage: for _ in it:
....: pass
sage: it.output_tape
[1, 0, 0]
"""
return FSMProcessIterator(self, input_tape, initial_state, **kwargs)
#*************************************************************************
# change finite state machine (add/remove state/transitions)
#*************************************************************************
def add_state(self, state):
"""
Adds a state to the finite state machine and returns the new
state. If the state already exists, that existing state is
returned.
INPUT:
- ``state`` is either an instance of
:class:`FSMState` or,
otherwise, a label of a state.
OUTPUT:
The new or existing state.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: F = FiniteStateMachine()
sage: A = FSMState('A', is_initial=True)
sage: F.add_state(A)
'A'
"""
try:
return self.state(state)
except LookupError:
pass
# at this point we know that we have a new state
if is_FSMState(state):
s = state
else:
s = FSMState(state)
s.transitions = list()
self._states_.append(s)
try:
self._states_dict_[s.label()] = s
except AttributeError:
pass
return s
def add_states(self, states):
"""
Adds several states. See add_state for more information.
INPUT:
- ``states`` -- a list of states or iterator over states.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine()
sage: F.add_states(['A', 'B'])
sage: F.states()
['A', 'B']
"""
for state in states:
self.add_state(state)
def add_transition(self, *args, **kwargs):
"""
Adds a transition to the finite state machine and returns the
new transition.
If the transition already exists, the return value of
``self.on_duplicate_transition`` is returned. See the
documentation of :class:`FiniteStateMachine`.
INPUT:
The following forms are all accepted:
::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
sage: A = FSMState('A')
sage: B = FSMState('B')
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(FSMTransition(A, B, 0, 1))
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(A, B, 0, 1)
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(A, B, word_in=0, word_out=1)
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition('A', 'B', {'word_in': 0, 'word_out': 1})
Transition from 'A' to 'B': {'word_in': 0, 'word_out': 1}|-
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(from_state=A, to_state=B,
....: word_in=0, word_out=1)
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition({'from_state': A, 'to_state': B,
....: 'word_in': 0, 'word_out': 1})
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition((A, B, 0, 1))
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition([A, B, 0, 1])
Transition from 'A' to 'B': 0|1
If the states ``A`` and ``B`` are not instances of
:class:`FSMState`, then it is assumed that they are labels of
states.
OUTPUT:
The new transition.
"""
if len(args) + len(kwargs) == 0:
return
if len(args) + len(kwargs) == 1:
if len(args) == 1:
d = args[0]
if is_FSMTransition(d):
return self._add_fsm_transition_(d)
else:
d = next(kwargs.itervalues())
if hasattr(d, 'iteritems'):
args = []
kwargs = d
elif hasattr(d, '__iter__'):
args = d
kwargs = {}
else:
raise TypeError("Cannot decide what to do with input.")
data = dict(zip(
('from_state', 'to_state', 'word_in', 'word_out', 'hook'),
args))
data.update(kwargs)
data['from_state'] = self.add_state(data['from_state'])
data['to_state'] = self.add_state(data['to_state'])
return self._add_fsm_transition_(FSMTransition(**data))
def _add_fsm_transition_(self, t):
"""
Adds a transition.
INPUT:
- ``t`` -- an instance of :class:`FSMTransition`.
OUTPUT:
The new transition.
TESTS::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: F = FiniteStateMachine()
sage: F._add_fsm_transition_(FSMTransition('A', 'B'))
Transition from 'A' to 'B': -|-
"""
try:
existing_transition = self.transition(t)
except LookupError:
pass
else:
return self.on_duplicate_transition(existing_transition, t)
from_state = self.add_state(t.from_state)
self.add_state(t.to_state)
from_state.transitions.append(t)
return t
    def add_from_transition_function(self, function, initial_states=None,
                                     explore_existing_states=True):
        """
        Constructs a finite state machine from a transition function.
        INPUT:
        - ``function`` may return a tuple (new_state, output_word) or a
          list of such tuples.
        - ``initial_states`` -- If no initial states are given, the
          already existing initial states of self are taken.
        - If ``explore_existing_states`` is True (default), then
          already existing states in self (e.g. already given final
          states) will also be processed if they are reachable from
          the initial states.
        OUTPUT:
        Nothing.
        EXAMPLES::
            sage: F = FiniteStateMachine(initial_states=['A'],
            ....:                        input_alphabet=[0, 1])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f)
            sage: F.transitions()
            [Transition from 'A' to 'A': 0|0,
             Transition from 'A' to 'B': 0|1,
             Transition from 'A' to 'A': 1|1,
             Transition from 'A' to 'B': 1|0,
             Transition from 'B' to 'A': 0|0,
             Transition from 'B' to 'B': 0|1,
             Transition from 'B' to 'A': 1|1,
             Transition from 'B' to 'B': 1|0]
        Initial states can also be given as a parameter::
            sage: F = FiniteStateMachine(input_alphabet=[0,1])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f,initial_states=['A'])
            sage: F.initial_states()
            ['A']
        Already existing states in the finite state machine (the final
        states in the example below) are also explored::
            sage: F = FiniteStateMachine(initial_states=[0],
            ....:                        final_states=[1],
            ....:                        input_alphabet=[0])
            sage: def transition_function(state, letter):
            ....:     return(1-state, [])
            sage: F.add_from_transition_function(transition_function)
            sage: F.transitions()
            [Transition from 0 to 1: 0|-,
             Transition from 1 to 0: 0|-]
        If ``explore_existing_states=False``, however, this behavior
        is turned off, i.e., already existing states are not
        explored::
            sage: F = FiniteStateMachine(initial_states=[0],
            ....:                        final_states=[1],
            ....:                        input_alphabet=[0])
            sage: def transition_function(state, letter):
            ....:     return(1-state, [])
            sage: F.add_from_transition_function(transition_function,
            ....:                                explore_existing_states=False)
            sage: F.transitions()
            [Transition from 0 to 1: 0|-]
        TEST::
            sage: F = FiniteStateMachine(initial_states=['A'])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f)
            Traceback (most recent call last):
            ...
            ValueError: No input alphabet is given.
            Try calling determine_alphabets().
        ::
            sage: def transition(state, where):
            ....:     return (vector([0, 0]), 1)
            sage: Transducer(transition, input_alphabet=[0], initial_states=[0])
            Traceback (most recent call last):
            ...
            TypeError: mutable vectors are unhashable
        """
        if self.input_alphabet is None:
            raise ValueError("No input alphabet is given. "
                   "Try calling determine_alphabets().")
        # ``not_done`` is the breadth-first exploration queue of states
        # whose outgoing transitions still have to be computed.
        if initial_states is None:
            not_done = self.initial_states()
        elif hasattr(initial_states, '__iter__'):
            not_done = []
            for s in initial_states:
                state = self.add_state(s)
                state.is_initial = True
                not_done.append(state)
        else:
            raise TypeError('Initial states must be iterable ' \
                '(e.g. a list of states).')
        if len(not_done) == 0:
            raise ValueError("No state is initial.")
        # ``ignore_done`` holds pre-existing states that are re-queued the
        # first time they are reached during exploration (and are skipped
        # entirely if exploration of existing states is disabled).
        if explore_existing_states:
            ignore_done = self.states()
            for s in not_done:
                try:
                    ignore_done.remove(s)
                except ValueError:
                    pass
        else:
            ignore_done = []
        while len(not_done) > 0:
            s = not_done.pop(0)
            for letter in self.input_alphabet:
                # A LookupError from the callback means "no transition for
                # this (state, letter) pair".
                try:
                    return_value = function(s.label(), letter)
                except LookupError:
                    continue
                # Normalize a single (state, word) pair to a list of pairs.
                if not hasattr(return_value, "pop"):
                    return_value = [return_value]
                # Validate the shape of the callback's return value before
                # mutating the machine.
                try:
                    for (st_label, word) in return_value:
                        pass
                except TypeError:
                    raise ValueError("The callback function for "
                                     "add_from_transition is expected "
                                     "to return a pair (new_state, "
                                     "output_label) or a list of such pairs. "
                                     "For the state %s and the input "
                                     "letter %s, it however returned %s, "
                                     "which is not acceptable."
                                     % (s.label(), letter, return_value))
                for (st_label, word) in return_value:
                    if not self.has_state(st_label):
                        # Brand-new state: add it and queue it.
                        not_done.append(self.add_state(st_label))
                    elif len(ignore_done) > 0:
                        # Pre-existing state reached for the first time:
                        # queue it once and stop tracking it.
                        u = self.state(st_label)
                        if u in ignore_done:
                            not_done.append(u)
                            ignore_done.remove(u)
                    self.add_transition(s, st_label,
                                        word_in=letter, word_out=word)
    def add_transitions_from_function(self, function, labels_as_input=True):
        """
        Adds one or more transitions if ``function(state, state)``
        says that there are some.
        INPUT:
        - ``function`` -- a transition function. Given two states
          ``from_state`` and ``to_state`` (or their labels if
          ``labels_as_input`` is true), this function shall return a
          tuple ``(word_in, word_out)`` to add a transition from
          ``from_state`` to ``to_state`` with input and output labels
          ``word_in`` and ``word_out``, respectively. If no such
          addition is to be added, the transition function shall
          return ``None``. The transition function may also return
          a list of such tuples in order to add multiple transitions
          between the pair of states.
        - ``labels_as_input`` -- (default: ``True``)
        OUTPUT:
        Nothing.
        EXAMPLES::
            sage: F = FiniteStateMachine()
            sage: F.add_states(['A', 'B', 'C'])
            sage: def f(state1, state2):
            ....:     if state1 == 'C':
            ....:         return None
            ....:     return (0, 1)
            sage: F.add_transitions_from_function(f)
            sage: len(F.transitions())
            6
        Multiple transitions are also possible::
            sage: F = FiniteStateMachine()
            sage: F.add_states([0, 1])
            sage: def f(state1, state2):
            ....:     if state1 != state2:
            ....:          return [(0, 1), (1, 0)]
            ....:     else:
            ....:          return None
            sage: F.add_transitions_from_function(f)
            sage: F.transitions()
            [Transition from 0 to 1: 0|1,
             Transition from 0 to 1: 1|0,
             Transition from 1 to 0: 0|1,
             Transition from 1 to 0: 1|0]
        TESTS::
            sage: F = FiniteStateMachine()
            sage: F.add_state(0)
            0
            sage: def f(state1, state2):
            ....:     return 1
            sage: F.add_transitions_from_function(f)
            Traceback (most recent call last):
            ...
            ValueError: The callback function for add_transitions_from_function
            is expected to return a pair (word_in, word_out) or a list of such
            pairs. For states 0 and 0 however, it returned 1,
            which is not acceptable.
        """
        # Query the callback for every ordered pair of states (including
        # pairs (q, q), i.e. loops).
        for s_from in self.iter_states():
            for s_to in self.iter_states():
                # A LookupError from the callback is treated the same as
                # returning None: no transition for this pair.
                try:
                    if labels_as_input:
                        return_value = function(s_from.label(), s_to.label())
                    else:
                        return_value = function(s_from, s_to)
                except LookupError:
                    continue
                if return_value is None:
                    continue
                # Normalize a single (word_in, word_out) pair to a list.
                if not hasattr(return_value, "pop"):
                    transitions = [return_value]
                else:
                    transitions = return_value
                for t in transitions:
                    if not hasattr(t, '__getitem__'):
                         raise ValueError("The callback function for "
                                          "add_transitions_from_function "
                                          "is expected to return a "
                                          "pair (word_in, word_out) or a "
                                          "list of such pairs. For "
                                          "states %s and %s however, it "
                                          "returned %s, which is not "
                                          "acceptable." % (s_from, s_to, return_value))
                    label_in = t[0]
                    # The output label is optional.
                    try:
                        label_out = t[1]
                    except LookupError:
                        label_out = None
                    self.add_transition(s_from, s_to, label_in, label_out)
def delete_transition(self, t):
"""
Deletes a transition by removing it from the list of transitions of
the state, where the transition starts.
INPUT:
- ``t`` -- a transition.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 0), ('B', 'A', 1)])
sage: F.delete_transition(('A', 'B', 0))
sage: F.transitions()
[Transition from 'B' to 'A': 1|-]
"""
transition = self.transition(t)
transition.from_state.transitions.remove(transition)
def delete_state(self, s):
"""
Deletes a state and all transitions coming or going to this state.
INPUT:
- ``s`` -- a label of a state or an :class:`FSMState`.
OUTPUT:
Nothing.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t1 = FSMTransition('A', 'B', 0)
sage: t2 = FSMTransition('B', 'B', 1)
sage: F = FiniteStateMachine([t1, t2])
sage: F.delete_state('A')
sage: F.transitions()
[Transition from 'B' to 'B': 1|-]
TESTS::
sage: F._states_
['B']
sage: F._states_dict_ # This shows that #16024 is fixed.
{'B': 'B'}
"""
state = self.state(s)
for transition in self.transitions():
if transition.to_state == state:
self.delete_transition(transition)
self._states_.remove(state)
try:
del self._states_dict_[state.label()]
except AttributeError:
pass
    def remove_epsilon_transitions(self):
        """
        TESTS::
            sage: FiniteStateMachine().remove_epsilon_transitions()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: elimination of epsilon (empty-input) transitions is
        # not implemented yet.
        raise NotImplementedError
def accessible_components(self):
"""
Returns a new finite state machine with the accessible states
of self and all transitions between those states.
INPUT:
Nothing.
OUTPUT:
A finite state machine with the accessible states of self and
all transitions between those states.
A state is accessible if there is a directed path from an
initial state to the state. If self has no initial states then
a copy of the finite state machine self is returned.
EXAMPLES::
sage: F = Automaton([(0, 0, 0), (0, 1, 1), (1, 1, 0), (1, 0, 1)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 2 states
::
sage: F = Automaton([(0, 0, 1), (0, 0, 1), (1, 1, 0), (1, 0, 1)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 1 states
TESTS:
Check whether input of length > 1 works::
sage: F = Automaton([(0, 1, [0, 1]), (0, 2, 0)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 3 states
"""
if len(self.initial_states()) == 0:
return deepcopy(self)
memo = {}
def accessible(from_state, read):
return [(deepcopy(x.to_state, memo), x.word_out)
for x in self.iter_transitions(from_state)
if x.word_in[0] == read]
new_initial_states=map(lambda x: deepcopy(x, memo),
self.initial_states())
result = self.empty_copy()
result.add_from_transition_function(accessible,
initial_states=new_initial_states)
for final_state in self.iter_final_states():
try:
new_final_state=result.state(final_state.label)
new_final_state.is_final=True
except LookupError:
pass
return result
# *************************************************************************
# creating new finite state machines
# *************************************************************************
    def disjoint_union(self, other):
        """
        TESTS::
            sage: F = FiniteStateMachine([('A', 'A')])
            sage: FiniteStateMachine().disjoint_union(F)
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: disjoint union of two machines is not implemented yet.
        raise NotImplementedError
    def concatenation(self, other):
        """
        TESTS::
            sage: F = FiniteStateMachine([('A', 'A')])
            sage: FiniteStateMachine().concatenation(F)
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: concatenation of two machines is not implemented yet.
        raise NotImplementedError
    def Kleene_closure(self):
        """
        TESTS::
            sage: FiniteStateMachine().Kleene_closure()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: the Kleene star construction is not implemented yet.
        raise NotImplementedError
    def intersection(self, other):
        """
        TESTS::
            sage: FiniteStateMachine().intersection(FiniteStateMachine())
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: intersection is not implemented on the generic class
        # (subclasses may override it).
        raise NotImplementedError
    def product_FiniteStateMachine(self, other, function,
                                   new_input_alphabet=None,
                                   only_accessible_components=True,
                                   final_function=None,
                                   new_class=None):
        r"""
        Returns a new finite state machine whose states are
        `d`-tuples of states of the original finite state machines.
        INPUT:
        - ``other`` -- a finite state machine (for `d=2`) or a list
          (or iterable) of `d-1` finite state machines.
        - ``function`` has to accept `d` transitions from `A_j` to `B_j`
          for `j\in\{1, \ldots, d\}` and returns a pair ``(word_in, word_out)``
          which is the label of the transition `A=(A_1, \ldots, A_d)` to `B=(B_1,
          \ldots, B_d)`. If there is no transition from `A` to `B`,
          then ``function`` should raise a ``LookupError``.
        - ``new_input_alphabet`` (optional) -- the new input alphabet
          as a list.
        - ``only_accessible_components`` -- If ``True`` (default), then
          the result is piped through :meth:`.accessible_components`. If no
          ``new_input_alphabet`` is given, it is determined by
          :meth:`.determine_alphabets`.
        - ``final_function`` -- A function mapping `d` final states of
          the original finite state machines to the final output of
          the corresponding state in the new finite state machine. By
          default, the final output is the empty word if both final
          outputs of the constituent states are empty; otherwise, a
          ``ValueError`` is raised.
        - ``new_class`` -- Class of the new finite state machine. By
          default (``None``), the class of ``self`` is used.
        OUTPUT:
        A finite state machine whose states are `d`-tuples of states of the
        original finite state machines. A state is initial or
        final if all constituent states are initial or final,
        respectively.
        The labels of the transitions are defined by ``function``.
        The final output of a final state is determined by calling
        ``final_function`` on the constituent states.
        The color of a new state is the tuple of colors of the
        constituent states of ``self`` and ``other``.
        EXAMPLES::
            sage: F = Automaton([('A', 'B', 1), ('A', 'A', 0), ('B', 'A', 2)],
            ....:               initial_states=['A'], final_states=['B'],
            ....:               determine_alphabets=True)
            sage: G = Automaton([(1, 1, 1)], initial_states=[1], final_states=[1])
            sage: def addition(transition1, transition2):
            ....:     return (transition1.word_in[0] + transition2.word_in[0],
            ....:             None)
            sage: H = F.product_FiniteStateMachine(G, addition, [0, 1, 2, 3], only_accessible_components=False)
            sage: H.transitions()
            [Transition from ('A', 1) to ('B', 1): 2|-,
             Transition from ('A', 1) to ('A', 1): 1|-,
             Transition from ('B', 1) to ('A', 1): 3|-]
            sage: H1 = F.product_FiniteStateMachine(G, addition, [0, 1, 2, 3], only_accessible_components=False)
            sage: H1.states()[0].label()[0] is F.states()[0]
            True
            sage: H1.states()[0].label()[1] is G.states()[0]
            True
        ::
            sage: F = Automaton([(0,1,1/4), (0,0,3/4), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: G = Automaton([(0,0,1), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: H = F.product_FiniteStateMachine(
            ....:         G, lambda t1,t2: (t1.word_in[0]*t2.word_in[0], None))
            sage: H.states()
            [(0, 0), (1, 0)]
        ::
            sage: F = Automaton([(0,1,1/4), (0,0,3/4), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: G = Automaton([(0,0,1), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: H = F.product_FiniteStateMachine(G,
            ....:                                  lambda t1,t2: (t1.word_in[0]*t2.word_in[0], None),
            ....:                                  only_accessible_components=False)
            sage: H.states()
            [(0, 0), (1, 0), (0, 1), (1, 1)]
        Also final output words are considered according to the function
        ``final_function``::
            sage: F = Transducer([(0, 1, 0, 1), (1, 1, 1, 1), (1, 1, 0, 1)],
            ....:                final_states=[1])
            sage: F.state(1).final_word_out = 1
            sage: G = Transducer([(0, 0, 0, 1), (0, 0, 1, 0)], final_states=[0])
            sage: G.state(0).final_word_out = 1
            sage: def minus(t1, t2):
            ....:     return (t1.word_in[0] - t2.word_in[0],
            ....:                t1.word_out[0] - t2.word_out[0])
            sage: H = F.product_FiniteStateMachine(G, minus)
            Traceback (most recent call last):
            ...
            ValueError: A final function must be given.
            sage: def plus(s1, s2):
            ....:     return s1.final_word_out[0] + s2.final_word_out[0]
            sage: H = F.product_FiniteStateMachine(G, minus,
            ....:                                  final_function=plus)
            sage: H.final_states()
            [(1, 0)]
            sage: H.final_states()[0].final_word_out
            [2]
        Products of more than two finite state machines are also possible::
            sage: def plus(s1, s2, s3):
            ....:     if s1.word_in == s2.word_in == s3.word_in:
            ....:         return (s1.word_in,
            ....:                 sum(s.word_out[0] for s in (s1, s2, s3)))
            ....:     else:
            ....:         raise LookupError
            sage: T0 = transducers.CountSubblockOccurrences([0, 0], [0, 1, 2])
            sage: T1 = transducers.CountSubblockOccurrences([1, 1], [0, 1, 2])
            sage: T2 = transducers.CountSubblockOccurrences([2, 2], [0, 1, 2])
            sage: T = T0.product_FiniteStateMachine([T1, T2], plus)
            sage: T.transitions()
            [Transition from ((), (), ()) to ((0,), (), ()): 0|0,
             Transition from ((), (), ()) to ((), (1,), ()): 1|0,
             Transition from ((), (), ()) to ((), (), (2,)): 2|0,
             Transition from ((0,), (), ()) to ((0,), (), ()): 0|1,
             Transition from ((0,), (), ()) to ((), (1,), ()): 1|0,
             Transition from ((0,), (), ()) to ((), (), (2,)): 2|0,
             Transition from ((), (1,), ()) to ((0,), (), ()): 0|0,
             Transition from ((), (1,), ()) to ((), (1,), ()): 1|1,
             Transition from ((), (1,), ()) to ((), (), (2,)): 2|0,
             Transition from ((), (), (2,)) to ((0,), (), ()): 0|0,
             Transition from ((), (), (2,)) to ((), (1,), ()): 1|0,
             Transition from ((), (), (2,)) to ((), (), (2,)): 2|1]
            sage: T([0, 0, 1, 1, 2, 2, 0, 1, 2, 2])
            [0, 1, 0, 1, 0, 1, 0, 0, 0, 1]
        ``other`` can also be an iterable::
            sage: T == T0.product_FiniteStateMachine(iter([T1, T2]), plus)
            True
        TESTS:
        Check that colors are correctly dealt with. In particular, the
        new colors have to be hashable such that
        :meth:`Automaton.determinisation` does not fail::
            sage: A = Automaton([[0, 0, 0]], initial_states=[0])
            sage: B = A.product_FiniteStateMachine(A,
            ....:                                  lambda t1, t2: (0, None))
            sage: B.states()[0].color
            (None, None)
            sage: B.determinisation()
            Automaton with 1 states
        Check handling of the parameter ``other``::
            sage: A.product_FiniteStateMachine(None, plus)
            Traceback (most recent call last):
            ...
            ValueError: other must be a finite state machine or a list
            of finite state machines.
            sage: A.product_FiniteStateMachine([None], plus)
            Traceback (most recent call last):
            ...
            ValueError: other must be a finite state machine or a list
            of finite state machines.
        Test whether ``new_class`` works::
            sage: T = Transducer()
            sage: type(T.product_FiniteStateMachine(T, None))
            <class 'sage.combinat.finite_state_machine.Transducer'>
            sage: type(T.product_FiniteStateMachine(T, None,
            ....:                                   new_class=Automaton))
            <class 'sage.combinat.finite_state_machine.Automaton'>
        """
        def default_final_function(*args):
            # Default policy: the final output is empty iff every
            # constituent final output is empty; otherwise the caller must
            # supply ``final_function`` explicitly.
            if any(s.final_word_out for s in args):
                raise ValueError("A final function must be given.")
            return []
        if final_function is None:
            final_function = default_final_function
        result = self.empty_copy(new_class=new_class)
        if new_input_alphabet is not None:
            result.input_alphabet = new_input_alphabet
        else:
            # Alphabet unknown for now; it is recomputed below when
            # accessible components are requested.
            result.input_alphabet = None
        # Normalize ``other`` to the full list of factor machines.
        if hasattr(other, '__iter__'):
            machines = [self]
            machines.extend(other)
            if not all(is_FiniteStateMachine(m) for m in machines):
                raise ValueError("other must be a finite state machine "
                                 "or a list of finite state machines.")
        elif is_FiniteStateMachine(other):
            machines = [self, other]
        else:
            raise ValueError("other must be a finite state machine or "
                             "a list of finite state machines.")
        # Cartesian product of the transition sets: a product transition
        # exists iff ``function`` does not raise a LookupError.
        for transitions in itertools.product(
            *(m.iter_transitions() for m in machines)):
            try:
                word = function(*transitions)
            except LookupError:
                continue
            result.add_transition(tuple(t.from_state for t in transitions),
                                  tuple(t.to_state for t in transitions),
                                  word[0], word[1])
        # A product state is initial/final iff all of its components are;
        # its color is the tuple of the components' colors (hashable).
        for state in result.states():
            if all(s.is_initial for s in state.label()):
                state.is_initial = True
            if all(s.is_final for s in state.label()):
                state.is_final = True
                state.final_word_out = final_function(*state.label())
            state.color = tuple(s.color for s in state.label())
        if only_accessible_components:
            if result.input_alphabet is None:
                result.determine_alphabets()
            return result.accessible_components()
        else:
            return result
def composition(self, other, algorithm=None,
                only_accessible_components=True):
    """
    Returns a new transducer which is the composition of ``self``
    and ``other``.

    INPUT:

    - ``other`` -- a transducer

    - ``algorithm`` -- can be one of the following

      - ``direct`` -- The composition is calculated directly.

        There can be arbitrarily many initial and final states,
        but the input and output labels must have length 1.

        WARNING: The output of other is fed into self.

      - ``explorative`` -- An explorative algorithm is used.

        At least the following restrictions apply, but are not
        checked:

        - both self and other have exactly one initial state
        - all input labels of transitions have length exactly 1

        The input alphabet of self has to be specified.

        This is a very limited implementation of composition.
        WARNING: The output of ``other`` is fed into ``self``.

      If algorithm is ``None``, then the algorithm is chosen
      automatically (at the moment always ``direct``).

    OUTPUT:

    A new transducer.

    The labels of the new finite state machine are pairs of states
    of the original finite state machines. The color of a new
    state is the tuple of colors of the constituent states.

    EXAMPLES::

        sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
        ....:                initial_states=['A', 'B'], final_states=['B'],
        ....:                determine_alphabets=True)
        sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
        ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
        ....:                initial_states=[1], final_states=[2],
        ....:                determine_alphabets=True)
        sage: Hd = F.composition(G, algorithm='direct')
        sage: Hd.initial_states()
        [(1, 'B'), (1, 'A')]
        sage: Hd.transitions()
        [Transition from (1, 'B') to (1, 'A'): 1|1,
         Transition from (1, 'A') to (2, 'B'): 0|0,
         Transition from (2, 'B') to (2, 'A'): 0|1,
         Transition from (2, 'A') to (2, 'B'): 1|0]

    ::

        sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
        ....:                 ('B', 'B', 0, 0)],
        ....:                initial_states=['A'], final_states=['B'])
        sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
        ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
        ....:                initial_states=[1], final_states=[1])
        sage: He = G.composition(F, algorithm='explorative')
        sage: He.transitions()
        [Transition from ('A', 1) to ('B', 2): 1|0,1,
         Transition from ('B', 2) to ('B', 2): 0|1,
         Transition from ('B', 2) to ('B', 1): 1|1,
         Transition from ('B', 1) to ('B', 1): 0|0,
         Transition from ('B', 1) to ('B', 2): 1|0]

    Also final output words are considered if ``algorithm='direct'`` or
    ``None``::

        sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
        ....:                initial_states=['A', 'B'],
        ....:                final_states=['A', 'B'])
        sage: F.state('A').final_word_out = 0
        sage: F.state('B').final_word_out = 1
        sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
        ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
        ....:                initial_states=[1], final_states=[2])
        sage: G.state(2).final_word_out = 0
        sage: Hd = F.composition(G, algorithm='direct')
        sage: Hd.final_states()
        [(2, 'B')]

    Note that ``(2, 'A')`` is not final, as the final output `0`
    of state `2` of `G` cannot be processed in state ``'A'`` of
    `F`.

    ::

        sage: [s.final_word_out for s in Hd.final_states()]
        [[1, 0]]

    Be aware that after composition, different transitions may
    share the same output label (same python object)::

        sage: F = Transducer([ ('A','B',0,0), ('B','A',0,0)],
        ....:                initial_states=['A'],
        ....:                final_states=['A'])
        sage: F.transitions()[0].word_out is F.transitions()[1].word_out
        False
        sage: G = Transducer([('C','C',0,1)],
        ....:                initial_states=['C'],
        ....:                final_states=['C'])
        sage: H = G.composition(F)
        sage: H.transitions()[0].word_out is H.transitions()[1].word_out
        True

    In the explorative algorithm, transducers with non-empty final
    output words are currently not implemented::

        sage: A = transducers.GrayCode()
        sage: B = transducers.abs([0, 1])
        sage: A.composition(B, algorithm='explorative')
        Traceback (most recent call last):
        ...
        NotImplementedError: Explorative composition is not
        implemented for transducers with non-empty final output
        words. Try the direct algorithm instead.

    Similarly, the explorative algorithm cannot handle
    non-deterministic finite state machines::

        sage: A = Transducer([(0, 0, 0, 0), (0, 1, 0, 0)])
        sage: B = transducers.Identity([0])
        sage: A.composition(B, algorithm='explorative')
        Traceback (most recent call last):
        ...
        NotImplementedError: Explorative composition is currently
        not implemented for non-deterministic transducers.
        sage: B.composition(A, algorithm='explorative')
        Traceback (most recent call last):
        ...
        NotImplementedError: Explorative composition is currently
        not implemented for non-deterministic transducers.

    TESTS:

    Due to the limitations of the two algorithms the following
    (examples from above, but different algorithm used) does not
    give a full answer or does not work.

    In the following, ``algorithm='explorative'`` is inadequate,
    as ``F`` has more than one initial state::

        sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
        ....:                initial_states=['A', 'B'], final_states=['B'],
        ....:                determine_alphabets=True)
        sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
        ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
        ....:                initial_states=[1], final_states=[2],
        ....:                determine_alphabets=True)
        sage: He = F.composition(G, algorithm='explorative')
        sage: He.initial_states()
        [(1, 'A')]
        sage: He.transitions()
        [Transition from (1, 'A') to (2, 'B'): 0|0,
         Transition from (2, 'B') to (2, 'A'): 0|1,
         Transition from (2, 'A') to (2, 'B'): 1|0]

    In the following example, ``algorithm='direct'`` is inappropriate
    as there are edges with output labels of length greater than 1::

        sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
        ....:                 ('B', 'B', 0, 0)],
        ....:                initial_states=['A'], final_states=['B'])
        sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
        ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
        ....:                initial_states=[1], final_states=[1])
        sage: Hd = G.composition(F, algorithm='direct')

    In the following examples, we compose transducers and automata
    and check whether the types are correct. ::

        sage: from sage.combinat.finite_state_machine import (
        ....:     is_Automaton, is_Transducer)
        sage: T = Transducer([(0, 0, 0, 0)], initial_states=[0])
        sage: A = Automaton([(0, 0, 0)], initial_states=[0])
        sage: is_Transducer(T.composition(T, algorithm='direct'))
        True
        sage: is_Transducer(T.composition(T, algorithm='explorative'))
        True
        sage: T.composition(A, algorithm='direct')
        Traceback (most recent call last):
        ...
        TypeError: Composition with automaton is not possible.
        sage: T.composition(A, algorithm='explorative')
        Traceback (most recent call last):
        ...
        TypeError: Composition with automaton is not possible.
        sage: A.composition(A, algorithm='direct')
        Traceback (most recent call last):
        ...
        TypeError: Composition with automaton is not possible.
        sage: A.composition(A, algorithm='explorative')
        Traceback (most recent call last):
        ...
        TypeError: Composition with automaton is not possible.
        sage: is_Automaton(A.composition(T, algorithm='direct'))
        True
        sage: is_Automaton(A.composition(T, algorithm='explorative'))
        True
    """
    # Composition feeds the output of ``other`` into ``self``, so
    # ``other`` must produce output; machines flagged with
    # ``_allow_composition_ == False`` (automata) are rejected.
    if not other._allow_composition_:
        raise TypeError("Composition with automaton is not "
                        "possible.")

    if algorithm is None:
        # Automatic choice: currently always the direct algorithm.
        algorithm = 'direct'
    if algorithm == 'direct':
        return self._composition_direct_(other, only_accessible_components)
    elif algorithm == 'explorative':
        return self._composition_explorative_(other)
    else:
        raise ValueError("Unknown algorithm %s." % (algorithm,))
def _composition_direct_(self, other, only_accessible_components=True):
    """
    Direct composition: realized as a product machine whose
    transitions pair up a transition of ``other`` with a transition
    of ``self`` whenever the output of the former equals the input
    of the latter.

    See :meth:`.composition` for details.

    TESTS::

        sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
        ....:                initial_states=['A', 'B'], final_states=['B'],
        ....:                determine_alphabets=True)
        sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
        ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
        ....:                initial_states=[1], final_states=[2],
        ....:                determine_alphabets=True)
        sage: Hd = F._composition_direct_(G)
        sage: Hd.initial_states()
        [(1, 'B'), (1, 'A')]
        sage: Hd.transitions()
        [Transition from (1, 'B') to (1, 'A'): 1|1,
         Transition from (1, 'A') to (2, 'B'): 0|0,
         Transition from (2, 'B') to (2, 'A'): 0|1,
         Transition from (2, 'A') to (2, 'B'): 1|0]
    """
    def function(transition1, transition2):
        # Two transitions compose iff the output of ``other``'s
        # transition matches the input of ``self``'s transition.
        # Raising LookupError tells product_FiniteStateMachine to
        # skip this pair of transitions.
        if transition1.word_out == transition2.word_in:
            return (transition1.word_in, transition2.word_out)
        else:
            raise LookupError

    result = other.product_FiniteStateMachine(
        self, function,
        only_accessible_components=only_accessible_components,
        final_function=lambda s1, s2: [],
        new_class=self.__class__)

    # Fix up final states: a product state (s, t) is final only if the
    # final output word of s (a state of ``other``) can be processed by
    # ``self`` when started in t; the resulting output becomes the final
    # output word of the product state.
    for state_result in result.iter_states():
        state = state_result.label()[0]
        if state.is_final:
            accept, state_to, output = self.process(
                state.final_word_out,
                initial_state=self.state(state_result.label()[1]))
            if not accept:
                state_result.is_final = False
            else:
                state_result.is_final = True
                state_result.final_word_out = output

    return result
def _composition_explorative_(self, other):
    """
    Explorative composition: starting from the pair of initial
    states, transitions of the composition are discovered on the fly
    by feeding each output letter of ``other`` into ``self``.

    See :meth:`.composition` for details.

    TESTS::

        sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
        ....:                 ('B', 'B', 0, 0)],
        ....:                initial_states=['A'], final_states=['B'])
        sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
        ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
        ....:                initial_states=[1], final_states=[1])
        sage: He = G._composition_explorative_(F)
        sage: He.transitions()
        [Transition from ('A', 1) to ('B', 2): 1|0,1,
         Transition from ('B', 2) to ('B', 2): 0|1,
         Transition from ('B', 2) to ('B', 1): 1|1,
         Transition from ('B', 1) to ('B', 1): 0|0,
         Transition from ('B', 1) to ('B', 2): 1|0]

    Check that colors are correctly dealt with. In particular, the
    new colors have to be hashable such that
    :meth:`Automaton.determinisation` does not fail::

        sage: T = Transducer([[0, 0, 0, 0]], initial_states=[0])
        sage: A = T.input_projection()
        sage: B = A.composition(T, algorithm='explorative')
        sage: B.states()[0].color
        (None, None)
        sage: B.determinisation()
        Automaton with 1 states

    .. TODO::

        The explorative algorithm should be re-implemented using the
        process iterators of both finite state machines.
    """
    def composition_transition(state, input):
        # ``state`` is a pair (state of other, state of self).
        # Find the unique transition of ``other`` consuming ``input``
        # (determinism is checked by the caller below).
        (state1, state2) = state
        transition1 = None
        for transition in other.iter_transitions(state1):
            if transition.word_in == [input]:
                transition1 = transition
                break
        if transition1 is None:
            # No transition: add_from_transition_function skips
            # this input on a LookupError.
            raise LookupError
        new_state1 = transition1.to_state
        new_state2 = state2
        output = []
        # Feed each output letter of other's transition into self,
        # following transitions letter by letter and collecting the
        # combined output.
        for o in transition1.word_out:
            transition2 = None
            for transition in self.iter_transitions(new_state2):
                if transition.word_in == [o]:
                    transition2 = transition
                    break
            if transition2 is None:
                raise LookupError
            new_state2 = transition2.to_state
            output += transition2.word_out
        return ((new_state1, new_state2), output)

    # Final output words are not supported by this algorithm.
    if any(s.final_word_out for s in self.iter_final_states()) or \
            any(s.final_word_out for s in other.iter_final_states()):
        raise NotImplementedError("Explorative composition is not "
                                  "implemented for transducers with "
                                  "non-empty final output words. Try "
                                  "the direct algorithm instead.")
    # composition_transition assumes at most one transition per
    # (state, input letter), hence determinism of both machines.
    if not self.is_deterministic() or not other.is_deterministic():
        raise NotImplementedError("Explorative composition is "
                                  "currently not implemented for "
                                  "non-deterministic transducers.")

    F = other.empty_copy(new_class=self.__class__)
    new_initial_states = [(other.initial_states()[0], self.initial_states()[0])]
    F.add_from_transition_function(composition_transition,
                                   initial_states=new_initial_states)

    # A product state is final iff all of its components are; the new
    # color is the (hashable) tuple of component colors.
    for state in F.states():
        if all(map(lambda s: s.is_final, state.label())):
            state.is_final = True
        state.color = tuple(map(lambda s: s.color, state.label()))

    return F
def input_projection(self):
    """
    Returns an automaton where the output of each transition of
    self is deleted.

    INPUT:

    Nothing

    OUTPUT:

    An automaton.

    EXAMPLES::

        sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
        ....:                         ('B', 'B', 1, 0)])
        sage: G = F.input_projection()
        sage: G.transitions()
        [Transition from 'A' to 'B': 0|-,
         Transition from 'A' to 'A': 1|-,
         Transition from 'B' to 'B': 1|-]
    """
    # Delegate to the generic projection with the input component.
    return self.projection('input')
def output_projection(self):
    """
    Returns a automaton where the input of each transition of self
    is deleted and the new input is the original output.

    INPUT:

    Nothing

    OUTPUT:

    An automaton.

    EXAMPLES::

        sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
        ....:                         ('B', 'B', 1, 0)])
        sage: G = F.output_projection()
        sage: G.transitions()
        [Transition from 'A' to 'B': 1|-,
         Transition from 'A' to 'A': 1|-,
         Transition from 'B' to 'B': 0|-]

    Final output words are also considered correctly::

        sage: H = Transducer([('A', 'B', 0, 1), ('A', 'A', 1, 1),
        ....:                 ('B', 'B', 1, 0), ('A', ('final', 0), 0, 0)],
        ....:                final_states=['A', 'B'])
        sage: H.state('B').final_word_out = 2
        sage: J = H.output_projection()
        sage: J.states()
        ['A', 'B', ('final', 0), ('final', 1)]
        sage: J.transitions()
        [Transition from 'A' to 'B': 1|-,
         Transition from 'A' to 'A': 1|-,
         Transition from 'A' to ('final', 0): 0|-,
         Transition from 'B' to 'B': 0|-,
         Transition from 'B' to ('final', 1): 2|-]
        sage: J.final_states()
        ['A', ('final', 1)]
    """
    # Delegate to the generic projection with the output component.
    return self.projection('output')
def projection(self, what='input'):
    """
    Returns an Automaton which transition labels are the projection
    of the transition labels of the input.

    INPUT:

    - ``what`` -- (default: ``input``) either ``input`` or ``output``.

    OUTPUT:

    An automaton.

    EXAMPLES::

        sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
        ....:                         ('B', 'B', 1, 0)])
        sage: G = F.projection(what='output')
        sage: G.transitions()
        [Transition from 'A' to 'B': 1|-,
         Transition from 'A' to 'A': 1|-,
         Transition from 'B' to 'B': 0|-]
    """
    new = Automaton()
    # TODO: use empty_copy() in order to
    # preserve on_duplicate_transition and future extensions.
    # for this, empty_copy would need a new optional argument
    # use_class=None ?

    # The projected machine reads whichever component we keep.
    if what == 'input':
        new.input_alphabet = copy(self.input_alphabet)
    elif what == 'output':
        new.input_alphabet = copy(self.output_alphabet)
    else:
        raise NotImplementedError

    # Deep-copy all states and remember which new state corresponds
    # to which old one (labels alone would not preserve attributes).
    state_mapping = {}
    for state in self.iter_states():
        state_mapping[state] = new.add_state(deepcopy(state))
    for transition in self.iter_transitions():
        if what == 'input':
            new_word_in = transition.word_in
        elif what == 'output':
            new_word_in = transition.word_out
        else:
            raise NotImplementedError
        new.add_transition((state_mapping[transition.from_state],
                            state_mapping[transition.to_state],
                            new_word_in, None))

    if what == 'output':
        # Non-empty final output words become ordinary input in the
        # projection: reroute each such state via a transition reading
        # the final word into a single fresh final sink state.
        states = [s for s in self.iter_final_states() if s.final_word_out]
        if not states:
            return new
        number = 0
        # Find a label ('final', n) not already used by a state.
        while new.has_state(('final', number)):
            number += 1
        final = new.add_state(('final', number))
        final.is_final = True
        for state in states:
            output = state.final_word_out
            new.state(state_mapping[state]).final_word_out = []
            new.state(state_mapping[state]).is_final = False
            new.add_transition((state_mapping[state], final, output, None))

    return new
def transposition(self):
    """
    Returns a new finite state machine, where all transitions of the
    input finite state machine are reversed.

    INPUT:

    Nothing.

    OUTPUT:

    A new finite state machine.

    EXAMPLES::

        sage: aut = Automaton([('A', 'A', 0), ('A', 'A', 1), ('A', 'B', 0)],
        ....:                 initial_states=['A'], final_states=['B'])
        sage: aut.transposition().transitions('B')
        [Transition from 'B' to 'A': 0|-]

    ::

        sage: aut = Automaton([('1', '1', 1), ('1', '2', 0), ('2', '2', 0)],
        ....:                 initial_states=['1'], final_states=['1', '2'])
        sage: aut.transposition().initial_states()
        ['1', '2']

    TESTS:

    If a final state of ``self`` has a non-empty final output word,
    transposition is not implemented::

        sage: T = Transducer([('1', '1', 1, 0), ('1', '2', 0, 1),
        ....:                 ('2', '2', 0, 2)],
        ....:                initial_states=['1'],
        ....:                final_states=['1', '2'])
        sage: T.state('1').final_word_out = [2, 5]
        sage: T.transposition()
        Traceback (most recent call last):
        ...
        NotImplementedError: Transposition for transducers with
        final output words is not implemented.
    """
    transposition = self.empty_copy()

    # Copy states first (with all their attributes), then add every
    # transition with from_state and to_state exchanged.
    for state in self.iter_states():
        transposition.add_state(deepcopy(state))

    for transition in self.iter_transitions():
        transposition.add_transition(
            transition.to_state.label(), transition.from_state.label(),
            transition.word_in, transition.word_out)

    # Swap the roles of initial and final states. A state that was
    # both initial and final keeps both roles, hence the guards.
    for initial in self.iter_initial_states():
        state = transposition.state(initial.label())
        if not initial.is_final:
            state.is_final = True
            state.is_initial = False

    for final in self.iter_final_states():
        state = transposition.state(final.label())
        if final.final_word_out:
            # A final output word has no counterpart on the reversed
            # machine's initial side.
            raise NotImplementedError("Transposition for transducers "
                                      "with final output words is not "
                                      "implemented.")
        if not final.is_initial:
            state.is_final = False
            state.is_initial = True

    return transposition
def split_transitions(self):
    """
    Returns a new transducer, where all transitions in self with input
    labels consisting of more than one letter
    are replaced by a path of the corresponding length.

    INPUT:

    Nothing.

    OUTPUT:

    A new transducer.

    EXAMPLES::

        sage: A = Transducer([('A', 'B', [1, 2, 3], 0)],
        ....:                initial_states=['A'], final_states=['B'])
        sage: A.split_transitions().states()
        [('A', ()), ('B', ()),
         ('A', (1,)), ('A', (1, 2))]
    """
    result = self.empty_copy()
    # Original states reappear labeled (state, ()); intermediate
    # states along a split path are labeled by the prefix read so far.
    for s in self.states():
        result.add_state(FSMState((s, ()),
                                  is_initial=s.is_initial,
                                  is_final=s.is_final))
    for t in self.transitions():
        word = t.word_in
        # Chain through intermediate states for each proper prefix,
        # emitting no output along the way ...
        for k in range(len(word) - 1):
            result.add_transition(((t.from_state, tuple(word[:k])),
                                   (t.from_state, tuple(word[:k + 1])),
                                   word[k],
                                   []))
        # ... and emit the whole output word on the last letter.
        result.add_transition(((t.from_state, tuple(word[:-1])),
                               (t.to_state, ()),
                               word[-1:],
                               t.word_out))
    return result
def final_components(self):
    """
    Returns the final components of a finite state machine as finite
    state machines.

    INPUT:

    Nothing.

    OUTPUT:

    A list of finite state machines, each representing a final
    component of ``self``.

    A final component of a transducer ``T`` is a strongly connected
    component ``C`` such that there are no transitions of ``T``
    leaving ``C``.

    The final components are the only parts of a transducer which
    influence the main terms of the asympotic behaviour of the sum
    of output labels of a transducer, see [HKP2014]_ and [HKW2014]_.

    EXAMPLES::

        sage: T = Transducer([['A', 'B', 0, 0], ['B', 'C', 0, 1],
        ....:                 ['C', 'B', 0, 1], ['A', 'D', 1, 0],
        ....:                 ['D', 'D', 0, 0], ['D', 'B', 1, 0],
        ....:                 ['A', 'E', 2, 0], ['E', 'E', 0, 0]])
        sage: FC = T.final_components()
        sage: sorted(FC[0].transitions())
        [Transition from 'B' to 'C': 0|1,
         Transition from 'C' to 'B': 0|1]
        sage: FC[1].transitions()
        [Transition from 'E' to 'E': 0|0]

    Another example (cycle of length 2)::

        sage: T = Automaton([[0, 1, 0], [1, 0, 0]])
        sage: len(T.final_components()) == 1
        True
        sage: T.final_components()[0].transitions()
        [Transition from 0 to 1: 0|-,
         Transition from 1 to 0: 0|-]

    REFERENCES:

    .. [HKP2014] Clemens Heuberger, Sara Kropf, and Helmut
       Prodinger, *Asymptotic analysis of the sum of the output of
       transducer*, in preparation.
    """
    # Final components are exactly the sink vertices (out-degree 0)
    # of the condensation of the transition digraph.
    condensation = self.digraph().strongly_connected_components_digraph()
    sinks = (component for component in condensation.vertices()
             if condensation.out_degree(component) == 0)
    return [self.induced_sub_finite_state_machine(map(self.state, component))
            for component in sinks]
# *************************************************************************
# simplifications
# *************************************************************************
def prepone_output(self):
    """
    For all paths, shift the output of the path from one
    transition to the earliest possible preceeding transition of
    the path.

    INPUT:

    Nothing.

    OUTPUT:

    Nothing.

    Apply the following to each state `s` (except initial states) of the
    finite state machine as often as possible:

    If the letter `a` is a prefix of the output label of all transitions from
    `s` (including the final output of `s`), then remove it from all these
    labels and append it to all output labels of all transitions leading
    to `s`.

    We assume that the states have no output labels, but final outputs are
    allowed.

    EXAMPLES::

        sage: A = Transducer([('A', 'B', 1, 1),
        ....:                 ('B', 'B', 0, 0),
        ....:                 ('B', 'C', 1, 0)],
        ....:                initial_states=['A'],
        ....:                final_states=['C'])
        sage: A.prepone_output()
        sage: A.transitions()
        [Transition from 'A' to 'B': 1|1,0,
         Transition from 'B' to 'B': 0|0,
         Transition from 'B' to 'C': 1|-]

    ::

        sage: B = Transducer([('A', 'B', 0, 1),
        ....:                 ('B', 'C', 1, [1, 1]),
        ....:                 ('B', 'C', 0, 1)],
        ....:                initial_states=['A'],
        ....:                final_states=['C'])
        sage: B.prepone_output()
        sage: B.transitions()
        [Transition from 'A' to 'B': 0|1,1,
         Transition from 'B' to 'C': 1|1,
         Transition from 'B' to 'C': 0|-]

    If initial states are not labeled as such, unexpected results may be
    obtained::

        sage: C = Transducer([(0,1,0,0)])
        sage: C.prepone_output()
        verbose 0 (...: finite_state_machine.py, prepone_output)
        All transitions leaving state 0 have an output label with
        prefix 0. However, there is no inbound transition and it
        is not an initial state. This routine (possibly called by
        simplification) therefore erased this prefix from all
        outbound transitions.
        sage: C.transitions()
        [Transition from 0 to 1: 0|-]

    Also the final output of final states can be changed::

        sage: T = Transducer([('A', 'B', 0, 1),
        ....:                 ('B', 'C', 1, [1, 1]),
        ....:                 ('B', 'C', 0, 1)],
        ....:                initial_states=['A'],
        ....:                final_states=['B'])
        sage: T.state('B').final_word_out = [1]
        sage: T.prepone_output()
        sage: T.transitions()
        [Transition from 'A' to 'B': 0|1,1,
         Transition from 'B' to 'C': 1|1,
         Transition from 'B' to 'C': 0|-]
        sage: T.state('B').final_word_out
        []

    ::

        sage: S = Transducer([('A', 'B', 0, 1),
        ....:                 ('B', 'C', 1, [1, 1]),
        ....:                 ('B', 'C', 0, 1)],
        ....:                initial_states=['A'],
        ....:                final_states=['B'])
        sage: S.state('B').final_word_out = [0]
        sage: S.prepone_output()
        sage: S.transitions()
        [Transition from 'A' to 'B': 0|1,
         Transition from 'B' to 'C': 1|1,1,
         Transition from 'B' to 'C': 0|1]
        sage: S.state('B').final_word_out
        [0]

    Output labels do not have to be hashable::

        sage: C = Transducer([(0, 1, 0, []),
        ....:                 (1, 0, 0, [vector([0, 0]), 0]),
        ....:                 (1, 1, 1, [vector([0, 0]), 1]),
        ....:                 (0, 0, 1, 0)],
        ....:                determine_alphabets=False,
        ....:                initial_states=[0])
        sage: C.prepone_output()
        sage: sorted(C.transitions())
        [Transition from 0 to 1: 0|(0, 0),
         Transition from 0 to 0: 1|0,
         Transition from 1 to 0: 0|0,
         Transition from 1 to 1: 1|1,(0, 0)]
    """
    def find_common_output(state):
        # Return a 1-tuple with the letter that is the first letter of
        # the output of every outgoing transition (and of the final
        # output word, if ``state`` is final), or an empty tuple if no
        # such common prefix letter exists.
        # NOTE(review): relies on FSMTransition objects being truthy;
        # ``any(...)`` here tests for the existence of a transition
        # with empty output word.
        if any(itertools.ifilter(
                lambda transition: not transition.word_out,
                self.transitions(state))) \
               or state.is_final and not state.final_word_out:
            return tuple()
        first_letters = map(lambda transition: transition.word_out[0],
                            self.transitions(state))
        if state.is_final:
            first_letters = first_letters + [state.final_word_out[0]]
        if not first_letters:
            return tuple()
        first_item = first_letters.pop()
        if all([item == first_item for item in first_letters]):
            return (first_item,)
        return tuple()

    # Iterate to a fixed point: each pass moves at most one letter per
    # state; repeat until no state changes any more.
    changed = 1
    iteration = 0
    while changed > 0:
        changed = 0
        iteration += 1
        for state in self.iter_states():
            # Initial states must keep their output: there is no
            # earlier transition to shift it to.
            if state.is_initial:
                continue
            if state.word_out:
                raise NotImplementedError(
                    "prepone_output assumes that all states have "
                    "empty output word, but state %s has output "
                    "word %s" % (state, state.word_out))
            common_output = find_common_output(state)
            if common_output:
                changed += 1
                # Strip the common first letter from the final output
                # word and from every outgoing transition ...
                if state.is_final:
                    assert state.final_word_out[0] == common_output[0]
                    state.final_word_out = state.final_word_out[1:]
                for transition in self.transitions(state):
                    assert transition.word_out[0] == common_output[0]
                    transition.word_out = transition.word_out[1:]
                # ... and append it to every inbound transition.
                found_inbound_transition = False
                for transition in self.iter_transitions():
                    if transition.to_state == state:
                        transition.word_out = transition.word_out \
                            + [common_output[0]]
                        found_inbound_transition = True
                if not found_inbound_transition:
                    # The letter was erased but could not be moved
                    # anywhere; warn because this changes the output
                    # of paths through this state.
                    verbose(
                        "All transitions leaving state %s have an "
                        "output label with prefix %s. However, "
                        "there is no inbound transition and it is "
                        "not an initial state. This routine "
                        "(possibly called by simplification) "
                        "therefore erased this prefix from all "
                        "outbound transitions." %
                        (state, common_output[0]),
                        level=0)
def equivalence_classes(self):
    r"""
    Returns a list of equivalence classes of states.

    INPUT:

    Nothing.

    OUTPUT:

    A list of equivalence classes of states.

    Two states `a` and `b` are equivalent if and only if there is
    a bijection `\varphi` between paths starting at `a` and paths
    starting at `b` with the following properties: Let `p_a` be a
    path from `a` to `a'` and `p_b` a path from `b` to `b'` such
    that `\varphi(p_a)=p_b`, then

    - `p_a.\mathit{word}_\mathit{in}=p_b.\mathit{word}_\mathit{in}`,
    - `p_a.\mathit{word}_\mathit{out}=p_b.\mathit{word}_\mathit{out}`,
    - `a'` and `b'` have the same output label, and
    - `a'` and `b'` are both final or both non-final and have the
      same final output word.

    The function :meth:`.equivalence_classes` returns a list of
    the equivalence classes to this equivalence relation.

    This is one step of Moore's minimization algorithm.

    .. SEEALSO::

        :meth:`.minimization`

    EXAMPLES::

        sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
        ....:                           ("B", "C", 0, 0), ("B", "C", 1, 1),
        ....:                           ("C", "D", 0, 1), ("C", "D", 1, 0),
        ....:                           ("D", "A", 0, 0), ("D", "A", 1, 1)])
        sage: sorted(fsm.equivalence_classes())
        [['A', 'C'], ['B', 'D']]
        sage: fsm.state("A").is_final = True
        sage: sorted(fsm.equivalence_classes())
        [['A'], ['B'], ['C'], ['D']]
        sage: fsm.state("C").is_final = True
        sage: sorted(fsm.equivalence_classes())
        [['A', 'C'], ['B', 'D']]
        sage: fsm.state("A").final_word_out = 1
        sage: sorted(fsm.equivalence_classes())
        [['A'], ['B'], ['C'], ['D']]
        sage: fsm.state("C").final_word_out = 1
        sage: sorted(fsm.equivalence_classes())
        [['A', 'C'], ['B', 'D']]
    """
    # Moore's partition-refinement algorithm.
    #
    # Two states `a` and `b` are j-equivalent if and only if there
    # is a bijection `\varphi` between paths of length <= j
    # starting at `a` and paths starting at `b` with the following
    # properties: Let `p_a` be a path from `a` to `a'` and `p_b` a
    # path from `b` to `b'` such that `\varphi(p_a)=p_b`, then
    #
    # - `p_a.\mathit{word}_{in}=p_b.\mathit{word}_{in}`,
    # - `p_a.\mathit{word}_{out}=p_b.\mathit{word}_{out}`,
    # - `a'` and `b'` have the same output label, and
    # - `a'` and `b'` are both final or both non-final.

    # If for some j the relations j-1 equivalent and j-equivalent
    # coincide, then they are equal to the equivalence relation
    # described in the docstring.

    # classes_current holds the equivalence classes of
    # j-equivalence, classes_previous holds the equivalence
    # classes of j-1 equivalence.

    # initialize with 0-equivalence: group by the locally observable
    # data of a single state.
    classes_previous = []
    key_0 = lambda state: (state.is_final, state.color, state.word_out,
                           state.final_word_out)
    states_grouped = full_group_by(self.states(), key=key_0)
    classes_current = [equivalence_class for
                       (key,equivalence_class) in states_grouped]

    # Refine until the number of classes stabilizes (each pass can
    # only split classes, never merge them).
    while len(classes_current) != len(classes_previous):
        class_of = {}
        classes_previous = classes_current
        classes_current = []

        for k in range(len(classes_previous)):
            for state in classes_previous[k]:
                class_of[state] = k

        # Split each class by the (sorted) signature of outgoing
        # transitions, where target states are identified by their
        # previous class index.
        key_current = lambda state: sorted(
            [(transition.word_in,
              transition.word_out,
              class_of[transition.to_state])
             for transition in state.transitions])

        for class_previous in classes_previous:
            states_grouped = full_group_by(class_previous, key=key_current)
            classes_current.extend([equivalence_class for
                                   (key,equivalence_class) in states_grouped])

    return classes_current
def quotient(self, classes):
    r"""
    Constructs the quotient with respect to the equivalence
    classes.

    INPUT:

    - ``classes`` is a list of equivalence classes of states.

    OUTPUT:

    A finite state machine.

    The labels of the new states are tuples of states of the
    ``self``, corresponding to ``classes``.

    Assume that `c` is a class, and `a` and `b` are states in
    `c`. Then there is a bijection `\varphi` between the
    transitions from `a` and the transitions from `b` with the
    following properties: if `\varphi(t_a)=t_b`, then

    - `t_a.\mathit{word}_\mathit{in}=t_b.\mathit{word}_\mathit{in}`,
    - `t_a.\mathit{word}_\mathit{out}=t_b.\mathit{word}_\mathit{out}`, and
    - `t_a` and `t_b` lead to some equivalent states `a'` and `b'`.

    Non-initial states may be merged with initial states, the
    resulting state is an initial state.

    All states in a class must have the same ``is_final``,
    ``final_word_out`` and ``word_out`` values.

    EXAMPLES::

        sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
        ....:                           ("B", "C", 0, 0), ("B", "C", 1, 1),
        ....:                           ("C", "D", 0, 1), ("C", "D", 1, 0),
        ....:                           ("D", "A", 0, 0), ("D", "A", 1, 1)])
        sage: fsmq = fsm.quotient([[fsm.state("A"), fsm.state("C")],
        ....:                      [fsm.state("B"), fsm.state("D")]])
        sage: fsmq.transitions()
        [Transition from ('A', 'C')
                      to ('B', 'D'): 0|1,
         Transition from ('A', 'C')
                      to ('B', 'D'): 1|0,
         Transition from ('B', 'D')
                      to ('A', 'C'): 0|0,
         Transition from ('B', 'D')
                      to ('A', 'C'): 1|1]
        sage: fsmq.relabeled().transitions()
        [Transition from 0 to 1: 0|1,
         Transition from 0 to 1: 1|0,
         Transition from 1 to 0: 0|0,
         Transition from 1 to 0: 1|1]
        sage: fsmq1 = fsm.quotient(fsm.equivalence_classes())
        sage: fsmq1 == fsmq
        True
        sage: fsm.quotient([[fsm.state("A"), fsm.state("B"), fsm.state("C"), fsm.state("D")]])
        Traceback (most recent call last):
        ...
        AssertionError: Transitions of state 'A' and 'B' are incompatible.

    TESTS::

        sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
        ....:                           ("B", "C", 0, 0), ("B", "C", 1, 1),
        ....:                           ("C", "D", 0, 1), ("C", "D", 1, 0),
        ....:                           ("D", "A", 0, 0), ("D", "A", 1, 1)],
        ....:                          final_states=["A", "C"])
        sage: fsm.state("A").final_word_out = 1
        sage: fsm.state("C").final_word_out = 2
        sage: fsmq = fsm.quotient([[fsm.state("A"), fsm.state("C")],
        ....:                      [fsm.state("B"), fsm.state("D")]])
        Traceback (most recent call last):
        ...
        AssertionError: Class ['A', 'C'] mixes
        final states with different final output words.
    """
    new = self.empty_copy()
    state_mapping = {}

    # Create new states and build state_mapping
    for c in classes:
        new_label = tuple(c)
        # The first class member serves as the representative; its
        # attributes (is_final, word_out, color, ...) are inherited.
        new_state = c[0].relabeled(new_label)
        new.add_state(new_state)
        for state in c:
            state_mapping[state] = new_state

    # Copy data from old transducer
    for c in classes:
        new_state = state_mapping[c[0]]
        # Signature of the representative's transitions, modulo the
        # class mapping; used below to verify compatibility.
        sorted_transitions = sorted(
            [(state_mapping[t.to_state], t.word_in, t.word_out)
             for t in c[0].transitions])
        for transition in self.iter_transitions(c[0]):
            new.add_transition(
                from_state = new_state,
                to_state = state_mapping[transition.to_state],
                word_in = transition.word_in,
                word_out = transition.word_out)

        # check that all class members have the same information (modulo classes)
        for state in c:
            # A class containing any initial state yields an initial
            # quotient state.
            new_state.is_initial = new_state.is_initial or state.is_initial
            assert new_state.is_final == state.is_final, \
                "Class %s mixes final and non-final states" % (c,)
            assert new_state.word_out == state.word_out, \
                "Class %s mixes different word_out" % (c,)
            assert new_state.color == state.color, \
                "Class %s mixes different colors" % (c,)
            assert sorted_transitions == sorted(
                [(state_mapping[t.to_state], t.word_in, t.word_out)
                 for t in state.transitions]), \
                "Transitions of state %s and %s are incompatible." % (c[0], state)
            assert new_state.final_word_out == state.final_word_out, \
                "Class %s mixes final states with different " \
                "final output words." % (c,)
    return new
def merged_transitions(self):
    """
    Merges transitions which have the same ``from_state``,
    ``to_state`` and ``word_out`` while adding their ``word_in``.

    INPUT:

    Nothing.

    OUTPUT:

    A finite state machine with merged transitions. If no mergers occur,
    return ``self``.

    EXAMPLE::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: T = Transducer([[1, 2, 1/4, 1], [1, -2, 1/4, 1], [1, -2, 1/2, 1],
        ....:                 [2, 2, 1/4, 1], [2, -2, 1/4, 1], [-2, -2, 1/4, 1],
        ....:                 [-2, 2, 1/4, 1], [2, 3, 1/2, 1], [-2, 3, 1/2, 1]],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: T1 = T.merged_transitions()
        sage: T1 is T
        False
        sage: sorted(T1.transitions())
        [Transition from -2 to -2: 1/4|1,
         Transition from -2 to 2: 1/4|1,
         Transition from -2 to 3: 1/2|1,
         Transition from 1 to 2: 1/4|1,
         Transition from 1 to -2: 3/4|1,
         Transition from 2 to -2: 1/4|1,
         Transition from 2 to 2: 1/4|1,
         Transition from 2 to 3: 1/2|1]

    Applying the function again does not change the result::

        sage: T2 = T1.merged_transitions()
        sage: T2 is T1
        True
    """
    def key(transition):
        # Transitions sharing this key are candidates for merging.
        return (transition.to_state, transition.word_out)

    new = self.empty_copy()
    changed = False
    state_dict = {}
    # Shared memo so references between deep-copied states stay
    # consistent across the whole copy.
    memo = {}

    for state in self.states():
        new_state = deepcopy(state,memo)
        state_dict[state] = new_state
        new.add_state(new_state)

    for state in self.states():
        # groupby needs its input sorted by the same key.
        grouped_transitions = itertools.groupby(sorted(state.transitions, key=key), key=key)
        for (to_state, word_out), transitions in grouped_transitions:
            transition_list = list(transitions)
            changed = changed or len(transition_list) > 1
            # Input labels are summed numerically: this method views
            # the machine as a Markov chain whose word_in entries are
            # probabilities (single-letter, numeric).
            word_in = 0
            for transition in transition_list:
                if hasattr(transition.word_in, '__iter__') and len(transition.word_in) == 1:
                    word_in += transition.word_in[0]
                else:
                    raise TypeError('%s does not have a list of length 1 as word_in' % transition)
            new.add_transition((state, to_state, word_in, word_out))

    if changed:
        return new
    else:
        # Nothing merged: hand back ``self`` so callers can detect a
        # fixed point via identity (see markov_chain_simplification).
        return self
def markov_chain_simplification(self):
"""
Consider ``self`` as Markov chain with probabilities as input labels
and simplify it.
INPUT:
Nothing.
OUTPUT:
Simplified version of ``self``.
EXAMPLE::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: T = Transducer([[1, 2, 1/4, 0], [1, -2, 1/4, 0], [1, -2, 1/2, 0],
....: [2, 2, 1/4, 1], [2, -2, 1/4, 1], [-2, -2, 1/4, 1],
....: [-2, 2, 1/4, 1], [2, 3, 1/2, 2], [-2, 3, 1/2, 2]],
....: initial_states=[1],
....: final_states=[3],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: T1 = T.markov_chain_simplification()
sage: sorted(T1.transitions())
[Transition from ((1,),) to ((2, -2),): 1|0,
Transition from ((2, -2),) to ((2, -2),): 1/2|1,
Transition from ((2, -2),) to ((3,),): 1/2|2]
"""
current = self.merged_transitions()
number_states = len(current.states())
while True:
current = current.simplification()
new_number_states = len(current.states())
new = current.merged_transitions()
if new is current and number_states == new_number_states:
return new
current = new
number_states = new_number_states
    def with_final_word_out(self, letters, allow_non_final=True):
        """
        Constructs a new finite state machine with final output words
        for all states by implicitly reading trailing letters until a
        final state is reached.

        INPUT:

        - ``letters`` -- either an element of the input alphabet or a
          list of such elements. This is repeated cyclically when
          needed.

        - ``allow_non_final`` -- a boolean (default: ``True``) which
          indicates whether we allow that some states may be non-final
          in the resulting finite state machine. I.e., if ``False`` then
          each state has to have a path to a final state with input
          label matching ``letters``.

        OUTPUT:

        A finite state machine.

        The inplace version of this function is
        :meth:`.construct_final_word_out`.

        Suppose for the moment a single element ``letter`` as input
        for ``letters``. This is equivalent to ``letters = [letter]``.
        We will discuss the general case below.

        Let ``word_in`` be a word over the input alphabet and assume
        that the original finite state machine transforms ``word_in`` to
        ``word_out`` reaching a possibly non-final state ``s``. Let
        further `k` be the minimum number of letters ``letter`` such
        that there is a path from ``s`` to some final state ``f`` whose
        input label consists of `k` copies of ``letter`` and whose
        output label is ``path_word_out``. Then the state ``s`` of the
        resulting finite state machine is a final state with final
        output ``path_word_out + f.final_word_out``. Therefore, the new
        finite state machine transforms ``word_in`` to ``word_out +
        path_word_out + f.final_word_out``.

        This is e.g. useful for finite state machines operating on digit
        expansions: there, it is sometimes required to read a sufficient
        number of trailing zeros (at the most significant positions) in
        order to reach a final state and to flush all carries. In this
        case, this method constructs an essentially equivalent finite
        state machine in the sense that it not longer requires adding
        sufficiently many trailing zeros. However, it is the
        responsibility of the user to make sure that if adding trailing
        zeros to the input anyway, the output is equivalent.

        If ``letters`` consists of more than one letter, then it is
        assumed that (not necessarily complete) cycles of ``letters``
        are appended as trailing input.

        .. SEEALSO::

            :ref:`example on Gray code <finite_state_machine_gray_code_example>`

        EXAMPLES:

        #.  A simple transducer transforming `00` blocks to `01`
            blocks::

                sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 1)],
                ....:                initial_states=[0],
                ....:                final_states=[0])
                sage: T.process([0, 0, 0])
                (False, 1, [0, 1, 0])
                sage: T.process([0, 0, 0, 0])
                (True, 0, [0, 1, 0, 1])
                sage: F = T.with_final_word_out(0)
                sage: for f in F.iter_final_states():
                ....:     print f, f.final_word_out
                0 []
                1 [1]
                sage: F.process([0, 0, 0])
                (True, 1, [0, 1, 0, 1])
                sage: F.process([0, 0, 0, 0])
                (True, 0, [0, 1, 0, 1])

        #.  A more realistic example: Addition of `1` in binary. We
            construct a transition function transforming the input
            to its binary expansion::

                sage: def binary_transition(carry, input):
                ....:     value = carry + input
                ....:     if value.mod(2) == 0:
                ....:         return (value/2, 0)
                ....:     else:
                ....:         return ((value-1)/2, 1)

            Now, we only have to start with a carry of `1` to
            get the required transducer::

                sage: T = Transducer(binary_transition,
                ....:                input_alphabet=[0, 1],
                ....:                initial_states=[1],
                ....:                final_states=[0])

            We test this for the binary expansion of `7`::

                sage: T.process([1, 1, 1])
                (False, 1, [0, 0, 0])

            The final carry `1` has not be flushed yet, we have to add a
            trailing zero::

                sage: T.process([1, 1, 1, 0])
                (True, 0, [0, 0, 0, 1])

            We check that with this trailing zero, the transducer
            performs as advertised::

                sage: all(ZZ(T(k.bits()+[0]), base=2) == k + 1
                ....:     for k in srange(16))
                True

            However, most of the time, we produce superfluous trailing
            zeros::

                sage: T(11.bits()+[0])
                [0, 0, 1, 1, 0]

            We now use this method::

                sage: F = T.with_final_word_out(0)
                sage: for f in F.iter_final_states():
                ....:     print f, f.final_word_out
                1 [1]
                0 []

            The same tests as above, but we do not have to pad with
            trailing zeros anymore::

                sage: F.process([1, 1, 1])
                (True, 1, [0, 0, 0, 1])
                sage: all(ZZ(F(k.bits()), base=2) == k + 1
                ....:     for k in srange(16))
                True

            No more trailing zero in the output::

                sage: F(11.bits())
                [0, 0, 1, 1]
                sage: all(F(k.bits())[-1] == 1
                ....:     for k in srange(16))
                True

        #.  Here is an example, where we allow trailing repeated `10`::

                sage: T = Transducer([(0, 1, 0, 'a'),
                ....:                 (1, 2, 1, 'b'),
                ....:                 (2, 0, 0, 'c')],
                ....:                initial_states=[0],
                ....:                final_states=[0])
                sage: F = T.with_final_word_out([1, 0])
                sage: for f in F.iter_final_states():
                ....:     print f, ''.join(f.final_word_out)
                0
                1 bc

            Trying this with trailing repeated `01` does not produce
            a ``final_word_out`` for state ``1``, but for state ``2``::

                sage: F = T.with_final_word_out([0, 1])
                sage: for f in F.iter_final_states():
                ....:     print f, ''.join(f.final_word_out)
                0
                2 c

        #.  Here another example with a more-letter trailing input::

                sage: T = Transducer([(0, 1, 0, 'a'),
                ....:                 (1, 2, 0, 'b'), (1, 2, 1, 'b'),
                ....:                 (2, 3, 0, 'c'), (2, 0, 1, 'e'),
                ....:                 (3, 1, 0, 'd'), (3, 1, 1, 'd')],
                ....:                initial_states=[0],
                ....:                final_states=[0],
                ....:                with_final_word_out=[0, 0, 1, 1])
                sage: for f in T.iter_final_states():
                ....:     print f, ''.join(f.final_word_out)
                0
                1 bcdbcdbe
                2 cdbe
                3 dbe

        TESTS:

        #.  Reading copies of ``letter`` may result in a cycle. In
            this simple example, we have no final state at all::

                sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
                ....:                initial_states=[0])
                sage: T.with_final_word_out(0)
                Traceback (most recent call last):
                ...
                ValueError: The finite state machine contains
                a cycle starting at state 0 with input label 0
                and no final state.

        #.  A unique transition with input word ``letter`` is
            required::

                sage: T = Transducer([(0, 1, 0, 0), (0, 2, 0, 0)])
                sage: T.with_final_word_out(0)
                Traceback (most recent call last):
                ...
                ValueError: No unique transition leaving state 0
                with input label 0.

            It is not a problem if there is no transition starting
            at state ``1`` with input word ``letter``::

                sage: T = Transducer([(0, 1, 0, 0)])
                sage: F = T.with_final_word_out(0)
                sage: for f in F.iter_final_states():
                ....:     print f, f.final_word_out

            Anyhow, you can override this by::

                sage: T = Transducer([(0, 1, 0, 0)])
                sage: T.with_final_word_out(0, allow_non_final=False)
                Traceback (most recent call last):
                ...
                ValueError: No unique transition leaving state 1
                with input label 0.

        #.  All transitions must have input labels of length `1`::

                sage: T = Transducer([(0, 0, [], 0)])
                sage: T.with_final_word_out(0)
                Traceback (most recent call last):
                ...
                NotImplementedError: All transitions must have input
                labels of length 1. Consider calling split_transitions().
                sage: T = Transducer([(0, 0, [0, 1], 0)])
                sage: T.with_final_word_out(0)
                Traceback (most recent call last):
                ...
                NotImplementedError: All transitions must have input
                labels of length 1. Consider calling split_transitions().

        #.  An empty list as input is not allowed::

                sage: T = Transducer([(0, 0, [], 0)])
                sage: T.with_final_word_out([])
                Traceback (most recent call last):
                ...
                ValueError: letters is not allowed to be an empty list.
        """
        # Work on a deep copy so that ``self`` stays untouched; the
        # actual construction is done by the inplace version.
        new = deepcopy(self)
        new.construct_final_word_out(letters, allow_non_final)
        return new
def construct_final_word_out(self, letters, allow_non_final=True):
"""
This is an inplace version of :meth:`.with_final_word_out`. See
:meth:`.with_final_word_out` for documentation and examples.
TESTS::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 1)],
....: initial_states=[0],
....: final_states=[0])
sage: F = T.with_final_word_out(0)
sage: T.construct_final_word_out(0)
sage: T == F # indirect doctest
True
sage: T = Transducer([(0, 1, 0, None)],
....: final_states=[1])
sage: F = T.with_final_word_out(0)
sage: F.state(0).final_word_out
[]
"""
from itertools import cycle, izip_longest
if not isinstance(letters, list):
letters = [letters]
elif not letters:
raise ValueError(
"letters is not allowed to be an empty list.")
in_progress = set()
cache = {}
def find_final_word_out(state):
# The return value is the output which is produced when
# reading the given letters until a final state is reached.
# If no final state can be reached, then None is returned.
# For final states, the final word out is returned.
# For final states with empty final output, that is [].
position, letter = next(trailing_letters)
if state.is_final:
return state.final_word_out
if (state, position) in cache:
return cache[state, position]
if (state, position) in in_progress:
raise ValueError(
"The finite state machine contains a cycle "
"starting at state %s with input label %s "
"and no final state." % (state, letter))
if any(len(t.word_in) != 1 for t in state.transitions):
raise NotImplementedError(
"All transitions must have input labels of length "
"1. Consider calling split_transitions().")
transitions = [t for t in state.transitions
if t.word_in == [letter]]
if allow_non_final and not transitions:
final_word_out = None
elif len(transitions) != 1:
raise ValueError(
"No unique transition leaving state %s with input "
"label %s." % (state, letter))
else:
in_progress.add((state, position))
next_word = find_final_word_out(transitions[0].to_state)
if next_word is not None:
final_word_out = transitions[0].word_out + next_word
else:
final_word_out = None
in_progress.remove((state, position))
cache[state, position] = final_word_out
return final_word_out
for state in self.iter_states():
assert(not in_progress)
# trailing_letters is an infinite iterator additionally
# marking positions
trailing_letters = cycle(enumerate(letters))
find_final_word_out(state)
# actual modifications can only be carried out after all final words
# have been computed as it may not be permissible to stop at a
# formerly non-final state unless a cycle has been completed.
for (state, position), final_word_out in cache.iteritems():
if position == 0 and final_word_out is not None:
state.is_final = True
state.final_word_out = final_word_out
# *************************************************************************
# other
# *************************************************************************
def graph(self, edge_labels='words_in_out'):
"""
Returns the graph of the finite state machine with labeled
vertices and labeled edges.
INPUT:
- ``edge_label``: (default: ``'words_in_out'``) can be
- ``'words_in_out'`` (labels will be strings ``'i|o'``)
- a function with which takes as input a transition
and outputs (returns) the label
OUTPUT:
A graph.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A')
sage: T = Transducer()
sage: T.graph()
Digraph on 0 vertices
sage: T.add_state(A)
'A'
sage: T.graph()
Digraph on 1 vertex
sage: T.add_transition(('A', 'A', 0, 1))
Transition from 'A' to 'A': 0|1
sage: T.graph()
Looped digraph on 1 vertex
"""
if edge_labels == 'words_in_out':
label_fct = lambda t:t._in_out_label_()
elif hasattr(edge_labels, '__call__'):
label_fct = edge_labels
else:
raise TypeError('Wrong argument for edge_labels.')
graph_data = []
isolated_vertices = []
for state in self.iter_states():
transitions = state.transitions
if len(transitions) == 0:
isolated_vertices.append(state.label())
for t in transitions:
graph_data.append((t.from_state.label(), t.to_state.label(),
label_fct(t)))
G = DiGraph(graph_data)
G.add_vertices(isolated_vertices)
return G
digraph = graph
def plot(self):
"""
Plots a graph of the finite state machine with labeled
vertices and labeled edges.
INPUT:
Nothing.
OUTPUT:
A plot of the graph of the finite state machine.
TESTS::
sage: FiniteStateMachine([('A', 'A', 0)]).plot()
"""
return self.graph(edge_labels='words_in_out').plot()
def predecessors(self, state, valid_input=None):
"""
Lists all predecessors of a state.
INPUT:
- ``state`` -- the state from which the predecessors should be
listed.
- ``valid_input`` -- If ``valid_input`` is a list, then we
only consider transitions whose input labels are contained
in ``valid_input``. ``state`` has to be a :class:`FSMState`
(not a label of a state). If input labels of length larger
than `1` are used, then ``valid_input`` has to be a list of
lists.
OUTPUT:
A list of states.
EXAMPLES::
sage: A = Transducer([('I', 'A', 'a', 'b'), ('I', 'B', 'b', 'c'),
....: ('I', 'C', 'c', 'a'), ('A', 'F', 'b', 'a'),
....: ('B', 'F', ['c', 'b'], 'b'), ('C', 'F', 'a', 'c')],
....: initial_states=['I'], final_states=['F'])
sage: A.predecessors(A.state('A'))
['A', 'I']
sage: A.predecessors(A.state('F'), valid_input=['b', 'a'])
['F', 'C', 'A', 'I']
sage: A.predecessors(A.state('F'), valid_input=[['c', 'b'], 'a'])
['F', 'C', 'B']
"""
if valid_input is not None:
valid_list = list()
for input in valid_input:
input_list = input
if not isinstance(input_list, list):
input_list = [input]
valid_list.append(input_list)
valid_input = valid_list
unhandeled_direct_predecessors = {s:[] for s in self.states() }
for t in self.transitions():
if valid_input is None or t.word_in in valid_input:
unhandeled_direct_predecessors[t.to_state].append(t.from_state)
done = []
open = [state]
while len(open) > 0:
s = open.pop()
candidates = unhandeled_direct_predecessors[s]
if candidates is not None:
open.extend(candidates)
unhandeled_direct_predecessors[s] = None
done.append(s)
return(done)
    def asymptotic_moments(self, variable=SR.symbol('n')):
        r"""
        Returns the main terms of expectation and variance of the sum
        of output labels and its covariance with the sum of input
        labels.

        INPUT:

        - ``variable`` -- a symbol denoting the length of the input,
          by default `n`.

        OUTPUT:

        A dictionary consisting of

        - ``expectation`` -- `e n + \operatorname{Order}(1)`,
        - ``variance`` -- `v n + \operatorname{Order}(1)`,
        - ``covariance`` -- `c n + \operatorname{Order}(1)`

        for suitable constants `e`, `v` and `c`.

        Assume that all input and output labels are numbers and that
        ``self`` is complete and has only one final component. Assume
        further that this final component is aperiodic. Furthermore,
        assume that there is exactly one initial state and that all
        states are final.

        Denote by `X_n` the sum of output labels written by the
        finite state machine when reading a random input word of
        length `n` over the input alphabet (assuming
        equidistribution).

        Then the expectation of `X_n` is `en+O(1)`, the variance
        of `X_n` is `vn+O(1)` and the covariance of `X_n` and
        the sum of input labels is `cn+O(1)`, cf. [HKW2014]_,
        Theorem 2.

        In the case of non-integer input or output labels, performance
        degrades significantly. For rational input and output labels,
        consider rescaling to integers. This limitation comes from the
        fact that determinants over polynomial rings can be computed
        much more efficiently than over the symbolic ring. In fact, we
        compute (parts) of a trivariate generating function where the
        input and output labels are exponents of some indeterminates,
        see [HKW2014]_, Theorem 2 for details. If those exponents are
        integers, we can use a polynomial ring.

        EXAMPLES:

        #.  A trivial example: write the negative of the input::

                sage: T = Transducer([(0, 0, 0, 0), (0, 0, 1, -1)],
                ....:                initial_states=[0],
                ....:                final_states=[0])
                sage: T([0, 1, 1])
                [0, -1, -1]
                sage: moments = T.asymptotic_moments()
                sage: moments['expectation']
                -1/2*n + Order(1)
                sage: moments['variance']
                1/4*n + Order(1)
                sage: moments['covariance']
                -1/4*n + Order(1)

        #.  For the case of the Hamming weight of the non-adjacent-form
            (NAF) of integers, cf. the :wikipedia:`Non-adjacent_form`
            and the :ref:`example on recognizing NAFs
            <finite_state_machine_recognizing_NAFs_example>`, the
            following agrees with the results in [HP2007]_.

            We first use the transducer to convert the standard binary
            expansion to the NAF given in [HP2007]_. We use the parameter
            ``with_final_word_out`` such that we do not have to add
            sufficiently many trailing zeros::

                sage: NAF = Transducer([(0, 0, 0, 0),
                ....:                   (0, '.1', 1, None),
                ....:                   ('.1', 0, 0, [1, 0]),
                ....:                   ('.1', 1, 1, [-1, 0]),
                ....:                   (1, 1, 1, 0),
                ....:                   (1, '.1', 0, None)],
                ....:                  initial_states=[0],
                ....:                  final_states=[0],
                ....:                  with_final_word_out=[0])

            As an example, we compute the NAF of `27` by this
            transducer.

            ::

                sage: binary_27 = 27.bits()
                sage: binary_27
                [1, 1, 0, 1, 1]
                sage: NAF_27 = NAF(binary_27)
                sage: NAF_27
                [-1, 0, -1, 0, 0, 1, 0]
                sage: ZZ(NAF_27, base=2)
                27

            Next, we are only interested in the Hamming weight::

                sage: def weight(state, input):
                ....:     if input is None:
                ....:         result = 0
                ....:     else:
                ....:         result = ZZ(input != 0)
                ....:     return (0, result)
                sage: weight_transducer = Transducer(weight,
                ....:                                input_alphabet=[-1, 0, 1],
                ....:                                initial_states=[0],
                ....:                                final_states=[0])

            At the moment, we can not use composition with ``NAF``,
            because it has non-empty final output words::

                sage: NAFweight = weight_transducer.composition(
                ....:     NAF,
                ....:     algorithm='explorative')
                Traceback (most recent call last):
                ...
                NotImplementedError: Explorative composition is not
                implemented for transducers with non-empty final output
                words. Try the direct algorithm instead.

            Thus, we change ``NAF``, then compose and again construct
            the final output words::

                sage: for s in NAF.final_states():
                ....:     s.final_word_out = []
                sage: NAFweight = weight_transducer.composition(
                ....:     NAF,
                ....:     algorithm='explorative').relabeled()
                sage: NAFweight.construct_final_word_out(0)
                sage: sorted(NAFweight.transitions())
                [Transition from 0 to 0: 0|0,
                 Transition from 0 to 1: 1|-,
                 Transition from 1 to 0: 0|1,0,
                 Transition from 1 to 2: 1|1,0,
                 Transition from 2 to 1: 0|-,
                 Transition from 2 to 2: 1|0]
                sage: NAFweight(binary_27 + [0, 0])
                [1, 0, 1, 0, 0, 1, 0]

            Now, we actually compute the asymptotic moments::

                sage: moments = NAFweight.asymptotic_moments()
                sage: moments['expectation']
                1/3*n + Order(1)
                sage: moments['variance']
                2/27*n + Order(1)
                sage: moments['covariance']
                Order(1)

        #.  This is Example 3.1 in [HKW2014]_, where a transducer with
            variable output labels is given. There, the aim was to
            choose the output labels of this very simple transducer such
            that the input and output sum are asymptotically
            independent, i.e., the constant `c` vanishes.

            ::

                sage: var('a_1, a_2, a_3, a_4')
                (a_1, a_2, a_3, a_4)
                sage: T = Transducer([[0, 0, 0, a_1], [0, 1, 1, a_3],
                ....:                 [1, 0, 0, a_4], [1, 1, 1, a_2]],
                ....:                initial_states=[0], final_states=[0, 1])
                sage: moments = T.asymptotic_moments()
                verbose 0 (...) Non-integer output weights lead to
                significant performance degradation.
                sage: moments['expectation']
                1/4*(a_1 + a_2 + a_3 + a_4)*n + Order(1)
                sage: moments['covariance']
                -1/4*(a_1 - a_2)*n + Order(1)

            Therefore, the asymptotic covariance vanishes if and only if
            `a_2=a_1`.

        #.  This is Example 6.2 in [HKW2014]_, dealing with the
            transducer converting the binary expansion of an integer
            into Gray code (cf. the :wikipedia:`Gray_code` and the
            :ref:`example on Gray code
            <finite_state_machine_gray_code_example>`)::

                sage: moments = transducers.GrayCode().asymptotic_moments()
                sage: moments['expectation']
                1/2*n + Order(1)
                sage: moments['variance']
                1/4*n + Order(1)
                sage: moments['covariance']
                Order(1)

        #.  This is the first part of Example 6.3 in [HKW2014]_,
            counting the number of 10 blocks in the standard binary
            expansion. The least significant digit is at the left-most
            position::

                sage: block10 = transducers.CountSubblockOccurrences(
                ....:     [1, 0],
                ....:     input_alphabet=[0, 1])
                sage: sorted(block10.transitions())
                [Transition from () to (): 0|0,
                 Transition from () to (1,): 1|0,
                 Transition from (1,) to (): 0|1,
                 Transition from (1,) to (1,): 1|0]
                sage: moments = block10.asymptotic_moments()
                sage: moments['expectation']
                1/4*n + Order(1)
                sage: moments['variance']
                1/16*n + Order(1)
                sage: moments['covariance']
                Order(1)

        #.  This is the second part of Example 6.3 in [HKW2014]_,
            counting the number of 11 blocks in the standard binary
            expansion. The least significant digit is at the left-most
            position::

                sage: block11 = transducers.CountSubblockOccurrences(
                ....:     [1, 1],
                ....:     input_alphabet=[0, 1])
                sage: sorted(block11.transitions())
                [Transition from () to (): 0|0,
                 Transition from () to (1,): 1|0,
                 Transition from (1,) to (): 0|0,
                 Transition from (1,) to (1,): 1|1]
                sage: var('N')
                N
                sage: moments = block11.asymptotic_moments(N)
                sage: moments['expectation']
                1/4*N + Order(1)
                sage: moments['variance']
                5/16*N + Order(1)
                sage: correlation = (moments['covariance'].coefficient(N) /
                ....:                (1/2 * sqrt(moments['variance'].coefficient(N))))
                sage: correlation
                2/5*sqrt(5)

        #.  This is Example 6.4 in [HKW2014]_, counting the number of
            01 blocks minus the number of 10 blocks in the standard binary
            expansion. The least significant digit is at the left-most
            position::

                sage: block01 = transducers.CountSubblockOccurrences(
                ....:     [0, 1],
                ....:     input_alphabet=[0, 1])
                sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
                sage: product_01x10 = block01.cartesian_product(block10)
                sage: block_difference = transducers.sub([0, 1])(product_01x10)
                sage: T = block_difference.simplification().relabeled()
                sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = True
                sage: T.transitions()
                [Transition from 0 to 1: 0|-1,
                 Transition from 0 to 0: 1|0,
                 Transition from 1 to 1: 0|0,
                 Transition from 1 to 0: 1|1,
                 Transition from 2 to 1: 0|0,
                 Transition from 2 to 0: 1|0]
                sage: moments = T.asymptotic_moments()
                sage: moments['expectation']
                Order(1)
                sage: moments['variance']
                Order(1)
                sage: moments['covariance']
                Order(1)

        #.  The finite state machine must have a unique final component::

                sage: T = Transducer([(0, -1, -1, -1), (0, 1, 1, 1),
                ....:                 (-1, -1, -1, -1), (-1, -1, 1, -1),
                ....:                 (1, 1, -1, 1), (1, 1, 1, 1)],
                ....:                initial_states=[0],
                ....:                final_states=[0, 1, -1])
                sage: T.asymptotic_moments()
                Traceback (most recent call last):
                ...
                NotImplementedError: asymptotic_moments is only
                implemented for finite state machines with one final
                component.

            In this particular example, the first letter of the input
            decides whether we reach the loop at `-1` or the loop at
            `1`. In the first case, we have `X_n = -n`, while we have
            `X_n = n` in the second case. Therefore, the expectation
            `E(X_n)` of `X_n` is `E(X_n) = 0`. We get `(X_n-E(X_n))^2 =
            n^2` in all cases, which results in a variance of `n^2`.

            So this example shows that the variance may be non-linear if
            there is more than one final component.

        TESTS:

        #.  An input alphabet must be given::

                sage: T = Transducer([[0, 0, 0, 0]],
                ....:                initial_states=[0], final_states=[0],
                ....:                determine_alphabets=False)
                sage: T.asymptotic_moments()
                Traceback (most recent call last):
                ...
                ValueError: No input alphabet is given.
                Try calling determine_alphabets().

        #.  The finite state machine must have a unique initial state::

                sage: T = Transducer([(0, 0, 0, 0)])
                sage: T.asymptotic_moments()
                Traceback (most recent call last):
                ...
                ValueError: A unique initial state is required.

        #.  The finite state machine must be complete::

                sage: T = Transducer([[0, 0, 0, 0]],
                ....:                initial_states=[0], final_states=[0],
                ....:                input_alphabet=[0, 1])
                sage: T.asymptotic_moments()
                Traceback (most recent call last):
                ...
                NotImplementedError: This finite state machine is
                not complete.

        #.  The final component of the finite state machine must be
            aperiodic::

                sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
                ....:                initial_states=[0], final_states=[0, 1])
                sage: T.asymptotic_moments()
                Traceback (most recent call last):
                ...
                NotImplementedError: asymptotic_moments is only
                implemented for finite state machines whose unique final
                component is aperiodic.

        #.  Non-integer input or output labels lead to a warning::

                sage: T = Transducer([[0, 0, 0, 0], [0, 0, 1, -1/2]],
                ....:                initial_states=[0], final_states=[0])
                sage: moments = T.asymptotic_moments()
                verbose 0 (...) Non-integer output weights lead to
                significant performance degradation.
                sage: moments['expectation']
                -1/4*n + Order(1)
                sage: moments['variance']
                1/16*n + Order(1)
                sage: moments['covariance']
                -1/8*n + Order(1)

            This warning can be silenced by :func:`~sage.misc.misc.set_verbose`::

                sage: set_verbose(-1, "finite_state_machine.py")
                sage: moments = T.asymptotic_moments()
                sage: moments['expectation']
                -1/4*n + Order(1)
                sage: moments['variance']
                1/16*n + Order(1)
                sage: moments['covariance']
                -1/8*n + Order(1)
                sage: set_verbose(0, "finite_state_machine.py")

        #.  Check whether ``word_out`` of ``FSMState`` are correctly
            dealt with::

                sage: from sage.combinat.finite_state_machine import FSMState
                sage: s = FSMState(0, word_out=2,
                ....:              is_initial=True,
                ....:              is_final=True)
                sage: T = Transducer([(s, s, 0, 1)],
                ....:                initial_states=[s], final_states=[s])
                sage: T([0, 0])
                [2, 1, 2, 1, 2]
                sage: T.asymptotic_moments()['expectation']
                3*n + Order(1)

            The same test for non-integer output::

                sage: from sage.combinat.finite_state_machine import FSMState
                sage: s = FSMState(0, word_out=2/3)
                sage: T = Transducer([(s, s, 0, 1/2)],
                ....:                initial_states=[s], final_states=[s])
                sage: T.asymptotic_moments()['expectation']
                verbose 0 (...) Non-integer output weights lead to
                significant performance degradation.
                7/6*n + Order(1)

        #.  All states of ``self`` have to be final::

                sage: T = Transducer([(0, 1, 1, 4)], initial_states=[0])
                sage: T.asymptotic_moments()
                Traceback (most recent call last):
                ...
                ValueError: Not all states are final.

        ALGORITHM:

        See [HKW2014]_, Theorem 2.

        REFERENCES:

        .. [HKW2014] Clemens Heuberger, Sara Kropf and Stephan Wagner,
           *Combinatorial Characterization of Independent Transducers via
           Functional Digraphs*, :arxiv:`1404.3680`.

        .. [HP2007] Clemens Heuberger and Helmut Prodinger, *The Hamming
           Weight of the Non-Adjacent-Form under Various Input Statistics*,
           Periodica Mathematica Hungarica Vol. 55 (1), 2007, pp. 81–96,
           :doi:`10.1007/s10998-007-3081-z`.
        """
        from sage.calculus.functional import derivative
        from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
        from sage.rings.rational_field import QQ

        # Validate the preconditions of [HKW2014]_, Theorem 2.
        if self.input_alphabet is None:
            raise ValueError("No input alphabet is given. "
                             "Try calling determine_alphabets().")

        if len(self.initial_states()) != 1:
            raise ValueError("A unique initial state is required.")

        if not all(state.is_final for state in self.iter_states()):
            raise ValueError("Not all states are final.")

        if not self.is_complete():
            raise NotImplementedError("This finite state machine is "
                                      "not complete.")

        final_components = self.final_components()
        if len(final_components) != 1:
            raise NotImplementedError("asymptotic_moments is only "
                                      "implemented for finite state machines "
                                      "with one final component.")
        final_component = final_components[0]

        if not final_component.digraph().is_aperiodic():
            raise NotImplementedError("asymptotic_moments is only "
                                      "implemented for finite state machines "
                                      "whose unique final component is "
                                      "aperiodic.")

        # Adjacency matrix whose entries encode the input sum as the
        # exponent of x and the output sum (including the state's own
        # word_out) as the exponent of y, cf. [HKW2014]_, Theorem 2.
        def get_matrix(fsm, x, y):
            return fsm.adjacency_matrix(
                entry=lambda transition: x**sum(transition.word_in) *
                                         y**(sum(transition.word_out) +
                                             sum(transition.from_state.word_out)))

        K = len(self.input_alphabet)
        # Prefer a polynomial ring: determinants there are computed much
        # faster than over the symbolic ring.
        R = PolynomialRing(QQ, ("x", "y", "z"))
        (x, y, z) = R.gens()
        try:
            M = get_matrix(self, x, y)
        except TypeError:
            # Non-integer exponents do not fit into a polynomial ring;
            # fall back to the (slower) symbolic ring.
            verbose("Non-integer output weights lead to "
                    "significant performance degradation.", level=0)
            # fall back to symbolic ring
            R = SR
            x = R.symbol()
            y = R.symbol()
            z = R.symbol()
            M = get_matrix(self, x, y)
            def substitute_one(g):
                return g.subs({x: 1, y: 1, z: 1})
        else:
            def substitute_one(g):
                # the result of the substitution shall live in QQ,
                # not in the polynomial ring R, so the method
                # subs does not achieve the result.
                # Therefore, we need this helper function.
                return g(1, 1, 1)

        # Characteristic polynomial of the (normalized) transition
        # matrix; its partial derivatives at (1, 1, 1) yield the
        # constants of Theorem 2 in [HKW2014]_.
        f = (M.parent().identity_matrix() - z/K*M).det()
        f_x = substitute_one(derivative(f, x))
        f_y = substitute_one(derivative(f, y))
        f_z = substitute_one(derivative(f, z))
        f_xy = substitute_one(derivative(f, x, y))
        f_xz = substitute_one(derivative(f, x, z))
        f_yz = substitute_one(derivative(f, y, z))
        f_yy = substitute_one(derivative(f, y, y))
        f_zz = substitute_one(derivative(f, z, z))

        # Linear coefficients of expectation, variance and covariance.
        e_2 = f_y / f_z
        v_2 = (f_y**2 * (f_zz+f_z) + f_z**2 * (f_yy+f_y)
               - 2*f_y*f_z*f_yz) / f_z**3
        c = (f_x * f_y * (f_zz+f_z) + f_z**2 * f_xy - f_y*f_z*f_xz
             - f_x*f_z*f_yz) / f_z**3

        return {'expectation': e_2*variable + SR(1).Order(),
                'variance': v_2*variable + SR(1).Order(),
                'covariance': c*variable + SR(1).Order()}
def is_monochromatic(self):
"""
Checks whether the colors of all states are equal.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
EXAMPLES::
sage: G = transducers.GrayCode()
sage: [s.color for s in G.iter_states()]
[None, None, None]
sage: G.is_monochromatic()
True
sage: G.state(1).color = 'blue'
sage: G.is_monochromatic()
False
"""
return equal(s.color for s in self.iter_states())
#*****************************************************************************
def is_Automaton(FSM):
    """
    Tests whether or not ``FSM`` inherits from :class:`Automaton`.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine, is_Automaton
        sage: is_Automaton(FiniteStateMachine())
        False
        sage: is_Automaton(Automaton())
        True
        sage: is_FiniteStateMachine(Automaton())
        True
    """
    return isinstance(FSM, Automaton)
class Automaton(FiniteStateMachine):
"""
This creates an automaton, which is a finite state machine, whose
transitions have input labels.
An automaton has additional features like creating a deterministic
and a minimized automaton.
See class :class:`FiniteStateMachine` for more information.
EXAMPLES:
We can create an automaton recognizing even numbers (given in
binary and read from left to right) in the following way::
sage: A = Automaton([('P', 'Q', 0), ('P', 'P', 1),
....: ('Q', 'P', 1), ('Q', 'Q', 0)],
....: initial_states=['P'], final_states=['Q'])
sage: A
Automaton with 2 states
sage: A([0])
True
sage: A([1, 1, 0])
True
sage: A([1, 0, 1])
False
Note that the full output of the commands can be obtained by
calling :meth:`.process` and looks like this::
sage: A.process([1, 0, 1])
(False, 'P')
TESTS::
sage: Automaton()
Automaton with 0 states
"""
    def __init__(self, *args, **kwargs):
        """
        Initialize an automaton. See :class:`Automaton` and its parent
        :class:`FiniteStateMachine` for more information.

        TESTS::

            sage: Transducer()._allow_composition_
            True
            sage: Automaton()._allow_composition_
            False
        """
        super(Automaton, self).__init__(*args, **kwargs)
        # Automata have no output labels, so composition (which feeds
        # one machine's output into another's input) is disabled.
        self._allow_composition_ = False
def _repr_(self):
    """
    Return a string representation of this automaton of the form
    "Automaton with n states", where n is the number of states.

    OUTPUT:

    A string.

    EXAMPLES::

        sage: Automaton()._repr_()
        'Automaton with 0 states'
    """
    number_of_states = len(self._states_)
    return "Automaton with %s states" % number_of_states
def _latex_transition_label_(self, transition, format_function=latex):
    r"""
    Return the LaTeX label of ``transition``.

    INPUT:

    - ``transition`` - a transition

    - ``format_function`` - a function formatting the labels

    OUTPUT:

    A string: the formatted input word only, since an automaton's
    transitions carry no output labels.

    TESTS::

        sage: F = Automaton([('A', 'B', 0, 1)])
        sage: t = F.transitions()[0]
        sage: F._latex_transition_label_(t)
        \left[0\right]
    """
    return format_function(transition.word_in)
def intersection(self, other, only_accessible_components=True):
    """
    Return a new automaton which accepts an input if and only if
    it is accepted by both ``self`` and ``other``.

    INPUT:

    - ``other`` -- an automaton

    - ``only_accessible_components`` -- If ``True`` (default), then
      the result is piped through :meth:`.accessible_components`. If no
      ``new_input_alphabet`` is given, it is determined by
      :meth:`.determine_alphabets`.

    OUTPUT:

    A new automaton recognizing the intersection of the languages
    of ``self`` and ``other``. Its states are pairs of states of
    the given automata; there is a transition `((A, B), (C, D), a)`
    whenever both `(A, C, a)` and `(B, D, a)` exist in the old
    automata.

    For automata, :meth:`.intersection` and
    :meth:`.cartesian_product` are the same operation.

    For machines with epsilon-transitions the intersection is not
    well defined; a ``ValueError`` is raised in that case.

    EXAMPLES::

        sage: aut1 = Automaton([('1', '2', 1), ('2', '2', 1), ('2', '2', 0)],
        ....:                  initial_states=['1'], final_states=['2'],
        ....:                  determine_alphabets=True)
        sage: aut2 = Automaton([('A', 'A', 1), ('A', 'B', 0),
        ....:                   ('B', 'B', 0), ('B', 'A', 1)],
        ....:                  initial_states=['A'], final_states=['B'],
        ....:                  determine_alphabets=True)
        sage: res = aut1.intersection(aut2)
        sage: (aut1([1, 0]), aut2([1, 0]), res([1, 0]))
        (True, True, True)
    """
    if not is_Automaton(other):
        raise TypeError(
            "Only an automaton can be intersected with an automaton.")

    def combine(t1, t2):
        # Epsilon-transitions make intersection ill-defined.
        if not t1.word_in or not t2.word_in:
            raise ValueError(
                "An epsilon-transition (with empty input) was found.")
        # Only transitions reading the same letter are paired.
        if t1.word_in != t2.word_in:
            raise LookupError
        return (t1.word_in, None)

    return self.product_FiniteStateMachine(
        other,
        combine,
        only_accessible_components=only_accessible_components)

cartesian_product = intersection
def determinisation(self):
    """
    Return a deterministic automaton which accepts the same input
    words as this one (powerset construction).

    INPUT:

    Nothing.

    OUTPUT:

    A new, deterministic automaton. Its states are labeled by
    frozensets of states of ``self``; the color of a new state is
    the frozenset of the colors of its constituent states, so
    those colors have to be hashable.

    The input alphabet must be specified.

    EXAMPLES::

        sage: aut = Automaton([('A', 'A', 0), ('A', 'B', 1), ('B', 'B', 1)],
        ....:                 initial_states=['A'], final_states=['B'])
        sage: aut.determinisation().transitions()
        [Transition from frozenset(['A'])
                      to frozenset(['A']): 0|-,
         Transition from frozenset(['A'])
                      to frozenset(['B']): 1|-,
         Transition from frozenset(['B'])
                      to frozenset([]): 0|-,
         Transition from frozenset(['B'])
                      to frozenset(['B']): 1|-,
         Transition from frozenset([])
                      to frozenset([]): 0|-,
         Transition from frozenset([])
                      to frozenset([]): 1|-]

    Note that colors of states have to be hashable::

        sage: A = Automaton([[0, 0, 0]], initial_states=[0])
        sage: A.state(0).color = []
        sage: A.determinisation()
        Traceback (most recent call last):
        ...
        TypeError: unhashable type: 'list'
        sage: A.state(0).color = ()
        sage: A.determinisation()
        Automaton with 1 states
    """
    # Transitions reading more than one letter are split up first,
    # so that below every transition consumes exactly one letter
    # (or none, i.e. an epsilon-transition).
    if any(len(t.word_in) > 1 for t in self.iter_transitions()):
        return self.split_transitions().determinisation()

    # Epsilon-closure of every state, computed as a fixpoint:
    # repeatedly merge the closures of direct epsilon-successors
    # until the total size of all closures stops growing.
    closure = {}
    direct = {}
    for state in self.iter_states():
        direct[state] = set(t.to_state
                            for t in self.iter_transitions(state)
                            if not t.word_in)
        closure[state] = set([state])
    previous_total = 0
    total = len(closure)
    while previous_total < total:
        previous_total = total
        total = 0
        for state in self.iter_states():
            for successor in direct[state]:
                closure[state] = closure[state].union(closure[successor])
            total += len(closure[state])

    def set_transition(states, letter):
        # Target of the powerset automaton: all states reachable
        # from ``states`` by reading ``letter``, saturated with
        # their epsilon-closures.
        targets = set()
        for state in states:
            for transition in self.iter_transitions(state):
                if transition.word_in == [letter]:
                    targets.add(transition.to_state)
        targets = targets.union(*(closure[t] for t in targets))
        return (frozenset(targets), [])

    result = self.empty_copy()
    result.add_from_transition_function(
        set_transition,
        initial_states=[frozenset(self.iter_initial_states())])
    # A set-state is final iff it contains a final state; its color
    # collects the colors of its constituents.
    for new_state in result.iter_states():
        new_state.is_final = any(s.is_final for s in new_state.label())
        new_state.color = frozenset(s.color for s in new_state.label())
    return result
def minimization(self, algorithm=None):
    """
    Return the minimization of this automaton as a new automaton.

    INPUT:

    - ``algorithm`` -- Either Moore's algorithm (by
      ``algorithm='Moore'`` or as default for deterministic
      automata) or Brzozowski's algorithm (when
      ``algorithm='Brzozowski'`` or when the automaton is not
      deterministic) is used.

    OUTPUT:

    A new automaton which is deterministic and has a minimal
    number of states.

    EXAMPLES::

        sage: aut = Automaton([('1', '2', 'a'), ('2', '3', 'b'),
        ....:                  ('3', '2', 'a'), ('2', '1', 'b'),
        ....:                  ('3', '4', 'a'), ('4', '3', 'b')],
        ....:                 initial_states=['1'], final_states=['1'])
        sage: min = aut.minimization(algorithm='Brzozowski')
        sage: [len(min.states()), len(aut.states())]
        [3, 4]
        sage: min = aut.minimization(algorithm='Moore')
        Traceback (most recent call last):
        ...
        NotImplementedError: Minimization via Moore's Algorithm is only
        implemented for deterministic finite state machines
    """
    # Pick the default algorithm from determinism if none given.
    if algorithm is None:
        algorithm = "Moore" if self.is_deterministic() else "Brzozowski"
    if algorithm == "Moore":
        return self._minimization_Moore_()
    if algorithm == "Brzozowski":
        return self._minimization_Brzozowski_()
    raise NotImplementedError("Algorithm '%s' is not implemented. Choose 'Moore' or 'Brzozowski'" % algorithm)
def _minimization_Brzozowski_(self):
    """
    Return a minimized automaton by using Brzozowski's algorithm:
    determinise the transposition twice.

    See also :meth:`.minimization`.

    TESTS::

        sage: A = Automaton([('A', 'A', 1), ('A', 'A', 0), ('A', 'B', 1),
        ....:                ('B', 'C', 0), ('C', 'C', 1), ('C', 'C', 0)],
        ....:               initial_states=['A'], final_states=['C'])
        sage: B = A._minimization_Brzozowski_()
        sage: len(B.states())
        3
    """
    half = self.transposition().determinisation()
    return half.transposition().determinisation()
def _minimization_Moore_(self):
    """
    Return a minimized automaton by using Moore's algorithm
    (quotient by the equivalence classes of states).

    Only deterministic automata are supported; otherwise a
    ``NotImplementedError`` is raised.

    See also :meth:`.minimization`.

    TESTS::

        sage: aut = Automaton([('1', '2', 'a'), ('2', '3', 'b'),
        ....:                  ('3', '2', 'a'), ('2', '1', 'b'),
        ....:                  ('3', '4', 'a'), ('4', '3', 'b')],
        ....:                 initial_states=['1'], final_states=['1'])
        sage: min = aut._minimization_Moore_()
        Traceback (most recent call last):
        ...
        NotImplementedError: Minimization via Moore's Algorithm is only
        implemented for deterministic finite state machines
    """
    if not self.is_deterministic():
        raise NotImplementedError("Minimization via Moore's Algorithm is only "
                                  "implemented for deterministic finite state machines")
    return self.quotient(self.equivalence_classes())
def process(self, *args, **kwargs):
    """
    .. WARNING::

        The default output of this method is scheduled to change.
        This docstring describes the new default behaviour, which can
        already be achieved by setting
        ``FSMOldProcessOutput`` to ``False``.

    Returns whether the automaton accepts the input and the state
    where the computation stops.

    INPUT:

    - ``input_tape`` -- The input tape can be a list with entries from
      the input alphabet.

    - ``initial_state`` -- (default: ``None``) The state in which
      to start. If this parameter is ``None`` and there is only
      one initial state in the machine, then this state is taken.

    - ``full_output`` -- (default: ``True``) If set, then the full
      output is given, otherwise only whether the sequence is accepted
      or not (the first entry below only).

    OUTPUT:

    The full output is a pair, where

    - the first entry is ``True`` if the input string is accepted and

    - the second gives the state reached after processing the
      input tape (This is a state with label ``None`` if the input
      could not be processed, i.e., when at one point no
      transition to go could be found.).

    By setting ``FSMOldProcessOutput`` to ``False``
    the new desired output is produced.

    EXAMPLES::

        sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False  # activate new output behavior
        sage: from sage.combinat.finite_state_machine import FSMState
        sage: NAF_ = FSMState('_', is_initial = True, is_final = True)
        sage: NAF1 = FSMState('1', is_final = True)
        sage: NAF = Automaton(
        ....:     {NAF_: [(NAF_, 0), (NAF1, 1)], NAF1: [(NAF_, 0)]})
        sage: [NAF.process(w) for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
        ....:                           [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
        [(True, '_'), (True, '1'), (False, None),
         (True, '1'), (False, None), (False, None)]

    If we just want a condensed output, we use::

        sage: [NAF.process(w, full_output=False)
        ....:     for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
        ....:               [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
        [True, True, False, True, False, False]

    It is equivalent to::

        sage: [NAF(w) for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
        ....:                   [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
        [True, True, False, True, False, False]

    The following example illustrates the difference between
    non-existing paths and reaching a non-final state::

        sage: NAF.process([2])
        (False, None)
        sage: NAF.add_transition(('_', 's', 2))
        Transition from '_' to 's': 2|-
        sage: NAF.process([2])
        (False, 's')
    """
    # Legacy code path: emit a deprecation warning and delegate to
    # the old FiniteStateMachine.process output format.
    if FSMOldProcessOutput:
        from sage.misc.superseded import deprecation
        deprecation(16132, "The output of Automaton.process "
                    "(and thus of Automaton.__call__) "
                    "will change. Please use the corresponding "
                    "functions from FiniteStateMachine "
                    "for the original output.")
        return super(Automaton, self).process(*args, **kwargs)

    # ``dict.has_key`` is deprecated (and gone in Python 3);
    # ``setdefault`` expresses the same "default to True" intent.
    kwargs.setdefault('full_output', True)

    # Run the process iterator to exhaustion; the results are
    # collected on the iterator itself.
    it = self.iter_process(*args, **kwargs)
    for _ in it:
        pass

    # process output
    if kwargs['full_output']:
        return (it.accept_input, it.current_state)
    else:
        return it.accept_input
#*****************************************************************************
def is_Transducer(FSM):
    """
    Return whether ``FSM`` is an instance of :class:`Transducer`
    (or of a subclass thereof).

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine, is_Transducer
        sage: is_Transducer(FiniteStateMachine())
        False
        sage: is_Transducer(Transducer())
        True
        sage: is_FiniteStateMachine(Transducer())
        True
    """
    return isinstance(FSM, Transducer)
class Transducer(FiniteStateMachine):
"""
This creates a transducer, which is a finite state machine, whose
transitions have input and output labels.
A transducer has additional features like creating a simplified
transducer.
See class :class:`FiniteStateMachine` for more information.
EXAMPLES:
We can create a transducer performing the addition of 1 (for
numbers given in binary and read from right to left) in the
following way::
sage: T = Transducer([('C', 'C', 1, 0), ('C', 'N', 0, 1),
....: ('N', 'N', 0, 0), ('N', 'N', 1, 1)],
....: initial_states=['C'], final_states=['N'])
sage: T
Transducer with 2 states
sage: T([0])
[1]
sage: T([1,1,0])
[0, 0, 1]
sage: ZZ(T(15.digits(base=2)+[0]), base=2)
16
Note that we have padded the binary input sequence by a `0` so
that the transducer can reach its final state.
TESTS::
sage: Transducer()
Transducer with 0 states
"""
def _repr_(self):
    """
    Return a string representation of this transducer of the form
    "Transducer with n states", where n is the number of states.

    OUTPUT:

    A string.

    EXAMPLES::

        sage: Transducer()._repr_()
        'Transducer with 0 states'
    """
    number_of_states = len(self._states_)
    return "Transducer with %s states" % number_of_states
def _latex_transition_label_(self, transition, format_function=latex):
    r"""
    Return the LaTeX label of ``transition``.

    INPUT:

    - ``transition`` - a transition

    - ``format_function`` - a function formatting the labels

    OUTPUT:

    A string: the formatted input word, a ``\mid`` separator, and
    the formatted output word.

    TESTS::

        sage: F = Transducer([('A', 'B', 0, 1)])
        sage: t = F.transitions()[0]
        sage: F._latex_transition_label_(t)
        \left[0\right] \mid \left[1\right]
    """
    separator = "\\mid "
    return (format_function(transition.word_in)
            + separator
            + format_function(transition.word_out))
def intersection(self, other, only_accessible_components=True):
    """
    Return a new transducer which accepts an input if it is accepted by
    both given transducers producing the same output.

    INPUT:

    - ``other`` -- a transducer

    - ``only_accessible_components`` -- If ``True`` (default), then
      the result is piped through :meth:`.accessible_components`. If no
      ``new_input_alphabet`` is given, it is determined by
      :meth:`.determine_alphabets`.

    OUTPUT:

    A new transducer computing the intersection of the languages of
    ``self`` and ``other``. Its states are pairs of states of the
    given transducers; there is a transition
    `((A, B), (C, D), a, b)` whenever both `(A, C, a, b)` and
    `(B, D, a, b)` exist in the old transducers. A product state is
    final only if both components agree on their final output word.

    Transducers with epsilon-transitions are rejected with a
    ``ValueError``, since intersection is not well defined for
    them (cf. [BaWo2012]_).

    EXAMPLES::

        sage: transducer1 = Transducer([('1', '2', 1, 0),
        ....:                           ('2', '2', 1, 0),
        ....:                           ('2', '2', 0, 1)],
        ....:                          initial_states=['1'],
        ....:                          final_states=['2'])
        sage: transducer2 = Transducer([('A', 'A', 1, 0),
        ....:                           ('A', 'B', 0, 0),
        ....:                           ('B', 'B', 0, 1),
        ....:                           ('B', 'A', 1, 1)],
        ....:                          initial_states=['A'],
        ....:                          final_states=['B'])
        sage: res = transducer1.intersection(transducer2)
        sage: res.transitions()
        [Transition from ('1', 'A') to ('2', 'A'): 1|0,
         Transition from ('2', 'A') to ('2', 'A'): 1|0]

    REFERENCES:

    .. [BaWo2012] Javier Baliosian and Dina Wonsever, *Finite State
       Transducers*, chapter in *Handbook of Finite State Based Models and
       Applications*, edited by Jiacun Wang, Chapman and Hall/CRC, 2012.
    """
    if not is_Transducer(other):
        raise TypeError(
            "Only a transducer can be intersected with a transducer.")

    def combine(t1, t2):
        # Epsilon-transitions (empty input or output) make the
        # intersection ill-defined.
        if not (t1.word_in and t2.word_in
                and t1.word_out and t2.word_out):
            raise ValueError("An epsilon-transition "
                             "(with empty input or output) was found.")
        # Only transitions agreeing on input AND output are paired.
        if t1.word_in != t2.word_in or t1.word_out != t2.word_out:
            raise LookupError
        return (t1.word_in, t1.word_out)

    new = self.product_FiniteStateMachine(
        other,
        combine,
        only_accessible_components=only_accessible_components,
        final_function=lambda s1, s2: s1.final_word_out)

    # Drop finality where the two components disagree on the final
    # output word.
    for state in new.iter_final_states():
        left = self.state(state.label()[0])
        right = other.state(state.label()[1])
        if left.final_word_out != right.final_word_out:
            state.final_word_out = None
            state.is_final = False

    return new
def cartesian_product(self, other, only_accessible_components=True):
    r"""
    .. WARNING::

        The default output of this method is scheduled to change.
        This docstring describes the new default behaviour, which can
        already be achieved by setting
        ``FSMOldCodeTransducerCartesianProduct`` to ``False``.

    Return a new transducer which can simultaneously process an
    input with the machines ``self`` and ``other`` where the
    output labels are `d`-tuples of the original output labels.

    INPUT:

    - ``other`` - a finite state machine (if `d=2`) or a list (or
      other iterable) of `d-1` finite state machines

    - ``only_accessible_components`` -- If ``True`` (default), then
      the result is piped through :meth:`.accessible_components`. If no
      ``new_input_alphabet`` is given, it is determined by
      :meth:`.determine_alphabets`.

    OUTPUT:

    A transducer which can simultaneously process an input with
    ``self`` and the machine(s) in ``other``. Its states are
    elements of the cartesian product of the state sets. Let
    `(A_j, B_j, a_j, b_j)` for `j\in\{1, \ldots, d\}` be
    transitions in the machines ``self`` and in ``other``. Then
    there is a transition `((A_1, \ldots, A_d), (B_1, \ldots,
    B_d), a, (b_1, \ldots, b_d))` in the new transducer if `a_1 =
    \cdots = a_d =: a`. Output words of different lengths are
    padded with ``None``; final output words are combined in the
    same way.

    If ``other`` is an automaton, the result is ``self`` with the
    input restricted to the language of ``other``.

    EXAMPLES::

        sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
        sage: transducer1 = Transducer([('A', 'A', 0, 0),
        ....:                           ('A', 'A', 1, 1)],
        ....:                          initial_states=['A'],
        ....:                          final_states=['A'],
        ....:                          determine_alphabets=True)
        sage: transducer2 = Transducer([(0, 1, 0, ['b', 'c']),
        ....:                           (0, 0, 1, 'b'),
        ....:                           (1, 1, 0, 'a')],
        ....:                          initial_states=[0],
        ....:                          final_states=[1],
        ....:                          determine_alphabets=True)
        sage: result = transducer1.cartesian_product(transducer2)
        sage: result.transitions()
        [Transition from ('A', 0) to ('A', 1): 0|(0, 'b'),(None, 'c'),
         Transition from ('A', 0) to ('A', 0): 1|(1, 'b'),
         Transition from ('A', 1) to ('A', 1): 0|(0, 'a')]
        sage: result([1, 0, 0])
        [(1, 'b'), (0, 'b'), (None, 'c'), (0, 'a')]

    Be aware that :meth:`.cartesian_product` is not commutative:
    an automaton on the left cannot take a transducer on the
    right.
    """
    # Legacy code path: warn and fall back to the old behaviour,
    # which coincides with what Transducer.intersection now does.
    if FSMOldCodeTransducerCartesianProduct:
        from sage.misc.superseded import deprecation
        deprecation(16061, "The output of Transducer.cartesian_product "
                    "will change. Please use "
                    "Transducer.intersection for the original "
                    "output.")
        return self.intersection(
            other,
            only_accessible_components=only_accessible_components)

    def combine_transitions(*transitions):
        # All machines must read the same input word; otherwise
        # there is no product transition.
        if not equal(t.word_in for t in transitions):
            raise LookupError
        # Zip the output words, padding shorter ones with None.
        word_out = list(itertools.izip_longest(
            *(t.word_out for t in transitions)))
        return (transitions[0].word_in, word_out)

    def combine_final(*states):
        # Final output words are zipped the same way.
        return list(itertools.izip_longest(*(s.final_word_out
                                             for s in states)))

    return self.product_FiniteStateMachine(
        other,
        combine_transitions,
        final_function=combine_final,
        only_accessible_components=only_accessible_components)
def simplification(self):
    """
    Return a simplified transducer.

    INPUT:

    Nothing.

    OUTPUT:

    A new transducer implementing the same function as the
    original one.

    The simplification proceeds by Moore's algorithm: common
    output labels of transitions leaving a state are first moved
    onto the transitions entering it (cf. :meth:`.prepone_output`),
    then equivalent states are merged via :meth:`.quotient`.
    States with different colors are never identified.

    EXAMPLES::

        sage: fsm = Transducer([("A", "B", 0, 1), ("A", "B", 1, 0),
        ....:                   ("B", "C", 0, 0), ("B", "C", 1, 1),
        ....:                   ("C", "D", 0, 1), ("C", "D", 1, 0),
        ....:                   ("D", "A", 0, 0), ("D", "A", 1, 1)])
        sage: fsms = fsm.simplification()
        sage: fsms
        Transducer with 2 states
        sage: fsms.relabeled().transitions()
        [Transition from 0 to 1: 0|1,
         Transition from 0 to 1: 1|0,
         Transition from 1 to 0: 0|0,
         Transition from 1 to 0: 1|1]
    """
    # Work on a copy: prepone_output mutates the machine.
    simplified = deepcopy(self)
    simplified.prepone_output()
    return simplified.quotient(simplified.equivalence_classes())
def process(self, *args, **kwargs):
"""
.. WARNING::
The default output of this method is scheduled to change.
This docstring describes the new default behaviour, which can
already be achieved by setting
``FSMOldProcessOutput`` to ``False``.
Returns whether the transducer accepts the input, the state
where the computation stops and which output is generated.
INPUT:
- ``input_tape`` -- The input tape can be a list with entries from
the input alphabet.
- ``initial_state`` -- (default: ``None``) The state in which
to start. If this parameter is ``None`` and there is only
one initial state in the machine, then this state is taken.
- ``full_output`` -- (default: ``True``) If set, then the full
output is given, otherwise only the generated output (the
third entry below only). If the input is not accepted, a
``ValueError`` is raised.
OUTPUT:
The full output is a triple, where
- the first entry is ``True`` if the input string is accepted,
- the second gives the reached state after processing the
input tape (This is a state with label ``None`` if the input
could not be processed, i.e., when at one point no
transition to go could be found.), and
- the third gives a list of the output labels used during
processing.
By setting ``FSMOldProcessOutput`` to ``False``
the new desired output is produced.
EXAMPLES::
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False # activate new output behavior
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial = True, is_final = True)
sage: binary_inverter = Transducer({A:[(A, 0, 1), (A, 1, 0)]})
sage: binary_inverter.process([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
If we are only interested in the output, we can also use::
sage: binary_inverter([0, 1, 0, 0, 1, 1])
[1, 0, 1, 1, 0, 0]
The following transducer transforms `0^n 1` to `1^n 2`::
sage: T = Transducer([(0, 0, 0, 1), (0, 1, 1, 2)])
sage: T.state(0).is_initial = True
sage: T.state(1).is_final = True
We can see the different possibilites of the output by::
sage: [T.process(w) for w in [[1], [0, 1], [0, 0, 1], [0, 1, 1],
....: [0], [0, 0], [2, 0], [0, 1, 2]]]
[(True, 1, [2]), (True, 1, [1, 2]),
(True, 1, [1, 1, 2]), (False, None, None),
(False, 0, [1]), (False, 0, [1, 1]),
(False, None, None), (False, None, None)]
If we just want a condensed output, we use::
sage: [T.process(w, full_output=False)
....: for w in [[1], [0, 1], [0, 0, 1]]]
[[2], [1, 2], [1, 1, 2]]
sage: T.process([0, 1, 2], full_output=False)
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
It is equivalent to::
sage: [T(w) for w in [[1], [0, 1], [0, 0, 1]]]
[[2], [1, 2], [1, 1, 2]]
sage: T([0, 1, 2])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
"""
if FSMOldProcessOutput:
from sage.misc.superseded import deprecation
deprecation(16132, "The output of Transducer.process "
"(and thus of Transducer.__call__) "
"will change. Please use the corresponding "
"functions from FiniteStateMachine "
"for the original output.")
return super(Transducer, self).process(*args, **kwargs)
if not kwargs.has_key('full_output'):
kwargs['full_output'] = True
it = self.iter_process(*args, **kwargs)
for _ in it:
pass
# process output
if kwargs['full_output']:
if it.current_state.label() is None:
return (it.accept_input, it.current_state, None)
else:
return (it.accept_input, it.current_state, it.output_tape)
else:
if not it.accept_input:
raise ValueError("Invalid input sequence.")
return it.output_tape
#*****************************************************************************
def is_FSMProcessIterator(PI):
    """
    Return whether ``PI`` is an instance of :class:`FSMProcessIterator`
    (or of a subclass thereof).

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FSMProcessIterator, FSMProcessIterator
        sage: is_FSMProcessIterator(FSMProcessIterator(FiniteStateMachine([[0, 0, 0, 0]], initial_states=[0])))
        True
    """
    return isinstance(PI, FSMProcessIterator)
class FSMProcessIterator(SageObject):
    """
    This class is for processing an input string on a finite state
    machine.

    An instance of this class is generated when
    :meth:`FiniteStateMachine.process` or
    :meth:`FiniteStateMachine.iter_process` of the finite state
    machine is invoked. It behaves like an iterator which, in each
    step, takes one letter of the input and runs (one step on) the
    finite state machine with this input. More precisely, in each
    step, the process iterator takes an outgoing transition of the
    current state, whose input label equals the input letter of the
    tape. The output label of the transition, if present, is written
    on the output tape.

    INPUT:

    - ``fsm`` -- The finite state machine on which the input should be
      processed.

    - ``input_tape`` -- The input tape. It can be anything that is
      iterable.

    - ``initial_state`` -- The initial state in which the machine
      starts. If this is ``None``, the unique initial state of the finite
      state machine is taken. If there are several, a ``ValueError`` is
      raised.

    The process (iteration) stops if there are no more input letters
    on the tape. In this case a StopIteration exception is thrown. As
    result the following attributes are available:

    - ``accept_input`` -- Is ``True`` if the reached state is a final state.

    - ``current_state`` -- The current/reached state in the process.

    - ``output_tape`` -- The written output.

    Current values of those attributes (except ``accept_input``) are
    (also) available during the iteration.

    OUTPUT:

    An iterator.

    EXAMPLES:

    The following transducer reads binary words and outputs a word,
    where blocks of ones are replaced by just a single one. Further
    only words that end with a zero are accepted.

    ::

        sage: T = Transducer({'A': [('A', 0, 0), ('B', 1, None)],
        ....:                 'B': [('B', 1, None), ('A', 0, [1, 0])]},
        ....:                initial_states=['A'], final_states=['A'])
        sage: input = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0]
        sage: T.process(input)
        (True, 'A', [1, 0, 0, 1, 0, 1, 0])

    The function :meth:`FiniteStateMachine.process` created a new
    ``FSMProcessIterator``. We can do that manually, too, and get full
    access to the iteration process::

        sage: from sage.combinat.finite_state_machine import FSMProcessIterator
        sage: it = FSMProcessIterator(T, input_tape=input)
        sage: for _ in it:
        ....:     print (it.current_state, it.output_tape)
        ('B', [])
        ('B', [])
        ('A', [1, 0])
        ('A', [1, 0, 0])
        ('B', [1, 0, 0])
        ('A', [1, 0, 0, 1, 0])
        ('B', [1, 0, 0, 1, 0])
        ('B', [1, 0, 0, 1, 0])
        ('B', [1, 0, 0, 1, 0])
        ('A', [1, 0, 0, 1, 0, 1, 0])
        sage: it.accept_input
        True

    TESTS::

        sage: T = Transducer([[0, 0, 0, 0]])
        sage: T.process([])
        Traceback (most recent call last):
        ...
        ValueError: No state is initial.

    ::

        sage: T = Transducer([[0, 1, 0, 0]], initial_states=[0, 1])
        sage: T.process([])
        Traceback (most recent call last):
        ...
        ValueError: Several initial states.
    """
    def __init__(self, fsm, input_tape=None, initial_state=None, **kwargs):
        """
        See :class:`FSMProcessIterator` for more information.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: for _ in it:
            ....:     pass
            sage: it.output_tape
            [1, 0]
        """
        self.fsm = fsm
        if initial_state is None:
            # No explicit start state: there must be exactly one initial
            # state on the machine.
            fsm_initial_states = self.fsm.initial_states()
            try:
                self.current_state = fsm_initial_states[0]
            except IndexError:
                raise ValueError("No state is initial.")
            if len(fsm_initial_states) > 1:
                raise ValueError("Several initial states.")
        else:
            self.current_state = initial_state
        self.output_tape = []
        if input_tape is None:
            # Empty tape: iteration will stop immediately.
            self._input_tape_iter_ = iter([])
        else:
            if hasattr(input_tape, '__iter__'):
                self._input_tape_iter_ = iter(input_tape)
            else:
                raise ValueError("Given input tape is not iterable.")

    def __iter__(self):
        """
        Returns ``self``.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: id(it) == id(iter(it))
            True
        """
        return self

    def next(self):
        """
        Makes one step in processing the input tape.

        INPUT:

        Nothing.

        OUTPUT:

        It returns the taken transition. A ``StopIteration`` exception is
        thrown when there is nothing more to read.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.next()
            Transition from 'A' to 'A': 0|1
            sage: it.next()
            Transition from 'A' to 'A': 1|0
            sage: it.next()
            Traceback (most recent call last):
            ...
            StopIteration

        TESTS::

            sage: Z = Transducer()
            sage: s = Z.add_state(0)
            sage: s.is_initial = True
            sage: s.is_final = True
            sage: s.final_word_out = [1, 2]
            sage: Z.process([])
            (True, 0, [1, 2])
        """
        # Once accept_input has been decided, the iteration is over.
        if hasattr(self, 'accept_input'):
            raise StopIteration
        try:
            # process current state
            transition = None
            try:
                # A state hook may select the transition itself.
                transition = self.current_state.hook(
                    self.current_state, self)
            except AttributeError:
                pass
            self.write_word(self.current_state.word_out)

            # get next
            if not isinstance(transition, FSMTransition):
                # Read letters until the accumulated word matches the input
                # label of exactly one outgoing transition.
                next_word = []
                found = False
                try:
                    while not found:
                        next_word.append(self.read_letter())
                        if len(next_word) == 1 and any(not t.word_in
                               for t in self.current_state.transitions):
                            raise NotImplementedError(
                                "process cannot handle epsilon transition "
                                "leaving state %s." % self.current_state.label())
                        try:
                            transition = self.get_next_transition(
                                next_word)
                            found = True
                        except ValueError:
                            pass
                    # Another transition whose input label extends the word
                    # just matched would make the choice ambiguous.
                    if found and any(
                            t is not transition and startswith(t.word_in,
                                                               next_word)
                            for t in self.current_state.transitions):
                        raise NotImplementedError("Non-deterministic "
                                                  "path encountered "
                                                  "when processing "
                                                  "input.")
                except StopIteration:
                    # this means input tape is finished
                    if len(next_word) > 0:
                        # Tape ended in the middle of a word: mark the
                        # process as stuck with an unlabelled state.
                        self.current_state = FSMState(None,
                                                      allow_label_None=True)
                    raise StopIteration

            # process transition
            try:
                # A transition hook may perform side effects.
                transition.hook(transition, self)
            except AttributeError:
                pass
            self.write_word(transition.word_out)

            # go to next state
            self.current_state = transition.to_state
        except StopIteration:
            # this means, either input tape is finished or
            # someone has thrown StopIteration manually (in one
            # of the hooks)
            # NOTE(review): elsewhere in this file ``label`` is invoked as a
            # method (``label()``, e.g. in Transducer.process); comparing the
            # attribute itself to ``None`` here looks like it can never be
            # true -- confirm against FSMState's definition.
            if self.current_state.label is None or not self.current_state.is_final:
                self.accept_input = False
            if not hasattr(self, 'accept_input'):
                self.accept_input = True
                if self.current_state.is_final:
                    # Accepted in a final state: emit its final output word.
                    self.write_word(self.current_state.final_word_out)
            raise StopIteration

        # return
        return transition

    def read_letter(self):
        """
        Reads a letter from the input tape.

        INPUT:

        Nothing.

        OUTPUT:

        A letter.

        Exception ``StopIteration`` is thrown if tape has reached
        the end.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.read_letter()
            0
        """
        # Python 2 iterator protocol; StopIteration propagates to next().
        return self._input_tape_iter_.next()

    def write_letter(self, letter):
        """
        Writes a letter on the output tape.

        INPUT:

        - ``letter`` -- the letter to be written.

        OUTPUT:

        Nothing.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.write_letter(42)
            sage: it.output_tape
            [42]
        """
        self.output_tape.append(letter)

    def write_word(self, word):
        """
        Writes a word on the output tape.

        INPUT:

        - ``word`` -- the word to be written.

        OUTPUT:

        Nothing.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.write_word([4, 2])
            sage: it.output_tape
            [4, 2]
        """
        for letter in word:
            self.write_letter(letter)

    def get_next_transition(self, word_in):
        """
        Returns the next transition according to ``word_in``. It is
        assumed that we are in state ``self.current_state``.

        INPUT:

        - ``word_in`` -- the input word.

        OUTPUT:

        The next transition according to ``word_in``. It is assumed
        that we are in state ``self.current_state``. If no transition
        matches, a ``ValueError`` is thrown.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.get_next_transition([0])
            Transition from 'A' to 'A': 0|1
            sage: it.get_next_transition([2])
            Traceback (most recent call last):
            ...
            ValueError: No transition with input [2] found.
        """
        # Linear scan over the outgoing transitions; exact-match on word_in.
        for transition in self.current_state.transitions:
            if transition.word_in == word_in:
                return transition
        raise ValueError("No transition with input %s found." % (word_in,))
#*****************************************************************************
@cached_function
def setup_latex_preamble():
    r"""
    Add the ``tikz`` package (with automata support) to the LaTeX
    preamble so that finite state machines render nicely.

    INPUT:

    Nothing.

    OUTPUT:

    Nothing.

    See the section on :ref:`finite_state_machine_LaTeX_output`
    in the introductory examples of this module.

    TESTS::

        sage: from sage.combinat.finite_state_machine import setup_latex_preamble
        sage: setup_latex_preamble()
        sage: ("\usepackage{tikz}" in latex.extra_preamble()) == latex.has_file("tikz.sty")
        True
    """
    latex.add_package_to_preamble_if_available('tikz')
    latex.add_to_mathjax_avoid_list("tikz")
    # The automata library only makes sense when tikz itself is installed.
    tikz_available = latex.has_file("tikz.sty")
    if tikz_available:
        latex.add_to_preamble(r'\usetikzlibrary{automata}')
#*****************************************************************************
| 6,819 | 0 | 587 |
ae682c8b47e3967aa9600e8dd17c4ff5a6d2a51d | 3,202 | py | Python | lambda/email/email_lambda.py | defCoding/anilist-to-mal-sync | a56e546c9c8eb8c756d71942ec9fd2187a2a9cf5 | [
"MIT"
] | null | null | null | lambda/email/email_lambda.py | defCoding/anilist-to-mal-sync | a56e546c9c8eb8c756d71942ec9fd2187a2a9cf5 | [
"MIT"
] | null | null | null | lambda/email/email_lambda.py | defCoding/anilist-to-mal-sync | a56e546c9c8eb8c756d71942ec9fd2187a2a9cf5 | [
"MIT"
] | null | null | null | import json
import os
import boto3
import secrets
import time
def lambda_handler(event, context):
    """
    Sends an email notification to user with MAL OAuth link.

    Args:
        event (dict): AWS triggering event; must contain a 'user' key
        context (dict): AWS context

    Returns:
        (dict): JSON response to triggering event
    """
    try:
        user = event['user']
    except KeyError as e:
        print(e)
        return create_response(400, "Need to provide user to email.")

    # Fetch config file
    s3 = boto3.client('s3')
    bucket = 'anilist-to-mal-config'
    key = 'config.json'
    try:
        data = s3.get_object(Bucket=bucket, Key=key)
        config = json.loads(data['Body'].read())
    except (s3.exceptions.NoSuchKey, s3.exceptions.InvalidObjectState) as e:
        print(e)
        return create_response(500, "The server failed to fetch config.")
    except json.JSONDecodeError as e:
        # BUG FIX: the bare name ``JSONDecodeError`` was never imported, so a
        # malformed config file raised NameError instead of reaching this
        # handler; it lives in the json module.
        print(e)
        return create_response(500, "The config file could not be decoded.")

    try:
        user_email = config['users'][user]['email']
    except KeyError as e:
        print(e)
        return create_response(404, "Could not find user email in config.")
    mal_id = config['MAL_CLIENT_ID']
    # NOTE: MAL_CLIENT_SECRET is not needed to build the authorization URL
    # (the secret is only used when exchanging the code); the previously
    # unused lookup was removed.

    # Generate and send email
    ses = boto3.client('ses', region_name=os.environ['AWS_REGION'])
    # PKCE plain-method code challenge; MAL accepts 43-128 characters.
    code_challenge = secrets.token_urlsafe(100)[:128]
    auth_url = f"https://myanimelist.net/v1/oauth2/authorize?response_type=code&client_id={mal_id}&code_challenge={code_challenge}&state={user}"
    print(f"Code Challenge for {user}: {code_challenge}")
    try:
        ses.send_email(
            Destination={
                'ToAddresses': [user_email]
            },
            Message={
                'Body': {
                    'Text': {
                        'Charset': 'UTF-8',
                        'Data': f"Click this link to authorize Anilist-to-MAL-sync to be able to update your MAL: {auth_url}"
                    }
                },
                'Subject': {
                    'Charset': 'UTF-8',
                    'Data': "Anilist-to-MAL-Sync Authorization"
                }
            },
            Source='defcoding@gmail.com'
        )
    except ses.exceptions.MessageRejected as e:
        print(e)
        return create_response(500, "Could not send notification email.")

    # Persist the verifier/timestamps so the OAuth callback can finish the flow.
    config['users'][user]['code_verifier'] = code_challenge
    config['users'][user]['last_notified'] = int(time.time())
    config['users'][user]['auth_failed'] = False
    s3.put_object(Body=json.dumps(config), Bucket=bucket, Key=key)
    return create_response(200, "Email successfully sent!")
def create_response(code: int, body: str) -> dict:
    """
    Creates a JSON response for HTTP.

    Args:
        code (int): The HTTP status code
        body (str): The HTTP body as a string

    Returns:
        (dict): JSON HTTP response
    """
    response = {
        'headers': {'Content-Type': 'text/html'},
        'statusCode': code,
        'body': body,
    }
    return response
| 31.70297 | 144 | 0.55965 | import json
import os
import boto3
import secrets
import time
def lambda_handler(event, context):
"""
Sends an email notification to user with MAL OAuth link.
Args:
event (dict): AWS triggering event
context (dict): AWS context
Returns:
(dict): JSON response to triggering event
"""
try:
user = event['user']
except KeyError as e:
print(e)
return create_response(400, "Need to provide user to email.")
# Fetch config file
s3 = boto3.client('s3')
bucket = 'anilist-to-mal-config'
key = 'config.json'
try:
data = s3.get_object(Bucket=bucket, Key=key)
config = json.loads(data['Body'].read())
except (s3.exceptions.NoSuchKey, s3.exceptions.InvalidObjectState) as e:
print(e)
return create_response(500, "The server failed to fetch config.")
except JSONDecodeError as e:
print(e)
return create_response(500, "The config file could not be decoded.")
try:
user_email = config['users'][user]['email']
except KeyError as e:
print(e)
return create_response(404, "Could not find user email in config.")
mal_id = config['MAL_CLIENT_ID']
mal_secret = config['MAL_CLIENT_SECRET']
# Generate and send email
ses = boto3.client('ses', region_name=os.environ['AWS_REGION'])
code_challenge = secrets.token_urlsafe(100)[:128]
auth_url = f"https://myanimelist.net/v1/oauth2/authorize?response_type=code&client_id={mal_id}&code_challenge={code_challenge}&state={user}"
print(f"Code Challenge for {user}: {code_challenge}")
try:
ses.send_email(
Destination={
'ToAddresses': [user_email]
},
Message={
'Body': {
'Text': {
'Charset': 'UTF-8',
'Data': f"Click this link to authorize Anilist-to-MAL-sync to be able to update your MAL: {auth_url}"
}
},
'Subject': {
'Charset': 'UTF-8',
'Data': "Anilist-to-MAL-Sync Authorization"
}
},
Source='defcoding@gmail.com'
)
except ses.exceptions.MessageRejected as e:
print(e)
return create_response(500, "Could not send notification email.")
config['users'][user]['code_verifier'] = code_challenge
config['users'][user]['last_notified'] = int(time.time())
config['users'][user]['auth_failed'] = False
s3.put_object(Body=json.dumps(config), Bucket=bucket, Key=key)
return create_response(200, "Email successfully sent!")
def create_response(code: int, body: str) -> dict:
"""
Creates a JSON response for HTTP.
Args:
code (int): The HTTP status code
body (str): The HTTP body as a string
Returns:
(dict): JSON HTTP response
"""
return {
'headers': {
'Content-Type': 'text/html'
},
'statusCode': code,
'body': body
}
| 0 | 0 | 0 |
4dc2e9135de4ba5558a0707747e3bfb85be2b7bf | 1,396 | py | Python | plexlibrary/utils.py | aelfa/python-plexlibrary | 58a99606c502b56bcb57953411803f613e531bcc | [
"BSD-3-Clause"
] | 208 | 2017-10-01T20:21:52.000Z | 2022-03-25T12:48:24.000Z | plexlibrary/utils.py | aelfa/python-plexlibrary | 58a99606c502b56bcb57953411803f613e531bcc | [
"BSD-3-Clause"
] | 109 | 2017-10-02T07:04:59.000Z | 2021-11-28T10:08:51.000Z | plexlibrary/utils.py | aelfa/python-plexlibrary | 58a99606c502b56bcb57953411803f613e531bcc | [
"BSD-3-Clause"
] | 51 | 2018-02-24T18:16:20.000Z | 2021-11-26T21:44:12.000Z | # -*- coding: utf-8 -*-
from datetime import datetime
import ruamel.yaml
| 23.661017 | 61 | 0.550143 | # -*- coding: utf-8 -*-
from datetime import datetime
import ruamel.yaml
class Colors(object):
    """ANSI escape sequences for colorizing terminal output."""
    RED = u'\033[1;31m'
    BLUE = u'\033[1;34m'
    CYAN = u'\033[1;36m'
    GREEN = u'\033[0;32m'
    RESET = u'\033[0;0m'  # restore default terminal attributes
    BOLD = u'\033[;1m'
    REVERSE = u'\033[;7m'
class YAMLBase(object):
    """Thin dict-like wrapper around a YAML file.

    Loads ``filename`` on construction into ``self.data`` using ruamel's
    round-trip mode (quotes and comments survive a ``save()``), and exposes
    item access, ``get`` and value-iteration over the top-level mapping.
    """

    def __init__(self, filename):
        self.filename = filename
        # Round-trip loader: preserves quoting/comments when re-saving.
        yaml = ruamel.yaml.YAML()
        yaml.preserve_quotes = True
        with open(self.filename, 'r') as f:
            try:
                self.data = yaml.load(f)
            except ruamel.yaml.YAMLError as e:
                raise e

    def __getitem__(self, k):
        return self.data[k]

    def __iter__(self):
        # BUG FIX: the original signature ``__iter__(self, k)`` broke the
        # iterator protocol -- ``iter(obj)`` raised TypeError.  It also used
        # Python-2-only ``itervalues``; iterate over the values as intended.
        return iter(self.data.values())

    def __setitem__(self, k, v):
        self.data[k] = v

    def get(self, k, default=None):
        """Return ``self.data[k]`` if present, else ``default``."""
        if k in self.data:
            return self.data[k]
        else:
            return default

    def save(self):
        """Write ``self.data`` back to ``self.filename``."""
        yaml = ruamel.yaml.YAML()
        with open(self.filename, 'w') as f:
            yaml.dump(self.data, f)
def add_years(years, from_date=None):
    """Return ``from_date`` shifted by ``years`` years (default: now).

    A Feb 29 source date that lands on a non-leap target year is clamped
    to Feb 28 of that year.
    """
    base = datetime.now() if from_date is None else from_date
    target_year = base.year + years
    try:
        return base.replace(year=target_year)
    except ValueError:
        # Only Feb 29 can fail here; fall back to Feb 28.
        return base.replace(month=2, day=28, year=target_year)
| 913 | 176 | 230 |
74e31db24980299137e2dba371ce4d3974a253e2 | 1,177 | py | Python | tutorials/pl_video_5_database/auth.py | P-ranjal/pushups-logger | 5df87fe3f3513fcb91abe15b49eed14974c6fe9b | [
"MIT"
] | 10 | 2020-07-21T09:32:56.000Z | 2021-07-15T10:10:52.000Z | tutorials/pl_video_5_database/auth.py | P-ranjal/pushups-logger | 5df87fe3f3513fcb91abe15b49eed14974c6fe9b | [
"MIT"
] | 1 | 2021-12-13T20:47:03.000Z | 2021-12-13T20:47:03.000Z | tutorials/pl_video_5_database/auth.py | P-ranjal/pushups-logger | 5df87fe3f3513fcb91abe15b49eed14974c6fe9b | [
"MIT"
] | 15 | 2021-09-03T18:52:25.000Z | 2022-03-23T07:12:23.000Z | from flask import Blueprint, render_template, url_for, request, redirect
from werkzeug.security import generate_password_hash
from .models import User
from . import db
auth = Blueprint('auth', __name__)
@auth.route('/signup')
@auth.route('/signup', methods=['POST'])
@ auth.route('/login')
@ auth.route('/login', methods=['POST'])
@ auth.route('/logout')
| 22.634615 | 103 | 0.686491 | from flask import Blueprint, render_template, url_for, request, redirect
from werkzeug.security import generate_password_hash
from .models import User
from . import db
auth = Blueprint('auth', __name__)
@auth.route('/signup')
def signup():
    """Render the signup form."""
    return render_template('signup.html')
@auth.route('/signup', methods=['POST'])
def signup_post():
    """Handle the signup form: create the user, then go to the login page."""
    email = request.form.get('email')
    name = request.form.get('name')
    password = request.form.get('password')
    user = User.query.filter_by(email=email).first()
    if user:
        # BUG FIX: previously this only printed and fell through, inserting a
        # duplicate account for the same email; bounce back to the form.
        print("User already Exists")
        return redirect(url_for('auth.signup'))
    # Store only a salted hash of the password, never the plaintext.
    new_user = User(email=email, name=name, password=generate_password_hash(password, method='sha256'))
    db.session.add(new_user)
    db.session.commit()
    return redirect(url_for('auth.login'))
@auth.route('/login')
def login():
    """Render the login form."""
    return render_template('login.html')
@auth.route('/login', methods=['POST'])
def login_post():
    """Handle the login form and send the user to their profile."""
    email = request.form.get('email')
    password = request.form.get('password')
    # SECURITY FIX: the previous debug print wrote the plaintext password to
    # the server logs; never log credentials.
    print(email)
    return redirect(url_for('main.profile'))
@auth.route('/logout')
def logout():
    """Placeholder logout endpoint; returns an explanatory string."""
    return "Use this to log out."
| 701 | 0 | 110 |
a12d5595bbf31fa577ed2ee632ecb89e19523728 | 2,961 | py | Python | interlib/set.py | D-Programming-Man/Pseudo | 13d39b31754a37d637ca4e39361b66b5a91c9d2c | [
"MIT"
] | null | null | null | interlib/set.py | D-Programming-Man/Pseudo | 13d39b31754a37d637ca4e39361b66b5a91c9d2c | [
"MIT"
] | 13 | 2020-10-21T22:49:36.000Z | 2020-12-07T22:08:03.000Z | interlib/set.py | D-Programming-Man/Pseudo | 13d39b31754a37d637ca4e39361b66b5a91c9d2c | [
"MIT"
] | null | null | null | from interlib.utility import key_var_check
from interlib.utility import print_line
from interlib.utility import inter_data_type
from interlib.utility import list_dict_checker
help_manual = " Syntax: \n" \
" Set <variable_name> [equal] to (<variable>/<number>/<string>/<list>/<table>) \n" \
" \n" \
" Examples: \n" \
" Set x to \"Hello World\" \n" \
" Set y equal to 1232 \n" \
" Set z to [1, 3 , 4, \"hmm\"] \n" \
" Set recipe to {\"Milk\" : \"2 lbs\", \"Crackers\" : \"Handful\"} \n"
'''
Set keyword: used as a more advanced create option
Requires:
. line_numb = The line number we are looking at in the Psudo code file
. line_list = The line we took from the Psudo code file, but in list format
. all_variables = The dictionary that contains all of the variables for that Psudo code file
. indent = The indentation to correctly format the line of python code
. py_lines = The python code that we will append our finalized parsed code to it
Returns:
. A boolean value. This is used in the interpreter.py file to make sure that the parsing of the code executes correctly. Otherwise the parsing stops and ends it prematurely.
'''
| 32.184783 | 174 | 0.634583 | from interlib.utility import key_var_check
from interlib.utility import print_line
from interlib.utility import inter_data_type
from interlib.utility import list_dict_checker
help_manual = " Syntax: \n" \
" Set <variable_name> [equal] to (<variable>/<number>/<string>/<list>/<table>) \n" \
" \n" \
" Examples: \n" \
" Set x to \"Hello World\" \n" \
" Set y equal to 1232 \n" \
" Set z to [1, 3 , 4, \"hmm\"] \n" \
" Set recipe to {\"Milk\" : \"2 lbs\", \"Crackers\" : \"Handful\"} \n"
'''
Set keyword: used as a more advanced create option
Requires:
. line_numb = The line number we are looking at in the Psudo code file
. line_list = The line we took from the Psudo code file, but in list format
. all_variables = The dictionary that contains all of the variables for that Psudo code file
. indent = The indentation to correctly format the line of python code
. py_lines = The python code that we will append our finalized parsed code to it
Returns:
. A boolean value. This is used in the interpreter.py file to make sure that the parsing of the code executes correctly. Otherwise the parsing stops and ends it prematurely.
'''
def handler(interpret_state):
    """Translate a ``Set <name> [equal] to <value>`` pseudo-code line into Python.

    Appends the generated assignment to ``interpret_state["py_lines"]`` and
    registers the variable in ``interpret_state["all_variables"]``.  Returns
    ``True`` on success, ``False`` (after printing a diagnostic) otherwise.
    """
    line_numb = interpret_state["line_numb"]
    line_list = interpret_state["line_list"]
    all_variables = interpret_state["all_variables"]
    indent = interpret_state["pseudo_indent"] + interpret_state["indent"]
    py_lines = interpret_state["py_lines"]
    indent_space = indent * " "
    # The position of the line_list
    word_pos = 1
    var_name = line_list[word_pos]
    word_pos += 1
    # Skip the optional "equal" token: advance until the mandatory "to".
    while line_list[word_pos] != "to":
        word_pos += 1
    word_pos += 1
    # Dispatch on the first character of the value: "[" = list, "{" = table,
    # anything else is a scalar/variable expression.
    if line_list[word_pos][0] == "[":
        value_list = line_list[word_pos:]
        if list_dict_checker("list", all_variables, value_list):
            value = " ".join(value_list)
            data_type = "list"
        else:
            print("Error: Invalid list")
            print_line(line_numb, line_list)
            return False
    elif line_list[word_pos][0] == "{":
        value_list = line_list[word_pos:]
        if list_dict_checker("table", all_variables, value_list):
            value = " ".join(value_list)
            data_type = "table"
        else:
            print("Error: Invalid table")
            print_line(line_numb, line_list)
            return False
    else:
        value_list = line_list[word_pos:]
        value = " ".join(value_list)
        if key_var_check(all_variables, [value]) is None:
            print("Error on line " + str(line_numb+1) + ". " + var_name + " is being set to an invalid value.")
            print_line(line_numb, line_list)
            return False
        data_type = inter_data_type(value)
    # Emit the Python assignment and record the variable's type and value.
    py_line = indent_space + var_name + " = " + value + "\n"
    py_lines.append(py_line)
    all_variables[var_name] = {"data_type": data_type, "value": value}
    return True
| 1,687 | 0 | 23 |
35862c22704c47d50ca3de19ade81c7c79f57b64 | 963 | py | Python | py/libs/assetexchange_shared/client/basic.py | ddesmond/assetexchange | 0f8133b449b41595e22f27f3970bec7ebeee19c1 | [
"MIT"
] | null | null | null | py/libs/assetexchange_shared/client/basic.py | ddesmond/assetexchange | 0f8133b449b41595e22f27f3970bec7ebeee19c1 | [
"MIT"
] | null | null | null | py/libs/assetexchange_shared/client/basic.py | ddesmond/assetexchange | 0f8133b449b41595e22f27f3970bec7ebeee19c1 | [
"MIT"
] | null | null | null | import json
try:
from urllib.request import urlopen, Request
except ImportError:
from urllib2 import urlopen, Request
| 23.487805 | 67 | 0.608515 | import json
try:
from urllib.request import urlopen, Request
except ImportError:
from urllib2 import urlopen, Request
def call_basic_func(port, service, function, input, timeout):
    """POST one request to the local assetexchange "basic" endpoint.

    Sends ``input`` to ``service.function`` on 127.0.0.1:``port`` and returns
    the decoded ``output`` field of the reply.  A ``timeout`` of 0 means
    "use the default of 10 seconds".  Raises ``RuntimeError`` when the
    server reports an error.
    """
    # prepare url
    endpoint = "http://127.0.0.1:" + str(port) + "/.assetexchange/basic"

    # prepare timeout (0 selects the default)
    effective_timeout = 10 if timeout == 0 else timeout

    # prepare request message
    payload = json.dumps({
        'id': 0,
        'address': service + '.' + function,
        'input': input,
        'final': True,
    }).encode('utf8')

    # execute request
    request = Request(
        endpoint, data=payload, method="POST",
        headers={'content-type': 'application/json'}
    )
    response = urlopen(request, timeout=effective_timeout)

    # read response message and surface any server-side error
    message = json.loads(response.read().decode('utf-8'))
    if message['error'] is not None:
        raise RuntimeError(message['error'])
    return message['output']
| 813 | 0 | 23 |
0d3f8f4387b5ae6ced5e41d7387fe84b5678da0e | 819 | py | Python | bikeheap.py | nicktimko/heapo | 9aef802e365304a85007463674c99e50c3e29634 | [
"PSF-2.0"
] | null | null | null | bikeheap.py | nicktimko/heapo | 9aef802e365304a85007463674c99e50c3e29634 | [
"PSF-2.0"
] | null | null | null | bikeheap.py | nicktimko/heapo | 9aef802e365304a85007463674c99e50c3e29634 | [
"PSF-2.0"
] | null | null | null | '''
>>> heap = BikeHeap([4, 2, 1, 3])
>>> heap.pop()
1
>>> heap.pop()
2
'''
import heapq
if __name__ == '__main__':
import doctest
print(doctest.testmod())
| 18.2 | 66 | 0.59707 | '''
>>> heap = BikeHeap([4, 2, 1, 3])
>>> heap.pop()
1
>>> heap.pop()
2
'''
import heapq
class BikeHeap(object):
    """An object-oriented facade over :mod:`heapq`.

    The list handed to the constructor is heapified *in place* and kept as
    the backing store, so the caller's list shares storage with the heap.
    """

    def __init__(self, heap):
        self.heap = heap
        heapq.heapify(self.heap)

    def __repr__(self):
        return '{}({})'.format(self.__class__.__name__, self.heap)

    def __len__(self):
        return len(self.heap)

    def __iter__(self):
        # Iterates in heap (not sorted) order.
        return iter(self.heap)

    def pop(self):
        """Remove and return the smallest item."""
        return heapq.heappop(self.heap)

    def push(self, item):
        """Add ``item`` while keeping the heap invariant."""
        heapq.heappush(self.heap, item)

    def peek(self):
        """Return the smallest item without removing it."""
        return self.heap[0]

    def pushpop(self, item):
        """Push ``item`` then pop and return the smallest (combined op)."""
        return heapq.heappushpop(self.heap, item)

    def replace(self, item):
        """Pop and return the smallest, then push ``item``."""
        return heapq.heapreplace(self.heap, item)
if __name__ == '__main__':
    import doctest
    # Run the examples embedded in the module docstring and report the result.
    print(doctest.testmod())
| 384 | 2 | 265 |
82ba1046786a2e1a0a460240ef1e2dd952a409f3 | 1,394 | py | Python | setup.py | melinath/philo | 61b73fe068172f02d6c47e2b5387161919ec9618 | [
"0BSD"
] | 2 | 2019-07-08T02:31:06.000Z | 2019-07-08T02:31:57.000Z | setup.py | melinath/philo | 61b73fe068172f02d6c47e2b5387161919ec9618 | [
"0BSD"
] | null | null | null | setup.py | melinath/philo | 61b73fe068172f02d6c47e2b5387161919ec9618 | [
"0BSD"
] | null | null | null | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
# Single-source the version: read the tuple from the philo package itself.
version = __import__('philo').VERSION

setup(
    name = 'philo',
    # Version tuple -> dotted string, e.g. (1, 2, 3) -> "1.2.3".
    version = '.'.join([str(v) for v in version]),
    url = "http://philocms.org/",
    description = "A foundation for developing web content management systems.",
    long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
    maintainer = "iThink Software",
    maintainer_email = "contact@ithinksw.com",
    packages = find_packages(),
    classifiers = [
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: ISC License (ISCL)',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
    ],
    platforms = ['OS Independent'],
    license = 'ISC License (ISCL)',
    install_requires = [
        'django>=1.3',
        'django-mptt>0.4.2,==dev',
    ],
    # Optional feature sets; install e.g. as ``philo[docs]``.
    extras_require = {
        'docs': ["sphinx>=1.0"],
        'grappelli': ['django-grappelli>=2.3'],
        'migrations': ['south>=0.7.2'],
        'waldo-recaptcha': ['recaptcha-django'],
        'sobol-eventlet': ['eventlet'],
        'sobol-scrape': ['BeautifulSoup'],
        'penfield': ['django-taggit>=0.9'],
    },
    # Source for the ``==dev`` django-mptt requirement above.
    dependency_links = [
        'https://github.com/django-mptt/django-mptt/tarball/master#egg=django-mptt-dev'
    ]
)
) | 27.88 | 83 | 0.667145 | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
version = __import__('philo').VERSION
setup(
name = 'philo',
version = '.'.join([str(v) for v in version]),
url = "http://philocms.org/",
description = "A foundation for developing web content management systems.",
long_description = open(os.path.join(os.path.dirname(__file__), 'README')).read(),
maintainer = "iThink Software",
maintainer_email = "contact@ithinksw.com",
packages = find_packages(),
classifiers = [
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
platforms = ['OS Independent'],
license = 'ISC License (ISCL)',
install_requires = [
'django>=1.3',
'django-mptt>0.4.2,==dev',
],
extras_require = {
'docs': ["sphinx>=1.0"],
'grappelli': ['django-grappelli>=2.3'],
'migrations': ['south>=0.7.2'],
'waldo-recaptcha': ['recaptcha-django'],
'sobol-eventlet': ['eventlet'],
'sobol-scrape': ['BeautifulSoup'],
'penfield': ['django-taggit>=0.9'],
},
dependency_links = [
'https://github.com/django-mptt/django-mptt/tarball/master#egg=django-mptt-dev'
]
) | 0 | 0 | 0 |
8485555a4159399f238c1140f08f9284e64dc49d | 1,622 | py | Python | examples_scalability/demo_fast_convergence.py | microprediction/winning | f03b74fc87cfe4b317a3b0ac3e165bc5e49a39fc | [
"MIT"
] | 22 | 2021-03-05T21:52:44.000Z | 2022-03-31T20:40:52.000Z | examples_scalability/demo_fast_convergence.py | terragord7/winning | 669e2bac5a6c1cc5cadc99f9d2ae485d954b75d0 | [
"MIT"
] | 1 | 2021-10-20T21:15:19.000Z | 2021-10-20T21:15:19.000Z | examples_scalability/demo_fast_convergence.py | terragord7/winning | 669e2bac5a6c1cc5cadc99f9d2ae485d954b75d0 | [
"MIT"
] | 3 | 2020-12-09T03:16:05.000Z | 2021-06-24T20:26:23.000Z | from winning.lattice_plot import densitiesPlot
from winning.lattice import skew_normal_density, mean_of_density, implicit_state_prices, winner_of_many, sample_winner_of_many
from winning.lattice_calibration import solve_for_implied_offsets, state_prices_from_offsets, densities_from_offsets
import numpy as np
PLOTS=True
import math
unit = 0.05
L = 500
if __name__=='__main__':
demo()
| 41.589744 | 149 | 0.673243 | from winning.lattice_plot import densitiesPlot
from winning.lattice import skew_normal_density, mean_of_density, implicit_state_prices, winner_of_many, sample_winner_of_many
from winning.lattice_calibration import solve_for_implied_offsets, state_prices_from_offsets, densities_from_offsets
import numpy as np
PLOTS=True
import math
unit = 0.05
L = 500
def demo( ):
    """Demonstrate how fast the implied-offset calibration converges.

    For 1..4 solver iterations: build a set of true offsets, derive the
    implied state prices, invert those prices back to offsets, and record
    the mean absolute error.  The errors are plotted (log scale) against
    the number of iterations.
    """
    density = skew_normal_density(L=500, unit = unit, a=1.5)
    n = 100
    errors = list()
    num_iters = list(range(1,5))
    for num_iter in num_iters:
        # True offsets on the integer lattice; int() truncates unit*k.
        true_offsets = [ int(unit*k) for k in range( n ) ]
        state_prices = state_prices_from_offsets( density=density, offsets=true_offsets )
        print("State prices are " + str( state_prices ))
        offset_samples = list( range( -100, 100 ))[::-1]
        # Now try to infer offsets from state prices
        implied_offsets = solve_for_implied_offsets(prices = state_prices, density = density, offset_samples= offset_samples, nIter=num_iter)
        # Offsets are only identified up to a common shift, so anchor on entry 0.
        recentered_offsets = [ io-implied_offsets[0] for io in implied_offsets]
        differences = [ o1-o2 for o1, o2 in zip(recentered_offsets,true_offsets)]
        avg_l1_in_offset = np.mean(np.abs( differences ))
        errors.append( avg_l1_in_offset)
        print(avg_l1_in_offset)
    import matplotlib.pyplot as plt
    plt.scatter(num_iters,errors)
    plt.yscale('log')
    plt.xlabel('Number of iterations')
    plt.ylabel('Mean absolute error in relative ratings')
    plt.show()
if __name__=='__main__':
demo()
| 1,207 | 0 | 23 |
029cc27343a7d3920b0fc3d87845df59d917c17c | 816 | py | Python | prerun.py | jobovy/galpy-repl | 86d81de0bd7685bd979b627bed7808e3acc2df53 | [
"MIT"
] | null | null | null | prerun.py | jobovy/galpy-repl | 86d81de0bd7685bd979b627bed7808e3acc2df53 | [
"MIT"
] | null | null | null | prerun.py | jobovy/galpy-repl | 86d81de0bd7685bd979b627bed7808e3acc2df53 | [
"MIT"
] | null | null | null | # A set of Python commands to pre-run for the galpy.org/repl redirect
# Install astroquery
import micropip
await micropip.install('astroquery')
# Install galpy
await micropip.install('https://www.galpy.org/wheelhouse/galpy-latest-py3-none-any.whl')
# Turn off warnings
import warnings
from galpy.util import galpyWarning
warnings.simplefilter(action='ignore',category=galpyWarning)
# Import units from astropy to have them handy
from astropy import units
import astropy.units as u
# Set up galpy to return outputs as astropy Quantities
import galpy.util.conversion
galpy.util.conversion._APY_UNITS=True
# Also need to set the following, because pyodide SkyCoord failure prevents this from being set correctly in Orbits
import galpy.orbit.Orbits
galpy.orbit.Orbits._APY_LOADED= True
# Inline plots
%matplotlib inline
| 37.090909 | 115 | 0.816176 | # A set of Python commands to pre-run for the galpy.org/repl redirect
# Install astroquery
import micropip
# NOTE(review): bare top-level 'await' is only valid because Pyodide's REPL
# evaluates this script in a top-level-await context; this file is not a
# normal importable Python module.
await micropip.install('astroquery')
# Install galpy (pre-built wheel served from galpy.org)
await micropip.install('https://www.galpy.org/wheelhouse/galpy-latest-py3-none-any.whl')
# Turn off warnings
import warnings
from galpy.util import galpyWarning
warnings.simplefilter(action='ignore',category=galpyWarning)
# Import units from astropy to have them handy
from astropy import units
import astropy.units as u
# Set up galpy to return outputs as astropy Quantities
import galpy.util.conversion
galpy.util.conversion._APY_UNITS=True
# Also need to set the following, because pyodide SkyCoord failure prevents this from being set correctly in Orbits
import galpy.orbit.Orbits
galpy.orbit.Orbits._APY_LOADED= True
# Inline plots (IPython magic; stripped/handled by the REPL, not plain Python)
%matplotlib inline
| 0 | 0 | 0 |
799c5d7a6bd2508e88501b8857ae8849255bbfa3 | 240 | py | Python | Pi/app.py | madhurgupta10/Automatic-Fish-Feeder | cffb49de95d0080f630f09b353653d80eace661a | [
"Apache-2.0"
] | 4 | 2018-06-28T05:40:03.000Z | 2020-03-08T02:27:17.000Z | Pi/app.py | madhurgupta10/Automatic-Fish-Feeder | cffb49de95d0080f630f09b353653d80eace661a | [
"Apache-2.0"
] | null | null | null | Pi/app.py | madhurgupta10/Automatic-Fish-Feeder | cffb49de95d0080f630f09b353653d80eace661a | [
"Apache-2.0"
] | null | null | null | from fishfeeder import FishFeeder
import firebase
url = "{your firebase url}"
while True:
result = firebase.get(url)
if (result['action'] == True):
FishFeeder().FeedNow(0, 90)
firebase.put(url, {'action': False}) | 20 | 44 | 0.641667 | from fishfeeder import FishFeeder
import firebase
import time

url = "{your firebase url}"

# Poll Firebase for a feed request.  The original loop busy-waited: it
# re-read the endpoint and unconditionally wrote {'action': False} on every
# iteration, hammering the API.  Now the flag is reset only after feeding,
# and the loop sleeps between polls.
while True:
    result = firebase.get(url)
    if (result['action'] == True):
        FishFeeder().FeedNow(0, 90)
        # Reset the flag so one request triggers exactly one feed cycle.
        firebase.put(url, {'action': False})
    # Throttle polling; 1 s latency is plenty for a fish feeder.
    time.sleep(1)
0ddae8c995ed97349e47411d71055dd1d6b8ab29 | 2,427 | py | Python | tests/test_peak_properties.py | jacr20/pax | d64d0ae4e4ec3e9bb3e61065ed92e9ea23328940 | [
"BSD-3-Clause"
] | 17 | 2016-04-24T12:02:03.000Z | 2021-07-19T19:39:47.000Z | tests/test_peak_properties.py | jacr20/pax | d64d0ae4e4ec3e9bb3e61065ed92e9ea23328940 | [
"BSD-3-Clause"
] | 300 | 2016-04-01T15:29:57.000Z | 2021-01-03T23:59:45.000Z | tests/test_peak_properties.py | jacr20/pax | d64d0ae4e4ec3e9bb3e61065ed92e9ea23328940 | [
"BSD-3-Clause"
] | 20 | 2016-04-14T15:11:26.000Z | 2021-09-18T06:39:09.000Z | from __future__ import division
import unittest
import numpy as np
from numpy import testing as np_testing
from pax.plugins.peak_processing.BasicProperties import integrate_until_fraction, put_w_in_center_of_field
if __name__ == '__main__':
unittest.main()
| 37.921875 | 106 | 0.658426 | from __future__ import division
import unittest
import numpy as np
from numpy import testing as np_testing
from pax.plugins.peak_processing.BasicProperties import integrate_until_fraction, put_w_in_center_of_field
class TestPeakProperties(unittest.TestCase):
    """Unit tests for the BasicProperties peak-processing helpers."""

    def test_integrate_until_fraction(self):
        """integrate_until_fraction fills `result` with the (interpolated)
        sample position where each desired fraction of the waveform's
        cumulative area is reached — inferred from the assertions below."""
        # Test a simple ones-only waveform, for which no interpolation will be needed
        w = np.ones(100, dtype=np.float32)
        fractions_desired = np.array([0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100], dtype=np.float64) / 100
        result = np.zeros(len(fractions_desired))
        integrate_until_fraction(w, fractions_desired, result)
        np_testing.assert_almost_equal(result, fractions_desired * 100, decimal=4)
        # Now test a one-sample waveform, which will probe the interpolation stuff
        w = np.ones(1, dtype=np.float32)
        result = np.zeros(len(fractions_desired))
        integrate_until_fraction(w, fractions_desired, result)
        np_testing.assert_almost_equal(result, fractions_desired, decimal=4)

    def test_store_waveform(self):
        """put_w_in_center_of_field writes w into `field`, clipping at both
        edges.  NOTE(review): the index->position mapping (index 0 placing w
        right-aligned) is taken from these fixtures — confirm against the
        implementation before relying on it."""
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(3), field, 0)
        np_testing.assert_equal(field, np.array([0, 0, 1, 1, 1]))
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(3), field, 1)
        np_testing.assert_equal(field, np.array([0, 1, 1, 1, 0]))
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(3), field, 2)
        np_testing.assert_equal(field, np.array([1, 1, 1, 0, 0]))
        # Left overhang
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(4), field, 3)
        np_testing.assert_equal(field, np.array([1, 1, 1, 0, 0]))
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(7), field, 6)
        np_testing.assert_equal(field, np.array([1, 1, 1, 0, 0]))
        # Right overhang
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(4), field, 0)
        np_testing.assert_equal(field, np.array([0, 0, 1, 1, 1]))
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(7), field, 0)
        np_testing.assert_equal(field, np.array([0, 0, 1, 1, 1]))
        # Waveform larger than field
        field = np.zeros(5)
        put_w_in_center_of_field(np.ones(20), field, 10)
        np_testing.assert_equal(field, np.array([1, 1, 1, 1, 1]))
if __name__ == '__main__':
unittest.main()
| 2,062 | 23 | 77 |
c1499693144f5e9b369a97a5f4daf9ab135f676d | 625 | py | Python | tests/test_profiler.py | anudeepsamaiya/django_profiler | 497e57a44340ecee148699940b1f3530874c2a2a | [
"MIT"
] | null | null | null | tests/test_profiler.py | anudeepsamaiya/django_profiler | 497e57a44340ecee148699940b1f3530874c2a2a | [
"MIT"
] | null | null | null | tests/test_profiler.py | anudeepsamaiya/django_profiler | 497e57a44340ecee148699940b1f3530874c2a2a | [
"MIT"
] | 1 | 2020-04-11T15:05:23.000Z | 2020-04-11T15:05:23.000Z | import time
import pytest
from pyprofile import profile
@pytest.fixture
def test_profile_decorator(dump_dir):
"""Only test if the profile decorator is written correctly.
"""
@profile(dump_dir=dump_dir)
@profile()
assert fn() == 2, "fn with no parameters failed."
assert fn_with_parameters(5) == 5, "fn with parameters failed."
| 20.16129 | 67 | 0.6592 | import time
import pytest
from pyprofile import profile
@pytest.fixture
def dump_dir(tmp_path):
    """Provide an empty directory for profiler dump files, rooted in pytest's tmp_path."""
    d = tmp_path / "pyprofile_test_dumps"
    d.mkdir()
    return d
def test_profile_decorator(dump_dir):
    """Smoke-test the @profile decorator (with and without dump_dir):
    decorated functions must still return the wrapped function's result.
    """
    @profile(dump_dir=dump_dir)
    def fn_with_parameters(seconds):
        time.sleep(seconds)
        return seconds
    @profile()
    def fn():
        seconds = 2
        time.sleep(seconds)
        return seconds
    assert fn() == 2, "fn with no parameters failed."
    assert fn_with_parameters(5) == 5, "fn with parameters failed."
| 192 | 0 | 74 |
cda381e73acefa2ad1f7ad18e8e62ee5d4f16510 | 2,528 | py | Python | strings/__init__.py | AileenLumina/dwarf | 5fc3b1b532290a474d17f84694dae1d0d53be7b4 | [
"MIT"
] | null | null | null | strings/__init__.py | AileenLumina/dwarf | 5fc3b1b532290a474d17f84694dae1d0d53be7b4 | [
"MIT"
] | null | null | null | strings/__init__.py | AileenLumina/dwarf | 5fc3b1b532290a474d17f84694dae1d0d53be7b4 | [
"MIT"
] | null | null | null | """Externalized strings for better structure and easier localization"""
setup_greeting = """
Dwarf - First run configuration
Insert your bot's token, or enter 'cancel' to cancel the setup:"""
not_a_token = "Invalid input. Restart Dwarf and repeat the configuration process."
choose_prefix = """Choose a prefix. A prefix is what you type before a command.
A typical prefix would be the exclamation mark.
Can be multiple characters. You will be able to change it later and add more of them.
Choose your prefix:"""
confirm_prefix = """Are you sure you want {0} as your prefix?
You will be able to issue commands like this: {0}help
Type yes to confirm or no to change it"""
setup_finished = """
The configuration is done. Leave this window always open to keep your bot online.
All commands will have to be issued through Discord's chat,
*this window will now be read only*.
Press enter to continue"""
prefix_singular = "Prefix"
prefix_plural = "Prefixes"
use_this_url = "Use this url to bring your bot to a server:"
bot_is_online = "{} is now online."
connected_to = "Connected to:"
connected_to_servers = "{} servers"
connected_to_channels = "{} channels"
connected_to_users = "{} users"
no_prefix_set = "No prefix set. Defaulting to !"
logging_into_discord = "Logging into Discord..."
invalid_credentials = """Invalid login credentials.
If they worked before Discord might be having temporary technical issues.
In this case, press enter and try again later.
Otherwise you can type 'reset' to delete the current configuration and
redo the setup process again the next start.
> """
keep_updated_win = """Make sure to keep your bot updated by running the file
update.bat"""
keep_updated = """Make sure to keep Dwarf updated by using:\n
git pull\npip3 install --upgrade
git+https://github.com/Rapptz/discord.py@async"""
official_server = "Official server: {}"
invite_link = "https://discord.me/AileenLumina"
update_the_api = """\nYou are using an outdated discord.py.\n
Update your discord.py with by running this in your cmd
prompt/terminal:\npip3 install --upgrade git+https://
github.com/Rapptz/discord.py@async"""
command_disabled = "That command is disabled."
exception_in_command = "Exception in command '{}'"
error_in_command = "Error in command '{}' - {}: {}"
not_available_in_dm = "That command is not available in DMs."
owner_recognized = "{} has been recognized and set as owner."
| 32.410256 | 85 | 0.716377 | """Externalized strings for better structure and easier localization"""
setup_greeting = """
Dwarf - First run configuration
Insert your bot's token, or enter 'cancel' to cancel the setup:"""
not_a_token = "Invalid input. Restart Dwarf and repeat the configuration process."
choose_prefix = """Choose a prefix. A prefix is what you type before a command.
A typical prefix would be the exclamation mark.
Can be multiple characters. You will be able to change it later and add more of them.
Choose your prefix:"""
confirm_prefix = """Are you sure you want {0} as your prefix?
You will be able to issue commands like this: {0}help
Type yes to confirm or no to change it"""
setup_finished = """
The configuration is done. Leave this window always open to keep your bot online.
All commands will have to be issued through Discord's chat,
*this window will now be read only*.
Press enter to continue"""
prefix_singular = "Prefix"
prefix_plural = "Prefixes"
use_this_url = "Use this url to bring your bot to a server:"
bot_is_online = "{} is now online."
connected_to = "Connected to:"
connected_to_servers = "{} servers"
connected_to_channels = "{} channels"
connected_to_users = "{} users"
no_prefix_set = "No prefix set. Defaulting to !"
logging_into_discord = "Logging into Discord..."
invalid_credentials = """Invalid login credentials.
If they worked before Discord might be having temporary technical issues.
In this case, press enter and try again later.
Otherwise you can type 'reset' to delete the current configuration and
redo the setup process again the next start.
> """
keep_updated_win = """Make sure to keep your bot updated by running the file
update.bat"""
keep_updated = """Make sure to keep Dwarf updated by using:\n
git pull\npip3 install --upgrade
git+https://github.com/Rapptz/discord.py@async"""
official_server = "Official server: {}"
invite_link = "https://discord.me/AileenLumina"
update_the_api = """\nYou are using an outdated discord.py.\n
Update your discord.py with by running this in your cmd
prompt/terminal:\npip3 install --upgrade git+https://
github.com/Rapptz/discord.py@async"""
command_disabled = "That command is disabled."
exception_in_command = "Exception in command '{}'"
error_in_command = "Error in command '{}' - {}: {}"
not_available_in_dm = "That command is not available in DMs."
owner_recognized = "{} has been recognized and set as owner."
| 0 | 0 | 0 |
3c59ebfb0f93e4980d81653f15d23ef32b3d89a9 | 4,692 | py | Python | tests/test_scope_manager.py | kbh2o/slash | 532b7e3acdf46103ece5b86f21c29f9b58587289 | [
"BSD-3-Clause"
] | 70 | 2015-12-05T12:33:10.000Z | 2022-03-03T04:56:58.000Z | tests/test_scope_manager.py | kbh2o/slash | 532b7e3acdf46103ece5b86f21c29f9b58587289 | [
"BSD-3-Clause"
] | 711 | 2015-10-06T11:01:48.000Z | 2022-02-09T12:40:47.000Z | tests/test_scope_manager.py | kbh2o/slash | 532b7e3acdf46103ece5b86f21c29f9b58587289 | [
"BSD-3-Clause"
] | 37 | 2015-10-13T11:00:51.000Z | 2022-02-08T07:28:11.000Z | # pylint: disable=redefined-outer-name
import collections
import functools
import itertools
import pytest
import slash
from slash.core.scope_manager import ScopeManager, get_current_scope
from .utils import make_runnable_tests
from .utils.suite_writer import Suite
def test_requirement_mismatch_end_of_module():
"""Test that unmet requirements at end of file(module) still enable scope manager to detect the end and properly pop contextx"""
suite = Suite()
num_files = 3
num_tests_per_file = 5
for i in range(num_files): # pylint: disable=unused-variable
file1 = suite.add_file()
for j in range(num_tests_per_file): # pylint: disable=unused-variable
file1.add_function_test()
t = file1.add_function_test()
t.add_decorator('slash.requires(lambda: False)')
t.expect_skip()
suite.run()
@pytest.fixture
@pytest.fixture
@pytest.fixture
| 30.666667 | 132 | 0.66347 | # pylint: disable=redefined-outer-name
import collections
import functools
import itertools
import pytest
import slash
from slash.core.scope_manager import ScopeManager, get_current_scope
from .utils import make_runnable_tests
from .utils.suite_writer import Suite
def test_requirement_mismatch_end_of_module():
    """Test that unmet requirements at the end of a file (module) still let the scope manager detect the end and properly pop contexts"""
    suite = Suite()
    num_files = 3
    num_tests_per_file = 5
    for i in range(num_files): # pylint: disable=unused-variable
        file1 = suite.add_file()
        for j in range(num_tests_per_file): # pylint: disable=unused-variable
            file1.add_function_test()
        # The last test in each file carries an always-false requirement, so the
        # module ends on a skipped test — exercising module-end detection.
        t = file1.add_function_test()
        t.add_decorator('slash.requires(lambda: False)')
        t.expect_skip()
    suite.run()
def test_scope_manager(dummy_fixture_store, scope_manager, tests_by_module):
    """Scopes are pushed/popped around each test, with session/module/test ids
    incremented exactly on their respective boundaries."""
    # pylint: disable=protected-access
    last_scopes = None
    for module_index, tests in enumerate(tests_by_module):
        for test_index, test in enumerate(tests):
            scope_manager.begin_test(test)
            assert dummy_fixture_store._scopes == ['session', 'module', 'test']
            # Every test bumps 'test'; the first test of a module bumps 'module';
            # only the very first test of the run bumps 'session'.
            expected = _increment_scope(
                last_scopes,
                test=1,
                module=1 if test_index == 0 else 0,
                session=1 if test_index == 0 and module_index == 0 else 0)
            assert dummy_fixture_store._scope_ids == expected
            # make sure the dict is copied
            assert expected is not dummy_fixture_store._scope_ids
            last_scopes = expected
            scope_manager.end_test(test)
            # After end_test the 'test' scope is popped; session/module remain.
            assert dummy_fixture_store._scopes == ['session', 'module']
            assert dummy_fixture_store._scope_ids == last_scopes
    scope_manager.flush_remaining_scopes()
    assert not dummy_fixture_store._scopes
def test_get_current_scope(suite_builder):
    """get_current_scope() reports 'session'/'test'/None depending on where
    in the run lifecycle it is queried (hooks registered via gossip)."""
    @suite_builder.first_file.add_code
    def __code__():
        # pylint: disable=unused-variable,redefined-outer-name,reimported
        import slash
        import gossip
        TOKEN = 'testing-current-scope-token'
        def _validate_current_scope(expected_scope):
            assert slash.get_current_scope() == expected_scope
        @gossip.register('slash.after_session_start', token=TOKEN)
        def session_validation():
            assert slash.get_current_scope() == 'session'
        @gossip.register('slash.configure', token=TOKEN)
        @gossip.register('slash.app_quit', token=TOKEN)
        def _no_scope():
            # Outside any session/test there is no active scope.
            assert slash.get_current_scope() is None
        def test_something():
            assert slash.get_current_scope() == 'test'
        gossip.unregister_token(TOKEN)
    suite_builder.build().run().assert_success(1)
    # Back in the host test process: no slash scope is active.
    assert get_current_scope() is None
@pytest.fixture
def scope_manager(dummy_fixture_store, forge):
    """ScopeManager bound to a real slash Session whose fixture store has been
    replaced with the recording dummy (via the forge mocking fixture)."""
    session = slash.Session()
    forge.replace_with(session, 'fixture_store', dummy_fixture_store)
    return ScopeManager(session)
@pytest.fixture
def dummy_fixture_store():
    """Fresh DummyFixtureStore that records scope pushes/pops for assertions."""
    return DummyFixtureStore()
@pytest.fixture
def tests_by_module():
    """Return 5 lists of 3 runnable tests each, tagged with synthetic module
    names so they appear to span distinct modules."""
    def test_func():
        pass
    num_modules = 5
    num_tests_per_module = 3
    returned = []
    with slash.Session():
        for module_index in range(num_modules):
            module_name = '__module_{}'.format(module_index)
            returned.append([])
            for test_index in range(num_tests_per_module): # pylint: disable=unused-variable
                [test] = make_runnable_tests(test_func) # pylint: disable=unbalanced-tuple-unpacking
                assert test.__slash__.module_name
                # Override the real module name so module boundaries are synthetic.
                test.__slash__.module_name = module_name
                returned[-1].append(test)
    return returned
def _increment_scope(prev_scopes, **increments):
    """Return a new dict: *prev_scopes* (or empty if falsy) with every
    nonzero increment added to the matching scope counter."""
    result = dict(prev_scopes) if prev_scopes else {}
    for scope, delta in increments.items():
        if delta != 0:
            result[scope] = result.get(scope, 0) + delta
    return result
class DummyFixtureStore(object):
    """Minimal stand-in for slash's fixture store.

    Records the stack of active scopes in ``_scopes`` and, per scope name,
    a 1-based activation counter whose latest value is kept in
    ``_scope_ids``.
    """

    def __init__(self):
        super(DummyFixtureStore, self).__init__()
        self._scopes = []
        self._counters = {}
        self._scope_ids = {}

    def push_scope(self, scope):
        """Activate *scope*: push it on the stack and bump its id."""
        self._scopes.append(scope)
        next_id = self._counters.get(scope, 0) + 1
        self._counters[scope] = next_id
        self._scope_ids[scope] = next_id

    def pop_scope(self, scope):
        """Deactivate the innermost scope; it must be *scope*."""
        popped = self._scopes.pop()
        assert popped == scope
19346741bfacb214c72a76ffb440bfbbe83af104 | 32,188 | py | Python | modules/PLSR.py | jernelv/SpecAnalysis | 175875ea14f200ecd5de8eaa5b228c32c6621e46 | [
"MIT"
] | 5 | 2021-01-04T10:30:12.000Z | 2022-03-25T10:31:19.000Z | modules/PLSR.py | jernelv/SpecAnalysis | 175875ea14f200ecd5de8eaa5b228c32c6621e46 | [
"MIT"
] | null | null | null | modules/PLSR.py | jernelv/SpecAnalysis | 175875ea14f200ecd5de8eaa5b228c32c6621e46 | [
"MIT"
] | 5 | 2020-11-17T13:07:47.000Z | 2021-08-28T15:03:02.000Z | from __future__ import print_function
import fns
import numpy as np
import os
import matplotlib.pyplot as plt
import matplotlib
import scipy.signal
from scipy import signal
#from sklearn.model_selection import LeavePOut
#from sklearn.model_selection import KFold
from sklearn.model_selection import ShuffleSplit
from sklearn.model_selection import LeaveOneOut
from sklearn.linear_model import ElasticNet
import sklearn.metrics
import types
from math import sqrt
import copy
import sys
import importlib
from .libs import PLSRsave
from .libs import PLSRGeneticAlgorithm
from .libs import PLSRNN
from .libs import PLSRRNN
from .libs import PLSRCNN
from .libs import PLSR_file_import
from .libs import PLSRregressionMethods
from .libs import PLSRregressionVisualization
from .libs import PLSRpreprocessing
from .libs import PLSRwavelengthSelection
from .libs import PLSRsequential_feature_selectors
from .libs import PLSRclassifiers
#### this
'''functions_to_wrap = [[matplotlib.axes.Axes,'pcolormesh'],
[matplotlib.figure.Figure,'colorbar'],
[matplotlib.figure.Figure,'clf'],
[matplotlib.figure.Figure,'set_size_inches'],
[matplotlib.figure.Figure,'add_subplot'],
[matplotlib.figure.Figure,'subplots'],
[matplotlib.figure.Figure,'subplots_adjust'],
[matplotlib.axes.Axes,'invert_yaxis'],
[matplotlib.axes.Axes,'invert_xaxis'],
[matplotlib.axes.Axes,'set_title'],
[matplotlib.axes.Axes,'axis'],
[matplotlib.axes.Axes,'cla'],
[matplotlib.axes.Axes,'plot'],
[matplotlib.figure.Figure,'savefig'],
[matplotlib.axes.Axes,'set_xlim'],
[matplotlib.axes.Axes,'set_position'],
[matplotlib.axes.Axes,'bar'],
[matplotlib.figure.Figure,'add_axes'],
[plt,'figure'],
]
for function in functions_to_wrap:
if not 'function rimt.<locals>.rimt_this' in str(getattr(function[0], function[1])):
setattr(function[0], function[1], fns.rimt(getattr(function[0], function[1])))'''
#from multiprocessing import Pool
#import datetime
#matplotlib.rc('text', usetex=True)
#matplotlib.rc('text.latex', preamble=r'\usepackage{upgreek}')
def set_training(event):
	"""Mark the files currently selected in the navigator as the training set.

	Bound to a GUI event: walks up from the event's widget to the main
	frame, re-highlights the selection with 'color1', stores the selected
	paths on the frame, then clears the selection.
	"""
	main_frame = event.widget.master.master.master
	nav = main_frame.nav
	nav.clear_color('color1')
	nav.color_selected('color1')
	main_frame.training_files = nav.get_paths_of_selected_items()
	nav.deselect()
def set_validation(event):
	"""Mark the files currently selected in the navigator as the validation set.

	Bound to a GUI event: walks up from the event's widget to the main
	frame, re-highlights the selection with 'color3', stores the selected
	paths on the frame, then clears the selection.
	"""
	main_frame = event.widget.master.master.master
	nav = main_frame.nav
	nav.clear_color('color3')
	nav.color_selected('color3')
	main_frame.validation_files = nav.get_paths_of_selected_items()
	nav.deselect()
| 42.917333 | 182 | 0.657015 | from __future__ import print_function
import fns
import numpy as np
import os
import matplotlib.pyplot as plt
import matplotlib
import scipy.signal
from scipy import signal
#from sklearn.model_selection import LeavePOut
#from sklearn.model_selection import KFold
from sklearn.model_selection import ShuffleSplit
from sklearn.model_selection import LeaveOneOut
from sklearn.linear_model import ElasticNet
import sklearn.metrics
import types
from math import sqrt
import copy
import sys
import importlib
from .libs import PLSRsave
from .libs import PLSRGeneticAlgorithm
from .libs import PLSRNN
from .libs import PLSRRNN
from .libs import PLSRCNN
from .libs import PLSR_file_import
from .libs import PLSRregressionMethods
from .libs import PLSRregressionVisualization
from .libs import PLSRpreprocessing
from .libs import PLSRwavelengthSelection
from .libs import PLSRsequential_feature_selectors
from .libs import PLSRclassifiers
def eprint(*args, **kwargs):
    """Print to stderr; accepts the same arguments as the built-in print()."""
    print(*args, file=sys.stderr, **kwargs)
#### this
'''functions_to_wrap = [[matplotlib.axes.Axes,'pcolormesh'],
[matplotlib.figure.Figure,'colorbar'],
[matplotlib.figure.Figure,'clf'],
[matplotlib.figure.Figure,'set_size_inches'],
[matplotlib.figure.Figure,'add_subplot'],
[matplotlib.figure.Figure,'subplots'],
[matplotlib.figure.Figure,'subplots_adjust'],
[matplotlib.axes.Axes,'invert_yaxis'],
[matplotlib.axes.Axes,'invert_xaxis'],
[matplotlib.axes.Axes,'set_title'],
[matplotlib.axes.Axes,'axis'],
[matplotlib.axes.Axes,'cla'],
[matplotlib.axes.Axes,'plot'],
[matplotlib.figure.Figure,'savefig'],
[matplotlib.axes.Axes,'set_xlim'],
[matplotlib.axes.Axes,'set_position'],
[matplotlib.axes.Axes,'bar'],
[matplotlib.figure.Figure,'add_axes'],
[plt,'figure'],
]
for function in functions_to_wrap:
if not 'function rimt.<locals>.rimt_this' in str(getattr(function[0], function[1])):
setattr(function[0], function[1], fns.rimt(getattr(function[0], function[1])))'''
#from multiprocessing import Pool
#import datetime
#matplotlib.rc('text', usetex=True)
#matplotlib.rc('text.latex', preamble=r'\usepackage{upgreek}')
def crossval(T,V,ui,case):
    """Split the training data of *case* into cross-validation folds.

    Returns a list of namespace objects, each carrying its own T (train)
    and V (validation) split plus a `supressplot` flag.  When
    ui['is_validation'] is not 'X-val on training', the original case is
    returned unchanged as a one-element list.  cross_val_N==1 together
    with cross_val_max_cases==-1 selects leave-one-out; otherwise a
    ShuffleSplit with cross_val_max_cases folds is used.
    """
    if not ui['is_validation']=='X-val on training':
        case.supressplot=0
        return [case]
    else:
        case.Xval_cases=[]
        #XvalTs=[]
        #XvalVs=[]
        #supressplots=[]
        if ui['cross_val_N']==1 and ui['cross_val_max_cases']==-1:
            #ui['cross_val_max_cases']=len(T.Y)
            splitodule=LeaveOneOut()
            print('Using sklearn.LeaveOneOut on '+str(len(T.Y))+' measurements. Maxcases set to '+str(len(T.Y)))
        else:
            if ui['cross_val_max_cases']==-1:
                print('cross_val_max_cases set to -1, cross_val_N not set to 1. Setting cross_val_max_cases to default (20)' )
                ui['cross_val_max_cases']=20
            splitodule=ShuffleSplit(n_splits=ui['cross_val_max_cases'], test_size=ui['cross_val_N'])
        for train,val in splitodule.split(T.X):
            case.Xval_cases.append(types.SimpleNamespace())
            case.Xval_cases[-1].train=train
            case.Xval_cases[-1].val=val
            case.Xval_cases[-1].T=types.SimpleNamespace()
            case.Xval_cases[-1].T.X=np.array(T.X[train])
            case.Xval_cases[-1].T.Y=np.array(T.Y[train])
            case.Xval_cases[-1].V=types.SimpleNamespace()
            case.Xval_cases[-1].V.X=np.array(T.X[val])
            case.Xval_cases[-1].V.Y=np.array(T.Y[val])
            case.Xval_cases[-1].supressplot=1
        # Only the LAST fold gets supressplot=0, so plotting and pooled
        # statistics happen once, after all folds have been evaluated.
        case.Xval_cases[-1].supressplot=0
        return case.Xval_cases
def run_reg_module(Xval_case,case,ui,common_variables,active_wavenumers,logfile,keywords=None):
    """Fit one regression/classification model on a single (cross-)validation case.

    Builds the regressor selected by ui['reg_type'], fits it on the training
    split restricted to `active_wavenumers`, predicts both splits, stores
    RMSE/R^2/SEP statistics on `Xval_case`, optionally plots/saves/logs the
    result (unless Xval_case.supressplot), and returns (reg_module, RMSe).

    Note: `keywords` is normally replaced by case.keywords below; the
    parameter is kept for backward compatibility.  It previously defaulted
    to a shared mutable {} (classic mutable-default pitfall).
    """
    if keywords is None:
        keywords = {}
    T=Xval_case.T
    V=Xval_case.V
    supressplot=Xval_case.supressplot
    try:
        keywords=case.keywords
    except AttributeError:
        # was a bare `except:`; only a missing case.keywords is expected here
        keywords={}
        print('let the developers know if you see this error')
    # common_variables.tempax/tempfig are the figure saved to disk;
    # common_variables.ax/fig are the figure displayed on screen.
    # tempax needs a back-reference to its figure for the colorbar.
    if ui['save_check_var']:
        common_variables.tempax.fig=common_variables.tempfig
    reg_module=PLSRregressionMethods.getRegModule(ui['reg_type'],keywords)
    # Restrict both splits to the active wavenumbers before fitting/predicting.
    for E in [T,V]:
        if len(E.Y)>0:
            E.Xsmol=E.X[:,active_wavenumers]
    reg_module.fit(T.Xsmol, T.Y)
    for E in [T,V]:
        if len(E.Y)>0:
            E.pred = reg_module.predict(E.Xsmol)[:,0]
        else:
            E.pred = []
    Xval_case.RMSECP=np.sqrt((np.sum((T.pred-T.Y)**2)+np.sum((V.pred-V.Y)**2))/(len(T.Y)+len(V.Y)))
    Xval_case.RMSEC=np.sqrt((np.sum((T.pred-T.Y)**2))/(len(T.Y)))
    if len(V.Y)>0:
        Xval_case.RMSEP=np.sqrt((np.sum((V.pred-V.Y)**2))/(len(V.Y)))
    # Choose which residuals drive the reported RMSe and R^2.
    if ui['RMS_type']=='RMSEP':
        RMSe=Xval_case.RMSEP
        Y_for_r2=V.Y
        pred_for_r2=V.pred
    else:
        RMSe=Xval_case.RMSEC
        Y_for_r2=T.Y
        pred_for_r2=T.pred
    case.XvalRMSEs.append(RMSe)
    # Accumulate predictions across cross-validation folds for pooled statistics.
    if not hasattr(case,'X_val_pred'):
        case.X_val_pred=[pred_for_r2]
        case.X_val_Y=[Y_for_r2]
    else:
        case.X_val_pred.append(pred_for_r2)
        case.X_val_Y.append(Y_for_r2)
    if not supressplot: # if plotting this, calculate R^2 for all xval cases
        X_pred=np.array(case.X_val_pred).reshape(-1)
        X_Y=np.array(case.X_val_Y).reshape(-1)
        y_mean = np.sum(X_Y)*(1/len(X_Y))
        Xval_case.R_squared = 1 - ((np.sum((X_Y - X_pred)**2))/(np.sum((X_Y - y_mean)**2)))
        avg=np.average(X_pred-X_Y)
        n=len(X_pred)
        Xval_case.SEP=np.sqrt(np.sum( ( X_pred-X_Y-avg )**2 )/(n-1))
        Xval_case.mean_absolute_error=sklearn.metrics.mean_absolute_error(X_Y,X_pred)
        Xval_case.mean_absolute_error_percent=100/len(X_Y) * np.sum(np.abs(X_Y-X_pred)/X_Y)
    else:
        Xval_case.R_squared=0
        Xval_case.SEP=0
    try:
        Xval_case.R_not_squared=sqrt(Xval_case.R_squared)
    except ValueError:
        # R^2 can be negative for a poor fit; sqrt is then undefined.
        Xval_case.R_not_squared=0
    if ui['coeff_det_type']=='R^2':
        coeff_det = Xval_case.R_squared
    elif ui['coeff_det_type']=='R':
        coeff_det = Xval_case.R_not_squared
    else:
        # Previously an unknown type caused a NameError below; fall back to R^2.
        coeff_det = Xval_case.R_squared
    if reg_module.type=='classifier':
        # For classifiers also report the fraction of correctly labelled samples.
        frac_cor_lab=PLSRclassifiers.get_correct_categorized(case.X_val_Y[-1],case.X_val_pred[-1])
        case.XvalCorrClass.append(frac_cor_lab)
    else:
        frac_cor_lab=-1
    # Plot, save and log only for the non-suppressed (last) fold.
    if not supressplot:
        if not ui['do_not_save_plots']:
            PLSRsave.plot_regression(Xval_case,case,ui,fns.add_axis(common_variables.fig,ui['fig_per_row'],ui['max_plots']),keywords,RMSe, coeff_det,frac_cor_lab=frac_cor_lab)
        if ui['save_check_var']:
            if not ui['do_not_save_plots']:
                PLSRsave.plot_regression(Xval_case,case,ui,common_variables.tempax,keywords,RMSe, coeff_det,frac_cor_lab=frac_cor_lab)
            common_variables.tempfig.subplots_adjust(bottom=0.13,left=0.15, right=0.97, top=0.95)
            plotFileName=case.folder+ui['reg_type']+PLSRsave.get_unique_keywords_formatted(common_variables.keyword_lists,case.keywords).replace('.','p')
            common_variables.tempfig.savefig(plotFileName+ui['file_extension'])
        PLSRsave.add_line_to_logfile(logfile,Xval_case,case,ui,keywords,RMSe,coeff_det,frac_cor_lab=frac_cor_lab)
    return reg_module, RMSe
class moduleClass():
	"""GUI module tying together file import, preprocessing, wavelength
	selection and regression/classification runs for spectral data.

	Button clicks are routed to the module-level ``run`` singleton
	(set in ``__init__``)."""
	# File extensions this module accepts in the file navigator.
	filetypes=['DPT','dpt','list','txt','laser']
def __init__(self, fig, locations, frame, ui):
#reload modules
if frame.module_reload_var.get():
if 'modules.libs.PLSRsave' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRsave'])
if 'modules.libs.PLSRGeneticAlgorithm' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRGeneticAlgorithm'])
if 'modules.libs.PLSRsequential_feature_selectors' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRsequential_feature_selectors'])
if 'modules.libs.PLSRNN' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRNN'])
if 'modules.libs.PLSRRNN' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRRNN'])
if 'modules.libs.PLSRCNN' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRCNN'])
if 'modules.libs.PLSR_file_import' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSR_file_import'])
if 'modules.libs.PLSRregressionMethods' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRregressionMethods'])
if 'modules.libs.PLSRclassifiers' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRclassifiers'])
if 'modules.libs.PLSRregressionVisualization' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRregressionVisualization'])
if 'modules.libs.PLSRpreprocessing' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRpreprocessing'])
if 'modules.libs.PLSRwavelengthSelection' in sys.modules: #reload each time it is run
importlib.reload(sys.modules['modules.libs.PLSRwavelengthSelection'])
#code for checking for memory leaks
global run #global keyword used to connect button clicks to class object
run=self
self.fig=fig
self.locations=locations
self.frame=frame
self.ui=ui
def clear_memory(self):
safe_keys=['fig','locations','frame','ui','wrapper_i','wrapper_max']
keys=[]
for key in self.__dict__:
keys.append(key)
for key in keys:
if not key in safe_keys:
delattr(self,key)
def run(self):
if not self.ui['use_wrapper']:
self.run_wrapper_case()
else:
import gc
gc.collect() #collect garbage to free memory from last run
self.wrapper_i=1
self.wrapper_max=len(self.ui['binning'])
if self.ui['filter']=='Try all': self.wrapper_max*=6
if self.ui['try_all_scatter_correction']: self.wrapper_max*=4
if self.ui['try_all_normalize']: self.wrapper_max*=4
if self.ui['scaling']=='Try all': self.wrapper_max*=2
if self.ui['mean_centering']=='Try all': self.wrapper_max*=2
bins=self.ui['binning']
for bin in bins:
self.ui['binning']=[bin]
self.scatter_cor_wrapper()
self.ui['binning']=bins
def scatter_cor_wrapper(self):
#{'key': 'filter', 'type': 'radio:text', 'texts': ['No filter', 'MA', 'Butterworth', 'Hamming','Fourier','Try all'], 'tab': 0, 'row': 7} ,
if self.ui['filter']=='Try all':
self.ui['use_SG']='No SG'
for f in ['No filter', 'MA', 'Butterworth', 'Hamming','Fourier','SG']:
#print(self.__dict__)
self.ui['filter']=f
if self.ui['filter']=='SG':
self.ui['filter']='No filter'
self.ui['use_SG']='use SG'
if self.ui['try_all_scatter_correction']:
self.ui['try_all_scatter_correction']=0
self.ui['normalize']=0
self.ui['SNV_key']=0
self.ui['MSC_key']=0
self.normalize_wrapper()
self.ui['normalize']=1
self.normalize_wrapper()
self.ui['normalize']=0
self.ui['SNV_key']=1
self.normalize_wrapper()
self.ui['SNV_key']=0
self.ui['MSC_key']=1
self.normalize_wrapper()
self.ui['MSC_key']=0
self.ui['try_all_scatter_correction']=1
else:
self.normalize_wrapper()
self.ui['use_SG']='No SG'
self.ui['filter']='Try all'
else:
if self.ui['try_all_scatter_correction']:
self.ui['try_all_scatter_correction']=0
self.ui['normalize']=0
self.ui['SNV_key']=0
self.ui['MSC_key']=0
self.normalize_wrapper()
self.ui['normalize']=1
self.normalize_wrapper()
self.ui['normalize']=0
self.ui['SNV_key']=1
self.normalize_wrapper()
self.ui['SNV_key']=0
self.ui['MSC_key']=1
self.normalize_wrapper()
self.ui['MSC_key']=0
self.ui['try_all_scatter_correction']=1
else:
self.normalize_wrapper()
def normalize_wrapper(self):
ui=self.ui
if not ui['try_all_normalize']:
self.scaling_wrapper()
else:
ui['try_all_normalize']=0
#ui['normalize']=0
ui['baseline_value']=0
ui['baseline_linear']=0
ui['baseline_background']=0
ui['derivative']='Not der'
#
self.scaling_wrapper()
#
#ui['normalize']=1
#self.scaling_wrapper()
#ui['normalize']=0
#
ui['baseline_value']=1
self.scaling_wrapper()
ui['baseline_value']=0
#
ui['baseline_linear']=1
self.scaling_wrapper()
ui['baseline_linear']=0
#
ui['baseline_background']=1
self.scaling_wrapper()
ui['baseline_background']=0
#
ui['derivative']='1st der'
self.scaling_wrapper()
ui['derivative']='2nd der'
self.scaling_wrapper()
ui['derivative']='Not der'
ui['try_all_normalize']=1
return
#{'key': 'scaling', 'type': 'radio:text', 'texts': ['No scaling', 'Scaling','Try all'], 'tab': 0, 'row': 2}
def scaling_wrapper(self):
if not self.ui['scaling']=='Try all':
self.mean_centering_wrapper()
else:
self.ui['scaling']='No scaling'
self.mean_centering_wrapper()
self.ui['scaling']='Scaling'
self.mean_centering_wrapper()
self.ui['scaling']='Try all'
#{'key': 'mean_centering', 'type': 'radio:text', 'texts': ['No mean centering', 'Mean centering','Try all'], 'tab': 0, 'row': 2} ,
def mean_centering_wrapper(self):
if not self.ui['mean_centering']=='Try all':
self.clear_memory()
print('wrapper i = ',self.wrapper_i, ' of ', self.wrapper_max)
self.wrapper_i+=1
self.run_wrapper_case()
else:
self.ui['mean_centering']='No mean centering'
self.clear_memory()
print('wrapper i = ',self.wrapper_i, ' of ', self.wrapper_max)
self.wrapper_i+=1
self.run_wrapper_case()
self.ui['mean_centering']='Mean centering'
self.clear_memory()
print('wrapper i = ',self.wrapper_i, ' of ', self.wrapper_max)
self.wrapper_i+=1
self.run_wrapper_case()
self.ui['mean_centering']='Try all'
	def run_wrapper_case(self):
		"""Execute one full regression/classification case with the current ui settings.

		Steps: validate the ui option combination, load training (and optionally
		validation) spectra, load reference/background spectra, set up the output
		folder and log, preprocess, optionally select wavelengths (GA / moving
		window / sequential selector), then run the regression per cross-validation
		split and report RMSE / correct-classification statistics.
		Returns early (None) on unsupported option combinations or missing input.
		"""
		fig=self.fig
		locations=self.locations
		frame=self.frame
		ui=self.ui
		eprint('running')
		self.fig=fig
		fig.clf()
		self.frame=frame
		# get variables from buttons
		common_variables=types.SimpleNamespace()
		common_variables.draw=self.draw
		self.common_variables=common_variables
		common_variables.keyword_lists={}
		PLSRregressionMethods.get_relevant_keywords(common_variables,ui)
		ui['multiprocessing']=1-(ui['no_multiprocessing'])
		save_check_var=frame.save_check_var.get()
		ui['save_check_var']=save_check_var
		filename=frame.name_field_string.get()
		self.filename=filename
		#prepare figures for display (set correct number of axes, each pointing to the next axis)
		######################### if crossval and moving window -> stop ###########
		if ui['is_validation']=='X-val on training' and ui['regression_wavelength_selection']=='Moving window':
			print("Use of x-validation with moving window is not supported")
			return
		######################### if RMSEP and no validation -> stop ##############
		if ui['is_validation']=='Training' and ui['RMS_type']=='RMSEP':
			print("Unable to calculate RMSEP with only training set")
			return
		#################### if RMSEP and RMSEC and no validation -> only RMSEP ###
		if ui['is_validation']=='Training':
			ui['RMS_type']='RMSEC'
			if ui['RMS_type']=='Default':
				ui['RMS_type']='RMSEC'
		else:
			if ui['RMS_type']=='Default':
				ui['RMS_type']='RMSEP'
		common_variables.frame=frame
		common_variables.fig=fig
		################################################################################################
		######################### Load data as training or validation ##################################
		################################################################################################
		T=types.SimpleNamespace()
		V=types.SimpleNamespace()
		if len(frame.training_files)==0:
			print('training set required')
			return
		#load training set
		T.X, T.Y, common_variables.trainingfiles, self.wavenumbers, self.regressionCurControlTypes=PLSR_file_import.get_files(frame.training_files,ui['max_range'])
		self.original_wavenumbers=self.wavenumbers
		for i, contrltytpe in enumerate(self.regressionCurControlTypes):
			frame.button_handles['cur_col'][i]["text"]=contrltytpe
		if ui['is_validation']=='Training' or ui['is_validation']=='X-val on training':# if training or crossval -> deselect validation
			frame.nav.deselect()
			#frame.nav.clear_color('color3')
			#frame.validation_files=frame.nav.get_paths_of_selected_items()
			V.X=np.array([]) # set empty validation set
			V.Y=np.array([])
		elif ui['is_validation']=='Training and Validation':
			if len(frame.validation_files)==0:
				print('training and validation set, but no validation set in in put')
				return
			#load validation set
			V.X, V.Y, common_variables.validationfiles, _, _2=PLSR_file_import.get_files(frame.validation_files,ui['max_range'])
		common_variables.original_T=copy.deepcopy(T)
		common_variables.original_V=copy.deepcopy(V)
		################################################################################################
		################################## load reference spectra #######################################
		################################################################################################
		if ui['reference_spectra']=='':
			self.reference_spectra=None
		else:
			try:
				temp, _1, _2, _3, _4=PLSR_file_import.get_files([ui['reference_spectra']],np.inf)
				if len(temp)>0:
					print('first reference spectra in list selected for reference spectra selected as reference spectra')
					self.reference_spectra=np.array(temp[0])
			except Exception as e:
				self.reference_spectra=None
				print(e)
				print('error importing referece spectra -> ignoring')
		if ui['background_spectra']=='':
			self.background_spectra=None
		else:
			try:
				temp, _1, _2, _3, _4=PLSR_file_import.get_files([ui['background_spectra']],np.inf)
				if len(temp)>0:
					print('first background spectra in list selected for reference spectra selected as reference spectra')
					self.background_spectra=np.array(temp[0])
			except Exception as e:
				self.background_spectra=None
				print(e)
				print('error importing referece spectra -> ignoring')
		################################################################################################
		################# set up folder, save log and temporary figure for saving ######################
		################################################################################################
		if save_check_var:
			if not os.path.exists(filename):
				os.makedirs(filename)
			PLSRsave.SaveLogFile(filename,ui,common_variables)
			common_variables.tempfig,common_variables.tempax=PLSRsave.make_tempfig(ui,frame)
		################################################################################################
		############################## calculate window ranges #########################################
		################################################################################################
		common_variables.datapoints=np.arange(len(self.wavenumbers))
		#common_variables.datapointlists=[common_variables.datapoints]# declare this for get_or_make_absorbance_ax
		#common_variables.datapoints, common_variables.datapointlists=PLSRpreprocessing.GetDatapoints(self.wavenumbers, ui)
		################################################################################################
		################################### save unprocessed spectra ###################################
		################################################################################################
		if ui['plot_spectra_before_preprocessing']:
			eprint('plot abs')
			if ui['save_check_var']:
				PLSRsave.PlotAbsorbance(common_variables.tempax,common_variables.tempfig,common_variables.datapoints,ui,self.wavenumbers,T.X,V.X)
				plotFileName=filename+'/SpectraPrePreprocessing'
				common_variables.tempfig.savefig(plotFileName.replace('.','p')+ui['file_extension'])
				common_variables.tempax.cla()
			ax=PLSRsave.get_or_make_absorbance_ax(self)
			self.draw()
		################################################################################################
		################################### make pychem input file #####################################
		################################################################################################
		if int(ui['make_pyChem_input_file']):
			if ui['is_validation']=='Training and Validation':
				# NOTE(review): 'validation' and 'validationtruevalues' are not
				# defined in this scope — this branch would raise NameError if
				# taken; probably V.X/V.Y were intended. Confirm before relying
				# on pyChem export with a validation set.
				PLSRsave.writePyChemFile(T.X,T.Y,validation,validationtruevalues)
			else:
				PLSRsave.writePyChemFile(T.X,T.Y,[],[])
		################################################################################################
		################## set current control and remove data higher than maxrange ####################
		################################################################################################
		datasets=[T]
		if ui['is_validation']=='Training and Validation':
			datasets.append(V)
		for E in datasets:
			keepsamples=[]
			for i,_ in enumerate(E.Y):
				if not E.Y[i,ui['cur_col']] > ui['max_range']:
					keepsamples.append(i)
			E.X=E.X[keepsamples,:]
			E.Y=E.Y[keepsamples,ui['cur_col']]
		ui['cur_control_string']=self.regressionCurControlTypes[ui['cur_col']]
		PLSRpreprocessing.do_preprocessing(self,T,V)
		if ui['plot_fourier']:
			if hasattr(T,'X_fft'):
				ax=fns.add_axis(fig,ui['fig_per_row'],ui['max_plots'])
				PLSRsave.plot_fourier(ax,fig,T,V,ui)
		self.complete_cases=[]
		for _ in [1]: # is a loop so that you can use 'break'
			for i,dercase in enumerate(self.preprocessed_cases):
				#need to set data range in case of derrivative, rerunn in all cases anyways
				datapoints=PLSRpreprocessing.GetDatapoints(dercase.wavenumbers, ui)
				#common_variables.datapoints=datapoints
				#common_variables.datapointlists=datapointlists
				if ui['plot_spectra_after_preprocessing']:
					ax=fns.add_axis(fig,ui['fig_per_row'],ui['max_plots'])
					PLSRsave.PlotAbsorbance(ax,fig,datapoints,ui,dercase.wavenumbers,dercase.T.X,dercase.V.X,dercase=dercase)
					self.draw()
					if ui['save_check_var']:
						PLSRsave.PlotAbsorbance(common_variables.tempax,common_variables.tempfig,datapoints,ui,dercase.wavenumbers,dercase.T.X,dercase.V.X,dercase=dercase)
						plotFileName=dercase.folder+'/SpectraPostPreprocessing'
						common_variables.tempfig.savefig(plotFileName.replace('.','p')+ui['file_extension'])
						common_variables.tempax.cla()
				for E in [dercase.T,dercase.V]:
					if len(E.Y)>0:
						E.X=E.X[:,datapoints]
				dercase.wavenumbers=dercase.wavenumbers[datapoints]
				#create complete cases for all pemutations of keyword values in keyword_lists
				for keyword_case in PLSRregressionMethods.generate_keyword_cases(common_variables.keyword_lists):
					self.complete_cases.append(types.SimpleNamespace())
					self.complete_cases[-1].wavenumbers=dercase.wavenumbers
					self.complete_cases[-1].folder=dercase.folder
					self.complete_cases[-1].sg_config=dercase.sg_config
					self.complete_cases[-1].derrivative=dercase.derrivative
					self.complete_cases[-1].T=dercase.T
					self.complete_cases[-1].V=dercase.V
					self.complete_cases[-1].preprocessing_done=dercase.preprocessing_done
					self.complete_cases[-1].keywords=keyword_case
			if ui['reg_type']=='None':
				break
			for case in self.complete_cases:
				case.XvalRMSEs=[]
				case.XvalCorrClass=[]
				common_variables.keywords=case.keywords
				#GeneticAlgorithm(ui,T,V,datapoints,components)
				if ui['regression_wavelength_selection']=='No wavelength selection':
					active_wavenumers = np.ones(len(case.wavenumbers), dtype=bool)
				else:
					# report to user regarding split module
					if self.ui['WS_loss_type']=='X-validation on training':
						if self.ui['WS_cross_val_N']==1 and self.ui['WS_cross_val_max_cases']==-1:
							print('Using sklearn.LeaveOneOut on '+str(len(case.T.Y))+' measurements. Maxcases set to '+str(len(case.T.Y)))
						else:
							if self.ui['WS_cross_val_max_cases']==-1:
								print('WS_cross_val_max_cases set to -1, GA_cross_val_N not set to 1. Setting GAcross_val_max_cases to default (20)' )
								self.ui['WS_cross_val_max_cases']=20
					if ui['regression_wavelength_selection']=='Genetic Algorithm':
						GAobject = PLSRGeneticAlgorithm.GeneticAlgorithm(common_variables,ui,case)
						active_wavenumers = GAobject.run(fns.add_axis(common_variables.fig,ui['fig_per_row'],ui['max_plots']),case.wavenumbers,case.folder,self.draw)
					elif ui['regression_wavelength_selection']=='Moving Window':
						active_wavenumers = PLSRwavelengthSelection.MW(case,ui,common_variables)
					elif ui['regression_wavelength_selection']=='Sequential Feature Selector':
						FSobject = PLSRsequential_feature_selectors.sequentialFeatureSelector(common_variables,ui,case,self.draw)
						active_wavenumers = FSobject.run()
				Xval_cases=crossval(case.T,case.V,ui,case) # returns [T],[V] if not crossva, otherwise makes cases from validation dataset
				for Xval_case in Xval_cases:
					# ui.datapoints=runGeneticAlgorithm(dercase[0],dercase[1],dercase[2],dercase[3],dercase[4],dercase[5],dercase[6],dercase[7])
					#def MW(T,V,wavenumbers, folder,ui,sg_config,curDerivative,supressplot):
					if ui['save_check_var'] and not ui['do_not_save_plots']:
						active_wavenumbers_file=case.folder+ui['reg_type']+PLSRsave.get_unique_keywords_formatted(common_variables.keyword_lists,case.keywords).replace('.','p')+'active_wavenumers.dpb'
						PLSRsave.save_active_wavenumbers(active_wavenumbers_file,case.wavenumbers,active_wavenumers)
					case.active_wavenumers=active_wavenumers
					self.draw()
					self.last_reg_module, RMSe = run_reg_module(Xval_case,case,ui,common_variables,active_wavenumers,self.filename+'/results_table',keywords={})
					self.draw()
					self.last_complete_case = case
					self.last_Xval_case = Xval_case
					if Xval_case.supressplot==0:
						if ui['is_validation']=='X-val on training':
							#if ui['RMS_type']=='Combined RMSEP+RMSEC':
							#	print('RMSEC+RMSEP = '+PLSRsave.custom_round(case.xvalRMSE,3)+' '+ui['unit'])
							if not 'classifier_type' in case.keywords:
								case.xvalRMSE=np.sqrt(np.sum(np.array(case.XvalRMSEs)**2)/len(case.XvalRMSEs))
								if ui['RMS_type']=='RMSEC':
									print('RMSEC = '+PLSRsave.custom_round(case.xvalRMSE,3)+' '+ui['unit'])
								elif ui['RMS_type']=='RMSEP':
									print('RMSEP = '+PLSRsave.custom_round(case.xvalRMSE,3)+' '+ui['unit'])
							else:
								print(case.XvalCorrClass)
								case.xvalCorrClas=np.average(case.XvalCorrClass)
								print(case.xvalCorrClas)
								if ui['RMS_type']=='RMSEC':
									print('x-val corr classifed training = '+str(round(case.xvalCorrClas*100,3))+' %')
								elif ui['RMS_type']=='RMSEP':
									print('x-val corr classifed prediction = '+str(round(case.xvalCorrClas*100,3))+' %')
							case.XvalRMSEs=[]
		eprint('done')
		#plt.close(common_variables.tempfig)
		#del common_variables.tempfig
		if save_check_var:
			# save plot in window
			fig.savefig(filename+'/'+'_'.join(filename.split('/')[1:])+ui['file_extension'])
		print('Done')
		return
def callbackClick(self,frame,event):
ax=event.inaxes
if hasattr(ax,'plot_type'):
if ax.plot_type=='NN node map':
PLSRregressionVisualization.plot_node_activation_vector(event)
return
else:
print("clicked at", event.xdata, event.ydata)
def reorder_plots(self,event):
ui=self.ui
ui['fig_per_row']=int(self.frame.buttons['fig_per_row'].get())
ui['max_plots']=int(self.frame.buttons['max_plots'].get())
fns.move_all_plots(self.fig,ui['fig_per_row'],ui['max_plots'])
self.draw()
	@fns.rimt
	def draw(self):
		"""Redraw the matplotlib canvas and refresh the GUI frame.

		NOTE(review): fns.rimt presumably marshals this call onto the GUI
		thread — confirm against fns."""
		self.fig.canvas.draw()
		self.frame.update()
def addButtons():
	"""Return the GUI button/widget declarations for this module.

	Each dict describes one widget (key, widget type, layout tab/row, default);
	the helper libraries append their own widget declarations at the end.
	"""
	buttons=[
	{'key': 'RNNtab3name', 'type': 'tabname', 'text': 'Import Options', 'tab': 3} ,
	# dataset configuration
	{'key': 'RegressionL0', 'type': 'label', 'text': 'Data import options: ', 'tab': 3, 'row': 0} ,
	{'key': 'is_validation', 'type': 'radio:text', 'texts': ['Training', 'Training and Validation', 'X-val on training'], 'tab': 3, 'row': 0} ,
	{'key': 'cross_val_N', 'type': 'txt:int', 'text': 'Number of validation samples for cross validation', 'default': '10', 'width': 4, 'tab': 3, 'row': 1} ,
	{'key': 'cross_val_max_cases', 'type': 'txt:int', 'text': 'Iterations', 'default': '-1', 'width': 4, 'tab': 3, 'row': 1} ,
	{'key': 'RegressionL0a', 'type': 'label', 'text': 'Column of data to use: ', 'tab': 3, 'row': 2} ,
	{'key': 'cur_col', 'type': 'radio', 'texts': ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10'], 'tab': 3, 'row': 2} ,
	{'key': 'max_range', 'type': 'txt:float', 'text': 'Maximum concentration for training set', 'default': '10000', 'width': 6, 'tab': 3, 'row': 3} ,
	{'key': 'unit', 'type': 'txt', 'text': 'Concentration unit', 'default': 'mg/dl', 'width': 6, 'tab': 3, 'row': 4} ,
	# config for creating figure and saving
	{'key': 'file_extension', 'type': 'radio:text', 'texts': [ '.svg', '.png', '.pdf'], 'tab': 4, 'row': 1} ,
	{'key': 'reorder_plots', 'type': 'click', 'text': 'Reorder plots', 'bind': reorder_plots, 'tab': 4, 'row': 1} ,
	{'key': 'fig_per_row', 'type': 'txt:int', 'text': 'Figures per row', 'default': '2', 'width': 4, 'tab': 4, 'row': 1} ,
	{'key': 'DPI', 'type': 'txt:int', 'text': 'dpi', 'default': '80', 'width': 4, 'tab': 4, 'row': 1} ,
	# graphical user interface options
	{'key': 'max_plots', 'type': 'txt:int', 'text': 'Max number of plots', 'default': '-1', 'width': 3, 'tab': 4, 'row': 2} ,
	# save options
	{'key': 'make_pyChem_input_file', 'type': 'check', 'text': 'Make pyChem file', 'tab': 4, 'row': 9} ,
	{'key': 'do_not_save_plots', 'type': 'check', 'text': 'do not save plots', 'tab': 4, 'row': 8} ,
	{'key': 'use_wrapper', 'type': 'check', 'text': 'use wrapper', 'tab': 4, 'row': 8} ,
	# debugging options
	{'key': 'RNNtab5name', 'type': 'tabname', 'text': 'Other', 'tab': 5} ,
	{'key': 'no_multiprocessing', 'type': 'radio', 'texts': ['use multiprocessing', 'do not use multiprocessing'], 'tab': 5, 'row': 0},
	# result
	{'key': 'RMS_type', 'type': 'radio:text', 'texts': ['Default', 'RMSEC', 'RMSEP'], 'tab': 3, 'row': 6} ,
	{'key': 'coeff_det_type', 'type': 'radio:text', 'texts': ['R^2', 'R'], 'tab': 3, 'row': 7} ,
	{'key': 'SEP_MAE_or_%MAE', 'type': 'radio:text', 'texts': ['SEP', 'MAE','%MAE'], 'tab': 3, 'row': 8} ,
	# declare input
	{'key': 'set_training', 'type': 'click', 'text': 'Set Training', 'bind': set_training,'color':'color1', 'tab': 10, 'row': 0} ,
	{'key': 'set_validation', 'type': 'click', 'text': 'Set Validation', 'bind': set_validation,'color':'color3', 'tab': 10, 'row': 0} ,
	]
	# widgets contributed by the helper libraries
	buttons+=PLSRregressionMethods.get_buttons()
	buttons+=PLSRclassifiers.get_buttons()
	buttons+=PLSRsave.get_buttons()
	buttons+=PLSRwavelengthSelection.get_buttons()
	buttons+=PLSRpreprocessing.get_buttons()
	return buttons
def set_training(event):
	"""Sets the training data set(s) in the GUI."""
	# The click event comes from a button three widget levels below the frame.
	frame=event.widget.master.master.master
	nav=frame.nav
	nav.clear_color('color1')
	nav.color_selected('color1')
	frame.training_files=nav.get_paths_of_selected_items()
	nav.deselect()
def set_validation(event):
	"""Sets the validation data set(s) in the GUI."""
	# Same widget-tree walk as set_training, but marks with 'color3'.
	frame=event.widget.master.master.master
	nav=frame.nav
	nav.clear_color('color3')
	nav.color_selected('color3')
	frame.validation_files=nav.get_paths_of_selected_items()
	nav.deselect()
def reorder_plots(event):
	"""Button callback: forward the click to the active module instance.

	BUG FIX: the original called ``run.reorder_plots(run,event)``; since
	``run.reorder_plots`` is already bound, that passed three positional
	arguments to a two-parameter method and raised TypeError on every click.
	"""
	global run
	run.reorder_plots(event)
	return
| 28,463 | 588 | 115 |
a03e0b10a5d8e2360238fded84f6753612e5c8fc | 2,982 | py | Python | src/check_docker_container/check_docker_container.py | BlackZork/check_docker_container | 4568ac24a69940d5efcd316d4b8a26069dcdbdff | [
"MIT"
] | 2 | 2021-09-20T00:45:19.000Z | 2021-11-27T09:35:11.000Z | src/check_docker_container/check_docker_container.py | BlackZork/check_docker_container | 4568ac24a69940d5efcd316d4b8a26069dcdbdff | [
"MIT"
] | null | null | null | src/check_docker_container/check_docker_container.py | BlackZork/check_docker_container | 4568ac24a69940d5efcd316d4b8a26069dcdbdff | [
"MIT"
] | null | null | null | #!python
"""Check if docker container is running"""
import argparse
import subprocess
import re
import nagiosplugin
@nagiosplugin.guarded
if __name__ == '__main__':
main()
| 25.487179 | 115 | 0.557009 | #!python
"""Check if docker container is running"""
import argparse
import subprocess
import re
import nagiosplugin
class ContainerState:
    """Value object for one parsed 'docker ps' status line.

    state is the leading status word (e.g. 'Up', 'Exited'), content the rest
    of the status text, and is_paused whether the status ended in '(Paused)'.
    """

    def __init__(self, state: str, content: str, is_paused: bool):
        self.state, self.content, self.is_paused = state, content, is_paused
class Container(nagiosplugin.Resource):
    """Nagios resource that probes the state of a single docker container."""

    # Matches '<name>;<StateWord> <rest of status>' as emitted by the
    # --format template used in probe().
    status_re = re.compile(r"(.+);([a-zA-Z]+) (.*)")

    def __init__(self, cnt_name: str):
        self.cnt_name = cnt_name

    def probe(self):
        """Run 'docker ps -a' filtered by name and return the state metric."""
        #TODO add LANG=en_US ?
        # BUG FIX: the original had no comma between "-f" and "name=%s", so
        # the two literals were concatenated into a single "-fname=..." argv
        # element; they are now passed as separate arguments.
        result = subprocess.run([
            "docker",
            "ps",
            "-a",
            '--format',
            r'{{.Names}};{{.Status}}',
            "-f",
            "name=%s" % self.cnt_name
        ], encoding="UTF-8", capture_output=True)
        if result.returncode != 0:
            raise nagiosplugin.CheckError(str(result.stderr))
        if not result.stdout:
            raise nagiosplugin.CheckError(f"No containers named '{self.cnt_name}' returned by 'docker ps' command")
        cs = self.parse(result.stdout)
        return [nagiosplugin.Metric('state', cs, context='container')]

    def parse(self, data):
        """Find self.cnt_name in the docker output and return its ContainerState.

        Raises on an unparseable line or when the container is not listed.
        """
        for ln in data.splitlines():
            # BUG FIX: match each line, not the whole output blob; with more
            # than one matching container the original re-parsed line 1 only.
            match = Container.status_re.match(ln)
            if not match:
                raise Exception(f"Cannot parse docker output: {ln}")
            name, state, content = match.groups()
            if name == self.cnt_name:
                return ContainerState(
                    state=state,
                    content=content,
                    is_paused=content.endswith("(Paused)")
                )
        raise nagiosplugin.CheckError(f"Container {self.cnt_name} not found")
class ContainerContext(nagiosplugin.Context):
    """Maps a ContainerState metric onto a nagios service state."""

    def __init__(self):
        super().__init__(name="container")

    def evaluate(self, metric, resource):
        """Up -> Ok, Up but paused -> Warn, anything else -> Critical."""
        cs = metric.value
        if cs.state != "Up":
            return nagiosplugin.state.Critical
        return nagiosplugin.state.Warn if cs.is_paused else nagiosplugin.state.Ok
class ContainerSummary(nagiosplugin.Summary):
    """Renders '<State> <details>' for both OK and problem results."""

    def ok(self, results):
        cs = results[0].metric.value
        return '%s %s' % (cs.state, cs.content)

    def problem(self, results):
        res = results[0]
        if not res.metric:
            # No metric at all (e.g. the probe itself failed): fall back to
            # the result's own string form.
            return str(res)
        cs = res.metric.value
        return '%s %s' % (cs.state, cs.content)
@nagiosplugin.guarded
def main():
    """Parse the command line and run the container check."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-n', '--name', metavar='NAME', required=True,
                        help='container name')
    args = parser.parse_args()
    nagiosplugin.Check(
        Container(args.name),
        ContainerContext(),
        ContainerSummary(),
    ).main()


if __name__ == '__main__':
    main()
| 2,346 | 202 | 248 |
ff2d1f827478a4d9ae39c3a2e96d1f21adbc1562 | 11,659 | py | Python | fedn/fedn/clients/reducer/plots.py | jadali17/fedn | 00c622388e6b59b23ff09754650b4897791d8a9f | [
"Apache-2.0"
] | 1 | 2021-01-16T03:05:23.000Z | 2021-01-16T03:05:23.000Z | fedn/fedn/clients/reducer/plots.py | jadali17/fedn | 00c622388e6b59b23ff09754650b4897791d8a9f | [
"Apache-2.0"
] | null | null | null | fedn/fedn/clients/reducer/plots.py | jadali17/fedn | 00c622388e6b59b23ff09754650b4897791d8a9f | [
"Apache-2.0"
] | 1 | 2021-01-16T03:05:38.000Z | 2021-01-16T03:05:38.000Z | import pymongo
import json
import numpy
import plotly.graph_objs as go
from datetime import datetime,timedelta
import plotly
import os
from fedn.common.storage.db.mongo import connect_to_mongodb
| 35.012012 | 109 | 0.53641 | import pymongo
import json
import numpy
import plotly.graph_objs as go
from datetime import datetime,timedelta
import plotly
import os
from fedn.common.storage.db.mongo import connect_to_mongodb
class Plot:
    def __init__(self):
        """Connect to MongoDB and cache the collections used by the plot methods.

        Re-raises on connection failure (after logging), so a Plot instance is
        only ever constructed with a live database handle.
        """
        try:
            self.mdb = connect_to_mongodb()
            # Collections read by the plotting methods below.
            self.alliance = self.mdb["status"]
            self.round_time = self.mdb["performances"]
            self.psutil_usage = self.mdb["psutil_usage"]
        except Exception as e:
            print("FAILED TO CONNECT TO MONGO, {}".format(e), flush=True)
            self.collection = None
            raise
# plot metrics from DB
def _scalar_metrics(self, metrics):
""" Extract all scalar valued metrics from a MODEL_VALIDATON. """
data = json.loads(metrics['data'])
data = json.loads(data['data'])
valid_metrics = []
for metric, val in data.items():
# If it can be converted to a float it is a valid, scalar metric
try:
val = float(val)
valid_metrics.append(metric)
except:
pass
return valid_metrics
    def create_table_plot(self):
        """Build a plotly Table of per-model mean values for every scalar metric.

        Returns the figure serialized as a plotly JSON string; a titled empty
        figure is returned when no validations or no scalar metrics exist.
        """
        metrics = self.alliance.find_one({'type': 'MODEL_VALIDATION'})
        if metrics == None:
            fig = go.Figure(data=[])
            fig.update_layout(title_text='No data currently available for mean metrics')
            table = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
            return table
        valid_metrics = self._scalar_metrics(metrics)
        if valid_metrics == []:
            fig = go.Figure(data=[])
            fig.update_layout(title_text='No scalar metrics found')
            table = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
            return table
        all_vals = []
        models = []
        for metric in valid_metrics:
            # Group this metric's values by model id, then average per model.
            validations = {}
            for post in self.alliance.find({'type': 'MODEL_VALIDATION'}):
                e = json.loads(post['data'])
                try:
                    validations[e['modelId']].append(float(json.loads(e['data'])[metric]))
                except KeyError:
                    validations[e['modelId']] = [float(json.loads(e['data'])[metric])]
            vals = []
            models = []
            for model, data in validations.items():
                vals.append(numpy.mean(data))
                models.append(model)
            all_vals.append(vals)
        header_vals = valid_metrics
        # Reverse so the newest model appears first in the table.
        models.reverse()
        values = [models]
        print(all_vals, flush=True)
        for vals in all_vals:
            vals.reverse()
            values.append(vals)
        fig = go.Figure(data=[go.Table(
            header=dict(values=['Model ID'] + header_vals,
                        line_color='darkslategray',
                        fill_color='lightskyblue',
                        align='left'),
            cells=dict(values=values, # 2nd column
                       line_color='darkslategray',
                       fill_color='lightcyan',
                       align='left'))
        ])
        fig.update_layout(title_text='Summary: mean metrics')
        table = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
        return table
    def create_timeline_plot(self):
        """Build a horizontal-bar timeline of training and validation work.

        For every *_REQUEST event, the matching completed event from the same
        sender is found via correlationId; a bar starts at the request
        timestamp with a length equal to the round-trip time in seconds.
        Returns the figure serialized as a plotly JSON string.
        """
        trace_data = []
        x = []
        y = []
        base = []
        for p in self.alliance.find({'type': 'MODEL_UPDATE_REQUEST'}):
            e = json.loads(p['data'])
            cid = e['correlationId']
            # Locate the completed MODEL_UPDATE with the same correlation id.
            for cc in self.alliance.find({'sender': p['sender'], 'type': 'MODEL_UPDATE'}):
                da = json.loads(cc['data'])
                if da['correlationId'] == cid:
                    cp = cc
            cd = json.loads(cp['data'])
            tr = datetime.strptime(e['timestamp'], '%Y-%m-%d %H:%M:%S.%f')
            tu = datetime.strptime(cd['timestamp'], '%Y-%m-%d %H:%M:%S.%f')
            ts = tu - tr
            base.append(tr.timestamp())
            x.append(ts.total_seconds())
            y.append(p['sender']['name'])
        trace_data.append(go.Bar(
            x=x,
            y=y,
            orientation='h',
            base=base,
            marker=dict(color='royalblue'),
            name="Training",
        ))
        x = []
        y = []
        base = []
        for p in self.alliance.find({'type': 'MODEL_VALIDATION_REQUEST'}):
            e = json.loads(p['data'])
            cid = e['correlationId']
            for cc in self.alliance.find({'sender': p['sender'], 'type': 'MODEL_VALIDATION'}):
                da = json.loads(cc['data'])
                if da['correlationId'] == cid:
                    cp = cc
            cd = json.loads(cp['data'])
            tr = datetime.strptime(e['timestamp'], '%Y-%m-%d %H:%M:%S.%f')
            tu = datetime.strptime(cd['timestamp'], '%Y-%m-%d %H:%M:%S.%f')
            ts = tu - tr
            base.append(tr.timestamp())
            x.append(ts.total_seconds())
            y.append(p['sender']['name'])
        trace_data.append(go.Bar(
            x=x,
            y=y,
            orientation='h',
            base=base,
            marker=dict(color='lightskyblue'),
            name="Validation",
        ))
        layout = go.Layout(
            barmode='stack',
            showlegend=True,
        )
        fig = go.Figure(data=trace_data, layout=layout)
        fig.update_xaxes(title_text='Timestamp')
        fig.update_layout(title_text='Alliance timeline')
        timeline = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
        return timeline
def create_ml_plot(self):
metrics = self.alliance.find_one({'type': 'MODEL_VALIDATION'})
if metrics == None:
fig = go.Figure(data=[])
fig.update_layout(title_text='No data currently available for Mean Absolute Error')
ml = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return ml
data = json.loads(metrics['data'])
data = json.loads(data['data'])
valid_metrics = []
for metric, val in data.items():
# Check if scalar - is this robust ?
if isinstance(val, float):
valid_metrics.append(metric)
# Assemble a dict with all validations
validations = {}
clients = {}
for post in self.alliance.find({'type': 'MODEL_VALIDATION'}):
try:
e = json.loads(post['data'])
clients[post['sender']['name']].append(json.loads(e['data'])[metric])
except KeyError:
clients[post['sender']['name']] = []
rounds = []
traces_data = []
for c in clients:
traces_data.append(go.Scatter(
x=rounds,
y=clients[c],
name=c
))
fig = go.Figure(traces_data)
fig.update_xaxes(title_text='Rounds')
fig.update_yaxes(title_text='MAE', tickvals=[0.0, 0.2, 0.4, 0.6, 0.8, 1.0])
fig.update_layout(title_text='Mean Absolute Error Plot')
ml = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return ml
def create_box_plot(self):
metrics = self.alliance.find_one({'type': 'MODEL_VALIDATION'})
if metrics == None:
fig = go.Figure(data=[])
fig.update_layout(title_text='No data currently available for metric distribution over alliance '
'participants')
box = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return box
valid_metrics = self._scalar_metrics(metrics)
if valid_metrics == []:
fig = go.Figure(data=[])
fig.update_layout(title_text='No scalar metrics found')
box = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return box
# Just grab the first metric in the list.
# TODO: Let the user choose, or plot all of them.
if "accuracy" in valid_metrics:
metric = "accuracy"
else:
metric = valid_metrics[0]
validations = {}
for post in self.alliance.find({'type': 'MODEL_VALIDATION'}):
e = json.loads(post['data'])
try:
validations[e['modelId']].append(float(json.loads(e['data'])[metric]))
except KeyError:
validations[e['modelId']] = [float(json.loads(e['data'])[metric])]
box = go.Figure()
x = []
y = []
box_trace = []
for model_id, acc in validations.items():
x.append(model_id)
y.append(numpy.mean([float(i) for i in acc]))
if len(acc) >= 2:
box.add_trace(go.Box(y=acc, name=str(model_id), marker_color="royalblue", showlegend=False))
rounds = list(range(len(y)))
box.add_trace(go.Scatter(
x=x,
y=y,
name='Mean'
))
box.update_xaxes(title_text='Model ID')
box.update_yaxes(tickvals=[0.0, 0.2, 0.4, 0.6, 0.8, 1.0])
box.update_layout(title_text='Metric distribution over alliance participants: {}'.format(metric))
box = json.dumps(box, cls=plotly.utils.PlotlyJSONEncoder)
return box
def create_round_plot(self):
metrics = self.round_time.find_one({'key': 'round_time'})
if metrics == None:
fig = go.Figure(data=[])
fig.update_layout(title_text='No data currently available for round time')
ml = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return ml
for post in self.round_time.find({'key': 'round_time'}):
rounds = post['round']
traces_data = post['round_time']
fig = go.Figure()
fig.add_trace(go.Scatter(
x=rounds,
y=traces_data,
mode='lines+markers',
name='Time'
))
fig.update_xaxes(title_text='Round')
fig.update_yaxes(title_text='Time (s)')
fig.update_layout(title_text='Round time')
round_t = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return round_t
def create_cpu_plot(self):
metrics = self.psutil_usage.find_one({'key': 'cpu_mem_usage'})
if metrics == None:
fig = go.Figure(data=[])
fig.update_layout(title_text='No data currently available for CPU usage')
cpu = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return cpu
for post in self.psutil_usage.find({'key': 'cpu_mem_usage'}):
cpu = post['cpu']
mem = post['mem']
ps_time = post['time']
round = post['round']
# Create figure with secondary y-axis
from plotly.subplots import make_subplots
fig = make_subplots(specs=[[{"secondary_y": True}]])
fig.add_trace(go.Scatter(
x=ps_time,
y=cpu,
mode='lines+markers',
name='CPU (%)'
))
fig.add_trace(go.Scatter(
x=ps_time,
y=mem,
mode='lines+markers',
name='MEM (%)'
))
fig.add_trace(go.Scatter(
x=ps_time,
y=round,
mode='lines+markers',
name='Round',
), secondary_y=True)
fig.update_xaxes(title_text='Date Time')
fig.update_yaxes(title_text='Percentage (%)')
fig.update_yaxes(title_text="Round", secondary_y=True)
fig.update_layout(title_text='CPU loads and memory usage')
cpu = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
return cpu | 10,725 | 716 | 23 |
abc9d3d5be2b40831b42aa3d20376d6f1cb4eaeb | 9,403 | py | Python | src/s_iSpin_10min_plots.py | ashimgiyanani/ProjectTemplate_python | 3135e8976aac751049a6da34a550db0fe045f0bb | [
"BSD-3-Clause"
] | null | null | null | src/s_iSpin_10min_plots.py | ashimgiyanani/ProjectTemplate_python | 3135e8976aac751049a6da34a550db0fe045f0bb | [
"BSD-3-Clause"
] | null | null | null | src/s_iSpin_10min_plots.py | ashimgiyanani/ProjectTemplate_python | 3135e8976aac751049a6da34a550db0fe045f0bb | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Jul 29 07:57:54 2020
@author: papalk
"""
# Code logbook
# 10.02.2021 - Handover from Alkistis
# changed filename to filename[0] due to variable type error
# Changing from df.ix[...,0] to df.loc[..., df.columns[0]] due to a newer pandas version
import sys
import datetime as dt
import matplotlib.pyplot as plt
import matplotlib.pylab as pylab
from matplotlib.dates import DateFormatter
from matplotlib.ticker import StrMethodFormatter
import pandas as pd
import numpy as np
import xarray as xr
from mpl_toolkits.mplot3d import Axes3D
import glob
# import user modules
usermodPath = r'../../userModules'
sys.path.append(usermodPath)
import pythonAssist
from pythonAssist import *
plt.style.use('seaborn-whitegrid')
SMALL_SIZE = 17
MEDIUM_SIZE = 22
BIGGER_SIZE = 22
AppendLog=1
plt.rc('font', size=SMALL_SIZE,weight = 'bold') # controls default text sizes
plt.rc('axes', titlesize=SMALL_SIZE) # fontsize of the axes title
plt.rc('axes', labelsize=BIGGER_SIZE) # fontsize of the x and y labels
plt.rc('xtick', labelsize=MEDIUM_SIZE) # fontsize of the tick labels
plt.rc('ytick', labelsize=MEDIUM_SIZE) # fontsize of the tick labels
plt.rc('legend', fontsize=MEDIUM_SIZE) # legend fontsize
plt.rc('figure', titlesize=BIGGER_SIZE) # fontsize of the figure title
plt.rc('figure', figsize = (8, 8))
#%% Import data
dt_start ='2021-10-22 00:00:00' # Select start date in the form yyyy-mm-dd_HH-MM-SS
dt_end = dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S') + dt.timedelta(days=7)# Select end date in the form yyyy-mm-dd_HH-MM-SS
dt_end = dt_end.strftime('%Y-%m-%d %H:%M:%S')
# Import csv
path = r'Z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Data\10min_Data'
filename = glob.glob(path+'\Bremerhaven WTG01*.csv')
df=pd.read_csv(filename[0], sep = ';',decimal = ',',header=0)
df['TimeStamp'] = [dt.datetime.strptime(date, '%Y-%m-%d %H:%M:%S') for date in df['TimeStamp']]
## Filtering conditions
cond0 = df['WS_free_avg'].notnull() # non-zero values
cond1 = (df["TimeStamp"]>=dt_start)&(df["TimeStamp"]<dt_end) # time interval filtering
cond2 = (df["SaDataValid"]==True) # 95% availability of 10 Hz data, wind vector +-90° in front of turbine within 10 min
cond3 = (df["DataOK"]==True) # data.TotalCountNoRotation=0, 95% availability, min & max rotor rpm, avg rotor rpm, free wind speed > 3.5 m/s, sample ID!= 0
cond4 = (df['WS_free_avg'] > 0) & (df['WS_free_avg'] < 50) # wind speed physical limits
## extra parameters for logbook
# no. of pts during the last week
Npts = df.loc[(cond0 & cond1),:].shape[0]
Nvalid = df.loc[(cond0 & cond1 & cond2),:].shape[0]
Nws_valid = df.loc[(cond0 & cond1 & cond2),df.columns.values[1]].shape[0]
Nwt_valid = df.loc[(cond0 & cond1 & cond2 & cond3),:].shape[0]
Nyaw_valid = df.loc[(cond0 & cond1 & cond3),:].shape[0]
# filling in the weekly availabiliy as 1/0 based on number of points
weekly_avail = []
import more_itertools
step= 144
length = 144
idx = cond1[cond1==True].index
N_win = np.int64(len(df.loc[cond1,'WS_free_avg'])/step)
window = np.transpose(list(more_itertools.windowed(df.loc[cond1,'WS_free_avg'], n=length, fillvalue=np.nan, step=step)))
condn = np.transpose(list(more_itertools.windowed(np.array(cond0[idx] & cond1[idx] & cond2[idx] & cond4[idx]), n=length, fillvalue=np.nan, step=step))).astype(bool)
for i in np.arange(N_win):
daily_avail = window[condn[:,i],i].shape[0]/length
if daily_avail >= 0.6:
weekly_avail.append(1)
else:
weekly_avail.append(0)
#%% Plots
# Plot all
# for i in range(len(df.columns)):
# # date-ws_free_avg
# fig = plt.figure(figsize = (20, 8))
# ax = fig.add_subplot(111)
# ax.plot(df.loc[ cond1,df.columns[0]],df.loc[cond1,i],'.',color = 'gray',label = 'Invalid data');
# ax.plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,i],'.',label = 'Valid data');
# ax.set_xlabel(df.columns[0],labelpad=40,weight= 'bold')
# ax.set_ylabel(df.columns[i],labelpad=40,weight= 'bold')
# date_form = DateFormatter("%d/%m")
# ax.xaxis.set_major_formatter(date_form)
# # ax.set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
# # ax.set_ylim([0,25])
# ax.legend()
# # plt.savefig(r'Z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Data\QM\TS_' + path[155:165]+'_'+df.columns[i]+'.png',bbox_inches='tight')
fig,ax = plt.subplots(4,1, figsize = (10, 10),sharex=True)
ax[0].plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,'WS_free_avg'],'.',label = 'Valid data');
ax[0].set_xlabel('date',labelpad=40,weight= 'bold')
ax[0].set_ylabel("WS_free_avg [m/s]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[0].xaxis.set_major_formatter(date_form)
ax[0].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[0].set_ylim([0,25])
ax[1].plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,'YA_corr_avg'],'.',label = 'Valid data');
ax[1].set_xlabel('date',labelpad=40,weight= 'bold')
ax[1].set_ylabel("YA_corr_avg [$^o$]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[1].xaxis.set_major_formatter(date_form)
ax[1].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[1].set_ylim([-45,45])
ax[2].plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,'ARS_avg'],'.',label = 'Valid data');
ax[2].set_xlabel('date',labelpad=40,weight= 'bold')
ax[2].set_ylabel("ARS_avg [$^o$/s]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[2].xaxis.set_major_formatter(date_form)
ax[2].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[2].set_ylim([0,55])
ax[3].plot(df.loc[ cond1,df.columns[0]],df.notnull().values[cond1,1]*100,'.',color = 'limegreen', label = 'Valid data');
ax[3].plot(df.loc[ cond1&(df.isnull().values[:,1]),df.columns[0]],df.notnull().values[cond1&(df.isnull().values[:,1]),1]*100,'.',color = 'red', label = 'invalid data');
ax[3].set_xlabel('date',labelpad=40,weight= 'bold')
ax[3].set_ylabel("Availability [%]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[3].xaxis.set_major_formatter(date_form)
ax[3].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[3].set_ylim([-5,110])
plt.xlabel('date',labelpad=10,weight= 'bold')
plt.subplots_adjust(wspace=0, hspace=0.1)
plt.savefig(r'Z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Data\10min_Data\QM\\'+dt_start[0:10]+'_'+dt_end[0:10]+'.png',
bbox_inches='tight',dpi = 100)
plt.show()
## Append the quality check at the end of verification
# extracting the worksheet
import openpyxl as opl
xl_filepath = r'z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Documentation\Logbook_iSpin.xlsx'
wb = opl.load_workbook(xl_filepath)
# grab the active worksheet
ws = wb['Datenabholung']
# grab the workshet title
ws_title = ws.title
print('[{}] - Active sheet title: {} \n'.format(now(), ws_title))
# appending the worksheet
Str = '{} - {}'.format(dt_start[0:10], dt_end[0:10])
purpose = ('{}'.format(input('Please enter purpose: 0-Überwachung/Observation, 1-Datenabholung (default): ')))
if purpose == '0':
pStr = 'Überwachung'
elif purpose == '1':
pStr = 'Datenabholung'
else:
pStr = 'Datenabholung'
print('[{}] - Input not in the list! Assumed purpose= {} \n'.format(now(), pStr))
test_Data = ('Data {}'.format(input('Please enter OK or not OK plus any comments:')))
appData = [today(), pStr, Str, test_Data,Npts,Nvalid,Nws_valid, Nwt_valid, Nyaw_valid]
appData.extend(np.transpose(weekly_avail))
# target = wb.copy_worksheet(ws) % making a copy of existing worksheet
# iteration to find the last row with values in it
nrows = ws.max_row
lastrow = 0
if nrows > 1000:
nrows = 1000
while True:
if ws.cell(nrows, 3).value != None:
lastrow = nrows
break
else:
nrows -= 1
# appending to the worksheet and saving it
if AppendLog==1: # if AppendLog is wished at start
count=0
# iterate over all entries in appData array which contains variables for appending the excel
for ncol, entry in enumerate(appData,start=1):
# print(ncol, entry)
ws.cell(row=1+nrows, column=ncol, value=entry)
count += 1
print('[{}] - No. of entries made: {} \n'.format(now(), count))
wb.save(xl_filepath) # file should be closed to save
print('[{}] - Changes saved to: {} \n'.format(now(), ws_title))
else:
print('[{}] - Changes not saved to: {} \n'.format(now(), ws_title))
## References:
# https://realpython.com/openpyxl-excel-spreadsheets-python/
## Links to data and logbook
# z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\04_Nacelle-Lidars-Inflow_CONFIDENTIAL\30_Data\Logbook_NacelleLidars.xlsx'
| 44.563981 | 240 | 0.696373 | # -*- coding: utf-8 -*-
"""
Created on Wed Jul 29 07:57:54 2020
@author: papalk
"""
# Code logbook
# 10.02.2021 - Handover from Alkistis
# changed filename to filename[0] due to variable type error
# Changing from df.ix[...,0] to df.loc[..., df.columns[0]] due to a newer pandas version
import sys
import datetime as dt
import matplotlib.pyplot as plt
import matplotlib.pylab as pylab
from matplotlib.dates import DateFormatter
from matplotlib.ticker import StrMethodFormatter
import pandas as pd
import numpy as np
import xarray as xr
from mpl_toolkits.mplot3d import Axes3D
import glob
# import user modules
usermodPath = r'../../userModules'
sys.path.append(usermodPath)
import pythonAssist
from pythonAssist import *
plt.style.use('seaborn-whitegrid')
SMALL_SIZE = 17
MEDIUM_SIZE = 22
BIGGER_SIZE = 22
AppendLog=1
plt.rc('font', size=SMALL_SIZE,weight = 'bold') # controls default text sizes
plt.rc('axes', titlesize=SMALL_SIZE) # fontsize of the axes title
plt.rc('axes', labelsize=BIGGER_SIZE) # fontsize of the x and y labels
plt.rc('xtick', labelsize=MEDIUM_SIZE) # fontsize of the tick labels
plt.rc('ytick', labelsize=MEDIUM_SIZE) # fontsize of the tick labels
plt.rc('legend', fontsize=MEDIUM_SIZE) # legend fontsize
plt.rc('figure', titlesize=BIGGER_SIZE) # fontsize of the figure title
plt.rc('figure', figsize = (8, 8))
#%% Import data
dt_start ='2021-10-22 00:00:00' # Select start date in the form yyyy-mm-dd_HH-MM-SS
dt_end = dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S') + dt.timedelta(days=7)# Select end date in the form yyyy-mm-dd_HH-MM-SS
dt_end = dt_end.strftime('%Y-%m-%d %H:%M:%S')
# Import csv
path = r'Z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Data\10min_Data'
filename = glob.glob(path+'\Bremerhaven WTG01*.csv')
df=pd.read_csv(filename[0], sep = ';',decimal = ',',header=0)
df['TimeStamp'] = [dt.datetime.strptime(date, '%Y-%m-%d %H:%M:%S') for date in df['TimeStamp']]
## Filtering conditions
cond0 = df['WS_free_avg'].notnull() # non-zero values
cond1 = (df["TimeStamp"]>=dt_start)&(df["TimeStamp"]<dt_end) # time interval filtering
cond2 = (df["SaDataValid"]==True) # 95% availability of 10 Hz data, wind vector +-90° in front of turbine within 10 min
cond3 = (df["DataOK"]==True) # data.TotalCountNoRotation=0, 95% availability, min & max rotor rpm, avg rotor rpm, free wind speed > 3.5 m/s, sample ID!= 0
cond4 = (df['WS_free_avg'] > 0) & (df['WS_free_avg'] < 50) # wind speed physical limits
## extra parameters for logbook
# no. of pts during the last week
Npts = df.loc[(cond0 & cond1),:].shape[0]
Nvalid = df.loc[(cond0 & cond1 & cond2),:].shape[0]
Nws_valid = df.loc[(cond0 & cond1 & cond2),df.columns.values[1]].shape[0]
Nwt_valid = df.loc[(cond0 & cond1 & cond2 & cond3),:].shape[0]
Nyaw_valid = df.loc[(cond0 & cond1 & cond3),:].shape[0]
# filling in the weekly availabiliy as 1/0 based on number of points
weekly_avail = []
import more_itertools
step= 144
length = 144
idx = cond1[cond1==True].index
N_win = np.int64(len(df.loc[cond1,'WS_free_avg'])/step)
window = np.transpose(list(more_itertools.windowed(df.loc[cond1,'WS_free_avg'], n=length, fillvalue=np.nan, step=step)))
condn = np.transpose(list(more_itertools.windowed(np.array(cond0[idx] & cond1[idx] & cond2[idx] & cond4[idx]), n=length, fillvalue=np.nan, step=step))).astype(bool)
for i in np.arange(N_win):
daily_avail = window[condn[:,i],i].shape[0]/length
if daily_avail >= 0.6:
weekly_avail.append(1)
else:
weekly_avail.append(0)
#%% Plots
# Plot all
# for i in range(len(df.columns)):
# # date-ws_free_avg
# fig = plt.figure(figsize = (20, 8))
# ax = fig.add_subplot(111)
# ax.plot(df.loc[ cond1,df.columns[0]],df.loc[cond1,i],'.',color = 'gray',label = 'Invalid data');
# ax.plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,i],'.',label = 'Valid data');
# ax.set_xlabel(df.columns[0],labelpad=40,weight= 'bold')
# ax.set_ylabel(df.columns[i],labelpad=40,weight= 'bold')
# date_form = DateFormatter("%d/%m")
# ax.xaxis.set_major_formatter(date_form)
# # ax.set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
# # ax.set_ylim([0,25])
# ax.legend()
# # plt.savefig(r'Z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Data\QM\TS_' + path[155:165]+'_'+df.columns[i]+'.png',bbox_inches='tight')
fig,ax = plt.subplots(4,1, figsize = (10, 10),sharex=True)
ax[0].plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,'WS_free_avg'],'.',label = 'Valid data');
ax[0].set_xlabel('date',labelpad=40,weight= 'bold')
ax[0].set_ylabel("WS_free_avg [m/s]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[0].xaxis.set_major_formatter(date_form)
ax[0].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[0].set_ylim([0,25])
ax[1].plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,'YA_corr_avg'],'.',label = 'Valid data');
ax[1].set_xlabel('date',labelpad=40,weight= 'bold')
ax[1].set_ylabel("YA_corr_avg [$^o$]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[1].xaxis.set_major_formatter(date_form)
ax[1].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[1].set_ylim([-45,45])
ax[2].plot(df.loc[ cond1&cond2,df.columns[0]],df.loc[cond1&cond2,'ARS_avg'],'.',label = 'Valid data');
ax[2].set_xlabel('date',labelpad=40,weight= 'bold')
ax[2].set_ylabel("ARS_avg [$^o$/s]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[2].xaxis.set_major_formatter(date_form)
ax[2].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[2].set_ylim([0,55])
ax[3].plot(df.loc[ cond1,df.columns[0]],df.notnull().values[cond1,1]*100,'.',color = 'limegreen', label = 'Valid data');
ax[3].plot(df.loc[ cond1&(df.isnull().values[:,1]),df.columns[0]],df.notnull().values[cond1&(df.isnull().values[:,1]),1]*100,'.',color = 'red', label = 'invalid data');
ax[3].set_xlabel('date',labelpad=40,weight= 'bold')
ax[3].set_ylabel("Availability [%]",labelpad=40,weight= 'bold')
date_form = DateFormatter("%d/%m")
ax[3].xaxis.set_major_formatter(date_form)
ax[3].set_xlim([ dt.datetime.strptime(dt_start, '%Y-%m-%d %H:%M:%S'), dt.datetime.strptime(dt_end, '%Y-%m-%d %H:%M:%S')])
ax[3].set_ylim([-5,110])
plt.xlabel('date',labelpad=10,weight= 'bold')
plt.subplots_adjust(wspace=0, hspace=0.1)
plt.savefig(r'Z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Data\10min_Data\QM\\'+dt_start[0:10]+'_'+dt_end[0:10]+'.png',
bbox_inches='tight',dpi = 100)
plt.show()
## Append the quality check at the end of verification
# extracting the worksheet
import openpyxl as opl
xl_filepath = r'z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\02_iSpin\Documentation\Logbook_iSpin.xlsx'
wb = opl.load_workbook(xl_filepath)
# grab the active worksheet
ws = wb['Datenabholung']
# grab the workshet title
ws_title = ws.title
print('[{}] - Active sheet title: {} \n'.format(now(), ws_title))
# appending the worksheet
Str = '{} - {}'.format(dt_start[0:10], dt_end[0:10])
purpose = ('{}'.format(input('Please enter purpose: 0-Überwachung/Observation, 1-Datenabholung (default): ')))
if purpose == '0':
pStr = 'Überwachung'
elif purpose == '1':
pStr = 'Datenabholung'
else:
pStr = 'Datenabholung'
print('[{}] - Input not in the list! Assumed purpose= {} \n'.format(now(), pStr))
test_Data = ('Data {}'.format(input('Please enter OK or not OK plus any comments:')))
appData = [today(), pStr, Str, test_Data,Npts,Nvalid,Nws_valid, Nwt_valid, Nyaw_valid]
appData.extend(np.transpose(weekly_avail))
# target = wb.copy_worksheet(ws) % making a copy of existing worksheet
# iteration to find the last row with values in it
nrows = ws.max_row
lastrow = 0
if nrows > 1000:
nrows = 1000
while True:
if ws.cell(nrows, 3).value != None:
lastrow = nrows
break
else:
nrows -= 1
# appending to the worksheet and saving it
if AppendLog==1: # if AppendLog is wished at start
count=0
# iterate over all entries in appData array which contains variables for appending the excel
for ncol, entry in enumerate(appData,start=1):
# print(ncol, entry)
ws.cell(row=1+nrows, column=ncol, value=entry)
count += 1
print('[{}] - No. of entries made: {} \n'.format(now(), count))
wb.save(xl_filepath) # file should be closed to save
print('[{}] - Changes saved to: {} \n'.format(now(), ws_title))
else:
print('[{}] - Changes not saved to: {} \n'.format(now(), ws_title))
## References:
# https://realpython.com/openpyxl-excel-spreadsheets-python/
## Links to data and logbook
# z:\Projekte\109797-TestfeldBHV\30_Technical_execution_Confidential\TP3\AP2_Aufbau_Infrastruktur\Infrastruktur_Windmessung\02_Equipment\04_Nacelle-Lidars-Inflow_CONFIDENTIAL\30_Data\Logbook_NacelleLidars.xlsx'
| 0 | 0 | 0 |
74a17973868b8405492f752954532503abd913b9 | 1,478 | py | Python | Course1/Week2/6_fibonacci_sum_last_digit.py | sinhars/Data-Structures-And-Algorithms | 92f42ac347a69260a28cfb37d3013bfe9b045448 | [
"MIT"
] | null | null | null | Course1/Week2/6_fibonacci_sum_last_digit.py | sinhars/Data-Structures-And-Algorithms | 92f42ac347a69260a28cfb37d3013bfe9b045448 | [
"MIT"
] | null | null | null | Course1/Week2/6_fibonacci_sum_last_digit.py | sinhars/Data-Structures-And-Algorithms | 92f42ac347a69260a28cfb37d3013bfe9b045448 | [
"MIT"
] | null | null | null | # Uses python3
import sys
if __name__ == '__main__':
input = sys.stdin.readline()
n = int(input)
print(fibonacci_sum_faster(n))
| 27.886792 | 100 | 0.593369 | # Uses python3
import sys
def fibonacci_sum_naive(n):
if n <= 1:
return n
previous = 0
current = 1
sum = 1
for _ in range(n - 1):
previous, current = current, previous + current
sum += current
return sum % 10
def fibonacci_sum_fast(n):
if n <= 1:
return n
prev_last_digit = 0
curr_last_digit = 1
sum_last_digit = (prev_last_digit + curr_last_digit) % 10
for _ in range(n - 1):
prev_last_digit, curr_last_digit = curr_last_digit, (prev_last_digit + curr_last_digit) % 10
sum_last_digit = (sum_last_digit + curr_last_digit) % 10
return (sum_last_digit)
def fibonacci_sum_faster(n):
all_fib_last = []
all_sums = []
full_pattern = False
for i in range(n + 1):
last_ = i if (i <= 1) else ((all_fib_last[i-1] + all_fib_last[i-2]) % 10)
sum_ = i if (i <= 1) else ((all_sums[i-1] + last_) % 10)
all_fib_last.append(last_)
all_sums.append(sum_)
if (i >= 2) & (all_sums[i - 1] == 0) & (all_sums[i] == 1):
full_pattern = True
break
sum_last_digits = None
if full_pattern:
all_sums.pop()
all_sums.pop()
pos = n % len(all_sums)
sum_last_digits = all_sums[pos]
else:
sum_last_digits = all_sums[len(all_sums) - 1]
return (sum_last_digits)
if __name__ == '__main__':
input = sys.stdin.readline()
n = int(input)
print(fibonacci_sum_faster(n))
| 1,264 | 0 | 72 |
8f7cb8c7fb06b389e9f4be8127c24d6efe69be22 | 1,234 | py | Python | sugar/templatetags/text_tags.py | acdha/django-sugar | cd41b8160e22856818cb1ce1df6fe83f9882ac36 | [
"BSD-3-Clause"
] | 2 | 2015-12-01T01:14:32.000Z | 2015-12-01T13:51:54.000Z | sugar/templatetags/text_tags.py | acdha/django-sugar | cd41b8160e22856818cb1ce1df6fe83f9882ac36 | [
"BSD-3-Clause"
] | null | null | null | sugar/templatetags/text_tags.py | acdha/django-sugar | cd41b8160e22856818cb1ce1df6fe83f9882ac36 | [
"BSD-3-Clause"
] | 2 | 2015-06-09T09:10:08.000Z | 2021-03-17T14:08:24.000Z | # encoding: utf-8
import re
from django import template
register = template.Library()
@register.filter
def truncchar(value, arg):
'''
Truncate after a certain number of characters.
Source: http://www.djangosnippets.org/snippets/194/
Notes
-----
Super stripped down filter to truncate after a certain number of letters.
Example
-------
{{ long_blurb|truncchar:20 }}
The above will display 20 characters of the long blurb followed by "..."
'''
if not isinstance(value, basestring):
value = unicode(value)
if len(value) < arg:
return value
else:
return value[:arg] + u'…'
@register.filter
def re_sub(string, args):
"""
Provide a full regular expression replace on strings in templates
Usage:
{{ my_variable|re_sub:"/(foo|bar)/baaz/" }}
"""
old = args.split(args[0])[1]
new = args.split(args[0])[2]
return re.sub(old, new, string)
@register.filter
def replace(string, args):
"""
Provide a standard Python string replace in templates
Usage:
{{ my_variable|replace:"/foo/bar/" }}
"""
old = args.split(args[0])[1]
new = args.split(args[0])[2]
return string.replace(old, new)
| 18.69697 | 77 | 0.623987 | # encoding: utf-8
import re
from django import template
register = template.Library()
@register.filter
def truncchar(value, arg):
'''
Truncate after a certain number of characters.
Source: http://www.djangosnippets.org/snippets/194/
Notes
-----
Super stripped down filter to truncate after a certain number of letters.
Example
-------
{{ long_blurb|truncchar:20 }}
The above will display 20 characters of the long blurb followed by "..."
'''
if not isinstance(value, basestring):
value = unicode(value)
if len(value) < arg:
return value
else:
return value[:arg] + u'…'
@register.filter
def re_sub(string, args):
"""
Provide a full regular expression replace on strings in templates
Usage:
{{ my_variable|re_sub:"/(foo|bar)/baaz/" }}
"""
old = args.split(args[0])[1]
new = args.split(args[0])[2]
return re.sub(old, new, string)
@register.filter
def replace(string, args):
"""
Provide a standard Python string replace in templates
Usage:
{{ my_variable|replace:"/foo/bar/" }}
"""
old = args.split(args[0])[1]
new = args.split(args[0])[2]
return string.replace(old, new)
| 0 | 0 | 0 |
30dd9d8b5666d553624f3f398eb4c42b85c0a46c | 197 | py | Python | public/python/yellow_led.py | pabloegpf1/GPIO-Express | 0c39568e5798b1e93069afab42427759f5f0a313 | [
"MIT"
] | null | null | null | public/python/yellow_led.py | pabloegpf1/GPIO-Express | 0c39568e5798b1e93069afab42427759f5f0a313 | [
"MIT"
] | null | null | null | public/python/yellow_led.py | pabloegpf1/GPIO-Express | 0c39568e5798b1e93069afab42427759f5f0a313 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(27,GPIO.OUT)
GPIO.output(27,GPIO.HIGH)
time.sleep(2)
GPIO.output(27,GPIO.LOW)
| 15.153846 | 25 | 0.756345 | #!/usr/bin/env python
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(27,GPIO.OUT)
GPIO.output(27,GPIO.HIGH)
time.sleep(2)
GPIO.output(27,GPIO.LOW)
| 0 | 0 | 0 |
c4884895e617d8312daf555d7a5e58ceb6705ef5 | 18,741 | py | Python | joulescope_ui/recording_viewer_device_v2.py | jetperch/pyjoulescope_ui | 0324562dafc1ad2ae4a6a8cd8d073582863bf394 | [
"Apache-2.0"
] | 31 | 2018-12-13T16:13:02.000Z | 2021-09-28T09:57:50.000Z | joulescope_ui/recording_viewer_device_v2.py | jetperch/pyjoulescope_ui | 0324562dafc1ad2ae4a6a8cd8d073582863bf394 | [
"Apache-2.0"
] | 144 | 2019-02-12T14:58:02.000Z | 2022-03-24T12:06:17.000Z | joulescope_ui/recording_viewer_device_v2.py | jetperch/pyjoulescope_ui | 0324562dafc1ad2ae4a6a8cd8d073582863bf394 | [
"Apache-2.0"
] | 7 | 2019-07-04T16:34:54.000Z | 2021-03-24T16:25:06.000Z | # Copyright 2018 Jetperch LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjls import Reader, DataType, AnnotationType, SignalType, SourceDef, SignalDef, SummaryFSR
from PySide2 import QtCore
from joulescope import span
from .widgets.waveform.annotations import AnnotationLoader
import os
import numpy as np
import threading
import queue
import weakref
import logging
TIMEOUT = 10.0
class RecordingView:
"""A user-interface-compatible device that displays previous recorded data"""
@property
@property
@property
def limits(self):
"""Get the (x_min, x_max) limits for the view."""
if self._span is not None:
return list(self._span.limits)
return None
@property
@property
def _get(self, start, stop, incr=None):
"""Get the statistics data.
:param start: The starting sample id (inclusive).
:param stop: The stop sample id (exclusive).
:param incr: The increment for each returned value.
None (default) is equivalent to 1.
:return: A statistics data structure.
"""
self._log.info('get: x_range=%r => (%s, %s, %s)', self._x_range, start, stop, incr)
reader = self._reader
fs = self.sampling_frequency
if incr is None:
incr = 1
elif incr < 1:
msg = f'incr {incr} < 1'
self._log.warning(msg)
raise RuntimeError(msg)
if stop < (start + incr):
msg = f'invalid range {start}, {stop}, {incr}'
self._log.warning(msg)
raise RuntimeError(msg)
x_len = (stop - start) // incr
stop = start + x_len * incr
t_start = start / fs
x = np.arange(x_len, dtype=np.float64)
x *= incr / fs
x += t_start
dx = (x[-1] - x[0]) + (incr - 1) / fs
result = {
'time': {
'x': {'value': x, 'units': 's'},
'delta': {'value': dx, 'units': 's'},
'samples': {'value': [start, stop], 'units': 'samples'},
'limits': {'value': self.limits, 'units': 's'},
},
'state': {'source_type': 'buffer'},
'signals': {},
}
for signal in reader.signals.values():
signal_id = signal.signal_id
if signal_id == 0:
continue
if signal.signal_type != SignalType.FSR:
continue
units = signal.units
try:
if incr > 1:
data = reader.fsr_statistics(signal_id, start, incr, x_len)
dmean = data[:, SummaryFSR.MEAN]
s = {
'µ': {'value': dmean, 'units': units},
'σ2': {'value': data[:, SummaryFSR.STD] * data[:, SummaryFSR.STD], 'units': units},
'min': {'value': data[:, SummaryFSR.MIN], 'units': units},
'max': {'value': data[:, SummaryFSR.MAX], 'units': units},
'p2p': {'value': data[:, SummaryFSR.MAX] - data[:, SummaryFSR.MIN], 'units': units},
# '∫': {'value': 0.0, 'units': units}, # todo
}
else:
data = reader.fsr(signal_id, start, x_len)
zeros = np.zeros(len(data), dtype=np.float32)
s = {
'µ': {'value': data, 'units': units},
'σ2': {'value': zeros, 'units': units},
'min': {'value': data, 'units': units},
'max': {'value': data, 'units': units},
'p2p': {'value': zeros, 'units': units},
# '∫': {'value': 0.0, 'units': units}, # todo
}
result['signals'][signal.name] = s
except Exception:
self._log.warning('view could not get %s', signal.name)
return result
    def _statistics_get(self, start=None, stop=None, units=None):
        """Get the statistics for the collected sample data over a time range.
        :param start: The starting time relative to the streaming start time.
        :param stop: The ending time.
        :param units: The units for start and stop.
            'seconds' or None is in floating point seconds relative to the view.
            'samples' is in stream buffer sample indices.
        :return: The statistics data structure.
        """
        self._log.info('_statistics_get(%s, %s, %s)', start, stop, units)
        if units == 'seconds':
            # Convert the seconds-based range into sample indices using the
            # device sampling rate before querying the reader.
            t_start, t_stop = start, stop
            fs = self.sampling_frequency
            start = int(round(start * fs))
            stop = int(round(stop * fs + 1)) # make exclusive
            self._log.info('_statistics_get(%s, %s, %s) => (%s, %s)', t_start, t_stop, units, start, stop)
        else:
            # NOTE(review): any other units value is passed through unchanged,
            # i.e. assumed to already be sample indices — confirm with callers.
            self._log.info('_statistics_get(%s, %s, %s)', start, stop, units)
        # A single increment covering the full range reduces it to one
        # aggregate statistics point per signal.
        s = self._get(start, stop, stop - start)
        return s
def samples_get(self, start=None, stop=None, units=None, fields=None):
"""Get exact samples over a range.
:param start: The starting time.
:param stop: The ending time.
:param units: The units for start and stop.
'seconds' or None is in floating point seconds relative to the view.
'samples' is in stream buffer sample indices.
:param fields: The list of field names to get.
"""
args = {'start': start, 'stop': stop, 'units': units, 'fields': fields}
return self._parent()._post_block('samples_get', self, args)
def statistics_get(self, start=None, stop=None, units=None, callback=None):
"""Get statistics over a range.
:param start: The starting time.
:param stop: The ending time.
:param units: The units for start and stop.
'seconds' or None is in floating point seconds relative to the view.
'samples' is in stream buffer sample indicies.
:param callback: The optional callable. When provided, this method will
not block and the callable will be called with the statistics
data structure from the view thread.
:return: The statistics data structure or None if callback is provided.
"""
args = {'start': start, 'stop': stop, 'units': units}
if callback is None:
return self._parent()._post_block('statistics_get', self, args)
else:
self._parent()._post('statistics_get', self, args, callback)
return
class RecordingViewerDeviceV2:
"""A user-interface-compatible device that displays previous recorded data
:param filename: The filename path to the pre-recorded data.
"""
@property
@property
@property
@property
@property
| 38.482546 | 108 | 0.567953 | # Copyright 2018 Jetperch LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjls import Reader, DataType, AnnotationType, SignalType, SourceDef, SignalDef, SummaryFSR
from PySide2 import QtCore
from joulescope import span
from .widgets.waveform.annotations import AnnotationLoader
import os
import numpy as np
import threading
import queue
import weakref
import logging
TIMEOUT = 10.0
class RecordingView:
"""A user-interface-compatible device that displays previous recorded data"""
def __init__(self, parent):
self._parent = weakref.ref(parent)
self._x_range = [0.0, 1.0]
self._span = None
self._x = None
self._samples_per = 1
self._refresh_requested = False
self._cache = None
self.on_update_fn = None # callable(data)
self._log = logging.getLogger(__name__)
def __str__(self):
return f'RecordingView()'
def __len__(self):
if self._span is None:
return 0
return self._span.length
@property
def sampling_frequency(self):
return self._parent().sampling_frequency
@property
def calibration(self):
return None
@property
def limits(self):
"""Get the (x_min, x_max) limits for the view."""
if self._span is not None:
return list(self._span.limits)
return None
@property
def _reader(self):
return self._parent()._reader
@property
def voltage_range(self):
return 0
def _on_x_change(self, cmd, kwargs):
x_range = self._x_range
if cmd == 'resize': # {pixels: int}
length = kwargs['pixels']
if length is not None and length != self._span.length:
self._log.info('resize %s', length)
self._span.length = length
self._cache = None # invalidate
x_range, self._samples_per, self._x = self._span.conform_discrete(x_range)
elif cmd == 'span_absolute': # {range: (start: float, stop: float)}]
x_range, self._samples_per, self._x = self._span.conform_discrete(kwargs.get('range'))
elif cmd == 'span_relative': # {pivot: float, gain: float}]
x_range, self._samples_per, self._x = self._span.conform_discrete(
x_range, gain=kwargs.get('gain'), pivot=kwargs.get('pivot'))
elif cmd == 'span_pan':
delta = kwargs.get('delta', 0.0)
x_range = [x_range[0] + delta, x_range[-1] + delta]
x_range, self._samples_per, self._x = self._span.conform_discrete(x_range)
elif cmd == 'refresh':
self._cache = None # invalidate
self._refresh_requested = True
return
else:
self._log.warning('on_x_change(%s) unsupported', cmd)
return
if self._x_range != x_range:
self._cache = None # invalidate
self._x_range = x_range
self._refresh_requested = True
self._log.info('cmd=%s, changed=%s, length=%s, span=%s, range=%s, samples_per=%s',
cmd, self._cache is None, len(self), self._x_range,
self._x_range[1] - self._x_range[0], self._samples_per)
def _get(self, start, stop, incr=None):
"""Get the statistics data.
:param start: The starting sample id (inclusive).
:param stop: The stop sample id (exclusive).
:param incr: The increment for each returned value.
None (default) is equivalent to 1.
:return: A statistics data structure.
"""
self._log.info('get: x_range=%r => (%s, %s, %s)', self._x_range, start, stop, incr)
reader = self._reader
fs = self.sampling_frequency
if incr is None:
incr = 1
elif incr < 1:
msg = f'incr {incr} < 1'
self._log.warning(msg)
raise RuntimeError(msg)
if stop < (start + incr):
msg = f'invalid range {start}, {stop}, {incr}'
self._log.warning(msg)
raise RuntimeError(msg)
x_len = (stop - start) // incr
stop = start + x_len * incr
t_start = start / fs
x = np.arange(x_len, dtype=np.float64)
x *= incr / fs
x += t_start
dx = (x[-1] - x[0]) + (incr - 1) / fs
result = {
'time': {
'x': {'value': x, 'units': 's'},
'delta': {'value': dx, 'units': 's'},
'samples': {'value': [start, stop], 'units': 'samples'},
'limits': {'value': self.limits, 'units': 's'},
},
'state': {'source_type': 'buffer'},
'signals': {},
}
for signal in reader.signals.values():
signal_id = signal.signal_id
if signal_id == 0:
continue
if signal.signal_type != SignalType.FSR:
continue
units = signal.units
try:
if incr > 1:
data = reader.fsr_statistics(signal_id, start, incr, x_len)
dmean = data[:, SummaryFSR.MEAN]
s = {
'µ': {'value': dmean, 'units': units},
'σ2': {'value': data[:, SummaryFSR.STD] * data[:, SummaryFSR.STD], 'units': units},
'min': {'value': data[:, SummaryFSR.MIN], 'units': units},
'max': {'value': data[:, SummaryFSR.MAX], 'units': units},
'p2p': {'value': data[:, SummaryFSR.MAX] - data[:, SummaryFSR.MIN], 'units': units},
# '∫': {'value': 0.0, 'units': units}, # todo
}
else:
data = reader.fsr(signal_id, start, x_len)
zeros = np.zeros(len(data), dtype=np.float32)
s = {
'µ': {'value': data, 'units': units},
'σ2': {'value': zeros, 'units': units},
'min': {'value': data, 'units': units},
'max': {'value': data, 'units': units},
'p2p': {'value': zeros, 'units': units},
# '∫': {'value': 0.0, 'units': units}, # todo
}
result['signals'][signal.name] = s
except Exception:
self._log.warning('view could not get %s', signal.name)
return result
def _update(self):
try:
reader = self._reader
if not callable(self.on_update_fn) or reader is None:
return
self._refresh_requested = False
if self._cache is not None:
self.on_update_fn(self._cache)
return
fs = self.sampling_frequency
start, stop = [int(x * fs) for x in self._x_range]
self._cache = self._get(start, stop, self._samples_per)
self.on_update_fn(self._cache)
except Exception:
self._log.exception('view update failed')
def _statistics_get(self, start=None, stop=None, units=None):
"""Get the statistics for the collected sample data over a time range.
:param start: The starting time relative to the streaming start time.
:param stop: The ending time.
:param units: The units for start and stop.
'seconds' or None is in floating point seconds relative to the view.
'samples' is in stream buffer sample indices.
:return: The statistics data structure.
"""
self._log.info('_statistics_get(%s, %s, %s)', start, stop, units)
if units == 'seconds':
t_start, t_stop = start, stop
fs = self.sampling_frequency
start = int(round(start * fs))
stop = int(round(stop * fs + 1)) # make exclusive
self._log.info('_statistics_get(%s, %s, %s) => (%s, %s)', t_start, t_stop, units, start, stop)
else:
self._log.info('_statistics_get(%s, %s, %s)', start, stop, units)
s = self._get(start, stop, stop - start)
return s
def _statistics_get_multiple(self, ranges, units=None):
self._log.info('_statistics_get_multiple(%s, %s)', ranges, units)
return [self._statistics_get(x[0], x[1], units=units) for x in ranges]
def _samples_get(self, start=None, stop=None, units=None, fields=None):
self._log.info('_samples_get(%s, %s, %s, %s)', start, stop, units, fields)
r = self._reader
if r is None:
return None
return r.samples_get(start, stop, units, fields)
def open(self):
fs = self.sampling_frequency
sample_id_last = self._parent().sample_id_last
x_lim = [0, sample_id_last / fs]
self._span = span.Span(x_lim, 1.0 / fs, 100)
self._x_range, self._samples_per, self._x = self._span.conform_discrete(x_lim)
self._cache = None # invalidate
def close(self):
if self._parent()._thread is not None:
return self._parent()._post_block('view_close', None, self)
def refresh(self, force=None):
return self._parent()._post('refresh', self, {'force': force})
def on_x_change(self, cmd, kwargs):
self._parent()._post('on_x_change', self, (cmd, kwargs))
def samples_get(self, start=None, stop=None, units=None, fields=None):
"""Get exact samples over a range.
:param start: The starting time.
:param stop: The ending time.
:param units: The units for start and stop.
'seconds' or None is in floating point seconds relative to the view.
'samples' is in stream buffer sample indices.
:param fields: The list of field names to get.
"""
args = {'start': start, 'stop': stop, 'units': units, 'fields': fields}
return self._parent()._post_block('samples_get', self, args)
def statistics_get(self, start=None, stop=None, units=None, callback=None):
"""Get statistics over a range.
:param start: The starting time.
:param stop: The ending time.
:param units: The units for start and stop.
'seconds' or None is in floating point seconds relative to the view.
'samples' is in stream buffer sample indicies.
:param callback: The optional callable. When provided, this method will
not block and the callable will be called with the statistics
data structure from the view thread.
:return: The statistics data structure or None if callback is provided.
"""
args = {'start': start, 'stop': stop, 'units': units}
if callback is None:
return self._parent()._post_block('statistics_get', self, args)
else:
self._parent()._post('statistics_get', self, args, callback)
return
def statistics_get_multiple(self, ranges, units=None, callback=None, source_id=None):
args = {'ranges': ranges, 'units': units, 'source_id': source_id}
if callback is None:
return self._parent()._post_block('statistics_get_multiple', self, args)
else:
self._parent()._post('statistics_get_multiple', self, args, callback)
return
def ping(self, *args, **kwargs):
return self._parent()._post_block('ping', self, (args, kwargs))
class RecordingViewerDeviceV2:
"""A user-interface-compatible device that displays previous recorded data
:param filename: The filename path to the pre-recorded data.
"""
def __init__(self, parent, filename, cmdp=None):
if isinstance(filename, str) and not os.path.isfile(filename):
raise IOError('file not found')
self._parent = parent
self._filename = filename
self._cmdp = cmdp
self._reader: Reader = None
self._default_signal: SignalDef = None
self._views = []
self._coalesce = {}
self._thread = None
self._cmd_queue = queue.Queue() # tuples of (command, args, callback)
self._response_queue = queue.Queue()
self._quit = False
self._log = logging.getLogger(__name__)
self._loader = None
self._threadpool = None
def __str__(self):
return os.path.basename(self._filename)
@property
def filename(self):
return self._filename
@property
def sampling_frequency(self):
fs = self._default_signal.sample_rate
if fs <= 0:
raise RuntimeError('Invalid sampling_frequency')
return fs
@property
def sample_id_last(self):
return self._default_signal.length
@property
def calibration(self):
return None
@property
def voltage_range(self):
return 0
def _cmd_process(self, cmd, view, args, cbk):
rv = None
try:
# self._log.debug('_cmd_process %s - start', cmd)
if cmd == 'refresh':
view._refresh_requested = True
elif cmd == 'on_x_change':
rv = view._on_x_change(*args)
elif cmd == 'samples_get':
rv = view._samples_get(**args)
elif cmd == 'statistics_get':
rv = view._statistics_get(**args)
elif cmd == 'statistics_get_multiple':
rv = view._statistics_get_multiple(**args)
elif cmd == 'view_factory':
self._views.append(args)
rv = args
elif cmd == 'view_close':
if args in self._views:
self._views.remove(args)
elif cmd == 'open':
rv = self._open()
elif cmd == 'close':
rv = self._close()
elif cmd == 'ping':
rv = args
else:
self._log.warning('unsupported command %s', cmd)
except Exception:
self._log.exception('While running command')
if callable(cbk):
try:
cbk(rv)
except Exception:
self._log.exception('in callback')
def run(self):
cmd_count = 0
timeout = 1.0
self._log.info('RecordingViewerDevice.start')
while not self._quit:
try:
cmd, view, args, cbk = self._cmd_queue.get(timeout=timeout)
except queue.Empty:
timeout = 1.0
for value in self._coalesce.values():
self._cmd_process(*value)
self._coalesce.clear()
for view in self._views:
if view._refresh_requested:
view._update()
cmd_count = 0
continue
cmd_count += 1
timeout = 0.0
try:
source_id = args.pop('source_id')
except Exception:
source_id = None
if source_id is not None:
key = f'{view}_{cmd}_{source_id}' # keep most recent only
self._coalesce[key] = (cmd, view, args, cbk)
else:
self._cmd_process(cmd, view, args, cbk)
self._log.info('RecordingViewerDevice.run done')
def _post(self, command, view=None, args=None, cbk=None):
if self._thread is None:
self._log.info('RecordingViewerDevice._post(%s) when thread not running', command)
else:
self._cmd_queue.put((command, view, args, cbk))
def _post_block(self, command, view=None, args=None, timeout=None):
timeout = TIMEOUT if timeout is None else float(timeout)
# self._log.debug('_post_block %s start', command)
while not self._response_queue.empty():
self._log.warning('response queue not empty')
try:
self._response_queue.get(timeout=0.0)
except queue.Empty:
pass
if self._thread is None:
raise IOError('View thread not running')
self._post(command, view, args, lambda rv_=None: self._response_queue.put(rv_))
try:
rv = self._response_queue.get(timeout=timeout)
except queue.Empty as ex:
self._log.error('RecordingViewerDevice thread hung: %s - FORCE CLOSE', command)
self._post('close', None, None)
self._thread.join(timeout=TIMEOUT)
self._thread = None
rv = ex
except Exception as ex:
rv = ex
if isinstance(rv, Exception):
raise IOError(rv)
# self._log.debug('_post_block %s done', command) # rv
return rv
def _on_annotations_loaded(self, *args, **kwargs):
self._loader = None
self._threadpool.stop()
def _open(self):
self._log.info('RecordingViewerDevice.open')
self._reader = Reader(self._filename)
signals = self._reader.signals
if len(signals) <= 1:
raise RuntimeError('This JLS file is not currently supported')
self._default_signal = signals[1]
self._loader = AnnotationLoader(self._parent, self._filename, self._cmdp)
self._loader.signals.finished.connect(self._on_annotations_loaded)
self._loader.publish(self._reader) # handle base
self._threadpool = QtCore.QThreadPool()
self._threadpool.start(self._loader)
def _close(self):
if self._reader is not None:
self._reader.close()
self._reader = None
self._quit = True
def view_factory(self):
view = RecordingView(self)
return self._post_block('view_factory', None, view)
def open(self, event_callback_fn=None):
self.close()
self._log.info('open')
self._thread = threading.Thread(name='view', target=self.run)
self._thread.start()
self._post_block('open')
def close(self):
if self._thread is not None:
self._log.info('close')
try:
self._post_block('close')
except Exception:
self._log.exception('while attempting to close')
self._thread.join(timeout=TIMEOUT)
self._thread = None
| 10,435 | 0 | 907 |
c61b3c3a6dae08904316a919aedc1d74066916de | 3,496 | py | Python | examples/python/cancellation/client.py | arghyadip01/grpc | 9e10bfc8a096ef91a327e22f84f10c0fabff4417 | [
"Apache-2.0"
] | 36,552 | 2015-02-26T17:30:13.000Z | 2022-03-31T22:41:33.000Z | examples/python/cancellation/client.py | SanjanaSingh897/grpc | 2d858866eb95ce5de8ccc8c35189a12733d8ca79 | [
"Apache-2.0"
] | 23,536 | 2015-02-26T17:50:56.000Z | 2022-03-31T23:39:42.000Z | examples/python/cancellation/client.py | SanjanaSingh897/grpc | 2d858866eb95ce5de8ccc8c35189a12733d8ca79 | [
"Apache-2.0"
] | 11,050 | 2015-02-26T17:22:10.000Z | 2022-03-31T10:12:35.000Z | # Copyright 2019 the gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example of cancelling requests in gRPC."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import signal
import sys
import grpc
from examples.python.cancellation import hash_name_pb2
from examples.python.cancellation import hash_name_pb2_grpc
_DESCRIPTION = "A client for finding hashes similar to names."
_LOGGER = logging.getLogger(__name__)
if __name__ == "__main__":
logging.basicConfig()
main()
| 34.613861 | 81 | 0.658181 | # Copyright 2019 the gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An example of cancelling requests in gRPC."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import logging
import signal
import sys
import grpc
from examples.python.cancellation import hash_name_pb2
from examples.python.cancellation import hash_name_pb2_grpc
_DESCRIPTION = "A client for finding hashes similar to names."
_LOGGER = logging.getLogger(__name__)
def run_unary_client(server_target, name, ideal_distance):
    """Issue a single cancellable Find RPC and print the result.

    Installs a SIGINT handler so that Ctrl-C cancels the in-flight RPC
    before the process exits.

    :param server_target: The host-port pair at which to reach the server.
    :param name: The desired name.
    :param ideal_distance: The desired Hamming distance.
    """
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name, ideal_hamming_distance=ideal_distance)
        future = stub.Find.future(request, wait_for_ready=True)

        def cancel_request(unused_signum, unused_frame):
            # Cancel the outstanding RPC, then terminate cleanly.
            future.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, cancel_request)
        print(future.result())
def run_streaming_client(server_target, name, ideal_distance,
                         interesting_distance):
    """Stream cancellable FindRange results and print each candidate.

    Installs a SIGINT handler so that Ctrl-C cancels the server-streaming
    RPC before the process exits.

    :param server_target: The host-port pair at which to reach the server.
    :param name: The desired name.
    :param ideal_distance: The desired Hamming distance.
    :param interesting_distance: Also report candidates whose Hamming
        distance is below this value.
    """
    with grpc.insecure_channel(server_target) as channel:
        stub = hash_name_pb2_grpc.HashFinderStub(channel)
        request = hash_name_pb2.HashNameRequest(
            desired_name=name,
            ideal_hamming_distance=ideal_distance,
            interesting_hamming_distance=interesting_distance)
        result_generator = stub.FindRange(request, wait_for_ready=True)

        def cancel_request(unused_signum, unused_frame):
            # Cancel the streaming RPC, then terminate cleanly.
            result_generator.cancel()
            sys.exit(0)

        signal.signal(signal.SIGINT, cancel_request)
        for candidate in result_generator:
            print(candidate)
def main():
    """Parse the command line and dispatch to the unary or streaming client."""
    parser = argparse.ArgumentParser(description=_DESCRIPTION)
    parser.add_argument("name", type=str, help='The desired name.')
    parser.add_argument("--ideal-distance", default=0, nargs='?', type=int,
                        help="The desired Hamming distance.")
    parser.add_argument('--server', default='localhost:50051', type=str,
                        nargs='?',
                        help='The host-port pair at which to reach the server.')
    parser.add_argument(
        '--show-inferior', default=None, type=int, nargs='?',
        help='Also show candidates with a Hamming distance less than this value.'
    )
    args = parser.parse_args()
    if args.show_inferior is None:
        # No secondary threshold: a single request/response suffices.
        run_unary_client(args.server, args.name, args.ideal_distance)
    else:
        # Stream intermediate candidates that beat the secondary threshold.
        run_streaming_client(args.server, args.name, args.ideal_distance,
                             args.show_inferior)
if __name__ == "__main__":
logging.basicConfig()
main()
| 2,330 | 0 | 69 |
d64c7c0ae5f7486716af9d390676fe3fcb47970b | 405 | py | Python | cha_bebe/galeria/migrations/0002_album_capa.py | intelektos/Cha_bebe | 23df4af3901413c9c50e73bd305ade165c81001b | [
"MIT"
] | null | null | null | cha_bebe/galeria/migrations/0002_album_capa.py | intelektos/Cha_bebe | 23df4af3901413c9c50e73bd305ade165c81001b | [
"MIT"
] | 9 | 2020-06-08T03:31:08.000Z | 2022-01-13T02:44:42.000Z | cha_bebe/galeria/migrations/0002_album_capa.py | intelektos/Cha_bebe | 23df4af3901413c9c50e73bd305ade165c81001b | [
"MIT"
] | 1 | 2020-06-01T17:43:20.000Z | 2020-06-01T17:43:20.000Z | # Generated by Django 3.0.6 on 2020-05-14 20:41
from django.db import migrations, models
| 21.315789 | 85 | 0.595062 | # Generated by Django 3.0.6 on 2020-05-14 20:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional ``capa`` (cover image) field to the ``Album`` model."""
    # Applies on top of the app's initial schema migration.
    dependencies = [
        ('galeria', '0001_initial'),
    ]
    # blank=True/null=True make the cover optional; uploaded files are stored
    # under MEDIA_ROOT/galeria/capa.
    operations = [
        migrations.AddField(
            model_name='album',
            name='capa',
            field=models.ImageField(blank=True, null=True, upload_to='galeria/capa'),
        ),
    ]
| 0 | 291 | 23 |
19c4fa0d09483af5d6bddd0f9df81daca40d46e3 | 1,379 | py | Python | Paths/New Tab with Overlaps.py | harbortype/glyphs-scripts | 792785ac706d2cdf6ec5e7ea5282178e8ef54d83 | [
"Apache-2.0"
] | 23 | 2018-07-28T23:50:32.000Z | 2021-08-31T09:52:00.000Z | Paths/New Tab with Overlaps.py | harbortype/glyphs-scripts | 792785ac706d2cdf6ec5e7ea5282178e8ef54d83 | [
"Apache-2.0"
] | 2 | 2021-05-03T12:06:01.000Z | 2021-11-13T17:33:29.000Z | Paths/New Tab with Overlaps.py | harbortype/glyphs-scripts | 792785ac706d2cdf6ec5e7ea5282178e8ef54d83 | [
"Apache-2.0"
] | 1 | 2020-02-21T09:48:06.000Z | 2020-02-21T09:48:06.000Z | #MenuTitle: New Tab with Overlaps
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Opens a new Edit tab containing all glyphs that contain overlaps.
"""
import copy
thisFont = Glyphs.font # frontmost font
master_ids = [master.id for master in thisFont.masters] # all the master ids
try:
# Glyphs.clearLog() # clears log in Macro window
thisFont.disableUpdateInterface() # suppresses UI updates in Font View
text = ""
for thisGlyph in thisFont.glyphs:
# thisGlyph.beginUndo() # begin undo grouping
thisLayer = thisGlyph.layers[0]
if not thisLayer.paths:
continue
if thisLayer.layerId in master_ids or thisLayer.isSpecialLayer:
has_overlaps = check_for_overlaps(thisLayer)
if has_overlaps:
text += "/%s " % (thisGlyph.name)
# thisGlyph.endUndo() # end undo grouping
thisFont.newTab(text)
finally:
thisFont.enableUpdateInterface() # re-enables UI updates in Font View
| 30.644444 | 76 | 0.75417 | #MenuTitle: New Tab with Overlaps
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Opens a new Edit tab containing all glyphs that contain overlaps.
"""
import copy
thisFont = Glyphs.font # frontmost font
master_ids = [master.id for master in thisFont.masters] # all the master ids
def check_for_overlaps(lyr):
	"""Return True if any path segments on the layer intersect each other.

	Splits the layer's paths into segments, lets GSPathOperator insert
	intersection nodes, and reports whether the segment count grew.

	:param lyr: the layer whose paths should be examined.
	:return: True when intersections (overlaps) were found, else False.
	"""
	paths = list(lyr.paths)
	GSPathOperator = NSClassFromString("GSPathOperator")
	segments = GSPathOperator.segmentsFromPaths_(paths)
	count1 = len(segments)
	try: # Glyphs 3: class method is available directly
		GSPathOperator.addIntersections_(segments)
	except Exception: # Glyphs 2: requires an operator instance
		# Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
		# are no longer swallowed.
		PathOperator = GSPathOperator.new()
		PathOperator.addIntersections_(segments)
	# addIntersections_ mutates `segments` in place; extra entries appear
	# only when paths intersect.
	return len(segments) != count1
try:
	# Glyphs.clearLog() # clears log in Macro window
	thisFont.disableUpdateInterface() # suppresses UI updates in Font View
	# Accumulate an Edit-tab string ("/name ") for every glyph whose
	# first-listed layer shows overlapping paths.
	text = ""
	for thisGlyph in thisFont.glyphs:
		# thisGlyph.beginUndo() # begin undo grouping
		thisLayer = thisGlyph.layers[0]
		if not thisLayer.paths:
			continue
		# Only consider master layers and special (e.g. brace/bracket) layers.
		if thisLayer.layerId in master_ids or thisLayer.isSpecialLayer:
			has_overlaps = check_for_overlaps(thisLayer)
			if has_overlaps:
				text += "/%s " % (thisGlyph.name)
		# thisGlyph.endUndo() # end undo grouping
	# Open one Edit tab listing all flagged glyphs.
	thisFont.newTab(text)
finally:
	thisFont.enableUpdateInterface() # re-enables UI updates in Font View
| 404 | 0 | 23 |
265411f87b72e76316c668e81bd373a1cadf6fbf | 33,403 | py | Python | src/main.py | ChocolateEye/Math-and-Chemistry-Bot | 2e44eb21fb962e79a14b110009c75a13929adef2 | [
"MIT"
] | 5 | 2020-12-02T16:17:07.000Z | 2021-11-20T07:51:27.000Z | src/main.py | ChocolateEye/Math-and-Chemistry-Bot | 2e44eb21fb962e79a14b110009c75a13929adef2 | [
"MIT"
] | null | null | null | src/main.py | ChocolateEye/Math-and-Chemistry-Bot | 2e44eb21fb962e79a14b110009c75a13929adef2 | [
"MIT"
] | 1 | 2022-02-16T19:23:51.000Z | 2022-02-16T19:23:51.000Z | import random
import datetime
import asyncio
import math
import matplotlib.pyplot as plt
import numpy as np
import os
import json
import discord
from discord.ext import commands
intents = discord.Intents.default()
intents.members = True
intents.reactions = True
intents.messages = True
client = commands.Bot(command_prefix = '//', intents = intents)
client.remove_command("help")
@client.event
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
@commands.has_role("Owner") #requires a role named Owner in the server , you can change it to has_permission
@client.command()
@client.command()
@client.command()
@client.command()
@client.command()
client.run("bot token here")
| 62.31903 | 388 | 0.592851 | import random
import datetime
import asyncio
import math
import matplotlib.pyplot as plt
import numpy as np
import os
import json
import discord
from discord.ext import commands
intents = discord.Intents.default()
intents.members = True
intents.reactions = True
intents.messages = True
client = commands.Bot(command_prefix = '//', intents = intents)
client.remove_command("help")
@client.event
async def on_ready():
    # Fired once the bot has connected to Discord and finished start-up.
    print("Bot is ready!")
@client.command()
async def ping(ctx):
    # //ping -- report the bot's websocket latency in whole milliseconds.
    latency_ms = round(client.latency * 1000)
    await ctx.send(f"Pong! {latency_ms}ms")
@client.command()
async def help(ctx):
    # //help -- custom help embed (the built-in help command is removed at
    # startup via client.remove_command("help")).
    embed = discord.Embed(title = ":white_check_mark: **CALC HELP**" , description = "**PREFIX** : `//`" , timestamp=ctx.message.created_at, color = discord.Colour.blue())
    # One field per supported command / command family.
    embed.add_field(name = "**//calc <equation>**", value = "evaluates equation", inline = True)
    embed.add_field(name = "**//log <number> <base>**", value = "calculates log with input base", inline = True)
    embed.add_field(name = "**//ln <number>**", value = "calculates log with base e", inline = True)
    embed.add_field(name = "**//power <number> <power>**", value = "calculates the number raised to power", inline = True)
    embed.add_field(name = "**//diff help**", value = "use /diff <equation> from the list", inline = True)
    embed.add_field(name = "**//pt**", value = "Displays Periodic Table",inline = True)
    embed.add_field(name = "**//<sin/cos/tan/cosec/sec/cot> <number in degrees>**", value = "input in degrees", inline = True)
    embed.add_field(name = "**//element <elementnumber or name(first letter capital)>**", value = "Displays information about an element",inline = False)
    # Branding: bot avatar as thumbnail, guild name/icon in the footer.
    embed.set_thumbnail(url = client.user.avatar_url)
    embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
    await ctx.send(embed = embed)
@client.command()
async def calc(ctx, query):
    # //calc <equation> -- evaluate a Python expression and reply with the
    # result in an embed.
    try:
        # SECURITY: eval() executes arbitrary Python supplied by any Discord
        # user. Replace with a restricted expression parser (e.g. an
        # ast-based evaluator) before exposing this bot publicly.
        embed2 = discord.Embed(title = "`Equation`", description = "```"+str(query)+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed2.add_field(name = "`Solution`", value = "```"+str(eval(query))+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed2.set_thumbnail(url = "attachment://calculatoricon.png")
        embed2.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed2)
    except Exception:
        # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt and
        # task cancellation are no longer swallowed.
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def log(ctx, number : float, *, base : float):
    # //log <number> <base> -- logarithm of `number` in the given base.
    try:
        ans = math.log(number,base)
        # Crude display truncation to at most 6 characters of the result.
        val = str(ans)
        # NOTE(review): the displayed equation omits the base.
        embed3 = discord.Embed(title = "`Equation`", description = "```"+"log("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed3.add_field(name = "`Solution`", value = "```"+val[0:6]+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed3.set_thumbnail(url = "attachment://calculatoricon.png")
        embed3.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed3)
    except Exception:
        # Narrowed from bare `except:`; covers math domain errors
        # (number <= 0, invalid base) and failed sends.
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def ln(ctx, number : float):
    # //ln <number> -- natural (base-e) logarithm of `number`.
    try:
        ans1 = math.log(number)
        # Crude display truncation to at most 6 characters of the result.
        val1 = str(ans1)
        embed4 = discord.Embed(title = "`Equation`", description = "```"+"ln("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed4.add_field(name = "`Solution`", value = "```"+val1[0:6]+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed4.set_thumbnail(url = "attachment://calculatoricon.png")
        embed4.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed4)
    except Exception:
        # Narrowed from bare `except:`; covers math domain errors
        # (number <= 0) and failed sends.
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def sin(ctx, number : float):
    # //sin <degrees> -- sine of the angle, taken in degrees.
    try:
        # math.sin expects radians; the command accepts degrees.
        ans2 = math.sin(math.radians(number))
        # Crude display truncation to at most 6 characters of the result.
        val2 = str(ans2)
        embed5 = discord.Embed(title = "`Equation`", description = "```"+"Sin("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed5.add_field(name = "`Solution`", value = "```"+val2[0:6]+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed5.set_thumbnail(url = "attachment://calculatoricon.png")
        embed5.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed5)
    except Exception:
        # Narrowed from bare `except:` so interpreter exits and task
        # cancellation are no longer swallowed.
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def cos(ctx, number : float):
    # //cos <degrees> -- cosine of the angle, taken in degrees.
    try:
        if number%90 == 0:
            # Exact multiples of 90 degrees get special handling to avoid
            # floating-point residue like 6.1e-17 at odd multiples.
            mucheck = number/90
            if mucheck%2 == 0:
                # Even multiple of 90 (0, 180, 360, ...): cos is exactly +/-1,
                # which math.cos computes cleanly.
                ans13 = math.cos(math.radians(number))
                # Crude display truncation to at most 6 characters.
                val13 = str(ans13)
                embed10 = discord.Embed(title = "`Equation`", description = "```"+"Cos("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
                embed10.add_field(name = "`Solution`", value = "```"+val13[0:6]+"```")
                f = discord.File("calculatoricon.png", filename="calculatoricon.png")
                embed10.set_thumbnail(url = "attachment://calculatoricon.png")
                embed10.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
                await ctx.send(file = f,embed = embed10)
            else:
                # Odd multiple of 90 (90, 270, ...): cos is exactly zero.
                embed11 = discord.Embed(title = "`Equation`", description = "```"+"Cos("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
                embed11.add_field(name = "`Solution`", value = "```"+"0.0"+"```")
                f = discord.File("calculatoricon.png", filename="calculatoricon.png")
                embed11.set_thumbnail(url = "attachment://calculatoricon.png")
                embed11.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
                await ctx.send(file = f,embed = embed11)
        else:
            # General case: math.cos expects radians.
            ans3 = math.cos(math.radians(number))
            # Crude display truncation to at most 6 characters.
            val3 = str(ans3)
            embed12 = discord.Embed(title = "`Equation`", description = "```"+"Cos("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
            embed12.add_field(name = "`Solution`", value = "```"+val3[0:6]+"```")
            f = discord.File("calculatoricon.png", filename="calculatoricon.png")
            embed12.set_thumbnail(url = "attachment://calculatoricon.png")
            embed12.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
            await ctx.send(file = f,embed = embed12)
    except Exception:
        # Narrowed from bare `except:` so interpreter exits and task
        # cancellation are no longer swallowed.
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def tan(ctx,number : float):
    """Reply with an embed showing tan(number), *number* in degrees.

    Multiples of 90 are special-cased: tan is exactly 0 at multiples of 180
    and diverges ("Infinite") at odd multiples of 90.
    """
    try:
        if number % 90 == 0:
            solution = "0.0" if (number / 90) % 2 == 0 else "Infinite"
        else:
            solution = str(math.tan(math.radians(number)))[0:6]
        embed = discord.Embed(title = "`Equation`", description = "```"+"Tan("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed.add_field(name = "`Solution`", value = "```"+solution+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed.set_thumbnail(url = "attachment://calculatoricon.png")
        embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed)
    except Exception:  # narrowed from a bare except; still report any failure to the user
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def cot(ctx,number : float):
    """Reply with an embed showing cot(number) = 1/tan(number), degrees.

    Multiples of 90 are special-cased: cot diverges ("Infinite") at
    multiples of 180 and is exactly 0 at odd multiples of 90.
    """
    try:
        if number % 90 == 0:
            solution = "Infinite" if (number / 90) % 2 == 0 else "0.0"
        else:
            solution = str(1/(math.tan(math.radians(number))))[0:6]
        embed = discord.Embed(title = "`Equation`", description = "```"+"Cot("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed.add_field(name = "`Solution`", value = "```"+solution+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed.set_thumbnail(url = "attachment://calculatoricon.png")
        embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed)
    except Exception:  # narrowed from a bare except; still report any failure to the user
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def cosec(ctx,number : float):
    """Reply with an embed showing cosec(number) = 1/sin(number), degrees.

    Multiples of 180 degrees are special-cased to "Infinite" since sin is
    zero there and the division would fail.
    """
    try:
        if number % 90 == 0 and (number / 90) % 2 == 0:
            # sin(number) is exactly zero -> cosecant diverges
            solution = "Infinite"
        else:
            # The original's two remaining branches were byte-identical; merged here.
            solution = str(1/math.sin(math.radians(number)))[0:6]
        embed = discord.Embed(title = "`Equation`", description = "```"+"Cosec("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed.add_field(name = "`Solution`", value = "```"+solution+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed.set_thumbnail(url = "attachment://calculatoricon.png")
        embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed)
    except Exception:  # narrowed from a bare except; still report any failure to the user
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def sec(ctx,number : float):
    """Reply with an embed showing sec(number) = 1/cos(number), degrees.

    Odd multiples of 90 degrees are special-cased to "Infinite" since cos is
    zero there and the division would fail.
    """
    try:
        if number % 90 == 0 and (number / 90) % 2 == 1:
            # cos(number) is exactly zero -> secant diverges
            solution = "Infinite"
        else:
            # The original's two remaining branches were byte-identical; merged here.
            solution = str(1/math.cos(math.radians(number)))[0:6]
        embed = discord.Embed(title = "`Equation`", description = "```"+"Sec("+str(number)+")"+"```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed.add_field(name = "`Solution`", value = "```"+solution+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed.set_thumbnail(url = "attachment://calculatoricon.png")
        embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed)
    except Exception:  # narrowed from a bare except; still report any failure to the user
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def power(ctx,number : float,*,power : float):
    """Reply with number raised to *power* in a calculator embed."""
    try:
        result = str(math.pow(number,power))
        embed = discord.Embed(title = "`Equation`", description = "```("+str(number)+")^("+str(power)+")```", timestamp=ctx.message.created_at, color = discord.Colour.green())
        embed.add_field(name = "`Solution`", value = "```"+result+"```")
        f = discord.File("calculatoricon.png", filename="calculatoricon.png")
        embed.set_thumbnail(url = "attachment://calculatoricon.png")
        embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(file = f,embed = embed)
    except Exception:  # narrowed from a bare except; still report any failure to the user
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
async def diff(ctx,equation : str):
    """Reply with the derivative of a known identity as a pre-rendered image.

    `diff help` lists the supported inputs; each recognised identity maps to
    a local idN.png image that shows d/dx of that expression.
    """
    # Identity -> local image file containing its derivative.
    derivative_images = {
        "x": "id1.png", "ax": "id2.png", "sinx": "id3.png", "cosx": "id4.png",
        "x^n": "id5.png", "tanx": "id6.png", "secx": "id7.png", "cosecx": "id8.png",
        "cotx": "id9.png", "sin^-1(x)": "id10.png", "cos^-1(x)": "id11.png",
        "tan^-1(x)": "id12.png", "sec^-1(x)": "id13.png", "cosec^-1(x)": "id14.png",
        "cot^-1(x)": "id15.png", "e^x": "id16.png", "x^x": "id17.png",
        "a^x": "id18.png", "lnx": "id19.png",
    }
    try:
        if equation == "help":
            embed = discord.Embed(title = ":white_check_mark: List of identities to input in the command :-" , description = "`x` , `ax` , `x^n` , `x^x` , `a^x` , `e^x` , `ln(x)` \n`sinx` , `cosx` , `tanx` , `secx` , `cosecx` , `cotx`\n`sin^-1(x)` , `cos^-1(x)` , `tan^-1(x)`\n`sec^-1(x)` , `cosec^-1(x)` , `cot^-1(x)`", timestamp = ctx.message.created_at, color = discord.Colour.red())
            embed.set_thumbnail(url = client.user.avatar_url)
            embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
            await ctx.send(embed = embed)
        elif equation in derivative_images:
            image = derivative_images[equation]
            embed = discord.Embed(title = f"differentiation of ({equation}) w.r.t. (x) is", timestamp = ctx.message.created_at, color = discord.Colour.red())
            # BUG FIX: "cosecx" previously loaded a hard-coded absolute D:/ path;
            # it now uses the local id8.png like every other identity.
            f = discord.File(image, filename = image)
            embed.set_image(url = "attachment://" + image)
            embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
            await ctx.send(file = f,embed = embed)
        else:
            # The original silently ignored unknown input; tell the user instead.
            # NOTE(review): the help text advertises "ln(x)" but the recognised
            # key is "lnx" -- confirm which spelling users are expected to type.
            await ctx.send("Error : Invalid input! Try using something else!")
    except Exception:  # narrowed from a bare except; still report any failure to the user
        await ctx.send("Error : Invalid input! Try using something else!")
@client.command()
@commands.has_role("Owner") #requires a role named Owner in the server , you can change it to has_permission
async def clear(ctx,number : int):
    """Bulk-delete the last *number* messages from the invoking channel (Owner role only)."""
    await ctx.channel.purge(limit=number)
@client.command()
async def pt(ctx):
    """Send an embed whose image is the PubChem-hosted periodic table."""
    embed = discord.Embed(title = "Periodic Table", color = discord.Colour.green())
    embed.set_image(url = "https://pubchem.ncbi.nlm.nih.gov/periodic-table/Periodic_Table.png")
    await ctx.send(embed = embed)
@client.command()
async def element(ctx, element):
    """Look up a periodic-table element by atomic number (1-119) or exact name.

    Reads periodic_elements.json (assumed to follow the periodic-table JSON
    schema with an 'elements' list) and replies with an embed of the
    element's main physical properties.
    """
    with open('periodic_elements.json','r',encoding='utf-8') as f:
        data = json.load(f)

    async def send_element(entry):
        # Build and send the property embed for one element record.
        embed = discord.Embed(title = f":white_check_mark: **ELEMENT {entry['number']} -> {str(entry['name']).upper()} AND IT'S PROPERTIES**",description = f"**SYMBOL {entry['symbol']}**" , timestamp = ctx.message.created_at, color = discord.Colour.green())
        embed.set_thumbnail(url = "https://i.pinimg.com/originals/22/72/2b/22722b33f4d7e9d810c6bce2fe678128.jpg")
        embed.add_field(name = "**Atomic mass**" , value = str(entry["atomic_mass"]), inline = True)
        embed.add_field(name = "**Shells**", value = entry["shells"], inline = True)
        embed.add_field(name = "**Electronic Config**", value = entry["electron_configuration_semantic"], inline = True)
        embed.add_field(name = "**Phase**", value = str(entry["phase"]), inline = True)
        embed.add_field(name = "**Period**", value = str(entry["period"]), inline = True)
        embed.add_field(name = "**Appearance**", value = str(entry["appearance"]), inline = True)
        embed.add_field(name = "**Density (Kg/m3)**", value = str(entry["density"]), inline = True)
        embed.add_field(name = "**Category**", value = str(entry["category"]), inline = True)
        embed.add_field(name = "**Ionization Energies (KJ/mol)**", value = entry["ionization_energies"], inline = True)
        embed.add_field(name = "**Summary**", value = str(entry["summary"]), inline = False)
        embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
        await ctx.send(embed = embed)

    if element.isdigit():
        enum = int(element)
        # BUG FIX: the original accepted 0, which indexed elements[-1] (the
        # *last* element). Require the 1..119 range explicitly.
        if 1 <= enum < 120:
            await send_element(data['elements'][enum-1])
        else:
            embed = discord.Embed(title = ":white_check_mark: **ERROR 404, ELEMENT NOT FOUND**", description = "**Please provide a valid number!**", timestamp=ctx.message.created_at, color = discord.Colour.green())
            embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
            await ctx.send(embed = embed)
    else:
        # Exact (case-sensitive) name match, as in the original.
        for entry in data['elements']:
            if entry["name"] == element:
                await send_element(entry)
                break
        else:
            # BUG FIX: the original's "not found" branch was unreachable, so an
            # unknown name produced no reply at all; report it here.
            embed = discord.Embed(title = ":cyclone: **ERROR 404, ELEMENT NOT FOUND**", description = "**Please provide a valid element!**", timestamp=ctx.message.created_at, color = discord.Colour.green())
            embed.set_footer(text = ctx.guild, icon_url = ctx.guild.icon_url)
            await ctx.send(embed = embed)
@client.command()
async def create_tag(ctx, title : str ,*, description : str):
    """Create (or overwrite) a named tag persisted in tags.json."""
    # NOTE(review): discord.py's converters normally guarantee both arguments
    # are non-None strings; the guard is kept for safety.
    if title is None or description is None:
        await ctx.send("Please provide a title/description!")
    elif len(title) > 150:
        await ctx.send("Too long title!")
    else:
        try:
            with open('tags.json', 'r') as f:
                data3 = json.load(f)
            data3[str(title)] = description
            with open('tags.json', 'w') as f:
                json.dump(data3,f,indent=4)
            await ctx.send(f"Tag successfully added! To use it type `bs!tag {title}`")
        except Exception:  # narrowed from a bare except
            await ctx.send("There was an error while creating the tag D:")
@client.command()
async def delete_tag(ctx, title : str):
    """Delete a tag from tags.json by its exact title."""
    if title is None:
        await ctx.send("Please provide a title to delete!")
    else:
        try:
            with open('tags.json','r') as f:
                data4 = json.load(f)
            del data4[str(title)]
            with open('tags.json','w') as f:
                json.dump(data4,f,indent=4)
            await ctx.send(f"{title} tag successfully deleted!")
        except Exception:
            # BUG FIX: the original did `await("No such tag exists!")`, which
            # awaits a plain string (a TypeError) instead of sending the message.
            await ctx.send("No such tag exists!")
@client.command()
async def tag(ctx , title : str):
    """Look up a tag in tags.json and post its title and description."""
    try:
        with open('tags.json', 'r') as f:
            data4 = json.load(f)
        sending = data4[str(title)]
        await ctx.send(f"**__TAG TITLE:__** `{title}`\n\n__DESCRIPTION:__{sending}")
    except Exception:  # narrowed from a bare except (missing file, missing key, send failure)
        await ctx.send("Either there was an error or the tag was not found!!")
# Start the bot. Replace the placeholder with a real token and keep it out of
# source control (e.g. read it from an environment variable).
client.run("bot token here")
| 32,063 | 0 | 440 |
a7a400dd0f6ec96242a8200780d4b168fcc473a9 | 18,024 | py | Python | latency_search.py | jiuyecao/Opt-CoInfer | 60f29a28c34d3bf9b2f23c98bb8e98caf1abc4f0 | [
"MIT"
] | 1 | 2022-03-01T09:34:12.000Z | 2022-03-01T09:34:12.000Z | latency_search.py | Anonymity2022/Opt-CoInfer | 60f29a28c34d3bf9b2f23c98bb8e98caf1abc4f0 | [
"MIT"
] | null | null | null | latency_search.py | Anonymity2022/Opt-CoInfer | 60f29a28c34d3bf9b2f23c98bb8e98caf1abc4f0 | [
"MIT"
] | null | null | null | #coding=utf-8
import numpy
import random
import torch
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern
from scipy.stats import norm
import random
import os
from numpy import argmax
import argparse
global accuracy_loss_constraint, latency_constraint,optimal_scheme,avaliable_scheme,avaliable_evaluated_scheme
import warnings
warnings.filterwarnings("ignore")
# User define the following parameters, 10000 as a loose constraint means the constraint actually has no influence
parser = argparse.ArgumentParser()
parser.add_argument('-accuracy_or_latency', type=bool, default=False)
parser.add_argument('-accuracy_loss', type=float, default=10000)
parser.add_argument('-latency', type=float, default=10000)
parser.add_argument('-bandwidth', type=float, default=1)
parser.add_argument('-gpu', type=int, default=0)
args = parser.parse_args()
data_base_address='./latency_search.txt'
#The latency profiler of the IoT-Cloud system
bandwidth=args.bandwidth/8 #MB/s
accuracy_loss_base = 0.151
only_pi_latency=3.2571
only_cloud=360*240*3/1024/1024/bandwidth
base_transmission=224*224/8/1024/1024 #MB
layer=[64, 64,'M', 128, 128,'M', 256, 256, 256,'M', 512, 512, 512,'M', 512, 512, 512]
pi_layer_latency=[]
pi_latency=[0.0597,0.0937,0.1179,0.5048,0.5054,0.5074,0.51,0.5382,0.5557,0.5682,0.9844,1.0018,1.0142,1.055,1.2156,1.2246,1.2318,1.5411,1.5501,1.5577,1.8679,1.8768,1.8845,1.9072,2.0437,2.0484,2.0532,2.3183,2.3231,2.3279,2.5918,2.5966,2.6014,2.6134,2.6898,2.6913,2.6938,2.7694,2.7709,2.7735,2.8487,2.8503,2.8528,2.8569,3.2571]
gpu_layer_latency=[]
gpu_latency=[ 0.0001, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0004, 0.0005, 0.0006, 0.0006, 0.0008, 0.0008, 0.0008, 0.0009, 0.0010, 0.0010, 0.0011, 0.0011, 0.0012, 0.0012, 0.0013, 0.0014, 0.0014, 0.0014, 0.0015, 0.0018, 0.0019, 0.0020, 0.0020, 0.0020, 0.0021, 0.0022, 0.0022, 0.0023, 0.0023, 0.0024, 0.0024, 0.0025, 0.0026, 0.0026, 0.0027, 0.0028, 0.0028, 0.0029, 0.0032]
layer_latency_index=[3,7,10,14,17,20,24,27,30,34,37,40,44]
# Reduce the per-layer latency tables to the candidate split points only:
# layer_latency_index holds the (1-based) positions in the cumulative latency
# lists that correspond to partitionable layer boundaries.
for i in layer_latency_index:
    gpu_layer_latency.append(gpu_latency[i-1])
    pi_layer_latency.append(pi_latency[i-1])
if __name__ == "__main__":
    accuracy_or_latency_demand=args.accuracy_or_latency #True for accuracy demand
    # Under an accuracy demand the user's accuracy-loss bound applies and the
    # latency bound defaults to the best single-device latency; otherwise the
    # user's latency multiplier applies and a fixed 0.1 accuracy-loss bound is used.
    if accuracy_or_latency_demand:
        accuracy_loss_constraint = args.accuracy_loss
        latency_constraint = min(only_cloud,only_pi_latency)
    else:
        accuracy_loss_constraint = 0.1
        latency_constraint = min(only_cloud,only_pi_latency)*args.latency #True for accuracy demand
    gpu = args.gpu
    ##initialize the scheme space
    search_times=0
    optimal_scheme = []
    # Enumerate every (partition point, channel count, bit width) scheme.
    all_scheme, all_transmission, all_latency, all_layer_start_index = scheme_generation(pi_layer_latency)
    all_evaluated_scheme = [[i, None] for i in all_scheme]#evaluated_scheme=[[scheme],accuracy_loss]
    avaliable_scheme = all_scheme
    avaliable_evaluated_scheme = [i for i in all_evaluated_scheme if i[1] != None]#avaliable_evaluated_scheme=[[scheme],accuracy_loss]
    print('The whole searching space =',len(avaliable_scheme))
    # filter schemes satisfying the latency constraint
    # NOTE(review): index_conut_latency() and main() are defined elsewhere in
    # the full file; they are not visible in this excerpt.
    avaliable_index = index_conut_latency()
    avaliable_scheme = [all_scheme[i] for i in avaliable_index]
    # Drop schemes that have already been evaluated from the candidate pool.
    for i in avaliable_evaluated_scheme:
        if i[0] in avaliable_scheme:
            avaliable_scheme.remove(i[0])
    ##start searching
    main()
| 48.845528 | 506 | 0.674933 | #coding=utf-8
import numpy
import random
import torch
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern
from scipy.stats import norm
import random
import os
from numpy import argmax
import argparse
global accuracy_loss_constraint, latency_constraint,optimal_scheme,avaliable_scheme,avaliable_evaluated_scheme
import warnings
warnings.filterwarnings("ignore")
# User define the following parameters, 10000 as a loose constraint means the constraint actually has no influence
parser = argparse.ArgumentParser()
parser.add_argument('-accuracy_or_latency', type=bool, default=False)
parser.add_argument('-accuracy_loss', type=float, default=10000)
parser.add_argument('-latency', type=float, default=10000)
parser.add_argument('-bandwidth', type=float, default=1)
parser.add_argument('-gpu', type=int, default=0)
args = parser.parse_args()
data_base_address='./latency_search.txt'
#The latency profiler of the IoT-Cloud system
bandwidth=args.bandwidth/8 #MB/s
accuracy_loss_base = 0.151
only_pi_latency=3.2571
only_cloud=360*240*3/1024/1024/bandwidth
base_transmission=224*224/8/1024/1024 #MB
layer=[64, 64,'M', 128, 128,'M', 256, 256, 256,'M', 512, 512, 512,'M', 512, 512, 512]
pi_layer_latency=[]
pi_latency=[0.0597,0.0937,0.1179,0.5048,0.5054,0.5074,0.51,0.5382,0.5557,0.5682,0.9844,1.0018,1.0142,1.055,1.2156,1.2246,1.2318,1.5411,1.5501,1.5577,1.8679,1.8768,1.8845,1.9072,2.0437,2.0484,2.0532,2.3183,2.3231,2.3279,2.5918,2.5966,2.6014,2.6134,2.6898,2.6913,2.6938,2.7694,2.7709,2.7735,2.8487,2.8503,2.8528,2.8569,3.2571]
gpu_layer_latency=[]
gpu_latency=[ 0.0001, 0.0002, 0.0002, 0.0003, 0.0003, 0.0004, 0.0004, 0.0005, 0.0006, 0.0006, 0.0008, 0.0008, 0.0008, 0.0009, 0.0010, 0.0010, 0.0011, 0.0011, 0.0012, 0.0012, 0.0013, 0.0014, 0.0014, 0.0014, 0.0015, 0.0018, 0.0019, 0.0020, 0.0020, 0.0020, 0.0021, 0.0022, 0.0022, 0.0023, 0.0023, 0.0024, 0.0024, 0.0025, 0.0026, 0.0026, 0.0027, 0.0028, 0.0028, 0.0029, 0.0032]
layer_latency_index=[3,7,10,14,17,20,24,27,30,34,37,40,44]
def gaussin_process(avaliable_evaluated_scheme):
    """Fit a Matern-kernel Gaussian process to the evaluated schemes.

    Each entry of *avaliable_evaluated_scheme* is [[layer, num, bit], loss].
    Synthetic anchor points are appended for every layer index (degenerate
    schemes pinned to loss 1.0, an oversized scheme pinned to loss 0) before
    the coordinates are normalized and the regressor is fitted.
    """
    kernel = Matern(length_scale=[1, 1, 1],
                    length_scale_bounds=[(1e-10, 1e10), (1e-10, 1e10), (1e-5, 1e10)],
                    nu=0.01)
    regressor = GaussianProcessRegressor(kernel=kernel, n_restarts_optimizer=100,
                                         alpha=1e-5, normalize_y=True)
    schemes = [pair[0] for pair in avaliable_evaluated_scheme]
    losses = [pair[1] for pair in avaliable_evaluated_scheme]
    for idx in range(13):
        schemes += [[idx, 0, 1], [idx, 1, 0], [idx, 1000, 10]]
        losses += [1.0, 1.0, 0]
    regressor.fit(ennormlization(schemes), losses)
    return regressor
def ennormlization(avaliable_scheme):
    """Map [layer_index, channel_count, bit_width] triples to normalized coords.

    Layer index is divided by the number of partitionable layers (13), channel
    count by that layer's full channel count, and bit width by 8.
    """
    channel_counts = [64, 64, 128, 128, 256, 256, 256, 512, 512, 512, 512, 512, 512]
    total_layers = len(channel_counts)
    return [[idx / total_layers, num / channel_counts[idx], bits / 8]
            for idx, num, bits in avaliable_scheme]
# probability-of-improvement acquisition over the candidate schemes
def opt_acquisition(avaliable_scheme, gp_model):
    """Return the candidate scheme with the highest acquisition score.

    Scores are the probability (under the GP posterior) that a scheme's
    accuracy loss stays within accuracy_loss_base + accuracy_loss_constraint.
    When the posterior mean is nearly flat, a positional ranking is used
    instead (reversed under an accuracy demand).
    """
    samples = ennormlization(avaliable_scheme)
    mu, std = gp_model.predict(samples, return_std=True)
    if numpy.std(mu) < 1e-2:
        # Flat posterior: fall back to index order so argmax still picks a
        # deterministic candidate (first one under an accuracy demand).
        scores = numpy.arange(len(mu.tolist()))
        if accuracy_or_latency_demand:
            scores = -1 * scores
    else:
        scores = norm.cdf((accuracy_loss_base + accuracy_loss_constraint - mu) / (std + 1E-9))
    best = argmax(scores)
    return avaliable_scheme[best]
# Keep only the latencies at the candidate split points (1-based indices
# into the cumulative per-layer latency tables).
for i in layer_latency_index:
    gpu_layer_latency.append(gpu_latency[i-1])
    pi_layer_latency.append(pi_latency[i-1])
def scheme_generation(device_layer_latency):
    """Enumerate every candidate partition scheme and its cost estimates.

    A scheme is [partition_index, kept_channels, bit_width] for each conv
    layer, each channel count 1..C, and each bit width 1..8. Returns four
    parallel lists: schemes, transmitted data size (MB), end-to-end latency
    estimate (device part + remaining cloud part + transmission), and the
    last scheme index belonging to each layer.

    NOTE(review): this mutates the module-level base_transmission in place,
    so calling it twice gives different transmission sizes -- it appears to
    be intended as call-once initialization.
    """
    global base_transmission
    partition_num = -1
    scheme = []
    transmission_data_size = []
    scheme_latency = []
    scheme_count=-1
    layer_start_index=[]
    layer_count=-1
    # Padded copy of the layer table ('M' = max-pool) so the lookahead
    # layer_tmp[layer_count+1] below never runs off the end.
    layer_tmp=[64, 64,'M', 128, 128,'M', 256, 256, 256,'M', 512, 512, 512,'M', 512, 512, 512,'M']
    for i in layer:
        layer_count+=1
        if i!='M':
            partition_num += 1
            # A following max-pool quarters the feature-map area, hence the
            # transmission size for this and all later layers.
            if layer_tmp[layer_count+1]=='M':
                base_transmission=base_transmission/4
            for z in range(i):
                for j in range(1,9):
                    scheme.append([partition_num,z+1,j])
                    transmission_data_size.append(base_transmission*(z+1)*j)
                    # device-side latency up to the split + cloud-side latency after it
                    # + time to ship the intermediate activation.
                    scheme_latency.append(device_layer_latency[partition_num]+(gpu_layer_latency[-1]-gpu_layer_latency[partition_num])+(base_transmission*(z+1)*j)/bandwidth)
                    scheme_count+=1
            layer_start_index.append(scheme_count)
    return(scheme,transmission_data_size,scheme_latency,layer_start_index)
def filter():
    """Return unevaluated schemes whose estimated accuracy-loss *upper* bound
    satisfies the constraint.

    For each candidate, bounds are derived from already-evaluated schemes on
    the same partition layer: (0) monotonicity — any evaluated scheme that is
    dominated (fewer channels AND fewer bits) bounds the loss from above;
    (1)/(2) linear interpolation between the nearest evaluated neighbours
    along the channel and bit axes. Candidates with no usable neighbours are
    skipped.

    NOTE(review): this shadows the builtin `filter` and reads the module
    globals avaliable_scheme / avaliable_evaluated_scheme /
    accuracy_loss_constraint rather than taking parameters.
    """
    def find_closet(set, num, bit):
        # Of the evaluated points in *set* (each [num, bit, loss]), return the
        # one whose num+bit sum is closest to the query's. Shadows builtin `set`.
        distance = 10000
        closet = []
        for i, j, k in set:
            distance_tmp = abs(i + j - num - bit)
            if distance_tmp < distance:
                distance = distance_tmp
                closet = [i, j, k]
        return closet
    filter_scheme=[]
    for id,num,bit in avaliable_scheme:
        # Evaluated neighbours on the same layer, split by axis and direction.
        nlarge = []
        nsmall = []
        blarge = []
        bsmall = []
        # 10000 acts as +infinity: an unusable bound never tightens the min().
        accuracy_loss_upper_bound0=10000
        accuracy_loss_upper_bound1=10000
        accuracy_loss_upper_bound2=10000
        monotonicity_accuracy_loss_upper_bound_set = []
        for [id_eva,num_eva,bit_eva],accuracy_loss_eva in avaliable_evaluated_scheme:
            if id==id_eva:
                # Dominated evaluated scheme -> its loss upper-bounds this one.
                if (num_eva<=num and bit_eva<=bit):
                    monotonicity_accuracy_loss_upper_bound_set.append(accuracy_loss_eva)
                if num_eva==num:
                    if bit_eva<bit:
                        bsmall.append([num_eva, bit_eva, accuracy_loss_eva])
                    if bit_eva>bit:
                        blarge.append([num_eva, bit_eva, accuracy_loss_eva])
                if bit_eva==bit:
                    if num_eva<num:
                        nsmall.append([num_eva, bit_eva, accuracy_loss_eva])
                    if num_eva>num:
                        nlarge.append([num_eva, bit_eva, accuracy_loss_eva])
        if monotonicity_accuracy_loss_upper_bound_set!=[]:
            accuracy_loss_upper_bound0=min(monotonicity_accuracy_loss_upper_bound_set)
        # Interpolate along the channel axis when bracketed on both sides.
        if len(nlarge)*len(nsmall)!=0:
            nlarge_point = find_closet(nlarge, num, bit)
            nsmall_point = find_closet(nsmall, num, bit)
            accuracy_loss_upper_bound1=(nlarge_point[2] - nsmall_point[2]) / (
                        nlarge_point[0] - nsmall_point[0]) * (num - nsmall_point[0]) + nsmall_point[2]
        # Interpolate along the bit-width axis when bracketed on both sides.
        if len(blarge) * len(bsmall) != 0:
            blarge_point = find_closet(blarge, num, bit)
            bsmall_point = find_closet(bsmall, num, bit)
            accuracy_loss_upper_bound2 = (blarge_point[2] - bsmall_point[2]) / (
                    blarge_point[1] - bsmall_point[1]) * (bit - bsmall_point[1]) + bsmall_point[2]
        else:
            # No bit-axis bracket at all -> no usable bound for this candidate.
            continue
        actual_accuracy_loss_upper_bound=min(accuracy_loss_upper_bound0,accuracy_loss_upper_bound1,accuracy_loss_upper_bound2)
        if actual_accuracy_loss_upper_bound<=accuracy_loss_constraint:
            filter_scheme.append([id,num,bit])
    print('filtered scheme', filter_scheme)
    return filter_scheme
def space_shrink():
    """Prune avaliable_scheme in place.

    A scheme is removed when its accuracy-loss LOWER bound (from
    monotonicity and linear extrapolation of already-evaluated neighbours)
    already exceeds accuracy_loss_constraint, i.e. it provably cannot be
    feasible.  Reads avaliable_evaluated_scheme / accuracy_loss_constraint
    and mutates the module global avaliable_scheme.
    """
    global avaliable_scheme
    def find_closet(set, num, bit):
        # Return the evaluated point [num, bit, loss] whose (num + bit) is
        # closest to the query point.
        distance = 10000
        closet = []
        for i, j, k in set:
            distance_tmp = abs(i + j - num - bit)
            if distance_tmp < distance:
                distance = distance_tmp
                closet = [i, j, k]
        return closet
    #avaliable_scheme.copy() avoids the modification of avaliable_scheme_tmp, when use avaliable_scheme.remove()
    avaliable_scheme_tmp=avaliable_scheme.copy()
    avaliable_scheme_tmp2=[]
    for [id,num,bit] in avaliable_scheme_tmp:
        # Evaluated axis-aligned neighbours of (num, bit), same partition id.
        nlarge = []
        nsmall = []
        blarge = []
        bsmall = []
        accuracy_loss_lower_bound0=0
        accuracy_loss_lower_bound1=0
        accuracy_loss_lower_bound2=0
        accuracy_loss_lower_bound3=0
        accuracy_loss_lower_bound4=0
        monotonicity_accuracy_loss_lower_bound_set=[]
        for [id_eva,num_eva,bit_eva],accuracy_loss_eva in avaliable_evaluated_scheme:
            if id==id_eva:
                # Monotonicity: a scheme dominated by (num, bit) in both axes
                # gives a lower bound on this scheme's loss.
                if (num_eva>=num and bit_eva>=bit):
                    monotonicity_accuracy_loss_lower_bound_set.append(accuracy_loss_eva)
                if num_eva==num:
                    if bit_eva<bit:
                        bsmall.append([num_eva, bit_eva, accuracy_loss_eva])
                    if bit_eva>bit:
                        blarge.append([num_eva, bit_eva, accuracy_loss_eva])
                if bit_eva==bit:
                    if num_eva<num:
                        nsmall.append([num_eva, bit_eva, accuracy_loss_eva])
                    if num_eva>num:
                        nlarge.append([num_eva, bit_eva, accuracy_loss_eva])
        if monotonicity_accuracy_loss_lower_bound_set!=[]:
            accuracy_loss_lower_bound0=max(monotonicity_accuracy_loss_lower_bound_set)
        # Linear extrapolation from the two closest neighbours on each side of
        # each axis (needs at least two points per side).
        if len(nlarge)>=2:
            nlarge_point1 = find_closet(nlarge, num, bit)
            nlarge.remove(nlarge_point1)
            nlarge_point2= find_closet(nlarge, num, bit)
            # BUG FIX: this bound was previously assigned to a misspelled
            # variable (accuracy_loss_upper_lower1), leaving
            # accuracy_loss_lower_bound1 stuck at 0 and weakening the prune.
            accuracy_loss_lower_bound1=(nlarge_point1[2] - nlarge_point2[2]) / (
                    nlarge_point1[0] - nlarge_point2[0]) * (num - nlarge_point2[0]) + nlarge_point2[2]
        if len(nsmall) >= 2:
            nsmall_point1 = find_closet(nsmall, num, bit)
            nsmall.remove(nsmall_point1)
            nsmall_point2= find_closet(nsmall, num, bit)
            accuracy_loss_lower_bound2=(nsmall_point1[2] - nsmall_point2[2]) / (
                    nsmall_point1[0] - nsmall_point2[0]) * (num - nsmall_point2[0]) + nsmall_point2[2]
        if len(blarge)>=2:
            blarge_point1 = find_closet(blarge, num, bit)
            blarge.remove(blarge_point1)
            blarge_point2= find_closet(blarge, num, bit)
            accuracy_loss_lower_bound3=(blarge_point1[2] - blarge_point2[2]) / (
                    blarge_point1[1] - blarge_point2[1]) * (bit - blarge_point2[1]) + blarge_point2[2]
        if len(bsmall) >= 2:
            bsmall_point1 = find_closet(bsmall, num, bit)
            bsmall.remove(bsmall_point1)
            bsmall_point2= find_closet(bsmall, num, bit)
            accuracy_loss_lower_bound4=(bsmall_point1[2] - bsmall_point2[2]) / (
                    bsmall_point1[1] - bsmall_point2[1]) * (bit - bsmall_point2[1]) + bsmall_point2[2]
        # If the best (largest) lower bound already violates the constraint,
        # this scheme can never be feasible: drop it.
        if max(accuracy_loss_lower_bound0,accuracy_loss_lower_bound1,accuracy_loss_lower_bound2,accuracy_loss_lower_bound3,accuracy_loss_lower_bound4)>=accuracy_loss_constraint:
            avaliable_scheme.remove([id, num, bit])
            avaliable_scheme_tmp2.append([id, num, bit])
    print('Space_shrink:')
    print('   accuracy_loss_constraint',accuracy_loss_constraint)
    print('   latency_constraint',latency_constraint)
    print('   removed scheme numer',len(avaliable_scheme_tmp2))
    print('   avaliable scheme numer',len(avaliable_scheme))
def evaluate_update_scheme(select_scheme):
    # Evaluate select_scheme (reusing the on-disk result database when
    # possible), move it from avaliable_scheme into
    # avaliable_evaluated_scheme, and tighten the latency/accuracy
    # constraints when the scheme is feasible.
    global accuracy_loss_constraint,latency_constraint,optimal_scheme, avaliable_evaluated_scheme,search_times,avaliable_scheme
    print('Evaluate_Update_scheme',select_scheme)
    ##evaluate the select_scheme
    accuracy_loss_tmp=find_in_database(select_scheme)
    if not accuracy_loss_tmp:
        # Not cached yet: run the external evaluation script (it appends its
        # result to the database), then re-read it.
        # NOTE(review): a cached loss of exactly 0.0 is falsy and would
        # trigger a redundant re-evaluation here -- confirm that is intended.
        os.system('python evaluate_update.py -z_partition_id {:.0f} -z_pruning_num {:.0f} -z_quantization {:.0f} -gpu {:.0f}'.format(select_scheme[0], select_scheme[1], select_scheme[2], gpu))
        accuracy_loss_tmp=find_in_database(select_scheme)
        search_times+=1  # only actual evaluations count against the budget
    ##remove the select_scheme from avaliable_scheme, and store it into database avaliable_evaluated_scheme
    avaliable_scheme.remove(select_scheme)
    avaliable_evaluated_scheme.append([select_scheme, accuracy_loss_tmp]) #add evaluated scheme to database
    if accuracy_loss_tmp-accuracy_loss_constraint<=1e-5:
        # Feasible: record it as the incumbent and tighten whichever
        # constraint is not the user-fixed one.
        optimal_scheme=select_scheme
        if accuracy_or_latency_demand:
            latency_constraint=all_latency[all_scheme.index(select_scheme)]
        else:
            accuracy_loss_constraint=accuracy_loss_tmp
    else:
        print('The accuracy_loss_constraint and evaluated accuracy_loss is {:.5f} and {:.5f}'.format(accuracy_loss_constraint,accuracy_loss_tmp))
def index_conut_latency():
    """Return the indices of schemes in all_latency whose latency is strictly
    below the current latency_constraint (both are module globals)."""
    return [idx for idx, latency in enumerate(all_latency)
            if latency < latency_constraint]
def find_in_database(select_scheme):
    # Look up the cached accuracy loss for select_scheme
    # ([partition_id, num, bit]) in the text database at data_base_address.
    # Each line is formatted "<id>+<num>-<bit>*<raw_accuracy>"; returns the
    # loss relative to accuracy_loss_base, or None when not yet evaluated.
    accuracy_loss_tmp=None
    with open(data_base_address, 'r') as f:
        data = f.readlines()
        for i in data:
            x = i.find('+')
            y = i.find('-')
            z = i.find('*')
            partition_id_tmp=int(i[:x])
            num_tmp=int(i[x:y])  # slice keeps the leading '+', which int() accepts
            bit_tmp = int(i[y+1:z])
            if partition_id_tmp==select_scheme[0] and num_tmp==select_scheme[1] and bit_tmp==select_scheme[2]:
                accuracy_loss_tmp = float(i[z + 1:])-accuracy_loss_base
    return accuracy_loss_tmp
def get_same_element(set1, set2):
    """Return the elements of set1 that also appear in set2, preserving
    set1's order (duplicates in set1 are kept).

    Membership is tested with ``in`` (equality), so unhashable items such
    as the [id, num, bit] scheme lists are supported -- do not convert
    set2 to a set.  (The original dead ``tmp=[]`` assignment is removed.)
    """
    return [i for i in set1 if i in set2]
def main():
    """Constrained scheme search.

    Phase 1: repeatedly evaluate randomly-sampled schemes whose accuracy
    upper bound already qualifies (filter()), then prune by latency and
    shrink the space.  Phase 2: Gaussian-process-guided search over the
    remaining schemes until the space is exhausted.  Operates entirely on
    module globals.
    """
    global accuracy_loss_constraint, latency_constraint, optimal_scheme, avaliable_scheme, avaliable_evaluated_scheme,search_times
    # filter schemes that are meant to satisfy the accuracy constrain, according to their accuracy lower bound
    filter_scheme_set = filter()
    while len(filter_scheme_set) != 0:
        filter_scheme = random.sample(filter_scheme_set, 1)[0]  # randomly select a qualified scheme
        evaluate_update_scheme(filter_scheme)  # evaluate the scheme, store the data in avaliable_evaluated_scheme, and update latency&accuracy constraints
        filter_scheme_set = filter()
        # Re-prune by the (possibly tightened) latency constraint.
        avaliable_index = index_conut_latency()
        avaliable_scheme_tmp = [all_scheme[i] for i in avaliable_index]
        avaliable_scheme = get_same_element(avaliable_scheme, avaliable_scheme_tmp)
    avaliable_index = index_conut_latency()
    avaliable_scheme_tmp = [all_scheme[i] for i in avaliable_index]
    avaliable_scheme = get_same_element(avaliable_scheme, avaliable_scheme_tmp)
    # remove the schemes that are meant to not satisfy the accuracy_constraint
    space_shrink()
    while len(avaliable_scheme)>0:
        #bulid the Guassian Process model based on the avaliable_evaluated_scheme
        GP=gaussin_process(avaliable_evaluated_scheme)
        #find the promising scheme according to the cquisition function
        promising_scheme=opt_acquisition(avaliable_scheme,GP)
        #evaluate and update
        evaluate_update_scheme(promising_scheme)
        avaliable_index=index_conut_latency()  #filter scheme according to latency constraint
        avaliable_scheme_tmp=[all_scheme[i] for i in avaliable_index]
        avaliable_scheme=get_same_element(avaliable_scheme,avaliable_scheme_tmp)
        # filter schemes that are meant to satisfy the accuracy constrain, according to their accuracy lower bound
        filter_scheme_set = filter()
        while len(filter_scheme_set) != 0:
            filter_scheme = random.sample(filter_scheme_set, 1)[0]
            evaluate_update_scheme(filter_scheme)
            filter_scheme_set = filter()
            avaliable_index=index_conut_latency()
            avaliable_scheme_tmp=[all_scheme[i] for i in avaliable_index]
            avaliable_scheme=get_same_element(avaliable_scheme,avaliable_scheme_tmp)
        avaliable_index = index_conut_latency()
        avaliable_scheme_tmp = [all_scheme[i] for i in avaliable_index]
        avaliable_scheme=get_same_element(avaliable_scheme,avaliable_scheme_tmp)
        space_shrink()
        print('One Iteration is Finished:\n','evaluation budget', search_times,'\n optimal scheme',optimal_scheme,'\n accuracy_loss_constraint',accuracy_loss_constraint,'\n latency_constraint',latency_constraint)
        print('\n')
    print('The final result:')
    print('\n evaluation budget',search_times,'\n optimal_scheme',optimal_scheme,'\n accuracy_loss_constraint',accuracy_loss_constraint,'\n latency_constraint',latency_constraint)
if __name__ == "__main__":
    # Entry point: initialize constraints and the scheme search space from
    # CLI args / precomputed latencies, then run the constrained search.
    accuracy_or_latency_demand=args.accuracy_or_latency #True for accuracy demand
    if accuracy_or_latency_demand:
        # Accuracy is fixed by the user; latency starts at the better of the
        # two single-device baselines and is tightened during the search.
        accuracy_loss_constraint = args.accuracy_loss
        latency_constraint = min(only_cloud,only_pi_latency)
    else:
        # Latency is fixed (as a fraction of the baseline); accuracy loss
        # starts loose and is tightened during the search.
        accuracy_loss_constraint = 0.1
        latency_constraint = min(only_cloud,only_pi_latency)*args.latency #False: latency demand
    gpu = args.gpu
    ##initialize the scheme space
    search_times=0
    optimal_scheme = []
    all_scheme, all_transmission, all_latency, all_layer_start_index = scheme_generation(pi_layer_latency)
    all_evaluated_scheme = [[i, None] for i in all_scheme]#evaluated_scheme=[[scheme],accuracy_loss]
    avaliable_scheme = all_scheme
    avaliable_evaluated_scheme = [i for i in all_evaluated_scheme if i[1] != None]#avaliable_evaluated_scheme=[[scheme],accuracy_loss]
    print('The whole searching space =',len(avaliable_scheme))
    # filter schemes satisfying the latency constraint
    avaliable_index = index_conut_latency()
    avaliable_scheme = [all_scheme[i] for i in avaliable_index]
    # Drop schemes that already have an evaluation result.
    for i in avaliable_evaluated_scheme:
        if i[0] in avaliable_scheme:
            avaliable_scheme.remove(i[0])
    ##start searching
    main()
| 14,069 | 0 | 273 |
e7b13db3d60af216a9f52452d46ca5d1945110b0 | 12,300 | py | Python | VAE/model_maker.py | BensonRen/AEM_DIM_Bench | 1ff82bfdcd6b0a736bf184f0bcb8a533743aacbb | [
"MIT"
] | 1 | 2022-01-08T22:07:31.000Z | 2022-01-08T22:07:31.000Z | VAE/model_maker.py | BensonRen/AEM_DIM_Bench | 1ff82bfdcd6b0a736bf184f0bcb8a533743aacbb | [
"MIT"
] | null | null | null | VAE/model_maker.py | BensonRen/AEM_DIM_Bench | 1ff82bfdcd6b0a736bf184f0bcb8a533743aacbb | [
"MIT"
] | null | null | null | """
This is the module where the model is defined. It uses the nn.Module as backbone to create the network structure
"""
# Own modules
# Built in
import math
# Libs
import numpy as np
# Pytorch module
import torch.nn as nn
import torch.nn.functional as F
import torch
"""
class Decoder(nn.Module):
def __init__(self, flags):
super(Decoder, self).__init__()
""
This part is the Decoder model layers definition:
""
# Linear Layer and Batch_norm Layer definitions here
self.linears_d = nn.ModuleList([])
self.bn_linears_d = nn.ModuleList([])
for ind, fc_num in enumerate(flags.linear_d[0:-1]): # Excluding the last one as we need intervals
self.linears_d.append(nn.Linear(fc_num, flags.linear_d[ind + 1]))
self.bn_linears_d.append(nn.BatchNorm1d(flags.linear_d[ind + 1]))
def forward(self, z, S_enc):
""
The forward function which defines how the network is connected
:param S_enc: The encoded spectra input
:return: G: Geometry output
""
out = torch.concatenate(z, S_enc) # initialize the out
# For the linear part
for ind, (fc, bn) in enumerate(zip(self.linears_d, self.bn_linears_d)):
# print(out.size())
out = F.relu(bn(fc(out))) # ReLU + BN + Linear
return out
class Encoder(nn.Module):
def __init__(self, flags):
super(Encoder, self).__init__()
""
This part is the Decoder model layers definition:
""
# Linear Layer and Batch_norm Layer definitions here
self.linears_e = nn.ModuleList([])
self.bn_linears_e = nn.ModuleList([])
for ind, fc_num in enumerate(flags.linear_e[0:-1]): # Excluding the last one as we need intervals
self.linears_e.append(nn.Linear(fc_num, flags.linear_e[ind + 1]))
self.bn_linears_e.append(nn.BatchNorm1d(flags.linear_e[ind + 1]))
# Re-parameterization
self.zmean_layer = nn.Linear(flags.linear_e[-1], flags.dim_latent_z)
self.z_log_var_layer = nn.Linear(flags.linear_e[-1], flags.dim_latent_z)
def forward(self, G, S_enc):
""
The forward function which defines how the network is connected
:param S_enc: The encoded spectra input
:param G: Geometry output
:return: Z_mean, Z_log_var: the re-parameterized mean and variance of the
""
out = torch.concatenate(G, S_enc) # initialize the out
# For the linear part
for ind, (fc, bn) in enumerate(zip(self.linears_e, self.bn_linears_e)):
# print(out.size())
out = F.relu(bn(fc(out))) # ReLU + BN + Linear
z_mean = self.zmean_layer(out)
z_log_var = self.z_log_var_layer(out)
return z_mean, z_log_var
class SpectraEncoder(nn.Module):
def __init__(self, flags):
super(SpectraEncoder, self).__init__()
""
This part if the backward model layers definition:
""
# Linear Layer and Batch_norm Layer definitions here
self.linears_se = nn.ModuleList([])
self.bn_linears_se = nn.ModuleList([])
for ind, fc_num in enumerate(flags.linear_se[0:-1]): # Excluding the last one as we need intervals
self.linears_se.append(nn.Linear(fc_num, flags.linear_se[ind + 1]))
self.bn_linears_se.append(nn.BatchNorm1d(flags.linear_se[ind + 1]))
# Conv Layer definitions here
self.convs_se = nn.ModuleList([])
in_channel = 1 # Initialize the in_channel number
for ind, (out_channel, kernel_size, stride) in enumerate(zip(flags.conv_out_channel_se,
flags.conv_kernel_size_se,
flags.conv_stride_se)):
if stride == 2: # We want to double the number
pad = int(kernel_size/2 - 1)
elif stride == 1: # We want to keep the number unchanged
pad = int((kernel_size - 1)/2)
else:
Exception("Now only support stride = 1 or 2, contact Ben")
self.convs_se.append(nn.Conv1d(in_channel, out_channel, kernel_size,
stride=stride, padding=pad))
in_channel = out_channel # Update the out_channel
def forward(self, S):
""
The backward function defines how the backward network is connected
:param S: The 300-d input spectrum
:return: S_enc: The n-d output encoded spectrum
""
out = S.unsqueeze(1)
# For the Conv Layers
for ind, conv in enumerate(self.convs_se):
out = conv(out)
out = out.squeeze()
# For the linear part
for ind, (fc, bn) in enumerate(zip(self.linears_se, self.bn_linears_se)):
out = F.relu(bn(fc(out)))
S_enc = out
return S_enc
"""
| 43.772242 | 121 | 0.551789 | """
This is the module where the model is defined. It uses the nn.Module as backbone to create the network structure
"""
# Own modules
# Built in
import math
# Libs
import numpy as np
# Pytorch module
import torch.nn as nn
import torch.nn.functional as F
import torch
class VAE(nn.Module):
    """Conditional VAE that reconstructs a geometry G conditioned on a
    (optionally conv-encoded) spectrum S.

    Sub-networks, all sized from ``flags``:
      * spectra encoder: optional 1-d conv stack + linear stack mapping
        S -> S_enc (identity when no conv layers are configured)
      * encoder: (G, S_enc) -> (z_mean, z_log_var); the final linear layer's
        output is chunked in two, so ``flags.linear_e[-1]`` must equal
        ``2 * flags.dim_z``
      * decoder: (z, S_enc) -> G
    """
    def __init__(self, flags):
        """Build all sub-network layers from the hyperparameter object ``flags``."""
        super(VAE, self).__init__()
        self.z_dim = flags.dim_z
        # Decoder linear + batch-norm stacks.
        self.linears_d = nn.ModuleList([])
        self.bn_linears_d = nn.ModuleList([])
        for ind, fc_num in enumerate(flags.linear_d[0:-1]):  # excluding the last one as we need intervals
            self.linears_d.append(nn.Linear(fc_num, flags.linear_d[ind + 1]))
            self.bn_linears_d.append(nn.BatchNorm1d(flags.linear_d[ind + 1]))
        # Encoder linear + batch-norm stacks.
        self.linears_e = nn.ModuleList([])
        self.bn_linears_e = nn.ModuleList([])
        for ind, fc_num in enumerate(flags.linear_e[0:-1]):
            self.linears_e.append(nn.Linear(fc_num, flags.linear_e[ind + 1]))
            self.bn_linears_e.append(nn.BatchNorm1d(flags.linear_e[ind + 1]))
        # Spectra-encoder linear stack.
        self.linears_se = nn.ModuleList([])
        self.bn_linears_se = nn.ModuleList([])
        for ind, fc_num in enumerate(flags.linear_se[0:-1]):
            self.linears_se.append(nn.Linear(fc_num, flags.linear_se[ind + 1]))
            self.bn_linears_se.append(nn.BatchNorm1d(flags.linear_se[ind + 1]))
        # Spectra-encoder conv stack; padding chosen so stride 1 preserves the
        # sequence length and stride 2 halves it.
        self.convs_se = nn.ModuleList([])
        in_channel = 1  # spectra enter as a single channel
        for ind, (out_channel, kernel_size, stride) in enumerate(zip(flags.conv_out_channel_se,
                                                                     flags.conv_kernel_size_se,
                                                                     flags.conv_stride_se)):
            if stride == 2:  # halve the length
                pad = int(kernel_size/2 - 1)
            elif stride == 1:  # keep the length unchanged
                pad = int((kernel_size - 1)/2)
            else:
                # BUG FIX: the exception was constructed but never raised,
                # silently leaving ``pad`` undefined.
                raise Exception("Now only support stride = 1 or 2, contact Ben")
            self.convs_se.append(nn.Conv1d(in_channel, out_channel, kernel_size,
                                           stride=stride, padding=pad))
            in_channel = out_channel
        if self.convs_se:  # collapse channels back to 1 only when convs are used
            self.convs_se.append(nn.Conv1d(in_channel, out_channels=1, kernel_size=1, stride=1, padding=0))
    def encoder(self, G, S_enc):
        """Encode a (geometry, encoded-spectrum) pair into latent parameters.

        :param G: geometry tensor, shape (batch, geom_dim)
        :param S_enc: encoded spectra tensor, shape (batch, enc_dim)
        :return: (z_mean, z_log_var), each of shape (batch, dim_z)
        """
        out = torch.cat((G, S_enc), dim=-1)
        for ind, (fc, bn) in enumerate(zip(self.linears_e, self.bn_linears_e)):
            # BUG FIX: the last-layer check previously used len(self.linears_d)
            # (the *decoder* depth); use the encoder's own depth so the final
            # layer stays un-activated regardless of decoder size.
            if ind != len(self.linears_e) - 1:
                out = F.relu(bn(fc(out)))  # Linear + BN + ReLU
            else:
                out = fc(out)  # raw output, split into mean / log-variance
        z_mean, z_log_var = torch.chunk(out, 2, dim=1)
        return z_mean, z_log_var
    def reparameterize(self, mu, logvar):
        """Sample z = mu + sigma * eps (the reparameterization trick).

        :param mu: latent mean, shape (batch, dim_z)
        :param logvar: latent log-variance, same shape
        :return: a differentiable latent sample
        """
        std = torch.exp(0.5 * logvar)
        eps = torch.randn_like(std)
        return mu + eps * std
    def decode(self, z, S_enc):
        """Decode a latent sample (conditioned on S_enc) back to a geometry.

        :param z: latent tensor, shape (batch, dim_z)
        :param S_enc: encoded spectra tensor
        :return: reconstructed geometry (no output activation)
        """
        out = torch.cat((z, S_enc), dim=-1)
        for ind, (fc, bn) in enumerate(zip(self.linears_d, self.bn_linears_d)):
            if ind != len(self.linears_d) - 1:
                out = F.relu(bn(fc(out)))  # Linear + BN + ReLU
            else:
                out = fc(out)  # final layer: no activation
        return out
    def spectra_encoder(self, S):
        """Encode spectrum S through the conv + linear stacks.

        Returns S unchanged when no conv layers are configured.
        """
        if not self.convs_se:  # in case that there is no conv layers
            return S
        out = S.unsqueeze(1)  # add the channel dim for Conv1d
        for ind, conv in enumerate(self.convs_se):
            out = conv(out)
        out = out.squeeze()
        for ind, (fc, bn) in enumerate(zip(self.linears_se, self.bn_linears_se)):
            out = F.relu(bn(fc(out)))
        S_enc = out
        return S_enc
    def forward(self, G, S):
        """Forward pass for training/evaluation.

        :param G: input geometry
        :param S: input spectrum
        :return: (reconstructed G, z_mean, z_log_var)
        """
        S_enc = self.spectra_encoder(S)
        z_mean, z_log_var = self.encoder(G, S_enc)
        if self.training:
            z = self.reparameterize(z_mean, z_log_var)
        else:
            z = z_mean  # deterministic in eval mode
        G_out = self.decode(z, S_enc)
        return G_out, z_mean, z_log_var
    def inference(self, S):
        """Sample a geometry for spectrum S from the prior z ~ N(0, I).

        :param S: input spectra batch
        :return: generated geometry batch
        """
        z = torch.randn([S.size(0), self.z_dim])
        if torch.cuda.is_available():
            z = z.cuda()
        return self.decode(z, self.spectra_encoder(S))
"""
class Decoder(nn.Module):
def __init__(self, flags):
super(Decoder, self).__init__()
""
This part is the Decoder model layers definition:
""
# Linear Layer and Batch_norm Layer definitions here
self.linears_d = nn.ModuleList([])
self.bn_linears_d = nn.ModuleList([])
for ind, fc_num in enumerate(flags.linear_d[0:-1]): # Excluding the last one as we need intervals
self.linears_d.append(nn.Linear(fc_num, flags.linear_d[ind + 1]))
self.bn_linears_d.append(nn.BatchNorm1d(flags.linear_d[ind + 1]))
def forward(self, z, S_enc):
""
The forward function which defines how the network is connected
:param S_enc: The encoded spectra input
:return: G: Geometry output
""
out = torch.concatenate(z, S_enc) # initialize the out
# For the linear part
for ind, (fc, bn) in enumerate(zip(self.linears_d, self.bn_linears_d)):
# print(out.size())
out = F.relu(bn(fc(out))) # ReLU + BN + Linear
return out
class Encoder(nn.Module):
def __init__(self, flags):
super(Encoder, self).__init__()
""
This part is the Decoder model layers definition:
""
# Linear Layer and Batch_norm Layer definitions here
self.linears_e = nn.ModuleList([])
self.bn_linears_e = nn.ModuleList([])
for ind, fc_num in enumerate(flags.linear_e[0:-1]): # Excluding the last one as we need intervals
self.linears_e.append(nn.Linear(fc_num, flags.linear_e[ind + 1]))
self.bn_linears_e.append(nn.BatchNorm1d(flags.linear_e[ind + 1]))
# Re-parameterization
self.zmean_layer = nn.Linear(flags.linear_e[-1], flags.dim_latent_z)
self.z_log_var_layer = nn.Linear(flags.linear_e[-1], flags.dim_latent_z)
def forward(self, G, S_enc):
""
The forward function which defines how the network is connected
:param S_enc: The encoded spectra input
:param G: Geometry output
:return: Z_mean, Z_log_var: the re-parameterized mean and variance of the
""
out = torch.concatenate(G, S_enc) # initialize the out
# For the linear part
for ind, (fc, bn) in enumerate(zip(self.linears_e, self.bn_linears_e)):
# print(out.size())
out = F.relu(bn(fc(out))) # ReLU + BN + Linear
z_mean = self.zmean_layer(out)
z_log_var = self.z_log_var_layer(out)
return z_mean, z_log_var
class SpectraEncoder(nn.Module):
def __init__(self, flags):
super(SpectraEncoder, self).__init__()
""
This part if the backward model layers definition:
""
# Linear Layer and Batch_norm Layer definitions here
self.linears_se = nn.ModuleList([])
self.bn_linears_se = nn.ModuleList([])
for ind, fc_num in enumerate(flags.linear_se[0:-1]): # Excluding the last one as we need intervals
self.linears_se.append(nn.Linear(fc_num, flags.linear_se[ind + 1]))
self.bn_linears_se.append(nn.BatchNorm1d(flags.linear_se[ind + 1]))
# Conv Layer definitions here
self.convs_se = nn.ModuleList([])
in_channel = 1 # Initialize the in_channel number
for ind, (out_channel, kernel_size, stride) in enumerate(zip(flags.conv_out_channel_se,
flags.conv_kernel_size_se,
flags.conv_stride_se)):
if stride == 2: # We want to double the number
pad = int(kernel_size/2 - 1)
elif stride == 1: # We want to keep the number unchanged
pad = int((kernel_size - 1)/2)
else:
Exception("Now only support stride = 1 or 2, contact Ben")
self.convs_se.append(nn.Conv1d(in_channel, out_channel, kernel_size,
stride=stride, padding=pad))
in_channel = out_channel # Update the out_channel
def forward(self, S):
""
The backward function defines how the backward network is connected
:param S: The 300-d input spectrum
:return: S_enc: The n-d output encoded spectrum
""
out = S.unsqueeze(1)
# For the Conv Layers
for ind, conv in enumerate(self.convs_se):
out = conv(out)
out = out.squeeze()
# For the linear part
for ind, (fc, bn) in enumerate(zip(self.linears_se, self.bn_linears_se)):
out = F.relu(bn(fc(out)))
S_enc = out
return S_enc
"""
| 0 | 6,997 | 25 |
3e6984b5a209bdec5d3dcb1eec9351a89355202e | 827 | py | Python | pylex/config/lexer_dict_config.py | subhajeet2107/pylexer | 64336fb14299a56116e58c3016261b3f5509510d | [
"MIT"
] | null | null | null | pylex/config/lexer_dict_config.py | subhajeet2107/pylexer | 64336fb14299a56116e58c3016261b3f5509510d | [
"MIT"
] | null | null | null | pylex/config/lexer_dict_config.py | subhajeet2107/pylexer | 64336fb14299a56116e58c3016261b3f5509510d | [
"MIT"
] | null | null | null | """
* This file is part of the subhajeet2107/pylexer package.
*
* (c) Subhajeet Dey <subhajeet2107@gmail.com>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
from pylex.config.lexer_config import LexerConfig
from pylex.config.token_defination import TokenDefination
class LexerDictConfig(LexerConfig):
"""
Lexer Configuration using a dictionary
"""
| 24.323529 | 65 | 0.753325 | """
* This file is part of the subhajeet2107/pylexer package.
*
* (c) Subhajeet Dey <subhajeet2107@gmail.com>
*
* This source file is subject to the MIT license that is bundled
* with this source code in the file LICENSE.
"""
from pylex.config.lexer_config import LexerConfig
from pylex.config.token_defination import TokenDefination
class LexerDictConfig(LexerConfig):
    """
    Lexer Configuration using a dictionary.

    Accepts a mapping of token name -> pattern (or ready-made
    TokenDefination instances) and exposes them as a list of
    TokenDefination objects.
    """
    def __init__(self, token_definations=None):
        """
        :param token_definations: mapping of token name to either a pattern
            string or a TokenDefination instance.  Defaults to an empty
            mapping (the original mutable-default ``{}`` is avoided).
        """
        self.definations = []
        for name, defination in (token_definations or {}).items():
            # isinstance (not type ==) so TokenDefination subclasses are
            # accepted as-is instead of being wrapped again.
            if isinstance(defination, TokenDefination):
                self.add_token_defination(defination)
            else:
                self.add_token_defination(TokenDefination(defination, name))
    def add_token_defination(self, token_defination):
        """Append a TokenDefination to this configuration."""
        self.definations.append(token_defination)
    def get_token_definations(self):
        """Return the list of registered TokenDefination objects."""
        return self.definations
| 323 | 0 | 72 |
cba59782f47fa957c6bae9d0a91ab2916e84638a | 13,117 | py | Python | scripts/020migr-01x.py | vertexproject/synapse-regression | a4639acd581a68493e63df1639f416969527fa1f | [
"Apache-2.0"
] | 1 | 2019-01-31T23:18:59.000Z | 2019-01-31T23:18:59.000Z | scripts/020migr-01x.py | vertexproject/synapse-regression | a4639acd581a68493e63df1639f416969527fa1f | [
"Apache-2.0"
] | null | null | null | scripts/020migr-01x.py | vertexproject/synapse-regression | a4639acd581a68493e63df1639f416969527fa1f | [
"Apache-2.0"
] | null | null | null | '''
Generate an 0.1.x cortex for testing migration to 0.2.x
'''
import os
import json
import shutil
import asyncio
import hashlib
import synapse.common as s_common
import synapse.cortex as s_cortex
import synapse.lib.cell as s_cell
import synapse.lib.module as s_module
import synapse.lib.version as s_version
import synapse.lib.modelrev as s_modelrev
import synapse.lib.stormsvc as s_stormsvc
import synapse.tools.backup as s_backup
assert s_version.version == (0, 1, 56)
DESTPATH_CORTEX = 'cortexes/0.1.56-migr'
DESTPATH_SVC = 'cortexes/0.1.56-migr/stormsvc'
DESTPATH_ASSETS = 'assets/0.1.56-migr'
if __name__ == '__main__':
asyncio.run(main())
| 38.02029 | 110 | 0.514752 | '''
Generate an 0.1.x cortex for testing migration to 0.2.x
'''
import os
import json
import shutil
import asyncio
import hashlib
import synapse.common as s_common
import synapse.cortex as s_cortex
import synapse.lib.cell as s_cell
import synapse.lib.module as s_module
import synapse.lib.version as s_version
import synapse.lib.modelrev as s_modelrev
import synapse.lib.stormsvc as s_stormsvc
import synapse.tools.backup as s_backup
assert s_version.version == (0, 1, 56)
DESTPATH_CORTEX = 'cortexes/0.1.56-migr'
DESTPATH_SVC = 'cortexes/0.1.56-migr/stormsvc'
DESTPATH_ASSETS = 'assets/0.1.56-migr'
class MigrMod(s_module.CoreModule):
    """Test CoreModule contributing the ``migr`` data model to the cortex."""
    def getModelDefs(self):
        """Return the ``migr`` model definition tuple consumed by the cortex."""
        modeldict = {
            'ctors': (),
            'forms': (
                ('migr:test', {}, (
                    ('bar', ('str', {'lower': True}), {}),
                )),
            ),
            'types': (
                ('migr:test', ('int', {}), {}),
            ),
        }
        return (('migr', modeldict),)
class MigrSvcApi(s_stormsvc.StormSvc, s_cell.CellApi):
    # Storm service API for the test cell: advertises the "turtle" service
    # and a package whose `newcmd` storm command calls back into test().
    _storm_svc_name = 'turtle'
    _storm_svc_pkgs = ({
        'name': 'turtle',
        'version': (0, 0, 1),
        'commands': ({'name': 'newcmd', 'storm': '[ inet:fqdn=$lib.service.get($cmdconf.svciden).test() ]'},),
    },)
    async def test(self):
        # Proxy through to the cell so remote callers get its configured fqdn.
        return await self.cell.test()
class MigrStormsvc(s_cell.Cell):
    # Minimal storm service cell used to seed service nodes into the
    # 0.1.x regression cortex.
    cellapi = MigrSvcApi
    confdefs = (
        ('myfqdn', {'type': 'str', 'defval': 'snake.io', 'doc': 'A test fqdn'}),
    )
    async def __anit__(self, dirn, conf=None):
        await s_cell.Cell.__anit__(self, dirn, conf=conf)
        self.myfqdn = self.conf.get('myfqdn')  # fqdn returned to storm callers
    async def test(self):
        # Return the configured fqdn; exposed remotely via MigrSvcApi.test().
        return self.myfqdn
async def main():
with s_common.getTempDir() as dirn:
path = os.path.join(dirn, 'cortex')
svcpath = os.path.join(dirn, 'stormsvc')
conf = {
'dedicated': True,
'lmdb:map_async': True,
}
podes = []
nodedata = [] # [ ndef, [item1, item2, ... ]
async with await s_cortex.Cortex.anit(path, conf=conf) as core:
async with core.getLocalProxy() as proxy:
# load modules
await core.loadCoreModule('020migr-01x.MigrMod')
await core.loadCoreModule('synapse.tests.utils.TestModule')
# create forked view with some nodes
view2 = await core.view.fork()
await view2.nodes('[test:int=10]')
# add roles and permissions
role1 = await core.auth.addRole('ninjas')
role2 = await core.auth.addRole('cowboys')
role3 = await core.auth.addRole('friends')
await proxy.addAuthRule('friends', (True, ('view', 'read')), iden=view2.iden)
await proxy.addAuthRule('friends', (True, ('node:add',)), iden=view2.layers[0].iden)
await proxy.addAuthRule('friends', (True, ('prop:set',)), iden=view2.layers[0].iden)
await proxy.addAuthRule('friends', (True, ('layer:lift',)), iden=view2.layers[0].iden)
await role1.addRule((True, ('baz', 'faz')))
# create fred who can add a tag and triggers, and read the forked view
fred = await core.auth.addUser('fred')
await fred.grant('ninjas')
await proxy.addAuthRule('fred', (True, ('view', 'read')), iden=view2.iden)
await fred.addRule((True, ('tag:add', 'trgtag')))
await fred.addRule((True, ('tag:add', 'trgtagdel')))
await fred.addRule((True, ('trigger', 'add')))
await fred.addRule((True, ('trigger', 'get')))
await fred.addRule((True, ('storm', 'queue', 'get')))
await fred.addRule((True, ('storm', 'queue', 'add')))
# create bobo who can write to the layer but doesn't have the trigger rules
bobo = await core.auth.addUser('bobo')
await bobo.setPasswd('secret')
await bobo.grant('friends')
await bobo.addRule((True, ('tag:add', 'bobotag')))
await bobo.addRule((True, ('storm', 'queue', 'get')))
await bobo.addRule((True, ('storm', 'queue', 'put')))
await bobo.addRule((True, ('storm', 'queue', 'boboq')))
# add triggers
await core.addTrigger('node:add', '[ +#trgtag ]', info={'form': 'inet:ipv4'}, user=fred)
await core.addTrigger('node:del', '[ inet:ipv4=7.7.7.7 ]', info={'form': 'file:bytes'})
await core.addTrigger('tag:add', '[ inet:ipv4=5.5.5.5 ]', info={'tag': 'foo.*.baz'})
await core.addTrigger('tag:del', '[ +#trgtagdel ]', info={'tag': 'faz.baz'}, user=fred)
await core.addTrigger('prop:set', '[ +#proptrgtag ]', info={'prop': 'file:bytes:name'})
# add queues
rule = (True, ('storm', 'queue', 'fredq', 'get'))
await proxy.addAuthRule('friends', rule)
await core.eval('queue.add rootq').list()
await core.eval('queue.add fredq', user=fred).list()
await core.eval('queue.add boboq').list()
assert len(await core.getCoreQueues()) == 3
# add cron jobs
await core.addCronJob(fred, 'inet:ipv4', {'hour': 2})
await core.addCronJob(bobo, 'inet:ipv4', {'hour': 2})
await proxy.addAuthRule('friends', (True, ('cron', 'get')))
await fred.addRule((True, ('cron',)))
# extend the data model
await core.addFormProp('inet:ipv4', '_rdxp', ('int', {}), {})
await core.addFormProp('inet:ipv4', '_rdxpz', ('int', {}), {})
await core.addUnivProp('_rdxu', ('str', {'lower': True}), {})
await core.addUnivProp('_rdxuz', ('str', {'lower': True}), {})
await core.addTagProp('score', ('int', {}), {})
await core.addTagProp('nah', ('int', {}), {})
# module forms
scmd = '[ migr:test=22 :bar=spam ]'
await core.nodes(scmd)
# stormsvc nodes
async with await MigrStormsvc.anit(svcpath) as svc:
svc.dmon.share('turtle', svc)
root = svc.auth.getUserByName('root')
await root.setPasswd('root')
info = await svc.dmon.listen('tcp://127.0.0.1:0/')
host, port = info
await core.nodes(f'service.add turtle tcp://root:root@127.0.0.1:{port}/turtle')
await core.nodes('$lib.service.wait(turtle)')
nodes = await core.nodes('newcmd')
assert len(await core.nodes('inet:fqdn=snake.io')) == 1
# crypto
scmd = '[ crypto:currency:client=(1.2.3.4, (btc, 1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2)) ]'
await core.nodes(scmd)
scmd = (
f'[ econ:acct:payment="*"'
f' :from:coinaddr=(btc, 1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2)'
f' :to:coinaddr=(btc, 1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2)]'
)
await core.nodes(scmd)
# geospace
scmd = '[ geo:place="*" :latlong=(-30.0,20.22) +#foo.bar.baz.spam] $node.data.set(cat, dog)'
await core.nodes(scmd)
scmd = (
f'[ geo:nloc=('
f'("inet:wifi:ap", ("BHOT-2019", "d8:38:fc:13:87:5c")), '
f'("43.41483078", "39.94891608"), '
f'"2019"'
f') ]'
)
await core.nodes(scmd)
# dns
scmd = '[ inet:dns:a=(woot.com, 1.2.3.4) inet:dns:a=(vertex.link, 1.2.3.4) ]'
await core.nodes(scmd)
# inet
scmd = (
f'[ inet:ipv4=1.2.3.4 :latlong=(-30.0, 20.22) .seen=("2005", "2006")'
f' :_rdxp=7 ._rdxuz=woot +#foo=("2012", "2013")]'
)
await core.nodes(scmd)
scmd = (
f'[ inet:ipv4=5.6.7.8 :loc=nl :_rdxpz=7 +#foo:nah=42 ]'
f' $node.data.set(car, cool)'
)
await core.nodes(scmd)
# infotech
scmd = f'[ it:prod:softver=$lib.guid() :vers="3.4.45" ]'
await core.nodes(scmd)
# files
bytstr = b'foobar'
scmd = f'$buf={bytstr} [ file:bytes=$buf.encode() ._rdxu=22 +#faz.baz +#foo.bar:score=9 ]'
await core.nodes(scmd)
sha256 = hashlib.sha256(b'spambam').hexdigest()
scmd = f'[ file:bytes={sha256} :mime=x509 :size=375] $node.data.set(car, cat)'
await core.nodes(scmd)
# base
scmd = '[ edge:has=((inet:ipv4, 1.2.3.4), (inet:ipv4, 5.6.7.8)) ]'
await core.nodes(scmd)
guid = s_common.guid()
scmd = f'[ meta:source={guid} :name=foosrc :type=osint ]'
await core.nodes(scmd)
scmd = f'[ meta:seen=({guid}, (inet:ipv4, 1.2.3.4))] $node.data.set(jazz, bam)'
await core.nodes(scmd)
# verify that triggers were activated
assert 1 == len(await core.nodes('inet:ipv4=5.5.5.5'))
assert 3 == len(await core.nodes('inet:ipv4#trgtag'))
# delete source test node
await core.nodes('meta:source:name=test | delnode')
# pack up all the nodes
for node in await core.nodes('.created'):
podes.append(node.pack(dorepr=True))
nodedata.append((node.ndef, [nd async for nd in node.iterData()]))
# startup core so modelrev gets set on all layers
async with await s_cortex.Cortex.anit(path, conf=conf) as core:
await core.stat()
# only run from checkout dir
shutil.rmtree(DESTPATH_CORTEX, ignore_errors=True)
shutil.rmtree(DESTPATH_ASSETS, ignore_errors=True)
s_backup.backup(path, DESTPATH_CORTEX)
s_backup.backup(svcpath, DESTPATH_SVC)
s_common.yamlsave(conf, os.path.join(DESTPATH_CORTEX, 'cell.yaml'))
if not os.path.exists(DESTPATH_ASSETS):
s_common.gendir(DESTPATH_ASSETS)
with open(os.path.join(DESTPATH_ASSETS, 'podes.json'), 'w') as f:
f.write(json.dumps(podes, indent=4))
with open(os.path.join(DESTPATH_ASSETS, 'nodedata.json'), 'w') as f:
f.write(json.dumps(nodedata, indent=2))
# generate splices that will *not* be part of saved cortex
splicepodes = []
splices = {}
async with await s_cortex.Cortex.anit(path, conf=conf) as core:
await core.loadCoreModule('020migr-01x.MigrMod')
await core.loadCoreModule('synapse.tests.utils.TestModule')
lyrs = {}
for view in core.views.values():
for lyr in view.layers:
lyrs[lyr.iden] = (lyr, lyr.splicelog.index())
# Add nodes
scmd = f'[inet:ipv4=10.9.9.1]'
await core.nodes(scmd)
scmd = f'[file:bytes="*" :mime=x509]'
await core.nodes(scmd)
# Add tag to existing nodes
scmd = f'inet:ipv4=1.2.3.4 [+#sp.li.ce]'
await core.nodes(scmd)
# Remove tag from existing nodes
scmd = f'#faz [-#faz]'
await core.nodes(scmd)
# Remove tag prop from exist node
scmd = f'#foo [-#foo.bar:score]'
await core.nodes(scmd)
# Add secondary prop to existing node
scmd = f'geo:place [:desc="foo description"]'
await core.nodes(scmd)
# Remove secondary prop from existing
scmd = f'inet:ipv4=5.6.7.8 [-:loc]'
await core.nodes(scmd)
# Delete a node
scmd = f'meta:seen | delnode --force'
await core.nodes(scmd)
# delete source test node
await core.nodes('meta:source:name=test | delnode')
for node in await core.nodes('.created'):
splicepodes.append(node.pack(dorepr=True))
for lyriden, (lyr, nextoffs) in lyrs.items():
splices[lyriden] = {
'nextoffs': nextoffs,
'splices': [s async for s in lyr.splices(0, -1)],
}
with open(os.path.join(DESTPATH_ASSETS, 'splicepodes.json'), 'w') as f:
f.write(json.dumps(splicepodes, indent=4))
with open(os.path.join(DESTPATH_ASSETS, 'splices.json'), 'w') as f:
f.write(json.dumps(splices, indent=2))
# Entry point: regenerate the migration test cortex and its asset files.
if __name__ == '__main__':
    asyncio.run(main())
| 11,842 | 499 | 119 |
1a18878e942b8022f8dd24edaf079318e0e3cb84 | 7,149 | py | Python | GUI_handler.py | xrayian/PhysicsX | 5211a43f79a179609144619ecbefa95d69379793 | [
"Apache-2.0"
] | 2 | 2019-12-07T09:29:59.000Z | 2019-12-27T23:45:54.000Z | GUI_handler.py | xrayian/PhysicsX | 5211a43f79a179609144619ecbefa95d69379793 | [
"Apache-2.0"
] | null | null | null | GUI_handler.py | xrayian/PhysicsX | 5211a43f79a179609144619ecbefa95d69379793 | [
"Apache-2.0"
] | null | null | null | import PySimpleGUI as sg
from support import Velocity, Distance, Time, Acceleration
from os import getlogin
from re import sub
error_count = 0
sg.set_options(element_padding=(5,5),icon="images/icon.ico",)
sg.change_look_and_feel('blueMono')
layout = [
[sg.Text('Initial Velocity'), sg.InputText(focus=True,key='ivelocity'), sg.InputCombo(('m/s','km/h'),default_value='m/s',readonly=True,disabled=True) ],
[sg.Text('Final Velocity '), sg.InputText(key='velocity'), sg.InputCombo(('m/s','km/h'),default_value='m/s',readonly=True,disabled=True) ],
[sg.Text('Acceleration '), sg.InputText(key='acceleration',disabled=True), sg.InputCombo(('m/s²','km/h²'),default_value='m/s²',readonly=True,disabled=True) ],
[sg.Text('Elapsed Time'), sg.InputText(key='time'),sg.InputCombo(('seconds','hours'),default_value='seconds',readonly=True,disabled=True),],
[sg.Text('Distance '), sg.InputText(key='distance'),sg.InputCombo(('meters','kilometers'),default_value='meters',readonly=True,disabled=True),],
[sg.Text('Determine '), sg.Radio('Acceleration',0,default=True,key='is_acceleration',enable_events='True'), sg.Radio('Velocity',0,key='is_velocity',enable_events='True'), sg.Radio('Time',0,key='is_time',enable_events='True'), sg.Radio('Distance',0,key='is_distance',enable_events='True'), ],
[sg.Output(size=(88, 10),key='log',font=("Segoe UI Bold",10),text_color='#221255')],
[sg.Text('Acceleration Rate:',key="output",size=(50,1),font=('Segoe UI Bold',10))],
[sg.Button(button_text='Calculate'),sg.Button(button_text='Clear'),
sg.Button(button_text='Help'),
sg.Button(button_text='Exit')]
]
window = sg.Window('PhysicsX', layout)
while True:
event, values = window.read()
if values['is_acceleration'] is True:
window.find('acceleration').update("",disabled = True)
window.find('output').update('Acceleration Rate:')
window.find('velocity').update(disabled = False)
window.find('distance').update(disabled = False)
window.find('time').update(disabled = False)
elif values['is_velocity'] is True:
window.find('acceleration').update(disabled = False)
window.find('output').update('Final Velocity:')
window.find('velocity').update("",disabled = True)
window.find('distance').update(disabled = False)
window.find('time').update(disabled = False)
elif values['is_distance'] is True:
window.find('acceleration').update(disabled = False)
window.find('velocity').update(disabled = False)
window.find('distance').update("",disabled = True)
window.find('output').update('Distance:')
window.find('time').update(disabled = False)
elif values['is_time'] is True:
window.find('acceleration').update(disabled = False)
window.find('velocity').update(disabled = False)
window.find('distance').update(disabled = False)
window.find('time').update("",disabled = True)
window.find('output').update('Elapsed Time:')
if event in (None, 'Exit', 'Cancel'):
break
if event == 'Calculate':
initial_velocity = parse_data(values['ivelocity'],default=0)
final_velocity = parse_data(values['velocity'])
time = parse_data(values['time'])
distance = parse_data(values['distance'])
acceleration = parse_data(values['acceleration'])
if values['is_acceleration'] is True:
result = Acceleration(initial_velocity= initial_velocity, final_velocity= final_velocity, time=time,distance=distance)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Acceleration Rate: {answer_digit_only} meters/second²')
print(f"[Calculated_Result]: {ans}")
else:
print (ans)
elif values['is_velocity'] is True:
result = Velocity(initial_velocity= initial_velocity, acceleration=acceleration, time=time,distance=distance)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Final Velocity: {answer_digit_only} meters/second')
print(f"[Calculated_Result]: {ans}")
else:
print(ans)
elif values['is_time'] is True:
result = Time(initial_velocity= initial_velocity, final_velocity= final_velocity, acceleration=acceleration,distance=distance)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Elapsed Time: {answer_digit_only} seconds')
print(f"[Calculated_Result]: {ans}")
else:
print(ans)
elif values['is_distance'] is True:
result = Distance(initial_velocity= initial_velocity, final_velocity= final_velocity, time=time,acceleration=acceleration)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Distance: {answer_digit_only} meters')
print(f"[Calculated_Result]: {ans}")
else:
print(ans)
else:
sg.PopupError('No Operation Selected',title='Critical Error')
if event == 'Clear':
window.find('ivelocity').update('')
window.find('velocity').update('')
window.find('distance').update('')
window.find('time').update('')
window.find('acceleration').update('')
window.find('log').update('')
if event == 'Debug':
for i in values:
print(f"values[{i}]--> {values[i]}")
if event == "Help":
user = getlogin()
msg = (f'Hello {user}\n\nTo Calculate The Value Of A Property Just Click On The Dot Next To It\nIf You Don\'t Have A Value For Something: Leave It Blank!\n')
sg.popup(msg,title="Help",background_color='#000000',text_color='#FFFFFF')
window.close()
| 47.344371 | 303 | 0.590152 | import PySimpleGUI as sg
from support import Velocity, Distance, Time, Acceleration
from os import getlogin
from re import sub
error_count = 0
def parse_data(string, default=None, zero=0):
    """Parse a user-entered input field into a float.

    :param string: raw text taken from an input field
    :param default: value returned when the field was left blank
    :param zero: unused; kept for backward compatibility
    :return: the parsed float, ``default`` for an empty field, or ``None``
        when the text is not a valid number (after warning the user once).
    """
    global error_count
    if string == "":
        return default
    try:
        # Catch only conversion failures -- a bare `except:` would also
        # swallow KeyboardInterrupt/SystemExit.
        return float(string)
    except (ValueError, TypeError):
        # Pop up the warning only for the first bad value; afterwards just
        # log to the output pane so the user is not spammed with dialogs.
        if error_count == 0:
            error_count += 1
            sg.popup_error('Do not enter anything other than numbers in these fields',title='Warning')
        print("[Parse_Error]: `"+ string + "` is not a valid number")
        return None
sg.set_options(element_padding=(5,5),icon="images/icon.ico",)
sg.change_look_and_feel('blueMono')
layout = [
[sg.Text('Initial Velocity'), sg.InputText(focus=True,key='ivelocity'), sg.InputCombo(('m/s','km/h'),default_value='m/s',readonly=True,disabled=True) ],
[sg.Text('Final Velocity '), sg.InputText(key='velocity'), sg.InputCombo(('m/s','km/h'),default_value='m/s',readonly=True,disabled=True) ],
[sg.Text('Acceleration '), sg.InputText(key='acceleration',disabled=True), sg.InputCombo(('m/s²','km/h²'),default_value='m/s²',readonly=True,disabled=True) ],
[sg.Text('Elapsed Time'), sg.InputText(key='time'),sg.InputCombo(('seconds','hours'),default_value='seconds',readonly=True,disabled=True),],
[sg.Text('Distance '), sg.InputText(key='distance'),sg.InputCombo(('meters','kilometers'),default_value='meters',readonly=True,disabled=True),],
[sg.Text('Determine '), sg.Radio('Acceleration',0,default=True,key='is_acceleration',enable_events='True'), sg.Radio('Velocity',0,key='is_velocity',enable_events='True'), sg.Radio('Time',0,key='is_time',enable_events='True'), sg.Radio('Distance',0,key='is_distance',enable_events='True'), ],
[sg.Output(size=(88, 10),key='log',font=("Segoe UI Bold",10),text_color='#221255')],
[sg.Text('Acceleration Rate:',key="output",size=(50,1),font=('Segoe UI Bold',10))],
[sg.Button(button_text='Calculate'),sg.Button(button_text='Clear'),
sg.Button(button_text='Help'),
sg.Button(button_text='Exit')]
]
window = sg.Window('PhysicsX', layout)
while True:
event, values = window.read()
if values['is_acceleration'] is True:
window.find('acceleration').update("",disabled = True)
window.find('output').update('Acceleration Rate:')
window.find('velocity').update(disabled = False)
window.find('distance').update(disabled = False)
window.find('time').update(disabled = False)
elif values['is_velocity'] is True:
window.find('acceleration').update(disabled = False)
window.find('output').update('Final Velocity:')
window.find('velocity').update("",disabled = True)
window.find('distance').update(disabled = False)
window.find('time').update(disabled = False)
elif values['is_distance'] is True:
window.find('acceleration').update(disabled = False)
window.find('velocity').update(disabled = False)
window.find('distance').update("",disabled = True)
window.find('output').update('Distance:')
window.find('time').update(disabled = False)
elif values['is_time'] is True:
window.find('acceleration').update(disabled = False)
window.find('velocity').update(disabled = False)
window.find('distance').update(disabled = False)
window.find('time').update("",disabled = True)
window.find('output').update('Elapsed Time:')
if event in (None, 'Exit', 'Cancel'):
break
if event == 'Calculate':
initial_velocity = parse_data(values['ivelocity'],default=0)
final_velocity = parse_data(values['velocity'])
time = parse_data(values['time'])
distance = parse_data(values['distance'])
acceleration = parse_data(values['acceleration'])
if values['is_acceleration'] is True:
result = Acceleration(initial_velocity= initial_velocity, final_velocity= final_velocity, time=time,distance=distance)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Acceleration Rate: {answer_digit_only} meters/second²')
print(f"[Calculated_Result]: {ans}")
else:
print (ans)
elif values['is_velocity'] is True:
result = Velocity(initial_velocity= initial_velocity, acceleration=acceleration, time=time,distance=distance)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Final Velocity: {answer_digit_only} meters/second')
print(f"[Calculated_Result]: {ans}")
else:
print(ans)
elif values['is_time'] is True:
result = Time(initial_velocity= initial_velocity, final_velocity= final_velocity, acceleration=acceleration,distance=distance)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Elapsed Time: {answer_digit_only} seconds')
print(f"[Calculated_Result]: {ans}")
else:
print(ans)
elif values['is_distance'] is True:
result = Distance(initial_velocity= initial_velocity, final_velocity= final_velocity, time=time,acceleration=acceleration)
ans = result.calculate()
if ans is not None:
answer_digit_only = sub(r"[^0123456789\.-]","",ans)
if answer_digit_only != "":
window.find('output').update(f'Distance: {answer_digit_only} meters')
print(f"[Calculated_Result]: {ans}")
else:
print(ans)
else:
sg.PopupError('No Operation Selected',title='Critical Error')
if event == 'Clear':
window.find('ivelocity').update('')
window.find('velocity').update('')
window.find('distance').update('')
window.find('time').update('')
window.find('acceleration').update('')
window.find('log').update('')
if event == 'Debug':
for i in values:
print(f"values[{i}]--> {values[i]}")
if event == "Help":
user = getlogin()
msg = (f'Hello {user}\n\nTo Calculate The Value Of A Property Just Click On The Dot Next To It\nIf You Don\'t Have A Value For Something: Leave It Blank!\n')
sg.popup(msg,title="Help",background_color='#000000',text_color='#FFFFFF')
window.close()
| 425 | 0 | 23 |
d34ea667ce7eaa4e845915382405956244ee3598 | 3,398 | py | Python | crypto/encrypted-communications/crypto_commons/asymmetric/asymmetric.py | nanzggits/sdctf-2021 | fcddb506f5f798a264fc17e5588c0f5b7d5fbb2c | [
"MIT"
] | 6 | 2021-05-17T21:04:31.000Z | 2022-01-01T23:28:18.000Z | crypto/encrypted-communications/crypto_commons/asymmetric/asymmetric.py | acmucsd/sdctf-2021 | fcddb506f5f798a264fc17e5588c0f5b7d5fbb2c | [
"MIT"
] | null | null | null | crypto/encrypted-communications/crypto_commons/asymmetric/asymmetric.py | acmucsd/sdctf-2021 | fcddb506f5f798a264fc17e5588c0f5b7d5fbb2c | [
"MIT"
] | 1 | 2021-05-29T02:18:30.000Z | 2021-05-29T02:18:30.000Z | import random
from crypto_commons.generic import long_to_bytes, multiply, factorial
from crypto_commons.rsa.rsa_commons import ensure_long, modinv, lcm_multi
"""
Here are some less popular asymmetric cryptosystems:
- Damgard-Jurik
- Paillier (same as Damgard Jurik for s = 1)
"""
def paillier_encrypt(m, g, n, r):
    """
    Encrypt *m* under the Paillier cryptosystem with an explicit nonce.
    Computes g^m * r^n mod n^2; equivalent to Damgard-Jurik with s=1.
    :param m: plaintext to encrypt, either long or bytes
    :param g: random public integer g
    :param n: modulus
    :param r: random nonce r
    :return: ciphertext as long
    """
    plaintext = ensure_long(m)
    modulus_sq = n * n
    g_part = pow(g, plaintext, modulus_sq)
    r_part = pow(r, n, modulus_sq)
    return (g_part * r_part) % modulus_sq
def paillier_encrypt_simple(m, g, n):
    """
    Encrypt *m* under the Paillier cryptosystem, drawing the nonce itself.
    Equivalent to Damgard-Jurik with s=1.
    :param m: plaintext to encrypt, either long or bytes
    :param g: random public integer g
    :param n: modulus
    :return: ciphertext as long
    """
    nonce = random.randint(2, n * n)
    return paillier_encrypt(m, g, n, nonce)
def paillier_decrypt(c, factors, g):
    """
    Decrypt data using Paillier Cryptosystem
    Actually it's the same as Damgard Jurik with s=1
    :param c: ciphertext
    :param factors: prime factors of the modulus n
    :param g: random public integer g
    :return: decrypted data as long
    """
    def L(u, n):
        # Paillier "L function": (u - 1) / n, which is exact here because
        # valid inputs are congruent to 1 mod n. Floor division keeps the
        # arithmetic in integers. (This helper was missing, so the calls
        # below raised NameError.)
        return int((u - 1) // n)
    lbd = lcm_multi([p - 1 for p in factors])  # Carmichael lambda of n
    n = multiply(factors)
    x = L(pow(g, lbd, n * n), n)
    mi = int(modinv(x, n))
    m = L(pow(c, lbd, n * n), n) * pow(mi, 1, n)
    return m % n
def paillier_decrypt_printable(c, factors, g):
    """
    Decrypt a Paillier ciphertext and render the plaintext as bytes.
    Actually it's the same as Damgard Jurik with s=1
    :param c: ciphertext
    :param factors: prime factors
    :param g: random public integer g
    :return: decrypted data as bytes
    """
    plaintext = paillier_decrypt(c, factors, g)
    return long_to_bytes(plaintext)
def damgard_jurik_encrypt(m, n, g, s):
    """
    Encrypt data using the Damgard-Jurik cryptosystem.
    Computes g^m * r^(n^s) mod n^(s+1) for a freshly drawn nonce r.
    :param m: plaintext, either long or bytes
    :param n: modulus
    :param g: random public integer g
    :param s: order n^s (ciphertexts live modulo n^(s+1))
    :return: ciphertext as long
    """
    plaintext = ensure_long(m)
    modulus = n ** (s + 1)
    nonce = random.randint(2, modulus)
    g_part = pow(g, plaintext, modulus)
    r_part = pow(nonce, n ** s, modulus)
    return (g_part * r_part) % modulus
def damgard_jurik_decrypt(c, n, s, factors, g):
    """
    Decrypt data using Damgard Jurik Cryptosystem
    :param c: ciphertext
    :param n: modulus
    :param s: order n^s (ciphertexts live modulo n^(s+1))
    :param factors: modulus prime factors
    :param g: random public integer g
    :return: decrypted plaintext as long
    """
    def decrypt(ct, d, n, s):
        # Iteratively recover j from ct^d == (1 + n)^j mod n^(s+1), as in
        # the Damgard-Jurik paper. (This helper was missing, so the calls
        # below raised NameError.)
        def L(x):
            # Exact integer division; for valid inputs x is 1 mod n, so
            # floor division loses nothing (true division would produce
            # floats and break the modular arithmetic).
            return (x - 1) // n
        ns1 = pow(n, s + 1)
        a = pow(ct, d, ns1)
        i = 0
        for j in range(1, s + 1):
            t1 = L(a % pow(n, j + 1))
            t2 = i
            for k in range(2, j + 1):
                i -= 1
                t2 = (t2 * i) % pow(n, j)
                fac = int(factorial(k))
                up = (t2 * pow(n, k - 1))
                down = modinv(fac, pow(n, j))
                t1 = (t1 - up * down) % pow(n, j)
            i = t1
        return i

    # d = lambda(n); decrypt() yields the (1 + n)-exponent scaled by d,
    # and dividing the two results removes that scaling.
    d = lcm_multi([p - 1 for p in factors])
    ns = pow(n, s)
    jd = decrypt(g, d, n, s)
    jd_inv = modinv(jd, ns)
    jmd = decrypt(c, d, n, s)
    return (jd_inv * jmd) % ns
| 26.341085 | 73 | 0.575927 | import random
from crypto_commons.generic import long_to_bytes, multiply, factorial
from crypto_commons.rsa.rsa_commons import ensure_long, modinv, lcm_multi
"""
Here are some less popular asymmetric cryptosystems:
- Damgard-Jurik
- Paillier (same as Damgard Jurik for s = 1)
"""
def paillier_encrypt(m, g, n, r):
"""
Encrypt data using Paillier Cryptosystem
Actually it's the same as Damgard Jurik with s=1
:param m: plaintext to encrypt, can be either long or bytes
:param g: random public integer g
:param n: modulus
:param r: random r
:return: encrypted data as long
"""
m = ensure_long(m)
n2 = n * n
return (pow(g, m, n2) * pow(r, n, n2)) % n2
def paillier_encrypt_simple(m, g, n):
"""
Encrypt data using Paillier Cryptosystem
Actually it's the same as Damgard Jurik with s=1
:param m: plaintext to encrypt, can be either long or bytes
:param g: random public integer g
:param n: modulus
:return: encrypted data as long
"""
n2 = n * n
r = random.randint(2, n2)
return paillier_encrypt(m, g, n, r)
def paillier_decrypt(c, factors, g):
    """
    Decrypt data using Paillier Cryptosystem
    Actually it's the same as Damgard Jurik with s=1
    :param c: ciphertext
    :param factors: prime factors of the modulus n
    :param g: random public integer g
    :return: decrypted data as long
    """
    def L(u, n):
        # Paillier "L function": (u - 1) / n, exact because valid inputs
        # are congruent to 1 mod n; floor division keeps it in integers.
        return int((u - 1) // n)
    # Carmichael lambda of n (lcm of p-1 over the prime factors).
    lbd = lcm_multi([p - 1 for p in factors])
    n = multiply(factors)
    x = L(pow(g, lbd, n * n), n)
    mi = int(modinv(x, n))
    m = L(pow(c, lbd, n * n), n) * pow(mi, 1, n)
    return m % n
def paillier_decrypt_printable(c, factors, g):
"""
Decrypt data using Paillier Cryptosystem
Actually it's the same as Damgard Jurik with s=1
:param c: ciphertext
:param factors: prime factors
:param g: random public integer g
:return: decrypted data as bytes
"""
return long_to_bytes(paillier_decrypt(c, factors, g))
def damgard_jurik_encrypt(m, n, g, s):
"""
Encrypt data using Damgard Jurik Cryptosystem
:param m: plaintext
:param n: modulus
:param g: random public integer g
:param s: order n^s
:return:
"""
m = ensure_long(m)
s1 = s + 1
ns1 = n ** s1
r = random.randint(2, ns1)
enc = pow(g, m, ns1) * pow(r, n ** s, ns1) % ns1
return enc
def damgard_jurik_decrypt(c, n, s, factors, g):
    """
    Decrypt data using Damgard Jurik Cryptosystem
    :param c: ciphertext
    :param n: modulus
    :param s: order n^s (ciphertexts live modulo n^(s+1))
    :param factors: modulus prime factors
    :param g: random public integer g
    :return: decrypted plaintext as long
    """
    def decrypt(ct, d, n, s):
        # Iteratively recover j from ct^d == (1 + n)^j mod n^(s+1).
        def L(x):
            # Python 3 fix: use floor division. The old `(x - 1) / n`
            # produced floats, breaking the modular arithmetic below;
            # for valid inputs x is 1 mod n so `//` is exact.
            return (x - 1) // n
        ns1 = pow(n, s + 1)
        a = pow(ct, d, ns1)
        i = 0
        for j in range(1, s + 1):
            t1 = L(a % pow(n, j + 1))
            t2 = i
            for k in range(2, j + 1):
                i -= 1
                t2 = (t2 * i) % pow(n, j)
                # Python 3 fix: `long` no longer exists; `int` is unbounded.
                fac = int(factorial(k))
                up = (t2 * pow(n, k - 1))
                down = modinv(fac, pow(n, j))
                t1 = (t1 - up * down) % pow(n, j)
            i = t1
        return i
    # d = lambda(n); dividing the two decrypt() results removes the d scaling.
    d = lcm_multi([p - 1 for p in factors])
    ns = pow(n, s)
    jd = decrypt(g, d, n, s)
    jd_inv = modinv(jd, ns)
    jmd = decrypt(c, d, n, s)
    return (jd_inv * jmd) % ns
| 557 | 0 | 54 |
d742a8f62ce69d50a5f6e561170e826811f173ee | 1,903 | py | Python | src/runescape_old/Runescape_old_data.py | Peilonrayz/recipe_forest | 5d84e143b9a4f4ab139b8b3bce423517822c6f91 | [
"MIT"
] | null | null | null | src/runescape_old/Runescape_old_data.py | Peilonrayz/recipe_forest | 5d84e143b9a4f4ab139b8b3bce423517822c6f91 | [
"MIT"
] | null | null | null | src/runescape_old/Runescape_old_data.py | Peilonrayz/recipe_forest | 5d84e143b9a4f4ab139b8b3bce423517822c6f91 | [
"MIT"
] | null | null | null | mats = [
Mat(**m)
for m in [
{
"name": "Bronze",
"smithing": 1,
"forging": 15,
"forging_levels": [1],
"forging_burial": False,
},
{
"name": "Iron",
"smithing": 2,
"forging": 40,
"forging_levels": [1, 1],
"forging_burial": False,
},
{
"name": "Steel",
"smithing": 3,
"forging": 75,
"forging_levels": [1, 1],
"forging_burial": False,
},
{
"name": "Mithril",
"smithing": 5,
"forging": 120,
"forging_levels": [1, 1, 2],
"forging_burial": False,
},
{
"name": "Adamant",
"smithing": 7,
"forging": 170,
"forging_levels": [1, 1, 2],
"forging_burial": True,
},
{
"name": "Rune",
"smithing": 10,
"forging": 240,
"forging_levels": [1, 1, 2, 4],
"forging_burial": True,
},
{
"name": "Orikalkum",
"smithing": 13,
"forging": 350,
"forging_levels": [1, 1, 2, 4],
"forging_burial": True,
},
{
"name": "Necronium",
"smithing": 17,
"forging": 500,
"forging_levels": [1, 1, 2, 4, 8],
"forging_burial": True,
},
{
"name": "Bane",
"smithing": 21,
"forging": 700,
"forging_levels": [1, 1, 2, 4, 8],
"forging_burial": True,
},
{
"name": "Elder Rune",
"smithing": 26,
"forging": 1000,
"forging_levels": [1, 1, 2, 4, 8, 16],
"forging_burial": True,
},
]
]
| 25.039474 | 50 | 0.352601 | mats = [
Mat(**m)
for m in [
{
"name": "Bronze",
"smithing": 1,
"forging": 15,
"forging_levels": [1],
"forging_burial": False,
},
{
"name": "Iron",
"smithing": 2,
"forging": 40,
"forging_levels": [1, 1],
"forging_burial": False,
},
{
"name": "Steel",
"smithing": 3,
"forging": 75,
"forging_levels": [1, 1],
"forging_burial": False,
},
{
"name": "Mithril",
"smithing": 5,
"forging": 120,
"forging_levels": [1, 1, 2],
"forging_burial": False,
},
{
"name": "Adamant",
"smithing": 7,
"forging": 170,
"forging_levels": [1, 1, 2],
"forging_burial": True,
},
{
"name": "Rune",
"smithing": 10,
"forging": 240,
"forging_levels": [1, 1, 2, 4],
"forging_burial": True,
},
{
"name": "Orikalkum",
"smithing": 13,
"forging": 350,
"forging_levels": [1, 1, 2, 4],
"forging_burial": True,
},
{
"name": "Necronium",
"smithing": 17,
"forging": 500,
"forging_levels": [1, 1, 2, 4, 8],
"forging_burial": True,
},
{
"name": "Bane",
"smithing": 21,
"forging": 700,
"forging_levels": [1, 1, 2, 4, 8],
"forging_burial": True,
},
{
"name": "Elder Rune",
"smithing": 26,
"forging": 1000,
"forging_levels": [1, 1, 2, 4, 8, 16],
"forging_burial": True,
},
]
]
| 0 | 0 | 0 |
55265cd62cce242431d5c3e9243ad98fc1719413 | 904 | py | Python | bookmarks/actions/models.py | mp5maker/django | a2d38e2e9973e755afce1bd0ccb17e58f3db7e33 | [
"MIT"
] | null | null | null | bookmarks/actions/models.py | mp5maker/django | a2d38e2e9973e755afce1bd0ccb17e58f3db7e33 | [
"MIT"
] | 13 | 2020-02-12T00:14:20.000Z | 2022-02-10T08:46:42.000Z | bookmarks/actions/models.py | mp5maker/django | a2d38e2e9973e755afce1bd0ccb17e58f3db7e33 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
| 23.179487 | 64 | 0.643805 | from django.db import models
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
class Action(models.Model):
    """An activity-stream entry: ``user`` performed ``verb`` on ``target``.

    ``target`` is optional and generic (content-type + object id), so an
    action may point at any model instance, or at nothing at all.
    """
    # The user who performed the action.
    user = models.ForeignKey(
        User,
        related_name="actions",
        db_index=True,
        on_delete=models.CASCADE
    )
    # Short description of what was done.
    verb = models.CharField(
        max_length=255
    )
    # Generic-relation plumbing: content type and primary key of the target.
    target_ct = models.ForeignKey(
        ContentType,
        blank=True,
        null=True,
        related_name="target_obj",
        on_delete=models.CASCADE
    )
    target_id = models.PositiveIntegerField(
        null=True,
        blank=True,
        db_index=True
    )
    # Convenience accessor resolving (target_ct, target_id) to an object.
    target= GenericForeignKey('target_ct', 'target_id')
    created = models.DateTimeField(
        auto_now_add=True,
        db_index=True
    )
    class Meta:
        # Newest actions first by default.
        ordering = ('-created', )
e8ae73b679d90773c7e94e9811a24a970cf99181 | 1,655 | py | Python | src/cmcandy/Python_language_Answers/_0004.py | ch98road/leetcode | a9b4be54a169b30f6711809b892dd1f79f2a17e7 | [
"MIT"
] | null | null | null | src/cmcandy/Python_language_Answers/_0004.py | ch98road/leetcode | a9b4be54a169b30f6711809b892dd1f79f2a17e7 | [
"MIT"
] | null | null | null | src/cmcandy/Python_language_Answers/_0004.py | ch98road/leetcode | a9b4be54a169b30f6711809b892dd1f79f2a17e7 | [
"MIT"
] | 1 | 2020-11-26T03:01:12.000Z | 2020-11-26T03:01:12.000Z | from typing import List
if __name__ == '__main__':
    # Manual check: median of [1, 2, 3] and [2, 3, 4, 5] should print 3.
    s = Solution()
    nums1 = [1, 2, 3]
    nums2 = [2, 3, 4, 5]
    # nums1 = [0, 0]
    # nums2 = [0, 0]
    print(s.findMedianSortedArrays(nums1=nums1, nums2=nums2))
| 27.583333 | 61 | 0.39577 | from typing import List
class Solution:
    def findMedianSortedArrays(self, nums1: List[int],
                               nums2: List[int]) -> float:
        """Return the median of two sorted arrays in O(log(min(m, n))).

        Binary-searches a cut position in the shorter array so that the
        combined left halves hold ceil((m + n) / 2) elements and every
        left-half element is <= every right-half element; the median then
        comes from the boundary values of the partition.
        """
        # Always binary-search over the shorter array.
        if len(nums1) > len(nums2):
            nums1, nums2 = nums2, nums1
        short_len, long_len = len(nums1), len(nums2)
        lo, hi = 0, short_len
        half = (short_len + long_len + 1) // 2
        while lo <= hi:
            cut1 = (lo + hi) // 2
            cut2 = half - cut1
            if cut1 < hi and nums2[cut2 - 1] > nums1[cut1]:
                # cut1 is too small; move the cut right.
                lo = cut1 + 1
            elif cut1 > lo and nums1[cut1 - 1] > nums2[cut2]:
                # cut1 is too large; move the cut left.
                hi = cut1 - 1
            else:
                # Valid partition found: compute the boundary values.
                if cut1 == 0:
                    left_max = nums2[cut2 - 1]
                elif cut2 == 0:
                    left_max = nums1[cut1 - 1]
                else:
                    left_max = max(nums1[cut1 - 1], nums2[cut2 - 1])
                if (short_len + long_len) % 2 == 1:
                    return left_max
                if cut1 == short_len:
                    right_min = nums2[cut2]
                elif cut2 == long_len:
                    right_min = nums1[cut1]
                else:
                    right_min = min(nums1[cut1], nums2[cut2])
                return (left_max + right_min) / 2
        return 0.0
if __name__ == '__main__':
s = Solution()
nums1 = [1, 2, 3]
nums2 = [2, 3, 4, 5]
# nums1 = [0, 0]
# nums2 = [0, 0]
print(s.findMedianSortedArrays(nums1=nums1, nums2=nums2))
| 1,450 | -6 | 49 |
334e19a0852221e0278f7a8f1e2e69f913e1c65b | 5,590 | py | Python | turnsole/model.py | Antonio-hi/turnsole | 201e852f6a8699843760fcf4884b7aa6208ab296 | [
"MIT"
] | 1 | 2021-03-05T10:09:56.000Z | 2021-03-05T10:09:56.000Z | turnsole/model.py | Antonio-hi/turnsole | 201e852f6a8699843760fcf4884b7aa6208ab296 | [
"MIT"
] | null | null | null | turnsole/model.py | Antonio-hi/turnsole | 201e852f6a8699843760fcf4884b7aa6208ab296 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author : Lyu Kui
# @Email : 9428.al@gmail.com
# @Created Date : 2021-02-24 13:58:46
# @Last Modified : 2021-03-05 18:14:17
# @Description :
import tensorflow as tf
from .nets.efficientnet import EfficientNetB0, EfficientNetB1, EfficientNetB2, EfficientNetB3
from .nets.efficientnet import EfficientNetB4, EfficientNetB5, EfficientNetB6, EfficientNetB7
if __name__ == '__main__':
    # Smoke test plus a crude per-inference latency benchmark for phi=0.
    model = EasyDet(phi=0)
    model.summary()
    import time
    import numpy as np
    x = np.random.random_sample((1, 640, 640, 3))
    # warm up (first predict pays graph-building cost)
    output = model.predict(x)
    print('\n[INFO] Test start')
    time_start = time.time()
    for i in range(1000):
        output = model.predict(x)
    time_end = time.time()
    # `i` holds the last loop index (999), so (i + 1) is the iteration count.
    print('[INFO] Time used: {:.2f} ms'.format((time_end - time_start)*1000/(i+1)))
| 37.516779 | 109 | 0.530054 | # -*- coding: utf-8 -*-
# @Author : Lyu Kui
# @Email : 9428.al@gmail.com
# @Created Date : 2021-02-24 13:58:46
# @Last Modified : 2021-03-05 18:14:17
# @Description :
import tensorflow as tf
from .nets.efficientnet import EfficientNetB0, EfficientNetB1, EfficientNetB2, EfficientNetB3
from .nets.efficientnet import EfficientNetB4, EfficientNetB5, EfficientNetB6, EfficientNetB7
def load_backbone(phi, input_tensor, weights='imagenet'):
    """Build an EfficientNet-B<phi> backbone and collect its skip features.

    :param phi: EfficientNet compound-scaling coefficient, an integer in 0..7
    :param input_tensor: Keras tensor used as the backbone input
    :param weights: weight initialisation forwarded to the backbone
        (default ``'imagenet'``)
    :return: ``(model, skips)`` where ``skips`` are the feature maps taken
        at strides 1/4, 1/8, 1/16 and 1/32
    :raises ValueError: if ``phi`` is not one of 0..7
    """
    if phi == 0:
        model = EfficientNetB0(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        # Extract the features from these layers.
        layer_names = [
            'block2b_add',         # 1/4
            'block3b_add',         # 1/8
            'block5c_add',         # 1/16
            'block7a_project_bn',  # 1/32
        ]
    elif phi == 1:
        model = EfficientNetB1(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        layer_names = [
            'block2c_add',  # 1/4
            'block3c_add',  # 1/8
            'block5d_add',  # 1/16
            'block7b_add',  # 1/32
        ]
    elif phi == 2:
        model = EfficientNetB2(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        layer_names = [
            'block2c_add',  # 1/4
            'block3c_add',  # 1/8
            'block5d_add',  # 1/16
            'block7b_add',  # 1/32
        ]
    elif phi == 3:
        model = EfficientNetB3(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        layer_names = [
            'block2c_add',  # 1/4
            'block3c_add',  # 1/8
            'block5e_add',  # 1/16
            'block7b_add',  # 1/32
        ]
    elif phi == 4:
        model = EfficientNetB4(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        layer_names = [
            'block2c_add',  # 1/4
            'block3d_add',  # 1/8
            'block5f_add',  # 1/16
            'block7b_add',  # 1/32
        ]
    elif phi == 5:
        model = EfficientNetB5(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        layer_names = [
            'block2e_add',  # 1/4
            'block3e_add',  # 1/8
            'block5g_add',  # 1/16
            'block7c_add',  # 1/32
        ]
    elif phi == 6:
        model = EfficientNetB6(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        layer_names = [
            'block2f_add',  # 1/4
            'block3f_add',  # 1/8
            'block5h_add',  # 1/16
            'block7c_add',  # 1/32
        ]
    elif phi == 7:
        model = EfficientNetB7(include_top=False,
                               weights=weights,
                               input_tensor=input_tensor)
        layer_names = [
            'block2g_add',  # 1/4
            'block3g_add',  # 1/8
            'block5j_add',  # 1/16
            'block7d_add',  # 1/32
        ]
    else:
        # Fail fast with a clear message instead of a NameError on `model`.
        raise ValueError('phi must be an integer in 0..7, got %r' % (phi,))
    skips = [model.get_layer(name).output for name in layer_names]
    return model, skips
def EasyDet(phi=0, input_size=(None, None, 3), weights='imagenet'):
    """Detection feature extractor: EfficientNet-B<phi> backbone + FPN-style neck.

    The four backbone skip features are projected to 256 channels, merged
    top-down, reduced to 64 channels each, upsampled to 1/4 of the input
    resolution and concatenated, giving a (batch, H/4, W/4, 256) output.

    :param phi: EfficientNet scaling coefficient (0..7), see ``load_backbone``
    :param input_size: input image shape; ``None`` spatial dims allow
        variable-size inputs
    :param weights: backbone weight initialisation (default ``'imagenet'``)
    :return: a ``tf.keras.Model`` mapping an image to the fused feature map
    """
    image_input = tf.keras.layers.Input(shape=input_size)
    backbone, skips = load_backbone(phi=phi, input_tensor=image_input, weights=weights)
    # Backbone features at strides 4, 8, 16, 32.
    C2, C3, C4, C5 = skips
    # 1x1 convs project every level to a common 256-channel width.
    in2 = tf.keras.layers.Conv2D(256, (1, 1), padding='same', kernel_initializer='he_normal', name='in2')(C2)
    in3 = tf.keras.layers.Conv2D(256, (1, 1), padding='same', kernel_initializer='he_normal', name='in3')(C3)
    in4 = tf.keras.layers.Conv2D(256, (1, 1), padding='same', kernel_initializer='he_normal', name='in4')(C4)
    in5 = tf.keras.layers.Conv2D(256, (1, 1), padding='same', kernel_initializer='he_normal', name='in5')(C5)
    # 1 / 32 * 8 = 1 / 4 (deepest level, upsampled straight to output stride)
    P5 = tf.keras.layers.UpSampling2D(size=(8, 8))(
        tf.keras.layers.Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal')(in5))
    # 1 / 16 * 4 = 1 / 4 (top-down merge with the level above, then upsample)
    out4 = tf.keras.layers.Add()([in4, tf.keras.layers.UpSampling2D(size=(2, 2))(in5)])
    P4 = tf.keras.layers.UpSampling2D(size=(4, 4))(
        tf.keras.layers.Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal')(out4))
    # 1 / 8 * 2 = 1 / 4
    out3 = tf.keras.layers.Add()([in3, tf.keras.layers.UpSampling2D(size=(2, 2))(out4)])
    P3 = tf.keras.layers.UpSampling2D(size=(2, 2))(
        tf.keras.layers.Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal')(out3))
    # 1 / 4 (already at output stride; no upsampling needed)
    P2 = tf.keras.layers.Conv2D(64, (3, 3), padding='same', kernel_initializer='he_normal')(
        tf.keras.layers.Add()([in2, tf.keras.layers.UpSampling2D(size=(2, 2))(out3)]))
    # (b, 1/4, 1/4, 256): four 64-channel maps concatenated along channels.
    fuse = tf.keras.layers.Concatenate()([P2, P3, P4, P5])
    model = tf.keras.models.Model(inputs=image_input, outputs=fuse)
    return model
if __name__ == '__main__':
model = EasyDet(phi=0)
model.summary()
import time
import numpy as np
x = np.random.random_sample((1, 640, 640, 3))
# warm up
output = model.predict(x)
print('\n[INFO] Test start')
time_start = time.time()
for i in range(1000):
output = model.predict(x)
time_end = time.time()
print('[INFO] Time used: {:.2f} ms'.format((time_end - time_start)*1000/(i+1)))
| 4,699 | 0 | 46 |
28343120a82f0ad353610fd53956f8cb3bf271dc | 1,008 | py | Python | Groups/Group_ID_6/SIFT_and_RESIFT/Code_files/sift.py | sonaldangi12/DataScience | 3d7cd529a96f37c2ef179ee408e2c6d8744d746a | [
"MIT"
] | 5 | 2020-12-13T07:53:22.000Z | 2020-12-20T18:49:27.000Z | Groups/Group_ID_6/SIFT_and_RESIFT/Code_files/sift.py | Gulnaz-Tabassum/DataScience | 1fd771f873a9bc0800458fd7c05e228bb6c4e8a0 | [
"MIT"
] | null | null | null | Groups/Group_ID_6/SIFT_and_RESIFT/Code_files/sift.py | Gulnaz-Tabassum/DataScience | 1fd771f873a9bc0800458fd7c05e228bb6c4e8a0 | [
"MIT"
] | 24 | 2020-12-12T11:23:28.000Z | 2021-10-04T13:09:38.000Z | from libs import *
| 56 | 128 | 0.779762 | from libs import *
def SIFT_algo(training_image, training_gray, test_image, test_gray):
    """Compute SIFT keypoints/descriptors for a training/test image pair.

    Args:
        training_image: colour training image (used for visualisation).
        training_gray: grayscale training image to detect features on.
        test_image: colour test image (unused here; kept for interface parity).
        test_gray: grayscale test image to detect features on.

    Returns:
        Tuple of (training keypoints drawn with size, drawn without size,
        train descriptors, train keypoints, test descriptors, test keypoints).
    """
    detector = cv2.SIFT_create()

    # Detect keypoints and compute descriptors on both grayscale images.
    train_keypoints, train_descriptor = detector.detectAndCompute(training_gray, None)
    test_keypoints, test_descriptor = detector.detectAndCompute(test_gray, None)

    # Draw the two keypoint visualisations on copies so the input stays intact.
    keypoints_without_size = np.copy(training_image)
    keypoints_with_size = np.copy(training_image)
    cv2.drawKeypoints(training_image, train_keypoints, keypoints_without_size,
                      color=(0, 255, 0))
    cv2.drawKeypoints(training_image, train_keypoints, keypoints_with_size,
                      flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)

    return (keypoints_with_size, keypoints_without_size, train_descriptor,
            train_keypoints, test_descriptor, test_keypoints)
fdb3e670f4df9283255b2e51daea9be043b18deb | 3,077 | py | Python | PygameFloatObjects/examples/circle_example.py | MrComboF10/PygameFloatObjects | e139a3b542d1ef2d54604e2769827c9da6d2cee3 | [
"MIT"
] | null | null | null | PygameFloatObjects/examples/circle_example.py | MrComboF10/PygameFloatObjects | e139a3b542d1ef2d54604e2769827c9da6d2cee3 | [
"MIT"
] | null | null | null | PygameFloatObjects/examples/circle_example.py | MrComboF10/PygameFloatObjects | e139a3b542d1ef2d54604e2769827c9da6d2cee3 | [
"MIT"
] | null | null | null | from PygameFloatObjects.objects import *
# circle_example()
| 25.01626 | 108 | 0.644459 | from PygameFloatObjects.objects import *
def resize_circle(screen, screen_size, float_circle, float_font, font_text, font_color, ratio):
    """Scale the circle and its caption by ``ratio`` and redraw the frame.

    Clears the screen, multiplies the circle radius and font size by the
    given ratio, then redraws the circle with its centered caption.
    """
    # Wipe the previous frame.
    screen.fill((0, 0, 0))

    # Grow/shrink the circle and redraw it.
    float_circle.set_float_radius(float_circle.get_float_radius() * ratio)
    float_circle.draw()

    # Grow/shrink the font to match and rebuild the pygame font object.
    float_font.set_float_size(float_font.get_float_size() * ratio)
    float_font.update()

    # Render the caption and center it on the circle.
    caption_surface = float_font.get_font().render(font_text, True, font_color)
    caption_rect = caption_surface.get_rect()
    caption_rect.center = float_circle.get_float_center()
    screen.blit(caption_surface, caption_rect)

    pygame.display.update()
def circle_example():
    """Interactive demo: left-click grows, right-click shrinks a FloatCircle.

    Opens an 800x800 pygame window with a magenta circle and a centered
    caption; runs an event loop until the window is closed.
    """
    LEFT_BUTTON = 1
    RIGHT_BUTTON = 3
    # create and init display
    pygame.init()
    screen_size = (800, 800)
    screen = pygame.display.set_mode(screen_size)
    # ========== create float rect ==========
    # initial circle radius
    radius = 300
    # circle center: middle of the window
    center = (screen_size[0] / 2, screen_size[1] / 2)
    # create object
    float_circle = FloatCircle(screen, (255, 0, 255), center, radius)
    # draw object
    float_circle.draw()
    # ========== create float font ==========
    # create initial font size
    font_size = 60
    # create font name
    font_name = "area"
    # create font content
    font_text = "L/R click on this window!"
    # create font color
    font_color = (0, 255, 255)
    # create object
    float_font = FloatFont(font_name, font_size)
    # create font surface
    font_surface = float_font.get_font().render(font_text, True, font_color)
    # get font surface rect
    font_surface_rect = font_surface.get_rect()
    # center the caption on the circle
    font_surface_rect.center = center
    # draw font surface
    screen.blit(font_surface, font_surface_rect)
    pygame.display.update()
    loop_exit = False
    # main loop
    while not loop_exit:
        for event in pygame.event.get():
            # quit loop if quit window button is pressed
            if event.type == pygame.QUIT:
                loop_exit = True
            # verify if mouse button is pressed
            if event.type == pygame.MOUSEBUTTONDOWN:
                # left button: enlarge circle and caption by 10%
                if event.button == LEFT_BUTTON:
                    # increase objects size
                    resize_circle(screen, screen_size, float_circle, float_font, font_text, font_color, 1.1)
                # right button: shrink circle and caption by 10%
                if event.button == RIGHT_BUTTON:
                    # decrease objects size
                    resize_circle(screen, screen_size, float_circle, float_font, font_text, font_color, 0.9)
    pygame.quit()
# circle_example()
| 2,967 | 0 | 46 |
fbaf88f5eb5f3ead2d85328be4d84297eecebdb4 | 1,666 | py | Python | examples/widgets/effectwidget3_advanced.py | Galland/kivy | 95a6bf279883d706f645e4629c16d5ee1038f0ec | [
"MIT"
] | 9 | 2016-09-03T07:20:01.000Z | 2020-05-21T14:44:48.000Z | examples/widgets/effectwidget3_advanced.py | Galland/kivy | 95a6bf279883d706f645e4629c16d5ee1038f0ec | [
"MIT"
] | 6 | 2020-01-31T18:04:48.000Z | 2021-06-05T10:53:55.000Z | examples/widgets/effectwidget3_advanced.py | Galland/kivy | 95a6bf279883d706f645e4629c16d5ee1038f0ec | [
"MIT"
] | 4 | 2016-09-10T15:27:54.000Z | 2020-03-27T22:05:31.000Z | '''
This example demonstrates creating and usind an AdvancedEffectBase. In
this case, we use it to efficiently pass the touch coordinates into the shader.
'''
from kivy.base import runTouchApp
from kivy.properties import ListProperty
from kivy.lang import Builder
from kivy.uix.effectwidget import EffectWidget, AdvancedEffectBase
effect_string = '''
uniform vec2 touch;
vec4 effect(vec4 color, sampler2D texture, vec2 tex_coords, vec2 coords)
{
vec2 distance = 0.025*(coords - touch);
float dist_mag = (distance.x*distance.x + distance.y*distance.y);
vec3 multiplier = vec3(abs(sin(dist_mag - time)));
return vec4(multiplier * color.xyz, 1.0);
}
'''
root = Builder.load_string('''
TouchWidget:
Button:
text: 'Some text!'
Image:
source: 'data/logo/kivy-icon-512.png'
allow_stretch: True
keep_ratio: False
''')
runTouchApp(root)
| 26.03125 | 79 | 0.67467 | '''
This example demonstrates creating and usind an AdvancedEffectBase. In
this case, we use it to efficiently pass the touch coordinates into the shader.
'''
from kivy.base import runTouchApp
from kivy.properties import ListProperty
from kivy.lang import Builder
from kivy.uix.effectwidget import EffectWidget, AdvancedEffectBase
effect_string = '''
uniform vec2 touch;
vec4 effect(vec4 color, sampler2D texture, vec2 tex_coords, vec2 coords)
{
vec2 distance = 0.025*(coords - touch);
float dist_mag = (distance.x*distance.x + distance.y*distance.y);
vec3 multiplier = vec3(abs(sin(dist_mag - time)));
return vec4(multiplier * color.xyz, 1.0);
}
'''
class TouchEffect(AdvancedEffectBase):
    """Shader effect whose ``touch`` uniform follows the pointer position."""

    # Last known touch position (x, y) in window coordinates.
    touch = ListProperty([0.0, 0.0])

    def __init__(self, *args, **kwargs):
        super(TouchEffect, self).__init__(*args, **kwargs)
        # Install the GLSL source, then seed the uniform before any touch.
        self.glsl = effect_string
        self.uniforms = {'touch': [0.0, 0.0]}

    def on_touch(self, *args, **kwargs):
        # Kivy property observer: mirror the coordinates into the uniform.
        self.uniforms['touch'] = list(map(float, self.touch))
class TouchWidget(EffectWidget):
    """EffectWidget that forwards touch positions into a single TouchEffect."""

    def __init__(self, *args, **kwargs):
        super(TouchWidget, self).__init__(*args, **kwargs)
        # One effect instance, kept as an attribute so touches can update it.
        self.effect = TouchEffect()
        self.effects = [self.effect]

    def on_touch_down(self, touch):
        super(TouchWidget, self).on_touch_down(touch)
        # Treat the initial press like a move so the shader reacts immediately.
        self.on_touch_move(touch)

    def on_touch_move(self, touch):
        # Mirror the pointer position into the effect's `touch` property.
        self.effect.touch = touch.pos
root = Builder.load_string('''
TouchWidget:
Button:
text: 'Some text!'
Image:
source: 'data/logo/kivy-icon-512.png'
allow_stretch: True
keep_ratio: False
''')
runTouchApp(root)
| 527 | 119 | 126 |
4d0ed4ed46b2081c3bacc309febdc41a216b030d | 718 | py | Python | tests/fraction_tests/test_lt.py | lycantropos/rithm | 61ae1614411ab0ce7feb403fdf93b71f49231ec1 | [
"MIT"
] | null | null | null | tests/fraction_tests/test_lt.py | lycantropos/rithm | 61ae1614411ab0ce7feb403fdf93b71f49231ec1 | [
"MIT"
] | null | null | null | tests/fraction_tests/test_lt.py | lycantropos/rithm | 61ae1614411ab0ce7feb403fdf93b71f49231ec1 | [
"MIT"
] | null | null | null | from typing import Union
from hypothesis import given
from tests.utils import (FractionWithBuiltin,
IntWithBuiltin,
equivalence)
from . import strategies
@given(strategies.fractions_with_builtins,
strategies.fractions_or_ints_with_builtins)
| 34.190476 | 77 | 0.651811 | from typing import Union
from hypothesis import given
from tests.utils import (FractionWithBuiltin,
IntWithBuiltin,
equivalence)
from . import strategies
@given(strategies.fractions_with_builtins,
       strategies.fractions_or_ints_with_builtins)
def test_connection_with_builtin(
        first_with_builtin: FractionWithBuiltin,
        second_with_builtin: Union[FractionWithBuiltin, IntWithBuiltin],
) -> None:
    """``<`` on Fractions must agree with ``<`` on their builtin equivalents."""
    fraction, fraction_builtin = first_with_builtin
    other, other_builtin = second_with_builtin

    assert equivalence(fraction < other, fraction_builtin < other_builtin)
d9cc4bd981391c19f9bf7d7e3f5eaa43030cb2ed | 12,142 | py | Python | exp0005/train1.py | hirune924/kaggle-HuBMAP | e4c2008378eb773db551cee52380bfccdf3a10fa | [
"Apache-2.0"
] | null | null | null | exp0005/train1.py | hirune924/kaggle-HuBMAP | e4c2008378eb773db551cee52380bfccdf3a10fa | [
"Apache-2.0"
] | null | null | null | exp0005/train1.py | hirune924/kaggle-HuBMAP | e4c2008378eb773db551cee52380bfccdf3a10fa | [
"Apache-2.0"
] | null | null | null | ####################
# Import Libraries
####################
import os
import sys
import cv2
import numpy as np
import pandas as pd
import pytorch_lightning as pl
from pytorch_lightning.metrics import Accuracy
from pytorch_lightning import loggers
from pytorch_lightning import seed_everything
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import LearningRateMonitor, ModelCheckpoint
import torch
from torch.utils.data import Dataset, DataLoader
from sklearn.model_selection import StratifiedKFold, KFold
import segmentation_models_pytorch as smp
from catalyst.contrib.nn.criterion.dice import DiceLoss
#from sklearn import model_selection
import albumentations as A
import timm
import glob
from omegaconf import OmegaConf
from sklearn.metrics import roc_auc_score
from tqdm import tqdm
from PIL import Image
cv2.setNumThreads(0)
####################
# Utils
####################
####################
# Config
####################
conf_dict = {'batch_size': 8,#32,
'epoch': 30,
'image_size': 128,#640,
'image_scale': 2,
'encoder_name': 'timm-efficientnet-b0',
'lr': 0.001,
'fold': 0,
'csv_path': '../input/extract-test/train.csv',
'data_dir': '../input/extract-test/size_2048',
'output_dir': './',
'use_mask_exist': True,
'trainer': {}}
conf_base = OmegaConf.create(conf_dict)
####################
# Dataset
####################
####################
# Data Module
####################
# OPTIONAL, called only on 1 GPU/machine(for download or tokenize)
# OPTIONAL, called for every GPU/machine
####################
# Lightning Module
####################
####################
# Train
####################
if __name__ == "__main__":
main()
| 37.591331 | 156 | 0.565228 | ####################
# Import Libraries
####################
import os
import sys
import cv2
import numpy as np
import pandas as pd
import pytorch_lightning as pl
from pytorch_lightning.metrics import Accuracy
from pytorch_lightning import loggers
from pytorch_lightning import seed_everything
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import LearningRateMonitor, ModelCheckpoint
import torch
from torch.utils.data import Dataset, DataLoader
from sklearn.model_selection import StratifiedKFold, KFold
import segmentation_models_pytorch as smp
from catalyst.contrib.nn.criterion.dice import DiceLoss
#from sklearn import model_selection
import albumentations as A
import timm
import glob
from omegaconf import OmegaConf
from sklearn.metrics import roc_auc_score
from tqdm import tqdm
from PIL import Image
cv2.setNumThreads(0)
####################
# Utils
####################
def rand_bbox(size, lam):
    """Sample a random CutMix bounding box.

    Args:
        size: tensor shape as (batch, channels, dim2, dim3); only the last
            two entries are used as the box bounds.
        lam: mixing coefficient in [0, 1]; the box side lengths scale with
            sqrt(1 - lam), so the (unclipped) box area is ~(1 - lam) of the image.

    Returns:
        (bbx1, bby1, bbx2, bby2) box corners, clipped to the image bounds.
    """
    W = size[2]
    H = size[3]
    cut_rat = np.sqrt(1. - lam)
    # Bug fix: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is the documented replacement.
    cut_w = int(W * cut_rat)
    cut_h = int(H * cut_rat)

    # uniform random box centre
    cx = np.random.randint(W)
    cy = np.random.randint(H)

    bbx1 = np.clip(cx - cut_w // 2, 0, W)
    bby1 = np.clip(cy - cut_h // 2, 0, H)
    bbx2 = np.clip(cx + cut_w // 2, 0, W)
    bby2 = np.clip(cy + cut_h // 2, 0, H)

    return bbx1, bby1, bbx2, bby2
####################
# Config
####################
conf_dict = {'batch_size': 8,#32,
'epoch': 30,
'image_size': 128,#640,
'image_scale': 2,
'encoder_name': 'timm-efficientnet-b0',
'lr': 0.001,
'fold': 0,
'csv_path': '../input/extract-test/train.csv',
'data_dir': '../input/extract-test/size_2048',
'output_dir': './',
'use_mask_exist': True,
'trainer': {}}
conf_base = OmegaConf.create(conf_dict)
####################
# Dataset
####################
class HuBMAPDataset(Dataset):
    """Dataset of (image, mask) tile pairs read from PNG paths in a dataframe.

    The dataframe must have ``image`` and ``mask`` columns holding file
    paths; ``transform`` is an albumentations-style callable applied to both.
    """

    def __init__(self, dataframe, transform=None):
        # Re-number rows so positional __getitem__ indices line up with .loc.
        self.data = dataframe.reset_index(drop=True)
        self.transform = transform

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        record = self.data.loc[idx]

        # [TODO] switch image loading to a pytorch-native reader
        img = np.asarray(Image.open(record["image"]))
        msk = np.asarray(Image.open(record["mask"]))

        augmented = self.transform(image=img, mask=msk)

        # HWC array -> CHW tensor; the mask gains a leading channel dim.
        img_tensor = torch.from_numpy(augmented["image"].transpose(2, 0, 1))
        msk_tensor = torch.from_numpy(augmented["mask"]).unsqueeze(dim=0).float()
        return img_tensor, msk_tensor
####################
# Data Module
####################
class HuBMAPDataModule(pl.LightningDataModule):
    """Lightning data module for the HuBMAP tile dataset.

    Splits the WSIs listed in ``conf.csv_path`` into 5 folds, expands each
    WSI id into its extracted tile images under ``conf.data_dir``, builds
    the train/validation datasets with their augmentation pipelines, and
    exposes the corresponding dataloaders.
    """
    def __init__(self, conf):
        super().__init__()
        self.conf = conf

    # OPTIONAL, called only on 1 GPU/machine (for download or tokenize)
    def prepare_data(self):
        pass

    # OPTIONAL, called for every GPU/machine
    def setup(self, stage=None):
        if stage == 'fit':
            # 5-fold split over the CSV rows; conf.fold selects the held-out fold.
            df = pd.read_csv(self.conf.csv_path)
            kf = KFold(n_splits=5, shuffle=True, random_state=2021)
            for fold, (train_index, val_index) in enumerate(kf.split(df.values)):
                df.loc[val_index, "fold"] = int(fold)
            df["fold"] = df["fold"].astype(int)
            train_df = df[df['fold'] != self.conf.fold]
            valid_df = df[df['fold'] == self.conf.fold]

            # Expand each WSI id into the list of its extracted tile PNGs.
            train_image_list = []
            for index, row in train_df.iterrows():
                train_image_list += glob.glob(os.path.join(self.conf.data_dir, "*" + row['id'] + "_image.png"))
            train_df = pd.DataFrame({'image': train_image_list})
            # "<prefix>_image.png" -> "<prefix>_mask.png"
            train_df['mask'] = train_df['image'].str[:-9]+'mask.png'

            # Bug fix: train_nomask_df was previously bound only inside the
            # use_mask_exist branch but read unconditionally below, raising
            # NameError when the flag was False. Default to an empty frame
            # with the same columns.
            train_nomask_df = train_df.iloc[0:0]
            if self.conf.use_mask_exist:
                # Separate tiles whose mask is entirely empty so they can be
                # sampled from a dedicated loader.
                print('check no mask image')
                remove_index = []
                for index, row in tqdm(train_df.iterrows()):
                    if np.sum(cv2.imread(row['mask'])) == 0:
                        remove_index.append(index)
                train_nomask_df = train_df.loc[remove_index]
                train_df = train_df.drop(remove_index)
                print(len(train_nomask_df))#1950
                print(len(train_df))#1108

            valid_image_list = []
            for index, row in valid_df.iterrows():
                valid_image_list += glob.glob(os.path.join(self.conf.data_dir, "*" + row['id'] + "_image.png"))
            valid_df = pd.DataFrame({'image': valid_image_list})
            valid_df['mask'] = valid_df['image'].str[:-9]+'mask.png'

            # Heavy geometric/photometric augmentation for training.
            train_transform = A.Compose([
                        A.RandomCrop(height=self.conf.image_size*self.conf.image_scale, width=self.conf.image_size*self.conf.image_scale, p=1),
                        A.Resize(height=self.conf.image_size, width=self.conf.image_size, p=1),
                        A.Flip(p=0.5),
                        A.ShiftScaleRotate(p=0.5),
                        A.HueSaturationValue(hue_shift_limit=30, sat_shift_limit=30, val_shift_limit=30, p=0.7),
                        A.RandomBrightnessContrast(brightness_limit=(-0.4,0.4), contrast_limit=(-0.4, 0.4), p=0.7),
                        A.CLAHE(clip_limit=(1,4), p=0.5),
                        A.OneOf([
                            A.OpticalDistortion(distort_limit=1.0),
                            A.GridDistortion(num_steps=5, distort_limit=1.),
                            A.ElasticTransform(alpha=3),
                        ], p=1.0),
                        A.OneOf([
                            A.Blur(blur_limit=15),
                            A.GaussianBlur(blur_limit=(3, 15)),
                            A.MotionBlur(blur_limit=15),
                            A.MedianBlur(blur_limit=15),
                        ], p=1.0),
                        A.OneOf([
                            A.GaussNoise(var_limit=[10, 50]),
                            A.GlassBlur(max_delta=8),
                            A.Posterize(num_bits=4),
                        ], p=0.7),
                        A.OneOf([
                            A.JpegCompression(quality_lower=95, quality_upper=100, p=0.50),
                            A.Downscale(scale_min=0.75, scale_max=0.95),
                        ], p=1.0),
                        A.MaskDropout(image_fill_value='inpaint', mask_fill_value=1),
                        A.IAAPiecewiseAffine(p=0.5),
                        A.IAASharpen(p=0.5),
                        A.Cutout(max_h_size=int(self.conf.image_size * 0.1), max_w_size=int(self.conf.image_size * 0.1), num_holes=5, p=0.5),
                        A.Normalize()
                        ])
            # Validation only resizes the full tile and normalises it.
            valid_transform = A.Compose([
                        A.Resize(height=int(2048/self.conf.image_scale), width=int(2048/self.conf.image_scale), interpolation=1, always_apply=False, p=1.0),
                        A.Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225), max_pixel_value=255.0, always_apply=False, p=1.0)
                        ])

            self.train_dataset = HuBMAPDataset(train_df, transform=train_transform)
            self.train_nomask_dataset = HuBMAPDataset(train_nomask_df, transform=train_transform)
            self.valid_dataset = HuBMAPDataset(valid_df, transform=valid_transform)

        elif stage == 'test':
            pass

    def train_dataloader(self):
        # Two loaders: tiles with glomeruli and empty-mask tiles; the trainer
        # is configured with multiple_trainloader_mode='min_size'.
        #mask_batch = int(self.conf.batch_size*0.8)
        #no_mask_batch = self.conf.batch_size - int(self.conf.batch_size*0.8)
        return [DataLoader(self.train_dataset, batch_size=self.conf.batch_size, num_workers=4, shuffle=True, pin_memory=True, drop_last=True),
                DataLoader(self.train_nomask_dataset, batch_size=self.conf.batch_size, num_workers=4, shuffle=True, pin_memory=True, drop_last=True)]

    def val_dataloader(self):
        return DataLoader(self.valid_dataset, batch_size=self.conf.batch_size, num_workers=4, shuffle=False, pin_memory=True, drop_last=True)

    def test_dataloader(self):
        # NOTE(review): self.test_dataset is never assigned in setup() --
        # calling this would raise AttributeError; confirm test flow is unused.
        return DataLoader(self.test_dataset, batch_size=self.conf.batch_size, num_workers=4, shuffle=False, pin_memory=True, drop_last=False)
####################
# Lightning Module
####################
class LitSystem(pl.LightningModule):
    """U-Net segmentation model with CutMix + noise-mixing training.

    Training loss is Dice + BCE-with-logits on mixed batches; validation
    logs plain BCE loss and (1 - Dice loss) as the 'val_dice' score.
    """
    def __init__(self, conf):
        super().__init__()
        #self.conf = conf
        self.save_hyperparameters(conf)
        self.model = smp.Unet(encoder_name=conf.encoder_name, in_channels=3, classes=1)
        self.bceloss = torch.nn.BCEWithLogitsLoss()
        self.diceloss = DiceLoss()
        #self.diceloss = smp.utils.losses.DiceLoss(activation='sigmoid')
        # Used in validation to report a dice score (1 - dice loss).
        self.dice = smp.utils.losses.DiceLoss(activation='sigmoid')
    def forward(self, x):
        # use forward for inference/predictions
        return self.model(x)
    def configure_optimizers(self):
        # Adam with cosine annealing over the full training run.
        optimizer = torch.optim.Adam(self.model.parameters(), lr=self.hparams.lr)
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=self.hparams.epoch)
        return [optimizer], [scheduler]
    def training_step(self, batch, batch_idx):
        # batch is a pair of loaders' outputs: (masked tiles, no-mask tiles).
        x, y = batch[0]
        x2, y2 = batch[1]
        #x, y = torch.cat([x1, x2], dim=0), torch.cat([y1, y2], dim=0)
        # cutmix: paste a random box from a shuffled copy of the batch into
        # both images and masks.
        lam = np.random.beta(0.5, 0.5)
        rand_index = torch.randperm(x.size()[0]).type_as(x).long()
        bbx1, bby1, bbx2, bby2 = rand_bbox(x.size(), lam)
        x[:, :, bbx1:bbx2, bby1:bby2] = x[rand_index, :, bbx1:bbx2, bby1:bby2]
        y[:, :, bbx1:bbx2, bby1:bby2] = y[rand_index, :, bbx1:bbx2, bby1:bby2]
        # mixnoise: blend in up to 25% of a no-mask tile as "noise"
        # (lam is drawn in [0.75, 1.0]); the target mask is left unchanged.
        #lam = np.minimum(np.random.beta(1.0, 1.0), 0.25)
        lam = np.random.beta(1.0, 1.0)/4 + 0.75
        x = lam * x + (1 - lam) * x2
        #y = lam * y + (1 - lam) * y2
        y_hat = self.model(x)
        loss = self.diceloss(y_hat, y) + self.bceloss(y_hat, y)
        #loss = self.bceloss(y_hat, y)
        self.log('train_loss', loss, on_epoch=True)
        return loss
    def validation_step(self, batch, batch_idx):
        x, y = batch
        y_hat = self.model(x)
        # Validation uses BCE only (no dice term), unlike training.
        #loss = self.diceloss(y_hat, y) + self.bceloss(y_hat, y)
        loss = self.bceloss(y_hat, y)
        dice = 1-self.dice(y_hat, y)
        return {
            "val_loss": loss,
            "val_dice": dice
        }
    def validation_epoch_end(self, outputs):
        # Average the per-step metrics over the whole validation epoch.
        avg_val_loss = torch.stack([x["val_loss"] for x in outputs]).mean()
        avg_val_dice = torch.stack([x["val_dice"] for x in outputs]).mean()
        self.log('val_loss', avg_val_loss)
        self.log('val_dice', avg_val_dice)
####################
# Train
####################
def main():
    """Train the HuBMAP segmentation model.

    Merges CLI overrides into the base config, seeds all RNGs, wires up
    loggers/checkpointing, and runs a mixed-precision Lightning fit.
    """
    # CLI arguments (key=value) override the defaults in conf_base.
    conf_cli = OmegaConf.from_cli()
    conf = OmegaConf.merge(conf_base, conf_cli)
    print(OmegaConf.to_yaml(conf))
    seed_everything(2021)
    tb_logger = loggers.TensorBoardLogger(save_dir=os.path.join(conf.output_dir, 'tb_log/'))
    csv_logger = loggers.CSVLogger(save_dir=os.path.join(conf.output_dir, 'csv_log/'))
    lr_monitor = LearningRateMonitor(logging_interval='step')
    # Keep the 5 best checkpoints by validation dice, plus the last epoch.
    checkpoint_callback = ModelCheckpoint(dirpath=os.path.join(conf.output_dir, 'ckpt/'), monitor='val_dice',
                                          save_last=True, save_top_k=5, mode='max',
                                          save_weights_only=True, filename='{epoch}-{val_dice:.5f}')
    data_module = HuBMAPDataModule(conf)
    lit_model = LitSystem(conf)
    trainer = Trainer(
        logger=[tb_logger, csv_logger],
        callbacks=[lr_monitor, checkpoint_callback],
        max_epochs=conf.epoch,
        gpus=-1,
        amp_backend='native',
        amp_level='O2',
        precision=16,
        num_sanity_val_steps=10,
        val_check_interval=1.0,
        # Stop each epoch when the shorter of the two train loaders runs out.
        multiple_trainloader_mode='min_size',
        **conf.trainer
        )
    trainer.fit(lit_model, data_module)
if __name__ == "__main__":
    main()
| 9,716 | 49 | 532 |
4011ca77bfb0d17039209384edff99a24db49296 | 732 | py | Python | hw1/metrics/average_session_time/collect_counts_to_file/mapper.py | alexsyrom/shad_bigdata | e22ad5e95cfb8a606d4f3727f731b340e39cdecd | [
"MIT"
] | null | null | null | hw1/metrics/average_session_time/collect_counts_to_file/mapper.py | alexsyrom/shad_bigdata | e22ad5e95cfb8a606d4f3727f731b340e39cdecd | [
"MIT"
] | null | null | null | hw1/metrics/average_session_time/collect_counts_to_file/mapper.py | alexsyrom/shad_bigdata | e22ad5e95cfb8a606d4f3727f731b340e39cdecd | [
"MIT"
] | null | null | null | #!/shared/anaconda/bin/python
# encoding=utf8
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from builtins import bytes, chr
from builtins import str
from builtins import dict
from builtins import object
from builtins import range
from builtins import map
from builtins import zip
from builtins import filter
import sys
import re
import codecs
reload(sys)
sys.setdefaultencoding('utf8')
sys.stdin = codecs.getreader('utf8')(sys.stdin, errors='ignore')
session_time = 0
session_count = 0
for line in sys.stdin:
count, time = list(map(int, line.split('\t')))
session_count += count
session_time += time
print(session_count, session_time, sep='\t')
| 20.914286 | 64 | 0.773224 | #!/shared/anaconda/bin/python
# encoding=utf8
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from builtins import bytes, chr
from builtins import str
from builtins import dict
from builtins import object
from builtins import range
from builtins import map
from builtins import zip
from builtins import filter
import sys
import re
import codecs
reload(sys)
sys.setdefaultencoding('utf8')
sys.stdin = codecs.getreader('utf8')(sys.stdin, errors='ignore')
session_time = 0
session_count = 0
for line in sys.stdin:
count, time = list(map(int, line.split('\t')))
session_count += count
session_time += time
print(session_count, session_time, sep='\t')
| 0 | 0 | 0 |
836f46b8249549cfe9db555c9ba7b0e246a7f47d | 2,892 | py | Python | error_reporting/tests/unit/test__gapic.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 2 | 2021-11-26T07:08:43.000Z | 2022-03-07T20:20:04.000Z | error_reporting/tests/unit/test__gapic.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | null | null | null | error_reporting/tests/unit/test__gapic.py | DaveCheez/google-cloud-python | fc03d4d41f13e9d13db7206438163b3a471fdabd | [
"Apache-2.0"
] | 1 | 2020-04-14T10:47:41.000Z | 2020-04-14T10:47:41.000Z | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
| 36.607595 | 87 | 0.730636 | # Copyright 2017 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
class Test_make_report_error_api(unittest.TestCase):
    """Unit tests for the ``make_report_error_api`` factory."""

    @staticmethod
    def _call_fut(client):
        # Late import so the test module loads even if the package is absent.
        from google.cloud.error_reporting._gapic import make_report_error_api

        return make_report_error_api(client)

    def test_make_report_error_api(self):
        client = mock.Mock(spec=["project", "_credentials", "_client_info"])

        # Patch the GAPIC transport class so no real client is constructed.
        patch = mock.patch(
            "google.cloud.errorreporting_v1beta1."
            "gapic.report_errors_service_client.ReportErrorsServiceClient"
        )
        with patch as patched:
            wrapped_api = self._call_fut(client)

        # The wrapper must carry the project and the patched GAPIC instance.
        self.assertIs(wrapped_api._project, client.project)
        self.assertIs(wrapped_api._gapic_api, patched.return_value)
        patched.assert_called_once_with(
            credentials=client._credentials, client_info=client._client_info
        )
class Test_ErrorReportingGapicApi(unittest.TestCase):
    """Unit tests for the ``_ErrorReportingGapicApi`` wrapper."""

    PROJECT = "PROJECT"

    def _make_one(self, gapic_api, project):
        # Late import so the test module loads even if the package is absent.
        from google.cloud.error_reporting._gapic import _ErrorReportingGapicApi

        return _ErrorReportingGapicApi(gapic_api, project)

    def test_constructor(self):
        gapic_api = mock.Mock(spec=[])

        wrapper = self._make_one(gapic_api, self.PROJECT)

        self.assertEqual(wrapper._project, self.PROJECT)
        self.assertEqual(wrapper._gapic_api, gapic_api)

    def test_report_error_event(self):
        from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2

        gapic_api = mock.Mock(spec=["project_path", "report_error_event"])
        wrapper = self._make_one(gapic_api, self.PROJECT)

        payload = {"message": "The cabs are here."}
        wrapper.report_error_event(payload)

        # The project must be resolved through the GAPIC path helper, and the
        # payload forwarded as a ReportedErrorEvent protobuf.
        gapic_api.project_path.assert_called_once_with(self.PROJECT)
        expected_pb = report_errors_service_pb2.ReportedErrorEvent(
            message=payload["message"]
        )
        gapic_api.report_error_event.assert_called_once_with(
            gapic_api.project_path.return_value, expected_pb
        )
| 2,000 | 240 | 46 |
03356f01967a8b617450e2beff23d2006d513315 | 2,427 | py | Python | tests/test_signal.py | julienbourdeau/algoliasearch-django | 885c9f1af4a70ce544709e3968120d2ab839df68 | [
"MIT"
] | null | null | null | tests/test_signal.py | julienbourdeau/algoliasearch-django | 885c9f1af4a70ce544709e3968120d2ab839df68 | [
"MIT"
] | null | null | null | tests/test_signal.py | julienbourdeau/algoliasearch-django | 885c9f1af4a70ce544709e3968120d2ab839df68 | [
"MIT"
] | null | null | null | import time
from django.test import TestCase
from algoliasearch_django import algolia_engine
from algoliasearch_django import get_adapter
from algoliasearch_django import register
from algoliasearch_django import unregister
from algoliasearch_django import raw_search
from algoliasearch_django import clear_index
from algoliasearch_django import update_records
from .models import Website
| 36.772727 | 79 | 0.685208 | import time
from django.test import TestCase
from algoliasearch_django import algolia_engine
from algoliasearch_django import get_adapter
from algoliasearch_django import register
from algoliasearch_django import unregister
from algoliasearch_django import raw_search
from algoliasearch_django import clear_index
from algoliasearch_django import update_records
from .models import Website
class SignalTestCase(TestCase):
    """Integration tests: Django model save/delete signals must sync Algolia.

    Talks to a live Algolia index; the sleep(5) calls give the service time
    to make indexing operations visible to search.
    """
    @classmethod
    def setUpClass(cls):
        # Hook the Website model into the Algolia engine for the whole case.
        register(Website)
    @classmethod
    def tearDownClass(cls):
        # Remove the remote index, then detach the model from the engine.
        algolia_engine.client.delete_index(get_adapter(Website).index_name)
        unregister(Website)
    def tearDown(self):
        # Empty the index between tests so hit counts start from zero.
        clear_index(Website)
    def test_save_signal(self):
        Website.objects.create(name='Algolia', url='https://www.algolia.com')
        Website.objects.create(name='Google', url='https://www.google.com')
        Website.objects.create(name='Facebook', url='https://www.facebook.com')
        # Wait for Algolia to index the new records.
        time.sleep(5)
        self.assertEqual(raw_search(Website)['nbHits'], 3)
    def test_delete_signal(self):
        Website.objects.create(name='Algolia', url='https://www.algolia.com')
        Website.objects.create(name='Google', url='https://www.google.com')
        Website.objects.create(name='Facebook', url='https://www.facebook.com')
        Website.objects.get(name='Algolia').delete()
        Website.objects.get(name='Facebook').delete()
        # Wait for the deletions to propagate; only Google should remain.
        time.sleep(5)
        result = raw_search(Website)
        self.assertEqual(result['nbHits'], 1)
        self.assertEqual(result['hits'][0]['name'], 'Google')
    def test_update_records(self):
        Website.objects.create(name='Algolia', url='https://www.algolia.com')
        Website.objects.create(name='Google', url='https://www.google.com')
        Website.objects.create(name='Facebook', url='https://www.facebook.com')
        Website.objects.create(name='Facebook', url='https://www.facebook.fr')
        Website.objects.create(name='Facebook', url='https://fb.com')
        # Bulk-update the index for all Facebook rows, then mirror the same
        # change in the database (queryset.update() bypasses save signals).
        qs = Website.objects.filter(name='Facebook')
        update_records(Website, qs, url='https://facebook.com')
        qs.update(url='https://facebook.com')
        time.sleep(5)
        result = raw_search(Website, 'Facebook')
        self.assertEqual(result['nbHits'], qs.count())
        for res, url in zip(result['hits'], qs.values_list('url', flat=True)):
            self.assertEqual(res['url'], url)
| 1,806 | 205 | 23 |
de5b472342d358feb4e1a695df12e73f9a4245b9 | 5,403 | py | Python | pyNTCIREVAL/metrics/ncu.py | mpkato/pyNTCIREVAL | db0b342fbd385936a8a5db31533fb020743c3137 | [
"MIT"
] | 20 | 2016-12-19T10:19:17.000Z | 2021-07-03T18:38:25.000Z | pyNTCIREVAL/metrics/ncu.py | mpkato/pyNTCIREVAL | db0b342fbd385936a8a5db31533fb020743c3137 | [
"MIT"
] | 3 | 2017-12-24T01:27:27.000Z | 2018-01-20T14:54:21.000Z | pyNTCIREVAL/metrics/ncu.py | mpkato/pyNTCIREVAL | db0b342fbd385936a8a5db31533fb020743c3137 | [
"MIT"
] | 1 | 2017-02-25T02:34:40.000Z | 2017-02-25T02:34:40.000Z | from .grade_metric import GradeMetric
import types
class NCU(GradeMetric):
'''
NCU (Normalised Cumulative Utility)
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
beta: a parameter for blended ratio
sp: a stop probability function. There are three functions in our
implementation, uniform (p_u), graded-uniform (p_gu), rank-biased (p_rb).
'''
def gain(self, idx):
'''
Blended ratio
'''
rank = self.rank(idx)
g = sum([self._grade(i) for i in range(rank)])
ig = sum(self.ideal_grade_ranked_list[:rank])
return (self.relnum + self.beta * g) / (rank + self.beta * ig)
def p_u():
'''
Uniform stop probability function
where the stop probability is the same for all the relevant documents,
i.e. 1/(# relevant documents).
'''
return func
def p_gu(stops):
'''
Graded-uniform stop probability function
where the stop probability is defined for each relevance level.
Args:
stops: a list of the stop probability for each relevance level (except 0 level).
'''
return func
def p_rb(gamma):
'''
Rank-biased stop probability function
where the stop probability increases
as the number of preceding relevant documents increase.
Args:
gamma: a parameter that controls the gain of the stop probability
when a relevant document is observed.
'''
return func
class NCUguP(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Graded-uniform and NU = Precision
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
stops: a list of the stop probability for each relevance level (except 0 level).
'''
class NCUguBR(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Graded-uniform and NU = Blended ratio
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
stops: a list of the stop probability for each relevance level (except 0 level).
beta: a parameter for blended ratio
'''
class NCUrbP(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Rank-biased and NU = Precision
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
gamma: a parameter that controls the gain of the stop probability
when a relevant document is observed.
'''
class NCUrbBR(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Rank-biased and NU = Blended ratio
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
gamma: a parameter that controls the gain of the stop probability
when a relevant document is observed.
beta: a parameter for blended ratio
'''
| 36.02 | 120 | 0.642421 | from .grade_metric import GradeMetric
import types
class NCU(GradeMetric):
'''
NCU (Normalised Cumulative Utility)
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
beta: a parameter for blended ratio
sp: a stop probability function. There are three functions in our
implementation, uniform (p_u), graded-uniform (p_gu), rank-biased (p_rb).
'''
def __init__(self, xrelnum, grades, beta, sp):
super(NCU, self).__init__(xrelnum, grades)
self.beta = beta
self.sp = types.MethodType(sp, self)
self.ideal_grade_ranked_list = self._get_ideal_grade_ranked_list()
def gain(self, idx):
'''
Blended ratio
'''
rank = self.rank(idx)
g = sum([self._grade(i) for i in range(rank)])
ig = sum(self.ideal_grade_ranked_list[:rank])
return (self.relnum + self.beta * g) / (rank + self.beta * ig)
def discount(self, idx):
return self.sp(idx)
def p_u():
'''
Uniform stop probability function
where the stop probability is the same for all the relevant documents,
i.e. 1/(# relevant documents).
'''
def func(self, idx):
if self._is_relevant(idx):
jrelnum = self.jrelnum
if self.cutoff:
jrelnum = min([jrelnum, self.cutoff])
return 1.0 / jrelnum
else:
return 0.0
return func
def p_gu(stops):
'''
Graded-uniform stop probability function
where the stop probability is defined for each relevance level.
Args:
stops: a list of the stop probability for each relevance level (except 0 level).
'''
def func(self, idx):
level = self._level(idx)
if level > 0:
return stops[level-1]\
/ sum([num * stops[l-1]
for l, num in enumerate(self.xrelnum) if l > 0])
else:
return 0.0
return func
def p_rb(gamma):
'''
Rank-biased stop probability function
where the stop probability increases
as the number of preceding relevant documents increase.
Args:
gamma: a parameter that controls the gain of the stop probability
when a relevant document is observed.
'''
def func(self, idx):
if self._is_relevant(idx):
return gamma ** (self.relnum - 1)\
/ sum([gamma ** i for i in range(self.jrelnum)])
else:
return 0.0
return func
class NCUguP(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Graded-uniform and NU = Precision
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
stops: a list of the stop probability for each relevance level (except 0 level).
'''
def __init__(self, xrelnum, grades, stops):
super(NCUguP, self).__init__(xrelnum, grades, 0.0, p_gu(stops))
class NCUguBR(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Graded-uniform and NU = Blended ratio
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
stops: a list of the stop probability for each relevance level (except 0 level).
beta: a parameter for blended ratio
'''
def __init__(self, xrelnum, grades, stops, beta):
super(NCUguBR, self).__init__(xrelnum, grades, beta, p_gu(stops))
class NCUrbP(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Rank-biased and NU = Precision
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
gamma: a parameter that controls the gain of the stop probability
when a relevant document is observed.
'''
def __init__(self, xrelnum, grades, gamma):
super(NCUrbP, self).__init__(xrelnum, grades, 0.0, p_rb(gamma))
class NCUrbBR(NCU):
'''
NCU (Normalised Cumulative Utility)
with Pr = Rank-biased and NU = Blended ratio
See Sakai. T. and Robertson, S.: Modelling A User Population for Designing Information Retrieval Metrics, EVIA 2008.
Args:
xrelnum: the number of judged X-rel docs (including 0-rel=judged nonrel).
grades: a list of the grade for each relevance level (except level 0).
gamma: a parameter that controls the gain of the stop probability
when a relevant document is observed.
beta: a parameter for blended ratio
'''
def __init__(self, xrelnum, grades, gamma, beta):
super(NCUrbBR, self).__init__(xrelnum, grades, beta, p_rb(gamma))
| 1,283 | 0 | 235 |
7eb21d816b491f2d24f46f36844fc9ee8f7a718e | 1,833 | py | Python | lambda/automations/remove_tag.py | hyperglance/aws-rule-automations | b770b2687ffa04a6bad857f3ed6505cf57985022 | [
"MIT"
] | null | null | null | lambda/automations/remove_tag.py | hyperglance/aws-rule-automations | b770b2687ffa04a6bad857f3ed6505cf57985022 | [
"MIT"
] | null | null | null | lambda/automations/remove_tag.py | hyperglance/aws-rule-automations | b770b2687ffa04a6bad857f3ed6505cf57985022 | [
"MIT"
] | 3 | 2021-06-14T04:31:42.000Z | 2021-12-05T23:56:37.000Z | """
This automation attempts to remove a tag for a resource, identified as above or below the configured threshold
by Hyperglance Rule(s)
This automation will operate across accounts, where the appropriate IAM Role exists.
"""
import logging
import processing.automation_utils as utils
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
def hyperglance_automation(boto_session, resource: dict, automation_params=''):
""" Attempts to Tag a resource
Parameters
----------
boto_session : object
The boto session to use to invoke the automation
resource: dict
Dict of Resource attributes touse in the automation
automation_params : str
Automation parameters passed from the Hyperglance UI
"""
key = automation_params.get('Key')
tags = resource['tags']
if not key in tags.keys():
logger.error(tags)
logger.error("tag " + key + " is not present - aborting automation")
return
utils.remove_tag(boto_session, key, resource)
| 24.44 | 110 | 0.558647 | """
This automation attempts to remove a tag for a resource, identified as above or below the configured threshold
by Hyperglance Rule(s)
This automation will operate across accounts, where the appropriate IAM Role exists.
"""
import logging
import processing.automation_utils as utils
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
def hyperglance_automation(boto_session, resource: dict, automation_params=''):
""" Attempts to Tag a resource
Parameters
----------
boto_session : object
The boto session to use to invoke the automation
resource: dict
Dict of Resource attributes touse in the automation
automation_params : str
Automation parameters passed from the Hyperglance UI
"""
key = automation_params.get('Key')
tags = resource['tags']
if not key in tags.keys():
logger.error(tags)
logger.error("tag " + key + " is not present - aborting automation")
return
utils.remove_tag(boto_session, key, resource)
def info() -> dict:
INFO = {
"displayName": "Remove Tag",
"description": "Removes a tag from a resource",
"resourceTypes": [
"Security Group",
"EC2 Instance",
"AMI",
"Internet Gateway",
"Network ACL",
"Network Interface",
"Route Table",
"EBS Snapshot",
"EBS Volume",
"Subnet",
"VPC",
"SQS Queue",
"SNS Topic"
],
"params": [
{
"name": "Key",
"type": "string",
"default": ""
}
],
"permissions": [
"ec2:DeleteTags",
"sns:UntagResource",
"sqs:UntagQueue"
]
}
return INFO
| 786 | 0 | 23 |
31e3b04e581943e1a356d811ce94a841c6eb327f | 923 | py | Python | Helper.py | davidmwilson/sample-python-Mar2018 | 1eec7442668fb60a321b879ea6167cdcb44e8ea6 | [
"MIT"
] | null | null | null | Helper.py | davidmwilson/sample-python-Mar2018 | 1eec7442668fb60a321b879ea6167cdcb44e8ea6 | [
"MIT"
] | null | null | null | Helper.py | davidmwilson/sample-python-Mar2018 | 1eec7442668fb60a321b879ea6167cdcb44e8ea6 | [
"MIT"
] | null | null | null | '''
This file contains helper functions
'''
from datetime import timedelta
def date_range(start_date, end_date):
'''
Name: dateRange
Input: start date
end date
Output: list of dates
Purpose: generate list of dates between start and end
'''
# now return list of dates
for date_element in range(int((end_date-start_date).days)):
yield start_date+timedelta(date_element)
def format_cmd_line(cmdline, day):
'''
Name: formatCmdLine
Input: raw command line
date
Output: final command line
Purpose: take a cmd line template and substitute in dates
according to handful of predefined formats
'''
# yyyymmdd only currently supported format
cmdline = cmdline.replace("{yyyymmdd}", day.strftime("%Y%m%d"))
# more below...
# ... sometime
return cmdline
| 27.969697 | 67 | 0.617551 | '''
This file contains helper functions
'''
from datetime import timedelta
def date_range(start_date, end_date):
'''
Name: dateRange
Input: start date
end date
Output: list of dates
Purpose: generate list of dates between start and end
'''
# now return list of dates
for date_element in range(int((end_date-start_date).days)):
yield start_date+timedelta(date_element)
def format_cmd_line(cmdline, day):
'''
Name: formatCmdLine
Input: raw command line
date
Output: final command line
Purpose: take a cmd line template and substitute in dates
according to handful of predefined formats
'''
# yyyymmdd only currently supported format
cmdline = cmdline.replace("{yyyymmdd}", day.strftime("%Y%m%d"))
# more below...
# ... sometime
return cmdline
| 0 | 0 | 0 |
af36d94d0e2f8841d635ae23a80d8dd35597ddbb | 1,599 | py | Python | junn/io/file_lists.py | modsim/junn | a40423b98c6a3739dd0b2ba02d546a5db91f9215 | [
"BSD-2-Clause"
] | null | null | null | junn/io/file_lists.py | modsim/junn | a40423b98c6a3739dd0b2ba02d546a5db91f9215 | [
"BSD-2-Clause"
] | null | null | null | junn/io/file_lists.py | modsim/junn | a40423b98c6a3739dd0b2ba02d546a5db91f9215 | [
"BSD-2-Clause"
] | null | null | null | """File list helper functions."""
import re
def generate_glob_and_replacer(search, replace):
"""
Prepare a wildcard pattern for globbing and replacing.
:param search:
:param replace:
:return:
"""
replace = generate_replacer(search, replace)
glob_pattern = prepare_for_regex(search, task='glob')
return glob_pattern, replace
def generate_replacer(search, replace):
"""
Prepare a wildcard pattern to be used a replacement regex.
:param search:
:param replace:
:return:
"""
search = prepare_for_regex(search)
replace = prepare_for_regex(replace, task='replace')
search = re.compile(search)
return _inner
def prepare_for_regex(input_, task='search'):
"""
Prepare a wildcard pattern for specific use.
:param input_:
:param task:
:return:
"""
splits = input_.split('{}')
process = _prepare_for_regex_get_processor(task)
return (
''.join(
process(n, split)
for n, split in zip([n for n, _ in enumerate(splits)][1:], splits)
)
+ splits[-1]
)
| 20.5 | 78 | 0.602251 | """File list helper functions."""
import re
def generate_glob_and_replacer(search, replace):
"""
Prepare a wildcard pattern for globbing and replacing.
:param search:
:param replace:
:return:
"""
replace = generate_replacer(search, replace)
glob_pattern = prepare_for_regex(search, task='glob')
return glob_pattern, replace
def generate_replacer(search, replace):
"""
Prepare a wildcard pattern to be used a replacement regex.
:param search:
:param replace:
:return:
"""
search = prepare_for_regex(search)
replace = prepare_for_regex(replace, task='replace')
search = re.compile(search)
def _inner(pattern):
return search.sub(replace, pattern)
return _inner
def _prepare_for_regex_get_processor(task):
if task == 'search':
def process(_, split):
return re.escape(split) + '(.*)'
elif task == 'replace':
def process(n, split):
return split + '\\%d' % (n,)
elif task == 'glob':
def process(_, split):
return split + '*'
else:
raise RuntimeError("Unsupported task.")
return process
def prepare_for_regex(input_, task='search'):
"""
Prepare a wildcard pattern for specific use.
:param input_:
:param task:
:return:
"""
splits = input_.split('{}')
process = _prepare_for_regex_get_processor(task)
return (
''.join(
process(n, split)
for n, split in zip([n for n, _ in enumerate(splits)][1:], splits)
)
+ splits[-1]
)
| 437 | 0 | 50 |
c26b922a65f29ea9d7b6686a72d16bf849a4f4ee | 1,625 | py | Python | money_bot/utils/markups.py | summer-school-2019/Money-bot | c74c4abb8a496a6423d9916e5bf8aeec23d191aa | [
"MIT"
] | 3 | 2019-08-19T13:01:12.000Z | 2019-10-11T06:38:03.000Z | money_bot/utils/markups.py | summer-school-2019/money-bot | c74c4abb8a496a6423d9916e5bf8aeec23d191aa | [
"MIT"
] | 27 | 2019-08-18T11:41:35.000Z | 2019-08-22T08:45:40.000Z | money_bot/utils/markups.py | summer-school-2019/Money-bot | c74c4abb8a496a6423d9916e5bf8aeec23d191aa | [
"MIT"
] | 1 | 2021-12-16T23:14:42.000Z | 2021-12-16T23:14:42.000Z | from aiogram import types
from aiogram.utils.callback_data import CallbackData
from money_bot.utils.strings import ADD_TASKS_MENU_TEXT, EARN_MENU_TEXT, MAIN_MENU_BUTTONS_LABELS
earn_factory = CallbackData("earn", "skip")
add_tasks_factory = CallbackData("add_tasks", "data")
| 36.931818 | 115 | 0.756308 | from aiogram import types
from aiogram.utils.callback_data import CallbackData
from money_bot.utils.strings import ADD_TASKS_MENU_TEXT, EARN_MENU_TEXT, MAIN_MENU_BUTTONS_LABELS
earn_factory = CallbackData("earn", "skip")
add_tasks_factory = CallbackData("add_tasks", "data")
def get_main_menu_markup():
btns_text = [
MAIN_MENU_BUTTONS_LABELS["earn"],
MAIN_MENU_BUTTONS_LABELS["play"],
MAIN_MENU_BUTTONS_LABELS["balance"],
MAIN_MENU_BUTTONS_LABELS["invite"],
MAIN_MENU_BUTTONS_LABELS["withdrawal"],
MAIN_MENU_BUTTONS_LABELS["rules"],
MAIN_MENU_BUTTONS_LABELS["add_tasks"],
]
keyboard_markup = types.ReplyKeyboardMarkup(row_width=2, resize_keyboard=True)
keyboard_markup.add(*[types.KeyboardButton(btn_text) for btn_text in btns_text])
return keyboard_markup
def get_earn_markup(task):
return types.InlineKeyboardMarkup(row_width=1).add(
types.InlineKeyboardButton(EARN_MENU_TEXT["subscribe"], url=task.url),
types.InlineKeyboardButton(EARN_MENU_TEXT["get_award"], callback_data=earn_factory.new(skip=0)),
types.InlineKeyboardButton(EARN_MENU_TEXT["skip_task"], callback_data=earn_factory.new(skip=1)),
)
def get_next_task_markup():
return types.InlineKeyboardMarkup(row_width=1).add(
types.InlineKeyboardButton(EARN_MENU_TEXT["more_tasks"], callback_data=earn_factory.new(skip=2))
)
def get_check_admin_markup():
return types.InlineKeyboardMarkup(row_width=1).add(
types.InlineKeyboardButton(ADD_TASKS_MENU_TEXT["check_admin"], callback_data=add_tasks_factory.new(data=0))
)
| 1,252 | 0 | 92 |
142d22661732ca16a783f0d30d8e3b370f328aab | 4,123 | py | Python | qa-webapp-tests/csv_importer/clean_up_records.py | cesarvh/qa-automation | 2bbdeda6f811e4bf40975658b3d8440cd3b4eff7 | [
"ECL-2.0"
] | null | null | null | qa-webapp-tests/csv_importer/clean_up_records.py | cesarvh/qa-automation | 2bbdeda6f811e4bf40975658b3d8440cd3b4eff7 | [
"ECL-2.0"
] | 25 | 2019-10-30T20:10:29.000Z | 2021-11-05T23:48:17.000Z | qa-webapp-tests/csv_importer/clean_up_records.py | cesarvh/qa-automation | 2bbdeda6f811e4bf40975658b3d8440cd3b4eff7 | [
"ECL-2.0"
] | 6 | 2019-02-20T17:16:42.000Z | 2020-07-08T22:46:33.000Z | import requests
import regex as re
import sys
import socket
import ssl
import datetime
try:
from http.client import HTTPConnection
except ImportError:
from httplib import HTTPConnection
from requests.packages.urllib3.connection import VerifiedHTTPSConnection
requests.packages.urllib3.connectionpool.HTTPConnection = MyHTTPConnection
requests.packages.urllib3.connectionpool.HTTPConnectionPool.ConnectionCls = MyHTTPConnection
# HTTPS
requests.packages.urllib3.connectionpool.HTTPSConnection = MyHTTPSConnection
requests.packages.urllib3.connectionpool.VerifiedHTTPSConnection = MyHTTPSConnection
requests.packages.urllib3.connectionpool.HTTPSConnectionPool.ConnectionCls = MyHTTPSConnection
requests.packages.urllib3.disable_warnings()
BASE_URL = 'https://{0}.cspace.berkeley.edu/cspace-services/'
SERVICE = 'servicegroups/common/items?as=(collectionspace_core:updatedAt >= TIMESTAMP "{0}")'
PAGE_OPT = '&pgSz={0}&pgNum={1}'
START_DATE = str(datetime.date.today() - datetime.timedelta(days=1)) + "T00:00:00Z"
if __name__ == "__main__":
args = sys.argv
userinfo = args[args.index("-u") + 1].split(":")
if (len(userinfo) != 2):
print("Please input password in this format: -u username:password")
sys.exit(-1)
user, pwd = userinfo[0], userinfo[1]
# Which mode are we invoking?
print(len(args))
print(args[1] == 'delete')
if args[1] == 'delete' and len(args) == 6:
filepath = args[args.index("-f") + 1]
deleteRecords(user, pwd, filepath)
elif args[1] == 'fetch' and len(args) == 8:
env = args[args.index("-e") + 1]
profile = args[args.index("-p") + 1]
fetchUris(user, pwd, profile + '-' + env)
else:
print('Usage: fetch -p \'profile\' -e \'qa/dev\' -u \'username:password\' \
\n OR delete -f filepath -u \'username:password\'')
sys.exit(-1)
| 34.940678 | 125 | 0.689061 | import requests
import regex as re
import sys
import socket
import ssl
import datetime
try:
from http.client import HTTPConnection
except ImportError:
from httplib import HTTPConnection
from requests.packages.urllib3.connection import VerifiedHTTPSConnection
class MyHTTPConnection(HTTPConnection):
def connect(self):
self.sock = socket.socket(socket.AF_INET)
self.sock.connect((self.host, self.port))
if self._tunnel_host:
self._tunnel()
requests.packages.urllib3.connectionpool.HTTPConnection = MyHTTPConnection
requests.packages.urllib3.connectionpool.HTTPConnectionPool.ConnectionCls = MyHTTPConnection
# HTTPS
class MyHTTPSConnection(VerifiedHTTPSConnection):
def connect(self):
self.sock = socket.socket(socket.AF_INET)
self.sock.connect((self.host, self.port))
if self._tunnel_host:
self._tunnel()
self.sock = ssl.wrap_socket(self.sock, self.key_file, self.cert_file)
requests.packages.urllib3.connectionpool.HTTPSConnection = MyHTTPSConnection
requests.packages.urllib3.connectionpool.VerifiedHTTPSConnection = MyHTTPSConnection
requests.packages.urllib3.connectionpool.HTTPSConnectionPool.ConnectionCls = MyHTTPSConnection
requests.packages.urllib3.disable_warnings()
BASE_URL = 'https://{0}.cspace.berkeley.edu/cspace-services/'
SERVICE = 'servicegroups/common/items?as=(collectionspace_core:updatedAt >= TIMESTAMP "{0}")'
PAGE_OPT = '&pgSz={0}&pgNum={1}'
START_DATE = str(datetime.date.today() - datetime.timedelta(days=1)) + "T00:00:00Z"
def fetchUris(user, pwd, profile):
# Get the amount of total items, so we can fetch the right amount of records
initial_get = BASE_URL.format(profile) + SERVICE.format(START_DATE) + PAGE_OPT.format(1, 0)
r = requests.get(initial_get, auth=(user, pwd))
CSIDS_LIST_FILE = open("./tmp/uri_list_{0}_{1}.txt".format(profile, START_DATE), "w+")
if (r.status_code < 200 or r.status_code > 300):
print ('The request {0} could not be fulfilled. Please try again. Error code {1}'.format(initial_get, r.status_code))
sys.exit(-1)
# find the number of total items, divide them into chunks of 1000 items per page
content = str(r.content)
total_items = content[content.index("totalItems>") + 11 : content.index("</totalItems")]
num_pages = int(total_items) // 1000
count = 0
for page in range(num_pages + 1):
request = BASE_URL.format(profile) + SERVICE.format(START_DATE) + PAGE_OPT.format(1000, page)
r = requests.get(request, auth=(user, pwd))
if (r.status_code < 200 or r.status_code > 300):
print ('The request FETCH {0} could not be fulfilled. Please try again. Error code {1}'.format(request, r.status_code))
sys.exit(-1)
content = str(r.content)
uris = re.findall('<uri>\S+?</uri>', content)
for uri in uris:
uri = re.findall('>/(\S+?)<', uri)[0]
CSIDS_LIST_FILE.write(BASE_URL.format(profile) + uri + '\n')
CSIDS_LIST_FILE.close()
def deleteRecords(user, pwd, filepath):
with open(filepath) as f:
for uri in f:
r = requests.delete(uri.rstrip('\n'), auth=(user, pwd))
if (r.status_code < 200 or r.status_code > 300):
print ('The request DELETE {0} could not be fulfilled. Please try again. Error code {1}'.format(uri, r.status_code))
sys.exit(-1)
if __name__ == "__main__":
args = sys.argv
userinfo = args[args.index("-u") + 1].split(":")
if (len(userinfo) != 2):
print("Please input password in this format: -u username:password")
sys.exit(-1)
user, pwd = userinfo[0], userinfo[1]
# Which mode are we invoking?
print(len(args))
print(args[1] == 'delete')
if args[1] == 'delete' and len(args) == 6:
filepath = args[args.index("-f") + 1]
deleteRecords(user, pwd, filepath)
elif args[1] == 'fetch' and len(args) == 8:
env = args[args.index("-e") + 1]
profile = args[args.index("-p") + 1]
fetchUris(user, pwd, profile + '-' + env)
else:
print('Usage: fetch -p \'profile\' -e \'qa/dev\' -u \'username:password\' \
\n OR delete -f filepath -u \'username:password\'')
sys.exit(-1)
| 2,102 | 46 | 143 |
03df7be861e0be415f21ea706784bc2090bdf90e | 2,170 | py | Python | lib/optim/build.py | pprp/pytorch-cifar-model-zoo | e7e68d8162a77aee2161bcd3cd70c20ddd7ef7ab | [
"MIT"
] | 23 | 2021-12-16T02:08:51.000Z | 2022-03-31T00:21:22.000Z | lib/optim/build.py | pprp/pytorch-cifar-tricks | e7e68d8162a77aee2161bcd3cd70c20ddd7ef7ab | [
"MIT"
] | 3 | 2021-12-26T13:28:01.000Z | 2022-03-21T08:31:15.000Z | lib/optim/build.py | pprp/pytorch-cifar-tricks | e7e68d8162a77aee2161bcd3cd70c20ddd7ef7ab | [
"MIT"
] | 5 | 2022-01-03T14:53:59.000Z | 2022-02-10T02:24:51.000Z | import torch.optim as optim
from .adamw import AdamW
from .adabound import AdaBound, AdaBoundW
from .asam import SAM, ASAM
| 29.726027 | 77 | 0.531797 | import torch.optim as optim
from .adamw import AdamW
from .adabound import AdaBound, AdaBoundW
from .asam import SAM, ASAM
def build_optimizer(model, args):
if args.optims == "sgd":
optimizer = optim.SGD(
filter(lambda p: p.requires_grad, model.parameters()),
lr=args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay,
)
elif args.optims == "adam":
optimizer = optim.Adam(
filter(lambda p: p.requires_grad, model.parameters()),
lr=args.lr,
betas=(0.9, 0.999),
)
elif args.optims == "adamw":
optimizer = AdamW(
filter(lambda p: p.requires_grad, model.parameters()),
lr=args.lr,
betas=(0.9, 0.999),
)
elif args.optims == "nesterov":
optimizer = optim.SGD(
filter(lambda p: p.requires_grad, model.parameters()),
lr=args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay,
nesterov=True,
)
elif args.optims == "adabound":
optimizer = AdaBound(
filter(lambda p: p.requires_grad, model.parameters()), lr=args.lr
)
elif args.optims == "adaboundw":
optimizer = AdaBoundW(
filter(lambda p: p.requires_grad, model.parameters()), lr=args.lr
)
elif args.optims == "sam":
opt = optim.SGD(
filter(lambda p: p.requires_grad, model.parameters()),
lr=args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay,
)
optimizer = SAM(
optimizer=opt,
model=model,
rho=0.5,
eta=0,
)
elif args.optims == "asam":
opt = optim.SGD(
filter(lambda p: p.requires_grad, model.parameters()),
lr=args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay,
)
optimizer = ASAM(
optimizer=opt,
model=model,
rho=0.5,
eta=0,
)
else:
raise "Not Implemented."
return optimizer
| 2,023 | 0 | 23 |
74b4a3f8850ece871caac28796759390364809f4 | 139 | py | Python | SAM2017/user_profile/urls.py | jpavelw/sam-2017 | 2f1eba1d8fc91bfc0c53483f46cc6d60fa6d00fd | [
"MIT"
] | 1 | 2016-08-31T16:49:17.000Z | 2016-08-31T16:49:17.000Z | SAM2017/user_profile/urls.py | jpavelw/sam-2017 | 2f1eba1d8fc91bfc0c53483f46cc6d60fa6d00fd | [
"MIT"
] | null | null | null | SAM2017/user_profile/urls.py | jpavelw/sam-2017 | 2f1eba1d8fc91bfc0c53483f46cc6d60fa6d00fd | [
"MIT"
] | null | null | null | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', 'user_profile.views.profile', name="user_profile"),
]
| 19.857143 | 66 | 0.697842 | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', 'user_profile.views.profile', name="user_profile"),
]
| 0 | 0 | 0 |
99abbff041369ff47030a8e018bce680d7f202ed | 8,182 | py | Python | munimap/print_requests.py | MrSnyder/bielefeldGEOCLIENT | 17c78b43fc2055d23a1bc4b5091da164756bf767 | [
"Apache-2.0"
] | 2 | 2022-02-07T13:20:45.000Z | 2022-02-14T21:40:06.000Z | munimap/print_requests.py | MrSnyder/bielefeldGEOCLIENT | 17c78b43fc2055d23a1bc4b5091da164756bf767 | [
"Apache-2.0"
] | 4 | 2021-06-17T07:53:53.000Z | 2021-12-17T10:55:48.000Z | munimap/print_requests.py | MrSnyder/bielefeldGEOCLIENT | 17c78b43fc2055d23a1bc4b5091da164756bf767 | [
"Apache-2.0"
] | 2 | 2021-06-01T09:41:55.000Z | 2022-02-14T17:33:33.000Z | from __future__ import division
import re
from werkzeug.exceptions import BadRequest
from flask import current_app
from munimap.grid import Grid
import logging
log = logging.getLogger(__name__)
MISSING = object()
PRINT_NAME_REGEX = re.compile(r'^[0-9a-zA-Z_\.\-]+$')
| 32.859438 | 111 | 0.605353 | from __future__ import division
import re
from werkzeug.exceptions import BadRequest
from flask import current_app
from munimap.grid import Grid
import logging
log = logging.getLogger(__name__)
MISSING = object()
def parse_value(params, key, func, default=MISSING):
if default is MISSING:
# required argument
if key not in params:
raise BadRequest('missing param %s' % key)
if key not in params:
return default
try:
return func(params[key])
except (ValueError, TypeError):
raise BadRequest('invalid param value %s=%s' % (key, params[key]))
def parse_bbox(params):
def _parse_bbox(values):
bbox = [float(x) for x in values.split(',')]
if len(bbox) != 4:
raise ValueError
return bbox
return parse_value(params, 'bbox', _parse_bbox)
def parse_cells(params):
x = parse_value(params, 'cellsx', int, default=0)
y = parse_value(params, 'cellsy', int, default=0)
return x, y
def parse_size(params):
width = parse_value(params, 'width', int, default=None)
height = parse_value(params, 'height', int, default=None)
if width is None and height is None:
return None, None
if width is None or height is None:
raise BadRequest('requires width and height param')
return width, height
def parse_index_layers(params):
return parse_value(params, 'index_layers', lambda x: x.split(','), default=[])
def parse_layers(params):
return parse_value(params, 'layers', lambda x: x.split(','), default=[])
def parse_srs(params):
def _parse_srs(val):
if isinstance(val, int):
return val
return int(val.split(':')[-1])
return parse_value(params, 'srs', _parse_srs, default=4326)
def check_value(params, key, check, default=MISSING):
if default is MISSING:
# required argument
if key not in params:
raise BadRequest('missing param %s' % key)
if key not in params:
return default
if not check(params[key]):
raise BadRequest('invalid param value %s=%s' % (key, params[key]))
return params[key]
def check_bbox(params):
def _check_bbox(bbox):
if not isinstance(bbox, list) or len(bbox) != 4:
return False
if not all(isinstance(x, (float, int)) for x in bbox):
return False
return True
return check_value(params, 'bbox', _check_bbox)
def check_int(params, key, default=MISSING):
return check_value(params, key, lambda x: isinstance(x, int), default=default)
def check_list(params, key, default=MISSING):
return check_value(params, key, lambda x: isinstance(x, list), default=default)
def check_dict(params, key, default=MISSING):
return check_value(params, key, lambda x: isinstance(x, dict), default=default)
def format_list(l):
if not l:
return ''
return ','.join(map(str, l))
def calc_res(size, bbox, dpi):
w = abs(bbox[0] - bbox[2])
h = abs(bbox[1] - bbox[3])
return max(
w / (size[0] / (25.4 / (0.28 * dpi))),
h / (size[0] / (25.4 / (0.28 * dpi)))
)
class MapRequest(object):
def __init__(self, **kw):
self.bbox = kw.get('bbox')
self.srs = kw.get('srs')
self.layers = kw.get('layers')
self.index_layers = kw.get('index_layers')
self.dpi = kw.get('dpi')
self.cellsx = kw.get('cellsx')
self.cellsy = kw.get('cellsy')
self.limit = kw.get('limit')
self.width = kw.get('height')
self.height = kw.get('width')
# save cells from grid if cells are 0,0
if self.cellsx == 0 or self.cellsy == 0:
grid = self.grid
if grid:
self.cellsy, self.cellsx = grid.cells
@property
def size(self):
if self.width is None or self.height is None:
return None
return self.width, self.height
@property
def cells(self):
if self.cellsx is None or self.cellsy is None:
return None
return self.cellsx, self.cellsy
@property
def grid(self):
if not self.size:
return None
return Grid(
self.bbox, self.size, cells=self.cells, map_res=self.dpi,
labels=current_app.config.get('GRID_LABELS'),
invert_top_labels=current_app.config.get('INVERT_TOP_GRID_LABELS'),
invert_left_labels=current_app.config.get('INVERT_LEFT_GRID_LABELS'),
# TODO def_res=self.def_res,
)
def as_strings(self):
d = {}
for k, v in self.__dict__.iteritems():
if isinstance(v, (list, tuple)):
v = format_list(v)
else:
v = str(v)
d[k] = v
return d
@classmethod
def from_req(cls, req):
param = dict((k.lower(), v) for k, v in req.args.iteritems())
param['width'], param['height'] = parse_size(param)
param['bbox'] = parse_bbox(param)
param['srs'] = parse_srs(param)
param['layers'] = parse_layers(param)
param['index_layers'] = parse_index_layers(param)
param['dpi'] = int(param.get('dpi', 72))
param['limit'] = int(param.get('limit', 1000))
param['cellsx'], param['cellsy'] = parse_cells(param)
return cls(**param)
PRINT_NAME_REGEX = re.compile(r'^[0-9a-zA-Z_\.\-]+$')
class PrintRequest(MapRequest):
    """A MapRequest extended with print-specific parameters: output format,
    page layout/size, requested scale and an optional print job name.
    """

    def __init__(self, **kw):
        MapRequest.__init__(self, **kw)
        self.output_format = kw.get('output_format')
        self.mimetype = kw.get('mimetype')
        self.page_layout = kw.get('page_layout')
        self.page_size = kw.get('page_size')
        self.scale = kw.get('scale')
        # Resolution of the printed map; 0.00028 presumably is the standard
        # rendering pixel size in meters used for scale denominators — confirm.
        self.res = calc_res(self.page_size, self.bbox, self.dpi)
        self.calc_scale = self.res / 0.00028
        self.name = kw.get('name')
        self.feature_collection = kw.get('feature_collection')
        self.measure_feature_collection = kw.get('measure_feature_collection')

    @classmethod
    def from_json(cls, param):
        """Build a PrintRequest from a JSON parameter dict with camelCase keys."""
        def is_string(value):
            return isinstance(value, basestring)

        params = uncamel_case_dict(param)
        params['bbox'] = check_bbox(params)
        params['srs'] = check_int(params, 'srs')
        params['layers'] = check_list(params, 'layers')
        params['index_layers'] = check_list(params, 'index_layers', [])
        params['dpi'] = check_int(params, 'dpi', 300)
        params['limit'] = check_int(params, 'limit', 1000)
        params['cellsx'], params['cellsy'] = check_list(params, 'cells', [0, 0])
        params['page_size'] = check_list(params, 'page_size', None)
        params['output_format'] = check_value(params, 'output_format', is_string)
        params['mimetype'] = check_value(params, 'mimetype', is_string)
        params['page_layout'] = check_value(params, 'page_layout', is_string, 'custom')
        params['scale'] = check_value(
            params, 'scale', lambda value: isinstance(value, (float, int)))
        params['name'] = check_value(
            params,
            'name',
            lambda value: is_string(value) and PRINT_NAME_REGEX.match(value)
        )
        # Normalize None to 0 so downstream grid math always sees numbers.
        params['cellsx'] = params['cellsx'] or 0
        params['cellsy'] = params['cellsy'] or 0
        params['feature_collection'] = check_dict(params, 'feature_collection', {})
        params['measure_feature_collection'] = check_dict(params, 'measure_feature_collection', {})
        return cls(**params)

    @property
    def grid(self):
        """Index grid for the print area, or None when no index layers apply."""
        if not hasattr(self, 'page_size') or not self.index_layers:
            return None
        if not self.size and self.page_size:
            # Derive the pixel dimensions from the physical page size and DPI.
            MM_PER_INCH = 25.4
            self.width = int(self.page_size[0] / MM_PER_INCH * self.dpi)
            self.height = int(self.page_size[1] / MM_PER_INCH * self.dpi)
        return MapRequest.grid.fget(self)
def uncamel_case_dict(d):
    """Return a copy of *d* with every key converted from camelCase to snake_case.

    Values are carried over unchanged.  Uses ``d.items()`` instead of the
    Python-2-only ``d.iteritems()`` so the function works on both Python 2
    and Python 3 (on Py2 the extra intermediate list is negligible here).
    """
    return {uncamel_case(key): value for key, value in d.items()}
def uncamel_case(key):
    """Convert a camelCase key to snake_case, e.g. 'pageSize' -> 'page_size'."""
    def lower_boundary(match):
        return match.group(1) + '_' + match.group(2).lower()

    return re.sub(r'([a-z])([A-Z])', lower_boundary, key)
| 7,147 | 346 | 414 |
e0aeb69df014c9a6bb0944ae2d7d1e9033e4a4f8 | 5,632 | py | Python | Modules/Loadable/Plots/Testing/Python/PlotsSelfTest.py | forfullstack/slicersources-src | 91bcecf037a27f3fad4c0ab57e8286fc258bb0f5 | [
"Apache-2.0"
] | null | null | null | Modules/Loadable/Plots/Testing/Python/PlotsSelfTest.py | forfullstack/slicersources-src | 91bcecf037a27f3fad4c0ab57e8286fc258bb0f5 | [
"Apache-2.0"
] | null | null | null | Modules/Loadable/Plots/Testing/Python/PlotsSelfTest.py | forfullstack/slicersources-src | 91bcecf037a27f3fad4c0ab57e8286fc258bb0f5 | [
"Apache-2.0"
] | null | null | null | import os
import unittest
import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
#
# PlotsSelfTest
#
#
# PlotsSelfTestWidget
#
#
# PlotsSelfTestLogic
#
class PlotsSelfTestLogic(ScriptedLoadableModuleLogic):
    """This class should implement all the actual
    computation done by your module.  The interface
    should be such that other python code can import
    this class and make use of the functionality without
    requiring an instance of the Widget.
    This test module needs no computation, so the class is empty.
    """
class PlotsSelfTestTest(ScriptedLoadableModuleTest):
    """
    This is the test case for your scripted module.
    """

    def setUp(self):
        """ Do whatever is needed to reset the state - typically a scene clear will be enough.
        """
        slicer.mrmlScene.Clear(0)

    def runTest(self):
        """Run as few or as many tests as needed here.
        """
        self.setUp()
        # NOTE(review): test_PlotsSelfTest_FullTest1 is not defined in this copy
        # of the class — confirm it is provided elsewhere (e.g. by a subclass).
        self.test_PlotsSelfTest_FullTest1()
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
| 32.935673 | 115 | 0.68892 | import os
import unittest
import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
#
# PlotsSelfTest
#
class PlotsSelfTest(ScriptedLoadableModule):
    """Module descriptor for the PlotsSelfTest self-test module.

    Registers title, category, dependencies and credits with the Slicer
    module framework via the ``parent`` proxy.
    """

    def __init__(self, parent):
        ScriptedLoadableModule.__init__(self, parent)
        self.parent.title = "PlotsSelfTest"
        self.parent.categories = ["Testing.TestCases"]
        self.parent.dependencies = ["Plots"]
        self.parent.contributors = ["Andras Lasso (PerkLab, Queen's)"]
        self.parent.helpText = """This is a self test for plot nodes and widgets."""
        # Consistency fix: the original assigned through the bare `parent`
        # parameter here while every sibling assignment uses `self.parent`.
        self.parent.acknowledgementText = """This file was originally developed by Andras Lasso, PerkLab, Queen's University
and was supported through Canada CANARIE's Research Software Program."""
#
# PlotsSelfTestWidget
#
class PlotsSelfTestWidget(ScriptedLoadableModuleWidget):
    """GUI widget for the PlotsSelfTest module; adds no UI beyond the base class."""

    def setup(self):
        # Delegate entirely to the framework's default widget setup.
        ScriptedLoadableModuleWidget.setup(self)
#
# PlotsSelfTestLogic
#
class PlotsSelfTestLogic(ScriptedLoadableModuleLogic):
    """This class should implement all the actual
    computation done by your module.  The interface
    should be such that other python code can import
    this class and make use of the functionality without
    requiring an instance of the Widget.
    """

    def __init__(self):
        # NOTE(review): deliberately does not call the base-class constructor;
        # confirm ScriptedLoadableModuleLogic.__init__ performs no required setup
        # before removing or changing this no-op override.
        pass
class PlotsSelfTestTest(ScriptedLoadableModuleTest):
    """
    Test case for MRML plotting: builds a sample table, creates plot series
    and chart nodes from it, and displays the chart in plot view widgets.
    """

    def setUp(self):
        """Do whatever is needed to reset the state - a scene clear is enough here."""
        slicer.mrmlScene.Clear(0)

    def runTest(self):
        """Run as few or as many tests as needed here."""
        self.setUp()
        self.test_PlotsSelfTest_FullTest1()

    # ------------------------------------------------------------------------------
    def test_PlotsSelfTest_FullTest1(self):
        """Exercise the whole plotting pipeline end to end."""
        # Check for Plots module
        self.assertTrue(slicer.modules.plots)

        self.section_SetupPathsAndNames()
        self.section_CreateTable()
        self.section_CreatePlots()
        self.section_TestPlotView()
        self.delayDisplay("Test passed")

    # ------------------------------------------------------------------------------
    def section_SetupPathsAndNames(self):
        """Define the node/column names shared by the sections below."""
        self.tableName = 'SampleTable'
        self.xColumnName = 'x'
        self.y1ColumnName = 'cos'
        self.y2ColumnName = 'sin'
        self.series1Name = "Cosine"
        self.series2Name = "Sine"
        self.chartName = "My Chart"

    # ------------------------------------------------------------------------------
    def section_CreateTable(self):
        """Create a table node with an x column and cos/sin sample columns."""
        self.delayDisplay("Create table")

        tableNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLTableNode", self.tableName)
        self.assertIsNotNone(tableNode)
        table = tableNode.GetTable()
        self.assertIsNotNone(table)

        # Create X, Y1, and Y2 series
        arrX = vtk.vtkFloatArray()
        arrX.SetName(self.xColumnName)
        table.AddColumn(arrX)

        arrY1 = vtk.vtkFloatArray()
        arrY1.SetName(self.y1ColumnName)
        table.AddColumn(arrY1)

        arrY2 = vtk.vtkFloatArray()
        arrY2.SetName(self.y2ColumnName)
        table.AddColumn(arrY2)

        # Fill in the table with some example values
        import math
        numPoints = 69
        inc = 7.5 / (numPoints - 1)
        table.SetNumberOfRows(numPoints)
        for i in range(numPoints):
            table.SetValue(i, 0, i * inc)
            table.SetValue(i, 1, math.cos(i * inc))
            table.SetValue(i, 2, math.sin(i * inc))

    # ------------------------------------------------------------------------------
    def section_CreatePlots(self):
        """Create two plot series nodes from the table and a chart containing them."""
        self.delayDisplay("Create plots")

        tableNode = slicer.util.getNode(self.tableName)

        # Create plot data series nodes
        plotSeriesNode1 = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotSeriesNode", self.series1Name)
        plotSeriesNode1.SetAndObserveTableNodeID(tableNode.GetID())
        plotSeriesNode1.SetXColumnName(self.xColumnName)
        plotSeriesNode1.SetYColumnName(self.y1ColumnName)
        plotSeriesNode1.SetLineStyle(slicer.vtkMRMLPlotSeriesNode.LineStyleDash)
        plotSeriesNode1.SetMarkerStyle(slicer.vtkMRMLPlotSeriesNode.MarkerStyleSquare)

        plotSeriesNode2 = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotSeriesNode", self.series2Name)
        plotSeriesNode2.SetAndObserveTableNodeID(tableNode.GetID())
        plotSeriesNode2.SetXColumnName(self.xColumnName)
        plotSeriesNode2.SetYColumnName(self.y2ColumnName)
        plotSeriesNode2.SetUniqueColor()

        # Create plot chart node
        plotChartNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotChartNode", self.chartName)
        plotChartNode.AddAndObservePlotSeriesNodeID(plotSeriesNode1.GetID())
        plotChartNode.AddAndObservePlotSeriesNodeID(plotSeriesNode2.GetID())
        plotChartNode.SetTitle('A simple plot with 2 curves')
        # Fix: the X axis title was a copy-paste of the chart title; label the
        # axis itself, matching the Y axis line below.
        plotChartNode.SetXAxisTitle('This is the X axis')
        plotChartNode.SetYAxisTitle('This is the Y axis')

    # ------------------------------------------------------------------------------
    def section_TestPlotView(self):
        """Show the chart in both a qMRMLPlotWidget and a qMRMLPlotView."""
        self.delayDisplay("Test plot view")

        plotChartNode = slicer.util.getNode(self.chartName)

        # Create plot view node
        plotViewNode = slicer.mrmlScene.AddNewNodeByClass("vtkMRMLPlotViewNode")
        plotViewNode.SetPlotChartNodeID(plotChartNode.GetID())

        # Create plotWidget
        plotWidget = slicer.qMRMLPlotWidget()
        plotWidget.setMRMLScene(slicer.mrmlScene)
        plotWidget.setMRMLPlotViewNode(plotViewNode)
        plotWidget.show()

        # Create plotView
        plotView = slicer.qMRMLPlotView()
        plotView.setMRMLScene(slicer.mrmlScene)
        plotView.setMRMLPlotViewNode(plotViewNode)
        plotView.show()

        # Save variables into slicer namespace for debugging
        slicer.plotWidget = plotWidget
        slicer.plotView = plotView
| 4,036 | 58 | 238 |