content
stringlengths 5
1.05M
|
|---|
#!/usr/bin/env python
"""deque.py: Deque implementation"""
__author__ = 'rohitsinha'
class Deque:
    """Double-ended queue backed by a Python list.

    The front of the deque is the END of the underlying list, so
    addFront/removeFront are O(1) while addRear/removeRear are O(n).
    """

    def __init__(self):
        # Underlying storage; index -1 is the front, index 0 is the rear.
        self.items = []

    def isEmpty(self):
        """Return True if the deque holds no items."""
        # Idiomatic truthiness test instead of comparing against [].
        return not self.items

    def addFront(self, item):
        """Insert item at the front (O(1))."""
        self.items.append(item)

    def addRear(self, item):
        """Insert item at the rear (O(n): shifts the whole list)."""
        self.items.insert(0, item)

    def removeFront(self):
        """Remove and return the front item; raises IndexError if empty."""
        return self.items.pop()

    def removeRear(self):
        """Remove and return the rear item; raises IndexError if empty."""
        return self.items.pop(0)

    def size(self):
        """Return the number of stored items."""
        return len(self.items)

    def __str__(self):
        # Render like the list repr but without the surrounding brackets.
        return str(self.items).strip('[]')
if __name__ == '__main__':
    # Demo: exercise every Deque operation and print the results.
    d = Deque()
    print(d.isEmpty())      # True - a fresh deque is empty
    d.addFront(5)           # deque: [5]
    d.addRear('Hello')      # deque: ['Hello', 5]
    print(d.removeFront())  # 5
    d.addFront(True)        # deque: ['Hello', True]
    print(d.removeRear())   # Hello
    print(d.size())         # 1
    print(d.removeFront())  # True
    d.addRear('Bye')
    d.addFront('Bye')
    print(d)                # list repr without brackets: 'Bye', 'Bye'
|
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.layers import Dropout
from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix, classification_report
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
def one_hot_encode(labels):
    """One-hot encode an array of integer labels.

    Builds a zero matrix of width n_unique + 2, sets a 1 at each label's
    column, then deletes column 0.  The extra width plus the dropped first
    column accommodate label values whose maximum exceeds the number of
    distinct labels by one -- presumably 1-based labels (TODO confirm
    against the data source).

    :param labels: 1-D array-like of non-negative integer class labels
    :return: 2-D numpy float array of one-hot rows (column 0 removed)
    """
    n_labels = len(labels)
    n_unique_labels = len(np.unique(labels))
    # Renamed local: the original shadowed the function's own name.
    encoded = np.zeros((n_labels, n_unique_labels + 2))
    encoded[np.arange(n_labels), labels] = 1
    # Drop column 0 (the slot for label value 0).
    encoded = np.delete(encoded, 0, axis=1)
    return encoded
def create_model(n_hidden_units_1, n_hidden_units_2, n_hidden_units_3, n_hidden_units_4, n_classes, n_dim,
                 activation_function='relu', init_type='normal', optimiser='adamax', dropout_rate=0.2, ):
    """Build and compile a 4-hidden-layer feed-forward classifier.

    Hidden layers 2-4 are each followed by dropout; the output layer is a
    softmax over n_classes, compiled with categorical cross-entropy.

    NOTE(review): `init=` is the Keras 1.x keyword; Keras 2 renamed it to
    `kernel_initializer=`.  Confirm the installed Keras version before
    changing it.

    :param n_hidden_units_1: units in hidden layer 1 (fed by n_dim inputs)
    :param n_hidden_units_2: units in hidden layer 2
    :param n_hidden_units_3: units in hidden layer 3
    :param n_hidden_units_4: units in hidden layer 4
    :param n_classes: number of output classes
    :param n_dim: input feature dimensionality
    :param activation_function: activation for all hidden layers
    :param init_type: weight initializer name
    :param optimiser: optimizer name passed to compile()
    :param dropout_rate: dropout fraction after layers 2-4
    :return: compiled keras Sequential model
    """
    model = Sequential()
    # layer 1
    model.add(Dense(n_hidden_units_1, input_dim=n_dim, init=init_type, activation=activation_function))
    # layer 2
    model.add(Dense(n_hidden_units_2, init=init_type, activation=activation_function))
    model.add(Dropout(dropout_rate))
    # layer 3
    model.add(Dense(n_hidden_units_3, init=init_type, activation=activation_function))
    model.add(Dropout(dropout_rate))
    # layer4
    model.add(Dense(n_hidden_units_4, init=init_type, activation=activation_function))
    model.add(Dropout(dropout_rate))
    # output layer
    model.add(Dense(n_classes, init=init_type, activation='softmax'))
    # model compilation
    model.compile(loss='categorical_crossentropy', optimizer=optimiser, metrics=['categorical_accuracy'])
    return model
def ann_feature(input_data):
    """Train the DNN on acoustic features only and write metrics to Metrics/.

    Side effects: saves X.npy/y.npy, an epoch-accuracy plot, a
    classification-report CSV and a confusion-matrix heatmap.

    :param input_data: DataFrame of feature columns plus a 'value' label column
    """
    labels = input_data['value']
    np.save('X', input_data.drop('value', axis=1))
    # one hot encoding labels
    labels_one = one_hot_encode(labels)
    np.save('y', labels_one)
    X = np.load('X.npy', allow_pickle=True)
    y = np.load('y.npy', allow_pickle=True)
    train_x, test_x, train_y, test_y = train_test_split(X, y, test_size=0.33, random_state=42)
    n_dim = train_x.shape[1]
    n_classes = train_y.shape[1]
    n_hidden_units_1 = n_dim
    n_hidden_units_2 = 400  # approx n_dim * 2
    n_hidden_units_3 = 200  # half of layer 2
    n_hidden_units_4 = 100
    # create the model
    model = create_model(n_hidden_units_1, n_hidden_units_2, n_hidden_units_3, n_hidden_units_4, n_classes, n_dim)
    # train the model
    history = model.fit(train_x, train_y, epochs=400, batch_size=4)
    # BUGFIX: start a fresh figure so the accuracy curve is not drawn onto
    # whatever figure an earlier call left as pyplot's current figure.
    plt.figure()
    plt.plot(history.history['categorical_accuracy'])
    plt.title('Accuracy over number of Epochs')
    plt.xlabel('No. of Epochs')
    plt.ylabel('Accuracy')
    plt.savefig('Metrics\\featureepoch.png')
    # predicting from the model
    predict = model.predict(test_x, batch_size=4)
    # label order must match the one-hot column order produced upstream
    emotions = ['neutral', 'calm', 'happy', 'sad', 'angry', 'fearful', 'disgust', 'surprised']
    # predicted emotions from the test set
    y_pred = np.argmax(predict, 1)
    predicted_emo = []
    for i in range(0, test_y.shape[0]):
        emo = emotions[y_pred[i]]
        predicted_emo.append(emo)
    actual_emo = []
    y_true = np.argmax(test_y, 1)
    for i in range(0, test_y.shape[0]):
        emo = emotions[y_true[i]]
        actual_emo.append(emo)
    csv_classification = pd.DataFrame(classification_report(actual_emo, predicted_emo, output_dict=True)).transpose()
    csv_classification.to_csv('Metrics\\feature_only_dnn.csv', sep=',')
    # generate the confusion matrix
    cm = confusion_matrix(actual_emo, predicted_emo)
    print(cm)
    # confusion_matrix orders string labels alphabetically, hence this order
    index = ['angry', 'calm', 'disgust', 'fearful', 'happy', 'neutral', 'sad', 'surprised']
    columns = ['angry', 'calm', 'disgust', 'fearful', 'happy', 'neutral', 'sad', 'surprised']
    cm_df = pd.DataFrame(cm, index, columns)
    plt.figure(figsize=(15, 10))
    heatmap = sns.heatmap(cm_df, annot=True)
    fig = heatmap.get_figure()
    fig.savefig("Metrics\\featuresHeat.png")
def ann_feature_gender(input_data):
    """Train the DNN on acoustic features plus gender and write metrics.

    Same pipeline as ann_feature(); only the output file names differ.

    :param input_data: DataFrame of feature+gender columns plus a 'value' label column
    """
    labels = input_data['value']
    np.save('X', input_data.drop('value', axis=1))
    # one hot encoding labels
    labels_one = one_hot_encode(labels)
    np.save('y', labels_one)
    X = np.load('X.npy', allow_pickle=True)
    y = np.load('y.npy', allow_pickle=True)
    train_x, test_x, train_y, test_y = train_test_split(X, y, test_size=0.33, random_state=42)
    n_dim = train_x.shape[1]
    n_classes = train_y.shape[1]
    n_hidden_units_1 = n_dim
    n_hidden_units_2 = 400  # approx n_dim * 2
    n_hidden_units_3 = 200  # half of layer 2
    n_hidden_units_4 = 100
    # create the model
    model = create_model(n_hidden_units_1, n_hidden_units_2, n_hidden_units_3, n_hidden_units_4, n_classes, n_dim)
    # train the model
    history = model.fit(train_x, train_y, epochs=400, batch_size=4)
    # BUGFIX: start a fresh figure so the accuracy curve is not drawn onto
    # the heatmap figure left current by a previous ann_* call.
    plt.figure()
    plt.plot(history.history['categorical_accuracy'])
    plt.title('Accuracy over number of Epochs')
    plt.xlabel('No. of Epochs')
    plt.ylabel('Accuracy')
    plt.savefig('Metrics\\featureGenderepoch.png')
    # predicting from the model
    predict = model.predict(test_x, batch_size=4)
    # label order must match the one-hot column order produced upstream
    emotions = ['neutral', 'calm', 'happy', 'sad', 'angry', 'fearful', 'disgust', 'surprised']
    # predicted emotions from the test set
    y_pred = np.argmax(predict, 1)
    predicted_emo = []
    for i in range(0, test_y.shape[0]):
        emo = emotions[y_pred[i]]
        predicted_emo.append(emo)
    actual_emo = []
    y_true = np.argmax(test_y, 1)
    for i in range(0, test_y.shape[0]):
        emo = emotions[y_true[i]]
        actual_emo.append(emo)
    csv_classification = pd.DataFrame(classification_report(actual_emo, predicted_emo, output_dict=True)).transpose()
    csv_classification.to_csv('Metrics\\feature_gender_dnn.csv', sep=',')
    # generate the confusion matrix
    cm = confusion_matrix(actual_emo, predicted_emo)
    print(cm)
    # confusion_matrix orders string labels alphabetically, hence this order
    index = ['angry', 'calm', 'disgust', 'fearful', 'happy', 'neutral', 'sad', 'surprised']
    columns = ['angry', 'calm', 'disgust', 'fearful', 'happy', 'neutral', 'sad', 'surprised']
    cm_df = pd.DataFrame(cm, index, columns)
    plt.figure(figsize=(15, 10))
    heatmap = sns.heatmap(cm_df, annot=True)
    fig = heatmap.get_figure()
    fig.savefig("Metrics\\featuresGenderHeat.png")
def ann_feature_gender_intensity(input_data):
    """Train the DNN on features plus gender and intensity and write metrics.

    Same pipeline as ann_feature(); only the output file names differ.

    :param input_data: DataFrame of feature+gender+intensity columns plus a
        'value' label column
    """
    labels = input_data['value']
    np.save('X', input_data.drop('value', axis=1))
    # one hot encoding labels
    labels_one = one_hot_encode(labels)
    np.save('y', labels_one)
    X = np.load('X.npy', allow_pickle=True)
    y = np.load('y.npy', allow_pickle=True)
    train_x, test_x, train_y, test_y = train_test_split(X, y, test_size=0.33, random_state=42)
    n_dim = train_x.shape[1]
    n_classes = train_y.shape[1]
    n_hidden_units_1 = n_dim
    n_hidden_units_2 = 400  # approx n_dim * 2
    n_hidden_units_3 = 200  # half of layer 2
    n_hidden_units_4 = 100
    # create the model
    model = create_model(n_hidden_units_1, n_hidden_units_2, n_hidden_units_3, n_hidden_units_4, n_classes, n_dim)
    # train the model
    history = model.fit(train_x, train_y, epochs=400, batch_size=4)
    # BUGFIX: start a fresh figure so the accuracy curve is not drawn onto
    # the heatmap figure left current by a previous ann_* call.
    plt.figure()
    plt.plot(history.history['categorical_accuracy'])
    plt.title('Accuracy over number of Epochs')
    plt.xlabel('No. of Epochs')
    plt.ylabel('Accuracy')
    plt.savefig('Metrics\\featureGenderIntepoch.png')
    # predicting from the model
    predict = model.predict(test_x, batch_size=4)
    # label order must match the one-hot column order produced upstream
    emotions = ['neutral', 'calm', 'happy', 'sad', 'angry', 'fearful', 'disgust', 'surprised']
    # predicted emotions from the test set
    y_pred = np.argmax(predict, 1)
    predicted_emo = []
    for i in range(0, test_y.shape[0]):
        emo = emotions[y_pred[i]]
        predicted_emo.append(emo)
    actual_emo = []
    y_true = np.argmax(test_y, 1)
    for i in range(0, test_y.shape[0]):
        emo = emotions[y_true[i]]
        actual_emo.append(emo)
    csv_classification = pd.DataFrame(classification_report(actual_emo, predicted_emo, output_dict=True)).transpose()
    csv_classification.to_csv('Metrics\\feature_gender_intensity_dnn.csv', sep=',')
    # generate the confusion matrix
    cm = confusion_matrix(actual_emo, predicted_emo)
    print(cm)
    # confusion_matrix orders string labels alphabetically, hence this order
    index = ['angry', 'calm', 'disgust', 'fearful', 'happy', 'neutral', 'sad', 'surprised']
    columns = ['angry', 'calm', 'disgust', 'fearful', 'happy', 'neutral', 'sad', 'surprised']
    cm_df = pd.DataFrame(cm, index, columns)
    plt.figure(figsize=(15, 10))
    heatmap = sns.heatmap(cm_df, annot=True)
    fig = heatmap.get_figure()
    fig.savefig("Metrics\\featuresGenderIntHeat.png")
if __name__ == '__main__':
    full_data = pd.read_csv('maxFeatures.csv')
    # The first two runs drop the high-intensity rows (intensity == 1).
    no_high = full_data[full_data.intensity != 1]
    feature_only_data = no_high.drop(['filename', 'emotion_name', 'gender', 'intensity'], axis=1)
    feature_gender_data = no_high.drop(['filename', 'emotion_name', 'intensity'], axis=1)
    # The intensity run instead drops the low-intensity rows (intensity == 0).
    no_low = full_data[full_data.intensity != 0]
    feature_gender_intensity_data = no_low.drop(['filename', 'emotion_name'], axis=1)
    # Train and evaluate the three feature-set variants in sequence.
    ann_feature(feature_only_data)
    ann_feature_gender(feature_gender_data)
    ann_feature_gender_intensity(feature_gender_intensity_data)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class TradeComplainQueryResponse(object):
    """Model object for an Alipay trade-complaint query response.

    Every wire field is exposed as a property; to_alipay_dict() and
    from_alipay_dict() convert between this object and the wire-format dict.
    The hand-unrolled per-field code of the generated original is collapsed
    into loops over FIELDS.
    """

    # Wire-format field names handled by the dict conversions below.
    FIELDS = (
        'complain_event_id', 'complain_reason', 'content', 'gmt_create',
        'gmt_finished', 'gmt_modified', 'images', 'leaf_category_name',
        'merchant_order_no', 'phone_no', 'status', 'target_id',
        'target_type', 'trade_no',
    )

    def __init__(self):
        # Every field starts unset; the private backing attribute for each
        # property is the field name with a leading underscore.
        for name in self.FIELDS:
            setattr(self, '_' + name, None)

    @property
    def complain_event_id(self):
        return self._complain_event_id

    @complain_event_id.setter
    def complain_event_id(self, value):
        self._complain_event_id = value

    @property
    def complain_reason(self):
        return self._complain_reason

    @complain_reason.setter
    def complain_reason(self, value):
        self._complain_reason = value

    @property
    def content(self):
        return self._content

    @content.setter
    def content(self, value):
        self._content = value

    @property
    def gmt_create(self):
        return self._gmt_create

    @gmt_create.setter
    def gmt_create(self, value):
        self._gmt_create = value

    @property
    def gmt_finished(self):
        return self._gmt_finished

    @gmt_finished.setter
    def gmt_finished(self, value):
        self._gmt_finished = value

    @property
    def gmt_modified(self):
        return self._gmt_modified

    @gmt_modified.setter
    def gmt_modified(self, value):
        self._gmt_modified = value

    @property
    def images(self):
        return self._images

    @images.setter
    def images(self, value):
        # Only list values are accepted (non-list assignments are ignored,
        # matching the generated-SDK behaviour); elements are copied as-is.
        if isinstance(value, list):
            self._images = list()
            for i in value:
                self._images.append(i)

    @property
    def leaf_category_name(self):
        return self._leaf_category_name

    @leaf_category_name.setter
    def leaf_category_name(self, value):
        self._leaf_category_name = value

    @property
    def merchant_order_no(self):
        return self._merchant_order_no

    @merchant_order_no.setter
    def merchant_order_no(self, value):
        self._merchant_order_no = value

    @property
    def phone_no(self):
        return self._phone_no

    @phone_no.setter
    def phone_no(self, value):
        self._phone_no = value

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, value):
        self._status = value

    @property
    def target_id(self):
        return self._target_id

    @target_id.setter
    def target_id(self, value):
        self._target_id = value

    @property
    def target_type(self):
        return self._target_type

    @target_type.setter
    def target_type(self, value):
        self._target_type = value

    @property
    def trade_no(self):
        return self._trade_no

    @trade_no.setter
    def trade_no(self, value):
        self._trade_no = value

    def to_alipay_dict(self):
        """Serialize the set (truthy) fields into a wire-format dict.

        NOTE: falsy values (None, 0, '', empty list) are omitted, matching
        the generated-SDK convention.  Nested model objects are converted
        via their own to_alipay_dict(); list elements are converted in
        place, as the generated code did for `images`.
        """
        params = dict()
        for name in self.FIELDS:
            value = getattr(self, name)
            if not value:
                continue
            if isinstance(value, list):
                for i in range(len(value)):
                    element = value[i]
                    if hasattr(element, 'to_alipay_dict'):
                        value[i] = element.to_alipay_dict()
            if hasattr(value, 'to_alipay_dict'):
                params[name] = value.to_alipay_dict()
            else:
                params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Deserialize a wire-format dict; returns None for a falsy input."""
        if not d:
            return None
        o = TradeComplainQueryResponse()
        for name in TradeComplainQueryResponse.FIELDS:
            if name in d:
                # Assign through the property so field-specific setters
                # (e.g. images) apply.
                setattr(o, name, d[name])
        return o
|
# import modules
import sys
import pygame
# define color white (red, green, blue) (0-255)
WHITE = (255, 255, 255)
# is the game finished?
finished = False
# initialize pygame
pygame.init()
# set our display as fullscreen
game_display = pygame.display.set_mode((0, 0), pygame.FULLSCREEN)
# set the background color as white
game_display.fill(WHITE)
# update the display
pygame.display.update()
# main loop, do this while game is not finished (finished = False)
while not finished:
    # cycle through events in the queue
    for event in pygame.event.get():
        # if a key has been pushed and released
        if event.type == pygame.KEYUP:
            # and if that key is ESCAPE, quit!
            if event.key == pygame.K_ESCAPE:
                # NOTE: ESC exits the process right here, without reaching
                # the cleanup lines after the loop; a window-close event
                # instead goes through `finished` below.
                pygame.quit()
                sys.exit()
        # if a different quit event is detected, quit
        if event.type == pygame.QUIT:
            # finished = True, so we exit loop
            finished = True
# if we've exited the loop, we quit!
pygame.quit()
sys.exit()
|
#!/usr/bin/python3
# booksales.py: show how to use LAST_INSERT_ID(expr)
import mysql.connector
import cookbook
conn = cookbook.connect()
try:
    #@ _UPDATE_COUNTER_
    cursor = conn.cursor()
    # LAST_INSERT_ID(expr) makes the connection's "last insert id" carry the
    # copies count, so the same value is readable afterwards via
    # cursor.lastrowid whether the row was inserted (copies=1) or the
    # duplicate-key branch bumped it (copies+1).
    cursor.execute('''
    INSERT INTO booksales (title,copies)
    VALUES('The Greater Trumps',LAST_INSERT_ID(1))
    ON DUPLICATE KEY UPDATE copies = LAST_INSERT_ID(copies+1)
    ''')
    count = cursor.lastrowid
    cursor.close()
    conn.commit()
    #@ _UPDATE_COUNTER_
    print("count: %d" % count)
except mysql.connector.Error as e:
    print("Oops, the statement failed")
    print("Error: %s" % e)
# Close the connection whether or not the statement succeeded.
conn.close()
|
import unittest
from python_oop.testing.lab.List.extended_list import IntegerList
class IntegerListTest(unittest.TestCase):
    """Unit tests for IntegerList covering add, remove_index, get, insert,
    get_biggest and get_index, including out-of-range and type errors.
    (Commented-out dead test code from the original was removed.)"""

    def test_integers_add_when_integer(self):
        integer_list = IntegerList()
        internal_list = integer_list.add(1)
        self.assertEqual([1], internal_list)

    def test_integers_add_when_not_integer_exception(self):
        integer_list = IntegerList()
        with self.assertRaises(ValueError):
            integer_list.add("asd")

    def test_integers_remove_index_when_index_in_range(self):
        value_to_remove = 3
        integer_list = IntegerList(1, 2, value_to_remove, 4)
        result = integer_list.remove_index(2)
        self.assertEqual(value_to_remove, result)
        self.assertListEqual([1, 2, 4], integer_list.get_data())

    def test_integers_remove_index_when_index_is_positive_not_in_range(self):
        integer_list = IntegerList(1, 2, 3, 4)
        index = 5
        with self.assertRaises(IndexError):
            integer_list.remove_index(index)

    def test_integers_init_store_only_integers(self):
        # TODO(review): this test has no assertion -- it only verifies the
        # constructor tolerates non-integer arguments without raising.
        # Assert on get_data() once the filtering contract is confirmed.
        integer_list = IntegerList(1, 2, 3, 4, 'as', 1.0, 5)
        integer_list.get_data()

    def test_integers_get_when_index_in_range(self):
        value_to_get = 3
        integer_list = IntegerList(1, 2, value_to_get, 4)
        result = integer_list.get(2)
        self.assertEqual(value_to_get, result)
        self.assertListEqual([1, 2, 3, 4], integer_list.get_data())

    def test_integers_get_when_index_is_negative_not_in_range(self):
        integer_list = IntegerList(1, 2, 3, 4)
        index = -5
        with self.assertRaises(IndexError):
            integer_list.get(index)

    def test_integers_get_when_index_is_positive_not_in_range(self):
        integer_list = IntegerList(1, 2, 3, 4)
        index = 5
        with self.assertRaises(IndexError):
            integer_list.get(index)

    def test_integers_insert_when_index_in_range(self):
        value_to_insert = 3
        index_to_insert = 2
        integer_list = IntegerList(1, 2, 4)
        integer_list.insert(index_to_insert, value_to_insert)
        self.assertListEqual([1, 2, 3, 4], integer_list.get_data())

    def test_integers_insert_when_index_is_positive_not_in_range(self):
        value_to_insert = 3
        index_to_insert = 5
        integer_list = IntegerList(1, 2, 4)
        with self.assertRaises(IndexError):
            integer_list.insert(index_to_insert, value_to_insert)

    def test_integers_insert_when_not_integer_exception(self):
        value_to_insert = 'asd'
        index_to_insert = 2
        integer_list = IntegerList(1, 2, 4)
        with self.assertRaises(ValueError):
            integer_list.insert(index_to_insert, value_to_insert)

    def test_integers_biggest(self):
        biggest = 17
        integer_list = IntegerList(1, 2, biggest, 4)
        self.assertEqual(biggest, integer_list.get_biggest())

    def test_integers_get_index(self):
        el = 5
        index = 4
        integer_list = IntegerList(1, 2, 3, 4, 5, 6)
        self.assertEqual(index, integer_list.get_index(el))
if __name__ == '__main__':
    # Run the suite when this file is executed directly.
    unittest.main()
|
import os
import identifiers_api
from create_identifiers.lib import utils
from create_identifiers.lib import arguments
from create_identifiers.regulondb_multigenomic import multigenomic_identifiers
def run(input_path, **kwargs):
    """Create identifiers for the database selected in kwargs.

    :param input_path: directory containing the input JSON files
    :param kwargs: pipeline options; 'database' selects which RegulonDB
        dataset identifiers are created for
    :return: None
    :raises KeyError: when kwargs['database'] is not a supported name
    """
    utils.verify_paths(input_path)
    jsons_data = utils.load_files(input_path)
    database = kwargs.get("database", None)
    if database == "regulondbmultigenomic":
        multigenomic_identifiers.manage_ids(jsons_data, **kwargs)
    elif database == "regulondbht":
        # Not implemented yet.
        pass
    elif database == "regulondbdatamarts":
        # Not implemented yet.
        pass
    else:
        raise KeyError("Process of creating identifiers for the selected "
                       f"database({database}) has not been implemented or "
                       f"there's a typo, please verify it before continuing")
if __name__ == "__main__":
    # Parse CLI arguments into a local that does NOT shadow the imported
    # `arguments` module (the original rebound the module name).
    args = arguments.load_arguments()
    input_data_directory = args.inputdir
    # The RELEASE_VERSION environment variable takes precedence over the
    # CLI version; `or` also falls back when the variable is empty, exactly
    # like the original conditional expression did.
    regulondb_release_version = os.getenv("RELEASE_VERSION") or args.version
    identifiers_api.connect(args.url)
    kwargs = {
        "database": args.database,
        "regulondbReleaseVersion": regulondb_release_version,
        "sourceDBVersion": args.sourceversion,
        "sourceDBName": args.source,
        "organism": args.organism
    }
    utils.set_log(args.log)
    run(input_data_directory, **kwargs)
|
def test_service_properties(test_servicer_tls_config):
    """The TLS-configured servicer fixture exposes a hostport string,
    server-side SSL switched on, and both credential objects."""
    config = test_servicer_tls_config
    assert isinstance(config.hostport, str)
    assert config.use_server_ssl is True
    assert config.ssl_server_credentials is not None
    assert config.ssl_channel_credentials is not None
|
from flask import Blueprint, jsonify, request, Response
from project import db
from project.api.models import Todo, User
# Blueprint grouping all /api/todos endpoints.
todo_blueprint = Blueprint(
    "todos",
    __name__
)
@todo_blueprint.route("/api/todos")
def get_todos():
    """Return every todo as a JSON array of dicts."""
    all_todos = db.session.query(Todo).all()
    return jsonify([item.to_dict() for item in all_todos])
@todo_blueprint.route("/api/todos/<int:todo_id>")
def get_todo(todo_id):
    """Return one todo as JSON, or an empty 404 when it does not exist."""
    match = Todo.query.filter_by(todo_id=todo_id).first()
    if match:
        return jsonify(match.to_dict())
    return Response(status=404)
@todo_blueprint.route("/api/todos/<int:todo_id>", methods=["DELETE"])
def delete_todo(todo_id):
    """Delete a todo: 204 on success, 404 when it does not exist."""
    doomed = Todo.query.filter_by(todo_id=todo_id).first()
    if not doomed:
        return Response(status=404)
    db.session.delete(doomed)
    db.session.commit()
    return Response(status=204)
@todo_blueprint.route("/api/todos", methods=["POST", "PUT"])
def create_or_update_todo():
    """Create (POST) or update (PUT) a todo from a JSON request body.

    Required body keys: title, content, completed, dueDate, priority
    (plus todoId for PUT).  Optional: userId, which must reference an
    existing user when present.
    """
    response = {}
    request_json = request.get_json()
    # Validation
    keys = ["title", "content", "completed", "dueDate", "priority"]
    if request.method == "PUT":
        keys.append("todoId")
    for key in keys:
        if key not in request_json:
            response["message"] = "Missing {key} in request body".format(key=key)
            return jsonify(response), 400
    # userId is optional; when supplied it must resolve to a real user.
    user_id = request_json.get("userId")
    if user_id:
        user = User.query.filter_by(user_id=user_id).first()
        if not user:
            response["message"] = "User not found"
            return jsonify(response), 404
    # Parse the request data
    todo = None
    if request.method == "POST":
        todo = Todo()
    elif request.method == "PUT":
        todo_id = int(request_json["todoId"])
        todo = Todo.query.filter_by(todo_id=todo_id).first()
        if not todo:
            response["message"] = "Todo not found"
            return jsonify(response), 404
    todo.title = request_json["title"]
    todo.content = request_json["content"]
    todo.completed = request_json["completed"]
    todo.due_date = request_json["dueDate"]
    todo.priority = request_json["priority"]
    if "userId" in request_json:
        todo.user_id = user_id
    if request.method == "POST":
        db.session.add(todo)
        response["message"] = "Todo created successfully"
    elif request.method == "PUT":
        response["message"] = "Todo updated successfully"
    db.session.commit()
    response["todoId"] = todo.todo_id
    # NOTE(review): 201 is returned for PUT updates as well as POST creates;
    # 200 would be the conventional status for an update.
    return jsonify(response), 201
|
import factory
import factory.fuzzy
from django.contrib.gis.geos import MultiPolygon, Point, Polygon
from factory.random import randgen
from munigeo.models import Address, Municipality, Street
from .models import ContractZone
class ContractZoneFactory(factory.django.DjangoModelFactory):
    """Factory producing ContractZone instances with unique boundaries."""

    name = factory.Faker("bs")
    # Boundary: a 1x1-degree square shifted one degree east per sequence
    # step, so successive zones never share the same boundary.
    boundary = factory.Sequence(
        lambda n: MultiPolygon(
            Polygon(
                ((24 + n, 60), (25 + n, 60), (25 + n, 61), (24 + n, 61), (24 + n, 60))
            )
        )
    )
    origin_id = factory.Sequence(lambda n: n)

    class Meta:
        model = ContractZone
# Because of a bug in django-munigeo v0.3.2 we cannot use Django's get_or_create() for models that
# have translated fields, so we need to use this workaround for now.
def _get_or_create_municipality(municipality_id):
    """Return the Municipality with the given id, creating it if missing.

    EAFP lookup: fetch first, build via MunicipalityFactory on DoesNotExist.
    (See the module comment above on why munigeo's get_or_create cannot be
    used here.)  The parameter was renamed from `id` to avoid shadowing the
    builtin; the sole caller passes it positionally.
    """
    try:
        return Municipality.objects.get(id=municipality_id)
    except Municipality.DoesNotExist:
        return MunicipalityFactory(id=municipality_id)
class MunicipalityFactory(factory.django.DjangoModelFactory):
    """Factory for munigeo Municipality rows with random name and uuid id."""

    name = factory.Faker("city")
    id = factory.Faker("uuid4")

    class Meta:
        model = Municipality
class StreetFactory(factory.django.DjangoModelFactory):
    """Factory for munigeo Street rows, always in the helsinki municipality."""

    # Reuse a single shared municipality row across all generated streets.
    municipality = factory.LazyFunction(lambda: _get_or_create_municipality("helsinki"))
    name = factory.Faker("street_name")

    class Meta:
        model = Street
class AddressFactory(factory.django.DjangoModelFactory):
    """Factory for munigeo Address rows on a generated street."""

    # House number 1-20, with number_end slightly above it.
    number = factory.LazyFunction(lambda: str(randgen.randint(1, 20)))
    number_end = factory.LazyAttribute(
        lambda o: str(int(o.number) + randgen.randint(1, 5))
    )
    letter = factory.Faker("random_letter")
    street = factory.SubFactory(StreetFactory)
    # Random point inside roughly lon 24.915-24.955, lat 60.154-60.176
    # (central Helsinki) -- presumably lon/lat WGS84; confirm project SRID.
    location = factory.LazyFunction(
        lambda: Point(
            24.915 + randgen.uniform(0, 0.040), 60.154 + randgen.uniform(0, 0.022)
        )
    )

    class Meta:
        model = Address
|
def create_identity(user, sp_mapping):
    """Project a user object's attributes into an SP identity dict.

    :param user: any object whose attributes hold identity values
    :param sp_mapping: mapping of {user attribute name: outgoing name}
    :return: dict of outgoing name -> value, containing only the
        attributes the user actually has
    """
    return {
        out_attr: getattr(user, user_attr)
        for user_attr, out_attr in sp_mapping.items()
        if hasattr(user, user_attr)
    }
|
__author__ = 'erik + jc + benni'
import numpy as np
class Coordinate(object):
    """Base class for everything that has an id and a 3-D position."""

    def __init__(self, id, x, y, z):
        """Store the id and pack x, y, z into a numpy array.

        :param id: id of this object
        :param x: x coordinate
        :param y: y coordinate
        :param z: z coordinate
        """
        self._id = id
        self._coordinates = np.array([x, y, z])

    def get_id(self):
        """Return the id of this coordinate."""
        return self._id

    def get_coordinates(self):
        """Return the numpy array [x, y, z] holding the position."""
        return self._coordinates
class Vertex(Coordinate):
    """
    A Vertex object is a part of a surface network. Therefore it is associated with Edges as well as Quads connected to
    this Vertex.
    """
    def __init__(self, id, x, y, z):
        """
        :param id: id of this vertex. The user has to take care, that this id is unique!
        :param x: x coordinate
        :param y: y coordinate
        :param z: z coordinate
        :return:
        """
        super(Vertex, self).__init__(id, x, y, z)
        # Connectivity sets, filled incrementally via add_edge/add_quad.
        self._edges = set()
        self._quads = set()

    def add_edge(self, edge):
        """
        adds an edge to the vertex. This means, that this edge is connected to the Vertex.
        :param edge: Edge object
        :return:
        """
        self._edges.add(edge)

    def add_quad(self, quad):
        """
        adds an quad to the vertex. This means, that this quad is connected to the Vertex
        :param quad: Quad object
        :return:
        """
        self._quads.add(quad)

    def get_edges(self):
        """
        :return: set of all edges connected to this Vertex
        """
        return self._edges

    def get_quads(self):
        """
        :return: set of all quads connected to this Vertex
        :rtype: set
        """
        return self._quads

    def number_edges(self):
        """
        :return: number of edges connected to this vertex
        """
        return len(self._edges)

    def number_quads(self):
        """
        :return: number of quads connected to this vertex
        """
        return len(self._quads)
class FineVertex(Coordinate):
    """
    A FineVertex object is a vertex of the fine resolution representation of our surface reconstruction. Each of these
    vertices is associated with a Quad patch of the coarse resolution. Every FineVertex also has a set of parameters
    u and v, which corresponds to its parametrization in the parameter domain of the associated Quad.
    """

    def __init__(self, id, x, y, z, u, v, quad):
        """
        :param id: id of this vertex. The user has to take care, that this id is unique!
        :param x: x coordinate
        :param y: y coordinate
        :param z: z coordinate
        :param u: u parameter
        :param v: v parameter
        :param quad: associated Quad
        """
        super(FineVertex, self).__init__(id, x, y, z)
        # (a redundant `self._id = id` was removed; the base class sets it)
        self._quad = quad
        self._params = {'u': u, 'v': v}

    def get_parameters(self):
        """
        :return: the dict with both parameters {'u': u, 'v': v}
        """
        return self._params

    def get_u(self):
        """
        :return: parameter u
        """
        return self._params['u']

    def get_v(self):
        """
        :return: parameter v
        """
        return self._params['v']

    def get_associated_quad(self):
        """
        Returns the pointer to the patch this FineVertex is associated with.
        :return: returns a Quad object
        """
        return self._quad
class Vertex_DooSabin(Vertex):
    """Vertex specialised for Doo-Sabin subdivision: additionally tracks
    neighbouring vertices/faces and the subdivision matrices A, B1, B2, C."""

    def __init__(self, id, x, y, z):
        super(Vertex_DooSabin, self).__init__(id, x, y, z)
        self.neighbouringVertices = []  # adjacent vertex objects
        self.neighbouringFaces = []     # adjacent face objects
        self.childFace = None
        self.parentOrigGrid = None
        # Subdivision weight matrices, filled in elsewhere.
        self.A = []
        self.B1 = []
        self.B2 = []
        self.C = []

    def getId(self):
        """CamelCase accessor kept for existing callers; same as get_id()."""
        return self._id

    def getCoordinates(self):
        """CamelCase accessor kept for existing callers; same as get_coordinates()."""
        return self._coordinates

    def addNeighbouringVertex(self, vertex):
        """Append a neighbouring vertex (expects a vertex object, not an id)."""
        self.neighbouringVertices.append(vertex)

    def addNeighbouringFace(self, face):
        """Append a neighbouring face (expects a face object, not vertex ids)."""
        self.neighbouringFaces.append(face)
|
import math
import math as m
# NOTE(review): the duplicate math import, the bare `{}` expression and the
# nested input() below look like scratch/exercise code.  As written, the
# result of the inner input() is used as the PROMPT of the outer input() --
# presumably unintended, but kept as-is.
{}
x = int(input(int(input())))
print(x)
|
#-*- coding:utf-8 -*-
from flask import Flask,render_template,request
import MySQLdb,sys
import base64
# Python 2 hack: restore sys.setdefaultencoding (removed by site.py) so
# utf-8 strings round-trip through MySQL without UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding("utf-8")
# MySQL connection settings.
MYSQL_HOST = '127.0.0.1'
MYSQL_USER = 'root'
MYSQL_PASS = 'root'
MYSQL_DB = 'bot'
app = Flask(__name__)
@app.route('/view')
def view():
    """Render every recorded bot row using show.html."""
    conn = MySQLdb.connect(MYSQL_HOST, MYSQL_USER, MYSQL_PASS, MYSQL_DB, charset="utf8")
    cursor = conn.cursor()
    cursor.execute('select * from bot')
    results = cursor.fetchall()
    # Close the cursor as well as the connection (the original leaked it).
    cursor.close()
    conn.close()
    return render_template("show.html",
                           results=results)
@app.route('/')
def main():
    # Simple liveness endpoint.
    return 'Hello,World'
@app.route("/callback",methods=[ 'GET'])
def callback():
conn = MySQLdb.connect(MYSQL_HOST, MYSQL_USER,MYSQL_PASS,MYSQL_DB,charset="utf8")
cursor = conn.cursor()
mac = base64.b64decode(request.values['mac'])
ip = base64.b64decode(request.values['ip'])
time = base64.b64decode(request.values['time'])
print mac,ip,time
cursor.execute('insert into bot(mac,ip,time) values ("%s","%s","%s")' %(mac,ip,time))
cursor.close()
conn.close()
return 'ok'
if __name__ == '__main__':
try:
app.run(host='0.0.0.0',port=80,debug=True,threaded=True)
except:
pass
|
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
"""
C13751579: Asset Picker UI/UX
"""
import os
import sys
from PySide2 import QtWidgets, QtTest, QtCore
from PySide2.QtCore import Qt
import azlmbr.asset as asset
import azlmbr.bus as bus
import azlmbr.legacy.general as general
import azlmbr.paths
import azlmbr.math as math
sys.path.append(os.path.join(azlmbr.paths.devroot, 'AutomatedTesting', 'Gem', 'PythonTests'))
import editor_python_test_tools.hydra_editor_utils as hydra
import editor_python_test_tools.pyside_utils as pyside_utils
from editor_python_test_tools.editor_test_helper import EditorTestHelper
class AssetPickerUIUXTest(EditorTestHelper):
    """Exercises the Asset Picker dialog's UI/UX behavior (window title,
    move/resize, tree expand/collapse, scroll-bar visibility, asset-type
    filtering) and verifies that a picked Mesh asset is actually assigned,
    via both the OK button and the Enter key.
    """
    def __init__(self):
        # "level" is the name of the level to create; log lines carry the
        # "AssetPicker_UI_UX" prefix for the log monitor.
        EditorTestHelper.__init__(self, log_prefix="AssetPicker_UI_UX", args=["level"])
    @pyside_utils.wrap_async
    async def run_test(self):
        """
        Summary:
        Verify the functionality of Asset Picker and UI/UX properties
        Expected Behavior:
        The asset picker opens and is labeled appropriately ("Pick Model Asset" in this instance).
        The Asset Picker window can be resized and moved around the screen.
        The file tree expands/retracts appropriately and a scroll bar is present when the menus extend
        beyond the length of the window.
        The assets are limited to a valid type for the field selected (mesh assets in this instance)
        The asset picker is closed and the selected asset is assigned to the mesh component.
        Test Steps:
        1) Open a new level
        2) Create entity and add Mesh component
        3) Access Entity Inspector
        4) Click Asset Picker (Mesh Asset)
            a) Collapse all the files initially and verify if scroll bar is not visible
            b) Expand/Verify Top folder of file path
            c) Expand/Verify Nested folder of file path
            d) Verify if the ScrollBar appears after expanding folders
            e) Collapse Nested and Top Level folders and verify if collapsed
            f) Verify if the correct files are appearing in the Asset Picker
            g) Move the widget and verify position
            h) Resize the widget
            i) Assign Mesh asset
        5) Verify if Mesh Asset is assigned via both OK/Enter options
        Note:
        - This test file must be called from the Lumberyard Editor command terminal
        - Any passed and failed tests are written to the Editor.log file.
        Parsing the file or running a log_monitor are required to observe the test results.
        :return: None
        """
        self.file_path = ["AutomatedTesting", "Assets", "Objects", "Foliage"]
        self.incorrect_file_found = False
        self.mesh_asset = "cedar.azmodel"
        self.prefix = ""
        def is_asset_assigned(component, interaction_option):
            # Compare the asset id currently set on the component against the
            # catalog id of the expected cedar model (string-compare the ids).
            path = os.path.join("assets", "objects", "foliage", "cedar.azmodel")
            expected_asset_id = asset.AssetCatalogRequestBus(bus.Broadcast, 'GetAssetIdByPath', path, math.Uuid(),
                                                             False)
            result = hydra.get_component_property_value(component, "Controller|Configuration|Mesh Asset")
            expected_asset_str = expected_asset_id.invoke("ToString")
            result_str = result.invoke("ToString")
            print(f"Asset assigned for {interaction_option} option: {expected_asset_str == result_str}")
            return expected_asset_str == result_str
        def move_and_resize_widget(widget):
            # Move the widget and verify position
            initial_position = widget.pos()
            x, y = initial_position.x() + 5, initial_position.y() + 5
            widget.move(x, y)
            curr_position = widget.pos()
            move_success = curr_position.x() == x and curr_position.y() == y
            self.test_success = move_success and self.test_success
            self.log(f"Widget Move Test: {move_success}")
            # Resize the widget and verify size
            width, height = (
                widget.geometry().width() + 10,
                widget.geometry().height() + 10,
            )
            widget.resize(width, height)
            resize_success = widget.geometry().width() == width and widget.geometry().height() == height
            self.test_success = resize_success and self.test_success
            self.log(f"Widget Resize Test: {resize_success}")
        # NOTE(review): the QModelIndex() default is evaluated once at definition
        # time; safe here because it is only read, never mutated in place.
        def verify_files_appeared(model, allowed_asset_extensions, parent_index=QtCore.QModelIndex()):
            # Breadth-first walk of the asset tree: flag any entry whose
            # extension is outside allowed_asset_extensions (entries ending in
            # ")" and entries without a "." are skipped).
            indices = [parent_index]
            while len(indices) > 0:
                parent_index = indices.pop(0)
                for row in range(model.rowCount(parent_index)):
                    cur_index = model.index(row, 0, parent_index)
                    cur_data = cur_index.data(Qt.DisplayRole)
                    if (
                        "." in cur_data
                        and (cur_data.lower().split(".")[-1] not in allowed_asset_extensions)
                        and not cur_data[-1] == ")"
                    ):
                        print(f"Incorrect file found: {cur_data}")
                        self.incorrect_file_found = True
                        indices = list()
                        break
                    indices.append(cur_index)
            self.test_success = not self.incorrect_file_found and self.test_success
        def print_message_prefix(message):
            # Prefix log lines with the current picker context (set in asset_picker).
            print(f"{self.prefix}: {message}")
        async def asset_picker(prefix, allowed_asset_extensions, asset, interaction_option):
            # Drives the modal Asset Picker: performs steps 4a-4h from the
            # docstring, then assigns `asset` via the OK button or the Enter key.
            active_modal_widget = await pyside_utils.wait_for_modal_widget()
            if active_modal_widget and self.prefix == "":
                self.prefix = prefix
            dialog = active_modal_widget.findChildren(QtWidgets.QDialog, "AssetPickerDialogClass")[0]
            print_message_prefix(f"Asset Picker title for Mesh: {dialog.windowTitle()}")
            tree = dialog.findChildren(QtWidgets.QTreeView, "m_assetBrowserTreeViewWidget")[0]
            scroll_area = tree.findChild(QtWidgets.QWidget, "qt_scrollarea_vcontainer")
            scroll_bar = scroll_area.findChild(QtWidgets.QScrollBar)
            # a) Collapse all the files initially and verify if scroll bar is not visible
            tree.collapseAll()
            await pyside_utils.wait_for_condition(lambda: not scroll_bar.isVisible(), 0.5)
            print_message_prefix(
                f"Scroll Bar is not visible before expanding the tree: {not scroll_bar.isVisible()}"
            )
            # Get Model Index of the file paths
            model_index_1 = pyside_utils.find_child_by_pattern(tree, self.file_path[0])
            print(model_index_1.model())
            model_index_2 = pyside_utils.find_child_by_pattern(model_index_1, self.file_path[1])
            # b) Expand/Verify Top folder of file path
            print_message_prefix(f"Top level folder initially collapsed: {not tree.isExpanded(model_index_1)}")
            tree.expand(model_index_1)
            print_message_prefix(f"Top level folder expanded: {tree.isExpanded(model_index_1)}")
            # c) Expand/Verify Nested folder of file path
            print_message_prefix(f"Nested folder initially collapsed: {not tree.isExpanded(model_index_2)}")
            tree.expand(model_index_2)
            print_message_prefix(f"Nested folder expanded: {tree.isExpanded(model_index_2)}")
            # d) Verify if the ScrollBar appears after expanding folders
            tree.expandAll()
            await pyside_utils.wait_for_condition(lambda: scroll_bar.isVisible(), 0.5)
            print_message_prefix(f"Scroll Bar appeared after expanding tree: {scroll_bar.isVisible()}")
            # e) Collapse Nested and Top Level folders and verify if collapsed
            tree.collapse(model_index_2)
            print_message_prefix(f"Nested folder collapsed: {not tree.isExpanded(model_index_2)}")
            tree.collapse(model_index_1)
            print_message_prefix(f"Top level folder collapsed: {not tree.isExpanded(model_index_1)}")
            # f) Verify if the correct files are appearing in the Asset Picker
            verify_files_appeared(tree.model(), allowed_asset_extensions)
            print_message_prefix(f"Expected Assets populated in the file picker: {not self.incorrect_file_found}")
            # While we are here we can also check if we can resize and move the widget
            move_and_resize_widget(active_modal_widget)
            # g) Assign asset: filter the tree via the search box, select the
            # matching entry, then confirm with OK or Enter.
            tree.collapseAll()
            await pyside_utils.wait_for_condition(
                lambda: len(dialog.findChildren(QtWidgets.QFrame, "m_searchWidget")) > 0, 0.5)
            search_widget = dialog.findChildren(QtWidgets.QFrame, "m_searchWidget")[0]
            search_line_edit = search_widget.findChildren(QtWidgets.QLineEdit, "textSearch")[0]
            search_line_edit.setText(asset)
            tree.expandAll()
            asset_model_index = pyside_utils.find_child_by_pattern(tree, asset)
            await pyside_utils.wait_for_condition(lambda: asset_model_index.isValid(), 2.0)
            tree.expand(asset_model_index)
            tree.setCurrentIndex(asset_model_index)
            if interaction_option == "ok":
                button_box = dialog.findChild(QtWidgets.QDialogButtonBox, "m_buttonBox")
                ok_button = button_box.button(QtWidgets.QDialogButtonBox.Ok)
                await pyside_utils.click_button_async(ok_button)
            elif interaction_option == "enter":
                QtTest.QTest.keyClick(tree, Qt.Key_Enter, Qt.NoModifier)
            # Reset so the next asset_picker invocation can set its own prefix.
            self.prefix = ""
        # 1) Open a new level
        self.test_success = self.create_level(
            self.args["level"],
            heightmap_resolution=1024,
            heightmap_meters_per_pixel=1,
            terrain_texture_resolution=4096,
            use_terrain=False,
        )
        # 2) Create entity and add Mesh component
        entity_position = math.Vector3(125.0, 136.0, 32.0)
        entity = hydra.Entity("TestEntity")
        entity.create_entity(entity_position, ["Mesh"])
        # 3) Access Entity Inspector
        editor_window = pyside_utils.get_editor_main_window()
        entity_inspector = editor_window.findChild(QtWidgets.QDockWidget, "Entity Inspector")
        component_list_widget = entity_inspector.findChild(QtWidgets.QWidget, "m_componentListContents")
        # 4) Click on Asset Picker (Mesh Asset)
        general.select_object("TestEntity")
        general.idle_wait(0.5)
        mesh_asset = component_list_widget.findChildren(QtWidgets.QFrame, "Mesh Asset")[0]
        attached_button = mesh_asset.findChildren(QtWidgets.QPushButton, "attached-button")[0]
        # Assign Mesh Asset via OK button
        # NOTE(review): deliberately not awaited -- presumably click_button_async
        # schedules the click so the modal dialog does not block this coroutine;
        # confirm against pyside_utils.
        pyside_utils.click_button_async(attached_button)
        await asset_picker("Mesh Asset", ["azmodel", "fbx"], "cedar (ModelAsset)", "ok")
        # 5) Verify if Mesh Asset is assigned
        try:
            mesh_success = await pyside_utils.wait_for_condition(lambda: is_asset_assigned(entity.components[0],
                                                                                           "ok"))
        except pyside_utils.EventLoopTimeoutException as err:
            print(err)
            mesh_success = False
        self.test_success = mesh_success and self.test_success
        # Clear Mesh Asset
        hydra.get_set_test(entity, 0, "Controller|Configuration|Mesh Asset", None)
        general.select_object("TestEntity")
        general.idle_wait(0.5)
        mesh_asset = component_list_widget.findChildren(QtWidgets.QFrame, "Mesh Asset")[0]
        attached_button = mesh_asset.findChildren(QtWidgets.QPushButton, "attached-button")[0]
        # Assign Mesh Asset via Enter
        # NOTE(review): not awaited, same rationale as the click above.
        pyside_utils.click_button_async(attached_button)
        await asset_picker("Mesh Asset", ["azmodel", "fbx"], "cedar (ModelAsset)", "enter")
        # 5) Verify if Mesh Asset is assigned
        try:
            mesh_success = await pyside_utils.wait_for_condition(lambda: is_asset_assigned(entity.components[0],
                                                                                           "enter"))
        except pyside_utils.EventLoopTimeoutException as err:
            print(err)
            mesh_success = False
        self.test_success = mesh_success and self.test_success
# Instantiate and run the test when this script is executed inside the Editor.
test = AssetPickerUIUXTest()
test.run()
|
from .cli import Cli, CommandGroup
from .utils import Option
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from aiida.orm import RemoteData, FolderData, SinglefileData, Dict
from aiida_quantumespresso.calculations.namelists import NamelistsCalculation
class Pw2wannier90Calculation(NamelistsCalculation):
    """Plugin wrapping the ``pw2wannier90.x`` code of Quantum ESPRESSO.

    Produces the Amn, Mmn, ... overlap files that Wannier90 consumes when
    computing Wannier functions.
    For more information, refer to http://www.quantum-espresso.org/
    and http://www.wannier.org/
    """
    # Seedname handed to pw2wannier90; the .nnkp file is copied under this name.
    _SEEDNAME = 'aiida'
    _default_namelists = ['INPUTPP']
    # Keywords the user must not set; they are controlled by the plugin.
    _blocked_keywords = [
        ('INPUTPP', 'outdir', NamelistsCalculation._OUTPUT_SUBFOLDER),
        ('INPUTPP', 'prefix', NamelistsCalculation._PREFIX),
        ('INPUTPP', 'seedname', _SEEDNAME),
    ]
    # Only aiida.out is retrieved by default. Extra files such as
    # _SEEDNAME.amn/.mmn/.eig can be requested through
    # inputs.settings['ADDITIONAL_RETRIEVE_LIST'].
    _internal_retrieve_list = []
    _default_parser = 'quantumespresso.pw2wannier90'

    @classmethod
    def define(cls, spec):
        """Declare inputs, outputs and exit codes on top of the namelist base."""
        super(Pw2wannier90Calculation, cls).define(spec)
        spec.input('nnkp_file', valid_type=SinglefileData,
                   help='A SinglefileData containing the .nnkp file generated by wannier90.x -pp')
        spec.input('parent_folder', valid_type=(RemoteData, FolderData),
                   help='The output folder of a pw.x calculation')
        spec.output('output_parameters', valid_type=Dict)
        spec.default_output_node = 'output_parameters'
        # Exit codes reported by the parser, grouped here for readability.
        for status, label, message in (
            (100, 'ERROR_NO_RETRIEVED_FOLDER',
             'The retrieved folder data node could not be accessed.'),
            (110, 'ERROR_READING_OUTPUT_FILE',
             'The output file could not be read from the retrieved folder.'),
            (130, 'ERROR_JOB_NOT_DONE',
             "The computation did not finish properly ('JOB DONE' not found)."),
            (140, 'ERROR_GENERIC_QE_ERROR',
             'QE printed an error message'),
            (150, 'ERROR_GENERIC_PARSING_FAILURE',
             'An error happened while parsing the output file'),
        ):
            spec.exit_code(status, label, message=message)

    def prepare_for_submission(self, folder):
        """
        Prepare the inputs of the calculation and the calcinfo data.
        :param folder: an `aiida.common.folders.Folder` to temporarily write files on disk
        :return: `aiida.common.datastructures.CalcInfo` instance
        """
        # Let the generic namelist machinery build the base CalcInfo first.
        calcinfo = super(Pw2wannier90Calculation, self).prepare_for_submission(folder)
        # Then schedule the wannier90 .nnkp file to be copied in under the
        # seedname pw2wannier90 expects.
        nnkp_node = self.inputs.nnkp_file
        target_name = '{}.nnkp'.format(self._SEEDNAME)
        calcinfo.local_copy_list.append((nnkp_node.uuid, nnkp_node.filename, target_name))
        return calcinfo
|
"""AssKeyValueMapping definition."""
from collections.abc import Iterable
from typing import Any
from ass_parser.ass_sections.ass_base_section import AssBaseSection
from ass_parser.errors import CorruptAssLineError
from ass_parser.observable_mapping_mixin import ObservableMappingMixin
class AssKeyValueMapping(ObservableMappingMixin[str, str], AssBaseSection):
    """ASS key-value mapping section."""

    def consume_ass_body_lines(self, lines: list[tuple[int, str]]) -> None:
        """Populate self from ASS text representation of this section,
        excluding the ASS header line.

        :param lines: list of tuples (line_num, line)
        """
        self.clear()
        for line_num, raw_line in lines:
            try:
                key, value = raw_line.split(":", 1)
            except ValueError as err:
                # No colon in the line -> the section is corrupt.
                raise CorruptAssLineError(
                    line_num, raw_line, "expected a colon"
                ) from err
            self[key] = value.lstrip()

    def produce_ass_body_lines(self) -> Iterable[str]:
        """Produce ASS text representation of self, excluding the ASS header
        line.

        :return: a generator of ASS section body lines
        """
        yield from (f"{key}: {value}" for key, value in self.items())

    def __eq__(self, other: Any) -> bool:
        """Check for equality. Ignores event handlers.

        :param other: other object
        :return: whether objects are equal
        """
        return (
            isinstance(other, AssKeyValueMapping)
            and self.name == other.name
            and self._data == other._data
        )
|
import os
import numpy as np
from lib.datasets.dataset_utils import filter_mot_gt_boxes
import cv2
import pandas
# Statistics over MOT ground-truth (gt) boxes for the selected datasets:
# the maximum number of boxes in any single frame, and mean/std/min/max of
# box width, height and aspect ratio (h/w).
mot_dir = '/home/liuqk/Dataset/MOT'
# Sequence names per dataset and split.
mot = {
    'MOT17': {
        'train': ['MOT17-13', 'MOT17-11', 'MOT17-10', 'MOT17-09', 'MOT17-05', 'MOT17-04', 'MOT17-02'],
        'test': ['MOT17-01', 'MOT17-03', 'MOT17-06', 'MOT17-07', 'MOT17-08', 'MOT17-12', 'MOT17-14']
    },
    'MOT16': {
        'train': ['MOT16-13', 'MOT16-11', 'MOT16-10', 'MOT16-09', 'MOT16-05', 'MOT16-04', 'MOT16-02'],
        'test': ['MOT16-01', 'MOT16-03', 'MOT16-06', 'MOT16-07', 'MOT16-08', 'MOT16-12', 'MOT16-14']
    },
    '2DMOT2015': {
        'train': ['ETH-Bahnhof', 'ETH-Sunnyday', 'KITTI-13', 'KITTI-17', 'PETS09-S2L1', 'TUD-Campus', 'TUD-Stadtmitte'],
        'test': ['ADL-Rundle-1', 'ADL-Rundle-3', 'AVG-TownCentre', 'ETH-Crossing', 'ETH-Jelmoli', 'ETH-Linthescher',
                 'KITTI-16', 'KITTI-19', 'PETS09-S2L2', 'TUD-Crossing', 'Venice-1']
    }
}
# statistics on gt boxes
dataset = ['2DMOT2015', 'MOT17']
stage = ['train']
max_num_box_per_frame = -1
xywh = np.zeros((0, 4))  # accumulated [x, y, w, h] rows over all sequences
for d in dataset:
    for s in stage:
        sub_d = mot[d][s]
        for seq in sub_d:
            if d in ['MOT17']:
                seq = seq + '-DPM'
            # Read one image only to get the frame size; im_h/im_w are unused
            # while the normalization below stays commented out.
            one_im_path = os.path.join(mot_dir, d, s, seq, 'img1', '000001.jpg')
            one_im = cv2.imread(one_im_path)
            im_h, im_w = one_im.shape[0], one_im.shape[1]
            gt_path = os.path.join(mot_dir, d, s, seq, 'gt', 'gt.txt')
            gt = np.loadtxt(gt_path, delimiter=',')
            if d in ['MOT17']:
                gt = filter_mot_gt_boxes(gt_boxes=gt, vis_threshold=0.1)
            # Column 0 is grouped on to count boxes per frame (see the variable
            # name below); 'id' is a misleading loop name and shadows the builtin.
            for id in range(1, int(np.max(gt[:, 0])) + 1):
                index = gt[:,0] == id
                if max_num_box_per_frame < index.sum():
                    max_num_box_per_frame = index.sum()
                    print('find max number of boxes in one frame: {}'.format(max_num_box_per_frame))
            one_seq_boxes = gt[:, 2:6]  # [x, y, w, h] columns of gt.txt
            one_seq_boxes[:, [0, 2]] = one_seq_boxes[:, [0, 2]]# / im_w  (normalization disabled)
            one_seq_boxes[:, [1, 3]] = one_seq_boxes[:, [1, 3]]# / im_h -- NOTE(review): original comment said "/ im_w"; disabled anyway
            xywh = np.concatenate((xywh, one_seq_boxes), 0)
print(xywh)
# Per-box [w, h, h/w] and their statistics.
whr = np.zeros((xywh.shape[0], 3))
whr[:, 0] = xywh[:, 2] # w
whr[:, 1] = xywh[:, 3] # h
whr[:, 2] = xywh[:, 3] / xywh[:, 2] # ratio, h/w
mean_whr = np.mean(whr, axis=0)
std_whr = np.std(whr, axis=0)
min_whr = np.min(whr, axis=0)
max_whr = np.max(whr, axis=0)
print('mean [w, h, h/w ]: ', mean_whr)
print('std [w, h, h/w]: ', std_whr)
print('min [w, h, h/w ]: ', min_whr)
print('max [w, h, h/w]: ', max_whr)
|
from ward import test
from users.tests.api import admin_graphql_client
from users.tests.factories import user_factory
from users.tests.session import db
@test("unlogged cannot fetch me")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
user = await user_factory(email="user@email.it", is_staff=False)
admin_graphql_client.force_login(user)
query = """query {
me {
id
email
}
}"""
response = await admin_graphql_client.query(query)
assert response.errors[0]["message"] == "Unauthorized"
@test("fetch me")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(email="user@email.it", is_staff=True)
admin_graphql_client.force_login(logged_user)
query = """query {
me {
id
email
}
}"""
response = await admin_graphql_client.query(query)
assert not response.errors
assert response.data["me"] == {
"id": str(logged_user.id),
"email": logged_user.email,
}
@test("only staff accounts can fetch me")
async def _(
admin_graphql_client=admin_graphql_client, db=db, user_factory=user_factory
):
logged_user = await user_factory(email="user@email.it", is_staff=False)
admin_graphql_client.force_login(logged_user)
query = """query {
me {
id
email
}
}"""
response = await admin_graphql_client.query(query)
assert response.errors[0]["message"] == "Unauthorized"
|
import pandas as pd
import matplotlib.pyplot as plt
df = pd.read_csv("http://fbug-store.herokuapp.com/csv")
SENSOR_1_ID = "200040001047373333353132"
SENSOR_2_ID = "35005c000d51363034323832"
SENSOR_3_ID = "2d0049000d51363034323832"
WINDOW = 40
df.timestamp = pd.to_datetime(df.timestamp)
sensor_1_time = df[df.device == SENSOR_1_ID].timestamp.values
sensor_2_time = df[df.device == SENSOR_2_ID].timestamp.values
sensor_3_time = df[df.device == SENSOR_3_ID].timestamp.values
sensor_1_concentration = df[df.device == SENSOR_1_ID].concentration.rolling(window=WINDOW).mean().values
sensor_2_concentration = df[df.device == SENSOR_2_ID].concentration.rolling(window=WINDOW).mean().values
sensor_3_concentration = df[df.device == SENSOR_3_ID].concentration.rolling(window=WINDOW).mean().values
plt.plot(sensor_1_time, sensor_1_concentration, label="Sensor1")
plt.plot(sensor_2_time, sensor_2_concentration, label="Sensor2")
plt.plot(sensor_3_time, sensor_3_concentration, label="Sensor3")
plt.legend()
plt.show()
'''
‘200040001047373333353132’: ‘Sensor 1’,
‘35005c000d51363034323832’: ‘Sensor 2’,
‘2d0049000d51363034323832’: ‘Sensor 3’
'''
|
# Copyright (c) 2018-2022 The CYBAVO developers
# All Rights Reserved.
# NOTICE: All information contained herein is, and remains
# the property of CYBAVO and its suppliers,
# if any. The intellectual and technical concepts contained
# herein are proprietary to CYBAVO
# Dissemination of this information or reproduction of this materia
# is strictly forbidden unless prior written permission is obtained
# from CYBAVO.
from flask import Blueprint, request
from mockserver.models import setAPICode, getAPICode
from hashlib import sha256
# Flask blueprint grouping the merchant-scoped mock endpoints.
merchant = Blueprint('merchant', __name__)
@merchant.route('/<merchant_id>/apitoken', methods=['GET', 'POST'])
def apitoken(merchant_id):
    """Store the API code/secret pair for a merchant.

    Expects a JSON body of the form {"api_code": ..., "api_secret": ...}.
    Returns {'result': 1} on success, or ('Invalid parameters', 400) when the
    merchant id or the JSON payload is missing/incomplete.
    """
    if (not merchant_id):
        return 'Invalid parameters', 400
    # FIX: the original called request.get_json() unconditionally, so a GET or
    # a non-JSON POST crashed (None subscripting / unsupported media type)
    # instead of answering 400. silent=True yields None on a bad/missing body.
    args = request.get_json(silent=True)
    if not args or 'api_code' not in args or 'api_secret' not in args:
        return 'Invalid parameters', 400
    setAPICode(merchant_id, args['api_code'], args['api_secret'])
    # NOTE(review): this prints the API secret to stdout; acceptable for a mock
    # server only -- never do this in production code.
    print('API Code:', args['api_code'], 'API Secret:', args['api_secret'])
    return { 'result': 1 }, 200
|
#
# Code by Alexander Pruss and under the MIT license
#
from mineturtle import *
# Set up a grid-aligned turtle that draws instantly.
t = Turtle()
t.turtle(None)  # presumably hides the turtle entity itself -- confirm against mineturtle
t.pendelay(0)  # no delay between drawing steps
t.angle(0) # align to grid
def face():
    # Draw one 20x20 square as a filled face at the module-level turtle `t`'s
    # current position and orientation.
    t.startface()
    for i in range(4):
        t.go(20)
        t.yaw(90)
    t.endface()
t.penblock(block.GLASS)
# Each loop iteration draws three mutually perpendicular faces, then moves
# (pen up) to the opposite corner and reorients, so the second iteration
# completes the remaining three faces of the glass cube.
for i in range(2):
    face()
    t.roll(-90)
    face()
    t.roll(90)
    t.pitch(90)
    face()
    t.pitch(-90)
    t.penup()
    # Travel along three edges to reach the diagonally opposite corner.
    t.go(20)
    t.yaw(90)
    t.go(20)
    t.pitch(90)
    t.go(20)
    t.pitch(-90)
    t.yaw(90)
    t.pitch(-90)
t.pendown()
|
import os
import random
import pymongo
import datetime
import seaborn as sns
import matplotlib as plt
from discord.ext import commands
from discord import File
from pandas import DataFrame
from discord import Embed
from dotenv import load_dotenv
# Local MongoDB backing store; one collection per domain concept.
mongo_client = pymongo.MongoClient("mongodb://localhost:27017/")
db = mongo_client["stonk_bot"]
usercollection = db["users"]
marketcollection = db["market"]
askcollection = db['ask']
bidcollection = db['bid']
tradecollection = db['trade']
statuscollection = db['status']
# Discord bot token is taken from a .env file / the environment.
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN')
bot = commands.Bot(command_prefix='!')
# Ensure the status collection holds exactly one document; default to closed.
if len(list(statuscollection.find())) != 1:
    statuscollection.drop()
    statuscollection.insert_one({'closed':True})
def is_closed():
    # The status collection holds a single {'closed': bool} document
    # (guaranteed by the startup code above).
    return statuscollection.find_one()['closed']
def get_combined_stocks_of_user_with_id(id):
    """Return {short: amount} for a user, counting both directly held shares
    and shares currently locked in the user's open sell orders (asks)."""
    combined = {}
    # Shares escrowed in open asks (removed from holdings when the ask was placed).
    for open_ask in askcollection.find({"seller_id": id}):
        short = open_ask['short']
        combined[short] = combined.get(short, 0) + open_ask['amount']
    # Shares held directly in the user's portfolio.
    for short, amount in usercollection.find_one({"id": id})['stocks'].items():
        combined[short] = combined.get(short, 0) + amount
    return combined
@bot.command(name='join', help='join the stock market')
async def join(ctx):
    # Register the calling Discord user with a starting balance of 100.
    user_id = ctx.author.id
    user_name = ctx.author.name
    user_info = usercollection.find_one({"id": user_id})
    if user_info is not None:
        await ctx.send("you already have joined")
        return
    # last_time_malocht is back-dated 12h so the user can !malochen right away.
    user_info = {'id': user_id, 'balance': 100, 'name': user_name, 'stocks': {}, 'last_time_malocht': datetime.datetime.utcnow() - datetime.timedelta(hours=12)}
    usercollection.insert_one(user_info)
    await ctx.send("joined")
@bot.command(name='malochen', help='malochen gehen')
async def malochen(ctx):
    # Work command: pays out 200 Fobicoins, at most once every 12 hours.
    user_id = ctx.author.id
    user_info = usercollection.find_one({"id": user_id})
    if user_info is None:
        await ctx.send("i dont know you you have to !join first")
        return
    # Enforce the 12h cooldown recorded in last_time_malocht.
    if user_info['last_time_malocht'] + datetime.timedelta(hours=12) > datetime.datetime.utcnow():
        since_last = datetime.datetime.utcnow() - user_info['last_time_malocht']
        await ctx.send(f"du hast in den letzten 12 stunden schon malocht since last: {since_last}")
        return
    new_money= user_info['balance'] + 200.0
    usercollection.update_one({"id": user_id}, { "$set": {"balance": new_money, "last_time_malocht": datetime.datetime.utcnow()}})
    await ctx.send("maloche maloche maloche")
@bot.command(name='balance', help='shows your balance')
async def balance(ctx):
    # Show the caller's cash balance plus combined holdings (held + escrowed in asks).
    user_id = ctx.author.id
    user_info = usercollection.find_one({"id": user_id})
    if user_info is None:
        await ctx.send("i dont know you you have to !join first")
        return
    response = f"```Your balance is: {('%.2f' % user_info['balance'])} Fobicoins\n\n" + "{:<10} {:<10}".format("stock", "amount")
    user_stocks = get_combined_stocks_of_user_with_id(user_id)
    for stock in user_stocks:
        response = response + "\n{:<10} {:<10}".format(stock, user_stocks[stock])
    await ctx.send(response+ "```")
@bot.command(name='market', help='shows the market')
async def market(ctx):
    # Tabular overview of all listed stocks.
    market = marketcollection.find()
    outstring = "```{:<10} {:<20} {:<10} {:<10} {:<10} {:<10} {:<10} {:<10}".format("shorthand", "name", "shares", "self_held", "last_price", "cap", "balance", "div")
    for stonk in market:
        # "cap" column = freely traded shares (shares - self_held) * last price.
        outstring = outstring + "\n{:<10} {:<20} {:<10} {:<10} {:<10} {:<10} {:<10} {:<10}".format(stonk['short'], stonk['name'], stonk['shares'], stonk['self_held'], stonk['last_price'], '%.2f' % ((int(stonk['shares'])-int(stonk['self_held'])) * float(stonk['last_price'])), "%.2f" % stonk['balance'], stonk['div'])
    await ctx.send(outstring+ "```")
@bot.command(name='top', help='shows top 10 players')
async def overview(ctx):
    # Registered as "!top" (the function name "overview" is only internal).
    # Shows the ten users with the highest CASH balance (not net worth), with
    # net worth computed as cash + holdings valued at each stock's last price.
    top_ten = usercollection.find().sort("balance", -1).limit(10)
    market = marketcollection.find()
    marketPrices= {}
    for stock in market:
        marketPrices[stock['short']] = stock['last_price']
    outstring = "```{:<15} {:<15} {:<15} {:<15}".format("name", "balance", "net_worth", "car")
    for player in top_ten:
        net_worth = float(player['balance'])
        player_stocks = get_combined_stocks_of_user_with_id(player['id'])
        for stock_key in player_stocks:
            net_worth = net_worth + float(player_stocks[stock_key]) * float(marketPrices[stock_key])
        outstring = outstring + "\n{:<15} {:<15} {:<15} {:<15}".format(player['name'], "%.2f" % player['balance'], "%.2f" % net_worth, player.get('car', "None"))
    await ctx.send(outstring+ "```")
@bot.command(name='info', help='get info on a specific stock')
async def info(ctx, short=None):
    # Show the order book for one stock plus a price-history chart.
    if short is None:
        await ctx.send("please provide a short hand")
        return
    stock = marketcollection.find_one({"short": short})
    if stock is None:
        await ctx.send("i really dont know this shorthand")
        return
    # Order book: asks ascending by price, bids descending.
    asks = askcollection.find({"short": short}).sort("price_per_stock")
    outstring = "```asks:\n{:<15} {:<15} {:<15}".format("seller", "amount", "price_per_stock")
    for ask in asks:
        outstring = outstring + "\n{:<15} {:<15} {:<15}".format(ask['seller_name'], ask['amount'], ask['price_per_stock'])
    bids = bidcollection.find({"short": short}).sort("price_per_stock", -1)
    outstring = outstring + "\n\nbids:\n{:<15} {:<15} {:<15}".format("buyer", "amount", "price_per_stock")
    for bid in bids:
        outstring = outstring + "\n{:<15} {:<15} {:<15}".format(bid['buyer_name'], bid['amount'], bid['price_per_stock'])
    await ctx.send(outstring+ "```")
    # Price-history plot; skipped when the stock has never traded.
    trades = DataFrame(list(tradecollection.find({"short": short}).sort("when")))
    if len(trades) < 1:
        return
    plot = sns.lineplot(x="when", y="price_per_stock", data=trades)
    plot.get_figure().savefig("out.png")
    # matplotlib was imported as "plt" (not matplotlib.pyplot), hence plt.pyplot.
    plt.pyplot.close(plot.get_figure())
    await ctx.send(file=File("out.png"))
    os.remove("out.png")
@bot.command(name='ask', help='create a sell order')
async def ask(ctx, short=None, amount=None, price_per_stock = None):
    # Create a sell order: the offered shares are moved out of the user's
    # holdings immediately (escrow), then the order is matched against the
    # highest open bids first.
    if is_closed():
        await ctx.send("market is closed")
        return
    user_id = ctx.author.id
    user_info = usercollection.find_one({"id": user_id})
    if user_info is None:
        await ctx.send("i dont know you you have to !join first")
        return
    # NOTE(review): price_per_stock is not validated here; omitting it crashes
    # at the float() comparison below whenever a bid exists -- TODO validate.
    if short is None or amount is None:
        await ctx.send("Usage: !ask <short> <amount> <price_per_stock>")
        return
    stock = marketcollection.find_one({"short": short})
    if stock is None:
        await ctx.send("i really dont know this shorthand")
        return
    # NOTE(review): redundant re-fetch of the user document loaded above.
    user_id = ctx.author.id
    user_info = usercollection.find_one({"id": user_id})
    if short not in user_info['stocks']:
        await ctx.send(f"you dont even have any {short}")
        return
    amount_of_stocks = int(user_info['stocks'][short])
    if amount_of_stocks < int(amount):
        await ctx.send(f"you dont even have {amount} {short}")
        return
    # Escrow: deduct the offered shares from the holdings right away.
    user_info['stocks'][short] = amount_of_stocks - int(amount)
    usercollection.update_one({"id": user_id}, { "$set": {"stocks": user_info['stocks']}})
    ask = {"seller_id": user_id, "seller_name": user_info['name'], "short": short, "amount": int(amount), "price_per_stock": price_per_stock}
    # Match against open bids, highest price first; the sorted order lets us
    # stop at the first bid that no longer meets the ask price.
    bids = bidcollection.find({"short": short}).sort("price_per_stock", -1)
    for bid in bids:
        if float(ask['price_per_stock']) <= float(bid['price_per_stock']):
            buy_ask(ask, bid)
            if bid['amount'] == 0:
                bidcollection.delete_one({"_id": bid['_id']})
            else:
                bidcollection.update_one({"_id": bid['_id']}, { "$set": {"amount": bid['amount']}})
        else:
            break
        if ask['amount'] == 0 :
            break
    if ask['amount'] == 0:
        await ctx.send("ask resolved")
        return
    askcollection.insert_one(ask)
    await ctx.send("ask placed")
def buy_ask(ask, bid):
    """Settle (part of) a trade between a sell order `ask` and a buy order `bid`.

    Mutates ask['amount'] and bid['amount'] in place to the unfilled remainder,
    records the trade, updates the market's last price, credits the seller,
    debits the buyer and transfers the shares to the buyer's holdings.
    """
    # A user must not trade with themselves. FIX: use .get() -- the guard
    # further down implies asks without a 'seller_id' can exist, and a direct
    # key access would raise KeyError on those.
    if bid['buyer_id'] == ask.get('seller_id'):
        return
    bid_amount = int(bid['amount'])
    ask_amount = int(ask['amount'])
    ask['amount'] = max(0, ask_amount - bid_amount)
    bid['amount'] = max(0, bid_amount - ask_amount)
    amount_sold = ask_amount - int(ask['amount'])
    # BUG FIX: prices arrive as strings straight from the command arguments and
    # the original compared them with `<` lexicographically ("9" < "10" is
    # False). Convert to float first; the trade still settles at the better
    # (higher) of the two prices, as before.
    ask_price = float(ask['price_per_stock'])
    bid_price = float(bid['price_per_stock'])
    price_per_stock = max(ask_price, bid_price)
    money_to_pay = price_per_stock * amount_sold
    tradecollection.insert_one({"short": ask['short'], "amount_sold": amount_sold, "price_per_stock": price_per_stock, "when": datetime.datetime.utcnow()})
    marketcollection.update_one({"short": ask['short']}, { "$set": {"last_price": price_per_stock}})
    # Asks without a seller (if any) credit nobody.
    if 'seller_id' in ask:
        usercollection.update_one({"id": ask['seller_id']}, { "$inc": {"balance": money_to_pay}})
    usercollection.update_one({"id": bid['buyer_id']}, { "$inc": {"balance": -money_to_pay}})
    buyer_info = usercollection.find_one({"id": bid['buyer_id']})
    current_amount = buyer_info['stocks'].get(bid['short'], 0)
    buyer_info['stocks'][bid['short']] = current_amount + amount_sold
    usercollection.update_one({"id": bid['buyer_id']}, { "$set": {"stocks": buyer_info['stocks']}})
@bot.command(name='bid', help='create a buy order')
async def bid(ctx, short=None, amount=None, price_per_stock = None):
    # Create a buy order and match it against the cheapest open asks first.
    if is_closed():
        await ctx.send("market is closed")
        return
    user_id = ctx.author.id
    user_info = usercollection.find_one({"id": user_id})
    if user_info is None:
        await ctx.send("i dont know you you have to !join first")
        return
    # FIX: also reject a missing price -- the original only checked short and
    # amount, then crashed on float(None) when computing total_price.
    if short is None or amount is None or price_per_stock is None:
        await ctx.send("Usage: !bid <short> <amount> <price_per_stock>")
        return
    stock = marketcollection.find_one({"short": short})
    if stock is None:
        await ctx.send("i really dont know this shorthand")
        return
    # (The original re-fetched user_id/user_info here; the values above are
    # identical, so the duplicate query was dropped.)
    total_price = int(amount) * float(price_per_stock)
    if(user_info['balance']< total_price):
        await ctx.send(f"nice try, but youre missing {total_price- user_info['balance']} fobicoins")
        return
    # FIX: store amount as int for consistency with !ask -- the original kept
    # the raw string, which only worked because buy_ask re-casts it.
    bid = {"buyer_id": user_id, "buyer_name": user_info['name'], "short": short, "amount": int(amount), "price_per_stock": price_per_stock}
    # Match against open asks, cheapest first.
    current_asks = askcollection.find({"short": short}).sort("price_per_stock")
    for ask in current_asks:
        if float(ask['price_per_stock']) <= float(bid['price_per_stock']):
            buy_ask(ask, bid)
            if ask['amount'] == 0:
                askcollection.delete_one({"_id": ask['_id']})
            else:
                askcollection.update_one({"_id": ask['_id']}, { "$set": {"amount": ask['amount']}})
        if bid['amount'] == 0 :
            break
    if bid['amount'] == 0:
        await ctx.send("bid resolved")
        return
    bidcollection.insert_one(bid)
    await ctx.send("bid placed")
@bot.command(name='cancelbid', help='cancel all bids on a stock')
async def cancelbid(ctx, short=None):
    # Remove every open buy order of the caller for `short`. No refund needed:
    # money is only deducted when a trade settles (see buy_ask).
    if is_closed():
        await ctx.send("market is closed")
        return
    user_id = ctx.author.id
    user_info = usercollection.find_one({"id": user_id})
    if user_info is None:
        await ctx.send("i dont know you you have to !join first")
        return
    if short is None:
        await ctx.send("Usage: !cancelbid <short>")
        return
    bidcollection.delete_many({"buyer_id": user_id, "short": short})
    await ctx.send("done")
@bot.command(name='cancelask', help='cancel all asks on a stock')
async def cancelask(ctx, short=None):
    # Cancel the caller's open sell orders for `short` and return the escrowed
    # shares to their holdings.
    if is_closed():
        await ctx.send("market is closed")
        return
    user_id = ctx.author.id
    user_info = usercollection.find_one({"id": user_id})
    if user_info is None:
        await ctx.send("i dont know you you have to !join first")
        return
    if short is None:
        await ctx.send("Usage: !cancelask <short>")
        return
    asks = askcollection.find({"seller_id": user_id, "short": short})
    user_stocks = user_info['stocks']
    # NOTE(review): deletes documents while iterating the cursor that yields
    # them; summing first and one delete_many would be safer.
    for ask in asks:
        user_stocks[short] = user_stocks[short] + ask['amount']
        askcollection.delete_one({"_id": ask['_id']})
    usercollection.update_one({"id": user_id}, { "$set": {"stocks": user_stocks}})
    await ctx.send("done")
@bot.command(name='closemarket')
@commands.has_role('stonkbot')
async def closemarket(ctx):
    # Admin-only (role 'stonkbot'): halt trading by resetting the status doc.
    # (Comment, not a docstring: discord.py would surface a docstring as the
    # command's help text since no help= kwarg is given here.)
    statuscollection.drop()
    statuscollection.insert_one({'closed':True})
    await ctx.send(":bell::bell::bell: market closed :bell::bell::bell:")
@bot.command(name='openmarket')
@commands.has_role('stonkbot')
async def openmarket(ctx):
    # Admin-only (role 'stonkbot'): reopen trading, then simulate one "day":
    # each stock's company balance moves by a random factor, and positive moves
    # pay a dividend to everyone holding (or escrowing) shares.
    statuscollection.drop()
    statuscollection.insert_one({'closed':False})
    await ctx.send(":bell::bell::bell: market opened :bell::bell::bell:")
    stonks = marketcollection.find()
    # BUG FIX: materialize the users cursor. A pymongo cursor is exhausted
    # after one full pass, so with the original `usercollection.find()` only
    # the FIRST stock's inner loop saw any users -- later stocks silently paid
    # no dividends.
    users = list(usercollection.find())
    outstring = "```stock performance:\n{:<15} {:<15} {:<15}".format("short", "profit", "dividend")
    for stonk in stonks:
        # Random move in [-15%, +15% * performance] of the company balance.
        change = float(stonk['balance']) * random.uniform(-0.15, 0.15 * float(stonk['performance']))
        dividend_per_share = 0.0
        if change > 0:
            # Distribute the 'div' fraction of the gain across traded shares.
            # NOTE(review): traded_shares == 0 would divide by zero -- presumably
            # every listed stock has free float; confirm.
            traded_shares = int(stonk['shares']) - int(stonk['self_held'])
            dividend_per_share = change * float(stonk['div']) / traded_shares
            change = change - dividend_per_share * traded_shares
        marketcollection.update_one({"short": stonk['short']}, { "$inc": {"balance": change}})
        outstring = outstring + "\n{:<15} {:<15} {:<15}".format(stonk['short'], "%.2f" % change, "%.2f" % dividend_per_share)
        # Pay dividends to every holder (held + escrowed shares).
        for user in users:
            user_stocks = get_combined_stocks_of_user_with_id(user['id'])
            number_of_stocks = int(user_stocks.get(stonk['short'], 0))
            if number_of_stocks > 0:
                usercollection.update_one({"id": user['id']}, { "$inc": {"balance": number_of_stocks * dividend_per_share}})
    await ctx.send(outstring+ "```")
# Start the bot (blocks until the process is stopped).
bot.run(TOKEN)
|
# -*- coding: utf-8 -*-
from collections import defaultdict
import re
from nltk import tree
from swda import CorpusReader
from tree_pos_map import TreeMapCorpus
from tree_pos_map import POSMapCorpus
# Pairs of (variant_a, variant_b) spellings that may have been mistranscribed
# for one another: contraction/possessive homophones and UK/US spelling
# variants. Presumably consulted when matching transcript words against
# treebank tokens -- confirm against the code that uses this list.
possibleMistranscription = [("its", "it's"),
                            ("Its", "It's"),
                            ("it's", "its"),
                            ("It's", "Its"),
                            ("whose", "who's"),
                            ("Whose", "Who's"),
                            ("who's", "whose"),
                            ("Who's", "Whose"),
                            ("you're", "your"),
                            ("You're", "Your"),
                            ("your", "you're"),
                            ("Your", "You're"),
                            ("their", "they're"),
                            ("Their", "They're"),
                            ("they're", "their"),
                            ("They're", "Their"),
                            ("programme", "program"),
                            ("program", "programme"),
                            ("centre", "center"),
                            ("center", "centre"),
                            ("travelling", "traveling"),
                            ("traveling", "travelling"),
                            ("colouring", "coloring"),
                            ("coloring", "colouring")]
class TreeMapWriter:
    """Object which writes mappings from the words in utterances
    to the nodes of the corresponding trees in a treebank

    Output: one tab-separated Tree_map_<folder>.csv.text file per corpus
    subfolder, each row mapping an utterance's words to (tree, leaf)
    index pairs.

    NOTE: this module is Python 2 (print statements, raw_input).
    """
    def __init__(self, corpus_path="../swda",
                 metadata_path="swda-metadata.csv",
                 target_folder_path="Maps",
                 ranges=None,
                 errorLog=None):
        # All work happens eagerly at construction time.
        print "started TreeMapWriting"
        self.write_to_file(corpus_path,
                           metadata_path,
                           target_folder_path,
                           ranges,
                           errorLog)

    def write_to_file(self, corpus_path,
                      metadata_path,
                      target_folder_path,
                      ranges,
                      errorLog):
        """Writes files to a target folder with the mappings
        from words in utterances to tree nodes in trees.

        corpus_path -- root folder of the swda corpus.
        metadata_path -- corpus metadata csv file.
        target_folder_path -- destination folder for the map files.
        ranges -- optional collection of conversation numbers to restrict to.
        errorLog -- optional path of a file to log alignment problems to.
        """
        if errorLog:
            errorLog = open(errorLog, 'w')
        corpus = CorpusReader(corpus_path, metadata_path)
        # Iterate through all transcripts
        # NOTE(review): incorrectTrees is never incremented anywhere below,
        # so the count printed at the end is always 0.
        incorrectTrees = 0
        folder = None
        corpus_file = None
        for trans in corpus.iter_transcripts():
            # print "iterating",trans.conversation_no
            if not trans.has_pos():
                continue
            # print "has pos"
            if ranges and not trans.conversation_no in ranges:
                continue
            # print "in range"
            # just look at transcripts WITH trees as compliment to the
            # below models
            if not trans.has_trees():
                continue
            end = trans.swda_filename.rfind("/")
            start = trans.swda_filename.rfind("/", 0, end)
            c_folder = trans.swda_filename[start + 1:end]
            if c_folder != folder:
                # for now splitting the maps by folder
                folder = c_folder
                if corpus_file:
                    corpus_file.close()
                corpus_file = open(target_folder_path +
                                   "/Tree_map_{0}.csv.text".format(folder), 'w')
                wordTreeMapList = TreeMapCorpus(False, errorLog)
                print "new map for folder", folder
            translist = trans.utterances
            translength = len(translist)
            count = 0
            # iterating through transcript utterance by utterance
            # create list of tuples i.e. map from word to the index(ices)
            # (possibly multiple or null) of the relevant leaf/ves
            # of a given tree i.e. utt.tree[0].leaves[0] would be a pair (0,0))
            while count < translength:
                utt = trans.utterances[count]
                words = utt.text_words()
                wordTreeMap = []  # [((word), (List of LeafIndices))]
                forwardtrack = 0
                backtrack = 0
                continued = False
                # print "\n COUNT" + str(count)
                # print utt.damsl_act_tag()
                if len(utt.trees) == 0 or utt.damsl_act_tag() == "x":
                    wordTreeMap.append((utt, []))  # just dummy value
                    # errormessage = "WARNING: NO TREE for file/utt: " +\
                    # str(utt.swda_filename) + " " + utt.caller + "." + \
                    # str(utt.utterance_index) + "." + \
                    #str(utt.subutterance_index) + " " + utt.text
                    # print(errormessage)
                    count += 1
                    continue
                # raw_input()
                # indices for which tree and leaf we're at:
                i = 0  # tree
                j = 0  # leaf
                # initialise pairs of trees and ptb pairs
                trees = []
                for l in range(0, len(utt.trees)):
                    trees.append(
                        (utt.ptb_treenumbers[l], count, l, utt.trees[l]))
                # print "TREES = "
                # for tree in trees:
                # print tree
                origtrees = list(trees)
                origcount = count
                # overcoming the problem of previous utterances contributing
                # to the tree at this utterance, we need to add the words from
                # the previous utt add in all the words from previous utterance
                # with a dialogue act tag/or the same tree?
                # check that the last tree in the previous utterance
                # is the same as the previous one
                previousUttSame = trans.previous_utt_same_speaker(utt)
                # print previousUttSame
                lastTreeMap = None
                if previousUttSame:
                    # print "search for previous full act utt
                    # for " + str(utt.swda_filename) +
                    # str(utt.transcript_index)
                    lastTreeMap = wordTreeMapList.get_treemap(
                        trans,
                        previousUttSame)
                    if ((not lastTreeMap) or (len(lastTreeMap) == 0) or
                            (len(lastTreeMap) == 1 and lastTreeMap[0][1] == [])):
                        # print "no last tree map, backwards searching"
                        while previousUttSame and \
                                ((not lastTreeMap) or (len(lastTreeMap) == 0) or
                                 (len(lastTreeMap) == 1 and
                                  lastTreeMap[0][1] == [])):
                            previousUttSame = trans.previous_utt_same_speaker(
                                previousUttSame)  # go back one more
                            lastTreeMap = wordTreeMapList.get_treemap(trans,
                                                                      previousUttSame)
                    if previousUttSame:
                        pass
                        # print previousUttSame.transcript_index
                if not lastTreeMap:
                    pass
                    # print "no last treemap found for:"
                    # print utt.swda_filename
                    # print utt.transcript_index
                # A "+" act tag, or sharing the antecedent's last treebank
                # number, marks this utterance as continuing the previous one
                # by the same speaker: realign against the antecedent's trees.
                if lastTreeMap and \
                        (utt.damsl_act_tag() == "+" or
                         (len(lastTreeMap.treebank_numbers) > 0
                          and lastTreeMap.treebank_numbers[-1] ==
                          utt.ptb_treenumbers[0])):
                    continued = True
                    # might have to backtrack
                    # now checking for wrong trees
                    lastPTB = lastTreeMap.treebank_numbers
                    lastIndexes = lastTreeMap.transcript_numbers
                    lastTreesTemp = lastTreeMap.get_trees(trans)
                    lastTrees = []
                    for i in range(0, len(lastPTB)):
                        lastTrees.append([lastPTB[i], lastIndexes[i][0],
                                          lastIndexes[i][1], lastTreesTemp[i]])
                    if not (lastPTB[-1] == utt.ptb_treenumbers[0]):
                        # print "not same, need to correct!"
                        # print words
                        # print trees
                        # print "last one"
                        # print previousUttSame.text_words()
                        # print lastTrees
                        if utt.ptb_treenumbers[0] - lastPTB[-1] > 1:
                            # backtrack and redo the antecedent
                            count = count - (count - lastIndexes[-1][0])
                            utt = previousUttSame
                            words = utt.text_words()
                            mytrees = []
                            for i in range(0, len(lastTrees) - 1):
                                mytrees.append(lastTrees[i])
                            trees = mytrees + [origtrees[0]]
                            # print "\n(1)backtrack to with new trees:"
                            backtrack = 1
                            # print utt.transcript_index
                            # print words
                            # print trees
                            # raw_input()
                        # alternately, this utt's tree may be further back
                        # than its antecdent's, rare mistake
                        elif utt.ptb_treenumbers[0] < lastTrees[-1][0]:
                            # continue with this utterance and trees
                            # (if there are any), but replace its first
                            # tree with its antecdents last one
                            forwardtrack = 1
                            trees = [lastTrees[-1]] + origtrees[1:]
                            # print "\n(2)replacing first one to lasttreemap's:"
                            # print words
                            # print trees
                            # raw_input()
                    if backtrack != 1:  # we should have no match
                        found_treemap = False
                        # resetting
                        # for t in wordTreeMapList.keys():
                        # print t
                        # print wordTreeMapList[t]
                        for t in range(len(lastTreeMap) - 1, -1, -1):
                            # print lastTreeMap[t][1]
                            # if there is a leafIndices for the
                            # word being looked at, gets last mapped one
                            if len(lastTreeMap[t][1]) > 0:
                                # print "last treemapping of last
                                # caller utterance =
                                # " + str(lastTreeMap[t][1][-1])
                                j = lastTreeMap[t][1][-1][1] + 1
                                found_treemap = True
                                # print "found last mapping, j -1 = " + str(j-1)
                                # raw_input()
                                break
                        if not found_treemap:
                            pass
                            # print "NO matched last TREEMAP found for \
                            # previous Utt Same Speaker of " + \
                            # str(trans.swda_filename) + " " + \
                            # str(utt.transcript_index)
                            # print lastTreeMap
                            # for tmap in wordTreeMapList.keys():
                            # print tmap
                            # print wordTreeMapList[tmap]
                            # raw_input()
                possibleComment = False  # can have comments, flag
                mistranscribe = False
                LeafIndices = []  # possibly empty list of leaf indices
                word = words[0]
                # loop until no more words left to be matched in utterance
                # Greedy left-to-right alignment: each word is matched to
                # leaf indices (i = tree index, j = leaf index); unmatched
                # words fall through to the recovery logic further down.
                while len(words) > 0:
                    # print "top WORD:" + word
                    if not mistranscribe:
                        wordtest = re.sub(r"[\.\,\?\"\!]", "", word)
                        wordtest = wordtest.replace("(", "").replace(")", "")
                    match = False
                    LeafIndices = []  # possibly empty list of leaf indices
                    if (possibleComment
                            or word[0:1] in ["{", "}", "-"]
                            or word in ["/", ".", ",", "]"]
                            or wordtest == ""
                            or any([x in word for x in ["<", ">", "*", "[", "+", "]]",
                                                        "...", "#", "="]])):
                        # no tree equivalent for {D } type annotations
                        if (word[0:1] == "-" or
                                any([x in word for x in
                                     ["*", "<<", "<+", "[[", "<"]])) \
                                and not possibleComment:
                            possibleComment = True
                        if possibleComment:
                            #print("match COMMENT!:" + word)
                            # raw_input()
                            LeafIndices = []
                            match = True
                        #wordTreeMap.append((word, LeafIndices))
                        if any([x in word for x in [">>", "]]", ">"]]) or \
                                word[0] == "-":  # turn off comment
                            possibleComment = False
                        #del words[0]
                        # LeadIndices will be null here
                        wordTreeMap.append((word, LeafIndices))
                        LeafIndices = []
                        match = True
                        # print "match annotation!:" + word
                        del words[0]  # word is consumed, should always be one
                        if len(words) > 0:
                            word = words[0]
                            wordtest = re.sub(r"[\.\,\?\/\)\(\"\!]", "", word)
                            wordtest = wordtest.replace("(", "")
                            wordtest = wordtest.replace(")", "")
                        else:
                            break
                        continue
                        # carry on to next word without updating indices?
                    else:
                        while i < len(trees):
                            # print "i number of trees :" + str(len(utt.trees))
                            # print "i tree number :" + str(i)
                            # print "i loop word :" + word
                            tree = trees[i][3]
                            # print "looking at ptb number " + str(trees[i][0])
                            # print "looking at index number " \
                            #+ str(trees[i][1])+","+str(trees[i][2])
                            while j < len(tree.leaves()):
                                leaf = tree.leaves()[j]
                                # print "j number of leaves : " \
                                #+ str(len(tree.leaves()))
                                # print "j loop word : " + word
                                # print "j loop wordtest : " + wordtest
                                # print "j leaf : " + str(j) + " " + leaf
                                breaker = False
                                # exact match
                                if wordtest == leaf or word == leaf:
                                    LeafIndices.append((i, j))
                                    wordTreeMap.append((word, LeafIndices))
                                    # print("match!:" + word + " " + \
                                    # str(utt.swda_filename) + " " + \
                                    # utt.caller + "." + \
                                    # str(utt.utterance_index) + \
                                    # "." + str(utt.subutterance_index))
                                    del words[0]  # word is consumed
                                    if len(words) > 0:
                                        word = words[0]  # next word
                                        wordtest = re.sub(
                                            r"[\.\,\?\/\)\(\"\!]", "", word)
                                        wordtest = wordtest.replace("(", "")
                                        wordtest = wordtest.replace(")", "")
                                    LeafIndices = []
                                    j += 1  # increment loop to next leaf
                                    match = True
                                    breaker = True
                                    # raw_input()
                                    break
                                elif leaf in wordtest or \
                                        leaf in word and not leaf == ",":
                                    testleaf = leaf
                                    LeafIndices.append((i, j))
                                    j += 1
                                    for k in range(j, j + 3):  # 3 beyond
                                        if (k >= len(tree.leaves())):
                                            j = 0
                                            i += 1
                                            #breaker = True
                                            breaker = True
                                            break  # got to next tree
                                        if (testleaf + tree.leaves()[k]) \
                                                in wordtest or (testleaf +
                                                                tree.leaves()[k])\
                                                in word:
                                            testleaf += tree.leaves()[k]
                                            LeafIndices.append((i, k))
                                            j += 1
                                        # concatenation
                                        if testleaf == wordtest or \
                                                testleaf == word:  # word matched
                                            wordTreeMap.append((word,
                                                                LeafIndices))
                                            del words[0]  # remove word
                                            # print "match!:" + word +\
                                            #str(utt.swda_filename) + " "\
                                            # + utt.caller + "." + \
                                            # str(utt.utterance_index) +\
                                            # "." + \
                                            # str(utt.subutterance_index))
                                            if len(words) > 0:
                                                word = words[0]
                                                wordtest = re.sub(
                                                    r"[\.\,\?\/\)\(\"\!]",
                                                    "", word)
                                                wordtest = wordtest.\
                                                    replace("(", "")
                                                wordtest = wordtest.\
                                                    replace(")", "")
                                            # reinitialise leaves
                                            LeafIndices = []
                                            j = k + 1
                                            match = True
                                            breaker = True
                                            # raw_input()
                                            break
                                else:
                                    # otherwise go on
                                    j += 1
                                if breaker:
                                    break
                            if match:
                                break
                            if j >= len(tree.leaves()):
                                j = 0
                                i += 1
                            if match:
                                break
                    # could not match word! try mistranscriptions first:
                    if not match:
                        if not mistranscribe:  # one final stab at matching!
                            mistranscribe = True
                            for pair in possibleMistranscription:
                                if pair[0] == wordtest:
                                    wordtest = pair[1]
                                    if len(wordTreeMap) > 0:
                                        if len(wordTreeMap[-1][1]) > 0:
                                            i = wordTreeMap[-1][1][-1][0]
                                            j = wordTreeMap[-1][1][-1][1]
                                        else:
                                            # go back to beginning of
                                            # tree search
                                            i = 0
                                            j = 0
                                    else:
                                        i = 0  # go back to beginning
                                        j = 0
                                    break  # matched
                        elif continued:
                            # possible lack of matching up of words in
                            # previous utterance same caller and same
                            # tree// not always within same tree!!
                            errormessage = "Possible bad start for \
CONTINUED UTT ''" + words[0] + "'' in file/utt: "\
                                + str(utt.swda_filename) + "\n " + utt.caller + \
                                "." + str(utt.utterance_index) + "." + \
                                str(utt.subutterance_index) + \
                                "POSSIBLE COMMENT = " + str(possibleComment)
                            # print errormessage
                            if not errorLog is None:
                                errorLog.write(errormessage + "\n")
                            # raw_input()
                            if backtrack == 1:
                                backtrack += 1
                            elif backtrack == 2:
                                # i.e. we've done two loops and
                                # still haven't found it, try the other way
                                count = origcount
                                utt = trans.utterances[count]
                                words = utt.text_words()
                                word = words[0]
                                trees = [lastTrees[-1]] + origtrees[1:]
                                # print "\nSECOND PASS(2)replacing \
                                # first one to lasttreemap's:"
                                # print words
                                # print trees
                                backtrack += 1
                                # mistranscribe = False #TODO perhaps needed
                                wordTreeMap = []
                                # switch to forward track this is
                                # the only time we want to try
                                # from the previous mapped leaf in the
                                # other tree
                                foundTreemap = False
                                for t in range(len(lastTreeMap) - 1, -1, -1):
                                    # backwards iteration through words
                                    # print lastTreeMap[t][1]
                                    if len(lastTreeMap[t][1]) > 0:
                                        # print "last treemapping of last \
                                        # caller utterance = " + \
                                        # str(lastTreeMap[t][1][-1])
                                        j = lastTreeMap[t][1][-1][1] + 1
                                        foundTreemap = True
                                        # print "found last mapping, j = " \
                                        #+ str(j)
                                        # raw_input()
                                        # break when last tree
                                        # mapped word from this caller is found
                                        break
                                if not foundTreemap:
                                    # print "NO matched last TREEMAP found\
                                    # for previous Utt Same Speaker of " + \
                                    # str(utt.swda_filename) + " " + \
                                    # utt.caller + "." + \
                                    # str(utt.utterance_index) + "." +\
                                    # str(utt.subutterance_index)
                                    j = 0
                                    # for tmap in wordTreeMapList.keys():
                                    # print tmap
                                    # print wordTreeMapList[tmap]
                                    # raw_input()
                                i = 0  # go back to first tree
                                continue
                            elif forwardtrack == 1:
                                forwardtrack += 1
                            elif forwardtrack == 2:
                                count = count - (count - lastIndexes[-1][0])
                                utt = previousUttSame
                                words = utt.text_words()
                                word = words[0]
                                mytrees = []
                                for i in range(0, len(lastTrees) - 1):
                                    mytrees.append(lastTrees[i])
                                trees = mytrees + [origtrees[0]]
                                # print "\nSECOND PASS(1)backtrack to \
                                # with new trees:"
                                # print utt.transcript_index
                                # print words
                                # print trees
                                forwardtrack += 1
                                # mistranscribe = False #TODO maybe needed
                                wordTreeMap = []
                                # raw_input()
                            elif forwardtrack == 3 or backtrack == 3:
                                # if this hasn't worked reset to old trees
                                # print "trying final reset"
                                count = origcount
                                utt = trans.utterances[count]
                                words = utt.text_words()
                                word = words[0]
                                trees = origtrees
                                forwardtrack = 0
                                backtrack = 0
                                # mistranscribe = False #TODO maybe needed
                                wordTreeMap = []
                                # raw_input()
                            else:
                                pass
                                # print "resetting search"
                                # raw_input()
                            # unless forward tracking now,
                            # just go back to beginning
                            i = 0  # go back to beginning of tree search
                            j = 0
                        else:
                            mistranscribe = False
                            LeafIndices = []
                            wordTreeMap.append((word, LeafIndices))
                            errormessage = "WARNING: 440 no/partial tree \
mapping for ''" + words[0] + "'' in file/utt: "\
                                + str(utt.swda_filename) + " \n" + utt.caller\
                                + "." + str(utt.utterance_index) + "." + \
                                str(utt.subutterance_index) + \
                                "POSSIBLE COMMENT = " + str(possibleComment)
                            # print utt.text_words()
                            del words[0]  # remove word
                            # for trip in wordTreeMap:
                            # print "t",trip
                            if len(words) > 0:
                                word = words[0]
                                wordtest = re.sub(r"[\.\,\?\/\)\(\"\!]", "",
                                                  word)
                                wordtest = wordtest.replace("(", "")
                                wordtest = wordtest.replace(")", "")
                            # print errormessage
                            if errorLog:
                                errorLog.write("possible wrong tree mapping:"
                                               + errormessage + "\n")
                            raw_input()
                # end of while loop (words)
                mytreenumbers = []
                for treemap in trees:
                    # the whole list but the tree
                    mytreenumbers.append(treemap[:-1])
                if not len(utt.text_words()) == len(wordTreeMap):
                    print "ERROR. uneven lengths!"
                    print utt.text_words()
                    print wordTreeMap
                    print trans.swda_filename
                    print utt.transcript_index
                    raw_input()
                    count += 1
                    continue
                # add the treemap
                wordTreeMapList.append(trans.conversation_no,
                                       utt.transcript_index,
                                       tuple(mytreenumbers),
                                       tuple(wordTreeMap))
                count += 1
            # rewrite after each transcript
            filedict = defaultdict(str)
            for key in wordTreeMapList.keys():
                csv_string = '"' + str(list(wordTreeMapList[key])) + '"'
                mytreenumbers = wordTreeMapList[key].transcript_numbers
                myptbnumbers = wordTreeMapList[key].treebank_numbers
                tree_list_string = '"'
                for i in range(0, len(mytreenumbers)):
                    treemap = [myptbnumbers[i]] + mytreenumbers[i]
                    tree_list_string += str(treemap) + ";"
                tree_list_string = tree_list_string[:-1] + '"'
                filename = '"' + key[0:key.rfind(':')] + '"'
                transindex = key[key.rfind(':') + 1:]
                filedict[int(transindex)] = filename \
                    + "\t" + transindex + '\t' + csv_string + "\t" \
                    + tree_list_string + "\n"
            for key in sorted(filedict.keys()):
                corpus_file.write(filedict[key])
            wordTreeMapList = TreeMapCorpus(False, errorLog)  # reset each time
        print "\n" + str(incorrectTrees) + " incorrect trees"
        corpus_file.close()
        if not errorLog is None:
            errorLog.close()
class POSMapWriter:
    """Object which writes mappings from the words in utterances
    to the corresponding POS tags.

    Complement of TreeMapWriter: only processes transcripts WITHOUT
    treebank trees, writing POS_map_<folder>.csv.text files instead.
    """
    def __init__(self, corpus_path="../swda",
                 metadata_path="swda-metadata.csv",
                 target_folder_path="Maps",
                 ranges=None,
                 errorLog=None):
        # All work happens eagerly at construction time.
        print "started MapWriting"
        self.write_to_file(corpus_path,
                           metadata_path,
                           target_folder_path,
                           ranges,
                           errorLog)

    def write_to_file(self, corpus_path,
                      metadata_path,
                      target_folder_path,
                      ranges,
                      errorLog):
        """Writes files to a target folder with the mappings
        from words in utterances to corresponding POS tags.

        Parameters mirror TreeMapWriter.write_to_file.
        """
        if errorLog:
            errorLog = open(errorLog, 'w')
        corpus = CorpusReader(corpus_path, metadata_path)
        folder = None
        corpus_file = None
        for trans in corpus.iter_transcripts():
            # print "iterating",trans.conversation_no
            if not trans.has_pos():
                continue
            # print "has pos"
            if ranges and not trans.conversation_no in ranges:
                continue
            # print "in range"
            # just look at transcripts WITHOUT trees as compliment to the
            # above models
            if trans.has_trees():
                continue
            end = trans.swda_filename.rfind("/")
            start = trans.swda_filename.rfind("/", 0, end)
            c_folder = trans.swda_filename[start + 1:end]
            if c_folder != folder:
                # for now splitting the maps by folder
                folder = c_folder
                if corpus_file:
                    corpus_file.close()
                corpus_file = open(target_folder_path +
                                   "/POS_map_{0}.csv.text".format(folder), 'w')
                wordPOSMapList = POSMapCorpus(False, errorLog)
                print "new map for folder", folder
            translist = trans.utterances
            translength = len(translist)
            count = 0
            # iterating through transcript utterance by utterance
            while count < translength:
                utt = trans.utterances[count]
                words = utt.text_words()
                wordPOSMap = []
                if len(utt.pos) == 0:  # no POS
                    wordPOSMap.append((utt, []))  # just dummy value
                    wordPOSMapList.append(trans.conversation_no,
                                          utt.transcript_index,
                                          list(wordPOSMap))
                    errormessage = "WARNING: NO POS for file/utt: " +\
                        str(utt.swda_filename) + " " + utt.caller + "." + \
                        str(utt.utterance_index) + "." + \
                        str(utt.subutterance_index) + " " + utt.text
                    # print errormessage
                    # raw_input()
                else:
                    # indices for which POS we're at
                    j = 0
                    possibleComment = False  # can have comments, flag
                    mistranscribe = False
                    word = words[0]
                    # loop until no more words left to be matched in utterance
                    # Greedy left-to-right alignment of each word to indices
                    # into the utterance's (word, POS) pairs.
                    while len(words) > 0:
                        word = words[0]
                        # print "top WORD:" + word
                        if not mistranscribe:
                            wordtest = re.sub(r"[\.\,\?\/\)\(\"\!\\]", "",
                                              word)
                            wordtest = wordtest.replace("(", "").\
                                replace(")", "").replace("/", "")
                        match = False
                        POSIndices = []
                        if (possibleComment
                                or word[0:1] in ["{", "}", "-"]
                                or word in ["/", ".", ",", "]"]
                                or wordtest == ""
                                or any([x in word for x in
                                        ["<", ">", "*", "[", "+", "]]",
                                         "...", "#", "="]])):
                            # no tree equivalent for {D } type annotations
                            if (word[0:1] == "-" or
                                    any([x in word for x in
                                         ["*", "<<", "<+", "[[", "<"]])) \
                                    and not possibleComment:
                                possibleComment = True
                            if possibleComment:
                                # print "match COMMENT!:" + word
                                # raw_input()
                                POSIndices = []
                                match = True
                            if (any([x in word for x in [">>", "]]", "))",
                                     ">"]]) or
                                    word[0] == "-") \
                                    and not word == "->":
                                # turn off comment
                                possibleComment = False
                            if (">>" in word or "]]" in word or "))"
                                    in word or ">" in word and
                                    not word == "->"):  # turn off comment
                                possibleComment = False
                            #del words[0]
                            wordPOSMap.append((word, POSIndices))
                            POSIndices = []
                            match = True
                            # print "match annotation!:" + word
                            del words[0]  # word is consumed
                            if len(words) > 0:
                                word = words[0]
                                wordtest = re.sub(r"[\.\,\?\/\)\(\"\!\\]",
                                                  "", word)
                                wordtest = wordtest.replace("(", "")
                                wordtest = wordtest.replace(")", "")
                            else:
                                break
                            continue  # carry on to next word
                        else:
                            myPOS = utt.regularize_pos_lemmas()
                            while j < len(myPOS):
                                pos = myPOS[j][0]  # pair of (word,POS)
                                # print "j number of pos : " + str(len(myPOS))
                                # print "j loop word : " + word
                                # print "j loop wordtest : " + wordtest
                                # print "j pos : " + str(j) + " " + str(pos)
                                # raw_input()
                                breaker = False
                                if wordtest == pos or word == pos:  # exact match
                                    POSIndices.append(j)
                                    wordPOSMap.append((word, POSIndices))
                                    # print "match!:" + word + " in file/utt: "\
                                    # + str(utt.swda_filename) + \
                                    # str(utt.transcript_index))
                                    del words[0]  # word is consumed
                                    if len(words) > 0:
                                        word = words[0]  # next word
                                        wordtest = re.sub(
                                            r"[\.\,\?\/\)\(\"\!\\]",
                                            "", word)
                                        wordtest = wordtest.replace("(", "").\
                                            replace(")", "").replace("/", "")
                                    POSIndices = []
                                    j += 1  # increment lead number
                                    match = True
                                    breaker = True
                                    # raw_input()
                                    break
                                elif (pos in wordtest or pos in word) \
                                        and not pos in [",", "."]:
                                    # substring relation
                                    testpos = pos
                                    POSIndices.append(j)
                                    j += 1
                                    if wordtest[-1] == "-" and \
                                            pos == wordtest[0:-1]:
                                        wordPOSMap.append((word, POSIndices))
                                        del words[0]  # remove word
                                        # print "match!:" + word + " in \
                                        # file/utt: " + str(utt.swda_filename) \
                                        #+ str(utt.transcript_index)
                                        if len(words) > 0:
                                            word = words[0]
                                            wordtest = re.sub(
                                                r"[\.\,\?\/\)\(\"\!\\]",
                                                "", word)
                                            wordtest = wordtest.\
                                                replace("(", "").\
                                                replace(")", "").\
                                                replace("/", "")
                                        POSIndices = []
                                        match = True
                                        breaker = True
                                        break
                                    for k in range(j, j + 3):
                                        if (k >= len(myPOS)):
                                            breaker = True
                                            break
                                        if (testpos + myPOS[k][0]) in wordtest\
                                                or (testpos + myPOS[k][0]) in word:
                                            testpos += myPOS[k][0]
                                            POSIndices.append(k)
                                            j += 1
                                            # concatenation
                                            if testpos == wordtest or \
                                                    testpos == word:  # matched
                                                wordPOSMap.append((word,
                                                                   POSIndices))
                                                del words[0]  # remove word
                                                # print "match!:" +\
                                                # word + " in file/utt: " + \
                                                # str(utt.swda_filename) +\
                                                # str(utt.transcript_index))
                                                if len(words) > 0:
                                                    word = words[0]
                                                    wordtest = re.sub(
                                                        r"[\.\,\?\/\)\(\"\!\\]",
                                                        "", word)
                                                    wordtest = wordtest.\
                                                        replace("(", "")
                                                    wordtest = wordtest.\
                                                        replace(")", "")
                                                POSIndices = []
                                                j = k + 1
                                                match = True
                                                breaker = True
                                                break
                                else:
                                    j += 1  # otherwise go on
                                if breaker:
                                    break
                                if match:
                                    break
                        # could not match word! Could be mistransription
                        if not match:
                            # print "false checking other options"
                            # print j
                            # print word
                            # print wordtest
                            if not mistranscribe:
                                mistranscribe = True
                                for pair in possibleMistranscription:
                                    if pair[0] == wordtest:
                                        wordtest = pair[1]
                                        break  # matched
                                if wordtest[-1] == "-":  # partial words
                                    wordtest = wordtest[0:-1]
                                if "'" in wordtest:
                                    wordtest = wordtest.replace("'", "")
                                if len(wordPOSMap) > 0:
                                    found = False
                                    for n in range(
                                            len(wordPOSMap) - 1, -1, -1):
                                        if len(wordPOSMap[n][1]) > 0:
                                            j = wordPOSMap[n][1][-1] + 1
                                            # print j
                                            found = True
                                            break
                                    if not found:
                                        # if not possible go back to
                                        # the beginning!
                                        j = 0
                                else:
                                    j = 0
                                # print j
                            else:
                                mistranscribe = False
                                wordPOSMap.append((word, POSIndices))
                                errormessage = "WARNING: no/partial POS \
mapping for ''" + words[0] + "'' in file/utt:"\
                                    + str(utt.swda_filename) + "-" + \
                                    str(utt.transcript_index) + \
                                    "POSSIBLE COMMENT = " + \
                                    str(possibleComment)
                                del words[0]  # remove word
                                if len(words) > 0:
                                    word = words[0]
                                    wordtest = re.sub(r"[\.\,\?\/\)\(\"\!\\]",
                                                      "", word)
                                    wordtest = wordtest.replace("(", "").\
                                        replace(")", "").replace("/", "")
                                # print errormessage
                                if errorLog:
                                    errorLog.write("possible wrong POS : " +
                                                   errormessage + "\n")
                                # raw_input()
                    # end of while loop (words)
                    if not len(wordPOSMap) == len(utt.text_words()):
                        print "Error "
                        print "Length mismatch in file/utt: " + \
                            str(utt.swda_filename) + str(utt.transcript_index)
                        print utt.text_words()
                        print wordPOSMap
                        raw_input()
                    wordPOSMapList.append(trans.conversation_no,
                                          str(utt.transcript_index),
                                          list(wordPOSMap))
                    # print "\nadded POSmap " + str(trans.swda_filename) + \
                    #"." + str(utt.transcript_index) + "\n"
                    csv_string = '"' + str(wordPOSMap) + '"'
                    corpus_file.write('"' + str(utt.conversation_no) +
                                      '"\t' + str(utt.transcript_index) +
                                      '\t' + csv_string + "\n")
                count += 1
        corpus_file.close()
        if errorLog:
            errorLog.close()
if __name__ == '__main__':
    # Run tree-map extraction with the default corpus locations.
    TreeMapWriter()
|
from django import forms
from .models import Userm,Neighborhood,Business,Post
class NewProfileForm(forms.ModelForm):
    """ModelForm over Userm exposing every model field except the linked
    `user` field."""
    class Meta:
        model = Userm
        exclude = ['user']
class PostForm(forms.ModelForm):
    """ModelForm over Post exposing every model field except `author`
    and `post_date`."""
    class Meta:
        model = Post
        exclude = ['author','post_date']
|
from diagrams import Cluster, Diagram, Edge
from diagrams.aws.analytics import KinesisDataStreams, KinesisDataFirehose
from diagrams.aws.compute import Lambda
from diagrams.aws.database import DDB
from diagrams.aws.integration import Eventbridge, SQS
from diagrams.aws.mobile import Amplify
from diagrams.aws.network import APIGateway
from diagrams.onprem.client import User
from diagrams.onprem.compute import Server
# Graphviz attributes for the rendered image.
graph_attr = {
    "viewport": "1024,768"
}


# Renders rent-price-webcrawler.jpg, top-to-bottom, without opening a viewer.
# NOTE(review): graph_attr above is defined but never passed to Diagram();
# confirm whether it should be supplied via Diagram(..., graph_attr=graph_attr).
with Diagram(filename="rent-price-webcrawler", outformat="jpg", direction="TB", show=False):
    # Nodes of the architecture diagram.
    cron = Eventbridge("Cron(daily)")
    lambda_crawler = Lambda("Crawler")
    lambda_enrichment = Lambda("Enrichment")
    lambda_search = Lambda("Search")
    sqs_queue = SQS("Offers Enrichment")
    ddb_table = DDB("Offers")
    frontend = Amplify("Frontend")
    api_gateway_entrypoint = APIGateway("API Entrypoint")
    user = User("User")
    webserver = Server("Webserver")

    # Daily crawl: fetch pages, persist offers, queue them for enrichment.
    with Cluster("Collection and Enrichment"):
        cron >> Edge(label="Trigger") >> lambda_crawler << Edge(label="Get Web Page") << webserver
        lambda_crawler >> Edge(label="Send message to Queue") >> sqs_queue
        lambda_crawler >> Edge(label="Write to Table") >> ddb_table
        sqs_queue >> Edge(label="Send message to Listener") >> lambda_enrichment
        lambda_enrichment >> Edge(label="Write to Table") >> ddb_table

    # Read path: frontend queries offers through API Gateway + Lambda.
    with Cluster("User interaction"):
        user >> Edge(label="Access") >> frontend
        frontend >> Edge(label="Request") >> api_gateway_entrypoint
        api_gateway_entrypoint >> Edge(label="Trigger") >> lambda_search
        lambda_search >> Edge(label="Query") >> ddb_table
|
class WrongfulError(Exception):
    """Custom exception: raised when an input string is malformed and
    cannot be converted to a numeric time value.

    :param err: human-readable description of the problem.
    """
    def __init__(self, err='Unknown error.'):
        # Delegate to Exception via super() (instead of the old-style
        # Exception.__init__(self, err)) so args/str() behave normally.
        super().__init__(err)
|
class CIL_Node:
    """Base class for every node of the CIL intermediate representation."""
    pass


class ProgramCil(CIL_Node):
    """A whole CIL program: its type, data and code sections."""
    def __init__(self, types, data, code):
        self.types = types
        self.data = data
        self.code = code


class TypeCil(CIL_Node):
    """A CIL type declaration with its attribute and method members."""
    def __init__(self, idx, attributes=None, methods=None):
        self.id = idx
        # None sentinels instead of mutable [] defaults: a literal list
        # default is created once and shared by every instance built
        # without an explicit argument.
        self.attributes = [] if attributes is None else attributes
        self.methods = [] if methods is None else methods


class AttributeCil(CIL_Node):
    """An attribute entry of a CIL type."""
    def __init__(self, idx):
        self.id = idx


class MethodCil(CIL_Node):
    """A method entry of a CIL type; `ref` names its implementation."""
    def __init__(self, idx, ref):
        self.id = idx
        self.ref = ref


class FunctionCil(CIL_Node):
    """A CIL function: arguments, local declarations and instruction body."""
    def __init__(self, idx, args=None, localsx=None, body=None):
        self.id = idx
        # Same mutable-default fix as TypeCil.
        self.args = [] if args is None else args
        self.locals = [] if localsx is None else localsx
        self.body = [] if body is None else body


class IfCil(CIL_Node):
    """Conditional jump to `label` when `condition` holds."""
    def __init__(self, condition, label):
        self.condition = condition
        self.label = label


class ArgCil(CIL_Node):
    """An argument-passing instruction node."""
    def __init__(self, idx):
        self.id = idx


class LocalCil(CIL_Node):
    """A local-variable declaration node."""
    def __init__(self, idx):
        self.id = idx


class AssignmentCil(CIL_Node):
    """Assignment of `expr` into the variable named `idx`."""
    def __init__(self, idx, expr):
        self.id = idx
        self.expr = expr


class StringCil(CIL_Node):
    """A string constant node."""
    def __init__(self, idx: str, text: str):
        self.id = idx
        self.text = text


class LabelCil(CIL_Node):
    """A jump target."""
    def __init__(self, idx):
        self.id = idx


class GotoCil(CIL_Node):
    """Unconditional jump to `label`."""
    def __init__(self, label):
        self.label = label


class GetAttrCil(CIL_Node):
    """Read attribute `attr` from an instance of `typex`."""
    def __init__(self, typex, attr):
        self.type = typex
        self.attr = attr


class SetAttr(CIL_Node):
    """Write `value` into attribute `attr` of an instance of `typex`."""
    def __init__(self, typex, attr, value):
        self.type = typex
        self.attr = attr
        self.value = value


class GetIndex(CIL_Node):
    """Read `array[index]`."""
    def __init__(self, array, index):
        self.array = array
        self.index = index


class SetIndex(CIL_Node):
    """Write `value` into `array[index]`."""
    def __init__(self, array, index, value):
        self.array = array
        self.index = index
        self.value = value
|
class Node(object):
    """Binary-tree node with an extra `next` pointer to its right-hand
    neighbour on the same level."""
    def __init__(self, val, left, right, next):
        self.val = val
        self.left = left
        self.right = right
        self.next = next


class Solution(object):
    """Populate each node's `next` pointer so it points at the next node
    to its right on the same level (O(1) extra space)."""
    def connect(self, root):
        """
        :type root: Node
        :rtype: Node
        """
        level_start = root
        while level_start:
            # A dummy head collects the children of the current level in
            # left-to-right order; dummy.next becomes the next level's start.
            dummy = Node(None, None, None, None)
            tail = dummy
            node = level_start
            while node:
                for child in (node.left, node.right):
                    if child:
                        tail.next = child
                        tail = child
                node = node.next
            level_start = dummy.next
        return root
|
#! /usr/bin/env python
# encoding: utf-8
from aoc2017.day_14 import hash_grid_2
def test_hash_grid_2_1():
    """Regression check against the worked example from the puzzle text."""
    assert hash_grid_2("flqrgnkx") == 1242
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
# Explicit public API of this module (what `import *` re-exports).
__all__ = [
    'ActorArgs',
    'CaseClassificationArgs',
]
# NOTE: generated code (see file header); manual edits may be overwritten.
@pulumi.input_type
class ActorArgs:
    def __init__(__self__, *,
                 display_name: Optional[pulumi.Input[str]] = None,
                 email: Optional[pulumi.Input[str]] = None):
        """
        An object containing information about the effective user and authenticated principal responsible for an action.
        :param pulumi.Input[str] display_name: The name to display for the actor. If not provided, it is inferred from credentials supplied during case creation. When an email is provided, a display name must also be provided. This will be obfuscated if the user is a Google Support agent.
        :param pulumi.Input[str] email: The email address of the actor. If not provided, it is inferred from credentials supplied during case creation. If the authenticated principal does not have an email address, one must be provided. When a name is provided, an email must also be provided. This will be obfuscated if the user is a Google Support agent.
        """
        # Only register properties that were explicitly provided, so unset
        # values stay absent rather than being recorded as None.
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if email is not None:
            pulumi.set(__self__, "email", email)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name to display for the actor. If not provided, it is inferred from credentials supplied during case creation. When an email is provided, a display name must also be provided. This will be obfuscated if the user is a Google Support agent.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def email(self) -> Optional[pulumi.Input[str]]:
        """
        The email address of the actor. If not provided, it is inferred from credentials supplied during case creation. If the authenticated principal does not have an email address, one must be provided. When a name is provided, an email must also be provided. This will be obfuscated if the user is a Google Support agent.
        """
        return pulumi.get(self, "email")

    @email.setter
    def email(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "email", value)
# NOTE: generated code (see file header); manual edits may be overwritten.
@pulumi.input_type
class CaseClassificationArgs:
    def __init__(__self__, *,
                 display_name: Optional[pulumi.Input[str]] = None,
                 id: Optional[pulumi.Input[str]] = None):
        """
        A classification object with a product type and value.
        :param pulumi.Input[str] display_name: The display name of the classification.
        :param pulumi.Input[str] id: The unique ID for a classification. Must be specified for case creation.
        """
        # Only register properties that were explicitly provided, so unset
        # values stay absent rather than being recorded as None.
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if id is not None:
            pulumi.set(__self__, "id", id)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The display name of the classification.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def id(self) -> Optional[pulumi.Input[str]]:
        """
        The unique ID for a classification. Must be specified for case creation.
        """
        return pulumi.get(self, "id")

    @id.setter
    def id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "id", value)
|
from datetime import datetime
from typing import Any, Dict, List, Optional
import pydantic
from python_on_whales.utils import DockerCamelModel
class ObjectVersion(DockerCamelModel):
index: int
class NamedResourceSpec(DockerCamelModel):
kind: str
value: str
class DiscreteResourceSpec(DockerCamelModel):
kind: str
value: int
class AssignedGenericResources(DockerCamelModel):
named_resource_spec: Optional[NamedResourceSpec]
discrete_resource_spec: Optional[DiscreteResourceSpec]
class ContainerStatus(DockerCamelModel):
container_id: str = pydantic.Field(alias="ContainerID")
pid: int = pydantic.Field(alias="PID")
exit_code: Optional[int]
class TaskStatus(DockerCamelModel):
timestamp: datetime
state: str
message: str
err: Optional[str]
container_status: Optional[ContainerStatus]
class LogDriver(DockerCamelModel):
name: str
options: Dict[str, str]
class NetworkAttachmentConfig(DockerCamelModel):
target: str
aliases: List[str]
driver_opts: Optional[Dict[str, str]]
class Platform(DockerCamelModel):
architecture: Optional[str]
os: Optional[str] = pydantic.Field(alias="OS")
class Spread(DockerCamelModel):
spread_descriptor: str
class Placement(DockerCamelModel):
constraints: Optional[List[str]]
preferences: Optional[List[Spread]]
max_replicas: Optional[int]
platforms: Optional[List[Platform]]
class RestartPolicy(DockerCamelModel):
condition: str
delay: int
max_attempts: int
window: int
class PluginPrivilege(DockerCamelModel):
name: str
description: str
value: List[str]
class PluginSpec(DockerCamelModel):
    """Spec for a task that runs a plugin instead of a container."""

    name: str
    remote: str
    disabled: bool
    plugin_privilege: List[PluginPrivilege]


class ContainerSpec(DockerCamelModel):
    """Container configuration of a task.

    Fields typed ``Any`` mirror API sub-objects that are not yet modelled.
    """

    image: str
    labels: Optional[Dict[str, str]]
    command: Optional[List[str]]
    args: Optional[List[str]]
    hostname: Optional[str]
    env: Optional[List[str]]
    dir: Optional[str]
    user: Optional[str]
    groups: Optional[List[str]]
    privileges: Any
    tty: Optional[bool] = pydantic.Field(alias="TTY")
    open_stdin: Optional[bool]
    read_only: Optional[bool]
    mounts: Optional[List[Any]]
    stop_signal: Optional[str]
    stop_grace_period: Optional[int]
    health_check: Any
    hosts: Optional[List[str]]
    dns_config: Any
    secrets: Optional[List[Any]]
    configs: Optional[List[Any]]
    isolation: Optional[str]
    init: Optional[bool]
    sysctls: Any
class NetworkAttachmentSpec(DockerCamelModel):
    """Spec for a network-attachment task ("ContainerID" alias required)."""

    container_id: str = pydantic.Field(alias="ContainerID")


class ResourceObject(DockerCamelModel):
    """Resource amounts ("NanoCPUs" alias for the mixed-case JSON key)."""

    nano_cpus: Optional[int] = pydantic.Field(alias="NanoCPUs")
    memory_bytes: Optional[int]
    generic_resources: Optional[List[AssignedGenericResources]]


class Resources(DockerCamelModel):
    """Resource limits and reservations of a task."""

    limits: Optional[ResourceObject]
    reservation: Optional[ResourceObject]


class TaskSpec(DockerCamelModel):
    """User-modifiable configuration of a task.

    Exactly one of plugin_spec / container_spec / network_attachment_spec
    is expected to be set, depending on the task runtime.
    """

    # TODO: set types for Any
    plugin_spec: Optional[PluginSpec]
    container_spec: Optional[ContainerSpec]
    network_attachment_spec: Optional[NetworkAttachmentSpec]
    resources: Resources
    restart_policy: Any
    placement: Placement
    force_update: Optional[int]
    runtime: Optional[str]
    networks: Optional[List[NetworkAttachmentConfig]]
    log_driver: Optional[LogDriver]


class TaskInspectResult(DockerCamelModel):
    """Top-level result of `docker inspect` on a Swarm task."""

    id: str = pydantic.Field(alias="ID")
    version: ObjectVersion
    created_at: datetime
    updated_at: datetime
    name: Optional[str]
    labels: Optional[Dict[str, str]]
    spec: TaskSpec
    service_id: str = pydantic.Field(alias="ServiceID")
    slot: Optional[int]
    node_id: Optional[str] = pydantic.Field(alias="NodeID")
    assigned_generic_resources: Optional[List[AssignedGenericResources]]
    status: TaskStatus
    desired_state: str
|
from random import randint
from celery_tasks.sms.tasks import send_sms_code
from django.shortcuts import render, redirect
from django import http
from django.views import View
import re, json, logging
from django.contrib.auth import login, authenticate, logout, mixins
from django.core.paginator import Paginator, EmptyPage
from goods.models import SKU
from oauth.utils import generate_openid_signature, check_openid_signature
from orders.models import OrderInfo, OrderGoods
from .utils import generate_email_verify_url, check_verify_token
from .models import User, Address
from meiduo_mall.utils.response_code import RETCODE
from django_redis import get_redis_connection
from django.conf import settings
from celery_tasks.email.tasks import send_verify_email
from meiduo_mall.utils.view import LoginRequiredView
from carts.utils import merge_cart_cookie_to_redis
logger = logging.getLogger('django')
class RegisterView(View):
    """User registration: GET renders the form, POST creates the account."""

    def get(self, request):
        """Render the registration page."""
        return render(request, 'register.html')

    def post(self, request):
        """Validate the submitted form, create the user and log them in."""
        query_dict = request.POST
        username = query_dict.get('username')
        password = query_dict.get('password')
        password2 = query_dict.get('password2')
        mobile = query_dict.get('mobile')
        sms_code = query_dict.get('sms_code')
        allow = query_dict.get('allow')
        # All fields (including the terms checkbox 'allow') are required.
        if all([username, password, mobile, sms_code, allow]) is False:
            return http.HttpResponse("缺少必要参数")
        if not re.match(r'^[a-zA-Z0-9_-]{5,20}$', username):
            return http.HttpResponse("请输入5-20个字符的用户名")
        if not re.match(r'^[0-9A-Za-z]{8,20}$', password):
            return http.HttpResponse("请输入8-20位密码")
        if not password == password2:
            return http.HttpResponse("两次密码输入不一致")
        if not re.match(r'^1[3456789]\d{9}$', mobile):
            return http.HttpResponse("您输入的手机号格式不正确")
        # Verify the SMS code against redis; it is single-use, so the
        # server-side copy is deleted as soon as it is read.
        redis_conn = get_redis_connection('verify_code')
        sms_code_server = redis_conn.get("sms_%s" % mobile)
        redis_conn.delete("sms_%s" % mobile)
        if sms_code_server is None:
            return http.HttpResponse("短信验证码过期")
        sms_code_server = sms_code_server.decode()
        if sms_code_server != sms_code:
            return http.HttpResponse("短信验证码输入错误")
        # create_user() hashes the password before storing it.
        user = User.objects.create_user(username=username, password=password, mobile=mobile)
        # Keep the user logged in via the session.
        login(request, user)
        # Redirect home and expose the username to the front end via cookie.
        response = redirect('/')
        response.set_cookie('username', user.username, max_age=settings.SESSION_COOKIE_AGE)
        return response
class UsernameCountView(View):
    """Report how many users already hold a given username (duplicate check)."""

    def get(self, request, username):
        matches = User.objects.filter(username=username).count()
        return http.JsonResponse({
            'count': matches,
            'code': RETCODE.OK,
            'errmsg': 'ok',
        })
class MobileCountView(View):
    """Report how many users already registered a given mobile number."""

    def get(self, request, mobile):
        matches = User.objects.filter(mobile=mobile).count()
        return http.JsonResponse({
            'count': matches,
            'code': RETCODE.OK,
            'errmsg': 'ok',
        })
# Login page
class LoginView(View):
    """User login: GET renders the form, POST authenticates."""

    def get(self, request):
        return render(request, 'login.html')

    def post(self, request):
        """Authenticate the user and merge the cookie cart when needed."""
        username = request.POST.get('username')
        password = request.POST.get('password')
        remember = request.POST.get('remember')
        # authenticate() returns None on bad credentials (or missing input).
        user = authenticate(request, username=username, password=password)
        if user is None:
            return render(request, 'login.html', {'account_errmsg': '用户名或密码错误'})
        # Keep the user logged in via the session.
        login(request, user)
        # Without "remember me" the session ends when the browser closes.
        if remember != 'on':
            request.session.set_expiry(0)
        # 'next' (query string) carries the page that sent us to the login form.
        next = request.GET.get('next')
        if next == "/orders/settlement/":
            # Coming from checkout: merge the cookie cart into redis first.
            merge_cart_cookie_to_redis(request)
            response = redirect('/carts/')
        else:
            response = redirect(next or '/')
        # Long-lived username cookie only when "remember me" was checked.
        response.set_cookie('username', user.username, max_age=settings.SESSION_COOKIE_AGE if remember else None)
        # The anonymous cookie cart is obsolete once the user is logged in.
        response.delete_cookie("carts")
        return response
class LogoutView(View):
    """Log the user out and send them back to the login page."""

    def get(self, request):
        # Clear the session first, then remove the front-end username cookie.
        logout(request)
        resp = redirect('/login/')
        resp.delete_cookie('username')
        return resp
class UserInfoView(mixins.LoginRequiredMixin, View):
    """User-center info page; LoginRequiredMixin redirects anonymous users
    to the login page (and back here afterwards)."""

    def get(self, request):
        return render(request, 'user_center_info.html')
class EmailView(View):
    """Attach an e-mail address to the logged-in user and send a
    verification link."""

    def put(self, request):
        # Payload arrives as a JSON request body.
        json_dict = json.loads(request.body.decode())
        email = json_dict.get('email')
        # Validate.
        if not email:
            return http.JsonResponse({'code': RETCODE.NECESSARYPARAMERR, 'errmsg': '缺少必要参数'})
        if not re.match(r'^[a-z0-9][\w\.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$', email):
            return http.JsonResponse({'code': RETCODE.EMAILERR, 'errmsg': '邮箱错误'})
        # Only set the address when none is stored yet (email='' filter).
        user = request.user
        User.objects.filter(username=user.username, email='').update(email=email)
        # Build the signed activation link.
        verify_url = generate_email_verify_url(user)
        print(verify_url)  # NOTE(review): debug print left in — consider logger.debug
        # Send the mail asynchronously through celery.
        send_verify_email.delay(email, verify_url)
        return http.JsonResponse({'code': RETCODE.OK, 'errmsg': '添加邮件成功'})
class VerifyEmailUrl(View):
    """Activate a user's e-mail address via the signed link they received."""

    def get(self, request):
        raw_token = request.GET.get('token')
        if raw_token is None:
            return http.HttpResponseForbidden("缺少token")
        # Decode the signed token back into a user instance.
        account = check_verify_token(raw_token)
        if account is None:
            return http.HttpResponseForbidden("token无效")
        # Flag the address as verified and persist.
        account.email_active = True
        account.save()
        return redirect('/info/')
class AddressView(LoginRequiredView):
    """Render the shipping-address management page."""

    def get(self, request):
        user = request.user
        # All live (not soft-deleted) addresses of this user.
        address_qs = Address.objects.filter(user=user, is_deleted=False)
        # Serialize each address model into a plain dict for the template.
        address_list = [
            {
                'id': addr.id,
                'title': addr.title,
                'receiver': addr.receiver,
                'province': addr.province.name,
                'province_id': addr.province.id,
                'city': addr.city.name,
                'city_id': addr.city.id,
                'district': addr.district.name,
                'district_id': addr.district.id,
                'place': addr.place,
                'mobile': addr.mobile,
                'tel': addr.tel,
                'email': addr.email,
            }
            for addr in address_qs
        ]
        context = {
            'addresses': address_list,
            'default_address_id': user.default_address_id,
        }
        return render(request, 'user_center_site.html', context)
class CreateAdderssView(LoginRequiredView):
    """Create a new shipping address for the logged-in user (JSON API)."""

    def post(self, request):
        # A user may keep at most 20 live addresses.
        user = request.user
        count = Address.objects.filter(user=user, is_deleted=False).count()
        if count >= 20:
            return http.JsonResponse({'code': RETCODE.THROTTLINGERR, 'errmsg': '超出上限'})
        # Parse the JSON request body.
        json_dict = json.loads(request.body.decode())
        title = json_dict.get('title')
        receiver = json_dict.get('receiver')
        province_id = json_dict.get('province_id')
        city_id = json_dict.get('city_id')
        district_id = json_dict.get('district_id')
        place = json_dict.get('place')
        mobile = json_dict.get('mobile')
        tel = json_dict.get('tel')
        email = json_dict.get('email')
        # Validate; tel and email are optional.
        if not all([title, receiver, province_id, city_id, district_id, place, mobile]):
            return http.HttpResponseForbidden("缺少必要参数")
        # BUG FIX: the original pattern ^1[345789]\d{9}$ rejected valid 16x
        # numbers; use the same range as RegisterView / CheckInofView.
        if not re.match(r'^1[3-9]\d{9}$', mobile):
            return http.HttpResponseForbidden("mobile格式错误")
        if tel:
            if not re.match(r'^(0[0-9]{2,3}-)?([2-9][0-9]{6,7})+(-[0-9]{1,4})?$', tel):
                return http.HttpResponseForbidden("tel格式错误")
        if email:
            if not re.match(r'^[a-z0-9][\w\.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$', email):
                return http.HttpResponseForbidden("email格式错误")
        # Persist; tel/email may be None (optional fields).
        try:
            address_model = Address.objects.create(
                user=request.user,
                title=title,
                receiver=receiver,
                province_id=province_id,
                city_id=city_id,
                district_id=district_id,
                place=place,
                mobile=mobile,
                tel=tel,
                email=email
            )
        except Exception as e:
            logger.error(e)
            return http.JsonResponse({'code': RETCODE.PARAMERR, 'errmsg': '新增地址失败'})
        # The first address automatically becomes the default one.
        if user.default_address is None:
            user.default_address = address_model
            user.save()
        # Echo the stored address back to the client.
        address_dict = {
            'id': address_model.id,
            'title': address_model.title,
            'receiver': address_model.receiver,
            'province_id': address_model.province.id,
            'province': address_model.province.name,
            'city_id': address_model.city.id,
            'city': address_model.city.name,
            'district_id': address_model.district.id,
            'district': address_model.district.name,
            'place': address_model.place,
            'mobile': address_model.mobile,
            'tel': address_model.tel,
            'email': address_model.email
        }
        return http.JsonResponse({'code': RETCODE.OK, 'errmsg': '新增地址成功', 'address': address_dict})
class UpdateDestroyAddressView(LoginRequiredView):
    """Update (PUT) or soft-delete (DELETE) one shipping address."""

    def put(self, request, address_id):
        """Replace all fields of the address and return the stored data."""
        json_dict = json.loads(request.body.decode())
        title = json_dict.get('title')
        receiver = json_dict.get('receiver')
        province_id = json_dict.get('province_id')
        city_id = json_dict.get('city_id')
        district_id = json_dict.get('district_id')
        place = json_dict.get('place')
        mobile = json_dict.get('mobile')
        tel = json_dict.get('tel')
        email = json_dict.get('email')
        # Validate; tel and email are optional.
        if not all([title, receiver, province_id, city_id, district_id, place, mobile]):
            return http.HttpResponseForbidden("缺少必要参数")
        # BUG FIX: the original pattern ^1[345789]\d{9}$ rejected valid 16x
        # numbers; use the same range as RegisterView / CheckInofView.
        if not re.match(r'^1[3-9]\d{9}$', mobile):
            return http.HttpResponseForbidden("mobile格式错误")
        if tel:
            if not re.match(r'^(0[0-9]{2,3}-)?([2-9][0-9]{6,7})+(-[0-9]{1,4})?$', tel):
                return http.HttpResponseForbidden("tel格式错误")
        if email:
            if not re.match(r'^[a-z0-9][\w\.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$', email):
                return http.HttpResponseForbidden("email格式错误")
        try:
            Address.objects.filter(id=address_id).update(
                title=title,
                receiver=receiver,
                province_id=province_id,
                city_id=city_id,
                district_id=district_id,
                place=place,
                mobile=mobile,
                tel=tel,
                email=email
            )
        except Exception as e:
            logger.error(e)
            return http.JsonResponse({'code': RETCODE.PARAMERR, 'errmsg': '修改失败'})
        # Re-read the row so the response reflects what was actually stored.
        address_model = Address.objects.get(id=address_id)
        address_dict = {
            'id': address_model.id,
            'title': address_model.title,
            'receiver': address_model.receiver,
            'province_id': address_model.province.id,
            'province': address_model.province.name,
            'city_id': address_model.city.id,
            'city': address_model.city.name,
            'district_id': address_model.district.id,
            'district': address_model.district.name,
            'place': address_model.place,
            'mobile': address_model.mobile,
            'tel': address_model.tel,
            'email': address_model.email
        }
        return http.JsonResponse({'code': RETCODE.OK, 'errmsg': '修改地址成功', 'address': address_dict})

    def delete(self, request, address_id):
        """Soft-delete: mark is_deleted instead of removing the row."""
        try:
            address = Address.objects.get(id=address_id)
            address.is_deleted = True
            address.save()
        except Exception as e:
            logger.error(e)
            return http.JsonResponse({'code': RETCODE.PARAMERR, 'errmsg': 'address_id不存在'})
        return http.JsonResponse({'code': RETCODE.OK, 'errmsg': '删除地址成功'})
class DefaultAddressView(LoginRequiredView):
    """Set one of the user's addresses as the default shipping address."""

    def put(self, request, address_id):
        try:
            chosen = Address.objects.get(id=address_id)
            request.user.default_address = chosen
            request.user.save()
            return http.JsonResponse({'code': RETCODE.OK, 'errmsg': '设置默认地址成功'})
        except Exception as exc:
            logger.error(exc)
            return http.JsonResponse({'code': RETCODE.PARAMERR, 'errmsg': '设置默认地址失败'})
class UpdateAddressTitleView(LoginRequiredView):
    """Update only the title of one shipping address."""

    def put(self, request, address_id):
        payload = json.loads(request.body.decode())
        new_title = payload.get('title')
        if new_title is None:
            return http.JsonResponse({'code': RETCODE.PARAMERR, 'errmsg': '缺少必传参数'})
        try:
            addr = Address.objects.get(id=address_id)
            addr.title = new_title
            addr.save()
            return http.JsonResponse({'code': RETCODE.OK, 'errmsg': '修改成功'})
        except Exception as exc:
            logger.error(exc)
            return http.JsonResponse({'code': RETCODE.PARAMERR, 'errmsg': '修改失败'})
class ChangePasswordView(LoginRequiredView):
    """Change the logged-in user's password."""

    def get(self, request):
        """Render the change-password page."""
        return render(request, 'user_center_pass.html')

    def post(self, request):
        """Validate the form, set the new password and force re-login."""
        query_dict = request.POST
        old_pwd = query_dict.get('old_pwd')
        new_pwd = query_dict.get('new_pwd')
        new_cpwd = query_dict.get('new_cpwd')
        # All three fields are required.
        if all([old_pwd, new_pwd, new_cpwd]) is False:
            return http.HttpResponseForbidden("缺少必传参数")
        user = request.user
        # check_password() compares against the stored hash.
        if user.check_password(old_pwd) is False:
            # NOTE(review): 'oringin_pwd_errmsg' is misspelled but must match
            # the template variable — fix both together, not here.
            return render(request, 'user_center_pass.html', {'oringin_pwd_errmsg': '原密码错误'})
        if not re.match(r'^[0-9A-Za-z]{8,20}$', new_pwd):
            return http.HttpResponseForbidden("密码最短8位,最长20位")
        if new_cpwd != new_pwd:
            return http.HttpResponseForbidden("两次密码输入不一致")
        # set_password() hashes the new password before storing.
        user.set_password(new_pwd)
        user.save()
        # Invalidate the session so the user must log in again.
        logout(request)
        # Clear the username cookie as well.
        response = redirect('/login/')
        response.delete_cookie('username')
        # Redirect to the login page.
        return response
class UserBrowsHistoryView(LoginRequiredView):
    """Record and report the user's recently viewed SKUs (redis list)."""

    def post(self, request):
        """Push one sku_id onto the user's browse history."""
        json_dict = json.loads(request.body.decode())
        sku_id = json_dict.get("sku_id")
        # The sku must exist.
        try:
            sku = SKU.objects.get(id=sku_id)
        except SKU.DoesNotExist:
            return http.HttpResponseForbidden("商品不存在")
        redis_conn = get_redis_connection('history')
        # Pipeline the three list operations into a single round trip.
        pl = redis_conn.pipeline()
        user = request.user
        key = "history_%s" % user.id
        # De-duplicate: drop any previous occurrence of this sku.
        pl.lrem(key, 0, sku_id)
        # Most recent first.
        pl.lpush(key, sku_id)
        # Keep only the five most recent entries.
        pl.ltrim(key, 0, 4)
        pl.execute()
        return http.JsonResponse({"code": RETCODE.OK, "errmsg": "OK"})

    def get(self, request):
        """Return the stored browse history for front-end display."""
        user = request.user
        redis_conn = get_redis_connection("history")
        # All stored sku ids, most recent first.
        sku_ids = redis_conn.lrange("history_%s" % user.id, 0, -1)
        sku_list = []
        # One query per id keeps the redis (recency) ordering.
        for sku_id in sku_ids:
            sku_model = SKU.objects.get(id=sku_id)
            sku_list.append({
                "id": sku_model.id,
                "name": sku_model.name,
                "default_image_url": sku_model.default_image.url,
                "price": sku_model.price
            })
        return http.JsonResponse({"code": RETCODE.OK, "errmsg": "OK", "skus": sku_list})
class UserOrderView(LoginRequiredView, View):
    """Paginated list of the user's orders with their line items."""

    def get(self, request, page_num):
        user = request.user
        order_qs = OrderInfo.objects.filter(user_id=user.id)
        order_list = []
        for order_model in order_qs:
            # Collect the line items (SKUs) belonging to this order.
            sku_list = []
            sku_qs = OrderGoods.objects.filter(order_id=order_model.order_id)
            for sku_model in sku_qs:
                sku = SKU.objects.get(id=sku_model.sku_id)
                sku_list.append({
                    "name": sku.name,
                    "price": sku_model.price,
                    "count": sku_model.count,
                    "default_image": sku.default_image,
                    # NOTE(review): amount uses the CURRENT sku price, not
                    # the order-line price (sku_model.price) — confirm intent.
                    "amount": str(sku.price * sku_model.count)
                })
            order_list.append({
                "order_id": order_model.order_id,
                "create_time": order_model.create_time,
                "sku_list": sku_list,
                "total_amount": order_model.total_amount,
                "freight": order_model.freight,
                # The -1 maps 1-based pay_method/status values onto the
                # 0-based choice tuples — assumes choices are ordered by id.
                "pay_method_name": OrderInfo.PAY_METHOD_CHOICES[order_model.pay_method - 1][1],
                "status": order_model.status,
                "status_name": OrderInfo.ORDER_STATUS_CHOICES[order_model.status - 1][1],
                # "pay_method_name": order_model.pay_method,
                # "status_name": order_model.status
            }
            )
        paginator = Paginator(order_list, 5)  # five orders per page
        # Fetch the requested page.
        try:
            page_skus = paginator.page(page_num)
        except EmptyPage:
            return http.HttpResponse("当前页面不存在")
        # Total number of pages for the pager widget.
        total_page = paginator.num_pages
        # print(order_list)
        context = {
            "page_orders": page_skus,
            "page_num": page_num,
            'total_page': total_page,  # total number of pages
        }
        return render(request, "user_center_order.html", context)
class FindPasswordView(View):
    """Render the password-recovery entry page."""

    def get(self, request):
        template_name = "find_password.html"
        return render(request, template_name)
class CheckInofView(View):
    """Step 1 of password recovery: verify the captcha and locate the
    account by username or mobile number."""

    def get(self, request, user_name):
        query_dict = request.GET
        text = query_dict.get("text")
        if not all([user_name, text]):
            # BUG FIX: the original returned the RETCODE module itself as the
            # error code; use the specific missing-parameter code instead.
            return http.JsonResponse({"code": RETCODE.NECESSARYPARAMERR, "errmsg": "缺少必传参数"})
        uuid = query_dict.get("image_code_id")
        redis_conn = get_redis_connection("verify_code")
        image_code_server = redis_conn.get('img_%s' % uuid)
        # BUG FIX: the original called .decode() before checking expiry,
        # raising AttributeError when the captcha had expired (None value).
        if image_code_server is None:
            return http.JsonResponse({"code": RETCODE.DBERR, "errmsg": "验证码过期"})
        image_code_client = query_dict.get('text')
        if image_code_server.decode().lower() != image_code_client.lower():
            return http.JsonResponse({"code": RETCODE.IMAGECODEERR, "errmsg": "验证码输入错误"}, status=400)
        try:
            if re.match(r'^1[3-9]\d{9}$', user_name):
                # Input looks like a mobile number: search by mobile.
                user = User.objects.get(mobile=user_name)
            else:
                # Otherwise treat the input as a username.
                user = User.objects.get(username=user_name)
        except User.DoesNotExist:
            return http.JsonResponse({"code": RETCODE.USERERR, "errmsg": "账号不存在"}, status=404)
        mobile = user.mobile
        # Signed token carrying (user_name, mobile) for the next step.
        access_token = generate_openid_signature([user_name, mobile])
        # Mask the middle four digits before sending to the client.
        mobile = mobile[0:3] + "*" * 4 + mobile[-4:]
        return http.JsonResponse({"code": RETCODE.OK, "errmsg": "OK", "mobile": mobile, "access_token": access_token})
class SmsCodeSendView(View):
    """Send the password-recovery SMS code (between steps 1 and 2)."""

    def get(self, request):
        query_dict = request.GET
        access_token = query_dict.get("access_token")
        # Recover (user_name, mobile) from the signed token issued in step 1.
        user_info = check_openid_signature(access_token)
        mobile = user_info[1]
        user_name = user_info[0]
        redis_conn = get_redis_connection('verify_code')
        # Throttle: flag marks that an SMS was sent within the last 60s.
        send_flag = redis_conn.get('sms_flag_%s' % mobile)
        if send_flag:
            return http.JsonResponse({"message": "error"})
        # Six-digit, zero-padded code.
        sms_code = "%06d" % randint(0, 999999)
        # print(sms_code)
        logger.info(sms_code)
        pl = redis_conn.pipeline()
        # The code is valid for five minutes.
        pl.setex('sms_%s' % mobile, 300, sms_code)
        # 60-second resend-throttle flag for this mobile number.
        pl.setex('sms_flag_%s' % mobile, 60, 1)
        pl.execute()
        # Send asynchronously through celery.
        send_sms_code.delay(mobile, sms_code)
        return http.JsonResponse({"message": "OK"})
class CheckSmsCodeView(View):
    """Step 2 of password recovery: verify the SMS code."""

    def get(self, request, user_name):
        query_dict = request.GET
        sms_code = query_dict.get("sms_code")
        # NOTE(review): step 1 accepts a mobile number as user_name, but this
        # lookup only matches username — confirm how mobile input reaches here.
        user = User.objects.get(username=user_name)
        mobile = user.mobile
        redis_conn = get_redis_connection('verify_code')
        # Single-use code: delete immediately after reading.
        sms_code_server = redis_conn.get("sms_%s" % mobile)
        redis_conn.delete("sms_%s" % mobile)
        if sms_code_server is None:
            return http.HttpResponse("短信验证码过期")
        sms_code_server = sms_code_server.decode()
        if sms_code_server != sms_code:
            return http.HttpResponse("短信验证码输入错误")
        user_id = user.id
        # Fresh signed token authorising the password-reset step.
        access_token = generate_openid_signature([user_name, mobile])
        return http.JsonResponse({"code": RETCODE.OK, "errmsg": "OK", "user_id": user_id, "access_token": access_token})
class NewPasswordView(View):
    """Step 3 of password recovery: set the new password."""

    def post(self, request, user_id):
        json_dict = json.loads(request.body.decode())
        new_pwd = json_dict.get('password')
        new_cpwd = json_dict.get('password2')
        # The target user must exist.
        try:
            User.objects.get(id=user_id)
        except User.DoesNotExist:
            return http.HttpResponseForbidden("无此用户")
        # The signed token from step 2 proves the SMS check was passed.
        access_token = json_dict.get("access_token")
        user_info = check_openid_signature(access_token)
        mobile = user_info[1]
        user_name = user_info[0]
        try:
            # NOTE(review): only proves SOME user has this mobile; it is not
            # checked against user_id — confirm the token/user binding.
            User.objects.get(mobile=mobile)
        except User.DoesNotExist:
            return http.HttpResponseForbidden("非法请求")
        # Validate the new password pair.
        user = User.objects.get(id=user_id)
        if all([new_pwd, new_cpwd]) is False:
            return http.HttpResponseForbidden("缺少必传参数")
        if not re.match(r'^[0-9A-Za-z]{8,20}$', new_pwd):
            return http.HttpResponseForbidden("密码最短8位,最长20位")
        if new_cpwd != new_pwd:
            return http.HttpResponseForbidden("两次密码输入不一致")
        # Reject a password identical to the current one.
        if authenticate(request, username=user_name, password=new_pwd):
            return http.JsonResponse({"code": RETCODE.PWDERR, "errmsg": "与原密码重复"}, status=400)
        # set_password() hashes before storing.
        user.set_password(new_pwd)
        user.save()
        return http.JsonResponse({"code": RETCODE.OK, "errmsg": "OK"})
|
quote = """
Alright, but apart from the Sanitation, the Medicine, Education, Wine,
Public Order, Irrigation, Roads, the Fresh-Water System,
and Public Health, what have the Romans ever done for us?
"""
# Print every capital letter of the quote, one per line.
for letter in filter(str.isupper, quote):
    print(letter)
|
def showMenu():
    """Build the ASCII-art main menu screen and return it as one string."""
    rows = (
        "---------------------------------------",
        "| |",
        "| |> |--| |--| /\ /\ |",
        "| |> |__| |__| / \/ \ |",
        "| |",
        "| |",
        "| ----- /\ /\ / | / |",
        "| | /--\ / \ / | / |",
        "| | / \ / \/ | \ |",
        "| |",
        "| |",
        "| |",
        "| Space) play |",
        "| |",
        "| Q) Quit |",
        "| |",
        "---------------------------------------",
    )
    # Every row is newline-terminated, including the last one.
    return "\n".join(rows) + "\n"
|
"""
This script is equivalent of the jupyter notebook example_locust_dataset.ipynb
but in a standard python script.
"""
from tridesclous import *
import pyqtgraph as pg
from matplotlib import pyplot
import time
from pprint import pprint
dirname = 'tridesclous_olfactory_bulb'  # working directory for this dataset


def initialize_catalogueconstructor():
    """Download the olfactory-bulb dataset and create a fresh DataIO."""
    # Download dataset (cached locally after the first run).
    localdir, filenames, params = download_dataset(name='olfactory_bulb')
    print(filenames)
    print(params)
    print()
    # Create a DataIO; start from scratch if the directory already exists.
    import os, shutil
    if os.path.exists(dirname):
        # remove if already exists
        shutil.rmtree(dirname)
    dataio = DataIO(dirname=dirname)
    # Feed DataIO with the raw recording files.
    dataio.set_data_source(type='RawData', filenames=filenames, **params)
    # The dataset contains 16 channels but 14 and 15 are respiration and trigs.
    dataio.add_one_channel_group(channels=range(14), chan_grp=0)
    print(dataio)
def apply_catalogue_steps_auto():
    """Run the full catalogue pipeline with auto-tuned parameters."""
    dataio = DataIO(dirname=dirname)
    cc = CatalogueConstructor(dataio=dataio)
    # Auto parameters for channel group 0, with a wider adjacency radius.
    params = get_auto_params_for_catalogue(dataio, chan_grp=0)
    params['adjacency_radius_um'] = 400.
    apply_all_catalogue_steps(cc, params, verbose=True)
    print(cc)


def open_cataloguewindow():
    """Open the interactive Qt catalogue window for manual curation."""
    dataio = DataIO(dirname=dirname)
    catalogueconstructor = CatalogueConstructor(dataio=dataio)
    app = pg.mkQApp()
    win = CatalogueWindow(catalogueconstructor)
    win.show()
    app.exec_()
def run_peeler():
    """Run the Peeler (spike extraction) on channel group 0 and time it."""
    dataio = DataIO(dirname=dirname)
    catalogue = dataio.load_catalogue(chan_grp=0)
    peeler_params = get_auto_params_for_peelers(dataio, chan_grp=0)
    pprint(peeler_params)
    peeler = Peeler(dataio)
    peeler.change_params(catalogue=catalogue, **peeler_params)
    t1 = time.perf_counter()
    peeler.run()
    t2 = time.perf_counter()
    print('peeler.run', t2-t1)


def open_PeelerWindow():
    """Open the Qt window to inspect Peeler results."""
    dataio = DataIO(dirname=dirname)
    print(dataio)
    initial_catalogue = dataio.load_catalogue(chan_grp=0)
    app = pg.mkQApp()
    win = PeelerWindow(dataio=dataio, catalogue=initial_catalogue)
    win.show()
    app.exec_()


if __name__ =='__main__':
    # Uncomment the stage(s) to run; they are sequential pipeline steps.
    #~ initialize_catalogueconstructor()
    #~ apply_catalogue_steps_auto()
    #~ open_cataloguewindow()
    run_peeler()
    #~ open_PeelerWindow()
|
from django.db import models
# Create your models here.
class Link(models.Model):
    """A named external URL (e.g. a social-network profile) with audit
    timestamps."""

    # Unique slug identifying the link.
    chave = models.SlugField(verbose_name="Identificação Rede",max_length=100,unique=True)
    # Human-readable description.
    descricao = models.CharField(verbose_name='Descrição',max_length=100)
    # Target URL (required).
    url = models.URLField(max_length=200,null=False,blank=False)
    # Creation / last-modification timestamps, managed automatically.
    criado = models.DateTimeField(auto_now_add=True)
    alterado = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = "Link"
        verbose_name_plural = "Links"
        ordering=['chave']

    def __str__(self):
        return self.chave
|
"""AppArmor control for host."""
from __future__ import annotations
import logging
from pathlib import Path
import shutil
from awesomeversion import AwesomeVersion
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DBusError, HostAppArmorError
from ..resolution.const import UnsupportedReason
from ..utils.apparmor import validate_profile
from .const import HostFeature
_LOGGER: logging.Logger = logging.getLogger(__name__)
class AppArmorControl(CoreSysAttributes):
    """Handle host AppArmor controls."""

    def __init__(self, coresys: CoreSys):
        """Initialize host power handling."""
        self.coresys: CoreSys = coresys
        # Names of profiles currently present in the AppArmor store.
        self._profiles: set[str] = set()

    @property
    def available(self) -> bool:
        """Return True if AppArmor is available on host."""
        # Requires both the OS Agent feature and host AppArmor support.
        return (
            HostFeature.OS_AGENT in self.sys_host.features
            and UnsupportedReason.APPARMOR not in self.sys_resolution.unsupported
        )

    @property
    def version(self) -> AwesomeVersion | None:
        """Return hosts AppArmor Version."""
        return self.sys_dbus.agent.apparmor.version

    def exists(self, profile_name: str) -> bool:
        """Return True if a profile exists."""
        return profile_name in self._profiles

    def _get_profile(self, profile_name: str) -> Path:
        """Get a profile from AppArmor store.

        Raises HostAppArmorError when the profile is unknown.
        """
        if profile_name not in self._profiles:
            raise HostAppArmorError(
                f"Can't find {profile_name} for removing", _LOGGER.error
            )
        return Path(self.sys_config.path_apparmor, profile_name)

    async def load(self) -> None:
        """Load available profiles."""
        # Discover every profile file in the AppArmor directory.
        for content in self.sys_config.path_apparmor.iterdir():
            if not content.is_file():
                continue
            self._profiles.add(content.name)

        _LOGGER.info("Loading AppArmor Profiles: %s", self._profiles)

        # Load profiles — best effort: one bad profile must not stop the rest.
        if self.available:
            for profile_name in self._profiles:
                try:
                    await self._load_profile(profile_name)
                except HostAppArmorError:
                    pass
        else:
            _LOGGER.warning("AppArmor is not enabled on host")

    async def load_profile(self, profile_name: str, profile_file: Path) -> None:
        """Load/Update a new/exists profile into AppArmor."""
        if not validate_profile(profile_name, profile_file):
            raise HostAppArmorError(
                f"AppArmor profile '{profile_name}' is not valid", _LOGGER.error
            )

        # Copy to AppArmor folder (in executor: blocking file I/O).
        dest_profile: Path = Path(self.sys_config.path_apparmor, profile_name)
        try:
            await self.sys_run_in_executor(shutil.copyfile, profile_file, dest_profile)
        except OSError as err:
            raise HostAppArmorError(
                f"Can't copy {profile_file}: {err}", _LOGGER.error
            ) from err

        # Register the profile, then load it on the host if AppArmor is up.
        _LOGGER.info("Adding/updating AppArmor profile: %s", profile_name)
        self._profiles.add(profile_name)
        if not self.available:
            return
        await self._load_profile(profile_name)

    async def remove_profile(self, profile_name: str) -> None:
        """Remove a AppArmor profile."""
        profile_file: Path = self._get_profile(profile_name)

        # Unload from the kernel first (only if AppArmor is enabled).
        if self.available:
            await self._unload_profile(profile_name)

        # Then delete the backing file (in executor: blocking file I/O).
        try:
            await self.sys_run_in_executor(profile_file.unlink)
        except OSError as err:
            raise HostAppArmorError(
                f"Can't remove profile: {err}", _LOGGER.error
            ) from err

        _LOGGER.info("Removing AppArmor profile: %s", profile_name)
        self._profiles.remove(profile_name)

    async def backup_profile(self, profile_name: str, backup_file: Path) -> None:
        """Backup A profile into a new file."""
        profile_file: Path = self._get_profile(profile_name)

        try:
            await self.sys_run_in_executor(shutil.copy, profile_file, backup_file)
        except OSError as err:
            raise HostAppArmorError(
                f"Can't backup profile {profile_name}: {err}", _LOGGER.error
            ) from err

    async def _load_profile(self, profile_name: str) -> None:
        """Load a profile on the host.

        Paths passed over D-Bus must be the host-visible (extern) paths.
        """
        try:
            await self.sys_dbus.agent.apparmor.load_profile(
                self.sys_config.path_extern_apparmor.joinpath(profile_name),
                self.sys_config.path_extern_apparmor_cache,
            )
        except DBusError as err:
            raise HostAppArmorError(
                f"Can't load profile {profile_name}: {err!s}", _LOGGER.error
            ) from err

    async def _unload_profile(self, profile_name: str) -> None:
        """Unload a profile on the host."""
        try:
            await self.sys_dbus.agent.apparmor.unload_profile(
                self.sys_config.path_extern_apparmor.joinpath(profile_name),
                self.sys_config.path_extern_apparmor_cache,
            )
        except DBusError as err:
            raise HostAppArmorError(
                f"Can't unload profile {profile_name}: {err!s}", _LOGGER.error
            ) from err
|
# wemos_flash.py Test flash chips with ESP8266 host
# Released under the MIT License (MIT). See LICENSE.
# Copyright (c) 2020 Peter Hinch
import uos
from machine import SPI, Pin
from flash_spi import FLASH
# Chip-select lines for the two flash chips (idle high).
cspins = (Pin(5, Pin.OUT, value=1), Pin(14, Pin.OUT, value=1))
# Software SPI (id=-1) at 20 MHz on the listed pins.
spi=SPI(-1, baudrate=20_000_000, sck=Pin(4), miso=Pin(0), mosi=Pin(2))
def get_flash():
    """Instantiate and return the FLASH driver over both chip selects."""
    flash = FLASH(spi, cspins)
    print('Instantiated Flash')
    return flash
directory = '/fl_ext'  # mount point of the external flash filesystem
a = bytearray(range(256))  # Data to write (repeating 0..255 pattern)
b = bytearray(256)  # Data to read back
files = {}  # n:length
errors = 0  # running count of verification failures
def fname(n):
    """Return the path of file number *n* (names are 1-based, zero-padded)."""
    return '%s/%05d' % (directory, n + 1)
def fcreate(n):  # Create a binary file of random length
    """Create file *n* filled with the repeating 0..255 pattern.

    The length is random in 1..65536 bytes; it is recorded in the global
    ``files`` dict and returned.
    """
    length = int.from_bytes(uos.urandom(2), 'little') + 1  # 1-65536 bytes
    linit = length
    with open(fname(n), 'wb') as f:
        while(length):
            nw = min(length, 256)
            f.write(a[:nw])
            length -= nw
    # BUG FIX: the original stored `length` here, but the write loop has
    # decremented it to 0, so every entry in `files` was 0 and fcheck()
    # passed trivially without reading a single byte.  Record the initial
    # length instead.
    files[n] = linit
    return linit
def fcheck(n):
    """Read back file *n* and verify its contents against pattern ``a``.

    Uses the length recorded in the global ``files`` dict.  Returns True
    when every chunk matches, False on premature EOF or data mismatch.
    """
    length = files[n]
    with open(fname(n), 'rb') as f:
        while(length):
            nr = f.readinto(b)
            if not nr:
                return False  # premature end of file
            if a[:nr] != b[:nr]:
                return False  # data corruption
            length -= nr
    return True
def check_all():
    """Verify every created file, printing per-file status and keeping the
    running total in the global ``errors`` counter."""
    global errors
    for idx in files:
        ok = fcheck(idx)
        if ok:
            print('File {:d} OK'.format(idx))
        else:
            print('Error in file', idx)
            errors += 1
    print('Total errors:', errors)
def remove_all():
    """Delete every file previously created by fcreate()."""
    for idx in files:
        uos.remove(fname(idx))
def flash_test(format=False):
    """Exercise the flash filesystem: create, verify and rewrite files.

    :param format: when True, create a fresh littlefs2 filesystem first.
    """
    eep = get_flash()
    if format:
        uos.VfsLfs2.mkfs(eep)
    try:
        uos.mount(eep,'/fl_ext')
    except OSError:  # Already mounted
        pass
    # Populate the filesystem with 128 random-length files.
    for n in range(128):
        length = fcreate(n)
        print('Created', n, length)
    print('Created files', files)
    check_all()
    # Stress test: repeatedly rewrite random files and re-verify everything.
    for _ in range(100):
        for x in range(5):  # Rewrite 5 files with new lengths
            n = int.from_bytes(uos.urandom(1), 'little') & 0x7f
            length = fcreate(n)
            print('Rewrote', n, length)
        check_all()
    remove_all()
# Usage hint printed at import time.
msg='''Run wemos_flash.flash_test(True) to format new array, otherwise
wemos_flash.flash_test()
Runs prolonged test of filesystem.'''
print(msg)
|
from vrepper.lib.vrepConst import sim_jointfloatparam_velocity, simx_opmode_buffer, simx_opmode_streaming
from vrepper.utils import check_ret, blocking
import numpy as np
class vrepobject():
    """Wrapper around a single V-REP (CoppeliaSim) scene-object handle.

    Every remote-API call is routed through ``self.env``, which wraps the
    raw simx API (and supplies the client id); blocking operation mode is
    used throughout.  Joint-only operations raise unless ``is_joint``.
    """

    def __init__(self, env, handle, is_joint=True):
        self.env = env            # remote-API wrapper
        self.handle = handle      # integer handle of the scene object
        self.is_joint = is_joint  # whether joint-specific calls are allowed

    def get_orientation(self, relative_to=None):
        """Return Euler angles; absolute frame when relative_to is None."""
        eulerAngles, = check_ret(self.env.simxGetObjectOrientation(
            self.handle,
            -1 if relative_to is None else relative_to.handle,  # -1 = absolute
            blocking))
        return eulerAngles

    def get_position(self, relative_to=None):
        """Return the (x, y, z) position; absolute frame when relative_to is None."""
        position, = check_ret(self.env.simxGetObjectPosition(
            self.handle,
            -1 if relative_to is None else relative_to.handle,
            blocking))
        return position

    def get_velocity(self):
        """Return the object's velocity."""
        return check_ret(self.env.simxGetObjectVelocity(
            self.handle,
            # -1 if relative_to is None else relative_to.handle,
            blocking))
        # linearVel, angularVel

    def set_velocity(self, v):
        """Set the joint's target velocity (joints only)."""
        self._check_joint()
        return check_ret(self.env.simxSetJointTargetVelocity(
            self.handle,
            v,
            blocking))

    def set_force(self, f):
        """Set the maximum force/torque the joint may exert (joints only)."""
        self._check_joint()
        return check_ret(self.env.simxSetJointForce(
            self.handle,
            f,
            blocking))

    def set_position_target(self, angle):
        """
        Set desired position of a servo
        :param int angle: target servo angle in degrees
        :return: None if successful, otherwise raises exception
        """
        self._check_joint()
        # Degrees -> radians; sign flipped to match the scene convention
        # (get_joint_angle flips it back).
        return check_ret(self.env.simxSetJointTargetPosition(
            self.handle,
            -np.deg2rad(angle),
            blocking))

    def force_position(self, angle):
        """
        Force desired position of a servo
        :param int angle: target servo angle in degrees
        :return: None if successful, otherwise raises exception
        """
        self._check_joint()
        return check_ret(self.env.simxSetJointPosition(
            self.handle,
            -np.deg2rad(angle),
            blocking))

    def set_position(self, x, y, z):
        """
        Set object to specific position (should never be done with joints)
        :param pos: tuple or list with 3 coordinates
        :return: None
        """
        pos = (x, y, z)
        return check_ret(self.env.simxSetObjectPosition(self.handle, -1, pos, blocking))

    def get_joint_angle(self):
        """Return the joint angle in degrees (sign flipped back)."""
        self._check_joint()
        angle = check_ret(
            self.env.simxGetJointPosition(
                self.handle,
                blocking
            )
        )
        return -np.rad2deg(angle[0])

    def get_joint_force(self):
        """Return the force/torque applied by the joint."""
        self._check_joint()
        force = check_ret(
            self.env.simxGetJointForce(
                self.handle,
                blocking
            )
        )
        return force

    def get_joint_velocity(self):
        """Return the joint's current velocity (float object parameter)."""
        self._check_joint()
        vel = check_ret(self.env.simxGetObjectFloatParameter(
            self.handle,
            sim_jointfloatparam_velocity,
            blocking
        ))
        return vel

    def read_force_sensor(self):
        """Return (forceVector, torqueVector), or None while data not ready."""
        state, forceVector, torqueVector = check_ret(self.env.simxReadForceSensor(
            self.handle,
            blocking))

        if state & 1 == 1:
            return None  # bit 0 set -> sensor data not (yet) available
        else:
            return forceVector, torqueVector

    def get_vision_image(self):
        """Return the vision-sensor image as a (height, width, 3) uint8 array."""
        resolution, image = check_ret(self.env.simxGetVisionSensorImage(
            self.handle,
            0,  # options=0 -> RGB
            blocking,
        ))
        dim, im = resolution, image
        nim = np.array(im, dtype='uint8')
        nim = np.reshape(nim, (dim[1], dim[0], 3))
        # BUG FIX (comment only): axis 0 is a vertical (up-down) flip, not
        # left-right as the original comment claimed.
        nim = np.flip(nim, 0)  # vertical flip
        nim = np.flip(nim, 2)  # RGB -> BGR
        return nim

    def _check_joint(self):
        """Raise when a joint-only method is called on a non-joint object."""
        if not self.is_joint:
            raise Exception("Trying to call a joint function on a non-joint object.")

    def get_global_variable(self, name, is_first_time):
        """Read a scene-global float signal.

        The first call must pass is_first_time=True to start streaming;
        subsequent calls read from the buffer.
        """
        # BUG FIX: the original passed `self.cid`, an attribute never defined
        # on this class (AttributeError at runtime).  Every other call here
        # goes through self.env, which supplies the client id itself, so do
        # the same for signal reads.
        mode = simx_opmode_streaming if is_first_time else simx_opmode_buffer
        return self.env.simxGetFloatSignal(name, mode)
|
#!/usr/bin/python2.7
def fib(n):
    """Return the Fibonacci numbers that are strictly less than n.

    :param n: exclusive upper bound for generated values
    :return: list of Fibonacci numbers a with a < n (empty when n <= 0)
    """
    result = []
    a, b = 0, 1
    while a < n:
        result.append(a)
        # Advance the (current, next) pair in a single tuple assignment.
        a, b = b, a + b
    return result
f = fib
# Functions are first-class: f is just another name bound to fib.
print(f.__doc__)
# BUG FIX: `print f(100)` was Python-2-only syntax (SyntaxError under
# Python 3); the call form below is valid in both interpreters.
print(f(100))
|
from __future__ import absolute_import, unicode_literals, print_function, division
import sys
import os
import datetime
import json
from pytest import raises
from flexmock import flexmock
from .helper import json_data, json_str
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import stampr
class Test(object):
    """Shared fixture: stubs the ping call, authenticates, and prepares one
    server-side ('created') and one local-only ('uncreated') Batch plus a
    date range used by the browse tests."""
    def setup(self):
        # The ping expectation must be installed before authenticate()
        # triggers exactly one ping.
        (flexmock(stampr.client.Client).should_receive("ping").once())
        stampr.authenticate("user", "pass")
        self.created = stampr.batch.Batch(batch_id=2, config_id=1)
        self.uncreated = stampr.batch.Batch(config_id=1)
        self.start = datetime.datetime(1900, 1, 1, 0, 0, 0)
        self.finish = datetime.datetime(2000, 1, 1, 0, 0, 0)
class TestBatchInit(Test):
    """Batch construction: default config creation and conflicting args."""

    def test_generate_a_config(self):
        # A Batch built without any config must POST a default config first.
        mock = flexmock(stampr.client.Client.current)
        expectation = mock.should_receive("_api").with_args(
            "post", ("configs", ),
            returnenvelope=False,
            output="single",
            turnaround="threeday",
            size="standard",
            style="color",
        )
        expectation.and_return({"config_id": 7})
        batch = stampr.batch.Batch()
        assert batch.config_id == 7

    def test_failure_with_config_and_config_id(self):
        # Supplying both a config object and a config_id is ambiguous.
        with raises(ValueError):
            stampr.batch.Batch(config_id=2, config=flexmock())
class TestBatchBlock(Test):
    """Batch used as a context manager."""

    def test_yield_itself(self):
        # Entering the context must hand back the very same batch instance.
        batch = stampr.batch.Batch(config_id=1)
        captured = None
        with batch as entered:
            captured = entered
        assert captured == batch
class TestBatchInitDefault(Test):
    """Defaults of a batch that has not been created server-side yet."""

    def test_that_it_has_a_config_id(self):
        batch = self.uncreated
        assert batch.config_id == 1

    def test_that_it_has_no_template(self):
        batch = self.uncreated
        assert batch.template is None

    def test_that_it_has_a_default_status(self):
        batch = self.uncreated
        assert batch.status == "processing"
class TestBatchInitFromData(Test):
    """A Batch hydrated from the batch_create JSON fixture."""

    def setup(self):
        super(TestBatchInitFromData, self).setup()
        self.from_data = stampr.batch.Batch(**json_data("batch_create"))

    def test_that_it_has_a_config_id(self):
        assert self.from_data.config_id == 1

    def test_that_it_has_a_template(self):
        assert self.from_data.template == "bleh"

    def test_that_it_has_a_status(self):
        assert self.from_data.status == "processing"

    def test_that_it_has_an_id(self):
        assert self.from_data.id == 2
class TestBatchTemplate(Test):
    """Validation of the template attribute."""

    def test_set_the_value(self):
        batch = self.uncreated
        batch.template = "hello"
        assert batch.template == "hello"

    def test_accept_none(self):
        batch = self.uncreated
        batch.template = None
        assert batch.template is None

    def test_fail_with_a_bad_type(self):
        # Only strings (or None) are acceptable templates.
        with raises(TypeError):
            self.uncreated.template = 12

    def test_fail_if_the_batch_is_already_created(self):
        # A batch that already exists server-side is read-only.
        with raises(stampr.exceptions.ReadOnlyError):
            self.created.template = "hello"
class TestBatchStatusNotCreated(Test):
    """Status changes on a batch that exists only locally."""

    def test_accept_a_correct_value(self):
        batch = self.uncreated
        assert batch.status == "processing"  # The default.
        batch.status = "hold"
        assert batch.status == "hold"

    def test_refuse_incorrect_string(self):
        # Unknown status strings are rejected.
        with raises(ValueError):
            self.uncreated.status = "frog"

    def test_bad_type(self):
        # Statuses must be strings.
        with raises(TypeError):
            self.uncreated.status = 14
class TestBatchStatusCreated(Test):
    """Status changes on a server-side batch must go through the API."""

    def test_no_authentication(self):
        stampr.client.Client._current = stampr.client.NullClient()
        with raises(stampr.exceptions.APIError):
            self.created.status = "hold"

    def test_accept_a_correct_value_and_update(self):
        batch = self.created
        mock = flexmock(stampr.client.Client.current)
        (mock.should_receive("_api")
             .with_args("post", ("batches", 2), status="hold")
             .and_return(json_data("batch_create")))
        assert batch.status == "processing"
        batch.status = "hold"
        assert batch.status == "hold"

    def test_do_nothing_if_value_hasnt_changed(self):
        # Re-setting the same status must not hit the API (no expectation
        # is installed, so any call would fail the test).
        batch = self.created
        batch.status = "processing"
        assert batch.status == "processing"
class TestBatchCreate(Test):
    """Creating a batch posts it to the API."""

    def test_no_authentication(self):
        # Without a real client, creation must be rejected.
        stampr.client.Client._current = stampr.client.NullClient()
        with raises(stampr.exceptions.APIError):
            self.uncreated.create()

    def test_post_a_creation_request_without_a_template(self):
        batch = self.uncreated
        (flexmock(stampr.client.Client.current)
            .should_receive("_api")
            .with_args("post", ("batches",), config_id=1, status="processing")
            .and_return(json_data("batch_create")))
        batch.create()
        assert batch.id == 2

    def test_post_a_creation_request_with_a_template(self):
        batch = stampr.batch.Batch(config_id=1, template="Bleh")
        (flexmock(stampr.client.Client.current)
            .should_receive("_api")
            .with_args("post", ("batches",), config_id=1, status="processing", template="Bleh")
            .and_return(json_data("batch_create")))
        batch.create()
        assert batch.id == 2
class TestBatchDelete(Test):
    """Deleting a batch."""

    def test_no_authentication(self):
        stampr.client.Client._current = stampr.client.NullClient()
        with raises(stampr.exceptions.APIError):
            self.created.delete()

    def test_delete_the_batch(self):
        batch = stampr.batch.Batch(config_id=1, template="Bleh", batch_id=2)
        (flexmock(stampr.client.Client.current)
            .should_receive("_api")
            .with_args("delete", ("batches", 2))
            .and_return(True))
        batch.delete()

    def test_fail_if_the_batch_isnt_created_yet(self):
        # A purely local batch has nothing to delete server-side.
        with raises(stampr.exceptions.APIError):
            self.uncreated.delete()
class TestBatchIndex(Test):
    """Indexing Batch[...] fetches a batch by id."""

    def test_no_authentication(self):
        stampr.client.Client._current = stampr.client.NullClient()
        with raises(stampr.exceptions.APIError):
            stampr.batch.Batch[4677]

    def test_retreive_a_specific_batch(self):
        (flexmock(stampr.client.Client.current)
            .should_receive("_api")
            .with_args("get", ("batches", 1))
            .and_return(json_data("batch_index")))
        batch = stampr.batch.Batch[1]
        assert isinstance(batch, stampr.batch.Batch)
        assert batch.id == 2

    def test_fail_with_a_negative_id(self):
        with raises(ValueError):
            stampr.batch.Batch[-1]

    def test_fail_with_a_bad_id(self):
        with raises(TypeError):
            stampr.batch.Batch["fred"]

    def test_fail_when_the_batch_does_not_exist(self):
        # BUG FIX: this method previously reused the name
        # test_retreive_a_specific_batch, silently shadowing the success-path
        # test above so it was never collected or run.
        (flexmock(stampr.client.Client.current)
            .should_receive("_api")
            .with_args("get", ("batches", 1))
            .and_return([]))
        with raises(stampr.exceptions.RequestError):
            stampr.batch.Batch[1]
class TestBatchBrowse(Test):
    """Browsing batches over a date range (paginated)."""

    def test_no_authentication(self):
        stampr.client.Client._current = stampr.client.NullClient()
        with raises(stampr.exceptions.APIError):
            stampr.batch.Batch.browse(self.start, self.finish)

    def test_retrieve_a_list_over_a_period(self):
        # Pages are fetched until the server returns an empty page.
        for page in (0, 1, 2):
            (flexmock(stampr.client.Client.current)
                .should_receive("_api")
                .with_args("get", ("batches", "browse", "1900-01-01T00:00:00", "2000-01-01T00:00:00", page))
                .and_return(json_data("batches_%d" % page)))
        batches = stampr.batch.Batch.browse(self.start, self.finish)
        assert [b.id for b in batches] == [2, 3, 4]

    def test_fail_with_bad_start(self):
        # The period bounds must be datetimes, not ints.
        with raises(TypeError):
            stampr.batch.Batch.browse(1, 3)
class TestBatchBrowseWithStatus(Test):
    """Browsing batches filtered by status."""

    def test_retrieve_a_list_of_batches_over_a_period_with_given_status(self):
        # Pages are fetched until the server returns an empty page.
        for page in (0, 1, 2):
            (flexmock(stampr.client.Client.current)
                .should_receive("_api")
                .with_args("get", ("batches", "with", "processing", "1900-01-01T00:00:00", "2000-01-01T00:00:00", page))
                .and_return(json_data("batches_%d" % page)))
        batches = stampr.batch.Batch.browse(self.start, self.finish, status="processing")
        assert [b.id for b in batches] == [2, 3, 4]

    def test_fail_with_a_bad_status_type(self):
        with raises(TypeError):
            stampr.batch.Batch.browse(self.start, self.finish, status=12)

    def test_fail_with_a_bad_status_value(self):
        with raises(ValueError):
            stampr.batch.Batch.browse(self.start, self.finish, status="frog")

    def test_fail_with_a_bad_period(self):
        with raises(TypeError):
            stampr.batch.Batch.browse(1, 3, status="processing")
class TestBatchMailing(Test):
    """Spawning a mailing from a batch."""

    def test_create_a_mailing(self):
        # Batch(batch_id=..., template=...) without a config still creates a
        # default config via the API.
        mock = flexmock(stampr.client.Client.current)
        expectation = mock.should_receive("_api").with_args(
            "post", ("configs", ),
            returnenvelope=False,
            output="single",
            turnaround="threeday",
            size="standard",
            style="color",
        )
        expectation.and_return({"config_id": 7})
        batch = stampr.batch.Batch(batch_id=6, template="frog")
        mailing = batch.mailing()
        assert isinstance(mailing, stampr.mailing.Mailing)
        assert mailing.batch_id == 6
|
from typing import Any, Dict, List, Text
import regex
import re
import rasa.shared.utils.io
from rasa.shared.constants import DOCS_URL_COMPONENTS
from rasa.nlu.tokenizers.tokenizer import Token, Tokenizer
from rasa.shared.nlu.training_data.message import Message
class WhitespaceTokenizer(Tokenizer):
    """Splits message text on whitespace after stripping punctuation and emoji."""

    defaults = {
        # Flag to check whether to split intents
        "intent_tokenization_flag": False,
        # Symbol on which intent should be split
        "intent_split_symbol": "_",
        # Regular expression to detect tokens
        "token_pattern": None,
    }

    # the following language should not be tokenized using the WhitespaceTokenizer
    not_supported_language_list = ["zh", "ja", "th"]

    def __init__(self, component_config: Dict[Text, Any] = None) -> None:
        """Construct a new tokenizer using the WhitespaceTokenizer framework."""
        super().__init__(component_config)
        self.emoji_pattern = self.get_emoji_regex()
        # 'case_sensitive' now belongs to the featurizers; warn if it is still
        # configured on the tokenizer.
        if "case_sensitive" in self.component_config:
            rasa.shared.utils.io.raise_warning(
                "The option 'case_sensitive' was moved from the tokenizers to the "
                "featurizers.",
                docs=DOCS_URL_COMPONENTS,
            )

    @staticmethod
    def get_emoji_regex():
        """Return a compiled pattern matching runs of emoji code points."""
        emoji_char_classes = (
            "["
            "\U0001F600-\U0001F64F"  # emoticons
            "\U0001F300-\U0001F5FF"  # symbols & pictographs
            "\U0001F680-\U0001F6FF"  # transport & map symbols
            "\U0001F1E0-\U0001F1FF"  # flags (iOS)
            "\U00002702-\U000027B0"
            "\U000024C2-\U0001F251"
            "\u200d"  # zero width joiner
            "\u200c"  # zero width non-joiner
            "]+"
        )
        return re.compile(emoji_char_classes, flags=re.UNICODE)

    def remove_emoji(self, text: Text) -> Text:
        """Remove emoji if the full text, aka token, matches the emoji regex."""
        return "" if self.emoji_pattern.fullmatch(text) is not None else text

    def tokenize(self, message: Message, attribute: Text) -> List[Token]:
        """Split the attribute's text into whitespace-delimited Tokens."""
        text = message.get(attribute)
        # we need to use regex instead of re, because of
        # https://stackoverflow.com/questions/12746458/python-unicode-regular-expression-matching-failing-with-some-unicode-characters
        # Strip runs of non-word characters that border whitespace or string
        # boundaries, while keeping #, @, & and number/url punctuation
        # (e.g. 10'000.00 or blabla@gmail.com) intact.
        cleaned = regex.sub(
            r"[^\w#@&]+(?=\s|$)|"
            r"(\s|^)[^\w#@&]+(?=[^0-9\s])|"
            r"(?<=[^0-9\s])[^\w._~:/?#\[\]()@!$&*+,;=-]+(?=[^0-9\s])",
            " ",
            text,
        )
        words = [token for token in (self.remove_emoji(w) for w in cleaned.split()) if token]
        # if we removed everything like smiles `:)`, use the whole text as 1 token
        if not words:
            words = [text]
        return self._apply_token_pattern(self._convert_words_to_tokens(words, text))
|
from .vika import Vika

# BUG FIX: __all__ entries must be strings naming the public API, not the
# objects themselves -- `from package import *` raises TypeError on any
# non-string item in __all__.
__all__ = ("Vika", )
|
"""User Profile"""
from django.db import models
from django.utils.translation import gettext_lazy as _
from src.apps.core.models import BaseAuditableModel
from src.apps.user.models import User
class UserProfile(BaseAuditableModel):
    """User profile model"""
    # Choice tuples: (stored value, human-readable label).
    GENDER = (
        ('M', 'Male'),
        ('F', 'Female'),
        ('None', 'None'),
    )
    SEATS = (('Window', 'Window seat'), ('Aisle', 'Aisle seat'))
    # NOTE(review): the CharFields below use null=True without blank=True,
    # which Django style discourages (two distinct "empty" states) --
    # confirm this is intentional before changing.
    first_name = models.CharField(
        _('First Name'),
        max_length=255,
        null=True,
    )
    middle_name = models.CharField(
        _('Middle Name'),
        max_length=255,
        null=True,
    )
    last_name = models.CharField(
        _('Last Name'),
        max_length=255,
        null=True,
    )
    gender = models.CharField(
        _('Gender'),
        max_length=10,
        choices=GENDER,
        default=None,
        null=True,
    )
    phone = models.CharField(
        _('Phone number'),
        null=True,
        max_length=100,
    )
    # Cloudinary URL of the user's photo; defaults to a placeholder image.
    photo_url = models.CharField(
        _('Passport photograph'),
        null=False,
        default='https://res.cloudinary.com/veeqtor/image/upload/v1561820733/'
        'airtech/user_photo_placeholder.png',
        max_length=255,
    )
    # Cloudinary public id matching photo_url (used for later updates/deletes).
    photo_public_id = models.CharField(
        _('Passport public id'),
        null=False,
        default='iowjgoirgoierhgio934843897986798',
        max_length=100,
    )
    dob = models.DateField(
        _('Date of Birth'),
        null=True,
    )
    seat_preference = models.CharField(_('Seat Preference'),
                                       max_length=10,
                                       choices=SEATS,
                                       default=None,
                                       null=True)
    # One profile per auth user; deleting the user removes the profile.
    user = models.OneToOneField(
        User,
        related_name='user_profile',
        on_delete=models.CASCADE,
    )
    @property
    def display_name(self) -> str:
        """Returns the display of the user"""
        return f'{self.first_name} {self.last_name}'
    class Meta:
        """Meta"""
        verbose_name_plural = 'User profiles'
        db_table = 'user_profiles'
    def __str__(self):
        """String representation"""
        return f'{self.display_name}'
class Passport(BaseAuditableModel):
    """Model for the passport table"""
    image = models.CharField(
        _('Passport Image URL'),
        max_length=255,
        null=True,
    )
    # Unique across all users' passports.
    passport_number = models.CharField(_('Passport Number'),
                                       max_length=100,
                                       null=True,
                                       unique=True)
    country = models.CharField(_('Country of Citizenship'),
                               max_length=255,
                               null=True)
    issued_date = models.DateField(
        _('Issued date'),
        null=True,
    )
    expiry_date = models.DateField(
        _('Expiry date'),
        null=True,
    )
    # A profile may hold several passports; deleting the profile removes them.
    profile = models.ForeignKey(
        UserProfile,
        related_name='passports',
        on_delete=models.CASCADE,
    )
    class Meta:
        """Meta"""
        verbose_name_plural = 'Passports'
        db_table = 'user_passports'
    def __str__(self):
        """String representation"""
        return f'{self.passport_number} - {self.country}'
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainWindows.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
import cv2 as cv
import matplotlib.pyplot as plt
import numpy as np
import math
import json
import os
from tkinter import filedialog
import tkinter as tk
from PIL import Image
from PyQt5.QtGui import QPixmap, QImage
class Ui_MainWindow(object):
    """Main window of the CBIR (content-based image retrieval) demo.

    Page 0 offers "Refresh Database" / "Search Image" buttons; page 1 shows
    up to nine result images.  Descriptors are Hu moments of a Sobel edge
    image, compared with the Canberra distance.
    """

    def setupUi(self, MainWindow):
        """Build all widgets (generated from mainWindows.ui by PyQt5 uic)."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(798, 598)
        # Orange/grey palette applied to the Active, Inactive and Disabled
        # color groups alike.
        palette = QtGui.QPalette()
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(53, 53, 53))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(57, 57, 57))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(47, 47, 47))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(53, 53, 53))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(57, 57, 57))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(47, 47, 47))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
        brush = QtGui.QBrush(QtGui.QColor(255, 170, 0))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
        brush = QtGui.QBrush(QtGui.QColor(53, 53, 53))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
        brush = QtGui.QBrush(QtGui.QColor(47, 47, 47))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
        brush = QtGui.QBrush(QtGui.QColor(47, 47, 47))
        brush.setStyle(QtCore.Qt.SolidPattern)
        palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
        MainWindow.setPalette(palette)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)
        self.stackedWidget.setObjectName("stackedWidget")
        # Page 0: title plus the two action buttons.
        self.page = QtWidgets.QWidget()
        self.page.setObjectName("page")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.page)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.Title = QtWidgets.QLabel(self.page)
        self.Title.setMaximumSize(QtCore.QSize(769, 140))
        font = QtGui.QFont()
        font.setPointSize(20)
        font.setBold(True)
        font.setWeight(75)
        self.Title.setFont(font)
        self.Title.setTextFormat(QtCore.Qt.RichText)
        self.Title.setAlignment(QtCore.Qt.AlignCenter)
        self.Title.setObjectName("Title")
        self.verticalLayout_2.addWidget(self.Title)
        self.widget = QtWidgets.QWidget(self.page)
        self.widget.setObjectName("widget")
        self.pushButton = QtWidgets.QPushButton(self.widget)
        self.pushButton.setGeometry(QtCore.QRect(150, 150, 171, 81))
        font = QtGui.QFont()
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.pushButton.setFont(font)
        self.pushButton.setStyleSheet("QPushButton {\n"
                                      "    background-color: rgb(255, 170, 0);\n"
                                      "    font-color:\n"
                                      "}")
        self.pushButton.setObjectName("pushButton")
        self.pushButton_2 = QtWidgets.QPushButton(self.widget)
        self.pushButton_2.setGeometry(QtCore.QRect(440, 150, 171, 81))
        font = QtGui.QFont()
        font.setPointSize(12)
        font.setBold(True)
        font.setWeight(75)
        self.pushButton_2.setFont(font)
        self.pushButton_2.setStyleSheet("QPushButton {\n"
                                        "    background-color: rgb(255, 170, 0);\n"
                                        "    font-color:\n"
                                        "}")
        self.pushButton_2.setObjectName("pushButton_2")
        self.verticalLayout_2.addWidget(self.widget)
        self.stackedWidget.addWidget(self.page)
        # Page 1: a 3x3 grid of result image labels.
        self.page_2 = QtWidgets.QWidget()
        self.page_2.setObjectName("page_2")
        self.label = QtWidgets.QLabel(self.page_2)
        self.label.setGeometry(QtCore.QRect(9, 9, 250, 170))
        self.label.setText("")
        self.label.setScaledContents(True)
        self.label.setObjectName("label")
        self.label_2 = QtWidgets.QLabel(self.page_2)
        self.label_2.setGeometry(QtCore.QRect(9, 360, 250, 170))
        self.label_2.setText("")
        self.label_2.setScaledContents(True)
        self.label_2.setObjectName("label_2")
        self.label_3 = QtWidgets.QLabel(self.page_2)
        self.label_3.setGeometry(QtCore.QRect(9, 185, 250, 169))
        self.label_3.setText("")
        self.label_3.setScaledContents(True)
        self.label_3.setObjectName("label_3")
        self.label_4 = QtWidgets.QLabel(self.page_2)
        self.label_4.setGeometry(QtCore.QRect(265, 185, 250, 169))
        self.label_4.setText("")
        self.label_4.setScaledContents(True)
        self.label_4.setObjectName("label_4")
        self.label_5 = QtWidgets.QLabel(self.page_2)
        self.label_5.setGeometry(QtCore.QRect(265, 360, 250, 170))
        self.label_5.setText("")
        self.label_5.setScaledContents(True)
        self.label_5.setObjectName("label_5")
        self.label_6 = QtWidgets.QLabel(self.page_2)
        self.label_6.setGeometry(QtCore.QRect(265, 9, 250, 170))
        self.label_6.setText("")
        self.label_6.setScaledContents(True)
        self.label_6.setObjectName("label_6")
        self.label_7 = QtWidgets.QLabel(self.page_2)
        self.label_7.setGeometry(QtCore.QRect(521, 185, 250, 169))
        self.label_7.setText("")
        self.label_7.setScaledContents(True)
        self.label_7.setObjectName("label_7")
        self.label_8 = QtWidgets.QLabel(self.page_2)
        self.label_8.setGeometry(QtCore.QRect(521, 9, 250, 170))
        self.label_8.setText("")
        self.label_8.setScaledContents(True)
        self.label_8.setObjectName("label_8")
        self.label_9 = QtWidgets.QLabel(self.page_2)
        self.label_9.setGeometry(QtCore.QRect(521, 360, 250, 170))
        self.label_9.setText("")
        self.label_9.setScaledContents(True)
        self.label_9.setObjectName("label_9")
        self.stackedWidget.addWidget(self.page_2)
        self.verticalLayout.addWidget(self.stackedWidget)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 798, 21))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        self.stackedWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        self.button_click()

    def retranslateUi(self, MainWindow):
        """Apply the translatable UI strings."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "CBIR Using Color and Shape Dechiptors"))
        self.Title.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" color:#eaa400;\">CBIR Using Color and Shape Dechiptors</span></p></body></html>"))
        self.pushButton.setText(_translate("MainWindow", "Refresh Database"))
        self.pushButton_2.setText(_translate("MainWindow", "Search Image"))

    def button_click(self):
        """Wire the landing-page buttons to their handlers."""
        self.pushButton.clicked.connect(self.refreshDatabase)
        self.pushButton_2.clicked.connect(self.searchImage)

    def displayImage(self, listImage):
        """Show up to nine result images (keys of ``listImage``, filenames
        under 'image/') on the results page."""
        slots = [self.label, self.label_2, self.label_3, self.label_4,
                 self.label_5, self.label_6, self.label_7, self.label_8,
                 self.label_9]
        for index, img in enumerate(listImage):
            if index >= len(slots):
                break  # only nine display slots
            self.stackedWidget.setCurrentIndex(1)
            slots[index].setPixmap(QtGui.QPixmap('image/' + img))

    def searchImage(self):
        """Ask the user for a query image, rank the database by similarity,
        and display the closest matches."""
        fname = self.openWindow()
        _, _, moments, img = self.main(fname)
        Hasil = self.fetchDatabase(moments.tolist())
        # Sort ascending by Canberra distance: smaller = more similar.
        Hasil = dict(sorted(Hasil.items(), key=lambda item: item[1]))
        self.displayImage(Hasil)

    def canberraDistance(self, A : list, B : list):
        """Canberra distance over the first 7 components (the Hu moments).

        BUG FIX: a 0/0 term (both components zero) previously raised
        ZeroDivisionError; per the standard Canberra convention it now
        contributes 0 to the sum.
        """
        total = 0
        for i in range(7):
            top = abs(A[i]-B[i])
            bottom = abs(A[i]) + abs(B[i])
            if bottom:  # skip the 0/0 term
                total += top/bottom
        return total

    def fetchDatabase(self, imageQquery : list):
        """Return {image filename: Canberra distance to the query moments}
        for every entry stored in DatabaseMoments.json."""
        distances = {}
        # BUG FIX: the file handle was previously never closed.
        with open('DatabaseMoments.json') as fjson:
            dataMoment = json.load(fjson)
        for key in dataMoment:
            entry = dataMoment[key]
            distances[entry['Name']] = self.canberraDistance(imageQquery, entry['moments'])
        return distances

    def refreshDatabase(self):
        """Recompute Hu-moment descriptors for every image under 'image/'
        and persist them to DatabaseMoments.json."""
        dbImages = {}
        for index, img in enumerate(self.listImg()):
            stdValue, stdDotValue, moments, citra = self.main('image/' + img)
            dbImages[index] = {
                "Name": img,
                "moments": moments.tolist(),
            }
        with open('DatabaseMoments.json', 'w') as outfile:
            json.dump(dbImages, outfile)

    def listImg(self):
        """Return the filenames in the top-level 'image' directory
        ([] if the directory is missing, so refreshDatabase stays safe)."""
        for (root, dirs, fname) in os.walk('image', topdown=True):
            return fname  # only the first (top) walk level is needed
        return []

    def main(self, fImg: str = 'image/flower1.jpg'):
        """Compute the descriptor of one image.

        Pipeline: downscale 50% -> Lab L channel -> Gaussian blur -> Sobel
        gradient -> std-dev threshold -> log-scaled Hu moments.

        :return: (std of Sobel image, std of thresholded product,
                  7 log-scaled Hu moments, thresholded edge image)
        """
        clrImg = cv.imread(fImg)
        scale_percent = 50
        # calculate the 50 percent of original dimensions
        width = int(clrImg.shape[1] * scale_percent / 100)
        height = int(clrImg.shape[0] * scale_percent / 100)
        dsize = (width, height)
        clrImg = cv.resize(clrImg, dsize)
        # Convert BGR -> RGB -> Lab; the L (lightness) channel serves as the
        # grayscale image for edge detection.
        labImg = cv.cvtColor(clrImg, cv.COLOR_BGR2RGB)
        labImg = cv.cvtColor(labImg, cv.COLOR_RGB2Lab)
        splitImg = labImg
        grayImg = splitImg[:,:,0]
        blrImg = cv.GaussianBlur(grayImg, (3,3), 0)
        # Gradient magnitude approximated as the mean of |Sobel x| and |Sobel y|.
        kernelSize = 1
        sobelx = cv.Sobel(blrImg,cv.CV_64F,1,0,ksize=kernelSize)
        abs_sobelx = np.absolute(sobelx)
        sobel_x = np.uint8(abs_sobelx)
        sobely = cv.Sobel(blrImg,cv.CV_64F,0,1,ksize=kernelSize)
        abs_sobely = np.absolute(sobely)
        sobel_y = np.uint8(abs_sobely)
        sobelImg = cv.addWeighted(sobel_x, 0.5, sobel_y, 0.5, 0)
        # Threshold at the gradient image's own standard deviation.
        stdValue = np.std(sobelImg)
        ret,thresh1 = cv.threshold(sobelImg,stdValue,255,cv.THRESH_BINARY)
        dotImg = cv.multiply(sobelImg, thresh1)
        stdDotValue = np.std(dotImg)
        # Hu moments, log-scaled (sign preserved) for comparable magnitudes.
        MomentsImg = cv.HuMoments(cv.moments(dotImg)).flatten()
        for i in range(7):
            MomentsImg[i] = -1 * math.copysign(1.0, MomentsImg[i]) * math.log10(abs(MomentsImg[i]))
        return stdValue, stdDotValue, MomentsImg, dotImg

    def openWindow(self):
        """Open a Tk file-picker and return the chosen path."""
        root = tk.Tk()
        root.withdraw()  # hide the empty Tk root window
        file_path = filedialog.askopenfilename()
        return file_path
if __name__ == "__main__":
    import sys
    # Launch the Qt event loop with the CBIR main window.
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)
    window.show()
    sys.exit(app.exec_())
|
from unittest import TestCase
from utilities import getDictValue
class Test_Utilities(TestCase):
    """Tests for utilities.getDictValue dotted-path lookup."""

    def setUp(self):
        self.fixture = {'posts': {'comments': [{'text': 'smartidea'}]}}

    def tearDown(self):
        self.fixture = None

    def test_get_dict_value(self):
        # A dotted path traverses dicts by key and lists by integer index.
        result = getDictValue(self.fixture, 'posts.comments.0.text')
        self.assertEqual(result, 'smartidea')
|
from django.contrib import messages
from django.test import TestCase
from django.urls import reverse
from bookclubs.forms import LogInForm
from bookclubs.models import User
from bookclubs.tests.helpers import LogInTester, reverse_with_next
class LogInViewTestCase(TestCase, LogInTester):
    """Tests for the log in view"""
    fixtures = [
        'bookclubs/tests/fixtures/default_user.json',
    ]
    def setUp(self):
        self.url = reverse('log_in')
        self.user = User.objects.get(username='@johndoe')
    def test_log_in_url(self):
        self.assertEqual(self.url, '/log_in/')
    # --- GET requests ---
    def test_get_log_in(self):
        # A fresh GET renders an unbound form, no 'next' and no messages.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'log_in.html')
        form = response.context['form']
        next = response.context['next']
        self.assertTrue(isinstance(form, LogInForm))
        self.assertFalse(form.is_bound)
        self.assertFalse(next)
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 0)
        # self.assert_no_menu(response)
    def test_get_log_in_with_redirect(self):
        # ?next=... must be carried through into the template context.
        destination_url = reverse('feed')
        self.url = reverse_with_next('log_in', destination_url)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'log_in.html')
        form = response.context['form']
        next = response.context['next']
        self.assertTrue(isinstance(form, LogInForm))
        self.assertFalse(form.is_bound)
        self.assertEqual(next, destination_url)
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 0)
    def test_get_log_in_redirects_when_logged_in(self):
        # Already-authenticated users are bounced straight to the feed.
        self.client.login(username=self.user.username, password="Password123")
        response = self.client.get(self.url, follow=True)
        redirect_url = reverse('feed')
        self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
        self.assertTemplateUsed(response, 'feed.html')
    # --- POST requests (NOTE(review): 'succesful' typo kept in method
    # names; renaming would change the collected test ids) ---
    def test_unsuccesful_log_in(self):
        form_input = {'username': '@johndoe', 'password': 'WrongPassword123'}
        response = self.client.post(self.url, form_input)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'log_in.html')
        form = response.context['form']
        self.assertTrue(isinstance(form, LogInForm))
        self.assertFalse(form.is_bound)
        self.assertFalse(self._is_logged_in())
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 1)
        self.assertEqual(messages_list[0].level, messages.ERROR)
    def test_log_in_with_blank_username(self):
        form_input = {'username': '', 'password': 'WrongPassword123'}
        response = self.client.post(self.url, form_input)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'log_in.html')
        form = response.context['form']
        self.assertTrue(isinstance(form, LogInForm))
        self.assertFalse(form.is_bound)
        self.assertFalse(self._is_logged_in())
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 1)
        self.assertEqual(messages_list[0].level, messages.ERROR)
    def test_log_in_with_blank_password(self):
        form_input = {'username': '@johndoe', 'password': ''}
        response = self.client.post(self.url, form_input)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'log_in.html')
        form = response.context['form']
        self.assertTrue(isinstance(form, LogInForm))
        self.assertFalse(form.is_bound)
        self.assertFalse(self._is_logged_in())
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 1)
        self.assertEqual(messages_list[0].level, messages.ERROR)
    def test_succesful_log_in(self):
        form_input = {'username': '@johndoe', 'password': 'Password123'}
        # follow=True traverses the redirect chain so the final template
        # can be asserted.
        response = self.client.post(self.url, form_input, follow=True)
        self.assertTrue(self._is_logged_in())
        response_url = reverse('feed')
        self.assertRedirects(response, response_url, status_code=302, target_status_code=200)
        self.assertTemplateUsed(response, 'feed.html')
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 0)
        # self.assert_menu(response)
    def test_succesful_log_in_with_redirect(self):
        redirect_url = reverse('feed')
        form_input = {'username': '@johndoe', 'password': 'Password123', 'next': redirect_url}
        response = self.client.post(self.url, form_input, follow=True)
        self.assertTrue(self._is_logged_in())
        self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
        self.assertTemplateUsed(response, 'feed.html')
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 0)
    def test_post_log_in_redirects_when_logged_in(self):
        # A logged-in user POSTing (even bad credentials) is redirected away.
        self.client.login(username=self.user.username, password="Password123")
        form_input = {'username': '@wronguser', 'password': 'WrongPassword123'}
        response = self.client.post(self.url, form_input, follow=True)
        redirect_url = reverse('feed')
        self.assertRedirects(response, redirect_url, status_code=302, target_status_code=200)
        self.assertTemplateUsed(response, 'feed.html')
    def test_post_log_in_with_incorrect_credentials_and_redirect(self):
        # The 'next' value must survive a failed login attempt.
        redirect_url = reverse('feed')
        form_input = {'username': '@johndoe', 'password': 'WrongPassword123', 'next': redirect_url}
        response = self.client.post(self.url, form_input)
        next = response.context['next']
        self.assertEqual(next, redirect_url)
    def test_valid_log_in_by_inactive_user(self):
        # Correct credentials are still rejected for deactivated accounts.
        self.user.is_active = False
        self.user.save()
        form_input = {'username': '@johndoe', 'password': 'Password123'}
        response = self.client.post(self.url, form_input, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'log_in.html')
        form = response.context['form']
        self.assertTrue(isinstance(form, LogInForm))
        self.assertFalse(form.is_bound)
        self.assertFalse(self._is_logged_in())
        messages_list = list(response.context['messages'])
        self.assertEqual(len(messages_list), 1)
        self.assertEqual(messages_list[0].level, messages.ERROR)
|
import random
import sys
import tkinter as tk
import tkinter.font as tk_font
from tkinter import messagebox
def beep():
    """Sound the terminal bell, flushing so it plays immediately."""
    sys.stdout.write("\a")
    sys.stdout.flush()
class BoardValues():
    """Board evaluation constants, ordered worst-to-best for a player."""
    none, loss, draw, unknown, win = range(5)
class SkillLevels():
    """Computer opponent skill levels selectable from the Level menu."""
    random, beginner, advanced = range(3)
class App:
    """Tkinter tic-tac-toe game against a computer opponent.

    The computer plays either a random move or a depth-limited minimax
    search, depending on the skill level chosen from the "Level" menu.

    Fixes over the original:
      * ``random_move`` no longer suffers an off-by-one in its random
        index (see its docstring), and relies on ``import random``.
      * the nine copy-pasted board-square constructions are a loop.
    """

    def kill_callback(self):
        """Destroy the main window when the user closes it."""
        self.window.destroy()

    def __init__(self):
        self.x0 = 0
        self.y0 = 0
        self.tree_links = []
        self.walls = []

        # Make the widgets.
        self.window = tk.Tk()
        self.window.title("tic_tac_toe")
        self.window.protocol("WM_DELETE_WINDOW", self.kill_callback)
        self.window.geometry("355x342")

        # Game menu.
        menubar = tk.Menu(self.window)
        game_menu = tk.Menu(menubar, tearoff=False)
        game_menu.add_command(label="Play X", command=self.play_x, underline=5)
        game_menu.add_command(label="Play O", command=self.play_o, underline=5)
        game_menu.add_separator()
        game_menu.add_command(label="Exit", command=self.window.destroy, underline=0)
        menubar.add_cascade(label="Game", menu=game_menu)

        # Skill-level menu.
        level_menu = tk.Menu(menubar, tearoff=False)
        self.skill_level = tk.IntVar()
        level_menu.add_radiobutton(label="Random", variable=self.skill_level, value=int(SkillLevels.random))
        level_menu.add_radiobutton(label="Beginner", variable=self.skill_level, value=int(SkillLevels.beginner))
        level_menu.add_radiobutton(label="Advanced", variable=self.skill_level, value=int(SkillLevels.advanced))
        self.skill_level.set(SkillLevels.random)
        menubar.add_cascade(label="Level", menu=level_menu)

        # Build the 3x3 board of labels (previously nine hand-unrolled
        # constructions; this loop is equivalent).
        font = tk_font.Font(size=65)
        sq_width = 2
        sq_height = 1
        padx = 5
        pady = 5
        self.squares = [[None for c in range(3)] for r in range(3)]
        for r in range(3):
            for c in range(3):
                square = tk.Label(self.window, width=sq_width, height=sq_height,
                                  bg="white", text=f"{r}{c}", borderwidth=2,
                                  relief=tk.SUNKEN, font=font)
                # The original layout places board rows at grid rows 1-3.
                square.grid(padx=padx, pady=pady, row=r + 1, column=c)
                # Bind rc as a default argument so each lambda keeps its own
                # coordinates (avoids the late-binding closure pitfall).
                square.bind("<Button-1>",
                            lambda event, rc=(r, c): self.square_clicked(event, rc))
                self.squares[r][c] = square

        # Bind some shortcut keys.
        self.window.bind_all("<Control-x>", self.shortcut_play_x)
        self.window.bind_all("<Control-o>", self.shortcut_play_o)

        # Prepare for a game.
        self.play_x()

        # Force focus so Alt+F4 closes this window and not the Python shell.
        self.window.focus_force()

        self.window.config(menu=menubar)
        self.window.mainloop()

    def shortcut_play_x(self, event):
        """Ctrl+X: start a new game with the user as X."""
        self.play_x()

    def shortcut_play_o(self, event):
        """Ctrl+O: start a new game with the user as O."""
        self.play_o()

    def play_x(self):
        """Start a new game in which the user moves first as X."""
        self.reset_game("X", "O")

    def play_o(self):
        """Start a new game in which the user is O; the computer (X) opens."""
        self.reset_game("O", "X")
        # Let the computer move.
        self.make_computer_move()

    def reset_game(self, player, computer):
        """Prepare for a new game."""
        self.board = [[" " for c in range(3)] for r in range(3)]
        for r in range(3):
            for c in range(3):
                self.squares[r][c]["text"] = ""
        self.user_player = player
        self.computer_player = computer
        self.current_player = "X"  # X always moves first.
        self.num_squares_taken = 0

    def square_clicked(self, event, rc):
        """Handle the user clicking square rc = (row, col)."""
        # A blank current player means no game is in progress.
        if self.current_player == " ":
            return
        r, c = rc
        # Reject squares that are already taken.
        if self.board[r][c] != " ":
            beep()
            return
        # Take this square for the user.
        self.board[r][c] = self.current_player
        self.squares[r][c]["text"] = self.current_player
        self.num_squares_taken += 1
        # Check for end of game.
        if self.is_winner(r, c):
            self.show_winner()
            return
        if self.num_squares_taken == 9:
            # We have a cat's game.
            self.show_cats_game()
            return
        # Hand the turn to the computer.
        self.current_player = self.computer_player
        self.make_computer_move()

    def is_winner(self, r, c):
        """Return True if the player who just took square [r, c] has won."""
        board = self.board
        player = board[r][c]
        # Row and column through the square.
        if board[r][0] == board[r][1] == board[r][2] == player:
            return True
        if board[0][c] == board[1][c] == board[2][c] == player:
            return True
        # Diagonals; only squares on a diagonal can complete one.
        if r == c or r + c == 2:
            if board[0][0] == board[1][1] == board[2][2] == player:
                return True
            if board[0][2] == board[1][1] == board[2][0] == player:
                return True
        return False

    def show_winner(self):
        """Display a winner message and stop the game."""
        text = f"{self.current_player} Wins!"
        messagebox.showinfo(text, text)
        self.current_player = " "

    def show_cats_game(self):
        """Display a tie message and stop the game."""
        text = "It's a tie!"
        messagebox.showinfo(text, text)
        self.current_player = " "

    def make_computer_move(self):
        """Choose and play the computer's move at the current skill level."""
        skill_level = self.skill_level.get()
        if skill_level == SkillLevels.random:
            best_r, best_c = self.random_move()
        elif skill_level == SkillLevels.beginner:
            # Minimax looking 3 moves ahead.
            _value, best_r, best_c = self.board_value(
                self.computer_player, self.user_player, 1, 3)
        else:
            # Minimax looking 9 moves ahead (exhaustive search).
            _value, best_r, best_c = self.board_value(
                self.computer_player, self.user_player, 1, 9)
        # Make the move.
        self.board[best_r][best_c] = self.computer_player
        self.squares[best_r][best_c]["text"] = self.computer_player
        self.num_squares_taken += 1
        # Check for end of game.
        if self.is_winner(best_r, best_c):
            self.show_winner()
            return
        if self.num_squares_taken == 9:
            # We have a cat's game.
            self.show_cats_game()
            return
        # Switch whose move it is.
        self.current_player = self.user_player

    def random_move(self):
        """Return the (row, col) of a uniformly random empty square.

        Bug fix: the original drew ``random.randint(0, 9 - taken)``, whose
        *inclusive* upper bound is one past the count of empty squares, so
        the scan could finish without a hit and return (-1, -1), silently
        overwriting square (2, 2) via negative indexing.
        """
        empty_squares = [(r, c)
                         for r in range(3)
                         for c in range(3)
                         if self.board[r][c] == " "]
        return random.choice(empty_squares)

    def board_value(self, player1, player2, depth, max_depth):
        """Minimax: return (value for player1, best row, best col).

        Values follow the BoardValues ordering
        (loss < draw < unknown < win), so plain comparisons pick the
        better outcome for a player.
        """
        # Out of search depth, or the board is full: result unknown.
        if depth > max_depth or self.num_squares_taken == 9:
            return BoardValues.unknown, -1, -1
        best_r = -1
        best_c = -1
        # Track the worst outcome we can force on player2.
        player2_value = BoardValues.win
        for row in range(3):
            for col in range(3):
                if self.board[row][col] != " ":
                    continue
                # Tentatively take the square.
                self.board[row][col] = player1
                self.num_squares_taken += 1
                if self.is_winner(row, col):
                    # Immediate win for player1 == loss for player2.
                    best_r, best_c = row, col
                    player2_value = BoardValues.loss
                else:
                    # Recursively evaluate player2's best reply.
                    test_value, _test_r, _test_c = self.board_value(
                        player2, player1, depth + 1, max_depth)
                    # Keep the move that is worst for player2.
                    if player2_value >= test_value:
                        best_r, best_c = row, col
                        player2_value = test_value
                # Undo the tentative move.
                self.board[row][col] = " "
                self.num_squares_taken -= 1
                # Cannot do better than forcing a loss on player2.
                if player2_value == BoardValues.loss:
                    break
            if player2_value == BoardValues.loss:
                break
        # Translate player2's outcome into a value for player1.
        if player2_value == BoardValues.loss:
            best_value = BoardValues.win
        elif player2_value == BoardValues.win:
            best_value = BoardValues.loss
        else:
            best_value = player2_value
        return best_value, best_r, best_c
# Launch the game when run as a script (App.__init__ enters mainloop).
if __name__ == '__main__':
    app = App()
|
from django.db import models
# Create your models here.
class Task(models.Model):
    """A to-do item, optionally illustrated with an image."""

    task_name = models.CharField(max_length=200)
    task_desc = models.CharField(max_length=200)
    # Bug fix: auto_now=True rewrites the timestamp on *every* save; a
    # creation timestamp must use auto_now_add=True, which is set exactly
    # once when the row is first inserted. (Requires a migration.)
    date_created = models.DateTimeField(auto_now_add=True)
    completed = models.BooleanField(default=False)
    image = models.ImageField(upload_to='Images/', default='Images/None/No0img.jpg')

    def __str__(self):
        # Human-readable representation for the admin and the shell.
        return self.task_name
|
"""
Module used for initializing plots to draw histograms.
"""
import numpy as np
import matplotlib.pyplot as plt
import const
def init_bar_plot():
    """
    Initialize histogram stacked bar plot.

    Returns the three bar containers (blue, green, red) so the caller
    can update their heights interactively.
    """
    _fig, axis = plt.subplots()
    axis.set_title('Stacked bar plot histogram (BGR)')
    axis.set_xlabel('Bin')
    axis.set_ylabel('Frequency (num of pixels)')
    axis.set_ylim(0, 700000)
    channel_bars = []
    for colour, label in (('blue', 'Blue'), ('green', 'Green'), ('red', 'Red')):
        _n, _bins, bars = axis.hist(0, const.BINS, rwidth=const.BAR_WIDTH,
                                    histtype='bar', stacked=True,
                                    color=colour, label=label)
        channel_bars.append(bars)
    axis.legend()
    return tuple(channel_bars)
def init_line_plot():
    """
    Initialize line plot histogram of all 3 channels.

    Returns the three Line2D artists (blue, green, red) for later
    in-place data updates.
    """
    _fig, axis = plt.subplots()
    axis.set_title('Line histogram (BGR)')
    axis.set_xlabel('Bin')
    axis.set_ylabel('Frequency (num of pixels)')
    axis.set_xlim(0, const.MAX_PIXEL_VAL - 1)
    axis.set_ylim(0, 54000)
    lines = []
    for colour, label in (('b', 'Blue'), ('g', 'Green'), ('r', 'Red')):
        line, = axis.plot(const.FULL_BINS, np.zeros((const.MAX_PIXEL_VAL,)),
                          color=colour, label=label)
        lines.append(line)
    axis.legend()
    return tuple(lines)
def turn_on_interactive_and_show():
    """Enable matplotlib's interactive mode, then display the current figure."""
    plt.ion()
    plt.show()
|
from flask import Flask, send_from_directory
def loader(app: Flask):
    """
    Register extra static-file routes from the ``STATICFILES`` configuration.

    Each configuration entry is ``(path, endpoint, static_folder)`` with an
    optional fourth element naming the host to serve from.

    Bug fixes over the original:
      * the view function closed over the loop variable ``static_folder``,
        so after the loop finished *every* registered endpoint served files
        from the last configured folder; binding it as a default argument
        freezes the correct value per route.
      * the 3-way unpack ``path, endpoint, static_folder = static`` raised
        ``ValueError`` for the documented 4-element (host) form, making the
        ``static_host`` branch unreachable; slicing fixes both forms.
    """
    staticfiles = app.config.get("STATICFILES", [])
    for static in staticfiles:
        path, endpoint, static_folder = static[:3]
        static_host = static[3] if len(static) == 4 else None

        def serve_static(filename, static_folder=static_folder):
            cache_timeout = app.get_send_file_max_age(filename)
            return send_from_directory(
                static_folder, filename, cache_timeout=cache_timeout
            )

        app.add_url_rule(
            path.rstrip("/") + "/<path:filename>",
            endpoint=endpoint,
            host=static_host,
            view_func=serve_static,
        )
|
from .. import core
from .mixin import ArrayMixin
class NumpyArray(ArrayMixin, core.NumpyArray):
    """An underlying numpy array.

    .. versionadded:: (cfdm) 1.7.0

    """

    def __getitem__(self, indices):
        """Return a subspace of the array as a numpy array.

        x.__getitem__(indices) <==> x[indices]

        The indices that define the subspace must be either `Ellipsis`
        or a sequence that contains an index for each dimension. In
        the latter case, each dimension's index must either be a
        `slice` object or a sequence of two or more integers.

        Indexing is similar to numpy indexing. The only difference to
        numpy indexing (given the restrictions on the type of indices
        allowed) is:

          * When two or more dimension's indices are sequences of
            integers then these indices work independently along each
            dimension (similar to the way vector subscripts work in
            Fortran).

        .. versionadded:: (cfdm) 1.7.0

        """
        underlying = self._get_component("array")
        return self.get_subspace(underlying, indices, copy=True)

    def to_memory(self):
        """Bring an array on disk into memory and retain it there.

        An array that already resides in memory is returned unchanged.

        .. versionadded:: (cfdm) 1.7.0

        :Returns:

            `{{class}}`
                The array that is stored in memory.

        **Examples:**

        >>> b = a.to_memory()

        """
        return self
|
import pytest
from backend.blockchain.blockchain import Blockchain
from backend.blockchain.block import GENESIS_DATA
@pytest.fixture
def blockchain():
    """Provide a fresh blockchain containing only the genesis block."""
    return Blockchain()
@pytest.fixture
def foo_chain(blockchain):
    """Provide the shared blockchain fixture extended with three numbered blocks."""
    for block_data in range(3):
        blockchain.add_block(block_data)
    return blockchain
def test_blockchain(blockchain):
    """A new chain must start with the genesis block."""
    genesis = blockchain.chain[0]
    assert genesis.hash == GENESIS_DATA["hash"]
def test_add_block(blockchain):
    """add_block must append a block carrying the given data."""
    payload = "foo"
    blockchain.add_block(payload)
    assert blockchain.chain[-1].data == payload
def test_blockchain_vlidity(blockchain, foo_chain):
    """A well-formed chain must validate without raising.

    NOTE(review): the name carries a typo ("vlidity"); left unchanged so
    the collected test id stays stable.
    """
    blockchain.blockchain_validity(foo_chain.chain)
def test_bad_genesis_block(blockchain, foo_chain):
    """Tampering with the genesis block must fail validation."""
    foo_chain.chain[0].data = "foo"
    expected = "The chain does not begin with the genesis block !!!"
    with pytest.raises(Exception, match=expected):
        blockchain.blockchain_validity(foo_chain.chain)
def test_replace_chain(foo_chain):
    """A longer valid chain must replace a fresh chain."""
    fresh = Blockchain()
    fresh.replace_chain(foo_chain.chain)
    assert fresh.chain == foo_chain.chain
def test_replace_chain_shorter_chain(foo_chain):
    """Replacing with a shorter chain must be rejected."""
    fresh = Blockchain()
    expected = "The chain must be longer than the existing chain !!!"
    with pytest.raises(Exception, match=expected):
        foo_chain.replace_chain(fresh.chain)
def test_replace_chain_bad_chain(foo_chain):
    """Replacing with a tampered chain must be rejected as invalid."""
    fresh = Blockchain()
    foo_chain.chain[1].data = "foo"
    with pytest.raises(Exception, match="The chain is invalid:"):
        fresh.replace_chain(foo_chain.chain)
|
# -*- coding: utf-8 -*-
import os
import pytest
from Bio.Seq import Seq
import numpy as numpy
from neoRNA.io.mut_count_io import MutCountIO
parametrize = pytest.mark.parametrize
class TestMutRateIO(object):
    """Unit tests for `MutCountIO` mutation-rate loading and fixing."""

    # NOTE(review): realpath('__file__') operates on the literal *string*
    # '__file__', not the module's __file__, so fileDir resolves to the
    # current working directory. The relative example path below therefore
    # assumes the tests run from the repository root -- confirm before
    # "fixing" this to use the real __file__.
    fileDir = os.path.dirname(os.path.realpath('__file__'))
    __EXAMPLE_FILENAME = 'tests/io/example_files/mutation_rate_example.csv'
    __EXAMPLE_FILE_PATH = os.path.join(fileDir, __EXAMPLE_FILENAME)
    # Reference sequence shared by all tests below.
    __SEQUENCE = Seq('GGGAGCCTGCCCTCTGATCTCTGCCTGTTCCTCTGTCCCACAGAGGGCAAAGGCTACGGGTCAGAGAGCGGGGAGGAGGACGGTGCCGGTTTCG')
    def test_init(self):
        """A record can be constructed from a sequence alone."""
        mut_rate_record = MutCountIO(self.__SEQUENCE)
        assert mut_rate_record is not None
    def test_initWithRatesList(self):
        """Rates assigned as a plain list are stored and sized correctly."""
        mut_rate_record = MutCountIO(self.__SEQUENCE)
        rates_list = [0.1, 0.2, 0.3]
        mut_rate_record.rates = rates_list
        assert mut_rate_record is not None
        assert mut_rate_record.rates.size == 3
    def test_initWithFileLoad(self):
        """A record can be constructed with a rates file path."""
        mut_rate_record = MutCountIO(self.__SEQUENCE, self.__EXAMPLE_FILE_PATH)
        assert mut_rate_record is not None
    def test_ratesParsing(self):
        """Parsing the example file yields one rate per sequence position."""
        mut_rate_record = MutCountIO(self.__SEQUENCE, self.__EXAMPLE_FILE_PATH)
        assert len(str(self.__SEQUENCE)) == mut_rate_record.rates.size
    def test_ratesFixing(self):
        """get_fixed_rates() replaces sentinel values with zero."""
        mut_rate_record = MutCountIO(self.__SEQUENCE, self.__EXAMPLE_FILE_PATH)
        rates = mut_rate_record.rates
        # -999 appears to be the "no data" sentinel in the raw rates --
        # verify against MutCountIO's implementation.
        assert rates[0] == -999
        fixed_rates = mut_rate_record.get_fixed_rates()
        assert fixed_rates[0] == 0
    def test_ACOnlyRates(self):
        """get_ac_only_rates() masks non-A/C positions with the sentinel."""
        sequence_string = 'ACGTATG'
        rates_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, -999]
        mut_rate_record = MutCountIO(Seq(sequence_string))
        mut_rate_record.rates = rates_list
        assert mut_rate_record.get_ac_only_rates().tolist() == [0.1, 0.2, -999.0, -999.0, 0.5, -999.0, -999.0]
|
import pytest
import requests
def test_swagger():
    """The swagger endpoint must describe the MAX Audio Classifier API."""
    swagger_url = 'http://localhost:5000/swagger.json'
    response = requests.get(url=swagger_url)
    assert response.status_code == 200
    assert response.headers['Content-Type'] == 'application/json'
    body = response.json()
    assert 'swagger' in body
    info = body.get('info')
    assert info and info.get('title') == 'MAX Audio Classifier'
    assert info.get('version') == '1.1.0'
    assert info.get('description') == 'Identify sounds in short audio clips'
def test_metadata():
    """The metadata endpoint must return the model's descriptive fields."""
    metadata_url = 'http://localhost:5000/model/metadata'
    response = requests.get(url=metadata_url)
    assert response.status_code == 200
    metadata = response.json()
    expected = {
        'id': 'audio_embeddings-tf-imagenet',
        'name': 'audio_embeddings TensorFlow Model',
        'description': 'audio_embeddings TensorFlow model trained on Audio Set',
        'license': 'Apache 2.0',
    }
    for key, value in expected.items():
        assert metadata[key] == value
def test_predict():
    """Classifying a bird recording must rank 'Bird' as the top label."""
    predict_url = 'http://localhost:5000/model/predict'
    wav_path = 'assets/birds1.wav'
    with open(wav_path, 'rb') as wav:
        response = requests.post(url=predict_url,
                                 files={'audio': (wav_path, wav, 'audio/wav')})
    assert response.status_code == 200
    body = response.json()
    assert body['status'] == 'ok'
    top = body['predictions'][0]
    assert top['label_id'] == '/m/015p6'
    assert top['label'] == 'Bird'
    assert top['probability'] > 0.4
def test_empty_filter():
    """An empty filter parameter must behave like no filter at all."""
    predict_url = 'http://localhost:5000/model/predict?filter='
    wav_path = 'assets/gunshots.wav'
    with open(wav_path, 'rb') as wav:
        response = requests.post(url=predict_url,
                                 files={'audio': (wav_path, wav, 'audio/wav')})
    assert response.status_code == 200
    body = response.json()
    assert body['status'] == 'ok'
    assert len(body['predictions']) >= 5
    top = body['predictions'][0]
    assert top['label_id'] == '/m/032s66'
    assert top['label'] == 'Gunshot, gunfire'
    assert top['probability'] > 0.5
def test_multi_empty_filter():
    """A filter of only commas must behave like no filter at all."""
    predict_url = 'http://localhost:5000/model/predict?filter=,,'
    wav_path = 'assets/gunshots.wav'
    with open(wav_path, 'rb') as wav:
        response = requests.post(url=predict_url,
                                 files={'audio': (wav_path, wav, 'audio/wav')})
    assert response.status_code == 200
    body = response.json()
    assert body['status'] == 'ok'
    assert len(body['predictions']) >= 5
    top = body['predictions'][0]
    assert top['label_id'] == '/m/032s66'
    assert top['label'] == 'Gunshot, gunfire'
    assert top['probability'] > 0.5
def test_filter():
    """A single-label filter must restrict the output to that label."""
    predict_url = 'http://localhost:5000/model/predict?filter=Cap%20gun'
    wav_path = 'assets/gunshots.wav'
    with open(wav_path, 'rb') as wav:
        response = requests.post(url=predict_url,
                                 files={'audio': (wav_path, wav, 'audio/wav')})
    assert response.status_code == 200
    body = response.json()
    assert body['status'] == 'ok'
    assert len(body['predictions']) == 1
    only = body['predictions'][0]
    assert only['label_id'] == '/m/073cg4'
    assert only['label'] == 'Cap gun'
    assert only['probability'] > 0.2
def test_multi_filter():
    """A two-label filter must return exactly those two labels, in order."""
    predict_url = 'http://localhost:5000/model/predict?filter=Clang,Ding'
    wav_path = 'assets/gunshots.wav'
    with open(wav_path, 'rb') as wav:
        response = requests.post(url=predict_url,
                                 files={'audio': (wav_path, wav, 'audio/wav')})
    assert response.status_code == 200
    body = response.json()
    assert body['status'] == 'ok'
    predictions = body['predictions']
    assert len(predictions) == 2
    assert predictions[0]['label_id'] == '/m/07rv4dm'
    assert predictions[0]['label'] == 'Clang'
    assert predictions[0]['probability'] > 0.1
    assert predictions[1]['label_id'] == '/m/07phxs1'
    assert predictions[1]['label'] == 'Ding'
    assert predictions[1]['probability'] > 0.09
# Allow running this file directly instead of via the pytest CLI.
if __name__ == '__main__':
    pytest.main([__file__])
|
# -*- coding: utf-8 -*-
import os
import sys
import argparse
# env
# NOTE(review): hard-coded Python 2.7 / deployment paths -- this script is
# Python 2 only (print statements below) and tied to a specific host layout.
sys.path.append('/usr/lib/python2.7/dist-packages/')
sys.path.append('/usr/lib/python2.7/')
sys.path.append('/usr/local/lib/python2.7/dist-packages/')
sys.path.append('/data2/django_1.8/')
sys.path.append('/data2/django_projects/')
sys.path.append('/data2/django_third/')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djczech.settings")
from django.conf import settings
from djczech.reconciliation.data.models import Cheque
from djzbar.utils.informix import get_session
from djtools.fields import TODAY
from datetime import date, datetime
from itertools import islice
from sqlalchemy import exc
"""
Shell script that munges CSV data
"""
import csv
# Informix connection string taken from Django settings.
EARL = settings.INFORMIX_EARL
# set up command-line options
desc = """
Accepts as input a CSV file
"""
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
    "-f", "--file",
    help="File name.",
    dest="phile"
)
parser.add_argument(
    "-d", "--date",
    help="Import date format: 1891-05-01",
    dest="date"
)
parser.add_argument(
    "--test",
    action='store_true',
    help="Dry run?",
    dest="test"
)
def main():
"""
main function
"""
# convert date to datetime
import_date = datetime.strptime(date, "%Y-%m-%d")
print "import_date = {}".format(import_date)
# for some reason we set jbpayee equal to the import date
# plus user info
jbpayee = "{}_{}".format(
TODAY, settings.ADMINS[0][0]
)
# CSV headers
fieldnames = (
"jbstatus_date", "jbstatus", "jbamount",
"jbaccount", "jbchkno", "jbpayee"
)
# remove all lines up to and including the headers line
with open(phile, "r") as f:
n = 0
for line in f.readlines():
n += 1
if 'As of date' in line: # line in which field names was found
break
f.close()
f = islice(open(phile, "r"), n, None)
# read the CSV file
reader = csv.DictReader(f, fieldnames, delimiter=',')
# create database session
if test:
print EARL
print settings.IMPORT_STATUS
session = get_session(EARL)
session.autoflush = False
x = 0
for r in reader:
# convert amount from string to float and strip dollar sign
try:
jbamount = float(r["jbamount"][1:].replace(',',''))
except:
jbamount = 0
# status date
try:
jbstatus_date = datetime.strptime(
r["jbstatus_date"], "%m/%d/%Y"
)
except:
jbstatus_date = None
# check number
try:
cheque_number = int(r["jbchkno"])
except:
cheque_number = 0
# create a Cheque object
cheque = Cheque(
jbimprt_date=import_date,
jbstatus_date=jbstatus_date,
jbchkno=cheque_number, jbchknolnk=cheque_number,
jbstatus=settings.IMPORT_STATUS, jbaction="",
jbaccount=r["jbaccount"], jbamount=jbamount,
jbamountlnk=jbamount, jbpayee=jbpayee
)
# missing fields: jbissue_date, jbpostd_dat
if test:
print "{}) {}".format(x, cheque.__dict__)
else:
# insert the data
try:
session.add(cheque)
session.flush()
except exc.SQLAlchemyError as e:
print e
print "Bad data: {}".format(cheque.__dict__)
session.rollback()
x += 1
if not test:
session.commit()
# fin
print "Checks processed: {}".format(x)
session.close()
######################
# shell command line
######################
if __name__ == "__main__":
    args = parser.parse_args()
    # Bind the parsed options as module globals read by main().
    phile = args.phile
    date = args.date
    test = args.test
    # Both the file name and the import date are mandatory.
    if not phile or not date:
        print "mandatory options are missing: file name and date\n"
        parser.print_help()
        exit(-1)
    sys.exit(main())
|
#! /usr/bin/env python3
'''
Problem 33 - Project Euler
http://projecteuler.net/index.php?section=problems&id=033
'''
from math import gcd
from fractions import Fraction
from functools import reduce
from operator import mul
def is_digit_cancelling_fraction_pair(i, j):
    """Return True if i/j is a non-trivial digit-cancelling fraction.

    A pair of two-digit numbers qualifies when they share a digit whose
    naive removal from both numerator and denominator leaves the value
    of the fraction unchanged (e.g. 16/64 == 1/4).  Pairs with repeated
    digits or a trailing zero are considered trivial and rejected.
    """
    num, den = str(i), str(j)
    # Repeated digits make the "cancellation" ambiguous; reject up front.
    if num[0] == num[1] or den[0] == den[1]:
        return False
    # Multiples of ten would only allow the trivial cancellation of zeros
    # (and guarding here also avoids a zero denominator below).
    if i % 10 == 0 or j % 10 == 0:
        return False
    for digit in num:
        if digit not in den:
            continue
        cancelled_num = int(num.replace(digit, ''))
        cancelled_den = int(den.replace(digit, ''))
        if Fraction(i, j) == Fraction(cancelled_num, cancelled_den):
            return True
    return False
if __name__ == '__main__':
    # All candidate pairs (i, j) of two-digit numbers with i < j that
    # form a digit-cancelling fraction.
    matches = [(i, j)
               for i in range(10, 100)
               for j in range(i + 1, 100)
               if is_digit_cancelling_fraction_pair(i, j)]
    print(matches)
    # The answer is the denominator of the product of the four
    # fractions, given in lowest terms.
    product = reduce(mul, (Fraction(i, j) for i, j in matches))
    print(product.denominator)
|
# -*- coding: utf-8 -*-
r"""
Finite State Machines, Automata, Transducers
This module adds support for finite state machines, automata and
transducers. See classes :class:`Automaton` and :class:`Transducer`
(or the more general class :class:`FiniteStateMachine`) and the
:ref:`examples <finite_state_machine_examples>` below for
details on creating one.
Contents
========
:class:`FiniteStateMachine` and derived classes :class:`Transducer` and :class:`Automaton`
------------------------------------------------------------------------------------------
Accessing parts of a finite state machine
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.state` | Get a state by its label
:meth:`~FiniteStateMachine.states` | List of states
:meth:`~FiniteStateMachine.iter_states` | Iterator over the states
:meth:`~FiniteStateMachine.initial_states` | List of initial states
:meth:`~FiniteStateMachine.iter_initial_states` | Iterator over initial states
:meth:`~FiniteStateMachine.final_states` | List of final states
:meth:`~FiniteStateMachine.iter_final_states` | Iterator over final states
:meth:`~FiniteStateMachine.transition` | Get a transition by its states and labels
:meth:`~FiniteStateMachine.transitions` | List of transitions
:meth:`~FiniteStateMachine.iter_transitions` | Iterator over the transitions
:meth:`~FiniteStateMachine.predecessors` | List of predecessors of a state
:meth:`~FiniteStateMachine.induced_sub_finite_state_machine` | Induced sub-machine
:meth:`~FiniteStateMachine.accessible_components` | Accessible components
:meth:`~FiniteStateMachine.final_components` | Final components (connected components which cannot be left again)
(Modified) Copies
^^^^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.empty_copy` | Returns an empty deep copy
:meth:`~FiniteStateMachine.deepcopy` | Returns a deep copy
:meth:`~FiniteStateMachine.relabeled` | Returns a relabeled deep copy
Manipulation
^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.add_state` | Add a state
:meth:`~FiniteStateMachine.add_states` | Add states
:meth:`~FiniteStateMachine.delete_state` | Delete a state
:meth:`~FiniteStateMachine.add_transition` | Add a transition
:meth:`~FiniteStateMachine.add_transitions_from_function` | Add transitions
:attr:`~FiniteStateMachine.on_duplicate_transition` | Hook for handling duplicate transitions
:meth:`~FiniteStateMachine.add_from_transition_function` | Add transitions by a transition function
:meth:`~FiniteStateMachine.delete_transition` | Delete a transition
:meth:`~FiniteStateMachine.remove_epsilon_transitions` | Remove epsilon transitions (not implemented)
:meth:`~FiniteStateMachine.split_transitions` | Split transitions with input words of length ``> 1``
:meth:`~FiniteStateMachine.determine_alphabets` | Determines input and output alphabets
:meth:`~FiniteStateMachine.construct_final_word_out` | Construct final output by implicitly reading trailing letters; cf. :meth:`~FiniteStateMachine.with_final_word_out`
Properties
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.has_state` | Checks for a state
:meth:`~FiniteStateMachine.has_initial_state` | Checks for an initial state
:meth:`~FiniteStateMachine.has_initial_states` | Checks for initial states
:meth:`~FiniteStateMachine.has_final_state` | Checks for a final state
:meth:`~FiniteStateMachine.has_final_states` | Checks for final states
:meth:`~FiniteStateMachine.has_transition` | Checks for a transition
:meth:`~FiniteStateMachine.is_deterministic` | Checks for a deterministic machine
:meth:`~FiniteStateMachine.is_complete` | Checks for a complete machine
:meth:`~FiniteStateMachine.is_connected` | Checks for a connected machine
:meth:`~FiniteStateMachine.is_Markov_chain` | Checks for a Markov chain
:meth:`~FiniteStateMachine.is_monochromatic` | Checks whether the colors of all states are equal
:meth:`~FiniteStateMachine.asymptotic_moments` | Main terms of expectation and variance of sums of labels
Operations
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.disjoint_union` | Disjoint union (not implemented)
:meth:`~FiniteStateMachine.concatenation` | Concatenation (not implemented)
:meth:`~FiniteStateMachine.Kleene_closure` | Kleene closure (not implemented)
:meth:`Automaton.intersection` | Intersection of automata
:meth:`Transducer.intersection` | Intersection of transducers
:meth:`Transducer.cartesian_product` | Cartesian product of a transducer with another finite state machine
:meth:`~FiniteStateMachine.product_FiniteStateMachine` | Product of finite state machines
:meth:`~FiniteStateMachine.composition` | Composition (output of other is input of self)
:meth:`~FiniteStateMachine.input_projection` | Input projection (output is deleted)
:meth:`~FiniteStateMachine.output_projection` | Output projection (old output is new input)
:meth:`~FiniteStateMachine.projection` | Input or output projection
:meth:`~FiniteStateMachine.transposition` | Transposition (all transitions are reversed)
:meth:`~FiniteStateMachine.with_final_word_out` | Machine with final output constructed by implicitly reading trailing letters, cf. :meth:`~FiniteStateMachine.construct_final_word_out` for inplace version
:meth:`Automaton.determinisation` | Determinisation of an automaton
:meth:`~FiniteStateMachine.process` | Process input
:meth:`Automaton.process` | Process input of an automaton (output differs from general case)
:meth:`Transducer.process` | Process input of a transducer (output differs from general case)
:meth:`~FiniteStateMachine.iter_process` | Return process iterator
Simplification
^^^^^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.prepone_output` | Prepone output where possible
:meth:`~FiniteStateMachine.equivalence_classes` | List of equivalent states
:meth:`~FiniteStateMachine.quotient` | Quotient with respect to equivalence classes
:meth:`~FiniteStateMachine.merged_transitions` | Merge transitions while adding input
:meth:`~FiniteStateMachine.markov_chain_simplification` | Simplification of a Markov chain
:meth:`Automaton.minimization` | Minimization of an automaton
:meth:`Transducer.simplification` | Simplification of a transducer
Conversion
^^^^^^^^^^
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.adjacency_matrix` | (Weighted) adjacency :class:`matrix <Matrix>`
:meth:`~FiniteStateMachine.graph` | Underlying :class:`DiGraph`
:meth:`~FiniteStateMachine.plot` | Plot
LaTeX output
++++++++++++
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:meth:`~FiniteStateMachine.latex_options` | Set options
:meth:`~FiniteStateMachine.set_coordinates` | Set coordinates of the states
:meth:`~FiniteStateMachine.default_format_transition_label` | Default formatting of words in transition labels
:meth:`~FiniteStateMachine.format_letter_negative` | Format negative numbers as overlined number
:meth:`~FiniteStateMachine.format_transition_label_reversed` | Format words in transition labels in reversed order
:class:`FSMState`
-----------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:attr:`~FSMState.final_word_out` | Final output of a state
:attr:`~FSMState.is_final` | Describes whether a state is final or not
:attr:`~FSMState.is_initial` | Describes whether a state is initial or not
:meth:`~FSMState.label` | Label of a state
:meth:`~FSMState.relabeled` | Returns a relabeled deep copy of a state
:meth:`~FSMState.fully_equal` | Checks whether two states are fully equal (including all attributes)
:class:`FSMTransition`
----------------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:attr:`~FSMTransition.from_state` | State in which transition starts
:attr:`~FSMTransition.to_state` | State in which transition ends
:attr:`~FSMTransition.word_in` | Input word of the transition
:attr:`~FSMTransition.word_out` | Output word of the transition
:meth:`~FSMTransition.deepcopy` | Returns a deep copy of the transition
Helper Functions
----------------
.. csv-table::
:class: contentstable
:widths: 30, 70
:delim: |
:func:`equal` | Checks whether all elements of ``iterator`` are equal
:func:`full_group_by` | Group iterable by values of some key
:func:`startswith` | Determine whether list starts with the given prefix
:func:`FSMLetterSymbol` | Returns a string associated to the input letter
:func:`FSMWordSymbol` | Returns a string associated to a word
:func:`is_FSMState` | Tests whether an object inherits from :class:`FSMState`
:func:`is_FSMTransition` | Tests whether an object inherits from :class:`FSMTransition`
:func:`is_FiniteStateMachine` | Tests whether an object inherits from :class:`FiniteStateMachine`
:func:`duplicate_transition_ignore` | Default function for handling duplicate transitions
:func:`duplicate_transition_raise_error` | Raise error when inserting a duplicate transition
:func:`duplicate_transition_add_input` | Add input when inserting a duplicate transition
.. _finite_state_machine_examples:
Examples
========
We start with a general :class:`FiniteStateMachine`. Later there will
be also an :class:`Automaton` and a :class:`Transducer`.
A simple finite state machine
-----------------------------
We can easily create a finite state machine by
::
sage: fsm = FiniteStateMachine()
sage: fsm
Finite state machine with 0 states
By default this is the empty finite state machine, so not very
interesting. Let's create and add some states and transitions::
sage: day = fsm.add_state('day')
sage: night = fsm.add_state('night')
sage: sunrise = fsm.add_transition(night, day)
sage: sunset = fsm.add_transition(day, night)
Let us look at ``sunset`` more closely::
sage: sunset
Transition from 'day' to 'night': -|-
Note that we could also have created and added the transitions directly
by::
sage: fsm.add_transition('day', 'night')
Transition from 'day' to 'night': -|-
This would have added the states automatically, since they are
present in the transitions.
Anyhow, we got the following finite state machine::
sage: fsm
Finite state machine with 2 states
We can also obtain the underlying directed graph by
::
sage: fsm.graph()
Digraph on 2 vertices
To visualize a finite state machine, we can use
:func:`~sage.misc.latex.latex` and run the result through LaTeX,
see the section on :ref:`finite_state_machine_LaTeX_output`
below.
Alternatively, we could have created the finite state machine above
simply by
::
sage: FiniteStateMachine([('night', 'day'), ('day', 'night')])
Finite state machine with 2 states
See :class:`FiniteStateMachine` for a lot of possibilities to create
finite state machines.
.. _finite_state_machine_recognizing_NAFs_example:
A simple Automaton (recognizing NAFs)
---------------------------------------
We want to build an automaton which recognizes non-adjacent forms
(NAFs), i.e., sequences which have no adjacent non-zeros.
We use `0`, `1`, and `-1` as digits::
sage: NAF = Automaton(
....: {'A': [('A', 0), ('B', 1), ('B', -1)], 'B': [('A', 0)]})
sage: NAF.state('A').is_initial = True
sage: NAF.state('A').is_final = True
sage: NAF.state('B').is_final = True
sage: NAF
Automaton with 2 states
Of course, we could have specified the initial and final states
directly in the definition of ``NAF`` by ``initial_states=['A']`` and
``final_states=['A', 'B']``.
So let's test the automaton with some input::
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False # activate new output behavior
sage: NAF([0])
True
sage: NAF([0, 1])
True
sage: NAF([1, -1])
False
sage: NAF([0, -1, 0, 1])
True
sage: NAF([0, -1, -1, -1, 0])
False
sage: NAF([-1, 0, 0, 1, 1])
False
Alternatively, we could call that by
::
sage: NAF.process([0, -1, 0, 1])
(True, 'B')
which gives additionally the state in which we arrived.
.. _finite_state_machine_LaTeX_output:
LaTeX output
------------
We can visualize a finite state machine by converting it to LaTeX by
using the usual function :func:`~sage.misc.latex.latex`. Within LaTeX,
TikZ is used for typesetting the graphics, see the
:wikipedia:`PGF/TikZ`.
::
sage: print latex(NAF)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state, accepting, initial] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
\node[state, accepting] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
\path[->] (v0) edge[loop above] node {$0$} ();
\path[->] (v0.185.00) edge node[rotate=360.00, anchor=north] {$1, -1$} (v1.355.00);
\path[->] (v1.5.00) edge node[rotate=0.00, anchor=south] {$0$} (v0.175.00);
\end{tikzpicture}
We can turn this into a graphical representation.
::
sage: view(NAF) # not tested
To actually see this, use the live documentation in the Sage notebook
and execute the cells in this and the previous section.
Several options can be set to customize the output, see
:meth:`~FiniteStateMachine.latex_options` for details. In particular,
we use :meth:`~FiniteStateMachine.format_letter_negative` to format
`-1` as `\overline{1}`.
::
sage: NAF.latex_options(
....: coordinates={'A': (0, 0),
....: 'B': (6, 0)},
....: initial_where={'A': 'below'},
....: format_letter=NAF.format_letter_negative,
....: format_state_label=lambda x:
....: r'\mathcal{%s}' % x.label()
....: )
sage: print latex(NAF)
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state, accepting, initial, initial where=below] (v0) at (0.000000, 0.000000) {$\mathcal{A}$};
\node[state, accepting] (v1) at (6.000000, 0.000000) {$\mathcal{B}$};
\path[->] (v0) edge[loop above] node {$0$} ();
\path[->] (v0.5.00) edge node[rotate=0.00, anchor=south] {$1, \overline{1}$} (v1.175.00);
\path[->] (v1.185.00) edge node[rotate=360.00, anchor=north] {$0$} (v0.355.00);
\end{tikzpicture}
sage: view(NAF) # not tested
A simple transducer (binary inverter)
-------------------------------------
Let's build a simple transducer, which rewrites a binary word by
inverting each bit::
sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
....: initial_states=['A'], final_states=['A'])
We can look at the states and transitions::
sage: inverter.states()
['A']
sage: for t in inverter.transitions():
....: print t
Transition from 'A' to 'A': 0|1
Transition from 'A' to 'A': 1|0
Now we apply a word to it and see what the transducer does::
sage: inverter([0, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1])
[1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0]
``True`` means that we landed in a final state, that state is labeled
``'A'``, and we also got an output.
A transducer which performs division by `3` in binary
-----------------------------------------------------
Now we build a transducer, which divides a binary number by `3`.
The labels of the states are the remainder of the division.
The transition function is
::
sage: def f(state_from, read):
....: if state_from + read <= 1:
....: state_to = 2*state_from + read
....: write = 0
....: else:
....: state_to = 2*state_from + read - 3
....: write = 1
....: return (state_to, write)
which assumes reading a binary number from left to right.
We get the transducer with
::
sage: D = Transducer(f, initial_states=[0], final_states=[0],
....: input_alphabet=[0, 1])
Let us try to divide `12` by `3`::
sage: D([1, 1, 0, 0])
[0, 1, 0, 0]
Now we want to divide `13` by `3`::
sage: D([1, 1, 0, 1])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
The raised ``ValueError``
means `13` is not divisible by `3`.
.. _finite_state_machine_gray_code_example:
Gray Code
---------
The Gray code is a binary :wikipedia:`numeral system <Numeral_system>`
where two successive values differ in only one bit, cf. the
:wikipedia:`Gray_code`. The Gray code of an integer `n` is obtained by
a bitwise xor between the binary expansion of `n` and the binary
expansion of `\lfloor n/2\rfloor`; the latter corresponds to a
shift by one position in binary.
The purpose of this example is to construct a transducer converting the
standard binary expansion to the Gray code by translating this
construction into operations with transducers.
For this construction, the least significant digit is at
the left-most position.
Note that it is easier to shift everything to
the right first, i.e., multiply by `2` instead of building
`\lfloor n/2\rfloor`. Then, we take the input xor with the right
shift of the input and forget the first letter.
We first construct a transducer shifting the binary expansion to the
right. This requires storing the previously read digit in a state.
::
sage: def shift_right_transition(state, digit):
....: if state == 'I':
....: return (digit, None)
....: else:
....: return (digit, state)
sage: shift_right_transducer = Transducer(
....: shift_right_transition,
....: initial_states=['I'],
....: input_alphabet=[0, 1],
....: final_states=[0])
sage: shift_right_transducer.transitions()
[Transition from 'I' to 0: 0|-,
Transition from 'I' to 1: 1|-,
Transition from 0 to 0: 0|0,
Transition from 0 to 1: 1|0,
Transition from 1 to 0: 0|1,
Transition from 1 to 1: 1|1]
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False
sage: shift_right_transducer([0, 1, 1, 0])
[0, 1, 1]
sage: shift_right_transducer([1, 0, 0])
[1, 0]
The output of the shifts above look a bit weird (from a right-shift
transducer, we would expect, for example, that ``[1, 0, 0]`` was
mapped to ``[0, 1, 0]``), since we write ``None`` instead of the zero
at the left. Further, note that only `0` is listed as a final state
as we have to enforce that a most significant zero is read as the last
input letter in order to flush the last digit::
sage: shift_right_transducer([0, 1, 0, 1])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
Next, we construct the transducer performing the xor operation. We also
have to take ``None`` into account as our ``shift_right_transducer``
waits one iteration until it starts writing output. This corresponds
with our intention to forget the first letter.
::
sage: def xor_transition(state, digits):
....: if digits[0] is None or digits[1] is None:
....: return (0, None)
....: else:
....: return (0, digits[0].__xor__(digits[1]))
sage: from itertools import product
sage: xor_transducer = Transducer(
....: xor_transition,
....: initial_states=[0],
....: final_states=[0],
....: input_alphabet=list(product([None, 0, 1], [0, 1])))
sage: xor_transducer.transitions()
[Transition from 0 to 0: (None, 0)|-,
Transition from 0 to 0: (None, 1)|-,
Transition from 0 to 0: (0, 0)|0,
Transition from 0 to 0: (0, 1)|1,
Transition from 0 to 0: (1, 0)|1,
Transition from 0 to 0: (1, 1)|0]
sage: xor_transducer([(None, 0), (None, 1), (0, 0), (0, 1), (1, 0), (1, 1)])
[0, 1, 1, 0]
sage: xor_transducer([(0, None)])
Traceback (most recent call last):
...
ValueError: Invalid input sequence.
The transducer computing the Gray code is then constructed as a
:meth:`cartesian product <Transducer.cartesian_product>` between the
shifted version and the original input (represented here by the
``shift_right_transducer`` and the :meth:`identity transducer
<sage.combinat.finite_state_machine_generators.TransducerGenerators.Identity>`,
respectively). This cartesian product is then fed into the
``xor_transducer`` as a :meth:`composition
<FiniteStateMachine.composition>` of transducers.
As described in :meth:`Transducer.cartesian_product`, we have to
temporarily set
``finite_state_machine.FSMOldCodeTransducerCartesianProduct`` to
``False`` in order to disable backwards compatible code.
::
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
sage: product_transducer = shift_right_transducer.cartesian_product(transducers.Identity([0, 1]))
sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = True
sage: Gray_transducer = xor_transducer(product_transducer)
We use :meth:`~FiniteStateMachine.construct_final_word_out` to make sure that all output
is written; otherwise, we would have to make sure that a sufficient number of trailing
zeros is read.
::
sage: Gray_transducer.construct_final_word_out([0])
sage: Gray_transducer.transitions()
[Transition from (('I', 0), 0) to ((0, 0), 0): 0|-,
Transition from (('I', 0), 0) to ((1, 0), 0): 1|-,
Transition from ((0, 0), 0) to ((0, 0), 0): 0|0,
Transition from ((0, 0), 0) to ((1, 0), 0): 1|1,
Transition from ((1, 0), 0) to ((0, 0), 0): 0|1,
Transition from ((1, 0), 0) to ((1, 0), 0): 1|0]
There is a :meth:`prepackaged transducer
<sage.combinat.finite_state_machine_generators.TransducerGenerators.GrayCode>`
for Gray code, let's see whether they agree. We have to use
:meth:`~FiniteStateMachine.relabeled` to relabel our states with
integers.
::
sage: constructed = Gray_transducer.relabeled()
sage: packaged = transducers.GrayCode()
sage: constructed == packaged
True
Finally, we check that this indeed computes the Gray code of the first
10 non-negative integers.
::
sage: for n in srange(10):
....: Gray_transducer(n.bits())
[]
[1]
[1, 1]
[0, 1]
[0, 1, 1]
[1, 1, 1]
[1, 0, 1]
[0, 0, 1]
[0, 0, 1, 1]
[1, 0, 1, 1]
Using the hook-functions
------------------------
Let's use the previous example "division by `3`" to demonstrate the
optional state and transition parameters ``hook``.
First, we define, what those functions should do. In our case, this is
just saying in which state we are and which transition we take
::
sage: def state_hook(state, process):
....: print "We are now in State %s." % (state.label(),)
sage: from sage.combinat.finite_state_machine import FSMWordSymbol
sage: def transition_hook(transition, process):
....: print ("Currently we go from %s to %s, "
....: "reading %s and writing %s." % (
....: transition.from_state, transition.to_state,
....: FSMWordSymbol(transition.word_in),
....: FSMWordSymbol(transition.word_out)))
Now, let's add these hook-functions to the existing transducer::
sage: for s in D.iter_states():
....: s.hook = state_hook
sage: for t in D.iter_transitions():
....: t.hook = transition_hook
Rerunning the process again now gives the following output::
sage: D.process([1, 1, 0, 1])
We are now in State 0.
Currently we go from 0 to 1, reading 1 and writing 0.
We are now in State 1.
Currently we go from 1 to 0, reading 1 and writing 1.
We are now in State 0.
Currently we go from 0 to 0, reading 0 and writing 0.
We are now in State 0.
Currently we go from 0 to 1, reading 1 and writing 0.
We are now in State 1.
(False, 1, [0, 1, 0, 0])
The example above just explains the basic idea of using
hook-functions. In the following, we will use those hooks more seriously.
Detecting sequences with same number of `0` and `1`
---------------------------------------------------
Suppose we have a binary input and want to accept all sequences with
the same number of `0` and `1`. This cannot be done with a finite
automaton. Anyhow, we can make usage of the hook functions to extend
our finite automaton by a counter::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
sage: C = FiniteStateMachine()
sage: def update_counter(state, process):
....: l = process.read_letter()
....: process.fsm.counter += 1 if l == 1 else -1
....: if process.fsm.counter > 0:
....: next_state = 'positive'
....: elif process.fsm.counter < 0:
....: next_state = 'negative'
....: else:
....: next_state = 'zero'
....: return FSMTransition(state, process.fsm.state(next_state),
....: l, process.fsm.counter)
sage: C.add_state(FSMState('zero', hook=update_counter,
....: is_initial=True, is_final=True))
'zero'
sage: C.add_state(FSMState('positive', hook=update_counter))
'positive'
sage: C.add_state(FSMState('negative', hook=update_counter))
'negative'
Now, let's input some sequence::
sage: C.counter = 0; C([1, 1, 1, 1, 0, 0])
(False, 'positive', [1, 2, 3, 4, 3, 2])
The result is False, since there are four `1` but only two `0`. We
land in the state ``positive`` and we can also see the values of the
counter in each step.
Let's try some other examples::
sage: C.counter = 0; C([1, 1, 0, 0])
(True, 'zero', [1, 2, 1, 0])
sage: C.counter = 0; C([0, 1, 0, 0])
(False, 'negative', [-1, 0, -1, -2])
See also methods :meth:`Automaton.process` and
:meth:`Transducer.process` (or even
:meth:`FiniteStateMachine.process`), the explanation of the parameter
``hook`` and the examples in :class:`FSMState` and
:class:`FSMTransition`, and the description and examples in
:class:`FSMProcessIterator` for more information on processing and
hooks.
AUTHORS:
- Daniel Krenn (2012-03-27): initial version
- Clemens Heuberger (2012-04-05): initial version
- Sara Kropf (2012-04-17): initial version
- Clemens Heuberger (2013-08-21): release candidate for Sage patch
- Daniel Krenn (2013-08-21): release candidate for Sage patch
- Sara Kropf (2013-08-21): release candidate for Sage patch
- Clemens Heuberger (2013-09-02): documentation improved
- Daniel Krenn (2013-09-13): comments from trac worked in
- Clemens Heuberger (2013-11-03): output (labels) of determinisation,
product, composition, etc. changed (for consistency),
representation of state changed, documentation improved
- Daniel Krenn (2013-11-04): whitespaces in documentation corrected
- Clemens Heuberger (2013-11-04): full_group_by added
- Daniel Krenn (2013-11-04): next release candidate for Sage patch
- Sara Kropf (2013-11-08): fix for adjacency matrix
- Clemens Heuberger (2013-11-11): fix for prepone_output
- Daniel Krenn (2013-11-11): comments from trac #15078 included:
docstring of FiniteStateMachine rewritten, Automaton and Transducer
inherited from FiniteStateMachine
- Daniel Krenn (2013-11-25): documentation improved according to
comments from trac #15078
- Clemens Heuberger, Daniel Krenn, Sara Kropf (2014-02-21--2014-07-18):
A huge bunch of improvements. Details see
#15841, #15847, #15848, #15849, #15850, #15922, #15923, #15924,
#15925, #15928, #15960, #15961, #15962, #15963, #15975, #16016,
#16024, #16061, #16128, #16132, #16138, #16139, #16140, #16143,
#16144, #16145, #16146, #16191, #16200, #16205, #16206, #16207,
#16229, #16253, #16254, #16255, #16266, #16355, #16357, #16387,
#16425, #16539, #16555, #16557, #16588, #16589, #16666, #16668,
#16674, #16675, #16677.
ACKNOWLEDGEMENT:
- Clemens Heuberger, Daniel Krenn and Sara Kropf are supported by the
Austrian Science Fund (FWF): P 24644-N26.
Methods
=======
"""
#*****************************************************************************
# Copyright (C) 2012--2014 Clemens Heuberger <clemens.heuberger@aau.at>
# 2012--2014 Daniel Krenn <dev@danielkrenn.at>
# 2012--2014 Sara Kropf <sara.kropf@aau.at>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.structure.sage_object import SageObject
from sage.graphs.digraph import DiGraph
from sage.matrix.constructor import matrix
from sage.rings.integer_ring import ZZ
from sage.rings.real_mpfr import RR
from sage.symbolic.ring import SR
from sage.calculus.var import var
from sage.misc.cachefunc import cached_function
from sage.misc.latex import latex
from sage.misc.misc import verbose
from sage.functions.trig import cos, sin, atan2
from sage.symbolic.constants import pi
from copy import copy
from copy import deepcopy
import itertools
from itertools import imap
from collections import defaultdict, OrderedDict
def full_group_by(l, key=lambda x: x):
    """
    Group the elements of the iterable ``l`` by the values of ``key``.

    INPUT:

    - iterable ``l``

    - key function ``key``

    OUTPUT:

    A list of pairs ``(k, elements)`` such that ``key(e)=k`` for all
    ``e`` in ``elements``.

    This is similar to ``itertools.groupby`` except that lists are
    returned instead of iterables and no prior sorting is required;
    the keys need be neither sortable nor hashable.  It is, however,
    required that distinct keys have distinct ``str``-representations;
    otherwise a ``ValueError`` is raised.

    The implementation is inspired by
    `<http://stackoverflow.com/a/15250161>`_, but non-hashable keys are
    allowed.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import full_group_by
        sage: t = [2/x, 1/x, 2/x]
        sage: r = full_group_by([0, 1, 2], key=lambda i:t[i])
        sage: sorted(r, key=lambda p:p[1])
        [(2/x, [0, 2]), (1/x, [1])]
    """
    groups = defaultdict(list)
    key_of_repr = {}  # maps str(k) back to the actual key object
    for element in l:
        k = key(element)
        r = str(k)
        if r not in key_of_repr:
            key_of_repr[r] = k
        elif key_of_repr[r] != k:
            # Two different keys share a string representation; grouping
            # by string would silently merge their groups, so refuse.
            raise ValueError("Two distinct elements with representation "
                             "%s " % r)
        groups[r].append(element)
    return [(key_of_repr[r], members) for r, members in groups.items()]
def equal(iterator):
    """
    Check whether all elements of ``iterator`` are equal.

    INPUT:

    - ``iterator`` -- an iterator of the elements to check

    OUTPUT:

    ``True`` or ``False``.  An empty iterator counts as equal.

    This implements `<http://stackoverflow.com/a/3844832/1052778>`_.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import equal
        sage: equal([0, 0, 0])
        True
        sage: equal([0, 1, 0])
        False
        sage: equal([])
        True
        sage: equal(iter([None, None]))
        True

    We can test other properties of the elements than the elements
    themselves.  In the following example, we check whether all tuples
    have the same lengths::

        sage: equal(len(x) for x in [(1, 2), (2, 3), (3, 1)])
        True
        sage: equal(len(x) for x in [(1, 2), (1, 2, 3), (3, 1)])
        False
    """
    it = iter(iterator)
    try:
        reference = next(it)
    except StopIteration:
        # Nothing to compare: vacuously equal.
        return True
    return all(element == reference for element in it)
def startswith(list, prefix):
    """
    Determine whether ``list`` starts with the given ``prefix``.

    INPUT:

    - ``list`` -- list

    - ``prefix`` -- list representing the prefix

    OUTPUT:

    ``True`` or ``False``.

    Similar to :meth:`str.startswith`.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import startswith
        sage: startswith([1, 2, 3], [1, 2])
        True
        sage: startswith([1], [1, 2])
        False
        sage: startswith([1, 3, 2], [1, 2])
        False
    """
    # Compare the prefix against the equally long initial slice.
    return prefix == list[:len(prefix)]
#*****************************************************************************
# Symbol used in string representations of transitions for the empty word.
FSMEmptyWordSymbol = '-'
# LaTeX representations of the empty word and of the end-of-word marker.
EmptyWordLaTeX = r'\varepsilon'
EndOfWordLaTeX = r'\$'
# Backwards-compatibility switches for deprecated behavior.
FSMOldCodeTransducerCartesianProduct = True
FSMOldProcessOutput = True  # See trac #16132 (deprecation).
# Position names mapped to angles (in degrees) for placing the TikZ
# "initial" arrow relative to a state in the LaTeX output.
tikz_automata_where = {"right": 0,
                       "above": 90,
                       "left": 180,
                       "below": 270}
def FSMLetterSymbol(letter):
    """
    Return a string associated to the input letter.

    INPUT:

    - ``letter`` -- the input letter or ``None`` (representing the
      empty word).

    OUTPUT:

    If ``letter`` is ``None``, the symbol for the empty word
    ``FSMEmptyWordSymbol`` is returned; otherwise the string
    associated to the letter.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMLetterSymbol
        sage: FSMLetterSymbol(0)
        '0'
        sage: FSMLetterSymbol(None)
        '-'
    """
    if letter is None:
        return FSMEmptyWordSymbol
    return repr(letter)
def FSMWordSymbol(word):
    """
    Returns a string of ``word``. It may returns the symbol of the
    empty word ``FSMEmptyWordSymbol``.

    INPUT:

    - ``word`` -- the input word.

    OUTPUT:

    A string of ``word``.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMWordSymbol
        sage: FSMWordSymbol([0, 1, 1])
        '0,1,1'
    """
    if not isinstance(word, list):
        # A single letter (or None) is delegated to FSMLetterSymbol.
        return FSMLetterSymbol(word)
    if len(word) == 0:
        return FSMEmptyWordSymbol
    # Join the letter symbols with commas; replaces the previous
    # manual string accumulation loop (same output, linear time).
    return ','.join(FSMLetterSymbol(letter) for letter in word)
#*****************************************************************************
def is_FSMState(S):
    """
    Tests whether or not ``S`` inherits from :class:`FSMState`.

    INPUT:

    - ``S`` -- an arbitrary object.

    OUTPUT:

    ``True`` or ``False``.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FSMState, FSMState
        sage: is_FSMState(FSMState('A'))
        True
    """
    return isinstance(S, FSMState)
class FSMState(SageObject):
    """
    Class for a state of a finite state machine.

    INPUT:

    - ``label`` -- the label of the state.

    - ``word_out`` -- (default: ``None``) a word that is written when
      the state is reached.

    - ``is_initial`` -- (default: ``False``)

    - ``is_final`` -- (default: ``False``)

    - ``final_word_out`` -- (default: ``None``) a word that is written when
      the state is reached as the last state of some input; only for final
      states.

    - ``hook`` -- (default: ``None``) A function which is called when
      the state is reached during processing input. It takes two input
      parameters: the first is the current state (to allow using the same
      hook for several states), the second is the current process
      iterator object (to have full access to everything; e.g. the
      next letter from the input tape can be read in). It can output
      the next transition, i.e. the transition to take next. If it
      returns ``None`` the process iterator chooses. Moreover, this
      function can raise a ``StopIteration`` exception to stop
      processing of a finite state machine the input immediately. See
      also the example below.

    - ``color`` -- (default: ``None``) In order to distinguish states,
      they can be given an arbitrary "color" (an arbitrary object).
      This is used in :meth:`FiniteStateMachine.equivalence_classes`:
      states of different colors are never considered to be
      equivalent. Note that :meth:`Automaton.determinisation` requires
      that ``color`` is hashable.

    - ``allow_label_None`` -- (default: ``False``) If ``True`` allows also
      ``None`` as label. Note that a state with label ``None`` is used in
      :class:`FSMProcessIterator`.

    OUTPUT:

    Returns a state of a finite state machine.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMState
        sage: A = FSMState('state 1', word_out=0, is_initial=True)
        sage: A
        'state 1'
        sage: A.label()
        'state 1'
        sage: B = FSMState('state 2')
        sage: A == B
        False

    We can also define a final output word of a final state which is
    used if the input of a transducer leads to this state. Such final
    output words are used in subsequential transducers. ::

        sage: C = FSMState('state 3', is_final=True, final_word_out='end')
        sage: C.final_word_out
        ['end']

    The final output word can be a single letter, ``None`` or a list of
    letters::

        sage: A = FSMState('A')
        sage: A.is_final = True
        sage: A.final_word_out = 2
        sage: A.final_word_out
        [2]
        sage: A.final_word_out = [2, 3]
        sage: A.final_word_out
        [2, 3]

    Only final states can have a final output word which is not
    ``None``::

        sage: B = FSMState('B')
        sage: B.final_word_out is None
        True
        sage: B.final_word_out = 2
        Traceback (most recent call last):
        ...
        ValueError: Only final states can have a final output word,
        but state B is not final.

    Setting the ``final_word_out`` of a final state to ``None`` is the
    same as setting it to ``[]`` and is also the default for a final
    state::

        sage: C = FSMState('C', is_final=True)
        sage: C.final_word_out
        []
        sage: C.final_word_out = None
        sage: C.final_word_out
        []
        sage: C.final_word_out = []
        sage: C.final_word_out
        []

    It is not allowed to use ``None`` as a label::

        sage: from sage.combinat.finite_state_machine import FSMState
        sage: FSMState(None)
        Traceback (most recent call last):
        ...
        ValueError: Label None reserved for a special state,
        choose another label.

    This can be overridden by::

        sage: FSMState(None, allow_label_None=True)
        None

    Note that :meth:`Automaton.determinisation` requires that ``color``
    is hashable::

        sage: A = Automaton([[0, 0, 0]], initial_states=[0])
        sage: A.state(0).color = []
        sage: A.determinisation()
        Traceback (most recent call last):
        ...
        TypeError: unhashable type: 'list'
        sage: A.state(0).color = ()
        sage: A.determinisation()
        Automaton with 1 states

    We can use a hook function of a state to stop processing. This is
    done by raising a ``StopIteration`` exception. The following code
    demonstrates this::

        sage: T = Transducer([(0, 1, 9, 'a'), (1, 2, 9, 'b'),
        ....:                 (2, 3, 9, 'c'), (3, 4, 9, 'd')],
        ....:                initial_states=[0],
        ....:                final_states=[4],
        ....:                input_alphabet=[9])
        sage: def stop(current_state, process_iterator):
        ....:     raise StopIteration()
        sage: T.state(3).hook = stop
        sage: T.process([9, 9, 9, 9])
        (False, 3, ['a', 'b', 'c'])
    """

    is_initial = False
    """
    Describes whether the state is initial.

    EXAMPLES::

        sage: T = Automaton([(0,0,0)])
        sage: T.initial_states()
        []
        sage: T.state(0).is_initial = True
        sage: T.initial_states()
        [0]
    """

    def __init__(self, label, word_out=None,
                 is_initial=False, is_final=False, final_word_out=None,
                 hook=None, color=None, allow_label_None=False):
        """
        See :class:`FSMState` for more information.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState('final', is_final=True)
            'final'

        TESTS::

            sage: A = FSMState('A', is_final=True)
            sage: A.final_word_out
            []
            sage: A.is_final = True
            sage: A = FSMState('A', is_final=True, final_word_out='end')
            sage: A.final_word_out
            ['end']
            sage: A = FSMState('A', is_final=True,
            ....:              final_word_out=['e', 'n', 'd'])
            sage: A.final_word_out
            ['e', 'n', 'd']
            sage: A = FSMState('A', is_final=True, final_word_out=[])
            sage: A.final_word_out
            []
            sage: A = FSMState('A', is_final=True, final_word_out=None)
            sage: A.final_word_out
            []
            sage: A = FSMState('A', is_final=False)
            sage: A.final_word_out is None
            True
            sage: A.is_final = False
            sage: A = FSMState('A', is_final=False, final_word_out='end')
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final output word,
            but state A is not final.
            sage: A = FSMState('A', is_final=False,
            ....:              final_word_out=['e', 'n', 'd'])
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final output word,
            but state A is not final.
            sage: A = FSMState('A', is_final=False, final_word_out=None)
            sage: A.final_word_out is None
            True
            sage: A = FSMState('A', is_final=False, final_word_out=[])
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final output word,
            but state A is not final.
        """
        if not allow_label_None and label is None:
            raise ValueError("Label None reserved for a special state, "
                             "choose another label.")
        self._label_ = label

        # Normalize word_out to a (possibly empty) list.
        if isinstance(word_out, list):
            self.word_out = word_out
        elif word_out is not None:
            self.word_out = [word_out]
        else:
            self.word_out = []

        self.is_initial = is_initial
        # _final_word_out_ must exist before the is_final and
        # final_word_out property setters below are invoked.
        self._final_word_out_ = None
        self.is_final = is_final
        self.final_word_out = final_word_out

        if hook is not None:
            if hasattr(hook, '__call__'):
                self.hook = hook
            else:
                raise TypeError('Wrong argument for hook.')

        self.color = color

    def __lt__(self, other):
        """
        Returns True if label of ``self`` is less than label of
        ``other``.

        INPUT:

        - `other` -- a state.

        OUTPUT:

        True or False.

        EXAMPLE::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState(0) < FSMState(1)
            True
        """
        return self.label() < other.label()

    @property
    def final_word_out(self):
        """
        The final output word of a final state which is written if the
        state is reached as the last state of the input of the finite
        state machine. For a non-final state, the value is ``None``.

        ``final_word_out`` can be a single letter, a list or ``None``,
        but for a final-state, it is always saved as a list.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_final=True, final_word_out=2)
            sage: A.final_word_out
            [2]
            sage: A.final_word_out = 3
            sage: A.final_word_out
            [3]
            sage: A.final_word_out = [3, 4]
            sage: A.final_word_out
            [3, 4]
            sage: A.final_word_out = None
            sage: A.final_word_out
            []
            sage: B = FSMState('B')
            sage: B.final_word_out is None
            True

        A non-final state cannot have a final output word::

            sage: B.final_word_out = [3, 4]
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final
            output word, but state B is not final.
        """
        return self._final_word_out_

    @final_word_out.setter
    def final_word_out(self, final_word_out):
        """
        Sets the value of the final output word of a final state.

        INPUT:

        - ``final_word_out`` -- a list, any element or ``None``.

        OUTPUT:

        Nothing.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: B = FSMState('B')
            sage: B.final_word_out = []
            Traceback (most recent call last):
            ...
            ValueError: Only final states can have a final
            output word, but state B is not final.
            sage: B.final_word_out = None
            sage: B.final_word_out is None
            True
        """
        if not self.is_final:
            if final_word_out is not None:
                raise ValueError("Only final states can have a "
                                 "final output word, but state %s is not final."
                                 % (self.label()))
            else:
                self._final_word_out_ = None
        elif isinstance(final_word_out, list):
            self._final_word_out_ = final_word_out
        elif final_word_out is not None:
            # A single letter is wrapped into a one-element list.
            self._final_word_out_ = [final_word_out]
        else:
            # None is normalized to the empty word for final states.
            self._final_word_out_ = []

    @property
    def is_final(self):
        """
        Describes whether the state is final or not.

        ``True`` if the state is final and ``False`` otherwise.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_final=True, final_word_out=3)
            sage: A.is_final
            True
            sage: A.is_final = False
            Traceback (most recent call last):
            ...
            ValueError: State A cannot be non-final, because it has a
            final output word. Only final states can have a final output
            word.
            sage: A.final_word_out = None
            sage: A.is_final = False
            sage: A.is_final
            False
        """
        # A state is final iff it has a final output word
        # (possibly the empty one); see _final_word_out_.
        return (self.final_word_out is not None)

    @is_final.setter
    def is_final(self, is_final):
        """
        Defines the state as a final state or a non-final state.

        INPUT:

        - ``is_final`` -- ``True`` if the state should be final and
          ``False`` otherwise.

        OUTPUT:

        Nothing.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_final=True)
            sage: A.final_word_out
            []
            sage: A.is_final = False
            sage: A.final_word_out is None
            True
            sage: A = FSMState('A', is_final=True, final_word_out='a')
            sage: A.is_final = False
            Traceback (most recent call last):
            ...
            ValueError: State A cannot be non-final, because it has a
            final output word. Only final states can have a final output
            word.
            sage: A = FSMState('A', is_final=True, final_word_out=[])
            sage: A.is_final = False
            sage: A.final_word_out is None
            True
        """
        if is_final and self.final_word_out is None:
            # Becoming final: give the state the empty final word.
            self._final_word_out_ = []
        elif not is_final:
            if not self.final_word_out:
                # Becoming non-final is only allowed if the final
                # output word is empty (or already None).
                self._final_word_out_ = None
            else:
                raise ValueError("State %s cannot be non-final, because it "
                                 "has a final output word. Only final states "
                                 "can have a final output word. "
                                 % self.label())

    def label(self):
        """
        Returns the label of the state.

        INPUT:

        Nothing.

        OUTPUT:

        The label of the state.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('state')
            sage: A.label()
            'state'
        """
        return self._label_

    def __copy__(self):
        """
        Returns a (shallow) copy of the state.

        INPUT:

        Nothing.

        OUTPUT:

        A new state.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: copy(A)
            'A'
        """
        new = FSMState(self.label(), self.word_out,
                       self.is_initial, self.is_final,
                       color=self.color,
                       final_word_out=self.final_word_out)
        if hasattr(self, 'hook'):
            # hook is optional and therefore only copied if present.
            new.hook = self.hook
        return new

    copy = __copy__

    def __deepcopy__(self, memo):
        """
        Returns a deep copy of the state.

        INPUT:

        - ``memo`` -- a dictionary storing already processed elements.

        OUTPUT:

        A new state.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: deepcopy(A)
            'A'
        """
        try:
            # _deepcopy_relabel_ is set temporarily by relabeled() to
            # request a different label for the copy.
            label = self._deepcopy_relabel_
        except AttributeError:
            label = deepcopy(self.label(), memo)
        new = FSMState(label, deepcopy(self.word_out, memo),
                       self.is_initial, self.is_final)
        if hasattr(self, 'hook'):
            new.hook = deepcopy(self.hook, memo)
        new.color = deepcopy(self.color, memo)
        new.final_word_out = deepcopy(self.final_word_out, memo)
        return new

    def deepcopy(self, memo=None):
        """
        Returns a deep copy of the state.

        INPUT:

        - ``memo`` -- (default: ``None``) a dictionary storing already
          processed elements.

        OUTPUT:

        A new state.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState((1, 3), color=[1, 2],
            ....:              is_final=True, final_word_out=3)
            sage: B = deepcopy(A)
            sage: B
            (1, 3)
            sage: B.label == A.label
            True
            sage: B.label is A.label
            False
            sage: B.color == A.color
            True
            sage: B.color is A.color
            False
            sage: B.is_final == A.is_final
            True
            sage: B.is_final is A.is_final
            True
            sage: B.final_word_out == A.final_word_out
            True
            sage: B.final_word_out is A.final_word_out
            False
        """
        return deepcopy(self, memo)

    def relabeled(self, label, memo=None):
        """
        Returns a deep copy of the state with a new label.

        INPUT:

        - ``label`` -- the label of new state.

        - ``memo`` -- (default: ``None``) a dictionary storing already
          processed elements.

        OUTPUT:

        A new state.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: A.relabeled('B')
            'B'
        """
        # Communicate the new label to __deepcopy__ via a temporary
        # attribute, then remove it again.
        self._deepcopy_relabel_ = label
        new = deepcopy(self, memo)
        del self._deepcopy_relabel_
        return new

    def __hash__(self):
        """
        Returns a hash value for the object.

        INPUT:

        Nothing.

        OUTPUT:

        The hash of this state.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: hash(A) #random
            -269909568
        """
        # Only the label enters the hash; consistent with __eq__.
        return hash(self.label())

    def _repr_(self):
        """
        Returns the string "label".

        INPUT:

        Nothing.

        OUTPUT:

        A string.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState('A')._repr_()
            "'A'"
        """
        return repr(self.label())

    def __eq__(left, right):
        """
        Returns True if two states are the same, i.e., if they have
        the same labels.

        INPUT:

        - ``left`` -- a state.

        - ``right`` -- a state.

        OUTPUT:

        True or False.

        Note that the hooks and whether the states are initial or
        final are not checked. To fully compare two states (including
        these attributes), use :meth:`.fully_equal`.

        As only the labels are used when hashing a state, only the
        labels can actually be compared by the equality relation.
        Note that the labels are unique within one finite state machine,
        so this may only lead to ambiguities when comparing states
        belonging to different finite state machines.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: B = FSMState('A', is_initial=True)
            sage: A == B
            True
        """
        if not is_FSMState(right):
            return False
        return left.label() == right.label()

    def __ne__(left, right):
        """
        Tests for inequality, complement of __eq__.

        INPUT:

        - ``left`` -- a state.

        - ``right`` -- a state.

        OUTPUT:

        True or False.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A', is_initial=True)
            sage: B = FSMState('A', is_final=True)
            sage: A != B
            False
        """
        return (not (left == right))

    def fully_equal(left, right, compare_color=True):
        """
        Checks whether two states are fully equal, i.e., including all
        attributes except ``hook``.

        INPUT:

        - ``left`` -- a state.

        - ``right`` -- a state.

        - ``compare_color`` -- If ``True`` (default) colors are
          compared as well, otherwise not.

        OUTPUT:

        ``True`` or ``False``.

        Note that usual comparison by ``==`` does only compare the labels.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: B = FSMState('A', is_initial=True)
            sage: A.fully_equal(B)
            False
            sage: A == B
            True
            sage: A.is_initial = True; A.color = 'green'
            sage: A.fully_equal(B)
            False
            sage: A.fully_equal(B, compare_color=False)
            True
        """
        # color is considered equal when color comparison is disabled.
        color = not compare_color or left.color == right.color
        return (left.__eq__(right) and
                left.is_initial == right.is_initial and
                left.is_final == right.is_final and
                left.final_word_out == right.final_word_out and
                left.word_out == right.word_out and
                color)

    def __nonzero__(self):
        """
        Returns True.

        INPUT:

        Nothing.

        OUTPUT:

        True or False.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: FSMState('A').__nonzero__()
            True
        """
        return True  # A state cannot be zero (see __init__)
#*****************************************************************************
def is_FSMTransition(T):
    """
    Tests whether or not ``T`` inherits from :class:`FSMTransition`.

    INPUT:

    - ``T`` -- an arbitrary object.

    OUTPUT:

    ``True`` or ``False``.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FSMTransition, FSMTransition
        sage: is_FSMTransition(FSMTransition('A', 'B'))
        True
    """
    return isinstance(T, FSMTransition)
class FSMTransition(SageObject):
    """
    Class for a transition of a finite state machine.

    INPUT:

    - ``from_state`` -- state from which transition starts.

    - ``to_state`` -- state in which transition ends.

    - ``word_in`` -- the input word of the transitions (when the
      finite state machine is used as automaton)

    - ``word_out`` -- the output word of the transitions (when the
      finite state machine is used as transducer)

    OUTPUT:

    A transition of a finite state machine.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
        sage: A = FSMState('A')
        sage: B = FSMState('B')
        sage: S = FSMTransition(A, B, 0, 1)
        sage: T = FSMTransition('A', 'B', 0, 1)
        sage: T == S
        True
        sage: U = FSMTransition('A', 'B', 0)
        sage: U == T
        False
    """

    from_state = None
    """State from which the transition starts. Read-only."""

    to_state = None
    """State in which the transition ends. Read-only."""

    word_in = None
    """Input word of the transition. Read-only."""

    word_out = None
    """Output word of the transition. Read-only."""

    def __init__(self, from_state, to_state,
                 word_in=None, word_out=None,
                 hook=None):
        """
        See :class:`FSMTransition` for more information.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 1)
            Transition from 'A' to 'B': 0|1
        """
        # Plain labels are wrapped into FSMState objects.
        if is_FSMState(from_state):
            self.from_state = from_state
        else:
            self.from_state = FSMState(from_state)
        if is_FSMState(to_state):
            self.to_state = to_state
        else:
            self.to_state = FSMState(to_state)

        # Normalize input and output words to (possibly empty) lists.
        if isinstance(word_in, list):
            self.word_in = word_in
        elif word_in is not None:
            self.word_in = [word_in]
        else:
            self.word_in = []
        if isinstance(word_out, list):
            self.word_out = word_out
        elif word_out is not None:
            self.word_out = [word_out]
        else:
            self.word_out = []

        if hook is not None:
            if hasattr(hook, '__call__'):
                self.hook = hook
            else:
                raise TypeError('Wrong argument for hook.')

    def __lt__(self, other):
        """
        Returns True if ``self`` is less than ``other`` with respect to the
        key ``(self.from_state, self.word_in, self.to_state, self.word_out)``.

        INPUT:

        - `other` -- a transition.

        OUTPUT:

        True or False.

        EXAMPLE::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition(0,1,0,0) < FSMTransition(1,0,0,0)
            True
        """
        return (self.from_state, self.word_in, self.to_state, self.word_out) < \
            (other.from_state, other.word_in, other.to_state, other.word_out)

    def __copy__(self):
        """
        Returns a (shallow) copy of the transition.

        INPUT:

        Nothing.

        OUTPUT:

        A new transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: copy(t)
            Transition from 'A' to 'B': 0|-
        """
        new = FSMTransition(self.from_state, self.to_state,
                            self.word_in, self.word_out)
        if hasattr(self, 'hook'):
            # hook is optional and therefore only copied if present.
            new.hook = self.hook
        return new

    copy = __copy__

    def __deepcopy__(self, memo):
        """
        Returns a deep copy of the transition.

        INPUT:

        - ``memo`` -- a dictionary storing already processed elements.

        OUTPUT:

        A new transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: deepcopy(t)
            Transition from 'A' to 'B': 0|-
        """
        new = FSMTransition(deepcopy(self.from_state, memo),
                            deepcopy(self.to_state, memo),
                            deepcopy(self.word_in, memo),
                            deepcopy(self.word_out, memo))
        if hasattr(self, 'hook'):
            new.hook = deepcopy(self.hook, memo)
        return new

    def deepcopy(self, memo=None):
        """
        Returns a deep copy of the transition.

        INPUT:

        - ``memo`` -- (default: ``None``) a dictionary storing already
          processed elements.

        OUTPUT:

        A new transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: t = FSMTransition('A', 'B', 0)
            sage: deepcopy(t)
            Transition from 'A' to 'B': 0|-
        """
        return deepcopy(self, memo)

    def __hash__(self):
        """
        Since transitions are mutable, they should not be hashable, so
        we return a type error.

        INPUT:

        Nothing.

        OUTPUT:

        The hash of this transition.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: hash(FSMTransition('A', 'B'))
            Traceback (most recent call last):
            ...
            TypeError: Transitions are mutable, and thus not hashable.
        """
        raise TypeError("Transitions are mutable, and thus not hashable.")

    def _repr_(self):
        """
        Represents a transitions as from state to state and input, output.

        INPUT:

        Nothing.

        OUTPUT:

        A string.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 0)._repr_()
            "Transition from 'A' to 'B': 0|0"
        """
        return "Transition from %s to %s: %s" % (repr(self.from_state),
                                                 repr(self.to_state),
                                                 self._in_out_label_())

    def _in_out_label_(self):
        """
        Returns the input and output of a transition as
        "word_in|word_out".

        INPUT:

        Nothing.

        OUTPUT:

        A string of the input and output labels.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0, 1)._in_out_label_()
            '0|1'
        """
        return "%s|%s" % (FSMWordSymbol(self.word_in),
                          FSMWordSymbol(self.word_out))

    def __eq__(left, right):
        """
        Returns True if the two transitions are the same, i.e., if the
        both go from the same states to the same states and read and
        write the same words.

        Note that the hooks are not checked.

        INPUT:

        - ``left`` -- a transition.

        - ``right`` -- a transition.

        OUTPUT:

        True or False.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
            sage: A = FSMState('A', is_initial=True)
            sage: t1 = FSMTransition('A', 'B', 0, 1)
            sage: t2 = FSMTransition(A, 'B', 0, 1)
            sage: t1 == t2
            True
        """
        if not is_FSMTransition(right):
            raise TypeError('Only instances of FSMTransition ' \
                'can be compared.')
        return left.from_state == right.from_state \
            and left.to_state == right.to_state \
            and left.word_in == right.word_in \
            and left.word_out == right.word_out

    def __ne__(left, right):
        """
        INPUT:

        - ``left`` -- a transition.

        - ``right`` -- a transition.

        OUTPUT:

        True or False.

        Tests for inequality, complement of __eq__.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
            sage: A = FSMState('A', is_initial=True)
            sage: t1 = FSMTransition('A', 'B', 0, 1)
            sage: t2 = FSMTransition(A, 'B', 0, 1)
            sage: t1 != t2
            False
        """
        return (not (left == right))

    def __nonzero__(self):
        """
        Returns True.

        INPUT:

        Nothing.

        OUTPUT:

        True or False.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMTransition
            sage: FSMTransition('A', 'B', 0).__nonzero__()
            True
        """
        return True  # A transition cannot be zero (see __init__)
#*****************************************************************************
def is_FiniteStateMachine(FSM):
    """
    Tests whether or not ``FSM`` inherits from :class:`FiniteStateMachine`.

    INPUT:

    - ``FSM`` -- an arbitrary object.

    OUTPUT:

    ``True`` or ``False``.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine
        sage: is_FiniteStateMachine(FiniteStateMachine())
        True
        sage: is_FiniteStateMachine(Automaton())
        True
        sage: is_FiniteStateMachine(Transducer())
        True
    """
    return isinstance(FSM, FiniteStateMachine)
def duplicate_transition_ignore(old_transition, new_transition):
    """
    Default function for handling duplicate transitions in finite
    state machines. This implementation ignores the occurrence.

    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.

    INPUT:

    - ``old_transition`` -- A transition in a finite state machine.

    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.

    OUTPUT:

    The same transition, unchanged.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_ignore
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_ignore(FSMTransition(0, 0, 1),
        ....:                             FSMTransition(0, 0, 1))
        Transition from 0 to 0: 1|-
    """
    # Keep the existing transition; the new (identical) one is discarded.
    return old_transition
def duplicate_transition_raise_error(old_transition, new_transition):
    """
    Alternative function for handling duplicate transitions in finite
    state machines. This implementation raises a ``ValueError``.

    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.

    INPUT:

    - ``old_transition`` -- A transition in a finite state machine.

    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.

    OUTPUT:

    Nothing. A ``ValueError`` is raised.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_raise_error(FSMTransition(0, 0, 1),
        ....:                                  FSMTransition(0, 0, 1))
        Traceback (most recent call last):
        ...
        ValueError: Attempting to re-insert transition Transition from 0 to 0: 1|-
    """
    # Duplicates are never acceptable here; report the transition
    # which already exists in the machine.
    message = "Attempting to re-insert transition %s" % old_transition
    raise ValueError(message)
def duplicate_transition_add_input(old_transition, new_transition):
    """
    Alternative function for handling duplicate transitions in finite
    state machines. This implementation adds the input label of the
    new transition to the input label of the old transition. This is
    intended for the case where a Markov chain is modelled by a finite
    state machine using the input labels as transition probabilities.

    See the documentation of the ``on_duplicate_transition`` parameter
    of :class:`FiniteStateMachine`.

    INPUT:

    - ``old_transition`` -- A transition in a finite state machine.

    - ``new_transition`` -- A transition, identical to ``old_transition``,
      which is to be inserted into the finite state machine.

    OUTPUT:

    A transition whose input weight is the sum of the input
    weights of ``old_transition`` and ``new_transition``.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: from sage.combinat.finite_state_machine import FSMTransition
        sage: duplicate_transition_add_input(FSMTransition('a', 'a', 1/2),
        ....:                                FSMTransition('a', 'a', 1/2))
        Transition from 'a' to 'a': 1|-

    Input labels must be lists of length 1::

        sage: duplicate_transition_add_input(FSMTransition('a', 'a', [1, 1]),
        ....:                                FSMTransition('a', 'a', [1, 1]))
        Traceback (most recent call last):
        ...
        TypeError: Trying to use duplicate_transition_add_input on
        "Transition from 'a' to 'a': 1,1|-" and
        "Transition from 'a' to 'a': 1,1|-",
        but input words are assumed to be lists of length 1
    """
    def _has_single_letter(word):
        # An input label must be an iterable holding exactly one letter.
        return hasattr(word, '__iter__') and len(word) == 1

    if not (_has_single_letter(old_transition.word_in)
            and _has_single_letter(new_transition.word_in)):
        raise TypeError('Trying to use duplicate_transition_add_input on '
                        '"%s" and "%s", '
                        'but input words are assumed to be lists of length 1'
                        % (old_transition, new_transition))
    # Sum the single-letter input weights into the existing transition.
    old_transition.word_in = [old_transition.word_in[0]
                              + new_transition.word_in[0]]
    return old_transition
class FiniteStateMachine(SageObject):
"""
Class for a finite state machine.
A finite state machine is a finite set of states connected by
transitions.
INPUT:
- ``data`` -- can be any of the following:
#. a dictionary of dictionaries (of transitions),
#. a dictionary of lists (of states or transitions),
#. a list (of transitions),
#. a function (transition function),
#. an other instance of a finite state machine.
- ``initial_states`` and ``final_states`` -- the initial and
final states of this machine
- ``input_alphabet`` and ``output_alphabet`` -- the input and
output alphabets of this machine
- ``determine_alphabets`` -- If ``True``, then the function
:meth:`.determine_alphabets` is called after ``data`` was read and
processed, if ``False``, then not. If it is ``None``, then it is
decided during the construction of the finite state machine
whether :meth:`.determine_alphabets` should be called.
- ``with_final_word_out`` -- If given (not ``None``), then the
function :meth:`.with_final_word_out` (more precisely, its inplace
pendant :meth:`.construct_final_word_out`) is called with input
``letters=with_final_word_out`` at the end of the creation
process.
- ``store_states_dict`` -- If ``True``, then additionally the states
are stored in an internal dictionary to speed up lookups.
- ``on_duplicate_transition`` -- A function which is called when a
transition is inserted into ``self`` which already existed (same
``from_state``, same ``to_state``, same ``word_in``, same ``word_out``).
This function is assumed to take two arguments, the first being
the already existing transition, the second being the new
transition (as an :class:`FSMTransition`). The function must
return the (possibly modified) original transition.
By default, we have ``on_duplicate_transition=None``, which is
interpreted as
``on_duplicate_transition=duplicate_transition_ignore``, where
``duplicate_transition_ignore`` is a predefined function
ignoring the occurrence. Other such predefined functions are
``duplicate_transition_raise_error`` and
``duplicate_transition_add_input``.
OUTPUT:
A finite state machine.
The object creation of :class:`Automaton` and :class:`Transducer`
is the same as the one described here (i.e. just replace the word
``FiniteStateMachine`` by ``Automaton`` or ``Transducer``).
Each transition of an automaton has an input label. Automata can,
for example, be determinised (see
:meth:`Automaton.determinisation`) and minimized (see
:meth:`Automaton.minimization`). Each transition of a transducer
has an input and an output label. Transducers can, for example, be
simplified (see :meth:`Transducer.simplification`).
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
See documentation for more examples.
We illustrate the different input formats:
#. The input-data can be a dictionary of dictionaries, where
- the keys of the outer dictionary are state-labels (from-states of
transitions),
- the keys of the inner dictionaries are state-labels (to-states of
transitions),
- the values of the inner dictionaries specify the transition
more precisely.
The easiest is to use a tuple consisting of an input and an
output word::
sage: FiniteStateMachine({'a':{'b':(0, 1), 'c':(1, 1)}})
Finite state machine with 3 states
Instead of the tuple anything iterable (e.g. a list) can be
used as well.
If you want to use the arguments of :class:`FSMTransition`
directly, you can use a dictionary::
sage: FiniteStateMachine({'a':{'b':{'word_in':0, 'word_out':1},
....: 'c':{'word_in':1, 'word_out':1}}})
Finite state machine with 3 states
In the case you already have instances of
:class:`FSMTransition`, it is possible to use them directly::
sage: FiniteStateMachine({'a':{'b':FSMTransition('a', 'b', 0, 1),
....: 'c':FSMTransition('a', 'c', 1, 1)}})
Finite state machine with 3 states
#. The input-data can be a dictionary of lists, where the keys
are states or label of states.
The list-elements can be states::
sage: a = FSMState('a')
sage: b = FSMState('b')
sage: c = FSMState('c')
sage: FiniteStateMachine({a:[b, c]})
Finite state machine with 3 states
Or the list-elements can simply be labels of states::
sage: FiniteStateMachine({'a':['b', 'c']})
Finite state machine with 3 states
The list-elements can also be transitions::
sage: FiniteStateMachine({'a':[FSMTransition('a', 'b', 0, 1),
....: FSMTransition('a', 'c', 1, 1)]})
Finite state machine with 3 states
Or they can be tuples of a label, an input word and an output
word specifying a transition::
sage: FiniteStateMachine({'a':[('b', 0, 1), ('c', 1, 1)]})
Finite state machine with 3 states
#. The input-data can be a list, where its elements specify
transitions::
sage: FiniteStateMachine([FSMTransition('a', 'b', 0, 1),
....: FSMTransition('a', 'c', 1, 1)])
Finite state machine with 3 states
It is possible to skip ``FSMTransition`` in the example above::
sage: FiniteStateMachine([('a', 'b', 0, 1), ('a', 'c', 1, 1)])
Finite state machine with 3 states
The parameters of the transition are given in tuples. Anyhow,
anything iterable (e.g. a list) is possible.
You can also name the parameters of the transition. For this
purpose you take a dictionary::
sage: FiniteStateMachine([{'from_state':'a', 'to_state':'b',
....: 'word_in':0, 'word_out':1},
....: {'from_state':'a', 'to_state':'c',
....: 'word_in':1, 'word_out':1}])
Finite state machine with 3 states
Other arguments, which :class:`FSMTransition` accepts, can be
added, too.
#. The input-data can also be a function acting as transition
function:
This function has two input arguments:
#. a label of a state (from which the transition starts),
#. a letter of the (input-)alphabet (as input-label of the transition).
It returns a tuple with the following entries:
#. a label of a state (to which state the transition goes),
#. a letter of or a word over the (output-)alphabet (as
output-label of the transition).
It may also output a list of such tuples if several
transitions from the from-state and the input letter exist
(this means that the finite state machine is
non-deterministic).
If the transition does not exist, the function should raise a
``LookupError`` or return an empty list.
When constructing a finite state machine in this way, some
initial states and an input alphabet have to be specified.
::
sage: def f(state_from, read):
....: if int(state_from) + read <= 2:
....: state_to = 2*int(state_from)+read
....: write = 0
....: else:
....: state_to = 2*int(state_from) + read - 5
....: write = 1
....: return (str(state_to), write)
sage: F = FiniteStateMachine(f, input_alphabet=[0, 1],
....: initial_states=['0'],
....: final_states=['0'])
sage: F([1, 0, 1])
(True, '0', [0, 0, 1])
#. The input-data can be another instance of a finite state machine::
sage: FiniteStateMachine(FiniteStateMachine([]))
Traceback (most recent call last):
...
NotImplementedError
The following examples demonstrate the use of ``on_duplicate_transition``::
sage: F = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]])
sage: F.transitions()
[Transition from 'a' to 'a': 1/2|-]
::
sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
sage: F1 = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]],
....: on_duplicate_transition=duplicate_transition_raise_error)
Traceback (most recent call last):
...
ValueError: Attempting to re-insert transition Transition from 'a' to 'a': 1/2|-
Use ``duplicate_transition_add_input`` to emulate a Markov chain,
the input labels are considered as transition probabilities::
sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
sage: F = FiniteStateMachine([['a', 'a', 1/2], ['a', 'a', 1/2]],
....: on_duplicate_transition=duplicate_transition_add_input)
sage: F.transitions()
[Transition from 'a' to 'a': 1|-]
Use ``with_final_word_out`` to construct final output::
sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
....: initial_states=[0],
....: final_states=[0],
....: with_final_word_out=0)
sage: for s in T.iter_final_states():
....: print s, s.final_word_out
0 []
1 [0]
TESTS::
sage: a = FSMState('S_a', 'a')
sage: b = FSMState('S_b', 'b')
sage: c = FSMState('S_c', 'c')
sage: d = FSMState('S_d', 'd')
sage: FiniteStateMachine({a:[b, c], b:[b, c, d],
....: c:[a, b], d:[a, c]})
Finite state machine with 4 states
We have several constructions which lead to the same finite
state machine::
sage: A = FSMState('A')
sage: B = FSMState('B')
sage: C = FSMState('C')
sage: FSM1 = FiniteStateMachine(
....: {A:{B:{'word_in':0, 'word_out':1},
....: C:{'word_in':1, 'word_out':1}}})
sage: FSM2 = FiniteStateMachine({A:{B:(0, 1), C:(1, 1)}})
sage: FSM3 = FiniteStateMachine(
....: {A:{B:FSMTransition(A, B, 0, 1),
....: C:FSMTransition(A, C, 1, 1)}})
sage: FSM4 = FiniteStateMachine({A:[(B, 0, 1), (C, 1, 1)]})
sage: FSM5 = FiniteStateMachine(
....: {A:[FSMTransition(A, B, 0, 1), FSMTransition(A, C, 1, 1)]})
sage: FSM6 = FiniteStateMachine(
....: [{'from_state':A, 'to_state':B, 'word_in':0, 'word_out':1},
....: {'from_state':A, 'to_state':C, 'word_in':1, 'word_out':1}])
sage: FSM7 = FiniteStateMachine([(A, B, 0, 1), (A, C, 1, 1)])
sage: FSM8 = FiniteStateMachine(
....: [FSMTransition(A, B, 0, 1), FSMTransition(A, C, 1, 1)])
sage: FSM1 == FSM2 == FSM3 == FSM4 == FSM5 == FSM6 == FSM7 == FSM8
True
It is possible to skip ``FSMTransition`` in the example above.
Some more tests for different input-data::
sage: FiniteStateMachine({'a':{'a':[0, 0], 'b':[1, 1]},
....: 'b':{'b':[1, 0]}})
Finite state machine with 2 states
sage: a = FSMState('S_a', 'a')
sage: b = FSMState('S_b', 'b')
sage: c = FSMState('S_c', 'c')
sage: d = FSMState('S_d', 'd')
sage: t1 = FSMTransition(a, b)
sage: t2 = FSMTransition(b, c)
sage: t3 = FSMTransition(b, d)
sage: t4 = FSMTransition(c, d)
sage: FiniteStateMachine([t1, t2, t3, t4])
Finite state machine with 4 states
"""
on_duplicate_transition = duplicate_transition_ignore
"""
Which function to call when a duplicate transition is inserted. See
the documentation of the parameter ``on_duplicate_transition`` of
the class :class:`FiniteStateMachine` for details.
"""
#*************************************************************************
# init
#*************************************************************************
def __init__(self,
             data=None,
             initial_states=None, final_states=None,
             input_alphabet=None, output_alphabet=None,
             determine_alphabets=None,
             with_final_word_out=None,
             store_states_dict=True,
             on_duplicate_transition=None):
    """
    Initialize the finite state machine from ``data``.

    See the class documentation of :class:`FiniteStateMachine` for
    a description of all parameters and of the accepted formats of
    ``data`` (dict of dicts, dict of lists, list of transitions,
    transition function, or another machine).

    TEST::

        sage: FiniteStateMachine()
        Finite state machine with 0 states
    """
    self._states_ = []  # List of states in the finite state
                        # machine.  Each state stores a list of
                        # outgoing transitions.
    if store_states_dict:
        # Optional label -> state mapping; presumably used by
        # self.state() to avoid a linear search -- TODO confirm.
        self._states_dict_ = {}
    if initial_states is not None:
        if not hasattr(initial_states, '__iter__'):
            raise TypeError('Initial states must be iterable ' \
                                '(e.g. a list of states).')
        for s in initial_states:
            # add_state either inserts s or returns the already
            # existing state with that label.
            state = self.add_state(s)
            state.is_initial = True
    if final_states is not None:
        if not hasattr(final_states, '__iter__'):
            raise TypeError('Final states must be iterable ' \
                                '(e.g. a list of states).')
        for s in final_states:
            state = self.add_state(s)
            state.is_final = True
    self.input_alphabet = input_alphabet
    self.output_alphabet = output_alphabet
    if on_duplicate_transition is None:
        # Default policy: silently ignore re-inserted transitions.
        on_duplicate_transition = duplicate_transition_ignore
    if hasattr(on_duplicate_transition, '__call__'):
        self.on_duplicate_transition=on_duplicate_transition
    else:
        raise TypeError('on_duplicate_transition must be callable')

    # Dispatch on the format of ``data``.
    if data is None:
        pass
    elif is_FiniteStateMachine(data):
        # Construction from another machine is not supported.
        raise NotImplementedError
    elif hasattr(data, 'iteritems'):
        # data is a dict (or something similar),
        # format: key = from_state, value = iterator of transitions
        for (sf, iter_transitions) in data.iteritems():
            self.add_state(sf)
            if hasattr(iter_transitions, 'iteritems'):
                # Inner dict: key = to_state, value describes the
                # transition (FSMTransition, kwargs-dict, iterable
                # of positional args, or a single input label).
                for (st, transition) in iter_transitions.iteritems():
                    self.add_state(st)
                    if is_FSMTransition(transition):
                        self.add_transition(transition)
                    elif hasattr(transition, 'iteritems'):
                        self.add_transition(sf, st, **transition)
                    elif hasattr(transition, '__iter__'):
                        self.add_transition(sf, st, *transition)
                    else:
                        self.add_transition(sf, st, transition)
            elif hasattr(iter_transitions, '__iter__'):
                # Inner list: each element is a transition, a tuple
                # of transition data, or a plain to-state.
                for transition in iter_transitions:
                    if hasattr(transition, '__iter__'):
                        L = [sf]
                        L.extend(transition)
                    elif is_FSMTransition(transition):
                        L = transition
                    else:
                        L = [sf, transition]
                    self.add_transition(L)
            else:
                raise TypeError('Wrong input data for transition.')
        if determine_alphabets is None and input_alphabet is None \
                and output_alphabet is None:
            # No alphabet information given at all: derive it from
            # the transitions afterwards.
            determine_alphabets = True
    elif hasattr(data, '__iter__'):
        # data is a something that is iterable,
        # items are transitions
        for transition in data:
            if is_FSMTransition(transition):
                self.add_transition(transition)
            elif hasattr(transition, 'iteritems'):
                self.add_transition(transition)
            elif hasattr(transition, '__iter__'):
                self.add_transition(transition)
            else:
                raise TypeError('Wrong input data for transition.')
        if determine_alphabets is None and input_alphabet is None \
                and output_alphabet is None:
            determine_alphabets = True
    elif hasattr(data, '__call__'):
        # data is a transition function; requires initial_states
        # and input_alphabet to have been set above.
        self.add_from_transition_function(data)
    else:
        raise TypeError('Cannot decide what to do with data.')

    if determine_alphabets:
        self.determine_alphabets()

    if with_final_word_out is not None:
        self.construct_final_word_out(with_final_word_out)

    # NOTE(review): flag presumably consulted by composition() --
    # confirm against the rest of the file.
    self._allow_composition_ = True
#*************************************************************************
# copy and hash
#*************************************************************************
def __copy__(self):
    """
    Return a (shallow) copy of the finite state machine.

    Shallow copying is not supported: the states and transitions
    are mutable and would be shared between the copies, so a
    ``NotImplementedError`` is always raised.

    TESTS::

        sage: copy(FiniteStateMachine())
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    raise NotImplementedError


copy = __copy__
def empty_copy(self, memo=None, new_class=None):
    """
    Return an empty deep copy of the finite state machine, i.e.,
    ``input_alphabet``, ``output_alphabet`` and
    ``on_duplicate_transition`` are carried over, but no states or
    transitions are copied.

    INPUT:

    - ``memo`` -- a dictionary storing already processed elements.

    - ``new_class`` -- a class for the copy. By default (``None``),
      the class of ``self`` is used.

    OUTPUT:

    A new finite state machine.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_raise_error
        sage: F = FiniteStateMachine([('A', 'A', 0, 2), ('A', 'A', 1, 3)],
        ....:                        input_alphabet=[0, 1],
        ....:                        output_alphabet=[2, 3],
        ....:                        on_duplicate_transition=duplicate_transition_raise_error)
        sage: FE = F.empty_copy(); FE
        Finite state machine with 0 states
        sage: FE.input_alphabet
        [0, 1]
        sage: FE.output_alphabet
        [2, 3]
        sage: FE.on_duplicate_transition == duplicate_transition_raise_error
        True

    TESTS::

        sage: T = Transducer()
        sage: type(T.empty_copy())
        <class 'sage.combinat.finite_state_machine.Transducer'>
        sage: type(T.empty_copy(new_class=Automaton))
        <class 'sage.combinat.finite_state_machine.Automaton'>
    """
    # Instantiate the requested class (defaulting to type(self)),
    # then carry over the alphabets (deep-copied) and the duplicate
    # policy (shared; it is a plain function).
    cls = self.__class__ if new_class is None else new_class
    result = cls()
    result.input_alphabet = deepcopy(self.input_alphabet, memo)
    result.output_alphabet = deepcopy(self.output_alphabet, memo)
    result.on_duplicate_transition = self.on_duplicate_transition
    return result
def __deepcopy__(self, memo):
    """
    Return a deep copy of the finite state machine.

    INPUT:

    - ``memo`` -- a dictionary storing already processed elements.

    OUTPUT:

    A new finite state machine.

    EXAMPLES::

        sage: F = FiniteStateMachine([('A', 'A', 0, 1), ('A', 'A', 1, 0)])
        sage: deepcopy(F)
        Finite state machine with 1 states
    """
    # relabeled() communicates with this method through the
    # temporary attributes _deepcopy_relabel_ / _deepcopy_labels_:
    # if they are present, every copied state gets a fresh label.
    relabel = hasattr(self, '_deepcopy_relabel_')
    new = self.empty_copy(memo=memo)
    relabel_iter = itertools.count(0)
    for state in self.iter_states():
        if relabel:
            if self._deepcopy_labels_ is None:
                # Default relabeling: consecutive integers 0, 1, ...
                state._deepcopy_relabel_ = next(relabel_iter)
            elif hasattr(self._deepcopy_labels_, '__call__'):
                state._deepcopy_relabel_ = self._deepcopy_labels_(state.label())
            elif hasattr(self._deepcopy_labels_, '__getitem__'):
                state._deepcopy_relabel_ = self._deepcopy_labels_[state.label()]
            else:
                raise TypeError("labels must be None, a callable "
                                "or a dictionary.")
        # The new label is picked up while the state itself is
        # deep-copied (presumably in FSMState.__deepcopy__ --
        # confirm there).
        s = deepcopy(state, memo)
        if relabel:
            # Remove the temporary marker from the *original* state.
            del state._deepcopy_relabel_
        new.add_state(s)
    for transition in self.iter_transitions():
        # Sharing ``memo`` guarantees that the copied transitions
        # reference the already-copied states, keeping the links
        # between states and transitions intact.
        new.add_transition(deepcopy(transition, memo))
    return new
def deepcopy(self, memo=None):
    """
    Return a deep copy of the finite state machine.

    INPUT:

    - ``memo`` -- (default: ``None``) a dictionary storing already
      processed elements.

    OUTPUT:

    A new finite state machine.

    EXAMPLES::

        sage: F = FiniteStateMachine([('A', 'A', 0, 1), ('A', 'A', 1, 0)])
        sage: deepcopy(F)
        Finite state machine with 1 states

    TESTS:

    Make sure that the links between transitions and states
    are still intact::

        sage: C = deepcopy(F)
        sage: C.transitions()[0].from_state is C.state('A')
        True
        sage: C.transitions()[0].to_state is C.state('A')
        True
    """
    # Delegate to the module-level deepcopy, which dispatches to
    # self.__deepcopy__.
    return deepcopy(self, memo)
def relabeled(self, memo=None, labels=None):
    """
    Return a deep copy of the finite state machine with relabeled
    states.

    INPUT:

    - ``memo`` -- (default: ``None``) a dictionary storing already
      processed elements.

    - ``labels`` -- (default: ``None``) a dictionary or callable
      mapping old labels to new labels. If ``None``, then the new
      labels are integers starting with 0.

    OUTPUT:

    A new finite state machine.

    EXAMPLES::

        sage: FSM1 = FiniteStateMachine([('A', 'B'), ('B', 'C'), ('C', 'A')])
        sage: FSM1.states()
        ['A', 'B', 'C']
        sage: FSM2 = FSM1.relabeled()
        sage: FSM2.states()
        [0, 1, 2]
        sage: FSM3 = FSM1.relabeled(labels={'A': 'a', 'B': 'b', 'C': 'c'})
        sage: FSM3.states()
        ['a', 'b', 'c']
        sage: FSM4 = FSM2.relabeled(labels=lambda x: 2*x)
        sage: FSM4.states()
        [0, 2, 4]

    TESTS::

        sage: FSM2.relabeled(labels=1)
        Traceback (most recent call last):
        ...
        TypeError: labels must be None, a callable or a dictionary.
    """
    # Temporarily mark ``self`` so that __deepcopy__ performs the
    # relabeling, then remove the markers again.
    self._deepcopy_relabel_ = True
    self._deepcopy_labels_ = labels
    relabeled_copy = deepcopy(self, memo)
    del self._deepcopy_relabel_
    del self._deepcopy_labels_
    return relabeled_copy
def induced_sub_finite_state_machine(self, states):
    """
    Return the sub-finite-state-machine induced by the given states.

    INPUT:

    - ``states`` -- a list (or an iterator) of states (either labels or
      instances of :class:`FSMState`) of the sub-finite-state-machine.

    OUTPUT:

    A new finite state machine. It consists (of deep copies) of
    the given states and (deep copies) of all transitions of ``self``
    between these states.

    EXAMPLE::

        sage: FSM = FiniteStateMachine([(0, 1, 0), (0, 2, 0),
        ....:                           (1, 2, 0), (2, 0, 0)])
        sage: sub_FSM = FSM.induced_sub_finite_state_machine([0, 1])
        sage: sub_FSM.states()
        [0, 1]
        sage: sub_FSM.transitions()
        [Transition from 0 to 1: 0|-]
        sage: FSM.induced_sub_finite_state_machine([3])
        Traceback (most recent call last):
        ...
        ValueError: 3 is not a state of this finite state machine.

    TESTS:

    Make sure that the links between transitions and states
    are still intact::

        sage: sub_FSM.transitions()[0].from_state is sub_FSM.state(0)
        True
    """
    # Validate the requested states and resolve labels to the
    # actual FSMState instances of ``self``.
    selected = set()
    for state in states:
        if not self.has_state(state):
            raise ValueError("%s is not a state of this finite state machine." % state)
        selected.add(self.state(state))

    # Copy the selected states, then every transition whose both
    # endpoints were selected; the shared ``memo`` keeps the copied
    # transitions linked to the copied states.
    memo = {}
    result = self.empty_copy(memo=memo)
    for state in selected:
        result.add_state(deepcopy(state, memo))
    for state in selected:
        for transition in self.iter_transitions(state):
            if transition.to_state in selected:
                result.add_transition(deepcopy(transition, memo))
    return result
def __hash__(self):
    """
    Return a hash value if the machine is immutable; otherwise
    raise a ``TypeError``.

    Finite state machines are mutable by default and therefore not
    hashable; only machines flagged with ``_immutable`` get a hash
    computed from their states and transitions.

    EXAMPLES::

        sage: hash(FiniteStateMachine())
        Traceback (most recent call last):
        ...
        TypeError: Finite state machines are mutable, and thus not hashable.
    """
    # Guard clause: mutable machines refuse to be hashed.
    if not getattr(self, "_immutable", False):
        raise TypeError("Finite state machines are mutable, " \
                            "and thus not hashable.")
    return hash((tuple(self.states()), tuple(self.transitions())))
#*************************************************************************
# operators
#*************************************************************************
def __or__(self, other):
    """
    Return the disjoint union of the finite state machines ``self``
    and ``other``.

    INPUT:

    - ``other`` -- a finite state machine.

    OUTPUT:

    A new finite state machine.

    :raise TypeError: if ``other`` is not a finite state machine.

    TESTS::

        sage: FiniteStateMachine() | FiniteStateMachine([('A', 'B')])
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    if is_FiniteStateMachine(other):
        return self.disjoint_union(other)
    # Bug fix: previously a non-machine operand fell off the end of
    # the function and silently returned ``None``; fail loudly.
    raise TypeError("Can only add finite state machine")


__add__ = __or__
def __iadd__(self, other):
    """
    In-place addition of finite state machines is not implemented.

    TESTS::

        sage: F = FiniteStateMachine()
        sage: F += FiniteStateMachine()
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    raise NotImplementedError
def __and__(self, other):
    """
    Return the intersection of ``self`` with ``other``.

    INPUT:

    - ``other`` -- a finite state machine.

    :raise TypeError: if ``other`` is not a finite state machine.

    TESTS::

        sage: FiniteStateMachine() & FiniteStateMachine([('A', 'B')])
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    if is_FiniteStateMachine(other):
        return self.intersection(other)
    # Bug fix: previously a non-machine operand fell off the end of
    # the function and silently returned ``None``; fail loudly.
    raise TypeError("Can only intersect with a finite state machine")
def __imul__(self, other):
    """
    In-place multiplication of finite state machines is not
    implemented.

    TESTS::

        sage: F = FiniteStateMachine()
        sage: F *= FiniteStateMachine()
        Traceback (most recent call last):
        ...
        NotImplementedError
    """
    raise NotImplementedError
def __call__(self, *args, **kwargs):
    """
    .. WARNING::

        The default output of this method is scheduled to change.
        This docstring describes the new default behaviour, which can
        already be achieved by setting
        ``FSMOldProcessOutput`` to ``False``.

    Calls either method :meth:`.composition` or :meth:`.process`
    (with ``full_output=False``).

    By setting ``FSMOldProcessOutput`` to ``False``
    the new desired output is produced.

    EXAMPLES::

        sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False  # activate new output behavior
        sage: from sage.combinat.finite_state_machine import FSMState
        sage: A = FSMState('A', is_initial=True, is_final=True)
        sage: binary_inverter = Transducer({A:[(A, 0, 1), (A, 1, 0)]})
        sage: binary_inverter([0, 1, 0, 0, 1, 1])
        [1, 0, 1, 1, 0, 0]

    ::

        sage: F = Transducer([('A', 'B', 1, 0), ('B', 'B', 1, 1),
        ....:                 ('B', 'B', 0, 0)],
        ....:                initial_states=['A'], final_states=['B'])
        sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
        ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
        ....:                initial_states=[1], final_states=[1])
        sage: H = G(F)
        sage: H.states()
        [('A', 1), ('B', 1), ('B', 2)]
    """
    if len(args) == 0:
        raise TypeError("Called with too few arguments.")
    # A machine argument means composition ...
    if is_FiniteStateMachine(args[0]):
        return self.composition(*args, **kwargs)
    # ... an iterable argument means processing an input word.
    if hasattr(args[0], '__iter__'):
        # Bug fix: ``kwargs.has_key(...)`` is Python-2-only (removed
        # in Python 3); setdefault is the equivalent idiom.
        kwargs.setdefault('full_output', False)
        return self.process(*args, **kwargs)
    raise TypeError("Do not know what to do with that arguments.")
#*************************************************************************
# tests
#*************************************************************************
def __nonzero__(self):
    """
    Return ``True`` if the finite state machine consists of at
    least one state.

    INPUT:

    Nothing.

    OUTPUT:

    True or False.

    TESTS::

        sage: FiniteStateMachine().__nonzero__()
        False
    """
    # A machine is truthy iff its state list is non-empty.
    return len(self._states_) > 0
def __eq__(left, right):
    """
    Returns ``True`` if the two finite state machines are equal,
    i.e., if they have the same states and the same transitions.

    INPUT:

    - ``left`` -- a finite state machine.

    - ``right`` -- a finite state machine.

    OUTPUT:

    ``True`` or ``False``.

    Note that this function compares all attributes of a state (by
    using :meth:`FSMState.fully_equal`) except for colors. Colors
    are handled as follows: If the colors coincide, then the
    finite state machines are also considered equal. If not, then
    they are considered as equal if both finite state machines are
    monochromatic.

    EXAMPLES::

        sage: F = FiniteStateMachine([('A', 'B', 1)])
        sage: F == FiniteStateMachine()
        False
        sage: G = FiniteStateMachine([('A', 'B', 1)],
        ....:                        initial_states=['A'])
        sage: F == G
        False
        sage: F.state('A').is_initial = True
        sage: F == G
        True

    This shows the behavior when the states have colors::

        sage: F.state('A').color = 'red'
        sage: G.state('A').color = 'red'
        sage: F == G
        True
        sage: G.state('A').color = 'blue'
        sage: F == G
        False
        sage: F.state('B').color = 'red'
        sage: F.is_monochromatic()
        True
        sage: G.state('B').color = 'blue'
        sage: G.is_monochromatic()
        True
        sage: F == G
        True
    """
    if not is_FiniteStateMachine(right):
        raise TypeError('Only instances of FiniteStateMachine '
                        'can be compared.')
    # Cheap pre-check before comparing state by state.
    if len(left._states_) != len(right._states_):
        return False
    colors_equal = True
    for state in left.iter_states():
        try:
            right_state = right.state(state.label())
        except LookupError:
            # No state with this label on the right-hand side.
            return False

        # we handle colors separately
        if not state.fully_equal(right_state, compare_color=False):
            return False
        if state.color != right_state.color:
            # Remember the mismatch; it may still be tolerated
            # below if both machines are monochromatic.
            colors_equal = False

        left_transitions = state.transitions
        right_transitions = right.state(state).transitions
        if len(left_transitions) != len(right_transitions):
            return False
        for t in left_transitions:
            if t not in right_transitions:
                return False

    # handle colors
    if colors_equal:
        return True
    if left.is_monochromatic() and right.is_monochromatic():
        # Uniform (but different) colorings are considered equal.
        return True
    return False
def __ne__(left, right):
    """
    Tests for inequality, complement of :meth:`.__eq__`.

    INPUT:

    - ``left`` -- a finite state machine.

    - ``right`` -- a finite state machine.

    OUTPUT:

    True or False.

    EXAMPLES::

        sage: E = FiniteStateMachine([('A', 'B', 0)])
        sage: F = Automaton([('A', 'B', 0)])
        sage: G = Transducer([('A', 'B', 0, 1)])
        sage: E == F
        True
        sage: E == G
        False
    """
    # Simply negate the equality test.
    equal = (left == right)
    return not equal
def __contains__(self, item):
    """
    Return true if the finite state machine contains the state or
    transition ``item``. Note that only the labels of the states
    and the input and output words are tested.

    INPUT:

    - ``item`` -- a state or a transition.

    OUTPUT:

    True or False.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
        sage: F = FiniteStateMachine([('A', 'B', 0), ('B', 'A', 1)])
        sage: FSMState('A', is_initial=True) in F
        True
        sage: 'A' in F
        False
        sage: FSMTransition('A', 'B', 0) in F
        True
    """
    # Dispatch on the kind of item; anything that is neither a
    # state nor a transition is never contained.
    if is_FSMState(item):
        return self.has_state(item)
    elif is_FSMTransition(item):
        return self.has_transition(item)
    else:
        return False
def is_Markov_chain(self, is_zero=None):
    """
    Check whether ``self`` is a Markov chain where the transition
    probabilities are modeled as input labels.

    INPUT:

    - ``is_zero`` -- by default (``is_zero=None``), checking for
      zero is simply done by
      :meth:`~sage.structure.element.Element.is_zero`. This
      parameter can be used to provide a more sophisticated check
      for zero, e.g. in the case of symbolic probabilities, see
      the examples below.

    OUTPUT:

    ``True`` or ``False``.

    :attr:`on_duplicate_transition` must be
    :func:`duplicate_transition_add_input` and the sum of the input
    weights of the transitions leaving a state must add up to 1.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: F = Transducer([[0, 0, 1/4, 0], [0, 1, 3/4, 1],
        ....:                 [1, 0, 1/2, 0], [1, 1, 1/2, 1]],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: F.is_Markov_chain()
        True

    :attr:`on_duplicate_transition` must be
    :func:`duplicate_transition_add_input`::

        sage: F = Transducer([[0, 0, 1/4, 0], [0, 1, 3/4, 1],
        ....:                 [1, 0, 1/2, 0], [1, 1, 1/2, 1]])
        sage: F.is_Markov_chain()
        False

    Sum of input labels of the transitions leaving states must be 1::

        sage: F = Transducer([[0, 0, 1/4, 0], [0, 1, 3/4, 1],
        ....:                 [1, 0, 1/2, 0]],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: F.is_Markov_chain()
        False

    If the probabilities are variables in the symbolic ring,
    :func:`~sage.symbolic.assumptions.assume` will do the trick::

        sage: var('p q')
        (p, q)
        sage: F = Transducer([(0, 0, p, 1), (0, 0, q, 0)],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: assume(p + q == 1)
        sage: (p + q - 1).is_zero()
        True
        sage: F.is_Markov_chain()
        True
        sage: forget()
        sage: del(p, q)

    If the probabilities are variables in some polynomial ring,
    the parameter ``is_zero`` can be used::

        sage: R.<p, q> = PolynomialRing(QQ)
        sage: def is_zero_polynomial(polynomial):
        ....:     return polynomial in (p + q - 1)*R
        sage: F = Transducer([(0, 0, p, 1), (0, 0, q, 0)],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: F.is_Markov_chain()
        False
        sage: F.is_Markov_chain(is_zero_polynomial)
        True
    """
    # Fall back to the element's own is_zero() unless a custom
    # zero-test was supplied.
    if is_zero is None:
        def is_zero_function(expression):
            return expression.is_zero()
    else:
        is_zero_function = is_zero

    # Probabilities only accumulate correctly under this policy.
    if self.on_duplicate_transition != duplicate_transition_add_input:
        return False

    # Every state's outgoing input weights must sum to 1.
    return all(
        is_zero_function(sum(t.word_in[0] for t in state.transitions) - 1)
        for state in self.states())
#*************************************************************************
# representations / LaTeX
#*************************************************************************
def _repr_(self):
    """
    Represent the finite state machine as "Finite state machine
    with n states" where n is the number of states.

    INPUT:

    Nothing.

    OUTPUT:

    A string.

    EXAMPLES::

        sage: FiniteStateMachine()._repr_()
        'Finite state machine with 0 states'
    """
    number_of_states = len(self._states_)
    return "Finite state machine with %s states" % number_of_states
# How a single letter of an input/output word is rendered in LaTeX
# output; defaults to the generic ``latex`` function.
# ``format_letter`` is the attribute actually consulted and is
# presumably overridden via latex_options() -- confirm there.
default_format_letter = latex
format_letter = default_format_letter
def format_letter_negative(self, letter):
    r"""
    Format negative numbers as overlined numbers, everything
    else by standard LaTeX formatting.

    INPUT:

    ``letter`` -- anything.

    OUTPUT:

    Overlined absolute value if letter is a negative integer,
    :func:`latex(letter) <sage.misc.latex.latex>` otherwise.

    EXAMPLES::

        sage: A = Automaton([(0, 0, -1)])
        sage: map(A.format_letter_negative, [-1, 0, 1, 'a', None])
        ['\\overline{1}', 0, 1, \text{\texttt{a}}, \mbox{\rm None}]
        sage: A.latex_options(format_letter=A.format_letter_negative)
        sage: print(latex(A))
        \begin{tikzpicture}[auto, initial text=, >=latex]
        \node[state] (v0) at (3.000000, 0.000000) {$0$};
        \path[->] (v0) edge[loop above] node {$\overline{1}$} ();
        \end{tikzpicture}
    """
    # Negative integers are typeset as the overlined absolute value;
    # everything else goes through the generic latex().
    if letter in ZZ and letter < 0:
        return r'\overline{%d}' % -letter
    return latex(letter)
def format_transition_label_reversed(self, word):
    r"""
    Format words in transition labels in reversed order.

    INPUT:

    ``word`` -- list of letters.

    OUTPUT:

    String representation of ``word`` suitable to be typeset in
    mathematical mode, letters are written in reversed order.

    This is the reversed version of
    :meth:`.default_format_transition_label`.

    In digit expansions, digits are frequently processed from the
    least significant to the most significant position, but it is
    customary to write the least significant digit at the
    right-most position. Therefore, the labels have to be
    reversed.

    EXAMPLE::

        sage: T = Transducer([(0, 0, 0, [1, 2, 3])])
        sage: T.format_transition_label_reversed([1, 2, 3])
        '3 2 1'
        sage: T.latex_options(format_transition_label=T.format_transition_label_reversed)
        sage: print latex(T)
        \begin{tikzpicture}[auto, initial text=, >=latex]
        \node[state] (v0) at (3.000000, 0.000000) {$0$};
        \path[->] (v0) edge[loop above] node {$0\mid 3 2 1$} ();
        \end{tikzpicture}

    TEST:

    Check that #16357 is fixed::

        sage: T = Transducer()
        sage: T.format_transition_label_reversed([])
        '\\varepsilon'
    """
    # Reverse first, then let the default formatter do the work.
    reversed_word = reversed(word)
    return self.default_format_transition_label(reversed_word)
def default_format_transition_label(self, word):
    r"""
    Default formatting of words in transition labels for LaTeX output.

    INPUT:

    ``word`` -- list of letters

    OUTPUT:

    String representation of ``word`` suitable to be typeset in
    mathematical mode.

    - For a non-empty word: Concatenation of the letters, piped through
      ``self.format_letter`` and separated by blanks.

    - For an empty word:
      ``sage.combinat.finite_state_machine.EmptyWordLaTeX``.

    There is also a variant :meth:`.format_transition_label_reversed`
    writing the words in reversed order.

    EXAMPLES:

    #. Example of a non-empty word::

        sage: T = Transducer()
        sage: print T.default_format_transition_label(
        ....:     ['a', 'alpha', 'a_1', '0', 0, (0, 1)])
        \text{\texttt{a}} \text{\texttt{alpha}}
        \text{\texttt{a{\char`\_}1}} 0 0 \left(0, 1\right)

    #. In the example above, ``'a'`` and ``'alpha'`` should perhaps
       be symbols::

        sage: var('a alpha a_1')
        (a, alpha, a_1)
        sage: print T.default_format_transition_label([a, alpha, a_1])
        a \alpha a_{1}

    #. Example of an empty word::

        sage: print T.default_format_transition_label([])
        \varepsilon

    TEST:

    Check that #16357 is fixed::

        sage: T = Transducer()
        sage: T.default_format_transition_label([])
        '\\varepsilon'
        sage: T.default_format_transition_label(iter([]))
        '\\varepsilon'
    """
    # Lazily format each letter and join with blanks; an empty
    # word (or exhausted iterator) yields the empty string, for
    # which the module-level placeholder EmptyWordLaTeX is used.
    rendered = " ".join(self.format_letter(letter) for letter in word)
    if rendered:
        return rendered
    return EmptyWordLaTeX


format_transition_label = default_format_transition_label
    def latex_options(self,
                      coordinates=None,
                      format_state_label=None,
                      format_letter=None,
                      format_transition_label=None,
                      loop_where=None,
                      initial_where=None,
                      accepting_style=None,
                      accepting_distance=None,
                      accepting_where=None,
                      accepting_show_empty=None):
        r"""
        Set options for LaTeX output via
        :func:`~sage.misc.latex.latex` and therefore
        :func:`~sage.misc.latex.view`.

        INPUT:

        - ``coordinates`` -- a dictionary or a function mapping labels
          of states to pairs interpreted as coordinates. If no
          coordinates are given, states a placed equidistantly on a
          circle of radius `3`. See also :meth:`.set_coordinates`.

        - ``format_state_label`` -- a function mapping labels of
          states to a string suitable for typesetting in LaTeX's
          mathematics mode. If not given, :func:`~sage.misc.latex.latex`
          is used.

        - ``format_letter`` -- a function mapping letters of the input
          and output alphabets to a string suitable for typesetting in
          LaTeX's mathematics mode. If not given,
          :meth:`.default_format_transition_label` uses
          :func:`~sage.misc.latex.latex`.

        - ``format_transition_label`` -- a function mapping words over
          the input and output alphabets to a string suitable for
          typesetting in LaTeX's mathematics mode. If not given,
          :meth:`.default_format_transition_label` is used.

        - ``loop_where`` -- a dictionary or a function mapping labels of
          initial states to one of ``'above'``, ``'left'``, ``'below'``,
          ``'right'``. If not given, ``'above'`` is used.

        - ``initial_where`` -- a dictionary or a function mapping
          labels of initial states to one of ``'above'``, ``'left'``,
          ``'below'``, ``'right'``. If not given, TikZ' default
          (currently ``'left'``) is used.

        - ``accepting_style`` -- one of ``'accepting by double'`` and
          ``'accepting by arrow'``. If not given, ``'accepting by
          double'`` is used unless there are non-empty final output
          words.

        - ``accepting_distance`` -- a string giving a LaTeX length
          used for the length of the arrow leading from a final state.
          If not given, TikZ' default (currently ``'3ex'``) is used
          unless there are non-empty final output words, in which case
          ``'7ex'`` is used.

        - ``accepting_where`` -- a dictionary or a function mapping
          labels of final states to one of ``'above'``, ``'left'``,
          ``'below'``, ``'right'``. If not given, TikZ' default
          (currently ``'right'``) is used. If the final state has a
          final output word, it is also possible to give an angle
          in degrees.

        - ``accepting_show_empty`` -- if ``True`` the arrow of an
          empty final output word is labeled as well. Note that this
          implicitly implies ``accepting_style='accepting by
          arrow'``. If not given, the default ``False`` is used.

        OUTPUT:

        Nothing.

        As TikZ (cf. the :wikipedia:`PGF/TikZ`) is used to typeset
        the graphics, the syntax is oriented on TikZ' syntax.

        This is a convenience function collecting all options for
        LaTeX output. All of its functionality can also be achieved by
        directly setting the attributes

        - ``coordinates``, ``format_label``, ``loop_where``,
          ``initial_where``, and ``accepting_where`` of
          :class:`FSMState` (here, ``format_label`` is a callable
          without arguments, everything else is a specific value);

        - ``format_label`` of :class:`FSMTransition` (``format_label``
          is a callable without arguments);

        - ``format_state_label``, ``format_letter``,
          ``format_transition_label``, ``accepting_style``,
          ``accepting_distance``, and ``accepting_show_empty``
          of :class:`FiniteStateMachine`.

        This function, however, also (somewhat) checks its input and
        serves to collect documentation on all these options.

        The function can be called several times, only those arguments
        which are not ``None`` are taken into account. By the same
        means, it can be combined with directly setting some
        attributes as outlined above.

        EXAMPLES:

        See also the section on :ref:`finite_state_machine_LaTeX_output`
        in the introductory examples of this module.

        ::

            sage: T = Transducer(initial_states=[4],
            ....:                final_states=[0, 3])
            sage: for j in srange(4):
            ....:     T.add_transition(4, j, 0, [0, j])
            ....:     T.add_transition(j, 4, 0, [0, -j])
            ....:     T.add_transition(j, j, 0, 0)
            Transition from 4 to 0: 0|0,0
            Transition from 0 to 4: 0|0,0
            Transition from 0 to 0: 0|0
            Transition from 4 to 1: 0|0,1
            Transition from 1 to 4: 0|0,-1
            Transition from 1 to 1: 0|0
            Transition from 4 to 2: 0|0,2
            Transition from 2 to 4: 0|0,-2
            Transition from 2 to 2: 0|0
            Transition from 4 to 3: 0|0,3
            Transition from 3 to 4: 0|0,-3
            Transition from 3 to 3: 0|0
            sage: T.add_transition(4, 4, 0, 0)
            Transition from 4 to 4: 0|0
            sage: T.state(3).final_word_out = [0, 0]
            sage: T.latex_options(
            ....:     coordinates={4: (0, 0),
            ....:                  0: (-6, 3),
            ....:                  1: (-2, 3),
            ....:                  2: (2, 3),
            ....:                  3: (6, 3)},
            ....:     format_state_label=lambda x: r'\mathbf{%s}' % x,
            ....:     format_letter=lambda x: r'w_{%s}' % x,
            ....:     format_transition_label=lambda x:
            ....:         r"{\scriptstyle %s}" % T.default_format_transition_label(x),
            ....:     loop_where={4: 'below', 0: 'left', 1: 'above',
            ....:                 2: 'right', 3:'below'},
            ....:     initial_where=lambda x: 'above',
            ....:     accepting_style='accepting by double',
            ....:     accepting_distance='10ex',
            ....:     accepting_where={0: 'left', 3: 45}
            ....:     )
            sage: T.state(4).format_label=lambda: r'\mathcal{I}'
            sage: latex(T)
            \begin{tikzpicture}[auto, initial text=, >=latex]
            \node[state, initial, initial where=above] (v0) at (0.000000, 0.000000) {$\mathcal{I}$};
            \node[state, accepting, accepting where=left] (v1) at (-6.000000, 3.000000) {$\mathbf{0}$};
            \node[state, accepting, accepting where=45] (v2) at (6.000000, 3.000000) {$\mathbf{3}$};
            \path[->] (v2.45.00) edge node[rotate=45.00, anchor=south] {$\$ \mid {\scriptstyle w_{0} w_{0}}$} ++(45.00:10ex);
            \node[state] (v3) at (-2.000000, 3.000000) {$\mathbf{1}$};
            \node[state] (v4) at (2.000000, 3.000000) {$\mathbf{2}$};
            \path[->] (v1) edge[loop left] node[rotate=90, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v1.-21.57) edge node[rotate=-26.57, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{0}}$} (v0.148.43);
            \path[->] (v3) edge[loop above] node {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v3.-51.31) edge node[rotate=-56.31, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{-1}}$} (v0.118.69);
            \path[->] (v4) edge[loop right] node[rotate=90, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v4.-118.69) edge node[rotate=56.31, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{-2}}$} (v0.51.31);
            \path[->] (v2) edge[loop below] node {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \path[->] (v2.-148.43) edge node[rotate=26.57, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{-3}}$} (v0.21.57);
            \path[->] (v0.158.43) edge node[rotate=333.43, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{0}}$} (v1.328.43);
            \path[->] (v0.128.69) edge node[rotate=303.69, anchor=north] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{1}}$} (v3.298.69);
            \path[->] (v0.61.31) edge node[rotate=56.31, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{2}}$} (v4.231.31);
            \path[->] (v0.31.57) edge node[rotate=26.57, anchor=south] {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0} w_{3}}$} (v2.201.57);
            \path[->] (v0) edge[loop below] node {${\scriptstyle w_{0}}\mid {\scriptstyle w_{0}}$} ();
            \end{tikzpicture}
            sage: view(T) # not tested

        To actually see this, use the live documentation in the Sage notebook
        and execute the cells.

        By changing some of the options, we get the following output::

            sage: T.latex_options(
            ....:     format_transition_label=T.default_format_transition_label,
            ....:     accepting_style='accepting by arrow',
            ....:     accepting_show_empty=True
            ....:     )
            sage: latex(T)
            \begin{tikzpicture}[auto, initial text=, >=latex, accepting text=, accepting/.style=accepting by arrow, accepting distance=10ex]
            \node[state, initial, initial where=above] (v0) at (0.000000, 0.000000) {$\mathcal{I}$};
            \node[state] (v1) at (-6.000000, 3.000000) {$\mathbf{0}$};
            \path[->] (v1.180.00) edge node[rotate=360.00, anchor=south] {$\$ \mid \varepsilon$} ++(180.00:10ex);
            \node[state] (v2) at (6.000000, 3.000000) {$\mathbf{3}$};
            \path[->] (v2.45.00) edge node[rotate=45.00, anchor=south] {$\$ \mid w_{0} w_{0}$} ++(45.00:10ex);
            \node[state] (v3) at (-2.000000, 3.000000) {$\mathbf{1}$};
            \node[state] (v4) at (2.000000, 3.000000) {$\mathbf{2}$};
            \path[->] (v1) edge[loop left] node[rotate=90, anchor=south] {$w_{0}\mid w_{0}$} ();
            \path[->] (v1.-21.57) edge node[rotate=-26.57, anchor=south] {$w_{0}\mid w_{0} w_{0}$} (v0.148.43);
            \path[->] (v3) edge[loop above] node {$w_{0}\mid w_{0}$} ();
            \path[->] (v3.-51.31) edge node[rotate=-56.31, anchor=south] {$w_{0}\mid w_{0} w_{-1}$} (v0.118.69);
            \path[->] (v4) edge[loop right] node[rotate=90, anchor=north] {$w_{0}\mid w_{0}$} ();
            \path[->] (v4.-118.69) edge node[rotate=56.31, anchor=north] {$w_{0}\mid w_{0} w_{-2}$} (v0.51.31);
            \path[->] (v2) edge[loop below] node {$w_{0}\mid w_{0}$} ();
            \path[->] (v2.-148.43) edge node[rotate=26.57, anchor=north] {$w_{0}\mid w_{0} w_{-3}$} (v0.21.57);
            \path[->] (v0.158.43) edge node[rotate=333.43, anchor=north] {$w_{0}\mid w_{0} w_{0}$} (v1.328.43);
            \path[->] (v0.128.69) edge node[rotate=303.69, anchor=north] {$w_{0}\mid w_{0} w_{1}$} (v3.298.69);
            \path[->] (v0.61.31) edge node[rotate=56.31, anchor=south] {$w_{0}\mid w_{0} w_{2}$} (v4.231.31);
            \path[->] (v0.31.57) edge node[rotate=26.57, anchor=south] {$w_{0}\mid w_{0} w_{3}$} (v2.201.57);
            \path[->] (v0) edge[loop below] node {$w_{0}\mid w_{0}$} ();
            \end{tikzpicture}
            sage: view(T) # not tested

        TESTS::

            sage: T.latex_options(format_state_label='Nothing')
            Traceback (most recent call last):
            ...
            TypeError: format_state_label must be callable.
            sage: T.latex_options(format_letter='')
            Traceback (most recent call last):
            ...
            TypeError: format_letter must be callable.
            sage: T.latex_options(format_transition_label='')
            Traceback (most recent call last):
            ...
            TypeError: format_transition_label must be callable.
            sage: T.latex_options(loop_where=37)
            Traceback (most recent call last):
            ...
            TypeError: loop_where must be a callable or a
            dictionary.
            sage: T.latex_options(loop_where=lambda x: 'top')
            Traceback (most recent call last):
            ...
            ValueError: loop_where for 4 must be in ['below',
            'right', 'above', 'left'].
            sage: T.latex_options(initial_where=90)
            Traceback (most recent call last):
            ...
            TypeError: initial_where must be a callable or a
            dictionary.
            sage: T.latex_options(initial_where=lambda x: 'top')
            Traceback (most recent call last):
            ...
            ValueError: initial_where for 4 must be in ['below',
            'right', 'above', 'left'].
            sage: T.latex_options(accepting_style='fancy')
            Traceback (most recent call last):
            ...
            ValueError: accepting_style must be in ['accepting by
            double', 'accepting by arrow'].
            sage: T.latex_options(accepting_where=90)
            Traceback (most recent call last):
            ...
            TypeError: accepting_where must be a callable or a
            dictionary.
            sage: T.latex_options(accepting_where=lambda x: 'top')
            Traceback (most recent call last):
            ...
            ValueError: accepting_where for 0 must be in ['below',
            'right', 'above', 'left'].
            sage: T.latex_options(accepting_where={0: 'above', 3: 'top'})
            Traceback (most recent call last):
            ...
            ValueError: accepting_where for 3 must be a real number or
            be in ['below', 'right', 'above', 'left'].
        """
        # Only arguments that are not None are processed; everything
        # else keeps its previously set (or default) value.
        if coordinates is not None:
            self.set_coordinates(coordinates)
        if format_state_label is not None:
            # Stored on the machine; consumed later by _latex_().
            if not hasattr(format_state_label, '__call__'):
                raise TypeError('format_state_label must be callable.')
            self.format_state_label = format_state_label
        if format_letter is not None:
            if not hasattr(format_letter, '__call__'):
                raise TypeError('format_letter must be callable.')
            self.format_letter = format_letter
        if format_transition_label is not None:
            if not hasattr(format_transition_label, '__call__'):
                raise TypeError('format_transition_label must be callable.')
            self.format_transition_label = format_transition_label
        if loop_where is not None:
            # Valid positions are the keys of the module-level
            # tikz_automata_where dictionary.
            permissible = list(tikz_automata_where.iterkeys())
            for state in self.states():
                if hasattr(loop_where, '__call__'):
                    where = loop_where(state.label())
                else:
                    # Treat loop_where as a dictionary; a non-subscriptable
                    # argument raises TypeError, a missing key simply
                    # leaves that state untouched.
                    try:
                        where = loop_where[state.label()]
                    except TypeError:
                        raise TypeError("loop_where must be a "
                                        "callable or a dictionary.")
                    except KeyError:
                        continue
                if where in permissible:
                    state.loop_where = where
                else:
                    raise ValueError('loop_where for %s must be in %s.' %
                                     (state.label(), permissible))
        if initial_where is not None:
            # Same validation scheme as loop_where, but only for
            # initial states.
            permissible = list(tikz_automata_where.iterkeys())
            for state in self.iter_initial_states():
                if hasattr(initial_where, '__call__'):
                    where = initial_where(state.label())
                else:
                    try:
                        where = initial_where[state.label()]
                    except TypeError:
                        raise TypeError("initial_where must be a "
                                        "callable or a dictionary.")
                    except KeyError:
                        continue
                if where in permissible:
                    state.initial_where = where
                else:
                    raise ValueError('initial_where for %s must be in %s.' %
                                     (state.label(), permissible))
        if accepting_style is not None:
            permissible = ['accepting by double',
                           'accepting by arrow']
            if accepting_style in permissible:
                self.accepting_style = accepting_style
            else:
                raise ValueError('accepting_style must be in %s.' %
                                 permissible)
        if accepting_distance is not None:
            # A LaTeX length (e.g. '10ex'); not validated here.
            self.accepting_distance = accepting_distance
        if accepting_where is not None:
            permissible = list(tikz_automata_where.iterkeys())
            for state in self.iter_final_states():
                if hasattr(accepting_where, '__call__'):
                    where = accepting_where(state.label())
                else:
                    try:
                        where = accepting_where[state.label()]
                    except TypeError:
                        raise TypeError("accepting_where must be a "
                                        "callable or a dictionary.")
                    except KeyError:
                        continue
                if where in permissible:
                    state.accepting_where = where
                elif hasattr(state, 'final_word_out') \
                        and state.final_word_out:
                    # States with a final output word may alternatively
                    # get an angle (a real number, in degrees).
                    if where in RR:
                        state.accepting_where = where
                    else:
                        raise ValueError('accepting_where for %s must '
                                         'be a real number or be in %s.' %
                                         (state.label(), permissible))
                else:
                    raise ValueError('accepting_where for %s must be in %s.' %
                                     (state.label(), permissible))
        if accepting_show_empty is not None:
            self.accepting_show_empty = accepting_show_empty
    def _latex_(self):
        r"""
        Returns a LaTeX code for the graph of the finite state machine.

        INPUT:

        Nothing.

        OUTPUT:

        A string.

        EXAMPLES::

            sage: F = FiniteStateMachine([('A', 'B', 1, 2)],
            ....:                        initial_states=['A'],
            ....:                        final_states=['B'])
            sage: F.state('A').initial_where='below'
            sage: print latex(F)  # indirect doctest
            \begin{tikzpicture}[auto, initial text=, >=latex]
            \node[state, initial, initial where=below] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
            \node[state, accepting] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
            \path[->] (v0) edge node[rotate=360.00, anchor=south] {$ $} (v1);
            \end{tikzpicture}
        """
        def label_rotation(angle, both_directions):
            """
            Given an angle of a transition, compute the TikZ string to
            rotate the label.
            """
            # Keep the label readable: flip labels on edges pointing
            # leftwards by adding 180 degrees.
            angle_label = angle
            anchor_label = "south"
            if angle > 90 or angle <= -90:
                angle_label = angle + 180
                if both_directions:
                    # if transitions in both directions, the transition to the
                    # left has its label below the transition, otherwise above
                    anchor_label = "north"
            return "rotate=%.2f, anchor=%s" % (angle_label, anchor_label)

        setup_latex_preamble()
        options = ["auto", "initial text=", ">=latex"]
        # Determine whether any final state carries a non-empty final
        # output word; this influences the default accepting style.
        nonempty_final_word_out = False
        for state in self.iter_final_states():
            if state.final_word_out:
                nonempty_final_word_out = True
                break
        # Explicitly set attributes (via latex_options) win over the
        # computed defaults.
        if hasattr(self, "accepting_style"):
            accepting_style = self.accepting_style
        elif nonempty_final_word_out:
            accepting_style = "accepting by arrow"
        else:
            accepting_style = "accepting by double"
        if accepting_style == "accepting by arrow":
            options.append("accepting text=")
            options.append("accepting/.style=%s" % accepting_style)
        if hasattr(self, "accepting_distance"):
            accepting_distance = self.accepting_distance
        elif nonempty_final_word_out:
            accepting_distance = "7ex"
        else:
            accepting_distance = None
        if accepting_style == "accepting by arrow" and accepting_distance:
            options.append("accepting distance=%s"
                           % accepting_distance)
        if hasattr(self, "accepting_show_empty"):
            accepting_show_empty = self.accepting_show_empty
        else:
            accepting_show_empty = False

        result = "\\begin{tikzpicture}[%s]\n" % ", ".join(options)
        j = 0;
        # Emit one \node per state; j numbers the TikZ nodes (v0, v1, ...).
        for vertex in self.iter_states():
            if not hasattr(vertex, "coordinates"):
                # Default layout: equidistant on a circle of radius 3.
                vertex.coordinates = (3*cos(2*pi*j/len(self.states())),
                                      3*sin(2*pi*j/len(self.states())))
            options = ""
            if vertex.is_final:
                if not (vertex.final_word_out
                        and accepting_style == "accepting by arrow") \
                        and not accepting_show_empty:
                    # otherwise, we draw a custom made accepting path
                    # with label below
                    options += ", accepting"
                    if hasattr(vertex, "accepting_where"):
                        options += ", accepting where=%s" % (
                            vertex.accepting_where,)
            if vertex.is_initial:
                options += ", initial"
            if hasattr(vertex, "initial_where"):
                options += ", initial where=%s" % vertex.initial_where
            # Label precedence: per-state format_label, then the
            # machine-wide format_state_label, then plain latex().
            if hasattr(vertex, "format_label"):
                label = vertex.format_label()
            elif hasattr(self, "format_state_label"):
                label = self.format_state_label(vertex)
            else:
                label = latex(vertex.label())
            result += "\\node[state%s] (v%d) at (%f, %f) {$%s$};\n" % (
                options, j, vertex.coordinates[0],
                vertex.coordinates[1], label)
            vertex._number_ = j
            if vertex.is_final and (vertex.final_word_out or accepting_show_empty):
                # Draw the custom accepting arrow labeled with the
                # final output word.
                angle = 0
                if hasattr(vertex, "accepting_where"):
                    angle = tikz_automata_where.get(vertex.accepting_where,
                                                    vertex.accepting_where)
                result += "\\path[->] (v%d.%.2f) edge node[%s] {$%s \mid %s$} ++(%.2f:%s);\n" % (
                    j, angle,
                    label_rotation(angle, False),
                    EndOfWordLaTeX,
                    self.format_transition_label(vertex.final_word_out),
                    angle, accepting_distance)
            j += 1
        def key_function(s):
            return (s.from_state, s.to_state)
        # We use an OrderedDict instead of a dict in order to have a
        # defined ordering of the transitions in the output. See
        # http://trac.sagemath.org/ticket/16580#comment:3 . As the
        # transitions have to be sorted anyway, the performance
        # penalty should be bearable; nevertheless, this is only
        # required for doctests.
        adjacent = OrderedDict(
            (pair, list(transitions))
            for pair, transitions in
            itertools.groupby(
                sorted(self.iter_transitions(),
                       key=key_function),
                key=key_function
                ))
        for ((source, target), transitions) in adjacent.iteritems():
            if len(transitions) > 0:
                labels = []
                for transition in transitions:
                    if hasattr(transition, "format_label"):
                        labels.append(transition.format_label())
                    else:
                        labels.append(self._latex_transition_label_(
                                transition, self.format_transition_label))
                label = ", ".join(labels)
                if source != target:
                    # Straight edge; compute its angle to rotate the label.
                    angle = atan2(
                        target.coordinates[1] - source.coordinates[1],
                        target.coordinates[0] - source.coordinates[0]) * 180/pi
                    both_directions = (target, source) in adjacent
                    if both_directions:
                        # Offset start/end anchors so the two opposite
                        # edges do not overlap.
                        angle_source = ".%.2f" % ((angle + 5).n(),)
                        angle_target = ".%.2f" % ((angle + 175).n(),)
                    else:
                        angle_source = ""
                        angle_target = ""
                    result += "\\path[->] (v%d%s) edge node[%s] {$%s$} (v%d%s);\n" % (
                        source._number_, angle_source,
                        label_rotation(angle, both_directions),
                        label,
                        target._number_, angle_target)
                else:
                    # Loop edge; position defaults to 'above' unless the
                    # state specifies loop_where.
                    loop_where = "above"
                    if hasattr(source, "loop_where"):
                        loop_where = source.loop_where
                    rotation = {'left': '[rotate=90, anchor=south]',
                                'right': '[rotate=90, anchor=north]'}
                    result += "\\path[->] (v%d) edge[loop %s] node%s {$%s$} ();\n" % (
                        source._number_,
                        loop_where, rotation.get(loop_where, ''),
                        label)
        result += "\\end{tikzpicture}"
        return result
    def _latex_transition_label_(self, transition, format_function=latex):
        r"""
        Returns the proper transition label.

        In the generic :class:`FiniteStateMachine` this is a single
        blank; subclasses (e.g. automata/transducers) are expected to
        override it with something meaningful.

        INPUT:

        - ``transition`` - a transition

        - ``format_function`` - a function formatting the labels

        OUTPUT:

        A string.

        TESTS::

            sage: F = FiniteStateMachine([('A', 'B', 0, 1)])
            sage: t = F.transitions()[0]
            sage: F._latex_transition_label_(t)
            ' '
        """
        return ' '
def set_coordinates(self, coordinates, default=True):
"""
Set coordinates of the states for the LaTeX representation by
a dictionary or a function mapping labels to coordinates.
INPUT:
- ``coordinates`` -- a dictionary or a function mapping labels
of states to pairs interpreted as coordinates.
- ``default`` -- If ``True``, then states not given by
``coordinates`` get a default position on a circle of
radius 3.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = Automaton([[0, 1, 1], [1, 2, 2], [2, 0, 0]])
sage: F.set_coordinates({0: (0, 0), 1: (2, 0), 2: (1, 1)})
sage: F.state(0).coordinates
(0, 0)
We can also use a function to determine the coordinates::
sage: F = Automaton([[0, 1, 1], [1, 2, 2], [2, 0, 0]])
sage: F.set_coordinates(lambda l: (l, 3/(l+1)))
sage: F.state(2).coordinates
(2, 1)
"""
states_without_coordinates = []
for state in self.iter_states():
try:
state.coordinates = coordinates[state.label()]
continue
except (KeyError, TypeError):
pass
try:
state.coordinates = coordinates(state.label())
continue
except TypeError:
pass
states_without_coordinates.append(state)
if default:
n = len(states_without_coordinates)
for j, state in enumerate(states_without_coordinates):
state.coordinates = (3*cos(2*pi*j/n),
3*sin(2*pi*j/n))
#*************************************************************************
# other
#*************************************************************************
def _matrix_(self, R=None):
"""
Returns the adjacency matrix of the finite state machine.
See :meth:`.adjacency_matrix` for more information.
EXAMPLES::
sage: B = FiniteStateMachine({0: {0: (0, 0), 'a': (1, 0)},
....: 'a': {2: (0, 0), 3: (1, 0)},
....: 2:{0:(1, 1), 4:(0, 0)},
....: 3:{'a':(0, 1), 2:(1, 1)},
....: 4:{4:(1, 1), 3:(0, 1)}},
....: initial_states=[0])
sage: B._matrix_()
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
"""
return self.adjacency_matrix()
def adjacency_matrix(self, input=None,
entry=None):
"""
Returns the adjacency matrix of the underlying graph.
INPUT:
- ``input`` -- Only transitions with input label ``input`` are
respected.
- ``entry`` -- The function ``entry`` takes a transition and the
return value is written in the matrix as the entry
``(transition.from_state, transition.to_state)``. The default
value (``None``) of entry takes the variable ``x`` to the
power of the sum of the output word of the transition.
OUTPUT:
A matrix.
If any label of a state is not an integer, the finite state
machine is relabeled at the beginning. If there are more than
one transitions between two states, then the different return
values of ``entry`` are added up.
EXAMPLES::
sage: B = FiniteStateMachine({0:{0:(0, 0), 'a':(1, 0)},
....: 'a':{2:(0, 0), 3:(1, 0)},
....: 2:{0:(1, 1), 4:(0, 0)},
....: 3:{'a':(0, 1), 2:(1, 1)},
....: 4:{4:(1, 1), 3:(0, 1)}},
....: initial_states=[0])
sage: B.adjacency_matrix()
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
This is equivalent to::
sage: matrix(B)
[1 1 0 0 0]
[0 0 1 1 0]
[x 0 0 0 1]
[0 x x 0 0]
[0 0 0 x x]
It is also possible to use other entries in the adjacency matrix::
sage: B.adjacency_matrix(entry=(lambda transition: 1))
[1 1 0 0 0]
[0 0 1 1 0]
[1 0 0 0 1]
[0 1 1 0 0]
[0 0 0 1 1]
sage: B.adjacency_matrix(1, entry=(lambda transition:
....: exp(I*transition.word_out[0]*var('t'))))
[ 0 1 0 0 0]
[ 0 0 0 1 0]
[e^(I*t) 0 0 0 0]
[ 0 0 e^(I*t) 0 0]
[ 0 0 0 0 e^(I*t)]
sage: a = Automaton([(0, 1, 0),
....: (1, 2, 0),
....: (2, 0, 1),
....: (2, 1, 0)],
....: initial_states=[0],
....: final_states=[0])
sage: a.adjacency_matrix()
[0 1 0]
[0 0 1]
[1 1 0]
"""
def default_function(transitions):
var('x')
return x**sum(transition.word_out)
if entry is None:
entry = default_function
relabeledFSM = self
l = len(relabeledFSM.states())
for state in self.iter_states():
if state.label() not in ZZ or state.label() >= l \
or state.label() < 0:
relabeledFSM = self.relabeled()
break
dictionary = {}
for transition in relabeledFSM.iter_transitions():
if input is None or transition.word_in == [input]:
if (transition.from_state.label(),
transition.to_state.label()) in dictionary:
dictionary[(transition.from_state.label(),
transition.to_state.label())] \
+= entry(transition)
else:
dictionary[(transition.from_state.label(),
transition.to_state.label())] \
= entry(transition)
return matrix(len(relabeledFSM.states()), dictionary)
def determine_alphabets(self, reset=True):
"""
Determines the input and output alphabet according to the
transitions in self.
INPUT:
- ``reset`` -- If reset is ``True``, then the existing input
and output alphabets are erased, otherwise new letters are
appended to the existing alphabets.
OUTPUT:
Nothing.
After this operation the input alphabet and the output
alphabet of self are a list of letters.
.. TODO::
At the moment, the letters of the alphabets need to be hashable.
EXAMPLES::
sage: T = Transducer([(1, 1, 1, 0), (1, 2, 2, 1),
....: (2, 2, 1, 1), (2, 2, 0, 0)],
....: final_states=[1],
....: determine_alphabets=False)
sage: T.state(1).final_word_out = [1, 4]
sage: (T.input_alphabet, T.output_alphabet)
(None, None)
sage: T.determine_alphabets()
sage: (T.input_alphabet, T.output_alphabet)
([0, 1, 2], [0, 1, 4])
"""
if reset:
ain = set()
aout = set()
else:
ain = set(self.input_alphabet)
aout = set(self.output_alphabet)
for t in self.iter_transitions():
for letter in t.word_in:
ain.add(letter)
for letter in t.word_out:
aout.add(letter)
for s in self.iter_final_states():
for letter in s.final_word_out:
aout.add(letter)
self.input_alphabet = list(ain)
self.output_alphabet = list(aout)
#*************************************************************************
# get states and transitions
#*************************************************************************
def states(self):
"""
Returns the states of the finite state machine.
INPUT:
Nothing.
OUTPUT:
The states of the finite state machine as list.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: FSM.states()
['1', '2']
"""
return copy(self._states_)
def iter_states(self):
"""
Returns an iterator of the states.
INPUT:
Nothing.
OUTPUT:
An iterator of the states of the finite state machine.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [s.label() for s in FSM.iter_states()]
['1', '2']
"""
return iter(self._states_)
def transitions(self, from_state=None):
"""
Returns a list of all transitions.
INPUT:
- ``from_state`` -- (default: ``None``) If ``from_state`` is
given, then a list of transitions starting there is given.
OUTPUT:
A list of all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: FSM.transitions()
[Transition from '1' to '2': 1|-,
Transition from '2' to '2': 0|-]
"""
return list(self.iter_transitions(from_state))
def iter_transitions(self, from_state=None):
"""
Returns an iterator of all transitions.
INPUT:
- ``from_state`` -- (default: ``None``) If ``from_state`` is
given, then a list of transitions starting there is given.
OUTPUT:
An iterator of all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions('1')]
[('1', '2')]
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions('2')]
[('2', '2')]
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM.iter_transitions()]
[('1', '2'), ('2', '2')]
"""
if from_state is None:
return self._iter_transitions_all_()
else:
return iter(self.state(from_state).transitions)
def _iter_transitions_all_(self):
"""
Returns an iterator over all transitions.
INPUT:
Nothing.
OUTPUT:
An iterator over all transitions.
EXAMPLES::
sage: FSM = Automaton([('1', '2', 1), ('2', '2', 0)])
sage: [(t.from_state.label(), t.to_state.label())
....: for t in FSM._iter_transitions_all_()]
[('1', '2'), ('2', '2')]
"""
for state in self.iter_states():
for t in state.transitions:
yield t
def initial_states(self):
"""
Returns a list of all initial states.
INPUT:
Nothing.
OUTPUT:
A list of all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True)
sage: B = FSMState('B')
sage: F = FiniteStateMachine([(A, B, 1, 0)])
sage: F.initial_states()
['A']
"""
return list(self.iter_initial_states())
def iter_initial_states(self):
"""
Returns an iterator of the initial states.
INPUT:
Nothing.
OUTPUT:
An iterator over all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial=True)
sage: B = FSMState('B')
sage: F = FiniteStateMachine([(A, B, 1, 0)])
sage: [s.label() for s in F.iter_initial_states()]
['A']
"""
return itertools.ifilter(lambda s:s.is_initial, self.iter_states())
def final_states(self):
"""
Returns a list of all final states.
INPUT:
Nothing.
OUTPUT:
A list of all final states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True)
sage: B = FSMState('B', is_initial=True)
sage: C = FSMState('C', is_final=True)
sage: F = FiniteStateMachine([(A, B), (A, C)])
sage: F.final_states()
['A', 'C']
"""
return list(self.iter_final_states())
def iter_final_states(self):
"""
Returns an iterator of the final states.
INPUT:
Nothing.
OUTPUT:
An iterator over all initial states.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_final=True)
sage: B = FSMState('B', is_initial=True)
sage: C = FSMState('C', is_final=True)
sage: F = FiniteStateMachine([(A, B), (A, C)])
sage: [s.label() for s in F.iter_final_states()]
['A', 'C']
"""
return itertools.ifilter(lambda s:s.is_final, self.iter_states())
    def state(self, state):
        """
        Returns the state of the finite state machine.

        INPUT:

        - ``state`` -- If ``state`` is not an instance of
          :class:`FSMState`, then it is assumed that it is the label
          of a state.

        OUTPUT:

        Returns the state of the finite state machine corresponding to
        ``state``.

        If no state is found, then a ``LookupError`` is thrown.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: A = FSMState('A')
            sage: FSM = FiniteStateMachine([(A, 'B'), ('C', A)])
            sage: FSM.state('A') == A
            True
            sage: FSM.state('xyz')
            Traceback (most recent call last):
            ...
            LookupError: No state with label xyz found.
        """
        def what(s, switch):
            # With switch True, reduce a state object to its label;
            # with switch False, return the argument unchanged. Used
            # to compare states and labels uniformly below.
            if switch:
                return s.label()
            else:
                return s
        # switch is True iff the caller passed an FSMState (rather
        # than a label).
        switch = is_FSMState(state)

        try:
            # Fast path: the label-indexed dictionary (if it exists).
            return self._states_dict_[what(state, switch)]
        except AttributeError:
            # No _states_dict_ (yet): fall back to a linear search
            # over all states, comparing on the other representation.
            for s in self.iter_states():
                if what(s, not switch) == state:
                    return s
        except KeyError:
            # Label not in the dictionary; fall through to the error.
            pass
        raise LookupError("No state with label %s found." % (what(state, switch),))
def transition(self, transition):
"""
Returns the transition of the finite state machine.
INPUT:
- ``transition`` -- If ``transition`` is not an instance of
:class:`FSMTransition`, then it is assumed that it is a
tuple ``(from_state, to_state, word_in, word_out)``.
OUTPUT:
Returns the transition of the finite state machine
corresponding to ``transition``.
If no transition is found, then a ``LookupError`` is thrown.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t = FSMTransition('A', 'B', 0)
sage: F = FiniteStateMachine([t])
sage: F.transition(('A', 'B', 0))
Transition from 'A' to 'B': 0|-
sage: id(t) == id(F.transition(('A', 'B', 0)))
True
"""
if not is_FSMTransition(transition):
transition = FSMTransition(*transition)
for s in self.iter_transitions(transition.from_state):
if s == transition:
return s
raise LookupError("No transition found.")
#*************************************************************************
# properties (state and transitions)
#*************************************************************************
def has_state(self, state):
"""
Returns whether ``state`` is one of the states of the finite
state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label of a state.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_state('A')
False
"""
try:
self.state(state)
return True
except LookupError:
return False
def has_transition(self, transition):
"""
Returns whether ``transition`` is one of the transitions of
the finite state machine.
INPUT:
- ``transition`` has to be a :class:`FSMTransition`.
OUTPUT:
True or False.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t = FSMTransition('A', 'A', 0, 1)
sage: FiniteStateMachine().has_transition(t)
False
sage: FiniteStateMachine().has_transition(('A', 'A', 0, 1))
Traceback (most recent call last):
...
TypeError: Transition is not an instance of FSMTransition.
"""
if is_FSMTransition(transition):
return transition in self.iter_transitions()
raise TypeError("Transition is not an instance of FSMTransition.")
def has_initial_state(self, state):
"""
Returns whether ``state`` is one of the initial states of the
finite state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label.
OUTPUT:
True or False.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'A')], initial_states=['A'])
sage: F.has_initial_state('A')
True
"""
try:
return self.state(state).is_initial
except LookupError:
return False
def has_initial_states(self):
"""
Returns whether the finite state machine has an initial state.
INPUT:
Nothing.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_initial_states()
False
"""
return len(self.initial_states()) > 0
def has_final_state(self, state):
"""
Returns whether ``state`` is one of the final states of the
finite state machine.
INPUT:
- ``state`` can be a :class:`FSMState` or a label.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine(final_states=['A']).has_final_state('A')
True
"""
try:
return self.state(state).is_final
except LookupError:
return False
def has_final_states(self):
"""
Returns whether the finite state machine has a final state.
INPUT:
Nothing.
OUTPUT:
True or False.
EXAMPLES::
sage: FiniteStateMachine().has_final_states()
False
"""
return len(self.final_states()) > 0
#*************************************************************************
# properties
#*************************************************************************
def is_deterministic(self):
"""
Returns whether the finite finite state machine is deterministic.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
A finite state machine is considered to be deterministic if
each transition has input label of length one and for each
pair `(q,a)` where `q` is a state and `a` is an element of the
input alphabet, there is at most one transition from `q` with
input label `a`.
TESTS::
sage: fsm = FiniteStateMachine()
sage: fsm.add_transition(('A', 'B', 0, []))
Transition from 'A' to 'B': 0|-
sage: fsm.is_deterministic()
True
sage: fsm.add_transition(('A', 'C', 0, []))
Transition from 'A' to 'C': 0|-
sage: fsm.is_deterministic()
False
sage: fsm.add_transition(('A', 'B', [0,1], []))
Transition from 'A' to 'B': 0,1|-
sage: fsm.is_deterministic()
False
"""
for state in self.iter_states():
for transition in state.transitions:
if len(transition.word_in) != 1:
return False
transition_classes_by_word_in = full_group_by(
state.transitions,
key=lambda t: t.word_in)
for key,transition_class in transition_classes_by_word_in:
if len(transition_class) > 1:
return False
return True
def is_complete(self):
"""
Returns whether the finite state machine is complete.
INPUT:
Nothing.
OUTPUT:
``True`` or ``False``.
A finite state machine is considered to be complete if
each transition has an input label of length one and for each
pair `(q, a)` where `q` is a state and `a` is an element of the
input alphabet, there is exactly one transition from `q` with
input label `a`.
EXAMPLES::
sage: fsm = FiniteStateMachine([(0, 0, 0, 0),
....: (0, 1, 1, 1),
....: (1, 1, 0, 0)],
....: determine_alphabets=False)
sage: fsm.is_complete()
Traceback (most recent call last):
...
ValueError: No input alphabet is given. Try calling determine_alphabets().
sage: fsm.input_alphabet = [0, 1]
sage: fsm.is_complete()
False
sage: fsm.add_transition((1, 1, 1, 1))
Transition from 1 to 1: 1|1
sage: fsm.is_complete()
True
sage: fsm.add_transition((0, 0, 1, 0))
Transition from 0 to 0: 1|0
sage: fsm.is_complete()
False
"""
if self.input_alphabet is None:
raise ValueError("No input alphabet is given. "
"Try calling determine_alphabets().")
for state in self.iter_states():
for transition in state.transitions:
if len(transition.word_in) != 1:
return False
transition_classes_by_word_in = full_group_by(
state.transitions,
key=lambda t: t.word_in)
for key, transition_class in transition_classes_by_word_in:
if len(transition_class) > 1:
return False
# all input labels are lists, extract the only element
outgoing_alphabet = [key[0] for key, transition_class in
transition_classes_by_word_in]
if not sorted(self.input_alphabet) == sorted(outgoing_alphabet):
return False
return True
    def is_connected(self):
        """
        Returns whether the finite state machine is connected.
        .. NOTE:: Not implemented yet; calling this always raises.
        TESTS::
            sage: FiniteStateMachine().is_connected()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: connectivity test has not been implemented.
        raise NotImplementedError
#*************************************************************************
# let the finite state machine work
#*************************************************************************
def process(self, *args, **kwargs):
"""
Returns whether the finite state machine accepts the input, the state
where the computation stops and which output is generated.
INPUT:
- ``input_tape`` -- The input tape can be a list with entries from
the input alphabet.
- ``initial_state`` -- (default: ``None``) The state in which
to start. If this parameter is ``None`` and there is only
one initial state in the machine, then this state is taken.
OUTPUT:
A triple, where
- the first entry is ``True`` if the input string is accepted,
- the second gives the reached state after processing the
input tape (This is a state with label ``None`` if the input
could not be processed, i.e., when at one point no
transition to go could be found.), and
- the third gives a list of the output labels used during
processing (in the case the finite state machine runs as
transducer).
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: A = FSMState('A', is_initial = True, is_final = True)
sage: binary_inverter = FiniteStateMachine({A:[(A, 0, 1), (A, 1, 0)]})
sage: binary_inverter.process([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
Alternatively, we can invoke this function by::
sage: binary_inverter([0, 1, 0, 0, 1, 1])
(True, 'A', [1, 0, 1, 1, 0, 0])
::
sage: NAF_ = FSMState('_', is_initial = True, is_final = True)
sage: NAF1 = FSMState('1', is_final = True)
sage: NAF = FiniteStateMachine(
....: {NAF_: [(NAF_, 0), (NAF1, 1)], NAF1: [(NAF_, 0)]})
sage: [NAF.process(w)[0] for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
....: [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
[True, True, False, True, False, False]
Non-deterministic finite state machines cannot be handeled.
::
sage: T = Transducer([(0, 1, 0, 0), (0, 2, 0, 0)],
....: initial_states=[0])
sage: T.process([0])
Traceback (most recent call last):
...
NotImplementedError: Non-deterministic path encountered when processing input.
sage: T = Transducer([(0, 1, [0, 0], 0), (0, 2, [0, 0, 1], 0),
....: (0, 1, 1, 2), (1, 0, [], 1), (1, 1, 1, 3)],
....: initial_states=[0], final_states=[0, 1])
sage: T.process([0])
(False, None, None)
sage: T.process([0, 0])
Traceback (most recent call last):
...
NotImplementedError: Non-deterministic path encountered when processing input.
sage: T.process([1])
(True, 1, [2])
sage: T.process([1, 1])
Traceback (most recent call last):
...
NotImplementedError: process cannot handle epsilon transition leaving state 1.
"""
it = self.iter_process(*args, **kwargs)
for _ in it:
pass
return (it.accept_input, it.current_state, it.output_tape)
    def iter_process(self, input_tape=None, initial_state=None, **kwargs):
        """
        Returns an iterator that processes ``input_tape`` step by step.
        See :meth:`.process` for more informations.
        EXAMPLES::
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = inverter.iter_process(input_tape=[0, 1, 1])
            sage: for _ in it:
            ....:     pass
            sage: it.output_tape
            [1, 0, 0]
        """
        # Thin factory method: all stepping logic lives in FSMProcessIterator.
        return FSMProcessIterator(self, input_tape, initial_state, **kwargs)
#*************************************************************************
# change finite state machine (add/remove state/transitions)
#*************************************************************************
def add_state(self, state):
"""
Adds a state to the finite state machine and returns the new
state. If the state already exists, that existing state is
returned.
INPUT:
- ``state`` is either an instance of
:class:`FSMState` or,
otherwise, a label of a state.
OUTPUT:
The new or existing state.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMState
sage: F = FiniteStateMachine()
sage: A = FSMState('A', is_initial=True)
sage: F.add_state(A)
'A'
"""
try:
return self.state(state)
except LookupError:
pass
# at this point we know that we have a new state
if is_FSMState(state):
s = state
else:
s = FSMState(state)
s.transitions = list()
self._states_.append(s)
try:
self._states_dict_[s.label()] = s
except AttributeError:
pass
return s
def add_states(self, states):
"""
Adds several states. See add_state for more information.
INPUT:
- ``states`` -- a list of states or iterator over states.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine()
sage: F.add_states(['A', 'B'])
sage: F.states()
['A', 'B']
"""
for state in states:
self.add_state(state)
def add_transition(self, *args, **kwargs):
"""
Adds a transition to the finite state machine and returns the
new transition.
If the transition already exists, the return value of
``self.on_duplicate_transition`` is returned. See the
documentation of :class:`FiniteStateMachine`.
INPUT:
The following forms are all accepted:
::
sage: from sage.combinat.finite_state_machine import FSMState, FSMTransition
sage: A = FSMState('A')
sage: B = FSMState('B')
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(FSMTransition(A, B, 0, 1))
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(A, B, 0, 1)
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(A, B, word_in=0, word_out=1)
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition('A', 'B', {'word_in': 0, 'word_out': 1})
Transition from 'A' to 'B': {'word_in': 0, 'word_out': 1}|-
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition(from_state=A, to_state=B,
....: word_in=0, word_out=1)
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition({'from_state': A, 'to_state': B,
....: 'word_in': 0, 'word_out': 1})
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition((A, B, 0, 1))
Transition from 'A' to 'B': 0|1
sage: FSM = FiniteStateMachine()
sage: FSM.add_transition([A, B, 0, 1])
Transition from 'A' to 'B': 0|1
If the states ``A`` and ``B`` are not instances of
:class:`FSMState`, then it is assumed that they are labels of
states.
OUTPUT:
The new transition.
"""
if len(args) + len(kwargs) == 0:
return
if len(args) + len(kwargs) == 1:
if len(args) == 1:
d = args[0]
if is_FSMTransition(d):
return self._add_fsm_transition_(d)
else:
d = next(kwargs.itervalues())
if hasattr(d, 'iteritems'):
args = []
kwargs = d
elif hasattr(d, '__iter__'):
args = d
kwargs = {}
else:
raise TypeError("Cannot decide what to do with input.")
data = dict(zip(
('from_state', 'to_state', 'word_in', 'word_out', 'hook'),
args))
data.update(kwargs)
data['from_state'] = self.add_state(data['from_state'])
data['to_state'] = self.add_state(data['to_state'])
return self._add_fsm_transition_(FSMTransition(**data))
def _add_fsm_transition_(self, t):
"""
Adds a transition.
INPUT:
- ``t`` -- an instance of :class:`FSMTransition`.
OUTPUT:
The new transition.
TESTS::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: F = FiniteStateMachine()
sage: F._add_fsm_transition_(FSMTransition('A', 'B'))
Transition from 'A' to 'B': -|-
"""
try:
existing_transition = self.transition(t)
except LookupError:
pass
else:
return self.on_duplicate_transition(existing_transition, t)
from_state = self.add_state(t.from_state)
self.add_state(t.to_state)
from_state.transitions.append(t)
return t
    def add_from_transition_function(self, function, initial_states=None,
                                     explore_existing_states=True):
        """
        Constructs a finite state machine from a transition function.
        INPUT:
        - ``function`` may return a tuple (new_state, output_word) or a
          list of such tuples.
        - ``initial_states`` -- If no initial states are given, the
          already existing initial states of self are taken.
        - If ``explore_existing_states`` is True (default), then
          already existing states in self (e.g. already given final
          states) will also be processed if they are reachable from
          the initial states.
        OUTPUT:
        Nothing.
        EXAMPLES::
            sage: F = FiniteStateMachine(initial_states=['A'],
            ....:                        input_alphabet=[0, 1])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f)
            sage: F.transitions()
            [Transition from 'A' to 'A': 0|0,
             Transition from 'A' to 'B': 0|1,
             Transition from 'A' to 'A': 1|1,
             Transition from 'A' to 'B': 1|0,
             Transition from 'B' to 'A': 0|0,
             Transition from 'B' to 'B': 0|1,
             Transition from 'B' to 'A': 1|1,
             Transition from 'B' to 'B': 1|0]
        Initial states can also be given as a parameter::
            sage: F = FiniteStateMachine(input_alphabet=[0,1])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f,initial_states=['A'])
            sage: F.initial_states()
            ['A']
        Already existing states in the finite state machine (the final
        states in the example below) are also explored::
            sage: F = FiniteStateMachine(initial_states=[0],
            ....:                        final_states=[1],
            ....:                        input_alphabet=[0])
            sage: def transition_function(state, letter):
            ....:     return(1-state, [])
            sage: F.add_from_transition_function(transition_function)
            sage: F.transitions()
            [Transition from 0 to 1: 0|-,
             Transition from 1 to 0: 0|-]
        If ``explore_existing_states=False``, however, this behavior
        is turned off, i.e., already existing states are not
        explored::
            sage: F = FiniteStateMachine(initial_states=[0],
            ....:                        final_states=[1],
            ....:                        input_alphabet=[0])
            sage: def transition_function(state, letter):
            ....:     return(1-state, [])
            sage: F.add_from_transition_function(transition_function,
            ....:                               explore_existing_states=False)
            sage: F.transitions()
            [Transition from 0 to 1: 0|-]
        TEST::
            sage: F = FiniteStateMachine(initial_states=['A'])
            sage: def f(state, input):
            ....:     return [('A', input), ('B', 1-input)]
            sage: F.add_from_transition_function(f)
            Traceback (most recent call last):
            ...
            ValueError: No input alphabet is given.
            Try calling determine_alphabets().
        ::
            sage: def transition(state, where):
            ....:     return (vector([0, 0]), 1)
            sage: Transducer(transition, input_alphabet=[0], initial_states=[0])
            Traceback (most recent call last):
            ...
            TypeError: mutable vectors are unhashable
        """
        if self.input_alphabet is None:
            raise ValueError("No input alphabet is given. "
                             "Try calling determine_alphabets().")
        # ``not_done`` is the work list of states whose outgoing
        # transitions still have to be generated.
        if initial_states is None:
            not_done = self.initial_states()
        elif hasattr(initial_states, '__iter__'):
            not_done = []
            for s in initial_states:
                state = self.add_state(s)
                state.is_initial = True
                not_done.append(state)
        else:
            raise TypeError('Initial states must be iterable ' \
                '(e.g. a list of states).')
        if len(not_done) == 0:
            raise ValueError("No state is initial.")
        # ``ignore_done`` holds pre-existing states that become part of
        # the exploration once they are reached for the first time.
        if explore_existing_states:
            ignore_done = self.states()
            for s in not_done:
                try:
                    ignore_done.remove(s)
                except ValueError:
                    pass
        else:
            ignore_done = []
        # Breadth-first exploration: pop a state, ask ``function`` for
        # its successors under every input letter.
        while len(not_done) > 0:
            s = not_done.pop(0)
            for letter in self.input_alphabet:
                try:
                    return_value = function(s.label(), letter)
                except LookupError:
                    # ``function`` signals "no transition" by raising.
                    continue
                # A single (state, word) pair is wrapped into a list
                # (tuples have no ``pop``, lists do).
                if not hasattr(return_value, "pop"):
                    return_value = [return_value]
                # Dry run to validate the shape of the return value.
                try:
                    for (st_label, word) in return_value:
                        pass
                except TypeError:
                    # NOTE(review): the message says "add_from_transition"
                    # although the method is add_from_transition_function;
                    # confirm before changing the string.
                    raise ValueError("The callback function for "
                                     "add_from_transition is expected "
                                     "to return a pair (new_state, "
                                     "output_label) or a list of such pairs. "
                                     "For the state %s and the input "
                                     "letter %s, it however returned %s, "
                                     "which is not acceptable."
                                     % (s.label(), letter, return_value))
                for (st_label, word) in return_value:
                    if not self.has_state(st_label):
                        # Newly discovered state: schedule it.
                        not_done.append(self.add_state(st_label))
                    elif len(ignore_done) > 0:
                        # Pre-existing state reached for the first time:
                        # schedule it exactly once.
                        u = self.state(st_label)
                        if u in ignore_done:
                            not_done.append(u)
                            ignore_done.remove(u)
                    self.add_transition(s, st_label,
                                        word_in=letter, word_out=word)
def add_transitions_from_function(self, function, labels_as_input=True):
"""
Adds one or more transitions if ``function(state, state)``
says that there are some.
INPUT:
- ``function`` -- a transition function. Given two states
``from_state`` and ``to_state`` (or their labels if
``label_as_input`` is true), this function shall return a
tuple ``(word_in, word_out)`` to add a transition from
``from_state`` to ``to_state`` with input and output labels
``word_in`` and ``word_out``, respectively. If no such
addition is to be added, the transition function shall
return ``None``. The transition function may also return
a list of such tuples in order to add multiple transitions
between the pair of states.
- ``label_as_input`` -- (default: ``True``)
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine()
sage: F.add_states(['A', 'B', 'C'])
sage: def f(state1, state2):
....: if state1 == 'C':
....: return None
....: return (0, 1)
sage: F.add_transitions_from_function(f)
sage: len(F.transitions())
6
Multiple transitions are also possible::
sage: F = FiniteStateMachine()
sage: F.add_states([0, 1])
sage: def f(state1, state2):
....: if state1 != state2:
....: return [(0, 1), (1, 0)]
....: else:
....: return None
sage: F.add_transitions_from_function(f)
sage: F.transitions()
[Transition from 0 to 1: 0|1,
Transition from 0 to 1: 1|0,
Transition from 1 to 0: 0|1,
Transition from 1 to 0: 1|0]
TESTS::
sage: F = FiniteStateMachine()
sage: F.add_state(0)
0
sage: def f(state1, state2):
....: return 1
sage: F.add_transitions_from_function(f)
Traceback (most recent call last):
...
ValueError: The callback function for add_transitions_from_function
is expected to return a pair (word_in, word_out) or a list of such
pairs. For states 0 and 0 however, it returned 1,
which is not acceptable.
"""
for s_from in self.iter_states():
for s_to in self.iter_states():
try:
if labels_as_input:
return_value = function(s_from.label(), s_to.label())
else:
return_value = function(s_from, s_to)
except LookupError:
continue
if return_value is None:
continue
if not hasattr(return_value, "pop"):
transitions = [return_value]
else:
transitions = return_value
for t in transitions:
if not hasattr(t, '__getitem__'):
raise ValueError("The callback function for "
"add_transitions_from_function "
"is expected to return a "
"pair (word_in, word_out) or a "
"list of such pairs. For "
"states %s and %s however, it "
"returned %s, which is not "
"acceptable." % (s_from, s_to, return_value))
label_in = t[0]
try:
label_out = t[1]
except LookupError:
label_out = None
self.add_transition(s_from, s_to, label_in, label_out)
def delete_transition(self, t):
"""
Deletes a transition by removing it from the list of transitions of
the state, where the transition starts.
INPUT:
- ``t`` -- a transition.
OUTPUT:
Nothing.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 0), ('B', 'A', 1)])
sage: F.delete_transition(('A', 'B', 0))
sage: F.transitions()
[Transition from 'B' to 'A': 1|-]
"""
transition = self.transition(t)
transition.from_state.transitions.remove(transition)
def delete_state(self, s):
"""
Deletes a state and all transitions coming or going to this state.
INPUT:
- ``s`` -- a label of a state or an :class:`FSMState`.
OUTPUT:
Nothing.
EXAMPLES::
sage: from sage.combinat.finite_state_machine import FSMTransition
sage: t1 = FSMTransition('A', 'B', 0)
sage: t2 = FSMTransition('B', 'B', 1)
sage: F = FiniteStateMachine([t1, t2])
sage: F.delete_state('A')
sage: F.transitions()
[Transition from 'B' to 'B': 1|-]
TESTS::
sage: F._states_
['B']
sage: F._states_dict_ # This shows that #16024 is fixed.
{'B': 'B'}
"""
state = self.state(s)
for transition in self.transitions():
if transition.to_state == state:
self.delete_transition(transition)
self._states_.remove(state)
try:
del self._states_dict_[state.label()]
except AttributeError:
pass
    def remove_epsilon_transitions(self):
        """
        Removes all epsilon transitions from the finite state machine.
        .. NOTE:: Not implemented yet; calling this always raises.
        TESTS::
            sage: FiniteStateMachine().remove_epsilon_transitions()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: epsilon-transition removal is not implemented.
        raise NotImplementedError
def accessible_components(self):
"""
Returns a new finite state machine with the accessible states
of self and all transitions between those states.
INPUT:
Nothing.
OUTPUT:
A finite state machine with the accessible states of self and
all transitions between those states.
A state is accessible if there is a directed path from an
initial state to the state. If self has no initial states then
a copy of the finite state machine self is returned.
EXAMPLES::
sage: F = Automaton([(0, 0, 0), (0, 1, 1), (1, 1, 0), (1, 0, 1)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 2 states
::
sage: F = Automaton([(0, 0, 1), (0, 0, 1), (1, 1, 0), (1, 0, 1)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 1 states
TESTS:
Check whether input of length > 1 works::
sage: F = Automaton([(0, 1, [0, 1]), (0, 2, 0)],
....: initial_states=[0])
sage: F.accessible_components()
Automaton with 3 states
"""
if len(self.initial_states()) == 0:
return deepcopy(self)
memo = {}
def accessible(from_state, read):
return [(deepcopy(x.to_state, memo), x.word_out)
for x in self.iter_transitions(from_state)
if x.word_in[0] == read]
new_initial_states=map(lambda x: deepcopy(x, memo),
self.initial_states())
result = self.empty_copy()
result.add_from_transition_function(accessible,
initial_states=new_initial_states)
for final_state in self.iter_final_states():
try:
new_final_state=result.state(final_state.label)
new_final_state.is_final=True
except LookupError:
pass
return result
# *************************************************************************
# creating new finite state machines
# *************************************************************************
    def disjoint_union(self, other):
        """
        Returns the disjoint union of ``self`` and ``other``.
        .. NOTE:: Not implemented yet; calling this always raises.
        TESTS::
            sage: F = FiniteStateMachine([('A', 'A')])
            sage: FiniteStateMachine().disjoint_union(F)
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: disjoint union is not implemented.
        raise NotImplementedError
    def concatenation(self, other):
        """
        Returns the concatenation of ``self`` and ``other``.
        .. NOTE:: Not implemented yet; calling this always raises.
        TESTS::
            sage: F = FiniteStateMachine([('A', 'A')])
            sage: FiniteStateMachine().concatenation(F)
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: concatenation is not implemented.
        raise NotImplementedError
    def Kleene_closure(self):
        """
        Returns the Kleene closure of ``self``.
        .. NOTE:: Not implemented yet; calling this always raises.
        TESTS::
            sage: FiniteStateMachine().Kleene_closure()
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: Kleene closure is not implemented.
        raise NotImplementedError
    def intersection(self, other):
        """
        Returns the intersection of ``self`` and ``other``.
        .. NOTE:: Not implemented yet; calling this always raises.
        TESTS::
            sage: FiniteStateMachine().intersection(FiniteStateMachine())
            Traceback (most recent call last):
            ...
            NotImplementedError
        """
        # Placeholder: intersection is not implemented.
        raise NotImplementedError
    def product_FiniteStateMachine(self, other, function,
                                   new_input_alphabet=None,
                                   only_accessible_components=True,
                                   final_function=None,
                                   new_class=None):
        r"""
        Returns a new finite state machine whose states are
        `d`-tuples of states of the original finite state machines.
        INPUT:
        - ``other`` -- a finite state machine (for `d=2`) or a list
          (or iterable) of `d-1` finite state machines.
        - ``function`` has to accept `d` transitions from `A_j` to `B_j`
          for `j\in\{1, \ldots, d\}` and returns a pair ``(word_in, word_out)``
          which is the label of the transition `A=(A_1, \ldots, A_d)` to `B=(B_1,
          \ldots, B_d)`. If there is no transition from `A` to `B`,
          then ``function`` should raise a ``LookupError``.
        - ``new_input_alphabet`` (optional) -- the new input alphabet
          as a list.
        - ``only_accessible_components`` -- If ``True`` (default), then
          the result is piped through :meth:`.accessible_components`. If no
          ``new_input_alphabet`` is given, it is determined by
          :meth:`.determine_alphabets`.
        - ``final_function`` -- A function mapping `d` final states of
          the original finite state machines to the final output of
          the corresponding state in the new finite state machine. By
          default, the final output is the empty word if both final
          outputs of the constituent states are empty; otherwise, a
          ``ValueError`` is raised.
        - ``new_class`` -- Class of the new finite state machine. By
          default (``None``), the class of ``self`` is used.
        OUTPUT:
        A finite state machine whose states are `d`-tuples of states of the
        original finite state machines. A state is initial or
        final if all constituent states are initial or final,
        respectively.
        The labels of the transitions are defined by ``function``.
        The final output of a final state is determined by calling
        ``final_function`` on the constituent states.
        The color of a new state is the tuple of colors of the
        constituent states of ``self`` and ``other``.
        EXAMPLES::
            sage: F = Automaton([('A', 'B', 1), ('A', 'A', 0), ('B', 'A', 2)],
            ....:               initial_states=['A'], final_states=['B'],
            ....:               determine_alphabets=True)
            sage: G = Automaton([(1, 1, 1)], initial_states=[1], final_states=[1])
            sage: def addition(transition1, transition2):
            ....:     return (transition1.word_in[0] + transition2.word_in[0],
            ....:             None)
            sage: H = F.product_FiniteStateMachine(G, addition, [0, 1, 2, 3], only_accessible_components=False)
            sage: H.transitions()
            [Transition from ('A', 1) to ('B', 1): 2|-,
             Transition from ('A', 1) to ('A', 1): 1|-,
             Transition from ('B', 1) to ('A', 1): 3|-]
            sage: H1 = F.product_FiniteStateMachine(G, addition, [0, 1, 2, 3], only_accessible_components=False)
            sage: H1.states()[0].label()[0] is F.states()[0]
            True
            sage: H1.states()[0].label()[1] is G.states()[0]
            True
        ::
            sage: F = Automaton([(0,1,1/4), (0,0,3/4), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: G = Automaton([(0,0,1), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: H = F.product_FiniteStateMachine(
            ....:     G, lambda t1,t2: (t1.word_in[0]*t2.word_in[0], None))
            sage: H.states()
            [(0, 0), (1, 0)]
        ::
            sage: F = Automaton([(0,1,1/4), (0,0,3/4), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: G = Automaton([(0,0,1), (1,1,3/4), (1,0,1/4)],
            ....:               initial_states=[0] )
            sage: H = F.product_FiniteStateMachine(G,
            ....:     lambda t1,t2: (t1.word_in[0]*t2.word_in[0], None),
            ....:     only_accessible_components=False)
            sage: H.states()
            [(0, 0), (1, 0), (0, 1), (1, 1)]
        Also final output words are considered according to the function
        ``final_function``::
            sage: F = Transducer([(0, 1, 0, 1), (1, 1, 1, 1), (1, 1, 0, 1)],
            ....:                final_states=[1])
            sage: F.state(1).final_word_out = 1
            sage: G = Transducer([(0, 0, 0, 1), (0, 0, 1, 0)], final_states=[0])
            sage: G.state(0).final_word_out = 1
            sage: def minus(t1, t2):
            ....:     return (t1.word_in[0] - t2.word_in[0],
            ....:             t1.word_out[0] - t2.word_out[0])
            sage: H = F.product_FiniteStateMachine(G, minus)
            Traceback (most recent call last):
            ...
            ValueError: A final function must be given.
            sage: def plus(s1, s2):
            ....:     return s1.final_word_out[0] + s2.final_word_out[0]
            sage: H = F.product_FiniteStateMachine(G, minus,
            ....:                                  final_function=plus)
            sage: H.final_states()
            [(1, 0)]
            sage: H.final_states()[0].final_word_out
            [2]
        Products of more than two finite state machines are also possible::
            sage: def plus(s1, s2, s3):
            ....:     if s1.word_in == s2.word_in == s3.word_in:
            ....:         return (s1.word_in,
            ....:                 sum(s.word_out[0] for s in (s1, s2, s3)))
            ....:     else:
            ....:         raise LookupError
            sage: T0 = transducers.CountSubblockOccurrences([0, 0], [0, 1, 2])
            sage: T1 = transducers.CountSubblockOccurrences([1, 1], [0, 1, 2])
            sage: T2 = transducers.CountSubblockOccurrences([2, 2], [0, 1, 2])
            sage: T = T0.product_FiniteStateMachine([T1, T2], plus)
            sage: T.transitions()
            [Transition from ((), (), ()) to ((0,), (), ()): 0|0,
             Transition from ((), (), ()) to ((), (1,), ()): 1|0,
             Transition from ((), (), ()) to ((), (), (2,)): 2|0,
             Transition from ((0,), (), ()) to ((0,), (), ()): 0|1,
             Transition from ((0,), (), ()) to ((), (1,), ()): 1|0,
             Transition from ((0,), (), ()) to ((), (), (2,)): 2|0,
             Transition from ((), (1,), ()) to ((0,), (), ()): 0|0,
             Transition from ((), (1,), ()) to ((), (1,), ()): 1|1,
             Transition from ((), (1,), ()) to ((), (), (2,)): 2|0,
             Transition from ((), (), (2,)) to ((0,), (), ()): 0|0,
             Transition from ((), (), (2,)) to ((), (1,), ()): 1|0,
             Transition from ((), (), (2,)) to ((), (), (2,)): 2|1]
            sage: T([0, 0, 1, 1, 2, 2, 0, 1, 2, 2])
            [0, 1, 0, 1, 0, 1, 0, 0, 0, 1]
        ``other`` can also be an iterable::
            sage: T == T0.product_FiniteStateMachine(iter([T1, T2]), plus)
            True
        TESTS:
        Check that colors are correctly dealt with. In particular, the
        new colors have to be hashable such that
        :meth:`Automaton.determinisation` does not fail::
            sage: A = Automaton([[0, 0, 0]], initial_states=[0])
            sage: B = A.product_FiniteStateMachine(A,
            ....:                                  lambda t1, t2: (0, None))
            sage: B.states()[0].color
            (None, None)
            sage: B.determinisation()
            Automaton with 1 states
        Check handling of the parameter ``other``::
            sage: A.product_FiniteStateMachine(None, plus)
            Traceback (most recent call last):
            ...
            ValueError: other must be a finite state machine or a list
            of finite state machines.
            sage: A.product_FiniteStateMachine([None], plus)
            Traceback (most recent call last):
            ...
            ValueError: other must be a finite state machine or a list
            of finite state machines.
        Test whether ``new_class`` works::
            sage: T = Transducer()
            sage: type(T.product_FiniteStateMachine(T, None))
            <class 'sage.combinat.finite_state_machine.Transducer'>
            sage: type(T.product_FiniteStateMachine(T, None,
            ....:                                   new_class=Automaton))
            <class 'sage.combinat.finite_state_machine.Automaton'>
        """
        # Fallback used when no final_function is supplied: final output
        # is empty, but only if every constituent's final output is empty.
        def default_final_function(*args):
            if any(s.final_word_out for s in args):
                raise ValueError("A final function must be given.")
            return []
        if final_function is None:
            final_function = default_final_function
        result = self.empty_copy(new_class=new_class)
        if new_input_alphabet is not None:
            result.input_alphabet = new_input_alphabet
        else:
            result.input_alphabet = None
        # Normalize ``other`` into the full list of factor machines.
        if hasattr(other, '__iter__'):
            machines = [self]
            machines.extend(other)
            if not all(is_FiniteStateMachine(m) for m in machines):
                raise ValueError("other must be a finite state machine "
                                 "or a list of finite state machines.")
        elif is_FiniteStateMachine(other):
            machines = [self, other]
        else:
            raise ValueError("other must be a finite state machine or "
                             "a list of finite state machines.")
        # One candidate product transition per combination of factor
        # transitions; ``function`` raising LookupError means "no edge".
        for transitions in itertools.product(
                *(m.iter_transitions() for m in machines)):
            try:
                word = function(*transitions)
            except LookupError:
                continue
            result.add_transition(tuple(t.from_state for t in transitions),
                                  tuple(t.to_state for t in transitions),
                                  word[0], word[1])
        # A product state is initial/final iff all constituents are.
        for state in result.states():
            if all(s.is_initial for s in state.label()):
                state.is_initial = True
            if all(s.is_final for s in state.label()):
                state.is_final = True
                state.final_word_out = final_function(*state.label())
            state.color = tuple(s.color for s in state.label())
        if only_accessible_components:
            if result.input_alphabet is None:
                result.determine_alphabets()
            return result.accessible_components()
        else:
            return result
    def composition(self, other, algorithm=None,
                    only_accessible_components=True):
        """
        Returns a new transducer which is the composition of ``self``
        and ``other``.
        INPUT:
        - ``other`` -- a transducer
        - ``algorithm`` -- can be one of the following
          - ``direct`` -- The composition is calculated directly.
            There can be arbitrarily many initial and final states,
            but the input and output labels must have length 1.
            WARNING: The output of other is fed into self.
          - ``explorative`` -- An explorative algorithm is used.
            At least the following restrictions apply, but are not
            checked:
            - both self and other have exactly one initial state
            - all input labels of transitions have length exactly 1
            The input alphabet of self has to be specified.
            This is a very limited implementation of composition.
            WARNING: The output of ``other`` is fed into ``self``.
          If algorithm is ``None``, then the algorithm is chosen
          automatically (at the moment always ``direct``).
        OUTPUT:
        A new transducer.
        The labels of the new finite state machine are pairs of states
        of the original finite state machines. The color of a new
        state is the tuple of colors of the constituent states.
        EXAMPLES::
            sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
            ....:                initial_states=['A', 'B'], final_states=['B'],
            ....:                determine_alphabets=True)
            sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
            ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
            ....:                initial_states=[1], final_states=[2],
            ....:                determine_alphabets=True)
            sage: Hd = F.composition(G, algorithm='direct')
            sage: Hd.initial_states()
            [(1, 'B'), (1, 'A')]
            sage: Hd.transitions()
            [Transition from (1, 'B') to (1, 'A'): 1|1,
             Transition from (1, 'A') to (2, 'B'): 0|0,
             Transition from (2, 'B') to (2, 'A'): 0|1,
             Transition from (2, 'A') to (2, 'B'): 1|0]
        ::
            sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
            ....:                 ('B', 'B', 0, 0)],
            ....:                initial_states=['A'], final_states=['B'])
            sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
            ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
            ....:                initial_states=[1], final_states=[1])
            sage: He = G.composition(F, algorithm='explorative')
            sage: He.transitions()
            [Transition from ('A', 1) to ('B', 2): 1|0,1,
             Transition from ('B', 2) to ('B', 2): 0|1,
             Transition from ('B', 2) to ('B', 1): 1|1,
             Transition from ('B', 1) to ('B', 1): 0|0,
             Transition from ('B', 1) to ('B', 2): 1|0]
        Also final output words are considered if ``algorithm='direct'`` or
        ``None``::
            sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
            ....:                initial_states=['A', 'B'],
            ....:                final_states=['A', 'B'])
            sage: F.state('A').final_word_out = 0
            sage: F.state('B').final_word_out = 1
            sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
            ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
            ....:                initial_states=[1], final_states=[2])
            sage: G.state(2).final_word_out = 0
            sage: Hd = F.composition(G, algorithm='direct')
            sage: Hd.final_states()
            [(2, 'B')]
        Note that ``(2, 'A')`` is not final, as the final output `0`
        of state `2` of `G` cannot be processed in state ``'A'`` of
        `F`.
        ::
            sage: [s.final_word_out for s in Hd.final_states()]
            [[1, 0]]
        Be aware that after composition, different transitions may
        share the same output label (same python object)::
            sage: F = Transducer([ ('A','B',0,0), ('B','A',0,0)],
            ....:                initial_states=['A'],
            ....:                final_states=['A'])
            sage: F.transitions()[0].word_out is F.transitions()[1].word_out
            False
            sage: G = Transducer([('C','C',0,1)],
            ....:                initial_states=['C'],
            ....:                final_states=['C'])
            sage: H = G.composition(F)
            sage: H.transitions()[0].word_out is H.transitions()[1].word_out
            True
        In the explorative algorithm, transducers with non-empty final
        output words are currently not implemented::
            sage: A = transducers.GrayCode()
            sage: B = transducers.abs([0, 1])
            sage: A.composition(B, algorithm='explorative')
            Traceback (most recent call last):
            ...
            NotImplementedError: Explorative composition is not
            implemented for transducers with non-empty final output
            words. Try the direct algorithm instead.
        Similarly, the explorative algorithm cannot handle
        non-deterministic finite state machines::
            sage: A = Transducer([(0, 0, 0, 0), (0, 1, 0, 0)])
            sage: B = transducers.Identity([0])
            sage: A.composition(B, algorithm='explorative')
            Traceback (most recent call last):
            ...
            NotImplementedError: Explorative composition is currently
            not implemented for non-deterministic transducers.
            sage: B.composition(A, algorithm='explorative')
            Traceback (most recent call last):
            ...
            NotImplementedError: Explorative composition is currently
            not implemented for non-deterministic transducers.
        TESTS:
        Due to the limitations of the two algorithms the following
        (examples from above, but different algorithm used) does not
        give a full answer or does not work.
        In the following, ``algorithm='explorative'`` is inadequate,
        as ``F`` has more than one initial state::
            sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
            ....:                initial_states=['A', 'B'], final_states=['B'],
            ....:                determine_alphabets=True)
            sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
            ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
            ....:                initial_states=[1], final_states=[2],
            ....:                determine_alphabets=True)
            sage: He = F.composition(G, algorithm='explorative')
            sage: He.initial_states()
            [(1, 'A')]
            sage: He.transitions()
            [Transition from (1, 'A') to (2, 'B'): 0|0,
             Transition from (2, 'B') to (2, 'A'): 0|1,
             Transition from (2, 'A') to (2, 'B'): 1|0]
        In the following example, ``algorithm='direct'`` is inappropriate
        as there are edges with output labels of length greater than 1::
            sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
            ....:                 ('B', 'B', 0, 0)],
            ....:                initial_states=['A'], final_states=['B'])
            sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
            ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
            ....:                initial_states=[1], final_states=[1])
            sage: Hd = G.composition(F, algorithm='direct')
        In the following examples, we compose transducers and automata
        and check whether the types are correct. ::
            sage: from sage.combinat.finite_state_machine import (
            ....:     is_Automaton, is_Transducer)
            sage: T = Transducer([(0, 0, 0, 0)], initial_states=[0])
            sage: A = Automaton([(0, 0, 0)], initial_states=[0])
            sage: is_Transducer(T.composition(T, algorithm='direct'))
            True
            sage: is_Transducer(T.composition(T, algorithm='explorative'))
            True
            sage: T.composition(A, algorithm='direct')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: T.composition(A, algorithm='explorative')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: A.composition(A, algorithm='direct')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: A.composition(A, algorithm='explorative')
            Traceback (most recent call last):
            ...
            TypeError: Composition with automaton is not possible.
            sage: is_Automaton(A.composition(T, algorithm='direct'))
            True
            sage: is_Automaton(A.composition(T, algorithm='explorative'))
            True
        """
        # Automata forbid composition (no meaningful output to feed in).
        if not other._allow_composition_:
            raise TypeError("Composition with automaton is not "
                            "possible.")
        # Default algorithm choice; currently always 'direct'.
        if algorithm is None:
            algorithm = 'direct'
        if algorithm == 'direct':
            return self._composition_direct_(other, only_accessible_components)
        elif algorithm == 'explorative':
            return self._composition_explorative_(other)
        else:
            raise ValueError("Unknown algorithm %s." % (algorithm,))
    def _composition_direct_(self, other, only_accessible_components=True):
        """
        See :meth:`.composition` for details.
        TESTS::
            sage: F = Transducer([('A', 'B', 1, 0), ('B', 'A', 0, 1)],
            ....:                initial_states=['A', 'B'], final_states=['B'],
            ....:                determine_alphabets=True)
            sage: G = Transducer([(1, 1, 1, 0), (1, 2, 0, 1),
            ....:                 (2, 2, 1, 1), (2, 2, 0, 0)],
            ....:                initial_states=[1], final_states=[2],
            ....:                determine_alphabets=True)
            sage: Hd = F._composition_direct_(G)
            sage: Hd.initial_states()
            [(1, 'B'), (1, 'A')]
            sage: Hd.transitions()
            [Transition from (1, 'B') to (1, 'A'): 1|1,
             Transition from (1, 'A') to (2, 'B'): 0|0,
             Transition from (2, 'B') to (2, 'A'): 0|1,
             Transition from (2, 'A') to (2, 'B'): 1|0]
        """
        def function(transition1, transition2):
            # ``transition1`` comes from ``other``, ``transition2`` from
            # ``self``. A product transition exists iff the output of
            # ``other`` matches the input of ``self``; raising
            # LookupError tells product_FiniteStateMachine to skip this
            # pair of transitions.
            if transition1.word_out == transition2.word_in:
                return (transition1.word_in, transition2.word_out)
            else:
                raise LookupError
        # Note the order: states of ``result`` are labeled
        # (state_of_other, state_of_self).
        result = other.product_FiniteStateMachine(
            self, function,
            only_accessible_components=only_accessible_components,
            final_function=lambda s1, s2: [],
            new_class=self.__class__)
        # Fix up final states: a product state is final only if the
        # final output word of its ``other``-component can be processed
        # by ``self`` starting in its ``self``-component; the processed
        # output becomes the new final output word.
        for state_result in result.iter_states():
            state = state_result.label()[0]
            if state.is_final:
                accept, state_to, output = self.process(
                    state.final_word_out,
                    initial_state=self.state(state_result.label()[1]))
                if not accept:
                    state_result.is_final = False
                else:
                    state_result.is_final = True
                    state_result.final_word_out = output
        return result
    def _composition_explorative_(self, other):
        """
        See :meth:`.composition` for details.
        TESTS::
            sage: F = Transducer([('A', 'B', 1, [1, 0]), ('B', 'B', 1, 1),
            ....:                 ('B', 'B', 0, 0)],
            ....:                initial_states=['A'], final_states=['B'])
            sage: G = Transducer([(1, 1, 0, 0), (1, 2, 1, 0),
            ....:                 (2, 2, 0, 1), (2, 1, 1, 1)],
            ....:                initial_states=[1], final_states=[1])
            sage: He = G._composition_explorative_(F)
            sage: He.transitions()
            [Transition from ('A', 1) to ('B', 2): 1|0,1,
             Transition from ('B', 2) to ('B', 2): 0|1,
             Transition from ('B', 2) to ('B', 1): 1|1,
             Transition from ('B', 1) to ('B', 1): 0|0,
             Transition from ('B', 1) to ('B', 2): 1|0]
        Check that colors are correctly dealt with. In particular, the
        new colors have to be hashable such that
        :meth:`Automaton.determinisation` does not fail::
            sage: T = Transducer([[0, 0, 0, 0]], initial_states=[0])
            sage: A = T.input_projection()
            sage: B = A.composition(T, algorithm='explorative')
            sage: B.states()[0].color
            (None, None)
            sage: B.determinisation()
            Automaton with 1 states
        .. TODO::
            The explorative algorithm should be re-implemented using the
            process iterators of both finite state machines.
        """
        def composition_transition(state, input):
            # Transition function for add_from_transition_function.
            # ``state`` is a pair (state of other, state of self): read
            # ``input`` in ``other``, then feed the produced output
            # letters one by one into ``self``. A LookupError signals
            # that no transition exists for this input.
            (state1, state2) = state
            transition1 = None
            for transition in other.iter_transitions(state1):
                if transition.word_in == [input]:
                    transition1 = transition
                    break
            if transition1 is None:
                raise LookupError
            new_state1 = transition1.to_state
            new_state2 = state2
            output = []
            # Process each output letter of ``other`` through ``self``,
            # accumulating the output of ``self``.
            for o in transition1.word_out:
                transition2 = None
                for transition in self.iter_transitions(new_state2):
                    if transition.word_in == [o]:
                        transition2 = transition
                        break
                if transition2 is None:
                    raise LookupError
                new_state2 = transition2.to_state
                output += transition2.word_out
            return ((new_state1, new_state2), output)
        # Restrictions of this algorithm (cf. the docstring of
        # composition): no final output words, deterministic machines.
        if any(s.final_word_out for s in self.iter_final_states()) or \
                any(s.final_word_out for s in other.iter_final_states()):
            raise NotImplementedError("Explorative composition is not "
                                      "implemented for transducers with "
                                      "non-empty final output words. Try "
                                      "the direct algorithm instead.")
        if not self.is_deterministic() or not other.is_deterministic():
            raise NotImplementedError("Explorative composition is "
                                      "currently not implemented for "
                                      "non-deterministic transducers.")
        F = other.empty_copy(new_class=self.__class__)
        # Exploration starts at the pair of (unique) initial states.
        new_initial_states = [(other.initial_states()[0], self.initial_states()[0])]
        F.add_from_transition_function(composition_transition,
                                       initial_states=new_initial_states)
        # A product state is final iff both components are final; its
        # color is the (hashable) tuple of component colors.
        for state in F.states():
            if all(map(lambda s: s.is_final, state.label())):
                state.is_final = True
            state.color = tuple(map(lambda s: s.color, state.label()))
        return F
def input_projection(self):
"""
Returns an automaton where the output of each transition of
self is deleted.
INPUT:
Nothing
OUTPUT:
An automaton.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
....: ('B', 'B', 1, 0)])
sage: G = F.input_projection()
sage: G.transitions()
[Transition from 'A' to 'B': 0|-,
Transition from 'A' to 'A': 1|-,
Transition from 'B' to 'B': 1|-]
"""
return self.projection(what='input')
def output_projection(self):
"""
Returns a automaton where the input of each transition of self
is deleted and the new input is the original output.
INPUT:
Nothing
OUTPUT:
An automaton.
EXAMPLES::
sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
....: ('B', 'B', 1, 0)])
sage: G = F.output_projection()
sage: G.transitions()
[Transition from 'A' to 'B': 1|-,
Transition from 'A' to 'A': 1|-,
Transition from 'B' to 'B': 0|-]
Final output words are also considered correctly::
sage: H = Transducer([('A', 'B', 0, 1), ('A', 'A', 1, 1),
....: ('B', 'B', 1, 0), ('A', ('final', 0), 0, 0)],
....: final_states=['A', 'B'])
sage: H.state('B').final_word_out = 2
sage: J = H.output_projection()
sage: J.states()
['A', 'B', ('final', 0), ('final', 1)]
sage: J.transitions()
[Transition from 'A' to 'B': 1|-,
Transition from 'A' to 'A': 1|-,
Transition from 'A' to ('final', 0): 0|-,
Transition from 'B' to 'B': 0|-,
Transition from 'B' to ('final', 1): 2|-]
sage: J.final_states()
['A', ('final', 1)]
"""
return self.projection(what='output')
    def projection(self, what='input'):
        """
        Returns an Automaton which transition labels are the projection
        of the transition labels of the input.
        INPUT:
        - ``what`` -- (default: ``input``) either ``input`` or ``output``.
        OUTPUT:
        An automaton.
        EXAMPLES::
            sage: F = FiniteStateMachine([('A', 'B', 0, 1), ('A', 'A', 1, 1),
            ....:                         ('B', 'B', 1, 0)])
            sage: G = F.projection(what='output')
            sage: G.transitions()
            [Transition from 'A' to 'B': 1|-,
             Transition from 'A' to 'A': 1|-,
             Transition from 'B' to 'B': 0|-]
        """
        new = Automaton()
        # TODO: use empty_copy() in order to
        # preserve on_duplicate_transition and future extensions.
        # for this, empty_copy would need a new optional argument
        # use_class=None ?
        # The input alphabet of the projection is the projected alphabet
        # of ``self``.
        if what == 'input':
            new.input_alphabet = copy(self.input_alphabet)
        elif what == 'output':
            new.input_alphabet = copy(self.output_alphabet)
        else:
            raise NotImplementedError
        # Copy all states; remember old -> new state correspondence.
        state_mapping = {}
        for state in self.iter_states():
            state_mapping[state] = new.add_state(deepcopy(state))
        for transition in self.iter_transitions():
            if what == 'input':
                new_word_in = transition.word_in
            elif what == 'output':
                new_word_in = transition.word_out
            else:
                raise NotImplementedError
            new.add_transition((state_mapping[transition.from_state],
                                state_mapping[transition.to_state],
                                new_word_in, None))
        if what == 'output':
            # Final output words have to be realized as transitions into
            # a fresh final sink state, since an automaton has no output.
            states = [s for s in self.iter_final_states() if s.final_word_out]
            if not states:
                return new
            # Choose a label ('final', number) not already in use.
            number = 0
            while new.has_state(('final', number)):
                number += 1
            final = new.add_state(('final', number))
            final.is_final = True
            for state in states:
                output = state.final_word_out
                # The original state loses its final status; reading its
                # former final output word now leads to the sink instead.
                new.state(state_mapping[state]).final_word_out = []
                new.state(state_mapping[state]).is_final = False
                new.add_transition((state_mapping[state], final, output, None))
        return new
def transposition(self):
"""
Returns a new finite state machine, where all transitions of the
input finite state machine are reversed.
INPUT:
Nothing.
OUTPUT:
A new finite state machine.
EXAMPLES::
sage: aut = Automaton([('A', 'A', 0), ('A', 'A', 1), ('A', 'B', 0)],
....: initial_states=['A'], final_states=['B'])
sage: aut.transposition().transitions('B')
[Transition from 'B' to 'A': 0|-]
::
sage: aut = Automaton([('1', '1', 1), ('1', '2', 0), ('2', '2', 0)],
....: initial_states=['1'], final_states=['1', '2'])
sage: aut.transposition().initial_states()
['1', '2']
TESTS:
If a final state of ``self`` has a non-empty final output word,
transposition is not implemented::
sage: T = Transducer([('1', '1', 1, 0), ('1', '2', 0, 1),
....: ('2', '2', 0, 2)],
....: initial_states=['1'],
....: final_states=['1', '2'])
sage: T.state('1').final_word_out = [2, 5]
sage: T.transposition()
Traceback (most recent call last):
...
NotImplementedError: Transposition for transducers with
final output words is not implemented.
"""
transposition = self.empty_copy()
for state in self.iter_states():
transposition.add_state(deepcopy(state))
for transition in self.iter_transitions():
transposition.add_transition(
transition.to_state.label(), transition.from_state.label(),
transition.word_in, transition.word_out)
for initial in self.iter_initial_states():
state = transposition.state(initial.label())
if not initial.is_final:
state.is_final = True
state.is_initial = False
for final in self.iter_final_states():
state = transposition.state(final.label())
if final.final_word_out:
raise NotImplementedError("Transposition for transducers "
"with final output words is not "
"implemented.")
if not final.is_initial:
state.is_final = False
state.is_initial = True
return transposition
def split_transitions(self):
"""
Returns a new transducer, where all transitions in self with input
labels consisting of more than one letter
are replaced by a path of the corresponding length.
INPUT:
Nothing.
OUTPUT:
A new transducer.
EXAMPLES::
sage: A = Transducer([('A', 'B', [1, 2, 3], 0)],
....: initial_states=['A'], final_states=['B'])
sage: A.split_transitions().states()
[('A', ()), ('B', ()),
('A', (1,)), ('A', (1, 2))]
"""
new = self.empty_copy()
for state in self.states():
new.add_state(FSMState((state, ()), is_initial=state.is_initial,
is_final=state.is_final))
for transition in self.transitions():
for j in range(len(transition.word_in)-1):
new.add_transition((
(transition.from_state, tuple(transition.word_in[:j])),
(transition.from_state, tuple(transition.word_in[:j+1])),
transition.word_in[j],
[]))
new.add_transition((
(transition.from_state, tuple(transition.word_in[:-1])),
(transition.to_state, ()),
transition.word_in[-1:],
transition.word_out))
return new
def final_components(self):
"""
Returns the final components of a finite state machine as finite
state machines.
INPUT:
Nothing.
OUTPUT:
A list of finite state machines, each representing a final
component of ``self``.
A final component of a transducer ``T`` is a strongly connected
component ``C`` such that there are no transitions of ``T``
leaving ``C``.
The final components are the only parts of a transducer which
influence the main terms of the asympotic behaviour of the sum
of output labels of a transducer, see [HKP2014]_ and [HKW2014]_.
EXAMPLES::
sage: T = Transducer([['A', 'B', 0, 0], ['B', 'C', 0, 1],
....: ['C', 'B', 0, 1], ['A', 'D', 1, 0],
....: ['D', 'D', 0, 0], ['D', 'B', 1, 0],
....: ['A', 'E', 2, 0], ['E', 'E', 0, 0]])
sage: FC = T.final_components()
sage: sorted(FC[0].transitions())
[Transition from 'B' to 'C': 0|1,
Transition from 'C' to 'B': 0|1]
sage: FC[1].transitions()
[Transition from 'E' to 'E': 0|0]
Another example (cycle of length 2)::
sage: T = Automaton([[0, 1, 0], [1, 0, 0]])
sage: len(T.final_components()) == 1
True
sage: T.final_components()[0].transitions()
[Transition from 0 to 1: 0|-,
Transition from 1 to 0: 0|-]
REFERENCES:
.. [HKP2014] Clemens Heuberger, Sara Kropf, and Helmut
Prodinger, *Asymptotic analysis of the sum of the output of
transducer*, in preparation.
"""
DG = self.digraph()
condensation = DG.strongly_connected_components_digraph()
return [self.induced_sub_finite_state_machine(map(self.state, component))
for component in condensation.vertices()
if condensation.out_degree(component) == 0]
# *************************************************************************
# simplifications
# *************************************************************************
    def prepone_output(self):
        """
        For all paths, shift the output of the path from one
        transition to the earliest possible preceeding transition of
        the path.
        INPUT:
        Nothing.
        OUTPUT:
        Nothing.
        Apply the following to each state `s` (except initial states) of the
        finite state machine as often as possible:
        If the letter `a` is a prefix of the output label of all transitions from
        `s` (including the final output of `s`), then remove it from all these
        labels and append it to all output labels of all transitions leading
        to `s`.
        We assume that the states have no output labels, but final outputs are
        allowed.
        EXAMPLES::
            sage: A = Transducer([('A', 'B', 1, 1),
            ....:                 ('B', 'B', 0, 0),
            ....:                 ('B', 'C', 1, 0)],
            ....:                initial_states=['A'],
            ....:                final_states=['C'])
            sage: A.prepone_output()
            sage: A.transitions()
            [Transition from 'A' to 'B': 1|1,0,
             Transition from 'B' to 'B': 0|0,
             Transition from 'B' to 'C': 1|-]
        ::
            sage: B = Transducer([('A', 'B', 0, 1),
            ....:                 ('B', 'C', 1, [1, 1]),
            ....:                 ('B', 'C', 0, 1)],
            ....:                initial_states=['A'],
            ....:                final_states=['C'])
            sage: B.prepone_output()
            sage: B.transitions()
            [Transition from 'A' to 'B': 0|1,1,
             Transition from 'B' to 'C': 1|1,
             Transition from 'B' to 'C': 0|-]
        If initial states are not labeled as such, unexpected results may be
        obtained::
            sage: C = Transducer([(0,1,0,0)])
            sage: C.prepone_output()
            verbose 0 (...: finite_state_machine.py, prepone_output)
            All transitions leaving state 0 have an output label with
            prefix 0. However, there is no inbound transition and it
            is not an initial state. This routine (possibly called by
            simplification) therefore erased this prefix from all
            outbound transitions.
            sage: C.transitions()
            [Transition from 0 to 1: 0|-]
        Also the final output of final states can be changed::
            sage: T = Transducer([('A', 'B', 0, 1),
            ....:                 ('B', 'C', 1, [1, 1]),
            ....:                 ('B', 'C', 0, 1)],
            ....:                initial_states=['A'],
            ....:                final_states=['B'])
            sage: T.state('B').final_word_out = [1]
            sage: T.prepone_output()
            sage: T.transitions()
            [Transition from 'A' to 'B': 0|1,1,
             Transition from 'B' to 'C': 1|1,
             Transition from 'B' to 'C': 0|-]
            sage: T.state('B').final_word_out
            []
        ::
            sage: S = Transducer([('A', 'B', 0, 1),
            ....:                 ('B', 'C', 1, [1, 1]),
            ....:                 ('B', 'C', 0, 1)],
            ....:                initial_states=['A'],
            ....:                final_states=['B'])
            sage: S.state('B').final_word_out = [0]
            sage: S.prepone_output()
            sage: S.transitions()
            [Transition from 'A' to 'B': 0|1,
             Transition from 'B' to 'C': 1|1,1,
             Transition from 'B' to 'C': 0|1]
            sage: S.state('B').final_word_out
            [0]
        Output labels do not have to be hashable::
            sage: C = Transducer([(0, 1, 0, []),
            ....:                 (1, 0, 0, [vector([0, 0]), 0]),
            ....:                 (1, 1, 1, [vector([0, 0]), 1]),
            ....:                 (0, 0, 1, 0)],
            ....:                determine_alphabets=False,
            ....:                initial_states=[0])
            sage: C.prepone_output()
            sage: sorted(C.transitions())
            [Transition from 0 to 1: 0|(0, 0),
             Transition from 0 to 0: 1|0,
             Transition from 1 to 0: 0|0,
             Transition from 1 to 1: 1|1,(0, 0)]
        """
        def find_common_output(state):
            # Returns (letter,) if every outgoing transition of ``state``
            # (and, if final, its final output word) starts with the same
            # output letter; otherwise returns the empty tuple.
            # An empty outgoing output word (or empty final output of a
            # final state) means nothing can be shifted.
            if any(itertools.ifilter(
                    lambda transition: not transition.word_out,
                    self.transitions(state))) \
                   or state.is_final and not state.final_word_out:
                return tuple()
            first_letters = map(lambda transition: transition.word_out[0],
                                self.transitions(state))
            if state.is_final:
                first_letters = first_letters + [state.final_word_out[0]]
            if not first_letters:
                return tuple()
            first_item = first_letters.pop()
            # Use == (not hashing/sets) since output letters need not be
            # hashable, cf. the vector example in the docstring.
            if all([item == first_item for item in first_letters]):
                return (first_item,)
            return tuple()
        # Fixed-point iteration: repeat until no state allows a shift.
        changed = 1
        iteration = 0
        while changed > 0:
            changed = 0
            iteration += 1
            for state in self.iter_states():
                # Output must not be moved past an initial state.
                if state.is_initial:
                    continue
                if state.word_out:
                    raise NotImplementedError(
                        "prepone_output assumes that all states have "
                        "empty output word, but state %s has output "
                        "word %s" % (state, state.word_out))
                common_output = find_common_output(state)
                if common_output:
                    changed += 1
                    # Strip the common first letter from all outgoing
                    # output words (and the final output word) ...
                    if state.is_final:
                        assert state.final_word_out[0] == common_output[0]
                        state.final_word_out = state.final_word_out[1:]
                    for transition in self.transitions(state):
                        assert transition.word_out[0] == common_output[0]
                        transition.word_out = transition.word_out[1:]
                    # ... and append it to every inbound transition.
                    found_inbound_transition = False
                    for transition in self.iter_transitions():
                        if transition.to_state == state:
                            transition.word_out = transition.word_out \
                                + [common_output[0]]
                            found_inbound_transition = True
                    if not found_inbound_transition:
                        verbose(
                            "All transitions leaving state %s have an "
                            "output label with prefix %s. However, "
                            "there is no inbound transition and it is "
                            "not an initial state. This routine "
                            "(possibly called by simplification) "
                            "therefore erased this prefix from all "
                            "outbound transitions." %
                            (state, common_output[0]),
                            level=0)
    def equivalence_classes(self):
        r"""
        Returns a list of equivalence classes of states.
        INPUT:
        Nothing.
        OUTPUT:
        A list of equivalence classes of states.
        Two states `a` and `b` are equivalent if and only if there is
        a bijection `\varphi` between paths starting at `a` and paths
        starting at `b` with the following properties: Let `p_a` be a
        path from `a` to `a'` and `p_b` a path from `b` to `b'` such
        that `\varphi(p_a)=p_b`, then
        - `p_a.\mathit{word}_\mathit{in}=p_b.\mathit{word}_\mathit{in}`,
        - `p_a.\mathit{word}_\mathit{out}=p_b.\mathit{word}_\mathit{out}`,
        - `a'` and `b'` have the same output label, and
        - `a'` and `b'` are both final or both non-final and have the
          same final output word.
        The function :meth:`.equivalence_classes` returns a list of
        the equivalence classes to this equivalence relation.
        This is one step of Moore's minimization algorithm.
        .. SEEALSO::
            :meth:`.minimization`
        EXAMPLES::
            sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
            ....:                           ("B", "C", 0, 0), ("B", "C", 1, 1),
            ....:                           ("C", "D", 0, 1), ("C", "D", 1, 0),
            ....:                           ("D", "A", 0, 0), ("D", "A", 1, 1)])
            sage: sorted(fsm.equivalence_classes())
            [['A', 'C'], ['B', 'D']]
            sage: fsm.state("A").is_final = True
            sage: sorted(fsm.equivalence_classes())
            [['A'], ['B'], ['C'], ['D']]
            sage: fsm.state("C").is_final = True
            sage: sorted(fsm.equivalence_classes())
            [['A', 'C'], ['B', 'D']]
            sage: fsm.state("A").final_word_out = 1
            sage: sorted(fsm.equivalence_classes())
            [['A'], ['B'], ['C'], ['D']]
            sage: fsm.state("C").final_word_out = 1
            sage: sorted(fsm.equivalence_classes())
            [['A', 'C'], ['B', 'D']]
        """
        # Two states `a` and `b` are j-equivalent if and only if there
        # is a bijection `\varphi` between paths of length <= j
        # starting at `a` and paths starting at `b` with the following
        # properties: Let `p_a` be a path from `a` to `a'` and `p_b` a
        # path from `b` to `b'` such that `\varphi(p_a)=p_b`, then
        #
        # - `p_a.\mathit{word}_{in}=p_b.\mathit{word}_{in}`,
        # - `p_a.\mathit{word}_{out}=p_b.\mathit{word}_{out}`,
        # - `a'` and `b'` have the same output label, and
        # - `a'` and `b'` are both final or both non-final.
        # If for some j the relations j-1 equivalent and j-equivalent
        # coincide, then they are equal to the equivalence relation
        # described in the docstring.
        # classes_current holds the equivalence classes of
        # j-equivalence, classes_previous holds the equivalence
        # classes of j-1 equivalence.
        # initialize with 0-equivalence
        classes_previous = []
        key_0 = lambda state: (state.is_final, state.color, state.word_out,
                               state.final_word_out)
        states_grouped = full_group_by(self.states(), key=key_0)
        classes_current = [equivalence_class for
                           (key,equivalence_class) in states_grouped]
        # Refinement can only increase the number of classes, so equal
        # lengths mean the partition reached a fixed point.
        while len(classes_current) != len(classes_previous):
            class_of = {}
            classes_previous = classes_current
            classes_current = []
            for k in range(len(classes_previous)):
                for state in classes_previous[k]:
                    class_of[state] = k
            # Group by the sorted outgoing-transition data, with target
            # states replaced by their (j-1)-equivalence class index.
            key_current = lambda state: sorted(
                [(transition.word_in,
                  transition.word_out,
                  class_of[transition.to_state])
                 for transition in state.transitions])
            for class_previous in classes_previous:
                states_grouped = full_group_by(class_previous, key=key_current)
                classes_current.extend([equivalence_class for
                                        (key,equivalence_class) in states_grouped])
        return classes_current
    def quotient(self, classes):
        r"""
        Constructs the quotient with respect to the equivalence
        classes.
        INPUT:
        - ``classes`` is a list of equivalence classes of states.
        OUTPUT:
        A finite state machine.
        The labels of the new states are tuples of states of the
        ``self``, corresponding to ``classes``.
        Assume that `c` is a class, and `a` and `b` are states in
        `c`. Then there is a bijection `\varphi` between the
        transitions from `a` and the transitions from `b` with the
        following properties: if `\varphi(t_a)=t_b`, then
        - `t_a.\mathit{word}_\mathit{in}=t_b.\mathit{word}_\mathit{in}`,
        - `t_a.\mathit{word}_\mathit{out}=t_b.\mathit{word}_\mathit{out}`, and
        - `t_a` and `t_b` lead to some equivalent states `a'` and `b'`.
        Non-initial states may be merged with initial states, the
        resulting state is an initial state.
        All states in a class must have the same ``is_final``,
        ``final_word_out`` and ``word_out`` values.
        EXAMPLES::
            sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
            ....:                           ("B", "C", 0, 0), ("B", "C", 1, 1),
            ....:                           ("C", "D", 0, 1), ("C", "D", 1, 0),
            ....:                           ("D", "A", 0, 0), ("D", "A", 1, 1)])
            sage: fsmq = fsm.quotient([[fsm.state("A"), fsm.state("C")],
            ....:                      [fsm.state("B"), fsm.state("D")]])
            sage: fsmq.transitions()
            [Transition from ('A', 'C')
                          to ('B', 'D'): 0|1,
             Transition from ('A', 'C')
                          to ('B', 'D'): 1|0,
             Transition from ('B', 'D')
                          to ('A', 'C'): 0|0,
             Transition from ('B', 'D')
                          to ('A', 'C'): 1|1]
            sage: fsmq.relabeled().transitions()
            [Transition from 0 to 1: 0|1,
             Transition from 0 to 1: 1|0,
             Transition from 1 to 0: 0|0,
             Transition from 1 to 0: 1|1]
            sage: fsmq1 = fsm.quotient(fsm.equivalence_classes())
            sage: fsmq1 == fsmq
            True
            sage: fsm.quotient([[fsm.state("A"), fsm.state("B"), fsm.state("C"), fsm.state("D")]])
            Traceback (most recent call last):
            ...
            AssertionError: Transitions of state 'A' and 'B' are incompatible.
        TESTS::
            sage: fsm = FiniteStateMachine([("A", "B", 0, 1), ("A", "B", 1, 0),
            ....:                           ("B", "C", 0, 0), ("B", "C", 1, 1),
            ....:                           ("C", "D", 0, 1), ("C", "D", 1, 0),
            ....:                           ("D", "A", 0, 0), ("D", "A", 1, 1)],
            ....:                          final_states=["A", "C"])
            sage: fsm.state("A").final_word_out = 1
            sage: fsm.state("C").final_word_out = 2
            sage: fsmq = fsm.quotient([[fsm.state("A"), fsm.state("C")],
            ....:                      [fsm.state("B"), fsm.state("D")]])
            Traceback (most recent call last):
            ...
            AssertionError: Class ['A', 'C'] mixes
            final states with different final output words.
        """
        new = self.empty_copy()
        state_mapping = {}
        # Create new states and build state_mapping
        for c in classes:
            new_label = tuple(c)
            # The first member of a class serves as its representative.
            new_state = c[0].relabeled(new_label)
            new.add_state(new_state)
            for state in c:
                state_mapping[state] = new_state
        # Copy data from old transducer
        for c in classes:
            new_state = state_mapping[c[0]]
            # Transition data of the representative (targets replaced by
            # their classes), used below to verify that the other class
            # members carry the same transitions modulo classes.
            sorted_transitions = sorted(
                [(state_mapping[t.to_state], t.word_in, t.word_out)
                 for t in c[0].transitions])
            for transition in self.iter_transitions(c[0]):
                new.add_transition(
                    from_state = new_state,
                    to_state = state_mapping[transition.to_state],
                    word_in = transition.word_in,
                    word_out = transition.word_out)
            # check that all class members have the same information (modulo classes)
            for state in c:
                # A class containing any initial state becomes initial.
                new_state.is_initial = new_state.is_initial or state.is_initial
                assert new_state.is_final == state.is_final, \
                    "Class %s mixes final and non-final states" % (c,)
                assert new_state.word_out == state.word_out, \
                    "Class %s mixes different word_out" % (c,)
                assert new_state.color == state.color, \
                    "Class %s mixes different colors" % (c,)
                assert sorted_transitions == sorted(
                    [(state_mapping[t.to_state], t.word_in, t.word_out)
                     for t in state.transitions]), \
                    "Transitions of state %s and %s are incompatible." % (c[0], state)
                assert new_state.final_word_out == state.final_word_out, \
                    "Class %s mixes final states with different " \
                    "final output words." % (c,)
        return new
    def merged_transitions(self):
        """
        Merges transitions which have the same ``from_state``,
        ``to_state`` and ``word_out`` while adding their ``word_in``.
        INPUT:
        Nothing.
        OUTPUT:
        A finite state machine with merged transitions. If no mergers occur,
        return ``self``.
        EXAMPLE::
            sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
            sage: T = Transducer([[1, 2, 1/4, 1], [1, -2, 1/4, 1], [1, -2, 1/2, 1],
            ....:                 [2, 2, 1/4, 1], [2, -2, 1/4, 1], [-2, -2, 1/4, 1],
            ....:                 [-2, 2, 1/4, 1], [2, 3, 1/2, 1], [-2, 3, 1/2, 1]],
            ....:                on_duplicate_transition=duplicate_transition_add_input)
            sage: T1 = T.merged_transitions()
            sage: T1 is T
            False
            sage: sorted(T1.transitions())
            [Transition from -2 to -2: 1/4|1,
             Transition from -2 to 2: 1/4|1,
             Transition from -2 to 3: 1/2|1,
             Transition from 1 to 2: 1/4|1,
             Transition from 1 to -2: 3/4|1,
             Transition from 2 to -2: 1/4|1,
             Transition from 2 to 2: 1/4|1,
             Transition from 2 to 3: 1/2|1]
        Applying the function again does not change the result::
            sage: T2 = T1.merged_transitions()
            sage: T2 is T1
            True
        """
        def key(transition):
            # Transitions with equal key are candidates for merging.
            return (transition.to_state, transition.word_out)
        new = self.empty_copy()
        changed = False
        state_dict = {}
        # Shared memo so that cross-references between states are
        # preserved when deepcopying them one by one.
        memo = {}
        for state in self.states():
            new_state = deepcopy(state,memo)
            state_dict[state] = new_state
            new.add_state(new_state)
        for state in self.states():
            # groupby requires its input sorted by the same key.
            grouped_transitions = itertools.groupby(sorted(state.transitions, key=key), key=key)
            for (to_state, word_out), transitions in grouped_transitions:
                transition_list = list(transitions)
                changed = changed or len(transition_list) > 1
                # Sum up the (single-letter) input labels of the group;
                # inputs are interpreted as probabilities/weights here.
                word_in = 0
                for transition in transition_list:
                    if hasattr(transition.word_in, '__iter__') and len(transition.word_in) == 1:
                        word_in += transition.word_in[0]
                    else:
                        raise TypeError('%s does not have a list of length 1 as word_in' % transition)
                new.add_transition((state, to_state, word_in, word_out))
        # Return ``self`` unchanged when nothing was merged, so callers
        # can detect a fixed point via identity (cf. the doctest above).
        if changed:
            return new
        else:
            return self
def markov_chain_simplification(self):
    """
    Consider ``self`` as Markov chain with probabilities as input labels
    and simplify it.

    INPUT:

    Nothing.

    OUTPUT:

    Simplified version of ``self``.

    EXAMPLE::

        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: T = Transducer([[1, 2, 1/4, 0], [1, -2, 1/4, 0], [1, -2, 1/2, 0],
        ....:                 [2, 2, 1/4, 1], [2, -2, 1/4, 1], [-2, -2, 1/4, 1],
        ....:                 [-2, 2, 1/4, 1], [2, 3, 1/2, 2], [-2, 3, 1/2, 2]],
        ....:                initial_states=[1],
        ....:                final_states=[3],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: T1 = T.markov_chain_simplification()
        sage: sorted(T1.transitions())
        [Transition from ((1,),) to ((2, -2),): 1|0,
         Transition from ((2, -2),) to ((2, -2),): 1/2|1,
         Transition from ((2, -2),) to ((3,),): 1/2|2]
    """
    # Alternate simplification and transition merging until neither
    # step changes anything: merged_transitions returns its argument
    # unchanged (identity) and the state count is stable.
    current = self.merged_transitions()
    state_count = len(current.states())
    while True:
        simplified = current.simplification()
        simplified_count = len(simplified.states())
        merged = simplified.merged_transitions()
        if merged is simplified and state_count == simplified_count:
            return merged
        current = merged
        state_count = simplified_count
def with_final_word_out(self, letters, allow_non_final=True):
    """
    Constructs a new finite state machine with final output words
    for all states by implicitly reading trailing letters until a
    final state is reached.

    INPUT:

    - ``letters`` -- either an element of the input alphabet or a
      list of such elements. This is repeated cyclically when
      needed.

    - ``allow_non_final`` -- a boolean (default: ``True``) which
      indicates whether we allow that some states may be non-final
      in the resulting finite state machine. I.e., if ``False`` then
      each state has to have a path to a final state with input
      label matching ``letters``.

    OUTPUT:

    A finite state machine.

    The inplace version of this function is
    :meth:`.construct_final_word_out`.

    Suppose for the moment a single element ``letter`` as input
    for ``letters``. This is equivalent to ``letters = [letter]``.
    We will discuss the general case below.

    Let ``word_in`` be a word over the input alphabet and assume
    that the original finite state machine transforms ``word_in`` to
    ``word_out`` reaching a possibly non-final state ``s``. Let
    further `k` be the minimum number of letters ``letter`` such
    that there is a path from ``s`` to some final state ``f`` whose
    input label consists of `k` copies of ``letter`` and whose
    output label is ``path_word_out``. Then the state ``s`` of the
    resulting finite state machine is a final state with final
    output ``path_word_out + f.final_word_out``. Therefore, the new
    finite state machine transforms ``word_in`` to ``word_out +
    path_word_out + f.final_word_out``.

    This is e.g. useful for finite state machines operating on digit
    expansions: there, it is sometimes required to read a sufficient
    number of trailing zeros (at the most significant positions) in
    order to reach a final state and to flush all carries. In this
    case, this method constructs an essentially equivalent finite
    state machine in the sense that it not longer requires adding
    sufficiently many trailing zeros. However, it is the
    responsibility of the user to make sure that if adding trailing
    zeros to the input anyway, the output is equivalent.

    If ``letters`` consists of more than one letter, then it is
    assumed that (not necessarily complete) cycles of ``letters``
    are appended as trailing input.

    .. SEEALSO::

        :ref:`example on Gray code <finite_state_machine_gray_code_example>`

    EXAMPLES:

    #.  A simple transducer transforming `00` blocks to `01`
        blocks::

            sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 1)],
            ....:                initial_states=[0],
            ....:                final_states=[0])
            sage: T.process([0, 0, 0])
            (False, 1, [0, 1, 0])
            sage: T.process([0, 0, 0, 0])
            (True, 0, [0, 1, 0, 1])
            sage: F = T.with_final_word_out(0)
            sage: for f in F.iter_final_states():
            ....:     print f, f.final_word_out
            0 []
            1 [1]
            sage: F.process([0, 0, 0])
            (True, 1, [0, 1, 0, 1])
            sage: F.process([0, 0, 0, 0])
            (True, 0, [0, 1, 0, 1])

    #.  A more realistic example: Addition of `1` in binary. We
        construct a transition function transforming the input
        to its binary expansion::

            sage: def binary_transition(carry, input):
            ....:     value = carry + input
            ....:     if value.mod(2) == 0:
            ....:         return (value/2, 0)
            ....:     else:
            ....:         return ((value-1)/2, 1)

        Now, we only have to start with a carry of `1` to
        get the required transducer::

            sage: T = Transducer(binary_transition,
            ....:                input_alphabet=[0, 1],
            ....:                initial_states=[1],
            ....:                final_states=[0])

        We test this for the binary expansion of `7`::

            sage: T.process([1, 1, 1])
            (False, 1, [0, 0, 0])

        The final carry `1` has not be flushed yet, we have to add a
        trailing zero::

            sage: T.process([1, 1, 1, 0])
            (True, 0, [0, 0, 0, 1])

        We check that with this trailing zero, the transducer
        performs as advertised::

            sage: all(ZZ(T(k.bits()+[0]), base=2) == k + 1
            ....:     for k in srange(16))
            True

        However, most of the time, we produce superfluous trailing
        zeros::

            sage: T(11.bits()+[0])
            [0, 0, 1, 1, 0]

        We now use this method::

            sage: F = T.with_final_word_out(0)
            sage: for f in F.iter_final_states():
            ....:     print f, f.final_word_out
            1 [1]
            0 []

        The same tests as above, but we do not have to pad with
        trailing zeros anymore::

            sage: F.process([1, 1, 1])
            (True, 1, [0, 0, 0, 1])
            sage: all(ZZ(F(k.bits()), base=2) == k + 1
            ....:     for k in srange(16))
            True

        No more trailing zero in the output::

            sage: F(11.bits())
            [0, 0, 1, 1]
            sage: all(F(k.bits())[-1] == 1
            ....:     for k in srange(16))
            True

    #.  Here is an example, where we allow trailing repeated `10`::

            sage: T = Transducer([(0, 1, 0, 'a'),
            ....:                 (1, 2, 1, 'b'),
            ....:                 (2, 0, 0, 'c')],
            ....:                initial_states=[0],
            ....:                final_states=[0])
            sage: F = T.with_final_word_out([1, 0])
            sage: for f in F.iter_final_states():
            ....:     print f, ''.join(f.final_word_out)
            0
            1 bc

        Trying this with trailing repeated `01` does not produce
        a ``final_word_out`` for state ``1``, but for state ``2``::

            sage: F = T.with_final_word_out([0, 1])
            sage: for f in F.iter_final_states():
            ....:     print f, ''.join(f.final_word_out)
            0
            2 c

    #.  Here another example with a more-letter trailing input::

            sage: T = Transducer([(0, 1, 0, 'a'),
            ....:                 (1, 2, 0, 'b'), (1, 2, 1, 'b'),
            ....:                 (2, 3, 0, 'c'), (2, 0, 1, 'e'),
            ....:                 (3, 1, 0, 'd'), (3, 1, 1, 'd')],
            ....:                initial_states=[0],
            ....:                final_states=[0],
            ....:                with_final_word_out=[0, 0, 1, 1])
            sage: for f in T.iter_final_states():
            ....:     print f, ''.join(f.final_word_out)
            0
            1 bcdbcdbe
            2 cdbe
            3 dbe

    TESTS:

    #.  Reading copies of ``letter`` may result in a cycle. In
        this simple example, we have no final state at all::

            sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
            ....:                initial_states=[0])
            sage: T.with_final_word_out(0)
            Traceback (most recent call last):
            ...
            ValueError: The finite state machine contains
            a cycle starting at state 0 with input label 0
            and no final state.

    #.  A unique transition with input word ``letter`` is
        required::

            sage: T = Transducer([(0, 1, 0, 0), (0, 2, 0, 0)])
            sage: T.with_final_word_out(0)
            Traceback (most recent call last):
            ...
            ValueError: No unique transition leaving state 0
            with input label 0.

        It is not a problem if there is no transition starting
        at state ``1`` with input word ``letter``::

            sage: T = Transducer([(0, 1, 0, 0)])
            sage: F = T.with_final_word_out(0)
            sage: for f in F.iter_final_states():
            ....:     print f, f.final_word_out

        Anyhow, you can override this by::

            sage: T = Transducer([(0, 1, 0, 0)])
            sage: T.with_final_word_out(0, allow_non_final=False)
            Traceback (most recent call last):
            ...
            ValueError: No unique transition leaving state 1
            with input label 0.

    #.  All transitions must have input labels of length `1`::

            sage: T = Transducer([(0, 0, [], 0)])
            sage: T.with_final_word_out(0)
            Traceback (most recent call last):
            ...
            NotImplementedError: All transitions must have input
            labels of length 1. Consider calling split_transitions().
            sage: T = Transducer([(0, 0, [0, 1], 0)])
            sage: T.with_final_word_out(0)
            Traceback (most recent call last):
            ...
            NotImplementedError: All transitions must have input
            labels of length 1. Consider calling split_transitions().

    #.  An empty list as input is not allowed::

            sage: T = Transducer([(0, 0, [], 0)])
            sage: T.with_final_word_out([])
            Traceback (most recent call last):
            ...
            ValueError: letters is not allowed to be an empty list.
    """
    # All actual work happens in the inplace variant; here we only
    # protect ``self`` by operating on a deep copy.
    new = deepcopy(self)
    new.construct_final_word_out(letters, allow_non_final)
    return new
def construct_final_word_out(self, letters, allow_non_final=True):
    """
    This is an inplace version of :meth:`.with_final_word_out`. See
    :meth:`.with_final_word_out` for documentation and examples.

    TESTS::

        sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 1)],
        ....:                initial_states=[0],
        ....:                final_states=[0])
        sage: F = T.with_final_word_out(0)
        sage: T.construct_final_word_out(0)
        sage: T == F  # indirect doctest
        True
        sage: T = Transducer([(0, 1, 0, None)],
        ....:                final_states=[1])
        sage: F = T.with_final_word_out(0)
        sage: F.state(0).final_word_out
        []
    """
    # ``izip_longest`` was imported here but never used; only
    # ``cycle`` is needed to repeat ``letters`` indefinitely.
    from itertools import cycle

    if not isinstance(letters, list):
        letters = [letters]
    elif not letters:
        raise ValueError(
            "letters is not allowed to be an empty list.")

    in_progress = set()
    cache = {}

    def find_final_word_out(state):
        # The return value is the output which is produced when
        # reading the given letters until a final state is reached.
        # If no final state can be reached, then None is returned.
        # For final states, the final word out is returned.
        # For final states with empty final output, that is [].
        position, letter = next(trailing_letters)
        if state.is_final:
            return state.final_word_out

        if (state, position) in cache:
            return cache[state, position]

        # A (state, position) pair already on the recursion stack
        # means we are running in a cycle without any final state.
        if (state, position) in in_progress:
            raise ValueError(
                "The finite state machine contains a cycle "
                "starting at state %s with input label %s "
                "and no final state." % (state, letter))

        if any(len(t.word_in) != 1 for t in state.transitions):
            raise NotImplementedError(
                "All transitions must have input labels of length "
                "1. Consider calling split_transitions().")

        transitions = [t for t in state.transitions
                       if t.word_in == [letter]]
        if allow_non_final and not transitions:
            final_word_out = None
        elif len(transitions) != 1:
            raise ValueError(
                "No unique transition leaving state %s with input "
                "label %s." % (state, letter))
        else:
            in_progress.add((state, position))
            next_word = find_final_word_out(transitions[0].to_state)
            if next_word is not None:
                final_word_out = transitions[0].word_out + next_word
            else:
                final_word_out = None
            in_progress.remove((state, position))

        cache[state, position] = final_word_out
        return final_word_out

    for state in self.iter_states():
        assert(not in_progress)
        # trailing_letters is an infinite iterator additionally
        # marking positions
        trailing_letters = cycle(enumerate(letters))
        find_final_word_out(state)

    # actual modifications can only be carried out after all final words
    # have been computed as it may not be permissible to stop at a
    # formerly non-final state unless a cycle has been completed.
    for (state, position), final_word_out in cache.iteritems():
        if position == 0 and final_word_out is not None:
            state.is_final = True
            state.final_word_out = final_word_out
# *************************************************************************
# other
# *************************************************************************
def graph(self, edge_labels='words_in_out'):
    """
    Returns the graph of the finite state machine with labeled
    vertices and labeled edges.

    INPUT:

    - ``edge_labels``: (default: ``'words_in_out'``) can be

      - ``'words_in_out'`` (labels will be strings ``'i|o'``)

      - a function which takes a transition as input and
        outputs (returns) the label

    OUTPUT:

    A graph.

    EXAMPLES::

        sage: from sage.combinat.finite_state_machine import FSMState
        sage: A = FSMState('A')
        sage: T = Transducer()
        sage: T.graph()
        Digraph on 0 vertices
        sage: T.add_state(A)
        'A'
        sage: T.graph()
        Digraph on 1 vertex
        sage: T.add_transition(('A', 'A', 0, 1))
        Transition from 'A' to 'A': 0|1
        sage: T.graph()
        Looped digraph on 1 vertex
    """
    # Determine how to label the edges of the resulting graph.
    if edge_labels == 'words_in_out':
        label_function = lambda transition: transition._in_out_label_()
    elif hasattr(edge_labels, '__call__'):
        label_function = edge_labels
    else:
        raise TypeError('Wrong argument for edge_labels.')

    edges = []
    # States without any outgoing transition would otherwise be
    # lost, so collect them and add them as isolated vertices.
    lonely_vertices = []
    for state in self.iter_states():
        if not state.transitions:
            lonely_vertices.append(state.label())
        for transition in state.transitions:
            edges.append((transition.from_state.label(),
                          transition.to_state.label(),
                          label_function(transition)))

    result = DiGraph(edges)
    result.add_vertices(lonely_vertices)
    return result

digraph = graph
def plot(self):
    """
    Plots a graph of the finite state machine with labeled
    vertices and labeled edges.

    INPUT:

    Nothing.

    OUTPUT:

    A plot of the graph of the finite state machine.

    TESTS::

        sage: FiniteStateMachine([('A', 'A', 0)]).plot()
    """
    # Build the labeled digraph first, then delegate to its plot.
    labeled_digraph = self.graph(edge_labels='words_in_out')
    return labeled_digraph.plot()
def predecessors(self, state, valid_input=None):
    """
    Lists all predecessors of a state.

    INPUT:

    - ``state`` -- the state from which the predecessors should be
      listed.

    - ``valid_input`` -- If ``valid_input`` is a list, then we
      only consider transitions whose input labels are contained
      in ``valid_input``. ``state`` has to be a :class:`FSMState`
      (not a label of a state). If input labels of length larger
      than `1` are used, then ``valid_input`` has to be a list of
      lists.

    OUTPUT:

    A list of states.

    EXAMPLES::

        sage: A = Transducer([('I', 'A', 'a', 'b'), ('I', 'B', 'b', 'c'),
        ....:                 ('I', 'C', 'c', 'a'), ('A', 'F', 'b', 'a'),
        ....:                 ('B', 'F', ['c', 'b'], 'b'), ('C', 'F', 'a', 'c')],
        ....:                initial_states=['I'], final_states=['F'])
        sage: A.predecessors(A.state('A'))
        ['A', 'I']
        sage: A.predecessors(A.state('F'), valid_input=['b', 'a'])
        ['F', 'C', 'A', 'I']
        sage: A.predecessors(A.state('F'), valid_input=[['c', 'b'], 'a'])
        ['F', 'C', 'B']
    """
    # Normalize valid_input: each entry becomes a list (input words
    # are stored as lists on the transitions).
    if valid_input is not None:
        normalized = []
        for entry in valid_input:
            if isinstance(entry, list):
                normalized.append(entry)
            else:
                normalized.append([entry])
        valid_input = normalized

    # Map each state to the list of its direct predecessors that
    # have not been handled yet (None marks a handled state).
    pending_predecessors = {s: [] for s in self.states()}
    for transition in self.transitions():
        if valid_input is None or transition.word_in in valid_input:
            pending_predecessors[transition.to_state].append(
                transition.from_state)

    # Depth-first traversal backwards along the transitions.
    done = []
    stack = [state]
    while stack:
        current = stack.pop()
        candidates = pending_predecessors[current]
        if candidates is not None:
            stack.extend(candidates)
            pending_predecessors[current] = None
            done.append(current)
    return done
def asymptotic_moments(self, variable=SR.symbol('n')):
    r"""
    Returns the main terms of expectation and variance of the sum
    of output labels and its covariance with the sum of input
    labels.

    INPUT:

    - ``variable`` -- a symbol denoting the length of the input,
      by default `n`.

    OUTPUT:

    A dictionary consisting of

    - ``expectation`` -- `e n + \operatorname{Order}(1)`,
    - ``variance`` -- `v n + \operatorname{Order}(1)`,
    - ``covariance`` -- `c n + \operatorname{Order}(1)`

    for suitable constants `e`, `v` and `c`.

    Assume that all input and output labels are numbers and that
    ``self`` is complete and has only one final component. Assume
    further that this final component is aperiodic. Furthermore,
    assume that there is exactly one initial state and that all
    states are final.

    Denote by `X_n` the sum of output labels written by the
    finite state machine when reading a random input word of
    length `n` over the input alphabet (assuming
    equidistribution).

    Then the expectation of `X_n` is `en+O(1)`, the variance
    of `X_n` is `vn+O(1)` and the covariance of `X_n` and
    the sum of input labels is `cn+O(1)`, cf. [HKW2014]_,
    Theorem 2.

    In the case of non-integer input or output labels, performance
    degrades significantly. For rational input and output labels,
    consider rescaling to integers. This limitation comes from the
    fact that determinants over polynomial rings can be computed
    much more efficiently than over the symbolic ring. In fact, we
    compute (parts) of a trivariate generating function where the
    input and output labels are exponents of some indeterminates,
    see [HKW2014]_, Theorem 2 for details. If those exponents are
    integers, we can use a polynomial ring.

    EXAMPLES:

    #.  A trivial example: write the negative of the input::

            sage: T = Transducer([(0, 0, 0, 0), (0, 0, 1, -1)],
            ....:                initial_states=[0],
            ....:                final_states=[0])
            sage: T([0, 1, 1])
            [0, -1, -1]
            sage: moments = T.asymptotic_moments()
            sage: moments['expectation']
            -1/2*n + Order(1)
            sage: moments['variance']
            1/4*n + Order(1)
            sage: moments['covariance']
            -1/4*n + Order(1)

    #.  For the case of the Hamming weight of the non-adjacent-form
        (NAF) of integers, cf. the :wikipedia:`Non-adjacent_form`
        and the :ref:`example on recognizing NAFs
        <finite_state_machine_recognizing_NAFs_example>`, the
        following agrees with the results in [HP2007]_.

        We first use the transducer to convert the standard binary
        expansion to the NAF given in [HP2007]_. We use the parameter
        ``with_final_word_out`` such that we do not have to add
        sufficiently many trailing zeros::

            sage: NAF = Transducer([(0, 0, 0, 0),
            ....:                   (0, '.1', 1, None),
            ....:                   ('.1', 0, 0, [1, 0]),
            ....:                   ('.1', 1, 1, [-1, 0]),
            ....:                   (1, 1, 1, 0),
            ....:                   (1, '.1', 0, None)],
            ....:                  initial_states=[0],
            ....:                  final_states=[0],
            ....:                  with_final_word_out=[0])

        As an example, we compute the NAF of `27` by this
        transducer.

        ::

            sage: binary_27 = 27.bits()
            sage: binary_27
            [1, 1, 0, 1, 1]
            sage: NAF_27 = NAF(binary_27)
            sage: NAF_27
            [-1, 0, -1, 0, 0, 1, 0]
            sage: ZZ(NAF_27, base=2)
            27

        Next, we are only interested in the Hamming weight::

            sage: def weight(state, input):
            ....:     if input is None:
            ....:         result = 0
            ....:     else:
            ....:         result = ZZ(input != 0)
            ....:     return (0, result)
            sage: weight_transducer = Transducer(weight,
            ....:                                input_alphabet=[-1, 0, 1],
            ....:                                initial_states=[0],
            ....:                                final_states=[0])

        At the moment, we can not use composition with ``NAF``,
        because it has non-empty final output words::

            sage: NAFweight = weight_transducer.composition(
            ....:     NAF,
            ....:     algorithm='explorative')
            Traceback (most recent call last):
            ...
            NotImplementedError: Explorative composition is not
            implemented for transducers with non-empty final output
            words. Try the direct algorithm instead.

        Thus, we change ``NAF``, then compose and again construct
        the final output words::

            sage: for s in NAF.final_states():
            ....:     s.final_word_out = []
            sage: NAFweight = weight_transducer.composition(
            ....:     NAF,
            ....:     algorithm='explorative').relabeled()
            sage: NAFweight.construct_final_word_out(0)
            sage: sorted(NAFweight.transitions())
            [Transition from 0 to 0: 0|0,
             Transition from 0 to 1: 1|-,
             Transition from 1 to 0: 0|1,0,
             Transition from 1 to 2: 1|1,0,
             Transition from 2 to 1: 0|-,
             Transition from 2 to 2: 1|0]
            sage: NAFweight(binary_27 + [0, 0])
            [1, 0, 1, 0, 0, 1, 0]

        Now, we actually compute the asymptotic moments::

            sage: moments = NAFweight.asymptotic_moments()
            sage: moments['expectation']
            1/3*n + Order(1)
            sage: moments['variance']
            2/27*n + Order(1)
            sage: moments['covariance']
            Order(1)

    #.  This is Example 3.1 in [HKW2014]_, where a transducer with
        variable output labels is given. There, the aim was to
        choose the output labels of this very simple transducer such
        that the input and output sum are asymptotically
        independent, i.e., the constant `c` vanishes.

        ::

            sage: var('a_1, a_2, a_3, a_4')
            (a_1, a_2, a_3, a_4)
            sage: T = Transducer([[0, 0, 0, a_1], [0, 1, 1, a_3],
            ....:                 [1, 0, 0, a_4], [1, 1, 1, a_2]],
            ....:                initial_states=[0], final_states=[0, 1])
            sage: moments = T.asymptotic_moments()
            verbose 0 (...) Non-integer output weights lead to
            significant performance degradation.
            sage: moments['expectation']
            1/4*(a_1 + a_2 + a_3 + a_4)*n + Order(1)
            sage: moments['covariance']
            -1/4*(a_1 - a_2)*n + Order(1)

        Therefore, the asymptotic covariance vanishes if and only if
        `a_2=a_1`.

    #.  This is Example 6.2 in [HKW2014]_, dealing with the
        transducer converting the binary expansion of an integer
        into Gray code (cf. the :wikipedia:`Gray_code` and the
        :ref:`example on Gray code
        <finite_state_machine_gray_code_example>`)::

            sage: moments = transducers.GrayCode().asymptotic_moments()
            sage: moments['expectation']
            1/2*n + Order(1)
            sage: moments['variance']
            1/4*n + Order(1)
            sage: moments['covariance']
            Order(1)

    #.  This is the first part of Example 6.3 in [HKW2014]_,
        counting the number of 10 blocks in the standard binary
        expansion. The least significant digit is at the left-most
        position::

            sage: block10 = transducers.CountSubblockOccurrences(
            ....:     [1, 0],
            ....:     input_alphabet=[0, 1])
            sage: sorted(block10.transitions())
            [Transition from () to (): 0|0,
             Transition from () to (1,): 1|0,
             Transition from (1,) to (): 0|1,
             Transition from (1,) to (1,): 1|0]
            sage: moments = block10.asymptotic_moments()
            sage: moments['expectation']
            1/4*n + Order(1)
            sage: moments['variance']
            1/16*n + Order(1)
            sage: moments['covariance']
            Order(1)

    #.  This is the second part of Example 6.3 in [HKW2014]_,
        counting the number of 11 blocks in the standard binary
        expansion. The least significant digit is at the left-most
        position::

            sage: block11 = transducers.CountSubblockOccurrences(
            ....:     [1, 1],
            ....:     input_alphabet=[0, 1])
            sage: sorted(block11.transitions())
            [Transition from () to (): 0|0,
             Transition from () to (1,): 1|0,
             Transition from (1,) to (): 0|0,
             Transition from (1,) to (1,): 1|1]
            sage: var('N')
            N
            sage: moments = block11.asymptotic_moments(N)
            sage: moments['expectation']
            1/4*N + Order(1)
            sage: moments['variance']
            5/16*N + Order(1)
            sage: correlation = (moments['covariance'].coefficient(N) /
            ....:     (1/2 * sqrt(moments['variance'].coefficient(N))))
            sage: correlation
            2/5*sqrt(5)

    #.  This is Example 6.4 in [HKW2014]_, counting the number of
        01 blocks minus the number of 10 blocks in the standard binary
        expansion. The least significant digit is at the left-most
        position::

            sage: block01 = transducers.CountSubblockOccurrences(
            ....:     [0, 1],
            ....:     input_alphabet=[0, 1])
            sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
            sage: product_01x10 = block01.cartesian_product(block10)
            sage: block_difference = transducers.sub([0, 1])(product_01x10)
            sage: T = block_difference.simplification().relabeled()
            sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = True
            sage: T.transitions()
            [Transition from 0 to 1: 0|-1,
             Transition from 0 to 0: 1|0,
             Transition from 1 to 1: 0|0,
             Transition from 1 to 0: 1|1,
             Transition from 2 to 1: 0|0,
             Transition from 2 to 0: 1|0]
            sage: moments = T.asymptotic_moments()
            sage: moments['expectation']
            Order(1)
            sage: moments['variance']
            Order(1)
            sage: moments['covariance']
            Order(1)

    #.  The finite state machine must have a unique final component::

            sage: T = Transducer([(0, -1, -1, -1), (0, 1, 1, 1),
            ....:                 (-1, -1, -1, -1), (-1, -1, 1, -1),
            ....:                 (1, 1, -1, 1), (1, 1, 1, 1)],
            ....:                initial_states=[0],
            ....:                final_states=[0, 1, -1])
            sage: T.asymptotic_moments()
            Traceback (most recent call last):
            ...
            NotImplementedError: asymptotic_moments is only
            implemented for finite state machines with one final
            component.

        In this particular example, the first letter of the input
        decides whether we reach the loop at `-1` or the loop at
        `1`. In the first case, we have `X_n = -n`, while we have
        `X_n = n` in the second case. Therefore, the expectation
        `E(X_n)` of `X_n` is `E(X_n) = 0`. We get `(X_n-E(X_n))^2 =
        n^2` in all cases, which results in a variance of `n^2`.

        So this example shows that the variance may be non-linear if
        there is more than one final component.

    TESTS:

    #.  An input alphabet must be given::

            sage: T = Transducer([[0, 0, 0, 0]],
            ....:                initial_states=[0], final_states=[0],
            ....:                determine_alphabets=False)
            sage: T.asymptotic_moments()
            Traceback (most recent call last):
            ...
            ValueError: No input alphabet is given.
            Try calling determine_alphabets().

    #.  The finite state machine must have a unique initial state::

            sage: T = Transducer([(0, 0, 0, 0)])
            sage: T.asymptotic_moments()
            Traceback (most recent call last):
            ...
            ValueError: A unique initial state is required.

    #.  The finite state machine must be complete::

            sage: T = Transducer([[0, 0, 0, 0]],
            ....:                initial_states=[0], final_states=[0],
            ....:                input_alphabet=[0, 1])
            sage: T.asymptotic_moments()
            Traceback (most recent call last):
            ...
            NotImplementedError: This finite state machine is
            not complete.

    #.  The final component of the finite state machine must be
        aperiodic::

            sage: T = Transducer([(0, 1, 0, 0), (1, 0, 0, 0)],
            ....:                initial_states=[0], final_states=[0, 1])
            sage: T.asymptotic_moments()
            Traceback (most recent call last):
            ...
            NotImplementedError: asymptotic_moments is only
            implemented for finite state machines whose unique final
            component is aperiodic.

    #.  Non-integer input or output labels lead to a warning::

            sage: T = Transducer([[0, 0, 0, 0], [0, 0, 1, -1/2]],
            ....:                initial_states=[0], final_states=[0])
            sage: moments = T.asymptotic_moments()
            verbose 0 (...) Non-integer output weights lead to
            significant performance degradation.
            sage: moments['expectation']
            -1/4*n + Order(1)
            sage: moments['variance']
            1/16*n + Order(1)
            sage: moments['covariance']
            -1/8*n + Order(1)

        This warning can be silenced by :func:`~sage.misc.misc.set_verbose`::

            sage: set_verbose(-1, "finite_state_machine.py")
            sage: moments = T.asymptotic_moments()
            sage: moments['expectation']
            -1/4*n + Order(1)
            sage: moments['variance']
            1/16*n + Order(1)
            sage: moments['covariance']
            -1/8*n + Order(1)
            sage: set_verbose(0, "finite_state_machine.py")

    #.  Check whether ``word_out`` of ``FSMState`` are correctly
        dealt with::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: s = FSMState(0, word_out=2,
            ....:              is_initial=True,
            ....:              is_final=True)
            sage: T = Transducer([(s, s, 0, 1)],
            ....:                initial_states=[s], final_states=[s])
            sage: T([0, 0])
            [2, 1, 2, 1, 2]
            sage: T.asymptotic_moments()['expectation']
            3*n + Order(1)

        The same test for non-integer output::

            sage: from sage.combinat.finite_state_machine import FSMState
            sage: s = FSMState(0, word_out=2/3)
            sage: T = Transducer([(s, s, 0, 1/2)],
            ....:                initial_states=[s], final_states=[s])
            sage: T.asymptotic_moments()['expectation']
            verbose 0 (...) Non-integer output weights lead to
            significant performance degradation.
            7/6*n + Order(1)

    #.  All states of ``self`` have to be final::

            sage: T = Transducer([(0, 1, 1, 4)], initial_states=[0])
            sage: T.asymptotic_moments()
            Traceback (most recent call last):
            ...
            ValueError: Not all states are final.

    ALGORITHM:

    See [HKW2014]_, Theorem 2.

    REFERENCES:

    .. [HKW2014] Clemens Heuberger, Sara Kropf and Stephan Wagner,
       *Combinatorial Characterization of Independent Transducers via
       Functional Digraphs*, :arxiv:`1404.3680`.

    .. [HP2007] Clemens Heuberger and Helmut Prodinger, *The Hamming
       Weight of the Non-Adjacent-Form under Various Input Statistics*,
       Periodica Mathematica Hungarica Vol. 55 (1), 2007, pp. 81-96,
       :doi:`10.1007/s10998-007-3081-z`.
    """
    from sage.calculus.functional import derivative
    from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
    from sage.rings.rational_field import QQ

    # Validate the preconditions listed in the docstring; each check
    # corresponds to an error shown in the TESTS section above.
    if self.input_alphabet is None:
        raise ValueError("No input alphabet is given. "
                         "Try calling determine_alphabets().")

    if len(self.initial_states()) != 1:
        raise ValueError("A unique initial state is required.")

    if not all(state.is_final for state in self.iter_states()):
        raise ValueError("Not all states are final.")

    if not self.is_complete():
        raise NotImplementedError("This finite state machine is "
                                  "not complete.")

    final_components = self.final_components()
    if len(final_components) != 1:
        raise NotImplementedError("asymptotic_moments is only "
                                  "implemented for finite state machines "
                                  "with one final component.")
    final_component = final_components[0]

    if not final_component.digraph().is_aperiodic():
        raise NotImplementedError("asymptotic_moments is only "
                                  "implemented for finite state machines "
                                  "whose unique final component is "
                                  "aperiodic.")

    # Adjacency matrix whose entries encode input/output label sums
    # as exponents of x and y (the trivariate generating function of
    # [HKW2014], Theorem 2; z marks the length, see below).
    def get_matrix(fsm, x, y):
        return fsm.adjacency_matrix(
            entry=lambda transition: x**sum(transition.word_in) *
            y**(sum(transition.word_out) +
                sum(transition.from_state.word_out)))

    K = len(self.input_alphabet)
    R = PolynomialRing(QQ, ("x", "y", "z"))
    (x, y, z) = R.gens()
    try:
        # Works only if all label sums are integers (valid exponents
        # in the polynomial ring); otherwise a TypeError is raised.
        M = get_matrix(self, x, y)
    except TypeError:
        verbose("Non-integer output weights lead to "
                "significant performance degradation.", level=0)
        # fall back to symbolic ring
        R = SR
        x = R.symbol()
        y = R.symbol()
        z = R.symbol()
        M = get_matrix(self, x, y)

        def substitute_one(g):
            return g.subs({x: 1, y: 1, z: 1})
    else:
        def substitute_one(g):
            # the result of the substitution shall live in QQ,
            # not in the polynomial ring R, so the method
            # subs does not achieve the result.
            # Therefore, we need this helper function.
            return g(1, 1, 1)

    # Denominator of the generating function; its partial derivatives
    # at (1, 1, 1) yield the asymptotic constants (cf. [HKW2014],
    # Theorem 2).
    f = (M.parent().identity_matrix() - z/K*M).det()
    f_x = substitute_one(derivative(f, x))
    f_y = substitute_one(derivative(f, y))
    f_z = substitute_one(derivative(f, z))
    f_xy = substitute_one(derivative(f, x, y))
    f_xz = substitute_one(derivative(f, x, z))
    f_yz = substitute_one(derivative(f, y, z))
    f_yy = substitute_one(derivative(f, y, y))
    f_zz = substitute_one(derivative(f, z, z))

    e_2 = f_y / f_z
    v_2 = (f_y**2 * (f_zz+f_z) + f_z**2 * (f_yy+f_y)
           - 2*f_y*f_z*f_yz) / f_z**3
    c = (f_x * f_y * (f_zz+f_z) + f_z**2 * f_xy - f_y*f_z*f_xz
         - f_x*f_z*f_yz) / f_z**3

    return {'expectation': e_2*variable + SR(1).Order(),
            'variance': v_2*variable + SR(1).Order(),
            'covariance': c*variable + SR(1).Order()}
def is_monochromatic(self):
    """
    Checks whether the colors of all states are equal.

    INPUT:

    Nothing.

    OUTPUT:

    ``True`` or ``False``.

    EXAMPLES::

        sage: G = transducers.GrayCode()
        sage: [s.color for s in G.iter_states()]
        [None, None, None]
        sage: G.is_monochromatic()
        True
        sage: G.state(1).color = 'blue'
        sage: G.is_monochromatic()
        False
    """
    # All states share the same color iff the sequence of colors
    # is constant.
    state_colors = (state.color for state in self.iter_states())
    return equal(state_colors)
#*****************************************************************************
def is_Automaton(FSM):
    """
    Tests whether or not ``FSM`` inherits from :class:`Automaton`.

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine, is_Automaton
        sage: is_Automaton(FiniteStateMachine())
        False
        sage: is_Automaton(Automaton())
        True
        sage: is_FiniteStateMachine(Automaton())
        True
    """
    # isinstance also accepts instances of subclasses of Automaton.
    return isinstance(FSM, Automaton)
class Automaton(FiniteStateMachine):
"""
This creates an automaton, which is a finite state machine, whose
transitions have input labels.
An automaton has additional features like creating a deterministic
and a minimized automaton.
See class :class:`FiniteStateMachine` for more information.
EXAMPLES:
We can create an automaton recognizing even numbers (given in
binary and read from left to right) in the following way::
sage: A = Automaton([('P', 'Q', 0), ('P', 'P', 1),
....: ('Q', 'P', 1), ('Q', 'Q', 0)],
....: initial_states=['P'], final_states=['Q'])
sage: A
Automaton with 2 states
sage: A([0])
True
sage: A([1, 1, 0])
True
sage: A([1, 0, 1])
False
Note that the full output of the commands can be obtained by
calling :meth:`.process` and looks like this::
sage: A.process([1, 0, 1])
(False, 'P')
TESTS::
sage: Automaton()
Automaton with 0 states
"""
def __init__(self, *args, **kwargs):
    """
    Initialize an automaton. See :class:`Automaton` and its parent
    :class:`FiniteStateMachine` for more information.

    TESTS::

        sage: Transducer()._allow_composition_
        True
        sage: Automaton()._allow_composition_
        False
    """
    super(Automaton, self).__init__(*args, **kwargs)
    # In contrast to transducers, composition is disabled for
    # automata (cf. the TESTS block above).
    self._allow_composition_ = False
def _repr_(self):
"""
Represents the finite state machine as "Automaton with n
states" where n is the number of states.
INPUT:
Nothing.
OUTPUT:
A string.
EXAMPLES::
sage: Automaton()._repr_()
'Automaton with 0 states'
"""
return "Automaton with %s states" % len(self._states_)
def _latex_transition_label_(self, transition, format_function=latex):
r"""
Returns the proper transition label.
INPUT:
- ``transition`` - a transition
- ``format_function`` - a function formatting the labels
OUTPUT:
A string.
EXAMPLES::
sage: F = Automaton([('A', 'B', 1)])
sage: print latex(F) # indirect doctest
\begin{tikzpicture}[auto, initial text=, >=latex]
\node[state] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
\node[state] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
\path[->] (v0) edge node[rotate=360.00, anchor=south] {$1$} (v1);
\end{tikzpicture}
TESTS::
sage: F = Automaton([('A', 'B', 0, 1)])
sage: t = F.transitions()[0]
sage: F._latex_transition_label_(t)
\left[0\right]
"""
return format_function(transition.word_in)
def intersection(self, other, only_accessible_components=True):
"""
Returns a new automaton which accepts an input if it is
accepted by both given automata.
INPUT:
- ``other`` -- an automaton
- ``only_accessible_components`` -- If ``True`` (default), then
the result is piped through :meth:`.accessible_components`. If no
``new_input_alphabet`` is given, it is determined by
:meth:`.determine_alphabets`.
OUTPUT:
A new automaton which computes the intersection
(see below) of the languages of ``self`` and ``other``.
The set of states of the new automaton is the cartesian product of the
set of states of both given automata. There is a transition `((A, B),
(C, D), a)` in the new automaton if there are transitions `(A, C, a)`
and `(B, D, a)` in the old automata.
The methods :meth:`.intersection` and
:meth:`.cartesian_product` are the same (for automata).
EXAMPLES::
sage: aut1 = Automaton([('1', '2', 1),
....: ('2', '2', 1),
....: ('2', '2', 0)],
....: initial_states=['1'],
....: final_states=['2'],
....: determine_alphabets=True)
sage: aut2 = Automaton([('A', 'A', 1),
....: ('A', 'B', 0),
....: ('B', 'B', 0),
....: ('B', 'A', 1)],
....: initial_states=['A'],
....: final_states=['B'],
....: determine_alphabets=True)
sage: res = aut1.intersection(aut2)
sage: (aut1([1, 1]), aut2([1, 1]), res([1, 1]))
(True, False, False)
sage: (aut1([1, 0]), aut2([1, 0]), res([1, 0]))
(True, True, True)
sage: res.transitions()
[Transition from ('1', 'A') to ('2', 'A'): 1|-,
Transition from ('2', 'A') to ('2', 'B'): 0|-,
Transition from ('2', 'A') to ('2', 'A'): 1|-,
Transition from ('2', 'B') to ('2', 'B'): 0|-,
Transition from ('2', 'B') to ('2', 'A'): 1|-]
For automata with epsilon-transitions, intersection is not well
defined. But for any finite state machine, epsilon-transitions can be
removed by :meth:`.remove_epsilon_transitions`.
::
sage: a1 = Automaton([(0, 0, 0),
....: (0, 1, None),
....: (1, 1, 1),
....: (1, 2, 1)],
....: initial_states=[0],
....: final_states=[1],
....: determine_alphabets=True)
sage: a2 = Automaton([(0, 0, 0), (0, 1, 1), (1, 1, 1)],
....: initial_states=[0],
....: final_states=[1],
....: determine_alphabets=True)
sage: a1.intersection(a2)
Traceback (most recent call last):
...
ValueError: An epsilon-transition (with empty input)
was found.
sage: a1.remove_epsilon_transitions() # not tested (since not implemented yet)
sage: a1.intersection(a2) # not tested
"""
if not is_Automaton(other):
raise TypeError(
"Only an automaton can be intersected with an automaton.")
def function(transition1, transition2):
if not transition1.word_in or not transition2.word_in:
raise ValueError(
"An epsilon-transition (with empty input) was found.")
if transition1.word_in == transition2.word_in:
return (transition1.word_in, None)
else:
raise LookupError
return self.product_FiniteStateMachine(
other,
function,
only_accessible_components=only_accessible_components)
cartesian_product = intersection
def determinisation(self):
"""
Returns a deterministic automaton which accepts the same input
words as the original one.
INPUT:
Nothing.
OUTPUT:
A new automaton, which is deterministic.
The labels of the states of the new automaton are frozensets
of states of ``self``. The color of a new state is the
frozenset of colors of the constituent states of ``self``.
Therefore, the colors of the constituent states have to be
hashable.
The input alphabet must be specified.
EXAMPLES::
sage: aut = Automaton([('A', 'A', 0), ('A', 'B', 1), ('B', 'B', 1)],
....: initial_states=['A'], final_states=['B'])
sage: aut.determinisation().transitions()
[Transition from frozenset(['A'])
to frozenset(['A']): 0|-,
Transition from frozenset(['A'])
to frozenset(['B']): 1|-,
Transition from frozenset(['B'])
to frozenset([]): 0|-,
Transition from frozenset(['B'])
to frozenset(['B']): 1|-,
Transition from frozenset([])
to frozenset([]): 0|-,
Transition from frozenset([])
to frozenset([]): 1|-]
::
sage: A = Automaton([('A', 'A', 1), ('A', 'A', 0), ('A', 'B', 1),
....: ('B', 'C', 0), ('C', 'C', 1), ('C', 'C', 0)],
....: initial_states=['A'], final_states=['C'])
sage: A.determinisation().states()
[frozenset(['A']), frozenset(['A', 'B']),
frozenset(['A', 'C']), frozenset(['A', 'C', 'B'])]
::
sage: A = Automaton([(0, 1, 1), (0, 2, [1, 1]), (0, 3, [1, 1, 1]),
....: (1, 0, -1), (2, 0, -2), (3, 0, -3)],
....: initial_states=[0], final_states=[0, 1, 2, 3])
sage: B = A.determinisation().relabeled()
sage: all(t.to_state.label() == 2 for t in
....: B.state(2).transitions)
True
sage: B.state(2).is_final
False
sage: B.delete_state(2) # this is a sink
sage: sorted(B.transitions())
[Transition from 0 to 1: 1|-,
Transition from 1 to 0: -1|-,
Transition from 1 to 3: 1|-,
Transition from 3 to 0: -2|-,
Transition from 3 to 4: 1|-,
Transition from 4 to 0: -3|-]
Note that colors of states have to be hashable::
sage: A = Automaton([[0, 0, 0]], initial_states=[0])
sage: A.state(0).color = []
sage: A.determinisation()
Traceback (most recent call last):
...
TypeError: unhashable type: 'list'
sage: A.state(0).color = ()
sage: A.determinisation()
Automaton with 1 states
TESTS:
This is from #15078, comment 13.
::
sage: D = {'A': [('A', 'a'), ('B', 'a'), ('A', 'b')],
....: 'C': [], 'B': [('C', 'b')]}
sage: auto = Automaton(D, initial_states=['A'], final_states=['C'])
sage: auto.is_deterministic()
False
sage: auto.process(list('aaab'))
Traceback (most recent call last):
...
NotImplementedError: Non-deterministic path encountered
when processing input.
sage: auto.states()
['A', 'C', 'B']
sage: Ddet = auto.determinisation()
sage: Ddet
Automaton with 3 states
sage: Ddet.is_deterministic()
True
sage: sorted(Ddet.transitions())
[Transition from frozenset(['A']) to frozenset(['A', 'B']): 'a'|-,
Transition from frozenset(['A']) to frozenset(['A']): 'b'|-,
Transition from frozenset(['A', 'B']) to frozenset(['A', 'B']): 'a'|-,
Transition from frozenset(['A', 'B']) to frozenset(['A', 'C']): 'b'|-,
Transition from frozenset(['A', 'C']) to frozenset(['A', 'B']): 'a'|-,
Transition from frozenset(['A', 'C']) to frozenset(['A']): 'b'|-]
sage: Ddet.initial_states()
[frozenset(['A'])]
sage: Ddet.final_states()
[frozenset(['A', 'C'])]
"""
if any(len(t.word_in) > 1 for t in self.iter_transitions()):
return self.split_transitions().determinisation()
epsilon_successors = {}
direct_epsilon_successors = {}
for state in self.iter_states():
direct_epsilon_successors[state] = set(
t.to_state
for t in self.iter_transitions(state)
if not t.word_in)
epsilon_successors[state] = set([state])
old_count_epsilon_successors = 0
count_epsilon_successors = len(epsilon_successors)
while old_count_epsilon_successors < count_epsilon_successors:
old_count_epsilon_successors = count_epsilon_successors
count_epsilon_successors = 0
for state in self.iter_states():
for direct_successor in direct_epsilon_successors[state]:
epsilon_successors[state] = epsilon_successors[state].union(epsilon_successors[direct_successor])
count_epsilon_successors += len(epsilon_successors[state])
def set_transition(states, letter):
result = set()
for state in states:
for transition in self.iter_transitions(state):
if transition.word_in == [letter]:
result.add(transition.to_state)
result = result.union(*(epsilon_successors[s] for s in result))
return (frozenset(result), [])
result = self.empty_copy()
new_initial_states = [frozenset(self.iter_initial_states())]
result.add_from_transition_function(set_transition,
initial_states=new_initial_states)
for state in result.iter_states():
state.is_final = any(s.is_final for s in state.label())
state.color = frozenset(s.color for s in state.label())
return result
def minimization(self, algorithm=None):
"""
Returns the minimization of the input automaton as a new automaton.
INPUT:
- ``algorithm`` -- Either Moore's algorithm (by
``algorithm='Moore'`` or as default for deterministic
automata) or Brzozowski's algorithm (when
``algorithm='Brzozowski'`` or when the automaton is not
deterministic) is used.
OUTPUT:
A new automaton.
The resulting automaton is deterministic and has a minimal
number of states.
EXAMPLES::
sage: A = Automaton([('A', 'A', 1), ('A', 'A', 0), ('A', 'B', 1),
....: ('B', 'C', 0), ('C', 'C', 1), ('C', 'C', 0)],
....: initial_states=['A'], final_states=['C'])
sage: B = A.minimization(algorithm='Brzozowski')
sage: B.transitions(B.states()[1])
[Transition from frozenset([frozenset(['A', 'C', 'B']),
frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
frozenset(['A', 'C']), frozenset(['C'])]): 0|-,
Transition from frozenset([frozenset(['A', 'C', 'B']),
frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
frozenset(['A', 'C'])]): 1|-]
sage: len(B.states())
3
sage: C = A.minimization(algorithm='Brzozowski')
sage: C.transitions(C.states()[1])
[Transition from frozenset([frozenset(['A', 'C', 'B']),
frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
frozenset(['A', 'C']), frozenset(['C'])]): 0|-,
Transition from frozenset([frozenset(['A', 'C', 'B']),
frozenset(['C', 'B']), frozenset(['A', 'C'])]) to
frozenset([frozenset(['A', 'C', 'B']), frozenset(['C', 'B']),
frozenset(['A', 'C'])]): 1|-]
sage: len(C.states())
3
::
sage: aut = Automaton([('1', '2', 'a'), ('2', '3', 'b'),
....: ('3', '2', 'a'), ('2', '1', 'b'),
....: ('3', '4', 'a'), ('4', '3', 'b')],
....: initial_states=['1'], final_states=['1'])
sage: min = aut.minimization(algorithm='Brzozowski')
sage: [len(min.states()), len(aut.states())]
[3, 4]
sage: min = aut.minimization(algorithm='Moore')
Traceback (most recent call last):
...
NotImplementedError: Minimization via Moore's Algorithm is only
implemented for deterministic finite state machines
"""
deterministic = self.is_deterministic()
if algorithm == "Moore" or (algorithm is None and deterministic):
return self._minimization_Moore_()
elif algorithm == "Brzozowski" or (algorithm is None and not deterministic):
return self._minimization_Brzozowski_()
else:
raise NotImplementedError("Algorithm '%s' is not implemented. Choose 'Moore' or 'Brzozowski'" % algorithm)
def _minimization_Brzozowski_(self):
"""
Returns a minimized automaton by using Brzozowski's algorithm.
See also :meth:`.minimization`.
TESTS::
sage: A = Automaton([('A', 'A', 1), ('A', 'A', 0), ('A', 'B', 1),
....: ('B', 'C', 0), ('C', 'C', 1), ('C', 'C', 0)],
....: initial_states=['A'], final_states=['C'])
sage: B = A._minimization_Brzozowski_()
sage: len(B.states())
3
"""
return self.transposition().determinisation().transposition().determinisation()
def _minimization_Moore_(self):
"""
Returns a minimized automaton by using Moore's algorithm.
See also :meth:`.minimization`.
TESTS::
sage: aut = Automaton([('1', '2', 'a'), ('2', '3', 'b'),
....: ('3', '2', 'a'), ('2', '1', 'b'),
....: ('3', '4', 'a'), ('4', '3', 'b')],
....: initial_states=['1'], final_states=['1'])
sage: min = aut._minimization_Moore_()
Traceback (most recent call last):
...
NotImplementedError: Minimization via Moore's Algorithm is only
implemented for deterministic finite state machines
"""
if self.is_deterministic():
return self.quotient(self.equivalence_classes())
else:
raise NotImplementedError("Minimization via Moore's Algorithm is only " \
"implemented for deterministic finite state machines")
def process(self, *args, **kwargs):
"""
.. WARNING::
The default output of this method is scheduled to change.
This docstring describes the new default behaviour, which can
already be achieved by setting
``FSMOldProcessOutput`` to ``False``.
Returns whether the automaton accepts the input and the state
where the computation stops.
INPUT:
- ``input_tape`` -- The input tape can be a list with entries from
the input alphabet.
- ``initial_state`` -- (default: ``None``) The state in which
to start. If this parameter is ``None`` and there is only
one initial state in the machine, then this state is taken.
- ``full_output`` -- (default: ``True``) If set, then the full
output is given, otherwise only whether the sequence is accepted
or not (the first entry below only).
OUTPUT:
The full output is a pair, where
- the first entry is ``True`` if the input string is accepted and
- the second gives the state reached after processing the
input tape (This is a state with label ``None`` if the input
could not be processed, i.e., when at one point no
transition to go could be found.).
By setting ``FSMOldProcessOutput`` to ``False``
the new desired output is produced.
EXAMPLES::
sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False # activate new output behavior
sage: from sage.combinat.finite_state_machine import FSMState
sage: NAF_ = FSMState('_', is_initial = True, is_final = True)
sage: NAF1 = FSMState('1', is_final = True)
sage: NAF = Automaton(
....: {NAF_: [(NAF_, 0), (NAF1, 1)], NAF1: [(NAF_, 0)]})
sage: [NAF.process(w) for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
....: [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
[(True, '_'), (True, '1'), (False, None),
(True, '1'), (False, None), (False, None)]
If we just want a condensed output, we use::
sage: [NAF.process(w, full_output=False)
....: for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
....: [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
[True, True, False, True, False, False]
It is equivalent to::
sage: [NAF(w) for w in [[0], [0, 1], [1, 1], [0, 1, 0, 1],
....: [0, 1, 1, 1, 0], [1, 0, 0, 1, 1]]]
[True, True, False, True, False, False]
The following example illustrates the difference between
non-existing paths and reaching a non-final state::
sage: NAF.process([2])
(False, None)
sage: NAF.add_transition(('_', 's', 2))
Transition from '_' to 's': 2|-
sage: NAF.process([2])
(False, 's')
"""
if FSMOldProcessOutput:
from sage.misc.superseded import deprecation
deprecation(16132, "The output of Automaton.process "
"(and thus of Automaton.__call__) "
"will change. Please use the corresponding "
"functions from FiniteStateMachine "
"for the original output.")
return super(Automaton, self).process(*args, **kwargs)
if not kwargs.has_key('full_output'):
kwargs['full_output'] = True
it = self.iter_process(*args, **kwargs)
for _ in it:
pass
# process output
if kwargs['full_output']:
return (it.accept_input, it.current_state)
else:
return it.accept_input
#*****************************************************************************
def is_Transducer(FSM):
    """
    Tests whether or not ``FSM`` inherits from :class:`Transducer`.
    TESTS::
        sage: from sage.combinat.finite_state_machine import is_FiniteStateMachine, is_Transducer
        sage: is_Transducer(FiniteStateMachine())
        False
        sage: is_Transducer(Transducer())
        True
        sage: is_FiniteStateMachine(Transducer())
        True
    """
    # Plain ``isinstance`` check; instances of subclasses of
    # :class:`Transducer` count as transducers as well.
    return isinstance(FSM, Transducer)
class Transducer(FiniteStateMachine):
"""
This creates a transducer, which is a finite state machine, whose
transitions have input and output labels.
An transducer has additional features like creating a simplified
transducer.
See class :class:`FiniteStateMachine` for more information.
EXAMPLES:
We can create a transducer performing the addition of 1 (for
numbers given in binary and read from right to left) in the
following way::
sage: T = Transducer([('C', 'C', 1, 0), ('C', 'N', 0, 1),
....: ('N', 'N', 0, 0), ('N', 'N', 1, 1)],
....: initial_states=['C'], final_states=['N'])
sage: T
Transducer with 2 states
sage: T([0])
[1]
sage: T([1,1,0])
[0, 0, 1]
sage: ZZ(T(15.digits(base=2)+[0]), base=2)
16
Note that we have padded the binary input sequence by a `0` so
that the transducer can reach its final state.
TESTS::
sage: Transducer()
Transducer with 0 states
"""
def _repr_(self):
    """
    Represents the transducer as "Transducer with n states" where
    n is the number of states.
    INPUT:
    Nothing.
    OUTPUT:
    A string.
    EXAMPLES::
        sage: Transducer()._repr_()
        'Transducer with 0 states'
    """
    # Only the number of states is exposed in the representation.
    number_of_states = len(self._states_)
    return "Transducer with %s states" % number_of_states
def _latex_transition_label_(self, transition, format_function=latex):
    r"""
    Returns the proper transition label.
    INPUT:
    - ``transition`` - a transition
    - ``format_function`` - a function formatting the labels
    OUTPUT:
    A string.
    EXAMPLES::
        sage: F = Transducer([('A', 'B', 1, 2)])
        sage: print latex(F)  # indirect doctest
        \begin{tikzpicture}[auto, initial text=, >=latex]
        \node[state] (v0) at (3.000000, 0.000000) {$\text{\texttt{A}}$};
        \node[state] (v1) at (-3.000000, 0.000000) {$\text{\texttt{B}}$};
        \path[->] (v0) edge node[rotate=360.00, anchor=south] {$1\mid 2$} (v1);
        \end{tikzpicture}
    TESTS::
        sage: F = Transducer([('A', 'B', 0, 1)])
        sage: t = F.transitions()[0]
        sage: F._latex_transition_label_(t)
        \left[0\right] \mid \left[1\right]
    """
    # A transducer label shows input and output separated by \mid.
    formatted_input = format_function(transition.word_in)
    formatted_output = format_function(transition.word_out)
    return formatted_input + "\\mid " + formatted_output
def intersection(self, other, only_accessible_components=True):
    """
    Returns a new transducer which accepts an input if it is accepted by
    both given finite state machines producing the same output.
    INPUT:
    - ``other`` -- a transducer
    - ``only_accessible_components`` -- If ``True`` (default), then
      the result is piped through :meth:`.accessible_components`. If no
      ``new_input_alphabet`` is given, it is determined by
      :meth:`.determine_alphabets`.
    OUTPUT:
    A new transducer which computes the intersection
    (see below) of the languages of ``self`` and ``other``.
    The set of states of the transducer is the cartesian product of the
    set of states of both given transducer. There is a transition `((A,
    B), (C, D), a, b)` in the new transducer if there are
    transitions `(A, C, a, b)` and `(B, D, a, b)` in the old transducers.
    EXAMPLES::
        sage: transducer1 = Transducer([('1', '2', 1, 0),
        ....:                           ('2', '2', 1, 0),
        ....:                           ('2', '2', 0, 1)],
        ....:                          initial_states=['1'],
        ....:                          final_states=['2'])
        sage: transducer2 = Transducer([('A', 'A', 1, 0),
        ....:                           ('A', 'B', 0, 0),
        ....:                           ('B', 'B', 0, 1),
        ....:                           ('B', 'A', 1, 1)],
        ....:                          initial_states=['A'],
        ....:                          final_states=['B'])
        sage: res = transducer1.intersection(transducer2)
        sage: res.transitions()
        [Transition from ('1', 'A') to ('2', 'A'): 1|0,
         Transition from ('2', 'A') to ('2', 'A'): 1|0]
    In general, transducers are not closed under intersection. But
    for transducer which do not have epsilon-transitions, the
    intersection is well defined (cf. [BaWo2012]_). However, in
    the next example the intersection of the two transducers is
    not well defined. The intersection of the languages consists
    of `(a^n, b^n c^n)`. This set is not recognizable by a
    *finite* transducer.
    ::
        sage: t1 = Transducer([(0, 0, 'a', 'b'),
        ....:                  (0, 1, None, 'c'),
        ....:                  (1, 1, None, 'c')],
        ....:                 initial_states=[0],
        ....:                 final_states=[0, 1])
        sage: t2 = Transducer([('A', 'A', None, 'b'),
        ....:                  ('A', 'B', 'a', 'c'),
        ....:                  ('B', 'B', 'a', 'c')],
        ....:                 initial_states=['A'],
        ....:                 final_states=['A', 'B'])
        sage: t2.intersection(t1)
        Traceback (most recent call last):
        ...
        ValueError: An epsilon-transition (with empty input or output)
        was found.
    TESTS::
        sage: transducer1 = Transducer([('1', '2', 1, 0)],
        ....:                          initial_states=['1'],
        ....:                          final_states=['2'])
        sage: transducer2 = Transducer([('A', 'B', 1, 0)],
        ....:                          initial_states=['A'],
        ....:                          final_states=['B'])
        sage: res = transducer1.intersection(transducer2)
        sage: res.final_states()
        [('2', 'B')]
        sage: transducer1.state('2').final_word_out = 1
        sage: transducer2.state('B').final_word_out = 2
        sage: res = transducer1.intersection(transducer2)
        sage: res.final_states()
        []
    REFERENCES:
    .. [BaWo2012] Javier Baliosian and Dina Wonsever, *Finite State
       Transducers*, chapter in *Handbook of Finite State Based Models and
       Applications*, edited by Jiacun Wang, Chapman and Hall/CRC, 2012.
    """
    if not is_Transducer(other):
        raise TypeError(
            "Only a transducer can be intersected with a transducer.")

    def joint_transition(t1, t2):
        # Epsilon-transitions (empty input or output on either side)
        # make the intersection ill-defined, so reject them.
        if not all([t1.word_in, t2.word_in, t1.word_out, t2.word_out]):
            raise ValueError("An epsilon-transition "
                             "(with empty input or output) was found.")
        if (t1.word_in, t1.word_out) == (t2.word_in, t2.word_out):
            return (t1.word_in, t1.word_out)
        # No matching transition for this pair.
        raise LookupError

    result = self.product_FiniteStateMachine(
        other,
        joint_transition,
        only_accessible_components=only_accessible_components,
        final_function=lambda s1, s2: s1.final_word_out)

    # A product state stays final only if both constituent states
    # agree on their final output word.
    for state in result.iter_final_states():
        left_label, right_label = state.label()
        left_state = self.state(left_label)
        right_state = other.state(right_label)
        if left_state.final_word_out != right_state.final_word_out:
            state.final_word_out = None
            state.is_final = False

    return result
def cartesian_product(self, other, only_accessible_components=True):
    """
    .. WARNING::
        The default output of this method is scheduled to change.
        This docstring describes the new default behaviour, which can
        already be achieved by setting
        ``FSMOldCodeTransducerCartesianProduct`` to ``False``.
    Return a new transducer which can simultaneously process an
    input with the machines ``self`` and ``other`` where the
    output labels are `d`-tuples of the original output labels.
    INPUT:
    - ``other`` - a finite state machine (if `d=2`) or a list (or
      other iterable) of `d-1` finite state machines
    - ``only_accessible_components`` -- If ``True`` (default), then
      the result is piped through :meth:`.accessible_components`. If no
      ``new_input_alphabet`` is given, it is determined by
      :meth:`.determine_alphabets`.
    OUTPUT:
    A transducer which can simultaneously process an input with ``self``
    and the machine(s) in ``other``.
    The set of states of the new transducer is the cartesian product of
    the set of states of ``self`` and ``other``.
    Let `(A_j, B_j, a_j, b_j)` for `j\in\{1, \ldots, d\}` be
    transitions in the machines ``self`` and in ``other``. Then
    there is a transition `((A_1, \ldots, A_d), (B_1, \ldots,
    B_d), a, (b_1, \ldots, b_d))` in the new transducer if `a_1 =
    \cdots = a_d =: a`.
    EXAMPLES:
    Originally a different output was constructed by
    :meth:`Transducer.cartesian_product`. This output is now produced by
    :meth:`Transducer.intersection`.
    ::
        sage: transducer1 = Transducer([('A', 'A', 0, 0),
        ....:                           ('A', 'A', 1, 1)],
        ....:                          initial_states=['A'],
        ....:                          final_states=['A'],
        ....:                          determine_alphabets=True)
        sage: transducer2 = Transducer([(0, 1, 0, ['b', 'c']),
        ....:                           (0, 0, 1, 'b'),
        ....:                           (1, 1, 0, 'a')],
        ....:                          initial_states=[0],
        ....:                          final_states=[1],
        ....:                          determine_alphabets=True)
        sage: result = transducer1.cartesian_product(transducer2)
        doctest:...: DeprecationWarning: The output of
        Transducer.cartesian_product will change.
        Please use Transducer.intersection for the original output.
        See http://trac.sagemath.org/16061 for details.
        sage: result
        Transducer with 0 states
    By setting ``FSMOldCodeTransducerCartesianProduct`` to ``False``
    the new desired output is produced.
    ::
        sage: sage.combinat.finite_state_machine.FSMOldCodeTransducerCartesianProduct = False
        sage: result = transducer1.cartesian_product(transducer2)
        sage: result
        Transducer with 2 states
        sage: result.transitions()
        [Transition from ('A', 0) to ('A', 1): 0|(0, 'b'),(None, 'c'),
         Transition from ('A', 0) to ('A', 0): 1|(1, 'b'),
         Transition from ('A', 1) to ('A', 1): 0|(0, 'a')]
        sage: result([1, 0, 0])
        [(1, 'b'), (0, 'b'), (None, 'c'), (0, 'a')]
        sage: (transducer1([1, 0, 0]), transducer2([1, 0, 0]))
        ([1, 0, 0], ['b', 'b', 'c', 'a'])
    Also final output words are correctly processed::
        sage: transducer1.state('A').final_word_out = 2
        sage: result = transducer1.cartesian_product(transducer2)
        sage: result.final_states()[0].final_word_out
        [(2, None)]
    The following transducer counts the number of 11 blocks minus
    the number of 10 blocks over the alphabet ``[0, 1]``.
    ::
        sage: count_11 = transducers.CountSubblockOccurrences(
        ....:     [1, 1],
        ....:     input_alphabet=[0, 1])
        sage: count_10 = transducers.CountSubblockOccurrences(
        ....:     [1, 0],
        ....:     input_alphabet=[0, 1])
        sage: count_11x10 = count_11.cartesian_product(count_10)
        sage: difference = transducers.sub([0, 1])(count_11x10)
        sage: T = difference.simplification().relabeled()
        sage: T.initial_states()
        [1]
        sage: sorted(T.transitions())
        [Transition from 0 to 1: 0|-1,
         Transition from 0 to 0: 1|1,
         Transition from 1 to 1: 0|0,
         Transition from 1 to 0: 1|0]
        sage: input = [0, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0]
        sage: output = [0, 0, 1, -1, 0, -1, 0, 0, 0, 1, 1, -1]
        sage: T(input) == output
        True
    If ``other`` is an automaton, then :meth:`.cartesian_product` returns
    ``self`` where the input is restricted to the input accepted by
    ``other``.
    For example, if the transducer transforms the standard
    binary expansion into the non-adjacent form and the automaton
    recognizes the binary expansion without adjacent ones, then the
    cartesian product of these two is a transducer which does not change
    the input (except for changing ``a`` to ``(a, None)`` and ignoring a
    leading `0`).
    ::
        sage: NAF = Transducer([(0, 1, 0, None),
        ....:                   (0, 2, 1, None),
        ....:                   (1, 1, 0, 0),
        ....:                   (1, 2, 1, 0),
        ....:                   (2, 1, 0, 1),
        ....:                   (2, 3, 1, -1),
        ....:                   (3, 2, 0, 0),
        ....:                   (3, 3, 1, 0)],
        ....:                  initial_states=[0],
        ....:                  final_states=[1],
        ....:                  determine_alphabets=True)
        sage: aut11 = Automaton([(0, 0, 0), (0, 1, 1), (1, 0, 0)],
        ....:                   initial_states=[0],
        ....:                   final_states=[0, 1],
        ....:                   determine_alphabets=True)
        sage: res = NAF.cartesian_product(aut11)
        sage: res([1, 0, 0, 1, 0, 1, 0])
        [(1, None), (0, None), (0, None), (1, None), (0, None), (1, None)]
    This is obvious because if the standard binary expansion does not have
    adjacent ones, then it is the same as the non-adjacent form.
    Be aware that :meth:`.cartesian_product` is not commutative.
    ::
        sage: aut11.cartesian_product(NAF)
        Traceback (most recent call last):
        ...
        TypeError: Only an automaton can be intersected with an automaton.
    The cartesian product of more than two finite state machines can also
    be computed::
        sage: T0 = transducers.CountSubblockOccurrences([0, 0], [0, 1, 2])
        sage: T1 = transducers.CountSubblockOccurrences([1, 1], [0, 1, 2])
        sage: T2 = transducers.CountSubblockOccurrences([2, 2], [0, 1, 2])
        sage: T = T0.cartesian_product([T1, T2])
        sage: T.transitions()
        [Transition from ((), (), ()) to ((0,), (), ()): 0|(0, 0, 0),
         Transition from ((), (), ()) to ((), (1,), ()): 1|(0, 0, 0),
         Transition from ((), (), ()) to ((), (), (2,)): 2|(0, 0, 0),
         Transition from ((0,), (), ()) to ((0,), (), ()): 0|(1, 0, 0),
         Transition from ((0,), (), ()) to ((), (1,), ()): 1|(0, 0, 0),
         Transition from ((0,), (), ()) to ((), (), (2,)): 2|(0, 0, 0),
         Transition from ((), (1,), ()) to ((0,), (), ()): 0|(0, 0, 0),
         Transition from ((), (1,), ()) to ((), (1,), ()): 1|(0, 1, 0),
         Transition from ((), (1,), ()) to ((), (), (2,)): 2|(0, 0, 0),
         Transition from ((), (), (2,)) to ((0,), (), ()): 0|(0, 0, 0),
         Transition from ((), (), (2,)) to ((), (1,), ()): 1|(0, 0, 0),
         Transition from ((), (), (2,)) to ((), (), (2,)): 2|(0, 0, 1)]
        sage: T([0, 0, 1, 1, 2, 2, 0, 1, 2, 2])
        [(0, 0, 0),
         (1, 0, 0),
         (0, 0, 0),
         (0, 1, 0),
         (0, 0, 0),
         (0, 0, 1),
         (0, 0, 0),
         (0, 0, 0),
         (0, 0, 0),
         (0, 0, 1)]
    """
    if FSMOldCodeTransducerCartesianProduct:
        from sage.misc.superseded import deprecation
        deprecation(16061, "The output of Transducer.cartesian_product "
                    "will change. Please use "
                    "Transducer.intersection for the original "
                    "output.")
        return self.intersection(
            other,
            only_accessible_components=only_accessible_components)

    def combined_transition(*transitions):
        # All factors must read the same input word; the output words
        # are zipped into tuples, padding shorter ones with ``None``.
        if not equal(t.word_in for t in transitions):
            raise LookupError
        zipped_output = list(itertools.izip_longest(
            *(t.word_out for t in transitions)))
        return (transitions[0].word_in, zipped_output)

    def combined_final(*states):
        # Final output words are zipped in the same way.
        return list(itertools.izip_longest(
            *(s.final_word_out for s in states)))

    return self.product_FiniteStateMachine(
        other,
        combined_transition,
        final_function=combined_final,
        only_accessible_components=only_accessible_components)
def simplification(self):
    """
    Returns a simplified transducer.
    INPUT:
    Nothing.
    OUTPUT:
    A new transducer.
    This function simplifies a transducer by Moore's algorithm,
    first moving common output labels of transitions leaving a
    state to output labels of transitions entering the state
    (cf. :meth:`.prepone_output`).
    The resulting transducer implements the same function as the
    original transducer.
    EXAMPLES::
        sage: fsm = Transducer([("A", "B", 0, 1), ("A", "B", 1, 0),
        ....:                   ("B", "C", 0, 0), ("B", "C", 1, 1),
        ....:                   ("C", "D", 0, 1), ("C", "D", 1, 0),
        ....:                   ("D", "A", 0, 0), ("D", "A", 1, 1)])
        sage: fsms = fsm.simplification()
        sage: fsms
        Transducer with 2 states
        sage: fsms.transitions()
        [Transition from ('A', 'C')
         to ('B', 'D'): 0|1,
         Transition from ('A', 'C')
         to ('B', 'D'): 1|0,
         Transition from ('B', 'D')
         to ('A', 'C'): 0|0,
         Transition from ('B', 'D')
         to ('A', 'C'): 1|1]
        sage: fsms.relabeled().transitions()
        [Transition from 0 to 1: 0|1,
         Transition from 0 to 1: 1|0,
         Transition from 1 to 0: 0|0,
         Transition from 1 to 0: 1|1]
    ::
        sage: fsm = Transducer([("A", "A", 0, 0),
        ....:                   ("A", "B", 1, 1),
        ....:                   ("A", "C", 1, -1),
        ....:                   ("B", "A", 2, 0),
        ....:                   ("C", "A", 2, 0)])
        sage: fsm_simplified = fsm.simplification()
        sage: fsm_simplified
        Transducer with 2 states
        sage: fsm_simplified.transitions()
        [Transition from ('A',) to ('A',): 0|0,
         Transition from ('A',) to ('B', 'C'): 1|1,0,
         Transition from ('A',) to ('B', 'C'): 1|-1,0,
         Transition from ('B', 'C') to ('A',): 2|-]
    ::
        sage: from sage.combinat.finite_state_machine import duplicate_transition_add_input
        sage: T = Transducer([('A', 'A', 1/2, 0),
        ....:                 ('A', 'B', 1/4, 1),
        ....:                 ('A', 'C', 1/4, 1),
        ....:                 ('B', 'A', 1, 0),
        ....:                 ('C', 'A', 1, 0)],
        ....:                initial_states=[0],
        ....:                final_states=['A', 'B', 'C'],
        ....:                on_duplicate_transition=duplicate_transition_add_input)
        sage: sorted(T.simplification().transitions())
        [Transition from ('A',) to ('A',): 1/2|0,
         Transition from ('A',) to ('B', 'C'): 1/2|1,
         Transition from ('B', 'C') to ('A',): 1|0]
    Illustrating the use of colors in order to avoid identification of states::
        sage: T = Transducer( [[0,0,0,0], [0,1,1,1],
        ....:                  [1,0,0,0], [1,1,1,1]],
        ....:                 initial_states=[0],
        ....:                 final_states=[0,1])
        sage: sorted(T.simplification().transitions())
        [Transition from (0, 1) to (0, 1): 0|0,
         Transition from (0, 1) to (0, 1): 1|1]
        sage: T.state(0).color = 0
        sage: T.state(0).color = 1
        sage: sorted(T.simplification().transitions())
        [Transition from (0,) to (0,): 0|0,
         Transition from (0,) to (1,): 1|1,
         Transition from (1,) to (0,): 0|0,
         Transition from (1,) to (1,): 1|1]
    """
    # Work on a copy, since prepone_output modifies the machine in place.
    machine = deepcopy(self)
    machine.prepone_output()
    classes = machine.equivalence_classes()
    return machine.quotient(classes)
def process(self, *args, **kwargs):
    """
    .. WARNING::

        The default output of this method is scheduled to change.
        This docstring describes the new default behaviour, which can
        already be achieved by setting
        ``FSMOldProcessOutput`` to ``False``.

    Returns whether the transducer accepts the input, the state
    where the computation stops and which output is generated.

    INPUT:

    - ``input_tape`` -- The input tape can be a list with entries from
      the input alphabet.

    - ``initial_state`` -- (default: ``None``) The state in which
      to start. If this parameter is ``None`` and there is only
      one initial state in the machine, then this state is taken.

    - ``full_output`` -- (default: ``True``) If set, then the full
      output is given, otherwise only the generated output (the
      third entry below only). If the input is not accepted, a
      ``ValueError`` is raised.

    OUTPUT:

    The full output is a triple, where

    - the first entry is ``True`` if the input string is accepted,

    - the second gives the reached state after processing the
      input tape (This is a state with label ``None`` if the input
      could not be processed, i.e., when at one point no
      transition to go could be found.), and

    - the third gives a list of the output labels used during
      processing.

    By setting ``FSMOldProcessOutput`` to ``False``
    the new desired output is produced.

    EXAMPLES::

        sage: sage.combinat.finite_state_machine.FSMOldProcessOutput = False  # activate new output behavior
        sage: from sage.combinat.finite_state_machine import FSMState
        sage: A = FSMState('A', is_initial = True, is_final = True)
        sage: binary_inverter = Transducer({A:[(A, 0, 1), (A, 1, 0)]})
        sage: binary_inverter.process([0, 1, 0, 0, 1, 1])
        (True, 'A', [1, 0, 1, 1, 0, 0])

    If we are only interested in the output, we can also use::

        sage: binary_inverter([0, 1, 0, 0, 1, 1])
        [1, 0, 1, 1, 0, 0]

    The following transducer transforms `0^n 1` to `1^n 2`::

        sage: T = Transducer([(0, 0, 0, 1), (0, 1, 1, 2)])
        sage: T.state(0).is_initial = True
        sage: T.state(1).is_final = True

    We can see the different possibilities of the output by::

        sage: [T.process(w) for w in [[1], [0, 1], [0, 0, 1], [0, 1, 1],
        ....:                         [0], [0, 0], [2, 0], [0, 1, 2]]]
        [(True, 1, [2]), (True, 1, [1, 2]),
         (True, 1, [1, 1, 2]), (False, None, None),
         (False, 0, [1]), (False, 0, [1, 1]),
         (False, None, None), (False, None, None)]

    If we just want a condensed output, we use::

        sage: [T.process(w, full_output=False)
        ....:     for w in [[1], [0, 1], [0, 0, 1]]]
        [[2], [1, 2], [1, 1, 2]]
        sage: T.process([0, 1, 2], full_output=False)
        Traceback (most recent call last):
        ...
        ValueError: Invalid input sequence.

    It is equivalent to::

        sage: [T(w) for w in [[1], [0, 1], [0, 0, 1]]]
        [[2], [1, 2], [1, 1, 2]]
        sage: T([0, 1, 2])
        Traceback (most recent call last):
        ...
        ValueError: Invalid input sequence.
    """
    if FSMOldProcessOutput:
        # Old behaviour requested: defer to the generic machinery and
        # warn that the default output format will change.
        from sage.misc.superseded import deprecation
        deprecation(16132, "The output of Transducer.process "
                    "(and thus of Transducer.__call__) "
                    "will change. Please use the corresponding "
                    "functions from FiniteStateMachine "
                    "for the original output.")
        return super(Transducer, self).process(*args, **kwargs)

    # ``dict.has_key`` is deprecated (and removed in Python 3);
    # membership testing with ``in`` is the equivalent idiom.
    if 'full_output' not in kwargs:
        kwargs['full_output'] = True

    # Drive the process iterator to exhaustion; the results are then
    # available as attributes on the iterator.
    it = self.iter_process(*args, **kwargs)
    for _ in it:
        pass

    # process output
    if kwargs['full_output']:
        # A state labelled ``None`` signals that no fitting transition
        # was found, hence there is no meaningful output tape.
        if it.current_state.label() is None:
            return (it.accept_input, it.current_state, None)
        else:
            return (it.accept_input, it.current_state, it.output_tape)
    else:
        if not it.accept_input:
            raise ValueError("Invalid input sequence.")
        return it.output_tape
#*****************************************************************************
def is_FSMProcessIterator(PI):
    """
    Return whether ``PI`` is an instance of
    :class:`FSMProcessIterator` (subclasses included).

    TESTS::

        sage: from sage.combinat.finite_state_machine import is_FSMProcessIterator, FSMProcessIterator
        sage: is_FSMProcessIterator(FSMProcessIterator(FiniteStateMachine([[0, 0, 0, 0]], initial_states=[0])))
        True
    """
    return isinstance(PI, FSMProcessIterator)
class FSMProcessIterator(SageObject):
    """
    This class is for processing an input string on a finite state
    machine.

    An instance of this class is generated when
    :meth:`FiniteStateMachine.process` or
    :meth:`FiniteStateMachine.iter_process` of the finite state
    machine is invoked. It behaves like an iterator which, in each
    step, takes one letter of the input and runs (one step on) the
    finite state machine with this input. More precisely, in each
    step, the process iterator takes an outgoing transition of the
    current state, whose input label equals the input letter of the
    tape. The output label of the transition, if present, is written
    on the output tape.

    INPUT:

    - ``fsm`` -- The finite state machine on which the input should be
      processed.

    - ``input_tape`` -- The input tape. It can be anything that is
      iterable.

    - ``initial_state`` -- The initial state in which the machine
      starts. If this is ``None``, the unique initial state of the
      finite state machine is taken. If there are several, a
      ``ValueError`` is raised.

    The process (iteration) stops if there are no more input letters
    on the tape. In this case a StopIteration exception is thrown. As
    result the following attributes are available:

    - ``accept_input`` -- Is ``True`` if the reached state is a final state.

    - ``current_state`` -- The current/reached state in the process.

    - ``output_tape`` -- The written output.

    Current values of those attributes (except ``accept_input``) are
    (also) available during the iteration.

    OUTPUT:

    An iterator.

    EXAMPLES:

    The following transducer reads binary words and outputs a word,
    where blocks of ones are replaced by just a single one. Further
    only words that end with a zero are accepted.

    ::

        sage: T = Transducer({'A': [('A', 0, 0), ('B', 1, None)],
        ....:                 'B': [('B', 1, None), ('A', 0, [1, 0])]},
        ....:                initial_states=['A'], final_states=['A'])
        sage: input = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0]
        sage: T.process(input)
        (True, 'A', [1, 0, 0, 1, 0, 1, 0])

    The function :meth:`FiniteStateMachine.process` created a new
    ``FSMProcessIterator``. We can do that manually, too, and get full
    access to the iteration process::

        sage: from sage.combinat.finite_state_machine import FSMProcessIterator
        sage: it = FSMProcessIterator(T, input_tape=input)
        sage: for _ in it:
        ....:     print (it.current_state, it.output_tape)
        ('B', [])
        ('B', [])
        ('A', [1, 0])
        ('A', [1, 0, 0])
        ('B', [1, 0, 0])
        ('A', [1, 0, 0, 1, 0])
        ('B', [1, 0, 0, 1, 0])
        ('B', [1, 0, 0, 1, 0])
        ('B', [1, 0, 0, 1, 0])
        ('A', [1, 0, 0, 1, 0, 1, 0])
        sage: it.accept_input
        True

    TESTS::

        sage: T = Transducer([[0, 0, 0, 0]])
        sage: T.process([])
        Traceback (most recent call last):
        ...
        ValueError: No state is initial.

    ::

        sage: T = Transducer([[0, 1, 0, 0]], initial_states=[0, 1])
        sage: T.process([])
        Traceback (most recent call last):
        ...
        ValueError: Several initial states.
    """

    def __init__(self, fsm, input_tape=None, initial_state=None, **kwargs):
        """
        See :class:`FSMProcessIterator` for more information.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: for _ in it:
            ....:     pass
            sage: it.output_tape
            [1, 0]
        """
        self.fsm = fsm
        if initial_state is None:
            fsm_initial_states = self.fsm.initial_states()
            try:
                self.current_state = fsm_initial_states[0]
            except IndexError:
                raise ValueError("No state is initial.")
            if len(fsm_initial_states) > 1:
                raise ValueError("Several initial states.")
        else:
            self.current_state = initial_state
        self.output_tape = []
        if input_tape is None:
            # No tape given: process the empty word.
            self._input_tape_iter_ = iter([])
        else:
            if hasattr(input_tape, '__iter__'):
                self._input_tape_iter_ = iter(input_tape)
            else:
                raise ValueError("Given input tape is not iterable.")

    def __iter__(self):
        """
        Returns ``self``.

        TESTS::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: id(it) == id(iter(it))
            True
        """
        return self

    def next(self):
        """
        Makes one step in processing the input tape.

        INPUT:

        Nothing.

        OUTPUT:

        It returns the taken transition. A ``StopIteration`` exception is
        thrown when there is nothing more to read.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.next()
            Transition from 'A' to 'A': 0|1
            sage: it.next()
            Transition from 'A' to 'A': 1|0
            sage: it.next()
            Traceback (most recent call last):
            ...
            StopIteration

        TESTS::

            sage: Z = Transducer()
            sage: s = Z.add_state(0)
            sage: s.is_initial = True
            sage: s.is_final = True
            sage: s.final_word_out = [1, 2]
            sage: Z.process([])
            (True, 0, [1, 2])
        """
        # Once accept_input is set, processing has finished for good.
        if hasattr(self, 'accept_input'):
            raise StopIteration
        try:
            # process current state
            transition = None
            try:
                # A state hook may select the transition itself.
                transition = self.current_state.hook(
                    self.current_state, self)
            except AttributeError:
                pass
            self.write_word(self.current_state.word_out)

            # get next
            if not isinstance(transition, FSMTransition):
                next_word = []
                found = False

                try:
                    while not found:
                        next_word.append(self.read_letter())
                        if len(next_word) == 1 and any(not t.word_in
                                for t in self.current_state.transitions):
                            raise NotImplementedError(
                                "process cannot handle epsilon transition "
                                "leaving state %s." % self.current_state.label())
                        try:
                            transition = self.get_next_transition(
                                next_word)
                            found = True
                        except ValueError:
                            pass
                    # Another transition whose input word also starts
                    # with what we have read would make the choice
                    # ambiguous.
                    if found and any(
                            t is not transition and startswith(t.word_in,
                                                               next_word)
                            for t in self.current_state.transitions):
                        raise NotImplementedError("Non-deterministic "
                                                  "path encountered "
                                                  "when processing "
                                                  "input.")
                except StopIteration:
                    # this means input tape is finished
                    if len(next_word) > 0:
                        # Partially read input without a transition:
                        # mark the run as stuck via a None-labelled state.
                        self.current_state = FSMState(None,
                                                      allow_label_None=True)
                    raise StopIteration

            # process transition
            try:
                transition.hook(transition, self)
            except AttributeError:
                pass
            self.write_word(transition.word_out)

            # go to next state
            self.current_state = transition.to_state

        except StopIteration:
            # this means, either input tape is finished or
            # someone has thrown StopIteration manually (in one
            # of the hooks)
            #
            # Fixed: ``label`` is a method (called as ``label()``
            # elsewhere, e.g. in ``process``); comparing the bound
            # method itself to ``None`` was always ``False``.
            if self.current_state.label() is None or not self.current_state.is_final:
                self.accept_input = False
            if not hasattr(self, 'accept_input'):
                self.accept_input = True
            if self.current_state.is_final:
                self.write_word(self.current_state.final_word_out)
            raise StopIteration

        # return
        return transition

    def read_letter(self):
        """
        Reads a letter from the input tape.

        INPUT:

        Nothing.

        OUTPUT:

        A letter.

        Exception ``StopIteration`` is thrown if tape has reached
        the end.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.read_letter()
            0
        """
        return self._input_tape_iter_.next()

    def write_letter(self, letter):
        """
        Writes a letter on the output tape.

        INPUT:

        - ``letter`` -- the letter to be written.

        OUTPUT:

        Nothing.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.write_letter(42)
            sage: it.output_tape
            [42]
        """
        self.output_tape.append(letter)

    def write_word(self, word):
        """
        Writes a word on the output tape.

        INPUT:

        - ``word`` -- the word to be written.

        OUTPUT:

        Nothing.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.write_word([4, 2])
            sage: it.output_tape
            [4, 2]
        """
        for letter in word:
            self.write_letter(letter)

    def get_next_transition(self, word_in):
        """
        Returns the next transition according to ``word_in``. It is
        assumed that we are in state ``self.current_state``.

        INPUT:

        - ``word_in`` -- the input word.

        OUTPUT:

        The next transition according to ``word_in``. It is assumed
        that we are in state ``self.current_state``. If no transition
        matches, a ``ValueError`` is thrown.

        EXAMPLES::

            sage: from sage.combinat.finite_state_machine import FSMProcessIterator
            sage: inverter = Transducer({'A': [('A', 0, 1), ('A', 1, 0)]},
            ....:     initial_states=['A'], final_states=['A'])
            sage: it = FSMProcessIterator(inverter, input_tape=[0, 1])
            sage: it.get_next_transition([0])
            Transition from 'A' to 'A': 0|1
            sage: it.get_next_transition([2])
            Traceback (most recent call last):
            ...
            ValueError: No transition with input [2] found.
        """
        for transition in self.current_state.transitions:
            if transition.word_in == word_in:
                return transition
        raise ValueError("No transition with input %s found." % (word_in,))
#*****************************************************************************
@cached_function
def setup_latex_preamble():
    r"""
    This function adds the package ``tikz`` with support for automata
    to the preamble of Latex so that the finite state machines can be
    drawn nicely.

    INPUT:

    Nothing.

    OUTPUT:

    Nothing.

    See the section on :ref:`finite_state_machine_LaTeX_output`
    in the introductory examples of this module.

    TESTS::

        sage: from sage.combinat.finite_state_machine import setup_latex_preamble
        sage: setup_latex_preamble()
        sage: ("\usepackage{tikz}" in latex.extra_preamble()) == latex.has_file("tikz.sty")
        True
    """
    # Only adds tikz if the local LaTeX installation actually provides it.
    latex.add_package_to_preamble_if_available('tikz')
    # MathJax cannot render tikz pictures, so keep it from trying.
    latex.add_to_mathjax_avoid_list("tikz")
    if latex.has_file("tikz.sty"):
        # The 'automata' tikz library provides the state/transition macros.
        latex.add_to_preamble(r'\usetikzlibrary{automata}')
#*****************************************************************************
|
from django.contrib import admin
from . import models
class LibraryAdmin(admin.ModelAdmin):
    """Django admin configuration for Library objects."""

    # Admin search covers the library's own fields plus, via the
    # double-underscore lookups, fields of the related repository.
    search_fields = (
        "owner",
        "name",
        "repository__owner",
        "repository__name",
        "repository__remote_id",
    )
admin.site.register(models.Library, LibraryAdmin)
|
from avatar.models import Avatar
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import permission_classes, api_view
from rest_framework.permissions import IsAuthenticatedOrReadOnly
from rest_framework import viewsets
from rest_framework.views import APIView
from avatar.serializers import AvatarSerializer
class AvatarViewSet(viewsets.ModelViewSet):
    """DRF viewset providing the standard CRUD endpoints
    (list/retrieve/create/update/destroy) for Avatar objects."""

    queryset = Avatar.objects.all()
    serializer_class = AvatarSerializer
|
def SubsequenceLength(string):
    """Return the length of the longest substring of ``string`` that
    contains no repeated character (classic sliding-window scan)."""
    last_seen_at = {}          # character -> index of its most recent occurrence
    window_start = 0           # left edge of the current duplicate-free window
    best = 0
    for idx, ch in enumerate(string):
        # A repeat inside the current window forces the window to start
        # just past the previous occurrence of this character.
        if ch in last_seen_at and last_seen_at[ch] >= window_start:
            window_start = last_seen_at[ch] + 1
        last_seen_at[ch] = idx
        best = max(best, idx - window_start + 1)
    return best

print(SubsequenceLength("geeksforgeeks"))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Reapplies the field definition of CycleResultSet.monitors (the
    # help_text is a bytes literal, as generated under Python 2).

    dependencies = [
        ('umibukela', '0019_auto_20170124_1252'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cycleresultset',
            name='monitors',
            field=models.ManyToManyField(help_text=b"Only monitors for the current partner are shown. If you update the Partner you'll have to save and edit this Cycle Result Set again to see the available monitors.", to='umibukela.Monitor', blank=True),
        ),
    ]
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import tempfile
import unittest
import md5_check
class TestMd5Check(unittest.TestCase):
    """Tests for md5_check.CallAndRecordIfStale staleness detection."""

    def testCallAndRecordIfStale(self):
        """Verifies the stale/fresh decision across changes to input files
        and input strings (content, order, additions, removals)."""
        input_strings = ['string1', 'string2']
        input_file1 = tempfile.NamedTemporaryFile()
        input_file2 = tempfile.NamedTemporaryFile()
        file1_contents = 'input file 1'
        file2_contents = 'input file 2'
        input_file1.write(file1_contents)
        input_file1.flush()
        input_file2.write(file2_contents)
        input_file2.flush()
        input_files = [input_file1.name, input_file2.name]
        record_path = tempfile.NamedTemporaryFile(suffix='.stamp')

        def CheckCallAndRecord(should_call, message, force=False):
            # Runs CallAndRecordIfStale and asserts whether the callback fired.
            self.called = False
            def MarkCalled():
                self.called = True
            md5_check.CallAndRecordIfStale(
                MarkCalled,
                record_path=record_path.name,
                input_paths=input_files,
                input_strings=input_strings,
                force=force)
            # assertEqual replaces the long-deprecated failUnlessEqual alias.
            self.assertEqual(should_call, self.called, message)

        CheckCallAndRecord(True, 'should call when record doesn\'t exist')
        CheckCallAndRecord(False, 'should not call when nothing changed')
        CheckCallAndRecord(True, force=True, message='should call when forced')

        input_file1.write('some more input')
        input_file1.flush()
        CheckCallAndRecord(True, 'changed input file should trigger call')

        input_files = input_files[::-1]
        CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')

        input_files = input_files[:1]
        CheckCallAndRecord(True, 'removing file should trigger call')

        input_files.append(input_file2.name)
        CheckCallAndRecord(True, 'added input file should trigger call')

        input_strings[0] = input_strings[0] + ' a bit longer'
        CheckCallAndRecord(True, 'changed input string should trigger call')

        input_strings = input_strings[::-1]
        CheckCallAndRecord(True, 'reordering of string inputs should trigger call')

        input_strings = input_strings[:1]
        CheckCallAndRecord(True, 'removing a string should trigger call')

        input_strings.append('a brand new string')
        CheckCallAndRecord(True, 'added input string should trigger call')
# Allow running this test file directly.
if __name__ == '__main__':
    unittest.main()
|
import os
import shutil
from time import sleep
from cerium import AndroidDriver
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
from constants import DIR_CHOOSE, DIRECTORY, POSITION
from quizzes import insert_db
from utils import choose_parsing, confirm_question, question_parsing
# Start from a clean working directory for the watched files.
# NOTE(review): rmtree raises if DIRECTORY does not exist yet -- confirm
# the directory is guaranteed to be present before this script runs.
shutil.rmtree(DIRECTORY)
os.mkdir(DIRECTORY)
# Connect to the Android device over the network (cerium driver).
driver = AndroidDriver(wireless=True)
# driver = AndroidDriver()
# driver.auto_connect()
class FileEventHandler(FileSystemEventHandler):
    """Reacts to marker files appearing in the watched directory and
    drives the quiz game on the device accordingly.

    The handler dispatches on the created file's basename:
    'findQuiz' (a new question), 'choose' (answer options available)
    and 'fightResult' (round finished).
    """

    def __init__(self):
        FileSystemEventHandler.__init__(self)

    def on_created(self, event):
        # ``question``/``options`` are shared between the 'findQuiz' and
        # 'choose' events, hence module-level globals.
        # NOTE(review): the 'choose' branch assumes a preceding 'findQuiz'
        # event has already set both globals -- confirm event ordering.
        global question, options
        if event.src_path.split('\\')[-1] == 'findQuiz':
            sleep(0.5)
            try:
                question, options = question_parsing()
                x, y = confirm_question(question, options)
                # Keep tapping until the 'choose' marker file shows up.
                while not os.path.exists(DIR_CHOOSE):
                    driver.click(x, y)
                    sleep(0.2)
            except KeyError:
                # Unknown question: back out and restart a game round.
                driver.back()
                sleep(0.2)
                option = POSITION[-1]
                driver.swipe_up()
                driver.click(option['x'], option['y'])
                print('游戏开始')
        elif event.src_path.split('\\')[-1] == 'choose':
            sleep(1)
            # Resolve the correct answer and remember it for next time.
            question, answer = choose_parsing(question, options)
            print('问题:', question)
            print('答案:', answer)
            insert_db(question, answer)
        elif event.src_path.split('\\')[-1] == 'fightResult':
            # Round over: navigate back and start the next game.
            print('游戏结束\n')
            driver.back()
            sleep(0.2)
            option = POSITION[-1]
            driver.swipe_up()
            driver.click(option['x'], option['y'])
            print('游戏开始')
if __name__ == "__main__":
observer = Observer()
handler = FileEventHandler()
observer.schedule(handler, DIRECTORY, True)
print('游戏开始')
option = POSITION[-1]
driver.click(option['x'], option['y'])
observer.start()
try:
while True:
sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
import time
def get_next_array(the_str):
    """Build the KMP failure ("next") table for ``the_str``.

    ``table[i]`` holds the fallback position for a mismatch at pattern
    index ``i``; ``table[0]`` is the conventional ``-1`` sentinel.
    """
    prefix_len = -1
    pos = 0
    table = [-1] + [0] * (len(the_str) - 1)
    while pos < len(the_str) - 1:
        if prefix_len == -1 or the_str[pos] == the_str[prefix_len]:
            pos += 1
            prefix_len += 1
            table[pos] = prefix_len
        else:
            # Fall back to the next shorter border.
            prefix_len = table[prefix_len]
    return table


def kmp(aim_str, model_str):
    """Search ``aim_str`` for every occurrence of ``model_str`` via KMP.

    Returns a list whose first element is the summary string
    ``"time: <count>"`` followed by the match start indices. After each
    hit the scan restarts one position past the match start, so
    overlapping occurrences are reported.
    """
    hits = []
    text_len = len(aim_str)
    pattern_len = len(model_str)
    if text_len >= pattern_len:
        failure = get_next_array(model_str)
        text_pos = 0
        pattern_pos = 0
        while text_pos < text_len:
            if pattern_pos == -1 or aim_str[text_pos] == model_str[pattern_pos]:
                text_pos += 1
                pattern_pos += 1
            else:
                pattern_pos = failure[pattern_pos]
            if pattern_pos == pattern_len:
                hits.append(text_pos - pattern_len)
                # Restart just past the match start (allows overlaps).
                text_pos = text_pos - pattern_len + 1
                pattern_pos = 0
    return ["time: " + str(len(hits))] + hits
# Quick smoke test: search a short text, time the run, and show a table.
start = time.time()
print(kmp("ababaababababba", "aba"))
print(time.time()-start)
print(get_next_array("abbababababab"))
|
'''
Test script for GrFNN, plotting the entrainment for a sin wave of changing frequency.
@author T. Kaplan
'''
import numpy as np
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import time
from gfnn import FrequencyType, FrequencyDist, ZParams, GrFNN
from plot import spectro_plot
# Construct our model by instantiating the class defined above
dim_in = 300
freq_dist = FrequencyDist(0.25, 6.0, dim_in, FrequencyType.LINEAR)
zparams = ZParams()
model = GrFNN(freq_dist, zparams, fs=160)
# Stimulus - 50 seconds of FHz sin, at a changing frequency (4->2)
F = 4
t1 = np.arange(0, 25, model.dt)
x1 = np.sin(2 * np.pi * F * t1) * 0.25
t2 = np.arange(25, 50, model.dt)
x2 = np.sin(2 * np.pi * F/2 * t2) * 0.25
# Prepare an initial plot
t = np.concatenate([t1, t2])
x = np.concatenate([x1, x2])
px = freq_dist.dist
py = np.zeros(px.shape)
plt.plot(px, py)
# One row of oscillator states per sample, fed one sample at a time.
zs = np.empty((len(t), dim_in), dtype=np.complex64)
t0 = time.time()
for i in range(len(t)):
    out = model(x[i])
    zs[i] = out
    # Update plot:
    if i % 10 == 0:
        py = np.abs(out)
        plt.gca().lines[0].set_ydata(py)
        plt.gca().relim()
        plt.gca().autoscale_view()
        plt.pause(0.01)
# NOTE(review): this rebinding of t1 (wall-clock time) shadows the
# time-axis array t1 created above; harmless here since the array has
# already been consumed, but worth renaming.
t1 = time.time()
print('Took', round(t1-t0, 2))
plt.show()
|
#!/usr/bin/env python
# encoding: utf-8
from flask import render_template
from .. import demo
@demo.app_errorhandler(404)
def page_not_found(e):
    """Blueprint-wide 404 handler.

    The status code is returned explicitly: a Flask handler that returns
    only a body responds with the default 200 status.
    """
    return "page not found", 404
@demo.app_errorhandler(500)
def internal_server_error(e):
    """Blueprint-wide 500 handler.

    The status code is returned explicitly: a Flask handler that returns
    only a body responds with the default 200 status.
    """
    return "internal server error", 500
|
# -*- coding: utf-8 -*-
"""SuperDARN data support for grdex files(Alpha Level!)
Parameters
----------
platform : string
'superdarn'
name : string
'grdex'
tag : string
'north' or 'south' for Northern/Southern hemisphere data
Note
----
Requires davitpy and davitpy to load SuperDARN files.
Uses environment variables set by davitpy to download files
from Virginia Tech SuperDARN data servers. davitpy routines
are used to load SuperDARN data.
This material is based upon work supported by the
National Science Foundation under Grant Number 1259508.
Any opinions, findings, and conclusions or recommendations expressed in this
material are those of the author(s) and do not necessarily reflect the views
of the National Science Foundation.
Warnings
--------
Cleaning only removes entries that have 0 vectors, grdex files
are constituted from what it is thought to be good data.
"""
from __future__ import print_function
from __future__ import absolute_import
import sys
import os
import pandas as pds
import numpy as np
import pysat
platform = 'superdarn'
name = 'grdex'
tags = {'north':'',
'south':''}
sat_ids = {'':['north', 'south']}
test_dates = {'':{'north':pysat.datetime(2009,1,1),
'south':pysat.datetime(2009,1,1)}}
def list_files(tag='north', sat_id=None, data_path=None, format_str=None):
    """Return a Pandas Series of every file for chosen satellite data

    Parameters
    -----------
    tag : (string)
        Denotes type of file to load. Accepted types are 'north' and 'south'.
        (default='north')
    sat_id : (string or NoneType)
        Specifies the satellite ID for a constellation. Not used.
        (default=None)
    data_path : (string or NoneType)
        Path to data directory. If None is specified, the value previously
        set in Instrument.files.data_path is used. (default=None)
    format_str : (string or NoneType)
        User specified file format. If None is specified, the default
        formats associated with the supplied tags are used. (default=None)

    Returns
    --------
    pysat.Files.from_os : (pysat._files.Files)
        A class containing the verified available files
    """
    if format_str is None and tag is not None:
        # Membership test replaces the chained equality comparisons.
        if tag in ('north', 'south'):
            hemi_fmt = ''.join(('{year:4d}{month:02d}{day:02d}.', tag, '.grdex'))
            return pysat.Files.from_os(data_path=data_path, format_str=hemi_fmt)
        else:
            estr = 'Unrecognized tag name for SuperDARN, north or south.'
            raise ValueError(estr)
    elif format_str is None:
        estr = 'A tag name must be passed to SuperDARN.'
        raise ValueError(estr)
    else:
        # User supplied their own template; trust it as-is.
        return pysat.Files.from_os(data_path=data_path, format_str=format_str)
def load(fnames, tag=None, sat_id=None):
    """Load a SuperDARN grdex file into a pysat-compatible DataFrame.

    Parameters
    -----------
    fnames : (list)
        File names to load; only a single file is currently supported.
    tag : (string or NoneType)
        Hemisphere tag, 'north' or 'south', passed through to davitpy.
        (default=None)
    sat_id : (string or NoneType)
        Satellite ID. Not used. (default=None)

    Returns
    --------
    (pandas.DataFrame, pysat.Meta)
        Record data indexed by record start time, plus empty metadata.
    """
    import davitpy
    if len(fnames) <= 0:
        return pysat.DataFrame(None), pysat.Meta(None)
    elif len(fnames) == 1:
        # Deliberately wide time bounds so every record in the file passes.
        myPtr = davitpy.pydarn.sdio.sdDataPtr(sTime=pysat.datetime(1980, 1, 1),
                                              fileType='grdex',
                                              eTime=pysat.datetime(2250, 1, 1),
                                              hemi=tag,
                                              fileName=fnames[0])
        myPtr.open()

        in_list = []
        # Per-record scalar fields accumulated across the whole file.
        in_dict = {'stid':[],
                   'channel':[],
                   'noisemean':[],
                   'noisesd':[],
                   'gsct':[],
                   'nvec':[],
                   'pmax':[],
                   'start_time':[],
                   'end_time':[],
                   'vemax':[],
                   'vemin':[],
                   'pmin':[],
                   'programid':[],
                   'wmax':[],
                   'wmin':[],
                   'freq':[]}
        while True:
            info = myPtr.readRec()
            if info is None:
                # End of file.
                myPtr.close()
                break
            drift_frame = pds.DataFrame.from_records(info.vector.__dict__,
                                                     nrows=len(info.pmax),
                                                     index=info.vector.index)
            drift_frame['partial'] = 1
            drift_frame.drop('index', axis=1, inplace=True)
            drift_frame.index.name = 'index'
            # Slice the flat vector frame into one sub-frame per station,
            # using the per-station vector counts.
            sum_vec = 0
            for nvec in info.nvec:
                in_list.append(drift_frame.iloc[sum_vec:sum_vec+nvec])
                sum_vec += nvec
            in_dict['stid'].extend(info.stid)
            in_dict['channel'].extend(info.channel)
            in_dict['noisemean'].extend(info.noisemean)
            in_dict['noisesd'].extend(info.noisesd)
            in_dict['gsct'].extend(info.gsct)
            in_dict['nvec'].extend(info.nvec)
            in_dict['pmax'].extend(info.pmax)
            in_dict['start_time'].extend([info.sTime]*len(info.pmax))
            in_dict['end_time'].extend([info.eTime]*len(info.pmax))
            in_dict['vemax'].extend(info.vemax)
            in_dict['vemin'].extend(info.vemin)
            in_dict['pmin'].extend(info.pmin)
            in_dict['programid'].extend(info.programid)
            in_dict['wmax'].extend(info.wmax)
            in_dict['wmin'].extend(info.wmin)
            in_dict['freq'].extend(info.freq)
        output = pds.DataFrame(in_dict)
        output['vector'] = in_list
        output.index = output.start_time
        output.drop('start_time', axis=1, inplace=True)

        return output, pysat.Meta()
    else:
        raise ValueError('Only one filename currently supported.')
#def default(ivm):
#
# return
def clean(self):
    """Drop records that contain no velocity vectors.

    grdex files are believed to contain only good data, so cleaning is
    limited to removing entries with zero vectors (see module warning).
    """
    # remove data when there are no vectors
    idx, = np.where(self['nvec'] > 0)
    self.data = self.data.iloc[idx]
    return
def download(date_array, tag, sat_id, data_path, user=None, password=None):
    """
    Download SuperDARN data from Virginia Tech organized for loading by pysat.

    Parameters
    -----------
    date_array : array-like
        Dates to download data for.
    tag : (string)
        Hemisphere tag, 'north' or 'south'.
    sat_id : (string)
        Satellite ID. Not used.
    data_path : (string)
        Directory the downloaded files are written to.
    user : (string or NoneType)
        SFTP username; defaults to the DBREADUSER environment variable
        (set up by davitpy). (default=None)
    password : (string or NoneType)
        SFTP password; defaults to the DBREADPASS environment variable.
        (default=None)
    """
    import sys
    import os
    import pysftp
    import davitpy

    if user is None:
        user = os.environ['DBREADUSER']
    if password is None:
        password = os.environ['DBREADPASS']

    with pysftp.Connection(
            os.environ['VTDB'],
            username=user,
            password=password) as sftp:

        for date in date_array:
            myDir = '/data/'+date.strftime("%Y")+'/grdex/'+tag+'/'
            fname = date.strftime("%Y%m%d")+'.' + tag + '.grdex'
            local_fname = fname+'.bz2'
            saved_fname = os.path.join(data_path,local_fname)
            full_fname = os.path.join(data_path,fname)
            try:
                print('Downloading file for '+date.strftime('%D'))
                sys.stdout.flush()
                sftp.get(myDir+local_fname, saved_fname)
                # NOTE(review): shells out with interpolated paths; fine
                # for trusted local paths, but subprocess.run with an
                # argument list would be safer -- confirm before changing.
                os.system('bunzip2 -c '+saved_fname+' > '+ full_fname)
                os.system('rm ' + saved_fname)
            except IOError:
                # Missing remote file: report and continue with next date.
                print('File not available for '+date.strftime('%D'))
    return
|
def load(h):
    """Return the local-use code table entries (code, abbreviation, title)
    as a tuple of dicts; ``h`` is accepted for interface compatibility and
    ignored."""
    rows = (
        (192, 192, 'Medical meteorological products'),
        (193, 193, 'Diagnostic meteorological products'),
        (194, 194, 'Analyse error products'),
        (195, 195, 'Probabilities from deterministic local-model'),
        (196, 196, 'Probabilities from WarnMOS'),
        (197, 197, 'Mineral dust'),
        (198, 198, 'Covariance'),
        (254, 254, 'DUMMIES for testing'),
        (None, 255, 'Missing'),
    )
    return tuple({'abbr': abbr, 'code': code, 'title': title}
                 for abbr, code, title in rows)
|
"""dobbyproject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
import dobby.views
import board.views
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', dobby.views.home, name='home'),
    path('signup/', dobby.views.signup, name='signup'),
    path('userroom/', dobby.views.userroom, name='userroom'),
    path('board/', board.views.board_home, name='board_home'),
    path('error/', dobby.views.error, name='error'),
    # NOTE(review): duplicates 'board/' above (same view, same name) for
    # the no-trailing-slash URL -- confirm both entries are intended.
    path('board', board.views.board_home, name='board_home'),
    path('board/<int:board_id>/', board.views.detail, name='detail'),
    path('board/new/', board.views.new, name='new'),
    path('board/create/', board.views.create, name='create'),
]
|
from app import app, db
from flask import render_template
@app.errorhandler(404)
def error_404(error):
    """Render the custom 404 page, keeping the 404 status code."""
    return render_template('errors/404.html'), 404
@app.errorhandler(500)
def error_500(error):
    """Render the custom 500 page, keeping the 500 status code."""
    # Discard the (possibly broken) DB session so later requests start fresh.
    db.session.remove()
    return render_template('errors/500.html'), 500
@app.errorhandler(413)
def error_413(error):
    """Render the custom 413 (payload too large) page, keeping the status."""
    return render_template('errors/413.html'), 413
|
from spec import Spec, skip, eq_, raises
from invoke.tasks import task, Task
from invoke.loader import Loader
from _utils import support
#
# NOTE: Most Task tests use @task as it's the primary interface and is a very
# thin wrapper around Task itself. This way we don't have to write 2x tests for
# both Task and @task. Meh :)
#
def _func():
pass
class task_(Spec):
    "@task"
    def setup(self):
        # Collections are loaded from the shared test support directory.
        self.loader = Loader(root=support)
        self.vanilla = self.loader.load_collection('decorator')

    def allows_access_to_wrapped_object(self):
        def lolcats():
            pass
        # The decorated Task exposes the raw function via .body.
        eq_(task(lolcats).body, lolcats)

    def allows_alias_specification(self):
        eq_(self.vanilla['foo'], self.vanilla['bar'])

    def allows_multiple_aliases(self):
        eq_(self.vanilla['foo'], self.vanilla['otherbar'])

    def allows_default_specification(self):
        # The empty-string key resolves to the collection's default task.
        eq_(self.vanilla[''], self.vanilla['biz'])

    @raises(ValueError)
    def raises_ValueError_on_multiple_defaults(self):
        self.loader.load_collection('decorator_multi_default')

    def sets_arg_help(self):
        eq_(self.vanilla['punch'].help['why'], 'Motive')

    def sets_arg_kind(self):
        skip()

    def allows_annotating_args_as_positional(self):
        eq_(self.vanilla['one_positional'].positional, ['pos'])
        eq_(self.vanilla['two_positionals'].positional, ['pos1', 'pos2'])

    def when_positional_arg_missing_all_non_default_args_are_positional(self):
        eq_(self.vanilla['implicit_positionals'].positional, ['pos1', 'pos2'])

    def pre_tasks_stored_as_simple_list_of_strings(self):
        @task(pre=['whatever'])
        def func():
            pass
        eq_(func.pre, ['whatever'])

    def allows_star_args_as_shortcut_for_pre(self):
        # Positional args to @task are shorthand for pre=... (stored as tuple).
        @task('my', 'pre', 'tasks')
        def func():
            pass
        eq_(func.pre, ('my', 'pre', 'tasks'))

    @raises(TypeError)
    def no_ambiguity_between_star_args_and_pre_kwarg(self):
        # Mixing both spellings of "pre" must be rejected.
        @task('lol', 'wut', pre=['no', 'wai'])
        def func():
            pass
class Task_(Spec):
    class attributes:
        def has_default_flag(self):
            eq_(Task(_func).is_default, False)

    class callability:
        def setup(self):
            @task
            def foo():
                "My docstring"
                return 5
            self.task = foo

        def dunder_call_wraps_body_call(self):
            eq_(self.task(), 5)

        def tracks_times_called(self):
            eq_(self.task.called, False)
            self.task()
            eq_(self.task.called, True)
            eq_(self.task.times_called, 1)
            self.task()
            eq_(self.task.times_called, 2)

        def wraps_body_docstring(self):
            eq_(self.task.__doc__, "My docstring")

        def wraps_body_name(self):
            eq_(self.task.__name__, "foo")

    class get_arguments:
        def setup(self):
            @task(positional=['arg3', 'arg1'])
            def mytask(arg1, arg2=False, arg3=5):
                pass
            self.task = mytask
            self.args = self.task.get_arguments()
            self.argdict = self._arglist_to_dict(self.args)

        def _arglist_to_dict(self, arglist):
            # This kinda duplicates Context.add_arg(x) for x in arglist :(
            ret = {}
            for arg in arglist:
                for name in arg.names:
                    ret[name] = arg
            return ret

        def _task_to_dict(self, task):
            return self._arglist_to_dict(task.get_arguments())

        def positional_args_come_first(self):
            # Order follows the positional=['arg3', 'arg1'] declaration.
            eq_(self.args[0].name, 'arg3')
            eq_(self.args[1].name, 'arg1')
            eq_(self.args[2].name, 'arg2')

        def kinds_are_preserved(self):
            # NOTE(review): relies on Python 2 map() returning a list --
            # under Python 3 this comparison would need list(map(...)).
            eq_(
                map(lambda x: x.kind, self.args),
                # Remember that the default 'kind' is a string.
                [int, str, bool]
            )

        def positional_flag_is_preserved(self):
            eq_(
                map(lambda x: x.positional, self.args),
                [True, True, False]
            )

        def turns_function_signature_into_Arguments(self):
            eq_(len(self.args), 3, str(self.args))
            assert 'arg2' in self.argdict

        def shortflags_created_by_default(self):
            assert 'a' in self.argdict
            assert self.argdict['a'] is self.argdict['arg1']

        def shortflags_dont_care_about_positionals(self):
            "Positionalness doesn't impact whether shortflags are made"
            for short, long_ in (
                ('a', 'arg1'),
                ('r', 'arg2'),
                ('g', 'arg3'),
            ):
                assert self.argdict[short] is self.argdict[long_]

        def autocreated_short_flags_can_be_disabled(self):
            @task(auto_shortflags=False)
            def mytask(arg):
                pass
            args = self._task_to_dict(mytask)
            assert 'a' not in args
            assert 'arg' in args

        def autocreated_shortflags_dont_collide(self):
            "auto-created short flags don't collide"
            @task
            def mytask(arg1, arg2, barg):
                pass
            args = self._task_to_dict(mytask)
            assert 'a' in args
            assert args['a'] is args['arg1']
            assert 'r' in args
            assert args['r'] is args['arg2']
            assert 'b' in args
            assert args['b'] is args['barg']

        def early_auto_shortflags_shouldnt_lock_out_real_shortflags(self):
            # I.e. "task --foo -f" => --foo should NOT get to pick '-f' for its
            # shortflag or '-f' is totally fucked.
            @task
            def mytask(longarg, l):
                pass
            args = self._task_to_dict(mytask)
            assert 'longarg' in args
            assert 'o' in args
            assert args['o'] is args['longarg']
            assert 'l' in args
|
from typing import Tuple
import math
def normal_approximation_to_binomial(n: int, p: float) -> Tuple[float, float]:
    """Return (mu, sigma) of the normal approximating a Binomial(n, p)."""
    mean = n * p
    variance = n * p * (1 - p)
    return mean, math.sqrt(variance)
from lesson5 import normal_cdf
# The CDF *is* the probability of falling below a threshold.
normal_probability_below = normal_cdf
def normal_probability_above(lo: float,
                             mu: float = 0,
                             sigma: float = 1) -> float:
    """P(X > lo) for X ~ N(mu, sigma): complement of the CDF."""
    below = normal_cdf(lo, mu, sigma)
    return 1 - below
def normal_probability_between(lo: float, hi: float, mu: float = 0, sigma: float = 1):
    """P(lo < X < hi) for X ~ N(mu, sigma)."""
    upper = normal_cdf(hi, mu, sigma)
    lower = normal_cdf(lo, mu, sigma)
    return upper - lower
def normal_probability_outside(lo: float, hi: float, mu: float = 0, sigma: float = 1):
    """P(X <= lo or X >= hi): complement of falling between the bounds."""
    inside = normal_probability_between(lo, hi, mu, sigma)
    return 1 - inside
from lesson5 import inverse_normal_cdf
def normal_upper_bound(probability: float,
                       mu: float = 0,
                       sigma: float = 1) -> float:
    """Return z such that P(X <= z) == probability."""
    z = inverse_normal_cdf(probability, mu, sigma)
    return z
def normal_lower_bound(probability: float,
                       mu: float = 0,
                       sigma: float = 1) -> float:
    """Return z such that P(X >= z) == probability."""
    upper_tail = 1 - probability
    return inverse_normal_cdf(upper_tail, mu, sigma)
def normal_two_sided_bounds(probability: float, mu: float = 0, sigma: float = 1) -> Tuple[float, float]:
    """Return the symmetric (lower, upper) interval holding `probability` mass."""
    tail = (1 - probability) / 2
    # Lower bound leaves `tail` mass below it; upper bound leaves `tail` above.
    return normal_upper_bound(tail, mu, sigma), normal_lower_bound(tail, mu, sigma)
# Binomial(1000, 0.5) under H0 (fair coin), normal-approximated.
mu_0, sigma_0 = normal_approximation_to_binomial(1000, 0.5)
# 95% two-sided acceptance region under H0.
lower_bound, upper_bound = normal_two_sided_bounds(0.95, mu_0, sigma_0)
lo, hi = normal_two_sided_bounds(0.95, mu_0, sigma_0)
# Distribution if the coin were actually biased (p = 0.55).
mu_1, sigma_1 = normal_approximation_to_binomial(1000, 0.55)
# Type-2 error: H1 is true but the count still lands inside the H0 region.
type2_probability = normal_probability_between(lo, hi, mu_1, sigma_1)
power = 1 - type2_probability
# One-sided variant: reject only when the count exceeds the 95% cutoff.
hi = normal_upper_bound(0.95, mu_0, sigma_0)
type2_probability = normal_probability_below(hi, mu_1, sigma_1)
power = 1 - type2_probability
def two_sided_p_value(x: float, mu: float = 0, sigma: float = 1) -> float:
    """Two-sided p-value of observing x under N(mu, sigma)."""
    # Double the mass of whichever tail x falls in.
    tail = normal_probability_above if x >= mu else normal_probability_below
    return 2 * tail(x, mu, sigma)
two_sided_p_value(529.5, mu_0, sigma_0)
import random
# Monte-Carlo sanity check of the p-value: count runs of 1000 fair flips
# that land at least as extreme as 530 heads (or its mirror image, 470).
extreme_value_count = 0
for _ in range(1000):
    num_heads = sum(1 if random.random() < 0.5 else 0
                    for _ in range(1000))
    if num_heads >= 530 or num_heads <= 470:
        extreme_value_count += 1
#assert 59 < extreme_value_count < 65, f'{extreme_value_count}'
two_sided_p_value(531.5, mu_0, sigma_0)
# One-sided p-values are just the tail probabilities.
upper_p_value = normal_probability_above
lower_p_value = normal_probability_below
upper_p_value(524.5, mu_0, sigma_0)
upper_p_value(526.5, mu_0, sigma_0)
# p-hacking
from typing import List
def run_experiment() -> List[bool]:
    """Flip a fair coin 1000 times; True means heads."""
    flips = []
    for _ in range(1000):
        flips.append(random.random() < 0.5)
    return flips
def reject_fairness(experiment: List[bool]) -> bool:
    """Reject the fair-coin hypothesis when heads fall outside [469, 531]."""
    num_heads = sum(1 for flip in experiment if flip)
    return not (469 <= num_heads <= 531)
random.seed(0)
# 1000 experiments of 1000 flips each: ~5% should (wrongly) reject fairness.
experiments = [run_experiment() for _ in range(1000)]
num_rejections = len([experiment
                      for experiment in experiments
                      if reject_fairness(experiment)])
assert num_rejections == 46  # reproducible thanks to the fixed seed
def estimated_parameters(N: int, n: int):
    """MLE of the success rate p and its standard error for n successes in N trials."""
    p = n / N
    std_error = math.sqrt(p * (1 - p) / N)
    return p, std_error
# for a_b_test_statistic we use null hypothesis
def a_b_test_statistic(N_A: int, n_A: int, N_B: int, n_B: int) -> float:
    """z-statistic for the difference of two estimated proportions (B minus A)."""
    p_A, sigma_A = estimated_parameters(N_A, n_A)
    p_B, sigma_B = estimated_parameters(N_B, n_B)
    denominator = math.sqrt(sigma_A ** 2 + sigma_B ** 2)
    return (p_B - p_A) / denominator
# If ad A takes 200 clicks from 1000 viewers and ad B 180 from 1000, z is the
# standardized difference between the two estimated click-through rates.
z = a_b_test_statistic(1000, 200, 1000, 180)  # -1.14
# Probability of a difference this large if the true means were equal
two_sided_p_value(z)  # 0.254
# If ad B took 150 clicks, then the probability for this difference if p_a and p_b were equal is:
z = a_b_test_statistic(1000, 200, 1000, 150)  # -2.94
two_sided_p_value(z)  # 0.003
# Very small probability for such a click difference if ads A and B were equally effective
def B(alpha: float, beta: float) -> float:
    """Beta-function normalizer (so the Beta density integrates to 1)."""
    numerator = math.gamma(alpha) * math.gamma(beta)
    return numerator / math.gamma(alpha + beta)
def b_pdf(x: float, alpha: float, beta: float) -> float:
    """Beta(alpha, beta) density at x; zero outside the open interval (0, 1)."""
    if 0 < x < 1:
        return x ** (alpha - 1) * (1 - x) ** (beta - 1) / B(alpha, beta)
    return 0
# In general the mass of this distribution is centered around alpha / (alpha + beta);
# the larger alpha and beta are, the narrower the distribution becomes.
# If alpha is much greater than beta, most of the mass lies near 1;
# if beta is much greater than alpha, most of the mass lies near 0.
|
# Google Images "tbs" parameter code tables, keyed by human-readable option.
FILE_TYPE_OPTIONS = {}    # placeholder: file-type filtering not implemented
USAGE_RIGHT_OPTIONS = {}  # placeholder: usage-rights filtering not implemented
ASPECT_RATIO_OPTIONS = {'tall': 't', 'square': 's', 'wide': 'w',
                        'panoramic': 'xw'}
IMAGE_SIZE_OPTIONS = {'any': '', 'icon': 'i', 'medium': 'm', 'large': 'l',
                      'exactly': 'ex', '400x300+': 'qsvga', '640x480+': 'vga',
                      '800x600+': 'svga', '1024x768+': 'xga', '2mp+': '2mp',
                      '4mp+': '4mp', '6mp+': '6mp', '8mp+': '8mp',
                      '10mp+': '10mp', '12mp+': '12mp', '15mp+': '15mp',
                      '20mp+': '20mp', '40mp+': '40mp', '70mp+': '70mp'}
def aspect_ratio_paramenter(option):
    """Build the 'iar:<code>' aspect-ratio term, or None when no option given.

    NOTE(review): the name keeps the historical 'paramenter' typo because
    callers in this module reference it by that name.
    """
    if not option:
        return None
    code = ASPECT_RATIO_OPTIONS[option]
    return 'iar' + ':' + code
def image_size_parameter(option):
    """Build the 'isz:...' size term; a (width, height) pair means exact size."""
    if not option:
        return None
    prefix = 'isz'
    if isinstance(option, (tuple, list)):
        width, height = option
        exact_code = IMAGE_SIZE_OPTIONS['exactly']
        return prefix + ':{},iszw:{},iszh:{}'.format(exact_code, width, height)
    return prefix + ':' + IMAGE_SIZE_OPTIONS[option]
def image_aspect_parameters(aspect_ratio, image_size):
    """Combine pre-built aspect-ratio / image-size terms into a 'tbs=' clause.

    :param aspect_ratio: 'iar:<code>' term or None
    :param image_size: 'isz:...' term or None
    :returns: 'tbs=' followed by the non-None terms joined by commas,
        or None when neither term is given
    """
    # Idiomatic truth test instead of the original `any(...) is False`.
    if not any([aspect_ratio, image_size]):
        return None
    terms = [term for term in (aspect_ratio, image_size) if term is not None]
    return 'tbs=' + ','.join(terms)
def query_builder(query, image_size=None, aspect_ratio=None, page_number=0):
    """Assemble a Google Images search URL.

    :param query: search terms (required)
    :param image_size: key of IMAGE_SIZE_OPTIONS or an exact (width, height)
    :param aspect_ratio: key of ASPECT_RATIO_OPTIONS
    :param page_number: zero-based results page
    :raises ValueError: if query is None
    """
    if query is None:
        raise ValueError('query must have a value.')
    parts = [
        'https://www.google.com/search?' + 'tbm=isch',  # image search mode
        'ijn=' + str(page_number),
        'q=' + str(query),
    ]
    # Optional image-aspect filters ('tbs=' clause).
    extra = image_aspect_parameters(aspect_ratio_paramenter(aspect_ratio),
                                    image_size_parameter(image_size))
    if extra is not None:
        parts.append(extra)
    return '&'.join(parts)
|
# -*- coding: utf-8 -*-
class Link(object):
    """Lightweight wrapper around a link mapping with 'url' and 'id' keys."""

    def __init__(self, link):
        # `link` must be a mapping providing 'url' and 'id'.
        self.url = link['url']
        self.id = link['id']

    def __str__(self):
        template = '\t\tId: {0} \n\t\tUrl: {1}'
        return template.format(self.id, self.url)
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
'nacl_scons_dir': '../../third_party/native_client/googleclient/native_client/scons-out',
},
'includes': [
'common.gypi',
],
'targets': [
{
'target_name': 'build_nacl',
'type': 'none',
'variables': {
'nacl_libs': [
'google_nacl_imc',
'google_nacl_imc_c',
],
'nacl_lib_dir': '<(nacl_scons_dir)/<(CONFIGURATION)-<(OS)/lib',
'nacl_output_dir': '<(SHARED_INTERMEDIATE_DIR)/nacl_libs',
},
'actions': [
{
'action_name': 'build_nacl',
'inputs' : [
'build_nacl.py',
],
'outputs': [
'<(nacl_output_dir)/google_nacl_imc<(LIBRARY_SUFFIX)',
'<(nacl_output_dir)/google_nacl_imc_c<(LIBRARY_SUFFIX)',
'dummy_file_that_never_gets_built_so_scons_always_runs',
],
'action': [
'C:/Python24/python.exe',
'<@(_inputs)',
'--output="<(nacl_output_dir)"',
'--configuration="<(CONFIGURATION)"',
'--platform=<(OS)',
'<@(nacl_libs)',
],
},
],
'all_dependent_settings': {
'include_dirs': [
'<(nacldir)',
],
'libraries': [
'-l<(nacl_output_dir)/google_nacl_imc<(LIBRARY_SUFFIX)',
'-l<(nacl_output_dir)/google_nacl_imc_c<(LIBRARY_SUFFIX)',
],
},
},
],
}
|
""" Executors that run trials in their environment.
"""
from powerlift.executors.azure_ci import AzureContainerInstance
from powerlift.executors.docker import InsecureDocker
from powerlift.executors.localmachine import LocalMachine
|
import sys
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, List, Dict, Any
import logging
log = logging.getLogger(__name__)
import pygame
from pygame.event import Event
if TYPE_CHECKING:
from src.scene import Scene, GameScene
class EventName(str):
    """Marker subclass of str used to type event names in the registry."""
    pass
class Observer(ABC):
    """Interface for objects that react to named events."""

    @abstractmethod
    def subscribe(self) -> EventName:
        """Subscribe self to some event name.
        :returns the event name to which this observer just subscribed"""

    @abstractmethod
    def update(self, data: Any) -> None:
        """This method is called when an event - to which this observer
        is subscribed - happens."""
# Global registry: event name -> observers to notify, in subscription order.
subscribers: Dict[EventName, List[Observer]] = {}
def subscribe(event_name: EventName, observer: Observer) -> EventName:
    """Subscribe an observer to a specific event name.
    :returns the event name to which the observer just subscribed"""
    # BUG FIX: the original appended to the existing list and then
    # unconditionally rebound subscribers[event_name] = [observer],
    # silently dropping every previously subscribed observer.
    subscribers.setdefault(event_name, []).append(observer)
    return event_name
def notify(event_name: EventName, *args, **kwargs) -> None:
    """Notify all observers subscribed to an event with event name."""
    observers = subscribers.get(event_name, [])
    for observer in observers:
        observer.update(*args, **kwargs)
    log.debug(f"Notifying {len(observers)} observers "
              f"of event '{event_name}'")
def unsubscribe_all() -> None:
    # Rebind (rather than clear) the registry: code holding a reference to
    # the old dict keeps its stale view.
    global subscribers
    subscribers = {}
class EventHandler(ABC):
    """Base class for per-scene pygame event handlers."""
    # Scene this handler is attached to.
    scene: 'Scene'

    def __init__(self, scene: 'Scene'):
        self.scene = scene

    @abstractmethod
    def handle_events(self, events: List[Event]) -> None:
        """Process one tick's worth of pygame events."""
        raise NotImplementedError
class AppEventHandler(EventHandler):
    """Handles application-level events (currently only window close)."""

    def __init__(self, scene: 'Scene'):
        super().__init__(scene)

    def handle_events(self, events: List[Event]) -> None:
        # On QUIT: shut pygame down and terminate the whole process.
        for event in events:
            if event.type == pygame.QUIT:
                pygame.quit()
                print("Successful termination")
                sys.exit(0)
class TextEventHandler(EventHandler):
    """Collects the text typed during a single game tick."""
    # Characters typed since handle_events was last called.
    text: str

    def __init__(self, scene: 'GameScene'):
        super().__init__(scene)
        self.text = ""

    def handle_events(self, events: List[Event]) -> None:
        # Rebuild the buffer from this tick's KEYDOWN events only.
        typed = [event.unicode for event in events
                 if event.type == pygame.KEYDOWN]
        self.text = "".join(typed)

    def get_text_from_this_tick(self) -> str:
        """Return the text inputted during the last game tick.
        :return: text that may be an empty string
        """
        return self.text
|
import uvicore
from uvicore.console import command, argument, option
from uvicore.support.dumper import dd, dump
@command()
@option('--raw', is_flag=True, help='Show output without prettyprinter')
def bindings(raw: bool = False):
    """List all Ioc Bindings"""
    if raw:
        # Plain dict of binding attributes, suitable for piping.
        print({key: binding.__dict__
               for key, binding in uvicore.ioc.bindings.items()})
    else:
        uvicore.log.header("List of all Ioc bindings")
        uvicore.log.line()
        dump(uvicore.ioc.bindings)
@command()
def singletons():
    """List Singleton Ioc Bindings"""
    uvicore.log.header("List of all Ioc singleton bindings")
    uvicore.log.line()
    # Explicit `== True` kept from the original filter semantics.
    singleton_bindings = {}
    for key, binding in uvicore.ioc.bindings.items():
        if binding.singleton == True:
            singleton_bindings[key] = binding
    dump(singleton_bindings)
@command()
@argument('type')
def type(type: str):
    """List Ioc Bindings of a Specific Type (comma separated)"""
    uvicore.log.header("List of all {} Ioc bindings".format(type))
    uvicore.log.line()
    # str.split covers both the single and the comma-separated cases:
    # 'a,b' -> ['A', 'B'], 'a' -> ['A'].
    types = [part.upper() for part in type.split(',')]
    matches = {
        key: binding
        for key, binding in uvicore.ioc.bindings.items()
        if binding.type.upper() in types
    }
    dump(matches)
@command()
def overrides():
    """List Overridden Ioc Bindings"""
    uvicore.log.header("List of all Ioc bindings that have been overridden")
    uvicore.log.line()
    # A binding whose stored path differs from its key has been overridden.
    overridden = {}
    for key, binding in uvicore.ioc.bindings.items():
        if binding.path != key:
            overridden[key] = binding
    dump(overridden)
@command()
@argument('key', default='')
@option('--raw', is_flag=True, help='Show output without prettyprinter')
def get(key: str = None, raw: bool = False):
    """Get a binding by name"""
    if raw:
        print(uvicore.ioc.binding(key).__dict__)
    else:
        dump(uvicore.ioc.binding(key))
|
import argparse
from .create import get_data_create_parser, execute_data_create_tool
from .inspect import get_data_inspect_parser, execute_data_inspect_tool
from .lint import get_data_lint_parser, execute_data_lint_tool
# Dictionary of functions which execute their respective tool,
# keyed by the sub-command name parsed from the CLI.
TOOLS = {
    'create': execute_data_create_tool,
    'inspect': execute_data_inspect_tool,
    'lint': execute_data_lint_tool
}
def get_data_parser():
    """
    Generates the parser used for the data container tool.

    :returns: the parser generated
    :rtype: :class:`argparse.ArgumentParser`
    """
    parser = argparse.ArgumentParser(description='Surround Data Container Tool')
    # One sub-parser per entry in TOOLS; `dest='command'` records which one ran.
    sub_parser = parser.add_subparsers(description='This tool must be called with one of the following commands', dest='command')
    sub_parser.add_parser('create', parents=[get_data_create_parser()], help='Capture new data into a container with metadata', description='Create a data container from a file or directory')
    sub_parser.add_parser('inspect', parents=[get_data_inspect_parser()], help='Inspect a data containers contents and/or metadata', description='Inspect the metadata and/or contents of a data container')
    sub_parser.add_parser('lint', parents=[get_data_lint_parser()], help='Check the validity of a data container', description='Check the validity of a data container')
    return parser
def execute_data_tool(parser, args, extra_args):
    """
    Executes the data container tool using the parser and arguments provided.
    Uses the TOOLS dictionary and the command argument to execute the correct
    sub-command function.

    :param parser: the parser used to get the arguments
    :type parser: :class:`argparse.ArgumentParser`
    :param args: the arguments parsed from the user
    :type args: :class:`argparse.Namespace`
    :param extra_args: unused pass-through arguments
    """
    command = args.command
    if not command:
        # No sub-command given: show the full help text.
        parser.print_help()
    elif command in TOOLS:
        TOOLS[command](parser, args)
    else:
        parser.print_usage()
def main():
    """
    Entry point used when this script is executed directly, parses the arguments and executes
    the data container tool.
    """
    parser = get_data_parser()
    args = parser.parse_args()
    # No pass-through arguments when run directly.
    execute_data_tool(parser, args, [])


if __name__ == "__main__":
    main()
|
from math import pi
from re import I
import sys
import time
import os.path as osp
import argparse
import torch
import torch.nn as nn
import numpy as np
import torchreid
from torchreid.utils import (
Logger, check_isfile, set_random_seed, collect_env_info,
resume_from_checkpoint, load_pretrained_weights, compute_model_complexity
)
from test_default_config import videodata_kwargs, get_default_config
from tqdm import tqdm
def parse_data_for_eval(data):
    """Unpack a batch dict into (features, pids, camids)."""
    return data['feature'], data['pid'], data['camid']
def feature_loader(data_loader, load_type='avg'):
    """Collect features and identity labels for every sample in `data_loader`.

    :param data_loader: loader whose batches are dicts with keys 'feature'
        (tensor, assumed (batch, seq_len, 512) -- TODO confirm), 'pid', 'camid'
    :param load_type: 'avg' mean-pools features over the sequence axis; any
        other value keeps them unpooled (NOTE(review): the non-'avg' path
        cannot fill the 2-D `f_` buffer and was already broken upstream)
    :returns: (avg_features, pids, camids, per_frame_features)
    """
    if load_type == 'avg':
        reduce_fn = torch.mean
    else:
        # Identity that accepts the same dim= keyword call as torch.mean
        # (the original `lambda x, y: x` crashed on the dim=1 keyword).
        reduce_fn = lambda x, dim: x
    data_length = len(data_loader.dataset)
    seq_len = data_loader.dataset.seq_len
    f_ = np.zeros(shape=(data_length, 512), dtype=np.float32)
    pids_ = np.zeros(shape=(data_length,), dtype=np.int64)
    camids_ = np.zeros(shape=(data_length,), dtype=np.int64)
    all_ = np.zeros(shape=(data_length, seq_len, 512), dtype=np.float32)
    # BUG FIX: the original computed destination rows as
    # (batch_idx - 1) * batch_size, which sent the first batch to negative
    # indices (wrapping to the END of the arrays) and shifted every later
    # batch by one block; it also assumed every batch was exactly full-size.
    offset = 0
    for data in tqdm(data_loader):
        features, pids, camids = parse_data_for_eval(data)
        n = len(pids)
        rows = slice(offset, offset + n)
        f_[rows] = reduce_fn(features, dim=1).numpy()
        pids_[rows] = pids
        camids_[rows] = camids
        all_[rows] = features
        offset += n
    f_ = torch.tensor(f_, dtype=torch.float32)
    all_ = torch.tensor(all_, dtype=torch.float32)
    return f_, pids_, camids_, all_
if __name__ == '__main__':
    # Build config: library defaults overridden by test_config.yaml.
    cfg = get_default_config()
    cfg.use_gpu = torch.cuda.is_available()
    cfg.merge_from_file('test_config.yaml')
    set_random_seed(cfg.train.seed)
    datamanager = torchreid.data.VideoRLDataManager(**videodata_kwargs(cfg))
    train_loader = datamanager.train_loader
    query_loader = datamanager.test_loader['mars']['query']
    gallery_loader = datamanager.test_loader['mars']['gallery']
    # For each split: dump sequence-averaged features, per-frame features,
    # and the person/camera id arrays to disk.
    print('Extracting features from train set ...')
    tf, t_pids, t_camids, t_all = feature_loader(train_loader)
    print('Done, obtained {}-by-{} matrix'.format(tf.size(0), tf.size(1)))
    torch.save(tf, 'train_avg_feature.pt')
    torch.save(t_all, 'train_all_feature.pt')
    np.save('train_pids.npy', t_pids)
    np.save('train_camids.npy', t_camids)
    print('Extracting features from query set ...')
    qf, q_pids, q_camids, q_all = feature_loader(query_loader)
    print('Done, obtained {}-by-{} matrix'.format(qf.size(0), qf.size(1)))
    torch.save(qf, './query_avg_feature.pt')
    torch.save(q_all, './query_all_feature.pt')
    np.save('./query_pids.npy', q_pids)
    np.save('./query_camids.npy', q_camids)
    print('Extracting features from gallery set ...')
    gf, g_pids, g_camids, g_all = feature_loader(gallery_loader)
    print('Done, obtained {}-by-{} matrix'.format(gf.size(0), gf.size(1)))
    torch.save(gf, './gallery_avg_feature.pt')
    torch.save(g_all, './gallery_all_feature.pt')
    np.save('./gallery_pids.npy', g_pids)
    np.save('./gallery_camids.npy', g_camids)
|
import ConfigParser
import duo_web as duo
from contextlib import closing
from flask import Flask, request, session, redirect, url_for, render_template, flash
# config
DEBUG = True  # development mode; disable in production

# create flask application
app = Flask(__name__)
# Pull uppercase names (e.g. DEBUG) from this module into app.config.
app.config.from_object(__name__)
# config parser
def grab_keys(filename='duo.conf'):
    """Read the Duo integration keys from an ini-style config file.

    :returns: dict with 'akey', 'ikey', 'skey' and 'host' entries
    """
    config = ConfigParser.RawConfigParser()
    config.read(filename)
    return {name: config.get('duo', name)
            for name in ('akey', 'ikey', 'skey', 'host')}
# app-specific configs
def app_config(filename='app.conf'):
    """Return the Flask session secret key from the app config file."""
    parser = ConfigParser.RawConfigParser()
    parser.read(filename)
    return parser.get('app', 'skey')
# Routing functions
@app.route('/')
def show_entries():
    # Landing page: renders the main template.
    return render_template('show_entries.html')
@app.route('/mfa', methods=['GET', 'POST'])
def mfa():
    """Second-factor step: embed the Duo iframe (GET), verify its response (POST)."""
    result = grab_keys()
    # Sign a Duo request for the user placed in the session by /login.
    sec = duo.sign_request(result['ikey'], result['skey'], result['akey'], session['user'])
    if request.method == 'GET':
        return render_template('duoframe.html', duohost=result['host'], sig_request=sec)
    if request.method == 'POST':
        # NOTE(review): sig_response is read from the query string; Duo's web
        # SDK normally POSTs it in the form body -- confirm request.args is right.
        user = duo.verify_response(result['ikey'], result['skey'], result['akey'], request.args.get('sig_response'))
        if user == session['user']:
            # NOTE(review): render_template(url_for('mfa')) renders a template
            # literally named '/mfa' -- looks suspicious; verify intended target.
            return render_template(url_for('mfa'), user=user)
@app.route('/success', methods=['POST'])
def success():
    # Post-MFA landing: bounce back to the entry list.
    return redirect(url_for('show_entries'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Primary-factor login; any non-empty username is accepted."""
    error = None
    if request.method == 'POST':
        username = request.form['username']
        if username == "":
            error = 'Type something in the username field.'
        else:
            # Record the session and continue to the second factor.
            session['logged_in'] = True
            session['user'] = username
            flash('You are logged in')
            return redirect(url_for('mfa'))
    return render_template('login.html', error=error)
@app.route('/logout')
def logout():
    # Drop the login flag (no-op if absent) and return to the entry list.
    session.pop('logged_in', None)
    flash('You were logged out')
    return redirect(url_for('show_entries'))
# main body
if __name__ == '__main__':
    app.secret_key = app_config('app.conf')
    # Listen on all interfaces, port 5000.
    app.run(host="0.0.0.0", port=5000)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Train 3072-7800-512 Gaussian-Bernoulli-Multinomial DBM with pre-training
on CIFAR-10, augmented (x10) using shifts by 1 pixel in all directions
and horizontal mirroring.
Gaussian RBM is initialized from 26 small RBMs trained on patches 8x8
of images, as in [1]. Multinomial RBM trained with increasing k in CD-k and decreasing
learning rate over time.
Per sample validation mean reconstruction error for DBM monotonically
decreases during training from ~0.3 to ~0.11 at the end.
The training took approx. 26 x 35m + 5h 52m + 4h 55m + 11h 11m =
= 1d 13h 8m on GTX 1060.
I also trained for longer with options
```
--small-l2 2e-3 --small-epochs 120 --small-sparsity-cost 0 \
--increase-n-gibbs-steps-every 20 --epochs 80 72 200 \
--l2 2e-3 0.01 1e-8 --max-mf-updates 70
```
with a decrease of MSRE from ~0.6 to ~0.147 at the end and it took
~3d 1h 41m on GTX 1060.
Note that DBM is trained without centering.
References
----------
[1] A. Krizhevsky and G. Hinton. Learning multiple layers of features
from tiny images. 2009.
"""
print __doc__
import os
import argparse
import numpy as np
from keras import regularizers
from keras.callbacks import EarlyStopping, ReduceLROnPlateau
from keras.initializers import glorot_uniform
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, BatchNormalization as BN
from sklearn.metrics import accuracy_score
import env
from bm import DBM
from bm.rbm import GaussianRBM, MultinomialRBM
from bm.utils import (RNG, Stopwatch,
one_hot, one_hot_decision_function, unhot)
from bm.utils.augmentation import shift, horizontal_mirror
from bm.utils.dataset import (load_cifar10,
im_flatten, im_unflatten)
from bm.utils.optimizers import MultiAdam
def make_augmentation(X_train, y_train, n_train, args):
    """Return (X_aug, y_train): the x10 augmented training set.

    Labels are replicated x10 and shuffled with the same fixed seed (1337)
    used for the images, keeping (image, label) pairs aligned.  Augmented
    images are cached on disk as uint8 and reused when the cached size
    matches 10 * n_train.
    """
    X_aug = None
    X_aug_path = os.path.join(args.data_path, 'X_aug.npy')
    y_train = y_train.tolist() * 10
    RNG(seed=1337).shuffle(y_train)
    augment = True
    if os.path.isfile(X_aug_path):
        print "\nLoading augmented data ..."
        X_aug = np.load(X_aug_path)
        print "Checking augmented data ..."
        if len(X_aug) == 10 * n_train:
            augment = False
    if augment:
        print "\nAugmenting data ..."
        s = Stopwatch(verbose=True).start()
        X_aug = np.zeros((10 * n_train, 32, 32, 3), dtype=np.float32)
        X_train = im_unflatten(X_train)
        X_aug[:n_train] = X_train
        # Blocks 1-4: copies shifted by one pixel in each of the 4 directions.
        for i in xrange(n_train):
            for k, offset in enumerate((
                ( 1, 0),
                (-1, 0),
                ( 0, 1),
                ( 0, -1)
            )):
                img = X_train[i].copy()
                X_aug[(k + 1) * n_train + i] = shift(img, offset=offset)
        # Blocks 5-9: horizontal mirrors of the first five blocks.
        for i in xrange(5 * n_train):
            X_aug[5 * n_train + i] = horizontal_mirror(X_aug[i].copy())
        # shuffle once again
        RNG(seed=1337).shuffle(X_aug)
        # convert to 'uint8' type to save disk space
        X_aug *= 255.
        X_aug = X_aug.astype('uint8')
        # flatten to (10 * `n_train`, 3072) shape
        X_aug = im_flatten(X_aug)
        # save to disk
        np.save(X_aug_path, X_aug)
        s.elapsed()
        print "\n"
    return X_aug, y_train
def make_small_rbms((X_train, X_val), args):
    """Train (or load cached) the 26 small Gaussian RBMs on 8x8x3 patches.

    16 RBMs cover a 4x4 grid of non-overlapping 8x8 patches, 9 more cover
    the same grid offset by 4 pixels, and the last one sees the whole image
    downsampled to 8x8.  Each RBM is cached under its own directory and
    reloaded if that directory already exists.
    """
    X_train = im_unflatten(X_train)
    X_val = im_unflatten(X_val)
    # Hyper-parameters shared by every small RBM.
    small_rbm_config = dict(n_visible=8 * 8 * 3,
                            n_hidden=300,
                            sigma=1.,
                            W_init=0.001,
                            vb_init=0.,
                            hb_init=0.,
                            n_gibbs_steps=1,
                            learning_rate=args.small_lr,
                            momentum=np.geomspace(0.5, 0.9, 8),
                            max_epoch=args.small_epochs,
                            batch_size=args.small_batch_size,
                            l2=args.small_l2,
                            sample_v_states=True,
                            sample_h_states=True,
                            sparsity_target=args.small_sparsity_target,
                            sparsity_cost=args.small_sparsity_cost,
                            dbm_first=True,  # !!!
                            metrics_config=dict(
                                msre=True,
                                feg=True,
                                train_metrics_every_iter=2000,
                                val_metrics_every_epoch=2,
                                feg_every_epoch=2,
                                n_batches_for_feg=100,
                            ),
                            verbose=True,
                            display_filters=12,
                            display_hidden_activations=36,
                            v_shape=(8, 8, 3),
                            dtype='float32',
                            tf_saver_params=dict(max_to_keep=1))
    small_rbms = []
    # first 16 ...
    for i in xrange(4):
        for j in xrange(4):
            rbm_id = 4 * i + j
            rbm_dirpath = args.small_dirpath_prefix + str(rbm_id) + '/'
            if os.path.isdir(rbm_dirpath):
                print "\nLoading small RBM #{0} ...\n\n".format(rbm_id)
                rbm = GaussianRBM.load_model(rbm_dirpath)
            else:
                print "\nTraining small RBM #{0} ...\n\n".format(rbm_id)
                X_patches = X_train[:, 8 * i:8 * (i + 1),
                                    8 * j:8 * (j + 1), :]
                X_patches_val = X_val[:, 8 * i:8 * (i + 1),
                                      8 * j:8 * (j + 1), :]
                X_patches = im_flatten(X_patches)
                X_patches_val = im_flatten(X_patches_val)
                rbm = GaussianRBM(random_seed=9000 + rbm_id,
                                  model_path=rbm_dirpath,
                                  **small_rbm_config)
                rbm.fit(X_patches, X_patches_val)
            small_rbms.append(rbm)
    # next 9 ... (same grid shifted by 4 pixels)
    for i in xrange(3):
        for j in xrange(3):
            rbm_id = 16 + 3 * i + j
            rbm_dirpath = args.small_dirpath_prefix + str(rbm_id) + '/'
            if os.path.isdir(rbm_dirpath):
                print "\nLoading small RBM #{0} ...\n\n".format(rbm_id)
                rbm = GaussianRBM.load_model(rbm_dirpath)
            else:
                print "\nTraining small RBM #{0} ...\n\n".format(rbm_id)
                X_patches = X_train[:, 4 + 8 * i:4 + 8 * (i + 1),
                                    4 + 8 * j:4 + 8 * (j + 1), :]
                X_patches_val = X_val[:, 4 + 8 * i:4 + 8 * (i + 1),
                                      4 + 8 * j:4 + 8 * (j + 1), :]
                X_patches = im_flatten(X_patches)
                X_patches_val = im_flatten(X_patches_val)
                rbm = GaussianRBM(random_seed=args.small_random_seed + rbm_id,
                                  model_path=rbm_dirpath,
                                  **small_rbm_config)
                rbm.fit(X_patches, X_patches_val)
            small_rbms.append(rbm)
    # ... and the last one (whole image reduced to 8x8 by averaging)
    rbm_id = 25
    rbm_dirpath = args.small_dirpath_prefix + str(rbm_id) + '/'
    if os.path.isdir(rbm_dirpath):
        print "\nLoading small RBM #{0} ...\n\n".format(rbm_id)
        rbm = GaussianRBM.load_model(rbm_dirpath)
    else:
        print "\nTraining small RBM #{0} ...\n\n".format(rbm_id)
        X_patches = X_train.copy()  # (N, 32, 32, 3)
        X_patches = X_patches.transpose(0, 3, 1, 2)  # (N, 3, 32, 32)
        X_patches = X_patches.reshape((-1, 3, 4, 8, 4, 8)).mean(axis=4).mean(axis=2)  # (N, 3, 8, 8)
        X_patches = X_patches.transpose(0, 2, 3, 1)  # (N, 8, 8, 3)
        X_patches = im_flatten(X_patches)  # (N, 8*8*3)
        X_patches_val = X_val.copy()
        X_patches_val = X_patches_val.transpose(0, 3, 1, 2)
        X_patches_val = X_patches_val.reshape((-1, 3, 4, 8, 4, 8)).mean(axis=4).mean(axis=2)
        X_patches_val = X_patches_val.transpose(0, 2, 3, 1)
        X_patches_val = im_flatten(X_patches_val)
        rbm = GaussianRBM(random_seed=9000 + rbm_id,
                          model_path=rbm_dirpath,
                          **small_rbm_config)
        rbm.fit(X_patches, X_patches_val)
    small_rbms.append(rbm)
    return small_rbms
def make_large_weights(small_rbms):
    """Stitch the 26 small patch RBMs into one large Gaussian RBM init.

    :returns: (W, vb, hb) -- flattened weights plus visible/hidden biases,
        laid out so each small RBM's 300 hidden units act on its own patch.
    """
    # Near-zero random init so off-patch weights are not exactly zero.
    W = np.zeros((300 * 26, 32, 32, 3), dtype=np.float32)
    W[...] = RNG(seed=1234).rand(*W.shape) * 5e-6
    vb = np.zeros((32, 32, 3))
    hb = np.zeros(300 * 26)
    # 16 aligned 8x8 patches.
    for i in xrange(4):
        for j in xrange(4):
            rbm_id = 4 * i + j
            weights = small_rbms[rbm_id].get_tf_params(scope='weights')
            W_small = weights['W']
            W_small = W_small.T  # (300, 192)
            W_small = im_unflatten(W_small)  # (300, 8, 8, 3)
            W[300 * rbm_id: 300 * (rbm_id + 1), 8 * i:8 * (i + 1),
              8 * j:8 * (j + 1), :] = W_small
            vb[8 * i:8 * (i + 1),
               8 * j:8 * (j + 1), :] += im_unflatten(weights['vb'])
            hb[300 * rbm_id: 300 * (rbm_id + 1)] = weights['hb']
    # 9 patches offset by 4 pixels; overlapping visible biases accumulate.
    for i in xrange(3):
        for j in xrange(3):
            rbm_id = 16 + 3 * i + j
            weights = small_rbms[rbm_id].get_tf_params(scope='weights')
            W_small = weights['W']
            W_small = W_small.T
            W_small = im_unflatten(W_small)
            W[300 * rbm_id: 300 * (rbm_id + 1), 4 + 8 * i:4 + 8 * (i + 1),
              4 + 8 * j:4 + 8 * (j + 1), :] = W_small
            vb[4 + 8 * i:4 + 8 * (i + 1),
               4 + 8 * j:4 + 8 * (j + 1), :] += im_unflatten(weights['vb'])
            hb[300 * rbm_id: 300 * (rbm_id + 1)] = weights['hb']
    # The downsampled whole-image RBM: spread each 8x8 weight over a 4x4
    # pixel block, divided by 16 to preserve the overall scale.
    weights = small_rbms[25].get_tf_params(scope='weights')
    W_small = weights['W']
    W_small = W_small.T
    W_small = im_unflatten(W_small)
    vb_small = im_unflatten(weights['vb'])
    for i in xrange(8):
        for j in xrange(8):
            U = W_small[:, i, j, :]
            U = np.expand_dims(U, -1)
            U = np.expand_dims(U, -1)
            U = U.transpose(0, 2, 3, 1)
            W[-300:, 4 * i:4 * (i + 1),
              4 * j:4 * (j + 1), :] = U / 16.
            vb[4 * i:4 * (i + 1),
               4 * j:4 * (j + 1), :] += vb_small[i, j, :].reshape((1, 1, 3)) / 16.
    hb[-300:] = weights['hb']
    W = im_flatten(W)
    W = W.T
    # Average the accumulated visible biases; the doubly-covered interior
    # (offset patches overlap there) gets extra attenuation.
    vb /= 2.
    vb[4:-4, 4:-4, :] /= 1.5
    vb = im_flatten(vb)
    return W, vb, hb
def make_grbm((X_train, X_val), small_rbms, args):
    """Train (or load cached) the large Gaussian RBM.

    Weights and biases are initialized by stitching the 26 small patch
    RBMs together (see make_large_weights), then fine-tuned on full images.
    """
    if os.path.isdir(args.grbm_dirpath):
        print "\nLoading G-RBM ...\n\n"
        grbm = GaussianRBM.load_model(args.grbm_dirpath)
    else:
        print "\nAssembling weights for large Gaussian RBM ...\n\n"
        W, vb, hb = make_large_weights(small_rbms)
        print "\nTraining G-RBM ...\n\n"
        grbm = GaussianRBM(n_visible=32 * 32 * 3,
                           n_hidden=300 * 26,
                           sigma=1.,
                           W_init=W,
                           vb_init=vb,
                           hb_init=hb,
                           n_gibbs_steps=args.n_gibbs_steps[0],
                           learning_rate=args.lr[0],
                           momentum=np.geomspace(0.5, 0.9, 8),
                           max_epoch=args.epochs[0],
                           batch_size=args.batch_size[0],
                           l2=args.l2[0],
                           sample_v_states=True,
                           sample_h_states=True,
                           sparsity_target=0.1,
                           sparsity_cost=1e-4,
                           dbm_first=True,  # !!!
                           metrics_config=dict(
                               msre=True,
                               feg=True,
                               train_metrics_every_iter=1000,
                               val_metrics_every_epoch=1,
                               feg_every_epoch=2,
                               n_batches_for_feg=50,
                           ),
                           verbose=True,
                           display_filters=24,
                           display_hidden_activations=36,
                           v_shape=(32, 32, 3),
                           random_seed=args.random_seed[0],
                           dtype='float32',
                           tf_saver_params=dict(max_to_keep=1),
                           model_path=args.grbm_dirpath)
        grbm.fit(X_train, X_val)
    return grbm
def make_mrbm((Q_train, Q_val), args):
    """Train (or load cached) the Multinomial RBM on top of the G-RBM features.

    CD-k is annealed: every `increase_n_gibbs_steps_every` epochs, k grows
    by one while the learning rate is divided by the step index.
    """
    if os.path.isdir(args.mrbm_dirpath):
        print "\nLoading M-RBM ...\n\n"
        mrbm = MultinomialRBM.load_model(args.mrbm_dirpath)
    else:
        print "\nTraining M-RBM ...\n\n"
        epochs = args.epochs[1]
        n_every = args.increase_n_gibbs_steps_every
        # Per-epoch schedules for k (CD-k) and the learning rate.
        n_gibbs_steps = np.arange(args.n_gibbs_steps[1],
                                  args.n_gibbs_steps[1] + epochs / n_every)
        learning_rate = args.lr[1] / np.arange(1, 1 + epochs / n_every)
        n_gibbs_steps = np.repeat(n_gibbs_steps, n_every)
        learning_rate = np.repeat(learning_rate, n_every)
        mrbm = MultinomialRBM(n_visible=300 * 26,
                              n_hidden=512,
                              n_samples=512,
                              W_init=0.001,
                              hb_init=0.,
                              vb_init=0.,
                              n_gibbs_steps=n_gibbs_steps,
                              learning_rate=learning_rate,
                              momentum=np.geomspace(0.5, 0.9, 8),
                              max_epoch=max(args.epochs[1], n_every),
                              batch_size=args.batch_size[1],
                              l2=args.l2[1],
                              sample_h_states=True,
                              sample_v_states=True,
                              sparsity_target=0.2,
                              sparsity_cost=1e-4,
                              dbm_last=True,  # !!!
                              metrics_config=dict(
                                  msre=True,
                                  pll=True,
                                  feg=True,
                                  train_metrics_every_iter=1000,
                                  val_metrics_every_epoch=2,
                                  feg_every_epoch=2,
                                  n_batches_for_feg=50,
                              ),
                              verbose=True,
                              display_filters=0,
                              display_hidden_activations=100,
                              random_seed=args.random_seed[1],
                              dtype='float32',
                              tf_saver_params=dict(max_to_keep=1),
                              model_path=args.mrbm_dirpath)
        mrbm.fit(Q_train, Q_val)
    return mrbm
def make_rbm_transform(rbm, X, path, np_dtype=None):
    """Compute (or load from cache) the hidden representation of X under `rbm`.

    The cached array at `path` is reused only when its length matches X;
    otherwise the transform is recomputed and the cache overwritten.
    """
    if os.path.isfile(path):
        cached = np.load(path)
        if len(cached) == len(X):
            return cached
    H = rbm.transform(X, np_dtype=np_dtype)
    np.save(path, H)
    return H
def make_dbm((X_train, X_val), rbms, (Q, G), args):
    """Load a trained DBM from disk if available, otherwise jointly train
    one from the pre-trained RBMs, initializing the persistent Markov
    chains from the data (visible) and the pre-computed hidden features
    (Q for layer 1, G for layer 2).

    NOTE(review): Python 2 only -- tuple parameter unpacking and prints.
    """
    if os.path.isdir(args.dbm_dirpath):
        print "\nLoading DBM ...\n\n"
        dbm = DBM.load_model(args.dbm_dirpath)
        dbm.load_rbms(rbms)  # !!!
    else:
        print "\nTraining DBM ...\n\n"
        dbm = DBM(rbms=rbms,
                  n_particles=args.n_particles,
                  # persistent chains start from real data / features
                  v_particle_init=X_train[:args.n_particles].copy(),
                  h_particles_init=(Q[:args.n_particles].copy(),
                                    G[:args.n_particles].copy()),
                  n_gibbs_steps=args.n_gibbs_steps[2],
                  max_mf_updates=args.max_mf_updates,
                  mf_tol=args.mf_tol,
                  # geometric decay of the learning rate over all epochs
                  learning_rate=np.geomspace(args.lr[2], 1e-6, args.epochs[2]),
                  momentum=np.geomspace(0.5, 0.9, 10),
                  max_epoch=args.epochs[2],
                  batch_size=args.batch_size[2],
                  l2=args.l2[2],
                  max_norm=args.max_norm,
                  sample_v_states=True,
                  sample_h_states=(True, True),
                  sparsity_target=args.sparsity_target,
                  sparsity_cost=args.sparsity_cost,
                  sparsity_damping=args.sparsity_damping,
                  train_metrics_every_iter=1000,
                  val_metrics_every_epoch=2,
                  random_seed=args.random_seed[2],
                  verbose=True,
                  display_filters=12,
                  display_particles=36,
                  v_shape=(32, 32, 3),
                  dtype='float32',
                  tf_saver_params=dict(max_to_keep=1),
                  model_path=args.dbm_dirpath)
        dbm.fit(X_train, X_val)
    return dbm
def make_mlp((X_train, y_train), (X_val, y_val), (X_test, y_test),
             (W, hb), args):
    """Build an MLP (optionally initialized from the G-RBM weights), train
    it with early stopping and LR reduction, report test accuracy, and
    save predictions, targets and the fine-tuned first-layer weights.

    NOTE(review): Python 2 only -- tuple parameter unpacking and prints.
    """
    dense_params = {}
    if W is not None and hb is not None:
        # initialize the first dense layer from the pre-trained RBM weights
        dense_params['weights'] = (W, hb)
    # define and initialize MLP model
    mlp = Sequential([
        Dense(7800, input_shape=(3 * 32 * 32,),
              kernel_regularizer=regularizers.l2(args.mlp_l2),
              kernel_initializer=glorot_uniform(seed=3333),
              **dense_params),
        BN(),
        Activation('relu'),
        Dropout(args.mlp_dropout, seed=4444),
        Dense(10, kernel_initializer=glorot_uniform(seed=5555)),
        Activation('softmax'),
    ])
    # separate learning-rate multipliers: small for the pre-trained layer,
    # full rate for the freshly initialized softmax layer
    mlp.compile(optimizer=MultiAdam(lr=0.001,
                                    lr_multipliers={'dense_1': args.mlp_lrm[0],
                                                    'dense_2': args.mlp_lrm[1]}),
                loss='categorical_crossentropy',
                metrics=['accuracy'])
    # train and evaluate classifier
    with Stopwatch(verbose=True) as s:
        early_stopping = EarlyStopping(monitor=args.mlp_val_metric, patience=6, verbose=2)
        reduce_lr = ReduceLROnPlateau(monitor=args.mlp_val_metric, factor=0.2, verbose=2,
                                      patience=3, min_lr=1e-5)
        callbacks = [early_stopping, reduce_lr]
        try:
            mlp.fit(X_train, one_hot(y_train, n_classes=10),
                    epochs=args.mlp_epochs,
                    batch_size=args.mlp_batch_size,
                    shuffle=False,
                    validation_data=(X_val, one_hot(y_val, n_classes=10)),
                    callbacks=callbacks)
        except KeyboardInterrupt:
            # allow manual interruption of training; evaluation still runs
            pass
    y_pred = mlp.predict(X_test)
    y_pred = unhot(one_hot_decision_function(y_pred), n_classes=10)
    print "Test accuracy: {:.4f}".format(accuracy_score(y_test, y_pred))
    # save predictions, targets, and fine-tuned weights
    np.save(args.mlp_save_prefix + 'y_pred.npy', y_pred)
    np.save(args.mlp_save_prefix + 'y_test.npy', y_test)
    W_finetuned, _ = mlp.layers[0].get_weights()
    np.save(args.mlp_save_prefix + 'W_finetuned.npy', W_finetuned)
def main():
    """End-to-end CIFAR-10 DBM pipeline: parse CLI args, prepare/augment
    the data, pre-train the G-RBM and M-RBM, jointly train the DBM, and
    finally fine-tune an MLP initialized from the G-RBM weights.

    NOTE(review): Python 2 only -- print statements throughout.
    """
    # training settings
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # general
    parser.add_argument('--gpu', type=str, default='0', metavar='ID',
                        help="ID of the GPU to train on (or '' to train on CPU)")
    # data
    parser.add_argument('--n-train', type=int, default=49000, metavar='N',
                        help='number of training examples')
    parser.add_argument('--n-val', type=int, default=1000, metavar='N',
                        help='number of validation examples')
    parser.add_argument('--data-path', type=str, default='../data/', metavar='PATH',
                        help='directory for storing augmented data etc.')
    parser.add_argument('--no-aug', action='store_true',
                        help="if enabled, don't augment data")
    # small RBMs related
    parser.add_argument('--small-lr', type=float, default=1e-3, metavar='LR', nargs='+',
                        help='learning rate or sequence of such (per epoch)')
    parser.add_argument('--small-epochs', type=int, default=100, metavar='N',
                        help='number of epochs to train')
    parser.add_argument('--small-batch-size', type=int, default=48, metavar='B',
                        help='input batch size for training')
    parser.add_argument('--small-l2', type=float, default=1e-3, metavar='L2',
                        help='L2 weight decay coefficient')
    parser.add_argument('--small-sparsity-target', type=float, default=0.1, metavar='T',
                        help='desired probability of hidden activation')
    parser.add_argument('--small-sparsity-cost', type=float, default=1e-3, metavar='C',
                        help='controls the amount of sparsity penalty')
    parser.add_argument('--small-random-seed', type=int, default=9000, metavar='N',
                        help="random seeds for models training")
    parser.add_argument('--small-dirpath-prefix', type=str, default='../models/rbm_cifar_small_', metavar='PREFIX',
                        help='directory path prefix to save RBMs trained on patches')
    # M-RBM related
    parser.add_argument('--increase-n-gibbs-steps-every', type=int, default=16, metavar='I',
                        help='increase number of Gibbs steps every specified number of epochs for M-RBM')
    # common for RBMs and DBM (one value per model: G-RBM, M-RBM, DBM)
    parser.add_argument('--n-gibbs-steps', type=int, default=(1, 1, 1), metavar='N', nargs='+',
                        help='(initial) number of Gibbs steps for CD/PCD')
    parser.add_argument('--lr', type=float, default=(5e-4, 5e-5, 4e-5), metavar='LR', nargs='+',
                        help='(initial) learning rates')
    parser.add_argument('--epochs', type=int, default=(64, 33, 100), metavar='N', nargs='+',
                        help='number of epochs to train')
    parser.add_argument('--batch-size', type=int, default=(100, 100, 100), metavar='B', nargs='+',
                        help='input batch size for training, `--n-train` and `--n-val`' + \
                             'must be divisible by this number (for DBM)')
    parser.add_argument('--l2', type=float, default=(1e-3, 0.005, 0.), metavar='L2', nargs='+',
                        help='L2 weight decay coefficients')
    parser.add_argument('--random-seed', type=int, default=(1111, 2222, 3333), metavar='N', nargs='+',
                        help='random seeds for models training')
    # save dirpaths
    parser.add_argument('--grbm-dirpath', type=str, default='../models/grbm_cifar/', metavar='DIRPATH',
                        help='directory path to save Gaussian RBM')
    parser.add_argument('--mrbm-dirpath', type=str, default='../models/mrbm_cifar/', metavar='DIRPATH',
                        help='directory path to save Multinomial RBM')
    parser.add_argument('--dbm-dirpath', type=str, default='../models/dbm_cifar/', metavar='DIRPATH',
                        help='directory path to save DBM')
    # DBM related
    parser.add_argument('--n-particles', type=int, default=100, metavar='M',
                        help='number of persistent Markov chains')
    parser.add_argument('--max-mf-updates', type=int, default=50, metavar='N',
                        help='maximum number of mean-field updates per weight update')
    parser.add_argument('--mf-tol', type=float, default=1e-11, metavar='TOL',
                        help='mean-field tolerance')
    parser.add_argument('--max-norm', type=float, default=4., metavar='C',
                        help='maximum norm constraint')
    parser.add_argument('--sparsity-target', type=float, default=(0.2, 0.2), metavar='T', nargs='+',
                        help='desired probability of hidden activation')
    parser.add_argument('--sparsity-cost', type=float, default=(1e-4, 1e-3), metavar='C', nargs='+',
                        help='controls the amount of sparsity penalty')
    parser.add_argument('--sparsity-damping', type=float, default=0.9, metavar='D',
                        help='decay rate for hidden activations probs')
    # MLP related
    parser.add_argument('--mlp-no-init', action='store_true',
                        help='if enabled, use random initialization')
    parser.add_argument('--mlp-l2', type=float, default=1e-4, metavar='L2',
                        help='L2 weight decay coefficient')
    parser.add_argument('--mlp-lrm', type=float, default=(0.01, 1.), metavar='LRM', nargs='+',
                        help='learning rate multipliers of 1e-3')
    parser.add_argument('--mlp-epochs', type=int, default=100, metavar='N',
                        help='number of epochs to train')
    parser.add_argument('--mlp-val-metric', type=str, default='val_acc', metavar='S',
                        help="metric on validation set to perform early stopping, {'val_acc', 'val_loss'}")
    parser.add_argument('--mlp-batch-size', type=int, default=128, metavar='N',
                        help='input batch size for training')
    parser.add_argument('--mlp-dropout', type=float, default=0.7, metavar='P',
                        help='probability of visible units being set to zero')
    parser.add_argument('--mlp-save-prefix', type=str, default='../data/grbm_', metavar='PREFIX',
                        help='prefix to save MLP predictions and targets')
    # parse and check params
    args = parser.parse_args()
    os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
    # broadcast singleton hyperparameter lists to one value per model
    # (list `*=` is in-place, so the args attribute itself is updated)
    for x, m in (
            (args.n_gibbs_steps, 3),
            (args.lr, 3),
            (args.epochs, 3),
            (args.batch_size, 3),
            (args.l2, 3),
            (args.random_seed, 3),
    ):
        if len(x) == 1:
            x *= m
    # prepare data (load + scale + split)
    print "\nPreparing data ..."
    X, y = load_cifar10(mode='train', path=args.data_path)
    X = X.astype(np.float32)
    X /= 255.
    # identical seed for both shuffles keeps images and labels aligned
    RNG(seed=42).shuffle(X)
    RNG(seed=42).shuffle(y)
    n_train = min(len(X), args.n_train)
    n_val = min(len(X), args.n_val)
    X_train = X[:n_train]
    X_val = X[-n_val:]
    y_train = y[:n_train]
    y_val = y[-n_val:]
    if not args.no_aug:
        # augment data
        X_aug, y_train = make_augmentation(X_train, y_train, n_train, args)
        # convert + scale augmented data again
        X_train = X_aug.astype(np.float32)
        X_train /= 255.
        print "Augmented shape: {0}".format(X_train.shape)
        print "Augmented range: {0}".format((X_train.min(), X_train.max()))
    # center and normalize training data
    X_mean = X_train.mean(axis=0)
    X_std = X_train.std(axis=0)
    if not args.no_aug:
        # cache the augmented-data statistics for later reuse
        mean_path = os.path.join(args.data_path, 'X_aug_mean.npy')
        std_path = os.path.join(args.data_path, 'X_aug_std.npy')
        if not os.path.isfile(mean_path):
            np.save(mean_path, X_mean)
        if not os.path.isfile(std_path):
            np.save(std_path, X_std)
    X_train -= X_mean
    X_train /= X_std
    X_val -= X_mean
    X_val /= X_std
    print "Augmented mean: ({0:.3f}, ...); std: ({1:.3f}, ...)".format(X_train.mean(axis=0)[0],
                                                                       X_train.std(axis=0)[0])
    print "Augmented range: ({0:.3f}, {1:.3f})\n\n".format(X_train.min(), X_train.max())
    # train 26 small Gaussian RBMs on patches
    small_rbms = None
    if not os.path.isdir(args.grbm_dirpath):
        small_rbms = make_small_rbms((X_train, X_val), args)
    # assemble large weight matrix and biases
    # and pre-train large Gaussian RBM (G-RBM)
    grbm = make_grbm((X_train, X_val), small_rbms, args)
    # extract features Q = p_{G-RBM}(h|v=X)
    print "\nExtracting features from G-RBM ...\n\n"
    Q_train, Q_val = None, None
    if not os.path.isdir(args.mrbm_dirpath) or not os.path.isdir(args.dbm_dirpath):
        Q_train_path = os.path.join(args.data_path, 'Q_train_cifar.npy')
        Q_train = make_rbm_transform(grbm, X_train, Q_train_path, np_dtype=np.float16)
    if not os.path.isdir(args.mrbm_dirpath):
        Q_val_path = os.path.join(args.data_path, 'Q_val_cifar.npy')
        Q_val = make_rbm_transform(grbm, X_val, Q_val_path)
    # pre-train Multinomial RBM (M-RBM)
    mrbm = make_mrbm((Q_train, Q_val), args)
    # extract features G = p_{M-RBM}(h|v=Q)
    print "\nExtracting features from M-RBM ...\n\n"
    Q, G = None, None
    if not os.path.isdir(args.dbm_dirpath):
        Q = Q_train[:args.n_particles]
        G_path = os.path.join(args.data_path, 'G_train_cifar.npy')
        G = make_rbm_transform(mrbm, Q, G_path)
    # jointly train DBM
    dbm = make_dbm((X_train, X_val), (grbm, mrbm), (Q, G), args)
    # load test data
    X_test, y_test = load_cifar10(mode='test', path=args.data_path)
    # NOTE(review): unlike the train split, X_test is not cast with
    # .astype(np.float32) before the in-place division -- if load_cifar10
    # returns uint8 this raises; confirm and cast if needed.
    X_test /= 255.
    X_test -= X_mean
    X_test /= X_std
    # G-RBM discriminative fine-tuning:
    # initialize MLP with learned weights,
    # add FC layer and train using backprop
    print "\nG-RBM Discriminative fine-tuning ...\n\n"
    W, hb = None, None
    if not args.mlp_no_init:
        weights = grbm.get_tf_params(scope='weights')
        W = weights['W']
        hb = weights['hb']
    make_mlp((X_train, y_train), (X_val, y_val), (X_test, y_test),
             (W, hb), args)


if __name__ == '__main__':
    main()
|
# Copyright 2017 Bo Shao. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
This class is only used at inference time.
In the case of a production system, the SessionData has to be maintained so that ChatSession objects
can expire and then be cleaned from the memory.
"""
class SessionData:
    """Registry of ChatSession objects keyed by an auto-incrementing id."""

    def __init__(self):
        self.session_dict = {}

    def add_session(self):
        """Create a new ChatSession under the next free id and return the id.

        Ids are assigned as (highest existing id) + 1, starting from 1.
        """
        new_id = max(self.session_dict, default=0) + 1
        self.session_dict[new_id] = ChatSession(new_id)
        return new_id

    def get_session(self, session_id):
        """Return the ChatSession for *session_id* (KeyError if unknown)."""
        return self.session_dict[session_id]
class ChatSession:
    """Per-conversation state kept for a single chat session."""

    def __init__(self, session_id):
        """
        Args:
            session_id: The integer ID of the chat session.
        """
        self.session_id = session_id
        # conversational flags / user identity
        self.howru_asked = False
        self.user_name = None
        self.call_me = None
        # last exchanged Q/A pair and whether it should be refreshed
        self.last_question = None
        self.last_answer = None
        self.update_pair = True
        # topic tracking
        self.last_topic = None
        self.keep_topic = False
        # Pending action info: the action function name plus the parameter
        # used when the user answers yes and the one used for no.
        self.pending_action = dict.fromkeys(('func', 'Yes', 'No'))

    def before_prediction(self):
        """Reset the per-turn flags before running a prediction."""
        self.update_pair = True
        self.keep_topic = False

    def after_prediction(self, new_question, new_answer):
        """Record the outcome of a prediction turn."""
        self._update_last_pair(new_question, new_answer)
        self._clear_last_topic()

    def _update_last_pair(self, new_question, new_answer):
        """
        Last pair is updated after each prediction except in a few cases.
        """
        if not self.update_pair:
            return
        self.last_question = new_question
        self.last_answer = new_answer

    def _clear_last_topic(self):
        """
        Last topic is cleared after each prediction except in a few cases.
        """
        if not self.keep_topic:
            self.last_topic = None

    def update_pending_action(self, func_name, yes_para, no_para):
        """Register a pending action together with its yes/no parameters."""
        self.pending_action.update(func=func_name, Yes=yes_para, No=no_para)

    def clear_pending_action(self):
        """
        Pending action is, and only is, cleared at the end of function:
        execute_pending_action_and_reply.
        """
        self.pending_action.update(func=None, Yes=None, No=None)
|
from applications.config.appconfig import AppConfig
class ObjectUpdateStatus(object):
    """Collects per-object tracking status entries and pushes them to the
    vision workspace through a GraphQL client (or prints them in debug
    mode)."""

    # Kept as class-level defaults for backward compatibility only; real
    # state is per-instance (see __init__). The original class-level list
    # was shared by every instance, so constructing a second instance
    # wiped the first one's pending statuses.
    ObjStatusList = list()
    gqlClient = None

    def __init__(self, client, vision_workspace_name):
        # BUGFIX: bind fresh per-instance state instead of clearing the
        # shared class attribute.
        self.ObjStatusList = list()
        self.gqlClient = client
        self.vision_workspace_name = vision_workspace_name

    def add_tracking_object_status(self, id, rois, x, y, z, u, v, w):
        """Append one status entry per ROI for a tracked object pose."""
        for roi in rois:
            self.ObjStatusList.append({
                "id": str(id),
                "state": {
                    "roi": roi,
                    "pose": {"x": x, "y": y, "z": z, "u": u, "v": v, "w": w},
                },
            })

    # don't send 'state' data for undetermined objects
    def add_empty_object_status(self, id):
        """Append a status entry carrying only the object id."""
        self.ObjStatusList.append({"id": str(id)})

    def sendObjStatus(self, undetectedObjectIDList):
        """Deduplicate the collected statuses and send them to the
        workspace (or print them when debug mode is enabled)."""
        # process undetected marks: report id-only entries for them
        for markerID in undetectedObjectIDList:
            self.add_empty_object_status(markerID)
        # drop duplicated ids, keeping the first entry for each id.
        # BUGFIX: the original removed items from the list while iterating
        # over it, which can skip entries; build a new list instead.
        seen_ids = set()
        deduped = []
        for objStatus in self.ObjStatusList:
            objID = objStatus["id"]
            if objID not in seen_ids:
                seen_ids.add(objID)
                deduped.append(objStatus)
        self.ObjStatusList = deduped
        status = {"objectStatus": self.ObjStatusList}
        if AppConfig.ObjTrackingDebugMode == False:
            self.gqlClient.update_tracking_workspace_status(
                name=self.vision_workspace_name, status=status
            )
        else:
            print("\n", status, "\n")

    def contains_object_status(self):
        """Return True if any status entries are pending."""
        return len(self.ObjStatusList) > 0

    def clear_object_status(self):
        """Discard all pending status entries."""
        self.ObjStatusList.clear()
|
"""
Test access_policies
"""
from copy import deepcopy
from os import environ
from unittest import TestCase
from uuid import uuid4
from archivist.archivist import Archivist
# pylint: disable=fixme
# pylint: disable=missing-docstring
# pylint: disable=unused-variable
# Base properties for every access policy created by these tests; the
# display_name gets a per-run uuid suffix in setUpClass.
DISPLAY_NAME = "AccessPolicy display name"
PROPS = {
    "display_name": DISPLAY_NAME,
    "description": "Policy description",
}
# Asset filters: each entry is an "or" group of attribute predicates.
FILTERS = [
    {
        "or": [
            "attributes.arc_home_location_identity=locations/5ea815f0-4de1-4a84-9377-701e880fe8ae",
            "attributes.arc_home_location_identity=locations/27eed70b-9e2b-4db1-b8c4-e36505350dcc",
        ]
    },
    {
        "or": [
            "attributes.arc_display_type=Valve",
            "attributes.arc_display_type=Pump",
        ]
    },
    {
        "or": [
            "attributes.ext_vendor_name=SynsationIndustries",
        ]
    },
]
# Permissions granted by the policy: the subject identities, the allowed
# behaviours, the asset attributes they may read, and the user attributes
# the policy applies to.
ACCESS_PERMISSIONS = [
    {
        "subjects": [
            "subjects/6a951b62-0a26-4c22-a886-1082297b063b",
            "subjects/a24306e5-dc06-41ba-a7d6-2b6b3e1df48d",
        ],
        "behaviours": ["Attachments", "RecordEvidence"],
        "include_attributes": [
            "arc_display_name",
            "arc_display_type",
            "arc_firmware_version",
        ],
        "user_attributes": [{"or": ["group:maintainers", "group:supervisors"]}],
    }
]
class TestAccessPolicies(TestCase):
    """
    Test Archivist AccessPolicies Create method
    """

    maxDiff = None

    @classmethod
    def setUpClass(cls):
        """Read the auth token and build a shared Archivist client."""
        with open(environ["TEST_AUTHTOKEN_FILENAME"], encoding="utf-8") as fd:
            auth = fd.read().strip()
        cls.arch = Archivist(environ["TEST_ARCHIVIST"], auth, verify=False)
        cls.props = deepcopy(PROPS)
        # unique display name per test run
        cls.props["display_name"] = f"{DISPLAY_NAME} {uuid4()}"

    def _create_policy(self):
        """Create an access policy and verify its display name."""
        policy = self.arch.access_policies.create(
            self.props, FILTERS, ACCESS_PERMISSIONS
        )
        self.assertEqual(
            policy["display_name"],
            self.props["display_name"],
            msg="Incorrect display name",
        )
        return policy

    def test_access_policies_create(self):
        """
        Test access_policy creation
        """
        self._create_policy()

    def test_access_policies_update(self):
        """
        Test access_policy update
        """
        policy = self._create_policy()
        policy = self.arch.access_policies.update(
            policy["identity"],
            props=self.props,
            filters=FILTERS,
            access_permissions=ACCESS_PERMISSIONS,
        )

    def test_access_policies_delete(self):
        """
        Test access_policy delete
        """
        policy = self._create_policy()
        response = self.arch.access_policies.delete(
            policy["identity"],
        )
        self.assertEqual(
            response,
            {},
            msg="Incorrect access_policy",
        )

    def test_access_policies_list(self):
        """
        Test access_policy list
        """
        # TODO: filtering on display_name does not currently work...
        for policy in self.arch.access_policies.list(
            display_name=self.props["display_name"]
        ):
            self.assertEqual(
                policy["display_name"],
                self.props["display_name"],
                msg="Incorrect display name",
            )
            self.assertGreater(
                len(policy["display_name"]),
                0,
                msg="Incorrect display name",
            )

    def test_access_policies_count(self):
        """
        Test access_policy count
        """
        self.assertGreaterEqual(
            self.arch.access_policies.count(),
            0,
            msg="Count is zero",
        )
|
import torch
from torch.autograd import Variable
from torch import optim
from torch import nn
import torch.nn.functional as F
import numpy as np
from agents.Agent import Agent
from .config import DDPG_CONFIG
import basenets
import copy
from utils import databuffer
import os
from collections import deque
from utils.mathutils import explained_variance
from .DDPG import DDPG
from rlnets.DDPG import FCDDPG_C
from .config import TD3_CONFIG
class TD3(DDPG):
    """Twin Delayed DDPG (TD3).

    Extends DDPG with the three TD3 ingredients:
      * clipped double-Q learning (two critics; target uses their min),
      * target policy smoothing (clipped Gaussian noise on target actions),
      * delayed policy updates (one actor update per ``d`` critic updates).
    """

    def __init__(self, hyperparams):
        config = copy.deepcopy(TD3_CONFIG)
        config.update(hyperparams)
        super(TD3, self).__init__(config)
        self.d = config["actor_delayed_steps"]      # critic updates per actor update
        self.smooth_noise = config["smooth_noise"]  # std of target smoothing noise
        self.epsilon = config["smooth_epsilon"]     # clip range for that noise
        # second critic (and its target) for clipped double-Q learning
        self.e_Critic_double = FCDDPG_C(self.n_states, self.n_action_dims,
                                        n_hiddens=self.hidden_layers_v,
                                        nonlinear=self.act_func,
                                        usebn=self.using_bn,
                                        initializer="uniform",
                                        initializer_param={"last_lower": 3e-3, "last_upper": 3e-3}
                                        )
        self.t_Critic_double = FCDDPG_C(self.n_states, self.n_action_dims,
                                        n_hiddens=self.hidden_layers_v,
                                        nonlinear=self.act_func,
                                        usebn=self.using_bn,
                                        initializer="uniform",
                                        initializer_param={"last_lower": 3e-3, "last_upper": 3e-3}
                                        )
        # BUGFIX: the freshly created double-critic pair must be
        # synchronized; the original synced t_Critic/e_Critic (already done
        # by the DDPG base class) and left t_Critic_double with its random
        # initialization.
        self.hard_update(self.t_Critic_double, self.e_Critic_double)
        # single optimizer over the parameters of both critics
        self.optimizer_c = self.optimizer_c_func(list(self.e_Critic.parameters()) +
                                                 list(self.e_Critic_double.parameters()),
                                                 lr=self.lrv)

    def cuda(self):
        """Move all networks, including the double critics, to the GPU."""
        DDPG.cuda(self)
        self.e_Critic_double.cuda()
        self.t_Critic_double.cuda()

    def learn(self):
        """One TD3 learning step: ``d`` critic updates followed by a single
        (delayed) actor update and soft target updates."""
        for i in range(self.d):
            # sample batch memory from all memory
            batch_memory = self.sample_batch(self.batch_size)[0]
            if self.norm_ob:
                batch_memory['state'] = np.clip(
                    (batch_memory['state'] - self.ob_mean) / np.sqrt(self.ob_var + 1e-8), -10, 10)
                batch_memory['next_state'] = np.clip(
                    (batch_memory['next_state'] - self.ob_mean) / np.sqrt(self.ob_var + 1e-8), -10, 10)
            if self.norm_rw:
                batch_memory['reward'] = np.clip(batch_memory['reward'] / np.sqrt(self.rw_var + 1e-8), -10, 10)
            # copy the batch into the pre-allocated tensors
            self.r = self.r.resize_(batch_memory['reward'].shape).copy_(torch.Tensor(batch_memory['reward']))
            self.done = self.done.resize_(batch_memory['done'].shape).copy_(torch.Tensor(batch_memory['done']))
            self.s_ = self.s_.resize_(batch_memory['next_state'].shape).copy_(torch.Tensor(batch_memory['next_state']))
            self.a = self.a.resize_(batch_memory['action'].shape).copy_(torch.Tensor(batch_memory['action']))
            self.s = self.s.resize_(batch_memory['state'].shape).copy_(torch.Tensor(batch_memory['state']))
            # target policy smoothing: clipped Gaussian noise on target action
            a_noise = np.clip(np.random.normal(0, self.smooth_noise, size=self.a.size()), -self.epsilon, self.epsilon)
            a_noise = torch.Tensor(a_noise).type_as(self.a)
            a_ = torch.clamp(self.t_Actor(self.s_) + a_noise, -self.action_bounds, self.action_bounds)
            # clipped double-Q learning: target uses the min of the two critics
            q_1 = self.t_Critic(self.s_, a_)
            q_2 = self.t_Critic_double(self.s_, a_)
            q_target = self.r + (1 - self.done) * self.gamma * torch.min(q_1, q_2)
            q_target = q_target.detach().squeeze()
            q_eval_1 = self.e_Critic(self.s, self.a).squeeze()
            q_eval_2 = self.e_Critic_double(self.s, self.a).squeeze()
            # stash Q statistics for explained-variance logging
            self.Qt = q_target.cpu().numpy()
            self.Qe1 = q_eval_1.detach().cpu().numpy()
            self.Qe2 = q_eval_2.detach().cpu().numpy()
            # update both critics against the shared target
            self.loss_c = self.loss(q_eval_1, q_target) + self.loss(q_eval_2, q_target)
            self.e_Critic.zero_grad()
            self.e_Critic_double.zero_grad()
            self.loss_c.backward()
            if self.max_grad_norm is not None:
                torch.nn.utils.clip_grad_norm_(self.e_Critic.parameters(), self.max_grad_norm)
                torch.nn.utils.clip_grad_norm_(self.e_Critic_double.parameters(), self.max_grad_norm)
            self.optimizer_c.step()
        # delayed actor update: once per `d` critic updates, using the
        # first critic only
        self.loss_a = -self.e_Critic(self.s, self.e_Actor(self.s)).mean()
        self.e_Actor.zero_grad()
        self.loss_a.backward()
        if self.max_grad_norm is not None:
            torch.nn.utils.clip_grad_norm_(self.e_Actor.parameters(), self.max_grad_norm)
        self.optimizer_a.step()
        self.learn_step_counter += 1
        # decay the exploration noise down to its floor
        self.noise = self.noise * (
            1 - self.exploration_noise_decrement) if self.noise > self.noise_min else self.noise_min
        # check to replace target parameters
        self.soft_update(self.t_Actor, self.e_Actor, self.replace_tau)
        self.soft_update(self.t_Critic, self.e_Critic, self.replace_tau)
        self.soft_update(self.t_Critic_double, self.e_Critic_double, self.replace_tau)
def run_td3_train(env, agent, max_timesteps, logger, log_interval):
    """TD3 training driver: repeatedly collect ``agent.nsteps`` transitions
    from the (vectorized) env, store them in the replay buffer, then run
    ``agent.nsteps`` learning steps, logging averaged metrics every
    ``log_interval`` learning iterations.

    Returns the trained agent.
    """
    timestep_counter = 0
    # NOTE(review): used only for the "iterations: x / y" printout;
    # presumably an estimate of total learn iterations -- confirm.
    total_updates = max_timesteps / env.num_envs
    epinfobuf = deque(maxlen=100)
    observations = env.reset()
    # running sums, reset after every log_interval learning iterations
    loss_a = 0
    loss_c = 0
    explained_var = 0
    while (True):
        # collection of training data
        mb_obs, mb_as, mb_dones, mb_rs, mb_obs_ = [], [], [], [], []
        epinfos = []
        for i in range(0, agent.nsteps, env.num_envs):
            observations = torch.Tensor(observations)
            if timestep_counter > agent.learn_start_step:
                actions = agent.choose_action(observations)
                actions = actions.cpu().numpy().clip(env.action_space.low, env.action_space.high)
            else:
                # warm-up phase: sample random actions until enough data
                # has been collected.
                # NOTE(review): the inner loop variable shadows the outer
                # `i` (harmless in Python, but confusing).
                actions = []
                for i in range(env.num_envs):
                    actions.append(env.action_space.sample())
                actions = np.asarray(actions, dtype=np.float32)
            observations = observations.cpu().numpy()
            observations_, rewards, dones, infos = env.step(actions)
            for info in infos:
                maybeepinfo = info.get('episode')
                if maybeepinfo: epinfos.append(maybeepinfo)
            mb_obs.append(observations)
            mb_as.append(actions)
            mb_rs.append(rewards)
            mb_obs_.append(observations_)
            mb_dones.append(dones)
            observations = observations_
        epinfobuf.extend(epinfos)

        def reshape_data(arr):
            # flatten the (n_steps, n_envs, ...) axes into one batch axis
            s = arr.shape
            return arr.reshape(s[0] * s[1], *s[2:])
        mb_obs = reshape_data(np.asarray(mb_obs, dtype=np.float32))
        mb_rs = reshape_data(np.asarray(mb_rs, dtype=np.float32))
        mb_as = reshape_data(np.asarray(mb_as))
        mb_dones = reshape_data(np.asarray(mb_dones, dtype=np.uint8))
        mb_obs_ = reshape_data(np.asarray(mb_obs_, dtype=np.float32))
        # store transition (each field made at least 2-D for the buffer)
        transition = {
            'state': mb_obs if mb_obs.ndim == 2 else np.expand_dims(mb_obs, 1),
            'action': mb_as if mb_as.ndim == 2 else np.expand_dims(mb_as, 1),
            'reward': mb_rs if mb_rs.ndim == 2 else np.expand_dims(mb_rs, 1),
            'next_state': mb_obs_ if mb_obs_.ndim == 2 else np.expand_dims(mb_obs_, 1),
            'done': mb_dones if mb_dones.ndim == 2 else np.expand_dims(mb_dones, 1),
        }
        agent.store_transition(transition)
        # training controller
        timestep_counter += agent.nsteps
        if timestep_counter >= max_timesteps:
            break
        if timestep_counter > agent.batch_size:
            # Update observation and reward mean and var.
            if agent.norm_ob:
                agent.ob_mean, agent.ob_var = env.ob_rms.mean, env.ob_rms.var
            if agent.norm_rw:
                agent.rw_mean, agent.rw_var = env.ret_rms.mean, env.ret_rms.var
            for i in range(0, agent.nsteps):
                agent.learn()
                # adjust learning rate for policy and value function
                # decay_coef = 1 - agent.learn_step_counter / total_updates
                # adjust_learning_rate(agent.optimizer_a, original_lr=agent.lr, decay_coef=decay_coef)
                # adjust_learning_rate(agent.optimizer_c, original_lr=agent.lrv, decay_coef=decay_coef)
                # average the explained variance of the two critics
                explained_var += 0.5 * explained_variance(agent.Qe1, agent.Qt)
                explained_var += 0.5 * explained_variance(agent.Qe2, agent.Qt)
                loss_a += agent.loss_a.item()
                loss_c += agent.loss_c.item()
                if agent.learn_step_counter % log_interval == 0:
                    print("------------------log information------------------")
                    print("total_timesteps:".ljust(20) + str(timestep_counter))
                    print("iterations:".ljust(20) + str(agent.learn_step_counter) + " / " + str(int(total_updates)))
                    print("explained_var:".ljust(20) + str(explained_var / log_interval))
                    logger.add_scalar("explained_var/train", explained_var / log_interval, timestep_counter)
                    print("episode_len:".ljust(20) + "{:.1f}".format(np.mean([epinfo['l'] for epinfo in epinfobuf])))
                    print("episode_rew:".ljust(20) + str(np.mean([epinfo['r'] for epinfo in epinfobuf])))
                    logger.add_scalar("episode_reward/train", np.mean([epinfo['r'] for epinfo in epinfobuf]),
                                      timestep_counter)
                    print("max_episode_rew:".ljust(20) + str(np.max([epinfo['r'] for epinfo in epinfobuf])))
                    print("min_episode_rew:".ljust(20) + str(np.min([epinfo['r'] for epinfo in epinfobuf])))
                    print("loss_a:".ljust(20) + str(loss_a / log_interval))
                    logger.add_scalar("actor_loss/train", loss_a / log_interval, timestep_counter)
                    print("loss_c:".ljust(20) + str(loss_c / log_interval))
                    logger.add_scalar("critic_loss/train", loss_c / log_interval, timestep_counter)
                    print("action_noise_std:".ljust(20) + str(agent.noise))
                    # reset the running sums for the next logging window
                    explained_var = 0
                    loss_a = 0
                    loss_c = 0
    return agent
def adjust_learning_rate(optimizer, original_lr=1e-4, decay_coef=0.95):
    """Set every parameter group's learning rate to original_lr * decay_coef."""
    new_lr = original_lr * decay_coef
    for group in optimizer.param_groups:
        group['lr'] = new_lr
|
from flask import Flask, request, json
from flask_cors import CORS
from bs4 import BeautifulSoup
import requests
import base64
from PIL import Image
import numpy as np
import io
import re
from eval import evaluate
from locateWord import find_word
import os
app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the browser front-end
# module-level state shared between the POST and GET endpoints
# (assumes a single-process, single-worker server)
links = ""
words = ""
imgArray = []  # buffered webcam frames awaiting classification
def scrape_videos(websiteURL):
    """Scrape a signasl.org sign page and return the video URL as a string.

    Tries the page's native <source> element first, then falls back to an
    embedded iframe (forcing autoplay on and loop off); returns
    "unavailable" when neither is present.
    """
    # timeout keeps a dead host from hanging the request thread forever
    html_text = requests.get(websiteURL, timeout=10).text
    soup = BeautifulSoup(html_text, 'lxml')
    video_div_container = soup.find('div', {"itemtype": "http://schema.org/VideoObject"})
    try:
        video = video_div_container.select("#video_con_signasl_1")[0].find('source')
        videoLink = video['src']
    # narrowed from a bare `except:` which also swallowed
    # KeyboardInterrupt/SystemExit; also covers video_div_container is None
    except Exception:
        try:
            videoLink = video_div_container.select("iframe")[0].attrs['src'] + "&autoplay=1"
            videoLink = videoLink.replace("loop=1", "loop=0")
        except Exception:
            videoLink = "unavailable"
    return str(videoLink)
def compile_videos(words):
    """Look up a sign-language video link for each word, preserving order."""
    return [scrape_videos('https://www.signasl.org/sign/' + str(word))
            for word in words]
@app.route('/api/getText', methods=['GET'])
def api():
    """Return the most recently computed video links and word list."""
    # module-level `links`/`words` are only read here, so no `global` needed
    return {"link": links, "words": words}
def cleanText(word):
    """Return *word* with common ASCII punctuation characters removed."""
    punctuations = '''!()-[]{};:'"\,<>./?@#$%^&*_~'''
    # join-based filtering avoids the quadratic cost of repeated string
    # concatenation in a loop
    return "".join(char for char in word if char not in punctuations)
@app.route('/api/sendText', methods=['POST'])
def findVideos():
    """Accept posted text, resolve one video link per word, and cache both
    results in the module-level `links` / `words` globals so the GET
    endpoint can serve them."""
    global words, links
    request_data = json.loads(request.data)
    # lower-case and strip punctuation before looking up the sign videos
    cleaned_words = list(map(cleanText, request_data['content'].lower().split()))
    links = compile_videos(cleaned_words)
    # NOTE(review): `words` keeps the raw (punctuated, case-preserved)
    # tokens while `links` is built from the cleaned ones -- presumably
    # intentional so the UI can display the original text; confirm.
    words = list(request_data['content'].split())
    return {
        'message': links,
        'words': words
    }
@app.route('/api/sendImage', methods=['POST'])
def getImageData():
    """Accumulate posted webcam frames; when `save` is not "True", run the
    classifier over the buffered frames and return the recognized word."""
    global imgArray
    request_data = json.loads(request.data)
    if (request_data['save'] == "True"):
        # strip the data-URL prefix and decode the base64 payload
        image_data = re.sub('^data:image/.+;base64,', '', request_data['image_data'])
        imgdata = base64.b64decode(image_data)
        image = Image.open(io.BytesIO(imgdata))
        image = np.array(image)
        imgArray.append(image)
        return {
            'word': ""
        }
    else:
        # classify the buffered frames, map the prediction to a word,
        # then reset the buffer for the next capture
        prediction = evaluate(imgArray)
        word = find_word(prediction)
        imgArray = []
        return {
            'word': word
        }
|
import os,sys
# change the path according to the test folder location on your system
#sys.path.append('/home/ubuntu/setup/src/fogflow/test/UnitTest/v1')
from datetime import datetime
import copy
import json
import requests
import time
import pytest
import data_ngsi10
import sys
# change this to your broker's IP address and port
brokerIp="http://localhost:8070"
print("Testing of v1 API")
# testCase 1
'''
To test subscription request
'''
def test_getSubscription1():
    """Post a subscription to /ngsi10/subscribeContext and expect HTTP 200."""
    url = brokerIp + "/ngsi10/subscribeContext"
    headers = {'Content-Type': 'application/json'}
    r = requests.post(url, data=json.dumps(data_ngsi10.subdata1), headers=headers)
    # parse the response body (kept for parity with the other tests)
    resp = json.loads(r.content.decode('utf8').replace("'", '"'))
    #print(resp)
    assert r.status_code == 200
#testCase 2
'''
To test entity creation with attributes, then susbscribing and get subscription using ID
'''
def test_getSubscription2():
    """Create entity "Result1" with attributes, subscribe to it, then fetch
    the subscription by id and check it references the entity."""
    #create an entity
    url=brokerIp+"/ngsi10/updateContext"
    headers={'Content-Type' : 'application/json'}
    r = requests.post(url,data=json.dumps(data_ngsi10.subdata2),headers=headers)
    resp_content=r.content
    # NOTE(review): replacing ' with " before json.loads is unnecessary for
    # valid JSON and would corrupt payloads containing quotes; confirm.
    resInJson=resp_content.decode('utf8').replace("'",'"')
    resp=json.loads(resInJson)
    #print(resp)
    #subscribing
    url=brokerIp+"/ngsi10/subscribeContext"
    headers= {'Content-Type': 'application/json'}
    r=requests.post(url,data=json.dumps(data_ngsi10.subdata3),headers=headers)
    resp_content=r.content
    resInJson= resp_content.decode('utf8').replace("'", '"')
    resp=json.loads(resInJson)
    #print(resp)
    resp=resp['subscribeResponse']
    sid=resp['subscriptionId']
    #print(sid)
    #get request to fetch subscription
    get_url=brokerIp+"/ngsi10/subscription/"
    url=get_url+sid
    r=requests.get(url)
    resp_content=r.content
    resInJson= resp_content.decode('utf8').replace("'", '"')
    resp=json.loads(resInJson)
    resp=resp['entities']
    sid2=resp[0]["id"]
    # NOTE(review): a failed validation only prints -- it does not fail the
    # test; only the HTTP status below is asserted.
    if "Result1"==sid2:
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 3
'''
To test entity creation with one attribute : pressure only followed by subscribing and get using ID
'''
def test_getSubscription3():
    """Create entity "Result2" with a single pressure attribute, subscribe,
    then fetch the subscription by id and check it references the entity."""
    #create an entity
    url=brokerIp+"/ngsi10/updateContext"
    headers={'Content-Type' : 'application/json'}
    r = requests.post(url,data=json.dumps(data_ngsi10.subdata4),headers=headers)
    resp_content=r.content
    resInJson=resp_content.decode('utf8').replace("'",'"')
    resp=json.loads(resInJson)
    #print(resp)
    #subscribing
    url=brokerIp+"/ngsi10/subscribeContext"
    headers= {'Content-Type': 'application/json'}
    r=requests.post(url,data=json.dumps(data_ngsi10.subdata5),headers=headers)
    resp_content=r.content
    resInJson= resp_content.decode('utf8').replace("'", '"')
    resp=json.loads(resInJson)
    #print(resp)
    resp=resp['subscribeResponse']
    sid=resp['subscriptionId']
    #print(sid)
    #get request to fetch subscription
    get_url=brokerIp+"/ngsi10/subscription/"
    url=get_url+sid
    r=requests.get(url)
    resp_content=r.content
    resInJson= resp_content.decode('utf8').replace("'", '"')
    resp=json.loads(resInJson)
    resp=resp['entities']
    sid2=resp[0]["id"]
    # NOTE(review): a failed validation only prints -- it does not fail the
    # test; only the HTTP status below is asserted.
    if "Result2"==sid2:
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 4
'''
To test entity creation with one attribute : Temperature only followed by subscription and get using ID
'''
def test_getSubscription4():
    """Create an entity with only a temperature attribute, subscribe, fetch by id."""
    headers = {'Content-Type': 'application/json'}
    # create an entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata6), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe to it
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata7), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # fetch the subscription via its id
    r = requests.get(brokerIp + "/ngsi10/subscription/" + sid)
    body = json.loads(r.content.decode('utf8').replace("'", '"'))
    # best-effort check that the subscription covers the created entity
    if body['entities'][0]["id"] == "Result3":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 5
'''
To test create entity without passing Domain data followed by subscription and get using ID
'''
def test_getSubscription5():
    """Create an entity without domain data, subscribe, fetch by id."""
    headers = {'Content-Type': 'application/json'}
    # create an entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata8), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe to it
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata9), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # fetch the subscription via its id
    r = requests.get(brokerIp + "/ngsi10/subscription/" + sid)
    body = json.loads(r.content.decode('utf8').replace("'", '"'))
    # best-effort check that the subscription covers the created entity
    if body['entities'][0]["id"] == "Result4":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 6
'''
To test create entity without attributes followed by subscription and get using Id
'''
def test_getSubscription6():
    """Create an entity without attributes, subscribe, fetch by id."""
    headers = {'Content-Type': 'application/json'}
    # create an entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata10), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe to it
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata11), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # fetch the subscription via its id
    r = requests.get(brokerIp + "/ngsi10/subscription/" + sid)
    body = json.loads(r.content.decode('utf8').replace("'", '"'))
    # best-effort check that the subscription covers the created entity
    if body['entities'][0]["id"] == "Result5":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 7
'''
To test create entity without attributes and Metadata and followed by subscription and get using Id
'''
def test_getSubscription7():
    """Create an entity without attributes or metadata, subscribe, fetch by id."""
    headers = {'Content-Type': 'application/json'}
    # create an entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata12), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe to it
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata13), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # fetch the subscription via its id
    r = requests.get(brokerIp + "/ngsi10/subscription/" + sid)
    body = json.loads(r.content.decode('utf8').replace("'", '"'))
    # best-effort check that the subscription covers the created entity
    if body['entities'][0]["id"] == "Result6":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 8
'''
To test create entity without entity type followed by subscription and get using Id
'''
def test_getSubscription8():
    """Create an entity without an entity type, subscribe, fetch by id."""
    headers = {'Content-Type': 'application/json'}
    # create an entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata14), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe to it
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata15), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # fetch the subscription via its id
    r = requests.get(brokerIp + "/ngsi10/subscription/" + sid)
    body = json.loads(r.content.decode('utf8').replace("'", '"'))
    # best-effort check that the subscription covers the created entity
    if body['entities'][0]["id"] == "Result7":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 9
'''
To test get subscription request by first posting subscription request followed by delete request
'''
def test_getSubscription9():
    """Subscribe, delete the subscription, then verify a GET on it returns 404."""
    headers = {'Content-Type': 'application/json'}
    # create a subscription
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata16), headers=headers)
    resp = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = resp['subscribeResponse']['subscriptionId']
    # delete the subscription
    r = requests.delete(brokerIp + "/ngsi10/subscription/" + sid, headers=headers)
    # GET on the deleted subscription must 404.
    # BUG FIX: the path was "/ngsi10/subscription" + sid (missing trailing '/'),
    # so the 404 came from a malformed route, not from the deleted resource.
    r = requests.get(brokerIp + "/ngsi10/subscription/" + sid)
    print("Subscription with sid-" + sid + " not found")
    assert r.status_code == 404
#testCase 10
'''
To test the update post request to create entity
'''
def test_getSubscription10():
    """POST an updateContext payload to create an entity and expect HTTP 200."""
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata17),
                      headers={'Content-Type': 'application/json'})
    # decode the reply to make sure it is valid JSON
    json.loads(r.content.decode('utf8').replace("'", '"'))
    assert r.status_code == 200
#testCase 11
'''
To test subscription with attributes and using ID to validate it
'''
def test_getSubscription11():
    """Create, subscribe, update, then validate the notification via the accumulator."""
    headers = {'Content-Type': 'application/json'}
    # create an entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata18), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe to it
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata19), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # update the entity so that a notification fires
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata20), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # ask the accumulator whether the notification arrived
    r = requests.post("http://0.0.0.0:8888/validateNotification",
                      json={"subscriptionId": sid})
    print(r.content)
    assert r.status_code == 200
#testCase 12
'''
To test subscription for its if and else part : 1) for Destination Header
'''
'''def test_getSubscription12():
#create an entity
url=brokerIp+"/ngsi10/updateContext"
headers={'Content-Type' : 'application/json'}
r=requests.post(url,data=json.dumps(data_ngsi10.subdata21),headers=headers)
resp_content=r.content
resInJson=resp_content.decode('utf8').replace("'",'"')
resp=json.loads(resInJson)
#print(resp)
#subscribing
url=brokerIp+"/ngsi10/subscribeContext"
headers= {'Content-Type': 'application/json','Destination' : 'orion-broker'}
r=requests.post(url,data=json.dumps(data_ngsi10.subdata22),headers=headers)
resp_content=r.content
resInJson= resp_content.decode('utf8').replace("'", '"')
resp=json.loads(resInJson)
#print(resp)
resp=resp['subscribeResponse']
sid=resp['subscriptionId']
#print(sid)
#update the created entity
url=brokerIp+"/ngsi10/updateContext"
r=requests.post(url,data=json.dumps(data_ngsi10.subdata23),headers=headers)
resp_content1=r.content
resInJson=resp_content1.decode('utf8').replace("'",'"')
resp1=json.loads(resInJson)
#print(resp1)
#validate via accumulator
url="http://127.0.0.1:1026/v2/entities/"
#r=requests.post(url,json={"subscriptionId" : sid})
r=requests.get(url)
print(r.content)
resp_content=r.content
resInJson= resp_content.decode('utf8').replace("'", '"')
resp=json.loads(resInJson)
#if resp[0]["id"]=="Result11" and resp[0]["type"]=="Result11":
#print("\nValidated")
#else:
#print("\nNot Validated")
assert r.status_code == 200
'''
#testCase 13
'''
To test subscription for its if and else part : 2) for User - Agent Header
'''
def test_getSubscription18():
    """Subscribe with a User-Agent header and validate the notification."""
    # create an entity (plain headers)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata24),
                      headers={'Content-Type': 'application/json'})
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe with the User-Agent routing header
    headers = {'Content-Type': 'application/json',
               'User-Agent': 'lightweight-iot-broker'}
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata25), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # update the entity (same headers as the subscribe request)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata26), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # ask the accumulator whether the notification arrived
    r = requests.post("http://0.0.0.0:8888/validateNotification",
                      json={"subscriptionId": sid})
    print(r.content)
    assert r.status_code == 200
#testCase 14
'''
To test subscription for its if else part : 3) Require-Reliability Header
'''
def test_getSubscription19():
    """Subscribe with a Require-Reliability header and validate the notification."""
    # create an entity (plain headers)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata27),
                      headers={'Content-Type': 'application/json'})
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe with the Require-Reliability header
    headers = {'Content-Type': 'application/json',
               'Require-Reliability': 'true'}
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata28), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # update the entity (same headers as the subscribe request)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata29), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # ask the accumulator whether the notification arrived
    r = requests.post("http://0.0.0.0:8888/validateNotification",
                      json={"subscriptionId": sid})
    print(r.content)
    assert r.status_code == 200
#testCase 15
'''
To test subscription with two headers simultaneously : 4) Destination and User-Agent
'''
def test_getSubscription20():
    """Subscribe with Destination + User-Agent headers and validate the notification."""
    # create an entity (plain headers)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata30),
                      headers={'Content-Type': 'application/json'})
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe with both routing headers
    headers = {'Content-Type': 'application/json',
               'Destination': 'orion-broker',
               'User-Agent': 'lightweight-iot-broker'}
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata31), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # update the entity (same headers as the subscribe request)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata32), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # ask the accumulator whether the notification arrived
    r = requests.post("http://0.0.0.0:8888/validateNotification",
                      json={"subscriptionId": sid})
    # BUG FIX: r.content is bytes; comparing it to a str was always False.
    # NOTE(review): the Validated/Not-Validated branches look inverted —
    # confirm the accumulator's reply semantics before tightening to an assert.
    if r.content.decode('utf8') == "Not validated":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 16
'''
To test subscription with two headers simultaneously : 4) User-Agent and Require-Reliability
'''
def test_getSubscription21():
    """Subscribe with User-Agent + Require-Reliability headers and validate."""
    # create an entity (plain headers)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata33),
                      headers={'Content-Type': 'application/json'})
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe with both headers
    headers = {'Content-Type': 'application/json',
               'User-Agent': 'lightweight-iot-broker',
               'Require-Reliability': 'true'}
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata34), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # update the entity (same headers as the subscribe request)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata35), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # ask the accumulator whether the notification arrived
    r = requests.post("http://0.0.0.0:8888/validateNotification",
                      json={"subscriptionId": sid})
    print(r.content)
    assert r.status_code == 200
#testCase 17
'''
To test subscription with two headers simultaneously : 4) Destination and Require-Reliability headers
'''
def test_getSubscription22():
    """Subscribe with Destination + Require-Reliability headers and validate."""
    # create an entity (plain headers)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata36),
                      headers={'Content-Type': 'application/json'})
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe with both headers
    headers = {'Content-Type': 'application/json',
               'Destination': 'orion-broker',
               'Require-Reliability': 'true'}
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata37), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # update the entity (same headers as the subscribe request)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata38), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # ask the accumulator whether the notification arrived
    r = requests.post("http://0.0.0.0:8888/validateNotification",
                      json={"subscriptionId": sid})
    # BUG FIX: r.content is bytes; comparing it to a str was always False.
    # NOTE(review): the Validated/Not-Validated branches look inverted —
    # confirm the accumulator's reply semantics before tightening to an assert.
    if r.content.decode('utf8') == "Not validated":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 18
'''
To test subscription with all headers simultaneously : 5) Destination, User-Agent and Require-Reliability headers
'''
def test_getSubscription23():
    """Subscribe with all three routing headers at once and validate."""
    # create an entity (plain headers)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata39),
                      headers={'Content-Type': 'application/json'})
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # subscribe with Destination, User-Agent and Require-Reliability
    headers = {'Content-Type': 'application/json',
               'Destination': 'orion-broker',
               'User-Agent': 'lightweight-iot-broker',
               'Require-Reliability': 'true'}
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata40), headers=headers)
    sub = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = sub['subscribeResponse']['subscriptionId']
    # update the entity (same headers as the subscribe request)
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata41), headers=headers)
    json.loads(r.content.decode('utf8').replace("'", '"'))
    # ask the accumulator whether the notification arrived
    r = requests.post("http://0.0.0.0:8888/validateNotification",
                      json={"subscriptionId": sid})
    # BUG FIX: r.content is bytes; comparing it to a str was always False.
    # NOTE(review): the Validated/Not-Validated branches look inverted —
    # confirm the accumulator's reply semantics before tightening to an assert.
    if r.content.decode('utf8') == "Not validated":
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
#testCase 19
'''
To test for get subscription requests
'''
def test_getsubscription24():
    """GET the full subscription list and expect HTTP 200."""
    r = requests.get(brokerIp + "/ngsi10/subscription")
    assert r.status_code == 200
#testCase 20
'''
To test for get all entities
'''
def test_getallentities():
    """GET all entities from the broker and expect HTTP 200."""
    r = requests.get(brokerIp + "/ngsi10/entity")
    assert r.status_code == 200
#testCase 21
'''
To test for query request using Id
'''
def test_queryrequest1():
    """Query the broker by entity id and expect HTTP 200."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/ngsi10/queryContext",
                      data=json.dumps(data_ngsi10.subdata42),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase 22
'''
To test for query request using type
'''
def test_queryrequest2():
    """Query the broker by entity type and expect HTTP 200."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/ngsi10/queryContext",
                      data=json.dumps(data_ngsi10.subdata43),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase 23
'''
To test for query request using geo-scope(polygon)
'''
def test_queryrequest3():
    """Query the broker with a polygon geo-scope and expect HTTP 200."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/ngsi10/queryContext",
                      data=json.dumps(data_ngsi10.subdata44),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase 24
'''
To test for query request multiple filter
'''
def test_queryrequest4():
    """Query the broker with multiple filters and expect HTTP 200."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/ngsi10/queryContext",
                      data=json.dumps(data_ngsi10.subdata45),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase 25
'''
To test if wrong payload is decoded or not
'''
def test_case25():
    """A malformed payload should still be decoded by the broker (HTTP 200)."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata46),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase26
'''
To test the response on passing DELETE in updateAction in payload
'''
def test_case26():
    """Create an entity, delete it via updateAction DELETE, then expect GET 404."""
    # BUG FIX: Content-Type was misspelled 'appliction/json' (twice)
    headers = {'Content-Type': 'application/json',
               'User-Agent': 'lightweight-iot-broker'}
    # create the v1 entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata47), headers=headers)
    # confirm the entity exists
    r = requests.get(brokerIp + "/ngsi10/entity/Result047")
    # delete it by passing DELETE as the updateAction
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata48), headers=headers)
    # the entity must now be gone
    r = requests.get(brokerIp + "/ngsi10/entity/Result047")
    assert r.status_code == 404
#testCase 27
'''
To test the entity creation with empty payload
'''
def test_case27():
    """Entity creation with an empty payload should return HTTP 200."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata49),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase 28
'''
To test the subscription with empty payload
'''
def test_case28():
    """Subscribing with an empty payload should still yield a subscription id."""
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata49),
                      headers={'Content-Type': 'application/json'})
    resp = json.loads(r.content.decode('utf8').replace("'", '"'))
    # extracting the id also verifies the response shape
    sid = resp['subscribeResponse']['subscriptionId']
    assert r.status_code == 200
#testCase 29
'''
To get subscription of empty payload when subscribing
'''
def test_case29():
    """Subscribe with an empty payload, then GET the subscription by its id."""
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata49),
                      headers={'Content-Type': 'application/json'})
    resp = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = resp['subscribeResponse']['subscriptionId']
    # fetch the subscription we just created
    r = requests.get(brokerIp + "/ngsi10/subscription/" + sid)
    assert r.status_code == 200
#testCase 30
'''
To test the action of API on passing an attribute as a command in payload
'''
def test_cases30():
    """Passing an attribute as a command in the payload must yield HTTP 500."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    headers = {'Content-Type': 'application/json',
               'User-Agent': 'lightweight-iot-broker'}
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata50), headers=headers)
    assert r.status_code == 500
#testCase 31
'''
To test the fiware header with updateAction equal to UPDATE
'''
def test_case31():
    """Register a context under fiware headers, then update it (updateAction UPDATE)."""
    # BUG FIX: Content-Type was misspelled 'appliction/json' (twice)
    headers = {'Content-Type': 'application/json',
               'fiware-service': 'openiot',
               'fiware-servicepath': '/'}
    # create and register the entity
    r = requests.post(brokerIp + "/NGSI9/registerContext",
                      data=json.dumps(data_ngsi10.subdata51), headers=headers)
    # make an updateContext request
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata57), headers=headers)
    assert r.status_code == 200
#testCase 32
'''
To test the fiware header with updateAction equal to APPEND
'''
def test_case32():
    """Register a context under fiware headers, then update it (updateAction APPEND)."""
    # BUG FIX: Content-Type was misspelled 'appliction/json' (twice)
    headers = {'Content-Type': 'application/json',
               'fiware-service': 'openiot',
               'fiware-servicepath': '/'}
    # create and register the entity
    r = requests.post(brokerIp + "/NGSI9/registerContext",
                      data=json.dumps(data_ngsi10.subdata52), headers=headers)
    # make an updateContext request
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata57), headers=headers)
    assert r.status_code == 200
#testCase 33
'''
To test the fiware header with updateAction equal to delete
'''
def test_case33():
    """Create an entity under fiware headers, DELETE it via updateAction, expect 404."""
    # BUG FIX: Content-Type was misspelled 'appliction/json' (twice)
    headers = {'Content-Type': 'application/json',
               'fiware-service': 'Abc',
               'fiware-servicepath': 'pqr'}
    # create the v1 entity
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata53), headers=headers)
    # confirm the entity exists
    r = requests.get(brokerIp + "/ngsi10/entity/Result053")
    # delete it by passing DELETE as the updateAction
    r = requests.post(brokerIp + "/ngsi10/updateContext",
                      data=json.dumps(data_ngsi10.subdata54), headers=headers)
    # the entity must now be gone
    r = requests.get(brokerIp + "/ngsi10/entity/Result053")
    assert r.status_code == 404
#testCase 34
'''
To test the notifyContext request
'''
def test_case34():
    """POST a notifyContext payload and expect HTTP 200."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/ngsi10/notifyContext",
                      data=json.dumps(data_ngsi10.subdata55),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase 35
'''
To test unsubscribing feature
'''
def test_case35():
    """Create a subscription and then cancel it via unsubscribeContext."""
    headers = {'Content-Type': 'application/json'}
    # create the subscription
    r = requests.post(brokerIp + "/ngsi10/subscribeContext",
                      data=json.dumps(data_ngsi10.subdata56), headers=headers)
    resp = json.loads(r.content.decode('utf8').replace("'", '"'))
    sid = resp['subscribeResponse']['subscriptionId']
    # cancel it
    r = requests.post(brokerIp + "/ngsi10/unsubscribeContext",
                      json={"subscriptionId": sid, "originator": "POMN"},
                      headers=headers)
    assert r.status_code == 200
#testCase 36
'''
To test entity creation using other route
'''
def test_case36():
    """Create an entity through the alternate /v1/updateContext route."""
    # BUG FIX: Content-Type was misspelled 'appliction/json'
    r = requests.post(brokerIp + "/v1/updateContext",
                      data=json.dumps(data_ngsi10.subdata56),
                      headers={'Content-Type': 'application/json'})
    assert r.status_code == 200
#testCase 37
'''
To test and fetch unique entity
'''
def test_case37():
    """Fetch a single entity by id and expect HTTP 200."""
    r = requests.get(brokerIp + "/ngsi10/entity/Result14")
    assert r.status_code == 200
#testCase 38
'''
To test and fetch attribute specific to an entity
'''
def test_case38():
    """Fetch the 'pressure' attribute of entity Result14 and sanity-check it."""
    r = requests.get(brokerIp + "/ngsi10/entity/Result14/pressure")
    attr = json.loads(r.content.decode('utf8').replace("'", '"'))
    # best-effort verification of the attribute triplet
    ok = (attr['name'] == 'pressure'
          and attr['type'] == 'float'
          and attr['value'] == 55)
    if ok:
        print("\nValidated")
    else:
        print("\nNot Validated")
    assert r.status_code == 200
|
import math
def IR(spot: list, m=1):
    """Single effective interest rate from an array of spot rates.

    :param spot: list of spot rates, one per period
    :type spot: list
    :param m: frequency of interest calculation (e.g. 2 for semi-annual),
        defaults to 1
    :type m: float
    :return: the final spot rate, or None for an empty list or m <= 0
    :rtype: float
    """
    if m <= 0 or not spot:
        return None
    return spot[-1]
def FR(spot: list, k: float, m=1):
    """Forward rate implied by an array of spot rates.

    Computes (1 + s_n/m)**(n*m) / (1 + s_k/m)**(k*m) - 1 where n = len(spot),
    i.e. the growth factor from period k out to period n, minus one.

    :param spot: list of spot rates, one per period
    :type spot: list
    :param k: term period to calculate the forward rate from
        (NOTE(review): annotated float but used as a list index, so a
        non-integer k would raise TypeError — confirm intended type)
    :type k: float
    :param m: frequency of interest calculation, defaults to 1
    :type m: float
    :return: the forward rate, or None for invalid m/k or a too-short list
    :rtype: float
    """
    if m <= 0 or k <= 0 or len(spot) <= 1 or k > len(spot):
        return None
    num = (1 + spot[-1] / m) ** (len(spot) * m)
    denom = (1 + spot[k - 1] / m) ** (k * m)
    if denom == 0:
        return None
    # renamed from 'sum', which shadowed the builtin
    result = (num / denom) - 1
    return result
|
import mongoengine
from models.base_event import BaseEvent
from models.user import *
class BaseChannel(Document):
    """Channel document: ownership, display information and user tracking."""
    # Ownership and Managers
    created_by = ReferenceField('User', required=True)
    # NOTE(review): default=created_by passes the field object itself as the
    # default, which is almost certainly not the intended fallback — confirm.
    owner = ReferenceField('User', default=created_by)
    moderators = ListField(ReferenceField('User'), default=list)
    # Channel information
    name = StringField(required=True)
    subname = StringField(required=True, unique=True)
    # BUG FIX: pass the callables (not their call results) as defaults, so each
    # new document gets a fresh wallpaper / timestamp instead of one value
    # frozen when the class was first imported. Mutable [] defaults are
    # replaced with the list constructor for the same reason.
    wallpaper_url = StringField(default=utils.get_random_wallpaper)
    profile_url = StringField(required=True)
    description = StringField()
    created_at = DateTimeField(default=datetime.utcnow)
    location = PointField(default=[-179, -85])
    tags = ListField(StringField(), default=list)
    promote_image_urls = ListField(StringField())
    # Manage users in the channel
    followed_by = ListField(ReferenceField('User'), default=list)
    liked_by = ListField(ReferenceField('User'), default=list)
    reported_by = ListField(ReferenceField('User'), default=list)
    threads = ListField(ReferenceField('BaseThread'), default=list)
    meta = {'allow_inheritance': True}
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This file handles all flask-restful resources for /v3/ec2tokens
import flask
from keystoneclient.contrib.ec2 import utils as ec2_utils
from oslo_serialization import jsonutils
from six.moves import http_client
from keystone.api._shared import EC2_S3_Resource
from keystone.api._shared import json_home_relations
from keystone.common import render_token
from keystone.common import utils
from keystone import exception
from keystone.i18n import _
from keystone.server import flask as ks_flask
CRED_TYPE_EC2 = 'ec2'
class EC2TokensResource(EC2_S3_Resource.ResourceBase):
    @staticmethod
    def _check_signature(creds_ref, credentials):
        """Validate the AWS-style signature on *credentials*.

        Retries once without the port portion of the host, since some client
        libraries omit the port when signing.

        :returns: True when the signature matches.
        :raises keystone.exception.Unauthorized: when the signature is
            missing or does not match.
        """
        signer = ec2_utils.Ec2Signer(creds_ref['secret'])
        signature = signer.generate(credentials)
        # NOTE(davechen): credentials.get('signature') is not guaranteed to
        # exist, we need to check it explicitly.
        if credentials.get('signature'):
            if utils.auth_str_equal(credentials['signature'], signature):
                return True
            # NOTE(vish): Some client libraries don't use the port when
            # signing requests, so try again without the port.
            elif ':' in credentials['host']:
                # BUG FIX: split the host string, not the credentials dict
                # (dicts have no .split; this branch always raised
                # AttributeError instead of retrying).
                hostname, _port = credentials['host'].split(':')
                credentials['host'] = hostname
                # NOTE(davechen): we need to reinitialize 'signer' to avoid
                # contaminated status of signature, this is similar with
                # other programming language libraries, JAVA for example.
                signer = ec2_utils.Ec2Signer(creds_ref['secret'])
                signature = signer.generate(credentials)
                if utils.auth_str_equal(
                        credentials['signature'], signature):
                    return True
            raise exception.Unauthorized(_('Invalid EC2 signature.'))
        # Raise the exception when credentials.get('signature') is None
        else:
            raise exception.Unauthorized(
                _('EC2 signature not supplied.'))

    @ks_flask.unenforced_api
    def post(self):
        """Authenticate ec2 token.

        POST /v3/ec2tokens
        """
        token = self.handle_authenticate()
        token_reference = render_token.render_token_response_from_model(token)
        resp_body = jsonutils.dumps(token_reference)
        response = flask.make_response(resp_body, http_client.OK)
        response.headers['X-Subject-Token'] = token.id
        response.headers['Content-Type'] = 'application/json'
        return response
class EC2TokensAPI(ks_flask.APIBase):
    """Flask registration for the /v3/ec2tokens endpoint.

    Purely declarative wiring — request handling lives in
    EC2TokensResource.
    """
    _name = 'ec2tokens'
    _import_name = __name__
    resources = []
    resource_mapping = [
        ks_flask.construct_resource_map(
            resource=EC2TokensResource,
            url='/ec2tokens',
            resource_kwargs={},
            rel='ec2tokens',
            resource_relation_func=(
                json_home_relations.os_ec2_resource_rel_func))
    ]
# Tuple consumed by the keystone flask app loader to register this API.
APIs = (EC2TokensAPI,)
|
from shape import Shape
from color import Color
from location import Location
from random import randint
def create_questions(shape, color, location, image_id):
    """Generate randomly-sampled (question, answer, image_id) triples for a
    single-shape image.

    :param shape: Shape enum member present in the image
    :param color: Color enum member of the shape
    :param location: Location enum member of the shape
    :param image_id: identifier appended to every generated triple
    :returns: tuple of (list of (question, answer, image_id) triples,
        number of yes/no questions included)
    """
    shape_name = shape.name.lower()
    color_name = color.name.lower()
    location_name = location.name.lower()
    # Open-ended questions (answer is the attribute name).
    questions = [
        ('what shape is in the image?', shape_name),
        ('what shape is present?', shape_name),
        ('what shape does the image contain?', shape_name),
        (f'what is the {color_name} shape?', shape_name),
        (f'what color is the {shape_name}?', color_name),
        (f'what is the color of the {shape_name}?', color_name),
        ('what color is the shape?', color_name),
        ('what is the color of the shape?', color_name),
        ('what is the position of the shape?', location_name),
        ('where is the shape?', location_name),
        (f'where is the {shape_name}?', location_name),
        (f'where is the {color_name} shape?', location_name),
    ]
    yes_no_questions = []
    # Positive/negated yes-no questions for every possible shape.
    for s in Shape:
        cur_shape_name = s.name.lower()
        pos_answer = 'yes' if s is shape else 'no'
        yes_no_questions.append((f'is there a {cur_shape_name}?', pos_answer))
        yes_no_questions.append((f'is there a {cur_shape_name} in the image?', pos_answer))
        yes_no_questions.append((f'does the image contain a {cur_shape_name}?', pos_answer))
        yes_no_questions.append((f'is a {cur_shape_name} present?', pos_answer))
        neg_answer = 'no' if s is shape else 'yes'
        yes_no_questions.append((f'is there not a {cur_shape_name}?', neg_answer))
        yes_no_questions.append((f'is there not a {cur_shape_name} in the image?', neg_answer))
        yes_no_questions.append((f'does the image not contain a {cur_shape_name}?', neg_answer))
        yes_no_questions.append((f'is no {cur_shape_name} present?', neg_answer))
    # Positive/negated yes-no questions for every possible color.
    for c in Color:
        cur_color_name = c.name.lower()
        pos_answer = 'yes' if c is color else 'no'
        yes_no_questions.append((f'is there a {cur_color_name} shape?', pos_answer))
        yes_no_questions.append((f'is there a {cur_color_name} shape in the image?', pos_answer))
        yes_no_questions.append((f'does the image contain a {cur_color_name} shape?', pos_answer))
        yes_no_questions.append((f'is a {cur_color_name} shape present?', pos_answer))
        neg_answer = 'no' if c is color else 'yes'
        yes_no_questions.append((f'is there not a {cur_color_name} shape?', neg_answer))
        yes_no_questions.append((f'is there not a {cur_color_name} shape in the image?', neg_answer))
        yes_no_questions.append((f'does the image not contain a {cur_color_name} shape?', neg_answer))
        yes_no_questions.append((f'is no {cur_color_name} shape present?', neg_answer))
    # Positive/negated yes-no questions for every possible location.
    for l in Location:
        cur_location_name = l.name.lower()
        pos_answer = 'yes' if l is location else 'no'
        yes_no_questions.append((f'is there a shape in the {cur_location_name}?', pos_answer))
        yes_no_questions.append((f'is there a shape in the {cur_location_name} in the image?', pos_answer))
        yes_no_questions.append((f'does the image contain a shape in the {cur_location_name}?', pos_answer))
        yes_no_questions.append((f'is a shape present in the {cur_location_name}?', pos_answer))
        # BUG FIX: the original computed neg_answer from the stale shape-loop
        # variable `s` and then appended the negated questions with
        # pos_answer, inverting their answers. Mirror the shape/color loops:
        # derive the negated answer from the location match and use it.
        neg_answer = 'no' if l is location else 'yes'
        yes_no_questions.append((f'is there not a shape in the {cur_location_name}?', neg_answer))
        yes_no_questions.append((f'is there not a shape in the {cur_location_name} in the image?', neg_answer))
        yes_no_questions.append((f'does the image not contain a shape in the {cur_location_name}?', neg_answer))
        yes_no_questions.append((f'is a shape not present in the {cur_location_name}?', neg_answer))
    # Randomly keep ~48% of open-ended and ~12% of yes/no questions so the
    # answer distribution is not dominated by yes/no pairs.
    questions = list(filter(lambda _: randint(0, 99) < 48, questions))
    yes_no_questions = list(filter(lambda _: randint(0, 99) < 12, yes_no_questions))
    all_questions = questions + yes_no_questions
    return (list(map(lambda x: x + (image_id,), all_questions)), len(yes_no_questions))
|
# Generated by Django 3.0.6 on 2021-07-14 21:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``test_assign.done`` to a nullable
    CharField (max_length=100) with default 'Asignado'."""

    dependencies = [
        ('evaluation', '0003_auto_20210714_1450'),
    ]

    operations = [
        migrations.AlterField(
            model_name='test_assign',
            name='done',
            field=models.CharField(blank=True, default='Asignado', max_length=100, null=True),
        ),
    ]
|
# --------------------------------------------------------
# Original Code
# https://github.com/VITA-Group/UAV-NDFT
# Pytorch multi-GPU Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Jiasen Lu, Jianwei Yang, based on code from Ross Girshick
#
# Modified by Chaehyeon Lee
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
import os
import sys
import numpy as np
import argparse
import pprint
import pdb
import time
import cv2
import torch
from torch.autograd import Variable
import torch.nn as nn
import torch.optim as optim
import pickle
from roi_data_layer.roidb import combined_roidb
from roi_data_layer.roibatchLoader import roibatchLoader
from model.utils.config import cfg, cfg_from_file, cfg_from_list, get_output_dir
from model.rpn.bbox_transform import clip_boxes
from model.nms.nms_wrapper import nms
from model.rpn.bbox_transform import bbox_transform_inv
from model.utils.net_utils import vis_detections
from model.faster_rcnn.vgg16 import vgg16
from model.faster_rcnn.resnet import resnet
import pdb
# Python 2/3 compatibility shim: alias xrange to range on Python 3.
try:
    xrange  # Python 2
except NameError:
    xrange = range  # Python 3
def parse_args():
    """
    Parse input arguments

    Returns the argparse.Namespace for the evaluation run. Note: the
    parser description says 'Train' but this script is the test/eval
    entry point; the text is inherited from the training script.
    """
    parser = argparse.ArgumentParser(description='Train a Fast R-CNN network')
    parser.add_argument('--dataset', dest='dataset',
                        help='training dataset',
                        default='uav', type=str)
    parser.add_argument('--cfg', dest='cfg_file',
                        help='optional config file',
                        default='cfgs/vgg16.yml', type=str)
    parser.add_argument('--net', dest='net',
                        help='vgg16, res50, res101, res152',
                        default='res101', type=str)
    parser.add_argument('--set', dest='set_cfgs',
                        help='set config keys', default=None,
                        nargs=argparse.REMAINDER)
    parser.add_argument('--save_dir', dest='save_dir',
                        help='directory to save models', default="models",
                        type=str)
    parser.add_argument('--cuda', dest='cuda',
                        help='whether use CUDA',
                        action='store_true')
    parser.add_argument('--ls', dest='large_scale',
                        help='whether use large imag scale',
                        action='store_true')
    parser.add_argument('--mGPUs', dest='mGPUs',
                        help='whether use multiple GPUs',
                        action='store_true')
    parser.add_argument('--cag', dest='class_agnostic',
                        help='whether perform class_agnostic bbox regression',
                        action='store_true')
    parser.add_argument('--parallel_type', dest='parallel_type',
                        help='which part of model to parallel, 0: all, 1: model before roi pooling',
                        default=0, type=int)
    # Checkpoint identification: session / epoch / iteration of the saved model.
    parser.add_argument('--checksession', dest='checksession',
                        help='checksession to load model',
                        default=1, type=int)
    parser.add_argument('--checkepoch', dest='checkepoch',
                        help='checkepoch to load network',
                        default=4, type=int)
    parser.add_argument('--checkpoint', dest='checkpoint',
                        help='checkpoint to load network',
                        default=3960, type=int)
    parser.add_argument('--vis', dest='vis',
                        help='visualization mode',
                        action='store_true')
    parser.add_argument('--model_dir', dest='model_dir',
                        help='directory to save models', default="models",
                        type=str)
    # Adversarial-loss weights for the three nuisance factors (NDFT).
    parser.add_argument('--gamma_altitude', dest='gamma_altitude',
                        help='the gamma is used to control the relative weight of the adversarial loss of altitude',
                        type=float, required=True)
    parser.add_argument('--gamma_angle', dest='gamma_angle',
                        help='the gamma is used to control the relative weight of the adversarial loss of viewing angle',
                        type=float, required=True)
    parser.add_argument('--gamma_weather', dest='gamma_weather',
                        help='the gamma is used to control the relative weight of the adversarial loss of weather',
                        type=float, required=True)
    parser.add_argument('--use_restarting', dest='use_restarting',
                        help='where to use restarting',
                        action='store_true')
    parser.add_argument('--is_baseline_method', dest='is_baseline_method',
                        help='whether to evaluate the baseline method',
                        action='store_true')
    parser.add_argument('--ovthresh', dest='ovthresh',
                        help='the IoU threshold for evaluation',
                        default=0.7, type=float)
    parser.add_argument('--overall_eval', dest='overall_eval',
                        help='display the evaluation results regularly',
                        action='store_true')
    parser.add_argument('--checkpoint_name', dest='checkpoint_name',
                        help='checkpoint name', default="faster_rcnn_final.pth",
                        type=str)
    args = parser.parse_args()
    return args
# Training hyper-parameters from the global config; not used during
# evaluation, kept for parity with the training script.
lr = cfg.TRAIN.LEARNING_RATE
momentum = cfg.TRAIN.MOMENTUM
weight_decay = cfg.TRAIN.WEIGHT_DECAY
if __name__ == '__main__':
    args = parse_args()

    print('Called with args:')
    print(args)

    if torch.cuda.is_available() and not args.cuda:
        print("WARNING: You have a CUDA device, so you should probably run with --cuda")

    np.random.seed(cfg.RNG_SEED)
    # Dataset selection: maps the --dataset name to imdb names and anchor
    # config overrides. NOTE(review): "pascal_voc" uses `if` while
    # "pascal_voc_0712"/"coco"/... chain off the "uav" `if`, so selecting
    # "pascal_voc" also falls through the uav/elif chain — confirm intended.
    if args.dataset == "pascal_voc":
        args.imdb_name = "voc_2007_trainval"
        args.imdbval_name = "voc_2007_test"
        args.set_cfgs = ['ANCHOR_SCALES', '[8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
    if args.dataset == "uav":
        args.imdb_name = "uav_2017_trainval"
        args.imdbval_name = "uav_2017_test"
        args.set_cfgs = ['ANCHOR_SCALES', '[1, 2, 4, 8, 16]', 'ANCHOR_RATIOS', '[0.5,1,2]', 'MAX_NUM_GT_BOXES', '20']
    elif args.dataset == "pascal_voc_0712":
        args.imdb_name = "voc_2007_trainval+voc_2012_trainval"
        args.imdbval_name = "voc_2007_test"
        args.set_cfgs = ['ANCHOR_SCALES', '[8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
    elif args.dataset == "coco":
        args.imdb_name = "coco_2014_train+coco_2014_valminusminival"
        args.imdbval_name = "coco_2014_minival"
        args.set_cfgs = ['ANCHOR_SCALES', '[4, 8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
    elif args.dataset == "imagenet":
        args.imdb_name = "imagenet_train"
        args.imdbval_name = "imagenet_val"
        args.set_cfgs = ['ANCHOR_SCALES', '[8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']
    elif args.dataset == "vg":
        args.imdb_name = "vg_150-50-50_minitrain"
        args.imdbval_name = "vg_150-50-50_minival"
        args.set_cfgs = ['ANCHOR_SCALES', '[4, 8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]']

    args.cfg_file = "cfgs/{}_ls.yml".format(args.net) if args.large_scale else "cfgs/{}.yml".format(args.net)

    if args.cfg_file is not None:
        cfg_from_file(args.cfg_file)
    if args.set_cfgs is not None:
        cfg_from_list(args.set_cfgs)

    print('Using config:')
    pprint.pprint(cfg)

    # No flipped-image augmentation at test time.
    cfg.TRAIN.USE_FLIPPED = False
    imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdbval_name, False)
    imdb.competition_mode(on=True)
    # Propagate run parameters to the imdb so evaluation output is tagged.
    imdb.set_gamma_altitude(args.gamma_altitude)
    imdb.set_gamma_angle(args.gamma_angle)
    imdb.set_gamma_weather(args.gamma_weather)
    imdb.set_epoch(args.checkepoch)
    imdb.set_ckpt(args.checkpoint)

    print('{:d} roidb entries'.format(len(roidb)))
    print(imdb.classes)

    # Derive the nuisance-type tag (A=altitude, V=viewing angle, W=weather)
    # from which gamma weights are non-zero; used to locate the model dir.
    if args.gamma_altitude > 1e-10 and args.gamma_angle > 1e-10 and args.gamma_weather > 1e-10:
        nuisance_type = "A+V+W"
    elif args.gamma_altitude > 1e-10 and args.gamma_angle > 1e-10:
        nuisance_type = "A+V"
    elif args.gamma_altitude > 1e-10 and args.gamma_weather > 1e-10:
        nuisance_type = "A+W"
    elif args.gamma_angle > 1e-10 and args.gamma_weather > 1e-10:
        nuisance_type = "V+W"
    elif args.gamma_altitude > 1e-10:
        nuisance_type = "A"
    elif args.gamma_angle > 1e-10:
        nuisance_type = "V"
    elif args.gamma_weather > 1e-10:
        nuisance_type = "W"
    else:
        nuisance_type ="Baseline"
    if args.overall_eval:
        nuisance_type = "Overall"

    model_dir = os.path.join(args.model_dir, nuisance_type, 'altitude={}_angle={}_weather={}(0)'.format(str(args.gamma_altitude), str(args.gamma_angle), str(args.gamma_weather)))
    if not os.path.exists(model_dir):
        raise Exception('There is no input directory for loading network from ' + model_dir)
    load_name = os.path.join(model_dir, args.checkpoint_name)

    # initilize the network here.
    if args.net == 'vgg16':
        fasterRCNN = vgg16(imdb.classes, pretrained=False, class_agnostic=args.class_agnostic)
    elif args.net == 'res101':
        fasterRCNN = resnet(imdb.classes, 101, pretrained=False, class_agnostic=args.class_agnostic)
    elif args.net == 'res50':
        fasterRCNN = resnet(imdb.classes, 50, pretrained=False, class_agnostic=args.class_agnostic)
    elif args.net == 'res152':
        fasterRCNN = resnet(imdb.classes, 152, pretrained=False, class_agnostic=args.class_agnostic)
    else:
        print("network is not defined")
        pdb.set_trace()

    fasterRCNN.create_architecture()

    print("load checkpoint %s" % (load_name))
    checkpoint = torch.load(load_name)
    # Drop the nuisance-prediction heads (not needed for detection-only
    # evaluation, and possibly absent for baseline checkpoints).
    for key in ['RCNN_angle_score.weight', 'RCNN_angle_score.bias',
                'RCNN_altitude_score.weight', 'RCNN_altitude_score.bias',
                'RCNN_weather_score.weight', 'RCNN_weather_score.bias'
                ]:
        if key in checkpoint['model'].keys():
            del checkpoint['model'][key]
    model_dict = fasterRCNN.state_dict()
    model_dict.update(checkpoint['model'])
    fasterRCNN.load_state_dict(model_dict)
    # fasterRCNN.load_state_dict(checkpoint['model'])
    if 'pooling_mode' in checkpoint.keys():
        cfg.POOLING_MODE = checkpoint['pooling_mode']

    print('load model successfully!')

    # initilize the tensor holder here.
    im_data = torch.FloatTensor(1)
    im_info = torch.FloatTensor(1)
    meta_data = torch.FloatTensor(1)
    num_boxes = torch.LongTensor(1)
    gt_boxes = torch.FloatTensor(1)

    # ship to cuda
    if args.cuda:
        im_data = im_data.cuda()
        im_info = im_info.cuda()
        meta_data = meta_data.cuda()
        num_boxes = num_boxes.cuda()
        gt_boxes = gt_boxes.cuda()

    # make variable
    im_data = Variable(im_data)
    im_info = Variable(im_info)
    meta_data = Variable(meta_data)
    num_boxes = Variable(num_boxes)
    gt_boxes = Variable(gt_boxes)

    if args.cuda:
        cfg.CUDA = True

    if args.cuda:
        fasterRCNN.cuda()

    start = time.time()
    max_per_image = 100

    vis = args.vis
    # Higher score threshold in visualization mode to keep the drawn
    # detections legible; keep everything otherwise.
    if vis:
        thresh = 0.05
    else:
        thresh = 0.0

    save_name = 'faster_rcnn_10'
    num_images = len(imdb.image_index)
    print(num_images)
    # all_boxes[cls][image] = N x 5 array [x1, y1, x2, y2, score]
    all_boxes = [[[] for _ in xrange(num_images)]
                 for _ in xrange(imdb.num_classes)]

    output_dir = get_output_dir(imdb, save_name)
    dataset = roibatchLoader(roidb, ratio_list, ratio_index, 1, \
                             imdb.num_classes, training=False, normalize=False)
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=1,
                                             shuffle=False, num_workers=4,
                                             pin_memory=True)

    data_iter = iter(dataloader)

    _t = {'im_detect': time.time(), 'misc': time.time()}
    det_file = os.path.join(output_dir, 'detections.pkl')

    fasterRCNN.eval()
    empty_array = np.transpose(np.array([[], [], [], [], []]), (1, 0))
    for i in range(num_images):
        data = next(data_iter)
        # Copy the batch into the persistent holders (resized in place).
        im_data.data.resize_(data[0].size()).copy_(data[0])
        im_info.data.resize_(data[1].size()).copy_(data[1])
        meta_data.data.resize_(data[2].size()).copy_(data[2])
        gt_boxes.data.resize_(data[3].size()).copy_(data[3])
        num_boxes.data.resize_(data[4].size()).copy_(data[4])

        det_tic = time.time()
        rois, cls_prob, bbox_pred, _, _, _ = fasterRCNN(im_data=im_data, im_info=im_info, meta_data=meta_data, gt_boxes=gt_boxes, num_boxes=num_boxes)

        scores = cls_prob.data
        boxes = rois.data[:, :, 1:5]

        if cfg.TEST.BBOX_REG:
            # Apply bounding-box regression deltas
            box_deltas = bbox_pred.data
            if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED:
                # Optionally normalize targets by a precomputed mean and stdev
                if args.class_agnostic:
                    box_deltas = box_deltas.view(-1, 4) * torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_STDS).cuda() \
                                 + torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_MEANS).cuda()
                    box_deltas = box_deltas.view(1, -1, 4)
                else:
                    box_deltas = box_deltas.view(-1, 4) * torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_STDS).cuda() \
                                 + torch.FloatTensor(cfg.TRAIN.BBOX_NORMALIZE_MEANS).cuda()
                    box_deltas = box_deltas.view(1, -1, 4 * len(imdb.classes))

            pred_boxes = bbox_transform_inv(boxes, box_deltas, 1)
            pred_boxes = clip_boxes(pred_boxes, im_info.data, 1)
        else:
            # Simply repeat the boxes, once for each class
            pred_boxes = np.tile(boxes, (1, scores.shape[1]))

        # Undo the image-scale factor (data[1][0][2] is the resize ratio).
        pred_boxes /= data[1][0][2].item()

        scores = scores.squeeze()
        pred_boxes = pred_boxes.squeeze()
        det_toc = time.time()
        detect_time = det_toc - det_tic
        misc_tic = time.time()
        if vis:
            im = cv2.imread(imdb.image_path_at(i))
            im2show = np.copy(im)
        # Per-class thresholding + NMS (class 0 is background, skipped).
        for j in xrange(1, imdb.num_classes):
            inds = torch.nonzero(scores[:, j] > thresh).view(-1)
            # if there is det
            if inds.numel() > 0:
                cls_scores = scores[:, j][inds]
                _, order = torch.sort(cls_scores, 0, True)
                if args.class_agnostic:
                    cls_boxes = pred_boxes[inds, :]
                else:
                    cls_boxes = pred_boxes[inds][:, j * 4:(j + 1) * 4]

                cls_dets = torch.cat((cls_boxes, cls_scores.unsqueeze(1)), 1)
                # cls_dets = torch.cat((cls_boxes, cls_scores), 1)
                cls_dets = cls_dets[order]
                keep = nms(cls_dets, cfg.TEST.NMS)
                cls_dets = cls_dets[keep.view(-1).long()]
                if vis:
                    im2show = vis_detections(im2show, imdb.classes[j], cls_dets.cpu().numpy(), 0.3)
                all_boxes[j][i] = cls_dets.cpu().numpy()
            else:
                all_boxes[j][i] = empty_array

        # Limit to max_per_image detections *over all classes*
        if max_per_image > 0:
            image_scores = np.hstack([all_boxes[j][i][:, -1]
                                      for j in xrange(1, imdb.num_classes)])
            # NOTE(review): `j` here is whatever value leaked out of the
            # loops above, so the meta data is appended to only one class's
            # boxes — confirm this is intended and not meant for all classes.
            all_boxes[j][i] = np.concatenate(
                (all_boxes[j][i], np.tile(meta_data.cpu().numpy(), (len(image_scores), 1))),
                axis=1)
            if len(image_scores) > max_per_image:
                image_thresh = np.sort(image_scores)[-max_per_image]
                for j in xrange(1, imdb.num_classes):
                    keep = np.where(all_boxes[j][i][:, -1] >= image_thresh)[0]
                    all_boxes[j][i] = all_boxes[j][i][keep, :]

        misc_toc = time.time()
        nms_time = misc_toc - misc_tic

        sys.stdout.write('im_detect: {:d}/{:d} {:.3f}s {:.3f}s   \r' \
                         .format(i + 1, num_images, detect_time, nms_time))
        sys.stdout.flush()

        if vis:
            cv2.imwrite('result.png', im2show)
            pdb.set_trace()
            # cv2.imshow('test', im2show)
            # cv2.waitKey(0)

    with open(det_file, 'wb') as f:
        pickle.dump(all_boxes, f, pickle.HIGHEST_PROTOCOL)

    print('Evaluating detections')
    imdb.evaluate_detections(all_boxes, output_dir, nuisance_type=nuisance_type, baseline_method=args.is_baseline_method, ovthresh=args.ovthresh, memo=args.checkpoint_name)

    end = time.time()
    print("test time: %0.4fs" % (end - start))
|
"""jxgl.cqu.edu.cn 网址的路由
"""
import logging
import re
import time
from typing import List, Union, Dict, Optional
from hashlib import md5
from bs4 import BeautifulSoup
from requests import Session
from ..model import Course, ExperimentCourse, Exam
from . import HOST, HEADERS
__all__ = ("Route", "Parsed", "Jxgl")
class Route:
    """URL routes on jxgl.cqu.edu.cn and the legacy oldjw server."""

    home = "/home.aspx"
    mainform = "/MAINFRM.aspx"
    # Lightweight page used to probe whether the session is still logged in.
    logintest = "/sys/Main_banner.aspx"

    class TeachingArrangement:
        "Teaching-arrangement module."
        # Personal timetable page
        personal_courses = "/znpk/Pri_StuSel.aspx"
        # Personal timetable query endpoint
        personal_courses_table = "/znpk/Pri_StuSel_rpt.aspx"
        # Exam schedule page
        personal_exams = "/kssw/stu_ksap.aspx"
        # Exam schedule query endpoint
        personal_exams_table = "/kssw/stu_ksap_rpt.aspx"

    class Assignment:
        """Transcript routes.

        The legacy (oldjw) interface is used to bypass the new system's
        refusal to show grades before course evaluation is completed.
        """
        # POST here to obtain a session
        oldjw_login = "http://oldjw.cqu.edu.cn:8088/login.asp"
        # Full transcript page
        whole_assignment = "http://oldjw.cqu.edu.cn:8088/score/sel_score/sum_score_sel.asp"
class Parsed:
    """Fetch-and-parse helpers that return plain Python structures."""

    class Assignment:
        """Transcript helpers backed by the legacy oldjw server."""

        class LoginIncorrectError(ValueError):
            """Raised when the legacy-server login is rejected."""
            pass

        @staticmethod
        def whole_assignment(u: str, p: str, kwargs: dict = {}) -> dict:
            """Fetch the full transcript through the legacy oldjw interface.

            The legacy password differs from the new system's; if never
            changed, it is the last 6 digits of the ID-card number.

            :param str u: student number
            :param str p: legacy-system login password
            :param dict kwargs: (optional) extra arguments forwarded to
                requests (see ``requests.request``)

            Returned dict fields::

                学号(str)
                姓名(str)
                专业(str)
                GPA(str)
                查询时间(str)
                详细(List[dict])
                    课程编码(str)
                    课程名称(str)
                    成绩(str)
                    学分(str)
                    选修(str)
                    类别(str)
                    教师(str)
                    考别(str)
                    备注(str)
                    时间(str)
            """
            # NOTE(review): mutable default ``kwargs={}`` — read-only here so
            # harmless, but ``None`` with a fallback would be safer.
            login_form = {
                # Student number (not the unified-identity account)
                "username": u,
                # Legacy password; usually last 6 digits of the ID card
                "password": p,
                # Purpose unclear; appears to be unused by the server
                "submit1.x": 20,
                "submit1.y": 22,
                # Department quick-navigation selector
                "select1": "#"
            }
            session = Session()
            resp = session.post(Route.Assignment.oldjw_login, data=login_form, **kwargs)
            # The legacy server responds in GBK encoding.
            resp_text = resp.content.decode("gbk")
            if "你的密码不正确,请到教务处咨询(学生密码错误请向学院教务人员或辅导员查询)!" in resp_text:
                raise Parsed.Assignment.LoginIncorrectError(
                    "学号或密码错误,老教务处的密码默认为身份证后六位,"
                    # implicit string concatenation with the line below
                    "或到教务处咨询(学生密码错误请向学院教务人员或辅导员查询)!"
                )
            assignments = session.get(Route.Assignment.whole_assignment, **kwargs).content.decode("gbk")
            assparse = BeautifulSoup(assignments, "lxml")
            # Header paragraph carries student number / name / major / GPA,
            # each introduced by a <b> tag; strip tags and whitespace.
            header_text = str(assparse.select_one("td > p:nth-child(2)"))
            header = [t for t in (re.sub(r"</b>|</?p>|\s", "", t) for t in header_text.split("<b>")) if t != ""]
            details = []
            # First three rows are headers and the last row is a footer.
            for tr in assparse.select("tr")[3:-1]:
                tds = [re.sub(r"\s", "", td.text) for td in tr.select("td")]
                data = {
                    "课程编码": tds[1],
                    "课程名称": tds[2],
                    "成绩": tds[3],
                    "学分": tds[4],
                    "选修": tds[5],
                    "类别": tds[6],
                    "教师": tds[7],
                    "考别": tds[8],
                    "备注": tds[9],
                    "时间": tds[10],
                }
                details.append(data)
            查询时间 = re.search(r"查询时间:(2\d{3}-\d{1,2}-\d{1,2} \d{1,2}:\d{1,2}:\d{1,2})", assignments)
            # Header prefixes ("学号:" etc.) are sliced off by character count.
            table = {
                "学号": header[0][3:],
                "姓名": header[1][3:],
                "专业": header[2][3:],
                "GPA": header[3][4:],
                "查询时间": 查询时间[1] if 查询时间 is not None else "Unknown",
                "详细": details,
            }
            return table
class Jxgl():
    """Client for the teaching-management system (jxgl.cqu.edu.cn).

    :param str username: system username (student number)
    :param str password: system password
    :param str jxglUrl: (optional) base URL including scheme and host,
        e.g. "http://jxgl.cqu.edu.cn"
    :param Session session: (optional) custom Session object
    :param dict headers: (optional) request headers used for all calls

    Instantiating does not log in; call :meth:`login` explicitly.
    """

    class NoUserError(ValueError):
        "Raised on login when the account does not exist."
        pass

    class LoginIncorrectError(ValueError):
        "Raised on login when the username or password is incorrect."
        pass

    class LoginExpired(Exception):
        "Raised when login cookies have expired or login never happened."
        pass

    class UnregisteredError(Exception):
        # During term start this may fire even for students already
        # registered with their counselor.
        "Raised when the student is not registered for the term."
        pass

    jxglUrl: str
    username: str
    password: str
    session: Session

    def login(self, kwargs: dict = {}) -> None:
        """Request the home page, submit the credential form, obtain cookies.

        :param dict kwargs: (optional) extra arguments forwarded to requests
            (see ``requests.request``)

        :raises NoUserError: account does not exist
        :raises LoginIncorrectError: wrong username or password
        :raises UnregisteredError: student not registered (may occur at term
            start even when already registered with the counselor)
        :raises ValueError: unrecognized login response page
        """
        # Initialize the cookie handshake.
        url = f"{self.jxglUrl}/home.aspx"
        resp = self.session.get(url, **kwargs)
        # fix: occasionally no cookie bootstrap is needed and the home page
        # loads directly. The branch below is the equivalent of the redirect
        # page's JavaScript.
        pattern = re.compile(r"(?<=document.cookie=')DSafeId=([A-Z0-9]+);(?=';)")
        if pattern.search(resp.text):
            first_cookie = re.search(pattern, resp.text)[1]
            self.session.cookies.set("DSafeId", first_cookie)
            # The JS waits before reloading; mirror that delay.
            time.sleep(0.680)
            resp = self.session.get(url, **kwargs)
            # NOTE(review): the fallback here is a dict from get_dict(),
            # which re.search cannot scan — confirm the fallback path.
            new_cookie = resp.headers.get("set-cookie", self.session.cookies.get_dict())
            c = {
                1: re.search("(?<=ASP.NET_SessionId=)([a-zA-Z0-9]+)(?=;)", new_cookie)[1],
                2: re.search("(?<=_D_SID=)([A-Z0-9]+)(?=;)", new_cookie)[1]
            }
            self.session.cookies.set("ASP.NET_SessionId", c[1])
            self.session.cookies.set("_D_SID", c[2])
        # Submit the login form.
        url = f"{self.jxglUrl}/_data/index_login.aspx"
        html = BeautifulSoup(self.session.get(url, **kwargs).text, "lxml")
        login_form = {
            "__VIEWSTATE": html.select_one("#Logon > input[name=__VIEWSTATE]")["value"],
            "__VIEWSTATEGENERATOR": html.select_one("#Logon > input[name=__VIEWSTATEGENERATOR]")["value"],
            "Sel_Type": "STU",
            "txt_dsdsdsdjkjkjc": self.username,  # student number
            "txt_dsdfdfgfouyy": "",  # password field; the real (hashed) value goes in efdfdfuuyyuuckjg
            "txt_ysdsdsdskgf": "",
            "pcInfo": "",
            "typeName": "",
            "aerererdsdxcxdfgfg": "",
            "efdfdfuuyyuuckjg": self._chkpwd(self.username, self.password),
        }
        page_text = self.session.post(url, data=login_form, **kwargs).content.decode(encoding='GBK')
        if "正在加载权限数据..." in page_text:
            return
        if "账号或密码不正确!请重新输入。" in page_text:
            raise self.LoginIncorrectError
        if "该账号尚未分配角色!" in page_text:
            raise self.NoUserError
        if "alert('您尚未报到注册成功,请到学院咨询并办理相关手续!" in page_text:
            raise self.UnregisteredError
        else:
            raise ValueError("意料之外的登陆返回页面")

    def __init__(
            self,
            username: str,
            password: str,
            jxglUrl: str = HOST.PREFIX,
            session: Optional[Session] = None,
            headers: dict = HEADERS
    ) -> None:
        self.username: str = username
        self.password: str = password
        self.jxglUrl: str = jxglUrl
        self.session: Session = Session() if session is None else session
        # BUG FIX: honour the caller-supplied ``headers`` argument; the
        # original always applied the module-level HEADERS, silently
        # ignoring the parameter (default behaviour is unchanged).
        self.session.headers.update(headers)

    def getExamsTerms(self, kwargs: dict = {}) -> Dict[int, str]:
        """Fetch the list of terms available for exam schedules.

        :param dict kwargs: (optional) extra arguments forwarded to requests

        :returns: dict of {term id (int): term name (str)}

        Note: the server appears to expose only one term here.
        """
        url: str = f"{self.jxglUrl}{Route.TeachingArrangement.personal_exams}"
        return self.parseExamsTerms(self.session.get(url, **kwargs).text)

    @staticmethod
    def parseExamsTerms(htmlText: str) -> Dict[int, str]:
        """Parse the exam-schedule term list from its HTML page."""
        el_学年学期 = BeautifulSoup(htmlText, "lxml").select("select[name=sel_xnxq] > option")
        return {int(i.attrs["value"]): i.text for i in el_学年学期}

    def getCoursesTerms(self, kwargs: dict = {}) -> Dict[int, str]:
        """Fetch the list of terms available for timetables.

        :param dict kwargs: (optional) extra arguments forwarded to requests

        :returns: dict of {term id (int): term name (str)}
        """
        url: str = f"{self.jxglUrl}{Route.TeachingArrangement.personal_courses}"
        return self.parseCoursesTerms(self.session.get(url, **kwargs).text)

    @staticmethod
    def parseCoursesTerms(htmlText: str) -> Dict[int, str]:
        """Parse the timetable term list from its HTML page."""
        el_学年学期 = BeautifulSoup(htmlText, "lxml").select("select[name=Sel_XNXQ] > option")
        return {int(i.attrs["value"]): i.text for i in el_学年学期}

    def getCourses(self, termId: int, kwargs: dict = {}) -> List[Union[Course, ExperimentCourse]]:
        """Fetch the timetable for the given term.

        :param int termId: term id, as returned by :meth:`getCoursesTerms`
        :param dict kwargs: (optional) extra arguments forwarded to requests

        :raises LoginExpired: when the session is no longer authenticated
        """
        url = f"{self.jxglUrl}{Route.TeachingArrangement.personal_courses_table}"
        resp = self.session.post(url, data={"Sel_XNXQ": termId, "px": 0, "rad": "on"}, **kwargs)
        if ("您正查看的此页已过期" in resp.text):
            raise self.LoginExpired
        return self.parseCourses(resp.text)

    @staticmethod
    def parseCourses(htmlText: str) -> List[Union[Course, ExperimentCourse]]:
        """Parse the timetable HTML into Course/ExperimentCourse objects."""
        listing = BeautifulSoup(htmlText, "lxml").select("table > tbody > tr")
        return [Jxgl._makeCourse(i) for i in listing]

    def getExams(self, termId: int, kwargs: dict = {}) -> List[Exam]:
        """Fetch the exam schedule for the given term.

        :param int termId: term id, as returned by :meth:`getExamsTerms`
        :param dict kwargs: (optional) extra arguments forwarded to requests

        :raises LoginExpired: when the session is no longer authenticated
        """
        url = f"{self.jxglUrl}{Route.TeachingArrangement.personal_exams_table}"
        resp = self.session.post(url, data={"sel_xnxq": termId}, **kwargs)
        if ("您正查看的此页已过期" in resp.text):
            raise self.LoginExpired
        return self.parseExams(resp.text)

    @staticmethod
    def parseExams(htmlText: str) -> List[Exam]:
        """Parse the exam-schedule HTML into Exam objects."""
        listing = BeautifulSoup(htmlText, "lxml").select("table[ID=ID_Table] > tr")
        return [Jxgl._makeExam(i) for i in listing]

    def isLogined(self, kwargs: dict = {}) -> bool:
        """Return True when the session is still authenticated.

        :param dict kwargs: (optional) extra arguments forwarded to requests
        """
        # An expired session gets a redirect instead of HTTP 200.
        return self.session.get(f"{self.jxglUrl}{Route.logintest}", allow_redirects=False, **kwargs).status_code == 200

    @staticmethod
    def _makeExam(tr: BeautifulSoup) -> Exam:
        """Build an Exam from one table row (column 0 is a running index)."""
        td = tr.select("td")
        return Exam(
            identifier=td[1].text,
            score=float(td[2].text),
            classifier=td[3].text,
            exam_type=td[4].text,
            time=td[5].text,
            location=td[6].text,
            seat_no=int(td[7].text)
        )

    @staticmethod
    def _makeCourse(tr: BeautifulSoup) -> Union[Course, ExperimentCourse]:
        "Build the matching Course object from one <tr> element."
        td = tr.select("td")
        # Column 0 is a running index and is ignored. Cells may carry their
        # value in a 'hidevalue' attribute instead of text; 13 columns means
        # a regular course, 12 an experiment course.
        if len(td) == 13:
            return Course(
                identifier=td[1].text if td[1].text != "" else td[1].attrs.get("hidevalue", ''),
                score=float(td[2].text if td[2].text != "" else td[2].attrs.get("hidevalue", '')),
                time_total=float(td[3].text if td[3].text != "" else td[3].attrs.get("hidevalue", '')),
                time_teach=float(td[4].text if td[4].text != "" else td[4].attrs.get("hidevalue", '')),
                time_practice=float(td[5].text if td[5].text != "" else td[5].attrs.get("hidevalue", '')),
                classifier=td[6].text if td[6].text != "" else td[6].attrs.get("hidevalue", ''),
                teach_type=td[7].text if td[7].text != "" else td[7].attrs.get("hidevalue", ''),
                exam_type=td[8].text if td[8].text != "" else td[8].attrs.get("hidevalue", ''),
                teacher=td[9].text if td[9].text != "" else td[9].attrs.get("hidevalue", ''),
                week_schedule=td[10].text,
                day_schedule=td[11].text,
                location=td[12].text
            )
        elif len(td) == 12:
            return ExperimentCourse(
                identifier=td[1].text if td[1].text != "" else td[1].attrs.get("hidevalue", ''),
                score=float(td[2].text if td[2].text != "" else td[2].attrs.get("hidevalue", '')),
                time_total=float(td[3].text if td[3].text != "" else td[3].attrs.get("hidevalue", '')),
                time_teach=float(td[4].text if td[4].text != "" else td[4].attrs.get("hidevalue", '')),
                time_practice=float(td[5].text if td[5].text != "" else td[5].attrs.get("hidevalue", '')),
                project_name=td[6].text if td[6].text != "" else td[6].attrs.get("hidevalue", ''),
                teacher=td[7].text if td[7].text != "" else td[7].attrs.get("hidevalue", ''),
                hosting_teacher=td[8].text if td[8].text != "" else td[8].attrs.get("hidevalue", ''),
                week_schedule=td[9].text if td[9].text != "" else td[9].attrs.get("hidevalue", ''),
                day_schedule=td[10].text if td[10].text != "" else td[10].attrs.get("hidevalue", ''),
                location=td[11].text if td[11].text != "" else td[11].attrs.get("hidevalue", ''),
            )
        else:
            logging.error("未知的数据结构")
            logging.error(tr.prettify())
            raise ValueError("未知的数据结构")

    @staticmethod
    def _md5(string: str) -> str:
        """Upper-case hex MD5 digest of ``string``."""
        return md5(string.encode()).hexdigest().upper()

    @staticmethod
    def _chkpwd(username: str, password: str) -> str:
        "Computes the value assigned to the form field: efdfdfuuyyuuckjg"
        schoolcode = "10611"
        return Jxgl._md5(username + Jxgl._md5(password)[0:30].upper() + schoolcode)[0:30].upper()
|
"""Module containing the ticket for flights serializers"""
from rest_framework import serializers
from ticket.models import Ticket
from flight.serializers import FlightDetailSerializer
class TicketSerializer(serializers.ModelSerializer):
    """Class to handle the serializing and deserializing of ticket data"""

    # Nested, read-only representation of the ticket's flights.
    flight = FlightDetailSerializer(many=True, read_only=True)

    class Meta:
        """Class to add additional information to the serializer"""
        model = Ticket
        fields = ['id', 'ticket_class', 'cost', 'booked', 'owners', 'created_at','updated_at','deleted_at', 'flight']
        # 'flight' is optional on input (it is read-only above anyway).
        extra_kwargs = {'flight': {'required': False}}
|
from collections import Counter
from unittest import mock, TestCase
import numpy as np
from ngs_tools import sequence
from . import mixins
class TestSequence(mixins.TestMixin, TestCase):
    """Unit tests for ngs_tools.sequence (alignment, consensus calling,
    distances, whitelist correction)."""

    def test_alignment_to_cigar(self):
        # Deletions, insertions, matches, and (optional) mismatch 'X' ops.
        self.assertEqual('4D', sequence.alignment_to_cigar('ACGT', '----'))
        self.assertEqual('1M2D1M', sequence.alignment_to_cigar('ACGT', 'A--T'))
        self.assertEqual('4I', sequence.alignment_to_cigar('----', 'AAAT'))
        self.assertEqual(
            '3M1X', sequence.alignment_to_cigar('ACGT', 'ACNV', mismatch=True)
        )
        # Length mismatch and all-gap alignments must raise.
        with self.assertRaises(sequence.SequenceError):
            sequence.alignment_to_cigar('AAAA', 'AAA')
        with self.assertRaises(sequence.SequenceError):
            sequence.alignment_to_cigar('----', '----')

    def test_call_consensus_with_qualities(self):
        # Fixture files provide one sequence/quality string per line.
        with open(self.sequences_path, 'r') as f1, open(self.qualities_path,
                                                        'r') as f2:
            sequences = [line.strip() for line in f1 if not line.isspace()]
            qualities = [line.strip() for line in f2 if not line.isspace()]
        consensuses, assignments = sequence.call_consensus_with_qualities(
            sequences, qualities
        )
        counts = Counter(assignments)
        # Check that assignments are ordered
        self.assertEqual(
            sorted(counts.values(), reverse=True),
            [counts[i] for i in range(len(counts))]
        )
        # The two dominant clusters from the fixture should stay dominant.
        common = counts.most_common(2)
        self.assertTrue(common[0][1] > 50)
        self.assertTrue(common[1][1] > 25)

    def test_call_consensus_with_qualities_allow_ambiguous(self):
        # Equal-quality C/G at the last base collapses to IUPAC 'S'.
        sequences = ['AAAC', 'AAAG']
        qualities = ['AAAA', 'AAAA']
        consensuses, assignments = sequence.call_consensus_with_qualities(
            sequences, qualities, allow_ambiguous=True
        )
        self.assertEqual(consensuses, ['AAAS'])
        np.testing.assert_equal(assignments, [0, 0])

    def test_call_consensus_with_qualities_return_qualities(self):
        with open(self.sequences_path, 'r') as f1, open(self.qualities_path,
                                                        'r') as f2:
            sequences = [line.strip() for line in f1 if not line.isspace()]
            qualities = [line.strip() for line in f2 if not line.isspace()]
        consensuses, assignments, consensus_qualities = sequence.call_consensus_with_qualities(
            sequences, qualities, return_qualities=True
        )
        counts = Counter(assignments)
        common = counts.most_common(2)
        self.assertTrue(common[0][1] > 50)
        self.assertTrue(common[1][1] > 25)

    def test_levenshtein_distance(self):
        # 'N' matches anything; transposition costs 2 edits.
        self.assertEqual(1, sequence.levenshtein_distance('AC', 'AT'))
        self.assertEqual(0, sequence.levenshtein_distance('AT', 'AN'))
        self.assertEqual(2, sequence.levenshtein_distance('XZ', 'ZX'))

    def test_levenshtein_distance_raises_error(self):
        # Without the module-level aligner, the call must fail loudly.
        with mock.patch('ngs_tools.sequence.LEVENSHTEIN_DISTANCE_ALIGNER',
                        None):
            with self.assertRaises(sequence.SequenceError):
                sequence.levenshtein_distance('AC', 'AT')

    def test_hamming_distance(self):
        # 'N' is treated as a wildcard.
        self.assertEqual(0, sequence.hamming_distance('ACTG', 'ACTG'))
        self.assertEqual(1, sequence.hamming_distance('ACTG', 'ACTT'))
        self.assertEqual(0, sequence.hamming_distance('ACTG', 'ACTN'))

    def test_hamming_distances(self):
        np.testing.assert_equal(
            np.array([0, 1, 0]),
            sequence.hamming_distances('ACTG', ['ACTG', 'ACTT', 'ACTN'])
        )

    def test_hamming_distance_matrix(self):
        np.testing.assert_equal(
            np.array([[0, 1, 0], [1, 0, 0], [0, 0, 0]]),
            sequence.hamming_distance_matrix(['ACTG', 'ACTT', 'ACTN'],
                                             ['ACTG', 'ACTT', 'ACTN'])
        )

    def test_pairwise_hamming_distances(self):
        np.testing.assert_equal(
            np.array([[0, 1, 0], [1, 0, 0], [0, 0, 0]]),
            sequence.pairwise_hamming_distances(['ACTG', 'ACTT', 'ACTN'])
        )

    def test_correct_sequences_to_whitelist(self):
        # Quality-aware correction: near matches map to whitelist entries,
        # sequences too far from any entry map to None.
        sequences = ['ACTG', 'ACTT', 'AGCC', 'TTTT']
        qualities = ['AAAA', 'AAAA', 'AAAA', 'AAAA']
        whitelist = ['ACTG', 'TTTN']
        with mock.patch('ngs_tools.sequence.utils.progress', mixins.tqdm_mock),\
            mock.patch('ngs_tools.sequence.progress', mixins.tqdm_mock):
            corrections = sequence.correct_sequences_to_whitelist(
                sequences, qualities, whitelist
            )
        self.assertEqual(['ACTG', 'ACTG', None, 'TTTN'], corrections)

    def test_correct_sequences_to_whitelist_simple(self):
        # Simple (quality-free) variant returns a mapping instead of a list.
        sequences = ['ACTG', 'ACTT', 'AGCC', 'TTTT']
        whitelist = ['ACTG', 'TTTN']
        with mock.patch('ngs_tools.sequence.utils.progress', mixins.tqdm_mock),\
            mock.patch('ngs_tools.sequence.progress', mixins.tqdm_mock):
            corrections = sequence.correct_sequences_to_whitelist_simple(
                sequences, whitelist
            )
        self.assertEqual({
            'ACTG': 'ACTG',
            'ACTT': 'ACTG',
            'AGCC': None,
            'TTTT': 'TTTN'
        }, corrections)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.