content stringlengths 5 1.05M |
|---|
"""
File: dashboard.py
Author: Come Bertrand
Email: bertrand.cosme@gmail.com
Github: https://github.com/ComeBertrand
Description: dashboard drawing functions.
"""
from datetime import timedelta
from bokeh.layouts import layout, column
from bokeh.models import ColumnDataSource, CustomJS, Panel, Tabs, Div, Slider
from .draw_utils import create_box_plot, create_hovered_multiline_graph, create_hovered_single_line_graph
def create_benchmark_dashboard(statistics_per_problem_per_meta):
    """Build the top-level benchmark dashboard: one tab per problem.

    Args:
        statistics_per_problem_per_meta: iterable of per-problem statistic
            collections (one list of per-metaheuristic stats per problem).

    Returns:
        A bokeh Tabs widget holding one problem dashboard per tab.
    """
    panels = [
        Panel(child=problem_dashboard, title=tab_name)
        for tab_name, problem_dashboard in (
            create_problem_dashboard(stats)
            for stats in statistics_per_problem_per_meta
        )
    ]
    return Tabs(tabs=panels)
def create_problem_dashboard(statistics_per_meta):
    """Assemble the dashboard for a single problem.

    Returns:
        (problem_name, layout) — the name doubles as the tab title.
    """
    problem_name = statistics_per_meta[0].problem.get_name()
    rows = [
        [create_dashboard_header(problem_name, statistics_per_meta)],
        [create_meta_comparison(statistics_per_meta)],
        [create_meta_dashboards(statistics_per_meta)],
    ]
    return problem_name, layout(rows)
def create_dashboard_header(problem_name, statistics_per_meta):
    """Build the header row: the problem name and a numbered list of the
    metaheuristics (the numbers match legend entries elsewhere)."""
    problem_header = Div(text="""
    <span><h3>Problem :</h3> {}</span>
    """.format(problem_name))
    items = "".join(
        "<li>{}</li>".format(meta_stat.metaheuristic.get_name())
        for meta_stat in statistics_per_meta
    )
    meta_header = Div(
        text="<span><h3>Metaheuristics :</h3></span><ol>" + items + "</ol>"
    )
    return layout([[problem_header, meta_header]])
def create_meta_comparison(statistics_per_meta):
    """Build the cross-metaheuristic comparison section: hovered per-run
    line graphs and aggregated box plots, for both best fitness and total
    calculation time (one series / box per metaheuristic)."""
    # Create multiline graphs
    data_sources_fitness = []
    data_sources_time = []
    legend = []
    # Fitness is rendered in scientific notation; times as H:MM:SS.
    fitness_formatter = lambda x: "{:.1e}".format(x)
    time_formatter = lambda x: str(timedelta(seconds=int(x)))
    for i, meta_stat in enumerate(statistics_per_meta):
        # Runs are numbered from 1 for display.
        runs = list(range(1, meta_stat.nb_run + 1))
        # x/y feed the glyphs; index/fitness feed the hover tooltips.
        data_source_fitness = ColumnDataSource(data=dict(
            x=runs,
            y=meta_stat.best_values,
            index=runs,
            fitness=[fitness_formatter(fitness) for fitness in meta_stat.best_values],
        ))
        data_sources_fitness.append(data_source_fitness)
        data_source_time = ColumnDataSource(data=dict(
            x=runs,
            y=meta_stat.time_tots,
            index=runs,
            time=[time_formatter(calculation_time) for calculation_time in meta_stat.time_tots],
        ))
        data_sources_time.append(data_source_time)
        # Numbered to match the header's ordered metaheuristic list.
        legend.append('{:d} - {}'.format(i + 1, meta_stat.metaheuristic.get_name()))
    hover_data_fitness = [('Fitness', '@fitness'), ('Run', '@index')]
    multi_graph_fitness = create_hovered_multiline_graph('Best fitness per run', 'Runs', 'Fitness',
                                                         data_sources_fitness, hover_data_fitness, legend)
    box_plot_fitness = create_box_plot('Aggregated fitness per metaheuristic', 'Fitness', legend,
                                       [data_source.data['y'] for data_source in data_sources_fitness],
                                       value_formatter=fitness_formatter)
    hover_data_time = [('Calculation time', '@time'), ('Run', '@index')]
    multi_graph_time = create_hovered_multiline_graph('Calculation time per run', 'Runs', 'Calculation time (in s)',
                                                      data_sources_time, hover_data_time, legend)
    box_plot_time = create_box_plot('Aggregated time per metaheuristic', 'Calculation time in s', legend,
                                    [data_source.data['y'] for data_source in data_sources_time],
                                    value_formatter=time_formatter)
    # Create header
    title = Div(text="""</br></br><span><h3>Metaheuristics comparison :</h3></span>""")
    return layout([[title], [multi_graph_fitness, multi_graph_time], [box_plot_fitness, box_plot_time]])
def create_meta_dashboards(statistics_per_meta):
    """Build the per-metaheuristic analysis section: a titled Tabs widget
    with one detailed dashboard per metaheuristic."""
    tabs = []
    for position, meta_stat in enumerate(statistics_per_meta, start=1):
        label = '{:d} - {}'.format(position, meta_stat.metaheuristic.get_name())
        tabs.append(Panel(child=create_meta_dashboard(meta_stat), title=label))
    # Create header
    title = Div(text="""</br></br><span><h3>Metaheuristics analysis :</h3></span>""")
    return layout([[title], [Tabs(tabs=tabs)]])
def create_meta_dashboard(meta_stat):
    """Build one metaheuristic's view: a fitness-vs-iteration graph with a
    slider selecting the run. All runs' series are shipped to the client
    in one ColumnDataSource and swapped in by a CustomJS callback."""
    values_per_run = {}
    for i in range(meta_stat.nb_run):
        # Keys are 1-based so they match the slider value directly.
        values_per_run['values_{:d}'.format(i+1)] = meta_stat.get_run_values(i)
        values_per_run['iter_{:d}'.format(i+1)] = list(range(meta_stat.get_run_nb_iterations(i)))
    all_data_source = ColumnDataSource(data=values_per_run)
    # The displayed source is initialized on run 1 (slider default).
    data_source_fitness = ColumnDataSource(data=dict(
        x=values_per_run['iter_1'],
        y=values_per_run['values_1']
    ))
    graph_fitness_per_iter = create_hovered_single_line_graph('Best fitness evolution during iterations', 'Iterations',
                                                              'Fitness', data_source_fitness,
                                                              [('Value', '@y'), ('Iteration', '@x')])
    # Client-side: copy the selected run's columns into the displayed source.
    # NOTE(review): source.trigger('change') is the legacy bokeh API; newer
    # bokeh uses source.change.emit() — confirm the pinned bokeh version.
    slider_callback = CustomJS(args=dict(source=data_source_fitness, all_data_source=all_data_source), code="""
        var data = source.data;
        var f = cb_obj.value;
        var all_data = all_data_source.data;
        data['x'] = all_data['iter_' + f];
        data['y'] = all_data['values_' + f];
        source.trigger('change');
    """)
    slider = Slider(start=1, end=meta_stat.nb_run, value=1, step=1, title="Run nb")
    slider.js_on_change('value', slider_callback)
    return column(slider, graph_fitness_per_iter)
|
import json
import pyspark
from pyspark.sql import SparkSession
from pyspark.sql.types import *
from jupyterlab_sql_editor.ipython_magic.common.export import (
Function,
SchemaExporter,
Connection,
Catalog,
SparkTableSchema,
Table,
)
class SparkConnection(Connection):
    """Connection implementation backed by a live SparkSession.

    Renders table and function metadata for the SQL editor's schema
    exporter by issuing catalog SQL (USE / SHOW / DESCRIBE) statements.
    """

    def __init__(self, spark) -> None:
        # spark: an active SparkSession; used for every catalog query.
        self.spark = spark

    def render_table(self, table: Table):
        """Return a JSON-serializable description of *table*.

        The default catalog ("spark_catalog") and an empty database name
        are normalized to None so the exporter can omit them.
        """
        catalog_name = table.catalog_name
        database_name = table.database_name
        table_name = table.table_name
        if catalog_name == "spark_catalog":
            catalog_name = None
        if database_name == "":
            database_name = None
        # Build the fully qualified name: [catalog.][database.]table
        full_table_name = table_name
        if database_name:
            full_table_name = database_name + "." + full_table_name
        if catalog_name:
            full_table_name = catalog_name + "." + full_table_name
        # Switch sessions to the table's catalog so spark.table() resolves.
        catalog = catalog_name if catalog_name else "spark_catalog"
        self.spark.sql(f"USE {catalog}")
        columns = self._get_columns(full_table_name)
        return {
            "columns": columns,
            "tableName": table_name,
            "database": database_name,
            "catalog": catalog_name,
        }

    def render_function(self, function: Function):
        """Return the function's name and its full DESCRIBE text."""
        return {
            "name": function.function_name,
            "description": self._get_description(function.function_name),
        }

    def _get_columns(self, full_table_name):
        # Convert the Spark StructType schema to the exporter's format.
        schema = self.spark.table(full_table_name).schema
        table_schema = SparkTableSchema(schema)
        return table_schema.convert()

    def get_function_names(self):
        """List every SQL function registered in spark_catalog."""
        self.spark.sql("USE spark_catalog")
        rows = self.spark.sql("SHOW FUNCTIONS").collect()
        function_names = []
        for r in rows:
            function_names.append(r.function)
        return function_names

    def _get_description(self, function_name):
        # DESCRIBE FUNCTION EXTENDED returns one row per line of text.
        rows = self.spark.sql(f"DESCRIBE FUNCTION EXTENDED {function_name}").collect()
        text_lines = list(map(lambda r: r.function_desc, rows))
        return "\n".join(text_lines)

    def get_table_names(self, catalog_name, database_name):
        """List table names of *database_name* within *catalog_name*.

        A falsy database lists the catalog's current database instead.
        """
        table_names = []
        self.spark.sql(f"USE {catalog_name}")
        if database_name:
            rows = self.spark.sql(f"SHOW TABLES IN {database_name}").collect()
        else:
            rows = self.spark.sql(f"SHOW TABLES").collect()
        for r in rows:
            # depending on whether Iceberg catalogs are used, the result
            # rows carry either a "database" or a "namespace" column
            if (
                getattr(r, "database", "") == database_name
                or getattr(r, "namespace", "") == database_name
            ):
                table_names.append(r["tableName"])
        return table_names

    def get_database_names(self, catalog_name):
        """List database (namespace) names available in *catalog_name*."""
        self.spark.sql(f"USE {catalog_name}")
        rows = self.spark.sql("SHOW DATABASES").collect()
        database_names = []
        for r in rows:
            database_names.append(r["namespace"])
        if catalog_name == "spark_catalog":
            # The local catalog may also hold temp views with no database,
            # represented here by the empty string.
            database_names.append("")
        return database_names
# spark = SparkSession.builder.appName("abc").getOrCreate()
# spark.sql("select 'allo'").createOrReplaceTempView("view_no_database")
# spark.sql("create database db1").collect()
# spark.sql("create database db2").collect()
# spark.sql(
# """
# CREATE OR REPLACE VIEW view_default_database
# (ID COMMENT 'Unique identification number', Name)
# COMMENT 'View for experienced employees'
# AS SELECT 1 as id, 'jc' as name
# """
# ).collect()
# spark.sql(
# """
# CREATE OR REPLACE VIEW db1.view_in_db1
# (ID COMMENT 'Unique identification number', Name)
# COMMENT 'View for experienced employees'
# AS SELECT 1 as id, 'jc' as name
# """
# ).collect()
# spark.sql("use spark_catalog").show()
# spark.sql("use spark_catalog.db1").show()
# spark.sql(
# """
# show tables
# """
# ).show()
# spark.table("view_no_database").printSchema()
# spark.table("default.view_default_database").printSchema()
# spark.table("db1.view_in_db1").printSchema()
def update_database_schema(spark, schema_file_name, catalog_names):
    """Export the schema of every catalog in *catalog_names*, plus the
    local ``spark_catalog``, to *schema_file_name*.

    Args:
        spark: an active SparkSession.
        schema_file_name: destination file for the exported schema.
        catalog_names: iterable of extra catalog names to export.
    """
    connection = SparkConnection(spark)
    local_catalog = Catalog(connection, "spark_catalog")
    # Bug fix: was annotated ``catalogs: list(Catalog) = []`` — ``list(Catalog)``
    # is a call, not a type; a plain list is all that is needed here.
    catalogs = [Catalog(connection, name) for name in catalog_names]
    catalogs.append(local_catalog)
    exp = SchemaExporter(connection, schema_file_name, catalogs, local_catalog)
    exp.update_schema()
def update_local_database(spark, schema_file_name):
    """Refresh the exported schema for the local ``spark_catalog`` only,
    without progress output."""
    conn = SparkConnection(spark)
    local = Catalog(conn, "spark_catalog")
    exporter = SchemaExporter(conn, schema_file_name, None, local, display_progress=False)
    exporter.update_local_schema()
|
from dynaconf import settings

# Smoke-test script for dynaconf loaders: values provided by the default
# loaders must survive a clean()/execute_loaders() cycle, and namespace
# switching (context manager and global) must expose the same variables.

# test default loader never gets cleaned
print("store:", settings.store)
print("loaders:", settings.loaded_by_loaders)
# Typo fix: this label previously read "dotenv_overrride", inconsistent
# with every later occurrence.
print("dotenv_override:", settings.DOTENV_OVERRIDE_FOR_DYNACONF)
print("redis:", settings.REDIS_FOR_DYNACONF)
print("PYVAR", settings.PYVAR)
print("YAMLVAR", settings.YAMLVAR)
print("TOMLVAR", settings.TOMLVAR)
print("INIVAR", settings.INIVAR)
print("JSONVAR", settings.JSONVAR)

print("cleaning.....")
settings.clean()
settings.execute_loaders()

# After the clean/reload cycle, every loader's variable must still resolve.
print("store:", settings.store)
print("loaders:", settings.loaded_by_loaders)
print("deleted:", settings._deleted)
print("dotenv_override:", settings.DOTENV_OVERRIDE_FOR_DYNACONF)
print("redis:", settings.REDIS_FOR_DYNACONF)
print("PYVAR", settings.PYVAR)
print("YAMLVAR", settings.YAMLVAR)
print("TOMLVAR", settings.TOMLVAR)
print("INIVAR", settings.INIVAR)
print("JSONVAR", settings.JSONVAR)

# Temporarily switch to the 'dev' namespace; reverts on block exit.
# NOTE(review): the source's indentation was lost — the context block is
# assumed to cover only these five prints; confirm against the original.
with settings.using_namespace('dev'):
    print("PYVAR", settings.PYVAR)
    print("YAMLVAR", settings.YAMLVAR)
    print("TOMLVAR", settings.TOMLVAR)
    print("INIVAR", settings.INIVAR)
    print("JSONVAR", settings.JSONVAR)

# Back on the default namespace: state should be restored.
print("store:", settings.store)
print("loaders:", settings.loaded_by_loaders)
print("deleted:", settings._deleted)
print("dotenv_override:", settings.DOTENV_OVERRIDE_FOR_DYNACONF)
print("redis:", settings.REDIS_FOR_DYNACONF)
print("PYVAR", settings.PYVAR)
print("YAMLVAR", settings.YAMLVAR)
print("TOMLVAR", settings.TOMLVAR)
print("INIVAR", settings.INIVAR)
print("JSONVAR", settings.JSONVAR)

# Global (non-context) switch to the 'dev' namespace.
settings.namespace('dev')
print("PYVAR", settings.PYVAR)
print("YAMLVAR", settings.YAMLVAR)
print("TOMLVAR", settings.TOMLVAR)
print("INIVAR", settings.INIVAR)
print("JSONVAR", settings.JSONVAR)
print("store:", settings.store)
print("loaders:", settings.loaded_by_loaders)
print("deleted:", settings._deleted)
print("dotenv_override:", settings.DOTENV_OVERRIDE_FOR_DYNACONF)
print("redis:", settings.REDIS_FOR_DYNACONF)

# Switch back to the default namespace.
settings.namespace()
print("PYVAR", settings.PYVAR)
print("YAMLVAR", settings.YAMLVAR)
print("TOMLVAR", settings.TOMLVAR)
print("INIVAR", settings.INIVAR)
print("JSONVAR", settings.JSONVAR)
print("store:", settings.store)
print("loaders:", settings.loaded_by_loaders)
print("deleted:", settings._deleted)
print("dotenv_override:", settings.DOTENV_OVERRIDE_FOR_DYNACONF)
print("redis:", settings.REDIS_FOR_DYNACONF)
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Rotation check: p is a rotation of s exactly when p occurs inside s+s
# (the classic string-doubling trick). Reads s then p from stdin and
# prints 'Yes' or 'No'.
import sys

s = sys.stdin.readline().strip()
p = sys.stdin.readline().strip()
s = s + s
print('Yes' if p in s else 'No')
|
# algorithm tolerance
# NOTE(review): "COVERGENCE" is a misspelling of "CONVERGENCE"; the name is
# kept as-is because other modules may import it under this spelling.
CCD_COVERGENCE_TOL = 1e-10  # stopping tolerance for the CCD iteration
BISECTION_TOL = 1e-5  # tolerance for the bisection root search
ADMM_TOL = 1e-10  # convergence tolerance for the ADMM solver
MAX_ITER = 5000  # iteration cap for the main algorithms
BISECTION_UPPER_BOUND = 10  # initial upper bracket for bisection
MAXITER_BISECTION = 5000  # iteration cap for bisection
# bounds
MIN_WEIGHT = 0  # lower bound on a weight variable
MAX_WEIGHT = 1e3  # upper bound on a weight variable
RISK_BUDGET_TOL = 0.00001  # tolerance on risk-budget satisfaction
|
from flask import Blueprint
from flask import request, jsonify, make_response, Blueprint
from functools import wraps
from app.api.v1.models.meetup import Meetup
from app.api.v1.models.user import User
from app.api.v1.utils.meetup_validators import MeetupValidator
from app.api.v1.models.base_model import AuthenticationRequired
# Version-1 API blueprint: every route below is mounted under /api/v1/.
v1 = Blueprint('meetupv1', __name__, url_prefix='/api/v1/')
""" This route performs a get request to fetch all upcoming meetups """
@v1.route("/meetups/upcoming", methods=['GET'])
def get_all_meetups():
    """Return every upcoming meetup with a 200 status."""
    payload = {
        "status": 200,
        "meetups": Meetup().fetch_meetups()
    }
    return make_response(jsonify(payload), 200)
""" This route posts a meetup """
@v1.route("/meetups", methods=['POST'])
@AuthenticationRequired
def post_a_meetup():
data = request.get_json()
auth_header = request.headers
print(auth_header)
if MeetupValidator().meetup_fields(data):
return make_response(jsonify(MeetupValidator().meetup_fields(data)), 400)
else:
# Validate user
validate_question = MeetupValidator(data)
validation_methods = [
validate_question.valid_date,
validate_question.data_exists,
validate_question.valid_description,
validate_question.valid_location,
validate_question.valid_tags,
validate_question.valid_title
]
for error in validation_methods:
if error():
return make_response(jsonify({
"error": error(),
"status": 422
}), 422)
meetup = {
"title": data['title'],
"description": data['description'],
"location": data['location'],
"images": data['images'],
"happeningOn": data['happeningOn'],
"tags": data['tags'],
}
meetup_model = Meetup(meetup)
meetup_model.save_meetup()
return make_response(jsonify({
"status": 201,
"message": "You have successfully posted a meetup",
"data": [{
"title": data['title'],
"location": data['location'],
"images": ["img1", "img2"],
"happeningOn": data['happeningOn'],
"tags": data['tags'],
}],
}), 201)
""" This route fetches a specific meetup """
@v1.route("/meetups/<int:meetupId>", methods=['GET'])
def get_meetup(meetupId):
meetup = Meetup().fetch_specific_meetup(meetupId)
if meetup:
return jsonify({
"status": 200,
"data": meetup
}), 200
else:
return jsonify({
"status": 404,
"error": "No meetup found!"
}), 404
""" This route edits a meetup """
@v1.route("meetups/<int:meetupId>", methods=['PATCH'])
@AuthenticationRequired
def edit_meetup(meetupId):
data = request.get_json()
meetup = Meetup().fetch_specific_meetup(meetupId)
if meetup:
updates = {
"title": data['title'],
"description": data['description'],
"location": data['location'],
"images": ["img1", "img2"],
"happeningOn": data['happeningOn'],
"tags": data['tags']
}
Meetup().edit_meetup(meetupId, updates)
return jsonify({
"status": 200,
"message": "{} was updated".format(meetup['title'].upper())
}), 200
else:
return make_response(jsonify({
"error": 'meetup not found!',
"status": 404
}), 404)
""" This route deletes a specific meetup """
@v1.route("admin/meetups/<int:meetupId>", methods=['DELETE'])
@AuthenticationRequired
def delete_meetup(meetupId):
get_meetup = Meetup().fetch_specific_meetup(meetupId)
if get_meetup:
Meetup().delete_meetup(meetupId)
return jsonify({
"status": 200,
"message": "{} was deleted".format(get_meetup['title'].upper())
}), 200
else:
return make_response(jsonify({
"error": 'Meetup not found!',
"status": 404
}), 404)
""" This route posts RSVPS on meetups """
@v1.route("/meetups/<int:meetupId>/rsvp", methods=['POST'])
@AuthenticationRequired
def post_RSVP(meetupId):
data = request.get_json()
meetups = Meetup().fetch_meetups()
meetup = [meetup for meetup in meetups if meetup['id'] == meetupId]
if meetup:
title = meetup[0]['title'].upper()
rsvp = {
"meetup": meetupId,
"title": title,
"status": data['status']
}
def confirm():
if rsvp['status'] == "yes":
return "You have successfully RSVP'd on {} meetup".format(title)
elif rsvp['status'] == "no":
return "You have confirmed you're not attending the {} meetup".format(title)
elif rsvp['status'] == "maybe":
return "You have confirmed you might attend the {} meetup".format(title)
return jsonify({
"status": 200,
"message": confirm(),
"data": rsvp
}), 201
else:
return jsonify({
"error": 'Meetup not found or doesn\'nt exist',
"status": 404
}), 404
|
# License MIT (https://opensource.org/licenses/MIT).
from . import test_newbalance
from . import test_pos_debt
|
import os
from pathlib import Path
from typing import Tuple
from .constants import Errors, ErrorTypes, Files, ModuleCst, Templates
from .module_parser import ModuleParser
from .utils import (
create_file,
get_name_component,
is_combinational,
is_sequential,
)
__all__ = ['Analysis']
class Analysis(object):
    """
    This class has the methods to analyze an input file,
    split it into files with their modules and
    modify these files so that they have injections.
    """

    def __init__(
        self,
        path_file_source: str,
        output_path: str,
        top_module: str,
        errtype='SET'
    ) -> None:
        """
        Validate and initiate class with instance attr.

        :param path_file_source: path of the source file to analyze.
        :param output_path: directory where the 'src' and 'output'
            sub-directories will be created.
        :param top_module: name of the design's top module.
        :param errtype: error type to inject; compared elsewhere against
            ErrorTypes.SET / .SEU / .RAMB.
        :raises ValueError: via _validate_initial_params on bad params.
        """
        self._validate_initial_params(
            path_file_source,
            output_path,
        )
        self.path_file_source = Path(path_file_source)
        self.output_path = Path(output_path)
        self.top_module = top_module
        self.errtype = errtype

    def _validate_initial_params(self, path_file_source: str, output_path: str) -> None:
        """
        Validate that the params are correct; if not, raise ValueError.
        """
        if not(path_file_source and output_path):
            raise ValueError(Errors.EMPTY_PARAMS)
        if not os.path.exists(path_file_source):
            raise ValueError(Errors.FILE_DOSNT_EXISTS)
        if not os.path.isfile(path_file_source):
            raise ValueError(Errors.IS_NOT_A_FILE)

    def _set_and_create_paths(self) -> None:
        """
        Create the directories needed to operate: <output>/src for the
        split modules and <output>/output for the injected results.
        """
        self.src_path = self.output_path.joinpath('src')
        self.src_path.mkdir(parents=True, exist_ok=True)
        self.result_path = self.output_path.joinpath('output')
        self.result_path.mkdir(parents=True, exist_ok=True)

    def _remove_module_start(self, lines: list) -> Tuple[list, list]:
        """
        Split off the module-header lines from *lines*.

        Lines are collected until the first line that follows a line
        containing '(' and itself has no '('.
        Returns (remaining_lines, header_lines).
        """
        list_module_start = []
        next_line_break = False
        for line in lines:
            if '(' in line:
                next_line_break = True
            elif next_line_break:
                break
            list_module_start.append(line)
        return lines[len(list_module_start):], list_module_start

    def _remove_list_port(self, lines: list) -> Tuple[list, list]:
        """
        Split off the port-declaration lines from *lines*.

        Collects up to the line containing ModuleCst.PORT_END_LINE plus
        the single line that follows it.
        NOTE(review): consuming that extra trailing line looks deliberate
        (closing of the port list?) — confirm against the file format.
        Returns (remaining_lines, port_lines).
        """
        list_port = []
        next_line_break = False
        for line in lines:
            list_port.append(line)
            if ModuleCst.PORT_END_LINE in line:
                next_line_break = True
            elif next_line_break:
                break
        return lines[len(list_port):], list_port

    def _remove_list_io(self, lines: list) -> Tuple[list, list]:
        """
        Split off the input/output declarations: every line up to (but
        excluding) the first line containing ModuleCst.WIRE.
        Returns (remaining_lines, io_lines).
        """
        list_dec = []
        for line in lines:
            if ModuleCst.WIRE in line:
                break
            list_dec.append(line)
        return lines[len(list_dec):], list_dec

    def _remove_list_wire(self, lines: list) -> Tuple[list, list]:
        """
        Split off the leading run of consecutive lines containing
        ModuleCst.WIRE.
        Returns (remaining_lines, wire_lines).
        """
        list_wire = []
        for line in lines:
            if ModuleCst.WIRE not in line:
                break
            list_wire.append(line)
        return lines[len(list_wire):], list_wire

    def _src_file_exists(self, name: str) -> bool:
        """
        Validate if the source file for module *name* exists in src/.
        """
        filename = name + Files.EXTENSION
        file_path = self.src_path / filename
        return os.path.exists(file_path)

    def _make_injections(self, file_content: list) -> Tuple[int, str]:
        """
        This method analyses the kind and the id of each injection.

        Walks the instruction lines: a component that matches the error
        type (combinational for SET, sequential for SEU) is injected
        directly; otherwise, when a sub-module source exists, it recurses
        through create_files_with_injections and rewires the instance.
        Returns (number_of_injections, rewritten_text).
        """
        injection_counter = 0
        analysis = ''
        for line in file_content:
            if ModuleCst.COMPONENT_START in line:
                if (
                    is_combinational(line) and self.errtype == ErrorTypes.SET or
                    is_sequential(line) and self.errtype == ErrorTypes.SEU
                ):
                    # Direct injection: swap in the modified component
                    # variant and append an injection line with its id.
                    name = get_name_component(line)
                    analysis += line.replace(name, name + Files.MOD)
                    analysis += Templates.INJ.format(injection_counter)
                    injection_counter += 1
                elif self.errtype in [ErrorTypes.SEU, ErrorTypes.SET, ErrorTypes.RAMB]:
                    name = get_name_component(line)
                    if self._src_file_exists(name):
                        # Recurse into the sub-module, then wire its
                        # injection bus into this module's instance.
                        mod_count = self.create_files_with_injections(name)
                        line = Templates.UTT_COMPONENT.format(name, name)
                        if mod_count != 0:
                            line_inj_utr = Templates.INJ_UTT.format(
                                initial_value=injection_counter + mod_count - 1,
                                final_value=injection_counter,
                            )
                            if self.errtype == ErrorTypes.RAMB:
                                line_inj_utr += Templates.RAMB_INTRC
                            line += line_inj_utr
                            injection_counter += mod_count
                        analysis += line
                    else:
                        analysis += line
            else:
                analysis += line
        return injection_counter, analysis

    def create_files_with_injections(self, filename: str) -> int:
        """
        Generate the files with injections in the result path.
        This method can be recursive (via _make_injections, which calls
        back for each sub-module it encounters).

        :param filename: module name without extension, resolved in src/.
        :return: number of injections performed for this module tree.
        """
        filename += Files.EXTENSION
        file_input_path = self.src_path / filename
        with open(file_input_path, 'r') as src_file:
            file_content = src_file.readlines()
        # Peel the file apart section by section.
        file_content, list_module_start = self._remove_module_start(file_content)
        # remove ports from original file
        file_content, list_line_port = self._remove_list_port(file_content)
        # remove io from original file
        file_content, list_line_io = self._remove_list_io(file_content)
        # remove wire from original file
        file_content, list_line_wire = self._remove_list_wire(file_content)
        # the file only has the instruction lines left at this point
        injection_counter, analysis = self._make_injections(file_content)
        content_output_file = self._get_content_output_file(
            list_line_port,
            list_line_io,
            list_line_wire,
            list_module_start,
            injection_counter,
            analysis,
        )
        create_file(
            self.result_path / filename,
            content_output_file
        )
        return injection_counter

    def _get_content_output_file(
        self,
        list_port: list,
        list_io: list,
        list_wire: list,
        list_module_start: list,
        injection_counter: int,
        analysis: str,
    ) -> str:
        """
        Generate the content for the component file with injections.

        Reassembles the header, the ports plus the injection input port,
        the io and wire declarations, and the rewritten instruction text.
        """
        output = list_module_start[0]
        if self.errtype in [ErrorTypes.SEU, ErrorTypes.SET, ErrorTypes.RAMB]:
            output += Templates.INITIAL_LINE.format(list_module_start[1])
            if self.errtype == ErrorTypes.RAMB:
                output += Templates.INPUT_LINES_RAMB
        output += ''.join(list_port)
        if injection_counter != 0:
            # Declare the injection input as a bus [counter-1:0].
            new_line = Templates.INPUT_ARRAY_INJ.format(
                first=injection_counter-1,
                second=0,
            )
        else:
            new_line = Templates.INPUT_INJ
        output += new_line
        if self.errtype == ErrorTypes.RAMB:
            output += Templates.INPUT_LINES_ARRAY_RAMB
        output += ''.join(list_io + list_wire)
        output += analysis
        return output

    def run(self):
        """
        Execute all the methods to generate the files with injections
        from the input file.

        Two groups of files are generated: the split modules without
        injections in {output_folder}/src/, and the modules with the
        injections in {output_folder}/output/.
        """
        self._set_and_create_paths()
        modules = ModuleParser.split_file_in_modules(
            self.path_file_source,
            self.src_path,
        )
        for module in modules:
            name = ModuleParser.get_module_name(module)
            self.create_files_with_injections(name)
|
import numpy as np
import matplotlib.pyplot as plt
from numpy.linalg import inv
from numpy.linalg import cholesky
from math import sin, cos, atan, sqrt
import math
from scipy.interpolate import interp1d
from scipy.integrate import ode
from scipy.integrate import solve_ivp
from scipy.linalg import expm
from scipy.linalg import solve_continuous_are
from pydrake.solvers import mathematicalprogram as mp
from pydrake.solvers.osqp import OsqpSolver
from pydrake.solvers.snopt import SnoptSolver
from pydrake.solvers.mathematicalprogram import MathematicalProgram, Solve
import pydrake.symbolic as sym
from pydrake.all import MonomialBasis, OddDegreeMonomialBasis, Variables
class Quadrotor(object):
def __init__(self, Q, R, Qf):
self.g = 9.81
self.m = 1
self.a = 0.25
self.I = 0.0625
self.Q = Q
self.R = R
self.Qf = Qf
# Input limits
self.umin = 0
# self.umax = 5.5
self.umax = 20
self.n_x = 6
self.n_u = 2
self.S = np.zeros((6, 6))
try:
self.S = np.load('S_sol.npy')
# For the SOS problem in this homework, we maximized the rho level-set
# where rho = 1
self.rho = 1.0
except:
print("Warning: S_sol.npy does not exist. CLF-based controllers (Problem 3) will not work")
print("To generate S_sol.npy, please complete Problem 2 and run stability_analysis.py")
# Set use_experimental_inputs to True to test the CLF QP boundary controller
# Only works after Problem 3.2.b and 3.3.b are completed
self.use_experimental_inputs = False
def x_d(self, t):
    # Nominal (reference) state at time t on a radius-3 circular
    # trajectory: [y, z, theta, ydot, zdot, thetadot]. The theta and
    # thetadot entries follow from the thrust direction needed to track
    # the circle (closed-form, generated symbolically).
    # return np.array([0, 0, 0, 0, 0, 0])
    return np.array([3*cos(t), 3*sin(t), atan(3*cos(t)/(9.81 - 3*sin(t))), -3*sin(t), 3*cos(t), (-3*(1 - 0.305810397553517*sin(t))**2*sin(t) + 0.09351999925184*(9.81 - 3*sin(t))*cos(t)**2)/((9.81 - 3*sin(t))*((1 - 0.305810397553517*sin(t))**2 + 0.09351999925184*cos(t)**2))])
def u_d(self, t):
    # Nominal (reference) input [u1, u2] that tracks x_d(t); closed-form
    # expression generated symbolically (hence the literal constants).
    # return np.array([self.m*self.g/2, self.m*self.g/2])
    return np.array([5.0e-15*(1000000000000.0*sqrt((90000.0*sin(t)**2 - 588600.0*sin(t) + 90000.0*cos(t)**2 + 962361.0)/(90000.0*sin(t)**2 - 588600.0*sin(t) + 962361.0))*(981.0 - 300.0*sin(t))*(729000000000000.0*sin(t)**6 - 1.430298e+16*sin(t)**5 + 1.458e+15*sin(t)**4*cos(t)**2 + 1.169268615e+17*sin(t)**4 - 1.907064e+16*sin(t)**3*cos(t)**2 - 5.0980111614e+17*sin(t)**3 + 729000000000000.0*sin(t)**2*cos(t)**4 + 9.35414892e+16*sin(t)**2*cos(t)**2 + 1.25028723733335e+18*sin(t)**2 - 4.76766e+15*sin(t)*cos(t)**4 - 2.03920446456e+17*sin(t)*cos(t)**2 - 1.63537570643202e+18*sin(t) + 7.7951241e+15*cos(t)**4 + 1.6670496497778e+17*cos(t)**2 + 8.91279760005452e+17) - 83562883710976.0*sin(t)**5*cos(t) + 5.95957500000008e+28*sin(t)**4*cos(t) - 87960930222080.0*sin(t)**3*cos(t)**3 - 3.89756205000002e+29*sin(t)**3*cos(t) + 5.95957500000003e+28*sin(t)**2*cos(t)**3 + 3.94064967394918e+15*sin(t)**2*cos(t) - 3.89756205e+29*sin(t)*cos(t)**3 + 4.1676241244445e+30*sin(t)*cos(t) + 6.37251395174999e+29*cos(t)**3 - 6.81406544346676e+30*cos(t))/(729000000000000.0*sin(t)**6 - 1.430298e+16*sin(t)**5 + 1.458e+15*sin(t)**4*cos(t)**2 + 1.169268615e+17*sin(t)**4 - 1.907064e+16*sin(t)**3*cos(t)**2 - 5.0980111614e+17*sin(t)**3 + 729000000000000.0*sin(t)**2*cos(t)**4 + 9.35414892e+16*sin(t)**2*cos(t)**2 + 1.25028723733335e+18*sin(t)**2 - 4.76766e+15*sin(t)*cos(t)**4 - 2.03920446456e+17*sin(t)*cos(t)**2 - 1.63537570643202e+18*sin(t) + 7.7951241e+15*cos(t)**4 + 1.6670496497778e+17*cos(t)**2 + 8.91279760005452e+17), 5.0e-15*(1000000000000.0*sqrt((90000.0*sin(t)**2 - 588600.0*sin(t) + 90000.0*cos(t)**2 + 962361.0)/(90000.0*sin(t)**2 - 588600.0*sin(t) + 962361.0))*(981.0 - 300.0*sin(t))*(729000000000000.0*sin(t)**6 - 1.430298e+16*sin(t)**5 + 1.458e+15*sin(t)**4*cos(t)**2 + 1.169268615e+17*sin(t)**4 - 1.907064e+16*sin(t)**3*cos(t)**2 - 5.0980111614e+17*sin(t)**3 + 729000000000000.0*sin(t)**2*cos(t)**4 + 9.35414892e+16*sin(t)**2*cos(t)**2 + 1.25028723733335e+18*sin(t)**2 - 4.76766e+15*sin(t)*cos(t)**4 -
2.03920446456e+17*sin(t)*cos(t)**2 - 1.63537570643202e+18*sin(t) + 7.7951241e+15*cos(t)**4 + 1.6670496497778e+17*cos(t)**2 + 8.91279760005452e+17) + 83562883710976.0*sin(t)**5*cos(t) - 5.95957500000008e+28*sin(t)**4*cos(t) + 87960930222080.0*sin(t)**3*cos(t)**3 + 3.89756205000002e+29*sin(t)**3*cos(t) - 5.95957500000003e+28*sin(t)**2*cos(t)**3 - 3.94064967394918e+15*sin(t)**2*cos(t) + 3.89756205e+29*sin(t)*cos(t)**3 - 4.1676241244445e+30*sin(t)*cos(t) - 6.37251395174999e+29*cos(t)**3 + 6.81406544346676e+30*cos(t))/(729000000000000.0*sin(t)**6 - 1.430298e+16*sin(t)**5 + 1.458e+15*sin(t)**4*cos(t)**2 + 1.169268615e+17*sin(t)**4 - 1.907064e+16*sin(t)**3*cos(t)**2 - 5.0980111614e+17*sin(t)**3 + 729000000000000.0*sin(t)**2*cos(t)**4 + 9.35414892e+16*sin(t)**2*cos(t)**2 + 1.25028723733335e+18*sin(t)**2 - 4.76766e+15*sin(t)*cos(t)**4 - 2.03920446456e+17*sin(t)*cos(t)**2 - 1.63537570643202e+18*sin(t) + 7.7951241e+15*cos(t)**4 + 1.6670496497778e+17*cos(t)**2 + 8.91279760005452e+17)])
def continuous_time_full_dynamics(self, x, u):
# Dynamics for the quadrotor
g = self.g
m = self.m
a = self.a
I = self.I
theta = x[2]
ydot = x[3]
zdot = x[4]
thetadot = x[5]
u0 = u[0]
u1 = u[1]
xdot = np.array([ydot,
zdot,
thetadot,
-sin(theta) * (u0 + u1) / m,
-g + cos(theta) * (u0 + u1) / m,
a * (u0 - u1) / I])
return xdot
def continuous_time_linearized_dynamics(self, t):
    """Jacobian linearization of the dynamics about the reference
    (x_d(t), u_d(t)); returns (A, B) of d/dt(dx) = A dx + B du."""
    # Dynamics linearized at the fixed point
    # This function returns A and B matrix
    # A = np.zeros((6,6))
    # A[:3, -3:] = np.identity(3)
    # A[3, 2] = -self.g;
    # B = np.zeros((6,2))
    # B[4,0] = 1/self.m;
    # B[4,1] = 1/self.m;
    # B[5,0] = self.a/self.I
    # B[5,1] = -self.a/self.I
    x_d = self.x_d(t)
    u_d = self.u_d(t)
    m = self.m
    a = self.a
    I = self.I
    # Third column: sensitivity of the translational accelerations to
    # theta, evaluated at the reference attitude and total thrust.
    A = np.array([[0, 0, 0, 1, 0, 0],
                  [0, 0, 0, 0, 1, 0],
                  [0, 0, 0, 0, 0, 1],
                  [0, 0, -cos(x_d[2])/m * (u_d[0] + u_d[1]), 0, 0, 0],
                  [0, 0, -sin(x_d[2])/m * (u_d[0] + u_d[1]), 0, 0, 0],
                  [0, 0, 0, 0, 0, 0]])
    B = np.array([[0, 0],
                  [0, 0],
                  [0, 0],
                  [-sin(x_d[2])/m, -sin(x_d[2])/m],
                  [cos(x_d[2])/m, cos(x_d[2])/m],
                  [a/I, -a/I]])
    return A, B
def discrete_time_linearized_dynamics(self, T,t):
# Discrete time version of the linearized dynamics at the fixed point
# This function returns A and B matrix of the discrete time dynamics
A_c, B_c = self.continuous_time_linearized_dynamics(t)
A_d = np.identity(6) + A_c * T;
B_d = B_c * T;
return A_d, B_d
def add_initial_state_constraint(self, prog, x, x_current):
    """Pin the first knot point of the trajectory to the measured state.

    Equality is expressed as a degenerate bounding box with
    lb == ub == x_current[i] on each of the 6 state components.
    """
    for idx in range(6):
        value = x_current[idx]
        prog.AddBoundingBoxConstraint(value, value, x[0, idx])
def add_input_saturation_constraint(self, prog, x, u, N, t):
    """Bound every input knot point to [umin, umax].

    The decision variables are deviations from the nominal input u_d(t),
    so both limits are shifted by the nominal input before being applied.
    """
    nominal = self.u_d(t)
    lower = self.umin - nominal
    upper = self.umax - nominal
    for k in range(N - 1):
        prog.AddBoundingBoxConstraint(lower, upper, u[k])
def add_dynamics_constraint(self, prog, x, u, N, T, t):
    """Impose the discretized dynamics as equality constraints linking
    consecutive knot points (in deviation coordinates about x_d)."""
    # TODO: impose dynamics constraint.
    # Use AddLinearEqualityConstraint(expr, value)
    # A, B = self.discrete_time_linearized_dynamics(T,t)
    for i in range(N-1):
        # Re-linearize around the reference at each knot's absolute time.
        A, B = self.discrete_time_linearized_dynamics(T, t+i*T)
        x_kp1 = x[i+1] - self.x_d(t+(i+1)*T)
        x_curr = A @ (x[i]-self.x_d(t+i*T)) + B @ (u[i])  # compute what x_k+1 should be
        for q in range(6):  # do that for all 6
            prog.AddLinearEqualityConstraint(x_kp1[q] == x_curr[q])
    # prog.AddLinearEqualityConstraint(x[i+1] = A @ x[i] + B @ u[i])
    pass
def add_cost(self, prog, x, u, N, t, T):
    """Add the MPC objective: stage costs on [state; input] deviation
    weighted by blkdiag(Q, R), plus a terminal cost using the CLF
    matrix S."""
    # TODO: add cost.
    cost = 0
    Qk = np.block([[self.Q, np.zeros((6, 2))],
                   [np.zeros((2, 6)), self.R]])
    #z_d = np.array([np.concatenate((self.x_d(),self.u_d()))]).T# the desired value
    for k in range(N-1):
        # Inputs are already deviations, so their reference is zero.
        z_d = np.array([np.concatenate((self.x_d(t+k*T), np.zeros((2))))]).T# the desired value
        zk = np.array([np.concatenate((x[k], u[k]))]).T - z_d
        cost_k = zk.T @ Qk @ zk
        cost += cost_k
    # prog.AddQuadraticErrorCost(Qk,z_d,zk)
    # NOTE(review): QT below is built but unused — the terminal cost uses
    # the CLF matrix S instead of Qf; confirm this is intended.
    QT = np.block([[self.Qf, np.zeros((6, 2))],
                   [np.zeros((2, 6)), self.R]])
    # zT = np.array([np.concatenate((x[-1],u[-1]))]).T
    zT = np.array([x[-1] - self.x_d(t+T*N)]).T
    # cost += (zT.T @ QT @ zT)/2
    # cost += (zT.T @ self.Qf @ zT)
    cost += (zT.T @ self.S @ zT)
    prog.AddQuadraticCost(cost[0, 0])
    pass
def add_mpc_clf_constraint(self, prog, x, N):
    '''
    Adds the constraint V(x_t) <= V(x_0)
    Note that this constraint is non-linear and turns this MPC problem from a
    quadratic program to a non-linear program
    '''
    # Only active when S was successfully loaded (non-zero); V(x) = x'Sx.
    if (not np.allclose(self.S, np.zeros((self.n_x, self.n_x)))):
        # TODO: add the discrete time stability constraint within this if statement
        prog.AddConstraint(x[N-1].T @ self.S @ x[N-1] - x[0].T @ self.S @ x[0] <= 0)
    pass
def compute_mpc_feedback(self, x_current, t_current, N=10, use_clf=False):
    '''
    This function computes the MPC controller input u

    :param x_current: current state (length 6).
    :param t_current: current absolute time, used to evaluate the
        reference trajectory x_d / u_d.
    :param N: horizon length in knot points.
    :param use_clf: if True, add the CLF decrease constraint and solve
        with SNOPT (the problem becomes a nonlinear program).
    '''
    # Parameters for the QP
    T = 0.1
    # Initialize mathematical program and declare decision variables.
    # Variables are stored in object-dtype arrays so slicing by knot works.
    prog = MathematicalProgram()
    x = np.zeros((int(N), 6), dtype="object")
    for i in range(N):
        x[i] = prog.NewContinuousVariables(6, "x_" + str(i))
    u = np.zeros((N-1, 2), dtype="object")
    for i in range(N-1):
        u[i] = prog.NewContinuousVariables(2, "u_" + str(i))
    # Add constraints and cost
    self.add_initial_state_constraint(prog, x, x_current)
    self.add_input_saturation_constraint(prog, x, u, N, t_current)
    self.add_dynamics_constraint(prog, x, u, N, T, t_current)
    self.add_cost(prog, x, u, N, t_current, T)
    # Placeholder constraint and cost to satisfy QP requirements
    # TODO: Delete after completing this function
    # prog.AddQuadraticCost(0)
    # prog.AddLinearEqualityConstraint(0, 0)
    # Adds the stability constraint: V(x_T) <= V(x_0) if using
    # the clf version of MPC
    if (use_clf):
        self.add_mpc_clf_constraint(prog, x, N)
    # Solve the QP
    solver = OsqpSolver()
    if (use_clf):
        # Because we've added the CLF constraint here,
        # this problem becomes a non-linear program
        solver = SnoptSolver()
    result = solver.Solve(prog)
    u_mpc = np.zeros(2)
    # TODO: retrieve the controller input from the solution of the optimization problem
    # and use it to compute the MPC input u
    # You should make use of result.GetSolution(decision_var) where decision_var
    # is the variable you want
    u_mpc = result.GetSolution(u[0])#[result.GetSolution(u[i]) for i in range(N-1) if True]
    # for i in range(N):
    #     u0 = result.GetSolution("u_" + str(i)))
    u_mpc += self.u_d(t_current)  # remember to add the nominal input
    return u_mpc
def compute_lqr_feedback(self, x):
    '''
    Infinite-horizon LQR feedback about the nominal operating point.
    '''
    A, B = self.continuous_time_linearized_dynamics()
    # Cost-to-go matrix from the continuous-time algebraic Riccati equation.
    P = solve_continuous_are(A, B, self.Q, self.R)
    gain = -inv(self.R) @ B.T @ P
    return self.u_d() + gain @ x
def dynamics_cubic_approximation(self, x, u):
    '''
    Approximated quadrotor dynamics using cubic polynomial substitutions
    for the trigonometric terms:
        sin(theta) ~= theta - theta**3 / 6
        cos(theta) ~= 1 - theta**2 / 2
    '''
    theta, ydot, zdot, thetadot = x[2], x[3], x[4], x[5]
    thrust = u[0] + u[1]
    sin_approx = theta - theta ** 3 / 6
    cos_approx = 1 - theta ** 2 / 2
    return np.array([
        ydot,
        zdot,
        thetadot,
        -sin_approx * thrust / self.m,
        -self.g + cos_approx * thrust / self.m,
        self.a * (u[0] - u[1]) / self.I,
    ])
def closed_loop_dynamics_cubic_approximation(self, x):
    # Closed-loop dynamics under the infinite-horizon LQR controller.
    return self.dynamics_cubic_approximation(x, self.compute_lqr_feedback(x))
def f(self, x):
    '''
    Return the drift term f(x) of the control-affine dynamics
    xdot = f(x) + h(x) u.
    '''
    f_x = np.zeros((6))
    f_x[0] = x[3]  # ydot
    f_x[1] = x[4]  # zdot
    f_x[2] = x[5]  # thetadot
    # Gravity enters only the vertical acceleration; the remaining
    # entries (ydotdot, thetadotdot) come entirely from h(x) u.
    f_x[4] = -self.g
    return f_x
def h(self, x):
    '''
    Return the input matrix h(x) of the control-affine dynamics
    xdot = f(x) + h(x) u (shape (n_x, n_u)).
    '''
    theta = x[2]
    g_x = np.zeros((self.n_x, self.n_u))
    # Both rotors contribute thrust along the body-normal direction,
    # projected onto y and z through the pitch angle theta.
    g_x[3, 0] = -np.sin(theta) / self.m
    g_x[3, 1] = -np.sin(theta) / self.m
    g_x[4, 0] = np.cos(theta) / self.m
    g_x[4, 1] = np.cos(theta) / self.m
    # Differential thrust produces the pitching torque.
    g_x[5, 0] = self.a / self.I
    g_x[5, 1] = -self.a / self.I
    return g_x
def compute_clf_qp_feedback(self, x):
    '''
    Find control input u using V such that Vdot leq 0
    by constructing a QP that minimizes u^T R u
    s.t. dVdx h(x) u <= -dVdx f(x)

    With V(x) = x^T S x, dVdx = 2 x^T S; the factor of 2 cancels on both
    sides of the constraint, which is why it does not appear below.
    '''
    # Exploration mode: away from the ROA boundary, return a random
    # (clipped) input instead of solving the QP.
    if self.use_experimental_inputs and not self.is_near_boundary(x):
        return np.clip(6 * np.random.random(2), self.umin, self.umax)
    # You can retrieve the system paramters m, g, a, and I from the class variables
    # For example, a can be retrieved by calling self.a
    # Boiler plate code to construct the mathematical program
    prog = MathematicalProgram()
    u = prog.NewContinuousVariables(self.n_u, "u")
    R = np.eye(self.n_u)
    # Minimize the control effort u^T R u.
    prog.AddQuadraticCost(u.T @ R @ u)
    # Constraint dVdx h(x) u <= -dVdx f(x): linear in the decision
    # variables u, hence AddLinearConstraint.
    prog.AddLinearConstraint(x.T @ self.S @ self.h(x) @ u + x.T @ self.S @ self.f(x) <= 0)
    solver = OsqpSolver()
    result = solver.Solve(prog)
    return result.GetSolution(u)
def is_near_boundary(self, x, tol = 1e-2):
    '''
    Return True when the Lyapunov value V(x) = x^T S x is within tol of
    (or beyond) the region-of-attraction level self.rho.
    '''
    lyapunov_value = x.T @ self.S @ x
    return self.rho - lyapunov_value <= tol
from pathlib import Path
from params_proto.neo_hyper import Sweep
from sac_dennis_rff.config import Args, Actor, Critic, Agent
from model_free_analysis import RUN
# params_proto hyperparameter sweep: the Sweep context records attribute
# assignments declaratively; nothing is executed until sweep.save().
with Sweep(RUN, Args, Actor, Critic, Agent) as sweep:
    # Base configuration shared by every job in the sweep.
    Args.dmc = True
    Args.checkpoint_root = "gs://ge-data-improbable/checkpoints"
    Args.save_final_replay_buffer = True
    RUN.prefix = "{project}/{project}/{file_stem}/{job_name}"
    Agent.use_rff = True
    Agent.learnable_temperature = True
    # Agent.scale = 0.0001
    # Cartesian product over seeds and RFF scales ...
    with sweep.product:
        Args.seed = [100, 200, 300, 400, 500]
        Agent.scale = [0.0003, 0.001, 0.003]
        # ... combined with per-environment settings zipped positionally:
        # entry i of each list below belongs to environment i.
        # NOTE(review): zip assumed nested inside product (original
        # indentation was lost) — confirm against the repo.
        with sweep.zip:
            Args.env_name = ['dmc:Cheetah-run-v1', 'dmc:Acrobot-swingup-v1',
                             'dmc:Hopper-hop-v1',
                             'dmc:Quadruped-walk-v1',
                             'dmc:Humanoid-run-v1', 'dmc:Finger-turn_hard-v1',
                             'dmc:Walker-run-v1']
            Agent.actor_fourier_features = [680, 240, 600, 3120, 2680, 480, 960]
            Agent.critic_fourier_features = [920, 280, 760, 3600, 3520, 560, 1200]
            Args.train_frames = [1_000_000, 1_000_000,
                                 1_000_000,
                                 1_000_000,
                                 2_000_000, 500_000,
                                 1_000_000, ]
# Post-processing hook: derives the job name/prefix for each sweep entry.
@sweep.each
def tail(RUN, Args, Actor, Critic, Agent, *_):
    if Agent.learnable_temperature:
        suffix = 'alpha_tune'
    else:
        suffix = f'alpha_fixed-{Agent.init_temperature}'
    RUN.prefix, RUN.job_name, _ = RUN(script_path=__file__,
                                      job_name=f"{Args.env_name.split(':')[-1][:-3]}/{suffix}/scale-{Agent.scale}/{Args.seed}")
# Materialize the sweep as a .jsonl file next to this script.
sweep.save(f"{Path(__file__).stem}.jsonl")
""" Tic Tac Toe
----------------------------------------
https://hackr.io/blog/python-projects
"""
import random
import sys
# Board cells hold their own index (0..8) while free; a claimed cell
# holds 'X' or 'O'.
board=[i for i in range(0,9)]
player, computer = '',''
# Preferred move groups: corners, center, and edges, respectively
# (values are 1-based move numbers).
moves=((1,7,3,9), (5,), (2,4,6,8))
# All winning triples as 0-based board indices.
winners=((0,1,2), (3,4,5), (6,7,8), (0,3,6), (1,4,7), (2,5,8), (0,4,8), (2,4,6))
# Valid 1-based move numbers.
tab = range(1,10)
def print_board():
    # Print the 3x3 board; free cells render as spaces, claimed cells as
    # their mark. `end` controls the cell separator: ' | ' within a row,
    # a newline at the row end.
    x = 1  # 1-based cell counter
    for i in board:
        end = ' | '
        if x % 3 == 0:
            end = ' \n'
            # NOTE(review): row separator is skipped when the cell value
            # is 1 — looks like a quirk inherited from the tutorial
            # source; confirm intended output.
            if i != 1: end += '---------\n';
        char = ' '
        if i in ('X','O'):
            char = i;
        x += 1
        print(char, end = end)
def select_char():
    # Randomly decide which mark the human player gets; returns
    # (player_mark, computer_mark).
    marks = ('X', 'O')
    if random.randint(0, 1) == 0:
        return marks[::-1]
    return marks
def can_move(brd, player, move):
    # A move is legal when it is on the board (1-9) and the target cell
    # still holds its initial index value, i.e. has not been claimed.
    # (`player` is unused but kept for interface compatibility.)
    return move in tab and brd[move - 1] == move - 1
def can_win(brd, player, move):
    '''
    Return True if `brd` currently contains a completed winning line
    for `player`.

    (`move` is unused but kept for interface compatibility; the original
    also built an unused `places` list, removed here.)
    '''
    for tup in winners:
        if all(brd[ix] == player for ix in tup):
            return True
    return False
def make_move(brd, player, move, undo = False):
    '''
    Place `player`'s mark at 1-based position `move` on `brd`.

    With undo=True the board is restored afterwards — used to probe
    whether a candidate move would win without committing it.
    Returns (moved, won).
    '''
    if not can_move(brd, player, move):
        return (False, False)
    brd[move - 1] = player
    won = can_win(brd, player, move)
    if undo:
        brd[move - 1] = move - 1
    return (True, won)
def computer_move():
    # Heuristic move selection for the computer player.
    chosen = -1
    # 1. Take an immediate win if one exists.
    for candidate in range(1, 10):
        if make_move(board, computer, candidate, True)[1]:
            chosen = candidate
            break
    # 2. Otherwise block the human player's immediate win.
    if chosen == -1:
        for candidate in range(1, 10):
            if make_move(board, player, candidate, True)[1]:
                chosen = candidate
                break
    # 3. Otherwise prefer corners, then center, then edges.
    if chosen == -1:
        for group in moves:
            for candidate in group:
                if chosen == -1 and can_move(board, computer, candidate):
                    chosen = candidate
                    break
    # If no move was found, chosen stays -1 and make_move reports failure.
    return make_move(board, computer, chosen)
def space_exist():
    # True while at least one of the nine cells is still unclaimed.
    claimed = board.count('X') + board.count('O')
    return claimed != 9
# Main game loop: alternate human input and computer replies until a win
# or a full board.
player, computer = select_char()
print('Player is [%s] and computer is [%s]' % (player, computer))
result='%%% Deuce ! %%%'  # default outcome when the board fills up
while space_exist():
    print_board()
    print('#Make your move ! [1-9] : ', end='')
    # NOTE(review): int(input()) raises ValueError on non-numeric input
    # and would crash the game — confirm whether that is acceptable here.
    move = int(input())
    moved, won = make_move(board, player, move)
    if not moved:
        print(' >> Invalid number ! Try again !')
        continue
    #
    if won:
        result = '*** Congratulations ! You won ! ***'
        break
    elif computer_move()[1]:
        result = '=== You lose ! =='
        break;
print_board()
print(result)
|
### ----------------------------------------------------------------------------
### Import
### ----------------------------------------------------------------------------
import pandas as pd
# Remote CSVs with ENEM-style exam scores (train / test / submission).
URI_TREINO = "https://github.com/tgcsantos/quaretenadados/blob/master/DADOS_TREINO.csv?raw=true"
URI_TESTE = "https://github.com/tgcsantos/quaretenadados/raw/master/DADOS_TESTE.csv"
URI_DESAFIOQT = "https://github.com/tgcsantos/quaretenadados/raw/master/DESAFIOQT.csv"
# Network fetch: these read directly from GitHub.
dados_treino = pd.read_csv(URI_TREINO)
dados_teste = pd.read_csv(URI_TESTE)
dados_desafioqt = pd.read_csv(URI_DESAFIOQT)
erro_treino = "Erro ao carregar dados de treino"
erro_teste = "Erro ao carregar dados de teste"
erro_desafioqt = "Erro ao carregar dados de submissão"
# Sanity-check the expected shapes before building feature matrices.
assert dados_treino.shape == (150000, 5), erro_treino
assert dados_teste.shape == (20000, 5), erro_teste
assert dados_desafioqt.shape == (10000, 5), erro_desafioqt
# Target: language/communication score; features: the other four scores.
coluna_label = 'NU_NOTA_LC'
coluna_features = ['NU_NOTA_CN', 'NU_NOTA_CH', 'NU_NOTA_MT', 'NU_NOTA_REDACAO']
X_treino = dados_treino[coluna_features]
Y_treino = dados_treino[coluna_label]
X_teste = dados_teste[coluna_features]
Y_teste = dados_teste[coluna_label]
|
# coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
class ProgressiveWebmMuxingManifestType(Enum):
    """Manifest type associated with a progressive WebM muxing (Bitmovin API model)."""
    DASH_ON_DEMAND = "DASH_ON_DEMAND"
    NONE = "NONE"
|
def alphabet_war(fight):
    """Score the fight string and report which side wins.

    Left-side letters (w, p, b, s) add power; right-side letters
    (m, q, d, z) subtract it. Other characters are ignored.
    """
    powers = {'w': 4, 'p': 3, 'b': 2, 's': 1,
              'm': -4, 'q': -3, 'd': -2, 'z': -1}
    score = sum(powers.get(letter, 0) for letter in fight)
    if score > 0:
        return "Left side wins!"
    if score < 0:
        return "Right side wins!"
    return "Let's fight again!"
|
# Copyright 2021 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tfp_compatible_distribution.py`."""
from absl.testing import absltest
from absl.testing import parameterized
import chex
from distrax._src.distributions.categorical import Categorical
from distrax._src.distributions.independent import Independent
from distrax._src.distributions.laplace import Laplace
from distrax._src.distributions.mvn_diag import MultivariateNormalDiag
from distrax._src.distributions.normal import Normal
from distrax._src.distributions.tfp_compatible_distribution import tfp_compatible_distribution
from distrax._src.distributions.transformed import Transformed
from distrax._src.distributions.uniform import Uniform
import jax
import jax.numpy as jnp
import numpy as np
from tensorflow_probability.substrates import jax as tfp
tfb = tfp.bijectors
tfd = tfp.distributions
RTOL = 1e-4
class TFPCompatibleDistributionNormal(parameterized.TestCase):
  """Tests for Normal distribution.

  Subclasses override `setUp` to swap in other base distributions and
  reuse the same battery of tests.
  """

  def setUp(self):
    super().setUp()
    self._sample_shape = (np.int32(10),)
    self._seed = 42
    self._key = jax.random.PRNGKey(self._seed)
    self.assertion_fn = lambda x, y: np.testing.assert_allclose(x, y, rtol=RTOL)
    self.base_dist = Normal(loc=jnp.array([0., 0.]), scale=jnp.array([1., 1.]))
    self.values = jnp.array([1., -1.])
    self.distrax_second_dist = Normal(loc=-1., scale=0.8)
    self.tfp_second_dist = tfd.Normal(loc=-1., scale=0.8)

  @property
  def wrapped_dist(self):
    # A fresh wrapper on each access; wrapping is stateless.
    return tfp_compatible_distribution(self.base_dist)

  def test_event_shape(self):
    chex.assert_equal(self.wrapped_dist.event_shape, self.base_dist.event_shape)

  def test_batch_shape(self):
    chex.assert_equal(self.wrapped_dist.batch_shape, self.base_dist.batch_shape)

  @chex.all_variants
  def test_sample(self):
    def sample_fn(key):
      return self.wrapped_dist.sample(seed=key, sample_shape=self._sample_shape)
    sample_fn = self.variant(sample_fn)
    self.assertion_fn(
        sample_fn(self._key),
        self.base_dist.sample(sample_shape=self._sample_shape, seed=self._key))

  @chex.all_variants(with_pmap=False)
  @parameterized.named_parameters(
      ('mean', 'mean'),
      ('mode', 'mode'),
      ('median', 'median'),
      ('stddev', 'stddev'),
      ('variance', 'variance'),
      ('entropy', 'entropy'),
  )
  def test_method(self, method):
    try:
      expected_result = self.variant(getattr(self.base_dist, method))()
    except NotImplementedError:
      return
    except AttributeError:
      return
    result = self.variant(getattr(self.wrapped_dist, method))()
    self.assertion_fn(result, expected_result)

  @chex.all_variants
  @parameterized.named_parameters(
      ('log_prob', 'log_prob'),
      ('prob', 'prob'),
      ('log_cdf', 'log_cdf'),
      ('cdf', 'cdf'),
  )
  def test_method_with_value(self, method):
    try:
      expected_result = self.variant(
          getattr(self.base_dist, method))(self.values)
    except NotImplementedError:
      return
    except AttributeError:
      return
    result = self.variant(getattr(self.wrapped_dist, method))(self.values)
    self.assertion_fn(result, expected_result)

  @chex.all_variants
  @parameterized.named_parameters(
      ('kl_divergence', 'kl_divergence'),
      ('cross_entropy', 'cross_entropy'),
  )
  def test_with_two_distributions(self, method):
    """Test methods of the form listed below.

    D(distrax_distrib || wrapped_distrib),
    D(wrapped_distrib || distrax_distrib),
    D(tfp_distrib || wrapped_distrib),
    D(wrapped_distrib || tfp_distrib).

    Args:
      method: the method name to be tested
    """
    try:
      # BUG FIX: this previously read `self.base_distribution`, which does
      # not exist (the attribute is `self.base_dist`); the AttributeError
      # was swallowed below, silently skipping the whole test body.
      expected_result1 = self.variant(
          getattr(self.distrax_second_dist, method))(self.base_dist)
      expected_result2 = self.variant(
          getattr(self.base_dist, method))(self.distrax_second_dist)
    except NotImplementedError:
      return
    except AttributeError:
      return
    distrax_result1 = self.variant(getattr(self.distrax_second_dist, method))(
        self.wrapped_dist)
    distrax_result2 = self.variant(getattr(self.wrapped_dist, method))(
        self.distrax_second_dist)
    tfp_result1 = self.variant(getattr(self.tfp_second_dist, method))(
        self.wrapped_dist)
    tfp_result2 = self.variant(getattr(self.wrapped_dist, method))(
        self.tfp_second_dist)
    self.assertion_fn(distrax_result1, expected_result1)
    self.assertion_fn(distrax_result2, expected_result2)
    self.assertion_fn(tfp_result1, expected_result1)
    self.assertion_fn(tfp_result2, expected_result2)
class TFPCompatibleDistributionMvnNormal(TFPCompatibleDistributionNormal):
  """Tests for multivariate normal distribution."""

  def setUp(self):
    super().setUp()
    # Reuses the inherited test battery with an MVN-diag base distribution.
    self.base_dist = MultivariateNormalDiag(loc=jnp.array([0., 1.]))
    self.values = jnp.array([1., -1.])
    self.distrax_second_dist = MultivariateNormalDiag(
        loc=jnp.array([-1., 0.]), scale_diag=jnp.array([0.8, 1.2]))
    self.tfp_second_dist = tfd.MultivariateNormalDiag(
        loc=jnp.array([-1., 0.]), scale_diag=jnp.array([0.8, 1.2]))
class TFPCompatibleDistributionCategorical(TFPCompatibleDistributionNormal):
  """Tests for categorical distribution."""

  def setUp(self):
    super().setUp()
    # Reuses the inherited test battery with a categorical base distribution.
    self.base_dist = Categorical(logits=jnp.array([0., -1., 1.]))
    self.values = jnp.array([0, 1, 2])
    self.distrax_second_dist = Categorical(probs=jnp.array([0.2, 0.2, 0.6]))
    self.tfp_second_dist = tfd.Categorical(probs=jnp.array([0.2, 0.2, 0.6]))
class TFPCompatibleDistributionTransformed(TFPCompatibleDistributionNormal):
  """Tests for transformed distributions."""

  def setUp(self):
    super().setUp()
    # Log-normal: exp-transformed Normal base distributions.
    self.base_dist = Transformed(
        distribution=Normal(loc=0., scale=1.),
        bijector=tfb.Exp())
    self.values = jnp.array([0., 1., 2.])
    self.distrax_second_dist = Transformed(
        distribution=Normal(loc=0.5, scale=0.8),
        bijector=tfb.Exp())
    self.tfp_second_dist = tfd.TransformedDistribution(
        distribution=tfd.Normal(loc=0.5, scale=0.8),
        bijector=tfb.Exp())
class TfpMetaDistributionsWithWrappedBaseDistribution(parameterized.TestCase):
  """Tests for meta distributions (with wrappper base distr)."""

  def setUp(self):
    super().setUp()
    self._sample_shape = (np.int32(10),)
    self._seed = 42
    self._key = jax.random.PRNGKey(self._seed)
    self.assertion_fn = lambda x, y: np.testing.assert_allclose(x, y, rtol=RTOL)

  def test_with_independent(self):
    # A wrapped distrax distribution should slot into tfd.Independent and
    # produce the same log-probs as distrax's own Independent.
    base_dist = Normal(loc=jnp.array([0., 0.]), scale=jnp.array([1., 1.]))
    wrapped_dist = tfp_compatible_distribution(base_dist)
    meta_dist = tfd.Independent(wrapped_dist, 1, validate_args=True)
    samples = meta_dist.sample((), self._key)
    log_prob = meta_dist.log_prob(samples)
    distrax_meta_dist = Independent(base_dist, 1)
    expected_log_prob = distrax_meta_dist.log_prob(samples)
    self.assertion_fn(log_prob, expected_log_prob)

  def test_with_transformed_distribution(self):
    # Same check for tfd.TransformedDistribution vs distrax Transformed.
    base_dist = Normal(loc=jnp.array([0., 0.]), scale=jnp.array([1., 1.]))
    wrapped_dist = tfp_compatible_distribution(base_dist)
    meta_dist = tfd.TransformedDistribution(
        distribution=wrapped_dist, bijector=tfb.Exp(), validate_args=True)
    samples = meta_dist.sample(seed=self._key)
    log_prob = meta_dist.log_prob(samples)
    distrax_meta_dist = Transformed(
        distribution=base_dist, bijector=tfb.Exp())
    expected_log_prob = distrax_meta_dist.log_prob(samples)
    self.assertion_fn(log_prob, expected_log_prob)

  def test_with_sample(self):
    # Smoke test: tfd.Sample over a wrapped base distribution runs cleanly.
    base_dist = Normal(0., 1.)
    wrapped_dist = tfp_compatible_distribution(base_dist)
    meta_dist = tfd.Sample(
        wrapped_dist, sample_shape=[1, 3], validate_args=True)
    meta_dist.log_prob(meta_dist.sample(2, seed=self._key))

  def test_with_joint_distribution_named_auto_batched(self):
    # Smoke test: wrapped distributions inside a named auto-batched joint.
    def laplace(a, b):
      return tfp_compatible_distribution(Laplace(a * jnp.ones((2, 1)), b))
    meta_dist = tfd.JointDistributionNamedAutoBatched({
        'a': tfp_compatible_distribution(Uniform(2. * jnp.ones(3), 4.)),
        'b': tfp_compatible_distribution(Uniform(2. * jnp.ones(3), 4.)),
        'c': laplace}, validate_args=True)
    meta_dist.log_prob(meta_dist.sample(4, seed=self._key))

  def test_with_joint_distribution_coroutine_auto_batched(self):
    # Smoke test: wrapped distributions inside a coroutine-style joint.
    def model_fn():
      a = yield tfp_compatible_distribution(Uniform(2. * jnp.ones(3), 4.),
                                            name='a')
      b = yield tfp_compatible_distribution(Uniform(2. * jnp.ones(3), 4.),
                                            name='b')
      yield tfp_compatible_distribution(Laplace(a * jnp.ones((2, 1)), b),
                                        name='c')
    meta_dist = tfd.JointDistributionCoroutineAutoBatched(
        model_fn, validate_args=True)
    meta_dist.log_prob(meta_dist.sample(7, seed=self._key))
if __name__ == '__main__':
absltest.main()
|
import numpy
if __name__ == '__main__':
    n = 4
    # a and b are per-index bound vectors; x and y are n-by-n matrices.
    a = numpy.loadtxt('a.txt').tolist()
    b = numpy.loadtxt('b.txt').tolist()
    x = numpy.loadtxt('x.txt')
    y = numpy.loadtxt('y.txt')
    # trace(x @ y)
    print(numpy.dot(x, y).diagonal().sum())
    for k in range(n):
        # NOTE(review): this mixes matrix_power(x, 2) (the matrix square)
        # with x ** 2 (the element-wise square) — confirm which of the
        # two is intended for each constraint check.
        print(numpy.linalg.matrix_power(x, 2)[k, :].sum() <= a[k])
        print((x ** 2)[:, k].sum() <= b[k])
        print(numpy.linalg.matrix_power(y, 2)[:, k].sum() <= a[k])
        print((y ** 2)[k, :].sum() <= b[k])
|
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyscf.lib import misc
import numpy as np
import scipy.sparse as sparse
from ctypes import POINTER, c_double, c_int, c_int64, c_float, c_int
libsparsetools = misc.load_library("libsparsetools")
"""
Wrapper to sparse matrix operations from scipy implemented with openmp
"""
def csr_matvec(csr, x):
    """
    Matrix-vector product y = csr @ x using the OpenMP-parallel routines
    in libsparsetools (float32/float64 CSR matrices only).
    """
    if not sparse.isspmatrix_csr(csr):
        raise Exception("Matrix must be in csr format")
    nrow, ncol = csr.shape
    nnz = csr.data.shape[0]
    if x.size != ncol:
        print(x.size, ncol)
        raise ValueError("wrong dimension!")
    # Require an aligned array that owns its data before passing raw
    # pointers to the C library.
    xx = np.require(x, requirements=["A", "O"])
    if csr.dtype == np.float32:
        y = np.zeros((nrow), dtype=np.float32)
        libsparsetools.scsr_matvec(c_int(nrow), c_int(ncol), c_int(nnz),
                csr.indptr.ctypes.data_as(POINTER(c_int)),
                csr.indices.ctypes.data_as(POINTER(c_int)),
                csr.data.ctypes.data_as(POINTER(c_float)),
                xx.ctypes.data_as(POINTER(c_float)),
                y.ctypes.data_as(POINTER(c_float)))
    elif csr.dtype == np.float64:
        y = np.zeros((nrow), dtype=np.float64)
        libsparsetools.dcsr_matvec(c_int(nrow), c_int(ncol), c_int(nnz),
                csr.indptr.ctypes.data_as(POINTER(c_int)),
                csr.indices.ctypes.data_as(POINTER(c_int)),
                csr.data.ctypes.data_as(POINTER(c_double)),
                xx.ctypes.data_as(POINTER(c_double)),
                y.ctypes.data_as(POINTER(c_double)))
    else:
        raise ValueError("Not implemented")
    return y
def csc_matvec(csc, x):
    """
    Matrix vector multiplication
    using csc format

    Dispatches to the float32 (scsc_matvec) or float64 (dcsc_matvec)
    OpenMP routine in libsparsetools.
    """
    if not sparse.isspmatrix_csc(csc):
        raise Exception("Matrix must be in csc format")
    nrow, ncol = csc.shape
    nnz = csc.data.shape[0]
    if x.size != ncol:
        print(x.size, ncol)
        raise ValueError("wrong dimension!")
    # C-contiguous input required before handing a raw pointer to C.
    xx = np.require(x, requirements="C")
    if csc.dtype == np.float32:
        y = np.zeros((nrow), dtype=np.float32)
        libsparsetools.scsc_matvec(c_int(nrow), c_int(ncol), c_int(nnz),
                csc.indptr.ctypes.data_as(POINTER(c_int)),
                csc.indices.ctypes.data_as(POINTER(c_int)),
                csc.data.ctypes.data_as(POINTER(c_float)),
                xx.ctypes.data_as(POINTER(c_float)),
                y.ctypes.data_as(POINTER(c_float)))
    elif csc.dtype == np.float64:
        y = np.zeros((nrow), dtype=np.float64)
        libsparsetools.dcsc_matvec(c_int(nrow), c_int(ncol), c_int(nnz),
                csc.indptr.ctypes.data_as(POINTER(c_int)),
                csc.indices.ctypes.data_as(POINTER(c_int)),
                csc.data.ctypes.data_as(POINTER(c_double)),
                xx.ctypes.data_as(POINTER(c_double)),
                y.ctypes.data_as(POINTER(c_double)))
    else:
        raise ValueError("Not implemented")
    return y
def csc_matvecs(csc, B, transB = False, order="C"):
    """
    Matrix matrix multiplication
    using csc format

    Computes C = csc @ B (or csc @ B.T when transB is True) through the
    libsparsetools OpenMP routines; float32/float64 only.
    """
    if not sparse.isspmatrix_csc(csc):
        raise Exception("Matrix must be in csc format")
    if transB:
        # Here need to be careful, since using the transpose of B
        # will change from row major to col major and vice-versa
        mat = np.require(B.T, dtype=B.dtype, requirements=["A", "O", order])
    else:
        mat = np.require(B, dtype=B.dtype, requirements=["A", "O", order])
    nrow, ncol = csc.shape
    nvecs = mat.shape[1]
    if csc.dtype == np.float32:
        C = np.zeros((nrow, nvecs), dtype=np.float32, order=order)
        libsparsetools.scsc_matvecs(c_int(nrow), c_int(ncol), c_int(nvecs),
                csc.indptr.ctypes.data_as(POINTER(c_int)),
                csc.indices.ctypes.data_as(POINTER(c_int)),
                csc.data.ctypes.data_as(POINTER(c_float)),
                mat.ctypes.data_as(POINTER(c_float)),
                C.ctypes.data_as(POINTER(c_float)))
    elif csc.dtype == np.float64:
        C = np.zeros((nrow, nvecs), dtype=np.float64, order=order)
        libsparsetools.dcsc_matvecs(c_int(nrow), c_int(ncol), c_int(nvecs),
                csc.indptr.ctypes.data_as(POINTER(c_int)),
                csc.indices.ctypes.data_as(POINTER(c_int)),
                csc.data.ctypes.data_as(POINTER(c_double)),
                mat.ctypes.data_as(POINTER(c_double)),
                C.ctypes.data_as(POINTER(c_double)))
    else:
        raise ValueError("Not implemented")
    return C
|
from stmlearn.equivalencecheckers import SmartWmethodEquivalenceChecker, Sequential
from stmlearn.equivalencecheckers.experimental import GeneticEquivalenceChecker
from stmlearn.learners import TTTMealyLearner
from stmlearn.suls import MealyDotSUL
from stmlearn.util import MATExperiment, bfs, Logger
from stmlearn.suls._rerssoconnector import RERSSOConnector
from stmlearn.teachers import Teacher
from pathlib import Path
import argparse
# CLI: path to the Mealy machine .dot file, the equivalence-checker type,
# and an overall timeout in seconds.
parser = argparse.ArgumentParser(description='Run benchmark experiment')
parser.add_argument('path', type=Path)
parser.add_argument('type', type=str, choices=['wmethod', 'genetic'])
parser.add_argument('--timeout', type=int, default=60, help="Timeout in seconds")
args = parser.parse_args()
def create_wmethod(sul):
    """Build a MAT experiment that checks equivalence with the smart W-method."""
    checker = SmartWmethodEquivalenceChecker(
        horizon=7,
        stop_on={'invalid_input'},
        stop_on_startswith={'error'}
    )
    teacher = Teacher(sul=sul, eqc=checker)
    return MATExperiment(learner=TTTMealyLearner, teacher=teacher)
def create_genetic(sul):
    # Builds a MAT experiment whose equivalence checker chains a genetic
    # checker with the smart W-method checker.
    # NOTE(review): GeneticEquivalenceChecker is passed as a class while
    # SmartWmethodEquivalenceChecker is instantiated — confirm that
    # Sequential accepts an uninstantiated checker here.
    return MATExperiment(
        learner=TTTMealyLearner,
        teacher=Teacher(
            sul=sul,
            eqc=Sequential(
                GeneticEquivalenceChecker,
                SmartWmethodEquivalenceChecker(
                    horizon=7,
                    stop_on={'invalid_input'},
                    stop_on_startswith={'error'}
                )
            )
        )
    )
# Resolve CLI arguments. Renamed `type` -> `eqc_type` so the builtin
# `type` is not shadowed in this module.
path = args.path
eqc_type = args.type
problem = path.stem
print("loading:", path)
sul = MealyDotSUL(path)
# Build the experiment for the requested equivalence-checker type.
if eqc_type == 'wmethod':
    experiment = create_wmethod(sul)
elif eqc_type == 'genetic':
    experiment = create_genetic(sul)
else:
    raise Exception("Invalid type")
experiment.set_timeout(args.timeout)
# Set up the logging
experiment.enable_logging(f'logs/{eqc_type}', problem,
                          log_interval=10,
                          write_on_change={'STATE_COUNT'})
Logger().write()
# Set up the counterexample tracking for the genetic eq checker
experiment.enable_ct_tracking()
experiment.run()
Logger().write()
|
#!/usr/bin/env python
# Key of the Google Doc this app syncs copy text from — presumably
# consumed by a copytext/spreadsheet loader elsewhere; verify against
# the deploy configuration.
COPY_GOOGLE_DOC_KEY = '1APeMsNoGiUcf9o7i8y2_5hI687DDexq4ymuQQK_mO3k'
|
from dataclasses import dataclass
from uff.uff_io import Serializable
from uff.utils import PRIMITIVE_INTS
@dataclass
class TimedEvent(Serializable):
    """
    UFF class to describe a TR/RX event transmitted at a given moment in time.
    Attributes:
        event (int): Index of the uff.event within the list of unique_events in the uff.channel_data structure
        time_offset (float): Time offset relative to start of the sequence repetition (frame) [s]
    """
    event: int
    time_offset: float

    def serialize(self):
        # Guard: `event` must already be a primitive integer index (not an
        # event object) before delegating to the base serializer.
        assert isinstance(
            self.event, PRIMITIVE_INTS
        ), 'TimedEvent.event should be index of the uff.event.'
        return super().serialize()

    @staticmethod
    def str_name():
        # NOTE(review): returns 'sequence' rather than 'timed_event' —
        # presumably the container key used by the UFF reader/writer;
        # confirm against the serialization layer.
        return 'sequence'
|
import angr
from angr.sim_type import *
from angr.state_plugins import SimActionObject
from cle.backends.externs import ExternObject
from nativedroid.analyses.resolver.annotation import *
from nativedroid.analyses.resolver.jni.java_type import *
from nativedroid.analyses.resolver.jni.jni_helper import *
from nativedroid.protobuf.jnsaf_grpc_pb2 import *
__author__ = "Xingwei Lin, Fengguo Wei"
__copyright__ = "Copyright 2018, The Argus-SAF Project"
__license__ = "Apache v2.0"
# Module-level logger for the JNI native-interface SimProcedures.
nativedroid_logger = logging.getLogger('nativedroid.jni_native_interface')
nativedroid_logger.setLevel(logging.INFO)
# Names of all 229 functions in the JNI native interface (JNIEnv function
# table, reserved slots excluded), in table order.
_JNI_FUNCTION_NAMES = (
    'GetVersion', 'DefineClass', 'FindClass', 'FromReflectedMethod',
    'FromReflectedField', 'ToReflectedMethod', 'GetSuperClass',
    'IsAssignableFrom', 'ToReflectedField', 'Throw', 'ThrowNew',
    'ExceptionOccurred', 'ExceptionDescribe', 'ExceptionClear', 'FatalError',
    'PushLocalFrame', 'PopLocalFrame', 'NewGlobalRef', 'DeleteGlobalRef',
    'DeleteLocalRef', 'IsSameObject', 'NewLocalRef', 'EnsureLocalCapacity',
    'AllocObject', 'NewObject', 'NewObjectV', 'NewObjectA', 'GetObjectClass',
    'IsInstanceOf', 'GetMethodID',
    # Instance method calls
    'CallObjectMethod', 'CallObjectMethodV', 'CallObjectMethodA',
    'CallBooleanMethod', 'CallBooleanMethodV', 'CallBooleanMethodA',
    'CallByteMethod', 'CallByteMethodV', 'CallByteMethodA',
    'CallCharMethod', 'CallCharMethodV', 'CallCharMethodA',
    'CallShortMethod', 'CallShortMethodV', 'CallShortMethodA',
    'CallIntMethod', 'CallIntMethodV', 'CallIntMethodA',
    'CallLongMethod', 'CallLongMethodV', 'CallLongMethodA',
    'CallFloatMethod', 'CallFloatMethodV', 'CallFloatMethodA',
    'CallDoubleMethod', 'CallDoubleMethodV', 'CallDoubleMethodA',
    'CallVoidMethod', 'CallVoidMethodV', 'CallVoidMethodA',
    # Non-virtual instance method calls
    'CallNonvirtualObjectMethod', 'CallNonvirtualObjectMethodV',
    'CallNonvirtualObjectMethodA',
    'CallNonvirtualBooleanMethod', 'CallNonvirtualBooleanMethodV',
    'CallNonvirtualBooleanMethodA',
    'CallNonvirtualByteMethod', 'CallNonvirtualByteMethodV',
    'CallNonvirtualByteMethodA',
    'CallNonvirtualCharMethod', 'CallNonvirtualCharMethodV',
    'CallNonvirtualCharMethodA',
    'CallNonvirtualShortMethod', 'CallNonvirtualShortMethodV',
    'CallNonvirtualShortMethodA',
    'CallNonvirtualIntMethod', 'CallNonvirtualIntMethodV',
    'CallNonvirtualIntMethodA',
    'CallNonvirtualLongMethod', 'CallNonvirtualLongMethodV',
    'CallNonvirtualLongMethodA',
    'CallNonvirtualFloatMethod', 'CallNonvirtualFloatMethodV',
    'CallNonvirtualFloatMethodA',
    'CallNonvirtualDoubleMethod', 'CallNonvirtualDoubleMethodV',
    'CallNonvirtualDoubleMethodA',
    'CallNonvirtualVoidMethod', 'CallNonvirtualVoidMethodV',
    'CallNonvirtualVoidMethodA',
    # Instance fields
    'GetFieldID',
    'GetObjectField', 'GetBooleanField', 'GetByteField', 'GetCharField',
    'GetShortField', 'GetIntField', 'GetLongField', 'GetFloatField',
    'GetDoubleField',
    'SetObjectField', 'SetBooleanField', 'SetByteField', 'SetCharField',
    'SetShortField', 'SetIntField', 'SetLongField', 'SetFloatField',
    'SetDoubleField',
    # Static method calls
    'GetStaticMethodID',
    'CallStaticObjectMethod', 'CallStaticObjectMethodV',
    'CallStaticObjectMethodA',
    'CallStaticBooleanMethod', 'CallStaticBooleanMethodV',
    'CallStaticBooleanMethodA',
    'CallStaticByteMethod', 'CallStaticByteMethodV', 'CallStaticByteMethodA',
    'CallStaticCharMethod', 'CallStaticCharMethodV', 'CallStaticCharMethodA',
    'CallStaticShortMethod', 'CallStaticShortMethodV',
    'CallStaticShortMethodA',
    'CallStaticIntMethod', 'CallStaticIntMethodV', 'CallStaticIntMethodA',
    'CallStaticLongMethod', 'CallStaticLongMethodV', 'CallStaticLongMethodA',
    'CallStaticFloatMethod', 'CallStaticFloatMethodV',
    'CallStaticFloatMethodA',
    'CallStaticDoubleMethod', 'CallStaticDoubleMethodV',
    'CallStaticDoubleMethodA',
    'CallStaticVoidMethod', 'CallStaticVoidMethodV', 'CallStaticVoidMethodA',
    # Static fields
    'GetStaticFieldID',
    'GetStaticObjectField', 'GetStaticBooleanField', 'GetStaticByteField',
    'GetStaticCharField', 'GetStaticShortField', 'GetStaticIntField',
    'GetStaticLongField', 'GetStaticFloatField', 'GetStaticDoubleField',
    'SetStaticObjectField', 'SetStaticBooleanField', 'SetStaticByteField',
    'SetStaticCharField', 'SetStaticShortField', 'SetStaticIntField',
    'SetStaticLongField', 'SetStaticFloatField', 'SetStaticDoubleField',
    # Strings
    'NewString', 'GetStringLength', 'GetStringChars', 'ReleaseStringChars',
    'NewStringUTF', 'GetStringUTFLength', 'GetStringUTFChars',
    'ReleaseStringUTFChars',
    # Arrays
    'GetArrayLength', 'NewObjectArray', 'GetObjectArrayElement',
    'SetObjectArrayElement',
    'NewBooleanArray', 'NewByteArray', 'NewCharArray', 'NewShortArray',
    'NewIntArray', 'NewLongArray', 'NewFloatArray', 'NewDoubleArray',
    'GetBooleanArrayElements', 'GetByteArrayElements', 'GetCharArrayElements',
    'GetShortArrayElements', 'GetIntArrayElements', 'GetLongArrayElements',
    'GetFloatArrayElements', 'GetDoubleArrayElements',
    'ReleaseBooleanArrayElements', 'ReleaseByteArrayElements',
    'ReleaseCharArrayElements', 'ReleaseShortArrayElements',
    'ReleaseIntArrayElements', 'ReleaseLongArrayElements',
    'ReleaseFloatArrayElements', 'ReleaseDoubleArrayElements',
    'GetBooleanArrayRegion', 'GetByteArrayRegion', 'GetCharArrayRegion',
    'GetShortArrayRegion', 'GetIntArrayRegion', 'GetLongArrayRegion',
    'GetFloatArrayRegion', 'GetDoubleArrayRegion',
    'SetBooleanArrayRegion', 'SetByteArrayRegion', 'SetCharArrayRegion',
    'SetShortArrayRegion', 'SetIntArrayRegion', 'SetLongArrayRegion',
    'SetFloatArrayRegion', 'SetDoubleArrayRegion',
    # Miscellaneous
    'RegisterNatives', 'UnregisterNatives', 'MonitorEnter', 'MonitorExit',
    'GetJavaVM', 'GetStringRegion', 'GetStringUTFRegion',
    'GetPrimitiveArrayCritical', 'ReleasePrimitiveArrayCritical',
    'GetStringCritical', 'ReleaseStringCritical', 'NewWeakGlobalRef',
    'DeleteWeakGlobalRef', 'ExceptionCheck', 'NewDirectByteBuffer',
    'GetDirectBufferAddress', 'GetDirectBufferCapacity', 'GetObjectRefType',
)
# Per-function usage counters, all starting at zero (insertion order is
# preserved, matching the original literal).
jni_native_interface_origin_usage = dict.fromkeys(_JNI_FUNCTION_NAMES, 0)
def icc_handle(analysis_center, class_name, method_name, return_annotation, simproc):
    """Handle Inter-Component Communication (ICC) related reflection calls.

    Inspects Intent/Context methods invoked through JNI reflection and updates
    the return-value annotation with ICC and taint information. For Context
    start-calls it also reports taint-carrying arguments to the JNSaf server.

    :param analysis_center: AnalysisCenter providing the JNSaf client and signature.
    :param class_name: JNI-style class name of the receiver (e.g. 'android/content/Intent').
    :param method_name: name of the reflected method being called.
    :param return_annotation: annotation to attach to the call's return value.
    :param simproc: Reflection call SimProcedure (used to read call arguments).
    :return: the (possibly replaced) return annotation.
    """
    if class_name == 'android/content/Intent':
        if method_name == 'setClassName':
            # arg(4) carries the explicit target component name string.
            for annotation in simproc.arg(4).annotations:
                if isinstance(annotation, JstringAnnotation):
                    return_annotation.icc_info['is_icc'] = True
                    return_annotation.icc_info['component_name'] = annotation.value
        elif method_name == 'putExtra':
            # putExtra returns the receiver Intent itself, so propagate the
            # receiver's annotation (arg(1)) instead of a fresh one.
            return_annotation = simproc.arg(1).annotations[0]
            extra_key = None
            extra_value = None
            for annotation in simproc.arg(3).annotations:
                if isinstance(annotation, JstringAnnotation):
                    extra_key = annotation.value
            for annotation in simproc.arg(4).annotations:
                if isinstance(annotation, JobjectAnnotation):
                    extra_value = copy.deepcopy(annotation)
            return_annotation.icc_info['extra'] = {extra_key: extra_value}
        elif method_name == 'getStringExtra':
            for annotation in simproc.arg(3).annotations:
                if isinstance(annotation, JstringAnnotation):
                    # Data read from an Intent extra is treated as an ICC taint source.
                    return_annotation.taint_info['is_taint'] = True
                    return_annotation.taint_info['taint_type'] = ['_SOURCE_', '_API_']
                    return_annotation.taint_info['taint_info'] = ['SENSITIVE_INFO']
                    return_annotation.taint_info['source_kind'] = 'icc_source'
                    return_annotation.icc_info['is_icc'] = True
                    return_annotation.icc_info['extra'] = {annotation.value: None}
    elif class_name == 'android/content/Context':
        if method_name == 'startActivity' or method_name == 'sendBroadcast' or method_name == 'startService':
            for annotation in simproc.arg(3).annotations:
                if isinstance(annotation, JobjectAnnotation):
                    # TODO(fengguow) For now we only handle explicit type.
                    component_name = annotation.icc_info['component_name']
                    extra = annotation.icc_info['extra']
                    source_args = set()
                    # Fix: 'extra' may be None, and its values may be None (the
                    # getStringExtra branch stores {key: None}); guard before
                    # dereferencing .source to avoid AttributeError/TypeError.
                    for _, extra_annotation in (extra or {}).items():
                        if extra_annotation is not None and extra_annotation.source.startswith('arg'):
                            arg_index = re.split('arg|_', extra_annotation.source)[1]
                            source_args.add(int(arg_index))
                    if source_args:
                        jnsaf_client = analysis_center.get_jnsaf_client()
                        if jnsaf_client:
                            request = RegisterICCRequest(apk_digest=jnsaf_client.apk_digest,
                                                         component_name=jnsaf_client.component_name,
                                                         target_component_name=component_name,
                                                         signature=analysis_center.get_signature(),
                                                         is_source=False,
                                                         source_args=source_args)
                            response = jnsaf_client.RegisterICC(request)
                            if not response.status:
                                nativedroid_logger.error('RegisterICC failed for request: %s' % request)
                    nativedroid_logger.info("Start Component: %s, extra: %s", component_name, extra)
    return return_annotation
class NativeDroidSimProcedure(angr.SimProcedure):
    """Base class for all NativeDroid JNI interface SimProcedures.

    Extends angr.SimProcedure with a reference to the shared analysis center so
    subclasses can reach the JNSaf client and the source/sink manager.
    """
    def __init__(
            self, analysis_center, project=None, cc=None, symbolic_return=None,
            returns=None, is_syscall=None, is_stub=False,
            num_args=None, display_name=None, library_name=None,
            is_function=None, **kwargs
    ):
        # Forward everything except analysis_center to angr.SimProcedure
        # (positional order must match the angr constructor signature).
        super(NativeDroidSimProcedure, self).__init__(project, cc, symbolic_return, returns,
                                                      is_syscall, is_stub, num_args, display_name,
                                                      library_name, is_function, **kwargs)
        # Shared AnalysisCenter used by subclasses for taint/ICC queries.
        self._analysis_center = analysis_center
class GetVersion(NativeDroidSimProcedure):
    """JNI GetVersion: model the call by returning a fresh jint value."""

    def run(self, env):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JInt(self.project).ptr, self.project.arch.bits)

    def __repr__(self):
        return 'GetVersion'
class DefineClass(NativeDroidSimProcedure):
    """JNI DefineClass: model the call by returning a fresh jclass reference."""

    def run(self, env, name, loader, buf, bufLen):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JClass(self.project).ptr, self.project.arch.bits)

    def __repr__(self):
        return 'DefineClass'
class FindClass(NativeDroidSimProcedure):
    """JNI FindClass: read the class-name string and annotate the new jclass."""

    def run(self, env, name):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # Read the NUL-terminated class name out of simulated memory.
        strlen_call = self.inline_call(angr.SIM_PROCEDURES['libc']['strlen'], name)
        class_name = self.state.solver.eval(self.state.memory.load(name, strlen_call.ret_expr), cast_to=str)
        nativedroid_logger.info('Class: %s', class_name)
        jclass_ref = claripy.BVV(JClass(self.project).ptr, self.project.arch.bits)
        # Tag the returned reference with the resolved class type.
        return jclass_ref.annotate(JclassAnnotation(class_type=class_name, fields_info=list()))

    def __repr__(self):
        return 'FindClass'
class FromReflectedMethod(NativeDroidSimProcedure):
    """JNI FromReflectedMethod: model by returning a fresh jmethodID."""
    def run(self, env, method):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jmethod_id = JMethodID(self.project)
        return_value = claripy.BVV(jmethod_id.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'FromReflectedMethod'
class FromReflectedField(NativeDroidSimProcedure):
    """JNI FromReflectedField: model by returning a fresh jfieldID."""
    def run(self, env, field):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jfield_id = JFieldID(self.project)
        return_value = claripy.BVV(jfield_id.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'FromReflectedField'
class ToReflectedMethod(NativeDroidSimProcedure):
    """JNI ToReflectedMethod: model by returning a fresh jobject."""
    def run(self, env, cls, method_id, is_static):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'ToReflectedMethod'
class GetSuperClass(NativeDroidSimProcedure):
    """JNI GetSuperclass: model by returning a fresh jclass."""
    def run(self, env, clazz):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jclass = JClass(self.project)
        return_value = claripy.BVV(jclass.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetSuperClass'
class IsAssignableFrom(NativeDroidSimProcedure):
    """JNI IsAssignableFrom: model by returning a fresh jboolean."""
    def run(self, env, clazz1, clazz2):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jboolean = JBoolean(self.project)
        return_value = claripy.BVV(jboolean.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'IsAssignableFrom'
class ToReflectedField(NativeDroidSimProcedure):
    """JNI ToReflectedField: model by returning a fresh jobject."""
    def run(self, env, cls, fieldID, isStatic):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'ToReflectedField'
class Throw(NativeDroidSimProcedure):
    """JNI Throw: model by returning a fresh jint status code."""
    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jint = JInt(self.project)
        return_value = claripy.BVV(jint.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'Throw'
class ThrowNew(NativeDroidSimProcedure):
    """JNI ThrowNew: model by returning a fresh jint status code."""
    def run(self, env, clazz, message):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jint = JInt(self.project)
        return_value = claripy.BVV(jint.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'ThrowNew'
class ExceptionOccurred(NativeDroidSimProcedure):
    """JNI ExceptionOccurred: model by returning a fresh jthrowable."""
    def run(self, env):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # NOTE: "JThrowlable" (sic) is the project's type-class name.
        jthrowable = JThrowlable(self.project)
        return_value = claripy.BVV(jthrowable.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'ExceptionOccurred'
class ExceptionDescribe(NativeDroidSimProcedure):
    """JNI ExceptionDescribe: void call, only logged (no return value)."""
    def run(self, env):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ExceptionDescribe'
class ExceptionClear(NativeDroidSimProcedure):
    """JNI ExceptionClear: void call, only logged (no return value)."""
    def run(self, env):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ExceptionClear'
class FatalError(NativeDroidSimProcedure):
    """JNI FatalError: void call, only logged (no return value)."""
    def run(self, env, msg):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'FatalError'
class PushLocalFrame(NativeDroidSimProcedure):
    """JNI PushLocalFrame: model by returning a fresh jint status code."""
    def run(self, env, capacity):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jint = JInt(self.project)
        return_value = claripy.BVV(jint.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'PushLocalFrame'
class PopLocalFrame(NativeDroidSimProcedure):
    """JNI PopLocalFrame: model by returning a fresh jobject."""
    def run(self, env, result):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'PopLocalFrame'
class NewGlobalRef(NativeDroidSimProcedure):
    """JNI NewGlobalRef: model by returning a fresh jobject reference."""
    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'NewGlobalRef'
class DeleteGlobalRef(NativeDroidSimProcedure):
    """JNI DeleteGlobalRef: void call, only logged (no return value)."""
    def run(self, env, globalRef):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'DeleteGlobalRef'
class DeleteLocalRef(NativeDroidSimProcedure):
    """JNI DeleteLocalRef: void call, only logged (no return value)."""
    def run(self, env, localRef):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'DeleteLocalRef'
class IsSameObject(NativeDroidSimProcedure):
    """JNI IsSameObject: model by returning a fresh jboolean."""
    def run(self, env, ref1, ref2):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jboolean = JBoolean(self.project)
        return_value = claripy.BVV(jboolean.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'IsSameObject'
class NewLocalRef(NativeDroidSimProcedure):
    """JNI NewLocalRef: model by returning a fresh jobject reference."""
    def run(self, env, ref):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'NewLocalRef'
class EnsureLocalCapacity(NativeDroidSimProcedure):
    """JNI EnsureLocalCapacity: model by returning a fresh jint status code."""
    def run(self, env, capacity):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jint = JInt(self.project)
        return_value = claripy.BVV(jint.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'EnsureLocalCapacity'
class AllocObject(NativeDroidSimProcedure):
    """JNI AllocObject: model by returning a fresh jobject."""
    def run(self, env, clazz):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'AllocObject'
class NewObject(NativeDroidSimProcedure):
    """JNI NewObject: return a jobject annotated with the class's type."""
    def run(self, env, clazz, methodID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        # Carry the class type from the jclass annotation onto the new object.
        for annotation in clazz.annotations:
            if isinstance(annotation, JclassAnnotation):
                return_value = return_value.annotate(
                    JobjectAnnotation(source='from_native', obj_type=annotation.class_type, fields_info=list()))
        return return_value
    def __repr__(self):
        return 'NewObject'
class NewObjectV(NewObject):
    """Varargs-list variant of NewObject (same modeling)."""
    def __repr__(self):
        return 'NewObjectV'
class NewObjectA(NewObject):
    """jvalue-array variant of NewObject (same modeling)."""
    def __repr__(self):
        return 'NewObjectA'
class GetObjectClass(NativeDroidSimProcedure):
    """JNI GetObjectClass: return a jclass annotated with the object's type."""

    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        result = claripy.BVV(JClass(self.project).ptr, self.project.arch.bits)
        # Propagate the object's recorded type onto the returned class reference.
        for anno in obj.annotations:
            if isinstance(anno, JobjectAnnotation):
                result = result.annotate(
                    JclassAnnotation(class_type=anno.obj_type, fields_info=list()))
        return result

    def __repr__(self):
        return 'GetObjectClass'
class IsInstanceOf(NativeDroidSimProcedure):
    """JNI IsInstanceOf: model by returning a fresh jboolean.

    Fix: the original allocated a JInt for the local named ``jboolean``;
    use JBoolean for consistency with IsAssignableFrom/IsSameObject, which
    also model jboolean-returning JNI functions.
    """
    def run(self, env, obj, clazz):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jboolean = JBoolean(self.project)
        return_value = claripy.BVV(jboolean.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'IsInstanceOf'
class GetMethodID(NativeDroidSimProcedure):
    """JNI GetMethodID: resolve name/signature strings and annotate the jmethodID.

    The returned jmethodID carries a JmethodIDAnnotation (class, method name,
    signature) that the Call*Method SimProcedures later consume.
    """
    def run(self, env, clazz, name, sig):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # Read the NUL-terminated method name and signature from memory.
        strlen_simproc = angr.SIM_PROCEDURES['libc']['strlen']
        name_strlen = self.inline_call(strlen_simproc, name)
        name_str = self.state.solver.eval(self.state.memory.load(name, name_strlen.ret_expr), cast_to=str)
        signature_strlen = self.inline_call(strlen_simproc, sig)
        signature_str = self.state.solver.eval(self.state.memory.load(sig, signature_strlen.ret_expr),
                                               cast_to=str)
        jmethod_id = JMethodID(self.project)
        return_value = claripy.BVV(jmethod_id.ptr, self.project.arch.bits)
        # Class name comes from the jclass annotation; stays None if absent.
        class_name = None
        for annotation in clazz.annotations:
            if isinstance(annotation, JclassAnnotation):
                class_name = annotation.class_type
        nativedroid_logger.info('CLASS: %s', class_name)
        nativedroid_logger.info('METHOD: %s', name_str)
        nativedroid_logger.info('SIGN: %s', signature_str)
        return_value = return_value.annotate(
            JmethodIDAnnotation(class_name=class_name, method_name=name_str, method_signature=signature_str))
        return return_value
    def __repr__(self):
        return 'GetMethodID'
class CallObjectMethod(NativeDroidSimProcedure):
    """JNI Call<Type>Method family: model a reflected Java method call.

    Resolves the jmethodID annotation, queries the source/sink manager (and
    optionally the JNSaf server) for taint rules, propagates taint between
    arguments, receiver and return value, and delegates ICC handling to
    icc_handle(). All typed Call*/CallStatic*/CallNonvirtual* variants reuse
    this run() implementation via subclassing.
    """
    @staticmethod
    def get_method_taint_attribute(ssm, method_full_signature):
        """Classify a method as taint source or sink via the source/sink manager.

        :return: ['_SOURCE_', tags, kind], ['_SINK_', positions, kind], or None.
        """
        if method_full_signature:
            source_tags = ssm.get_source_tags(method_full_signature)
            source_kind = ssm.get_source_kind(method_full_signature)
            if source_tags:
                return ['_SOURCE_', '|'.join(source_tags), source_kind]
            tags = ssm.get_sink_tags(method_full_signature)
            if tags:
                poss = tags[0]
                # Empty position set means every argument is sink-relevant.
                info = 'ALL' if not poss else '|'.join(map(str, poss))
                sink_kind = ssm.get_sink_kind(method_full_signature)
                return ['_SINK_', info, sink_kind]
        return None
    def get_sink_args(self, method_name, num_args, idx, tags):
        """Collect the call arguments that flow into a sink.

        :param method_name: resolved Java method name (special-cases SMS APIs).
        :param num_args: argument count derived from the method signature.
        :param idx: offset of the jmethodID argument (1 static / 2 instance).
        :param tags: sink position spec ('ALL' or '|'-joined indices).
        """
        process_args = []
        if method_name == 'sendTextMessage':
            # Only the string payload argument is sink-relevant here.
            for i in range(1, num_args + 1):
                arg = self.arg(idx + i)
                if isinstance(arg, SimActionObject):
                    for anno in arg.annotations:
                        if isinstance(anno, JstringAnnotation):
                            process_args.append(arg)
                            break
        elif method_name == 'sendDataMessage':
            # Only the byte-array payload argument is sink-relevant here.
            for i in range(1, num_args + 1):
                arg = self.arg(idx + i)
                if isinstance(arg, SimActionObject):
                    for anno in arg.annotations:
                        if isinstance(anno, JbyteArrayAnnotation):
                            process_args.append(arg)
                            break
        elif tags == 'ALL':
            # Instance call: the receiver (arg 1) is sink-relevant too.
            if idx == 2:
                process_args.append(self.arg(1))
            for i in range(1, num_args + 1):
                arg = self.arg(idx + i)
                if isinstance(arg, SimActionObject):
                    process_args.append(arg)
        else:
            # Explicit argument positions, e.g. '1.2|3'.
            poss = tags.split('|')
            for pos in poss:
                try:
                    index = int(pos.split('.')[0])
                    arg = self.arg(index + idx)
                    if isinstance(arg, SimActionObject):
                        process_args.append(arg)
                except ValueError:
                    nativedroid_logger.error('Cannot parse int: %s', pos)
        return process_args
    def run(self):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # Static calls take (env, clazz, methodID, ...), instance calls
        # (env, obj, methodID, ...) — so the methodID index differs.
        # CallStaticTypeMethod is defined later in this module; the forward
        # reference is fine because it is only evaluated at run() time.
        idx = 1 if isinstance(self, CallStaticTypeMethod) else 2
        method_id = self.arg(idx)
        assert isinstance(method_id, SimActionObject)
        for annotation in method_id.annotations:
            if isinstance(annotation, JmethodIDAnnotation):
                class_name = annotation.class_name
                method_name = annotation.method_name
                method_signature = annotation.method_signature
                method_full_signature = get_method_full_signature(class_name, method_name, method_signature)
                num_args = count_arg_nums(method_signature)
                jnsaf_client = self._analysis_center.get_jnsaf_client()
                ssm = self._analysis_center.get_source_sink_manager()
                heap_summary = None
                if jnsaf_client:
                    # Ask the JNSaf server for a summary of the called method;
                    # taint rules are merged into the source/sink manager.
                    request = GetSummaryRequest(
                        apk_digest=jnsaf_client.apk_digest,
                        component_name=jnsaf_client.component_name,
                        signature=method_full_signature, gen=True,
                        depth=jnsaf_client.depth)
                    response = jnsaf_client.GetSummary(request)
                    if response.taint_result:
                        ssm.parse_lines(response.taint_result)
                    if response.heap_summary:
                        heap_summary = response.heap_summary
                # Build a typed return value and annotation from the signature.
                jni_return_type = get_jni_return_type(method_signature)
                java_return_type = get_java_return_type(method_signature)
                typ = get_type(self.project, java_return_type)
                typ_size = get_type_size(self.project, java_return_type)
                return_value = claripy.BVV(typ.ptr, typ_size)
                return_annotation = construct_annotation(jni_return_type, 'from_reflection_call')
                method_taint_attribute = CallObjectMethod.get_method_taint_attribute(ssm, method_full_signature)
                if method_taint_attribute:
                    if method_taint_attribute[0] == '_SOURCE_':
                        # Source method: taint the return value.
                        return_annotation.taint_info['is_taint'] = True
                        return_annotation.taint_info['taint_type'] = [method_taint_attribute[0], '_API_']
                        return_annotation.taint_info['taint_info'] = [method_taint_attribute[1]]
                        return_annotation.taint_info['source_kind'] = method_taint_attribute[2]
                    elif method_taint_attribute[0] == '_SINK_':
                        # Sink method: mark tainted arguments that reach it.
                        process_args = self.get_sink_args(method_name, num_args, idx, method_taint_attribute[1])
                        for parg in process_args:
                            for anno in parg.annotations:
                                if isinstance(anno, JobjectAnnotation):
                                    if anno.taint_info['is_taint']:
                                        anno.taint_info['sink_kind'] = method_taint_attribute[2]
                                        if '_SOURCE_' in anno.taint_info['taint_type'][0]:
                                            if '_API_' in anno.taint_info['taint_type'][1]:
                                                anno.taint_info['taint_type'] = ['_SINK_', '_SOURCE_']
                                                anno.taint_info['taint_info'] = \
                                                    [method_taint_attribute[1]]
                                            elif '_ARGUMENT_' in anno.taint_info['taint_type'][1]:
                                                anno.taint_info['taint_type'] = \
                                                    ['_SINK_', anno.taint_info['taint_type'][1]]
                                                anno.taint_info['taint_info'] = \
                                                    [method_taint_attribute[1]]
                # For instance calls, propagate taint from the receiver object.
                obj = None if idx == 1 else self.arg(1)
                if obj and isinstance(obj, SimActionObject):
                    for anno in obj.annotations:
                        if isinstance(anno, JobjectAnnotation):
                            if anno.taint_info['is_taint']:
                                if '_ARGUMENT_' in anno.taint_info['taint_type'][1] and heap_summary:
                                    # Use the server heap summary to map a
                                    # field access on the receiver to the
                                    # returned value's annotation.
                                    for rule in heap_summary.rules:
                                        if rule.binary_rule and rule.binary_rule.rule_lhs.ret:
                                            if rule.binary_rule.rule_rhs.this:
                                                this = rule.binary_rule.rule_rhs.this
                                                if this.heap and this.heap.heap_access:
                                                    access = this.heap.heap_access[0]
                                                    if access.field_access:
                                                        field_anno = construct_annotation(
                                                            jni_return_type, 'from_reflection_call')
                                                        field_anno.field_info['is_field'] = True
                                                        field_anno.field_info['field_name'] = \
                                                            access.field_access.field_name
                                                        field_anno.field_info['base_annotation'] = anno
                                                        field_anno.taint_info['is_taint'] = True
                                                        field_anno.taint_info['taint_type'] = \
                                                            [anno.taint_info['taint_type'][0], '_ARGUMENT_FIELD_']
                                                        field_anno.taint_info['taint_info'] = \
                                                            anno.taint_info['taint_info']
                                                        field_anno.taint_info['source_kind'] = \
                                                            anno.taint_info['source_kind']
                                                        field_anno.taint_info['sink_kind'] = \
                                                            anno.taint_info['sink_kind']
                                                        return_annotation = field_anno
                            else:
                                # Untainted receiver: inherit taint from any
                                # tainted field recorded on it.
                                if anno.fields_info:
                                    for field_info in anno.fields_info:
                                        if field_info.is_tainted:
                                            return_annotation.taint_info = copy.deepcopy(
                                                field_info.taint_info)
                # ICC handling may replace the return annotation entirely.
                return_annotation = icc_handle(self._analysis_center, class_name, method_name, return_annotation, self)
                return_value = return_value.append_annotation(return_annotation)
                return return_value
        # No jmethodID annotation: fall back to a plain jobject return.
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'CallObjectMethod'
# Typed Call<Type>Method variants. All inherit run() from CallObjectMethod;
# the V (va_list) and A (jvalue array) suffixes are modeled identically and
# only differ in __repr__.
class CallTypeMethod(CallObjectMethod):
    def __repr__(self):
        return 'CallTypeMethod'
class CallObjectMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallObjectMethodV'
class CallObjectMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallObjectMethodA'
class CallBooleanMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallBooleanMethod'
class CallBooleanMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallBooleanMethodV'
class CallBooleanMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallBooleanMethodA'
class CallByteMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallByteMethod'
class CallByteMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallByteMethodV'
class CallByteMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallByteMethodA'
class CallCharMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallCharMethod'
class CallCharMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallCharMethodV'
class CallCharMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallCharMethodA'
class CallShortMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallShortMethod'
class CallShortMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallShortMethodV'
class CallShortMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallShortMethodA'
class CallIntMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallIntMethod'
class CallIntMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallIntMethodV'
class CallIntMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallIntMethodA'
class CallLongMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallLongMethod'
class CallLongMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallLongMethodV'
class CallLongMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallLongMethodA'
class CallFloatMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallFloatMethod'
class CallFloatMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallFloatMethodV'
class CallFloatMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallFloatMethodA'
class CallDoubleMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallDoubleMethod'
class CallDoubleMethodV(CallTypeMethod):
    def __repr__(self):
        return 'CallDoubleMethodV'
class CallDoubleMethodA(CallTypeMethod):
    def __repr__(self):
        return 'CallDoubleMethodA'
class CallVoidMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallVoidMethod'
class CallVoidMethodV(CallVoidMethod):
    def __repr__(self):
        return 'CallVoidMethodV'
class CallVoidMethodA(CallVoidMethod):
    def __repr__(self):
        return 'CallVoidMethodA'
# CallNonvirtual<Type>Method variants, likewise modeled via CallTypeMethod.
class CallNonvirtualTypeMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallNonvirtual<Type>Method'
class CallNonvirtualObjectMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualObjectMethod'
class CallNonvirtualObjectMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualObjectMethodV'
class CallNonvirtualObjectMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualObjectMethodA'
class CallNonvirtualBooleanMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualBooleanMethod'
class CallNonvirtualBooleanMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualBooleanMethodV'
class CallNonvirtualBooleanMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualBooleanMethodA'
class CallNonvirtualByteMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualByteMethod'
class CallNonvirtualByteMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualByteMethodV'
class CallNonvirtualByteMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualByteMethodA'
class CallNonvirtualCharMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualCharMethod'
class CallNonvirtualCharMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualCharMethodV'
class CallNonvirtualCharMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualCharMethodA'
class CallNonvirtualShortMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualShortMethod'
class CallNonvirtualShortMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualShortMethodV'
class CallNonvirtualShortMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualShortMethodA'
class CallNonvirtualIntMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualIntMethod'
class CallNonvirtualIntMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualIntMethodV'
class CallNonvirtualIntMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualIntMethodA'
class CallNonvirtualLongMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualLongMethod'
class CallNonvirtualLongMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualLongMethodV'
class CallNonvirtualLongMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualLongMethodA'
class CallNonvirtualFloatMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualFloatMethod'
class CallNonvirtualFloatMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualFloatMethodV'
class CallNonvirtualFloatMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualFloatMethodA'
class CallNonvirtualDoubleMethod(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualDoubleMethod'
class CallNonvirtualDoubleMethodV(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualDoubleMethodV'
class CallNonvirtualDoubleMethodA(CallNonvirtualTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualDoubleMethodA'
class CallNonvirtualVoidMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallNonvirtualVoidMethod'
class CallNonvirtualVoidMethodV(CallNonvirtualVoidMethod):
    def __repr__(self):
        return 'CallNonvirtualVoidMethodV'
class CallNonvirtualVoidMethodA(CallNonvirtualVoidMethod):
    def __repr__(self):
        return 'CallNonvirtualVoidMethodA'
class GetFieldID(NativeDroidSimProcedure):
    """JNI GetFieldID: resolve name/signature strings and annotate the jfieldID.

    The returned jfieldID carries a JfieldIDAnnotation (class, field name,
    signature) consumed by the Get/Set<Type>Field SimProcedures.
    """
    def run(self, env, clazz, name, sig):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # Class name comes from the jclass annotation; stays None if absent.
        class_name = None
        for annotation in clazz.annotations:
            if isinstance(annotation, JclassAnnotation):
                class_name = annotation.class_type
        # Read the NUL-terminated field name and signature from memory.
        strlen_simproc = angr.SIM_PROCEDURES['libc']['strlen']
        name_strlen = self.inline_call(strlen_simproc, name)
        name_str = self.state.solver.eval(self.state.memory.load(name, name_strlen.ret_expr), cast_to=str)
        signature_strlen = self.inline_call(strlen_simproc, sig)
        sig_str = self.state.solver.eval(self.state.memory.load(sig, signature_strlen.ret_expr),
                                         cast_to=str)
        nativedroid_logger.info('CLASS: %s', class_name)
        nativedroid_logger.info('FIELD: %s', name_str)
        nativedroid_logger.info('SIGN: %s', sig_str)
        jfield_id = JFieldID(self.project)
        return_value = claripy.BVV(jfield_id.ptr, self.project.arch.bits)
        return_value = return_value.annotate(
            JfieldIDAnnotation(class_name=class_name, field_name=name_str, field_signature=sig_str))
        return return_value
    def __repr__(self):
        return 'GetFieldID'
class GetObjectField(NativeDroidSimProcedure):
    """JNI GetObjectField: read a field and propagate/derive its annotation.

    If the object already records the field, its annotation is deep-copied to
    the return value; otherwise a fresh annotation is constructed, inheriting
    taint when the object originates from a native argument.
    """
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        for annotation in fieldID.annotations:
            if isinstance(annotation, JfieldIDAnnotation):
                field_name = annotation.field_name
                field_signature = annotation.field_signature
                # Build a typed return value from the field signature.
                jni_return_type = get_jni_return_type(field_signature)
                java_return_type = get_java_return_type(field_signature)
                typ = get_type(self.project, java_return_type)
                typ_size = get_type_size(self.project, java_return_type)
                return_value = claripy.BVV(typ.ptr, typ_size)
                for anno in obj.annotations:
                    if isinstance(anno, JobjectAnnotation):
                        # Look for a previously recorded matching field.
                        field_exist = False
                        field_index = 0
                        for index, field_info in enumerate(anno.fields_info):
                            if field_info.field_info['field_name'] == field_name and \
                                    field_info.obj_type == jni_return_type:
                                field_exist = True
                                field_index = index
                        if field_exist:
                            # Reuse the stored field annotation (deep copy so
                            # later mutations don't alias the object's record).
                            return_annotation = copy.deepcopy(anno.fields_info[field_index])
                            return_annotation.heap = anno.heap + '.' + field_name if anno.heap else None
                            return_annotation.field_info = {'is_field': True, 'field_name': field_name,
                                                            'base_annotation': anno}
                            return_value = return_value.append_annotation(return_annotation)
                        else:
                            # Unknown field: synthesize an annotation; fields of
                            # native-argument objects are treated as taint sources.
                            jni_return_type = get_jni_return_type(field_signature)
                            return_annotation = construct_annotation(jni_return_type, anno.source)
                            return_annotation.heap = anno.heap + '.' + field_name if anno.heap else None
                            return_annotation.field_info = {'is_field': True, 'field_name': field_name,
                                                            'base_annotation': anno}
                            if anno.source.startswith('arg'):
                                return_annotation.taint_info['is_taint'] = True
                                return_annotation.taint_info['taint_type'] = ['_SOURCE_', '_ARGUMENT_FIELD_']
                                return_annotation.taint_info['taint_info'] = ['SENSITIVE_INFO']
                                return_annotation.taint_info['source_kind'] = anno.taint_info['source_kind']
                                return_annotation.taint_info['sink_kind'] = anno.taint_info['sink_kind']
                                return_value = return_value.append_annotation(return_annotation)
                return return_value
        # No jfieldID annotation: fall back to a plain jobject return.
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetObjectField'
class GetBooleanField(NativeDroidSimProcedure):
    """JNI GetBooleanField: model by returning a fresh jboolean."""
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jboolean = JBoolean(self.project)
        return_value = claripy.BVV(jboolean.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetBooleanField'
class GetByteField(NativeDroidSimProcedure):
    """JNI GetByteField: model by returning a fresh value.

    NOTE(review): byte/char/short fields are all modeled with JInt here —
    presumably there is no dedicated J<type> class for them; confirm.
    """
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jbyte = JInt(self.project)
        return_value = claripy.BVV(jbyte.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetByteField'
class GetCharField(NativeDroidSimProcedure):
    """JNI GetCharField: model by returning a fresh value (JInt stand-in)."""
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jchar = JInt(self.project)
        return_value = claripy.BVV(jchar.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetCharField'
class GetShortField(NativeDroidSimProcedure):
    """JNI GetShortField: model by returning a fresh value (JInt stand-in)."""
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jshort = JInt(self.project)
        return_value = claripy.BVV(jshort.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetShortField'
class GetIntField(NativeDroidSimProcedure):
    """JNI GetIntField: model by returning a fresh jint."""
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jint = JInt(self.project)
        return_value = claripy.BVV(jint.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetIntField'
class GetLongField(NativeDroidSimProcedure):
    """JNI GetLongField: model by returning a fresh jlong."""
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jlong = JLong(self.project)
        return_value = claripy.BVV(jlong.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetLongField'
class GetFloatField(NativeDroidSimProcedure):
    """JNI GetFloatField: model by returning a fresh jfloat."""
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jfloat = JFloat(self.project)
        return_value = claripy.BVV(jfloat.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetFloatField'
class GetDoubleField(NativeDroidSimProcedure):
    """JNI GetDoubleField: model by returning a fresh jdouble."""
    def run(self, env, obj, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jdouble = JDouble(self.project)
        return_value = claripy.BVV(jdouble.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetDoubleField'
class SetTypeField(NativeDroidSimProcedure):
    """Generic JNI Set<Type>Field: void call, only logged (no state update)."""
    def run(self, env, obj, fieldID, value):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'Set<Type>Field'
class SetObjectField(NativeDroidSimProcedure):
    """JNI SetObjectField: record the stored value as a field on the object.

    Copies the value's annotation into the object annotation's fields_info so
    later GetObjectField calls can retrieve it.
    """
    def run(self, env, obj, fieldID, value):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # NOTE(review): field_annotation/id_annotation stay None when the value
        # or fieldID carries no matching annotation, in which case the code
        # below would raise AttributeError — presumably inputs are always
        # annotated; confirm.
        field_annotation = None
        for annotation in value.annotations:
            if isinstance(annotation, JobjectAnnotation) or isinstance(annotation, PrimitiveTypeAnnotation):
                field_annotation = copy.deepcopy(annotation)
        id_annotation = None
        for annotation in fieldID.annotations:
            if isinstance(annotation, JfieldIDAnnotation):
                id_annotation = annotation
        for annotation in obj.annotations:
            if isinstance(annotation, JobjectAnnotation):
                field_exist = False
                # NOTE(review): sibling code (GetObjectField, SetIntField) reads
                # field_info.field_info['field_name']; the direct attribute
                # access here looks inconsistent — verify against the
                # annotation class definitions.
                for field_info in annotation.fields_info:
                    if field_info.field_name == \
                            id_annotation.field_name and field_info.field_signature == id_annotation.field_signature:
                        field_exist = True
                if field_exist:
                    # TODO need refactor logic
                    pass
                else:
                    # First write to this field: record it on the object.
                    field_annotation.field_info['is_field'] = True
                    field_annotation.field_info['field_name'] = id_annotation.field_name
                    field_annotation.field_info['base_annotation'] = annotation
                    annotation.fields_info.append(field_annotation)
    def __repr__(self):
        return 'SetObjectField'
# Primitive Set<Type>Field variants: most reuse SetTypeField's no-op run();
# SetIntField additionally records the stored int on the object annotation.
class SetBooleanField(SetTypeField):
    def __repr__(self):
        return 'SetBooleanField'
class SetByteField(SetTypeField):
    def __repr__(self):
        return 'SetByteField'
class SetCharField(SetTypeField):
    def __repr__(self):
        return 'SetCharField'
class SetShortField(SetTypeField):
    def __repr__(self):
        return 'SetShortField'
class SetIntField(SetTypeField):
    """JNI SetIntField: record the stored int value as a field annotation."""
    def run(self, env, obj, fieldID, value):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        field_annotation = None
        for annotation in value.annotations:
            if isinstance(annotation, JintAnnotation):
                field_annotation = annotation
        if field_annotation is None:
            # Unannotated concrete value: wrap its AST constant directly.
            field_annotation = JintAnnotation(source='from_native', value=value.ast.args[0])
        id_annotation = None
        for annotation in fieldID.annotations:
            if isinstance(annotation, JfieldIDAnnotation):
                id_annotation = annotation
        for annotation in obj.annotations:
            if isinstance(annotation, JobjectAnnotation):
                field_exist = False
                for index, field_info in enumerate(annotation.fields_info):
                    if field_info.field_info['field_name'] == \
                            id_annotation.field_name and \
                            field_info.field_info['field_signature'] == id_annotation.field_signature:
                        field_exist = True
                if field_exist:
                    # TODO need refactor logic
                    pass
                else:
                    # First write to this field: record it on the object.
                    field_annotation.heap = annotation.heap + '.' + id_annotation.field_name \
                        if annotation.heap else None
                    field_annotation.field_info['is_field'] = True
                    field_annotation.field_info['field_name'] = id_annotation.field_name
                    field_annotation.field_info['base_annotation'] = annotation
                    annotation.fields_info.append(field_annotation)
    def __repr__(self):
        return 'SetIntField'
# Remaining primitive setters: no-op models inherited from SetTypeField.
class SetLongField(SetTypeField):
    def __repr__(self):
        return 'SetLongField'
class SetFloatField(SetTypeField):
    def __repr__(self):
        return 'SetFloatField'
class SetDoubleField(SetTypeField):
    def __repr__(self):
        return 'SetDoubleField'
# Static method lookup behaves like the instance-method version.
class GetStaticMethodID(GetMethodID):
    def __repr__(self):
        return 'GetStaticMethodID'
# Base for the CallStatic<Type>Method family; behaviour comes entirely
# from CallTypeMethod.
class CallStaticTypeMethod(CallTypeMethod):
    def __repr__(self):
        return 'CallStatic<Type>Method'
# CallStatic<Type>Method / ...V / ...A variants for every JNI return type.
# All behaviour is inherited from CallStaticTypeMethod (ultimately
# CallTypeMethod); each subclass only specialises its repr, which is what
# appears in the SimProcedure log line.
class CallStaticObjectMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticObjectMethod'
class CallStaticObjectMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticObjectMethodV'
class CallStaticObjectMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticObjectMethodA'
class CallStaticBooleanMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticBooleanMethod'
class CallStaticBooleanMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticBooleanMethodV'
class CallStaticBooleanMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticBooleanMethodA'
class CallStaticByteMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticByteMethod'
class CallStaticByteMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticByteMethodV'
class CallStaticByteMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticByteMethodA'
class CallStaticCharMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticCharMethod'
class CallStaticCharMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticCharMethodV'
class CallStaticCharMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticCharMethodA'
class CallStaticShortMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticShortMethod'
class CallStaticShortMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticShortMethodV'
class CallStaticShortMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticShortMethodA'
class CallStaticIntMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticIntMethod'
class CallStaticIntMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticIntMethodV'
class CallStaticIntMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticIntMethodA'
class CallStaticLongMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticLongMethod'
class CallStaticLongMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticLongMethodV'
class CallStaticLongMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticLongMethodA'
class CallStaticFloatMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticFloatMethod'
class CallStaticFloatMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticFloatMethodV'
class CallStaticFloatMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticFloatMethodA'
class CallStaticDoubleMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticDoubleMethod'
class CallStaticDoubleMethodV(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticDoubleMethodV'
class CallStaticDoubleMethodA(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticDoubleMethodA'
class CallStaticVoidMethod(CallStaticTypeMethod):
    def __repr__(self):
        return 'CallStaticVoidMethod'
# Note: the V/A void variants derive from CallStaticVoidMethod, not from
# CallStaticTypeMethod directly (same effective behaviour).
class CallStaticVoidMethodV(CallStaticVoidMethod):
    def __repr__(self):
        return 'CallStaticVoidMethodV'
class CallStaticVoidMethodA(CallStaticVoidMethod):
    def __repr__(self):
        return 'CallStaticVoidMethodA'
# Static field-ID lookup reuses the instance GetFieldID model.
class GetStaticFieldID(GetFieldID):
    def __repr__(self):
        return 'GetStaticFieldID'
class GetStaticObjectField(GetObjectField):
    """JNI GetStaticObjectField: model reading a static object field.

    If the class annotation already records the field, its stored
    annotation is replayed onto the return value; otherwise a fresh
    annotation is constructed and marked as a taint SOURCE (class fields
    are treated as potentially sensitive input).
    """
    def run(self, env, clazz, fieldID):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        for annotation in fieldID.annotations:
            if isinstance(annotation, JfieldIDAnnotation):
                # Field identity and Java-level type from the jfieldID.
                field_name = annotation.field_name
                field_signature = annotation.field_signature
                java_return_type = get_java_return_type(field_signature)
                typ = get_type(self.project, java_return_type)
                typ_size = get_type_size(self.project, java_return_type)
                return_value = claripy.BVV(typ.ptr, typ_size)
                for anno in clazz.annotations:
                    if isinstance(anno, JclassAnnotation):
                        # Look for a previously recorded store of this field
                        # (matched on name and Java type).
                        field_exist = False
                        field_index = 0
                        for index, field_info in enumerate(anno.fields_info):
                            if field_info.field_info['field_name'] == field_name and \
                                    field_info.obj_type == java_return_type:
                                field_exist = True
                                field_index = index
                        if field_exist:
                            # Replay the recorded annotation onto the result.
                            return_value = return_value.append_annotation(
                                copy.deepcopy(anno.fields_info[field_index]))
                        else:
                            # Unknown field: synthesise an annotation and mark
                            # it as a sensitive taint source from the class.
                            jni_return_type = get_jni_return_type(field_signature)
                            return_annotation = construct_annotation(jni_return_type, 'from_class')
                            # return_annotation.source = 'from_class'
                            # return_annotation.obj_type = jni_return_type
                            return_annotation.field_info['is_field'] = True
                            return_annotation.field_info['field_name'] = field_name
                            return_annotation.field_info['base_annotation'] = anno
                            return_annotation.taint_info['is_taint'] = True
                            return_annotation.taint_info['taint_type'] = ['_SOURCE_', '_CLASS_FIELD_']
                            return_annotation.taint_info['taint_info'] = ['SENSITIVE_INFO']
                            return_annotation.taint_info['source_kind'] = 'api_source'
                            return_value = return_value.append_annotation(return_annotation)
                # Returns after processing the first JfieldIDAnnotation.
                return return_value
        # No JfieldIDAnnotation found: fall back to an unannotated jobject.
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetStaticObjectField'
# Static primitive-field getters: each reuses the corresponding instance
# getter's run() and only specialises its repr.
class GetStaticBooleanField(GetBooleanField):
    def __repr__(self):
        return 'GetStaticBooleanField'
class GetStaticByteField(GetByteField):
    def __repr__(self):
        return 'GetStaticByteField'
class GetStaticCharField(GetCharField):
    def __repr__(self):
        return 'GetStaticCharField'
class GetStaticShortField(GetShortField):
    def __repr__(self):
        return 'GetStaticShortField'
class GetStaticIntField(GetIntField):
    def __repr__(self):
        return 'GetStaticIntField'
class GetStaticLongField(GetLongField):
    def __repr__(self):
        return 'GetStaticLongField'
class GetStaticFloatField(GetFloatField):
    def __repr__(self):
        return 'GetStaticFloatField'
class GetStaticDoubleField(GetDoubleField):
    def __repr__(self):
        return 'GetStaticDoubleField'
# Static field setters: each reuses the corresponding instance setter's
# run() (annotation tracking for object/int, no-op otherwise) and only
# specialises its repr.
class SetStaticObjectField(SetObjectField):
    def __repr__(self):
        return 'SetStaticObjectField'
class SetStaticBooleanField(SetBooleanField):
    def __repr__(self):
        return 'SetStaticBooleanField'
class SetStaticByteField(SetByteField):
    def __repr__(self):
        return 'SetStaticByteField'
class SetStaticCharField(SetCharField):
    def __repr__(self):
        return 'SetStaticCharField'
class SetStaticShortField(SetShortField):
    def __repr__(self):
        return 'SetStaticShortField'
class SetStaticIntField(SetIntField):
    def __repr__(self):
        return 'SetStaticIntField'
class SetStaticLongField(SetLongField):
    def __repr__(self):
        return 'SetStaticLongField'
class SetStaticFloatField(SetFloatField):
    def __repr__(self):
        return 'SetStaticFloatField'
class SetStaticDoubleField(SetDoubleField):
    def __repr__(self):
        return 'SetStaticDoubleField'
class NewString(NativeDroidSimProcedure):
    """JNI NewString: return a fresh pointer-sized jstring value."""
    def run(self, env, unicodeChars, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JString(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewString'
class GetStringLength(NativeDroidSimProcedure):
    """JNI GetStringLength: return a fresh pointer-sized jsize value."""
    def run(self, env, string):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JSize(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'GetStringLength'
class GetStringChars(NativeDroidSimProcedure):
    """JNI GetStringChars: modelled as the identity on the jstring, so
    any annotations on it propagate to the character pointer."""
    def run(self, env, string, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return string
    def __repr__(self):
        return 'GetStringChars'
class ReleaseStringChars(NativeDroidSimProcedure):
    """JNI ReleaseStringChars: no-op model (log only)."""
    def run(self, env, string, chars):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseStringChars'
class NewStringUTF(NativeDroidSimProcedure):
    """JNI NewStringUTF: build a jstring from native bytes and annotate it
    with the concrete string content.

    If the content contains one of the sensitive markers below (path
    separators, ELF magic, query characters), the result is additionally
    tagged as a taint SOURCE.
    """
    # Substrings that mark a native-built string as potentially sensitive.
    _SENSITIVE_STRINGS = [
        '/', '.', '?', 'ELF'
    ]
    def run(self, env, mybytes):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # Measure the C string via the libc strlen SimProcedure, then read
        # and concretise its bytes. NOTE(review): cast_to=str is Python-2
        # era angr API; on py3/newer angr this would need cast_to=bytes.
        strlen_simproc = angr.SIM_PROCEDURES['libc']['strlen']
        name_strlen = self.inline_call(strlen_simproc, mybytes)
        string_arg = self.state.solver.eval(self.state.memory.load(mybytes, name_strlen.ret_expr), cast_to=str)
        nativedroid_logger.info('String: %s', string_arg)
        jstring = JString(self.project)
        annotation = JstringAnnotation(source='from_native', value=string_arg)
        # Tag as taint source on the first sensitive marker found.
        for s in self._SENSITIVE_STRINGS:
            if s in string_arg:
                annotation.taint_info['is_taint'] = True
                annotation.taint_info['taint_type'] = ['_SOURCE_', '_STMT_']
                annotation.taint_info['taint_info'] = ['SENSITIVE_INFO']
                annotation.taint_info['source_kind'] = 'string_source'
                break
        return_value = claripy.BVV(jstring.ptr, self.project.arch.bits)
        return_value = return_value.annotate(annotation)
        return return_value
    def __repr__(self):
        return 'NewStringUTF'
class GetStringUTFLength(NativeDroidSimProcedure):
    """JNI GetStringUTFLength: return a fresh pointer-sized jsize value."""
    def run(self, env, string):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JSize(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'GetStringUTFLength'
class GetStringUTFChars(NativeDroidSimProcedure):
    """JNI GetStringUTFChars: modelled as the identity on the jstring, so
    its annotations flow to the returned character pointer."""
    def run(self, env, string, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return string
    def __repr__(self):
        return 'GetStringUTFChars'
class ReleaseStringUTFChars(NativeDroidSimProcedure):
    """JNI ReleaseStringUTFChars: no-op model (log only)."""
    def run(self, env, string, utf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseStringUTFChars'
class GetArrayLength(NativeDroidSimProcedure):
    """JNI GetArrayLength: return a fresh pointer-sized jsize value."""
    def run(self, env, array):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JSize(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'GetArrayLength'
class NewObjectArray(NativeDroidSimProcedure):
    """JNI NewObjectArray: return a fresh pointer-sized jobjectArray."""
    def run(self, env, length, elementClass, initialElement):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JObjectArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewObjectArray'
class GetObjectArrayElement(NativeDroidSimProcedure):
    """JNI GetObjectArrayElement: return a fresh jobject annotated as
    element *index* of the given array.

    The element annotation is derived from the array's first annotation;
    if the array came in as a method argument (source 'arg...') the
    element is marked as a taint SOURCE.
    """
    def run(self, env, array, index):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jobject = JObject(self.project)
        return_value = claripy.BVV(jobject.ptr, self.project.arch.bits)
        # Concrete element index from the index argument's AST.
        element_index = index.ast.args[0]
        # NOTE(review): assumes the array BV carries at least one
        # annotation; an unannotated array would raise IndexError here.
        array_annotation = array.annotations[0]
        # Element type is the array type minus one '[]' suffix.
        element_type = array_annotation.obj_type.split('[]')[0]
        element_annotation = construct_annotation(element_type, array_annotation.source)
        element_annotation.heap = array_annotation.heap + '[]' if array_annotation.heap else None
        element_annotation.array_info['is_element'] = True
        element_annotation.array_info['element_index'] = element_index
        element_annotation.array_info['base_annotation'] = copy.deepcopy(array_annotation)
        # Arrays passed in as JNI arguments are treated as sensitive input.
        if array.annotations[0].source.startswith('arg'):
            element_annotation.taint_info['is_taint'] = True
            element_annotation.taint_info['taint_type'] = ['_SOURCE_', '_ARGUMENT_ELEMENT_']
            element_annotation.taint_info['taint_info'] = ['SENSITIVE_INFO']
            element_annotation.taint_info['source_kind'] = array_annotation.taint_info['source_kind']
            element_annotation.taint_info['sink_kind'] = array_annotation.taint_info['sink_kind']
        return_value = return_value.annotate(element_annotation)
        return return_value
    def __repr__(self):
        return 'GetObjectArrayElement'
class SetObjectArrayElement(NativeDroidSimProcedure):
    """JNI SetObjectArrayElement: no-op model (log only)."""
    def run(self, env, array, index, value):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetObjectArrayElement'
# New<PrimitiveType>Array family: each returns a fresh pointer-sized
# array handle of the matching project type; no contents are modelled.
class NewBooleanArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JBooleanArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewBooleanArray'
class NewByteArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JByteArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewByteArray'
class NewCharArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JCharArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewCharArray'
class NewShortArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JShortArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewShortArray'
class NewIntArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JIntArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewIntArray'
class NewLongArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JLongArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewLongArray'
class NewFloatArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JFloatArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewFloatArray'
class NewDoubleArray(NativeDroidSimProcedure):
    def run(self, env, length):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JDoubleArray(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewDoubleArray'
# Get<PrimitiveType>ArrayElements family.
# NOTE(review): every variant except GetByteArrayElements returns a fresh
# JSize value rather than (a pointer derived from) the array; per the JNI
# spec these return a pointer to the element storage. Looks like a
# copy-paste from the length procedures — confirm whether returning the
# array (as the byte variant does) was intended.
class GetBooleanArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jsize = JSize(self.project)
        return_value = claripy.BVV(jsize.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetBooleanArrayElements'
class GetByteArrayElements(NativeDroidSimProcedure):
    # Identity model: annotations on the array flow to the element pointer.
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return array
    def __repr__(self):
        return 'GetByteArrayElements'
class GetCharArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jsize = JSize(self.project)
        return_value = claripy.BVV(jsize.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetCharArrayElements'
class GetShortArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jsize = JSize(self.project)
        return_value = claripy.BVV(jsize.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetShortArrayElements'
class GetIntArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jsize = JSize(self.project)
        return_value = claripy.BVV(jsize.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetIntArrayElements'
class GetLongArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jsize = JSize(self.project)
        return_value = claripy.BVV(jsize.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetLongArrayElements'
class GetFloatArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jsize = JSize(self.project)
        return_value = claripy.BVV(jsize.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetFloatArrayElements'
class GetDoubleArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        jsize = JSize(self.project)
        return_value = claripy.BVV(jsize.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'GetDoubleArrayElements'
# Release<PrimitiveType>ArrayElements family: all no-op models (log only);
# no write-back of element buffers is simulated.
class ReleaseBooleanArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseBooleanArrayElements'
class ReleaseByteArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseByteArrayElements'
class ReleaseCharArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseCharArrayElements'
class ReleaseShortArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseShortArrayElements'
class ReleaseIntArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseIntArrayElements'
class ReleaseLongArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseLongArrayElements'
class ReleaseFloatArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseFloatArrayElements'
class ReleaseDoubleArrayElements(NativeDroidSimProcedure):
    def run(self, env, array, elems, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseDoubleArrayElements'
# Get<PrimitiveType>ArrayRegion family: no-op models (log only); the copy
# into the caller-supplied buffer is not simulated.
class GetBooleanArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetBooleanArrayRegion'
class GetByteArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetByteArrayRegion'
class GetCharArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetCharArrayRegion'
class GetShortArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetShortArrayRegion'
class GetIntArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetIntArrayRegion'
class GetLongArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetLongArrayRegion'
class GetFloatArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetFloatArrayRegion'
class GetDoubleArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetDoubleArrayRegion'
# Set<PrimitiveType>ArrayRegion family begins here; same no-op modelling.
class SetBooleanArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetBooleanArrayRegion'
class SetByteArrayRegion(NativeDroidSimProcedure):
    """JNI SetByteArrayRegion: no-op model (log only)."""
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        # Fix: the original returned 'SetBooleanArrayRegion' (copy-paste
        # from the preceding class), which mislabelled this procedure in
        # every log line.
        return 'SetByteArrayRegion'
# Remaining Set<PrimitiveType>ArrayRegion variants: no-op models (log
# only); the buffer-to-array copy is not simulated.
class SetCharArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetCharArrayRegion'
class SetShortArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetShortArrayRegion'
class SetIntArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetIntArrayRegion'
class SetLongArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetLongArrayRegion'
class SetFloatArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetFloatArrayRegion'
class SetDoubleArrayRegion(NativeDroidSimProcedure):
    def run(self, env, array, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'SetDoubleArrayRegion'
class RegisterNatives(NativeDroidSimProcedure):
    """JNI RegisterNatives: record dynamically registered native methods.

    Walks the JNINativeMethod array (three pointer-sized fields per entry:
    name, signature, fnPtr — hence the i * 3 * arch.bytes stride), reads
    the concrete name/signature strings, and stores "name:signature" ->
    function address into the analysis center's dynamic register map.
    Returns a fresh jint as the JNI status value.
    """
    def run(self, env, clazz, methods, nMethods):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        # Concrete method count from the nMethods argument's AST.
        method_num = nMethods.ast.args[0]
        for i in range(method_num):
            # Typed view over entry i of the JNINativeMethod array.
            method = self.state.mem[methods + i * 3 * self.state.arch.bytes].JNINativeMethod
            name = method.name.deref.string.concrete
            signature = method.signature.deref.string.concrete
            fn_ptr = method.fnPtr.resolved.args[0]
            dynamic_map = self._analysis_center.get_dynamic_register_map()
            # NOTE: long() is Python-2-only; this module targets py2-era angr.
            dynamic_map['%s:%s' % (name, signature)] = long(fn_ptr)
        jint = JInt(self.project)
        return_value = claripy.BVV(jint.ptr, self.project.arch.bits)
        return return_value
    def __repr__(self):
        return 'RegisterNatives'
class UnregisterNatives(NativeDroidSimProcedure):
    """JNI UnregisterNatives: modelled as returning a fresh jint status."""
    def run(self, env, clazz):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JInt(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'UnregisterNatives'
class MonitorEnter(NativeDroidSimProcedure):
    """JNI MonitorEnter: modelled as returning a fresh jint status."""
    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JInt(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'MonitorEnter'
class MonitorExit(NativeDroidSimProcedure):
    """JNI MonitorExit: modelled as returning a fresh jint status."""
    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JInt(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'MonitorExit'
class GetJavaVM(NativeDroidSimProcedure):
    """JNI GetJavaVM: modelled as returning a fresh jint status; the VM
    out-parameter is not written."""
    def run(self, env, vm):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JInt(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'GetJavaVM'
# String/array region and critical-section helpers modelled as no-ops
# (log only). NOTE(review): GetPrimitiveArrayCritical returns None even
# though the real JNI call returns an element pointer — confirm callers
# never consume its return value.
class GetStringRegion(NativeDroidSimProcedure):
    def run(self, env, string, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetStringRegion'
class GetStringUTFRegion(NativeDroidSimProcedure):
    def run(self, env, string, start, length, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetStringUTFRegion'
class GetPrimitiveArrayCritical(NativeDroidSimProcedure):
    def run(self, env, array, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetPrimitiveArrayCritical'
class ReleasePrimitiveArrayCritical(NativeDroidSimProcedure):
    def run(self, env, array, carray, mode):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleasePrimitiveArrayCritical'
class GetStringCritical(NativeDroidSimProcedure):
    """JNI GetStringCritical: return a fresh pointer-sized jobject."""
    def run(self, env, string, isCopy):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JObject(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'GetStringCritical'
class ReleaseStringCritical(NativeDroidSimProcedure):
    """JNI ReleaseStringCritical: no-op model (log only)."""
    def run(self, env, string, carray):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'ReleaseStringCritical'
class NewWeakGlobalRef(NativeDroidSimProcedure):
    """JNI NewWeakGlobalRef: return a fresh pointer-sized jweak."""
    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JWeak(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewWeakGlobalRef'
class DeleteWeakGlobalRef(NativeDroidSimProcedure):
    """JNI DeleteWeakGlobalRef: no-op model (log only)."""
    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'DeleteWeakGlobalRef'
class ExceptionCheck(NativeDroidSimProcedure):
    """JNI ExceptionCheck: return a fresh pointer-sized jboolean."""
    def run(self, env):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JBoolean(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'ExceptionCheck'
class NewDirectByteBuffer(NativeDroidSimProcedure):
    """JNI NewDirectByteBuffer: return a fresh pointer-sized jobject."""
    def run(self, env, address, capacity):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JObject(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'NewDirectByteBuffer'
class GetDirectBufferAddress(NativeDroidSimProcedure):
    """JNI GetDirectBufferAddress: no-op model (log only, returns None)."""
    def run(self, env, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
    def __repr__(self):
        return 'GetDirectBufferAddress'
class GetDirectBufferCapacity(NativeDroidSimProcedure):
    """JNI GetDirectBufferCapacity: return a fresh pointer-sized jlong."""
    def run(self, env, buf):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JLong(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'GetDirectBufferCapacity'
class GetObjectRefType(NativeDroidSimProcedure):
    """JNI GetObjectRefType: return a fresh jobjectRefType value."""
    def run(self, env, obj):
        nativedroid_logger.info('JNINativeInterface SimProcedure: %s', self)
        return claripy.BVV(JObjectRefType(self.project).ptr, self.project.arch.bits)
    def __repr__(self):
        return 'GetObjectRefType'
class JNINativeInterface(ExternObject):
JNINativeInterface_index_to_name = {
0: "reserved0",
1: "reserved1",
2: "reserved2",
3: "reserved3",
4: "GetVersion",
5: "DefineClass",
6: "FindClass",
7: "FromReflectedMethod",
8: "FromReflectedField",
9: "ToReflectedMethod",
10: "GetSuperclass",
11: "IsAssignableFrom",
12: "ToReflectedField",
13: "Throw",
14: "ThrowNew",
15: "ExceptionOccurred",
16: "ExceptionDescribe",
17: "ExceptionClear",
18: "FatalError",
19: "PushLocalFrame",
20: "PopLocalFrame",
21: "NewGlobalRef",
22: "DeleteGlobalRef",
23: "DeleteLocalRef",
24: "IsSameObject",
25: "NewLocalRef",
26: "EnsureLocalCapacity",
27: "AllocObject",
28: "NewObject",
29: "NewObjectV",
30: "NewObjectA",
31: "GetObjectClass",
32: "IsInstanceOf",
33: "GetMethodID",
34: "CallObjectMethod",
35: "CallObjectMethodV",
36: "CallObjectMethodA",
37: "CallBooleanMethod",
38: "CallBooleanMethodV",
39: "CallBooleanMethodA",
40: "CallByteMethod",
41: "CallByteMethodV",
42: "CallByteMethodA",
43: "CallCharMethod",
44: "CallCharMethodV",
45: "CallCharMethodA",
46: "CallShortMethod",
47: "CallShortMethodV",
48: "CallShortMethodA",
49: "CallIntMethod",
50: "CallIntMethodV",
51: "CallIntMethodA",
52: "CallLongMethod",
53: "CallLongMethodV",
54: "CallLongMethodA",
55: "CallFloatMethod",
56: "CallFloatMethodV",
57: "CallFloatMethodA",
58: "CallDoubleMethod",
59: "CallDoubleMethodV",
60: "CallDoubleMethodA",
61: "CallVoidMethod",
62: "CallVoidMethodV",
63: "CallVoidMethodA",
64: "CallNonvirtualObjectMethod",
65: "CallNonvirtualObjectMethodV",
66: "CallNonvirtualObjectMethodA",
67: "CallNonvirtualBooleanMethod",
68: "CallNonvirtualBooleanMethodV",
69: "CallNonvirtualBooleanMethodA",
70: "CallNonvirtualByteMethod",
71: "CallNonvirtualByteMethodV",
72: "CallNonvirtualByteMethodA",
73: "CallNonvirtualCharMethod",
74: "CallNonvirtualCharMethodV",
75: "CallNonvirtualCharMethodA",
76: "CallNonvirtualShortMethod",
77: "CallNonvirtualShortMethodV",
78: "CallNonvirtualShortMethodA",
79: "CallNonvirtualIntMethod",
80: "CallNonvirtualIntMethodV",
81: "CallNonvirtualIntMethodA",
82: "CallNonvirtualLongMethod",
83: "CallNonvirtualLongMethodV",
84: "CallNonvirtualLongMethodA",
85: "CallNonvirtualFloatMethod",
86: "CallNonvirtualFloatMethodV",
87: "CallNonvirtualFloatMethodA",
88: "CallNonvirtualDoubleMethod",
89: "CallNonvirtualDoubleMethodV",
90: "CallNonvirtualDoubleMethodA",
91: "CallNonvirtualVoidMethod",
92: "CallNonvirtualVoidMethodV",
93: "CallNonvirtualVoidMethodA",
94: "GetFieldID",
95: "GetObjectField",
96: "GetBooleanField",
97: "GetByteField",
98: "GetCharField",
99: "GetShortField",
100: "GetIntField",
101: "GetLongField",
102: "GetFloatField",
103: "GetDoubleField",
104: "SetObjectField",
105: "SetBooleanField",
106: "SetByteField",
107: "SetCharField",
108: "SetShortField",
109: "SetIntField",
110: "SetLongField",
111: "SetFloatField",
112: "SetDoubleField",
113: "GetStaticMethodID",
114: "CallStaticObjectMethod",
115: "CallStaticObjectMethodV",
116: "CallStaticObjectMethodA",
117: "CallStaticBooleanMethod",
118: "CallStaticBooleanMethodV",
119: "CallStaticBooleanMethodA",
120: "CallStaticByteMethod",
121: "CallStaticByteMethodV",
122: "CallStaticByteMethodA",
123: "CallStaticCharMethod",
124: "CallStaticCharMethodV",
125: "CallStaticCharMethodA",
126: "CallStaticShortMethod",
127: "CallStaticShortMethodV",
128: "CallStaticShortMethodA",
129: "CallStaticIntMethod",
130: "CallStaticIntMethodV",
131: "CallStaticIntMethodA",
132: "CallStaticLongMethod",
133: "CallStaticLongMethodV",
134: "CallStaticLongMethodA",
135: "CallStaticFloatMethod",
136: "CallStaticFloatMethodV",
137: "CallStaticFloatMethodA",
138: "CallStaticDoubleMethod",
139: "CallStaticDoubleMethodV",
140: "CallStaticDoubleMethodA",
141: "CallStaticVoidMethod",
142: "CallStaticVoidMethodV",
143: "CallStaticVoidMethodA",
144: "GetStaticFieldID",
145: "GetStaticObjectField",
146: "GetStaticBooleanField",
147: "GetStaticByteField",
148: "GetStaticCharField",
149: "GetStaticShortField",
150: "GetStaticIntField",
151: "GetStaticLongField",
152: "GetStaticFloatField",
153: "GetStaticDoubleField",
154: "SetStaticObjectField",
155: "SetStaticBooleanField",
156: "SetStaticByteField",
157: "SetStaticCharField",
158: "SetStaticShortField",
159: "SetStaticIntField",
160: "SetStaticLongField",
161: "SetStaticFloatField",
162: "SetStaticDoubleField",
163: "NewString",
164: "GetStringLength",
165: "GetStringChars",
166: "ReleaseStringChars",
167: "NewStringUTF",
168: "GetStringUTFLength",
169: "GetStringUTFChars",
170: "ReleaseStringUTFChars",
171: "GetArrayLength",
172: "NewObjectArray",
173: "GetObjectArrayElement",
174: "SetObjectArrayElement",
175: "NewBooleanArray",
176: "NewByteArray",
177: "NewCharArray",
178: "NewShortArray",
179: "NewIntArray",
180: "NewLongArray",
181: "NewFloatArray",
182: "NewDoubleArray",
183: "GetBooleanArrayElements",
184: "GetByteArrayElements",
185: "GetCharArrayElements",
186: "GetShortArrayElements",
187: "GetIntArrayElements",
188: "GetLongArrayElements",
189: "GetFloatArrayElements",
190: "GetDoubleArrayElements",
191: "ReleaseBooleanArrayElements",
192: "ReleaseByteArrayElements",
193: "ReleaseCharArrayElements",
194: "ReleaseShortArrayElements",
195: "ReleaseIntArrayElements",
196: "ReleaseLongArrayElements",
197: "ReleaseFloatArrayElements",
198: "ReleaseDoubleArrayElements",
199: "GetBooleanArrayRegion",
200: "GetByteArrayRegion",
201: "GetCharArrayRegion",
202: "GetShortArrayRegion",
203: "GetIntArrayRegion",
204: "GetLongArrayRegion",
205: "GetFloatArrayRegion",
206: "GetDoubleArrayRegion",
207: "SetBooleanArrayRegion",
208: "SetByteArrayRegion",
209: "SetCharArrayRegion",
210: "SetShortArrayRegion",
211: "SetIntArrayRegion",
212: "SetLongArrayRegion",
213: "SetFloatArrayRegion",
214: "SetDoubleArrayRegion",
215: "RegisterNatives",
216: "UnregisterNatives",
217: "MonitorEnter",
218: "MonitorExit",
219: "GetJavaVM",
220: "GetStringRegion",
221: "GetStringUTFRegion",
222: "GetPrimitiveArrayCritical",
223: "ReleasePrimitiveArrayCritical",
224: "GetStringCritical",
225: "ReleaseStringCritical",
226: "NewWeakGlobalRef",
227: "DeleteWeakGlobalRef",
228: "ExceptionCheck",
229: "NewDirectByteBuffer",
230: "GetDirectBufferAddress",
231: "GetDirectBufferCapacity",
232: "GetObjectRefType",
}
JNINativeInterface_sig = {
'_ZN7_JNIEnv10GetVersionEv': 'GetVersion',
'_ZN7_JNIEnv11DefineClassEPKcP8_jobjectPKai': 'DefineClass',
'_ZN7_JNIEnv9FindClassEPKc': 'FindClass',
'_ZN7_JNIEnv19FromReflectedMethodEP8_jobject': 'FromReflectedMethod',
'_ZN7_JNIEnv18FromReflectedFieldEP8_jobject': 'FromReflectedField',
'_ZN7_JNIEnv17ToReflectedMethodEP7_jclassP10_jmethodIDh': 'ToReflectedMethod',
'_ZN7_JNIEnv13GetSuperclassEP7_jclass': 'GetSuperclass',
'_ZN7_JNIEnv16IsAssignableFromEP7_jclassS1_': 'IsAssignableFrom',
'_ZN7_JNIEnv16ToReflectedFieldEP7_jclassP9_jfieldIDh': 'ToReflectedField',
'_ZN7_JNIEnv5ThrowEP11_jthrowable': 'Throw',
'_ZN7_JNIEnv8ThrowNewEP7_jclassPKc': 'ThrowNew',
'_ZN7_JNIEnv17ExceptionOccurredEv': 'ExceptionOccurred',
'_ZN7_JNIEnv17ExceptionDescribeEv': 'ExceptionDescribe',
'_ZN7_JNIEnv14ExceptionClearEv': 'ExceptionClear',
'_ZN7_JNIEnv10FatalErrorEPKc': 'FatalError',
'_ZN7_JNIEnv14PushLocalFrameEi': 'PushLocalFrame',
'_ZN7_JNIEnv13PopLocalFrameEP8_jobject': 'PopLocalFrame',
'_ZN7_JNIEnv12NewGlobalRefEP8_jobject': 'NewGlobalRef',
'_ZN7_JNIEnv15DeleteGlobalRefEP8_jobject': 'DeleteGlobalRef',
'_ZN7_JNIEnv14DeleteLocalRefEP8_jobject': 'DeleteLocalRef',
'_ZN7_JNIEnv12IsSameObjectEP8_jobjectS1_': 'IsSameObject',
'_ZN7_JNIEnv11NewLocalRefEP8_jobject': 'NewLocalRef',
'_ZN7_JNIEnv19EnsureLocalCapacityEi': 'EnsureLocalCapacity',
'_ZN7_JNIEnv11AllocObjectEP7_jclass': 'AllocObject',
'_ZN7_JNIEnv9NewObjectEP7_jclassP10_jmethodIDz': 'NewObject',
'_ZN7_JNIEnv10NewObjectVEP7_jclassP10_jmethodIDPc': 'NewObjectV',
'_ZN7_JNIEnv10NewObjectAEP7_jclassP10_jmethodIDP6jvalue': 'NewObjectA',
'_ZN7_JNIEnv14GetObjectClassEP8_jobject': 'GetObjectClass',
'_ZN7_JNIEnv12IsInstanceOfEP8_jobjectP7_jclass': 'IsInstanceOf',
'_ZN7_JNIEnv11GetMethodIDEP7_jclassPKcS3_': 'GetMethodID',
'_ZN7_JNIEnv16CallObjectMethodEP8_jobjectP10_jmethodIDz': 'CallObjectMethod',
'_ZN7_JNIEnv17CallObjectMethodVEP8_jobjectP10_jmethodIDPc': 'CallObjectMethodV',
'_ZN7_JNIEnv17CallObjectMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallObjectMethodA',
'_ZN7_JNIEnv17CallBooleanMethodEP8_jobjectP10_jmethodIDz': 'CallBooleanMethod',
'_ZN7_JNIEnv18CallBooleanMethodVEP8_jobjectP10_jmethodIDPc': 'CallBooleanMethodV',
'_ZN7_JNIEnv18CallBooleanMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallBooleanMethodA',
'_ZN7_JNIEnv14CallByteMethodEP8_jobjectP10_jmethodIDz': 'CallByteMethod',
'_ZN7_JNIEnv15CallByteMethodVEP8_jobjectP10_jmethodIDPc': 'CallByteMethodV',
'_ZN7_JNIEnv15CallByteMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallByteMethodA',
'_ZN7_JNIEnv14CallCharMethodEP8_jobjectP10_jmethodIDz': 'CallCharMethod',
'_ZN7_JNIEnv15CallCharMethodVEP8_jobjectP10_jmethodIDPc': 'CallCharMethodV',
'_ZN7_JNIEnv15CallCharMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallCharMethodA',
'_ZN7_JNIEnv15CallShortMethodEP8_jobjectP10_jmethodIDz': 'CallShortMethod',
'_ZN7_JNIEnv16CallShortMethodVEP8_jobjectP10_jmethodIDPc': 'CallShortMethodV',
'_ZN7_JNIEnv16CallShortMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallShortMethodA',
'_ZN7_JNIEnv13CallIntMethodEP8_jobjectP10_jmethodIDz': 'CallIntMethod',
'_ZN7_JNIEnv14CallIntMethodVEP8_jobjectP10_jmethodIDPc': 'CallIntMethodV',
'_ZN7_JNIEnv14CallIntMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallIntMethodA',
'_ZN7_JNIEnv14CallLongMethodEP8_jobjectP10_jmethodIDz': 'CallLongMethod',
'_ZN7_JNIEnv15CallLongMethodVEP8_jobjectP10_jmethodIDPc': 'CallLongMethodV',
'_ZN7_JNIEnv15CallLongMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallLongMethodA',
'_ZN7_JNIEnv15CallFloatMethodEP8_jobjectP10_jmethodIDz': 'CallFloatMethod',
'_ZN7_JNIEnv16CallFloatMethodVEP8_jobjectP10_jmethodIDPc': 'CallFloatMethodV',
'_ZN7_JNIEnv16CallFloatMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallFloatMethodA',
'_ZN7_JNIEnv16CallDoubleMethodEP8_jobjectP10_jmethodIDz': 'CallDoubleMethod',
'_ZN7_JNIEnv17CallDoubleMethodVEP8_jobjectP10_jmethodIDPc': 'CallDoubleMethodV',
'_ZN7_JNIEnv17CallDoubleMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallDoubleMethodA',
'_ZN7_JNIEnv14CallVoidMethodEP8_jobjectP10_jmethodIDz': 'CallVoidMethod',
'_ZN7_JNIEnv15CallVoidMethodVEP8_jobjectP10_jmethodIDPc': 'CallVoidMethodV',
'_ZN7_JNIEnv15CallVoidMethodAEP8_jobjectP10_jmethodIDP6jvalue': 'CallVoidMethodA',
'_ZN7_JNIEnv26CallNonvirtualObjectMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualObjectMethod',
'_ZN7_JNIEnv27CallNonvirtualObjectMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualObjectMethodV',
'_ZN7_JNIEnv27CallNonvirtualObjectMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue':
'CallNonvirtualObjectMethodA',
'_ZN7_JNIEnv27CallNonvirtualBooleanMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualBooleanMethod',
'_ZN7_JNIEnv28CallNonvirtualBooleanMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualBooleanMethodV',
'_ZN7_JNIEnv28CallNonvirtualBooleanMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue':
'CallNonvirtualBooleanMethodA',
'_ZN7_JNIEnv24CallNonvirtualByteMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualByteMethod',
'_ZN7_JNIEnv25CallNonvirtualByteMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualByteMethodV',
'_ZN7_JNIEnv25CallNonvirtualByteMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue': 'CallNonvirtualByteMethodA',
'_ZN7_JNIEnv24CallNonvirtualCharMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualCharMethod',
'_ZN7_JNIEnv25CallNonvirtualCharMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualCharMethodV',
'_ZN7_JNIEnv25CallNonvirtualCharMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue': 'CallNonvirtualCharMethodA',
'_ZN7_JNIEnv25CallNonvirtualShortMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualShortMethod',
'_ZN7_JNIEnv26CallNonvirtualShortMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualShortMethodV',
'_ZN7_JNIEnv26CallNonvirtualShortMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue':
'CallNonvirtualShortMethodA',
'_ZN7_JNIEnv23CallNonvirtualIntMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualIntMethod',
'_ZN7_JNIEnv24CallNonvirtualIntMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualIntMethodV',
'_ZN7_JNIEnv24CallNonvirtualIntMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue': 'CallNonvirtualIntMethodA',
'_ZN7_JNIEnv24CallNonvirtualLongMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualLongMethod',
'_ZN7_JNIEnv25CallNonvirtualLongMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualLongMethodV',
'_ZN7_JNIEnv25CallNonvirtualLongMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue': 'CallNonvirtualLongMethodA',
'_ZN7_JNIEnv25CallNonvirtualFloatMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualFloatMethod',
'_ZN7_JNIEnv26CallNonvirtualFloatMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualFloatMethodV',
'_ZN7_JNIEnv26CallNonvirtualFloatMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue':
'CallNonvirtualFloatMethodA',
'_ZN7_JNIEnv26CallNonvirtualDoubleMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualDoubleMethod',
'_ZN7_JNIEnv27CallNonvirtualDoubleMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualDoubleMethodV',
'_ZN7_JNIEnv27CallNonvirtualDoubleMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue':
'CallNonvirtualDoubleMethodA',
'_ZN7_JNIEnv24CallNonvirtualVoidMethodEP8_jobjectP7_jclassP10_jmethodIDz': 'CallNonvirtualVoidMethod',
'_ZN7_JNIEnv25CallNonvirtualVoidMethodVEP8_jobjectP7_jclassP10_jmethodIDPc': 'CallNonvirtualVoidMethodV',
'_ZN7_JNIEnv25CallNonvirtualVoidMethodAEP8_jobjectP7_jclassP10_jmethodIDP6jvalue': 'CallNonvirtualVoidMethodA',
'_ZN7_JNIEnv10GetFieldIDEP7_jclassPKcS3_': 'GetFieldID',
'_ZN7_JNIEnv14GetObjectFieldEP8_jobjectP9_jfieldID': 'GetObjectField',
'_ZN7_JNIEnv15GetBooleanFieldEP8_jobjectP9_jfieldID': 'GetBooleanField',
'_ZN7_JNIEnv12GetByteFieldEP8_jobjectP9_jfieldID': 'GetByteField',
'_ZN7_JNIEnv12GetCharFieldEP8_jobjectP9_jfieldID': 'GetCharField',
'_ZN7_JNIEnv13GetShortFieldEP8_jobjectP9_jfieldID': 'GetShortField',
'_ZN7_JNIEnv11GetIntFieldEP8_jobjectP9_jfieldID': 'GetIntField',
'_ZN7_JNIEnv12GetLongFieldEP8_jobjectP9_jfieldID': 'GetLongField',
'_ZN7_JNIEnv13GetFloatFieldEP8_jobjectP9_jfieldID': 'GetFloatField',
'_ZN7_JNIEnv14GetDoubleFieldEP8_jobjectP9_jfieldID': 'GetDoubleField',
'_ZN7_JNIEnv14SetObjectFieldEP8_jobjectP9_jfieldIDS1_': 'SetObjectField',
'_ZN7_JNIEnv15SetBooleanFieldEP8_jobjectP9_jfieldIDh': 'SetBooleanField',
'_ZN7_JNIEnv12SetByteFieldEP8_jobjectP9_jfieldIDa': 'SetByteField',
'_ZN7_JNIEnv12SetCharFieldEP8_jobjectP9_jfieldIDt': 'SetCharField',
'_ZN7_JNIEnv13SetShortFieldEP8_jobjectP9_jfieldIDs': 'SetShortField',
'_ZN7_JNIEnv11SetIntFieldEP8_jobjectP9_jfieldIDi': 'SetIntField',
'_ZN7_JNIEnv12SetLongFieldEP8_jobjectP9_jfieldIDx': 'SetLongField',
'_ZN7_JNIEnv13SetFloatFieldEP8_jobjectP9_jfieldIDf': 'SetFloatField',
'_ZN7_JNIEnv14SetDoubleFieldEP8_jobjectP9_jfieldIDd': 'SetDoubleField',
'_ZN7_JNIEnv17GetStaticMethodIDEP7_jclassPKcS3_': 'GetStaticMethodID',
'_ZN7_JNIEnv22CallStaticObjectMethodEP7_jclassP10_jmethodIDz': 'CallStaticObjectMethod',
'_ZN7_JNIEnv23CallStaticObjectMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticObjectMethodV',
'_ZN7_JNIEnv23CallStaticObjectMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticObjectMethodA',
'_ZN7_JNIEnv23CallStaticBooleanMethodEP7_jclassP10_jmethodIDz': 'CallStaticBooleanMethod',
'_ZN7_JNIEnv24CallStaticBooleanMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticBooleanMethodV',
'_ZN7_JNIEnv24CallStaticBooleanMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticBooleanMethodA',
'_ZN7_JNIEnv20CallStaticByteMethodEP7_jclassP10_jmethodIDz': 'CallStaticByteMethod',
'_ZN7_JNIEnv21CallStaticByteMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticByteMethodV',
'_ZN7_JNIEnv21CallStaticByteMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticByteMethodA',
'_ZN7_JNIEnv20CallStaticCharMethodEP7_jclassP10_jmethodIDz': 'CallStaticCharMethod',
'_ZN7_JNIEnv21CallStaticCharMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticCharMethodV',
'_ZN7_JNIEnv21CallStaticCharMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticCharMethodA',
'_ZN7_JNIEnv21CallStaticShortMethodEP7_jclassP10_jmethodIDz': 'CallStaticShortMethod',
'_ZN7_JNIEnv22CallStaticShortMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticShortMethodV',
'_ZN7_JNIEnv22CallStaticShortMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticShortMethodA',
'_ZN7_JNIEnv19CallStaticIntMethodEP7_jclassP10_jmethodIDz': 'CallStaticIntMethod',
'_ZN7_JNIEnv20CallStaticIntMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticIntMethodV',
'_ZN7_JNIEnv20CallStaticIntMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticIntMethodA',
'_ZN7_JNIEnv20CallStaticLongMethodEP7_jclassP10_jmethodIDz': 'CallStaticLongMethod',
'_ZN7_JNIEnv21CallStaticLongMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticLongMethodV',
'_ZN7_JNIEnv21CallStaticLongMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticLongMethodA',
'_ZN7_JNIEnv21CallStaticFloatMethodEP7_jclassP10_jmethodIDz': 'CallStaticFloatMethod',
'_ZN7_JNIEnv22CallStaticFloatMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticFloatMethodV',
'_ZN7_JNIEnv22CallStaticFloatMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticFloatMethodA',
'_ZN7_JNIEnv22CallStaticDoubleMethodEP7_jclassP10_jmethodIDz': 'CallStaticDoubleMethod',
'_ZN7_JNIEnv23CallStaticDoubleMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticDoubleMethodV',
'_ZN7_JNIEnv23CallStaticDoubleMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticDoubleMethodA',
'_ZN7_JNIEnv20CallStaticVoidMethodEP7_jclassP10_jmethodIDz': 'CallStaticVoidMethod',
'_ZN7_JNIEnv21CallStaticVoidMethodVEP7_jclassP10_jmethodIDPc': 'CallStaticVoidMethodV',
'_ZN7_JNIEnv21CallStaticVoidMethodAEP7_jclassP10_jmethodIDP6jvalue': 'CallStaticVoidMethodA',
'_ZN7_JNIEnv16GetStaticFieldIDEP7_jclassPKcS3_': 'GetStaticFieldID',
'_ZN7_JNIEnv20GetStaticObjectFieldEP7_jclassP9_jfieldID': 'GetStaticObjectField',
'_ZN7_JNIEnv21GetStaticBooleanFieldEP7_jclassP9_jfieldID': 'GetStaticBooleanField',
'_ZN7_JNIEnv18GetStaticByteFieldEP7_jclassP9_jfieldID': 'GetStaticByteField',
'_ZN7_JNIEnv18GetStaticCharFieldEP7_jclassP9_jfieldID': 'GetStaticCharField',
'_ZN7_JNIEnv19GetStaticShortFieldEP7_jclassP9_jfieldID': 'GetStaticShortField',
'_ZN7_JNIEnv17GetStaticIntFieldEP7_jclassP9_jfieldID': 'GetStaticIntField',
'_ZN7_JNIEnv18GetStaticLongFieldEP7_jclassP9_jfieldID': 'GetStaticLongField',
'_ZN7_JNIEnv19GetStaticFloatFieldEP7_jclassP9_jfieldID': 'GetStaticFloatField',
'_ZN7_JNIEnv20GetStaticDoubleFieldEP7_jclassP9_jfieldID': 'GetStaticDoubleField',
'_ZN7_JNIEnv20SetStaticObjectFieldEP7_jclassP9_jfieldIDP8_jobject': 'SetStaticObjectField',
'_ZN7_JNIEnv21SetStaticBooleanFieldEP7_jclassP9_jfieldIDh': 'SetStaticBooleanField',
'_ZN7_JNIEnv18SetStaticByteFieldEP7_jclassP9_jfieldIDa': 'SetStaticByteField',
'_ZN7_JNIEnv18SetStaticCharFieldEP7_jclassP9_jfieldIDt': 'SetStaticCharField',
'_ZN7_JNIEnv19SetStaticShortFieldEP7_jclassP9_jfieldIDs': 'SetStaticShortField',
'_ZN7_JNIEnv17SetStaticIntFieldEP7_jclassP9_jfieldIDi': 'SetStaticIntField',
'_ZN7_JNIEnv18SetStaticLongFieldEP7_jclassP9_jfieldIDx': 'SetStaticLongField',
'_ZN7_JNIEnv19SetStaticFloatFieldEP7_jclassP9_jfieldIDf': 'SetStaticFloatField',
'_ZN7_JNIEnv20SetStaticDoubleFieldEP7_jclassP9_jfieldIDd': 'SetStaticDoubleField',
'_ZN7_JNIEnv9NewStringEPKti': 'NewString',
'_ZN7_JNIEnv15GetStringLengthEP8_jstring': 'GetStringLength',
'_ZN7_JNIEnv14GetStringCharsEP8_jstringPh': 'GetStringChars',
'_ZN7_JNIEnv18ReleaseStringCharsEP8_jstringPKt': 'ReleaseStringChars',
'_ZN7_JNIEnv12NewStringUTFEPKc': 'NewStringUTF',
'_ZN7_JNIEnv18GetStringUTFLengthEP8_jstring': 'GetStringUTFLength',
'_ZN7_JNIEnv17GetStringUTFCharsEP8_jstringPh': 'GetStringUTFChars',
'_ZN7_JNIEnv21ReleaseStringUTFCharsEP8_jstringPKc': 'ReleaseStringUTFChars',
'_ZN7_JNIEnv14GetArrayLengthEP7_jarray': 'GetArrayLength',
'_ZN7_JNIEnv14NewObjectArrayEiP7_jclassP8_jobject': 'NewObjectArray',
'_ZN7_JNIEnv21GetObjectArrayElementEP13_jobjectArrayi': 'GetObjectArrayElement',
'_ZN7_JNIEnv21SetObjectArrayElementEP13_jobjectArrayiP8_jobject': 'SetObjectArrayElement',
'_ZN7_JNIEnv15NewBooleanArrayEi': 'NewBooleanArray',
'_ZN7_JNIEnv12NewByteArrayEi': 'NewByteArray',
'_ZN7_JNIEnv12NewCharArrayEi': 'NewCharArray',
'_ZN7_JNIEnv13NewShortArrayEi': 'NewShortArray',
'_ZN7_JNIEnv11NewIntArrayEi': 'NewIntArray',
'_ZN7_JNIEnv12NewLongArrayEi': 'NewLongArray',
'_ZN7_JNIEnv13NewFloatArrayEi': 'NewFloatArray',
'_ZN7_JNIEnv14NewDoubleArrayEi': 'NewDoubleArray',
'_ZN7_JNIEnv23GetBooleanArrayElementsEP14_jbooleanArrayPh': 'GetBooleanArrayElements',
'_ZN7_JNIEnv20GetByteArrayElementsEP11_jbyteArrayPh': 'GetByteArrayElements',
'_ZN7_JNIEnv20GetCharArrayElementsEP11_jcharArrayPh': 'GetCharArrayElements',
'_ZN7_JNIEnv21GetShortArrayElementsEP12_jshortArrayPh': 'GetShortArrayElements',
'_ZN7_JNIEnv19GetIntArrayElementsEP10_jintArrayPh': 'GetIntArrayElements',
'_ZN7_JNIEnv20GetLongArrayElementsEP11_jlongArrayPh': 'GetLongArrayElements',
'_ZN7_JNIEnv21GetFloatArrayElementsEP12_jfloatArrayPh': 'GetFloatArrayElements',
'_ZN7_JNIEnv22GetDoubleArrayElementsEP13_jdoubleArrayPh': 'GetDoubleArrayElements',
'_ZN7_JNIEnv27ReleaseBooleanArrayElementsEP14_jbooleanArrayPhi': 'ReleaseBooleanArrayElements',
'_ZN7_JNIEnv24ReleaseByteArrayElementsEP11_jbyteArrayPai': 'ReleaseByteArrayElements',
'_ZN7_JNIEnv24ReleaseCharArrayElementsEP11_jcharArrayPti': 'ReleaseCharArrayElements',
'_ZN7_JNIEnv25ReleaseShortArrayElementsEP12_jshortArrayPsi': 'ReleaseShortArrayElements',
'_ZN7_JNIEnv23ReleaseIntArrayElementsEP10_jintArrayPii': 'ReleaseIntArrayElements',
'_ZN7_JNIEnv24ReleaseLongArrayElementsEP11_jlongArrayPxi': 'ReleaseLongArrayElements',
'_ZN7_JNIEnv25ReleaseFloatArrayElementsEP12_jfloatArrayPfi': 'ReleaseFloatArrayElements',
'_ZN7_JNIEnv26ReleaseDoubleArrayElementsEP13_jdoubleArrayPdi': 'ReleaseDoubleArrayElements',
'_ZN7_JNIEnv21GetBooleanArrayRegionEP14_jbooleanArrayiiPh': 'GetBooleanArrayRegion',
'_ZN7_JNIEnv18GetByteArrayRegionEP11_jbyteArrayiiPa': 'GetByteArrayRegion',
'_ZN7_JNIEnv18GetCharArrayRegionEP11_jcharArrayiiPt': 'GetCharArrayRegion',
'_ZN7_JNIEnv19GetShortArrayRegionEP12_jshortArrayiiPs': 'GetShortArrayRegion',
'_ZN7_JNIEnv17GetIntArrayRegionEP10_jintArrayiiPi': 'GetIntArrayRegion',
'_ZN7_JNIEnv18GetLongArrayRegionEP11_jlongArrayiiPx': 'GetLongArrayRegion',
'_ZN7_JNIEnv19GetFloatArrayRegionEP12_jfloatArrayiiPf': 'GetFloatArrayRegion',
'_ZN7_JNIEnv20GetDoubleArrayRegionEP13_jdoubleArrayiiPd': 'GetDoubleArrayRegion',
'_ZN7_JNIEnv21SetBooleanArrayRegionEP14_jbooleanArrayiiPKh': 'SetBooleanArrayRegion',
'_ZN7_JNIEnv18SetByteArrayRegionEP11_jbyteArrayiiPKa': 'SetByteArrayRegion',
'_ZN7_JNIEnv18SetCharArrayRegionEP11_jcharArrayiiPKt': 'SetCharArrayRegion',
'_ZN7_JNIEnv19SetShortArrayRegionEP12_jshortArrayiiPKs': 'SetShortArrayRegion',
'_ZN7_JNIEnv17SetIntArrayRegionEP10_jintArrayiiPKi': 'SetIntArrayRegion',
'_ZN7_JNIEnv18SetLongArrayRegionEP11_jlongArrayiiPKx': 'SetLongArrayRegion',
'_ZN7_JNIEnv19SetFloatArrayRegionEP12_jfloatArrayiiPKf': 'SetFloatArrayRegion',
'_ZN7_JNIEnv20SetDoubleArrayRegionEP13_jdoubleArrayiiPKd': 'SetDoubleArrayRegion',
'_ZN7_JNIEnv15RegisterNativesEP7_jclassPK15JNINativeMethodi': 'RegisterNatives',
'_ZN7_JNIEnv17UnregisterNativesEP7_jclass': 'UnregisterNatives',
'_ZN7_JNIEnv12MonitorEnterEP8_jobject': 'MonitorEnter',
'_ZN7_JNIEnv11MonitorExitEP8_jobject': 'MonitorExit',
'_ZN7_JNIEnv9GetJavaVMEPP7_JavaVM': 'GetJavaVM',
'_ZN7_JNIEnv15GetStringRegionEP8_jstringiiPt': 'GetStringRegion',
'_ZN7_JNIEnv18GetStringUTFRegionEP8_jstringiiPc': 'GetStringUTFRegion',
'_ZN7_JNIEnv25GetPrimitiveArrayCriticalEP7_jarrayPh': 'GetPrimitiveArrayCritical',
'_ZN7_JNIEnv29ReleasePrimitiveArrayCriticalEP7_jarrayPvi': 'ReleasePrimitiveArrayCritical',
'_ZN7_JNIEnv17GetStringCriticalEP8_jstringPh': 'GetStringCritical',
'_ZN7_JNIEnv21ReleaseStringCriticalEP8_jstringPKt': 'ReleaseStringCritical',
'_ZN7_JNIEnv16NewWeakGlobalRefEP8_jobject': 'NewWeakGlobalRef',
'_ZN7_JNIEnv19DeleteWeakGlobalRefEP8_jobject': 'DeleteWeakGlobalRef',
'_ZN7_JNIEnv14ExceptionCheckEv': 'ExceptionCheck',
'_ZN7_JNIEnv19NewDirectByteBufferEPvx': 'NewDirectByteBuffer',
'_ZN7_JNIEnv22GetDirectBufferAddressEP8_jobject': 'GetDirectBufferAddress',
'_ZN7_JNIEnv23GetDirectBufferCapacityEP8_jobject': 'GetDirectBufferCapacity',
'_ZN7_JNIEnv16GetObjectRefTypeEP8_jobject': 'GetObjectRefType',
}
JNINativeInterface_name_to_simproc = {
'GetVersion': GetVersion,
'DefineClass': DefineClass,
'FindClass': FindClass,
'FromReflectedMethod': FromReflectedMethod,
'FromReflectedField': FromReflectedField,
'ToReflectedMethod': ToReflectedMethod,
'GetSuperClass': GetSuperClass,
'IsAssignableFrom': IsAssignableFrom,
'ToReflectedField': ToReflectedField,
'Throw': Throw,
'ThrowNew': ThrowNew,
'ExceptionOccurred': ExceptionOccurred,
'ExceptionDescribe': ExceptionDescribe,
'ExceptionClear': ExceptionClear,
'FatalError': FatalError,
'PushLocalFrame': PushLocalFrame,
'PopLocalFrame': PopLocalFrame,
'NewGlobalRef': NewGlobalRef,
'DeleteGlobalRef': DeleteGlobalRef,
'DeleteLocalRef': DeleteLocalRef,
'IsSameObject': IsSameObject,
'NewLocalRef': NewLocalRef,
'EnsureLocalCapacity': EnsureLocalCapacity,
'AllocObject': AllocObject,
'NewObject': NewObject,
'NewObjectV': NewObjectV,
'NewObjectA': NewObjectA,
'GetObjectClass': GetObjectClass,
'IsInstanceOf': IsInstanceOf,
'GetMethodID': GetMethodID,
'CallObjectMethod': CallObjectMethod,
'CallObjectMethodV': CallObjectMethodV,
'CallObjectMethodA': CallObjectMethodA,
'CallBooleanMethod': CallBooleanMethod,
'CallBooleanMethodV': CallBooleanMethodV,
'CallBooleanMethodA': CallBooleanMethodA,
'CallByteMethod': CallByteMethod,
'CallByteMethodV': CallByteMethodV,
'CallByteMethodA': CallByteMethodA,
'CallCharMethod': CallCharMethod,
'CallCharMethodV': CallCharMethodV,
'CallCharMethodA': CallCharMethodA,
'CallShortMethod': CallShortMethod,
'CallShortMethodV': CallShortMethodV,
'CallShortMethodA': CallShortMethodA,
'CallIntMethod': CallIntMethod,
'CallIntMethodV': CallIntMethodV,
'CallIntMethodA': CallIntMethodA,
'CallLongMethod': CallLongMethod,
'CallLongMethodV': CallLongMethodV,
'CallLongMethodA': CallLongMethodA,
'CallFloatMethod': CallFloatMethod,
'CallFloatMethodV': CallFloatMethodV,
'CallFloatMethodA': CallFloatMethodA,
'CallDoubleMethod': CallDoubleMethod,
'CallDoubleMethodV': CallDoubleMethodV,
'CallDoubleMethodA': CallDoubleMethodA,
'CallVoidMethod': CallVoidMethod,
'CallVoidMethodV': CallVoidMethodV,
'CallVoidMethodA': CallVoidMethodA,
'CallNonvirtualObjectMethod': CallNonvirtualObjectMethod,
'CallNonvirtualObjectMethodV': CallNonvirtualObjectMethodV,
'CallNonvirtualObjectMethodA': CallNonvirtualObjectMethodA,
'CallNonvirtualBooleanMethod': CallNonvirtualBooleanMethod,
'CallNonvirtualBooleanMethodV': CallNonvirtualBooleanMethodV,
'CallNonvirtualBooleanMethodA': CallNonvirtualBooleanMethodA,
'CallNonvirtualByteMethod': CallNonvirtualByteMethod,
'CallNonvirtualByteMethodV': CallNonvirtualByteMethodV,
'CallNonvirtualByteMethodA': CallNonvirtualByteMethodA,
'CallNonvirtualCharMethod': CallNonvirtualCharMethod,
'CallNonvirtualCharMethodV': CallNonvirtualCharMethodV,
'CallNonvirtualCharMethodA': CallNonvirtualCharMethodA,
'CallNonvirtualShortMethod': CallNonvirtualShortMethod,
'CallNonvirtualShortMethodV': CallNonvirtualShortMethodV,
'CallNonvirtualShortMethodA': CallNonvirtualShortMethodA,
'CallNonvirtualIntMethod': CallNonvirtualIntMethod,
'CallNonvirtualIntMethodV': CallNonvirtualIntMethodV,
'CallNonvirtualIntMethodA': CallNonvirtualIntMethodA,
'CallNonvirtualLongMethod': CallNonvirtualLongMethod,
'CallNonvirtualLongMethodV': CallNonvirtualLongMethodV,
'CallNonvirtualLongMethodA': CallNonvirtualLongMethodA,
'CallNonvirtualFloatMethod': CallNonvirtualFloatMethod,
'CallNonvirtualFloatMethodV': CallNonvirtualFloatMethodV,
'CallNonvirtualFloatMethodA': CallNonvirtualFloatMethodA,
'CallNonvirtualDoubleMethod': CallNonvirtualDoubleMethod,
'CallNonvirtualDoubleMethodV': CallNonvirtualDoubleMethodV,
'CallNonvirtualDoubleMethodA': CallNonvirtualDoubleMethodA,
'CallNonvirtualVoidMethod': CallNonvirtualVoidMethod,
'CallNonvirtualVoidMethodV': CallNonvirtualVoidMethodV,
'CallNonvirtualVoidMethodA': CallNonvirtualVoidMethodA,
'GetFieldID': GetFieldID,
'GetObjectField': GetObjectField,
'GetBooleanField': GetBooleanField,
'GetByteField': GetByteField,
'GetCharField': GetCharField,
'GetShortField': GetShortField,
'GetIntField': GetIntField,
'GetLongField': GetLongField,
'GetFloatField': GetFloatField,
'GetDoubleField': GetDoubleField,
'SetObjectField': SetObjectField,
'SetBooleanField': SetBooleanField,
'SetByteField': SetByteField,
'SetCharField': SetCharField,
'SetShortField': SetShortField,
'SetIntField': SetIntField,
'SetLongField': SetLongField,
'SetFloatField': SetFloatField,
'SetDoubleField': SetDoubleField,
'GetStaticMethodID': GetStaticMethodID,
'CallStaticObjectMethod': CallStaticObjectMethod,
'CallStaticObjectMethodV': CallStaticObjectMethodV,
'CallStaticObjectMethodA': CallStaticObjectMethodA,
'CallStaticBooleanMethod': CallStaticBooleanMethod,
'CallStaticBooleanMethodV': CallStaticBooleanMethodV,
'CallStaticBooleanMethodA': CallStaticBooleanMethodA,
'CallStaticByteMethod': CallStaticByteMethod,
'CallStaticByteMethodV': CallStaticByteMethodV,
'CallStaticByteMethodA': CallStaticByteMethodA,
'CallStaticCharMethod': CallStaticCharMethod,
'CallStaticCharMethodV': CallStaticCharMethodV,
'CallStaticCharMethodA': CallStaticCharMethodA,
'CallStaticShortMethod': CallStaticShortMethod,
'CallStaticShortMethodV': CallStaticShortMethodV,
'CallStaticShortMethodA': CallStaticShortMethodA,
'CallStaticIntMethod': CallStaticIntMethod,
'CallStaticIntMethodV': CallStaticIntMethodV,
'CallStaticIntMethodA': CallStaticIntMethodA,
'CallStaticLongMethod': CallStaticLongMethod,
'CallStaticLongMethodV': CallStaticLongMethodV,
'CallStaticLongMethodA': CallStaticLongMethodA,
'CallStaticFloatMethod': CallStaticFloatMethod,
'CallStaticFloatMethodV': CallStaticFloatMethodV,
'CallStaticFloatMethodA': CallStaticFloatMethodA,
'CallStaticDoubleMethod': CallStaticDoubleMethod,
'CallStaticDoubleMethodV': CallStaticDoubleMethodV,
'CallStaticDoubleMethodA': CallStaticDoubleMethodA,
'CallStaticVoidMethod': CallStaticVoidMethod,
'CallStaticVoidMethodV': CallStaticVoidMethodV,
'CallStaticVoidMethodA': CallStaticVoidMethodA,
'GetStaticFieldID': GetStaticFieldID,
'GetStaticObjectField': GetStaticObjectField,
'GetStaticBooleanField': GetStaticBooleanField,
'GetStaticByteField': GetStaticByteField,
'GetStaticCharField': GetStaticCharField,
'GetStaticShortField': GetStaticShortField,
'GetStaticIntField': GetStaticIntField,
'GetStaticLongField': GetStaticLongField,
'GetStaticFloatField': GetStaticFloatField,
'GetStaticDoubleField': GetStaticDoubleField,
'SetStaticObjectField': SetStaticObjectField,
'SetStaticBooleanField': SetStaticBooleanField,
'SetStaticByteField': SetStaticByteField,
'SetStaticCharField': SetStaticCharField,
'SetStaticShortField': SetStaticShortField,
'SetStaticIntField': SetStaticIntField,
'SetStaticLongField': SetStaticLongField,
'SetStaticFloatField': SetStaticFloatField,
'SetStaticDoubleField': SetStaticDoubleField,
'NewString': NewString,
'GetStringLength': GetStringLength,
'GetStringChars': GetStringChars,
'ReleaseStringChars': ReleaseStringChars,
'NewStringUTF': NewStringUTF,
'GetStringUTFLength': GetStringUTFLength,
'GetStringUTFChars': GetStringUTFChars,
'ReleaseStringUTFChars': ReleaseStringUTFChars,
'GetArrayLength': GetArrayLength,
'NewObjectArray': NewObjectArray,
'GetObjectArrayElement': GetObjectArrayElement,
'SetObjectArrayElement': SetObjectArrayElement,
'NewBooleanArray': NewBooleanArray,
'NewByteArray': NewByteArray,
'NewCharArray': NewCharArray,
'NewShortArray': NewShortArray,
'NewIntArray': NewIntArray,
'NewLongArray': NewLongArray,
'NewFloatArray': NewFloatArray,
'NewDoubleArray': NewDoubleArray,
'GetBooleanArrayElements': GetBooleanArrayElements,
'GetByteArrayElements': GetByteArrayElements,
'GetCharArrayElements': GetCharArrayElements,
'GetShortArrayElements': GetShortArrayElements,
'GetIntArrayElements': GetIntArrayElements,
'GetLongArrayElements': GetLongArrayElements,
'GetFloatArrayElements': GetFloatArrayElements,
'GetDoubleArrayElements': GetDoubleArrayElements,
'ReleaseBooleanArrayElements': ReleaseBooleanArrayElements,
'ReleaseByteArrayElements': ReleaseByteArrayElements,
'ReleaseCharArrayElements': ReleaseCharArrayElements,
'ReleaseShortArrayElements': ReleaseShortArrayElements,
'ReleaseIntArrayElements': ReleaseIntArrayElements,
'ReleaseLongArrayElements': ReleaseLongArrayElements,
'ReleaseFloatArrayElements': ReleaseFloatArrayElements,
'ReleaseDoubleArrayElements': ReleaseDoubleArrayElements,
'GetBooleanArrayRegion': GetBooleanArrayRegion,
'GetByteArrayRegion': GetByteArrayRegion,
'GetCharArrayRegion': GetCharArrayRegion,
'GetShortArrayRegion': GetShortArrayRegion,
'GetIntArrayRegion': GetIntArrayRegion,
'GetLongArrayRegion': GetLongArrayRegion,
'GetFloatArrayRegion': GetFloatArrayRegion,
'GetDoubleArrayRegion': GetDoubleArrayRegion,
'SetBooleanArrayRegion': SetBooleanArrayRegion,
'SetByteArrayRegion': SetByteArrayRegion,
'SetCharArrayRegion': SetCharArrayRegion,
'SetShortArrayRegion': SetShortArrayRegion,
'SetIntArrayRegion': SetIntArrayRegion,
'SetLongArrayRegion': SetLongArrayRegion,
'SetFloatArrayRegion': SetFloatArrayRegion,
'SetDoubleArrayRegion': SetDoubleArrayRegion,
'RegisterNatives': RegisterNatives,
'UnregisterNatives': UnregisterNatives,
'MonitorEnter': MonitorEnter,
'MonitorExit': MonitorExit,
'GetJavaVM': GetJavaVM,
'GetStringRegion': GetStringRegion,
'GetStringUTFRegion': GetStringUTFRegion,
'GetPrimitiveArrayCritical': GetPrimitiveArrayCritical,
'ReleasePrimitiveArrayCritical': ReleasePrimitiveArrayCritical,
'GetStringCritical': GetStringCritical,
'ReleaseStringCritical': ReleaseStringCritical,
'NewWeakGlobalRef': NewWeakGlobalRef,
'DeleteWeakGlobalRef': DeleteWeakGlobalRef,
'ExceptionCheck': ExceptionCheck,
'NewDirectByteBuffer': NewDirectByteBuffer,
'GetDirectBufferAddress': GetDirectBufferAddress,
'GetDirectBufferCapacity': GetDirectBufferCapacity,
'GetObjectRefType': GetObjectRefType,
}
def __init__(self, project, analysis_center):
    """Build the fake JNINativeInterface function table for *project*.

    Registers this object as a loader-backed memory region, hooks all JNI
    functions (see ``_construct``) and declares the JNINativeMethod struct.
    """
    super(JNINativeInterface, self).__init__(project.loader)
    self._provides = 'JNIEnv'
    self._project = project
    # NOTE(review): `bits / 8` is float division on Python 3; the
    # `iteritems` call in _construct shows this file targets Python 2,
    # where this yields an int. Confirm before porting.
    self._fptr_size = self._project.arch.bits / 8
    self._project.loader.add_object(self)
    self._analysis_center = analysis_center
    self._construct()
    # Define the JNINativeMethod struct so it can be resolved later by the
    # RegisterNatives SimProcedure.
    angr.sim_type.define_struct('struct JNINativeMethod {const char* name;const char* signature;void* fnPtr;}')
    angr.sim_type.parse_type('struct JNINativeMethod')
def _construct(self):
    """Allocate and populate the fake JNINativeInterface function table.

    Every table slot gets its own allocated address hooked with either a
    dedicated SimProcedure, a ReturnUnconstrained stub, or (for the four
    reserved entries) a PathTerminator. The JNIEnv pointer is then made to
    point at the table.
    """
    # allocate memory for the fake JNINativeInterface struct
    self._JNINativeInterface = self.allocate(
        len(self.JNINativeInterface_index_to_name) * self._fptr_size)
    # allocate memory for JNIEnv (a pointer) and make it point to the fake
    # JNINativeInterface struct
    self._JNIEnv = self.allocate(self._fptr_size)
    self.memory.write_addr_at(self._JNIEnv - self.min_addr, self._JNINativeInterface)
    # Direct-call hooks: exported symbols whose names match a known JNI
    # signature are hooked in place in the main object.
    for addr in self._project.loader.main_object.symbols_by_addr:
        symb = self._project.loader.main_object.symbols_by_addr[addr]
        if symb.name in self.JNINativeInterface_sig:
            symb_name = symb.name
            jni_native_interface_func_name = self.JNINativeInterface_sig[symb_name]
            if jni_native_interface_func_name in self.JNINativeInterface_name_to_simproc:
                # a custom SimProcedure exists for this JNI function
                proc = self.JNINativeInterface_name_to_simproc[
                    jni_native_interface_func_name](self._analysis_center)
                self._project.hook(addr, proc)
            else:
                self._project.hook(addr, angr.SIM_PROCEDURES['stubs']['ReturnUnconstrained']())
    # Populate the table itself: one freshly allocated, hooked address per
    # function-pointer slot.
    for index, name in self.JNINativeInterface_index_to_name.iteritems():
        # the 4 reserved entries are hooked with PathTerminator
        if name.startswith('reserved'):
            addr = self.allocate(self._fptr_size)
            self._project.hook(addr, angr.SIM_PROCEDURES['stubs']['PathTerminator']())
        else:
            addr = self.allocate(self._fptr_size)
            # if we have a custom simprocedure for that function, hook with that
            if name in self.JNINativeInterface_name_to_simproc:
                proc = self.JNINativeInterface_name_to_simproc[name](self._analysis_center)
                self._project.hook(addr, proc)
            # otherwise hook with ReturnUnconstrained
            else:
                self._project.hook(addr, angr.SIM_PROCEDURES['stubs']['ReturnUnconstrained']())
        self.memory.write_addr_at(self._JNINativeInterface - self.min_addr + index * self._fptr_size, addr)
@property
def ptr(self):
    """Address of the fake JNIEnv pointer handed to native code."""
    return self._JNIEnv
|
import os
class RosettaScoreData(object):
    """Single decoy record parsed from a ROSETTA score file."""

    # Fields filled in by RosettaScoreParser.parse_file.
    _FIELDS = ('score', 'rms', 'maxsub', 'description', 'model')

    def __init__(self):
        for field in self._FIELDS:
            setattr(self, field, None)


class RosettaScoreParser(object):
    """Parse a ROSETTA ``score.fsc`` file and compute summary statistics.

    Attributes:
        directory: directory holding the score file and decoy pdbs.
        data: list of RosettaScoreData records, in file order.
        topScore/avgScore: best (lowest) and mean rosetta score.
        topRms/avgRms: best (lowest) and mean RMS.
        topMaxsub/avgMaxsub: best (highest) and mean maxsub.

    Raises:
        RuntimeError: if the score file is missing, its header lacks a
            required column, or it contains no data rows.
    """

    def __init__(self, directory):
        self.directory = directory
        self.avgScore = None
        self.topScore = None
        self.avgRms = None
        self.topRms = None
        self.avgMaxsub = None
        self.topMaxsub = None
        self.data = []
        score_file = os.path.join(directory, "score.fsc")
        if not os.path.isfile(score_file):
            raise RuntimeError("Cannot find ROSETTA score file: {0}".format(score_file))
        # BUG FIX: this used to call self.parseFile(), which does not exist
        # (the method is named parse_file), so construction always raised
        # AttributeError.
        self.parse_file(score_file)

    def parse_file(self, score_file):
        """Read *score_file*, filling self.data and the summary statistics."""
        idxScore = idxRms = idxMaxsub = idxDesc = None
        with open(score_file, 'r') as handle:  # close the file (was leaked)
            for i, line in enumerate(handle):
                line = line.strip()
                # First line is the header: locate the columns we need.
                if i == 0:
                    for j, f in enumerate(line.split()):
                        if f == "score":
                            idxScore = j
                        elif f == "rms":
                            idxRms = j
                        elif f == "maxsub":
                            idxMaxsub = j
                        elif f == "description":
                            idxDesc = j
                    if idxScore is None or idxRms is None or idxMaxsub is None or idxDesc is None:
                        raise RuntimeError("Missing header field from score file: {0}".format(score_file))
                    continue
                if not line:  # ignore blank lines - not sure why they are there...
                    continue
                d = RosettaScoreData()
                fields = line.split()
                d.score = float(fields[idxScore])
                d.rms = float(fields[idxRms])
                d.maxsub = float(fields[idxMaxsub])
                d.description = fields[idxDesc]
                d.model = os.path.join(self.directory, d.description + ".pdb")
                self.data.append(d)
        if not self.data:
            # Explicit error instead of an obscure IndexError/ZeroDivisionError.
            raise RuntimeError("No data parsed from score file: {0}".format(score_file))
        # Summary statistics: lower is better for score and rms, higher is
        # better for maxsub.
        scores = [d.score for d in self.data]
        rmsds = [d.rms for d in self.data]
        maxsubs = [d.maxsub for d in self.data]
        self.topScore = min(scores)
        self.avgScore = sum(scores) / len(scores)
        self.topRms = min(rmsds)
        self.avgRms = sum(rmsds) / len(rmsds)
        self.topMaxsub = max(maxsubs)
        self.avgMaxsub = sum(maxsubs) / len(maxsubs)
        return

    # Backwards-compatible camelCase alias for the name __init__ used to call.
    parseFile = parse_file

    def maxsub_sorted(self, reverse=True):
        """Return the records sorted by maxsub (descending by default)."""
        return sorted(self.data, key=lambda data: data.maxsub, reverse=reverse)

    def rms_sorted(self, reverse=True):
        """Return the records sorted by rms (descending by default)."""
        return sorted(self.data, key=lambda data: data.rms, reverse=reverse)

    def rms(self, name):
        """Return the rms of decoy *name*, or None if not found."""
        for d in self.data:
            if d.description == name:
                return d.rms

    def maxsub(self, name):
        """Return the maxsub of decoy *name*, or None if not found."""
        for d in self.data:
            if d.description == name:
                return d.maxsub

    def __str__(self):
        # BUG FIX: this used self.name, an attribute that is never set.
        s = "Results for: {0}\n".format(self.directory)
        s += "Top score : {0}\n".format(self.topScore)
        s += "Avg score : {0}\n".format(self.avgScore)
        s += "Top rms   : {0}\n".format(self.topRms)
        s += "Avg rms   : {0}\n".format(self.avgRms)
        s += "Top maxsub: {0}\n".format(self.topMaxsub)
        s += "Avg maxsub: {0}\n".format(self.avgMaxsub)
        return s
|
from datetime import date
from typing import Tuple, Generator
from pidriver.feature import feature
# In DEBUG_MODE, stub out the InkyPHAT display driver so the module can run
# without the e-ink hardware / inky library installed.
if feature("DEBUG_MODE"):
    class InkyPHAT:
        # Geometry and palette constants mirroring the real inky.InkyPHAT.
        WIDTH: int = 212
        HEIGHT: int = 104
        BLACK: int = 0
        WHITE: int = 255
        RED: int = 127
else:
    from inky import InkyPHAT
from PIL import Image, ImageDraw, ImageFont
from font_hanken_grotesk import HankenGroteskMedium
from pidriver.data import get_semester_file, Period

# Layout constants derived from the display geometry.
HALF_H = InkyPHAT.HEIGHT // 2
QUARTER_H = InkyPHAT.HEIGHT // 4
DIVIDER = 160  # x of the vertical divider between event text and numbers
def draw_square(
    canvas: Image,
    x1: int = 0,
    y1: int = 0,
    x2: int = InkyPHAT.WIDTH,
    y2: int = InkyPHAT.HEIGHT,
    color=InkyPHAT.BLACK,
) -> None:
    """Fill the rectangle [x1, x2) x [y1, y2) of *canvas* with *color*."""
    for row in range(y1, y2):
        for col in range(x1, x2):
            canvas.putpixel((col, row), color)
def draw_dithered_square(
    canvas: Image,
    x1: int = 0,
    y1: int = 0,
    x2: int = InkyPHAT.WIDTH,
    y2: int = InkyPHAT.HEIGHT,
    color=InkyPHAT.BLACK,
) -> None:
    """Fill the rectangle [x1, x2) x [y1, y2) with a checkerboard of *color*
    and white.

    Equivalent to the original toggle-per-pixel walk (including its
    end-of-row correction for even widths): a cell is coloured exactly when
    (col - x1) + (row - y1) is even.
    """
    for row in range(y1, y2):
        for col in range(x1, x2):
            coloured = ((col - x1) + (row - y1)) % 2 == 0
            canvas.putpixel((col, row), color if coloured else InkyPHAT.WHITE)
def draw_vertical_line(
    canvas: Image,
    v: int = 0,
    y1: int = 0,
    y2: int = InkyPHAT.HEIGHT,
    color=InkyPHAT.RED,
) -> None:
    """Draw a one-pixel vertical line at x == v spanning [y1, y2)."""
    for row in range(y1, y2):
        canvas.putpixel((v, row), color)
def draw_horizontal_line(
    canvas: Image, h: int = 0, x1: int = 0, x2: int = InkyPHAT.WIDTH, color=InkyPHAT.RED
) -> None:
    """Draw a one-pixel horizontal line at y == h spanning [x1, x2)."""
    for col in range(x1, x2):
        canvas.putpixel((col, h), color)
def determine_time_length(start: date, end: date, scaling: float = None) -> int:
    """Number of days from *start* to *end*; when *scaling* is given, the
    day count is scaled and truncated to int (pixels per day)."""
    days = (end - start).days
    if scaling is None:
        return days
    return int(days * scaling)
def calculate_period_delta(
    semester_period: Period, period: Period, scaling_factor: float = 1.0
) -> Tuple:
    """Return (start_offset, end_offset) of *period*, in scaled days counted
    from the semester start."""
    start_offset = determine_time_length(
        semester_period.start, period.start, scaling_factor
    )
    end_offset = determine_time_length(
        semester_period.start, period.end, scaling_factor
    )
    return (start_offset, end_offset)
def calculate_times(config, *, key=None, struct=None, scaling_factor: float = 1.0):
    """Return (start_offset, end_offset) in scaled days for either
    ``config[key]`` (when *key* is given) or a raw mapping *struct*.

    Raises:
        ValueError: if neither *key* nor *struct* is supplied.
    """
    if key:
        # NOTE(review): this branch mixes attribute access
        # (config.period.start, config[key].start) with dict-style access
        # (config["start"], config[key]["end"]); at most one style can match
        # the real config object — confirm which and make them agree.
        return (
            determine_time_length(
                config.period.start, config[key].start, scaling_factor
            ),
            determine_time_length(config["start"], config[key]["end"], scaling_factor),
        )
    if struct:
        return (
            determine_time_length(config["start"], struct["start"], scaling_factor),
            determine_time_length(config["start"], struct["end"], scaling_factor),
        )
    raise ValueError("key or struct is required")
def create_new_image() -> Image:
    """Create a blank palette-mode ("P") image sized for the InkyPHAT."""
    return Image.new("P", size=(InkyPHAT.WIDTH, InkyPHAT.HEIGHT))
def draw_progress_bar(
    img: Image,
    percent: float = 0.25,
    x1: int = 0,
    y1: int = 0,
    x2: int = InkyPHAT.WIDTH,
    y2: int = InkyPHAT.HEIGHT,
    color=InkyPHAT.BLACK,
) -> Image:
    """Fill the leftmost *percent* of the box [x1, x2) x [y1, y2) with
    *color* and return the mutated image.

    BUG FIX: the signature promised an Image but the function returned None;
    it now returns *img*, matching the annotation (existing callers that
    ignored the return value are unaffected).
    """
    progress = x1 + int((x2 - x1) * percent)
    draw_square(img, x1=x1, y1=y1, x2=progress, y2=y2, color=color)
    return img
def draw_text(
    img: Image,
    text: str,
    x1: int = 0,
    y1: int = 0,
    x2: int = InkyPHAT.WIDTH,
    y2: int = InkyPHAT.HEIGHT,
    color=InkyPHAT.BLACK,
) -> None:
    """Draw *text* centred inside the box [x1, x2) x [y1, y2).

    The font starts at 20pt and shrinks one point at a time until the
    rendered text fits inside the box.

    Raises:
        ValueError: if the text does not fit even at the smallest size.
    """
    assert x1 < x2
    assert x1 >= 0
    draw = ImageDraw.Draw(img)
    font = 20  # starting point size
    text_w = 0
    text_h = 0
    hanked_medium = ImageFont.truetype(HankenGroteskMedium, font)
    # Shrink the font until the rendered text fits inside the box.
    while font > 0:
        text_w, text_h = hanked_medium.getsize(text)
        if (x2 - x1 - text_w) > 0 and (y2 - y1 - text_h) > 0:
            break
        font -= 1
        hanked_medium = ImageFont.truetype(HankenGroteskMedium, font)
    if font == 0:
        raise ValueError("Text not able to be displayed")
    # Centre the text within the box.
    text_x = x1 + (x2 - x1 - text_w) // 2
    text_y = y1 + (y2 - y1 - text_h) // 2
    draw.text((text_x, text_y), text, color, font=hanked_medium)
def sunday_tick_marks(
    period: Period, scaling_factor: float
) -> Generator[int, None, None]:
    """Yield the scaled x offset of every Sunday within *period*.

    BUG FIX: the first offset was computed as ``6 - isoweekday()``, which
    lands on Saturday (isoweekday: Mon=1 … Sun=7) and goes negative when the
    period starts on a Sunday. ``(7 - isoweekday()) % 7`` is the distance to
    the first Sunday on or after the start date.
    """
    first_sunday = (7 - period.start.isoweekday()) % 7
    total_length = (period.end - period.start).days
    days = first_sunday
    while days < total_length:
        yield int(days * scaling_factor)
        days += 7
def draw_semester_display(y1: int = HALF_H, y2: int = InkyPHAT.HEIGHT) -> Image:
    """Render the semester progress image.

    Layout: top-left shows up to two upcoming events, top-right shows days
    remaining and percent complete; the bottom half is a progress bar over
    the semester with dithered midterm (black) / finals (red) spans and a
    red tick on each Sunday.
    """
    today = date.today()
    config = get_semester_file()
    time_length = determine_time_length(config.period.start, config.period.end)
    # Pixels per day across the full display width.
    scaling_factor = InkyPHAT.WIDTH / time_length
    completed_duration = determine_time_length(
        start=config.period.start, end=today, scaling=scaling_factor
    )
    midterm_start, midterm_end = calculate_period_delta(
        semester_period=config.period,
        period=config.midterms,
        scaling_factor=scaling_factor,
    )
    finals_start, finals_end = calculate_period_delta(
        semester_period=config.period,
        period=config.finals,
        scaling_factor=scaling_factor,
    )
    img = create_new_image()
    # Fraction of the semester elapsed, clamped to 100%.
    # BUG FIX: the divisor was the magic number 212 (a hard-coded copy of the
    # display width); derive it from InkyPHAT.WIDTH so the math stays correct
    # if the panel geometry changes.
    p = min(completed_duration / InkyPHAT.WIDTH, 1)
    draw_square(img, color=InkyPHAT.WHITE)
    draw_progress_bar(img, percent=p, y1=HALF_H, y2=y2)
    draw_horizontal_line(img, h=HALF_H)
    draw_horizontal_line(img, h=QUARTER_H)
    draw_vertical_line(img, v=DIVIDER, y2=HALF_H)
    # Days remaining (top-right upper cell).
    draw_text(
        img,
        str(time_length - (date.today() - config.period.start).days),
        x1=DIVIDER,
        y1=-4,
        y2=QUARTER_H,
    )
    # Percent complete (top-right lower cell).
    draw_text(
        img, " {}% ".format(int(p * 100)), x1=DIVIDER, y1=QUARTER_H - 4, y2=HALF_H
    )
    # Upcoming events in the top-left cells.
    top_text: str
    bottom_text: str
    if len(config.events) >= 2:
        first_event, second_event = config.events[0], config.events[1]
        top_text = "{} - {}/{}".format(
            first_event.name, first_event.date.month, first_event.date.day
        )
        bottom_text = "{} - {}/{}".format(
            second_event.name, second_event.date.month, second_event.date.day
        )
    elif len(config.events) >= 1:
        first_event = config.events[0]
        top_text = "{} - {}/{}".format(
            first_event.name, first_event.date.month, first_event.date.day
        )
        bottom_text = ""
    else:
        top_text = "No events"
        bottom_text = ""
    draw_text(img, top_text, x2=DIVIDER, y1=0, y2=QUARTER_H)
    draw_text(img, bottom_text, x2=DIVIDER, y1=QUARTER_H, y2=HALF_H)
    draw_dithered_square(
        img, x1=midterm_start, x2=midterm_end, y1=y1, color=InkyPHAT.BLACK
    )
    draw_dithered_square(img, x1=finals_start, x2=finals_end, y1=y1, color=InkyPHAT.RED)
    # TODO: Reimplement breaks
    # for struct in config["breaks"]:
    #     start, end = calculate_times(
    #         config, struct=struct, scaling_factor=scaling_factor
    #     )
    #     draw_square(img, x1=start, x2=end, y1=y1, color=InkyPHAT.RED)
    for sunday in sunday_tick_marks(config.period, scaling_factor=scaling_factor):
        draw_vertical_line(img, v=sunday, y1=y1)
    return img
def draw_display_message(text: str):
    """Render *text* centred on a blank image and push it to the InkyPHAT.

    Unlike ``draw_text`` the 20pt font size is fixed, so very long strings
    may overflow the panel edges.
    """
    # Set up properties of eInk display
    inky_display = InkyPHAT("red")
    inky_display.set_border(inky_display.BLACK)
    hanked_medium = ImageFont.truetype(HankenGroteskMedium, 20)
    img = Image.new("P", size=(InkyPHAT.WIDTH, InkyPHAT.HEIGHT))
    draw = ImageDraw.Draw(img)
    # Centre the text on the panel.
    text_w, text_h = hanked_medium.getsize(text)
    text_x = (InkyPHAT.WIDTH - text_w) // 2
    text_y = (InkyPHAT.HEIGHT - text_h) // 2
    draw.text((text_x, text_y), text, InkyPHAT.BLACK, font=hanked_medium)
    inky_display.set_image(img)
    inky_display.show()
# Choose the draw_to_display implementation at import time: in DEBUG_MODE the
# rendered image is saved to disk instead of being pushed to the e-ink panel.
if feature("DEBUG_MODE"):
    def draw_to_display():
        """Render the semester display and save it to test.bmp (debug)."""
        img = draw_semester_display()
        img.save("test.bmp")
else:
    def draw_to_display():
        """Render the semester display and push it to the InkyPHAT panel."""
        # Set up properties of eInk display
        inky_display = InkyPHAT("red")
        inky_display.set_border(inky_display.BLACK)
        # Render the semester progress image
        img = draw_semester_display()
        # Display generated semester progress image
        inky_display.set_image(img)
        inky_display.show()
|
"""Package just imports contents of serve module
"""
from sporran.serve import *
|
def headers():
    """Return the tab-separated column header line (no trailing newline)."""
    return "\t".join((
        "captured-at",
        "consumption(W)",
        "pvpower(kW)",
        "consumption+pvpower(W)",
        "consumption-pvpower(W)",
    ))


def write_result(fname, consuption, power, dt):
    """Append one measurement row to the TSV file *fname*.

    Args:
        fname: path of the TSV file (opened in append mode).
        consuption: consumption in watts (name kept, sic, for compatibility
            with keyword callers).
        power: PV power in kilowatts; converted to watts for the sum/diff
            columns.
        dt: datetime of the capture.
    """
    with open(fname, "a") as f:
        line = (dt.strftime("%Y-%m-%dT%H:%M:%S") + "\t"
                + str(consuption) + "\t"
                + str(power) + "\t"
                + str(consuption + (power*1000)) + "\t"
                + str(consuption - (power*1000)) + "\n")
        f.write(line)


def write_headers(fname):
    """Append the header line to *fname*.

    BUG FIX: a newline is now written after the header, so the first data
    row no longer ends up glued to the header on the same line.
    """
    with open(fname, "a") as f:
        f.write(headers() + "\n")
def print_headers():
    """Print the TSV header line to stdout."""
    header_line = headers()
    print(header_line)
def print_result(consuption, power, dt):
    """Print one measurement row (same format as write_result) to stdout."""
    watts = power * 1000
    fields = [
        dt.strftime("%Y-%m-%dT%H:%M:%S"),
        str(consuption),
        str(power),
        str(consuption + watts),
        str(consuption - watts),
    ]
    print("\t".join(fields))
|
import numpy as np
import torch
import torch.nn as nn
import os
import matplotlib.pyplot as plt
import torchvision.transforms as transforms
from utils.opt import parse_option
from torch.utils.tensorboard import SummaryWriter
from torchvision.datasets import mnist
from torch.nn import CrossEntropyLoss
from torch.optim import SGD
from torch.utils.data import DataLoader
from trainval import train
from evaluate import validate
from utils.utils import save_checkpoint, get_optimizer
from model.LeNet import LeNet5
opt = parse_option()
if __name__ == '__main__':
    # Download and create the MNIST datasets, resized to 32x32 as LeNet-5
    # expects.
    train_dataset = mnist.MNIST(
        root='./train', download=True, train=True, transform=transforms.Compose([
            transforms.Resize((32, 32)), transforms.ToTensor()]))
    val_dataset = mnist.MNIST(root='./test', download=True, train=False, transform=transforms.Compose([
        transforms.Resize((32, 32)), transforms.ToTensor()]))
    # define the data loaders
    train_loader = DataLoader(train_dataset, opt.batch_size)
    val_loader = DataLoader(val_dataset, opt.batch_size)
    model = LeNet5()
    print(model)
    optimizer = get_optimizer(opt, model)
    criterion = nn.CrossEntropyLoss()
    best_accuracy = 0
    iter = 0  # global step counter threaded through train()
    for epoch in range(opt.epoch):
        # train for one epoch
        iter, loss = train(train_loader, model, criterion,
                           optimizer, epoch, iter=iter)
        # evaluate on the validation set
        loss, accuracy = validate(
            val_loader, model, criterion, epoch)
        # BUG FIX: higher accuracy is better. The original used `<` and
        # min(), which tracked the *worst* epoch as "best" and saved the
        # wrong checkpoint.
        is_best = accuracy > best_accuracy
        best_accuracy = max(accuracy, best_accuracy)
        # Save latest weights, plus a copy of the best ones when improved.
        save_checkpoint(
            {"state_dict": model,
             "epoch": epoch + 1,
             "accuracy": accuracy,
             "optimizer": optimizer.state_dict(),
             }, is_best, opt.SAVE_DIR, 'checkpoint.pth')
        print('accuracy: {:.2f}%'.format(100 * accuracy))
    final_model_state_file = os.path.join(opt.SAVE_DIR, 'final_state.pth')
    print('saving final model state to {}'.format(final_model_state_file))
    torch.save(model.state_dict(), final_model_state_file)
    print('Done!')
|
# Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A helper for jobs that have been created on the Quantum Engine."""
import copy
import datetime
from typing import Dict, List, Optional, Tuple, TYPE_CHECKING
import cirq
from cirq_google.engine import calibration
from cirq_google.engine.abstract_job import AbstractJob
if TYPE_CHECKING:
from cirq_google.engine.abstract_local_program import AbstractLocalProgram
from cirq_google.engine.abstract_local_processor import AbstractLocalProcessor
from cirq_google.engine.abstract_local_engine import AbstractLocalEngine
class AbstractLocalJob(AbstractJob):
    """A job that handles labels and descriptions locally in-memory.

    This class is designed to make writing custom AbstractJob objects
    that function in-memory easier. This class will handle basic functionality
    expected to be common across all local implementations.

    Implementors of this class should write the following functions:
      - Status functions: execution_status, failure
      - Action functions: cancel, delete
      - Result functions: results, batched_results, calibration_results

    Attributes:
        processor_ids: A string list of processor ids that this job can be run on.
        processor_id: If provided, the processor id that the job was run on.
            If not provided, assumed to be the first element of processor_ids
        parent_program: Program containing this job
        repetitions: number of repetitions for each parameter set
        sweeps: list of Sweeps that this job should iterate through.
    """
    # (doc fix: removed a stray backtick line from the original docstring)

    def __init__(
        self,
        *,
        job_id: str,
        parent_program: 'AbstractLocalProgram',
        repetitions: int,
        sweeps: List[cirq.Sweep],
        processor_id: str = '',
    ):
        self._id = job_id
        self._processor_id = processor_id
        self._parent_program = parent_program
        self._repetitions = repetitions
        self._sweeps = sweeps
        self._create_time = datetime.datetime.now()
        self._update_time = datetime.datetime.now()
        self._description = ''
        self._labels: Dict[str, str] = {}

    def engine(self) -> 'AbstractLocalEngine':
        """Returns the parent program's `AbstractEngine` object."""
        return self._parent_program.engine()

    def id(self) -> str:
        """Returns the identifier of this job."""
        return self._id

    def program(self) -> 'AbstractLocalProgram':
        """Returns the parent `AbstractLocalProgram` object."""
        return self._parent_program

    def create_time(self) -> 'datetime.datetime':
        """Returns when the job was created."""
        return self._create_time

    def update_time(self) -> 'datetime.datetime':
        """Returns when the job was last updated."""
        return self._update_time

    def description(self) -> str:
        """Returns the description of the job."""
        return self._description

    def set_description(self, description: str) -> 'AbstractJob':
        """Sets the description of the job.

        Params:
            description: The new description for the job.

        Returns:
             This AbstractJob.
        """
        self._description = description
        self._update_time = datetime.datetime.now()
        return self

    def labels(self) -> Dict[str, str]:
        """Returns a copy of the labels of the job."""
        return copy.copy(self._labels)

    def set_labels(self, labels: Dict[str, str]) -> 'AbstractJob':
        """Sets (overwriting) the labels for a previously created quantum job.

        Params:
            labels: The entire set of new job labels.

        Returns:
             This AbstractJob.
        """
        self._labels = copy.copy(labels)
        self._update_time = datetime.datetime.now()
        return self

    def add_labels(self, labels: Dict[str, str]) -> 'AbstractJob':
        """Adds new labels to a previously created quantum job.

        Params:
            labels: New labels to add to the existing job labels.

        Returns:
             This AbstractJob.
        """
        self._update_time = datetime.datetime.now()
        # dict.update is the idiomatic equivalent of the key-by-key copy.
        self._labels.update(labels)
        return self

    def remove_labels(self, keys: List[str]) -> 'AbstractJob':
        """Removes labels with given keys from the labels of a previously
        created quantum job.

        Params:
            keys: Label keys to remove from the existing job labels.

        Raises:
            KeyError: if any key is not present on the job.

        Returns:
             This AbstractJob.
        """
        self._update_time = datetime.datetime.now()
        for key in keys:
            del self._labels[key]
        return self

    def processor_ids(self) -> List[str]:
        """Returns the processor id provided when the job was created."""
        return [self._processor_id]

    def get_repetitions_and_sweeps(self) -> Tuple[int, List[cirq.Sweep]]:
        """Returns the repetitions and sweeps for the job.

        Returns:
            A tuple of the repetition count and list of sweeps.
        """
        return (self._repetitions, self._sweeps)

    def get_processor(self) -> 'AbstractLocalProcessor':
        """Returns the AbstractProcessor for the processor the job is/was run on,
        if available, else None."""
        return self.engine().get_processor(self._processor_id)

    def get_calibration(self) -> Optional[calibration.Calibration]:
        """Returns the recorded calibration at the time when the job was created,
        from the parent Engine object."""
        return self.get_processor().get_latest_calibration(int(self._create_time.timestamp()))
|
import rclpy
from rclpy.node import Node
from sensor_msgs.msg import Imu, Temperature, MagneticField
from geometry_msgs.msg import Vector3
import subprocess
from .src.bwt901cl import BWT901CL
class Imu901cl(Node):
    """ROS2 node that polls a BWT901CL IMU over serial and republishes its
    readings as Imu, MagneticField, Temperature and Angle topics."""

    def __init__(self, time_interval=1.0):
        """Create the publishers, a poll timer firing every *time_interval*
        seconds, and open the sensor on /dev/ttyUSB0."""
        super().__init__('imu_bwt901cl')
        self.pub_imu = self.create_publisher(Imu, '/sensor/bwt901cl/Imu', 10)
        self.pub_mag = self.create_publisher(MagneticField, '/sensor/bwt901cl/MagneticField', 10)
        self.pub_tmp = self.create_publisher(Temperature, '/sensor/bwt901cl/Temperature', 10)
        self.pub_ang = self.create_publisher(Vector3, '/sensor/bwt901cl/Angle', 10)
        self.tmr = self.create_timer(time_interval, self.timer_callback)
        # HACK: widens the serial device permissions via sudo at runtime; a
        # udev rule or dialout group membership would be the proper fix.
        subprocess.call("sudo chmod 777 /dev/ttyUSB0", shell=True)
        self.imu_sensor = BWT901CL("/dev/ttyUSB0")

    def timer_callback(self):
        """Read one sample from the sensor and publish all derived messages."""
        msg_imu = Imu()
        msg_mag = MagneticField()
        msg_tmp = Temperature()
        msg_ang = Vector3()
        angle, angular_velocity, accel, temp, magnetic, quaternion, time = self.imu_sensor.getData()
        # Temperature
        msg_tmp.temperature = temp
        self.pub_tmp.publish(msg_tmp)
        # Magnetic field (cast to float for the message field types)
        msg_mag.magnetic_field.x = float(magnetic[0])
        msg_mag.magnetic_field.y = float(magnetic[1])
        msg_mag.magnetic_field.z = float(magnetic[2])
        self.pub_mag.publish(msg_mag)
        # Orientation, angular velocity and linear acceleration
        msg_imu.orientation.x = quaternion[0]
        msg_imu.orientation.y = quaternion[1]
        msg_imu.orientation.z = quaternion[2]
        msg_imu.orientation.w = quaternion[3]
        msg_imu.angular_velocity.x = angular_velocity[0]
        msg_imu.angular_velocity.y = angular_velocity[1]
        msg_imu.angular_velocity.z = angular_velocity[2]
        msg_imu.linear_acceleration.x = accel[0]
        msg_imu.linear_acceleration.y = accel[1]
        msg_imu.linear_acceleration.z = accel[2]
        self.pub_imu.publish(msg_imu)
        # Euler angles as a plain Vector3
        msg_ang.x = angle[0]
        msg_ang.y = angle[1]
        msg_ang.z = angle[2]
        self.pub_ang.publish(msg_ang)
def main(args=None):
    """Entry point: spin the IMU node (polling at 10 Hz) until shutdown."""
    print('Hi from bwt901cl_pkg.')
    rclpy.init(args=args)
    node_imu_bwt901cl = Imu901cl(time_interval=0.1)
    rclpy.spin(node_imu_bwt901cl)
    node_imu_bwt901cl.destroy_node()
    rclpy.shutdown()
if __name__ == '__main__':
main()
|
# Print 6 rows of an interlocking-hexagon ASCII chain; the connector row is
# printed only between rows (5 times), not after the last one.
for i in range(0, 6):
    print(""" / __ \ \__/ / __ \ \__/ / __ \ \__/ / __ \ \__/ / __ \ \__/ / __ \ \_
/ / \ \____/ / \ \____/ / \ \____/ / \ \____/ / \ \____/ / \ \__""")
    if i < 5:
        print("""\ \__/ / __ \ \__/ / __ \ \__/ / __ \ \__/ / __ \ \__/ / __ \ \__/ / _
 \____/ / \ \____/ / \ \____/ / \ \____/ / \ \____/ / \ \____/ /""")
import itertools as itl
correct = set(range(1,10))
def three(l):
    """Split the 9-item sequence *l* into its three consecutive triples."""
    return [l[i:i + 3] for i in range(0, 9, 3)]
def check(s):
    """Return True iff the 9x9 grid *s* is a fully valid sudoku solution."""
    # Rows and columns must each contain every digit 1..9.
    for group in list(s) + list(zip(*s)):
        if set(group) != correct:
            return False
    # 3x3 boxes: take a band of three rows, transpose, regroup into triples.
    for band in three(s):
        for box_cols in three(list(zip(*band))):
            if set(itl.chain(*box_cols)) != correct:
                return False
    return True
def solve(n, s):
    """Print the case header, then either 'Serendipity' (already valid) or
    every single swap that repairs the grid; the grid is restored after each
    trial swap, so *s* is left unchanged."""
    print("Case #{0}:".format(n+1))
    if check(s):
        print("Serendipity")
        return
    for p0 in range(81):
        for p1 in range(p0, 81):
            x1, y1 = divmod(p0, 9)
            x2, y2 = divmod(p1, 9)
            s[x1][y1], s[x2][y2] = s[x2][y2], s[x1][y1]
            if check(s):
                print("({},{}) <-> ({},{})".format(x1+1, y1+1, x2+1, y2+1))
            s[x1][y1], s[x2][y2] = s[x2][y2], s[x1][y1]
[solve(n, [[int(v) for v in input().split(' ')] for l in range(9)]) for n in range(int(input()))] |
default_app_config = 'apps.posts.apps.PostsConfig'
|
import json
from django.contrib.auth.decorators import permission_required
from django.core.urlresolvers import reverse
from django.db import models
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectMixin
from .models import Suggestion
from auxiliary.decorators import login_required_ajax
from auxiliary.serializers import PromiseAwareJSONEncoder
class PendingSuggestionsCountView(View):
    """Return the pending suggestions for Model/Instance.

    The view can return the results for multiple models/objects. Pass each in
    the query string's ``for`` argument, e.g::

        ?for=auxiliary.Tidbit&for=events.Event&for=mks.Member-801

    The last one in the example above gets pending for Member instance with
    pk=801.

    Note: `unicode`, `models.get_model` and the `mimetype=` kwarg below mark
    this as Python 2 / legacy-Django code.
    """

    def get_models_and_instances(self, request):
        "Yields the models/instances named in the GET param ``for``"
        items = request.GET.getlist('for')
        for item in items:
            try:
                # "app.Model-pk" -> instance; bare "app.Model" -> model class
                model_name, pk = item.split('-', 1)
            except ValueError:
                model_name, pk = item, None
            model = models.get_model(*model_name.split('.', 1))
            if pk is None:
                yield model
            else:
                instance = model.objects.get(pk=pk)
                yield instance

    def get_pending(self, request):
        "Maps a display key per requested model/instance to its pending QuerySet"
        res = {}
        for model_or_instance in self.get_models_and_instances(request):
            if isinstance(model_or_instance, models.Model):
                key = unicode(model_or_instance)
            else:
                key = unicode(model_or_instance._meta.verbose_name)
            res[key] = Suggestion.objects.get_pending_suggestions_for(
                model_or_instance)
        return res

    def prepare_pending(self, result, can_apply=False):
        "Prepares the QuerySet for the response: keeps only non-zero counts"
        res = {}
        for key in result:
            count = result[key].count()
            if count:
                res[key] = count
        return res

    def get(self, request, *args, **kwargs):
        "Returns a JSON mapping of model/instance name -> pending count"
        res = self.get_pending(request)
        can_apply = request.user.has_perm('suggestions.autoapply_suggestion')
        res = self.prepare_pending(res, can_apply=can_apply)
        return HttpResponse(
            json.dumps(res, ensure_ascii=False, cls=PromiseAwareJSONEncoder),
            mimetype='application/json')
class PendingSuggestionsView(PendingSuggestionsCountView):
    """Like the count view, but returns full serialized suggestion entries
    (label, action URLs, suggester info) instead of bare counts, and
    requires an authenticated AJAX caller."""

    def prepare_pending(self, result, can_apply=False):
        "Serializes each pending QuerySet for the JSON response"
        for key in result:
            result[key] = [
                {
                    'label': unicode(x),
                    # Action URLs/email are only exposed to users with the
                    # apply permission (False otherwise, via `and` chaining).
                    'apply_url': can_apply and x.can_auto_apply and reverse(
                        'suggestions_auto_apply', kwargs={'pk': x.pk}),
                    'reject_url': can_apply and reverse(
                        'suggestions_reject', kwargs={'pk': x.pk}),
                    'by': unicode(x.suggested_by),
                    'by_url': x.suggested_by.profiles.get().get_absolute_url(),
                    'by_email': can_apply and x.suggested_by.email,
                    'suggested_at': x.suggested_at.strftime('%Y-%m-%d %H:%M'),
                }
                for x in result[key]]
        return result

    @method_decorator(login_required_ajax)
    def get(self, request, *args, **kwargs):
        return super(PendingSuggestionsView, self).get(
            request, *args, **kwargs)
class AutoApplySuggestionView(SingleObjectMixin, View):
    "Auto apply a suggestion; responds with a JSON success payload"
    model = Suggestion

    # BUG FIX: the permission label was misspelled 'suggesions.…', which can
    # never match the real app permission ('suggestions', the spelling used
    # by PendingSuggestionsCountView.get above), so the check always failed.
    @method_decorator(permission_required('suggestions.autoapply_suggestion',
                                          raise_exception=True))
    def post(self, request, *args, **kwargs):
        suggestion = self.get_object()
        if not suggestion.can_auto_apply:
            res = {
                'success': False,
                'message': "Can't auto apply this suggestion"
            }
        else:
            suggestion.auto_apply(request.user)
            res = {
                'success': True,
            }
        return HttpResponse(
            json.dumps(res, ensure_ascii=False, cls=PromiseAwareJSONEncoder),
            mimetype='application/json')
class RejectSuggestionView(SingleObjectMixin, View):
    "Reject a suggestion with an optional POSTed reason"
    model = Suggestion

    # BUG FIX: same 'suggesions' -> 'suggestions' permission-label typo as in
    # AutoApplySuggestionView.
    @method_decorator(permission_required('suggestions.autoapply_suggestion',
                                          raise_exception=True))
    def post(self, request, *args, **kwargs):
        suggestion = self.get_object()
        reason = request.POST.get('reason', 'Unknown')
        suggestion.reject(request.user, reason)
        res = {
            'success': True,
        }
        return HttpResponse(
            json.dumps(res, ensure_ascii=False, cls=PromiseAwareJSONEncoder),
            mimetype='application/json')
|
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import unittest
import httplib2
import six
from six.moves import http_client as http
from glance.tests import functional
TEST_VAR_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', 'var'))
class TestSSL(functional.FunctionalTest):
    """Functional tests verifying SSL communication"""

    def setUp(self):
        """Locate the test key/cert/CA files and disable the suite if any is
        missing; the `inited` flag makes the file probing run only once."""
        super(TestSSL, self).setUp()
        if getattr(self, 'inited', False):
            return
        self.inited = False
        self.disabled = True
        # NOTE (stevelle): Test key/cert/CA file created as per:
        # http://nrocco.github.io/2013/01/25/
        # self-signed-ssl-certificate-chains.html
        # For these tests certificate.crt must be created with 'Common Name'
        # set to 127.0.0.1
        self.key_file = os.path.join(TEST_VAR_DIR, 'privatekey.key')
        if not os.path.exists(self.key_file):
            self.disabled_message = ("Could not find private key file %s" %
                                     self.key_file)
            self.inited = True
            return
        self.cert_file = os.path.join(TEST_VAR_DIR, 'certificate.crt')
        if not os.path.exists(self.cert_file):
            self.disabled_message = ("Could not find certificate file %s" %
                                     self.cert_file)
            self.inited = True
            return
        self.ca_file = os.path.join(TEST_VAR_DIR, 'ca.crt')
        if not os.path.exists(self.ca_file):
            self.disabled_message = ("Could not find CA file %s" %
                                     self.ca_file)
            self.inited = True
            return
        # All three files found: enable the suite.
        self.inited = True
        self.disabled = False

    def tearDown(self):
        super(TestSSL, self).tearDown()
        # No per-test cleanup needed once initialisation has happened.
        if getattr(self, 'inited', False):
            return

    @unittest.skipIf(six.PY3, 'SSL handshakes are broken in PY3')
    def test_ssl_ok(self):
        """Make sure the public API works with HTTPS."""
        self.cleanup()
        # Pass our key/cert/ca attributes through to the spawned servers.
        self.start_servers(**self.__dict__.copy())
        path = "https://%s:%d/versions" % ("127.0.0.1", self.api_port)
        https = httplib2.Http(ca_certs=self.ca_file)
        response, content = https.request(path, 'GET')
        self.assertEqual(http.OK, response.status)
|
# Generated by Django 3.0.6 on 2020-05-30 21:51
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: creates the `evento` table (title, description, event
    date, auto-updated creation timestamp, location) with a cascading FK to
    the configured auth user model."""

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Evento',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('titulo', models.CharField(max_length=100, verbose_name='título')),
                ('descricao', models.TextField(blank=True, null=True)),
                ('data_evento', models.DateTimeField(verbose_name='data do evento')),
                ('data_criacao', models.DateTimeField(auto_now=True, verbose_name='data da criação')),
                ('local', models.TextField(blank=True, null=True)),
                ('usuario', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'db_table': 'evento',
            },
        ),
    ]
|
#!/usr/bin/env python
# coding: utf-8
import re, string
import os
import time
import jieba
import argparse
from smart_open import open
def contain_city(line, city_list):
    """Return True if the jieba-segmented *line* contains any name from
    *city_list* as a whole token."""
    # segment the sentence using jieba (join-then-split kept to preserve the
    # original tokenisation exactly)
    segmented = ' '.join(jieba.cut(line, cut_all=False))
    words = set(segmented.split(' '))
    return any(city in words for city in city_list)
def process(root_path, file_list, city_list):
    # Filter each web-data file: always keep WARC metadata lines, keep content
    # lines that mention a known city, and write the survivors (original,
    # unmodified text) to <root_path>/new/<filename>.
    for i, filename in enumerate(file_list):
        if i % 10 == 0:
            print('Currently the {}th document: '.format(i) + filename)
        # ------------ your website data
        with open(root_path + '/webdata/' + filename, encoding='utf-8') as fin:
            with open(root_path + '/new/' + filename, 'w', encoding='utf-8') as fout:
                for line in fin:
                    l = line
                    # NOTE(review): lines from file iteration keep their
                    # trailing newline, so l == '' can never be true here;
                    # presumably this meant to skip blank lines — confirm.
                    if l == '' or l.startswith('\r'):
                        continue
                    # drop alphabetic characters
                    l = re.sub(r'[a-zA-Z]', '', l)
                    # drop digits and punctuations
                    l = re.sub('[%s]' % (string.punctuation + string.digits), '', l)
                    # drop empty line
                    # NOTE(review): only matches a lone '\r'; a cleaned line
                    # that is '\n' or '\r\n' is not caught — verify intent.
                    if l == '\r':
                        continue
                    # City matching runs on the cleaned text, but the output
                    # below deliberately writes the original line.
                    isContain = contain_city(l, city_list)
                    if line.startswith('WARC-Target-URI:') or line.startswith('WARC-Date:'):
                        fout.write(line)
                    elif isContain:
                        # NOTE(review): `line` already ends with '\n', so this
                        # appends a blank line after each kept content line —
                        # confirm whether that spacing is intentional.
                        fout.write(line + '\n')
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    # Shard index: each invocation processes a 560-file slice of the corpus.
    parser.add_argument('--id', type=int, help='From 0')
    args = parser.parse_args()
    root_path = '/Users/joy/Desktop' # ----------------- need to be replaced
    # create directory for segment videos
    try:
        if not os.path.exists("{}/new".format(root_path)):
            os.mkdir("{}/new".format(root_path))
    except OSError:
        raise OSError("Creation of the directory {} failed"
                      .format("{}/new".format(root_path)))
    # Load the city names (first CSV column) used for filtering.
    city_list = []
    with open(root_path + '/China_Cities_Coordinates_CHN_ENG.csv') as f: # --------- cities
        skip_head = True
        for line in f:
            if skip_head:
                # Skip the CSV header row.
                skip_head = False
                continue
            else:
                city_list.append(line.split(',')[0])
    # Set lookup makes contain_city() membership tests O(1).
    city_list = set(city_list)
    jieba.disable_parallel()
    # Each shard covers 560 files out of a corpus capped at 56000.
    start = args.id * 560
    end = min((args.id + 1) * 560, 56000)
    print('Start: {}, end: {}'.format(start, end))
    file_list = [f for f in os.listdir(root_path + '/webdata') # ------------ your website data
                 if f.startswith('part-')][start:end]
    process(root_path, file_list, city_list)
|
from skimage.io import imread
from skimage.segmentation import slic
from skimage.util import img_as_float
from multiprocessing import cpu_count
from joblib import Parallel, delayed
from os.path import exists
from os.path import join
from tqdm import tqdm
from os import mkdir
import torch
def create_masks(imageList, numSegments=100, limOverseg=None):
    """Compute and cache SLIC superpixel masks and per-superpixel targets
    for every image in *imageList*, in parallel across all CPU cores.

    Args:
        imageList: object exposing `imagePath`, `targetPath`, `path` and
            `list` (an iterable of image basenames, without extension).
        numSegments: requested number of SLIC superpixels per image.
        limOverseg: oversegmentation threshold forwarded to `create_mask`
            (None disables the oversegmentation step).
    """
    # exists()+mkdir() is racy when several joblib workers start at once:
    # another worker can create the directory between the check and the call.
    # Tolerating FileExistsError makes directory creation safe to race.
    def ensure_dir(path):
        try:
            mkdir(path)
        except FileExistsError:
            pass

    # Save mask and target for image number
    def save_mask(image_number):
        # Load image/target pair
        image_path = join(imageList.imagePath, image_number + ".jpg")
        target_path = join(imageList.targetPath, image_number + ".png")
        image = img_as_float(imread(image_path))
        target = imread(target_path)
        target = torch.from_numpy(target)
        # Save paths
        saveDir = join(imageList.path, 'SuperPixels')
        maskDir = join(saveDir, '{}_sp_mask'.format(numSegments))
        targetDir = join(saveDir, '{}_sp_target'.format(numSegments))
        # Make sure the output directories exist (race-free, see ensure_dir)
        ensure_dir(saveDir)
        ensure_dir(maskDir)
        ensure_dir(targetDir)
        # Define save paths
        mask_save_path = join(maskDir, image_number + ".pt")
        target_save_path = join(targetDir, image_number + ".pt")
        # Recompute unless BOTH cached files are present. The previous
        # `not exists(a) and not exists(b)` skipped the pair when only one
        # file existed, leaving the other permanently missing.
        if not (exists(mask_save_path) and exists(target_save_path)):
            # Create mask for image/target pair
            mask, target_s = create_mask(
                image=image,
                target=target,
                numSegments=numSegments,
                limOverseg=limOverseg
            )
            torch.save(mask, mask_save_path)
            torch.save(target_s, target_save_path)

    num_cores = cpu_count()
    inputs = tqdm(imageList.list)
    # Iterate through all images utilising all CPU cores
    Parallel(n_jobs=num_cores)(delayed(save_mask)(image_number)
                               for image_number in inputs)
def create_mask(image, target, numSegments, limOverseg):
    # Build a SLIC superpixel label mask for `image` and a per-superpixel
    # label vector `target_s` derived from the dense `target` (majority vote
    # per superpixel). Returns (mask, target_s) as torch tensors.
    # Perform SLIC segmentation
    mask = slic(image, n_segments=numSegments, slic_zero=True)
    mask = torch.from_numpy(mask)
    if limOverseg is not None:
        # Oversegmentation step: split superpixels that straddle a ground
        # truth boundary so each sufficiently-large class gets its own id.
        superpixels = mask.unique().numel()
        overseg = superpixels
        for superpixel in range(superpixels):
            # overseg is decremented once per iteration and incremented per
            # split, so `superpixel + overseg` lands just past the current
            # maximum label — presumably keeping new ids contiguous (verify).
            overseg -= 1
            # Define mask for superpixel
            segment_mask = mask == superpixel
            # Classes in this superpixel
            classes = target[segment_mask].unique(sorted=True)
            # Check if superpixel is on target boundary
            on_boundary = classes.numel() > 1
            # If current superpixel is on a gt boundary
            if on_boundary:
                # Find how many of each class is in superpixel
                class_hist = torch.bincount(target[segment_mask])
                # Remove zero elements
                class_hist = class_hist[class_hist.nonzero()].float()
                # Find minority class in superpixel
                min_class = min(class_hist)
                # Is the minority class large enough for oversegmentation
                above_threshold = min_class > class_hist.sum() * limOverseg
                if above_threshold:
                    # Leaving one class in supperpixel be
                    # (classes[0], the lowest label, keeps the original id)
                    for c in classes[1:]:
                        # Adding to the oversegmentation offset
                        overseg += 1
                        # Add offset to class c in the mask
                        mask[segment_mask] += (target[segment_mask]
                                               == c).long() * overseg
    # (Re)define how many superpixels there are and create target_s
    superpixels = mask.unique().numel()
    target_s = torch.zeros(superpixels, dtype=torch.long)
    for superpixel in range(superpixels):
        # Define mask for superpixel
        segment_mask = mask == superpixel
        # Apply mask, the mode for majority class
        target_s[superpixel] = target[segment_mask].view(-1).mode()[0]
    return mask, target_s
|
from revoscalepy.computecontext.RxComputeContext import RxComputeContext
from revoscalepy.computecontext.RxInSqlServer import RxInSqlServer
from revoscalepy.computecontext.RxInSqlServer import RxSqlServerData
from revoscalepy.etl.RxImport import rx_import_datasource
class DataSource():
    """Loads training data from SQL Server through revoscalepy and exposes
    the in-SQL-Server compute context created during the load."""

    def __init__(self, connectionstring):
        """Data source remote compute context

        Args:
            connectionstring: connection string to the SQL server.
        """
        self.__connectionstring = connectionstring
        # Initialise eagerly so getcomputecontext() raises the intended
        # RuntimeError (not AttributeError) when loaddata() has not run yet.
        self.__computeContext = None

    def loaddata(self):
        """Import dbo.trainingdata from SQL Server and return the data.

        Side effect: creates the RxInSqlServer compute context that
        getcomputecontext() returns afterwards.
        """
        dataSource = RxSqlServerData(sqlQuery = "select * from dbo.trainingdata", verbose=True, reportProgress =True,
                                     connectionString = self.__connectionstring)
        self.__computeContext = RxInSqlServer(connectionString = self.__connectionstring, autoCleanup = True)
        data = rx_import_datasource(dataSource)
        return data

    def getcomputecontext(self):
        """Return the compute context created by loaddata().

        Raises:
            RuntimeError: if loaddata() has not been called yet.
        """
        if self.__computeContext is None:
            raise RuntimeError("Data must be loaded before requesting computecontext!")
        return self.__computeContext
|
from pysc2.lib import actions as sc2_actions
from pysc2.lib import features
# Model checkpoint locations.
_LOAD_MODEL_PATH = "./model/20180621-134211/"
_SAVE_MODEL_PATH = "./model/"
# define the num of input and output
_SIZE_HIGH_NET_INPUT = 20
_SIZE_HIGH_NET_OUT = 3
_SIZE_CONTROLLER_OUT = 2
_SIZE_BASE_NET_OUT = 2
_SIZE_TECH_NET_INPUT = 9
_SIZE_TECH_NET_OUT = 4
_SIZE_POP_NET_INPUT = 12
_SIZE_POP_NET_OUT = 3
_SIZE_BATTLE_NET_OUT = 2
_SIZE_FIGHT_NET_OUT = 3
MAP_CHANNELS = 10
# game steps per real-time second (used by time_wait below)
_FPS = 22.4
# Minimap feature-layer indices
_M_HEIGHT = features.MINIMAP_FEATURES.height_map.index
_M_VISIBILITY = features.MINIMAP_FEATURES.visibility_map.index
_M_CAMERA = features.MINIMAP_FEATURES.camera.index
_M_RELATIVE = features.MINIMAP_FEATURES.player_relative.index
_M_SELECTED = features.MINIMAP_FEATURES.selected.index
# Screen feature-layer indices
_S_HEIGHT = features.SCREEN_FEATURES.height_map.index
_S_VISIBILITY = features.SCREEN_FEATURES.visibility_map.index
_S_POWER = features.SCREEN_FEATURES.power.index
_S_RELATIVE = features.SCREEN_FEATURES.player_relative.index
_S_UNIT_TYPE = features.SCREEN_FEATURES.unit_type.index
_S_SELECTED = features.SCREEN_FEATURES.selected.index
_S_HITPOINT_R = features.SCREEN_FEATURES.unit_hit_points_ratio.index
_S_SHIELD_R = features.SCREEN_FEATURES.unit_shields_ratio.index
_S_DENSITY_A = features.SCREEN_FEATURES.unit_density_aa.index
# Unit type index
_MINERAL_TYPE_INDEX = 483
_GAS_TYPE_INDEX = 342
_PROBE_TYPE_INDEX = 84
_ZEALOT_TYPE_INDEX = 73
_STALKER_TYPE_INDEX = 74
_NEXUS_TYPE_INDEX = 59
_PYLON_TYPE_INDEX = 60
_ASSIMILATOR_TYPE_INDEX = 61
_FORGE_TYPE_INDEX = 63
_CANNON_TYPE_INDEX = 66
_GATEWAY_TYPE_INDEX = 62
_CYBER_TYPE_INDEX = 72
# Human-readable names for the unit type ids above.
UNIT_MAP = {
    84: "Probe",
    73: "Zealot",
    74: "Stalker",
    59: "Nexus",
    62: "Gateway",
    60: "Pylon",
    61: "Assimilator",
    342: "VespeneGeyser",
    483: "MineralField750",
    341: "MineralField",
    1961: "MineralField450",
    88: "Extractor",
    20: "Refinery",
}
# Reverse lookup: unit name -> type id.
UNIT_MAP_INV = {v: k for k, v in UNIT_MAP.items()}
# _M_RELATIVE_TYPE (values of the player_relative feature layer)
_RELATIVE_NONE = 0
_RELATIVE_SELF = 1
_RELATIVE_ALLY = 2
_RELATIVE_NEUTRAL = 3
_RELATIVE_ENEMY = 4
# Action type index
_NO_OP = sc2_actions.FUNCTIONS.no_op.id
_SMART_SCREEN = sc2_actions.FUNCTIONS.Smart_screen.id
_SELECT_ARMY = sc2_actions.FUNCTIONS.select_army.id
_SELECT_WORKER = sc2_actions.FUNCTIONS.select_idle_worker.id
# NOTE(review): _SELECT_BY_ID and _SELECT_UNIT below are both bound to
# select_unit.id — one of them is probably redundant.
_SELECT_BY_ID = sc2_actions.FUNCTIONS.select_unit.id
_CONTROL_GROUP = sc2_actions.FUNCTIONS.select_control_group.id
# NOTE(review): "_ATTACH_M" looks like a typo for "_ATTACK_M"; renaming would
# require updating every call site, so only flagging it here.
_ATTACH_M = sc2_actions.FUNCTIONS.Attack_minimap.id
_ATTACK_S = sc2_actions.FUNCTIONS.Attack_screen.id
_MOVE_S = sc2_actions.FUNCTIONS.Move_screen.id
_MOVE_M = sc2_actions.FUNCTIONS.Move_minimap.id
_SELECT_UNIT = sc2_actions.FUNCTIONS.select_unit.id
_SELECT_POINT = sc2_actions.FUNCTIONS.select_point.id
_MOVE_CAMERA = sc2_actions.FUNCTIONS.move_camera.id
_TRAIN_PROBE = sc2_actions.FUNCTIONS.Train_Probe_quick.id
_TRAIN_ZEALOT = sc2_actions.FUNCTIONS.Train_Zealot_quick.id
_TRAIN_STALKER = sc2_actions.FUNCTIONS.Train_Stalker_quick.id
_BUILD_PYLON_S = sc2_actions.FUNCTIONS.Build_Pylon_screen.id
_BUILD_ASSIMILATOR_S = sc2_actions.FUNCTIONS.Build_Assimilator_screen.id
_BUILD_FORGE_S = sc2_actions.FUNCTIONS.Build_Forge_screen.id
_BUILD_GATEWAY_S = sc2_actions.FUNCTIONS.Build_Gateway_screen.id
_BUILD_CYBER_S = sc2_actions.FUNCTIONS.Build_CyberneticsCore_screen.id
_HARVEST_S = sc2_actions.FUNCTIONS.Harvest_Gather_screen.id
# Ability ids (as opposed to the function ids above).
_A_SMART_SCREEN = sc2_actions.FUNCTIONS.Smart_screen.ability_id
_A_TRAIN_PROBE = sc2_actions.FUNCTIONS.Train_Probe_quick.ability_id
_A_TRAIN_ZEALOT = sc2_actions.FUNCTIONS.Train_Zealot_quick.ability_id
_A_TRAIN_STALKER = sc2_actions.FUNCTIONS.Train_Stalker_quick.ability_id
_A_BUILD_PYLON_S = sc2_actions.FUNCTIONS.Build_Pylon_screen.ability_id
_A_BUILD_ASSIMILATOR_S = sc2_actions.FUNCTIONS.Build_Assimilator_screen.ability_id
_A_BUILD_FORGE_S = sc2_actions.FUNCTIONS.Build_Forge_screen.ability_id
_A_BUILD_GATEWAY_S = sc2_actions.FUNCTIONS.Build_Gateway_screen.ability_id
_A_BUILD_CYBER_S = sc2_actions.FUNCTIONS.Build_CyberneticsCore_screen.ability_id
_A_ATTACK_ATTACK_MINIMAP_S = sc2_actions.FUNCTIONS.Attack_Attack_minimap.ability_id
_A_ATTACK_MINIMAP_S = sc2_actions.FUNCTIONS.Attack_minimap.ability_id
_A_ATTACK_ATTACK_SCREEN_S = sc2_actions.FUNCTIONS.Attack_Attack_screen.ability_id
_A_ATTACK_SCREEN_S = sc2_actions.FUNCTIONS.Attack_screen.ability_id
# Common action-argument constants (queueing, click type, control groups).
_NOT_QUEUED = [0]
_QUEUED = [1]
_CLICK = [0]
_SHIFT_CLICK = [1]
_DBL_CLICK = [2]
_RECALL_GROUP = [0]
_SET_GROUP = [1]
_APPEND_GROUP = [2]
_GATEWAY_GROUP_ID = [9]
_BASE_GROUP_ID = [0]
_ARMY_GROUP_ID = [3]
_ARMY_INDEX = -1
_GATEWAY_GROUP_INDEX = -9
# up, up_right, right, right_down, down, down_left, left, left_up
# (eight move targets on a 64x64 grid, `movement` pixels from the center)
center_x = 32
center_y = 32
movement = 15
move_pos_array = [[center_x, center_y - movement], [center_x + movement, center_y - movement],
                  [center_x + movement, center_y], [center_x + movement, center_y + movement],
                  [center_x, center_y + movement], [center_x - movement, center_y + movement],
                  [center_x - movement, center_y], [center_x - movement, center_y - movement],
                  ]
# screen pos
# mineral_pos = [18, 26]
# gas1_pos = [18, 38]
# gas2_pos = [45, 11]
# base_pos = [36, 35]
# minimap pos (map-specific hand-tuned coordinates)
my_sub_pos = [41, 20]  # our sub mineral pos
#enemy_sub_pos = [13, 50]
#enemy_main_pos = [41, 45]
enemy_sub_pos = [13, 50]
enemy_main_pos = [45, 47]
base_camera_pos = [19, 24]
# game difficulty
difficulty = 1
def time_wait(sec):
    """Convert a duration in (real) seconds into a whole number of game steps."""
    return int(_FPS * sec)
|
import os
import unittest
import numpy as np
from pymatgen.core.structure import Molecule
from pymatgen.io.qchem.outputs import QCOutput
from atomate.qchem.firetasks.geo_transformations import PerturbGeometry, RotateTorsion
from atomate.utils.testing import AtomateTest
__author__ = "Brandon Wood, Evan Spotte-Smith"
__email__ = "b.wood@berkeley.edu"
module_dir = os.path.dirname(os.path.abspath(__file__))
class TestGeoTransformations(AtomateTest):
    """Tests for the RotateTorsion geometry-transformation firetask."""

    @classmethod
    def setUpClass(cls):
        test_files = os.path.join(module_dir, "..", "..", "test_files")
        cls.pt_mol = Molecule.from_file(
            os.path.join(test_files, "pt_gs_wb97mv_tz_initial.xyz")
        )
        cls.pt_rot_90_mol = Molecule.from_file(
            os.path.join(test_files, "pt_rotated_90.0.xyz")
        )

    def setUp(self, lpad=False):
        super().setUp(lpad=False)

    def tearDown(self):
        pass

    def test_rotate_torsion(self):
        """Rotating the torsion by 90 degrees must reproduce the reference."""
        task = RotateTorsion(
            {"molecule": self.pt_mol, "atom_indexes": [6, 8, 9, 10], "angle": 90.0}
        )
        outcome = task.run_task({})
        rotated = Molecule.from_dict(
            outcome.as_dict()["update_spec"]["prev_calc_molecule"]
        )
        np.testing.assert_equal(self.pt_rot_90_mol.species, rotated.species)
        np.testing.assert_allclose(
            self.pt_rot_90_mol.cart_coords, rotated.cart_coords, atol=0.0001
        )
class TestPerturbGeometry(AtomateTest):
    """Tests for the PerturbGeometry firetask."""

    @classmethod
    def setUpClass(cls):
        test_files = os.path.join(module_dir, "..", "..", "test_files")
        cls.ts_init = Molecule.from_file(os.path.join(test_files, "ts_init.xyz"))
        cls.ts_perturbed = Molecule.from_file(
            os.path.join(test_files, "ts_perturbed.xyz")
        )
        cls.mode = QCOutput(os.path.join(test_files, "ts.out")).data[
            "frequency_mode_vectors"
        ][0]

    def setUp(self, lpad=False):
        super().setUp(lpad=False)

    def tearDown(self):
        pass

    def test_perturb(self):
        """Perturbing along the first frequency mode must match the reference."""
        task = PerturbGeometry(
            {"molecule": self.ts_init, "mode": self.mode, "scale": 1.0}
        )
        outcome = task.run_task({})
        perturbed = Molecule.from_dict(
            outcome.as_dict()["update_spec"]["prev_calc_molecule"]
        )
        np.testing.assert_equal(self.ts_perturbed.species, perturbed.species)
        np.testing.assert_allclose(
            self.ts_perturbed.cart_coords, perturbed.cart_coords, atol=0.0001
        )
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
|
import time
import busm
EXECUTION_TIME = 0.13
@busm.through_smtp
def smtp_sample():
    """Sample task whose completion is reported via SMTP (busm decorator)."""
    time.sleep(EXECUTION_TIME)
    print('Call smtp_sample().')
@busm.through_line
def line_sample():
    """Sample task whose completion is reported via LINE (busm decorator)."""
    time.sleep(EXECUTION_TIME)
    print('Call line_sample().')
@busm.through_telegram
def telegram_sample1():
    """Sample task reported via Telegram, bare decorator form."""
    time.sleep(EXECUTION_TIME)
    print('Call telegram_sample().')
@busm.through_telegram(subject='Message with subject.')
def telegram_sample2():
    """Sample task reported via Telegram, parameterised decorator form."""
    time.sleep(EXECUTION_TIME)
    print('Call telegram_sample2().')
@busm.through_telegram
def telegram_exception():
    """Sample task that fails, to demonstrate exception reporting."""
    time.sleep(EXECUTION_TIME)
    print('Call telegram_exception().')
    # Deliberate ZeroDivisionError so the decorator's failure path is shown.
    a = 1 / 0
if __name__ == '__main__':
    # Exercise every notification decorator in turn; the last call raises
    # ZeroDivisionError on purpose to demonstrate failure reporting.
    smtp_sample()
    line_sample()
    telegram_sample1()
    telegram_sample2()
    telegram_exception()
|
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
class Tests:
    """(pass message, fail message) tuples consumed by Report.result()."""
    lc_tool_opened = (
        "Landscape Canvas tool opened",
        "Failed to open Landscape Canvas tool"
    )
    new_graph_created = (
        "Successfully created new graph",
        "Failed to create new graph"
    )
    graph_registered = (
        "Graph registered with Landscape Canvas",
        "Failed to register graph"
    )
    preview_entity_set = (
        "Perlin Noise Gradient component Preview Entity property set to Box Shape EntityId",
        "Unexpected entity set in Perlin Noise Gradient Preview Entity property"
    )
    mixer_inbound_gradient_set_a = (
        "Gradient Mixer component Inbound Gradient extendable property set to Perlin Noise Gradient EntityId",
        "Unexpected entity set in Gradient Mixer's Inbound Gradient property"
    )
    mixer_inbound_gradient_set_b = (
        "Gradient Mixer component Inbound Gradient extendable property set to FastNoise Gradient EntityId",
        "Unexpected entity set in Gradient Mixer's Inbound Gradient property"
    )
    mixer_operation_a = (
        "Layer 1 Operation is set to Initialize",
        "Layer 1 Operation is not set to Initialize as expected"
    )
    mixer_operation_b = (
        "Layer 2 Operation is set to Average",
        "Layer 2 Operation is not set to Average as expected"
    )
newEntityId = None
def GradientMixer_NodeConstruction():
    """
    Summary:
    This test verifies a Gradient Mixer vegetation setup can be constructed through Landscape Canvas.
    Expected Behavior:
    Entities contain all required components and component references after creating nodes and setting connections
    on a Landscape Canvas graph.
    Test Steps:
     1) Create a new level
     2) Open Landscape Canvas and create a new graph
     3) Add all necessary nodes to the graph and set connections to form a Gradient Mixer setup
     4) Verify all components and component references were properly set during graph construction
    Note:
    - This test file must be called from the Open 3D Engine Editor command terminal
    - Any passed and failed tests are written to the Editor.log file.
            Parsing the file or running a log_monitor are required to observe the test results.
    :return: None
    """
    import azlmbr.bus as bus
    import azlmbr.editor as editor
    import azlmbr.editor.graph as graph
    import azlmbr.landscapecanvas as landscapecanvas
    import azlmbr.legacy.general as general
    import azlmbr.math as math
    import azlmbr.paths
    import editor_python_test_tools.hydra_editor_utils as hydra
    import editor_python_test_tools.prefab_utils as PrefabUtils
    from editor_python_test_tools.utils import Report
    editorId = azlmbr.globals.property.LANDSCAPE_CANVAS_EDITOR_ID

    # Captures the EntityId of each entity the editor creates; node creation
    # below reads the module-level newEntityId right after adding each node.
    def onEntityCreated(parameters):
        global newEntityId
        newEntityId = parameters[0]

    # Open an existing simple level
    hydra.open_base_level()
    # Open Landscape Canvas tool and verify
    general.open_pane('Landscape Canvas')
    Report.critical_result(Tests.lc_tool_opened, general.is_pane_visible('Landscape Canvas'))
    # Create a new graph in Landscape Canvas
    newGraphId = graph.AssetEditorRequestBus(bus.Event, 'CreateNewGraph', editorId)
    Report.critical_result(Tests.new_graph_created, newGraphId is not None)
    # Make sure the graph we created is in Landscape Canvas
    graph_registered = graph.AssetEditorRequestBus(bus.Event, 'ContainsGraph', editorId, newGraphId)
    Report.result(Tests.graph_registered, graph_registered)
    # Listen for entity creation notifications so we can verify the component EntityId
    # references are set correctly when connecting slots on the nodes
    handler = editor.EditorEntityContextNotificationBusHandler()
    handler.connect()
    handler.add_callback('OnEditorEntityCreated', onEntityCreated)
    # Node placement coordinates on the graph canvas (purely visual).
    positionX = 10.0
    positionY = 10.0
    offsetX = 340.0
    offsetY = 100.0
    # Add a Box Shape node to the graph
    newGraph = graph.GraphManagerRequestBus(bus.Broadcast, 'GetGraph', newGraphId)
    boxShapeNode = landscapecanvas.LandscapeCanvasNodeFactoryRequestBus(bus.Broadcast, 'CreateNodeForTypeName', newGraph,
                                                                       'BoxShapeNode')
    graph.GraphControllerRequestBus(bus.Event, 'AddNode', newGraphId, boxShapeNode, math.Vector2(positionX, positionY))
    boxShapeEntityId = newEntityId
    positionX += offsetX
    positionY += offsetY
    # Add a Random Noise Gradient node to the graph
    perlinNoiseNode = landscapecanvas.LandscapeCanvasNodeFactoryRequestBus(bus.Broadcast, 'CreateNodeForTypeName', newGraph,
                                                                          'PerlinNoiseGradientNode')
    graph.GraphControllerRequestBus(bus.Event, 'AddNode', newGraphId, perlinNoiseNode, math.Vector2(positionX, positionY))
    perlinNoiseEntityId = newEntityId
    positionX += offsetX
    positionY += offsetY
    # Add a FastNoise Gradient node to the graph
    fastNoiseNode = landscapecanvas.LandscapeCanvasNodeFactoryRequestBus(bus.Broadcast, 'CreateNodeForTypeName', newGraph,
                                                                        'FastNoiseGradientNode')
    graph.GraphControllerRequestBus(bus.Event, 'AddNode', newGraphId, fastNoiseNode, math.Vector2(positionX, positionY))
    fastNoiseEntityId = newEntityId
    positionX += offsetX
    positionY += offsetY
    # Add a Gradient Mixer node to the graph
    gradientMixerNode = landscapecanvas.LandscapeCanvasNodeFactoryRequestBus(bus.Broadcast, 'CreateNodeForTypeName', newGraph,
                                                                            'GradientMixerNode')
    graph.GraphControllerRequestBus(bus.Event, 'AddNode', newGraphId, gradientMixerNode, math.Vector2(positionX, positionY))
    PrefabUtils.wait_for_propagation()
    gradientMixerEntityId = newEntityId
    # Slot ids used when wiring the node connections below.
    boundsSlotId = graph.GraphModelSlotId('Bounds')
    previewBoundsSlotId = graph.GraphModelSlotId('PreviewBounds')
    inboundGradientSlotId = graph.GraphModelSlotId('InboundGradient')
    outboundGradientSlotId = graph.GraphModelSlotId('OutboundGradient')
    inboundGradientSlotId2 = graph.GraphControllerRequestBus(bus.Event, 'ExtendSlot', newGraphId, gradientMixerNode,
                                                             'InboundGradient')
    # Connect slots on our nodes to construct a Gradient Mixer hierarchy
    graph.GraphControllerRequestBus(bus.Event, 'AddConnectionBySlotId', newGraphId, boxShapeNode, boundsSlotId,
                                    perlinNoiseNode, previewBoundsSlotId)
    graph.GraphControllerRequestBus(bus.Event, 'AddConnectionBySlotId', newGraphId, boxShapeNode, boundsSlotId,
                                    fastNoiseNode, previewBoundsSlotId)
    graph.GraphControllerRequestBus(bus.Event, 'AddConnectionBySlotId', newGraphId, boxShapeNode, boundsSlotId,
                                    gradientMixerNode, previewBoundsSlotId)
    graph.GraphControllerRequestBus(bus.Event, 'AddConnectionBySlotId', newGraphId, perlinNoiseNode, outboundGradientSlotId,
                                    gradientMixerNode, inboundGradientSlotId)
    graph.GraphControllerRequestBus(bus.Event, 'AddConnectionBySlotId', newGraphId, fastNoiseNode, outboundGradientSlotId,
                                    gradientMixerNode, inboundGradientSlotId2)
    # Delay to allow all the underlying component properties to be updated after the slot connections are made
    general.idle_wait(1.0)
    # Get component info
    gradientMixerTypeId = hydra.get_component_type_id("Gradient Mixer")
    perlinNoiseTypeId = hydra.get_component_type_id("Perlin Noise Gradient")
    gradientMixerOutcome = editor.EditorComponentAPIBus(bus.Broadcast, 'GetComponentOfType', gradientMixerEntityId,
                                                        gradientMixerTypeId)
    gradientMixerComponent = gradientMixerOutcome.GetValue()
    perlinNoiseOutcome = editor.EditorComponentAPIBus(bus.Broadcast, 'GetComponentOfType', perlinNoiseEntityId,
                                                      perlinNoiseTypeId)
    perlinNoiseComponent = perlinNoiseOutcome.GetValue()
    # Verify the Preview EntityId property on our Perlin Noise Gradient component has been set to our Box Shape's EntityId
    previewEntityId = hydra.get_component_property_value(perlinNoiseComponent, 'Preview Settings|Pin Preview to Shape')
    Report.result(Tests.preview_entity_set, previewEntityId and boxShapeEntityId.invoke("Equal", previewEntityId))
    # Verify the 1st Inbound Gradient EntityId property on our Gradient Mixer component has been set to our Perlin Noise
    # Gradient's EntityId
    inboundGradientEntityId = hydra.get_component_property_value(gradientMixerComponent,
                                                                 'Configuration|Layers|[0]|Gradient|Gradient Entity Id')
    Report.result(Tests.mixer_inbound_gradient_set_a, inboundGradientEntityId and perlinNoiseEntityId.invoke("Equal", inboundGradientEntityId))
    # Verify the 2nd Inbound Gradient EntityId property on our Gradient Mixer component has been set to our FastNoise
    # Gradient Modifier's EntityId
    inboundGradientEntityId2 = hydra.get_component_property_value(gradientMixerComponent,
                                                                 'Configuration|Layers|[1]|Gradient|Gradient Entity Id')
    Report.result(Tests.mixer_inbound_gradient_set_b, inboundGradientEntityId2 and fastNoiseEntityId.invoke("Equal", inboundGradientEntityId2))
    # Verify that Gradient Mixer Layer Operations are properly set
    # (per the Tests messages above, 0 corresponds to Initialize and 6 to
    # Average in the Operation enum — confirm against the component schema)
    mixer_operation_a = hydra.get_component_property_value(gradientMixerComponent, 'Configuration|Layers|[0]|Operation')
    mixer_operation_b = hydra.get_component_property_value(gradientMixerComponent, 'Configuration|Layers|[1]|Operation')
    Report.result(Tests.mixer_operation_a, mixer_operation_a == 0)
    Report.result(Tests.mixer_operation_b, mixer_operation_b == 6)
    # Stop listening for entity creation notifications
    handler.disconnect()
if __name__ == "__main__":
    # Run the test through the editor test-tools Report harness.
    from editor_python_test_tools.utils import Report
    Report.start_test(GradientMixer_NodeConstruction)
|
# coding=utf-8
# Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from pants_test.test_base import TestBase
from pants.contrib.node.targets.node_module import NodeModule
class NodeModuleTest(TestBase):
    """Checks that NodeModule's `bin_executables` accepts a string or a dict."""

    def test_bin_executables_string(self):
        node_module = self.make_target(spec=':name', target_type=NodeModule, package_name='name',
                                       bin_executables='./cli.js')
        self.assertEqual('./cli.js', node_module.payload.bin_executables)

    def test_bin_executables_dict(self):
        string_target = self.make_target(spec=':name1', target_type=NodeModule, package_name='name1',
                                         bin_executables='./cli.js')
        dict_target = self.make_target(spec=':name2', target_type=NodeModule, package_name='name2',
                                       bin_executables={"name2": "./cli.js"})
        self.assertNotEqual(string_target, dict_target)
        self.assertEqual('./cli.js', string_target.payload.bin_executables)
        self.assertEqual({'name2': './cli.js'}, dict_target.payload.bin_executables)
|
""" Provides an installer for dependencies. """
from __future__ import annotations
import abc
import dataclasses
import logging
import shlex
import subprocess as sp
import typing as t
from pathlib import Path
from slap.python.dependency import MultiDependency
from slap.python.pep508 import filter_dependencies, test_dependency
if t.TYPE_CHECKING:
from slap.project import Project
from slap.python.dependency import Dependency
from slap.python.environment import PythonEnvironment
logger = logging.getLogger(__name__)
@dataclasses.dataclass
class Indexes:
    """Represents a configuration of PyPI indexes."""

    #: The name of the default index in the #urls mapping.
    default: str | None = None

    #: A mapping that assigns each key (the name of the index) its index URL.
    urls: dict[str, str] = dataclasses.field(default_factory=dict)

    def combine_with(self, other: Indexes) -> None:
        """Merge *other* into this configuration; values already set here win."""
        conflicting_defaults = bool(
            self.default and other.default and self.default != other.default
        )
        if conflicting_defaults:
            logger.warning(
                "Conflicting default index between projects in repository: %r (current), %r",
                self.default,
                other.default,
            )
        self.default = self.default or other.default
        # TODO (@NiklasRosenstein): Warn about conflicting package indexes.
        merged_urls = dict(other.urls)
        merged_urls.update(self.urls)
        self.urls = merged_urls
@dataclasses.dataclass
class InstallOptions:
    """Options controlling a dependency installation run."""
    #: Index configuration to install from.
    indexes: Indexes
    #: Suppress installer output (e.g. pass -q to Pip).
    quiet: bool
    #: Upgrade already-installed packages (e.g. pass --upgrade to Pip).
    upgrade: bool
class Installer(abc.ABC):
    """An installer for dependencies into a #PythonEnvironment."""

    @abc.abstractmethod
    def install(self, dependencies: list[Dependency], target: PythonEnvironment, options: InstallOptions) -> int:
        """Install *dependencies* into *target*; return an exit code (0 on success)."""
        ...
class SymlinkHelper(t.Protocol):
    """
    A helper for introspecting a project for additional dependencies and symlinking it. This is needed when a
    #PathDependency is encountered with #PathDependency.link enabled.
    """

    def get_dependencies_for_project(self, project: Path) -> list[Dependency]:
        """Return the dependencies of the project located at *project*."""
        ...

    def link_project(self, project: Path) -> None:
        """Symlink the project located at *project* into the environment."""
        ...
class PipInstaller(Installer):
    """Installs dependencies via Pip."""

    def __init__(self, symlink_helper: SymlinkHelper | None) -> None:
        """
        Args:
            symlink_helper: A helper for implementing #PathDependency.link when it is encountered. If not specified,
                an error will be raised when a #PathDependency is passed that needs to be linked.
        """
        self.symlink_helper = symlink_helper

    def install(self, dependencies: t.Sequence[Dependency], target: PythonEnvironment, options: InstallOptions) -> int:
        """Install *dependencies* into *target* with Pip.

        Returns Pip's exit status (0 on success). Projects that must be
        symlinked (#PathDependency.link) are linked after Pip succeeds.
        """
        from slap.python.dependency import PathDependency, PypiDependency, UrlDependency

        # Collect the Pip arguments and the dependencies that need to be installed through other methods.
        supports_hashes = {PypiDependency, UrlDependency}
        # NOTE(review): unsupported_hashes is collected but never reported —
        # consider logging it before returning.
        unsupported_hashes: dict[type[Dependency], list[Dependency]] = {}
        link_projects: list[Path] = []
        pip_arguments: list[str] = []
        # used_indexes: set[str] = set()

        dependencies = list(dependencies)
        while dependencies:
            dependency = dependencies.pop()
            # TODO (@NiklasRosenstein): Pass extras from PipInstaller caller.
            if not test_dependency(dependency, target.pep508, set()):
                continue
            # Collect dependencies for which hashes are not supported so we can report it later.
            if dependency.hashes and type(dependency) not in supports_hashes:
                unsupported_hashes.setdefault(type(dependency), []).append(dependency)
            if isinstance(dependency, PathDependency) and dependency.link:
                logger.info("Collecting recursive dependencies for project <val>%s</val>", dependency.path)
                if self.symlink_helper is None:
                    # Bug fix: the previous message mixed %-style ("%r") with an
                    # f-string and passed *dependency* as a second Exception
                    # argument, so the dependency was never interpolated.
                    raise Exception(
                        f"Unable to install {dependency!r} because no symlink helper is available in this context"
                    )
                dependencies += filter_dependencies(
                    dependencies=self.symlink_helper.get_dependencies_for_project(dependency.path),
                    env=target.pep508,
                    extras=set(dependency.extras or []),
                )
                link_projects.append(dependency.path)
                continue
            if isinstance(dependency, MultiDependency):
                for sub_dependency in dependency.dependencies:
                    # TODO (@NiklasRosenstein): Pass extras from the caller so we can evaluate them here
                    if test_dependency(sub_dependency, target.pep508, set()):
                        dependencies.insert(0, sub_dependency)
            else:
                pip_arguments += self.dependency_to_pip_arguments(dependency)
                # if isinstance(dependency, PypiDependency) and dependency.source:
                #     used_indexes.add(dependency.source)

        # Add the extra index URLs.
        # TODO (@NiklasRosenstein): Inject credentials for index URLs.
        # NOTE (@NiklasRosenstein): While the dependency configuration allows you to specify exactly for each
        #       dependency where it should be fetched from, with the Pip CLI we cannot currently have that level
        #       of control.
        try:
            if options.indexes.default is not None:
                pip_arguments += ["--index-url", options.indexes.urls[options.indexes.default]]
            # for index_name in used_indexes - {options.indexes.default}:
            # NOTE (@NiklasRosenstein): For now we just pass all indexes to Pip. When you run `slap install` without
            #       the `--link` option, the package will be installed directly with Pip, thus the runtime dependencies
            #       are not passed here and we would not recognize the extra indexes required for those dependencies.
            for index_name in options.indexes.urls.keys() - {options.indexes.default}:
                pip_arguments += ["--extra-index-url", options.indexes.urls[index_name]]
        except KeyError as exc:
            # Chain the KeyError so the missing index name's origin is kept.
            raise Exception(f"PyPI index {exc} is not configured") from exc

        # Construct the Pip command to run.
        pip_command = [target.executable, "-m", "pip", "install"] + pip_arguments
        if options.quiet:
            pip_command += ["-q"]
        if options.upgrade:
            pip_command += ["--upgrade"]
        logger.info("Installing with Pip using command <subj>$ %s</subj>", " ".join(map(shlex.quote, pip_command)))
        if (res := sp.call(pip_command)) != 0:
            return res

        # Symlink all projects that need to be linked.
        for project_path in link_projects:
            assert self.symlink_helper is not None
            self.symlink_helper.link_project(project_path)
        return 0

    @staticmethod
    def dependency_to_pip_arguments(dependency: Dependency) -> list[str]:
        """Converts a dependency to a list of arguments for Pip.

        Args:
            dependency: The dependency to convert. Must be one of #GitDependency, #PathDependency,
                #PypiDependency or #UrlDependency. A #MultiDependency is not supported by this function.
        Raises:
            Exception: If an unexpected kind of dependency was encountered.
        """
        from slap.python.dependency import GitDependency, PathDependency, PypiDependency, UrlDependency

        extras = "" if not dependency.extras else f'[{",".join(dependency.extras)}]'
        hashes = " ".join(f"--hash={h}" for h in dependency.hashes or [])
        pip_arguments = []
        if isinstance(dependency, GitDependency):
            # TODO (@NiklasRosenstein): Add Git branch/rev/tag to the URL.
            if dependency.branch or dependency.rev or dependency.tag:
                logger.warning(
                    "PipInstaller does not currently support Git branch/rev/tag, dependency will be installed "
                    "from main branch: <val>%s</val>",
                    dependency,
                )
            pip_arguments += [f"{dependency.name}{extras} @ git+{dependency.url}"]
        elif isinstance(dependency, PathDependency):
            assert not dependency.link  # We caught that case before
            if dependency.develop:
                pip_arguments += ["-e"]
            prefix = "" if dependency.path.is_absolute() else "./"
            pip_arguments += [f"{prefix}{dependency.path}{extras}"]
        elif isinstance(dependency, PypiDependency):
            pip_arguments += [f"{dependency.name}{extras} {dependency.version.to_pep_508()} {hashes}".rstrip()]
        elif isinstance(dependency, UrlDependency):
            pip_arguments += [f"{dependency.name}{extras} @ {dependency.url} {hashes}".rstrip()]
        else:
            raise Exception(f"Unexpected dependency type: {dependency}")
        assert pip_arguments, dependency
        return pip_arguments
def get_indexes_for_projects(projects: t.Sequence[Project]) -> Indexes:
    """Fold every project's index configuration into a single #Indexes."""
    combined = Indexes()
    for proj in projects:
        combined.combine_with(proj.dependencies().indexes)
    return combined
|
import logging

################################################################################
# database configuration
################################################################################
# Database backend and its connection argument (for sqlite: the database file).
DB_TYPE = 'sqlite'
DB_ARG = 'phxd.db'
################################################################################
# logging configuration
################################################################################
LOG_FILE = None  # None: log to the console instead of a file
LOG_LEVEL = logging.DEBUG
################################################################################
# server configuration
################################################################################
SERVER_PORTS = (5500,)  # TCP ports to listen on
SERVER_NAME = "my_phxd_server"
IDLE_TIME = 10 * 60  # seconds of inactivity before a user counts as idle
BAN_TIME = 15 * 60  # seconds a ban remains in effect
################################################################################
# SSL configuration
################################################################################
ENABLE_SSL = False
SSL_PORT = 5600
SSL_KEY_FILE = 'certs/privkey.pem'
SSL_CERT_FILE = 'certs/cacert.pem'
################################################################################
# tracker configuration
################################################################################
ENABLE_TRACKER_REGISTER = False  # whether to announce this server to a tracker
TRACKER_ADDRESS = "hltracker.com"
TRACKER_PORT = 5499
TRACKER_PASSWORD = ""
TRACKER_INTERVAL = 5 * 60  # seconds between tracker re-registrations
SERVER_DESCRIPTION = "My phxd server."
################################################################################
# chat options
################################################################################
# filled with (nick, chat)
CHAT_FORMAT = "\r%13.13s: %s"
CHAT_PREFIX_LEN = 17
CHAT_PREFIX_ADD_NICK_LEN = False
EMOTE_FORMAT = "\r *** %s %s"
EMOTE_PREFIX_LEN = 7  # + len(nick)
MAX_NICK_LEN = 32  # characters
MAX_CHAT_LEN = 4096  # characters per chat message
LOG_CHAT = True
LOG_DIR = "chatlogs"
################################################################################
# message options
################################################################################
MAX_MSG_LEN = 2048  # characters per private message
################################################################################
# news options
################################################################################
# filled with (nick, login, date, body)
NEWS_FORMAT = "From %s [%s] (%s):\r\r%s\r_________________________________________________________\r"
DEFAULT_NEWS_LIMIT = 25  # number of news posts returned by default
################################################################################
# files options
################################################################################
FILE_ROOT = "files"  # root directory of the shared file area
SHOW_DOTFILES = False
# NOTE(review): named "umask" but 0o755 reads like a permission mode — confirm
# whether this is applied as a mode or a mask where it is consumed.
DIR_UMASK = 0o755
UPLOAD_SCRIPT = None  # optional script run after each upload
################################################################################
# transfer options
################################################################################
XFER_TIMEOUT = 30.0  # seconds
################################################################################
# GIF icon options
################################################################################
ENABLE_GIF_ICONS = True
MAX_GIF_SIZE = 32768  # bytes
DEFAULT_ICON_TIME = 10
|
from icemac.addressbook.i18n import _
from .interfaces import IBirthDate
import icemac.addressbook.browser.base
import zope.component
class ExportList(icemac.addressbook.browser.base.BaseView):
    """List available export formats."""

    title = _('Export person data')

    def exporters(self):
        """Iterate over exporters that have enough data to export something."""
        # XXX: This has no API, the exporters should be subscription adapters
        #      which return None if they have not enough data to export
        #      something and a dict consisting of title and URL otherwise.
        birthdate = zope.component.getMultiAdapter(
            (self.context, self.request), IBirthDate)
        if birthdate.icalendar_event is None:
            return
        yield {'title': _('iCalendar export of birth date (.ics file)'),
               'url': self.url(self.context, 'iCalendar')}

    def back_url(self):
        """Return the URL of the context object to navigate back to."""
        return self.url(self.context)
|
import logging

# Module-wide logger: the logger itself accepts everything (DEBUG), while the
# attached console handler filters output down to INFO until
# enable_debug_logger() lowers it.
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()  # console handler (stderr by default)
ch.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
def enable_debug_logger():
    """Lower both the module logger and its console handler to DEBUG verbosity."""
    for target in (logger, ch):
        target.setLevel(logging.DEBUG)
#
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD.
# See https://nomad-lab.eu for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
from nomad.metainfo import Package, Quantity, Section
from nomad.datamodel.metainfo import common_experimental
# Metainfo package that collects the mpes-specific section extensions below.
m_package = Package(name='mpes')
class Method(common_experimental.Method):
    """Extends the common experimental ``Method`` section with metadata for
    multidimensional photoemission spectroscopy (mpes): beamline and light
    source identification, pump/probe source parameters, electron detector
    settings and axis calibrations."""

    # Augment the existing base section in place rather than defining a new one.
    m_def = Section(validate=False, extends_base_section=True)

    # --- general experiment / hardware identification ----------------------
    general_beamline = Quantity(
        type=str,
        description='''
        Name of the beamline the experiment took place.
        ''')

    general_source_pump = Quantity(
        type=str,
        description='''
        Name or model of the pump light source.
        ''')

    general_source_probe = Quantity(
        type=str,
        description='''
        Name or model of the probe light source.
        ''')

    number_of_axes = Quantity(
        type=int,
        description='''
        Number of axes in the measurement hardware.
        ''')

    general_measurement_axis = Quantity(
        type=str,
        shape=['number_of_axes'],
        description='''
        Names of the axes in the measurement hardware.
        ''')

    general_physical_axis = Quantity(
        type=str,
        shape=['number_of_axes'],
        description='''
        Names of the axes in physical terms.
        ''')

    # --- pump light source --------------------------------------------------
    source_pump_repetition_rate = Quantity(
        type=np.dtype(np.float64),
        unit='hertz',
        description='''
        Repetition rate of the pump source.
        ''')

    source_pump_pulse_duration = Quantity(
        type=np.dtype(np.float64),
        unit='femtosecond',
        description='''
        Pulse duration of the pump source.
        ''')

    source_pump_wavelength = Quantity(
        type=np.dtype(np.float64),
        unit='nanometer',
        description='''
        Center wavelength of the pump source.
        ''')

    source_pump_spectrum = Quantity(
        type=np.dtype(np.float64),
        shape=['length_of_spectrum'],
        description='''
        Spectrum of the pump source.
        ''')

    source_pump_photon_energy = Quantity(
        type=np.dtype(np.float64),
        unit='electron_volt',
        description='''
        Photon energy of the pump source.
        ''')

    # NOTE(review): the shape name 'none_shape' used here (and below) is never
    # declared as a quantity in this section — presumably resolved elsewhere
    # in the metainfo; confirm.
    source_pump_size = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='millimeter ** 2',
        description='''
        Full-width at half-maximum (FWHM) of the pump source size at or closest to the
        sample position.
        ''')

    source_pump_fluence = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='millijoule / millimeter ** 2',
        description='''
        Fluence of the pump source at or closest to the sample position.
        ''')

    source_pump_polarization = Quantity(
        type=str,
        description='''
        Polarization of the pump source.
        ''')

    source_pump_bunch = Quantity(
        type=np.dtype(np.int32),
        description='''
        Total bunch number of the pump source.
        ''')

    # --- probe light source -------------------------------------------------
    source_probe_repetition_rate = Quantity(
        type=np.dtype(np.float64),
        unit='hertz',
        description='''
        Repetition rate of the probe source.
        ''')

    source_probe_pulse_duration = Quantity(
        type=np.dtype(np.float64),
        unit='femtosecond',
        description='''
        Pulse duration of the probe source.
        ''')

    source_probe_wavelength = Quantity(
        type=np.dtype(np.float64),
        unit='nanometer',
        description='''
        Center wavelength of the probe source.
        ''')

    # Shared shape dimension for the pump/probe spectrum quantities.
    length_of_spectrum = Quantity(
        type=int,
        description='''
        Number of pixel elements in the spectrum.
        ''')

    source_probe_spectrum = Quantity(
        type=np.dtype(np.float64),
        shape=['length_of_spectrum'],
        description='''
        Spectrum of the probe source.
        ''')

    source_probe_photon_energy = Quantity(
        type=np.dtype(np.float64),
        unit='electron_volt',
        description='''
        Photon energy of the probe source.
        ''')

    source_probe_size = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='millimeter ** 2',
        description='''
        Full-width at half-maximum (FWHM) of the probe source size at or closest to the
        sample position.
        ''')

    source_probe_fluence = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='millijoule / millimeter ** 2',
        description='''
        Fluence of the probe source at or closest to the sample position.
        ''')

    source_probe_polarization = Quantity(
        type=str,
        description='''
        Polarization of the probe source.
        ''')

    source_probe_bunch = Quantity(
        type=np.dtype(np.int32),
        description='''
        Total bunch number of the probe source.
        ''')

    source_temporal_resolution = Quantity(
        type=np.dtype(np.float64),
        unit='femtosecond',
        description='''
        Full-width at half-maximum (FWHM) of the pump-probe cross-correlation function.
        ''')

    # --- electron detector settings ----------------------------------------
    detector_extractor_voltage = Quantity(
        type=np.dtype(np.float64),
        unit='volt',
        description='''
        Voltage between the extractor and the sample.
        ''')

    detector_work_distance = Quantity(
        type=np.dtype(np.float64),
        unit='millimeter',
        description='''
        Distance between the sample and the detector entrance.
        ''')

    number_of_lenses = Quantity(
        type=int,
        description='''
        Number of electron lenses in the electron detector.
        ''')

    detector_lens_names = Quantity(
        type=str,
        shape=['number_of_lenses'],
        description='''
        Set of names for the electron-optic lenses.
        ''')

    detector_lens_voltages = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_lenses'],
        unit='volt',
        description='''
        Set of electron-optic lens voltages.
        ''')

    detector_tof_distance = Quantity(
        type=np.dtype(np.float64),
        unit='meter',
        description='''
        Drift distance of the time-of-flight tube.
        ''')

    number_of_tof_voltages = Quantity(
        type=int,
        description='''
        Number of time-of-flight (TOF) drift tube voltage values in the electron detector.
        ''')

    detector_tof_voltages = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_tof_voltages'],
        unit='volt',
        description='''
        Voltage applied to the time-of-flight tube.
        ''')

    detector_sample_bias = Quantity(
        type=np.dtype(np.float64),
        unit='volt',
        description='''
        Voltage bias applied to sample.
        ''')

    detector_magnification = Quantity(
        type=np.dtype(np.float64),
        description='''
        Detector magnification.
        ''')

    number_of_detector_voltages = Quantity(
        type=int,
        description='''
        Number of detector voltage settings in the electron detector.
        ''')

    detector_voltages = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_detector_voltages'],
        unit='volt',
        description='''
        Voltage applied to detector.
        ''')

    detector_type = Quantity(
        type=str,
        description='''
        Description of the detector type (e.g. ‘MCP’, ‘CCD’, ‘CMOS’, etc.).
        ''')

    number_of_sensor_sizes = Quantity(
        type=int,
        description='''
        Number of detector sensor size dimensions (depending on the number of sensors).
        ''')

    detector_sensor_size = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_sensor_sizes'],
        unit='millimeter',
        description='''
        Size of each of the imaging sensor chip on the detector.
        ''')

    detector_sensor_count = Quantity(
        type=np.dtype(np.int32),
        description='''
        Number of imaging sensor chips on the detector.
        ''')

    detector_sensor_pixel_size = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='micrometer',
        description='''
        Pixel size of the imaging sensor chip on the detector.
        ''')

    # --- axis calibrations --------------------------------------------------
    number_of_momentum_calibration_coefficients = Quantity(
        type=int,
        description='''
        Number of the momentum calibration parameters for the detector.
        ''')

    detector_calibration_x_to_momentum = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_momentum_calibration_coefficients'],
        unit='1 / angstrom',
        description='''
        Pixel x axis to kx momentum calibration.
        ''')

    detector_calibration_y_to_momentum = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_momentum_calibration_coefficients'],
        unit='1 / angstrom',
        description='''
        Pixel y axis to ky momentum calibration.
        ''')

    number_of_energy_calibration_coefficients = Quantity(
        type=int,
        description='''
        Number of the energy calibration parameters for the detector.
        ''')

    detector_calibration_tof_to_energy = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_energy_calibration_coefficients'],
        unit='electron_volt',
        description='''
        Time-of-flight to energy calibration.
        ''')

    # NOTE(review): 'number_of_delay_calibration_coefficients' is referenced as
    # a shape here but never declared in this section, unlike the other
    # number_of_* dimensions — likely an omission; confirm.
    detector_calibration_stage_to_delay = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_delay_calibration_coefficients'],
        unit='femtosecond',
        description='''
        Translation stage position to pump-probe delay calibration.
        ''')

    number_of_other_calibration_coefficients = Quantity(
        type=int,
        description='''
        Number of the other calibration parameters for the detector.
        ''')

    detector_calibration_other_converts = Quantity(
        type=np.dtype(np.float64),
        shape=['number_of_other_calibration_coefficients'],
        description='''
        Conversion factor between other measured and physical axes.
        ''')

    # --- detector resolutions -----------------------------------------------
    detector_momentum_resolution = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='1 / angstrom',
        description='''
        Momentum resolution of the detector.
        ''')

    detector_spatial_resolution = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='micrometer',
        description='''
        Spatial resolution of the source.
        ''')

    detector_energy_resolution = Quantity(
        type=np.dtype(np.float64),
        shape=['none_shape'],
        unit='electron_volt',
        description='''
        Energy resolution of the detector.
        ''')
class Sample(common_experimental.Sample):
    """Extends the common experimental ``Sample`` section with mpes-specific
    sample and substrate metadata."""

    # CONSISTENCY FIX: like the sibling `Method` class in this file, this class
    # augments an existing base section, so it must declare
    # `extends_base_section=True`; without it the quantities below would define
    # a new, unrelated section instead of extending the common one.
    m_def = Section(validate=False, extends_base_section=True)

    # --- sample properties --------------------------------------------------
    sample_state_of_matter = Quantity(
        type=str,
        description='''
        Physical state of the sample.
        ''')

    sample_purity = Quantity(
        type=np.dtype(np.float64),
        description='''
        Chemical purity of the sample.
        ''')

    sample_surface_termination = Quantity(
        type=str,
        description='''
        Surface termination of the sample (if crystalline).
        ''')

    sample_layers = Quantity(
        type=str,
        description='''
        Sample layer or bulk structure.
        ''')

    sample_stacking_order = Quantity(
        type=str,
        description='''
        Stacking order of the solid surface (if crystalline).
        ''')

    sample_chemical_id_cas = Quantity(
        type=str,
        description='''
        CAS registry number of the sample’s chemical content.
        ''')

    sample_pressure = Quantity(
        type=np.dtype(np.float64),
        unit='pascal',
        description='''
        Pressure surrounding the sample at the time of measurement.
        ''')

    # --- provenance ---------------------------------------------------------
    sample_growth_method = Quantity(
        type=str,
        description='''
        Sample growth method.
        ''')

    sample_preparation_method = Quantity(
        type=str,
        description='''
        Sample preparation method.
        ''')

    sample_vendor = Quantity(
        type=str,
        description='''
        Name of the sample vendor.
        ''')

    # --- substrate ----------------------------------------------------------
    sample_substrate_material = Quantity(
        type=str,
        description='''
        Material of the substrate the sample has immediate contact with.
        ''')

    sample_substrate_state_of_matter = Quantity(
        type=str,
        description='''
        State of matter of the substrate material.
        ''')

    sample_substrate_vendor = Quantity(
        type=str,
        description='''
        Name of the substrate vendor.
        ''')
# Finalize the package so the section/quantity definitions above are registered.
m_package.__init_metainfo__()
|
#
# ojar - [o]jar rune
# https://github.com/vesche/ojar
#
def clear(_):
    """Clear the terminal screen and echo the input back unchanged."""
    from os import name, system
    command = 'cls' if name == 'nt' else 'clear'
    system(command)
    return _
def info(_):
    """Describe this rune and the actions it provides."""
    from runes.common import list_functions
    actions = list_functions(__name__)
    return '[o]jar rune\nactions: {}'.format(actions)
def runelist(_):
    """Return the available runes, one per line, first letter bracketed."""
    runes = ['crypto', 'evil', 'ojar', 'random', 'string', 'unit']
    entries = ['[{0}]{1}\n'.format(rune[0], rune[1:]) for rune in runes]
    return ''.join(entries).rstrip()
def version(_):
    """Return the version string of this rune."""
    return 'ojar v0.1'
def quit(_):
    """Terminate the interpreter with exit status 0."""
    # sys.exit(0) simply raises SystemExit(0); raise it directly.
    raise SystemExit(0)
|
# This is the main file for this app
# Start Date - 20.11.21
# Srikara Narasimha
"""this is open-source and free to copy."""
""" Under the unlicence licence. Read LICENCE page """
from tkinter import *

# Main application window: centered, fixed-size, black background.
root = Tk()
root.title("All in One Converter - Srikara Narasimha")
# root.geometry("500x500+0+0")
root.eval('tk::PlaceWindow . center')  # center the window on the screen
orig_color = root.cget("background")  # remember the default widget background
root.resizable(False, False)
root.configure(bg='black')
def met_kilometer():
    """Open a window that converts meters to kilometers."""
    global meter_kilo
    global meter_kilo_entry
    meter_kilo = Toplevel()
    meter_kilo.title("Meter To Kilometer")
    meter_kilo_label1 = Label(meter_kilo, text="Please enter meter value below:- ")
    meter_kilo_label1.grid(row=0, column=0)
    meter_kilo_entry = Entry(meter_kilo)
    meter_kilo_entry.grid(row=1, column=0)
    meter_kilo_enterbutton = Button(meter_kilo, text="Enter", command=met_kilometer_answer)
    meter_kilo_enterbutton.grid(row=1, column=1)

def met_kilometer_answer():
    """Display the km value for the current entry, replacing any old answer."""
    global meter_kilo_label2
    # Remove the previous answer label so results do not pile up.
    if "meter_kilo_label2" in globals():
        meter_kilo_label2.destroy()
    try:
        meters = float(meter_kilo_entry.get())
    except ValueError:
        # ROBUSTNESS: non-numeric input used to raise inside the Tk callback.
        meter_kilo_label2 = Label(meter_kilo, text="Invalid number")
        meter_kilo_label2.grid(row=2, column=0)
        return
    meter_kilo_label2 = Label(meter_kilo, text=str(meters / 1000) + " Km")
    meter_kilo_label2.grid(row=2, column=0)
    # BUG FIX: the original called meter_kilo.mainloop() here, starting a
    # nested event loop on every answer; Tk already runs the main loop.
def kilo_meter():
    """Open a window that converts kilometers to meters."""
    # BUG FIX: the original did `global kilo_meter; kilo_meter = Toplevel()`,
    # rebinding this function's own name to the window object so the button
    # only worked once.  The window now lives in its own global.
    global kilo_meter_win
    global kilo_meter_entry
    kilo_meter_win = Toplevel()
    kilo_meter_win.title("Kilometer To Meter")  # BUG FIX: title said "Meter To Kilometer"
    kilo_meter_label1 = Label(kilo_meter_win, text="Please enter Kilometer value below:- ")
    kilo_meter_label1.grid(row=0, column=0)
    kilo_meter_entry = Entry(kilo_meter_win)
    kilo_meter_entry.grid(row=1, column=0)
    kilo_meter_enterbutton = Button(kilo_meter_win, text="Enter", command=kilo_meter_answer)
    kilo_meter_enterbutton.grid(row=1, column=1)

def kilo_meter_answer():
    """Display the meter value for the current entry, replacing any old answer."""
    global kilo_meter_label2
    if "kilo_meter_label2" in globals():
        kilo_meter_label2.destroy()
    try:
        kilometers = float(kilo_meter_entry.get())
    except ValueError:
        kilo_meter_label2 = Label(kilo_meter_win, text="Invalid number")
        kilo_meter_label2.grid(row=2, column=0)
        return
    kilo_meter_label2 = Label(kilo_meter_win, text=str(kilometers * 1000) + " m")
    kilo_meter_label2.grid(row=2, column=0)
def min_sec():
    """Open a window that converts minutes to seconds."""
    # BUG FIX: the original rebound this function's own name to the Toplevel
    # via `global min_sec`, so the button only worked once.
    global min_sec_win
    global min_sec_entry
    min_sec_win = Toplevel()
    min_sec_win.title("Minutes To Seconds")  # BUG FIX: title said "Seconds To Minutes"
    min_sec_label1 = Label(min_sec_win, text="Please enter Minutes value below:- ")
    min_sec_label1.grid(row=0, column=0)
    min_sec_entry = Entry(min_sec_win)
    min_sec_entry.grid(row=1, column=0)
    min_sec_enterbutton = Button(min_sec_win, text="Enter", command=min_sec_answer)
    min_sec_enterbutton.grid(row=1, column=1)

def min_sec_answer():
    """Display the seconds value for the current entry, replacing any old answer."""
    global min_sec_label2
    if "min_sec_label2" in globals():
        min_sec_label2.destroy()
    try:
        minutes = float(min_sec_entry.get())
    except ValueError:
        min_sec_label2 = Label(min_sec_win, text="Invalid number")
        min_sec_label2.grid(row=2, column=0)
        return
    min_sec_label2 = Label(min_sec_win, text=str(minutes * 60) + " sec")
    min_sec_label2.grid(row=2, column=0)
def sec_min():
    """Open a window that converts seconds to minutes."""
    # BUG FIX: the original rebound this function's own name to the Toplevel
    # via `global sec_min`, so the button only worked once.
    global sec_min_win
    global sec_min_entry
    sec_min_win = Toplevel()
    sec_min_win.title("Seconds To Minutes")
    sec_min_label1 = Label(sec_min_win, text="Please enter Seconds value below:- ")
    sec_min_label1.grid(row=0, column=0)
    sec_min_entry = Entry(sec_min_win)
    sec_min_entry.grid(row=1, column=0)
    sec_min_enterbutton = Button(sec_min_win, text="Enter", command=sec_min_answer)
    sec_min_enterbutton.grid(row=1, column=1)

def sec_min_answer():
    """Display the minutes value for the current entry, replacing any old answer."""
    global sec_min_label2
    if "sec_min_label2" in globals():
        sec_min_label2.destroy()
    try:
        seconds = float(sec_min_entry.get())
    except ValueError:
        sec_min_label2 = Label(sec_min_win, text="Invalid number")
        sec_min_label2.grid(row=2, column=0)
        return
    sec_min_label2 = Label(sec_min_win, text=str(seconds / 60) + " min")
    sec_min_label2.grid(row=2, column=0)
def sec_hour():
    """Open a window that converts seconds to hours."""
    # BUG FIX: the original rebound this function's own name to the Toplevel
    # via `global sec_hour`, so the button only worked once.
    global sec_hour_win
    global sec_hour_entry
    sec_hour_win = Toplevel()
    sec_hour_win.title("Seconds To Hour")
    sec_hour_label1 = Label(sec_hour_win, text="Please enter Seconds value below:- ")
    sec_hour_label1.grid(row=0, column=0)
    sec_hour_entry = Entry(sec_hour_win)
    sec_hour_entry.grid(row=1, column=0)
    sec_hour_enterbutton = Button(sec_hour_win, text="Enter", command=sec_hour_answer)
    sec_hour_enterbutton.grid(row=1, column=1)

def sec_hour_answer():
    """Display the hours value for the current entry, replacing any old answer."""
    global sec_hour_label2
    if "sec_hour_label2" in globals():
        sec_hour_label2.destroy()
    try:
        seconds = float(sec_hour_entry.get())
    except ValueError:
        sec_hour_label2 = Label(sec_hour_win, text="Invalid number")
        sec_hour_label2.grid(row=2, column=0)
        return
    sec_hour_label2 = Label(sec_hour_win, text=str(seconds / 3600) + " hours")
    sec_hour_label2.grid(row=2, column=0)
def min_hour():
    """Open a window that converts minutes to hours."""
    # BUG FIX: the original rebound this function's own name to the Toplevel
    # via `global min_hour`, so the button only worked once.
    global min_hour_win
    global min_hour_entry
    min_hour_win = Toplevel()
    min_hour_win.title("Minutes To Hour")
    min_hour_label1 = Label(min_hour_win, text="Please enter Minutes value below:- ")
    min_hour_label1.grid(row=0, column=0)
    min_hour_entry = Entry(min_hour_win)
    min_hour_entry.grid(row=1, column=0)
    min_hour_enterbutton = Button(min_hour_win, text="Enter", command=min_hour_answer)
    min_hour_enterbutton.grid(row=1, column=1)

def min_hour_answer():
    """Display the hours value for the current entry, replacing any old answer."""
    global min_hour_label2
    if "min_hour_label2" in globals():
        min_hour_label2.destroy()
    try:
        minutes = float(min_hour_entry.get())
    except ValueError:
        min_hour_label2 = Label(min_hour_win, text="Invalid number")
        min_hour_label2.grid(row=2, column=0)
        return
    min_hour_label2 = Label(min_hour_win, text=str(minutes / 60) + " hours")
    min_hour_label2.grid(row=2, column=0)
def g_kg():
    """Open a window that converts grams to kilograms."""
    # BUG FIX: the original rebound this function's own name to the Toplevel
    # via `global g_kg`, so the button only worked once.
    global g_kg_win
    global g_kg_entry
    g_kg_win = Toplevel()
    g_kg_win.title("Grams To Kilograms")  # BUG FIX: title said "Minutes To Hour"
    g_kg_label1 = Label(g_kg_win, text="Please enter Grams value below:- ")
    g_kg_label1.grid(row=0, column=0)
    g_kg_entry = Entry(g_kg_win)
    g_kg_entry.grid(row=1, column=0)
    g_kg_enterbutton = Button(g_kg_win, text="Enter", command=g_kg_answer)
    g_kg_enterbutton.grid(row=1, column=1)

def g_kg_answer():
    """Display the kg value for the current entry, replacing any old answer."""
    global g_kg_label2
    if "g_kg_label2" in globals():
        g_kg_label2.destroy()
    try:
        grams = float(g_kg_entry.get())
    except ValueError:
        g_kg_label2 = Label(g_kg_win, text="Invalid number")
        g_kg_label2.grid(row=2, column=0)
        return
    g_kg_label2 = Label(g_kg_win, text=str(grams / 1000) + " Kg")
    g_kg_label2.grid(row=2, column=0)
def kg_g():
    """Open a window that converts kilograms to grams."""
    # BUG FIX: the original rebound this function's own name to the Toplevel
    # via `global kg_g`, so the button only worked once.
    global kg_g_win
    global kg_g_entry
    kg_g_win = Toplevel()
    kg_g_win.title("Kilograms To Grams")  # BUG FIX: title said "Minutes To Hour"
    kg_g_label1 = Label(kg_g_win, text="Please enter KiloGrams value below:- ")
    kg_g_label1.grid(row=0, column=0)
    kg_g_entry = Entry(kg_g_win)
    kg_g_entry.grid(row=1, column=0)
    kg_g_enterbutton = Button(kg_g_win, text="Enter", command=kg_g_answer)
    kg_g_enterbutton.grid(row=1, column=1)

def kg_g_answer():
    """Display the grams value for the current entry, replacing any old answer."""
    global kg_g_label2
    if "kg_g_label2" in globals():
        kg_g_label2.destroy()
    try:
        kilograms = float(kg_g_entry.get())
    except ValueError:
        kg_g_label2 = Label(kg_g_win, text="Invalid number")
        kg_g_label2.grid(row=2, column=0)
        return
    kg_g_label2 = Label(kg_g_win, text=str(kilograms * 1000) + " Grams")
    kg_g_label2.grid(row=2, column=0)
def main_window():
    """Replace the welcome widgets with the full converter button grid."""
    start_label.destroy()
    start_button.destroy()

    exit_button = Button(root, command=root.destroy, text='Exit', bg='black', fg='purple')
    exit_button.grid(row=0, column=2)

    # Section headers and dividers.  (Locals renamed to match the text they
    # actually display; the original called the Distance header "mass_label"
    # and the Mass header "distance_label".)
    distance_header = Label(root, text="                                 Distance", bg='black', fg='white')
    distance_header.grid(row=0, column=0, columnspan=4)
    divider_label1 = Label(root, text="---------------------------------------------------------------", bg='black', fg='white')
    divider_label1.grid(row=4, column=0, columnspan=5)
    time_header = Label(root, text="                                 Time", bg='black', fg='white')
    time_header.grid(row=5, column=0, columnspan=4)
    divider_label2 = Label(root, text="---------------------------------------------------------------", bg='black', fg='white')
    divider_label2.grid(row=8, column=0, columnspan=5)
    mass_header = Label(root, text="                                 Mass", bg='black', fg='white')
    mass_header.grid(row=9, column=0, columnspan=4)

    # Distance converters (row 1, blue).
    met_kil_button = Button(root, text="Meter To Kilometer", command=met_kilometer, bg='blue')
    met_kil_button.grid(row=1, column=0)
    kil_met_button = Button(root, text="Kilometer to Meter", command=kilo_meter, bg='blue', )
    kil_met_button.grid(row=1, column=1)

    # Time converters (rows 6-7, green).
    sec_min_button = Button(root, text="Seconds To Minutes", command=sec_min, bg='green')
    sec_min_button.grid(row=6, column=2)
    min_sec_button = Button(root, text="Minutes to Seconds", command=min_sec, bg='green')
    min_sec_button.grid(row=6, column=0)
    min_hour_button = Button(root, text="Minutes to Hours", command=min_hour, bg='green')
    min_hour_button.grid(row=7, column=0)
    sec_hour_button = Button(root, text="Seconds to Hours", command=sec_hour, bg='green')
    sec_hour_button.grid(row=6, column=1)

    # Mass converters (row 11, orange).
    g_kg_button = Button(root, text="Grams To KiloGrams", command=g_kg, bg='orange')
    g_kg_button.grid(row=11, column=0)
    kg_g_button = Button(root, text="Kilograms to Grams", command=kg_g, bg='orange')
    kg_g_button.grid(row=11, column=1)
# Welcome screen; main_window() tears these down and builds the converter grid.
start_label = Label(root, text="Welcome! Click the below button to start.", bg='black', fg='white')
start_label.pack()
start_button = Button(root, text="Click ME!", command=main_window, bg='black', fg='grey')
start_button.pack()
root.mainloop()  # blocks until the root window is destroyed
"""
Multiclass test for unsupervised and deterministic classification of membrane-bound particles
Input: - The STAR file with the particles to classify
- Classification parameters
Output: - A set of STAR files with the new classes
- 2D rotational averaged around membrane normal exemplars and inter-particles averages per class
"""
__author__ = 'Antonio Martinez-Sanchez'
###### Global variables

CDF_TH = 0.95 # Threshold for accumulated correlation

################# Package import

import os
import sys
import time
import pyseg as ps
import numpy as np
from matplotlib import pyplot as plt
from matplotlib import image as mimg

########################################################################################
# PARAMETERS
########################################################################################

ROOT_PATH = '/fs/pool/pool-lucic2/antonio/pick_test'

# Input STAR file to classify
in_star = ROOT_PATH + '/klass/mclass_data/test_realistic_uniovi/test_1_big.star' # '/klass/mclass_data/test_equal_mb/test_1.star' # '/klass/mclass_data/test_realist/test_6.star' # '/klass/mclass_data/test_1_nk8_snr0.01.star'

# Output directory for the star files
out_dir = ROOT_PATH + '/klass/mclass_data/out_ap/realistic_uniovi_pca_big_mb' # '/klass/mclass_data/out_ap/equal_mb' # '/klass/mclass_data/out_ap/test_realist/test_6'
out_stem = 'klass_hc'

# Particles pre-processing
pp_mask = ROOT_PATH + '/klass/mclass_data/mask_90_0_90_30.mrc' # '/klass/mclass_data/masks/mask_cyl_160_35_125_30_nomb.mrc' # '/masks/mask_cyl_160_81_128_20.mrc' #
pp_low_sg = 4 # voxels, sigma of the low-pass Gaussian filter
pp_npr = 40 # Number of parallel processors if None then auto
ap_pref = -6 # Affinity Propagation preference

###### Advanced settings

# Particles pre-processing
pp_3d = True # False
pp_rln_norm = False # normalize particles following the Relion convention
pp_2d_norm = True # renormalize after radial averaging
pp_direct = True # direct particle loading
pp_n_sset = None # 3000  # optional random subset size

# CC 2d radial matrix computation parameters
cc_metric = 'cc' # 'cc' or 'similarity'
cc_npr = 40 # None # 1 # None # if None then auto

## Clustering
cu_mode = 'vectors' # 'moments' # 'ncc_2dz'
cu_n_comp = 20 # None  # number of PCA components

# Affinity Propagation clustering parameters
ap_damp = 0.9
ap_max_iter = 2000
ap_conv_iter = 40
ap_ref = 'average' # 'exemplar' #
ap_ref_per = 33 # %

# Agglomerative clustering post-processing
do_ag = True # False
ag_n_clusters = 8 # 11 # 8
ag_linkage = 'ward' # 'average' # 'complete' #
ag_mode = 'exemplars' # ''averages'
ag_ncomp = 21 # PCA components for the refinement step
########################################################################################
# MAIN ROUTINE
########################################################################################
########## Print initial message
print('Test for deterministic classification of a STAR file.')
print('\tAuthor: ' + __author__)
print('\tDate: ' + time.strftime("%c") + '\n')
print('Options:')
print('\tInput STAR file: ' + str(in_star))
print('\tOutput directory: ' + str(out_dir))
print('\tOutput stem for AP: ' + str(out_stem))
print('\tParticles pre-processing:')
print('\t\t-Mask: ' + str(pp_mask))
print('\t\t-Low pass Gaussian filter sigma: ' + str(pp_low_sg) + ' voxels')
if pp_rln_norm:
print('\t\t-Normalize particles according relion convention.')
if pp_2d_norm:
print('\t\t-Renormalize particles after the radial averaging.')
if pp_3d:
print('\t\t-Radial compensation for 3D.')
if pp_npr is None:
print('\t\t-Number of processes: Auto')
else:
print('\t\t-Number of processes: ' + str(pp_npr))
if pp_direct:
print('\t\t-Direct particles loading activated.')
if pp_n_sset:
print('\t\t-Taking a random subset of: ' + str(pp_n_sset) + ' particles')
if cu_mode == 'ncc_2dz':
print('\tCC Z-axis radially averages matrix parameters: ')
print('\t\t-Metric: ' + str(cc_metric))
if cc_npr is None:
print('\t\t-Number of processes: Auto')
else:
print('\t\t-Number of processes: ' + str(cc_npr))
print('\tClustering: ')
print('\t\t-Mode: ' + str(cu_mode))
if cu_mode != 'ncc_2dz':
print('\t\t-Number of components: ' + str(cu_n_comp))
print('\t\tAffinity Propagation classification settings: ')
print('\t\t\t-Damping: ' + str(ap_damp))
if ap_pref is not None:
print('\t\t\t-Affinity propagation preference: ' + str(ap_pref))
print('\t\t\t-Maximum number of iterations: ' + str(ap_max_iter))
print('\t\t\t-Iterations for convergence: ' + str(ap_conv_iter))
print('\t\t\t-Reference for statistics: ' + str(ap_ref))
print('\t\t\t-Percentile for statistics: ' + str(ap_ref_per) + ' %')
if do_ag:
print('\tAgglomerative clustering post-processing: ')
print('\t\t-Number of clusters to find: ' + str(ag_n_clusters))
print('\t\t-Linkage: ' + str(ag_linkage))
print('\t\t-Mode: ' + str(ag_mode))
if ag_ncomp is not None:
print('\t\t-PCA number of components: ' + str(ag_ncomp))
print('')
######### Process
print('Main Routine: ')
print('\tLoading STAR file...')
star = ps.sub.Star()
try:
star.load(in_star)
if pp_n_sset:
print('\t\tCurrent STAR file has ' + str(star.get_nrows()) + ' particles')
print('\t\tGetting a random subset of ' + str(pp_n_sset) + ' particles')
star = star.gen_random_subset(pp_n_sset)
star_class = ps.sub.ClassStar(star)
np.savetxt(out_dir + '/labels.txt', star.get_column_data('_rlnClassNumber'), fmt='%d')
except ps.pexceptions.PySegInputError as e:
print('ERROR: input STAR file could not be loaded because of "' + e.get_message() + '"')
print('Terminated. (' + time.strftime("%c") + ')')
sys.exit(-1)
print('\tLoading and pre-processing the particles...')
try:
mask = ps.disperse_io.load_tomo(pp_mask)
star_class.load_particles(mask, low_sg=pp_low_sg, avg_norm=pp_2d_norm, rln_norm=pp_rln_norm, rad_3D=pp_3d,
npr=pp_npr, debug_dir=None, ref_dir=None, direct_rec=pp_direct)
star_class.save_particles(out_dir+'/all_particles', out_stem, masks=False, stack=False)
mimg.imsave(out_dir+'/all_particles/global_mask.png', star_class.get_global_mask())
except ps.pexceptions.PySegInputError as e:
print('ERROR: Particles could not be loaded because of "' + e.get_message() + '"')
print('Terminated. (' + time.strftime("%c") + ')')
sys.exit(-1)
if cu_mode == 'ncc_2dz':
print('\tBuilding the NCC matrix...')
try:
star_class.build_ncc_z2d(metric=cc_metric, npr=cc_npr)
star_class.save_cc(out_dir + '/cc_matrix.txt', txt=True)
except ps.pexceptions.PySegInputError as e:
print('ERROR: The NCC matrix could not be created because of "' + e.get_message() + '"')
print('Terminated. (' + time.strftime("%c") + ')')
sys.exit(-1)
elif cu_mode == 'vectors':
print('\tBuilding vectors...')
try:
star_class.build_vectors()
if (do_ag == True) and (ag_ncomp is not True):
star_class.vectors_dim_reduction(n_comp=ag_ncomp, method='pca')
star_class.save_vectors(out_dir + '/vectors.txt', txt=True)
if cu_n_comp is not None:
star_class.vectors_dim_reduction(n_comp=cu_n_comp, method='pca')
except ps.pexceptions.PySegInputError as e:
print('ERROR: The NCC matrix could not be created because of "' + e.get_message() + '"')
print('Terminated. (' + time.strftime("%c") + ')')
sys.exit(-1)
# PCA dimensionality reduction of the classification vectors; the eigenvalue
# spectrum is plotted so the user can pick a sensible number of components.
print('\tPCA dimensionality reduction...')
try:
    evs = star_class.vectors_dim_reduction(n_comp=cu_n_comp, method='pca')
    ids_evs_sorted = np.argsort(evs)[::-1]  # eigenvalue indices, descending
except ps.pexceptions.PySegInputError as e:
    print('ERROR: Classification failed because of "' + e.get_message() + '"')
    print('Terminated. (' + time.strftime("%c") + ')')
    sys.exit(-1)
# Bar plot of the sorted eigenvalue spectrum.
plt.figure()
plt.bar(np.arange(1, len(evs) + 1) - 0.25, evs[ids_evs_sorted], width=0.5, linewidth=2)
plt.xlim(0, len(evs) + 1)
plt.xticks(list(range(1, len(evs) + 1)))
plt.xlabel('#eigenvalue')
plt.ylabel('fraction of total correlation')
plt.tight_layout()
plt.savefig(out_dir + '/' + out_stem + '_evs.png', dpi=300)
# Cumulative fraction of total correlation; th_x marks the first eigenvalue
# index where the accumulated fraction reaches CDF_TH (defined elsewhere).
cdf_evs, evs_sorted = np.zeros(shape=len(evs), dtype=np.float32), np.sort(evs)[::-1]
cdf_evs[0] = evs_sorted[0]
th_x = None
for i in range(len(evs_sorted)):
    cdf_evs[i] = evs_sorted[:i + 1].sum()
    if (cdf_evs[i] >= CDF_TH) and (th_x is None):
        th_x = i + 1
# Bar plot of the cumulative spectrum with a dashed marker at the threshold.
plt.figure()
plt.bar(np.arange(1, len(evs) + 1) - 0.25, cdf_evs, width=0.5, linewidth=2)
plt.xlim(0, len(evs) + 1)
plt.ylim(0, 1)
if th_x is not None:
    plt.plot((th_x + 0.5, th_x + 0.5), (0, 1), color='k', linewidth=2, linestyle='--')
plt.xticks(list(range(1, len(evs) + 1)))
plt.xlabel('#eigenvalue')
plt.ylabel('Accumulated fraction of total correlation')
plt.tight_layout()
plt.savefig(out_dir + '/' + out_stem + '_cdf_evs.png', dpi=300)
# Build the ground-truth bookkeeping from the '_rlnClassNumber' column
# (presumably a RELION STAR file handle in `star` — defined earlier).
# Per ground-truth class id we track: member image names (gt_im), associated
# output classes (gt_bc), positive/negative counts (gt_np/gt_nn) and the
# confusion-matrix counters and derived metrics, all initialised below.
print('\tPreparing the ground truth...')
k_set = list(set(star.get_column_data('_rlnClassNumber')))
gt_im, gt_bc = dict().fromkeys(k_set), dict().fromkeys(k_set)
gt_np, gt_nn = dict().fromkeys(k_set), dict().fromkeys(k_set)
gt_tp, gt_fp, gt_fn = dict().fromkeys(k_set), dict().fromkeys(k_set), dict().fromkeys(k_set)
gt_tpr, gt_fpr = dict().fromkeys(k_set), dict().fromkeys(k_set)
gt_pr, gt_f1 = dict().fromkeys(k_set), dict().fromkeys(k_set)
for i, k_id in enumerate(star.get_column_data('_rlnClassNumber')):
    # fromkeys() left None values: first access raises and initialises.
    try:
        gt_im[k_id].append(star.get_element(key='_rlnImageName', row=i))
    except AttributeError:
        gt_im[k_id] = [star.get_element(key='_rlnImageName', row=i),]
    try:
        gt_np[k_id] += 1
    except TypeError:
        gt_np[k_id] = 1
    gt_bc[k_id] = list()
    gt_nn[k_id] = 0
    gt_tp[k_id], gt_fp[k_id], gt_fn[k_id] = 0, 0, 0
    gt_tpr[k_id], gt_fpr[k_id] = 0, 0
    gt_pr[k_id], gt_f1[k_id] = 0, 0
# Run Affinity Propagation on the selected input mode, optionally followed by
# an Agglomerative Clustering refinement; RELION class labels are refreshed
# after each stage.
print('\tAffinity Propagation classification...')
try:
    star_class.affinity_propagation(mode_in=cu_mode, damping=ap_damp, preference=ap_pref,
                                    max_iter=ap_max_iter, convergence_iter=ap_conv_iter,
                                    verbose=True)
except ps.pexceptions.PySegInputError as e:
    print('ERROR: Classification failed because of "' + e.get_message() + '"')
    print('Terminated. (' + time.strftime("%c") + ')')
    sys.exit(-1)
star_class.update_relion_classes()
if do_ag:
    print('\tAgglomerative Clustering refinement...')
    try:
        star_class.agglomerative_clustering_ref(n_clusters=ag_n_clusters, mode=ag_mode, pca_ncomp=ag_ncomp,
                                                linkage=ag_linkage, verbose=True)
    except ps.pexceptions.PySegInputError as e:
        print('ERROR: Classification failed because of "' + e.get_message() + '"')
        print('Terminated. (' + time.strftime("%c") + ')')
        sys.exit(-1)
    star_class.update_relion_classes()
# Evaluation, variant 1 ("merging classes"): every output class is mapped to
# its majority ground-truth class, all output classes mapped to the same
# ground-truth class are merged, and TP/FP/FN are computed on the merge.
split_stars = star_class.get_split_stars()
print('\t\tEvaluating the classification (VERSION: merging classes):')
print('\t\t\t-Finding the most representative output classes: ')
ko_set = list(range(len(split_stars)))
# ko_ids[output_class] -> list of ground-truth ids of its member particles.
ko_ids = dict().fromkeys(list(range(len(split_stars))))
for ko_id, split_star in enumerate(split_stars):
    for row in range(split_star.get_nrows()):
        img = split_star.get_element('_rlnImageName', row)
        for k_id in gt_im.keys():
            if img in gt_im[k_id]:
                try:
                    ko_ids[ko_id].append(k_id)
                except AttributeError:
                    ko_ids[ko_id] = [k_id,]
# ko_mr[output_class] -> majority (most represented) ground-truth class.
ko_mr = dict().fromkeys(list(ko_ids.keys()))
for ko_id, ids in zip(iter(ko_ids.keys()), iter(ko_ids.values())):
    ko_mr[ko_id] = np.argmax(np.bincount(np.asarray(ids)))
for k_id in gt_tp.keys():
    hold_np = 0
    for ko_id, k_id_max in zip(iter(ko_mr.keys()), iter(ko_mr.values())):
        if k_id_max == k_id:
            gt_bc[k_id].append(ko_id)
            hold_np += len(ko_ids[ko_id])
    print('\t\t\t\t+Class ' + str(k_id) + ': NP=' + str(gt_np[k_id]))
    print('\t\t\t\t\t*Classes associated ' + str(gt_bc[k_id]) + ': NPf=' + str(hold_np))
    # gt_nn: number of "negative" particles = members of all other classes.
    for kk_id in gt_tp.keys():
        if kk_id != k_id:
            gt_nn[k_id] += gt_np[kk_id]
print('\t\t\t-Computing the metrics: ')
for k_id in gt_tp.keys():
    for ko_id in gt_bc[k_id]:
        hold = (np.asarray(ko_ids[ko_id]) == k_id).sum()
        gt_tp[k_id] += hold
        hold = (np.asarray(ko_ids[ko_id]) != k_id).sum()
        gt_fp[k_id] += hold
    # False negatives: particles of k_id captured by other classes' merges.
    for kk_id in gt_bc.keys():
        if kk_id != k_id:
            for ko_id in gt_bc[kk_id]:
                hold = (np.asarray(ko_ids[ko_id]) == k_id).sum()
                gt_fn[k_id] += hold
    gt_tpr[k_id] = gt_tp[k_id] / float(gt_np[k_id])
    gt_fpr[k_id] = gt_fp[k_id] / float(gt_nn[k_id])
    try:
        gt_pr[k_id] = gt_tp[k_id] / float(gt_tp[k_id] + gt_fp[k_id])
    except ZeroDivisionError:
        gt_pr[k_id] = 0
    try:
        gt_f1[k_id] = 2. * (gt_pr[k_id] * gt_tpr[k_id]) / (gt_pr[k_id] + gt_tpr[k_id])
    except ZeroDivisionError:
        gt_f1[k_id] = 0
    print('\t\t\t\t+Class ' + str(k_id) + ': ')
    print('\t\t\t\t\t->TP=' + str(gt_tp[k_id]) + ', FP=' + str(gt_fp[k_id]) + ', FN=' + str(gt_fn[k_id]))
    print('\t\t\t\t\t->TPR=' + str(gt_tpr[k_id]) + ', FPR=' + str(gt_fpr[k_id]))
    print('\t\t\t\t\t->P=' + str(gt_pr[k_id]) + ', F1=' + str(gt_f1[k_id]))
# NOTE(review): the next two prints look like a duplicated header.
print('\t\t\t-Computing global metrics: ')
print('\t\t\t-Global metrics: ')
# Macro-averaged precision/recall over the per-class values.
gt_tpr, gt_pr = np.asarray(list(gt_tpr.values()), dtype=np.float32), np.asarray(list(gt_pr.values()), dtype=np.float32)
precision = gt_pr.mean()
recall = gt_tpr.mean()
print('\t\t\t\t+Precision=' + str(precision))
print('\t\t\t\t+Recall=' + str(recall))
print('\t\t\t\t+F1-score=' + str(2.*(precision*recall)/(precision+recall)))
# Evaluation, variant 2 ("one-to-one"): each ground-truth class is matched to
# the single output class that contains most of its particles, and the
# confusion metrics are recomputed against that one class only.
print('\t\tEvaluating the classification (VERSION: one-to-one class):')
print('\t\t\t-Finding the most representative output classes: ')
# ge_lut: ground-truth id -> best output class; eg_lut is the reverse map.
ge_lut = dict().fromkeys(list(gt_np.keys()))
eg_lut = dict().fromkeys(list(ko_ids.keys()))
for g_key in gt_np.keys():
    hold_max, hold_max_id = 0, -1
    for e_key in ko_ids.keys():
        hold_np = (np.asarray(ko_ids[e_key]) == g_key).sum()
        if hold_np > hold_max:
            hold_max = hold_np
            hold_max_id = e_key
    ge_lut[g_key] = hold_max_id
    eg_lut[hold_max_id] = g_key
print('\t\t\t-Computing the metrics: ')
# Reset the counters; NOTE(review): gt_fn is re-created but never filled in
# this variant, so FN prints as None below — confirm whether intended.
gt_tp, gt_fp, gt_fn = dict().fromkeys(k_set), dict().fromkeys(k_set), dict().fromkeys(k_set)
gt_tpr, gt_fpr = dict().fromkeys(k_set), dict().fromkeys(k_set)
gt_pr, gt_f1 = dict().fromkeys(k_set), dict().fromkeys(k_set)
for k_id in gt_tp.keys():
    gt_nn[k_id] = 0
for k_id in gt_tp.keys():
    for kk_id in gt_tp.keys():
        if k_id != kk_id:
            gt_nn[k_id] += gt_np[kk_id]
for k_id in gt_tp.keys():
    bc_id = ge_lut[k_id]
    gt_tp[k_id] = (np.asarray(ko_ids[bc_id]) == k_id).sum()
    gt_fp[k_id] = (np.asarray(ko_ids[bc_id]) != k_id).sum()
    gt_tpr[k_id] = gt_tp[k_id] / float(gt_np[k_id])
    # gt_nn[k_id] = gt_fp[k_id] + (gt_np[k_id]-gt_tp[k_id])
    gt_fpr[k_id] = gt_fp[k_id] / float(gt_nn[k_id])
    try:
        gt_pr[k_id] = gt_tp[k_id] / float(gt_tp[k_id] + gt_fp[k_id])
    except ZeroDivisionError:
        gt_pr[k_id] = 0
    try:
        gt_f1[k_id] = 2. * (gt_pr[k_id] * gt_tpr[k_id]) / (gt_pr[k_id] + gt_tpr[k_id])
    except ZeroDivisionError:
        gt_f1[k_id] = 0
    print('\t\t\t\t+Class ' + str(k_id) + ': ')
    print('\t\t\t\t\t->TP=' + str(gt_tp[k_id]) + ', FP=' + str(gt_fp[k_id]) + ', FN=' + str(gt_fn[k_id]))
    print('\t\t\t\t\t->TPR=' + str(gt_tpr[k_id]) + ', FPR=' + str(gt_fpr[k_id]))
    print('\t\t\t\t\t->P=' + str(gt_pr[k_id]) + ', F1=' + str(gt_f1[k_id]))
# NOTE(review): duplicated header prints, same as the merging variant.
print('\t\t\t-Computing global metrics: ')
print('\t\t\t-Global metrics: ')
gt_tpr, gt_pr = np.asarray(list(gt_tpr.values()), dtype=np.float32), np.asarray(list(gt_pr.values()), dtype=np.float32)
precision = gt_pr.mean()
recall = gt_tpr.mean()
print('\t\t\t\t+Precision=' + str(precision))
print('\t\t\t\t+Recall=' + str(recall))
print('\t\t\t\t+F1-score=' + str(2.*(precision*recall)/(precision+recall)))
# Persist the classification results (STAR files in several layouts plus the
# class exemplars/averages) into the output directory.
print('\t\tStoring the results...')
try:
    star_class.save_star(out_dir, out_stem, parse_rln=True, mode='gather')
    star_class.save_star(out_dir, out_stem, parse_rln=True, mode='split')
    star_class.save_star(out_dir, out_stem, mode='particle')
    star_class.save_class(out_dir, out_stem, purge_k=0, mode='exemplars')
    star_class.save_class(out_dir, out_stem, purge_k=0, mode='averages')
except ps.pexceptions.PySegInputError as e:
    print('ERROR: Result could not be stored because of "' + e.get_message() + '"')
    print('Terminated. (' + time.strftime("%c") + ')')
    sys.exit(-1)
print('Terminated. (' + time.strftime("%c") + ')')
#!/usr/bin/env python3
# Count word frequencies in a whitespace-tokenised text file and persist the
# filtered {word: count} table with joblib.
import argparse, joblib
from collections import Counter
import logging, sys
# Plain stdout logger; level 10 == logging.DEBUG.
logger = logging.getLogger(__name__)
logger.setLevel(10)
ch = logging.StreamHandler(sys.stdout)
ch.setFormatter(logging.Formatter("[%(asctime)s] %(levelname)s - %(message)s"))
logger.addHandler(ch)
def main():
    """Count whitespace-separated tokens in *input*, drop words rarer than
    ``--min_count`` and dump the resulting dict to *output* via joblib."""
    parser = argparse.ArgumentParser()
    parser.add_argument('input', type=str)
    parser.add_argument('output', type=str)
    parser.add_argument('--min_count', '-min', type=int, default=5)
    args = parser.parse_args()

    logger.info('Load Data')
    with open(args.input) as fin:
        token_counts = Counter()
        for line_no, text in enumerate(fin):
            token_counts.update(text.split())
            if line_no % 100000 == 0:
                logger.info('Prog={}'.format(line_no))

    logger.info('Filter Low Freq Words')
    word2count = {word: freq for word, freq in token_counts.items() if freq >= args.min_count}

    logger.info('Save')
    joblib.dump(word2count, args.output)
# CLI entry point.
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : 河北雪域网络科技有限公司 A.Star
# @contact: astar@snowland.ltd
# @site: www.snowland.ltd
# @file: _SM4.py
# @time: 2018/9/21 15:25
# @Software: PyCharm
"""
SM4 GM
@author: Dawei
@author: A.Star
"""
import copy
import struct
import time
from functools import reduce
# Expanded SM4 S-boxes Sbox table: 8bits input convert to 8 bits output
SboxTable = [
    0xd6, 0x90, 0xe9, 0xfe, 0xcc, 0xe1, 0x3d, 0xb7, 0x16, 0xb6, 0x14, 0xc2, 0x28, 0xfb, 0x2c, 0x05,
    0x2b, 0x67, 0x9a, 0x76, 0x2a, 0xbe, 0x04, 0xc3, 0xaa, 0x44, 0x13, 0x26, 0x49, 0x86, 0x06, 0x99,
    0x9c, 0x42, 0x50, 0xf4, 0x91, 0xef, 0x98, 0x7a, 0x33, 0x54, 0x0b, 0x43, 0xed, 0xcf, 0xac, 0x62,
    0xe4, 0xb3, 0x1c, 0xa9, 0xc9, 0x08, 0xe8, 0x95, 0x80, 0xdf, 0x94, 0xfa, 0x75, 0x8f, 0x3f, 0xa6,
    0x47, 0x07, 0xa7, 0xfc, 0xf3, 0x73, 0x17, 0xba, 0x83, 0x59, 0x3c, 0x19, 0xe6, 0x85, 0x4f, 0xa8,
    0x68, 0x6b, 0x81, 0xb2, 0x71, 0x64, 0xda, 0x8b, 0xf8, 0xeb, 0x0f, 0x4b, 0x70, 0x56, 0x9d, 0x35,
    0x1e, 0x24, 0x0e, 0x5e, 0x63, 0x58, 0xd1, 0xa2, 0x25, 0x22, 0x7c, 0x3b, 0x01, 0x21, 0x78, 0x87,
    0xd4, 0x00, 0x46, 0x57, 0x9f, 0xd3, 0x27, 0x52, 0x4c, 0x36, 0x02, 0xe7, 0xa0, 0xc4, 0xc8, 0x9e,
    0xea, 0xbf, 0x8a, 0xd2, 0x40, 0xc7, 0x38, 0xb5, 0xa3, 0xf7, 0xf2, 0xce, 0xf9, 0x61, 0x15, 0xa1,
    0xe0, 0xae, 0x5d, 0xa4, 0x9b, 0x34, 0x1a, 0x55, 0xad, 0x93, 0x32, 0x30, 0xf5, 0x8c, 0xb1, 0xe3,
    0x1d, 0xf6, 0xe2, 0x2e, 0x82, 0x66, 0xca, 0x60, 0xc0, 0x29, 0x23, 0xab, 0x0d, 0x53, 0x4e, 0x6f,
    0xd5, 0xdb, 0x37, 0x45, 0xde, 0xfd, 0x8e, 0x2f, 0x03, 0xff, 0x6a, 0x72, 0x6d, 0x6c, 0x5b, 0x51,
    0x8d, 0x1b, 0xaf, 0x92, 0xbb, 0xdd, 0xbc, 0x7f, 0x11, 0xd9, 0x5c, 0x41, 0x1f, 0x10, 0x5a, 0xd8,
    0x0a, 0xc1, 0x31, 0x88, 0xa5, 0xcd, 0x7b, 0xbd, 0x2d, 0x74, 0xd0, 0x12, 0xb8, 0xe5, 0xb4, 0xb0,
    0x89, 0x69, 0x97, 0x4a, 0x0c, 0x96, 0x77, 0x7e, 0x65, 0xb9, 0xf1, 0x09, 0xc5, 0x6e, 0xc6, 0x84,
    0x18, 0xf0, 0x7d, 0xec, 0x3a, 0xdc, 0x4d, 0x20, 0x79, 0xee, 0x5f, 0x3e, 0xd7, 0xcb, 0x39, 0x48,
]
# System parameter FK (four 32-bit words XOR-ed into the master key words
# at the start of the key schedule).
FK = [0xa3b1bac6, 0x56aa3350, 0x677d9197, 0xb27022dc]
# fixed parameter CK: one 32-bit constant per key-schedule round (32 rounds).
CK = [
    0x00070e15, 0x1c232a31, 0x383f464d, 0x545b6269,
    0x70777e85, 0x8c939aa1, 0xa8afb6bd, 0xc4cbd2d9,
    0xe0e7eef5, 0xfc030a11, 0x181f262d, 0x343b4249,
    0x50575e65, 0x6c737a81, 0x888f969d, 0xa4abb2b9,
    0xc0c7ced5, 0xdce3eaf1, 0xf8ff060d, 0x141b2229,
    0x30373e45, 0x4c535a61, 0x686f767d, 0x848b9299,
    0xa0a7aeb5, 0xbcc3cad1, 0xd8dfe6ed, 0xf4fb0209,
    0x10171e25, 0x2c333a41, 0x484f565d, 0x646b7279
]
# Cipher direction flags accepted by Sm4.sm4_set_key().
ENCRYPT = 0
DECRYPT = 1
def GET_UINT32_BE(key_data):
    """Pack the first four byte values of *key_data* into a big-endian uint32."""
    value = 0
    for byte in key_data[:4]:
        value = (value << 8) | byte
    return int(value)
def PUT_UINT32_BE(n):
    """Split the uint32 *n* into its four big-endian byte values."""
    return [int((n >> shift) & 0xff) for shift in (24, 16, 8, 0)]
# 32-bit left shift (result truncated to 32 bits).
def SHL(x, n):
    """Return ``x << n`` masked to an unsigned 32-bit value."""
    return int((x << n) & 0xffffffff)
def ROTL(x, n):
    """Rotate the 32-bit value *x* left by *n* bits (0 < n < 32)."""
    rotated = (x << n) | (x >> (32 - n))
    return int(rotated & 0xffffffff)
def XOR(a, b):
    """Element-wise XOR of two equal-length sequences, returned as a list."""
    return [p ^ q for p, q in zip(a, b)]
# look up in SboxTable and get the related value.
# args: [in] inch: 0x00~0xFF (8 bits unsigned value).
def sm4Sbox(idx):
    """Non-linear byte substitution: map *idx* (0..255) through SboxTable."""
    return SboxTable[idx]
# Calculating round encryption key.
# args: [in] a: a is a 32 bits unsigned value;
# return: sk[i]: i{0,1,2,3,...31}.
def sm4CalciRK(ka):
    """Key-schedule T' transform: S-box each byte of *ka*, then apply the
    linear transform L' (XOR of rotations by 13 and 23)."""
    a = PUT_UINT32_BE(ka)          # split word into 4 bytes
    b = [sm4Sbox(i) for i in a]    # byte-wise S-box substitution
    bb = GET_UINT32_BE(b)          # re-pack to a 32-bit word
    rk = bb ^ (ROTL(bb, 13)) ^ (ROTL(bb, 23))
    return rk
# private F(Lt) function:
# "T algorithm" == "L algorithm" + "t algorithm".
# args: [in] a: a is a 32 bits unsigned value;
# return: c: c is calculated with line algorithm "L" and nonline algorithm "t"
def sm4Lt(ka):
    """Round-function T transform: byte-wise S-box, then the linear
    transform L (XOR of rotations by 2, 10, 18 and 24)."""
    a = PUT_UINT32_BE(ka)
    b = [sm4Sbox(i) for i in a]
    bb = GET_UINT32_BE(b)
    return bb ^ (ROTL(bb, 2)) ^ (ROTL(bb, 10)) ^ (ROTL(bb, 18)) ^ (ROTL(bb, 24))
# private F function:
# Calculating and getting encryption/decryption contents.
# args: [in] x0: original contents;
# args: [in] x1: original contents;
# args: [in] x2: original contents;
# args: [in] x3: original contents;
# args: [in] rk: encryption/decryption key;
# return the contents of encryption/decryption contents.
def sm4F(x0, x1, x2, x3, rk):
    """One SM4 round: F(X0..X3, rk) = X0 ^ T(X1 ^ X2 ^ X3 ^ rk)."""
    return x0 ^ sm4Lt(x1 ^ x2 ^ x3 ^ rk)
class Sm4(object):
    """SM4 block cipher: 128-bit key, 128-bit blocks, 32 rounds.

    Data is handled as sequences of byte values (ints 0..255).  Input
    lengths are assumed to be multiples of 16 bytes — no padding is applied
    by any mode here.
    """
    def __init__(self):
        # 32 round keys (filled by sm4_setkey); default direction is ENCRYPT.
        self.sk = [0] * 32
        self.mode = ENCRYPT
    def sm4_set_key(self, key_data, mode):
        # Thin alias kept for API compatibility.
        self.sm4_setkey(key_data, mode)
    def sm4_setkey(self, key, mode):
        """Expand the first 16 bytes of *key* into 32 round keys; for
        DECRYPT the round keys are used in reverse order."""
        k = [0] * 36
        # MK: the four big-endian 32-bit words of the master key.
        MK = struct.unpack_from(">IIII", bytes(key))
        k[0:4] = XOR(MK, FK)
        # 'item' incrementally maintains k[i+1] ^ k[i+2] ^ k[i+3].
        item = k[1] ^ k[2]
        for i in range(32):
            item ^= k[i + 3]
            k[i + 4] = k[i] ^ sm4CalciRK(item ^ CK[i])
            item ^= k[i + 1]
        self.sk = k[4:]
        self.mode = mode
        if mode == DECRYPT:
            self.sk.reverse()
    def sm4_one_round(self, sk, in_put):
        """Encrypt/decrypt one 16-byte block with round keys *sk*."""
        item = list(struct.unpack_from(">IIII", bytes(in_put)))
        # The state is kept newest-first; the reduce applies one sm4F round
        # per round key, and the final state order realises the output
        # reversal R required by the SM4 specification.
        item.reverse()
        res = reduce(lambda x, y: [sm4F(x[3], x[2], x[1], x[0], y), x[0], x[1], x[2]], sk, item)
        rev2 = reduce(lambda x, i: x+struct.pack(">I", i), res, b'')
        return list(rev2)
    def sm4_crypt_ecb(self, input_data):
        # SM4-ECB block encryption/decryption: each 16-byte chunk is
        # processed independently.
        tmp = [input_data[i:i + 16] for i in range(0, len(input_data), 16)]
        output_data = reduce(lambda a, b: a + b, map(lambda x: self.sm4_one_round(self.sk, x), tmp), [])
        return output_data
    def sm4_crypt_cbc(self, iv, input_data):
        # SM4-CBC buffer encryption/decryption.
        length = len(input_data)
        i = 0
        output_data = []
        tmp_input = [0] * 16
        if self.mode == ENCRYPT:
            # Encrypt: XOR each plaintext block with the previous ciphertext
            # block (IV for the first) before encrypting it.
            while length > 0:
                tmp_input[0:16] = XOR(input_data[i:i + 16], iv[0:16])
                output_data += self.sm4_one_round(self.sk, tmp_input[0:16])
                iv = copy.deepcopy(output_data[i:i + 16])
                i += 16
                length -= 16
        else:
            # Decrypt: decrypt every block, then XOR with the preceding
            # ciphertext block (ivs holds IV + all ciphertext blocks).
            ivs = [input_data[i:i + 16] for i in range(0, len(input_data), 16)]
            ivs.insert(0, iv)
            tmp = map(lambda x: self.sm4_one_round(self.sk, x), ivs[1:])
            output_data = reduce(lambda a, b: a + b, map(XOR, tmp, ivs[:-1]), [])
        return output_data
    def sm4_crypt_pcbc(self, iv, input_data):
        """
        SM4-PCBC buffer encryption/decryption
        :param iv: 16-byte initialisation vector (list of ints)
        :param input_data: byte values, length a multiple of 16
        :return: list of output byte values
        """
        length = len(input_data)
        i = 0
        output_data = []
        if self.mode == ENCRYPT:
            while length > 0:
                tmp_input = input_data[i:i + 16]
                out = self.sm4_one_round(self.sk, XOR(iv, tmp_input[0:16]))
                output_data.extend(out)
                # PCBC chains plaintext XOR ciphertext into the next IV.
                iv = copy.deepcopy(XOR(out, tmp_input))
                i += 16
                length -= 16
        else:
            while length > 0:
                tmp_input = input_data[i:i + 16]
                out = self.sm4_one_round(self.sk, tmp_input[0:16])
                out = XOR(out, iv)
                iv = copy.deepcopy(XOR(out, tmp_input))
                output_data.extend(out)
                i += 16
                length -= 16
        return output_data
    def sm4_crypt_ofb(self, iv, input_data):
        """
        SM4-OFB buffer encryption/decryption
        :param iv: 16-byte initialisation vector (list of ints)
        :param input_data: byte values, length a multiple of 16
        :return: list of output byte values
        """
        length = len(input_data)
        i = 0
        output_data = []
        if self.mode == ENCRYPT:
            while length > 0:
                tmp_input = input_data[i:i + 16]
                out = self.sm4_one_round(self.sk, iv)
                iv = out
                out = XOR(out, tmp_input)
                output_data.extend(out)
                i += 16
                length -= 16
        else:
            # OFB decryption is the same keystream operation as encryption;
            # the round keys were reversed for DECRYPT, so temporarily flip
            # them (and the mode) back to encrypt order, then restore.
            self.mode = ENCRYPT
            self.sk = self.sk[::-1]
            while length > 0:
                tmp_input = input_data[i:i + 16]
                out = self.sm4_one_round(self.sk, iv)
                iv = out
                out = XOR(out, tmp_input)
                output_data.extend(out)
                i += 16
                length -= 16
            self.mode = DECRYPT
            self.sk = self.sk[::-1]
        return output_data
    def sm4_crypt_cfb(self, iv, input_data):
        """
        SM4-CFB buffer encryption/decryption
        :param iv: 16-byte initialisation vector (list of ints)
        :param input_data: byte values, length a multiple of 16
        :return: list of output byte values
        """
        length = len(input_data)
        i = 0
        output_data = []
        if self.mode == ENCRYPT:
            while length > 0:
                tmp_input = input_data[i:i + 16]
                out = self.sm4_one_round(self.sk, iv)
                # The produced ciphertext block feeds the next iteration.
                iv = XOR(tmp_input, out)
                output_data.extend(iv)
                i += 16
                length -= 16
        else:
            # CFB decryption also uses the cipher in encrypt direction;
            # flip the reversed round keys back, then restore afterwards.
            self.mode = ENCRYPT
            self.sk = self.sk[::-1]
            while length > 0:
                tmp_input = input_data[i:i + 16]
                out = self.sm4_one_round(self.sk, iv)
                out = XOR(out, tmp_input)
                iv = tmp_input
                output_data.extend(out)
                i += 16
                length -= 16
            self.mode = DECRYPT
            self.sk = self.sk[::-1]
        return output_data
def sm4_crypt_ecb(mode, key, data):
    """One-shot SM4-ECB helper: build a cipher, install *key* for *mode*,
    and process *data*."""
    cipher = Sm4()
    cipher.sm4_set_key(key, mode)
    return cipher.sm4_crypt_ecb(data)
def sm4_crypt_cbc(mode, key, iv, data):
    """One-shot SM4-CBC helper: build a cipher, install *key* for *mode*,
    and process *data* with initialisation vector *iv*."""
    cipher = Sm4()
    cipher.sm4_set_key(key, mode)
    return cipher.sm4_crypt_cbc(iv, data)
def sm4_crypt_pcbc(mode, key, iv, data):
    """One-shot SM4-PCBC helper: build a cipher, install *key* for *mode*,
    and process *data* with initialisation vector *iv*."""
    cipher = Sm4()
    cipher.sm4_set_key(key, mode)
    return cipher.sm4_crypt_pcbc(iv, data)
def sm4_crypt_cfb(mode, key, iv, data):
    """One-shot SM4-CFB helper: build a cipher, install *key* for *mode*,
    and process *data* with initialisation vector *iv*."""
    cipher = Sm4()
    cipher.sm4_set_key(key, mode)
    return cipher.sm4_crypt_cfb(iv, data)
def sm4_crypt_ofb(mode, key, iv, data):
    """One-shot SM4-OFB helper: build a cipher, install *key* for *mode*,
    and process *data* with initialisation vector *iv*."""
    cipher = Sm4()
    cipher.sm4_set_key(key, mode)
    return cipher.sm4_crypt_ofb(iv, data)
SM4 = Sm4  # Backwards-compatible alias for the class name.
if __name__ == "__main__":
    # Self-test: round-trip random data (and a file) through ECB and CBC.
    # log_init()
    import numpy as np
    input_data = list(np.random.randint(256, size=1024*6))
    iv_data = [0] * 16
    sm4_d = Sm4()
    key_data = b'hello world, errr...'  # only the first 16 bytes are used
    # key_data = [0x5a] * 16
    sm4_d.sm4_set_key(key_data, ENCRYPT)
    # BUG FIX: time.clock() was deprecated since 3.3 and removed in Python
    # 3.8; time.perf_counter() is the portable replacement (the old priming
    # time.clock() call is no longer needed either).
    st = time.perf_counter()
    en_data = sm4_d.sm4_crypt_ecb(input_data)
    print(en_data, "en_data:")
    sm4_d.sm4_set_key(key_data, DECRYPT)
    de_data = sm4_d.sm4_crypt_ecb(en_data)
    print(de_data, "de_data:")
    if de_data == input_data:
        print("ecb check pass")
    else:
        print("ecb check fail")
        # RuntimeError instead of BaseException: BaseException should not be
        # raised directly (it even escapes "except Exception" handlers).
        raise RuntimeError("error")
    et = time.perf_counter()
    print(et-st)
    sm4_d.sm4_set_key(key_data, ENCRYPT)
    en_data = sm4_d.sm4_crypt_cbc(iv_data, input_data)
    print(en_data, "en_data:")
    sm4_d.sm4_set_key(key_data, DECRYPT)
    de_data = sm4_d.sm4_crypt_cbc(iv_data, en_data)
    print(de_data, "de_data:")
    if de_data == input_data:
        print("cbc check pass")
    else:
        print("cbc check fail")
        raise RuntimeError("error")
    # file test: encrypt/decrypt a file on disk in ECB and CBC modes.
    file_path = r"../../test2.txt"
    ecb_path_en = r"../../test2k_ecb_en.txt"
    ecb_path_de = r"../../test2k_ecb_de.txt"
    cbc_path_en = r"../../test2k_cbc_en.txt"
    cbc_path_de = r"../../test2k_cbc_de.txt"
    key_data = [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
                0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08]
    iv_data = [0x5a] * 16
    with open(file_path, 'rb') as f:
        file_data = f.read()
    file_data_list = list(file_data)
    # 1. ECB
    sm4_d = Sm4()
    sm4_d.sm4_set_key(key_data, ENCRYPT)
    en_data = sm4_d.sm4_crypt_ecb(file_data_list)
    with open(ecb_path_en, 'wb') as f:
        f.write(bytes(en_data))
    sm4_d.sm4_set_key(key_data, DECRYPT)
    de_data = sm4_d.sm4_crypt_ecb(en_data)
    with open(ecb_path_de, 'wb') as f:
        f.write(bytes(de_data))
    if de_data == file_data_list:
        print("file decode pass")
    else:
        print("file decode fail")
        raise RuntimeError('error')
    # 2. CBC
    sm4_d.sm4_set_key(key_data, ENCRYPT)
    en_data = sm4_d.sm4_crypt_cbc(iv_data, file_data_list)
    with open(cbc_path_en, 'wb') as f:
        f.write(bytes(en_data))
    sm4_d.sm4_set_key(key_data, DECRYPT)
    de_data = sm4_d.sm4_crypt_cbc(iv_data, en_data)
    with open(cbc_path_de, 'wb') as f:
        f.write(bytes(de_data))
    if de_data == file_data_list:
        print("file decode pass")
    else:
        print("file decode fail")
        raise RuntimeError("error")
|
import pygame
from Character import Character
class Otis(Character):
    """Playable character 'Otis': sprite loading and per-tick movement and
    animation handling."""

    # Each standing sprite is shown for 30 consecutive ticks.
    STAND_SPRITES = ('OtisStand1.png', 'OtisStand2.png', 'OtisStand3.png')

    def __init__(self):
        super().__init__()
        self.name = 'Otis'
        # BUG FIX: the original assigned health = 200 and then immediately
        # overwrote it with 0 (dead store), leaving Otis with no health.
        self.health = 200
        self.portrait = pygame.image.load('Sprites/Otis/OtisPortrait.png')
        self.image = pygame.image.load('Sprites/Otis/OtisStand1.png')
        self.neutralPosition = pygame.image.load('Sprites/Otis/OtisStand1.png')
        # Build the standing animation (each frame repeated 30 times).
        for sprite_name in self.STAND_SPRITES:
            frame = pygame.transform.scale(
                pygame.image.load('Sprites/Otis/' + sprite_name), (130, 250))
            self.standing.extend([frame] * 30)

    def update(self, player, x):
        """Advance one tick: apply dash/back-dash physics, or horizontal
        movement `x`, then show the matching animation frame."""
        if player.isDashing:
            self._tick_dash(player, forward=True)
        elif player.isBackDashing:
            self._tick_dash(player, forward=False)
        elif x < 0:
            player.x += x
            frames = (player.character.walkBackward if player.facingRight
                      else player.character.walkFoward)
            self._advance_animation(player, frames)
        elif x > 0:
            player.x += x
            frames = (player.character.walkFoward if player.facingRight
                      else player.character.walkBackward)
            self._advance_animation(player, frames)
        elif x == 0:
            player.x += x
            if player.crouching != True:
                self._advance_animation(player, player.character.standing)
            else:
                # BUG FIX: the original referenced self.character.crouching
                # here (Otis has no .character attribute, so this branch
                # raised AttributeError); every other branch uses
                # player.character.
                self._advance_animation(player, player.character.crouching)

    def _advance_animation(self, player, frames, face_right=None):
        """Show the next frame of `frames` (mirrored when facing left) and
        advance player.index, wrapping to 0 past the end of the cycle."""
        if face_right is None:
            face_right = player.facingRight
        if player.index >= len(frames):
            player.index = 0
        else:
            frame = frames[player.index]
            if face_right:
                player.character.image = frame
            else:
                player.character.image = pygame.transform.flip(frame, True, False)
            player.index += 1

    def _tick_dash(self, player, forward):
        """One tick of a (back-)dash: lock the direction at dash start, move
        with decaying speed, finish after the full frame cycle, animate."""
        frames = player.character.dash if forward else player.character.backdash
        if not player.setAction:
            # Lock the movement direction to the facing at dash start.
            player.multiplier = 1 if player.facingRight else -1
            player.setAction = True
        # A back-dash moves opposite to the locked direction.
        direction = player.multiplier if forward else -player.multiplier
        n = len(frames)
        progress = player.currentDash
        if progress < n / 2:
            player.x += 0.7 * direction
            if not forward:
                player.y -= 0.16  # back-dash hops up at the start...
        elif n / 2 < progress < n / 1.5:
            player.x += 0.35 * direction
        elif progress > n / 1.5:
            player.x += 0.175 * direction
            if not forward:
                player.y += 0.08  # ...and settles back down at the end
        player.currentDash += 1.0
        if player.currentDash >= n:
            if forward:
                player.isDashing = False
            else:
                player.isBackDashing = False
            player.setAction = False
            player.y = player.yOriginal
            player.currentDash = 0
        # Dash frames mirror based on the locked direction, not on facing.
        self._advance_animation(player, frames,
                                face_right=(player.multiplier == 1))
# -*- coding:utf-8 -*-
from PythonMiddleware.notify import Notify
from PythonMiddleware.graphene import Graphene
from PythonMiddlewarebase.operationids import operations
import sys
import pymongo
import datetime
from time import sleep
from collections import deque
from threading import Thread, Lock
from prometheus_client import CollectorRegistry, Gauge, pushadd_to_gateway
from config import *
from utils import Logging
from handle_block import logger, parse_operations, handle_operations, init_gauges
#logger = Logging().getLogger()
# Shared work queues between the producer/consumer threads started in main:
# block summaries, pending block numbers and parsed operation batches.
block_info_q = deque()
pending_block_num_q = deque()
op_d = deque()
# One lock per deque guarding the push/pop pairs across threads.
block_info_deque_lock = Lock()
pending_block_num_deque_lock = Lock()
op_deque_lock = Lock()
def check_block(args):
    """Scan blocks args[0]..args[1]; for every block missing from MongoDB,
    fetch it over RPC and push a summary dict onto block_info_q for the
    block2db consumer thread."""
    def one_block_check(block_num):
        # Fetch the block and resolve its witness account name via RPC.
        logger.info('recv block number: {}'.format(block_num))
        try:
            block = gph.rpc.get_block(block_num)
            block_witness = gph.rpc.get_object(gph.rpc.get_object(block['witness'])['witness_account'])['name']
        except Exception as e:
            logger.error('get_object exception. block {}, error {}'.format(block_num, repr(e)))
            # BUG FIX: 'block' is unbound after a failed fetch; skip this
            # block instead of crashing with NameError below.
            return
        block_time = block['timestamp']
        transactions = block["transactions"]
        witness_sign = block['witness_signature']
        trx_total = 0
        ops_total = 0
        transactions_id = []
        if transactions:
            trx_total = len(transactions)
            for trx in transactions:
                transactions_id.append(trx[0])
                ops_total += len(trx[1]["operations"])
        block_data = {
            "block_num": block_num,
            "time": block_time,
            "witness": block_witness,
            "witness_sign": witness_sign,
            "transactions_total": trx_total,
            "transactions_id": transactions_id,
            "operations_total": ops_total
        }
        # 'with' guarantees the lock is released even if append raises.
        with block_info_deque_lock:
            block_info_q.append(block_data)

    start = args[0]
    end = args[1]
    gph = Graphene(node=nodeaddress)
    info = gph.info()
    last_block_num = info['head_block_number']
    logger.info('last_block_num: {}, block start: {}, end: {}'.format(last_block_num, start, end))
    if start > last_block_num:
        # BUG FIX: the original message printed "start < end", which did not
        # describe the condition that was actually detected.
        logger.error("start:{} > head_block_number:{}".format(start, last_block_num))
        return
    if end > last_block_num:
        end = last_block_num
    conn = pymongo.MongoClient(mongodb_params['host'], mongodb_params['port'])
    conn_db = conn[mongodb_params['db_name']]
    for index in range(start, end+1):
        # NOTE(review): Cursor.count() was removed in pymongo 4; with a
        # modern driver this should be count_documents() — confirm the
        # pinned pymongo version before upgrading.
        result = conn_db.block.find({'block_num': index})
        if result.count() == 0:
            logger.info('check block number: {}'.format(index))
            one_block_check(index)
        else:
            logger.info('block({}) already exists in mongodb'.format(index))
        sleep(0.1)
    conn.close()
# Parse blocks (consumer thread): pop block numbers from pending_block_num_q,
# parse their operations over RPC and push the result onto op_d.
def analysis_block():
    """Consumer thread: parse pending blocks into operation lists for
    data2db()."""
    gph = Graphene(node=nodeaddress)
    from PythonMiddleware.instance import set_shared_graphene_instance
    set_shared_graphene_instance(gph)
    while 1:
        if pending_block_num_q:
            try:
                # 'with' guarantees the lock is released on any error.
                with pending_block_num_deque_lock:
                    block_num = pending_block_num_q.popleft()
                logger.debug('pop block number: {}'.format(block_num))
                # BUG FIX: pre-initialise operations_list; if get_block or
                # parse_operations raised, the name stayed unbound and the
                # 'if operations_list:' check crashed with NameError.
                operations_list = None
                try:
                    block_info = gph.rpc.get_block(block_num)
                    time = block_info["timestamp"]
                    transactions = block_info["transactions"]
                    operations_list = parse_operations(gph, block_num, time, transactions)
                except Exception as e:
                    logger.error('parse block exception. block {}, error {}'.format(block_num, repr(e)))
                if operations_list:
                    with op_deque_lock:
                        op_d.append(operations_list)
            except Exception as e:
                logger.error("pending_block_num_q: {}, except: '{}'".format(pending_block_num_q, repr(e)))
        sleep(0.7)
# Write block summaries (consumer thread): pop from block_info_q and insert
# them into the MongoDB 'block' collection.
def block2db():
    """Consumer thread: persist block summaries produced by check_block()."""
    while 1:
        if block_info_q:
            try:
                # BUG FIX: the original acquire()/release() pair leaked the
                # lock if an exception fired in between; 'with' always
                # releases it.
                with block_info_deque_lock:
                    block = block_info_q.popleft()
                conn = pymongo.MongoClient(mongodb_params['host'], mongodb_params['port'])
                conn_db = conn[mongodb_params['db_name']]
                try:
                    conn_db.block.insert_one({
                        'block_num': block["block_num"],
                        'time': block["time"],
                        'witness': block["witness"],
                        'witness_sign': block["witness_sign"],
                        'transactions_id': str(block["transactions_id"]),
                        'transactions_total': block["transactions_total"],
                        'operations_total': block["operations_total"]
                    })
                except Exception as e:
                    logger.error("block: {}, except: '{}'".format(block["block_num"], repr(e)))
                finally:
                    conn.close()
                logger.info('block num: {} done.'.format(block["block_num"]))
            except Exception as e:
                logger.error("except: '{}'".format(repr(e)))
        sleep(0.7)
# Write parsed block data (consumer thread): pop operation batches from op_d
# and persist them into the op/transaction tables.
def data2db():
    """Consumer thread: hand parsed operation batches to handle_operations()."""
    while 1:
        if op_d:
            try:
                op_deque_lock.acquire()
                batch = op_d.popleft()
                op_deque_lock.release()
                handle_operations(batch)
            except Exception as err:
                logger.error("except: '{}'".format(repr(err)))
        sleep(0.5)
if __name__ == '__main__':
    # CLI: python3 check.py <block_number_start> <block_number_end>
    logger.info('args: {}'.format(sys.argv))
    if len(sys.argv) < 3:
        logger.error('Usage: python3 check.py block_number_start, block_number_end')
        sys.exit(1)
    start = int(sys.argv[1])
    end = int(sys.argv[2])
    if start > end or start <= 0 or end <= 0:
        logger.error('block_number_start: {} > block_number_end: {} or start <= 0 or end <= 0'.format(start, end))
        sys.exit(1)
    args = [start, end]
    init_gauges()
    # Producer (check_block) plus three consumer threads sharing the deques.
    t1 = Thread(target=check_block, args=(args,))
    t1.start()
    t2 = Thread(target=block2db)
    t2.start()
    t3 = Thread(target=analysis_block)
    t3.start()
    t4 = Thread(target=data2db)
    t4.start()
|
"""
[summary]
[extended_summary]
"""
# region [Imports]
# * Standard Library Imports ------------------------------------------------------------------------------------------------------------------------------------>
import gc
import os
import unicodedata
from abc import ABC, abstractmethod
from typing import Callable, Dict, TYPE_CHECKING, Union
from datetime import datetime, timezone
from functools import partial
from collections import UserDict
# * Third Party Imports ----------------------------------------------------------------------------------------------------------------------------------------->
import discord
from jinja2 import BaseLoader, Environment
from natsort import natsorted
from discord.ext import commands, tasks, flags, ipc
import gidlogger as glog
from antipetros_discordbot.auxiliary_classes.all_item import AllItem
from antipetros_discordbot.init_userdata.user_data_setup import ParaStorageKeeper
from antipetros_discordbot.utility.discord_markdown_helper.discord_formating_helper import embed_hyperlink
from antipetros_discordbot.utility.named_tuples import EmbedFieldItem
from antipetros_discordbot.utility.general_decorator import handler_method, handler_method_only_commands
from async_property import async_property
from antipetros_discordbot.utility.discord_markdown_helper.general_markdown_helper import CodeBlock
from antipetros_discordbot.utility.discord_markdown_helper.discord_formating_helper import embed_hyperlink
from antipetros_discordbot.utility.discord_markdown_helper.special_characters import ListMarker, SPECIAL_SPACE, ZERO_WIDTH
import inflect
import inspect
from antipetros_discordbot.engine.replacements import AntiPetrosBaseCommand, AntiPetrosBaseGroup, AntiPetrosFlagCommand
from antipetros_discordbot.engine.replacements.helper import CommandCategory
if TYPE_CHECKING:
from antipetros_discordbot.engine.antipetros_bot import AntiPetrosBot
# endregion[Imports]
# region [TODO]
# endregion [TODO]
# region [AppUserData]
# endregion [AppUserData]
# region [Logging]
log = glog.aux_logger(__name__)
log.info(glog.imported(__name__))
# endregion[Logging]
# region [Constants]
THIS_FILE_DIR = os.path.abspath(os.path.dirname(__file__))
APPDATA = ParaStorageKeeper.get_appdata()
BASE_CONFIG = ParaStorageKeeper.get_config('base_config')
COGS_CONFIG = ParaStorageKeeper.get_config('cogs_config')
# location of this file, does not work if app gets compiled to exe with pyinstaller
THIS_FILE_DIR = os.path.abspath(os.path.dirname(__file__))
inflect_engine = inflect.engine()
# endregion[Constants]
class StringKeyDict(UserDict):
    """Dict that normalises every key to ``str`` on insert and lookup.

    BUG FIX: the original passed ``__dict=...`` as a keyword to
    ``UserDict.__init__``.  UserDict's first parameter is positional-only,
    so the keyword landed in ``**kwargs`` and the whole seed mapping was
    stored under the literal key ``"__dict"`` instead of populating the
    dict (even ``StringKeyDict()`` gained a ``"__dict": None`` entry).
    """

    def __init__(self, in_dict: dict = None) -> None:
        # Pass the seed positionally; UserDict populates itself through our
        # __setitem__, which already coerces every key to str.
        super().__init__(in_dict if in_dict is not None else {})

    def __setitem__(self, key, item):
        super().__setitem__(str(key), item)

    def __getitem__(self, key):
        return super().__getitem__(str(key))

    def __delitem__(self, key):
        super().__delitem__(str(key))

    def __contains__(self, key):
        return super().__contains__(str(key))

    @classmethod
    def fromkeys(cls, iterable, value=None):
        """Build a StringKeyDict mapping each (stringified) key to *value*."""
        d = cls()
        for key in iterable:
            d[str(key)] = value
        return d
class AbstractProvider(ABC):
    """Base class for objects that provide one part of a help embed.

    Subclasses declare which embed slot they fill via ``provides`` and
    produce the slot's value from their async ``__call__``.
    """
    field_item = EmbedFieldItem

    def __init__(self, in_builder: "HelpCommandEmbedBuilder"):
        # NOTE(review): the hint names "HelpCommandEmbedBuilder" but the
        # builder class below is ``HelpEmbedBuilder`` — confirm intended name.
        self.bot = in_builder.bot
        self.in_object = in_builder.in_object
        self.member = in_builder.member

    @abstractmethod
    async def __call__(self):
        ...

    @classmethod
    @property
    @abstractmethod
    def provides(cls):
        # Name of the embed slot this provider fills, e.g. 'title'.
        ...

    @property
    def typus(self):
        """'categories' or 'commands' depending on the wrapped object (else None)."""
        if isinstance(self.in_object, CommandCategory):
            return 'categories'
        if isinstance(self.in_object, (AntiPetrosBaseCommand, AntiPetrosBaseGroup, AntiPetrosFlagCommand)):
            return 'commands'

    @property
    def is_group(self):
        return isinstance(self.in_object, commands.Group)

    @property
    def is_sub_command(self):
        # Only command objects have a ``parent``; categories fall through.
        if hasattr(self.in_object, 'parent'):
            return self.in_object.parent is not None
        return False

    @property
    def member_can_invoke(self):
        """True if the requesting member may invoke the wrapped object.

        Checks, in order: bot ownership, role overlap (AllItem matches any
        role list), and explicit member allow-lists.
        """
        member_roles = set([AllItem()] + [role for role in self.member.roles])
        if self.member.id in self.bot.owner_ids:
            return True
        if set(self.in_object.allowed_roles).isdisjoint(member_roles) is False:
            return True
        try:
            if self.member in self.in_object.allowed_members or AllItem() in self.in_object.allowed_members:
                return True
        # allowed_members may be None/non-iterable for some objects
        except TypeError:
            pass
        return False
class AbstractFieldProvider(AbstractProvider):
    """Base for providers that contribute embed *fields*.

    On construction it collects every method marked by the
    ``handler_method`` decorators into ``self.all_handler``, keyed by the
    attribute name each handler covers.
    """
    provides = 'fields'

    def __init__(self, in_builder: "HelpCommandEmbedBuilder"):
        super().__init__(in_builder)
        self.all_handler = None
        # default field-name prettifier: "allowed_roles" -> "Allowed Roles"
        self.field_name_handler = self._no_underscore_and_to_title
        self._set_handler_attribute()

    def _set_handler_attribute(self):
        # Scan bound methods for the `is_handler` marker set by the
        # handler_method decorators and index them by handled attribute.
        self.all_handler = {}
        for method_name, method_object in inspect.getmembers(self, inspect.ismethod):
            if hasattr(method_object, 'is_handler') and method_object.is_handler is True:
                self.all_handler[method_object.handled_attr] = method_object

    @ classmethod
    @ property
    def bool_symbol_map(cls) -> Dict[bool, str]:
        # Subclasses map True/False to display symbols (e.g. check marks).
        return NotImplemented

    async def handle_name(self, name):
        """Prettify a field name; fall back to the raw name on any error."""
        try:
            return await self.field_name_handler(name)
        except Exception:
            return name

    async def _no_underscore_and_to_title(self, in_data):
        return in_data.replace('_', ' ').title()

    async def _no_handling(self, in_data):
        # identity alternative for ``field_name_handler``
        return in_data

    def add_handler(self, new_handler: Callable):
        """Register an extra coroutine handler at runtime.

        Raises:
            TypeError: if *new_handler* is not a coroutine function.
        """
        if not inspect.iscoroutinefunction(new_handler):
            raise TypeError('new_handler needs to be a coroutine')
        if not hasattr(new_handler, 'is_handler'):
            new_handler = handler_method(new_handler)
        # NOTE(review): unlike the bound methods collected above, this
        # partially applies handled_attr as the first argument — confirm
        # external handlers expect that call shape.
        self.all_handler[new_handler.handled_attr] = partial(new_handler, new_handler.handled_attr)
class DefaultTitleProvider(AbstractProvider):
    """Provides the embed title: the command's name, prefixed by its parent for sub-commands."""
    provides = 'title'

    async def __call__(self):
        if not self.is_sub_command:
            return self.in_object.name
        return f"{self.in_object.parent.name} {self.in_object.name}"
class DefaultDescriptionProvider(AbstractProvider):
    """Provides the embed description; for categories, appends a command list."""
    provides = 'description'

    async def get_commands(self):
        # List the category's commands, most frequently used first.
        frequ_dict = await self.bot.get_command_frequency()
        sorted_commands = sorted(getattr(self.in_object, 'commands'), key=lambda x: frequ_dict.get(x.name, 0), reverse=True)
        value = ListMarker.make_list([f"`{command}`" for command in sorted_commands])
        # indent each line so the list sits under the "Commands:" heading
        return '\n'.join(map(lambda x: f"{SPECIAL_SPACE*8}{x}", value.splitlines()))

    async def __call__(self):
        description = self.in_object.description
        if self.typus == 'categories':
            description = description + f'{ZERO_WIDTH}\n{ZERO_WIDTH}\n**Commands:**\n' + await self.get_commands()
        if self.member_can_invoke is False:
            # warn up front when the member lacks the required permissions
            description = f"__** You do not have the necesary roles to actually invoke this command**__\n{ZERO_WIDTH}\n" + description
        return description
class DefaulThumbnailProvider(AbstractProvider):
    """Provides the embed thumbnail: the object's gif, when one is set."""
    provides = "thumbnail"

    async def __call__(self):
        # missing attribute and explicit None both yield no thumbnail
        return getattr(self.in_object, 'gif', None)
class DefaulImageProvider(AbstractProvider):
    """Provides the embed image slot; help embeds have no default image."""
    provides = "image"

    async def __call__(self):
        # intentionally empty slot
        return None
class DefaultAuthorProvider(AbstractProvider):
    """Provides the embed author block: the bot's own identity."""
    provides = "author"

    async def __call__(self):
        author = {'name': self.bot.display_name}
        author["url"] = self.bot.github_url
        author["icon_url"] = self.bot.portrait_url
        return author
class DefaultfooterProvider(AbstractProvider):
    """Provides the embed footer; none by default."""
    provides = 'footer'

    async def __call__(self):
        # no footer on default help embeds
        return None
class DefaulURLProvider(AbstractProvider):
    """Provides the embed URL: the object's GitHub wiki link."""
    provides = 'url'

    async def __call__(self):
        wiki_link = self.in_object.github_wiki_link
        return wiki_link
class DefaultFieldsProvider(AbstractFieldProvider):
    """Collects embed fields from all registered ``_handle_*`` methods.

    Each handler covers one attribute of the wrapped command/category.
    Handlers whose attribute is missing on the object, or whose
    applicability does not match the object's typus, are skipped.
    """
    # symbols used to render boolean attributes
    bool_symbol_map = {True: '✅',
                       False: '❎'}

    async def __call__(self):
        fields = []
        for handler_attr, handler_func in self.all_handler.items():
            # strip the category/command discriminator suffixes used by the
            # handler naming scheme before probing the object
            handler_attr = handler_attr.removesuffix('_ca').removesuffix('_co')
            if hasattr(self.in_object, handler_attr) and handler_func.applicable_to in ['all', self.typus]:
                new_item = await handler_func()
                if new_item is not None:
                    fields.append(new_item)
        return fields

    @property
    def visible_channels(self):
        """Allowed channels this member can both read and write (or 'all')."""
        _out = []
        for channel in self.in_object.allowed_channels:
            if channel.name.casefold() == 'all':
                _out.append(channel)
            else:
                channel_member_permissions = channel.permissions_for(self.member)
                if channel_member_permissions.administrator is True or all(perms is True for perms in [channel_member_permissions.read_messages, channel_member_permissions.send_messages]):
                    _out.append(channel)
        return set(_out)

    @handler_method
    async def _handle_usage(self):
        attr_name = "usage"
        name = await self.handle_name(attr_name)
        value = CodeBlock(getattr(self.in_object, attr_name), 'css')
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method
    async def _handle_aliases(self):
        attr_name = "aliases"
        name = await self.handle_name(attr_name)
        items = [f"`{alias}`" for alias in sorted(getattr(self.in_object, attr_name))]
        value = ListMarker.make_list(items)
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method_only_commands
    async def _handle_allowed_members(self):
        attr_name = "allowed_members"
        name = await self.handle_name(attr_name)
        value = getattr(self.in_object, attr_name)
        # empty allow-list renders as an empty field value
        if not value:
            value = None
        else:
            value = '\n'.join(member.mention for member in value)
        return self.field_item(name=name, value=value, inline=False)

    @handler_method
    async def _handle_allowed_channels(self):
        attr_name = "allowed_channels"
        name = await self.handle_name(attr_name)
        channels = sorted(getattr(self.in_object, attr_name), key=lambda x: x.position)
        # only show channels the requesting member can actually see
        value = ListMarker.make_column_list([channel.mention for channel in channels if channel in self.visible_channels], ListMarker.star, amount_columns=1)
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method
    async def _handle_allowed_roles(self):
        attr_name = "allowed_roles"
        name = await self.handle_name(attr_name)
        roles = sorted(getattr(self.in_object, attr_name), key=lambda x: x.position)
        value = '\n'.join(f"`{role.name}`" for role in roles)
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method
    async def _handle_allowed_in_dms(self):
        attr_name = "allowed_in_dms"
        name = await self.handle_name(attr_name)
        value = self.bool_symbol_map.get(getattr(self.in_object, attr_name))
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method
    async def _handle_github_wiki_link(self):
        attr_name = "github_wiki_link"
        name = await self.handle_name(attr_name)
        value = embed_hyperlink('link 🔗', getattr(self.in_object, attr_name))
        inline = True
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method
    async def _handle_extra_info(self):
        attr_name = "extra_info"
        name = await self.handle_name(attr_name)
        attr_value = getattr(self.in_object, attr_name)
        # no field at all when there is no extra info
        if attr_value is None:
            return None
        value = f"`{attr_value}`"
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method
    async def _handle_example(self):
        attr_name = "example"
        name = await self.handle_name(attr_name)
        value = CodeBlock(getattr(self.in_object, attr_name), 'css')
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method_only_commands
    async def _handle_commands_co(self):
        attr_name = "commands"
        # display name differs from the attribute: groups list "sub commands"
        name = await self.handle_name('sub_commands')
        value = ListMarker.make_list([f"`{command}`" for command in getattr(self.in_object, attr_name)])
        inline = False
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method_only_commands
    async def _handle_hidden(self):
        attr_name = "hidden"
        name = await self.handle_name(attr_name)
        value = self.bool_symbol_map.get(getattr(self.in_object, attr_name))
        inline = True
        return self.field_item(name=name, value=value, inline=inline)

    @handler_method_only_commands
    async def _handle_enabled(self):
        attr_name = "enabled"
        name = await self.handle_name(attr_name)
        value = self.bool_symbol_map.get(getattr(self.in_object, attr_name))
        inline = True
        return self.field_item(name=name, value=value, inline=inline)
class DefaultColorProvider(AbstractProvider):
    """Provides the embed color: the owning cog's color for commands, else a named default."""
    provides = 'color'

    async def __call__(self):
        if not isinstance(self.in_object, commands.Command):
            return 'GIDDIS_FAVOURITE'
        return self.in_object.cog.color
class DefaultTimestampProvider(AbstractProvider):
    """Provides the embed timestamp: the current UTC time."""
    provides = 'timestamp'

    async def __call__(self):
        now = datetime.now(tz=timezone.utc)
        return now
class HelpEmbedBuilder:
    """Assembles a help embed for a command or category from pluggable providers.

    Each embed slot (title, description, fields, ...) is produced by a
    provider instance; ``set_provider`` swaps in a custom provider keyed by
    its ``provides`` attribute.
    """
    field_item = EmbedFieldItem
    default_title_provider = DefaultTitleProvider
    default_description_provider = DefaultDescriptionProvider
    default_thumbnail_provider = DefaulThumbnailProvider
    default_image_provider = DefaulImageProvider
    default_author_provider = DefaultAuthorProvider
    default_footer_provider = DefaultfooterProvider
    default_fields_provider = DefaultFieldsProvider
    default_url_provider = DefaulURLProvider
    default_color_provider = DefaultColorProvider
    default_Timestamp_provider = DefaultTimestampProvider

    def __init__(self, bot: "AntiPetrosBot", invoking_person: Union[discord.Member, discord.User], in_object: Union["AntiPetrosBaseCommand", "AntiPetrosBaseGroup", "AntiPetrosFlagCommand", CommandCategory]):
        self.bot = bot
        self.in_object = in_object
        # bare Users carry no guild data; resolve them to a guild member first
        self.member = self.bot.get_antistasi_member(invoking_person.id) if isinstance(invoking_person, discord.User) else invoking_person
        self.title_provider = self.default_title_provider(self)
        self.description_provider = self.default_description_provider(self)
        self.thumbnail_provider = self.default_thumbnail_provider(self)
        self.image_provider = self.default_image_provider(self)
        self.author_provider = self.default_author_provider(self)
        self.footer_provider = self.default_footer_provider(self)
        self.fields_provider = self.default_fields_provider(self)
        self.url_provider = self.default_url_provider(self)
        self.color_provider = self.default_color_provider(self)
        self.timestamp_provider = self.default_Timestamp_provider(self)

    def set_provider(self, provider: AbstractProvider):
        """Replace the provider for the slot named by ``provider.provides``."""
        setattr(self, provider.provides + '_provider', provider(self))

    async def to_embed(self):
        # NOTE: the `yield` makes this an async *generator* producing one
        # embed payload; callers must iterate it, not await it directly.
        embed_data = await self.bot.make_generic_embed(title=await self.title_provider(),
                                                       description=await self.description_provider(),
                                                       thumbnail=await self.thumbnail_provider(),
                                                       image=await self.image_provider(),
                                                       author=await self.author_provider(),
                                                       fields=await self.fields_provider(),
                                                       url=await self.url_provider(),
                                                       color=await self.color_provider(),
                                                       timestamp=await self.timestamp_provider())
        yield embed_data
# region[Main_Exec]
if __name__ == '__main__':
    # import-only module: nothing to execute directly
    pass
# endregion[Main_Exec]
|
import unittest
from best_line import best_line
class Test_Case_Best_Line(unittest.TestCase):
    """Unit test for ``best_line`` (best-fit line through 2D points)."""

    def test_best_line(self):
        # Three of the four points lie on y = x + 1, so the expected result
        # encodes slope 1.0 and intercept 1.0 — presumably "slope:intercept";
        # confirm the format against best_line's implementation.
        self.assertEqual(best_line([(0, 1), (1, 2), (2, 3), (-5, 5)]), '1.0:1.0')
import uuid
from django.template.response import TemplateResponse
def document_view_html(request, id):
    """Renders the document as html"""
    # NOTE(review): fetches a Post by UUID primary key but never uses the
    # result — presumably only an existence/404 guard; confirm intent.
    obj = get_object_or_404(Post, pk=uuid.UUID(id))
    template = "detail_view.html"
    context = {}
    # NOTE(review): looks up MyModel with the raw string id while Post used
    # uuid.UUID(id) — verify both models share the same pk type.
    context["data"] = MyModel.objects.get(id=id)
    return TemplateResponse(request, template, context)
def document_view_pdf(request, id):
    """Renders the document as a PDF"""
    # Check if a pdf version exists, and if so, serve it
    # If not, render to PDF and then serve the file
    # NOTE(review): not implemented — the view currently returns None, which
    # Django rejects; implement before routing requests here.
|
from .c_perfevaluator import CPerfEvaluator
from .c_perfevaluator_xval import CPerfEvaluatorXVal
from .c_perfevaluator_xval_multiclass import CPerfEvaluatorXValMulticlass
|
#
# Author: Stanislaw Adaszewski, 2015
#
import networkx as nx
import numpy as np
import time
from numpy import array, tile, concatenate
from numpy.linalg import norm
class ConstrainedKMeans(object):
    """Thin sklearn-style wrapper around :func:`constrained_kmeans`."""

    def __init__(self):
        pass

    def fit(self, data, demand):
        """Cluster *data* under *demand* and store centroids/labels/flow cost."""
        centroids, labels, flow = constrained_kmeans(data, demand)
        self.centroids = centroids
        self.labels = labels
        self.flow_cost = flow
def constrained_kmeans(data, demand, maxiter=None, fixedprec=1e9):
    """K-means with minimum cluster sizes, solved via min-cost flow.

    Each point supplies one unit of flow; centroid k demands ``demand[k]``
    units and surplus supply drains into an artificial sink, so every
    cluster receives at least its demanded number of points.

    Args:
        data: array-like of shape (n, d).
        demand: per-cluster minimum sizes; ``len(demand)`` defines K.
        maxiter: optional iteration cap; None means run until labels stop
            changing.
        fixedprec: scale turning float distances into the integer edge
            weights required by ``nx.min_cost_flow``.

    Returns:
        Tuple ``(C, M, f)``: centroids (K, d), integer labels (n,), and the
        raw min-cost-flow dict from networkx.
    """
    data = array(data)
    min_ = np.min(data, axis=0)
    max_ = np.max(data, axis=0)
    n, d = data.shape
    K = len(demand)  # number of clusters
    # Initialize centroids uniformly at random inside the bounding box
    C = min_ + np.random.random((K, d)) * (max_ - min_)
    # Initialize labels (np.int was removed in NumPy 1.24 — use builtin int)
    M = array([-1] * n, dtype=int)
    itercnt = 0
    while True:
        itercnt += 1
        g = nx.DiGraph()
        # Add nodes for each point 0...n-1
        # Each node has 'supply' of 1, i.e. demand = -1
        g.add_nodes_from(range(0, n), demand=-1)
        # Add nodes for centroids
        for k in range(K):
            g.add_node(n + k, demand=demand[k])
        # Point-to-centroid costs: Euclidean distances of every pair
        displacements = tile(data.T, K).T - tile(C, n).reshape(K * n, d)
        costs = norm(displacements, axis=1).reshape(K*n, 1)
        # Edges (point, centroid, weight), distances scaled to integers
        data_to_C_edges = np.hstack(
            [tile([range(n)], K).T,
             tile(array([range(n, n + K)]).T, n).reshape(K * n, 1),
             costs * fixedprec]
        ).astype(np.uint64)
        # Adding to graph
        g.add_weighted_edges_from(data_to_C_edges)
        # Add artifical demand node that absorbs the surplus supply
        a = n + K
        g.add_node(a, demand=n-np.sum(demand))
        C_to_a_edges = concatenate((array([range(n, n + K)]).T, tile([[a]], K).T), axis=1)
        g.add_edges_from(C_to_a_edges)
        # Solve the assignment as a min-cost flow
        f = nx.min_cost_flow(g)
        # Assign each point to the centroid that received its unit of flow
        M_new = np.ones(n, dtype=int) * -1
        for i in range(n):
            p = sorted(f[i].items(), key=lambda x: x[1])[-1][0]
            M_new[i] = p - n
        # stop condition: labels unchanged since last iteration
        if np.all(M_new == M):
            # Stop
            return (C, M, f)
        M = M_new
        # compute new centers
        for k in range(K):
            C[k, :] = np.mean(data[M==k, :], axis=0)
        if maxiter is not None and itercnt >= maxiter:
            # Max iterations reached
            return (C, M, f)
def main():
    """Demo: cluster 75 random 3-D points into three clusters of >= 25."""
    np.random.seed(824)
    points = np.random.random((75, 3))
    start = time.time()
    centroids, labels, flow = constrained_kmeans(points, [25, 25, 25])
    print('Elapsed:', (time.time() - start) * 1000, 'ms')
    print('C:', centroids)
    print('M:', labels)


if __name__ == '__main__':
    main()
|
import os, sys, six, re, json
import unicodedata
import logging
import numpy as np
from collections import defaultdict
from modules.backend import K, tf
_open_ = open
is_py2 = six.PY2
if not is_py2:
basestring = str
def is_string(s):
    """Return True if *s* is a string (``basestring`` is aliased to ``str`` on Py3).
    """
    return isinstance(s, basestring)
def lowercase_and_normalize(text):
    """Lowercase *text* and strip combining marks (simple normalization)."""
    decomposed = unicodedata.normalize('NFD', text.lower())
    # Mn: Mark, Nonspacing — drop the combining accents exposed by NFD
    return ''.join(ch for ch in decomposed if unicodedata.category(ch) != 'Mn')
def truncate_sequences(maxlen, indices, *sequences):
    """Trim *sequences* in place until their combined length is <= maxlen.

    Repeatedly pops one element (at that sequence's pop index) from the
    currently longest sequence; empty sequences are dropped up front.

    Args:
        indices: pop index per sequence, usually -1; use -2 to protect a
            trailing end token such as '[SEP]'. A single int is broadcast
            to all sequences.

    Returns:
        The (filtered, mutated) list of sequences.
    """
    seqs = [s for s in sequences if s]
    if not isinstance(indices, (list, tuple)):
        indices = [indices] * len(seqs)
    while sum(len(s) for s in seqs) > maxlen:
        longest = int(np.argmax([len(s) for s in seqs]))
        seqs[longest].pop(indices[longest])
    return seqs
# TODO(review)
def orthogonally_resize(a, new_shape, window=2):
    """Resize array *a* to *new_shape*, preserving its Frobenius norm.

    Axes that grow are filled by block-wise repetition (block size =
    ``window``) and truncation; the result is rescaled so its norm matches
    the original's. (Fix: removed stray debug prints.)

    Args:
        window: batch size used when copying, per axis. Example with
            window=2, 4 -> 6: [0,1,2,3] -copy-> [0,1,0,1,2,3,2,3]
            -truncate-> [0,1,0,1,2,3].

    Returns:
        The resized array, norm-matched to *a*.
    """
    assert a.ndim == len(new_shape)
    slices, a_norm, w = [], np.linalg.norm(a), window
    for i, (d1, d2) in enumerate(zip(a.shape, new_shape)):
        if d1 != d2:
            k = d2 // d1 + int(d2 % d1 != 0)
            if k > 1:  # d2 > d1: grow this axis by block-repeat
                # d1 must be a multiple of window, otherwise the grouping
                # reshape below is impossible
                assert d1 % window == 0
                a = a.reshape(a.shape[:i] + (d1 // w, w) + a.shape[i + 1:])
                # over-copy first ...
                a = np.repeat(a, k, axis=i)
                a = a.reshape(a.shape[:i] + (d1 * k, ) + a.shape[i + 2:])
        slices.append(np.s_[:d2])
    # ... then truncate down to new_shape
    a = a[tuple(slices)]
    # rescale so the norm equals the original's
    return a / np.linalg.norm(a) * a_norm
class DataGenerator:
    """Template data generator.

    Wraps an iterable dataset and yields (is_end, sample) pairs, optionally
    in random order, plus helpers to feed training/prediction loops.
    """

    def __init__(self, data, batch_size=32, buffer_size=None):
        self.data = data
        self.batch_size = batch_size
        if hasattr(self.data, '__len__'):
            self.steps = len(data) // self.batch_size
            if len(self.data) % self.batch_size != 0:
                self.steps += 1
        else:
            # unknown length (pure iterable)
            self.steps = None
        self.buffer_size = buffer_size or self.batch_size * 1000

    def __len__(self):
        return self.steps

    def sample(self, random=False):
        """Yield (is_end, sample) pairs; is_end is True only for the last sample."""
        if random:
            if self.steps is None:
                # Unknown length: shuffle through a bounded buffer
                def generator():
                    caches, isfull = [], False
                    for d in self.data:
                        caches.append(d)
                        if isfull:
                            i = np.random.randint(len(caches))
                            yield caches.pop(i)
                        else:
                            if len(caches) == self.buffer_size:
                                isfull = True
                    # isfull=False: total data smaller than buffer_size
                    while caches:
                        i = np.random.randint(len(caches))
                        yield caches.pop(i)
            # hasattr(self.data, '__len__') = True
            else:
                def generator():
                    # full random permutation, no repetition
                    for i in np.random.permutation(len(self.data)):
                        yield self.data[i]
            data = generator()
        else:
            data = iter(self.data)
        # TODO: what if the iterable has no __next__?
        d_current = next(data)  # stay one item ahead so the last can be flagged
        for d_next in data:
            yield False, d_current
            d_current = d_next
        yield True, d_current

    def __iter__(self, random=False):
        raise NotImplementedError

    def forfit(self, random=True):
        # endless epochs for fitting
        while True:
            for d in self.__iter__(random):
                yield d

    def fortest(self, random=False):
        # endless epochs, yielding only the first element of each sample
        while True:
            for d in self.__iter__(random):
                yield d[0]

    def to_dataset(self, types, shapes, names=None, padded_batch=False):
        """Convert to tf.data.Dataset format.

        If *names* is given, samples are wrapped into dict form.
        """
        if names is None:
            generator = self.forfit
        else:
            if is_string(names):
                # single name: wrap the sample in a one-entry dict
                # (bug fix: was the set literal {k, v})
                warps = lambda k, v: {k: v}
            elif is_string(names[0]):
                warps = lambda k, v: dict(zip(k, v))
            else:
                # k, v are nested: build one dict per group on axis 0
                # example: tuple(dict(zip(i, j)) for i, j in
                #     zip([[1,2,3], [3,4]],
                #         [['a', 'b', 'c'], ['c', 'd']]))
                # -> ({1: 'a', 2: 'b', 3: 'c'}, {3: 'c', 4: 'd'})
                warps = lambda k, v: tuple(
                    dict(zip(i, j)) for i, j in zip(k, v)
                )

            def generator():
                for d in self.forfit():
                    yield warps(names, d)
            types = warps(names, types)
            shapes = warps(names, shapes)  # bug fix: was `warps(names. shapes)`
        if padded_batch:
            dataset = tf.data.Dataset.from_generator(
                generator, output_types=types
            )
            dataset = dataset.padded_batch(self.batch_size, shapes)
        else:
            dataset = tf.data.Dataset.from_generator(
                generator, output_types=types, output_shapes=shapes
            )
            dataset = dataset.batch(self.batch_size)
        return dataset
class ViterbiDecoder:
    """Base class for Viterbi decoding.

    Args:
        trans: (num_labels, num_labels) transition score matrix.
        starts: labels allowed at the first position (None = all allowed).
        ends: labels allowed at the last position (None = all allowed).
    """

    def __init__(self, trans, starts=None, ends=None):
        self.trans = trans
        self.num_labels = len(trans)
        self.non_starts = []
        self.non_ends = []
        if starts is not None:
            for i in range(self.num_labels):
                if i not in starts:
                    self.non_starts.append(i)
        if ends is not None:
            for i in range(self.num_labels):
                if i not in ends:
                    self.non_ends.append(i)

    def decode(self, nodes):
        """Return the best label path. nodes.shape = [seq_len, num_labels].

        Note: *nodes* is modified in place by the start/end masking.
        """
        # Preprocessing: mask disallowed labels at the first and last
        # positions (e.g. pin [CLS]/[SEP] to label 0).
        nodes[0, self.non_starts] -= np.inf
        # bug fix: the end constraint must apply to the LAST position, not
        # the first one again
        nodes[-1, self.non_ends] -= np.inf
        # dynamic programming
        labels = np.arange(self.num_labels).reshape((1, -1))
        scores = nodes[0].reshape((-1, 1))
        path = labels
        for l in range(1, len(nodes)):
            M = scores + self.trans + nodes[l].reshape((1, -1))
            idxs = M.argmax(axis=0)
            scores = M.max(axis=0).reshape((-1, 1))
            # path[:, idxs] reorders stored prefixes to follow each label's
            # best predecessor:
            # [[0,0,0], [4,5,6]][:, [1,0,2]] -> [[0,0,0], [5,4,6]]
            # [[0,0,0], [4,5,6]][:, [1,0,0]] -> [[0,0,0], [5,4,4]]
            path = np.concatenate([path[:, idxs], labels], axis=0)
        # best-scoring complete path
        return path[:, scores[:, 0].argmax()]
if __name__ == '__main__':
    # quick smoke test for orthogonally_resize
    demo = np.arange(32).reshape(1, 2, 4, 4)
    resized = orthogonally_resize(demo, (1, 2, 6, 6))
    print('b shape: ', resized.shape)
    print('b: \n', resized)
|
# SMPLify fitting configuration: body model, two-stage optimization
# schedule, optimizer, and loss terms.
type = 'SMPLify'
# neutral SMPL body model with 10 shape coefficients
body_model = dict(
    type='SMPL',
    gender='neutral',
    num_betas=10,
    keypoint_src='smpl_45',
    keypoint_dst='smpl_45',
    model_path='data/body_models/smpl',
    batch_size=1)
stages = [
    # stage 1: fit only global orientation and translation, anchored on
    # shoulder/hip keypoints
    dict(
        num_iter=20,
        fit_global_orient=True,
        fit_transl=True,
        fit_body_pose=False,
        fit_betas=False,
        joint_weights=dict(
            body_weight=5.0,
            use_shoulder_hip_only=True,
        )),
    # stage 2: fit full body pose and shape on all joints
    dict(
        num_iter=10,
        fit_global_orient=True,
        fit_transl=True,
        fit_body_pose=True,
        fit_betas=True,
        joint_weights=dict(body_weight=5.0, use_shoulder_hip_only=False))
]
optimizer = dict(
    type='LBFGS', max_iter=20, lr=1e-2, line_search_fn='strong_wolfe')
# data terms on 2D and 3D keypoints
keypoints2d_loss = dict(
    type='KeypointMSELoss', loss_weight=1.0, reduction='sum', sigma=100)
keypoints3d_loss = dict(
    type='KeypointMSELoss', loss_weight=10, reduction='sum', sigma=100)
# regularizers / priors
shape_prior_loss = dict(type='ShapePriorLoss', loss_weight=1, reduction='sum')
joint_prior_loss = dict(
    type='JointPriorLoss',
    loss_weight=20,
    reduction='sum',
    smooth_spine=True,
    smooth_spine_loss_weight=20,
    use_full_body=True)
# temporal smoothness disabled (weight 0)
smooth_loss = dict(type='SmoothJointLoss', loss_weight=0, reduction='sum')
pose_prior_loss = dict(
    type='MaxMixturePrior',
    prior_folder='data',
    num_gaussians=8,
    loss_weight=4.78**2,
    reduction='sum')
# keypoints excluded from the fit
ignore_keypoints = [
    'neck_openpose', 'right_hip_openpose', 'left_hip_openpose',
    'right_hip_extra', 'left_hip_extra'
]
|
def test_basic(app):
    """Smoke test: the app fixture reports the name 'quokka'."""
    assert app.name == 'quokka'
|
# Short and long lookback periods — presumably moving-average window
# lengths; confirm against the consuming code.
period1 = 50
period2 = 200
class My(object):
    """Base class providing y."""
    y = 5988


class Mt(My):
    """Adds z on top of My."""
    z = 598


class Mat(Mt):
    """Adds x; inherits y and z through the chain."""
    x = 54


# demonstrate attribute lookup through the inheritance chain
p1 = Mat()
print(p1.x)
print(p1.y)
print(p1.z)
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import cv2
import json
import functools
import argparse
import numpy as np
import tensorflow as tf
from tqdm import tqdm
from model_utils.config import CONFIG_MAP
from model_utils.preprocess import PREPROCESS_FUNC
from model_utils.anchor import BUILD_ANCHOR_FUNC
from model_utils.decoder import batch_decode
from model_utils.postprocess import batch_multiclass_non_max_suppression
from model_utils.postprocess import compute_clip_window
from model_utils.postprocess import score_converter_fn_with_logit_scale
from dataset_tools.cocoval import cocoval
from dataset_tools.json_encoder import MyEncoder
# Command-line flags: frozen-graph location, model type, tensor names,
# eval dataset paths, and run settings.
tf.app.flags.DEFINE_string(
    'input_graph', 'float/float.pb',
    'TensorFlow \'GraphDef\' file to load.')
tf.app.flags.DEFINE_string(
    'model_type', 'ssd_mobilenet_v2', 'Detection model type')
tf.app.flags.DEFINE_string(
    'input_tensor', 'input_tensor', 'input tensor name')
tf.app.flags.DEFINE_string(
    'eval_image_path', 'data/coco2014_minival_8059/image',
    'The directory where put the eval images')
tf.app.flags.DEFINE_string(
    'eval_image_list', 'data/coco2014_minival_8059/minival2014_8059.txt', 'file has validation images list')
tf.app.flags.DEFINE_string(
    'gt_json', "data/coco2014_minival_8059/minival2014_8059.json", 'ground truth json file')
tf.app.flags.DEFINE_string(
    'det_json', "data/ssd_coco.json", 'output detected json file')
tf.app.flags.DEFINE_integer(
    'eval_iter', 8059, 'evaluate iterations')
tf.app.flags.DEFINE_boolean(
    'use_quantize', False, 'quantize or not')
tf.app.flags.DEFINE_string(
    'gpus', '0',
    'The gpus used for running evaluation.')
FLAGS = tf.app.flags.FLAGS
# restrict TensorFlow to the requested GPU(s)
os.environ['CUDA_VISIBLE_DEVICES'] = FLAGS.gpus
if FLAGS.use_quantize:
    # importing decent_q presumably registers the Vitis-AI quantize ops
    # with TF — confirm; the module is not referenced again
    from tensorflow.contrib import decent_q
class SSD(object):
    ''' Construct the SSD model trained on COCO
    '''

    def __init__(self, params, input_graph):
        """Load the frozen graph and wire preprocess -> network -> postprocess.

        Args:
            params: model config entry (tensor names, anchor/NMS settings,
                input size, preprocessing type, ...).
            input_graph: path to the frozen TensorFlow GraphDef (.pb).
        """
        self.frozon_graph = input_graph
        self.new_height = params.height
        self.new_width = params.width
        self.preprocess_fn = PREPROCESS_FUNC[params.feature_extractor_type]
        self.build_anchor_fn = BUILD_ANCHOR_FUNC[params.anchor_type]
        self.anchor_config = params.anchor_config
        self.nms_config = params.nms_config
        self.score_fn = params.score_fn
        self.logit_scale = params.logit_scale
        self.input_tensor = params.input_tensor
        self.box_encoding_tensor = params.box_encoding_tensor
        self.class_score_tensor = params.class_score_tensor
        self.scale_factors = params.scale_factors
        self.feature_map_spatial_dims = params.feature_map_spatial_dims
        self.graph = tf.Graph()
        graph_def = tf.GraphDef()
        with tf.gfile.GFile(self.frozon_graph, 'rb') as fid:
            graph_def.ParseFromString(fid.read())
        with self.graph.as_default():
            # feed raw RGB images; preprocessing is grafted in front of the
            # imported graph via input_map
            raw_image = tf.placeholder(tf.float32, shape=(None, None, None, 3), name="raw_image")
            preprocessed_image = self.preprocess(raw_image)
            tf.import_graph_def(graph_def, name='', input_map={self.input_tensor: preprocessed_image})
            self.anchors = self.build_anchors()
            prediction_dict = { "box_encodings": tf.get_default_graph().get_tensor_by_name(self.box_encoding_tensor),
                                "class_predictions_with_background": tf.get_default_graph().get_tensor_by_name(self.class_score_tensor),
                                "anchors": self.anchors}
            self.detections = self.postprocess(prediction_dict)
        self.sess = tf.Session(graph=self.graph)

    def run_single_image(self, image):
        ''' Conduct preprocess, network process and postprocess for detection
        Args:
          image: image of shape [height, width, 3] with RGB channel order,
                 pixel value in [0.0, 255.0]
        '''
        image_tensor = self.graph.get_tensor_by_name("raw_image:0")
        feed_dict = {image_tensor: np.expand_dims(image, 0)}
        out_dict = self.detections
        pred_dict = self.sess.run(out_dict, feed_dict=feed_dict)
        return pred_dict

    def preprocess(self, image):
        # resize/normalize according to the feature extractor's convention
        return self.preprocess_fn(image, self.new_height, self.new_width)

    def build_anchors(self):
        return self.build_anchor_fn(self.anchor_config, self.feature_map_spatial_dims, im_height=self.new_height, im_width=self.new_width)

    def postprocess(self, prediction_dict):
        ''' Decode box_encodings and class_predictions_with_background, apply multi-class NMS
        Args:
          prediction_dict: dict with these keys - "box_encodings", "class_predictions_with_background", "anchors"
        '''
        with tf.name_scope('Postprocessor'):
            box_encodings = prediction_dict['box_encodings']
            class_predictions = prediction_dict['class_predictions_with_background']
            anchors = prediction_dict['anchors']
            # decode bounding box
            detection_boxes = batch_decode(box_encodings, anchors, self.scale_factors)
            detection_boxes = tf.expand_dims(detection_boxes, axis=2)
            # map score with conversion_fn
            score_conversion_fn = score_converter_fn_with_logit_scale(self.score_fn, self.logit_scale)
            detection_scores = score_conversion_fn(class_predictions)
            # drop the background class (index 0)
            detection_scores = tf.slice(detection_scores, [0, 0, 1], [-1, -1, -1])
            # multi-class nms
            # bug fix: shape convention is [batch, height, width, channels];
            # the third entry must be the width, not the height repeated
            resized_inputs_shape = [1, self.new_height, self.new_width, 3]
            true_image_shapes = np.array([[self.new_height, self.new_width, 3]])
            non_max_suppression_fn = functools.partial(
                batch_multiclass_non_max_suppression,
                score_thresh=self.nms_config.score_threshold,
                iou_thresh=self.nms_config.iou_threshold,
                max_size_per_class=self.nms_config.max_detections_per_class,
                max_total_size=self.nms_config.max_total_detections)
            (nmsed_boxes, nmsed_scores, nmsed_classes, nmsed_masks, nmsed_additional_fields, num_detections) = non_max_suppression_fn(
                detection_boxes,
                detection_scores,
                clip_window=compute_clip_window(resized_inputs_shape, true_image_shapes))
            # shift class ids back to 1-based COCO labels
            label_id_offset = 1
            nmsed_classes += label_id_offset
            detection_dict = {
                "detection_boxes": nmsed_boxes,
                "detection_scores": nmsed_scores,
                "detection_classes": nmsed_classes,
                "num_detections": tf.to_float(num_detections),
            }
        return detection_dict
#######################################################
def run_SSD_for_eval(model, image_root, image_list_file):
    """Run detection over the eval image list and collect COCO-format records.

    Args:
        model: an SSD instance.
        image_root: directory containing the eval ``.jpg`` images.
        image_list_file: text file with one image basename per line.

    Returns:
        List of dicts with image_id / category_id / score / bbox
        ([x, y, w, h] in pixels), one per detection.

    Raises:
        ValueError: if FLAGS.eval_iter exceeds the number of listed images.
    """
    with open(image_list_file, 'r') as f_image:
        image_lines = f_image.readlines()
    coco_records = []
    if FLAGS.eval_iter > len(image_lines):
        raise ValueError(
            "eval_iter(%d) should be fewer than total image numbers(%d)." %
            (FLAGS.eval_iter, len(image_lines)))
    for image_line in tqdm(image_lines[:FLAGS.eval_iter]):
        image_name = image_line.strip()
        image_path = os.path.join(image_root, image_name + ".jpg")
        image = cv2.imread(image_path)
        height, width = image.shape[0:2]
        image = image[:,:,::-1] # BGR to RGB
        image = np.array(image, dtype=np.float32)
        output_dict = model.run_single_image(image)
        # drop the batch dimension and normalize dtypes
        output_dict['num_detections'] = int(output_dict['num_detections'][0])
        output_dict['detection_classes'] = output_dict[
            'detection_classes'][0].astype(np.uint8)
        output_dict['detection_boxes'] = output_dict['detection_boxes'][0]
        output_dict['detection_scores'] = output_dict['detection_scores'][0]
        for i in range(output_dict['detection_classes'].shape[0]):
            record = {}
            # boxes are normalized [ymin, xmin, ymax, xmax]; scale to pixels
            ymin = output_dict['detection_boxes'][i][0] * height
            xmin = output_dict['detection_boxes'][i][1] * width
            ymax = output_dict['detection_boxes'][i][2] * height
            xmax = output_dict['detection_boxes'][i][3] * width
            score = output_dict['detection_scores'][i]
            class_id = output_dict['detection_classes'][i]
            # COCO image ids are encoded at the end of the file name
            record['image_id'] = int(image_name.split('_')[-1])
            record['category_id'] = class_id
            record['score'] = score
            record['bbox'] = [xmin, ymin, xmax - xmin + 1, ymax - ymin + 1]
            # optional low-score cutoff, currently disabled
            # if score < 0.005:
            # break
            coco_records.append(record)
    return coco_records
if __name__ == "__main__":
model = SSD(CONFIG_MAP[FLAGS.model_type], FLAGS.input_graph)
coco_records = run_SSD_for_eval(model, FLAGS.eval_image_path, FLAGS.eval_image_list)
with open(FLAGS.det_json, 'w') as f_det:
f_det.write(json.dumps(coco_records, cls=MyEncoder))
cocoval(FLAGS.det_json, FLAGS.gt_json)
|
from django.core.validators import MinLengthValidator, MinValueValidator
from django.db import models
class Profile(models.Model):
    """A user's profile: username, email and an optional non-negative age."""
    USER_NAME_MAX_CHARS = 15
    USER_NAME_MIN_CHARS = 2
    MIN_AGE = 0
    # 2-15 characters
    user_name = models.CharField(
        max_length=USER_NAME_MAX_CHARS,
        validators=(
            MinLengthValidator(USER_NAME_MIN_CHARS),
        )
    )
    email = models.EmailField()
    # optional; validated to be >= 0 when provided
    age = models.IntegerField(
        null=True,
        blank=True,
        validators=(
            MinValueValidator(MIN_AGE),
        )
    )
class Album(models.Model):
    """A music album: unique name, artist, genre from a fixed list, image and price."""
    ALBUM_NAME_MAX_CHARS = 30
    ARTIST_NAME_MAX_CHARS = 30
    GENRE_MAX_CHARS = 30
    MIN_PRICE = 0.0
    # genre choice values (stored value == display label)
    POPMUSIC = 'Pop Music'
    JAZZMUSIC = 'Jazz Music'
    RBMUSIC = 'R&B Music'
    ROCKMUSIC = 'Rock Music'
    COUNTRYMUSIC = 'Country Music'
    DANCEMUSIC = 'Dance Music'
    HIPHOPMUSIC = 'Hip Hop Music'
    OTHER = 'Other'
    GENRE_CHOICES = [
        (POPMUSIC, 'Pop Music'),
        (JAZZMUSIC, 'Jazz Music'),
        (RBMUSIC, 'R&B Music'),
        (ROCKMUSIC, 'Rock Music'),
        (COUNTRYMUSIC, 'Country Music'),
        (DANCEMUSIC, 'Dance Music'),
        (HIPHOPMUSIC, 'Hip Hop Music'),
        (OTHER, 'Other'),
    ]
    # album names are globally unique
    name = models.CharField(
        max_length=ALBUM_NAME_MAX_CHARS,
        unique=True,
    )
    artist = models.CharField(
        max_length=ARTIST_NAME_MAX_CHARS,
    )
    genre = models.CharField(
        max_length=GENRE_MAX_CHARS,
        choices=GENRE_CHOICES,
    )
    description = models.TextField(
        null=True,
        blank=True,
    )
    # external image URL (no uploaded file)
    image = models.URLField()
    # non-negative price
    price = models.FloatField(
        validators=(
            MinValueValidator(MIN_PRICE),
        )
    )
|
import pandas as pd
import streamlit as st
from sklearn.ensemble import RandomForestClassifier
st.title('Model Deployment: Insurance Fraud Detection')
st.sidebar.header('User Input Parameters')
def user_input_features():
    """Collect the sidebar widget values and return them as a one-row DataFrame.

    Column order matches the insertion order below, which mirrors the order
    of the model's training features.
    """
    values = {}
    values['claim_type'] = st.sidebar.selectbox('claim_type', ('1', '2', '3', '4', '5'))
    values['uninhabitable'] = st.sidebar.selectbox('uninhabitable', ('0', '1'))
    values['claim_amount'] = st.sidebar.number_input("Insert the Claim Amount")
    values['coverage'] = st.sidebar.number_input("Insert the Coverage Amount")
    values['deductible'] = st.sidebar.number_input("Insert the deductible Amount")
    values['townsize'] = st.sidebar.selectbox('townsize', ('1', '2', '3', '4', '5'))
    values['gender'] = st.sidebar.selectbox('gender', ('0', '1'))
    values['edcat'] = st.sidebar.selectbox('edcat', ('1', '2', '3', '4', '5'))
    values['retire'] = st.sidebar.selectbox('retire', ('0', '1'))
    values['income'] = st.sidebar.number_input("Insert the Income Amount")
    values['marital'] = st.sidebar.selectbox('marital', ('0', '1'))
    values['reside'] = st.sidebar.selectbox('reside', ('1', '2', '3', '4', '5', '6', '7', '8', '9', '10'))
    values['primary_residence'] = st.sidebar.selectbox('primary_residence', ('0', '1'))
    return pd.DataFrame(values, index=[0])
# Echo the user's input row back to the page.
df = user_input_features()
st.subheader('User Input parameters')
st.write(df)

# --- Train the fraud model from the historical claims data ---
claimants = pd.read_csv("Insurance_claims.csv")
# BUGFIX: dropna() returns a new frame; the original call discarded its
# result, so rows with missing values were silently kept in training.
claimants = claimants.dropna()
# Drop identifier/date columns that carry no predictive signal here.
claimants = claimants.drop(['incident_date','job_start_date','occupancy_date','policyid','policy_date','dob'], axis=1)
X = claimants.drop(['claimid','fraudulent'],axis=1)
y = claimants['fraudulent']

# Rebalance the classes (oversample + Tomek-link cleaning) before splitting.
from imblearn.combine import SMOTETomek
smk = SMOTETomek(random_state=12)
X_res,y_res = smk.fit_resample(X,y)
from sklearn.model_selection import train_test_split
X_train,X_test,y_train,y_test = train_test_split(X_res,y_res,train_size=.8,random_state=40)

# RandomForestClassifier is already imported at the top of the file.
rf_classifier = RandomForestClassifier(criterion='gini',n_estimators=80,class_weight='balanced')
rf_classifier.fit(X_train,y_train)

# Score the user-supplied row; flag as fraud when P(fraud) exceeds 0.5.
prediction = rf_classifier.predict(df)
prediction_proba = rf_classifier.predict_proba(df)
st.subheader('Predicted Result')
st.write('Fraud Claim' if prediction_proba[0][1] > 0.5 else 'Genuine')
st.subheader('Prediction Probability')
st.write(prediction_proba)
# --- Legends explaining the encoded input values shown in the sidebar ---
st.subheader('Type of claim')
st.text('1 - Wind/Hail')
st.text('2 - Water damage')
st.text('3 - Fire-Smoke')
st.text('4 - Contamination')
st.text('5 - Theft/Vandalism')
st.subheader('Property uninhabitable')
st.text('0 - No')
st.text('1 - Yes')
st.subheader('Size of hometown')
st.text('1 - > 250,000')
st.text('2 - 50,000-249,999')
st.text('3 - 10,000-49,999')
st.text('4 - 2,500-9,999')
st.text('5 - < 2,500')
st.subheader('Gender')
st.text('0 - Male')
st.text('1 - Female')  # BUGFIX: was misspelled 'Feamle'
st.subheader('Level of education')
st.text('1 - Did not complete high school')  # BUGFIX: missing space after '-'
st.text('2 - High school degree')
st.text('3 - Some college')
st.text('4 - College degree')
st.text('5 - Post-undergraduate degree')  # BUGFIX: stray '<' removed
st.subheader('Retired')
st.text('0 - No')
st.text('1 - Yes')
st.subheader('Marital status')
st.text('0 - Unmarried')
st.text('1 - Married')
st.subheader('Number of people in household')
st.text('1 - 10')
st.subheader('Property is primary residence')
st.text('0 - Yes')  # BUGFIX: missing space after '-'
st.text('1 - No')
|
import socket
import select
import config
class IRCConnection:
    """Thin wrapper around a non-blocking TCP socket speaking the IRC protocol."""

    def __init__(self, serverName, port=6667):
        self.connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connection.connect((serverName, port))
        # Non-blocking so receive() can poll with a short select() timeout.
        self.connection.setblocking(0)

    def sendMessage(self, toSend):
        '''Helper function that sends the given
        string as an IRC message.
        '''
        if not toSend.startswith("PONG"):
            # Echo outgoing traffic (except keepalive PONGs) for debugging.
            # BUGFIX: 'print toSend' was Python-2-only syntax; the
            # parenthesised form works on both Python 2 and 3.
            print(toSend)
        self.connection.send(str(toSend) + "\r\n")

    def receive(self):
        '''Receive 512 bytes from the connection (512 bytes == 1 message)
        '''
        # time out after a reasonable period of time so we revoice quickly
        ready = select.select([self.connection], [], [], 0.2)
        if ready[0]:
            return str(self.connection.recv(512))
        else:
            return None

    def setNick(self, nick):
        '''Sets the nick to given string.
        '''
        self.sendMessage("NICK " + nick)

    def setUser(self, userName, hostName, serverName, realName):
        '''Set the user info as given.
        '''
        self.sendMessage("USER " + userName + " " +
                         hostName + " " +
                         serverName + " :" +
                         realName)

    def authenticate(self, password):
        '''Authenticate with NickServ with given password.
        '''
        # BUGFIX: the message text must be a trailing parameter prefixed
        # with ':' (RFC 2812). Without it, only "IDENTIFY" reaches NickServ
        # and the password is dropped, so authentication always failed.
        self.sendMessage("PRIVMSG NickServ :IDENTIFY " + password)

    def setBot(self, nick):
        '''Tell the server that we're a bot. (Note: This is network-dependent!)
        '''
        config.botIdentify(self, botNick=nick)

    def reply(self, toSend, nick, chan, isPM):
        # Private messages go back to the sender; channel messages to the channel.
        sendTo = nick if isPM else chan
        self.sendMessage("PRIVMSG " + sendTo + " :" + toSend)

    def quit(self, quitMessage):
        if quitMessage == "":
            self.sendMessage("QUIT")
        else:
            self.sendMessage("QUIT :" + quitMessage)

    def part(self, partMessage, chan):
        if chan != "":
            if partMessage == "":
                self.sendMessage("PART " + chan)
            else:
                self.sendMessage("PART " + chan + " :" + partMessage)

    def join(self, chan):
        self.sendMessage("JOIN " + chan)

    def close(self):
        '''Close the connection.
        '''
        self.connection.close()
|
"""command_line.py"""
import vbiz_parser
import sys
import getopt
def main():
    """Parse command-line options and hand them to the vbiz parser.

    Recognised options: -h (help), -i/--ifile, -o/--ofile, -u/--upath.
    """
    input_file = ''
    output_file = ''
    upload_path = ''
    try:
        opts, _unused_args = getopt.getopt(
            sys.argv[1:], "hi:o:u:", ["ifile=", "ofile=", "upath="])
    except getopt.GetoptError:
        print('Run vbiz_parser -h for helping')
        sys.exit(2)
    for option, value in opts:
        if option == '-h':
            print('vbiz_parser -i <inputfile> -o <outputfile> -u <uploadpath>')
            sys.exit()
        elif option in ("-i", "--ifile"):
            input_file = value
        elif option in ("-o", "--ofile"):
            output_file = value
        elif option in ("-u", "--upath"):
            upload_path = value
    vbiz_parser.let_parse(input_file, output_file, upload_path)
|
import argparse
import sys
import yt
# hard-coded parameters (in code units)
field     = 'particle_mass'      # particle field to project
proj_axis = 'z'                  # projection axis
center    = [0.5, 0.5, 0.5]      # projection centre
width_x   = 1.0e-2               # image width along x
width_y   = 1.0e-2               # image width along y
width_z   = 1.0e-2               # slice depth along the projection axis
colormap  = 'arbre'
dpi       = 150

# load the command-line parameters
parser = argparse.ArgumentParser( description='Plot the projected particle mass' )
parser.add_argument( '-i', action='store', required=False, type=str, dest='prefix_in',
                     help='input path prefix [%(default)s]', default='./' )
parser.add_argument( '-o', action='store', required=False, type=str, dest='prefix_out',
                     help='output filename prefix [%(default)s]', default='fig__thin-slice-projection' )
parser.add_argument( '-s', action='store', required=True, type=int, dest='idx_start',
                     help='first data index' )
parser.add_argument( '-e', action='store', required=True, type=int, dest='idx_end',
                     help='last data index' )
parser.add_argument( '-d', action='store', required=False, type=int, dest='didx',
                     help='delta data index [%(default)d]', default=1 )
args=parser.parse_args()

# take note: echo the full command line for reproducibility
print( '\nCommand-line arguments:' )
print( '-------------------------------------------------------------------' )
# BUGFIX: the original used the Python-2-only statement
# "print str(sys.argv[t])," which is a syntax error under Python 3,
# while the rest of the file already uses print() calls.
print( ' '.join( str(arg) for arg in sys.argv ) )
print( '-------------------------------------------------------------------\n' )

idx_start  = args.idx_start
idx_end    = args.idx_end
didx       = args.didx
prefix_in  = args.prefix_in
prefix_out = args.prefix_out
# Run one dataset per MPI rank when launched under mpirun.
yt.enable_parallelism()
# Load the dump series Data_000000, Data_000001, ... as a yt time series.
ts = yt.load( [ prefix_in+'/Data_%06d'%idx for idx in range(idx_start, idx_end+1, didx) ] )
#ts = yt.load( 'Data_??????' )
for ds in ts.piter():
    # plot: thin-slice particle projection of size (width_x, width_y) and depth width_z
    p = yt.ParticleProjectionPlot( ds, proj_axis, fields=field, center=center, width=(width_x,width_y), depth=width_z )
#   p.set_unit( field, 'Msun' )
#   p.set_zlim( field, 1.0e6, 1.0e8 )
    p.set_cmap( field, colormap )
    p.annotate_timestamp( time_unit='code_time', corner='upper_right', text_args={'color':'k'} )
    # save the image, one PNG per dataset
    p.save( prefix_out+'_'+ds.basename+'.png', mpl_kwargs={'dpi':dpi} )
|
import os
import time
import json
import psutil
from importlib import import_module
from collections import defaultdict
from pprint import pprint
"""
This is my implementation of the TaskExecutor. Each Task can be run at most:
max_instances (int) within a given time_bound (int: in minutes).
Each Task's guard is dependent on it's task type.
E.g "TASK A" will have a particular max_instances and time_bound,
not necessarily the same as "TASK B"
That is how the guards are implemented, and it fits well within the logic of
rate limiting as it's based on max_instances/time_bound.
Most of the methods are the standard getters/setters.
"""
# Managing process to retrieve tasks from queue and execute
class TaskExecutor:
    """Managing process that pulls tasks off the queue and executes them.

    Each task type is rate limited by a "guard": at most ``max_instances``
    runs within a ``time_bound`` window. Guard parameters are per task type
    and come from ``config['task_guards']``.

    NOTE: the constructor immediately enters the (infinite) executor loop.
    """

    def __init__(self, task_queue, config):
        self.config = config
        self.task_queue = task_queue
        # per-type guard state: {'max_instances', 'time_bound', 'prev_time'}
        self.task_guards = config['task_guards']
        # pid -> task object, for everything we have spawned
        self.active_processes = {}
        # per-type count of launches in the current rate-limit window
        self.task_counts = defaultdict(int)
        self.start_executor()

    def update_all_queue_globals(self):
        """TODO: sync guard globals (max_instances/time_bound) from shared
        storage (e.g. postgres) so multiple service instances agree on
        limits. Currently a no-op."""
        pass

    def update_global_guard_info(self, task_type,
                                 max_instances=None, time_bound=None):
        """Update max_instances and/or time_bound for ``task_type``."""
        if max_instances is not None:
            self.task_guards[task_type]['max_instances'] = max_instances
        if time_bound is not None:
            self.task_guards[task_type]['time_bound'] = time_bound

    def update_local_guard_info(self, task_type, prev_time=None):
        """Record when the guard window for ``task_type`` last reset."""
        if prev_time is not None:
            self.task_guards[task_type]['prev_time'] = prev_time

    def guard_exceeded(self, task_type):
        """True if ``task_type`` has used up its launches for this window."""
        max_instances = self.task_guards[task_type]['max_instances']
        return self.task_counts[task_type] >= max_instances

    def get_all_task_types(self):
        """All task types we hold guards for, as a tuple."""
        return tuple(self.task_guards.keys())

    def increment_task_count(self, task_type):
        self.task_counts[task_type] += 1

    def decrement_task_count(self, task_type):
        self.task_counts[task_type] -= 1

    def set_task_count(self, task_type, task_count):
        self.task_counts[task_type] = task_count

    def get_task_count(self, task_type):
        """Current window launch count for ``task_type``."""
        return self.task_counts[task_type]

    def count_active_tasks_of_type(self, task_type):
        """Number of still-alive worker processes running ``task_type`` tasks.

        BUGFIX: the original ignored ``task_type`` and counted every tracked
        pid, which made total_active_tasks() multiply the total by the
        number of task types.
        """
        all_pids = psutil.pids()
        return sum(
            1
            for pid, task in self.active_processes.items()
            if pid in all_pids and task.task_type == task_type
        )

    def total_active_tasks(self):
        """Total number of alive worker processes across all task types."""
        return sum(self.count_active_tasks_of_type(task_type)
                   for task_type in self.get_all_task_types())

    def is_task_count_refresh_window(self, task_type):
        """True if ``task_type``'s rate-limit window has elapsed and its
        launch count may be reset (i.e. the guard can be freed)."""
        guard_info = self.task_guards[task_type]
        time_bound = guard_info['time_bound']
        prev_time = guard_info['prev_time']
        return time.time() - prev_time >= time_bound

    def try_refresh_guard(self, task_type):
        """Reset the window count for ``task_type`` if its window elapsed.

        Tasks still executing at refresh time carry over into the fresh
        count, so the rate limit cannot be exceeded even with a new window.
        """
        if self.is_task_count_refresh_window(task_type):
            print('REFRESHING GUARD:', task_type)
            active_tasks = self.count_active_tasks_of_type(task_type)
            self.set_task_count(task_type, active_tasks)
            self.update_local_guard_info(task_type,
                                         prev_time=time.time())

    def cleanup_finished_processes(self):
        """Drop tracking entries for worker processes that have exited."""
        if len(self.active_processes) == 0:
            return
        pids_to_rm = []
        for pid, task in self.active_processes.items():
            if not task.proc.is_alive():
                print('TASK FINISHED:', pid, task.task_type)
                pids_to_rm.append(pid)
        for pid in pids_to_rm:
            self.active_processes.pop(pid, None)

    def start_executor(self):
        """Run the executor loop forever: pop tasks, enforce guards, spawn.

        Conservative about guards (refreshes before checking so an exceeded
        guard cannot hang forever) and eager to launch any task whose guard
        is clear.
        """
        while True:
            # TODO: retrieve any new global info on the guards
            self.update_all_queue_globals()
            # reap processes that finished since the last pass
            self.cleanup_finished_processes()
            # respect the global concurrency budget
            if self.total_active_tasks() >= self.config['max_threads']:
                time.sleep(1)
                continue
            # greedily pop the next task, even if it's not ready yet
            self.task_queue.sort_by_priority()
            status, next_task = self.task_queue.get_next_ready_task()
            if status == 'error':
                self.task_queue.readd_task(next_task)
                continue
            next_type = next_task.task_type
            # refresh first -- otherwise an exceeded guard would never
            # reset and this loop would requeue the task forever
            self.try_refresh_guard(next_type)
            if self.guard_exceeded(next_type):
                print("GUARD TRIGGERED:", next_type)
                self.task_queue.readd_task(next_task)
                continue
            # guard is clear: launch the task and account for it
            task_pid = next_task.execute()
            print('NEXT TASK:', next_type, 'PID:', task_pid)
            self.active_processes[task_pid] = next_task
            self.increment_task_count(next_type)
if __name__ == '__main__':
    # Module is import-only; the loop starts when a TaskExecutor is built
    # with a queue and config by the hosting service.
    pass
|
# -*- coding: utf-8 -*-
#
# This file is part of Django facets released under the MIT license.
# See the LICENSE for more information.
from __future__ import (print_function, division, absolute_import, unicode_literals)
from django.conf import settings as _settings
# Fallback values used by FacetsSettings when the wrapped Django settings
# object does not define the attribute.
DEFAULTS = {
    # Facets is disabled by default while DEBUG is on.
    'FACETS_ENABLED': not _settings.DEBUG,
    'FACETS_HANDLERS': (
        'facets.processors.css.CssUrlsProcessor',
    )
}
class FacetsSettings(object):
    """Attribute proxy over Django settings with facets-specific defaults.

    Lookup order: the wrapped settings object first, then the module-level
    DEFAULTS mapping; anything else raises AttributeError.
    """

    def __init__(self, wrapped_settings):
        self._wrapped = wrapped_settings

    def __getattr__(self, name):
        try:
            return getattr(self._wrapped, name)
        except AttributeError:
            if name in DEFAULTS:
                return DEFAULTS[name]
            raise AttributeError('{0} setting not found.'.format(name))
# Module-level singleton, used by the facets app in place of django.conf.settings.
settings = FacetsSettings(_settings)
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006,2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
import doctest
import unittest
from genshi import util
from genshi.tests.test_utils import doctest_suite
from genshi.util import LRUCache
class LRUCacheTestCase(unittest.TestCase):
    """White-box tests for genshi.util.LRUCache.

    These inspect the cache's internal doubly linked list after each
    operation: ``head`` is the most recently used entry, ``tail`` the least
    recently used, and each item links to its neighbours via ``prv``/``nxt``.
    """

    def test_setitem(self):
        # First insert into a capacity-2 cache: the single item is both
        # head and tail, with no neighbours.
        cache = LRUCache(2)
        cache['A'] = 0
        self.assertEqual(1, len(cache))
        self.assertEqual('A', cache.head.key)
        self.assertEqual('A', cache.tail.key)
        item_a = cache._dict['A']
        self.assertEqual('A', item_a.key)
        self.assertEqual(0, item_a.value)
        self.assertEqual(None, item_a.prv)
        self.assertEqual(None, item_a.nxt)
        # Second insert: 'B' becomes head, 'A' ages to the tail.
        cache['B'] = 1
        self.assertEqual(2, len(cache))
        self.assertEqual('B', cache.head.key)
        self.assertEqual('A', cache.tail.key)
        item_a = cache._dict['A']
        item_b = cache._dict['B']
        self.assertEqual('A', item_a.key)
        self.assertEqual(0, item_a.value)
        self.assertEqual(item_b, item_a.prv)
        self.assertEqual(None, item_a.nxt)
        self.assertEqual('B', item_b.key)
        self.assertEqual(1, item_b.value)
        self.assertEqual(None, item_b.prv)
        self.assertEqual(item_a, item_b.nxt)
        # Third insert overflows the capacity: the LRU entry 'A' is evicted.
        cache['C'] = 2
        self.assertEqual(2, len(cache))
        self.assertEqual('C', cache.head.key)
        self.assertEqual('B', cache.tail.key)
        item_b = cache._dict['B']
        item_c = cache._dict['C']
        self.assertEqual('B', item_b.key)
        self.assertEqual(1, item_b.value)
        self.assertEqual(item_c, item_b.prv)
        self.assertEqual(None, item_b.nxt)
        self.assertEqual('C', item_c.key)
        self.assertEqual(2, item_c.value)
        self.assertEqual(None, item_c.prv)
        self.assertEqual(item_b, item_c.nxt)

    def test_getitem(self):
        # A read promotes the entry to head without changing the length.
        cache = LRUCache(2)
        cache['A'] = 0
        cache['B'] = 1
        cache['A']
        self.assertEqual(2, len(cache))
        self.assertEqual('A', cache.head.key)
        self.assertEqual('B', cache.tail.key)
        item_a = cache._dict['A']
        item_b = cache._dict['B']
        self.assertEqual('A', item_a.key)
        self.assertEqual(0, item_a.value)
        self.assertEqual(None, item_a.prv)
        self.assertEqual(item_b, item_a.nxt)
        self.assertEqual('B', item_b.key)
        self.assertEqual(1, item_b.value)
        self.assertEqual(item_a, item_b.prv)
        self.assertEqual(None, item_b.nxt)
def suite():
    """Build the test suite: genshi.util doctests plus the LRUCache tests."""
    suite = unittest.TestSuite()
    suite.addTest(doctest_suite(util))
    # unittest.makeSuite() was deprecated in Python 3.11 and removed in
    # 3.13; loadTestsFromTestCase is the supported equivalent (it uses the
    # same default 'test' method prefix the original passed explicitly).
    suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(LRUCacheTestCase))
    return suite
if __name__ == '__main__':
    # Run the combined doctest + unit-test suite.
    unittest.main(defaultTest='suite')
|
import pokemon
def main():
    """Entry point: delegate to the pokemon module's POST routine."""
    pokemon.do_post()
if __name__ == "__main__":
    main()
|
# Copyright 2021 The NetKet Authors - All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from typing import List, Generator, Iterator, Tuple
class AbstractGraph(abc.ABC):
    """Abstract class for NetKet graph objects."""

    @abc.abstractmethod
    def is_connected(self) -> bool:
        r"""True if the graph is connected"""
        raise NotImplementedError

    @abc.abstractmethod
    def is_bipartite(self) -> bool:
        r"""True if the graph is bipartite"""
        raise NotImplementedError

    @abc.abstractmethod
    def edges(self) -> Iterator[Tuple[int, int]]:
        r"""Iterator over the edges of the graph"""
        raise NotImplementedError

    @abc.abstractmethod
    def nodes(self) -> Iterator[int]:
        r"""Iterator over the nodes of the graph"""
        raise NotImplementedError

    @abc.abstractmethod
    def distances(self) -> List[List]:
        r"""List containing the distances between the nodes.
        The fact that some node may not be reachable from another is represented by -1"""
        raise NotImplementedError

    @abc.abstractmethod
    def automorphisms(self):
        r"""Symmetry group containing the automorphisms of the graph"""
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def n_nodes(self) -> int:
        r"""The number of nodes (or vertices) in the graph"""
        raise NotImplementedError

    @property
    def n_edges(self) -> int:
        r"""The number of edges in the graph."""
        # BUGFIX: edges() is declared to return an Iterator, which has no
        # len(); the original len(self.edges()) raised TypeError for any
        # subclass honouring that contract. Use len() when the concrete
        # return type is sized, otherwise count the iterator.
        edges = self.edges()
        try:
            return len(edges)
        except TypeError:
            return sum(1 for _ in edges)

    @abc.abstractmethod
    def adjacency_list(self) -> List[List]:
        r"""List containing the adjacency list of the graph where each node
        is represented by an integer in [0, n_nodes)"""
        raise NotImplementedError
|
class MergeSort:
    """In-place top-down merge sort for Python lists."""

    @staticmethod
    def sort(array):
        """Sort ``array`` in place. Tolerates None, empty and 1-element input."""
        if array is None:
            print('Input array is empty')
            # BUGFIX: the original fell through and crashed on len(None).
            return
        if len(array) > 1:
            MergeSort.mergeSort(0, len(array) - 1, array)

    @staticmethod
    def mergeSort(i, j, array):
        """Recursively sort array[i..j] (inclusive bounds)."""
        if i < j:
            # BUGFIX: '/' yields a float on Python 3, breaking the integer
            # index arithmetic; '//' is correct on both Python 2 and 3.
            mid = (i + j) // 2
            MergeSort.mergeSort(i, mid, array)
            MergeSort.mergeSort(mid + 1, j, array)
            MergeSort.merge(i, mid, j, array)

    @staticmethod
    def merge(i, mid, j, array):
        """Merge the sorted runs array[i..mid] and array[mid+1..j]."""
        aux = []
        k = i
        m = mid + 1
        while k <= mid and m <= j:
            if array[k] <= array[m]:  # '<=' keeps the sort stable
                aux.append(array[k])
                k += 1
            else:
                aux.append(array[m])
                m += 1
        # Drain whichever run still has elements.
        aux.extend(array[k:mid + 1])
        aux.extend(array[m:j + 1])
        array[i:j + 1] = aux
if __name__ == '__main__':
    # Smoke test: sorts a sample list in place and prints the result.
    a = [3,4,5,1,2,10]
    MergeSort.sort(a)
    print(a)
|
#TODO: Add some form of analysis?,
#Edit proxy cycling, so the process originator loop passes the # of proxy to use
#Additionally, need to add a fail state -- raised exception if there's more than x loops of proxy or captcha cycling....
#Research whether we can pull more than 20 proxyList at once
####Possibly even start writing them to a file?
#Add reading in the search terms from a file or something, so it's easily editable?
#Add best seller flag?
#Test out using sentiment analysis to tag the data -- brand name / pack size / pack count / $ per oz
#Add more sources: instacart, walmart, etc. Then create wrapper program that calls each of these subroutines
#from proxy_list_scrape import scrapeProxyList, getProxyList, updateProxyFile
from proxy_list_scrape import scrapeProxyListUK
from datetime import date
import re
import random
import requests
from bs4 import BeautifulSoup
import pandas as pd
import time
from multiprocessing import Process, Queue, Pool, Manager, Lock
#from send_email import send_email
from data_tagging import tag_data, get_tag_dicts
from DB_functions import update_scrape_db
#Declare the variables that will be needed to run the request loop
proxyList = []
#proxyList.append(scrapeProxyList())
proxyCounter = 0  # offset passed to the proxy scraper; grows as proxies are exhausted
startTime = time.time()  # wall-clock start, used for the final timing report
qcount = 0  # number of result rows drained from the queue at the end
#Declare the lists that will be used to store the final dataframe
#(one list per output column; rows are appended in queue-drain order)
sourceList=[] #list to store sources of scraped data
searchTerms=[] #list to store keyword of product
dates = [] #list to store date of search for product
products=[] #List to store name of the product
prices=[] #List to store price of the product
regularPrices=[] #List to store regular prices, if any
onSales=[] #List to store boolean of whether item is on sale or not
amazonChoices=[] #List to store boolean of whether item is an Amazon Choice product or not
sponsoredList=[] #List to store boolean of whether item is Sponsored
positions=[] #List of where the product was positioned in the search
pages=[] #List to store ratings of the product
#Now declare the lists of additional tag data
brands=[] #List to store tagged brand of the product
MFGs=[] #List to store tagged manufcaturer of the product
variants=[] #List to store tagged variants of the product
packTypes=[] #List to store tagged pack types of the product
packCounts=[] #List to store tagged pack counts of the product
packSizes=[] #List to store tagged pack sizes of the product
#Declare the request variables that determine how many requests will get made -- eventually these will be fed as arguments to the request function from a wrapper function
no_pages = 4  # pages scraped per keyword are 1..no_pages-1 (range is exclusive)
keywords = ['Soda', 'Water', 'Sports Drinks', 'Coffee', 'Cereal', 'Snack Bars', 'Chips', 'Snacks', 'Contact Lenses', 'Coke', 'Fanta',
            'Sprite', 'Powerade', 'Frosted Flakes', 'Special K', 'Froot Loops', 'Raisin Bran', 'Pringles', 'Cheez It', 'Rice Krispies', 'Rice Krispies Treats',
            'Pop Tarts', 'Acuvue', 'Oasys', 'Pet Food', 'Dog Food', 'Cat Food']
#keywords = ['cereal']
#THIS IS WHERE I SHOULD DECLARE A FUNCTION TO GENERATE THE PROXY LIST
###THEN IT SHOULD STORE THE GLOBAL LIST
###AND HAVE METHODS TO CALL/UPDATE IT
#Function for getting a proxy
def generateProxyList(lock, proxyCounter):
    """Scrape a fresh UK proxy list while holding the shared lock.

    ``proxyCounter`` is forwarded to the scraper as its listing offset.
    Returns the freshly scraped list.
    """
    with lock:
        fresh_proxies = scrapeProxyListUK(proxyCounter)
    return fresh_proxies
def getProxy(proxyList):
    """Return a proxy from the shared list (and print the list length).

    NOTE(review): despite the comment below, this always returns the FIRST
    entry, not a random one -- confirm whether random.choice was intended.
    """
    #global proxyList
    print(len(proxyList))
    #Return a random proxy in the list
    return proxyList[0]
#Function for deleting the proxy from
#Function for deleting the proxy from
def removeProxy(proxyList, proxy, lock, proxyCounter):
    """Remove a dead proxy from the shared list, under the lock.

    If the list runs empty, rescrape a fresh batch and bump ``proxyCounter``
    (capped at 200) so the next scrape starts deeper into the source listing.
    Returns the (possibly incremented) proxyCounter.
    """
    #global proxyList
    lock.acquire()
    try:
        #Figure out what index in the list is the one to delete
        if proxy in proxyList:
            proxyList.remove(proxy)
            print("Removing " + str(proxy) + ". " + str(len(proxyList)) + " remaining.")# + " from: " + str(proxyList))
        #Check to make sure it's not the only proxy in the list, and if it is, append a new scrape
        if len(proxyList) < 1:
            print("Proxy List Too Short: " + str(proxyList))
            print("Refreshing proxy List")
            proxyList.extend( scrapeProxyListUK(proxyCounter) )
            if proxyCounter < 200: proxyCounter += 20
    finally:
        lock.release()
    return proxyCounter
def get_data(keyword, pageNo, q, lock, proxyList, tagging_df, proxyCounter):
    """Scrape one Amazon UK search results page and push rows onto the queue.

    Retries through the shared proxy list until a request succeeds without a
    connection error or captcha page, then parses every search result (and
    carousel entry) into a 17-element row and q.put()s it.
    Runs as a worker Process target.
    """
    headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0", "Accept-Encoding":"gzip, deflate", "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "DNT":"1","Connection":"close", "Upgrade-Insecure-Requests":"1"}
    #Keep trying until an exception isn't raised
    noError = False
    while not noError:
        #wait for a random period of time before fetching the next page, to help avoid being blocked by amazon
        time.sleep(random.random())
        proxy = getProxy(proxyList)
        if 'http' in proxy:
            printProxy = proxy['http']
        else:
            printProxy = proxy['https']
        try:
            r = requests.get("https://www.amazon.co.uk/s?k=" + keyword + "&page=" + str(pageNo), headers=headers, proxies=proxy, timeout=15)
        except:
            #remove the bad proxy from the list so we don't try it again
            proxyCounter = removeProxy(proxyList, proxy, lock, proxyCounter)
            print("There was a connection error")
            print("Bad Proxy: " + printProxy)
        else:
            print("Now things are ok")
            #THIS IS ALL DEBUGGING NONSENSE
            if 'http' in proxy:
                printProxy = proxy['http']
            else:
                printProxy = proxy['https']
            print("This proxy worked " + printProxy)
            #Add here some checking for whether soup is readable
            content = r.content
            soup = BeautifulSoup(content, features="lxml")
            # No data-index divs at all usually means a captcha page, not results.
            if len(soup.findAll('div', attrs={'data-index':re.compile(r'\d+')})) == 0:
                #remove the bad proxy from the list so we don't try it again
                proxyCounter = removeProxy(proxyList, proxy, lock, proxyCounter)
                print("There was a captcha error")
                print("Bad Proxy: " + printProxy)
            else:
                noError = True
    #content = r.content
    #soup = BeautifulSoup(content, features="lxml")
    #print(soup.encode('utf-8')) # uncomment this in case there is some non UTF-8 character in the content and
    # you get error
    for d in soup.findAll('div', attrs={'data-index':re.compile(r'\d+')}):
        #Some of those search results may be carousels, so we need to iterate for each "entry" within the search result
        #Set up a carousel counter to add to placement entries for carousel entries
        carouselCounter = 0
        if len(d.findAll('div', attrs={'data-asin':re.compile(r'.*')})) > 0:
            soupList = d.findAll('div', attrs={'data-asin':re.compile(r'.*')})
        else:
            soupList = d.findAll('div', attrs={'class':'sg-col-inner'})
        #Now I'm only getting the top carousel data
        for product in soupList:
            #print("We're in the second soup loop")
            #if we're in the first carousel
            if product.find('span', attrs={'data-click-el':'title'}) is not None:
                name = product.find('span', attrs={'data-click-el':'title'})
                ad = True
            else:
                name = product.find('span', attrs={'class':re.compile(r'a-color-base a-text-normal')})
                ad = False
            #print(name)
            price = product.find('span', attrs={'class':'a-offscreen'})
            #print(price)
            regularPrice = product.find('span', attrs={'data-a-strike':'true'})
            #This should work, because it's looking
            amazonChoice = d.find('span', attrs={'class':'a-badge'})
            placement = d['data-index']
            # NOTE: 'all' shadows the builtin; it is the output row being assembled.
            all=[]
            #Product Name and Price should be the two minimum checks before putting ANYTHING into the queue
            #print("Checking Name: ", name)
            if name is not None: #and price is not None:
                all.append("Amazon")
                all.append(keyword)
                all.append(date.today())
                all.append(name.text)
                if price is not None:
                    all.append(price.text)
                else:
                    all.append("$0")
                if regularPrice is not None:
                    all.append(regularPrice.contents[0].text)
                    all.append(True)
                else:
                    all.append('$0')
                    all.append(False)
                if amazonChoice is not None:
                    all.append(True)
                else:
                    all.append(False)
                #The second part of the check is to look if something is within a carousel, which I'm considering a sponsored product....
                if 'AdHolder' in d['class'] or 's-widget' in d['class'] or ad == True:
                    all.append(True)
                else:
                    all.append(False)
                #Using string slice to only input the # part of the string and then appending "-#" if it's one of the carousel entries
                all.append( placement + "-" + str(carouselCounter))
                all.append(str(pageNo))
                #Now add the tags based on the newly updated data
                tagDict = tag_data(name.text, tagging_df[0], tagging_df[1], tagging_df[2])
                all.append(tagDict['brand'])
                all.append(tagDict['mfg'])
                all.append(tagDict['variant'])
                all.append(tagDict['packType'])
                all.append(tagDict['count'])
                all.append(tagDict['size'])
                q.put(all)
            #increment carousel counter -- shouldn't matter if this isn't a carousel
            carouselCounter += 1
    print("Put " + keyword + " #" + str(pageNo))
    #DEBUGGING -- if this is page 1 of the first loop, just save the full html file
    if pageNo == 1 and keyword == 'cereal':
        with open('./UK_cereal_html.html', 'w', encoding='utf-8') as outfile:
            outfile.write(str(soup))
results = []
if __name__ == "__main__":
print("In the main")
m = Manager()
q = m.Queue() # use this manager Queue instead of multiprocessing Queue as that causes error
lock = Lock()
proxyCounter = 20
proxyList = generateProxyList(lock, proxyCounter)
#proxyList.extend( scrapeProxyList() )
print("ProxyList of length: " + str(len(proxyList)))
#This is where we create the keyword / page dictionary to loop through, so we can truly be parallel with execution
searchList = []
#raise SyntaxError
for word in keywords:
for i in range (1, no_pages):
searchList.append( {'word': word, 'page': i} )
#get the path for the tagging dataframes
tagging_df = get_tag_dicts()
p = {}
for i in range(len(searchList)):
print("starting process: ", i)
p[i] = Process(target=get_data, args=(searchList[i]['word'], searchList[i]['page'], q, lock, proxyList, tagging_df, proxyCounter))
p[i].start()
# join should be done in seperate for loop
# reason being that once we join within previous for loop, join for p1 will start working
# and hence will not allow the code to run after one iteration till that join is complete, ie.
# the thread which is started as p1 is completed, so it essentially becomes a serial work instead of
# parallel
for i in range(len(searchList)):
p[i].join()
print("#" + str(i) + " joined")
while q.empty() is not True:
qcount = qcount+1
queue_top = q.get()
sourceList.append(queue_top[0])
searchTerms.append(queue_top[1])
dates.append(queue_top[2])
products.append(queue_top[3])
prices.append(queue_top[4])
regularPrices.append(queue_top[5])
onSales.append(queue_top[6])
amazonChoices.append(queue_top[7])
sponsoredList.append(queue_top[8])
positions.append(queue_top[9])
pages.append(queue_top[10])
brands.append(queue_top[11])
MFGs.append(queue_top[12])
variants.append(queue_top[13])
packTypes.append(queue_top[14])
packCounts.append(queue_top[15])
packSizes.append(queue_top[16])
print("Q Count " + str(qcount) + " pulled")
#Only run once everything is done
print("total time taken: ", str(time.time()-startTime), " qcount: ", qcount)
#print(q.get())
#df = pd.DataFrame({'Source':sourceList, 'Keyword':searchTerms,'Date':dates, 'Product Name':products, 'Price':prices,
# 'Regular Price':regularPrices, 'On Sale':onSales, 'Amazon Choice':amazonChoices, 'Sponsored':sponsoredList, 'List Position':positions, 'Page':pages})
#print(df)
#df.to_csv('./amazon_data/' + str(date.today()) + '-SearchList.csv', index=False, encoding='utf-8')
#print("Dataframe saved")
tagged_df = pd.DataFrame({'Source':sourceList, 'Keyword':searchTerms,'Date':dates, 'Product_Name':products, 'Price':prices,
'Regular_Price':regularPrices, 'On_Sale':onSales, 'Featured':amazonChoices, 'Sponsored':sponsoredList,
'List_Position':positions, 'Page':pages, 'Brand':brands, 'MFG':MFGs, 'Variant':variants, 'Pack_Type':packTypes,
'Pack_Count':packCounts, 'Pack_Size':packSizes})
tagged_df.to_csv('./amazon_UK_data/' + str(date.today()) + '-SearchList-Tagged.csv', index=False, encoding='utf-8')
#print("Tagged DataFrame Saved")
#Now write to the database and overwrite if we already have Amazon scrape data from today
#numWritten = update_scrape_db(tagged_df, True)
#print("Entries written to DB: ", numWritten)
#Send completion email so we can make sure data got recorded
recipient = 'david@4sightassociates.com'
subject = 'Daily Web Scrape Update'
message = ("Web scraping finished with " + str(qcount) + " entries recorded. \n" +
"Brands Tagged: " + str(len(brands) - brands.count('')) + "\n" +
"MFGs Tagged: " + str(len(MFGs) - MFGs.count('')) + "\n" +
"Variants Tagged: " + str(len(variants) - variants.count('')) + "\n" +
"Pack Types Tagged: " + str(len(packTypes) - packTypes.count('')) + "\n" +
"Pack Counts Tagged: " + str(len(packCounts) - packCounts.count('')) + "\n" +
"Pack Sizes Tagged: " + str(len(packSizes) - packSizes.count(''))
)
#send_email(recipient, subject, message)
print("Message sent")
    #And finally, update the proxy list with the new list
#updateProxyFile('./proxyList.csv', proxyList) |
#
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import time
from itertools import chain
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase, enable_mode
from ansible.plugins.connection.network_cli import Connection as NetworkCli
class Cliconf(CliconfBase):
    """Cliconf plugin for Arista EOS.

    Implements the CLI transport primitives used by EOS modules.  The
    underlying connection may be either network_cli (SSH, ``send()``) or
    eAPI (``send_request()``); methods branch on the connection type.
    """

    def send_command(self, command, prompt=None, answer=None, sendonly=False, newline=True, prompt_retry_check=False):
        """Executes a cli command and returns the results

        This method will execute the CLI command on the connection and return
        the results to the caller. The command output will be returned as a
        string
        """
        kwargs = {'command': to_bytes(command), 'sendonly': sendonly,
                  'newline': newline, 'prompt_retry_check': prompt_retry_check}
        if prompt is not None:
            kwargs['prompt'] = to_bytes(prompt)
        if answer is not None:
            kwargs['answer'] = to_bytes(answer)

        if isinstance(self._connection, NetworkCli):
            resp = self._connection.send(**kwargs)
        else:
            # eAPI (httpapi) connections expose send_request() instead of send()
            resp = self._connection.send_request(command, **kwargs)
        return resp

    def get_device_info(self):
        """Return a dict of device facts (os, version, model, hostname)."""
        device_info = {}

        device_info['network_os'] = 'eos'
        reply = self.get('show version | json')
        data = json.loads(reply)

        device_info['network_os_version'] = data['version']
        device_info['network_os_model'] = data['modelName']

        reply = self.get('show hostname | json')
        data = json.loads(reply)

        device_info['network_os_hostname'] = data['hostname']

        return device_info

    @enable_mode
    def get_config(self, source='running', format='text', flags=None):
        """Fetch the requested configuration from the device.

        :param source: 'running' or 'startup'
        :param format: output format; any value other than 'text' is appended
            as an output modifier (e.g. '| json')
        :param flags: optional list of extra command flags
        """
        lookup = {'running': 'running-config', 'startup': 'startup-config'}
        if source not in lookup:
            return self.invalid_params("fetching configuration from %s is not supported" % source)
        cmd = 'show %s ' % lookup[source]
        # BUG FIX: the original used `format is not 'text'`, an identity
        # comparison against a string literal (fragile, SyntaxWarning on
        # modern Python).  Equality is what was meant.
        if format and format != 'text':
            cmd += '| %s ' % format
        cmd += ' '.join(to_list(flags))
        cmd = cmd.strip()
        return self.send_command(cmd)

    @enable_mode
    def edit_config(self, command):
        """Apply configuration commands wrapped in a configure/end block."""
        for cmd in chain(['configure'], to_list(command), ['end']):
            self.send_command(cmd)

    def get(self, command, prompt=None, answer=None, sendonly=False):
        """Run a single command and return its output."""
        return self.send_command(command, prompt=prompt, answer=answer, sendonly=sendonly)

    def get_capabilities(self):
        """Return a JSON document describing supported RPCs and device facts."""
        result = {}
        result['rpc'] = self.get_base_rpc()
        result['device_info'] = self.get_device_info()

        if isinstance(self._connection, NetworkCli):
            result['network_api'] = 'cliconf'
        else:
            result['network_api'] = 'eapi'
        return json.dumps(result)

    # Imported from module_utils
    def close_session(self, session):
        # to close session gracefully execute abort in top level session prompt.
        self.get('end')
        self.get('configure session %s' % session)
        self.get('abort')

    def run_commands(self, commands, check_rc=True):
        """Run list of commands on remote device and return results

        Each command may be a plain string or a dict with a 'command' key and
        optional 'prompt'/'answer' keys.  When check_rc is False, connection
        failures are captured and returned in place of the output.
        """
        responses = list()
        multiline = False
        for cmd in to_list(commands):
            if isinstance(cmd, dict):
                command = cmd['command']
                # ROBUSTNESS: tolerate dicts that omit prompt/answer instead
                # of raising KeyError.
                prompt = cmd.get('prompt')
                answer = cmd.get('answer')
            else:
                command = cmd
                prompt = None
                answer = None

            if command == 'end':
                continue
            elif command.startswith('banner') or multiline:
                # banner bodies span several lines until an EOF sentinel
                multiline = True
            elif command == 'EOF' and multiline:
                multiline = False

            try:
                # NOTE(review): `multiline` is passed positionally into the
                # `sendonly` parameter of get() — confirm this is intended.
                out = self.get(command, prompt, answer, multiline)
            except AnsibleConnectionFailure as e:
                if check_rc:
                    raise
                out = getattr(e, 'err', e)

            out = to_text(out, errors='surrogate_or_strict')

            if out is not None:
                try:
                    out = json.loads(out)
                except ValueError:
                    out = out.strip()

                responses.append(out)
        return responses

    def load_config(self, commands, commit=False, replace=False):
        """Loads the config commands onto the remote device

        Commands are applied inside a named config session; on connection
        failure the session is aborted and the exception re-raised.  Returns
        the session name and, when present, the resulting diff.
        """
        session = 'ansible_%s' % int(time.time())
        result = {'session': session}

        self.get('configure session %s' % session)
        if replace:
            self.get('rollback clean-config')

        try:
            self.run_commands(commands)
        except AnsibleConnectionFailure:
            self.close_session(session)
            raise

        out = self.get('show session-config diffs')
        if out:
            result['diff'] = out.strip()

        if commit:
            self.get('commit')
        else:
            self.close_session(session)
        return result
|
import kb, json, os
import libkbot as libkb
class quote:
    """Chat command plugin: delegates quote rendering to an external script.

    When one of the registered keywords matches, handler() serializes the
    (name-fixed) message plus bot config to JSON and hands it to
    plugins/quote/quote.py in a subprocess.
    """
    level = 1
    keywords = ['цитата','цит']

    def handler(self, msg):
        """Serialize the incoming message and run the quote script on it."""
        import subprocess  # local import keeps the module's import line untouched

        msg = libkb.fix_names(msg)
        msg['config'] = kb.config
        arg = json.dumps(msg)
        # SECURITY FIX: the original built a shell string with the JSON
        # wrapped in single quotes; any quote character in the (untrusted)
        # chat message allowed shell injection.  An argv list avoids the
        # shell entirely while passing the same argument.
        subprocess.run(["python3", "plugins/quote/quote.py", arg])
# Register an instance so the bot dispatches matching keywords to handler().
kb.reg_command(quote())
from __future__ import annotations
# This concept of channels comes directly from the Kraken API docs : https://docs.kraken.com/websockets/#message-ohlc
import asyncio
import functools
from dataclasses import dataclass, field
import typing
from types import MappingProxyType
from aiokraken.model.timeframe import KTimeFrameModel
from result import Err, Ok, Result
from aiokraken.websockets.schemas.openorders import openOrderWS, openOrderWSSchema
from aiokraken.websockets.schemas.owntrades import ownTradeWS, ownTradeWSSchema
from aiokraken.websockets.schemas.trade import TradeWS, TradeWSSchema
from aiokraken.websockets.schemas.ohlc import OHLCUpdate, OHLCUpdateSchema
from aiokraken.websockets.schemas.ticker import TickerWS, TickerWSSchema
from aiokraken.model.assetpair import AssetPair
from aiokraken.websockets.schemas.subscribe import Subscribe, Subscription
from aiokraken.rest.exceptions import AIOKrakenSchemaValidationException
from aiokraken.rest.schemas.base import BaseSchema
# Module-level schema singletons, shared by all public-channel parsers.
_ticker_schema = TickerWSSchema()
_ohlcupdate_schema = OHLCUpdateSchema()
_trade_schema = TradeWSSchema()
# TODO : maybe a way to make partial call easier ? curry decorator ?
def _public_parser(
    *, schema: BaseSchema, data: typing.Any, pair: AssetPair
):  # TODO: refine Any ??
    """Parse a public-channel payload with *schema* and attach *pair*.

    Returns ``Ok(model)`` on success, ``Err(validation_error)`` when the
    payload does not match the schema.
    """
    try:
        # Message formats:
        # - https://docs.kraken.com/websockets-beta/#message-ticker
        # - https://docs.kraken.com/websockets-beta/#message-ohlc
        # - https://docs.kraken.com/websockets-beta/#message-trade
        parsed = schema.load(data)
        # Public streams do not embed the pair in the payload, so the parsed
        # model is called with it here to store it alongside the data.
        parsed_with_pair = parsed(pair)
        return Ok(parsed_with_pair)
    except AIOKrakenSchemaValidationException as sve:
        # BUG FIX: result's Err takes its wrapped value positionally (the
        # parameter is named ``value``); ``Err(error=sve)`` raised TypeError.
        return Err(sve)
def publicchannelparser(channel_name: str):
    """Return the parser callable for a public channel name.

    Raises:
        NotImplementedError: when no parser exists for *channel_name*.
    """
    if channel_name == "trade":
        return functools.partial(_public_parser, schema=_trade_schema)
    elif channel_name == "ticker":
        return functools.partial(_public_parser, schema=_ticker_schema)
    elif channel_name.startswith("ohlc"):
        # OHLC channel names carry the interval suffix, e.g. "ohlc-5".
        return functools.partial(_public_parser, schema=_ohlcupdate_schema)
    else:
        # Include the offending name so the failure is diagnosable.
        raise NotImplementedError(f"no public parser for channel {channel_name!r}")
#### PRIVATE
# Schema singletons for the private (authenticated) channels.
_owntrades_schema = ownTradeWSSchema()
_openorders_schema = openOrderWSSchema()
def _private_parser(*, schema: BaseSchema, data: typing.Any):  # TODO : refine Any ??
    """ ownTrades Channel callable. note the name must match the channel name returned by kraken."""
    try:
        # - https://docs.kraken.com/websockets-beta/#message-ownTrades
        # - https://docs.kraken.com/websockets-beta/#message-openOrders
        # if data is a list, we parse one element at a time
        parsed = schema.load(data, many=isinstance(data, list))
        return Ok(parsed)
    except AIOKrakenSchemaValidationException as sve:
        # BUG FIX: result's Err takes its wrapped value positionally (the
        # parameter is named ``value``); ``Err(error=sve)`` raised TypeError.
        return Err(sve)
def privatechannelparser(channel_name: str):
    """Return the parser callable for a private channel name.

    Raises:
        NotImplementedError: when no parser exists for *channel_name*.
    """
    if channel_name == "ownTrades":
        return functools.partial(_private_parser, schema=_owntrades_schema)
    elif channel_name == "openOrders":
        return functools.partial(_private_parser, schema=_openorders_schema)
    else:
        # Include the offending name so the failure is diagnosable.
        raise NotImplementedError(f"no private parser for channel {channel_name!r}")
if __name__ == "__main__":
    # This module is a library of parser factories; it has no standalone
    # entry point.
    raise NotImplementedError
|
import json
def de_duplication(text):
    """Return *text* with duplicate characters removed.

    The first occurrence of each character is kept, in original order.
    (The previous version shadowed the builtin ``str`` with its parameter
    name and did an O(n^2) membership scan; dict.fromkeys preserves
    insertion order and dedups in one O(n) pass.)
    """
    return ''.join(dict.fromkeys(text))
# Build the set of distinct characters appearing in the dataset's sentence
# texts (one JSON object per line, text under the "sentText" key).
with open('data/new/dev.txt', 'rt', encoding='utf-8') as fin:
    # Collect pieces and join once instead of quadratic string +=.
    pieces = []
    for line in fin:
        line = line.strip()
        if not line:
            continue
        sentence = json.loads(line)
        sentence_text = sentence["sentText"].strip().strip('"')
        pieces.append(sentence_text)
        #print(sentence_text)
    char_vector = de_duplication(''.join(pieces))
    print(char_vector)
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests to cover LineItemCreativeAssociationService."""
__author__ = 'api.sgrinberg@gmail.com (Stan Grinberg)'
import base64
from datetime import date
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..'))
import unittest
from adspygoogle.common import Utils
from tests.adspygoogle.dfp import client
from tests.adspygoogle.dfp import HTTP_PROXY
from tests.adspygoogle.dfp import SERVER_V201108
from tests.adspygoogle.dfp import SERVER_V201111
from tests.adspygoogle.dfp import TEST_VERSION_V201108
from tests.adspygoogle.dfp import TEST_VERSION_V201111
from tests.adspygoogle.dfp import VERSION_V201108
from tests.adspygoogle.dfp import VERSION_V201111
class LicaServiceTestV201108(unittest.TestCase):
  """Unittest suite for LineItemCreativeAssociationService using V201108."""
  SERVER = SERVER_V201108
  VERSION = VERSION_V201108
  client.debug = False
  # Fixtures are created lazily in setUp() and cached on the class so that
  # all test methods share the same remote objects.
  service = None
  creative1 = None
  creative2 = None
  creative3 = None
  line_item_id = None
  # NOTE(review): this file handle is never closed (Python 2 era code;
  # base64.encodestring is also deprecated) — consider a with-block upstream.
  IMAGE_DATA = open(os.path.join('data', 'medium_rectangle.jpg').replace(
      '\\', '/'), 'r').read()
  IMAGE_DATA = base64.encodestring(IMAGE_DATA)
  lica1 = None
  lica2 = None

  def setUp(self):
    """Prepare unittest."""
    print self.id()
    if not self.__class__.service:
      self.__class__.service = client.GetLineItemCreativeAssociationService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)

    if (not self.__class__.creative1 or not self.__class__.creative2 or
        not self.__class__.creative3):
      company = {
          'name': 'Company #%s' % Utils.GetUniqueName(),
          'type': 'ADVERTISER'
      }
      advertiser_id = client.GetCompanyService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateCompany(company)[0]['id']
      creatives = []
      for i in xrange(3):
        creatives.append({
            'type': 'ImageCreative',
            'name': 'Image Creative #%s' % Utils.GetUniqueName(),
            'advertiserId': advertiser_id,
            'destinationUrl': 'http://google.com',
            'imageName': 'image.jpg',
            'imageByteArray': self.__class__.IMAGE_DATA,
            'size': {'width': '300', 'height': '250'}
        })
      creatives = client.GetCreativeService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateCreatives(creatives)
      self.__class__.creative1 = creatives[0]
      self.__class__.creative2 = creatives[1]
      self.__class__.creative3 = creatives[2]

    # NOTE(review): advertiser_id is only bound inside the creatives branch
    # above; if the creatives are already cached but line_item_id is not,
    # the order dict below raises NameError — confirm the two fixtures are
    # always created together.
    if not self.__class__.line_item_id:
      filter_statement = {'query': 'ORDER BY name LIMIT 500'}
      user_service = client.GetUserService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
      users = user_service.GetUsersByStatement(filter_statement)
      trafficker_id = '0'
      for user in users[0]['results']:
        if user['roleName'] in ('Trafficker',):
          trafficker_id = user['id']
          break
      order = {
          'advertiserId': advertiser_id,
          'currencyCode': 'USD',
          'name': 'Order #%s' % Utils.GetUniqueName(),
          'traffickerId': trafficker_id
      }
      order_id = client.GetOrderService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateOrder(order)[0]['id']
      inventory_service = client.GetInventoryService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
      network_service = client.GetNetworkService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY)
      root_ad_unit_id = \
          network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
      ad_unit = {
          'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
          'parentId': root_ad_unit_id,
          'adUnitSizes': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              }
          ],
          'description': 'Ad unit description.',
          'targetWindow': 'BLANK'
      }
      ad_unit_id = inventory_service.CreateAdUnit(ad_unit)[0]['id']
      line_item = {
          'name': 'Line item #%s' % Utils.GetUniqueName(),
          'orderId': order_id,
          'targeting': {
              'inventoryTargeting': {
                  'targetedAdUnitIds': [ad_unit_id]
              }
          },
          'creativePlaceholders': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              },
              {
                  'size': {
                      'width': '120',
                      'height': '600'
                  }
              }
          ],
          'lineItemType': 'STANDARD',
          'startDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '1'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'endDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '30'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'costType': 'CPM',
          'costPerUnit': {
              'currencyCode': 'USD',
              'microAmount': '2000000'
          },
          'creativeRotationType': 'EVEN',
          'discountType': 'PERCENTAGE',
          'unitsBought': '500000',
          'unitType': 'IMPRESSIONS'
      }
      self.__class__.line_item_id = client.GetLineItemService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateLineItem(line_item)[0]['id']

  def testCreateLineItemCreativeAssociation(self):
    """Test whether we can create a line item creative association."""
    lica = {
        'creativeId': self.__class__.creative1['id'],
        'lineItemId': self.__class__.line_item_id
    }
    self.assert_(isinstance(
        self.__class__.service.CreateLineItemCreativeAssociation(lica), tuple))

  def testCreateLineItemCreativeAssociations(self):
    """Test whether we can create a list of line item creative associations."""
    licas = [
        {
            'creativeId': self.__class__.creative2['id'],
            'lineItemId': self.__class__.line_item_id
        },
        {
            'creativeId': self.__class__.creative3['id'],
            'lineItemId': self.__class__.line_item_id
        }
    ]
    licas = self.__class__.service.CreateLineItemCreativeAssociations(licas)
    self.__class__.lica1 = licas[0]
    self.__class__.lica2 = licas[1]
    self.assert_(isinstance(licas, tuple))

  def testGetLineItemCreativeAssociation(self):
    """Test whether we can fetch an existing line item creative association."""
    self.assert_(isinstance(
        self.__class__.service.GetLineItemCreativeAssociation(
            self.__class__.line_item_id, self.__class__.creative2['id']),
        tuple))

  def testGetLineItemCreativeAssociationsByStatement(self):
    """Test whether we can fetch a list of existing line item creative
    associations that match given statement."""
    if not self.__class__.lica1:
      self.testCreateLineItemCreativeAssociations()
    filter_statement = {'query': 'WHERE lineItemId = \'%s\' LIMIT 500'
                        % self.__class__.line_item_id}
    self.assert_(isinstance(
        self.__class__.service.GetLineItemCreativeAssociationsByStatement(
            filter_statement), tuple))

  def testPerformLineItemCreativeAssociationAction(self):
    """Test whether we can deactivate a line item create association."""
    if not self.__class__.lica1:
      self.testCreateLineItemCreativeAssociations()
    action = {'type': 'DeactivateLineItemCreativeAssociations'}
    filter_statement = {'query': 'WHERE lineItemId = \'%s\' AND '
                        'status = \'ACTIVE\'' % self.__class__.line_item_id}
    self.assert_(isinstance(
        self.__class__.service.PerformLineItemCreativeAssociationAction(
            action, filter_statement), tuple))

  def testUpdateLineItemCreativeAssociation(self):
    """Test whether we can update a line item creative association."""
    if not self.__class__.lica1:
      self.testCreateLineItemCreativeAssociations()
    destination_url = 'http://news.google.com'
    self.__class__.lica1['destinationUrl'] = destination_url
    lica = self.__class__.service.UpdateLineItemCreativeAssociation(
        self.__class__.lica1)
    self.assert_(isinstance(lica, tuple))
    self.assertEqual(lica[0]['destinationUrl'], destination_url)

  def testUpdateLineItemCreativeAssociations(self):
    """Test whether we can update a list of line item creative associations."""
    if not self.__class__.lica1 or not self.__class__.lica2:
      self.testCreateLineItemCreativeAssociations()
    destination_url = 'http://news.google.com'
    self.__class__.lica1['destinationUrl'] = destination_url
    self.__class__.lica2['destinationUrl'] = destination_url
    licas = self.__class__.service.UpdateLineItemCreativeAssociations(
        [self.__class__.lica1, self.__class__.lica2])
    self.assert_(isinstance(licas, tuple))
    for lica in licas:
      self.assertEqual(lica['destinationUrl'], destination_url)
class LicaServiceTestV201111(unittest.TestCase):
  """Unittest suite for LineItemCreativeAssociationService using V201111."""
  SERVER = SERVER_V201111
  VERSION = VERSION_V201111
  client.debug = False
  # Fixtures are created lazily in setUp() and cached on the class so that
  # all test methods share the same remote objects.
  service = None
  creative1 = None
  creative2 = None
  creative3 = None
  line_item_id = None
  # NOTE(review): this file handle is never closed (Python 2 era code;
  # base64.encodestring is also deprecated) — consider a with-block upstream.
  IMAGE_DATA = open(os.path.join('data', 'medium_rectangle.jpg').replace(
      '\\', '/'), 'r').read()
  IMAGE_DATA = base64.encodestring(IMAGE_DATA)
  lica1 = None
  lica2 = None

  def setUp(self):
    """Prepare unittest."""
    print self.id()
    if not self.__class__.service:
      self.__class__.service = client.GetLineItemCreativeAssociationService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)

    if (not self.__class__.creative1 or not self.__class__.creative2 or
        not self.__class__.creative3):
      company = {
          'name': 'Company #%s' % Utils.GetUniqueName(),
          'type': 'ADVERTISER'
      }
      advertiser_id = client.GetCompanyService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateCompany(company)[0]['id']
      creatives = []
      for i in xrange(3):
        creatives.append({
            'type': 'ImageCreative',
            'name': 'Image Creative #%s' % Utils.GetUniqueName(),
            'advertiserId': advertiser_id,
            'destinationUrl': 'http://google.com',
            'imageName': 'image.jpg',
            'imageByteArray': self.__class__.IMAGE_DATA,
            'size': {'width': '300', 'height': '250'}
        })
      creatives = client.GetCreativeService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateCreatives(creatives)
      self.__class__.creative1 = creatives[0]
      self.__class__.creative2 = creatives[1]
      self.__class__.creative3 = creatives[2]

    # NOTE(review): advertiser_id is only bound inside the creatives branch
    # above; if the creatives are already cached but line_item_id is not,
    # the order dict below raises NameError — confirm the two fixtures are
    # always created together.
    if not self.__class__.line_item_id:
      filter_statement = {'query': 'ORDER BY name LIMIT 500'}
      user_service = client.GetUserService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
      users = user_service.GetUsersByStatement(filter_statement)
      trafficker_id = '0'
      for user in users[0]['results']:
        if user['roleName'] in ('Trafficker',):
          trafficker_id = user['id']
          break
      order = {
          'advertiserId': advertiser_id,
          'currencyCode': 'USD',
          'name': 'Order #%s' % Utils.GetUniqueName(),
          'traffickerId': trafficker_id
      }
      order_id = client.GetOrderService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateOrder(order)[0]['id']
      inventory_service = client.GetInventoryService(
          self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
      network_service = client.GetNetworkService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY)
      root_ad_unit_id = \
          network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
      ad_unit = {
          'name': 'Ad_Unit_%s' % Utils.GetUniqueName(),
          'parentId': root_ad_unit_id,
          'adUnitSizes': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              }
          ],
          'description': 'Ad unit description.',
          'targetWindow': 'BLANK'
      }
      ad_unit_id = inventory_service.CreateAdUnit(ad_unit)[0]['id']
      line_item = {
          'name': 'Line item #%s' % Utils.GetUniqueName(),
          'orderId': order_id,
          'targeting': {
              'inventoryTargeting': {
                  'targetedAdUnitIds': [ad_unit_id]
              }
          },
          'creativePlaceholders': [
              {
                  'size': {
                      'width': '300',
                      'height': '250'
                  }
              },
              {
                  'size': {
                      'width': '120',
                      'height': '600'
                  }
              }
          ],
          'lineItemType': 'STANDARD',
          'startDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '1'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'endDateTime': {
              'date': {
                  'year': str(date.today().year + 1),
                  'month': '9',
                  'day': '30'
              },
              'hour': '0',
              'minute': '0',
              'second': '0'
          },
          'costType': 'CPM',
          'costPerUnit': {
              'currencyCode': 'USD',
              'microAmount': '2000000'
          },
          'creativeRotationType': 'EVEN',
          'discountType': 'PERCENTAGE',
          'unitsBought': '500000',
          'unitType': 'IMPRESSIONS'
      }
      self.__class__.line_item_id = client.GetLineItemService(
          self.__class__.SERVER, self.__class__.VERSION,
          HTTP_PROXY).CreateLineItem(line_item)[0]['id']

  def testCreateLineItemCreativeAssociation(self):
    """Test whether we can create a line item creative association."""
    lica = {
        'creativeId': self.__class__.creative1['id'],
        'lineItemId': self.__class__.line_item_id
    }
    self.assert_(isinstance(
        self.__class__.service.CreateLineItemCreativeAssociation(lica), tuple))

  def testCreateLineItemCreativeAssociations(self):
    """Test whether we can create a list of line item creative associations."""
    licas = [
        {
            'creativeId': self.__class__.creative2['id'],
            'lineItemId': self.__class__.line_item_id
        },
        {
            'creativeId': self.__class__.creative3['id'],
            'lineItemId': self.__class__.line_item_id
        }
    ]
    licas = self.__class__.service.CreateLineItemCreativeAssociations(licas)
    self.__class__.lica1 = licas[0]
    self.__class__.lica2 = licas[1]
    self.assert_(isinstance(licas, tuple))

  def testGetLineItemCreativeAssociation(self):
    """Test whether we can fetch an existing line item creative association."""
    self.assert_(isinstance(
        self.__class__.service.GetLineItemCreativeAssociation(
            self.__class__.line_item_id, self.__class__.creative2['id']),
        tuple))

  def testGetLineItemCreativeAssociationsByStatement(self):
    """Test whether we can fetch a list of existing line item creative
    associations that match given statement."""
    if not self.__class__.lica1:
      self.testCreateLineItemCreativeAssociations()
    filter_statement = {'query': 'WHERE lineItemId = \'%s\' LIMIT 500'
                        % self.__class__.line_item_id}
    self.assert_(isinstance(
        self.__class__.service.GetLineItemCreativeAssociationsByStatement(
            filter_statement), tuple))

  def testPerformLineItemCreativeAssociationAction(self):
    """Test whether we can deactivate a line item create association."""
    if not self.__class__.lica1:
      self.testCreateLineItemCreativeAssociations()
    action = {'type': 'DeactivateLineItemCreativeAssociations'}
    filter_statement = {'query': 'WHERE lineItemId = \'%s\' AND '
                        'status = \'ACTIVE\'' % self.__class__.line_item_id}
    self.assert_(isinstance(
        self.__class__.service.PerformLineItemCreativeAssociationAction(
            action, filter_statement), tuple))

  def testUpdateLineItemCreativeAssociation(self):
    """Test whether we can update a line item creative association."""
    if not self.__class__.lica1:
      self.testCreateLineItemCreativeAssociations()
    destination_url = 'http://news.google.com'
    self.__class__.lica1['destinationUrl'] = destination_url
    lica = self.__class__.service.UpdateLineItemCreativeAssociation(
        self.__class__.lica1)
    self.assert_(isinstance(lica, tuple))
    self.assertEqual(lica[0]['destinationUrl'], destination_url)

  def testUpdateLineItemCreativeAssociations(self):
    """Test whether we can update a list of line item creative associations."""
    if not self.__class__.lica1 or not self.__class__.lica2:
      self.testCreateLineItemCreativeAssociations()
    destination_url = 'http://news.google.com'
    self.__class__.lica1['destinationUrl'] = destination_url
    self.__class__.lica2['destinationUrl'] = destination_url
    licas = self.__class__.service.UpdateLineItemCreativeAssociations(
        [self.__class__.lica1, self.__class__.lica2])
    self.assert_(isinstance(licas, tuple))
    for lica in licas:
      self.assertEqual(lica['destinationUrl'], destination_url)
"""Set up test suite using v201108.
Returns:
TestSuite test suite using v201108.
"""
suite = unittest.TestSuite()
suite.addTests(unittest.makeSuite(LicaServiceTestV201108))
return suite
def makeTestSuiteV201111():
  """Set up test suite using v201111.

  Returns:
    TestSuite test suite using v201111.
  """
  test_suite = unittest.TestSuite()
  test_suite.addTests(unittest.makeSuite(LicaServiceTestV201111))
  return test_suite
if __name__ == '__main__':
  # Build one suite per API version that is enabled in the test config.
  suites = []
  if TEST_VERSION_V201108:
    suites.append(makeTestSuiteV201108())
  if TEST_VERSION_V201111:
    suites.append(makeTestSuiteV201111())
  if suites:
    # 'alltests' is looked up as a module attribute by unittest.main().
    alltests = unittest.TestSuite(suites)
    unittest.main(defaultTest='alltests')
|
""" a2c algorithm implementation.
"""
import os
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.distributions.categorical import Categorical
# Work around "duplicate OpenMP runtime" aborts seen with some torch installs.
os.environ['KMP_DUPLICATE_LIB_OK']='True'
# Run on GPU when available, otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class Actor(nn.Module):
""" Actor Neural Network class.
Args:
input_size: Network input dimension.
hidden_size: Hidden layer dimension.
output_size: Network output dimension.
"""
def __init__(self, input_size, hidden_size, output_size):
super().__init__()
self.fc1 = nn.Linear(input_size, hidden_size)
self.fc2 = nn.Linear(hidden_size, hidden_size)
self.fc3 = nn.Linear(hidden_size, hidden_size)
self.fc4 = nn.Linear(hidden_size, output_size)
def forward(self, state):
""" Forward propogation.
Args:
state: Current state.
Returns:
distibution: Probability distribution of actions.
"""
out = F.relu(self.fc1(state))
out = F.relu(self.fc2(out))
out = F.relu(self.fc3(out))
out = self.fc4(out)
distribution = F.softmax(out, dim=-1)
distribution = Categorical(distribution)
return distribution
class Critic(nn.Module):
""" Critic Neural Network class.
Args:
input_size: Network input dimension.
hidden_size: Hidden layer dimension.
output_size: Network output dimension.
"""
def __init__(self, input_size, hidden_size, output_size=1):
super().__init__()
self.fc1 = nn.Linear(input_size, hidden_size)
self.fc2 = nn.Linear(hidden_size, hidden_size)
self.fc3 = nn.Linear(hidden_size, hidden_size)
self.fc4 = nn.Linear(hidden_size, output_size)
def forward(self, state):
""" Forward propogation.
Args:
state: Current state.
Returns:
value: Estimated state value.
"""
out = F.relu(self.fc1(state))
out = F.relu(self.fc2(out))
out = F.relu(self.fc3(out))
value = self.fc4(out)
return value
class ActorCritic():
    """ Actor Critic agent.

    Args:
        env: Environment to solve.
        episodes: Max number of training episodes.
        max_score: Maximum score possible in the environment.
        hidden_size: Hidden layer dimension.
        gamma: Discount factor for future rewards.
        save: Saves the network weights if set to True.
    """
    def __init__(self, env, episodes, max_score, hidden_size=256, gamma=0.99, save=False):
        input_size = env.observation_space.shape[0]
        output_size = env.action_space.n
        self.env = env
        self.actor = Actor(input_size, hidden_size, output_size)
        self.actor.to(device)
        self.critic = Critic(input_size, hidden_size)
        self.critic.to(device)
        self.actor_optim = optim.Adam(self.actor.parameters())
        self.critic_optim = optim.Adam(self.critic.parameters())
        self.episodes = episodes
        self.max_score = max_score
        self.gamma = gamma
        self.save = save
        # Per-episode rollout memory; cleared after each optimization step.
        self.log_probs = []
        self.values = []
        self.rewards = []

    def get_returns(self):
        """ Calculates discounted returns from the episode rewards.

        Returns:
            returns: Tensor of G_t = r_t + gamma * G_{t+1} for every step.
        """
        returns = []
        R = 0
        # BUG FIX: the previous version iterated range(len(rewards) - 1) over
        # a list seeded with a trailing 0, silently dropping the episode's
        # final reward.  Fold over all rewards, newest first.
        for reward in reversed(self.rewards):
            R = reward + self.gamma * R
            returns.append(R)
        returns.reverse()
        # Returns are optimization targets; they must not require gradients
        # themselves (the previous requires_grad=True only wasted backprop).
        return torch.tensor(returns, dtype=torch.float).to(device)

    def get_action_value(self, state):
        """ Gets chosen action, log_probability and state_value from the 2 networks.

        Args:
            state: current state.
        Returns:
            action: Chosen action.
            log_prob: Log probability of the chosen action.
            state_value: Estimated value of the state.
        """
        state = torch.tensor([state], dtype=torch.float).to(device)
        self.actor.eval()
        self.critic.eval()
        distribution = self.actor(state)
        action = distribution.sample()
        log_prob = torch.squeeze(distribution.log_prob(action))
        action = torch.squeeze(action).item()
        state_value = self.critic(state)
        state_value = torch.squeeze(state_value)
        return action, log_prob, state_value

    def save_to_memory(self, log_prob, value, reward):
        """ Saves log_probs, values and rewards to memory for optimization.

        Args:
            log_prob: Log-probability of the chosen action.
            value: Estimated value of the state.
            reward: reward of the action.
        """
        self.log_probs.append(log_prob)
        self.values.append(value)
        self.rewards.append(reward)

    def clear_memory(self):
        """ Resets memory for the new episode. """
        self.log_probs.clear()
        self.values.clear()
        self.rewards.clear()

    def save_weights(self):
        """ Saves network weights under model_weights/. """
        # ROBUSTNESS: create the target directory if it does not exist yet,
        # instead of failing with FileNotFoundError on first save.
        os.makedirs("model_weights", exist_ok=True)
        torch.save(self.actor.state_dict(), "model_weights/actor.pth")
        torch.save(self.critic.state_dict(), "model_weights/critic.pth")

    def optimize(self):
        """ Updates both networks' weights from the stored episode. """
        returns = self.get_returns()
        values = torch.stack(self.values).to(device)
        # Advantage is treated as a constant for the actor update.
        advantage = (returns - values).detach()
        log_probs = torch.stack(self.log_probs).to(device)
        actor_loss = (-log_probs * advantage).sum()
        # Conventional (input, target) argument order; MSE is symmetric so
        # the optimization is unchanged.
        critic_loss = nn.MSELoss()(values, returns)

        self.critic_optim.zero_grad()
        critic_loss.backward()
        self.critic_optim.step()

        self.actor_optim.zero_grad()
        actor_loss.backward()
        self.actor_optim.step()

    def train(self):
        """ Main training loop. Plays N episodes and optimizes networks after each episode.

        Returns:
            best_score: Best score obtained.
            ep: The number of episodes played.
            game_rew_hist: List of rewards obtained at each episode.
        """
        game_rew_hist = []
        best_score = -np.inf  # np.Inf was removed in NumPy 2.0
        for ep in range(self.episodes):
            episode_rew = 0
            state = self.env.reset()
            done = False
            while not done:
                action, log_prob, value = self.get_action_value(state)
                state_next, reward, done, _ = self.env.step(action)
                episode_rew += reward
                self.save_to_memory(log_prob, value, reward)
                state = state_next
            self.optimize()
            self.clear_memory()
            game_rew_hist.append(episode_rew)
            if ep % 10 == 0:
                avg_score = np.mean(game_rew_hist[-10:])
                print(f"Episode: {ep + 1} \t ¦ \t Reward {avg_score}")
                if avg_score > best_score:
                    best_score = avg_score
                    if self.save:
                        self.save_weights()
                # BUG FIX: float equality (==) almost never fires; treat any
                # average at or above the target as solved.
                if avg_score >= self.max_score:
                    print(f"Maximum score of {self.max_score} reached")
                    break
        return best_score, ep, game_rew_hist
|
from experiment.generic_p1 import P1GetExptRecordingInfo
from azure_service_bus.send_consume import AzureReceiver
class P1AzureGetExptRecordingInfoReceiver(AzureReceiver, P1GetExptRecordingInfo):
    """Azure Service Bus receiver for P1 experiment-recording-info requests.

    Pure mixin composition: all behavior comes from the two base classes.
    """
    pass
|
# -*- coding: utf-8
from __future__ import unicode_literals, absolute_import
from django.conf.urls import url, include
from heavy_celery.urls import urlpatterns as heavy_celery_urls
# Mount every heavy_celery route at the site root, under the
# 'heavy_celery' URL namespace.
urlpatterns = [
    url(r'^', include((heavy_celery_urls, 'heavy_celery'), namespace='heavy_celery')),
]
|
"""
Copyright (C) 2021 Alyssa Rosenzweig <alyssa@rosenzweig.io>
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice (including the next
paragraph) shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Global registries, populated by op() / immediate() / enum() below.
opcodes = {}
immediates = {}
enums = {}
class Opcode(object):
    """Descriptor for one IR opcode, registered via op().

    Holds the operand counts (dests/srcs), the immediate operands the
    instruction carries, optimizer flags (is_float, can_eliminate) and the
    16-/32-bit Encoding variants (either may be None).
    """
    def __init__(self, name, dests, srcs, imms, is_float, can_eliminate, encoding_16, encoding_32):
        self.name = name
        self.dests = dests
        self.srcs = srcs
        self.imms = imms
        self.is_float = is_float
        self.can_eliminate = can_eliminate
        self.encoding_16 = encoding_16
        self.encoding_32 = encoding_32
class Immediate(object):
    """An immediate operand kind: its name and the C type used to hold it."""
    def __init__(self, name, ctype):
        self.name = name
        self.ctype = ctype
class Encoding(object):
    """Binary encoding of an instruction.

    ``description`` is a 4-tuple (exact, mask, length_short, length_long):
    exact bits, significant-bit mask, and the short/long byte lengths.
    A None length_long means the instruction has a single, fixed length.
    """
    def __init__(self, description):
        (exact, mask, length_short, length_long) = description
        # Convenience
        if length_long is None:
            length_long = length_short
        self.exact = exact
        self.mask = mask
        self.length_short = length_short
        # An extensible instruction can grow from the short to the long form.
        self.extensible = length_short != length_long
        if self.extensible:
            # Extension adds exactly one word: 4 bytes for >8-byte forms, else 2.
            assert(length_long == length_short + (4 if length_short > 8 else 2))
def op(name, encoding_32, dests = 1, srcs = 0, imms = None, is_float = False, can_eliminate = True, encoding_16 = None):
    """Register an Opcode in the global ``opcodes`` table.

    ``encoding_16``/``encoding_32`` are raw 4-tuples (see Encoding) or None.
    BUG FIX: ``imms`` previously defaulted to a mutable ``[]`` shared across
    every call that omitted it; a fresh list is now created per call.
    """
    imms = [] if imms is None else imms
    encoding_16 = Encoding(encoding_16) if encoding_16 is not None else None
    encoding_32 = Encoding(encoding_32) if encoding_32 is not None else None
    opcodes[name] = Opcode(name, dests, srcs, imms, is_float, can_eliminate, encoding_16, encoding_32)
def immediate(name, ctype = "uint32_t"):
    """Create an Immediate, record it in ``immediates``, and return it."""
    entry = Immediate(name, ctype)
    immediates[name] = entry
    return entry
def enum(name, value_dict):
    """Register an enum's value map and return a matching Immediate.

    The immediate's C type is derived from the enum name (``enum agx_<name>``).
    """
    enums[name] = value_dict
    ctype = "enum agx_" + name
    return immediate(name, ctype)
# Shorthand used throughout the encoding tuples below.
L = (1 << 15)  # "long" bit set in the exact/mask encodings
_ = None       # placeholder for "no long length" / "no encoding"

FORMAT = immediate("format", "enum agx_format")
IMM = immediate("imm")
WRITEOUT = immediate("writeout")
INDEX = immediate("index")
COMPONENT = immediate("component")
CHANNELS = immediate("channels")
TRUTH_TABLE = immediate("truth_table")
ROUND = immediate("round")
SHIFT = immediate("shift")
MASK = immediate("mask")
BFI_MASK = immediate("bfi_mask")
LOD_MODE = immediate("lod_mode", "enum agx_lod_mode")
DIM = immediate("dim", "enum agx_dim")
SCOREBOARD = immediate("scoreboard")
ICOND = immediate("icond")
FCOND = immediate("fcond")
NEST = immediate("nest")
INVERT_COND = immediate("invert_cond")
# BUG FIX: a second, redundant ``NEST = immediate("nest")`` appeared here,
# creating a duplicate Immediate that silently replaced the first in the
# ``immediates`` registry; the duplicate has been removed.
TARGET = immediate("target", "agx_block *")
PERSPECTIVE = immediate("perspective", "bool")
# Special registers readable via get_sr; keys are hardware register numbers.
SR = enum("sr", {
    0: 'threadgroup_position_in_grid.x',
    1: 'threadgroup_position_in_grid.y',
    2: 'threadgroup_position_in_grid.z',
    4: 'threads_per_threadgroup.x',
    5: 'threads_per_threadgroup.y',
    6: 'threads_per_threadgroup.z',
    8: 'dispatch_threads_per_threadgroup.x',
    9: 'dispatch_threads_per_threadgroup.y',
    10: 'dispatch_threads_per_threadgroup.z',
    48: 'thread_position_in_threadgroup.x',
    49: 'thread_position_in_threadgroup.y',
    50: 'thread_position_in_threadgroup.z',
    51: 'thread_index_in_threadgroup',
    52: 'thread_index_in_subgroup',
    53: 'subgroup_index_in_threadgroup',
    56: 'active_thread_index_in_quad',
    58: 'active_thread_index_in_subgroup',
    62: 'backfacing',
    80: 'thread_position_in_grid.x',
    81: 'thread_position_in_grid.y',
    82: 'thread_position_in_grid.z',
})
# Unary float ops place their 14-bit sub-opcode at bit 28 of the encoding.
FUNOP = lambda x: (x << 28)
# Mask covering the full sub-opcode field (all 14 bits set, shifted).
FUNOP_MASK = FUNOP((1 << 14) - 1)
def funop(name, opcode):
    """Register a unary floating-point ALU op with sub-opcode ``opcode``.

    Consistency fix: reuse FUNOP/FUNOP_MASK (defined just above) instead of
    re-inlining the identical shift expressions; the encoding is unchanged.
    """
    op(name, (0x0A | L | FUNOP(opcode),
              0x3F | L | FUNOP_MASK, 6, _),
       srcs = 1, is_float = True)
# Listing of opcodes
# Unary floating-point ops; the binary literal is the 14-bit FUNOP sub-opcode.
funop("floor", 0b000000)
funop("srsqrt", 0b000001)
funop("dfdx", 0b000100)
funop("dfdy", 0b000110)
funop("rcp", 0b001000)
funop("rsqrt", 0b001001)
funop("sin_pt_1", 0b001010)
funop("log2", 0b001100)
funop("exp2", 0b001101)
funop("sin_pt_2", 0b001110)
funop("ceil", 0b010000)
funop("trunc", 0b100000)
funop("roundeven", 0b110000)
# ALU ops. Encoding tuples are (exact, mask, length_short, length_long);
# see the Encoding class above.
op("fadd",
   encoding_16 = (0x26 | L, 0x3F | L, 6, _),
   encoding_32 = (0x2A | L, 0x3F | L, 6, _),
   srcs = 2, is_float = True)
op("fma",
   encoding_16 = (0x36, 0x3F, 6, 8),
   encoding_32 = (0x3A, 0x3F, 6, 8),
   srcs = 3, is_float = True)
op("fmul",
   encoding_16 = ((0x16 | L), (0x3F | L), 6, _),
   encoding_32 = ((0x1A | L), (0x3F | L), 6, _),
   srcs = 2, is_float = True)
op("mov_imm",
   encoding_32 = (0x62, 0xFF, 6, 8),
   encoding_16 = (0x62, 0xFF, 4, 6),
   imms = [IMM])
# Integer ops: iadd/imad carry a shift immediate applied to the last source.
op("iadd",
   encoding_32 = (0x0E, 0x3F | L, 8, _),
   srcs = 2, imms = [SHIFT])
op("imad",
   encoding_32 = (0x1E, 0x3F | L, 8, _),
   srcs = 3, imms = [SHIFT])
op("bfi",
   encoding_32 = (0x2E, 0x7F | (0x3 << 26), 8, _),
   srcs = 3, imms = [BFI_MASK])
op("bfeil",
   encoding_32 = (0x2E | L, 0x7F | L | (0x3 << 26), 8, _),
   srcs = 3, imms = [BFI_MASK])
op("asr",
   encoding_32 = (0x2E | L | (0x1 << 26), 0x7F | L | (0x3 << 26), 8, _),
   srcs = 2)
# Compare-and-select: sources are (a, b, true-value, false-value).
op("icmpsel",
   encoding_32 = (0x12, 0x7F, 8, 10),
   srcs = 4, imms = [ICOND])
op("fcmpsel",
   encoding_32 = (0x02, 0x7F, 8, 10),
   srcs = 4, imms = [FCOND])
# Memory, texture and control-flow ops.
# sources are coordinates, LOD, texture, sampler, offset
# TODO: anything else?
op("texture_sample",
   encoding_32 = (0x32, 0x7F, 8, 10), # XXX WRONG SIZE
   srcs = 5, imms = [DIM, LOD_MODE, MASK, SCOREBOARD])
# sources are base, index
op("device_load",
   encoding_32 = (0x05, 0x7F, 6, 8),
   srcs = 2, imms = [FORMAT, MASK, SCOREBOARD])
op("wait", (0x38, 0xFF, 2, _), dests = 0,
   can_eliminate = False, imms = [SCOREBOARD])
op("get_sr", (0x72, 0x7F | L, 4, _), dests = 1, imms = [SR])
op("sample_mask", (0x7fc1, 0xffff, 6, _), dests = 0, srcs = 1, can_eliminate = False)
# Essentially same encoding
op("ld_tile", (0x49, 0x7F, 8, _), dests = 1, srcs = 0,
   can_eliminate = False, imms = [FORMAT])
op("st_tile", (0x09, 0x7F, 8, _), dests = 0, srcs = 1,
   can_eliminate = False, imms = [FORMAT])
# Conditional jumps on the execution mask (any/none lanes active).
for (name, exact) in [("any", 0xC000), ("none", 0xC200)]:
    op("jmp_exec_" + name, (exact, (1 << 16) - 1, 6, _), dests = 0, srcs = 0,
       can_eliminate = False, imms = [TARGET])
# TODO: model implicit r0l destinations
op("pop_exec", (0x52 | (0x3 << 9), ((1 << 48) - 1) ^ (0x3 << 7) ^ (0x3 << 11), 6, _),
   dests = 0, srcs = 0, can_eliminate = False, imms = [NEST])
# Generate {if,else,while}_{i,f}cmp variants; float forms leave the source
# modifier bits out of the mask.
for is_float in [False, True]:
    mod_mask = 0 if is_float else (0x3 << 26) | (0x3 << 38)
    for (cf, cf_op) in [("if", 0), ("else", 1), ("while", 2)]:
        name = "{}_{}cmp".format(cf, "f" if is_float else "i")
        exact = 0x42 | (0x0 if is_float else 0x10) | (cf_op << 9)
        mask = 0x7F | (0x3 << 9) | mod_mask | (0x3 << 44)
        imms = [NEST, FCOND if is_float else ICOND, INVERT_COND]
        op(name, (exact, mask, 6, _), dests = 0, srcs = 2, can_eliminate = False,
           imms = imms, is_float = is_float)
op("bitop", (0x7E, 0x7F, 6, _), srcs = 2, imms = [TRUTH_TABLE])
op("convert", (0x3E | L, 0x7F | L | (0x3 << 38), 6, _), srcs = 2, imms = [ROUND])
op("ld_vary", (0x21, 0xBF, 8, _), srcs = 1, imms = [CHANNELS, PERSPECTIVE])
op("ld_vary_flat", (0xA1, 0xBF, 8, _), srcs = 1, imms = [CHANNELS])
op("st_vary", None, dests = 0, srcs = 2, can_eliminate = False)
op("stop", (0x88, 0xFFFF, 2, _), dests = 0, can_eliminate = False)
op("trap", (0x08, 0xFFFF, 2, _), dests = 0, can_eliminate = False)
op("writeout", (0x48, 0xFF, 4, _), dests = 0, imms = [WRITEOUT], can_eliminate = False)
# Pseudo-ops (no hardware encoding) used before register allocation.
op("p_combine", _, srcs = 4)
op("p_extract", _, srcs = 1, imms = [COMPONENT])
|
import sys
import math
import time
import logging
import numpy as np
import cv2
import pyrealsense2 as rs
from shapely.geometry import Polygon, JOIN_STYLE
from scipy import spatial
from descartes import PolygonPatch
# Unit conversion factors: square meters -> square centimeters, cm -> m.
M2TOCM2 = 10000
CMTOM = 0.01
# USB product ids of RealSense D400-series devices that support advanced mode.
DS5_product_ids = ["0AD1", "0AD2", "0AD3", "0AD4", "0AD5", "0AF6",
                   "0AFE", "0AFF", "0B00", "0B01", "0B03", "0B07", "0B3A"]
# RGB color used for obstacle outlines.
ORANGE = [249, 115, 6]
# rotate_points, align_vector_to_zaxis, get_downsampled_patch, calculate_plane_normal, filter_zero
def find_device_that_supports_advanced_mode(ctx, devices):
    """Return the first device in *devices* with a known DS5 product id, else None.

    *ctx* is accepted for interface compatibility but not used here.
    """
    for candidate in devices:
        if not candidate.supports(rs.camera_info.product_id):
            continue
        if str(candidate.get_info(rs.camera_info.product_id)) not in DS5_product_ids:
            continue
        if candidate.supports(rs.camera_info.name):
            logging.info("Found device that supports advanced mode: %r", candidate.get_info(rs.camera_info.name))
        return candidate
    return None
def enable_advanced_mode(advnc_mode, ctx=None, devices=None):
    """Attempts to enable advanced mode, re-acquiring the device after reconnects.

    Arguments:
        advnc_mode -- rs.rs400_advanced_mode handle to enable
    Keyword Arguments:
        ctx {rs.context} -- context used to re-enumerate devices after the
            reconnect (default: {None})
        devices -- fallback device list when no context is given (default: {None})
    Returns:
        the (possibly re-created) advanced-mode handle
    """
    # Loop until we successfully enable advanced mode
    while not advnc_mode.is_enabled():
        logging.info("Trying to enable advanced mode...")
        advnc_mode.toggle_advanced_mode(True)
        # At this point the device will disconnect and re-connect.
        logging.info("Device disconnecting. Sleeping for 5 seconds...")
        time.sleep(5)
        # The old device handle is now invalid, so look the device up again.
        # BUG FIX: the original called find_device_that_supports_advanced_mode()
        # with no arguments, which always raised TypeError here. Prefer
        # re-enumerating through the context because the reconnected device
        # gets a fresh handle (assumes rs.context.query_devices() -- TODO confirm).
        current_devices = ctx.query_devices() if ctx is not None else (devices or [])
        dev = find_device_that_supports_advanced_mode(ctx, current_devices)
        if dev is None:
            logging.error("Device did not reconnect! Exiting")
            sys.exit(1)
        advnc_mode = rs.rs400_advanced_mode(dev)
        logging.info("Advanced mode is %r", "enabled" if advnc_mode.is_enabled() else "disabled")
    return advnc_mode
def load_setting_file(ctx, devices, setting_file):
    """Loads a setting file
    Arguments:
        ctx {ctx} -- RS context
        devices {device} -- Realsense device
        setting_file {str} -- Path to settings file
    Returns:
        bool -- True if successful, False otherwise
    """
    dev = find_device_that_supports_advanced_mode(ctx, devices)
    if dev is None:
        logging.error("No device supports the advanced mode! Can not upload settings file: %r", setting_file)
        # Fix: return False (still falsy) instead of None so the return type
        # matches the documented bool contract.
        return False
    advnc_mode = rs.rs400_advanced_mode(dev)
    logging.info("Advanced mode is %r", "enabled" if advnc_mode.is_enabled() else "disabled")
    advnc_mode = enable_advanced_mode(advnc_mode)
    # Read settings file as a string and push it to the device.
    with open(setting_file, 'r') as file:
        settings_json_str = file.read()
    advnc_mode.load_json(settings_json_str)
    return True
def get_intrinsics(pipeline, stream=rs.stream.color):
    """Get intrinsics for the specified stream.

    Arguments:
        pipeline {rs::pipeline} -- The pipeline that has been configured
    Keyword Arguments:
        stream {rs::stream::type} -- Stream Type (default: {rs.stream.color})
    Returns:
        rs::intrinsics -- The intrinsics object, or None if no profile matches
    """
    for profile in pipeline.get_active_profile().get_streams():
        if profile.stream_type() == stream:
            return profile.as_video_stream_profile().get_intrinsics()
    return None
def create_projection_matrix(intrinsics):
    """Build the 3x4 pinhole projection matrix from camera intrinsics."""
    fx, fy = intrinsics.fx, intrinsics.fy
    ppx, ppy = intrinsics.ppx, intrinsics.ppy
    return np.array([[fx, 0, ppx, 0],
                     [0, fy, ppy, 0],
                     [0, 0, 1, 0]])
def project_points_img(points, proj_mat, width, height):
    """Projects homogeneous points into image space given a projection matrix.

    Arguments:
        points {ndarray} -- 4xN homogeneous 3D points
        proj_mat {ndarray, 3X4} -- Projection Matrix
        width {int} -- width of image
        height {int} -- height of image
    Returns:
        ndarray -- Nx2 integer pixel coordinates, clipped to the image bounds
    """
    pixels = proj_mat.dot(points)
    # Perspective divide, then truncate to integer pixel coordinates.
    # BUG FIX: np.int was deprecated and removed in NumPy 1.24; the builtin
    # int produces the same platform-default integer dtype.
    pixels = np.divide(pixels[:2, :], pixels[2, :]).transpose().astype(int)
    # Clamp (rather than drop) pixels that fall outside the image.
    pixels[:, 0] = np.clip(pixels[:, 0], 0, width)
    pixels[:, 1] = np.clip(pixels[:, 1], 0, height)
    return pixels
def plot_shapely_polys(polygons, ax, color='green'):
    """Draw the outline of each shapely polygon onto the matplotlib axes."""
    for shape in polygons:
        patch = PolygonPatch(shape, ec=color, fill=False, linewidth=2)
        ax.add_patch(patch)
def get_pix_coordinates(pts, proj_mat, w, h):
    """Get pixel coordinates for a 3xN point cloud.

    Arguments:
        pts {ndarray} -- 3D point cloud, 3xN
        proj_mat {ndarray} -- 3x4 projection matrix
        w {int} -- width
        h {int} -- height
    Returns:
        ndarray -- Nx2 pixel coordinates
    """
    # Append the homogeneous coordinate row (all ones) below the 3xN points.
    homogeneous = np.vstack((pts, np.ones((1, pts.shape[1]))))
    return project_points_img(homogeneous, proj_mat, w, h)
def plot_opencv_polys(polygons, color_image, proj_mat, rot_mat, w, h, color=(0, 255, 0), thickness=2):
    """Draw each (polygon, height) pair into *color_image* via cv2.polylines.

    When *rot_mat* is given, the polygons are 2D outlines in a rotated
    (flattened-plane) frame at elevation *height*; they are lifted to 3D and
    rotated back into the camera frame before projection. Otherwise the
    polygon exterior is assumed to already carry 3D camera-frame coordinates.
    """
    for i, (poly, height) in enumerate(polygons):
        # Get 2D polygons and assign z component the height value of extracted plane
        if rot_mat is not None:
            pts = np.array(poly.exterior.coords)[:,:2] # NX2
            pts = np.column_stack((pts, np.ones((pts.shape[0])) * height)) # NX3
            # Transform flat plane coordinate system to original cordinate system of depth frame
            pts = pts.transpose() # 3XN
            pts = np.linalg.inv(rot_mat) @ pts
        else:
            pts = np.transpose(np.array(poly.exterior.coords)[:,:3]) # NX3
        # np.savetxt(f"polygon_{i}_cameraframe.txt", pts.transpose())
        # Project coordinates to image space
        pix_coords = get_pix_coordinates(pts, proj_mat, w, h)
        # cv2.polylines expects an (N, 1, 2) integer point array per contour.
        pix_coords = pix_coords.reshape((-1, 1, 2))
        cv2.polylines(color_image, [pix_coords], True, color, thickness=thickness)
def plot_planes_and_obstacles(planes, obstacles, proj_mat, rot_mat, color_image, config, thickness=2):
    """Plots the planes and obstacles (3D polygons) into the color image.

    Arguments:
        planes {list(Polygons)} -- List of Shapely Polygon with height tuples
        obstacles {list[(polygon, height)]} -- List of tuples with polygon with height
        proj_mat {ndarray} -- Projection Matrix
        rot_mat {ndarray} -- Rotation Matrix
        color_image {ndarray} -- Color Image
        config {dict} -- Configuration (provides the color image dimensions)
    """
    width = config['color']['width']
    height = config['color']['height']
    # Planes in green, obstacles in the module-level ORANGE.
    plot_opencv_polys(planes, color_image, proj_mat, rot_mat, width,
                      height, color=(0, 255, 0), thickness=thickness)
    plot_opencv_polys(obstacles, color_image, proj_mat, rot_mat, width,
                      height, color=ORANGE, thickness=thickness)
def plot_polygons(polygons, points, ax):
    """Draw indexed polygons (shell + holes) onto *ax* and fit the view to *points*.

    NOTE(review): ``get_point`` is not defined or imported in this module's
    visible code -- presumably it maps a point index ``pi`` into *points*;
    confirm it is provided elsewhere in this file/package.
    """
    for poly in polygons:
        # Shell outline in green.
        shell_coords = [get_point(pi, points) for pi in poly.shell]
        outline = Polygon(shell=shell_coords)
        outlinePatch = PolygonPatch(outline, ec='green', fill=False, linewidth=2)
        ax.add_patch(outlinePatch)
        # Hole outlines in orange.
        for hole_poly in poly.holes:
            shell_coords = [get_point(pi, points) for pi in hole_poly]
            outline = Polygon(shell=shell_coords)
            outlinePatch = PolygonPatch(outline, ec='orange', fill=False, linewidth=2)
            ax.add_patch(outlinePatch)
    # Fit the axes to the bounding box of the full point set.
    ax.set_xlim(points[:, 0].min(), points[:, 0].max())
    ax.set_ylim(points[:, 1].min(), points[:, 1].max())
|
#!/usr/bin/env python
from tools.load import LoadMatrix
# Load dense train/test matrices from disk; they feed the example below.
lm=LoadMatrix()
traindat = lm.load_numbers('../data/fm_train_real.dat')
testdat = lm.load_numbers('../data/fm_test_real.dat')
# Each entry: (train, test, size_cache, degree, c) for kernel_sparse_poly.
parameter_list = [[traindat,testdat,10,3,1.0],[traindat,testdat,10,4,1.0]]
def kernel_sparse_poly (fm_train_real=traindat,fm_test_real=testdat,
	size_cache=10,degree=3,c=1.0):
    """Compute train/test polynomial-kernel matrices over sparse features."""
    from shogun import SparseRealFeatures
    from shogun import PolyKernel

    feats_train = SparseRealFeatures(fm_train_real)
    feats_test = SparseRealFeatures(fm_test_real)

    # Train kernel: train vs train.
    kernel = PolyKernel(feats_train, feats_train, size_cache,
                        degree, c)
    km_train = kernel.get_kernel_matrix()

    # Re-initialize the same kernel for train vs test.
    kernel.init(feats_train, feats_test)
    km_test = kernel.get_kernel_matrix()
    return km_train, km_test, kernel
# Run the first parameter set when executed as a script.
if __name__=='__main__':
    print('SparsePoly')
    kernel_sparse_poly(*parameter_list[0])
|
"""
@author: Yuhao Cheng
@contact: yuhao.cheng[at]outlook.com
"""
import torch
import torch.nn as nn
from collections import OrderedDict
import torchsnooper
from ..model_registry import META_ARCH_REGISTRY
from pyanomaly.networks.meta.base.commonness import (
PixelDiscriminator,
DoubleConv,
Down,
Up,
OutConv,
BasicConv2d
)
__all__ = ['AnoPredGeneratorUnet']
@META_ARCH_REGISTRY.register()
class AnoPredGeneratorUnet(nn.Module):
    """U-Net style generator: 4 stacked RGB frames in, 1 predicted RGB frame out.

    Encoder: DoubleConv + three Down stages (64 -> 128 -> 256 -> 512 channels).
    Decoder: three Up stages with skip connections, then a 3x3 conv head.
    Output is squashed to [-1, 1] with tanh.
    """
    def __init__(self, cfg, bilinear=False):
        # NOTE(review): ``cfg`` is accepted but not read in this method.
        super(AnoPredGeneratorUnet, self).__init__()
        c_in = 12 # 4*3 = 12
        c_out = 3
        self.c_in = c_in
        self.c_out = c_out
        self.bilinear = bilinear
        # Encoder path.
        self.inc = DoubleConv(self.c_in, 64)
        self.down1 = Down(64, 128)
        self.down2 = Down(128, 256)
        self.down3 = Down(256,512)
        # self.inter = DoubleConv(256, 512)
        # Decoder path; first Up argument is the concatenated channel count
        # (skip + upsampled), e.g. 768 = 256 + 512.
        self.up1 = Up(768, 512, 256, self.bilinear)
        self.up2 = Up(384,256,128, self.bilinear)
        self.up3 = Up(192,128,64, self.bilinear)
        self.output = BasicConv2d(64, self.c_out, kernel_size=3, padding=1)
    # @torchsnooper.snoop()
    def forward(self, x):
        # Encoder activations x1..x4 are kept for the skip connections.
        x1 = self.inc(x)
        x2 = self.down1(x1)
        x3 = self.down2(x2)
        x4 = self.down3(x3)
        # x4 = self.inter(x3)
        # import ipdb; ipdb.set_trace()
        x = self.up1(x4, x3)
        x = self.up2(x, x2)
        x = self.up3(x, x1)
        x = self.output(x)
        # return x
        return torch.tanh(x)
|
class Solution(object):
    def isHappy(self, n):
        """
        Return True if n is a happy number (repeated digit-square sums reach 1).

        :type n: int
        :rtype: bool
        """
        # A set gives O(1) cycle detection; the original list scan was O(k)
        # per iteration.
        seen = set()
        while n != 1 and n not in seen:
            seen.add(n)
            # Replace n with the sum of the squares of its decimal digits.
            n = sum(int(digit) ** 2 for digit in str(n))
        return n == 1
|
"""
Django settings for onlineCAL project.
Generated by 'django-admin startproject' using Django 2.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import os.path
from dotenv import load_dotenv
from django.contrib import messages
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# Load Environment variables from proj.env if the file exists (dev setups
# without it simply fall back to the process environment).
ENV_LOCATION = os.path.join(BASE_DIR, 'proj.env')
if os.path.exists(ENV_LOCATION):
    load_dotenv(ENV_LOCATION)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY CONFIGURATION
SECRET_FILE = os.path.normpath(os.path.join(BASE_DIR, 'SECRET.key'))
# Read the persisted secret key; generate and persist one on first run.
try:
    # FIX: use a context manager -- the original left the file handle open.
    with open(SECRET_FILE) as f:
        SECRET_KEY = f.read().strip()
except IOError:
    try:
        from django.utils.crypto import get_random_string
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!$%&()=+-_'
        SECRET_KEY = get_random_string(50, chars)
        with open(SECRET_FILE, 'w') as f:
            f.write(SECRET_KEY)
    except IOError:
        raise Exception('Could not open %s for writing!' % SECRET_FILE)
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Change the value to the production IP address.
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_filters',
    'rangefilter',
    'crispy_forms',
    'django_q',
    'onlineCAL',
    'booking_portal',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'onlineCAL.urls'
# Custom user model shared by the whole project.
AUTH_USER_MODEL = 'booking_portal.CustomUser'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'templates'),
            # Our custom auth package is not a Django app.
            os.path.join(BASE_DIR, 'auth', 'templates'),
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
# django-crispy-forms rendering configuration.
CRISPY_TEMPLATE_PACK = 'bootstrap4'
CRISPY_FAIL_SILENTLY = not DEBUG
WSGI_APPLICATION = 'onlineCAL.wsgi.application'
# Default database: SQLite file alongside the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Add the Bootstrap classes in addition to the default Django ones.
MESSAGE_TAGS = {
    messages.DEBUG: 'info alert-info',
    messages.INFO: 'info alert-info',
    messages.SUCCESS: 'success alert-success',
    messages.WARNING: 'warning alert-warning',
    messages.ERROR: 'error alert-danger',
}
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
LOGIN_REDIRECT_URL = '/'
LOGOUT_REDIRECT_URL = '/'
# FIX: removed a duplicate AUTH_USER_MODEL assignment here -- it is already
# set (to the same value) earlier in this file.
LOGIN_URL = '/auth/login/'
# Email: SMTP with STARTTLS on port 587.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
# SECURITY: the fallbacks below are development placeholders only; real
# credentials must come from the environment (proj.env).
EMAIL_HOST_USER = os.getenv('EMAIL_USERNAME', 'test@email.com')
EMAIL_HOST_PASSWORD = os.getenv('EMAIL_PASSWORD', 'test@123')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_USE_SSL = False
FILTERS_EMPTY_CHOICE_LABEL = 'Any'
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
# Django Q task-queue cluster configuration (backed by the default ORM DB).
Q_CLUSTER = {
    'workers': 4,
    'timeout': 90,
    'retry': 120,
    'queue_limit': 50,
    'bulk': 10,
    'orm': 'default'
}
# python
import string
from typing import Type, Set
# django
from django.utils.text import camel_case_to_spaces
# graphene
import graphene
from graphene.types.generic import GenericScalar
# graphene_django
from graphene_django import DjangoObjectType
# wagtail
from wagtail.core.fields import StreamField
from wagtail.core.models import Page as wagtailPage
# wagtail forms
from wagtail.contrib.forms.models import AbstractForm
# wagtail settings
from wagtail.contrib.settings.models import BaseSetting
# app
from .registry import registry
from .permissions import with_page_permissions
from .settings import url_prefix_for_site, RELAY
# app types
from .types import (
Page,
Settings,
FormError,
FormField,
)
def _add_form(cls: Type[AbstractForm], node: str, dict_params: dict) -> Type[graphene.Mutation]:
    """Register a GraphQL type and a submit Mutation for a wagtail form page.

    The page type (with a ``form_fields`` resolver) goes into
    ``registry.pages``; the generated Mutation goes into ``registry.forms``
    keyed by *node* and is also returned.
    """
    if node in registry.forms: # pragma: no cover
        return registry.forms[node]
    registry.page_prefetch_fields.add(cls.__name__.lower())
    dict_params['Meta'].interfaces += (Page,)
    dict_params['form_fields'] = graphene.List(FormField)
    def form_fields(self, _info):
        # Expose the wagtail form-field definitions as FormField objects.
        return list(FormField(name=field_.clean_name, field_type=field_.field_type,
                              label=field_.label, required=field_.required,
                              help_text=field_.help_text, choices=field_.choices,
                              default_value=field_.default_value)
                    for field_ in self.form_fields.all())
    dict_params['resolve_form_fields'] = form_fields
    registry.pages[cls] = type(node, (DjangoObjectType,), dict_params)
    args = type("Arguments", (), {'values': GenericScalar(),
                                  "url": graphene.String(required=True)})
    # Capture the node name for the closure below (dict_params is rebound).
    _node = node
    def mutate(_self, info, url, values):
        # Resolve the live page for this site-prefixed URL, honoring page
        # permissions for the requesting user.
        # NOTE(review): if no page matches, ``instance`` is None and
        # get_form() below raises AttributeError -- confirm intended.
        url_prefix = url_prefix_for_site(info)
        query = wagtailPage.objects.filter(url_path=url_prefix + url.rstrip('/') + '/')
        instance = with_page_permissions(
            info.context,
            query.specific()
        ).live().first()
        user = info.context.user
        # convert camelcase to dashes
        values = {camel_case_to_spaces(k).replace(' ', '-'): v for k, v in values.items()}
        form = instance.get_form(values, None, page=instance, user=user)
        if form.is_valid():
            # form_submission
            instance.process_form_submission(form)
            return registry.forms[_node](result="OK")
        else:
            return registry.forms[_node](result="FAIL", errors=[FormError(*err) for err in form.errors.items()])
    dict_params = {
        "Arguments": args,
        "mutate": mutate,
        "result": graphene.String(),
        "errors": graphene.List(FormError),
    }
    tp = type(node + "Mutation", (graphene.Mutation,), dict_params) # type: Type[graphene.Mutation]
    registry.forms[node] = tp
    return tp
def _add_page(cls: Type[wagtailPage], node: str, dict_params: dict) -> Type[DjangoObjectType]:
    """Create (or return the cached) GraphQL type for a wagtail Page subclass."""
    if cls in registry.pages:  # pragma: no cover
        return registry.pages[cls]
    registry.page_prefetch_fields.add(cls.__name__.lower())
    dict_params['Meta'].interfaces += (Page,)
    page_type: Type[DjangoObjectType] = type(node, (DjangoObjectType,), dict_params)
    registry.pages[cls] = page_type
    return page_type
def _add_setting(cls: Type[BaseSetting], node: str, dict_params: dict) -> Type[DjangoObjectType]:
    """Register a GraphQL type for a wagtail BaseSetting subclass."""
    # Every setting needs a ``name``; default it to the class name.
    if not hasattr(cls, 'name'):
        cls.name = cls.__name__
    dict_params['Meta'].interfaces += (Settings,)
    setting_type: Type[DjangoObjectType] = type(node, (DjangoObjectType,), dict_params)
    registry.settings[node] = (setting_type, cls)
    return setting_type
def _add_snippet(cls: type, node: str, dict_params: dict) -> Type[DjangoObjectType]:
    """Register a GraphQL type for a wagtail snippet model (cached by class)."""
    if cls in registry.snippets:  # pragma: no cover
        return registry.snippets[cls]
    snippet_type: Type[DjangoObjectType] = type(node, (DjangoObjectType,), dict_params)
    # Indexed both by model class and by GraphQL node name.
    registry.snippets[cls] = snippet_type
    registry.snippets_by_name[node] = snippet_type
    return snippet_type
def _add_django_model(_cls: type, node: str, dict_params: dict) -> Type[DjangoObjectType]:
    """Register a GraphQL type for a plain Django model (cached by node name)."""
    if node in registry.django:  # pragma: no cover
        return registry.django[node]
    model_type: Type[DjangoObjectType] = type(node, (DjangoObjectType,), dict_params)
    registry.django[node] = model_type
    return model_type
def _add_streamfields(cls: wagtailPage, node: str, dict_params: dict, app: str, prefix: str) -> None:
    """Add a GraphQL field + resolver to *dict_params* for each StreamField on *cls*.

    Each stream field gets a camel-cased type name derived from the node name
    and the field name, plus one block handler per child block type.
    """
    from .types.streamfield import (
        block_handler,
        stream_field_handler,
    )
    for field in cls._meta.fields:
        if isinstance(field, StreamField):
            field_name = field.name
            # e.g. node "BlogPage" + field "body_text" -> "BlogPageBodyText".
            stream_field_name = f"{node}{string.capwords(field_name, sep='_').replace('_', '')}"
            blocks = field.stream_block.child_blocks
            handlers = dict(
                (name, block_handler(block, app, prefix))
                for name, block in blocks.items()
            )
            f, resolve = stream_field_handler(
                stream_field_name,
                field_name,
                handlers
            )
            dict_params.update({
                field.name: f,
                "resolve_" + field.name: resolve
            })
def _register_model(registered: Set[type], cls: type, snippet: bool,
                    app: str, prefix: str, override_name=None) -> None:
    """Build and register the GraphQL type for *cls*, dispatching on its kind."""
    if cls in registered:
        return

    prefix = prefix.format(app=string.capwords(app), cls=cls.__name__)
    node = override_name or prefix + cls.__name__

    # Meta options for the DjangoObjectType created by the _add_* helpers.
    class Meta:
        model = cls
        interfaces = (graphene.relay.Node, ) if RELAY else tuple()

    dict_params = {'Meta': Meta}
    # Attach stream-field resolvers before creating the type.
    _add_streamfields(cls, node, dict_params, app, prefix)

    # Dispatch order matters: snippets first, then the most specific base
    # classes, falling back to a plain Django model.
    if snippet:
        _add_snippet(cls, node, dict_params)
    elif issubclass(cls, AbstractForm):
        _add_form(cls, node, dict_params)
    elif issubclass(cls, wagtailPage):
        _add_page(cls, node, dict_params)
    elif issubclass(cls, BaseSetting):
        _add_setting(cls, node, dict_params)
    else:
        _add_django_model(cls, node, dict_params)
    registered.add(cls)
def add_app(app: str, prefix: str = '{app}') -> None:
    """Register GraphQL types for every model (and snippet) in a Django app."""
    from django.contrib.contenttypes.models import ContentType
    from wagtail.snippets.models import get_snippet_models

    models = [ct.model_class()
              for ct in ContentType.objects.filter(app_label=app).all()]
    # Only snippets that actually belong to this app.
    snippets = [s for s in get_snippet_models() if s in models]
    to_register = [m for m in snippets + models if m is not None]
    # prefetch content_types
    ContentType.objects.get_for_models(*to_register)

    registered: Set = set()
    for cls in to_register:
        _register_model(registered, cls, cls in snippets, app, prefix)
def add_apps_with_settings(settings: dict) -> None:
    """Register GraphQL types for every app listed in *settings*.

    ``settings['APPS']`` is the list of app labels; ``settings['PREFIX']``
    is either a single prefix string or a per-app mapping. Logs a warning
    when no apps are configured.
    """
    apps = settings.get('APPS', [])
    # Hoisted out of the loop: the PREFIX setting does not change per app.
    prefixes = settings.get('PREFIX', {})
    for app in apps:
        if isinstance(prefixes, str):
            prefix = prefixes  # pragma: no cover
        else:
            prefix = prefixes.get(app, '{app}')
        add_app(app, prefix=prefix)
    if not apps:  # pragma: no cover
        import logging
        logging.warning("No APPS specified for wagtail_graphql")
def add_apps() -> None:
    """Register all apps configured in this package's SETTINGS."""
    from .settings import SETTINGS
    add_apps_with_settings(SETTINGS)
# standard page: register the wagtail core Page model itself as 'BasePage'.
_register_model(set(), wagtailPage, False, 'wagtailcore', '', override_name='BasePage')
|
import stripe
from stripe import error
from stripe import util
from stripe.api_resources import Customer
from stripe.six.moves.urllib.parse import quote_plus
from async_stripe.api_resources.abstract import patch_nested_resources
async def detach_patch(self, idempotency_key=None, **params):
    """Async replacement for stripe.Source.detach: detach this source from its customer."""
    token = util.utf8(self.id)
    # A source can only be detached while attached to a customer.
    if not (hasattr(self, "customer") and self.customer):
        raise error.InvalidRequestError(
            "Source %s does not appear to be currently attached "
            "to a customer object." % token,
            "id",
        )
    customer = util.utf8(self.customer)
    url = "%s/%s/sources/%s" % (
        Customer.class_url(),
        quote_plus(customer),
        quote_plus(token),
    )
    headers = util.populate_headers(idempotency_key)
    self.refresh_from(await self.request("delete", url, params, headers))
    return self
async def source_transactions_patch(self, **params):
    """source_transactions is deprecated, use Source.list_source_transactions instead."""
    # Thin async GET against this source's /source_transactions sub-resource.
    return await self.request(
        "get", self.instance_url() + "/source_transactions", params
    )
# Monkey-patch the async implementations onto stripe.Source and generate
# async accessors for its nested resources.
stripe.Source.detach = detach_patch
stripe.Source.source_transactions = source_transactions_patch
nested_resources = ["source_transaction"]
patch_nested_resources(stripe.Source, nested_resources)
import subprocess
def create_blast_database(fasta_str, save_path):
    """Write *fasta_str* to disk and build a nucleotide BLAST database from it.

    :param fasta_str: FASTA-formatted sequence data
    :param save_path: path prefix for the FASTA file and the database files
    :return: the database filename prefix (makeblastdb's -out value)
    """
    # BUG FIX: the original wrote the sequences to '<prefix>fasta.txt' but
    # pointed makeblastdb at '<prefix>db_fasta.txt', so the database was
    # built from a missing file. Both now use the same path.
    fasta_path = save_path + 'fasta.txt'
    with open(fasta_path, 'w') as f:
        f.write(fasta_str)
    database_filename = save_path + '_database'
    # creation of the makeblastdb command line
    makeblastdb_cline = subprocess.Popen(
        ['/ncbi-blast-2.7.1+/bin/makeblastdb.exe',
         '-in', fasta_path, '-parse_seqids', '-dbtype', 'nucl',
         '-out', database_filename],
        stdout=subprocess.PIPE)
    # calls the makeblastdb command line and waits for completion
    makeblastdb_cline.communicate()
    return database_filename
def blast_n(fasta_str, save_path, database_filename):
    """Run blastn (blastn-short task) for *fasta_str* against a database.

    :param fasta_str: FASTA-formatted query sequences
    :param save_path: path of the query file to write (must end in a 4-char
        extension such as '.txt'; the XML output swaps that for '.xml')
    :param database_filename: database prefix produced by create_blast_database
    :return: path of the XML (outfmt 5) results file
    """
    # Write the query; the with-block closes the file (the original also
    # called f.close() redundantly inside the with, now removed).
    with open(save_path, 'w') as f:
        f.write(fasta_str)
    # generate unique filename for query output ('.txt' -> '.xml')
    blast_output_file_path = save_path[:-4] + '.xml'
    # NOTE(review): this binary path is relative while create_blast_database
    # uses an absolute '/ncbi-blast-2.7.1+/...' path -- confirm which is intended.
    blastp_cline = subprocess.Popen(
        ['ncbi-blast-2.7.1+/bin/blastn.exe', '-task', 'blastn-short',
         '-out', blast_output_file_path, '-outfmt', '5',
         '-query', save_path, '-db', database_filename],
        stdout=subprocess.PIPE)
    # run blastn and wait for completion (debug prints removed)
    blastp_cline.communicate()
    return blast_output_file_path
|
import six
from ..interface import (ContractSyntaxError, describe_value,
ContractNotRespected)
from ..main import parse_contract_string, check_contracts
def check_contracts_ok(contract, value):
    """Check that *value* satisfies *contract* and the context is printable.

    Accepts a single contract string (auto-wrapped into one-element lists)
    or parallel lists of contracts and values.
    """
    if isinstance(contract, six.string_types):
        contract = [contract]
        value = [value]
    context = check_contracts(contract, value)
    assert isinstance(context, dict)
    # These bare expressions are intentional: they exercise the context's
    # __str__/__repr__ so formatting errors surface in this check.
    "%s" % context
    "%r" % context
def check_contracts_fail(contract, value, error=ContractNotRespected):
    """ Returns the exception raised by the (expected-to-fail) contract check. """
    if isinstance(contract, six.string_types):
        contract = [contract]
        value = [value]
    try:
        context = check_contracts(contract, value)
        # Reaching this point means the check unexpectedly succeeded.
        # FIX: the message previously read "would not not satisfy" (typo).
        msg = ('I was expecting that the values would not'
               ' satisfy the contract.\n')
        for v in value:
            msg += ' value: %s\n' % describe_value(v)
        for c in contract:
            cp = parse_contract_string(c)
            msg += ' contract: %r, parsed as %r (%s)\n' % (c, cp, cp)
        msg += ' context: %r\n' % context
        raise Exception(msg)
    except error as e:
        # Try generation of strings:
        s = "%r" % e # @UnusedVariable
        s = "%s" % e # @UnusedVariable
        return e
def check_syntax_fail(string):
    """Assert that *string* is rejected by the contract parser."""
    assert isinstance(string, six.string_types)
    try:
        parsed_contract = parse_contract_string(string)
    except ContractSyntaxError as e:
        # Expected path. Try generation of strings:
        s = "%r" % e # @UnusedVariable
        s = "%s" % e # @UnusedVariable
    else:
        # Parsing unexpectedly succeeded -- report what was parsed.
        msg = 'I would not expect to parse %r.' % string
        msg += ' contract: %s\n' % parsed_contract
        raise Exception(msg)
|
import os
import typing
import tarfile
import jk_pathpatternmatcher2
from .utils.ServiceInfo import ServiceInfo
from .utils.ServiceMgr import ServiceMgr
from .ThaniyaBackupContext import ThaniyaBackupContext
class ThaniyaService:
    """Static helpers that query, start, stop and restore system services via
    ServiceMgr, logging every step through a ThaniyaBackupContext."""
    @staticmethod
    def getServiceStatus(ctx:ThaniyaBackupContext, serviceName:str) -> ServiceInfo:
        """Return the ServiceInfo (pid and state) for *serviceName*, logging both."""
        assert isinstance(ctx, ThaniyaBackupContext)
        assert isinstance(serviceName, str)
        assert serviceName
        ctx = ctx.descend("Retrieving state of service " + repr(serviceName) + " ...")
        with ctx.log as nestedLog:
            ret = ServiceMgr.getStatus(serviceName)
            nestedLog.notice("PID: " + str(ret.pid))
            nestedLog.notice("State: " + ret.sState)
            return ret
    #
    @staticmethod
    def serviceStart(ctx:ThaniyaBackupContext, serviceName:str):
        """Start *serviceName*, logging the action in a nested context."""
        assert isinstance(ctx, ThaniyaBackupContext)
        assert isinstance(serviceName, str)
        assert serviceName
        ctx = ctx.descend("Starting service " + repr(serviceName) + " ...")
        with ctx.log as nestedLog:
            ServiceMgr.start(serviceName)
            nestedLog.notice("Service started.")
    #
    @staticmethod
    def serviceStop(ctx:ThaniyaBackupContext, serviceName:str):
        """Stop *serviceName*, logging the action in a nested context."""
        assert isinstance(ctx, ThaniyaBackupContext)
        assert isinstance(serviceName, str)
        assert serviceName
        ctx = ctx.descend("Stopping service: " + repr(serviceName) + " ...")
        with ctx.log as nestedLog:
            ServiceMgr.stop(serviceName)
            nestedLog.notice("Service stopped.")
    #
    @staticmethod
    def serviceRestore(ctx:ThaniyaBackupContext, serviceInfo:ServiceInfo):
        """Bring the service back to the running/stopped state recorded in *serviceInfo*.

        Compares the current state to the recorded target and starts or stops
        the service only when they differ.
        """
        assert isinstance(ctx, ThaniyaBackupContext)
        assert isinstance(serviceInfo, ServiceInfo)
        ctx = ctx.descend("Restoring service " + repr(serviceInfo.serviceName) + " to previous state " + repr(serviceInfo.sState) + "...")
        with ctx.log as nestedLog:
            currentState = ServiceMgr.getStatus(serviceInfo.serviceName)
            nestedLog.notice("Current state: " + currentState.sState)
            nestedLog.notice("Target state: " + serviceInfo.sState)
            if serviceInfo.isRunning:
                if currentState.isRunning:
                    nestedLog.notice("Nothing to do.")
                else:
                    nestedLog.notice("Starting service ...")
                    ServiceMgr.start(serviceInfo.serviceName)
                    nestedLog.notice("Service started.")
            else:
                if currentState.isRunning:
                    nestedLog.notice("Stopping service ...")
                    ServiceMgr.stop(serviceInfo.serviceName)
                    nestedLog.notice("Service stopped.")
                else:
                    nestedLog.notice("Nothing to do.")
    #
#
from typing import Optional, Union
import lldb
from pwndbg.bridge.interface import IDbg
def bridge_lldb() -> Optional[IDbg]:
    """Return an LldbImpl bridge when running inside lldb, else None.

    None is returned both when the lldb module is unavailable and when
    no debugger instance is active.
    """
    try:
        import lldb
    except ImportError:
        return None
    return LldbImpl() if lldb.debugger else None
class LldbImpl(IDbg):
    """lldb-backed implementation of the IDbg debugger-bridge interface."""

    # gdb commands that have an lldb replacement.  An empty string means
    # the command is a deliberate no-op under lldb.
    CMD_REDIRECT_MAPPING = {
        "set python print-stack full": "",
        "set python print-stack message": "",
    }

    def execute(self, cmd: str, /, from_tty: bool = False, to_string: bool = False) -> Optional[str]:
        """Execute the lldb redirect of *cmd* and return its output.

        Raises NotImplementedError for commands without an lldb mapping
        and RuntimeError when lldb reports a command failure.

        BUG FIX: the original raised NotImplementedError for every mapped
        command too (both mapping values are empty strings, so
        ``not MAPPING[cmd]`` was always true), and when it did proceed it
        ran the original gdb command instead of the redirect — making the
        mapping table useless.
        """
        if cmd not in self.CMD_REDIRECT_MAPPING:
            raise NotImplementedError(f"{cmd} is not implement in lldb version")
        redirected = self.CMD_REDIRECT_MAPPING[cmd]
        if not redirected:
            # Mapped to a no-op: nothing to run, nothing to return.
            return None
        interp: lldb.SBCommandInterpreter = lldb.debugger.GetCommandInterpreter()
        result: lldb.SBCommandReturnObject = lldb.SBCommandReturnObject()
        interp.HandleCommand(redirected, result)
        if result.Succeeded():
            return result.GetOutput()
        raise RuntimeError(f"FAIL: {cmd}. Error msg: {result.GetError()}")

    def read(self, addr: int, count: int, partial: bool = False) -> bytearray:
        # TODO: not implemented yet — currently returns None despite the
        # bytearray annotation.
        pass

    def write(self, addr: int, data: Union[str, bytes, bytearray]):
        # TODO: not implemented yet.
        pass
|
"""
This is an implementation of the Demetrescu, Italiano and Emiliozzi dynamic Potentially Uniform Paths algorithm (PUP)
http://www.dis.uniroma1.it/demetres/experim/dsp/
"""
###
#from LDSP.h
#this is a library function
class Heap:
    """Min-priority queue with decrease-key, as required by the PUP algorithm.

    Keys are arbitrary hashable objects; priorities are mutually comparable.
    Implemented with the standard ``heapq`` lazy-deletion recipe: a
    superseded entry stays in the heap but is marked stale and skipped
    when it reaches the top.

    FIX: the original class had comment-only method bodies, which is a
    SyntaxError — the module could not even be imported.
    """

    _REMOVED = object()  # sentinel marking a stale (superseded) heap entry

    def __init__(self):
        self._heap = []     # heapq-ordered entries: [priority, seq, key]
        self._entries = {}  # key -> its live entry in self._heap
        self._seq = 0       # insertion counter; tie-breaker so keys are never compared

    def __len__(self):
        # Number of live (non-stale) entries.
        return len(self._entries)

    def insert(self, key, priority):
        """Add *key* with *priority*; re-prioritize if it is already present."""
        import heapq
        if key in self._entries:
            self.decreaseKey(key, priority)
            return
        entry = [priority, self._seq, key]
        self._seq += 1
        self._entries[key] = entry
        heapq.heappush(self._heap, entry)

    def decreaseKey(self, key, priority):
        """Set a new (typically lower) priority for *key*."""
        import heapq
        old = self._entries.pop(key, None)
        if old is not None:
            old[2] = Heap._REMOVED  # lazy deletion: skip it on pop
        entry = [priority, self._seq, key]
        self._seq += 1
        self._entries[key] = entry
        heapq.heappush(self._heap, entry)

    def _purge(self):
        # Drop stale entries sitting at the top of the heap.
        import heapq
        while self._heap and self._heap[0][2] is Heap._REMOVED:
            heapq.heappop(self._heap)

    def findMin(self):
        """Return ``(key, priority)`` of the minimum entry, or None if empty."""
        self._purge()
        if not self._heap:
            return None
        priority, _, key = self._heap[0]
        return (key, priority)

    def extractMin(self):
        """Remove and return ``(key, priority)`` of the minimum entry.

        Raises IndexError when the heap is empty.
        """
        import heapq
        self._purge()
        if not self._heap:
            raise IndexError("extractMin from an empty heap")
        priority, _, key = heapq.heappop(self._heap)
        del self._entries[key]
        return (key, priority)
################################################################################
class LDSPPath:
    """Opaque path handle.

    All per-path attributes (l, r, start, end, cost, ...) are stored in the
    owning LDSP instance's maps, keyed by this object.  Instances should be
    created via LDSP.newPathVertex / newPathEdge / newPath, not directly.

    FIX: the original ``__init__`` contained only a comment, which is a
    SyntaxError — the module could not be imported.
    """

    def __init__(self):
        # Intentionally empty: the handle carries no state of its own.
        pass
################################################################################
class LDSP:
    """Dynamic Potentially Uniform Paths (PUP) all-pairs-shortest-paths
    structure (Demetrescu / Italiano / Emiliozzi).

    NOTE(review): this is an early sketch.  Several methods are still
    unimplemented and raise NotImplementedError.  FIXES applied to the
    original: all comment-only bodies, the bare ``for`` and ``w = ???``
    were SyntaxErrors; the per-path attribute maps were initialised as
    scalars (the author's own "these are wrong!" TODO) and then indexed;
    ``self.newpathEdge`` was a typo (AttributeError); ``self.P[u]`` was
    re-created inside the inner loop and ``self.P[u][v]`` was a no-op
    expression; ``buildPaths`` / ``heapInsert`` / ``extractMin`` were
    called unqualified; ``self.graph`` was read but never set.
    """

    def __init__(self):
        # Per-path attribute maps, keyed by LDSPPath handles.
        self.pi = {}       # vertex -> its trivial single-vertex path
        self.l = {}        # path -> left subpath (None for a vertex path)
        self.r = {}        # path -> right subpath
        self.start = {}    # path -> first vertex
        self.end = {}      # path -> last vertex
        self.first = {}    # path -> first edge (None for a vertex path)
        self.last = {}     # path -> last edge
        self.cost = {}     # path -> total weight
        self.sel = {}      # path -> "selected" flag
        self.GL = {}       # per-path auxiliary structures (TODO: maintain)
        self.GR = {}
        self.SL = {}
        self.SR = {}
        self.d = {}        # d[u][v] = shortest distance
        self.p = {}        # p[u][v] = shortest path handle
        self.P = {}        # P[u][v] = Heap of candidate paths
        self.Q = None      # global priority queue, created in buildPaths()
        self.graph = None  # graph handed to apsp()

    ################################################################################
    def newPathVertex(self, v):
        """Build and register a new path consisting of the single vertex *v*.

        @param v The vertex to add
        @returns The new path
        """
        pi = LDSPPath()
        self.l[pi] = None
        self.r[pi] = None
        self.start[pi] = v
        self.end[pi] = v
        self.first[pi] = None
        self.last[pi] = None
        self.cost[pi] = 0
        self.sel[pi] = True
        self.GL[pi] = {}
        self.GR[pi] = {}
        self.SL[pi] = {}
        self.SR[pi] = {}
        return pi

    ################################################################################
    def newPathEdge(self, e):
        """Build and register a new path from the single edge *e*.

        @param e The edge to add, as (u, v), (u, v, w) or NetworkX (u, v, data)
        @returns The new path
        """
        u = e[0]
        v = e[1]
        # Edge weight: accept (u, v, data-dict), (u, v, w) or plain (u, v).
        # Default of 1 for an unweighted edge — TODO confirm intended default.
        if len(e) > 2:
            w = e[2].get('weight', 1) if isinstance(e[2], dict) else e[2]
        else:
            w = 1
        pi = LDSPPath()
        self.l[pi] = self.pi[u]
        self.r[pi] = self.pi[v]
        self.start[pi] = u
        self.end[pi] = v
        self.first[pi] = e
        self.last[pi] = e
        self.cost[pi] = w
        self.sel[pi] = False
        self.GL[pi] = {}
        self.GR[pi] = {}
        self.SL[pi] = {}
        self.SR[pi] = {}
        # TODO: insert into the GL/GR/SL/SR bookkeeping structures.
        return pi

    ################################################################################
    def newPath(self, pi1, pi2):
        """Build a new path by concatenating two existing paths.

        @param pi1 First path
        @param pi2 Second path
        @pre r[pi1] == l[pi2], otherwise an error is printed and None returned
        @returns The new path, or None when the precondition fails
        """
        # Precondition check now uses the attribute maps (the original read
        # pi1.r / pi2.l, attributes that were never stored on the handles).
        if self.r[pi1] != self.l[pi2]:
            print("LDSPPath newPath(pi1,pi2) ERROR, paths not linked: ", pi1, pi2)
            return None
        pi = LDSPPath()
        self.l[pi] = pi1
        self.r[pi] = pi2
        self.start[pi] = self.start[pi1]
        self.end[pi] = self.end[pi2]
        self.first[pi] = self.first[pi1]
        self.last[pi] = self.last[pi2]
        self.cost[pi] = self.cost[pi1] + self.cost[pi2]  # TODO: verify cost rule vs. overlap
        self.sel[pi] = False
        self.GL[pi] = {}
        self.GR[pi] = {}
        self.SL[pi] = {}
        self.SR[pi] = {}
        # TODO: insert into the GL/GR/SL/SR bookkeeping structures.
        return pi

    ################################################################################
    def apsp(self, G):
        """Main entry point: compute all-pairs shortest paths on graph *G*.

        @param G A NetworkX-style graph exposing .nodes and .edges
        """
        self.graph = G  # the original read self.graph without ever setting it
        for u in G.nodes:
            self.pi[u] = self.newPathVertex(u)
            self.d[u] = {u: 0}          # map-of-maps: d[u][u] = 0
            self.p[u] = {u: self.pi[u]}
        # One candidate heap per ordered vertex pair.
        # WARNING: this is O(|V|^2) heaps — potentially 10^6 x 10^6.
        for u in G.nodes:
            self.P[u] = {}
            for v in G.nodes:
                if u != v:
                    self.P[u][v] = Heap()
        self.insertEdges(G.edges)
        self.buildPaths(G)

    ################################################################################
    def insertEdges(self, Eins):
        """Insert edges into the candidate-path structures.

        @param Eins Edges to insert (NetworkX edge view or iterable of tuples)
        """
        for e in Eins:
            u = e[0]
            v = e[1]
            pi = self.newPathEdge(e)
            self.P[u][v].insert(pi, self.cost[pi])

    ################################################################################
    def deleteEdges(self, Edel):
        """Delete edges from the structures.  TODO: not yet implemented."""
        raise NotImplementedError("LDSP.deleteEdges is not implemented yet")

    ################################################################################
    def buildPaths(self, G):
        """Build the shortest paths by repeatedly extracting the cheapest
        candidate pair from the global queue."""
        self.Q = Heap()
        self.initBuildPaths(G)
        while len(self.Q) > 0:
            # Queue keys are (u, v) vertex pairs; priority is the path cost.
            (u, v), _priority = self.Q.extractMin()
            self.newShortestPath(self.p[u][v])

    ################################################################################
    def initBuildPaths(self, G):
        """Seed the global queue for buildPaths.  TODO: not yet implemented."""
        raise NotImplementedError("LDSP.initBuildPaths is not implemented yet")

    ################################################################################
    def newShortestPath(self, pi):
        """Process a newly confirmed shortest path *pi*.  TODO: not implemented."""
        raise NotImplementedError("LDSP.newShortestPath is not implemented yet")

    ################################################################################
    def examine(self, pi):
        """Examine candidate path *pi*.  TODO: not yet implemented."""
        raise NotImplementedError("LDSP.examine is not implemented yet")
    ################################################################################
#up to here, the functions are all for static case of apsp
################################################################################
#from here onwards, the functions relate to edge insertions and deletions and weight updates
#from C code
# PUBLIC FUNCTION PROTOTYPES
#LDSP* LDSP_New (LGraph* inGraph, LEdgeInfo* inEdgeWeights);
#LDSP* LDSP_NewEmpty (ui2 inNumVertices);
#void LDSP_Delete (LDSP** AThis);
#ui2 LDSP_GetNumVertices (LDSP* This);
#ui4 LDSP_GetEdgeWeight (LDSP* This, ui2 inU, ui2 inV);
#void LDSP_UpdateEdge (LDSP* This, ui2 inU, ui2 inV, ui4 inW);
#ui4 LDSP_GetDist (LDSP* This, ui2 inX, ui2 inY);
#ui2 LDSP_GetLWit (LDSP* This, ui2 inX, ui2 inY);
#ui2 LDSP_GetRWit (LDSP* This, ui2 inX, ui2 inY);
#LDSP_TSetup LDSP_GetConfig (LDSP* This);
#void LDSP_SetConfig (LDSP* This, LDSP_TSetup inSetup);
#ui4 LDSP_GetUsedMem (LDSP* This);
#LDSP_TStat LDSP_GetStatistics (LDSP* This);
#void LDSP_Dump (LDSP* This);
|
from __future__ import absolute_import, division, print_function
import os
import pre_commit.main
# work around https://github.com/Homebrew/homebrew-core/issues/30445
os.environ.pop("__PYVENV_LAUNCHER__", None)
def main():
    """Run pre-commit's commit-stage hooks against the repo config.

    Returns pre-commit's integer exit status.
    """
    args = ["run", "--config", ".pre-commit-config.yaml", "--hook-stage", "commit"]
    return pre_commit.main.main(args)
if __name__ == "__main__":
    # Use SystemExit directly: the builtin exit() is injected by the site
    # module and is not guaranteed to exist (e.g. under `python -S`).
    raise SystemExit(main())
|
from .parse import extract_blog_from_XML
from . import parse
from . import latexwrite
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.